@esabook
Last active March 24, 2025 23:54
Resizable height x width of ZXing's scanner viewfinder
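The core change in this gist, as a minimal sketch (bindViewfinderToPreviewSize is a hypothetical helper name; it assumes the CameraManager below with its added getConfigManager()/getScreenResolution() accessors): push the preview view's measured size into the camera configuration, so the framing rect computed by CameraManager.getFramingRect() follows the view bounds instead of the full display size.

// Sketch only: resize the viewfinder/framing rect to match the preview view.
void bindViewfinderToPreviewSize(View previewView, CameraManager cameraManager) {
previewView.addOnLayoutChangeListener((v, left, top, right, bottom, oldLeft, oldTop, oldRight, oldBottom) -> {
if (cameraManager == null) return;
// Overwrite the configured "screen resolution" with the view's size; getFramingRect()
// derives the scan rectangle from this value, so it now scales with the view.
cameraManager.getConfigManager().getScreenResolution().set(v.getWidth(), v.getHeight());
});
}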
package ***;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.databinding.DataBindingUtil;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.os.Vibrator;
import android.provider.Settings;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AlertDialog;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.KeyEvent;
import android.view.MenuItem;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.DecodeHintType;
import com.google.zxing.Result;
import ***.R;
import ***.databinding.BatteryScanActivityBinding;
import ***.zxing.AmbientLightManager;
import ***.zxing.CaptureActivityHandler;
import ***.zxing.DecodeFormatManager;
import ***.zxing.InactivityTimer;
import ***.zxing.IntentSource;
import ***.zxing.Intents;
import ***.zxing.ViewfinderView;
import ***.zxing.camera.CameraManager;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
//import com.github.esabook.swaplock.zxing.BeepManager;
//import com.github.esabook.swaplock.zxing.result.ResultButtonListener;
//import com.github.esabook.swaplock.zxing.result.ResultHandlerFactory;
public class BatteryScanActivity extends BatterySwapActivity implements SurfaceHolder.Callback {
private static final String TAG = BatteryScanActivity.class.getSimpleName();
BatteryScanActivityBinding mBinding;
private CameraManager cameraManager;
private CaptureActivityHandler handler;
private Result savedResultToShow;
private ViewfinderView viewfinderView;
private Result lastResult;
private boolean hasSurface;
private IntentSource source;
private Collection<BarcodeFormat> decodeFormats = DecodeFormatManager.QR_CODE_FORMATS;
private Map<DecodeHintType, ?> decodeHints;
private String characterSet;
private InactivityTimer inactivityTimer;
private AmbientLightManager ambientLightManager;
public ViewfinderView getViewfinderView() {
return viewfinderView;
}
public Handler getHandler() {
return handler;
}
public CameraManager getCameraManager() {
return cameraManager;
}
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
Window window = getWindow();
window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
mBinding = DataBindingUtil.setContentView(this, R.layout.battery_scan_activity);
hasSurface = false;
inactivityTimer = new InactivityTimer(this);
ambientLightManager = new AmbientLightManager(this);
// button scan (next page)
mBinding.btnForScan.setOnClickListener(v -> {
Intent intent = new Intent(BatteryScanActivity.this, BatteryControlActivity.class);
intent.putExtra(BatteryControlActivity.BT_DEVICE_ADDRES, mBinding.mac.getText().toString());
startActivity(intent);
finish();
});
// back to list
mBinding.btnOpenList.setOnClickListener(v -> {
finish();
});
// text watcher
mBinding.mac.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {
}
@Override
public void afterTextChanged(Editable s) {
if (s.toString().matches("(^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$)")) {
Vibrator vibrator = (Vibrator) getSystemService(Context.VIBRATOR_SERVICE);
vibrator.vibrate(200);
inactivityTimer.onActivity();
stopScanner();
if (source == IntentSource.NATIVE_APP_INTENT) {
setResult(RESULT_CANCELED);
}
mBinding.btnForScan.setEnabled(true);
} else {
mBinding.btnForScan.setEnabled(false);
if ((source == IntentSource.NONE || source == IntentSource.ZXING_LINK) && lastResult != null) {
restartPreviewAfterDelay(0L);
}
}
}
});
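// Resize hook: whenever the preview view is laid out, push its size into the camera
// configuration so the viewfinder/framing rect follows the view bounds.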
mBinding.previewView.addOnLayoutChangeListener((v, left, top, right, bottom, oldLeft, oldTop, oldRight, oldBottom) -> {
if (cameraManager == null) return;
cameraManager.getConfigManager().getScreenResolution().set(v.getWidth(), v.getHeight());
});
getSupportActionBar().hide();
}
@Override
protected void onResume() {
super.onResume();
if (isCameraPermissionGranted()) {
startScanner();
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (isCameraPermissionGranted()) {
finish();
startActivity(new Intent(this, this.getClass()));
} else {
stopScanner();
}
}
@Override
protected void onPause() {
stopScanner();
super.onPause();
}
@Override
protected void onDestroy() {
inactivityTimer.shutdown();
super.onDestroy();
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
switch (keyCode) {
case KeyEvent.KEYCODE_FOCUS:
case KeyEvent.KEYCODE_CAMERA:
// Handle these events so they don't launch the Camera app
return true;
// Use volume up/down to turn on light
case KeyEvent.KEYCODE_VOLUME_DOWN:
cameraManager.setTorch(false);
return true;
case KeyEvent.KEYCODE_VOLUME_UP:
cameraManager.setTorch(true);
return true;
}
return super.onKeyDown(keyCode, event);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.addFlags(Intents.FLAG_NEW_DOC);
return true;
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (holder == null) {
Log.e(TAG, "*** WARNING *** surfaceCreated() gave us a null surface!");
}
if (!hasSurface) {
hasSurface = true;
initCamera(holder);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
hasSurface = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// do nothing
}
@Override
public void onPointerCaptureChanged(boolean hasCapture) {
}
boolean isCameraPermissionGranted() {
boolean shouldShowReqPermission = ActivityCompat.shouldShowRequestPermissionRationale(
this,
Manifest.permission.CAMERA);
boolean isNeedPermission = ActivityCompat.checkSelfPermission(this,
Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED;
// dialog request permission
if (isNeedPermission) {
if (shouldShowReqPermission) {
// manual storage permission
AlertDialog.Builder alert = new AlertDialog.Builder(this)
.setCancelable(false)
.setMessage("Enabled camera to able start QR scanner.");
alert.setPositiveButton("Open Setting", (dialog, which) -> {
Intent intent = new Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS,
Uri.parse("package:" + this.getPackageName()));
intent.addCategory(Intent.CATEGORY_DEFAULT);
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(intent);
});
alert.setNeutralButton("Turn On", (d, w) -> {
ActivityCompat.requestPermissions(
this,
new String[]{
Manifest.permission.CAMERA},
0);
});
alert.setNegativeButton("Later", (dialog, which) -> dialog.dismiss());
alert.create().show();
} else {
ActivityCompat.requestPermissions(
this,
new String[]{
Manifest.permission.CAMERA},
0);
}
}
return !isNeedPermission;
}
void startScanner() {
if (handler != null) return;
// CameraManager must be initialized here, not in onCreate(). This is necessary because we don't
// want to open the camera driver and measure the screen size if we're going to show the help on
// first launch. That led to bugs where the scanning rectangle was the wrong size and partially
// off screen.
cameraManager = new CameraManager(getApplication());
viewfinderView = findViewById(R.id.viewfinder_view);
viewfinderView.setCameraManager(cameraManager);
handler = null;
lastResult = null;
setRequestedOrientation(getCurrentOrientation());
resetStatusView();
ambientLightManager.start(cameraManager);
inactivityTimer.onResume();
source = IntentSource.NONE;
decodeFormats = null;
characterSet = null;
SurfaceView surfaceView = findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
if (hasSurface) {
// The activity was paused but not stopped, so the surface still exists. Therefore
// surfaceCreated() won't be called, so init the camera here.
initCamera(surfaceHolder);
} else {
// Install the callback and wait for surfaceCreated() to init the camera.
surfaceHolder.addCallback(this);
}
}
private int getCurrentOrientation() {
int rotation = getWindowManager().getDefaultDisplay().getRotation();
if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
switch (rotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_90:
return ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
default:
return ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
}
} else {
switch (rotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_270:
return ActivityInfo.SCREEN_ORIENTATION_PORTRAIT;
default:
return ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT;
}
}
}
void stopScanner() {
try {
if (handler != null) {
handler.quitSynchronously();
handler = null;
}
inactivityTimer.onPause();
ambientLightManager.stop();
// beepManager.close();
if (cameraManager != null)
cameraManager.closeDriver();
if (!hasSurface) {
SurfaceView surfaceView = findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
surfaceHolder.removeCallback(this);
}
} catch (Exception e) {
Log.e(TAG, "Error while stopping scanner", e);
}
}
private void decodeOrStoreSavedBitmap(Bitmap bitmap, Result result) {
// Bitmap isn't used yet -- will be used soon
if (handler == null) {
savedResultToShow = result;
} else {
if (result != null) {
savedResultToShow = result;
}
if (savedResultToShow != null) {
Message message = Message.obtain(handler, R.id.decode_succeeded, savedResultToShow);
handler.sendMessage(message);
}
savedResultToShow = null;
}
}
/**
* A valid barcode has been found, so give an indication of success and show the results.
*
* @param rawResult The contents of the barcode.
* @param scaleFactor amount by which thumbnail was scaled
* @param barcode A greyscale bitmap of the camera data which was decoded.
*/
public void handleDecode(Result rawResult, Bitmap barcode, float scaleFactor) {
lastResult = rawResult;
mBinding.mac.setText(lastResult.getText());
}
@SuppressLint("MissingPermission")
private void initCamera(SurfaceHolder surfaceHolder) {
if (surfaceHolder == null) {
throw new IllegalStateException("No SurfaceHolder provided");
}
if (cameraManager.isOpen()) {
Log.w(TAG, "initCamera() while already open -- late SurfaceView callback?");
return;
}
try {
cameraManager.openDriver(surfaceHolder);
// Creating the handler starts the preview, which can also throw a RuntimeException.
if (handler == null) {
handler = new CaptureActivityHandler(this, decodeFormats, decodeHints, characterSet, cameraManager);
}
decodeOrStoreSavedBitmap(null, null);
} catch (IOException ioe) {
Log.w(TAG, ioe);
} catch (RuntimeException e) {
// Barcode Scanner has seen crashes in the wild of this variety:
// java.lang.RuntimeException: Fail to connect to camera service
e.printStackTrace();
Log.w(TAG, "Unexpected error initializing camera", e);
}
}
public void restartPreviewAfterDelay(long delayMS) {
if (handler != null) {
handler.sendEmptyMessageDelayed(R.id.restart_preview, delayMS);
}
resetStatusView();
}
private void resetStatusView() {
viewfinderView.setVisibility(View.VISIBLE);
lastResult = null;
}
public void drawViewfinder() {
viewfinderView.drawViewfinder();
}
}
/*
* Copyright (C) 2012 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ***.zxing;
import android.content.Context;
import android.content.SharedPreferences;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.preference.PreferenceManager;
import ***.zxing.camera.CameraManager;
import ***.zxing.camera.FrontLightMode;
/**
* Detects ambient light and switches on the front light when very dark, and off again when sufficiently light.
*
* @author Sean Owen
* @author Nikolaus Huber
*/
public final class AmbientLightManager implements SensorEventListener {
private static final float TOO_DARK_LUX = 45.0f;
private static final float BRIGHT_ENOUGH_LUX = 450.0f;
private final Context context;
private CameraManager cameraManager;
private Sensor lightSensor;
public AmbientLightManager(Context context) {
this.context = context;
}
public void start(CameraManager cameraManager) {
this.cameraManager = cameraManager;
SharedPreferences sharedPrefs = PreferenceManager.getDefaultSharedPreferences(context);
if (FrontLightMode.readPref(sharedPrefs) == FrontLightMode.AUTO) {
SensorManager sensorManager = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
lightSensor = sensorManager.getDefaultSensor(Sensor.TYPE_LIGHT);
if (lightSensor != null) {
sensorManager.registerListener(this, lightSensor, SensorManager.SENSOR_DELAY_NORMAL);
}
}
}
public void stop() {
if (lightSensor != null) {
SensorManager sensorManager = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
sensorManager.unregisterListener(this);
cameraManager = null;
lightSensor = null;
}
}
@Override
public void onSensorChanged(SensorEvent sensorEvent) {
float ambientLightLux = sensorEvent.values[0];
if (cameraManager != null) {
if (ambientLightLux <= TOO_DARK_LUX) {
cameraManager.setTorch(true);
} else if (ambientLightLux >= BRIGHT_ENOUGH_LUX) {
cameraManager.setTorch(false);
}
}
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
// do nothing
}
}
/*
* Copyright (C) 2012 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ***.camera;
import android.content.Context;
import android.content.SharedPreferences;
import android.hardware.Camera;
import android.os.AsyncTask;
import android.preference.PreferenceManager;
import android.util.Log;
import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.RejectedExecutionException;
//import com.google.zxing.client.android.PreferencesActivity;
@SuppressWarnings("deprecation") // camera APIs
final class AutoFocusManager implements Camera.AutoFocusCallback {
private static final String TAG = AutoFocusManager.class.getSimpleName();
private static final long AUTO_FOCUS_INTERVAL_MS = 2000L;
private static final Collection<String> FOCUS_MODES_CALLING_AF;
static {
FOCUS_MODES_CALLING_AF = new ArrayList<>(2);
FOCUS_MODES_CALLING_AF.add(Camera.Parameters.FOCUS_MODE_AUTO);
FOCUS_MODES_CALLING_AF.add(Camera.Parameters.FOCUS_MODE_MACRO);
}
private final boolean useAutoFocus;
private final Camera camera;
private boolean stopped;
private boolean focusing;
private AsyncTask<?, ?, ?> outstandingTask;
AutoFocusManager(Context context, Camera camera) {
this.camera = camera;
SharedPreferences sharedPrefs = PreferenceManager.getDefaultSharedPreferences(context);
String currentFocusMode = camera.getParameters().getFocusMode();
useAutoFocus =
true /*sharedPrefs.getBoolean(PreferencesActivity.KEY_AUTO_FOCUS, true)*/ &&
FOCUS_MODES_CALLING_AF.contains(currentFocusMode);
Log.i(TAG, "Current focus mode '" + currentFocusMode + "'; use auto focus? " + useAutoFocus);
start();
}
@Override
public synchronized void onAutoFocus(boolean success, Camera theCamera) {
focusing = false;
autoFocusAgainLater();
}
private synchronized void autoFocusAgainLater() {
if (!stopped && outstandingTask == null) {
AutoFocusTask newTask = new AutoFocusTask();
try {
newTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
outstandingTask = newTask;
} catch (RejectedExecutionException ree) {
Log.w(TAG, "Could not request auto focus", ree);
}
}
}
synchronized void start() {
if (useAutoFocus) {
outstandingTask = null;
if (!stopped && !focusing) {
try {
camera.autoFocus(this);
focusing = true;
} catch (RuntimeException re) {
// Have heard RuntimeException reported in Android 4.0.x+; continue?
Log.w(TAG, "Unexpected exception while focusing", re);
// Try again later to keep cycle going
autoFocusAgainLater();
}
}
}
}
private synchronized void cancelOutstandingTask() {
if (outstandingTask != null) {
if (outstandingTask.getStatus() != AsyncTask.Status.FINISHED) {
outstandingTask.cancel(true);
}
outstandingTask = null;
}
}
synchronized void stop() {
stopped = true;
if (useAutoFocus) {
cancelOutstandingTask();
// Doesn't hurt to call this even if not focusing
try {
camera.cancelAutoFocus();
} catch (RuntimeException re) {
// Have heard RuntimeException reported in Android 4.0.x+; continue?
Log.w(TAG, "Unexpected exception while cancelling focusing", re);
}
}
}
private final class AutoFocusTask extends AsyncTask<Object, Object, Object> {
@Override
protected Object doInBackground(Object... voids) {
try {
Thread.sleep(AUTO_FOCUS_INTERVAL_MS);
} catch (InterruptedException e) {
// continue
}
start();
return null;
}
}
}
/*
* Copyright (C) 2010 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sales.evi.rinus.evisales.zxing.camera;
import android.content.Context;
import android.content.SharedPreferences;
import android.graphics.Point;
import android.hardware.Camera;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.WindowManager;
import com.sales.evi.rinus.evisales.zxing.camera.open.CameraFacing;
import com.sales.evi.rinus.evisales.zxing.camera.open.OpenCamera;
//import com.google.zxing.client.android.PreferencesActivity;
/**
* A class which deals with reading, parsing, and setting the camera parameters which are used to
* configure the camera hardware.
*/
@SuppressWarnings("deprecation") // camera APIs
public final class CameraConfigurationManager {
private static final String TAG = "CameraConfiguration";
private final Context context;
private int cwNeededRotation;
private int cwRotationFromDisplayToCamera;
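// Seeded with a placeholder size instead of the display size; BatteryScanActivity
// overwrites it with the preview view's layout size via getConfigManager().getScreenResolution().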
private Point screenResolution = new Point(1500, 1500);
private Point cameraResolution;
private Point bestPreviewSize;
private Point previewSizeOnScreen;
CameraConfigurationManager(Context context) {
this.context = context;
}
/**
* Reads, one time, values from the camera that are needed by the app.
*/
void initFromCameraParameters(OpenCamera camera) {
Camera.Parameters parameters = camera.getCamera().getParameters();
WindowManager manager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
Display display = manager.getDefaultDisplay();
int displayRotation = display.getRotation();
int cwRotationFromNaturalToDisplay;
switch (displayRotation) {
case Surface.ROTATION_0:
cwRotationFromNaturalToDisplay = 0;
break;
case Surface.ROTATION_90:
cwRotationFromNaturalToDisplay = 90;
break;
case Surface.ROTATION_180:
cwRotationFromNaturalToDisplay = 180;
break;
case Surface.ROTATION_270:
cwRotationFromNaturalToDisplay = 270;
break;
default:
// Have seen this return incorrect values like -90
if (displayRotation % 90 == 0) {
cwRotationFromNaturalToDisplay = (360 + displayRotation) % 360;
} else {
throw new IllegalArgumentException("Bad rotation: " + displayRotation);
}
}
Log.i(TAG, "Display at: " + cwRotationFromNaturalToDisplay);
int cwRotationFromNaturalToCamera = camera.getOrientation();
Log.i(TAG, "Camera at: " + cwRotationFromNaturalToCamera);
// Still not 100% sure about this. But acts like we need to flip this:
if (camera.getFacing() == CameraFacing.FRONT) {
cwRotationFromNaturalToCamera = (360 - cwRotationFromNaturalToCamera) % 360;
Log.i(TAG, "Front camera overriden to: " + cwRotationFromNaturalToCamera);
}
/*
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
String overrideRotationString;
if (camera.getFacing() == CameraFacing.FRONT) {
overrideRotationString = prefs.getString(PreferencesActivity.KEY_FORCE_CAMERA_ORIENTATION_FRONT, null);
} else {
overrideRotationString = prefs.getString(PreferencesActivity.KEY_FORCE_CAMERA_ORIENTATION, null);
}
if (overrideRotationString != null && !"-".equals(overrideRotationString)) {
Log.i(TAG, "Overriding camera manually to " + overrideRotationString);
cwRotationFromNaturalToCamera = Integer.parseInt(overrideRotationString);
}
*/
cwRotationFromDisplayToCamera =
(360 + cwRotationFromNaturalToCamera - cwRotationFromNaturalToDisplay) % 360;
Log.i(TAG, "Final display orientation: " + cwRotationFromDisplayToCamera);
if (camera.getFacing() == CameraFacing.FRONT) {
Log.i(TAG, "Compensating rotation for front camera");
cwNeededRotation = (360 - cwRotationFromDisplayToCamera) % 360;
} else {
cwNeededRotation = cwRotationFromDisplayToCamera;
}
Log.i(TAG, "Clockwise rotation from display to camera: " + cwNeededRotation);
// if (screenResolution == null) {
// Point theScreenResolution = new Point();
// if (desiredScreenResolution != null || desiredScreenResolution.x != 0 || desiredScreenResolution.y != 0) {
// theScreenResolution = desiredScreenResolution;
// } else {
// display.getSize(theScreenResolution);
// }
// screenResolution.set(theScreenResolution.x, screenResolution.y);
// }
Log.i(TAG, "Screen resolution in current orientation: " + screenResolution);
cameraResolution = CameraConfigurationUtils.findBestPreviewSizeValue(parameters, screenResolution);
Log.i(TAG, "Camera resolution: " + cameraResolution);
bestPreviewSize = CameraConfigurationUtils.findBestPreviewSizeValue(parameters, screenResolution);
Log.i(TAG, "Best available preview size: " + bestPreviewSize);
boolean isScreenPortrait = screenResolution.x < screenResolution.y;
boolean isPreviewSizePortrait = bestPreviewSize.x < bestPreviewSize.y;
if (isScreenPortrait == isPreviewSizePortrait) {
previewSizeOnScreen = bestPreviewSize;
} else {
previewSizeOnScreen = new Point(bestPreviewSize.y, bestPreviewSize.x);
}
Log.i(TAG, "Preview size on screen: " + previewSizeOnScreen);
}
void setDesiredCameraParameters(OpenCamera camera, boolean safeMode) {
Camera theCamera = camera.getCamera();
Camera.Parameters parameters = theCamera.getParameters();
if (parameters == null) {
Log.w(TAG, "Device error: no camera parameters are available. Proceeding without configuration.");
return;
}
Log.i(TAG, "Initial camera parameters: " + parameters.flatten());
if (safeMode) {
Log.w(TAG, "In camera config safe mode -- most settings will not be honored");
}
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
initializeTorch(parameters, prefs, safeMode);
CameraConfigurationUtils.setFocus(
parameters,
true,
true,
safeMode);
if (!safeMode) {
// if (prefs.getBoolean(PreferencesActivity.KEY_INVERT_SCAN, false)) {
// CameraConfigurationUtils.setInvertColor(parameters);
// }
// if (!prefs.getBoolean(PreferencesActivity.KEY_DISABLE_BARCODE_SCENE_MODE, true)) {
CameraConfigurationUtils.setBarcodeSceneMode(parameters);
// }
// if (!prefs.getBoolean(PreferencesActivity.KEY_DISABLE_METERING, true)) {
CameraConfigurationUtils.setVideoStabilization(parameters);
CameraConfigurationUtils.setFocusArea(parameters);
CameraConfigurationUtils.setMetering(parameters);
// }
// Setting the recording hint to true is also a workaround for low frame rate on Nexus 4:
//https://stackoverflow.com/questions/14131900/extreme-camera-lag-on-nexus-4
parameters.setRecordingHint(true);
}
parameters.setPreviewSize(bestPreviewSize.x, bestPreviewSize.y);
theCamera.setParameters(parameters);
theCamera.setDisplayOrientation(cwRotationFromDisplayToCamera);
Camera.Parameters afterParameters = theCamera.getParameters();
Camera.Size afterSize = afterParameters.getPreviewSize();
if (afterSize != null && (bestPreviewSize.x != afterSize.width || bestPreviewSize.y != afterSize.height)) {
Log.w(TAG, "Camera said it supported preview size " + bestPreviewSize.x + 'x' + bestPreviewSize.y +
", but after setting it, preview size is " + afterSize.width + 'x' + afterSize.height);
bestPreviewSize.x = afterSize.width;
bestPreviewSize.y = afterSize.height;
}
}
Point getBestPreviewSize() {
return bestPreviewSize;
}
Point getPreviewSizeOnScreen() {
return previewSizeOnScreen;
}
Point getCameraResolution() {
return cameraResolution;
}
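// Made public so the scan activity can override the effective screen resolution
// with the preview view's size (see BatteryScanActivity's layout-change listener).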
public Point getScreenResolution() {
return screenResolution;
}
int getCWNeededRotation() {
return cwNeededRotation;
}
boolean getTorchState(Camera camera) {
if (camera != null) {
Camera.Parameters parameters = camera.getParameters();
if (parameters != null) {
String flashMode = parameters.getFlashMode();
return
Camera.Parameters.FLASH_MODE_ON.equals(flashMode) ||
Camera.Parameters.FLASH_MODE_TORCH.equals(flashMode);
}
}
return false;
}
void setTorch(Camera camera, boolean newSetting) {
Camera.Parameters parameters = camera.getParameters();
doSetTorch(parameters, newSetting, false);
camera.setParameters(parameters);
}
private void initializeTorch(Camera.Parameters parameters, SharedPreferences prefs, boolean safeMode) {
boolean currentSetting = FrontLightMode.readPref(prefs) == FrontLightMode.ON;
doSetTorch(parameters, currentSetting, safeMode);
}
private void doSetTorch(Camera.Parameters parameters, boolean newSetting, boolean safeMode) {
CameraConfigurationUtils.setTorch(parameters, newSetting);
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
// if (!safeMode && !prefs.getBoolean(PreferencesActivity.KEY_DISABLE_EXPOSURE, true)) {
CameraConfigurationUtils.setBestExposure(parameters, newSetting);
// }
}
}
/*
* Copyright (C) 2014 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sales.evi.rinus.evisales.zxing.camera;
import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.Camera;
import android.os.Build;
import android.util.Log;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Pattern;
/**
* Utility methods for configuring the Android camera.
*
* @author Sean Owen
*/
@SuppressWarnings("deprecation") // camera APIs
public final class CameraConfigurationUtils {
private static final String TAG = "CameraConfiguration";
private static final Pattern SEMICOLON = Pattern.compile(";");
private static final int MIN_PREVIEW_PIXELS = 480 * 320; // normal screen
private static final float MAX_EXPOSURE_COMPENSATION = 1.5f;
private static final float MIN_EXPOSURE_COMPENSATION = 0.0f;
private static final double MAX_ASPECT_DISTORTION = 0.15;
private static final int MIN_FPS = 10;
private static final int MAX_FPS = 20;
private static final int AREA_PER_1000 = 400;
private CameraConfigurationUtils() {
}
public static void setFocus(Camera.Parameters parameters,
boolean autoFocus,
boolean disableContinuous,
boolean safeMode) {
List<String> supportedFocusModes = parameters.getSupportedFocusModes();
String focusMode = null;
if (autoFocus) {
if (safeMode || disableContinuous) {
focusMode = findSettableValue("focus mode",
supportedFocusModes,
Camera.Parameters.FOCUS_MODE_AUTO);
} else {
focusMode = findSettableValue("focus mode",
supportedFocusModes,
Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE,
Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO,
Camera.Parameters.FOCUS_MODE_AUTO);
}
}
// Maybe selected auto-focus but not available, so fall through here:
if (!safeMode && focusMode == null) {
focusMode = findSettableValue("focus mode",
supportedFocusModes,
Camera.Parameters.FOCUS_MODE_MACRO,
Camera.Parameters.FOCUS_MODE_EDOF);
}
if (focusMode != null) {
if (focusMode.equals(parameters.getFocusMode())) {
Log.i(TAG, "Focus mode already set to " + focusMode);
} else {
parameters.setFocusMode(focusMode);
}
}
}
public static void setTorch(Camera.Parameters parameters, boolean on) {
List<String> supportedFlashModes = parameters.getSupportedFlashModes();
String flashMode;
if (on) {
flashMode = findSettableValue("flash mode",
supportedFlashModes,
Camera.Parameters.FLASH_MODE_TORCH,
Camera.Parameters.FLASH_MODE_ON);
} else {
flashMode = findSettableValue("flash mode",
supportedFlashModes,
Camera.Parameters.FLASH_MODE_OFF);
}
if (flashMode != null) {
if (flashMode.equals(parameters.getFlashMode())) {
Log.i(TAG, "Flash mode already set to " + flashMode);
} else {
Log.i(TAG, "Setting flash mode to " + flashMode);
parameters.setFlashMode(flashMode);
}
}
}
public static void setBestExposure(Camera.Parameters parameters, boolean lightOn) {
int minExposure = parameters.getMinExposureCompensation();
int maxExposure = parameters.getMaxExposureCompensation();
float step = parameters.getExposureCompensationStep();
if ((minExposure != 0 || maxExposure != 0) && step > 0.0f) {
// Set low when light is on
float targetCompensation = lightOn ? MIN_EXPOSURE_COMPENSATION : MAX_EXPOSURE_COMPENSATION;
int compensationSteps = Math.round(targetCompensation / step);
float actualCompensation = step * compensationSteps;
// Clamp value:
compensationSteps = Math.max(Math.min(compensationSteps, maxExposure), minExposure);
if (parameters.getExposureCompensation() == compensationSteps) {
Log.i(TAG, "Exposure compensation already set to " + compensationSteps + " / " + actualCompensation);
} else {
Log.i(TAG, "Setting exposure compensation to " + compensationSteps + " / " + actualCompensation);
parameters.setExposureCompensation(compensationSteps);
}
} else {
Log.i(TAG, "Camera does not support exposure compensation");
}
}
public static void setBestPreviewFPS(Camera.Parameters parameters) {
setBestPreviewFPS(parameters, MIN_FPS, MAX_FPS);
}
public static void setBestPreviewFPS(Camera.Parameters parameters, int minFPS, int maxFPS) {
List<int[]> supportedPreviewFpsRanges = parameters.getSupportedPreviewFpsRange();
Log.i(TAG, "Supported FPS ranges: " + toString(supportedPreviewFpsRanges));
if (supportedPreviewFpsRanges != null && !supportedPreviewFpsRanges.isEmpty()) {
int[] suitableFPSRange = null;
for (int[] fpsRange : supportedPreviewFpsRanges) {
int thisMin = fpsRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
int thisMax = fpsRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
if (thisMin >= minFPS * 1000 && thisMax <= maxFPS * 1000) {
suitableFPSRange = fpsRange;
break;
}
}
if (suitableFPSRange == null) {
Log.i(TAG, "No suitable FPS range?");
} else {
int[] currentFpsRange = new int[2];
parameters.getPreviewFpsRange(currentFpsRange);
if (Arrays.equals(currentFpsRange, suitableFPSRange)) {
Log.i(TAG, "FPS range already set to " + Arrays.toString(suitableFPSRange));
} else {
Log.i(TAG, "Setting FPS range to " + Arrays.toString(suitableFPSRange));
parameters.setPreviewFpsRange(suitableFPSRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
suitableFPSRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
}
}
}
}
public static void setFocusArea(Camera.Parameters parameters) {
if (parameters.getMaxNumFocusAreas() > 0) {
Log.i(TAG, "Old focus areas: " + toString(parameters.getFocusAreas()));
List<Camera.Area> middleArea = buildMiddleArea(AREA_PER_1000);
Log.i(TAG, "Setting focus area to : " + toString(middleArea));
parameters.setFocusAreas(middleArea);
} else {
Log.i(TAG, "Device does not support focus areas");
}
}
public static void setMetering(Camera.Parameters parameters) {
if (parameters.getMaxNumMeteringAreas() > 0) {
Log.i(TAG, "Old metering areas: " + parameters.getMeteringAreas());
List<Camera.Area> middleArea = buildMiddleArea(AREA_PER_1000);
Log.i(TAG, "Setting metering area to : " + toString(middleArea));
parameters.setMeteringAreas(middleArea);
} else {
Log.i(TAG, "Device does not support metering areas");
}
}
private static List<Camera.Area> buildMiddleArea(int areaPer1000) {
return Collections.singletonList(
new Camera.Area(new Rect(-areaPer1000, -areaPer1000, areaPer1000, areaPer1000), 1));
}
public static void setVideoStabilization(Camera.Parameters parameters) {
if (parameters.isVideoStabilizationSupported()) {
if (parameters.getVideoStabilization()) {
Log.i(TAG, "Video stabilization already enabled");
} else {
Log.i(TAG, "Enabling video stabilization...");
parameters.setVideoStabilization(true);
}
} else {
Log.i(TAG, "This device does not support video stabilization");
}
}
public static void setBarcodeSceneMode(Camera.Parameters parameters) {
if (Camera.Parameters.SCENE_MODE_BARCODE.equals(parameters.getSceneMode())) {
Log.i(TAG, "Barcode scene mode already set");
return;
}
String sceneMode = findSettableValue("scene mode",
parameters.getSupportedSceneModes(),
Camera.Parameters.SCENE_MODE_BARCODE);
if (sceneMode != null) {
parameters.setSceneMode(sceneMode);
}
}
public static void setZoom(Camera.Parameters parameters, double targetZoomRatio) {
if (parameters.isZoomSupported()) {
Integer zoom = indexOfClosestZoom(parameters, targetZoomRatio);
if (zoom == null) {
return;
}
if (parameters.getZoom() == zoom) {
Log.i(TAG, "Zoom is already set to " + zoom);
} else {
Log.i(TAG, "Setting zoom to " + zoom);
parameters.setZoom(zoom);
}
} else {
Log.i(TAG, "Zoom is not supported");
}
}
private static Integer indexOfClosestZoom(Camera.Parameters parameters, double targetZoomRatio) {
List<Integer> ratios = parameters.getZoomRatios();
Log.i(TAG, "Zoom ratios: " + ratios);
int maxZoom = parameters.getMaxZoom();
if (ratios == null || ratios.isEmpty() || ratios.size() != maxZoom + 1) {
Log.w(TAG, "Invalid zoom ratios!");
return null;
}
double target100 = 100.0 * targetZoomRatio;
double smallestDiff = Double.POSITIVE_INFINITY;
int closestIndex = 0;
for (int i = 0; i < ratios.size(); i++) {
double diff = Math.abs(ratios.get(i) - target100);
if (diff < smallestDiff) {
smallestDiff = diff;
closestIndex = i;
}
}
Log.i(TAG, "Chose zoom ratio of " + (ratios.get(closestIndex) / 100.0));
return closestIndex;
}
public static void setInvertColor(Camera.Parameters parameters) {
if (Camera.Parameters.EFFECT_NEGATIVE.equals(parameters.getColorEffect())) {
Log.i(TAG, "Negative effect already set");
return;
}
String colorMode = findSettableValue("color effect",
parameters.getSupportedColorEffects(),
Camera.Parameters.EFFECT_NEGATIVE);
if (colorMode != null) {
parameters.setColorEffect(colorMode);
}
}
public static Point findBestPreviewSizeValue(Camera.Parameters parameters, Point screenResolution) {
List<Camera.Size> rawSupportedSizes = parameters.getSupportedPreviewSizes();
if (rawSupportedSizes == null) {
Log.w(TAG, "Device returned no supported preview sizes; using default");
Camera.Size defaultSize = parameters.getPreviewSize();
if (defaultSize == null) {
throw new IllegalStateException("Parameters contained no preview size!");
}
return new Point(defaultSize.width, defaultSize.height);
}
if (Log.isLoggable(TAG, Log.INFO)) {
StringBuilder previewSizesString = new StringBuilder();
for (Camera.Size size : rawSupportedSizes) {
previewSizesString.append(size.width).append('x').append(size.height).append(' ');
}
Log.i(TAG, "Supported preview sizes: " + previewSizesString);
}
double screenAspectRatio = screenResolution.x / (double) screenResolution.y;
// Find a suitable size, with max resolution
int maxResolution = 0;
Camera.Size maxResPreviewSize = null;
for (Camera.Size size : rawSupportedSizes) {
int realWidth = size.width;
int realHeight = size.height;
int resolution = realWidth * realHeight;
if (resolution < MIN_PREVIEW_PIXELS) {
continue;
}
boolean isCandidatePortrait = realWidth < realHeight;
int maybeFlippedWidth = isCandidatePortrait ? realHeight : realWidth;
int maybeFlippedHeight = isCandidatePortrait ? realWidth : realHeight;
double aspectRatio = maybeFlippedWidth / (double) maybeFlippedHeight;
double distortion = Math.abs(aspectRatio - screenAspectRatio);
if (distortion > MAX_ASPECT_DISTORTION) {
continue;
}
if (maybeFlippedWidth == screenResolution.x && maybeFlippedHeight == screenResolution.y) {
Point exactPoint = new Point(realWidth, realHeight);
Log.i(TAG, "Found preview size exactly matching screen size: " + exactPoint);
return exactPoint;
}
// Resolution is suitable; record the one with max resolution
if (resolution > maxResolution) {
maxResolution = resolution;
maxResPreviewSize = size;
}
}
// If no exact match, use largest preview size. This was not a great idea on older devices because
// of the additional computation needed. We're likely to get here on newer Android 4+ devices, where
// the CPU is much more powerful.
if (maxResPreviewSize != null) {
Point largestSize = new Point(maxResPreviewSize.width, maxResPreviewSize.height);
Log.i(TAG, "Using largest suitable preview size: " + largestSize);
return largestSize;
}
// If there is nothing at all suitable, return current preview size
Camera.Size defaultPreview = parameters.getPreviewSize();
if (defaultPreview == null) {
throw new IllegalStateException("Parameters contained no preview size!");
}
Point defaultSize = new Point(defaultPreview.width, defaultPreview.height);
Log.i(TAG, "No suitable preview sizes, using default: " + defaultSize);
return defaultSize;
}
private static String findSettableValue(String name,
Collection<String> supportedValues,
String... desiredValues) {
Log.i(TAG, "Requesting " + name + " value from among: " + Arrays.toString(desiredValues));
Log.i(TAG, "Supported " + name + " values: " + supportedValues);
if (supportedValues != null) {
for (String desiredValue : desiredValues) {
if (supportedValues.contains(desiredValue)) {
Log.i(TAG, "Can set " + name + " to: " + desiredValue);
return desiredValue;
}
}
}
Log.i(TAG, "No supported values match");
return null;
}
private static String toString(Collection<int[]> arrays) {
if (arrays == null || arrays.isEmpty()) {
return "[]";
}
StringBuilder buffer = new StringBuilder();
buffer.append('[');
Iterator<int[]> it = arrays.iterator();
while (it.hasNext()) {
buffer.append(Arrays.toString(it.next()));
if (it.hasNext()) {
buffer.append(", ");
}
}
buffer.append(']');
return buffer.toString();
}
private static String toString(Iterable<Camera.Area> areas) {
if (areas == null) {
return null;
}
StringBuilder result = new StringBuilder();
for (Camera.Area area : areas) {
result.append(area.rect).append(':').append(area.weight).append(' ');
}
return result.toString();
}
public static String collectStats(Camera.Parameters parameters) {
return collectStats(parameters.flatten());
}
public static String collectStats(CharSequence flattenedParams) {
StringBuilder result = new StringBuilder(1000);
result.append("BOARD=").append(Build.BOARD).append('\n');
result.append("BRAND=").append(Build.BRAND).append('\n');
result.append("CPU_ABI=").append(Build.CPU_ABI).append('\n');
result.append("DEVICE=").append(Build.DEVICE).append('\n');
result.append("DISPLAY=").append(Build.DISPLAY).append('\n');
result.append("FINGERPRINT=").append(Build.FINGERPRINT).append('\n');
result.append("HOST=").append(Build.HOST).append('\n');
result.append("ID=").append(Build.ID).append('\n');
result.append("MANUFACTURER=").append(Build.MANUFACTURER).append('\n');
result.append("MODEL=").append(Build.MODEL).append('\n');
result.append("PRODUCT=").append(Build.PRODUCT).append('\n');
result.append("TAGS=").append(Build.TAGS).append('\n');
result.append("TIME=").append(Build.TIME).append('\n');
result.append("TYPE=").append(Build.TYPE).append('\n');
result.append("USER=").append(Build.USER).append('\n');
result.append("VERSION.CODENAME=").append(Build.VERSION.CODENAME).append('\n');
result.append("VERSION.INCREMENTAL=").append(Build.VERSION.INCREMENTAL).append('\n');
result.append("VERSION.RELEASE=").append(Build.VERSION.RELEASE).append('\n');
result.append("VERSION.SDK_INT=").append(Build.VERSION.SDK_INT).append('\n');
if (flattenedParams != null) {
String[] params = SEMICOLON.split(flattenedParams);
Arrays.sort(params);
for (String param : params) {
result.append(param).append('\n');
}
}
return result.toString();
}
}
/*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sales.evi.rinus.evisales.zxing.camera;
import android.Manifest;
import android.content.Context;
import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.Camera;
import android.os.Handler;
import android.support.annotation.RequiresPermission;
import android.util.Log;
import android.view.SurfaceHolder;
import com.google.zxing.PlanarYUVLuminanceSource;
import com.sales.evi.rinus.evisales.zxing.camera.open.OpenCamera;
import com.sales.evi.rinus.evisales.zxing.camera.open.OpenCameraInterface;
import java.io.IOException;
/**
* This object wraps the Camera service object and expects to be the only one talking to it. The
* implementation encapsulates the steps needed to take preview-sized images, which are used for
* both preview and decoding.
*
* @author dswitkin@google.com (Daniel Switkin)
*/
@SuppressWarnings("deprecation") // camera APIs
public final class CameraManager {
private static final String TAG = CameraManager.class.getSimpleName();
private static final int MIN_FRAME_WIDTH = 240;
private static final int MIN_FRAME_HEIGHT = 240;
private static final int MAX_FRAME_WIDTH = 1200; // = 5/8 * 1920
private static final int MAX_FRAME_HEIGHT = 675; // = 5/8 * 1080
private final Context context;
private final CameraConfigurationManager configManager;
/**
* Preview frames are delivered here, which we pass on to the registered handler. Make sure to
* clear the handler so it will only receive one message.
*/
private final PreviewCallback previewCallback;
private OpenCamera camera;
private AutoFocusManager autoFocusManager;
private Rect framingRect;
private Rect framingRectInPreview;
private boolean initialized;
private boolean previewing;
private int requestedCameraId = OpenCameraInterface.NO_REQUESTED_CAMERA;
private int requestedFramingRectWidth;
private int requestedFramingRectHeight;
public CameraManager(Context context) {
this.context = context;
this.configManager = new CameraConfigurationManager(context);
previewCallback = new PreviewCallback(configManager);
}
private static int findDesiredDimensionInRange(int resolution, int hardMin, int hardMax) {
int dim = 5 * resolution / 8; // Target 5/8 of each dimension
if (dim < hardMin) {
return hardMin;
}
if (dim > hardMax) {
return hardMax;
}
return dim;
}
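// Accessor added in this gist: lets the activity reach CameraConfigurationManager and
// adjust the screen resolution (and thus the framing rect) at runtime.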
public CameraConfigurationManager getConfigManager() {
return configManager;
}
/**
* Opens the camera driver and initializes the hardware parameters.
*
* @param holder The surface object which the camera will draw preview frames into.
* @throws IOException Indicates the camera driver failed to open.
*/
@RequiresPermission(Manifest.permission.CAMERA)
public synchronized void openDriver(SurfaceHolder holder) throws IOException {
OpenCamera theCamera = camera;
if (theCamera == null) {
theCamera = OpenCameraInterface.open(requestedCameraId);
if (theCamera == null) {
throw new IOException("Camera.open() failed to return object from driver");
}
camera = theCamera;
}
if (!initialized) {
initialized = true;
configManager.initFromCameraParameters(theCamera);
if (requestedFramingRectWidth > 0 && requestedFramingRectHeight > 0) {
// configManager.getScreenResolution().set(requestedFramingRectWidth, requestedFramingRectHeight);
setManualFramingRect(requestedFramingRectWidth, requestedFramingRectHeight);
requestedFramingRectWidth = 0;
requestedFramingRectHeight = 0;
}
}
Camera cameraObject = theCamera.getCamera();
Camera.Parameters parameters = cameraObject.getParameters();
String parametersFlattened = parameters == null ? null : parameters.flatten(); // Save these, temporarily
try {
configManager.setDesiredCameraParameters(theCamera, false);
} catch (RuntimeException re) {
// Driver failed
Log.w(TAG, "Camera rejected parameters. Setting only minimal safe-mode parameters");
Log.i(TAG, "Resetting to saved camera params: " + parametersFlattened);
// Reset:
if (parametersFlattened != null) {
parameters = cameraObject.getParameters();
parameters.unflatten(parametersFlattened);
try {
cameraObject.setParameters(parameters);
configManager.setDesiredCameraParameters(theCamera, true);
} catch (RuntimeException re2) {
// Well, darn. Give up
Log.w(TAG, "Camera rejected even safe-mode parameters! No configuration");
}
}
}
cameraObject.setPreviewDisplay(holder);
}
public synchronized boolean isOpen() {
return camera != null;
}
/**
* Closes the camera driver if still in use.
*/
public synchronized void closeDriver() {
if (camera != null) {
camera.getCamera().release();
camera = null;
// Make sure to clear these each time we close the camera, so that any scanning rect
// requested by intent is forgotten.
framingRect = null;
framingRectInPreview = null;
}
}
/**
* Asks the camera hardware to begin drawing preview frames to the screen.
*/
public synchronized void startPreview() {
OpenCamera theCamera = camera;
if (theCamera != null && !previewing) {
theCamera.getCamera().startPreview();
previewing = true;
autoFocusManager = new AutoFocusManager(context, theCamera.getCamera());
}
}
/**
* Tells the camera to stop drawing preview frames.
*/
public synchronized void stopPreview() {
if (autoFocusManager != null) {
autoFocusManager.stop();
autoFocusManager = null;
}
if (camera != null && previewing) {
camera.getCamera().stopPreview();
previewCallback.setHandler(null, 0);
previewing = false;
}
}
/**
* Convenience method for {@link com.google.zxing.client.android.CaptureActivity}
*
* @param newSetting if {@code true}, light should be turned on if currently off. And vice versa.
*/
public synchronized void setTorch(boolean newSetting) {
OpenCamera theCamera = camera;
if (theCamera != null && newSetting != configManager.getTorchState(theCamera.getCamera())) {
boolean wasAutoFocusManager = autoFocusManager != null;
if (wasAutoFocusManager) {
autoFocusManager.stop();
autoFocusManager = null;
}
configManager.setTorch(theCamera.getCamera(), newSetting);
if (wasAutoFocusManager) {
autoFocusManager = new AutoFocusManager(context, theCamera.getCamera());
autoFocusManager.start();
}
}
}
/**
* A single preview frame will be returned to the handler supplied. The data will arrive as byte[]
* in the message.obj field, with width and height encoded as message.arg1 and message.arg2,
* respectively.
*
* @param handler The handler to send the message to.
* @param message The what field of the message to be sent.
*/
public synchronized void requestPreviewFrame(Handler handler, int message) {
OpenCamera theCamera = camera;
if (theCamera != null && previewing) {
previewCallback.setHandler(handler, message);
theCamera.getCamera().setOneShotPreviewCallback(previewCallback);
}
}
/**
* Calculates the framing rect which the UI should draw to show the user where to place the
* barcode. This target helps with alignment as well as forces the user to hold the device
* far enough away to ensure the image will be in focus.
*
* @return The rectangle to draw on screen in window coordinates.
*/
public synchronized Rect getFramingRect() {
if (framingRect == null) {
if (camera == null) {
return null;
}
Point screenResolution = configManager.getScreenResolution();
if (screenResolution == null) {
// Called early, before init even finished
return null;
}
int width = findDesiredDimensionInRange(screenResolution.x, MIN_FRAME_WIDTH, MAX_FRAME_WIDTH);
int height = findDesiredDimensionInRange(screenResolution.y, MIN_FRAME_HEIGHT, MAX_FRAME_HEIGHT);
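// screenResolution comes from CameraConfigurationManager; in this gist it is overridden
// with the preview view's layout size, so the framing rect tracks the view rather than the display.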
int leftOffset = (screenResolution.x - width) / 2;
int topOffset = (screenResolution.y - height) / 2;
framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
Log.d(TAG, "Calculated framing rect: " + framingRect);
}
return framingRect;
}
/**
* Like {@link #getFramingRect} but coordinates are in terms of the preview frame,
* not UI / screen.
*
* @return {@link Rect} expressing barcode scan area in terms of the preview size
*/
public synchronized Rect getFramingRectInPreview() {
if (framingRectInPreview == null) {
Rect framingRect = getFramingRect();
if (framingRect == null) {
return null;
}
Rect rect = new Rect(framingRect);
Point cameraResolution = configManager.getCameraResolution();
Point screenResolution = configManager.getScreenResolution();
if (cameraResolution == null || screenResolution == null) {
// Called early, before init even finished
return null;
}
rect.left = rect.left * cameraResolution.x / screenResolution.x;
rect.right = rect.right * cameraResolution.x / screenResolution.x;
rect.top = rect.top * cameraResolution.y / screenResolution.y;
rect.bottom = rect.bottom * cameraResolution.y / screenResolution.y;
framingRectInPreview = rect;
}
return framingRectInPreview;
}
/**
* Allows third party apps to specify the camera ID, rather than determine
* it automatically based on available cameras and their orientation.
*
* @param cameraId camera ID of the camera to use. A negative value means "no preference".
*/
public synchronized void setManualCameraId(int cameraId) {
requestedCameraId = cameraId;
}
/**
* Allows third party apps to specify the scanning rectangle dimensions, rather than determine
* them automatically based on screen resolution.
*
* @param width The width in pixels to scan.
* @param height The height in pixels to scan.
*/
public synchronized void setManualFramingRect(int width, int height) {
if (initialized) {
Point screenResolution = configManager.getScreenResolution();
if (width > screenResolution.x) {
width = screenResolution.x;
}
if (height > screenResolution.y) {
height = screenResolution.y;
}
int leftOffset = (screenResolution.x - width) / 2;
int topOffset = (screenResolution.y - height) / 2;
framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
Log.d(TAG, "Calculated manual framing rect: " + framingRect);
framingRectInPreview = null;
} else {
requestedFramingRectWidth = width;
requestedFramingRectHeight = height;
}
}
/**
* A factory method to build the appropriate LuminanceSource object based on the format
* of the preview buffers, as described by Camera.Parameters.
*
* @param data A preview frame.
* @param width The width of the image.
* @param height The height of the image.
* @return A PlanarYUVLuminanceSource instance.
*/
public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
Rect rect = getFramingRectInPreview();
if (rect == null) {
return null;
}
// Go ahead and assume it's YUV rather than die.
return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
rect.width(), rect.height(), false);
}
}
/*
* Copyright (C) 2012 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sales.evi.rinus.evisales.zxing.camera;
import android.content.SharedPreferences;
//import com.google.zxing.client.android.PreferencesActivity;
/**
* Enumerates settings of the preference controlling the front light.
*/
public enum FrontLightMode {
/**
* Always on.
*/
ON,
/**
* On only when ambient light is low.
*/
AUTO,
/**
* Always off.
*/
OFF;
private static FrontLightMode parse(String modeString) {
return modeString == null ? OFF : valueOf(modeString);
}
public static FrontLightMode readPref(SharedPreferences sharedPrefs) {
return parse(OFF.toString()); // parse(sharedPrefs.getString(PreferencesActivity.KEY_FRONT_LIGHT_MODE, OFF.toString()));
}
}
/*
* Copyright (C) 2015 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sales.evi.rinus.evisales.zxing.camera.open;
/**
* Enumeration of directions a camera may face: front or back.
*/
public enum CameraFacing {
BACK, // must be value 0!
FRONT, // must be value 1!
}
/*
* Copyright (C) 2015 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sales.evi.rinus.evisales.zxing.camera.open;
import android.hardware.Camera;
/**
* Represents an open {@link Camera} and its metadata, like facing direction and orientation.
*/
@SuppressWarnings("deprecation") // camera APIs
public final class OpenCamera {
private final int index;
private final Camera camera;
private final CameraFacing facing;
private final int orientation;
public OpenCamera(int index, Camera camera, CameraFacing facing, int orientation) {
this.index = index;
this.camera = camera;
this.facing = facing;
this.orientation = orientation;
}
public Camera getCamera() {
return camera;
}
public CameraFacing getFacing() {
return facing;
}
public int getOrientation() {
return orientation;
}
@Override
public String toString() {
return "Camera #" + index + " : " + facing + ',' + orientation;
}
}
/*
* Copyright (C) 2012 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sales.evi.rinus.evisales.zxing.camera.open;
import android.hardware.Camera;
import android.util.Log;
/**
* Abstraction over the {@link Camera} API that helps open them and return their metadata.
*/
@SuppressWarnings("deprecation") // camera APIs
public final class OpenCameraInterface {
/**
* For {@link #open(int)}, means no preference for which camera to open.
*/
public static final int NO_REQUESTED_CAMERA = -1;
private static final String TAG = OpenCameraInterface.class.getName();
private OpenCameraInterface() {
}
/**
* Opens the requested camera with {@link Camera#open(int)}, if one exists.
*
* @param cameraId camera ID of the camera to use. A negative value
* or {@link #NO_REQUESTED_CAMERA} means "no preference", in which case a rear-facing
* camera is returned if possible or else any camera
* @return handle to {@link OpenCamera} that was opened
*/
public static OpenCamera open(int cameraId) {
int numCameras = Camera.getNumberOfCameras();
if (numCameras == 0) {
Log.w(TAG, "No cameras!");
return null;
}
if (cameraId >= numCameras) {
Log.w(TAG, "Requested camera does not exist: " + cameraId);
return null;
}
if (cameraId <= NO_REQUESTED_CAMERA) {
cameraId = 0;
while (cameraId < numCameras) {
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, cameraInfo);
if (CameraFacing.values()[cameraInfo.facing] == CameraFacing.BACK) {
break;
}
cameraId++;
}
if (cameraId == numCameras) {
Log.i(TAG, "No camera facing " + CameraFacing.BACK + "; returning camera #0");
cameraId = 0;
}
}
Log.i(TAG, "Opening camera #" + cameraId);
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, cameraInfo);
Camera camera = Camera.open(cameraId);
if (camera == null) {
return null;
}
return new OpenCamera(cameraId,
camera,
CameraFacing.values()[cameraInfo.facing],
cameraInfo.orientation);
}
}
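// Illustrative usage sketch (added here for clarity; not part of the original gist).
// It shows the typical call into OpenCameraInterface, which prefers a rear-facing camera
// and falls back to camera #0 when none exists. The class and method names are assumptions;
// the sketch assumes the same package as OpenCameraInterface.
final class OpenCameraUsageSketch {
    private OpenCameraUsageSketch() {
    }
    static OpenCamera openDefaultCamera() {
        // NO_REQUESTED_CAMERA means "no preference"; a rear-facing camera is preferred.
        OpenCamera openCamera = OpenCameraInterface.open(OpenCameraInterface.NO_REQUESTED_CAMERA);
        if (openCamera == null) {
            // Either no camera is present or the requested camera ID does not exist.
            return null;
        }
        // getOrientation() on the result is used later to rotate the preview for the display.
        return openCamera;
    }
}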
/*
* Copyright (C) 2010 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ***.zxing.camera;
import android.graphics.Point;
import android.hardware.Camera;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
@SuppressWarnings("deprecation") // camera APIs
final class PreviewCallback implements Camera.PreviewCallback {
private static final String TAG = PreviewCallback.class.getSimpleName();
private final CameraConfigurationManager configManager;
private Handler previewHandler;
private int previewMessage;
PreviewCallback(CameraConfigurationManager configManager) {
this.configManager = configManager;
}
void setHandler(Handler previewHandler, int previewMessage) {
this.previewHandler = previewHandler;
this.previewMessage = previewMessage;
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Point cameraResolution = configManager.getCameraResolution();
Handler thePreviewHandler = previewHandler;
if (cameraResolution != null && thePreviewHandler != null) {
Message message = thePreviewHandler.obtainMessage(previewMessage, cameraResolution.x,
cameraResolution.y, data);
message.sendToTarget();
previewHandler = null;
} else {
Log.d(TAG, "Got preview callback, but no handler or resolution available");
}
}
}
/*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ***.zxing;
import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.provider.Browser;
import android.util.Log;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.DecodeHintType;
import com.google.zxing.Result;
import ***.R;
import ***.activity.BatteryScanActivity;
import ***.zxing.camera.CameraManager;
import java.util.Collection;
import java.util.Map;
//import com.google.zxing.client.android.camera.CameraManager;
/**
* This class handles all the messaging which comprises the state machine for capture.
*
* @author [email protected] (Daniel Switkin)
*/
public final class CaptureActivityHandler extends Handler {
private static final String TAG = CaptureActivityHandler.class.getSimpleName();
private final BatteryScanActivity activity;
private final DecodeThread decodeThread;
private final CameraManager cameraManager;
private State state;
public CaptureActivityHandler(BatteryScanActivity activity,
Collection<BarcodeFormat> decodeFormats,
Map<DecodeHintType, ?> baseHints,
String characterSet,
CameraManager cameraManager) {
this.activity = activity;
decodeThread = new DecodeThread(activity, decodeFormats, baseHints, characterSet,
new ViewfinderResultPointCallback(activity.getViewfinderView()));
decodeThread.start();
state = State.SUCCESS;
// Start ourselves capturing previews and decoding.
this.cameraManager = cameraManager;
cameraManager.startPreview();
restartPreviewAndDecode();
}
@Override
public void handleMessage(Message message) {
switch (message.what) {
case R.id.restart_preview:
restartPreviewAndDecode();
break;
case R.id.decode_succeeded:
state = State.SUCCESS;
Bundle bundle = message.getData();
Bitmap barcode = null;
float scaleFactor = 1.0f;
if (bundle != null) {
byte[] compressedBitmap = bundle.getByteArray(DecodeThread.BARCODE_BITMAP);
if (compressedBitmap != null) {
barcode = BitmapFactory.decodeByteArray(compressedBitmap, 0, compressedBitmap.length, null);
// Mutable copy:
barcode = barcode.copy(Bitmap.Config.ARGB_8888, true);
}
scaleFactor = bundle.getFloat(DecodeThread.BARCODE_SCALED_FACTOR);
}
activity.handleDecode((Result) message.obj, barcode, scaleFactor);
break;
case R.id.decode_failed:
// We're decoding as fast as possible, so when one decode fails, start another.
state = State.PREVIEW;
cameraManager.requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
break;
case R.id.return_scan_result:
activity.setResult(Activity.RESULT_OK, (Intent) message.obj);
activity.finish();
break;
case R.id.launch_product_query:
String url = (String) message.obj;
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.addFlags(Intents.FLAG_NEW_DOC);
intent.setData(Uri.parse(url));
ResolveInfo resolveInfo =
activity.getPackageManager().resolveActivity(intent, PackageManager.MATCH_DEFAULT_ONLY);
String browserPackageName = null;
if (resolveInfo != null && resolveInfo.activityInfo != null) {
browserPackageName = resolveInfo.activityInfo.packageName;
Log.d(TAG, "Using browser in package " + browserPackageName);
}
// Needed for default Android browser / Chrome only apparently
if (browserPackageName != null) {
switch (browserPackageName) {
case "com.android.browser":
case "com.android.chrome":
intent.setPackage(browserPackageName);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
intent.putExtra(Browser.EXTRA_APPLICATION_ID, browserPackageName);
break;
}
}
try {
activity.startActivity(intent);
} catch (ActivityNotFoundException ignored) {
Log.w(TAG, "Can't find anything to handle VIEW of URI");
}
break;
}
}
public void quitSynchronously() {
state = State.DONE;
cameraManager.stopPreview();
Message quit = Message.obtain(decodeThread.getHandler(), R.id.quit);
quit.sendToTarget();
try {
// Wait at most half a second; should be enough time, and onPause() will timeout quickly
decodeThread.join(500L);
} catch (InterruptedException e) {
// continue
}
// Be absolutely sure we don't send any queued up messages
removeMessages(R.id.decode_succeeded);
removeMessages(R.id.decode_failed);
}
private void restartPreviewAndDecode() {
if (state == State.SUCCESS) {
state = State.PREVIEW;
cameraManager.requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
activity.drawViewfinder();
}
}
private enum State {
PREVIEW,
SUCCESS,
DONE
}
}
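// Illustrative usage sketch (added here for clarity; not part of the original gist).
// It shows how the scan activity typically creates the handler once the camera preview is
// ready, and how it tears it down again. The helper class and method names are assumptions;
// the sketch assumes the same package and imports as CaptureActivityHandler above.
final class CaptureHandlerUsageSketch {
    private CaptureHandlerUsageSketch() {
    }
    // Creating the handler starts the DecodeThread, the camera preview, and the first decode.
    // Passing null hints and a null character set simply falls back to the defaults.
    static CaptureActivityHandler createQrOnlyHandler(BatteryScanActivity activity,
                                                      CameraManager cameraManager) {
        return new CaptureActivityHandler(activity,
                DecodeFormatManager.QR_CODE_FORMATS,
                null,   // no extra decode hints
                null,   // default character set
                cameraManager);
    }
    // Call this from onPause(), before releasing the camera.
    static void tearDown(CaptureActivityHandler handler) {
        handler.quitSynchronously();
    }
}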
/*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ***.zxing;
import android.provider.ContactsContract;
/**
* The set of constants to use when sending Barcode Scanner an Intent which requests a barcode
* to be encoded.
*
* @author [email protected] (Daniel Switkin)
*/
public final class Contents {
public static final String URL_KEY = "URL_KEY";
public static final String NOTE_KEY = "NOTE_KEY";
/**
* When using Type.CONTACT, these arrays provide the keys for adding or retrieving multiple
* phone numbers and addresses.
*/
public static final String[] PHONE_KEYS = {
ContactsContract.Intents.Insert.PHONE,
ContactsContract.Intents.Insert.SECONDARY_PHONE,
ContactsContract.Intents.Insert.TERTIARY_PHONE
};
public static final String[] PHONE_TYPE_KEYS = {
ContactsContract.Intents.Insert.PHONE_TYPE,
ContactsContract.Intents.Insert.SECONDARY_PHONE_TYPE,
ContactsContract.Intents.Insert.TERTIARY_PHONE_TYPE
};
public static final String[] EMAIL_KEYS = {
ContactsContract.Intents.Insert.EMAIL,
ContactsContract.Intents.Insert.SECONDARY_EMAIL,
ContactsContract.Intents.Insert.TERTIARY_EMAIL
};
public static final String[] EMAIL_TYPE_KEYS = {
ContactsContract.Intents.Insert.EMAIL_TYPE,
ContactsContract.Intents.Insert.SECONDARY_EMAIL_TYPE,
ContactsContract.Intents.Insert.TERTIARY_EMAIL_TYPE
};
private Contents() {
}
/**
* Contains type constants used when sending Intents.
*/
public static final class Type {
/**
* Plain text. Use Intent.putExtra(DATA, string). This can be used for URLs too, but string
* must include "http://" or "https://".
*/
public static final String TEXT = "TEXT_TYPE";
/**
* An email type. Use Intent.putExtra(DATA, string) where string is the email address.
*/
public static final String EMAIL = "EMAIL_TYPE";
/**
* Use Intent.putExtra(DATA, string) where string is the phone number to call.
*/
public static final String PHONE = "PHONE_TYPE";
/**
* An SMS type. Use Intent.putExtra(DATA, string) where string is the number to SMS.
*/
public static final String SMS = "SMS_TYPE";
/**
* A contact. Send a request to encode it as follows:
* {@code
* import android.provider.Contacts;
* <p>
* Intent intent = new Intent(Intents.Encode.ACTION);
* intent.putExtra(Intents.Encode.TYPE, CONTACT);
* Bundle bundle = new Bundle();
* bundle.putString(ContactsContract.Intents.Insert.NAME, "Jenny");
* bundle.putString(ContactsContract.Intents.Insert.PHONE, "8675309");
* bundle.putString(ContactsContract.Intents.Insert.EMAIL, "[email protected]");
* bundle.putString(ContactsContract.Intents.Insert.POSTAL, "123 Fake St. San Francisco, CA 94102");
* intent.putExtra(Intents.Encode.DATA, bundle);
* }
*/
public static final String CONTACT = "CONTACT_TYPE";
/**
* A geographic location. Use as follows:
* Bundle bundle = new Bundle();
* bundle.putFloat("LAT", latitude);
* bundle.putFloat("LONG", longitude);
* intent.putExtra(Intents.Encode.DATA, bundle);
*/
public static final String LOCATION = "LOCATION_TYPE";
private Type() {
}
}
}
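// Illustrative usage sketch (added here for clarity; not part of the original gist).
// It shows how the Contents.Type and Intents.Encode constants combine into an ENCODE
// intent for plain text. The class and method names are assumptions; the sketch assumes
// the same package as Contents (so that Intents, defined further below, is visible).
final class EncodeIntentSketch {
    private EncodeIntentSketch() {
    }
    // Builds an intent asking Barcode Scanner to render the given text as a QR code.
    static android.content.Intent buildTextEncodeIntent(String text) {
        android.content.Intent intent = new android.content.Intent(Intents.Encode.ACTION);
        intent.putExtra(Intents.Encode.TYPE, Contents.Type.TEXT);
        intent.putExtra(Intents.Encode.DATA, text);
        return intent;
    }
}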
/*
* Copyright (C) 2010 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ***.zxing;
import android.content.Intent;
import android.net.Uri;
import com.google.zxing.BarcodeFormat;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
public final class DecodeFormatManager {
public static final Set<BarcodeFormat> QR_CODE_FORMATS = EnumSet.of(BarcodeFormat.QR_CODE);
static final Set<BarcodeFormat> PRODUCT_FORMATS;
static final Set<BarcodeFormat> INDUSTRIAL_FORMATS;
static final Set<BarcodeFormat> DATA_MATRIX_FORMATS = EnumSet.of(BarcodeFormat.DATA_MATRIX);
static final Set<BarcodeFormat> AZTEC_FORMATS = EnumSet.of(BarcodeFormat.AZTEC);
static final Set<BarcodeFormat> PDF417_FORMATS = EnumSet.of(BarcodeFormat.PDF_417);
private static final Pattern COMMA_PATTERN = Pattern.compile(",");
private static final Set<BarcodeFormat> ONE_D_FORMATS;
private static final Map<String, Set<BarcodeFormat>> FORMATS_FOR_MODE;
static {
PRODUCT_FORMATS = EnumSet.of(BarcodeFormat.UPC_A,
BarcodeFormat.UPC_E,
BarcodeFormat.EAN_13,
BarcodeFormat.EAN_8,
BarcodeFormat.RSS_14,
BarcodeFormat.RSS_EXPANDED);
INDUSTRIAL_FORMATS = EnumSet.of(BarcodeFormat.CODE_39,
BarcodeFormat.CODE_93,
BarcodeFormat.CODE_128,
BarcodeFormat.ITF,
BarcodeFormat.CODABAR);
ONE_D_FORMATS = EnumSet.copyOf(PRODUCT_FORMATS);
ONE_D_FORMATS.addAll(INDUSTRIAL_FORMATS);
}
static {
FORMATS_FOR_MODE = new HashMap<>();
FORMATS_FOR_MODE.put(Intents.Scan.ONE_D_MODE, ONE_D_FORMATS);
FORMATS_FOR_MODE.put(Intents.Scan.PRODUCT_MODE, PRODUCT_FORMATS);
FORMATS_FOR_MODE.put(Intents.Scan.QR_CODE_MODE, QR_CODE_FORMATS);
FORMATS_FOR_MODE.put(Intents.Scan.DATA_MATRIX_MODE, DATA_MATRIX_FORMATS);
FORMATS_FOR_MODE.put(Intents.Scan.AZTEC_MODE, AZTEC_FORMATS);
FORMATS_FOR_MODE.put(Intents.Scan.PDF417_MODE, PDF417_FORMATS);
}
private DecodeFormatManager() {
}
static Set<BarcodeFormat> parseDecodeFormats(Intent intent) {
Iterable<String> scanFormats = null;
CharSequence scanFormatsString = intent.getStringExtra(Intents.Scan.FORMATS);
if (scanFormatsString != null) {
scanFormats = Arrays.asList(COMMA_PATTERN.split(scanFormatsString));
}
return parseDecodeFormats(scanFormats, intent.getStringExtra(Intents.Scan.MODE));
}
static Set<BarcodeFormat> parseDecodeFormats(Uri inputUri) {
List<String> formats = inputUri.getQueryParameters(Intents.Scan.FORMATS);
if (formats != null && formats.size() == 1 && formats.get(0) != null) {
formats = Arrays.asList(COMMA_PATTERN.split(formats.get(0)));
}
return parseDecodeFormats(formats, inputUri.getQueryParameter(Intents.Scan.MODE));
}
private static Set<BarcodeFormat> parseDecodeFormats(Iterable<String> scanFormats, String decodeMode) {
if (scanFormats != null) {
Set<BarcodeFormat> formats = EnumSet.noneOf(BarcodeFormat.class);
try {
for (String format : scanFormats) {
formats.add(BarcodeFormat.valueOf(format));
}
return formats;
} catch (IllegalArgumentException iae) {
// ignore it then
}
}
if (decodeMode != null) {
return FORMATS_FOR_MODE.get(decodeMode);
}
return null;
}
}
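// Illustrative usage sketch (added here for clarity; not part of the original gist).
// It shows how a scan intent's requested formats would be resolved, falling back to this
// project's QR-only default when neither SCAN_FORMATS nor SCAN_MODE is supplied. The class
// and method names are assumptions; because parseDecodeFormats(Intent) is package-private,
// the sketch assumes the same package as DecodeFormatManager.
final class DecodeFormatSketch {
    private DecodeFormatSketch() {
    }
    static java.util.Set<com.google.zxing.BarcodeFormat> resolveFormats(android.content.Intent intent) {
        java.util.Set<com.google.zxing.BarcodeFormat> requested =
                DecodeFormatManager.parseDecodeFormats(intent);
        return requested != null ? requested : DecodeFormatManager.QR_CODE_FORMATS;
    }
}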
/*
* Copyright (C) 2010 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ***.zxing;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.DecodeHintType;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.PlanarYUVLuminanceSource;
import com.google.zxing.ReaderException;
import com.google.zxing.Result;
import com.google.zxing.common.HybridBinarizer;
import ***.R;
import ***.activity.BatteryScanActivity;
import java.io.ByteArrayOutputStream;
import java.util.Map;
import java.util.concurrent.TimeUnit;
final class DecodeHandler extends Handler {
private static final String TAG = DecodeHandler.class.getSimpleName();
private final BatteryScanActivity activity;
private final MultiFormatReader multiFormatReader;
private boolean running = true;
DecodeHandler(BatteryScanActivity activity, Map<DecodeHintType, Object> hints) {
multiFormatReader = new MultiFormatReader();
multiFormatReader.setHints(hints);
this.activity = activity;
}
private static void bundleThumbnail(PlanarYUVLuminanceSource source, Bundle bundle) {
int[] pixels = source.renderThumbnail();
int width = source.getThumbnailWidth();
int height = source.getThumbnailHeight();
Bitmap bitmap = Bitmap.createBitmap(pixels, 0, width, width, height, Bitmap.Config.ARGB_8888);
ByteArrayOutputStream out = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, 50, out);
bundle.putByteArray(DecodeThread.BARCODE_BITMAP, out.toByteArray());
bundle.putFloat(DecodeThread.BARCODE_SCALED_FACTOR, (float) width / source.getWidth());
}
@Override
public void handleMessage(Message message) {
if (message == null || !running) {
return;
}
switch (message.what) {
case R.id.decode:
decode((byte[]) message.obj, message.arg1, message.arg2);
break;
case R.id.quit:
running = false;
Looper.myLooper().quit();
break;
}
}
/**
* Decode the data within the viewfinder rectangle, and time how long it took. For efficiency,
* reuse the same reader objects from one decode to the next.
*
* @param data The YUV preview frame.
* @param width The width of the preview frame.
* @param height The height of the preview frame.
*/
private void decode(byte[] data, int width, int height) {
long start = System.nanoTime();
Result rawResult = null;
PlanarYUVLuminanceSource source = activity.getCameraManager().buildLuminanceSource(data, width, height);
if (source != null) {
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
try {
rawResult = multiFormatReader.decodeWithState(bitmap);
} catch (ReaderException re) {
// continue
} finally {
multiFormatReader.reset();
}
}
Handler handler = activity.getHandler();
if (rawResult != null) {
// Don't log the barcode contents for security.
long end = System.nanoTime();
Log.d(TAG, "Found barcode in " + TimeUnit.NANOSECONDS.toMillis(end - start) + " ms");
if (handler != null) {
Message message = Message.obtain(handler, R.id.decode_succeeded, rawResult);
Bundle bundle = new Bundle();
bundleThumbnail(source, bundle);
message.setData(bundle);
message.sendToTarget();
}
} else {
if (handler != null) {
Message message = Message.obtain(handler, R.id.decode_failed);
message.sendToTarget();
}
}
}
}
/*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ***.zxing;
import android.content.SharedPreferences;
import android.os.Handler;
import android.os.Looper;
import android.preference.PreferenceManager;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.DecodeHintType;
import com.google.zxing.ResultPointCallback;
import ***.activity.BatteryScanActivity;
import java.util.Collection;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
/**
* This thread does all the heavy lifting of decoding the images.
*
* @author [email protected] (Daniel Switkin)
*/
final class DecodeThread extends Thread {
public static final String BARCODE_BITMAP = "barcode_bitmap";
public static final String BARCODE_SCALED_FACTOR = "barcode_scaled_factor";
private final BatteryScanActivity activity;
private final Map<DecodeHintType, Object> hints;
private final CountDownLatch handlerInitLatch;
private Handler handler;
DecodeThread(BatteryScanActivity activity,
Collection<BarcodeFormat> decodeFormats,
Map<DecodeHintType, ?> baseHints,
String characterSet,
ResultPointCallback resultPointCallback) {
this.activity = activity;
handlerInitLatch = new CountDownLatch(1);
hints = new EnumMap<>(DecodeHintType.class);
if (baseHints != null) {
hints.putAll(baseHints);
}
// The prefs can't change while the thread is running, so pick them up once here.
if (decodeFormats == null || decodeFormats.isEmpty()) {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(activity);
decodeFormats = EnumSet.noneOf(BarcodeFormat.class);
// if (prefs.getBoolean(PreferencesActivity.KEY_DECODE_1D_PRODUCT, true)) {
decodeFormats.addAll(DecodeFormatManager.PRODUCT_FORMATS);
// }
// if (prefs.getBoolean(PreferencesActivity.KEY_DECODE_1D_INDUSTRIAL, true)) {
decodeFormats.addAll(DecodeFormatManager.INDUSTRIAL_FORMATS);
// }
// if (prefs.getBoolean(PreferencesActivity.KEY_DECODE_QR, true)) {
decodeFormats.addAll(DecodeFormatManager.QR_CODE_FORMATS);
// }
// if (prefs.getBoolean(PreferencesActivity.KEY_DECODE_DATA_MATRIX, true)) {
decodeFormats.addAll(DecodeFormatManager.DATA_MATRIX_FORMATS);
// }
// if (prefs.getBoolean(PreferencesActivity.KEY_DECODE_AZTEC, false)) {
decodeFormats.addAll(DecodeFormatManager.AZTEC_FORMATS);
// }
// if (prefs.getBoolean(PreferencesActivity.KEY_DECODE_PDF417, false)) {
decodeFormats.addAll(DecodeFormatManager.PDF417_FORMATS);
// }
}
hints.put(DecodeHintType.POSSIBLE_FORMATS, decodeFormats);
if (characterSet != null) {
hints.put(DecodeHintType.CHARACTER_SET, characterSet);
}
hints.put(DecodeHintType.NEED_RESULT_POINT_CALLBACK, resultPointCallback);
}
Handler getHandler() {
try {
handlerInitLatch.await();
} catch (InterruptedException ie) {
// continue?
}
return handler;
}
@Override
public void run() {
Looper.prepare();
handler = new DecodeHandler(activity, hints);
handlerInitLatch.countDown();
Looper.loop();
}
}
/*
* Copyright (C) 2010 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ***.zxing;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.AsyncTask;
import android.os.BatteryManager;
import android.util.Log;
import java.util.concurrent.RejectedExecutionException;
/**
* Finishes an activity after a period of inactivity if the device is on battery power.
*/
public final class InactivityTimer {
private static final String TAG = InactivityTimer.class.getSimpleName();
private static final long INACTIVITY_DELAY_MS = 5 * 60 * 1000L;
private final Activity activity;
private final BroadcastReceiver powerStatusReceiver;
private boolean registered;
private AsyncTask<Object, Object, Object> inactivityTask;
public InactivityTimer(Activity activity) {
this.activity = activity;
powerStatusReceiver = new PowerStatusReceiver();
registered = false;
onActivity();
}
public synchronized void onActivity() {
cancel();
inactivityTask = new InactivityAsyncTask();
try {
inactivityTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
} catch (RejectedExecutionException ree) {
Log.w(TAG, "Couldn't schedule inactivity task; ignoring");
}
}
public synchronized void onPause() {
cancel();
if (registered) {
activity.unregisterReceiver(powerStatusReceiver);
registered = false;
} else {
Log.w(TAG, "PowerStatusReceiver was never registered?");
}
}
public synchronized void onResume() {
if (registered) {
Log.w(TAG, "PowerStatusReceiver was already registered?");
} else {
activity.registerReceiver(powerStatusReceiver, new IntentFilter(Intent.ACTION_BATTERY_CHANGED));
registered = true;
}
onActivity();
}
private synchronized void cancel() {
AsyncTask<?, ?, ?> task = inactivityTask;
if (task != null) {
task.cancel(true);
inactivityTask = null;
}
}
public void shutdown() {
cancel();
}
private final class PowerStatusReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
if (Intent.ACTION_BATTERY_CHANGED.equals(intent.getAction())) {
// 0 indicates that we're on battery
boolean onBatteryNow = intent.getIntExtra(BatteryManager.EXTRA_PLUGGED, -1) <= 0;
if (onBatteryNow) {
InactivityTimer.this.onActivity();
} else {
InactivityTimer.this.cancel();
}
}
}
}
private final class InactivityAsyncTask extends AsyncTask<Object, Object, Object> {
@Override
protected Object doInBackground(Object... objects) {
try {
Thread.sleep(INACTIVITY_DELAY_MS);
Log.i(TAG, "Finishing activity due to inactivity");
activity.finish();
} catch (InterruptedException e) {
// continue without killing
}
return null;
}
}
}
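// Illustrative lifecycle wiring (added here for clarity; not part of the original gist).
// "inactivityTimer" is an assumed field name in the host activity:
//
//   onCreate():  inactivityTimer = new InactivityTimer(this);
//   onResume():  inactivityTimer.onResume();
//   onPause():   inactivityTimer.onPause();
//   onDestroy(): inactivityTimer.shutdown();
//
// Call inactivityTimer.onActivity() after each user interaction (for example a successful
// decode) to restart the five-minute countdown; the activity is only finished automatically
// while the device is running on battery power.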
/*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ***.zxing;
import android.content.Intent;
/**
* This class provides the constants to use when sending an Intent to Barcode Scanner.
* These strings are effectively API and cannot be changed.
*
* @author [email protected] (Daniel Switkin)
*/
public final class Intents {
// Not the best place for this, but, better than a new class
// Should be FLAG_ACTIVITY_NEW_DOCUMENT in API 21+.
// Defined once here because the current value is deprecated, so generates just one warning
public static final int FLAG_NEW_DOC = Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET;
private Intents() {
}
/**
* Constants related to the {@link Scan#ACTION} Intent.
*/
public static final class Scan {
/**
* Send this intent to open the Barcodes app in scanning mode, find a barcode, and return
* the results.
*/
public static final String ACTION = "com.google.zxing.client.android.SCAN";
/**
* By default, sending this will decode all barcodes that we understand. However it
* may be useful to limit scanning to certain formats. Use
* {@link Intent#putExtra(String, String)} with one of the values below.
* <p>
* Setting this is effectively shorthand for setting explicit formats with {@link #FORMATS}.
* It is overridden by that setting.
*/
public static final String MODE = "SCAN_MODE";
/**
* Decode only UPC and EAN barcodes. This is the right choice for shopping apps which get
* prices, reviews, etc. for products.
*/
public static final String PRODUCT_MODE = "PRODUCT_MODE";
/**
* Decode only 1D barcodes.
*/
public static final String ONE_D_MODE = "ONE_D_MODE";
/**
* Decode only QR codes.
*/
public static final String QR_CODE_MODE = "QR_CODE_MODE";
/**
* Decode only Data Matrix codes.
*/
public static final String DATA_MATRIX_MODE = "DATA_MATRIX_MODE";
/**
* Decode only Aztec.
*/
public static final String AZTEC_MODE = "AZTEC_MODE";
/**
* Decode only PDF417.
*/
public static final String PDF417_MODE = "PDF417_MODE";
/**
* Comma-separated list of formats to scan for. The values must match the names of
* {@link com.google.zxing.BarcodeFormat}s, e.g. {@link com.google.zxing.BarcodeFormat#EAN_13}.
* Example: "EAN_13,EAN_8,QR_CODE". This overrides {@link #MODE}.
*/
public static final String FORMATS = "SCAN_FORMATS";
/**
* Optional parameter to specify the id of the camera from which to recognize barcodes.
* Overrides the default camera that would otherwise have been selected.
* If provided, should be an int.
*/
public static final String CAMERA_ID = "SCAN_CAMERA_ID";
/**
* @see com.google.zxing.DecodeHintType#CHARACTER_SET
*/
public static final String CHARACTER_SET = "CHARACTER_SET";
/**
* Optional parameters to specify the width and height of the scanning rectangle in pixels.
* The app will try to honor these, but will clamp them to the size of the preview frame.
* You should specify both or neither, and pass the size as an int.
*/
public static final String WIDTH = "SCAN_WIDTH";
public static final String HEIGHT = "SCAN_HEIGHT";
/**
* Desired duration in milliseconds for which to pause after a successful scan before
* returning to the calling intent. Specified as a long, not an integer!
* For example: 1000L, not 1000.
*/
public static final String RESULT_DISPLAY_DURATION_MS = "RESULT_DISPLAY_DURATION_MS";
/**
* Prompt to show on-screen when scanning by intent. Specified as a {@link String}.
*/
public static final String PROMPT_MESSAGE = "PROMPT_MESSAGE";
/**
* If a barcode is found, Barcodes returns {@link android.app.Activity#RESULT_OK} to
* {@link android.app.Activity#onActivityResult(int, int, Intent)}
* of the app which requested the scan via
* {@link android.app.Activity#startActivityForResult(Intent, int)}.
* The barcode's contents can be retrieved with
* {@link Intent#getStringExtra(String)}.
* If the user presses Back, the result code will be {@link android.app.Activity#RESULT_CANCELED}.
*/
public static final String RESULT = "SCAN_RESULT";
/**
* Call {@link Intent#getStringExtra(String)} with {@code RESULT_FORMAT}
* to determine which barcode format was found.
* See {@link com.google.zxing.BarcodeFormat} for possible values.
*/
public static final String RESULT_FORMAT = "SCAN_RESULT_FORMAT";
/**
* Call {@link Intent#getStringExtra(String)} with {@code RESULT_UPC_EAN_EXTENSION}
* to return the content of any UPC extension barcode that was also found. Only applicable
* to {@link com.google.zxing.BarcodeFormat#UPC_A} and {@link com.google.zxing.BarcodeFormat#EAN_13}
* formats.
*/
public static final String RESULT_UPC_EAN_EXTENSION = "SCAN_RESULT_UPC_EAN_EXTENSION";
/**
* Call {@link Intent#getByteArrayExtra(String)} with {@code RESULT_BYTES}
* to get a {@code byte[]} of raw bytes in the barcode, if available.
*/
public static final String RESULT_BYTES = "SCAN_RESULT_BYTES";
/**
* Key for the value of {@link com.google.zxing.ResultMetadataType#ORIENTATION}, if available.
* Call {@link Intent#getIntArrayExtra(String)} with {@code RESULT_ORIENTATION}.
*/
public static final String RESULT_ORIENTATION = "SCAN_RESULT_ORIENTATION";
/**
* Key for the value of {@link com.google.zxing.ResultMetadataType#ERROR_CORRECTION_LEVEL}, if available.
* Call {@link Intent#getStringExtra(String)} with {@code RESULT_ERROR_CORRECTION_LEVEL}.
*/
public static final String RESULT_ERROR_CORRECTION_LEVEL = "SCAN_RESULT_ERROR_CORRECTION_LEVEL";
/**
* Prefix for keys that map to the values of {@link com.google.zxing.ResultMetadataType#BYTE_SEGMENTS},
* if available. The actual values will be set under a series of keys formed by adding 0, 1, 2, ...
* to this prefix. So the first byte segment is under key "SCAN_RESULT_BYTE_SEGMENTS_0" for example.
* Call {@link Intent#getByteArrayExtra(String)} with these keys.
*/
public static final String RESULT_BYTE_SEGMENTS_PREFIX = "SCAN_RESULT_BYTE_SEGMENTS_";
/**
* Setting this to false will not save scanned codes in the history. Specified as a {@code boolean}.
*/
public static final String SAVE_HISTORY = "SAVE_HISTORY";
private Scan() {
}
}
/**
* Constants related to the scan history and retrieving history items.
*/
public static final class History {
public static final String ITEM_NUMBER = "ITEM_NUMBER";
private History() {
}
}
/**
* Constants related to the {@link Encode#ACTION} Intent.
*/
public static final class Encode {
/**
* Send this intent to encode a piece of data as a QR code and display it full screen, so
* that another person can scan the barcode from your screen.
*/
public static final String ACTION = "com.google.zxing.client.android.ENCODE";
/**
* The data to encode. Use {@link Intent#putExtra(String, String)} or
* {@link Intent#putExtra(String, android.os.Bundle)},
* depending on the type and format specified. Non-QR Code formats should
* just use a String here. For QR Code, see Contents for details.
*/
public static final String DATA = "ENCODE_DATA";
/**
* The type of data being supplied if the format is QR Code. Use
* {@link Intent#putExtra(String, String)} with one of {@link Contents.Type}.
*/
public static final String TYPE = "ENCODE_TYPE";
/**
* The barcode format to be displayed. If this isn't specified or is blank,
* it defaults to QR Code. Use {@link Intent#putExtra(String, String)}, where
* format is one of {@link com.google.zxing.BarcodeFormat}.
*/
public static final String FORMAT = "ENCODE_FORMAT";
/**
* Normally the contents of the barcode are displayed to the user in a TextView. Setting this
* boolean to false will hide that TextView, showing only the encoded barcode.
*/
public static final String SHOW_CONTENTS = "ENCODE_SHOW_CONTENTS";
private Encode() {
}
}
/**
* Constants related to the {@link SearchBookContents#ACTION} Intent.
*/
public static final class SearchBookContents {
/**
* Use Google Book Search to search the contents of the book provided.
*/
public static final String ACTION = "com.google.zxing.client.android.SEARCH_BOOK_CONTENTS";
/**
* The book to search, identified by ISBN number.
*/
public static final String ISBN = "ISBN";
/**
* An optional field which is the text to search for.
*/
public static final String QUERY = "QUERY";
private SearchBookContents() {
}
}
/**
* Constants related to the {@link WifiConnect#ACTION} Intent.
*/
public static final class WifiConnect {
/**
* Internal intent used to trigger connection to a wi-fi network.
*/
public static final String ACTION = "com.google.zxing.client.android.WIFI_CONNECT";
/**
* The SSID of the network to connect to.
*/
public static final String SSID = "SSID";
/**
* The security type of the network (for example WPA).
*/
public static final String TYPE = "TYPE";
/**
* The password for the network.
*/
public static final String PASSWORD = "PASSWORD";
private WifiConnect() {
}
}
/**
* Constants related to the {@link Share#ACTION} Intent.
*/
public static final class Share {
/**
* Give the user a choice of items to encode as a barcode, then render it as a QR Code and
* display onscreen for a friend to scan with their phone.
*/
public static final String ACTION = "com.google.zxing.client.android.SHARE";
private Share() {
}
}
}
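// Illustrative usage sketch (added here for clarity; not part of the original gist).
// It shows how a client app would request a QR-only scan and suggest a viewfinder size;
// the scanner clamps SCAN_WIDTH and SCAN_HEIGHT to the preview frame. The class and method
// names are assumptions; the sketch assumes the same package as Intents. Start the intent
// with startActivityForResult(...) and read Intents.Scan.RESULT from the data intent
// returned on success.
final class ScanIntentSketch {
    private ScanIntentSketch() {
    }
    static android.content.Intent buildQrScanIntent(int widthPx, int heightPx) {
        android.content.Intent intent = new android.content.Intent(Intents.Scan.ACTION);
        intent.putExtra(Intents.Scan.MODE, Intents.Scan.QR_CODE_MODE);
        intent.putExtra(Intents.Scan.WIDTH, widthPx);    // must be passed as an int
        intent.putExtra(Intents.Scan.HEIGHT, heightPx);  // must be passed as an int
        return intent;
    }
}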
/*
* Copyright (C) 2011 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ***.zxing;
public enum IntentSource {
NATIVE_APP_INTENT,
PRODUCT_SEARCH_LINK,
ZXING_LINK,
NONE
}
/*
* Copyright (C) 2009 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ***.zxing;
import com.google.zxing.ResultPoint;
import com.google.zxing.ResultPointCallback;
final class ViewfinderResultPointCallback implements ResultPointCallback {
private final ViewfinderView viewfinderView;
ViewfinderResultPointCallback(ViewfinderView viewfinderView) {
this.viewfinderView = viewfinderView;
}
@Override
public void foundPossibleResultPoint(ResultPoint point) {
viewfinderView.addPossibleResultPoint(point);
}
}
/*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ***.zxing;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.view.View;
import com.google.zxing.ResultPoint;
import ***.R;
import ***.zxing.camera.CameraManager;
import java.util.ArrayList;
import java.util.List;
//import com.google.zxing.client.android.camera.CameraManager;
/**
* This view is overlaid on top of the camera preview. It adds the viewfinder rectangle and partial
* transparency outside it, as well as the laser scanner animation and result points.
*
* @author [email protected] (Daniel Switkin)
*/
public final class ViewfinderView extends View {
private static final int[] SCANNER_ALPHA = {0, 64, 128, 192, 255, 192, 128, 64};
private static final long ANIMATION_DELAY = 80L;
private static final int CURRENT_POINT_OPACITY = 0xA0;
private static final int MAX_RESULT_POINTS = 20;
private static final int POINT_SIZE = 6;
private final Paint paint;
private final int maskColor;
private final int resultColor;
private final int laserColor;
private final int resultPointColor;
private CameraManager cameraManager;
private Bitmap resultBitmap;
private int scannerAlpha;
private List<ResultPoint> possibleResultPoints;
private List<ResultPoint> lastPossibleResultPoints;
// This constructor is used when the class is built from an XML resource.
public ViewfinderView(Context context, AttributeSet attrs) {
super(context, attrs);
// Initialize these once for performance rather than calling them every time in onDraw().
paint = new Paint(Paint.ANTI_ALIAS_FLAG);
Resources resources = getResources();
maskColor = resources.getColor(R.color.background_tab_pressed);
resultColor = resources.getColor(R.color.Ivy);
laserColor = resources.getColor(R.color.Orange);
resultPointColor = resources.getColor(R.color.Concord);
scannerAlpha = 0;
possibleResultPoints = new ArrayList<>(5);
lastPossibleResultPoints = null;
}
public void setCameraManager(CameraManager cameraManager) {
this.cameraManager = cameraManager;
}
@SuppressLint("DrawAllocation")
@Override
public void onDraw(Canvas canvas) {
if (cameraManager == null) {
return; // not ready yet, early draw before done configuring
}
Rect frame = cameraManager.getFramingRect();
Rect previewFrame = cameraManager.getFramingRectInPreview();
if (frame == null || previewFrame == null) {
return;
}
int width = canvas.getWidth();
int height = canvas.getHeight();
// Draw the exterior (i.e. outside the framing rect) darkened
paint.setColor(resultBitmap != null ? resultColor : maskColor);
canvas.drawRect(0, 0, width, frame.top, paint);
canvas.drawRect(0, frame.top, frame.left, frame.bottom + 1, paint);
canvas.drawRect(frame.right + 1, frame.top, width, frame.bottom + 1, paint);
canvas.drawRect(0, frame.bottom + 1, width, height, paint);
if (resultBitmap != null) {
// Draw the opaque result bitmap over the scanning rectangle
paint.setAlpha(CURRENT_POINT_OPACITY);
canvas.drawBitmap(resultBitmap, null, frame, paint);
} else {
// Draw a red "laser scanner" line through the middle to show decoding is active
paint.setColor(laserColor);
paint.setAlpha(SCANNER_ALPHA[scannerAlpha]);
scannerAlpha = (scannerAlpha + 1) % SCANNER_ALPHA.length;
int middle = frame.height() / 2 + frame.top;
canvas.drawRect(frame.left + 2, middle - 1, frame.right - 1, middle + 2, paint);
float scaleX = frame.width() / (float) previewFrame.width();
float scaleY = frame.height() / (float) previewFrame.height();
List<ResultPoint> currentPossible = possibleResultPoints;
List<ResultPoint> currentLast = lastPossibleResultPoints;
int frameLeft = frame.left;
int frameTop = frame.top;
if (currentPossible.isEmpty()) {
lastPossibleResultPoints = null;
} else {
possibleResultPoints = new ArrayList<>(5);
lastPossibleResultPoints = currentPossible;
paint.setAlpha(CURRENT_POINT_OPACITY);
paint.setColor(resultPointColor);
synchronized (currentPossible) {
for (ResultPoint point : currentPossible) {
canvas.drawCircle(frameLeft + (int) (point.getX() * scaleX),
frameTop + (int) (point.getY() * scaleY),
POINT_SIZE, paint);
}
}
}
if (currentLast != null) {
paint.setAlpha(CURRENT_POINT_OPACITY / 2);
paint.setColor(resultPointColor);
synchronized (currentLast) {
float radius = POINT_SIZE / 2.0f;
for (ResultPoint point : currentLast) {
canvas.drawCircle(frameLeft + (int) (point.getX() * scaleX),
frameTop + (int) (point.getY() * scaleY),
radius, paint);
}
}
}
// Request another update at the animation interval, but only repaint the laser line,
// not the entire viewfinder mask.
postInvalidateDelayed(ANIMATION_DELAY,
frame.left - POINT_SIZE,
frame.top - POINT_SIZE,
frame.right + POINT_SIZE,
frame.bottom + POINT_SIZE);
}
}
public void drawViewfinder() {
Bitmap resultBitmap = this.resultBitmap;
this.resultBitmap = null;
if (resultBitmap != null) {
resultBitmap.recycle();
}
invalidate();
}
/**
* Draw a bitmap with the result points highlighted instead of the live scanning display.
*
* @param barcode An image of the decoded barcode.
*/
public void drawResultBitmap(Bitmap barcode) {
resultBitmap = barcode;
invalidate();
}
public void addPossibleResultPoint(ResultPoint point) {
List<ResultPoint> points = possibleResultPoints;
synchronized (points) {
points.add(point);
int size = points.size();
if (size > MAX_RESULT_POINTS) {
// trim it
points.subList(0, size - MAX_RESULT_POINTS / 2).clear();
}
}
}
}
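// Illustrative wiring sketch (added here for clarity; not part of the original gist).
// It shows the two calls an activity typically makes after inflating the view: binding the
// CameraManager so onDraw() can query the framing rectangles, and clearing any frozen result
// bitmap so the live scanning overlay is shown. The class and method names are assumptions;
// the sketch assumes the same imports as ViewfinderView above.
final class ViewfinderWiringSketch {
    private ViewfinderWiringSketch() {
    }
    static void bind(ViewfinderView viewfinderView, CameraManager cameraManager) {
        viewfinderView.setCameraManager(cameraManager);
        viewfinderView.drawViewfinder();
    }
}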