Optimizing ZXing-Based QR Code Scanning on Android
Our company's app recently needed QR code scanning for field work: recognizing QR codes printed on A4 paper and pasted on shipping containers at the dock. Normally, once scanning is integrated and codes can be read, nobody looks at it again. But after we shipped it, user feedback showed that outdoors many codes could not be recognized at all, or were recognized very slowly, and when we tried it ourselves we saw the same thing.
We summarized the main causes of these recognition failures:
- Labels printed on A4 paper are not very sharp to begin with, and some modules may not print at all.
- Outdoor conditions vary a lot, and dust and grime get in the way.
- Sun, rain, and scratches leave the codes damaged or stained.
- Android phones differ widely; some users have high-resolution cameras, others much lower.
Roughly those. Yet WeChat, which uses QBar (an engine optimized on top of ZXing), quickly recognizes codes in all of the situations above; Alipay and DingTalk, whose scanners are built on the libqrencode library, can read them too; and iOS, using the system scanner, handles them as well. So why can't our Android app?
The boss doesn't care about any of that: "Theirs works, so why doesn't yours? That's your problem."
We tried plenty of third-party QR integrations from the internet, some with thousands of stars, and none of them met the requirements above; at the time we honestly didn't know what to do.
The only one we found that was somewhat usable is https://github.com/vondear/RxTool, but it still fails on slightly damaged codes; the famous "maple leaf" library with thousands of stars doesn't even come close under these conditions.
There is also the library recommended by Guo Lin, https://github.com/al4fun/SimpleScanner; even with that endorsement it performs noticeably worse than RxTool. Guo Lin's recommendation post: https://mp.weixin.qq.com/s/aPqSK1FlsPiENzSE48BVUA
In the end we had to modify things ourselves, since nothing we found online was suitable. Our current scanner can recognize everything except badly damaged codes; it is sometimes a little slow and needs an extra focus pass, but it is at least somewhat better than the libraries above.
The code is below. (Many of these classes are modified versions of the originals: the class names are the same, but some of the methods have changed!)
build.gradle
dependencies {
    api fileTree(include: ['*.jar'], dir: 'libs')
    api files('libs/core-3.3.0.jar')
    // provided 'com.android.support:appcompat-v7:26.1.0'
    compileOnly 'com.android.support:design:26.1.0'
    compileOnly 'com.android.support:support-vector-drawable:26.1.0'
}
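If you would rather not bundle core-3.3.0.jar in libs/, the same ZXing core artifact is published on Maven Central; replacing the files() line with the following should be equivalent (assuming your repositories include Maven Central):
api 'com.google.zxing:core:3.3.0'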
The file layout is as follows:
ZxingConfig.java
public class ZxingConfig implements Serializable {
/* whether to play a beep sound */
private boolean isPlayBeep = true;
/* whether to vibrate */
private boolean isShake = false;
/* whether to show the bottom function bar */
private boolean isShowbottomLayout = true;
/* whether to show the flashlight button */
private boolean isShowFlashLight = true;
/* whether to show the album (gallery) button */
private boolean isShowAlbum = true;
public boolean isPlayBeep() {
return isPlayBeep;
}
public void setPlayBeep(boolean playBeep) {
isPlayBeep = playBeep;
}
public boolean isShake() {
return isShake;
}
public void setShake(boolean shake) {
isShake = shake;
}
public boolean isShowbottomLayout() {
return isShowbottomLayout;
}
public void setShowbottomLayout(boolean showbottomLayout) {
isShowbottomLayout = showbottomLayout;
}
public boolean isShowFlashLight() {
return isShowFlashLight;
}
public void setShowFlashLight(boolean showFlashLight) {
isShowFlashLight = showFlashLight;
}
public boolean isShowAlbum() {
return isShowAlbum;
}
public void setShowAlbum(boolean showAlbum) {
isShowAlbum = showAlbum;
}
}
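For context, here is a minimal sketch of how a host Activity might configure and launch the scanner with this class. It relies on the CaptureActivity, Constant, and bundle keys shown later in this article; the request code and title text are placeholders.
// Hypothetical launch helper inside the host Activity.
private static final int REQUEST_SCAN = 100;   // arbitrary request code chosen by the caller
private void launchScanner() {
    ZxingConfig config = new ZxingConfig();
    config.setPlayBeep(true);
    config.setShake(false);
    config.setShowAlbum(false);                // hide the gallery button for outdoor scanning
    Bundle bundle = new Bundle();
    bundle.putSerializable(Constant.INTENT_ZXING_CONFIG, config);
    bundle.putString("titileText", "Scan label");          // key spelled exactly as CaptureActivity reads it
    bundle.putString("portraitOrLandscape", "portrait");
    CaptureActivity.startAction(this, bundle, REQUEST_SCAN);
}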
CameraFacing.java
public enum CameraFacing {
BACK, // must be value 0!
FRONT, // must be value 1!
}
OpenCamera.java
public final class OpenCamera {
private final int index;
private final Camera camera;
private final CameraFacing facing;
private final int orientation;
public OpenCamera(int index, Camera camera, CameraFacing facing, int orientation) {
this.index = index;
this.camera = camera;
this.facing = facing;
this.orientation = orientation;
}
public Camera getCamera() {
return camera;
}
public CameraFacing getFacing() {
return facing;
}
public int getOrientation() {
return orientation;
}
@Override
public String toString() {
return "Camera #" + index + " : " + facing + ',' + orientation;
}
}
OpenCameraInterface.java
public final class OpenCameraInterface {
private static final String TAG = OpenCameraInterface.class.getName();
private OpenCameraInterface() {
}
/**
* For {@link #open(int)}, means no preference for which camera to open.
*/
public static final int NO_REQUESTED_CAMERA = -1;
/**
* Opens the requested camera with {@link Camera#open(int)}, if one exists.
*
* @param cameraId camera ID of the camera to use. A negative value
* or {@link #NO_REQUESTED_CAMERA} means "no preference", in which case a rear-facing
* camera is returned if possible or else any camera
* @return handle to {@link OpenCamera} that was opened
*/
public static OpenCamera open(int cameraId) {
int numCameras = Camera.getNumberOfCameras();
if (numCameras == 0) {
Log.w(TAG, "No cameras!");
return null;
}
boolean explicitRequest = cameraId >= 0;
Camera.CameraInfo selectedCameraInfo = null;
int index;
if (explicitRequest) {
index = cameraId;
selectedCameraInfo = new Camera.CameraInfo();
Camera.getCameraInfo(index, selectedCameraInfo);
} else {
index = 0;
while (index < numCameras) {
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
Camera.getCameraInfo(index, cameraInfo);
CameraFacing reportedFacing = CameraFacing.values()[cameraInfo.facing];
if (reportedFacing == CameraFacing.BACK) {
selectedCameraInfo = cameraInfo;
break;
}
index++;
}
}
Camera camera;
if (index < numCameras) {
Log.i(TAG, "Opening camera #" + index);
camera = Camera.open(index);
} else {
if (explicitRequest) {
Log.w(TAG, "Requested camera does not exist: " + cameraId);
camera = null;
} else {
Log.i(TAG, "No camera facing " + CameraFacing.BACK + "; returning camera #0");
camera = Camera.open(0);
selectedCameraInfo = new Camera.CameraInfo();
Camera.getCameraInfo(0, selectedCameraInfo);
}
}
if (camera == null) {
return null;
}
return new OpenCamera(index,
camera,
CameraFacing.values()[selectedCameraInfo.facing],
selectedCameraInfo.orientation);
}
}
AutoFocusManager.java
final class AutoFocusManager implements Camera.AutoFocusCallback {
private static final String TAG = AutoFocusManager.class.getSimpleName();
private static final long AUTO_FOCUS_INTERVAL_MS = 2000L;
private static final Collection<String> FOCUS_MODES_CALLING_AF;
static {
FOCUS_MODES_CALLING_AF = new ArrayList<>(2);
FOCUS_MODES_CALLING_AF.add(Camera.Parameters.FOCUS_MODE_AUTO);
FOCUS_MODES_CALLING_AF.add(Camera.Parameters.FOCUS_MODE_MACRO);
}
private boolean stopped;
private boolean focusing;
private final boolean useAutoFocus;
private final Camera camera;
private AsyncTask<?,?,?> outstandingTask;
AutoFocusManager(Context context, Camera camera) {
this.camera = camera;
SharedPreferences sharedPrefs = PreferenceManager.getDefaultSharedPreferences(context);
String currentFocusMode = camera.getParameters().getFocusMode();
useAutoFocus =
sharedPrefs.getBoolean(QRConstants.KEY_AUTO_FOCUS, true) &&
FOCUS_MODES_CALLING_AF.contains(currentFocusMode);
Log.i(TAG, "Current focus mode '" + currentFocusMode + "'; use auto focus? " + useAutoFocus);
start();
}
@Override
public synchronized void onAutoFocus(boolean success, Camera theCamera) {
focusing = false;
autoFocusAgainLater();
}
private synchronized void autoFocusAgainLater() {
if (!stopped && outstandingTask == null) {
AutoFocusTask newTask = new AutoFocusTask();
try {
newTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
outstandingTask = newTask;
} catch (RejectedExecutionException ree) {
Log.w(TAG, "Could not request auto focus", ree);
}
}
}
synchronized void start() {
// if (useAutoFocus) {
outstandingTask = null;
if (!stopped && !focusing) {
try {
camera.autoFocus(this);
focusing = true;
} catch (RuntimeException re) {
// Have heard RuntimeException reported in Android 4.0.x+; continue?
Log.w(TAG, "Unexpected exception while focusing", re);
// Try again later to keep cycle going
autoFocusAgainLater();
}
}
// }
}
private synchronized void cancelOutstandingTask() {
if (outstandingTask != null) {
if (outstandingTask.getStatus() != AsyncTask.Status.FINISHED) {
outstandingTask.cancel(true);
}
outstandingTask = null;
}
}
synchronized void stop() {
stopped = true;
// if (useAutoFocus) {
cancelOutstandingTask();
// Doesn't hurt to call this even if not focusing
try {
camera.cancelAutoFocus();
} catch (RuntimeException re) {
// Have heard RuntimeException reported in Android 4.0.x+; continue?
Log.w(TAG, "Unexpected exception while cancelling focusing", re);
}
// }
}
private final class AutoFocusTask extends AsyncTask<Object,Object,Object> {
@Override
protected Object doInBackground(Object... voids) {
try {
Thread.sleep(AUTO_FOCUS_INTERVAL_MS);
} catch (InterruptedException e) {
// continue
}
start();
return null;
}
}
}
CameraConfigurationManager.java
final class CameraConfigurationManager {
private static final String TAG = "CameraConfiguration";
private final Context context;
private int cwNeededRotation;
private int cwRotationFromDisplayToCamera;
private Point screenResolution;
private Point cameraResolution;
private Point bestPreviewSize;
private Point previewSizeOnScreen;
CameraConfigurationManager(Context context) {
this.context = context;
}
/**
* Reads, one time, values from the camera that are needed by the app.
*/
void initFromCameraParameters(OpenCamera camera) {
Camera.Parameters parameters = camera.getCamera().getParameters();
WindowManager manager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
Display display = manager.getDefaultDisplay();
int displayRotation = display.getRotation();
int cwRotationFromNaturalToDisplay;
switch (displayRotation) {
case Surface.ROTATION_0:
cwRotationFromNaturalToDisplay = 0;
break;
case Surface.ROTATION_90:
cwRotationFromNaturalToDisplay = 90;
break;
case Surface.ROTATION_180:
cwRotationFromNaturalToDisplay = 180;
break;
case Surface.ROTATION_270:
cwRotationFromNaturalToDisplay = 270;
break;
default:
// Have seen this return incorrect values like -90
if (displayRotation % 90 == 0) {
cwRotationFromNaturalToDisplay = (360 + displayRotation) % 360;
} else {
throw new IllegalArgumentException("Bad rotation: " + displayRotation);
}
}
Log.i(TAG, "Display at: " + cwRotationFromNaturalToDisplay);
int cwRotationFromNaturalToCamera = camera.getOrientation();
Log.i(TAG, "Camera at: " + cwRotationFromNaturalToCamera);
// Still not 100% sure about this. But acts like we need to flip this:
if (camera.getFacing() == CameraFacing.FRONT) {
cwRotationFromNaturalToCamera = (360 - cwRotationFromNaturalToCamera) % 360;
Log.i(TAG, "Front camera overriden to: " + cwRotationFromNaturalToCamera);
}
/*
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
String overrideRotationString;
if (camera.getFacing() == CameraFacing.FRONT) {
overrideRotationString = prefs.getString(PreferencesActivity.KEY_FORCE_CAMERA_ORIENTATION_FRONT, null);
} else {
overrideRotationString = prefs.getString(PreferencesActivity.KEY_FORCE_CAMERA_ORIENTATION, null);
}
if (overrideRotationString != null && !"-".equals(overrideRotationString)) {
Log.i(TAG, "Overriding camera manually to " + overrideRotationString);
cwRotationFromNaturalToCamera = Integer.parseInt(overrideRotationString);
}
*/
cwRotationFromDisplayToCamera =
(360 + cwRotationFromNaturalToCamera - cwRotationFromNaturalToDisplay) % 360;
Log.i(TAG, "Final display orientation: " + cwRotationFromDisplayToCamera);
if (camera.getFacing() == CameraFacing.FRONT) {
Log.i(TAG, "Compensating rotation for front camera");
cwNeededRotation = (360 - cwRotationFromDisplayToCamera) % 360;
} else {
cwNeededRotation = cwRotationFromDisplayToCamera;
}
Log.i(TAG, "Clockwise rotation from display to camera: " + cwNeededRotation);
Point theScreenResolution = new Point();
display.getSize(theScreenResolution);
screenResolution = theScreenResolution;
Log.i(TAG, "Screen resolution in current orientation: " + screenResolution);
cameraResolution = CameraConfigurationUtils.findBestPreviewSizeValue(parameters, screenResolution);
Log.i(TAG, "Camera resolution: " + cameraResolution);
bestPreviewSize = CameraConfigurationUtils.findBestPreviewSizeValue(parameters, screenResolution);
Log.i(TAG, "Best available preview size: " + bestPreviewSize);
boolean isScreenPortrait = screenResolution.x < screenResolution.y;
boolean isPreviewSizePortrait = bestPreviewSize.x < bestPreviewSize.y;
if (isScreenPortrait == isPreviewSizePortrait) {
previewSizeOnScreen = bestPreviewSize;
} else {
previewSizeOnScreen = new Point(bestPreviewSize.y, bestPreviewSize.x);
}
Log.i(TAG, "Preview size on screen: " + previewSizeOnScreen);
}
void setDesiredCameraParameters(OpenCamera camera, boolean safeMode) {
Camera theCamera = camera.getCamera();
Camera.Parameters parameters = theCamera.getParameters();
if (parameters == null) {
Log.w(TAG, "Device error: no camera parameters are available. Proceeding without configuration.");
return;
}
Log.i(TAG, "Initial camera parameters: " + parameters.flatten());
if (safeMode) {
Log.w(TAG, "In camera config safe mode -- most settings will not be honored");
}
// SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
initializeTorch(parameters, safeMode, QRConstants.disableExposure);
CameraConfigurationUtils.setFocus(
parameters,
// whether auto focus should be used
QRConstants.autoFocus,
true,
safeMode);
if (!safeMode) {
//
//CameraConfigurationUtils.setInvertColor(parameters);
CameraConfigurationUtils.setBarcodeSceneMode(parameters);
CameraConfigurationUtils.setVideoStabilization(parameters);
CameraConfigurationUtils.setFocusArea(parameters);
CameraConfigurationUtils.setMetering(parameters);
}
parameters.setPreviewSize(bestPreviewSize.x, bestPreviewSize.y);
theCamera.setParameters(parameters);
theCamera.setDisplayOrientation(cwRotationFromDisplayToCamera);
Camera.Parameters afterParameters = theCamera.getParameters();
Camera.Size afterSize = afterParameters.getPreviewSize();
if (afterSize != null && (bestPreviewSize.x != afterSize.width || bestPreviewSize.y != afterSize.height)) {
Log.w(TAG, "Camera said it supported preview size " + bestPreviewSize.x + 'x' + bestPreviewSize.y +
", but after setting it, preview size is " + afterSize.width + 'x' + afterSize.height);
bestPreviewSize.x = afterSize.width;
bestPreviewSize.y = afterSize.height;
}
}
Point getBestPreviewSize() {
return bestPreviewSize;
}
Point getPreviewSizeOnScreen() {
return previewSizeOnScreen;
}
Point getCameraResolution() {
return cameraResolution;
}
Point getScreenResolution() {
return screenResolution;
}
int getCWNeededRotation() {
return cwNeededRotation;
}
boolean getTorchState(Camera camera) {
if (camera != null) {
Camera.Parameters parameters = camera.getParameters();
if (parameters != null) {
String flashMode = camera.getParameters().getFlashMode();
return flashMode != null &&
(Camera.Parameters.FLASH_MODE_ON.equals(flashMode) ||
Camera.Parameters.FLASH_MODE_TORCH.equals(flashMode));
}
}
return false;
}
void setTorch(Camera camera, boolean newSetting) {
Camera.Parameters parameters = camera.getParameters();
doSetTorch(parameters, newSetting, false, QRConstants.disableExposure);
camera.setParameters(parameters);
}
private void initializeTorch(Camera.Parameters parameters, boolean safeMode, boolean disableExposure) {
boolean currentSetting = QRConstants.frontLightMode == FrontLightMode.ON;
doSetTorch(parameters, currentSetting, safeMode, disableExposure);
}
private void doSetTorch(Camera.Parameters parameters, boolean newSetting, boolean safeMode, boolean disableExposure) {
CameraConfigurationUtils.setTorch(parameters, newSetting);
if (!safeMode && !disableExposure) {
CameraConfigurationUtils.setBestExposure(parameters, newSetting);
}
}
}
CameraConfigurationUtils.java
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
public final class CameraConfigurationUtils {
private static final String TAG = "CameraConfiguration";
private static final Pattern SEMICOLON = Pattern.compile(";");
private static final int MIN_PREVIEW_PIXELS = 480 * 320; // normal screen
private static final float MAX_EXPOSURE_COMPENSATION = 1.5f;
private static final float MIN_EXPOSURE_COMPENSATION = 0.0f;
private static final double MAX_ASPECT_DISTORTION = 0.15;
private static final int MIN_FPS = 10;
private static final int MAX_FPS = 20;
private static final int AREA_PER_1000 = 400;
private CameraConfigurationUtils() {
}
public static void setFocus(Camera.Parameters parameters,
boolean autoFocus,
boolean disableContinuous,
boolean safeMode) {
List<String> supportedFocusModes = parameters.getSupportedFocusModes();
String focusMode = null;
if (autoFocus) {
if (safeMode || disableContinuous) {
focusMode = findSettableValue("focus mode",
supportedFocusModes,
Camera.Parameters.FOCUS_MODE_AUTO);
} else {
focusMode = findSettableValue("focus mode",
supportedFocusModes,
Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE,
Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO,
Camera.Parameters.FOCUS_MODE_AUTO);
}
}
// Maybe selected auto-focus but not available, so fall through here:
if (!safeMode && focusMode == null) {
focusMode = findSettableValue("focus mode",
supportedFocusModes,
Camera.Parameters.FOCUS_MODE_MACRO,
Camera.Parameters.FOCUS_MODE_EDOF);
}
if (focusMode != null) {
if (focusMode.equals(parameters.getFocusMode())) {
Log.i(TAG, "Focus mode already set to " + focusMode);
} else {
parameters.setFocusMode(focusMode);
}
}
}
public static void setTorch(Camera.Parameters parameters, boolean on) {
List<String> supportedFlashModes = parameters.getSupportedFlashModes();
String flashMode;
if (on) {
flashMode = findSettableValue("flash mode",
supportedFlashModes,
Camera.Parameters.FLASH_MODE_TORCH,
Camera.Parameters.FLASH_MODE_ON);
} else {
flashMode = findSettableValue("flash mode",
supportedFlashModes,
Camera.Parameters.FLASH_MODE_OFF);
}
if (flashMode != null) {
if (flashMode.equals(parameters.getFlashMode())) {
Log.i(TAG, "Flash mode already set to " + flashMode);
} else {
Log.i(TAG, "Setting flash mode to " + flashMode);
parameters.setFlashMode(flashMode);
}
}
}
public static void setBestExposure(Camera.Parameters parameters, boolean lightOn) {
int minExposure = parameters.getMinExposureCompensation();
int maxExposure = parameters.getMaxExposureCompensation();
float step = parameters.getExposureCompensationStep();
if ((minExposure != 0 || maxExposure != 0) && step > 0.0f) {
// Set low when light is on
float targetCompensation = lightOn ? MIN_EXPOSURE_COMPENSATION : MAX_EXPOSURE_COMPENSATION;
int compensationSteps = Math.round(targetCompensation / step);
float actualCompensation = step * compensationSteps;
// Clamp value:
compensationSteps = Math.max(Math.min(compensationSteps, maxExposure), minExposure);
if (parameters.getExposureCompensation() == compensationSteps) {
Log.i(TAG, "Exposure compensation already set to " + compensationSteps + " / " + actualCompensation);
} else {
Log.i(TAG, "Setting exposure compensation to " + compensationSteps + " / " + actualCompensation);
parameters.setExposureCompensation(compensationSteps);
}
} else {
Log.i(TAG, "Camera does not support exposure compensation");
}
}
public static void setBestPreviewFPS(Camera.Parameters parameters) {
setBestPreviewFPS(parameters, MIN_FPS, MAX_FPS);
}
public static void setBestPreviewFPS(Camera.Parameters parameters, int minFPS, int maxFPS) {
List<int[]> supportedPreviewFpsRanges = parameters.getSupportedPreviewFpsRange();
Log.i(TAG, "Supported FPS ranges: " + toString(supportedPreviewFpsRanges));
if (supportedPreviewFpsRanges != null && !supportedPreviewFpsRanges.isEmpty()) {
int[] suitableFPSRange = null;
for (int[] fpsRange : supportedPreviewFpsRanges) {
int thisMin = fpsRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
int thisMax = fpsRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
if (thisMin >= minFPS * 1000 && thisMax <= maxFPS * 1000) {
suitableFPSRange = fpsRange;
break;
}
}
if (suitableFPSRange == null) {
Log.i(TAG, "No suitable FPS range?");
} else {
int[] currentFpsRange = new int[2];
parameters.getPreviewFpsRange(currentFpsRange);
if (Arrays.equals(currentFpsRange, suitableFPSRange)) {
Log.i(TAG, "FPS range already set to " + Arrays.toString(suitableFPSRange));
} else {
Log.i(TAG, "Setting FPS range to " + Arrays.toString(suitableFPSRange));
parameters.setPreviewFpsRange(suitableFPSRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
suitableFPSRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
}
}
}
}
public static void setFocusArea(Camera.Parameters parameters) {
if (parameters.getMaxNumFocusAreas() > 0) {
Log.i(TAG, "Old focus areas: " + toString(parameters.getFocusAreas()));
List<Camera.Area> middleArea = buildMiddleArea(AREA_PER_1000);
Log.i(TAG, "Setting focus area to : " + toString(middleArea));
parameters.setFocusAreas(middleArea);
} else {
Log.i(TAG, "Device does not support focus areas");
}
}
public static void setMetering(Camera.Parameters parameters) {
if (parameters.getMaxNumMeteringAreas() > 0) {
Log.i(TAG, "Old metering areas: " + parameters.getMeteringAreas());
List<Camera.Area> middleArea = buildMiddleArea(AREA_PER_1000);
Log.i(TAG, "Setting metering area to : " + toString(middleArea));
parameters.setMeteringAreas(middleArea);
} else {
Log.i(TAG, "Device does not support metering areas");
}
}
private static List<Camera.Area> buildMiddleArea(int areaPer1000) {
return Collections.singletonList(
new Camera.Area(new Rect(-areaPer1000, -areaPer1000, areaPer1000, areaPer1000), 1));
}
public static void setVideoStabilization(Camera.Parameters parameters) {
if (parameters.isVideoStabilizationSupported()) {
if (parameters.getVideoStabilization()) {
Log.i(TAG, "Video stabilization already enabled");
} else {
Log.i(TAG, "Enabling video stabilization...");
parameters.setVideoStabilization(true);
}
} else {
Log.i(TAG, "This device does not support video stabilization");
}
}
public static void setBarcodeSceneMode(Camera.Parameters parameters) {
if (Camera.Parameters.SCENE_MODE_BARCODE.equals(parameters.getSceneMode())) {
Log.i(TAG, "Barcode scene mode already set");
return;
}
String sceneMode = findSettableValue("scene mode",
parameters.getSupportedSceneModes(),
Camera.Parameters.SCENE_MODE_BARCODE);
if (sceneMode != null) {
parameters.setSceneMode(sceneMode);
}
}
public static void setZoom(Camera.Parameters parameters, double targetZoomRatio) {
if (parameters.isZoomSupported()) {
Integer zoom = indexOfClosestZoom(parameters, targetZoomRatio);
if (zoom == null) {
return;
}
if (parameters.getZoom() == zoom) {
Log.i(TAG, "Zoom is already set to " + zoom);
} else {
Log.i(TAG, "Setting zoom to " + zoom);
parameters.setZoom(zoom);
}
} else {
Log.i(TAG, "Zoom is not supported");
}
}
private static Integer indexOfClosestZoom(Camera.Parameters parameters, double targetZoomRatio) {
List<Integer> ratios = parameters.getZoomRatios();
Log.i(TAG, "Zoom ratios: " + ratios);
int maxZoom = parameters.getMaxZoom();
if (ratios == null || ratios.isEmpty() || ratios.size() != maxZoom + 1) {
Log.w(TAG, "Invalid zoom ratios!");
return null;
}
double target100 = 100.0 * targetZoomRatio;
double smallestDiff = Double.POSITIVE_INFINITY;
int closestIndex = 0;
for (int i = 0; i < ratios.size(); i++) {
double diff = Math.abs(ratios.get(i) - target100);
if (diff < smallestDiff) {
smallestDiff = diff;
closestIndex = i;
}
}
Log.i(TAG, "Chose zoom ratio of " + (ratios.get(closestIndex) / 100.0));
return closestIndex;
}
public static void setInvertColor(Camera.Parameters parameters) {
if (Camera.Parameters.EFFECT_NEGATIVE.equals(parameters.getColorEffect())) {
Log.i(TAG, "Negative effect already set");
return;
}
String colorMode = findSettableValue("color effect",
parameters.getSupportedColorEffects(),
Camera.Parameters.EFFECT_NEGATIVE);
if (colorMode != null) {
parameters.setColorEffect(colorMode);
}
}
public static Point findBestPreviewSizeValue(Camera.Parameters parameters, Point screenResolution) {
List<Camera.Size> rawSupportedSizes = parameters.getSupportedPreviewSizes();
if (rawSupportedSizes == null) {
Log.w(TAG, "Device returned no supported preview sizes; using default");
Camera.Size defaultSize = parameters.getPreviewSize();
if (defaultSize == null) {
throw new IllegalStateException("Parameters contained no preview size!");
}
return new Point(defaultSize.width, defaultSize.height);
}
// Sort by size, descending
List<Camera.Size> supportedPreviewSizes = new ArrayList<>(rawSupportedSizes);
Collections.sort(supportedPreviewSizes, new Comparator<Camera.Size>() {
@Override
public int compare(Camera.Size a, Camera.Size b) {
int aPixels = a.height * a.width;
int bPixels = b.height * b.width;
if (bPixels < aPixels) {
return -1;
}
if (bPixels > aPixels) {
return 1;
}
return 0;
}
});
if (Log.isLoggable(TAG, Log.INFO)) {
StringBuilder previewSizesString = new StringBuilder();
for (Camera.Size supportedPreviewSize : supportedPreviewSizes) {
previewSizesString.append(supportedPreviewSize.width).append('x')
.append(supportedPreviewSize.height).append(' ');
}
Log.i(TAG, "Supported preview sizes: " + previewSizesString);
}
double screenAspectRatio = (double) screenResolution.x / (double) screenResolution.y;
// Remove sizes that are unsuitable
Iterator<Camera.Size> it = supportedPreviewSizes.iterator();
while (it.hasNext()) {
Camera.Size supportedPreviewSize = it.next();
int realWidth = supportedPreviewSize.width;
int realHeight = supportedPreviewSize.height;
if (realWidth * realHeight < MIN_PREVIEW_PIXELS) {
it.remove();
continue;
}
boolean isCandidatePortrait = realWidth < realHeight;
int maybeFlippedWidth = isCandidatePortrait ? realHeight : realWidth;
int maybeFlippedHeight = isCandidatePortrait ? realWidth : realHeight;
double aspectRatio = (double) maybeFlippedWidth / (double) maybeFlippedHeight;
double distortion = Math.abs(aspectRatio - screenAspectRatio);
if (distortion > MAX_ASPECT_DISTORTION) {
it.remove();
continue;
}
if (maybeFlippedWidth == screenResolution.x && maybeFlippedHeight == screenResolution.y) {
Point exactPoint = new Point(realWidth, realHeight);
Log.i(TAG, "Found preview size exactly matching screen size: " + exactPoint);
return exactPoint;
}
}
// If no exact match, use largest preview size. This was not a great idea on older devices because
// of the additional computation needed. We're likely to get here on newer Android 4+ devices, where
// the CPU is much more powerful.
if (!supportedPreviewSizes.isEmpty()) {
Camera.Size largestPreview = supportedPreviewSizes.get(0);
Point largestSize = new Point(largestPreview.width, largestPreview.height);
Log.i(TAG, "Using largest suitable preview size: " + largestSize);
return largestSize;
}
// If there is nothing at all suitable, return current preview size
Camera.Size defaultPreview = parameters.getPreviewSize();
if (defaultPreview == null) {
throw new IllegalStateException("Parameters contained no preview size!");
}
Point defaultSize = new Point(defaultPreview.width, defaultPreview.height);
Log.i(TAG, "No suitable preview sizes, using default: " + defaultSize);
return defaultSize;
}
private static String findSettableValue(String name,
Collection<String> supportedValues,
String... desiredValues) {
Log.i(TAG, "Requesting " + name + " value from among: " + Arrays.toString(desiredValues));
Log.i(TAG, "Supported " + name + " values: " + supportedValues);
if (supportedValues != null) {
for (String desiredValue : desiredValues) {
if (supportedValues.contains(desiredValue)) {
Log.i(TAG, "Can set " + name + " to: " + desiredValue);
return desiredValue;
}
}
}
Log.i(TAG, "No supported values match");
return null;
}
private static String toString(Collection<int[]> arrays) {
if (arrays == null || arrays.isEmpty()) {
return "[]";
}
StringBuilder buffer = new StringBuilder();
buffer.append('[');
Iterator<int[]> it = arrays.iterator();
while (it.hasNext()) {
buffer.append(Arrays.toString(it.next()));
if (it.hasNext()) {
buffer.append(", ");
}
}
buffer.append(']');
return buffer.toString();
}
private static String toString(Iterable<Camera.Area> areas) {
if (areas == null) {
return null;
}
StringBuilder result = new StringBuilder();
for (Camera.Area area : areas) {
result.append(area.rect).append(':').append(area.weight).append(' ');
}
return result.toString();
}
public static String collectStats(Camera.Parameters parameters) {
return collectStats(parameters.flatten());
}
public static String collectStats(CharSequence flattenedParams) {
StringBuilder result = new StringBuilder(1000);
result.append("BOARD=").append(Build.BOARD).append('\n');
result.append("BRAND=").append(Build.BRAND).append('\n');
result.append("CPU_ABI=").append(Build.CPU_ABI).append('\n');
result.append("DEVICE=").append(Build.DEVICE).append('\n');
result.append("DISPLAY=").append(Build.DISPLAY).append('\n');
result.append("FINGERPRINT=").append(Build.FINGERPRINT).append('\n');
result.append("HOST=").append(Build.HOST).append('\n');
result.append("ID=").append(Build.ID).append('\n');
result.append("MANUFACTURER=").append(Build.MANUFACTURER).append('\n');
result.append("MODEL=").append(Build.MODEL).append('\n');
result.append("PRODUCT=").append(Build.PRODUCT).append('\n');
result.append("TAGS=").append(Build.TAGS).append('\n');
result.append("TIME=").append(Build.TIME).append('\n');
result.append("TYPE=").append(Build.TYPE).append('\n');
result.append("USER=").append(Build.USER).append('\n');
result.append("VERSION.CODENAME=").append(Build.VERSION.CODENAME).append('\n');
result.append("VERSION.INCREMENTAL=").append(Build.VERSION.INCREMENTAL).append('\n');
result.append("VERSION.RELEASE=").append(Build.VERSION.RELEASE).append('\n');
result.append("VERSION.SDK_INT=").append(Build.VERSION.SDK_INT).append('\n');
if (flattenedParams != null) {
String[] params = SEMICOLON.split(flattenedParams);
Arrays.sort(params);
for (String param : params) {
result.append(param).append('\n');
}
}
return result.toString();
}
}
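One knob in the utility class above that this version never calls is setZoom. If small A4 labels have to be scanned from a distance, a modest digital zoom is worth experimenting with; as a sketch (an assumption, not part of the changes described in this article), a line like the following could be added inside the if (!safeMode) block of CameraConfigurationManager.setDesiredCameraParameters:
// Hypothetical extra tweak: zoom in ~1.5x so distant codes fill more of the preview frame.
// On some devices digital zoom degrades focus, so measure before keeping it.
CameraConfigurationUtils.setZoom(parameters, 1.5);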
CameraManager.java
public final class CameraManager {
private static final String TAG = CameraManager.class.getSimpleName();
private static final int MIN_FRAME_WIDTH = 240;
private static final int MIN_FRAME_HEIGHT = 240;
private static final int MAX_FRAME_WIDTH = 1200; // = 5/8 * 1920
private static final int MAX_FRAME_HEIGHT = 675; // = 5/8 * 1080
private final Context context;
private final CameraConfigurationManager configManager;
private OpenCamera camera;
private AutoFocusManager autoFocusManager;
private Rect framingRect;
private Rect framingRectInPreview;
private boolean initialized;
private boolean previewing;
private int requestedCameraId = OpenCameraInterface.NO_REQUESTED_CAMERA;
private int requestedFramingRectWidth;
private int requestedFramingRectHeight;
/**
* Preview frames are delivered here, which we pass on to the registered handler. Make sure to
* clear the handler so it will only receive one message.
*/
private final PreviewCallback previewCallback;
public CameraManager(Context context) {
this.context = context;
this.configManager = new CameraConfigurationManager(context);
previewCallback = new PreviewCallback(configManager);
}
/**
* Opens the camera driver and initializes the hardware parameters.
*
* @param holder The surface object which the camera will draw preview frames into.
* @throws IOException Indicates the camera driver failed to open.
*/
public synchronized void openDriver(SurfaceHolder holder) throws IOException {
OpenCamera theCamera = camera;
if (theCamera == null) {
theCamera = OpenCameraInterface.open(requestedCameraId);
if (theCamera == null) {
throw new IOException("Camera.open() failed to return object from driver");
}
camera = theCamera;
}
if (!initialized) {
initialized = true;
configManager.initFromCameraParameters(theCamera);
if (requestedFramingRectWidth > 0 && requestedFramingRectHeight > 0) {
setManualFramingRect(requestedFramingRectWidth, requestedFramingRectHeight);
requestedFramingRectWidth = 0;
requestedFramingRectHeight = 0;
}
}
Camera cameraObject = theCamera.getCamera();
Camera.Parameters parameters = cameraObject.getParameters();
String parametersFlattened = parameters == null ? null : parameters.flatten(); // Save these, temporarily
try {
configManager.setDesiredCameraParameters(theCamera, false);
} catch (RuntimeException re) {
// Driver failed
Log.w(TAG, "Camera rejected parameters. Setting only minimal safe-mode parameters");
Log.i(TAG, "Resetting to saved camera params: " + parametersFlattened);
// Reset:
if (parametersFlattened != null) {
parameters = cameraObject.getParameters();
parameters.unflatten(parametersFlattened);
try {
cameraObject.setParameters(parameters);
configManager.setDesiredCameraParameters(theCamera, true);
} catch (RuntimeException re2) {
// Well, darn. Give up
Log.w(TAG, "Camera rejected even safe-mode parameters! No configuration");
}
}
}
cameraObject.setPreviewDisplay(holder);
}
public synchronized boolean isOpen() {
return camera != null;
}
/**
* Closes the camera driver if still in use.
*/
public synchronized void closeDriver() {
if (camera != null) {
camera.getCamera().release();
camera = null;
// Make sure to clear these each time we close the camera, so that any scanning rect
// requested by intent is forgotten.
framingRect = null;
framingRectInPreview = null;
}
}
/**
* Asks the camera hardware to begin drawing preview frames to the screen.
*/
public synchronized void startPreview() {
OpenCamera theCamera = camera;
if (theCamera != null && !previewing) {
theCamera.getCamera().startPreview();
previewing = true;
autoFocusManager = new AutoFocusManager(context, theCamera.getCamera());
}
}
/**
* Tells the camera to stop drawing preview frames.
*/
public synchronized void stopPreview() {
if (autoFocusManager != null) {
autoFocusManager.stop();
autoFocusManager = null;
}
if (camera != null && previewing) {
camera.getCamera().stopPreview();
previewCallback.setHandler(null, 0);
previewing = false;
}
}
/**
* Convenience method for {@link CaptureActivity}
*
* @param newSetting if {@code true}, light should be turned on if currently off. And vice versa.
*/
public synchronized void setTorch(boolean newSetting) {
OpenCamera theCamera = camera;
if (theCamera != null) {
if (newSetting != configManager.getTorchState(theCamera.getCamera())) {
boolean wasAutoFocusManager = autoFocusManager != null;
if (wasAutoFocusManager) {
autoFocusManager.stop();
autoFocusManager = null;
}
configManager.setTorch(theCamera.getCamera(), newSetting);
if (wasAutoFocusManager) {
autoFocusManager = new AutoFocusManager(context, theCamera.getCamera());
autoFocusManager.start();
}
}
}
}
/**
* A single preview frame will be returned to the handler supplied. The data will arrive as byte[]
* in the message.obj field, with width and height encoded as message.arg1 and message.arg2,
* respectively.
*
* @param handler The handler to send the message to.
* @param message The what field of the message to be sent.
*/
public synchronized void requestPreviewFrame(Handler handler, int message) {
OpenCamera theCamera = camera;
if (theCamera != null && previewing) {
previewCallback.setHandler(handler, message);
theCamera.getCamera().setOneShotPreviewCallback(previewCallback);
}
}
/**
* Calculates the framing rect which the UI should draw to show the user where to place the
* barcode. This target helps with alignment as well as forces the user to hold the device
* far enough away to ensure the image will be in focus.
*
* @return The rectangle to draw on screen in window coordinates.
*/
public synchronized Rect getFramingRect() {
if (framingRect == null) {
if (camera == null) {
return null;
}
Point screenResolution = configManager.getScreenResolution();
if (screenResolution == null) {
// Called early, before init even finished
return null;
}
int width = findDesiredDimensionInRange(screenResolution.x, MIN_FRAME_WIDTH, MAX_FRAME_WIDTH);
int height = findDesiredDimensionInRange(screenResolution.y, MIN_FRAME_HEIGHT, MAX_FRAME_HEIGHT);
// keep the framing rect square (equal width and height)
int finalSize = height;
if (height > width) {
finalSize = width;
}
int leftOffset = (screenResolution.x - finalSize) / 2;
int topOffset = (screenResolution.y - finalSize) / 2;
framingRect = new Rect(leftOffset, topOffset, leftOffset + finalSize, topOffset + finalSize);
Log.d(TAG, "Calculated framing rect: " + framingRect + " width =" + width + " height = " + height +
" screenResolution.x = " + screenResolution.x + " screenResolution.y = " + screenResolution.y);
}
return framingRect;
}
private static int findDesiredDimensionInRange(int resolution, int hardMin, int hardMax) {
int dim = 5 * resolution / 8; // Target 5/8 of each dimension
if (dim < hardMin) {
return hardMin;
}
if (dim > hardMax) {
return hardMax;
}
return dim;
}
/**
* Like {@link #getFramingRect} but coordinates are in terms of the preview frame,
* not UI / screen.
*
* @return {@link Rect} expressing barcode scan area in terms of the preview size
*/
public synchronized Rect getFramingRectInPreview() {
if (framingRectInPreview == null) {
Rect framingRect = getFramingRect();
if (framingRect == null) {
return null;
}
Rect rect = new Rect(framingRect);
Point cameraResolution = configManager.getCameraResolution();
Point screenResolution = configManager.getScreenResolution();
if (cameraResolution == null || screenResolution == null) {
// Called early, before init even finished
return null;
}
rect.left = rect.left * cameraResolution.x / screenResolution.x;
rect.right = rect.right * cameraResolution.x / screenResolution.x;
rect.top = rect.top * cameraResolution.y / screenResolution.y;
rect.bottom = rect.bottom * cameraResolution.y / screenResolution.y;
framingRectInPreview = rect;
}
return framingRectInPreview;
}
/**
* Allows third party apps to specify the camera ID, rather than determine
* it automatically based on available cameras and their orientation.
*
* @param cameraId camera ID of the camera to use. A negative value means "no preference".
*/
public synchronized void setManualCameraId(int cameraId) {
requestedCameraId = cameraId;
}
/**
* Allows third party apps to specify the scanning rectangle dimensions, rather than determine
* them automatically based on screen resolution.
*
* @param width The width in pixels to scan.
* @param height The height in pixels to scan.
*/
public synchronized void setManualFramingRect(int width, int height) {
if (initialized) {
Point screenResolution = configManager.getScreenResolution();
if (width > screenResolution.x) {
width = screenResolution.x;
}
if (height > screenResolution.y) {
height = screenResolution.y;
}
int leftOffset = (screenResolution.x - width) / 2;
int topOffset = (screenResolution.y - height) / 2;
framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
Log.d(TAG, "Calculated manual framing rect: " + framingRect);
framingRectInPreview = null;
} else {
requestedFramingRectWidth = width;
requestedFramingRectHeight = height;
}
}
/**
* A factory method to build the appropriate LuminanceSource object based on the format
* of the preview buffers, as described by Camera.Parameters.
*
* @param data A preview frame.
* @param width The width of the image.
* @param height The height of the image.
* @return A PlanarYUVLuminanceSource instance.
*/
public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
Rect rect = getFramingRectInPreview();
if (rect == null) {
return null;
}
// Go ahead and assume it's YUV rather than die.
return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
rect.width(), rect.height(), false);
}
public void openLight() {
if (camera != null && camera.getCamera() != null) {
Camera.Parameters parameter = camera.getCamera().getParameters();
parameter.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
camera.getCamera().setParameters(parameter);
}
}
public void offLight() {
if (camera != null && camera.getCamera() != null) {
Camera.Parameters parameter = camera.getCamera().getParameters();
parameter.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
camera.getCamera().setParameters(parameter);
}
}
}
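openLight() and offLight() give the capture screen a simple torch toggle. The click handler that CaptureActivity wires to flashLightLayout is not included in the listings below, so the following is only a plausible sketch using these helpers and the isLightOn flag declared there:
// Hypothetical torch toggle inside CaptureActivity.onClick() (icon updates omitted).
if (isLightOn) {
    cameraManager.offLight();
} else {
    cameraManager.openLight();
}
isLightOn = !isLightOn;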
FrontLightMode.java
public enum FrontLightMode {
/** Always on. */
ON,
/** On only when ambient light is low. */
AUTO,
/** Always off. */
OFF
/* private static FrontLightMode parse(String modeString) {
return modeString == null ? OFF : valueOf(modeString);
}
public static FrontLightMode readPref(SharedPreferences sharedPrefs) {
return parse(sharedPrefs.getString(PreferencesActivity.KEY_FRONT_LIGHT_MODE, OFF.toString()));
}*/
}
PreviewCallback.java
final class PreviewCallback implements Camera.PreviewCallback {
private static final String TAG = PreviewCallback.class.getSimpleName();
private final CameraConfigurationManager configManager;
private Handler previewHandler;
private int previewMessage;
PreviewCallback(CameraConfigurationManager configManager) {
this.configManager = configManager;
}
void setHandler(Handler previewHandler, int previewMessage) {
this.previewHandler = previewHandler;
this.previewMessage = previewMessage;
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Point cameraResolution = configManager.getCameraResolution();
Handler thePreviewHandler = previewHandler;
if (cameraResolution != null && thePreviewHandler != null) {
// hand the frame off to the DecodeHandler
Message message = thePreviewHandler.obtainMessage(previewMessage, cameraResolution.x,
cameraResolution.y, data);
message.sendToTarget();
previewHandler = null;
} else {
Log.d(TAG, "Got preview callback, but no handler or resolution available");
}
}
}
QRConstants.java
public class QRConstants {
public static boolean vibrateEnable = true;
public static boolean beepEnable = true;
public static FrontLightMode frontLightMode = FrontLightMode.OFF;
public static boolean disableExposure = true;
public static boolean autoFocus = true;
public static final String KEY_AUTO_FOCUS = "preferences_auto_focus";
}
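These static flags are read directly by the camera classes above: KEY_AUTO_FOCUS and autoFocus by AutoFocusManager and CameraConfigurationManager, disableExposure by the torch/exposure code, and frontLightMode by AmbientLightManager. A quick sketch of adjusting them before launching the scanner, for example to let the light sensor drive the torch in dark dock areas, could be:
// Set before starting CaptureActivity; all fields exist in QRConstants / FrontLightMode above.
QRConstants.autoFocus = true;                      // keep the auto-focus cycle running
QRConstants.disableExposure = false;               // let setBestExposure() adjust exposure compensation
QRConstants.frontLightMode = FrontLightMode.AUTO;  // AmbientLightManager toggles the torch in low light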
Constant.java
public class Constant {
public static final int DECODE = 1;
public static final int DECODE_FAILED = 2;
public static final int DECODE_SUCCEEDED = 3;
public static final int LAUNCH_PRODUCT_QUERY = 4;
public static final int QUIT = 5;
public static final int RESTART_PREVIEW = 6;
public static final int RETURN_SCAN_RESULT = 7;
public static final int FLASH_OPEN = 8;
public static final int FLASH_CLOSE = 9;
public static final int REQUEST_IMAGE = 10;
public static final String CODED_CONTENT = "codedContent";
public static final String CODED_BITMAP = "codedBitmap";
/* key for the ZxingConfig passed in via the Intent */
public static final String INTENT_ZXING_CONFIG = "zxingConfig";
}
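CODED_CONTENT and CODED_BITMAP are the extras the scan result is expected to come back under. The result-returning code is not part of the listings in this article, so the receiving side below is an assumption based on the key names:
// Hypothetical result handling in the host Activity (REQUEST_SCAN as in the launch sketch above).
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_SCAN && resultCode == RESULT_OK && data != null) {
        String content = data.getStringExtra(Constant.CODED_CONTENT);
        Log.d("Scan", "decoded: " + content);
    }
}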
AmbientLightManager.java
public final class AmbientLightManager implements SensorEventListener {
private static final float TOO_DARK_LUX = 45.0f;
private static final float BRIGHT_ENOUGH_LUX = 450.0f;
private final Context context;
private CameraManager cameraManager;
private Sensor lightSensor;
public AmbientLightManager(Context context) {
this.context = context;
}
public void start(CameraManager cameraManager) {
this.cameraManager = cameraManager;
if (QRConstants.frontLightMode == FrontLightMode.AUTO) {
SensorManager sensorManager = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
lightSensor = sensorManager.getDefaultSensor(Sensor.TYPE_LIGHT);
if (lightSensor != null) {
sensorManager.registerListener(this, lightSensor, SensorManager.SENSOR_DELAY_NORMAL);
}
}
}
public void stop() {
if (lightSensor != null) {
SensorManager sensorManager = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
sensorManager.unregisterListener(this);
cameraManager = null;
lightSensor = null;
}
}
@Override
public void onSensorChanged(SensorEvent sensorEvent) {
float ambientLightLux = sensorEvent.values[0];
if (cameraManager != null) {
if (ambientLightLux <= TOO_DARK_LUX) {
cameraManager.setTorch(true);
} else if (ambientLightLux >= BRIGHT_ENOUGH_LUX) {
cameraManager.setTorch(false);
}
}
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
// do nothing
}
}
BeepManager.java
public final class BeepManager implements MediaPlayer.OnCompletionListener,
MediaPlayer.OnErrorListener, Closeable {
private static final String TAG = BeepManager.class.getSimpleName();
private static final float BEEP_VOLUME = 0.10f;
private static final long VIBRATE_DURATION = 200L;
private final Activity activity;
private MediaPlayer mediaPlayer;
private boolean playBeep;
private boolean vibrate;
public BeepManager(Activity activity) {
this.activity = activity;
this.mediaPlayer = null;
updatePrefs();
}
public boolean isPlayBeep() {
return playBeep;
}
public void setPlayBeep(boolean playBeep) {
this.playBeep = playBeep;
}
public boolean isVibrate() {
return vibrate;
}
public void setVibrate(boolean vibrate) {
this.vibrate = vibrate;
}
public synchronized void updatePrefs() {
if (playBeep && mediaPlayer == null) {
// The volume on STREAM_SYSTEM is not adjustable and users found it too loud,
// so we now play on the music stream.
// Make the activity's volume keys control the music stream.
activity.setVolumeControlStream(AudioManager.STREAM_MUSIC);
mediaPlayer = buildMediaPlayer(activity);
}
}
/**
* Plays the beep sound and vibrates.
*/
@SuppressLint("MissingPermission")
public synchronized void playBeepSoundAndVibrate() {
if (playBeep && mediaPlayer != null) {
mediaPlayer.start();
}
if (vibrate) {
Vibrator vibrator = (Vibrator) activity
.getSystemService(Context.VIBRATOR_SERVICE);
vibrator.vibrate(VIBRATE_DURATION);
}
}
/**
* Builds the MediaPlayer used for the beep sound.
*
* @param activity context used to load the beep resource
* @return a prepared MediaPlayer, or null if it could not be created
*/
private MediaPlayer buildMediaPlayer(Context activity) {
MediaPlayer mediaPlayer = new MediaPlayer();
mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
// listen for playback completion
mediaPlayer.setOnCompletionListener(this);
mediaPlayer.setOnErrorListener(this);
// configure the audio source
try {
AssetFileDescriptor file = activity.getResources()
.openRawResourceFd(R.raw.beep);
try {
mediaPlayer.setDataSource(file.getFileDescriptor(),
file.getStartOffset(), file.getLength());
} finally {
file.close();
}
// set the beep volume
mediaPlayer.setVolume(BEEP_VOLUME, BEEP_VOLUME);
mediaPlayer.prepare();
return mediaPlayer;
} catch (IOException ioe) {
Log.w(TAG, ioe);
mediaPlayer.release();
return null;
}
}
@Override
public void onCompletion(MediaPlayer mp) {
// When the beep has finished playing, rewind to queue up another one.
mp.seekTo(0);
}
@Override
public synchronized boolean onError(MediaPlayer mp, int what, int extra) {
if (what == MediaPlayer.MEDIA_ERROR_SERVER_DIED) {
// we are finished, so put up an appropriate error toast if required
// and finish
activity.finish();
} else {
// possibly media player error, so release and recreate
mp.release();
mediaPlayer = null;
updatePrefs();
}
return true;
}
@Override
public synchronized void close() {
if (mediaPlayer != null) {
mediaPlayer.release();
mediaPlayer = null;
}
}
}
CaptureActivity.java
public class CaptureActivity extends Activity implements SurfaceHolder.Callback, View.OnClickListener {
static {
AppCompatDelegate.setCompatVectorFromResourcesEnabled(true); // work around vector drawable incompatibility on devices below Android 5.1
}
private static final String TAG = CaptureActivity.class.getSimpleName();
private ImageView mBackmImg;
private TextView mTitle;
public int REQ_ID_GALLERY = 0;
public static boolean isLightOn = false;
private IMResUtil mImResUtil;
public static void startAction(Activity activity, Bundle bundle, int requestCode) {
Intent intent = new Intent(activity, CaptureActivity.class);
intent.putExtras(bundle);
activity.startActivityForResult(intent, requestCode);
}
private CameraManager cameraManager;
private CaptureActivityHandler handler;
private ViewfinderView viewfinderView;
private boolean hasSurface;
private Collection<BarcodeFormat> decodeFormats;
private String characterSet;
private InactivityTimer inactivityTimer;
private BeepManager beepManager;
private AmbientLightManager ambientLightManager;
private LinearLayout bottomLayout;
private TextView flashLightTv;
private ImageView flashLightIv;
private LinearLayout flashLightLayout;
private LinearLayout albumLayout;
private ZxingConfig config;
private ImageView img_phone;
public ViewfinderView getViewfinderView() {
return viewfinderView;
}
public Handler getHandler() {
return handler;
}
public CameraManager getCameraManager() {
return cameraManager;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mImResUtil = new IMResUtil(this);
setContentView(mImResUtil.getLayout("activity_device_qrcode_capture"));
// set up the immersive status bar
setColor(this, Color.BLACK);
hasSurface = false;
inactivityTimer = new InactivityTimer(this);
ambientLightManager = new AmbientLightManager(this);
/* read the scan configuration first */
try {
config = (ZxingConfig) getIntent().getExtras().get(Constant.INTENT_ZXING_CONFIG);
} catch (Exception e) {
Log.i("config", e.toString());
}
if (config == null) {
config = new ZxingConfig();
}
beepManager = new BeepManager(this);
beepManager.setPlayBeep(config.isPlayBeep());
beepManager.setVibrate(config.isShake());
initView(getIntent().getExtras());
onEvent(getIntent().getExtras());
}
private void initView(Bundle bundle) {
// check whether the caller requested landscape orientation
if (bundle != null && "landscape".equals(bundle.getString("portraitOrLandscape"))) {
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}
mBackmImg = (ImageView) findViewById(mImResUtil.getId("iv_qr_back"));
mTitle = (TextView) findViewById(mImResUtil.getId("tv_qr_title"));
viewfinderView = (ViewfinderView) findViewById(mImResUtil.getId("vv_qr_viewfinderView"));
flashLightTv = (TextView) findViewById(mImResUtil.getId("flashLightTv"));
bottomLayout = (LinearLayout) findViewById(mImResUtil.getId("bottomLayout"));
flashLightIv = (ImageView) findViewById(mImResUtil.getId("flashLightIv"));
img_phone = (ImageView) findViewById(mImResUtil.getId("img_phone"));
flashLightLayout = (LinearLayout) findViewById(mImResUtil.getId("flashLightLayout"));
flashLightLayout.setOnClickListener(this);
albumLayout = (LinearLayout) findViewById(mImResUtil.getId("albumLayout"));
albumLayout.setOnClickListener(this);
}
private void onEvent(Bundle bundle) {
switchVisibility(bottomLayout, config.isShowbottomLayout());
switchVisibility(flashLightLayout, config.isShowFlashLight());
switchVisibility(albumLayout, config.isShowAlbum());
flashLightIv.setImageResource(R.drawable.device_qrcode_scan_flash_off);
img_phone.setImageResource(R.drawable.ic_photo);
/* show the flashlight button only if the device has a camera flash */
if (isSupportCameraLedFlash(getPackageManager())) {
flashLightLayout.setVisibility(View.VISIBLE);
} else {
flashLightLayout.setVisibility(View.GONE);
}
/******************** end of added code *****************************/
mBackmImg.setOnClickListener(this);
if (bundle == null) {
return;
}
String titileText = bundle.getString("titileText");
if (titileText != null && !titileText.isEmpty()) {
mTitle.setText(titileText);
}
String headColor = bundle.getString("headColor");
if (headColor != null && !headColor.isEmpty()) {
mTitle.setTextColor(Color.parseColor(headColor));
}
float headSize = bundle.getFloat("headSize");
if (headSize > 0) {
mTitle.setTextSize(headSize);
}
}
/**
* Sets up an immersive (translucent) status bar.
*
* @param activity
* @param color
*/
public static void setColor(Activity activity, int color) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
// make the status bar translucent
activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
// create a view the size of the status bar
View statusView = createStatusView(activity, color);
// add statusView to the decor view
ViewGroup decorView = (ViewGroup) activity.getWindow().getDecorView();
decorView.addView(statusView);
// adjust the root layout's parameters
ViewGroup rootView = (ViewGroup) ((ViewGroup) activity.findViewById(android.R.id.content)).getChildAt(0);
rootView.setFitsSystemWindows(true);
rootView.setClipToPadding(true);
}
}
/**
* Draws a view the same height as the status bar