This is the first version of the ZXing Android client.

git-svn-id: https://zxing.googlecode.com/svn/trunk@185 59b500cc-1b3d-0410-9834-0bbf25fbcc57
dswitkin 2008-02-14 19:02:18 +00:00
parent de766db767
commit 1ca6c123d4
11 changed files with 1010 additions and 0 deletions

AndroidManifest.xml

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.google.zxing.client.android">
<application android:icon="@drawable/icon">
<activity class=".BarcodeReaderCaptureActivity" android:label="@string/app_name">
<intent-filter>
<action android:value="android.intent.action.MAIN" />
<category android:value="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<uses-permission id="android.permission.READ_CONTACTS" />
<uses-permission id="android.permission.WRITE_CONTACTS" />
</manifest>

res/drawable/icon.png: binary file not shown (5.8 KiB).

res/layout/main.xml

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
>
</LinearLayout>

res/values/ids.xml

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<resources>
<item type="id" name="decoding_succeeded_message"/>
<item type="id" name="decoding_failed_message"/>
</resources>

res/values/strings.xml

@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">Barcode Reader</string>
<string name="button_no">No</string>
<string name="button_ok">OK</string>
<string name="button_yes">Yes</string>
<string name="menu_about">About...</string>
<string name="msg_about">ZXing Barcode Reader v0.4\nhttp://code.google.com/p/zxing</string>
<string name="msg_no_barcode_detected">Sorry, no barcode was found.</string>
<string name="title_about">About</string>
<string name="title_barcode_detected">Barcode Detected</string>
<string name="title_no_barcode_detected">No Barcode Detected</string>
<string name="title_error">Error</string>
<string name="title_open_url">Open Web Page?</string>
<string name="title_add_contact">Add Contact?</string>
<string name="title_compose_email">Compose E-mail?</string>
<string name="title_lookup_barcode">Look Up Barcode Online?</string>
</resources>

BarcodeReaderCaptureActivity.java

@@ -0,0 +1,189 @@
/*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android;
import com.google.zxing.Result;
import com.google.zxing.ResultPoint;
import com.google.zxing.client.result.ParsedReaderResult;
import com.google.zxing.client.result.ParsedReaderResultType;
import android.app.Activity;
import android.content.Context;
import android.graphics.PixelFormat;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.Window;
import android.view.WindowManager.LayoutParams;
public final class BarcodeReaderCaptureActivity extends Activity {
private CameraManager cameraManager;
private CameraSurfaceView surfaceView;
private WorkerThread workerThread;
private static final int ABOUT_ID = Menu.FIRST;
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
requestWindowFeature(Window.FEATURE_NO_TITLE);
// Make sure to create a TRANSLUCENT window. This is required for SurfaceView to work.
// Eventually this'll be done by the system automatically.
getWindow().setAttributes(new LayoutParams(LayoutParams.APPLICATION_TYPE,
LayoutParams.NO_STATUS_BAR_FLAG));
getWindow().setFormat(PixelFormat.TRANSLUCENT);
cameraManager = new CameraManager(getApplication());
surfaceView = new CameraSurfaceView(getApplication(), cameraManager);
setContentView(surfaceView);
workerThread = new WorkerThread(surfaceView, cameraManager, messageHandler);
workerThread.requestPreviewLoop();
workerThread.start();
}
@Override
protected boolean isFullscreenOpaque() {
// Our main window is set to translucent, but we know that we will
// fill it with opaque data. Tell the system that so it can perform
// some important optimizations.
return true;
}
@Override
protected void onResume() {
super.onResume();
cameraManager.openDriver();
if (workerThread == null) {
workerThread = new WorkerThread(surfaceView, cameraManager, messageHandler);
workerThread.requestPreviewLoop();
workerThread.start();
}
}
@Override
protected void onPause() {
super.onPause();
if (workerThread != null) {
workerThread.requestExitAndWait();
workerThread = null;
}
cameraManager.closeDriver();
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_DPAD_CENTER) {
workerThread.requestStillAndDecode();
return true;
} else {
return super.onKeyDown(keyCode, event);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
super.onCreateOptionsMenu(menu);
menu.add(0, ABOUT_ID, R.string.menu_about);
return true;
}
@Override
public boolean onOptionsItemSelected(Menu.Item item) {
switch (item.getId()) {
case ABOUT_ID:
Context context = getApplication();
showAlert(context.getString(R.string.title_about),
context.getString(R.string.msg_about),
context.getString(R.string.button_ok), null, true, null);
break;
}
return super.onOptionsItemSelected(item);
}
Handler messageHandler = new Handler() {
@Override
public void handleMessage(Message message) {
switch (message.what) {
case R.id.decoding_succeeded_message:
handleDecode((Result)message.obj);
break;
case R.id.decoding_failed_message:
Context context = getApplication();
showAlert(context.getString(R.string.title_no_barcode_detected),
context.getString(R.string.msg_no_barcode_detected),
context.getString(R.string.button_ok), null, true, null);
break;
}
}
};
public void restartPreview() {
workerThread.requestPreviewLoop();
}
// TODO(dswitkin): These deprecated showAlert calls need to be updated.
private void handleDecode(Result rawResult) {
ResultPoint[] points = rawResult.getResultPoints();
if (points != null && points.length > 0) {
surfaceView.drawResultPoints(points);
}
Context context = getApplication();
ParsedReaderResult readerResult = ParsedReaderResult.parseReaderResult(rawResult.getText());
Handler handler = new ResultHandler(this, readerResult);
if (canBeHandled(readerResult.getType())) {
// Can be handled by some external app; ask if the user wants to
// proceed first though
Message yesMessage = handler.obtainMessage(R.string.button_yes);
Message noMessage = handler.obtainMessage(R.string.button_no);
showAlert(context.getString(getDialogTitleID(readerResult.getType())),
readerResult.getDisplayResult(), context.getString(R.string.button_yes),
yesMessage, context.getString(R.string.button_no), noMessage, true, noMessage);
} else {
// Just show information to user
Message okMessage = handler.obtainMessage(R.string.button_ok);
showAlert(context.getString(R.string.title_barcode_detected),
readerResult.getDisplayResult(), context.getString(R.string.button_ok), okMessage, null,
null, true, okMessage);
}
}
private static boolean canBeHandled(ParsedReaderResultType type) {
return type != ParsedReaderResultType.TEXT;
}
private static int getDialogTitleID(ParsedReaderResultType type) {
if (type == ParsedReaderResultType.ADDRESSBOOK) {
return R.string.title_add_contact;
} else if (type == ParsedReaderResultType.BOOKMARK) {
return R.string.title_open_url;
} else if (type == ParsedReaderResultType.EMAIL || type == ParsedReaderResultType.EMAIL_ADDRESS) {
return R.string.title_compose_email;
} else if (type == ParsedReaderResultType.UPC) {
return R.string.title_lookup_barcode;
} else if (type == ParsedReaderResultType.URI) {
return R.string.title_open_url;
} else {
return R.string.title_barcode_detected;
}
}
}
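
The yes/no confirmation above uses the pre-1.0 pattern of binding a Message to each dialog button: showAlert() posts the Message for whichever button the user taps, and the Handler branches on message.what. A minimal sketch of that flow, assuming the same showAlert() overloads already used in this file:

// Sketch of the dialog-to-Handler flow in handleDecode() above.
Handler handler = new ResultHandler(this, readerResult);
Message yes = handler.obtainMessage(R.string.button_yes);
Message no = handler.obtainMessage(R.string.button_no);
CharSequence title = getString(getDialogTitleID(readerResult.getType()));
// Eight-argument overload: title, message, positive text/Message,
// negative text/Message, cancelable, and the Message posted on cancel.
showAlert(title, readerResult.getDisplayResult(),
    getString(R.string.button_yes), yes,
    getString(R.string.button_no), no, true, no);
// ResultHandler.handleMessage() then fires the Intent on R.string.button_yes
// and calls restartPreview() on anything else.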

CameraManager.java

@@ -0,0 +1,247 @@
/*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android;
import com.google.zxing.ResultPoint;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.CameraDevice;
import android.util.Log;
import android.view.Display;
import android.view.WindowManager;
/**
* This object wraps the CameraDevice and expects to be the only one talking to it. The
* implementation encapsulates the steps needed to take preview-sized images as well as high
* resolution stills.
*
* @author dswitkin@google.com (Daniel Switkin)
*/
final class CameraManager {
private static final String TAG = "CameraManager";
private Context context;
private Point cameraResolution;
private Point stillResolution;
private int stillMultiplier;
private Point screenResolution;
private Rect framingRect;
private Bitmap bitmap;
private CameraDevice camera;
private CameraDevice.CaptureParams params;
private boolean previewMode;
CameraManager(Context context) {
this.context = context;
calculateStillResolution();
getScreenResolution();
bitmap = Bitmap.createBitmap(stillResolution.x, stillResolution.y, false);
camera = CameraDevice.open();
params = new CameraDevice.CaptureParams();
previewMode = false;
setPreviewMode(true);
}
public void openDriver() {
if (camera == null) {
camera = CameraDevice.open();
}
}
public void closeDriver() {
if (camera != null) {
camera.close();
camera = null;
}
}
public void capturePreview(Canvas canvas) {
setPreviewMode(true);
camera.capture(canvas);
}
public Bitmap captureStill() {
setPreviewMode(false);
Canvas canvas = new Canvas(bitmap);
camera.capture(canvas);
return bitmap;
}
/**
* Calculates the framing rect which the UI should draw to show the user where to place the
* barcode. The actual captured image should be a bit larger than indicated because the user might
* frame the shot too tightly. This target helps with alignment and forces the user to hold the
* device far enough away to ensure the image will be in focus.
*
* @return The rectangle to draw on screen in window coordinates.
*/
public Rect getFramingRect() {
if (framingRect == null) {
int size = stillResolution.x * screenResolution.x / cameraResolution.x;
int leftOffset = (screenResolution.x - size) / 2;
int topOffset = (screenResolution.y - size) / 2;
framingRect = new Rect(leftOffset, topOffset, leftOffset + size, topOffset + size);
}
return framingRect;
}
/**
* Converts the result points from still resolution coordinates to screen coordinates.
*
* @param points The points returned by the Reader subclass through Result.getResultPoints().
* @return An array of Points scaled to the size of the framing rect and offset appropriately
* so they can be drawn in screen coordinates.
*/
public Point[] convertResultPoints(ResultPoint[] points) {
Rect frame = getFramingRect();
int frameSize = frame.width();
int count = points.length;
Point[] output = new Point[count];
for (int x = 0; x < count; x++) {
output[x] = new Point();
output[x].x = frame.left + (int)(points[x].getX() * frameSize / stillResolution.x + 0.5f);
output[x].y = frame.top + (int)(points[x].getY() * frameSize / stillResolution.y + 0.5f);
}
return output;
}
/**
* Images for the live preview are taken at low resolution in RGB. The final stills for the
* decoding step are taken in YUV, since we only need the luminance channel. Other code depends
* on the ability to call this method for free if the correct mode is already set.
*
* @param on Setting on true will engage preview mode, setting it false will request still mode.
*/
private void setPreviewMode(boolean on) {
if (on != previewMode) {
if (on) {
params.type = 1; // preview
if (cameraResolution.x / (float)cameraResolution.y <
screenResolution.x / (float)screenResolution.y) {
params.srcWidth = cameraResolution.x;
params.srcHeight = cameraResolution.x * screenResolution.y / screenResolution.x;
params.leftPixel = 0;
params.topPixel = (cameraResolution.y - params.srcHeight) / 2;
} else {
params.srcWidth = cameraResolution.y * screenResolution.x / screenResolution.y;
params.srcHeight = cameraResolution.y;
params.leftPixel = (cameraResolution.x - params.srcWidth) / 2;
params.topPixel = 0;
}
params.outputWidth = screenResolution.x;
params.outputHeight = screenResolution.y;
params.dataFormat = 2; // RGB565
} else {
params.type = 0; // still
params.srcWidth = stillResolution.x * stillMultiplier;
params.srcHeight = stillResolution.y * stillMultiplier;
params.leftPixel = (cameraResolution.x - params.srcWidth) / 2;
params.topPixel = (cameraResolution.y - params.srcHeight) / 2;
params.outputWidth = stillResolution.x;
params.outputHeight = stillResolution.y;
params.dataFormat = 0; // YUV packed (planar would be better, but it doesn't work right now)
}
String captureType = on ? "preview" : "still";
Log.v(TAG, "Setting params for " + captureType + ": srcWidth " + params.srcWidth +
" srcHeight " + params.srcHeight + " leftPixel " + params.leftPixel + " topPixel " +
params.topPixel + " outputWidth " + params.outputWidth + " outputHeight " +
params.outputHeight);
camera.setCaptureParams(params);
previewMode = on;
}
}
/**
* This method determines how to take the highest quality image (i.e. the one which has the best
* chance of being decoded) given the capabilities of the camera. It is a balancing act between
* having enough resolution to read UPCs and having few enough pixels to keep the QR Code
* processing fast. The result is the dimensions of the rectangle to capture from the center of
* the sensor, plus a stillMultiplier which indicates whether we'll ask the driver to downsample
* for us. This has the added benefit of keeping the memory footprint of the bitmap as small as
* possible.
*/
private void calculateStillResolution() {
cameraResolution = getMaximumCameraResolution();
int minDimension = (cameraResolution.x < cameraResolution.y) ? cameraResolution.x :
cameraResolution.y;
int diagonalResolution = (int)Math.sqrt(cameraResolution.x * cameraResolution.x +
cameraResolution.y * cameraResolution.y);
float diagonalFov = getFieldOfView();
// Determine the field of view in the smaller dimension, then calculate how large an object
// would be at the minimum focus distance.
float fov = diagonalFov * minDimension / diagonalResolution;
double objectSize = Math.tan(Math.toRadians(fov / 2.0)) * getMinimumFocusDistance() * 2;
// Let's assume the largest barcode we might photograph at this distance is 3 inches across. By
// cropping to this size, we can avoid processing surrounding pixels, which helps with speed and
// accuracy.
// TODO(dswitkin): Handle a device with a great macro mode where objectSize < 4 inches.
double crop = 3.0 / objectSize;
int nativeResolution = (int)(minDimension * crop);
// The camera driver can only capture images which are a multiple of eight, so it's necessary to
// round up.
nativeResolution = (nativeResolution + 7) / 8 * 8;
if (nativeResolution > minDimension) {
nativeResolution = minDimension;
}
// There's no point in capturing too much detail, so ask the driver to downsample. I haven't
// tried a non-integer multiple, but it seems unlikely to work.
double dpi = nativeResolution / objectSize;
stillMultiplier = 1;
if (dpi > 200) {
stillMultiplier = (int)(dpi / 200 + 1);
}
stillResolution = new Point(nativeResolution, nativeResolution);
Log.v(TAG, "FOV " + fov + " objectSize " + objectSize + " crop " + crop + " dpi " + dpi +
" nativeResolution " + nativeResolution + " stillMultiplier " + stillMultiplier);
}
// FIXME(dswitkin): These three methods have temporary constants until the new Camera API can
// provide the real values for the current device.
// Temporary: the camera's maximum resolution in pixels.
private static Point getMaximumCameraResolution() {
return new Point(1280, 1024);
}
// Temporary: the diagonal field of view in degrees.
private static float getFieldOfView() {
return 60.0f;
}
// Temporary: the minimum focus distance in inches.
private static float getMinimumFocusDistance() {
return 12.0f;
}
private Point getScreenResolution() {
if (screenResolution == null) {
WindowManager manager = (WindowManager)context.getSystemService(Context.WINDOW_SERVICE);
Display display = manager.getDefaultDisplay();
screenResolution = new Point(display.getWidth(), display.getHeight());
}
return screenResolution;
}
}
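
As a worked example of calculateStillResolution() with the temporary constants above (a 1280x1024 sensor, a 60 degree diagonal field of view, and a 12 inch minimum focus distance), here is a standalone sketch of the same arithmetic; the class name is mine, and the commented values are the results:

// Standalone sketch of the arithmetic in calculateStillResolution().
public class StillResolutionMath {
  public static void main(String[] args) {
    int minDimension = 1024;                                              // min(1280, 1024)
    int diagonal = (int) Math.sqrt(1280.0 * 1280.0 + 1024.0 * 1024.0);    // 1639
    float fov = 60.0f * minDimension / diagonal;                          // ~37.5 degrees
    double objectSize = Math.tan(Math.toRadians(fov / 2.0)) * 12.0 * 2.0; // ~8.1 inches
    double crop = 3.0 / objectSize;                                       // ~0.37
    int nativeResolution = ((int) (minDimension * crop) + 7) / 8 * 8;     // 377 rounds up to 384
    double dpi = nativeResolution / objectSize;                           // ~47, under 200
    int stillMultiplier = (dpi > 200) ? (int) (dpi / 200 + 1) : 1;        // 1, no downsampling
    // Result: a 384x384 square is cropped from the center of the sensor.
    System.out.println(nativeResolution + "x" + nativeResolution + " x" + stillMultiplier);
  }
}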

CameraSurfaceView.java

@@ -0,0 +1,156 @@
/*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android;
import com.google.zxing.ResultPoint;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.Rect;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
final class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback {
private static int[] SCANNER_ALPHA = { 0, 64, 128, 192, 255, 192, 128, 64 };
private CameraManager cameraManager;
private SurfaceHolder surfaceHolder;
private boolean hasSurface;
private int scannerAlpha;
CameraSurfaceView(Context context, CameraManager cameraManager) {
super(context);
this.cameraManager = cameraManager;
// Install a SurfaceHolder.Callback so we get notified when the underlying surface is created
// and destroyed.
surfaceHolder = getHolder();
surfaceHolder.setCallback(this);
hasSurface = false;
scannerAlpha = 0;
// FIXME(dswitkin): This resolution needs to be made dynamic to handle different devices and
// orientations.
surfaceHolder.setFixedSize(320, 240);
}
public boolean surfaceCreated(SurfaceHolder holder) {
hasSurface = true;
// Tell the system that we filled the surface in this call. This is a lie to prevent the system
// from filling the surface for us automatically. THIS IS REQUIRED because otherwise we'll
// access the Surface object from two different threads, which is not allowed.
return true;
}
public void surfaceDestroyed(SurfaceHolder holder) {
// FIXME(dswitkin): The docs say this surface will be destroyed when this method returns. In
// practice this has not been a problem so far. I need to investigate.
hasSurface = false;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// Surface size or format has changed. This won't happen because of the setFixedSize() call.
}
/**
* This method is only called from the WorkerThread. Its job is to grab the next preview frame
* from the camera, draw the framing rectangle, and blit everything to the screen.
*/
public void capturePreviewAndDraw() {
if (hasSurface) {
Canvas canvas = surfaceHolder.lockCanvas();
cameraManager.capturePreview(canvas);
Rect frame = cameraManager.getFramingRect();
int width = canvas.getBitmapWidth();
int height = canvas.getBitmapHeight();
// Draw the exterior (i.e. outside the framing rect) as half darkened
Paint paint = new Paint();
paint.setColor(Color.BLACK);
paint.setAlpha(96);
Rect box = new Rect(0, 0, width, frame.top);
canvas.drawRect(box, paint);
box.set(0, frame.top, frame.left, frame.bottom + 1);
canvas.drawRect(box, paint);
box.set(frame.right + 1, frame.top, width, frame.bottom + 1);
canvas.drawRect(box, paint);
box.set(0, frame.bottom + 1, width, height);
canvas.drawRect(box, paint);
// Draw a two pixel solid black border inside the framing rect
paint.setAlpha(255);
box.set(frame.left, frame.top, frame.right + 1, frame.top + 2);
canvas.drawRect(box, paint);
box.set(frame.left, frame.top + 2, frame.left + 2, frame.bottom - 1);
canvas.drawRect(box, paint);
box.set(frame.right - 1, frame.top, frame.right + 1, frame.bottom - 1);
canvas.drawRect(box, paint);
box.set(frame.left, frame.bottom - 1, frame.right + 1, frame.bottom + 1);
canvas.drawRect(box, paint);
// Draw a red "laser scanner" line through the middle
paint.setColor(Color.RED);
paint.setAlpha(SCANNER_ALPHA[scannerAlpha]);
int middle = frame.height() / 2 + frame.top;
box.set(frame.left + 2, middle - 1, frame.right - 1, middle + 2);
canvas.drawRect(box, paint);
surfaceHolder.unlockCanvasAndPost(canvas);
// This cheap animation is tied to the rate at which we pull previews from the camera.
scannerAlpha = (scannerAlpha + 1) % SCANNER_ALPHA.length;
}
}
/**
* Draws a line for 1D barcodes (which return two points), or otherwise the set of points returned
* by the decoder, to indicate what was found.
* TODO(dswitkin): It might be nice to clear the framing rect and zoom in on the actual still that
* was captured, then paint the green points on it. This would also clear the red scanner line
* which doesn't make sense after the capture.
*
* @param resultPoints An array of points from the decoder, whose coordinates are expressed
* relative to the still image from the camera.
*/
public void drawResultPoints(ResultPoint[] resultPoints) {
if (hasSurface) {
Canvas canvas = surfaceHolder.lockCanvas();
Paint paint = new Paint();
paint.setColor(Color.GREEN);
paint.setAlpha(128);
Point[] points = cameraManager.convertResultPoints(resultPoints);
if (points.length == 2) {
paint.setStrokeWidth(4);
canvas.drawLine(points[0].x, points[0].y, points[1].x, points[1].y, paint);
} else {
paint.setStrokeWidth(10);
for (int x = 0; x < points.length; x++) {
canvas.drawPoint(points[x].x, points[x].y, paint);
}
}
surfaceHolder.unlockCanvasAndPost(canvas);
}
}
}
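
For reference, the four drawRect() calls that darken the exterior in capturePreviewAndDraw() tile the screen around the framing rect without overlapping; sketched as comments:

// Geometry of the four half-darkened exterior rectangles:
//
//   +------------------------+
//   |           1            |   1: (0, 0) .. (width, frame.top)
//   +------+--------+--------+   2: (0, frame.top) .. (frame.left, frame.bottom + 1)
//   |  2   | frame  |   3    |   3: (frame.right + 1, frame.top) .. (width, frame.bottom + 1)
//   +------+--------+--------+   4: (0, frame.bottom + 1) .. (width, height)
//   |           4            |
//   +------------------------+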

ResultHandler.java

@@ -0,0 +1,109 @@
/*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android;
import com.google.zxing.client.result.*;
import android.content.Intent;
import android.net.ContentURI;
import android.os.Handler;
import android.os.Message;
import android.provider.Contacts;
import java.net.URISyntaxException;
/**
* @author srowen@google.com (Sean Owen)
*/
final class ResultHandler extends Handler {
private final ParsedReaderResult result;
private final BarcodeReaderCaptureActivity captureActivity;
ResultHandler(BarcodeReaderCaptureActivity captureActivity, ParsedReaderResult result) {
this.captureActivity = captureActivity;
this.result = result;
}
@Override
public void handleMessage(Message message) {
if (message.what == R.string.button_yes) {
Intent intent = null;
ParsedReaderResultType type = result.getType();
if (type == ParsedReaderResultType.ADDRESSBOOK) {
AddressBookDoCoMoResult addressResult = (AddressBookDoCoMoResult)result;
intent = new Intent(Contacts.Intents.Insert.ACTION, Contacts.People.CONTENT_URI);
putExtra(intent, Contacts.Intents.Insert.NAME, addressResult.getName());
putExtra(intent, Contacts.Intents.Insert.PHONE, addressResult.getPhoneNumbers()[0]);
putExtra(intent, Contacts.Intents.Insert.EMAIL, addressResult.getEmail());
putExtra(intent, Contacts.Intents.Insert.NOTES, addressResult.getNote());
putExtra(intent, Contacts.Intents.Insert.POSTAL, addressResult.getAddress());
} else if (type == ParsedReaderResultType.BOOKMARK) {
// For now, we can only open the browser, and not actually add a bookmark
try {
intent = new Intent(Intent.VIEW_ACTION,
new ContentURI(((BookmarkDoCoMoResult)result).getURI()));
} catch (URISyntaxException e) {
return;
}
} else if (type == ParsedReaderResultType.EMAIL) {
EmailDoCoMoResult emailResult = (EmailDoCoMoResult)result;
try {
intent = new Intent(Intent.SENDTO_ACTION, new ContentURI(emailResult.getTo()));
} catch (URISyntaxException e) {
return;
}
putExtra(intent, "subject", emailResult.getSubject());
putExtra(intent, "body", emailResult.getBody());
} else if (type == ParsedReaderResultType.EMAIL_ADDRESS) {
EmailAddressResult emailResult = (EmailAddressResult)result;
try {
intent = new Intent(Intent.SENDTO_ACTION, new ContentURI(emailResult.getEmailAddress()));
} catch (URISyntaxException e) {
return;
}
} else if (type == ParsedReaderResultType.UPC) {
UPCParsedResult upcResult = (UPCParsedResult)result;
try {
ContentURI uri = new ContentURI("http://www.upcdatabase.com/item.asp?upc=" +
upcResult.getUPC());
intent = new Intent(Intent.VIEW_ACTION, uri);
} catch (URISyntaxException e) {
return;
}
} else if (type == ParsedReaderResultType.URI) {
URIParsedResult uriResult = (URIParsedResult)result;
try {
intent = new Intent(Intent.VIEW_ACTION, new ContentURI(uriResult.getURI()));
} catch (URISyntaxException e) {
return;
}
}
if (intent != null) {
captureActivity.startActivity(intent);
}
} else {
captureActivity.restartPreview();
}
}
private static void putExtra(Intent intent, String key, String value) {
if (key != null && key.length() > 0) {
intent.putExtra(key, value);
}
}
}

WorkerThread.java

@@ -0,0 +1,104 @@
/*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android;
import com.google.zxing.MonochromeBitmapSource;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.ReaderException;
import com.google.zxing.Result;
import android.graphics.Bitmap;
import android.os.Handler;
import android.os.Message;
/**
* This thread does all the heavy lifting, both during preview and for the final capture and
* decoding. That leaves the main thread free to handle UI tasks.
*
* @author dswitkin@google.com (Daniel Switkin)
*/
final class WorkerThread extends Thread {
private CameraSurfaceView surfaceView;
private CameraManager cameraManager;
private Handler handler;
private enum State {
IDLE,
PREVIEW_LOOP,
STILL_AND_DECODE,
DONE
}
private State state;
WorkerThread(CameraSurfaceView surfaceView, CameraManager cameraManager, Handler handler) {
this.surfaceView = surfaceView;
this.cameraManager = cameraManager;
this.handler = handler;
state = State.IDLE;
}
@Override
public void run() {
while (true) {
switch (state) {
case IDLE:
try {
sleep(50);
} catch (InterruptedException e) { }
break;
case PREVIEW_LOOP:
surfaceView.capturePreviewAndDraw();
break;
case STILL_AND_DECODE:
Bitmap bitmap = cameraManager.captureStill();
Result rawResult;
try {
MonochromeBitmapSource source = new YUVMonochromeBitmapSource(bitmap);
rawResult = new MultiFormatReader().decode(source);
} catch (ReaderException e) {
Message message = Message.obtain(handler, R.id.decoding_failed_message);
message.sendToTarget();
state = State.PREVIEW_LOOP;
break;
}
Message message = Message.obtain(handler, R.id.decoding_succeeded_message, rawResult);
message.sendToTarget();
state = State.IDLE;
break;
case DONE:
return;
}
}
}
public void requestPreviewLoop() {
state = State.PREVIEW_LOOP;
}
public void requestStillAndDecode() {
state = State.STILL_AND_DECODE;
}
public void requestExitAndWait() {
state = State.DONE;
try {
join();
} catch (InterruptedException e) {
}
}
}
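
One caveat with this design: state is written by the UI thread (the three request methods) and polled by the worker's run() loop with no synchronization. A minimal hardening sketch, my suggestion rather than part of the commit, is to declare the field volatile so the worker is guaranteed to observe requestExitAndWait():

// Hardening sketch (not in the commit): volatile makes writes from the UI
// thread promptly visible to the polling run() loop.
private volatile State state = State.IDLE;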

YUVMonochromeBitmapSource.java

@@ -0,0 +1,161 @@
/*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android;
import com.google.zxing.MonochromeBitmapSource;
import com.google.zxing.BlackPointEstimationMethod;
import com.google.zxing.common.BitArray;
import com.google.zxing.common.BlackPointEstimator;
import android.graphics.Bitmap;
/**
* This object implements MonochromeBitmapSource around an Android Bitmap. Rather than capturing an
* RGB image and calculating the grey value at each pixel, we ask the camera driver for YUV data and
* strip out the luminance channel directly. This should be faster but provides fewer bits, i.e.
* fewer grey levels.
*
* @author dswitkin@google.com (Daniel Switkin)
* @author srowen@google.com (Sean Owen)
*/
final class YUVMonochromeBitmapSource implements MonochromeBitmapSource {
private final Bitmap image;
private final BitArray[] blackWhitePixels;
private final int width;
private final int height;
private int blackPoint;
private BlackPointEstimationMethod lastMethod;
private int lastArgument;
private static final int LUMINANCE_BITS = 5;
private static final int LUMINANCE_SHIFT = 8 - LUMINANCE_BITS;
private static final int LUMINANCE_BUCKETS = 1 << LUMINANCE_BITS;
YUVMonochromeBitmapSource(Bitmap image) {
width = image.width();
height = image.height();
this.image = image;
blackWhitePixels = new BitArray[height];
blackPoint = 0x7F;
lastMethod = null;
lastArgument = 0;
}
public boolean isBlack(int x, int y) {
BitArray blackWhite = blackWhitePixels[y];
if (blackWhite == null) {
blackWhite = parseBlackWhite(y);
}
return blackWhite.get(x);
}
public BitArray getBlackRow(int y, BitArray row, int startX, int getWidth) {
BitArray blackWhite = blackWhitePixels[y];
if (blackWhite == null) {
blackWhite = parseBlackWhite(y);
}
if (row == null) {
if (startX == 0 && getWidth == width) {
return blackWhite;
}
row = new BitArray(getWidth);
} else {
row.clear();
}
for (int i = 0; i < getWidth; i++) {
if (blackWhite.get(startX + i)) {
row.set(i);
}
}
return row;
}
private BitArray parseBlackWhite(int y) {
int width = this.width;
int[] pixelRow = new int[width];
image.getPixels(pixelRow, 0, width, 0, y, width, 1);
BitArray luminanceRow = new BitArray(width);
int blackPoint = this.blackPoint;
// Calculate 32 bits at a time to more efficiently set the bit array
int bits = 0;
int bitCount = 0;
for (int j = 0; j < width; j++) {
bits >>>= 1;
// Computation of luminance is inlined here for speed:
if (((pixelRow[j] >> 16) & 0xFF) <= blackPoint) {
bits |= 0x80000000;
}
if (++bitCount == 32) {
luminanceRow.setBulk(j, bits);
bits = 0;
bitCount = 0;
}
}
if (bitCount > 0) {
luminanceRow.setBulk(width, bits >>> (32 - bitCount));
}
blackWhitePixels[y] = luminanceRow;
return luminanceRow;
}
public int getHeight() {
return height;
}
public int getWidth() {
return width;
}
public void estimateBlackPoint(BlackPointEstimationMethod method, int argument) {
if (!method.equals(lastMethod) || argument != lastArgument) {
for (int i = 0; i < blackWhitePixels.length; i++) {
blackWhitePixels[i] = null;
}
int[] histogram = new int[LUMINANCE_BUCKETS];
if (method.equals(BlackPointEstimationMethod.TWO_D_SAMPLING)) {
int minDimension = width < height ? width : height;
int startI = height == minDimension ? 0 : (height - width) >> 1;
int startJ = width == minDimension ? 0 : (width - height) >> 1;
for (int n = 0; n < minDimension; n++) {
int pixel = image.getPixel(startJ + n, startI + n);
// Computation of luminance is inlined here for speed:
histogram[((pixel >> 16) & 0xFF) >> LUMINANCE_SHIFT]++;
}
} else if (method.equals(BlackPointEstimationMethod.ROW_SAMPLING)) {
if (argument < 0 || argument >= height) {
throw new IllegalArgumentException("Row is not within the image: " + argument);
}
int[] yuvArray = new int[width];
image.getPixels(yuvArray, 0, width, 0, argument, width, 1);
for (int x = 0; x < width; x++) {
histogram[((yuvArray[x] >> 16) & 0xFF) >> LUMINANCE_SHIFT]++;
}
} else {
throw new IllegalArgumentException("Unknown method: " + method);
}
blackPoint = BlackPointEstimator.estimate(histogram, 1.0f) << LUMINANCE_SHIFT;
lastMethod = method;
lastArgument = argument;
}
}
public BlackPointEstimationMethod getLastEstimationMethod() {
return lastMethod;
}
}
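
The 32-bits-at-a-time loop in parseBlackWhite() packs one bit per pixel, entering at the top of the word and shifting down, so pixel j ends up in bit (j % 32) of its word. A standalone sketch of the same packing, assuming a plain int[] of 8-bit luminance values in place of the Bitmap row:

// Standalone sketch of the bit packing in parseBlackWhite() above.
int[] luminances = { 0x10, 0xF0, 0x20, 0xE0 };  // hypothetical grey values
int blackPoint = 0x7F;
int bits = 0;
int bitCount = 0;
for (int j = 0; j < luminances.length; j++) {
  bits >>>= 1;
  if (luminances[j] <= blackPoint) {
    bits |= 0x80000000;  // at or below the black point: set the incoming top bit
  }
  if (++bitCount == 32) {
    // Full word: the real code flushes it with luminanceRow.setBulk(j, bits).
    bits = 0;
    bitCount = 0;
  }
}
if (bitCount > 0) {
  // Partial word: right-align the leftover bits, as the final setBulk() expects.
  bits >>>= (32 - bitCount);  // here 0b0101: pixels 0 and 2 were dark
}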