Consolidated all the Android LuminanceSource classes into a single PlanarYUVLuminanceSource. Either a device's camera preview format can be handled by this class, or the app won't work on that device. The Android platform requires every device to support yuv420sp camera preview buffers, but we also allow two other similar formats for compatibility, since we only read the Y plane and they all store it the same way. I also removed the unused ability to render LuminanceSource data as full-color ARGB bitmaps.
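
For context, every format we accept stores a full-resolution, byte-per-pixel Y plane at the start of the buffer, which is why a single class is enough. A minimal sketch of that assumption (hypothetical helper, not the actual PlanarYUVLuminanceSource code shown in the diff below):

// Copies the cropped luminance (Y) plane out of a planar YUV preview buffer.
// yuv420sp, yuv422sp and yuv420p all place dataWidth * dataHeight bytes of Y data
// before any chroma data, so the same loop works for each of them.
static byte[] cropLuminance(byte[] yuv, int dataWidth, int left, int top,
                            int width, int height) {
  byte[] matrix = new byte[width * height];
  for (int y = 0; y < height; y++) {
    System.arraycopy(yuv, (top + y) * dataWidth + left, matrix, y * width, width);
  }
  return matrix;
}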

I also reverted a String-related change to QRCodeEncoder.java: the String(byte[], Charset) constructor exists in Java SE 6's String class but not in Android's, so this goes back to the String(byte[], String charsetName) form.
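
To illustrate the two constructors involved (hypothetical class, not the actual QRCodeEncoder code):

import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;

final class Utf8Decode {
  // Portable form, present on every Android release: charset named by a String.
  // This is what QRCodeEncoder goes back to; note the checked exception.
  static String portable(byte[] bytes) throws UnsupportedEncodingException {
    return new String(bytes, "UTF-8");
  }

  // Java SE 6 form: String(byte[], Charset) is missing from Android's String class
  // at this point, so this overload can't be used in the client.
  static String java6Only(byte[] bytes) {
    return new String(bytes, Charset.forName("UTF-8"));
  }
}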

git-svn-id: https://zxing.googlecode.com/svn/trunk@1121 59b500cc-1b3d-0410-9834-0bbf25fbcc57
dswitkin 2009-11-23 03:16:41 +00:00
parent 5339ef7757
commit f4cd22ee1a
9 changed files with 29 additions and 415 deletions

AndroidManifest.xml

@@ -20,8 +20,8 @@ version to be published. The next versionCode will be 7, regardless of whether t
versionName is 2.31, 2.4, or 3.0. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.google.zxing.client.android"
android:versionName="3.01"
android:versionCode="39">
android:versionName="3.1 alpha1"
android:versionCode="40">
<!-- We require Cupcake (Android 1.5) or later. -->
<uses-sdk android:minSdkVersion="3"/>
<!-- Donut-specific flags which allow us to run on large and high dpi screens. -->

BaseLuminanceSource.java (deleted)

@@ -1,62 +0,0 @@
/*
* Copyright 2009 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android;
import com.google.zxing.LuminanceSource;
import android.graphics.Bitmap;
/**
* An extension of LuminanceSource which adds some Android-specific methods.
*
* @author dswitkin@google.com (Daniel Switkin)
*/
public abstract class BaseLuminanceSource extends LuminanceSource {
BaseLuminanceSource(int width, int height) {
super(width, height);
}
/**
* Requests the width of the underlying platform's bitmap.
*
* @return The width in pixels.
*/
public abstract int getDataWidth();
/**
* Requests the height of the underlying platform's bitmap.
*
* @return The height in pixels.
*/
public abstract int getDataHeight();
/**
* Creates a greyscale Android Bitmap from the YUV data based on the crop rectangle.
*
* @return An 8888 bitmap.
*/
public abstract Bitmap renderCroppedGreyscaleBitmap();
/**
* Creates a color Android Bitmap from the YUV data, ignoring the crop rectangle.
*
* @param halfSize If true, downsample to 50% in each dimension, otherwise not.
* @return An 8888 bitmap.
*/
public abstract Bitmap renderFullColorBitmap(boolean halfSize);
}

CameraManager.java

@@ -279,31 +279,31 @@ final class CameraManager {
* @param data A preview frame.
* @param width The width of the image.
* @param height The height of the image.
* @return A BaseLuminanceSource subclass.
* @return A PlanarYUVLuminanceSource instance.
*/
public BaseLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
Rect rect = getFramingRect();
switch (previewFormat) {
// This is the standard Android format which all devices are REQUIRED to support.
// In theory, it's the only one we should ever care about.
case PixelFormat.YCbCr_420_SP:
return new PlanarYUV420LuminanceSource(data, width, height, rect.left, rect.top,
return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
rect.width(), rect.height());
// This format has never been seen in the wild, but is compatible as we only care
// about the Y channel, so allow it.
case PixelFormat.YCbCr_422_SP:
return new PlanarYUV422LuminanceSource(data, width, height, rect.left, rect.top,
return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
rect.width(), rect.height());
default:
// Handle some non-standard values:
// There's no PixelFormat constant for this buffer format yet.
if (previewFormatString.equals("yuv422i-yuyv")) {
return new InterleavedYUV422LuminanceSource(data, width, height, rect.left, rect.top,
rect.width(), rect.height());
} else if (previewFormatString.equals("yuv420p")) {
// Assume this is a synonym for YUV420SP -- note the lack of 's'
return new PlanarYUV420LuminanceSource(data, width, height, rect.left, rect.top,
// The Samsung Moment incorrectly uses this variant instead of the 'sp' version.
// Fortunately, it too has all the Y data up front, so we can read it.
if (previewFormatString.equals("yuv420p")) {
return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
rect.width(), rect.height());
}
}
}
throw new IllegalArgumentException("Unsupported picture format: " +
previewFormat + '/' + previewFormatString);
previewFormat + '/' + previewFormatString);
}
/**

DecodeThread.java

@@ -166,7 +166,7 @@ final class DecodeThread extends Thread {
private void decode(byte[] data, int width, int height) {
long start = System.currentTimeMillis();
Result rawResult = null;
BaseLuminanceSource source = CameraManager.get().buildLuminanceSource(data, width, height);
PlanarYUVLuminanceSource source = CameraManager.get().buildLuminanceSource(data, width, height);
BinaryBitmap bitmap = new BinaryBitmap(new GlobalHistogramBinarizer(source));
try {
rawResult = multiFormatReader.decodeWithState(bitmap);

InterleavedYUV422LuminanceSource.java (deleted)

@@ -1,189 +0,0 @@
/*
* Copyright 2009 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android;
import com.google.zxing.LuminanceSource;
import android.graphics.Bitmap;
/**
* This object extends LuminanceSource around an array of YUV data returned from the camera driver,
* with the option to crop to a rectangle within the full data. This can be used to exclude
* superfluous pixels around the perimeter and speed up decoding.
*
* It handles YUV 422 interleaved data, where each pixel consists of first a Y value, then
* a color value, with U and V alternating at each pixel.
*
* @author dswitkin@google.com (Daniel Switkin)
*/
public final class InterleavedYUV422LuminanceSource extends BaseLuminanceSource {
private static final int OPAQUE_ALPHA = 0xFF000000;
private final byte[] yuvData;
private final int dataWidth;
private final int dataHeight;
private final int left;
private final int top;
public InterleavedYUV422LuminanceSource(byte[] yuvData, int dataWidth, int dataHeight,
int left, int top, int width, int height) {
super(width, height);
if (left + width > dataWidth || top + height > dataHeight) {
throw new IllegalArgumentException("Crop rectangle does not fit within image data.");
}
this.yuvData = yuvData;
this.dataWidth = dataWidth;
this.dataHeight = dataHeight;
this.left = left;
this.top = top;
}
@Override
public byte[] getRow(int y, byte[] row) {
if (y < 0 || y >= getHeight()) {
throw new IllegalArgumentException("Requested row is outside the image: " + y);
}
int width = getWidth();
if (row == null || row.length < width) {
row = new byte[width];
}
int offset = ((y + top) * dataWidth << 1) + (left << 1);
byte[] yuv = yuvData;
for (int x = 0; x < width; x++) {
row[x] = yuv[offset + (x << 1)];
}
return row;
}
@Override
public byte[] getMatrix() {
int width = getWidth();
int height = getHeight();
int area = width * height;
byte[] matrix = new byte[area];
int dataWidth = this.dataWidth;
int inputOffset = (top * dataWidth << 1) + (left << 1);
byte[] yuv = yuvData;
for (int y = 0; y < height; y++) {
int outputOffset = y * width;
for (int x = 0; x < width; x++) {
matrix[outputOffset + x] = yuv[inputOffset + (x << 1)];
}
inputOffset += (dataWidth << 1);
}
return matrix;
}
@Override
public boolean isCropSupported() {
return true;
}
@Override
public LuminanceSource crop(int left, int top, int width, int height) {
return new InterleavedYUV422LuminanceSource(yuvData, dataWidth, dataHeight, left, top,
width, height);
}
@Override
public int getDataWidth() {
return dataWidth;
}
@Override
public int getDataHeight() {
return dataHeight;
}
@Override
public Bitmap renderCroppedGreyscaleBitmap() {
int width = getWidth();
int height = getHeight();
int[] pixels = new int[width * height];
byte[] yuv = yuvData;
int dataWidth = this.dataWidth;
int inputOffset = (top * dataWidth + left) << 1;
for (int y = 0; y < height; y++) {
int outputOffset = y * width;
for (int x = 0; x < width; x++) {
int grey = yuv[inputOffset + (x << 1)] & 0xff;
pixels[outputOffset + x] = OPAQUE_ALPHA | (grey * 0x00010101);
}
inputOffset += (dataWidth << 1);
}
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
return bitmap;
}
// Not currently needed.
@Override
public Bitmap renderFullColorBitmap(boolean halfSize) {
// TODO implement halfSize
int width = getWidth();
int height = getHeight();
int[] pixels = new int[width * height];
byte[] yuv = yuvData;
int dataWidth = this.dataWidth;
int inputOffset = (top * dataWidth + left) << 1;
for (int y = 0; y < height; y++) {
int outputOffset = y * width;
for (int x = 0; x < width; x += 2) {
int localOffset = inputOffset + (x << 1);
int y1 = yuv[localOffset] & 0xFF;
int u = yuv[localOffset + 1] & 0xFF;
int y2 = yuv[localOffset + 2] & 0xFF;
int v = yuv[localOffset + 3] & 0xFF;
int rgb1 = yuvToRGB(y1, u, v);
int rgb2 = yuvToRGB(y2, u, v);
pixels[outputOffset + x] = OPAQUE_ALPHA | rgb1;
pixels[outputOffset + x + 1] = OPAQUE_ALPHA | rgb2;
}
inputOffset += (dataWidth << 1);
}
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
return bitmap;
}
/**
* @link http://en.wikipedia.org/wiki/YUV#Y.27UV444
*/
static int yuvToRGB(int y, int u, int v) {
int c = y - 16;
int d = u - 128;
int e = v - 128;
int c298 = 298 * c;
int r = clip((c298 + 409 * e + 128) >> 8);
int g = clip((c298 - 100 * d - 208 * e + 128) >> 8);
int b = clip((c298 + 516 * d + 128) >> 8);
return (r << 16) | (g << 8) | b;
}
private static int clip(int x) {
return x < 0 ? 0 : x & 0xFF;
}
}

PlanarYUV420LuminanceSource.java (deleted)

@@ -1,77 +0,0 @@
/*
* Copyright 2009 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android;
import android.graphics.Bitmap;
import com.google.zxing.LuminanceSource;
public final class PlanarYUV420LuminanceSource extends AbstractPlanarYUVLuminanceSource {
public PlanarYUV420LuminanceSource(byte[] yuvData, int dataWidth, int dataHeight,
int left, int top, int width, int height) {
super(yuvData, dataWidth, dataHeight, left, top, width, height);
}
@Override
public LuminanceSource crop(int left, int top, int width, int height) {
return new PlanarYUV420LuminanceSource(
getYUVData(), getDataWidth(), getDataHeight(), left, top, width, height);
}
@Override
public Bitmap renderFullColorBitmap(boolean halfSize) {
// TODO implement halfSize
int width = getWidth();
int height = getHeight();
int dataWidth = getDataWidth();
int dataHeight = getDataHeight();
byte[] yuv = getYUVData();
int expectedYBytes = dataWidth * dataHeight;
int expectedUBytes = expectedYBytes >> 2;
int expectedVBytes = expectedYBytes >> 2;
int expectedBytes = expectedYBytes + expectedUBytes + expectedVBytes;
if (yuv.length != expectedBytes) {
throw new IllegalStateException("Expected " + expectedBytes + " bytes");
}
int[] pixels = new int[width * height];
int inputYOffset = getTop() * getDataWidth() + getLeft();
int uOffset = expectedYBytes;
int vOffset = expectedYBytes + expectedUBytes;
for (int y = 0; y < height; y++) {
int outputOffset = y * width;
for (int x = 0; x < width; x++) {
int yOffset = inputYOffset + x;
int yDataRow = yOffset / dataWidth;
int yDataOffset = yOffset % dataWidth;
int uvOffset = ((yDataRow >> 1) * dataWidth + yDataOffset) >> 1;
int y1 = yuv[yOffset] & 0xFF;
int u = yuv[uOffset + uvOffset] & 0xFF;
int v = yuv[vOffset + uvOffset] & 0XFF;
pixels[outputOffset + x] =
OPAQUE_ALPHA | InterleavedYUV422LuminanceSource.yuvToRGB(y1, u, v);
}
inputYOffset += dataWidth;
}
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
return bitmap;
}
}

PlanarYUV422LuminanceSource.java (deleted)

@@ -1,40 +0,0 @@
/*
* Copyright 2009 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android;
import android.graphics.Bitmap;
import com.google.zxing.LuminanceSource;
public final class PlanarYUV422LuminanceSource extends AbstractPlanarYUVLuminanceSource {
public PlanarYUV422LuminanceSource(byte[] yuvData, int dataWidth, int dataHeight,
int left, int top, int width, int height) {
super(yuvData, dataWidth, dataHeight, left, top, width, height);
}
@Override
public LuminanceSource crop(int left, int top, int width, int height) {
return new PlanarYUV422LuminanceSource(
getYUVData(), getDataWidth(), getDataHeight(), left, top, width, height);
}
@Override
public Bitmap renderFullColorBitmap(boolean halfSize) {
throw new UnsupportedOperationException();
}
}

AbstractPlanarYUVLuminanceSource.java → PlanarYUVLuminanceSource.java (renamed)

@@ -16,6 +16,8 @@
package com.google.zxing.client.android;
import com.google.zxing.LuminanceSource;
import android.graphics.Bitmap;
/**
@@ -28,17 +30,14 @@ import android.graphics.Bitmap;
*
* @author dswitkin@google.com (Daniel Switkin)
*/
public abstract class AbstractPlanarYUVLuminanceSource extends BaseLuminanceSource {
protected static final int OPAQUE_ALPHA = 0xFF000000;
public final class PlanarYUVLuminanceSource extends LuminanceSource {
private final byte[] yuvData;
private final int dataWidth;
private final int dataHeight;
private final int left;
private final int top;
AbstractPlanarYUVLuminanceSource(byte[] yuvData, int dataWidth, int dataHeight, int left, int top,
PlanarYUVLuminanceSource(byte[] yuvData, int dataWidth, int dataHeight, int left, int top,
int width, int height) {
super(width, height);
@@ -54,7 +53,7 @@ public abstract class AbstractPlanarYUVLuminanceSour
}
@Override
public final byte[] getRow(int y, byte[] row) {
public byte[] getRow(int y, byte[] row) {
if (y < 0 || y >= getHeight()) {
throw new IllegalArgumentException("Requested row is outside the image: " + y);
}
@@ -68,7 +67,7 @@ public abstract class AbstractPlanarYUVLuminanceSour
}
@Override
public final byte[] getMatrix() {
public byte[] getMatrix() {
int width = getWidth();
int height = getHeight();
@@ -99,34 +98,19 @@ public abstract class AbstractPlanarYUVLuminanceSour
}
@Override
public final boolean isCropSupported() {
public boolean isCropSupported() {
return true;
}
@Override
public final int getDataWidth() {
public int getDataWidth() {
return dataWidth;
}
@Override
public final int getDataHeight() {
public int getDataHeight() {
return dataHeight;
}
protected final byte[] getYUVData() {
return yuvData;
}
protected final int getLeft() {
return left;
}
protected final int getTop() {
return top;
}
@Override
public final Bitmap renderCroppedGreyscaleBitmap() {
public Bitmap renderCroppedGreyscaleBitmap() {
int width = getWidth();
int height = getHeight();
int[] pixels = new int[width * height];
@@ -137,7 +121,7 @@ public abstract class AbstractPlanarYUVLuminanceSour
int outputOffset = y * width;
for (int x = 0; x < width; x++) {
int grey = yuv[inputOffset + x] & 0xff;
pixels[outputOffset + x] = OPAQUE_ALPHA | (grey * 0x00010101);
pixels[outputOffset + x] = 0xFF000000 | (grey * 0x00010101);
}
inputOffset += dataWidth;
}
@@ -146,5 +130,4 @@ public abstract class AbstractPlanarYUVLuminanceSour
bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
return bitmap;
}
}

QRCodeEncoder.java

@@ -42,7 +42,6 @@ import android.util.Log;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
/**
* This class does the work of decoding the user's request and extracting all the data
@@ -146,7 +145,7 @@ final class QRCodeEncoder {
int length = stream.available();
byte[] vcard = new byte[length];
stream.read(vcard, 0, length);
String vcardString = new String(vcard, Charset.forName("UTF-8"));
String vcardString = new String(vcard, "UTF-8");
Log.d(TAG, "Encoding share intent content: " + vcardString);
Result result = new Result(vcardString, vcard, null, BarcodeFormat.QR_CODE);
ParsedResult parsedResult = ResultParser.parseResult(result);