public String decodeWithZxing(byte[] data, int width, int height, Rect crop) {
    MultiFormatReader multiFormatReader = new MultiFormatReader();
    multiFormatReader.setHints(changeZXingDecodeDataMode());
    Result rawResult = null;
    // The constructor either returns a valid source or throws, so no null check is needed.
    PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(data, width, height,
            crop.left, crop.top, crop.width(), crop.height(), false);
    BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
    try {
        rawResult = multiFormatReader.decodeWithState(bitmap);
    } catch (ReaderException re) {
        // No barcode found in this frame; keep scanning.
    } finally {
        multiFormatReader.reset();
    }
    return rawResult != null ? rawResult.getText() : null;
}
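changeZXingDecodeDataMode() is not shown above; the decoder only needs it to return a ZXing hints map. A minimal sketch of such a helper, assuming a restriction to a few common formats plus TRY_HARDER (the format choice is illustrative, not taken from the original), could look like this:

// Hypothetical hints builder behind changeZXingDecodeDataMode(); uses
// com.google.zxing.BarcodeFormat, com.google.zxing.DecodeHintType and java.util collections.
private Map<DecodeHintType, Object> changeZXingDecodeDataMode() {
    List<BarcodeFormat> formats = new ArrayList<>();
    formats.add(BarcodeFormat.QR_CODE);
    formats.add(BarcodeFormat.CODE_128);
    formats.add(BarcodeFormat.EAN_13);
    Map<DecodeHintType, Object> hints = new EnumMap<>(DecodeHintType.class);
    hints.put(DecodeHintType.POSSIBLE_FORMATS, formats);
    hints.put(DecodeHintType.TRY_HARDER, Boolean.TRUE);
    return hints;
}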
private Result decode(byte[] data, int width, int height, Rect rect, boolean rotate) {
    PlanarYUVLuminanceSource source;
    if (rotate) {
        // ZXing cannot read 1D barcodes from a portrait preview, so rotate the frame data 90 degrees.
        byte[] rotatedData = new byte[data.length];
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                rotatedData[x * height + height - y - 1] = data[x + y * width];
            }
        }
        int tmp = width;
        width = height;
        height = tmp;
        data = rotatedData;
        // Map the crop rectangle into the rotated coordinate space.
        Rect rotatedRect = new Rect(width - rect.bottom, rect.left, width - rect.top, rect.right);
        source = buildLuminanceSource(data, width, height, rotatedRect);
    } else {
        source = buildLuminanceSource(data, width, height, rect);
    }
    return decode(source);
}
public static Result decodeImage(final String path) {
    Bitmap bitmap = QrUtils.decodeSampledBitmapFromFile(path, 256, 256);
    // Picking a cloud-only photo from the Google Photos picker can return a null Bitmap.
    if (bitmap == null) {
        return null;
    }
    int width = bitmap.getWidth();
    int height = bitmap.getHeight();
    int[] pixels = new int[width * height];
    bitmap.getPixels(pixels, 0, width, 0, 0, width, height);
    // RGBLuminanceSource source = new RGBLuminanceSource(width, height, pixels);
    PlanarYUVLuminanceSource source1 = new PlanarYUVLuminanceSource(getYUV420sp(width, height, bitmap),
            width, height, 0, 0, width, height, false);
    BinaryBitmap binaryBitmap = new BinaryBitmap(new HybridBinarizer(source1));
    // BinaryBitmap binaryBitmap = new BinaryBitmap(new GlobalHistogramBinarizer(source1));
    HashMap<DecodeHintType, Object> hints = new HashMap<>();
    hints.put(DecodeHintType.TRY_HARDER, Boolean.TRUE);
    hints.put(DecodeHintType.CHARACTER_SET, "UTF-8");
    try {
        return new MultiFormatReader().decode(binaryBitmap, hints);
    } catch (NotFoundException e) {
        e.printStackTrace();
    }
    return null;
}
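getYUV420sp(width, height, bitmap) is referenced but not included above. A minimal stand-in, assuming that only the Y plane matters to PlanarYUVLuminanceSource (the method name matches the call above, the body is an assumption), could be:

// Hypothetical stand-in for getYUV420sp(): converts ARGB bitmap pixels to an NV21-style
// buffer. Only the Y plane is read by PlanarYUVLuminanceSource, so the chroma plane is
// simply filled with the neutral value 128.
private static byte[] getYUV420sp(int width, int height, Bitmap bitmap) {
    int[] argb = new int[width * height];
    bitmap.getPixels(argb, 0, width, 0, 0, width, height);
    byte[] yuv = new byte[width * height * 3 / 2];
    int yIndex = 0;
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {
            int color = argb[yIndex];
            int r = (color >> 16) & 0xff;
            int g = (color >> 8) & 0xff;
            int b = color & 0xff;
            // BT.601 luma approximation.
            int y = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;
            yuv[yIndex++] = (byte) Math.max(0, Math.min(255, y));
        }
    }
    java.util.Arrays.fill(yuv, width * height, yuv.length, (byte) 128);
    return yuv;
}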
/**
 * A factory method to build the appropriate LuminanceSource object based on the format
 * of the preview buffers, as described by Camera.Parameters.
 *
 * @param data   A preview frame.
 * @param width  The width of the image.
 * @param height The height of the image.
 * @return A PlanarYUVLuminanceSource instance.
 */
public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
    Rect rect = getFramingRectInPreview();
    if (rect == null) {
        return null;
    }
    // If the framing rect no longer fits the preview (e.g. right after an orientation change),
    // swap width and height so the crop stays inside the buffer instead of crashing.
    if (rect.left + rect.width() > width || rect.top + rect.height() > height) {
        return new PlanarYUVLuminanceSource(data, height, width, rect.left, rect.top,
                rect.width(), rect.height(), false);
    }
    // Go ahead and assume it's YUV rather than die.
    return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
            rect.width(), rect.height(), false);
}
/**
 * Called as preview frames are displayed. This callback is invoked
 * on the event thread {@link Camera#open(int)} was called from.
 * <p>
 * If using the {@link ImageFormat#YV12} format, refer to the equations in
 * {@link Camera.Parameters#setPreviewFormat} for the arrangement of the pixel data
 * in the preview callback buffers.
 *
 * @param data   the contents of the preview frame in the format defined by
 *               {@link ImageFormat}, which can be queried with
 *               {@link Camera.Parameters#getPreviewFormat()}. If
 *               {@link Camera.Parameters#setPreviewFormat(int)} is never called,
 *               the default is the YCbCr_420_SP (NV21) format.
 * @param camera the Camera service object.
 */
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    // Get the size of the preview frame.
    int frameHeight = camera.getParameters().getPreviewSize().height;
    int frameWidth = camera.getParameters().getPreviewSize().width;
    // Create a new luminance source covering the whole frame (no horizontal mirroring).
    final PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(data,
            frameWidth, frameHeight, 0, 0, frameWidth, frameHeight, false);
    // Convert to a binary bitmap which can be used by our image decoder.
    final BinaryBitmap binaryBitmap = new BinaryBitmap(new HybridBinarizer(source));
    // Set self as listener, and start decoding the image off the UI thread.
    DecodeImageTask decodeImageTask = new DecodeImageTask();
    decodeImageTask.setCallbackListener(this);
    decodeImageTask.execute(binaryBitmap);
}
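DecodeImageTask is not shown above. A hedged sketch of what it might look like, matching the era of the legacy Camera API (the class name and setCallbackListener mirror the snippet, but the callback interface and body are assumptions):

// Hypothetical DecodeImageTask: runs the ZXing decode off the UI thread and reports the
// result back through a simple callback interface.
private static class DecodeImageTask extends AsyncTask<BinaryBitmap, Void, Result> {

    interface CallbackListener {
        void onImageDecoded(Result result); // null when nothing was found
    }

    private CallbackListener listener;

    void setCallbackListener(CallbackListener listener) {
        this.listener = listener;
    }

    @Override
    protected Result doInBackground(BinaryBitmap... bitmaps) {
        try {
            return new MultiFormatReader().decode(bitmaps[0]);
        } catch (NotFoundException e) {
            return null; // no barcode in this frame
        }
    }

    @Override
    protected void onPostExecute(Result result) {
        if (listener != null) {
            listener.onImageDecoded(result);
        }
    }
}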
public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
    byte[] rotatedData = new byte[data.length];
    int rotation = context.getApplicationContext().getResources().getConfiguration().orientation;
    if (rotation == Configuration.ORIENTATION_PORTRAIT) {
        // Rotate the luminance data 90 degrees so 1D barcodes can be read in portrait.
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                rotatedData[x * height + height - y - 1] = data[x + y * width];
            }
        }
        int tmp = width;
        width = height;
        height = tmp;
    } else {
        rotatedData = null;
    }
    Rect rect = getFramingRectInPreview();
    if (rect == null) {
        return null;
    }
    // Go ahead and assume it's YUV rather than die.
    return new PlanarYUVLuminanceSource(
            rotation == Configuration.ORIENTATION_PORTRAIT ? rotatedData : data,
            width, height, rect.left, rect.top, rect.width(), rect.height(), false);
}
public PlanarYUVLuminanceSource buildLuminanceSource(byte[] rotatedData, int width, int height, boolean horizontal) {
    Rect rect = getFramingRectInPreview(horizontal);
    int previewFormat = mCameraConfig.getPreviewFormat();
    String previewFormatString = mCameraConfig.getPreviewFormatString();
    switch (previewFormat) {
        // This is the standard Android format which all devices are REQUIRED to support.
        // In theory, it's the only one we should ever care about.
        case PixelFormat.YCbCr_420_SP:
        // This format has never been seen in the wild, but is compatible as we only care
        // about the Y channel, so allow it.
        case PixelFormat.YCbCr_422_SP:
            return new PlanarYUVLuminanceSource(rotatedData, width, height,
                    rect.left, rect.top, rect.width(), rect.height(), horizontal);
        default:
            // The Samsung Moment incorrectly uses this variant instead of the 'sp' version.
            // Fortunately, it too has all the Y data up front, so we can read it.
            if ("yuv420p".equals(previewFormatString)) {
                return new PlanarYUVLuminanceSource(rotatedData, width, height,
                        rect.left, rect.top, rect.width(), rect.height(), horizontal);
            }
    }
    throw new IllegalArgumentException("Unsupported picture format: " + previewFormat + '/' + previewFormatString);
}
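The android.graphics.PixelFormat YUV constants used in that switch are deprecated; ImageFormat.NV21 and ImageFormat.NV16 carry the same numeric values as YCbCr_420_SP and YCbCr_422_SP. An assumed variant of the same factory using the non-deprecated constants (the surrounding fields are taken as-is from the snippet above) would be:

// Assumed variant of the factory above; behaviour is unchanged because the ImageFormat
// constants share the PixelFormat values.
public PlanarYUVLuminanceSource buildLuminanceSource(byte[] rotatedData, int width, int height, boolean horizontal) {
    Rect rect = getFramingRectInPreview(horizontal);
    int previewFormat = mCameraConfig.getPreviewFormat();
    String previewFormatString = mCameraConfig.getPreviewFormatString();
    switch (previewFormat) {
        case ImageFormat.NV21: // standard preview format, required on all devices
        case ImageFormat.NV16: // rare, but the Y plane is laid out the same way
            return new PlanarYUVLuminanceSource(rotatedData, width, height,
                    rect.left, rect.top, rect.width(), rect.height(), horizontal);
        default:
            if ("yuv420p".equals(previewFormatString)) {
                return new PlanarYUVLuminanceSource(rotatedData, width, height,
                        rect.left, rect.top, rect.width(), rect.height(), horizontal);
            }
            throw new IllegalArgumentException(
                    "Unsupported picture format: " + previewFormat + '/' + previewFormatString);
    }
}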
private String decode(byte[] data, int width, int height) {
    ScannerManager manager = mManager.get();
    if (manager == null) {
        return null;
    }
    Rect rect = manager.getFramingRectInPreview();
    // The last two size arguments are the crop width and height, not the right/bottom coordinates.
    PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(data, width, height,
            rect.left, rect.top, rect.width(), rect.height(), false);
    BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
    QRCodeReader reader = new QRCodeReader();
    try {
        Result result = reader.decode(bitmap, mHints);
        return result.getText();
    } catch (ReaderException e) {
        // Ignore: the preview frame is decoded repeatedly, so a miss here is expected.
        return null;
    }
}
private synchronized PlanarYUVLuminanceSource rotate90(byte[] data, int dataWidth, Rect rect) {
    int sourceWidth = rect.width();
    int sourceHeight = rect.height();
    int desiredSize = sourceHeight * sourceWidth;
    if (destDataCache.length != desiredSize) {
        destDataCache = new byte[desiredSize];
    }
    byte[] destData = destDataCache;
    int destOffset = 0;
    for (int sourceX = 0; sourceX < sourceWidth; sourceX++) {
        int sourceOffset = (rect.bottom - 1) * dataWidth + rect.left + sourceX;
        for (int sourceY = sourceHeight - 1; sourceY >= 0; sourceY--) {
            destData[destOffset++] = data[sourceOffset];
            sourceOffset -= dataWidth;
        }
    }
    return new PlanarYUVLuminanceSource(destData, sourceHeight, sourceWidth,
            0, 0, sourceHeight, sourceWidth, false);
}
private synchronized PlanarYUVLuminanceSource rotate180(byte[] data, int dataWidth, Rect rect) {
    int sourceWidth = rect.width();
    int sourceHeight = rect.height();
    int desiredSize = sourceHeight * sourceWidth;
    if (destDataCache.length != desiredSize) {
        destDataCache = new byte[desiredSize];
    }
    byte[] destData = destDataCache;
    int destOffset = 0;
    for (int sourceY = sourceHeight - 1; sourceY >= 0; sourceY--) {
        int sourceOffset = rect.left + sourceWidth - 1 + (rect.top + sourceY) * dataWidth;
        for (int sourceX = sourceWidth - 1; sourceX >= 0; sourceX--) {
            destData[destOffset++] = data[sourceOffset--];
        }
    }
    return new PlanarYUVLuminanceSource(destData, sourceWidth, sourceHeight,
            0, 0, sourceWidth, sourceHeight, false);
}
private synchronized PlanarYUVLuminanceSource rotate270(byte[] data, int dataWidth, Rect rect) {
    int sourceWidth = rect.width();
    int sourceHeight = rect.height();
    int desiredSize = sourceHeight * sourceWidth;
    if (destDataCache.length != desiredSize) {
        destDataCache = new byte[desiredSize];
    }
    byte[] destData = destDataCache;
    int destOffset = 0;
    for (int sourceX = sourceWidth - 1; sourceX >= 0; sourceX--) {
        int sourceOffset = rect.left + sourceX + rect.top * dataWidth;
        for (int sourceY = 0; sourceY < sourceHeight; sourceY++) {
            destData[destOffset++] = data[sourceOffset];
            sourceOffset += dataWidth;
        }
    }
    return new PlanarYUVLuminanceSource(destData, sourceHeight, sourceWidth,
            0, 0, sourceHeight, sourceWidth, false);
}
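The three rotation helpers above are typically selected according to the camera-to-display rotation. A short usage sketch, assuming a rotationDegrees value of 0, 90, 180 or 270 (the wrapper name and parameters are illustrative, not from the original):

// Illustrative dispatcher over the rotate90/rotate180/rotate270 helpers above.
private PlanarYUVLuminanceSource buildRotatedSource(byte[] data, int dataWidth, int dataHeight,
                                                    Rect rect, int rotationDegrees) {
    switch (rotationDegrees) {
        case 90:
            return rotate90(data, dataWidth, rect);
        case 180:
            return rotate180(data, dataWidth, rect);
        case 270:
            return rotate270(data, dataWidth, rect);
        default:
            // No rotation needed: crop directly from the original buffer.
            return new PlanarYUVLuminanceSource(data, dataWidth, dataHeight,
                    rect.left, rect.top, rect.width(), rect.height(), false);
    }
}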
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    if (mDialog.isShowing()) {
        return;
    }
    LuminanceSource source = new PlanarYUVLuminanceSource(data, mWidth, mHeight,
            mLeft, mTop, mAreaWidth, mAreaHeight, false);
    BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
    Result result;
    try {
        result = mMultiFormatReader.decode(bitmap, null);
        if (result != null) {
            mDialog.setTitle("Result");
            mDialog.setMessage(result.getText());
            mDialog.show();
        }
    } catch (NotFoundException e) {
        e.printStackTrace();
    }
}
private void scan(byte[] data, int width, int height) {
    Log.d(TAG, "scan");
    PlanarYUVLuminanceSource luminanceSource =
            new PlanarYUVLuminanceSource(data, width, height, 0, 0, width, height, false);
    // new ScanTask().execute(luminanceSource); // uncomment to decode on a background task instead
    BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(luminanceSource));
    Result result = null;
    try {
        result = multiFormatReader.decodeWithState(bitmap);
    } catch (ReaderException re) {
        // Nothing found to decode in this frame.
    } finally {
        multiFormatReader.reset();
    }
    if (result != null) {
        Intent intent = new QRIntentBuilder(result.getText()).buildIntent();
        activity.launchIntent(intent);
    }
}
/**
 * A factory method to build the appropriate LuminanceSource object based on the format
 * of the preview buffers, as described by Camera.Parameters.
 *
 * @param data   A preview frame.
 * @param width  The width of the image.
 * @param height The height of the image.
 * @return A PlanarYUVLuminanceSource instance.
 */
public synchronized PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height, Rect boundingRect) {
    switch (orientation) {
        case 0:
            //data = flip(data);
            break;
        case 90:
            // rotate90 is assumed to rotate the buffer in place; width/height and the
            // bounding rect coordinates are swapped below to match the rotated data.
            rotate90(data, width, height);
            return new PlanarYUVLuminanceSource(data, height, width, boundingRect.top,
                    boundingRect.left, boundingRect.height(), boundingRect.width(), false);
        case 180:
            break;
        case 270:
            rotate90(data, width, height);
            break;
    }
    return new PlanarYUVLuminanceSource(data, width, height, boundingRect.left, boundingRect.top,
            boundingRect.width(), boundingRect.height(), false);
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    int previewWidth = camera.getParameters().getPreviewSize().width;
    int previewHeight = camera.getParameters().getPreviewSize().height;
    PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(
            data, previewWidth, previewHeight, 0, 0, previewWidth, previewHeight, false);
    BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
    Reader reader = new QRCodeReader();
    try {
        Result result = reader.decode(bitmap);
        String text = result.getText();
        Intent intent = new Intent();
        intent.setData(Uri.parse(text));
        setResult(RESULT_OK, intent);
        finish();
    } catch (Exception e) {
        e.printStackTrace();
        Toast.makeText(getApplicationContext(), "Not Found", Toast.LENGTH_SHORT).show();
    }
}
private static void bundleThumbnail(PlanarYUVLuminanceSource source, Bundle bundle) {
    int[] pixels = source.renderThumbnail();
    int width = source.getThumbnailWidth();
    int height = source.getThumbnailHeight();
    Bitmap bitmap = Bitmap.createBitmap(pixels, 0, width, width, height, Bitmap.Config.ARGB_8888);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    bitmap.compress(Bitmap.CompressFormat.JPEG, 50, out);
    bundle.putByteArray(DecodeThread.BARCODE_BITMAP, out.toByteArray());
}
/**
 * A factory method to build the appropriate LuminanceSource object based on the format
 * of the preview buffers, as described by Camera.Parameters.
 *
 * @param data   A preview frame.
 * @param width  The width of the image.
 * @param height The height of the image.
 * @return A PlanarYUVLuminanceSource instance.
 */
public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
    Rect rect = getFramingRectInPreview();
    if (rect == null) {
        return null;
    }
    // Go ahead and assume it's YUV rather than die.
    return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
            rect.width(), rect.height(), false);
}
private static void bundleThumbnail(PlanarYUVLuminanceSource source, Bundle bundle) {
    int[] pixels = source.renderThumbnail();
    int width = source.getThumbnailWidth();
    int height = source.getThumbnailHeight();
    Bitmap bitmap = Bitmap.createBitmap(pixels, 0, width, width, height, Bitmap.Config.ARGB_8888);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    bitmap.compress(Bitmap.CompressFormat.JPEG, 50, out);
    bundle.putByteArray(DecodeThread.BARCODE_BITMAP, out.toByteArray());
    bundle.putFloat(DecodeThread.BARCODE_SCALED_FACTOR, (float) width / source.getWidth());
}
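On the receiving side, the same bundle keys are read back from the decode-succeeded message. A hedged sketch (the handler method name and wiring are assumptions; the bundle keys match the snippet above):

// Hypothetical consumer of the thumbnail bundle attached to the decode result message.
void handleDecodeSucceeded(Message message) {
    Bundle bundle = message.getData();
    if (bundle == null) {
        return;
    }
    byte[] compressed = bundle.getByteArray(DecodeThread.BARCODE_BITMAP);
    float scale = bundle.getFloat(DecodeThread.BARCODE_SCALED_FACTOR, 1.0f);
    Bitmap thumbnail = null;
    if (compressed != null) {
        thumbnail = BitmapFactory.decodeByteArray(compressed, 0, compressed.length);
    }
    // The thumbnail (and scale factor) can now be drawn over the viewfinder next to the Result text.
}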
/**
 * A factory method to build the appropriate LuminanceSource object based on
 * the format of the preview buffers, as described by Camera.Parameters.
 *
 * @param data   A preview frame.
 * @param width  The width of the image.
 * @param height The height of the image.
 * @return A PlanarYUVLuminanceSource instance.
 */
public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
    Rect rect = activity.getCropRect();
    if (rect == null) {
        return null;
    }
    // Go ahead and assume it's YUV rather than die.
    return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
            rect.width(), rect.height(), false);
}
@NonNull
@SuppressWarnings("SuspiciousNameCombination")
public Result decode(@NonNull MultiFormatReader reader) throws ReaderException {
    int imageWidth = mImageSize.getX();
    int imageHeight = mImageSize.getY();
    byte[] image;
    int width;
    int height;
    if (mOrientation == 0) {
        image = mImage;
        width = imageWidth;
        height = imageHeight;
    } else {
        image = Utils.rotateNV21(mImage, imageWidth, imageHeight, mOrientation);
        if (mOrientation == 90 || mOrientation == 270) {
            width = imageHeight;
            height = imageWidth;
        } else {
            width = imageWidth;
            height = imageHeight;
        }
    }
    Rect frameRect = Utils.getImageFrameRect(mSquareFrame, width, height, mPreviewSize, mViewSize);
    return reader.decodeWithState(new BinaryBitmap(new HybridBinarizer(
            new PlanarYUVLuminanceSource(image, width, height, frameRect.getLeft(), frameRect.getTop(),
                    frameRect.getWidth(), frameRect.getHeight(), mReverseHorizontal))));
}
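Utils.rotateNV21 is not shown above. As a rough reference, a 90-degree clockwise NV21 rotation (full-resolution Y plane followed by the half-resolution interleaved V/U plane) can be written as below; this is an assumed stand-in, not the library's implementation, and covers only the 90-degree case:

// Assumed stand-in for a 90-degree clockwise NV21 rotation.
public static byte[] rotateNV21Clockwise90(byte[] input, int width, int height) {
    byte[] output = new byte[input.length];
    int frameSize = width * height;
    int i = 0;
    // Rotate the Y plane.
    for (int x = 0; x < width; x++) {
        for (int y = height - 1; y >= 0; y--) {
            output[i++] = input[y * width + x];
        }
    }
    // Rotate the interleaved V/U plane (two bytes per 2x2 block of pixels).
    for (int x = 0; x < width; x += 2) {
        for (int y = height / 2 - 1; y >= 0; y--) {
            output[i++] = input[frameSize + y * width + x];     // V
            output[i++] = input[frameSize + y * width + x + 1]; // U
        }
    }
    return output;
}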
public PlanarYUVLuminanceSource createSource() {
    byte[] rotated = rotateCameraPreview(rotation, data, dataWidth, dataHeight);
    // TODO: handle mirrored (front) camera. Probably only the ResultPoints should be mirrored,
    // not the preview for decoding.
    if (isRotated()) {
        //noinspection SuspiciousNameCombination
        return new PlanarYUVLuminanceSource(rotated, dataHeight, dataWidth,
                cropRect.left, cropRect.top, cropRect.width(), cropRect.height(), false);
    } else {
        return new PlanarYUVLuminanceSource(rotated, dataWidth, dataHeight,
                cropRect.left, cropRect.top, cropRect.width(), cropRect.height(), false);
    }
}
static void bundleThumbnail(PlanarYUVLuminanceSource source, Bundle bundle) {
    int[] pixels = source.renderThumbnail();
    int width = source.getThumbnailWidth();
    int height = source.getThumbnailHeight();
    Bitmap bitmap = Bitmap.createBitmap(pixels, 0, width, width, height, Bitmap.Config.ARGB_8888);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    bitmap.compress(Bitmap.CompressFormat.JPEG, 50, out);
    bundle.putByteArray(DecodeThread.BARCODE_BITMAP, out.toByteArray());
}
/**
 * Creates a small JPEG thumbnail of the decoded frame and stores it in the bundle.
 *
 * @param source the luminance source that produced the decode result
 * @param bundle the bundle to receive the compressed thumbnail bytes
 */
private void bundleThumbnail(PlanarYUVLuminanceSource source, Bundle bundle) {
    int[] pixels = source.renderThumbnail();
    int width = source.getThumbnailWidth();
    int height = source.getThumbnailHeight();
    Bitmap bitmap = Bitmap.createBitmap(pixels, 0, width, width, height, Bitmap.Config.RGB_565);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    bitmap.compress(Bitmap.CompressFormat.JPEG, 50, out);
    bundle.putByteArray(DecodeThread.BARCODE_BITMAP, out.toByteArray());
}