Java class android.graphics.YuvImage example source code
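
android.graphics.YuvImage wraps a YUV byte array (typically an NV21 camera preview frame) together with its format, dimensions and optional row strides, and its compressToJpeg() method encodes a rectangular region of that frame as JPEG. Most of the snippets collected below follow the same basic pattern; here is a minimal sketch of it (the helper name and parameters are illustrative, not taken from any of the projects):

import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;

import java.io.ByteArrayOutputStream;

// Minimal sketch: compress an NV21 preview frame to a JPEG byte array.
static byte[] nv21ToJpeg(byte[] nv21, int width, int height, int quality) {
    // Passing null strides lets YuvImage compute the default strides for the format.
    YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // Compressing the full frame; a smaller Rect would crop before encoding.
    yuv.compressToJpeg(new Rect(0, 0, width, height), quality, out);
    return out.toByteArray();
}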

Project: Telephoto    File: ImageShot.java
private byte[] imgToByte(boolean quality) {
    Camera.Parameters parameters = getParameters();
    int width = parameters.getPreviewSize().width;
    int height = parameters.getPreviewSize().height;

    YuvImage yuv = new YuvImage(getImage(), parameters.getPreviewFormat(), width, height, null);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    yuv.compressToJpeg(new Rect(0, 0, width, height), 100, out);

    byte[] compressed = out.toByteArray();

    Bitmap newBmp = BitmapFactory.decodeByteArray(compressed, 0, compressed.length);
    Matrix mat = new Matrix();
    mat.postRotate(PrefsController.instance.getPrefs().getCameraPrefs(cameraId).angle);
    newBmp = Bitmap.createBitmap(newBmp, 0, 0, newBmp.getWidth(), newBmp.getHeight(), mat, true);
    ByteArrayOutputStream out2 = new ByteArrayOutputStream();
    if (quality) {
        // true selects lossless PNG output; false re-encodes as JPEG at quality 80
        newBmp.compress(Bitmap.CompressFormat.PNG, 100, out2);
    } else {
        newBmp.compress(Bitmap.CompressFormat.JPEG, 80, out2);
    }

    return out2.toByteArray();
}
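
Note that YuvImage only accepts ImageFormat.NV21 and ImageFormat.YUY2; the constructor throws IllegalArgumentException for anything else. Snippets like the one above that pass getPreviewFormat() straight through rely on the camera delivering one of those formats (NV21 is the default preview format of the old Camera API). A small guard that could be added before constructing the YuvImage (hypothetical helper, not part of the project above):

import android.graphics.ImageFormat;

// Hypothetical guard: only NV21 and YUY2 preview formats can be wrapped by YuvImage.
static boolean isSupportedByYuvImage(int previewFormat) {
    return previewFormat == ImageFormat.NV21 || previewFormat == ImageFormat.YUY2;
}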
Project: Viewer    File: SourceData.java
private Bitmap getBitmap(Rect cropRect, int scaleFactor) {
    if(isRotated()) {
        //noinspection SuspiciousNameCombination
        cropRect = new Rect(cropRect.top, cropRect.left, cropRect.bottom, cropRect.right);
    }

    // TODO: there should be a way to do this without JPEG compression / decompression cycle.
    YuvImage img = new YuvImage(data, imageFormat, dataWidth, dataHeight, null);
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    img.compressToJpeg(cropRect, 90, buffer);
    byte[] jpegData = buffer.toByteArray();

    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inSampleSize = scaleFactor;
    Bitmap bitmap = BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length, options);

    // Rotate if required
    if (rotation != 0) {
        Matrix imageMatrix = new Matrix();
        imageMatrix.postRotate(rotation);
        bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), imageMatrix, false);
    }

    return bitmap;
}
Project: OpModeCamera    File: OpModeCamera.java
static public Bitmap convertYuvImageToRgb(YuvImage yuvImage, int width, int height, int downSample) {
  Bitmap rgbImage;
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  yuvImage.compressToJpeg(new Rect(0, 0, width, height), 0, out);
  byte[] imageBytes = out.toByteArray();

  BitmapFactory.Options opt = new BitmapFactory.Options();
  opt.inSampleSize = downSample;

  // get image and rotate it so (0,0) is in the bottom left
  Bitmap tmpImage;
  Matrix matrix = new Matrix();
  matrix.postRotate(90); // to rotate the camera images so (0,0) is in the bottom left
  tmpImage = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length, opt);
  rgbImage = Bitmap.createBitmap(tmpImage, 0, 0, tmpImage.getWidth(), tmpImage.getHeight(), matrix, true);

  return rgbImage;
}
Project: seeta4Android    File: FaceDetector.java
private void saveFace(final int x, final int y, final int r, final int b) {
    if (DEBUG) Log.d(TAG, "[saveFace()]");
    new Thread(new Runnable() {
        @Override
        public void run() {
            synchronized (mVideoSource) {
                mImageYuv = new YuvImage(mVideoSource, ImageFormat.NV21, CameraWrapper.IMAGE_WIDTH, CameraWrapper.IMAGE_HEIGHT, null);
            }
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            mImageYuv.compressToJpeg(new Rect(0, 0, CameraWrapper.IMAGE_WIDTH, CameraWrapper.IMAGE_HEIGHT), 100, stream);
            Bitmap bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());

            int left = (x > 0) ? x : 0;
            int top = (y > 0) ? y : 0;
            int creatW = (r < CameraWrapper.IMAGE_WIDTH) ? (r - x) : (CameraWrapper.IMAGE_WIDTH - x - 1);
            int creatH = (b < CameraWrapper.IMAGE_HEIGHT) ? (b - y) : (CameraWrapper.IMAGE_HEIGHT - y - 1);

            mImage = Bitmap.createBitmap(bitmap, left, top, creatW, creatH, null, false);
            if (null != mImage) {
                if (DEBUG) Log.d(TAG, "[saveFace()] x:" + x + "  y:" + y + "\n" +
                        "[saveFace()] h:" + mImage.getHeight() + "  w:" + mImage.getWidth());
                FaceUtil.saveBitmapToFile(mImage);
            }
        }
    }).start();
}
Project: PeSanKita-android    File: BitmapUtil.java
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect,
                                    final boolean flipHorizontal)
    throws IOException
{
  byte[] rotated = rotateNV21(data, width, height, rotation, flipHorizontal);
  final int rotatedWidth  = rotation % 180 > 0 ? height : width;
  final int rotatedHeight = rotation % 180 > 0 ? width  : height;
  YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
                                       rotatedWidth, rotatedHeight, null);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  previewImage.compressToJpeg(croppingRect, 80, outputStream);
  byte[] bytes = outputStream.toByteArray();
  outputStream.close();
  return bytes;
}
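
The rotateNV21() call above (also used by the Cable-Android and Silence variants of this method further down) is a project helper that is not part of the YuvImage API and is not reproduced in these excerpts. As a rough reference, a commonly used clockwise 90-degree NV21 rotation looks like the sketch below; this is an assumption about what such a helper does, not the projects' actual implementation, and it ignores the flipHorizontal handling:

// Sketch: rotate an NV21 frame 90 degrees clockwise (width x height in, height x width out).
static byte[] rotateNV21Degree90(byte[] data, int width, int height) {
    byte[] rotated = new byte[width * height * 3 / 2];

    // Rotate the Y (luma) plane: each input column, read bottom-up, becomes an output row.
    int i = 0;
    for (int x = 0; x < width; x++) {
        for (int y = height - 1; y >= 0; y--) {
            rotated[i++] = data[y * width + x];
        }
    }

    // Rotate the interleaved VU (chroma) plane, keeping V before U in each pair.
    i = width * height * 3 / 2 - 1;
    for (int x = width - 1; x > 0; x -= 2) {
        for (int y = 0; y < height / 2; y++) {
            rotated[i--] = data[width * height + y * width + x];
            rotated[i--] = data[width * height + y * width + (x - 1)];
        }
    }
    return rotated;
}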
Project: WithYou    File: VideoVerify.java
private Bitmap decodeToBitMap(byte[] data) {
    try {
        YuvImage image = new YuvImage(data, ImageFormat.NV21, PREVIEW_WIDTH,
                PREVIEW_HEIGHT, null);
        if (image != null) {
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            image.compressToJpeg(new Rect(0, 0, PREVIEW_WIDTH, PREVIEW_HEIGHT),
                    80, stream);
            Bitmap bmp = BitmapFactory.decodeByteArray(
                    stream.toByteArray(), 0, stream.size());
            stream.close();
            return bmp ;
        }
    } catch (Exception ex) {
        Log.e("Sys", "Error:" + ex.getMessage());
    }
    return null;
}
Project: Cable-Android    File: BitmapUtil.java
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect,
                                    final boolean flipHorizontal)
    throws IOException
{
  byte[] rotated = rotateNV21(data, width, height, rotation, flipHorizontal);
  final int rotatedWidth  = rotation % 180 > 0 ? height : width;
  final int rotatedHeight = rotation % 180 > 0 ? width  : height;
  YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
                                       rotatedWidth, rotatedHeight, null);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  previewImage.compressToJpeg(croppingRect, 80, outputStream);
  byte[] bytes = outputStream.toByteArray();
  outputStream.close();
  return bytes;
}
Project: video-quickstart-android    File: SnapshotVideoRenderer.java
private Bitmap captureBitmapFromYuvFrame(I420Frame i420Frame) {
    YuvImage yuvImage = i420ToYuvImage(i420Frame.yuvPlanes,
            i420Frame.yuvStrides,
            i420Frame.width,
            i420Frame.height);
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    Rect rect = new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight());

    // Compress YuvImage to jpeg
    yuvImage.compressToJpeg(rect, 100, stream);

    // Convert jpeg to Bitmap
    byte[] imageBytes = stream.toByteArray();
    Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
    Matrix matrix = new Matrix();

    // Apply any needed rotation
    matrix.postRotate(i420Frame.rotationDegree);
    bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix,
            true);

    return bitmap;
}
Project: video-quickstart-android    File: SnapshotVideoRenderer.java
private YuvImage fastI420ToYuvImage(ByteBuffer[] yuvPlanes,
                                    int[] yuvStrides,
                                    int width,
                                    int height) {
    byte[] bytes = new byte[width * height * 3 / 2];
    int i = 0;
    for (int row = 0 ; row < height ; row++) {
        for (int col = 0 ; col < width ; col++) {
            bytes[i++] = yuvPlanes[0].get(col + row * yuvStrides[0]);
        }
    }
    for (int row = 0 ; row < height / 2 ; row++) {
        for (int col = 0 ; col < width / 2; col++) {
            bytes[i++] = yuvPlanes[2].get(col + row * yuvStrides[2]);
            bytes[i++] = yuvPlanes[1].get(col + row * yuvStrides[1]);
        }
    }
    return new YuvImage(bytes, ImageFormat.NV21, width, height, null);
}
Project: PreRect    File: ImgUtil.java
/**
 * Convert a YUV preview frame to a Bitmap.
 * @param frameData  the raw YUV frame data
 * @param yuvFormat  the YUV pixel format (e.g. ImageFormat.NV21)
 * @param prevWidth  the preview width in pixels
 * @param prevHeight the preview height in pixels
 * @param quality    the JPEG compression quality (0-100)
 * @return the decoded Bitmap, or null if conversion fails
 */
public static Bitmap yuv2Img(byte[] frameData, int yuvFormat, int prevWidth, int prevHeight,
    int quality) {
  Long start = System.currentTimeMillis();
  Bitmap img = null;
  try {
    YuvImage image = new YuvImage(frameData, yuvFormat, prevWidth, prevHeight, null);
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inPreferredConfig = Bitmap.Config.RGB_565;
    if (image != null) {
      ByteArrayOutputStream stream = new ByteArrayOutputStream();
      image.compressToJpeg(new Rect(0, 0, prevWidth, prevHeight), quality, stream);
      img = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size(), options);
      stream.close();
    }
  } catch (Exception e) {
    e.printStackTrace();
  }
  return img;
}
Project: cordova-plugin-preview-camera    File: CameraActivity.java
public byte[] getFramePicture(byte[] data, Camera camera) {
    Camera.Parameters parameters = camera.getParameters();
    int format = parameters.getPreviewFormat();

    //YUV formats require conversion
    if (format == ImageFormat.NV21 || format == ImageFormat.YUY2 || format == ImageFormat.NV16) {
        int w = parameters.getPreviewSize().width;
        int h = parameters.getPreviewSize().height;

        // Get the YUV image
        YuvImage yuvImage = new YuvImage(data, format, w, h, null);
        // Convert YUV to JPEG
        Rect rect = new Rect(0, 0, w, h);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(rect, 80, outputStream);
        return outputStream.toByteArray();
    }
    return data;
}
Project: VisiSynth    File: MainActivity.java
public void onPictureTaken(byte[] data, Camera camera) {
    //new SaveImageTask().execute(data);
    long b = System.currentTimeMillis()-a;

    Log.d(TAG, "onPictureTaken - jpeg");
    String str = "Data: " + data.length;
    Log.d(TAG, str);
    // Convert to JPG
    Camera.Size previewSize = camera.getParameters().getPreviewSize();
    YuvImage yuvimage = new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 100, baos);
    byte[] jdata = baos.toByteArray();

    // Convert to Bitmap
    Bitmap bmp = BitmapFactory.decodeByteArray(jdata, 0, jdata.length);

    str = "BMP: " + bmp.getPixel(35, 84);
    Log.d(TAG, str);


    str = "Time: " + b;
    Log.d(TAG, str);
    resetCam();
}
Project: VisiSynth    File: Preview.java
public void onPreviewFrame(byte[] data, Camera camera){
    Log.d("TAG", "frame1 "+data.length);
    Camera.Size previewSize = mCamera.getParameters().getPreviewSize();
    YuvImage yuvimage=new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);

    // Convert to Bitmap
    final double [][] imgmat = imgpro.BufferedYUVImage2Mat(yuvimage.getYuvData(),
            yuvimage.getWidth(), yuvimage.getHeight(), 640, 480);

    List<Double> ld = imgpro.AnalyzeMat(imgmat, 0.6);

    String logline = "points:";
    for(Double p : ld)
        logline += " " + (1-p);
    Log.d("TAG", logline);
    double[] f = new double[ld.size()];
    for (int i = 0; i < f.length; i++)
        f[i] = Math.pow(2.0, ld.get(i) * 2) * 440.0;
    play(f);
}
Project: Viewer    File: MyRenderer.java
public void rawByteArray2RGBABitmap2(FileOutputStream b)
{
    int yuvi = yuv_w * yuv_h;
    int uvi = 0;
    byte[] yuv = new byte[yuv_w * yuv_h * 3 / 2];
    System.arraycopy(y, 0, yuv, 0, yuvi);
    for (int i = 0; i < yuv_h / 2; i++)
    {
        for (int j = 0; j < yuv_w / 2; j++)
        {
            yuv[yuvi++] = v[uvi];
            yuv[yuvi++] = u[uvi++];
        }
    }
    YuvImage yuvImage = new YuvImage(yuv, ImageFormat.NV21, yuv_w, yuv_h, null);
    Rect rect = new Rect(0, 0, yuv_w, yuv_h);
    yuvImage.compressToJpeg(rect, 100, b);
}
Project: PhoneChat    File: CameraActivity.java
public byte[] getFramePicture(byte[] data, Camera camera) {
    Camera.Parameters parameters = camera.getParameters();
    int format = parameters.getPreviewFormat();

    //YUV formats require conversion
    if (format == ImageFormat.NV21 || format == ImageFormat.YUY2 || format == ImageFormat.NV16) {
        int w = parameters.getPreviewSize().width;
        int h = parameters.getPreviewSize().height;

        // Get the YUV image
        YuvImage yuvImage = new YuvImage(data, format, w, h, null);
        // Convert YUV to JPEG
        Rect rect = new Rect(0, 0, w, h);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(rect, 80, outputStream);
        return outputStream.toByteArray();
    }
    return data;
}
Project: BluetoothCameraAndroid    File: CameraModelImpl.java
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
    if (mInitAck && mDataAck) {
        mInitAck = false;
        mDataAck = false;
        previewMissedCount = 0;
        ThreadHandler.getInstance().doInBackground(new Runnable() {
            @Override
            public void run() {
                Camera.Size size = camera.getParameters().getPreviewSize();
                ByteArrayOutputStream out = new ByteArrayOutputStream();
                YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
                yuvImage.compressToJpeg(new Rect(0, 0, size.width, size.height), 50, out);
                byte[] imageBytes = out.toByteArray();
                mBluetoothHandler.write(BluetoothHandler.DATA_START.getBytes());
                mPendingImageBytes = imageBytes;
            }
        });
    } else {
        previewMissedCount++;
        if (previewMissedCount > 50) {
            mInitAck = true;
            mDataAck = true;
        }
    }
}
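
onPreviewFrame() callbacks like the one above only receive NV21 byte arrays if a preview callback is registered and the preview format is left at (or explicitly set to) NV21. A minimal sketch of that wiring with the deprecated android.hardware.Camera API these snippets use (helper name and the surface parameter are illustrative):

import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.view.SurfaceHolder;

import java.io.IOException;

// Sketch: open the rear camera, force NV21 preview frames and register the callback.
static Camera startNv21Preview(Camera.PreviewCallback callback, SurfaceHolder holder) throws IOException {
    Camera camera = Camera.open();
    Camera.Parameters params = camera.getParameters();
    params.setPreviewFormat(ImageFormat.NV21);   // NV21 is also the documented default
    camera.setParameters(params);
    camera.setPreviewDisplay(holder);            // most devices need a surface before startPreview()
    camera.setPreviewCallback(callback);         // onPreviewFrame() then receives NV21 buffers
    camera.startPreview();
    return camera;
}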
Project: DeviceConnect-Android    File: YuvConverter.java
/**
 * Converts YUV planes into a YuvImage, copying line by line.
 * @param width The width of the image.
 * @param height The height of the image.
 * @param yuvStrides The strides of the Y, U and V planes.
 * @param yuvPlanes The buffers of the Y, U and V planes.
 * @return The converted YuvImage.
 */
public static YuvImage convertToYuvImageLineByLine(final int width, final int height, final int[] yuvStrides, final ByteBuffer[] yuvPlanes) {
    byte[] bytes = new byte[width * height * 3 / 2];
    byte[] yuvPlanes0 = yuvPlanes[0].array();
    byte[] yuvPlanes1 = yuvPlanes[1].array();
    byte[] yuvPlanes2 = yuvPlanes[2].array();

    int i = 0;
    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col++) {
            bytes[i++] = yuvPlanes0[col + row * yuvStrides[0]];
        }
    }
    for (int row = 0; row < height / 2; row++) {
        for (int col = 0; col < width / 2; col++) {
            bytes[i++] = yuvPlanes2[col + row * yuvStrides[2]];
            bytes[i++] = yuvPlanes1[col + row * yuvStrides[1]];
        }
    }
    return new YuvImage(bytes, ImageFormat.NV21, width, height, null);
}
Project: Ancaffe    File: Util.java
public String saveImageToPath(byte[] data, Camera.Parameters parameters, String desPath) {
    Camera.Size size = parameters.getPreviewSize();
    String filePath = Environment.getExternalStorageDirectory().getPath() + desPath;
    try {
        data = rotateYUV90(data, size.width, size.height);
        int rotatedHeight = size.width;
        int rotatedWidth = size.height;
        YuvImage image = new YuvImage(data, parameters.getPreviewFormat(),
                rotatedWidth, rotatedHeight, null);
        File file = new File(filePath);
        if (!file.exists()) {
            FileOutputStream fos = new FileOutputStream(file);
            image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 90, fos);
            fos.close();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return filePath;
}
Project: Silence    File: BitmapUtil.java
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect)
    throws IOException
{
  byte[] rotated = rotateNV21(data, width, height, rotation);
  final int rotatedWidth  = rotation % 180 > 0 ? height : width;
  final int rotatedHeight = rotation % 180 > 0 ? width  : height;
  YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
                                       rotatedWidth, rotatedHeight, null);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  previewImage.compressToJpeg(croppingRect, 80, outputStream);
  byte[] bytes = outputStream.toByteArray();
  outputStream.close();
  return bytes;
}
Project: smartcar    File: SimpleVideoActivity.java
@Override
public void onPreviewFrame(byte[] bytes, Camera camera) {
    if (!isRecording) return;

    ++frame_skipped;
    if (frame_skipped == SKIP_FRAME) frame_skipped = 0;
    else return;

    try {
        if (bytes != null) {
            YuvImage img = new YuvImage(bytes, videoFormatIndex,
                    bestSize.width, bestSize.height, null);
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            img.compressToJpeg(new Rect(0, 0, bestSize.width, bestSize.height),
                    VIDEO_QUALITY, outputStream);
            //sendImage(outputStream.toByteArray());
            send(outputStream.toByteArray());
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Project: previewOCR    File: CameraTool.java
public static Bitmap getBitmapImageFromYUV(byte[] data, int width,
        int height, Rect rect) {
    YuvImage yuvimage = new YuvImage(data, ImageFormat.NV21, width, height,
            null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    yuvimage.compressToJpeg(new Rect(0, 0, width, height), 90, baos);

    byte[] jdata = baos.toByteArray();
    BitmapFactory.Options bitmapFactoryOptions = new BitmapFactory.Options();
    bitmapFactoryOptions.inPreferredConfig = Bitmap.Config.ARGB_8888;
    Bitmap bmp = BitmapFactory.decodeByteArray(jdata, 0, jdata.length,
            bitmapFactoryOptions);

    Log.d(TAG, "getBitmapImageFromYUV w:" + bmp.getWidth() + " h:" + bmp.getHeight());


    return bmp;
}
Project: Eye    File: Person.java
public void queryPersonInfo(YuvImage yuvImage) {

    // Send the JPEG to the server and get the person's info back.
    PersonQueryRequest personQueryRequest = new PersonQueryRequest(this, yuvImage);
    if (personQueryRequest.generateBase64FromImage()) {
        personQueryRequest.execute();
    } else {
        setQueryingStatus(QUERY_FAILED);
    }
}
Project: Eye    File: PersonQueryRequest.java
public PersonQueryRequest(Person person, YuvImage yuvImage) {
    this.person = person;
    this.yuvImage = yuvImage;
    this.b64 = null;
    this.status = true;
    person.setQueryingStatus(Person.QUERY_IN_PROGRESS);
}
Project: MegviiFacepp-Android-SDK    File: ConUtil.java
public static Bitmap decodeToBitMap(byte[] data, Camera _camera) {
    Camera.Size size = _camera.getParameters().getPreviewSize();
    try {
        YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
        if (image != null) {
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            image.compressToJpeg(new Rect(0, 0, size.width, size.height), 80, stream);
            Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
            stream.close();
            return bmp;
        }
    } catch (Exception ex) {
    }
    return null;
}
Project: MegviiFacepp-Android-SDK    File: ICamera.java
public Bitmap getBitMap(byte[] data, Camera camera, boolean mIsFrontalCamera) {
    int width = camera.getParameters().getPreviewSize().width;
    int height = camera.getParameters().getPreviewSize().height;
    YuvImage yuvImage = new YuvImage(data, camera.getParameters()
            .getPreviewFormat(), width, height, null);
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, width, height), 80,
            byteArrayOutputStream);
    byte[] jpegData = byteArrayOutputStream.toByteArray();
    // Decode the captured JPEG data into a Bitmap
    Bitmap tmpBitmap = BitmapFactory.decodeByteArray(jpegData, 0,
            jpegData.length);
    Matrix matrix = new Matrix();
    matrix.reset();
    if (mIsFrontalCamera) {
        matrix.setRotate(-90);
    } else {
        matrix.setRotate(90);
    }
    tmpBitmap = Bitmap.createBitmap(tmpBitmap, 0, 0, tmpBitmap.getWidth(),
            tmpBitmap.getHeight(), matrix, true);
    tmpBitmap = tmpBitmap.copy(Bitmap.Config.ARGB_8888, true);

    int maxSide = Math.max(tmpBitmap.getWidth(), tmpBitmap.getHeight());

    float scale = maxSide / 800.0f;

    if (scale > 1) {
        tmpBitmap = Bitmap.createScaledBitmap(tmpBitmap,
                (int) (tmpBitmap.getWidth() / scale),
                (int) (tmpBitmap.getHeight() / scale), false);
    }
    return tmpBitmap;
}
Project: QuickDrawEverywhere    File: CameraView.java
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    try {
        Camera.Parameters p = camera.getParameters();
        int w = p.getPreviewSize().width;
        int h = p.getPreviewSize().height;
        int format = p.getPreviewFormat();
        mYuvImage = new YuvImage(data, format, w, h, null);
        byteArray = data;
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Project: video-quickstart-android    File: SnapshotVideoRenderer.java
private YuvImage i420ToYuvImage(ByteBuffer[] yuvPlanes,
                                int[] yuvStrides,
                                int width,
                                int height) {
    if (yuvStrides[0] != width) {
        return fastI420ToYuvImage(yuvPlanes, yuvStrides, width, height);
    }
    if (yuvStrides[1] != width / 2) {
        return fastI420ToYuvImage(yuvPlanes, yuvStrides, width, height);
    }
    if (yuvStrides[2] != width / 2) {
        return fastI420ToYuvImage(yuvPlanes, yuvStrides, width, height);
    }

    byte[] bytes = new byte[yuvStrides[0] * height +
            yuvStrides[1] * height / 2 +
            yuvStrides[2] * height / 2];
    ByteBuffer tmp = ByteBuffer.wrap(bytes, 0, width * height);
    copyPlane(yuvPlanes[0], tmp);

    byte[] tmpBytes = new byte[width / 2 * height / 2];
    tmp = ByteBuffer.wrap(tmpBytes, 0, width / 2 * height / 2);

    copyPlane(yuvPlanes[2], tmp);
    for (int row = 0 ; row < height / 2 ; row++) {
        for (int col = 0 ; col < width / 2 ; col++) {
            bytes[width * height + row * width + col * 2]
                    = tmpBytes[row * width / 2 + col];
        }
    }
    copyPlane(yuvPlanes[1], tmp);
    for (int row = 0 ; row < height / 2 ; row++) {
        for (int col = 0 ; col < width / 2 ; col++) {
            bytes[width * height + row * width + col * 2 + 1] =
                    tmpBytes[row * width / 2 + col];
        }
    }
    return new YuvImage(bytes, ImageFormat.NV21, width, height, null);
}
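
The copyPlane() helper used above is not included in this excerpt. In the WebRTC-derived renderer this class comes from, it simply copies one whole plane buffer into the destination and rewinds both; a sketch of what such a helper can look like (an assumption, not necessarily the project's exact code):

import java.nio.ByteBuffer;

// Sketch: copy an entire source plane into the destination buffer, then reset positions/limits.
static void copyPlane(ByteBuffer src, ByteBuffer dst) {
    src.position(0).limit(src.capacity());
    dst.put(src);
    dst.position(0).limit(dst.capacity());
}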
Project: easyrs    File: YuvToRgbTest.java
@NonNull
private Bitmap getExpectedBitmap(Nv21Image nv21Image) {
    YuvImage yuvImage = new YuvImage(nv21Image.nv21ByteArray, ImageFormat.NV21, nv21Image.width,
            nv21Image.height, null);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, nv21Image.width, nv21Image.height), 100, os);
    byte[] jpegByteArray = os.toByteArray();
    return BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length);
}
Project: easyrs    File: Nv21ImageTest.java
@NonNull
private Bitmap getConvertedBitmap(Nv21Image nv21Image) {
    YuvImage yuvImage = new YuvImage(nv21Image.nv21ByteArray, ImageFormat.NV21, nv21Image.width,
            nv21Image.height, null);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, nv21Image.width, nv21Image.height), 100, os);
    byte[] jpegByteArray = os.toByteArray();
    return BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length);
}
Project: AndroidRosJava    File: CompressedImagePublisher.java
@Override
public void onNewRawImage(byte[] data, Size size) {
  Preconditions.checkNotNull(data);
  Preconditions.checkNotNull(size);
  if (data != rawImageBuffer || !size.equals(rawImageSize)) {
    rawImageBuffer = data;
    rawImageSize = size;
    yuvImage = new YuvImage(rawImageBuffer, ImageFormat.NV21, size.width, size.height, null);
    rect = new Rect(0, 0, size.width, size.height);
  }

  Time currentTime = connectedNode.getCurrentTime();
  String frameId = "camera";

  sensor_msgs.CompressedImage image = imagePublisher.newMessage();
  image.setFormat("jpeg");
  image.getHeader().setStamp(currentTime);
  image.getHeader().setFrameId(frameId);

  Preconditions.checkState(yuvImage.compressToJpeg(rect, 20, stream));
  image.setData(stream.buffer().copy());
  stream.buffer().clear();

  imagePublisher.publish(image);

  sensor_msgs.CameraInfo cameraInfo = cameraInfoPublisher.newMessage();
  cameraInfo.getHeader().setStamp(currentTime);
  cameraInfo.getHeader().setFrameId(frameId);

  cameraInfo.setWidth(size.width);
  cameraInfo.setHeight(size.height);
  cameraInfoPublisher.publish(cameraInfo);
}
Project: ProjectOxford-Apps-MimickerAlarm    File: CameraPreview.java
@Override
// Decode the image data and rotate it to the proper orientation,
// then run the callback, if any, on the image to do post-processing.
protected Boolean doInBackground(Object... params) {
    byte[] data = (byte[]) params[0];
    Camera camera = (Camera) params[1];
    Camera.Parameters parameters = camera.getParameters();
    int format = parameters.getPreviewFormat();
    //YUV formats require more conversion
    if (format == ImageFormat.NV21 || format == ImageFormat.YUY2 || format == ImageFormat.NV16) {
        int w = parameters.getPreviewSize().width;
        int h = parameters.getPreviewSize().height;
        // Get the YUV image
        YuvImage yuv_image = new YuvImage(data, format, w, h, null);
        // Convert YUV to JPEG
        Rect rect = new Rect(0, 0, w, h);
        ByteArrayOutputStream output_stream = new ByteArrayOutputStream();
        yuv_image.compressToJpeg(rect, 100, output_stream);
        byte[] imageBytes = output_stream.toByteArray();
        Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);

        Matrix transform = new Matrix();
        if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            transform.preScale(-1, 1);
        }
        transform.postRotate(mCameraRotation);
        bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), transform, true);

        if (mCapturedCapturedImageCallbackAsync != null) {
            mCapturedCapturedImageCallbackAsync.execute(bitmap);
        }
    }
    return null;
}
Project: driverless-rccar    File: TcpClient.java
private byte[] preprocess(byte[] preview, int width, int height) {
    byte[] jpeg = null;
    YuvImage image = new YuvImage(preview, ImageFormat.NV21, width, height, null);
    Rect r = new Rect(0, 0, width, height);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    boolean ok = image.compressToJpeg(r, 100, baos);
    if (ok) {
        jpeg = baos.toByteArray();
    }
    return jpeg;
}
Project: truth-android    File: YuvImageSubject.java
public static SubjectFactory<YuvImageSubject, YuvImage> type() {
  return new SubjectFactory<YuvImageSubject, YuvImage>() {
    @Override
    public YuvImageSubject getSubject(FailureStrategy fs, YuvImage that) {
      return new YuvImageSubject(fs, that);
    }
  };
}
Project: ISeeU    File: Utils.java
public static byte[] frameByteToJpegByte(byte[] data, Camera camera) {
    try {
        Camera.Parameters parameters = camera.getParameters();
        Camera.Size size = parameters.getPreviewSize();
        YuvImage image = new YuvImage(data, parameters.getPreviewFormat(),
                size.width, size.height, null);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        image.compressToJpeg(
                new Rect(0, 0, image.getWidth(), image.getHeight()), COMPRESS_QUALITY,
                outputStream);
        return outputStream.toByteArray();
    } catch (Exception e) {
        return null;
    }
}
Project: cloudturbine    File: CTandroidAV.java
byte[] jpegFromPreview(byte[] currentPreview) {

        ByteArrayOutputStream baos = new ByteArrayOutputStream();

        Parameters parameters = mCamera.getParameters();
        Size size = parameters.getPreviewSize();
        YuvImage image = new YuvImage(currentPreview, parameters.getPreviewFormat(), size.width, size.height, null);

        image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), quality, baos);

        byte[] jpeg = baos.toByteArray();
        float rotation = 0f;
        if (cameraId == 1 && mDisplay.getRotation() == Surface.ROTATION_0)
            rotation = 270f;
        else if (cameraId == 0 && mDisplay.getRotation() == Surface.ROTATION_0)
            rotation = 90f;

        if (debug) Log.i(TAG, "cameraId: " + cameraId + ", getRotation: " + mDisplay.getRotation() + ", rotation: " + rotation);

        if (rotation != 0.) {
            // This is the same image as the preview but in JPEG and not rotated
            Bitmap bitmap = BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
            ByteArrayOutputStream rotatedStream = new ByteArrayOutputStream();

            // Rotate the Bitmap
            Matrix matrix = new Matrix();
            matrix.postRotate(rotation);

            // We rotate the same Bitmap
            bitmap = Bitmap.createBitmap(bitmap, 0, 0, image.getWidth(), image.getHeight(), matrix, false);

            // We dump the rotated Bitmap to the stream
            bitmap.compress(CompressFormat.JPEG, 50, rotatedStream);
            jpeg = rotatedStream.toByteArray();
            // whew
        }
        return jpeg;
    }
Project: cameraserve    File: MainActivity.java
@Override
public void onPreviewFrame(byte[] bytes, Camera camera) {
    previewStream.reset();
    Camera.Parameters p = camera.getParameters();

    int previewHeight = p.getPreviewSize().height,
        previewWidth = p.getPreviewSize().width;

    switch(rotationSteps) {
        case 1:
            bytes = Rotator.rotateYUV420Degree90(bytes, previewWidth, previewHeight);
            break;
        case 2:
            bytes = Rotator.rotateYUV420Degree180(bytes, previewWidth, previewHeight);
            break;
        case 3:
            bytes = Rotator.rotateYUV420Degree270(bytes, previewWidth, previewHeight);
            break;
    }

    if (rotationSteps == 1 || rotationSteps == 3) {
        int tmp = previewHeight;
        previewHeight = previewWidth;
        previewWidth = tmp;
    }

    int format = p.getPreviewFormat();
    new YuvImage(bytes, format, previewWidth, previewHeight, null)
            .compressToJpeg(new Rect(0, 0, previewWidth, previewHeight),
                    100, previewStream);

    setJpegFrame(previewStream);
}
Project: SecureSmartHome    File: OdroidCamera.java
private void sendImage() {
    if (getContainer() == null || lastSnapshot == null) {
        return;
    }
    int width = params.getPreviewSize().width;
    int height = params.getPreviewSize().height;
    Rect rect = new Rect(0, 0, width, height);
    YuvImage yuvimage = new YuvImage(lastSnapshot, ImageFormat.NV21, width, height, null);

    try (ByteArrayOutputStream outStream = new ByteArrayOutputStream()) {
        yuvimage.compressToJpeg(rect, 80, outStream);
        byte[] jpegData = outStream.toByteArray();

        CameraPayload payload = new CameraPayload(getCameraID(), getModuleName());
        payload.setPicture(jpegData);
        Message reply = new Message(payload);
        requireComponent(OutgoingRouter.KEY).sendReply(getReplyToMessage(), reply);
        imageSent = true;

        //File file = new File(Environment.getExternalStorageDirectory().getPath(),
        //        "snapshot" + System.currentTimeMillis() + ".jpg");
        //FileOutputStream outstr = new FileOutputStream(file);
        //yuvimage.compressToJpeg(rect, 80, outstr);
    } catch (IOException e) {
        Log.e(TAG, "Could not compress image", e);
    }

    finish();
}
Project: RadicalRobotics2017    File: CameraPreviewCallback.java
@Override
public void onPreviewFrame(final byte[] data, Camera camera) {
    if (timestamp == 0) {
        timestamp = System.nanoTime();
    }
    if (timestamp + delay >= System.nanoTime()) {
        return;
    }

    if (extensibleCameraManager != null && context.cameraManager().getCamera() != null) {
        Camera.Parameters parameters = context.cameraManager().getCamera().getParameters();
        Camera.Size previewSize = parameters.getPreviewSize();
        YuvImage image = new YuvImage(data, parameters.getPreviewFormat(),
                previewSize.width, previewSize.height, null);

        synchronized (outputStream) {
            image.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 55, outputStream);

            if (jpeg == null) {
                jpeg = outputStream.toByteArray();
            } else {
                System.arraycopy(outputStream.toByteArray(), 0, jpeg, 0, jpeg.length);
            }
            outputStream.reset();
        }
        try {
            Bitmap bitmap = BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
            if (bitmap != null) {
                extensibleCameraManager.addImage(bitmap);
            }
            timestamp = System.nanoTime();
        } catch (Exception e) {
            Log.e(TAG, e.getLocalizedMessage(), e);
        }

    }
}
Project: faceswap    File: VideoStreamingThread.java
@Override
protected byte[] doInBackground(Object... objs) {
    byte[] frame = (byte[]) objs[0];
    Parameters parameters = (Parameters) objs[1];
    if (frame_firstUpdateTime == 0) {
        frame_firstUpdateTime = System.currentTimeMillis();
    }
    frame_currentUpdateTime = System.currentTimeMillis();

    int datasize = 0;
    cameraImageSize = parameters.getPreviewSize();
    YuvImage image = new YuvImage(frame, parameters.getPreviewFormat(), cameraImageSize.width,
            cameraImageSize.height, null);
    ByteArrayOutputStream tmpBuffer = new ByteArrayOutputStream();
    image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 90, tmpBuffer);
    Log.d(LOG_TAG, "compression took: "
            + (System.currentTimeMillis()-frame_currentUpdateTime));
    synchronized (frameLock) {
        frameBuffer = tmpBuffer.toByteArray();
        frameGeneratedTime = System.currentTimeMillis();
        frameID++;
        frameLock.notify();
    }
    datasize = tmpBuffer.size();
    frame_count++;
    frame_totalsize += datasize;
    if (frame_count % 50 == 0) {
        Log.d(LOG_TAG, "(IMG)\t" +
                "BW: " + 8.0 * frame_totalsize / (frame_currentUpdateTime - frame_firstUpdateTime) / 1000 +
                " Mbps\tCurrent FPS: " + 8.0 * datasize / (frame_currentUpdateTime - frame_prevUpdateTime) / 1000 + " Mbps\t" +
                "FPS: " + 1000.0 * frame_count / (frame_currentUpdateTime - frame_firstUpdateTime));
    }
    frame_prevUpdateTime = frame_currentUpdateTime;
    return tmpBuffer.toByteArray();
}
Project: osh-android    File: AndroidCameraOutputMJPEG.java
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
    long timeStamp = SystemClock.elapsedRealtimeNanos();

    // select current buffer
    YuvImage yuvImg = (data == imgBuf1) ? yuvImg1 : yuvImg2;

    // compress as JPEG
    jpegBuf.reset();
    yuvImg.compressToJpeg(imgArea, 90, jpegBuf);

    // release buffer for next frame
    camera.addCallbackBuffer(data);

    // generate new data record
    DataBlock newRecord;
    if (latestRecord == null)
        newRecord = dataStruct.createDataBlock();
    else
        newRecord = latestRecord.renew();

    // set time stamp
    double samplingTime = getJulianTimeStamp(timeStamp);
    newRecord.setDoubleValue(0, samplingTime);

    // set encoded data
    AbstractDataBlock frameData = ((DataBlockMixed)newRecord).getUnderlyingObject()[1];
    frameData.setUnderlyingObject(jpegBuf.toByteArray());

    // send event
    latestRecord = newRecord;
    latestRecordTime = System.currentTimeMillis();
    eventHandler.publishEvent(new SensorDataEvent(latestRecordTime, AndroidCameraOutputMJPEG.this, latestRecord));          
}
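
The callback above reuses two preallocated preview buffers (imgBuf1/imgBuf2) that were wrapped once by yuvImg1/yuvImg2, so nothing is allocated per frame; those fields and their setup are not shown in the excerpt. A sketch of how such a double-buffered setup is typically initialized (field names mirror the snippet, everything else is an assumption):

import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera;

import java.io.ByteArrayOutputStream;

// Sketch: preallocate two NV21 preview buffers and wrap each one in a YuvImage.
class PreviewBuffers {
    byte[] imgBuf1, imgBuf2;
    YuvImage yuvImg1, yuvImg2;
    Rect imgArea;
    final ByteArrayOutputStream jpegBuf = new ByteArrayOutputStream();

    void init(Camera camera, Camera.PreviewCallback callback, int width, int height) {
        // NV21 uses 12 bits per pixel, so each buffer holds width * height * 3 / 2 bytes.
        int bufSize = width * height * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;
        imgBuf1 = new byte[bufSize];
        imgBuf2 = new byte[bufSize];
        yuvImg1 = new YuvImage(imgBuf1, ImageFormat.NV21, width, height, null);
        yuvImg2 = new YuvImage(imgBuf2, ImageFormat.NV21, width, height, null);
        imgArea = new Rect(0, 0, width, height);
        // Frames are delivered only into registered buffers; the callback re-queues
        // each buffer with addCallbackBuffer() once it has finished with it.
        camera.addCallbackBuffer(imgBuf1);
        camera.addCallbackBuffer(imgBuf2);
        camera.setPreviewCallbackWithBuffer(callback);
    }
}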