Java Class android.graphics.ImageFormat Example Source Code

Project: humaniq-android    File: PhotoFragment.java
private void configureCamera() {
    final Camera.Parameters parameters = camera.getParameters();
    try {
        parameters.setPreviewFormat(ImageFormat.NV21);

        // set focus for video if present
        List<String> focusModes = parameters.getSupportedFocusModes();

        if (null != focusModes && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
        }

        // check if torch is present
        List<String> flashModes = parameters.getSupportedFlashModes();

        cameraFlashIsSupported = null != flashModes && flashModes.contains(Camera.Parameters.FLASH_MODE_TORCH);

        final Camera.Size bestPreviewSize = getBestPreviewSize();
        photoProcessor.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
        parameters.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
        camera.setParameters(parameters);
    } catch (RuntimeException exception) {
        Toast.makeText(getContext(), R.string.camera_configuration_failed, Toast.LENGTH_SHORT).show();
    }
}
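The getBestPreviewSize() helper called above is not shown in this snippet. A minimal sketch of such a chooser, assuming it simply picks the supported preview size with the largest area (the real project may use a different policy):

private Camera.Size getBestPreviewSize() {
    // Hypothetical sketch: pick the supported preview size with the largest area.
    Camera.Size best = null;
    for (Camera.Size size : camera.getParameters().getSupportedPreviewSizes()) {
        if (best == null || (long) size.width * size.height > (long) best.width * best.height) {
            best = size;
        }
    }
    return best;
}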
Project: Amazing    File: CameraActivity.java
private void setParams() {
    //LogUtil.e("preview set size=" + width + " : " + height);
    Camera.Parameters parameters = camera.getParameters();
    //        parameters.setPreviewSize(width, height);
    //        parameters.setPictureSize(width, height);
    parameters.setPreviewFormat(ImageFormat.NV21);
    camera.setDisplayOrientation(90);
    parameters.setRotation(90);

    List<Integer> supportedPreviewFormats = parameters.getSupportedPreviewFormats();
    for (Integer integer : supportedPreviewFormats) {
        //LogUtil.e("preview format=" + integer);
    }

    List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
    for (Camera.Size size : supportedPreviewSizes) {
        //LogUtil.e("preview size=" + size.width + " : " + size.height);
    }
    camera.setParameters(parameters);
}
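The loops above only log the supported formats and sizes. A defensive variant (a sketch, not taken from the project) would consult the supported list before committing to NV21:

// Sketch: only request NV21 if the device actually advertises it.
List<Integer> formats = parameters.getSupportedPreviewFormats();
if (formats != null && formats.contains(ImageFormat.NV21)) {
    parameters.setPreviewFormat(ImageFormat.NV21);
}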
Project: seeta4Android    File: FaceDetector.java
private void saveFace(final int x, final int y, final int r, final int b) {
    if (DEBUG) Log.d(TAG, "[saveFace()]");
    new Thread(new Runnable() {
        @Override
        public void run() {
            synchronized (mVideoSource) {
                mImageYuv = new YuvImage(mVideoSource, ImageFormat.NV21, CameraWrapper.IMAGE_WIDTH, CameraWrapper.IMAGE_HEIGHT, null);
            }
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            mImageYuv.compressToJpeg(new Rect(0, 0, CameraWrapper.IMAGE_WIDTH, CameraWrapper.IMAGE_HEIGHT), 100, stream);
            Bitmap bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());

            // Clamp the crop rectangle to the image bounds.
            int left = (x > 0) ? x : 0;
            int top = (y > 0) ? y : 0;
            int cropW = (r < CameraWrapper.IMAGE_WIDTH) ? (r - left) : (CameraWrapper.IMAGE_WIDTH - left - 1);
            int cropH = (b < CameraWrapper.IMAGE_HEIGHT) ? (b - top) : (CameraWrapper.IMAGE_HEIGHT - top - 1);

            mImage = Bitmap.createBitmap(bitmap, left, top, cropW, cropH, null, false);
            if (DEBUG) Log.d(TAG, "[saveFace()] x:" + x + "  y:" + y + "\n" +
                    "[saveFace()] h:" + mImage.getHeight() + "  w:" + mImage.getWidth());
            FaceUtil.saveBitmapToFile(mImage);
        }
    }).start();
}
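Clamping the crop rectangle is the error-prone part of saveFace(). An illustrative helper (not part of the project) that clamps a left/top/right/bottom face rectangle to the image bounds before calling Bitmap.createBitmap():

// Illustrative sketch: clamp an (x, y, r, b) rectangle to the image bounds.
private static Rect clampToImage(int x, int y, int r, int b, int imageWidth, int imageHeight) {
    int left = Math.max(x, 0);
    int top = Math.max(y, 0);
    int right = Math.min(r, imageWidth);
    int bottom = Math.min(b, imageHeight);
    return new Rect(left, top, right, bottom);
}

The crop width and height then follow as rect.width() and rect.height().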
Project: PeSanKita-android    File: BitmapUtil.java
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect,
                                    final boolean flipHorizontal)
    throws IOException
{
  byte[] rotated = rotateNV21(data, width, height, rotation, flipHorizontal);
  final int rotatedWidth  = rotation % 180 > 0 ? height : width;
  final int rotatedHeight = rotation % 180 > 0 ? width  : height;
  YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
                                       rotatedWidth, rotatedHeight, null);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  previewImage.compressToJpeg(croppingRect, 80, outputStream);
  byte[] bytes = outputStream.toByteArray();
  outputStream.close();
  return bytes;
}
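rotateNV21() is a private helper that is not reproduced here. A common implementation (a sketch assuming even width and height, flipping first and then rotating clockwise; the project's exact code may differ):

// Sketch: rotate/flip an NV21 frame by 0/90/180/270 degrees.
public static byte[] rotateNV21(byte[] yuv, int width, int height,
                                int rotation, boolean flipHorizontal) {
    if (rotation % 90 != 0 || rotation < 0 || rotation > 270) {
        throw new IllegalArgumentException("rotation must be 0, 90, 180 or 270");
    }
    if (rotation == 0 && !flipHorizontal) return yuv;

    final int frameSize = width * height;
    final int wOut = (rotation % 180 != 0) ? height : width;
    final byte[] out = new byte[yuv.length];

    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {
            int iSrc = flipHorizontal ? width - 1 - i : i;
            int iOut, jOut;
            switch (rotation) {
                case 90:  iOut = height - 1 - j;   jOut = iSrc;             break;
                case 180: iOut = width - 1 - iSrc; jOut = height - 1 - j;   break;
                case 270: iOut = j;                jOut = width - 1 - iSrc; break;
                default:  iOut = iSrc;             jOut = j;                break;
            }
            // Copy the luma sample.
            out[jOut * wOut + iOut] = yuv[j * width + i];
            // Copy the interleaved VU chroma pair once per 2x2 block.
            if ((j & 1) == 0 && (i & 1) == 0) {
                int vIn = frameSize + (j >> 1) * width + i;
                int vOut = frameSize + (jOut >> 1) * wOut + (iOut & ~1);
                out[vOut] = yuv[vIn];
                out[vOut + 1] = yuv[vIn + 1];
            }
        }
    }
    return out;
}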
Project: BuddyBook    File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
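mBytesToByteBuffer maps each array back to its wrapping ByteBuffer so the frame can be looked up when the camera delivers it. Outside this method, the returned array is registered with the camera roughly like this (a sketch with hypothetical field and method names, not the library's verbatim code):

// Sketch: recycle preview buffers through the camera (names are illustrative).
mCamera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        processFrame(data);              // hand the NV21 frame to the detector
        camera.addCallbackBuffer(data);  // return the buffer for the next frame
    }
});
mCamera.addCallbackBuffer(createPreviewBuffer(mPreviewSize));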
Project: WithYou    File: VideoVerify.java
private Bitmap decodeToBitMap(byte[] data) {
    try {
        YuvImage image = new YuvImage(data, ImageFormat.NV21, PREVIEW_WIDTH,
                PREVIEW_HEIGHT, null);
        if (image != null) {
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            image.compressToJpeg(new Rect(0, 0, PREVIEW_WIDTH, PREVIEW_HEIGHT),
                    80, stream);
            Bitmap bmp = BitmapFactory.decodeByteArray(
                    stream.toByteArray(), 0, stream.size());
            stream.close();
            return bmp ;
        }
    } catch (Exception ex) {
        Log.e("Sys", "Error:" + ex.getMessage());
    }
    return null;
}
Project: OCR-Reader    File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Project: Camera2Vision    File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;
    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //
    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }
    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Project: FaceDetectDemo    File: JavaCameraView.java
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
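For context, mYuvFrameData is an OpenCV Mat wrapping the raw preview frame. An NV21 byte[] from onPreviewFrame() is typically packed into such a Mat like this (a sketch assuming OpenCV's Java bindings):

// Sketch: NV21 stores height rows of luma followed by height/2 rows of interleaved
// chroma, so the wrapping Mat is (height * 3 / 2) x width with one 8-bit channel.
Mat yuv = new Mat(previewHeight + previewHeight / 2, previewWidth, CvType.CV_8UC1);
yuv.put(0, 0, nv21Bytes);
Mat rgba = new Mat();
Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);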
Project: BWS-Android    File: IntensityPlane.java
/**
 * Extracts the Y plane from a YUV_420_888 image and creates an IntensityPlane.
 * The actual plane data is copied into the new IntensityPlane object.
 *
 * @throws IllegalArgumentException if the provided image is not in the YUV_420_888 format
 */
@NonNull
public static IntensityPlane extract(@NonNull Image img) {
    if (img.getFormat() != ImageFormat.YUV_420_888) {
        throw new IllegalArgumentException("image format must be YUV_420_888");
    }

    Image.Plane[] planes = img.getPlanes();

    ByteBuffer buffer = planes[0].getBuffer();
    byte[] yPlane = new byte[buffer.remaining()];
    buffer.get(yPlane);

    int yRowStride = planes[0].getRowStride();

    return new IntensityPlane(img.getWidth(), img.getHeight(), yPlane, yRowStride);
}
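Note that the copy above keeps any row padding: on devices where getRowStride() exceeds the image width, yPlane holds rowStride bytes per row, which is why the stride is stored alongside the data. A consumer that needs a tightly packed plane can strip the padding (an illustrative sketch using the same names):

// Sketch: repack a padded Y plane into width-aligned rows.
byte[] packed = new byte[width * height];
for (int row = 0; row < height; row++) {
    System.arraycopy(yPlane, row * yRowStride, packed, row * width, width);
}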
Project: BWS-Android    File: FacialRecognitionFragment.java
/**
 * lazily initialize ImageReader and select preview size
 */
private void setupPreviewSizeAndImageReader() {
    if (previewSize == null) {
        previewSize = cameraHelper.selectPreviewSize(openCamera);
    }

    if (imageReader == null) {
        int maxImages = 2;  // should be at least 2 according to ImageReader.acquireLatestImage() documentation
        imageReader = ImageReader.newInstance(previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, maxImages);
        imageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image img = reader.acquireLatestImage();
                if (img != null) {

                    // Make an in-memory copy of the image so the reader's image can be closed
                    // as soon as possible. This helps the thread running the preview stay up to date.
                    IntensityPlane imgCopy = IntensityPlane.extract(img);
                    img.close();

                    int imageRotation = cameraHelper.getImageRotation(openCamera, getRelativeDisplayRotation());

                    presenter.onImageCaptured(imgCopy, imageRotation);
                }
            }
        }, null);
    }
}
Project: rtmp-rtsp-stream-client-java    File: Camera1Base.java
/**
 * Must be called after @prepareVideo and/or @prepareAudio.
 * This method overrides the resolution of @startPreview with the resolution set in
 * @prepareVideo. If you never called startPreview, this method starts the preview for you
 * at the resolution set in @prepareVideo.
 *
 * @param url of the stream like:
 * protocol://ip:port/application/streamName
 *
 * RTSP: rtsp://192.168.1.1:1935/live/pedroSG94
 * RTSPS: rtsps://192.168.1.1:1935/live/pedroSG94
 * RTMP: rtmp://192.168.1.1:1935/live/pedroSG94
 * RTMPS: rtmps://192.168.1.1:1935/live/pedroSG94
 */
public void startStream(String url) {
  if (openGlView != null && Build.VERSION.SDK_INT >= 18) {
    if (videoEncoder.getRotation() == 90 || videoEncoder.getRotation() == 270) {
      openGlView.setEncoderSize(videoEncoder.getHeight(), videoEncoder.getWidth());
    } else {
      openGlView.setEncoderSize(videoEncoder.getWidth(), videoEncoder.getHeight());
    }
    openGlView.startGLThread();
    openGlView.addMediaCodecSurface(videoEncoder.getInputSurface());
    cameraManager =
        new Camera1ApiManager(openGlView.getSurfaceTexture(), openGlView.getContext());
    cameraManager.prepareCamera(videoEncoder.getWidth(), videoEncoder.getHeight(),
        videoEncoder.getFps(), ImageFormat.NV21);
  }
  startStreamRtp(url);
  videoEncoder.start();
  audioEncoder.start();
  cameraManager.start();
  microphoneManager.start();
  streaming = true;
  onPreview = true;
}
Project: DeepImagePreview-Project    File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Project: Barcode-Reader    File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Project: Toodoo    File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Project: AndroidOCRFforID    File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Project: Moneycim    File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Project: Cable-Android    File: BitmapUtil.java
public static byte[] createFromNV21(@NonNull final byte[] data,
                                    final int width,
                                    final int height,
                                    int rotation,
                                    final Rect croppingRect,
                                    final boolean flipHorizontal)
    throws IOException
{
  byte[] rotated = rotateNV21(data, width, height, rotation, flipHorizontal);
  final int rotatedWidth  = rotation % 180 > 0 ? height : width;
  final int rotatedHeight = rotation % 180 > 0 ? width  : height;
  YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21,
                                       rotatedWidth, rotatedHeight, null);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  previewImage.compressToJpeg(croppingRect, 80, outputStream);
  byte[] bytes = outputStream.toByteArray();
  outputStream.close();
  return bytes;
}
Project: trust-wallet-android    File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Project: xbot_head    File: CommentaryFragment.java
private void startPreview() {
    try {
        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap configMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        Size previewSize = Util.getPreferredPreviewSize(
                configMap.getOutputSizes(ImageFormat.JPEG), textureView.getWidth(), textureView.getHeight());

        surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        Surface surface = new Surface(surfaceTexture);
        captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        captureBuilder.addTarget(surface);

        cameraDevice.createCaptureSession(Arrays.asList(surface), captureSessionCallback, backgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
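captureSessionCallback is not shown; once the session is configured, it has to submit the builder as a repeating request before any preview frames flow. A sketch of the usual pattern (using the same field names as above):

// Sketch: the usual StateCallback for a preview session.
private final CameraCaptureSession.StateCallback captureSessionCallback =
        new CameraCaptureSession.StateCallback() {
    @Override
    public void onConfigured(CameraCaptureSession session) {
        try {
            session.setRepeatingRequest(captureBuilder.build(), null, backgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void onConfigureFailed(CameraCaptureSession session) {
        // Preview cannot start; a real app should surface this to the user.
    }
};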
Project: Fuse    File: CameraSource.java
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based on
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen.  But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Project: NotifyTools    File: JavaCameraView.java
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
Project: react-native-camera-android-simple    File: CameraManager.java
public void setDefaultCameraParameters(Camera camera, Camera.CameraInfo cameraInfo) {
    Camera.Parameters parameters = camera.getParameters();

    parameters.setPictureFormat(ImageFormat.JPEG);

    List<Camera.Size> supportedSizes = parameters.getSupportedPictureSizes();
    Camera.Size pictureSize = getBestSize(supportedSizes, 0);
    parameters.setPictureSize(pictureSize.width, pictureSize.height);

    float whRatio = (float) pictureSize.width / pictureSize.height;

    List<Camera.Size> previewSupportedSizes = parameters.getSupportedPreviewSizes();
    Camera.Size previewSize = getBestSize(previewSupportedSizes, whRatio);
    parameters.setPreviewSize(previewSize.width, previewSize.height);

    List<String> supportedFocusModes = camera.getParameters().getSupportedFocusModes();
    boolean hasAutoFocus = supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO);

    if(hasAutoFocus) {
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
    }

    if(cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
        parameters.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO);
    }

    List<String> supportedSceneModes = camera.getParameters().getSupportedSceneModes();
    boolean hasAutoScene = supportedSceneModes != null && supportedSceneModes.contains(Camera.Parameters.SCENE_MODE_AUTO);
    if(hasAutoScene) {
        parameters.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
    }

    parameters.setColorEffect(Camera.Parameters.EFFECT_NONE);

    int orientation = cameraInfo.orientation;
    parameters.setRotation(orientation);

    camera.setParameters(parameters);
}
Project: SIGHT-For-the-Blind    File: CameraHandler.java
/**
 * Initialize the camera device
 */
public void initializeCamera(Context context,
                             Handler backgroundHandler,
                             ImageReader.OnImageAvailableListener imageAvailableListener) {
    // Discover the camera instance
    CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    String[] camIds = {};
    try {
        camIds = manager.getCameraIdList();
    } catch (CameraAccessException e) {
        Log.d(TAG, "Cam access exception getting IDs", e);
    }
    if (camIds.length < 1) {
        Log.d(TAG, "No cameras found");
        return;
    }
    String id = camIds[0];
    Log.d(TAG, "Using camera id " + id);
    // Initialize the image processor
    mImageReader = ImageReader.newInstance(IMAGE_WIDTH, IMAGE_HEIGHT,
            ImageFormat.JPEG, MAX_IMAGES);
    mImageReader.setOnImageAvailableListener(
            imageAvailableListener, backgroundHandler);
    // Open the camera resource
    try {
        manager.openCamera(id, mStateCallback, backgroundHandler);
    } catch (CameraAccessException cae) {
        Log.d(TAG, "Camera access exception", cae);
    }
}
Project: droidCam    File: Camera2Api23.java
@Override
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    // Try to get hi-res output sizes
    android.util.Size[] outputSizes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);
    if (outputSizes != null) {
        for (android.util.Size size : outputSizes) {
            sizes.add(new Size(size.getWidth(), size.getHeight()));
        }
    }
    if (sizes.isEmpty()) {
        super.collectPictureSizes(sizes, map);
    }
}
Project: libRtmp    File: AndroidUntil.java
public static void setPreviewFormat(Camera camera, Camera.Parameters parameters) throws CameraNotSupportException{
    // Set the image format for preview callbacks.
    try {
        parameters.setPreviewFormat(ImageFormat.NV21);
        camera.setParameters(parameters);
    } catch (Exception e) {
        throw new CameraNotSupportException();
    }
}
Project: seeta4Android    File: CameraWrapper.java
private void initCamera() {
    if (this.mCamera != null) {
        this.mCameraParamters = this.mCamera.getParameters();
        this.mCameraParamters.setPreviewFormat(ImageFormat.NV21);
        this.mCameraParamters.setFlashMode("off");
        this.mCameraParamters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
        this.mCameraParamters.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
        Point p = MyApplication.getBestCameraResolution(this.mCameraParamters, MyApplication.getScreenMetrics());
        IMAGE_WIDTH = p.x;
        IMAGE_HEIGHT = p.y;
        this.mCameraParamters.setPreviewSize(IMAGE_WIDTH, IMAGE_HEIGHT);
        mCameraPreviewCallback = new CameraPreviewCallback();
        byte[] a = new byte[IMAGE_WIDTH * IMAGE_HEIGHT * 3 / 2];
        byte[] b = new byte[IMAGE_WIDTH * IMAGE_HEIGHT * 3 / 2];
        byte[] c = new byte[IMAGE_WIDTH * IMAGE_HEIGHT * 3 / 2];
        mCamera.addCallbackBuffer(a);
        mCamera.addCallbackBuffer(b);
        mCamera.addCallbackBuffer(c);
        mCamera.setPreviewCallbackWithBuffer(mCameraPreviewCallback);
        List<String> focusModes = this.mCameraParamters.getSupportedFocusModes();
        if (focusModes.contains("continuous-video")) {
            this.mCameraParamters
                    .setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
        }
        this.mCamera.setParameters(this.mCameraParamters);
        this.mCamera.startPreview();

        this.mIsPreviewing = true;
    }
}
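The three callback buffers are sized IMAGE_WIDTH * IMAGE_HEIGHT * 3 / 2, which hard-codes NV21's 12 bits per pixel. An equivalent, format-agnostic computation (a sketch) derives the size from the configured preview format:

// Sketch: size the callback buffer from the actual preview format.
int bitsPerPixel = ImageFormat.getBitsPerPixel(mCameraParamters.getPreviewFormat());
int bufferSize = IMAGE_WIDTH * IMAGE_HEIGHT * bitsPerPixel / 8;
mCamera.addCallbackBuffer(new byte[bufferSize]);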
Project: heifreader    File: HeifReader.java
private static Bitmap renderHevcImageWithFormat(ByteBuffer bitstream, ImageInfo info, int imageFormat) throws FormatFallbackException {
    try (ImageReader reader = ImageReader.newInstance(info.size.getWidth(), info.size.getHeight(), imageFormat, 1)) {
        renderHevcImage(bitstream, info, reader.getSurface());
        Image image = null;
        try {
            try {
                image = reader.acquireNextImage();
            } catch (UnsupportedOperationException ex) {
                throw new FormatFallbackException(ex);
            }

            switch (image.getFormat()) {
                case ImageFormat.YUV_420_888:
                case ImageFormat.YV12:
                    return convertYuv420ToBitmap(image);
                case ImageFormat.RGB_565:
                    return convertRgb565ToBitmap(image);
                default:
                    throw new RuntimeException("unsupported image format(" + image.getFormat() + ")");
            }
        } finally {
            if (image != null) {
                image.close();
            }
        }
    }
}
Project: heifreader    File: HeifReader.java
private static Bitmap convertYuv420ToBitmap(Image image) {
    RenderScript rs = mRenderScript;
    final int width = image.getWidth();
    final int height = image.getHeight();

    // prepare input Allocation for RenderScript
    Type.Builder inType = new Type.Builder(rs, Element.U8(rs)).setX(width).setY(height).setYuvFormat(ImageFormat.YV12);
    Allocation inAlloc = Allocation.createTyped(rs, inType.create(), Allocation.USAGE_SCRIPT);
    byte[] rawBuffer = new byte[inAlloc.getBytesSize()];
    int lumaSize = width * height;
    int chromaSize = (width / 2) * (height / 2);
    Image.Plane[] planes = image.getPlanes();
    planes[0].getBuffer().get(rawBuffer, 0, lumaSize);
    planes[1].getBuffer().get(rawBuffer, lumaSize, chromaSize);
    planes[2].getBuffer().get(rawBuffer, lumaSize + chromaSize, chromaSize);
    inAlloc.copyFromUnchecked(rawBuffer);

    // prepare output Allocation for RenderScript
    Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    Allocation outAlloc = Allocation.createFromBitmap(rs, bmp, Allocation.MipmapControl.MIPMAP_NONE, Allocation.USAGE_SCRIPT | Allocation.USAGE_SHARED);

    // convert YUV to RGB colorspace
    ScriptC_yuv2rgb converter = new ScriptC_yuv2rgb(rs);
    converter.set_gYUV(inAlloc);
    converter.forEach_convert(outAlloc);
    outAlloc.copyTo(bmp);
    return bmp;
}
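For NV21 input, the framework also ships a ready-made intrinsic that avoids a custom kernel like ScriptC_yuv2rgb. A sketch assuming a single NV21 byte[] (nv21Bytes) rather than the three-plane copy above:

// Sketch: NV21 -> ARGB_8888 via the built-in RenderScript intrinsic.
ScriptIntrinsicYuvToRGB yuvToRgb = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs));
Type yuvType = new Type.Builder(rs, Element.U8(rs))
        .setX(width).setY(height).setYuvFormat(ImageFormat.NV21).create();
Allocation in = Allocation.createTyped(rs, yuvType, Allocation.USAGE_SCRIPT);
in.copyFrom(nv21Bytes);
Allocation out = Allocation.createFromBitmap(rs, bmp);
yuvToRgb.setInput(in);
yuvToRgb.forEach(out);
out.copyTo(bmp);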
Project: Microsphere    File: JavaCameraView.java
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
Project: FamilyBond    File: ImageUtil.java
public static byte[] imageToByteArray(Image image) {
    byte[] data = null;
    if (image.getFormat() == ImageFormat.JPEG) {
        Image.Plane[] planes = image.getPlanes();
        ByteBuffer buffer = planes[0].getBuffer();
        data = new byte[buffer.capacity()];
        buffer.get(data);
        return data;
    } else if (image.getFormat() == ImageFormat.YUV_420_888) {
        data = NV21toJPEG(
                YUV_420_888toNV21(image),
                image.getWidth(), image.getHeight());
    }
    return data;
}
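The NV21toJPEG and YUV_420_888toNV21 helpers are referenced but not shown. A common, simplified implementation (a sketch that ignores row and pixel stride padding; real devices may need a stride-aware copy):

// Sketch: naive YUV_420_888 -> NV21, relying on the common case where the
// chroma planes are interleaved in memory. NV21 expects V before U.
private static byte[] YUV_420_888toNV21(Image image) {
    ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
    ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
    ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();

    int ySize = yBuffer.remaining();
    int uSize = uBuffer.remaining();
    int vSize = vBuffer.remaining();

    byte[] nv21 = new byte[ySize + uSize + vSize];
    yBuffer.get(nv21, 0, ySize);
    vBuffer.get(nv21, ySize, vSize);
    uBuffer.get(nv21, ySize + vSize, uSize);
    return nv21;
}

private static byte[] NV21toJPEG(byte[] nv21, int width, int height) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    yuv.compressToJpeg(new Rect(0, 0, width, height), 100, out);
    return out.toByteArray();
}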
Project: Eye    File: HomeActivity.java
/**
 * Initialises the output surfaces for the camera's preview.
 * There will be two output surfaces -
 * 1) mSurfaceView : The surface to just show the preview frame.
 * 2) mImageReader : The surface to get the actual pixel image
 * data of the preview frame.
 */
private void setupOutputSurfaces() {

    outputSurfaces = new ArrayList<>(2);

    // For the live preview.
    mSurfaceView.getHolder().setFixedSize(screenMaxX, screenMaxY);
    outputSurfaces.add(mSurfaceView.getHolder().getSurface());

    // For extracting the image.
    mImageReader = ImageReader.newInstance(screenMaxX, screenMaxY,
            ImageFormat.YUV_420_888, maxAcquired);
    mImageReader.setOnImageAvailableListener(getImageAvailableListener(), null);
    outputSurfaces.add(mImageReader.getSurface());
}
Project: PXLSRT    File: Camera2Api23.java
@Override
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    // Try to get hi-res output sizes
    android.util.Size[] outputSizes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);
    if (outputSizes != null) {
        for (android.util.Size size : outputSizes) {
            sizes.add(new Size(size.getWidth(), size.getHeight()));
        }
    }
    if (sizes.isEmpty()) {
        super.collectPictureSizes(sizes, map);
    }
}
Project: android-imaging-utils    File: JavaCameraView.java
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
Project: hella-renderscript    File: RsUtil.java
@RequiresApi(18)
public static Type createYuvType(RenderScript rs, int x, int y, int yuvFormat) {
    boolean supported = yuvFormat == ImageFormat.NV21 || yuvFormat == ImageFormat.YV12;
    if (Build.VERSION.SDK_INT >= 19) {
        supported |= yuvFormat == ImageFormat.YUV_420_888;
    }
    if (!supported) {
        throw new IllegalArgumentException("invalid yuv format: " + yuvFormat);
    }
    return new Type.Builder(rs, createYuvElement(rs)).setX(x).setY(y).setYuvFormat(yuvFormat)
            .create();
}
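Typical usage of createYuvType() (a sketch; nv21Bytes is assumed to come from onPreviewFrame()):

// Sketch: allocate an NV21-shaped Allocation and fill it from a preview frame.
Type yuvType = RsUtil.createYuvType(rs, previewWidth, previewHeight, ImageFormat.NV21);
Allocation yuvAlloc = Allocation.createTyped(rs, yuvType, Allocation.USAGE_SCRIPT);
yuvAlloc.copyFrom(nv21Bytes);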
Project: SmartMath    File: CameraPreview.java
public void setCamera(Camera camera) {
    mCamera = camera;
    if (mCamera != null) {
        Parameters parameters = mCamera.getParameters();
        mSupportedPreviewSizes = parameters.getSupportedPreviewSizes();

        // Set the preview format to NV21 if supported (NV21, i.e. yuv420sp, is the default).
        List<Integer> formatsList = parameters.getSupportedPreviewFormats();
        if (formatsList != null && formatsList.contains(ImageFormat.NV21)) {
            parameters.setPreviewFormat(ImageFormat.NV21);
        }

        // Set the focus mode depending on what is supported; FOCUS_MODE_AUTO is preferred.
        // Supported modes were already checked in the main activity.
        if (msnFocusMode == 2) {
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY);
        } else if (msnFocusMode == 1) {
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
        } else {
            // Auto focus by default.
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
        }

        // Exposure is left unadjusted; screen mode did not seem to bring much benefit.
        parameters.setExposureCompensation(0);

        List<String> scenesList = parameters.getSupportedSceneModes();
        if (scenesList != null && scenesList.contains(Camera.Parameters.SCENE_MODE_STEADYPHOTO)) {
            // SCENE_MODE_STEADYPHOTO may crash on some devices.
            parameters.setSceneMode(Camera.Parameters.SCENE_MODE_STEADYPHOTO);
        }
        boolean bSuccessful = setCameraParams(mCamera, parameters);

        requestLayout();
    }
}
Project: androidthings-imageclassifier    File: CameraHandler.java
/**
 * Initialize the camera device
 */
public void initializeCamera(Context context,
                             Handler backgroundHandler,
                             ImageReader.OnImageAvailableListener imageAvailableListener) {
    // Discover the camera instance
    CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    String[] camIds = {};
    try {
        camIds = manager.getCameraIdList();
    } catch (CameraAccessException e) {
        Log.d(TAG, "Cam access exception getting IDs", e);
    }
    if (camIds.length < 1) {
        Log.d(TAG, "No cameras found");
        return;
    }
    String id = camIds[0];
    Log.d(TAG, "Using camera id " + id);
    // Initialize the image processor
    mImageReader = ImageReader.newInstance(IMAGE_WIDTH, IMAGE_HEIGHT,
            ImageFormat.JPEG, MAX_IMAGES);
    mImageReader.setOnImageAvailableListener(
            imageAvailableListener, backgroundHandler);
    // Open the camera resource
    try {
        manager.openCamera(id, mStateCallback, backgroundHandler);
    } catch (CameraAccessException cae) {
        Log.d(TAG, "Camera access exception", cae);
    }
}
Project: Fatigue-Detection    File: CameraEngine.java
public void openCamera(boolean facingFront) {
    synchronized (this) {
        int facing=facingFront? Camera.CameraInfo.CAMERA_FACING_FRONT:Camera.CameraInfo.CAMERA_FACING_BACK;
        currentCameraId=getCameraIdWithFacing(facing);
        camera = Camera.open(currentCameraId);
        if (camera != null) {
            camera.setPreviewCallbackWithBuffer(this);
            initRotateDegree(currentCameraId);
            mParams = camera.getParameters();
            List<Camera.Size> supportedPictureSizesList=mParams.getSupportedPictureSizes();
            List<Camera.Size> supportedVideoSizesList=mParams.getSupportedVideoSizes();
            List<Camera.Size> supportedPreviewSizesList=mParams.getSupportedPreviewSizes();
            Logger.logCameraSizes(supportedPictureSizesList);
            Logger.logCameraSizes(supportedVideoSizesList);
            Logger.logCameraSizes(supportedPreviewSizesList);

            previewSize=choosePreferredSize(supportedPreviewSizesList,preferredRatio);
            Camera.Size photoSize=choosePreferredSize(supportedPictureSizesList,preferredRatio);

            frameHeight=previewSize.width;
            frameWidth=previewSize.height;
            Log.d(TAG, "openCamera: choose preview size"+previewSize.height+"x"+previewSize.width);
            mParams.setPreviewSize(frameHeight,frameWidth);

            mParams.setPictureSize(photoSize.width,photoSize.height);
            Log.d(TAG, "openCamera: choose photo size"+photoSize.height+"x"+photoSize.width);

            //mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
            int size = frameWidth*frameHeight;
            size = size * ImageFormat.getBitsPerPixel(mParams.getPreviewFormat()) / 8;
            if (mBuffer==null || mBuffer.length!=size)
                mBuffer = new byte[size];
            mFrameChain[0].init(size);
            mFrameChain[1].init(size);
            camera.addCallbackBuffer(mBuffer);
            camera.setParameters(mParams);
            cameraOpened=true;
        }
    }
}