/**
 * Inspects the AF state carried by a capture result.
 *
 * <p>Currently a no-op: the preview/waiting-for-capture state machine that
 * previously lived here is disabled. The method is kept as the hook invoked
 * from the session's CaptureCallback so that capture-state handling can be
 * re-enabled by filling in this body.
 */
private void checkState(CaptureResult result) {
    // Intentionally empty — capture-state handling is currently disabled.
}
private void detectFaces(CaptureResult captureResult) { Integer mode = captureResult.get(CaptureResult.STATISTICS_FACE_DETECT_MODE); if (isViewAvailable() && mode != null) { android.hardware.camera2.params.Face[] faces = captureResult.get(CaptureResult.STATISTICS_FACES); if (faces != null) { Log.i(TAG, "faces : " + faces.length + " , mode : " + mode); for (android.hardware.camera2.params.Face face : faces) { Rect faceBounds = face.getBounds(); // Once processed, the result is sent back to the View presenterView.onFaceDetected(mapCameraFaceToCanvas(faceBounds, face.getLeftEyePosition(), face.getRightEyePosition())); } } } }
/** * Generate a human-readable string of the given capture request and write * it to the given file. */ public static void toFile(String title, CameraMetadata<?> metadata, File file) { try { // Will append if the file already exists. FileWriter writer = new FileWriter(file, true); if (metadata instanceof CaptureRequest) { dumpMetadata(title, (CaptureRequest) metadata, writer); } else if (metadata instanceof CaptureResult) { dumpMetadata(title, (CaptureResult) metadata, writer); } else { writer.close(); throw new IllegalArgumentException("Cannot generate debug data from type " + metadata.getClass().getName()); } writer.close(); } catch (IOException ex) { Log.e(TAG, "Could not write capture data to file.", ex); } }
/**
 * Convert reported camera2 AF state to OneCamera AutoFocusState.
 *
 * <p>Any camera2 value not listed below (including CONTROL_AF_STATE_INACTIVE)
 * maps to INACTIVE.
 */
public static OneCamera.AutoFocusState stateFromCamera2State(int state) {
    // Scanning states.
    if (state == CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN) {
        return OneCamera.AutoFocusState.ACTIVE_SCAN;
    }
    if (state == CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN) {
        return OneCamera.AutoFocusState.PASSIVE_SCAN;
    }
    // Passive (continuous-AF) terminal states.
    if (state == CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED) {
        return OneCamera.AutoFocusState.PASSIVE_FOCUSED;
    }
    if (state == CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED) {
        return OneCamera.AutoFocusState.PASSIVE_UNFOCUSED;
    }
    // Active (trigger-locked) terminal states.
    if (state == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED) {
        return OneCamera.AutoFocusState.ACTIVE_FOCUSED;
    }
    if (state == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
        return OneCamera.AutoFocusState.ACTIVE_UNFOCUSED;
    }
    return OneCamera.AutoFocusState.INACTIVE;
}
/**
 * Utility function: converts CaptureResult.CONTROL_AF_STATE to String.
 *
 * <p>Unrecognized values yield "unknown".
 */
private static String controlAFStateToString(int controlAFState) {
    switch (controlAFState) {
        // Idle / scanning.
        case CaptureResult.CONTROL_AF_STATE_INACTIVE:
            return "inactive";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN:
            return "passive_scan";
        case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN:
            return "active_scan";
        // Passive terminal states.
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
            return "passive_focused";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
            return "passive_unfocused";
        // Locked terminal states.
        case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
            return "focus_locked";
        case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            return "not_focus_locked";
        default:
            return "unknown";
    }
}
@Override
public void onImageCaptured(Image image, TotalCaptureResult captureResult) {
    // NOTE(review): SENSOR_TIMESTAMP is auto-unboxed here; a null value
    // would NPE — confirm the pipeline guarantees its presence on total
    // results.
    long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
    // We should only capture the image if it's more recent than the
    // latest one. Synchronization is necessary since this method is
    // called on {@link #mImageSaverThreadPool}.
    synchronized (mLastCapturedImageTimestamp) {
        if (timestamp > mLastCapturedImageTimestamp.get()) {
            mLastCapturedImageTimestamp.set(timestamp);
        } else {
            // There was a more recent (or identical) image which has
            // begun being saved, so abort.
            return;
        }
    }
    // The winning image is claimed; capture is no longer "in progress"
    // for the ready-state machine.
    mReadyStateManager.setInput(
            ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);
    mSession.startEmpty();
    savePicture(image, mParams, mSession);
    mParams.callback.onPictureTaken(mSession);
    Log.v(TAG, "Image saved. Frame number = " + captureResult.getFrameNumber());
}
private void process(CaptureResult captureResult) { switch (mCaptureState) { case STATE_PREVIEW: // Do nothing break; case STATE_WAIT_LOCK: mCaptureState = STATE_PREVIEW; Integer afState = captureResult.get(CaptureResult.CONTROL_AF_STATE); if(afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) { Toast.makeText(getApplicationContext(), "AF Locked!", Toast.LENGTH_SHORT).show(); startStillCaptureRequest(); } break; } }
@Override
public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
        CaptureResult partialResult) {
    // Remember the active session, then run the partial result through the
    // capture-state check.
    mSession = session;
    Log.d(TAG, "mSessionCaptureCallback, onCaptureProgressed");
    checkState(partialResult);
}
/**
 * Bundles everything needed to persist one captured image: the image and
 * its capture metadata, the destination file, and the resources that must
 * stay alive until the save completes.
 */
private ImageSaver(Image image, File file, CaptureResult result,
        CameraCharacteristics characteristics, Context context,
        RefCountedAutoCloseable<ImageReader> reader) {
    mContext = context;
    mReader = reader;
    mImage = image;
    mCaptureResult = result;
    mCharacteristics = characteristics;
    mFile = file;
}
/**
 * Switches the live preview to the MONO color effect by rebuilding the
 * preview request and re-issuing the repeating request on the session.
 */
public void qq() {
    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE,
            CaptureResult.CONTROL_EFFECT_MODE_MONO);
    mPreviewRequest = mPreviewRequestBuilder.build();
    try {
        // null handler: callbacks arrive on the current thread's Looper.
        mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, null);
    } catch (CameraAccessException e) {
        // Log instead of printStackTrace() so the failure lands in logcat
        // with the rest of this class's output.
        Log.e(TAG, "Failed to set repeating preview request.", e);
    }
}
@Override
public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
        CaptureResult partialResult) {
    // Capture progress update (invoked repeatedly as partial results arrive).
    process(partialResult);
}
/** * Complain if CONTROL_AF_STATE is not present in result. * Could indicate bug in API implementation. */ public static boolean checkControlAfState(CaptureResult result) { boolean missing = result.get(CaptureResult.CONTROL_AF_STATE) == null; if (missing) { // throw new IllegalStateException("CaptureResult missing CONTROL_AF_STATE."); Log.e(TAG, "\n!!!! TotalCaptureResult missing CONTROL_AF_STATE. !!!!\n "); } return !missing; }
/** * Complain if LENS_STATE is not present in result. * Could indicate bug in API implementation. */ public static boolean checkLensState(CaptureResult result) { boolean missing = result.get(CaptureResult.LENS_STATE) == null; if (missing) { // throw new IllegalStateException("CaptureResult missing LENS_STATE."); Log.e(TAG, "\n!!!! TotalCaptureResult missing LENS_STATE. !!!!\n "); } return !missing; }
/**
 * Logs the AF state, lens focus distance and lens state carried by the
 * given capture result, plus the originating request's tag if one was set.
 * Bails out early (the check helpers log loudly) when CONTROL_AF_STATE or
 * LENS_STATE is missing.
 */
public static void logExtraFocusInfo(CaptureResult result) {
    if(!checkControlAfState(result) || !checkLensState(result)) {
        return;
    }
    // Tag is whatever the request builder attached; may be null.
    Object tag = result.getRequest().getTag();
    // NOTE(review): LENS_FOCUS_DISTANCE is not null-checked here; a null
    // Float is formatted as "null" by java.util.Formatter rather than
    // throwing — confirm that output is acceptable.
    Log.v(TAG, String.format("af_state:%-17s lens_foc_dist:%.3f lens_state:%-10s %s",
            controlAFStateToString(result.get(CaptureResult.CONTROL_AF_STATE)),
            result.get(CaptureResult.LENS_FOCUS_DISTANCE),
            lensStateToString(result.get(CaptureResult.LENS_STATE)),
            (tag == null) ? "" : "[" + tag +"]"
            ));
}
/**
 * Utility function: converts CaptureResult.LENS_STATE to String.
 *
 * <p>Unrecognized values yield "unknown".
 */
private static String lensStateToString(int lensState) {
    if (lensState == CaptureResult.LENS_STATE_STATIONARY) {
        return "stationary";
    }
    if (lensState == CaptureResult.LENS_STATE_MOVING) {
        return "moving";
    }
    return "unknown";
}
@Override
public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
        final CaptureResult partialResult) {
    long frameNumber = partialResult.getFrameNumber();
    // Update mMetadata for whichever keys are present, if this frame is
    // supplying newer values.
    for (final Key<?> key : partialResult.getKeys()) {
        Pair<Long, Object> oldEntry = mMetadata.get(key);
        final Object oldValue = (oldEntry != null) ? oldEntry.second : null;
        // Partial results can arrive out of order; keep the value from the
        // later frame.
        boolean newerValueAlreadyExists = oldEntry != null
                && frameNumber < oldEntry.first;
        if (newerValueAlreadyExists) {
            continue;
        }
        final Object newValue = partialResult.get(key);
        mMetadata.put(key, new Pair<Long, Object>(frameNumber, newValue));
        // If the value has changed, call the appropriate listeners, if
        // any exist.
        // NOTE(review): this is a reference (==) comparison, not equals();
        // equal-but-distinct boxed values will still notify listeners —
        // confirm that is intended.
        if (oldValue == newValue || !mMetadataChangeListeners.containsKey(key)) {
            continue;
        }
        for (final MetadataChangeListener listener :
                mMetadataChangeListeners.get(key)) {
            Log.v(TAG, "Dispatching to metadata change listener for key: "
                    + key.toString());
            // Listeners run on mListenerHandler's thread, not the camera
            // callback thread.
            mListenerHandler.post(new Runnable() {
                @Override
                public void run() {
                    listener.onImageMetadataChange(key, oldValue, newValue,
                            partialResult);
                }
            });
        }
    }
}
/**
 * This method takes appropriate action if camera2 AF state changes.
 * <ol>
 * <li>Reports changes in camera2 AF state to OneCamera.FocusStateListener.</li>
 * <li>Take picture after AF scan if mTakePictureWhenLensIsStopped true.</li>
 * </ol>
 */
private void autofocusStateChangeDispatcher(CaptureResult result) {
    // Ignore frames older than the last one we handled, and results that
    // carry no AF state at all.
    if (result.getFrameNumber() < mLastControlAfStateFrameNumber
            || result.get(CaptureResult.CONTROL_AF_STATE) == null) {
        return;
    }
    mLastControlAfStateFrameNumber = result.getFrameNumber();
    // Convert to OneCamera mode and state.
    AutoFocusState resultAFState = AutoFocusHelper.
            stateFromCamera2State(result.get(CaptureResult.CONTROL_AF_STATE));
    // TODO: Consider using LENS_STATE.
    // Any focused/unfocused terminal state means the AF scan has finished.
    boolean lensIsStopped = resultAFState == AutoFocusState.ACTIVE_FOCUSED ||
            resultAFState == AutoFocusState.ACTIVE_UNFOCUSED ||
            resultAFState == AutoFocusState.PASSIVE_FOCUSED ||
            resultAFState == AutoFocusState.PASSIVE_UNFOCUSED;
    if (mTakePictureWhenLensIsStopped && lensIsStopped) {
        // Take the shot.
        mCameraHandler.post(mTakePictureRunnable);
        // One-shot: clear the flag so only a single picture is taken.
        mTakePictureWhenLensIsStopped = false;
    }
    // Report state change when AF state has changed.
    if (resultAFState != mLastResultAFState && mFocusStateListener != null) {
        mFocusStateListener.onFocusStatusUpdate(resultAFState, result.getFrameNumber());
    }
    mLastResultAFState = resultAFState;
}
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
    Log.d(TAG, "CrPreviewSessionListener.onConfigured");
    mPreviewSession = cameraCaptureSession;
    try {
        // This line triggers the preview. A |listener| is registered to receive the actual
        // capture result details. A CrImageReaderListener will be triggered every time a
        // downloaded image is ready. Since |handler| is null, we'll work on the current
        // Thread Looper.
        mPreviewSession.setRepeatingRequest(
                mPreviewRequest, new CameraCaptureSession.CaptureCallback() {
                    @Override
                    public void onCaptureCompleted(CameraCaptureSession session,
                            CaptureRequest request, TotalCaptureResult result) {
                        // NOTE(review): SENSOR_EXPOSURE_TIME may be null on
                        // devices that do not report it — confirm the type of
                        // mLastExposureTimeNs tolerates a null assignment.
                        mLastExposureTimeNs = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
                    }
                }, null);
    } catch (CameraAccessException | SecurityException | IllegalStateException
            | IllegalArgumentException ex) {
        Log.e(TAG, "setRepeatingRequest: ", ex);
        return;
    }
    // Now wait for trigger on CrPreviewReaderListener.onImageAvailable();
    nativeOnStarted(mNativeVideoCaptureDeviceAndroid);
    changeCameraStateAndNotify(CameraState.STARTED);
}
private void process(CaptureResult result) { switch (mState) { case STATE_PREVIEW: { // We have nothing to do when the camera preview is working normally. break; } } }
@Override
public void onCaptureProgressed(
        @NonNull CameraCaptureSession session,
        @NonNull CaptureRequest request,
        @NonNull CaptureResult partialResult) {
    // Run each partial result through the same state machine used for
    // completed captures.
    process(partialResult);
}
private void process(CaptureResult result) { int afState = result.get(CaptureResult.CONTROL_AF_STATE); if (CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED == afState) { areWeFocused = true; getActivity().runOnUiThread(new Runnable() { @Override public void run() { button.setBackgroundColor(getActivity().getResources().getColor(R.color.blue)); button.setText("Focused"); } }); } else { areWeFocused = false; getActivity().runOnUiThread(new Runnable() { @Override public void run() { button.setBackgroundColor(getActivity().getResources().getColor(R.color.colorAccent)); button.setText("Not focused"); } }); } if (shouldCapture) { if (areWeFocused) { shouldCapture = false; captureStillPicture(); } } // switch (mState) { // case STATE_PREVIEW: { // Log.d(TAG, "STATE_PREVIEW"); // // We have nothing to do when the camera preview is working normally. // break; // } // case STATE_WAITING_LOCK: { // Log.d(TAG, "STATE_WAITING_LOCK"); // Integer afState = result.get(CaptureResult.CONTROL_AF_STATE); // if (afState == null) { // captureStillPicture(); // } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState || // CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState || // CaptureResult.CONTROL_AF_STATE_INACTIVE == afState /*add this*/) { // Log.d(TAG, "STATE_WAITING_LOCK222"); // // CONTROL_AE_STATE can be null on some devices // Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); // if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) { // mState = STATE_PICTURE_TAKEN; // captureStillPicture(); // } else { // runPrecaptureSequence(); // } // } // break; // } // case STATE_WAITING_PRECAPTURE: { // Log.d(TAG, "STATE_WAITING_PRECAPTURE"); // // CONTROL_AE_STATE can be null on some devices // Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); // if (aeState == null || // aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE || // aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) { // mState = 
STATE_WAITING_NON_PRECAPTURE; // } // break; // } // case STATE_WAITING_NON_PRECAPTURE: { // Log.d(TAG, "STATE_WAITING_NON_PRECAPTURE"); // // CONTROL_AE_STATE can be null on some devices // Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); // if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) { // mState = STATE_PICTURE_TAKEN; // captureStillPicture(); // } // break; // } // } }
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
        @NonNull CaptureRequest request,
        @NonNull CaptureResult partialResult) {
    // Partial results are handled exactly like completed ones.
    process(partialResult);
}
@Override
public void onCaptureProgressed(
        final CameraCaptureSession session,
        final CaptureRequest request,
        final CaptureResult partialResult) {
    // Intentionally empty: partial results are not used by this callback.
}
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
        @NonNull CaptureRequest request,
        @NonNull CaptureResult partialResult) {
    // Delegate partial results to the shared capture-result processor.
    processCaptureResult(partialResult);
}
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
        @NonNull CaptureRequest request,
        @NonNull CaptureResult partialResult) {
    // Partial results are only logged here; no further processing.
    Log.d(TAG, "Partial result");
}
@Override
public void onCaptureProgressed(
        final CameraCaptureSession session,
        final CaptureRequest request,
        final CaptureResult partialResult) {
    // Intentionally empty: partial results are not used by this callback.
}
private void process(CaptureResult result) { switch (mState) { case STATE_PREVIEW: { // We have nothing to do when the camera preview is working normally. break; } /* case STATE_WAITING_LOCK: { Integer afState = result.get(CaptureResult.CONTROL_AF_STATE); if (afState == null) { //captureStillPicture(); } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState || CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) { // CONTROL_AE_STATE can be null on some devices Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) { mState = STATE_PICTURE_TAKEN; //captureStillPicture(); } else { //runPrecaptureSequence(); } } break; } case STATE_WAITING_PRECAPTURE: { // CONTROL_AE_STATE can be null on some devices Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE || aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) { mState = STATE_WAITING_NON_PRECAPTURE; } break; } case STATE_WAITING_NON_PRECAPTURE: { // CONTROL_AE_STATE can be null on some devices Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) { mState = STATE_PICTURE_TAKEN; //captureStillPicture(); } break; } */ } }