Java 类org.webrtc.CameraEnumerationAndroid.CaptureFormat 实例源码

项目:AppRTC-Android    文件:Camera2Session.java   
// Selects the capture format closest to the requested width/height/framerate from what
// the camera2 HAL reports, storing the result in |captureFormat|. Reports an error and
// leaves |captureFormat| untouched when the device advertises no usable formats.
private void findCaptureFormat() {
  checkIsOnCameraThread();

  final Range<Integer>[] fpsRanges =
      cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
  fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
  final List<CaptureFormat.FramerateRange> availableFramerates =
      Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
  final List<Size> availableSizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
  Logging.d(TAG, "Available preview sizes: " + availableSizes);
  Logging.d(TAG, "Available fps ranges: " + availableFramerates);

  if (availableFramerates.isEmpty() || availableSizes.isEmpty()) {
    reportError("No supported capture formats.");
    return;
  }

  // Pick the supported entries nearest to the requested framerate and resolution.
  final CaptureFormat.FramerateRange closestFramerate =
      CameraEnumerationAndroid.getClosestSupportedFramerateRange(availableFramerates, framerate);
  final Size closestSize =
      CameraEnumerationAndroid.getClosestSupportedSize(availableSizes, width, height);
  CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, closestSize);

  captureFormat = new CaptureFormat(closestSize.width, closestSize.height, closestFramerate);
  Logging.d(TAG, "Using capture format: " + captureFormat);
}
项目:AppRTC-Android    文件:Camera1Session.java   
// Applies the negotiated capture format (plus picture size and optional hardware features)
// to the camera via a single setParameters() call.
private static void updateCameraParameters(android.hardware.Camera camera,
    android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat, Size pictureSize,
    boolean captureToTexture) {
  // Preview geometry and fps range come from the negotiated capture format.
  parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
  parameters.setPreviewSize(captureFormat.width, captureFormat.height);
  parameters.setPictureSize(pictureSize.width, pictureSize.height);
  if (!captureToTexture) {
    // An explicit preview pixel format is only set for byte-buffer capture.
    parameters.setPreviewFormat(captureFormat.imageFormat);
  }

  // Opt into stabilization and continuous video focus when the hardware supports them.
  if (parameters.isVideoStabilizationSupported()) {
    parameters.setVideoStabilization(true);
  }
  final List<String> focusModes = parameters.getSupportedFocusModes();
  if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
    parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
  }
  camera.setParameters(parameters);
}
项目:AppRTC-Android    文件:Camera1Session.java   
// Returns the supported preview format nearest to |width| x |height| @ |framerate|.
private static CaptureFormat findClosestCaptureFormat(
    android.hardware.Camera.Parameters parameters, int width, int height, int framerate) {
  final List<CaptureFormat.FramerateRange> availableFramerates =
      Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
  Logging.d(TAG, "Available fps ranges: " + availableFramerates);

  final CaptureFormat.FramerateRange closestFramerate =
      CameraEnumerationAndroid.getClosestSupportedFramerateRange(availableFramerates, framerate);

  final List<Size> availableSizes =
      Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes());
  final Size closestSize =
      CameraEnumerationAndroid.getClosestSupportedSize(availableSizes, width, height);
  CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, closestSize);

  return new CaptureFormat(closestSize.width, closestSize.height, closestFramerate);
}
项目:AndroidRTC    文件:Camera2Session.java   
// Selects the capture format closest to the requested width/height/framerate from what
// the camera2 HAL reports, storing the result in |captureFormat|. Reports an error and
// leaves |captureFormat| untouched when the device advertises no usable formats.
private void findCaptureFormat() {
  checkIsOnCameraThread();

  final Range<Integer>[] fpsRanges =
      cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
  fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
  final List<CaptureFormat.FramerateRange> availableFramerates =
      Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
  final List<Size> availableSizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
  Logging.d(TAG, "Available preview sizes: " + availableSizes);
  Logging.d(TAG, "Available fps ranges: " + availableFramerates);

  if (availableFramerates.isEmpty() || availableSizes.isEmpty()) {
    reportError("No supported capture formats.");
    return;
  }

  // Pick the supported entries nearest to the requested framerate and resolution.
  final CaptureFormat.FramerateRange closestFramerate =
      CameraEnumerationAndroid.getClosestSupportedFramerateRange(availableFramerates, framerate);
  final Size closestSize =
      CameraEnumerationAndroid.getClosestSupportedSize(availableSizes, width, height);
  CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, closestSize);

  captureFormat = new CaptureFormat(closestSize.width, closestSize.height, closestFramerate);
  Logging.d(TAG, "Using capture format: " + captureFormat);
}
项目:AndroidRTC    文件:Camera1Session.java   
// Applies the negotiated capture format (plus picture size and optional hardware features)
// to the camera via a single setParameters() call.
private static void updateCameraParameters(android.hardware.Camera camera,
    android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat, Size pictureSize,
    boolean captureToTexture) {
  // Preview geometry and fps range come from the negotiated capture format.
  parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
  parameters.setPreviewSize(captureFormat.width, captureFormat.height);
  parameters.setPictureSize(pictureSize.width, pictureSize.height);
  if (!captureToTexture) {
    // An explicit preview pixel format is only set for byte-buffer capture.
    parameters.setPreviewFormat(captureFormat.imageFormat);
  }

  // Opt into stabilization and continuous video focus when the hardware supports them.
  if (parameters.isVideoStabilizationSupported()) {
    parameters.setVideoStabilization(true);
  }
  final List<String> focusModes = parameters.getSupportedFocusModes();
  if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
    parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
  }
  camera.setParameters(parameters);
}
项目:AndroidRTC    文件:Camera1Session.java   
// Returns the supported preview format nearest to |width| x |height| @ |framerate|.
private static CaptureFormat findClosestCaptureFormat(
    android.hardware.Camera.Parameters parameters, int width, int height, int framerate) {
  final List<CaptureFormat.FramerateRange> availableFramerates =
      Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
  Logging.d(TAG, "Available fps ranges: " + availableFramerates);

  final CaptureFormat.FramerateRange closestFramerate =
      CameraEnumerationAndroid.getClosestSupportedFramerateRange(availableFramerates, framerate);

  final List<Size> availableSizes =
      Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes());
  final Size closestSize =
      CameraEnumerationAndroid.getClosestSupportedSize(availableSizes, width, height);
  CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, closestSize);

  return new CaptureFormat(closestSize.width, closestSize.height, closestFramerate);
}
项目:AndroidRTC    文件:Camera1Session.java   
// Creates a session around an already-opened camera1 device and immediately starts capturing.
// NOTE(review): new Handler() binds to the calling thread's looper — presumably the camera
// thread; confirm callers always construct this on that thread.
private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
    SurfaceTextureHelper surfaceTextureHelper, int cameraId, android.hardware.Camera camera,
    android.hardware.Camera.CameraInfo info, CaptureFormat captureFormat,
    long constructionTimeNs) {
  Logging.d(TAG, "Create new camera1 session on camera " + cameraId);

  // Handler servicing the looper of the thread that constructed this session.
  this.cameraThreadHandler = new Handler();
  this.events = events;
  this.captureToTexture = captureToTexture;
  this.applicationContext = applicationContext;
  this.surfaceTextureHelper = surfaceTextureHelper;
  this.cameraId = cameraId;
  this.camera = camera;
  this.info = info;
  this.captureFormat = captureFormat;
  this.constructionTimeNs = constructionTimeNs;

  // Capture begins as a side effect of construction.
  startCapturing();
}
项目:VideoCRE    文件:Camera2Session.java   
// Selects the capture format closest to the requested width/height/framerate from what
// the camera2 HAL reports, storing the result in |captureFormat|. Reports an error and
// leaves |captureFormat| untouched when the device advertises no usable formats.
private void findCaptureFormat() {
  checkIsOnCameraThread();

  final Range<Integer>[] fpsRanges =
      cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
  fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
  final List<CaptureFormat.FramerateRange> availableFramerates =
      Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
  final List<Size> availableSizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
  Logging.d(TAG, "Available preview sizes: " + availableSizes);
  Logging.d(TAG, "Available fps ranges: " + availableFramerates);

  if (availableFramerates.isEmpty() || availableSizes.isEmpty()) {
    reportError("No supported capture formats.");
    return;
  }

  // Pick the supported entries nearest to the requested framerate and resolution.
  final CaptureFormat.FramerateRange closestFramerate =
      CameraEnumerationAndroid.getClosestSupportedFramerateRange(availableFramerates, framerate);
  final Size closestSize =
      CameraEnumerationAndroid.getClosestSupportedSize(availableSizes, width, height);
  // Resolution histogram reporting is intentionally disabled in this fork.

  captureFormat = new CaptureFormat(closestSize.width, closestSize.height, closestFramerate);
  Logging.d(TAG, "Using capture format: " + captureFormat);
}
项目:VideoCRE    文件:Camera1Session.java   
// Applies the negotiated capture format (plus picture size and optional hardware features)
// to the camera via a single setParameters() call.
private static void updateCameraParameters(android.hardware.Camera camera,
    android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat, Size pictureSize,
    boolean captureToTexture) {
  // Preview geometry and fps range come from the negotiated capture format.
  parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
  parameters.setPreviewSize(captureFormat.width, captureFormat.height);
  parameters.setPictureSize(pictureSize.width, pictureSize.height);
  if (!captureToTexture) {
    // An explicit preview pixel format is only set for byte-buffer capture.
    parameters.setPreviewFormat(captureFormat.imageFormat);
  }

  // Opt into stabilization and continuous video focus when the hardware supports them.
  if (parameters.isVideoStabilizationSupported()) {
    parameters.setVideoStabilization(true);
  }
  final List<String> focusModes = parameters.getSupportedFocusModes();
  if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
    parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
  }
  camera.setParameters(parameters);
}
项目:VideoCRE    文件:Camera1Session.java   
// Returns the supported preview format nearest to |width| x |height| @ |framerate|.
private static CaptureFormat findClosestCaptureFormat(
    android.hardware.Camera.Parameters parameters, int width, int height, int framerate) {
  final List<CaptureFormat.FramerateRange> availableFramerates =
      Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
  Logging.d(TAG, "Available fps ranges: " + availableFramerates);

  final CaptureFormat.FramerateRange closestFramerate =
      CameraEnumerationAndroid.getClosestSupportedFramerateRange(availableFramerates, framerate);

  final List<Size> availableSizes =
      Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes());
  final Size closestSize =
      CameraEnumerationAndroid.getClosestSupportedSize(availableSizes, width, height);
  // Resolution histogram reporting is intentionally disabled in this fork.

  return new CaptureFormat(closestSize.width, closestSize.height, closestFramerate);
}
项目:VideoCRE    文件:Camera1Session.java   
// Creates a session around an already-opened camera1 device, starts capturing, and —
// when a MediaRecorder is supplied — hands the camera over to it for recording.
// NOTE(review): new Handler() binds to the calling thread's looper — presumably the camera
// thread; confirm callers always construct this on that thread.
private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
    SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, int cameraId,
    android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
    CaptureFormat captureFormat, long constructionTimeNs) {
  Logging.d(TAG, "Create new camera1 session on camera " + cameraId);

  // Handler servicing the looper of the thread that constructed this session.
  this.cameraThreadHandler = new Handler();
  this.events = events;
  this.captureToTexture = captureToTexture;
  this.applicationContext = applicationContext;
  this.surfaceTextureHelper = surfaceTextureHelper;
  this.cameraId = cameraId;
  this.camera = camera;
  this.info = info;
  this.captureFormat = captureFormat;
  this.constructionTimeNs = constructionTimeNs;
  this.activityOrientation = getDeviceOrientation();

  // Capture begins as a side effect of construction.
  startCapturing();

  if (mediaRecorder != null) {
    // unlock() releases the camera lock so MediaRecorder may use the same device.
    camera.unlock();
    mediaRecorder.setCamera(camera);
  }
}
项目:nc-android-webrtcpeer    文件:CaptureQualityController.java   
@Override
public int compare(CaptureFormat first, CaptureFormat second) {
    // Framerate each format can achieve under the current target bandwidth.
    int firstFps = calculateFramerate(targetBandwidth, first);
    int secondFps = calculateFramerate(targetBandwidth, second);

    if (firstFps >= FRAMERATE_THRESHOLD && secondFps >= FRAMERATE_THRESHOLD
        || firstFps == secondFps) {
        // Both reach an acceptable framerate (or tie): prefer higher resolution.
        // Integer.compare avoids overflow that subtracting two pixel counts could cause.
        return Integer.compare(first.width * first.height, second.width * second.height);
    } else {
        // Otherwise prefer the format with the higher achievable framerate.
        return Integer.compare(firstFps, secondFps);
    }
}
项目:nc-android-webrtcpeer    文件:CaptureQualityController.java   
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
    // Slider at zero means "muted": clear the capture format entirely.
    if (progress == 0) {
        width = 0;
        height = 0;
        framerate = 0;
        captureFormatText.setText("muted");
        return;
    }

    // Highest capture bandwidth any supported format can produce (millipixels / second).
    long peakBandwidth = Long.MIN_VALUE;
    for (CaptureFormat format : formats) {
        final long formatBandwidth =
                (long) format.width * format.height * format.framerate.max;
        peakBandwidth = Math.max(peakBandwidth, formatBandwidth);
    }

    // Map the linear slider position (0..100) onto a log-scale fraction of peak bandwidth.
    final double kExpConstant = 3.0;
    final double linearFraction = (double) progress / 100.0;
    final double logFraction =
            (Math.exp(kExpConstant * linearFraction) - 1) / (Math.exp(kExpConstant) - 1);
    targetBandwidth = logFraction * peakBandwidth;

    // Pick the best supported format for that bandwidth and derive the framerate to use.
    final CaptureFormat bestFormat = Collections.max(formats, compareFormats);
    width = bestFormat.width;
    height = bestFormat.height;
    framerate = calculateFramerate(targetBandwidth, bestFormat);
    captureFormatText.setText(
            String.format(Locale.US, "%1$dx%2$d @ %3$d fps", width, height, framerate));
}
项目:AppRTC-Android    文件:CaptureQualityController.java   
@Override
public int compare(CaptureFormat first, CaptureFormat second) {
  // Framerate each format can achieve under the current target bandwidth.
  int firstFps = calculateFramerate(targetBandwidth, first);
  int secondFps = calculateFramerate(targetBandwidth, second);

  if (firstFps >= FRAMERATE_THRESHOLD && secondFps >= FRAMERATE_THRESHOLD
      || firstFps == secondFps) {
    // Both reach an acceptable framerate (or tie): prefer higher resolution.
    // Integer.compare avoids overflow that subtracting two pixel counts could cause.
    return Integer.compare(first.width * first.height, second.width * second.height);
  } else {
    // Otherwise prefer the format with the higher achievable framerate.
    return Integer.compare(firstFps, secondFps);
  }
}
项目:AppRTC-Android    文件:CaptureQualityController.java   
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
  // Slider at zero means "muted": clear the capture format entirely.
  if (progress == 0) {
    width = 0;
    height = 0;
    framerate = 0;
    captureFormatText.setText(R.string.muted);
    return;
  }

  // Highest capture bandwidth any supported format can produce (millipixels / second).
  long peakBandwidth = Long.MIN_VALUE;
  for (CaptureFormat format : formats) {
    final long formatBandwidth = (long) format.width * format.height * format.framerate.max;
    peakBandwidth = Math.max(peakBandwidth, formatBandwidth);
  }

  // Map the linear slider position (0..100) onto a log-scale fraction of peak bandwidth.
  final double kExpConstant = 3.0;
  final double linearFraction = (double) progress / 100.0;
  final double logFraction =
      (Math.exp(kExpConstant * linearFraction) - 1) / (Math.exp(kExpConstant) - 1);
  targetBandwidth = logFraction * peakBandwidth;

  // Pick the best supported format for that bandwidth and derive the framerate to use.
  final CaptureFormat bestFormat = Collections.max(formats, compareFormats);
  width = bestFormat.width;
  height = bestFormat.height;
  framerate = calculateFramerate(targetBandwidth, bestFormat);
  captureFormatText.setText(
      String.format(captureFormatText.getContext().getString(R.string.format_description), width,
          height, framerate));
}
项目:AppRTC-Android    文件:Camera1Enumerator.java   
// Returns the supported formats for |cameraId|. Enumeration is expensive, so the first
// call populates a cache covering every camera; later calls only read the cache.
static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
  if (cachedSupportedFormats == null) {
    final List<List<CaptureFormat>> allFormats = new ArrayList<List<CaptureFormat>>();
    final int cameraCount = android.hardware.Camera.getNumberOfCameras();
    for (int id = 0; id < cameraCount; ++id) {
      allFormats.add(enumerateFormats(id));
    }
    cachedSupportedFormats = allFormats;
  }
  return cachedSupportedFormats.get(cameraId);
}
项目:AppRTC-Android    文件:Camera1Enumerator.java   
// Converts the raw {min, max} int pairs reported by the camera1 API into FramerateRange
// objects, preserving order.
static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
  final List<CaptureFormat.FramerateRange> result =
      new ArrayList<CaptureFormat.FramerateRange>();
  for (int[] minMax : arrayRanges) {
    final int min = minMax[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
    final int max = minMax[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
    result.add(new CaptureFormat.FramerateRange(min, max));
  }
  return result;
}
项目:AppRTC-Android    文件:Camera1Session.java   
// Creates a session around an already-opened camera1 device, starts capturing, and —
// when a MediaRecorder is supplied — hands the camera over to it for recording.
// NOTE(review): new Handler() binds to the calling thread's looper — presumably the camera
// thread; confirm callers always construct this on that thread.
private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
    SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, int cameraId,
    android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
    CaptureFormat captureFormat, long constructionTimeNs) {
  Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
  // Cache whether the VideoFrame-emit field trial is active for this session's lifetime.
  videoFrameEmitTrialEnabled =
      PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
          .equals(PeerConnectionFactory.TRIAL_ENABLED);

  // Handler servicing the looper of the thread that constructed this session.
  this.cameraThreadHandler = new Handler();
  this.events = events;
  this.captureToTexture = captureToTexture;
  this.applicationContext = applicationContext;
  this.surfaceTextureHelper = surfaceTextureHelper;
  this.cameraId = cameraId;
  this.camera = camera;
  this.info = info;
  this.captureFormat = captureFormat;
  this.constructionTimeNs = constructionTimeNs;

  // Capture begins as a side effect of construction.
  startCapturing();

  if (mediaRecorder != null) {
    // unlock() releases the camera lock so MediaRecorder may use the same device.
    camera.unlock();
    mediaRecorder.setCamera(camera);
  }
}
项目:AppRTC-Android    文件:Camera2Enumerator.java   
// Converts camera2 Range<Integer> fps ranges into FramerateRange objects, scaling both
// bounds by |unitFactor| so all devices report in the same unit.
static List<CaptureFormat.FramerateRange> convertFramerates(
    Range<Integer>[] arrayRanges, int unitFactor) {
  final List<CaptureFormat.FramerateRange> result =
      new ArrayList<CaptureFormat.FramerateRange>();
  for (Range<Integer> fpsRange : arrayRanges) {
    final int min = fpsRange.getLower() * unitFactor;
    final int max = fpsRange.getUpper() * unitFactor;
    result.add(new CaptureFormat.FramerateRange(min, max));
  }
  return result;
}
项目:AndroidRTC    文件:CaptureQualityController.java   
@Override
public int compare(CaptureFormat first, CaptureFormat second) {
    // Framerate each format can achieve under the current target bandwidth.
    int firstFps = calculateFramerate(targetBandwidth, first);
    int secondFps = calculateFramerate(targetBandwidth, second);

    if (firstFps >= FRAMERATE_THRESHOLD && secondFps >= FRAMERATE_THRESHOLD
            || firstFps == secondFps) {
        // Both reach an acceptable framerate (or tie): prefer higher resolution.
        // Integer.compare avoids overflow that subtracting two pixel counts could cause.
        return Integer.compare(first.width * first.height, second.width * second.height);
    } else {
        // Otherwise prefer the format with the higher achievable framerate.
        return Integer.compare(firstFps, secondFps);
    }
}
项目:AndroidRTC    文件:CaptureQualityController.java   
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
    // Slider at zero means "muted": clear the capture format entirely.
    if (progress == 0) {
        width = 0;
        height = 0;
        framerate = 0;
        captureFormatText.setText(R.string.muted);
        return;
    }

    // Highest capture bandwidth any supported format can produce (millipixels / second).
    long peakBandwidth = Long.MIN_VALUE;
    for (CaptureFormat format : formats) {
        final long formatBandwidth =
                (long) format.width * format.height * format.framerate.max;
        peakBandwidth = Math.max(peakBandwidth, formatBandwidth);
    }

    // Map the linear slider position (0..100) onto a log-scale fraction of peak bandwidth.
    final double kExpConstant = 3.0;
    final double linearFraction = (double) progress / 100.0;
    final double logFraction =
            (Math.exp(kExpConstant * linearFraction) - 1) / (Math.exp(kExpConstant) - 1);
    targetBandwidth = logFraction * peakBandwidth;

    // Pick the best supported format for that bandwidth and derive the framerate to use.
    final CaptureFormat bestFormat = Collections.max(formats, compareFormats);
    width = bestFormat.width;
    height = bestFormat.height;
    framerate = calculateFramerate(targetBandwidth, bestFormat);
    captureFormatText.setText(
            String.format(captureFormatText.getContext().getString(R.string.format_description), width,
                    height, framerate));
}
项目:AndroidRTC    文件:Camera1Enumerator.java   
// Returns the supported formats for |cameraId|. Enumeration is expensive, so the first
// call populates a cache covering every camera; later calls only read the cache.
static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
  if (cachedSupportedFormats == null) {
    final List<List<CaptureFormat>> allFormats = new ArrayList<List<CaptureFormat>>();
    final int cameraCount = android.hardware.Camera.getNumberOfCameras();
    for (int id = 0; id < cameraCount; ++id) {
      allFormats.add(enumerateFormats(id));
    }
    cachedSupportedFormats = allFormats;
  }
  return cachedSupportedFormats.get(cameraId);
}
项目:AndroidRTC    文件:Camera1Enumerator.java   
// Converts the raw {min, max} int pairs reported by the camera1 API into FramerateRange
// objects, preserving order.
static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
  final List<CaptureFormat.FramerateRange> result =
      new ArrayList<CaptureFormat.FramerateRange>();
  for (int[] minMax : arrayRanges) {
    final int min = minMax[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
    final int max = minMax[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
    result.add(new CaptureFormat.FramerateRange(min, max));
  }
  return result;
}
项目:AndroidRTC    文件:Camera2Enumerator.java   
// Converts camera2 Range<Integer> fps ranges into FramerateRange objects, scaling both
// bounds by |unitFactor| so all devices report in the same unit.
static List<CaptureFormat.FramerateRange> convertFramerates(
    Range<Integer>[] arrayRanges, int unitFactor) {
  final List<CaptureFormat.FramerateRange> result =
      new ArrayList<CaptureFormat.FramerateRange>();
  for (Range<Integer> fpsRange : arrayRanges) {
    final int min = fpsRange.getLower() * unitFactor;
    final int max = fpsRange.getUpper() * unitFactor;
    result.add(new CaptureFormat.FramerateRange(min, max));
  }
  return result;
}
项目:AndroidRTC    文件:CameraVideoCapturerTestFixtures.java   
// Starts capture on |instance| using its supported format at |formatIndex|, then records
// the chosen format on the instance for later assertions.
private void startCapture(CapturerInstance instance, int formatIndex) {
  final CameraEnumerationAndroid.CaptureFormat chosen =
      instance.supportedFormats.get(formatIndex);
  instance.capturer.startCapture(chosen.width, chosen.height, chosen.framerate.max);
  instance.format = chosen;
}
项目:VideoCRE    文件:Camera1Enumerator.java   
// Returns the supported formats for |cameraId|. Enumeration is expensive, so the first
// call populates a cache covering every camera; later calls only read the cache.
static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
  if (cachedSupportedFormats == null) {
    final List<List<CaptureFormat>> allFormats = new ArrayList<List<CaptureFormat>>();
    final int cameraCount = android.hardware.Camera.getNumberOfCameras();
    for (int id = 0; id < cameraCount; ++id) {
      allFormats.add(enumerateFormats(id));
    }
    cachedSupportedFormats = allFormats;
  }
  return cachedSupportedFormats.get(cameraId);
}
项目:VideoCRE    文件:Camera1Enumerator.java   
// Converts the raw {min, max} int pairs reported by the camera1 API into FramerateRange
// objects, preserving order.
static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
  final List<CaptureFormat.FramerateRange> result =
      new ArrayList<CaptureFormat.FramerateRange>();
  for (int[] minMax : arrayRanges) {
    final int min = minMax[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
    final int max = minMax[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
    result.add(new CaptureFormat.FramerateRange(min, max));
  }
  return result;
}
项目:VideoCRE    文件:Camera2Enumerator.java   
// Converts camera2 Range<Integer> fps ranges into FramerateRange objects, scaling both
// bounds by |unitFactor| so all devices report in the same unit.
static List<CaptureFormat.FramerateRange> convertFramerates(
    Range<Integer>[] arrayRanges, int unitFactor) {
  final List<CaptureFormat.FramerateRange> result =
      new ArrayList<CaptureFormat.FramerateRange>();
  for (Range<Integer> fpsRange : arrayRanges) {
    final int min = fpsRange.getLower() * unitFactor;
    final int max = fpsRange.getUpper() * unitFactor;
    result.add(new CaptureFormat.FramerateRange(min, max));
  }
  return result;
}
项目:webrtc-android    文件:CaptureQualityController.java   
@Override
public int compare(CaptureFormat first, CaptureFormat second) {
  // Framerate each format can achieve under the current target bandwidth.
  int firstFps = calculateFramerate(targetBandwidth, first);
  int secondFps = calculateFramerate(targetBandwidth, second);

  if (firstFps >= FRAMERATE_THRESHOLD && secondFps >= FRAMERATE_THRESHOLD
      || firstFps == secondFps) {
    // Both reach an acceptable framerate (or tie): prefer higher resolution.
    // Integer.compare avoids overflow that subtracting two pixel counts could cause.
    return Integer.compare(first.width * first.height, second.width * second.height);
  } else {
    // Otherwise prefer the format with the higher achievable framerate.
    return Integer.compare(firstFps, secondFps);
  }
}
项目:webrtc-android    文件:CaptureQualityController.java   
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
  // Slider at zero means "muted": clear the capture format entirely.
  if (progress == 0) {
    width = 0;
    height = 0;
    framerate = 0;
    captureFormatText.setText("Muted");
    return;
  }

  // Highest capture bandwidth any supported format can produce (millipixels / second).
  long peakBandwidth = Long.MIN_VALUE;
  for (CaptureFormat format : formats) {
    final long formatBandwidth = (long) format.width * format.height * format.maxFramerate;
    peakBandwidth = Math.max(peakBandwidth, formatBandwidth);
  }

  // Map the linear slider position (0..100) onto a log-scale fraction of peak bandwidth.
  final double kExpConstant = 3.0;
  final double linearFraction = (double) progress / 100.0;
  final double logFraction =
      (Math.exp(kExpConstant * linearFraction) - 1) / (Math.exp(kExpConstant) - 1);
  targetBandwidth = logFraction * peakBandwidth;

  // Pick the best supported format for that bandwidth and derive the framerate to use.
  final CaptureFormat bestFormat = Collections.max(formats, compareFormats);
  width = bestFormat.width;
  height = bestFormat.height;
  framerate = calculateFramerate(targetBandwidth, bestFormat);
  captureFormatText.setText(width + "x" + height + " @ " + framerate + "fps");
}
项目:nc-android-webrtcpeer    文件:CaptureQualityController.java   
// Returns the framerate (fps) to use for |format| under |bandwidth| (millipixels/second):
// the bandwidth-limited rate, capped by the format's maximum, converted from milli-fps.
private int calculateFramerate(double bandwidth, CaptureFormat format) {
    final int pixelsPerFrame = format.width * format.height;
    final int bandwidthLimitedFps = (int) Math.round(bandwidth / pixelsPerFrame);
    final int milliFps = Math.min(format.framerate.max, bandwidthLimitedFps);
    return (int) Math.round(milliFps / 1000.0);
}
项目:AppRTC-Android    文件:CaptureQualityController.java   
// Returns the framerate (fps) to use for |format| under |bandwidth| (millipixels/second):
// the bandwidth-limited rate, capped by the format's maximum, converted from milli-fps.
private int calculateFramerate(double bandwidth, CaptureFormat format) {
  final int pixelsPerFrame = format.width * format.height;
  final int bandwidthLimitedFps = (int) Math.round(bandwidth / pixelsPerFrame);
  final int milliFps = Math.min(format.framerate.max, bandwidthLimitedFps);
  return (int) Math.round(milliFps / 1000.0);
}
项目:AppRTC-Android    文件:Camera1Enumerator.java   
// Resolves |deviceName| to a camera index and delegates to the index-based overload.
@Override
public List<CaptureFormat> getSupportedFormats(String deviceName) {
  return getSupportedFormats(getCameraIndex(deviceName));
}
项目:AppRTC-Android    文件:Camera2Enumerator.java   
// Delegates to the static overload using this enumerator's stored context.
@Override
public List<CaptureFormat> getSupportedFormats(String deviceName) {
  return getSupportedFormats(context, deviceName);
}
项目:AppRTC-Android    文件:Camera2Enumerator.java   
// Looks up the CameraManager system service and delegates to the manager-based overload.
static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
  return getSupportedFormats(
      (CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
}
项目:AndroidRTC    文件:CaptureQualityController.java   
// Returns the framerate (fps) to use for |format| under |bandwidth| (millipixels/second):
// the bandwidth-limited rate, capped by the format's maximum, converted from milli-fps.
private int calculateFramerate(double bandwidth, CaptureFormat format) {
    final int pixelsPerFrame = format.width * format.height;
    final int bandwidthLimitedFps = (int) Math.round(bandwidth / pixelsPerFrame);
    final int milliFps = Math.min(format.framerate.max, bandwidthLimitedFps);
    return (int) Math.round(milliFps / 1000.0);
}
项目:AndroidRTC    文件:Camera1Enumerator.java   
// Resolves |deviceName| to a camera index and delegates to the index-based overload.
@Override
public List<CaptureFormat> getSupportedFormats(String deviceName) {
  return getSupportedFormats(getCameraIndex(deviceName));
}
项目:AndroidRTC    文件:Camera2Enumerator.java   
// Delegates to the static overload using this enumerator's stored context.
@Override
public List<CaptureFormat> getSupportedFormats(String deviceName) {
  return getSupportedFormats(context, deviceName);
}
项目:AndroidRTC    文件:Camera2Enumerator.java   
// Looks up the CameraManager system service and delegates to the manager-based overload.
static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
  return getSupportedFormats(
      (CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
}
项目:VideoCRE    文件:Camera1Enumerator.java   
// Resolves |deviceName| to a camera index and delegates to the index-based overload.
@Override
public List<CaptureFormat> getSupportedFormats(String deviceName) {
  return getSupportedFormats(getCameraIndex(deviceName));
}