Java Class android.util.Range Code Examples

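The snippets below collect real-world usages of the android.util.Range class from open-source Android projects. For orientation, here is a minimal sketch of the core Range API (available since API level 21; the values are illustrative):

import android.util.Range;

Range<Integer> iso = new Range<>(100, 800);  // throws IllegalArgumentException if lower > upper
iso.getLower();                              // 100
iso.getUpper();                              // 800
iso.contains(400);                           // true
iso.clamp(1600);                             // 800: snapped to the nearest endpoint
iso.extend(Range.create(50, 200));           // [50, 800]: smallest range containing both
iso.intersect(Range.create(200, 3200));      // [200, 800]; throws if the ranges are disjoint
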
Project: android-Camera2Basic-master    File: Camera2BasicFragment.java
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
    switch (seekBar.getId()) {
        case R.id.iso: {
            Range<Integer> range = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
            int max = range.getUpper();
            int min = range.getLower();
            isoValue = ((seekBar.getProgress() * (max - min)) / 100 + min);
            createCameraPreviewSession_ISO();
            break;
        }
        case R.id.focus: {
            float distance = mCameraCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
            focusValue = seekBar.getProgress() * distance / 100;
            createCameraPreviewSession_FOCUS();
            break;
        }
        default:
            break;
    }

}
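The R.id.iso case above maps a 0-100 SeekBar progress linearly onto the sensor's sensitivity range. A standalone sketch of that mapping (the helper name is hypothetical), using Range.clamp to keep the result inside the advertised bounds:

// Hypothetical helper: maps SeekBar progress in [0, 100] onto an arbitrary integer range.
static int progressToValue(int progress, Range<Integer> range) {
    int min = range.getLower();
    int max = range.getUpper();
    int value = min + (progress * (max - min)) / 100;
    return range.clamp(value); // guards against out-of-range input such as progress > 100
}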
Project: AppRTC-Android    File: Camera2Session.java
private void findCaptureFormat() {
  checkIsOnCameraThread();

  Range<Integer>[] fpsRanges =
      cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
  fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
  List<CaptureFormat.FramerateRange> framerateRanges =
      Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
  List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
  Logging.d(TAG, "Available preview sizes: " + sizes);
  Logging.d(TAG, "Available fps ranges: " + framerateRanges);

  if (framerateRanges.isEmpty() || sizes.isEmpty()) {
    reportError("No supported capture formats.");
    return;
  }

  final CaptureFormat.FramerateRange bestFpsRange =
      CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);

  final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
  CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);

  captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
  Logging.d(TAG, "Using capture format: " + captureFormat);
}
Project: AndroidRTC    File: Camera2Session.java
private void findCaptureFormat() {
  checkIsOnCameraThread();

  Range<Integer>[] fpsRanges =
      cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
  fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
  List<CaptureFormat.FramerateRange> framerateRanges =
      Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
  List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
  Logging.d(TAG, "Available preview sizes: " + sizes);
  Logging.d(TAG, "Available fps ranges: " + framerateRanges);

  if (framerateRanges.isEmpty() || sizes.isEmpty()) {
    reportError("No supported capture formats.");
    return;
  }

  final CaptureFormat.FramerateRange bestFpsRange =
      CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);

  final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
  CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);

  captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
  Logging.d(TAG, "Using capture format: " + captureFormat);
}
Project: VideoCRE    File: Camera2Session.java
private void findCaptureFormat() {
  checkIsOnCameraThread();

  Range<Integer>[] fpsRanges =
      cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
  fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
  List<CaptureFormat.FramerateRange> framerateRanges =
      Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
  List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
  Logging.d(TAG, "Available preview sizes: " + sizes);
  Logging.d(TAG, "Available fps ranges: " + framerateRanges);

  if (framerateRanges.isEmpty() || sizes.isEmpty()) {
    reportError("No supported capture formats.");
    return;
  }

  final CaptureFormat.FramerateRange bestFpsRange =
      CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);

  final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
  //CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);

  captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
  Logging.d(TAG, "Using capture format: " + captureFormat);
}
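All three findCaptureFormat variants normalize CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES before comparing ranges: some camera HALs report fps ranges in plain frames per second (e.g. [15, 30]), others in thousandths (e.g. [15000, 30000]). A hedged sketch of the normalization done by getFpsUnitFactor and convertFramerates, both listed in full further down:

// HAL A reports [15, 30]; HAL B reports [15000, 30000]. Scale both to a common x1000 unit.
Range<Integer> reported = new Range<>(15, 30);
int unitFactor = reported.getUpper() < 1000 ? 1000 : 1; // same heuristic as getFpsUnitFactor
int minFps = reported.getLower() * unitFactor;          // 15000
int maxFps = reported.getUpper() * unitFactor;          // 30000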
Project: okuki    File: DataManager.java
public void loadMore() {
    if (!loading.get()) {
        setLoading(true);
        loadData(pageSize, results.size())
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .doOnError(error -> setLoading(false))
                .subscribe(
                        list -> {
                            if (!list.isEmpty()) {
                                int start = results.size();
                                int end = start + list.size();
                                results.addAll(list);
                                if (start > 0) {
                                    rangeInserted.call(new Range<>(start, end));
                                } else {
                                    listUpdated.call(null);
                                }
                            }
                            setLoading(false);
                        },
                        Errors.log());
    }
}
Project: android_packages_apps_tv    File: DvrScheduleManager.java
/**
 * Suggests a priority higher than that of any schedule which overlaps with {@code period}.
 */
public long suggestHighestPriority(String inputId, Range<Long> period, long basePriority) {
    List<ScheduledRecording> schedules = mInputScheduleMap.get(inputId);
    if (schedules == null) {
        return DEFAULT_PRIORITY;
    }
    long highestPriority = Long.MIN_VALUE;
    for (ScheduledRecording r : schedules) {
        if (r.isOverLapping(period) && r.getPriority() > highestPriority) {
            highestPriority = r.getPriority();
        }
    }
    if (highestPriority == Long.MIN_VALUE || highestPriority < basePriority) {
        return basePriority;
    }
    return highestPriority + PRIORITY_OFFSET;
}
Project: android_packages_apps_tv    File: TimeShiftManager.java
void onRecordTimeRangeChanged(long startTimeMs, long endTimeMs) {
    if (mChannel == null || mChannel.isPassthrough()) {
        return;
    }
    if (endTimeMs == CURRENT_TIME) {
        endTimeMs = System.currentTimeMillis();
    }

    long fetchStartTimeMs = Utils.floorTime(startTimeMs, MAX_DUMMY_PROGRAM_DURATION);
    boolean needToLoad = addDummyPrograms(fetchStartTimeMs,
            endTimeMs + PREFETCH_DURATION_FOR_NEXT);
    if (needToLoad) {
        Range<Long> period = Range.create(fetchStartTimeMs, endTimeMs);
        mProgramLoadQueue.add(period);
        startTaskIfNeeded();
    }
}
Project: android_packages_apps_tv    File: TimeShiftManager.java
private void startTaskIfNeeded() {
    if (mProgramLoadQueue.isEmpty()) {
        return;
    }
    if (mProgramLoadTask == null || mProgramLoadTask.isCancelled()) {
        startNext();
    } else {
        // Remove pending tasks that are fully satisfied by the current task's period.
        Range<Long> current = mProgramLoadTask.getPeriod();
        Iterator<Range<Long>> i = mProgramLoadQueue.iterator();
        while (i.hasNext()) {
            Range<Long> r = i.next();
            if (current.contains(r)) {
                i.remove();
            }
        }
    }
}
Project: android_packages_apps_tv    File: TimeShiftManager.java
private void startNext() {
    mProgramLoadTask = null;
    if (mProgramLoadQueue.isEmpty()) {
        return;
    }

    Range<Long> next = mProgramLoadQueue.poll();
    // Extend next to include any overlapping Ranges.
    Iterator<Range<Long>> i = mProgramLoadQueue.iterator();
    while (i.hasNext()) {
        Range<Long> r = i.next();
        if (next.contains(r.getLower()) || next.contains(r.getUpper())) {
            i.remove();
            next = next.extend(r);
        }
    }
    if (mChannel != null) {
        mProgramLoadTask = new LoadProgramsForCurrentChannelTask(
                mContext.getContentResolver(), next);
        mProgramLoadTask.executeOnDbThread();
    }
}
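startNext() relies on Range.contains and Range.extend to coalesce queued load periods. A quick illustration of those semantics with made-up bounds:

Range<Long> next = Range.create(0L, 100L);
Range<Long> r = Range.create(80L, 150L);
next.contains(r);             // false: r is not fully inside next
next.contains(r.getLower());  // true: the ranges overlap, so startNext() merges them
next.extend(r);               // [0, 150], the smallest range covering both periods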
Project: android_packages_apps_tv    File: TimeShiftManager.java
private void prefetchPrograms() {
    long startTimeMs;
    Program lastValidProgram = getLastValidProgram();
    if (lastValidProgram == null) {
        startTimeMs = System.currentTimeMillis();
    } else {
        startTimeMs = lastValidProgram.getEndTimeUtcMillis();
    }
    long endTimeMs = System.currentTimeMillis() + PREFETCH_DURATION_FOR_NEXT;
    if (startTimeMs <= endTimeMs) {
        if (DEBUG) {
            Log.d(TAG, "Prefetch task starts: {startTime=" + Utils.toTimeString(startTimeMs)
                    + ", endTime=" + Utils.toTimeString(endTimeMs) + "}");
        }
        mProgramLoadQueue.add(Range.create(startTimeMs, endTimeMs));
    }
    startTaskIfNeeded();
}
Project: android_packages_apps_tv    File: DvrScheduleManagerTest.java
public void testGetConflictingSchedules_period1() {
    long priority = 0;
    long channelId = 0;
    List<ScheduledRecording> schedules = new ArrayList<>();

    ScheduledRecording r1 = RecordingTestUtils.createTestRecordingWithPriorityAndPeriod(
            ++channelId, ++priority, 0L, 200L);
    schedules.add(r1);
    ScheduledRecording r2 = RecordingTestUtils.createTestRecordingWithPriorityAndPeriod(
            ++channelId, ++priority, 0L, 100L);
    schedules.add(r2);
    MoreAsserts.assertContentsInOrder(DvrScheduleManager.getConflictingSchedules(schedules, 1,
            Collections.singletonList(new Range<>(10L, 20L))), r1);
    MoreAsserts.assertContentsInOrder(DvrScheduleManager.getConflictingSchedules(schedules, 1,
            Collections.singletonList(new Range<>(110L, 120L))), r1);
}
Project: android_packages_apps_tv    File: DvrScheduleManagerTest.java
public void testGetConflictingSchedules_period2() {
    long priority = 0;
    long channelId = 0;
    List<ScheduledRecording> schedules = new ArrayList<>();

    ScheduledRecording r1 = RecordingTestUtils.createTestRecordingWithPriorityAndPeriod(
            ++channelId, ++priority, 0L, 200L);
    schedules.add(r1);
    ScheduledRecording r2 = RecordingTestUtils.createTestRecordingWithPriorityAndPeriod(
            ++channelId, ++priority, 100L, 200L);
    schedules.add(r2);
    MoreAsserts.assertContentsInOrder(DvrScheduleManager.getConflictingSchedules(schedules, 1,
            Collections.singletonList(new Range<>(10L, 20L))), r1);
    MoreAsserts.assertContentsInOrder(DvrScheduleManager.getConflictingSchedules(schedules, 1,
            Collections.singletonList(new Range<>(110L, 120L))), r1);
}
Project: AndroidSurvey    File: RangeScorer.java
@Override
public double score(ScoreUnit unit, Survey survey) {
    if (BuildConfig.DEBUG) Log.i(TAG, "RangeScorer");
    double scoreVal = 0;
    for (Question question : unit.questions()) {
        Response response = survey.getResponseByQuestion(question);
        if (!TextUtils.isEmpty(response.getText())) {
            Double responseText = Double.parseDouble(response.getText());
            if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                HashMap<Range<Double>, Double> map = rangeScoreOptionsMap(unit);
                for (Map.Entry<Range<Double>, Double> entry : map.entrySet()) {
                    if (entry.getKey().contains(responseText) && entry.getValue() > scoreVal) {
                        scoreVal = entry.getValue();
                    }
                }
            }
        }
    }
    return scoreVal;
}
Project: AppRTC-Android    File: Camera2Enumerator.java
static List<CaptureFormat.FramerateRange> convertFramerates(
    Range<Integer>[] arrayRanges, int unitFactor) {
  final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
  for (Range<Integer> range : arrayRanges) {
    ranges.add(new CaptureFormat.FramerateRange(
        range.getLower() * unitFactor, range.getUpper() * unitFactor));
  }
  return ranges;
}
Project: AndroidRTC    File: Camera2Enumerator.java
static List<CaptureFormat.FramerateRange> convertFramerates(
    Range<Integer>[] arrayRanges, int unitFactor) {
  final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
  for (Range<Integer> range : arrayRanges) {
    ranges.add(new CaptureFormat.FramerateRange(
        range.getLower() * unitFactor, range.getUpper() * unitFactor));
  }
  return ranges;
}
Project: VideoCRE    File: Camera2Enumerator.java
static List<CaptureFormat.FramerateRange> convertFramerates(
    Range<Integer>[] arrayRanges, int unitFactor) {
  final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
  for (Range<Integer> range : arrayRanges) {
    ranges.add(new CaptureFormat.FramerateRange(
        range.getLower() * unitFactor, range.getUpper() * unitFactor));
  }
  return ranges;
}
Project: android_packages_apps_tv    File: DvrManager.java
/**
 * Schedules a recording for {@code program} with the highest priority, so that the new
 * schedule wins any conflict and can actually be recorded.
 */
public ScheduledRecording addScheduleWithHighestPriority(Program program) {
    if (!SoftPreconditions.checkState(mDataManager.isDvrScheduleLoadFinished())) {
        return null;
    }
    SeriesRecording seriesRecording = getSeriesRecording(program);
    return addSchedule(program, seriesRecording == null
            ? mScheduleManager.suggestNewPriority()
            : mScheduleManager.suggestHighestPriority(seriesRecording.getInputId(),
                    new Range<>(program.getStartTimeUtcMillis(), program.getEndTimeUtcMillis()),
                    seriesRecording.getPriority()));
}
Project: android_packages_apps_tv    File: DvrScheduleManager.java
@VisibleForTesting
static List<ScheduledRecording> getConflictingSchedules(
        List<ScheduledRecording> schedules, int tunerCount, List<Range<Long>> periods) {
    List<ScheduledRecording> result = new ArrayList<>(
            getConflictingSchedulesInfo(schedules, tunerCount, periods).keySet());
    Collections.sort(result, RESULT_COMPARATOR);
    return result;
}
Project: android_packages_apps_tv    File: DvrDataManagerImpl.java
@Override
public List<ScheduledRecording> getScheduledRecordings(Range<Long> period,
        @RecordingState int state) {
    List<ScheduledRecording> result = new ArrayList<>();
    for (ScheduledRecording r : mScheduledRecordings.values()) {
        if (r.isOverLapping(period) && r.getState() == state) {
            result.add(r);
        }
    }
    return result;
}
Project: android_packages_apps_tv    File: Scheduler.java
private void updatePendingRecordings() {
    List<ScheduledRecording> scheduledRecordings = mDataManager
            .getScheduledRecordings(new Range<>(mLastStartTimePendingMs,
                    mClock.currentTimeMillis() + SOON_DURATION_IN_MS),
                    ScheduledRecording.STATE_RECORDING_NOT_STARTED);
    for (ScheduledRecording r : scheduledRecordings) {
        scheduleRecordingSoon(r);
    }
}
Project: android_packages_apps_tv    File: TimeShiftManager.java
boolean overlaps(Queue<Range<Long>> programLoadQueue) {
    for (Range<Long> r : programLoadQueue) {
        if (mPeriod.contains(r.getLower()) || mPeriod.contains(r.getUpper())) {
            return true;
        }
    }
    return false;
}
Project: android_packages_apps_tv    File: AsyncDbTask.java
public LoadProgramsForChannelTask(ContentResolver contentResolver, long channelId,
        @Nullable Range<Long> period) {
    super(contentResolver, period == null
            ? TvContract.buildProgramsUriForChannel(channelId)
            : TvContract.buildProgramsUriForChannel(channelId, period.getLower(),
                    period.getUpper()),
            null, null, null, null);
    mPeriod = period;
    mChannelId = channelId;
}
Project: android_packages_apps_tv    File: DvrDataManagerInMemoryImpl.java
@Override
public List<ScheduledRecording> getScheduledRecordings(Range<Long> period,
        @RecordingState int state) {
    List<ScheduledRecording> temp = getScheduledRecordingsPrograms();
    List<ScheduledRecording> result = new ArrayList<>();
    for (ScheduledRecording r : temp) {
        if (r.isOverLapping(period) && r.getState() == state) {
            result.add(r);
        }
    }
    return result;
}
Project: android_packages_apps_tv    File: DvrScheduleManagerTest.java
public void testGetConflictingSchedules_period3() {
    long priority = 0;
    long channelId = 0;
    List<ScheduledRecording> schedules = new ArrayList<>();

    ScheduledRecording r1 = RecordingTestUtils.createTestRecordingWithPriorityAndPeriod(
            ++channelId, ++priority, 0L, 100L);
    schedules.add(r1);
    ScheduledRecording r2 = RecordingTestUtils.createTestRecordingWithPriorityAndPeriod(
            ++channelId, ++priority, 100L, 200L);
    schedules.add(r2);
    ScheduledRecording r3 = RecordingTestUtils.createTestRecordingWithPriorityAndPeriod(
            ++channelId, ++priority, 0L, 100L);
    schedules.add(r3);
    ScheduledRecording r4 = RecordingTestUtils.createTestRecordingWithPriorityAndPeriod(
            ++channelId, ++priority, 100L, 200L);
    schedules.add(r4);
    MoreAsserts.assertContentsInOrder(DvrScheduleManager.getConflictingSchedules(schedules, 1,
            Collections.singletonList(new Range<>(10L, 20L))), r1);
    MoreAsserts.assertContentsInOrder(DvrScheduleManager.getConflictingSchedules(schedules, 1,
            Collections.singletonList(new Range<>(110L, 120L))), r2);
    MoreAsserts.assertContentsInOrder(DvrScheduleManager.getConflictingSchedules(schedules, 1,
            Collections.singletonList(new Range<>(50L, 150L))), r2, r1);
    List<Range<Long>> ranges = new ArrayList<>();
    ranges.add(new Range<>(10L, 20L));
    ranges.add(new Range<>(110L, 120L));
    MoreAsserts.assertContentsInOrder(DvrScheduleManager.getConflictingSchedules(schedules, 1,
            ranges), r2, r1);
}
Project: AndroidSurvey    File: RangeScorer.java
private HashMap<Range<Double>, Double> rangeScoreOptionsMap(ScoreUnit unit) {
    HashMap<Range<Double>, Double> map = new HashMap<>();
    for (OptionScore os : unit.optionScores()) {
        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
            if (os.getLabel().contains("..")) {
                String[] ranges = os.getLabel().split("\\.\\.");
                Range<Double> range = new Range<>(Double.parseDouble(ranges[0]),
                        Double.parseDouble(ranges[1]));
                map.put(range, os.getValue());
            }
        }
    }
    return map;
}
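rangeScoreOptionsMap turns option labels of the form "lower..upper" into Range<Double> keys. A worked example with a hypothetical label:

String label = "1.5..3.0";                        // hypothetical OptionScore label
String[] bounds = label.split("\\.\\.");
Range<Double> range = new Range<>(Double.parseDouble(bounds[0]),
        Double.parseDouble(bounds[1]));           // throws if lower > upper
range.contains(2.2);                              // true, so this option's score applies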
Project: 19porn    File: EncodeUtils.java
public static EncodeVideoCapability getEncodeVideoCapability(MediaCodec mediaCodec, String mime) {
    if (mediaCodec == null || Build.VERSION.SDK_INT < 18) {
        return null;
    }

    EncodeVideoCapability retCapability = new EncodeVideoCapability();
    MediaCodecInfo codecInfo = mediaCodec.getCodecInfo();
    MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mime);
    retCapability.colorFormat = capabilities.colorFormats;
    MediaCodecInfo.CodecProfileLevel[] profileLevels = capabilities.profileLevels;

    if (profileLevels != null) {
        retCapability.profileLevel = new EncodeVideoCapability.ProfileLevel[profileLevels.length];
        for (int i = 0; i < profileLevels.length; ++i) {
            retCapability.profileLevel[i] = new EncodeVideoCapability.ProfileLevel(
                    profileLevels[i].profile, profileLevels[i].level);
        }
    }

    Range<Integer> widthRange = null;
    Range<Integer> heightRange = null;
    if (Build.VERSION.SDK_INT >= 21) {
        MediaCodecInfo.VideoCapabilities videoCapabilities = capabilities.getVideoCapabilities();
        heightRange = videoCapabilities.getSupportedHeights();
        widthRange = videoCapabilities.getSupportedWidths();

        retCapability.heightAlignment = videoCapabilities.getHeightAlignment();
        retCapability.widthAlignment = videoCapabilities.getWidthAlignment();
    } else {
        // Older devices don't expose VideoCapabilities; fall back to a conservative
        // 2-pixel alignment and leave the size limits unset.
        retCapability.heightAlignment = 2;
        retCapability.widthAlignment = 2;
    }

    if (widthRange != null) {
        retCapability.widthUpper = widthRange.getUpper();
        retCapability.widthLower = widthRange.getLower();
    }

    if (heightRange != null) {
        retCapability.heightUpper = heightRange.getUpper();
        retCapability.heightLower = heightRange.getLower();
    }

    return retCapability;
}
Project: 19porn    File: VideoEncoder.java
public VideoEncodeParam(MediaCodecInfo.CodecProfileLevel codecProfileLevel, Range<Integer> widthRange, Range<Integer> heightRange) {
    this.codecProfileLevel = codecProfileLevel;
    this.widthRange = widthRange;
    this.heightRange = heightRange;
}
Project: AppRTC-Android    File: Camera2Enumerator.java
static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
  if (fpsRanges.length == 0) {
    return 1000;
  }
  return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
}
Project: AndroidRTC    File: Camera2Session.java
@Override
public void onConfigured(CameraCaptureSession session) {
  checkIsOnCameraThread();
  Logging.d(TAG, "Camera capture session configured.");
  captureSession = session;
  try {
    /*
     * The viable options for video capture requests are:
     * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
     *   post-processing.
     * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
     *   quality.
     */
    final CaptureRequest.Builder captureRequestBuilder =
        cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
    // Set auto exposure fps range.
    captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
        new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
            captureFormat.framerate.max / fpsUnitFactor));
    captureRequestBuilder.set(
        CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
    captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
    chooseStabilizationMode(captureRequestBuilder);
    chooseFocusMode(captureRequestBuilder);

    captureRequestBuilder.addTarget(surface);
    session.setRepeatingRequest(
        captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
  } catch (CameraAccessException e) {
    reportError("Failed to start capture request. " + e);
    return;
  }

  surfaceTextureHelper.startListening(
      new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
        @Override
        public void onTextureFrameAvailable(
            int oesTextureId, float[] transformMatrix, long timestampNs) {
          checkIsOnCameraThread();

          if (state != SessionState.RUNNING) {
            Logging.d(TAG, "Texture frame captured but camera is no longer running.");
            surfaceTextureHelper.returnTextureFrame();
            return;
          }

          if (!firstFrameReported) {
            firstFrameReported = true;
            final int startTimeMs =
                (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
            camera2StartTimeMsHistogram.addSample(startTimeMs);
          }

          int rotation = getFrameOrientation();
          if (isCameraFrontFacing) {
            // Undo the mirror that the OS "helps" us with.
            // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
            transformMatrix = RendererCommon.multiplyMatrices(
                transformMatrix, RendererCommon.horizontalFlipMatrix());
          }

          // Undo camera orientation - we report it as rotation instead.
          transformMatrix =
              RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);

          events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
              captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
        }
      });
  Logging.d(TAG, "Camera device successfully started.");
  callback.onDone(Camera2Session.this);
}
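Note the division by fpsUnitFactor when building CONTROL_AE_TARGET_FPS_RANGE: the capture request must use the same units the HAL advertised, so the internally normalized (x1000) framerate is converted back. A small sketch of that round-trip with assumed values:

// The HAL advertised [15, 30], so the normalized internal range is [15000, 30000].
int fpsUnitFactor = 1000;
Range<Integer> target = new Range<>(15000 / fpsUnitFactor, 30000 / fpsUnitFactor); // [15, 30]
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, target);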
Project: AndroidRTC    File: Camera2Enumerator.java
static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
  if (fpsRanges.length == 0) {
    return 1000;
  }
  return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
}
Project: VideoCRE    File: Camera2Session.java
@Override
public void onConfigured(CameraCaptureSession session) {
  checkIsOnCameraThread();
  Logging.d(TAG, "Camera capture session configured.");
  captureSession = session;
  try {
    /*
     * The viable options for video capture requests are:
     * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
     *   post-processing.
     * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
     *   quality.
     */
    final CaptureRequest.Builder captureRequestBuilder =
        cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
    // Set auto exposure fps range.
    captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
        new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
            captureFormat.framerate.max / fpsUnitFactor));
    captureRequestBuilder.set(
        CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
    captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
    chooseStabilizationMode(captureRequestBuilder);
    chooseFocusMode(captureRequestBuilder);

    captureRequestBuilder.addTarget(surface);
    if (mediaRecorderSurface != null) {
      Logging.d(TAG, "Add MediaRecorder surface to CaptureRequest.Builder");
      captureRequestBuilder.addTarget(mediaRecorderSurface);
    }
    session.setRepeatingRequest(
        captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
  } catch (CameraAccessException e) {
    reportError("Failed to start capture request. " + e);
    return;
  }

  surfaceTextureHelper.startListening(
      new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
        @Override
        public void onTextureFrameAvailable(
            int oesTextureId, float[] transformMatrix, long timestampNs) {
          checkIsOnCameraThread();

          if (state != SessionState.RUNNING) {
            Logging.d(TAG, "Texture frame captured but camera is no longer running.");
            surfaceTextureHelper.returnTextureFrame();
            return;
          }

          if (!firstFrameReported) {
            firstFrameReported = true;
            final int startTimeMs =
                (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
            //camera2StartTimeMsHistogram.addSample(startTimeMs);
          }

          int rotation = getFrameOrientation();

          // make sure it appears in right orientation and flip using GlRectDrawer

          //if (isCameraFrontFacing) {
          //  // Undo the mirror that the OS "helps" us with.
          //  // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
          //  transformMatrix = RendererCommon.multiplyMatrices(
          //      transformMatrix, RendererCommon.horizontalFlipMatrix());
          //}
          //
          //// Undo camera orientation - we report it as rotation instead.
          //transformMatrix =
          //    RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);

          events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
              captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
        }
      });
  Logging.d(TAG, "Camera device successfully started.");
  callback.onDone(Camera2Session.this);
}
Project: VideoCRE    File: Camera2Enumerator.java
static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
  if (fpsRanges.length == 0) {
    return 1000;
  }
  return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
}
Project: okuki    File: DataManager.java
public Observable<Range<Integer>> onRangeInserted() {
    return rangeInserted;
}
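Since loadMore() above emits new Range<>(start, end) with end = start + list.size(), a subscriber can translate the range directly into adapter notifications. A minimal sketch (dataManager and adapter are hypothetical names):

dataManager.onRangeInserted().subscribe(range ->
        adapter.notifyItemRangeInserted(range.getLower(), range.getUpper() - range.getLower()));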
Project: Android-Slow-Motion-Camera2    File: CaptureHighSpeedVideoMode.java
private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
    // Range<Integer> fpsRange = Range.create(240, 240);
    Range<Integer> fpsRange = getHighestFpsRange(availableFpsRange);
    builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
}
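For constrained high-speed capture the target fps range cannot be arbitrary: it must be one of the ranges the HAL advertises for the chosen output size. A hedged sketch of how availableFpsRange above is typically obtained (characteristics and videoSize are assumed to be in scope):

StreamConfigurationMap map =
        characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Range<Integer>[] availableFpsRange = map.getHighSpeedVideoFpsRangesFor(videoSize);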
Project: android_packages_apps_tv    File: ScheduledRecording.java
/**
 * Checks if the {@code period} overlaps with the recording time.
 */
public boolean isOverLapping(Range<Long> period) {
    return mStartTimeMs < period.getUpper() && mEndTimeMs > period.getLower();
}
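The comparison is the standard open-interval overlap test, so two periods that merely touch at an endpoint do not count as overlapping:

// Given a recording with mStartTimeMs = 100 and mEndTimeMs = 200:
r.isOverLapping(new Range<>(0L, 100L));   // false: touching endpoints only
r.isOverLapping(new Range<>(150L, 250L)); // true: the intervals properly intersect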
Project: android_packages_apps_tv    File: TimeShiftManager.java
private boolean addDummyPrograms(Range<Long> period) {
    return addDummyPrograms(period.getLower(), period.getUpper());
}
Project: android_packages_apps_tv    File: TimeShiftManager.java
LoadProgramsForCurrentChannelTask(ContentResolver contentResolver,
        Range<Long> period) {
    super(contentResolver, mChannel.getId(), period);
}
Project: android_packages_apps_tv    File: TimeShiftManager.java
@Override
protected void onPostExecute(List<Program> programs) {
    if (DEBUG) {
        Log.d(TAG, "Programs are loaded {channelId=" + mChannelId +
                ", from=" + Utils.toTimeString(mPeriod.getLower()) +
                ", to=" + Utils.toTimeString(mPeriod.getUpper()) +
                "}");
    }
    // Remove pending tasks that are fully satisfied by this query.
    Iterator<Range<Long>> it = mProgramLoadQueue.iterator();
    while (it.hasNext()) {
        Range<Long> r = it.next();
        if (mPeriod.contains(r)) {
            it.remove();
        }
    }
    if (programs == null || programs.isEmpty()) {
        mEmptyFetchCount++;
        if (addDummyPrograms(mPeriod)) {
            TimeShiftManager.this.onProgramInfoChanged();
        }
        schedulePrefetchPrograms();
        startNextLoadingIfNeeded();
        return;
    }
    mEmptyFetchCount = 0;
    if (!mPrograms.isEmpty()) {
        removeDummyPrograms();
        removeOverlappedPrograms(programs);
        Program loadedProgram = programs.get(0);
        for (int i = 0; i < mPrograms.size() && !programs.isEmpty(); ++i) {
            Program program = mPrograms.get(i);
            while (program.getStartTimeUtcMillis() > loadedProgram
                    .getStartTimeUtcMillis()) {
                mPrograms.add(i++, loadedProgram);
                programs.remove(0);
                if (programs.isEmpty()) {
                    break;
                }
                loadedProgram = programs.get(0);
            }
        }
    }
    mPrograms.addAll(programs);
    addDummyPrograms(mPeriod);
    TimeShiftManager.this.onProgramInfoChanged();
    schedulePrefetchPrograms();
    startNextLoadingIfNeeded();
}
Project: android_packages_apps_tv    File: AsyncDbTask.java
public final Range<Long> getPeriod() {
    return mPeriod;
}
Project: android_packages_apps_tv    File: ScheduledRecordingTest.java
private void assertOverLapping(boolean expected, long lower, long upper, ScheduledRecording r) {
    assertEquals("isOverlapping(Range(" + lower + "," + upper + "), recording " + r, expected,
            r.isOverLapping(new Range<>(lower, upper)));
}