public static String formatDuration(long duration) {
    // Using Joda Time
    DateTime now = new DateTime(); // Now
    DateTime plus = now.plus(new Duration(duration * 1000));
    // Define and calculate the interval of time
    Interval interval = new Interval(now.getMillis(), plus.getMillis());
    Period period = interval.toPeriod(PeriodType.time());
    // Define the period formatter for pretty printing
    String ampersand = " & ";
    PeriodFormatter pf = new PeriodFormatterBuilder()
            .appendHours().appendSuffix(ds("hour"), ds("hours"))
            .appendSeparator(" ", ampersand)
            .appendMinutes().appendSuffix(ds("minute"), ds("minutes"))
            .appendSeparator(ampersand)
            .appendSeconds().appendSuffix(ds("second"), ds("seconds"))
            .toFormatter();
    return pf.print(period).trim();
}
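// A stand-alone sketch of the same Interval/Period/PeriodFormatterBuilder pattern used above.
// The project's ds(...) localization lookup is replaced with hard-coded English suffixes, an
// assumption made purely for illustration; only Joda-Time is required.
import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.joda.time.Interval;
import org.joda.time.Period;
import org.joda.time.PeriodType;
import org.joda.time.format.PeriodFormatter;
import org.joda.time.format.PeriodFormatterBuilder;

public class FormatDurationDemo {
    public static void main(String[] args) {
        long durationSeconds = 3725; // 1 hour, 2 minutes, 5 seconds
        DateTime now = new DateTime();
        // Same trick as above: turn a duration into a time-only Period via an Interval
        Interval interval = new Interval(now, now.plus(new Duration(durationSeconds * 1000)));
        Period period = interval.toPeriod(PeriodType.time());
        // Hard-coded suffixes stand in for the ds(...) helper of the original method
        PeriodFormatter pf = new PeriodFormatterBuilder()
                .appendHours().appendSuffix(" hour", " hours")
                .appendSeparator(" ", " & ")
                .appendMinutes().appendSuffix(" minute", " minutes")
                .appendSeparator(" & ")
                .appendSeconds().appendSuffix(" second", " seconds")
                .toFormatter();
        System.out.println(pf.print(period).trim()); // prints "1 hour 2 minutes & 5 seconds"
    }
}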
@Nullable
private MBPlaceInavailability rowToInavail(Object[] row) {
    try {
        Interval interval = getInterval(BasicUtils.getTypeSafeString(row[3]), BasicUtils.getTypeSafeString(row[4]));
        MBPlaceInavailability in = new MBPlaceInavailability();
        in.setProviderId(BasicUtils.getTypeSafeString(row[0]));
        in.setPlaceId(BasicUtils.getTypeSafeString(row[1]));
        in.setBookingTargetId(BasicUtils.getTypeSafeString(row[2]));
        in.setInavailability(interval);
        return in;
    } catch (Exception e) {
        return null;
    }
}
protected static boolean overlaps(SharingStation station, Interval interval) {
    List<VehicleStatus> vehicleStatusList = station.getVehicleStatusList();
    for (VehicleStatus vs : vehicleStatusList) {
        List<Interval> inavailabilities = vs.getInavailabilities();
        if (!contains(inavailabilities, interval)) {
            // As long as there is a vehicle with non-overlapping inavailabilities,
            // the station can be used during the routing.
            return false;
        }
    }
    // There are two possible reasons for this outcome:
    // 1) there is no vehicle at the station
    // 2) we have gone through all the vehicles and there is no available one (all inavailabilities overlap)
    return true;
}
protected Collection<Interval> gatherSuitableSlots(ExamRoom room, LocalDate date, Integer examDuration) {
    Collection<Interval> examSlots = new ArrayList<>();
    // Resolve the opening hours for room and day
    List<ExamRoom.OpeningHours> openingHours = room.getWorkingHoursForDate(date);
    if (!openingHours.isEmpty()) {
        // Get suitable slots based on exam duration
        for (Interval slot : allSlots(openingHours, room, date)) {
            DateTime beginning = slot.getStart();
            DateTime openUntil = getEndOfOpeningHours(beginning, openingHours);
            if (!beginning.plusMinutes(examDuration).isAfter(openUntil)) {
                DateTime end = beginning.plusMinutes(examDuration);
                examSlots.add(new Interval(beginning, end));
            }
        }
    }
    return examSlots;
}
/**
 * Queries for slots for given room and day.
 */
private Set<TimeSlot> getExamSlots(User user, ExamRoom room, Exam exam, LocalDate date,
                                   Collection<Reservation> reservations, Collection<ExamMachine> machines) {
    Integer examDuration = exam.getDuration();
    Collection<Interval> examSlots = gatherSuitableSlots(room, date, examDuration);
    Map<Interval, Optional<Integer>> map = examSlots.stream().collect(
            Collectors.toMap(
                    Function.identity(),
                    es -> Optional.empty(),
                    (u, v) -> {
                        throw new IllegalStateException(String.format("Duplicate key %s", u));
                    },
                    LinkedHashMap::new));
    // Check reservation status and machine availability for each slot
    return handleReservations(map, reservations, exam, machines, user);
}
private Set<TimeSlot> getExamSlots(ExamRoom room, Integer examDuration, LocalDate date,
                                   Collection<ExamMachine> machines) {
    Set<TimeSlot> slots = new LinkedHashSet<>();
    Collection<Interval> examSlots = gatherSuitableSlots(room, date, examDuration);
    // Check machine availability for each slot
    for (Interval slot : examSlots) {
        int availableMachineCount = machines.stream()
                .filter(m -> !isReservedDuring(m, slot))
                .collect(Collectors.toList())
                .size();
        slots.add(new TimeSlot(slot, availableMachineCount, null));
    }
    return slots;
}
private static List<Interval> getExistingIntervalGaps(List<Interval> reserved) {
    List<Interval> gaps = new ArrayList<>();
    Interval current = reserved.get(0);
    for (int i = 1; i < reserved.size(); i++) {
        Interval next = reserved.get(i);
        Interval gap = current.gap(next);
        if (gap != null) {
            gaps.add(gap);
        }
        current = next;
    }
    return gaps;
}
public static List<Interval> getExceptionEvents(List<ExceptionWorkingHours> hours, LocalDate date,
                                                RestrictionType restrictionType) {
    List<Interval> exceptions = new ArrayList<>();
    for (ExceptionWorkingHours ewh : hours) {
        boolean isApplicable =
                (restrictionType == RestrictionType.RESTRICTIVE && ewh.isOutOfService()) ||
                (restrictionType == RestrictionType.NON_RESTRICTIVE && !ewh.isOutOfService());
        if (isApplicable) {
            DateTime start = new DateTime(ewh.getStartDate()).plusMillis(ewh.getStartDateTimezoneOffset());
            DateTime end = new DateTime(ewh.getEndDate()).plusMillis(ewh.getEndDateTimezoneOffset());
            Interval exception = new Interval(start, end);
            Interval wholeDay = date.toInterval();
            if (exception.contains(wholeDay) || exception.equals(wholeDay)) {
                exceptions.clear();
                exceptions.add(wholeDay);
                break;
            }
            if (exception.overlaps(wholeDay)) {
                exceptions.add(new Interval(exception.getStart(), exception.getEnd()));
            }
        }
    }
    return exceptions;
}
public static List<Interval> mergeSlots(List<Interval> slots) {
    if (slots.size() <= 1) {
        return slots;
    }
    slots.sort(Comparator.comparing(AbstractInterval::getStart));
    boolean isMerged = false;
    List<Interval> merged = new ArrayList<>();
    merged.add(slots.get(0));
    for (int i = 1; i < slots.size(); ++i) {
        // Compare against the last interval already in `merged`; using slots.get(i - 1)
        // together with merged.remove(i - 1) breaks once a chain of overlapping slots
        // has shortened the merged list.
        Interval first = merged.get(merged.size() - 1);
        Interval second = slots.get(i);
        if (!second.getStart().isAfter(first.getEnd())) {
            merged.remove(merged.size() - 1);
            DateTime laterEnding = first.getEnd().isAfter(second.getEnd()) ? first.getEnd() : second.getEnd();
            merged.add(new Interval(first.getStart(), laterEnding));
            isMerged = true;
        } else {
            merged.add(second);
        }
    }
    if (isMerged) {
        merged = mergeSlots(merged);
    }
    // Nothing to merge anymore
    return merged;
}
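// A minimal usage sketch of mergeSlots, assuming it lives on the DateTimeUtils helper
// referenced elsewhere in this section: two overlapping morning slots collapse into one
// while the detached afternoon slot is kept as-is.
import java.util.ArrayList;
import java.util.List;

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class MergeSlotsDemo {
    public static void main(String[] args) {
        DateTime base = new DateTime(2020, 1, 1, 8, 0);
        List<Interval> slots = new ArrayList<>();
        slots.add(new Interval(base, base.plusHours(2)));              // 08:00-10:00
        slots.add(new Interval(base.plusHours(1), base.plusHours(3))); // 09:00-11:00
        slots.add(new Interval(base.plusHours(5), base.plusHours(6))); // 13:00-14:00
        // Expected result: [08:00-11:00, 13:00-14:00]
        System.out.println(DateTimeUtils.mergeSlots(slots));
    }
}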
public void filterTripByDate(Interval dateRange) {
    dateFilter = dateRange;
    if (selectedTrips == null) return;
    selectedTrips.clear();
    if (allTrips == null) return;
    if (dateRange != null) {
        for (Trip t : allTrips) {
            DateTime begDate = DateTime.parse(t.getStartDate());
            DateTime endDate = DateTime.parse(t.getEndDate());
            if (dateRange.isAfter(endDate)) continue;
            if (dateRange.isBefore(begDate)) continue;
            selectedTrips.add(t);
        }
    } else {
        selectedTrips.addAll(allTrips);
    }
    adapter.reloadFrom(selectedTrips);
}
/**
 * Group intervals in the segment metadata by column.
 *
 * @param metadata Metadata containing the druid segments information
 *
 * @return a map from column name to the simplified list of intervals available for that column
 */
protected static Map<String, SimplifiedIntervalList> groupIntervalByColumn(DataSourceMetadata metadata) {
    Map<String, Set<Interval>> currentByColumn = new LinkedHashMap<>();
    // Accumulate all intervals by column name
    for (DataSegment segment : metadata.getSegments()) {
        SegmentInfo segmentInfo = new SegmentInfo(segment);
        for (String column : segmentInfo.getColumnNames()) {
            currentByColumn.computeIfAbsent(column, ignored -> new HashSet<>()).add(segmentInfo.getInterval());
        }
    }
    // Simplify interval sets using SimplifiedIntervalList
    return currentByColumn.entrySet().stream()
            .collect(
                    Collectors.toMap(
                            Map.Entry::getKey,
                            entry -> new SimplifiedIntervalList(entry.getValue())
                    )
            );
}
/**
 * Constructor.
 *
 * @param requestDimensions Dimensions contained in request
 * @param filterDimensions Filtered dimensions
 * @param metricDimensions Metric related dimensions
 * @param metricNames Names of metrics
 * @param apiFilters Map of dimension to its set of API filters
 * @param logicalTable The logical table requested by the request
 * @param intervals The interval constraint of the request
 * @param logicalMetrics The logical metrics requested by the request
 * @param minimumGranularity The finest granularity that must be satisfied by table granularity
 * @param requestGranularity The requested granularity of the requested table
 */
public QueryPlanningConstraint(
        Set<Dimension> requestDimensions,
        Set<Dimension> filterDimensions,
        Set<Dimension> metricDimensions,
        Set<String> metricNames,
        ApiFilters apiFilters,
        LogicalTable logicalTable,
        Set<Interval> intervals,
        Set<LogicalMetric> logicalMetrics,
        Granularity minimumGranularity,
        Granularity requestGranularity
) {
    super(requestDimensions, filterDimensions, metricDimensions, metricNames, apiFilters);
    this.logicalTable = logicalTable;
    this.intervals = intervals;
    this.logicalMetrics = logicalMetrics;
    this.minimumGranularity = minimumGranularity;
    this.requestGranularity = requestGranularity;
    this.logicalMetricNames = generateLogicalMetricNames();
}
/**
 * Collect all grain-sized subintervals of a bucketed interval list which intersect another supply list of intervals.
 *
 * @param supplyIntervals The interval collection to match bucketedIntervals against
 * @param bucketedIntervals The grain bucketed intervals to collect if they overlap the supply
 * @param granularity Grain at which to split the bucketedIntervals
 *
 * @return a simplified list of subintervals of the bucketedIntervals list
 */
public static SimplifiedIntervalList collectBucketedIntervalsIntersectingIntervalList(
        SimplifiedIntervalList supplyIntervals,
        SimplifiedIntervalList bucketedIntervals,
        Granularity granularity
) {
    // Stream the bucketed intervals, split by grain
    Iterable<Interval> bucketedIterable = granularity.intervalsIterable(bucketedIntervals);

    // Predicate to find buckets which overlap the supply
    Predicate<Interval> isIntersecting = new SimplifiedIntervalList.SkippingIntervalPredicate(
            supplyIntervals,
            AbstractInterval::overlaps,
            false
    );

    return StreamSupport.stream(bucketedIterable.spliterator(), false)
            .filter(isIntersecting)
            .collect(SimplifiedIntervalList.getCollector());
}
@Override
@Transactional(readOnly = true)
public List<Harvest> findHarvestsLinkedToHuntingDayAndPermitOfRhy(final Riistanhoitoyhdistys rhy,
                                                                  final GameSpecies species,
                                                                  final Interval interval) {
    final QHarvest harvest = QHarvest.harvest;
    final QGroupHuntingDay huntingDay = QGroupHuntingDay.groupHuntingDay;
    final QHuntingClubGroup group = QHuntingClubGroup.huntingClubGroup;
    final QHarvestPermit permit = QHarvestPermit.harvestPermit;

    return new JPAQuery<>(entityManager)
            .from(harvest)
            .join(harvest.huntingDayOfGroup, huntingDay)
            .join(huntingDay.group, group)
            .join(group.harvestPermit, permit)
            .select(harvest)
            .where(permit.rhy.eq(rhy)
                    .and(harvest.species.eq(species))
                    .and(harvest.pointOfTime.between(
                            new Timestamp(interval.getStart().getMillis()),
                            new Timestamp(interval.getEnd().getMillis()))))
            .fetch();
}
@Override
public Map<String, Double> getActiveUsersByMethod(Interval interval, ActiveMethod method, int resultSize,
                                                  boolean withBots) {
    if (method == ActiveMethod.ToTV) {
        return occurrenceStatsDAO.getActiveColumnsByToTV("username", interval, resultSize, withBots);
    } else if (method == ActiveMethod.ToMV) {
        return occurrenceStatsDAO.getActiveColumnsByToMV("username", interval, resultSize, withBots);
    } else {
        throw new UnsupportedOperationException(String.format("Method %s not supported", method));
    }
}
@Transactional(readOnly = true)
@Override
public List<Observation> findGroupObservations(final HuntingClubGroup huntingClubGroup, final Interval interval) {
    final SQObservation observation = new SQObservation("game_observation");
    final QObservation observationEntity = new QObservation("game_observation");

    final SubQueryExpression<Long> subQuery1 =
            gameObservationForGroupMemberInsideGroupHuntingArea(huntingClubGroup, interval);
    final SubQueryExpression<Long> subQuery2 = gameObservationLinkedToGroupHuntingDay(huntingClubGroup);
    final SubQueryExpression<Long> subQuery3 = gameObservationRejected(huntingClubGroup);

    return new JPASQLQuery<Observation>(entityManager, sqlTemplates)
            .select(observationEntity).from(observation)
            .where(observation.gameObservationId.in(union(asList(subQuery1, subQuery2, subQuery3))))
            .orderBy(observation.pointOfTime.desc(), observation.observerId.desc())
            .fetch();
}
public static OfflineWorkflowCreationParameters load() {
    ModuleConfig config = PlatformConfig.defaultConfig().getModuleConfig("offline-default-creation-parameters");
    Set<Country> countries = config.getEnumSetProperty("countries", Country.class);
    DateTime baseCaseDate = config.getDateTimeProperty("baseCaseDate");
    Interval histoInterval = config.getIntervalProperty("histoInterval");
    boolean generationSampled = config.getBooleanProperty("generationSampled", false);
    boolean boundariesSampled = config.getBooleanProperty("boundariesSampled", false);
    boolean initTopo = config.getBooleanProperty("initTopo", DEFAULT_INIT_TOPO);
    double correlationThreshold = config.getDoubleProperty("correlationThreshold", DEFAULT_CORRELATION_THRESHOLD);
    double probabilityThreshold = config.getDoubleProperty("probabilityThreshold", DEFAULT_PROBABILITY_THRESHOLD);
    boolean loadFlowTransformerVoltageControlOn = config.getBooleanProperty("loadFlowTransformerVoltageControlOn",
            DEFAULT_LOAD_FLOW_TRANSFORMER_VOLTAGE_CONTROL_ON);
    boolean simplifiedWorkflow = config.getBooleanProperty("simplifiedWorkflow", DEFAULT_SIMPLIFIED_WORKFLOW);
    boolean mergeOptimized = config.getBooleanProperty("mergeOptimized", DEFAULT_MERGE_OPTIMIZED);
    Set<Country> attributesCountryFilter = config.getEnumSetProperty("attributesCountryFilter", Country.class,
            DEFAULT_ATTRIBUTES_COUNTRY_FILTER);
    int attributesMinBaseVoltageFilter = config.getIntProperty("attributesMinBaseVoltageFilter",
            DEFAULT_ATTRIBUTES_MIN_BASE_VOLTAGE_FILTER);
    return new OfflineWorkflowCreationParameters(countries, baseCaseDate, histoInterval, generationSampled,
            boundariesSampled, initTopo, correlationThreshold, probabilityThreshold,
            loadFlowTransformerVoltageControlOn, simplifiedWorkflow, mergeOptimized,
            attributesCountryFilter, attributesMinBaseVoltageFilter);
}
/**
 * Checks strictly for time conflicts; DOES NOT CHECK FOR DAY CONFLICTS.
 * @param course the course for which to check for conflicts
 * @param c the course to check against
 * @return true if the time ranges of the two courses overlap
 */
public boolean timeConflicts(Course course, Course c) {
    String pattern = "hh:mmaa";
    DateTime courseStart = DateTime.parse(course.getTime().substring(0, course.getTime().indexOf('-')),
            DateTimeFormat.forPattern(pattern));
    DateTime courseEnd = DateTime.parse(course.getTime().substring(course.getTime().indexOf('-') + 1),
            DateTimeFormat.forPattern(pattern));
    Interval courseInterval = new Interval(courseStart, courseEnd);
    DateTime cStart = DateTime.parse(c.getTime().substring(0, c.getTime().indexOf('-')),
            DateTimeFormat.forPattern(pattern));
    DateTime cEnd = DateTime.parse(c.getTime().substring(c.getTime().indexOf('-') + 1),
            DateTimeFormat.forPattern(pattern));
    Interval cInterval = new Interval(cStart, cEnd);
    return courseInterval.overlaps(cInterval);
}
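// A self-contained sketch of the same time-only parsing and overlap check, assuming Joda-Time
// and English AM/PM markers (locale pinned explicitly so parsing is stable); the Course
// accessors used in the method above are not needed here.
import java.util.Locale;

import org.joda.time.Interval;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

public class TimeConflictDemo {
    public static void main(String[] args) {
        DateTimeFormatter fmt = DateTimeFormat.forPattern("hh:mmaa").withLocale(Locale.ENGLISH);
        // Both intervals land on the formatter's default date, so only the time-of-day matters
        Interval first = new Interval(fmt.parseDateTime("09:00AM"), fmt.parseDateTime("10:15AM"));
        Interval second = new Interval(fmt.parseDateTime("10:00AM"), fmt.parseDateTime("11:00AM"));
        System.out.println(first.overlaps(second)); // true: 10:00-10:15 is shared
    }
}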
/**
 * Makes sure that the correct number of emoji mentions is returned.
 */
@Test
public void testGetAllMentions() {
    Interval timeInterval = new Interval(mentionDate, mentionDate.plusHours(3));

    List<EmojiEntity> result = underTest.getAllMentions(timeInterval, ImmutableList.of(), ImmutableList.of(), true);
    assertEquals(4, result.size());

    result = underTest.getAllMentions(timeInterval, ImmutableList.of("room1"), ImmutableList.of(), true);
    assertEquals(3, result.size());

    result = underTest.getAllMentions(timeInterval, ImmutableList.of("room1"), ImmutableList.of("giannis"), true);
    assertEquals(2, result.size());
}
/**
 * Find a valid time grain from a druid segment's time interval and set it on the {@link TableConfig}.
 *
 * {
 *     ...,
 *     "interval": "2015-09-12T00:00:00.000Z/2015-09-13T00:00:00.000Z",
 *     ...
 * }
 *
 * @param tableConfig The TableConfig to be loaded.
 * @param segmentJson The JsonNode containing a time interval.
 */
private void loadTimeGrains(TableConfig tableConfig, JsonNode segmentJson) {
    JsonNode timeInterval = segmentJson.get("interval");
    String[] utcTimes = timeInterval.asText().split("/");
    Optional<TimeGrain> timeGrain = Optional.empty();
    try {
        if (utcTimes.length == 2) {
            DateTime start = new DateTime(utcTimes[0], DateTimeZone.UTC);
            DateTime end = new DateTime(utcTimes[1], DateTimeZone.UTC);
            Interval interval = new Interval(start.toInstant(), end.toInstant());
            timeGrain = IntervalUtils.getTimeGrain(interval);
        }
    } catch (IllegalArgumentException ignored) {
        LOG.warn("Unable to parse time intervals {} correctly", Arrays.toString(utcTimes));
    }
    if (!timeGrain.isPresent()) {
        LOG.warn("Couldn't detect timegrain for {}, defaulting to DAY TimeGrain.", timeInterval.asText());
    }
    tableConfig.setTimeGrain(timeGrain.orElse(DefaultTimeGrain.DAY));
}
private List<PivotTableRow> executeCountQuery(
        final Interval interval,
        final Optional<GameSpecies> gameSpeciesOpt,
        final Optional<Organisation> rkaOpt) {
    final String queryStr = nativeQueryForHarvestCounts(gameSpeciesOpt.isPresent(), rkaOpt.isPresent());

    final MapSqlParameterSource queryParams = new MapSqlParameterSource();
    queryParams.addValue("beginTime", interval.getStart().toDate(), Types.TIMESTAMP);
    queryParams.addValue("endTime", interval.getEnd().toDate(), Types.TIMESTAMP);

    if (gameSpeciesOpt.isPresent()) {
        queryParams.addValue("gameSpeciesId", gameSpeciesOpt.get().getId());
    }
    if (rkaOpt.isPresent()) {
        queryParams.addValue("rkaId", rkaOpt.get().getId());
    }

    return jdbcTemplate.query(queryStr, queryParams, (resultSet, i) -> new PivotTableRow(resultSet));
}
@Test
public void testSuccess_lrp() throws Exception {
    persistResource(
            Registry.get("tld")
                    .asBuilder()
                    .setLrpPeriod(new Interval(clock.nowUtc().minusDays(1), clock.nowUtc().plusDays(1)))
                    .build());
    LrpTokenEntity token = persistResource(
            new LrpTokenEntity.Builder()
                    .setToken("lrptokentest")
                    .setAssignee("example.tld")
                    .setValidTlds(ImmutableSet.of("tld"))
                    .build());
    setEppInput("domain_create_lrp.xml");
    persistContactsAndHosts();
    runFlowAssertResponse(loadFile("domain_create_response.xml"));
    assertSuccessfulCreate("tld", false);
    assertNoLordn();
    assertThat(ofy().load().entity(token).now().getRedemptionHistoryEntry()).isNotNull();
}
public static Query getGroupByQuery() {
    List<DimFilter> filters = new ArrayList<>();
    filters.add(DimFilters.dimEquals("report", "URLTransaction"));
    filters.add(DimFilters.dimEquals("pool", "r1cart"));
    filters.add(DimFilters.dimEquals("metric", "Duration"));
    return GroupByQuery.builder()
            .setDataSource("test")
            .setQuerySegmentSpec(QuerySegmentSpecs.create(new Interval(0, new DateTime().getMillis())))
            .setGranularity(QueryGranularity.NONE)
            .addDimension("URL")
            .addAggregator(new LongSumAggregatorFactory("agg_count", "agg_count"))
            .addAggregator(new DoubleMaxAggregatorFactory("agg_max", "agg_max"))
            .addAggregator(new DoubleMinAggregatorFactory("agg_min", "agg_min"))
            .addAggregator(new DoubleSumAggregatorFactory("agg_sum", "agg_sum"))
            .setDimFilter(DimFilters.and(filters))
            .build();
}
public ForecastErrorsAnalysisParameters(DateTime baseCaseDate, Interval histoInterval, String feAnalysisId,
                                        double ir, Integer flagPQ, Integer method, Integer nClusters,
                                        double percentileHistorical, Integer modalityGaussian, Integer outliers,
                                        Integer conditionalSampling, Integer nSamples, Set<Country> countries,
                                        CaseType caseType) {
    this.baseCaseDate = baseCaseDate;
    this.histoInterval = histoInterval;
    this.feAnalysisId = feAnalysisId;
    this.ir = ir;
    this.flagPQ = flagPQ;
    this.method = method;
    this.nClusters = nClusters;
    this.percentileHistorical = percentileHistorical;
    this.modalityGaussian = modalityGaussian;
    this.outliers = outliers;
    this.conditionalSampling = conditionalSampling;
    this.nSamples = nSamples;
    this.countries = countries;
    this.caseType = caseType;
}
/**
 * Filter criteria:
 * 1) Person must have occupation in ClubGroup
 * 2) Person must have occupation in Club (to ignore invitations)
 * 3) Harvest location must intersect with defined area for ClubGroup
 *    OR harvest must be linked to GroupHuntingDay
 * 4) OR Harvest is rejected by the group
 */
@Override
@Transactional(readOnly = true)
public List<Harvest> findGroupHarvest(final HuntingClubGroup huntingClubGroup, final Interval interval) {
    final SQHarvest harvest = new SQHarvest("harvest");
    final QHarvest harvestEntity = new QHarvest("harvest");
    final SQGroupHarvestRejection rejection = groupHarvestRejection;

    final SubQueryExpression<Long> subQuery1 =
            harvestForGroupMemberInsideGroupHuntingArea(huntingClubGroup.getId(), interval);
    final SubQueryExpression<Long> subQuery2 = harvestLinkedToGroupHuntingDay(huntingClubGroup.getId());

    final BooleanExpression rejected = SQLExpressions.selectOne().from(rejection)
            .where(rejection.huntingClubGroupId.eq(huntingClubGroup.getId())
                    .and(rejection.harvestId.eq(harvest.harvestId)))
            .exists();

    return new JPASQLQuery<Harvest>(entityManager, sqlTemplates)
            .from(harvest).select(harvestEntity)
            .where(harvest.harvestId.in(union(asList(subQuery1, subQuery2))).or(rejected))
            .orderBy(harvest.pointOfTime.desc(), harvest.harvestId.desc())
            .fetch();
}
public void onReadClick() {
    if (ssQuarterlyInfo != null && ssQuarterlyInfo.lessons.size() > 0) {
        DateTime today = DateTime.now();
        String ssLessonIndex = ssQuarterlyInfo.lessons.get(0).index;
        for (SSLesson ssLesson : ssQuarterlyInfo.lessons) {
            DateTime startDate = DateTimeFormat.forPattern(SSConstants.SS_DATE_FORMAT)
                    .parseLocalDate(ssLesson.start_date).toDateTimeAtStartOfDay();
            DateTime endDate = DateTimeFormat.forPattern(SSConstants.SS_DATE_FORMAT)
                    .parseLocalDate(ssLesson.end_date).plusDays(1).toDateTimeAtStartOfDay().plusHours(12);
            if (new Interval(startDate, endDate).contains(today)) {
                ssLessonIndex = ssLesson.index;
                break;
            }
        }
        Intent ssReadingIntent = new Intent(context, SSReadingActivity.class);
        ssReadingIntent.putExtra(SSConstants.SS_LESSON_INDEX_EXTRA, ssLessonIndex);
        context.startActivity(ssReadingIntent);
    }
}
@Test
public void defaultValues() {
    assertNull(sut.getId());
    assertEquals("token", sut.getToken());
    assertEquals(0, sut.getRequests());
    assertEquals(10000, sut.getMaxRequests());
    assertTrue(new Interval(
            DateTime.now().minusSeconds(5),
            DateTime.now().plusSeconds(5)).contains(sut.getCreatedAt()));
    assertTrue(DateTime.now().plusYears(1).isAfter(sut.getValidUntil()));
    assertTrue(DateTime.now().plusMonths(11).isBefore(sut.getValidUntil()));
}
public static boolean contains(List<Interval> intervals, DateTime query) {
    try {
        for (Interval in : intervals) {
            if (in.contains(query)) {
                return true;
            }
        }
        return false;
    } catch (Exception e) {
        log.warn("Failed to check whether {} is contained within {}", query, intervals);
        return false;
    }
}
public static boolean contains(List<Interval> intervals, Interval query) {
    // Despite the name, this overload returns true if the query interval overlaps any of the given intervals.
    try {
        for (Interval in : intervals) {
            if (in.overlaps(query)) {
                return true;
            }
        }
        return false;
    } catch (Exception e) {
        log.warn("Failed to check whether {} overlaps with {}", query, intervals);
        return false;
    }
}
private List<VehicleStatus> getInavail(Map<String, Map<String, List<MBPlaceInavailability>>> map, MBPlaceEntity s) {
    // Get all booking target inavailabilities at a station
    Map<String, List<MBPlaceInavailability>> forAPlace = map.getOrDefault(s.getProviderId(), Collections.emptyMap())
            .getOrDefault(s.getPlaceId(), Collections.emptyList())
            .stream()
            .collect(Collectors.groupingBy(MBPlaceInavailability::getBookingTargetId));

    List<VehicleStatus> vehicleStatusList = new ArrayList<>(forAPlace.size());
    for (Map.Entry<String, List<MBPlaceInavailability>> entry : forAPlace.entrySet()) {
        String bookingTargetId = entry.getKey();
        List<MBPlaceInavailability> values = entry.getValue();
        List<Interval> intervalList = new ArrayList<>(values.size());
        for (MBPlaceInavailability item : values) {
            Interval interval = item.getInavailability();
            if (interval != null) {
                intervalList.add(interval);
            }
        }
        vehicleStatusList.add(new VehicleStatus(bookingTargetId, intervalList));
    }
    return vehicleStatusList;
}
@Nullable
public static Interval getInterval(@Nullable String from, @Nullable String to) {
    if (isNullOrEmpty(from) || isNullOrEmpty(to)) {
        return null;
    }
    DateTime fromLdt = FORMATTER.parseDateTime(from);
    DateTime toLdt = FORMATTER.parseDateTime(to);
    return new Interval(fromLdt, toLdt);
}
@Override
public boolean overlaps(int stayTime, RouteLegList legs, SharingStation bsStart, RouteLegWrapper carWrapper) {
    // We always set stay time. Depending on DurationCheckStrategy it will be used or not.
    carWrapper.setStayTime(stayTime);
    int duration = durationCheckStrategy.getDurationToCheck(carWrapper);
    Interval interval = legs.getIntervalAfterPossibleLeg(duration);
    return overlaps(bsStart, interval);
}
@Override
public boolean overlaps(int stayTime, RouteLegList legs, SharingStation startStation, RouteLegWrapper bikeWrapper) {
    // We always set stay time. Depending on DurationCheckStrategy it will be used or not.
    bikeWrapper.setStayTime(stayTime);

    // -------------------------------------------------------------------------
    // 1) If in future, be optimistic and assume always available
    // -------------------------------------------------------------------------
    Interval nowTimeWindow = DateTimeUtils.getNowTimeWindow();
    DateTime timeAtStartStation = legs.getAfterLastLeg();
    boolean startIsNow = nowTimeWindow.contains(timeAtStartStation);
    if (!startIsNow) {
        return false;
    }

    // -------------------------------------------------------------------------
    // 2) Check actual intervals for availability
    // -------------------------------------------------------------------------
    int duration = durationCheckStrategy.getDurationToCheck(bikeWrapper);
    Interval interval = legs.getIntervalAfterPossibleLeg(duration);
    return overlaps(startStation, interval);
}
@Transient
private List<OpeningHours> getDefaultWorkingHours(LocalDate date) {
    String day = date.dayOfWeek().getAsText(Locale.ENGLISH);
    List<OpeningHours> hours = new ArrayList<>();
    defaultWorkingHours.stream()
            .filter(dwh -> dwh.getWeekday().equalsIgnoreCase(day))
            .collect(Collectors.toList())
            .forEach(dwh -> {
                DateTime midnight = date.toDateTimeAtStartOfDay();
                DateTime start = midnight.withMillisOfDay(DateTimeUtils
                        .resolveStartWorkingHourMillis(new DateTime(dwh.getStartTime()), dwh.getTimezoneOffset()));
                DateTime end = midnight.withMillisOfDay(DateTimeUtils
                        .resolveEndWorkingHourMillis(new DateTime(dwh.getEndTime()), dwh.getTimezoneOffset()));
                Interval interval = new Interval(start, end);
                hours.add(new OpeningHours(interval, dwh.getTimezoneOffset()));
            });
    return hours;
}
@Transient
public List<OpeningHours> getWorkingHoursForDate(LocalDate date) {
    List<OpeningHours> workingHours = getDefaultWorkingHours(date);
    List<Interval> extensionEvents = DateTimeUtils.mergeSlots(DateTimeUtils.getExceptionEvents(
            calendarExceptionEvents, date, DateTimeUtils.RestrictionType.NON_RESTRICTIVE));
    List<Interval> restrictionEvents = DateTimeUtils.mergeSlots(DateTimeUtils.getExceptionEvents(
            calendarExceptionEvents, date, DateTimeUtils.RestrictionType.RESTRICTIVE));
    List<OpeningHours> availableHours = new ArrayList<>();
    if (!extensionEvents.isEmpty()) {
        List<Interval> unifiedIntervals = new ArrayList<>();
        for (OpeningHours oh : workingHours) {
            unifiedIntervals.add(oh.getHours());
        }
        unifiedIntervals.addAll(extensionEvents);
        unifiedIntervals = DateTimeUtils.mergeSlots(unifiedIntervals);
        int tzOffset;
        if (workingHours.isEmpty()) {
            // Use the start of the day as the instant for resolving the zone offset;
            // a LocalDate cannot be passed to the DateTime constructor directly.
            tzOffset = DateTimeZone.forID(localTimezone).getOffset(date.toDateTimeAtStartOfDay());
        } else {
            tzOffset = workingHours.get(0).timezoneOffset;
        }
        workingHours.clear();
        workingHours.addAll(unifiedIntervals.stream()
                .map(interval -> new OpeningHours(interval, tzOffset))
                .collect(Collectors.toList()));
    }
    if (!restrictionEvents.isEmpty()) {
        for (OpeningHours hours : workingHours) {
            Interval slot = hours.getHours();
            for (Interval gap : DateTimeUtils.findGaps(restrictionEvents, slot)) {
                availableHours.add(new OpeningHours(gap, hours.getTimezoneOffset()));
            }
        }
    } else {
        availableHours = workingHours;
    }
    return availableHours;
}
protected Optional<ExamMachine> getRandomMachine(ExamRoom room, Exam exam, DateTime start, DateTime end,
                                                 Collection<Integer> aids) {
    List<ExamMachine> machines = getEligibleMachines(room, aids, exam);
    Collections.shuffle(machines);
    Interval wantedTime = new Interval(start, end);
    for (ExamMachine machine : machines) {
        if (!machine.isReservedDuring(wantedTime)) {
            return Optional.of(machine);
        }
    }
    return Optional.empty();
}
/**
 * @return all intervals that fall within provided working hours
 */
private static Iterable<Interval> allSlots(Iterable<ExamRoom.OpeningHours> openingHours, ExamRoom room,
                                           LocalDate date) {
    Collection<Interval> intervals = new ArrayList<>();
    List<ExamStartingHour> startingHours = room.getExamStartingHours();
    if (startingHours.isEmpty()) {
        // Default to 1 hour slots that start at the hour
        startingHours = createDefaultStartingHours(room.getLocalTimezone());
    }
    Collections.sort(startingHours);
    DateTime now = DateTime.now().plusMillis(DateTimeZone.forID(room.getLocalTimezone()).getOffset(DateTime.now()));
    for (ExamRoom.OpeningHours oh : openingHours) {
        int tzOffset = oh.getTimezoneOffset();
        DateTime instant = now.getDayOfYear() == date.getDayOfYear() ? now : oh.getHours().getStart();
        DateTime slotEnd = oh.getHours().getEnd();
        DateTime beginning = nextStartingTime(instant, startingHours, tzOffset);
        while (beginning != null) {
            DateTime nextBeginning = nextStartingTime(beginning.plusMillis(1), startingHours, tzOffset);
            if (beginning.isBefore(oh.getHours().getStart())) {
                beginning = nextBeginning;
                continue;
            }
            if (nextBeginning != null && !nextBeginning.isAfter(slotEnd)) {
                intervals.add(new Interval(beginning.minusMillis(tzOffset), nextBeginning.minusMillis(tzOffset)));
                beginning = nextBeginning;
            } else if (beginning.isBefore(slotEnd)) {
                // We have some spare time in the end, take it as well
                intervals.add(new Interval(beginning.minusMillis(tzOffset), slotEnd.minusMillis(tzOffset)));
                break;
            } else {
                break;
            }
        }
    }
    return intervals;
}
public TimeSlot(Interval interval, int machineCount, String exam) {
    start = ISODateTimeFormat.dateTime().print(interval.getStart());
    end = ISODateTimeFormat.dateTime().print(interval.getEnd());
    availableMachines = machineCount;
    ownReservation = machineCount < 0;
    conflictingExam = exam;
}
private Set<TimeSlot> postProcessSlots(JsonNode node, String date, Exam exam, User user) {
    // Filter out slots for which the user has a conflicting reservation
    if (node.isArray()) {
        ArrayNode root = (ArrayNode) node;
        LocalDate searchDate = LocalDate.parse(date, ISODateTimeFormat.dateParser());
        // The user's reservations starting from now
        List<Reservation> reservations = Ebean.find(Reservation.class)
                .fetch("enrolment.exam")
                .where()
                .eq("user", user)
                .gt("startAt", searchDate.toDate())
                .findList();
        DateTimeFormatter dtf = ISODateTimeFormat.dateTimeParser();
        Stream<JsonNode> stream = StreamSupport.stream(root.spliterator(), false);
        Map<Interval, Optional<Integer>> map = stream.collect(Collectors.toMap(n -> {
            DateTime start = dtf.parseDateTime(n.get("start").asText());
            DateTime end = dtf.parseDateTime(n.get("end").asText());
            return new Interval(start, end);
        }, n -> Optional.of(n.get("availableMachines").asInt()), (u, v) -> {
            throw new IllegalStateException(String.format("Duplicate key %s", u));
        }, LinkedHashMap::new));
        return handleReservations(map, reservations, exam, null, user);
    }
    return Collections.emptySet();
}
public static List<Interval> findGaps(List<Interval> reserved, Interval searchInterval) {
    List<Interval> gaps = new ArrayList<>();
    DateTime searchStart = searchInterval.getStart();
    DateTime searchEnd = searchInterval.getEnd();
    if (hasNoOverlap(reserved, searchStart, searchEnd)) {
        gaps.add(searchInterval);
        return gaps;
    }
    // Create a sub-list that excludes intervals which do not overlap with searchInterval
    List<Interval> subReservedList = removeNonOverlappingIntervals(reserved, searchInterval);
    DateTime subEarliestStart = subReservedList.get(0).getStart();
    DateTime subLatestEnd = subReservedList.get(subReservedList.size() - 1).getEnd();
    // In case the searchInterval starts before the earliest existing interval,
    // include searchInterval.start => earliestExisting.start
    if (searchStart.isBefore(subEarliestStart)) {
        gaps.add(new Interval(searchStart, subEarliestStart));
    }
    // Get all the gaps in the existing list
    gaps.addAll(getExistingIntervalGaps(subReservedList));
    // Include latestExisting.end => searchInterval.end
    if (searchEnd.isAfter(subLatestEnd)) {
        gaps.add(new Interval(subLatestEnd, searchEnd));
    }
    return gaps;
}
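// A companion sketch for findGaps, under the same assumption that it is hosted on the
// DateTimeUtils helper referenced above: two reservations inside an 08:00-16:00 window
// leave gaps before, between, and after them.
import java.util.ArrayList;
import java.util.List;

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class FindGapsDemo {
    public static void main(String[] args) {
        DateTime base = new DateTime(2020, 1, 1, 8, 0);
        Interval workingHours = new Interval(base, base.plusHours(8));    // 08:00-16:00
        List<Interval> reserved = new ArrayList<>();
        reserved.add(new Interval(base.plusHours(1), base.plusHours(2))); // 09:00-10:00
        reserved.add(new Interval(base.plusHours(4), base.plusHours(5))); // 12:00-13:00
        // Expected gaps: 08:00-09:00, 10:00-12:00 and 13:00-16:00
        System.out.println(DateTimeUtils.findGaps(reserved, workingHours));
    }
}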