@Test
public final void runTestWithDefaultFormatterConstant() throws Exception {
  populateDefaultFormatter();
  final String sql = "select date '2016-01-01' from elasticsearch." + schema + "." + table;
  verifyJsonInPlan(sql, new String[] {
      "[{\n" +
      " \"from\" : 0,\n" +
      " \"size\" : 4000,\n" +
      " \"query\" : {\n" +
      " \"match_all\" : { }\n" +
      " },\n" +
      " \"_source\" : {\n" +
      " \"includes\" : [ ],\n" +
      " \"excludes\" : [ ]\n" +
      " }\n" +
      "}]"});
  testBuilder()
    .sqlQuery(sql)
    .unOrdered()
    .baselineColumns("EXPR$0")
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2016-01-01 00:00:00.000")))
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2016-01-01 00:00:00.000")))
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2016-01-01 00:00:00.000")))
    .go();
}
/**
 * Calculates a list of progressive radius of gyration values based on the given time
 * unit. Currently, only day is supported.
 *
 * @param traces
 *            location traces of an individual
 * @param unit
 *            spatial distance unit
 * @param timeUnit
 *            time unit for the radius of gyration calculation. Only day is supported.
 * @return an array of calculated radius of gyration values.
 * @throws TimeUnitNotSupportedException
 */
public double[] calculateRadiusOfGyrationOverTime(List<LocationTrace> traces,
        SpatialDistanceUnit unit, TimeUnit timeUnit) throws TimeUnitNotSupportedException {
    if (timeUnit != TimeUnit.DAYS) {
        throw new TimeUnitNotSupportedException(
                timeUnit + " is not supported. Please pass days as time unit.");
    }
    LocationTraceHelper traceHelper = new LocationTraceHelper();
    List<LocationTrace> selectedTraces;
    LocalDateTime firstTraceTime = traces.get(0).getLocalTime().minusMinutes(1);
    LocalDateTime lastTraceTime = traces.get(traces.size() - 1).getLocalTime();
    double[] rogResults;
    LocalDateTime currentEndDate;
    int numberOfDays = Days.daysBetween(firstTraceTime, lastTraceTime).getDays();
    rogResults = new double[numberOfDays - 1];
    for (int i = 1; i < numberOfDays; i++) {
        currentEndDate = firstTraceTime.plusDays(i).withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0);
        selectedTraces = traceHelper.selectBetweenDates(traces, firstTraceTime, currentEndDate);
        rogResults[i - 1] = calculateRadiusOfGyration(selectedTraces);
    }
    return rogResults;
}
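The per-day calculation above delegates to calculateRadiusOfGyration, which is not shown here. As a hedged, self-contained sketch of the usual formula (root-mean-square distance of the points from their centroid), the helper below works on plain latitude/longitude arrays; the equirectangular distance approximation and the method name are assumptions for illustration, not the project's actual API.

static double radiusOfGyrationSketch(double[] latDeg, double[] lonDeg) {
    int n = latDeg.length;
    double meanLat = 0, meanLon = 0;
    for (int i = 0; i < n; i++) {
        meanLat += latDeg[i];
        meanLon += lonDeg[i];
    }
    meanLat /= n;
    meanLon /= n;

    double sumSq = 0;
    final double earthRadiusM = 6_371_000d;
    for (int i = 0; i < n; i++) {
        // equirectangular approximation of the distance (in meters) to the centroid
        double dLat = Math.toRadians(latDeg[i] - meanLat);
        double dLon = Math.toRadians(lonDeg[i] - meanLon) * Math.cos(Math.toRadians(meanLat));
        double dist = earthRadiusM * Math.sqrt(dLat * dLat + dLon * dLon);
        sumSq += dist * dist;
    }
    // root-mean-square distance from the centroid
    return Math.sqrt(sumSq / n);
}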
public void testThatEpochTimePrinterWorks() {
    StringBuffer buffer = new StringBuffer();
    LocalDateTime now = LocalDateTime.now();

    Joda.EpochTimePrinter epochTimePrinter = new Joda.EpochTimePrinter(false);
    epochTimePrinter.printTo(buffer, now, Locale.ROOT);
    assertThat(buffer.length(), is(10));
    // only check the last digit, as seconds go from 0-99 in the unix timestamp and don't stop at 60
    assertThat(buffer.toString(), endsWith(String.valueOf(now.getSecondOfMinute() % 10)));

    buffer = new StringBuffer();
    Joda.EpochTimePrinter epochMilliSecondTimePrinter = new Joda.EpochTimePrinter(true);
    epochMilliSecondTimePrinter.printTo(buffer, now, Locale.ROOT);
    assertThat(buffer.length(), is(13));
    assertThat(buffer.toString(), endsWith(String.valueOf(now.getMillisOfSecond())));
}
public CertificateExpiryStatus getExpiryStatus(Duration warningPeriod) {
    try {
        Date notAfter = getNotAfter();
        LocalDateTime now = LocalDateTime.now();
        Date notBefore = getNotBefore();
        if (now.toDate().after(notAfter) || now.toDate().before(notBefore)) {
            return CertificateExpiryStatus.CRITICAL;
        }
        if (now.plus(warningPeriod).toDate().after(notAfter)) {
            return CertificateExpiryStatus.WARNING;
        }
        return CertificateExpiryStatus.OK;
    } catch (CertificateException e) {
        return CertificateExpiryStatus.CRITICAL;
    }
}
@Test
public final void runTestWithDateFormatter() throws Exception {
  populateDateFormatter();
  final String sql = "select field from elasticsearch." + schema + "." + table;
  verifyJsonInPlan(sql, new String[] {
      "[{\n" +
      " \"from\" : 0,\n" +
      " \"size\" : 4000,\n" +
      " \"query\" : {\n" +
      " \"match_all\" : { }\n" +
      " },\n" +
      " \"_source\" : {\n" +
      " \"includes\" : [ \"field\" ],\n" +
      " \"excludes\" : [ ]\n" +
      " }\n" +
      "}]"});
  testBuilder()
    .sqlQuery(sql)
    .unOrdered()
    .baselineColumns("field")
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2016-12-09 00:00:00.000")))
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2017-10-11 00:00:00.000")))
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2017-01-01 00:00:00.000")))
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2017-01-01 00:00:00.000")))
    .go();
}
@Test
public void testNotesWithTimesWithRepeater() {
    shelfTestUtils.setupBook("notebook",
            "* Note 1\n" +
            "SCHEDULED: <2017-03-10 Fri +1w>\n" +
            "* Note 2\n" +
            "SCHEDULED: <2017-03-20 Mon 16:00>\n" +
            "* Note 3\n" +
            "* Note 4\n" +
            "SCHEDULED: <2017-03-16 Fri +1w>\n");

    ReminderService.LastRun lastRun = new ReminderService.LastRun();
    Instant now = Instant.parse("2017-03-15T13:00:00"); // Wed

    AppPreferences.remindersForScheduledEnabled(context, true);
    List<NoteReminder> notes = ReminderService.getNoteReminders(
            context, now, lastRun, ReminderService.TIME_FROM_NOW);

    assertEquals(2, notes.size());
    assertEquals("Note 4", notes.get(0).getPayload().title);
    assertEquals("2017-03-16T09:00:00",
            new LocalDateTime(notes.get(0).getRunTime()).toString("yyyy-MM-dd'T'HH:mm:ss"));
    assertEquals("Note 2", notes.get(1).getPayload().title);
    assertEquals("2017-03-20T16:00:00",
            new LocalDateTime(notes.get(1).getRunTime()).toString("yyyy-MM-dd'T'HH:mm:ss"));
}
private String generateData(boolean printHeader, boolean longTimestamp, String Timezone, String dataFormat) {
    LocalDateTime queryTime = LocalDateTime.now();
    if (isTest) queryTime = LocalDateTime.parse("2016-01-01T00:00:00.000");

    // Get the time Values for the current time
    scala.collection.Iterable<Tuple3<String, LocalDateTime, Object>> data =
            SimController.getTimeValue(simConfig.timeSeries(), queryTime);

    // Convert the Scala Iterable to a Java one
    Iterable<Tuple3<String, LocalDateTime, Object>> generatedValues =
            JavaConverters.asJavaIterableConverter(data).asJava();

    String resultString = "";
    if (dataFormat.equals("CSV")) {
        resultString = createCsv(printHeader, longTimestamp, Timezone, generatedValues);
    } else if (dataFormat.equals("JSON")) {
        resultString = generateJson(longTimestamp, Timezone, generatedValues);
    }
    return resultString;
}
private String generateJson(boolean longTimestamp, String Timezone,
        Iterable<Tuple3<String, LocalDateTime, Object>> generatedValues) {
    DataValue value = new DataValue();
    generatedValues.forEach(tv -> {
        String dataValue = ((Some) tv._3()).get().toString();
        String ts = tv._2().toString();
        if (longTimestamp) {
            DateTime localTime = tv._2().toDateTime(DateTimeZone.forID(Timezone));
            ts = String.valueOf(localTime.getMillis());
        }
        value.setTimeStamp(ts);
        value.addValue(tv._1(), dataValue);
    });

    String output = "";
    try {
        output = mapper.writeValueAsString(value);
    } catch (JsonProcessingException e) {
        getLogger().error("Error generating JSON: " + e.getMessage());
    }
    return output;
}
@Test
public final void runTestProjectTimestamp() throws Exception {
  String sqlQuery = "select `datefield` from elasticsearch." + schema + "." + table;
  verifyJsonInPlan(sqlQuery, new String[] {
      "=[{\n" +
      " \"from\" : 0,\n" +
      " \"size\" : 4000,\n" +
      " \"query\" : {\n" +
      " \"match_all\" : { }\n" +
      " },\n" +
      " \"_source\" : {\n" +
      " \"includes\" : [ \"datefield\" ],\n" +
      " \"excludes\" : [ ]\n" +
      " }\n" +
      "}])"});
  testBuilder().sqlQuery(sqlQuery).unOrdered().baselineColumns("datefield")
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2014-02-10 10:50:42")))
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2014-02-11 10:50:42")))
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2014-02-12 10:50:42")))
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2014-02-11 10:50:42")))
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2014-02-10 10:50:42")))
    .go();
}
@Override
public File nextFile() {
    StringBuilder sb = new StringBuilder();
    String date = formatter.print(LocalDateTime.now());
    if (!date.equals(lastRoll)) {
        getFileIndex().set(0);
        lastRoll = date;
    }
    sb.append(getPrefix()).append(date).append("-");
    sb.append(getFileIndex().incrementAndGet());
    if (getExtension().length() > 0) {
        sb.append(".").append(getExtension());
    }
    currentFile = new File(getBaseDirectory(), sb.toString());
    return currentFile;
}
/**
 * Returns the property value object converted to the appropriate type according to the
 * property definition in the schema.<br/>
 * For user data, Boolean property values are converted to strings.
 * @param prop property object
 * @param edmType property definition in the schema
 * @return property value object converted to the appropriate type
 */
@Override
@SuppressWarnings("unchecked")
protected Object getSimpleValue(OProperty<?> prop, EdmType edmType) {
    if (edmType.equals(EdmSimpleType.DATETIME)) {
        OProperty<LocalDateTime> propD = (OProperty<LocalDateTime>) prop;
        LocalDateTime ldt = propD.getValue();
        if (ldt != null) {
            return ldt.toDateTime().getMillis();
        }
    }
    // Convert Boolean/Double property values to strings
    if (prop.getValue() != null
            && (edmType.equals(EdmSimpleType.BOOLEAN) || edmType.equals(EdmSimpleType.DOUBLE))) {
        return String.valueOf(prop.getValue());
    }
    return prop.getValue();
}
@Override
public Object convert(@SuppressWarnings("rawtypes") Class clazz, Object arg1) {
    if (arg1 instanceof LocalDate) {
        return JODA_DATE_FORMAT.print((LocalDate) arg1);
    } else if (arg1 instanceof LocalDateTime) {
        return JODA_DATETIME_FORMAT.print((LocalDateTime) arg1);
    } else if (arg1 instanceof Timestamp) {
        return String.format(TIMESTAMP_FORMAT_STRING_WITH_NANOS, arg1, ((Timestamp) arg1).getNanos());
    } else if (arg1 instanceof Date) {
        // we need to create the date format object here, because it is not thread safe.
        DateFormat JDK_DATETIME_FORMAT = new SimpleDateFormat(DATE_TIME_FORMAT_STRING);
        return JDK_DATETIME_FORMAT.format((Date) arg1);
    } else {
        return arg1.toString();
    }
}
public static boolean castToInteger(FieldReader reader, NullableBigIntHolder out) {
    Object o = reader.readObject();
    if (o instanceof Number) {
        out.value = ((Number) o).longValue();
        return true;
    } else if (o instanceof Boolean) {
        out.value = ((Boolean) o).booleanValue() ? 1 : 0;
        return true;
    } else if (o instanceof LocalDateTime) {
        out.value = toMillis((LocalDateTime) o);
        return true;
    } else if (o instanceof Text) {
        try {
            String s = Text.decode(((Text) o).getBytes(), 0, ((Text) o).getLength());
            return parseLong(s, out);
        } catch (CharacterCodingException e) {
            // TODO: is this the best way?
            logger.warn("Can't decode text", e);
            return false;
        }
    } else if (o instanceof byte[]) {
        return false; // TODO
    }
    return false;
}
/**
 * Constructor.
 * @param cell cell object
 * @param entity OEntity object
 */
public Box(final Cell cell, final OEntity entity) {
    this.cell = cell;
    if (entity == null) {
        // Process for the MAIN BOX
        this.name = Box.DEFAULT_BOX_NAME;
        // Schema URL of MAIN BOX is the URL of its own cell
        this.schema = cell.getUrl();
        // Internal ID of MAIN BOX is the same as the ID of the cell.
        this.id = cell.getId();
        return;
    }
    this.name = (String) entity.getProperty("Name").getValue();
    this.schema = (String) entity.getProperty(P_SCHEMA.getName()).getValue();
    if (entity instanceof OEntityWrapper) {
        OEntityWrapper oew = (OEntityWrapper) entity;
        this.id = oew.getUuid();
    }
    LocalDateTime dateTime = (LocalDateTime) entity.getProperty(Common.P_PUBLISHED.getName()).getValue();
    this.published = dateTime.toDateTime().getMillis();
}
@Test @Ignore("decimal") public void testToTimeStamp() throws Exception { String query = "select to_timestamp(cast('800120400.12312' as decimal(38, 5))) as DEC38_TS, to_timestamp(200120400) as INT_TS " + "from cp.`employee.json` where employee_id < 2"; LocalDateTime result1 = new LocalDateTime(800120400123l, UTC); LocalDateTime result2 = new LocalDateTime(200120400000l, UTC); testBuilder() .sqlQuery(query) .unOrdered() .baselineColumns("DEC38_TS", "INT_TS") .baselineValues(result1, result2) .go(); }
public void startActivityWithAction(final String intentAction) {
    final Intent intent = new Intent(intentAction);
    intent.addCategory(Intent.CATEGORY_OPENABLE);
    intent.setType("text/xml");

    if (intentAction.equals(Intent.ACTION_OPEN_DOCUMENT)) {
        startActivityForResult(intent, REQUEST_CODE_SELECT_OPML_FILE);
    } else if (intentAction.equals(Intent.ACTION_CREATE_DOCUMENT)) {
        final LocalDateTime dateTime = new LocalDateTime();
        final String OPMLExportTitle = String.format("%s-%s:%s:%s:%s_%s.%s.%s",
                TAG.toLowerCase(),
                dateTime.getHourOfDay(),
                dateTime.getMinuteOfHour(),
                dateTime.getSecondOfMinute(),
                dateTime.getMillisOfSecond(),
                dateTime.getDayOfMonth(),
                dateTime.getMonthOfYear(),
                dateTime.getYear());
        intent.putExtra(Intent.EXTRA_TITLE, OPMLExportTitle);
        startActivityForResult(intent, REQUEST_CODE_CREATE_ALTER_OPML_FILE);
    }
}
@Test
public void convert() throws Exception {
    String dateString = "1985-09-03 13:30";

    LocalTime localTime = (LocalTime) converter.convert(dateString, TypeToken.of(LocalTime.class));
    assertEquals(13, localTime.getHourOfDay());
    assertEquals(30, localTime.getMinuteOfHour());

    LocalDate localDate = (LocalDate) converter.convert(dateString, TypeToken.of(LocalDate.class));
    assertEquals(1985, localDate.getYear());
    assertEquals(9, localDate.getMonthOfYear());
    assertEquals(3, localDate.getDayOfMonth());

    LocalDateTime localDateTime = (LocalDateTime) converter.convert(dateString, TypeToken.of(LocalDateTime.class));
    assertEquals(13, localDateTime.getHourOfDay());
    assertEquals(30, localDateTime.getMinuteOfHour());
    assertEquals(1985, localDateTime.getYear());
    assertEquals(9, localDateTime.getMonthOfYear());
    assertEquals(3, localDateTime.getDayOfMonth());
}
@Test
public void testDateTrunc() throws Exception {
    String query = "select " +
        "date_trunc('MINUTE', time '2:30:21.5') as TIME1, " +
        "date_trunc('SECOND', time '2:30:21.5') as TIME2, " +
        "date_trunc('HOUR', timestamp '1991-05-05 10:11:12.100') as TS1, " +
        "date_trunc('SECOND', timestamp '1991-05-05 10:11:12.100') as TS2, " +
        "date_trunc('MONTH', date '2011-2-2') as DATE1, " +
        "date_trunc('YEAR', date '2011-2-2') as DATE2 " +
        "from cp.`employee.json` where employee_id < 2";

    LocalDateTime time1 = formatTime.parseLocalDateTime("2:30:00.0");
    LocalDateTime time2 = formatTime.parseLocalDateTime("2:30:21.0");
    LocalDateTime ts1 = formatTimeStampMilli.parseLocalDateTime("1991-05-05 10:00:00.0");
    LocalDateTime ts2 = formatTimeStampMilli.parseLocalDateTime("1991-05-05 10:11:12.0");
    LocalDateTime date1 = formatDate.parseLocalDateTime("2011-02-01");
    LocalDateTime date2 = formatDate.parseLocalDateTime("2011-01-01");

    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("TIME1", "TIME2", "TS1", "TS2", "DATE1", "DATE2")
        .baselineValues(time1, time2, ts1, ts2, date1, date2)
        .go();
}
@Test
public final void runTestWithTimeFormatter() throws Exception {
  populateTimeFormatter();
  final String sql = "select field from elasticsearch." + schema + "." + table;
  verifyJsonInPlan(sql, new String[] {
      "[{\n" +
      " \"from\" : 0,\n" +
      " \"size\" : 4000,\n" +
      " \"query\" : {\n" +
      " \"match_all\" : { }\n" +
      " },\n" +
      " \"_source\" : {\n" +
      " \"includes\" : [ \"field\" ],\n" +
      " \"excludes\" : [ ]\n" +
      " }\n" +
      "}]"});
  testBuilder()
    .sqlQuery(sql)
    .unOrdered()
    .baselineColumns("field")
    .baselineValues(new LocalDateTime(Timestamp.valueOf("1970-01-01 01:02:03.123")))
    .baselineValues(new LocalDateTime(Timestamp.valueOf("1970-01-01 01:02:03.345")))
    .baselineValues(new LocalDateTime(Timestamp.valueOf("1970-01-01 01:01:15.000")))
    .baselineValues(new LocalDateTime(Timestamp.valueOf("1970-01-01 01:02:03.345")))
    .go();
}
@Test
public void nextConnote_Goodcase() throws Exception {
    Long uniqueConnote = 1L;
    Connote connoteToBeSaved = new Connote(uniqueConnote);
    connoteToBeSaved.setCreated(new LocalDateTime(2017, 01, 02, 12, 03, 10));

    when(randomConnoteGeneratorMock.randomNumber()).thenReturn(342L, uniqueConnote);
    when(connoteRepositoryMock.findOne(342L)).thenReturn(new Connote(342L));
    when(connoteRepositoryMock.findOne(uniqueConnote)).thenReturn(null);
    when(connoteRepositoryMock.save(any(Connote.class))).thenReturn(connoteToBeSaved);

    ConnoteDTO connote = connoteService.createConnote();

    assertThat(connote.getConnote(), is(1L));
    verify(randomConnoteGeneratorMock, times(2)).randomNumber();
}
/**
 * Fixing some of the dates corrupted as part of DRILL-4203 requires actually looking at
 * the values stored in the file. A column with date values actually stored must be located
 * to check a value. Just because we find one column where all of the values are null does
 * not mean we can safely avoid reading date columns with auto-correction; although null
 * values do not need fixing, other columns may contain actual corrupt date values.
 *
 * This test checks the case where the first columns in the file are all null filled
 * and a later column must be found to identify that the file is corrupt.
 */
@Test
public void testReadCorruptDatesWithNullFilledColumns() throws Exception {
    testBuilder()
        .sqlQuery("select null_dates_1, null_dates_2, date_col from dfs.`" +
            PARQUET_DATE_FILE_WITH_NULL_FILLED_COLS + "`")
        .unOrdered()
        .baselineColumns("null_dates_1", "null_dates_2", "date_col")
        .baselineValues(null, null, new LocalDateTime(1970, 1, 1, 0, 0))
        .baselineValues(null, null, new LocalDateTime(1970, 1, 2, 0, 0))
        .baselineValues(null, null, new LocalDateTime(1969, 12, 31, 0, 0))
        .baselineValues(null, null, new LocalDateTime(1969, 12, 30, 0, 0))
        .baselineValues(null, null, new LocalDateTime(1900, 1, 1, 0, 0))
        .baselineValues(null, null, new LocalDateTime(2015, 1, 1, 0, 0))
        .go();
}
public ShowFilesCommandResult(String name, boolean isDirectory, boolean isFile, long length,
        String owner, String group, String permissions, long accessTime, long modificationTime) {
    this.name = name;
    this.isDirectory = isDirectory;
    this.isFile = isFile;
    this.length = length;
    this.owner = owner;
    this.group = group;
    this.permissions = permissions;

    // Get the timestamp in UTC because Dremio's internal TIMESTAMP stores time in UTC
    LocalDateTime at = new LocalDateTime(accessTime);
    this.accessTime = new Timestamp(com.dremio.common.util.DateTimes.toMillis(at));

    LocalDateTime mt = new LocalDateTime(modificationTime);
    this.modificationTime = new Timestamp(com.dremio.common.util.DateTimes.toMillis(mt));
}
@Test
public final void runTestProjectTimestamp() throws Exception {
  String sqlQuery = "select `datefield` from elasticsearch." + schema + "." + table;
  verifyJsonInPlan(sqlQuery, new String[] {
      "=[{\n" +
      " \"from\" : 0,\n" +
      " \"size\" : 4000,\n" +
      " \"query\" : {\n" +
      " \"match_all\" : { }\n" +
      " },\n" +
      " \"_source\" : {\n" +
      " \"includes\" : [ \"datefield\" ],\n" +
      " \"excludes\" : [ ]\n" +
      " }\n" +
      "}])"});
  testBuilder().sqlQuery(sqlQuery).unOrdered().baselineColumns("datefield")
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2014-02-10 10:50:42")))
    .baselineValues(new Object[] {null})
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2014-02-12 10:50:42")))
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2014-02-11 10:50:42")))
    .baselineValues(new LocalDateTime(Timestamp.valueOf("2014-02-10 10:50:42")))
    .go();
}
@Override
public void writeTimestamp(boolean isNull) throws IOException {
    TimeStampMilliWriter ts = writer.timeStampMilli(fieldName);
    if (!isNull) {
        switch (parser.getCurrentToken()) {
        case VALUE_NUMBER_INT:
            LocalDateTime dt = new LocalDateTime(parser.getLongValue(), org.joda.time.DateTimeZone.UTC);
            ts.writeTimeStampMilli(com.dremio.common.util.DateTimes.toMillis(dt));
            break;
        case VALUE_STRING:
            DateTimeFormatter f = ISODateTimeFormat.dateTime().withZoneUTC();
            ts.writeTimeStampMilli(com.dremio.common.util.DateTimes.toMillis(LocalDateTime.parse(parser.getValueAsString(), f)));
            break;
        default:
            throw UserException.unsupportedError()
                .message(parser.getCurrentToken().toString())
                .build(LOG);
        }
    }
}
@Test
public void testCorruptValDetectionDuringPruning() throws Exception {
    try {
        for (String selection : new String[]{"*", "date_col"}) {
            // for sanity, try reading all partitions without a filter
            TestBuilder builder = testBuilder()
                .sqlQuery("select " + selection + " from dfs.`" + CORRUPTED_PARTITIONED_DATES_1_2_PATH + "`")
                .unOrdered()
                .baselineColumns("date_col");
            addDateBaselineVals(builder);
            builder.go();

            String query = "select " + selection + " from dfs.`" + CORRUPTED_PARTITIONED_DATES_1_2_PATH + "`" +
                " where date_col = date '1970-01-01'";
            // verify that pruning is actually taking place
            testPlanMatchingPatterns(query, new String[]{"splits=\\[1"}, null);

            // read with a filter on the partition column
            testBuilder()
                .sqlQuery(query)
                .unOrdered()
                .baselineColumns("date_col")
                .baselineValues(new LocalDateTime(1970, 1, 1, 0, 0))
                .go();
        }
    } finally {
        test("alter session reset all");
    }
}
/**
 * Method that Personium added independently.
 * <p>
 * Parse "ISO-8601 format string without time zone offset" and convert it to DateTime type.
 * Time zone is interpreted as UTC.
 * @param value String to parse
 * @return DateTime
 */
private static DateTime parseUTCDateTime(String value) {
    DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME
            .withZone(ZoneId.of("UTC"))
            .withResolverStyle(ResolverStyle.STRICT);
    java.time.LocalDateTime dateTime = java.time.LocalDateTime.parse(value, formatter);
    long epochMilli = dateTime.toInstant(ZoneOffset.UTC).toEpochMilli();
    return new DateTime(epochMilli, DateTimeZone.UTC);
}
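A hedged usage sketch for parseUTCDateTime above; the sample value and the wrapper method are illustrative, not part of the original code. Because the input carries no offset, it is interpreted as UTC, and the returned Joda DateTime keeps the UTC zone.

private static void parseUTCDateTimeUsageSketch() {
    DateTime dt = parseUTCDateTime("2016-01-01T00:00:00");
    System.out.println(dt.getMillis()); // 1451606400000 (midnight 2016-01-01 UTC)
    System.out.println(dt.getZone());   // UTC
}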
private void sendAutoOrderSummary() {
    System.out.println("RUN TASK SCHEDULE: " + LocalDateTime.now());

    // Get deadline days
    Settings settings = settingsRepo.findById(1);
    int deadlinedays = settings.getDeadlineDays();

    LocalDate day = new LocalDate();
    if (this.holidaysRepo.findByIdHoliday(day) != null) {
        return;
    }

    LocalDate initDate = (day).plusDays(deadlinedays);
    // while (this.holidaysRepo.findByIdHoliday(initDate) != null) {
    //     initDate = initDate.plusDays(1);
    // }

    LocalDate maxCheckdate = (initDate).plusDays(deadlinedays);
    while (this.holidaysRepo.findByIdHoliday(maxCheckdate) != null) {
        maxCheckdate = maxCheckdate.plusDays(1);
    }

    int checkDays = Days.daysBetween((new LocalDate()), maxCheckdate).getDays() + deadlinedays;

    for (int i = 0; i <= checkDays; i++) {
        // newDeadlineDays
        LocalDate dailyMenuDate = initDate.plusDays(i);
        // if old deadline not passed and new deadline passed and dailyMenu not null, send report email
        if (menusService.deadlinePassed(dailyMenuDate) && dailyMenuRepo.findByDate(dailyMenuDate) != null) {
            // System.out.println(">>>>>>>>>>>>>sending email, date: " + dailyMenuDate);
            eService.sendOrderSummary(dailyMenuDate);
        }
    }
}
private boolean oldDeadlinePassed(LocalDate date, int deadlineDays, LocalTime deadlineTime) {
    // Check if order deadline passed based on given date, deadlineDays and deadlineTime (deadline)
    date = date.minusDays(deadlineDays);
    while (this.holidaysRepo.findByIdHoliday(date) != null) {
        date = date.minusDays(1);
    }
    return (date.toLocalDateTime(deadlineTime).compareTo(LocalDateTime.now()) < 0);
}
public boolean deadlinePassed(LocalDate date) {
    Settings settings = settingsRepo.findOne(1);
    int deadlineDays = settings.getDeadlineDays();
    LocalTime deadlineTime = settings.getDeadline();

    date = date.minusDays(deadlineDays);
    while (this.holidaysRepo.findByIdHoliday(date) != null) {
        date = date.minusDays(1);
    }
    // Check if order deadline passed based on given date, deadlineDays and deadlineTime (deadline)
    return (date.toLocalDateTime(deadlineTime).compareTo(LocalDateTime.now()) < 0);
}
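As a hedged illustration of the deadline arithmetic used above, the self-contained sketch below computes the effective cut-off instant for a menu date; the holiday set, sample values in the comment, and the method name are assumptions for the example. With deadlineDays = 2 and deadlineTime = 14:00, an order for Friday 2017-03-10 closes at Wednesday 2017-03-08 14:00, stepping back one extra day for every holiday encountered.

static LocalDateTime effectiveDeadlineSketch(LocalDate menuDate, int deadlineDays,
        LocalTime deadlineTime, java.util.Set<LocalDate> holidays) {
    LocalDate d = menuDate.minusDays(deadlineDays);
    // skip backwards over holidays, mirroring the repository lookup in deadlinePassed
    while (holidays.contains(d)) {
        d = d.minusDays(1);
    }
    return d.toLocalDateTime(deadlineTime);
}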
public boolean deadlinePassed(LocalDate date) {
    Settings settings = settingsRep.findOne(1);
    int deadlineDays = settings.getDeadlineDays();
    LocalTime deadlineTime = settings.getDeadline();

    date = date.minusDays(deadlineDays);
    while (this.holidaysRepo.findByIdHoliday(date) != null) {
        date = date.minusDays(1);
    }
    // Check if order deadline passed based on given date, deadlineDays and deadlineTime (deadline)
    return (date.toLocalDateTime(deadlineTime).compareTo(LocalDateTime.now()) < 0);
}
public static boolean castToFloat(FieldReader reader, NullableFloat8Holder out) {
    Object o = reader.readObject();
    if (o instanceof Number) {
        out.value = ((Number) o).doubleValue();
        return true;
    } else if (o instanceof Boolean) {
        out.value = ((Boolean) o).booleanValue() ? 1 : 0;
        return true;
    } else if (o instanceof LocalDateTime) {
        out.value = toMillis((LocalDateTime) o);
        return true;
    } else if (o instanceof Text) {
        String s;
        try {
            s = Text.decode(((Text) o).getBytes(), 0, ((Text) o).getLength());
            if (!isNumeric(s)) {
                return false;
            }
            out.value = Double.parseDouble(s);
            return true;
        } catch (Exception e) {
            // TODO: is this the best way?
            logger.warn("Can't decode text to FLOAT", e);
            return false;
        }
    } else if (o instanceof byte[]) {
        return false; // TODO
    }
    return false;
}
private void addDateBaselineVals(TestBuilder builder) {
    builder
        .baselineValues(new LocalDateTime(1970, 1, 1, 0, 0))
        .baselineValues(new LocalDateTime(1970, 1, 2, 0, 0))
        .baselineValues(new LocalDateTime(1969, 12, 31, 0, 0))
        .baselineValues(new LocalDateTime(1969, 12, 30, 0, 0))
        .baselineValues(new LocalDateTime(1900, 1, 1, 0, 0))
        .baselineValues(new LocalDateTime(2015, 1, 1, 0, 0));
}
public void start(LocalDateTime newTime) {
    int minDiff = getMinsBetween(previousTime, newTime);

    // 60min ... 360grade
    // minDif ... minDelta
    float minDeltaRotation = ((float) minDiff * 360f) / 60f;

    // 720min ... 360grade = 12h ... 360grade
    // minDif ... hourDelta
    float hourDeltaRotation = ((float) minDiff * 360f) / 720f;

    remainingMinRotation += minDeltaRotation;
    remainingHourRotation += hourDeltaRotation;

    d("ANIM", "current hour rotation = " + currentHourRotation + ", current min rotation = " + currentMinRotation);

    if (isRunning()) {
        stop();
    }

    targetHourRotation = currentHourRotation + remainingHourRotation;
    hourAnimator.setFloatValues(currentHourRotation, targetHourRotation);

    targetMinRotation = currentMinRotation + remainingMinRotation;
    minAnimator.setFloatValues(currentMinRotation, targetMinRotation);

    start();
    previousTime = newTime;
}
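A small, self-contained sketch of the proportion used above (the helper name is made up for illustration): the minute hand covers 360 degrees in 60 minutes and the hour hand 360 degrees in 720 minutes, so a 90-minute difference maps to 540 and 45 degrees respectively.

static float[] rotationDeltasSketch(int minDiff) {
    float minDelta = (minDiff * 360f) / 60f;   // e.g. 90 min -> 540 degrees
    float hourDelta = (minDiff * 360f) / 720f; // e.g. 90 min -> 45 degrees
    return new float[] { minDelta, hourDelta };
}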
@Test // DRILL-3769
public void testToDateForTimeStamp() throws Exception {
    final String query = "select to_date(to_timestamp(-1)) as col \n" +
        "from (values(1))";

    testBuilder()
        .sqlQuery(query)
        .ordered()
        .baselineColumns("col")
        .baselineValues(new LocalDateTime(1969, 12, 31, 0, 0))
        .build()
        .run();
}
@Override
public Parser<?> getParser(DateTimeFormat annotation, Class<?> fieldType) {
    if (LocalDate.class.equals(fieldType)) {
        return new LocalDateParser(getFormatter(annotation, fieldType));
    } else if (LocalTime.class.equals(fieldType)) {
        return new LocalTimeParser(getFormatter(annotation, fieldType));
    } else if (LocalDateTime.class.equals(fieldType)) {
        return new LocalDateTimeParser(getFormatter(annotation, fieldType));
    } else {
        return new DateTimeParser(getFormatter(annotation, fieldType));
    }
}
@Override
public void registerFormatters(FormatterRegistry registry) {
    JodaTimeConverters.registerConverters(registry);

    DateTimeFormatter dateFormatter = getFormatter(Type.DATE);
    DateTimeFormatter timeFormatter = getFormatter(Type.TIME);
    DateTimeFormatter dateTimeFormatter = getFormatter(Type.DATE_TIME);

    addFormatterForFields(registry,
            new ReadablePartialPrinter(dateFormatter),
            new LocalDateParser(dateFormatter),
            LocalDate.class);

    addFormatterForFields(registry,
            new ReadablePartialPrinter(timeFormatter),
            new LocalTimeParser(timeFormatter),
            LocalTime.class);

    addFormatterForFields(registry,
            new ReadablePartialPrinter(dateTimeFormatter),
            new LocalDateTimeParser(dateTimeFormatter),
            LocalDateTime.class);

    addFormatterForFields(registry,
            new ReadableInstantPrinter(dateTimeFormatter),
            new DateTimeParser(dateTimeFormatter),
            ReadableInstant.class);

    // In order to retain backwards compatibility we only register Date/Calendar
    // types when a user defined formatter is specified (see SPR-10105)
    if (this.formatters.containsKey(Type.DATE_TIME)) {
        addFormatterForFields(registry,
                new ReadableInstantPrinter(dateTimeFormatter),
                new DateTimeParser(dateTimeFormatter),
                Date.class, Calendar.class);
    }

    registry.addFormatterForFieldAnnotation(new JodaDateTimeFormatAnnotationFormatterFactory());
}
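A hedged usage sketch for the SPR-10105 branch above, assuming this method belongs to Spring's JodaTimeFormatterRegistrar as the surrounding code suggests; the helper name and the choice of formatter are illustrative. Supplying a user-defined date-time formatter makes the registrar also cover java.util.Date and Calendar fields.

static FormatterRegistry jodaRegistrarUsageSketch() {
    // skip the default formatters so only the Joda registrar's formatters are registered
    FormattingConversionService conversionService = new DefaultFormattingConversionService(false);
    JodaTimeFormatterRegistrar registrar = new JodaTimeFormatterRegistrar();
    registrar.setDateTimeFormatter(DateTimeFormat.mediumDateTime()); // user-defined formatter triggers the Date/Calendar registration
    registrar.registerFormatters(conversionService);
    return conversionService;
}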
public static void mkdirForHyperParameterConfig() {
    final String homeDir = System.getProperty("user.home");
    final String logDir = "hyper" + new LocalDateTime().toString();
    mHyperParameterConfigDirPath = FilenameUtils.concat(homeDir, logDir);
    try {
        FileUtils.forceMkdir(new File(mHyperParameterConfigDirPath));
    } catch (final IOException e) {
        throw new IllegalStateException(e);
    }
}
public static void mkdirForLog() {
    final String homeDir = System.getProperty("user.home");
    final String logDir = "log" + new LocalDateTime().toString();
    mLogDirPath = FilenameUtils.concat(homeDir, logDir);
    try {
        FileUtils.forceMkdir(new File(mLogDirPath));
    } catch (final IOException e) {
        throw new IllegalStateException(e);
    }
}
@Test
public void testTimestampDiffReverse() throws Exception {
    LocalDateTime endDt = ts("2015-09-10T20:49:42.000");
    LocalDateTime startDt = ts("2017-03-30T22:50:59.050");

    testFunctions(new Object[][]{
        {"timestampdiffSecond(c0, c1)", startDt, endDt, -48996077},
        {"timestampdiffMinute(c0, c1)", startDt, endDt, -816601},
        {"timestampdiffHour(c0, c1)", startDt, endDt, -13610},
        {"timestampdiffDay(c0, c1)", startDt, endDt, -567},
        {"timestampdiffWeek(c0, c1)", startDt, endDt, -81},
        {"timestampdiffMonth(c0, c1)", startDt, endDt, -18},
        {"timestampdiffQuarter(c0, c1)", startDt, endDt, -6},
        {"timestampdiffYear(c0, c1)", startDt, endDt, -1}
    });
}