/**
 * Returns all {@link News} for which a {@link CombItem} of the given {@link User} exists.
 * The news are sorted by add_date, descending.
 *
 * @param userId     the id of the {@link User} whose {@link CombItem}s are looked up.
 * @param onlyUnread if <code>true</code>, only {@link CombItem}s without a read_date
 *                   are returned.
 * @return {@link List} containing {@link News}.
 */
public List<News> getCombItems(final long userId, final boolean onlyUnread) {
    SelectConditionStep<Record> sql = DSL.using(jooqConfig)
            .select()
            .from(COMB_ITEM_TABLE
                    .join(CONTENT_TABLE)
                    .on(COMB_ITEM_TABLE.CONTENT_ID.eq(CONTENT_TABLE.ID))
                    .join(NEWS_TABLE)
                    .on(CONTENT_TABLE.ID.eq(NEWS_TABLE.CONTENT_ID)))
            .where(COMB_ITEM_TABLE.USER_ID.eq(userId));
    if (onlyUnread) {
        // Keep the step returned by and(): the DSL should not be relied on to mutate in place.
        sql = sql.and(COMB_ITEM_TABLE.READ_DATE.isNull());
    }
    return sql.orderBy(COMB_ITEM_TABLE.ADD_DATE.desc())
            .fetchInto(News.class);
}
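// A minimal caller sketch for getCombItems(...) above; "newsDao" and the getTitle() accessor
// are illustrative assumptions, not part of the original code.
List<News> unread = newsDao.getCombItems(42L, true);
for (News news : unread) {
    System.out.println(news.getTitle()); // newest first, per the ADD_DATE descending order
}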
private ValidationResult notExists(final DSLContext context, final ColumnPermutation lhs,
        final ColumnPermutation rhs) {
    final Table<Record> lhsAlias = context.select(fields(lhs))
            .from(tables(lhs))
            .where(notNull(lhs))
            .asTable();
    final int violators = context.selectCount().from(
            selectFrom(lhsAlias).whereNotExists(
                    context.selectOne()
                            .from(tables(rhs))
                            .where(row(fields(rhs)).eq(row(lhsAlias.fields())))
            ).limit(1)
    ).fetchOne().value1();
    return new DefaultValidationResult(violators == 0);
}
public void setUp() throws Exception {
    connection = DriverManager.getConnection("jdbc:hsqldb:mem:myDb");
    context = DSL.using(connection, SQLDialect.HSQLDB,
            new Settings().withRenderNameStyle(RenderNameStyle.AS_IS));
    final List<Field<String>> fields = getFields();
    context.createTable(relationName)
            .columns(fields)
            .execute();
    try (InputStream in = resourceClass.getResourceAsStream(csvPath)) {
        final Loader<Record> loader = context.loadInto(table(name(relationName)))
                .loadCSV(in)
                .fields(fields)
                .execute();
        assertThat(loader.errors()).isEmpty();
    }
}
private void init(Connection conn) {
    DSLContext create = DSL.using(conn, SQLDialect.POSTGRES);

    // Cache the ids of the "cores" attribute across all bom compute classes.
    Result<Record> coresAttributes = create.select().from(MD_CLASS_ATTRIBUTES)
            .join(MD_CLASSES).on(MD_CLASS_ATTRIBUTES.CLASS_ID.eq(MD_CLASSES.CLASS_ID))
            .where(MD_CLASSES.CLASS_NAME.like("bom%Compute"))
            .and(MD_CLASS_ATTRIBUTES.ATTRIBUTE_NAME.eq("cores"))
            .fetch();
    for (Record coresAttribute : coresAttributes) {
        coresAttributeIds.add(coresAttribute.getValue(MD_CLASS_ATTRIBUTES.ATTRIBUTE_ID));
    }

    // Cache the class ids of all bom compute classes.
    Result<Record> computeClasses = create.select().from(MD_CLASSES)
            .where(MD_CLASSES.CLASS_NAME.like("bom%Compute"))
            .fetch();
    for (Record computeClass : computeClasses) {
        computeClassIds.add(computeClass.get(MD_CLASSES.CLASS_ID));
    }

    log.info("cached compute class ids: " + computeClassIds);
    log.info("cached compute cores attribute ids: " + coresAttributeIds);
}
private List<Deployment> getDeployments(Connection conn, Environment env) {
    List<Deployment> deployments = new ArrayList<>();
    DSLContext create = DSL.using(conn, SQLDialect.POSTGRES);
    Result<Record> records = create.select().from(DJ_DEPLOYMENT)
            .join(DJ_DEPLOYMENT_STATES).on(DJ_DEPLOYMENT_STATES.STATE_ID.eq(DJ_DEPLOYMENT.STATE_ID))
            .join(NS_NAMESPACES).on(NS_NAMESPACES.NS_ID.eq(DJ_DEPLOYMENT.NS_ID))
            .where(NS_NAMESPACES.NS_PATH.eq(env.getPath() + "/" + env.getName() + "/bom"))
            .and(DJ_DEPLOYMENT.CREATED_BY.notEqual("oneops-autoreplace"))
            .orderBy(DJ_DEPLOYMENT.CREATED.desc())
            .limit(1)
            .fetch();
    for (Record r : records) {
        Deployment deployment = new Deployment();
        deployment.setCreatedAt(r.getValue(DJ_DEPLOYMENT.CREATED));
        deployment.setCreatedBy(r.getValue(DJ_DEPLOYMENT.CREATED_BY));
        deployment.setState(r.getValue(DJ_DEPLOYMENT_STATES.STATE_NAME));
        deployments.add(deployment);
    }
    return deployments;
}
private List<Environment> getOneopsEnvironments(Connection conn) {
    List<Environment> envs = new ArrayList<>();
    DSLContext create = DSL.using(conn, SQLDialect.POSTGRES);

    log.info("Fetching all environments..");
    Result<Record> envRecords = create.select().from(CM_CI)
            .join(MD_CLASSES).on(CM_CI.CLASS_ID.eq(MD_CLASSES.CLASS_ID))
            .join(NS_NAMESPACES).on(CM_CI.NS_ID.eq(NS_NAMESPACES.NS_ID))
            .where(MD_CLASSES.CLASS_NAME.eq("manifest.Environment"))
            .fetch(); // all the env cis
    log.info("Got all environments");

    for (Record r : envRecords) {
        Environment env = new Environment();
        env.setName(r.getValue(CM_CI.CI_NAME));
        env.setId(r.getValue(CM_CI.CI_ID));
        env.setPath(r.getValue(NS_NAMESPACES.NS_PATH));
        env.setNsId(r.getValue(NS_NAMESPACES.NS_ID));
        envs.add(env);
    }
    return envs;
}
private List<String> getActiveClouds(Platform platform, Connection conn) {
    DSLContext create = DSL.using(conn, SQLDialect.POSTGRES);
    List<String> clouds = new ArrayList<>();
    Result<Record> consumesRecords = create.select().from(CM_CI_RELATIONS)
            .join(MD_RELATIONS).on(MD_RELATIONS.RELATION_ID.eq(CM_CI_RELATIONS.RELATION_ID))
            .join(CM_CI_RELATION_ATTRIBUTES)
            .on(CM_CI_RELATION_ATTRIBUTES.CI_RELATION_ID.eq(CM_CI_RELATIONS.CI_RELATION_ID))
            .where(CM_CI_RELATIONS.FROM_CI_ID.eq(platform.getId()))
            .and(CM_CI_RELATION_ATTRIBUTES.DF_ATTRIBUTE_VALUE.eq("active"))
            .fetch();
    for (Record r : consumesRecords) {
        String comments = r.getValue(CM_CI_RELATIONS.COMMENTS);
        String cloudName = comments.split(":")[1];
        cloudName = cloudName.split("\"")[1];
        clouds.add(cloudName);
    }
    return clouds;
}
/**
 * Copied from JavaGenerator.
 *
 * @param key the unique key definition whose columns determine the record type.
 * @return the fully qualified Java type used to represent the key's columns.
 */
protected String getKeyType(UniqueKeyDefinition key) {
    String tType;
    List<ColumnDefinition> keyColumns = key.getKeyColumns();

    if (keyColumns.size() == 1) {
        tType = getJavaType(keyColumns.get(0).getType());
    }
    else if (keyColumns.size() <= Constants.MAX_ROW_DEGREE) {
        String generics = "";
        String separator = "";
        for (ColumnDefinition column : keyColumns) {
            generics += separator + getJavaType(column.getType());
            separator = ", ";
        }
        tType = Record.class.getName() + keyColumns.size() + "<" + generics + ">";
    }
    else {
        tType = Record.class.getName();
    }

    return tType;
}
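// For illustration only, a hedged sketch of the strings the method above produces for
// different key shapes, assuming jOOQ's Record1..Record22 naming and a maximum row degree of 22:
//
//   single INTEGER column       -> "java.lang.Integer"
//   (INTEGER, VARCHAR) key      -> "org.jooq.Record2<java.lang.Integer, java.lang.String>"
//   wider than MAX_ROW_DEGREE   -> "org.jooq.Record" (untyped fallback)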
@Override
public ModelWrapper getModelObjWithSpares(long id) throws Exception {
    List<SpareWrapper> spares = jooq.select().from(Spares.SPARES)
            .join(Brands.BRANDS).on(Brands.BRANDS.ID.eq(Spares.SPARES.BRAND_ID))
            .join(SpareToModel.SPARE_TO_MODEL)
            .on(SpareToModel.SPARE_TO_MODEL.SPARE_ID.eq(Spares.SPARES.ID))
            .where(SpareToModel.SPARE_TO_MODEL.MODEL_ID.eq(id))
            .fetch(SpareWrapper::of);

    Record record = jooq.select().from(Models.MODELS)
            .join(Series.SERIES).on(Series.SERIES.ID.eq(Models.MODELS.SERIES_ID))
            .join(Brands.BRANDS).on(Brands.BRANDS.ID.eq(Series.SERIES.BRAND_ID))
            .where(Models.MODELS.ID.eq(id))
            .fetchOne();

    ModelWrapper model = new ModelWrapper(record);
    model.setSpares(spares);
    return model;
}
@Override
public Collection<SpareWrapper> getSpares(String label, Boolean flag, Integer numFromInclusive,
        Integer numToInclusive) throws Exception {
    SelectQuery<Record> query = jooq.selectQuery();
    query.addFrom(Spares.SPARES);
    if (label != null) {
        query.addConditions(Spares.SPARES.LABEL.eq(label));
    }
    if (flag != null) {
        query.addConditions(Spares.SPARES.FLAG.eq(flag));
    }
    if (numFromInclusive != null) {
        if (numToInclusive == null) {
            query.addConditions(Spares.SPARES.NUM.eq(numFromInclusive));
        } else {
            query.addConditions(Spares.SPARES.NUM.ge(numFromInclusive));
            query.addConditions(Spares.SPARES.NUM.le(numToInclusive));
        }
    }
    return query.fetch(SpareWrapper::of);
}
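// A hedged caller sketch for the dynamic filter above; "spareDao" is an assumed reference
// to the implementing DAO.
Collection<SpareWrapper> ranged = spareDao.getSpares("brake-pad", null, 10, 20);   // NUM in [10, 20]
Collection<SpareWrapper> exact = spareDao.getSpares(null, Boolean.TRUE, 15, null); // no upper bound: exact NUM match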
private Map<String, Object> merageRecord(Record originalRecord, E freshEntity) {
    try {
        Map<String, Object> originalMap = this.converMap(originalRecord);
        Map<String, Object> freshMap = Mapper.object2Map(freshEntity);
        logger.debug("freshMap:{} ", freshMap);
        // Merge: where the fresh entity has no value, fall back to the original one;
        // drop entries whose value is unchanged or null on both sides.
        for (String oriKey : originalMap.keySet()) {
            Object oriValue = originalMap.get(oriKey);
            Object freshValue = freshMap.get(oriKey);
            if (freshValue == null && oriValue != null) {
                freshMap.put(oriKey, oriValue);
            } else if (freshValue == null && oriValue == null) {
                freshMap.remove(oriKey);
            } else if (freshValue.equals(oriValue)) {
                freshMap.remove(oriKey);
            }
        }
        logger.debug("change freshMap:{} ", freshMap);
        return freshMap;
    } catch (IllegalAccessException e) {
        e.printStackTrace();
    }
    return null;
}
@Test
public void testNewContext() {
    BQRuntime runtime = stack.app("--config=classpath:test.yml")
            .autoLoadModules()
            .createRuntime();

    try (DSLContext c = runtime.getInstance(JooqFactory.class).newContext()) {
        c.createTable(Tables.TEST_TABLE).columns(Tables.TEST_TABLE.fields()).execute();
        c.delete(Tables.TEST_TABLE).execute();
        c.insertInto(Tables.TEST_TABLE)
                .set(Tables.TEST_TABLE.ID, 4)
                .set(Tables.TEST_TABLE.NAME, "me")
                .execute();

        Record r = c.select().from(Tables.TEST_TABLE).fetchOne();
        assertNotNull(r);
        assertEquals(Integer.valueOf(4), r.get(Tables.TEST_TABLE.ID));
        assertEquals("me", r.get(Tables.TEST_TABLE.NAME));
    }
}
@Override
public List<DependencyLink> getDependencies(long endTs, @Nullable Long lookback) {
    try (Connection conn = datasource.getConnection()) {
        if (hasPreAggregatedDependencies.get()) {
            List<Date> days = getDays(endTs, lookback);
            List<DependencyLink> unmerged = context.get(conn)
                    .selectFrom(ZIPKIN_DEPENDENCIES)
                    .where(ZIPKIN_DEPENDENCIES.DAY.in(days))
                    .fetch((Record l) -> DependencyLink.create(
                            l.get(ZIPKIN_DEPENDENCIES.PARENT),
                            l.get(ZIPKIN_DEPENDENCIES.CHILD),
                            l.get(ZIPKIN_DEPENDENCIES.CALL_COUNT))
                    );
            return DependencyLinker.merge(unmerged);
        } else {
            return aggregateDependencies(endTs, lookback, conn);
        }
    } catch (SQLException e) {
        throw new RuntimeException("Error querying dependencies for endTs " + endTs
                + " and lookback " + lookback + ": " + e.getMessage());
    }
}
@NotNull
public List<PurpleCopyNumber> read(@NotNull final String sample) {
    List<PurpleCopyNumber> copyNumbers = Lists.newArrayList();
    Result<Record> result = context.select().from(COPYNUMBER).where(COPYNUMBER.SAMPLEID.eq(sample)).fetch();
    for (Record record : result) {
        copyNumbers.add(ImmutablePurpleCopyNumber.builder()
                .chromosome(record.getValue(COPYNUMBER.CHROMOSOME))
                .start(record.getValue(COPYNUMBER.START))
                .end(record.getValue(COPYNUMBER.END))
                .bafCount(record.getValue(COPYNUMBER.BAFCOUNT))
                .method(CopyNumberMethod.valueOf(record.getValue(COPYNUMBER.COPYNUMBERMETHOD)))
                .segmentStartSupport(SegmentSupport.valueOf(record.getValue(COPYNUMBER.SEGMENTSTARTSUPPORT)))
                .segmentEndSupport(SegmentSupport.valueOf(record.getValue(COPYNUMBER.SEGMENTENDSUPPORT)))
                .averageActualBAF(record.getValue(COPYNUMBER.ACTUALBAF))
                .averageObservedBAF(record.getValue(COPYNUMBER.OBSERVEDBAF))
                .averageTumorCopyNumber(record.getValue(COPYNUMBER.COPYNUMBER_))
                .build());
    }
    Collections.sort(copyNumbers);
    return copyNumbers;
}
public static void main(String[] args) throws Exception {
    String user = System.getProperty("jdbc.user");
    String password = System.getProperty("jdbc.password");
    String url = System.getProperty("jdbc.url");
    String driver = System.getProperty("jdbc.driver");
    Class.forName(driver).newInstance();

    try (Connection connection = DriverManager.getConnection(url, user, password)) {
        DSLContext dslContext = DSL.using(connection, SQLDialect.MYSQL);
        Result<Record> result = dslContext.select().from(AUTHOR).fetch();
        for (Record r : result) {
            Integer id = r.getValue(AUTHOR.ID);
            String firstName = r.getValue(AUTHOR.FIRST_NAME);
            String lastName = r.getValue(AUTHOR.LAST_NAME);
            System.out.println("ID: " + id + " first name: " + firstName + " last name: " + lastName);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
static void loadCsv(final DSLContext context, final String fileName, final String tableName)
        throws IOException {
    try (InputStream in = Helper.class.getResourceAsStream(fileName)) {
        final Loader<Record> result = context.loadInto(table(name(tableName)))
                .loadCSV(in)
                .fields(LoaderFieldContext::field)
                .execute();
        assertThat(result.errors()).as(getMessage(result.errors())).isEmpty();
    }
}
/**
 * Loads a CSV file in which {@code NULL} values are represented by a star ({@code *}).
 *
 * <p>This method cannot simply be merged with the other loader, because the {@code nullString(...)}
 * API is only available when explicitly passing a collection of fields to {@code loadCSV},
 * which is otherwise redundant.</p>
 */
static void loadCsvWithNulls(final DSLContext context, final String fileName, final String tableName,
        final List<String> fieldNames) throws IOException {
    try (InputStream in = Helper.class.getResourceAsStream(fileName)) {
        final Loader<Record> result = context.loadInto(table(name(tableName)))
                .loadCSV(in)
                .fields(toFields(tableName, fieldNames))
                .nullString("*")
                .execute();
        assertThat(result.errors()).as(getMessage(result.errors())).isEmpty();
    }
}
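// A sketch of how the two loaders above might be exercised from a test; the resource path,
// table name and field names are illustrative only.
//
// city.csv (header row first, '*' marks NULL for the second loader):
//   NAME,COUNTRY
//   Berlin,Germany
//   Atlantis,*
loadCsv(context, "/csv/city.csv", "CITY");
loadCsvWithNulls(context, "/csv/city.csv", "CITY", Arrays.asList("NAME", "COUNTRY"));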
private void populateEnv(Environment env, Connection conn) {
    DSLContext create = DSL.using(conn, SQLDialect.POSTGRES);

    Result<Record> envAttributes = create.select().from(CM_CI_ATTRIBUTES)
            .join(MD_CLASS_ATTRIBUTES).on(CM_CI_ATTRIBUTES.ATTRIBUTE_ID.eq(MD_CLASS_ATTRIBUTES.ATTRIBUTE_ID))
            .where(CM_CI_ATTRIBUTES.CI_ID.eq(env.getId()))
            .fetch();
    for (Record attrib : envAttributes) {
        String attributeName = attrib.getValue(MD_CLASS_ATTRIBUTES.ATTRIBUTE_NAME);
        if (attributeName.equalsIgnoreCase("profile")) {
            env.setProfile(attrib.getValue(CM_CI_ATTRIBUTES.DF_ATTRIBUTE_VALUE));
        }
        // add other attributes as and when needed
    }

    // now query all the platforms for this env
    Result<Record> platformRels = create.select().from(CM_CI_RELATIONS)
            .join(MD_RELATIONS).on(MD_RELATIONS.RELATION_ID.eq(CM_CI_RELATIONS.RELATION_ID))
            .join(CM_CI).on(CM_CI.CI_ID.eq(CM_CI_RELATIONS.TO_CI_ID))
            .where(MD_RELATIONS.RELATION_NAME.eq("manifest.ComposedOf"))
            .and(CM_CI_RELATIONS.FROM_CI_ID.eq(env.getId()))
            .fetch();
    for (Record platformRel : platformRels) {
        long platformId = platformRel.getValue(CM_CI_RELATIONS.TO_CI_ID);
        Platform platform = new Platform();
        platform.setId(platformId);
        platform.setName(platformRel.getValue(CM_CI.CI_NAME));
        platform.setPath(env.getPath() + "/" + env.getName() + "/bom/" + platform.getName() + "/1");
        populatePlatform(conn, platform);
        platform.setActiveClouds(getActiveClouds(platform, conn));
        // now calculate total cores of the env - including all platforms
        env.setTotalCores(env.getTotalCores() + platform.getTotalCores());
        env.addPlatform(platform);
    }
}
private void populatePlatform(Connection conn, Platform platform) {
    DSLContext create = DSL.using(conn, SQLDialect.POSTGRES);
    Result<Record> computes = create.select().from(CM_CI)
            .join(NS_NAMESPACES).on(NS_NAMESPACES.NS_ID.eq(CM_CI.NS_ID))
            .join(CM_CI_ATTRIBUTES).on(CM_CI_ATTRIBUTES.CI_ID.eq(CM_CI.CI_ID))
            .where(NS_NAMESPACES.NS_PATH.eq(platform.getPath())
                    .and(CM_CI.CLASS_ID.in(computeClassIds))
                    .and(CM_CI_ATTRIBUTES.ATTRIBUTE_ID.in(coresAttributeIds)))
            .fetch();
    platform.setTotalComputes(computes.size());

    int totalCores = 0;
    if (platform.getTotalComputes() > 0) {
        for (Record compute : computes) {
            totalCores += Integer.parseInt(compute.get(CM_CI_ATTRIBUTES.DF_ATTRIBUTE_VALUE));
        }
    }
    platform.setTotalCores(totalCores);

    // Now query platform ci attributes and set them on the object.
    Result<Record> platformAttributes = create.select().from(CM_CI_ATTRIBUTES)
            .join(MD_CLASS_ATTRIBUTES).on(MD_CLASS_ATTRIBUTES.ATTRIBUTE_ID.eq(CM_CI_ATTRIBUTES.ATTRIBUTE_ID))
            .where(CM_CI_ATTRIBUTES.CI_ID.eq(platform.getId()))
            .fetch();
    for (Record attribute : platformAttributes) {
        String attributeName = attribute.getValue(MD_CLASS_ATTRIBUTES.ATTRIBUTE_NAME);
        if (attributeName.equalsIgnoreCase("source")) {
            platform.setSource(attribute.getValue(CM_CI_ATTRIBUTES.DF_ATTRIBUTE_VALUE));
        } else if (attributeName.equalsIgnoreCase("pack")) {
            platform.setPack(attribute.getValue(CM_CI_ATTRIBUTES.DF_ATTRIBUTE_VALUE));
        }
    }
}
private void updateCrawlEntry(Environment env) {
    if (crawlerDbUserName == null || crawlerDbUrl == null || crawlerDbPassword == null) {
        return;
    }
    try (Connection conn = DriverManager.getConnection(crawlerDbUrl, crawlerDbUserName, crawlerDbPassword)) {
        DSLContext create = DSL.using(conn, SQLDialect.POSTGRES);
        Result<Record> records = create.select().from(CRAWL_ENTITIES)
                .where(CRAWL_ENTITIES.OO_ID.eq(env.getId()))
                .fetch();
        if (records.isNotEmpty()) {
            create.update(CRAWL_ENTITIES)
                    .set(CRAWL_ENTITIES.LAST_CRAWLED_AT, new Timestamp(System.currentTimeMillis()))
                    .where(CRAWL_ENTITIES.OO_ID.eq(env.getId()))
                    .execute();
        } else {
            create.insertInto(CRAWL_ENTITIES)
                    .set(CRAWL_ENTITIES.NS_PATH, env.getPath() + "/" + env.getName())
                    .set(CRAWL_ENTITIES.OO_ID, env.getId())
                    .execute();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
public static Tweet buildFromRecord(Record record) {
    final User user = User.buildFromRecord(record);
    return new Tweet(
            record.get(TWEETS.ID).longValue(),
            record.get(TWEETS.CREATED_AT),
            user,
            record.get(TWEETS.TEXT),
            record.get(TWEETS.IS_RETWEET));
}
public static User buildFromRecord(Record record) {
    return new User(
            record.get(USERS.ID).longValue(),
            record.get(USERS.NAME),
            record.get(USERS.SCREEN_NAME),
            record.get(USERS.LOCATION),
            record.get(USERS.VERIFIED));
}
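// A hedged sketch of a query that could feed the two record mappers above; the "ctx" reference,
// the TWEETS.USER_ID join column and the assumption that the first mapper lives on Tweet are
// illustrative, not taken from the original code.
List<Tweet> tweets = ctx.select()
        .from(TWEETS)
        .join(USERS).on(USERS.ID.eq(TWEETS.USER_ID))
        .fetch(Tweet::buildFromRecord);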
public static Todo mapTodo(Record record) {
    Todo todo = new Todo(
            record.get(TODOS.ID),
            asUUID(record.get(TODOS.UUID)),
            defaultIfNull(record.get(TODOS.USER_ID), 0L),
            record.get(TODOS.SUMMARY),
            record.get(TODOS.DESCRIPTION));
    LOG.trace("todo returned : {}", todo);
    return todo;
}
public static User mapUser(Record record) {
    User user = new User(
            record.get(USERS.ID),
            UUID.nameUUIDFromBytes(record.get(USERS.UUID)),
            record.get(USERS.NAME));
    LOG.trace("user returned : {}", user);
    return user;
}
@Override
public SeriesWrapper getSeriesObj(long id) throws Exception {
    Record record = jooq.select().from(Series.SERIES)
            .join(Brands.BRANDS).on(Brands.BRANDS.ID.eq(Series.SERIES.BRAND_ID))
            .where(Series.SERIES.ID.eq(id))
            .fetchOne();
    return new SeriesWrapper(record);
}
@Override
public ModelWrapper getModelObj(long id) throws Exception {
    Record record = jooq.select().from(Models.MODELS)
            .join(Series.SERIES).on(Series.SERIES.ID.eq(Models.MODELS.SERIES_ID))
            .join(Brands.BRANDS).on(Brands.BRANDS.ID.eq(Series.SERIES.BRAND_ID))
            .where(Models.MODELS.ID.eq(id))
            .fetchOne();
    return new ModelWrapper(record);
}
@Override
public SpareWrapper getSpareObj(long id) throws Exception {
    Record record = jooq.select().from(Spares.SPARES)
            .join(Brands.BRANDS).on(Brands.BRANDS.ID.eq(Spares.SPARES.BRAND_ID))
            .where(Spares.SPARES.ID.eq(id))
            .fetchOne();
    return new SpareWrapper(record);
}
public static List<Map<String, Object>> convertList(Result<Record> results) {
    if (results == null) {
        return null;
    }
    List<Map<String, Object>> ls = new ArrayList<Map<String, Object>>();
    for (Record aRecord : results) {
        Map<String, Object> o = convertMap(aRecord);
        if (o != null) {
            ls.add(o);
        }
    }
    return ls;
}
public static Map<String, Object> convertMap(Record record) {
    if (record == null) {
        return null;
    }
    int n = record.fields().length;
    Map<String, Object> reMap = new LinkedHashMap<String, Object>();
    for (int i = 0; i < n; i++) {
        String kn = record.field(i).getName();
        Object kv = record.getValue(record.field(i));
        reMap.put(kn, matchJavaValue(kv));
    }
    return reMap;
}
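// A hedged usage sketch for the two converters above; the enclosing utility class (called
// RecordUtil here) and the table name are assumptions, not part of the original code.
Result<Record> result = context.select().from(table(name("USERS"))).fetch();
List<Map<String, Object>> rows = RecordUtil.convertList(result);
rows.forEach(row -> System.out.println(row.get("ID")));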
protected InsertQuery<Record> _c(Table<Record> table, Map<Field<Object>, Object> mapRecord) {
    InsertQuery<Record> insertQuery = context.insertQuery(table);
    for (Field<Object> field : mapRecord.keySet()) {
        insertQuery.addValue(field, mapRecord.get(field));
    }
    return insertQuery;
}
protected SelectWhereStep<Record> _r(Table<Record> table, List<Field<Object>> viewer) {
    SelectWhereStep<Record> whereStep;
    if (viewer != null) {
        whereStep = context.select(viewer).from(table);
    } else {
        whereStep = context.selectFrom(table);
    }
    return whereStep;
}
protected Result<Record> getRecordsByPage(List<Field<Object>> viewer, Condition c, int form, int to,
        SortField<?>... sorts) {
    if (sorts != null && sorts.length != 0) {
        return _r(getProviderTable(), viewer).where(c).orderBy(sorts).limit(form, to).fetch();
    } else {
        return _r(getProviderTable(), viewer).where(c).limit(form, to).fetch();
    }
}
protected Result<Record> getRecords(List<Field<Object>> viewer, Condition c, SortField<?>... sorts) {
    if (sorts != null && sorts.length != 0) {
        return _r(getProviderTable(), viewer).where(c).orderBy(sorts).fetch();
    } else {
        return _r(getProviderTable(), viewer).where(c).fetch();
    }
}
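// A hedged sketch of combining the generic helpers above inside a subclass whose
// getProviderTable() returns the same PRODUCTS table; the table and field names are
// illustrative and DSL.table/name/field are assumed to be statically imported.
Map<Field<Object>, Object> values = new LinkedHashMap<>();
values.put(field(name("NAME"), Object.class), "widget");
values.put(field(name("PRICE"), Object.class), 42);
_c(table(name("PRODUCTS")), values).execute();            // insert one row

Result<Record> rows = getRecords(null,                     // no column projection: select *
        field(name("PRICE"), Object.class).isNotNull(),    // condition
        field(name("PRICE")).desc());                      // optional sort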
private Map<String, Object> converMap(Record record) {
    if (record == null) {
        return null;
    }
    int n = record.fields().length;
    Map<String, Object> reMap = new LinkedHashMap<String, Object>();
    for (int i = 0; i < n; i++) {
        String kn = record.field(i).getName();
        // String kn = SystemUtility.firstLower(record.field(i).getName());
        Object kv = record.getValue(record.field(i));
        reMap.put(kn, kv);
    }
    return reMap;
}
public LectureRoomDto selectLecturerRoom(@NonNull Long lecturerId) {
    Result<Record> records = jooq.select()
            .from(Tables.LECTURER.leftJoin(Tables.LECTURER_ROOM).onKey())
            .where(Tables.LECTURER.ID.eq(lecturerId))
            .fetch();
    Lecturer lecturer = records.into(Lecturer.class).get(0);
    List<LecturerRoom> rooms = records.into(LecturerRoom.class).stream()
            .filter(room -> room.getId() != null)
            .collect(Collectors.toList());
    return new LectureRoomDto(lecturer, rooms);
}
public LecturerRoomDto selectRooms(Long lecturerId) {
    Result<Record> records = jooq.select()
            .from(Tables.LECTURER.leftJoin(Tables.LECTURER_ROOM).onKey())
            .where(Tables.LECTURER.ID.eq(lecturerId))
            .fetch();
    Lecturer lecturer = records.into(Lecturer.class).get(0);
    List<LecturerRoom> rooms = records.into(LecturerRoom.class);
    if (rooms.get(0).getId() == null) {
        // The left join matched no rooms; return an empty list instead of a single null-id row.
        rooms = new ArrayList<>();
    }
    return new LecturerRoomDto(lecturer, rooms);
}
private Endpoint endpoint(Record a) {
    String serviceName = a.getValue(ZIPKIN_ANNOTATIONS.ENDPOINT_SERVICE_NAME);
    if (serviceName == null) return null;
    return Endpoint.builder()
            .serviceName(serviceName)
            .port(a.getValue(ZIPKIN_ANNOTATIONS.ENDPOINT_PORT))
            .ipv4(a.getValue(ZIPKIN_ANNOTATIONS.ENDPOINT_IPV4))
            .ipv6(hasIpv6.get() ? a.getValue(ZIPKIN_ANNOTATIONS.ENDPOINT_IPV6) : null)
            .build();
}
List<DependencyLink> aggregateDependencies(long endTs, @Nullable Long lookback, Connection conn) {
    endTs = endTs * 1000;
    // Lazy fetching the cursor prevents us from buffering the whole dataset in memory.
    Cursor<Record> cursor = context.get(conn)
            .selectDistinct(hasTraceIdHigh.get() ? LINK_FIELDS : LINK_FIELDS_WITHOUT_TRACE_ID_HIGH)
            // left joining allows us to keep a mapping of all span ids, not just ones that have
            // special annotations. We need all span ids to reconstruct the trace tree. We need
            // the whole trace tree so that we can accurately skip local spans.
            .from(ZIPKIN_SPANS.leftJoin(ZIPKIN_ANNOTATIONS)
                    // NOTE: we are intentionally grouping only on the low-bits of trace id. This
                    // buys time for applications to upgrade to 128-bit instrumentation.
                    .on(ZIPKIN_SPANS.TRACE_ID.eq(ZIPKIN_ANNOTATIONS.TRACE_ID)
                            .and(ZIPKIN_SPANS.ID.eq(ZIPKIN_ANNOTATIONS.SPAN_ID)))
                    .and(ZIPKIN_ANNOTATIONS.A_KEY.in(CLIENT_SEND, CLIENT_ADDR, SERVER_RECV, SERVER_ADDR)))
            .where(lookback == null
                    ? ZIPKIN_SPANS.START_TS.lessOrEqual(endTs)
                    : ZIPKIN_SPANS.START_TS.between(endTs - lookback * 1000, endTs))
            // Grouping so that later code knows when a span or trace is finished.
            .groupBy(hasTraceIdHigh.get() ? LINK_GROUP_FIELDS : LINK_GROUP_FIELDS_WITHOUT_TRACE_ID_HIGH)
            .fetchLazy();

    Iterator<Iterator<DependencyLinkSpan>> traces =
            new DependencyLinkSpanIterator.ByTraceId(cursor.iterator(), hasTraceIdHigh.get());

    if (!traces.hasNext()) return Collections.emptyList();

    DependencyLinker linker = new DependencyLinker();
    while (traces.hasNext()) {
        linker.putTrace(traces.next());
    }
    return linker.link();
}
private int writePatientData(@NotNull final PatientData patient) {
    final Record patientRecord =
            context.select(PATIENT.ID).from(PATIENT).where(PATIENT.CPCTID.eq(patient.cpctId())).fetchOne();
    if (patientRecord != null) {
        return patientRecord.getValue(PATIENT.ID);
    } else {
        final int patientId = context.insertInto(PATIENT,
                        PATIENT.CPCTID,
                        PATIENT.REGISTRATIONDATE,
                        PATIENT.GENDER,
                        PATIENT.HOSPITAL,
                        PATIENT.BIRTHYEAR,
                        PATIENT.CANCERTYPE,
                        PATIENT.CANCERSUBTYPE,
                        PATIENT.DEATHDATE)
                .values(patient.cpctId(),
                        Utils.toSQLDate(patient.registrationDate()),
                        patient.gender(),
                        patient.hospital(),
                        patient.birthYear(),
                        patient.primaryTumorLocation().category(),
                        patient.primaryTumorLocation().subcategory(),
                        Utils.toSQLDate(patient.deathDate()))
                .returning(PATIENT.ID)
                .fetchOne()
                .getValue(PATIENT.ID);
        writeFormStatus(patientId, PATIENT.getName(), "demography", patient.demographyStatus().stateString(),
                Boolean.toString(patient.demographyLocked()));
        writeFormStatus(patientId, PATIENT.getName(), "primaryTumor", patient.primaryTumorStatus().stateString(),
                Boolean.toString(patient.primaryTumorLocked()));
        writeFormStatus(patientId, PATIENT.getName(), "eligibility", patient.eligibilityStatus().stateString(),
                Boolean.toString(patient.eligibilityLocked()));
        writeFormStatus(patientId, PATIENT.getName(), "selectionCriteria", patient.selectionCriteriaStatus().stateString(),
                Boolean.toString(patient.selectionCriteriaLocked()));
        writeFormStatus(patientId, PATIENT.getName(), "death", patient.deathStatus().stateString(),
                Boolean.toString(patient.deathLocked()));
        return patientId;
    }
}