public static String createAuthLinkResponse() {
    JsonNodeFactory f = JsonNodeFactory.instance;
    ObjectNode loginResponse = f.objectNode();
    loginResponse.put("text", "Authorization for SkyGiraffe to use Slack details is required.");
    ArrayNode attachments = loginResponse.putArray("attachments");
    ObjectNode att = f.objectNode();
    att.put("fallback", "Please authorize SkyGiraffe to access your Slack details ...");
    att.put("pretext", "");
    att.put("title", "Please authorize...");
    att.put("title_link", Config.getPropertyValue("SLACK_AUTH_URL_DEV"));
    att.put("text", "Once authorized and logged into SkyGiraffe, try '/sg help' to see all commands");
    att.put("color", "#7CD197");
    attachments.add(att);
    return loginResponse.toString();
}
/**
 * Builds a Slack message payload asking the user to log in to SkyGiraffe, e.g.:
 *
 * <pre>
 * {
 *   "attachments": [
 *     {
 *       "fallback": "Please log in to SkyGiraffe to continue ...",
 *       "pretext": "SkyGiraffe Login",
 *       "title": "Please log in to SkyGiraffe to continue ...",
 *       "title_link": "https://sgbot-mobilityai.rhcloud.com/SGbot-1.0/skyg/login?key=",
 *       "text": "Once logged in try '/sg help' to see all commands",
 *       "color": "#7CD197"
 *     }
 *   ]
 * }
 * </pre>
 *
 * @return the serialized Slack response JSON
 */
public static String createLoginLinkResponse(String slackUserId, String email, String teamId) {
    JsonNodeFactory f = JsonNodeFactory.instance;
    ObjectNode loginResponse = f.objectNode();
    loginResponse.put("text", "Login to SkyGiraffe is required.");
    ArrayNode attachments = loginResponse.putArray("attachments");
    ObjectNode att = f.objectNode();
    att.put("fallback", "Please log in to SkyGiraffe to continue ...");
    att.put("pretext", "");
    att.put("title", "Please log in...");
    att.put("title_link", Config.getPropertyValue("SGDS_LOGIN_URL_DEV") + slackUserId + "&EMAIL=" + email + "&TEAMID=" + teamId);
    att.put("text", "Once logged in try '/sg help' to see all commands");
    att.put("color", "#7CD197");
    attachments.add(att);
    return loginResponse.toString();
}
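// A minimal usage sketch (not part of the original class): parse the payload produced by
// createLoginLinkResponse back into a tree and inspect the attachment fields. Assumes
// Jackson 2.x (com.fasterxml.jackson.databind) on the classpath and hypothetical Slack
// identifiers; the original code may instead be built against the older org.codehaus.jackson packages.
public static void previewLoginResponse() throws Exception {
    String payload = createLoginLinkResponse("U12345", "user@example.com", "T67890");
    com.fasterxml.jackson.databind.ObjectMapper mapper = new com.fasterxml.jackson.databind.ObjectMapper();
    com.fasterxml.jackson.databind.JsonNode tree = mapper.readTree(payload);
    // a single attachment carries the login link and the follow-up hint
    System.out.println(tree.get("attachments").get(0).get("title_link").asText());
    System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(tree));
}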
public ArrayNode orderCollection(Collection<Order> collection) {
    ArrayNode array = JsonNodeFactory.instance.arrayNode();
    for (Order param : collection) {
        ObjectNode obj = JsonNodeFactory.instance.objectNode();
        obj.put("orderId", param.getId());
        obj.put("mile", param.getMilesage());
        obj.put("price", param.getPrice());
        obj.put("sold", param.getSold());
        obj.put("carName", param.getCar().getName());
        obj.put("carId", param.getCar().getId());
        obj.put("modelId", param.getCar().getModel().getId());
        obj.put("bodyId", param.getCar().getBody().getId());
        obj.put("drivetype", param.getCar().getDriveType().getId());
        obj.put("engineId", param.getCar().getEngine().getId());
        obj.put("transsmId", param.getCar().getTransmission().getId());
        obj.put("data", param.getRelease().getTime());
        obj.put("userId", param.getUser().getId());
        array.add(obj);
    }
    return array;
}
public ArrayNode convert(Collection<Order> collection) {
    ArrayNode array = JsonNodeFactory.instance.arrayNode();
    for (Order param : collection) {
        ObjectNode order = JsonNodeFactory.instance.objectNode();
        order.put("orderId", param.getId());
        order.put("mile", param.getMilesage());
        order.put("price", param.getPrice());
        if (param.getSold()) {
            order.put("sold", "V");
        } else {
            order.put("sold", "");
        }
        order.put("carName", param.getCar().getName());
        order.put("carId", param.getCar().getId());
        // expose only the release year, not the full timestamp
        GregorianCalendar calendar = new GregorianCalendar();
        calendar.setTimeInMillis(param.getRelease().getTime());
        order.put("data", String.valueOf(calendar.get(Calendar.YEAR)));
        order.put("userId", param.getUser().getId());
        array.add(order);
    }
    return array;
}
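// Side note on the year extraction above: GregorianCalendar resolves the epoch millis in the
// JVM's default time zone. A java.time equivalent (a sketch, not the project's code) makes
// that assumption explicit and avoids the mutable calendar instance.
static String releaseYear(java.util.Date release) {
    return String.valueOf(
            java.time.ZonedDateTime
                    .ofInstant(java.time.Instant.ofEpochMilli(release.getTime()),
                               java.time.ZoneId.systemDefault())
                    .getYear());
}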
/**
 * Creates an array of JSON objects which hold details about the found users or groups.
 *
 * @param list
 *            the search result to process
 * @param addSummary
 *            if true a summary JSON object will be added to the top of the array. The JSON
 *            object will be created with {@link #createSearchSummaryStatement(PageableList)}.
 * @param imageSize
 *            the size of the user logo to include in the image path, can be null to not include
 *            the image path into the JSON object. For groups no image will be included.
 * @return the JSON array with the details about the users
 */
public static ArrayNode createEntitySearchJSONResult(PageableList<CommunoteEntityData> list,
        boolean addSummary, ImageSizeType imageSize) {
    JsonNodeFactory nodeFactory = JsonHelper.getSharedObjectMapper().getNodeFactory();
    ArrayNode result = nodeFactory.arrayNode();
    if (addSummary) {
        result.add(UserSearchHelper.createSearchSummaryStatement(list));
    }
    for (CommunoteEntityData item : list) {
        String imagePath = null;
        boolean isGroup = (item instanceof EntityGroupListItem);
        if (!isGroup && imageSize != null) {
            imagePath = ImageUrlHelper.buildUserImageUrl(item.getEntityId(), imageSize);
        }
        ObjectNode entry = createUserSearchJSONResult(item.getEntityId(),
                item.getShortDisplayName(), item.getDisplayName(), imagePath, item.getAlias());
        entry.put("isGroup", isGroup);
        result.add(entry);
    }
    return result;
}
/**
 * Adds the data of the default blog if it is activated.
 *
 * @param noteData
 *            the data to modify
 * @return the ID of the default blog if enabled, null otherwise
 */
private Long putDefaultBlogInfo(ObjectNode noteData) {
    Long defaultBlogId = null;
    Blog defaultBlog = null;
    JsonNode defaultBlogTitle;
    JsonNode defaultBlogAlias;
    defaultBlog = getDefaultBlog();
    JsonNodeFactory factory = JsonHelper.getSharedObjectMapper().getNodeFactory();
    if (defaultBlog != null) {
        defaultBlogId = defaultBlog.getId();
        defaultBlogTitle = factory.textNode(defaultBlog.getTitle());
        defaultBlogAlias = factory.textNode(defaultBlog.getNameIdentifier());
    } else {
        defaultBlogTitle = factory.nullNode();
        defaultBlogAlias = factory.nullNode();
    }
    noteData.put("defaultBlogId",
            defaultBlogId == null ? factory.nullNode() : factory.numberNode(defaultBlogId));
    noteData.put("defaultBlogTitle", defaultBlogTitle);
    noteData.put("defaultBlogAlias", defaultBlogAlias);
    return defaultBlogId;
}
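// The ternary above is the usual way to emit an explicit JSON null for an optional numeric
// field; depending on the Jackson version, put(String, Long) may map a Java null to a JSON
// null automatically, but spelling out nullNode()/numberNode() keeps the intent visible.
// A standalone sketch (method and field names here are illustrative, not from the original code):
static ObjectNode describeDefaultBlog(Long defaultBlogId) {
    JsonNodeFactory factory = JsonNodeFactory.instance;
    ObjectNode node = factory.objectNode();
    node.put("defaultBlogId",
            defaultBlogId == null ? factory.nullNode() : factory.numberNode(defaultBlogId));
    return node; // {"defaultBlogId":null} when absent, {"defaultBlogId":42} when set
}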
protected static void addParameters(ObjectNode cstNode, Map<Parameter<?>, Object> params) {
    ArrayNode argsNode = JsonNodeFactory.instance.arrayNode();
    cstNode.put(ArgsKey, argsNode);
    for (Entry<Parameter<?>, Object> entry : params.entrySet()) {
        ObjectNode argNode = JsonNodeFactory.instance.objectNode();
        argNode.put(ArgNameKey, entry.getKey().id());
        argNode.put(ArgValueKey, String.valueOf(entry.getValue()));
        argsNode.add(argNode);
    }
}
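// Shape check for addParameters() (an illustrative sketch, not from the original code):
// the real ArgsKey/ArgNameKey/ArgValueKey constants are not shown above, so "args",
// "name" and "value" stand in for them here.
public static void printArgsShape() {
    JsonNodeFactory f = JsonNodeFactory.instance;
    ObjectNode cstNode = f.objectNode();
    ArrayNode argsNode = cstNode.putArray("args");
    ObjectNode argNode = f.objectNode();
    argNode.put("name", "maxLength");
    argNode.put("value", String.valueOf(255));
    argsNode.add(argNode);
    System.out.println(cstNode); // {"args":[{"name":"maxLength","value":"255"}]}
}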
@Path("state") @GET @Produces(MediaType.APPLICATION_JSON) public String getStatus() throws Exception { ObjectNode mainNode = JsonNodeFactory.instance.objectNode(); ObjectNode switchesNode = JsonNodeFactory.instance.objectNode(); for ( ControlPanelTypes type : ControlPanelTypes.values() ) { switchesNode.put(UIResource.fixName(type), context.getExhibitor().getControlPanelValues().isSet(type)); } mainNode.put("switches", switchesNode); MonitorRunningInstance monitorRunningInstance = context.getExhibitor().getMonitorRunningInstance(); InstanceStateTypes state = monitorRunningInstance.getCurrentInstanceState(); mainNode.put("state", state.getCode()); mainNode.put("description", state.getDescription()); mainNode.put("isLeader", monitorRunningInstance.isCurrentlyLeader()); return JsonUtil.writeValueAsString(mainNode); }
@Path("list") @GET @Produces(MediaType.APPLICATION_JSON) public String getClusterAsJson() throws Exception { InstanceConfig config = context.getExhibitor().getConfigManager().getConfig(); ObjectNode node = JsonNodeFactory.instance.objectNode(); ArrayNode serversNode = JsonNodeFactory.instance.arrayNode(); ServerList serverList = new ServerList(config.getString(StringConfigs.SERVERS_SPEC)); for ( ServerSpec spec : serverList.getSpecs() ) { serversNode.add(spec.getHostname()); } node.put("servers", serversNode); node.put("port", config.getInt(IntConfigs.CLIENT_PORT)); return JsonUtil.writeValueAsString(node); }
@Path("dataTable/{index-name}/{search-handle}") @GET @Produces(MediaType.APPLICATION_JSON) public String getDataTableData ( @PathParam("index-name") String indexName, @PathParam("search-handle") String searchHandle, @QueryParam("iDisplayStart") int iDisplayStart, @QueryParam("iDisplayLength") int iDisplayLength, @QueryParam("sEcho") String sEcho ) throws Exception { LogSearch logSearch = getLogSearch(indexName); if ( logSearch == null ) { return "{}"; } ObjectNode node; try { CachedSearch cachedSearch = logSearch.getCachedSearch(searchHandle); DateFormat dateFormatter = new SimpleDateFormat(DATE_FORMAT_STR); ArrayNode dataTab = JsonNodeFactory.instance.arrayNode(); for ( int i = iDisplayStart; i < (iDisplayStart + iDisplayLength); ++i ) { if ( i < cachedSearch.getTotalHits() ) { ObjectNode data = JsonNodeFactory.instance.objectNode(); int docId = cachedSearch.getNthDocId(i); SearchItem item = logSearch.toResult(docId); data.put("DT_RowId", "index-query-result-" + docId); data.put("0", getTypeName(EntryTypes.getFromId(item.getType()))); data.put("1", dateFormatter.format(item.getDate())); data.put("2", trimPath(item.getPath())); dataTab.add(data); } } node = JsonNodeFactory.instance.objectNode(); node.put("sEcho", sEcho); node.put("iTotalRecords", logSearch.getDocQty()); node.put("iTotalDisplayRecords", cachedSearch.getTotalHits()); node.put("aaData", dataTab); } finally { context.getExhibitor().getIndexCache().releaseLogSearch(logSearch.getFile()); } return node.toString(); }
@Path("backup-config") @GET @Produces(MediaType.APPLICATION_JSON) public String getBackupConfig() throws Exception { ArrayNode node = JsonNodeFactory.instance.arrayNode(); if ( context.getExhibitor().getBackupManager().isActive() ) { EncodedConfigParser parser = context.getExhibitor().getBackupManager().getBackupConfigParser(); List<BackupConfigSpec> configs = context.getExhibitor().getBackupManager().getConfigSpecs(); for ( BackupConfigSpec c : configs ) { ObjectNode n = JsonNodeFactory.instance.objectNode(); String value = parser.getValue(c.getKey()); n.put("key", c.getKey()); n.put("name", c.getDisplayName()); n.put("help", c.getHelpText()); n.put("value", (value != null) ? value : ""); n.put("type", c.getType().name().toLowerCase().substring(0, 1)); node.add(n); } } return JsonUtil.writeValueAsString(node); }
@GET @Path("node-data") @Produces("application/json") public String getNodeData(@QueryParam("key") String key) throws Exception { ObjectNode node = JsonNodeFactory.instance.objectNode(); try { Stat stat = context.getExhibitor().getLocalConnection().checkExists().forPath(key); byte[] bytes = context.getExhibitor().getLocalConnection().getData().storingStatIn(stat).forPath(key); if (bytes != null) { node.put("bytes", bytesToString(bytes)); node.put("str", new String(bytes, "UTF-8")); } else { node.put("bytes", ""); node.put("str", ""); } node.put("stat", reflectToString(stat)); } catch ( KeeperException.NoNodeException dummy ) { node.put("bytes", ""); node.put("str", ""); node.put("stat", "* not found * "); } catch ( Throwable e ) { node.put("bytes", ""); node.put("str", "Exception"); node.put("stat", e.getMessage()); } return node.toString(); }
public static void main(String[] args) {
    DependencyGraph g = new DependencyGraph();
    JsonNodeFactory nc = JsonNodeFactory.instance;
    JsonNode a = nc.numberNode(1);
    JsonNode b = nc.numberNode(2);
    JsonNode c = nc.numberNode(3);
    JsonNode d = nc.numberNode(4);
    JsonNode e = nc.numberNode(5);
    JsonNode f = nc.numberNode(6);
    JsonNode h = nc.numberNode(7);
    JsonNode i = nc.numberNode(8);

    g.addNode("input", null, a);
    g.addNode("loaddict", null, b);
    g.addNode("second", null, c);
    g.addNode("encode", Arrays.asList(new String[] { "input", "loaddict" }), d);
    g.addNode("groupby", Arrays.asList(new String[] { "encode" }), e);
    g.addNode("filter", Arrays.asList(new String[] { "groupby" }), f);
    g.addNode("join", Arrays.asList(new String[] { "filter", "second" }), h);
    g.addNode("shuffle", Arrays.asList(new String[] { "join" }), i);

    System.out.println(g.getSerialPlan());
}
public HashJoinOperator createHashJoinOperator(BlockSchema lSchema, BlockSchema rSchema,
        BlockSchema operatorSchema, TupleStore lStore, TupleStore rStore)
        throws IOException, InterruptedException {
    /* Create Blocks */
    final Block lBlock =
            new TupleStoreBlock(lStore, new BlockProperties(lBlockName, lSchema, (BlockProperties) null));
    final Block rBlock =
            new TupleStoreBlock(rStore, new BlockProperties(rBlockName, rSchema, (BlockProperties) null));

    /* Perform the Hash Join */
    Map<String, Block> input = new HashMap<String, Block>();
    input.put(lBlockName, lBlock);
    input.put(rBlockName, rBlock);

    ObjectNode root = new ObjectNode(JsonNodeFactory.instance);
    root.put("leftBlock", lBlockName);
    root.put("rightBlock", rBlockName);

    final ArrayNode joinKeys = new ArrayNode(JsonNodeFactory.instance);
    joinKeys.add("Integer");
    root.put("leftJoinKeys", joinKeys);
    root.put("rightJoinKeys", joinKeys);

    BlockProperties props = new BlockProperties("Joined", operatorSchema, (BlockProperties) null);
    HashJoinOperator operator = new HashJoinOperator();
    operator.setInput(input, root, props);
    return operator;
}
@GET @Path("{instanceName}/messages") public Response getMessagesOnInstance(@PathParam("clusterId") String clusterId, @PathParam("instanceName") String instanceName) throws IOException { HelixDataAccessor accessor = getDataAccssor(clusterId); ObjectNode root = JsonNodeFactory.instance.objectNode(); root.put(Properties.id.name(), instanceName); ArrayNode newMessages = root.putArray(InstanceProperties.new_messages.name()); ArrayNode readMessages = root.putArray(InstanceProperties.read_messages.name()); List<String> messages = accessor.getChildNames(accessor.keyBuilder().messages(instanceName)); if (messages == null || messages.size() == 0) { return notFound(); } for (String messageName : messages) { Message message = accessor.getProperty(accessor.keyBuilder().message(instanceName, messageName)); if (message.getMsgState() == Message.MessageState.NEW) { newMessages.add(messageName); } if (message.getMsgState() == Message.MessageState.READ) { readMessages.add(messageName); } } root.put(InstanceProperties.total_message_count.name(), newMessages.size() + readMessages.size()); root.put(InstanceProperties.read_message_count.name(), readMessages.size()); return JSONRepresentation(root); }
@GET @Path("{instanceName}/healthreports") public Response getHealthReportsOnInstance(@PathParam("clusterId") String clusterId, @PathParam("instanceName") String instanceName) throws IOException { HelixDataAccessor accessor = getDataAccssor(clusterId); ObjectNode root = JsonNodeFactory.instance.objectNode(); root.put(Properties.id.name(), instanceName); ArrayNode healthReportsNode = root.putArray(InstanceProperties.healthreports.name()); List<String> healthReports = accessor.getChildNames(accessor.keyBuilder().healthReports(instanceName)); if (healthReports != null && healthReports.size() > 0) { healthReportsNode.addAll((ArrayNode) OBJECT_MAPPER.valueToTree(healthReports)); } return JSONRepresentation(root); }
@GET
public Response getJobs(@PathParam("clusterId") String clusterId,
        @PathParam("workflowName") String workflowName) {
    TaskDriver driver = getTaskDriver(clusterId);
    WorkflowConfig workflowConfig = driver.getWorkflowConfig(workflowName);
    ObjectNode root = JsonNodeFactory.instance.objectNode();

    if (workflowConfig == null) {
        return badRequest(String.format("Workflow %s is not found!", workflowName));
    }

    Set<String> jobs = workflowConfig.getJobDag().getAllNodes();
    root.put(Properties.id.name(), JobProperties.Jobs.name());
    ArrayNode jobsNode = root.putArray(JobProperties.Jobs.name());
    if (jobs != null) {
        jobsNode.addAll((ArrayNode) OBJECT_MAPPER.valueToTree(jobs));
    }
    return JSONRepresentation(root);
}
/**
 * Creates the override strategy field.
 *
 * @return the field
 */
private Field createOverrideStrategyField() {
    List<String> overrideStrategySymbols =
            Arrays.asList(OverrideStrategy.APPEND.name(), OverrideStrategy.REPLACE.name());
    Schema overrideStrategyEnum = Schema.createEnum(OVERRIDE_STRATEGY_TYPE_NAME, null,
            BASE_SCHEMA_FORM_NAMESPACE, overrideStrategySymbols);
    Field overrideStrategyField = new Field(OVERRIDE_STRATEGY,
            Schema.createUnion(Arrays.asList(overrideStrategyEnum, Schema.create(Type.NULL))), null, null);
    overrideStrategyField.addProp(DISPLAY_NAME, "Override strategy");

    JsonNodeFactory jsonFactory = JsonNodeFactory.instance;
    ArrayNode displayNamesNode = jsonFactory.arrayNode();
    displayNamesNode.add(TextNode.valueOf("Append"));
    displayNamesNode.add(TextNode.valueOf("Replace"));
    overrideStrategyField.addProp(DISPLAY_NAMES, displayNamesNode);
    overrideStrategyField.addProp(DISPLAY_PROMPT, "Select array override strategy");
    return overrideStrategyField;
}
/**
 * Creates the class type field.
 *
 * @return the field
 */
private Field createClassTypeField() {
    List<String> classTypeSymbols = Arrays.asList(OBJECT, EVENT);
    Schema classTypeEnum = Schema.createEnum(CLASS_TYPE_TYPE_NAME, null,
            BASE_SCHEMA_FORM_NAMESPACE, classTypeSymbols);
    Field classTypeField = new Field(CLASS_TYPE, classTypeEnum, null, null);
    classTypeField.addProp(DISPLAY_NAME, "Class type");

    JsonNodeFactory jsonFactory = JsonNodeFactory.instance;
    ArrayNode displayNamesNode = jsonFactory.arrayNode();
    displayNamesNode.add(TextNode.valueOf("Object"));
    displayNamesNode.add(TextNode.valueOf("Event"));
    classTypeField.addProp(DISPLAY_NAMES, displayNamesNode);
    classTypeField.addProp(DISPLAY_PROMPT, "Select class type");
    classTypeField.addProp(BY_DEFAULT, OBJECT);
    return classTypeField;
}
@SuppressWarnings("unchecked") @Override public JsonNode getSchema(SerializerProvider provider, Type typeHint) throws JsonMappingException { ObjectNode o = createSchemaNode("object", true); if (typeHint instanceof ParameterizedType) { Type[] typeArgs = ((ParameterizedType) typeHint).getActualTypeArguments(); if (typeArgs.length == 2) { JavaType enumType = TypeFactory.type(typeArgs[0]); JavaType valueType = TypeFactory.type(typeArgs[1]); ObjectNode propsNode = JsonNodeFactory.instance.objectNode(); Class<Enum<?>> enumClass = (Class<Enum<?>>) enumType.getRawClass(); for (Enum<?> enumValue : enumClass.getEnumConstants()) { JsonSerializer<Object> ser = provider.findValueSerializer(valueType.getRawClass()); JsonNode schemaNode = (ser instanceof SchemaAware) ? ((SchemaAware) ser).getSchema(provider, null) : JsonSchema.getDefaultSchemaNode(); propsNode.put(provider.getConfig().getAnnotationIntrospector().findEnumValue((Enum<?>)enumValue), schemaNode); } o.put("properties", propsNode); } } return o; }
@Test
public void testEvolvedSchema() throws IOException {
    Schema schema = Schema.createRecord("mystring", null, null, false);
    schema.setFields(Lists.newArrayList(
            new Field("text", Schema.create(Type.STRING), null, null),
            new Field("text2", Schema.create(Type.STRING), null,
                    JsonNodeFactory.instance.textNode("N/A"))));

    FileSystemDatasetReader<Record> reader = new FileSystemDatasetReader<Record>(fileSystem,
            new Path(Resources.getResource("data/strings-100.avro").getFile()), schema);

    checkReaderBehavior(reader, 100, new RecordValidator<Record>() {
        @Override
        public void validate(Record record, int recordNum) {
            Assert.assertEquals(String.valueOf(recordNum), record.get("text"));
            Assert.assertEquals("N/A", record.get("text2"));
        }
    });
}
@Override
public void sendRoomNotification(final String baseURL, final String room, final String message,
        final String color, final String authToken, final boolean sendUserNotification) {
    final ObjectNode requestBody = JsonNodeFactory.instance.objectNode();
    requestBody.put("message", message);
    requestBody.put("color", color);
    requestBody.put("message_format", "html");
    requestBody.put("notify", sendUserNotification);

    final String urlPath = String.format(HIPCHAT_API_ROOM_NOTIFICATION_URL_PATH, urlEncode(room));
    final String urlQueryString = String.format(HIPCHAT_API_ROOM_NOTIFICATION_URL_QUERY, urlEncode(authToken));

    final HttpResponse httpResponse = httpRequestExecutor.execute(
            baseURL + "/" + HIPCHAT_API_VERSION + "/" + urlPath + urlQueryString, requestBody.toString());

    if (httpResponse.getResponseCode() != HttpResponse.STATUS__NO_CONTENT) {
        throw toHipChatNotificationPluginException(httpResponse);
    }
}
public static String getSerializedJSON(List<SearchResult> searchResults) {
    ArrayNode resultArray = JsonNodeFactory.instance.arrayNode();
    for (SearchResult result : searchResults) {
        ObjectNode resultNode = JsonNodeFactory.instance.objectNode();
        resultNode.put("metric", result.getMetricName());
        String unit = result.getUnit();
        if (unit != null) {
            // Pre-aggregated metrics do not have units. Do not want to return null units in query results.
            resultNode.put("unit", unit);
        }
        resultArray.add(resultNode);
    }
    return resultArray.toString();
}
public static ObjectNode rollupToJson(Rollup rollup) {
    if (rollup instanceof BluefloodCounterRollup)
        return handleCounterRollup((BluefloodCounterRollup) rollup);
    else if (rollup instanceof BluefloodTimerRollup)
        return handleTimerRollup((BluefloodTimerRollup) rollup);
    else if (rollup instanceof BluefloodSetRollup)
        return handleSetRollup((BluefloodSetRollup) rollup);
    else if (rollup instanceof BluefloodGaugeRollup)
        return handleGaugeRollup((BluefloodGaugeRollup) rollup);
    else if (rollup instanceof BasicRollup)
        return handleBasicRollup((BasicRollup) rollup, JsonNodeFactory.instance.objectNode());
    else {
        log.error("Error encountered while serializing the rollup " + rollup);
        throw new IOError(new IOException("Cannot serialize the Rollup : " + rollup));
    }
}
public static ObjectNode serializeRollupEvent(RollupEvent rollupPayload) throws IOException {
    // Metadata node
    ObjectNode metaNode = JsonNodeFactory.instance.objectNode();
    metaNode.put("type", rollupPayload.getRollup().getRollupType().toString());
    metaNode.put("unit", rollupPayload.getUnit());

    // Create and fill up root node
    ObjectNode rootNode = JsonNodeFactory.instance.objectNode();
    rootNode.put("tenantId", rollupPayload.getLocator().getTenantId());
    rootNode.put("metricName", rollupPayload.getLocator().getMetricName());
    rootNode.put("gran", rollupPayload.getGranularityName());
    rootNode.put("rollup", RollupSerializationHelper.rollupToJson(rollupPayload.getRollup()));
    rootNode.put("timestamp", rollupPayload.getTimestamp());
    rootNode.put("metadata", metaNode);
    return rootNode;
}
protected final ArrayNode deserializeArray(JsonParser jp, DeserializationContext ctxt,
        JsonNodeFactory nodeFactory) throws IOException, JsonProcessingException {
    ArrayNode node = nodeFactory.arrayNode();
    // Consume tokens until the matching END_ARRAY, recursing into nested structures.
    while (true) {
        switch (jp.nextToken()) {
        case START_OBJECT:
            node.add(deserializeObject(jp, ctxt, nodeFactory));
            break;
        case START_ARRAY:
            node.add(deserializeArray(jp, ctxt, nodeFactory));
            break;
        case VALUE_STRING:
            node.add(nodeFactory.textNode(jp.getText()));
            break;
        case END_ARRAY:
            return node;
        default:
            node.add(deserializeAny(jp, ctxt, nodeFactory));
            break;
        }
    }
}
private static Schema getRecordSchemaWithDirtySupport(Schema originalSchema,
        Map<Schema, Schema> queue) throws IOException {
    if (originalSchema.getType() != Type.RECORD) {
        throw new IOException("Gora only supports record schemas.");
    }
    List<Field> originalFields = originalSchema.getFields();
    /* make sure the schema doesn't contain the field __g__dirty */
    for (Field field : originalFields) {
        if (GORA_RESERVED_NAMES.contains(field.name())) {
            throw new IOException(
                    "Gora schemas cannot contain the field name " + field.name());
        }
    }
    Schema newSchema = Schema.createRecord(originalSchema.getName(),
            originalSchema.getDoc(), originalSchema.getNamespace(), originalSchema.isError());

    queue.put(originalSchema, newSchema);

    List<Field> newFields = new ArrayList<>();
    byte[] defaultDirtyBytesValue = new byte[getNumberOfBytesNeededForDirtyBits(originalSchema)];
    Arrays.fill(defaultDirtyBytesValue, (byte) 0);
    JsonNode defaultDirtyJsonValue = JsonNodeFactory.instance.binaryNode(defaultDirtyBytesValue);
    Field dirtyBits = new Field(DIRTY_BYTES_FIELD_NAME, Schema.create(Type.BYTES),
            "Bytes used to represent whether or not a field is dirty.", defaultDirtyJsonValue);
    newFields.add(dirtyBits);
    for (Field originalField : originalFields) {
        // recursively add dirty support
        Field newField = new Field(originalField.name(),
                getSchemaWithDirtySupport(originalField.schema(), queue),
                originalField.doc(), originalField.defaultValue(), originalField.order());
        newFields.add(newField);
    }
    newSchema.setFields(newFields);
    return newSchema;
}
public static String getParameterList(String sessionId, String appId, String repId, String repUpId)
        throws IOException {
    // if (sessionId == null || sessionId.isEmpty()) throw new IllegalArgumentException("Session Id is empty. Please login.");
    HttpPost postRequest = new HttpPost(
            "https://wspublisherv2https.skygiraffe.com/WSPublisherV2.svc/GetReport_ParametersMainScreen4");
    // add header
    postRequest.setHeader("Content-Type", "application/json");

    JsonNodeFactory f = JsonNodeFactory.instance;
    ObjectNode o = f.objectNode();
    o.put("ApplicationID", appId);
    o.put("ReportID", repId);
    o.put("TabID", "");
    ArrayNode diuId = o.putArray("DataItemUpdateIDs");
    o.put("ReportUpdateID", repUpId);
    o.put("RequestID", sessionId);
    o.put("SessionID", sessionId);

    StringEntity inputApps = new StringEntity(o.toString());
    inputApps.setContentType("application/json");
    postRequest.setEntity(inputApps);

    CloseableHttpResponse responseParams = getHttpClient().execute(postRequest);
    String paramStr = "";
    try {
        HttpEntity entityApps = responseParams.getEntity();
        paramStr = EntityUtils.toString(entityApps, "UTF-8");
        EntityUtils.consume(entityApps);
    } finally {
        responseParams.close();
    }
    logger.debug("Params = " + paramStr);
    return paramStr;
}
public static void add(Schema schema, ValueConstraint<?> constraint) {
    ArrayNode cstArrayNode = getOrCreateArrayNode(schema, ConstraintKey);
    // TODO remove any pre-existing constraint that matches the new one
    ObjectNode cstNode = JsonNodeFactory.instance.objectNode();
    cstNode.put(ConstraintFunctionKey, constraint.typeTag());
    cstArrayNode.add(cstNode);
    Map<Parameter<?>, Object> params = constraint.parameters();
    if (params.isEmpty())
        return;
    addParameters(cstNode, params);
}
@Override
public Schema convertSchema(JsonArray schema, WorkUnitState workUnit) throws SchemaConversionException {
    List<Schema.Field> fields = new ArrayList<Schema.Field>();
    for (JsonElement elem : schema) {
        JsonObject map = (JsonObject) elem;
        String columnName = map.get("columnName").getAsString();
        String comment = map.get("comment").getAsString();
        boolean nullable = map.has("isNullable") ? map.get("isNullable").getAsBoolean() : false;
        Schema fldSchema;

        try {
            JsonElementConversionFactory.JsonElementConverter converter =
                    JsonElementConversionFactory.getConvertor(columnName,
                            map.get("dataType").getAsJsonObject().get("type").getAsString(), map, workUnit, nullable);
            converters.put(columnName, converter);
            fldSchema = converter.getSchema();
        } catch (UnsupportedDateTypeException e) {
            throw new SchemaConversionException(e);
        }

        Field fld = new Field(columnName, fldSchema, comment,
                nullable ? JsonNodeFactory.instance.nullNode() : null);
        fld.addProp("source.type", map.get("dataType").getAsJsonObject().get("type").getAsString());
        fields.add(fld);
    }

    Schema avroSchema =
            Schema.createRecord(workUnit.getExtract().getTable(), "", workUnit.getExtract().getNamespace(), false);
    avroSchema.setFields(fields);

    if (workUnit.getPropAsBoolean(CONVERTER_AVRO_NULLIFY_FIELDS_ENABLED,
            DEFAULT_CONVERTER_AVRO_NULLIFY_FIELDS_ENABLED)) {
        return this.generateSchemaWithNullifiedField(workUnit, avroSchema);
    }
    return avroSchema;
}
public BaseTO() {
    super();
    if (factory == null) {
        factory = JsonNodeFactory.instance;
        jsonMapper = new ObjectMapper();
    }
}
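// The lazy, unsynchronized initialization above can race when several instances are
// constructed concurrently (the null check implies factory and jsonMapper are static fields).
// A common alternative is eager static-final initialization, sketched below under that
// assumption; ObjectMapper instances are safe to share once configured. The class name is
// illustrative, not from the original code.
public abstract class BaseTOStaticInitSketch {
    protected static final JsonNodeFactory factory = JsonNodeFactory.instance;
    protected static final ObjectMapper jsonMapper = new ObjectMapper();

    protected BaseTOStaticInitSketch() {
        super();
    }
}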
private Schema buildRecordSchema(JsonSchema schema, WorkUnitState workUnit, String name, String namespace) {
    List<Schema.Field> fields = new ArrayList<>();
    for (int i = 0; i < schema.fieldsCount(); i++) {
        JsonSchema map = schema.getFieldSchemaAt(i);
        String childNamespace = buildNamespace(namespace, name);
        JsonElementConverter converter;
        String sourceType;
        Schema fldSchema;

        try {
            sourceType = map.isType(UNION) ? UNION.toString().toLowerCase() : map.getType().toString().toLowerCase();
            converter = getConvertor(map, childNamespace, workUnit);
            this.converters.put(map.getColumnName(), converter);
            fldSchema = converter.schema();
        } catch (UnsupportedDateTypeException e) {
            throw new UnsupportedOperationException(e);
        }

        Schema.Field fld = new Schema.Field(map.getColumnName(), fldSchema, map.getComment(),
                map.isNullable() ? JsonNodeFactory.instance.nullNode() : null);
        fld.addProp(SOURCE_TYPE, sourceType);
        fields.add(fld);
    }

    Schema avroSchema = Schema.createRecord(name.isEmpty() ? null : name, "", namespace, false);
    avroSchema.setFields(fields);
    return avroSchema;
}
@GET @Path("{instanceName}/errors") public Response getErrorsOnInstance(@PathParam("clusterId") String clusterId, @PathParam("instanceName") String instanceName) throws IOException { HelixDataAccessor accessor = getDataAccssor(clusterId); ObjectNode root = JsonNodeFactory.instance.objectNode(); root.put(Properties.id.name(), instanceName); ObjectNode errorsNode = JsonNodeFactory.instance.objectNode(); List<String> sessionIds = accessor.getChildNames(accessor.keyBuilder().errors(instanceName)); if (sessionIds == null || sessionIds.size() == 0) { return notFound(); } for (String sessionId : sessionIds) { List<String> resources = accessor.getChildNames(accessor.keyBuilder().errors(instanceName, sessionId)); if (resources != null) { ObjectNode resourcesNode = JsonNodeFactory.instance.objectNode(); for (String resourceName : resources) { List<String> partitions = accessor .getChildNames(accessor.keyBuilder().errors(instanceName, sessionId, resourceName)); if (partitions != null) { ArrayNode partitionsNode = resourcesNode.putArray(resourceName); partitionsNode.addAll((ArrayNode) OBJECT_MAPPER.valueToTree(partitions)); } } errorsNode.put(sessionId, resourcesNode); } } root.put(InstanceProperties.errors.name(), errorsNode); return JSONRepresentation(root); }
@GET @Path("{workflowId}") public Response getWorkflow(@PathParam("clusterId") String clusterId, @PathParam("workflowId") String workflowId) { TaskDriver taskDriver = getTaskDriver(clusterId); WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflowId); WorkflowContext workflowContext = taskDriver.getWorkflowContext(workflowId); ObjectNode root = JsonNodeFactory.instance.objectNode(); TextNode id = JsonNodeFactory.instance.textNode(workflowId); root.put(Properties.id.name(), id); ObjectNode workflowConfigNode = JsonNodeFactory.instance.objectNode(); ObjectNode workflowContextNode = JsonNodeFactory.instance.objectNode(); if (workflowConfig != null) { getWorkflowConfigNode(workflowConfigNode, workflowConfig.getRecord()); } if (workflowContext != null) { getWorkflowContextNode(workflowContextNode, workflowContext.getRecord()); } root.put(WorkflowProperties.WorkflowConfig.name(), workflowConfigNode); root.put(WorkflowProperties.WorkflowContext.name(), workflowContextNode); JobDag jobDag = workflowConfig.getJobDag(); ArrayNode jobs = OBJECT_MAPPER.valueToTree(jobDag.getAllNodes()); ObjectNode parentJobs = OBJECT_MAPPER.valueToTree(jobDag.getParentsToChildren()); root.put(WorkflowProperties.Jobs.name(), jobs); root.put(WorkflowProperties.ParentJobs.name(), parentJobs); return JSONRepresentation(root); }
@GET @Path("{workflowId}/configs") public Response getWorkflowConfig(@PathParam("clusterId") String clusterId, @PathParam("workflowId") String workflowId) { TaskDriver taskDriver = getTaskDriver(clusterId); WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflowId); ObjectNode workflowConfigNode = JsonNodeFactory.instance.objectNode(); if (workflowConfig != null) { getWorkflowConfigNode(workflowConfigNode, workflowConfig.getRecord()); } return JSONRepresentation(workflowConfigNode); }
@GET @Path("{workflowId}/context") public Response getWorkflowContext(@PathParam("clusterId") String clusterId, @PathParam("workflowId") String workflowId) { TaskDriver taskDriver = getTaskDriver(clusterId); WorkflowContext workflowContext = taskDriver.getWorkflowContext(workflowId); ObjectNode workflowContextNode = JsonNodeFactory.instance.objectNode(); if (workflowContext != null) { getWorkflowContextNode(workflowContextNode, workflowContext.getRecord()); } return JSONRepresentation(workflowContextNode); }
private void getWorkflowConfigNode(ObjectNode workflowConfigNode, ZNRecord record) {
    for (Map.Entry<String, String> entry : record.getSimpleFields().entrySet()) {
        // compare against the property name; comparing a String to the enum constant is always false
        if (!entry.getKey().equals(WorkflowConfig.WorkflowConfigProperty.Dag.name())) {
            workflowConfigNode.put(entry.getKey(), JsonNodeFactory.instance.textNode(entry.getValue()));
        }
    }
}
private void getWorkflowContextNode(ObjectNode workflowContextNode, ZNRecord record) {
    if (record.getMapFields() != null) {
        for (String fieldName : record.getMapFields().keySet()) {
            JsonNode node = OBJECT_MAPPER.valueToTree(record.getMapField(fieldName));
            workflowContextNode.put(fieldName, node);
        }
    }

    if (record.getSimpleFields() != null) {
        for (Map.Entry<String, String> entry : record.getSimpleFields().entrySet()) {
            workflowContextNode.put(entry.getKey(), JsonNodeFactory.instance.textNode(entry.getValue()));
        }
    }
}
public JsonNode getSchema(SerializerProvider provider, Type typeHint) throws JsonMappingException {
    ObjectNode o = createSchemaNode("object", true);
    if (typeHint instanceof ParameterizedType) {
        Type[] typeArgs = ((ParameterizedType) typeHint).getActualTypeArguments();
        if (typeArgs.length == 2) {
            JavaType enumType = provider.constructType(typeArgs[0]);
            JavaType valueType = provider.constructType(typeArgs[1]);
            ObjectNode propsNode = JsonNodeFactory.instance.objectNode();
            Enum<?>[] enumConstants = (Enum<?>[]) enumType.getRawClass().getEnumConstants();
            for (Enum<?> enumValue : enumConstants) {
                JsonSerializer<Object> ser = provider.findValueSerializer(valueType.getRawClass(), this._property);
                JsonNode schemaNode = (ser instanceof SchemaAware)
                        ? ((SchemaAware) ser).getSchema(provider, null)
                        : JsonSchema.getDefaultSchemaNode();
                propsNode.put(provider.getConfig().getAnnotationIntrospector().findEnumValue(enumValue), schemaNode);
            }
            o.put("properties", propsNode);
        }
    }
    return o;
}