/**
 * Parse a YAML stream and produce parsing events.
 *
 * @see <a href="http://yaml.org/spec/1.1/#id859333">Processing Overview</a>
 * @param yaml
 *            YAML document(s)
 * @return parsed events
 */
public Iterable<Event> parse(Reader yaml) {
    final Parser parser = new ParserImpl(new StreamReader(yaml));
    // Expose the pull-parser as a standard Iterator; events are produced lazily.
    Iterator<Event> iterator = new Iterator<Event>() {
        public boolean hasNext() {
            // A null peek means the event stream is exhausted.
            return parser.peekEvent() != null;
        }

        public Event next() {
            return parser.getEvent();
        }

        public void remove() {
            // Events cannot be removed from the underlying stream.
            throw new UnsupportedOperationException();
        }
    };
    return new EventIterable(iterator);
}
public Event produce() { // Parse an implicit document. if (!scanner.checkToken(Token.ID.Directive, Token.ID.DocumentStart, Token.ID.StreamEnd)) { directives = new VersionTagsTuple(null, DEFAULT_TAGS); Token token = scanner.peekToken(); Mark startMark = token.getStartMark(); Mark endMark = startMark; Event event = new DocumentStartEvent(startMark, endMark, false, null, null); // Prepare the next state. states.push(new ParseDocumentEnd()); state = new ParseBlockNode(); return event; } else { Production p = new ParseDocumentStart(); return p.produce(); } }
public Event produce() { // Parse the document end. Token token = scanner.peekToken(); Mark startMark = token.getStartMark(); Mark endMark = startMark; boolean explicit = false; if (scanner.checkToken(Token.ID.DocumentEnd)) { token = scanner.getToken(); endMark = token.getEndMark(); explicit = true; } Event event = new DocumentEndEvent(startMark, endMark, explicit); // Prepare the next state. state = new ParseDocumentStart(); return event; }
/** * Reads a document from a source that contains only one document. * <p> * If the stream contains more than one document an exception is thrown. * </p> * * @return The root node of the document or <code>null</code> if no document * is available. */ public Node getSingleNode() { // Drop the STREAM-START event. parser.getEvent(); // Compose a document if the stream is not empty. Node document = null; if (!parser.checkEvent(Event.ID.StreamEnd)) { document = composeDocument(); } // Ensure that the stream contains no more documents. if (!parser.checkEvent(Event.ID.StreamEnd)) { Event event = parser.getEvent(); throw new ComposerException("expected a single document in the stream", document.getStartMark(), "but found another document", event.getStartMark()); } // Drop the STREAM-END event. parser.getEvent(); return document; }
// Decides whether at least `count` more events must be buffered before the
// head event can be emitted, by scanning the queued look-ahead events.
private boolean needEvents(int count) {
    int depth = 0;
    Iterator<Event> it = events.iterator();
    // Skip the head event: it is the one currently awaiting emission.
    it.next();
    while (it.hasNext()) {
        Event e = it.next();
        if (e instanceof DocumentStartEvent || e instanceof CollectionStartEvent) {
            depth++;
        } else if (e instanceof DocumentEndEvent || e instanceof CollectionEndEvent) {
            depth--;
        } else if (e instanceof StreamEndEvent) {
            // Stream end closes everything.
            depth = -1;
        }
        if (depth < 0) {
            // The structure opened by the head event is already closed in the
            // queue: no further look-ahead is required.
            return false;
        }
    }
    // Structure still open: keep buffering unless enough events are queued.
    return events.size() < count + 1;
}
/**
 * Serialize the representation tree into Events.
 *
 * @param data
 *            representation tree
 *
 * @return Event list
 *
 * @see <a href="http://yaml.org/spec/1.1/#id859333">Processing Overview</a>
 */
public List<Event> serialize(Node data) {
    YamlSilentEmitter collector = new YamlSilentEmitter();
    Serializer serializer = new Serializer(this.serialization, collector, this.resolver,
            this.dumperOptions, null);
    try {
        serializer.open();
        serializer.serialize(data);
        serializer.close();
    } catch (IOException e) {
        // The silent emitter performs no real I/O, so this is unexpected.
        throw new YAMLException(e);
    }
    return collector.getEvents();
}
// Returns true while the emitter must buffer additional events before the
// pending head event can be processed.
private boolean needMoreEvents() {
    if (this.events.isEmpty()) {
        return true;
    }
    // The required look-ahead window depends on the kind of the pending event.
    Event head = this.events.peek();
    if (head instanceof DocumentStartEvent) {
        return this.needEvents(1);
    }
    if (head instanceof SequenceStartEvent) {
        return this.needEvents(2);
    }
    if (head instanceof MappingStartEvent) {
        return this.needEvents(3);
    }
    return false;
}
// Consumes a complete sequence of scalar events and returns their values.
private List<String> expectList() {
    expectSequenceStart();
    List<String> values = new LinkedList<>();
    // Collect scalar items until the sequence is closed (or input ends).
    for (Event e = peek(); e != null && !e.is(Event.ID.SequenceEnd); e = peek()) {
        values.add(expectScalar().getValue());
    }
    expectSequenceEnd();
    return values;
}
// Composes a sequence node from the event stream, registering its anchor (if
// any) before children are composed so aliases inside may refer back to it.
protected Node composeSequenceNode(String anchor) {
    SequenceStartEvent startEvent = (SequenceStartEvent) parser.getEvent();
    String tag = startEvent.getTag();
    // A missing or non-specific tag ("!") is resolved to the implicit sequence tag.
    boolean resolved = tag == null || tag.equals("!");
    Tag nodeTag = resolved
            ? resolver.resolve(NodeId.sequence, null, startEvent.getImplicit())
            : new Tag(tag);
    final ArrayList<Node> children = new ArrayList<Node>();
    // The end mark is unknown until the matching SequenceEnd event is seen.
    SequenceNode node = new SequenceNode(nodeTag, resolved, children,
            startEvent.getStartMark(), null, startEvent.getFlowStyle());
    if (anchor != null) {
        anchors.put(anchor, node);
    }
    while (!parser.checkEvent(Event.ID.SequenceEnd)) {
        children.add(composeNode(node));
    }
    // Consume the SequenceEnd event and record where the sequence ends.
    node.setEndMark(parser.getEvent().getEndMark());
    return node;
}
// Composes a mapping node from the event stream, registering its anchor (if
// any) before children are composed so aliases inside may refer back to it.
protected Node composeMappingNode(String anchor) {
    MappingStartEvent startEvent = (MappingStartEvent) parser.getEvent();
    String tag = startEvent.getTag();
    // A missing or non-specific tag ("!") is resolved to the implicit mapping tag.
    boolean resolved = tag == null || tag.equals("!");
    Tag nodeTag = resolved
            ? resolver.resolve(NodeId.mapping, null, startEvent.getImplicit())
            : new Tag(tag);
    final List<NodeTuple> children = new ArrayList<NodeTuple>();
    // The end mark is unknown until the matching MappingEnd event is seen.
    MappingNode node = new MappingNode(nodeTag, resolved, children,
            startEvent.getStartMark(), null, startEvent.getFlowStyle());
    if (anchor != null) {
        anchors.put(anchor, node);
    }
    while (!parser.checkEvent(Event.ID.MappingEnd)) {
        composeMappingChildren(children, node);
    }
    // Consume the MappingEnd event and record where the mapping ends.
    node.setEndMark(parser.getEvent().getEndMark());
    return node;
}
// Verifies the full event sequence produced for a one-key mapping whose value
// is a block sequence.
public void testGetEvent2() {
    String data = "american:\n - Boston Red Sox";
    Parser parser = new ParserImpl(new StreamReader(data));
    Mark mark = new Mark("dummy", 0, 0, 0, "", 0);
    LinkedList<Event> expectedEvents = new LinkedList<Event>();
    expectedEvents.add(new StreamStartEvent(mark, mark));
    expectedEvents.add(new DocumentStartEvent(mark, mark, false, null, null));
    expectedEvents.add(new MappingStartEvent(null, null, true, mark, mark, Boolean.TRUE));
    expectedEvents.add(new ScalarEvent(null, null, new ImplicitTuple(true, false), "american",
            mark, mark, (char) 0));
    expectedEvents.add(new SequenceStartEvent(null, null, true, mark, mark, Boolean.FALSE));
    expectedEvents.add(new ScalarEvent(null, null, new ImplicitTuple(true, false),
            "Boston Red Sox", mark, mark, (char) 0));
    expectedEvents.add(new SequenceEndEvent(mark, mark));
    expectedEvents.add(new MappingEndEvent(mark, mark));
    expectedEvents.add(new DocumentEndEvent(mark, mark, false));
    expectedEvents.add(new StreamEndEvent(mark, mark));
    check(expectedEvents, parser);
}
@SuppressWarnings("unchecked") public void testEmitterErrors() { File[] files = getStreamsByExtension(".emitter-error"); assertTrue("No test files found.", files.length > 0); for (int i = 0; i < files.length; i++) { String content = getResource(files[i].getName()); List<Event> document = (List<Event>) load(new EventConstructor(), content.trim()); Writer writer = new StringWriter(); Emitter emitter = new Emitter(writer, new DumperOptions()); try { for (Event event : document) { emitter.emit(event); } fail("Loading must fail for " + files[i].getAbsolutePath()); // System.err.println("Loading must fail for " + // files[i].getAbsolutePath()); } catch (Exception e) { assertTrue(true); } } }
public void testParser() { File[] files = getStreamsByExtension(".data", true); assertTrue("No test files found.", files.length > 0); for (File file : files) { if (!file.getName().contains("scan-line-b")) { continue; } try { InputStream input = new FileInputStream(file); List<Event> events1 = parse(input); input.close(); assertFalse(events1.isEmpty()); int index = file.getAbsolutePath().lastIndexOf('.'); String canonicalName = file.getAbsolutePath().substring(0, index) + ".canonical"; File canonical = new File(canonicalName); List<Event> events2 = canonicalParse(new FileInputStream(canonical)); assertFalse(events2.isEmpty()); compareEvents(events1, events2, false); } catch (Exception e) { System.out.println("Failed File: " + file); // fail("Failed File: " + file + "; " + e.getMessage()); throw new RuntimeException(e); } } }
public void testParserOnCanonical() { File[] canonicalFiles = getStreamsByExtension(".canonical", false); assertTrue("No test files found.", canonicalFiles.length > 0); for (File file : canonicalFiles) { try { InputStream input = new FileInputStream(file); List<Event> events1 = parse(input); input.close(); assertFalse(events1.isEmpty()); List<Event> events2 = canonicalParse(new FileInputStream(file)); assertFalse(events2.isEmpty()); compareEvents(events1, events2, true); } catch (Exception e) { System.out.println("Failed File: " + file); // fail("Failed File: " + file + "; " + e.getMessage()); throw new RuntimeException(e); } } }
// Consumes a complete mapping of scalar keys to scalar values.
public Map<String, String> expectMap() {
    expectMappingStart();
    HashMap<String, String> map = new HashMap<>();
    // Collect entries until the mapping is closed (or input ends).
    for (Event e = peek(); e != null && !e.is(Event.ID.MappingEnd); e = peek()) {
        // Each entry is a key scalar followed immediately by a value scalar.
        String key = expectScalar().getValue();
        String value = expectScalar().getValue();
        map.put(key, value);
    }
    expectMappingEnd();
    return map;
}
/**
 * Get the next event without consuming it.
 */
public Event peekEvent() {
    // Produce a new event only if none is pending and the parser is not done.
    if (currentEvent == null && state != null) {
        currentEvent = state.produce();
    }
    return currentEvent;
}
/**
 * Get the next event and proceed further.
 */
public Event getEvent() {
    // Ensure an event has been produced, then consume it.
    Event value = peekEvent();
    currentEvent = null;
    return value;
}
public Event produce() { // Parse the stream start. StreamStartToken token = (StreamStartToken) scanner.getToken(); Event event = new StreamStartEvent(token.getStartMark(), token.getEndMark()); // Prepare the next state. state = new ParseImplicitDocumentStart(); return event; }
public Event produce() {
    // A directive, document boundary, or stream end here means the expected
    // node is absent: emit an empty scalar in its place.
    if (scanner.checkToken(Token.ID.Directive, Token.ID.DocumentStart, Token.ID.DocumentEnd,
            Token.ID.StreamEnd)) {
        Event event = processEmptyScalar(scanner.peekToken().getStartMark());
        state = states.pop();
        return event;
    }
    // Otherwise parse a regular block node.
    return new ParseBlockNode().produce();
}
public Event produce() {
    Token token = scanner.getToken();
    if (scanner.checkToken(Token.ID.Value, Token.ID.FlowEntry, Token.ID.FlowSequenceEnd)) {
        // The key has no content of its own: stand in an empty scalar.
        state = new ParseFlowSequenceEntryMappingValue();
        return processEmptyScalar(token.getEndMark());
    }
    // Parse the key node, then come back for the value.
    states.push(new ParseFlowSequenceEntryMappingValue());
    return parseFlowNode();
}
/** * Checks if further documents are available. * * @return <code>true</code> if there is at least one more document. */ public boolean checkNode() { // Drop the STREAM-START event. if (parser.checkEvent(Event.ID.StreamStart)) { parser.getEvent(); } // If there are more documents available? return !parser.checkEvent(Event.ID.StreamEnd); }
/** * Reads and composes the next document. * * @return The root node of the document or <code>null</code> if no more * documents are available. */ public Node getNode() { // Get the root node of the next document. if (!parser.checkEvent(Event.ID.StreamEnd)) { return composeDocument(); } else { return null; } }
/**
 * Serialize the representation tree into Events.
 *
 * @see <a href="http://yaml.org/spec/1.1/#id859333">Processing Overview</a>
 * @param data
 *            representation tree
 * @return Event list
 */
public List<Event> serialize(Node data) {
    SilentEmitter collector = new SilentEmitter();
    Serializer serializer = new Serializer(collector, resolver, dumperOptions, null);
    try {
        serializer.open();
        serializer.serialize(data);
        serializer.close();
    } catch (IOException e) {
        // The silent emitter performs no real I/O, so this is unexpected.
        throw new YAMLException(e);
    }
    return collector.getEvents();
}
// Queues an incoming event and processes queued events as soon as enough
// look-ahead has been buffered to decide how to render them.
public void emit(Event event) throws IOException {
    this.events.add(event);
    while (!needMoreEvents()) {
        this.event = this.events.poll();
        this.state.expect();
        // Clear the processed event so stale state is never reused.
        this.event = null;
    }
}
// Returns true while the emitter must buffer additional events before the
// pending head event can be processed.
private boolean needMoreEvents() {
    if (events.isEmpty()) {
        return true;
    }
    Event head = events.peek();
    // Each structural start kind requires a different look-ahead window.
    final int required;
    if (head instanceof DocumentStartEvent) {
        required = 1;
    } else if (head instanceof SequenceStartEvent) {
        required = 2;
    } else if (head instanceof MappingStartEvent) {
        required = 3;
    } else {
        return false;
    }
    return needEvents(required);
}