/**
 * Copyright (c) 2008, http://www.snakeyaml.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.yaml.snakeyaml.parser;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.yaml.snakeyaml.DumperOptions.Version;
import org.yaml.snakeyaml.error.Mark;
import org.yaml.snakeyaml.error.YAMLException;
import org.yaml.snakeyaml.events.AliasEvent;
import org.yaml.snakeyaml.events.DocumentEndEvent;
import org.yaml.snakeyaml.events.DocumentStartEvent;
import org.yaml.snakeyaml.events.Event;
import org.yaml.snakeyaml.events.ImplicitTuple;
import org.yaml.snakeyaml.events.MappingEndEvent;
import org.yaml.snakeyaml.events.MappingStartEvent;
import org.yaml.snakeyaml.events.ScalarEvent;
import org.yaml.snakeyaml.events.SequenceEndEvent;
import org.yaml.snakeyaml.events.SequenceStartEvent;
import org.yaml.snakeyaml.events.StreamEndEvent;
import org.yaml.snakeyaml.events.StreamStartEvent;
import org.yaml.snakeyaml.nodes.Tag;
import org.yaml.snakeyaml.reader.StreamReader;
import org.yaml.snakeyaml.scanner.Scanner;
import org.yaml.snakeyaml.scanner.ScannerImpl;
import org.yaml.snakeyaml.tokens.AliasToken;
import org.yaml.snakeyaml.tokens.AnchorToken;
import org.yaml.snakeyaml.tokens.BlockEntryToken;
import org.yaml.snakeyaml.tokens.DirectiveToken;
import org.yaml.snakeyaml.tokens.ScalarToken;
import org.yaml.snakeyaml.tokens.StreamEndToken;
import org.yaml.snakeyaml.tokens.StreamStartToken;
import org.yaml.snakeyaml.tokens.TagToken;
import org.yaml.snakeyaml.tokens.TagTuple;
import org.yaml.snakeyaml.tokens.Token;
import org.yaml.snakeyaml.util.ArrayStack;

/**
 * <pre>
 * # The following YAML grammar is LL(1) and is parsed by a recursive descent parser.
 * stream            ::= STREAM-START implicit_document? explicit_document* STREAM-END
 * implicit_document ::= block_node DOCUMENT-END*
 * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
 * block_node_or_indentless_sequence ::=
 *                       ALIAS
 *                       | properties (block_content | indentless_block_sequence)?
 *                       | block_content
 *                       | indentless_block_sequence
 * block_node        ::= ALIAS
 *                       | properties block_content?
 *                       | block_content
 * flow_node         ::= ALIAS
 *                       | properties flow_content?
 *                       | flow_content
 * properties        ::= TAG ANCHOR? | ANCHOR TAG?
 * block_content     ::= block_collection | flow_collection | SCALAR
 * flow_content      ::= flow_collection | SCALAR
 * block_collection  ::= block_sequence | block_mapping
 * flow_collection   ::= flow_sequence | flow_mapping
 * block_sequence    ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
 * indentless_sequence   ::= (BLOCK-ENTRY block_node?)+
 * block_mapping     ::= BLOCK-MAPPING-START
 *                       ((KEY block_node_or_indentless_sequence?)?
 *                       (VALUE block_node_or_indentless_sequence?)?)*
 *                       BLOCK-END
 * flow_sequence     ::= FLOW-SEQUENCE-START
 *                       (flow_sequence_entry FLOW-ENTRY)*
 *                       flow_sequence_entry?
 *                       FLOW-SEQUENCE-END
 * flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
 * flow_mapping      ::= FLOW-MAPPING-START
 *                       (flow_mapping_entry FLOW-ENTRY)*
 *                       flow_mapping_entry?
 *                       FLOW-MAPPING-END
 * flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
 * FIRST sets:
 * stream: { STREAM-START }
 * explicit_document: { DIRECTIVE DOCUMENT-START }
 * implicit_document: FIRST(block_node)
 * block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
 * flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
 * block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
 * flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
 * block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
 * flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
 * block_sequence: { BLOCK-SEQUENCE-START }
 * block_mapping: { BLOCK-MAPPING-START }
 * block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
 * indentless_sequence: { ENTRY }
 * flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
 * flow_sequence: { FLOW-SEQUENCE-START }
 * flow_mapping: { FLOW-MAPPING-START }
 * flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
 * flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
 * </pre>
 *
 * Since writing a recursive descent parser is a straightforward task, we do
 * not give many comments here.
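 * <p>
 * A minimal usage sketch of the event pump exposed by this parser (the sample
 * document and variable names are illustrative only; the calls shown are the
 * public Parser and StreamReader API):
 *
 * <pre>
 * Parser parser = new ParserImpl(new StreamReader("- a\n- b\n"));
 * while (!parser.checkEvent(Event.ID.StreamEnd)) {
 *     Event event = parser.getEvent();
 *     // hand the event to a Composer, Emitter, etc.
 * }
 * parser.getEvent(); // consume the trailing StreamEnd event
 * </pre>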
 */
public class ParserImpl implements Parser {
    private static final Map<String, String> DEFAULT_TAGS = new HashMap<String, String>();
    static {
        DEFAULT_TAGS.put("!", "!");
        DEFAULT_TAGS.put("!!", Tag.PREFIX);
    }

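    // The parser is implemented as a push-down automaton: 'state' holds the
    // production to run next, 'states' is the stack of productions to resume
    // later, and 'marks' remembers where still-open collections started (used
    // for error reporting).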
    protected final Scanner scanner;
    private Event currentEvent;
    private final ArrayStack<Production> states;
    private final ArrayStack<Mark> marks;
    private Production state;
    private VersionTagsTuple directives;

    public ParserImpl(StreamReader reader) {
        this(new ScannerImpl(reader));
    }

    public ParserImpl(Scanner scanner) {
        this.scanner = scanner;
        currentEvent = null;
        directives = new VersionTagsTuple(null, new HashMap<String, String>(DEFAULT_TAGS));
        states = new ArrayStack<Production>(100);
        marks = new ArrayStack<Mark>(10);
        state = new ParseStreamStart();
    }

    /**
     * Check the type of the next event.
     */
    public boolean checkEvent(Event.ID choice) {
        peekEvent();
        return currentEvent != null && currentEvent.is(choice);
    }

    /**
     * Get the next event without consuming it.
     */
    public Event peekEvent() {
        if (currentEvent == null) {
            if (state != null) {
                currentEvent = state.produce();
            }
        }
        return currentEvent;
    }

    /**
     * Get the next event and proceed further.
     */
    public Event getEvent() {
        peekEvent();
        Event value = currentEvent;
        currentEvent = null;
        return value;
    }

    /**
     * <pre>
     * stream    ::= STREAM-START implicit_document? explicit_document* STREAM-END
     * implicit_document ::= block_node DOCUMENT-END*
     * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
     * </pre>
     */
    private class ParseStreamStart implements Production {
        public Event produce() {
            // Parse the stream start.
            StreamStartToken token = (StreamStartToken) scanner.getToken();
            Event event = new StreamStartEvent(token.getStartMark(), token.getEndMark());
            // Prepare the next state.
            state = new ParseImplicitDocumentStart();
            return event;
        }
    }

    private class ParseImplicitDocumentStart implements Production {
        public Event produce() {
            // Parse an implicit document.
            if (!scanner.checkToken(Token.ID.Directive, Token.ID.DocumentStart, Token.ID.StreamEnd)) {
                directives = new VersionTagsTuple(null, DEFAULT_TAGS);
                Token token = scanner.peekToken();
                Mark startMark = token.getStartMark();
                Mark endMark = startMark;
                Event event = new DocumentStartEvent(startMark, endMark, false, null, null);
                // Prepare the next state.
                states.push(new ParseDocumentEnd());
                state = new ParseBlockNode();
                return event;
            } else {
                Production p = new ParseDocumentStart();
                return p.produce();
            }
        }
    }

    private class ParseDocumentStart implements Production {
        public Event produce() {
            // Parse any extra document end indicators.
            while (scanner.checkToken(Token.ID.DocumentEnd)) {
                scanner.getToken();
            }
            // Parse an explicit document.
            Event event;
            if (!scanner.checkToken(Token.ID.StreamEnd)) {
                Token token = scanner.peekToken();
                Mark startMark = token.getStartMark();
                VersionTagsTuple tuple = processDirectives();
                if (!scanner.checkToken(Token.ID.DocumentStart)) {
                    throw new ParserException(null, null, "expected '<document start>', but found "
                            + scanner.peekToken().getTokenId(), scanner.peekToken().getStartMark());
                }
                token = scanner.getToken();
                Mark endMark = token.getEndMark();
                event = new DocumentStartEvent(startMark, endMark, true, tuple.getVersion(),
                        tuple.getTags());
                states.push(new ParseDocumentEnd());
                state = new ParseDocumentContent();
            } else {
                // Parse the end of the stream.
                StreamEndToken token = (StreamEndToken) scanner.getToken();
                event = new StreamEndEvent(token.getStartMark(), token.getEndMark());
                if (!states.isEmpty()) {
                    throw new YAMLException("Unexpected end of stream. States left: " + states);
                }
                if (!marks.isEmpty()) {
                    throw new YAMLException("Unexpected end of stream. Marks left: " + marks);
                }
                state = null;
            }
            return event;
        }
    }

    private class ParseDocumentEnd implements Production {
        public Event produce() {
            // Parse the document end.
            Token token = scanner.peekToken();
            Mark startMark = token.getStartMark();
            Mark endMark = startMark;
            boolean explicit = false;
            if (scanner.checkToken(Token.ID.DocumentEnd)) {
                token = scanner.getToken();
                endMark = token.getEndMark();
                explicit = true;
            }
            Event event = new DocumentEndEvent(startMark, endMark, explicit);
            // Prepare the next state.
            state = new ParseDocumentStart();
            return event;
        }
    }

    private class ParseDocumentContent implements Production {
        public Event produce() {
            Event event;
            if (scanner.checkToken(Token.ID.Directive, Token.ID.DocumentStart,
                    Token.ID.DocumentEnd, Token.ID.StreamEnd)) {
                event = processEmptyScalar(scanner.peekToken().getStartMark());
                state = states.pop();
                return event;
            } else {
                Production p = new ParseBlockNode();
                return p.produce();
            }
        }
    }

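    // Collect the %YAML and %TAG directives that precede the next document.
    // The %YAML major version must be 1 (1.0 is kept, any other 1.x is treated
    // as 1.1); explicitly declared tag handles override the defaults, while the
    // remaining default handles are kept.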
    @SuppressWarnings("unchecked")
    private VersionTagsTuple processDirectives() {
        Version yamlVersion = null;
        HashMap<String, String> tagHandles = new HashMap<String, String>();
        while (scanner.checkToken(Token.ID.Directive)) {
            @SuppressWarnings("rawtypes")
            DirectiveToken token = (DirectiveToken) scanner.getToken();
            if (token.getName().equals("YAML")) {
                if (yamlVersion != null) {
                    throw new ParserException(null, null, "found duplicate YAML directive",
                            token.getStartMark());
                }
                List<Integer> value = (List<Integer>) token.getValue();
                Integer major = value.get(0);
                if (major != 1) {
                    throw new ParserException(null, null,
                            "found incompatible YAML document (version 1.* is required)",
                            token.getStartMark());
                }
                Integer minor = value.get(1);
                switch (minor) {
                case 0:
                    yamlVersion = Version.V1_0;
                    break;

                default:
                    yamlVersion = Version.V1_1;
                    break;
                }
            } else if (token.getName().equals("TAG")) {
                List<String> value = (List<String>) token.getValue();
                String handle = value.get(0);
                String prefix = value.get(1);
                if (tagHandles.containsKey(handle)) {
                    throw new ParserException(null, null, "duplicate tag handle " + handle,
                            token.getStartMark());
                }
                tagHandles.put(handle, prefix);
            }
        }
        if (yamlVersion != null || !tagHandles.isEmpty()) {
            // directives in the document found - drop the previous
            for (String key : DEFAULT_TAGS.keySet()) {
                // do not overwrite re-defined tags
                if (!tagHandles.containsKey(key)) {
                    tagHandles.put(key, DEFAULT_TAGS.get(key));
                }
            }
            directives = new VersionTagsTuple(yamlVersion, tagHandles);
        }
        return directives;
    }

    /**
     * <pre>
     *  block_node_or_indentless_sequence ::= ALIAS
     *                | properties (block_content | indentless_block_sequence)?
     *                | block_content
     *                | indentless_block_sequence
     *  block_node    ::= ALIAS
     *                    | properties block_content?
     *                    | block_content
     *  flow_node     ::= ALIAS
     *                    | properties flow_content?
     *                    | flow_content
     *  properties    ::= TAG ANCHOR? | ANCHOR TAG?
     *  block_content     ::= block_collection | flow_collection | SCALAR
     *  flow_content      ::= flow_collection | SCALAR
     *  block_collection  ::= block_sequence | block_mapping
     *  flow_collection   ::= flow_sequence | flow_mapping
     * </pre>
     */

    private class ParseBlockNode implements Production {
        public Event produce() {
            return parseNode(true, false);
        }
    }

    private Event parseFlowNode() {
        return parseNode(false, false);
    }

    private Event parseBlockNodeOrIndentlessSequence() {
        return parseNode(true, true);
    }

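    // 'block' selects the block_node production (block collections are allowed);
    // 'indentlessSequence' additionally accepts an indentless sequence, i.e. a
    // '-' list that is not indented relative to its parent block mapping.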
    private Event parseNode(boolean block, boolean indentlessSequence) {
        Event event;
        Mark startMark = null;
        Mark endMark = null;
        Mark tagMark = null;
        if (scanner.checkToken(Token.ID.Alias)) {
            AliasToken token = (AliasToken) scanner.getToken();
            event = new AliasEvent(token.getValue(), token.getStartMark(), token.getEndMark());
            state = states.pop();
        } else {
            String anchor = null;
            TagTuple tagTokenTag = null;
            if (scanner.checkToken(Token.ID.Anchor)) {
                AnchorToken token = (AnchorToken) scanner.getToken();
                startMark = token.getStartMark();
                endMark = token.getEndMark();
                anchor = token.getValue();
                if (scanner.checkToken(Token.ID.Tag)) {
                    TagToken tagToken = (TagToken) scanner.getToken();
                    tagMark = tagToken.getStartMark();
                    endMark = tagToken.getEndMark();
                    tagTokenTag = tagToken.getValue();
                }
            } else if (scanner.checkToken(Token.ID.Tag)) {
                TagToken tagToken = (TagToken) scanner.getToken();
                startMark = tagToken.getStartMark();
                tagMark = startMark;
                endMark = tagToken.getEndMark();
                tagTokenTag = tagToken.getValue();
                if (scanner.checkToken(Token.ID.Anchor)) {
                    AnchorToken token = (AnchorToken) scanner.getToken();
                    endMark = token.getEndMark();
                    anchor = token.getValue();
                }
            }
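            // Resolve the tag handle against the directives in effect, e.g.
            // '!!str' becomes 'tag:yaml.org,2002:str'.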
            String tag = null;
            if (tagTokenTag != null) {
                String handle = tagTokenTag.getHandle();
                String suffix = tagTokenTag.getSuffix();
                if (handle != null) {
                    if (!directives.getTags().containsKey(handle)) {
                        throw new ParserException("while parsing a node", startMark,
                                "found undefined tag handle " + handle, tagMark);
                    }
                    tag = directives.getTags().get(handle) + suffix;
                } else {
                    tag = suffix;
                }
            }
            if (startMark == null) {
                startMark = scanner.peekToken().getStartMark();
                endMark = startMark;
            }
            event = null;
            boolean implicit = tag == null || tag.equals("!");
            if (indentlessSequence && scanner.checkToken(Token.ID.BlockEntry)) {
                endMark = scanner.peekToken().getEndMark();
                event = new SequenceStartEvent(anchor, tag, implicit, startMark, endMark,
                        Boolean.FALSE);
                state = new ParseIndentlessSequenceEntry();
            } else {
                if (scanner.checkToken(Token.ID.Scalar)) {
                    ScalarToken token = (ScalarToken) scanner.getToken();
                    endMark = token.getEndMark();
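                    // ImplicitTuple(plain, nonPlain): whether the tag may be omitted
                    // when the scalar is presented in the plain / non-plain style.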
                    ImplicitTuple implicitValues;
                    if ((token.getPlain() && tag == null) || "!".equals(tag)) {
                        implicitValues = new ImplicitTuple(true, false);
                    } else if (tag == null) {
                        implicitValues = new ImplicitTuple(false, true);
                    } else {
                        implicitValues = new ImplicitTuple(false, false);
                    }
                    event = new ScalarEvent(anchor, tag, implicitValues, token.getValue(),
                            startMark, endMark, token.getStyle());
                    state = states.pop();
                } else if (scanner.checkToken(Token.ID.FlowSequenceStart)) {
                    endMark = scanner.peekToken().getEndMark();
                    event = new SequenceStartEvent(anchor, tag, implicit, startMark, endMark,
                            Boolean.TRUE);
                    state = new ParseFlowSequenceFirstEntry();
                } else if (scanner.checkToken(Token.ID.FlowMappingStart)) {
                    endMark = scanner.peekToken().getEndMark();
                    event = new MappingStartEvent(anchor, tag, implicit, startMark, endMark,
                            Boolean.TRUE);
                    state = new ParseFlowMappingFirstKey();
                } else if (block && scanner.checkToken(Token.ID.BlockSequenceStart)) {
                    endMark = scanner.peekToken().getStartMark();
                    event = new SequenceStartEvent(anchor, tag, implicit, startMark, endMark,
                            Boolean.FALSE);
                    state = new ParseBlockSequenceFirstEntry();
                } else if (block && scanner.checkToken(Token.ID.BlockMappingStart)) {
                    endMark = scanner.peekToken().getStartMark();
                    event = new MappingStartEvent(anchor, tag, implicit, startMark, endMark,
                            Boolean.FALSE);
                    state = new ParseBlockMappingFirstKey();
                } else if (anchor != null || tag != null) {
                    // Empty scalars are allowed even if a tag or an anchor is
                    // specified.
                    event = new ScalarEvent(anchor, tag, new ImplicitTuple(implicit, false), "",
                            startMark, endMark, (char) 0);
                    state = states.pop();
                } else {
                    String node;
                    if (block) {
                        node = "block";
                    } else {
                        node = "flow";
                    }
                    Token token = scanner.peekToken();
                    throw new ParserException("while parsing a " + node + " node", startMark,
                            "expected the node content, but found " + token.getTokenId(),
                            token.getStartMark());
                }
            }
        }
        return event;
    }

    // block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
    // BLOCK-END

    private class ParseBlockSequenceFirstEntry implements Production {
        public Event produce() {
            Token token = scanner.getToken();
            marks.push(token.getStartMark());
            return new ParseBlockSequenceEntry().produce();
        }
    }

    private class ParseBlockSequenceEntry implements Production {
        public Event produce() {
            if (scanner.checkToken(Token.ID.BlockEntry)) {
                BlockEntryToken token = (BlockEntryToken) scanner.getToken();
                if (!scanner.checkToken(Token.ID.BlockEntry, Token.ID.BlockEnd)) {
                    states.push(new ParseBlockSequenceEntry());
                    return new ParseBlockNode().produce();
                } else {
                    state = new ParseBlockSequenceEntry();
                    return processEmptyScalar(token.getEndMark());
                }
            }
            if (!scanner.checkToken(Token.ID.BlockEnd)) {
                Token token = scanner.peekToken();
                throw new ParserException("while parsing a block collection", marks.pop(),
                        "expected <block end>, but found " + token.getTokenId(),
                        token.getStartMark());
            }
            Token token = scanner.getToken();
            Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
            state = states.pop();
            marks.pop();
            return event;
        }
    }

    // indentless_sequence ::= (BLOCK-ENTRY block_node?)+

    private class ParseIndentlessSequenceEntry implements Production {
        public Event produce() {
            if (scanner.checkToken(Token.ID.BlockEntry)) {
                Token token = scanner.getToken();
                if (!scanner.checkToken(Token.ID.BlockEntry, Token.ID.Key, Token.ID.Value,
                        Token.ID.BlockEnd)) {
                    states.push(new ParseIndentlessSequenceEntry());
                    return new ParseBlockNode().produce();
                } else {
                    state = new ParseIndentlessSequenceEntry();
                    return processEmptyScalar(token.getEndMark());
                }
            }
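            // An indentless sequence has no closing token, so finish it at the
            // position of the next token without consuming it.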
            Token token = scanner.peekToken();
            Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
            state = states.pop();
            return event;
        }
    }

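    /**
     * <pre>
     * block_mapping     ::= BLOCK-MAPPING-START
     *           ((KEY block_node_or_indentless_sequence?)?
     *           (VALUE block_node_or_indentless_sequence?)?)*
     *           BLOCK-END
     * </pre>
     */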
    private class ParseBlockMappingFirstKey implements Production {
        public Event produce() {
            Token token = scanner.getToken();
            marks.push(token.getStartMark());
            return new ParseBlockMappingKey().produce();
        }
    }

    private class ParseBlockMappingKey implements Production {
        public Event produce() {
            if (scanner.checkToken(Token.ID.Key)) {
                Token token = scanner.getToken();
                if (!scanner.checkToken(Token.ID.Key, Token.ID.Value, Token.ID.BlockEnd)) {
                    states.push(new ParseBlockMappingValue());
                    return parseBlockNodeOrIndentlessSequence();
                } else {
                    state = new ParseBlockMappingValue();
                    return processEmptyScalar(token.getEndMark());
                }
            }
            if (!scanner.checkToken(Token.ID.BlockEnd)) {
                Token token = scanner.peekToken();
                throw new ParserException("while parsing a block mapping", marks.pop(),
                        "expected <block end>, but found " + token.getTokenId(),
                        token.getStartMark());
            }
            Token token = scanner.getToken();
            Event event = new MappingEndEvent(token.getStartMark(), token.getEndMark());
            state = states.pop();
            marks.pop();
            return event;
        }
    }

    private class ParseBlockMappingValue implements Production {
        public Event produce() {
            if (scanner.checkToken(Token.ID.Value)) {
                Token token = scanner.getToken();
                if (!scanner.checkToken(Token.ID.Key, Token.ID.Value, Token.ID.BlockEnd)) {
                    states.push(new ParseBlockMappingKey());
                    return parseBlockNodeOrIndentlessSequence();
                } else {
                    state = new ParseBlockMappingKey();
                    return processEmptyScalar(token.getEndMark());
                }
            }
            state = new ParseBlockMappingKey();
            Token token = scanner.peekToken();
            return processEmptyScalar(token.getStartMark());
        }
    }

    /**
     * <pre>
     * flow_sequence     ::= FLOW-SEQUENCE-START
     *                       (flow_sequence_entry FLOW-ENTRY)*
     *                       flow_sequence_entry?
     *                       FLOW-SEQUENCE-END
     * flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
     * Note that while the production rules for flow_sequence_entry and
     * flow_mapping_entry are identical, their interpretations differ.
     * For flow_sequence_entry, the part KEY flow_node? (VALUE flow_node?)?
     * generates an inline mapping (set syntax).
     * </pre>
     */
    private class ParseFlowSequenceFirstEntry implements Production {
        public Event produce() {
            Token token = scanner.getToken();
            marks.push(token.getStartMark());
            return new ParseFlowSequenceEntry(true).produce();
        }
    }

    private class ParseFlowSequenceEntry implements Production {
        private boolean first = false;

        public ParseFlowSequenceEntry(boolean first) {
            this.first = first;
        }

        public Event produce() {
            if (!scanner.checkToken(Token.ID.FlowSequenceEnd)) {
                if (!first) {
                    if (scanner.checkToken(Token.ID.FlowEntry)) {
                        scanner.getToken();
                    } else {
                        Token token = scanner.peekToken();
                        throw new ParserException("while parsing a flow sequence", marks.pop(),
                                "expected ',' or ']', but got " + token.getTokenId(),
                                token.getStartMark());
                    }
                }
                if (scanner.checkToken(Token.ID.Key)) {
                    Token token = scanner.peekToken();
                    Event event = new MappingStartEvent(null, null, true, token.getStartMark(),
                            token.getEndMark(), Boolean.TRUE);
                    state = new ParseFlowSequenceEntryMappingKey();
                    return event;
                } else if (!scanner.checkToken(Token.ID.FlowSequenceEnd)) {
                    states.push(new ParseFlowSequenceEntry(false));
                    return parseFlowNode();
                }
            }
            Token token = scanner.getToken();
            Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
            state = states.pop();
            marks.pop();
            return event;
        }
    }

    private class ParseFlowSequenceEntryMappingKey implements Production {
        public Event produce() {
            Token token = scanner.getToken();
            if (!scanner.checkToken(Token.ID.Value, Token.ID.FlowEntry, Token.ID.FlowSequenceEnd)) {
                states.push(new ParseFlowSequenceEntryMappingValue());
                return parseFlowNode();
            } else {
                state = new ParseFlowSequenceEntryMappingValue();
                return processEmptyScalar(token.getEndMark());
            }
        }
    }

    private class ParseFlowSequenceEntryMappingValue implements Production {
        public Event produce() {
            if (scanner.checkToken(Token.ID.Value)) {
                Token token = scanner.getToken();
                if (!scanner.checkToken(Token.ID.FlowEntry, Token.ID.FlowSequenceEnd)) {
                    states.push(new ParseFlowSequenceEntryMappingEnd());
                    return parseFlowNode();
                } else {
                    state = new ParseFlowSequenceEntryMappingEnd();
                    return processEmptyScalar(token.getEndMark());
                }
            } else {
                state = new ParseFlowSequenceEntryMappingEnd();
                Token token = scanner.peekToken();
                return processEmptyScalar(token.getStartMark());
            }
        }
    }

    private class ParseFlowSequenceEntryMappingEnd implements Production {
        public Event produce() {
            state = new ParseFlowSequenceEntry(false);
            Token token = scanner.peekToken();
            return new MappingEndEvent(token.getStartMark(), token.getEndMark());
        }
    }

    /**
     * <pre>
     *   flow_mapping  ::= FLOW-MAPPING-START
     *          (flow_mapping_entry FLOW-ENTRY)*
     *          flow_mapping_entry?
     *          FLOW-MAPPING-END
     *   flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
     * </pre>
     */
    private class ParseFlowMappingFirstKey implements Production {
        public Event produce() {
            Token token = scanner.getToken();
            marks.push(token.getStartMark());
            return new ParseFlowMappingKey(true).produce();
        }
    }

    private class ParseFlowMappingKey implements Production {
        private boolean first = false;

        public ParseFlowMappingKey(boolean first) {
            this.first = first;
        }

        public Event produce() {
            if (!scanner.checkToken(Token.ID.FlowMappingEnd)) {
                if (!first) {
                    if (scanner.checkToken(Token.ID.FlowEntry)) {
                        scanner.getToken();
                    } else {
                        Token token = scanner.peekToken();
                        throw new ParserException("while parsing a flow mapping", marks.pop(),
                                "expected ',' or '}', but got " + token.getTokenId(),
                                token.getStartMark());
                    }
                }
                if (scanner.checkToken(Token.ID.Key)) {
                    Token token = scanner.getToken();
                    if (!scanner.checkToken(Token.ID.Value, Token.ID.FlowEntry,
                            Token.ID.FlowMappingEnd)) {
                        states.push(new ParseFlowMappingValue());
                        return parseFlowNode();
                    } else {
                        state = new ParseFlowMappingValue();
                        return processEmptyScalar(token.getEndMark());
                    }
                } else if (!scanner.checkToken(Token.ID.FlowMappingEnd)) {
                    states.push(new ParseFlowMappingEmptyValue());
                    return parseFlowNode();
                }
            }
            Token token = scanner.getToken();
            Event event = new MappingEndEvent(token.getStartMark(), token.getEndMark());
            state = states.pop();
            marks.pop();
            return event;
        }
    }

    private class ParseFlowMappingValue implements Production {
        public Event produce() {
            if (scanner.checkToken(Token.ID.Value)) {
                Token token = scanner.getToken();
                if (!scanner.checkToken(Token.ID.FlowEntry, Token.ID.FlowMappingEnd)) {
                    states.push(new ParseFlowMappingKey(false));
                    return parseFlowNode();
                } else {
                    state = new ParseFlowMappingKey(false);
                    return processEmptyScalar(token.getEndMark());
                }
            } else {
                state = new ParseFlowMappingKey(false);
                Token token = scanner.peekToken();
                return processEmptyScalar(token.getStartMark());
            }
        }
    }

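    // A bare node inside a flow mapping (no following ':') is treated as a key
    // whose value is an empty scalar.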
    private class ParseFlowMappingEmptyValue implements Production {
        public Event produce() {
            state = new ParseFlowMappingKey(false);
            return processEmptyScalar(scanner.peekToken().getStartMark());
        }
    }

    /**
     * An empty scalar stands in for a node that is omitted in the source, for
     * example a mapping key or value that is not present.
     */
    private Event processEmptyScalar(Mark mark) {
        return new ScalarEvent(null, null, new ImplicitTuple(true, false), "", mark, mark, (char) 0);
    }
}