1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
// Token-type constants used by the Parentheses node below.
var TYPE = require('../../tokenizer').TYPE;

var LEFTPARENTHESIS = TYPE.LeftParenthesis;   // "("
var RIGHTPARENTHESIS = TYPE.RightParenthesis; // ")"
|
| module.exports = {
| name: 'Parentheses',
| structure: {
| children: [[]]
| },
| parse: function(readSequence, recognizer) {
| var start = this.scanner.tokenStart;
| var children = null;
|
| this.eat(LEFTPARENTHESIS);
|
| children = readSequence.call(this, recognizer);
|
| if (!this.scanner.eof) {
| this.eat(RIGHTPARENTHESIS);
| }
|
| return {
| type: 'Parentheses',
| loc: this.getLocation(start, this.scanner.tokenStart),
| children: children
| };
| },
| generate: function(node) {
| this.chunk('(');
| this.children(node);
| this.chunk(')');
| }
| };
|
|