Import upstream version 1.0.0+git20190620.973c2ea, md5 f582383878850ccd256b852b40d141a8
Debian Janitor
4 years ago
0 | 0 | https://medium.com/@octskyward/graal-truffle-134d8f28fb69#.jo3luf4dn |
1 | http://nez-peg.github.io/ | |
2 | https://en.wikipedia.org/wiki/DFA_minimization | |
3 | ||
4 | https://news.ycombinator.com/item?id=14589173 | |
5 | http://jamey.thesharps.us/2017/06/search-based-compiler-code-generation.html | |
6 | ||
7 | https://news.ycombinator.com/item?id=15105119 | |
8 | https://en.wikipedia.org/wiki/Tree_transducer | |
9 | ||
10 | # Type-Driven Program Synthesis | |
11 | https://news.ycombinator.com/item?id=18251145 | |
12 | https://www.youtube.com/watch?v=HnOix9TFy1A | |
13 | http://comcom.csail.mit.edu/comcom/#welcome | |
14 | https://bitbucket.org/nadiapolikarpova/synquid | |
15 | ||
16 | # Formality – An efficient programming language and proof assistant | |
17 | https://news.ycombinator.com/item?id=18230148 | |
18 | https://github.com/maiavictor/formality |
0 | # Copyright 2010 The Go Authors. All rights reserved. | |
1 | # Use of this source code is governed by a BSD-style | |
2 | # license that can be found in the LICENSE file. | |
3 | ||
4 | peg: bootstrap.peg.go peg.go main.go | |
5 | go build | |
6 | ||
7 | bootstrap.peg.go: bootstrap/main.go peg.go | |
8 | cd bootstrap; go build | |
9 | bootstrap/bootstrap | |
10 | ||
11 | clean: | |
12 | rm -f bootstrap/bootstrap peg peg.peg.go |
12 | 12 | * http://piumarta.com/software/peg/ |
13 | 13 | |
14 | 14 | |
15 | # Development | |
16 | ||
17 | To rebuild from scratch: | |
18 | ``` | |
19 | go run build.go | |
20 | ``` | |
21 | ||
22 | For full test: | |
23 | ``` | |
24 | go run build.go test | |
25 | ``` | |
26 | ||
27 | ||
15 | 28 | # Usage |
16 | 29 | |
17 | 30 | ``` |
18 | 31 | -inline |
19 | 32 | Tells the parser generator to inline parser rules. |
20 | 33 | -switch |
34 | Use at your own peril! | |
21 | 35 | Reduces the number of rules that have to be tried for some pegs. |
22 | 36 | If statements are replaced with switch statements. |
23 | 37 | ``` |
25 | 39 | |
26 | 40 | # Syntax |
27 | 41 | |
28 | First declare the package name: | |
42 | First declare the package name and any import(s) required: | |
29 | 43 | ``` |
30 | 44 | package <package name> |
45 | ||
46 | import <import name> | |
31 | 47 | ``` |
32 | 48 | |
33 | 49 | Then declare the parser: |
42 | 58 | <rule name> <- <rule body> |
43 | 59 | ``` |
44 | 60 | |
45 | The first rule should probably end with '!.' to indicate no more input follows: | |
61 | The first rule should probably end with `!.` to indicate no more input follows: | |
46 | 62 | ``` |
47 | 63 | first <- . !. |
48 | 64 | ``` |
49 | 65 | |
50 | '.' means any character matches. For zero or more character matches use: | |
66 | `.` means any character matches. For zero or more character matches, use: | |
51 | 67 | ``` |
52 | 68 | repetition <- .* |
53 | 69 | ``` |
54 | 70 | |
55 | For one or more character matches use: | |
71 | For one or more character matches, use: | |
56 | 72 | ``` |
57 | 73 | oneOrMore <- .+ |
58 | 74 | ``` |
59 | 75 | |
60 | For an optional character match use: | |
76 | For an optional character match, use: | |
61 | 77 | ``` |
62 | 78 | optional <- .? |
63 | 79 | ``` |
64 | 80 | |
65 | If specific characters are to be matched use single quotes: | |
81 | If specific characters are to be matched, use single quotes: | |
66 | 82 | ``` |
67 | 83 | specific <- 'a'* 'bc'+ 'de'? |
68 | 84 | ``` |
69 | 85 | will match the string "aaabcbcde". |
70 | 86 | |
71 | For choosing between different inputs use alternates: | |
87 | For choosing between different inputs, use alternates: | |
72 | 88 | ``` |
73 | 89 | prioritized <- 'a' 'a'* / 'bc'+ / 'de'? |
74 | 90 | ``` |
75 | 91 | will match "aaaa" or "bcbc" or "de" or "". The matches are attempted in order. |
76 | 92 | |
77 | If the characters are case insensitive use double quotes: | |
93 | If the characters are case insensitive, use double quotes: | |
78 | 94 | ``` |
79 | 95 | insensitive <- "abc" |
80 | 96 | ``` |
81 | 97 | will match "abc" or "Abc" or "ABc" etc... |
82 | 98 | |
83 | For matching a set of characters use a character class: | |
99 | For matching a set of characters, use a character class: | |
84 | 100 | ``` |
85 | 101 | class <- [a-z] |
86 | 102 | ``` |
87 | will watch "a" or "b" or all the way to "z". | |
103 | will match "a" or "b" or all the way to "z". | |
88 | 104 | |
89 | For an inverse character class start with a tilde: | |
105 | For an inverse character class, start with a caret: | |
90 | 106 | ``` |
91 | inverse <- [~a-z] | |
107 | inverse <- [^a-z] | |
92 | 108 | ``` |
93 | will match anything but "a" or "b" or all the way to "z" | |
109 | will match anything but "a" or "b" or all the way to "z". | |
94 | 110 | |
95 | If the character class is case insensitive use double brackets: | |
111 | If the character class is case insensitive, use double brackets: | |
96 | 112 | ``` |
97 | 113 | insensitive <- [[A-Z]] |
98 | 114 | ``` |
102 | 118 | grouping <- (rule1 / rule2) rule3 |
103 | 119 | ``` |
104 | 120 | |
105 | For looking ahead for a match (predicate) use: | |
121 | For looking ahead a match (predicate), use: | |
106 | 122 | ``` |
107 | 123 | lookAhead <- &rule1 rule2 |
108 | 124 | ``` |
109 | 125 | |
110 | For inverse look ahead use: | |
126 | For inverse look ahead, use: | |
111 | 127 | ``` |
112 | 128 | inverse <- !rule1 rule2 |
113 | 129 | ``` |
117 | 133 | gocode <- { fmt.Println("hello world") } |
118 | 134 | ``` |
119 | 135 | |
120 | For string captures use less than greater than: | |
136 | For string captures, use less than and greater than: | |
121 | 137 | ``` |
122 | 138 | capture <- <'capture'> { fmt.Println(buffer[begin:end]) } |
123 | 139 | ``` |
124 | Will print out "capture". The captured string is stored in buffer[begin:end]. | |
140 | Will print out "capture". The captured string is stored in `buffer[begin:end]`. | |
125 | 141 | |
126 | 142 | |
127 | 143 | # Files |
128 | 144 | |
129 | 145 | * bootstrap/main.go: bootstrap syntax tree of peg |
130 | * peg.go: syntax tree and code generator | |
131 | * main.go: bootstrap main | |
146 | * tree/peg.go: syntax tree and code generator | |
132 | 147 | * peg.peg: peg in its own language |
133 | ||
134 | ||
135 | # Testing | |
136 | ||
137 | There should be no differences between the bootstrap and self compiled: | |
138 | ||
139 | ``` | |
140 | ./peg -inline -switch peg.peg | |
141 | diff bootstrap.peg.go peg.peg.go | |
142 | ``` | |
143 | 148 | |
144 | 149 | |
145 | 150 | # Author |
7 | 7 | "fmt" |
8 | 8 | "os" |
9 | 9 | "runtime" |
10 | ||
11 | "github.com/pointlander/peg/tree" | |
10 | 12 | ) |
11 | 13 | |
12 | 14 | func main() { |
13 | 15 | runtime.GOMAXPROCS(2) |
14 | t := New(true, true) | |
16 | t := tree.New(true, true, false) | |
15 | 17 | |
16 | 18 | /*package main |
17 | 19 | |
24 | 26 | *Tree |
25 | 27 | }*/ |
26 | 28 | t.AddPackage("main") |
29 | t.AddImport("github.com/pointlander/peg/tree") | |
27 | 30 | t.AddPeg("Peg") |
28 | 31 | t.AddState(` |
29 | *Tree | |
32 | *tree.Tree | |
30 | 33 | `) |
31 | 34 | |
32 | 35 | addDot := t.AddDot |
33 | 36 | addName := t.AddName |
34 | 37 | addCharacter := t.AddCharacter |
35 | addDoubleCharacter := t.AddDoubleCharacter | |
36 | addHexaCharacter := t.AddHexaCharacter | |
37 | 38 | addAction := t.AddAction |
38 | 39 | |
39 | 40 | addRule := func(name string, item func()) { |
84 | 85 | t.AddRange() |
85 | 86 | } |
86 | 87 | |
87 | addDoubleRange := func(begin, end string) { | |
88 | addCharacter(begin) | |
89 | addCharacter(end) | |
90 | t.AddDoubleRange() | |
91 | } | |
92 | ||
93 | 88 | addStar := func(item func()) { |
94 | 89 | item() |
95 | 90 | t.AddStar() |
96 | 91 | } |
97 | 92 | |
98 | addPlus := func(item func()) { | |
99 | item() | |
100 | t.AddPlus() | |
101 | } | |
102 | ||
103 | 93 | addQuery := func(item func()) { |
104 | 94 | item() |
105 | 95 | t.AddQuery() |
120 | 110 | t.AddPeekFor() |
121 | 111 | } |
122 | 112 | |
123 | /* Grammar <- Spacing 'package' MustSpacing Identifier { p.AddPackage(text) } | |
124 | Import* | |
125 | 'type' MustSpacing Identifier { p.AddPeg(text) } | |
126 | 'Peg' Spacing Action { p.AddState(text) } | |
127 | Definition+ EndOfFile */ | |
113 | /* Grammar <- Spacing { hdr; } Action* Definition* !. */ | |
128 | 114 | addRule("Grammar", func() { |
129 | 115 | addSequence( |
130 | 116 | func() { addName("Spacing") }, |
131 | func() { addString("package") }, | |
132 | func() { addName("MustSpacing") }, | |
133 | func() { addName("Identifier") }, | |
134 | func() { addAction(" p.AddPackage(text) ") }, | |
135 | func() { addStar(func() { addName("Import") }) }, | |
136 | func() { addString("type") }, | |
137 | func() { addName("MustSpacing") }, | |
138 | func() { addName("Identifier") }, | |
139 | func() { addAction(" p.AddPeg(text) ") }, | |
140 | func() { addString("Peg") }, | |
141 | func() { addName("Spacing") }, | |
142 | func() { addName("Action") }, | |
143 | func() { addAction(" p.AddState(text) ") }, | |
144 | func() { addPlus(func() { addName("Definition") }) }, | |
145 | func() { addName("EndOfFile") }, | |
146 | ) | |
147 | }) | |
148 | ||
149 | /* Import <- 'import' Spacing ["] < [a-zA-Z_/.\-]+ > ["] Spacing { p.AddImport(text) } */ | |
150 | addRule("Import", func() { | |
151 | addSequence( | |
152 | func() { addString("import") }, | |
153 | func() { addName("Spacing") }, | |
154 | func() { addCharacter(`"`) }, | |
155 | func() { | |
156 | addPush(func() { | |
157 | addPlus(func() { | |
158 | addAlternate( | |
159 | func() { addRange(`a`, `z`) }, | |
160 | func() { addRange(`A`, `Z`) }, | |
161 | func() { addCharacter(`_`) }, | |
162 | func() { addCharacter(`/`) }, | |
163 | func() { addCharacter(`.`) }, | |
164 | func() { addCharacter(`-`) }, | |
165 | ) | |
166 | }) | |
167 | }) | |
168 | }, | |
169 | func() { addCharacter(`"`) }, | |
170 | func() { addName("Spacing") }, | |
171 | func() { addAction(" p.AddImport(text) ") }, | |
117 | func() { addAction(`p.AddPackage("main")`) }, | |
118 | func() { addAction(`p.AddImport("github.com/pointlander/peg/tree")`) }, | |
119 | func() { addAction(`p.AddPeg("Peg")`) }, | |
120 | func() { addAction(`p.AddState("*tree.Tree")`) }, | |
121 | func() { addStar(func() { addName("Action") }) }, | |
122 | func() { addStar(func() { addName("Definition") }) }, | |
123 | func() { addPeekNot(func() { addDot() }) }, | |
172 | 124 | ) |
173 | 125 | }) |
174 | 126 | |
197 | 149 | ) |
198 | 150 | }) |
199 | 151 | |
200 | /* Expression <- Sequence (Slash Sequence { p.AddAlternate() } | |
201 | )* (Slash { p.AddNil(); p.AddAlternate() } | |
202 | )? | |
203 | / { p.AddNil() } */ | |
152 | /* Expression <- Sequence (Slash Sequence { p.AddAlternate() })* */ | |
204 | 153 | addRule("Expression", func() { |
205 | addAlternate( | |
206 | func() { | |
207 | addSequence( | |
208 | func() { addName("Sequence") }, | |
209 | func() { | |
210 | addStar(func() { | |
211 | addSequence( | |
212 | func() { addName("Slash") }, | |
213 | func() { addName("Sequence") }, | |
214 | func() { addAction(" p.AddAlternate() ") }, | |
215 | ) | |
216 | }) | |
217 | }, | |
218 | func() { | |
219 | addQuery(func() { | |
220 | addSequence( | |
221 | func() { addName("Slash") }, | |
222 | func() { addAction(" p.AddNil(); p.AddAlternate() ") }, | |
223 | ) | |
224 | }) | |
225 | }, | |
226 | ) | |
227 | }, | |
228 | func() { addAction(" p.AddNil() ") }, | |
229 | ) | |
230 | }) | |
231 | ||
232 | /* Sequence <- Prefix (Prefix { p.AddSequence() } | |
233 | )* */ | |
154 | addSequence( | |
155 | func() { addName("Sequence") }, | |
156 | func() { | |
157 | addStar(func() { | |
158 | addSequence( | |
159 | func() { addName("Slash") }, | |
160 | func() { addName("Sequence") }, | |
161 | func() { addAction(" p.AddAlternate() ") }, | |
162 | ) | |
163 | }) | |
164 | }, | |
165 | ) | |
166 | }) | |
167 | ||
168 | /* Sequence <- Prefix (Prefix { p.AddSequence() } )* */ | |
234 | 169 | addRule("Sequence", func() { |
235 | 170 | addSequence( |
236 | 171 | func() { addName("Prefix") }, |
245 | 180 | ) |
246 | 181 | }) |
247 | 182 | |
248 | /* Prefix <- And Action { p.AddPredicate(text) } | |
249 | / Not Action { p.AddStateChange(text) } | |
250 | / And Suffix { p.AddPeekFor() } | |
251 | / Not Suffix { p.AddPeekNot() } | |
252 | / Suffix */ | |
183 | /* Prefix <- '!' Suffix { p.AddPeekNot() } / Suffix */ | |
253 | 184 | addRule("Prefix", func() { |
254 | 185 | addAlternate( |
255 | 186 | func() { |
256 | 187 | addSequence( |
257 | func() { addName("And") }, | |
258 | func() { addName("Action") }, | |
259 | func() { addAction(" p.AddPredicate(text) ") }, | |
260 | ) | |
261 | }, | |
262 | func() { | |
263 | addSequence( | |
264 | func() { addName("Not") }, | |
265 | func() { addName("Action") }, | |
266 | func() { addAction(" p.AddStateChange(text) ") }, | |
267 | ) | |
268 | }, | |
269 | func() { | |
270 | addSequence( | |
271 | func() { addName("And") }, | |
272 | func() { addName("Suffix") }, | |
273 | func() { addAction(" p.AddPeekFor() ") }, | |
274 | ) | |
275 | }, | |
276 | func() { | |
277 | addSequence( | |
278 | func() { addName("Not") }, | |
188 | func() { addCharacter(`!`) }, | |
279 | 189 | func() { addName("Suffix") }, |
280 | 190 | func() { addAction(" p.AddPeekNot() ") }, |
281 | 191 | ) |
284 | 194 | ) |
285 | 195 | }) |
286 | 196 | |
287 | /* Suffix <- Primary (Question { p.AddQuery() } | |
288 | / Star { p.AddStar() } | |
289 | / Plus { p.AddPlus() } | |
290 | )? */ | |
197 | /* Suffix <- Primary ( Question { p.AddQuery() } | |
198 | / Star { p.AddStar() } | |
199 | )? */ | |
291 | 200 | addRule("Suffix", func() { |
292 | 201 | addSequence( |
293 | 202 | func() { addName("Primary") }, |
306 | 215 | func() { addAction(" p.AddStar() ") }, |
307 | 216 | ) |
308 | 217 | }, |
309 | func() { | |
310 | addSequence( | |
311 | func() { addName("Plus") }, | |
312 | func() { addAction(" p.AddPlus() ") }, | |
313 | ) | |
314 | }, | |
315 | 218 | ) |
316 | 219 | }) |
317 | 220 | }, |
366 | 269 | ) |
367 | 270 | }) |
368 | 271 | |
369 | /* Identifier <- < IdentStart IdentCont* > Spacing */ | |
272 | /* Identifier <- < Ident Ident* > Spacing */ | |
370 | 273 | addRule("Identifier", func() { |
371 | 274 | addSequence( |
372 | 275 | func() { |
373 | 276 | addPush(func() { |
374 | 277 | addSequence( |
375 | func() { addName("IdentStart") }, | |
376 | func() { addStar(func() { addName("IdentCont") }) }, | |
377 | ) | |
378 | }) | |
379 | }, | |
380 | func() { addName("Spacing") }, | |
381 | ) | |
382 | }) | |
383 | ||
384 | /* IdentStart <- [[a-z_]] */ | |
385 | addRule("IdentStart", func() { | |
386 | addAlternate( | |
387 | func() { addDoubleRange(`a`, `z`) }, | |
388 | func() { addCharacter(`_`) }, | |
389 | ) | |
390 | }) | |
391 | ||
392 | /* IdentCont <- IdentStart / [0-9] */ | |
393 | addRule("IdentCont", func() { | |
394 | addAlternate( | |
395 | func() { addName("IdentStart") }, | |
396 | func() { addRange(`0`, `9`) }, | |
397 | ) | |
398 | }) | |
399 | ||
400 | /* Literal <- ['] (!['] Char)? (!['] Char { p.AddSequence() } | |
401 | )* ['] Spacing | |
402 | / ["] (!["] DoubleChar)? (!["] DoubleChar { p.AddSequence() } | |
403 | )* ["] Spacing */ | |
278 | func() { addName("Ident") }, | |
279 | func() { addStar(func() { addName("Ident") }) }, | |
280 | ) | |
281 | }) | |
282 | }, | |
283 | func() { addName("Spacing") }, | |
284 | ) | |
285 | }) | |
286 | ||
287 | /* Ident <- [A-Za-z] */ | |
288 | addRule("Ident", func() { | |
289 | addAlternate( | |
290 | func() { addRange(`A`, `Z`) }, | |
291 | func() { addRange(`a`, `z`) }, | |
292 | ) | |
293 | }) | |
294 | ||
295 | /* Literal <- ['] !['] Char (!['] Char { p.AddSequence() } )* ['] Spacing */ | |
404 | 296 | addRule("Literal", func() { |
405 | addAlternate( | |
406 | func() { | |
407 | addSequence( | |
408 | func() { addCharacter(`'`) }, | |
409 | func() { | |
410 | addQuery(func() { | |
411 | addSequence( | |
412 | func() { addPeekNot(func() { addCharacter(`'`) }) }, | |
413 | func() { addName("Char") }, | |
414 | ) | |
415 | }) | |
416 | }, | |
417 | func() { | |
418 | addStar(func() { | |
419 | addSequence( | |
420 | func() { addPeekNot(func() { addCharacter(`'`) }) }, | |
421 | func() { addName("Char") }, | |
422 | func() { addAction(` p.AddSequence() `) }, | |
423 | ) | |
424 | }) | |
425 | }, | |
426 | func() { addCharacter(`'`) }, | |
427 | func() { addName("Spacing") }, | |
428 | ) | |
429 | }, | |
430 | func() { | |
431 | addSequence( | |
432 | func() { addCharacter(`"`) }, | |
433 | func() { | |
434 | addQuery(func() { | |
435 | addSequence( | |
436 | func() { addPeekNot(func() { addCharacter(`"`) }) }, | |
437 | func() { addName("DoubleChar") }, | |
438 | ) | |
439 | }) | |
440 | }, | |
441 | func() { | |
442 | addStar(func() { | |
443 | addSequence( | |
444 | func() { addPeekNot(func() { addCharacter(`"`) }) }, | |
445 | func() { addName("DoubleChar") }, | |
446 | func() { addAction(` p.AddSequence() `) }, | |
447 | ) | |
448 | }) | |
449 | }, | |
450 | func() { addCharacter(`"`) }, | |
451 | func() { addName("Spacing") }, | |
452 | ) | |
453 | }, | |
454 | ) | |
455 | }) | |
456 | ||
457 | /* Class <- ( '[[' ( '^' DoubleRanges { p.AddPeekNot(); p.AddDot(); p.AddSequence() } | |
458 | / DoubleRanges )? | |
459 | ']]' | |
460 | / '[' ( '^' Ranges { p.AddPeekNot(); p.AddDot(); p.AddSequence() } | |
461 | / Ranges )? | |
462 | ']' ) | |
463 | Spacing */ | |
297 | addSequence( | |
298 | func() { addCharacter(`'`) }, | |
299 | func() { | |
300 | addSequence( | |
301 | func() { addPeekNot(func() { addCharacter(`'`) }) }, | |
302 | func() { addName("Char") }, | |
303 | ) | |
304 | }, | |
305 | func() { | |
306 | addStar(func() { | |
307 | addSequence( | |
308 | func() { addPeekNot(func() { addCharacter(`'`) }) }, | |
309 | func() { addName("Char") }, | |
310 | func() { addAction(` p.AddSequence() `) }, | |
311 | ) | |
312 | }) | |
313 | }, | |
314 | func() { addCharacter(`'`) }, | |
315 | func() { addName("Spacing") }, | |
316 | ) | |
317 | }) | |
318 | ||
319 | /* Class <- '[' Range (!']' Range { p.AddAlternate() })* ']' Spacing */ | |
464 | 320 | addRule("Class", func() { |
465 | 321 | addSequence( |
466 | func() { | |
467 | addAlternate( | |
468 | func() { | |
469 | addSequence( | |
470 | func() { addString(`[[`) }, | |
471 | func() { | |
472 | addQuery(func() { | |
473 | addAlternate( | |
474 | func() { | |
475 | addSequence( | |
476 | func() { addCharacter(`^`) }, | |
477 | func() { addName("DoubleRanges") }, | |
478 | func() { addAction(` p.AddPeekNot(); p.AddDot(); p.AddSequence() `) }, | |
479 | ) | |
480 | }, | |
481 | func() { addName("DoubleRanges") }, | |
482 | ) | |
483 | }) | |
484 | }, | |
485 | func() { addString(`]]`) }, | |
486 | ) | |
487 | }, | |
488 | func() { | |
489 | addSequence( | |
490 | func() { addCharacter(`[`) }, | |
491 | func() { | |
492 | addQuery(func() { | |
493 | addAlternate( | |
494 | func() { | |
495 | addSequence( | |
496 | func() { addCharacter(`^`) }, | |
497 | func() { addName("Ranges") }, | |
498 | func() { addAction(` p.AddPeekNot(); p.AddDot(); p.AddSequence() `) }, | |
499 | ) | |
500 | }, | |
501 | func() { addName("Ranges") }, | |
502 | ) | |
503 | }) | |
504 | }, | |
505 | func() { addCharacter(`]`) }, | |
506 | ) | |
507 | }, | |
508 | ) | |
509 | }, | |
510 | func() { addName("Spacing") }, | |
511 | ) | |
512 | }) | |
513 | ||
514 | /* Ranges <- !']' Range (!']' Range { p.AddAlternate() } | |
515 | )* */ | |
516 | addRule("Ranges", func() { | |
517 | addSequence( | |
518 | func() { addPeekNot(func() { addCharacter(`]`) }) }, | |
322 | func() { addCharacter(`[`) }, | |
519 | 323 | func() { addName("Range") }, |
520 | 324 | func() { |
521 | 325 | addStar(func() { |
526 | 330 | ) |
527 | 331 | }) |
528 | 332 | }, |
529 | ) | |
530 | }) | |
531 | ||
532 | /* DoubleRanges <- !']]' DoubleRange (!']]' DoubleRange { p.AddAlternate() } | |
533 | )* */ | |
534 | addRule("DoubleRanges", func() { | |
535 | addSequence( | |
536 | func() { addPeekNot(func() { addString(`]]`) }) }, | |
537 | func() { addName("DoubleRange") }, | |
538 | func() { | |
539 | addStar(func() { | |
540 | addSequence( | |
541 | func() { addPeekNot(func() { addString(`]]`) }) }, | |
542 | func() { addName("DoubleRange") }, | |
543 | func() { addAction(" p.AddAlternate() ") }, | |
544 | ) | |
545 | }) | |
546 | }, | |
333 | func() { addCharacter(`]`) }, | |
334 | func() { addName("Spacing") }, | |
547 | 335 | ) |
548 | 336 | }) |
549 | 337 | |
563 | 351 | ) |
564 | 352 | }) |
565 | 353 | |
566 | /* DoubleRange <- Char '-' Char { p.AddDoubleRange() } | |
567 | / DoubleChar */ | |
568 | addRule("DoubleRange", func() { | |
569 | addAlternate( | |
570 | func() { | |
571 | addSequence( | |
572 | func() { addName("Char") }, | |
573 | func() { addCharacter(`-`) }, | |
574 | func() { addName("Char") }, | |
575 | func() { addAction(" p.AddDoubleRange() ") }, | |
576 | ) | |
577 | }, | |
578 | func() { addName("DoubleChar") }, | |
579 | ) | |
580 | }) | |
581 | ||
582 | /* Char <- Escape | |
583 | / !'\\' <.> { p.AddCharacter(text) } */ | |
354 | /* Char <- Escape | |
355 | / '\\' "0x"<[0-9a-f]*> { p.AddHexaCharacter(text) } | |
356 | / '\\\\' { p.AddCharacter("\\") } | |
357 | / !'\\' <.> { p.AddCharacter(text) } */ | |
584 | 358 | addRule("Char", func() { |
585 | 359 | addAlternate( |
586 | func() { addName("Escape") }, | |
587 | func() { | |
588 | addSequence( | |
589 | func() { addPeekNot(func() { addCharacter("\\") }) }, | |
590 | func() { addPush(func() { addDot() }) }, | |
591 | func() { addAction(` p.AddCharacter(text) `) }, | |
592 | ) | |
593 | }, | |
594 | ) | |
595 | }) | |
596 | ||
597 | /* DoubleChar <- Escape | |
598 | / <[a-zA-Z]> { p.AddDoubleCharacter(text) } | |
599 | / !'\\' <.> { p.AddCharacter(text) } */ | |
600 | addRule("DoubleChar", func() { | |
601 | addAlternate( | |
602 | func() { addName("Escape") }, | |
603 | func() { | |
604 | addSequence( | |
360 | func() { | |
361 | addSequence( | |
362 | func() { addCharacter("\\") }, | |
363 | func() { addCharacter(`0`) }, | |
364 | func() { addCharacter(`x`) }, | |
605 | 365 | func() { |
606 | 366 | addPush(func() { |
607 | addAlternate( | |
608 | func() { addRange(`a`, `z`) }, | |
609 | func() { addRange(`A`, `Z`) }, | |
610 | ) | |
611 | }) | |
612 | }, | |
613 | func() { addAction(` p.AddDoubleCharacter(text) `) }, | |
614 | ) | |
615 | }, | |
616 | func() { | |
617 | addSequence( | |
618 | func() { addPeekNot(func() { addCharacter("\\") }) }, | |
619 | func() { addPush(func() { addDot() }) }, | |
620 | func() { addAction(` p.AddCharacter(text) `) }, | |
621 | ) | |
622 | }, | |
623 | ) | |
624 | }) | |
625 | ||
626 | /* Escape <- "\\a" { p.AddCharacter("\a") } # bell | |
627 | / "\\b" { p.AddCharacter("\b") } # bs | |
628 | / "\\e" { p.AddCharacter("\x1B") } # esc | |
629 | / "\\f" { p.AddCharacter("\f") } # ff | |
630 | / "\\n" { p.AddCharacter("\n") } # nl | |
631 | / "\\r" { p.AddCharacter("\r") } # cr | |
632 | / "\\t" { p.AddCharacter("\t") } # ht | |
633 | / "\\v" { p.AddCharacter("\v") } # vt | |
634 | / "\\'" { p.AddCharacter("'") } | |
635 | / '\\"' { p.AddCharacter("\"") } | |
636 | / '\\[' { p.AddCharacter("[") } | |
637 | / '\\]' { p.AddCharacter("]") } | |
638 | / '\\-' { p.AddCharacter("-") } | |
639 | / '\\' "0x"<[0-9a-fA-F]+> { p.AddHexaCharacter(text) } | |
640 | / '\\' <[0-3][0-7][0-7]> { p.AddOctalCharacter(text) } | |
641 | / '\\' <[0-7][0-7]?> { p.AddOctalCharacter(text) } | |
642 | / '\\\\' { p.AddCharacter("\\") } */ | |
643 | addRule("Escape", func() { | |
644 | addAlternate( | |
645 | func() { | |
646 | addSequence( | |
647 | func() { addCharacter("\\") }, | |
648 | func() { addDoubleCharacter(`a`) }, | |
649 | func() { addAction(` p.AddCharacter("\a") `) }, | |
650 | ) | |
651 | }, | |
652 | func() { | |
653 | addSequence( | |
654 | func() { addCharacter("\\") }, | |
655 | func() { addDoubleCharacter(`b`) }, | |
656 | func() { addAction(` p.AddCharacter("\b") `) }, | |
657 | ) | |
658 | }, | |
659 | func() { | |
660 | addSequence( | |
661 | func() { addCharacter("\\") }, | |
662 | func() { addDoubleCharacter(`e`) }, | |
663 | func() { addAction(` p.AddCharacter("\x1B") `) }, | |
664 | ) | |
665 | }, | |
666 | func() { | |
667 | addSequence( | |
668 | func() { addCharacter("\\") }, | |
669 | func() { addDoubleCharacter(`f`) }, | |
670 | func() { addAction(` p.AddCharacter("\f") `) }, | |
671 | ) | |
672 | }, | |
673 | func() { | |
674 | addSequence( | |
675 | func() { addCharacter("\\") }, | |
676 | func() { addDoubleCharacter(`n`) }, | |
677 | func() { addAction(` p.AddCharacter("\n") `) }, | |
678 | ) | |
679 | }, | |
680 | func() { | |
681 | addSequence( | |
682 | func() { addCharacter("\\") }, | |
683 | func() { addDoubleCharacter(`r`) }, | |
684 | func() { addAction(` p.AddCharacter("\r") `) }, | |
685 | ) | |
686 | }, | |
687 | func() { | |
688 | addSequence( | |
689 | func() { addCharacter("\\") }, | |
690 | func() { addDoubleCharacter(`t`) }, | |
691 | func() { addAction(` p.AddCharacter("\t") `) }, | |
692 | ) | |
693 | }, | |
694 | func() { | |
695 | addSequence( | |
696 | func() { addCharacter("\\") }, | |
697 | func() { addDoubleCharacter(`v`) }, | |
698 | func() { addAction(` p.AddCharacter("\v") `) }, | |
699 | ) | |
700 | }, | |
701 | func() { | |
702 | addSequence( | |
703 | func() { addCharacter("\\") }, | |
704 | func() { addCharacter(`'`) }, | |
705 | func() { addAction(` p.AddCharacter("'") `) }, | |
706 | ) | |
707 | }, | |
708 | func() { | |
709 | addSequence( | |
710 | func() { addCharacter("\\") }, | |
711 | func() { addCharacter(`"`) }, | |
712 | func() { addAction(` p.AddCharacter("\"") `) }, | |
713 | ) | |
714 | }, | |
715 | func() { | |
716 | addSequence( | |
717 | func() { addCharacter("\\") }, | |
718 | func() { addCharacter(`[`) }, | |
719 | func() { addAction(` p.AddCharacter("[") `) }, | |
720 | ) | |
721 | }, | |
722 | func() { | |
723 | addSequence( | |
724 | func() { addCharacter("\\") }, | |
725 | func() { addCharacter(`]`) }, | |
726 | func() { addAction(` p.AddCharacter("]") `) }, | |
727 | ) | |
728 | }, | |
729 | func() { | |
730 | addSequence( | |
731 | func() { addCharacter("\\") }, | |
732 | func() { addCharacter(`-`) }, | |
733 | func() { addAction(` p.AddCharacter("-") `) }, | |
734 | ) | |
735 | }, | |
736 | func() { | |
737 | addSequence( | |
738 | func() { addCharacter("\\") }, | |
739 | func() { | |
740 | addSequence( | |
741 | func() { addCharacter(`0`) }, | |
742 | func() { addDoubleCharacter(`x`) }, | |
743 | ) | |
744 | }, | |
745 | func() { | |
746 | addPush(func() { | |
747 | addPlus(func() { | |
367 | addStar(func() { | |
748 | 368 | addAlternate( |
749 | 369 | func() { addRange(`0`, `9`) }, |
750 | 370 | func() { addRange(`a`, `f`) }, |
751 | func() { addRange(`A`, `F`) }, | |
752 | 371 | ) |
753 | 372 | }) |
754 | 373 | }) |
759 | 378 | func() { |
760 | 379 | addSequence( |
761 | 380 | func() { addCharacter("\\") }, |
762 | func() { | |
763 | addPush(func() { | |
764 | addSequence( | |
765 | func() { addRange(`0`, `3`) }, | |
766 | func() { addRange(`0`, `7`) }, | |
767 | func() { addRange(`0`, `7`) }, | |
768 | ) | |
769 | }) | |
770 | }, | |
771 | func() { addAction(` p.AddOctalCharacter(text) `) }, | |
772 | ) | |
773 | }, | |
774 | func() { | |
775 | addSequence( | |
776 | func() { addCharacter("\\") }, | |
777 | func() { | |
778 | addPush(func() { | |
779 | addSequence( | |
780 | func() { addRange(`0`, `7`) }, | |
781 | func() { addQuery(func() { addRange(`0`, `7`) }) }, | |
782 | ) | |
783 | }) | |
784 | }, | |
785 | func() { addAction(` p.AddOctalCharacter(text) `) }, | |
786 | ) | |
787 | }, | |
788 | func() { | |
789 | addSequence( | |
790 | func() { addCharacter("\\") }, | |
791 | 381 | func() { addCharacter("\\") }, |
792 | 382 | func() { addAction(` p.AddCharacter("\\") `) }, |
793 | 383 | ) |
794 | 384 | }, |
795 | ) | |
796 | }) | |
797 | ||
798 | /* LeftArrow <- ('<-' / '\0x2190') Spacing */ | |
385 | func() { | |
386 | addSequence( | |
387 | func() { addPeekNot(func() { addCharacter("\\") }) }, | |
388 | func() { addPush(func() { addDot() }) }, | |
389 | func() { addAction(` p.AddCharacter(text) `) }, | |
390 | ) | |
391 | }, | |
392 | ) | |
393 | }) | |
394 | /* LeftArrow <- '<-' Spacing */ | |
799 | 395 | addRule("LeftArrow", func() { |
800 | 396 | addSequence( |
801 | func() { | |
802 | addAlternate( | |
803 | func() { addString(`<-`) }, | |
804 | func() { addHexaCharacter("2190") }, | |
805 | ) | |
806 | }, | |
397 | func() { addString(`<-`) }, | |
807 | 398 | func() { addName("Spacing") }, |
808 | 399 | ) |
809 | 400 | }) |
816 | 407 | ) |
817 | 408 | }) |
818 | 409 | |
819 | /* And <- '&' Spacing */ | |
820 | addRule("And", func() { | |
821 | addSequence( | |
822 | func() { addCharacter(`&`) }, | |
823 | func() { addName("Spacing") }, | |
824 | ) | |
825 | }) | |
826 | ||
827 | /* Not <- '!' Spacing */ | |
828 | addRule("Not", func() { | |
829 | addSequence( | |
830 | func() { addCharacter(`!`) }, | |
831 | func() { addName("Spacing") }, | |
832 | ) | |
833 | }) | |
834 | ||
835 | 410 | /* Question <- '?' Spacing */ |
836 | 411 | addRule("Question", func() { |
837 | 412 | addSequence( |
848 | 423 | ) |
849 | 424 | }) |
850 | 425 | |
851 | /* Plus <- '+' Spacing */ | |
852 | addRule("Plus", func() { | |
853 | addSequence( | |
854 | func() { addCharacter(`+`) }, | |
855 | func() { addName("Spacing") }, | |
856 | ) | |
857 | }) | |
858 | ||
859 | 426 | /* Open <- '(' Spacing */ |
860 | 427 | addRule("Open", func() { |
861 | 428 | addSequence( |
880 | 447 | ) |
881 | 448 | }) |
882 | 449 | |
883 | /* SpaceComment <- (Space / Comment) */ | |
884 | addRule("SpaceComment", func() { | |
885 | addAlternate( | |
886 | func() { addName("Space") }, | |
887 | func() { addName("Comment") }, | |
888 | ) | |
889 | }) | |
890 | ||
891 | /* Spacing <- SpaceComment* */ | |
892 | 450 | addRule("Spacing", func() { |
893 | addStar(func() { addName("SpaceComment") }) | |
894 | }) | |
895 | ||
896 | /* MustSpacing <- SpaceComment+ */ | |
897 | addRule("MustSpacing", func() { | |
898 | addPlus(func() { t.AddName("SpaceComment") }) | |
899 | }) | |
900 | ||
901 | /* Comment <- '#' (!EndOfLine .)* EndOfLine */ | |
451 | addStar(func() { | |
452 | addAlternate( | |
453 | func() { addName("Space") }, | |
454 | func() { addName("Comment") }, | |
455 | ) | |
456 | }) | |
457 | }) | |
458 | ||
459 | /* Comment <- '#' (!EndOfLine .)* */ | |
902 | 460 | addRule("Comment", func() { |
903 | 461 | addSequence( |
904 | 462 | func() { addCharacter(`#`) }, |
910 | 468 | ) |
911 | 469 | }) |
912 | 470 | }, |
913 | func() { addName("EndOfLine") }, | |
914 | 471 | ) |
915 | 472 | }) |
916 | 473 | |
932 | 489 | ) |
933 | 490 | }) |
934 | 491 | |
935 | /* EndOfFile <- !. */ | |
936 | addRule("EndOfFile", func() { | |
937 | addPeekNot(func() { addDot() }) | |
938 | }) | |
939 | ||
940 | /* Action <- '{' < ActionBody* > '}' Spacing */ | |
492 | /* Action <- '{' < (![}].)* > '}' Spacing */ | |
941 | 493 | addRule("Action", func() { |
942 | 494 | addSequence( |
943 | 495 | func() { addCharacter(`{`) }, |
944 | 496 | func() { |
945 | 497 | addPush(func() { |
946 | addStar(func() { addName("ActionBody") }) | |
498 | addStar(func() { | |
499 | addSequence( | |
500 | func() { | |
501 | addPeekNot(func() { | |
502 | addCharacter(`}`) | |
503 | }) | |
504 | }, | |
505 | func() { addDot() }, | |
506 | ) | |
507 | }) | |
947 | 508 | }) |
948 | 509 | }, |
949 | 510 | func() { addCharacter(`}`) }, |
950 | 511 | func() { addName("Spacing") }, |
951 | ) | |
952 | }) | |
953 | ||
954 | /* ActionBody <- [^{}] / '{' ActionBody* '}' */ | |
955 | addRule("ActionBody", func() { | |
956 | addAlternate( | |
957 | func() { | |
958 | addSequence( | |
959 | func() { | |
960 | addPeekNot(func() { | |
961 | addAlternate( | |
962 | func() { addCharacter(`{`) }, | |
963 | func() { addCharacter(`}`) }, | |
964 | ) | |
965 | }) | |
966 | }, | |
967 | func() { addDot() }, | |
968 | ) | |
969 | }, | |
970 | func() { | |
971 | addSequence( | |
972 | func() { addCharacter(`{`) }, | |
973 | func() { addStar(func() { addName("ActionBody") }) }, | |
974 | func() { addCharacter(`}`) }, | |
975 | ) | |
976 | }, | |
977 | 512 | ) |
978 | 513 | }) |
979 | 514 | |
994 | 529 | }) |
995 | 530 | |
996 | 531 | filename := "bootstrap.peg.go" |
997 | out, error := os.OpenFile(filename, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644) | |
998 | if error != nil { | |
999 | fmt.Printf("%v: %v\n", filename, error) | |
532 | out, err := os.OpenFile(filename, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644) | |
533 | if err != nil { | |
534 | fmt.Printf("%v: %v\n", filename, err) | |
1000 | 535 | return |
1001 | 536 | } |
1002 | 537 | defer out.Close() |
1003 | t.Compile(filename, out) | |
538 | t.Compile(filename, os.Args, out) | |
1004 | 539 | } |
0 | package main | |
1 | ||
2 | import ( | |
3 | "fmt" | |
4 | "math" | |
5 | "sort" | |
6 | "strconv" | |
7 | ) | |
8 | ||
9 | const endSymbol rune = 1114112 | |
10 | ||
11 | /* The rule types inferred from the grammar are below. */ | |
12 | type pegRule uint8 | |
13 | ||
14 | const ( | |
15 | ruleUnknown pegRule = iota | |
16 | ruleGrammar | |
17 | ruleImport | |
18 | ruleDefinition | |
19 | ruleExpression | |
20 | ruleSequence | |
21 | rulePrefix | |
22 | ruleSuffix | |
23 | rulePrimary | |
24 | ruleIdentifier | |
25 | ruleIdentStart | |
26 | ruleIdentCont | |
27 | ruleLiteral | |
28 | ruleClass | |
29 | ruleRanges | |
30 | ruleDoubleRanges | |
31 | ruleRange | |
32 | ruleDoubleRange | |
33 | ruleChar | |
34 | ruleDoubleChar | |
35 | ruleEscape | |
36 | ruleLeftArrow | |
37 | ruleSlash | |
38 | ruleAnd | |
39 | ruleNot | |
40 | ruleQuestion | |
41 | ruleStar | |
42 | rulePlus | |
43 | ruleOpen | |
44 | ruleClose | |
45 | ruleDot | |
46 | ruleSpaceComment | |
47 | ruleSpacing | |
48 | ruleMustSpacing | |
49 | ruleComment | |
50 | ruleSpace | |
51 | ruleEndOfLine | |
52 | ruleEndOfFile | |
53 | ruleAction | |
54 | ruleActionBody | |
55 | ruleBegin | |
56 | ruleEnd | |
57 | ruleAction0 | |
58 | ruleAction1 | |
59 | ruleAction2 | |
60 | rulePegText | |
61 | ruleAction3 | |
62 | ruleAction4 | |
63 | ruleAction5 | |
64 | ruleAction6 | |
65 | ruleAction7 | |
66 | ruleAction8 | |
67 | ruleAction9 | |
68 | ruleAction10 | |
69 | ruleAction11 | |
70 | ruleAction12 | |
71 | ruleAction13 | |
72 | ruleAction14 | |
73 | ruleAction15 | |
74 | ruleAction16 | |
75 | ruleAction17 | |
76 | ruleAction18 | |
77 | ruleAction19 | |
78 | ruleAction20 | |
79 | ruleAction21 | |
80 | ruleAction22 | |
81 | ruleAction23 | |
82 | ruleAction24 | |
83 | ruleAction25 | |
84 | ruleAction26 | |
85 | ruleAction27 | |
86 | ruleAction28 | |
87 | ruleAction29 | |
88 | ruleAction30 | |
89 | ruleAction31 | |
90 | ruleAction32 | |
91 | ruleAction33 | |
92 | ruleAction34 | |
93 | ruleAction35 | |
94 | ruleAction36 | |
95 | ruleAction37 | |
96 | ruleAction38 | |
97 | ruleAction39 | |
98 | ruleAction40 | |
99 | ruleAction41 | |
100 | ruleAction42 | |
101 | ruleAction43 | |
102 | ruleAction44 | |
103 | ruleAction45 | |
104 | ruleAction46 | |
105 | ruleAction47 | |
106 | ruleAction48 | |
107 | ||
108 | rulePre | |
109 | ruleIn | |
110 | ruleSuf | |
111 | ) | |
112 | ||
113 | var rul3s = [...]string{ | |
114 | "Unknown", | |
115 | "Grammar", | |
116 | "Import", | |
117 | "Definition", | |
118 | "Expression", | |
119 | "Sequence", | |
120 | "Prefix", | |
121 | "Suffix", | |
122 | "Primary", | |
123 | "Identifier", | |
124 | "IdentStart", | |
125 | "IdentCont", | |
126 | "Literal", | |
127 | "Class", | |
128 | "Ranges", | |
129 | "DoubleRanges", | |
130 | "Range", | |
131 | "DoubleRange", | |
132 | "Char", | |
133 | "DoubleChar", | |
134 | "Escape", | |
135 | "LeftArrow", | |
136 | "Slash", | |
137 | "And", | |
138 | "Not", | |
139 | "Question", | |
140 | "Star", | |
141 | "Plus", | |
142 | "Open", | |
143 | "Close", | |
144 | "Dot", | |
145 | "SpaceComment", | |
146 | "Spacing", | |
147 | "MustSpacing", | |
148 | "Comment", | |
149 | "Space", | |
150 | "EndOfLine", | |
151 | "EndOfFile", | |
152 | "Action", | |
153 | "ActionBody", | |
154 | "Begin", | |
155 | "End", | |
156 | "Action0", | |
157 | "Action1", | |
158 | "Action2", | |
159 | "PegText", | |
160 | "Action3", | |
161 | "Action4", | |
162 | "Action5", | |
163 | "Action6", | |
164 | "Action7", | |
165 | "Action8", | |
166 | "Action9", | |
167 | "Action10", | |
168 | "Action11", | |
169 | "Action12", | |
170 | "Action13", | |
171 | "Action14", | |
172 | "Action15", | |
173 | "Action16", | |
174 | "Action17", | |
175 | "Action18", | |
176 | "Action19", | |
177 | "Action20", | |
178 | "Action21", | |
179 | "Action22", | |
180 | "Action23", | |
181 | "Action24", | |
182 | "Action25", | |
183 | "Action26", | |
184 | "Action27", | |
185 | "Action28", | |
186 | "Action29", | |
187 | "Action30", | |
188 | "Action31", | |
189 | "Action32", | |
190 | "Action33", | |
191 | "Action34", | |
192 | "Action35", | |
193 | "Action36", | |
194 | "Action37", | |
195 | "Action38", | |
196 | "Action39", | |
197 | "Action40", | |
198 | "Action41", | |
199 | "Action42", | |
200 | "Action43", | |
201 | "Action44", | |
202 | "Action45", | |
203 | "Action46", | |
204 | "Action47", | |
205 | "Action48", | |
206 | ||
207 | "Pre_", | |
208 | "_In_", | |
209 | "_Suf", | |
210 | } | |
211 | ||
212 | type node32 struct { | |
213 | token32 | |
214 | up, next *node32 | |
215 | } | |
216 | ||
217 | func (node *node32) print(depth int, buffer string) { | |
218 | for node != nil { | |
219 | for c := 0; c < depth; c++ { | |
220 | fmt.Printf(" ") | |
221 | } | |
222 | fmt.Printf("\x1B[34m%v\x1B[m %v\n", rul3s[node.pegRule], strconv.Quote(string(([]rune(buffer)[node.begin:node.end])))) | |
223 | if node.up != nil { | |
224 | node.up.print(depth+1, buffer) | |
225 | } | |
226 | node = node.next | |
227 | } | |
228 | } | |
229 | ||
230 | func (node *node32) Print(buffer string) { | |
231 | node.print(0, buffer) | |
232 | } | |
233 | ||
234 | type element struct { | |
235 | node *node32 | |
236 | down *element | |
237 | } | |
238 | ||
239 | /* ${@} bit structure for abstract syntax tree */ | |
240 | type token32 struct { | |
241 | pegRule | |
242 | begin, end, next uint32 | |
243 | } | |
244 | ||
245 | func (t *token32) isZero() bool { | |
246 | return t.pegRule == ruleUnknown && t.begin == 0 && t.end == 0 && t.next == 0 | |
247 | } | |
248 | ||
249 | func (t *token32) isParentOf(u token32) bool { | |
250 | return t.begin <= u.begin && t.end >= u.end && t.next > u.next | |
251 | } | |
252 | ||
253 | func (t *token32) getToken32() token32 { | |
254 | return token32{pegRule: t.pegRule, begin: uint32(t.begin), end: uint32(t.end), next: uint32(t.next)} | |
255 | } | |
256 | ||
257 | func (t *token32) String() string { | |
258 | return fmt.Sprintf("\x1B[34m%v\x1B[m %v %v %v", rul3s[t.pegRule], t.begin, t.end, t.next) | |
259 | } | |
260 | ||
261 | type tokens32 struct { | |
262 | tree []token32 | |
263 | ordered [][]token32 | |
264 | } | |
265 | ||
266 | func (t *tokens32) trim(length int) { | |
267 | t.tree = t.tree[0:length] | |
268 | } | |
269 | ||
270 | func (t *tokens32) Print() { | |
271 | for _, token := range t.tree { | |
272 | fmt.Println(token.String()) | |
273 | } | |
274 | } | |
275 | ||
276 | func (t *tokens32) Order() [][]token32 { | |
277 | if t.ordered != nil { | |
278 | return t.ordered | |
279 | } | |
280 | ||
281 | depths := make([]int32, 1, math.MaxInt16) | |
282 | for i, token := range t.tree { | |
283 | if token.pegRule == ruleUnknown { | |
284 | t.tree = t.tree[:i] | |
285 | break | |
286 | } | |
287 | depth := int(token.next) | |
288 | if length := len(depths); depth >= length { | |
289 | depths = depths[:depth+1] | |
290 | } | |
291 | depths[depth]++ | |
292 | } | |
293 | depths = append(depths, 0) | |
294 | ||
295 | ordered, pool := make([][]token32, len(depths)), make([]token32, len(t.tree)+len(depths)) | |
296 | for i, depth := range depths { | |
297 | depth++ | |
298 | ordered[i], pool, depths[i] = pool[:depth], pool[depth:], 0 | |
299 | } | |
300 | ||
301 | for i, token := range t.tree { | |
302 | depth := token.next | |
303 | token.next = uint32(i) | |
304 | ordered[depth][depths[depth]] = token | |
305 | depths[depth]++ | |
306 | } | |
307 | t.ordered = ordered | |
308 | return ordered | |
309 | } | |
310 | ||
311 | type state32 struct { | |
312 | token32 | |
313 | depths []int32 | |
314 | leaf bool | |
315 | } | |
316 | ||
317 | func (t *tokens32) AST() *node32 { | |
318 | tokens := t.Tokens() | |
319 | stack := &element{node: &node32{token32: <-tokens}} | |
320 | for token := range tokens { | |
321 | if token.begin == token.end { | |
322 | continue | |
323 | } | |
324 | node := &node32{token32: token} | |
325 | for stack != nil && stack.node.begin >= token.begin && stack.node.end <= token.end { | |
326 | stack.node.next = node.up | |
327 | node.up = stack.node | |
328 | stack = stack.down | |
329 | } | |
330 | stack = &element{node: node, down: stack} | |
331 | } | |
332 | return stack.node | |
333 | } | |
334 | ||
335 | func (t *tokens32) PreOrder() (<-chan state32, [][]token32) { | |
336 | s, ordered := make(chan state32, 6), t.Order() | |
337 | go func() { | |
338 | var states [8]state32 | |
339 | for i := range states { | |
340 | states[i].depths = make([]int32, len(ordered)) | |
341 | } | |
342 | depths, state, depth := make([]int32, len(ordered)), 0, 1 | |
343 | write := func(t token32, leaf bool) { | |
344 | S := states[state] | |
345 | state, S.pegRule, S.begin, S.end, S.next, S.leaf = (state+1)%8, t.pegRule, t.begin, t.end, uint32(depth), leaf | |
346 | copy(S.depths, depths) | |
347 | s <- S | |
348 | } | |
349 | ||
350 | states[state].token32 = ordered[0][0] | |
351 | depths[0]++ | |
352 | state++ | |
353 | a, b := ordered[depth-1][depths[depth-1]-1], ordered[depth][depths[depth]] | |
354 | depthFirstSearch: | |
355 | for { | |
356 | for { | |
357 | if i := depths[depth]; i > 0 { | |
358 | if c, j := ordered[depth][i-1], depths[depth-1]; a.isParentOf(c) && | |
359 | (j < 2 || !ordered[depth-1][j-2].isParentOf(c)) { | |
360 | if c.end != b.begin { | |
361 | write(token32{pegRule: ruleIn, begin: c.end, end: b.begin}, true) | |
362 | } | |
363 | break | |
364 | } | |
365 | } | |
366 | ||
367 | if a.begin < b.begin { | |
368 | write(token32{pegRule: rulePre, begin: a.begin, end: b.begin}, true) | |
369 | } | |
370 | break | |
371 | } | |
372 | ||
373 | next := depth + 1 | |
374 | if c := ordered[next][depths[next]]; c.pegRule != ruleUnknown && b.isParentOf(c) { | |
375 | write(b, false) | |
376 | depths[depth]++ | |
377 | depth, a, b = next, b, c | |
378 | continue | |
379 | } | |
380 | ||
381 | write(b, true) | |
382 | depths[depth]++ | |
383 | c, parent := ordered[depth][depths[depth]], true | |
384 | for { | |
385 | if c.pegRule != ruleUnknown && a.isParentOf(c) { | |
386 | b = c | |
387 | continue depthFirstSearch | |
388 | } else if parent && b.end != a.end { | |
389 | write(token32{pegRule: ruleSuf, begin: b.end, end: a.end}, true) | |
390 | } | |
391 | ||
392 | depth-- | |
393 | if depth > 0 { | |
394 | a, b, c = ordered[depth-1][depths[depth-1]-1], a, ordered[depth][depths[depth]] | |
395 | parent = a.isParentOf(b) | |
396 | continue | |
397 | } | |
398 | ||
399 | break depthFirstSearch | |
400 | } | |
401 | } | |
402 | ||
403 | close(s) | |
404 | }() | |
405 | return s, ordered | |
406 | } | |
407 | ||
408 | func (t *tokens32) PrintSyntax() { | |
409 | tokens, ordered := t.PreOrder() | |
410 | max := -1 | |
411 | for token := range tokens { | |
412 | if !token.leaf { | |
413 | fmt.Printf("%v", token.begin) | |
414 | for i, leaf, depths := 0, int(token.next), token.depths; i < leaf; i++ { | |
415 | fmt.Printf(" \x1B[36m%v\x1B[m", rul3s[ordered[i][depths[i]-1].pegRule]) | |
416 | } | |
417 | fmt.Printf(" \x1B[36m%v\x1B[m\n", rul3s[token.pegRule]) | |
418 | } else if token.begin == token.end { | |
419 | fmt.Printf("%v", token.begin) | |
420 | for i, leaf, depths := 0, int(token.next), token.depths; i < leaf; i++ { | |
421 | fmt.Printf(" \x1B[31m%v\x1B[m", rul3s[ordered[i][depths[i]-1].pegRule]) | |
422 | } | |
423 | fmt.Printf(" \x1B[31m%v\x1B[m\n", rul3s[token.pegRule]) | |
424 | } else { | |
425 | for c, end := token.begin, token.end; c < end; c++ { | |
426 | if i := int(c); max+1 < i { | |
427 | for j := max; j < i; j++ { | |
428 | fmt.Printf("skip %v %v\n", j, token.String()) | |
429 | } | |
430 | max = i | |
431 | } else if i := int(c); i <= max { | |
432 | for j := i; j <= max; j++ { | |
433 | fmt.Printf("dupe %v %v\n", j, token.String()) | |
434 | } | |
435 | } else { | |
436 | max = int(c) | |
437 | } | |
438 | fmt.Printf("%v", c) | |
439 | for i, leaf, depths := 0, int(token.next), token.depths; i < leaf; i++ { | |
440 | fmt.Printf(" \x1B[34m%v\x1B[m", rul3s[ordered[i][depths[i]-1].pegRule]) | |
441 | } | |
442 | fmt.Printf(" \x1B[34m%v\x1B[m\n", rul3s[token.pegRule]) | |
443 | } | |
444 | fmt.Printf("\n") | |
445 | } | |
446 | } | |
447 | } | |
448 | ||
449 | func (t *tokens32) PrintSyntaxTree(buffer string) { | |
450 | tokens, _ := t.PreOrder() | |
451 | for token := range tokens { | |
452 | for c := 0; c < int(token.next); c++ { | |
453 | fmt.Printf(" ") | |
454 | } | |
455 | fmt.Printf("\x1B[34m%v\x1B[m %v\n", rul3s[token.pegRule], strconv.Quote(string(([]rune(buffer)[token.begin:token.end])))) | |
456 | } | |
457 | } | |
458 | ||
459 | func (t *tokens32) Add(rule pegRule, begin, end, depth uint32, index int) { | |
460 | t.tree[index] = token32{pegRule: rule, begin: uint32(begin), end: uint32(end), next: uint32(depth)} | |
461 | } | |
462 | ||
463 | func (t *tokens32) Tokens() <-chan token32 { | |
464 | s := make(chan token32, 16) | |
465 | go func() { | |
466 | for _, v := range t.tree { | |
467 | s <- v.getToken32() | |
468 | } | |
469 | close(s) | |
470 | }() | |
471 | return s | |
472 | } | |
473 | ||
474 | func (t *tokens32) Error() []token32 { | |
475 | ordered := t.Order() | |
476 | length := len(ordered) | |
477 | tokens, length := make([]token32, length), length-1 | |
478 | for i := range tokens { | |
479 | o := ordered[length-i] | |
480 | if len(o) > 1 { | |
481 | tokens[i] = o[len(o)-2].getToken32() | |
482 | } | |
483 | } | |
484 | return tokens | |
485 | } | |
486 | ||
487 | func (t *tokens32) Expand(index int) { | |
488 | tree := t.tree | |
489 | if index >= len(tree) { | |
490 | expanded := make([]token32, 2*len(tree)) | |
491 | copy(expanded, tree) | |
492 | t.tree = expanded | |
493 | } | |
494 | } | |
495 | ||
496 | type Peg struct { | |
497 | *Tree | |
498 | ||
499 | Buffer string | |
500 | buffer []rune | |
501 | rules [92]func() bool | |
502 | Parse func(rule ...int) error | |
503 | Reset func() | |
504 | Pretty bool | |
505 | tokens32 | |
506 | } | |
507 | ||
508 | type textPosition struct { | |
509 | line, symbol int | |
510 | } | |
511 | ||
512 | type textPositionMap map[int]textPosition | |
513 | ||
514 | func translatePositions(buffer []rune, positions []int) textPositionMap { | |
515 | length, translations, j, line, symbol := len(positions), make(textPositionMap, len(positions)), 0, 1, 0 | |
516 | sort.Ints(positions) | |
517 | ||
518 | search: | |
519 | for i, c := range buffer { | |
520 | if c == '\n' { | |
521 | line, symbol = line+1, 0 | |
522 | } else { | |
523 | symbol++ | |
524 | } | |
525 | if i == positions[j] { | |
526 | translations[positions[j]] = textPosition{line, symbol} | |
527 | for j++; j < length; j++ { | |
528 | if i != positions[j] { | |
529 | continue search | |
530 | } | |
531 | } | |
532 | break search | |
533 | } | |
534 | } | |
535 | ||
536 | return translations | |
537 | } | |
538 | ||
539 | type parseError struct { | |
540 | p *Peg | |
541 | max token32 | |
542 | } | |
543 | ||
544 | func (e *parseError) Error() string { | |
545 | tokens, error := []token32{e.max}, "\n" | |
546 | positions, p := make([]int, 2*len(tokens)), 0 | |
547 | for _, token := range tokens { | |
548 | positions[p], p = int(token.begin), p+1 | |
549 | positions[p], p = int(token.end), p+1 | |
550 | } | |
551 | translations := translatePositions(e.p.buffer, positions) | |
552 | format := "parse error near %v (line %v symbol %v - line %v symbol %v):\n%v\n" | |
553 | if e.p.Pretty { | |
554 | format = "parse error near \x1B[34m%v\x1B[m (line %v symbol %v - line %v symbol %v):\n%v\n" | |
555 | } | |
556 | for _, token := range tokens { | |
557 | begin, end := int(token.begin), int(token.end) | |
558 | error += fmt.Sprintf(format, | |
559 | rul3s[token.pegRule], | |
560 | translations[begin].line, translations[begin].symbol, | |
561 | translations[end].line, translations[end].symbol, | |
562 | strconv.Quote(string(e.p.buffer[begin:end]))) | |
563 | } | |
564 | ||
565 | return error | |
566 | } | |
567 | ||
568 | func (p *Peg) PrintSyntaxTree() { | |
569 | p.tokens32.PrintSyntaxTree(p.Buffer) | |
570 | } | |
571 | ||
572 | func (p *Peg) Highlighter() { | |
573 | p.PrintSyntax() | |
574 | } | |
575 | ||
576 | func (p *Peg) Execute() { | |
577 | buffer, _buffer, text, begin, end := p.Buffer, p.buffer, "", 0, 0 | |
578 | for token := range p.Tokens() { | |
579 | switch token.pegRule { | |
580 | ||
581 | case rulePegText: | |
582 | begin, end = int(token.begin), int(token.end) | |
583 | text = string(_buffer[begin:end]) | |
584 | ||
585 | case ruleAction0: | |
586 | p.AddPackage(text) | |
587 | case ruleAction1: | |
588 | p.AddPeg(text) | |
589 | case ruleAction2: | |
590 | p.AddState(text) | |
591 | case ruleAction3: | |
592 | p.AddImport(text) | |
593 | case ruleAction4: | |
594 | p.AddRule(text) | |
595 | case ruleAction5: | |
596 | p.AddExpression() | |
597 | case ruleAction6: | |
598 | p.AddAlternate() | |
599 | case ruleAction7: | |
600 | p.AddNil() | |
601 | p.AddAlternate() | |
602 | case ruleAction8: | |
603 | p.AddNil() | |
604 | case ruleAction9: | |
605 | p.AddSequence() | |
606 | case ruleAction10: | |
607 | p.AddPredicate(text) | |
608 | case ruleAction11: | |
609 | p.AddStateChange(text) | |
610 | case ruleAction12: | |
611 | p.AddPeekFor() | |
612 | case ruleAction13: | |
613 | p.AddPeekNot() | |
614 | case ruleAction14: | |
615 | p.AddQuery() | |
616 | case ruleAction15: | |
617 | p.AddStar() | |
618 | case ruleAction16: | |
619 | p.AddPlus() | |
620 | case ruleAction17: | |
621 | p.AddName(text) | |
622 | case ruleAction18: | |
623 | p.AddDot() | |
624 | case ruleAction19: | |
625 | p.AddAction(text) | |
626 | case ruleAction20: | |
627 | p.AddPush() | |
628 | case ruleAction21: | |
629 | p.AddSequence() | |
630 | case ruleAction22: | |
631 | p.AddSequence() | |
632 | case ruleAction23: | |
633 | p.AddPeekNot() | |
634 | p.AddDot() | |
635 | p.AddSequence() | |
636 | case ruleAction24: | |
637 | p.AddPeekNot() | |
638 | p.AddDot() | |
639 | p.AddSequence() | |
640 | case ruleAction25: | |
641 | p.AddAlternate() | |
642 | case ruleAction26: | |
643 | p.AddAlternate() | |
644 | case ruleAction27: | |
645 | p.AddRange() | |
646 | case ruleAction28: | |
647 | p.AddDoubleRange() | |
648 | case ruleAction29: | |
649 | p.AddCharacter(text) | |
650 | case ruleAction30: | |
651 | p.AddDoubleCharacter(text) | |
652 | case ruleAction31: | |
653 | p.AddCharacter(text) | |
654 | case ruleAction32: | |
655 | p.AddCharacter("\a") | |
656 | case ruleAction33: | |
657 | p.AddCharacter("\b") | |
658 | case ruleAction34: | |
659 | p.AddCharacter("\x1B") | |
660 | case ruleAction35: | |
661 | p.AddCharacter("\f") | |
662 | case ruleAction36: | |
663 | p.AddCharacter("\n") | |
664 | case ruleAction37: | |
665 | p.AddCharacter("\r") | |
666 | case ruleAction38: | |
667 | p.AddCharacter("\t") | |
668 | case ruleAction39: | |
669 | p.AddCharacter("\v") | |
670 | case ruleAction40: | |
671 | p.AddCharacter("'") | |
672 | case ruleAction41: | |
673 | p.AddCharacter("\"") | |
674 | case ruleAction42: | |
675 | p.AddCharacter("[") | |
676 | case ruleAction43: | |
677 | p.AddCharacter("]") | |
678 | case ruleAction44: | |
679 | p.AddCharacter("-") | |
680 | case ruleAction45: | |
681 | p.AddHexaCharacter(text) | |
682 | case ruleAction46: | |
683 | p.AddOctalCharacter(text) | |
684 | case ruleAction47: | |
685 | p.AddOctalCharacter(text) | |
686 | case ruleAction48: | |
687 | p.AddCharacter("\\") | |
688 | ||
689 | } | |
690 | } | |
691 | _, _, _, _, _ = buffer, _buffer, text, begin, end | |
692 | } | |
693 | ||
694 | func (p *Peg) Init() { | |
695 | p.buffer = []rune(p.Buffer) | |
696 | if len(p.buffer) == 0 || p.buffer[len(p.buffer)-1] != endSymbol { | |
697 | p.buffer = append(p.buffer, endSymbol) | |
698 | } | |
699 | ||
700 | tree := tokens32{tree: make([]token32, math.MaxInt16)} | |
701 | var max token32 | |
702 | position, depth, tokenIndex, buffer, _rules := uint32(0), uint32(0), 0, p.buffer, p.rules | |
703 | ||
704 | p.Parse = func(rule ...int) error { | |
705 | r := 1 | |
706 | if len(rule) > 0 { | |
707 | r = rule[0] | |
708 | } | |
709 | matches := p.rules[r]() | |
710 | p.tokens32 = tree | |
711 | if matches { | |
712 | p.trim(tokenIndex) | |
713 | return nil | |
714 | } | |
715 | return &parseError{p, max} | |
716 | } | |
717 | ||
718 | p.Reset = func() { | |
719 | position, tokenIndex, depth = 0, 0, 0 | |
720 | } | |
721 | ||
722 | add := func(rule pegRule, begin uint32) { | |
723 | tree.Expand(tokenIndex) | |
724 | tree.Add(rule, begin, position, depth, tokenIndex) | |
725 | tokenIndex++ | |
726 | if begin != position && position > max.end { | |
727 | max = token32{rule, begin, position, depth} | |
728 | } | |
729 | } | |
730 | ||
731 | matchDot := func() bool { | |
732 | if buffer[position] != endSymbol { | |
733 | position++ | |
734 | return true | |
735 | } | |
736 | return false | |
737 | } | |
738 | ||
739 | /*matchChar := func(c byte) bool { | |
740 | if buffer[position] == c { | |
741 | position++ | |
742 | return true | |
743 | } | |
744 | return false | |
745 | }*/ | |
746 | ||
747 | /*matchRange := func(lower byte, upper byte) bool { | |
748 | if c := buffer[position]; c >= lower && c <= upper { | |
749 | position++ | |
750 | return true | |
751 | } | |
752 | return false | |
753 | }*/ | |
754 | ||
755 | _rules = [...]func() bool{ | |
756 | nil, | |
757 | /* 0 Grammar <- <(Spacing ('p' 'a' 'c' 'k' 'a' 'g' 'e') MustSpacing Identifier Action0 Import* ('t' 'y' 'p' 'e') MustSpacing Identifier Action1 ('P' 'e' 'g') Spacing Action Action2 Definition+ EndOfFile)> */ | |
758 | func() bool { | |
759 | position0, tokenIndex0, depth0 := position, tokenIndex, depth | |
760 | { | |
761 | position1 := position | |
762 | depth++ | |
763 | if !_rules[ruleSpacing]() { | |
764 | goto l0 | |
765 | } | |
766 | if buffer[position] != rune('p') { | |
767 | goto l0 | |
768 | } | |
769 | position++ | |
770 | if buffer[position] != rune('a') { | |
771 | goto l0 | |
772 | } | |
773 | position++ | |
774 | if buffer[position] != rune('c') { | |
775 | goto l0 | |
776 | } | |
777 | position++ | |
778 | if buffer[position] != rune('k') { | |
779 | goto l0 | |
780 | } | |
781 | position++ | |
782 | if buffer[position] != rune('a') { | |
783 | goto l0 | |
784 | } | |
785 | position++ | |
786 | if buffer[position] != rune('g') { | |
787 | goto l0 | |
788 | } | |
789 | position++ | |
790 | if buffer[position] != rune('e') { | |
791 | goto l0 | |
792 | } | |
793 | position++ | |
794 | if !_rules[ruleMustSpacing]() { | |
795 | goto l0 | |
796 | } | |
797 | if !_rules[ruleIdentifier]() { | |
798 | goto l0 | |
799 | } | |
800 | { | |
801 | add(ruleAction0, position) | |
802 | } | |
803 | l3: | |
804 | { | |
805 | position4, tokenIndex4, depth4 := position, tokenIndex, depth | |
806 | { | |
807 | position5 := position | |
808 | depth++ | |
809 | if buffer[position] != rune('i') { | |
810 | goto l4 | |
811 | } | |
812 | position++ | |
813 | if buffer[position] != rune('m') { | |
814 | goto l4 | |
815 | } | |
816 | position++ | |
817 | if buffer[position] != rune('p') { | |
818 | goto l4 | |
819 | } | |
820 | position++ | |
821 | if buffer[position] != rune('o') { | |
822 | goto l4 | |
823 | } | |
824 | position++ | |
825 | if buffer[position] != rune('r') { | |
826 | goto l4 | |
827 | } | |
828 | position++ | |
829 | if buffer[position] != rune('t') { | |
830 | goto l4 | |
831 | } | |
832 | position++ | |
833 | if !_rules[ruleSpacing]() { | |
834 | goto l4 | |
835 | } | |
836 | if buffer[position] != rune('"') { | |
837 | goto l4 | |
838 | } | |
839 | position++ | |
840 | { | |
841 | position6 := position | |
842 | depth++ | |
843 | { | |
844 | switch buffer[position] { | |
845 | case '-': | |
846 | if buffer[position] != rune('-') { | |
847 | goto l4 | |
848 | } | |
849 | position++ | |
850 | break | |
851 | case '.': | |
852 | if buffer[position] != rune('.') { | |
853 | goto l4 | |
854 | } | |
855 | position++ | |
856 | break | |
857 | case '/': | |
858 | if buffer[position] != rune('/') { | |
859 | goto l4 | |
860 | } | |
861 | position++ | |
862 | break | |
863 | case '_': | |
864 | if buffer[position] != rune('_') { | |
865 | goto l4 | |
866 | } | |
867 | position++ | |
868 | break | |
869 | case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': | |
870 | if c := buffer[position]; c < rune('A') || c > rune('Z') { | |
871 | goto l4 | |
872 | } | |
873 | position++ | |
874 | break | |
875 | default: | |
876 | if c := buffer[position]; c < rune('a') || c > rune('z') { | |
877 | goto l4 | |
878 | } | |
879 | position++ | |
880 | break | |
881 | } | |
882 | } | |
883 | ||
884 | l7: | |
885 | { | |
886 | position8, tokenIndex8, depth8 := position, tokenIndex, depth | |
887 | { | |
888 | switch buffer[position] { | |
889 | case '-': | |
890 | if buffer[position] != rune('-') { | |
891 | goto l8 | |
892 | } | |
893 | position++ | |
894 | break | |
895 | case '.': | |
896 | if buffer[position] != rune('.') { | |
897 | goto l8 | |
898 | } | |
899 | position++ | |
900 | break | |
901 | case '/': | |
902 | if buffer[position] != rune('/') { | |
903 | goto l8 | |
904 | } | |
905 | position++ | |
906 | break | |
907 | case '_': | |
908 | if buffer[position] != rune('_') { | |
909 | goto l8 | |
910 | } | |
911 | position++ | |
912 | break | |
913 | case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': | |
914 | if c := buffer[position]; c < rune('A') || c > rune('Z') { | |
915 | goto l8 | |
916 | } | |
917 | position++ | |
918 | break | |
919 | default: | |
920 | if c := buffer[position]; c < rune('a') || c > rune('z') { | |
921 | goto l8 | |
922 | } | |
923 | position++ | |
924 | break | |
925 | } | |
926 | } | |
927 | ||
928 | goto l7 | |
929 | l8: | |
930 | position, tokenIndex, depth = position8, tokenIndex8, depth8 | |
931 | } | |
932 | depth-- | |
933 | add(rulePegText, position6) | |
934 | } | |
935 | if buffer[position] != rune('"') { | |
936 | goto l4 | |
937 | } | |
938 | position++ | |
939 | if !_rules[ruleSpacing]() { | |
940 | goto l4 | |
941 | } | |
942 | { | |
943 | add(ruleAction3, position) | |
944 | } | |
945 | depth-- | |
946 | add(ruleImport, position5) | |
947 | } | |
948 | goto l3 | |
949 | l4: | |
950 | position, tokenIndex, depth = position4, tokenIndex4, depth4 | |
951 | } | |
952 | if buffer[position] != rune('t') { | |
953 | goto l0 | |
954 | } | |
955 | position++ | |
956 | if buffer[position] != rune('y') { | |
957 | goto l0 | |
958 | } | |
959 | position++ | |
960 | if buffer[position] != rune('p') { | |
961 | goto l0 | |
962 | } | |
963 | position++ | |
964 | if buffer[position] != rune('e') { | |
965 | goto l0 | |
966 | } | |
967 | position++ | |
968 | if !_rules[ruleMustSpacing]() { | |
969 | goto l0 | |
970 | } | |
971 | if !_rules[ruleIdentifier]() { | |
972 | goto l0 | |
973 | } | |
974 | { | |
975 | add(ruleAction1, position) | |
976 | } | |
977 | if buffer[position] != rune('P') { | |
978 | goto l0 | |
979 | } | |
980 | position++ | |
981 | if buffer[position] != rune('e') { | |
982 | goto l0 | |
983 | } | |
984 | position++ | |
985 | if buffer[position] != rune('g') { | |
986 | goto l0 | |
987 | } | |
988 | position++ | |
989 | if !_rules[ruleSpacing]() { | |
990 | goto l0 | |
991 | } | |
992 | if !_rules[ruleAction]() { | |
993 | goto l0 | |
994 | } | |
995 | { | |
996 | add(ruleAction2, position) | |
997 | } | |
998 | { | |
999 | position16 := position | |
1000 | depth++ | |
1001 | if !_rules[ruleIdentifier]() { | |
1002 | goto l0 | |
1003 | } | |
1004 | { | |
1005 | add(ruleAction4, position) | |
1006 | } | |
1007 | if !_rules[ruleLeftArrow]() { | |
1008 | goto l0 | |
1009 | } | |
1010 | if !_rules[ruleExpression]() { | |
1011 | goto l0 | |
1012 | } | |
1013 | { | |
1014 | add(ruleAction5, position) | |
1015 | } | |
1016 | { | |
1017 | position19, tokenIndex19, depth19 := position, tokenIndex, depth | |
1018 | { | |
1019 | position20, tokenIndex20, depth20 := position, tokenIndex, depth | |
1020 | if !_rules[ruleIdentifier]() { | |
1021 | goto l21 | |
1022 | } | |
1023 | if !_rules[ruleLeftArrow]() { | |
1024 | goto l21 | |
1025 | } | |
1026 | goto l20 | |
1027 | l21: | |
1028 | position, tokenIndex, depth = position20, tokenIndex20, depth20 | |
1029 | { | |
1030 | position22, tokenIndex22, depth22 := position, tokenIndex, depth | |
1031 | if !matchDot() { | |
1032 | goto l22 | |
1033 | } | |
1034 | goto l0 | |
1035 | l22: | |
1036 | position, tokenIndex, depth = position22, tokenIndex22, depth22 | |
1037 | } | |
1038 | } | |
1039 | l20: | |
1040 | position, tokenIndex, depth = position19, tokenIndex19, depth19 | |
1041 | } | |
1042 | depth-- | |
1043 | add(ruleDefinition, position16) | |
1044 | } | |
1045 | l14: | |
1046 | { | |
1047 | position15, tokenIndex15, depth15 := position, tokenIndex, depth | |
1048 | { | |
1049 | position23 := position | |
1050 | depth++ | |
1051 | if !_rules[ruleIdentifier]() { | |
1052 | goto l15 | |
1053 | } | |
1054 | { | |
1055 | add(ruleAction4, position) | |
1056 | } | |
1057 | if !_rules[ruleLeftArrow]() { | |
1058 | goto l15 | |
1059 | } | |
1060 | if !_rules[ruleExpression]() { | |
1061 | goto l15 | |
1062 | } | |
1063 | { | |
1064 | add(ruleAction5, position) | |
1065 | } | |
1066 | { | |
1067 | position26, tokenIndex26, depth26 := position, tokenIndex, depth | |
1068 | { | |
1069 | position27, tokenIndex27, depth27 := position, tokenIndex, depth | |
1070 | if !_rules[ruleIdentifier]() { | |
1071 | goto l28 | |
1072 | } | |
1073 | if !_rules[ruleLeftArrow]() { | |
1074 | goto l28 | |
1075 | } | |
1076 | goto l27 | |
1077 | l28: | |
1078 | position, tokenIndex, depth = position27, tokenIndex27, depth27 | |
1079 | { | |
1080 | position29, tokenIndex29, depth29 := position, tokenIndex, depth | |
1081 | if !matchDot() { | |
1082 | goto l29 | |
1083 | } | |
1084 | goto l15 | |
1085 | l29: | |
1086 | position, tokenIndex, depth = position29, tokenIndex29, depth29 | |
1087 | } | |
1088 | } | |
1089 | l27: | |
1090 | position, tokenIndex, depth = position26, tokenIndex26, depth26 | |
1091 | } | |
1092 | depth-- | |
1093 | add(ruleDefinition, position23) | |
1094 | } | |
1095 | goto l14 | |
1096 | l15: | |
1097 | position, tokenIndex, depth = position15, tokenIndex15, depth15 | |
1098 | } | |
1099 | { | |
1100 | position30 := position | |
1101 | depth++ | |
1102 | { | |
1103 | position31, tokenIndex31, depth31 := position, tokenIndex, depth | |
1104 | if !matchDot() { | |
1105 | goto l31 | |
1106 | } | |
1107 | goto l0 | |
1108 | l31: | |
1109 | position, tokenIndex, depth = position31, tokenIndex31, depth31 | |
1110 | } | |
1111 | depth-- | |
1112 | add(ruleEndOfFile, position30) | |
1113 | } | |
1114 | depth-- | |
1115 | add(ruleGrammar, position1) | |
1116 | } | |
1117 | return true | |
1118 | l0: | |
1119 | position, tokenIndex, depth = position0, tokenIndex0, depth0 | |
1120 | return false | |
1121 | }, | |
1122 | /* 1 Import <- <('i' 'm' 'p' 'o' 'r' 't' Spacing '"' <((&('-') '-') | (&('.') '.') | (&('/') '/') | (&('_') '_') | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z') [A-Z]) | (&('a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') [a-z]))+> '"' Spacing Action3)> */ | |
1123 | nil, | |
1124 | /* 2 Definition <- <(Identifier Action4 LeftArrow Expression Action5 &((Identifier LeftArrow) / !.))> */ | |
1125 | nil, | |
1126 | /* 3 Expression <- <((Sequence (Slash Sequence Action6)* (Slash Action7)?) / Action8)> */ | |
		// Rule 3: Expression <- ((Sequence (Slash Sequence Action6)* (Slash Action7)?) / Action8)
		// Machine-generated backtracking matcher (do not hand-edit; regenerate
		// from the grammar). Labels lNN are backtrack/continue targets; the
		// positionNN/tokenIndexNN/depthNN triples snapshot parser state so a
		// failed alternative can restore it before trying the next one.
		// Always returns true: the second alternative is the bare Action8.
		func() bool {
			{
				position35 := position
				depth++
				{
					// First alternative: Sequence (Slash Sequence Action6)* (Slash Action7)?
					position36, tokenIndex36, depth36 := position, tokenIndex, depth
					if !_rules[ruleSequence]() {
						goto l37
					}
				l38: // loop: zero or more (Slash Sequence Action6)
					{
						position39, tokenIndex39, depth39 := position, tokenIndex, depth
						if !_rules[ruleSlash]() {
							goto l39
						}
						if !_rules[ruleSequence]() {
							goto l39
						}
						{
							add(ruleAction6, position)
						}
						goto l38
					l39:
						position, tokenIndex, depth = position39, tokenIndex39, depth39
					}
					{
						// Optional trailing (Slash Action7)
						position41, tokenIndex41, depth41 := position, tokenIndex, depth
						if !_rules[ruleSlash]() {
							goto l41
						}
						{
							add(ruleAction7, position)
						}
						goto l42
					l41:
						position, tokenIndex, depth = position41, tokenIndex41, depth41
					}
				l42:
					goto l36
				l37: // first alternative failed: restore state, fall back to Action8
					position, tokenIndex, depth = position36, tokenIndex36, depth36
					{
						add(ruleAction8, position)
					}
				}
			l36:
				depth--
				add(ruleExpression, position35)
			}
			return true
		},
1178 | /* 4 Sequence <- <(Prefix (Prefix Action9)*)> */ | |
		// Rule 4: Sequence <- (Prefix (Prefix Action9)*)
		// Machine-generated backtracking matcher. Requires at least one Prefix;
		// each additional Prefix triggers Action9. Fails (restoring parser
		// state) only if the first Prefix does not match.
		func() bool {
			position45, tokenIndex45, depth45 := position, tokenIndex, depth
			{
				position46 := position
				depth++
				if !_rules[rulePrefix]() {
					goto l45
				}
			l47: // loop: zero or more (Prefix Action9)
				{
					position48, tokenIndex48, depth48 := position, tokenIndex, depth
					if !_rules[rulePrefix]() {
						goto l48
					}
					{
						add(ruleAction9, position)
					}
					goto l47
				l48:
					position, tokenIndex, depth = position48, tokenIndex48, depth48
				}
				depth--
				add(ruleSequence, position46)
			}
			return true
		l45: // no Prefix matched: restore state and report failure
			position, tokenIndex, depth = position45, tokenIndex45, depth45
			return false
		},
1208 | /* 5 Prefix <- <((And Action Action10) / (Not Action Action11) / ((&('!') (Not Suffix Action13)) | (&('&') (And Suffix Action12)) | (&('"' | '\'' | '(' | '.' | '<' | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z' | '[' | '_' | 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z' | '{') Suffix)))> */ | |
		// Rule 5: Prefix <- ((And Action Action10) / (Not Action Action11) /
		//   ((&('!') (Not Suffix Action13)) | (&('&') (And Suffix Action12)) | (&(...) Suffix)))
		// Machine-generated backtracking matcher. The first two alternatives
		// are tried in order with state restored between them; the third is a
		// switch on the next input rune that dispatches to Not/And/plain
		// Suffix forms (the -switch optimization of the generator).
		func() bool {
			position50, tokenIndex50, depth50 := position, tokenIndex, depth
			{
				position51 := position
				depth++
				{
					position52, tokenIndex52, depth52 := position, tokenIndex, depth
					// Alternative 1: And Action Action10
					if !_rules[ruleAnd]() {
						goto l53
					}
					if !_rules[ruleAction]() {
						goto l53
					}
					{
						add(ruleAction10, position)
					}
					goto l52
				l53:
					position, tokenIndex, depth = position52, tokenIndex52, depth52
					// Alternative 2: Not Action Action11
					if !_rules[ruleNot]() {
						goto l55
					}
					if !_rules[ruleAction]() {
						goto l55
					}
					{
						add(ruleAction11, position)
					}
					goto l52
				l55:
					position, tokenIndex, depth = position52, tokenIndex52, depth52
					{
						// Alternative 3: dispatch on next rune
						switch buffer[position] {
						case '!':
							if !_rules[ruleNot]() {
								goto l50
							}
							if !_rules[ruleSuffix]() {
								goto l50
							}
							{
								add(ruleAction13, position)
							}
							break
						case '&':
							if !_rules[ruleAnd]() {
								goto l50
							}
							if !_rules[ruleSuffix]() {
								goto l50
							}
							{
								add(ruleAction12, position)
							}
							break
						default:
							if !_rules[ruleSuffix]() {
								goto l50
							}
							break
						}
					}

				}
			l52:
				depth--
				add(rulePrefix, position51)
			}
			return true
		l50: // all alternatives failed: restore state
			position, tokenIndex, depth = position50, tokenIndex50, depth50
			return false
		},
1282 | /* 6 Suffix <- <(Primary ((&('+') (Plus Action16)) | (&('*') (Star Action15)) | (&('?') (Question Action14)))?)> */ | |
		// Rule 6: Suffix <- (Primary ((&('+') (Plus Action16)) | (&('*') (Star Action15)) | (&('?') (Question Action14)))?)
		// Machine-generated backtracking matcher. Rule 7 (Primary) is inlined
		// here as the first switch: it dispatches on the next rune to the
		// <...> capture, {...} action, dot, character class, literal,
		// parenthesized expression, or identifier-reference forms. A second,
		// optional switch then consumes one of the '+', '*', '?' operators.
		func() bool {
			position60, tokenIndex60, depth60 := position, tokenIndex, depth
			{
				position61 := position
				depth++
				{
					// Inlined Primary (rule 7), dispatched on the next rune.
					position62 := position
					depth++
					{
						switch buffer[position] {
						case '<': // Begin Expression End Action20
							{
								position64 := position
								depth++
								if buffer[position] != rune('<') {
									goto l60
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l60
								}
								depth--
								add(ruleBegin, position64)
							}
							if !_rules[ruleExpression]() {
								goto l60
							}
							{
								position65 := position
								depth++
								if buffer[position] != rune('>') {
									goto l60
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l60
								}
								depth--
								add(ruleEnd, position65)
							}
							{
								add(ruleAction20, position)
							}
							break
						case '{': // Action Action19
							if !_rules[ruleAction]() {
								goto l60
							}
							{
								add(ruleAction19, position)
							}
							break
						case '.': // Dot Action18
							{
								position68 := position
								depth++
								if buffer[position] != rune('.') {
									goto l60
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l60
								}
								depth--
								add(ruleDot, position68)
							}
							{
								add(ruleAction18, position)
							}
							break
						case '[': // Class (rule 12 inlined): [[..]] double form first, then [..]
							{
								position70 := position
								depth++
								{
									position71, tokenIndex71, depth71 := position, tokenIndex, depth
									if buffer[position] != rune('[') {
										goto l72
									}
									position++
									if buffer[position] != rune('[') {
										goto l72
									}
									position++
									{
										// Optional (('^' DoubleRanges Action23) / DoubleRanges)
										position73, tokenIndex73, depth73 := position, tokenIndex, depth
										{
											position75, tokenIndex75, depth75 := position, tokenIndex, depth
											if buffer[position] != rune('^') {
												goto l76
											}
											position++
											if !_rules[ruleDoubleRanges]() {
												goto l76
											}
											{
												add(ruleAction23, position)
											}
											goto l75
										l76:
											position, tokenIndex, depth = position75, tokenIndex75, depth75
											if !_rules[ruleDoubleRanges]() {
												goto l73
											}
										}
									l75:
										goto l74
									l73:
										position, tokenIndex, depth = position73, tokenIndex73, depth73
									}
								l74:
									if buffer[position] != rune(']') {
										goto l72
									}
									position++
									if buffer[position] != rune(']') {
										goto l72
									}
									position++
									goto l71
								l72: // double-bracket form failed: try single-bracket class
									position, tokenIndex, depth = position71, tokenIndex71, depth71
									if buffer[position] != rune('[') {
										goto l60
									}
									position++
									{
										// Optional (('^' Ranges Action24) / Ranges)
										position78, tokenIndex78, depth78 := position, tokenIndex, depth
										{
											position80, tokenIndex80, depth80 := position, tokenIndex, depth
											if buffer[position] != rune('^') {
												goto l81
											}
											position++
											if !_rules[ruleRanges]() {
												goto l81
											}
											{
												add(ruleAction24, position)
											}
											goto l80
										l81:
											position, tokenIndex, depth = position80, tokenIndex80, depth80
											if !_rules[ruleRanges]() {
												goto l78
											}
										}
									l80:
										goto l79
									l78:
										position, tokenIndex, depth = position78, tokenIndex78, depth78
									}
								l79:
									if buffer[position] != rune(']') {
										goto l60
									}
									position++
								}
							l71:
								if !_rules[ruleSpacing]() {
									goto l60
								}
								depth--
								add(ruleClass, position70)
							}
							break
						case '"', '\'': // Literal (rule 11 inlined): single- then double-quoted form
							{
								position83 := position
								depth++
								{
									position84, tokenIndex84, depth84 := position, tokenIndex, depth
									if buffer[position] != rune('\'') {
										goto l85
									}
									position++
									{
										// Optional first (!'\'' Char)
										position86, tokenIndex86, depth86 := position, tokenIndex, depth
										{
											position88, tokenIndex88, depth88 := position, tokenIndex, depth
											if buffer[position] != rune('\'') {
												goto l88
											}
											position++
											goto l86
										l88:
											position, tokenIndex, depth = position88, tokenIndex88, depth88
										}
										if !_rules[ruleChar]() {
											goto l86
										}
										goto l87
									l86:
										position, tokenIndex, depth = position86, tokenIndex86, depth86
									}
								l87:
								l89: // loop: (!'\'' Char Action21)*
									{
										position90, tokenIndex90, depth90 := position, tokenIndex, depth
										{
											position91, tokenIndex91, depth91 := position, tokenIndex, depth
											if buffer[position] != rune('\'') {
												goto l91
											}
											position++
											goto l90
										l91:
											position, tokenIndex, depth = position91, tokenIndex91, depth91
										}
										if !_rules[ruleChar]() {
											goto l90
										}
										{
											add(ruleAction21, position)
										}
										goto l89
									l90:
										position, tokenIndex, depth = position90, tokenIndex90, depth90
									}
									if buffer[position] != rune('\'') {
										goto l85
									}
									position++
									if !_rules[ruleSpacing]() {
										goto l85
									}
									goto l84
								l85: // single-quoted form failed: try double-quoted form
									position, tokenIndex, depth = position84, tokenIndex84, depth84
									if buffer[position] != rune('"') {
										goto l60
									}
									position++
									{
										// Optional first (!'"' DoubleChar)
										position93, tokenIndex93, depth93 := position, tokenIndex, depth
										{
											position95, tokenIndex95, depth95 := position, tokenIndex, depth
											if buffer[position] != rune('"') {
												goto l95
											}
											position++
											goto l93
										l95:
											position, tokenIndex, depth = position95, tokenIndex95, depth95
										}
										if !_rules[ruleDoubleChar]() {
											goto l93
										}
										goto l94
									l93:
										position, tokenIndex, depth = position93, tokenIndex93, depth93
									}
								l94:
								l96: // loop: (!'"' DoubleChar Action22)*
									{
										position97, tokenIndex97, depth97 := position, tokenIndex, depth
										{
											position98, tokenIndex98, depth98 := position, tokenIndex, depth
											if buffer[position] != rune('"') {
												goto l98
											}
											position++
											goto l97
										l98:
											position, tokenIndex, depth = position98, tokenIndex98, depth98
										}
										if !_rules[ruleDoubleChar]() {
											goto l97
										}
										{
											add(ruleAction22, position)
										}
										goto l96
									l97:
										position, tokenIndex, depth = position97, tokenIndex97, depth97
									}
									if buffer[position] != rune('"') {
										goto l60
									}
									position++
									if !_rules[ruleSpacing]() {
										goto l60
									}
								}
							l84:
								depth--
								add(ruleLiteral, position83)
							}
							break
						case '(': // Open Expression Close
							{
								position100 := position
								depth++
								if buffer[position] != rune('(') {
									goto l60
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l60
								}
								depth--
								add(ruleOpen, position100)
							}
							if !_rules[ruleExpression]() {
								goto l60
							}
							{
								position101 := position
								depth++
								if buffer[position] != rune(')') {
									goto l60
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l60
								}
								depth--
								add(ruleClose, position101)
							}
							break
						default: // Identifier !LeftArrow Action17 (rule reference)
							if !_rules[ruleIdentifier]() {
								goto l60
							}
							{
								// Negative lookahead: must NOT be followed by LeftArrow
								// (that would be the start of the next definition).
								position102, tokenIndex102, depth102 := position, tokenIndex, depth
								if !_rules[ruleLeftArrow]() {
									goto l102
								}
								goto l60
							l102:
								position, tokenIndex, depth = position102, tokenIndex102, depth102
							}
							{
								add(ruleAction17, position)
							}
							break
						}
					}

					depth--
					add(rulePrimary, position62)
				}
				{
					// Optional repetition operator: '+', '*' or '?'.
					position104, tokenIndex104, depth104 := position, tokenIndex, depth
					{
						switch buffer[position] {
						case '+':
							{
								position107 := position
								depth++
								if buffer[position] != rune('+') {
									goto l104
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l104
								}
								depth--
								add(rulePlus, position107)
							}
							{
								add(ruleAction16, position)
							}
							break
						case '*':
							{
								position109 := position
								depth++
								if buffer[position] != rune('*') {
									goto l104
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l104
								}
								depth--
								add(ruleStar, position109)
							}
							{
								add(ruleAction15, position)
							}
							break
						default:
							{
								position111 := position
								depth++
								if buffer[position] != rune('?') {
									goto l104
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l104
								}
								depth--
								add(ruleQuestion, position111)
							}
							{
								add(ruleAction14, position)
							}
							break
						}
					}

					goto l105
				l104: // no operator present: restore state (operator is optional)
					position, tokenIndex, depth = position104, tokenIndex104, depth104
				}
			l105:
				depth--
				add(ruleSuffix, position61)
			}
			return true
		l60: // Primary (or a required part of it) failed: restore state
			position, tokenIndex, depth = position60, tokenIndex60, depth60
			return false
		},
1700 | /* 7 Primary <- <((&('<') (Begin Expression End Action20)) | (&('{') (Action Action19)) | (&('.') (Dot Action18)) | (&('[') Class) | (&('"' | '\'') Literal) | (&('(') (Open Expression Close)) | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z' | '_' | 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') (Identifier !LeftArrow Action17)))> */ | |
1701 | nil, | |
1702 | /* 8 Identifier <- <(<(IdentStart IdentCont*)> Spacing)> */ | |
		// Rule 8: Identifier <- (<(IdentStart IdentCont*)> Spacing)
		// Machine-generated backtracking matcher. The <...> capture is emitted
		// as a rulePegText node spanning IdentStart followed by zero or more
		// IdentCont (rule 10, inlined here: IdentStart / [0-9]).
		func() bool {
			position114, tokenIndex114, depth114 := position, tokenIndex, depth
			{
				position115 := position
				depth++
				{
					// Capture region: IdentStart IdentCont*
					position116 := position
					depth++
					if !_rules[ruleIdentStart]() {
						goto l114
					}
				l117: // loop: zero or more IdentCont
					{
						position118, tokenIndex118, depth118 := position, tokenIndex, depth
						{
							position119 := position
							depth++
							{
								position120, tokenIndex120, depth120 := position, tokenIndex, depth
								if !_rules[ruleIdentStart]() {
									goto l121
								}
								goto l120
							l121: // not an IdentStart: accept a digit instead
								position, tokenIndex, depth = position120, tokenIndex120, depth120
								if c := buffer[position]; c < rune('0') || c > rune('9') {
									goto l118
								}
								position++
							}
						l120:
							depth--
							add(ruleIdentCont, position119)
						}
						goto l117
					l118:
						position, tokenIndex, depth = position118, tokenIndex118, depth118
					}
					depth--
					add(rulePegText, position116)
				}
				if !_rules[ruleSpacing]() {
					goto l114
				}
				depth--
				add(ruleIdentifier, position115)
			}
			return true
		l114: // no identifier at this position: restore state
			position, tokenIndex, depth = position114, tokenIndex114, depth114
			return false
		},
1755 | /* 9 IdentStart <- <((&('_') '_') | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z') [A-Z]) | (&('a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') [a-z]))> */ | |
		// Rule 9: IdentStart <- ((&('_') '_') | (&([A-Z]) [A-Z]) | (&([a-z]) [a-z]))
		// Machine-generated matcher: accepts a single '_', uppercase, or
		// lowercase ASCII letter, dispatched via switch on the next rune.
		func() bool {
			position122, tokenIndex122, depth122 := position, tokenIndex, depth
			{
				position123 := position
				depth++
				{
					switch buffer[position] {
					case '_':
						if buffer[position] != rune('_') {
							goto l122
						}
						position++
						break
					case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z':
						if c := buffer[position]; c < rune('A') || c > rune('Z') {
							goto l122
						}
						position++
						break
					default:
						if c := buffer[position]; c < rune('a') || c > rune('z') {
							goto l122
						}
						position++
						break
					}
				}

				depth--
				add(ruleIdentStart, position123)
			}
			return true
		l122: // next rune is not a valid identifier start: restore state
			position, tokenIndex, depth = position122, tokenIndex122, depth122
			return false
		},
1792 | /* 10 IdentCont <- <(IdentStart / [0-9])> */ | |
1793 | nil, | |
1794 | /* 11 Literal <- <(('\'' (!'\'' Char)? (!'\'' Char Action21)* '\'' Spacing) / ('"' (!'"' DoubleChar)? (!'"' DoubleChar Action22)* '"' Spacing))> */ | |
1795 | nil, | |
1796 | /* 12 Class <- <((('[' '[' (('^' DoubleRanges Action23) / DoubleRanges)? (']' ']')) / ('[' (('^' Ranges Action24) / Ranges)? ']')) Spacing)> */ | |
1797 | nil, | |
1798 | /* 13 Ranges <- <(!']' Range (!']' Range Action25)*)> */ | |
		// Rule 13: Ranges <- (!']' Range (!']' Range Action25)*)
		// Machine-generated backtracking matcher: one or more Range items,
		// each guarded by a negative lookahead for the class-closing ']'.
		func() bool {
			position128, tokenIndex128, depth128 := position, tokenIndex, depth
			{
				position129 := position
				depth++
				{
					// !']' lookahead: fail the whole rule if the class is empty.
					position130, tokenIndex130, depth130 := position, tokenIndex, depth
					if buffer[position] != rune(']') {
						goto l130
					}
					position++
					goto l128
				l130:
					position, tokenIndex, depth = position130, tokenIndex130, depth130
				}
				if !_rules[ruleRange]() {
					goto l128
				}
			l131: // loop: zero or more (!']' Range Action25)
				{
					position132, tokenIndex132, depth132 := position, tokenIndex, depth
					{
						position133, tokenIndex133, depth133 := position, tokenIndex, depth
						if buffer[position] != rune(']') {
							goto l133
						}
						position++
						goto l132
					l133:
						position, tokenIndex, depth = position133, tokenIndex133, depth133
					}
					if !_rules[ruleRange]() {
						goto l132
					}
					{
						add(ruleAction25, position)
					}
					goto l131
				l132:
					position, tokenIndex, depth = position132, tokenIndex132, depth132
				}
				depth--
				add(ruleRanges, position129)
			}
			return true
		l128: // first Range failed (or class was empty): restore state
			position, tokenIndex, depth = position128, tokenIndex128, depth128
			return false
		},
1848 | /* 14 DoubleRanges <- <(!(']' ']') DoubleRange (!(']' ']') DoubleRange Action26)*)> */ | |
		// Rule 14: DoubleRanges <- (!(']' ']') DoubleRange (!(']' ']') DoubleRange Action26)*)
		// Machine-generated backtracking matcher: like Ranges (rule 13) but
		// for the [[...]] class form, so the terminator lookahead is the
		// two-rune sequence "]]" rather than a single ']'.
		func() bool {
			position135, tokenIndex135, depth135 := position, tokenIndex, depth
			{
				position136 := position
				depth++
				{
					// !(']' ']') lookahead: fail the whole rule if the class is empty.
					position137, tokenIndex137, depth137 := position, tokenIndex, depth
					if buffer[position] != rune(']') {
						goto l137
					}
					position++
					if buffer[position] != rune(']') {
						goto l137
					}
					position++
					goto l135
				l137:
					position, tokenIndex, depth = position137, tokenIndex137, depth137
				}
				if !_rules[ruleDoubleRange]() {
					goto l135
				}
			l138: // loop: zero or more (!(']' ']') DoubleRange Action26)
				{
					position139, tokenIndex139, depth139 := position, tokenIndex, depth
					{
						position140, tokenIndex140, depth140 := position, tokenIndex, depth
						if buffer[position] != rune(']') {
							goto l140
						}
						position++
						if buffer[position] != rune(']') {
							goto l140
						}
						position++
						goto l139
					l140:
						position, tokenIndex, depth = position140, tokenIndex140, depth140
					}
					if !_rules[ruleDoubleRange]() {
						goto l139
					}
					{
						add(ruleAction26, position)
					}
					goto l138
				l139:
					position, tokenIndex, depth = position139, tokenIndex139, depth139
				}
				depth--
				add(ruleDoubleRanges, position136)
			}
			return true
		l135: // first DoubleRange failed (or class was empty): restore state
			position, tokenIndex, depth = position135, tokenIndex135, depth135
			return false
		},
1906 | /* 15 Range <- <((Char '-' Char Action27) / Char)> */ | |
		// Rule 15: Range <- ((Char '-' Char Action27) / Char)
		// Machine-generated backtracking matcher: tries the Char '-' Char span
		// form first, then falls back to a single Char.
		func() bool {
			position142, tokenIndex142, depth142 := position, tokenIndex, depth
			{
				position143 := position
				depth++
				{
					position144, tokenIndex144, depth144 := position, tokenIndex, depth
					if !_rules[ruleChar]() {
						goto l145
					}
					if buffer[position] != rune('-') {
						goto l145
					}
					position++
					if !_rules[ruleChar]() {
						goto l145
					}
					{
						add(ruleAction27, position)
					}
					goto l144
				l145: // span form failed: restore state, try a single Char
					position, tokenIndex, depth = position144, tokenIndex144, depth144
					if !_rules[ruleChar]() {
						goto l142
					}
				}
			l144:
				depth--
				add(ruleRange, position143)
			}
			return true
		l142: // neither form matched: restore state
			position, tokenIndex, depth = position142, tokenIndex142, depth142
			return false
		},
1943 | /* 16 DoubleRange <- <((Char '-' Char Action28) / DoubleChar)> */ | |
		// Rule 16: DoubleRange <- ((Char '-' Char Action28) / DoubleChar)
		// Machine-generated backtracking matcher: like Range (rule 15), but
		// the single-character fallback is DoubleChar (used inside [[...]]).
		func() bool {
			position147, tokenIndex147, depth147 := position, tokenIndex, depth
			{
				position148 := position
				depth++
				{
					position149, tokenIndex149, depth149 := position, tokenIndex, depth
					if !_rules[ruleChar]() {
						goto l150
					}
					if buffer[position] != rune('-') {
						goto l150
					}
					position++
					if !_rules[ruleChar]() {
						goto l150
					}
					{
						add(ruleAction28, position)
					}
					goto l149
				l150: // span form failed: restore state, try a single DoubleChar
					position, tokenIndex, depth = position149, tokenIndex149, depth149
					if !_rules[ruleDoubleChar]() {
						goto l147
					}
				}
			l149:
				depth--
				add(ruleDoubleRange, position148)
			}
			return true
		l147: // neither form matched: restore state
			position, tokenIndex, depth = position147, tokenIndex147, depth147
			return false
		},
1980 | /* 17 Char <- <(Escape / (!'\\' <.> Action29))> */ | |
		// Rule 17: Char <- (Escape / (!'\\' <.> Action29))
		// Machine-generated backtracking matcher: an Escape sequence, or any
		// single non-backslash rune captured as rulePegText for Action29.
		func() bool {
			position152, tokenIndex152, depth152 := position, tokenIndex, depth
			{
				position153 := position
				depth++
				{
					position154, tokenIndex154, depth154 := position, tokenIndex, depth
					if !_rules[ruleEscape]() {
						goto l155
					}
					goto l154
				l155: // not an Escape: restore state, try a literal rune
					position, tokenIndex, depth = position154, tokenIndex154, depth154
					{
						// !'\\' lookahead: a bare backslash must be an Escape.
						position156, tokenIndex156, depth156 := position, tokenIndex, depth
						if buffer[position] != rune('\\') {
							goto l156
						}
						position++
						goto l152
					l156:
						position, tokenIndex, depth = position156, tokenIndex156, depth156
					}
					{
						// Capture the single rune as rulePegText.
						position157 := position
						depth++
						if !matchDot() {
							goto l152
						}
						depth--
						add(rulePegText, position157)
					}
					{
						add(ruleAction29, position)
					}
				}
			l154:
				depth--
				add(ruleChar, position153)
			}
			return true
		l152: // neither alternative matched: restore state
			position, tokenIndex, depth = position152, tokenIndex152, depth152
			return false
		},
2026 | /* 18 DoubleChar <- <(Escape / (<([a-z] / [A-Z])> Action30) / (!'\\' <.> Action31))> */ | |
		// Rule 18: DoubleChar <- (Escape / (<([a-z] / [A-Z])> Action30) / (!'\\' <.> Action31))
		// Machine-generated backtracking matcher: an Escape sequence; or an
		// ASCII letter captured for Action30 (used by the case-insensitive
		// double-quoted literal form); or any other non-backslash rune
		// captured for Action31.
		func() bool {
			position159, tokenIndex159, depth159 := position, tokenIndex, depth
			{
				position160 := position
				depth++
				{
					position161, tokenIndex161, depth161 := position, tokenIndex, depth
					if !_rules[ruleEscape]() {
						goto l162
					}
					goto l161
				l162: // not an Escape: restore state, try an ASCII letter
					position, tokenIndex, depth = position161, tokenIndex161, depth161
					{
						// Capture one letter ([a-z] / [A-Z]) as rulePegText.
						position164 := position
						depth++
						{
							position165, tokenIndex165, depth165 := position, tokenIndex, depth
							if c := buffer[position]; c < rune('a') || c > rune('z') {
								goto l166
							}
							position++
							goto l165
						l166: // not lowercase: restore state, try uppercase
							position, tokenIndex, depth = position165, tokenIndex165, depth165
							if c := buffer[position]; c < rune('A') || c > rune('Z') {
								goto l163
							}
							position++
						}
					l165:
						depth--
						add(rulePegText, position164)
					}
					{
						add(ruleAction30, position)
					}
					goto l161
				l163: // not a letter: restore state, try any non-backslash rune
					position, tokenIndex, depth = position161, tokenIndex161, depth161
					{
						// !'\\' lookahead: a bare backslash must be an Escape.
						position168, tokenIndex168, depth168 := position, tokenIndex, depth
						if buffer[position] != rune('\\') {
							goto l168
						}
						position++
						goto l159
					l168:
						position, tokenIndex, depth = position168, tokenIndex168, depth168
					}
					{
						// Capture the single rune as rulePegText.
						position169 := position
						depth++
						if !matchDot() {
							goto l159
						}
						depth--
						add(rulePegText, position169)
					}
					{
						add(ruleAction31, position)
					}
				}
			l161:
				depth--
				add(ruleDoubleChar, position160)
			}
			return true
		l159: // no alternative matched: restore state
			position, tokenIndex, depth = position159, tokenIndex159, depth159
			return false
		},
2099 | /* 19 Escape <- <(('\\' ('a' / 'A') Action32) / ('\\' ('b' / 'B') Action33) / ('\\' ('e' / 'E') Action34) / ('\\' ('f' / 'F') Action35) / ('\\' ('n' / 'N') Action36) / ('\\' ('r' / 'R') Action37) / ('\\' ('t' / 'T') Action38) / ('\\' ('v' / 'V') Action39) / ('\\' '\'' Action40) / ('\\' '"' Action41) / ('\\' '[' Action42) / ('\\' ']' Action43) / ('\\' '-' Action44) / ('\\' ('0' ('x' / 'X')) <((&('A' | 'B' | 'C' | 'D' | 'E' | 'F') [A-F]) | (&('a' | 'b' | 'c' | 'd' | 'e' | 'f') [a-f]) | (&('0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9') [0-9]))+> Action45) / ('\\' <([0-3] [0-7] [0-7])> Action46) / ('\\' <([0-7] [0-7]?)> Action47) / ('\\' '\\' Action48))> */ | |
2100 | func() bool { | |
2101 | position171, tokenIndex171, depth171 := position, tokenIndex, depth | |
2102 | { | |
2103 | position172 := position | |
2104 | depth++ | |
2105 | { | |
2106 | position173, tokenIndex173, depth173 := position, tokenIndex, depth | |
2107 | if buffer[position] != rune('\\') { | |
2108 | goto l174 | |
2109 | } | |
2110 | position++ | |
2111 | { | |
2112 | position175, tokenIndex175, depth175 := position, tokenIndex, depth | |
2113 | if buffer[position] != rune('a') { | |
2114 | goto l176 | |
2115 | } | |
2116 | position++ | |
2117 | goto l175 | |
2118 | l176: | |
2119 | position, tokenIndex, depth = position175, tokenIndex175, depth175 | |
2120 | if buffer[position] != rune('A') { | |
2121 | goto l174 | |
2122 | } | |
2123 | position++ | |
2124 | } | |
2125 | l175: | |
2126 | { | |
2127 | add(ruleAction32, position) | |
2128 | } | |
2129 | goto l173 | |
2130 | l174: | |
2131 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2132 | if buffer[position] != rune('\\') { | |
2133 | goto l178 | |
2134 | } | |
2135 | position++ | |
2136 | { | |
2137 | position179, tokenIndex179, depth179 := position, tokenIndex, depth | |
2138 | if buffer[position] != rune('b') { | |
2139 | goto l180 | |
2140 | } | |
2141 | position++ | |
2142 | goto l179 | |
2143 | l180: | |
2144 | position, tokenIndex, depth = position179, tokenIndex179, depth179 | |
2145 | if buffer[position] != rune('B') { | |
2146 | goto l178 | |
2147 | } | |
2148 | position++ | |
2149 | } | |
2150 | l179: | |
2151 | { | |
2152 | add(ruleAction33, position) | |
2153 | } | |
2154 | goto l173 | |
2155 | l178: | |
2156 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2157 | if buffer[position] != rune('\\') { | |
2158 | goto l182 | |
2159 | } | |
2160 | position++ | |
2161 | { | |
2162 | position183, tokenIndex183, depth183 := position, tokenIndex, depth | |
2163 | if buffer[position] != rune('e') { | |
2164 | goto l184 | |
2165 | } | |
2166 | position++ | |
2167 | goto l183 | |
2168 | l184: | |
2169 | position, tokenIndex, depth = position183, tokenIndex183, depth183 | |
2170 | if buffer[position] != rune('E') { | |
2171 | goto l182 | |
2172 | } | |
2173 | position++ | |
2174 | } | |
2175 | l183: | |
2176 | { | |
2177 | add(ruleAction34, position) | |
2178 | } | |
2179 | goto l173 | |
2180 | l182: | |
2181 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2182 | if buffer[position] != rune('\\') { | |
2183 | goto l186 | |
2184 | } | |
2185 | position++ | |
2186 | { | |
2187 | position187, tokenIndex187, depth187 := position, tokenIndex, depth | |
2188 | if buffer[position] != rune('f') { | |
2189 | goto l188 | |
2190 | } | |
2191 | position++ | |
2192 | goto l187 | |
2193 | l188: | |
2194 | position, tokenIndex, depth = position187, tokenIndex187, depth187 | |
2195 | if buffer[position] != rune('F') { | |
2196 | goto l186 | |
2197 | } | |
2198 | position++ | |
2199 | } | |
2200 | l187: | |
2201 | { | |
2202 | add(ruleAction35, position) | |
2203 | } | |
2204 | goto l173 | |
2205 | l186: | |
2206 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2207 | if buffer[position] != rune('\\') { | |
2208 | goto l190 | |
2209 | } | |
2210 | position++ | |
2211 | { | |
2212 | position191, tokenIndex191, depth191 := position, tokenIndex, depth | |
2213 | if buffer[position] != rune('n') { | |
2214 | goto l192 | |
2215 | } | |
2216 | position++ | |
2217 | goto l191 | |
2218 | l192: | |
2219 | position, tokenIndex, depth = position191, tokenIndex191, depth191 | |
2220 | if buffer[position] != rune('N') { | |
2221 | goto l190 | |
2222 | } | |
2223 | position++ | |
2224 | } | |
2225 | l191: | |
2226 | { | |
2227 | add(ruleAction36, position) | |
2228 | } | |
2229 | goto l173 | |
2230 | l190: | |
2231 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2232 | if buffer[position] != rune('\\') { | |
2233 | goto l194 | |
2234 | } | |
2235 | position++ | |
2236 | { | |
2237 | position195, tokenIndex195, depth195 := position, tokenIndex, depth | |
2238 | if buffer[position] != rune('r') { | |
2239 | goto l196 | |
2240 | } | |
2241 | position++ | |
2242 | goto l195 | |
2243 | l196: | |
2244 | position, tokenIndex, depth = position195, tokenIndex195, depth195 | |
2245 | if buffer[position] != rune('R') { | |
2246 | goto l194 | |
2247 | } | |
2248 | position++ | |
2249 | } | |
2250 | l195: | |
2251 | { | |
2252 | add(ruleAction37, position) | |
2253 | } | |
2254 | goto l173 | |
2255 | l194: | |
2256 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2257 | if buffer[position] != rune('\\') { | |
2258 | goto l198 | |
2259 | } | |
2260 | position++ | |
2261 | { | |
2262 | position199, tokenIndex199, depth199 := position, tokenIndex, depth | |
2263 | if buffer[position] != rune('t') { | |
2264 | goto l200 | |
2265 | } | |
2266 | position++ | |
2267 | goto l199 | |
2268 | l200: | |
2269 | position, tokenIndex, depth = position199, tokenIndex199, depth199 | |
2270 | if buffer[position] != rune('T') { | |
2271 | goto l198 | |
2272 | } | |
2273 | position++ | |
2274 | } | |
2275 | l199: | |
2276 | { | |
2277 | add(ruleAction38, position) | |
2278 | } | |
2279 | goto l173 | |
2280 | l198: | |
2281 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2282 | if buffer[position] != rune('\\') { | |
2283 | goto l202 | |
2284 | } | |
2285 | position++ | |
2286 | { | |
2287 | position203, tokenIndex203, depth203 := position, tokenIndex, depth | |
2288 | if buffer[position] != rune('v') { | |
2289 | goto l204 | |
2290 | } | |
2291 | position++ | |
2292 | goto l203 | |
2293 | l204: | |
2294 | position, tokenIndex, depth = position203, tokenIndex203, depth203 | |
2295 | if buffer[position] != rune('V') { | |
2296 | goto l202 | |
2297 | } | |
2298 | position++ | |
2299 | } | |
2300 | l203: | |
2301 | { | |
2302 | add(ruleAction39, position) | |
2303 | } | |
2304 | goto l173 | |
2305 | l202: | |
2306 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2307 | if buffer[position] != rune('\\') { | |
2308 | goto l206 | |
2309 | } | |
2310 | position++ | |
2311 | if buffer[position] != rune('\'') { | |
2312 | goto l206 | |
2313 | } | |
2314 | position++ | |
2315 | { | |
2316 | add(ruleAction40, position) | |
2317 | } | |
2318 | goto l173 | |
2319 | l206: | |
2320 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2321 | if buffer[position] != rune('\\') { | |
2322 | goto l208 | |
2323 | } | |
2324 | position++ | |
2325 | if buffer[position] != rune('"') { | |
2326 | goto l208 | |
2327 | } | |
2328 | position++ | |
2329 | { | |
2330 | add(ruleAction41, position) | |
2331 | } | |
2332 | goto l173 | |
2333 | l208: | |
2334 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2335 | if buffer[position] != rune('\\') { | |
2336 | goto l210 | |
2337 | } | |
2338 | position++ | |
2339 | if buffer[position] != rune('[') { | |
2340 | goto l210 | |
2341 | } | |
2342 | position++ | |
2343 | { | |
2344 | add(ruleAction42, position) | |
2345 | } | |
2346 | goto l173 | |
2347 | l210: | |
2348 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2349 | if buffer[position] != rune('\\') { | |
2350 | goto l212 | |
2351 | } | |
2352 | position++ | |
2353 | if buffer[position] != rune(']') { | |
2354 | goto l212 | |
2355 | } | |
2356 | position++ | |
2357 | { | |
2358 | add(ruleAction43, position) | |
2359 | } | |
2360 | goto l173 | |
2361 | l212: | |
2362 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2363 | if buffer[position] != rune('\\') { | |
2364 | goto l214 | |
2365 | } | |
2366 | position++ | |
2367 | if buffer[position] != rune('-') { | |
2368 | goto l214 | |
2369 | } | |
2370 | position++ | |
2371 | { | |
2372 | add(ruleAction44, position) | |
2373 | } | |
2374 | goto l173 | |
2375 | l214: | |
2376 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2377 | if buffer[position] != rune('\\') { | |
2378 | goto l216 | |
2379 | } | |
2380 | position++ | |
2381 | if buffer[position] != rune('0') { | |
2382 | goto l216 | |
2383 | } | |
2384 | position++ | |
2385 | { | |
2386 | position217, tokenIndex217, depth217 := position, tokenIndex, depth | |
2387 | if buffer[position] != rune('x') { | |
2388 | goto l218 | |
2389 | } | |
2390 | position++ | |
2391 | goto l217 | |
2392 | l218: | |
2393 | position, tokenIndex, depth = position217, tokenIndex217, depth217 | |
2394 | if buffer[position] != rune('X') { | |
2395 | goto l216 | |
2396 | } | |
2397 | position++ | |
2398 | } | |
2399 | l217: | |
2400 | { | |
2401 | position219 := position | |
2402 | depth++ | |
2403 | { | |
2404 | switch buffer[position] { | |
2405 | case 'A', 'B', 'C', 'D', 'E', 'F': | |
2406 | if c := buffer[position]; c < rune('A') || c > rune('F') { | |
2407 | goto l216 | |
2408 | } | |
2409 | position++ | |
2410 | break | |
2411 | case 'a', 'b', 'c', 'd', 'e', 'f': | |
2412 | if c := buffer[position]; c < rune('a') || c > rune('f') { | |
2413 | goto l216 | |
2414 | } | |
2415 | position++ | |
2416 | break | |
2417 | default: | |
2418 | if c := buffer[position]; c < rune('0') || c > rune('9') { | |
2419 | goto l216 | |
2420 | } | |
2421 | position++ | |
2422 | break | |
2423 | } | |
2424 | } | |
2425 | ||
2426 | l220: | |
2427 | { | |
2428 | position221, tokenIndex221, depth221 := position, tokenIndex, depth | |
2429 | { | |
2430 | switch buffer[position] { | |
2431 | case 'A', 'B', 'C', 'D', 'E', 'F': | |
2432 | if c := buffer[position]; c < rune('A') || c > rune('F') { | |
2433 | goto l221 | |
2434 | } | |
2435 | position++ | |
2436 | break | |
2437 | case 'a', 'b', 'c', 'd', 'e', 'f': | |
2438 | if c := buffer[position]; c < rune('a') || c > rune('f') { | |
2439 | goto l221 | |
2440 | } | |
2441 | position++ | |
2442 | break | |
2443 | default: | |
2444 | if c := buffer[position]; c < rune('0') || c > rune('9') { | |
2445 | goto l221 | |
2446 | } | |
2447 | position++ | |
2448 | break | |
2449 | } | |
2450 | } | |
2451 | ||
2452 | goto l220 | |
2453 | l221: | |
2454 | position, tokenIndex, depth = position221, tokenIndex221, depth221 | |
2455 | } | |
2456 | depth-- | |
2457 | add(rulePegText, position219) | |
2458 | } | |
2459 | { | |
2460 | add(ruleAction45, position) | |
2461 | } | |
2462 | goto l173 | |
2463 | l216: | |
2464 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2465 | if buffer[position] != rune('\\') { | |
2466 | goto l225 | |
2467 | } | |
2468 | position++ | |
2469 | { | |
2470 | position226 := position | |
2471 | depth++ | |
2472 | if c := buffer[position]; c < rune('0') || c > rune('3') { | |
2473 | goto l225 | |
2474 | } | |
2475 | position++ | |
2476 | if c := buffer[position]; c < rune('0') || c > rune('7') { | |
2477 | goto l225 | |
2478 | } | |
2479 | position++ | |
2480 | if c := buffer[position]; c < rune('0') || c > rune('7') { | |
2481 | goto l225 | |
2482 | } | |
2483 | position++ | |
2484 | depth-- | |
2485 | add(rulePegText, position226) | |
2486 | } | |
2487 | { | |
2488 | add(ruleAction46, position) | |
2489 | } | |
2490 | goto l173 | |
2491 | l225: | |
2492 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2493 | if buffer[position] != rune('\\') { | |
2494 | goto l228 | |
2495 | } | |
2496 | position++ | |
2497 | { | |
2498 | position229 := position | |
2499 | depth++ | |
2500 | if c := buffer[position]; c < rune('0') || c > rune('7') { | |
2501 | goto l228 | |
2502 | } | |
2503 | position++ | |
2504 | { | |
2505 | position230, tokenIndex230, depth230 := position, tokenIndex, depth | |
2506 | if c := buffer[position]; c < rune('0') || c > rune('7') { | |
2507 | goto l230 | |
2508 | } | |
2509 | position++ | |
2510 | goto l231 | |
2511 | l230: | |
2512 | position, tokenIndex, depth = position230, tokenIndex230, depth230 | |
2513 | } | |
2514 | l231: | |
2515 | depth-- | |
2516 | add(rulePegText, position229) | |
2517 | } | |
2518 | { | |
2519 | add(ruleAction47, position) | |
2520 | } | |
2521 | goto l173 | |
2522 | l228: | |
2523 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2524 | if buffer[position] != rune('\\') { | |
2525 | goto l171 | |
2526 | } | |
2527 | position++ | |
2528 | if buffer[position] != rune('\\') { | |
2529 | goto l171 | |
2530 | } | |
2531 | position++ | |
2532 | { | |
2533 | add(ruleAction48, position) | |
2534 | } | |
2535 | } | |
2536 | l173: | |
2537 | depth-- | |
2538 | add(ruleEscape, position172) | |
2539 | } | |
2540 | return true | |
2541 | l171: | |
2542 | position, tokenIndex, depth = position171, tokenIndex171, depth171 | |
2543 | return false | |
2544 | }, | |
		/* 20 LeftArrow <- <((('<' '-') / '←') Spacing)> */
		// NOTE(review): generated parser code (see Makefile: bootstrap.peg.go);
		// comments are for review only and will be lost on regeneration.
		func() bool {
			// Save the backtracking point for the whole rule.
			position234, tokenIndex234, depth234 := position, tokenIndex, depth
			{
				position235 := position
				depth++
				{
					// Ordered choice: ASCII "<-" or the Unicode arrow '←'.
					position236, tokenIndex236, depth236 := position, tokenIndex, depth
					if buffer[position] != rune('<') {
						goto l237
					}
					position++
					if buffer[position] != rune('-') {
						goto l237
					}
					position++
					goto l236
				l237:
					// First alternative failed: restore state and try '←'.
					position, tokenIndex, depth = position236, tokenIndex236, depth236
					if buffer[position] != rune('←') {
						goto l234
					}
					position++
				}
			l236:
				// Consume trailing whitespace/comments.
				if !_rules[ruleSpacing]() {
					goto l234
				}
				depth--
				add(ruleLeftArrow, position235)
			}
			return true
		l234:
			// Rule failed: restore the parser state saved on entry.
			position, tokenIndex, depth = position234, tokenIndex234, depth234
			return false
		},
		/* 21 Slash <- <('/' Spacing)> */
		// Matches the alternation separator '/' plus optional spacing.
		func() bool {
			position238, tokenIndex238, depth238 := position, tokenIndex, depth
			{
				position239 := position
				depth++
				if buffer[position] != rune('/') {
					goto l238
				}
				position++
				if !_rules[ruleSpacing]() {
					goto l238
				}
				depth--
				add(ruleSlash, position239)
			}
			return true
		l238:
			// Backtrack on failure.
			position, tokenIndex, depth = position238, tokenIndex238, depth238
			return false
		},
		/* 22 And <- <('&' Spacing)> */
		// Matches the and-predicate operator '&' plus optional spacing.
		func() bool {
			position240, tokenIndex240, depth240 := position, tokenIndex, depth
			{
				position241 := position
				depth++
				if buffer[position] != rune('&') {
					goto l240
				}
				position++
				if !_rules[ruleSpacing]() {
					goto l240
				}
				depth--
				add(ruleAnd, position241)
			}
			return true
		l240:
			// Backtrack on failure.
			position, tokenIndex, depth = position240, tokenIndex240, depth240
			return false
		},
		/* 23 Not <- <('!' Spacing)> */
		// Matches the not-predicate operator '!' plus optional spacing.
		func() bool {
			position242, tokenIndex242, depth242 := position, tokenIndex, depth
			{
				position243 := position
				depth++
				if buffer[position] != rune('!') {
					goto l242
				}
				position++
				if !_rules[ruleSpacing]() {
					goto l242
				}
				depth--
				add(ruleNot, position243)
			}
			return true
		l242:
			// Backtrack on failure.
			position, tokenIndex, depth = position242, tokenIndex242, depth242
			return false
		},
		/* 24 Question <- <('?' Spacing)> */
		// nil entries: presumably these simple rules are matched inline at
		// their call sites, so no rule function is emitted — confirm with
		// the generator if relying on _rules for them.
		nil,
		/* 25 Star <- <('*' Spacing)> */
		nil,
		/* 26 Plus <- <('+' Spacing)> */
		nil,
		/* 27 Open <- <('(' Spacing)> */
		nil,
		/* 28 Close <- <(')' Spacing)> */
		nil,
		/* 29 Dot <- <('.' Spacing)> */
		nil,
		/* 30 SpaceComment <- <(Space / Comment)> */
		// Matches a single whitespace token or a '#'-to-end-of-line comment.
		func() bool {
			position250, tokenIndex250, depth250 := position, tokenIndex, depth
			{
				position251 := position
				depth++
				{
					position252, tokenIndex252, depth252 := position, tokenIndex, depth
					{
						// First alternative: Space (tab, blank, or end of line).
						position254 := position
						depth++
						{
							switch buffer[position] {
							case '\t':
								if buffer[position] != rune('\t') {
									goto l253
								}
								position++
								break
							case ' ':
								if buffer[position] != rune(' ') {
									goto l253
								}
								position++
								break
							default:
								if !_rules[ruleEndOfLine]() {
									goto l253
								}
								break
							}
						}

						depth--
						add(ruleSpace, position254)
					}
					goto l252
				l253:
					// Space failed: backtrack and try Comment.
					position, tokenIndex, depth = position252, tokenIndex252, depth252
					{
						position256 := position
						depth++
						if buffer[position] != rune('#') {
							goto l250
						}
						position++
					l257:
						{
							// Consume characters up to (but not including) EndOfLine.
							position258, tokenIndex258, depth258 := position, tokenIndex, depth
							{
								// Negative lookahead for EndOfLine.
								position259, tokenIndex259, depth259 := position, tokenIndex, depth
								if !_rules[ruleEndOfLine]() {
									goto l259
								}
								goto l258
							l259:
								position, tokenIndex, depth = position259, tokenIndex259, depth259
							}
							if !matchDot() {
								goto l258
							}
							goto l257
						l258:
							position, tokenIndex, depth = position258, tokenIndex258, depth258
						}
						// A comment must be terminated by an end of line.
						if !_rules[ruleEndOfLine]() {
							goto l250
						}
						depth--
						add(ruleComment, position256)
					}
				}
			l252:
				depth--
				add(ruleSpaceComment, position251)
			}
			return true
		l250:
			// Backtrack on failure.
			position, tokenIndex, depth = position250, tokenIndex250, depth250
			return false
		},
		/* 31 Spacing <- <SpaceComment*> */
		// Zero-or-more SpaceComment; always succeeds, so no backtrack label.
		func() bool {
			{
				position261 := position
				depth++
			l262:
				{
					position263, tokenIndex263, depth263 := position, tokenIndex, depth
					if !_rules[ruleSpaceComment]() {
						goto l263
					}
					goto l262
				l263:
					position, tokenIndex, depth = position263, tokenIndex263, depth263
				}
				depth--
				add(ruleSpacing, position261)
			}
			return true
		},
		/* 32 MustSpacing <- <SpaceComment+> */
		// One-or-more SpaceComment: the first match is mandatory, the rest
		// are consumed greedily.
		func() bool {
			position264, tokenIndex264, depth264 := position, tokenIndex, depth
			{
				position265 := position
				depth++
				if !_rules[ruleSpaceComment]() {
					goto l264
				}
			l266:
				{
					position267, tokenIndex267, depth267 := position, tokenIndex, depth
					if !_rules[ruleSpaceComment]() {
						goto l267
					}
					goto l266
				l267:
					position, tokenIndex, depth = position267, tokenIndex267, depth267
				}
				depth--
				add(ruleMustSpacing, position265)
			}
			return true
		l264:
			// Backtrack on failure.
			position, tokenIndex, depth = position264, tokenIndex264, depth264
			return false
		},
		/* 33 Comment <- <('#' (!EndOfLine .)* EndOfLine)> */
		// nil entries: Comment and Space are matched inline by SpaceComment above.
		nil,
		/* 34 Space <- <((&('\t') '\t') | (&(' ') ' ') | (&('\n' | '\r') EndOfLine))> */
		nil,
		/* 35 EndOfLine <- <(('\r' '\n') / '\n' / '\r')> */
		// Ordered choice: CRLF is tried first so a CR followed by LF is
		// consumed as a single line ending.
		func() bool {
			position270, tokenIndex270, depth270 := position, tokenIndex, depth
			{
				position271 := position
				depth++
				{
					position272, tokenIndex272, depth272 := position, tokenIndex, depth
					if buffer[position] != rune('\r') {
						goto l273
					}
					position++
					if buffer[position] != rune('\n') {
						goto l273
					}
					position++
					goto l272
				l273:
					// CRLF failed: try a bare LF.
					position, tokenIndex, depth = position272, tokenIndex272, depth272
					if buffer[position] != rune('\n') {
						goto l274
					}
					position++
					goto l272
				l274:
					// LF failed: try a bare CR.
					position, tokenIndex, depth = position272, tokenIndex272, depth272
					if buffer[position] != rune('\r') {
						goto l270
					}
					position++
				}
			l272:
				depth--
				add(ruleEndOfLine, position271)
			}
			return true
		l270:
			// Backtrack on failure.
			position, tokenIndex, depth = position270, tokenIndex270, depth270
			return false
		},
		/* 36 EndOfFile <- <!.> */
		// nil entry: end-of-file (negative lookahead on any char) is matched inline.
		nil,
		/* 37 Action <- <('{' <ActionBody*> '}' Spacing)> */
		// Matches a braced Go action block, capturing the interior between
		// the outer braces as rulePegText.
		func() bool {
			position276, tokenIndex276, depth276 := position, tokenIndex, depth
			{
				position277 := position
				depth++
				if buffer[position] != rune('{') {
					goto l276
				}
				position++
				{
					// Capture span: zero or more ActionBody tokens.
					position278 := position
					depth++
				l279:
					{
						position280, tokenIndex280, depth280 := position, tokenIndex, depth
						if !_rules[ruleActionBody]() {
							goto l280
						}
						goto l279
					l280:
						position, tokenIndex, depth = position280, tokenIndex280, depth280
					}
					depth--
					add(rulePegText, position278)
				}
				if buffer[position] != rune('}') {
					goto l276
				}
				position++
				if !_rules[ruleSpacing]() {
					goto l276
				}
				depth--
				add(ruleAction, position277)
			}
			return true
		l276:
			// Backtrack on failure.
			position, tokenIndex, depth = position276, tokenIndex276, depth276
			return false
		},
		/* 38 ActionBody <- <((!('{' / '}') .) / ('{' ActionBody* '}'))> */
		// One token of an action body: either any character that is not a
		// brace, or a balanced nested brace group (via recursion).
		func() bool {
			position281, tokenIndex281, depth281 := position, tokenIndex, depth
			{
				position282 := position
				depth++
				{
					position283, tokenIndex283, depth283 := position, tokenIndex, depth
					{
						// Negative lookahead: fail this alternative on '{' or '}'.
						position285, tokenIndex285, depth285 := position, tokenIndex, depth
						{
							position286, tokenIndex286, depth286 := position, tokenIndex, depth
							if buffer[position] != rune('{') {
								goto l287
							}
							position++
							goto l286
						l287:
							position, tokenIndex, depth = position286, tokenIndex286, depth286
							if buffer[position] != rune('}') {
								goto l285
							}
							position++
						}
					l286:
						goto l284
					l285:
						position, tokenIndex, depth = position285, tokenIndex285, depth285
					}
					if !matchDot() {
						goto l284
					}
					goto l283
				l284:
					// Second alternative: '{' ActionBody* '}' (nested group).
					position, tokenIndex, depth = position283, tokenIndex283, depth283
					if buffer[position] != rune('{') {
						goto l281
					}
					position++
				l288:
					{
						position289, tokenIndex289, depth289 := position, tokenIndex, depth
						if !_rules[ruleActionBody]() {
							goto l289
						}
						goto l288
					l289:
						position, tokenIndex, depth = position289, tokenIndex289, depth289
					}
					if buffer[position] != rune('}') {
						goto l281
					}
					position++
				}
			l283:
				depth--
				add(ruleActionBody, position282)
			}
			return true
		l281:
			// Backtrack on failure.
			position, tokenIndex, depth = position281, tokenIndex281, depth281
			return false
		},
		/* 39 Begin <- <('<' Spacing)> */
		// nil entries: Begin/End and the semantic actions below are invoked
		// inline by the rules above, so no standalone rule functions exist.
		nil,
		/* 40 End <- <('>' Spacing)> */
		nil,
		/* 42 Action0 <- <{ p.AddPackage(text) }> */
		nil,
		/* 43 Action1 <- <{ p.AddPeg(text) }> */
		nil,
		/* 44 Action2 <- <{ p.AddState(text) }> */
		nil,
		// NOTE(review): bare nil with no comment — presumably the rule between
		// 44 and 46 (index 45, PegText?); confirm against the grammar listing.
		nil,
		/* 46 Action3 <- <{ p.AddImport(text) }> */
		nil,
		/* 47 Action4 <- <{ p.AddRule(text) }> */
		nil,
		/* 48 Action5 <- <{ p.AddExpression() }> */
		nil,
		/* 49 Action6 <- <{ p.AddAlternate() }> */
		nil,
		/* 50 Action7 <- <{ p.AddNil(); p.AddAlternate() }> */
		nil,
		/* 51 Action8 <- <{ p.AddNil() }> */
		nil,
		/* 52 Action9 <- <{ p.AddSequence() }> */
		nil,
		/* 53 Action10 <- <{ p.AddPredicate(text) }> */
		nil,
		/* 54 Action11 <- <{ p.AddStateChange(text) }> */
		nil,
		/* 55 Action12 <- <{ p.AddPeekFor() }> */
		nil,
		/* 56 Action13 <- <{ p.AddPeekNot() }> */
		nil,
		/* 57 Action14 <- <{ p.AddQuery() }> */
		nil,
		/* 58 Action15 <- <{ p.AddStar() }> */
		nil,
		/* 59 Action16 <- <{ p.AddPlus() }> */
		nil,
		/* 60 Action17 <- <{ p.AddName(text) }> */
		nil,
		/* 61 Action18 <- <{ p.AddDot() }> */
		nil,
		/* 62 Action19 <- <{ p.AddAction(text) }> */
		nil,
		/* 63 Action20 <- <{ p.AddPush() }> */
		nil,
		/* 64 Action21 <- <{ p.AddSequence() }> */
		nil,
		/* 65 Action22 <- <{ p.AddSequence() }> */
		nil,
		/* 66 Action23 <- <{ p.AddPeekNot(); p.AddDot(); p.AddSequence() }> */
		nil,
		/* 67 Action24 <- <{ p.AddPeekNot(); p.AddDot(); p.AddSequence() }> */
		nil,
		/* 68 Action25 <- <{ p.AddAlternate() }> */
		nil,
		/* 69 Action26 <- <{ p.AddAlternate() }> */
		nil,
		/* 70 Action27 <- <{ p.AddRange() }> */
		nil,
		/* 71 Action28 <- <{ p.AddDoubleRange() }> */
		nil,
		/* 72 Action29 <- <{ p.AddCharacter(text) }> */
		nil,
		/* 73 Action30 <- <{ p.AddDoubleCharacter(text) }> */
		nil,
		/* 74 Action31 <- <{ p.AddCharacter(text) }> */
		nil,
		/* 75 Action32 <- <{ p.AddCharacter("\a") }> */
		nil,
		/* 76 Action33 <- <{ p.AddCharacter("\b") }> */
		nil,
		/* 77 Action34 <- <{ p.AddCharacter("\x1B") }> */
		nil,
		/* 78 Action35 <- <{ p.AddCharacter("\f") }> */
		nil,
		/* 79 Action36 <- <{ p.AddCharacter("\n") }> */
		nil,
		/* 80 Action37 <- <{ p.AddCharacter("\r") }> */
		nil,
		/* 81 Action38 <- <{ p.AddCharacter("\t") }> */
		nil,
		/* 82 Action39 <- <{ p.AddCharacter("\v") }> */
		nil,
		/* 83 Action40 <- <{ p.AddCharacter("'") }> */
		nil,
		/* 84 Action41 <- <{ p.AddCharacter("\"") }> */
		nil,
		/* 85 Action42 <- <{ p.AddCharacter("[") }> */
		nil,
		/* 86 Action43 <- <{ p.AddCharacter("]") }> */
		nil,
		/* 87 Action44 <- <{ p.AddCharacter("-") }> */
		nil,
		/* 88 Action45 <- <{ p.AddHexaCharacter(text) }> */
		nil,
		/* 89 Action46 <- <{ p.AddOctalCharacter(text) }> */
		nil,
		/* 90 Action47 <- <{ p.AddOctalCharacter(text) }> */
		nil,
		/* 91 Action48 <- <{ p.AddCharacter("\\") }> */
		nil,
3037 | } | |
3038 | p.rules = _rules | |
3039 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | // +build ignore | |
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "flag" | |
10 | "fmt" | |
11 | "io/ioutil" | |
12 | "os" | |
13 | "os/exec" | |
14 | "path/filepath" | |
15 | "reflect" | |
16 | "runtime" | |
17 | "strings" | |
18 | ) | |
19 | ||
20 | func main() { | |
21 | flag.Parse() | |
22 | ||
23 | args, target := flag.Args(), "peg" | |
24 | if len(args) > 0 { | |
25 | target = args[0] | |
26 | } | |
27 | ||
28 | switch target { | |
29 | case "peg": | |
30 | peg() | |
31 | case "clean": | |
32 | clean() | |
33 | case "test": | |
34 | test() | |
35 | case "bench": | |
36 | bench() | |
37 | case "help": | |
38 | fmt.Println("go run build.go [target]") | |
39 | fmt.Println(" peg - build peg from scratch") | |
40 | fmt.Println(" clean - clean up") | |
41 | fmt.Println(" test - run full test") | |
42 | fmt.Println(" bench - run benchmark") | |
43 | } | |
44 | } | |
45 | ||
// processed memoizes build-step closures by function name so each step
// runs at most once per invocation.
var processed = make(map[string]bool)

// done reports whether file is up to date.  Each dep is either a path
// (the target is stale if the dep is newer) or a func() bool build step
// whose result is folded into the answer (and memoized in processed).
func done(file string, deps ...interface{}) bool {
	upToDate := true
	file = filepath.FromSlash(file)
	info, err := os.Stat(file)
	if err != nil {
		// Target missing: definitely not done.
		upToDate = false
	}
	for _, dep := range deps {
		switch d := dep.(type) {
		case string:
			if info == nil {
				// No target to compare against; skip the stat.
				upToDate = false
				continue
			}
			depInfo, err := os.Stat(filepath.FromSlash(d))
			if err != nil {
				panic(err)
			}
			if depInfo.ModTime().After(info.ModTime()) {
				// Dependency is newer than the target.
				upToDate = false
			}
		case func() bool:
			name := runtime.FuncForPC(reflect.ValueOf(d).Pointer()).Name()
			if cached, ok := processed[name]; ok {
				upToDate = upToDate && cached
				fmt.Printf("%s is done\n", name)
				continue
			}
			result := d()
			upToDate = upToDate && result
			fmt.Printf("%s\n", name)
			processed[name] = result
		}
	}

	return upToDate
}
87 | ||
// chdir changes the working directory to dir (given slash-separated),
// echoes the equivalent shell command, and returns the previous
// directory so the caller can defer a change back.  Panics on failure.
func chdir(dir string) string {
	dir = filepath.FromSlash(dir)
	previous, err := os.Getwd()
	if err != nil {
		panic(err)
	}
	if err := os.Chdir(dir); err != nil {
		panic(err)
	}
	fmt.Printf("cd %s\n", dir)
	return previous
}
101 | ||
// command runs name with args, echoing an equivalent shell command line.
// If inputFile is non-empty its contents are fed to the process on stdin;
// if outputFile is non-empty the process's stdout is written there
// (mode 0600), otherwise combined stdout+stderr is echoed.  Any failure
// panics (including, via the goroutine, a failed stdin write).
func command(name, inputFile, outputFile string, arg ...string) {
	name = filepath.FromSlash(name)
	inputFile = filepath.FromSlash(inputFile)
	outputFile = filepath.FromSlash(outputFile)
	fmt.Print(name)
	for _, a := range arg {
		fmt.Printf(" %s", a)
	}

	cmd := exec.Command(name, arg...)

	if inputFile != "" {
		fmt.Printf(" < %s", inputFile)
		input, err := ioutil.ReadFile(inputFile)
		if err != nil {
			panic(err)
		}
		writer, err := cmd.StdinPipe()
		if err != nil {
			panic(err)
		}
		// Write stdin from a goroutine so feeding input does not block
		// the Output/CombinedOutput call below from draining stdout.
		go func() {
			defer writer.Close()
			_, err := writer.Write([]byte(input))
			if err != nil {
				panic(err)
			}
		}()
	}

	if outputFile != "" {
		// Redirect stdout to the file; stderr is not captured here.
		fmt.Printf(" > %s\n", outputFile)
		output, err := cmd.Output()
		if err != nil {
			panic(err)
		}
		err = ioutil.WriteFile(outputFile, output, 0600)
		if err != nil {
			panic(err)
		}
	} else {
		// No redirection: echo whatever the command printed.
		output, err := cmd.CombinedOutput()
		fmt.Printf("\n%s", string(output))
		if err != nil {
			panic(err)
		}
	}
}
150 | ||
// delete removes file (given slash-separated), ignoring any error, and
// echoes the equivalent "rm -f" shell command.  Note: this package-level
// name shadows the builtin delete within this file.
func delete(file string) {
	path := filepath.FromSlash(file)
	fmt.Printf("rm -f %s\n", path)
	os.Remove(path)
}
156 | ||
157 | func deleteFilesWithSuffix(suffix string) { | |
158 | files, err := ioutil.ReadDir(".") | |
159 | if err != nil { | |
160 | panic(err) | |
161 | } | |
162 | for _, file := range files { | |
163 | if strings.HasSuffix(file.Name(), suffix) { | |
164 | delete(file.Name()) | |
165 | } | |
166 | } | |
167 | } | |
168 | ||
169 | func bootstrap() bool { | |
170 | if done("bootstrap/bootstrap", "bootstrap/main.go", "tree/peg.go") { | |
171 | return true | |
172 | } | |
173 | ||
174 | wd := chdir("bootstrap") | |
175 | defer chdir(wd) | |
176 | ||
177 | command("go", "", "", "build") | |
178 | ||
179 | return false | |
180 | } | |
181 | ||
182 | func peg0() bool { | |
183 | if done("cmd/peg-bootstrap/peg0", "cmd/peg-bootstrap/main.go", bootstrap) { | |
184 | return true | |
185 | } | |
186 | ||
187 | wd := chdir("cmd/peg-bootstrap/") | |
188 | defer chdir(wd) | |
189 | ||
190 | deleteFilesWithSuffix(".peg.go") | |
191 | command("../../bootstrap/bootstrap", "", "") | |
192 | command("go", "", "", "build", "-tags", "bootstrap", "-o", "peg0") | |
193 | ||
194 | return false | |
195 | } | |
196 | ||
197 | func peg1() bool { | |
198 | if done("cmd/peg-bootstrap/peg1", peg0, "cmd/peg-bootstrap/bootstrap.peg") { | |
199 | return true | |
200 | } | |
201 | ||
202 | wd := chdir("cmd/peg-bootstrap/") | |
203 | defer chdir(wd) | |
204 | ||
205 | deleteFilesWithSuffix(".peg.go") | |
206 | command("./peg0", "bootstrap.peg", "peg1.peg.go") | |
207 | command("go", "", "", "build", "-tags", "bootstrap", "-o", "peg1") | |
208 | ||
209 | return false | |
210 | } | |
211 | ||
212 | func peg2() bool { | |
213 | if done("cmd/peg-bootstrap/peg2", peg1, "cmd/peg-bootstrap/peg.bootstrap.peg") { | |
214 | return true | |
215 | } | |
216 | ||
217 | wd := chdir("cmd/peg-bootstrap/") | |
218 | defer chdir(wd) | |
219 | ||
220 | deleteFilesWithSuffix(".peg.go") | |
221 | command("./peg1", "peg.bootstrap.peg", "peg2.peg.go") | |
222 | command("go", "", "", "build", "-tags", "bootstrap", "-o", "peg2") | |
223 | ||
224 | return false | |
225 | } | |
226 | ||
227 | func peg3() bool { | |
228 | if done("cmd/peg-bootstrap/peg3", peg2, "peg.peg") { | |
229 | return true | |
230 | } | |
231 | ||
232 | wd := chdir("cmd/peg-bootstrap/") | |
233 | defer chdir(wd) | |
234 | ||
235 | deleteFilesWithSuffix(".peg.go") | |
236 | command("./peg2", "../../peg.peg", "peg3.peg.go") | |
237 | command("go", "", "", "build", "-tags", "bootstrap", "-o", "peg3") | |
238 | ||
239 | return false | |
240 | } | |
241 | ||
242 | func peg_bootstrap() bool { | |
243 | if done("cmd/peg-bootstrap/peg-bootstrap", peg3) { | |
244 | return true | |
245 | } | |
246 | ||
247 | wd := chdir("cmd/peg-bootstrap/") | |
248 | defer chdir(wd) | |
249 | ||
250 | deleteFilesWithSuffix(".peg.go") | |
251 | command("./peg3", "../../peg.peg", "peg-bootstrap.peg.go") | |
252 | command("go", "", "", "build", "-tags", "bootstrap", "-o", "peg-bootstrap") | |
253 | ||
254 | return false | |
255 | } | |
256 | ||
257 | func peg_peg_go() bool { | |
258 | if done("peg.peg.go", peg_bootstrap) { | |
259 | return true | |
260 | } | |
261 | ||
262 | command("cmd/peg-bootstrap/peg-bootstrap", "peg.peg", "peg.peg.go") | |
263 | command("go", "", "", "build") | |
264 | command("./peg", "", "", "-inline", "-switch", "peg.peg") | |
265 | ||
266 | return false | |
267 | } | |
268 | ||
269 | func peg() bool { | |
270 | if done("peg", peg_peg_go, "main.go") { | |
271 | return true | |
272 | } | |
273 | ||
274 | command("go", "", "", "build") | |
275 | ||
276 | return false | |
277 | } | |
278 | ||
279 | func clean() bool { | |
280 | delete("bootstrap/bootstrap") | |
281 | ||
282 | delete("grammars/c/c.peg.go") | |
283 | delete("grammars/calculator/calculator.peg.go") | |
284 | delete("grammars/fexl/fexl.peg.go") | |
285 | delete("grammars/java/java_1_7.peg.go") | |
286 | delete("grammars/long_test/long.peg.go") | |
287 | ||
288 | wd := chdir("cmd/peg-bootstrap/") | |
289 | defer chdir(wd) | |
290 | ||
291 | deleteFilesWithSuffix(".peg.go") | |
292 | delete("peg0") | |
293 | delete("peg1") | |
294 | delete("peg2") | |
295 | delete("peg3") | |
296 | delete("peg-bootstrap") | |
297 | ||
298 | return false | |
299 | } | |
300 | ||
301 | func grammars_c() bool { | |
302 | if done("grammars/c/c.peg.go", peg, "grammars/c/c.peg") { | |
303 | return true | |
304 | } | |
305 | ||
306 | wd := chdir("grammars/c/") | |
307 | defer chdir(wd) | |
308 | ||
309 | command("../../peg", "", "", "-switch", "-inline", "c.peg") | |
310 | ||
311 | return false | |
312 | } | |
313 | ||
314 | func grammars_calculator() bool { | |
315 | if done("grammars/calculator/calculator.peg.go", peg, "grammars/calculator/calculator.peg") { | |
316 | return true | |
317 | } | |
318 | ||
319 | wd := chdir("grammars/calculator/") | |
320 | defer chdir(wd) | |
321 | ||
322 | command("../../peg", "", "", "-switch", "-inline", "calculator.peg") | |
323 | ||
324 | return false | |
325 | } | |
326 | ||
327 | func grammars_fexl() bool { | |
328 | if done("grammars/fexl/fexl.peg.go", peg, "grammars/fexl/fexl.peg") { | |
329 | return true | |
330 | } | |
331 | ||
332 | wd := chdir("grammars/fexl/") | |
333 | defer chdir(wd) | |
334 | ||
335 | command("../../peg", "", "", "-switch", "-inline", "fexl.peg") | |
336 | ||
337 | return false | |
338 | } | |
339 | ||
340 | func grammars_java() bool { | |
341 | if done("grammars/java/java_1_7.peg.go", peg, "grammars/java/java_1_7.peg") { | |
342 | return true | |
343 | } | |
344 | ||
345 | wd := chdir("grammars/java/") | |
346 | defer chdir(wd) | |
347 | ||
348 | command("../../peg", "", "", "-switch", "-inline", "java_1_7.peg") | |
349 | ||
350 | return false | |
351 | } | |
352 | ||
353 | func grammars_long_test() bool { | |
354 | if done("grammars/long_test/long.peg.go", peg, "grammars/long_test/long.peg") { | |
355 | return true | |
356 | } | |
357 | ||
358 | wd := chdir("grammars/long_test/") | |
359 | defer chdir(wd) | |
360 | ||
361 | command("../../peg", "", "", "-switch", "-inline", "long.peg") | |
362 | ||
363 | return false | |
364 | } | |
365 | ||
366 | func test() bool { | |
367 | if done("", grammars_c, grammars_calculator, grammars_fexl, grammars_java, grammars_long_test) { | |
368 | return true | |
369 | } | |
370 | ||
371 | command("go", "", "", "test", "-short", "-tags", "grammars", "./...") | |
372 | ||
373 | return false | |
374 | } | |
375 | ||
376 | func bench() bool { | |
377 | peg() | |
378 | ||
379 | command("go", "", "", "test", "-benchmem", "-bench", ".") | |
380 | ||
381 | return false | |
382 | } |
0 | # Core bootstrap PE Grammar for peg language. | |
1 | # Adapted from peg.peg. | |
2 | ||
3 | Grammar <- Spacing { p.AddPackage("main") } | |
4 | { p.AddImport("github.com/pointlander/peg/tree") } | |
5 | { p.AddPeg("Peg"); p.AddState("*tree.Tree") } | |
6 | Action* Definition* !. | |
7 | ||
8 | Definition <- Identifier { p.AddRule(text) } | |
9 | LeftArrow Expression { p.AddExpression() } | |
10 | Expression <- Sequence (Slash Sequence { p.AddAlternate() } )* | |
11 | Sequence <- Prefix (Prefix { p.AddSequence() } )* | |
12 | Prefix <- '!' Suffix { p.AddPeekNot() } / Suffix | |
13 | Suffix <- Primary (Question { p.AddQuery() } | |
14 | / Star { p.AddStar() } )? | |
15 | Primary <- Identifier !LeftArrow { p.AddName(text) } | |
16 | / Open Expression Close | |
17 | / Literal / Class / Dot { p.AddDot() } | |
18 | / Action { p.AddAction(text) } | |
19 | / Begin Expression End { p.AddPush() } | |
20 | ||
21 | Identifier <- < Ident Ident* > Spacing | |
22 | Ident <- [A-Za-z] | |
23 | Literal <- ['] !['] Char (!['] Char { p.AddSequence() } )* ['] Spacing | |
24 | Class <- '[' Range (!']' Range { p.AddAlternate() } )* ']' Spacing | |
25 | Range <- Char '-' Char { p.AddRange() } / Char | |
26 | Char <- '\\0x' <[0-9a-f]*> { p.AddHexaCharacter(text) } | |
27 | / '\\\\' { p.AddCharacter("\\") } | |
28 | / !'\\' <.> { p.AddCharacter(text) } | |
29 | ||
30 | LeftArrow <- '<-' Spacing | |
31 | Slash <- '/' Spacing | |
32 | Question <- '?' Spacing | |
33 | Star <- '*' Spacing | |
34 | Open <- '(' Spacing | |
35 | Close <- ')' Spacing | |
36 | Dot <- '.' Spacing | |
37 | ||
38 | Spacing <- (Space / Comment)* | |
39 | Comment <- '#' (!EndOfLine .)* | |
40 | Space <- ' ' / '\0x9' / EndOfLine | |
41 | EndOfLine <- '\0xd\0xa' / '\0xa' / '\0xd' | |
42 | ||
43 | Action <- '{' < (![}].)* > '}' Spacing | |
44 | Begin <- '<' Spacing | |
45 | End <- '>' Spacing |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | // +build bootstrap | |
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "io/ioutil" | |
10 | "log" | |
11 | "os" | |
12 | ||
13 | "github.com/pointlander/peg/tree" | |
14 | ) | |
15 | ||
16 | func main() { | |
17 | buffer, err := ioutil.ReadAll(os.Stdin) | |
18 | if err != nil { | |
19 | log.Fatal(err) | |
20 | } | |
21 | p := &Peg{Tree: tree.New(false, false, false), Buffer: string(buffer)} | |
22 | p.Init(Pretty(true), Size(1<<15)) | |
23 | if err := p.Parse(); err != nil { | |
24 | log.Fatal(err) | |
25 | } | |
26 | p.Execute() | |
27 | p.Compile("boot.peg.go", os.Args, os.Stdout) | |
28 | } |
0 | # PE Grammar for bootstrap peg language | |
1 | # | |
2 | # Adapted from peg.peg. | |
3 | ||
4 | # Hierarchical syntax | |
5 | Grammar <- Spacing 'package' MustSpacing Identifier { p.AddPackage(text) } | |
6 | Import* | |
7 | 'type' MustSpacing Identifier { p.AddPeg(text) } | |
8 | 'Peg' Spacing Action { p.AddState(text) } | |
9 | Definition Definition* EndOfFile | |
10 | ||
11 | Import <- 'import' Spacing ["] < ([a-zA-Z_/.]/'-')([a-zA-Z_/.]/'-')* > ["] Spacing { p.AddImport(text) } | |
12 | ||
13 | Definition <- Identifier { p.AddRule(text) } | |
14 | LeftArrow Expression { p.AddExpression() } | |
15 | Expression <- Sequence (Slash Sequence { p.AddAlternate() } | |
16 | )* (Slash { p.AddNil(); p.AddAlternate() } | |
17 | )? | |
18 | / { p.AddNil() } | |
19 | Sequence <- Prefix (Prefix { p.AddSequence() } | |
20 | )* | |
21 | Prefix <- And Action { p.AddPredicate(text) } | |
22 | / Not Action { p.AddStateChange(text) } | |
23 | / And Suffix { p.AddPeekFor() } | |
24 | / Not Suffix { p.AddPeekNot() } | |
25 | / Suffix | |
26 | Suffix <- Primary (Question { p.AddQuery() } | |
27 | / Star { p.AddStar() } | |
28 | / Plus { p.AddPlus() } | |
29 | )? | |
30 | Primary <- Identifier !LeftArrow { p.AddName(text) } | |
31 | / Open Expression Close | |
32 | / Literal | |
33 | / Class | |
34 | / Dot { p.AddDot() } | |
35 | / Action { p.AddAction(text) } | |
36 | / Begin Expression End { p.AddPush() } | |
37 | ||
38 | # Lexical syntax | |
39 | ||
40 | Identifier <- < IdentStart IdentCont* > Spacing | |
41 | IdentStart <- [A-Za-z_] | |
42 | IdentCont <- IdentStart / [0-9] | |
43 | Literal <- ['] (!['] Char)? (!['] Char { p.AddSequence() } | |
44 | )* ['] Spacing | |
45 | / ["] (!["] DoubleChar)? (!["] DoubleChar { p.AddSequence() } | |
46 | )* ["] Spacing | |
47 | Class <- ( '[[' ( '^' DoubleRanges { p.AddPeekNot(); p.AddDot(); p.AddSequence() } | |
48 | / DoubleRanges )? | |
49 | ']]' | |
50 | / '[' ( '^' Ranges { p.AddPeekNot(); p.AddDot(); p.AddSequence() } | |
51 | / Ranges )? | |
52 | ']' ) | |
53 | Spacing | |
54 | Ranges <- !']' Range (!']' Range { p.AddAlternate() } | |
55 | )* | |
56 | DoubleRanges <- !']]' DoubleRange (!']]' DoubleRange { p.AddAlternate() } | |
57 | )* | |
58 | Range <- Char '-' Char { p.AddRange() } | |
59 | / Char | |
60 | DoubleRange <- Char '-' Char { p.AddDoubleRange() } | |
61 | / DoubleChar | |
62 | Char <- Escape | |
63 | / !'\\' <.> { p.AddCharacter(text) } | |
64 | DoubleChar <- Escape | |
65 | / <[a-zA-Z]> { p.AddDoubleCharacter(text) } | |
66 | / !'\\' <.> { p.AddCharacter(text) } | |
67 | Escape <- '\\' [aA] { p.AddCharacter("\a") } # bell | |
68 | / '\\' [bB] { p.AddCharacter("\b") } # bs | |
69 | / '\\' [eE] { p.AddCharacter("\x1B") } # esc | |
70 | / '\\' [fF] { p.AddCharacter("\f") } # ff | |
71 | / '\\' [nN] { p.AddCharacter("\n") } # nl | |
72 | / '\\' [rR] { p.AddCharacter("\r") } # cr | |
73 | / '\\' [tT] { p.AddCharacter("\t") } # ht | |
74 | / '\\' [vV] { p.AddCharacter("\v") } # vt | |
75 | / '\\' ['] { p.AddCharacter("'") } | |
76 | / '\\"' { p.AddCharacter("\"") } | |
77 | / '\\[' { p.AddCharacter("[") } | |
78 | / '\\]' { p.AddCharacter("]") } | |
79 | / '\\-' { p.AddCharacter("-") } | |
80 | / '\\' '0'[xX] <[0-9a-fA-F][0-9a-fA-F]*> { p.AddHexaCharacter(text) } | |
81 | / '\\' <[0-3][0-7][0-7]> { p.AddOctalCharacter(text) } | |
82 | / '\\' <[0-7][0-7]?> { p.AddOctalCharacter(text) } | |
83 | / '\\\\' { p.AddCharacter("\\") } | |
84 | LeftArrow <- ('<-' / '\0x2190') Spacing | |
85 | Slash <- '/' Spacing | |
86 | And <- '&' Spacing | |
87 | Not <- '!' Spacing | |
88 | Question <- '?' Spacing | |
89 | Star <- '*' Spacing | |
90 | Plus <- '+' Spacing | |
91 | Open <- '(' Spacing | |
92 | Close <- ')' Spacing | |
93 | Dot <- '.' Spacing | |
94 | SpaceComment <- (Space / Comment) | |
95 | Spacing <- SpaceComment* | |
96 | MustSpacing <- SpaceComment Spacing | |
97 | Comment <- '#' (!EndOfLine .)* EndOfLine | |
98 | Space <- ' ' / '\0x9' / EndOfLine | |
99 | EndOfLine <- '\0xd\0xa' / '\0xa' / '\0xd' | |
100 | EndOfFile <- !. | |
101 | Action <- '{' < ActionBody* > '}' Spacing | |
102 | ActionBody <- ![{}]. / '{' ActionBody* '}' | |
103 | Begin <- '<' Spacing | |
104 | End <- '>' Spacing | |
105 |
0 | module github.com/pointlander/peg | |
1 | ||
2 | require github.com/pointlander/jetset v1.0.1-0.20190518214125-eee7eff80bd4 |
0 | github.com/pointlander/compress v1.1.0 h1:5fUcQV2qEHvk0OpILH6eltwluN5VnwiYrkc1wjGUHnU= | |
1 | github.com/pointlander/compress v1.1.0/go.mod h1:q5NXNGzqj5uPnVuhGkZfmgHqNUhf15VLi6L9kW0VEc0= | |
2 | github.com/pointlander/compress v1.1.1-0.20190518213731-ff44bd196cc3 h1:hUmXhbljNFtrH5hzV9kiRoddZ5nfPTq3K0Sb2hYYiqE= | |
3 | github.com/pointlander/compress v1.1.1-0.20190518213731-ff44bd196cc3/go.mod h1:q5NXNGzqj5uPnVuhGkZfmgHqNUhf15VLi6L9kW0VEc0= | |
4 | github.com/pointlander/jetset v1.0.0 h1:bNlaNAX7cDPID9SlcogmXlDWq0KcRJSpKwHXaAM3bGQ= | |
5 | github.com/pointlander/jetset v1.0.0/go.mod h1:zY6+WHRPB10uzTajloHtybSicLW1bf6Rz0eSaU9Deng= | |
6 | github.com/pointlander/jetset v1.0.1-0.20190518214125-eee7eff80bd4 h1:RHHRCZeaNyBXdYPMjZNH8/XHDBH38TZzw8izrW7dmBE= | |
7 | github.com/pointlander/jetset v1.0.1-0.20190518214125-eee7eff80bd4/go.mod h1:RdR1j20Aj5pB6+fw6Y9Ur7lMHpegTEjY1vc19hEZL40= |
0 | # Copyright 2010 The Go Authors. All rights reserved. | |
1 | # Use of this source code is governed by a BSD-style | |
2 | # license that can be found in the LICENSE file. | |
3 | ||
4 | c: c.peg.go main.go | |
5 | go build | |
6 | ||
7 | c.peg.go: c.peg | |
8 | ../../peg -switch -inline c.peg | |
9 | ||
10 | clean: | |
11 | rm -f c c.peg.go |
109 | 109 | |
110 | 110 | } |
111 | 111 | |
112 | TranslationUnit <- Spacing ExternalDeclaration+ EOT | |
112 | TranslationUnit <- Spacing ( ExternalDeclaration / SEMI ) * EOT | |
113 | 113 | |
114 | 114 | ExternalDeclaration <- FunctionDefinition / Declaration |
115 | 115 | |
170 | 170 | |
171 | 171 | StructOrUnionSpecifier |
172 | 172 | <- StructOrUnion |
173 | ( Identifier? LWING StructDeclaration+ RWING | |
173 | ( Identifier? LWING StructDeclaration* RWING | |
174 | 174 | / Identifier |
175 | 175 | ) |
176 | 176 | |
177 | 177 | StructOrUnion <- STRUCT / UNION |
178 | 178 | |
179 | StructDeclaration <- SpecifierQualifierList StructDeclaratorList SEMI | |
179 | StructDeclaration <- ( SpecifierQualifierList StructDeclaratorList? )? SEMI | |
180 | 180 | |
181 | 181 | SpecifierQualifierList |
182 | 182 | <- ( TypeQualifier* |
346 | 346 | / TILDA |
347 | 347 | / BANG |
348 | 348 | |
349 | CastExpression <- (LPAR TypeName RPAR)* UnaryExpression | |
349 | CastExpression <- (LPAR TypeName RPAR CastExpression) / UnaryExpression | |
350 | 350 | |
351 | 351 | MultiplicativeExpression <- CastExpression ((STAR / DIV / MOD) CastExpression)* |
352 | 352 | |
607 | 607 | / HexEscape |
608 | 608 | / UniversalCharacter |
609 | 609 | |
610 | SimpleEscape <- '\\' ['\"?\\abfnrtv] | |
610 | SimpleEscape <- '\\' ['\"?\\%abfnrtv] | |
611 | 611 | OctalEscape <- '\\' [0-7][0-7]?[0-7]? |
612 | 612 | HexEscape <- '\\x' HexDigit+ |
613 | 613 |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | // +build grammars | |
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "fmt" | |
10 | "io/ioutil" | |
11 | "log" | |
12 | "os" | |
13 | "strings" | |
14 | "testing" | |
15 | ) | |
16 | ||
17 | func parseCBuffer(buffer string) (*C, error) { | |
18 | clang := &C{Buffer: buffer} | |
19 | clang.Init() | |
20 | err := clang.Parse() | |
21 | return clang, err | |
22 | } | |
23 | ||
24 | func parseC_4t(t *testing.T, src string) *C { | |
25 | c, err := parseCBuffer(src) | |
26 | if err != nil { | |
27 | t.Fatal(err) | |
28 | } | |
29 | return c | |
30 | } | |
31 | ||
32 | func noParseC_4t(t *testing.T, src string) { | |
33 | _, err := parseCBuffer(src) | |
34 | if err == nil { | |
35 | t.Fatal("Parsed what should not have parsed.") | |
36 | } | |
37 | } | |
38 | ||
39 | func TestCParsing_Expressions1(t *testing.T) { | |
40 | case1src := | |
41 | `int a() { | |
42 | (es); | |
43 | 1++; | |
44 | 1+1; | |
45 | a+1; | |
46 | (a)+1; | |
47 | a->x; | |
48 | return 0; | |
49 | }` | |
50 | parseC_4t(t, case1src) | |
51 | } | |
52 | ||
53 | func TestCParsing_Expressions2(t *testing.T) { | |
54 | parseC_4t(t, | |
55 | `int a() { | |
56 | if (a) { return (a); } | |
57 | ||
58 | return (0); | |
59 | return a+b; | |
60 | return (a+b); | |
61 | return (a)+0; | |
62 | }`) | |
63 | ||
64 | parseC_4t(t, `int a() { return (a)+0; }`) | |
65 | } | |
66 | ||
67 | func TestCParsing_Expressions3(t *testing.T) { | |
68 | parseC_4t(t, | |
69 | `int a() { | |
70 | 1+(a); | |
71 | (a)++; | |
72 | (es)++; | |
73 | (es)||a; | |
74 | (es)->a; | |
75 | return (a)+(b); | |
76 | return 0+(a); | |
77 | }`) | |
78 | } | |
79 | ||
80 | func TestCParsing_Expressions4(t *testing.T) { | |
81 | parseC_4t(t, `int a(){1+(a);}`) | |
82 | } | |
83 | func TestCParsing_Expressions5(t *testing.T) { | |
84 | parseC_4t(t, `int a(){return (int)0;}`) | |
85 | } | |
86 | func TestCParsing_Expressions6(t *testing.T) { | |
87 | parseC_4t(t, `int a(){return (in)0;}`) | |
88 | } | |
89 | func TestCParsing_Expressions7(t *testing.T) { | |
90 | parseC_4t(t, `int a() | |
91 | { return (0); }`) | |
92 | } | |
93 | func TestCParsing_Cast0(t *testing.T) { | |
94 | parseC_4t(t, `int a(){(cast)0;}`) | |
95 | } | |
96 | func TestCParsing_Cast1(t *testing.T) { | |
97 | parseC_4t(t, `int a(){(m*)(rsp);}`) | |
98 | parseC_4t(t, `int a(){(struct m*)(rsp);}`) | |
99 | } | |
100 | ||
101 | func TestCParsing_Empty(t *testing.T) { | |
102 | parseC_4t(t, `/** empty is valid. */ `) | |
103 | } | |
104 | func TestCParsing_EmptyStruct(t *testing.T) { | |
105 | parseC_4t(t, `struct empty{};`) | |
106 | parseC_4t(t, `struct {} empty;`) | |
107 | parseC_4t(t, `struct empty {} empty;`) | |
108 | } | |
109 | func TestCParsing_EmptyEmbeddedUnion(t *testing.T) { | |
110 | parseC_4t(t, `struct empty{ | |
111 | union { | |
112 | int a; | |
113 | char b; | |
114 | }; | |
115 | };`) | |
116 | } | |
117 | func TestCParsing_ExtraSEMI(t *testing.T) { | |
118 | parseC_4t(t, `int func(){} | |
119 | ; | |
120 | struct {} empty; | |
121 | struct {} empty;; | |
122 | int foo() {}; | |
123 | int foo() {};; | |
124 | `) | |
125 | ||
126 | noParseC_4t(t, `struct empty{}`) | |
127 | } | |
128 | func TestCParsing_ExtraSEMI2(t *testing.T) { | |
129 | parseC_4t(t, ` | |
130 | struct a { int b; ; }; | |
131 | `) | |
132 | ||
133 | noParseC_4t(t, `struct empty{}`) | |
134 | } | |
135 | ||
136 | func TestCParsing_Escapes(t *testing.T) { | |
137 | parseC_4t(t, ` | |
138 | int f() { | |
139 | printf("%s", "\a\b\f\n\r\t\v"); | |
140 | printf("\\"); | |
141 | printf("\%"); | |
142 | printf("\""); | |
143 | printf('\"'); // <- semantically wrong but syntactically valid. | |
144 | }`) | |
145 | } | |
146 | ||
147 | func TestCParsing_Long(t *testing.T) { | |
148 | if testing.Short() { | |
149 | t.Skip("skipping c parsing long test") | |
150 | } | |
151 | ||
152 | var walk func(name string) | |
153 | walk = func(name string) { | |
154 | fileInfo, err := os.Stat(name) | |
155 | if err != nil { | |
156 | log.Fatal(err) | |
157 | } | |
158 | ||
159 | if fileInfo.Mode()&(os.ModeNamedPipe|os.ModeSocket|os.ModeDevice) != 0 { | |
160 | /* will lock up if opened */ | |
161 | } else if fileInfo.IsDir() { | |
162 | fmt.Printf("directory %v\n", name) | |
163 | ||
164 | file, err := os.Open(name) | |
165 | if err != nil { | |
166 | log.Fatal(err) | |
167 | } | |
168 | ||
169 | files, err := file.Readdir(-1) | |
170 | if err != nil { | |
171 | log.Fatal(err) | |
172 | } | |
173 | file.Close() | |
174 | ||
175 | for _, f := range files { | |
176 | if !strings.HasSuffix(name, "/") { | |
177 | name += "/" | |
178 | } | |
179 | walk(name + f.Name()) | |
180 | } | |
181 | } else if strings.HasSuffix(name, ".c") { | |
182 | fmt.Printf("parse %v\n", name) | |
183 | ||
184 | file, err := os.Open(name) | |
185 | if err != nil { | |
186 | log.Fatal(err) | |
187 | } | |
188 | ||
189 | buffer, err := ioutil.ReadAll(file) | |
190 | if err != nil { | |
191 | log.Fatal(err) | |
192 | } | |
193 | file.Close() | |
194 | ||
195 | clang := &C{Buffer: string(buffer)} | |
196 | clang.Init() | |
197 | if err := clang.Parse(); err != nil { | |
198 | log.Fatal(err) | |
199 | } | |
200 | } | |
201 | } | |
202 | walk("c/") | |
203 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | package main | |
5 | ||
6 | import ( | |
7 | "fmt" | |
8 | "io/ioutil" | |
9 | "log" | |
10 | "os" | |
11 | "strings" | |
12 | ) | |
13 | ||
14 | func main() { | |
15 | if len(os.Args) < 2 { | |
16 | fmt.Printf("%v FILE\n", os.Args[0]) | |
17 | os.Exit(1) | |
18 | } | |
19 | ||
20 | var walk func(name string) | |
21 | walk = func(name string) { | |
22 | fileInfo, err := os.Stat(name) | |
23 | if err != nil { | |
24 | log.Fatal(err) | |
25 | } | |
26 | ||
27 | if fileInfo.Mode() & (os.ModeNamedPipe | os.ModeSocket | os.ModeDevice) != 0 { | |
28 | /* will lock up if opened */ | |
29 | } else if fileInfo.IsDir() { | |
30 | fmt.Printf("directory %v\n", name) | |
31 | ||
32 | file, err := os.Open(name) | |
33 | if err != nil { | |
34 | log.Fatal(err) | |
35 | } | |
36 | ||
37 | files, err := file.Readdir(-1) | |
38 | if err != nil { | |
39 | log.Fatal(err) | |
40 | } | |
41 | file.Close() | |
42 | ||
43 | for _, f := range files { | |
44 | if !strings.HasSuffix(name, "/") { | |
45 | name += "/" | |
46 | } | |
47 | walk(name + f.Name()) | |
48 | } | |
49 | } else if strings.HasSuffix(name, ".c") { | |
50 | fmt.Printf("parse %v\n", name) | |
51 | ||
52 | file, err := os.Open(name) | |
53 | if err != nil { | |
54 | log.Fatal(err) | |
55 | } | |
56 | ||
57 | buffer, err := ioutil.ReadAll(file) | |
58 | if err != nil { | |
59 | log.Fatal(err) | |
60 | } | |
61 | file.Close() | |
62 | ||
63 | clang := &C{Buffer: string(buffer)} | |
64 | clang.Init() | |
65 | if err := clang.Parse(); err != nil { | |
66 | log.Fatal(err) | |
67 | } | |
68 | } | |
69 | } | |
70 | walk(os.Args[1]) | |
71 | } |
0 | # Copyright 2010 The Go Authors. All rights reserved. | |
1 | # Use of this source code is governed by a BSD-style | |
2 | # license that can be found in the LICENSE file. | |
3 | ||
4 | calculator: calculator.peg.go calculator.go main.go | |
5 | go build | |
6 | ||
7 | calculator.peg.go: calculator.peg | |
8 | ../../peg -switch -inline calculator.peg | |
9 | ||
10 | clean: | |
11 | rm -f calculator calculator.peg.go |
0 | 0 | // Copyright 2010 The Go Authors. All rights reserved. |
1 | 1 | // Use of this source code is governed by a BSD-style |
2 | 2 | // license that can be found in the LICENSE file. |
3 | ||
4 | // +build grammars | |
3 | 5 | |
4 | 6 | package main |
5 | 7 |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | // +build grammars | |
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "math/big" | |
10 | "testing" | |
11 | ) | |
12 | ||
13 | func TestCalculator(t *testing.T) { | |
14 | expression := "( 1 - -3 ) / 3 + 2 * ( 3 + -4 ) + 3 % 2^2" | |
15 | calc := &Calculator{Buffer: expression} | |
16 | calc.Init() | |
17 | calc.Expression.Init(expression) | |
18 | if err := calc.Parse(); err != nil { | |
19 | t.Fatal(err) | |
20 | } | |
21 | calc.Execute() | |
22 | if calc.Evaluate().Cmp(big.NewInt(2)) != 0 { | |
23 | t.Fatal("got incorrect result") | |
24 | } | |
25 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | package main | |
5 | ||
6 | import ( | |
7 | "fmt" | |
8 | "log" | |
9 | "os" | |
10 | ) | |
11 | ||
12 | func main() { | |
13 | if len(os.Args) < 2 { | |
14 | name := os.Args[0] | |
15 | fmt.Printf("Usage: %v \"EXPRESSION\"\n", name) | |
16 | fmt.Printf("Example: %v \"( 1 - -3 ) / 3 + 2 * ( 3 + -4 ) + 3 %% 2^2\"\n =2\n", name) | |
17 | os.Exit(1) | |
18 | } | |
19 | expression := os.Args[1] | |
20 | calc := &Calculator{Buffer: expression} | |
21 | calc.Init() | |
22 | calc.Expression.Init(expression) | |
23 | if err := calc.Parse(); err != nil { | |
24 | log.Fatal(err) | |
25 | } | |
26 | calc.Execute() | |
27 | fmt.Printf("= %v\n", calc.Evaluate()) | |
28 | } |
0 | # Copyright 2010 The Go Authors. All rights reserved. | |
1 | # Use of this source code is governed by a BSD-style | |
2 | # license that can be found in the LICENSE file. | |
3 | ||
4 | fexl: fexl.peg.go main.go | |
5 | go build | |
6 | ||
7 | fexl.peg.go: fexl.peg | |
8 | ../../peg -switch -inline fexl.peg | |
9 | ||
10 | clean: | |
11 | rm -f fexl fexl.peg.go |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | // +build grammars | |
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "io/ioutil" | |
10 | "testing" | |
11 | ) | |
12 | ||
13 | func TestFexl(t *testing.T) { | |
14 | buffer, err := ioutil.ReadFile("doc/try.fxl") | |
15 | if err != nil { | |
16 | t.Fatal(err) | |
17 | } | |
18 | ||
19 | fexl := &Fexl{Buffer: string(buffer)} | |
20 | fexl.Init() | |
21 | ||
22 | if err := fexl.Parse(); err != nil { | |
23 | t.Fatal(err) | |
24 | } | |
25 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | package main | |
5 | ||
6 | import ( | |
7 | "log" | |
8 | "io/ioutil" | |
9 | ) | |
10 | ||
11 | func main() { | |
12 | buffer, err := ioutil.ReadFile("doc/try.fxl") | |
13 | if err != nil { | |
14 | log.Fatal(err) | |
15 | } | |
16 | ||
17 | fexl := &Fexl{Buffer: string(buffer)} | |
18 | fexl.Init() | |
19 | ||
20 | if err := fexl.Parse(); err != nil { | |
21 | log.Fatal(err) | |
22 | } | |
23 | fexl.Highlighter() | |
24 | } |
0 | # Copyright 2010 The Go Authors. All rights reserved. | |
1 | # Use of this source code is governed by a BSD-style | |
2 | # license that can be found in the LICENSE file. | |
3 | ||
4 | java: java_1_7.peg.go main.go | |
5 | go build | |
6 | ||
7 | java_1_7.peg.go: java_1_7.peg | |
8 | ../../peg -switch -inline java_1_7.peg | |
9 | ||
10 | clean: | |
11 | rm -f java java_1_7.peg.go |
177 | 177 | / InterfaceMethodDeclaratorRest |
178 | 178 | |
179 | 179 | InterfaceMethodDeclaratorRest |
180 | <- FormalParameters Dim* (THROWS ClassTypeList)? SEM | |
180 | <- FormalParameters Dim* (THROWS ClassTypeList)? SEMI | |
181 | 181 | |
182 | 182 | InterfaceGenericMethodDecl |
183 | 183 | <- TypeParameters (Type / VOID) Identifier InterfaceMethodDeclaratorRest |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | // +build grammars | |
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "fmt" | |
10 | "io/ioutil" | |
11 | "log" | |
12 | "os" | |
13 | "strings" | |
14 | "testing" | |
15 | ) | |
16 | ||
17 | var example1 = `public class HelloWorld { | |
18 | public static void main(String[] args) { | |
19 | System.out.println("Hello, World"); | |
20 | } | |
21 | } | |
22 | ` | |
23 | ||
24 | func TestBasic(t *testing.T) { | |
25 | java := &Java{Buffer: example1} | |
26 | java.Init() | |
27 | ||
28 | if err := java.Parse(); err != nil { | |
29 | t.Fatal(err) | |
30 | } | |
31 | } | |
32 | ||
33 | func TestJava(t *testing.T) { | |
34 | if testing.Short() { | |
35 | t.Skip("skipping java parsing long test") | |
36 | } | |
37 | ||
38 | var walk func(name string) | |
39 | walk = func(name string) { | |
40 | fileInfo, err := os.Stat(name) | |
41 | if err != nil { | |
42 | log.Fatal(err) | |
43 | } | |
44 | ||
45 | if fileInfo.Mode()&(os.ModeNamedPipe|os.ModeSocket|os.ModeDevice) != 0 { | |
46 | /* will lock up if opened */ | |
47 | } else if fileInfo.IsDir() { | |
48 | fmt.Printf("directory %v\n", name) | |
49 | ||
50 | file, err := os.Open(name) | |
51 | if err != nil { | |
52 | log.Fatal(err) | |
53 | } | |
54 | ||
55 | files, err := file.Readdir(-1) | |
56 | if err != nil { | |
57 | log.Fatal(err) | |
58 | } | |
59 | file.Close() | |
60 | ||
61 | for _, f := range files { | |
62 | if !strings.HasSuffix(name, "/") { | |
63 | name += "/" | |
64 | } | |
65 | walk(name + f.Name()) | |
66 | } | |
67 | } else if strings.HasSuffix(name, ".java") { | |
68 | fmt.Printf("parse %v\n", name) | |
69 | ||
70 | file, err := os.Open(name) | |
71 | if err != nil { | |
72 | log.Fatal(err) | |
73 | } | |
74 | ||
75 | buffer, err := ioutil.ReadAll(file) | |
76 | if err != nil { | |
77 | log.Fatal(err) | |
78 | } | |
79 | file.Close() | |
80 | ||
81 | java := &Java{Buffer: string(buffer)} | |
82 | java.Init() | |
83 | if err := java.Parse(); err != nil { | |
84 | log.Fatal(err) | |
85 | } | |
86 | } | |
87 | } | |
88 | walk("java/") | |
89 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | package main | |
5 | ||
6 | import ( | |
7 | "fmt" | |
8 | "io/ioutil" | |
9 | "log" | |
10 | "os" | |
11 | "strings" | |
12 | ) | |
13 | ||
14 | func main() { | |
15 | if len(os.Args) < 2 { | |
16 | fmt.Printf("%v FILE\n", os.Args[0]) | |
17 | os.Exit(1) | |
18 | } | |
19 | ||
20 | var walk func(name string) | |
21 | walk = func(name string) { | |
22 | fileInfo, err := os.Stat(name) | |
23 | if err != nil { | |
24 | log.Fatal(err) | |
25 | } | |
26 | ||
27 | if fileInfo.Mode() & (os.ModeNamedPipe | os.ModeSocket | os.ModeDevice) != 0 { | |
28 | /* will lock up if opened */ | |
29 | } else if fileInfo.IsDir() { | |
30 | fmt.Printf("directory %v\n", name) | |
31 | ||
32 | file, err := os.Open(name) | |
33 | if err != nil { | |
34 | log.Fatal(err) | |
35 | } | |
36 | ||
37 | files, err := file.Readdir(-1) | |
38 | if err != nil { | |
39 | log.Fatal(err) | |
40 | } | |
41 | file.Close() | |
42 | ||
43 | for _, f := range files { | |
44 | if !strings.HasSuffix(name, "/") { | |
45 | name += "/" | |
46 | } | |
47 | walk(name + f.Name()) | |
48 | } | |
49 | } else if strings.HasSuffix(name, ".java") { | |
50 | fmt.Printf("parse %v\n", name) | |
51 | ||
52 | file, err := os.Open(name) | |
53 | if err != nil { | |
54 | log.Fatal(err) | |
55 | } | |
56 | ||
57 | buffer, err := ioutil.ReadAll(file) | |
58 | if err != nil { | |
59 | log.Fatal(err) | |
60 | } | |
61 | file.Close() | |
62 | ||
63 | java := &Java{Buffer: string(buffer)} | |
64 | java.Init() | |
65 | if err := java.Parse(); err != nil { | |
66 | log.Fatal(err) | |
67 | } | |
68 | } | |
69 | } | |
70 | walk(os.Args[1]) | |
71 | } |
0 | # Copyright 2010 The Go Authors. All rights reserved. | |
1 | # Use of this source code is governed by a BSD-style | |
2 | # license that can be found in the LICENSE file. | |
3 | ||
4 | long_test: long.peg.go main.go | |
5 | go build | |
6 | ||
7 | long.peg.go: long.peg | |
8 | peg -switch -inline long.peg | |
9 | ||
10 | clean: | |
11 | rm -f long_test long.peg.go |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | // +build grammars | |
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "testing" | |
10 | ) | |
11 | ||
12 | func TestLong(t *testing.T) { | |
13 | length := 100000 | |
14 | if testing.Short() { | |
15 | length = 100 | |
16 | } | |
17 | ||
18 | expression := "" | |
19 | long := &Long{Buffer: "\"" + expression + "\""} | |
20 | long.Init() | |
21 | for c := 0; c < length; c++ { | |
22 | if err := long.Parse(); err != nil { | |
23 | t.Fatal(err) | |
24 | } | |
25 | long.Reset() | |
26 | expression = expression + "X" | |
27 | long.Buffer = "\"" + expression + "\"" | |
28 | } | |
29 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | package main | |
5 | ||
6 | import ( | |
7 | "fmt" | |
8 | "log" | |
9 | ) | |
10 | ||
11 | func main() { | |
12 | expression := "" | |
13 | long := &Long{Buffer: "\"" + expression + "\""} | |
14 | long.Init() | |
15 | for c := 0; c < 100000; c++ { | |
16 | if err := long.Parse(); err != nil { | |
17 | fmt.Printf("%v\n", c) | |
18 | log.Fatal(err) | |
19 | } | |
20 | long.Reset() | |
21 | expression = expression + "X" | |
22 | long.Buffer = "\"" + expression + "\"" | |
23 | } | |
24 | } |
10 | 10 | "log" |
11 | 11 | "os" |
12 | 12 | "runtime" |
13 | "time" | |
13 | ||
14 | "github.com/pointlander/peg/tree" | |
14 | 15 | ) |
15 | 16 | |
16 | 17 | var ( |
17 | inline = flag.Bool("inline", false, "parse rule inlining") | |
18 | _switch = flag.Bool("switch", false, "replace if-else if-else like blocks with switch blocks") | |
19 | syntax = flag.Bool("syntax", false, "print out the syntax tree") | |
20 | highlight = flag.Bool("highlight", false, "test the syntax highlighter") | |
21 | ast = flag.Bool("ast", false, "generate an AST") | |
22 | test = flag.Bool("test", false, "test the PEG parser performance") | |
23 | print = flag.Bool("print", false, "directly dump the syntax tree") | |
18 | inline = flag.Bool("inline", false, "parse rule inlining") | |
19 | _switch = flag.Bool("switch", false, "replace if-else if-else like blocks with switch blocks") | |
20 | print = flag.Bool("print", false, "directly dump the syntax tree") | |
21 | syntax = flag.Bool("syntax", false, "print out the syntax tree") | |
22 | noast = flag.Bool("noast", false, "disable AST") | |
23 | strict = flag.Bool("strict", false, "treat compiler warnings as errors") | |
24 | 24 | ) |
25 | 25 | |
26 | 26 | func main() { |
38 | 38 | log.Fatal(err) |
39 | 39 | } |
40 | 40 | |
41 | if *test { | |
42 | iterations, p := 1000, &Peg{Tree: New(*inline, *_switch), Buffer: string(buffer)} | |
43 | p.Init() | |
44 | start := time.Now() | |
45 | for i := 0; i < iterations; i++ { | |
46 | p.Parse() | |
47 | p.Reset() | |
48 | } | |
49 | total := float64(time.Since(start).Nanoseconds()) / float64(1000) | |
50 | fmt.Printf("time: %v us\n", total/float64(iterations)) | |
51 | return | |
52 | } | |
53 | ||
54 | p := &Peg{Tree: New(*inline, *_switch), Buffer: string(buffer), Pretty: true} | |
55 | p.Init() | |
41 | p := &Peg{Tree: tree.New(*inline, *_switch, *noast), Buffer: string(buffer)} | |
42 | p.Init(Pretty(true), Size(1<<15)) | |
56 | 43 | if err := p.Parse(); err != nil { |
57 | 44 | log.Fatal(err) |
58 | 45 | } |
59 | 46 | |
60 | 47 | p.Execute() |
61 | 48 | |
62 | if *ast { | |
63 | p.AST().Print(p.Buffer) | |
64 | } | |
65 | 49 | if *print { |
66 | 50 | p.Print() |
67 | 51 | } |
68 | 52 | if *syntax { |
69 | 53 | p.PrintSyntaxTree() |
70 | 54 | } |
71 | if *highlight { | |
72 | p.Highlighter() | |
73 | } | |
74 | 55 | |
75 | 56 | filename := file + ".go" |
76 | out, error := os.OpenFile(filename, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644) | |
77 | if error != nil { | |
78 | fmt.Printf("%v: %v\n", filename, error) | |
57 | out, err := os.OpenFile(filename, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644) | |
58 | if err != nil { | |
59 | fmt.Printf("%v: %v\n", filename, err) | |
79 | 60 | return |
80 | 61 | } |
81 | 62 | defer out.Close() |
82 | p.Compile(filename, out) | |
63 | ||
64 | p.Strict = *strict | |
65 | if err = p.Compile(filename, os.Args, out); err != nil { | |
66 | log.Fatal(err) | |
67 | } | |
83 | 68 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | package main | |
5 | ||
6 | import ( | |
7 | "bytes" | |
8 | "fmt" | |
9 | "go/parser" | |
10 | "go/printer" | |
11 | "go/token" | |
12 | "io" | |
13 | "math" | |
14 | "os" | |
15 | "strconv" | |
16 | "strings" | |
17 | "text/template" | |
18 | ||
19 | "github.com/pointlander/jetset" | |
20 | ) | |
21 | ||
22 | const pegHeaderTemplate = `package {{.PackageName}} | |
23 | ||
24 | import ( | |
25 | {{range .Imports}}"{{.}}" | |
26 | {{end}} | |
27 | ) | |
28 | ||
29 | const endSymbol rune = {{.EndSymbol}} | |
30 | ||
31 | /* The rule types inferred from the grammar are below. */ | |
32 | type pegRule {{.PegRuleType}} | |
33 | ||
34 | const ( | |
35 | ruleUnknown pegRule = iota | |
36 | {{range .RuleNames}}rule{{.String}} | |
37 | {{end}} | |
38 | rulePre | |
39 | ruleIn | |
40 | ruleSuf | |
41 | ) | |
42 | ||
43 | var rul3s = [...]string { | |
44 | "Unknown", | |
45 | {{range .RuleNames}}"{{.String}}", | |
46 | {{end}} | |
47 | "Pre_", | |
48 | "_In_", | |
49 | "_Suf", | |
50 | } | |
51 | ||
52 | type node32 struct { | |
53 | token32 | |
54 | up, next *node32 | |
55 | } | |
56 | ||
57 | func (node *node32) print(depth int, buffer string) { | |
58 | for node != nil { | |
59 | for c := 0; c < depth; c++ { | |
60 | fmt.Printf(" ") | |
61 | } | |
62 | fmt.Printf("\x1B[34m%v\x1B[m %v\n", rul3s[node.pegRule], strconv.Quote(string(([]rune(buffer)[node.begin:node.end])))) | |
63 | if node.up != nil { | |
64 | node.up.print(depth + 1, buffer) | |
65 | } | |
66 | node = node.next | |
67 | } | |
68 | } | |
69 | ||
70 | func (node *node32) Print(buffer string) { | |
71 | node.print(0, buffer) | |
72 | } | |
73 | ||
74 | type element struct { | |
75 | node *node32 | |
76 | down *element | |
77 | } | |
78 | ||
79 | {{range .Sizes}} | |
80 | ||
81 | /* ${@} bit structure for abstract syntax tree */ | |
82 | type token{{.}} struct { | |
83 | pegRule | |
84 | begin, end, next uint{{.}} | |
85 | } | |
86 | ||
87 | func (t *token{{.}}) isZero() bool { | |
88 | return t.pegRule == ruleUnknown && t.begin == 0 && t.end == 0 && t.next == 0 | |
89 | } | |
90 | ||
91 | func (t *token{{.}}) isParentOf(u token{{.}}) bool { | |
92 | return t.begin <= u.begin && t.end >= u.end && t.next > u.next | |
93 | } | |
94 | ||
95 | func (t *token{{.}}) getToken32() token32 { | |
96 | return token32{pegRule: t.pegRule, begin: uint32(t.begin), end: uint32(t.end), next: uint32(t.next)} | |
97 | } | |
98 | ||
99 | func (t *token{{.}}) String() string { | |
100 | return fmt.Sprintf("\x1B[34m%v\x1B[m %v %v %v", rul3s[t.pegRule], t.begin, t.end, t.next) | |
101 | } | |
102 | ||
103 | type tokens{{.}} struct { | |
104 | tree []token{{.}} | |
105 | ordered [][]token{{.}} | |
106 | } | |
107 | ||
108 | func (t *tokens{{.}}) trim(length int) { | |
109 | t.tree = t.tree[0:length] | |
110 | } | |
111 | ||
112 | func (t *tokens{{.}}) Print() { | |
113 | for _, token := range t.tree { | |
114 | fmt.Println(token.String()) | |
115 | } | |
116 | } | |
117 | ||
118 | func (t *tokens{{.}}) Order() [][]token{{.}} { | |
119 | if t.ordered != nil { | |
120 | return t.ordered | |
121 | } | |
122 | ||
123 | depths := make([]int{{.}}, 1, math.MaxInt16) | |
124 | for i, token := range t.tree { | |
125 | if token.pegRule == ruleUnknown { | |
126 | t.tree = t.tree[:i] | |
127 | break | |
128 | } | |
129 | depth := int(token.next) | |
130 | if length := len(depths); depth >= length { | |
131 | depths = depths[:depth + 1] | |
132 | } | |
133 | depths[depth]++ | |
134 | } | |
135 | depths = append(depths, 0) | |
136 | ||
137 | ordered, pool := make([][]token{{.}}, len(depths)), make([]token{{.}}, len(t.tree) + len(depths)) | |
138 | for i, depth := range depths { | |
139 | depth++ | |
140 | ordered[i], pool, depths[i] = pool[:depth], pool[depth:], 0 | |
141 | } | |
142 | ||
143 | for i, token := range t.tree { | |
144 | depth := token.next | |
145 | token.next = uint{{.}}(i) | |
146 | ordered[depth][depths[depth]] = token | |
147 | depths[depth]++ | |
148 | } | |
149 | t.ordered = ordered | |
150 | return ordered | |
151 | } | |
152 | ||
153 | type state{{.}} struct { | |
154 | token{{.}} | |
155 | depths []int{{.}} | |
156 | leaf bool | |
157 | } | |
158 | ||
159 | func (t *tokens{{.}}) AST() *node32 { | |
160 | tokens := t.Tokens() | |
161 | stack := &element{node: &node32{token32:<-tokens}} | |
162 | for token := range tokens { | |
163 | if token.begin == token.end { | |
164 | continue | |
165 | } | |
166 | node := &node32{token32: token} | |
167 | for stack != nil && stack.node.begin >= token.begin && stack.node.end <= token.end { | |
168 | stack.node.next = node.up | |
169 | node.up = stack.node | |
170 | stack = stack.down | |
171 | } | |
172 | stack = &element{node: node, down: stack} | |
173 | } | |
174 | return stack.node | |
175 | } | |
176 | ||
177 | func (t *tokens{{.}}) PreOrder() (<-chan state{{.}}, [][]token{{.}}) { | |
178 | s, ordered := make(chan state{{.}}, 6), t.Order() | |
179 | go func() { | |
180 | var states [8]state{{.}} | |
181 | for i := range states { | |
182 | states[i].depths = make([]int{{.}}, len(ordered)) | |
183 | } | |
184 | depths, state, depth := make([]int{{.}}, len(ordered)), 0, 1 | |
185 | write := func(t token{{.}}, leaf bool) { | |
186 | S := states[state] | |
187 | state, S.pegRule, S.begin, S.end, S.next, S.leaf = (state + 1) % 8, t.pegRule, t.begin, t.end, uint{{.}}(depth), leaf | |
188 | copy(S.depths, depths) | |
189 | s <- S | |
190 | } | |
191 | ||
192 | states[state].token{{.}} = ordered[0][0] | |
193 | depths[0]++ | |
194 | state++ | |
195 | a, b := ordered[depth - 1][depths[depth - 1] - 1], ordered[depth][depths[depth]] | |
196 | depthFirstSearch: for { | |
197 | for { | |
198 | if i := depths[depth]; i > 0 { | |
199 | if c, j := ordered[depth][i - 1], depths[depth - 1]; a.isParentOf(c) && | |
200 | (j < 2 || !ordered[depth - 1][j - 2].isParentOf(c)) { | |
201 | if c.end != b.begin { | |
202 | write(token{{.}} {pegRule: ruleIn, begin: c.end, end: b.begin}, true) | |
203 | } | |
204 | break | |
205 | } | |
206 | } | |
207 | ||
208 | if a.begin < b.begin { | |
209 | write(token{{.}} {pegRule: rulePre, begin: a.begin, end: b.begin}, true) | |
210 | } | |
211 | break | |
212 | } | |
213 | ||
214 | next := depth + 1 | |
215 | if c := ordered[next][depths[next]]; c.pegRule != ruleUnknown && b.isParentOf(c) { | |
216 | write(b, false) | |
217 | depths[depth]++ | |
218 | depth, a, b = next, b, c | |
219 | continue | |
220 | } | |
221 | ||
222 | write(b, true) | |
223 | depths[depth]++ | |
224 | c, parent := ordered[depth][depths[depth]], true | |
225 | for { | |
226 | if c.pegRule != ruleUnknown && a.isParentOf(c) { | |
227 | b = c | |
228 | continue depthFirstSearch | |
229 | } else if parent && b.end != a.end { | |
230 | write(token{{.}} {pegRule: ruleSuf, begin: b.end, end: a.end}, true) | |
231 | } | |
232 | ||
233 | depth-- | |
234 | if depth > 0 { | |
235 | a, b, c = ordered[depth - 1][depths[depth - 1] - 1], a, ordered[depth][depths[depth]] | |
236 | parent = a.isParentOf(b) | |
237 | continue | |
238 | } | |
239 | ||
240 | break depthFirstSearch | |
241 | } | |
242 | } | |
243 | ||
244 | close(s) | |
245 | }() | |
246 | return s, ordered | |
247 | } | |
248 | ||
249 | func (t *tokens{{.}}) PrintSyntax() { | |
250 | tokens, ordered := t.PreOrder() | |
251 | max := -1 | |
252 | for token := range tokens { | |
253 | if !token.leaf { | |
254 | fmt.Printf("%v", token.begin) | |
255 | for i, leaf, depths := 0, int(token.next), token.depths; i < leaf; i++ { | |
256 | fmt.Printf(" \x1B[36m%v\x1B[m", rul3s[ordered[i][depths[i] - 1].pegRule]) | |
257 | } | |
258 | fmt.Printf(" \x1B[36m%v\x1B[m\n", rul3s[token.pegRule]) | |
259 | } else if token.begin == token.end { | |
260 | fmt.Printf("%v", token.begin) | |
261 | for i, leaf, depths := 0, int(token.next), token.depths; i < leaf; i++ { | |
262 | fmt.Printf(" \x1B[31m%v\x1B[m", rul3s[ordered[i][depths[i] - 1].pegRule]) | |
263 | } | |
264 | fmt.Printf(" \x1B[31m%v\x1B[m\n", rul3s[token.pegRule]) | |
265 | } else { | |
266 | for c, end := token.begin, token.end; c < end; c++ { | |
267 | if i := int(c); max + 1 < i { | |
268 | for j := max; j < i; j++ { | |
269 | fmt.Printf("skip %v %v\n", j, token.String()) | |
270 | } | |
271 | max = i | |
272 | } else if i := int(c); i <= max { | |
273 | for j := i; j <= max; j++ { | |
274 | fmt.Printf("dupe %v %v\n", j, token.String()) | |
275 | } | |
276 | } else { | |
277 | max = int(c) | |
278 | } | |
279 | fmt.Printf("%v", c) | |
280 | for i, leaf, depths := 0, int(token.next), token.depths; i < leaf; i++ { | |
281 | fmt.Printf(" \x1B[34m%v\x1B[m", rul3s[ordered[i][depths[i] - 1].pegRule]) | |
282 | } | |
283 | fmt.Printf(" \x1B[34m%v\x1B[m\n", rul3s[token.pegRule]) | |
284 | } | |
285 | fmt.Printf("\n") | |
286 | } | |
287 | } | |
288 | } | |
289 | ||
290 | func (t *tokens{{.}}) PrintSyntaxTree(buffer string) { | |
291 | tokens, _ := t.PreOrder() | |
292 | for token := range tokens { | |
293 | for c := 0; c < int(token.next); c++ { | |
294 | fmt.Printf(" ") | |
295 | } | |
296 | fmt.Printf("\x1B[34m%v\x1B[m %v\n", rul3s[token.pegRule], strconv.Quote(string(([]rune(buffer)[token.begin:token.end])))) | |
297 | } | |
298 | } | |
299 | ||
300 | func (t *tokens{{.}}) Add(rule pegRule, begin, end, depth uint32, index int) { | |
301 | t.tree[index] = token{{.}}{pegRule: rule, begin: uint{{.}}(begin), end: uint{{.}}(end), next: uint{{.}}(depth)} | |
302 | } | |
303 | ||
304 | func (t *tokens{{.}}) Tokens() <-chan token32 { | |
305 | s := make(chan token32, 16) | |
306 | go func() { | |
307 | for _, v := range t.tree { | |
308 | s <- v.getToken32() | |
309 | } | |
310 | close(s) | |
311 | }() | |
312 | return s | |
313 | } | |
314 | ||
315 | func (t *tokens{{.}}) Error() []token32 { | |
316 | ordered := t.Order() | |
317 | length := len(ordered) | |
318 | tokens, length := make([]token32, length), length - 1 | |
319 | for i := range tokens { | |
320 | o := ordered[length - i] | |
321 | if len(o) > 1 { | |
322 | tokens[i] = o[len(o) - 2].getToken32() | |
323 | } | |
324 | } | |
325 | return tokens | |
326 | } | |
327 | {{end}} | |
328 | ||
329 | func (t *tokens32) Expand(index int) { | |
330 | tree := t.tree | |
331 | if index >= len(tree) { | |
332 | expanded := make([]token32, 2 * len(tree)) | |
333 | copy(expanded, tree) | |
334 | t.tree = expanded | |
335 | } | |
336 | } | |
337 | ||
338 | type {{.StructName}} struct { | |
339 | {{.StructVariables}} | |
340 | Buffer string | |
341 | buffer []rune | |
342 | rules [{{.RulesCount}}]func() bool | |
343 | Parse func(rule ...int) error | |
344 | Reset func() | |
345 | Pretty bool | |
346 | tokens32 | |
347 | } | |
348 | ||
349 | type textPosition struct { | |
350 | line, symbol int | |
351 | } | |
352 | ||
353 | type textPositionMap map[int] textPosition | |
354 | ||
355 | func translatePositions(buffer []rune, positions []int) textPositionMap { | |
356 | length, translations, j, line, symbol := len(positions), make(textPositionMap, len(positions)), 0, 1, 0 | |
357 | sort.Ints(positions) | |
358 | ||
359 | search: for i, c := range buffer { | |
360 | if c == '\n' {line, symbol = line + 1, 0} else {symbol++} | |
361 | if i == positions[j] { | |
362 | translations[positions[j]] = textPosition{line, symbol} | |
363 | for j++; j < length; j++ {if i != positions[j] {continue search}} | |
364 | break search | |
365 | } | |
366 | } | |
367 | ||
368 | return translations | |
369 | } | |
370 | ||
371 | type parseError struct { | |
372 | p *{{.StructName}} | |
373 | max token32 | |
374 | } | |
375 | ||
376 | func (e *parseError) Error() string { | |
377 | tokens, error := []token32{e.max}, "\n" | |
378 | positions, p := make([]int, 2 * len(tokens)), 0 | |
379 | for _, token := range tokens { | |
380 | positions[p], p = int(token.begin), p + 1 | |
381 | positions[p], p = int(token.end), p + 1 | |
382 | } | |
383 | translations := translatePositions(e.p.buffer, positions) | |
384 | format := "parse error near %v (line %v symbol %v - line %v symbol %v):\n%v\n" | |
385 | if e.p.Pretty { | |
386 | format = "parse error near \x1B[34m%v\x1B[m (line %v symbol %v - line %v symbol %v):\n%v\n" | |
387 | } | |
388 | for _, token := range tokens { | |
389 | begin, end := int(token.begin), int(token.end) | |
390 | error += fmt.Sprintf(format, | |
391 | rul3s[token.pegRule], | |
392 | translations[begin].line, translations[begin].symbol, | |
393 | translations[end].line, translations[end].symbol, | |
394 | strconv.Quote(string(e.p.buffer[begin:end]))) | |
395 | } | |
396 | ||
397 | return error | |
398 | } | |
399 | ||
400 | func (p *{{.StructName}}) PrintSyntaxTree() { | |
401 | p.tokens32.PrintSyntaxTree(p.Buffer) | |
402 | } | |
403 | ||
404 | func (p *{{.StructName}}) Highlighter() { | |
405 | p.PrintSyntax() | |
406 | } | |
407 | ||
408 | {{if .HasActions}} | |
409 | func (p *{{.StructName}}) Execute() { | |
410 | buffer, _buffer, text, begin, end := p.Buffer, p.buffer, "", 0, 0 | |
411 | for token := range p.Tokens() { | |
412 | switch (token.pegRule) { | |
413 | {{if .HasPush}} | |
414 | case rulePegText: | |
415 | begin, end = int(token.begin), int(token.end) | |
416 | text = string(_buffer[begin:end]) | |
417 | {{end}} | |
418 | {{range .Actions}}case ruleAction{{.GetId}}: | |
419 | {{.String}} | |
420 | {{end}} | |
421 | } | |
422 | } | |
423 | _, _, _, _, _ = buffer, _buffer, text, begin, end | |
424 | } | |
425 | {{end}} | |
426 | ||
427 | func (p *{{.StructName}}) Init() { | |
428 | p.buffer = []rune(p.Buffer) | |
429 | if len(p.buffer) == 0 || p.buffer[len(p.buffer) - 1] != endSymbol { | |
430 | p.buffer = append(p.buffer, endSymbol) | |
431 | } | |
432 | ||
433 | tree := tokens32{tree: make([]token32, math.MaxInt16)} | |
434 | var max token32 | |
435 | position, depth, tokenIndex, buffer, _rules := uint32(0), uint32(0), 0, p.buffer, p.rules | |
436 | ||
437 | p.Parse = func(rule ...int) error { | |
438 | r := 1 | |
439 | if len(rule) > 0 { | |
440 | r = rule[0] | |
441 | } | |
442 | matches := p.rules[r]() | |
443 | p.tokens32 = tree | |
444 | if matches { | |
445 | p.trim(tokenIndex) | |
446 | return nil | |
447 | } | |
448 | return &parseError{p, max} | |
449 | } | |
450 | ||
451 | p.Reset = func() { | |
452 | position, tokenIndex, depth = 0, 0, 0 | |
453 | } | |
454 | ||
455 | add := func(rule pegRule, begin uint32) { | |
456 | tree.Expand(tokenIndex) | |
457 | tree.Add(rule, begin, position, depth, tokenIndex) | |
458 | tokenIndex++ | |
459 | if begin != position && position > max.end { | |
460 | max = token32{rule, begin, position, depth} | |
461 | } | |
462 | } | |
463 | ||
464 | {{if .HasDot}} | |
465 | matchDot := func() bool { | |
466 | if buffer[position] != endSymbol { | |
467 | position++ | |
468 | return true | |
469 | } | |
470 | return false | |
471 | } | |
472 | {{end}} | |
473 | ||
474 | {{if .HasCharacter}} | |
475 | /*matchChar := func(c byte) bool { | |
476 | if buffer[position] == c { | |
477 | position++ | |
478 | return true | |
479 | } | |
480 | return false | |
481 | }*/ | |
482 | {{end}} | |
483 | ||
484 | {{if .HasString}} | |
485 | matchString := func(s string) bool { | |
486 | i := position | |
487 | for _, c := range s { | |
488 | if buffer[i] != c { | |
489 | return false | |
490 | } | |
491 | i++ | |
492 | } | |
493 | position = i | |
494 | return true | |
495 | } | |
496 | {{end}} | |
497 | ||
498 | {{if .HasRange}} | |
499 | /*matchRange := func(lower byte, upper byte) bool { | |
500 | if c := buffer[position]; c >= lower && c <= upper { | |
501 | position++ | |
502 | return true | |
503 | } | |
504 | return false | |
505 | }*/ | |
506 | {{end}} | |
507 | ||
508 | _rules = [...]func() bool { | |
509 | nil,` | |
510 | ||
// Type identifies the kind of a node in the parsed PEG tree.
type Type uint8

const (
	TypeUnknown Type = iota
	TypeRule
	TypeName
	TypeDot
	TypeCharacter
	TypeRange
	TypeString
	TypePredicate
	TypeStateChange
	TypeCommit
	TypeAction
	TypePackage
	TypeImport
	TypeState
	TypeAlternate
	TypeUnorderedAlternate
	TypeSequence
	TypePeekFor
	TypePeekNot
	TypeQuery
	TypeStar
	TypePlus
	TypePeg
	TypePush
	TypeImplicitPush
	TypeNil
	TypeLast
)

// TypeMap maps each Type constant to its name for debug output.
// It must list every constant above in declaration order; omitting an
// entry shifts every later name by one.
var TypeMap = [...]string{
	"TypeUnknown",
	"TypeRule",
	"TypeName",
	"TypeDot",
	"TypeCharacter",
	"TypeRange",
	"TypeString",
	"TypePredicate",
	"TypeStateChange", // was missing: names after TypePredicate printed off by one
	"TypeCommit",
	"TypeAction",
	"TypePackage",
	"TypeImport",
	"TypeState",
	"TypeAlternate",
	"TypeUnorderedAlternate",
	"TypeSequence",
	"TypePeekFor",
	"TypePeekNot",
	"TypeQuery",
	"TypeStar",
	"TypePlus",
	"TypePeg",
	"TypePush",
	"TypeImplicitPush",
	"TypeNil",
	"TypeLast"}

// GetType returns the receiver; it lets a bare Type value satisfy the
// GetType part of the Node interface.
func (t Type) GetType() Type {
	return t
}
574 | ||
// Node is the interface implemented by every element of the PEG tree.
// It combines identity (type, id, string value) with an intrusive
// singly linked child list.
type Node interface {
	fmt.Stringer
	debug()

	Escaped() string
	SetString(s string)

	GetType() Type
	SetType(t Type)

	GetId() int
	SetId(id int)

	Init()
	Front() *node
	Next() *node
	PushFront(value *node)
	PopFront() *node
	PushBack(value *node)
	Len() int
	Copy() *node
	Slice() []*node
}

// node is the concrete Node implementation: it embeds its Type and
// string value and links its children through front/back/next.
type node struct {
	Type
	string
	id int

	front  *node // first child
	back   *node // last child
	length int   // number of children

	/* use hash table here instead of Copy? */
	next *node // next sibling in the parent's child list
}
611 | ||
// String returns the node's raw string value.
func (n *node) String() string {
	return n.string
}

// debug prints the node's id, type name, and value; single-character
// values additionally get their byte value.
func (n *node) debug() {
	if len(n.string) == 1 {
		fmt.Printf("%v %v '%v' %d\n", n.id, TypeMap[n.Type], n.string, n.string[0])
	} else {
		fmt.Printf("%v %v '%v'\n", n.id, TypeMap[n.Type], n.string)
	}
}

// Escaped returns the node's value with quote/backslash escaping applied.
func (n *node) Escaped() string {
	return escape(n.string)
}

// SetString replaces the node's string value.
func (n *node) SetString(s string) {
	n.string = s
}

// SetType replaces the node's type.
func (n *node) SetType(t Type) {
	n.Type = t
}

// GetId returns the node's numeric id.
func (n *node) GetId() int {
	return n.id
}

// SetId sets the node's numeric id.
func (n *node) SetId(id int) {
	n.id = id
}

// Init clears the child list; the node's own value and its sibling
// link are left untouched.
func (n *node) Init() {
	n.front = nil
	n.back = nil
	n.length = 0
}

// Front returns the first child, or nil when the list is empty.
func (n *node) Front() *node {
	return n.front
}

// Next returns the node's next sibling, or nil.
func (n *node) Next() *node {
	return n.next
}

// PushFront prepends value to the child list.
func (n *node) PushFront(value *node) {
	if n.back == nil {
		// empty list: value becomes both front and back
		n.back = value
	} else {
		value.next = n.front
	}
	n.front = value
	n.length++
}

// PopFront removes and returns the first child; it panics when the
// list is empty.
func (n *node) PopFront() *node {
	front := n.front

	switch true {
	case front == nil:
		panic("tree is empty")
	case front == n.back:
		// last remaining child: list becomes empty
		n.front, n.back = nil, nil
	default:
		// detach front so the caller receives an isolated node
		n.front, front.next = front.next, nil
	}

	n.length--
	return front
}

// PushBack appends value to the child list.
func (n *node) PushBack(value *node) {
	if n.front == nil {
		n.front = value
	} else {
		n.back.next = value
	}
	n.back = value
	n.length++
}

// Len returns the number of children.
func (n *node) Len() (c int) {
	return n.length
}

// Copy returns a shallow copy: value and child links are shared with
// the receiver; the sibling link is deliberately not copied.
func (n *node) Copy() *node {
	return &node{Type: n.Type, string: n.string, id: n.id, front: n.front, back: n.back, length: n.length}
}
701 | ||
702 | func (n *node) Slice() []*node { | |
703 | s := make([]*node, n.length) | |
704 | for element, i := n.Front(), 0; element != nil; element, i = element.Next(), i+1 { | |
705 | s[i] = element | |
706 | } | |
707 | return s | |
708 | } | |
709 | ||
/* A tree data structure into which a PEG can be parsed. */
type Tree struct {
	Rules      map[string]Node // rule name -> rule node
	rulesCount map[string]uint // how many times each rule is referenced
	node
	inline, _switch bool // code-generation flags from the command line

	RuleNames       []Node
	Sizes           [1]int
	PackageName     string
	Imports         []string
	EndSymbol       rune
	PegRuleType     string
	StructName      string
	StructVariables string
	RulesCount      int
	Bits            int
	HasActions      bool
	Actions         []Node
	HasPush         bool
	HasCommit       bool
	HasDot          bool
	HasCharacter    bool
	HasString       bool
	HasRange        bool
}
736 | ||
737 | func New(inline, _switch bool) *Tree { | |
738 | return &Tree{Rules: make(map[string]Node), | |
739 | Sizes: [1]int{32}, | |
740 | rulesCount: make(map[string]uint), | |
741 | inline: inline, | |
742 | _switch: _switch} | |
743 | } | |
744 | ||
745 | func (t *Tree) AddRule(name string) { | |
746 | t.PushFront(&node{Type: TypeRule, string: name, id: t.RulesCount}) | |
747 | t.RulesCount++ | |
748 | } | |
749 | ||
750 | func (t *Tree) AddExpression() { | |
751 | expression := t.PopFront() | |
752 | rule := t.PopFront() | |
753 | rule.PushBack(expression) | |
754 | t.PushBack(rule) | |
755 | } | |
756 | ||
757 | func (t *Tree) AddName(text string) { | |
758 | t.PushFront(&node{Type: TypeName, string: text}) | |
759 | } | |
760 | ||
761 | func (t *Tree) AddDot() { t.PushFront(&node{Type: TypeDot, string: "."}) } | |
762 | func (t *Tree) AddCharacter(text string) { | |
763 | t.PushFront(&node{Type: TypeCharacter, string: text}) | |
764 | } | |
765 | func (t *Tree) AddDoubleCharacter(text string) { | |
766 | t.PushFront(&node{Type: TypeCharacter, string: strings.ToLower(text)}) | |
767 | t.PushFront(&node{Type: TypeCharacter, string: strings.ToUpper(text)}) | |
768 | t.AddAlternate() | |
769 | } | |
770 | func (t *Tree) AddHexaCharacter(text string) { | |
771 | hexa, _ := strconv.ParseInt(text, 16, 32) | |
772 | t.PushFront(&node{Type: TypeCharacter, string: string(hexa)}) | |
773 | } | |
774 | func (t *Tree) AddOctalCharacter(text string) { | |
775 | octal, _ := strconv.ParseInt(text, 8, 8) | |
776 | t.PushFront(&node{Type: TypeCharacter, string: string(octal)}) | |
777 | } | |
778 | func (t *Tree) AddPredicate(text string) { t.PushFront(&node{Type: TypePredicate, string: text}) } | |
779 | func (t *Tree) AddStateChange(text string) { t.PushFront(&node{Type: TypeStateChange, string: text}) } | |
780 | func (t *Tree) AddNil() { t.PushFront(&node{Type: TypeNil, string: "<nil>"}) } | |
781 | func (t *Tree) AddAction(text string) { t.PushFront(&node{Type: TypeAction, string: text}) } | |
782 | func (t *Tree) AddPackage(text string) { t.PushBack(&node{Type: TypePackage, string: text}) } | |
783 | func (t *Tree) AddImport(text string) { t.PushBack(&node{Type: TypeImport, string: text}) } | |
784 | func (t *Tree) AddState(text string) { | |
785 | peg := t.PopFront() | |
786 | peg.PushBack(&node{Type: TypeState, string: text}) | |
787 | t.PushBack(peg) | |
788 | } | |
789 | ||
790 | func (t *Tree) addList(listType Type) { | |
791 | a := t.PopFront() | |
792 | b := t.PopFront() | |
793 | var l *node | |
794 | if b.GetType() == listType { | |
795 | l = b | |
796 | } else { | |
797 | l = &node{Type: listType} | |
798 | l.PushBack(b) | |
799 | } | |
800 | l.PushBack(a) | |
801 | t.PushFront(l) | |
802 | } | |
803 | func (t *Tree) AddAlternate() { t.addList(TypeAlternate) } | |
804 | func (t *Tree) AddSequence() { t.addList(TypeSequence) } | |
805 | func (t *Tree) AddRange() { t.addList(TypeRange) } | |
806 | func (t *Tree) AddDoubleRange() { | |
807 | a := t.PopFront() | |
808 | b := t.PopFront() | |
809 | ||
810 | t.AddCharacter(strings.ToLower(b.String())) | |
811 | t.AddCharacter(strings.ToLower(a.String())) | |
812 | t.addList(TypeRange) | |
813 | ||
814 | t.AddCharacter(strings.ToUpper(b.String())) | |
815 | t.AddCharacter(strings.ToUpper(a.String())) | |
816 | t.addList(TypeRange) | |
817 | ||
818 | t.AddAlternate() | |
819 | } | |
820 | ||
821 | func (t *Tree) addFix(fixType Type) { | |
822 | n := &node{Type: fixType} | |
823 | n.PushBack(t.PopFront()) | |
824 | t.PushFront(n) | |
825 | } | |
826 | func (t *Tree) AddPeekFor() { t.addFix(TypePeekFor) } | |
827 | func (t *Tree) AddPeekNot() { t.addFix(TypePeekNot) } | |
828 | func (t *Tree) AddQuery() { t.addFix(TypeQuery) } | |
829 | func (t *Tree) AddStar() { t.addFix(TypeStar) } | |
830 | func (t *Tree) AddPlus() { t.addFix(TypePlus) } | |
831 | func (t *Tree) AddPush() { t.addFix(TypePush) } | |
832 | ||
833 | func (t *Tree) AddPeg(text string) { t.PushFront(&node{Type: TypePeg, string: text}) } | |
834 | ||
// join runs every task in its own goroutine and blocks until all of
// them have finished. A nil or empty task list returns immediately;
// the original accumulating loop (`for d := <-done; d < length; ...`)
// always performed at least one receive and so deadlocked on an empty
// list.
func join(tasks []func()) {
	done := make(chan int, len(tasks))
	for _, task := range tasks {
		// pass task explicitly so each goroutine runs its own task
		go func(task func()) { task(); done <- 1 }(task)
	}
	// exactly one receive per task: when the loop exits, all have finished
	for range tasks {
		<-done
	}
}
844 | ||
// escape converts a raw character value into the form embedded in the
// generated parser's single-quoted literals: a single quote gains a
// backslash, a double quote stays bare, and everything else is escaped
// as strconv.Quote would, minus the surrounding double quotes.
func escape(c string) string {
	switch c {
	case "'":
		return `\'`
	case "\"":
		return "\""
	}
	quoted := strconv.Quote(c)
	return quoted[1 : len(quoted)-1]
}
856 | ||
857 | func (t *Tree) Compile(file string, out io.Writer) { | |
858 | t.AddImport("fmt") | |
859 | t.AddImport("math") | |
860 | t.AddImport("sort") | |
861 | t.AddImport("strconv") | |
862 | t.EndSymbol = 0x110000 | |
863 | t.RulesCount++ | |
864 | ||
865 | counts := [TypeLast]uint{} | |
866 | { | |
867 | var rule *node | |
868 | var link func(node Node) | |
869 | link = func(n Node) { | |
870 | nodeType := n.GetType() | |
871 | id := counts[nodeType] | |
872 | counts[nodeType]++ | |
873 | switch nodeType { | |
874 | case TypeAction: | |
875 | n.SetId(int(id)) | |
876 | copy, name := n.Copy(), fmt.Sprintf("Action%v", id) | |
877 | t.Actions = append(t.Actions, copy) | |
878 | n.Init() | |
879 | n.SetType(TypeName) | |
880 | n.SetString(name) | |
881 | n.SetId(t.RulesCount) | |
882 | ||
883 | emptyRule := &node{Type: TypeRule, string: name, id: t.RulesCount} | |
884 | implicitPush := &node{Type: TypeImplicitPush} | |
885 | emptyRule.PushBack(implicitPush) | |
886 | implicitPush.PushBack(copy) | |
887 | implicitPush.PushBack(emptyRule.Copy()) | |
888 | t.PushBack(emptyRule) | |
889 | t.RulesCount++ | |
890 | ||
891 | t.Rules[name] = emptyRule | |
892 | t.RuleNames = append(t.RuleNames, emptyRule) | |
893 | case TypeName: | |
894 | name := n.String() | |
895 | if _, ok := t.Rules[name]; !ok { | |
896 | emptyRule := &node{Type: TypeRule, string: name, id: t.RulesCount} | |
897 | implicitPush := &node{Type: TypeImplicitPush} | |
898 | emptyRule.PushBack(implicitPush) | |
899 | implicitPush.PushBack(&node{Type: TypeNil, string: "<nil>"}) | |
900 | implicitPush.PushBack(emptyRule.Copy()) | |
901 | t.PushBack(emptyRule) | |
902 | t.RulesCount++ | |
903 | ||
904 | t.Rules[name] = emptyRule | |
905 | t.RuleNames = append(t.RuleNames, emptyRule) | |
906 | } | |
907 | case TypePush: | |
908 | copy, name := rule.Copy(), "PegText" | |
909 | copy.SetString(name) | |
910 | if _, ok := t.Rules[name]; !ok { | |
911 | emptyRule := &node{Type: TypeRule, string: name, id: t.RulesCount} | |
912 | emptyRule.PushBack(&node{Type: TypeNil, string: "<nil>"}) | |
913 | t.PushBack(emptyRule) | |
914 | t.RulesCount++ | |
915 | ||
916 | t.Rules[name] = emptyRule | |
917 | t.RuleNames = append(t.RuleNames, emptyRule) | |
918 | } | |
919 | n.PushBack(copy) | |
920 | fallthrough | |
921 | case TypeImplicitPush: | |
922 | link(n.Front()) | |
923 | case TypeRule, TypeAlternate, TypeUnorderedAlternate, TypeSequence, | |
924 | TypePeekFor, TypePeekNot, TypeQuery, TypeStar, TypePlus: | |
925 | for _, node := range n.Slice() { | |
926 | link(node) | |
927 | } | |
928 | } | |
929 | } | |
930 | /* first pass */ | |
931 | for _, node := range t.Slice() { | |
932 | switch node.GetType() { | |
933 | case TypePackage: | |
934 | t.PackageName = node.String() | |
935 | case TypeImport: | |
936 | t.Imports = append(t.Imports, node.String()) | |
937 | case TypePeg: | |
938 | t.StructName = node.String() | |
939 | t.StructVariables = node.Front().String() | |
940 | case TypeRule: | |
941 | if _, ok := t.Rules[node.String()]; !ok { | |
942 | expression := node.Front() | |
943 | copy := expression.Copy() | |
944 | expression.Init() | |
945 | expression.SetType(TypeImplicitPush) | |
946 | expression.PushBack(copy) | |
947 | expression.PushBack(node.Copy()) | |
948 | ||
949 | t.Rules[node.String()] = node | |
950 | t.RuleNames = append(t.RuleNames, node) | |
951 | } | |
952 | } | |
953 | } | |
954 | /* second pass */ | |
955 | for _, node := range t.Slice() { | |
956 | if node.GetType() == TypeRule { | |
957 | rule = node | |
958 | link(node) | |
959 | } | |
960 | } | |
961 | } | |
962 | ||
963 | join([]func(){ | |
964 | func() { | |
965 | var countRules func(node Node) | |
966 | ruleReached := make([]bool, t.RulesCount) | |
967 | countRules = func(node Node) { | |
968 | switch node.GetType() { | |
969 | case TypeRule: | |
970 | name, id := node.String(), node.GetId() | |
971 | if count, ok := t.rulesCount[name]; ok { | |
972 | t.rulesCount[name] = count + 1 | |
973 | } else { | |
974 | t.rulesCount[name] = 1 | |
975 | } | |
976 | if ruleReached[id] { | |
977 | return | |
978 | } | |
979 | ruleReached[id] = true | |
980 | countRules(node.Front()) | |
981 | case TypeName: | |
982 | countRules(t.Rules[node.String()]) | |
983 | case TypeImplicitPush, TypePush: | |
984 | countRules(node.Front()) | |
985 | case TypeAlternate, TypeUnorderedAlternate, TypeSequence, | |
986 | TypePeekFor, TypePeekNot, TypeQuery, TypeStar, TypePlus: | |
987 | for _, element := range node.Slice() { | |
988 | countRules(element) | |
989 | } | |
990 | } | |
991 | } | |
992 | for _, node := range t.Slice() { | |
993 | if node.GetType() == TypeRule { | |
994 | countRules(node) | |
995 | break | |
996 | } | |
997 | } | |
998 | }, | |
999 | func() { | |
1000 | var checkRecursion func(node Node) bool | |
1001 | ruleReached := make([]bool, t.RulesCount) | |
1002 | checkRecursion = func(node Node) bool { | |
1003 | switch node.GetType() { | |
1004 | case TypeRule: | |
1005 | id := node.GetId() | |
1006 | if ruleReached[id] { | |
1007 | fmt.Fprintf(os.Stderr, "possible infinite left recursion in rule '%v'\n", node) | |
1008 | return false | |
1009 | } | |
1010 | ruleReached[id] = true | |
1011 | consumes := checkRecursion(node.Front()) | |
1012 | ruleReached[id] = false | |
1013 | return consumes | |
1014 | case TypeAlternate: | |
1015 | for _, element := range node.Slice() { | |
1016 | if !checkRecursion(element) { | |
1017 | return false | |
1018 | } | |
1019 | } | |
1020 | return true | |
1021 | case TypeSequence: | |
1022 | for _, element := range node.Slice() { | |
1023 | if checkRecursion(element) { | |
1024 | return true | |
1025 | } | |
1026 | } | |
1027 | case TypeName: | |
1028 | return checkRecursion(t.Rules[node.String()]) | |
1029 | case TypePlus, TypePush, TypeImplicitPush: | |
1030 | return checkRecursion(node.Front()) | |
1031 | case TypeCharacter, TypeString: | |
1032 | return len(node.String()) > 0 | |
1033 | case TypeDot, TypeRange: | |
1034 | return true | |
1035 | } | |
1036 | return false | |
1037 | } | |
1038 | for _, node := range t.Slice() { | |
1039 | if node.GetType() == TypeRule { | |
1040 | checkRecursion(node) | |
1041 | } | |
1042 | } | |
1043 | }}) | |
1044 | ||
1045 | if t._switch { | |
1046 | var optimizeAlternates func(node Node) (consumes bool, s jetset.Set) | |
1047 | cache, firstPass := make([]struct { | |
1048 | reached, consumes bool | |
1049 | s jetset.Set | |
1050 | }, t.RulesCount), true | |
1051 | optimizeAlternates = func(n Node) (consumes bool, s jetset.Set) { | |
1052 | /*n.debug()*/ | |
1053 | switch n.GetType() { | |
1054 | case TypeRule: | |
1055 | cache := &cache[n.GetId()] | |
1056 | if cache.reached { | |
1057 | consumes, s = cache.consumes, cache.s | |
1058 | return | |
1059 | } | |
1060 | ||
1061 | cache.reached = true | |
1062 | consumes, s = optimizeAlternates(n.Front()) | |
1063 | cache.consumes, cache.s = consumes, s | |
1064 | case TypeName: | |
1065 | consumes, s = optimizeAlternates(t.Rules[n.String()]) | |
1066 | case TypeDot: | |
1067 | consumes = true | |
1068 | /* TypeDot set doesn't include the EndSymbol */ | |
1069 | s = s.Add(uint64(t.EndSymbol)) | |
1070 | s = s.Complement(uint64(t.EndSymbol)) | |
1071 | case TypeString, TypeCharacter: | |
1072 | consumes = true | |
1073 | s = s.Add(uint64([]rune(n.String())[0])) | |
1074 | case TypeRange: | |
1075 | consumes = true | |
1076 | element := n.Front() | |
1077 | lower := []rune(element.String())[0] | |
1078 | element = element.Next() | |
1079 | upper := []rune(element.String())[0] | |
1080 | s = s.AddRange(uint64(lower), uint64(upper)) | |
1081 | case TypeAlternate: | |
1082 | consumes = true | |
1083 | mconsumes, properties, c := | |
1084 | consumes, make([]struct { | |
1085 | intersects bool | |
1086 | s jetset.Set | |
1087 | }, n.Len()), 0 | |
1088 | for _, element := range n.Slice() { | |
1089 | mconsumes, properties[c].s = optimizeAlternates(element) | |
1090 | consumes = consumes && mconsumes | |
1091 | s = s.Union(properties[c].s) | |
1092 | c++ | |
1093 | } | |
1094 | ||
1095 | if firstPass { | |
1096 | break | |
1097 | } | |
1098 | ||
1099 | intersections := 2 | |
1100 | compare: | |
1101 | for ai, a := range properties[0 : len(properties)-1] { | |
1102 | for _, b := range properties[ai+1:] { | |
1103 | if a.s.Intersects(b.s) { | |
1104 | intersections++ | |
1105 | properties[ai].intersects = true | |
1106 | continue compare | |
1107 | } | |
1108 | } | |
1109 | } | |
1110 | if intersections >= len(properties) { | |
1111 | break | |
1112 | } | |
1113 | ||
1114 | c, unordered, ordered, max := | |
1115 | 0, &node{Type: TypeUnorderedAlternate}, &node{Type: TypeAlternate}, 0 | |
1116 | for _, element := range n.Slice() { | |
1117 | if properties[c].intersects { | |
1118 | ordered.PushBack(element.Copy()) | |
1119 | } else { | |
1120 | class := &node{Type: TypeUnorderedAlternate} | |
1121 | for d := 0; d < 256; d++ { | |
1122 | if properties[c].s.Has(uint64(d)) { | |
1123 | class.PushBack(&node{Type: TypeCharacter, string: string(d)}) | |
1124 | } | |
1125 | } | |
1126 | ||
1127 | sequence, predicate, length := | |
1128 | &node{Type: TypeSequence}, &node{Type: TypePeekFor}, properties[c].s.Len() | |
1129 | if length == 0 { | |
1130 | class.PushBack(&node{Type: TypeNil, string: "<nil>"}) | |
1131 | } | |
1132 | predicate.PushBack(class) | |
1133 | sequence.PushBack(predicate) | |
1134 | sequence.PushBack(element.Copy()) | |
1135 | ||
1136 | if element.GetType() == TypeNil { | |
1137 | unordered.PushBack(sequence) | |
1138 | } else if length > max { | |
1139 | unordered.PushBack(sequence) | |
1140 | max = length | |
1141 | } else { | |
1142 | unordered.PushFront(sequence) | |
1143 | } | |
1144 | } | |
1145 | c++ | |
1146 | } | |
1147 | n.Init() | |
1148 | if ordered.Front() == nil { | |
1149 | n.SetType(TypeUnorderedAlternate) | |
1150 | for _, element := range unordered.Slice() { | |
1151 | n.PushBack(element.Copy()) | |
1152 | } | |
1153 | } else { | |
1154 | for _, element := range ordered.Slice() { | |
1155 | n.PushBack(element.Copy()) | |
1156 | } | |
1157 | n.PushBack(unordered) | |
1158 | } | |
1159 | case TypeSequence: | |
1160 | classes, elements := | |
1161 | make([]struct { | |
1162 | s jetset.Set | |
1163 | }, n.Len()), n.Slice() | |
1164 | ||
1165 | for c, element := range elements { | |
1166 | consumes, classes[c].s = optimizeAlternates(element) | |
1167 | if consumes { | |
1168 | elements, classes = elements[c+1:], classes[:c+1] | |
1169 | break | |
1170 | } | |
1171 | } | |
1172 | ||
1173 | for c := len(classes) - 1; c >= 0; c-- { | |
1174 | s = s.Union(classes[c].s) | |
1175 | } | |
1176 | ||
1177 | for _, element := range elements { | |
1178 | optimizeAlternates(element) | |
1179 | } | |
1180 | case TypePeekNot, TypePeekFor: | |
1181 | optimizeAlternates(n.Front()) | |
1182 | case TypeQuery, TypeStar: | |
1183 | _, s = optimizeAlternates(n.Front()) | |
1184 | case TypePlus, TypePush, TypeImplicitPush: | |
1185 | consumes, s = optimizeAlternates(n.Front()) | |
1186 | case TypeAction, TypeNil: | |
1187 | //empty | |
1188 | } | |
1189 | return | |
1190 | } | |
1191 | for _, element := range t.Slice() { | |
1192 | if element.GetType() == TypeRule { | |
1193 | optimizeAlternates(element) | |
1194 | break | |
1195 | } | |
1196 | } | |
1197 | ||
1198 | for i, _ := range cache { | |
1199 | cache[i].reached = false | |
1200 | } | |
1201 | firstPass = false | |
1202 | for _, element := range t.Slice() { | |
1203 | if element.GetType() == TypeRule { | |
1204 | optimizeAlternates(element) | |
1205 | break | |
1206 | } | |
1207 | } | |
1208 | } | |
1209 | ||
1210 | var buffer bytes.Buffer | |
1211 | defer func() { | |
1212 | fileSet := token.NewFileSet() | |
1213 | code, error := parser.ParseFile(fileSet, file, &buffer, parser.ParseComments) | |
1214 | if error != nil { | |
1215 | buffer.WriteTo(out) | |
1216 | fmt.Printf("%v: %v\n", file, error) | |
1217 | return | |
1218 | } | |
1219 | formatter := printer.Config{Mode: printer.TabIndent | printer.UseSpaces, Tabwidth: 8} | |
1220 | error = formatter.Fprint(out, fileSet, code) | |
1221 | if error != nil { | |
1222 | buffer.WriteTo(out) | |
1223 | fmt.Printf("%v: %v\n", file, error) | |
1224 | return | |
1225 | } | |
1226 | ||
1227 | }() | |
1228 | ||
1229 | _print := func(format string, a ...interface{}) { fmt.Fprintf(&buffer, format, a...) } | |
1230 | printSave := func(n uint) { _print("\n position%d, tokenIndex%d, depth%d := position, tokenIndex, depth", n, n, n) } | |
1231 | printRestore := func(n uint) { _print("\n position, tokenIndex, depth = position%d, tokenIndex%d, depth%d", n, n, n) } | |
1232 | printTemplate := func(s string) { | |
1233 | if error := template.Must(template.New("peg").Parse(s)).Execute(&buffer, t); error != nil { | |
1234 | panic(error) | |
1235 | } | |
1236 | } | |
1237 | ||
1238 | t.HasActions = counts[TypeAction] > 0 | |
1239 | t.HasPush = counts[TypePush] > 0 | |
1240 | t.HasCommit = counts[TypeCommit] > 0 | |
1241 | t.HasDot = counts[TypeDot] > 0 | |
1242 | t.HasCharacter = counts[TypeCharacter] > 0 | |
1243 | t.HasString = counts[TypeString] > 0 | |
1244 | t.HasRange = counts[TypeRange] > 0 | |
1245 | ||
1246 | var printRule func(n Node) | |
1247 | var compile func(expression Node, ko uint) | |
1248 | var label uint | |
1249 | labels := make(map[uint]bool) | |
1250 | printBegin := func() { _print("\n {") } | |
1251 | printEnd := func() { _print("\n }") } | |
1252 | printLabel := func(n uint) { | |
1253 | _print("\n") | |
1254 | if labels[n] { | |
1255 | _print(" l%d:\t", n) | |
1256 | } | |
1257 | } | |
1258 | printJump := func(n uint) { | |
1259 | _print("\n goto l%d", n) | |
1260 | labels[n] = true | |
1261 | } | |
1262 | printRule = func(n Node) { | |
1263 | switch n.GetType() { | |
1264 | case TypeRule: | |
1265 | _print("%v <- ", n) | |
1266 | printRule(n.Front()) | |
1267 | case TypeDot: | |
1268 | _print(".") | |
1269 | case TypeName: | |
1270 | _print("%v", n) | |
1271 | case TypeCharacter: | |
1272 | _print("'%v'", escape(n.String())) | |
1273 | case TypeString: | |
1274 | s := escape(n.String()) | |
1275 | _print("'%v'", s[1:len(s)-1]) | |
1276 | case TypeRange: | |
1277 | element := n.Front() | |
1278 | lower := element | |
1279 | element = element.Next() | |
1280 | upper := element | |
1281 | _print("[%v-%v]", escape(lower.String()), escape(upper.String())) | |
1282 | case TypePredicate: | |
1283 | _print("&{%v}", n) | |
1284 | case TypeStateChange: | |
1285 | _print("!{%v}", n) | |
1286 | case TypeAction: | |
1287 | _print("{%v}", n) | |
1288 | case TypeCommit: | |
1289 | _print("commit") | |
1290 | case TypeAlternate: | |
1291 | _print("(") | |
1292 | elements := n.Slice() | |
1293 | printRule(elements[0]) | |
1294 | for _, element := range elements[1:] { | |
1295 | _print(" / ") | |
1296 | printRule(element) | |
1297 | } | |
1298 | _print(")") | |
1299 | case TypeUnorderedAlternate: | |
1300 | _print("(") | |
1301 | elements := n.Slice() | |
1302 | printRule(elements[0]) | |
1303 | for _, element := range elements[1:] { | |
1304 | _print(" | ") | |
1305 | printRule(element) | |
1306 | } | |
1307 | _print(")") | |
1308 | case TypeSequence: | |
1309 | _print("(") | |
1310 | elements := n.Slice() | |
1311 | printRule(elements[0]) | |
1312 | for _, element := range elements[1:] { | |
1313 | _print(" ") | |
1314 | printRule(element) | |
1315 | } | |
1316 | _print(")") | |
1317 | case TypePeekFor: | |
1318 | _print("&") | |
1319 | printRule(n.Front()) | |
1320 | case TypePeekNot: | |
1321 | _print("!") | |
1322 | printRule(n.Front()) | |
1323 | case TypeQuery: | |
1324 | printRule(n.Front()) | |
1325 | _print("?") | |
1326 | case TypeStar: | |
1327 | printRule(n.Front()) | |
1328 | _print("*") | |
1329 | case TypePlus: | |
1330 | printRule(n.Front()) | |
1331 | _print("+") | |
1332 | case TypePush, TypeImplicitPush: | |
1333 | _print("<") | |
1334 | printRule(n.Front()) | |
1335 | _print(">") | |
1336 | case TypeNil: | |
1337 | default: | |
1338 | fmt.Fprintf(os.Stderr, "illegal node type: %v\n", n.GetType()) | |
1339 | } | |
1340 | } | |
1341 | compile = func(n Node, ko uint) { | |
1342 | switch n.GetType() { | |
1343 | case TypeRule: | |
1344 | fmt.Fprintf(os.Stderr, "internal error #1 (%v)\n", n) | |
1345 | case TypeDot: | |
1346 | _print("\n if !matchDot() {") | |
1347 | /*print("\n if buffer[position] == endSymbol {")*/ | |
1348 | printJump(ko) | |
1349 | /*print("}\nposition++")*/ | |
1350 | _print("}") | |
1351 | case TypeName: | |
1352 | name := n.String() | |
1353 | rule := t.Rules[name] | |
1354 | if t.inline && t.rulesCount[name] == 1 { | |
1355 | compile(rule.Front(), ko) | |
1356 | return | |
1357 | } | |
1358 | _print("\n if !_rules[rule%v]() {", name /*rule.GetId()*/) | |
1359 | printJump(ko) | |
1360 | _print("}") | |
1361 | case TypeRange: | |
1362 | element := n.Front() | |
1363 | lower := element | |
1364 | element = element.Next() | |
1365 | upper := element | |
1366 | /*print("\n if !matchRange('%v', '%v') {", escape(lower.String()), escape(upper.String()))*/ | |
1367 | _print("\n if c := buffer[position]; c < rune('%v') || c > rune('%v') {", escape(lower.String()), escape(upper.String())) | |
1368 | printJump(ko) | |
1369 | _print("}\nposition++") | |
1370 | case TypeCharacter: | |
1371 | /*print("\n if !matchChar('%v') {", escape(n.String()))*/ | |
1372 | _print("\n if buffer[position] != rune('%v') {", escape(n.String())) | |
1373 | printJump(ko) | |
1374 | _print("}\nposition++") | |
1375 | case TypeString: | |
1376 | _print("\n if !matchString(%v) {", strconv.Quote(n.String())) | |
1377 | printJump(ko) | |
1378 | _print("}") | |
1379 | case TypePredicate: | |
1380 | _print("\n if !(%v) {", n) | |
1381 | printJump(ko) | |
1382 | _print("}") | |
1383 | case TypeStateChange: | |
1384 | _print("\n %v", n) | |
1385 | case TypeAction: | |
1386 | case TypeCommit: | |
1387 | case TypePush: | |
1388 | fallthrough | |
1389 | case TypeImplicitPush: | |
1390 | ok, element := label, n.Front() | |
1391 | label++ | |
1392 | nodeType, rule := element.GetType(), element.Next() | |
1393 | printBegin() | |
1394 | if nodeType == TypeAction { | |
1395 | _print("\nadd(rule%v, position)", rule) | |
1396 | } else { | |
1397 | _print("\nposition%d := position", ok) | |
1398 | _print("\ndepth++") | |
1399 | compile(element, ko) | |
1400 | _print("\ndepth--") | |
1401 | _print("\nadd(rule%v, position%d)", rule, ok) | |
1402 | } | |
1403 | printEnd() | |
1404 | case TypeAlternate: | |
1405 | ok := label | |
1406 | label++ | |
1407 | printBegin() | |
1408 | elements := n.Slice() | |
1409 | printSave(ok) | |
1410 | for _, element := range elements[:len(elements)-1] { | |
1411 | next := label | |
1412 | label++ | |
1413 | compile(element, next) | |
1414 | printJump(ok) | |
1415 | printLabel(next) | |
1416 | printRestore(ok) | |
1417 | } | |
1418 | compile(elements[len(elements)-1], ko) | |
1419 | printEnd() | |
1420 | printLabel(ok) | |
1421 | case TypeUnorderedAlternate: | |
1422 | done, ok := ko, label | |
1423 | label++ | |
1424 | printBegin() | |
1425 | _print("\n switch buffer[position] {") | |
1426 | elements := n.Slice() | |
1427 | elements, last := elements[:len(elements)-1], elements[len(elements)-1].Front().Next() | |
1428 | for _, element := range elements { | |
1429 | sequence := element.Front() | |
1430 | class := sequence.Front() | |
1431 | sequence = sequence.Next() | |
1432 | _print("\n case") | |
1433 | comma := false | |
1434 | for _, character := range class.Slice() { | |
1435 | if comma { | |
1436 | _print(",") | |
1437 | } else { | |
1438 | comma = true | |
1439 | } | |
1440 | _print(" '%s'", escape(character.String())) | |
1441 | } | |
1442 | _print(":") | |
1443 | compile(sequence, done) | |
1444 | _print("\nbreak") | |
1445 | } | |
1446 | _print("\n default:") | |
1447 | compile(last, done) | |
1448 | _print("\nbreak") | |
1449 | _print("\n }") | |
1450 | printEnd() | |
1451 | printLabel(ok) | |
1452 | case TypeSequence: | |
1453 | for _, element := range n.Slice() { | |
1454 | compile(element, ko) | |
1455 | } | |
1456 | case TypePeekFor: | |
1457 | ok := label | |
1458 | label++ | |
1459 | printBegin() | |
1460 | printSave(ok) | |
1461 | compile(n.Front(), ko) | |
1462 | printRestore(ok) | |
1463 | printEnd() | |
1464 | case TypePeekNot: | |
1465 | ok := label | |
1466 | label++ | |
1467 | printBegin() | |
1468 | printSave(ok) | |
1469 | compile(n.Front(), ok) | |
1470 | printJump(ko) | |
1471 | printLabel(ok) | |
1472 | printRestore(ok) | |
1473 | printEnd() | |
1474 | case TypeQuery: | |
1475 | qko := label | |
1476 | label++ | |
1477 | qok := label | |
1478 | label++ | |
1479 | printBegin() | |
1480 | printSave(qko) | |
1481 | compile(n.Front(), qko) | |
1482 | printJump(qok) | |
1483 | printLabel(qko) | |
1484 | printRestore(qko) | |
1485 | printEnd() | |
1486 | printLabel(qok) | |
1487 | case TypeStar: | |
1488 | again := label | |
1489 | label++ | |
1490 | out := label | |
1491 | label++ | |
1492 | printLabel(again) | |
1493 | printBegin() | |
1494 | printSave(out) | |
1495 | compile(n.Front(), out) | |
1496 | printJump(again) | |
1497 | printLabel(out) | |
1498 | printRestore(out) | |
1499 | printEnd() | |
1500 | case TypePlus: | |
1501 | again := label | |
1502 | label++ | |
1503 | out := label | |
1504 | label++ | |
1505 | compile(n.Front(), ko) | |
1506 | printLabel(again) | |
1507 | printBegin() | |
1508 | printSave(out) | |
1509 | compile(n.Front(), out) | |
1510 | printJump(again) | |
1511 | printLabel(out) | |
1512 | printRestore(out) | |
1513 | printEnd() | |
1514 | case TypeNil: | |
1515 | default: | |
1516 | fmt.Fprintf(os.Stderr, "illegal node type: %v\n", n.GetType()) | |
1517 | } | |
1518 | } | |
1519 | ||
1520 | /* lets figure out which jump labels are going to be used with this dry compile */ | |
1521 | printTemp, _print := _print, func(format string, a ...interface{}) {} | |
1522 | for _, element := range t.Slice() { | |
1523 | if element.GetType() != TypeRule { | |
1524 | continue | |
1525 | } | |
1526 | expression := element.Front() | |
1527 | if expression.GetType() == TypeNil { | |
1528 | continue | |
1529 | } | |
1530 | ko := label | |
1531 | label++ | |
1532 | if count, ok := t.rulesCount[element.String()]; !ok { | |
1533 | continue | |
1534 | } else if t.inline && count == 1 && ko != 0 { | |
1535 | continue | |
1536 | } | |
1537 | compile(expression, ko) | |
1538 | } | |
1539 | _print, label = printTemp, 0 | |
1540 | ||
1541 | /* now for the real compile pass */ | |
1542 | t.PegRuleType = "uint8" | |
1543 | if length := int64(t.Len()); length > math.MaxUint32 { | |
1544 | t.PegRuleType = "uint64" | |
1545 | } else if length > math.MaxUint16 { | |
1546 | t.PegRuleType = "uint32" | |
1547 | } else if length > math.MaxUint8 { | |
1548 | t.PegRuleType = "uint16" | |
1549 | } | |
1550 | printTemplate(pegHeaderTemplate) | |
1551 | for _, element := range t.Slice() { | |
1552 | if element.GetType() != TypeRule { | |
1553 | continue | |
1554 | } | |
1555 | expression := element.Front() | |
1556 | if implicit := expression.Front(); expression.GetType() == TypeNil || implicit.GetType() == TypeNil { | |
1557 | if element.String() != "PegText" { | |
1558 | fmt.Fprintf(os.Stderr, "rule '%v' used but not defined\n", element) | |
1559 | } | |
1560 | _print("\n nil,") | |
1561 | continue | |
1562 | } | |
1563 | ko := label | |
1564 | label++ | |
1565 | _print("\n /* %v ", element.GetId()) | |
1566 | printRule(element) | |
1567 | _print(" */") | |
1568 | if count, ok := t.rulesCount[element.String()]; !ok { | |
1569 | fmt.Fprintf(os.Stderr, "rule '%v' defined but not used\n", element) | |
1570 | _print("\n nil,") | |
1571 | continue | |
1572 | } else if t.inline && count == 1 && ko != 0 { | |
1573 | _print("\n nil,") | |
1574 | continue | |
1575 | } | |
1576 | _print("\n func() bool {") | |
1577 | if labels[ko] { | |
1578 | printSave(ko) | |
1579 | } | |
1580 | compile(expression, ko) | |
1581 | //print("\n fmt.Printf(\"%v\\n\")", element.String()) | |
1582 | _print("\n return true") | |
1583 | if labels[ko] { | |
1584 | printLabel(ko) | |
1585 | printRestore(ko) | |
1586 | _print("\n return false") | |
1587 | } | |
1588 | _print("\n },") | |
1589 | } | |
1590 | _print("\n }\n p.rules = _rules") | |
1591 | _print("\n}\n") | |
1592 | } |
9 | 9 | |
10 | 10 | package main |
11 | 11 | |
12 | import "github.com/pointlander/peg/tree" | |
13 | ||
12 | 14 | # parser declaration |
13 | 15 | |
14 | 16 | type Peg Peg { |
15 | *Tree | |
17 | *tree.Tree | |
16 | 18 | } |
17 | 19 | |
18 | 20 | # Hierarchical syntax |
0 | package main | |
1 | ||
2 | // Code generated by ./peg -inline -switch peg.peg DO NOT EDIT | |
3 | ||
4 | import ( | |
5 | "fmt" | |
6 | "github.com/pointlander/peg/tree" | |
7 | "io" | |
8 | "os" | |
9 | "sort" | |
10 | "strconv" | |
11 | ) | |
12 | ||
13 | const endSymbol rune = 1114112 | |
14 | ||
15 | /* The rule types inferred from the grammar are below. */ | |
16 | type pegRule uint8 | |
17 | ||
18 | const ( | |
19 | ruleUnknown pegRule = iota | |
20 | ruleGrammar | |
21 | ruleImport | |
22 | ruleDefinition | |
23 | ruleExpression | |
24 | ruleSequence | |
25 | rulePrefix | |
26 | ruleSuffix | |
27 | rulePrimary | |
28 | ruleIdentifier | |
29 | ruleIdentStart | |
30 | ruleIdentCont | |
31 | ruleLiteral | |
32 | ruleClass | |
33 | ruleRanges | |
34 | ruleDoubleRanges | |
35 | ruleRange | |
36 | ruleDoubleRange | |
37 | ruleChar | |
38 | ruleDoubleChar | |
39 | ruleEscape | |
40 | ruleLeftArrow | |
41 | ruleSlash | |
42 | ruleAnd | |
43 | ruleNot | |
44 | ruleQuestion | |
45 | ruleStar | |
46 | rulePlus | |
47 | ruleOpen | |
48 | ruleClose | |
49 | ruleDot | |
50 | ruleSpaceComment | |
51 | ruleSpacing | |
52 | ruleMustSpacing | |
53 | ruleComment | |
54 | ruleSpace | |
55 | ruleEndOfLine | |
56 | ruleEndOfFile | |
57 | ruleAction | |
58 | ruleActionBody | |
59 | ruleBegin | |
60 | ruleEnd | |
61 | ruleAction0 | |
62 | ruleAction1 | |
63 | ruleAction2 | |
64 | rulePegText | |
65 | ruleAction3 | |
66 | ruleAction4 | |
67 | ruleAction5 | |
68 | ruleAction6 | |
69 | ruleAction7 | |
70 | ruleAction8 | |
71 | ruleAction9 | |
72 | ruleAction10 | |
73 | ruleAction11 | |
74 | ruleAction12 | |
75 | ruleAction13 | |
76 | ruleAction14 | |
77 | ruleAction15 | |
78 | ruleAction16 | |
79 | ruleAction17 | |
80 | ruleAction18 | |
81 | ruleAction19 | |
82 | ruleAction20 | |
83 | ruleAction21 | |
84 | ruleAction22 | |
85 | ruleAction23 | |
86 | ruleAction24 | |
87 | ruleAction25 | |
88 | ruleAction26 | |
89 | ruleAction27 | |
90 | ruleAction28 | |
91 | ruleAction29 | |
92 | ruleAction30 | |
93 | ruleAction31 | |
94 | ruleAction32 | |
95 | ruleAction33 | |
96 | ruleAction34 | |
97 | ruleAction35 | |
98 | ruleAction36 | |
99 | ruleAction37 | |
100 | ruleAction38 | |
101 | ruleAction39 | |
102 | ruleAction40 | |
103 | ruleAction41 | |
104 | ruleAction42 | |
105 | ruleAction43 | |
106 | ruleAction44 | |
107 | ruleAction45 | |
108 | ruleAction46 | |
109 | ruleAction47 | |
110 | ruleAction48 | |
111 | ) | |
112 | ||
113 | var rul3s = [...]string{ | |
114 | "Unknown", | |
115 | "Grammar", | |
116 | "Import", | |
117 | "Definition", | |
118 | "Expression", | |
119 | "Sequence", | |
120 | "Prefix", | |
121 | "Suffix", | |
122 | "Primary", | |
123 | "Identifier", | |
124 | "IdentStart", | |
125 | "IdentCont", | |
126 | "Literal", | |
127 | "Class", | |
128 | "Ranges", | |
129 | "DoubleRanges", | |
130 | "Range", | |
131 | "DoubleRange", | |
132 | "Char", | |
133 | "DoubleChar", | |
134 | "Escape", | |
135 | "LeftArrow", | |
136 | "Slash", | |
137 | "And", | |
138 | "Not", | |
139 | "Question", | |
140 | "Star", | |
141 | "Plus", | |
142 | "Open", | |
143 | "Close", | |
144 | "Dot", | |
145 | "SpaceComment", | |
146 | "Spacing", | |
147 | "MustSpacing", | |
148 | "Comment", | |
149 | "Space", | |
150 | "EndOfLine", | |
151 | "EndOfFile", | |
152 | "Action", | |
153 | "ActionBody", | |
154 | "Begin", | |
155 | "End", | |
156 | "Action0", | |
157 | "Action1", | |
158 | "Action2", | |
159 | "PegText", | |
160 | "Action3", | |
161 | "Action4", | |
162 | "Action5", | |
163 | "Action6", | |
164 | "Action7", | |
165 | "Action8", | |
166 | "Action9", | |
167 | "Action10", | |
168 | "Action11", | |
169 | "Action12", | |
170 | "Action13", | |
171 | "Action14", | |
172 | "Action15", | |
173 | "Action16", | |
174 | "Action17", | |
175 | "Action18", | |
176 | "Action19", | |
177 | "Action20", | |
178 | "Action21", | |
179 | "Action22", | |
180 | "Action23", | |
181 | "Action24", | |
182 | "Action25", | |
183 | "Action26", | |
184 | "Action27", | |
185 | "Action28", | |
186 | "Action29", | |
187 | "Action30", | |
188 | "Action31", | |
189 | "Action32", | |
190 | "Action33", | |
191 | "Action34", | |
192 | "Action35", | |
193 | "Action36", | |
194 | "Action37", | |
195 | "Action38", | |
196 | "Action39", | |
197 | "Action40", | |
198 | "Action41", | |
199 | "Action42", | |
200 | "Action43", | |
201 | "Action44", | |
202 | "Action45", | |
203 | "Action46", | |
204 | "Action47", | |
205 | "Action48", | |
206 | } | |
207 | ||
208 | type token32 struct { | |
209 | pegRule | |
210 | begin, end uint32 | |
211 | } | |
212 | ||
213 | func (t *token32) String() string { | |
214 | return fmt.Sprintf("\x1B[34m%v\x1B[m %v %v", rul3s[t.pegRule], t.begin, t.end) | |
215 | } | |
216 | ||
217 | type node32 struct { | |
218 | token32 | |
219 | up, next *node32 | |
220 | } | |
221 | ||
222 | func (node *node32) print(w io.Writer, pretty bool, buffer string) { | |
223 | var print func(node *node32, depth int) | |
224 | print = func(node *node32, depth int) { | |
225 | for node != nil { | |
226 | for c := 0; c < depth; c++ { | |
227 | fmt.Fprintf(w, " ") | |
228 | } | |
229 | rule := rul3s[node.pegRule] | |
230 | quote := strconv.Quote(string(([]rune(buffer)[node.begin:node.end]))) | |
231 | if !pretty { | |
232 | fmt.Fprintf(w, "%v %v\n", rule, quote) | |
233 | } else { | |
234 | fmt.Fprintf(w, "\x1B[34m%v\x1B[m %v\n", rule, quote) | |
235 | } | |
236 | if node.up != nil { | |
237 | print(node.up, depth+1) | |
238 | } | |
239 | node = node.next | |
240 | } | |
241 | } | |
242 | print(node, 0) | |
243 | } | |
244 | ||
245 | func (node *node32) Print(w io.Writer, buffer string) { | |
246 | node.print(w, false, buffer) | |
247 | } | |
248 | ||
249 | func (node *node32) PrettyPrint(w io.Writer, buffer string) { | |
250 | node.print(w, true, buffer) | |
251 | } | |
252 | ||
253 | type tokens32 struct { | |
254 | tree []token32 | |
255 | } | |
256 | ||
257 | func (t *tokens32) Trim(length uint32) { | |
258 | t.tree = t.tree[:length] | |
259 | } | |
260 | ||
261 | func (t *tokens32) Print() { | |
262 | for _, token := range t.tree { | |
263 | fmt.Println(token.String()) | |
264 | } | |
265 | } | |
266 | ||
267 | func (t *tokens32) AST() *node32 { | |
268 | type element struct { | |
269 | node *node32 | |
270 | down *element | |
271 | } | |
272 | tokens := t.Tokens() | |
273 | var stack *element | |
274 | for _, token := range tokens { | |
275 | if token.begin == token.end { | |
276 | continue | |
277 | } | |
278 | node := &node32{token32: token} | |
279 | for stack != nil && stack.node.begin >= token.begin && stack.node.end <= token.end { | |
280 | stack.node.next = node.up | |
281 | node.up = stack.node | |
282 | stack = stack.down | |
283 | } | |
284 | stack = &element{node: node, down: stack} | |
285 | } | |
286 | if stack != nil { | |
287 | return stack.node | |
288 | } | |
289 | return nil | |
290 | } | |
291 | ||
292 | func (t *tokens32) PrintSyntaxTree(buffer string) { | |
293 | t.AST().Print(os.Stdout, buffer) | |
294 | } | |
295 | ||
296 | func (t *tokens32) WriteSyntaxTree(w io.Writer, buffer string) { | |
297 | t.AST().Print(w, buffer) | |
298 | } | |
299 | ||
300 | func (t *tokens32) PrettyPrintSyntaxTree(buffer string) { | |
301 | t.AST().PrettyPrint(os.Stdout, buffer) | |
302 | } | |
303 | ||
304 | func (t *tokens32) Add(rule pegRule, begin, end, index uint32) { | |
305 | tree, i := t.tree, int(index) | |
306 | if i >= len(tree) { | |
307 | t.tree = append(tree, token32{pegRule: rule, begin: begin, end: end}) | |
308 | return | |
309 | } | |
310 | tree[i] = token32{pegRule: rule, begin: begin, end: end} | |
311 | } | |
312 | ||
313 | func (t *tokens32) Tokens() []token32 { | |
314 | return t.tree | |
315 | } | |
316 | ||
317 | type Peg struct { | |
318 | *tree.Tree | |
319 | ||
320 | Buffer string | |
321 | buffer []rune | |
322 | rules [92]func() bool | |
323 | parse func(rule ...int) error | |
324 | reset func() | |
325 | Pretty bool | |
326 | tokens32 | |
327 | } | |
328 | ||
329 | func (p *Peg) Parse(rule ...int) error { | |
330 | return p.parse(rule...) | |
331 | } | |
332 | ||
333 | func (p *Peg) Reset() { | |
334 | p.reset() | |
335 | } | |
336 | ||
337 | type textPosition struct { | |
338 | line, symbol int | |
339 | } | |
340 | ||
341 | type textPositionMap map[int]textPosition | |
342 | ||
343 | func translatePositions(buffer []rune, positions []int) textPositionMap { | |
344 | length, translations, j, line, symbol := len(positions), make(textPositionMap, len(positions)), 0, 1, 0 | |
345 | sort.Ints(positions) | |
346 | ||
347 | search: | |
348 | for i, c := range buffer { | |
349 | if c == '\n' { | |
350 | line, symbol = line+1, 0 | |
351 | } else { | |
352 | symbol++ | |
353 | } | |
354 | if i == positions[j] { | |
355 | translations[positions[j]] = textPosition{line, symbol} | |
356 | for j++; j < length; j++ { | |
357 | if i != positions[j] { | |
358 | continue search | |
359 | } | |
360 | } | |
361 | break search | |
362 | } | |
363 | } | |
364 | ||
365 | return translations | |
366 | } | |
367 | ||
368 | type parseError struct { | |
369 | p *Peg | |
370 | max token32 | |
371 | } | |
372 | ||
373 | func (e *parseError) Error() string { | |
374 | tokens, err := []token32{e.max}, "\n" | |
375 | positions, p := make([]int, 2*len(tokens)), 0 | |
376 | for _, token := range tokens { | |
377 | positions[p], p = int(token.begin), p+1 | |
378 | positions[p], p = int(token.end), p+1 | |
379 | } | |
380 | translations := translatePositions(e.p.buffer, positions) | |
381 | format := "parse error near %v (line %v symbol %v - line %v symbol %v):\n%v\n" | |
382 | if e.p.Pretty { | |
383 | format = "parse error near \x1B[34m%v\x1B[m (line %v symbol %v - line %v symbol %v):\n%v\n" | |
384 | } | |
385 | for _, token := range tokens { | |
386 | begin, end := int(token.begin), int(token.end) | |
387 | err += fmt.Sprintf(format, | |
388 | rul3s[token.pegRule], | |
389 | translations[begin].line, translations[begin].symbol, | |
390 | translations[end].line, translations[end].symbol, | |
391 | strconv.Quote(string(e.p.buffer[begin:end]))) | |
392 | } | |
393 | ||
394 | return err | |
395 | } | |
396 | ||
397 | func (p *Peg) PrintSyntaxTree() { | |
398 | if p.Pretty { | |
399 | p.tokens32.PrettyPrintSyntaxTree(p.Buffer) | |
400 | } else { | |
401 | p.tokens32.PrintSyntaxTree(p.Buffer) | |
402 | } | |
403 | } | |
404 | ||
405 | func (p *Peg) WriteSyntaxTree(w io.Writer) { | |
406 | p.tokens32.WriteSyntaxTree(w, p.Buffer) | |
407 | } | |
408 | ||
// Execute replays the recorded token stream and runs the semantic action
// associated with each action rule. rulePegText tokens update the current
// captured text span; each ruleActionN token invokes the corresponding
// p.Add* builder call, constructing the peg's internal representation.
func (p *Peg) Execute() {
	buffer, _buffer, text, begin, end := p.Buffer, p.buffer, "", 0, 0
	for _, token := range p.Tokens() {
		switch token.pegRule {

		case rulePegText:
			// Remember the text captured by the most recent <...> group;
			// subsequent actions consume it via the text variable.
			begin, end = int(token.begin), int(token.end)
			text = string(_buffer[begin:end])

		case ruleAction0:
			p.AddPackage(text)
		case ruleAction1:
			p.AddPeg(text)
		case ruleAction2:
			p.AddState(text)
		case ruleAction3:
			p.AddImport(text)
		case ruleAction4:
			p.AddRule(text)
		case ruleAction5:
			p.AddExpression()
		case ruleAction6:
			p.AddAlternate()
		case ruleAction7:
			p.AddNil()
			p.AddAlternate()
		case ruleAction8:
			p.AddNil()
		case ruleAction9:
			p.AddSequence()
		case ruleAction10:
			p.AddPredicate(text)
		case ruleAction11:
			p.AddStateChange(text)
		case ruleAction12:
			p.AddPeekFor()
		case ruleAction13:
			p.AddPeekNot()
		case ruleAction14:
			p.AddQuery()
		case ruleAction15:
			p.AddStar()
		case ruleAction16:
			p.AddPlus()
		case ruleAction17:
			p.AddName(text)
		case ruleAction18:
			p.AddDot()
		case ruleAction19:
			p.AddAction(text)
		case ruleAction20:
			p.AddPush()
		case ruleAction21:
			p.AddSequence()
		case ruleAction22:
			p.AddSequence()
		case ruleAction23:
			p.AddPeekNot()
			p.AddDot()
			p.AddSequence()
		case ruleAction24:
			p.AddPeekNot()
			p.AddDot()
			p.AddSequence()
		case ruleAction25:
			p.AddAlternate()
		case ruleAction26:
			p.AddAlternate()
		case ruleAction27:
			p.AddRange()
		case ruleAction28:
			p.AddDoubleRange()
		case ruleAction29:
			p.AddCharacter(text)
		case ruleAction30:
			p.AddDoubleCharacter(text)
		case ruleAction31:
			p.AddCharacter(text)
		// Actions 32-44 map escape sequences to their literal characters.
		case ruleAction32:
			p.AddCharacter("\a")
		case ruleAction33:
			p.AddCharacter("\b")
		case ruleAction34:
			p.AddCharacter("\x1B")
		case ruleAction35:
			p.AddCharacter("\f")
		case ruleAction36:
			p.AddCharacter("\n")
		case ruleAction37:
			p.AddCharacter("\r")
		case ruleAction38:
			p.AddCharacter("\t")
		case ruleAction39:
			p.AddCharacter("\v")
		case ruleAction40:
			p.AddCharacter("'")
		case ruleAction41:
			p.AddCharacter("\"")
		case ruleAction42:
			p.AddCharacter("[")
		case ruleAction43:
			p.AddCharacter("]")
		case ruleAction44:
			p.AddCharacter("-")
		case ruleAction45:
			p.AddHexaCharacter(text)
		case ruleAction46:
			p.AddOctalCharacter(text)
		case ruleAction47:
			p.AddOctalCharacter(text)
		case ruleAction48:
			p.AddCharacter("\\")

		}
	}
	// Generated code may or may not reference each of these locals;
	// blank-assign them so the function compiles in every case.
	_, _, _, _, _ = buffer, _buffer, text, begin, end
}
526 | ||
527 | func Pretty(pretty bool) func(*Peg) error { | |
528 | return func(p *Peg) error { | |
529 | p.Pretty = pretty | |
530 | return nil | |
531 | } | |
532 | } | |
533 | ||
534 | func Size(size int) func(*Peg) error { | |
535 | return func(p *Peg) error { | |
536 | p.tokens32 = tokens32{tree: make([]token32, 0, size)} | |
537 | return nil | |
538 | } | |
539 | } | |
540 | func (p *Peg) Init(options ...func(*Peg) error) error { | |
541 | var ( | |
542 | max token32 | |
543 | position, tokenIndex uint32 | |
544 | buffer []rune | |
545 | ) | |
546 | for _, option := range options { | |
547 | err := option(p) | |
548 | if err != nil { | |
549 | return err | |
550 | } | |
551 | } | |
552 | p.reset = func() { | |
553 | max = token32{} | |
554 | position, tokenIndex = 0, 0 | |
555 | ||
556 | p.buffer = []rune(p.Buffer) | |
557 | if len(p.buffer) == 0 || p.buffer[len(p.buffer)-1] != endSymbol { | |
558 | p.buffer = append(p.buffer, endSymbol) | |
559 | } | |
560 | buffer = p.buffer | |
561 | } | |
562 | p.reset() | |
563 | ||
564 | _rules := p.rules | |
565 | tree := p.tokens32 | |
566 | p.parse = func(rule ...int) error { | |
567 | r := 1 | |
568 | if len(rule) > 0 { | |
569 | r = rule[0] | |
570 | } | |
571 | matches := p.rules[r]() | |
572 | p.tokens32 = tree | |
573 | if matches { | |
574 | p.Trim(tokenIndex) | |
575 | return nil | |
576 | } | |
577 | return &parseError{p, max} | |
578 | } | |
579 | ||
580 | add := func(rule pegRule, begin uint32) { | |
581 | tree.Add(rule, begin, position, tokenIndex) | |
582 | tokenIndex++ | |
583 | if begin != position && position > max.end { | |
584 | max = token32{rule, begin, position} | |
585 | } | |
586 | } | |
587 | ||
588 | matchDot := func() bool { | |
589 | if buffer[position] != endSymbol { | |
590 | position++ | |
591 | return true | |
592 | } | |
593 | return false | |
594 | } | |
595 | ||
596 | /*matchChar := func(c byte) bool { | |
597 | if buffer[position] == c { | |
598 | position++ | |
599 | return true | |
600 | } | |
601 | return false | |
602 | }*/ | |
603 | ||
604 | /*matchRange := func(lower byte, upper byte) bool { | |
605 | if c := buffer[position]; c >= lower && c <= upper { | |
606 | position++ | |
607 | return true | |
608 | } | |
609 | return false | |
610 | }*/ | |
611 | ||
612 | _rules = [...]func() bool{ | |
613 | nil, | |
614 | /* 0 Grammar <- <(Spacing ('p' 'a' 'c' 'k' 'a' 'g' 'e') MustSpacing Identifier Action0 Import* ('t' 'y' 'p' 'e') MustSpacing Identifier Action1 ('P' 'e' 'g') Spacing Action Action2 Definition+ EndOfFile)> */ | |
615 | func() bool { | |
616 | position0, tokenIndex0 := position, tokenIndex | |
617 | { | |
618 | position1 := position | |
619 | if !_rules[ruleSpacing]() { | |
620 | goto l0 | |
621 | } | |
622 | if buffer[position] != rune('p') { | |
623 | goto l0 | |
624 | } | |
625 | position++ | |
626 | if buffer[position] != rune('a') { | |
627 | goto l0 | |
628 | } | |
629 | position++ | |
630 | if buffer[position] != rune('c') { | |
631 | goto l0 | |
632 | } | |
633 | position++ | |
634 | if buffer[position] != rune('k') { | |
635 | goto l0 | |
636 | } | |
637 | position++ | |
638 | if buffer[position] != rune('a') { | |
639 | goto l0 | |
640 | } | |
641 | position++ | |
642 | if buffer[position] != rune('g') { | |
643 | goto l0 | |
644 | } | |
645 | position++ | |
646 | if buffer[position] != rune('e') { | |
647 | goto l0 | |
648 | } | |
649 | position++ | |
650 | if !_rules[ruleMustSpacing]() { | |
651 | goto l0 | |
652 | } | |
653 | if !_rules[ruleIdentifier]() { | |
654 | goto l0 | |
655 | } | |
656 | { | |
657 | add(ruleAction0, position) | |
658 | } | |
659 | l3: | |
660 | { | |
661 | position4, tokenIndex4 := position, tokenIndex | |
662 | { | |
663 | position5 := position | |
664 | if buffer[position] != rune('i') { | |
665 | goto l4 | |
666 | } | |
667 | position++ | |
668 | if buffer[position] != rune('m') { | |
669 | goto l4 | |
670 | } | |
671 | position++ | |
672 | if buffer[position] != rune('p') { | |
673 | goto l4 | |
674 | } | |
675 | position++ | |
676 | if buffer[position] != rune('o') { | |
677 | goto l4 | |
678 | } | |
679 | position++ | |
680 | if buffer[position] != rune('r') { | |
681 | goto l4 | |
682 | } | |
683 | position++ | |
684 | if buffer[position] != rune('t') { | |
685 | goto l4 | |
686 | } | |
687 | position++ | |
688 | if !_rules[ruleSpacing]() { | |
689 | goto l4 | |
690 | } | |
691 | if buffer[position] != rune('"') { | |
692 | goto l4 | |
693 | } | |
694 | position++ | |
695 | { | |
696 | position6 := position | |
697 | { | |
698 | switch buffer[position] { | |
699 | case '-': | |
700 | if buffer[position] != rune('-') { | |
701 | goto l4 | |
702 | } | |
703 | position++ | |
704 | case '.': | |
705 | if buffer[position] != rune('.') { | |
706 | goto l4 | |
707 | } | |
708 | position++ | |
709 | case '/': | |
710 | if buffer[position] != rune('/') { | |
711 | goto l4 | |
712 | } | |
713 | position++ | |
714 | case '_': | |
715 | if buffer[position] != rune('_') { | |
716 | goto l4 | |
717 | } | |
718 | position++ | |
719 | case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': | |
720 | if c := buffer[position]; c < rune('A') || c > rune('Z') { | |
721 | goto l4 | |
722 | } | |
723 | position++ | |
724 | default: | |
725 | if c := buffer[position]; c < rune('a') || c > rune('z') { | |
726 | goto l4 | |
727 | } | |
728 | position++ | |
729 | } | |
730 | } | |
731 | ||
732 | l7: | |
733 | { | |
734 | position8, tokenIndex8 := position, tokenIndex | |
735 | { | |
736 | switch buffer[position] { | |
737 | case '-': | |
738 | if buffer[position] != rune('-') { | |
739 | goto l8 | |
740 | } | |
741 | position++ | |
742 | case '.': | |
743 | if buffer[position] != rune('.') { | |
744 | goto l8 | |
745 | } | |
746 | position++ | |
747 | case '/': | |
748 | if buffer[position] != rune('/') { | |
749 | goto l8 | |
750 | } | |
751 | position++ | |
752 | case '_': | |
753 | if buffer[position] != rune('_') { | |
754 | goto l8 | |
755 | } | |
756 | position++ | |
757 | case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': | |
758 | if c := buffer[position]; c < rune('A') || c > rune('Z') { | |
759 | goto l8 | |
760 | } | |
761 | position++ | |
762 | default: | |
763 | if c := buffer[position]; c < rune('a') || c > rune('z') { | |
764 | goto l8 | |
765 | } | |
766 | position++ | |
767 | } | |
768 | } | |
769 | ||
770 | goto l7 | |
771 | l8: | |
772 | position, tokenIndex = position8, tokenIndex8 | |
773 | } | |
774 | add(rulePegText, position6) | |
775 | } | |
776 | if buffer[position] != rune('"') { | |
777 | goto l4 | |
778 | } | |
779 | position++ | |
780 | if !_rules[ruleSpacing]() { | |
781 | goto l4 | |
782 | } | |
783 | { | |
784 | add(ruleAction3, position) | |
785 | } | |
786 | add(ruleImport, position5) | |
787 | } | |
788 | goto l3 | |
789 | l4: | |
790 | position, tokenIndex = position4, tokenIndex4 | |
791 | } | |
792 | if buffer[position] != rune('t') { | |
793 | goto l0 | |
794 | } | |
795 | position++ | |
796 | if buffer[position] != rune('y') { | |
797 | goto l0 | |
798 | } | |
799 | position++ | |
800 | if buffer[position] != rune('p') { | |
801 | goto l0 | |
802 | } | |
803 | position++ | |
804 | if buffer[position] != rune('e') { | |
805 | goto l0 | |
806 | } | |
807 | position++ | |
808 | if !_rules[ruleMustSpacing]() { | |
809 | goto l0 | |
810 | } | |
811 | if !_rules[ruleIdentifier]() { | |
812 | goto l0 | |
813 | } | |
814 | { | |
815 | add(ruleAction1, position) | |
816 | } | |
817 | if buffer[position] != rune('P') { | |
818 | goto l0 | |
819 | } | |
820 | position++ | |
821 | if buffer[position] != rune('e') { | |
822 | goto l0 | |
823 | } | |
824 | position++ | |
825 | if buffer[position] != rune('g') { | |
826 | goto l0 | |
827 | } | |
828 | position++ | |
829 | if !_rules[ruleSpacing]() { | |
830 | goto l0 | |
831 | } | |
832 | if !_rules[ruleAction]() { | |
833 | goto l0 | |
834 | } | |
835 | { | |
836 | add(ruleAction2, position) | |
837 | } | |
838 | { | |
839 | position16 := position | |
840 | if !_rules[ruleIdentifier]() { | |
841 | goto l0 | |
842 | } | |
843 | { | |
844 | add(ruleAction4, position) | |
845 | } | |
846 | if !_rules[ruleLeftArrow]() { | |
847 | goto l0 | |
848 | } | |
849 | if !_rules[ruleExpression]() { | |
850 | goto l0 | |
851 | } | |
852 | { | |
853 | add(ruleAction5, position) | |
854 | } | |
855 | { | |
856 | position19, tokenIndex19 := position, tokenIndex | |
857 | { | |
858 | position20, tokenIndex20 := position, tokenIndex | |
859 | if !_rules[ruleIdentifier]() { | |
860 | goto l21 | |
861 | } | |
862 | if !_rules[ruleLeftArrow]() { | |
863 | goto l21 | |
864 | } | |
865 | goto l20 | |
866 | l21: | |
867 | position, tokenIndex = position20, tokenIndex20 | |
868 | { | |
869 | position22, tokenIndex22 := position, tokenIndex | |
870 | if !matchDot() { | |
871 | goto l22 | |
872 | } | |
873 | goto l0 | |
874 | l22: | |
875 | position, tokenIndex = position22, tokenIndex22 | |
876 | } | |
877 | } | |
878 | l20: | |
879 | position, tokenIndex = position19, tokenIndex19 | |
880 | } | |
881 | add(ruleDefinition, position16) | |
882 | } | |
883 | l14: | |
884 | { | |
885 | position15, tokenIndex15 := position, tokenIndex | |
886 | { | |
887 | position23 := position | |
888 | if !_rules[ruleIdentifier]() { | |
889 | goto l15 | |
890 | } | |
891 | { | |
892 | add(ruleAction4, position) | |
893 | } | |
894 | if !_rules[ruleLeftArrow]() { | |
895 | goto l15 | |
896 | } | |
897 | if !_rules[ruleExpression]() { | |
898 | goto l15 | |
899 | } | |
900 | { | |
901 | add(ruleAction5, position) | |
902 | } | |
903 | { | |
904 | position26, tokenIndex26 := position, tokenIndex | |
905 | { | |
906 | position27, tokenIndex27 := position, tokenIndex | |
907 | if !_rules[ruleIdentifier]() { | |
908 | goto l28 | |
909 | } | |
910 | if !_rules[ruleLeftArrow]() { | |
911 | goto l28 | |
912 | } | |
913 | goto l27 | |
914 | l28: | |
915 | position, tokenIndex = position27, tokenIndex27 | |
916 | { | |
917 | position29, tokenIndex29 := position, tokenIndex | |
918 | if !matchDot() { | |
919 | goto l29 | |
920 | } | |
921 | goto l15 | |
922 | l29: | |
923 | position, tokenIndex = position29, tokenIndex29 | |
924 | } | |
925 | } | |
926 | l27: | |
927 | position, tokenIndex = position26, tokenIndex26 | |
928 | } | |
929 | add(ruleDefinition, position23) | |
930 | } | |
931 | goto l14 | |
932 | l15: | |
933 | position, tokenIndex = position15, tokenIndex15 | |
934 | } | |
935 | { | |
936 | position30 := position | |
937 | { | |
938 | position31, tokenIndex31 := position, tokenIndex | |
939 | if !matchDot() { | |
940 | goto l31 | |
941 | } | |
942 | goto l0 | |
943 | l31: | |
944 | position, tokenIndex = position31, tokenIndex31 | |
945 | } | |
946 | add(ruleEndOfFile, position30) | |
947 | } | |
948 | add(ruleGrammar, position1) | |
949 | } | |
950 | return true | |
951 | l0: | |
952 | position, tokenIndex = position0, tokenIndex0 | |
953 | return false | |
954 | }, | |
955 | /* 1 Import <- <('i' 'm' 'p' 'o' 'r' 't' Spacing '"' <((&('-') '-') | (&('.') '.') | (&('/') '/') | (&('_') '_') | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z') [A-Z]) | (&('a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') [a-z]))+> '"' Spacing Action3)> */ | |
956 | nil, | |
957 | /* 2 Definition <- <(Identifier Action4 LeftArrow Expression Action5 &((Identifier LeftArrow) / !.))> */ | |
958 | nil, | |
959 | /* 3 Expression <- <((Sequence (Slash Sequence Action6)* (Slash Action7)?) / Action8)> */ | |
960 | func() bool { | |
961 | { | |
962 | position35 := position | |
963 | { | |
964 | position36, tokenIndex36 := position, tokenIndex | |
965 | if !_rules[ruleSequence]() { | |
966 | goto l37 | |
967 | } | |
968 | l38: | |
969 | { | |
970 | position39, tokenIndex39 := position, tokenIndex | |
971 | if !_rules[ruleSlash]() { | |
972 | goto l39 | |
973 | } | |
974 | if !_rules[ruleSequence]() { | |
975 | goto l39 | |
976 | } | |
977 | { | |
978 | add(ruleAction6, position) | |
979 | } | |
980 | goto l38 | |
981 | l39: | |
982 | position, tokenIndex = position39, tokenIndex39 | |
983 | } | |
984 | { | |
985 | position41, tokenIndex41 := position, tokenIndex | |
986 | if !_rules[ruleSlash]() { | |
987 | goto l41 | |
988 | } | |
989 | { | |
990 | add(ruleAction7, position) | |
991 | } | |
992 | goto l42 | |
993 | l41: | |
994 | position, tokenIndex = position41, tokenIndex41 | |
995 | } | |
996 | l42: | |
997 | goto l36 | |
998 | l37: | |
999 | position, tokenIndex = position36, tokenIndex36 | |
1000 | { | |
1001 | add(ruleAction8, position) | |
1002 | } | |
1003 | } | |
1004 | l36: | |
1005 | add(ruleExpression, position35) | |
1006 | } | |
1007 | return true | |
1008 | }, | |
1009 | /* 4 Sequence <- <(Prefix (Prefix Action9)*)> */ | |
1010 | func() bool { | |
1011 | position45, tokenIndex45 := position, tokenIndex | |
1012 | { | |
1013 | position46 := position | |
1014 | if !_rules[rulePrefix]() { | |
1015 | goto l45 | |
1016 | } | |
1017 | l47: | |
1018 | { | |
1019 | position48, tokenIndex48 := position, tokenIndex | |
1020 | if !_rules[rulePrefix]() { | |
1021 | goto l48 | |
1022 | } | |
1023 | { | |
1024 | add(ruleAction9, position) | |
1025 | } | |
1026 | goto l47 | |
1027 | l48: | |
1028 | position, tokenIndex = position48, tokenIndex48 | |
1029 | } | |
1030 | add(ruleSequence, position46) | |
1031 | } | |
1032 | return true | |
1033 | l45: | |
1034 | position, tokenIndex = position45, tokenIndex45 | |
1035 | return false | |
1036 | }, | |
1037 | /* 5 Prefix <- <((And Action Action10) / (Not Action Action11) / ((&('!') (Not Suffix Action13)) | (&('&') (And Suffix Action12)) | (&('"' | '\'' | '(' | '.' | '<' | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z' | '[' | '_' | 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z' | '{') Suffix)))> */ | |
1038 | func() bool { | |
1039 | position50, tokenIndex50 := position, tokenIndex | |
1040 | { | |
1041 | position51 := position | |
1042 | { | |
1043 | position52, tokenIndex52 := position, tokenIndex | |
1044 | if !_rules[ruleAnd]() { | |
1045 | goto l53 | |
1046 | } | |
1047 | if !_rules[ruleAction]() { | |
1048 | goto l53 | |
1049 | } | |
1050 | { | |
1051 | add(ruleAction10, position) | |
1052 | } | |
1053 | goto l52 | |
1054 | l53: | |
1055 | position, tokenIndex = position52, tokenIndex52 | |
1056 | if !_rules[ruleNot]() { | |
1057 | goto l55 | |
1058 | } | |
1059 | if !_rules[ruleAction]() { | |
1060 | goto l55 | |
1061 | } | |
1062 | { | |
1063 | add(ruleAction11, position) | |
1064 | } | |
1065 | goto l52 | |
1066 | l55: | |
1067 | position, tokenIndex = position52, tokenIndex52 | |
1068 | { | |
1069 | switch buffer[position] { | |
1070 | case '!': | |
1071 | if !_rules[ruleNot]() { | |
1072 | goto l50 | |
1073 | } | |
1074 | if !_rules[ruleSuffix]() { | |
1075 | goto l50 | |
1076 | } | |
1077 | { | |
1078 | add(ruleAction13, position) | |
1079 | } | |
1080 | case '&': | |
1081 | if !_rules[ruleAnd]() { | |
1082 | goto l50 | |
1083 | } | |
1084 | if !_rules[ruleSuffix]() { | |
1085 | goto l50 | |
1086 | } | |
1087 | { | |
1088 | add(ruleAction12, position) | |
1089 | } | |
1090 | default: | |
1091 | if !_rules[ruleSuffix]() { | |
1092 | goto l50 | |
1093 | } | |
1094 | } | |
1095 | } | |
1096 | ||
1097 | } | |
1098 | l52: | |
1099 | add(rulePrefix, position51) | |
1100 | } | |
1101 | return true | |
1102 | l50: | |
1103 | position, tokenIndex = position50, tokenIndex50 | |
1104 | return false | |
1105 | }, | |
1106 | /* 6 Suffix <- <(Primary ((&('+') (Plus Action16)) | (&('*') (Star Action15)) | (&('?') (Question Action14)))?)> */ | |
1107 | func() bool { | |
1108 | position60, tokenIndex60 := position, tokenIndex | |
1109 | { | |
1110 | position61 := position | |
1111 | { | |
1112 | position62 := position | |
1113 | { | |
1114 | switch buffer[position] { | |
1115 | case '<': | |
1116 | { | |
1117 | position64 := position | |
1118 | if buffer[position] != rune('<') { | |
1119 | goto l60 | |
1120 | } | |
1121 | position++ | |
1122 | if !_rules[ruleSpacing]() { | |
1123 | goto l60 | |
1124 | } | |
1125 | add(ruleBegin, position64) | |
1126 | } | |
1127 | if !_rules[ruleExpression]() { | |
1128 | goto l60 | |
1129 | } | |
1130 | { | |
1131 | position65 := position | |
1132 | if buffer[position] != rune('>') { | |
1133 | goto l60 | |
1134 | } | |
1135 | position++ | |
1136 | if !_rules[ruleSpacing]() { | |
1137 | goto l60 | |
1138 | } | |
1139 | add(ruleEnd, position65) | |
1140 | } | |
1141 | { | |
1142 | add(ruleAction20, position) | |
1143 | } | |
1144 | case '{': | |
1145 | if !_rules[ruleAction]() { | |
1146 | goto l60 | |
1147 | } | |
1148 | { | |
1149 | add(ruleAction19, position) | |
1150 | } | |
1151 | case '.': | |
1152 | { | |
1153 | position68 := position | |
1154 | if buffer[position] != rune('.') { | |
1155 | goto l60 | |
1156 | } | |
1157 | position++ | |
1158 | if !_rules[ruleSpacing]() { | |
1159 | goto l60 | |
1160 | } | |
1161 | add(ruleDot, position68) | |
1162 | } | |
1163 | { | |
1164 | add(ruleAction18, position) | |
1165 | } | |
1166 | case '[': | |
1167 | { | |
1168 | position70 := position | |
1169 | { | |
1170 | position71, tokenIndex71 := position, tokenIndex | |
1171 | if buffer[position] != rune('[') { | |
1172 | goto l72 | |
1173 | } | |
1174 | position++ | |
1175 | if buffer[position] != rune('[') { | |
1176 | goto l72 | |
1177 | } | |
1178 | position++ | |
1179 | { | |
1180 | position73, tokenIndex73 := position, tokenIndex | |
1181 | { | |
1182 | position75, tokenIndex75 := position, tokenIndex | |
1183 | if buffer[position] != rune('^') { | |
1184 | goto l76 | |
1185 | } | |
1186 | position++ | |
1187 | if !_rules[ruleDoubleRanges]() { | |
1188 | goto l76 | |
1189 | } | |
1190 | { | |
1191 | add(ruleAction23, position) | |
1192 | } | |
1193 | goto l75 | |
1194 | l76: | |
1195 | position, tokenIndex = position75, tokenIndex75 | |
1196 | if !_rules[ruleDoubleRanges]() { | |
1197 | goto l73 | |
1198 | } | |
1199 | } | |
1200 | l75: | |
1201 | goto l74 | |
1202 | l73: | |
1203 | position, tokenIndex = position73, tokenIndex73 | |
1204 | } | |
1205 | l74: | |
1206 | if buffer[position] != rune(']') { | |
1207 | goto l72 | |
1208 | } | |
1209 | position++ | |
1210 | if buffer[position] != rune(']') { | |
1211 | goto l72 | |
1212 | } | |
1213 | position++ | |
1214 | goto l71 | |
1215 | l72: | |
1216 | position, tokenIndex = position71, tokenIndex71 | |
1217 | if buffer[position] != rune('[') { | |
1218 | goto l60 | |
1219 | } | |
1220 | position++ | |
1221 | { | |
1222 | position78, tokenIndex78 := position, tokenIndex | |
1223 | { | |
1224 | position80, tokenIndex80 := position, tokenIndex | |
1225 | if buffer[position] != rune('^') { | |
1226 | goto l81 | |
1227 | } | |
1228 | position++ | |
1229 | if !_rules[ruleRanges]() { | |
1230 | goto l81 | |
1231 | } | |
1232 | { | |
1233 | add(ruleAction24, position) | |
1234 | } | |
1235 | goto l80 | |
1236 | l81: | |
1237 | position, tokenIndex = position80, tokenIndex80 | |
1238 | if !_rules[ruleRanges]() { | |
1239 | goto l78 | |
1240 | } | |
1241 | } | |
1242 | l80: | |
1243 | goto l79 | |
1244 | l78: | |
1245 | position, tokenIndex = position78, tokenIndex78 | |
1246 | } | |
1247 | l79: | |
1248 | if buffer[position] != rune(']') { | |
1249 | goto l60 | |
1250 | } | |
1251 | position++ | |
1252 | } | |
1253 | l71: | |
1254 | if !_rules[ruleSpacing]() { | |
1255 | goto l60 | |
1256 | } | |
1257 | add(ruleClass, position70) | |
1258 | } | |
1259 | case '"', '\'': | |
1260 | { | |
1261 | position83 := position | |
1262 | { | |
1263 | position84, tokenIndex84 := position, tokenIndex | |
1264 | if buffer[position] != rune('\'') { | |
1265 | goto l85 | |
1266 | } | |
1267 | position++ | |
1268 | { | |
1269 | position86, tokenIndex86 := position, tokenIndex | |
1270 | { | |
1271 | position88, tokenIndex88 := position, tokenIndex | |
1272 | if buffer[position] != rune('\'') { | |
1273 | goto l88 | |
1274 | } | |
1275 | position++ | |
1276 | goto l86 | |
1277 | l88: | |
1278 | position, tokenIndex = position88, tokenIndex88 | |
1279 | } | |
1280 | if !_rules[ruleChar]() { | |
1281 | goto l86 | |
1282 | } | |
1283 | goto l87 | |
1284 | l86: | |
1285 | position, tokenIndex = position86, tokenIndex86 | |
1286 | } | |
1287 | l87: | |
1288 | l89: | |
1289 | { | |
1290 | position90, tokenIndex90 := position, tokenIndex | |
1291 | { | |
1292 | position91, tokenIndex91 := position, tokenIndex | |
1293 | if buffer[position] != rune('\'') { | |
1294 | goto l91 | |
1295 | } | |
1296 | position++ | |
1297 | goto l90 | |
1298 | l91: | |
1299 | position, tokenIndex = position91, tokenIndex91 | |
1300 | } | |
1301 | if !_rules[ruleChar]() { | |
1302 | goto l90 | |
1303 | } | |
1304 | { | |
1305 | add(ruleAction21, position) | |
1306 | } | |
1307 | goto l89 | |
1308 | l90: | |
1309 | position, tokenIndex = position90, tokenIndex90 | |
1310 | } | |
1311 | if buffer[position] != rune('\'') { | |
1312 | goto l85 | |
1313 | } | |
1314 | position++ | |
1315 | if !_rules[ruleSpacing]() { | |
1316 | goto l85 | |
1317 | } | |
1318 | goto l84 | |
1319 | l85: | |
1320 | position, tokenIndex = position84, tokenIndex84 | |
1321 | if buffer[position] != rune('"') { | |
1322 | goto l60 | |
1323 | } | |
1324 | position++ | |
1325 | { | |
1326 | position93, tokenIndex93 := position, tokenIndex | |
1327 | { | |
1328 | position95, tokenIndex95 := position, tokenIndex | |
1329 | if buffer[position] != rune('"') { | |
1330 | goto l95 | |
1331 | } | |
1332 | position++ | |
1333 | goto l93 | |
1334 | l95: | |
1335 | position, tokenIndex = position95, tokenIndex95 | |
1336 | } | |
1337 | if !_rules[ruleDoubleChar]() { | |
1338 | goto l93 | |
1339 | } | |
1340 | goto l94 | |
1341 | l93: | |
1342 | position, tokenIndex = position93, tokenIndex93 | |
1343 | } | |
1344 | l94: | |
1345 | l96: | |
1346 | { | |
1347 | position97, tokenIndex97 := position, tokenIndex | |
1348 | { | |
1349 | position98, tokenIndex98 := position, tokenIndex | |
1350 | if buffer[position] != rune('"') { | |
1351 | goto l98 | |
1352 | } | |
1353 | position++ | |
1354 | goto l97 | |
1355 | l98: | |
1356 | position, tokenIndex = position98, tokenIndex98 | |
1357 | } | |
1358 | if !_rules[ruleDoubleChar]() { | |
1359 | goto l97 | |
1360 | } | |
1361 | { | |
1362 | add(ruleAction22, position) | |
1363 | } | |
1364 | goto l96 | |
1365 | l97: | |
1366 | position, tokenIndex = position97, tokenIndex97 | |
1367 | } | |
1368 | if buffer[position] != rune('"') { | |
1369 | goto l60 | |
1370 | } | |
1371 | position++ | |
1372 | if !_rules[ruleSpacing]() { | |
1373 | goto l60 | |
1374 | } | |
1375 | } | |
1376 | l84: | |
1377 | add(ruleLiteral, position83) | |
1378 | } | |
1379 | case '(': | |
1380 | { | |
1381 | position100 := position | |
1382 | if buffer[position] != rune('(') { | |
1383 | goto l60 | |
1384 | } | |
1385 | position++ | |
1386 | if !_rules[ruleSpacing]() { | |
1387 | goto l60 | |
1388 | } | |
1389 | add(ruleOpen, position100) | |
1390 | } | |
1391 | if !_rules[ruleExpression]() { | |
1392 | goto l60 | |
1393 | } | |
1394 | { | |
1395 | position101 := position | |
1396 | if buffer[position] != rune(')') { | |
1397 | goto l60 | |
1398 | } | |
1399 | position++ | |
1400 | if !_rules[ruleSpacing]() { | |
1401 | goto l60 | |
1402 | } | |
1403 | add(ruleClose, position101) | |
1404 | } | |
1405 | default: | |
1406 | if !_rules[ruleIdentifier]() { | |
1407 | goto l60 | |
1408 | } | |
1409 | { | |
1410 | position102, tokenIndex102 := position, tokenIndex | |
1411 | if !_rules[ruleLeftArrow]() { | |
1412 | goto l102 | |
1413 | } | |
1414 | goto l60 | |
1415 | l102: | |
1416 | position, tokenIndex = position102, tokenIndex102 | |
1417 | } | |
1418 | { | |
1419 | add(ruleAction17, position) | |
1420 | } | |
1421 | } | |
1422 | } | |
1423 | ||
1424 | add(rulePrimary, position62) | |
1425 | } | |
1426 | { | |
1427 | position104, tokenIndex104 := position, tokenIndex | |
1428 | { | |
1429 | switch buffer[position] { | |
1430 | case '+': | |
1431 | { | |
1432 | position107 := position | |
1433 | if buffer[position] != rune('+') { | |
1434 | goto l104 | |
1435 | } | |
1436 | position++ | |
1437 | if !_rules[ruleSpacing]() { | |
1438 | goto l104 | |
1439 | } | |
1440 | add(rulePlus, position107) | |
1441 | } | |
1442 | { | |
1443 | add(ruleAction16, position) | |
1444 | } | |
1445 | case '*': | |
1446 | { | |
1447 | position109 := position | |
1448 | if buffer[position] != rune('*') { | |
1449 | goto l104 | |
1450 | } | |
1451 | position++ | |
1452 | if !_rules[ruleSpacing]() { | |
1453 | goto l104 | |
1454 | } | |
1455 | add(ruleStar, position109) | |
1456 | } | |
1457 | { | |
1458 | add(ruleAction15, position) | |
1459 | } | |
1460 | default: | |
1461 | { | |
1462 | position111 := position | |
1463 | if buffer[position] != rune('?') { | |
1464 | goto l104 | |
1465 | } | |
1466 | position++ | |
1467 | if !_rules[ruleSpacing]() { | |
1468 | goto l104 | |
1469 | } | |
1470 | add(ruleQuestion, position111) | |
1471 | } | |
1472 | { | |
1473 | add(ruleAction14, position) | |
1474 | } | |
1475 | } | |
1476 | } | |
1477 | ||
1478 | goto l105 | |
1479 | l104: | |
1480 | position, tokenIndex = position104, tokenIndex104 | |
1481 | } | |
1482 | l105: | |
1483 | add(ruleSuffix, position61) | |
1484 | } | |
1485 | return true | |
1486 | l60: | |
1487 | position, tokenIndex = position60, tokenIndex60 | |
1488 | return false | |
1489 | }, | |
1490 | /* 7 Primary <- <((&('<') (Begin Expression End Action20)) | (&('{') (Action Action19)) | (&('.') (Dot Action18)) | (&('[') Class) | (&('"' | '\'') Literal) | (&('(') (Open Expression Close)) | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z' | '_' | 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') (Identifier !LeftArrow Action17)))> */ | |
1491 | nil, | |
1492 | /* 8 Identifier <- <(<(IdentStart IdentCont*)> Spacing)> */ | |
1493 | func() bool { | |
1494 | position114, tokenIndex114 := position, tokenIndex | |
1495 | { | |
1496 | position115 := position | |
1497 | { | |
1498 | position116 := position | |
1499 | if !_rules[ruleIdentStart]() { | |
1500 | goto l114 | |
1501 | } | |
1502 | l117: | |
1503 | { | |
1504 | position118, tokenIndex118 := position, tokenIndex | |
1505 | { | |
1506 | position119 := position | |
1507 | { | |
1508 | position120, tokenIndex120 := position, tokenIndex | |
1509 | if !_rules[ruleIdentStart]() { | |
1510 | goto l121 | |
1511 | } | |
1512 | goto l120 | |
1513 | l121: | |
1514 | position, tokenIndex = position120, tokenIndex120 | |
1515 | if c := buffer[position]; c < rune('0') || c > rune('9') { | |
1516 | goto l118 | |
1517 | } | |
1518 | position++ | |
1519 | } | |
1520 | l120: | |
1521 | add(ruleIdentCont, position119) | |
1522 | } | |
1523 | goto l117 | |
1524 | l118: | |
1525 | position, tokenIndex = position118, tokenIndex118 | |
1526 | } | |
1527 | add(rulePegText, position116) | |
1528 | } | |
1529 | if !_rules[ruleSpacing]() { | |
1530 | goto l114 | |
1531 | } | |
1532 | add(ruleIdentifier, position115) | |
1533 | } | |
1534 | return true | |
1535 | l114: | |
1536 | position, tokenIndex = position114, tokenIndex114 | |
1537 | return false | |
1538 | }, | |
1539 | /* 9 IdentStart <- <((&('_') '_') | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z') [A-Z]) | (&('a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') [a-z]))> */ | |
1540 | func() bool { | |
1541 | position122, tokenIndex122 := position, tokenIndex | |
1542 | { | |
1543 | position123 := position | |
1544 | { | |
1545 | switch buffer[position] { | |
1546 | case '_': | |
1547 | if buffer[position] != rune('_') { | |
1548 | goto l122 | |
1549 | } | |
1550 | position++ | |
1551 | case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': | |
1552 | if c := buffer[position]; c < rune('A') || c > rune('Z') { | |
1553 | goto l122 | |
1554 | } | |
1555 | position++ | |
1556 | default: | |
1557 | if c := buffer[position]; c < rune('a') || c > rune('z') { | |
1558 | goto l122 | |
1559 | } | |
1560 | position++ | |
1561 | } | |
1562 | } | |
1563 | ||
1564 | add(ruleIdentStart, position123) | |
1565 | } | |
1566 | return true | |
1567 | l122: | |
1568 | position, tokenIndex = position122, tokenIndex122 | |
1569 | return false | |
1570 | }, | |
1571 | /* 10 IdentCont <- <(IdentStart / [0-9])> */ | |
1572 | nil, | |
1573 | /* 11 Literal <- <(('\'' (!'\'' Char)? (!'\'' Char Action21)* '\'' Spacing) / ('"' (!'"' DoubleChar)? (!'"' DoubleChar Action22)* '"' Spacing))> */ | |
1574 | nil, | |
1575 | /* 12 Class <- <((('[' '[' (('^' DoubleRanges Action23) / DoubleRanges)? (']' ']')) / ('[' (('^' Ranges Action24) / Ranges)? ']')) Spacing)> */ | |
1576 | nil, | |
		/* 13 Ranges <- <(!']' Range (!']' Range Action25)*)> */
		// Parses one or more Range elements, stopping (without consuming) at ']'.
		// Each repetition after the first fires Action25 (p.AddAlternate, per the
		// action table below). The l131/l132 pair implements the `*` loop with
		// backtracking; l128 is the overall failure exit.
		func() bool {
			position128, tokenIndex128 := position, tokenIndex
			{
				position129 := position
				{
					// Negative lookahead: fail the rule if the next rune is ']'.
					position130, tokenIndex130 := position, tokenIndex
					if buffer[position] != rune(']') {
						goto l130
					}
					position++
					goto l128
				l130:
					position, tokenIndex = position130, tokenIndex130
				}
				if !_rules[ruleRange]() {
					goto l128
				}
			l131:
				{
					position132, tokenIndex132 := position, tokenIndex
					{
						position133, tokenIndex133 := position, tokenIndex
						if buffer[position] != rune(']') {
							goto l133
						}
						position++
						goto l132
					l133:
						position, tokenIndex = position133, tokenIndex133
					}
					if !_rules[ruleRange]() {
						goto l132
					}
					{
						add(ruleAction25, position)
					}
					goto l131
				l132:
					position, tokenIndex = position132, tokenIndex132
				}
				add(ruleRanges, position129)
			}
			return true
		l128:
			position, tokenIndex = position128, tokenIndex128
			return false
		},
		/* 14 DoubleRanges <- <(!(']' ']') DoubleRange (!(']' ']') DoubleRange Action26)*)> */
		// Same shape as Ranges, but for the `[[...]]` class form: the terminator is
		// the two-rune sequence "]]" and each repetition fires Action26
		// (p.AddAlternate, per the action table below).
		func() bool {
			position135, tokenIndex135 := position, tokenIndex
			{
				position136 := position
				{
					// Negative lookahead for "]]".
					position137, tokenIndex137 := position, tokenIndex
					if buffer[position] != rune(']') {
						goto l137
					}
					position++
					if buffer[position] != rune(']') {
						goto l137
					}
					position++
					goto l135
				l137:
					position, tokenIndex = position137, tokenIndex137
				}
				if !_rules[ruleDoubleRange]() {
					goto l135
				}
			l138:
				{
					position139, tokenIndex139 := position, tokenIndex
					{
						position140, tokenIndex140 := position, tokenIndex
						if buffer[position] != rune(']') {
							goto l140
						}
						position++
						if buffer[position] != rune(']') {
							goto l140
						}
						position++
						goto l139
					l140:
						position, tokenIndex = position140, tokenIndex140
					}
					if !_rules[ruleDoubleRange]() {
						goto l139
					}
					{
						add(ruleAction26, position)
					}
					goto l138
				l139:
					position, tokenIndex = position139, tokenIndex139
				}
				add(ruleDoubleRanges, position136)
			}
			return true
		l135:
			position, tokenIndex = position135, tokenIndex135
			return false
		},
		/* 15 Range <- <((Char '-' Char Action27) / Char)> */
		// Parses a character-class element: first tries the span form `a-b`
		// (firing Action27, p.AddRange per the action table below); on failure,
		// backtracks via l145 and accepts a single Char.
		func() bool {
			position142, tokenIndex142 := position, tokenIndex
			{
				position143 := position
				{
					position144, tokenIndex144 := position, tokenIndex
					if !_rules[ruleChar]() {
						goto l145
					}
					if buffer[position] != rune('-') {
						goto l145
					}
					position++
					if !_rules[ruleChar]() {
						goto l145
					}
					{
						add(ruleAction27, position)
					}
					goto l144
				l145:
					position, tokenIndex = position144, tokenIndex144
					if !_rules[ruleChar]() {
						goto l142
					}
				}
			l144:
				add(ruleRange, position143)
			}
			return true
		l142:
			position, tokenIndex = position142, tokenIndex142
			return false
		},
		/* 16 DoubleRange <- <((Char '-' Char Action28) / DoubleChar)> */
		// Case-insensitive-class variant of Range: tries the span form `a-b`
		// (firing Action28, p.AddDoubleRange per the action table below), else
		// backtracks via l150 and accepts a single DoubleChar.
		func() bool {
			position147, tokenIndex147 := position, tokenIndex
			{
				position148 := position
				{
					position149, tokenIndex149 := position, tokenIndex
					if !_rules[ruleChar]() {
						goto l150
					}
					if buffer[position] != rune('-') {
						goto l150
					}
					position++
					if !_rules[ruleChar]() {
						goto l150
					}
					{
						add(ruleAction28, position)
					}
					goto l149
				l150:
					position, tokenIndex = position149, tokenIndex149
					if !_rules[ruleDoubleChar]() {
						goto l147
					}
				}
			l149:
				add(ruleDoubleRange, position148)
			}
			return true
		l147:
			position, tokenIndex = position147, tokenIndex147
			return false
		},
		/* 17 Char <- <(Escape / (!'\\' <.> Action29))> */
		// Parses one literal character: either an Escape sequence, or any single
		// rune that is not a backslash. The non-escape rune is captured as
		// PegText and handed to Action29 (p.AddCharacter(text), per the action
		// table below).
		func() bool {
			position152, tokenIndex152 := position, tokenIndex
			{
				position153 := position
				{
					position154, tokenIndex154 := position, tokenIndex
					if !_rules[ruleEscape]() {
						goto l155
					}
					goto l154
				l155:
					position, tokenIndex = position154, tokenIndex154
					{
						// Negative lookahead: reject a backslash (that must be an Escape).
						position156, tokenIndex156 := position, tokenIndex
						if buffer[position] != rune('\\') {
							goto l156
						}
						position++
						goto l152
					l156:
						position, tokenIndex = position156, tokenIndex156
					}
					{
						position157 := position
						if !matchDot() {
							goto l152
						}
						add(rulePegText, position157)
					}
					{
						add(ruleAction29, position)
					}
				}
			l154:
				add(ruleChar, position153)
			}
			return true
		l152:
			position, tokenIndex = position152, tokenIndex152
			return false
		},
		/* 18 DoubleChar <- <(Escape / (<([a-z] / [A-Z])> Action30) / (!'\\' <.> Action31))> */
		// Parses one character for the case-insensitive ("double") literal form.
		// Three ordered alternatives: an Escape; an ASCII letter captured for
		// Action30 (p.AddDoubleCharacter(text)); or any non-backslash rune
		// captured for Action31 (p.AddCharacter(text)).
		func() bool {
			position159, tokenIndex159 := position, tokenIndex
			{
				position160 := position
				{
					position161, tokenIndex161 := position, tokenIndex
					if !_rules[ruleEscape]() {
						goto l162
					}
					goto l161
				l162:
					position, tokenIndex = position161, tokenIndex161
					{
						// Second alternative: a single [a-z] or [A-Z] letter.
						position164 := position
						{
							position165, tokenIndex165 := position, tokenIndex
							if c := buffer[position]; c < rune('a') || c > rune('z') {
								goto l166
							}
							position++
							goto l165
						l166:
							position, tokenIndex = position165, tokenIndex165
							if c := buffer[position]; c < rune('A') || c > rune('Z') {
								goto l163
							}
							position++
						}
					l165:
						add(rulePegText, position164)
					}
					{
						add(ruleAction30, position)
					}
					goto l161
				l163:
					position, tokenIndex = position161, tokenIndex161
					{
						// Third alternative: any rune except backslash.
						position168, tokenIndex168 := position, tokenIndex
						if buffer[position] != rune('\\') {
							goto l168
						}
						position++
						goto l159
					l168:
						position, tokenIndex = position168, tokenIndex168
					}
					{
						position169 := position
						if !matchDot() {
							goto l159
						}
						add(rulePegText, position169)
					}
					{
						add(ruleAction31, position)
					}
				}
			l161:
				add(ruleDoubleChar, position160)
			}
			return true
		l159:
			position, tokenIndex = position159, tokenIndex159
			return false
		},
		/* 19 Escape <- <(('\\' ('a' / 'A') Action32) / ('\\' ('b' / 'B') Action33) / ('\\' ('e' / 'E') Action34) / ('\\' ('f' / 'F') Action35) / ('\\' ('n' / 'N') Action36) / ('\\' ('r' / 'R') Action37) / ('\\' ('t' / 'T') Action38) / ('\\' ('v' / 'V') Action39) / ('\\' '\'' Action40) / ('\\' '"' Action41) / ('\\' '[' Action42) / ('\\' ']' Action43) / ('\\' '-' Action44) / ('\\' ('0' ('x' / 'X')) <((&('A' | 'B' | 'C' | 'D' | 'E' | 'F') [A-F]) | (&('a' | 'b' | 'c' | 'd' | 'e' | 'f') [a-f]) | (&('0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9') [0-9]))+> Action45) / ('\\' <([0-3] [0-7] [0-7])> Action46) / ('\\' <([0-7] [0-7]?)> Action47) / ('\\' '\\' Action48))> */
		// Parses a backslash escape sequence. Alternatives are tried in order;
		// each one re-matches the leading '\\' after backtracking to the l173
		// save point. Named escapes accept both cases (e.g. '\a' / '\A', which
		// fires Action32, p.AddCharacter("\a") per the action table below);
		// numeric forms capture their digits as PegText for Action45-47.
		// l171 is the overall failure exit.
		func() bool {
			position171, tokenIndex171 := position, tokenIndex
			{
				position172 := position
				{
					position173, tokenIndex173 := position, tokenIndex
					// '\a' / '\A' -> Action32
					if buffer[position] != rune('\\') {
						goto l174
					}
					position++
					{
						position175, tokenIndex175 := position, tokenIndex
						if buffer[position] != rune('a') {
							goto l176
						}
						position++
						goto l175
					l176:
						position, tokenIndex = position175, tokenIndex175
						if buffer[position] != rune('A') {
							goto l174
						}
						position++
					}
				l175:
					{
						add(ruleAction32, position)
					}
					goto l173
				l174:
					position, tokenIndex = position173, tokenIndex173
					// '\b' / '\B' -> Action33
					if buffer[position] != rune('\\') {
						goto l178
					}
					position++
					{
						position179, tokenIndex179 := position, tokenIndex
						if buffer[position] != rune('b') {
							goto l180
						}
						position++
						goto l179
					l180:
						position, tokenIndex = position179, tokenIndex179
						if buffer[position] != rune('B') {
							goto l178
						}
						position++
					}
				l179:
					{
						add(ruleAction33, position)
					}
					goto l173
				l178:
					position, tokenIndex = position173, tokenIndex173
					// '\e' / '\E' -> Action34
					if buffer[position] != rune('\\') {
						goto l182
					}
					position++
					{
						position183, tokenIndex183 := position, tokenIndex
						if buffer[position] != rune('e') {
							goto l184
						}
						position++
						goto l183
					l184:
						position, tokenIndex = position183, tokenIndex183
						if buffer[position] != rune('E') {
							goto l182
						}
						position++
					}
				l183:
					{
						add(ruleAction34, position)
					}
					goto l173
				l182:
					position, tokenIndex = position173, tokenIndex173
					// '\f' / '\F' -> Action35
					if buffer[position] != rune('\\') {
						goto l186
					}
					position++
					{
						position187, tokenIndex187 := position, tokenIndex
						if buffer[position] != rune('f') {
							goto l188
						}
						position++
						goto l187
					l188:
						position, tokenIndex = position187, tokenIndex187
						if buffer[position] != rune('F') {
							goto l186
						}
						position++
					}
				l187:
					{
						add(ruleAction35, position)
					}
					goto l173
				l186:
					position, tokenIndex = position173, tokenIndex173
					// '\n' / '\N' -> Action36
					if buffer[position] != rune('\\') {
						goto l190
					}
					position++
					{
						position191, tokenIndex191 := position, tokenIndex
						if buffer[position] != rune('n') {
							goto l192
						}
						position++
						goto l191
					l192:
						position, tokenIndex = position191, tokenIndex191
						if buffer[position] != rune('N') {
							goto l190
						}
						position++
					}
				l191:
					{
						add(ruleAction36, position)
					}
					goto l173
				l190:
					position, tokenIndex = position173, tokenIndex173
					// '\r' / '\R' -> Action37
					if buffer[position] != rune('\\') {
						goto l194
					}
					position++
					{
						position195, tokenIndex195 := position, tokenIndex
						if buffer[position] != rune('r') {
							goto l196
						}
						position++
						goto l195
					l196:
						position, tokenIndex = position195, tokenIndex195
						if buffer[position] != rune('R') {
							goto l194
						}
						position++
					}
				l195:
					{
						add(ruleAction37, position)
					}
					goto l173
				l194:
					position, tokenIndex = position173, tokenIndex173
					// '\t' / '\T' -> Action38
					if buffer[position] != rune('\\') {
						goto l198
					}
					position++
					{
						position199, tokenIndex199 := position, tokenIndex
						if buffer[position] != rune('t') {
							goto l200
						}
						position++
						goto l199
					l200:
						position, tokenIndex = position199, tokenIndex199
						if buffer[position] != rune('T') {
							goto l198
						}
						position++
					}
				l199:
					{
						add(ruleAction38, position)
					}
					goto l173
				l198:
					position, tokenIndex = position173, tokenIndex173
					// '\v' / '\V' -> Action39
					if buffer[position] != rune('\\') {
						goto l202
					}
					position++
					{
						position203, tokenIndex203 := position, tokenIndex
						if buffer[position] != rune('v') {
							goto l204
						}
						position++
						goto l203
					l204:
						position, tokenIndex = position203, tokenIndex203
						if buffer[position] != rune('V') {
							goto l202
						}
						position++
					}
				l203:
					{
						add(ruleAction39, position)
					}
					goto l173
				l202:
					position, tokenIndex = position173, tokenIndex173
					// Escaped punctuation: \' \" \[ \] \- -> Actions 40-44
					if buffer[position] != rune('\\') {
						goto l206
					}
					position++
					if buffer[position] != rune('\'') {
						goto l206
					}
					position++
					{
						add(ruleAction40, position)
					}
					goto l173
				l206:
					position, tokenIndex = position173, tokenIndex173
					if buffer[position] != rune('\\') {
						goto l208
					}
					position++
					if buffer[position] != rune('"') {
						goto l208
					}
					position++
					{
						add(ruleAction41, position)
					}
					goto l173
				l208:
					position, tokenIndex = position173, tokenIndex173
					if buffer[position] != rune('\\') {
						goto l210
					}
					position++
					if buffer[position] != rune('[') {
						goto l210
					}
					position++
					{
						add(ruleAction42, position)
					}
					goto l173
				l210:
					position, tokenIndex = position173, tokenIndex173
					if buffer[position] != rune('\\') {
						goto l212
					}
					position++
					if buffer[position] != rune(']') {
						goto l212
					}
					position++
					{
						add(ruleAction43, position)
					}
					goto l173
				l212:
					position, tokenIndex = position173, tokenIndex173
					if buffer[position] != rune('\\') {
						goto l214
					}
					position++
					if buffer[position] != rune('-') {
						goto l214
					}
					position++
					{
						add(ruleAction44, position)
					}
					goto l173
				l214:
					position, tokenIndex = position173, tokenIndex173
					// Hex escape: \0x / \0X followed by one or more hex digits,
					// captured as PegText for Action45.
					if buffer[position] != rune('\\') {
						goto l216
					}
					position++
					if buffer[position] != rune('0') {
						goto l216
					}
					position++
					{
						position217, tokenIndex217 := position, tokenIndex
						if buffer[position] != rune('x') {
							goto l218
						}
						position++
						goto l217
					l218:
						position, tokenIndex = position217, tokenIndex217
						if buffer[position] != rune('X') {
							goto l216
						}
						position++
					}
				l217:
					{
						position219 := position
						{
							switch buffer[position] {
							case 'A', 'B', 'C', 'D', 'E', 'F':
								if c := buffer[position]; c < rune('A') || c > rune('F') {
									goto l216
								}
								position++
							case 'a', 'b', 'c', 'd', 'e', 'f':
								if c := buffer[position]; c < rune('a') || c > rune('f') {
									goto l216
								}
								position++
							default:
								if c := buffer[position]; c < rune('0') || c > rune('9') {
									goto l216
								}
								position++
							}
						}

					l220:
						{
							position221, tokenIndex221 := position, tokenIndex
							{
								switch buffer[position] {
								case 'A', 'B', 'C', 'D', 'E', 'F':
									if c := buffer[position]; c < rune('A') || c > rune('F') {
										goto l221
									}
									position++
								case 'a', 'b', 'c', 'd', 'e', 'f':
									if c := buffer[position]; c < rune('a') || c > rune('f') {
										goto l221
									}
									position++
								default:
									if c := buffer[position]; c < rune('0') || c > rune('9') {
										goto l221
									}
									position++
								}
							}

							goto l220
						l221:
							position, tokenIndex = position221, tokenIndex221
						}
						add(rulePegText, position219)
					}
					{
						add(ruleAction45, position)
					}
					goto l173
				l216:
					position, tokenIndex = position173, tokenIndex173
					// Three-digit octal escape [0-3][0-7][0-7] -> Action46.
					if buffer[position] != rune('\\') {
						goto l225
					}
					position++
					{
						position226 := position
						if c := buffer[position]; c < rune('0') || c > rune('3') {
							goto l225
						}
						position++
						if c := buffer[position]; c < rune('0') || c > rune('7') {
							goto l225
						}
						position++
						if c := buffer[position]; c < rune('0') || c > rune('7') {
							goto l225
						}
						position++
						add(rulePegText, position226)
					}
					{
						add(ruleAction46, position)
					}
					goto l173
				l225:
					position, tokenIndex = position173, tokenIndex173
					// One- or two-digit octal escape [0-7][0-7]? -> Action47.
					if buffer[position] != rune('\\') {
						goto l228
					}
					position++
					{
						position229 := position
						if c := buffer[position]; c < rune('0') || c > rune('7') {
							goto l228
						}
						position++
						{
							position230, tokenIndex230 := position, tokenIndex
							if c := buffer[position]; c < rune('0') || c > rune('7') {
								goto l230
							}
							position++
							goto l231
						l230:
							position, tokenIndex = position230, tokenIndex230
						}
					l231:
						add(rulePegText, position229)
					}
					{
						add(ruleAction47, position)
					}
					goto l173
				l228:
					position, tokenIndex = position173, tokenIndex173
					// Literal backslash: '\\' '\\' -> Action48 (last alternative;
					// failing here fails the whole rule via l171).
					if buffer[position] != rune('\\') {
						goto l171
					}
					position++
					if buffer[position] != rune('\\') {
						goto l171
					}
					position++
					{
						add(ruleAction48, position)
					}
				}
			l173:
				add(ruleEscape, position172)
			}
			return true
		l171:
			position, tokenIndex = position171, tokenIndex171
			return false
		},
		/* 20 LeftArrow <- <((('<' '-') / '←') Spacing)> */
		// Matches the rule-definition arrow: either ASCII "<-" or the Unicode
		// arrow '←', followed by optional spacing.
		func() bool {
			position234, tokenIndex234 := position, tokenIndex
			{
				position235 := position
				{
					position236, tokenIndex236 := position, tokenIndex
					if buffer[position] != rune('<') {
						goto l237
					}
					position++
					if buffer[position] != rune('-') {
						goto l237
					}
					position++
					goto l236
				l237:
					position, tokenIndex = position236, tokenIndex236
					if buffer[position] != rune('←') {
						goto l234
					}
					position++
				}
			l236:
				if !_rules[ruleSpacing]() {
					goto l234
				}
				add(ruleLeftArrow, position235)
			}
			return true
		l234:
			position, tokenIndex = position234, tokenIndex234
			return false
		},
		/* 21 Slash <- <('/' Spacing)> */
		// Matches the alternation operator '/' followed by optional spacing.
		func() bool {
			position238, tokenIndex238 := position, tokenIndex
			{
				position239 := position
				if buffer[position] != rune('/') {
					goto l238
				}
				position++
				if !_rules[ruleSpacing]() {
					goto l238
				}
				add(ruleSlash, position239)
			}
			return true
		l238:
			position, tokenIndex = position238, tokenIndex238
			return false
		},
		/* 22 And <- <('&' Spacing)> */
		// Matches the and-predicate operator '&' followed by optional spacing.
		func() bool {
			position240, tokenIndex240 := position, tokenIndex
			{
				position241 := position
				if buffer[position] != rune('&') {
					goto l240
				}
				position++
				if !_rules[ruleSpacing]() {
					goto l240
				}
				add(ruleAnd, position241)
			}
			return true
		l240:
			position, tokenIndex = position240, tokenIndex240
			return false
		},
		/* 23 Not <- <('!' Spacing)> */
		// Matches the not-predicate operator '!' followed by optional spacing.
		func() bool {
			position242, tokenIndex242 := position, tokenIndex
			{
				position243 := position
				if buffer[position] != rune('!') {
					goto l242
				}
				position++
				if !_rules[ruleSpacing]() {
					goto l242
				}
				add(ruleNot, position243)
			}
			return true
		l242:
			position, tokenIndex = position242, tokenIndex242
			return false
		},
2383 | /* 24 Question <- <('?' Spacing)> */ | |
2384 | nil, | |
2385 | /* 25 Star <- <('*' Spacing)> */ | |
2386 | nil, | |
2387 | /* 26 Plus <- <('+' Spacing)> */ | |
2388 | nil, | |
2389 | /* 27 Open <- <('(' Spacing)> */ | |
2390 | nil, | |
2391 | /* 28 Close <- <(')' Spacing)> */ | |
2392 | nil, | |
2393 | /* 29 Dot <- <('.' Spacing)> */ | |
2394 | nil, | |
		/* 30 SpaceComment <- <(Space / Comment)> */
		// Matches a single whitespace unit (tab, space, or end-of-line, recorded
		// as a Space node) or a '#'-to-end-of-line comment (recorded as a
		// Comment node).
		func() bool {
			position250, tokenIndex250 := position, tokenIndex
			{
				position251 := position
				{
					position252, tokenIndex252 := position, tokenIndex
					{
						// First alternative: Space = '\t' | ' ' | EndOfLine.
						position254 := position
						{
							switch buffer[position] {
							case '\t':
								if buffer[position] != rune('\t') {
									goto l253
								}
								position++
							case ' ':
								if buffer[position] != rune(' ') {
									goto l253
								}
								position++
							default:
								if !_rules[ruleEndOfLine]() {
									goto l253
								}
							}
						}

						add(ruleSpace, position254)
					}
					goto l252
				l253:
					position, tokenIndex = position252, tokenIndex252
					{
						// Second alternative: Comment = '#' (!EndOfLine .)* EndOfLine.
						position256 := position
						if buffer[position] != rune('#') {
							goto l250
						}
						position++
					l257:
						{
							position258, tokenIndex258 := position, tokenIndex
							{
								position259, tokenIndex259 := position, tokenIndex
								if !_rules[ruleEndOfLine]() {
									goto l259
								}
								goto l258
							l259:
								position, tokenIndex = position259, tokenIndex259
							}
							if !matchDot() {
								goto l258
							}
							goto l257
						l258:
							position, tokenIndex = position258, tokenIndex258
						}
						if !_rules[ruleEndOfLine]() {
							goto l250
						}
						add(ruleComment, position256)
					}
				}
			l252:
				add(ruleSpaceComment, position251)
			}
			return true
		l250:
			position, tokenIndex = position250, tokenIndex250
			return false
		},
		/* 31 Spacing <- <SpaceComment*> */
		// Consumes zero or more SpaceComment units. Always succeeds, so no
		// save/restore of position or tokenIndex is needed.
		func() bool {
			{
				position261 := position
			l262:
				{
					position263, tokenIndex263 := position, tokenIndex
					if !_rules[ruleSpaceComment]() {
						goto l263
					}
					goto l262
				l263:
					position, tokenIndex = position263, tokenIndex263
				}
				add(ruleSpacing, position261)
			}
			return true
		},
		/* 32 MustSpacing <- <SpaceComment+> */
		// Like Spacing, but requires at least one SpaceComment unit; fails (and
		// restores position/tokenIndex) if none is present.
		func() bool {
			position264, tokenIndex264 := position, tokenIndex
			{
				position265 := position
				if !_rules[ruleSpaceComment]() {
					goto l264
				}
			l266:
				{
					position267, tokenIndex267 := position, tokenIndex
					if !_rules[ruleSpaceComment]() {
						goto l267
					}
					goto l266
				l267:
					position, tokenIndex = position267, tokenIndex267
				}
				add(ruleMustSpacing, position265)
			}
			return true
		l264:
			position, tokenIndex = position264, tokenIndex264
			return false
		},
2510 | /* 33 Comment <- <('#' (!EndOfLine .)* EndOfLine)> */ | |
2511 | nil, | |
2512 | /* 34 Space <- <((&('\t') '\t') | (&(' ') ' ') | (&('\n' | '\r') EndOfLine))> */ | |
2513 | nil, | |
		/* 35 EndOfLine <- <(('\r' '\n') / '\n' / '\r')> */
		// Matches one line terminator. "\r\n" is tried first so a CRLF pair is
		// consumed as a single EndOfLine rather than as '\r' then '\n'.
		func() bool {
			position270, tokenIndex270 := position, tokenIndex
			{
				position271 := position
				{
					position272, tokenIndex272 := position, tokenIndex
					if buffer[position] != rune('\r') {
						goto l273
					}
					position++
					if buffer[position] != rune('\n') {
						goto l273
					}
					position++
					goto l272
				l273:
					position, tokenIndex = position272, tokenIndex272
					if buffer[position] != rune('\n') {
						goto l274
					}
					position++
					goto l272
				l274:
					position, tokenIndex = position272, tokenIndex272
					if buffer[position] != rune('\r') {
						goto l270
					}
					position++
				}
			l272:
				add(ruleEndOfLine, position271)
			}
			return true
		l270:
			position, tokenIndex = position270, tokenIndex270
			return false
		},
2552 | /* 36 EndOfFile <- <!.> */ | |
2553 | nil, | |
		/* 37 Action <- <('{' <ActionBody*> '}' Spacing)> */
		// Matches a brace-delimited Go action block. The text between the outer
		// braces (zero or more ActionBody units, which balance nested braces) is
		// captured as PegText; trailing spacing is consumed.
		func() bool {
			position276, tokenIndex276 := position, tokenIndex
			{
				position277 := position
				if buffer[position] != rune('{') {
					goto l276
				}
				position++
				{
					position278 := position
				l279:
					{
						position280, tokenIndex280 := position, tokenIndex
						if !_rules[ruleActionBody]() {
							goto l280
						}
						goto l279
					l280:
						position, tokenIndex = position280, tokenIndex280
					}
					add(rulePegText, position278)
				}
				if buffer[position] != rune('}') {
					goto l276
				}
				position++
				if !_rules[ruleSpacing]() {
					goto l276
				}
				add(ruleAction, position277)
			}
			return true
		l276:
			position, tokenIndex = position276, tokenIndex276
			return false
		},
		/* 38 ActionBody <- <((!('{' / '}') .) / ('{' ActionBody* '}'))> */
		// One unit of action-block content: either any rune that is not a brace,
		// or a balanced nested '{' ... '}' group. The second alternative calls
		// this rule recursively (via _rules[ruleActionBody]) to keep brace
		// nesting balanced.
		func() bool {
			position281, tokenIndex281 := position, tokenIndex
			{
				position282 := position
				{
					position283, tokenIndex283 := position, tokenIndex
					{
						// Negative lookahead: reject '{' and '}' for the any-rune case.
						position285, tokenIndex285 := position, tokenIndex
						{
							position286, tokenIndex286 := position, tokenIndex
							if buffer[position] != rune('{') {
								goto l287
							}
							position++
							goto l286
						l287:
							position, tokenIndex = position286, tokenIndex286
							if buffer[position] != rune('}') {
								goto l285
							}
							position++
						}
					l286:
						goto l284
					l285:
						position, tokenIndex = position285, tokenIndex285
					}
					if !matchDot() {
						goto l284
					}
					goto l283
				l284:
					position, tokenIndex = position283, tokenIndex283
					if buffer[position] != rune('{') {
						goto l281
					}
					position++
				l288:
					{
						position289, tokenIndex289 := position, tokenIndex
						if !_rules[ruleActionBody]() {
							goto l289
						}
						goto l288
					l289:
						position, tokenIndex = position289, tokenIndex289
					}
					if buffer[position] != rune('}') {
						goto l281
					}
					position++
				}
			l283:
				add(ruleActionBody, position282)
			}
			return true
		l281:
			position, tokenIndex = position281, tokenIndex281
			return false
		},
2652 | /* 39 Begin <- <('<' Spacing)> */ | |
2653 | nil, | |
2654 | /* 40 End <- <('>' Spacing)> */ | |
2655 | nil, | |
2656 | /* 42 Action0 <- <{ p.AddPackage(text) }> */ | |
2657 | nil, | |
2658 | /* 43 Action1 <- <{ p.AddPeg(text) }> */ | |
2659 | nil, | |
2660 | /* 44 Action2 <- <{ p.AddState(text) }> */ | |
2661 | nil, | |
2662 | nil, | |
2663 | /* 46 Action3 <- <{ p.AddImport(text) }> */ | |
2664 | nil, | |
2665 | /* 47 Action4 <- <{ p.AddRule(text) }> */ | |
2666 | nil, | |
2667 | /* 48 Action5 <- <{ p.AddExpression() }> */ | |
2668 | nil, | |
2669 | /* 49 Action6 <- <{ p.AddAlternate() }> */ | |
2670 | nil, | |
2671 | /* 50 Action7 <- <{ p.AddNil(); p.AddAlternate() }> */ | |
2672 | nil, | |
2673 | /* 51 Action8 <- <{ p.AddNil() }> */ | |
2674 | nil, | |
2675 | /* 52 Action9 <- <{ p.AddSequence() }> */ | |
2676 | nil, | |
2677 | /* 53 Action10 <- <{ p.AddPredicate(text) }> */ | |
2678 | nil, | |
2679 | /* 54 Action11 <- <{ p.AddStateChange(text) }> */ | |
2680 | nil, | |
2681 | /* 55 Action12 <- <{ p.AddPeekFor() }> */ | |
2682 | nil, | |
2683 | /* 56 Action13 <- <{ p.AddPeekNot() }> */ | |
2684 | nil, | |
2685 | /* 57 Action14 <- <{ p.AddQuery() }> */ | |
2686 | nil, | |
2687 | /* 58 Action15 <- <{ p.AddStar() }> */ | |
2688 | nil, | |
2689 | /* 59 Action16 <- <{ p.AddPlus() }> */ | |
2690 | nil, | |
2691 | /* 60 Action17 <- <{ p.AddName(text) }> */ | |
2692 | nil, | |
2693 | /* 61 Action18 <- <{ p.AddDot() }> */ | |
2694 | nil, | |
2695 | /* 62 Action19 <- <{ p.AddAction(text) }> */ | |
2696 | nil, | |
2697 | /* 63 Action20 <- <{ p.AddPush() }> */ | |
2698 | nil, | |
2699 | /* 64 Action21 <- <{ p.AddSequence() }> */ | |
2700 | nil, | |
2701 | /* 65 Action22 <- <{ p.AddSequence() }> */ | |
2702 | nil, | |
2703 | /* 66 Action23 <- <{ p.AddPeekNot(); p.AddDot(); p.AddSequence() }> */ | |
2704 | nil, | |
2705 | /* 67 Action24 <- <{ p.AddPeekNot(); p.AddDot(); p.AddSequence() }> */ | |
2706 | nil, | |
2707 | /* 68 Action25 <- <{ p.AddAlternate() }> */ | |
2708 | nil, | |
2709 | /* 69 Action26 <- <{ p.AddAlternate() }> */ | |
2710 | nil, | |
2711 | /* 70 Action27 <- <{ p.AddRange() }> */ | |
2712 | nil, | |
2713 | /* 71 Action28 <- <{ p.AddDoubleRange() }> */ | |
2714 | nil, | |
2715 | /* 72 Action29 <- <{ p.AddCharacter(text) }> */ | |
2716 | nil, | |
2717 | /* 73 Action30 <- <{ p.AddDoubleCharacter(text) }> */ | |
2718 | nil, | |
2719 | /* 74 Action31 <- <{ p.AddCharacter(text) }> */ | |
2720 | nil, | |
2721 | /* 75 Action32 <- <{ p.AddCharacter("\a") }> */ | |
2722 | nil, | |
2723 | /* 76 Action33 <- <{ p.AddCharacter("\b") }> */ | |
2724 | nil, | |
2725 | /* 77 Action34 <- <{ p.AddCharacter("\x1B") }> */ | |
2726 | nil, | |
2727 | /* 78 Action35 <- <{ p.AddCharacter("\f") }> */ | |
2728 | nil, | |
2729 | /* 79 Action36 <- <{ p.AddCharacter("\n") }> */ | |
2730 | nil, | |
2731 | /* 80 Action37 <- <{ p.AddCharacter("\r") }> */ | |
2732 | nil, | |
2733 | /* 81 Action38 <- <{ p.AddCharacter("\t") }> */ | |
2734 | nil, | |
2735 | /* 82 Action39 <- <{ p.AddCharacter("\v") }> */ | |
2736 | nil, | |
2737 | /* 83 Action40 <- <{ p.AddCharacter("'") }> */ | |
2738 | nil, | |
2739 | /* 84 Action41 <- <{ p.AddCharacter("\"") }> */ | |
2740 | nil, | |
2741 | /* 85 Action42 <- <{ p.AddCharacter("[") }> */ | |
2742 | nil, | |
2743 | /* 86 Action43 <- <{ p.AddCharacter("]") }> */ | |
2744 | nil, | |
2745 | /* 87 Action44 <- <{ p.AddCharacter("-") }> */ | |
2746 | nil, | |
2747 | /* 88 Action45 <- <{ p.AddHexaCharacter(text) }> */ | |
2748 | nil, | |
2749 | /* 89 Action46 <- <{ p.AddOctalCharacter(text) }> */ | |
2750 | nil, | |
2751 | /* 90 Action47 <- <{ p.AddOctalCharacter(text) }> */ | |
2752 | nil, | |
2753 | /* 91 Action48 <- <{ p.AddCharacter("\\") }> */ | |
2754 | nil, | |
2755 | } | |
2756 | p.rules = _rules | |
2757 | return nil | |
2758 | } |
2 | 2 | import ( |
3 | 3 | "bytes" |
4 | 4 | "io/ioutil" |
5 | "os" | |
5 | 6 | "testing" |
7 | ||
8 | "github.com/pointlander/peg/tree" | |
6 | 9 | ) |
7 | 10 | |
8 | 11 | func TestCorrect(t *testing.T) { |
10 | 13 | type T Peg {} |
11 | 14 | Grammar <- !. |
12 | 15 | ` |
13 | p := &Peg{Tree: New(false, false), Buffer: buffer} | |
16 | p := &Peg{Tree: tree.New(false, false, false), Buffer: buffer} | |
14 | 17 | p.Init() |
15 | 18 | err := p.Parse() |
19 | if err != nil { | |
20 | t.Error(err) | |
21 | } | |
22 | ||
23 | p = &Peg{Tree: tree.New(false, false, false), Buffer: buffer} | |
24 | p.Init(Size(1<<15)) | |
25 | err = p.Parse() | |
16 | 26 | if err != nil { |
17 | 27 | t.Error(err) |
18 | 28 | } |
23 | 33 | type T Peg {} |
24 | 34 | Grammar <- !. |
25 | 35 | ` |
26 | p := &Peg{Tree: New(false, false), Buffer: buffer} | |
27 | p.Init() | |
36 | p := &Peg{Tree: tree.New(false, false, false), Buffer: buffer} | |
37 | p.Init(Size(1<<15)) | |
28 | 38 | err := p.Parse() |
29 | 39 | if err == nil { |
30 | 40 | t.Error("packagenospace was parsed without error") |
37 | 47 | typenospace Peg {} |
38 | 48 | Grammar <- !. |
39 | 49 | ` |
40 | p := &Peg{Tree: New(false, false), Buffer: buffer} | |
41 | p.Init() | |
50 | p := &Peg{Tree: tree.New(false, false, false), Buffer: buffer} | |
51 | p.Init(Size(1<<15)) | |
42 | 52 | err := p.Parse() |
43 | 53 | if err == nil { |
44 | 54 | t.Error("typenospace was parsed without error") |
51 | 61 | t.Error(err) |
52 | 62 | } |
53 | 63 | |
54 | p := &Peg{Tree: New(true, true), Buffer: string(buffer)} | |
55 | p.Init() | |
56 | if err := p.Parse(); err != nil { | |
64 | p := &Peg{Tree: tree.New(true, true, false), Buffer: string(buffer)} | |
65 | p.Init(Size(1<<15)) | |
66 | if err = p.Parse(); err != nil { | |
57 | 67 | t.Error(err) |
58 | 68 | } |
59 | 69 | |
60 | 70 | p.Execute() |
61 | 71 | |
62 | 72 | out := &bytes.Buffer{} |
63 | p.Compile("peg.peg.go", out) | |
64 | ||
65 | bootstrap, err := ioutil.ReadFile("bootstrap.peg.go") | |
73 | p.Compile("peg.peg.go", []string{"./peg", "-inline", "-switch", "peg.peg"}, out) | |
74 | ||
75 | bootstrap, err := ioutil.ReadFile("peg.peg.go") | |
66 | 76 | if err != nil { |
67 | 77 | t.Error(err) |
68 | 78 | } |
69 | 79 | |
70 | 80 | if len(out.Bytes()) != len(bootstrap) { |
71 | t.Error("code generated from peg.peg is not the same as bootstrap.peg.go") | |
81 | t.Error("code generated from peg.peg is not the same as .go") | |
72 | 82 | return |
73 | 83 | } |
74 | 84 | |
75 | 85 | for i, v := range out.Bytes() { |
76 | 86 | if v != bootstrap[i] { |
77 | t.Error("code generated from peg.peg is not the same as bootstrap.peg.go") | |
87 | t.Error("code generated from peg.peg is not the same as .go") | |
78 | 88 | return |
79 | 89 | } |
80 | 90 | } |
81 | 91 | } |
82 | 92 | |
93 | func TestStrict(t *testing.T) { | |
94 | tt := []string{ | |
95 | // rule used but not defined | |
96 | ` | |
97 | package main | |
98 | type test Peg {} | |
99 | Begin <- begin !. | |
100 | `, | |
101 | // rule defined but not used | |
102 | ` | |
103 | package main | |
104 | type test Peg {} | |
105 | Begin <- . | |
106 | unused <- 'unused' | |
107 | `, | |
108 | // left recursive rule | |
109 | `package main | |
110 | type test Peg {} | |
111 | Begin <- Begin 'x' | |
112 | `, | |
113 | } | |
114 | ||
115 | for i, buffer := range tt { | |
116 | p := &Peg{Tree: tree.New(false, false, false), Buffer: buffer} | |
117 | p.Init(Size(1<<15)) | |
118 | if err := p.Parse(); err != nil { | |
119 | t.Fatal(err) | |
120 | } | |
121 | p.Execute() | |
122 | ||
123 | f, err := ioutil.TempFile("", "peg") | |
124 | if err != nil { | |
125 | t.Fatal(err) | |
126 | } | |
127 | defer func() { | |
128 | os.Remove(f.Name()) | |
129 | f.Close() | |
130 | }() | |
131 | out := &bytes.Buffer{} | |
132 | p.Strict = true | |
133 | if err = p.Compile(f.Name(), []string{"peg"}, out); err == nil { | |
134 | t.Fatalf("#%d: expected warning error", i) | |
135 | } | |
136 | p.Strict = false | |
137 | if err = p.Compile(f.Name(), []string{"peg"}, out); err != nil { | |
138 | t.Fatalf("#%d: unexpected error (%v)", i, err) | |
139 | } | |
140 | } | |
141 | } | |
142 | ||
143 | var files = [...]string{ | |
144 | "peg.peg", | |
145 | "grammars/c/c.peg", | |
146 | "grammars/calculator/calculator.peg", | |
147 | "grammars/fexl/fexl.peg", | |
148 | "grammars/java/java_1_7.peg", | |
149 | } | |
150 | ||
151 | func BenchmarkInitOnly(b *testing.B) { | |
152 | pegs := []string{} | |
153 | for _, file := range files { | |
154 | input, err := ioutil.ReadFile(file) | |
155 | if err != nil { | |
156 | b.Error(err) | |
157 | } | |
158 | pegs = append(pegs, string(input)) | |
159 | } | |
160 | ||
161 | b.ResetTimer() | |
162 | for i := 0; i < b.N; i++ { | |
163 | for _, peg := range pegs { | |
164 | p := &Peg{Tree: tree.New(true, true, false), Buffer: peg} | |
165 | p.Init(Size(1<<15)) | |
166 | } | |
167 | } | |
168 | } | |
169 | ||
83 | 170 | func BenchmarkParse(b *testing.B) { |
84 | files := [...]string{ | |
85 | "peg.peg", | |
86 | "grammars/c/c.peg", | |
87 | "grammars/calculator/calculator.peg", | |
88 | "grammars/fexl/fexl.peg", | |
89 | "grammars/java/java_1_7.peg", | |
90 | } | |
91 | 171 | pegs := make([]*Peg, len(files)) |
92 | 172 | for i, file := range files { |
93 | 173 | input, err := ioutil.ReadFile(file) |
95 | 175 | b.Error(err) |
96 | 176 | } |
97 | 177 | |
98 | p := &Peg{Tree: New(true, true), Buffer: string(input)} | |
99 | p.Init() | |
178 | p := &Peg{Tree: tree.New(true, true, false), Buffer: string(input)} | |
179 | p.Init(Size(1<<15)) | |
100 | 180 | pegs[i] = p |
101 | 181 | } |
102 | 182 | |
103 | 183 | b.ResetTimer() |
104 | 184 | for i := 0; i < b.N; i++ { |
105 | 185 | for _, peg := range pegs { |
186 | if err := peg.Parse(); err != nil { | |
187 | b.Error(err) | |
188 | } | |
189 | b.StopTimer() | |
106 | 190 | peg.Reset() |
107 | if err := peg.Parse(); err != nil { | |
108 | b.Error(err) | |
109 | } | |
110 | } | |
111 | } | |
112 | } | |
191 | b.StartTimer() | |
192 | } | |
193 | } | |
194 | } | |
195 | ||
196 | func BenchmarkResetAndParse(b *testing.B) { | |
197 | pegs := make([]*Peg, len(files)) | |
198 | for i, file := range files { | |
199 | input, err := ioutil.ReadFile(file) | |
200 | if err != nil { | |
201 | b.Error(err) | |
202 | } | |
203 | ||
204 | p := &Peg{Tree: tree.New(true, true, false), Buffer: string(input)} | |
205 | p.Init(Size(1<<15)) | |
206 | pegs[i] = p | |
207 | } | |
208 | ||
209 | b.ResetTimer() | |
210 | for i := 0; i < b.N; i++ { | |
211 | for _, peg := range pegs { | |
212 | if err := peg.Parse(); err != nil { | |
213 | b.Error(err) | |
214 | } | |
215 | peg.Reset() | |
216 | } | |
217 | } | |
218 | } | |
219 | ||
220 | func BenchmarkInitAndParse(b *testing.B) { | |
221 | strs := []string{} | |
222 | for _, file := range files { | |
223 | input, err := ioutil.ReadFile(file) | |
224 | if err != nil { | |
225 | b.Error(err) | |
226 | } | |
227 | strs = append(strs, string(input)) | |
228 | } | |
229 | ||
230 | b.ResetTimer() | |
231 | for i := 0; i < b.N; i++ { | |
232 | for _, str := range strs { | |
233 | peg := &Peg{Tree: tree.New(true, true, false), Buffer: str} | |
234 | peg.Init(Size(1<<15)) | |
235 | if err := peg.Parse(); err != nil { | |
236 | b.Error(err) | |
237 | } | |
238 | } | |
239 | } | |
240 | } | |
241 | ||
242 | func BenchmarkInitResetAndParse(b *testing.B) { | |
243 | strs := []string{} | |
244 | for _, file := range files { | |
245 | input, err := ioutil.ReadFile(file) | |
246 | if err != nil { | |
247 | b.Error(err) | |
248 | } | |
249 | strs = append(strs, string(input)) | |
250 | } | |
251 | ||
252 | b.ResetTimer() | |
253 | for i := 0; i < b.N; i++ { | |
254 | for _, str := range strs { | |
255 | peg := &Peg{Tree: tree.New(true, true, false), Buffer: str} | |
256 | peg.Init(Size(1<<15)) | |
257 | if err := peg.Parse(); err != nil { | |
258 | b.Error(err) | |
259 | } | |
260 | peg.Reset() | |
261 | } | |
262 | } | |
263 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | package tree | |
5 | ||
6 | import ( | |
7 | "bytes" | |
8 | "fmt" | |
9 | "go/parser" | |
10 | "go/printer" | |
11 | "go/token" | |
12 | "io" | |
13 | "math" | |
14 | "os" | |
15 | "sort" | |
16 | "strconv" | |
17 | "strings" | |
18 | "text/template" | |
19 | ||
20 | "github.com/pointlander/jetset" | |
21 | ) | |
22 | ||
23 | const pegHeaderTemplate = `package {{.PackageName}} | |
24 | ||
25 | // Code generated by {{.Generator}} DO NOT EDIT | |
26 | ||
27 | ||
28 | import ( | |
29 | {{range .Imports}}"{{.}}" | |
30 | {{end}} | |
31 | ) | |
32 | ||
33 | const endSymbol rune = {{.EndSymbol}} | |
34 | ||
35 | /* The rule types inferred from the grammar are below. */ | |
36 | type pegRule {{.PegRuleType}} | |
37 | ||
38 | const ( | |
39 | ruleUnknown pegRule = iota | |
40 | {{range .RuleNames}}rule{{.String}} | |
41 | {{end}} | |
42 | ) | |
43 | ||
44 | var rul3s = [...]string { | |
45 | "Unknown", | |
46 | {{range .RuleNames}}"{{.String}}", | |
47 | {{end}} | |
48 | } | |
49 | ||
50 | type token32 struct { | |
51 | pegRule | |
52 | begin, end uint32 | |
53 | } | |
54 | ||
55 | func (t *token32) String() string { | |
56 | return fmt.Sprintf("\x1B[34m%v\x1B[m %v %v", rul3s[t.pegRule], t.begin, t.end) | |
57 | } | |
58 | ||
59 | {{if .Ast}} | |
60 | type node32 struct { | |
61 | token32 | |
62 | up, next *node32 | |
63 | } | |
64 | ||
65 | func (node *node32) print(w io.Writer, pretty bool, buffer string) { | |
66 | var print func(node *node32, depth int) | |
67 | print = func(node *node32, depth int) { | |
68 | for node != nil { | |
69 | for c := 0; c < depth; c++ { | |
70 | fmt.Fprintf(w, " ") | |
71 | } | |
72 | rule := rul3s[node.pegRule] | |
73 | quote := strconv.Quote(string(([]rune(buffer)[node.begin:node.end]))) | |
74 | if !pretty { | |
75 | fmt.Fprintf(w, "%v %v\n", rule, quote) | |
76 | } else { | |
77 | fmt.Fprintf(w, "\x1B[34m%v\x1B[m %v\n", rule, quote) | |
78 | } | |
79 | if node.up != nil { | |
80 | print(node.up, depth + 1) | |
81 | } | |
82 | node = node.next | |
83 | } | |
84 | } | |
85 | print(node, 0) | |
86 | } | |
87 | ||
88 | func (node *node32) Print(w io.Writer, buffer string) { | |
89 | node.print(w, false, buffer) | |
90 | } | |
91 | ||
92 | func (node *node32) PrettyPrint(w io.Writer, buffer string) { | |
93 | node.print(w, true, buffer) | |
94 | } | |
95 | ||
96 | type tokens32 struct { | |
97 | tree []token32 | |
98 | } | |
99 | ||
100 | func (t *tokens32) Trim(length uint32) { | |
101 | t.tree = t.tree[:length] | |
102 | } | |
103 | ||
104 | func (t *tokens32) Print() { | |
105 | for _, token := range t.tree { | |
106 | fmt.Println(token.String()) | |
107 | } | |
108 | } | |
109 | ||
110 | func (t *tokens32) AST() *node32 { | |
111 | type element struct { | |
112 | node *node32 | |
113 | down *element | |
114 | } | |
115 | tokens := t.Tokens() | |
116 | var stack *element | |
117 | for _, token := range tokens { | |
118 | if token.begin == token.end { | |
119 | continue | |
120 | } | |
121 | node := &node32{token32: token} | |
122 | for stack != nil && stack.node.begin >= token.begin && stack.node.end <= token.end { | |
123 | stack.node.next = node.up | |
124 | node.up = stack.node | |
125 | stack = stack.down | |
126 | } | |
127 | stack = &element{node: node, down: stack} | |
128 | } | |
129 | if stack != nil { | |
130 | return stack.node | |
131 | } | |
132 | return nil | |
133 | } | |
134 | ||
135 | func (t *tokens32) PrintSyntaxTree(buffer string) { | |
136 | t.AST().Print(os.Stdout, buffer) | |
137 | } | |
138 | ||
139 | func (t *tokens32) WriteSyntaxTree(w io.Writer, buffer string) { | |
140 | t.AST().Print(w, buffer) | |
141 | } | |
142 | ||
143 | func (t *tokens32) PrettyPrintSyntaxTree(buffer string) { | |
144 | t.AST().PrettyPrint(os.Stdout, buffer) | |
145 | } | |
146 | ||
147 | func (t *tokens32) Add(rule pegRule, begin, end, index uint32) { | |
148 | tree, i := t.tree, int(index) | |
149 | if i >= len(tree) { | |
150 | t.tree = append(tree, token32{pegRule: rule, begin: begin, end: end}) | |
151 | return | |
152 | } | |
153 | tree[i] = token32{pegRule: rule, begin: begin, end: end} | |
154 | } | |
155 | ||
156 | func (t *tokens32) Tokens() []token32 { | |
157 | return t.tree | |
158 | } | |
159 | {{end}} | |
160 | ||
161 | type {{.StructName}} struct { | |
162 | {{.StructVariables}} | |
163 | Buffer string | |
164 | buffer []rune | |
165 | rules [{{.RulesCount}}]func() bool | |
166 | parse func(rule ...int) error | |
167 | reset func() | |
168 | Pretty bool | |
169 | {{if .Ast -}} | |
170 | tokens32 | |
171 | {{end -}} | |
172 | } | |
173 | ||
174 | func (p *{{.StructName}}) Parse(rule ...int) error { | |
175 | return p.parse(rule...) | |
176 | } | |
177 | ||
178 | func (p *{{.StructName}}) Reset() { | |
179 | p.reset() | |
180 | } | |
181 | ||
182 | type textPosition struct { | |
183 | line, symbol int | |
184 | } | |
185 | ||
186 | type textPositionMap map[int] textPosition | |
187 | ||
188 | func translatePositions(buffer []rune, positions []int) textPositionMap { | |
189 | length, translations, j, line, symbol := len(positions), make(textPositionMap, len(positions)), 0, 1, 0 | |
190 | sort.Ints(positions) | |
191 | ||
192 | search: for i, c := range buffer { | |
193 | if c == '\n' {line, symbol = line + 1, 0} else {symbol++} | |
194 | if i == positions[j] { | |
195 | translations[positions[j]] = textPosition{line, symbol} | |
196 | for j++; j < length; j++ {if i != positions[j] {continue search}} | |
197 | break search | |
198 | } | |
199 | } | |
200 | ||
201 | return translations | |
202 | } | |
203 | ||
204 | type parseError struct { | |
205 | p *{{.StructName}} | |
206 | max token32 | |
207 | } | |
208 | ||
209 | func (e *parseError) Error() string { | |
210 | tokens, err := []token32{e.max}, "\n" | |
211 | positions, p := make([]int, 2 * len(tokens)), 0 | |
212 | for _, token := range tokens { | |
213 | positions[p], p = int(token.begin), p + 1 | |
214 | positions[p], p = int(token.end), p + 1 | |
215 | } | |
216 | translations := translatePositions(e.p.buffer, positions) | |
217 | format := "parse error near %v (line %v symbol %v - line %v symbol %v):\n%v\n" | |
218 | if e.p.Pretty { | |
219 | format = "parse error near \x1B[34m%v\x1B[m (line %v symbol %v - line %v symbol %v):\n%v\n" | |
220 | } | |
221 | for _, token := range tokens { | |
222 | begin, end := int(token.begin), int(token.end) | |
223 | err += fmt.Sprintf(format, | |
224 | rul3s[token.pegRule], | |
225 | translations[begin].line, translations[begin].symbol, | |
226 | translations[end].line, translations[end].symbol, | |
227 | strconv.Quote(string(e.p.buffer[begin:end]))) | |
228 | } | |
229 | ||
230 | return err | |
231 | } | |
232 | ||
233 | {{if .Ast}} | |
234 | func (p *{{.StructName}}) PrintSyntaxTree() { | |
235 | if p.Pretty { | |
236 | p.tokens32.PrettyPrintSyntaxTree(p.Buffer) | |
237 | } else { | |
238 | p.tokens32.PrintSyntaxTree(p.Buffer) | |
239 | } | |
240 | } | |
241 | ||
242 | func (p *{{.StructName}}) WriteSyntaxTree(w io.Writer) { | |
243 | p.tokens32.WriteSyntaxTree(w, p.Buffer) | |
244 | } | |
245 | ||
246 | ||
247 | {{if .HasActions}} | |
248 | func (p *{{.StructName}}) Execute() { | |
249 | buffer, _buffer, text, begin, end := p.Buffer, p.buffer, "", 0, 0 | |
250 | for _, token := range p.Tokens() { | |
251 | switch (token.pegRule) { | |
252 | {{if .HasPush}} | |
253 | case rulePegText: | |
254 | begin, end = int(token.begin), int(token.end) | |
255 | text = string(_buffer[begin:end]) | |
256 | {{end}} | |
257 | {{range .Actions}}case ruleAction{{.GetId}}: | |
258 | {{.String}} | |
259 | {{end}} | |
260 | } | |
261 | } | |
262 | _, _, _, _, _ = buffer, _buffer, text, begin, end | |
263 | } | |
264 | {{end}} | |
265 | {{end}} | |
266 | ||
267 | func Pretty(pretty bool) func(*{{.StructName}}) error { | |
268 | return func(p *{{.StructName}}) error { | |
269 | p.Pretty = pretty | |
270 | return nil | |
271 | } | |
272 | } | |
273 | ||
274 | {{if .Ast -}} | |
275 | func Size(size int) func(*{{.StructName}}) error { | |
276 | return func(p *{{.StructName}}) error { | |
277 | p.tokens32 = tokens32{tree: make([]token32, 0, size)} | |
278 | return nil | |
279 | } | |
280 | } | |
281 | {{end -}} | |
282 | ||
283 | func (p *{{.StructName}}) Init(options ...func(*{{.StructName}}) error) error { | |
284 | var ( | |
285 | max token32 | |
286 | position, tokenIndex uint32 | |
287 | buffer []rune | |
288 | {{if not .Ast -}} | |
289 | {{if .HasPush -}} | |
290 | text string | |
291 | {{end -}} | |
292 | {{end -}} | |
293 | ) | |
294 | for _, option := range options { | |
295 | err := option(p) | |
296 | if err != nil { | |
297 | return err | |
298 | } | |
299 | } | |
300 | p.reset = func() { | |
301 | max = token32{} | |
302 | position, tokenIndex = 0, 0 | |
303 | ||
304 | p.buffer = []rune(p.Buffer) | |
305 | if len(p.buffer) == 0 || p.buffer[len(p.buffer) - 1] != endSymbol { | |
306 | p.buffer = append(p.buffer, endSymbol) | |
307 | } | |
308 | buffer = p.buffer | |
309 | } | |
310 | p.reset() | |
311 | ||
312 | _rules := p.rules | |
313 | {{if .Ast -}} | |
314 | tree := p.tokens32 | |
315 | {{end -}} | |
316 | p.parse = func(rule ...int) error { | |
317 | r := 1 | |
318 | if len(rule) > 0 { | |
319 | r = rule[0] | |
320 | } | |
321 | matches := p.rules[r]() | |
322 | {{if .Ast -}} | |
323 | p.tokens32 = tree | |
324 | {{end -}} | |
325 | if matches { | |
326 | {{if .Ast -}} | |
327 | p.Trim(tokenIndex) | |
328 | {{end -}} | |
329 | return nil | |
330 | } | |
331 | return &parseError{p, max} | |
332 | } | |
333 | ||
334 | add := func(rule pegRule, begin uint32) { | |
335 | {{if .Ast -}} | |
336 | tree.Add(rule, begin, position, tokenIndex) | |
337 | {{end -}} | |
338 | tokenIndex++ | |
339 | if begin != position && position > max.end { | |
340 | max = token32{rule, begin, position} | |
341 | } | |
342 | } | |
343 | ||
344 | {{if .HasDot}} | |
345 | matchDot := func() bool { | |
346 | if buffer[position] != endSymbol { | |
347 | position++ | |
348 | return true | |
349 | } | |
350 | return false | |
351 | } | |
352 | {{end}} | |
353 | ||
354 | {{if .HasCharacter}} | |
355 | /*matchChar := func(c byte) bool { | |
356 | if buffer[position] == c { | |
357 | position++ | |
358 | return true | |
359 | } | |
360 | return false | |
361 | }*/ | |
362 | {{end}} | |
363 | ||
364 | {{if .HasString}} | |
365 | matchString := func(s string) bool { | |
366 | i := position | |
367 | for _, c := range s { | |
368 | if buffer[i] != c { | |
369 | return false | |
370 | } | |
371 | i++ | |
372 | } | |
373 | position = i | |
374 | return true | |
375 | } | |
376 | {{end}} | |
377 | ||
378 | {{if .HasRange}} | |
379 | /*matchRange := func(lower byte, upper byte) bool { | |
380 | if c := buffer[position]; c >= lower && c <= upper { | |
381 | position++ | |
382 | return true | |
383 | } | |
384 | return false | |
385 | }*/ | |
386 | {{end}} | |
387 | ||
388 | _rules = [...]func() bool { | |
389 | nil,` | |
390 | ||
391 | type Type uint8 | |
392 | ||
393 | const ( | |
394 | TypeUnknown Type = iota | |
395 | TypeRule | |
396 | TypeName | |
397 | TypeDot | |
398 | TypeCharacter | |
399 | TypeRange | |
400 | TypeString | |
401 | TypePredicate | |
402 | TypeStateChange | |
403 | TypeCommit | |
404 | TypeAction | |
405 | TypePackage | |
406 | TypeImport | |
407 | TypeState | |
408 | TypeAlternate | |
409 | TypeUnorderedAlternate | |
410 | TypeSequence | |
411 | TypePeekFor | |
412 | TypePeekNot | |
413 | TypeQuery | |
414 | TypeStar | |
415 | TypePlus | |
416 | TypePeg | |
417 | TypePush | |
418 | TypeImplicitPush | |
419 | TypeNil | |
420 | TypeLast | |
421 | ) | |
422 | ||
423 | var TypeMap = [...]string{ | |
424 | "TypeUnknown", | |
425 | "TypeRule", | |
426 | "TypeName", | |
427 | "TypeDot", | |
428 | "TypeCharacter", | |
429 | "TypeRange", | |
430 | "TypeString", | |
431 | "TypePredicate", | |
432 | "TypeStateChange", | |
433 | "TypeCommit", | |
434 | "TypeAction", | |
435 | "TypePackage", | |
436 | "TypeImport", | |
437 | "TypeState", | |
438 | "TypeAlternate", | |
439 | "TypeUnorderedAlternate", | |
440 | "TypeSequence", | |
441 | "TypePeekFor", | |
442 | "TypePeekNot", | |
443 | "TypeQuery", | |
444 | "TypeStar", | |
445 | "TypePlus", | |
446 | "TypePeg", | |
447 | "TypePush", | |
448 | "TypeImplicitPush", | |
449 | "TypeNil", | |
450 | "TypeLast"} | |
451 | ||
452 | func (t Type) GetType() Type { | |
453 | return t | |
454 | } | |
455 | ||
456 | type Node interface { | |
457 | fmt.Stringer | |
458 | debug() | |
459 | ||
460 | Escaped() string | |
461 | SetString(s string) | |
462 | ||
463 | GetType() Type | |
464 | SetType(t Type) | |
465 | ||
466 | GetId() int | |
467 | SetId(id int) | |
468 | ||
469 | Init() | |
470 | Front() *node | |
471 | Next() *node | |
472 | PushFront(value *node) | |
473 | PopFront() *node | |
474 | PushBack(value *node) | |
475 | Len() int | |
476 | Copy() *node | |
477 | Slice() []*node | |
478 | } | |
479 | ||
480 | type node struct { | |
481 | Type | |
482 | string | |
483 | id int | |
484 | ||
485 | front *node | |
486 | back *node | |
487 | length int | |
488 | ||
489 | /* use hash table here instead of Copy? */ | |
490 | next *node | |
491 | } | |
492 | ||
493 | func (n *node) String() string { | |
494 | return n.string | |
495 | } | |
496 | ||
497 | func (n *node) debug() { | |
498 | if len(n.string) == 1 { | |
499 | fmt.Printf("%v %v '%v' %d\n", n.id, TypeMap[n.Type], n.string, n.string[0]) | |
500 | } else { | |
501 | fmt.Printf("%v %v '%v'\n", n.id, TypeMap[n.Type], n.string) | |
502 | } | |
503 | } | |
504 | ||
505 | func (n *node) Escaped() string { | |
506 | return escape(n.string) | |
507 | } | |
508 | ||
509 | func (n *node) SetString(s string) { | |
510 | n.string = s | |
511 | } | |
512 | ||
513 | func (n *node) SetType(t Type) { | |
514 | n.Type = t | |
515 | } | |
516 | ||
517 | func (n *node) GetId() int { | |
518 | return n.id | |
519 | } | |
520 | ||
521 | func (n *node) SetId(id int) { | |
522 | n.id = id | |
523 | } | |
524 | ||
525 | func (n *node) Init() { | |
526 | n.front = nil | |
527 | n.back = nil | |
528 | n.length = 0 | |
529 | } | |
530 | ||
531 | func (n *node) Front() *node { | |
532 | return n.front | |
533 | } | |
534 | ||
535 | func (n *node) Next() *node { | |
536 | return n.next | |
537 | } | |
538 | ||
539 | func (n *node) PushFront(value *node) { | |
540 | if n.back == nil { | |
541 | n.back = value | |
542 | } else { | |
543 | value.next = n.front | |
544 | } | |
545 | n.front = value | |
546 | n.length++ | |
547 | } | |
548 | ||
549 | func (n *node) PopFront() *node { | |
550 | front := n.front | |
551 | ||
552 | switch true { | |
553 | case front == nil: | |
554 | panic("tree is empty") | |
555 | case front == n.back: | |
556 | n.front, n.back = nil, nil | |
557 | default: | |
558 | n.front, front.next = front.next, nil | |
559 | } | |
560 | ||
561 | n.length-- | |
562 | return front | |
563 | } | |
564 | ||
565 | func (n *node) PushBack(value *node) { | |
566 | if n.front == nil { | |
567 | n.front = value | |
568 | } else { | |
569 | n.back.next = value | |
570 | } | |
571 | n.back = value | |
572 | n.length++ | |
573 | } | |
574 | ||
575 | func (n *node) Len() (c int) { | |
576 | return n.length | |
577 | } | |
578 | ||
579 | func (n *node) Copy() *node { | |
580 | return &node{Type: n.Type, string: n.string, id: n.id, front: n.front, back: n.back, length: n.length} | |
581 | } | |
582 | ||
583 | func (n *node) Slice() []*node { | |
584 | s := make([]*node, n.length) | |
585 | for element, i := n.Front(), 0; element != nil; element, i = element.Next(), i+1 { | |
586 | s[i] = element | |
587 | } | |
588 | return s | |
589 | } | |
590 | ||
591 | /* A tree data structure into which a PEG can be parsed. */ | |
592 | type Tree struct { | |
593 | Rules map[string]Node | |
594 | rulesCount map[string]uint | |
595 | node | |
596 | inline, _switch, Ast bool | |
597 | Strict bool | |
598 | ||
599 | Generator string | |
600 | RuleNames []Node | |
601 | PackageName string | |
602 | Imports []string | |
603 | EndSymbol rune | |
604 | PegRuleType string | |
605 | StructName string | |
606 | StructVariables string | |
607 | RulesCount int | |
608 | Bits int | |
609 | HasActions bool | |
610 | Actions []Node | |
611 | HasPush bool | |
612 | HasCommit bool | |
613 | HasDot bool | |
614 | HasCharacter bool | |
615 | HasString bool | |
616 | HasRange bool | |
617 | } | |
618 | ||
619 | func New(inline, _switch, noast bool) *Tree { | |
620 | return &Tree{ | |
621 | Rules: make(map[string]Node), | |
622 | rulesCount: make(map[string]uint), | |
623 | inline: inline, | |
624 | _switch: _switch, | |
625 | Ast: !noast, | |
626 | } | |
627 | } | |
628 | ||
629 | func (t *Tree) AddRule(name string) { | |
630 | t.PushFront(&node{Type: TypeRule, string: name, id: t.RulesCount}) | |
631 | t.RulesCount++ | |
632 | } | |
633 | ||
634 | func (t *Tree) AddExpression() { | |
635 | expression := t.PopFront() | |
636 | rule := t.PopFront() | |
637 | rule.PushBack(expression) | |
638 | t.PushBack(rule) | |
639 | } | |
640 | ||
641 | func (t *Tree) AddName(text string) { | |
642 | t.PushFront(&node{Type: TypeName, string: text}) | |
643 | } | |
644 | ||
645 | func (t *Tree) AddDot() { t.PushFront(&node{Type: TypeDot, string: "."}) } | |
646 | func (t *Tree) AddCharacter(text string) { | |
647 | t.PushFront(&node{Type: TypeCharacter, string: text}) | |
648 | } | |
649 | func (t *Tree) AddDoubleCharacter(text string) { | |
650 | t.PushFront(&node{Type: TypeCharacter, string: strings.ToLower(text)}) | |
651 | t.PushFront(&node{Type: TypeCharacter, string: strings.ToUpper(text)}) | |
652 | t.AddAlternate() | |
653 | } | |
654 | func (t *Tree) AddHexaCharacter(text string) { | |
655 | hexa, _ := strconv.ParseInt(text, 16, 32) | |
656 | t.PushFront(&node{Type: TypeCharacter, string: string(hexa)}) | |
657 | } | |
658 | func (t *Tree) AddOctalCharacter(text string) { | |
659 | octal, _ := strconv.ParseInt(text, 8, 8) | |
660 | t.PushFront(&node{Type: TypeCharacter, string: string(octal)}) | |
661 | } | |
662 | func (t *Tree) AddPredicate(text string) { t.PushFront(&node{Type: TypePredicate, string: text}) } | |
663 | func (t *Tree) AddStateChange(text string) { t.PushFront(&node{Type: TypeStateChange, string: text}) } | |
664 | func (t *Tree) AddNil() { t.PushFront(&node{Type: TypeNil, string: "<nil>"}) } | |
665 | func (t *Tree) AddAction(text string) { t.PushFront(&node{Type: TypeAction, string: text}) } | |
666 | func (t *Tree) AddPackage(text string) { t.PushBack(&node{Type: TypePackage, string: text}) } | |
667 | func (t *Tree) AddImport(text string) { t.PushBack(&node{Type: TypeImport, string: text}) } | |
668 | func (t *Tree) AddState(text string) { | |
669 | peg := t.PopFront() | |
670 | peg.PushBack(&node{Type: TypeState, string: text}) | |
671 | t.PushBack(peg) | |
672 | } | |
673 | ||
674 | func (t *Tree) addList(listType Type) { | |
675 | a := t.PopFront() | |
676 | b := t.PopFront() | |
677 | var l *node | |
678 | if b.GetType() == listType { | |
679 | l = b | |
680 | } else { | |
681 | l = &node{Type: listType} | |
682 | l.PushBack(b) | |
683 | } | |
684 | l.PushBack(a) | |
685 | t.PushFront(l) | |
686 | } | |
687 | func (t *Tree) AddAlternate() { t.addList(TypeAlternate) } | |
688 | func (t *Tree) AddSequence() { t.addList(TypeSequence) } | |
689 | func (t *Tree) AddRange() { t.addList(TypeRange) } | |
690 | func (t *Tree) AddDoubleRange() { | |
691 | a := t.PopFront() | |
692 | b := t.PopFront() | |
693 | ||
694 | t.AddCharacter(strings.ToLower(b.String())) | |
695 | t.AddCharacter(strings.ToLower(a.String())) | |
696 | t.addList(TypeRange) | |
697 | ||
698 | t.AddCharacter(strings.ToUpper(b.String())) | |
699 | t.AddCharacter(strings.ToUpper(a.String())) | |
700 | t.addList(TypeRange) | |
701 | ||
702 | t.AddAlternate() | |
703 | } | |
704 | ||
705 | func (t *Tree) addFix(fixType Type) { | |
706 | n := &node{Type: fixType} | |
707 | n.PushBack(t.PopFront()) | |
708 | t.PushFront(n) | |
709 | } | |
710 | func (t *Tree) AddPeekFor() { t.addFix(TypePeekFor) } | |
711 | func (t *Tree) AddPeekNot() { t.addFix(TypePeekNot) } | |
712 | func (t *Tree) AddQuery() { t.addFix(TypeQuery) } | |
713 | func (t *Tree) AddStar() { t.addFix(TypeStar) } | |
714 | func (t *Tree) AddPlus() { t.addFix(TypePlus) } | |
715 | func (t *Tree) AddPush() { t.addFix(TypePush) } | |
716 | ||
717 | func (t *Tree) AddPeg(text string) { t.PushFront(&node{Type: TypePeg, string: text}) } | |
718 | ||
// join runs every task in its own goroutine and blocks until all of them
// have completed. A nil or empty task list returns immediately.
//
// The previous implementation (`for d := <-done; d < length; ...`) always
// performed one unconditional receive, so calling join with zero tasks
// deadlocked; a plain counted receive loop avoids that edge case.
func join(tasks []func()) {
	done := make(chan int, len(tasks))
	for _, task := range tasks {
		go func(task func()) { task(); done <- 1 }(task)
	}
	// Receive exactly one completion signal per task.
	for range tasks {
		<-done
	}
}
728 | ||
// escape returns c in a form safe to embed between single quotes in
// generated Go source: a single quote gains a backslash, a double quote
// is deliberately left as-is (it needs no escaping inside rune literals),
// and anything else is escaped per strconv.Quote with the surrounding
// double quotes stripped off.
func escape(c string) string {
	if c == "'" {
		return "\\'"
	}
	if c == "\"" {
		return "\""
	}
	quoted := strconv.Quote(c)
	return quoted[1 : len(quoted)-1]
}
740 | ||
// Compile transforms the parsed grammar tree into Go source code for a
// packrat parser and writes the gofmt-formatted result to out. file is the
// output file name used in error messages; args (the generator's command
// line) is recorded in the generated header via t.Generator. Warnings
// accumulated during compilation are printed to stderr, or returned as the
// error when t.Strict is set.
func (t *Tree) Compile(file string, args []string, out io.Writer) (err error) {
	t.AddImport("fmt")
	t.AddImport("io")
	t.AddImport("os")
	t.AddImport("sort")
	t.AddImport("strconv")
	// One past the largest Unicode code point: used as the end-of-input sentinel.
	t.EndSymbol = 0x110000
	t.RulesCount++

	t.Generator = strings.Join(args, " ")

	// warn chains warnings into a single error value for later reporting.
	var werr error
	warn := func(e error) {
		if werr == nil {
			werr = fmt.Errorf("warning: %s.", e)
		} else {
			werr = fmt.Errorf("%s\nwarning: %s", werr, e)
		}
	}

	// counts tracks how many nodes of each type were visited by link;
	// it later drives the Has* template flags and Action numbering.
	counts := [TypeLast]uint{}
	{
		var rule *node
		// link normalizes the tree: actions are hoisted into generated
		// rules, undefined rule references get placeholder rules, and
		// captures are tagged with a copy of their enclosing rule.
		var link func(node Node)
		link = func(n Node) {
			nodeType := n.GetType()
			id := counts[nodeType]
			counts[nodeType]++
			switch nodeType {
			case TypeAction:
				// Hoist the action into its own rule ("ActionN") and
				// rewrite this node into a name reference to it.
				n.SetId(int(id))
				copy, name := n.Copy(), fmt.Sprintf("Action%v", id)
				t.Actions = append(t.Actions, copy)
				n.Init()
				n.SetType(TypeName)
				n.SetString(name)
				n.SetId(t.RulesCount)

				emptyRule := &node{Type: TypeRule, string: name, id: t.RulesCount}
				implicitPush := &node{Type: TypeImplicitPush}
				emptyRule.PushBack(implicitPush)
				implicitPush.PushBack(copy)
				implicitPush.PushBack(emptyRule.Copy())
				t.PushBack(emptyRule)
				t.RulesCount++

				t.Rules[name] = emptyRule
				t.RuleNames = append(t.RuleNames, emptyRule)
			case TypeName:
				// A reference to a rule with no definition yet gets an
				// empty placeholder so later passes can look it up;
				// truly undefined rules are warned about at emit time.
				name := n.String()
				if _, ok := t.Rules[name]; !ok {
					emptyRule := &node{Type: TypeRule, string: name, id: t.RulesCount}
					implicitPush := &node{Type: TypeImplicitPush}
					emptyRule.PushBack(implicitPush)
					implicitPush.PushBack(&node{Type: TypeNil, string: "<nil>"})
					implicitPush.PushBack(emptyRule.Copy())
					t.PushBack(emptyRule)
					t.RulesCount++

					t.Rules[name] = emptyRule
					t.RuleNames = append(t.RuleNames, emptyRule)
				}
			case TypePush:
				// Captures record their text under the shared "PegText"
				// rule; create it on first use.
				copy, name := rule.Copy(), "PegText"
				copy.SetString(name)
				if _, ok := t.Rules[name]; !ok {
					emptyRule := &node{Type: TypeRule, string: name, id: t.RulesCount}
					emptyRule.PushBack(&node{Type: TypeNil, string: "<nil>"})
					t.PushBack(emptyRule)
					t.RulesCount++

					t.Rules[name] = emptyRule
					t.RuleNames = append(t.RuleNames, emptyRule)
				}
				n.PushBack(copy)
				fallthrough
			case TypeImplicitPush:
				link(n.Front())
			case TypeRule, TypeAlternate, TypeUnorderedAlternate, TypeSequence,
				TypePeekFor, TypePeekNot, TypeQuery, TypeStar, TypePlus:
				for _, node := range n.Slice() {
					link(node)
				}
			}
		}
		/* first pass */
		// Collect top-level declarations (package, imports, peg struct)
		// and register every defined rule, wrapping each rule body in an
		// implicit push so its span is recorded in the AST.
		for _, node := range t.Slice() {
			switch node.GetType() {
			case TypePackage:
				t.PackageName = node.String()
			case TypeImport:
				t.Imports = append(t.Imports, node.String())
			case TypePeg:
				t.StructName = node.String()
				t.StructVariables = node.Front().String()
			case TypeRule:
				if _, ok := t.Rules[node.String()]; !ok {
					expression := node.Front()
					copy := expression.Copy()
					expression.Init()
					expression.SetType(TypeImplicitPush)
					expression.PushBack(copy)
					expression.PushBack(node.Copy())

					t.Rules[node.String()] = node
					t.RuleNames = append(t.RuleNames, node)
				}
			}
		}
		/* sort imports to satisfy gofmt */
		sort.Strings(t.Imports)

		/* second pass */
		// Run link over every rule now that all rules are registered.
		for _, node := range t.Slice() {
			if node.GetType() == TypeRule {
				rule = node
				link(node)
			}
		}
	}

	// Two independent analyses run concurrently:
	// 1) countRules: per-rule usage counts (drives inlining decisions),
	// 2) checkRecursion: warns about possible infinite left recursion.
	join([]func(){
		func() {
			var countRules func(node Node)
			ruleReached := make([]bool, t.RulesCount)
			countRules = func(node Node) {
				switch node.GetType() {
				case TypeRule:
					name, id := node.String(), node.GetId()
					if count, ok := t.rulesCount[name]; ok {
						t.rulesCount[name] = count + 1
					} else {
						t.rulesCount[name] = 1
					}
					// Only descend into each rule once; counts still
					// accumulate for every reference.
					if ruleReached[id] {
						return
					}
					ruleReached[id] = true
					countRules(node.Front())
				case TypeName:
					countRules(t.Rules[node.String()])
				case TypeImplicitPush, TypePush:
					countRules(node.Front())
				case TypeAlternate, TypeUnorderedAlternate, TypeSequence,
					TypePeekFor, TypePeekNot, TypeQuery, TypeStar, TypePlus:
					for _, element := range node.Slice() {
						countRules(element)
					}
				}
			}
			// Start from the first (root) rule only: unreachable rules
			// are deliberately left uncounted.
			for _, node := range t.Slice() {
				if node.GetType() == TypeRule {
					countRules(node)
					break
				}
			}
		},
		func() {
			// checkRecursion returns whether a node is guaranteed to
			// consume input; re-entering a rule before any input was
			// consumed indicates left recursion.
			var checkRecursion func(node Node) bool
			ruleReached := make([]bool, t.RulesCount)
			checkRecursion = func(node Node) bool {
				switch node.GetType() {
				case TypeRule:
					id := node.GetId()
					if ruleReached[id] {
						warn(fmt.Errorf("possible infinite left recursion in rule '%v'", node))
						return false
					}
					ruleReached[id] = true
					consumes := checkRecursion(node.Front())
					ruleReached[id] = false
					return consumes
				case TypeAlternate:
					// An alternate consumes only if every branch does.
					for _, element := range node.Slice() {
						if !checkRecursion(element) {
							return false
						}
					}
					return true
				case TypeSequence:
					// A sequence consumes as soon as any element does.
					for _, element := range node.Slice() {
						if checkRecursion(element) {
							return true
						}
					}
				case TypeName:
					return checkRecursion(t.Rules[node.String()])
				case TypePlus, TypePush, TypeImplicitPush:
					return checkRecursion(node.Front())
				case TypeCharacter, TypeString:
					return len(node.String()) > 0
				case TypeDot, TypeRange:
					return true
				}
				return false
			}
			for _, node := range t.Slice() {
				if node.GetType() == TypeRule {
					checkRecursion(node)
				}
			}
		}})

	// With -switch: compute first-character sets (jetset.Set) for every
	// expression and convert alternates whose branches have disjoint first
	// sets into TypeUnorderedAlternate, which compiles to a Go switch.
	if t._switch {
		var optimizeAlternates func(node Node) (consumes bool, s jetset.Set)
		// cache memoizes per-rule results; firstPass only computes the
		// sets, the second pass performs the actual rewriting.
		cache, firstPass := make([]struct {
			reached, consumes bool
			s                 jetset.Set
		}, t.RulesCount), true
		optimizeAlternates = func(n Node) (consumes bool, s jetset.Set) {
			/*n.debug()*/
			switch n.GetType() {
			case TypeRule:
				cache := &cache[n.GetId()]
				if cache.reached {
					consumes, s = cache.consumes, cache.s
					return
				}

				cache.reached = true
				consumes, s = optimizeAlternates(n.Front())
				cache.consumes, cache.s = consumes, s
			case TypeName:
				consumes, s = optimizeAlternates(t.Rules[n.String()])
			case TypeDot:
				consumes = true
				/* TypeDot set doesn't include the EndSymbol */
				s = s.Add(uint64(t.EndSymbol))
				s = s.Complement(uint64(t.EndSymbol))
			case TypeString, TypeCharacter:
				consumes = true
				// NOTE(review): for strings only the first rune enters
				// the first set — sufficient for switch dispatch.
				s = s.Add(uint64([]rune(n.String())[0]))
			case TypeRange:
				consumes = true
				element := n.Front()
				lower := []rune(element.String())[0]
				element = element.Next()
				upper := []rune(element.String())[0]
				s = s.AddRange(uint64(lower), uint64(upper))
			case TypeAlternate:
				consumes = true
				mconsumes, properties, c :=
					consumes, make([]struct {
						intersects bool
						s          jetset.Set
					}, n.Len()), 0
				for _, element := range n.Slice() {
					mconsumes, properties[c].s = optimizeAlternates(element)
					consumes = consumes && mconsumes
					s = s.Union(properties[c].s)
					c++
				}

				if firstPass {
					break
				}

				// Count overlapping branch pairs; bail out of the
				// switch rewrite when (almost) all branches overlap.
				// NOTE(review): the counter starts at 2, not 0 — this
				// biases the heuristic toward keeping the ordered form;
				// confirm whether that offset is intentional.
				intersections := 2
			compare:
				for ai, a := range properties[0 : len(properties)-1] {
					for _, b := range properties[ai+1:] {
						if a.s.Intersects(b.s) {
							intersections++
							properties[ai].intersects = true
							continue compare
						}
					}
				}
				if intersections >= len(properties) {
					break
				}

				// Split branches: overlapping ones stay ordered,
				// disjoint ones become switch cases guarded by a
				// peek-for over their first-character class.
				c, unordered, ordered, max :=
					0, &node{Type: TypeUnorderedAlternate}, &node{Type: TypeAlternate}, 0
				for _, element := range n.Slice() {
					if properties[c].intersects {
						ordered.PushBack(element.Copy())
					} else {
						class := &node{Type: TypeUnorderedAlternate}
						// NOTE(review): only byte values 0-255 are
						// materialized into the case class — confirm
						// multi-byte rune alternates are intended to
						// fall to the default branch.
						for d := 0; d < 256; d++ {
							if properties[c].s.Has(uint64(d)) {
								class.PushBack(&node{Type: TypeCharacter, string: string(d)})
							}
						}

						sequence, predicate, length :=
							&node{Type: TypeSequence}, &node{Type: TypePeekFor}, properties[c].s.Len()
						if length == 0 {
							class.PushBack(&node{Type: TypeNil, string: "<nil>"})
						}
						predicate.PushBack(class)
						sequence.PushBack(predicate)
						sequence.PushBack(element.Copy())

						// The branch with the largest first set becomes
						// the switch default (kept at the back).
						if element.GetType() == TypeNil {
							unordered.PushBack(sequence)
						} else if length > max {
							unordered.PushBack(sequence)
							max = length
						} else {
							unordered.PushFront(sequence)
						}
					}
					c++
				}
				// Rebuild n in place from the split branches.
				n.Init()
				if ordered.Front() == nil {
					n.SetType(TypeUnorderedAlternate)
					for _, element := range unordered.Slice() {
						n.PushBack(element.Copy())
					}
				} else {
					for _, element := range ordered.Slice() {
						n.PushBack(element.Copy())
					}
					n.PushBack(unordered)
				}
			case TypeSequence:
				classes, elements :=
					make([]struct {
						s jetset.Set
					}, n.Len()), n.Slice()

				// Collect first sets up to and including the first
				// element guaranteed to consume input.
				for c, element := range elements {
					consumes, classes[c].s = optimizeAlternates(element)
					if consumes {
						elements, classes = elements[c+1:], classes[:c+1]
						break
					}
				}

				for c := len(classes) - 1; c >= 0; c-- {
					s = s.Union(classes[c].s)
				}

				// Still optimize the remaining elements; their sets do
				// not contribute to this sequence's first set.
				for _, element := range elements {
					optimizeAlternates(element)
				}
			case TypePeekNot, TypePeekFor:
				optimizeAlternates(n.Front())
			case TypeQuery, TypeStar:
				// Optional/repeated nodes never guarantee consumption.
				_, s = optimizeAlternates(n.Front())
			case TypePlus, TypePush, TypeImplicitPush:
				consumes, s = optimizeAlternates(n.Front())
			case TypeAction, TypeNil:
				//empty
			}
			return
		}
		// First pass: populate the memo cache starting at the root rule.
		for _, element := range t.Slice() {
			if element.GetType() == TypeRule {
				optimizeAlternates(element)
				break
			}
		}

		// Second pass: reset reachability and rewrite alternates.
		for i := range cache {
			cache[i].reached = false
		}
		firstPass = false
		for _, element := range t.Slice() {
			if element.GetType() == TypeRule {
				optimizeAlternates(element)
				break
			}
		}
	}

	// Generated code is accumulated in buffer, then parsed and
	// pretty-printed to out when Compile returns.
	var buffer bytes.Buffer
	defer func() {
		if t.Strict && werr != nil && err == nil {
			// Treat warnings as errors.
			err = werr
		}
		if !t.Strict && werr != nil {
			// Display warnings.
			fmt.Fprintln(os.Stderr, werr)
		}
		if err != nil {
			return
		}
		fileSet := token.NewFileSet()
		// NOTE(review): `code, err :=` declares a new err local to this
		// closure, shadowing the named return value; parse/format
		// failures below are written to out but never propagated to
		// Compile's caller — confirm whether that is intended.
		code, err := parser.ParseFile(fileSet, file, &buffer, parser.ParseComments)
		if err != nil {
			buffer.WriteTo(out)
			err = fmt.Errorf("%v: %v", file, err)
			return
		}
		formatter := printer.Config{Mode: printer.TabIndent | printer.UseSpaces, Tabwidth: 8}
		err = formatter.Fprint(out, fileSet, code)
		if err != nil {
			buffer.WriteTo(out)
			err = fmt.Errorf("%v: %v", file, err)
			return
		}

	}()

	// Low-level emit helpers: all code generation goes through _print.
	_print := func(format string, a ...interface{}) { fmt.Fprintf(&buffer, format, a...) }
	printSave := func(n uint) { _print("\n position%d, tokenIndex%d := position, tokenIndex", n, n) }
	printRestore := func(n uint) { _print("\n position, tokenIndex = position%d, tokenIndex%d", n, n) }
	printTemplate := func(s string) error {
		return template.Must(template.New("peg").Parse(s)).Execute(&buffer, t)
	}

	// Feature flags consumed by the header template.
	t.HasActions = counts[TypeAction] > 0
	t.HasPush = counts[TypePush] > 0
	t.HasCommit = counts[TypeCommit] > 0
	t.HasDot = counts[TypeDot] > 0
	t.HasCharacter = counts[TypeCharacter] > 0
	t.HasString = counts[TypeString] > 0
	t.HasRange = counts[TypeRange] > 0

	var printRule func(n Node)
	var compile func(expression Node, ko uint) (labelLast bool)
	// label is a monotonically increasing counter for goto targets;
	// labels records which targets are actually jumped to, so unused
	// labels are not emitted.
	var label uint
	labels := make(map[uint]bool)
	printBegin := func() { _print("\n {") }
	printEnd := func() { _print("\n }") }
	printLabel := func(n uint) bool {
		_print("\n")
		if labels[n] {
			_print(" l%d:\t", n)
			return true
		}
		return false
	}
	printJump := func(n uint) {
		_print("\n goto l%d", n)
		labels[n] = true
	}
	// printRule pretty-prints a rule subtree back in PEG notation; used
	// for the comment above each generated rule function.
	printRule = func(n Node) {
		switch n.GetType() {
		case TypeRule:
			_print("%v <- ", n)
			printRule(n.Front())
		case TypeDot:
			_print(".")
		case TypeName:
			_print("%v", n)
		case TypeCharacter:
			_print("'%v'", escape(n.String()))
		case TypeString:
			s := escape(n.String())
			_print("'%v'", s[1:len(s)-1])
		case TypeRange:
			element := n.Front()
			lower := element
			element = element.Next()
			upper := element
			_print("[%v-%v]", escape(lower.String()), escape(upper.String()))
		case TypePredicate:
			_print("&{%v}", n)
		case TypeStateChange:
			_print("!{%v}", n)
		case TypeAction:
			_print("{%v}", n)
		case TypeCommit:
			_print("commit")
		case TypeAlternate:
			_print("(")
			elements := n.Slice()
			printRule(elements[0])
			for _, element := range elements[1:] {
				_print(" / ")
				printRule(element)
			}
			_print(")")
		case TypeUnorderedAlternate:
			_print("(")
			elements := n.Slice()
			printRule(elements[0])
			for _, element := range elements[1:] {
				_print(" | ")
				printRule(element)
			}
			_print(")")
		case TypeSequence:
			_print("(")
			elements := n.Slice()
			printRule(elements[0])
			for _, element := range elements[1:] {
				_print(" ")
				printRule(element)
			}
			_print(")")
		case TypePeekFor:
			_print("&")
			printRule(n.Front())
		case TypePeekNot:
			_print("!")
			printRule(n.Front())
		case TypeQuery:
			printRule(n.Front())
			_print("?")
		case TypeStar:
			printRule(n.Front())
			_print("*")
		case TypePlus:
			printRule(n.Front())
			_print("+")
		case TypePush, TypeImplicitPush:
			_print("<")
			printRule(n.Front())
			_print(">")
		case TypeNil:
		default:
			warn(fmt.Errorf("illegal node type: %v", n.GetType()))
		}
	}
	// compile emits the Go matching code for node n. On match failure the
	// emitted code jumps to label ko. The return value reports whether the
	// last thing emitted was a label (so callers can add a "break").
	compile = func(n Node, ko uint) (labelLast bool) {
		switch n.GetType() {
		case TypeRule:
			warn(fmt.Errorf("internal error #1 (%v)", n))
		case TypeDot:
			_print("\n if !matchDot() {")
			/*print("\n if buffer[position] == endSymbol {")*/
			printJump(ko)
			/*print("}\nposition++")*/
			_print("}")
		case TypeName:
			name := n.String()
			rule := t.Rules[name]
			// Inline rules referenced exactly once when -inline is set.
			if t.inline && t.rulesCount[name] == 1 {
				compile(rule.Front(), ko)
				return
			}
			_print("\n if !_rules[rule%v]() {", name /*rule.GetId()*/)
			printJump(ko)
			_print("}")
		case TypeRange:
			element := n.Front()
			lower := element
			element = element.Next()
			upper := element
			/*print("\n if !matchRange('%v', '%v') {", escape(lower.String()), escape(upper.String()))*/
			_print("\n if c := buffer[position]; c < rune('%v') || c > rune('%v') {", escape(lower.String()), escape(upper.String()))
			printJump(ko)
			_print("}\nposition++")
		case TypeCharacter:
			/*print("\n if !matchChar('%v') {", escape(n.String()))*/
			_print("\n if buffer[position] != rune('%v') {", escape(n.String()))
			printJump(ko)
			_print("}\nposition++")
		case TypeString:
			_print("\n if !matchString(%v) {", strconv.Quote(n.String()))
			printJump(ko)
			_print("}")
		case TypePredicate:
			_print("\n if !(%v) {", n)
			printJump(ko)
			_print("}")
		case TypeStateChange:
			_print("\n %v", n)
		case TypeAction:
		case TypeCommit:
		case TypePush:
			fallthrough
		case TypeImplicitPush:
			ok, element := label, n.Front()
			label++
			nodeType, rule := element.GetType(), element.Next()
			printBegin()
			if nodeType == TypeAction {
				if t.Ast {
					_print("\nadd(rule%v, position)", rule)
				} else {
					// There is no AST support, so inline the rule code
					_print("\n%v", element)
				}
			} else {
				_print("\nposition%d := position", ok)
				compile(element, ko)
				if n.GetType() == TypePush && !t.Ast {
					// This is TypePush and there is no AST support,
					// so inline capture to text right here
					_print("\nbegin := position%d", ok)
					_print("\nend := position")
					_print("\ntext = string(buffer[begin:end])")
				} else {
					_print("\nadd(rule%v, position%d)", rule, ok)
				}
			}
			printEnd()
		case TypeAlternate:
			// Try each branch in order, restoring position/tokenIndex
			// between attempts; only the last branch jumps to ko.
			ok := label
			label++
			printBegin()
			elements := n.Slice()
			printSave(ok)
			for _, element := range elements[:len(elements)-1] {
				next := label
				label++
				compile(element, next)
				printJump(ok)
				printLabel(next)
				printRestore(ok)
			}
			compile(elements[len(elements)-1], ko)
			printEnd()
			labelLast = printLabel(ok)
		case TypeUnorderedAlternate:
			// Disjoint alternates compile to a switch on the next
			// character; the largest class is the default branch.
			done, ok := ko, label
			label++
			printBegin()
			_print("\n switch buffer[position] {")
			elements := n.Slice()
			elements, last := elements[:len(elements)-1], elements[len(elements)-1].Front().Next()
			for _, element := range elements {
				sequence := element.Front()
				class := sequence.Front()
				sequence = sequence.Next()
				_print("\n case")
				comma := false
				for _, character := range class.Slice() {
					if comma {
						_print(",")
					} else {
						comma = true
					}
					_print(" '%s'", escape(character.String()))
				}
				_print(":")
				if compile(sequence, done) {
					_print("\nbreak")
				}
			}
			_print("\n default:")
			if compile(last, done) {
				_print("\nbreak")
			}
			_print("\n }")
			printEnd()
			labelLast = printLabel(ok)
		case TypeSequence:
			for _, element := range n.Slice() {
				labelLast = compile(element, ko)
			}
		case TypePeekFor:
			// &e: match e, then restore position regardless.
			ok := label
			label++
			printBegin()
			printSave(ok)
			compile(n.Front(), ko)
			printRestore(ok)
			printEnd()
		case TypePeekNot:
			// !e: succeed only when e fails; restore position.
			ok := label
			label++
			printBegin()
			printSave(ok)
			compile(n.Front(), ok)
			printJump(ko)
			printLabel(ok)
			printRestore(ok)
			printEnd()
		case TypeQuery:
			// e?: try e, restore on failure, always succeed.
			qko := label
			label++
			qok := label
			label++
			printBegin()
			printSave(qko)
			compile(n.Front(), qko)
			printJump(qok)
			printLabel(qko)
			printRestore(qko)
			printEnd()
			labelLast = printLabel(qok)
		case TypeStar:
			// e*: loop until e fails, restoring after the last attempt.
			again := label
			label++
			out := label
			label++
			printLabel(again)
			printBegin()
			printSave(out)
			compile(n.Front(), out)
			printJump(again)
			printLabel(out)
			printRestore(out)
			printEnd()
		case TypePlus:
			// e+: one mandatory match (failure goes to ko), then e*.
			again := label
			label++
			out := label
			label++
			compile(n.Front(), ko)
			printLabel(again)
			printBegin()
			printSave(out)
			compile(n.Front(), out)
			printJump(again)
			printLabel(out)
			printRestore(out)
			printEnd()
		case TypeNil:
		default:
			warn(fmt.Errorf("illegal node type: %v", n.GetType()))
		}
		return labelLast
	}

	/* lets figure out which jump labels are going to be used with this dry compile */
	// _print is temporarily replaced with a no-op so the dry run only
	// populates the labels map; printTemp restores it afterwards.
	printTemp, _print := _print, func(format string, a ...interface{}) {}
	for _, element := range t.Slice() {
		if element.GetType() != TypeRule {
			continue
		}
		expression := element.Front()
		if expression.GetType() == TypeNil {
			continue
		}
		ko := label
		label++
		if count, ok := t.rulesCount[element.String()]; !ok {
			continue
		} else if t.inline && count == 1 && ko != 0 {
			continue
		}
		compile(expression, ko)
	}
	_print, label = printTemp, 0

	/* now for the real compile pass */
	// Pick the narrowest unsigned type that can index every token.
	t.PegRuleType = "uint8"
	if length := int64(t.Len()); length > math.MaxUint32 {
		t.PegRuleType = "uint64"
	} else if length > math.MaxUint16 {
		t.PegRuleType = "uint32"
	} else if length > math.MaxUint8 {
		t.PegRuleType = "uint16"
	}
	if err = printTemplate(pegHeaderTemplate); err != nil {
		return err
	}
	// Emit one entry in the _rules table per rule: nil for undefined or
	// inlined rules, otherwise a func() bool matcher.
	for _, element := range t.Slice() {
		if element.GetType() != TypeRule {
			continue
		}
		expression := element.Front()
		if implicit := expression.Front(); expression.GetType() == TypeNil || implicit.GetType() == TypeNil {
			if element.String() != "PegText" {
				warn(fmt.Errorf("rule '%v' used but not defined", element))
			}
			_print("\n nil,")
			continue
		}
		ko := label
		label++
		_print("\n /* %v ", element.GetId())
		printRule(element)
		_print(" */")
		if count, ok := t.rulesCount[element.String()]; !ok {
			warn(fmt.Errorf("rule '%v' defined but not used", element))
			_print("\n nil,")
			continue
		} else if t.inline && count == 1 && ko != 0 {
			_print("\n nil,")
			continue
		}
		_print("\n func() bool {")
		if labels[ko] {
			printSave(ko)
		}
		compile(expression, ko)
		//print("\n fmt.Printf(\"%v\\n\")", element.String())
		_print("\n return true")
		if labels[ko] {
			printLabel(ko)
			printRestore(ko)
			_print("\n return false")
		}
		_print("\n },")
	}
	_print("\n }\n p.rules = _rules")
	_print("\n return nil")
	_print("\n}\n")
	return nil
}