New upstream snapshot.
Debian Janitor
1 year, 3 months ago
0 | 0 | https://medium.com/@octskyward/graal-truffle-134d8f28fb69#.jo3luf4dn |
1 | http://nez-peg.github.io/ | |
2 | https://en.wikipedia.org/wiki/DFA_minimization | |
3 | ||
4 | https://news.ycombinator.com/item?id=14589173 | |
5 | http://jamey.thesharps.us/2017/06/search-based-compiler-code-generation.html | |
6 | ||
7 | https://news.ycombinator.com/item?id=15105119 | |
8 | https://en.wikipedia.org/wiki/Tree_transducer | |
9 | ||
10 | # Type-Driven Program Synthesis | |
11 | https://news.ycombinator.com/item?id=18251145 | |
12 | https://www.youtube.com/watch?v=HnOix9TFy1A | |
13 | http://comcom.csail.mit.edu/comcom/#welcome | |
14 | https://bitbucket.org/nadiapolikarpova/synquid | |
15 | ||
16 | # Formality – An efficient programming language and proof assistant | |
17 | https://news.ycombinator.com/item?id=18230148 | |
18 | https://github.com/maiavictor/formality |
0 | # Copyright 2010 The Go Authors. All rights reserved. | |
1 | # Use of this source code is governed by a BSD-style | |
2 | # license that can be found in the LICENSE file. | |
3 | ||
4 | peg: bootstrap.peg.go peg.go main.go | |
5 | go build | |
6 | ||
7 | bootstrap.peg.go: bootstrap/main.go peg.go | |
8 | cd bootstrap; go build | |
9 | bootstrap/bootstrap | |
10 | ||
11 | clean: | |
12 | rm -f bootstrap/bootstrap peg peg.peg.go |
0 | # About | |
1 | ||
2 | Peg, Parsing Expression Grammar, is an implementation of a Packrat parser | |
3 | generator. A Packrat parser is a recursive descent parser capable of | |
4 | backtracking. The generated parser searches for the correct parsing of the | |
5 | input. | |
6 | ||
7 | For more information see: | |
8 | * http://en.wikipedia.org/wiki/Parsing_expression_grammar | |
9 | * http://pdos.csail.mit.edu/~baford/packrat/ | |
10 | ||
11 | This Go implementation is based on: | |
12 | * http://piumarta.com/software/peg/ | |
13 | ||
14 | ||
15 | # Usage | |
16 | ||
17 | ``` | |
18 | -inline | |
19 | Tells the parser generator to inline parser rules. | |
20 | -switch | |
21 | Reduces the number of rules that have to be tried for some pegs. | |
22 | If statements are replaced with switch statements. | |
23 | ``` | |
24 | ||
25 | ||
26 | # Syntax | |
27 | ||
28 | First declare the package name: | |
0 | # PEG, an Implementation of a Packrat Parsing Expression Grammar in Go | |
1 | ||
2 | [![GoDoc](https://godoc.org/github.com/pointlander/peg?status.svg)](https://godoc.org/github.com/pointlander/peg) | |
3 | [![Go Report Card](https://goreportcard.com/badge/github.com/pointlander/peg)](https://goreportcard.com/report/github.com/pointlander/peg) | |
4 | [![Coverage](https://gocover.io/_badge/github.com/pointlander/peg)](https://gocover.io/github.com/pointlander/peg) | |
5 | ||
6 | A [Parsing Expression Grammar](http://en.wikipedia.org/wiki/Parsing_expression_grammar) (hence `peg`) is a way to create grammars similar in principle to [regular expressions](https://en.wikipedia.org/wiki/Regular_expression) but which allow better code integration. Specifically, `peg` is an implementation of the [Packrat](https://en.wikipedia.org/wiki/Parsing_expression_grammar#Implementing_parsers_from_parsing_expression_grammars) parser generator originally implemented as [peg/leg](https://www.piumarta.com/software/peg/) by [Ian Piumarta](https://www.piumarta.com/cv/) in C. A Packrat parser is a "recursive descent parser" capable of backtracking and negative look-ahead assertions which are problematic for regular expression engines. | |
7 | ||
8 | ## See Also | |
9 | ||
10 | * <http://en.wikipedia.org/wiki/Parsing_expression_grammar> | |
11 | * <http://pdos.csail.mit.edu/~baford/packrat/> | |
12 | * <http://piumarta.com/software/peg/> | |
13 | ||
14 | ## Installing | |
15 | ||
16 | `go get -u github.com/pointlander/peg` | |
17 | ||
18 | ## Building | |
19 | ||
20 | ### Using Pre-Generated Files | |
21 | ||
22 | `go install` | |
23 | ||
24 | ### Generating Files Yourself | |
25 | You should only need to do this if you are contributing to the library, or if something gets messed up. | |
26 | ||
27 | `go run build.go` or `go generate` | |
28 | ||
29 | With tests: | |
30 | ||
31 | `go run build.go test` | |
32 | ||
33 | ## Usage | |
34 | ||
35 | ``` | |
36 | peg [<option>]... <file> | |
37 | ||
38 | Usage of peg: | |
39 | -inline | |
40 | parse rule inlining | |
41 | -noast | |
42 | disable AST | |
43 | -output string | |
44 | specify name of output file | |
45 | ||
46 | directly dump the syntax tree | |
47 | -strict | |
48 | treat compiler warnings as errors | |
49 | -switch | |
50 | replace if-else if-else like blocks with switch blocks | |
51 | -syntax | |
52 | print out the syntax tree | |
53 | -version | |
54 | print the version and exit | |
55 | ||
56 | ``` | |
57 | ||
58 | ||
59 | ## Sample Makefile | |
60 | ||
61 | This sample `Makefile` will convert any file ending with `.peg` into a `.go` file with the same name. Adjust as needed. | |
62 | ||
63 | ```make | |
64 | .SUFFIXES: .peg .go | |
65 | ||
66 | .peg.go: | |
67 | peg -noast -switch -inline -strict -output $@ $< | |
68 | ||
69 | all: grammar.go | |
70 | ``` | |
71 | ||
72 | Use caution when picking your names to avoid overwriting existing `.go` files. Since only one PEG grammar is allowed per Go package (currently) the use of the name `grammar.peg` is suggested as a convention: | |
73 | ||
74 | ``` | |
75 | grammar.peg | |
76 | grammar.go | |
77 | ``` | |
78 | ||
79 | ## PEG File Syntax | |
80 | ||
81 | First declare the package name and any import(s) required: | |
82 | ||
29 | 83 | ``` |
30 | 84 | package <package name> |
85 | ||
86 | import <import name> | |
31 | 87 | ``` |
32 | 88 | |
33 | 89 | Then declare the parser: |
90 | ||
34 | 91 | ``` |
35 | 92 | type <parser name> Peg { |
36 | 93 | <parser state variables> |
37 | 94 | } |
38 | 95 | ``` |
39 | 96 | |
40 | Next declare the rules. The first rule is the entry point into the parser: | |
97 | Next declare the rules. Note that the main rules are described below but are based on the [peg/leg rules](https://www.piumarta.com/software/peg/peg.1.html) which provide additional documentation. | |
98 | ||
99 | The first rule is the entry point into the parser: | |
100 | ||
41 | 101 | ``` |
42 | 102 | <rule name> <- <rule body> |
43 | 103 | ``` |
44 | 104 | |
45 | The first rule should probably end with '!.' to indicate no more input follows: | |
105 | The first rule should probably end with `!.` to indicate no more input follows. | |
106 | ||
46 | 107 | ``` |
47 | 108 | first <- . !. |
48 | 109 | ``` |
49 | 110 | |
50 | '.' means any character matches. For zero or more character matches use: | |
111 | This is often set to `END` to make PEG rules more readable: | |
112 | ||
113 | ``` | |
114 | END <- !. | |
115 | ``` | |
116 | ||
117 | `.` means any character matches. For zero or more character matches, use: | |
118 | ||
51 | 119 | ``` |
52 | 120 | repetition <- .* |
53 | 121 | ``` |
54 | 122 | |
55 | For one or more character matches use: | |
123 | For one or more character matches, use: | |
124 | ||
56 | 125 | ``` |
57 | 126 | oneOrMore <- .+ |
58 | 127 | ``` |
59 | 128 | |
60 | For an optional character match use: | |
129 | For an optional character match, use: | |
130 | ||
61 | 131 | ``` |
62 | 132 | optional <- .? |
63 | 133 | ``` |
64 | 134 | |
65 | If specific characters are to be matched use single quotes: | |
135 | If specific characters are to be matched, use single quotes: | |
136 | ||
66 | 137 | ``` |
67 | 138 | specific <- 'a'* 'bc'+ 'de'? |
68 | 139 | ``` |
69 | will match the string "aaabcbcde". | |
70 | ||
71 | For choosing between different inputs use alternates: | |
140 | ||
141 | This will match the string `"aaabcbcde"`. | |
142 | ||
143 | For choosing between different inputs, use alternates: | |
144 | ||
72 | 145 | ``` |
73 | 146 | prioritized <- 'a' 'a'* / 'bc'+ / 'de'? |
74 | 147 | ``` |
75 | will match "aaaa" or "bcbc" or "de" or "". The matches are attempted in order. | |
76 | ||
77 | If the characters are case insensitive use double quotes: | |
148 | ||
149 | This will match `"aaaa"` or `"bcbc"` or `"de"` or `""`. The matches are attempted in order. | |
150 | ||
151 | If the characters are case insensitive, use double quotes: | |
152 | ||
78 | 153 | ``` |
79 | 154 | insensitive <- "abc" |
80 | 155 | ``` |
81 | will match "abc" or "Abc" or "ABc" etc... | |
82 | ||
83 | For matching a set of characters use a character class: | |
156 | ||
157 | This will match `"abc"` or `"Abc"` or `"ABc"` and so on. | |
158 | ||
159 | For matching a set of characters, use a character class: | |
160 | ||
84 | 161 | ``` |
85 | 162 | class <- [a-z] |
86 | 163 | ``` |
87 | will match "a" or "b" or all the way to "z". | |
88 | ||
89 | For an inverse character class start with a tilde: | |
90 | ``` | |
91 | inverse <- [~a-z] | |
92 | ``` | |
93 | will match anything but "a" or "b" or all the way to "z". | |
94 | ||
95 | If the character class is case insensitive use double brackets: | |
164 | ||
165 | This will match `"a"` or `"b"` or all the way to `"z"`. | |
166 | ||
167 | For an inverse character class, start with a caret: | |
168 | ||
169 | ``` | |
170 | inverse <- [^a-z] | |
171 | ``` | |
172 | ||
173 | This will match anything but `"a"` or `"b"` or all the way to `"z"`. | |
174 | ||
175 | If the character class is case insensitive, use double brackets: | |
176 | ||
96 | 177 | ``` |
97 | 178 | insensitive <- [[A-Z]] |
98 | 179 | ``` |
99 | 180 | |
181 | (Note that this is not available in regular expression syntax.) | |
182 | ||
100 | 183 | Use parentheses for grouping: |
184 | ||
101 | 185 | ``` |
102 | 186 | grouping <- (rule1 / rule2) rule3 |
103 | 187 | ``` |
104 | 188 | |
105 | For looking ahead for a match (predicate) use: | |
189 | For looking ahead for a match (predicate), use: | |
190 | ||
106 | 191 | ``` |
107 | 192 | lookAhead <- &rule1 rule2 |
108 | 193 | ``` |
109 | 194 | |
110 | For inverse look ahead use: | |
195 | For inverse look ahead, use: | |
196 | ||
111 | 197 | ``` |
112 | 198 | inverse <- !rule1 rule2 |
113 | 199 | ``` |
114 | 200 | |
115 | 201 | Use curly braces for Go code: |
202 | ||
116 | 203 | ``` |
117 | 204 | gocode <- { fmt.Println("hello world") } |
118 | 205 | ``` |
119 | 206 | |
120 | For string captures use less than greater than: | |
121 | ``` | |
122 | capture <- <'capture'> { fmt.Println(buffer[begin:end]) } | |
123 | ``` | |
124 | Will print out "capture". The captured string is stored in buffer[begin:end]. | |
125 | ||
126 | ||
127 | # Files | |
128 | ||
129 | * bootstrap/main.go: bootstrap syntax tree of peg | |
130 | * peg.go: syntax tree and code generator | |
131 | * main.go: bootstrap main | |
132 | * peg.peg: peg in its own language | |
133 | ||
134 | ||
135 | # Testing | |
136 | ||
137 | There should be no differences between the bootstrap and self compiled: | |
138 | ||
139 | ``` | |
140 | ./peg -inline -switch peg.peg | |
141 | diff bootstrap.peg.go peg.peg.go | |
142 | ``` | |
143 | ||
144 | ||
145 | # Author | |
207 | For string captures, use less than and greater than: | |
208 | ||
209 | ``` | |
210 | capture <- <'capture'> { fmt.Println(text) } | |
211 | ``` | |
212 | ||
213 | Will print out `"capture"`. The captured string is stored in `buffer[begin:end]`. | |
214 | ||
215 | ## Testing Complex Grammars | |
216 | ||
217 | Testing a grammar usually requires more than the average unit testing with multiple inputs and outputs. Grammars are also usually not for just one language implementation. Consider maintaining a list of inputs with expected outputs in a structured file format such as JSON or YAML and parsing it for testing or using one of the available options for Go such as Rob Muhlestein's [`tinout`](https://github.com/robmuh/tinout) package. | |
218 | ||
219 | ## Files | |
220 | ||
221 | * `bootstrap/main.go` - bootstrap syntax tree of peg | |
222 | * `tree/peg.go` - syntax tree and code generator | |
223 | * `peg.peg` - peg in its own language | |
224 | ||
225 | ## Author | |
146 | 226 | |
147 | 227 | Andrew Snodgrass |
228 | ||
229 | ## Projects That Use `peg` | |
230 | ||
231 | Here are some projects that use `peg` to provide further examples of PEG grammars: | |
232 | ||
233 | * <https://github.com/tj/go-naturaldate> - natural date/time parsing | |
234 | * <https://github.com/robmuh/dtime> - easy date/time formats with duration spans | |
235 | * <https://github.com/gnames/gnparser> - scientific names parsing |
7 | 7 | "fmt" |
8 | 8 | "os" |
9 | 9 | "runtime" |
10 | ||
11 | "github.com/pointlander/peg/tree" | |
10 | 12 | ) |
11 | 13 | |
12 | 14 | func main() { |
13 | 15 | runtime.GOMAXPROCS(2) |
14 | t := New(true, true) | |
16 | t := tree.New(true, true, false) | |
15 | 17 | |
16 | 18 | /*package main |
17 | 19 | |
24 | 26 | *Tree |
25 | 27 | }*/ |
26 | 28 | t.AddPackage("main") |
29 | t.AddImport("github.com/pointlander/peg/tree") | |
27 | 30 | t.AddPeg("Peg") |
28 | 31 | t.AddState(` |
29 | *Tree | |
32 | *tree.Tree | |
30 | 33 | `) |
31 | 34 | |
32 | 35 | addDot := t.AddDot |
33 | 36 | addName := t.AddName |
34 | 37 | addCharacter := t.AddCharacter |
35 | addDoubleCharacter := t.AddDoubleCharacter | |
36 | addHexaCharacter := t.AddHexaCharacter | |
37 | 38 | addAction := t.AddAction |
38 | 39 | |
39 | 40 | addRule := func(name string, item func()) { |
84 | 85 | t.AddRange() |
85 | 86 | } |
86 | 87 | |
87 | addDoubleRange := func(begin, end string) { | |
88 | addCharacter(begin) | |
89 | addCharacter(end) | |
90 | t.AddDoubleRange() | |
91 | } | |
92 | ||
93 | 88 | addStar := func(item func()) { |
94 | 89 | item() |
95 | 90 | t.AddStar() |
96 | 91 | } |
97 | 92 | |
98 | addPlus := func(item func()) { | |
99 | item() | |
100 | t.AddPlus() | |
101 | } | |
102 | ||
103 | 93 | addQuery := func(item func()) { |
104 | 94 | item() |
105 | 95 | t.AddQuery() |
120 | 110 | t.AddPeekFor() |
121 | 111 | } |
122 | 112 | |
123 | /* Grammar <- Spacing 'package' MustSpacing Identifier { p.AddPackage(text) } | |
124 | Import* | |
125 | 'type' MustSpacing Identifier { p.AddPeg(text) } | |
126 | 'Peg' Spacing Action { p.AddState(text) } | |
127 | Definition+ EndOfFile */ | |
113 | /* Grammar <- Spacing { hdr; } Action* Definition* !. */ | |
128 | 114 | addRule("Grammar", func() { |
129 | 115 | addSequence( |
130 | 116 | func() { addName("Spacing") }, |
131 | func() { addString("package") }, | |
132 | func() { addName("MustSpacing") }, | |
133 | func() { addName("Identifier") }, | |
134 | func() { addAction(" p.AddPackage(text) ") }, | |
135 | func() { addStar(func() { addName("Import") }) }, | |
136 | func() { addString("type") }, | |
137 | func() { addName("MustSpacing") }, | |
138 | func() { addName("Identifier") }, | |
139 | func() { addAction(" p.AddPeg(text) ") }, | |
140 | func() { addString("Peg") }, | |
141 | func() { addName("Spacing") }, | |
142 | func() { addName("Action") }, | |
143 | func() { addAction(" p.AddState(text) ") }, | |
144 | func() { addPlus(func() { addName("Definition") }) }, | |
145 | func() { addName("EndOfFile") }, | |
146 | ) | |
147 | }) | |
148 | ||
149 | /* Import <- 'import' Spacing ["] < [a-zA-Z_/.\-]+ > ["] Spacing { p.AddImport(text) } */ | |
150 | addRule("Import", func() { | |
151 | addSequence( | |
152 | func() { addString("import") }, | |
153 | func() { addName("Spacing") }, | |
154 | func() { addCharacter(`"`) }, | |
155 | func() { | |
156 | addPush(func() { | |
157 | addPlus(func() { | |
158 | addAlternate( | |
159 | func() { addRange(`a`, `z`) }, | |
160 | func() { addRange(`A`, `Z`) }, | |
161 | func() { addCharacter(`_`) }, | |
162 | func() { addCharacter(`/`) }, | |
163 | func() { addCharacter(`.`) }, | |
164 | func() { addCharacter(`-`) }, | |
165 | ) | |
166 | }) | |
167 | }) | |
168 | }, | |
169 | func() { addCharacter(`"`) }, | |
170 | func() { addName("Spacing") }, | |
171 | func() { addAction(" p.AddImport(text) ") }, | |
117 | func() { addAction(`p.AddPackage("main")`) }, | |
118 | func() { addAction(`p.AddImport("github.com/pointlander/peg/tree")`) }, | |
119 | func() { addAction(`p.AddPeg("Peg")`) }, | |
120 | func() { addAction(`p.AddState("*tree.Tree")`) }, | |
121 | func() { addStar(func() { addName("Action") }) }, | |
122 | func() { addStar(func() { addName("Definition") }) }, | |
123 | func() { addPeekNot(func() { addDot() }) }, | |
172 | 124 | ) |
173 | 125 | }) |
174 | 126 | |
197 | 149 | ) |
198 | 150 | }) |
199 | 151 | |
200 | /* Expression <- Sequence (Slash Sequence { p.AddAlternate() } | |
201 | )* (Slash { p.AddNil(); p.AddAlternate() } | |
202 | )? | |
203 | / { p.AddNil() } */ | |
152 | /* Expression <- Sequence (Slash Sequence { p.AddAlternate() })* */ | |
204 | 153 | addRule("Expression", func() { |
205 | addAlternate( | |
206 | func() { | |
207 | addSequence( | |
208 | func() { addName("Sequence") }, | |
209 | func() { | |
210 | addStar(func() { | |
211 | addSequence( | |
212 | func() { addName("Slash") }, | |
213 | func() { addName("Sequence") }, | |
214 | func() { addAction(" p.AddAlternate() ") }, | |
215 | ) | |
216 | }) | |
217 | }, | |
218 | func() { | |
219 | addQuery(func() { | |
220 | addSequence( | |
221 | func() { addName("Slash") }, | |
222 | func() { addAction(" p.AddNil(); p.AddAlternate() ") }, | |
223 | ) | |
224 | }) | |
225 | }, | |
226 | ) | |
227 | }, | |
228 | func() { addAction(" p.AddNil() ") }, | |
229 | ) | |
230 | }) | |
231 | ||
232 | /* Sequence <- Prefix (Prefix { p.AddSequence() } | |
233 | )* */ | |
154 | addSequence( | |
155 | func() { addName("Sequence") }, | |
156 | func() { | |
157 | addStar(func() { | |
158 | addSequence( | |
159 | func() { addName("Slash") }, | |
160 | func() { addName("Sequence") }, | |
161 | func() { addAction(" p.AddAlternate() ") }, | |
162 | ) | |
163 | }) | |
164 | }, | |
165 | ) | |
166 | }) | |
167 | ||
168 | /* Sequence <- Prefix (Prefix { p.AddSequence() } )* */ | |
234 | 169 | addRule("Sequence", func() { |
235 | 170 | addSequence( |
236 | 171 | func() { addName("Prefix") }, |
245 | 180 | ) |
246 | 181 | }) |
247 | 182 | |
248 | /* Prefix <- And Action { p.AddPredicate(text) } | |
249 | / Not Action { p.AddStateChange(text) } | |
250 | / And Suffix { p.AddPeekFor() } | |
251 | / Not Suffix { p.AddPeekNot() } | |
252 | / Suffix */ | |
183 | /* Prefix <- '!' Suffix { p.AddPeekNot() } / Suffix */ | |
253 | 184 | addRule("Prefix", func() { |
254 | 185 | addAlternate( |
255 | 186 | func() { |
256 | 187 | addSequence( |
257 | func() { addName("And") }, | |
258 | func() { addName("Action") }, | |
259 | func() { addAction(" p.AddPredicate(text) ") }, | |
260 | ) | |
261 | }, | |
262 | func() { | |
263 | addSequence( | |
264 | func() { addName("Not") }, | |
265 | func() { addName("Action") }, | |
266 | func() { addAction(" p.AddStateChange(text) ") }, | |
267 | ) | |
268 | }, | |
269 | func() { | |
270 | addSequence( | |
271 | func() { addName("And") }, | |
272 | func() { addName("Suffix") }, | |
273 | func() { addAction(" p.AddPeekFor() ") }, | |
274 | ) | |
275 | }, | |
276 | func() { | |
277 | addSequence( | |
278 | func() { addName("Not") }, | |
188 | func() { addCharacter(`!`) }, | |
279 | 189 | func() { addName("Suffix") }, |
280 | 190 | func() { addAction(" p.AddPeekNot() ") }, |
281 | 191 | ) |
284 | 194 | ) |
285 | 195 | }) |
286 | 196 | |
287 | /* Suffix <- Primary (Question { p.AddQuery() } | |
288 | / Star { p.AddStar() } | |
289 | / Plus { p.AddPlus() } | |
290 | )? */ | |
197 | /* Suffix <- Primary ( Question { p.AddQuery() } | |
198 | / Star { p.AddStar() } | |
199 | )? */ | |
291 | 200 | addRule("Suffix", func() { |
292 | 201 | addSequence( |
293 | 202 | func() { addName("Primary") }, |
306 | 215 | func() { addAction(" p.AddStar() ") }, |
307 | 216 | ) |
308 | 217 | }, |
309 | func() { | |
310 | addSequence( | |
311 | func() { addName("Plus") }, | |
312 | func() { addAction(" p.AddPlus() ") }, | |
313 | ) | |
314 | }, | |
315 | 218 | ) |
316 | 219 | }) |
317 | 220 | }, |
366 | 269 | ) |
367 | 270 | }) |
368 | 271 | |
369 | /* Identifier <- < IdentStart IdentCont* > Spacing */ | |
272 | /* Identifier <- < Ident Ident* > Spacing */ | |
370 | 273 | addRule("Identifier", func() { |
371 | 274 | addSequence( |
372 | 275 | func() { |
373 | 276 | addPush(func() { |
374 | 277 | addSequence( |
375 | func() { addName("IdentStart") }, | |
376 | func() { addStar(func() { addName("IdentCont") }) }, | |
377 | ) | |
378 | }) | |
379 | }, | |
380 | func() { addName("Spacing") }, | |
381 | ) | |
382 | }) | |
383 | ||
384 | /* IdentStart <- [[a-z_]] */ | |
385 | addRule("IdentStart", func() { | |
386 | addAlternate( | |
387 | func() { addDoubleRange(`a`, `z`) }, | |
388 | func() { addCharacter(`_`) }, | |
389 | ) | |
390 | }) | |
391 | ||
392 | /* IdentCont <- IdentStart / [0-9] */ | |
393 | addRule("IdentCont", func() { | |
394 | addAlternate( | |
395 | func() { addName("IdentStart") }, | |
396 | func() { addRange(`0`, `9`) }, | |
397 | ) | |
398 | }) | |
399 | ||
400 | /* Literal <- ['] (!['] Char)? (!['] Char { p.AddSequence() } | |
401 | )* ['] Spacing | |
402 | / ["] (!["] DoubleChar)? (!["] DoubleChar { p.AddSequence() } | |
403 | )* ["] Spacing */ | |
278 | func() { addName("Ident") }, | |
279 | func() { addStar(func() { addName("Ident") }) }, | |
280 | ) | |
281 | }) | |
282 | }, | |
283 | func() { addName("Spacing") }, | |
284 | ) | |
285 | }) | |
286 | ||
287 | /* Ident <- [A-Za-z] */ | |
288 | addRule("Ident", func() { | |
289 | addAlternate( | |
290 | func() { addRange(`A`, `Z`) }, | |
291 | func() { addRange(`a`, `z`) }, | |
292 | ) | |
293 | }) | |
294 | ||
295 | /* Literal <- ['] !['] Char (!['] Char { p.AddSequence() } )* ['] Spacing */ | |
404 | 296 | addRule("Literal", func() { |
405 | addAlternate( | |
406 | func() { | |
407 | addSequence( | |
408 | func() { addCharacter(`'`) }, | |
409 | func() { | |
410 | addQuery(func() { | |
411 | addSequence( | |
412 | func() { addPeekNot(func() { addCharacter(`'`) }) }, | |
413 | func() { addName("Char") }, | |
414 | ) | |
415 | }) | |
416 | }, | |
417 | func() { | |
418 | addStar(func() { | |
419 | addSequence( | |
420 | func() { addPeekNot(func() { addCharacter(`'`) }) }, | |
421 | func() { addName("Char") }, | |
422 | func() { addAction(` p.AddSequence() `) }, | |
423 | ) | |
424 | }) | |
425 | }, | |
426 | func() { addCharacter(`'`) }, | |
427 | func() { addName("Spacing") }, | |
428 | ) | |
429 | }, | |
430 | func() { | |
431 | addSequence( | |
432 | func() { addCharacter(`"`) }, | |
433 | func() { | |
434 | addQuery(func() { | |
435 | addSequence( | |
436 | func() { addPeekNot(func() { addCharacter(`"`) }) }, | |
437 | func() { addName("DoubleChar") }, | |
438 | ) | |
439 | }) | |
440 | }, | |
441 | func() { | |
442 | addStar(func() { | |
443 | addSequence( | |
444 | func() { addPeekNot(func() { addCharacter(`"`) }) }, | |
445 | func() { addName("DoubleChar") }, | |
446 | func() { addAction(` p.AddSequence() `) }, | |
447 | ) | |
448 | }) | |
449 | }, | |
450 | func() { addCharacter(`"`) }, | |
451 | func() { addName("Spacing") }, | |
452 | ) | |
453 | }, | |
454 | ) | |
455 | }) | |
456 | ||
457 | /* Class <- ( '[[' ( '^' DoubleRanges { p.AddPeekNot(); p.AddDot(); p.AddSequence() } | |
458 | / DoubleRanges )? | |
459 | ']]' | |
460 | / '[' ( '^' Ranges { p.AddPeekNot(); p.AddDot(); p.AddSequence() } | |
461 | / Ranges )? | |
462 | ']' ) | |
463 | Spacing */ | |
297 | addSequence( | |
298 | func() { addCharacter(`'`) }, | |
299 | func() { | |
300 | addSequence( | |
301 | func() { addPeekNot(func() { addCharacter(`'`) }) }, | |
302 | func() { addName("Char") }, | |
303 | ) | |
304 | }, | |
305 | func() { | |
306 | addStar(func() { | |
307 | addSequence( | |
308 | func() { addPeekNot(func() { addCharacter(`'`) }) }, | |
309 | func() { addName("Char") }, | |
310 | func() { addAction(` p.AddSequence() `) }, | |
311 | ) | |
312 | }) | |
313 | }, | |
314 | func() { addCharacter(`'`) }, | |
315 | func() { addName("Spacing") }, | |
316 | ) | |
317 | }) | |
318 | ||
319 | /* Class <- '[' Range (!']' Range { p.AddAlternate() })* ']' Spacing */ | |
464 | 320 | addRule("Class", func() { |
465 | 321 | addSequence( |
466 | func() { | |
467 | addAlternate( | |
468 | func() { | |
469 | addSequence( | |
470 | func() { addString(`[[`) }, | |
471 | func() { | |
472 | addQuery(func() { | |
473 | addAlternate( | |
474 | func() { | |
475 | addSequence( | |
476 | func() { addCharacter(`^`) }, | |
477 | func() { addName("DoubleRanges") }, | |
478 | func() { addAction(` p.AddPeekNot(); p.AddDot(); p.AddSequence() `) }, | |
479 | ) | |
480 | }, | |
481 | func() { addName("DoubleRanges") }, | |
482 | ) | |
483 | }) | |
484 | }, | |
485 | func() { addString(`]]`) }, | |
486 | ) | |
487 | }, | |
488 | func() { | |
489 | addSequence( | |
490 | func() { addCharacter(`[`) }, | |
491 | func() { | |
492 | addQuery(func() { | |
493 | addAlternate( | |
494 | func() { | |
495 | addSequence( | |
496 | func() { addCharacter(`^`) }, | |
497 | func() { addName("Ranges") }, | |
498 | func() { addAction(` p.AddPeekNot(); p.AddDot(); p.AddSequence() `) }, | |
499 | ) | |
500 | }, | |
501 | func() { addName("Ranges") }, | |
502 | ) | |
503 | }) | |
504 | }, | |
505 | func() { addCharacter(`]`) }, | |
506 | ) | |
507 | }, | |
508 | ) | |
509 | }, | |
510 | func() { addName("Spacing") }, | |
511 | ) | |
512 | }) | |
513 | ||
514 | /* Ranges <- !']' Range (!']' Range { p.AddAlternate() } | |
515 | )* */ | |
516 | addRule("Ranges", func() { | |
517 | addSequence( | |
518 | func() { addPeekNot(func() { addCharacter(`]`) }) }, | |
322 | func() { addCharacter(`[`) }, | |
519 | 323 | func() { addName("Range") }, |
520 | 324 | func() { |
521 | 325 | addStar(func() { |
526 | 330 | ) |
527 | 331 | }) |
528 | 332 | }, |
529 | ) | |
530 | }) | |
531 | ||
532 | /* DoubleRanges <- !']]' DoubleRange (!']]' DoubleRange { p.AddAlternate() } | |
533 | )* */ | |
534 | addRule("DoubleRanges", func() { | |
535 | addSequence( | |
536 | func() { addPeekNot(func() { addString(`]]`) }) }, | |
537 | func() { addName("DoubleRange") }, | |
538 | func() { | |
539 | addStar(func() { | |
540 | addSequence( | |
541 | func() { addPeekNot(func() { addString(`]]`) }) }, | |
542 | func() { addName("DoubleRange") }, | |
543 | func() { addAction(" p.AddAlternate() ") }, | |
544 | ) | |
545 | }) | |
546 | }, | |
333 | func() { addCharacter(`]`) }, | |
334 | func() { addName("Spacing") }, | |
547 | 335 | ) |
548 | 336 | }) |
549 | 337 | |
563 | 351 | ) |
564 | 352 | }) |
565 | 353 | |
566 | /* DoubleRange <- Char '-' Char { p.AddDoubleRange() } | |
567 | / DoubleChar */ | |
568 | addRule("DoubleRange", func() { | |
569 | addAlternate( | |
570 | func() { | |
571 | addSequence( | |
572 | func() { addName("Char") }, | |
573 | func() { addCharacter(`-`) }, | |
574 | func() { addName("Char") }, | |
575 | func() { addAction(" p.AddDoubleRange() ") }, | |
576 | ) | |
577 | }, | |
578 | func() { addName("DoubleChar") }, | |
579 | ) | |
580 | }) | |
581 | ||
582 | /* Char <- Escape | |
583 | / !'\\' <.> { p.AddCharacter(text) } */ | |
354 | /* Char <- Escape | |
355 | / '\\' "0x"<[0-9a-f]*> { p.AddHexaCharacter(text) } | |
356 | / '\\\\' { p.AddCharacter("\\") } | |
357 | / !'\\' <.> { p.AddCharacter(text) } */ | |
584 | 358 | addRule("Char", func() { |
585 | 359 | addAlternate( |
586 | func() { addName("Escape") }, | |
587 | func() { | |
588 | addSequence( | |
589 | func() { addPeekNot(func() { addCharacter("\\") }) }, | |
590 | func() { addPush(func() { addDot() }) }, | |
591 | func() { addAction(` p.AddCharacter(text) `) }, | |
592 | ) | |
593 | }, | |
594 | ) | |
595 | }) | |
596 | ||
597 | /* DoubleChar <- Escape | |
598 | / <[a-zA-Z]> { p.AddDoubleCharacter(text) } | |
599 | / !'\\' <.> { p.AddCharacter(text) } */ | |
600 | addRule("DoubleChar", func() { | |
601 | addAlternate( | |
602 | func() { addName("Escape") }, | |
603 | func() { | |
604 | addSequence( | |
360 | func() { | |
361 | addSequence( | |
362 | func() { addCharacter("\\") }, | |
363 | func() { addCharacter(`0`) }, | |
364 | func() { addCharacter(`x`) }, | |
605 | 365 | func() { |
606 | 366 | addPush(func() { |
607 | addAlternate( | |
608 | func() { addRange(`a`, `z`) }, | |
609 | func() { addRange(`A`, `Z`) }, | |
610 | ) | |
611 | }) | |
612 | }, | |
613 | func() { addAction(` p.AddDoubleCharacter(text) `) }, | |
614 | ) | |
615 | }, | |
616 | func() { | |
617 | addSequence( | |
618 | func() { addPeekNot(func() { addCharacter("\\") }) }, | |
619 | func() { addPush(func() { addDot() }) }, | |
620 | func() { addAction(` p.AddCharacter(text) `) }, | |
621 | ) | |
622 | }, | |
623 | ) | |
624 | }) | |
625 | ||
626 | /* Escape <- "\\a" { p.AddCharacter("\a") } # bell | |
627 | / "\\b" { p.AddCharacter("\b") } # bs | |
628 | / "\\e" { p.AddCharacter("\x1B") } # esc | |
629 | / "\\f" { p.AddCharacter("\f") } # ff | |
630 | / "\\n" { p.AddCharacter("\n") } # nl | |
631 | / "\\r" { p.AddCharacter("\r") } # cr | |
632 | / "\\t" { p.AddCharacter("\t") } # ht | |
633 | / "\\v" { p.AddCharacter("\v") } # vt | |
634 | / "\\'" { p.AddCharacter("'") } | |
635 | / '\\"' { p.AddCharacter("\"") } | |
636 | / '\\[' { p.AddCharacter("[") } | |
637 | / '\\]' { p.AddCharacter("]") } | |
638 | / '\\-' { p.AddCharacter("-") } | |
639 | / '\\' "0x"<[0-9a-fA-F]+> { p.AddHexaCharacter(text) } | |
640 | / '\\' <[0-3][0-7][0-7]> { p.AddOctalCharacter(text) } | |
641 | / '\\' <[0-7][0-7]?> { p.AddOctalCharacter(text) } | |
642 | / '\\\\' { p.AddCharacter("\\") } */ | |
643 | addRule("Escape", func() { | |
644 | addAlternate( | |
645 | func() { | |
646 | addSequence( | |
647 | func() { addCharacter("\\") }, | |
648 | func() { addDoubleCharacter(`a`) }, | |
649 | func() { addAction(` p.AddCharacter("\a") `) }, | |
650 | ) | |
651 | }, | |
652 | func() { | |
653 | addSequence( | |
654 | func() { addCharacter("\\") }, | |
655 | func() { addDoubleCharacter(`b`) }, | |
656 | func() { addAction(` p.AddCharacter("\b") `) }, | |
657 | ) | |
658 | }, | |
659 | func() { | |
660 | addSequence( | |
661 | func() { addCharacter("\\") }, | |
662 | func() { addDoubleCharacter(`e`) }, | |
663 | func() { addAction(` p.AddCharacter("\x1B") `) }, | |
664 | ) | |
665 | }, | |
666 | func() { | |
667 | addSequence( | |
668 | func() { addCharacter("\\") }, | |
669 | func() { addDoubleCharacter(`f`) }, | |
670 | func() { addAction(` p.AddCharacter("\f") `) }, | |
671 | ) | |
672 | }, | |
673 | func() { | |
674 | addSequence( | |
675 | func() { addCharacter("\\") }, | |
676 | func() { addDoubleCharacter(`n`) }, | |
677 | func() { addAction(` p.AddCharacter("\n") `) }, | |
678 | ) | |
679 | }, | |
680 | func() { | |
681 | addSequence( | |
682 | func() { addCharacter("\\") }, | |
683 | func() { addDoubleCharacter(`r`) }, | |
684 | func() { addAction(` p.AddCharacter("\r") `) }, | |
685 | ) | |
686 | }, | |
687 | func() { | |
688 | addSequence( | |
689 | func() { addCharacter("\\") }, | |
690 | func() { addDoubleCharacter(`t`) }, | |
691 | func() { addAction(` p.AddCharacter("\t") `) }, | |
692 | ) | |
693 | }, | |
694 | func() { | |
695 | addSequence( | |
696 | func() { addCharacter("\\") }, | |
697 | func() { addDoubleCharacter(`v`) }, | |
698 | func() { addAction(` p.AddCharacter("\v") `) }, | |
699 | ) | |
700 | }, | |
701 | func() { | |
702 | addSequence( | |
703 | func() { addCharacter("\\") }, | |
704 | func() { addCharacter(`'`) }, | |
705 | func() { addAction(` p.AddCharacter("'") `) }, | |
706 | ) | |
707 | }, | |
708 | func() { | |
709 | addSequence( | |
710 | func() { addCharacter("\\") }, | |
711 | func() { addCharacter(`"`) }, | |
712 | func() { addAction(` p.AddCharacter("\"") `) }, | |
713 | ) | |
714 | }, | |
715 | func() { | |
716 | addSequence( | |
717 | func() { addCharacter("\\") }, | |
718 | func() { addCharacter(`[`) }, | |
719 | func() { addAction(` p.AddCharacter("[") `) }, | |
720 | ) | |
721 | }, | |
722 | func() { | |
723 | addSequence( | |
724 | func() { addCharacter("\\") }, | |
725 | func() { addCharacter(`]`) }, | |
726 | func() { addAction(` p.AddCharacter("]") `) }, | |
727 | ) | |
728 | }, | |
729 | func() { | |
730 | addSequence( | |
731 | func() { addCharacter("\\") }, | |
732 | func() { addCharacter(`-`) }, | |
733 | func() { addAction(` p.AddCharacter("-") `) }, | |
734 | ) | |
735 | }, | |
736 | func() { | |
737 | addSequence( | |
738 | func() { addCharacter("\\") }, | |
739 | func() { | |
740 | addSequence( | |
741 | func() { addCharacter(`0`) }, | |
742 | func() { addDoubleCharacter(`x`) }, | |
743 | ) | |
744 | }, | |
745 | func() { | |
746 | addPush(func() { | |
747 | addPlus(func() { | |
367 | addStar(func() { | |
748 | 368 | addAlternate( |
749 | 369 | func() { addRange(`0`, `9`) }, |
750 | 370 | func() { addRange(`a`, `f`) }, |
751 | func() { addRange(`A`, `F`) }, | |
752 | 371 | ) |
753 | 372 | }) |
754 | 373 | }) |
759 | 378 | func() { |
760 | 379 | addSequence( |
761 | 380 | func() { addCharacter("\\") }, |
762 | func() { | |
763 | addPush(func() { | |
764 | addSequence( | |
765 | func() { addRange(`0`, `3`) }, | |
766 | func() { addRange(`0`, `7`) }, | |
767 | func() { addRange(`0`, `7`) }, | |
768 | ) | |
769 | }) | |
770 | }, | |
771 | func() { addAction(` p.AddOctalCharacter(text) `) }, | |
772 | ) | |
773 | }, | |
774 | func() { | |
775 | addSequence( | |
776 | func() { addCharacter("\\") }, | |
777 | func() { | |
778 | addPush(func() { | |
779 | addSequence( | |
780 | func() { addRange(`0`, `7`) }, | |
781 | func() { addQuery(func() { addRange(`0`, `7`) }) }, | |
782 | ) | |
783 | }) | |
784 | }, | |
785 | func() { addAction(` p.AddOctalCharacter(text) `) }, | |
786 | ) | |
787 | }, | |
788 | func() { | |
789 | addSequence( | |
790 | func() { addCharacter("\\") }, | |
791 | 381 | func() { addCharacter("\\") }, |
792 | 382 | func() { addAction(` p.AddCharacter("\\") `) }, |
793 | 383 | ) |
794 | 384 | }, |
795 | ) | |
796 | }) | |
797 | ||
798 | /* LeftArrow <- ('<-' / '\0x2190') Spacing */ | |
385 | func() { | |
386 | addSequence( | |
387 | func() { addPeekNot(func() { addCharacter("\\") }) }, | |
388 | func() { addPush(func() { addDot() }) }, | |
389 | func() { addAction(` p.AddCharacter(text) `) }, | |
390 | ) | |
391 | }, | |
392 | ) | |
393 | }) | |
394 | /* LeftArrow <- '<-' Spacing */ | |
799 | 395 | addRule("LeftArrow", func() { |
800 | 396 | addSequence( |
801 | func() { | |
802 | addAlternate( | |
803 | func() { addString(`<-`) }, | |
804 | func() { addHexaCharacter("2190") }, | |
805 | ) | |
806 | }, | |
397 | func() { addString(`<-`) }, | |
807 | 398 | func() { addName("Spacing") }, |
808 | 399 | ) |
809 | 400 | }) |
816 | 407 | ) |
817 | 408 | }) |
818 | 409 | |
819 | /* And <- '&' Spacing */ | |
820 | addRule("And", func() { | |
821 | addSequence( | |
822 | func() { addCharacter(`&`) }, | |
823 | func() { addName("Spacing") }, | |
824 | ) | |
825 | }) | |
826 | ||
827 | /* Not <- '!' Spacing */ | |
828 | addRule("Not", func() { | |
829 | addSequence( | |
830 | func() { addCharacter(`!`) }, | |
831 | func() { addName("Spacing") }, | |
832 | ) | |
833 | }) | |
834 | ||
835 | 410 | /* Question <- '?' Spacing */ |
836 | 411 | addRule("Question", func() { |
837 | 412 | addSequence( |
848 | 423 | ) |
849 | 424 | }) |
850 | 425 | |
851 | /* Plus <- '+' Spacing */ | |
852 | addRule("Plus", func() { | |
853 | addSequence( | |
854 | func() { addCharacter(`+`) }, | |
855 | func() { addName("Spacing") }, | |
856 | ) | |
857 | }) | |
858 | ||
859 | 426 | /* Open <- '(' Spacing */ |
860 | 427 | addRule("Open", func() { |
861 | 428 | addSequence( |
880 | 447 | ) |
881 | 448 | }) |
882 | 449 | |
883 | /* SpaceComment <- (Space / Comment) */ | |
884 | addRule("SpaceComment", func() { | |
885 | addAlternate( | |
886 | func() { addName("Space") }, | |
887 | func() { addName("Comment") }, | |
888 | ) | |
889 | }) | |
890 | ||
891 | /* Spacing <- SpaceComment* */ | |
892 | 450 | addRule("Spacing", func() { |
893 | addStar(func() { addName("SpaceComment") }) | |
894 | }) | |
895 | ||
896 | /* MustSpacing <- SpaceComment+ */ | |
897 | addRule("MustSpacing", func() { | |
898 | addPlus(func() { t.AddName("SpaceComment") }) | |
899 | }) | |
900 | ||
901 | /* Comment <- '#' (!EndOfLine .)* EndOfLine */ | |
451 | addStar(func() { | |
452 | addAlternate( | |
453 | func() { addName("Space") }, | |
454 | func() { addName("Comment") }, | |
455 | ) | |
456 | }) | |
457 | }) | |
458 | ||
459 | /* Comment <- '#' (!EndOfLine .)* */ | |
902 | 460 | addRule("Comment", func() { |
903 | 461 | addSequence( |
904 | 462 | func() { addCharacter(`#`) }, |
910 | 468 | ) |
911 | 469 | }) |
912 | 470 | }, |
913 | func() { addName("EndOfLine") }, | |
914 | 471 | ) |
915 | 472 | }) |
916 | 473 | |
932 | 489 | ) |
933 | 490 | }) |
934 | 491 | |
935 | /* EndOfFile <- !. */ | |
936 | addRule("EndOfFile", func() { | |
937 | addPeekNot(func() { addDot() }) | |
938 | }) | |
939 | ||
940 | /* Action <- '{' < ActionBody* > '}' Spacing */ | |
492 | /* Action <- '{' < (![}].)* > '}' Spacing */ | |
941 | 493 | addRule("Action", func() { |
942 | 494 | addSequence( |
943 | 495 | func() { addCharacter(`{`) }, |
944 | 496 | func() { |
945 | 497 | addPush(func() { |
946 | addStar(func() { addName("ActionBody") }) | |
498 | addStar(func() { | |
499 | addSequence( | |
500 | func() { | |
501 | addPeekNot(func() { | |
502 | addCharacter(`}`) | |
503 | }) | |
504 | }, | |
505 | func() { addDot() }, | |
506 | ) | |
507 | }) | |
947 | 508 | }) |
948 | 509 | }, |
949 | 510 | func() { addCharacter(`}`) }, |
950 | 511 | func() { addName("Spacing") }, |
951 | ) | |
952 | }) | |
953 | ||
954 | /* ActionBody <- [^{}] / '{' ActionBody* '}' */ | |
955 | addRule("ActionBody", func() { | |
956 | addAlternate( | |
957 | func() { | |
958 | addSequence( | |
959 | func() { | |
960 | addPeekNot(func() { | |
961 | addAlternate( | |
962 | func() { addCharacter(`{`) }, | |
963 | func() { addCharacter(`}`) }, | |
964 | ) | |
965 | }) | |
966 | }, | |
967 | func() { addDot() }, | |
968 | ) | |
969 | }, | |
970 | func() { | |
971 | addSequence( | |
972 | func() { addCharacter(`{`) }, | |
973 | func() { addStar(func() { addName("ActionBody") }) }, | |
974 | func() { addCharacter(`}`) }, | |
975 | ) | |
976 | }, | |
977 | 512 | ) |
978 | 513 | }) |
979 | 514 | |
994 | 529 | }) |
995 | 530 | |
996 | 531 | filename := "bootstrap.peg.go" |
997 | out, error := os.OpenFile(filename, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644) | |
998 | if error != nil { | |
999 | fmt.Printf("%v: %v\n", filename, error) | |
532 | out, err := os.OpenFile(filename, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644) | |
533 | if err != nil { | |
534 | fmt.Printf("%v: %v\n", filename, err) | |
1000 | 535 | return |
1001 | 536 | } |
1002 | 537 | defer out.Close() |
1003 | t.Compile(filename, out) | |
538 | t.Compile(filename, os.Args, out) | |
1004 | 539 | } |
0 | package main | |
1 | ||
2 | import ( | |
3 | "fmt" | |
4 | "math" | |
5 | "sort" | |
6 | "strconv" | |
7 | ) | |
8 | ||
// endSymbol is appended to the input buffer as an end-of-input sentinel.
// Its value (0x10FFFF + 1) lies outside the valid Unicode code-point range,
// so it can never collide with a real input rune; matchDot stops on it.
const endSymbol rune = 1114112

/* The rule types inferred from the grammar are below. */
type pegRule uint8

// Rule identifiers for the grammar rules and their embedded actions.
// NOTE: the iota order here must stay in sync with the rul3s name table
// below and with the generated rules array in Peg; do not reorder.
const (
	ruleUnknown pegRule = iota
	ruleGrammar
	ruleImport
	ruleDefinition
	ruleExpression
	ruleSequence
	rulePrefix
	ruleSuffix
	rulePrimary
	ruleIdentifier
	ruleIdentStart
	ruleIdentCont
	ruleLiteral
	ruleClass
	ruleRanges
	ruleDoubleRanges
	ruleRange
	ruleDoubleRange
	ruleChar
	ruleDoubleChar
	ruleEscape
	ruleLeftArrow
	ruleSlash
	ruleAnd
	ruleNot
	ruleQuestion
	ruleStar
	rulePlus
	ruleOpen
	ruleClose
	ruleDot
	ruleSpaceComment
	ruleSpacing
	ruleMustSpacing
	ruleComment
	ruleSpace
	ruleEndOfLine
	ruleEndOfFile
	ruleAction
	ruleActionBody
	ruleBegin
	ruleEnd
	ruleAction0
	ruleAction1
	ruleAction2
	rulePegText
	ruleAction3
	ruleAction4
	ruleAction5
	ruleAction6
	ruleAction7
	ruleAction8
	ruleAction9
	ruleAction10
	ruleAction11
	ruleAction12
	ruleAction13
	ruleAction14
	ruleAction15
	ruleAction16
	ruleAction17
	ruleAction18
	ruleAction19
	ruleAction20
	ruleAction21
	ruleAction22
	ruleAction23
	ruleAction24
	ruleAction25
	ruleAction26
	ruleAction27
	ruleAction28
	ruleAction29
	ruleAction30
	ruleAction31
	ruleAction32
	ruleAction33
	ruleAction34
	ruleAction35
	ruleAction36
	ruleAction37
	ruleAction38
	ruleAction39
	ruleAction40
	ruleAction41
	ruleAction42
	ruleAction43
	ruleAction44
	ruleAction45
	ruleAction46
	ruleAction47
	ruleAction48

	// Pseudo-rules used only by the pretty-printer (PreOrder) to mark
	// text before, inside, and after a token span.
	rulePre
	ruleIn
	ruleSuf
)
112 | ||
// rul3s maps each pegRule (by its iota value) to a human-readable name for
// tracing, syntax printing, and parse-error messages. It is indexed by
// pegRule, so the order must mirror the const block above exactly.
var rul3s = [...]string{
	"Unknown",
	"Grammar",
	"Import",
	"Definition",
	"Expression",
	"Sequence",
	"Prefix",
	"Suffix",
	"Primary",
	"Identifier",
	"IdentStart",
	"IdentCont",
	"Literal",
	"Class",
	"Ranges",
	"DoubleRanges",
	"Range",
	"DoubleRange",
	"Char",
	"DoubleChar",
	"Escape",
	"LeftArrow",
	"Slash",
	"And",
	"Not",
	"Question",
	"Star",
	"Plus",
	"Open",
	"Close",
	"Dot",
	"SpaceComment",
	"Spacing",
	"MustSpacing",
	"Comment",
	"Space",
	"EndOfLine",
	"EndOfFile",
	"Action",
	"ActionBody",
	"Begin",
	"End",
	"Action0",
	"Action1",
	"Action2",
	"PegText",
	"Action3",
	"Action4",
	"Action5",
	"Action6",
	"Action7",
	"Action8",
	"Action9",
	"Action10",
	"Action11",
	"Action12",
	"Action13",
	"Action14",
	"Action15",
	"Action16",
	"Action17",
	"Action18",
	"Action19",
	"Action20",
	"Action21",
	"Action22",
	"Action23",
	"Action24",
	"Action25",
	"Action26",
	"Action27",
	"Action28",
	"Action29",
	"Action30",
	"Action31",
	"Action32",
	"Action33",
	"Action34",
	"Action35",
	"Action36",
	"Action37",
	"Action38",
	"Action39",
	"Action40",
	"Action41",
	"Action42",
	"Action43",
	"Action44",
	"Action45",
	"Action46",
	"Action47",
	"Action48",

	// Names for the rulePre / ruleIn / ruleSuf pseudo-rules.
	"Pre_",
	"_In_",
	"_Suf",
}
211 | ||
// node32 is one node of the parse tree built by AST: the matched token
// plus a link to its first child (up) and its next sibling (next).
type node32 struct {
	token32
	up, next *node32
}
216 | ||
217 | func (node *node32) print(depth int, buffer string) { | |
218 | for node != nil { | |
219 | for c := 0; c < depth; c++ { | |
220 | fmt.Printf(" ") | |
221 | } | |
222 | fmt.Printf("\x1B[34m%v\x1B[m %v\n", rul3s[node.pegRule], strconv.Quote(string(([]rune(buffer)[node.begin:node.end])))) | |
223 | if node.up != nil { | |
224 | node.up.print(depth+1, buffer) | |
225 | } | |
226 | node = node.next | |
227 | } | |
228 | } | |
229 | ||
// Print writes the whole parse tree rooted at node to stdout.
func (node *node32) Print(buffer string) {
	node.print(0, buffer)
}
233 | ||
// element is a link in the intrusive stack used by AST while folding the
// flat token stream into a tree.
type element struct {
	node *node32
	down *element
}
238 | ||
/* ${@} bit structure for abstract syntax tree */
// token32 records one matched rule: which rule, the [begin, end) rune span
// it covers, and next, which holds the tree depth during parsing and is
// repurposed as an ordinal/index by Order and PreOrder.
type token32 struct {
	pegRule
	begin, end, next uint32
}
244 | ||
245 | func (t *token32) isZero() bool { | |
246 | return t.pegRule == ruleUnknown && t.begin == 0 && t.end == 0 && t.next == 0 | |
247 | } | |
248 | ||
249 | func (t *token32) isParentOf(u token32) bool { | |
250 | return t.begin <= u.begin && t.end >= u.end && t.next > u.next | |
251 | } | |
252 | ||
253 | func (t *token32) getToken32() token32 { | |
254 | return token32{pegRule: t.pegRule, begin: uint32(t.begin), end: uint32(t.end), next: uint32(t.next)} | |
255 | } | |
256 | ||
257 | func (t *token32) String() string { | |
258 | return fmt.Sprintf("\x1B[34m%v\x1B[m %v %v %v", rul3s[t.pegRule], t.begin, t.end, t.next) | |
259 | } | |
260 | ||
// tokens32 is the flat token store filled during parsing. tree holds
// tokens in match order; ordered is the lazily built per-depth view
// produced (and cached) by Order.
type tokens32 struct {
	tree    []token32
	ordered [][]token32
}
265 | ||
266 | func (t *tokens32) trim(length int) { | |
267 | t.tree = t.tree[0:length] | |
268 | } | |
269 | ||
270 | func (t *tokens32) Print() { | |
271 | for _, token := range t.tree { | |
272 | fmt.Println(token.String()) | |
273 | } | |
274 | } | |
275 | ||
// Order groups the tokens by tree depth (token.next) and caches the result.
// On return, ordered[d] holds the depth-d tokens in match order, each with
// its next field rewritten to its index in t.tree; every per-depth slice has
// one trailing zero token as a sentinel. Also truncates t.tree at the first
// ruleUnknown entry.
func (t *tokens32) Order() [][]token32 {
	if t.ordered != nil {
		return t.ordered
	}

	// First pass: count tokens at each depth (depths[d] = count).
	depths := make([]int32, 1, math.MaxInt16)
	for i, token := range t.tree {
		if token.pegRule == ruleUnknown {
			t.tree = t.tree[:i]
			break
		}
		depth := int(token.next)
		if length := len(depths); depth >= length {
			// Extends within capacity (math.MaxInt16); new slots are zero.
			depths = depths[:depth+1]
		}
		depths[depth]++
	}
	depths = append(depths, 0)

	// Carve per-depth slices (each one longer than its count, leaving a
	// zero sentinel) out of a single pooled allocation; reset the counts
	// so they can serve as insertion cursors in the second pass.
	ordered, pool := make([][]token32, len(depths)), make([]token32, len(t.tree)+len(depths))
	for i, depth := range depths {
		depth++
		ordered[i], pool, depths[i] = pool[:depth], pool[depth:], 0
	}

	// Second pass: place each token in its depth bucket, replacing the
	// depth stored in next with the token's index in t.tree.
	for i, token := range t.tree {
		depth := token.next
		token.next = uint32(i)
		ordered[depth][depths[depth]] = token
		depths[depth]++
	}
	t.ordered = ordered
	return ordered
}
310 | ||
// state32 is one event emitted by PreOrder: the token, a snapshot of the
// per-depth cursors (depths), and whether the token is a leaf.
type state32 struct {
	token32
	depths []int32
	leaf   bool
}
316 | ||
// AST folds the flat token stream into a parse tree and returns its root.
// Tokens arrive in completion order (children before parents), so each new
// token pops every stacked node its span contains and adopts them as
// children; zero-width tokens are skipped.
func (t *tokens32) AST() *node32 {
	tokens := t.Tokens()
	stack := &element{node: &node32{token32: <-tokens}}
	for token := range tokens {
		if token.begin == token.end {
			continue
		}
		node := &node32{token32: token}
		// Pop nodes contained in this token's span; they become its
		// children (prepended, preserving original order via node.up).
		for stack != nil && stack.node.begin >= token.begin && stack.node.end <= token.end {
			stack.node.next = node.up
			node.up = stack.node
			stack = stack.down
		}
		stack = &element{node: node, down: stack}
	}
	return stack.node
}
334 | ||
// PreOrder streams the token tree in pre-order over a channel, interleaving
// synthetic rulePre/ruleIn/ruleSuf tokens for the text gaps before, between,
// and after children. It returns the channel and the Order()ed buckets.
// Events are served from a ring of 8 reusable state32 values, so consumers
// must not retain more than a few events at a time.
func (t *tokens32) PreOrder() (<-chan state32, [][]token32) {
	s, ordered := make(chan state32, 6), t.Order()
	go func() {
		var states [8]state32
		for i := range states {
			states[i].depths = make([]int32, len(ordered))
		}
		depths, state, depth := make([]int32, len(ordered)), 0, 1
		// write emits one event, rotating through the ring of states and
		// snapshotting the current per-depth cursors.
		write := func(t token32, leaf bool) {
			S := states[state]
			state, S.pegRule, S.begin, S.end, S.next, S.leaf = (state+1)%8, t.pegRule, t.begin, t.end, uint32(depth), leaf
			copy(S.depths, depths)
			s <- S
		}

		states[state].token32 = ordered[0][0]
		depths[0]++
		state++
		// a is the current parent, b the current token being visited.
		a, b := ordered[depth-1][depths[depth-1]-1], ordered[depth][depths[depth]]
	depthFirstSearch:
		for {
			for {
				// Emit an In gap between the previous sibling and b, or a
				// Pre gap between the parent's start and b.
				if i := depths[depth]; i > 0 {
					if c, j := ordered[depth][i-1], depths[depth-1]; a.isParentOf(c) &&
						(j < 2 || !ordered[depth-1][j-2].isParentOf(c)) {
						if c.end != b.begin {
							write(token32{pegRule: ruleIn, begin: c.end, end: b.begin}, true)
						}
						break
					}
				}

				if a.begin < b.begin {
					write(token32{pegRule: rulePre, begin: a.begin, end: b.begin}, true)
				}
				break
			}

			// Descend into b's first child, if any.
			next := depth + 1
			if c := ordered[next][depths[next]]; c.pegRule != ruleUnknown && b.isParentOf(c) {
				write(b, false)
				depths[depth]++
				depth, a, b = next, b, c
				continue
			}

			// b is a leaf; emit it, then climb until a next sibling exists,
			// emitting Suf gaps for trailing text inside each parent.
			write(b, true)
			depths[depth]++
			c, parent := ordered[depth][depths[depth]], true
			for {
				if c.pegRule != ruleUnknown && a.isParentOf(c) {
					b = c
					continue depthFirstSearch
				} else if parent && b.end != a.end {
					write(token32{pegRule: ruleSuf, begin: b.end, end: a.end}, true)
				}

				depth--
				if depth > 0 {
					a, b, c = ordered[depth-1][depths[depth-1]-1], a, ordered[depth][depths[depth]]
					parent = a.isParentOf(b)
					continue
				}

				break depthFirstSearch
			}
		}

		close(s)
	}()
	return s, ordered
}
407 | ||
// PrintSyntax dumps the token stream position-by-position with the stack of
// enclosing rules at each point, using ANSI colors: cyan for interior
// (non-leaf) events, red for zero-width leaves, blue for matched text. It
// also reports "skip"/"dupe" lines when positions are missing or repeated —
// a debugging aid for the token stream itself.
func (t *tokens32) PrintSyntax() {
	tokens, ordered := t.PreOrder()
	max := -1 // highest input position printed so far
	for token := range tokens {
		if !token.leaf {
			fmt.Printf("%v", token.begin)
			for i, leaf, depths := 0, int(token.next), token.depths; i < leaf; i++ {
				fmt.Printf(" \x1B[36m%v\x1B[m", rul3s[ordered[i][depths[i]-1].pegRule])
			}
			fmt.Printf(" \x1B[36m%v\x1B[m\n", rul3s[token.pegRule])
		} else if token.begin == token.end {
			fmt.Printf("%v", token.begin)
			for i, leaf, depths := 0, int(token.next), token.depths; i < leaf; i++ {
				fmt.Printf(" \x1B[31m%v\x1B[m", rul3s[ordered[i][depths[i]-1].pegRule])
			}
			fmt.Printf(" \x1B[31m%v\x1B[m\n", rul3s[token.pegRule])
		} else {
			for c, end := token.begin, token.end; c < end; c++ {
				if i := int(c); max+1 < i {
					// Gap: positions between max and i were never emitted.
					for j := max; j < i; j++ {
						fmt.Printf("skip %v %v\n", j, token.String())
					}
					max = i
				} else if i := int(c); i <= max {
					// Overlap: position already printed by an earlier token.
					for j := i; j <= max; j++ {
						fmt.Printf("dupe %v %v\n", j, token.String())
					}
				} else {
					max = int(c)
				}
				fmt.Printf("%v", c)
				for i, leaf, depths := 0, int(token.next), token.depths; i < leaf; i++ {
					fmt.Printf(" \x1B[34m%v\x1B[m", rul3s[ordered[i][depths[i]-1].pegRule])
				}
				fmt.Printf(" \x1B[34m%v\x1B[m\n", rul3s[token.pegRule])
			}
			fmt.Printf("\n")
		}
	}
}
448 | ||
449 | func (t *tokens32) PrintSyntaxTree(buffer string) { | |
450 | tokens, _ := t.PreOrder() | |
451 | for token := range tokens { | |
452 | for c := 0; c < int(token.next); c++ { | |
453 | fmt.Printf(" ") | |
454 | } | |
455 | fmt.Printf("\x1B[34m%v\x1B[m %v\n", rul3s[token.pegRule], strconv.Quote(string(([]rune(buffer)[token.begin:token.end])))) | |
456 | } | |
457 | } | |
458 | ||
459 | func (t *tokens32) Add(rule pegRule, begin, end, depth uint32, index int) { | |
460 | t.tree[index] = token32{pegRule: rule, begin: uint32(begin), end: uint32(end), next: uint32(depth)} | |
461 | } | |
462 | ||
463 | func (t *tokens32) Tokens() <-chan token32 { | |
464 | s := make(chan token32, 16) | |
465 | go func() { | |
466 | for _, v := range t.tree { | |
467 | s <- v.getToken32() | |
468 | } | |
469 | close(s) | |
470 | }() | |
471 | return s | |
472 | } | |
473 | ||
// Error collects, deepest-first, the second-to-last token recorded at each
// depth (the last entry per bucket is Order's zero sentinel) — a rough
// "where parsing stopped" trail used for error reporting. Buckets with no
// real token leave a zero token32 in the result.
func (t *tokens32) Error() []token32 {
	ordered := t.Order()
	length := len(ordered)
	// Note: the second `length` deliberately shadows the first with
	// length-1, the index of the deepest bucket.
	tokens, length := make([]token32, length), length-1
	for i := range tokens {
		o := ordered[length-i]
		if len(o) > 1 {
			tokens[i] = o[len(o)-2].getToken32()
		}
	}
	return tokens
}
486 | ||
487 | func (t *tokens32) Expand(index int) { | |
488 | tree := t.tree | |
489 | if index >= len(tree) { | |
490 | expanded := make([]token32, 2*len(tree)) | |
491 | copy(expanded, tree) | |
492 | t.tree = expanded | |
493 | } | |
494 | } | |
495 | ||
// Peg is the generated parser for the peg grammar. It embeds *Tree (the
// semantic-action target whose Add* methods Execute calls) and tokens32
// (the token store). Parse and Reset are closures installed by Init.
type Peg struct {
	*Tree

	Buffer string // input text
	buffer []rune // Buffer as runes, with endSymbol appended
	rules  [92]func() bool
	Parse  func(rule ...int) error
	Reset  func()
	Pretty bool // enable ANSI colors in error output
	tokens32
}
507 | ||
// textPosition is a 1-based line number and a column (symbol) within it.
type textPosition struct {
	line, symbol int
}

// textPositionMap maps a rune offset in the input to its line/column.
type textPositionMap map[int]textPosition
513 | ||
514 | func translatePositions(buffer []rune, positions []int) textPositionMap { | |
515 | length, translations, j, line, symbol := len(positions), make(textPositionMap, len(positions)), 0, 1, 0 | |
516 | sort.Ints(positions) | |
517 | ||
518 | search: | |
519 | for i, c := range buffer { | |
520 | if c == '\n' { | |
521 | line, symbol = line+1, 0 | |
522 | } else { | |
523 | symbol++ | |
524 | } | |
525 | if i == positions[j] { | |
526 | translations[positions[j]] = textPosition{line, symbol} | |
527 | for j++; j < length; j++ { | |
528 | if i != positions[j] { | |
529 | continue search | |
530 | } | |
531 | } | |
532 | break search | |
533 | } | |
534 | } | |
535 | ||
536 | return translations | |
537 | } | |
538 | ||
// parseError reports a failed parse: the parser (for its buffer and Pretty
// flag) and the furthest token reached before failure.
type parseError struct {
	p   *Peg
	max token32
}
543 | ||
544 | func (e *parseError) Error() string { | |
545 | tokens, error := []token32{e.max}, "\n" | |
546 | positions, p := make([]int, 2*len(tokens)), 0 | |
547 | for _, token := range tokens { | |
548 | positions[p], p = int(token.begin), p+1 | |
549 | positions[p], p = int(token.end), p+1 | |
550 | } | |
551 | translations := translatePositions(e.p.buffer, positions) | |
552 | format := "parse error near %v (line %v symbol %v - line %v symbol %v):\n%v\n" | |
553 | if e.p.Pretty { | |
554 | format = "parse error near \x1B[34m%v\x1B[m (line %v symbol %v - line %v symbol %v):\n%v\n" | |
555 | } | |
556 | for _, token := range tokens { | |
557 | begin, end := int(token.begin), int(token.end) | |
558 | error += fmt.Sprintf(format, | |
559 | rul3s[token.pegRule], | |
560 | translations[begin].line, translations[begin].symbol, | |
561 | translations[end].line, translations[end].symbol, | |
562 | strconv.Quote(string(e.p.buffer[begin:end]))) | |
563 | } | |
564 | ||
565 | return error | |
566 | } | |
567 | ||
// PrintSyntaxTree prints the parser's token tree against its input buffer.
func (p *Peg) PrintSyntaxTree() {
	p.tokens32.PrintSyntaxTree(p.Buffer)
}
571 | ||
// Highlighter prints the position-annotated token stream (see PrintSyntax).
func (p *Peg) Highlighter() {
	p.PrintSyntax()
}
575 | ||
// Execute replays the recorded token stream, invoking the semantic action
// behind each ruleActionN on the embedded Tree. rulePegText tokens update
// `text`, the captured input slice that subsequent actions consume. Must
// be called after a successful Parse.
func (p *Peg) Execute() {
	buffer, _buffer, text, begin, end := p.Buffer, p.buffer, "", 0, 0
	for token := range p.Tokens() {
		switch token.pegRule {

		case rulePegText:
			// Capture the matched text for the actions that follow.
			begin, end = int(token.begin), int(token.end)
			text = string(_buffer[begin:end])

		case ruleAction0:
			p.AddPackage(text)
		case ruleAction1:
			p.AddPeg(text)
		case ruleAction2:
			p.AddState(text)
		case ruleAction3:
			p.AddImport(text)
		case ruleAction4:
			p.AddRule(text)
		case ruleAction5:
			p.AddExpression()
		case ruleAction6:
			p.AddAlternate()
		case ruleAction7:
			p.AddNil()
			p.AddAlternate()
		case ruleAction8:
			p.AddNil()
		case ruleAction9:
			p.AddSequence()
		case ruleAction10:
			p.AddPredicate(text)
		case ruleAction11:
			p.AddStateChange(text)
		case ruleAction12:
			p.AddPeekFor()
		case ruleAction13:
			p.AddPeekNot()
		case ruleAction14:
			p.AddQuery()
		case ruleAction15:
			p.AddStar()
		case ruleAction16:
			p.AddPlus()
		case ruleAction17:
			p.AddName(text)
		case ruleAction18:
			p.AddDot()
		case ruleAction19:
			p.AddAction(text)
		case ruleAction20:
			p.AddPush()
		case ruleAction21:
			p.AddSequence()
		case ruleAction22:
			p.AddSequence()
		case ruleAction23:
			p.AddPeekNot()
			p.AddDot()
			p.AddSequence()
		case ruleAction24:
			p.AddPeekNot()
			p.AddDot()
			p.AddSequence()
		case ruleAction25:
			p.AddAlternate()
		case ruleAction26:
			p.AddAlternate()
		case ruleAction27:
			p.AddRange()
		case ruleAction28:
			p.AddDoubleRange()
		case ruleAction29:
			p.AddCharacter(text)
		case ruleAction30:
			p.AddDoubleCharacter(text)
		case ruleAction31:
			p.AddCharacter(text)
		// Escape sequences: each action adds its literal character.
		case ruleAction32:
			p.AddCharacter("\a")
		case ruleAction33:
			p.AddCharacter("\b")
		case ruleAction34:
			p.AddCharacter("\x1B")
		case ruleAction35:
			p.AddCharacter("\f")
		case ruleAction36:
			p.AddCharacter("\n")
		case ruleAction37:
			p.AddCharacter("\r")
		case ruleAction38:
			p.AddCharacter("\t")
		case ruleAction39:
			p.AddCharacter("\v")
		case ruleAction40:
			p.AddCharacter("'")
		case ruleAction41:
			p.AddCharacter("\"")
		case ruleAction42:
			p.AddCharacter("[")
		case ruleAction43:
			p.AddCharacter("]")
		case ruleAction44:
			p.AddCharacter("-")
		case ruleAction45:
			p.AddHexaCharacter(text)
		case ruleAction46:
			p.AddOctalCharacter(text)
		case ruleAction47:
			p.AddOctalCharacter(text)
		case ruleAction48:
			p.AddCharacter("\\")

		}
	}
	_, _, _, _, _ = buffer, _buffer, text, begin, end
}
693 | ||
694 | func (p *Peg) Init() { | |
695 | p.buffer = []rune(p.Buffer) | |
696 | if len(p.buffer) == 0 || p.buffer[len(p.buffer)-1] != endSymbol { | |
697 | p.buffer = append(p.buffer, endSymbol) | |
698 | } | |
699 | ||
700 | tree := tokens32{tree: make([]token32, math.MaxInt16)} | |
701 | var max token32 | |
702 | position, depth, tokenIndex, buffer, _rules := uint32(0), uint32(0), 0, p.buffer, p.rules | |
703 | ||
704 | p.Parse = func(rule ...int) error { | |
705 | r := 1 | |
706 | if len(rule) > 0 { | |
707 | r = rule[0] | |
708 | } | |
709 | matches := p.rules[r]() | |
710 | p.tokens32 = tree | |
711 | if matches { | |
712 | p.trim(tokenIndex) | |
713 | return nil | |
714 | } | |
715 | return &parseError{p, max} | |
716 | } | |
717 | ||
718 | p.Reset = func() { | |
719 | position, tokenIndex, depth = 0, 0, 0 | |
720 | } | |
721 | ||
722 | add := func(rule pegRule, begin uint32) { | |
723 | tree.Expand(tokenIndex) | |
724 | tree.Add(rule, begin, position, depth, tokenIndex) | |
725 | tokenIndex++ | |
726 | if begin != position && position > max.end { | |
727 | max = token32{rule, begin, position, depth} | |
728 | } | |
729 | } | |
730 | ||
731 | matchDot := func() bool { | |
732 | if buffer[position] != endSymbol { | |
733 | position++ | |
734 | return true | |
735 | } | |
736 | return false | |
737 | } | |
738 | ||
739 | /*matchChar := func(c byte) bool { | |
740 | if buffer[position] == c { | |
741 | position++ | |
742 | return true | |
743 | } | |
744 | return false | |
745 | }*/ | |
746 | ||
747 | /*matchRange := func(lower byte, upper byte) bool { | |
748 | if c := buffer[position]; c >= lower && c <= upper { | |
749 | position++ | |
750 | return true | |
751 | } | |
752 | return false | |
753 | }*/ | |
754 | ||
755 | _rules = [...]func() bool{ | |
756 | nil, | |
757 | /* 0 Grammar <- <(Spacing ('p' 'a' 'c' 'k' 'a' 'g' 'e') MustSpacing Identifier Action0 Import* ('t' 'y' 'p' 'e') MustSpacing Identifier Action1 ('P' 'e' 'g') Spacing Action Action2 Definition+ EndOfFile)> */ | |
758 | func() bool { | |
759 | position0, tokenIndex0, depth0 := position, tokenIndex, depth | |
760 | { | |
761 | position1 := position | |
762 | depth++ | |
763 | if !_rules[ruleSpacing]() { | |
764 | goto l0 | |
765 | } | |
766 | if buffer[position] != rune('p') { | |
767 | goto l0 | |
768 | } | |
769 | position++ | |
770 | if buffer[position] != rune('a') { | |
771 | goto l0 | |
772 | } | |
773 | position++ | |
774 | if buffer[position] != rune('c') { | |
775 | goto l0 | |
776 | } | |
777 | position++ | |
778 | if buffer[position] != rune('k') { | |
779 | goto l0 | |
780 | } | |
781 | position++ | |
782 | if buffer[position] != rune('a') { | |
783 | goto l0 | |
784 | } | |
785 | position++ | |
786 | if buffer[position] != rune('g') { | |
787 | goto l0 | |
788 | } | |
789 | position++ | |
790 | if buffer[position] != rune('e') { | |
791 | goto l0 | |
792 | } | |
793 | position++ | |
794 | if !_rules[ruleMustSpacing]() { | |
795 | goto l0 | |
796 | } | |
797 | if !_rules[ruleIdentifier]() { | |
798 | goto l0 | |
799 | } | |
800 | { | |
801 | add(ruleAction0, position) | |
802 | } | |
803 | l3: | |
804 | { | |
805 | position4, tokenIndex4, depth4 := position, tokenIndex, depth | |
806 | { | |
807 | position5 := position | |
808 | depth++ | |
809 | if buffer[position] != rune('i') { | |
810 | goto l4 | |
811 | } | |
812 | position++ | |
813 | if buffer[position] != rune('m') { | |
814 | goto l4 | |
815 | } | |
816 | position++ | |
817 | if buffer[position] != rune('p') { | |
818 | goto l4 | |
819 | } | |
820 | position++ | |
821 | if buffer[position] != rune('o') { | |
822 | goto l4 | |
823 | } | |
824 | position++ | |
825 | if buffer[position] != rune('r') { | |
826 | goto l4 | |
827 | } | |
828 | position++ | |
829 | if buffer[position] != rune('t') { | |
830 | goto l4 | |
831 | } | |
832 | position++ | |
833 | if !_rules[ruleSpacing]() { | |
834 | goto l4 | |
835 | } | |
836 | if buffer[position] != rune('"') { | |
837 | goto l4 | |
838 | } | |
839 | position++ | |
840 | { | |
841 | position6 := position | |
842 | depth++ | |
843 | { | |
844 | switch buffer[position] { | |
845 | case '-': | |
846 | if buffer[position] != rune('-') { | |
847 | goto l4 | |
848 | } | |
849 | position++ | |
850 | break | |
851 | case '.': | |
852 | if buffer[position] != rune('.') { | |
853 | goto l4 | |
854 | } | |
855 | position++ | |
856 | break | |
857 | case '/': | |
858 | if buffer[position] != rune('/') { | |
859 | goto l4 | |
860 | } | |
861 | position++ | |
862 | break | |
863 | case '_': | |
864 | if buffer[position] != rune('_') { | |
865 | goto l4 | |
866 | } | |
867 | position++ | |
868 | break | |
869 | case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': | |
870 | if c := buffer[position]; c < rune('A') || c > rune('Z') { | |
871 | goto l4 | |
872 | } | |
873 | position++ | |
874 | break | |
875 | default: | |
876 | if c := buffer[position]; c < rune('a') || c > rune('z') { | |
877 | goto l4 | |
878 | } | |
879 | position++ | |
880 | break | |
881 | } | |
882 | } | |
883 | ||
884 | l7: | |
885 | { | |
886 | position8, tokenIndex8, depth8 := position, tokenIndex, depth | |
887 | { | |
888 | switch buffer[position] { | |
889 | case '-': | |
890 | if buffer[position] != rune('-') { | |
891 | goto l8 | |
892 | } | |
893 | position++ | |
894 | break | |
895 | case '.': | |
896 | if buffer[position] != rune('.') { | |
897 | goto l8 | |
898 | } | |
899 | position++ | |
900 | break | |
901 | case '/': | |
902 | if buffer[position] != rune('/') { | |
903 | goto l8 | |
904 | } | |
905 | position++ | |
906 | break | |
907 | case '_': | |
908 | if buffer[position] != rune('_') { | |
909 | goto l8 | |
910 | } | |
911 | position++ | |
912 | break | |
913 | case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': | |
914 | if c := buffer[position]; c < rune('A') || c > rune('Z') { | |
915 | goto l8 | |
916 | } | |
917 | position++ | |
918 | break | |
919 | default: | |
920 | if c := buffer[position]; c < rune('a') || c > rune('z') { | |
921 | goto l8 | |
922 | } | |
923 | position++ | |
924 | break | |
925 | } | |
926 | } | |
927 | ||
928 | goto l7 | |
929 | l8: | |
930 | position, tokenIndex, depth = position8, tokenIndex8, depth8 | |
931 | } | |
932 | depth-- | |
933 | add(rulePegText, position6) | |
934 | } | |
935 | if buffer[position] != rune('"') { | |
936 | goto l4 | |
937 | } | |
938 | position++ | |
939 | if !_rules[ruleSpacing]() { | |
940 | goto l4 | |
941 | } | |
942 | { | |
943 | add(ruleAction3, position) | |
944 | } | |
945 | depth-- | |
946 | add(ruleImport, position5) | |
947 | } | |
948 | goto l3 | |
949 | l4: | |
950 | position, tokenIndex, depth = position4, tokenIndex4, depth4 | |
951 | } | |
952 | if buffer[position] != rune('t') { | |
953 | goto l0 | |
954 | } | |
955 | position++ | |
956 | if buffer[position] != rune('y') { | |
957 | goto l0 | |
958 | } | |
959 | position++ | |
960 | if buffer[position] != rune('p') { | |
961 | goto l0 | |
962 | } | |
963 | position++ | |
964 | if buffer[position] != rune('e') { | |
965 | goto l0 | |
966 | } | |
967 | position++ | |
968 | if !_rules[ruleMustSpacing]() { | |
969 | goto l0 | |
970 | } | |
971 | if !_rules[ruleIdentifier]() { | |
972 | goto l0 | |
973 | } | |
974 | { | |
975 | add(ruleAction1, position) | |
976 | } | |
977 | if buffer[position] != rune('P') { | |
978 | goto l0 | |
979 | } | |
980 | position++ | |
981 | if buffer[position] != rune('e') { | |
982 | goto l0 | |
983 | } | |
984 | position++ | |
985 | if buffer[position] != rune('g') { | |
986 | goto l0 | |
987 | } | |
988 | position++ | |
989 | if !_rules[ruleSpacing]() { | |
990 | goto l0 | |
991 | } | |
992 | if !_rules[ruleAction]() { | |
993 | goto l0 | |
994 | } | |
995 | { | |
996 | add(ruleAction2, position) | |
997 | } | |
998 | { | |
999 | position16 := position | |
1000 | depth++ | |
1001 | if !_rules[ruleIdentifier]() { | |
1002 | goto l0 | |
1003 | } | |
1004 | { | |
1005 | add(ruleAction4, position) | |
1006 | } | |
1007 | if !_rules[ruleLeftArrow]() { | |
1008 | goto l0 | |
1009 | } | |
1010 | if !_rules[ruleExpression]() { | |
1011 | goto l0 | |
1012 | } | |
1013 | { | |
1014 | add(ruleAction5, position) | |
1015 | } | |
1016 | { | |
1017 | position19, tokenIndex19, depth19 := position, tokenIndex, depth | |
1018 | { | |
1019 | position20, tokenIndex20, depth20 := position, tokenIndex, depth | |
1020 | if !_rules[ruleIdentifier]() { | |
1021 | goto l21 | |
1022 | } | |
1023 | if !_rules[ruleLeftArrow]() { | |
1024 | goto l21 | |
1025 | } | |
1026 | goto l20 | |
1027 | l21: | |
1028 | position, tokenIndex, depth = position20, tokenIndex20, depth20 | |
1029 | { | |
1030 | position22, tokenIndex22, depth22 := position, tokenIndex, depth | |
1031 | if !matchDot() { | |
1032 | goto l22 | |
1033 | } | |
1034 | goto l0 | |
1035 | l22: | |
1036 | position, tokenIndex, depth = position22, tokenIndex22, depth22 | |
1037 | } | |
1038 | } | |
1039 | l20: | |
1040 | position, tokenIndex, depth = position19, tokenIndex19, depth19 | |
1041 | } | |
1042 | depth-- | |
1043 | add(ruleDefinition, position16) | |
1044 | } | |
1045 | l14: | |
1046 | { | |
1047 | position15, tokenIndex15, depth15 := position, tokenIndex, depth | |
1048 | { | |
1049 | position23 := position | |
1050 | depth++ | |
1051 | if !_rules[ruleIdentifier]() { | |
1052 | goto l15 | |
1053 | } | |
1054 | { | |
1055 | add(ruleAction4, position) | |
1056 | } | |
1057 | if !_rules[ruleLeftArrow]() { | |
1058 | goto l15 | |
1059 | } | |
1060 | if !_rules[ruleExpression]() { | |
1061 | goto l15 | |
1062 | } | |
1063 | { | |
1064 | add(ruleAction5, position) | |
1065 | } | |
1066 | { | |
1067 | position26, tokenIndex26, depth26 := position, tokenIndex, depth | |
1068 | { | |
1069 | position27, tokenIndex27, depth27 := position, tokenIndex, depth | |
1070 | if !_rules[ruleIdentifier]() { | |
1071 | goto l28 | |
1072 | } | |
1073 | if !_rules[ruleLeftArrow]() { | |
1074 | goto l28 | |
1075 | } | |
1076 | goto l27 | |
1077 | l28: | |
1078 | position, tokenIndex, depth = position27, tokenIndex27, depth27 | |
1079 | { | |
1080 | position29, tokenIndex29, depth29 := position, tokenIndex, depth | |
1081 | if !matchDot() { | |
1082 | goto l29 | |
1083 | } | |
1084 | goto l15 | |
1085 | l29: | |
1086 | position, tokenIndex, depth = position29, tokenIndex29, depth29 | |
1087 | } | |
1088 | } | |
1089 | l27: | |
1090 | position, tokenIndex, depth = position26, tokenIndex26, depth26 | |
1091 | } | |
1092 | depth-- | |
1093 | add(ruleDefinition, position23) | |
1094 | } | |
1095 | goto l14 | |
1096 | l15: | |
1097 | position, tokenIndex, depth = position15, tokenIndex15, depth15 | |
1098 | } | |
1099 | { | |
1100 | position30 := position | |
1101 | depth++ | |
1102 | { | |
1103 | position31, tokenIndex31, depth31 := position, tokenIndex, depth | |
1104 | if !matchDot() { | |
1105 | goto l31 | |
1106 | } | |
1107 | goto l0 | |
1108 | l31: | |
1109 | position, tokenIndex, depth = position31, tokenIndex31, depth31 | |
1110 | } | |
1111 | depth-- | |
1112 | add(ruleEndOfFile, position30) | |
1113 | } | |
1114 | depth-- | |
1115 | add(ruleGrammar, position1) | |
1116 | } | |
1117 | return true | |
1118 | l0: | |
1119 | position, tokenIndex, depth = position0, tokenIndex0, depth0 | |
1120 | return false | |
1121 | }, | |
1122 | /* 1 Import <- <('i' 'm' 'p' 'o' 'r' 't' Spacing '"' <((&('-') '-') | (&('.') '.') | (&('/') '/') | (&('_') '_') | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z') [A-Z]) | (&('a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') [a-z]))+> '"' Spacing Action3)> */ | |
1123 | nil, | |
1124 | /* 2 Definition <- <(Identifier Action4 LeftArrow Expression Action5 &((Identifier LeftArrow) / !.))> */ | |
1125 | nil, | |
		/* 3 Expression <- <((Sequence (Slash Sequence Action6)* (Slash Action7)?) / Action8)> */
		// Matches an ordered-choice expression: a Sequence followed by any
		// number of '/' Sequence alternatives (Action6 after each), an
		// optional trailing '/' (Action7), or — if no Sequence matches at
		// all — just Action8 (empty expression). Backtracking is done by
		// saving and restoring (position, tokenIndex, depth); add() records
		// a token-tree node for the matched span. Always returns true
		// because the Action8 fallback consumes no input.
		func() bool {
			{
				position35 := position
				depth++
				{
					// Save state for the Sequence-vs-Action8 choice.
					position36, tokenIndex36, depth36 := position, tokenIndex, depth
					if !_rules[ruleSequence]() {
						goto l37
					}
				l38:
					// Zero or more: Slash Sequence Action6.
					{
						position39, tokenIndex39, depth39 := position, tokenIndex, depth
						if !_rules[ruleSlash]() {
							goto l39
						}
						if !_rules[ruleSequence]() {
							goto l39
						}
						{
							add(ruleAction6, position)
						}
						goto l38
					l39:
						position, tokenIndex, depth = position39, tokenIndex39, depth39
					}
					// Optional trailing Slash (Action7).
					{
						position41, tokenIndex41, depth41 := position, tokenIndex, depth
						if !_rules[ruleSlash]() {
							goto l41
						}
						{
							add(ruleAction7, position)
						}
						goto l42
					l41:
						position, tokenIndex, depth = position41, tokenIndex41, depth41
					}
				l42:
					goto l36
				l37:
					// First alternative failed: restore and take Action8.
					position, tokenIndex, depth = position36, tokenIndex36, depth36
					{
						add(ruleAction8, position)
					}
				}
			l36:
				depth--
				add(ruleExpression, position35)
			}
			return true
		},
		/* 4 Sequence <- <(Prefix (Prefix Action9)*)> */
		// Matches one Prefix followed by any number of additional Prefixes,
		// emitting Action9 after each extra one. Fails (restoring the saved
		// parser state) only if the first Prefix does not match.
		func() bool {
			position45, tokenIndex45, depth45 := position, tokenIndex, depth
			{
				position46 := position
				depth++
				if !_rules[rulePrefix]() {
					goto l45
				}
			l47:
				// Zero or more further Prefix matches; each backtracks
				// cleanly via the saved (position, tokenIndex, depth).
				{
					position48, tokenIndex48, depth48 := position, tokenIndex, depth
					if !_rules[rulePrefix]() {
						goto l48
					}
					{
						add(ruleAction9, position)
					}
					goto l47
				l48:
					position, tokenIndex, depth = position48, tokenIndex48, depth48
				}
				depth--
				add(ruleSequence, position46)
			}
			return true
		l45:
			position, tokenIndex, depth = position45, tokenIndex45, depth45
			return false
		},
		/* 5 Prefix <- <((And Action Action10) / (Not Action Action11) / ((&('!') (Not Suffix Action13)) | (&('&') (And Suffix Action12)) | (&('"' | '\'' | '(' | '.' | '<' | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z' | '[' | '_' | 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z' | '{') Suffix)))> */
		// Matches a Prefix: first tries '&'-Action and '!'-Action forms
		// (semantic predicates), then falls back to a switch on the next
		// input rune dispatching to Not+Suffix, And+Suffix, or a bare
		// Suffix. The generator compiled the final ordered choice into a
		// switch because each alternative is distinguished by its first
		// character.
		func() bool {
			position50, tokenIndex50, depth50 := position, tokenIndex, depth
			{
				position51 := position
				depth++
				{
					position52, tokenIndex52, depth52 := position, tokenIndex, depth
					// Alternative 1: And Action Action10.
					if !_rules[ruleAnd]() {
						goto l53
					}
					if !_rules[ruleAction]() {
						goto l53
					}
					{
						add(ruleAction10, position)
					}
					goto l52
				l53:
					position, tokenIndex, depth = position52, tokenIndex52, depth52
					// Alternative 2: Not Action Action11.
					if !_rules[ruleNot]() {
						goto l55
					}
					if !_rules[ruleAction]() {
						goto l55
					}
					{
						add(ruleAction11, position)
					}
					goto l52
				l55:
					position, tokenIndex, depth = position52, tokenIndex52, depth52
					// Alternative 3: dispatch on the lookahead rune.
					{
						switch buffer[position] {
						case '!':
							if !_rules[ruleNot]() {
								goto l50
							}
							if !_rules[ruleSuffix]() {
								goto l50
							}
							{
								add(ruleAction13, position)
							}
							break
						case '&':
							if !_rules[ruleAnd]() {
								goto l50
							}
							if !_rules[ruleSuffix]() {
								goto l50
							}
							{
								add(ruleAction12, position)
							}
							break
						default:
							if !_rules[ruleSuffix]() {
								goto l50
							}
							break
						}
					}

				}
			l52:
				depth--
				add(rulePrefix, position51)
			}
			return true
		l50:
			position, tokenIndex, depth = position50, tokenIndex50, depth50
			return false
		},
		/* 6 Suffix <- <(Primary ((&('+') (Plus Action16)) | (&('*') (Star Action15)) | (&('?') (Question Action14)))?)> */
		// Matches a Suffix: a Primary followed by an optional repetition
		// operator ('+' Plus/Action16, '*' Star/Action15, '?'
		// Question/Action14). The Primary rule (rule 7) is inlined here as
		// a switch on the lookahead rune: '<' capture group, '{' semantic
		// Action, '.' Dot, '[' character Class, quote-delimited Literal,
		// '(' parenthesized Expression, or a rule-reference Identifier not
		// followed by LeftArrow.
		func() bool {
			position60, tokenIndex60, depth60 := position, tokenIndex, depth
			{
				position61 := position
				depth++
				{
					// --- inlined Primary ---
					position62 := position
					depth++
					{
						switch buffer[position] {
						case '<':
							// Begin Expression End Action20 (text capture <...>).
							{
								position64 := position
								depth++
								if buffer[position] != rune('<') {
									goto l60
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l60
								}
								depth--
								add(ruleBegin, position64)
							}
							if !_rules[ruleExpression]() {
								goto l60
							}
							{
								position65 := position
								depth++
								if buffer[position] != rune('>') {
									goto l60
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l60
								}
								depth--
								add(ruleEnd, position65)
							}
							{
								add(ruleAction20, position)
							}
							break
						case '{':
							// Action Action19 (embedded { ... } code block).
							if !_rules[ruleAction]() {
								goto l60
							}
							{
								add(ruleAction19, position)
							}
							break
						case '.':
							// Dot Action18 (match-any).
							{
								position68 := position
								depth++
								if buffer[position] != rune('.') {
									goto l60
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l60
								}
								depth--
								add(ruleDot, position68)
							}
							{
								add(ruleAction18, position)
							}
							break
						case '[':
							// Class: either the double-bracket form
							// '[[' ('^'? DoubleRanges)? ']]' or the single
							// form '[' ('^'? Ranges)? ']'; '^' negates
							// (Action23/Action24).
							{
								position70 := position
								depth++
								{
									position71, tokenIndex71, depth71 := position, tokenIndex, depth
									if buffer[position] != rune('[') {
										goto l72
									}
									position++
									if buffer[position] != rune('[') {
										goto l72
									}
									position++
									{
										position73, tokenIndex73, depth73 := position, tokenIndex, depth
										{
											position75, tokenIndex75, depth75 := position, tokenIndex, depth
											if buffer[position] != rune('^') {
												goto l76
											}
											position++
											if !_rules[ruleDoubleRanges]() {
												goto l76
											}
											{
												add(ruleAction23, position)
											}
											goto l75
										l76:
											position, tokenIndex, depth = position75, tokenIndex75, depth75
											if !_rules[ruleDoubleRanges]() {
												goto l73
											}
										}
									l75:
										goto l74
									l73:
										position, tokenIndex, depth = position73, tokenIndex73, depth73
									}
								l74:
									if buffer[position] != rune(']') {
										goto l72
									}
									position++
									if buffer[position] != rune(']') {
										goto l72
									}
									position++
									goto l71
								l72:
									position, tokenIndex, depth = position71, tokenIndex71, depth71
									if buffer[position] != rune('[') {
										goto l60
									}
									position++
									{
										position78, tokenIndex78, depth78 := position, tokenIndex, depth
										{
											position80, tokenIndex80, depth80 := position, tokenIndex, depth
											if buffer[position] != rune('^') {
												goto l81
											}
											position++
											if !_rules[ruleRanges]() {
												goto l81
											}
											{
												add(ruleAction24, position)
											}
											goto l80
										l81:
											position, tokenIndex, depth = position80, tokenIndex80, depth80
											if !_rules[ruleRanges]() {
												goto l78
											}
										}
									l80:
										goto l79
									l78:
										position, tokenIndex, depth = position78, tokenIndex78, depth78
									}
								l79:
									if buffer[position] != rune(']') {
										goto l60
									}
									position++
								}
							l71:
								if !_rules[ruleSpacing]() {
									goto l60
								}
								depth--
								add(ruleClass, position70)
							}
							break
						case '"', '\'':
							// Literal: single-quoted Char* (Action21 per extra
							// char) or double-quoted DoubleChar* (Action22),
							// each followed by Spacing.
							{
								position83 := position
								depth++
								{
									position84, tokenIndex84, depth84 := position, tokenIndex, depth
									if buffer[position] != rune('\'') {
										goto l85
									}
									position++
									{
										position86, tokenIndex86, depth86 := position, tokenIndex, depth
										{
											position88, tokenIndex88, depth88 := position, tokenIndex, depth
											if buffer[position] != rune('\'') {
												goto l88
											}
											position++
											goto l86
										l88:
											position, tokenIndex, depth = position88, tokenIndex88, depth88
										}
										if !_rules[ruleChar]() {
											goto l86
										}
										goto l87
									l86:
										position, tokenIndex, depth = position86, tokenIndex86, depth86
									}
								l87:
								l89:
									{
										position90, tokenIndex90, depth90 := position, tokenIndex, depth
										{
											position91, tokenIndex91, depth91 := position, tokenIndex, depth
											if buffer[position] != rune('\'') {
												goto l91
											}
											position++
											goto l90
										l91:
											position, tokenIndex, depth = position91, tokenIndex91, depth91
										}
										if !_rules[ruleChar]() {
											goto l90
										}
										{
											add(ruleAction21, position)
										}
										goto l89
									l90:
										position, tokenIndex, depth = position90, tokenIndex90, depth90
									}
									if buffer[position] != rune('\'') {
										goto l85
									}
									position++
									if !_rules[ruleSpacing]() {
										goto l85
									}
									goto l84
								l85:
									position, tokenIndex, depth = position84, tokenIndex84, depth84
									if buffer[position] != rune('"') {
										goto l60
									}
									position++
									{
										position93, tokenIndex93, depth93 := position, tokenIndex, depth
										{
											position95, tokenIndex95, depth95 := position, tokenIndex, depth
											if buffer[position] != rune('"') {
												goto l95
											}
											position++
											goto l93
										l95:
											position, tokenIndex, depth = position95, tokenIndex95, depth95
										}
										if !_rules[ruleDoubleChar]() {
											goto l93
										}
										goto l94
									l93:
										position, tokenIndex, depth = position93, tokenIndex93, depth93
									}
								l94:
								l96:
									{
										position97, tokenIndex97, depth97 := position, tokenIndex, depth
										{
											position98, tokenIndex98, depth98 := position, tokenIndex, depth
											if buffer[position] != rune('"') {
												goto l98
											}
											position++
											goto l97
										l98:
											position, tokenIndex, depth = position98, tokenIndex98, depth98
										}
										if !_rules[ruleDoubleChar]() {
											goto l97
										}
										{
											add(ruleAction22, position)
										}
										goto l96
									l97:
										position, tokenIndex, depth = position97, tokenIndex97, depth97
									}
									if buffer[position] != rune('"') {
										goto l60
									}
									position++
									if !_rules[ruleSpacing]() {
										goto l60
									}
								}
							l84:
								depth--
								add(ruleLiteral, position83)
							}
							break
						case '(':
							// Open Expression Close (grouping).
							{
								position100 := position
								depth++
								if buffer[position] != rune('(') {
									goto l60
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l60
								}
								depth--
								add(ruleOpen, position100)
							}
							if !_rules[ruleExpression]() {
								goto l60
							}
							{
								position101 := position
								depth++
								if buffer[position] != rune(')') {
									goto l60
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l60
								}
								depth--
								add(ruleClose, position101)
							}
							break
						default:
							// Identifier !LeftArrow Action17 (rule reference,
							// rejected if it is actually the next definition).
							if !_rules[ruleIdentifier]() {
								goto l60
							}
							{
								position102, tokenIndex102, depth102 := position, tokenIndex, depth
								if !_rules[ruleLeftArrow]() {
									goto l102
								}
								goto l60
							l102:
								position, tokenIndex, depth = position102, tokenIndex102, depth102
							}
							{
								add(ruleAction17, position)
							}
							break
						}
					}

					depth--
					add(rulePrimary, position62)
				}
				// Optional repetition operator after the Primary.
				{
					position104, tokenIndex104, depth104 := position, tokenIndex, depth
					{
						switch buffer[position] {
						case '+':
							{
								position107 := position
								depth++
								if buffer[position] != rune('+') {
									goto l104
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l104
								}
								depth--
								add(rulePlus, position107)
							}
							{
								add(ruleAction16, position)
							}
							break
						case '*':
							{
								position109 := position
								depth++
								if buffer[position] != rune('*') {
									goto l104
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l104
								}
								depth--
								add(ruleStar, position109)
							}
							{
								add(ruleAction15, position)
							}
							break
						default:
							{
								position111 := position
								depth++
								if buffer[position] != rune('?') {
									goto l104
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l104
								}
								depth--
								add(ruleQuestion, position111)
							}
							{
								add(ruleAction14, position)
							}
							break
						}
					}

					goto l105
				l104:
					position, tokenIndex, depth = position104, tokenIndex104, depth104
				}
			l105:
				depth--
				add(ruleSuffix, position61)
			}
			return true
		l60:
			position, tokenIndex, depth = position60, tokenIndex60, depth60
			return false
		},
1700 | /* 7 Primary <- <((&('<') (Begin Expression End Action20)) | (&('{') (Action Action19)) | (&('.') (Dot Action18)) | (&('[') Class) | (&('"' | '\'') Literal) | (&('(') (Open Expression Close)) | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z' | '_' | 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') (Identifier !LeftArrow Action17)))> */ | |
1701 | nil, | |
		/* 8 Identifier <- <(<(IdentStart IdentCont*)> Spacing)> */
		// Matches an identifier: an IdentStart followed by zero or more
		// IdentCont (IdentStart or digit) characters, captured as PegText,
		// then trailing Spacing. Restores parser state on failure.
		func() bool {
			position114, tokenIndex114, depth114 := position, tokenIndex, depth
			{
				position115 := position
				depth++
				{
					// Text-capture span covering the identifier characters.
					position116 := position
					depth++
					if !_rules[ruleIdentStart]() {
						goto l114
					}
				l117:
					// Zero or more IdentCont: IdentStart or [0-9] (inlined).
					{
						position118, tokenIndex118, depth118 := position, tokenIndex, depth
						{
							position119 := position
							depth++
							{
								position120, tokenIndex120, depth120 := position, tokenIndex, depth
								if !_rules[ruleIdentStart]() {
									goto l121
								}
								goto l120
							l121:
								position, tokenIndex, depth = position120, tokenIndex120, depth120
								if c := buffer[position]; c < rune('0') || c > rune('9') {
									goto l118
								}
								position++
							}
						l120:
							depth--
							add(ruleIdentCont, position119)
						}
						goto l117
					l118:
						position, tokenIndex, depth = position118, tokenIndex118, depth118
					}
					depth--
					add(rulePegText, position116)
				}
				if !_rules[ruleSpacing]() {
					goto l114
				}
				depth--
				add(ruleIdentifier, position115)
			}
			return true
		l114:
			position, tokenIndex, depth = position114, tokenIndex114, depth114
			return false
		},
		/* 9 IdentStart <- <((&('_') '_') | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z') [A-Z]) | (&('a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') [a-z]))> */
		// Matches exactly one identifier-start character: '_', [A-Z], or
		// [a-z], dispatched via a switch on the lookahead rune. The range
		// re-checks inside each case mirror the generated grammar and are
		// what actually consume the character.
		func() bool {
			position122, tokenIndex122, depth122 := position, tokenIndex, depth
			{
				position123 := position
				depth++
				{
					switch buffer[position] {
					case '_':
						if buffer[position] != rune('_') {
							goto l122
						}
						position++
						break
					case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z':
						if c := buffer[position]; c < rune('A') || c > rune('Z') {
							goto l122
						}
						position++
						break
					default:
						if c := buffer[position]; c < rune('a') || c > rune('z') {
							goto l122
						}
						position++
						break
					}
				}

				depth--
				add(ruleIdentStart, position123)
			}
			return true
		l122:
			position, tokenIndex, depth = position122, tokenIndex122, depth122
			return false
		},
1792 | /* 10 IdentCont <- <(IdentStart / [0-9])> */ | |
1793 | nil, | |
1794 | /* 11 Literal <- <(('\'' (!'\'' Char)? (!'\'' Char Action21)* '\'' Spacing) / ('"' (!'"' DoubleChar)? (!'"' DoubleChar Action22)* '"' Spacing))> */ | |
1795 | nil, | |
1796 | /* 12 Class <- <((('[' '[' (('^' DoubleRanges Action23) / DoubleRanges)? (']' ']')) / ('[' (('^' Ranges Action24) / Ranges)? ']')) Spacing)> */ | |
1797 | nil, | |
		/* 13 Ranges <- <(!']' Range (!']' Range Action25)*)> */
		// Matches the body of a single-bracket character class: one Range,
		// then any number of further Ranges (Action25 after each), stopping
		// before the closing ']'. The !']' negative lookaheads consume no
		// input — they only guard against reading past the class.
		func() bool {
			position128, tokenIndex128, depth128 := position, tokenIndex, depth
			{
				position129 := position
				depth++
				{
					// !']' — fail the rule if the class is empty/closing.
					position130, tokenIndex130, depth130 := position, tokenIndex, depth
					if buffer[position] != rune(']') {
						goto l130
					}
					position++
					goto l128
				l130:
					position, tokenIndex, depth = position130, tokenIndex130, depth130
				}
				if !_rules[ruleRange]() {
					goto l128
				}
			l131:
				{
					position132, tokenIndex132, depth132 := position, tokenIndex, depth
					{
						position133, tokenIndex133, depth133 := position, tokenIndex, depth
						if buffer[position] != rune(']') {
							goto l133
						}
						position++
						goto l132
					l133:
						position, tokenIndex, depth = position133, tokenIndex133, depth133
					}
					if !_rules[ruleRange]() {
						goto l132
					}
					{
						add(ruleAction25, position)
					}
					goto l131
				l132:
					position, tokenIndex, depth = position132, tokenIndex132, depth132
				}
				depth--
				add(ruleRanges, position129)
			}
			return true
		l128:
			position, tokenIndex, depth = position128, tokenIndex128, depth128
			return false
		},
		/* 14 DoubleRanges <- <(!(']' ']') DoubleRange (!(']' ']') DoubleRange Action26)*)> */
		// Matches the body of a double-bracket class ('[[...]]'): one
		// DoubleRange, then any number more (Action26 after each), stopping
		// before the closing ']]'. The two-rune lookahead is checked and
		// then rewound so no input is consumed by the guard.
		func() bool {
			position135, tokenIndex135, depth135 := position, tokenIndex, depth
			{
				position136 := position
				depth++
				{
					// !(']' ']') guard.
					position137, tokenIndex137, depth137 := position, tokenIndex, depth
					if buffer[position] != rune(']') {
						goto l137
					}
					position++
					if buffer[position] != rune(']') {
						goto l137
					}
					position++
					goto l135
				l137:
					position, tokenIndex, depth = position137, tokenIndex137, depth137
				}
				if !_rules[ruleDoubleRange]() {
					goto l135
				}
			l138:
				{
					position139, tokenIndex139, depth139 := position, tokenIndex, depth
					{
						position140, tokenIndex140, depth140 := position, tokenIndex, depth
						if buffer[position] != rune(']') {
							goto l140
						}
						position++
						if buffer[position] != rune(']') {
							goto l140
						}
						position++
						goto l139
					l140:
						position, tokenIndex, depth = position140, tokenIndex140, depth140
					}
					if !_rules[ruleDoubleRange]() {
						goto l139
					}
					{
						add(ruleAction26, position)
					}
					goto l138
				l139:
					position, tokenIndex, depth = position139, tokenIndex139, depth139
				}
				depth--
				add(ruleDoubleRanges, position136)
			}
			return true
		l135:
			position, tokenIndex, depth = position135, tokenIndex135, depth135
			return false
		},
		/* 15 Range <- <((Char '-' Char Action27) / Char)> */
		// Matches one class element: either a character span "a-b"
		// (Action27) or, after backtracking, a single Char.
		func() bool {
			position142, tokenIndex142, depth142 := position, tokenIndex, depth
			{
				position143 := position
				depth++
				{
					position144, tokenIndex144, depth144 := position, tokenIndex, depth
					if !_rules[ruleChar]() {
						goto l145
					}
					if buffer[position] != rune('-') {
						goto l145
					}
					position++
					if !_rules[ruleChar]() {
						goto l145
					}
					{
						add(ruleAction27, position)
					}
					goto l144
				l145:
					// Span form failed: restore and try a lone Char.
					position, tokenIndex, depth = position144, tokenIndex144, depth144
					if !_rules[ruleChar]() {
						goto l142
					}
				}
			l144:
				depth--
				add(ruleRange, position143)
			}
			return true
		l142:
			position, tokenIndex, depth = position142, tokenIndex142, depth142
			return false
		},
		/* 16 DoubleRange <- <((Char '-' Char Action28) / DoubleChar)> */
		// Matches one double-class element: a character span "a-b"
		// (Action28) or, after backtracking, a single DoubleChar
		// (case-insensitive character).
		func() bool {
			position147, tokenIndex147, depth147 := position, tokenIndex, depth
			{
				position148 := position
				depth++
				{
					position149, tokenIndex149, depth149 := position, tokenIndex, depth
					if !_rules[ruleChar]() {
						goto l150
					}
					if buffer[position] != rune('-') {
						goto l150
					}
					position++
					if !_rules[ruleChar]() {
						goto l150
					}
					{
						add(ruleAction28, position)
					}
					goto l149
				l150:
					// Span form failed: restore and try DoubleChar.
					position, tokenIndex, depth = position149, tokenIndex149, depth149
					if !_rules[ruleDoubleChar]() {
						goto l147
					}
				}
			l149:
				depth--
				add(ruleDoubleRange, position148)
			}
			return true
		l147:
			position, tokenIndex, depth = position147, tokenIndex147, depth147
			return false
		},
		/* 17 Char <- <(Escape / (!'\\' <.> Action29))> */
		// Matches one character: an Escape sequence, or — provided the next
		// rune is not a backslash — any single rune, captured as PegText
		// and handed to Action29.
		func() bool {
			position152, tokenIndex152, depth152 := position, tokenIndex, depth
			{
				position153 := position
				depth++
				{
					position154, tokenIndex154, depth154 := position, tokenIndex, depth
					if !_rules[ruleEscape]() {
						goto l155
					}
					goto l154
				l155:
					position, tokenIndex, depth = position154, tokenIndex154, depth154
					{
						// !'\\' — a bare backslash must be an Escape, so fail.
						position156, tokenIndex156, depth156 := position, tokenIndex, depth
						if buffer[position] != rune('\\') {
							goto l156
						}
						position++
						goto l152
					l156:
						position, tokenIndex, depth = position156, tokenIndex156, depth156
					}
					{
						// Capture the single consumed rune as PegText.
						position157 := position
						depth++
						if !matchDot() {
							goto l152
						}
						depth--
						add(rulePegText, position157)
					}
					{
						add(ruleAction29, position)
					}
				}
			l154:
				depth--
				add(ruleChar, position153)
			}
			return true
		l152:
			position, tokenIndex, depth = position152, tokenIndex152, depth152
			return false
		},
2026 | /* 18 DoubleChar <- <(Escape / (<([a-z] / [A-Z])> Action30) / (!'\\' <.> Action31))> */ | |
2027 | func() bool { | |
2028 | position159, tokenIndex159, depth159 := position, tokenIndex, depth | |
2029 | { | |
2030 | position160 := position | |
2031 | depth++ | |
2032 | { | |
2033 | position161, tokenIndex161, depth161 := position, tokenIndex, depth | |
2034 | if !_rules[ruleEscape]() { | |
2035 | goto l162 | |
2036 | } | |
2037 | goto l161 | |
2038 | l162: | |
2039 | position, tokenIndex, depth = position161, tokenIndex161, depth161 | |
2040 | { | |
2041 | position164 := position | |
2042 | depth++ | |
2043 | { | |
2044 | position165, tokenIndex165, depth165 := position, tokenIndex, depth | |
2045 | if c := buffer[position]; c < rune('a') || c > rune('z') { | |
2046 | goto l166 | |
2047 | } | |
2048 | position++ | |
2049 | goto l165 | |
2050 | l166: | |
2051 | position, tokenIndex, depth = position165, tokenIndex165, depth165 | |
2052 | if c := buffer[position]; c < rune('A') || c > rune('Z') { | |
2053 | goto l163 | |
2054 | } | |
2055 | position++ | |
2056 | } | |
2057 | l165: | |
2058 | depth-- | |
2059 | add(rulePegText, position164) | |
2060 | } | |
2061 | { | |
2062 | add(ruleAction30, position) | |
2063 | } | |
2064 | goto l161 | |
2065 | l163: | |
2066 | position, tokenIndex, depth = position161, tokenIndex161, depth161 | |
2067 | { | |
2068 | position168, tokenIndex168, depth168 := position, tokenIndex, depth | |
2069 | if buffer[position] != rune('\\') { | |
2070 | goto l168 | |
2071 | } | |
2072 | position++ | |
2073 | goto l159 | |
2074 | l168: | |
2075 | position, tokenIndex, depth = position168, tokenIndex168, depth168 | |
2076 | } | |
2077 | { | |
2078 | position169 := position | |
2079 | depth++ | |
2080 | if !matchDot() { | |
2081 | goto l159 | |
2082 | } | |
2083 | depth-- | |
2084 | add(rulePegText, position169) | |
2085 | } | |
2086 | { | |
2087 | add(ruleAction31, position) | |
2088 | } | |
2089 | } | |
2090 | l161: | |
2091 | depth-- | |
2092 | add(ruleDoubleChar, position160) | |
2093 | } | |
2094 | return true | |
2095 | l159: | |
2096 | position, tokenIndex, depth = position159, tokenIndex159, depth159 | |
2097 | return false | |
2098 | }, | |
2099 | /* 19 Escape <- <(('\\' ('a' / 'A') Action32) / ('\\' ('b' / 'B') Action33) / ('\\' ('e' / 'E') Action34) / ('\\' ('f' / 'F') Action35) / ('\\' ('n' / 'N') Action36) / ('\\' ('r' / 'R') Action37) / ('\\' ('t' / 'T') Action38) / ('\\' ('v' / 'V') Action39) / ('\\' '\'' Action40) / ('\\' '"' Action41) / ('\\' '[' Action42) / ('\\' ']' Action43) / ('\\' '-' Action44) / ('\\' ('0' ('x' / 'X')) <((&('A' | 'B' | 'C' | 'D' | 'E' | 'F') [A-F]) | (&('a' | 'b' | 'c' | 'd' | 'e' | 'f') [a-f]) | (&('0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9') [0-9]))+> Action45) / ('\\' <([0-3] [0-7] [0-7])> Action46) / ('\\' <([0-7] [0-7]?)> Action47) / ('\\' '\\' Action48))> */ | |
2100 | func() bool { | |
2101 | position171, tokenIndex171, depth171 := position, tokenIndex, depth | |
2102 | { | |
2103 | position172 := position | |
2104 | depth++ | |
2105 | { | |
2106 | position173, tokenIndex173, depth173 := position, tokenIndex, depth | |
2107 | if buffer[position] != rune('\\') { | |
2108 | goto l174 | |
2109 | } | |
2110 | position++ | |
2111 | { | |
2112 | position175, tokenIndex175, depth175 := position, tokenIndex, depth | |
2113 | if buffer[position] != rune('a') { | |
2114 | goto l176 | |
2115 | } | |
2116 | position++ | |
2117 | goto l175 | |
2118 | l176: | |
2119 | position, tokenIndex, depth = position175, tokenIndex175, depth175 | |
2120 | if buffer[position] != rune('A') { | |
2121 | goto l174 | |
2122 | } | |
2123 | position++ | |
2124 | } | |
2125 | l175: | |
2126 | { | |
2127 | add(ruleAction32, position) | |
2128 | } | |
2129 | goto l173 | |
2130 | l174: | |
2131 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2132 | if buffer[position] != rune('\\') { | |
2133 | goto l178 | |
2134 | } | |
2135 | position++ | |
2136 | { | |
2137 | position179, tokenIndex179, depth179 := position, tokenIndex, depth | |
2138 | if buffer[position] != rune('b') { | |
2139 | goto l180 | |
2140 | } | |
2141 | position++ | |
2142 | goto l179 | |
2143 | l180: | |
2144 | position, tokenIndex, depth = position179, tokenIndex179, depth179 | |
2145 | if buffer[position] != rune('B') { | |
2146 | goto l178 | |
2147 | } | |
2148 | position++ | |
2149 | } | |
2150 | l179: | |
2151 | { | |
2152 | add(ruleAction33, position) | |
2153 | } | |
2154 | goto l173 | |
2155 | l178: | |
2156 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2157 | if buffer[position] != rune('\\') { | |
2158 | goto l182 | |
2159 | } | |
2160 | position++ | |
2161 | { | |
2162 | position183, tokenIndex183, depth183 := position, tokenIndex, depth | |
2163 | if buffer[position] != rune('e') { | |
2164 | goto l184 | |
2165 | } | |
2166 | position++ | |
2167 | goto l183 | |
2168 | l184: | |
2169 | position, tokenIndex, depth = position183, tokenIndex183, depth183 | |
2170 | if buffer[position] != rune('E') { | |
2171 | goto l182 | |
2172 | } | |
2173 | position++ | |
2174 | } | |
2175 | l183: | |
2176 | { | |
2177 | add(ruleAction34, position) | |
2178 | } | |
2179 | goto l173 | |
2180 | l182: | |
2181 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2182 | if buffer[position] != rune('\\') { | |
2183 | goto l186 | |
2184 | } | |
2185 | position++ | |
2186 | { | |
2187 | position187, tokenIndex187, depth187 := position, tokenIndex, depth | |
2188 | if buffer[position] != rune('f') { | |
2189 | goto l188 | |
2190 | } | |
2191 | position++ | |
2192 | goto l187 | |
2193 | l188: | |
2194 | position, tokenIndex, depth = position187, tokenIndex187, depth187 | |
2195 | if buffer[position] != rune('F') { | |
2196 | goto l186 | |
2197 | } | |
2198 | position++ | |
2199 | } | |
2200 | l187: | |
2201 | { | |
2202 | add(ruleAction35, position) | |
2203 | } | |
2204 | goto l173 | |
2205 | l186: | |
2206 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2207 | if buffer[position] != rune('\\') { | |
2208 | goto l190 | |
2209 | } | |
2210 | position++ | |
2211 | { | |
2212 | position191, tokenIndex191, depth191 := position, tokenIndex, depth | |
2213 | if buffer[position] != rune('n') { | |
2214 | goto l192 | |
2215 | } | |
2216 | position++ | |
2217 | goto l191 | |
2218 | l192: | |
2219 | position, tokenIndex, depth = position191, tokenIndex191, depth191 | |
2220 | if buffer[position] != rune('N') { | |
2221 | goto l190 | |
2222 | } | |
2223 | position++ | |
2224 | } | |
2225 | l191: | |
2226 | { | |
2227 | add(ruleAction36, position) | |
2228 | } | |
2229 | goto l173 | |
2230 | l190: | |
2231 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2232 | if buffer[position] != rune('\\') { | |
2233 | goto l194 | |
2234 | } | |
2235 | position++ | |
2236 | { | |
2237 | position195, tokenIndex195, depth195 := position, tokenIndex, depth | |
2238 | if buffer[position] != rune('r') { | |
2239 | goto l196 | |
2240 | } | |
2241 | position++ | |
2242 | goto l195 | |
2243 | l196: | |
2244 | position, tokenIndex, depth = position195, tokenIndex195, depth195 | |
2245 | if buffer[position] != rune('R') { | |
2246 | goto l194 | |
2247 | } | |
2248 | position++ | |
2249 | } | |
2250 | l195: | |
2251 | { | |
2252 | add(ruleAction37, position) | |
2253 | } | |
2254 | goto l173 | |
2255 | l194: | |
2256 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2257 | if buffer[position] != rune('\\') { | |
2258 | goto l198 | |
2259 | } | |
2260 | position++ | |
2261 | { | |
2262 | position199, tokenIndex199, depth199 := position, tokenIndex, depth | |
2263 | if buffer[position] != rune('t') { | |
2264 | goto l200 | |
2265 | } | |
2266 | position++ | |
2267 | goto l199 | |
2268 | l200: | |
2269 | position, tokenIndex, depth = position199, tokenIndex199, depth199 | |
2270 | if buffer[position] != rune('T') { | |
2271 | goto l198 | |
2272 | } | |
2273 | position++ | |
2274 | } | |
2275 | l199: | |
2276 | { | |
2277 | add(ruleAction38, position) | |
2278 | } | |
2279 | goto l173 | |
2280 | l198: | |
2281 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2282 | if buffer[position] != rune('\\') { | |
2283 | goto l202 | |
2284 | } | |
2285 | position++ | |
2286 | { | |
2287 | position203, tokenIndex203, depth203 := position, tokenIndex, depth | |
2288 | if buffer[position] != rune('v') { | |
2289 | goto l204 | |
2290 | } | |
2291 | position++ | |
2292 | goto l203 | |
2293 | l204: | |
2294 | position, tokenIndex, depth = position203, tokenIndex203, depth203 | |
2295 | if buffer[position] != rune('V') { | |
2296 | goto l202 | |
2297 | } | |
2298 | position++ | |
2299 | } | |
2300 | l203: | |
2301 | { | |
2302 | add(ruleAction39, position) | |
2303 | } | |
2304 | goto l173 | |
2305 | l202: | |
2306 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2307 | if buffer[position] != rune('\\') { | |
2308 | goto l206 | |
2309 | } | |
2310 | position++ | |
2311 | if buffer[position] != rune('\'') { | |
2312 | goto l206 | |
2313 | } | |
2314 | position++ | |
2315 | { | |
2316 | add(ruleAction40, position) | |
2317 | } | |
2318 | goto l173 | |
2319 | l206: | |
2320 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2321 | if buffer[position] != rune('\\') { | |
2322 | goto l208 | |
2323 | } | |
2324 | position++ | |
2325 | if buffer[position] != rune('"') { | |
2326 | goto l208 | |
2327 | } | |
2328 | position++ | |
2329 | { | |
2330 | add(ruleAction41, position) | |
2331 | } | |
2332 | goto l173 | |
2333 | l208: | |
2334 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2335 | if buffer[position] != rune('\\') { | |
2336 | goto l210 | |
2337 | } | |
2338 | position++ | |
2339 | if buffer[position] != rune('[') { | |
2340 | goto l210 | |
2341 | } | |
2342 | position++ | |
2343 | { | |
2344 | add(ruleAction42, position) | |
2345 | } | |
2346 | goto l173 | |
2347 | l210: | |
2348 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2349 | if buffer[position] != rune('\\') { | |
2350 | goto l212 | |
2351 | } | |
2352 | position++ | |
2353 | if buffer[position] != rune(']') { | |
2354 | goto l212 | |
2355 | } | |
2356 | position++ | |
2357 | { | |
2358 | add(ruleAction43, position) | |
2359 | } | |
2360 | goto l173 | |
2361 | l212: | |
2362 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2363 | if buffer[position] != rune('\\') { | |
2364 | goto l214 | |
2365 | } | |
2366 | position++ | |
2367 | if buffer[position] != rune('-') { | |
2368 | goto l214 | |
2369 | } | |
2370 | position++ | |
2371 | { | |
2372 | add(ruleAction44, position) | |
2373 | } | |
2374 | goto l173 | |
2375 | l214: | |
2376 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2377 | if buffer[position] != rune('\\') { | |
2378 | goto l216 | |
2379 | } | |
2380 | position++ | |
2381 | if buffer[position] != rune('0') { | |
2382 | goto l216 | |
2383 | } | |
2384 | position++ | |
2385 | { | |
2386 | position217, tokenIndex217, depth217 := position, tokenIndex, depth | |
2387 | if buffer[position] != rune('x') { | |
2388 | goto l218 | |
2389 | } | |
2390 | position++ | |
2391 | goto l217 | |
2392 | l218: | |
2393 | position, tokenIndex, depth = position217, tokenIndex217, depth217 | |
2394 | if buffer[position] != rune('X') { | |
2395 | goto l216 | |
2396 | } | |
2397 | position++ | |
2398 | } | |
2399 | l217: | |
2400 | { | |
2401 | position219 := position | |
2402 | depth++ | |
2403 | { | |
2404 | switch buffer[position] { | |
2405 | case 'A', 'B', 'C', 'D', 'E', 'F': | |
2406 | if c := buffer[position]; c < rune('A') || c > rune('F') { | |
2407 | goto l216 | |
2408 | } | |
2409 | position++ | |
2410 | break | |
2411 | case 'a', 'b', 'c', 'd', 'e', 'f': | |
2412 | if c := buffer[position]; c < rune('a') || c > rune('f') { | |
2413 | goto l216 | |
2414 | } | |
2415 | position++ | |
2416 | break | |
2417 | default: | |
2418 | if c := buffer[position]; c < rune('0') || c > rune('9') { | |
2419 | goto l216 | |
2420 | } | |
2421 | position++ | |
2422 | break | |
2423 | } | |
2424 | } | |
2425 | ||
2426 | l220: | |
2427 | { | |
2428 | position221, tokenIndex221, depth221 := position, tokenIndex, depth | |
2429 | { | |
2430 | switch buffer[position] { | |
2431 | case 'A', 'B', 'C', 'D', 'E', 'F': | |
2432 | if c := buffer[position]; c < rune('A') || c > rune('F') { | |
2433 | goto l221 | |
2434 | } | |
2435 | position++ | |
2436 | break | |
2437 | case 'a', 'b', 'c', 'd', 'e', 'f': | |
2438 | if c := buffer[position]; c < rune('a') || c > rune('f') { | |
2439 | goto l221 | |
2440 | } | |
2441 | position++ | |
2442 | break | |
2443 | default: | |
2444 | if c := buffer[position]; c < rune('0') || c > rune('9') { | |
2445 | goto l221 | |
2446 | } | |
2447 | position++ | |
2448 | break | |
2449 | } | |
2450 | } | |
2451 | ||
2452 | goto l220 | |
2453 | l221: | |
2454 | position, tokenIndex, depth = position221, tokenIndex221, depth221 | |
2455 | } | |
2456 | depth-- | |
2457 | add(rulePegText, position219) | |
2458 | } | |
2459 | { | |
2460 | add(ruleAction45, position) | |
2461 | } | |
2462 | goto l173 | |
2463 | l216: | |
2464 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2465 | if buffer[position] != rune('\\') { | |
2466 | goto l225 | |
2467 | } | |
2468 | position++ | |
2469 | { | |
2470 | position226 := position | |
2471 | depth++ | |
2472 | if c := buffer[position]; c < rune('0') || c > rune('3') { | |
2473 | goto l225 | |
2474 | } | |
2475 | position++ | |
2476 | if c := buffer[position]; c < rune('0') || c > rune('7') { | |
2477 | goto l225 | |
2478 | } | |
2479 | position++ | |
2480 | if c := buffer[position]; c < rune('0') || c > rune('7') { | |
2481 | goto l225 | |
2482 | } | |
2483 | position++ | |
2484 | depth-- | |
2485 | add(rulePegText, position226) | |
2486 | } | |
2487 | { | |
2488 | add(ruleAction46, position) | |
2489 | } | |
2490 | goto l173 | |
2491 | l225: | |
2492 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2493 | if buffer[position] != rune('\\') { | |
2494 | goto l228 | |
2495 | } | |
2496 | position++ | |
2497 | { | |
2498 | position229 := position | |
2499 | depth++ | |
2500 | if c := buffer[position]; c < rune('0') || c > rune('7') { | |
2501 | goto l228 | |
2502 | } | |
2503 | position++ | |
2504 | { | |
2505 | position230, tokenIndex230, depth230 := position, tokenIndex, depth | |
2506 | if c := buffer[position]; c < rune('0') || c > rune('7') { | |
2507 | goto l230 | |
2508 | } | |
2509 | position++ | |
2510 | goto l231 | |
2511 | l230: | |
2512 | position, tokenIndex, depth = position230, tokenIndex230, depth230 | |
2513 | } | |
2514 | l231: | |
2515 | depth-- | |
2516 | add(rulePegText, position229) | |
2517 | } | |
2518 | { | |
2519 | add(ruleAction47, position) | |
2520 | } | |
2521 | goto l173 | |
2522 | l228: | |
2523 | position, tokenIndex, depth = position173, tokenIndex173, depth173 | |
2524 | if buffer[position] != rune('\\') { | |
2525 | goto l171 | |
2526 | } | |
2527 | position++ | |
2528 | if buffer[position] != rune('\\') { | |
2529 | goto l171 | |
2530 | } | |
2531 | position++ | |
2532 | { | |
2533 | add(ruleAction48, position) | |
2534 | } | |
2535 | } | |
2536 | l173: | |
2537 | depth-- | |
2538 | add(ruleEscape, position172) | |
2539 | } | |
2540 | return true | |
2541 | l171: | |
2542 | position, tokenIndex, depth = position171, tokenIndex171, depth171 | |
2543 | return false | |
2544 | }, | |
2545 | /* 20 LeftArrow <- <((('<' '-') / '←') Spacing)> */ | |
2546 | func() bool { | |
2547 | position234, tokenIndex234, depth234 := position, tokenIndex, depth | |
2548 | { | |
2549 | position235 := position | |
2550 | depth++ | |
2551 | { | |
2552 | position236, tokenIndex236, depth236 := position, tokenIndex, depth | |
2553 | if buffer[position] != rune('<') { | |
2554 | goto l237 | |
2555 | } | |
2556 | position++ | |
2557 | if buffer[position] != rune('-') { | |
2558 | goto l237 | |
2559 | } | |
2560 | position++ | |
2561 | goto l236 | |
2562 | l237: | |
2563 | position, tokenIndex, depth = position236, tokenIndex236, depth236 | |
2564 | if buffer[position] != rune('←') { | |
2565 | goto l234 | |
2566 | } | |
2567 | position++ | |
2568 | } | |
2569 | l236: | |
2570 | if !_rules[ruleSpacing]() { | |
2571 | goto l234 | |
2572 | } | |
2573 | depth-- | |
2574 | add(ruleLeftArrow, position235) | |
2575 | } | |
2576 | return true | |
2577 | l234: | |
2578 | position, tokenIndex, depth = position234, tokenIndex234, depth234 | |
2579 | return false | |
2580 | }, | |
2581 | /* 21 Slash <- <('/' Spacing)> */ | |
2582 | func() bool { | |
2583 | position238, tokenIndex238, depth238 := position, tokenIndex, depth | |
2584 | { | |
2585 | position239 := position | |
2586 | depth++ | |
2587 | if buffer[position] != rune('/') { | |
2588 | goto l238 | |
2589 | } | |
2590 | position++ | |
2591 | if !_rules[ruleSpacing]() { | |
2592 | goto l238 | |
2593 | } | |
2594 | depth-- | |
2595 | add(ruleSlash, position239) | |
2596 | } | |
2597 | return true | |
2598 | l238: | |
2599 | position, tokenIndex, depth = position238, tokenIndex238, depth238 | |
2600 | return false | |
2601 | }, | |
2602 | /* 22 And <- <('&' Spacing)> */ | |
2603 | func() bool { | |
2604 | position240, tokenIndex240, depth240 := position, tokenIndex, depth | |
2605 | { | |
2606 | position241 := position | |
2607 | depth++ | |
2608 | if buffer[position] != rune('&') { | |
2609 | goto l240 | |
2610 | } | |
2611 | position++ | |
2612 | if !_rules[ruleSpacing]() { | |
2613 | goto l240 | |
2614 | } | |
2615 | depth-- | |
2616 | add(ruleAnd, position241) | |
2617 | } | |
2618 | return true | |
2619 | l240: | |
2620 | position, tokenIndex, depth = position240, tokenIndex240, depth240 | |
2621 | return false | |
2622 | }, | |
2623 | /* 23 Not <- <('!' Spacing)> */ | |
2624 | func() bool { | |
2625 | position242, tokenIndex242, depth242 := position, tokenIndex, depth | |
2626 | { | |
2627 | position243 := position | |
2628 | depth++ | |
2629 | if buffer[position] != rune('!') { | |
2630 | goto l242 | |
2631 | } | |
2632 | position++ | |
2633 | if !_rules[ruleSpacing]() { | |
2634 | goto l242 | |
2635 | } | |
2636 | depth-- | |
2637 | add(ruleNot, position243) | |
2638 | } | |
2639 | return true | |
2640 | l242: | |
2641 | position, tokenIndex, depth = position242, tokenIndex242, depth242 | |
2642 | return false | |
2643 | }, | |
2644 | /* 24 Question <- <('?' Spacing)> */ | |
2645 | nil, | |
2646 | /* 25 Star <- <('*' Spacing)> */ | |
2647 | nil, | |
2648 | /* 26 Plus <- <('+' Spacing)> */ | |
2649 | nil, | |
2650 | /* 27 Open <- <('(' Spacing)> */ | |
2651 | nil, | |
2652 | /* 28 Close <- <(')' Spacing)> */ | |
2653 | nil, | |
2654 | /* 29 Dot <- <('.' Spacing)> */ | |
2655 | nil, | |
2656 | /* 30 SpaceComment <- <(Space / Comment)> */ | |
2657 | func() bool { | |
2658 | position250, tokenIndex250, depth250 := position, tokenIndex, depth | |
2659 | { | |
2660 | position251 := position | |
2661 | depth++ | |
2662 | { | |
2663 | position252, tokenIndex252, depth252 := position, tokenIndex, depth | |
2664 | { | |
2665 | position254 := position | |
2666 | depth++ | |
2667 | { | |
2668 | switch buffer[position] { | |
2669 | case '\t': | |
2670 | if buffer[position] != rune('\t') { | |
2671 | goto l253 | |
2672 | } | |
2673 | position++ | |
2674 | break | |
2675 | case ' ': | |
2676 | if buffer[position] != rune(' ') { | |
2677 | goto l253 | |
2678 | } | |
2679 | position++ | |
2680 | break | |
2681 | default: | |
2682 | if !_rules[ruleEndOfLine]() { | |
2683 | goto l253 | |
2684 | } | |
2685 | break | |
2686 | } | |
2687 | } | |
2688 | ||
2689 | depth-- | |
2690 | add(ruleSpace, position254) | |
2691 | } | |
2692 | goto l252 | |
2693 | l253: | |
2694 | position, tokenIndex, depth = position252, tokenIndex252, depth252 | |
2695 | { | |
2696 | position256 := position | |
2697 | depth++ | |
2698 | if buffer[position] != rune('#') { | |
2699 | goto l250 | |
2700 | } | |
2701 | position++ | |
2702 | l257: | |
2703 | { | |
2704 | position258, tokenIndex258, depth258 := position, tokenIndex, depth | |
2705 | { | |
2706 | position259, tokenIndex259, depth259 := position, tokenIndex, depth | |
2707 | if !_rules[ruleEndOfLine]() { | |
2708 | goto l259 | |
2709 | } | |
2710 | goto l258 | |
2711 | l259: | |
2712 | position, tokenIndex, depth = position259, tokenIndex259, depth259 | |
2713 | } | |
2714 | if !matchDot() { | |
2715 | goto l258 | |
2716 | } | |
2717 | goto l257 | |
2718 | l258: | |
2719 | position, tokenIndex, depth = position258, tokenIndex258, depth258 | |
2720 | } | |
2721 | if !_rules[ruleEndOfLine]() { | |
2722 | goto l250 | |
2723 | } | |
2724 | depth-- | |
2725 | add(ruleComment, position256) | |
2726 | } | |
2727 | } | |
2728 | l252: | |
2729 | depth-- | |
2730 | add(ruleSpaceComment, position251) | |
2731 | } | |
2732 | return true | |
2733 | l250: | |
2734 | position, tokenIndex, depth = position250, tokenIndex250, depth250 | |
2735 | return false | |
2736 | }, | |
2737 | /* 31 Spacing <- <SpaceComment*> */ | |
2738 | func() bool { | |
2739 | { | |
2740 | position261 := position | |
2741 | depth++ | |
2742 | l262: | |
2743 | { | |
2744 | position263, tokenIndex263, depth263 := position, tokenIndex, depth | |
2745 | if !_rules[ruleSpaceComment]() { | |
2746 | goto l263 | |
2747 | } | |
2748 | goto l262 | |
2749 | l263: | |
2750 | position, tokenIndex, depth = position263, tokenIndex263, depth263 | |
2751 | } | |
2752 | depth-- | |
2753 | add(ruleSpacing, position261) | |
2754 | } | |
2755 | return true | |
2756 | }, | |
2757 | /* 32 MustSpacing <- <SpaceComment+> */ | |
2758 | func() bool { | |
2759 | position264, tokenIndex264, depth264 := position, tokenIndex, depth | |
2760 | { | |
2761 | position265 := position | |
2762 | depth++ | |
2763 | if !_rules[ruleSpaceComment]() { | |
2764 | goto l264 | |
2765 | } | |
2766 | l266: | |
2767 | { | |
2768 | position267, tokenIndex267, depth267 := position, tokenIndex, depth | |
2769 | if !_rules[ruleSpaceComment]() { | |
2770 | goto l267 | |
2771 | } | |
2772 | goto l266 | |
2773 | l267: | |
2774 | position, tokenIndex, depth = position267, tokenIndex267, depth267 | |
2775 | } | |
2776 | depth-- | |
2777 | add(ruleMustSpacing, position265) | |
2778 | } | |
2779 | return true | |
2780 | l264: | |
2781 | position, tokenIndex, depth = position264, tokenIndex264, depth264 | |
2782 | return false | |
2783 | }, | |
2784 | /* 33 Comment <- <('#' (!EndOfLine .)* EndOfLine)> */ | |
2785 | nil, | |
2786 | /* 34 Space <- <((&('\t') '\t') | (&(' ') ' ') | (&('\n' | '\r') EndOfLine))> */ | |
2787 | nil, | |
2788 | /* 35 EndOfLine <- <(('\r' '\n') / '\n' / '\r')> */ | |
2789 | func() bool { | |
2790 | position270, tokenIndex270, depth270 := position, tokenIndex, depth | |
2791 | { | |
2792 | position271 := position | |
2793 | depth++ | |
2794 | { | |
2795 | position272, tokenIndex272, depth272 := position, tokenIndex, depth | |
2796 | if buffer[position] != rune('\r') { | |
2797 | goto l273 | |
2798 | } | |
2799 | position++ | |
2800 | if buffer[position] != rune('\n') { | |
2801 | goto l273 | |
2802 | } | |
2803 | position++ | |
2804 | goto l272 | |
2805 | l273: | |
2806 | position, tokenIndex, depth = position272, tokenIndex272, depth272 | |
2807 | if buffer[position] != rune('\n') { | |
2808 | goto l274 | |
2809 | } | |
2810 | position++ | |
2811 | goto l272 | |
2812 | l274: | |
2813 | position, tokenIndex, depth = position272, tokenIndex272, depth272 | |
2814 | if buffer[position] != rune('\r') { | |
2815 | goto l270 | |
2816 | } | |
2817 | position++ | |
2818 | } | |
2819 | l272: | |
2820 | depth-- | |
2821 | add(ruleEndOfLine, position271) | |
2822 | } | |
2823 | return true | |
2824 | l270: | |
2825 | position, tokenIndex, depth = position270, tokenIndex270, depth270 | |
2826 | return false | |
2827 | }, | |
2828 | /* 36 EndOfFile <- <!.> */ | |
2829 | nil, | |
2830 | /* 37 Action <- <('{' <ActionBody*> '}' Spacing)> */ | |
2831 | func() bool { | |
2832 | position276, tokenIndex276, depth276 := position, tokenIndex, depth | |
2833 | { | |
2834 | position277 := position | |
2835 | depth++ | |
2836 | if buffer[position] != rune('{') { | |
2837 | goto l276 | |
2838 | } | |
2839 | position++ | |
2840 | { | |
2841 | position278 := position | |
2842 | depth++ | |
2843 | l279: | |
2844 | { | |
2845 | position280, tokenIndex280, depth280 := position, tokenIndex, depth | |
2846 | if !_rules[ruleActionBody]() { | |
2847 | goto l280 | |
2848 | } | |
2849 | goto l279 | |
2850 | l280: | |
2851 | position, tokenIndex, depth = position280, tokenIndex280, depth280 | |
2852 | } | |
2853 | depth-- | |
2854 | add(rulePegText, position278) | |
2855 | } | |
2856 | if buffer[position] != rune('}') { | |
2857 | goto l276 | |
2858 | } | |
2859 | position++ | |
2860 | if !_rules[ruleSpacing]() { | |
2861 | goto l276 | |
2862 | } | |
2863 | depth-- | |
2864 | add(ruleAction, position277) | |
2865 | } | |
2866 | return true | |
2867 | l276: | |
2868 | position, tokenIndex, depth = position276, tokenIndex276, depth276 | |
2869 | return false | |
2870 | }, | |
2871 | /* 38 ActionBody <- <((!('{' / '}') .) / ('{' ActionBody* '}'))> */ | |
2872 | func() bool { | |
2873 | position281, tokenIndex281, depth281 := position, tokenIndex, depth | |
2874 | { | |
2875 | position282 := position | |
2876 | depth++ | |
2877 | { | |
2878 | position283, tokenIndex283, depth283 := position, tokenIndex, depth | |
2879 | { | |
2880 | position285, tokenIndex285, depth285 := position, tokenIndex, depth | |
2881 | { | |
2882 | position286, tokenIndex286, depth286 := position, tokenIndex, depth | |
2883 | if buffer[position] != rune('{') { | |
2884 | goto l287 | |
2885 | } | |
2886 | position++ | |
2887 | goto l286 | |
2888 | l287: | |
2889 | position, tokenIndex, depth = position286, tokenIndex286, depth286 | |
2890 | if buffer[position] != rune('}') { | |
2891 | goto l285 | |
2892 | } | |
2893 | position++ | |
2894 | } | |
2895 | l286: | |
2896 | goto l284 | |
2897 | l285: | |
2898 | position, tokenIndex, depth = position285, tokenIndex285, depth285 | |
2899 | } | |
2900 | if !matchDot() { | |
2901 | goto l284 | |
2902 | } | |
2903 | goto l283 | |
2904 | l284: | |
2905 | position, tokenIndex, depth = position283, tokenIndex283, depth283 | |
2906 | if buffer[position] != rune('{') { | |
2907 | goto l281 | |
2908 | } | |
2909 | position++ | |
2910 | l288: | |
2911 | { | |
2912 | position289, tokenIndex289, depth289 := position, tokenIndex, depth | |
2913 | if !_rules[ruleActionBody]() { | |
2914 | goto l289 | |
2915 | } | |
2916 | goto l288 | |
2917 | l289: | |
2918 | position, tokenIndex, depth = position289, tokenIndex289, depth289 | |
2919 | } | |
2920 | if buffer[position] != rune('}') { | |
2921 | goto l281 | |
2922 | } | |
2923 | position++ | |
2924 | } | |
2925 | l283: | |
2926 | depth-- | |
2927 | add(ruleActionBody, position282) | |
2928 | } | |
2929 | return true | |
2930 | l281: | |
2931 | position, tokenIndex, depth = position281, tokenIndex281, depth281 | |
2932 | return false | |
2933 | }, | |
2934 | /* 39 Begin <- <('<' Spacing)> */ | |
2935 | nil, | |
2936 | /* 40 End <- <('>' Spacing)> */ | |
2937 | nil, | |
2938 | /* 42 Action0 <- <{ p.AddPackage(text) }> */ | |
2939 | nil, | |
2940 | /* 43 Action1 <- <{ p.AddPeg(text) }> */ | |
2941 | nil, | |
2942 | /* 44 Action2 <- <{ p.AddState(text) }> */ | |
2943 | nil, | |
2944 | nil, | |
2945 | /* 46 Action3 <- <{ p.AddImport(text) }> */ | |
2946 | nil, | |
2947 | /* 47 Action4 <- <{ p.AddRule(text) }> */ | |
2948 | nil, | |
2949 | /* 48 Action5 <- <{ p.AddExpression() }> */ | |
2950 | nil, | |
2951 | /* 49 Action6 <- <{ p.AddAlternate() }> */ | |
2952 | nil, | |
2953 | /* 50 Action7 <- <{ p.AddNil(); p.AddAlternate() }> */ | |
2954 | nil, | |
2955 | /* 51 Action8 <- <{ p.AddNil() }> */ | |
2956 | nil, | |
2957 | /* 52 Action9 <- <{ p.AddSequence() }> */ | |
2958 | nil, | |
2959 | /* 53 Action10 <- <{ p.AddPredicate(text) }> */ | |
2960 | nil, | |
2961 | /* 54 Action11 <- <{ p.AddStateChange(text) }> */ | |
2962 | nil, | |
2963 | /* 55 Action12 <- <{ p.AddPeekFor() }> */ | |
2964 | nil, | |
2965 | /* 56 Action13 <- <{ p.AddPeekNot() }> */ | |
2966 | nil, | |
2967 | /* 57 Action14 <- <{ p.AddQuery() }> */ | |
2968 | nil, | |
2969 | /* 58 Action15 <- <{ p.AddStar() }> */ | |
2970 | nil, | |
2971 | /* 59 Action16 <- <{ p.AddPlus() }> */ | |
2972 | nil, | |
2973 | /* 60 Action17 <- <{ p.AddName(text) }> */ | |
2974 | nil, | |
2975 | /* 61 Action18 <- <{ p.AddDot() }> */ | |
2976 | nil, | |
2977 | /* 62 Action19 <- <{ p.AddAction(text) }> */ | |
2978 | nil, | |
2979 | /* 63 Action20 <- <{ p.AddPush() }> */ | |
2980 | nil, | |
2981 | /* 64 Action21 <- <{ p.AddSequence() }> */ | |
2982 | nil, | |
2983 | /* 65 Action22 <- <{ p.AddSequence() }> */ | |
2984 | nil, | |
2985 | /* 66 Action23 <- <{ p.AddPeekNot(); p.AddDot(); p.AddSequence() }> */ | |
2986 | nil, | |
2987 | /* 67 Action24 <- <{ p.AddPeekNot(); p.AddDot(); p.AddSequence() }> */ | |
2988 | nil, | |
2989 | /* 68 Action25 <- <{ p.AddAlternate() }> */ | |
2990 | nil, | |
2991 | /* 69 Action26 <- <{ p.AddAlternate() }> */ | |
2992 | nil, | |
2993 | /* 70 Action27 <- <{ p.AddRange() }> */ | |
2994 | nil, | |
2995 | /* 71 Action28 <- <{ p.AddDoubleRange() }> */ | |
2996 | nil, | |
2997 | /* 72 Action29 <- <{ p.AddCharacter(text) }> */ | |
2998 | nil, | |
2999 | /* 73 Action30 <- <{ p.AddDoubleCharacter(text) }> */ | |
3000 | nil, | |
3001 | /* 74 Action31 <- <{ p.AddCharacter(text) }> */ | |
3002 | nil, | |
3003 | /* 75 Action32 <- <{ p.AddCharacter("\a") }> */ | |
3004 | nil, | |
3005 | /* 76 Action33 <- <{ p.AddCharacter("\b") }> */ | |
3006 | nil, | |
3007 | /* 77 Action34 <- <{ p.AddCharacter("\x1B") }> */ | |
3008 | nil, | |
3009 | /* 78 Action35 <- <{ p.AddCharacter("\f") }> */ | |
3010 | nil, | |
3011 | /* 79 Action36 <- <{ p.AddCharacter("\n") }> */ | |
3012 | nil, | |
3013 | /* 80 Action37 <- <{ p.AddCharacter("\r") }> */ | |
3014 | nil, | |
3015 | /* 81 Action38 <- <{ p.AddCharacter("\t") }> */ | |
3016 | nil, | |
3017 | /* 82 Action39 <- <{ p.AddCharacter("\v") }> */ | |
3018 | nil, | |
3019 | /* 83 Action40 <- <{ p.AddCharacter("'") }> */ | |
3020 | nil, | |
3021 | /* 84 Action41 <- <{ p.AddCharacter("\"") }> */ | |
3022 | nil, | |
3023 | /* 85 Action42 <- <{ p.AddCharacter("[") }> */ | |
3024 | nil, | |
3025 | /* 86 Action43 <- <{ p.AddCharacter("]") }> */ | |
3026 | nil, | |
3027 | /* 87 Action44 <- <{ p.AddCharacter("-") }> */ | |
3028 | nil, | |
3029 | /* 88 Action45 <- <{ p.AddHexaCharacter(text) }> */ | |
3030 | nil, | |
3031 | /* 89 Action46 <- <{ p.AddOctalCharacter(text) }> */ | |
3032 | nil, | |
3033 | /* 90 Action47 <- <{ p.AddOctalCharacter(text) }> */ | |
3034 | nil, | |
3035 | /* 91 Action48 <- <{ p.AddCharacter("\\") }> */ | |
3036 | nil, | |
3037 | } | |
3038 | p.rules = _rules | |
3039 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
//go:build ignore
// +build ignore
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "flag" | |
10 | "fmt" | |
11 | "io/ioutil" | |
12 | "log" | |
13 | "os" | |
14 | "os/exec" | |
15 | "path/filepath" | |
16 | "reflect" | |
17 | "runtime" | |
18 | "strings" | |
19 | "text/template" | |
20 | "time" | |
21 | ) | |
22 | ||
23 | func main() { | |
24 | flag.Parse() | |
25 | ||
26 | args, target := flag.Args(), "peg" | |
27 | if len(args) > 0 { | |
28 | target = args[0] | |
29 | } | |
30 | ||
31 | switch target { | |
32 | case "buildinfo": | |
33 | buildinfo() | |
34 | case "peg": | |
35 | peg() | |
36 | case "clean": | |
37 | clean() | |
38 | case "test": | |
39 | test() | |
40 | case "bench": | |
41 | bench() | |
42 | case "help": | |
43 | fmt.Println("go run build.go [target]") | |
44 | fmt.Println(" peg - build peg from scratch") | |
45 | fmt.Println(" clean - clean up") | |
46 | fmt.Println(" test - run full test") | |
47 | fmt.Println(" bench - run benchmark") | |
48 | fmt.Println(" buildinfo - generate buildinfo.go") | |
49 | } | |
50 | } | |
51 | ||
52 | const BuildinfoTemplate = `// Code Generated by "build.go buildinfo" DO NOT EDIT. | |
53 | package main | |
54 | ||
55 | const ( | |
56 | // VERSION is the version of peg | |
57 | VERSION = "{{.Version}}" | |
58 | // BUILDTIME is the build time of peg | |
59 | BUILDTIME = "{{.Buildtime}}" | |
60 | // COMMIT is the commit hash of peg | |
61 | COMMIT = "{{.Commit}}" | |
62 | // IS_TAGGED is there a version | |
63 | IS_TAGGED = {{.IsTagged}} | |
64 | ) | |
65 | ` | |
66 | ||
67 | func buildinfo() { | |
68 | log.SetPrefix("buildinfo:") | |
69 | type info struct { | |
70 | Version string | |
71 | Buildtime string | |
72 | Commit string | |
73 | IsTagged bool | |
74 | } | |
75 | infFile, err := os.Create("buildinfo.go") | |
76 | defer infFile.Close() | |
77 | if err != nil { | |
78 | log.Println("open buildinfo.go: fatal:", err) | |
79 | } | |
80 | var inf info = info{ | |
81 | Version: "unknown", // show this if we can't get the version | |
82 | } | |
83 | vers, err := exec.Command("git", "tag", "--contains").Output() | |
84 | if err != nil { | |
85 | log.Println("error:", err) | |
86 | } else if len(vers) > 1 { // ignore any single newlines that might exist | |
87 | inf.IsTagged = true | |
88 | inf.Version = strings.TrimSuffix(string(vers), "\n") | |
89 | } else { | |
90 | vers, err = exec.Command("git", "tag", "--merged", "--sort=v:refname").Output() | |
91 | if err != nil { | |
92 | log.Println("error:", err) | |
93 | } else if len(vers) > 1 { | |
94 | tags := strings.Split(string(vers), "\n") | |
95 | inf.Version = tags[len(tags)-1] | |
96 | } | |
97 | } | |
98 | ||
99 | cmit, err := exec.Command("git", "rev-parse", "HEAD").Output() | |
100 | if err != nil { | |
101 | log.Println("error:", err) | |
102 | } | |
103 | inf.Commit = strings.TrimSuffix(string(cmit), "\n") | |
104 | // slice the constant to remove the timezone specifier | |
105 | inf.Buildtime = time.Now().UTC().Format(time.RFC3339[0:19]) | |
106 | ||
107 | err = template.Must(template.New("buildinfo").Parse(BuildinfoTemplate)).Execute(infFile, inf) | |
108 | if err != nil { | |
109 | log.Println("error: template:", err) | |
110 | } | |
111 | log.SetPrefix("") | |
112 | } | |
113 | ||
114 | var processed = make(map[string]bool) | |
115 | ||
116 | func done(file string, deps ...interface{}) bool { | |
117 | fini := true | |
118 | file = filepath.FromSlash(file) | |
119 | info, err := os.Stat(file) | |
120 | if err != nil { | |
121 | fini = false | |
122 | } | |
123 | for _, dep := range deps { | |
124 | switch dep := dep.(type) { | |
125 | case string: | |
126 | if info == nil { | |
127 | fini = false | |
128 | break | |
129 | } | |
130 | dep = filepath.FromSlash(dep) | |
131 | fileInfo, err := os.Stat(dep) | |
132 | if err != nil { | |
133 | panic(err) | |
134 | } | |
135 | ||
136 | if fileInfo.ModTime().After(info.ModTime()) { | |
137 | fini = false | |
138 | } | |
139 | case func() bool: | |
140 | name := runtime.FuncForPC(reflect.ValueOf(dep).Pointer()).Name() | |
141 | if result, ok := processed[name]; ok { | |
142 | fini = fini && result | |
143 | fmt.Printf("%s is done\n", name) | |
144 | break | |
145 | } | |
146 | result := dep() | |
147 | fini = fini && result | |
148 | fmt.Printf("%s\n", name) | |
149 | processed[name] = result | |
150 | } | |
151 | } | |
152 | ||
153 | return fini | |
154 | } | |
155 | ||
156 | func chdir(dir string) string { | |
157 | dir = filepath.FromSlash(dir) | |
158 | working, err := os.Getwd() | |
159 | if err != nil { | |
160 | panic(err) | |
161 | } | |
162 | err = os.Chdir(dir) | |
163 | if err != nil { | |
164 | panic(err) | |
165 | } | |
166 | fmt.Printf("cd %s\n", dir) | |
167 | return working | |
168 | } | |
169 | ||
170 | func command(name, inputFile, outputFile string, arg ...string) { | |
171 | name = filepath.FromSlash(name) | |
172 | inputFile = filepath.FromSlash(inputFile) | |
173 | outputFile = filepath.FromSlash(outputFile) | |
174 | fmt.Print(name) | |
175 | for _, a := range arg { | |
176 | fmt.Printf(" %s", a) | |
177 | } | |
178 | ||
179 | cmd := exec.Command(name, arg...) | |
180 | ||
181 | if inputFile != "" { | |
182 | fmt.Printf(" < %s", inputFile) | |
183 | input, err := ioutil.ReadFile(inputFile) | |
184 | if err != nil { | |
185 | panic(err) | |
186 | } | |
187 | writer, err := cmd.StdinPipe() | |
188 | if err != nil { | |
189 | panic(err) | |
190 | } | |
191 | go func() { | |
192 | defer writer.Close() | |
193 | _, err := writer.Write([]byte(input)) | |
194 | if err != nil { | |
195 | panic(err) | |
196 | } | |
197 | }() | |
198 | } | |
199 | ||
200 | if outputFile != "" { | |
201 | fmt.Printf(" > %s\n", outputFile) | |
202 | output, err := cmd.Output() | |
203 | if err != nil { | |
204 | panic(err) | |
205 | } | |
206 | err = ioutil.WriteFile(outputFile, output, 0600) | |
207 | if err != nil { | |
208 | panic(err) | |
209 | } | |
210 | } else { | |
211 | output, err := cmd.CombinedOutput() | |
212 | fmt.Printf("\n%s", string(output)) | |
213 | if err != nil { | |
214 | panic(err) | |
215 | } | |
216 | } | |
217 | } | |
218 | ||
219 | func delete(file string) { | |
220 | file = filepath.FromSlash(file) | |
221 | fmt.Printf("rm -f %s\n", file) | |
222 | os.Remove(file) | |
223 | } | |
224 | ||
// deleteFilesWithSuffix removes every file in the current working
// directory whose name ends in suffix, echoing each removal.
func deleteFilesWithSuffix(suffix string) {
	entries, err := ioutil.ReadDir(".")
	if err != nil {
		panic(err)
	}
	for _, entry := range entries {
		if !strings.HasSuffix(entry.Name(), suffix) {
			continue
		}
		// Inline of the delete() helper: echo, then remove.
		name := filepath.FromSlash(entry.Name())
		fmt.Printf("rm -f %s\n", name)
		os.Remove(name)
	}
}
236 | ||
237 | func bootstrap() bool { | |
238 | if done("bootstrap/bootstrap", "bootstrap/main.go", "tree/peg.go") { | |
239 | return true | |
240 | } | |
241 | ||
242 | wd := chdir("bootstrap") | |
243 | defer chdir(wd) | |
244 | ||
245 | command("go", "", "", "build") | |
246 | ||
247 | return false | |
248 | } | |
249 | ||
// peg0 builds the stage-0 parser: the hand-written bootstrap
// generator emits a minimal parser, which is compiled (with the
// bootstrap build tag) to cmd/peg-bootstrap/peg0.
func peg0() bool {
	if done("cmd/peg-bootstrap/peg0", "cmd/peg-bootstrap/main.go", bootstrap) {
		return true
	}

	wd := chdir("cmd/peg-bootstrap/")
	defer chdir(wd)

	// Each stage regenerates its *.peg.go from scratch.
	deleteFilesWithSuffix(".peg.go")
	command("../../bootstrap/bootstrap", "", "")
	command("go", "", "", "build", "-tags", "bootstrap", "-o", "peg0")

	return false
}

// peg1 builds the stage-1 parser by running the stage-0 binary on the
// core bootstrap grammar.
func peg1() bool {
	if done("cmd/peg-bootstrap/peg1", peg0, "cmd/peg-bootstrap/bootstrap.peg") {
		return true
	}

	wd := chdir("cmd/peg-bootstrap/")
	defer chdir(wd)

	deleteFilesWithSuffix(".peg.go")
	command("./peg0", "bootstrap.peg", "peg1.peg.go")
	command("go", "", "", "build", "-tags", "bootstrap", "-o", "peg1")

	return false
}

// peg2 builds the stage-2 parser by running the stage-1 binary on the
// richer bootstrap grammar.
func peg2() bool {
	if done("cmd/peg-bootstrap/peg2", peg1, "cmd/peg-bootstrap/peg.bootstrap.peg") {
		return true
	}

	wd := chdir("cmd/peg-bootstrap/")
	defer chdir(wd)

	deleteFilesWithSuffix(".peg.go")
	command("./peg1", "peg.bootstrap.peg", "peg2.peg.go")
	command("go", "", "", "build", "-tags", "bootstrap", "-o", "peg2")

	return false
}

// peg3 builds the stage-3 parser by running the stage-2 binary on the
// full peg.peg grammar at the repository root.
func peg3() bool {
	if done("cmd/peg-bootstrap/peg3", peg2, "peg.peg") {
		return true
	}

	wd := chdir("cmd/peg-bootstrap/")
	defer chdir(wd)

	deleteFilesWithSuffix(".peg.go")
	command("./peg2", "../../peg.peg", "peg3.peg.go")
	command("go", "", "", "build", "-tags", "bootstrap", "-o", "peg3")

	return false
}

// peg_bootstrap runs stage 3 on the full grammar once more, producing
// the final self-hosted bootstrap binary used to generate peg.peg.go.
func peg_bootstrap() bool {
	if done("cmd/peg-bootstrap/peg-bootstrap", peg3) {
		return true
	}

	wd := chdir("cmd/peg-bootstrap/")
	defer chdir(wd)

	deleteFilesWithSuffix(".peg.go")
	command("./peg3", "../../peg.peg", "peg-bootstrap.peg.go")
	command("go", "", "", "build", "-tags", "bootstrap", "-o", "peg-bootstrap")

	return false
}
324 | ||
// peg_peg_go regenerates peg.peg.go with the final bootstrap binary,
// builds peg from it, then reruns peg on its own grammar so the
// checked-in parser is fully self-hosted.
func peg_peg_go() bool {
	if done("peg.peg.go", peg_bootstrap) {
		return true
	}

	command("cmd/peg-bootstrap/peg-bootstrap", "peg.peg", "peg.peg.go")
	command("go", "", "", "build")
	command("./peg", "", "", "-inline", "-switch", "peg.peg")

	return false
}

// peg builds the final peg binary when peg.peg.go or main.go changed.
func peg() bool {
	if done("peg", peg_peg_go, "main.go") {
		return true
	}

	command("go", "", "", "build")

	return false
}
346 | ||
347 | func clean() bool { | |
348 | delete("bootstrap/bootstrap") | |
349 | ||
350 | delete("grammars/c/c.peg.go") | |
351 | delete("grammars/calculator/calculator.peg.go") | |
352 | delete("grammars/fexl/fexl.peg.go") | |
353 | delete("grammars/java/java_1_7.peg.go") | |
354 | delete("grammars/long_test/long.peg.go") | |
355 | ||
356 | wd := chdir("cmd/peg-bootstrap/") | |
357 | defer chdir(wd) | |
358 | ||
359 | deleteFilesWithSuffix(".peg.go") | |
360 | delete("peg0") | |
361 | delete("peg1") | |
362 | delete("peg2") | |
363 | delete("peg3") | |
364 | delete("peg-bootstrap") | |
365 | ||
366 | return false | |
367 | } | |
368 | ||
// grammars_c regenerates the example C grammar parser when peg or the
// grammar changed.
func grammars_c() bool {
	if done("grammars/c/c.peg.go", peg, "grammars/c/c.peg") {
		return true
	}

	wd := chdir("grammars/c/")
	defer chdir(wd)

	command("../../peg", "", "", "-switch", "-inline", "c.peg")

	return false
}

// grammars_calculator regenerates the example calculator parser.
func grammars_calculator() bool {
	if done("grammars/calculator/calculator.peg.go", peg, "grammars/calculator/calculator.peg") {
		return true
	}

	wd := chdir("grammars/calculator/")
	defer chdir(wd)

	command("../../peg", "", "", "-switch", "-inline", "calculator.peg")

	return false
}

// grammars_calculator_ast regenerates the AST-based calculator parser.
func grammars_calculator_ast() bool {
	if done("grammars/calculator_ast/calculator.peg.go", peg, "grammars/calculator_ast/calculator.peg") {
		return true
	}

	wd := chdir("grammars/calculator_ast/")
	defer chdir(wd)

	command("../../peg", "", "", "-switch", "-inline", "calculator.peg")

	return false
}

// grammars_fexl regenerates the example fexl parser.
func grammars_fexl() bool {
	if done("grammars/fexl/fexl.peg.go", peg, "grammars/fexl/fexl.peg") {
		return true
	}

	wd := chdir("grammars/fexl/")
	defer chdir(wd)

	command("../../peg", "", "", "-switch", "-inline", "fexl.peg")

	return false
}

// grammars_java regenerates the example Java 1.7 parser.
func grammars_java() bool {
	if done("grammars/java/java_1_7.peg.go", peg, "grammars/java/java_1_7.peg") {
		return true
	}

	wd := chdir("grammars/java/")
	defer chdir(wd)

	command("../../peg", "", "", "-switch", "-inline", "java_1_7.peg")

	return false
}

// grammars_long_test regenerates the stress-test grammar parser.
func grammars_long_test() bool {
	if done("grammars/long_test/long.peg.go", peg, "grammars/long_test/long.peg") {
		return true
	}

	wd := chdir("grammars/long_test/")
	defer chdir(wd)

	command("../../peg", "", "", "-switch", "-inline", "long.peg")

	return false
}
446 | ||
// test regenerates every example grammar parser, then runs the short
// test suite with the grammars build tag enabled.
func test() bool {
	if done("", grammars_c, grammars_calculator, grammars_calculator_ast,
		grammars_fexl, grammars_java, grammars_long_test) {
		return true
	}

	command("go", "", "", "test", "-short", "-tags", "grammars", "./...")

	return false
}

// bench makes sure peg itself is built, then runs the benchmarks.
func bench() bool {
	peg()

	command("go", "", "", "test", "-benchmem", "-bench", ".")

	return false
}
0 | // Code Generated by "build.go buildinfo" DO NOT EDIT. | |
1 | package main | |
2 | ||
3 | const ( | |
4 | // VERSION is the version of peg | |
5 | VERSION = "unknown" | |
6 | // BUILDTIME is the build time of peg | |
7 | BUILDTIME = "2020-08-26T03:40:14" | |
8 | // COMMIT is the commit hash of peg | |
9 | COMMIT = "5cdb3adc061370cdd20392ffe2740cc8db104126" | |
10 | // IS_TAGGED is there a version | |
11 | IS_TAGGED = false | |
12 | ) |
# Core bootstrap PE Grammar for peg language.
# Adapted from peg.peg.

# Top level: emit fixed package/import/type boilerplate via actions,
# then accept leading actions and rule definitions until end of input.
Grammar <- Spacing { p.AddPackage("main") }
           { p.AddImport("github.com/pointlander/peg/tree") }
           { p.AddPeg("Peg"); p.AddState("*tree.Tree") }
           Action* Definition* !.

# Hierarchical syntax: rules, ordered choice, sequences, predicates.
Definition <- Identifier { p.AddRule(text) }
     LeftArrow Expression { p.AddExpression() }
Expression <- Sequence (Slash Sequence { p.AddAlternate() } )*
Sequence <- Prefix (Prefix { p.AddSequence() } )*
Prefix <- '!' Suffix { p.AddPeekNot() } / Suffix
Suffix <- Primary (Question { p.AddQuery() }
     / Star { p.AddStar() } )?
Primary <- Identifier !LeftArrow { p.AddName(text) }
     / Open Expression Close
     / Literal / Class / Dot { p.AddDot() }
     / Action { p.AddAction(text) }
     / Begin Expression End { p.AddPush() }

# Lexical syntax.
Identifier <- < Ident Ident* > Spacing
Ident <- [A-Za-z]
Literal <- ['] !['] Char (!['] Char { p.AddSequence() } )* ['] Spacing
Class <- '[' Range (!']' Range { p.AddAlternate() } )* ']' Spacing
Range <- Char '-' Char { p.AddRange() } / Char
# A character is a \0x.. hex escape, an escaped backslash, or any
# single unescaped byte.
Char <- '\\0x' <[0-9a-f]*> { p.AddHexaCharacter(text) }
     / '\\\\' { p.AddCharacter("\\") }
     / !'\\' <.> { p.AddCharacter(text) }

# Operators; each consumes its own trailing whitespace.
LeftArrow <- '<-' Spacing
Slash <- '/' Spacing
Question <- '?' Spacing
Star <- '*' Spacing
Open <- '(' Spacing
Close <- ')' Spacing
Dot <- '.' Spacing

# Whitespace and '#' line comments.
Spacing <- (Space / Comment)*
Comment <- '#' (!EndOfLine .)*
Space <- ' ' / '\0x9' / EndOfLine
EndOfLine <- '\0xd\0xa' / '\0xa' / '\0xd'

# Inline Go action blocks and < > capture delimiters.
Action <- '{' < (![}].)* > '}' Spacing
Begin <- '<' Spacing
End <- '>' Spacing
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | //go:build bootstrap | |
5 | // +build bootstrap | |
6 | ||
7 | package main | |
8 | ||
9 | import ( | |
10 | "io/ioutil" | |
11 | "log" | |
12 | "os" | |
13 | ||
14 | "github.com/pointlander/peg/tree" | |
15 | ) | |
16 | ||
17 | func main() { | |
18 | buffer, err := ioutil.ReadAll(os.Stdin) | |
19 | if err != nil { | |
20 | log.Fatal(err) | |
21 | } | |
22 | p := &Peg{Tree: tree.New(false, false, false), Buffer: string(buffer)} | |
23 | p.Init(Pretty(true), Size(1<<15)) | |
24 | if err := p.Parse(); err != nil { | |
25 | log.Fatal(err) | |
26 | } | |
27 | p.Execute() | |
28 | p.Compile("boot.peg.go", os.Args, os.Stdout) | |
29 | } |
0 | # PE Grammar for bootstrap peg language | |
1 | # | |
2 | # Adapted from peg.peg. | |
3 | ||
4 | # Hierarchical syntax | |
5 | Grammar <- Spacing 'package' MustSpacing Identifier { p.AddPackage(text) } | |
6 | Import* | |
7 | 'type' MustSpacing Identifier { p.AddPeg(text) } | |
8 | 'Peg' Spacing Action { p.AddState(text) } | |
9 | Definition Definition* EndOfFile | |
10 | ||
11 | Import <- 'import' Spacing ["] < ([a-zA-Z_/.]/'-')([a-zA-Z_/.]/'-')* > ["] Spacing { p.AddImport(text) } | |
12 | ||
13 | Definition <- Identifier { p.AddRule(text) } | |
14 | LeftArrow Expression { p.AddExpression() } | |
15 | Expression <- Sequence (Slash Sequence { p.AddAlternate() } | |
16 | )* (Slash { p.AddNil(); p.AddAlternate() } | |
17 | )? | |
18 | / { p.AddNil() } | |
19 | Sequence <- Prefix (Prefix { p.AddSequence() } | |
20 | )* | |
21 | Prefix <- And Action { p.AddPredicate(text) } | |
22 | / Not Action { p.AddStateChange(text) } | |
23 | / And Suffix { p.AddPeekFor() } | |
24 | / Not Suffix { p.AddPeekNot() } | |
25 | / Suffix | |
26 | Suffix <- Primary (Question { p.AddQuery() } | |
27 | / Star { p.AddStar() } | |
28 | / Plus { p.AddPlus() } | |
29 | )? | |
30 | Primary <- Identifier !LeftArrow { p.AddName(text) } | |
31 | / Open Expression Close | |
32 | / Literal | |
33 | / Class | |
34 | / Dot { p.AddDot() } | |
35 | / Action { p.AddAction(text) } | |
36 | / Begin Expression End { p.AddPush() } | |
37 | ||
38 | # Lexical syntax | |
39 | ||
40 | Identifier <- < IdentStart IdentCont* > Spacing | |
41 | IdentStart <- [A-Za-z_] | |
42 | IdentCont <- IdentStart / [0-9] | |
43 | Literal <- ['] (!['] Char)? (!['] Char { p.AddSequence() } | |
44 | )* ['] Spacing | |
45 | / ["] (!["] DoubleChar)? (!["] DoubleChar { p.AddSequence() } | |
46 | )* ["] Spacing | |
47 | Class <- ( '[[' ( '^' DoubleRanges { p.AddPeekNot(); p.AddDot(); p.AddSequence() } | |
48 | / DoubleRanges )? | |
49 | ']]' | |
50 | / '[' ( '^' Ranges { p.AddPeekNot(); p.AddDot(); p.AddSequence() } | |
51 | / Ranges )? | |
52 | ']' ) | |
53 | Spacing | |
54 | Ranges <- !']' Range (!']' Range { p.AddAlternate() } | |
55 | )* | |
56 | DoubleRanges <- !']]' DoubleRange (!']]' DoubleRange { p.AddAlternate() } | |
57 | )* | |
58 | Range <- Char '-' Char { p.AddRange() } | |
59 | / Char | |
60 | DoubleRange <- Char '-' Char { p.AddDoubleRange() } | |
61 | / DoubleChar | |
62 | Char <- Escape | |
63 | / !'\\' <.> { p.AddCharacter(text) } | |
64 | DoubleChar <- Escape | |
65 | / <[a-zA-Z]> { p.AddDoubleCharacter(text) } | |
66 | / !'\\' <.> { p.AddCharacter(text) } | |
67 | Escape <- '\\' [aA] { p.AddCharacter("\a") } # bell | |
68 | / '\\' [bB] { p.AddCharacter("\b") } # bs | |
69 | / '\\' [eE] { p.AddCharacter("\x1B") } # esc | |
70 | / '\\' [fF] { p.AddCharacter("\f") } # ff | |
71 | / '\\' [nN] { p.AddCharacter("\n") } # nl | |
72 | / '\\' [rR] { p.AddCharacter("\r") } # cr | |
73 | / '\\' [tT] { p.AddCharacter("\t") } # ht | |
74 | / '\\' [vV] { p.AddCharacter("\v") } # vt | |
75 | / '\\' ['] { p.AddCharacter("'") } | |
76 | / '\\"' { p.AddCharacter("\"") } | |
77 | / '\\[' { p.AddCharacter("[") } | |
78 | / '\\]' { p.AddCharacter("]") } | |
79 | / '\\-' { p.AddCharacter("-") } | |
80 | / '\\' '0'[xX] <[0-9a-fA-F][0-9a-fA-F]*> { p.AddHexaCharacter(text) } | |
81 | / '\\' <[0-3][0-7][0-7]> { p.AddOctalCharacter(text) } | |
82 | / '\\' <[0-7][0-7]?> { p.AddOctalCharacter(text) } | |
83 | / '\\\\' { p.AddCharacter("\\") } | |
84 | LeftArrow <- ('<-' / '\0x2190') Spacing | |
85 | Slash <- '/' Spacing | |
86 | And <- '&' Spacing | |
87 | Not <- '!' Spacing | |
88 | Question <- '?' Spacing | |
89 | Star <- '*' Spacing | |
90 | Plus <- '+' Spacing | |
91 | Open <- '(' Spacing | |
92 | Close <- ')' Spacing | |
93 | Dot <- '.' Spacing | |
94 | SpaceComment <- (Space / Comment) | |
95 | Spacing <- SpaceComment* | |
96 | MustSpacing <- SpaceComment Spacing | |
97 | Comment <- '#' (!EndOfLine .)* EndOfLine | |
98 | Space <- ' ' / '\0x9' / EndOfLine | |
99 | EndOfLine <- '\0xd\0xa' / '\0xa' / '\0xd' | |
100 | EndOfFile <- !. | |
101 | Action <- '{' < ActionBody* > '}' Spacing | |
102 | ActionBody <- ![{}]. / '{' ActionBody* '}' | |
103 | Begin <- '<' Spacing | |
104 | End <- '>' Spacing | |
105 |
0 | golang-github-pointlander-peg (1.0.1+git20230113.1.2b75877-1) UNRELEASED; urgency=low | |
1 | ||
2 | * New upstream snapshot. | |
3 | ||
4 | -- Debian Janitor <janitor@jelmer.uk> Thu, 19 Jan 2023 10:01:44 -0000 | |
5 | ||
0 | 6 | golang-github-pointlander-peg (1.0.0-5) unstable; urgency=medium |
1 | 7 | |
2 | 8 | * Team upload. |
0 | module github.com/pointlander/peg | |
1 | ||
2 | require github.com/pointlander/jetset v1.0.1-0.20190518214125-eee7eff80bd4 | |
3 | ||
4 | go 1.13 |
0 | github.com/pointlander/compress v1.1.0 h1:5fUcQV2qEHvk0OpILH6eltwluN5VnwiYrkc1wjGUHnU= | |
1 | github.com/pointlander/compress v1.1.0/go.mod h1:q5NXNGzqj5uPnVuhGkZfmgHqNUhf15VLi6L9kW0VEc0= | |
2 | github.com/pointlander/compress v1.1.1-0.20190518213731-ff44bd196cc3 h1:hUmXhbljNFtrH5hzV9kiRoddZ5nfPTq3K0Sb2hYYiqE= | |
3 | github.com/pointlander/compress v1.1.1-0.20190518213731-ff44bd196cc3/go.mod h1:q5NXNGzqj5uPnVuhGkZfmgHqNUhf15VLi6L9kW0VEc0= | |
4 | github.com/pointlander/jetset v1.0.0 h1:bNlaNAX7cDPID9SlcogmXlDWq0KcRJSpKwHXaAM3bGQ= | |
5 | github.com/pointlander/jetset v1.0.0/go.mod h1:zY6+WHRPB10uzTajloHtybSicLW1bf6Rz0eSaU9Deng= | |
6 | github.com/pointlander/jetset v1.0.1-0.20190518214125-eee7eff80bd4 h1:RHHRCZeaNyBXdYPMjZNH8/XHDBH38TZzw8izrW7dmBE= | |
7 | github.com/pointlander/jetset v1.0.1-0.20190518214125-eee7eff80bd4/go.mod h1:RdR1j20Aj5pB6+fw6Y9Ur7lMHpegTEjY1vc19hEZL40= |
0 | # Copyright 2010 The Go Authors. All rights reserved. | |
1 | # Use of this source code is governed by a BSD-style | |
2 | # license that can be found in the LICENSE file. | |
3 | ||
4 | c: c.peg.go main.go | |
5 | go build | |
6 | ||
7 | c.peg.go: c.peg | |
8 | ../../peg -switch -inline c.peg | |
9 | ||
10 | clean: | |
11 | rm -f c c.peg.go |
109 | 109 | |
110 | 110 | } |
111 | 111 | |
112 | TranslationUnit <- Spacing ExternalDeclaration+ EOT | |
112 | TranslationUnit <- Spacing ( ExternalDeclaration / SEMI ) * EOT | |
113 | 113 | |
114 | 114 | ExternalDeclaration <- FunctionDefinition / Declaration |
115 | 115 | |
170 | 170 | |
171 | 171 | StructOrUnionSpecifier |
172 | 172 | <- StructOrUnion |
173 | ( Identifier? LWING StructDeclaration+ RWING | |
173 | ( Identifier? LWING StructDeclaration* RWING | |
174 | 174 | / Identifier |
175 | 175 | ) |
176 | 176 | |
177 | 177 | StructOrUnion <- STRUCT / UNION |
178 | 178 | |
179 | StructDeclaration <- SpecifierQualifierList StructDeclaratorList SEMI | |
179 | StructDeclaration <- ( SpecifierQualifierList StructDeclaratorList? )? SEMI | |
180 | 180 | |
181 | 181 | SpecifierQualifierList |
182 | 182 | <- ( TypeQualifier* |
312 | 312 | #------------------------------------------------------------------------- |
313 | 313 | |
314 | 314 | PrimaryExpression |
315 | <- Identifier | |
315 | <- StringLiteral | |
316 | 316 | / Constant |
317 | / StringLiteral | |
317 | / Identifier | |
318 | 318 | / LPAR Expression RPAR |
319 | 319 | |
320 | 320 | PostfixExpression |
346 | 346 | / TILDA |
347 | 347 | / BANG |
348 | 348 | |
349 | CastExpression <- (LPAR TypeName RPAR)* UnaryExpression | |
349 | CastExpression <- (LPAR TypeName RPAR CastExpression) / UnaryExpression | |
350 | 350 | |
351 | 351 | MultiplicativeExpression <- CastExpression ((STAR / DIV / MOD) CastExpression)* |
352 | 352 | |
607 | 607 | / HexEscape |
608 | 608 | / UniversalCharacter |
609 | 609 | |
610 | SimpleEscape <- '\\' ['\"?\\abfnrtv] | |
610 | SimpleEscape <- '\\' ['\"?\\%abfnrtv] | |
611 | 611 | OctalEscape <- '\\' [0-7][0-7]?[0-7]? |
612 | 612 | HexEscape <- '\\x' HexDigit+ |
613 | 613 |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | // +build grammars | |
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "fmt" | |
10 | "io/ioutil" | |
11 | "log" | |
12 | "os" | |
13 | "strings" | |
14 | "testing" | |
15 | ) | |
16 | ||
17 | func parseCBuffer(buffer string) (*C, error) { | |
18 | clang := &C{Buffer: buffer} | |
19 | clang.Init() | |
20 | err := clang.Parse() | |
21 | return clang, err | |
22 | } | |
23 | ||
24 | func parseC_4t(t *testing.T, src string) *C { | |
25 | c, err := parseCBuffer(src) | |
26 | if err != nil { | |
27 | t.Fatal(err) | |
28 | } | |
29 | return c | |
30 | } | |
31 | ||
32 | func noParseC_4t(t *testing.T, src string) { | |
33 | _, err := parseCBuffer(src) | |
34 | if err == nil { | |
35 | t.Fatal("Parsed what should not have parsed.") | |
36 | } | |
37 | } | |
38 | ||
// TestCParsing_Expressions1 covers basic statement-level expressions:
// parenthesised identifiers, increments, arithmetic, member access.
func TestCParsing_Expressions1(t *testing.T) {
	case1src :=
		`int a() {
(es);
1++;
1+1;
a+1;
(a)+1;
a->x;
return 0;
}`
	parseC_4t(t, case1src)
}

// TestCParsing_Expressions2 covers parenthesised return expressions.
func TestCParsing_Expressions2(t *testing.T) {
	parseC_4t(t,
		`int a() {
if (a) { return (a); }

return (0);
return a+b;
return (a+b);
return (a)+0;
}`)

	parseC_4t(t, `int a() { return (a)+0; }`)
}

// TestCParsing_Expressions3 covers operators applied to parenthesised
// operands on either side.
func TestCParsing_Expressions3(t *testing.T) {
	parseC_4t(t,
		`int a() {
1+(a);
(a)++;
(es)++;
(es)||a;
(es)->a;
return (a)+(b);
return 0+(a);
}`)
}

// TestCParsing_Expressions4 covers a minimal single-statement body.
func TestCParsing_Expressions4(t *testing.T) {
	parseC_4t(t, `int a(){1+(a);}`)
}

// TestCParsing_Expressions5 covers a cast of a constant in a return.
func TestCParsing_Expressions5(t *testing.T) {
	parseC_4t(t, `int a(){return (int)0;}`)
}

// TestCParsing_Expressions6 covers a cast to a non-keyword type name.
func TestCParsing_Expressions6(t *testing.T) {
	parseC_4t(t, `int a(){return (in)0;}`)
}

// TestCParsing_Expressions7 covers a newline between signature and body.
func TestCParsing_Expressions7(t *testing.T) {
	parseC_4t(t, `int a()
{ return (0); }`)
}

// TestCParsing_Cast0 covers a cast expression as a statement.
func TestCParsing_Cast0(t *testing.T) {
	parseC_4t(t, `int a(){(cast)0;}`)
}

// TestCParsing_Cast1 covers pointer-type casts, plain and struct.
func TestCParsing_Cast1(t *testing.T) {
	parseC_4t(t, `int a(){(m*)(rsp);}`)
	parseC_4t(t, `int a(){(struct m*)(rsp);}`)
}
100 | ||
// TestCParsing_Empty checks that a file containing only a comment
// parses (an empty translation unit is valid).
func TestCParsing_Empty(t *testing.T) {
	parseC_4t(t, `/** empty is valid. */ `)
}

// TestCParsing_EmptyStruct checks empty struct declaration forms.
func TestCParsing_EmptyStruct(t *testing.T) {
	parseC_4t(t, `struct empty{};`)
	parseC_4t(t, `struct {} empty;`)
	parseC_4t(t, `struct empty {} empty;`)
}

// TestCParsing_EmptyEmbeddedUnion checks an anonymous union embedded
// in a struct body.
func TestCParsing_EmptyEmbeddedUnion(t *testing.T) {
	parseC_4t(t, `struct empty{
union {
int a;
char b;
};
};`)
}

// TestCParsing_ExtraSEMI checks stray semicolons at file scope parse,
// while a struct with no terminating semicolon still fails.
func TestCParsing_ExtraSEMI(t *testing.T) {
	parseC_4t(t, `int func(){}
;
struct {} empty;
struct {} empty;;
int foo() {};
int foo() {};;
`)

	noParseC_4t(t, `struct empty{}`)
}

// TestCParsing_ExtraSEMI2 checks a stray semicolon inside a struct body.
func TestCParsing_ExtraSEMI2(t *testing.T) {
	parseC_4t(t, `
struct a { int b; ; };
`)

	noParseC_4t(t, `struct empty{}`)
}

// TestCParsing_Escapes checks string and character escape sequences.
func TestCParsing_Escapes(t *testing.T) {
	parseC_4t(t, `
int f() {
printf("%s", "\a\b\f\n\r\t\v");
printf("\\");
printf("\%");
printf("\"");
printf('\"'); // <- semantically wrong but syntactically valid.
}`)
}
146 | ||
// TestCParsing_Long recursively parses every *.c file found under the
// local c/ directory. Skipped under -short since it depends on an
// external corpus and can take a while.
func TestCParsing_Long(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping c parsing long test")
	}

	var walk func(name string)
	walk = func(name string) {
		fileInfo, err := os.Stat(name)
		if err != nil {
			log.Fatal(err)
		}

		if fileInfo.Mode()&(os.ModeNamedPipe|os.ModeSocket|os.ModeDevice) != 0 {
			/* will lock up if opened */
		} else if fileInfo.IsDir() {
			fmt.Printf("directory %v\n", name)

			file, err := os.Open(name)
			if err != nil {
				log.Fatal(err)
			}

			files, err := file.Readdir(-1)
			if err != nil {
				log.Fatal(err)
			}
			file.Close()

			for _, f := range files {
				// Ensure exactly one path separator before descending.
				if !strings.HasSuffix(name, "/") {
					name += "/"
				}
				walk(name + f.Name())
			}
		} else if strings.HasSuffix(name, ".c") {
			fmt.Printf("parse %v\n", name)

			file, err := os.Open(name)
			if err != nil {
				log.Fatal(err)
			}

			buffer, err := ioutil.ReadAll(file)
			if err != nil {
				log.Fatal(err)
			}
			file.Close()

			// A parse failure aborts the whole run via log.Fatal.
			clang := &C{Buffer: string(buffer)}
			clang.Init()
			if err := clang.Parse(); err != nil {
				log.Fatal(err)
			}
		}
	}
	walk("c/")
}
204 | ||
205 | func TestCParsing_WideString(t *testing.T) { | |
206 | parseC_4t(t, `wchar_t *msg = L"Hello";`); | |
207 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | package main | |
5 | ||
6 | import ( | |
7 | "fmt" | |
8 | "io/ioutil" | |
9 | "log" | |
10 | "os" | |
11 | "strings" | |
12 | ) | |
13 | ||
14 | func main() { | |
15 | if len(os.Args) < 2 { | |
16 | fmt.Printf("%v FILE\n", os.Args[0]) | |
17 | os.Exit(1) | |
18 | } | |
19 | ||
20 | var walk func(name string) | |
21 | walk = func(name string) { | |
22 | fileInfo, err := os.Stat(name) | |
23 | if err != nil { | |
24 | log.Fatal(err) | |
25 | } | |
26 | ||
27 | if fileInfo.Mode() & (os.ModeNamedPipe | os.ModeSocket | os.ModeDevice) != 0 { | |
28 | /* will lock up if opened */ | |
29 | } else if fileInfo.IsDir() { | |
30 | fmt.Printf("directory %v\n", name) | |
31 | ||
32 | file, err := os.Open(name) | |
33 | if err != nil { | |
34 | log.Fatal(err) | |
35 | } | |
36 | ||
37 | files, err := file.Readdir(-1) | |
38 | if err != nil { | |
39 | log.Fatal(err) | |
40 | } | |
41 | file.Close() | |
42 | ||
43 | for _, f := range files { | |
44 | if !strings.HasSuffix(name, "/") { | |
45 | name += "/" | |
46 | } | |
47 | walk(name + f.Name()) | |
48 | } | |
49 | } else if strings.HasSuffix(name, ".c") { | |
50 | fmt.Printf("parse %v\n", name) | |
51 | ||
52 | file, err := os.Open(name) | |
53 | if err != nil { | |
54 | log.Fatal(err) | |
55 | } | |
56 | ||
57 | buffer, err := ioutil.ReadAll(file) | |
58 | if err != nil { | |
59 | log.Fatal(err) | |
60 | } | |
61 | file.Close() | |
62 | ||
63 | clang := &C{Buffer: string(buffer)} | |
64 | clang.Init() | |
65 | if err := clang.Parse(); err != nil { | |
66 | log.Fatal(err) | |
67 | } | |
68 | } | |
69 | } | |
70 | walk(os.Args[1]) | |
71 | } |
0 | # Copyright 2010 The Go Authors. All rights reserved. | |
1 | # Use of this source code is governed by a BSD-style | |
2 | # license that can be found in the LICENSE file. | |
3 | ||
4 | calculator: calculator.peg.go calculator.go main.go | |
5 | go build | |
6 | ||
7 | calculator.peg.go: calculator.peg | |
8 | ../../peg -switch -inline calculator.peg | |
9 | ||
10 | clean: | |
11 | rm -f calculator calculator.peg.go |
0 | 0 | // Copyright 2010 The Go Authors. All rights reserved. |
1 | 1 | // Use of this source code is governed by a BSD-style |
2 | 2 | // license that can be found in the LICENSE file. |
3 | ||
4 | // +build grammars | |
3 | 5 | |
4 | 6 | package main |
5 | 7 |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | // +build grammars | |
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "math/big" | |
10 | "testing" | |
11 | ) | |
12 | ||
13 | func TestCalculator(t *testing.T) { | |
14 | expression := "( 1 - -3 ) / 3 + 2 * ( 3 + -4 ) + 3 % 2^2" | |
15 | calc := &Calculator{Buffer: expression} | |
16 | calc.Init() | |
17 | calc.Expression.Init(expression) | |
18 | if err := calc.Parse(); err != nil { | |
19 | t.Fatal(err) | |
20 | } | |
21 | calc.Execute() | |
22 | if calc.Evaluate().Cmp(big.NewInt(2)) != 0 { | |
23 | t.Fatal("got incorrect result") | |
24 | } | |
25 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | package main | |
5 | ||
6 | import ( | |
7 | "fmt" | |
8 | "log" | |
9 | "os" | |
10 | ) | |
11 | ||
12 | func main() { | |
13 | if len(os.Args) < 2 { | |
14 | name := os.Args[0] | |
15 | fmt.Printf("Usage: %v \"EXPRESSION\"\n", name) | |
16 | fmt.Printf("Example: %v \"( 1 - -3 ) / 3 + 2 * ( 3 + -4 ) + 3 %% 2^2\"\n =2\n", name) | |
17 | os.Exit(1) | |
18 | } | |
19 | expression := os.Args[1] | |
20 | calc := &Calculator{Buffer: expression} | |
21 | calc.Init() | |
22 | calc.Expression.Init(expression) | |
23 | if err := calc.Parse(); err != nil { | |
24 | log.Fatal(err) | |
25 | } | |
26 | calc.Execute() | |
27 | fmt.Printf("= %v\n", calc.Evaluate()) | |
28 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | // +build grammars | |
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "math/big" | |
10 | ) | |
11 | ||
12 | func (c *Calculator) Eval() *big.Int { | |
13 | return c.Rulee(c.AST()) | |
14 | } | |
15 | ||
16 | func (c *Calculator) Rulee(node *node32) *big.Int { | |
17 | node = node.up | |
18 | for node != nil { | |
19 | switch node.pegRule { | |
20 | case rulee1: | |
21 | return c.Rulee1(node) | |
22 | } | |
23 | node = node.next | |
24 | } | |
25 | return nil | |
26 | } | |
27 | ||
28 | func (c *Calculator) Rulee1(node *node32) *big.Int { | |
29 | node = node.up | |
30 | var a *big.Int | |
31 | for node != nil { | |
32 | switch node.pegRule { | |
33 | case rulee2: | |
34 | a = c.Rulee2(node) | |
35 | case ruleadd: | |
36 | node = node.next | |
37 | b := c.Rulee2(node) | |
38 | a.Add(a, b) | |
39 | case ruleminus: | |
40 | node = node.next | |
41 | b := c.Rulee2(node) | |
42 | a.Sub(a, b) | |
43 | } | |
44 | node = node.next | |
45 | } | |
46 | return a | |
47 | } | |
48 | ||
49 | func (c *Calculator) Rulee2(node *node32) *big.Int { | |
50 | node = node.up | |
51 | var a *big.Int | |
52 | for node != nil { | |
53 | switch node.pegRule { | |
54 | case rulee3: | |
55 | a = c.Rulee3(node) | |
56 | case rulemultiply: | |
57 | node = node.next | |
58 | b := c.Rulee3(node) | |
59 | a.Mul(a, b) | |
60 | case ruledivide: | |
61 | node = node.next | |
62 | b := c.Rulee3(node) | |
63 | a.Div(a, b) | |
64 | case rulemodulus: | |
65 | node = node.next | |
66 | b := c.Rulee3(node) | |
67 | a.Mod(a, b) | |
68 | } | |
69 | node = node.next | |
70 | } | |
71 | return a | |
72 | } | |
73 | ||
74 | func (c *Calculator) Rulee3(node *node32) *big.Int { | |
75 | node = node.up | |
76 | var a *big.Int | |
77 | for node != nil { | |
78 | switch node.pegRule { | |
79 | case rulee4: | |
80 | a = c.Rulee4(node) | |
81 | case ruleexponentiation: | |
82 | node = node.next | |
83 | b := c.Rulee4(node) | |
84 | a.Exp(a, b, nil) | |
85 | } | |
86 | node = node.next | |
87 | } | |
88 | return a | |
89 | } | |
90 | ||
91 | func (c *Calculator) Rulee4(node *node32) *big.Int { | |
92 | node = node.up | |
93 | minus := false | |
94 | for node != nil { | |
95 | switch node.pegRule { | |
96 | case rulevalue: | |
97 | a := c.Rulevalue(node) | |
98 | if minus { | |
99 | a.Neg(a) | |
100 | } | |
101 | return a | |
102 | case ruleminus: | |
103 | minus = true | |
104 | } | |
105 | node = node.next | |
106 | } | |
107 | return nil | |
108 | } | |
109 | ||
110 | func (c *Calculator) Rulevalue(node *node32) *big.Int { | |
111 | node = node.up | |
112 | for node != nil { | |
113 | switch node.pegRule { | |
114 | case rulenumber: | |
115 | a := big.NewInt(0) | |
116 | a.SetString(string(c.buffer[node.begin:node.end]), 10) | |
117 | return a | |
118 | case rulesub: | |
119 | return c.Rulesub(node) | |
120 | } | |
121 | node = node.next | |
122 | } | |
123 | return nil | |
124 | } | |
125 | ||
126 | func (c *Calculator) Rulesub(node *node32) *big.Int { | |
127 | node = node.up | |
128 | for node != nil { | |
129 | switch node.pegRule { | |
130 | case rulee1: | |
131 | return c.Rulee1(node) | |
132 | } | |
133 | node = node.next | |
134 | } | |
135 | return nil | |
136 | } |
# Copyright 2010 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

package main

# Parser type generated for this grammar; no extra parser state is needed.
type Calculator Peg {
}

# Entry point: optional leading whitespace, one expression, end of input.
e <- sp e1 !.
# Precedence levels, lowest binding first:
# e1 — addition / subtraction (left-associative chain)
e1 <- e2 ( add e2
         / minus e2
         )*
# e2 — multiplication / division / modulus
e2 <- e3 ( multiply e3
         / divide e3
         / modulus e3
         )*
# e3 — exponentiation
e3 <- e4 ( exponentiation e4
         )*
# e4 — optional unary minus before a value
e4 <- minus value
    / value
# A value is a number literal or a parenthesized subexpression.
value <- number
       / sub
number <- < [0-9]+ > sp
sub <- open e1 close
# Operator and punctuation tokens; each consumes trailing whitespace.
add <- '+' sp
minus <- '-' sp
multiply <- '*' sp
divide <- '/' sp
modulus <- '%' sp
exponentiation <- '^' sp
open <- '(' sp
close <- ')' sp
sp <- ( ' ' / '\t' )*
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | // +build grammars | |
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "math/big" | |
10 | "testing" | |
11 | ) | |
12 | ||
13 | func TestCalculator(t *testing.T) { | |
14 | expression := "( 1 - -3 ) / 3 + 2 * ( 3 + -4 ) + 3 % 2^2" | |
15 | calc := &Calculator{Buffer: expression} | |
16 | calc.Init() | |
17 | if err := calc.Parse(); err != nil { | |
18 | t.Fatal(err) | |
19 | } | |
20 | if calc.Eval().Cmp(big.NewInt(2)) != 0 { | |
21 | t.Fatal("got incorrect result") | |
22 | } | |
23 | } |
# Copyright 2010 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

# Build the fexl example binary; the parser source is generated first.
fexl: fexl.peg.go main.go
	go build

# Regenerate the parser from the grammar with the peg tool two directories up.
fexl.peg.go: fexl.peg
	../../peg -switch -inline fexl.peg

# Remove the binary and the generated parser source.
clean:
	rm -f fexl fexl.peg.go
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | // +build grammars | |
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "io/ioutil" | |
10 | "testing" | |
11 | ) | |
12 | ||
13 | func TestFexl(t *testing.T) { | |
14 | buffer, err := ioutil.ReadFile("doc/try.fxl") | |
15 | if err != nil { | |
16 | t.Fatal(err) | |
17 | } | |
18 | ||
19 | fexl := &Fexl{Buffer: string(buffer)} | |
20 | fexl.Init() | |
21 | ||
22 | if err := fexl.Parse(); err != nil { | |
23 | t.Fatal(err) | |
24 | } | |
25 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | package main | |
5 | ||
6 | import ( | |
7 | "log" | |
8 | "io/ioutil" | |
9 | ) | |
10 | ||
11 | func main() { | |
12 | buffer, err := ioutil.ReadFile("doc/try.fxl") | |
13 | if err != nil { | |
14 | log.Fatal(err) | |
15 | } | |
16 | ||
17 | fexl := &Fexl{Buffer: string(buffer)} | |
18 | fexl.Init() | |
19 | ||
20 | if err := fexl.Parse(); err != nil { | |
21 | log.Fatal(err) | |
22 | } | |
23 | fexl.Highlighter() | |
24 | } |
# Copyright 2010 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

# Build the java example binary; the parser source is generated first.
java: java_1_7.peg.go main.go
	go build

# Regenerate the parser from the Java 1.7 grammar with the peg tool two
# directories up.
java_1_7.peg.go: java_1_7.peg
	../../peg -switch -inline java_1_7.peg

# Remove the binary and the generated parser source.
clean:
	rm -f java java_1_7.peg.go
177 | 177 | / InterfaceMethodDeclaratorRest |
178 | 178 | |
179 | 179 | InterfaceMethodDeclaratorRest |
180 | <- FormalParameters Dim* (THROWS ClassTypeList)? SEM | |
180 | <- FormalParameters Dim* (THROWS ClassTypeList)? SEMI | |
181 | 181 | |
182 | 182 | InterfaceGenericMethodDecl |
183 | 183 | <- TypeParameters (Type / VOID) Identifier InterfaceMethodDeclaratorRest |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | // +build grammars | |
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "fmt" | |
10 | "io/ioutil" | |
11 | "log" | |
12 | "os" | |
13 | "strings" | |
14 | "testing" | |
15 | ) | |
16 | ||
17 | var example1 = `public class HelloWorld { | |
18 | public static void main(String[] args) { | |
19 | System.out.println("Hello, World"); | |
20 | } | |
21 | } | |
22 | ` | |
23 | ||
24 | func TestBasic(t *testing.T) { | |
25 | java := &Java{Buffer: example1} | |
26 | java.Init() | |
27 | ||
28 | if err := java.Parse(); err != nil { | |
29 | t.Fatal(err) | |
30 | } | |
31 | } | |
32 | ||
33 | func TestJava(t *testing.T) { | |
34 | if testing.Short() { | |
35 | t.Skip("skipping java parsing long test") | |
36 | } | |
37 | ||
38 | var walk func(name string) | |
39 | walk = func(name string) { | |
40 | fileInfo, err := os.Stat(name) | |
41 | if err != nil { | |
42 | log.Fatal(err) | |
43 | } | |
44 | ||
45 | if fileInfo.Mode()&(os.ModeNamedPipe|os.ModeSocket|os.ModeDevice) != 0 { | |
46 | /* will lock up if opened */ | |
47 | } else if fileInfo.IsDir() { | |
48 | fmt.Printf("directory %v\n", name) | |
49 | ||
50 | file, err := os.Open(name) | |
51 | if err != nil { | |
52 | log.Fatal(err) | |
53 | } | |
54 | ||
55 | files, err := file.Readdir(-1) | |
56 | if err != nil { | |
57 | log.Fatal(err) | |
58 | } | |
59 | file.Close() | |
60 | ||
61 | for _, f := range files { | |
62 | if !strings.HasSuffix(name, "/") { | |
63 | name += "/" | |
64 | } | |
65 | walk(name + f.Name()) | |
66 | } | |
67 | } else if strings.HasSuffix(name, ".java") { | |
68 | fmt.Printf("parse %v\n", name) | |
69 | ||
70 | file, err := os.Open(name) | |
71 | if err != nil { | |
72 | log.Fatal(err) | |
73 | } | |
74 | ||
75 | buffer, err := ioutil.ReadAll(file) | |
76 | if err != nil { | |
77 | log.Fatal(err) | |
78 | } | |
79 | file.Close() | |
80 | ||
81 | java := &Java{Buffer: string(buffer)} | |
82 | java.Init() | |
83 | if err := java.Parse(); err != nil { | |
84 | log.Fatal(err) | |
85 | } | |
86 | } | |
87 | } | |
88 | walk("java/") | |
89 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | package main | |
5 | ||
6 | import ( | |
7 | "fmt" | |
8 | "io/ioutil" | |
9 | "log" | |
10 | "os" | |
11 | "strings" | |
12 | ) | |
13 | ||
14 | func main() { | |
15 | if len(os.Args) < 2 { | |
16 | fmt.Printf("%v FILE\n", os.Args[0]) | |
17 | os.Exit(1) | |
18 | } | |
19 | ||
20 | var walk func(name string) | |
21 | walk = func(name string) { | |
22 | fileInfo, err := os.Stat(name) | |
23 | if err != nil { | |
24 | log.Fatal(err) | |
25 | } | |
26 | ||
27 | if fileInfo.Mode() & (os.ModeNamedPipe | os.ModeSocket | os.ModeDevice) != 0 { | |
28 | /* will lock up if opened */ | |
29 | } else if fileInfo.IsDir() { | |
30 | fmt.Printf("directory %v\n", name) | |
31 | ||
32 | file, err := os.Open(name) | |
33 | if err != nil { | |
34 | log.Fatal(err) | |
35 | } | |
36 | ||
37 | files, err := file.Readdir(-1) | |
38 | if err != nil { | |
39 | log.Fatal(err) | |
40 | } | |
41 | file.Close() | |
42 | ||
43 | for _, f := range files { | |
44 | if !strings.HasSuffix(name, "/") { | |
45 | name += "/" | |
46 | } | |
47 | walk(name + f.Name()) | |
48 | } | |
49 | } else if strings.HasSuffix(name, ".java") { | |
50 | fmt.Printf("parse %v\n", name) | |
51 | ||
52 | file, err := os.Open(name) | |
53 | if err != nil { | |
54 | log.Fatal(err) | |
55 | } | |
56 | ||
57 | buffer, err := ioutil.ReadAll(file) | |
58 | if err != nil { | |
59 | log.Fatal(err) | |
60 | } | |
61 | file.Close() | |
62 | ||
63 | java := &Java{Buffer: string(buffer)} | |
64 | java.Init() | |
65 | if err := java.Parse(); err != nil { | |
66 | log.Fatal(err) | |
67 | } | |
68 | } | |
69 | } | |
70 | walk(os.Args[1]) | |
71 | } |
# Copyright 2010 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

# Build the long_test example binary; the parser source is generated first.
long_test: long.peg.go main.go
	go build

# Regenerate the parser from the grammar.
# NOTE: unlike the sibling Makefiles this invokes `peg` from PATH rather
# than ../../peg — confirm which binary is intended.
long.peg.go: long.peg
	peg -switch -inline long.peg

# Remove the binary and the generated parser source.
clean:
	rm -f long_test long.peg.go
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | // +build grammars | |
5 | ||
6 | package main | |
7 | ||
8 | import ( | |
9 | "testing" | |
10 | ) | |
11 | ||
12 | func TestLong(t *testing.T) { | |
13 | length := 100000 | |
14 | if testing.Short() { | |
15 | length = 100 | |
16 | } | |
17 | ||
18 | expression := "" | |
19 | long := &Long{Buffer: "\"" + expression + "\""} | |
20 | long.Init() | |
21 | for c := 0; c < length; c++ { | |
22 | if err := long.Parse(); err != nil { | |
23 | t.Fatal(err) | |
24 | } | |
25 | long.Reset() | |
26 | expression = expression + "X" | |
27 | long.Buffer = "\"" + expression + "\"" | |
28 | } | |
29 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | package main | |
5 | ||
6 | import ( | |
7 | "fmt" | |
8 | "log" | |
9 | ) | |
10 | ||
11 | func main() { | |
12 | expression := "" | |
13 | long := &Long{Buffer: "\"" + expression + "\""} | |
14 | long.Init() | |
15 | for c := 0; c < 100000; c++ { | |
16 | if err := long.Parse(); err != nil { | |
17 | fmt.Printf("%v\n", c) | |
18 | log.Fatal(err) | |
19 | } | |
20 | long.Reset() | |
21 | expression = expression + "X" | |
22 | long.Buffer = "\"" + expression + "\"" | |
23 | } | |
24 | } |
10 | 10 | "log" |
11 | 11 | "os" |
12 | 12 | "runtime" |
13 | "time" | |
13 | ||
14 | "github.com/pointlander/peg/tree" | |
14 | 15 | ) |
15 | 16 | |
17 | //go:generate -command build go run build.go | |
18 | //go:generate build buildinfo | |
19 | //go:generate build peg | |
20 | ||
16 | 21 | var ( |
17 | inline = flag.Bool("inline", false, "parse rule inlining") | |
18 | _switch = flag.Bool("switch", false, "replace if-else if-else like blocks with switch blocks") | |
19 | syntax = flag.Bool("syntax", false, "print out the syntax tree") | |
20 | highlight = flag.Bool("highlight", false, "test the syntax highlighter") | |
21 | ast = flag.Bool("ast", false, "generate an AST") | |
22 | test = flag.Bool("test", false, "test the PEG parser performance") | |
23 | print = flag.Bool("print", false, "directly dump the syntax tree") | |
22 | inline = flag.Bool("inline", false, "parse rule inlining") | |
23 | _switch = flag.Bool("switch", false, "replace if-else if-else like blocks with switch blocks") | |
24 | print = flag.Bool("print", false, "directly dump the syntax tree") | |
25 | syntax = flag.Bool("syntax", false, "print out the syntax tree") | |
26 | noast = flag.Bool("noast", false, "disable AST") | |
27 | strict = flag.Bool("strict", false, "treat compiler warnings as errors") | |
28 | filename = flag.String("output", "", "specify name of output file") | |
29 | showVersion = flag.Bool("version", false, "print the version and exit") | |
30 | showBuildTime = flag.Bool("time", false, "show the last time `build.go buildinfo` was ran") | |
24 | 31 | ) |
25 | 32 | |
26 | 33 | func main() { |
27 | 34 | runtime.GOMAXPROCS(2) |
28 | 35 | flag.Parse() |
36 | ||
37 | if *showVersion { | |
38 | if IS_TAGGED { | |
39 | fmt.Println("version:", VERSION) | |
40 | } else { | |
41 | fmt.Printf("version: %s-%s\n", VERSION, COMMIT) | |
42 | } | |
43 | if *showBuildTime { | |
44 | fmt.Println("time:", BUILDTIME) | |
45 | } | |
46 | return | |
47 | } | |
29 | 48 | |
30 | 49 | if flag.NArg() != 1 { |
31 | 50 | flag.Usage() |
38 | 57 | log.Fatal(err) |
39 | 58 | } |
40 | 59 | |
41 | if *test { | |
42 | iterations, p := 1000, &Peg{Tree: New(*inline, *_switch), Buffer: string(buffer)} | |
43 | p.Init() | |
44 | start := time.Now() | |
45 | for i := 0; i < iterations; i++ { | |
46 | p.Parse() | |
47 | p.Reset() | |
48 | } | |
49 | total := float64(time.Since(start).Nanoseconds()) / float64(1000) | |
50 | fmt.Printf("time: %v us\n", total/float64(iterations)) | |
51 | return | |
52 | } | |
53 | ||
54 | p := &Peg{Tree: New(*inline, *_switch), Buffer: string(buffer), Pretty: true} | |
55 | p.Init() | |
60 | p := &Peg{Tree: tree.New(*inline, *_switch, *noast), Buffer: string(buffer)} | |
61 | p.Init(Pretty(true), Size(1<<15)) | |
56 | 62 | if err := p.Parse(); err != nil { |
57 | 63 | log.Fatal(err) |
58 | 64 | } |
59 | 65 | |
60 | 66 | p.Execute() |
61 | 67 | |
62 | if *ast { | |
63 | p.AST().Print(p.Buffer) | |
64 | } | |
65 | 68 | if *print { |
66 | 69 | p.Print() |
67 | 70 | } |
68 | 71 | if *syntax { |
69 | 72 | p.PrintSyntaxTree() |
70 | 73 | } |
71 | if *highlight { | |
72 | p.Highlighter() | |
74 | ||
75 | if *filename == "" { | |
76 | *filename = file + ".go" | |
73 | 77 | } |
74 | ||
75 | filename := file + ".go" | |
76 | out, error := os.OpenFile(filename, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644) | |
77 | if error != nil { | |
78 | fmt.Printf("%v: %v\n", filename, error) | |
78 | out, err := os.OpenFile(*filename, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644) | |
79 | if err != nil { | |
80 | fmt.Printf("%v: %v\n", *filename, err) | |
79 | 81 | return |
80 | 82 | } |
81 | 83 | defer out.Close() |
82 | p.Compile(filename, out) | |
84 | ||
85 | p.Strict = *strict | |
86 | if err = p.Compile(*filename, os.Args, out); err != nil { | |
87 | log.Fatal(err) | |
88 | } | |
83 | 89 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | package main | |
5 | ||
6 | import ( | |
7 | "bytes" | |
8 | "fmt" | |
9 | "go/parser" | |
10 | "go/printer" | |
11 | "go/token" | |
12 | "io" | |
13 | "math" | |
14 | "os" | |
15 | "strconv" | |
16 | "strings" | |
17 | "text/template" | |
18 | ||
19 | "github.com/pointlander/jetset" | |
20 | ) | |
21 | ||
22 | const pegHeaderTemplate = `package {{.PackageName}} | |
23 | ||
24 | import ( | |
25 | {{range .Imports}}"{{.}}" | |
26 | {{end}} | |
27 | ) | |
28 | ||
29 | const endSymbol rune = {{.EndSymbol}} | |
30 | ||
31 | /* The rule types inferred from the grammar are below. */ | |
32 | type pegRule {{.PegRuleType}} | |
33 | ||
34 | const ( | |
35 | ruleUnknown pegRule = iota | |
36 | {{range .RuleNames}}rule{{.String}} | |
37 | {{end}} | |
38 | rulePre | |
39 | ruleIn | |
40 | ruleSuf | |
41 | ) | |
42 | ||
43 | var rul3s = [...]string { | |
44 | "Unknown", | |
45 | {{range .RuleNames}}"{{.String}}", | |
46 | {{end}} | |
47 | "Pre_", | |
48 | "_In_", | |
49 | "_Suf", | |
50 | } | |
51 | ||
52 | type node32 struct { | |
53 | token32 | |
54 | up, next *node32 | |
55 | } | |
56 | ||
57 | func (node *node32) print(depth int, buffer string) { | |
58 | for node != nil { | |
59 | for c := 0; c < depth; c++ { | |
60 | fmt.Printf(" ") | |
61 | } | |
62 | fmt.Printf("\x1B[34m%v\x1B[m %v\n", rul3s[node.pegRule], strconv.Quote(string(([]rune(buffer)[node.begin:node.end])))) | |
63 | if node.up != nil { | |
64 | node.up.print(depth + 1, buffer) | |
65 | } | |
66 | node = node.next | |
67 | } | |
68 | } | |
69 | ||
70 | func (node *node32) Print(buffer string) { | |
71 | node.print(0, buffer) | |
72 | } | |
73 | ||
74 | type element struct { | |
75 | node *node32 | |
76 | down *element | |
77 | } | |
78 | ||
79 | {{range .Sizes}} | |
80 | ||
81 | /* ${@} bit structure for abstract syntax tree */ | |
82 | type token{{.}} struct { | |
83 | pegRule | |
84 | begin, end, next uint{{.}} | |
85 | } | |
86 | ||
87 | func (t *token{{.}}) isZero() bool { | |
88 | return t.pegRule == ruleUnknown && t.begin == 0 && t.end == 0 && t.next == 0 | |
89 | } | |
90 | ||
91 | func (t *token{{.}}) isParentOf(u token{{.}}) bool { | |
92 | return t.begin <= u.begin && t.end >= u.end && t.next > u.next | |
93 | } | |
94 | ||
95 | func (t *token{{.}}) getToken32() token32 { | |
96 | return token32{pegRule: t.pegRule, begin: uint32(t.begin), end: uint32(t.end), next: uint32(t.next)} | |
97 | } | |
98 | ||
99 | func (t *token{{.}}) String() string { | |
100 | return fmt.Sprintf("\x1B[34m%v\x1B[m %v %v %v", rul3s[t.pegRule], t.begin, t.end, t.next) | |
101 | } | |
102 | ||
103 | type tokens{{.}} struct { | |
104 | tree []token{{.}} | |
105 | ordered [][]token{{.}} | |
106 | } | |
107 | ||
108 | func (t *tokens{{.}}) trim(length int) { | |
109 | t.tree = t.tree[0:length] | |
110 | } | |
111 | ||
112 | func (t *tokens{{.}}) Print() { | |
113 | for _, token := range t.tree { | |
114 | fmt.Println(token.String()) | |
115 | } | |
116 | } | |
117 | ||
118 | func (t *tokens{{.}}) Order() [][]token{{.}} { | |
119 | if t.ordered != nil { | |
120 | return t.ordered | |
121 | } | |
122 | ||
123 | depths := make([]int{{.}}, 1, math.MaxInt16) | |
124 | for i, token := range t.tree { | |
125 | if token.pegRule == ruleUnknown { | |
126 | t.tree = t.tree[:i] | |
127 | break | |
128 | } | |
129 | depth := int(token.next) | |
130 | if length := len(depths); depth >= length { | |
131 | depths = depths[:depth + 1] | |
132 | } | |
133 | depths[depth]++ | |
134 | } | |
135 | depths = append(depths, 0) | |
136 | ||
137 | ordered, pool := make([][]token{{.}}, len(depths)), make([]token{{.}}, len(t.tree) + len(depths)) | |
138 | for i, depth := range depths { | |
139 | depth++ | |
140 | ordered[i], pool, depths[i] = pool[:depth], pool[depth:], 0 | |
141 | } | |
142 | ||
143 | for i, token := range t.tree { | |
144 | depth := token.next | |
145 | token.next = uint{{.}}(i) | |
146 | ordered[depth][depths[depth]] = token | |
147 | depths[depth]++ | |
148 | } | |
149 | t.ordered = ordered | |
150 | return ordered | |
151 | } | |
152 | ||
153 | type state{{.}} struct { | |
154 | token{{.}} | |
155 | depths []int{{.}} | |
156 | leaf bool | |
157 | } | |
158 | ||
159 | func (t *tokens{{.}}) AST() *node32 { | |
160 | tokens := t.Tokens() | |
161 | stack := &element{node: &node32{token32:<-tokens}} | |
162 | for token := range tokens { | |
163 | if token.begin == token.end { | |
164 | continue | |
165 | } | |
166 | node := &node32{token32: token} | |
167 | for stack != nil && stack.node.begin >= token.begin && stack.node.end <= token.end { | |
168 | stack.node.next = node.up | |
169 | node.up = stack.node | |
170 | stack = stack.down | |
171 | } | |
172 | stack = &element{node: node, down: stack} | |
173 | } | |
174 | return stack.node | |
175 | } | |
176 | ||
177 | func (t *tokens{{.}}) PreOrder() (<-chan state{{.}}, [][]token{{.}}) { | |
178 | s, ordered := make(chan state{{.}}, 6), t.Order() | |
179 | go func() { | |
180 | var states [8]state{{.}} | |
181 | for i := range states { | |
182 | states[i].depths = make([]int{{.}}, len(ordered)) | |
183 | } | |
184 | depths, state, depth := make([]int{{.}}, len(ordered)), 0, 1 | |
185 | write := func(t token{{.}}, leaf bool) { | |
186 | S := states[state] | |
187 | state, S.pegRule, S.begin, S.end, S.next, S.leaf = (state + 1) % 8, t.pegRule, t.begin, t.end, uint{{.}}(depth), leaf | |
188 | copy(S.depths, depths) | |
189 | s <- S | |
190 | } | |
191 | ||
192 | states[state].token{{.}} = ordered[0][0] | |
193 | depths[0]++ | |
194 | state++ | |
195 | a, b := ordered[depth - 1][depths[depth - 1] - 1], ordered[depth][depths[depth]] | |
196 | depthFirstSearch: for { | |
197 | for { | |
198 | if i := depths[depth]; i > 0 { | |
199 | if c, j := ordered[depth][i - 1], depths[depth - 1]; a.isParentOf(c) && | |
200 | (j < 2 || !ordered[depth - 1][j - 2].isParentOf(c)) { | |
201 | if c.end != b.begin { | |
202 | write(token{{.}} {pegRule: ruleIn, begin: c.end, end: b.begin}, true) | |
203 | } | |
204 | break | |
205 | } | |
206 | } | |
207 | ||
208 | if a.begin < b.begin { | |
209 | write(token{{.}} {pegRule: rulePre, begin: a.begin, end: b.begin}, true) | |
210 | } | |
211 | break | |
212 | } | |
213 | ||
214 | next := depth + 1 | |
215 | if c := ordered[next][depths[next]]; c.pegRule != ruleUnknown && b.isParentOf(c) { | |
216 | write(b, false) | |
217 | depths[depth]++ | |
218 | depth, a, b = next, b, c | |
219 | continue | |
220 | } | |
221 | ||
222 | write(b, true) | |
223 | depths[depth]++ | |
224 | c, parent := ordered[depth][depths[depth]], true | |
225 | for { | |
226 | if c.pegRule != ruleUnknown && a.isParentOf(c) { | |
227 | b = c | |
228 | continue depthFirstSearch | |
229 | } else if parent && b.end != a.end { | |
230 | write(token{{.}} {pegRule: ruleSuf, begin: b.end, end: a.end}, true) | |
231 | } | |
232 | ||
233 | depth-- | |
234 | if depth > 0 { | |
235 | a, b, c = ordered[depth - 1][depths[depth - 1] - 1], a, ordered[depth][depths[depth]] | |
236 | parent = a.isParentOf(b) | |
237 | continue | |
238 | } | |
239 | ||
240 | break depthFirstSearch | |
241 | } | |
242 | } | |
243 | ||
244 | close(s) | |
245 | }() | |
246 | return s, ordered | |
247 | } | |
248 | ||
249 | func (t *tokens{{.}}) PrintSyntax() { | |
250 | tokens, ordered := t.PreOrder() | |
251 | max := -1 | |
252 | for token := range tokens { | |
253 | if !token.leaf { | |
254 | fmt.Printf("%v", token.begin) | |
255 | for i, leaf, depths := 0, int(token.next), token.depths; i < leaf; i++ { | |
256 | fmt.Printf(" \x1B[36m%v\x1B[m", rul3s[ordered[i][depths[i] - 1].pegRule]) | |
257 | } | |
258 | fmt.Printf(" \x1B[36m%v\x1B[m\n", rul3s[token.pegRule]) | |
259 | } else if token.begin == token.end { | |
260 | fmt.Printf("%v", token.begin) | |
261 | for i, leaf, depths := 0, int(token.next), token.depths; i < leaf; i++ { | |
262 | fmt.Printf(" \x1B[31m%v\x1B[m", rul3s[ordered[i][depths[i] - 1].pegRule]) | |
263 | } | |
264 | fmt.Printf(" \x1B[31m%v\x1B[m\n", rul3s[token.pegRule]) | |
265 | } else { | |
266 | for c, end := token.begin, token.end; c < end; c++ { | |
267 | if i := int(c); max + 1 < i { | |
268 | for j := max; j < i; j++ { | |
269 | fmt.Printf("skip %v %v\n", j, token.String()) | |
270 | } | |
271 | max = i | |
272 | } else if i := int(c); i <= max { | |
273 | for j := i; j <= max; j++ { | |
274 | fmt.Printf("dupe %v %v\n", j, token.String()) | |
275 | } | |
276 | } else { | |
277 | max = int(c) | |
278 | } | |
279 | fmt.Printf("%v", c) | |
280 | for i, leaf, depths := 0, int(token.next), token.depths; i < leaf; i++ { | |
281 | fmt.Printf(" \x1B[34m%v\x1B[m", rul3s[ordered[i][depths[i] - 1].pegRule]) | |
282 | } | |
283 | fmt.Printf(" \x1B[34m%v\x1B[m\n", rul3s[token.pegRule]) | |
284 | } | |
285 | fmt.Printf("\n") | |
286 | } | |
287 | } | |
288 | } | |
289 | ||
290 | func (t *tokens{{.}}) PrintSyntaxTree(buffer string) { | |
291 | tokens, _ := t.PreOrder() | |
292 | for token := range tokens { | |
293 | for c := 0; c < int(token.next); c++ { | |
294 | fmt.Printf(" ") | |
295 | } | |
296 | fmt.Printf("\x1B[34m%v\x1B[m %v\n", rul3s[token.pegRule], strconv.Quote(string(([]rune(buffer)[token.begin:token.end])))) | |
297 | } | |
298 | } | |
299 | ||
300 | func (t *tokens{{.}}) Add(rule pegRule, begin, end, depth uint32, index int) { | |
301 | t.tree[index] = token{{.}}{pegRule: rule, begin: uint{{.}}(begin), end: uint{{.}}(end), next: uint{{.}}(depth)} | |
302 | } | |
303 | ||
304 | func (t *tokens{{.}}) Tokens() <-chan token32 { | |
305 | s := make(chan token32, 16) | |
306 | go func() { | |
307 | for _, v := range t.tree { | |
308 | s <- v.getToken32() | |
309 | } | |
310 | close(s) | |
311 | }() | |
312 | return s | |
313 | } | |
314 | ||
315 | func (t *tokens{{.}}) Error() []token32 { | |
316 | ordered := t.Order() | |
317 | length := len(ordered) | |
318 | tokens, length := make([]token32, length), length - 1 | |
319 | for i := range tokens { | |
320 | o := ordered[length - i] | |
321 | if len(o) > 1 { | |
322 | tokens[i] = o[len(o) - 2].getToken32() | |
323 | } | |
324 | } | |
325 | return tokens | |
326 | } | |
327 | {{end}} | |
328 | ||
329 | func (t *tokens32) Expand(index int) { | |
330 | tree := t.tree | |
331 | if index >= len(tree) { | |
332 | expanded := make([]token32, 2 * len(tree)) | |
333 | copy(expanded, tree) | |
334 | t.tree = expanded | |
335 | } | |
336 | } | |
337 | ||
338 | type {{.StructName}} struct { | |
339 | {{.StructVariables}} | |
340 | Buffer string | |
341 | buffer []rune | |
342 | rules [{{.RulesCount}}]func() bool | |
343 | Parse func(rule ...int) error | |
344 | Reset func() | |
345 | Pretty bool | |
346 | tokens32 | |
347 | } | |
348 | ||
349 | type textPosition struct { | |
350 | line, symbol int | |
351 | } | |
352 | ||
353 | type textPositionMap map[int] textPosition | |
354 | ||
355 | func translatePositions(buffer []rune, positions []int) textPositionMap { | |
356 | length, translations, j, line, symbol := len(positions), make(textPositionMap, len(positions)), 0, 1, 0 | |
357 | sort.Ints(positions) | |
358 | ||
359 | search: for i, c := range buffer { | |
360 | if c == '\n' {line, symbol = line + 1, 0} else {symbol++} | |
361 | if i == positions[j] { | |
362 | translations[positions[j]] = textPosition{line, symbol} | |
363 | for j++; j < length; j++ {if i != positions[j] {continue search}} | |
364 | break search | |
365 | } | |
366 | } | |
367 | ||
368 | return translations | |
369 | } | |
370 | ||
371 | type parseError struct { | |
372 | p *{{.StructName}} | |
373 | max token32 | |
374 | } | |
375 | ||
376 | func (e *parseError) Error() string { | |
377 | tokens, error := []token32{e.max}, "\n" | |
378 | positions, p := make([]int, 2 * len(tokens)), 0 | |
379 | for _, token := range tokens { | |
380 | positions[p], p = int(token.begin), p + 1 | |
381 | positions[p], p = int(token.end), p + 1 | |
382 | } | |
383 | translations := translatePositions(e.p.buffer, positions) | |
384 | format := "parse error near %v (line %v symbol %v - line %v symbol %v):\n%v\n" | |
385 | if e.p.Pretty { | |
386 | format = "parse error near \x1B[34m%v\x1B[m (line %v symbol %v - line %v symbol %v):\n%v\n" | |
387 | } | |
388 | for _, token := range tokens { | |
389 | begin, end := int(token.begin), int(token.end) | |
390 | error += fmt.Sprintf(format, | |
391 | rul3s[token.pegRule], | |
392 | translations[begin].line, translations[begin].symbol, | |
393 | translations[end].line, translations[end].symbol, | |
394 | strconv.Quote(string(e.p.buffer[begin:end]))) | |
395 | } | |
396 | ||
397 | return error | |
398 | } | |
399 | ||
400 | func (p *{{.StructName}}) PrintSyntaxTree() { | |
401 | p.tokens32.PrintSyntaxTree(p.Buffer) | |
402 | } | |
403 | ||
404 | func (p *{{.StructName}}) Highlighter() { | |
405 | p.PrintSyntax() | |
406 | } | |
407 | ||
408 | {{if .HasActions}} | |
409 | func (p *{{.StructName}}) Execute() { | |
410 | buffer, _buffer, text, begin, end := p.Buffer, p.buffer, "", 0, 0 | |
411 | for token := range p.Tokens() { | |
412 | switch (token.pegRule) { | |
413 | {{if .HasPush}} | |
414 | case rulePegText: | |
415 | begin, end = int(token.begin), int(token.end) | |
416 | text = string(_buffer[begin:end]) | |
417 | {{end}} | |
418 | {{range .Actions}}case ruleAction{{.GetId}}: | |
419 | {{.String}} | |
420 | {{end}} | |
421 | } | |
422 | } | |
423 | _, _, _, _, _ = buffer, _buffer, text, begin, end | |
424 | } | |
425 | {{end}} | |
426 | ||
427 | func (p *{{.StructName}}) Init() { | |
428 | p.buffer = []rune(p.Buffer) | |
429 | if len(p.buffer) == 0 || p.buffer[len(p.buffer) - 1] != endSymbol { | |
430 | p.buffer = append(p.buffer, endSymbol) | |
431 | } | |
432 | ||
433 | tree := tokens32{tree: make([]token32, math.MaxInt16)} | |
434 | var max token32 | |
435 | position, depth, tokenIndex, buffer, _rules := uint32(0), uint32(0), 0, p.buffer, p.rules | |
436 | ||
437 | p.Parse = func(rule ...int) error { | |
438 | r := 1 | |
439 | if len(rule) > 0 { | |
440 | r = rule[0] | |
441 | } | |
442 | matches := p.rules[r]() | |
443 | p.tokens32 = tree | |
444 | if matches { | |
445 | p.trim(tokenIndex) | |
446 | return nil | |
447 | } | |
448 | return &parseError{p, max} | |
449 | } | |
450 | ||
451 | p.Reset = func() { | |
452 | position, tokenIndex, depth = 0, 0, 0 | |
453 | } | |
454 | ||
455 | add := func(rule pegRule, begin uint32) { | |
456 | tree.Expand(tokenIndex) | |
457 | tree.Add(rule, begin, position, depth, tokenIndex) | |
458 | tokenIndex++ | |
459 | if begin != position && position > max.end { | |
460 | max = token32{rule, begin, position, depth} | |
461 | } | |
462 | } | |
463 | ||
464 | {{if .HasDot}} | |
465 | matchDot := func() bool { | |
466 | if buffer[position] != endSymbol { | |
467 | position++ | |
468 | return true | |
469 | } | |
470 | return false | |
471 | } | |
472 | {{end}} | |
473 | ||
474 | {{if .HasCharacter}} | |
475 | /*matchChar := func(c byte) bool { | |
476 | if buffer[position] == c { | |
477 | position++ | |
478 | return true | |
479 | } | |
480 | return false | |
481 | }*/ | |
482 | {{end}} | |
483 | ||
484 | {{if .HasString}} | |
485 | matchString := func(s string) bool { | |
486 | i := position | |
487 | for _, c := range s { | |
488 | if buffer[i] != c { | |
489 | return false | |
490 | } | |
491 | i++ | |
492 | } | |
493 | position = i | |
494 | return true | |
495 | } | |
496 | {{end}} | |
497 | ||
498 | {{if .HasRange}} | |
499 | /*matchRange := func(lower byte, upper byte) bool { | |
500 | if c := buffer[position]; c >= lower && c <= upper { | |
501 | position++ | |
502 | return true | |
503 | } | |
504 | return false | |
505 | }*/ | |
506 | {{end}} | |
507 | ||
508 | _rules = [...]func() bool { | |
509 | nil,` | |
510 | ||
// Type identifies the kind of a node in the parsed PEG grammar tree.
type Type uint8

// The node kinds produced while parsing a grammar. The order here is
// significant: TypeMap below must list the names in exactly this
// order, and TypeLast is a sentinel used only to size per-type
// tables (e.g. the counts array in Compile) — it is never assigned
// to a node.
const (
	TypeUnknown Type = iota
	TypeRule
	TypeName
	TypeDot
	TypeCharacter
	TypeRange
	TypeString
	TypePredicate
	TypeStateChange
	TypeCommit
	TypeAction
	TypePackage
	TypeImport
	TypeState
	TypeAlternate
	TypeUnorderedAlternate
	TypeSequence
	TypePeekFor
	TypePeekNot
	TypeQuery
	TypeStar
	TypePlus
	TypePeg
	TypePush
	TypeImplicitPush
	TypeNil
	TypeLast
)
542 | ||
// TypeMap maps a Type value to its name for debugging output
// (see node.debug). The entries must stay in the exact order of the
// Type constants above.
//
// Fix: the original table omitted "TypeStateChange", which shifted
// every name from TypeCommit onward by one and made TypeMap[TypeNil]
// report "TypeLast" while TypeMap[TypeLast] indexed out of range.
var TypeMap = [...]string{
	"TypeUnknown",
	"TypeRule",
	"TypeName",
	"TypeDot",
	"TypeCharacter",
	"TypeRange",
	"TypeString",
	"TypePredicate",
	"TypeStateChange",
	"TypeCommit",
	"TypeAction",
	"TypePackage",
	"TypeImport",
	"TypeState",
	"TypeAlternate",
	"TypeUnorderedAlternate",
	"TypeSequence",
	"TypePeekFor",
	"TypePeekNot",
	"TypeQuery",
	"TypeStar",
	"TypePlus",
	"TypePeg",
	"TypePush",
	"TypeImplicitPush",
	"TypeNil",
	"TypeLast"}
570 | ||
// GetType returns the receiver itself; embedding Type in node lets
// this single method satisfy the type-reporting part of Node.
func (t Type) GetType() Type {
	return t
}
574 | ||
// Node is the interface implemented by every element of the grammar
// tree: a typed, stringed value with an id that also behaves as a
// linked list of child nodes.
type Node interface {
	fmt.Stringer
	debug()

	Escaped() string
	SetString(s string)

	GetType() Type
	SetType(t Type)

	GetId() int
	SetId(id int)

	Init()
	Front() *node
	Next() *node
	PushFront(value *node)
	PopFront() *node
	PushBack(value *node)
	Len() int
	Copy() *node
	Slice() []*node
}
598 | ||
// node is the concrete Node implementation. Children are kept in an
// intrusive singly linked list (front/back), and each node links to
// its next sibling via next.
type node struct {
	Type   // embedded node kind
	string // embedded text payload (rule name, literal, action code, ...)
	id int

	front  *node // first child
	back   *node // last child
	length int   // number of children

	/* use hash table here instead of Copy? */
	next *node // next sibling in the parent's child list
}
611 | ||
// String returns the node's embedded text payload.
func (n *node) String() string {
	return n.string
}

// debug prints a one-line description of the node to stdout; for a
// single-character payload the first byte's value is printed too.
func (n *node) debug() {
	if len(n.string) == 1 {
		fmt.Printf("%v %v '%v' %d\n", n.id, TypeMap[n.Type], n.string, n.string[0])
	} else {
		fmt.Printf("%v %v '%v'\n", n.id, TypeMap[n.Type], n.string)
	}
}

// Escaped returns the payload escaped for embedding in generated source.
func (n *node) Escaped() string {
	return escape(n.string)
}

// SetString replaces the node's text payload.
func (n *node) SetString(s string) {
	n.string = s
}

// SetType replaces the node's kind.
func (n *node) SetType(t Type) {
	n.Type = t
}

// GetId returns the node's numeric id.
func (n *node) GetId() int {
	return n.id
}

// SetId assigns the node's numeric id.
func (n *node) SetId(id int) {
	n.id = id
}

// Init empties the child list; type, payload, id and the sibling
// link are left untouched.
func (n *node) Init() {
	n.front = nil
	n.back = nil
	n.length = 0
}
649 | ||
// Front returns the first child, or nil when there are none.
func (n *node) Front() *node {
	return n.front
}

// Next returns the following sibling, or nil at the end of the list.
func (n *node) Next() *node {
	return n.next
}
657 | ||
658 | func (n *node) PushFront(value *node) { | |
659 | if n.back == nil { | |
660 | n.back = value | |
661 | } else { | |
662 | value.next = n.front | |
663 | } | |
664 | n.front = value | |
665 | n.length++ | |
666 | } | |
667 | ||
668 | func (n *node) PopFront() *node { | |
669 | front := n.front | |
670 | ||
671 | switch true { | |
672 | case front == nil: | |
673 | panic("tree is empty") | |
674 | case front == n.back: | |
675 | n.front, n.back = nil, nil | |
676 | default: | |
677 | n.front, front.next = front.next, nil | |
678 | } | |
679 | ||
680 | n.length-- | |
681 | return front | |
682 | } | |
683 | ||
684 | func (n *node) PushBack(value *node) { | |
685 | if n.front == nil { | |
686 | n.front = value | |
687 | } else { | |
688 | n.back.next = value | |
689 | } | |
690 | n.back = value | |
691 | n.length++ | |
692 | } | |
693 | ||
// Len reports the number of children.
func (n *node) Len() (c int) {
	return n.length
}

// Copy returns a shallow copy: the child list is shared with the
// receiver, and the sibling link (next) is deliberately left nil.
func (n *node) Copy() *node {
	return &node{Type: n.Type, string: n.string, id: n.id, front: n.front, back: n.back, length: n.length}
}

// Slice returns the children as a slice, in list order.
func (n *node) Slice() []*node {
	s := make([]*node, n.length)
	for element, i := n.Front(), 0; element != nil; element, i = element.Next(), i+1 {
		s[i] = element
	}
	return s
}
709 | ||
710 | /* A tree data structure into which a PEG can be parsed. */ | |
711 | type Tree struct { | |
712 | Rules map[string]Node | |
713 | rulesCount map[string]uint | |
714 | node | |
715 | inline, _switch bool | |
716 | ||
717 | RuleNames []Node | |
718 | Sizes [1]int | |
719 | PackageName string | |
720 | Imports []string | |
721 | EndSymbol rune | |
722 | PegRuleType string | |
723 | StructName string | |
724 | StructVariables string | |
725 | RulesCount int | |
726 | Bits int | |
727 | HasActions bool | |
728 | Actions []Node | |
729 | HasPush bool | |
730 | HasCommit bool | |
731 | HasDot bool | |
732 | HasCharacter bool | |
733 | HasString bool | |
734 | HasRange bool | |
735 | } | |
736 | ||
737 | func New(inline, _switch bool) *Tree { | |
738 | return &Tree{Rules: make(map[string]Node), | |
739 | Sizes: [1]int{32}, | |
740 | rulesCount: make(map[string]uint), | |
741 | inline: inline, | |
742 | _switch: _switch} | |
743 | } | |
744 | ||
// AddRule pushes a new rule node named name, assigning it the next
// free rule id.
func (t *Tree) AddRule(name string) {
	t.PushFront(&node{Type: TypeRule, string: name, id: t.RulesCount})
	t.RulesCount++
}

// AddExpression attaches the most recently pushed expression to the
// rule beneath it and moves the completed rule to the back.
func (t *Tree) AddExpression() {
	expression := t.PopFront()
	rule := t.PopFront()
	rule.PushBack(expression)
	t.PushBack(rule)
}

// AddName pushes a reference to the rule named text.
func (t *Tree) AddName(text string) {
	t.PushFront(&node{Type: TypeName, string: text})
}

// AddDot pushes a "match any character" node.
func (t *Tree) AddDot() { t.PushFront(&node{Type: TypeDot, string: "."}) }

// AddCharacter pushes a single-character literal node.
func (t *Tree) AddCharacter(text string) {
	t.PushFront(&node{Type: TypeCharacter, string: text})
}

// AddDoubleCharacter pushes a case-insensitive character match:
// an alternate of the lower- and upper-case forms of text.
func (t *Tree) AddDoubleCharacter(text string) {
	t.PushFront(&node{Type: TypeCharacter, string: strings.ToLower(text)})
	t.PushFront(&node{Type: TypeCharacter, string: strings.ToUpper(text)})
	t.AddAlternate()
}
770 | func (t *Tree) AddHexaCharacter(text string) { | |
771 | hexa, _ := strconv.ParseInt(text, 16, 32) | |
772 | t.PushFront(&node{Type: TypeCharacter, string: string(hexa)}) | |
773 | } | |
774 | func (t *Tree) AddOctalCharacter(text string) { | |
775 | octal, _ := strconv.ParseInt(text, 8, 8) | |
776 | t.PushFront(&node{Type: TypeCharacter, string: string(octal)}) | |
777 | } | |
// AddPredicate pushes a semantic-predicate node (&{...}).
func (t *Tree) AddPredicate(text string) { t.PushFront(&node{Type: TypePredicate, string: text}) }

// AddStateChange pushes a state-change node (!{...}).
func (t *Tree) AddStateChange(text string) { t.PushFront(&node{Type: TypeStateChange, string: text}) }

// AddNil pushes a node that matches the empty string.
func (t *Tree) AddNil() { t.PushFront(&node{Type: TypeNil, string: "<nil>"}) }

// AddAction pushes a Go action block ({...}).
func (t *Tree) AddAction(text string) { t.PushFront(&node{Type: TypeAction, string: text}) }

// AddPackage records the package clause for the generated file.
func (t *Tree) AddPackage(text string) { t.PushBack(&node{Type: TypePackage, string: text}) }

// AddImport records an import for the generated file.
func (t *Tree) AddImport(text string) { t.PushBack(&node{Type: TypeImport, string: text}) }

// AddState attaches extra parser-struct declarations to the node at
// the front of the tree (the peg node) as a TypeState child.
func (t *Tree) AddState(text string) {
	peg := t.PopFront()
	peg.PushBack(&node{Type: TypeState, string: text})
	t.PushBack(peg)
}
789 | ||
790 | func (t *Tree) addList(listType Type) { | |
791 | a := t.PopFront() | |
792 | b := t.PopFront() | |
793 | var l *node | |
794 | if b.GetType() == listType { | |
795 | l = b | |
796 | } else { | |
797 | l = &node{Type: listType} | |
798 | l.PushBack(b) | |
799 | } | |
800 | l.PushBack(a) | |
801 | t.PushFront(l) | |
802 | } | |
// AddAlternate folds the top two nodes into an ordered choice.
func (t *Tree) AddAlternate() { t.addList(TypeAlternate) }

// AddSequence folds the top two nodes into a sequence.
func (t *Tree) AddSequence() { t.addList(TypeSequence) }

// AddRange folds the top two character nodes into a range [a-b].
func (t *Tree) AddRange() { t.addList(TypeRange) }

// AddDoubleRange builds a case-insensitive range from the top two
// character nodes: an alternate of the lower-case and upper-case
// ranges.
func (t *Tree) AddDoubleRange() {
	a := t.PopFront()
	b := t.PopFront()

	t.AddCharacter(strings.ToLower(b.String()))
	t.AddCharacter(strings.ToLower(a.String()))
	t.addList(TypeRange)

	t.AddCharacter(strings.ToUpper(b.String()))
	t.AddCharacter(strings.ToUpper(a.String()))
	t.addList(TypeRange)

	t.AddAlternate()
}
820 | ||
// addFix wraps the front node in a new node of the given operator
// type (used for the prefix/postfix PEG operators below).
func (t *Tree) addFix(fixType Type) {
	n := &node{Type: fixType}
	n.PushBack(t.PopFront())
	t.PushFront(n)
}

// AddPeekFor wraps the front node in an and-predicate (&e).
func (t *Tree) AddPeekFor() { t.addFix(TypePeekFor) }

// AddPeekNot wraps the front node in a not-predicate (!e).
func (t *Tree) AddPeekNot() { t.addFix(TypePeekNot) }

// AddQuery wraps the front node in an optional (e?).
func (t *Tree) AddQuery() { t.addFix(TypeQuery) }

// AddStar wraps the front node in a zero-or-more repetition (e*).
func (t *Tree) AddStar() { t.addFix(TypeStar) }

// AddPlus wraps the front node in a one-or-more repetition (e+).
func (t *Tree) AddPlus() { t.addFix(TypePlus) }

// AddPush wraps the front node in a text capture (<e>).
func (t *Tree) AddPush() { t.addFix(TypePush) }

// AddPeg pushes the root peg node carrying the parser struct name.
func (t *Tree) AddPeg(text string) { t.PushFront(&node{Type: TypePeg, string: text}) }
834 | ||
// join runs every task in its own goroutine and blocks until all of
// them have completed.
func join(tasks []func()) {
	length := len(tasks)
	done := make(chan int, length)
	for _, task := range tasks {
		go func(task func()) { task(); done <- 1 }(task)
	}
	// Receive exactly one completion signal per task. The previous
	// accumulator loop (`for d := <-done; d < length; d += <-done`)
	// performed an unconditional first receive and therefore
	// deadlocked when tasks was empty; counting receives handles
	// the zero-task case for free.
	for i := 0; i < length; i++ {
		<-done
	}
}
844 | ||
// escape returns c in a form suitable for embedding inside a
// single-quoted character literal in the generated parser: a single
// quote is backslash-escaped, a double quote is passed through
// unescaped, and everything else uses strconv.Quote's escaping with
// the surrounding double quotes stripped.
func escape(c string) string {
	if c == "'" {
		return "\\'"
	}
	if c == "\"" {
		return "\""
	}
	quoted := strconv.Quote(c)
	// Drop the enclosing double quotes added by strconv.Quote.
	return quoted[1 : len(quoted)-1]
}
856 | ||
857 | func (t *Tree) Compile(file string, out io.Writer) { | |
858 | t.AddImport("fmt") | |
859 | t.AddImport("math") | |
860 | t.AddImport("sort") | |
861 | t.AddImport("strconv") | |
862 | t.EndSymbol = 0x110000 | |
863 | t.RulesCount++ | |
864 | ||
865 | counts := [TypeLast]uint{} | |
866 | { | |
867 | var rule *node | |
868 | var link func(node Node) | |
869 | link = func(n Node) { | |
870 | nodeType := n.GetType() | |
871 | id := counts[nodeType] | |
872 | counts[nodeType]++ | |
873 | switch nodeType { | |
874 | case TypeAction: | |
875 | n.SetId(int(id)) | |
876 | copy, name := n.Copy(), fmt.Sprintf("Action%v", id) | |
877 | t.Actions = append(t.Actions, copy) | |
878 | n.Init() | |
879 | n.SetType(TypeName) | |
880 | n.SetString(name) | |
881 | n.SetId(t.RulesCount) | |
882 | ||
883 | emptyRule := &node{Type: TypeRule, string: name, id: t.RulesCount} | |
884 | implicitPush := &node{Type: TypeImplicitPush} | |
885 | emptyRule.PushBack(implicitPush) | |
886 | implicitPush.PushBack(copy) | |
887 | implicitPush.PushBack(emptyRule.Copy()) | |
888 | t.PushBack(emptyRule) | |
889 | t.RulesCount++ | |
890 | ||
891 | t.Rules[name] = emptyRule | |
892 | t.RuleNames = append(t.RuleNames, emptyRule) | |
893 | case TypeName: | |
894 | name := n.String() | |
895 | if _, ok := t.Rules[name]; !ok { | |
896 | emptyRule := &node{Type: TypeRule, string: name, id: t.RulesCount} | |
897 | implicitPush := &node{Type: TypeImplicitPush} | |
898 | emptyRule.PushBack(implicitPush) | |
899 | implicitPush.PushBack(&node{Type: TypeNil, string: "<nil>"}) | |
900 | implicitPush.PushBack(emptyRule.Copy()) | |
901 | t.PushBack(emptyRule) | |
902 | t.RulesCount++ | |
903 | ||
904 | t.Rules[name] = emptyRule | |
905 | t.RuleNames = append(t.RuleNames, emptyRule) | |
906 | } | |
907 | case TypePush: | |
908 | copy, name := rule.Copy(), "PegText" | |
909 | copy.SetString(name) | |
910 | if _, ok := t.Rules[name]; !ok { | |
911 | emptyRule := &node{Type: TypeRule, string: name, id: t.RulesCount} | |
912 | emptyRule.PushBack(&node{Type: TypeNil, string: "<nil>"}) | |
913 | t.PushBack(emptyRule) | |
914 | t.RulesCount++ | |
915 | ||
916 | t.Rules[name] = emptyRule | |
917 | t.RuleNames = append(t.RuleNames, emptyRule) | |
918 | } | |
919 | n.PushBack(copy) | |
920 | fallthrough | |
921 | case TypeImplicitPush: | |
922 | link(n.Front()) | |
923 | case TypeRule, TypeAlternate, TypeUnorderedAlternate, TypeSequence, | |
924 | TypePeekFor, TypePeekNot, TypeQuery, TypeStar, TypePlus: | |
925 | for _, node := range n.Slice() { | |
926 | link(node) | |
927 | } | |
928 | } | |
929 | } | |
930 | /* first pass */ | |
931 | for _, node := range t.Slice() { | |
932 | switch node.GetType() { | |
933 | case TypePackage: | |
934 | t.PackageName = node.String() | |
935 | case TypeImport: | |
936 | t.Imports = append(t.Imports, node.String()) | |
937 | case TypePeg: | |
938 | t.StructName = node.String() | |
939 | t.StructVariables = node.Front().String() | |
940 | case TypeRule: | |
941 | if _, ok := t.Rules[node.String()]; !ok { | |
942 | expression := node.Front() | |
943 | copy := expression.Copy() | |
944 | expression.Init() | |
945 | expression.SetType(TypeImplicitPush) | |
946 | expression.PushBack(copy) | |
947 | expression.PushBack(node.Copy()) | |
948 | ||
949 | t.Rules[node.String()] = node | |
950 | t.RuleNames = append(t.RuleNames, node) | |
951 | } | |
952 | } | |
953 | } | |
954 | /* second pass */ | |
955 | for _, node := range t.Slice() { | |
956 | if node.GetType() == TypeRule { | |
957 | rule = node | |
958 | link(node) | |
959 | } | |
960 | } | |
961 | } | |
962 | ||
963 | join([]func(){ | |
964 | func() { | |
965 | var countRules func(node Node) | |
966 | ruleReached := make([]bool, t.RulesCount) | |
967 | countRules = func(node Node) { | |
968 | switch node.GetType() { | |
969 | case TypeRule: | |
970 | name, id := node.String(), node.GetId() | |
971 | if count, ok := t.rulesCount[name]; ok { | |
972 | t.rulesCount[name] = count + 1 | |
973 | } else { | |
974 | t.rulesCount[name] = 1 | |
975 | } | |
976 | if ruleReached[id] { | |
977 | return | |
978 | } | |
979 | ruleReached[id] = true | |
980 | countRules(node.Front()) | |
981 | case TypeName: | |
982 | countRules(t.Rules[node.String()]) | |
983 | case TypeImplicitPush, TypePush: | |
984 | countRules(node.Front()) | |
985 | case TypeAlternate, TypeUnorderedAlternate, TypeSequence, | |
986 | TypePeekFor, TypePeekNot, TypeQuery, TypeStar, TypePlus: | |
987 | for _, element := range node.Slice() { | |
988 | countRules(element) | |
989 | } | |
990 | } | |
991 | } | |
992 | for _, node := range t.Slice() { | |
993 | if node.GetType() == TypeRule { | |
994 | countRules(node) | |
995 | break | |
996 | } | |
997 | } | |
998 | }, | |
999 | func() { | |
1000 | var checkRecursion func(node Node) bool | |
1001 | ruleReached := make([]bool, t.RulesCount) | |
1002 | checkRecursion = func(node Node) bool { | |
1003 | switch node.GetType() { | |
1004 | case TypeRule: | |
1005 | id := node.GetId() | |
1006 | if ruleReached[id] { | |
1007 | fmt.Fprintf(os.Stderr, "possible infinite left recursion in rule '%v'\n", node) | |
1008 | return false | |
1009 | } | |
1010 | ruleReached[id] = true | |
1011 | consumes := checkRecursion(node.Front()) | |
1012 | ruleReached[id] = false | |
1013 | return consumes | |
1014 | case TypeAlternate: | |
1015 | for _, element := range node.Slice() { | |
1016 | if !checkRecursion(element) { | |
1017 | return false | |
1018 | } | |
1019 | } | |
1020 | return true | |
1021 | case TypeSequence: | |
1022 | for _, element := range node.Slice() { | |
1023 | if checkRecursion(element) { | |
1024 | return true | |
1025 | } | |
1026 | } | |
1027 | case TypeName: | |
1028 | return checkRecursion(t.Rules[node.String()]) | |
1029 | case TypePlus, TypePush, TypeImplicitPush: | |
1030 | return checkRecursion(node.Front()) | |
1031 | case TypeCharacter, TypeString: | |
1032 | return len(node.String()) > 0 | |
1033 | case TypeDot, TypeRange: | |
1034 | return true | |
1035 | } | |
1036 | return false | |
1037 | } | |
1038 | for _, node := range t.Slice() { | |
1039 | if node.GetType() == TypeRule { | |
1040 | checkRecursion(node) | |
1041 | } | |
1042 | } | |
1043 | }}) | |
1044 | ||
1045 | if t._switch { | |
1046 | var optimizeAlternates func(node Node) (consumes bool, s jetset.Set) | |
1047 | cache, firstPass := make([]struct { | |
1048 | reached, consumes bool | |
1049 | s jetset.Set | |
1050 | }, t.RulesCount), true | |
1051 | optimizeAlternates = func(n Node) (consumes bool, s jetset.Set) { | |
1052 | /*n.debug()*/ | |
1053 | switch n.GetType() { | |
1054 | case TypeRule: | |
1055 | cache := &cache[n.GetId()] | |
1056 | if cache.reached { | |
1057 | consumes, s = cache.consumes, cache.s | |
1058 | return | |
1059 | } | |
1060 | ||
1061 | cache.reached = true | |
1062 | consumes, s = optimizeAlternates(n.Front()) | |
1063 | cache.consumes, cache.s = consumes, s | |
1064 | case TypeName: | |
1065 | consumes, s = optimizeAlternates(t.Rules[n.String()]) | |
1066 | case TypeDot: | |
1067 | consumes = true | |
1068 | /* TypeDot set doesn't include the EndSymbol */ | |
1069 | s = s.Add(uint64(t.EndSymbol)) | |
1070 | s = s.Complement(uint64(t.EndSymbol)) | |
1071 | case TypeString, TypeCharacter: | |
1072 | consumes = true | |
1073 | s = s.Add(uint64([]rune(n.String())[0])) | |
1074 | case TypeRange: | |
1075 | consumes = true | |
1076 | element := n.Front() | |
1077 | lower := []rune(element.String())[0] | |
1078 | element = element.Next() | |
1079 | upper := []rune(element.String())[0] | |
1080 | s = s.AddRange(uint64(lower), uint64(upper)) | |
1081 | case TypeAlternate: | |
1082 | consumes = true | |
1083 | mconsumes, properties, c := | |
1084 | consumes, make([]struct { | |
1085 | intersects bool | |
1086 | s jetset.Set | |
1087 | }, n.Len()), 0 | |
1088 | for _, element := range n.Slice() { | |
1089 | mconsumes, properties[c].s = optimizeAlternates(element) | |
1090 | consumes = consumes && mconsumes | |
1091 | s = s.Union(properties[c].s) | |
1092 | c++ | |
1093 | } | |
1094 | ||
1095 | if firstPass { | |
1096 | break | |
1097 | } | |
1098 | ||
1099 | intersections := 2 | |
1100 | compare: | |
1101 | for ai, a := range properties[0 : len(properties)-1] { | |
1102 | for _, b := range properties[ai+1:] { | |
1103 | if a.s.Intersects(b.s) { | |
1104 | intersections++ | |
1105 | properties[ai].intersects = true | |
1106 | continue compare | |
1107 | } | |
1108 | } | |
1109 | } | |
1110 | if intersections >= len(properties) { | |
1111 | break | |
1112 | } | |
1113 | ||
1114 | c, unordered, ordered, max := | |
1115 | 0, &node{Type: TypeUnorderedAlternate}, &node{Type: TypeAlternate}, 0 | |
1116 | for _, element := range n.Slice() { | |
1117 | if properties[c].intersects { | |
1118 | ordered.PushBack(element.Copy()) | |
1119 | } else { | |
1120 | class := &node{Type: TypeUnorderedAlternate} | |
1121 | for d := 0; d < 256; d++ { | |
1122 | if properties[c].s.Has(uint64(d)) { | |
1123 | class.PushBack(&node{Type: TypeCharacter, string: string(d)}) | |
1124 | } | |
1125 | } | |
1126 | ||
1127 | sequence, predicate, length := | |
1128 | &node{Type: TypeSequence}, &node{Type: TypePeekFor}, properties[c].s.Len() | |
1129 | if length == 0 { | |
1130 | class.PushBack(&node{Type: TypeNil, string: "<nil>"}) | |
1131 | } | |
1132 | predicate.PushBack(class) | |
1133 | sequence.PushBack(predicate) | |
1134 | sequence.PushBack(element.Copy()) | |
1135 | ||
1136 | if element.GetType() == TypeNil { | |
1137 | unordered.PushBack(sequence) | |
1138 | } else if length > max { | |
1139 | unordered.PushBack(sequence) | |
1140 | max = length | |
1141 | } else { | |
1142 | unordered.PushFront(sequence) | |
1143 | } | |
1144 | } | |
1145 | c++ | |
1146 | } | |
1147 | n.Init() | |
1148 | if ordered.Front() == nil { | |
1149 | n.SetType(TypeUnorderedAlternate) | |
1150 | for _, element := range unordered.Slice() { | |
1151 | n.PushBack(element.Copy()) | |
1152 | } | |
1153 | } else { | |
1154 | for _, element := range ordered.Slice() { | |
1155 | n.PushBack(element.Copy()) | |
1156 | } | |
1157 | n.PushBack(unordered) | |
1158 | } | |
1159 | case TypeSequence: | |
1160 | classes, elements := | |
1161 | make([]struct { | |
1162 | s jetset.Set | |
1163 | }, n.Len()), n.Slice() | |
1164 | ||
1165 | for c, element := range elements { | |
1166 | consumes, classes[c].s = optimizeAlternates(element) | |
1167 | if consumes { | |
1168 | elements, classes = elements[c+1:], classes[:c+1] | |
1169 | break | |
1170 | } | |
1171 | } | |
1172 | ||
1173 | for c := len(classes) - 1; c >= 0; c-- { | |
1174 | s = s.Union(classes[c].s) | |
1175 | } | |
1176 | ||
1177 | for _, element := range elements { | |
1178 | optimizeAlternates(element) | |
1179 | } | |
1180 | case TypePeekNot, TypePeekFor: | |
1181 | optimizeAlternates(n.Front()) | |
1182 | case TypeQuery, TypeStar: | |
1183 | _, s = optimizeAlternates(n.Front()) | |
1184 | case TypePlus, TypePush, TypeImplicitPush: | |
1185 | consumes, s = optimizeAlternates(n.Front()) | |
1186 | case TypeAction, TypeNil: | |
1187 | //empty | |
1188 | } | |
1189 | return | |
1190 | } | |
1191 | for _, element := range t.Slice() { | |
1192 | if element.GetType() == TypeRule { | |
1193 | optimizeAlternates(element) | |
1194 | break | |
1195 | } | |
1196 | } | |
1197 | ||
1198 | for i, _ := range cache { | |
1199 | cache[i].reached = false | |
1200 | } | |
1201 | firstPass = false | |
1202 | for _, element := range t.Slice() { | |
1203 | if element.GetType() == TypeRule { | |
1204 | optimizeAlternates(element) | |
1205 | break | |
1206 | } | |
1207 | } | |
1208 | } | |
1209 | ||
1210 | var buffer bytes.Buffer | |
1211 | defer func() { | |
1212 | fileSet := token.NewFileSet() | |
1213 | code, error := parser.ParseFile(fileSet, file, &buffer, parser.ParseComments) | |
1214 | if error != nil { | |
1215 | buffer.WriteTo(out) | |
1216 | fmt.Printf("%v: %v\n", file, error) | |
1217 | return | |
1218 | } | |
1219 | formatter := printer.Config{Mode: printer.TabIndent | printer.UseSpaces, Tabwidth: 8} | |
1220 | error = formatter.Fprint(out, fileSet, code) | |
1221 | if error != nil { | |
1222 | buffer.WriteTo(out) | |
1223 | fmt.Printf("%v: %v\n", file, error) | |
1224 | return | |
1225 | } | |
1226 | ||
1227 | }() | |
1228 | ||
1229 | _print := func(format string, a ...interface{}) { fmt.Fprintf(&buffer, format, a...) } | |
1230 | printSave := func(n uint) { _print("\n position%d, tokenIndex%d, depth%d := position, tokenIndex, depth", n, n, n) } | |
1231 | printRestore := func(n uint) { _print("\n position, tokenIndex, depth = position%d, tokenIndex%d, depth%d", n, n, n) } | |
1232 | printTemplate := func(s string) { | |
1233 | if error := template.Must(template.New("peg").Parse(s)).Execute(&buffer, t); error != nil { | |
1234 | panic(error) | |
1235 | } | |
1236 | } | |
1237 | ||
1238 | t.HasActions = counts[TypeAction] > 0 | |
1239 | t.HasPush = counts[TypePush] > 0 | |
1240 | t.HasCommit = counts[TypeCommit] > 0 | |
1241 | t.HasDot = counts[TypeDot] > 0 | |
1242 | t.HasCharacter = counts[TypeCharacter] > 0 | |
1243 | t.HasString = counts[TypeString] > 0 | |
1244 | t.HasRange = counts[TypeRange] > 0 | |
1245 | ||
1246 | var printRule func(n Node) | |
1247 | var compile func(expression Node, ko uint) | |
1248 | var label uint | |
1249 | labels := make(map[uint]bool) | |
1250 | printBegin := func() { _print("\n {") } | |
1251 | printEnd := func() { _print("\n }") } | |
1252 | printLabel := func(n uint) { | |
1253 | _print("\n") | |
1254 | if labels[n] { | |
1255 | _print(" l%d:\t", n) | |
1256 | } | |
1257 | } | |
1258 | printJump := func(n uint) { | |
1259 | _print("\n goto l%d", n) | |
1260 | labels[n] = true | |
1261 | } | |
1262 | printRule = func(n Node) { | |
1263 | switch n.GetType() { | |
1264 | case TypeRule: | |
1265 | _print("%v <- ", n) | |
1266 | printRule(n.Front()) | |
1267 | case TypeDot: | |
1268 | _print(".") | |
1269 | case TypeName: | |
1270 | _print("%v", n) | |
1271 | case TypeCharacter: | |
1272 | _print("'%v'", escape(n.String())) | |
1273 | case TypeString: | |
1274 | s := escape(n.String()) | |
1275 | _print("'%v'", s[1:len(s)-1]) | |
1276 | case TypeRange: | |
1277 | element := n.Front() | |
1278 | lower := element | |
1279 | element = element.Next() | |
1280 | upper := element | |
1281 | _print("[%v-%v]", escape(lower.String()), escape(upper.String())) | |
1282 | case TypePredicate: | |
1283 | _print("&{%v}", n) | |
1284 | case TypeStateChange: | |
1285 | _print("!{%v}", n) | |
1286 | case TypeAction: | |
1287 | _print("{%v}", n) | |
1288 | case TypeCommit: | |
1289 | _print("commit") | |
1290 | case TypeAlternate: | |
1291 | _print("(") | |
1292 | elements := n.Slice() | |
1293 | printRule(elements[0]) | |
1294 | for _, element := range elements[1:] { | |
1295 | _print(" / ") | |
1296 | printRule(element) | |
1297 | } | |
1298 | _print(")") | |
1299 | case TypeUnorderedAlternate: | |
1300 | _print("(") | |
1301 | elements := n.Slice() | |
1302 | printRule(elements[0]) | |
1303 | for _, element := range elements[1:] { | |
1304 | _print(" | ") | |
1305 | printRule(element) | |
1306 | } | |
1307 | _print(")") | |
1308 | case TypeSequence: | |
1309 | _print("(") | |
1310 | elements := n.Slice() | |
1311 | printRule(elements[0]) | |
1312 | for _, element := range elements[1:] { | |
1313 | _print(" ") | |
1314 | printRule(element) | |
1315 | } | |
1316 | _print(")") | |
1317 | case TypePeekFor: | |
1318 | _print("&") | |
1319 | printRule(n.Front()) | |
1320 | case TypePeekNot: | |
1321 | _print("!") | |
1322 | printRule(n.Front()) | |
1323 | case TypeQuery: | |
1324 | printRule(n.Front()) | |
1325 | _print("?") | |
1326 | case TypeStar: | |
1327 | printRule(n.Front()) | |
1328 | _print("*") | |
1329 | case TypePlus: | |
1330 | printRule(n.Front()) | |
1331 | _print("+") | |
1332 | case TypePush, TypeImplicitPush: | |
1333 | _print("<") | |
1334 | printRule(n.Front()) | |
1335 | _print(">") | |
1336 | case TypeNil: | |
1337 | default: | |
1338 | fmt.Fprintf(os.Stderr, "illegal node type: %v\n", n.GetType()) | |
1339 | } | |
1340 | } | |
1341 | compile = func(n Node, ko uint) { | |
1342 | switch n.GetType() { | |
1343 | case TypeRule: | |
1344 | fmt.Fprintf(os.Stderr, "internal error #1 (%v)\n", n) | |
1345 | case TypeDot: | |
1346 | _print("\n if !matchDot() {") | |
1347 | /*print("\n if buffer[position] == endSymbol {")*/ | |
1348 | printJump(ko) | |
1349 | /*print("}\nposition++")*/ | |
1350 | _print("}") | |
1351 | case TypeName: | |
1352 | name := n.String() | |
1353 | rule := t.Rules[name] | |
1354 | if t.inline && t.rulesCount[name] == 1 { | |
1355 | compile(rule.Front(), ko) | |
1356 | return | |
1357 | } | |
1358 | _print("\n if !_rules[rule%v]() {", name /*rule.GetId()*/) | |
1359 | printJump(ko) | |
1360 | _print("}") | |
1361 | case TypeRange: | |
1362 | element := n.Front() | |
1363 | lower := element | |
1364 | element = element.Next() | |
1365 | upper := element | |
1366 | /*print("\n if !matchRange('%v', '%v') {", escape(lower.String()), escape(upper.String()))*/ | |
1367 | _print("\n if c := buffer[position]; c < rune('%v') || c > rune('%v') {", escape(lower.String()), escape(upper.String())) | |
1368 | printJump(ko) | |
1369 | _print("}\nposition++") | |
1370 | case TypeCharacter: | |
1371 | /*print("\n if !matchChar('%v') {", escape(n.String()))*/ | |
1372 | _print("\n if buffer[position] != rune('%v') {", escape(n.String())) | |
1373 | printJump(ko) | |
1374 | _print("}\nposition++") | |
1375 | case TypeString: | |
1376 | _print("\n if !matchString(%v) {", strconv.Quote(n.String())) | |
1377 | printJump(ko) | |
1378 | _print("}") | |
1379 | case TypePredicate: | |
1380 | _print("\n if !(%v) {", n) | |
1381 | printJump(ko) | |
1382 | _print("}") | |
1383 | case TypeStateChange: | |
1384 | _print("\n %v", n) | |
1385 | case TypeAction: | |
1386 | case TypeCommit: | |
1387 | case TypePush: | |
1388 | fallthrough | |
1389 | case TypeImplicitPush: | |
1390 | ok, element := label, n.Front() | |
1391 | label++ | |
1392 | nodeType, rule := element.GetType(), element.Next() | |
1393 | printBegin() | |
1394 | if nodeType == TypeAction { | |
1395 | _print("\nadd(rule%v, position)", rule) | |
1396 | } else { | |
1397 | _print("\nposition%d := position", ok) | |
1398 | _print("\ndepth++") | |
1399 | compile(element, ko) | |
1400 | _print("\ndepth--") | |
1401 | _print("\nadd(rule%v, position%d)", rule, ok) | |
1402 | } | |
1403 | printEnd() | |
1404 | case TypeAlternate: | |
1405 | ok := label | |
1406 | label++ | |
1407 | printBegin() | |
1408 | elements := n.Slice() | |
1409 | printSave(ok) | |
1410 | for _, element := range elements[:len(elements)-1] { | |
1411 | next := label | |
1412 | label++ | |
1413 | compile(element, next) | |
1414 | printJump(ok) | |
1415 | printLabel(next) | |
1416 | printRestore(ok) | |
1417 | } | |
1418 | compile(elements[len(elements)-1], ko) | |
1419 | printEnd() | |
1420 | printLabel(ok) | |
1421 | case TypeUnorderedAlternate: | |
1422 | done, ok := ko, label | |
1423 | label++ | |
1424 | printBegin() | |
1425 | _print("\n switch buffer[position] {") | |
1426 | elements := n.Slice() | |
1427 | elements, last := elements[:len(elements)-1], elements[len(elements)-1].Front().Next() | |
1428 | for _, element := range elements { | |
1429 | sequence := element.Front() | |
1430 | class := sequence.Front() | |
1431 | sequence = sequence.Next() | |
1432 | _print("\n case") | |
1433 | comma := false | |
1434 | for _, character := range class.Slice() { | |
1435 | if comma { | |
1436 | _print(",") | |
1437 | } else { | |
1438 | comma = true | |
1439 | } | |
1440 | _print(" '%s'", escape(character.String())) | |
1441 | } | |
1442 | _print(":") | |
1443 | compile(sequence, done) | |
1444 | _print("\nbreak") | |
1445 | } | |
1446 | _print("\n default:") | |
1447 | compile(last, done) | |
1448 | _print("\nbreak") | |
1449 | _print("\n }") | |
1450 | printEnd() | |
1451 | printLabel(ok) | |
1452 | case TypeSequence: | |
1453 | for _, element := range n.Slice() { | |
1454 | compile(element, ko) | |
1455 | } | |
1456 | case TypePeekFor: | |
1457 | ok := label | |
1458 | label++ | |
1459 | printBegin() | |
1460 | printSave(ok) | |
1461 | compile(n.Front(), ko) | |
1462 | printRestore(ok) | |
1463 | printEnd() | |
1464 | case TypePeekNot: | |
1465 | ok := label | |
1466 | label++ | |
1467 | printBegin() | |
1468 | printSave(ok) | |
1469 | compile(n.Front(), ok) | |
1470 | printJump(ko) | |
1471 | printLabel(ok) | |
1472 | printRestore(ok) | |
1473 | printEnd() | |
1474 | case TypeQuery: | |
1475 | qko := label | |
1476 | label++ | |
1477 | qok := label | |
1478 | label++ | |
1479 | printBegin() | |
1480 | printSave(qko) | |
1481 | compile(n.Front(), qko) | |
1482 | printJump(qok) | |
1483 | printLabel(qko) | |
1484 | printRestore(qko) | |
1485 | printEnd() | |
1486 | printLabel(qok) | |
1487 | case TypeStar: | |
1488 | again := label | |
1489 | label++ | |
1490 | out := label | |
1491 | label++ | |
1492 | printLabel(again) | |
1493 | printBegin() | |
1494 | printSave(out) | |
1495 | compile(n.Front(), out) | |
1496 | printJump(again) | |
1497 | printLabel(out) | |
1498 | printRestore(out) | |
1499 | printEnd() | |
1500 | case TypePlus: | |
1501 | again := label | |
1502 | label++ | |
1503 | out := label | |
1504 | label++ | |
1505 | compile(n.Front(), ko) | |
1506 | printLabel(again) | |
1507 | printBegin() | |
1508 | printSave(out) | |
1509 | compile(n.Front(), out) | |
1510 | printJump(again) | |
1511 | printLabel(out) | |
1512 | printRestore(out) | |
1513 | printEnd() | |
1514 | case TypeNil: | |
1515 | default: | |
1516 | fmt.Fprintf(os.Stderr, "illegal node type: %v\n", n.GetType()) | |
1517 | } | |
1518 | } | |
1519 | ||
1520 | /* lets figure out which jump labels are going to be used with this dry compile */ | |
1521 | printTemp, _print := _print, func(format string, a ...interface{}) {} | |
1522 | for _, element := range t.Slice() { | |
1523 | if element.GetType() != TypeRule { | |
1524 | continue | |
1525 | } | |
1526 | expression := element.Front() | |
1527 | if expression.GetType() == TypeNil { | |
1528 | continue | |
1529 | } | |
1530 | ko := label | |
1531 | label++ | |
1532 | if count, ok := t.rulesCount[element.String()]; !ok { | |
1533 | continue | |
1534 | } else if t.inline && count == 1 && ko != 0 { | |
1535 | continue | |
1536 | } | |
1537 | compile(expression, ko) | |
1538 | } | |
1539 | _print, label = printTemp, 0 | |
1540 | ||
1541 | /* now for the real compile pass */ | |
1542 | t.PegRuleType = "uint8" | |
1543 | if length := int64(t.Len()); length > math.MaxUint32 { | |
1544 | t.PegRuleType = "uint64" | |
1545 | } else if length > math.MaxUint16 { | |
1546 | t.PegRuleType = "uint32" | |
1547 | } else if length > math.MaxUint8 { | |
1548 | t.PegRuleType = "uint16" | |
1549 | } | |
1550 | printTemplate(pegHeaderTemplate) | |
1551 | for _, element := range t.Slice() { | |
1552 | if element.GetType() != TypeRule { | |
1553 | continue | |
1554 | } | |
1555 | expression := element.Front() | |
1556 | if implicit := expression.Front(); expression.GetType() == TypeNil || implicit.GetType() == TypeNil { | |
1557 | if element.String() != "PegText" { | |
1558 | fmt.Fprintf(os.Stderr, "rule '%v' used but not defined\n", element) | |
1559 | } | |
1560 | _print("\n nil,") | |
1561 | continue | |
1562 | } | |
1563 | ko := label | |
1564 | label++ | |
1565 | _print("\n /* %v ", element.GetId()) | |
1566 | printRule(element) | |
1567 | _print(" */") | |
1568 | if count, ok := t.rulesCount[element.String()]; !ok { | |
1569 | fmt.Fprintf(os.Stderr, "rule '%v' defined but not used\n", element) | |
1570 | _print("\n nil,") | |
1571 | continue | |
1572 | } else if t.inline && count == 1 && ko != 0 { | |
1573 | _print("\n nil,") | |
1574 | continue | |
1575 | } | |
1576 | _print("\n func() bool {") | |
1577 | if labels[ko] { | |
1578 | printSave(ko) | |
1579 | } | |
1580 | compile(expression, ko) | |
1581 | //print("\n fmt.Printf(\"%v\\n\")", element.String()) | |
1582 | _print("\n return true") | |
1583 | if labels[ko] { | |
1584 | printLabel(ko) | |
1585 | printRestore(ko) | |
1586 | _print("\n return false") | |
1587 | } | |
1588 | _print("\n },") | |
1589 | } | |
1590 | _print("\n }\n p.rules = _rules") | |
1591 | _print("\n}\n") | |
1592 | } |
9 | 9 | |
10 | 10 | package main |
11 | 11 | |
12 | import "github.com/pointlander/peg/tree" | |
13 | ||
12 | 14 | # parser declaration |
13 | 15 | |
14 | 16 | type Peg Peg { |
15 | *Tree | |
17 | *tree.Tree | |
16 | 18 | } |
17 | 19 | |
18 | 20 | # Hierarchical syntax |
22 | 24 | 'Peg' Spacing Action { p.AddState(text) } |
23 | 25 | Definition+ EndOfFile |
24 | 26 | |
25 | Import <- 'import' Spacing ["] < [a-zA-Z_/.\-]+ > ["] Spacing { p.AddImport(text) } | |
27 | Import <- 'import' Spacing (MultiImport / SingleImport) Spacing | |
28 | SingleImport <- ImportName | |
29 | MultiImport <- '(' Spacing (ImportName '\n' Spacing)* Spacing ')' | |
30 | ||
31 | ImportName <- ["] < [0-9a-zA-Z_/.\-]+ > ["] { p.AddImport(text) } | |
26 | 32 | |
27 | 33 | Definition <- Identifier { p.AddRule(text) } |
28 | 34 | LeftArrow Expression { p.AddExpression() } &(Identifier LeftArrow / !.) |
109 | 115 | SpaceComment <- (Space / Comment) |
110 | 116 | Spacing <- SpaceComment* |
111 | 117 | MustSpacing <- SpaceComment+ |
112 | Comment <- '#' (!EndOfLine .)* EndOfLine | |
118 | Comment <- ('#' / '//') (!EndOfLine .)* EndOfLine | |
113 | 119 | Space <- ' ' / '\t' / EndOfLine |
114 | 120 | EndOfLine <- '\r\n' / '\n' / '\r' |
115 | 121 | EndOfFile <- !. |
0 | package main | |
1 | ||
2 | // Code generated by ./peg -inline -switch peg.peg DO NOT EDIT. | |
3 | ||
4 | import ( | |
5 | "bytes" | |
6 | "fmt" | |
7 | "github.com/pointlander/peg/tree" | |
8 | "io" | |
9 | "os" | |
10 | "sort" | |
11 | "strconv" | |
12 | ) | |
13 | ||
// endSymbol is the sentinel rune appended to the input buffer by reset;
// it is one past the last valid Unicode code point (0x110000), so it can
// never match a literal and cleanly terminates matchDot.
const endSymbol rune = 1114112

/* The rule types inferred from the grammar are below. */
type pegRule uint8

// Rule identifiers, one per grammar rule plus the generated ActionN
// rules. The generated parser indexes both the rules table and the
// rul3s name table with these values, so the order below must not be
// changed independently of the rest of this generated file.
const (
	ruleUnknown pegRule = iota
	ruleGrammar
	ruleImport
	ruleSingleImport
	ruleMultiImport
	ruleImportName
	ruleDefinition
	ruleExpression
	ruleSequence
	rulePrefix
	ruleSuffix
	rulePrimary
	ruleIdentifier
	ruleIdentStart
	ruleIdentCont
	ruleLiteral
	ruleClass
	ruleRanges
	ruleDoubleRanges
	ruleRange
	ruleDoubleRange
	ruleChar
	ruleDoubleChar
	ruleEscape
	ruleLeftArrow
	ruleSlash
	ruleAnd
	ruleNot
	ruleQuestion
	ruleStar
	rulePlus
	ruleOpen
	ruleClose
	ruleDot
	ruleSpaceComment
	ruleSpacing
	ruleMustSpacing
	ruleComment
	ruleSpace
	ruleEndOfLine
	ruleEndOfFile
	ruleAction
	ruleActionBody
	ruleBegin
	ruleEnd
	ruleAction0
	ruleAction1
	ruleAction2
	rulePegText
	ruleAction3
	ruleAction4
	ruleAction5
	ruleAction6
	ruleAction7
	ruleAction8
	ruleAction9
	ruleAction10
	ruleAction11
	ruleAction12
	ruleAction13
	ruleAction14
	ruleAction15
	ruleAction16
	ruleAction17
	ruleAction18
	ruleAction19
	ruleAction20
	ruleAction21
	ruleAction22
	ruleAction23
	ruleAction24
	ruleAction25
	ruleAction26
	ruleAction27
	ruleAction28
	ruleAction29
	ruleAction30
	ruleAction31
	ruleAction32
	ruleAction33
	ruleAction34
	ruleAction35
	ruleAction36
	ruleAction37
	ruleAction38
	ruleAction39
	ruleAction40
	ruleAction41
	ruleAction42
	ruleAction43
	ruleAction44
	ruleAction45
	ruleAction46
	ruleAction47
	ruleAction48
)
116 | ||
// rul3s maps a pegRule value to its human-readable name; indexed by the
// pegRule constants above, so entry order must stay in lockstep with them.
var rul3s = [...]string{
	"Unknown",
	"Grammar",
	"Import",
	"SingleImport",
	"MultiImport",
	"ImportName",
	"Definition",
	"Expression",
	"Sequence",
	"Prefix",
	"Suffix",
	"Primary",
	"Identifier",
	"IdentStart",
	"IdentCont",
	"Literal",
	"Class",
	"Ranges",
	"DoubleRanges",
	"Range",
	"DoubleRange",
	"Char",
	"DoubleChar",
	"Escape",
	"LeftArrow",
	"Slash",
	"And",
	"Not",
	"Question",
	"Star",
	"Plus",
	"Open",
	"Close",
	"Dot",
	"SpaceComment",
	"Spacing",
	"MustSpacing",
	"Comment",
	"Space",
	"EndOfLine",
	"EndOfFile",
	"Action",
	"ActionBody",
	"Begin",
	"End",
	"Action0",
	"Action1",
	"Action2",
	"PegText",
	"Action3",
	"Action4",
	"Action5",
	"Action6",
	"Action7",
	"Action8",
	"Action9",
	"Action10",
	"Action11",
	"Action12",
	"Action13",
	"Action14",
	"Action15",
	"Action16",
	"Action17",
	"Action18",
	"Action19",
	"Action20",
	"Action21",
	"Action22",
	"Action23",
	"Action24",
	"Action25",
	"Action26",
	"Action27",
	"Action28",
	"Action29",
	"Action30",
	"Action31",
	"Action32",
	"Action33",
	"Action34",
	"Action35",
	"Action36",
	"Action37",
	"Action38",
	"Action39",
	"Action40",
	"Action41",
	"Action42",
	"Action43",
	"Action44",
	"Action45",
	"Action46",
	"Action47",
	"Action48",
}
214 | ||
// token32 is one recorded parse event: the rule that matched and the
// half-open [begin, end) rune span it covers in the input buffer.
type token32 struct {
	pegRule
	begin, end uint32
}
219 | ||
220 | func (t *token32) String() string { | |
221 | return fmt.Sprintf("\x1B[34m%v\x1B[m %v %v", rul3s[t.pegRule], t.begin, t.end) | |
222 | } | |
223 | ||
// node32 is a node of the reconstructed syntax tree: a token plus links
// to its first child (up) and next sibling (next).
type node32 struct {
	token32
	up, next *node32
}
228 | ||
229 | func (node *node32) print(w io.Writer, pretty bool, buffer string) { | |
230 | var print func(node *node32, depth int) | |
231 | print = func(node *node32, depth int) { | |
232 | for node != nil { | |
233 | for c := 0; c < depth; c++ { | |
234 | fmt.Fprintf(w, " ") | |
235 | } | |
236 | rule := rul3s[node.pegRule] | |
237 | quote := strconv.Quote(string(([]rune(buffer)[node.begin:node.end]))) | |
238 | if !pretty { | |
239 | fmt.Fprintf(w, "%v %v\n", rule, quote) | |
240 | } else { | |
241 | fmt.Fprintf(w, "\x1B[36m%v\x1B[m %v\n", rule, quote) | |
242 | } | |
243 | if node.up != nil { | |
244 | print(node.up, depth+1) | |
245 | } | |
246 | node = node.next | |
247 | } | |
248 | } | |
249 | print(node, 0) | |
250 | } | |
251 | ||
252 | func (node *node32) Print(w io.Writer, buffer string) { | |
253 | node.print(w, false, buffer) | |
254 | } | |
255 | ||
256 | func (node *node32) PrettyPrint(w io.Writer, buffer string) { | |
257 | node.print(w, true, buffer) | |
258 | } | |
259 | ||
// tokens32 is the flat, append-only store of tokens recorded while
// parsing; the syntax tree is reconstructed from it on demand by AST.
type tokens32 struct {
	tree []token32
}
263 | ||
264 | func (t *tokens32) Trim(length uint32) { | |
265 | t.tree = t.tree[:length] | |
266 | } | |
267 | ||
268 | func (t *tokens32) Print() { | |
269 | for _, token := range t.tree { | |
270 | fmt.Println(token.String()) | |
271 | } | |
272 | } | |
273 | ||
// AST folds the flat token stream into a linked parse tree and returns
// its root (nil if no tokens were recorded). Tokens arrive with a parent
// appearing after its children and spanning them, so a stack of pending
// nodes is kept: every stacked node whose span is contained in the
// incoming token's span is popped and becomes a child of the new node.
func (t *tokens32) AST() *node32 {
	// element is a link in the intrusive stack of pending nodes.
	type element struct {
		node *node32
		down *element
	}
	tokens := t.Tokens()
	var stack *element
	for _, token := range tokens {
		if token.begin == token.end {
			// Zero-width tokens (pure actions) carry no tree structure.
			continue
		}
		node := &node32{token32: token}
		for stack != nil && stack.node.begin >= token.begin && stack.node.end <= token.end {
			// Pop a contained node; prepending each pop leaves the
			// earliest child at the head of node.up.
			stack.node.next = node.up
			node.up = stack.node
			stack = stack.down
		}
		stack = &element{node: node, down: stack}
	}
	if stack != nil {
		return stack.node
	}
	return nil
}
298 | ||
299 | func (t *tokens32) PrintSyntaxTree(buffer string) { | |
300 | t.AST().Print(os.Stdout, buffer) | |
301 | } | |
302 | ||
303 | func (t *tokens32) WriteSyntaxTree(w io.Writer, buffer string) { | |
304 | t.AST().Print(w, buffer) | |
305 | } | |
306 | ||
307 | func (t *tokens32) PrettyPrintSyntaxTree(buffer string) { | |
308 | t.AST().PrettyPrint(os.Stdout, buffer) | |
309 | } | |
310 | ||
311 | func (t *tokens32) Add(rule pegRule, begin, end, index uint32) { | |
312 | tree, i := t.tree, int(index) | |
313 | if i >= len(tree) { | |
314 | t.tree = append(tree, token32{pegRule: rule, begin: begin, end: end}) | |
315 | return | |
316 | } | |
317 | tree[i] = token32{pegRule: rule, begin: begin, end: end} | |
318 | } | |
319 | ||
// Tokens returns the raw recorded token slice (not a copy).
func (t *tokens32) Tokens() []token32 {
	return t.tree
}
323 | ||
// Peg is the generated parser for the peg grammar itself. It embeds
// tree.Tree, which collects the AST built by the grammar actions.
type Peg struct {
	*tree.Tree

	Buffer         string               // input text to parse
	buffer         []rune               // Buffer as runes, terminated by endSymbol
	rules          [95]func() bool      // one matcher per pegRule; index 0 is nil
	parse          func(rule ...int) error // parse entry point, installed by Init
	reset          func()               // resets position/token state, installed by Init
	Pretty         bool                 // enable ANSI colors in dumps and errors
	disableMemoize bool                 // turn off packrat memoization
	tokens32                            // recorded token stream
}
336 | ||
// Parse runs the parser, optionally starting from the given rule index
// (defaults to rule 1, Grammar); it delegates to the closure installed
// by Init and returns a *parseError on failure.
func (p *Peg) Parse(rule ...int) error {
	return p.parse(rule...)
}
340 | ||
// Reset rewinds the parser so Buffer can be (re)parsed; it delegates to
// the closure installed by Init.
func (p *Peg) Reset() {
	p.reset()
}
344 | ||
// textPosition is a human-readable location: 1-based line number and
// symbol (column) within that line.
type textPosition struct {
	line, symbol int
}

// textPositionMap maps a rune offset in the buffer to its textPosition.
type textPositionMap map[int]textPosition
350 | ||
351 | func translatePositions(buffer []rune, positions []int) textPositionMap { | |
352 | length, translations, j, line, symbol := len(positions), make(textPositionMap, len(positions)), 0, 1, 0 | |
353 | sort.Ints(positions) | |
354 | ||
355 | search: | |
356 | for i, c := range buffer { | |
357 | if c == '\n' { | |
358 | line, symbol = line+1, 0 | |
359 | } else { | |
360 | symbol++ | |
361 | } | |
362 | if i == positions[j] { | |
363 | translations[positions[j]] = textPosition{line, symbol} | |
364 | for j++; j < length; j++ { | |
365 | if i != positions[j] { | |
366 | continue search | |
367 | } | |
368 | } | |
369 | break search | |
370 | } | |
371 | } | |
372 | ||
373 | return translations | |
374 | } | |
375 | ||
// parseError reports the furthest token the parser matched before
// failing (max), along with the parser that produced it.
type parseError struct {
	p   *Peg
	max token32
}
380 | ||
381 | func (e *parseError) Error() string { | |
382 | tokens, err := []token32{e.max}, "\n" | |
383 | positions, p := make([]int, 2*len(tokens)), 0 | |
384 | for _, token := range tokens { | |
385 | positions[p], p = int(token.begin), p+1 | |
386 | positions[p], p = int(token.end), p+1 | |
387 | } | |
388 | translations := translatePositions(e.p.buffer, positions) | |
389 | format := "parse error near %v (line %v symbol %v - line %v symbol %v):\n%v\n" | |
390 | if e.p.Pretty { | |
391 | format = "parse error near \x1B[34m%v\x1B[m (line %v symbol %v - line %v symbol %v):\n%v\n" | |
392 | } | |
393 | for _, token := range tokens { | |
394 | begin, end := int(token.begin), int(token.end) | |
395 | err += fmt.Sprintf(format, | |
396 | rul3s[token.pegRule], | |
397 | translations[begin].line, translations[begin].symbol, | |
398 | translations[end].line, translations[end].symbol, | |
399 | strconv.Quote(string(e.p.buffer[begin:end]))) | |
400 | } | |
401 | ||
402 | return err | |
403 | } | |
404 | ||
405 | func (p *Peg) PrintSyntaxTree() { | |
406 | if p.Pretty { | |
407 | p.tokens32.PrettyPrintSyntaxTree(p.Buffer) | |
408 | } else { | |
409 | p.tokens32.PrintSyntaxTree(p.Buffer) | |
410 | } | |
411 | } | |
412 | ||
// WriteSyntaxTree dumps the parse tree (uncolored) to the given writer.
func (p *Peg) WriteSyntaxTree(w io.Writer) {
	p.tokens32.WriteSyntaxTree(w, p.Buffer)
}
416 | ||
417 | func (p *Peg) SprintSyntaxTree() string { | |
418 | var b bytes.Buffer | |
419 | p.WriteSyntaxTree(&b) | |
420 | return b.String() | |
421 | } | |
422 | ||
// Execute replays the recorded token stream, running each grammar
// action against the matched input. A PegText token updates text (the
// most recent capture); each ActionN arm performs the tree-building
// call written for that action in the grammar. Call only after a
// successful Parse. The arm-to-call mapping is generated — do not edit
// it by hand.
func (p *Peg) Execute() {
	buffer, _buffer, text, begin, end := p.Buffer, p.buffer, "", 0, 0
	for _, token := range p.Tokens() {
		switch token.pegRule {

		case rulePegText:
			begin, end = int(token.begin), int(token.end)
			text = string(_buffer[begin:end])

		case ruleAction0:
			p.AddPackage(text)
		case ruleAction1:
			p.AddPeg(text)
		case ruleAction2:
			p.AddState(text)
		case ruleAction3:
			p.AddImport(text)
		case ruleAction4:
			p.AddRule(text)
		case ruleAction5:
			p.AddExpression()
		case ruleAction6:
			p.AddAlternate()
		case ruleAction7:
			p.AddNil()
			p.AddAlternate()
		case ruleAction8:
			p.AddNil()
		case ruleAction9:
			p.AddSequence()
		case ruleAction10:
			p.AddPredicate(text)
		case ruleAction11:
			p.AddStateChange(text)
		case ruleAction12:
			p.AddPeekFor()
		case ruleAction13:
			p.AddPeekNot()
		case ruleAction14:
			p.AddQuery()
		case ruleAction15:
			p.AddStar()
		case ruleAction16:
			p.AddPlus()
		case ruleAction17:
			p.AddName(text)
		case ruleAction18:
			p.AddDot()
		case ruleAction19:
			p.AddAction(text)
		case ruleAction20:
			p.AddPush()
		case ruleAction21:
			p.AddSequence()
		case ruleAction22:
			p.AddSequence()
		case ruleAction23:
			p.AddPeekNot()
			p.AddDot()
			p.AddSequence()
		case ruleAction24:
			p.AddPeekNot()
			p.AddDot()
			p.AddSequence()
		case ruleAction25:
			p.AddAlternate()
		case ruleAction26:
			p.AddAlternate()
		case ruleAction27:
			p.AddRange()
		case ruleAction28:
			p.AddDoubleRange()
		case ruleAction29:
			p.AddCharacter(text)
		case ruleAction30:
			p.AddDoubleCharacter(text)
		case ruleAction31:
			p.AddCharacter(text)
		case ruleAction32:
			p.AddCharacter("\a")
		case ruleAction33:
			p.AddCharacter("\b")
		case ruleAction34:
			p.AddCharacter("\x1B")
		case ruleAction35:
			p.AddCharacter("\f")
		case ruleAction36:
			p.AddCharacter("\n")
		case ruleAction37:
			p.AddCharacter("\r")
		case ruleAction38:
			p.AddCharacter("\t")
		case ruleAction39:
			p.AddCharacter("\v")
		case ruleAction40:
			p.AddCharacter("'")
		case ruleAction41:
			p.AddCharacter("\"")
		case ruleAction42:
			p.AddCharacter("[")
		case ruleAction43:
			p.AddCharacter("]")
		case ruleAction44:
			p.AddCharacter("-")
		case ruleAction45:
			p.AddHexaCharacter(text)
		case ruleAction46:
			p.AddOctalCharacter(text)
		case ruleAction47:
			p.AddOctalCharacter(text)
		case ruleAction48:
			p.AddCharacter("\\")

		}
	}
	// Keep the locals referenced even when the grammar uses none of them.
	_, _, _, _, _ = buffer, _buffer, text, begin, end
}
540 | ||
541 | func Pretty(pretty bool) func(*Peg) error { | |
542 | return func(p *Peg) error { | |
543 | p.Pretty = pretty | |
544 | return nil | |
545 | } | |
546 | } | |
547 | ||
548 | func Size(size int) func(*Peg) error { | |
549 | return func(p *Peg) error { | |
550 | p.tokens32 = tokens32{tree: make([]token32, 0, size)} | |
551 | return nil | |
552 | } | |
553 | } | |
554 | ||
555 | func DisableMemoize() func(*Peg) error { | |
556 | return func(p *Peg) error { | |
557 | p.disableMemoize = true | |
558 | return nil | |
559 | } | |
560 | } | |
561 | ||
// memo is a packrat-memoization entry: whether the rule matched at the
// position, and if so the tokens it recorded (replayed on a cache hit).
type memo struct {
	Matched bool
	Partial []token32
}
566 | ||
// memoKey identifies a memoization entry by rule index and input position.
type memoKey struct {
	Rule     uint32
	Position uint32
}
571 | ||
572 | func (p *Peg) Init(options ...func(*Peg) error) error { | |
573 | var ( | |
574 | max token32 | |
575 | position, tokenIndex uint32 | |
576 | buffer []rune | |
577 | memoization map[memoKey]memo | |
578 | ) | |
579 | for _, option := range options { | |
580 | err := option(p) | |
581 | if err != nil { | |
582 | return err | |
583 | } | |
584 | } | |
585 | p.reset = func() { | |
586 | max = token32{} | |
587 | position, tokenIndex = 0, 0 | |
588 | memoization = make(map[memoKey]memo) | |
589 | p.buffer = []rune(p.Buffer) | |
590 | if len(p.buffer) == 0 || p.buffer[len(p.buffer)-1] != endSymbol { | |
591 | p.buffer = append(p.buffer, endSymbol) | |
592 | } | |
593 | buffer = p.buffer | |
594 | } | |
595 | p.reset() | |
596 | ||
597 | _rules := p.rules | |
598 | tree := p.tokens32 | |
599 | p.parse = func(rule ...int) error { | |
600 | r := 1 | |
601 | if len(rule) > 0 { | |
602 | r = rule[0] | |
603 | } | |
604 | matches := p.rules[r]() | |
605 | p.tokens32 = tree | |
606 | if matches { | |
607 | p.Trim(tokenIndex) | |
608 | return nil | |
609 | } | |
610 | return &parseError{p, max} | |
611 | } | |
612 | ||
613 | add := func(rule pegRule, begin uint32) { | |
614 | tree.Add(rule, begin, position, tokenIndex) | |
615 | tokenIndex++ | |
616 | if begin != position && position > max.end { | |
617 | max = token32{rule, begin, position} | |
618 | } | |
619 | } | |
620 | ||
621 | memoize := func(rule uint32, begin uint32, tokenIndexStart uint32, matched bool) { | |
622 | if p.disableMemoize { | |
623 | return | |
624 | } | |
625 | key := memoKey{rule, begin} | |
626 | if !matched { | |
627 | memoization[key] = memo{Matched: false} | |
628 | } else { | |
629 | t := tree.tree[tokenIndexStart:tokenIndex] | |
630 | tokenCopy := make([]token32, len(t)) | |
631 | copy(tokenCopy, t) | |
632 | memoization[key] = memo{Matched: true, Partial: tokenCopy} | |
633 | } | |
634 | } | |
635 | ||
636 | memoizedResult := func(m memo) bool { | |
637 | if !m.Matched { | |
638 | return false | |
639 | } | |
640 | tree.tree = append(tree.tree[:tokenIndex], m.Partial...) | |
641 | tokenIndex += uint32(len(m.Partial)) | |
642 | position = m.Partial[len(m.Partial)-1].end | |
643 | if tree.tree[tokenIndex-1].begin != position && position > max.end { | |
644 | max = tree.tree[tokenIndex-1] | |
645 | } | |
646 | return true | |
647 | } | |
648 | ||
649 | matchDot := func() bool { | |
650 | if buffer[position] != endSymbol { | |
651 | position++ | |
652 | return true | |
653 | } | |
654 | return false | |
655 | } | |
656 | ||
657 | /*matchChar := func(c byte) bool { | |
658 | if buffer[position] == c { | |
659 | position++ | |
660 | return true | |
661 | } | |
662 | return false | |
663 | }*/ | |
664 | ||
665 | /*matchRange := func(lower byte, upper byte) bool { | |
666 | if c := buffer[position]; c >= lower && c <= upper { | |
667 | position++ | |
668 | return true | |
669 | } | |
670 | return false | |
671 | }*/ | |
672 | ||
673 | _rules = [...]func() bool{ | |
674 | nil, | |
675 | /* 0 Grammar <- <(Spacing ('p' 'a' 'c' 'k' 'a' 'g' 'e') MustSpacing Identifier Action0 Import* ('t' 'y' 'p' 'e') MustSpacing Identifier Action1 ('P' 'e' 'g') Spacing Action Action2 Definition+ EndOfFile)> */ | |
676 | func() bool { | |
677 | if memoized, ok := memoization[memoKey{0, position}]; ok { | |
678 | return memoizedResult(memoized) | |
679 | } | |
680 | position0, tokenIndex0 := position, tokenIndex | |
681 | { | |
682 | position1 := position | |
683 | if !_rules[ruleSpacing]() { | |
684 | goto l0 | |
685 | } | |
686 | if buffer[position] != rune('p') { | |
687 | goto l0 | |
688 | } | |
689 | position++ | |
690 | if buffer[position] != rune('a') { | |
691 | goto l0 | |
692 | } | |
693 | position++ | |
694 | if buffer[position] != rune('c') { | |
695 | goto l0 | |
696 | } | |
697 | position++ | |
698 | if buffer[position] != rune('k') { | |
699 | goto l0 | |
700 | } | |
701 | position++ | |
702 | if buffer[position] != rune('a') { | |
703 | goto l0 | |
704 | } | |
705 | position++ | |
706 | if buffer[position] != rune('g') { | |
707 | goto l0 | |
708 | } | |
709 | position++ | |
710 | if buffer[position] != rune('e') { | |
711 | goto l0 | |
712 | } | |
713 | position++ | |
714 | if !_rules[ruleMustSpacing]() { | |
715 | goto l0 | |
716 | } | |
717 | if !_rules[ruleIdentifier]() { | |
718 | goto l0 | |
719 | } | |
720 | { | |
721 | add(ruleAction0, position) | |
722 | } | |
723 | l3: | |
724 | { | |
725 | position4, tokenIndex4 := position, tokenIndex | |
726 | { | |
727 | position5 := position | |
728 | if buffer[position] != rune('i') { | |
729 | goto l4 | |
730 | } | |
731 | position++ | |
732 | if buffer[position] != rune('m') { | |
733 | goto l4 | |
734 | } | |
735 | position++ | |
736 | if buffer[position] != rune('p') { | |
737 | goto l4 | |
738 | } | |
739 | position++ | |
740 | if buffer[position] != rune('o') { | |
741 | goto l4 | |
742 | } | |
743 | position++ | |
744 | if buffer[position] != rune('r') { | |
745 | goto l4 | |
746 | } | |
747 | position++ | |
748 | if buffer[position] != rune('t') { | |
749 | goto l4 | |
750 | } | |
751 | position++ | |
752 | if !_rules[ruleSpacing]() { | |
753 | goto l4 | |
754 | } | |
755 | { | |
756 | position6, tokenIndex6 := position, tokenIndex | |
757 | { | |
758 | position8 := position | |
759 | if buffer[position] != rune('(') { | |
760 | goto l7 | |
761 | } | |
762 | position++ | |
763 | if !_rules[ruleSpacing]() { | |
764 | goto l7 | |
765 | } | |
766 | l9: | |
767 | { | |
768 | position10, tokenIndex10 := position, tokenIndex | |
769 | if !_rules[ruleImportName]() { | |
770 | goto l10 | |
771 | } | |
772 | if buffer[position] != rune('\n') { | |
773 | goto l10 | |
774 | } | |
775 | position++ | |
776 | if !_rules[ruleSpacing]() { | |
777 | goto l10 | |
778 | } | |
779 | goto l9 | |
780 | l10: | |
781 | position, tokenIndex = position10, tokenIndex10 | |
782 | } | |
783 | if !_rules[ruleSpacing]() { | |
784 | goto l7 | |
785 | } | |
786 | if buffer[position] != rune(')') { | |
787 | goto l7 | |
788 | } | |
789 | position++ | |
790 | add(ruleMultiImport, position8) | |
791 | } | |
792 | goto l6 | |
793 | l7: | |
794 | position, tokenIndex = position6, tokenIndex6 | |
795 | { | |
796 | position11 := position | |
797 | if !_rules[ruleImportName]() { | |
798 | goto l4 | |
799 | } | |
800 | add(ruleSingleImport, position11) | |
801 | } | |
802 | } | |
803 | l6: | |
804 | if !_rules[ruleSpacing]() { | |
805 | goto l4 | |
806 | } | |
807 | add(ruleImport, position5) | |
808 | } | |
809 | goto l3 | |
810 | l4: | |
811 | position, tokenIndex = position4, tokenIndex4 | |
812 | } | |
813 | if buffer[position] != rune('t') { | |
814 | goto l0 | |
815 | } | |
816 | position++ | |
817 | if buffer[position] != rune('y') { | |
818 | goto l0 | |
819 | } | |
820 | position++ | |
821 | if buffer[position] != rune('p') { | |
822 | goto l0 | |
823 | } | |
824 | position++ | |
825 | if buffer[position] != rune('e') { | |
826 | goto l0 | |
827 | } | |
828 | position++ | |
829 | if !_rules[ruleMustSpacing]() { | |
830 | goto l0 | |
831 | } | |
832 | if !_rules[ruleIdentifier]() { | |
833 | goto l0 | |
834 | } | |
835 | { | |
836 | add(ruleAction1, position) | |
837 | } | |
838 | if buffer[position] != rune('P') { | |
839 | goto l0 | |
840 | } | |
841 | position++ | |
842 | if buffer[position] != rune('e') { | |
843 | goto l0 | |
844 | } | |
845 | position++ | |
846 | if buffer[position] != rune('g') { | |
847 | goto l0 | |
848 | } | |
849 | position++ | |
850 | if !_rules[ruleSpacing]() { | |
851 | goto l0 | |
852 | } | |
853 | if !_rules[ruleAction]() { | |
854 | goto l0 | |
855 | } | |
856 | { | |
857 | add(ruleAction2, position) | |
858 | } | |
859 | { | |
860 | position16 := position | |
861 | if !_rules[ruleIdentifier]() { | |
862 | goto l0 | |
863 | } | |
864 | { | |
865 | add(ruleAction4, position) | |
866 | } | |
867 | if !_rules[ruleLeftArrow]() { | |
868 | goto l0 | |
869 | } | |
870 | if !_rules[ruleExpression]() { | |
871 | goto l0 | |
872 | } | |
873 | { | |
874 | add(ruleAction5, position) | |
875 | } | |
876 | { | |
877 | position19, tokenIndex19 := position, tokenIndex | |
878 | { | |
879 | position20, tokenIndex20 := position, tokenIndex | |
880 | if !_rules[ruleIdentifier]() { | |
881 | goto l21 | |
882 | } | |
883 | if !_rules[ruleLeftArrow]() { | |
884 | goto l21 | |
885 | } | |
886 | goto l20 | |
887 | l21: | |
888 | position, tokenIndex = position20, tokenIndex20 | |
889 | { | |
890 | position22, tokenIndex22 := position, tokenIndex | |
891 | if !matchDot() { | |
892 | goto l22 | |
893 | } | |
894 | goto l0 | |
895 | l22: | |
896 | position, tokenIndex = position22, tokenIndex22 | |
897 | } | |
898 | } | |
899 | l20: | |
900 | position, tokenIndex = position19, tokenIndex19 | |
901 | } | |
902 | add(ruleDefinition, position16) | |
903 | } | |
904 | l14: | |
905 | { | |
906 | position15, tokenIndex15 := position, tokenIndex | |
907 | { | |
908 | position23 := position | |
909 | if !_rules[ruleIdentifier]() { | |
910 | goto l15 | |
911 | } | |
912 | { | |
913 | add(ruleAction4, position) | |
914 | } | |
915 | if !_rules[ruleLeftArrow]() { | |
916 | goto l15 | |
917 | } | |
918 | if !_rules[ruleExpression]() { | |
919 | goto l15 | |
920 | } | |
921 | { | |
922 | add(ruleAction5, position) | |
923 | } | |
924 | { | |
925 | position26, tokenIndex26 := position, tokenIndex | |
926 | { | |
927 | position27, tokenIndex27 := position, tokenIndex | |
928 | if !_rules[ruleIdentifier]() { | |
929 | goto l28 | |
930 | } | |
931 | if !_rules[ruleLeftArrow]() { | |
932 | goto l28 | |
933 | } | |
934 | goto l27 | |
935 | l28: | |
936 | position, tokenIndex = position27, tokenIndex27 | |
937 | { | |
938 | position29, tokenIndex29 := position, tokenIndex | |
939 | if !matchDot() { | |
940 | goto l29 | |
941 | } | |
942 | goto l15 | |
943 | l29: | |
944 | position, tokenIndex = position29, tokenIndex29 | |
945 | } | |
946 | } | |
947 | l27: | |
948 | position, tokenIndex = position26, tokenIndex26 | |
949 | } | |
950 | add(ruleDefinition, position23) | |
951 | } | |
952 | goto l14 | |
953 | l15: | |
954 | position, tokenIndex = position15, tokenIndex15 | |
955 | } | |
956 | { | |
957 | position30 := position | |
958 | { | |
959 | position31, tokenIndex31 := position, tokenIndex | |
960 | if !matchDot() { | |
961 | goto l31 | |
962 | } | |
963 | goto l0 | |
964 | l31: | |
965 | position, tokenIndex = position31, tokenIndex31 | |
966 | } | |
967 | add(ruleEndOfFile, position30) | |
968 | } | |
969 | add(ruleGrammar, position1) | |
970 | } | |
971 | memoize(0, position0, tokenIndex0, true) | |
972 | return true | |
973 | l0: | |
974 | memoize(0, position0, tokenIndex0, false) | |
975 | position, tokenIndex = position0, tokenIndex0 | |
976 | return false | |
977 | }, | |
978 | /* 1 Import <- <('i' 'm' 'p' 'o' 'r' 't' Spacing (MultiImport / SingleImport) Spacing)> */ | |
979 | nil, | |
980 | /* 2 SingleImport <- <ImportName> */ | |
981 | nil, | |
982 | /* 3 MultiImport <- <('(' Spacing (ImportName '\n' Spacing)* Spacing ')')> */ | |
983 | nil, | |
984 | /* 4 ImportName <- <('"' <((&('-') '-') | (&('.') '.') | (&('/') '/') | (&('_') '_') | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z') [A-Z]) | (&('0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9') [0-9]) | (&('a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') [a-z]))+> '"' Action3)> */ | |
		// Rule 4 (ImportName): '"' <[-./_A-Za-z0-9]+> '"' Action3
		// Matches a double-quoted import path, capturing the inner text as
		// PegText and firing Action3. Machine-generated packrat parser code —
		// do not edit by hand: labels implement backtracking (l35 is the
		// rule's failure exit, l38/l39 drive the one-or-more loop) and
		// results are memoized per (rule, start position).
		func() bool {
			// Return a previously computed result for this rule/position.
			if memoized, ok := memoization[memoKey{4, position}]; ok {
				return memoizedResult(memoized)
			}
			position35, tokenIndex35 := position, tokenIndex
			{
				position36 := position
				// Opening '"'.
				if buffer[position] != rune('"') {
					goto l35
				}
				position++
				{
					position37 := position // start of the captured text (<...>)
					// First (mandatory) import-path character.
					{
						switch buffer[position] {
						case '-':
							if buffer[position] != rune('-') {
								goto l35
							}
							position++
						case '.':
							if buffer[position] != rune('.') {
								goto l35
							}
							position++
						case '/':
							if buffer[position] != rune('/') {
								goto l35
							}
							position++
						case '_':
							if buffer[position] != rune('_') {
								goto l35
							}
							position++
						case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z':
							if c := buffer[position]; c < rune('A') || c > rune('Z') {
								goto l35
							}
							position++
						case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
							if c := buffer[position]; c < rune('0') || c > rune('9') {
								goto l35
							}
							position++
						default:
							if c := buffer[position]; c < rune('a') || c > rune('z') {
								goto l35
							}
							position++
						}
					}

					// Zero-or-more further characters; l39 restores state when
					// the next character does not match, ending the loop.
				l38:
					{
						position39, tokenIndex39 := position, tokenIndex
						{
							switch buffer[position] {
							case '-':
								if buffer[position] != rune('-') {
									goto l39
								}
								position++
							case '.':
								if buffer[position] != rune('.') {
									goto l39
								}
								position++
							case '/':
								if buffer[position] != rune('/') {
									goto l39
								}
								position++
							case '_':
								if buffer[position] != rune('_') {
									goto l39
								}
								position++
							case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z':
								if c := buffer[position]; c < rune('A') || c > rune('Z') {
									goto l39
								}
								position++
							case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
								if c := buffer[position]; c < rune('0') || c > rune('9') {
									goto l39
								}
								position++
							default:
								if c := buffer[position]; c < rune('a') || c > rune('z') {
									goto l39
								}
								position++
							}
						}

						goto l38
					l39:
						position, tokenIndex = position39, tokenIndex39
					}
					add(rulePegText, position37)
				}
				// Closing '"'.
				if buffer[position] != rune('"') {
					goto l35
				}
				position++
				{
					add(ruleAction3, position)
				}
				add(ruleImportName, position36)
			}
			memoize(4, position35, tokenIndex35, true)
			return true
		l35:
			// Failure: memoize the negative result and backtrack.
			memoize(4, position35, tokenIndex35, false)
			position, tokenIndex = position35, tokenIndex35
			return false
		},
1103 | /* 5 Definition <- <(Identifier Action4 LeftArrow Expression Action5 &((Identifier LeftArrow) / !.))> */ | |
1104 | nil, | |
1105 | /* 6 Expression <- <((Sequence (Slash Sequence Action6)* (Slash Action7)?) / Action8)> */ | |
		// Rule 6 (Expression): (Sequence (Slash Sequence Action6)* (Slash Action7)?) / Action8
		// Machine-generated packrat parser code — do not edit by hand.
		// NOTE: this rule cannot fail — the second alternative (Action8, the
		// empty expression) always matches, which is why there is no failure
		// label and memoize is only ever called with success=true.
		func() bool {
			// Return a previously computed result for this rule/position.
			if memoized, ok := memoization[memoKey{6, position}]; ok {
				return memoizedResult(memoized)
			}
			position44, tokenIndex44 := position, tokenIndex
			{
				position45 := position
				{
					position46, tokenIndex46 := position, tokenIndex
					// First alternative: Sequence (Slash Sequence Action6)* (Slash Action7)?
					if !_rules[ruleSequence]() {
						goto l47
					}
				l48:
					{
						position49, tokenIndex49 := position, tokenIndex
						if !_rules[ruleSlash]() {
							goto l49
						}
						if !_rules[ruleSequence]() {
							goto l49
						}
						{
							add(ruleAction6, position)
						}
						goto l48
					l49:
						position, tokenIndex = position49, tokenIndex49
					}
					// Optional trailing Slash (Action7).
					{
						position51, tokenIndex51 := position, tokenIndex
						if !_rules[ruleSlash]() {
							goto l51
						}
						{
							add(ruleAction7, position)
						}
						goto l52
					l51:
						position, tokenIndex = position51, tokenIndex51
					}
				l52:
					goto l46
				l47:
					// Second alternative: the empty expression (Action8).
					position, tokenIndex = position46, tokenIndex46
					{
						add(ruleAction8, position)
					}
				}
			l46:
				add(ruleExpression, position45)
			}
			memoize(6, position44, tokenIndex44, true)
			return true
		},
1160 | /* 7 Sequence <- <(Prefix (Prefix Action9)*)> */ | |
		// Rule 7 (Sequence): Prefix (Prefix Action9)*
		// Machine-generated packrat parser code — do not edit by hand. One
		// mandatory Prefix followed by zero or more further Prefixes (each
		// firing Action9); l55 is the failure exit, l57/l58 drive the loop.
		func() bool {
			// Return a previously computed result for this rule/position.
			if memoized, ok := memoization[memoKey{7, position}]; ok {
				return memoizedResult(memoized)
			}
			position55, tokenIndex55 := position, tokenIndex
			{
				position56 := position
				if !_rules[rulePrefix]() {
					goto l55
				}
			l57:
				{
					position58, tokenIndex58 := position, tokenIndex
					if !_rules[rulePrefix]() {
						goto l58
					}
					{
						add(ruleAction9, position)
					}
					goto l57
				l58:
					position, tokenIndex = position58, tokenIndex58
				}
				add(ruleSequence, position56)
			}
			memoize(7, position55, tokenIndex55, true)
			return true
		l55:
			// Failure: memoize the negative result and backtrack.
			memoize(7, position55, tokenIndex55, false)
			position, tokenIndex = position55, tokenIndex55
			return false
		},
1193 | /* 8 Prefix <- <((And Action Action10) / (Not Action Action11) / ((&('!') (Not Suffix Action13)) | (&('&') (And Suffix Action12)) | (&('"' | '\'' | '(' | '.' | '<' | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z' | '[' | '_' | 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z' | '{') Suffix)))> */ | |
		// Rule 8 (Prefix): (And Action Action10) / (Not Action Action11) /
		// (!-prefixed, &-prefixed, or bare Suffix, dispatched on the next
		// character). Machine-generated packrat parser code — do not edit by
		// hand. l63/l65 step through the ordered alternatives; l60 is the
		// rule's failure exit; l62 is the shared success join point.
		func() bool {
			// Return a previously computed result for this rule/position.
			if memoized, ok := memoization[memoKey{8, position}]; ok {
				return memoizedResult(memoized)
			}
			position60, tokenIndex60 := position, tokenIndex
			{
				position61 := position
				{
					position62, tokenIndex62 := position, tokenIndex
					// Alternative 1: And Action Action10.
					if !_rules[ruleAnd]() {
						goto l63
					}
					if !_rules[ruleAction]() {
						goto l63
					}
					{
						add(ruleAction10, position)
					}
					goto l62
				l63:
					// Alternative 2: Not Action Action11.
					position, tokenIndex = position62, tokenIndex62
					if !_rules[ruleNot]() {
						goto l65
					}
					if !_rules[ruleAction]() {
						goto l65
					}
					{
						add(ruleAction11, position)
					}
					goto l62
				l65:
					// Alternative 3: dispatch on the lookahead character.
					position, tokenIndex = position62, tokenIndex62
					{
						switch buffer[position] {
						case '!':
							if !_rules[ruleNot]() {
								goto l60
							}
							if !_rules[ruleSuffix]() {
								goto l60
							}
							{
								add(ruleAction13, position)
							}
						case '&':
							if !_rules[ruleAnd]() {
								goto l60
							}
							if !_rules[ruleSuffix]() {
								goto l60
							}
							{
								add(ruleAction12, position)
							}
						default:
							if !_rules[ruleSuffix]() {
								goto l60
							}
						}
					}

				}
			l62:
				add(rulePrefix, position61)
			}
			memoize(8, position60, tokenIndex60, true)
			return true
		l60:
			// Failure: memoize the negative result and backtrack.
			memoize(8, position60, tokenIndex60, false)
			position, tokenIndex = position60, tokenIndex60
			return false
		},
1267 | /* 9 Suffix <- <(Primary ((&('+') (Plus Action16)) | (&('*') (Star Action15)) | (&('?') (Question Action14)))?)> */ | |
		// Rule 9 (Suffix): Primary followed by an optional repetition marker
		// ('+' Plus/Action16, '*' Star/Action15, '?' Question/Action14).
		// The Primary sub-parse is inlined here and dispatched on the
		// lookahead character: '<' capture block, '{' action, '.' dot,
		// '[' character class, quote literals, '(' grouping, and identifier
		// references (rejected when followed by LeftArrow, i.e. when they
		// begin the next definition). Machine-generated packrat parser code —
		// do not edit by hand: l70 is the rule's failure exit and the dense
		// l8x/l9x/l10x label pairs implement per-alternative backtracking.
		func() bool {
			// Return a previously computed result for this rule/position.
			if memoized, ok := memoization[memoKey{9, position}]; ok {
				return memoizedResult(memoized)
			}
			position70, tokenIndex70 := position, tokenIndex
			{
				position71 := position
				{
					position72 := position // start of the inlined Primary
					{
						switch buffer[position] {
						case '<':
							// Begin Expression End Action20 (capture block).
							{
								position74 := position
								if buffer[position] != rune('<') {
									goto l70
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l70
								}
								add(ruleBegin, position74)
							}
							if !_rules[ruleExpression]() {
								goto l70
							}
							{
								position75 := position
								if buffer[position] != rune('>') {
									goto l70
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l70
								}
								add(ruleEnd, position75)
							}
							{
								add(ruleAction20, position)
							}
						case '{':
							// Action Action19 (semantic action block).
							if !_rules[ruleAction]() {
								goto l70
							}
							{
								add(ruleAction19, position)
							}
						case '.':
							// Dot Action18 (match any character).
							{
								position78 := position
								if buffer[position] != rune('.') {
									goto l70
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l70
								}
								add(ruleDot, position78)
							}
							{
								add(ruleAction18, position)
							}
						case '[':
							// Class: "[[...]]" double-class form first, then
							// the plain "[...]" form; '^' negates (Action23/24).
							{
								position80 := position
								{
									position81, tokenIndex81 := position, tokenIndex
									if buffer[position] != rune('[') {
										goto l82
									}
									position++
									if buffer[position] != rune('[') {
										goto l82
									}
									position++
									{
										position83, tokenIndex83 := position, tokenIndex
										{
											position85, tokenIndex85 := position, tokenIndex
											if buffer[position] != rune('^') {
												goto l86
											}
											position++
											if !_rules[ruleDoubleRanges]() {
												goto l86
											}
											{
												add(ruleAction23, position)
											}
											goto l85
										l86:
											position, tokenIndex = position85, tokenIndex85
											if !_rules[ruleDoubleRanges]() {
												goto l83
											}
										}
									l85:
										goto l84
									l83:
										position, tokenIndex = position83, tokenIndex83
									}
								l84:
									if buffer[position] != rune(']') {
										goto l82
									}
									position++
									if buffer[position] != rune(']') {
										goto l82
									}
									position++
									goto l81
								l82:
									position, tokenIndex = position81, tokenIndex81
									if buffer[position] != rune('[') {
										goto l70
									}
									position++
									{
										position88, tokenIndex88 := position, tokenIndex
										{
											position90, tokenIndex90 := position, tokenIndex
											if buffer[position] != rune('^') {
												goto l91
											}
											position++
											if !_rules[ruleRanges]() {
												goto l91
											}
											{
												add(ruleAction24, position)
											}
											goto l90
										l91:
											position, tokenIndex = position90, tokenIndex90
											if !_rules[ruleRanges]() {
												goto l88
											}
										}
									l90:
										goto l89
									l88:
										position, tokenIndex = position88, tokenIndex88
									}
								l89:
									if buffer[position] != rune(']') {
										goto l70
									}
									position++
								}
							l81:
								if !_rules[ruleSpacing]() {
									goto l70
								}
								add(ruleClass, position80)
							}
						case '"', '\'':
							// Literal: single-quoted (Char, Action21) or
							// double-quoted (DoubleChar, Action22) string.
							{
								position93 := position
								{
									position94, tokenIndex94 := position, tokenIndex
									if buffer[position] != rune('\'') {
										goto l95
									}
									position++
									{
										position96, tokenIndex96 := position, tokenIndex
										{
											position98, tokenIndex98 := position, tokenIndex
											if buffer[position] != rune('\'') {
												goto l98
											}
											position++
											goto l96
										l98:
											position, tokenIndex = position98, tokenIndex98
										}
										if !_rules[ruleChar]() {
											goto l96
										}
										goto l97
									l96:
										position, tokenIndex = position96, tokenIndex96
									}
								l97:
								l99:
									{
										position100, tokenIndex100 := position, tokenIndex
										{
											position101, tokenIndex101 := position, tokenIndex
											if buffer[position] != rune('\'') {
												goto l101
											}
											position++
											goto l100
										l101:
											position, tokenIndex = position101, tokenIndex101
										}
										if !_rules[ruleChar]() {
											goto l100
										}
										{
											add(ruleAction21, position)
										}
										goto l99
									l100:
										position, tokenIndex = position100, tokenIndex100
									}
									if buffer[position] != rune('\'') {
										goto l95
									}
									position++
									if !_rules[ruleSpacing]() {
										goto l95
									}
									goto l94
								l95:
									position, tokenIndex = position94, tokenIndex94
									if buffer[position] != rune('"') {
										goto l70
									}
									position++
									{
										position103, tokenIndex103 := position, tokenIndex
										{
											position105, tokenIndex105 := position, tokenIndex
											if buffer[position] != rune('"') {
												goto l105
											}
											position++
											goto l103
										l105:
											position, tokenIndex = position105, tokenIndex105
										}
										if !_rules[ruleDoubleChar]() {
											goto l103
										}
										goto l104
									l103:
										position, tokenIndex = position103, tokenIndex103
									}
								l104:
								l106:
									{
										position107, tokenIndex107 := position, tokenIndex
										{
											position108, tokenIndex108 := position, tokenIndex
											if buffer[position] != rune('"') {
												goto l108
											}
											position++
											goto l107
										l108:
											position, tokenIndex = position108, tokenIndex108
										}
										if !_rules[ruleDoubleChar]() {
											goto l107
										}
										{
											add(ruleAction22, position)
										}
										goto l106
									l107:
										position, tokenIndex = position107, tokenIndex107
									}
									if buffer[position] != rune('"') {
										goto l70
									}
									position++
									if !_rules[ruleSpacing]() {
										goto l70
									}
								}
							l94:
								add(ruleLiteral, position93)
							}
						case '(':
							// Open Expression Close (parenthesized group).
							{
								position110 := position
								if buffer[position] != rune('(') {
									goto l70
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l70
								}
								add(ruleOpen, position110)
							}
							if !_rules[ruleExpression]() {
								goto l70
							}
							{
								position111 := position
								if buffer[position] != rune(')') {
									goto l70
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l70
								}
								add(ruleClose, position111)
							}
						default:
							// Identifier !LeftArrow Action17 (rule reference;
							// the negative lookahead keeps it from consuming
							// the start of the next definition).
							if !_rules[ruleIdentifier]() {
								goto l70
							}
							{
								position112, tokenIndex112 := position, tokenIndex
								if !_rules[ruleLeftArrow]() {
									goto l112
								}
								goto l70
							l112:
								position, tokenIndex = position112, tokenIndex112
							}
							{
								add(ruleAction17, position)
							}
						}
					}

					add(rulePrimary, position72)
				}
				// Optional repetition marker after Primary; l114 backtracks
				// cleanly when none of '+', '*', '?' is present.
				{
					position114, tokenIndex114 := position, tokenIndex
					{
						switch buffer[position] {
						case '+':
							{
								position117 := position
								if buffer[position] != rune('+') {
									goto l114
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l114
								}
								add(rulePlus, position117)
							}
							{
								add(ruleAction16, position)
							}
						case '*':
							{
								position119 := position
								if buffer[position] != rune('*') {
									goto l114
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l114
								}
								add(ruleStar, position119)
							}
							{
								add(ruleAction15, position)
							}
						default:
							{
								position121 := position
								if buffer[position] != rune('?') {
									goto l114
								}
								position++
								if !_rules[ruleSpacing]() {
									goto l114
								}
								add(ruleQuestion, position121)
							}
							{
								add(ruleAction14, position)
							}
						}
					}

					goto l115
				l114:
					position, tokenIndex = position114, tokenIndex114
				}
			l115:
				add(ruleSuffix, position71)
			}
			memoize(9, position70, tokenIndex70, true)
			return true
		l70:
			// Failure: memoize the negative result and backtrack.
			memoize(9, position70, tokenIndex70, false)
			position, tokenIndex = position70, tokenIndex70
			return false
		},
1656 | /* 10 Primary <- <((&('<') (Begin Expression End Action20)) | (&('{') (Action Action19)) | (&('.') (Dot Action18)) | (&('[') Class) | (&('"' | '\'') Literal) | (&('(') (Open Expression Close)) | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z' | '_' | 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') (Identifier !LeftArrow Action17)))> */ | |
1657 | nil, | |
1658 | /* 11 Identifier <- <(<(IdentStart IdentCont*)> Spacing)> */ | |
		// Rule 11 (Identifier): <(IdentStart IdentCont*)> Spacing
		// Captures the identifier text as PegText, then consumes trailing
		// spacing. Machine-generated packrat parser code — do not edit by
		// hand. l127/l128 drive the IdentCont* loop; inside it, l130/l131
		// choose between IdentStart and a digit; l124 is the failure exit.
		func() bool {
			// Return a previously computed result for this rule/position.
			if memoized, ok := memoization[memoKey{11, position}]; ok {
				return memoizedResult(memoized)
			}
			position124, tokenIndex124 := position, tokenIndex
			{
				position125 := position
				{
					position126 := position // start of the captured text (<...>)
					if !_rules[ruleIdentStart]() {
						goto l124
					}
				l127:
					{
						position128, tokenIndex128 := position, tokenIndex
						{
							position129 := position
							{
								position130, tokenIndex130 := position, tokenIndex
								if !_rules[ruleIdentStart]() {
									goto l131
								}
								goto l130
							l131:
								position, tokenIndex = position130, tokenIndex130
								if c := buffer[position]; c < rune('0') || c > rune('9') {
									goto l128
								}
								position++
							}
						l130:
							add(ruleIdentCont, position129)
						}
						goto l127
					l128:
						position, tokenIndex = position128, tokenIndex128
					}
					add(rulePegText, position126)
				}
				if !_rules[ruleSpacing]() {
					goto l124
				}
				add(ruleIdentifier, position125)
			}
			memoize(11, position124, tokenIndex124, true)
			return true
		l124:
			// Failure: memoize the negative result and backtrack.
			memoize(11, position124, tokenIndex124, false)
			position, tokenIndex = position124, tokenIndex124
			return false
		},
1710 | /* 12 IdentStart <- <((&('_') '_') | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z') [A-Z]) | (&('a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') [a-z]))> */ | |
		// Rule 12 (IdentStart): '_' / [A-Z] / [a-z]
		// Matches a single character that may begin an identifier,
		// dispatching on the lookahead character. Machine-generated packrat
		// parser code — do not edit by hand; l132 is the failure exit.
		func() bool {
			// Return a previously computed result for this rule/position.
			if memoized, ok := memoization[memoKey{12, position}]; ok {
				return memoizedResult(memoized)
			}
			position132, tokenIndex132 := position, tokenIndex
			{
				position133 := position
				{
					switch buffer[position] {
					case '_':
						if buffer[position] != rune('_') {
							goto l132
						}
						position++
					case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z':
						if c := buffer[position]; c < rune('A') || c > rune('Z') {
							goto l132
						}
						position++
					default:
						if c := buffer[position]; c < rune('a') || c > rune('z') {
							goto l132
						}
						position++
					}
				}

				add(ruleIdentStart, position133)
			}
			memoize(12, position132, tokenIndex132, true)
			return true
		l132:
			// Failure: memoize the negative result and backtrack.
			memoize(12, position132, tokenIndex132, false)
			position, tokenIndex = position132, tokenIndex132
			return false
		},
1747 | /* 13 IdentCont <- <(IdentStart / [0-9])> */ | |
1748 | nil, | |
1749 | /* 14 Literal <- <(('\'' (!'\'' Char)? (!'\'' Char Action21)* '\'' Spacing) / ('"' (!'"' DoubleChar)? (!'"' DoubleChar Action22)* '"' Spacing))> */ | |
1750 | nil, | |
1751 | /* 15 Class <- <((('[' '[' (('^' DoubleRanges Action23) / DoubleRanges)? (']' ']')) / ('[' (('^' Ranges Action24) / Ranges)? ']')) Spacing)> */ | |
1752 | nil, | |
1753 | /* 16 Ranges <- <(!']' Range (!']' Range Action25)*)> */ | |
		// Rule 16 (Ranges): !']' Range (!']' Range Action25)*
		// One or more Range items, each guarded by a negative lookahead for
		// the closing ']'. Machine-generated packrat parser code — do not
		// edit by hand; l141/l142 drive the loop, l138 is the failure exit.
		func() bool {
			// Return a previously computed result for this rule/position.
			if memoized, ok := memoization[memoKey{16, position}]; ok {
				return memoizedResult(memoized)
			}
			position138, tokenIndex138 := position, tokenIndex
			{
				position139 := position
				// !']' lookahead: fail the rule if the class is already closed.
				{
					position140, tokenIndex140 := position, tokenIndex
					if buffer[position] != rune(']') {
						goto l140
					}
					position++
					goto l138
				l140:
					position, tokenIndex = position140, tokenIndex140
				}
				if !_rules[ruleRange]() {
					goto l138
				}
			l141:
				{
					position142, tokenIndex142 := position, tokenIndex
					{
						position143, tokenIndex143 := position, tokenIndex
						if buffer[position] != rune(']') {
							goto l143
						}
						position++
						goto l142
					l143:
						position, tokenIndex = position143, tokenIndex143
					}
					if !_rules[ruleRange]() {
						goto l142
					}
					{
						add(ruleAction25, position)
					}
					goto l141
				l142:
					position, tokenIndex = position142, tokenIndex142
				}
				add(ruleRanges, position139)
			}
			memoize(16, position138, tokenIndex138, true)
			return true
		l138:
			// Failure: memoize the negative result and backtrack.
			memoize(16, position138, tokenIndex138, false)
			position, tokenIndex = position138, tokenIndex138
			return false
		},
1806 | /* 17 DoubleRanges <- <(!(']' ']') DoubleRange (!(']' ']') DoubleRange Action26)*)> */ | |
		// Rule 17 (DoubleRanges): !(']' ']') DoubleRange (!(']' ']') DoubleRange Action26)*
		// Like Ranges, but for the "[[...]]" class form: each item is guarded
		// by a negative lookahead for the double closing "]]". Machine-
		// generated packrat parser code — do not edit by hand; l148/l149
		// drive the loop, l145 is the failure exit.
		func() bool {
			// Return a previously computed result for this rule/position.
			if memoized, ok := memoization[memoKey{17, position}]; ok {
				return memoizedResult(memoized)
			}
			position145, tokenIndex145 := position, tokenIndex
			{
				position146 := position
				// !(']' ']') lookahead: fail if the class is already closed.
				{
					position147, tokenIndex147 := position, tokenIndex
					if buffer[position] != rune(']') {
						goto l147
					}
					position++
					if buffer[position] != rune(']') {
						goto l147
					}
					position++
					goto l145
				l147:
					position, tokenIndex = position147, tokenIndex147
				}
				if !_rules[ruleDoubleRange]() {
					goto l145
				}
			l148:
				{
					position149, tokenIndex149 := position, tokenIndex
					{
						position150, tokenIndex150 := position, tokenIndex
						if buffer[position] != rune(']') {
							goto l150
						}
						position++
						if buffer[position] != rune(']') {
							goto l150
						}
						position++
						goto l149
					l150:
						position, tokenIndex = position150, tokenIndex150
					}
					if !_rules[ruleDoubleRange]() {
						goto l149
					}
					{
						add(ruleAction26, position)
					}
					goto l148
				l149:
					position, tokenIndex = position149, tokenIndex149
				}
				add(ruleDoubleRanges, position146)
			}
			memoize(17, position145, tokenIndex145, true)
			return true
		l145:
			// Failure: memoize the negative result and backtrack.
			memoize(17, position145, tokenIndex145, false)
			position, tokenIndex = position145, tokenIndex145
			return false
		},
1867 | /* 18 Range <- <((Char '-' Char Action27) / Char)> */ | |
		// Rule 18 (Range): (Char '-' Char Action27) / Char
		// A character range "a-b" (Action27), or a single character.
		// Machine-generated packrat parser code — do not edit by hand; l155
		// backtracks to the single-Char alternative, l152 is the failure
		// exit, l154 is the success join point.
		func() bool {
			// Return a previously computed result for this rule/position.
			if memoized, ok := memoization[memoKey{18, position}]; ok {
				return memoizedResult(memoized)
			}
			position152, tokenIndex152 := position, tokenIndex
			{
				position153 := position
				{
					position154, tokenIndex154 := position, tokenIndex
					if !_rules[ruleChar]() {
						goto l155
					}
					if buffer[position] != rune('-') {
						goto l155
					}
					position++
					if !_rules[ruleChar]() {
						goto l155
					}
					{
						add(ruleAction27, position)
					}
					goto l154
				l155:
					position, tokenIndex = position154, tokenIndex154
					if !_rules[ruleChar]() {
						goto l152
					}
				}
			l154:
				add(ruleRange, position153)
			}
			memoize(18, position152, tokenIndex152, true)
			return true
		l152:
			// Failure: memoize the negative result and backtrack.
			memoize(18, position152, tokenIndex152, false)
			position, tokenIndex = position152, tokenIndex152
			return false
		},
1907 | /* 19 DoubleRange <- <((Char '-' Char Action28) / DoubleChar)> */ | |
		// Rule 19 (DoubleRange): (Char '-' Char Action28) / DoubleChar
		// A character range "a-b" (Action28), or a single DoubleChar (the
		// case-insensitive variant used in "[[...]]" classes). Machine-
		// generated packrat parser code — do not edit by hand; l160
		// backtracks to the DoubleChar alternative, l157 is the failure
		// exit, l159 is the success join point.
		func() bool {
			// Return a previously computed result for this rule/position.
			if memoized, ok := memoization[memoKey{19, position}]; ok {
				return memoizedResult(memoized)
			}
			position157, tokenIndex157 := position, tokenIndex
			{
				position158 := position
				{
					position159, tokenIndex159 := position, tokenIndex
					if !_rules[ruleChar]() {
						goto l160
					}
					if buffer[position] != rune('-') {
						goto l160
					}
					position++
					if !_rules[ruleChar]() {
						goto l160
					}
					{
						add(ruleAction28, position)
					}
					goto l159
				l160:
					position, tokenIndex = position159, tokenIndex159
					if !_rules[ruleDoubleChar]() {
						goto l157
					}
				}
			l159:
				add(ruleDoubleRange, position158)
			}
			memoize(19, position157, tokenIndex157, true)
			return true
		l157:
			// Failure: memoize the negative result and backtrack.
			memoize(19, position157, tokenIndex157, false)
			position, tokenIndex = position157, tokenIndex157
			return false
		},
1947 | /* 20 Char <- <(Escape / (!'\\' <.> Action29))> */ | |
		// Rule 20 (Char): Escape / (!'\\' <.> Action29)
		// An escape sequence, or any single non-backslash character captured
		// as PegText (Action29). Machine-generated packrat parser code — do
		// not edit by hand; l165 backtracks to the second alternative, l162
		// is the failure exit, l164 is the success join point.
		func() bool {
			// Return a previously computed result for this rule/position.
			if memoized, ok := memoization[memoKey{20, position}]; ok {
				return memoizedResult(memoized)
			}
			position162, tokenIndex162 := position, tokenIndex
			{
				position163 := position
				{
					position164, tokenIndex164 := position, tokenIndex
					if !_rules[ruleEscape]() {
						goto l165
					}
					goto l164
				l165:
					position, tokenIndex = position164, tokenIndex164
					// !'\\' lookahead: a bare backslash fails the rule.
					{
						position166, tokenIndex166 := position, tokenIndex
						if buffer[position] != rune('\\') {
							goto l166
						}
						position++
						goto l162
					l166:
						position, tokenIndex = position166, tokenIndex166
					}
					{
						position167 := position
						if !matchDot() {
							goto l162
						}
						add(rulePegText, position167)
					}
					{
						add(ruleAction29, position)
					}
				}
			l164:
				add(ruleChar, position163)
			}
			memoize(20, position162, tokenIndex162, true)
			return true
		l162:
			// Failure: memoize the negative result and backtrack.
			memoize(20, position162, tokenIndex162, false)
			position, tokenIndex = position162, tokenIndex162
			return false
		},
1994 | /* 21 DoubleChar <- <(Escape / (<([a-z] / [A-Z])> Action30) / (!'\\' <.> Action31))> */ | |
		// Rule 21 (DoubleChar): Escape / (<[a-zA-Z]> Action30) / (!'\\' <.> Action31)
		// An escape sequence; or an ASCII letter captured as PegText
		// (Action30 — the case handled specially in "[[...]]" classes); or
		// any other single non-backslash character (Action31). Machine-
		// generated packrat parser code — do not edit by hand; l172/l173
		// step through the ordered alternatives, l169 is the failure exit,
		// l171 is the success join point.
		func() bool {
			// Return a previously computed result for this rule/position.
			if memoized, ok := memoization[memoKey{21, position}]; ok {
				return memoizedResult(memoized)
			}
			position169, tokenIndex169 := position, tokenIndex
			{
				position170 := position
				{
					position171, tokenIndex171 := position, tokenIndex
					if !_rules[ruleEscape]() {
						goto l172
					}
					goto l171
				l172:
					position, tokenIndex = position171, tokenIndex171
					// Second alternative: a single ASCII letter.
					{
						position174 := position
						{
							position175, tokenIndex175 := position, tokenIndex
							if c := buffer[position]; c < rune('a') || c > rune('z') {
								goto l176
							}
							position++
							goto l175
						l176:
							position, tokenIndex = position175, tokenIndex175
							if c := buffer[position]; c < rune('A') || c > rune('Z') {
								goto l173
							}
							position++
						}
					l175:
						add(rulePegText, position174)
					}
					{
						add(ruleAction30, position)
					}
					goto l171
				l173:
					position, tokenIndex = position171, tokenIndex171
					// Third alternative: any non-backslash character.
					{
						position178, tokenIndex178 := position, tokenIndex
						if buffer[position] != rune('\\') {
							goto l178
						}
						position++
						goto l169
					l178:
						position, tokenIndex = position178, tokenIndex178
					}
					{
						position179 := position
						if !matchDot() {
							goto l169
						}
						add(rulePegText, position179)
					}
					{
						add(ruleAction31, position)
					}
				}
			l171:
				add(ruleDoubleChar, position170)
			}
			memoize(21, position169, tokenIndex169, true)
			return true
		l169:
			// Failure: memoize the negative result and backtrack.
			memoize(21, position169, tokenIndex169, false)
			position, tokenIndex = position169, tokenIndex169
			return false
		},
2066 | /* 22 Escape <- <(('\\' ('a' / 'A') Action32) / ('\\' ('b' / 'B') Action33) / ('\\' ('e' / 'E') Action34) / ('\\' ('f' / 'F') Action35) / ('\\' ('n' / 'N') Action36) / ('\\' ('r' / 'R') Action37) / ('\\' ('t' / 'T') Action38) / ('\\' ('v' / 'V') Action39) / ('\\' '\'' Action40) / ('\\' '"' Action41) / ('\\' '[' Action42) / ('\\' ']' Action43) / ('\\' '-' Action44) / ('\\' ('0' ('x' / 'X')) <((&('A' | 'B' | 'C' | 'D' | 'E' | 'F') [A-F]) | (&('a' | 'b' | 'c' | 'd' | 'e' | 'f') [a-f]) | (&('0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9') [0-9]))+> Action45) / ('\\' <([0-3] [0-7] [0-7])> Action46) / ('\\' <([0-7] [0-7]?)> Action47) / ('\\' '\\' Action48))> */ | |
2067 | func() bool { | |
2068 | if memoized, ok := memoization[memoKey{22, position}]; ok { | |
2069 | return memoizedResult(memoized) | |
2070 | } | |
2071 | position181, tokenIndex181 := position, tokenIndex | |
2072 | { | |
2073 | position182 := position | |
2074 | { | |
2075 | position183, tokenIndex183 := position, tokenIndex | |
2076 | if buffer[position] != rune('\\') { | |
2077 | goto l184 | |
2078 | } | |
2079 | position++ | |
2080 | { | |
2081 | position185, tokenIndex185 := position, tokenIndex | |
2082 | if buffer[position] != rune('a') { | |
2083 | goto l186 | |
2084 | } | |
2085 | position++ | |
2086 | goto l185 | |
2087 | l186: | |
2088 | position, tokenIndex = position185, tokenIndex185 | |
2089 | if buffer[position] != rune('A') { | |
2090 | goto l184 | |
2091 | } | |
2092 | position++ | |
2093 | } | |
2094 | l185: | |
2095 | { | |
2096 | add(ruleAction32, position) | |
2097 | } | |
2098 | goto l183 | |
2099 | l184: | |
2100 | position, tokenIndex = position183, tokenIndex183 | |
2101 | if buffer[position] != rune('\\') { | |
2102 | goto l188 | |
2103 | } | |
2104 | position++ | |
2105 | { | |
2106 | position189, tokenIndex189 := position, tokenIndex | |
2107 | if buffer[position] != rune('b') { | |
2108 | goto l190 | |
2109 | } | |
2110 | position++ | |
2111 | goto l189 | |
2112 | l190: | |
2113 | position, tokenIndex = position189, tokenIndex189 | |
2114 | if buffer[position] != rune('B') { | |
2115 | goto l188 | |
2116 | } | |
2117 | position++ | |
2118 | } | |
2119 | l189: | |
2120 | { | |
2121 | add(ruleAction33, position) | |
2122 | } | |
2123 | goto l183 | |
2124 | l188: | |
2125 | position, tokenIndex = position183, tokenIndex183 | |
2126 | if buffer[position] != rune('\\') { | |
2127 | goto l192 | |
2128 | } | |
2129 | position++ | |
2130 | { | |
2131 | position193, tokenIndex193 := position, tokenIndex | |
2132 | if buffer[position] != rune('e') { | |
2133 | goto l194 | |
2134 | } | |
2135 | position++ | |
2136 | goto l193 | |
2137 | l194: | |
2138 | position, tokenIndex = position193, tokenIndex193 | |
2139 | if buffer[position] != rune('E') { | |
2140 | goto l192 | |
2141 | } | |
2142 | position++ | |
2143 | } | |
2144 | l193: | |
2145 | { | |
2146 | add(ruleAction34, position) | |
2147 | } | |
2148 | goto l183 | |
2149 | l192: | |
2150 | position, tokenIndex = position183, tokenIndex183 | |
2151 | if buffer[position] != rune('\\') { | |
2152 | goto l196 | |
2153 | } | |
2154 | position++ | |
2155 | { | |
2156 | position197, tokenIndex197 := position, tokenIndex | |
2157 | if buffer[position] != rune('f') { | |
2158 | goto l198 | |
2159 | } | |
2160 | position++ | |
2161 | goto l197 | |
2162 | l198: | |
2163 | position, tokenIndex = position197, tokenIndex197 | |
2164 | if buffer[position] != rune('F') { | |
2165 | goto l196 | |
2166 | } | |
2167 | position++ | |
2168 | } | |
2169 | l197: | |
2170 | { | |
2171 | add(ruleAction35, position) | |
2172 | } | |
2173 | goto l183 | |
2174 | l196: | |
2175 | position, tokenIndex = position183, tokenIndex183 | |
2176 | if buffer[position] != rune('\\') { | |
2177 | goto l200 | |
2178 | } | |
2179 | position++ | |
2180 | { | |
2181 | position201, tokenIndex201 := position, tokenIndex | |
2182 | if buffer[position] != rune('n') { | |
2183 | goto l202 | |
2184 | } | |
2185 | position++ | |
2186 | goto l201 | |
2187 | l202: | |
2188 | position, tokenIndex = position201, tokenIndex201 | |
2189 | if buffer[position] != rune('N') { | |
2190 | goto l200 | |
2191 | } | |
2192 | position++ | |
2193 | } | |
2194 | l201: | |
2195 | { | |
2196 | add(ruleAction36, position) | |
2197 | } | |
2198 | goto l183 | |
2199 | l200: | |
2200 | position, tokenIndex = position183, tokenIndex183 | |
2201 | if buffer[position] != rune('\\') { | |
2202 | goto l204 | |
2203 | } | |
2204 | position++ | |
2205 | { | |
2206 | position205, tokenIndex205 := position, tokenIndex | |
2207 | if buffer[position] != rune('r') { | |
2208 | goto l206 | |
2209 | } | |
2210 | position++ | |
2211 | goto l205 | |
2212 | l206: | |
2213 | position, tokenIndex = position205, tokenIndex205 | |
2214 | if buffer[position] != rune('R') { | |
2215 | goto l204 | |
2216 | } | |
2217 | position++ | |
2218 | } | |
2219 | l205: | |
2220 | { | |
2221 | add(ruleAction37, position) | |
2222 | } | |
2223 | goto l183 | |
2224 | l204: | |
2225 | position, tokenIndex = position183, tokenIndex183 | |
2226 | if buffer[position] != rune('\\') { | |
2227 | goto l208 | |
2228 | } | |
2229 | position++ | |
2230 | { | |
2231 | position209, tokenIndex209 := position, tokenIndex | |
2232 | if buffer[position] != rune('t') { | |
2233 | goto l210 | |
2234 | } | |
2235 | position++ | |
2236 | goto l209 | |
2237 | l210: | |
2238 | position, tokenIndex = position209, tokenIndex209 | |
2239 | if buffer[position] != rune('T') { | |
2240 | goto l208 | |
2241 | } | |
2242 | position++ | |
2243 | } | |
2244 | l209: | |
2245 | { | |
2246 | add(ruleAction38, position) | |
2247 | } | |
2248 | goto l183 | |
2249 | l208: | |
2250 | position, tokenIndex = position183, tokenIndex183 | |
2251 | if buffer[position] != rune('\\') { | |
2252 | goto l212 | |
2253 | } | |
2254 | position++ | |
2255 | { | |
2256 | position213, tokenIndex213 := position, tokenIndex | |
2257 | if buffer[position] != rune('v') { | |
2258 | goto l214 | |
2259 | } | |
2260 | position++ | |
2261 | goto l213 | |
2262 | l214: | |
2263 | position, tokenIndex = position213, tokenIndex213 | |
2264 | if buffer[position] != rune('V') { | |
2265 | goto l212 | |
2266 | } | |
2267 | position++ | |
2268 | } | |
2269 | l213: | |
2270 | { | |
2271 | add(ruleAction39, position) | |
2272 | } | |
2273 | goto l183 | |
2274 | l212: | |
2275 | position, tokenIndex = position183, tokenIndex183 | |
2276 | if buffer[position] != rune('\\') { | |
2277 | goto l216 | |
2278 | } | |
2279 | position++ | |
2280 | if buffer[position] != rune('\'') { | |
2281 | goto l216 | |
2282 | } | |
2283 | position++ | |
2284 | { | |
2285 | add(ruleAction40, position) | |
2286 | } | |
2287 | goto l183 | |
2288 | l216: | |
2289 | position, tokenIndex = position183, tokenIndex183 | |
2290 | if buffer[position] != rune('\\') { | |
2291 | goto l218 | |
2292 | } | |
2293 | position++ | |
2294 | if buffer[position] != rune('"') { | |
2295 | goto l218 | |
2296 | } | |
2297 | position++ | |
2298 | { | |
2299 | add(ruleAction41, position) | |
2300 | } | |
2301 | goto l183 | |
2302 | l218: | |
2303 | position, tokenIndex = position183, tokenIndex183 | |
2304 | if buffer[position] != rune('\\') { | |
2305 | goto l220 | |
2306 | } | |
2307 | position++ | |
2308 | if buffer[position] != rune('[') { | |
2309 | goto l220 | |
2310 | } | |
2311 | position++ | |
2312 | { | |
2313 | add(ruleAction42, position) | |
2314 | } | |
2315 | goto l183 | |
2316 | l220: | |
2317 | position, tokenIndex = position183, tokenIndex183 | |
2318 | if buffer[position] != rune('\\') { | |
2319 | goto l222 | |
2320 | } | |
2321 | position++ | |
2322 | if buffer[position] != rune(']') { | |
2323 | goto l222 | |
2324 | } | |
2325 | position++ | |
2326 | { | |
2327 | add(ruleAction43, position) | |
2328 | } | |
2329 | goto l183 | |
2330 | l222: | |
2331 | position, tokenIndex = position183, tokenIndex183 | |
2332 | if buffer[position] != rune('\\') { | |
2333 | goto l224 | |
2334 | } | |
2335 | position++ | |
2336 | if buffer[position] != rune('-') { | |
2337 | goto l224 | |
2338 | } | |
2339 | position++ | |
2340 | { | |
2341 | add(ruleAction44, position) | |
2342 | } | |
2343 | goto l183 | |
2344 | l224: | |
2345 | position, tokenIndex = position183, tokenIndex183 | |
2346 | if buffer[position] != rune('\\') { | |
2347 | goto l226 | |
2348 | } | |
2349 | position++ | |
2350 | if buffer[position] != rune('0') { | |
2351 | goto l226 | |
2352 | } | |
2353 | position++ | |
2354 | { | |
2355 | position227, tokenIndex227 := position, tokenIndex | |
2356 | if buffer[position] != rune('x') { | |
2357 | goto l228 | |
2358 | } | |
2359 | position++ | |
2360 | goto l227 | |
2361 | l228: | |
2362 | position, tokenIndex = position227, tokenIndex227 | |
2363 | if buffer[position] != rune('X') { | |
2364 | goto l226 | |
2365 | } | |
2366 | position++ | |
2367 | } | |
2368 | l227: | |
2369 | { | |
2370 | position229 := position | |
2371 | { | |
2372 | switch buffer[position] { | |
2373 | case 'A', 'B', 'C', 'D', 'E', 'F': | |
2374 | if c := buffer[position]; c < rune('A') || c > rune('F') { | |
2375 | goto l226 | |
2376 | } | |
2377 | position++ | |
2378 | case 'a', 'b', 'c', 'd', 'e', 'f': | |
2379 | if c := buffer[position]; c < rune('a') || c > rune('f') { | |
2380 | goto l226 | |
2381 | } | |
2382 | position++ | |
2383 | default: | |
2384 | if c := buffer[position]; c < rune('0') || c > rune('9') { | |
2385 | goto l226 | |
2386 | } | |
2387 | position++ | |
2388 | } | |
2389 | } | |
2390 | ||
2391 | l230: | |
2392 | { | |
2393 | position231, tokenIndex231 := position, tokenIndex | |
2394 | { | |
2395 | switch buffer[position] { | |
2396 | case 'A', 'B', 'C', 'D', 'E', 'F': | |
2397 | if c := buffer[position]; c < rune('A') || c > rune('F') { | |
2398 | goto l231 | |
2399 | } | |
2400 | position++ | |
2401 | case 'a', 'b', 'c', 'd', 'e', 'f': | |
2402 | if c := buffer[position]; c < rune('a') || c > rune('f') { | |
2403 | goto l231 | |
2404 | } | |
2405 | position++ | |
2406 | default: | |
2407 | if c := buffer[position]; c < rune('0') || c > rune('9') { | |
2408 | goto l231 | |
2409 | } | |
2410 | position++ | |
2411 | } | |
2412 | } | |
2413 | ||
2414 | goto l230 | |
2415 | l231: | |
2416 | position, tokenIndex = position231, tokenIndex231 | |
2417 | } | |
2418 | add(rulePegText, position229) | |
2419 | } | |
2420 | { | |
2421 | add(ruleAction45, position) | |
2422 | } | |
2423 | goto l183 | |
2424 | l226: | |
2425 | position, tokenIndex = position183, tokenIndex183 | |
2426 | if buffer[position] != rune('\\') { | |
2427 | goto l235 | |
2428 | } | |
2429 | position++ | |
2430 | { | |
2431 | position236 := position | |
2432 | if c := buffer[position]; c < rune('0') || c > rune('3') { | |
2433 | goto l235 | |
2434 | } | |
2435 | position++ | |
2436 | if c := buffer[position]; c < rune('0') || c > rune('7') { | |
2437 | goto l235 | |
2438 | } | |
2439 | position++ | |
2440 | if c := buffer[position]; c < rune('0') || c > rune('7') { | |
2441 | goto l235 | |
2442 | } | |
2443 | position++ | |
2444 | add(rulePegText, position236) | |
2445 | } | |
2446 | { | |
2447 | add(ruleAction46, position) | |
2448 | } | |
2449 | goto l183 | |
2450 | l235: | |
2451 | position, tokenIndex = position183, tokenIndex183 | |
2452 | if buffer[position] != rune('\\') { | |
2453 | goto l238 | |
2454 | } | |
2455 | position++ | |
2456 | { | |
2457 | position239 := position | |
2458 | if c := buffer[position]; c < rune('0') || c > rune('7') { | |
2459 | goto l238 | |
2460 | } | |
2461 | position++ | |
2462 | { | |
2463 | position240, tokenIndex240 := position, tokenIndex | |
2464 | if c := buffer[position]; c < rune('0') || c > rune('7') { | |
2465 | goto l240 | |
2466 | } | |
2467 | position++ | |
2468 | goto l241 | |
2469 | l240: | |
2470 | position, tokenIndex = position240, tokenIndex240 | |
2471 | } | |
2472 | l241: | |
2473 | add(rulePegText, position239) | |
2474 | } | |
2475 | { | |
2476 | add(ruleAction47, position) | |
2477 | } | |
2478 | goto l183 | |
2479 | l238: | |
2480 | position, tokenIndex = position183, tokenIndex183 | |
2481 | if buffer[position] != rune('\\') { | |
2482 | goto l181 | |
2483 | } | |
2484 | position++ | |
2485 | if buffer[position] != rune('\\') { | |
2486 | goto l181 | |
2487 | } | |
2488 | position++ | |
2489 | { | |
2490 | add(ruleAction48, position) | |
2491 | } | |
2492 | } | |
2493 | l183: | |
2494 | add(ruleEscape, position182) | |
2495 | } | |
2496 | memoize(22, position181, tokenIndex181, true) | |
2497 | return true | |
2498 | l181: | |
2499 | memoize(22, position181, tokenIndex181, false) | |
2500 | position, tokenIndex = position181, tokenIndex181 | |
2501 | return false | |
2502 | }, | |
2503 | /* 23 LeftArrow <- <((('<' '-') / '←') Spacing)> */ | |
2504 | func() bool { | |
2505 | if memoized, ok := memoization[memoKey{23, position}]; ok { | |
2506 | return memoizedResult(memoized) | |
2507 | } | |
2508 | position244, tokenIndex244 := position, tokenIndex | |
2509 | { | |
2510 | position245 := position | |
2511 | { | |
2512 | position246, tokenIndex246 := position, tokenIndex | |
2513 | if buffer[position] != rune('<') { | |
2514 | goto l247 | |
2515 | } | |
2516 | position++ | |
2517 | if buffer[position] != rune('-') { | |
2518 | goto l247 | |
2519 | } | |
2520 | position++ | |
2521 | goto l246 | |
2522 | l247: | |
2523 | position, tokenIndex = position246, tokenIndex246 | |
2524 | if buffer[position] != rune('←') { | |
2525 | goto l244 | |
2526 | } | |
2527 | position++ | |
2528 | } | |
2529 | l246: | |
2530 | if !_rules[ruleSpacing]() { | |
2531 | goto l244 | |
2532 | } | |
2533 | add(ruleLeftArrow, position245) | |
2534 | } | |
2535 | memoize(23, position244, tokenIndex244, true) | |
2536 | return true | |
2537 | l244: | |
2538 | memoize(23, position244, tokenIndex244, false) | |
2539 | position, tokenIndex = position244, tokenIndex244 | |
2540 | return false | |
2541 | }, | |
2542 | /* 24 Slash <- <('/' Spacing)> */ | |
2543 | func() bool { | |
2544 | if memoized, ok := memoization[memoKey{24, position}]; ok { | |
2545 | return memoizedResult(memoized) | |
2546 | } | |
2547 | position248, tokenIndex248 := position, tokenIndex | |
2548 | { | |
2549 | position249 := position | |
2550 | if buffer[position] != rune('/') { | |
2551 | goto l248 | |
2552 | } | |
2553 | position++ | |
2554 | if !_rules[ruleSpacing]() { | |
2555 | goto l248 | |
2556 | } | |
2557 | add(ruleSlash, position249) | |
2558 | } | |
2559 | memoize(24, position248, tokenIndex248, true) | |
2560 | return true | |
2561 | l248: | |
2562 | memoize(24, position248, tokenIndex248, false) | |
2563 | position, tokenIndex = position248, tokenIndex248 | |
2564 | return false | |
2565 | }, | |
2566 | /* 25 And <- <('&' Spacing)> */ | |
2567 | func() bool { | |
2568 | if memoized, ok := memoization[memoKey{25, position}]; ok { | |
2569 | return memoizedResult(memoized) | |
2570 | } | |
2571 | position250, tokenIndex250 := position, tokenIndex | |
2572 | { | |
2573 | position251 := position | |
2574 | if buffer[position] != rune('&') { | |
2575 | goto l250 | |
2576 | } | |
2577 | position++ | |
2578 | if !_rules[ruleSpacing]() { | |
2579 | goto l250 | |
2580 | } | |
2581 | add(ruleAnd, position251) | |
2582 | } | |
2583 | memoize(25, position250, tokenIndex250, true) | |
2584 | return true | |
2585 | l250: | |
2586 | memoize(25, position250, tokenIndex250, false) | |
2587 | position, tokenIndex = position250, tokenIndex250 | |
2588 | return false | |
2589 | }, | |
2590 | /* 26 Not <- <('!' Spacing)> */ | |
2591 | func() bool { | |
2592 | if memoized, ok := memoization[memoKey{26, position}]; ok { | |
2593 | return memoizedResult(memoized) | |
2594 | } | |
2595 | position252, tokenIndex252 := position, tokenIndex | |
2596 | { | |
2597 | position253 := position | |
2598 | if buffer[position] != rune('!') { | |
2599 | goto l252 | |
2600 | } | |
2601 | position++ | |
2602 | if !_rules[ruleSpacing]() { | |
2603 | goto l252 | |
2604 | } | |
2605 | add(ruleNot, position253) | |
2606 | } | |
2607 | memoize(26, position252, tokenIndex252, true) | |
2608 | return true | |
2609 | l252: | |
2610 | memoize(26, position252, tokenIndex252, false) | |
2611 | position, tokenIndex = position252, tokenIndex252 | |
2612 | return false | |
2613 | }, | |
2614 | /* 27 Question <- <('?' Spacing)> */ | |
2615 | nil, | |
2616 | /* 28 Star <- <('*' Spacing)> */ | |
2617 | nil, | |
2618 | /* 29 Plus <- <('+' Spacing)> */ | |
2619 | nil, | |
2620 | /* 30 Open <- <('(' Spacing)> */ | |
2621 | nil, | |
2622 | /* 31 Close <- <(')' Spacing)> */ | |
2623 | nil, | |
2624 | /* 32 Dot <- <('.' Spacing)> */ | |
2625 | nil, | |
2626 | /* 33 SpaceComment <- <(Space / Comment)> */ | |
2627 | func() bool { | |
2628 | if memoized, ok := memoization[memoKey{33, position}]; ok { | |
2629 | return memoizedResult(memoized) | |
2630 | } | |
2631 | position260, tokenIndex260 := position, tokenIndex | |
2632 | { | |
2633 | position261 := position | |
2634 | { | |
2635 | position262, tokenIndex262 := position, tokenIndex | |
2636 | { | |
2637 | position264 := position | |
2638 | { | |
2639 | switch buffer[position] { | |
2640 | case '\t': | |
2641 | if buffer[position] != rune('\t') { | |
2642 | goto l263 | |
2643 | } | |
2644 | position++ | |
2645 | case ' ': | |
2646 | if buffer[position] != rune(' ') { | |
2647 | goto l263 | |
2648 | } | |
2649 | position++ | |
2650 | default: | |
2651 | if !_rules[ruleEndOfLine]() { | |
2652 | goto l263 | |
2653 | } | |
2654 | } | |
2655 | } | |
2656 | ||
2657 | add(ruleSpace, position264) | |
2658 | } | |
2659 | goto l262 | |
2660 | l263: | |
2661 | position, tokenIndex = position262, tokenIndex262 | |
2662 | { | |
2663 | position266 := position | |
2664 | { | |
2665 | position267, tokenIndex267 := position, tokenIndex | |
2666 | if buffer[position] != rune('#') { | |
2667 | goto l268 | |
2668 | } | |
2669 | position++ | |
2670 | goto l267 | |
2671 | l268: | |
2672 | position, tokenIndex = position267, tokenIndex267 | |
2673 | if buffer[position] != rune('/') { | |
2674 | goto l260 | |
2675 | } | |
2676 | position++ | |
2677 | if buffer[position] != rune('/') { | |
2678 | goto l260 | |
2679 | } | |
2680 | position++ | |
2681 | } | |
2682 | l267: | |
2683 | l269: | |
2684 | { | |
2685 | position270, tokenIndex270 := position, tokenIndex | |
2686 | { | |
2687 | position271, tokenIndex271 := position, tokenIndex | |
2688 | if !_rules[ruleEndOfLine]() { | |
2689 | goto l271 | |
2690 | } | |
2691 | goto l270 | |
2692 | l271: | |
2693 | position, tokenIndex = position271, tokenIndex271 | |
2694 | } | |
2695 | if !matchDot() { | |
2696 | goto l270 | |
2697 | } | |
2698 | goto l269 | |
2699 | l270: | |
2700 | position, tokenIndex = position270, tokenIndex270 | |
2701 | } | |
2702 | if !_rules[ruleEndOfLine]() { | |
2703 | goto l260 | |
2704 | } | |
2705 | add(ruleComment, position266) | |
2706 | } | |
2707 | } | |
2708 | l262: | |
2709 | add(ruleSpaceComment, position261) | |
2710 | } | |
2711 | memoize(33, position260, tokenIndex260, true) | |
2712 | return true | |
2713 | l260: | |
2714 | memoize(33, position260, tokenIndex260, false) | |
2715 | position, tokenIndex = position260, tokenIndex260 | |
2716 | return false | |
2717 | }, | |
2718 | /* 34 Spacing <- <SpaceComment*> */ | |
2719 | func() bool { | |
2720 | if memoized, ok := memoization[memoKey{34, position}]; ok { | |
2721 | return memoizedResult(memoized) | |
2722 | } | |
2723 | position272, tokenIndex272 := position, tokenIndex | |
2724 | { | |
2725 | position273 := position | |
2726 | l274: | |
2727 | { | |
2728 | position275, tokenIndex275 := position, tokenIndex | |
2729 | if !_rules[ruleSpaceComment]() { | |
2730 | goto l275 | |
2731 | } | |
2732 | goto l274 | |
2733 | l275: | |
2734 | position, tokenIndex = position275, tokenIndex275 | |
2735 | } | |
2736 | add(ruleSpacing, position273) | |
2737 | } | |
2738 | memoize(34, position272, tokenIndex272, true) | |
2739 | return true | |
2740 | }, | |
2741 | /* 35 MustSpacing <- <SpaceComment+> */ | |
2742 | func() bool { | |
2743 | if memoized, ok := memoization[memoKey{35, position}]; ok { | |
2744 | return memoizedResult(memoized) | |
2745 | } | |
2746 | position276, tokenIndex276 := position, tokenIndex | |
2747 | { | |
2748 | position277 := position | |
2749 | if !_rules[ruleSpaceComment]() { | |
2750 | goto l276 | |
2751 | } | |
2752 | l278: | |
2753 | { | |
2754 | position279, tokenIndex279 := position, tokenIndex | |
2755 | if !_rules[ruleSpaceComment]() { | |
2756 | goto l279 | |
2757 | } | |
2758 | goto l278 | |
2759 | l279: | |
2760 | position, tokenIndex = position279, tokenIndex279 | |
2761 | } | |
2762 | add(ruleMustSpacing, position277) | |
2763 | } | |
2764 | memoize(35, position276, tokenIndex276, true) | |
2765 | return true | |
2766 | l276: | |
2767 | memoize(35, position276, tokenIndex276, false) | |
2768 | position, tokenIndex = position276, tokenIndex276 | |
2769 | return false | |
2770 | }, | |
2771 | /* 36 Comment <- <(('#' / ('/' '/')) (!EndOfLine .)* EndOfLine)> */ | |
2772 | nil, | |
2773 | /* 37 Space <- <((&('\t') '\t') | (&(' ') ' ') | (&('\n' | '\r') EndOfLine))> */ | |
2774 | nil, | |
2775 | /* 38 EndOfLine <- <(('\r' '\n') / '\n' / '\r')> */ | |
2776 | func() bool { | |
2777 | if memoized, ok := memoization[memoKey{38, position}]; ok { | |
2778 | return memoizedResult(memoized) | |
2779 | } | |
2780 | position282, tokenIndex282 := position, tokenIndex | |
2781 | { | |
2782 | position283 := position | |
2783 | { | |
2784 | position284, tokenIndex284 := position, tokenIndex | |
2785 | if buffer[position] != rune('\r') { | |
2786 | goto l285 | |
2787 | } | |
2788 | position++ | |
2789 | if buffer[position] != rune('\n') { | |
2790 | goto l285 | |
2791 | } | |
2792 | position++ | |
2793 | goto l284 | |
2794 | l285: | |
2795 | position, tokenIndex = position284, tokenIndex284 | |
2796 | if buffer[position] != rune('\n') { | |
2797 | goto l286 | |
2798 | } | |
2799 | position++ | |
2800 | goto l284 | |
2801 | l286: | |
2802 | position, tokenIndex = position284, tokenIndex284 | |
2803 | if buffer[position] != rune('\r') { | |
2804 | goto l282 | |
2805 | } | |
2806 | position++ | |
2807 | } | |
2808 | l284: | |
2809 | add(ruleEndOfLine, position283) | |
2810 | } | |
2811 | memoize(38, position282, tokenIndex282, true) | |
2812 | return true | |
2813 | l282: | |
2814 | memoize(38, position282, tokenIndex282, false) | |
2815 | position, tokenIndex = position282, tokenIndex282 | |
2816 | return false | |
2817 | }, | |
2818 | /* 39 EndOfFile <- <!.> */ | |
2819 | nil, | |
2820 | /* 40 Action <- <('{' <ActionBody*> '}' Spacing)> */ | |
2821 | func() bool { | |
2822 | if memoized, ok := memoization[memoKey{40, position}]; ok { | |
2823 | return memoizedResult(memoized) | |
2824 | } | |
2825 | position288, tokenIndex288 := position, tokenIndex | |
2826 | { | |
2827 | position289 := position | |
2828 | if buffer[position] != rune('{') { | |
2829 | goto l288 | |
2830 | } | |
2831 | position++ | |
2832 | { | |
2833 | position290 := position | |
2834 | l291: | |
2835 | { | |
2836 | position292, tokenIndex292 := position, tokenIndex | |
2837 | if !_rules[ruleActionBody]() { | |
2838 | goto l292 | |
2839 | } | |
2840 | goto l291 | |
2841 | l292: | |
2842 | position, tokenIndex = position292, tokenIndex292 | |
2843 | } | |
2844 | add(rulePegText, position290) | |
2845 | } | |
2846 | if buffer[position] != rune('}') { | |
2847 | goto l288 | |
2848 | } | |
2849 | position++ | |
2850 | if !_rules[ruleSpacing]() { | |
2851 | goto l288 | |
2852 | } | |
2853 | add(ruleAction, position289) | |
2854 | } | |
2855 | memoize(40, position288, tokenIndex288, true) | |
2856 | return true | |
2857 | l288: | |
2858 | memoize(40, position288, tokenIndex288, false) | |
2859 | position, tokenIndex = position288, tokenIndex288 | |
2860 | return false | |
2861 | }, | |
2862 | /* 41 ActionBody <- <((!('{' / '}') .) / ('{' ActionBody* '}'))> */ | |
2863 | func() bool { | |
2864 | if memoized, ok := memoization[memoKey{41, position}]; ok { | |
2865 | return memoizedResult(memoized) | |
2866 | } | |
2867 | position293, tokenIndex293 := position, tokenIndex | |
2868 | { | |
2869 | position294 := position | |
2870 | { | |
2871 | position295, tokenIndex295 := position, tokenIndex | |
2872 | { | |
2873 | position297, tokenIndex297 := position, tokenIndex | |
2874 | { | |
2875 | position298, tokenIndex298 := position, tokenIndex | |
2876 | if buffer[position] != rune('{') { | |
2877 | goto l299 | |
2878 | } | |
2879 | position++ | |
2880 | goto l298 | |
2881 | l299: | |
2882 | position, tokenIndex = position298, tokenIndex298 | |
2883 | if buffer[position] != rune('}') { | |
2884 | goto l297 | |
2885 | } | |
2886 | position++ | |
2887 | } | |
2888 | l298: | |
2889 | goto l296 | |
2890 | l297: | |
2891 | position, tokenIndex = position297, tokenIndex297 | |
2892 | } | |
2893 | if !matchDot() { | |
2894 | goto l296 | |
2895 | } | |
2896 | goto l295 | |
2897 | l296: | |
2898 | position, tokenIndex = position295, tokenIndex295 | |
2899 | if buffer[position] != rune('{') { | |
2900 | goto l293 | |
2901 | } | |
2902 | position++ | |
2903 | l300: | |
2904 | { | |
2905 | position301, tokenIndex301 := position, tokenIndex | |
2906 | if !_rules[ruleActionBody]() { | |
2907 | goto l301 | |
2908 | } | |
2909 | goto l300 | |
2910 | l301: | |
2911 | position, tokenIndex = position301, tokenIndex301 | |
2912 | } | |
2913 | if buffer[position] != rune('}') { | |
2914 | goto l293 | |
2915 | } | |
2916 | position++ | |
2917 | } | |
2918 | l295: | |
2919 | add(ruleActionBody, position294) | |
2920 | } | |
2921 | memoize(41, position293, tokenIndex293, true) | |
2922 | return true | |
2923 | l293: | |
2924 | memoize(41, position293, tokenIndex293, false) | |
2925 | position, tokenIndex = position293, tokenIndex293 | |
2926 | return false | |
2927 | }, | |
2928 | /* 42 Begin <- <('<' Spacing)> */ | |
2929 | nil, | |
2930 | /* 43 End <- <('>' Spacing)> */ | |
2931 | nil, | |
2932 | /* 45 Action0 <- <{ p.AddPackage(text) }> */ | |
2933 | nil, | |
2934 | /* 46 Action1 <- <{ p.AddPeg(text) }> */ | |
2935 | nil, | |
2936 | /* 47 Action2 <- <{ p.AddState(text) }> */ | |
2937 | nil, | |
2938 | nil, | |
2939 | /* 49 Action3 <- <{ p.AddImport(text) }> */ | |
2940 | nil, | |
2941 | /* 50 Action4 <- <{ p.AddRule(text) }> */ | |
2942 | nil, | |
2943 | /* 51 Action5 <- <{ p.AddExpression() }> */ | |
2944 | nil, | |
2945 | /* 52 Action6 <- <{ p.AddAlternate() }> */ | |
2946 | nil, | |
2947 | /* 53 Action7 <- <{ p.AddNil(); p.AddAlternate() }> */ | |
2948 | nil, | |
2949 | /* 54 Action8 <- <{ p.AddNil() }> */ | |
2950 | nil, | |
2951 | /* 55 Action9 <- <{ p.AddSequence() }> */ | |
2952 | nil, | |
2953 | /* 56 Action10 <- <{ p.AddPredicate(text) }> */ | |
2954 | nil, | |
2955 | /* 57 Action11 <- <{ p.AddStateChange(text) }> */ | |
2956 | nil, | |
2957 | /* 58 Action12 <- <{ p.AddPeekFor() }> */ | |
2958 | nil, | |
2959 | /* 59 Action13 <- <{ p.AddPeekNot() }> */ | |
2960 | nil, | |
2961 | /* 60 Action14 <- <{ p.AddQuery() }> */ | |
2962 | nil, | |
2963 | /* 61 Action15 <- <{ p.AddStar() }> */ | |
2964 | nil, | |
2965 | /* 62 Action16 <- <{ p.AddPlus() }> */ | |
2966 | nil, | |
2967 | /* 63 Action17 <- <{ p.AddName(text) }> */ | |
2968 | nil, | |
2969 | /* 64 Action18 <- <{ p.AddDot() }> */ | |
2970 | nil, | |
2971 | /* 65 Action19 <- <{ p.AddAction(text) }> */ | |
2972 | nil, | |
2973 | /* 66 Action20 <- <{ p.AddPush() }> */ | |
2974 | nil, | |
2975 | /* 67 Action21 <- <{ p.AddSequence() }> */ | |
2976 | nil, | |
2977 | /* 68 Action22 <- <{ p.AddSequence() }> */ | |
2978 | nil, | |
2979 | /* 69 Action23 <- <{ p.AddPeekNot(); p.AddDot(); p.AddSequence() }> */ | |
2980 | nil, | |
2981 | /* 70 Action24 <- <{ p.AddPeekNot(); p.AddDot(); p.AddSequence() }> */ | |
2982 | nil, | |
2983 | /* 71 Action25 <- <{ p.AddAlternate() }> */ | |
2984 | nil, | |
2985 | /* 72 Action26 <- <{ p.AddAlternate() }> */ | |
2986 | nil, | |
2987 | /* 73 Action27 <- <{ p.AddRange() }> */ | |
2988 | nil, | |
2989 | /* 74 Action28 <- <{ p.AddDoubleRange() }> */ | |
2990 | nil, | |
2991 | /* 75 Action29 <- <{ p.AddCharacter(text) }> */ | |
2992 | nil, | |
2993 | /* 76 Action30 <- <{ p.AddDoubleCharacter(text) }> */ | |
2994 | nil, | |
2995 | /* 77 Action31 <- <{ p.AddCharacter(text) }> */ | |
2996 | nil, | |
2997 | /* 78 Action32 <- <{ p.AddCharacter("\a") }> */ | |
2998 | nil, | |
2999 | /* 79 Action33 <- <{ p.AddCharacter("\b") }> */ | |
3000 | nil, | |
3001 | /* 80 Action34 <- <{ p.AddCharacter("\x1B") }> */ | |
3002 | nil, | |
3003 | /* 81 Action35 <- <{ p.AddCharacter("\f") }> */ | |
3004 | nil, | |
3005 | /* 82 Action36 <- <{ p.AddCharacter("\n") }> */ | |
3006 | nil, | |
3007 | /* 83 Action37 <- <{ p.AddCharacter("\r") }> */ | |
3008 | nil, | |
3009 | /* 84 Action38 <- <{ p.AddCharacter("\t") }> */ | |
3010 | nil, | |
3011 | /* 85 Action39 <- <{ p.AddCharacter("\v") }> */ | |
3012 | nil, | |
3013 | /* 86 Action40 <- <{ p.AddCharacter("'") }> */ | |
3014 | nil, | |
3015 | /* 87 Action41 <- <{ p.AddCharacter("\"") }> */ | |
3016 | nil, | |
3017 | /* 88 Action42 <- <{ p.AddCharacter("[") }> */ | |
3018 | nil, | |
3019 | /* 89 Action43 <- <{ p.AddCharacter("]") }> */ | |
3020 | nil, | |
3021 | /* 90 Action44 <- <{ p.AddCharacter("-") }> */ | |
3022 | nil, | |
3023 | /* 91 Action45 <- <{ p.AddHexaCharacter(text) }> */ | |
3024 | nil, | |
3025 | /* 92 Action46 <- <{ p.AddOctalCharacter(text) }> */ | |
3026 | nil, | |
3027 | /* 93 Action47 <- <{ p.AddOctalCharacter(text) }> */ | |
3028 | nil, | |
3029 | /* 94 Action48 <- <{ p.AddCharacter("\\") }> */ | |
3030 | nil, | |
3031 | } | |
3032 | p.rules = _rules | |
3033 | return nil | |
3034 | } |
2 | 2 | import ( |
3 | 3 | "bytes" |
4 | 4 | "io/ioutil" |
5 | "os" | |
5 | 6 | "testing" |
7 | ||
8 | "github.com/pointlander/peg/tree" | |
6 | 9 | ) |
7 | 10 | |
8 | 11 | func TestCorrect(t *testing.T) { |
10 | 13 | type T Peg {} |
11 | 14 | Grammar <- !. |
12 | 15 | ` |
13 | p := &Peg{Tree: New(false, false), Buffer: buffer} | |
16 | p := &Peg{Tree: tree.New(false, false, false), Buffer: buffer} | |
14 | 17 | p.Init() |
15 | 18 | err := p.Parse() |
19 | if err != nil { | |
20 | t.Error(err) | |
21 | } | |
22 | ||
23 | p = &Peg{Tree: tree.New(false, false, false), Buffer: buffer} | |
24 | p.Init(Size(1<<15)) | |
25 | err = p.Parse() | |
16 | 26 | if err != nil { |
17 | 27 | t.Error(err) |
18 | 28 | } |
23 | 33 | type T Peg {} |
24 | 34 | Grammar <- !. |
25 | 35 | ` |
26 | p := &Peg{Tree: New(false, false), Buffer: buffer} | |
27 | p.Init() | |
36 | p := &Peg{Tree: tree.New(false, false, false), Buffer: buffer} | |
37 | p.Init(Size(1<<15)) | |
28 | 38 | err := p.Parse() |
29 | 39 | if err == nil { |
30 | 40 | t.Error("packagenospace was parsed without error") |
37 | 47 | typenospace Peg {} |
38 | 48 | Grammar <- !. |
39 | 49 | ` |
40 | p := &Peg{Tree: New(false, false), Buffer: buffer} | |
41 | p.Init() | |
50 | p := &Peg{Tree: tree.New(false, false, false), Buffer: buffer} | |
51 | p.Init(Size(1<<15)) | |
42 | 52 | err := p.Parse() |
43 | 53 | if err == nil { |
44 | 54 | t.Error("typenospace was parsed without error") |
51 | 61 | t.Error(err) |
52 | 62 | } |
53 | 63 | |
54 | p := &Peg{Tree: New(true, true), Buffer: string(buffer)} | |
55 | p.Init() | |
56 | if err := p.Parse(); err != nil { | |
64 | p := &Peg{Tree: tree.New(true, true, false), Buffer: string(buffer)} | |
65 | p.Init(Size(1<<15)) | |
66 | if err = p.Parse(); err != nil { | |
57 | 67 | t.Error(err) |
58 | 68 | } |
59 | 69 | |
60 | 70 | p.Execute() |
61 | 71 | |
62 | 72 | out := &bytes.Buffer{} |
63 | p.Compile("peg.peg.go", out) | |
64 | ||
65 | bootstrap, err := ioutil.ReadFile("bootstrap.peg.go") | |
73 | p.Compile("peg.peg.go", []string{"./peg", "-inline", "-switch", "peg.peg"}, out) | |
74 | ||
75 | bootstrap, err := ioutil.ReadFile("peg.peg.go") | |
66 | 76 | if err != nil { |
67 | 77 | t.Error(err) |
68 | 78 | } |
69 | 79 | |
70 | 80 | if len(out.Bytes()) != len(bootstrap) { |
71 | t.Error("code generated from peg.peg is not the same as bootstrap.peg.go") | |
81 | t.Error("code generated from peg.peg is not the same as .go") | |
72 | 82 | return |
73 | 83 | } |
74 | 84 | |
75 | 85 | for i, v := range out.Bytes() { |
76 | 86 | if v != bootstrap[i] { |
77 | t.Error("code generated from peg.peg is not the same as bootstrap.peg.go") | |
87 | t.Error("code generated from peg.peg is not the same as .go") | |
78 | 88 | return |
79 | 89 | } |
80 | 90 | } |
81 | 91 | } |
82 | 92 | |
93 | func TestStrict(t *testing.T) { | |
94 | tt := []string{ | |
95 | // rule used but not defined | |
96 | ` | |
97 | package main | |
98 | type test Peg {} | |
99 | Begin <- begin !. | |
100 | `, | |
101 | // rule defined but not used | |
102 | ` | |
103 | package main | |
104 | type test Peg {} | |
105 | Begin <- . | |
106 | unused <- 'unused' | |
107 | `, | |
108 | // left recursive rule | |
109 | `package main | |
110 | type test Peg {} | |
111 | Begin <- Begin 'x' | |
112 | `, | |
113 | } | |
114 | ||
115 | for i, buffer := range tt { | |
116 | p := &Peg{Tree: tree.New(false, false, false), Buffer: buffer} | |
117 | p.Init(Size(1<<15)) | |
118 | if err := p.Parse(); err != nil { | |
119 | t.Fatal(err) | |
120 | } | |
121 | p.Execute() | |
122 | ||
123 | f, err := ioutil.TempFile("", "peg") | |
124 | if err != nil { | |
125 | t.Fatal(err) | |
126 | } | |
127 | defer func() { | |
128 | os.Remove(f.Name()) | |
129 | f.Close() | |
130 | }() | |
131 | out := &bytes.Buffer{} | |
132 | p.Strict = true | |
133 | if err = p.Compile(f.Name(), []string{"peg"}, out); err == nil { | |
134 | t.Fatalf("#%d: expected warning error", i) | |
135 | } | |
136 | p.Strict = false | |
137 | if err = p.Compile(f.Name(), []string{"peg"}, out); err != nil { | |
138 | t.Fatalf("#%d: unexpected error (%v)", i, err) | |
139 | } | |
140 | } | |
141 | } | |
142 | ||
143 | var files = [...]string{ | |
144 | "peg.peg", | |
145 | "grammars/c/c.peg", | |
146 | "grammars/calculator/calculator.peg", | |
147 | "grammars/fexl/fexl.peg", | |
148 | "grammars/java/java_1_7.peg", | |
149 | } | |
150 | ||
151 | func BenchmarkInitOnly(b *testing.B) { | |
152 | pegs := []string{} | |
153 | for _, file := range files { | |
154 | input, err := ioutil.ReadFile(file) | |
155 | if err != nil { | |
156 | b.Error(err) | |
157 | } | |
158 | pegs = append(pegs, string(input)) | |
159 | } | |
160 | ||
161 | b.ResetTimer() | |
162 | for i := 0; i < b.N; i++ { | |
163 | for _, peg := range pegs { | |
164 | p := &Peg{Tree: tree.New(true, true, false), Buffer: peg} | |
165 | p.Init(Size(1<<15)) | |
166 | } | |
167 | } | |
168 | } | |
169 | ||
83 | 170 | func BenchmarkParse(b *testing.B) { |
84 | files := [...]string{ | |
85 | "peg.peg", | |
86 | "grammars/c/c.peg", | |
87 | "grammars/calculator/calculator.peg", | |
88 | "grammars/fexl/fexl.peg", | |
89 | "grammars/java/java_1_7.peg", | |
90 | } | |
91 | 171 | pegs := make([]*Peg, len(files)) |
92 | 172 | for i, file := range files { |
93 | 173 | input, err := ioutil.ReadFile(file) |
95 | 175 | b.Error(err) |
96 | 176 | } |
97 | 177 | |
98 | p := &Peg{Tree: New(true, true), Buffer: string(input)} | |
99 | p.Init() | |
178 | p := &Peg{Tree: tree.New(true, true, false), Buffer: string(input)} | |
179 | p.Init(Size(1<<15)) | |
100 | 180 | pegs[i] = p |
101 | 181 | } |
102 | 182 | |
103 | 183 | b.ResetTimer() |
104 | 184 | for i := 0; i < b.N; i++ { |
105 | 185 | for _, peg := range pegs { |
186 | if err := peg.Parse(); err != nil { | |
187 | b.Error(err) | |
188 | } | |
189 | b.StopTimer() | |
106 | 190 | peg.Reset() |
107 | if err := peg.Parse(); err != nil { | |
108 | b.Error(err) | |
109 | } | |
110 | } | |
111 | } | |
112 | } | |
191 | b.StartTimer() | |
192 | } | |
193 | } | |
194 | } | |
195 | ||
196 | func BenchmarkResetAndParse(b *testing.B) { | |
197 | pegs := make([]*Peg, len(files)) | |
198 | for i, file := range files { | |
199 | input, err := ioutil.ReadFile(file) | |
200 | if err != nil { | |
201 | b.Error(err) | |
202 | } | |
203 | ||
204 | p := &Peg{Tree: tree.New(true, true, false), Buffer: string(input)} | |
205 | p.Init(Size(1<<15)) | |
206 | pegs[i] = p | |
207 | } | |
208 | ||
209 | b.ResetTimer() | |
210 | for i := 0; i < b.N; i++ { | |
211 | for _, peg := range pegs { | |
212 | if err := peg.Parse(); err != nil { | |
213 | b.Error(err) | |
214 | } | |
215 | peg.Reset() | |
216 | } | |
217 | } | |
218 | } | |
219 | ||
220 | func BenchmarkInitAndParse(b *testing.B) { | |
221 | strs := []string{} | |
222 | for _, file := range files { | |
223 | input, err := ioutil.ReadFile(file) | |
224 | if err != nil { | |
225 | b.Error(err) | |
226 | } | |
227 | strs = append(strs, string(input)) | |
228 | } | |
229 | ||
230 | b.ResetTimer() | |
231 | for i := 0; i < b.N; i++ { | |
232 | for _, str := range strs { | |
233 | peg := &Peg{Tree: tree.New(true, true, false), Buffer: str} | |
234 | peg.Init(Size(1<<15)) | |
235 | if err := peg.Parse(); err != nil { | |
236 | b.Error(err) | |
237 | } | |
238 | } | |
239 | } | |
240 | } | |
241 | ||
242 | func BenchmarkInitResetAndParse(b *testing.B) { | |
243 | strs := []string{} | |
244 | for _, file := range files { | |
245 | input, err := ioutil.ReadFile(file) | |
246 | if err != nil { | |
247 | b.Error(err) | |
248 | } | |
249 | strs = append(strs, string(input)) | |
250 | } | |
251 | ||
252 | b.ResetTimer() | |
253 | for i := 0; i < b.N; i++ { | |
254 | for _, str := range strs { | |
255 | peg := &Peg{Tree: tree.New(true, true, false), Buffer: str} | |
256 | peg.Init(Size(1<<15)) | |
257 | if err := peg.Parse(); err != nil { | |
258 | b.Error(err) | |
259 | } | |
260 | peg.Reset() | |
261 | } | |
262 | } | |
263 | } |
0 | // Copyright 2010 The Go Authors. All rights reserved. | |
1 | // Use of this source code is governed by a BSD-style | |
2 | // license that can be found in the LICENSE file. | |
3 | ||
4 | package tree | |
5 | ||
6 | import ( | |
7 | "bytes" | |
8 | "fmt" | |
9 | "go/parser" | |
10 | "go/printer" | |
11 | "go/token" | |
12 | "io" | |
13 | "math" | |
14 | "os" | |
15 | "sort" | |
16 | "strconv" | |
17 | "strings" | |
18 | "text/template" | |
19 | ||
20 | "github.com/pointlander/jetset" | |
21 | ) | |
22 | ||
23 | const pegHeaderTemplate = `package {{.PackageName}} | |
24 | ||
25 | // Code generated by {{.Generator}} DO NOT EDIT. | |
26 | ||
27 | ||
28 | import ( | |
29 | {{range .Imports}}"{{.}}" | |
30 | {{end}} | |
31 | ) | |
32 | ||
33 | const endSymbol rune = {{.EndSymbol}} | |
34 | ||
35 | /* The rule types inferred from the grammar are below. */ | |
36 | type pegRule {{.PegRuleType}} | |
37 | ||
38 | const ( | |
39 | ruleUnknown pegRule = iota | |
40 | {{range .RuleNames}}rule{{.String}} | |
41 | {{end}} | |
42 | ) | |
43 | ||
44 | var rul3s = [...]string { | |
45 | "Unknown", | |
46 | {{range .RuleNames}}"{{.String}}", | |
47 | {{end}} | |
48 | } | |
49 | ||
50 | type token32 struct { | |
51 | pegRule | |
52 | begin, end uint32 | |
53 | } | |
54 | ||
55 | func (t *token32) String() string { | |
56 | return fmt.Sprintf("\x1B[34m%v\x1B[m %v %v", rul3s[t.pegRule], t.begin, t.end) | |
57 | } | |
58 | ||
59 | {{if .Ast}} | |
60 | type node32 struct { | |
61 | token32 | |
62 | up, next *node32 | |
63 | } | |
64 | ||
65 | func (node *node32) print(w io.Writer, pretty bool, buffer string) { | |
66 | var print func(node *node32, depth int) | |
67 | print = func(node *node32, depth int) { | |
68 | for node != nil { | |
69 | for c := 0; c < depth; c++ { | |
70 | fmt.Fprintf(w, " ") | |
71 | } | |
72 | rule := rul3s[node.pegRule] | |
73 | quote := strconv.Quote(string(([]rune(buffer)[node.begin:node.end]))) | |
74 | if !pretty { | |
75 | fmt.Fprintf(w, "%v %v\n", rule, quote) | |
76 | } else { | |
77 | fmt.Fprintf(w, "\x1B[36m%v\x1B[m %v\n", rule, quote) | |
78 | } | |
79 | if node.up != nil { | |
80 | print(node.up, depth + 1) | |
81 | } | |
82 | node = node.next | |
83 | } | |
84 | } | |
85 | print(node, 0) | |
86 | } | |
87 | ||
88 | func (node *node32) Print(w io.Writer, buffer string) { | |
89 | node.print(w, false, buffer) | |
90 | } | |
91 | ||
92 | func (node *node32) PrettyPrint(w io.Writer, buffer string) { | |
93 | node.print(w, true, buffer) | |
94 | } | |
95 | ||
96 | type tokens32 struct { | |
97 | tree []token32 | |
98 | } | |
99 | ||
100 | func (t *tokens32) Trim(length uint32) { | |
101 | t.tree = t.tree[:length] | |
102 | } | |
103 | ||
104 | func (t *tokens32) Print() { | |
105 | for _, token := range t.tree { | |
106 | fmt.Println(token.String()) | |
107 | } | |
108 | } | |
109 | ||
110 | func (t *tokens32) AST() *node32 { | |
111 | type element struct { | |
112 | node *node32 | |
113 | down *element | |
114 | } | |
115 | tokens := t.Tokens() | |
116 | var stack *element | |
117 | for _, token := range tokens { | |
118 | if token.begin == token.end { | |
119 | continue | |
120 | } | |
121 | node := &node32{token32: token} | |
122 | for stack != nil && stack.node.begin >= token.begin && stack.node.end <= token.end { | |
123 | stack.node.next = node.up | |
124 | node.up = stack.node | |
125 | stack = stack.down | |
126 | } | |
127 | stack = &element{node: node, down: stack} | |
128 | } | |
129 | if stack != nil { | |
130 | return stack.node | |
131 | } | |
132 | return nil | |
133 | } | |
134 | ||
135 | func (t *tokens32) PrintSyntaxTree(buffer string) { | |
136 | t.AST().Print(os.Stdout, buffer) | |
137 | } | |
138 | ||
139 | func (t *tokens32) WriteSyntaxTree(w io.Writer, buffer string) { | |
140 | t.AST().Print(w, buffer) | |
141 | } | |
142 | ||
143 | func (t *tokens32) PrettyPrintSyntaxTree(buffer string) { | |
144 | t.AST().PrettyPrint(os.Stdout, buffer) | |
145 | } | |
146 | ||
147 | func (t *tokens32) Add(rule pegRule, begin, end, index uint32) { | |
148 | tree, i := t.tree, int(index) | |
149 | if i >= len(tree) { | |
150 | t.tree = append(tree, token32{pegRule: rule, begin: begin, end: end}) | |
151 | return | |
152 | } | |
153 | tree[i] = token32{pegRule: rule, begin: begin, end: end} | |
154 | } | |
155 | ||
156 | func (t *tokens32) Tokens() []token32 { | |
157 | return t.tree | |
158 | } | |
159 | {{end}} | |
160 | ||
161 | type {{.StructName}} struct { | |
162 | {{.StructVariables}} | |
163 | Buffer string | |
164 | buffer []rune | |
165 | rules [{{.RulesCount}}]func() bool | |
166 | parse func(rule ...int) error | |
167 | reset func() | |
168 | Pretty bool | |
169 | {{if .Ast -}} | |
170 | disableMemoize bool | |
171 | tokens32 | |
172 | {{end -}} | |
173 | } | |
174 | ||
175 | func (p *{{.StructName}}) Parse(rule ...int) error { | |
176 | return p.parse(rule...) | |
177 | } | |
178 | ||
179 | func (p *{{.StructName}}) Reset() { | |
180 | p.reset() | |
181 | } | |
182 | ||
183 | type textPosition struct { | |
184 | line, symbol int | |
185 | } | |
186 | ||
187 | type textPositionMap map[int] textPosition | |
188 | ||
189 | func translatePositions(buffer []rune, positions []int) textPositionMap { | |
190 | length, translations, j, line, symbol := len(positions), make(textPositionMap, len(positions)), 0, 1, 0 | |
191 | sort.Ints(positions) | |
192 | ||
193 | search: for i, c := range buffer { | |
194 | if c == '\n' {line, symbol = line + 1, 0} else {symbol++} | |
195 | if i == positions[j] { | |
196 | translations[positions[j]] = textPosition{line, symbol} | |
197 | for j++; j < length; j++ {if i != positions[j] {continue search}} | |
198 | break search | |
199 | } | |
200 | } | |
201 | ||
202 | return translations | |
203 | } | |
204 | ||
205 | type parseError struct { | |
206 | p *{{.StructName}} | |
207 | max token32 | |
208 | } | |
209 | ||
210 | func (e *parseError) Error() string { | |
211 | tokens, err := []token32{e.max}, "\n" | |
212 | positions, p := make([]int, 2 * len(tokens)), 0 | |
213 | for _, token := range tokens { | |
214 | positions[p], p = int(token.begin), p + 1 | |
215 | positions[p], p = int(token.end), p + 1 | |
216 | } | |
217 | translations := translatePositions(e.p.buffer, positions) | |
218 | format := "parse error near %v (line %v symbol %v - line %v symbol %v):\n%v\n" | |
219 | if e.p.Pretty { | |
220 | format = "parse error near \x1B[34m%v\x1B[m (line %v symbol %v - line %v symbol %v):\n%v\n" | |
221 | } | |
222 | for _, token := range tokens { | |
223 | begin, end := int(token.begin), int(token.end) | |
224 | err += fmt.Sprintf(format, | |
225 | rul3s[token.pegRule], | |
226 | translations[begin].line, translations[begin].symbol, | |
227 | translations[end].line, translations[end].symbol, | |
228 | strconv.Quote(string(e.p.buffer[begin:end]))) | |
229 | } | |
230 | ||
231 | return err | |
232 | } | |
233 | ||
234 | {{if .Ast}} | |
235 | func (p *{{.StructName}}) PrintSyntaxTree() { | |
236 | if p.Pretty { | |
237 | p.tokens32.PrettyPrintSyntaxTree(p.Buffer) | |
238 | } else { | |
239 | p.tokens32.PrintSyntaxTree(p.Buffer) | |
240 | } | |
241 | } | |
242 | ||
243 | func (p *{{.StructName}}) WriteSyntaxTree(w io.Writer) { | |
244 | p.tokens32.WriteSyntaxTree(w, p.Buffer) | |
245 | } | |
246 | ||
247 | func (p *{{.StructName}}) SprintSyntaxTree() string { | |
248 | var b bytes.Buffer | |
249 | p.WriteSyntaxTree(&b) | |
250 | return b.String() | |
251 | } | |
252 | ||
253 | {{if .HasActions}} | |
254 | func (p *{{.StructName}}) Execute() { | |
255 | buffer, _buffer, text, begin, end := p.Buffer, p.buffer, "", 0, 0 | |
256 | for _, token := range p.Tokens() { | |
257 | switch (token.pegRule) { | |
258 | {{if .HasPush}} | |
259 | case rulePegText: | |
260 | begin, end = int(token.begin), int(token.end) | |
261 | text = string(_buffer[begin:end]) | |
262 | {{end}} | |
263 | {{range .Actions}}case ruleAction{{.GetId}}: | |
264 | {{.String}} | |
265 | {{end}} | |
266 | } | |
267 | } | |
268 | _, _, _, _, _ = buffer, _buffer, text, begin, end | |
269 | } | |
270 | {{end}} | |
271 | {{end}} | |
272 | ||
273 | func Pretty(pretty bool) func(*{{.StructName}}) error { | |
274 | return func(p *{{.StructName}}) error { | |
275 | p.Pretty = pretty | |
276 | return nil | |
277 | } | |
278 | } | |
279 | ||
280 | {{if .Ast -}} | |
281 | func Size(size int) func(*{{.StructName}}) error { | |
282 | return func(p *{{.StructName}}) error { | |
283 | p.tokens32 = tokens32{tree: make([]token32, 0, size)} | |
284 | return nil | |
285 | } | |
286 | } | |
287 | ||
288 | func DisableMemoize() func(*{{.StructName}}) error { | |
289 | return func(p *{{.StructName}}) error { | |
290 | p.disableMemoize = true | |
291 | return nil | |
292 | } | |
293 | } | |
294 | ||
295 | type memo struct { | |
296 | Matched bool | |
297 | Partial []token32 | |
298 | } | |
299 | ||
300 | type memoKey struct { | |
301 | Rule uint32 | |
302 | Position uint32 | |
303 | } | |
304 | {{end -}} | |
305 | ||
306 | func (p *{{.StructName}}) Init(options ...func(*{{.StructName}}) error) error { | |
307 | var ( | |
308 | max token32 | |
309 | position, tokenIndex uint32 | |
310 | buffer []rune | |
311 | {{if .Ast -}} | |
312 | memoization map[memoKey]memo | |
313 | {{end -}} | |
314 | {{if not .Ast -}} | |
315 | {{if .HasPush -}} | |
316 | text string | |
317 | {{end -}} | |
318 | {{end -}} | |
319 | ) | |
320 | for _, option := range options { | |
321 | err := option(p) | |
322 | if err != nil { | |
323 | return err | |
324 | } | |
325 | } | |
326 | p.reset = func() { | |
327 | max = token32{} | |
328 | position, tokenIndex = 0, 0 | |
329 | {{if .Ast -}} | |
330 | memoization = make(map[memoKey]memo) | |
331 | {{end -}} | |
332 | ||
333 | p.buffer = []rune(p.Buffer) | |
334 | if len(p.buffer) == 0 || p.buffer[len(p.buffer) - 1] != endSymbol { | |
335 | p.buffer = append(p.buffer, endSymbol) | |
336 | } | |
337 | buffer = p.buffer | |
338 | } | |
339 | p.reset() | |
340 | ||
341 | _rules := p.rules | |
342 | {{if .Ast -}} | |
343 | tree := p.tokens32 | |
344 | {{end -}} | |
345 | p.parse = func(rule ...int) error { | |
346 | r := 1 | |
347 | if len(rule) > 0 { | |
348 | r = rule[0] | |
349 | } | |
350 | matches := p.rules[r]() | |
351 | {{if .Ast -}} | |
352 | p.tokens32 = tree | |
353 | {{end -}} | |
354 | if matches { | |
355 | {{if .Ast -}} | |
356 | p.Trim(tokenIndex) | |
357 | {{end -}} | |
358 | return nil | |
359 | } | |
360 | return &parseError{p, max} | |
361 | } | |
362 | ||
363 | add := func(rule pegRule, begin uint32) { | |
364 | {{if .Ast -}} | |
365 | tree.Add(rule, begin, position, tokenIndex) | |
366 | {{end -}} | |
367 | tokenIndex++ | |
368 | if begin != position && position > max.end { | |
369 | max = token32{rule, begin, position} | |
370 | } | |
371 | } | |
372 | ||
373 | {{if .Ast -}} | |
374 | memoize := func(rule uint32, begin uint32, tokenIndexStart uint32, matched bool) { | |
375 | if p.disableMemoize { | |
376 | return | |
377 | } | |
378 | key := memoKey{rule, begin} | |
379 | if !matched { | |
380 | memoization[key] = memo{Matched: false} | |
381 | } else { | |
382 | t := tree.tree[tokenIndexStart:tokenIndex] | |
383 | tokenCopy := make([]token32, len(t)) | |
384 | copy(tokenCopy, t) | |
385 | memoization[key] = memo{Matched: true, Partial: tokenCopy} | |
386 | } | |
387 | } | |
388 | ||
389 | memoizedResult := func(m memo) bool { | |
390 | if !m.Matched { | |
391 | return false | |
392 | } | |
393 | tree.tree = append(tree.tree[:tokenIndex], m.Partial...) | |
394 | tokenIndex += uint32(len(m.Partial)) | |
395 | position = m.Partial[len(m.Partial)-1].end | |
396 | if tree.tree[tokenIndex-1].begin != position && position > max.end { | |
397 | max = tree.tree[tokenIndex-1] | |
398 | } | |
399 | return true | |
400 | } | |
401 | {{end -}} | |
402 | ||
403 | {{if .HasDot}} | |
404 | matchDot := func() bool { | |
405 | if buffer[position] != endSymbol { | |
406 | position++ | |
407 | return true | |
408 | } | |
409 | return false | |
410 | } | |
411 | {{end}} | |
412 | ||
413 | {{if .HasCharacter}} | |
414 | /*matchChar := func(c byte) bool { | |
415 | if buffer[position] == c { | |
416 | position++ | |
417 | return true | |
418 | } | |
419 | return false | |
420 | }*/ | |
421 | {{end}} | |
422 | ||
423 | {{if .HasString}} | |
424 | matchString := func(s string) bool { | |
425 | i := position | |
426 | for _, c := range s { | |
427 | if buffer[i] != c { | |
428 | return false | |
429 | } | |
430 | i++ | |
431 | } | |
432 | position = i | |
433 | return true | |
434 | } | |
435 | {{end}} | |
436 | ||
437 | {{if .HasRange}} | |
438 | /*matchRange := func(lower byte, upper byte) bool { | |
439 | if c := buffer[position]; c >= lower && c <= upper { | |
440 | position++ | |
441 | return true | |
442 | } | |
443 | return false | |
444 | }*/ | |
445 | {{end}} | |
446 | ||
447 | _rules = [...]func() bool { | |
448 | nil,` | |
449 | ||
450 | type Type uint8 | |
451 | ||
452 | const ( | |
453 | TypeUnknown Type = iota | |
454 | TypeRule | |
455 | TypeName | |
456 | TypeDot | |
457 | TypeCharacter | |
458 | TypeRange | |
459 | TypeString | |
460 | TypePredicate | |
461 | TypeStateChange | |
462 | TypeCommit | |
463 | TypeAction | |
464 | TypePackage | |
465 | TypeImport | |
466 | TypeState | |
467 | TypeAlternate | |
468 | TypeUnorderedAlternate | |
469 | TypeSequence | |
470 | TypePeekFor | |
471 | TypePeekNot | |
472 | TypeQuery | |
473 | TypeStar | |
474 | TypePlus | |
475 | TypePeg | |
476 | TypePush | |
477 | TypeImplicitPush | |
478 | TypeNil | |
479 | TypeLast | |
480 | ) | |
481 | ||
482 | var TypeMap = [...]string{ | |
483 | "TypeUnknown", | |
484 | "TypeRule", | |
485 | "TypeName", | |
486 | "TypeDot", | |
487 | "TypeCharacter", | |
488 | "TypeRange", | |
489 | "TypeString", | |
490 | "TypePredicate", | |
491 | "TypeStateChange", | |
492 | "TypeCommit", | |
493 | "TypeAction", | |
494 | "TypePackage", | |
495 | "TypeImport", | |
496 | "TypeState", | |
497 | "TypeAlternate", | |
498 | "TypeUnorderedAlternate", | |
499 | "TypeSequence", | |
500 | "TypePeekFor", | |
501 | "TypePeekNot", | |
502 | "TypeQuery", | |
503 | "TypeStar", | |
504 | "TypePlus", | |
505 | "TypePeg", | |
506 | "TypePush", | |
507 | "TypeImplicitPush", | |
508 | "TypeNil", | |
509 | "TypeLast"} | |
510 | ||
511 | func (t Type) GetType() Type { | |
512 | return t | |
513 | } | |
514 | ||
515 | type Node interface { | |
516 | fmt.Stringer | |
517 | debug() | |
518 | ||
519 | Escaped() string | |
520 | SetString(s string) | |
521 | ||
522 | GetType() Type | |
523 | SetType(t Type) | |
524 | ||
525 | GetId() int | |
526 | SetId(id int) | |
527 | ||
528 | Init() | |
529 | Front() *node | |
530 | Next() *node | |
531 | PushFront(value *node) | |
532 | PopFront() *node | |
533 | PushBack(value *node) | |
534 | Len() int | |
535 | Copy() *node | |
536 | Slice() []*node | |
537 | } | |
538 | ||
539 | type node struct { | |
540 | Type | |
541 | string | |
542 | id int | |
543 | ||
544 | front *node | |
545 | back *node | |
546 | length int | |
547 | ||
548 | /* use hash table here instead of Copy? */ | |
549 | next *node | |
550 | } | |
551 | ||
552 | func (n *node) String() string { | |
553 | return n.string | |
554 | } | |
555 | ||
556 | func (n *node) debug() { | |
557 | if len(n.string) == 1 { | |
558 | fmt.Printf("%v %v '%v' %d\n", n.id, TypeMap[n.Type], n.string, n.string[0]) | |
559 | } else { | |
560 | fmt.Printf("%v %v '%v'\n", n.id, TypeMap[n.Type], n.string) | |
561 | } | |
562 | } | |
563 | ||
564 | func (n *node) Escaped() string { | |
565 | return escape(n.string) | |
566 | } | |
567 | ||
568 | func (n *node) SetString(s string) { | |
569 | n.string = s | |
570 | } | |
571 | ||
572 | func (n *node) SetType(t Type) { | |
573 | n.Type = t | |
574 | } | |
575 | ||
576 | func (n *node) GetId() int { | |
577 | return n.id | |
578 | } | |
579 | ||
580 | func (n *node) SetId(id int) { | |
581 | n.id = id | |
582 | } | |
583 | ||
584 | func (n *node) Init() { | |
585 | n.front = nil | |
586 | n.back = nil | |
587 | n.length = 0 | |
588 | } | |
589 | ||
590 | func (n *node) Front() *node { | |
591 | return n.front | |
592 | } | |
593 | ||
594 | func (n *node) Next() *node { | |
595 | return n.next | |
596 | } | |
597 | ||
598 | func (n *node) PushFront(value *node) { | |
599 | if n.back == nil { | |
600 | n.back = value | |
601 | } else { | |
602 | value.next = n.front | |
603 | } | |
604 | n.front = value | |
605 | n.length++ | |
606 | } | |
607 | ||
608 | func (n *node) PopFront() *node { | |
609 | front := n.front | |
610 | ||
611 | switch true { | |
612 | case front == nil: | |
613 | panic("tree is empty") | |
614 | case front == n.back: | |
615 | n.front, n.back = nil, nil | |
616 | default: | |
617 | n.front, front.next = front.next, nil | |
618 | } | |
619 | ||
620 | n.length-- | |
621 | return front | |
622 | } | |
623 | ||
624 | func (n *node) PushBack(value *node) { | |
625 | if n.front == nil { | |
626 | n.front = value | |
627 | } else { | |
628 | n.back.next = value | |
629 | } | |
630 | n.back = value | |
631 | n.length++ | |
632 | } | |
633 | ||
634 | func (n *node) Len() (c int) { | |
635 | return n.length | |
636 | } | |
637 | ||
638 | func (n *node) Copy() *node { | |
639 | return &node{Type: n.Type, string: n.string, id: n.id, front: n.front, back: n.back, length: n.length} | |
640 | } | |
641 | ||
642 | func (n *node) Slice() []*node { | |
643 | s := make([]*node, n.length) | |
644 | for element, i := n.Front(), 0; element != nil; element, i = element.Next(), i+1 { | |
645 | s[i] = element | |
646 | } | |
647 | return s | |
648 | } | |
649 | ||
650 | /* A tree data structure into which a PEG can be parsed. */ | |
651 | type Tree struct { | |
652 | Rules map[string]Node | |
653 | rulesCount map[string]uint | |
654 | node | |
655 | inline, _switch, Ast bool | |
656 | Strict bool | |
657 | ||
658 | Generator string | |
659 | RuleNames []Node | |
660 | PackageName string | |
661 | Imports []string | |
662 | EndSymbol rune | |
663 | PegRuleType string | |
664 | StructName string | |
665 | StructVariables string | |
666 | RulesCount int | |
667 | Bits int | |
668 | HasActions bool | |
669 | Actions []Node | |
670 | HasPush bool | |
671 | HasCommit bool | |
672 | HasDot bool | |
673 | HasCharacter bool | |
674 | HasString bool | |
675 | HasRange bool | |
676 | } | |
677 | ||
678 | func New(inline, _switch, noast bool) *Tree { | |
679 | return &Tree{ | |
680 | Rules: make(map[string]Node), | |
681 | rulesCount: make(map[string]uint), | |
682 | inline: inline, | |
683 | _switch: _switch, | |
684 | Ast: !noast, | |
685 | } | |
686 | } | |
687 | ||
688 | func (t *Tree) AddRule(name string) { | |
689 | t.PushFront(&node{Type: TypeRule, string: name, id: t.RulesCount}) | |
690 | t.RulesCount++ | |
691 | } | |
692 | ||
693 | func (t *Tree) AddExpression() { | |
694 | expression := t.PopFront() | |
695 | rule := t.PopFront() | |
696 | rule.PushBack(expression) | |
697 | t.PushBack(rule) | |
698 | } | |
699 | ||
700 | func (t *Tree) AddName(text string) { | |
701 | t.PushFront(&node{Type: TypeName, string: text}) | |
702 | } | |
703 | ||
704 | func (t *Tree) AddDot() { t.PushFront(&node{Type: TypeDot, string: "."}) } | |
705 | func (t *Tree) AddCharacter(text string) { | |
706 | t.PushFront(&node{Type: TypeCharacter, string: text}) | |
707 | } | |
708 | func (t *Tree) AddDoubleCharacter(text string) { | |
709 | t.PushFront(&node{Type: TypeCharacter, string: strings.ToLower(text)}) | |
710 | t.PushFront(&node{Type: TypeCharacter, string: strings.ToUpper(text)}) | |
711 | t.AddAlternate() | |
712 | } | |
713 | func (t *Tree) AddHexaCharacter(text string) { | |
714 | hexa, _ := strconv.ParseInt(text, 16, 32) | |
715 | t.PushFront(&node{Type: TypeCharacter, string: string(rune(hexa))}) | |
716 | } | |
717 | func (t *Tree) AddOctalCharacter(text string) { | |
718 | octal, _ := strconv.ParseInt(text, 8, 8) | |
719 | t.PushFront(&node{Type: TypeCharacter, string: string(rune(octal))}) | |
720 | } | |
721 | func (t *Tree) AddPredicate(text string) { t.PushFront(&node{Type: TypePredicate, string: text}) } | |
722 | func (t *Tree) AddStateChange(text string) { t.PushFront(&node{Type: TypeStateChange, string: text}) } | |
723 | func (t *Tree) AddNil() { t.PushFront(&node{Type: TypeNil, string: "<nil>"}) } | |
724 | func (t *Tree) AddAction(text string) { t.PushFront(&node{Type: TypeAction, string: text}) } | |
725 | func (t *Tree) AddPackage(text string) { t.PushBack(&node{Type: TypePackage, string: text}) } | |
726 | func (t *Tree) AddImport(text string) { t.PushBack(&node{Type: TypeImport, string: text}) } | |
727 | func (t *Tree) AddState(text string) { | |
728 | peg := t.PopFront() | |
729 | peg.PushBack(&node{Type: TypeState, string: text}) | |
730 | t.PushBack(peg) | |
731 | } | |
732 | ||
733 | func (t *Tree) addList(listType Type) { | |
734 | a := t.PopFront() | |
735 | b := t.PopFront() | |
736 | var l *node | |
737 | if b.GetType() == listType { | |
738 | l = b | |
739 | } else { | |
740 | l = &node{Type: listType} | |
741 | l.PushBack(b) | |
742 | } | |
743 | l.PushBack(a) | |
744 | t.PushFront(l) | |
745 | } | |
746 | func (t *Tree) AddAlternate() { t.addList(TypeAlternate) } | |
747 | func (t *Tree) AddSequence() { t.addList(TypeSequence) } | |
748 | func (t *Tree) AddRange() { t.addList(TypeRange) } | |
749 | func (t *Tree) AddDoubleRange() { | |
750 | a := t.PopFront() | |
751 | b := t.PopFront() | |
752 | ||
753 | t.AddCharacter(strings.ToLower(b.String())) | |
754 | t.AddCharacter(strings.ToLower(a.String())) | |
755 | t.addList(TypeRange) | |
756 | ||
757 | t.AddCharacter(strings.ToUpper(b.String())) | |
758 | t.AddCharacter(strings.ToUpper(a.String())) | |
759 | t.addList(TypeRange) | |
760 | ||
761 | t.AddAlternate() | |
762 | } | |
763 | ||
764 | func (t *Tree) addFix(fixType Type) { | |
765 | n := &node{Type: fixType} | |
766 | n.PushBack(t.PopFront()) | |
767 | t.PushFront(n) | |
768 | } | |
769 | func (t *Tree) AddPeekFor() { t.addFix(TypePeekFor) } | |
770 | func (t *Tree) AddPeekNot() { t.addFix(TypePeekNot) } | |
771 | func (t *Tree) AddQuery() { t.addFix(TypeQuery) } | |
772 | func (t *Tree) AddStar() { t.addFix(TypeStar) } | |
773 | func (t *Tree) AddPlus() { t.addFix(TypePlus) } | |
774 | func (t *Tree) AddPush() { t.addFix(TypePush) } | |
775 | ||
776 | func (t *Tree) AddPeg(text string) { t.PushFront(&node{Type: TypePeg, string: text}) } | |
777 | ||
778 | func join(tasks []func()) { | |
779 | length := len(tasks) | |
780 | done := make(chan int, length) | |
781 | for _, task := range tasks { | |
782 | go func(task func()) { task(); done <- 1 }(task) | |
783 | } | |
784 | for d := <-done; d < length; d += <-done { | |
785 | } | |
786 | } | |
787 | ||
788 | func escape(c string) string { | |
789 | switch c { | |
790 | case "'": | |
791 | return "\\'" | |
792 | case "\"": | |
793 | return "\"" | |
794 | default: | |
795 | c = strconv.Quote(c) | |
796 | return c[1 : len(c)-1] | |
797 | } | |
798 | } | |
799 | ||
800 | func (t *Tree) Compile(file string, args []string, out io.Writer) (err error) { | |
801 | t.AddImport("fmt") | |
802 | if t.Ast { | |
803 | t.AddImport("io") | |
804 | t.AddImport("os") | |
805 | t.AddImport("bytes") | |
806 | } | |
807 | t.AddImport("sort") | |
808 | t.AddImport("strconv") | |
809 | t.EndSymbol = 0x110000 | |
810 | t.RulesCount++ | |
811 | ||
812 | t.Generator = strings.Join(args, " ") | |
813 | ||
814 | var werr error | |
815 | warn := func(e error) { | |
816 | if werr == nil { | |
817 | werr = fmt.Errorf("warning: %s.", e) | |
818 | } else { | |
819 | werr = fmt.Errorf("%s\nwarning: %s", werr, e) | |
820 | } | |
821 | } | |
822 | ||
	// counts tallies, per node type, how many nodes of each type the
	// whole grammar contains; countsByRule keeps the same tally per rule,
	// indexed by rule id (slots for rules synthesized below are appended
	// as they are created, so ids stay aligned with indices).
	counts := [TypeLast]uint{}
	countsByRule := make([]*[TypeLast]uint, t.RulesCount)
	{
		// rule is the rule currently being linked (set in the second
		// pass below); link's TypePush case reads it.
		var rule *node
		var link func(countsForRule *[TypeLast]uint, node Node)
		// link walks an expression tree, counting node types and
		// rewriting some nodes in place:
		//   - each TypeAction is lifted into its own synthetic rule
		//     ("Action<N>") and replaced by a TypeName reference to it;
		//   - a TypeName referring to a rule with no definition gets an
		//     empty placeholder rule so later passes can resolve it
		//     ("used but not defined" is warned about at emit time);
		//   - each TypePush ensures the special "PegText" rule exists
		//     and appends a copy of the enclosing rule as capture target.
		link = func(countsForRule *[TypeLast]uint, n Node) {
			nodeType := n.GetType()
			id := counts[nodeType]
			counts[nodeType]++
			countsForRule[nodeType]++
			switch nodeType {
			case TypeAction:
				// Turn the inline action into a named rule of its own;
				// the action body lives on in t.Actions.
				n.SetId(int(id))
				copy, name := n.Copy(), fmt.Sprintf("Action%v", id)
				t.Actions = append(t.Actions, copy)
				n.Init()
				n.SetType(TypeName)
				n.SetString(name)
				n.SetId(t.RulesCount)

				emptyRule := &node{Type: TypeRule, string: name, id: t.RulesCount}
				implicitPush := &node{Type: TypeImplicitPush}
				emptyRule.PushBack(implicitPush)
				implicitPush.PushBack(copy)
				implicitPush.PushBack(emptyRule.Copy())
				t.PushBack(emptyRule)
				t.RulesCount++

				t.Rules[name] = emptyRule
				t.RuleNames = append(t.RuleNames, emptyRule)
				countsByRule = append(countsByRule, &[TypeLast]uint{})
			case TypeName:
				name := n.String()
				if _, ok := t.Rules[name]; !ok {
					// Referenced but not defined: register an empty
					// placeholder so lookups succeed downstream.
					emptyRule := &node{Type: TypeRule, string: name, id: t.RulesCount}
					implicitPush := &node{Type: TypeImplicitPush}
					emptyRule.PushBack(implicitPush)
					implicitPush.PushBack(&node{Type: TypeNil, string: "<nil>"})
					implicitPush.PushBack(emptyRule.Copy())
					t.PushBack(emptyRule)
					t.RulesCount++

					t.Rules[name] = emptyRule
					t.RuleNames = append(t.RuleNames, emptyRule)
					countsByRule = append(countsByRule, &[TypeLast]uint{})
				}
			case TypePush:
				// A text capture: make sure "PegText" exists, then hang
				// a copy of the current rule off the push node.
				copy, name := rule.Copy(), "PegText"
				copy.SetString(name)
				if _, ok := t.Rules[name]; !ok {
					emptyRule := &node{Type: TypeRule, string: name, id: t.RulesCount}
					emptyRule.PushBack(&node{Type: TypeNil, string: "<nil>"})
					t.PushBack(emptyRule)
					t.RulesCount++

					t.Rules[name] = emptyRule
					t.RuleNames = append(t.RuleNames, emptyRule)
					countsByRule = append(countsByRule, &[TypeLast]uint{})
				}
				n.PushBack(copy)
				fallthrough
			case TypeImplicitPush:
				link(countsForRule, n.Front())
			case TypeRule, TypeAlternate, TypeUnorderedAlternate, TypeSequence,
				TypePeekFor, TypePeekNot, TypeQuery, TypeStar, TypePlus:
				for _, node := range n.Slice() {
					link(countsForRule, node)
				}
			}
		}
		/* first pass: collect top-level declarations (package, imports,
		peg struct) and register every explicitly defined rule, wrapping
		each rule body in an implicit push so its result is captured. */
		for _, node := range t.Slice() {
			switch node.GetType() {
			case TypePackage:
				t.PackageName = node.String()
			case TypeImport:
				t.Imports = append(t.Imports, node.String())
			case TypePeg:
				t.StructName = node.String()
				t.StructVariables = node.Front().String()
			case TypeRule:
				if _, ok := t.Rules[node.String()]; !ok {
					expression := node.Front()
					copy := expression.Copy()
					expression.Init()
					expression.SetType(TypeImplicitPush)
					expression.PushBack(copy)
					expression.PushBack(node.Copy())

					t.Rules[node.String()] = node
					t.RuleNames = append(t.RuleNames, node)
				}
			}
		}
		/* sort imports to satisfy gofmt */
		sort.Strings(t.Imports)

		/* second pass: link every rule body, recording per-rule node
		counts keyed by rule id. */
		for _, node := range t.Slice() {
			if node.GetType() == TypeRule {
				rule = node
				counts := [TypeLast]uint{}
				countsByRule[node.GetId()] = &counts
				link(&counts, node)
			}
		}
	}
931 | usage := [TypeLast]uint{} | |
932 | join([]func(){ | |
933 | func() { | |
934 | var countRules func(node Node) | |
935 | ruleReached := make([]bool, t.RulesCount) | |
936 | countRules = func(node Node) { | |
937 | switch node.GetType() { | |
938 | case TypeRule: | |
939 | name, id := node.String(), node.GetId() | |
940 | if count, ok := t.rulesCount[name]; ok { | |
941 | t.rulesCount[name] = count + 1 | |
942 | } else { | |
943 | t.rulesCount[name] = 1 | |
944 | } | |
945 | if ruleReached[id] { | |
946 | return | |
947 | } | |
948 | ruleReached[id] = true | |
949 | countRules(node.Front()) | |
950 | case TypeName: | |
951 | countRules(t.Rules[node.String()]) | |
952 | case TypeImplicitPush, TypePush: | |
953 | countRules(node.Front()) | |
954 | case TypeAlternate, TypeUnorderedAlternate, TypeSequence, | |
955 | TypePeekFor, TypePeekNot, TypeQuery, TypeStar, TypePlus: | |
956 | for _, element := range node.Slice() { | |
957 | countRules(element) | |
958 | } | |
959 | } | |
960 | } | |
961 | for _, node := range t.Slice() { | |
962 | if node.GetType() == TypeRule { | |
963 | countRules(node) | |
964 | break | |
965 | } | |
966 | } | |
967 | for id, reached := range ruleReached { | |
968 | if reached { | |
969 | for i, count := range countsByRule[id] { | |
970 | usage[i] += count | |
971 | } | |
972 | } | |
973 | } | |
974 | }, | |
975 | func() { | |
976 | var checkRecursion func(node Node) bool | |
977 | ruleReached := make([]bool, t.RulesCount) | |
978 | checkRecursion = func(node Node) bool { | |
979 | switch node.GetType() { | |
980 | case TypeRule: | |
981 | id := node.GetId() | |
982 | if ruleReached[id] { | |
983 | warn(fmt.Errorf("possible infinite left recursion in rule '%v'", node)) | |
984 | return false | |
985 | } | |
986 | ruleReached[id] = true | |
987 | consumes := checkRecursion(node.Front()) | |
988 | ruleReached[id] = false | |
989 | return consumes | |
990 | case TypeAlternate: | |
991 | for _, element := range node.Slice() { | |
992 | if !checkRecursion(element) { | |
993 | return false | |
994 | } | |
995 | } | |
996 | return true | |
997 | case TypeSequence: | |
998 | for _, element := range node.Slice() { | |
999 | if checkRecursion(element) { | |
1000 | return true | |
1001 | } | |
1002 | } | |
1003 | case TypeName: | |
1004 | return checkRecursion(t.Rules[node.String()]) | |
1005 | case TypePlus, TypePush, TypeImplicitPush: | |
1006 | return checkRecursion(node.Front()) | |
1007 | case TypeCharacter, TypeString: | |
1008 | return len(node.String()) > 0 | |
1009 | case TypeDot, TypeRange: | |
1010 | return true | |
1011 | } | |
1012 | return false | |
1013 | } | |
1014 | for _, node := range t.Slice() { | |
1015 | if node.GetType() == TypeRule { | |
1016 | checkRecursion(node) | |
1017 | } | |
1018 | } | |
1019 | }}) | |
1020 | ||
	if t._switch {
		// Alternation optimization (-switch): compute the set of possible
		// first characters of every rule/expression and, where the
		// branches of an ordered choice have non-overlapping first sets,
		// rewrite the choice into a TypeUnorderedAlternate that the code
		// generator below turns into a switch on the next input character.
		var optimizeAlternates func(node Node) (consumes bool, s jetset.Set)
		// Per-rule memo of (consumes, first-set); firstPass gates the
		// rewriting so the first traversal only computes the sets.
		cache, firstPass := make([]struct {
			reached, consumes bool
			s                 jetset.Set
		}, t.RulesCount), true
		optimizeAlternates = func(n Node) (consumes bool, s jetset.Set) {
			/*n.debug()*/
			switch n.GetType() {
			case TypeRule:
				cache := &cache[n.GetId()]
				if cache.reached {
					// Already visited (or currently in progress): reuse
					// the memoized result; this also terminates on
					// recursive rules.
					consumes, s = cache.consumes, cache.s
					return
				}

				cache.reached = true
				consumes, s = optimizeAlternates(n.Front())
				cache.consumes, cache.s = consumes, s
			case TypeName:
				consumes, s = optimizeAlternates(t.Rules[n.String()])
			case TypeDot:
				consumes = true
				/* TypeDot set doesn't include the EndSymbol */
				s = s.Add(uint64(t.EndSymbol))
				s = s.Complement(uint64(t.EndSymbol))
			case TypeString, TypeCharacter:
				// Only the first rune matters for the first-set.
				consumes = true
				s = s.Add(uint64([]rune(n.String())[0]))
			case TypeRange:
				consumes = true
				element := n.Front()
				lower := []rune(element.String())[0]
				element = element.Next()
				upper := []rune(element.String())[0]
				s = s.AddRange(uint64(lower), uint64(upper))
			case TypeAlternate:
				consumes = true
				mconsumes, properties, c :=
					consumes, make([]struct {
						intersects bool
						s          jetset.Set
					}, n.Len()), 0
				for _, element := range n.Slice() {
					mconsumes, properties[c].s = optimizeAlternates(element)
					consumes = consumes && mconsumes
					s = s.Union(properties[c].s)
					c++
				}

				if firstPass {
					// Set computation only; rewriting happens in pass two.
					break
				}

				// Mark branches whose first-set overlaps a later branch;
				// bail out unless enough branches are pairwise disjoint.
				intersections := 2
			compare:
				for ai, a := range properties[0 : len(properties)-1] {
					for _, b := range properties[ai+1:] {
						if a.s.Intersects(b.s) {
							intersections++
							properties[ai].intersects = true
							continue compare
						}
					}
				}
				if intersections >= len(properties) {
					break
				}

				// Split the branches: overlapping ones stay in an ordered
				// alternate; disjoint ones become switch cases guarded by
				// a peek at their first-character class. The largest class
				// seen so far is kept at the back (it ends up as the
				// switch default), as do nil branches.
				c, unordered, ordered, max :=
					0, &node{Type: TypeUnorderedAlternate}, &node{Type: TypeAlternate}, 0
				for _, element := range n.Slice() {
					if properties[c].intersects {
						ordered.PushBack(element.Copy())
					} else {
						class := &node{Type: TypeUnorderedAlternate}
						for d := 0; d < 256; d++ {
							if properties[c].s.Has(uint64(d)) {
								class.PushBack(&node{Type: TypeCharacter, string: string(rune(d))})
							}
						}

						sequence, predicate, length :=
							&node{Type: TypeSequence}, &node{Type: TypePeekFor}, properties[c].s.Len()
						if length == 0 {
							class.PushBack(&node{Type: TypeNil, string: "<nil>"})
						}
						predicate.PushBack(class)
						sequence.PushBack(predicate)
						sequence.PushBack(element.Copy())

						if element.GetType() == TypeNil {
							unordered.PushBack(sequence)
						} else if length > max {
							unordered.PushBack(sequence)
							max = length
						} else {
							unordered.PushFront(sequence)
						}
					}
					c++
				}
				// Replace n in place: fully disjoint becomes a pure
				// unordered alternate; otherwise ordered branches come
				// first with the unordered group appended last.
				n.Init()
				if ordered.Front() == nil {
					n.SetType(TypeUnorderedAlternate)
					for _, element := range unordered.Slice() {
						n.PushBack(element.Copy())
					}
				} else {
					for _, element := range ordered.Slice() {
						n.PushBack(element.Copy())
					}
					n.PushBack(unordered)
				}
			case TypeSequence:
				classes, elements :=
					make([]struct {
						s jetset.Set
					}, n.Len()), n.Slice()

				// The sequence's first-set is the union of element sets
				// up to and including the first element guaranteed to
				// consume input.
				for c, element := range elements {
					consumes, classes[c].s = optimizeAlternates(element)
					if consumes {
						elements, classes = elements[c+1:], classes[:c+1]
						break
					}
				}

				for c := len(classes) - 1; c >= 0; c-- {
					s = s.Union(classes[c].s)
				}

				// Still visit the remaining elements so alternates nested
				// there get optimized too (their sets are discarded).
				for _, element := range elements {
					optimizeAlternates(element)
				}
			case TypePeekNot, TypePeekFor:
				// Lookahead consumes nothing and contributes no first-set.
				optimizeAlternates(n.Front())
			case TypeQuery, TypeStar:
				// May match empty: contributes a set but never consumes.
				_, s = optimizeAlternates(n.Front())
			case TypePlus, TypePush, TypeImplicitPush:
				consumes, s = optimizeAlternates(n.Front())
			case TypeAction, TypeNil:
				//empty
			}
			return
		}
		// Pass one: compute first-sets, starting from the root rule.
		for _, element := range t.Slice() {
			if element.GetType() == TypeRule {
				optimizeAlternates(element)
				break
			}
		}

		// Pass two: reset the visit marks (keeping the computed sets)
		// and perform the actual rewriting.
		for i := range cache {
			cache[i].reached = false
		}
		firstPass = false
		for _, element := range t.Slice() {
			if element.GetType() == TypeRule {
				optimizeAlternates(element)
				break
			}
		}
	}
1186 | var buffer bytes.Buffer | |
1187 | defer func() { | |
1188 | if t.Strict && werr != nil && err == nil { | |
1189 | // Treat warnings as errors. | |
1190 | err = werr | |
1191 | } | |
1192 | if !t.Strict && werr != nil { | |
1193 | // Display warnings. | |
1194 | fmt.Fprintln(os.Stderr, werr) | |
1195 | } | |
1196 | if err != nil { | |
1197 | return | |
1198 | } | |
1199 | fileSet := token.NewFileSet() | |
1200 | code, err := parser.ParseFile(fileSet, file, &buffer, parser.ParseComments) | |
1201 | if err != nil { | |
1202 | buffer.WriteTo(out) | |
1203 | err = fmt.Errorf("%v: %v", file, err) | |
1204 | return | |
1205 | } | |
1206 | formatter := printer.Config{Mode: printer.TabIndent | printer.UseSpaces, Tabwidth: 8} | |
1207 | err = formatter.Fprint(out, fileSet, code) | |
1208 | if err != nil { | |
1209 | buffer.WriteTo(out) | |
1210 | err = fmt.Errorf("%v: %v", file, err) | |
1211 | return | |
1212 | } | |
1213 | ||
1214 | }() | |
1215 | ||
	// _print writes raw text into the output buffer; the helpers below
	// emit recurring fragments of the generated parser. Exact whitespace
	// in the emitted strings is immaterial — the deferred formatter
	// gofmt-formats the whole buffer at the end.
	_print := func(format string, a ...interface{}) { fmt.Fprintf(&buffer, format, a...) }
	// Save/restore the parser position for backtracking, using label n to
	// make the local variable names unique.
	printSave := func(n uint) { _print("\n position%d, tokenIndex%d := position, tokenIndex", n, n) }
	printRestore := func(n uint) { _print("\n position, tokenIndex = position%d, tokenIndex%d", n, n) }
	// Memoization hooks, emitted only when AST support is enabled.
	printMemoSave := func(rule int, n uint, ret bool) {
		_print("\n memoize(%d, position%d, tokenIndex%d, %t)", rule, n, n, ret)
	}
	printMemoCheck := func(rule int) {
		_print("\n if memoized, ok := memoization[memoKey{%d, position}]; ok {", rule)
		_print("\n return memoizedResult(memoized)")
		_print("\n }")
	}
	// printTemplate expands a text/template against the tree t into the
	// output buffer.
	printTemplate := func(s string) error {
		return template.Must(template.New("peg").Parse(s)).Execute(&buffer, t)
	}
	// Template feature flags: which matching primitives the generated
	// parser actually needs. usage only counts nodes in reachable rules,
	// so unused features are not emitted.
	t.HasActions = usage[TypeAction] > 0
	t.HasPush = usage[TypePush] > 0
	t.HasCommit = usage[TypeCommit] > 0
	t.HasDot = usage[TypeDot] > 0
	t.HasCharacter = usage[TypeCharacter] > 0
	t.HasString = usage[TypeString] > 0
	t.HasRange = usage[TypeRange] > 0
	var printRule func(n Node)
	var compile func(expression Node, ko uint) (labelLast bool)
	// label is the next free jump-label number; labels records which
	// labels are actually targeted by a goto, so printLabel only emits
	// labels that are used.
	var label uint
	labels := make(map[uint]bool)
	printBegin := func() { _print("\n {") }
	printEnd := func() { _print("\n }") }
	// printLabel emits label n only if some goto references it; reports
	// whether a label was emitted.
	printLabel := func(n uint) bool {
		_print("\n")
		if labels[n] {
			_print(" l%d:\t", n)
			return true
		}
		return false
	}
	// printJump emits a goto and marks its target label as used.
	printJump := func(n uint) {
		_print("\n goto l%d", n)
		labels[n] = true
	}
	// printRule pretty-prints a rule/expression in PEG notation; it is
	// used below to emit the comment above each generated rule function.
	printRule = func(n Node) {
		switch n.GetType() {
		case TypeRule:
			_print("%v <- ", n)
			printRule(n.Front())
		case TypeDot:
			_print(".")
		case TypeName:
			_print("%v", n)
		case TypeCharacter:
			_print("'%v'", escape(n.String()))
		case TypeString:
			// escape returns a quoted form; strip the surrounding quotes
			// and re-wrap in single quotes.
			s := escape(n.String())
			_print("'%v'", s[1:len(s)-1])
		case TypeRange:
			element := n.Front()
			lower := element
			element = element.Next()
			upper := element
			_print("[%v-%v]", escape(lower.String()), escape(upper.String()))
		case TypePredicate:
			_print("&{%v}", n)
		case TypeStateChange:
			_print("!{%v}", n)
		case TypeAction:
			_print("{%v}", n)
		case TypeCommit:
			_print("commit")
		case TypeAlternate:
			// Ordered choice: "(a / b / c)".
			_print("(")
			elements := n.Slice()
			printRule(elements[0])
			for _, element := range elements[1:] {
				_print(" / ")
				printRule(element)
			}
			_print(")")
		case TypeUnorderedAlternate:
			// Optimized (disjoint) choice: "(a | b | c)".
			_print("(")
			elements := n.Slice()
			printRule(elements[0])
			for _, element := range elements[1:] {
				_print(" | ")
				printRule(element)
			}
			_print(")")
		case TypeSequence:
			_print("(")
			elements := n.Slice()
			printRule(elements[0])
			for _, element := range elements[1:] {
				_print(" ")
				printRule(element)
			}
			_print(")")
		case TypePeekFor:
			_print("&")
			printRule(n.Front())
		case TypePeekNot:
			_print("!")
			printRule(n.Front())
		case TypeQuery:
			printRule(n.Front())
			_print("?")
		case TypeStar:
			printRule(n.Front())
			_print("*")
		case TypePlus:
			printRule(n.Front())
			_print("+")
		case TypePush, TypeImplicitPush:
			_print("<")
			printRule(n.Front())
			_print(">")
		case TypeNil:
			// Nothing to print for an empty expression.
		default:
			warn(fmt.Errorf("illegal node type: %v", n.GetType()))
		}
	}
	// compile emits the matching code for expression n. On failure the
	// generated code jumps to label ko. The return value reports whether
	// the last thing emitted was a jump label, so callers inside switch
	// cases know to add an explicit "break" after it.
	compile = func(n Node, ko uint) (labelLast bool) {
		switch n.GetType() {
		case TypeRule:
			// Rules are compiled from the emit loop below, never nested.
			warn(fmt.Errorf("internal error #1 (%v)", n))
		case TypeDot:
			_print("\n if !matchDot() {")
			/*print("\n if buffer[position] == endSymbol {")*/
			printJump(ko)
			/*print("}\nposition++")*/
			_print("}")
		case TypeName:
			name := n.String()
			rule := t.Rules[name]
			if t.inline && t.rulesCount[name] == 1 {
				// Rule referenced exactly once: inline its body here
				// instead of emitting a call.
				compile(rule.Front(), ko)
				return
			}
			_print("\n if !_rules[rule%v]() {", name /*rule.GetId()*/)
			printJump(ko)
			_print("}")
		case TypeRange:
			element := n.Front()
			lower := element
			element = element.Next()
			upper := element
			/*print("\n if !matchRange('%v', '%v') {", escape(lower.String()), escape(upper.String()))*/
			_print("\n if c := buffer[position]; c < rune('%v') || c > rune('%v') {", escape(lower.String()), escape(upper.String()))
			printJump(ko)
			_print("}\nposition++")
		case TypeCharacter:
			/*print("\n if !matchChar('%v') {", escape(n.String()))*/
			_print("\n if buffer[position] != rune('%v') {", escape(n.String()))
			printJump(ko)
			_print("}\nposition++")
		case TypeString:
			_print("\n if !matchString(%v) {", strconv.Quote(n.String()))
			printJump(ko)
			_print("}")
		case TypePredicate:
			// Semantic predicate: the user expression itself is emitted
			// as the condition.
			_print("\n if !(%v) {", n)
			printJump(ko)
			_print("}")
		case TypeStateChange:
			// User statement executed for its side effects; cannot fail.
			_print("\n %v", n)
		case TypeAction:
			// Actions were lifted into their own rules by link; nothing
			// to emit at the original site.
		case TypeCommit:
		case TypePush:
			fallthrough
		case TypeImplicitPush:
			ok, element := label, n.Front()
			label++
			// element is the expression; its successor is a copy of the
			// enclosing rule (appended by link), used for add().
			nodeType, rule := element.GetType(), element.Next()
			printBegin()
			if nodeType == TypeAction {
				if t.Ast {
					_print("\nadd(rule%v, position)", rule)
				} else {
					// There is no AST support, so inline the rule code
					_print("\n%v", element)
				}
			} else {
				_print("\nposition%d := position", ok)
				compile(element, ko)
				if n.GetType() == TypePush && !t.Ast {
					// This is TypePush and there is no AST support,
					// so inline capture to text right here
					_print("\nbegin := position%d", ok)
					_print("\nend := position")
					_print("\ntext = string(buffer[begin:end])")
				} else {
					_print("\nadd(rule%v, position%d)", rule, ok)
				}
			}
			printEnd()
		case TypeAlternate:
			// Ordered choice: try each branch, restoring the saved
			// position between failed attempts; the last branch jumps to
			// the caller's ko on failure.
			ok := label
			label++
			printBegin()
			elements := n.Slice()
			printSave(ok)
			for _, element := range elements[:len(elements)-1] {
				next := label
				label++
				compile(element, next)
				printJump(ok)
				printLabel(next)
				printRestore(ok)
			}
			compile(elements[len(elements)-1], ko)
			printEnd()
			labelLast = printLabel(ok)
		case TypeUnorderedAlternate:
			// Disjoint choice produced by optimizeAlternates: dispatch on
			// the next input character. Each element is a sequence whose
			// first child is the peeked character class; the final
			// element becomes the switch default.
			done, ok := ko, label
			label++
			printBegin()
			_print("\n switch buffer[position] {")
			elements := n.Slice()
			elements, last := elements[:len(elements)-1], elements[len(elements)-1].Front().Next()
			for _, element := range elements {
				sequence := element.Front()
				class := sequence.Front()
				sequence = sequence.Next()
				_print("\n case")
				comma := false
				for _, character := range class.Slice() {
					if comma {
						_print(",")
					} else {
						comma = true
					}
					_print(" '%s'", escape(character.String()))
				}
				_print(":")
				// If the branch ended on a label, an explicit break keeps
				// the label from being the last statement in the case.
				if compile(sequence, done) {
					_print("\nbreak")
				}
			}
			_print("\n default:")
			if compile(last, done) {
				_print("\nbreak")
			}
			_print("\n }")
			printEnd()
			labelLast = printLabel(ok)
		case TypeSequence:
			for _, element := range n.Slice() {
				labelLast = compile(element, ko)
			}
		case TypePeekFor:
			// Positive lookahead: match, then restore position.
			ok := label
			label++
			printBegin()
			printSave(ok)
			compile(n.Front(), ko)
			printRestore(ok)
			printEnd()
		case TypePeekNot:
			// Negative lookahead: success of the inner match is failure.
			ok := label
			label++
			printBegin()
			printSave(ok)
			compile(n.Front(), ok)
			printJump(ko)
			printLabel(ok)
			printRestore(ok)
			printEnd()
		case TypeQuery:
			// Optional match: failure just restores the position.
			qko := label
			label++
			qok := label
			label++
			printBegin()
			printSave(qko)
			compile(n.Front(), qko)
			printJump(qok)
			printLabel(qko)
			printRestore(qko)
			printEnd()
			labelLast = printLabel(qok)
		case TypeStar:
			// Zero or more: loop until the inner match fails.
			again := label
			label++
			out := label
			label++
			printLabel(again)
			printBegin()
			printSave(out)
			compile(n.Front(), out)
			printJump(again)
			printLabel(out)
			printRestore(out)
			printEnd()
		case TypePlus:
			// One or more: a mandatory first match (failing to ko),
			// then a star-style loop.
			again := label
			label++
			out := label
			label++
			compile(n.Front(), ko)
			printLabel(again)
			printBegin()
			printSave(out)
			compile(n.Front(), out)
			printJump(again)
			printLabel(out)
			printRestore(out)
			printEnd()
		case TypeNil:
		default:
			warn(fmt.Errorf("illegal node type: %v", n.GetType()))
		}
		return labelLast
	}
	/* lets figure out which jump labels are going to be used with this dry compile */
	// _print is temporarily swapped for a no-op: compile runs purely for
	// its side effect of populating the labels map (via printJump), so
	// the real pass below knows which labels to emit.
	printTemp, _print := _print, func(format string, a ...interface{}) {}
	for _, element := range t.Slice() {
		if element.GetType() != TypeRule {
			continue
		}
		expression := element.Front()
		if expression.GetType() == TypeNil {
			continue
		}
		ko := label
		label++
		if count, ok := t.rulesCount[element.String()]; !ok {
			// Rule defined but never referenced: no body is compiled.
			continue
		} else if t.inline && count == 1 && ko != 0 {
			// Referenced exactly once and inlined at the call site
			// (except the root rule, ko == 0): no standalone body.
			continue
		}
		compile(expression, ko)
	}
	// Restore the real printer and restart label numbering so the real
	// pass assigns identical label numbers.
	_print, label = printTemp, 0
1549 | ||
1550 | /* now for the real compile pass */ | |
1551 | t.PegRuleType = "uint8" | |
1552 | if length := int64(t.Len()); length > math.MaxUint32 { | |
1553 | t.PegRuleType = "uint64" | |
1554 | } else if length > math.MaxUint16 { | |
1555 | t.PegRuleType = "uint32" | |
1556 | } else if length > math.MaxUint8 { | |
1557 | t.PegRuleType = "uint16" | |
1558 | } | |
1559 | if err = printTemplate(pegHeaderTemplate); err != nil { | |
1560 | return err | |
1561 | } | |
1562 | for _, element := range t.Slice() { | |
1563 | if element.GetType() != TypeRule { | |
1564 | continue | |
1565 | } | |
1566 | expression := element.Front() | |
1567 | if implicit := expression.Front(); expression.GetType() == TypeNil || implicit.GetType() == TypeNil { | |
1568 | if element.String() != "PegText" { | |
1569 | warn(fmt.Errorf("rule '%v' used but not defined", element)) | |
1570 | } | |
1571 | _print("\n nil,") | |
1572 | continue | |
1573 | } | |
1574 | ko := label | |
1575 | label++ | |
1576 | _print("\n /* %v ", element.GetId()) | |
1577 | printRule(element) | |
1578 | _print(" */") | |
1579 | if count, ok := t.rulesCount[element.String()]; !ok { | |
1580 | warn(fmt.Errorf("rule '%v' defined but not used", element)) | |
1581 | _print("\n nil,") | |
1582 | continue | |
1583 | } else if t.inline && count == 1 && ko != 0 { | |
1584 | _print("\n nil,") | |
1585 | continue | |
1586 | } | |
1587 | _print("\n func() bool {") | |
1588 | if t.Ast { | |
1589 | printMemoCheck(element.GetId()) | |
1590 | } | |
1591 | if t.Ast || labels[ko] { | |
1592 | printSave(ko) | |
1593 | } | |
1594 | compile(expression, ko) | |
1595 | //print("\n fmt.Printf(\"%v\\n\")", element.String()) | |
1596 | if t.Ast { | |
1597 | printMemoSave(element.GetId(), ko, true) | |
1598 | } | |
1599 | _print("\n return true") | |
1600 | if labels[ko] { | |
1601 | printLabel(ko) | |
1602 | if t.Ast { | |
1603 | printMemoSave(element.GetId(), ko, false) | |
1604 | } | |
1605 | printRestore(ko) | |
1606 | _print("\n return false") | |
1607 | } | |
1608 | _print("\n },") | |
1609 | } | |
1610 | _print("\n }\n p.rules = _rules") | |
1611 | _print("\n return nil") | |
1612 | _print("\n}\n") | |
1613 | return nil | |
1614 | } |