|
1 | 1 | using System; |
| 2 | +using System.Collections.Generic; |
| 3 | +using System.Linq; |
| 4 | +using System.Text; |
2 | 5 |
|
3 | 6 | using E.Mathematics; |
4 | 7 |
|
@@ -163,6 +166,46 @@ public void ReadPositions() |
163 | 166 | Assert.Equal(new Location(3, 0, 38), tokens.Next().Start); // pipe |
164 | 167 | } |
165 | 168 |
|
    [Fact]
    public void CanReadTwoStatements()
    {
        // Two assignment statements on separate lines. The raw string literal
        // keeps the source file's native line endings, so this exercises the
        // tokenizer against whatever EOL convention the checkout uses
        // (contrast with CanReadTwoStatements_Linux, which forces "\n").
        var tokenizer = new Tokenizer(
            """
            image = 10
            b = 2
            """);

        // Dump() emits one "text | kind | line | column" row per token,
        // joined with "\n"; the expected literal is normalized to match.
        // NOTE(review): column padding inside the expected literal must match
        // Dump's computed field widths exactly — whitespace here is load-bearing.
        Assert.Equal(
            """
            image | Identifier | 1 | 0
            = | Op | 1 | 6
            10 | Number | 1 | 8
            b | Identifier | 2 | 0
            = | Op | 2 | 2
            2 | Number | 2 | 4
            """.ReplaceLineEndings("\n"), tokenizer.Dump());
    }
| 188 | + |
    [Fact]
    public void CanReadTwoStatements_Linux()
    {
        // Same input as CanReadTwoStatements, but the source text is
        // explicitly normalized to LF line endings so the test is
        // deterministic regardless of how the file was checked out.
        var tokenizer = new Tokenizer(
            """
            image = 10
            b = 2
            """.ReplaceLineEndings("\n"));

        // Token positions (line/column) must be identical to the
        // native-EOL case: only the separator bytes differ, not the layout.
        // NOTE(review): column padding inside the expected literal must match
        // Dump's computed field widths exactly — whitespace here is load-bearing.
        Assert.Equal(
            """
            image | Identifier | 1 | 0
            = | Op | 1 | 6
            10 | Number | 1 | 8
            b | Identifier | 2 | 0
            = | Op | 2 | 2
            2 | Number | 2 | 4
            """.ReplaceLineEndings("\n"), tokenizer.Dump());
    }
| 208 | + |
166 | 209 | [Fact] |
167 | 210 | public void Read2() |
168 | 211 | { |
@@ -233,6 +276,39 @@ Image.create 100px 100px #000 |
233 | 276 |
|
234 | 277 | public static class TokenizerExtensions |
235 | 278 | { |
| 279 | + public static string Dump(this Tokenizer tokenizer) |
| 280 | + { |
| 281 | + var tokens = new List<Token>(); |
| 282 | + |
| 283 | + Token token; |
| 284 | + |
| 285 | + while ((token = tokenizer.Next()).Kind != TokenKind.EOF) |
| 286 | + { |
| 287 | + tokens.Add(token); |
| 288 | + } |
| 289 | + |
| 290 | + |
| 291 | + var sb = new StringBuilder(); |
| 292 | + |
| 293 | + int textWidth = Math.Max("Text".Length, tokens.Max(t => t.Text.Length)); |
| 294 | + int kindWidth = Math.Max("Kind".Length, tokens.Max(t => t.Kind.ToString().Length)); |
| 295 | + |
| 296 | + int i = 0; |
| 297 | + |
| 298 | + foreach (var t in tokens) |
| 299 | + { |
| 300 | + if (i > 0) |
| 301 | + { |
| 302 | + sb.Append('\n'); |
| 303 | + } |
| 304 | + sb.AppendFormat("{0,-" + textWidth + "} | {1,-" + kindWidth + "} | {2,4} | {3,4}", t.Text, t.Kind, t.Start.Line, t.Start.Column); |
| 305 | + |
| 306 | + i++; |
| 307 | + } |
| 308 | + |
| 309 | + return sb.ToString(); |
| 310 | + } |
| 311 | + |
236 | 312 | public static TokenKind ReadKind(this Tokenizer tokenizer) => tokenizer.Next().Kind; |
237 | 313 |
|
238 | 314 | public static string Read(this Tokenizer tokenizer, TokenKind kind) |
|
0 commit comments