/src/wasm-tools/crates/wit-parser/src/ast.rs
Line | Count | Source |
1 | | use crate::{Error, PackageNotFoundError, UnresolvedPackageGroup}; |
2 | | use alloc::borrow::Cow; |
3 | | use alloc::boxed::Box; |
4 | | use alloc::format; |
5 | | use alloc::string::{String, ToString}; |
6 | | use alloc::vec::Vec; |
7 | | use anyhow::{Context, Result, bail}; |
8 | | use core::fmt; |
9 | | use core::mem; |
10 | | use lex::{Span, Token, Tokenizer}; |
11 | | use semver::Version; |
12 | | #[cfg(feature = "std")] |
13 | | use std::path::Path; |
14 | | |
15 | | pub mod lex; |
16 | | |
17 | | pub use resolve::Resolver; |
18 | | mod resolve; |
19 | | pub mod toposort; |
20 | | |
21 | | pub use lex::validate_id; |
22 | | |
23 | | /// Representation of a single WIT `*.wit` file and nested packages. |
24 | | struct PackageFile<'a> { |
25 | | /// Optional `package foo:bar;` header |
26 | | package_id: Option<PackageName<'a>>, |
27 | | /// Other AST items. |
28 | | decl_list: DeclList<'a>, |
29 | | } |
30 | | |
31 | | impl<'a> PackageFile<'a> { |
32 | | /// Parse a standalone file represented by `tokens`. |
33 | | /// |
34 | | /// This will optionally start with `package foo:bar;` and then will have a |
35 | | /// list of ast items after it. |
36 | 22.0k | fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> { |
37 | 22.0k | let mut package_name_tokens_peek = tokens.clone(); |
38 | 22.0k | let docs = parse_docs(&mut package_name_tokens_peek)?; |
39 | | |
40 | | // Parse `package foo:bar;` but throw it out if it's actually |
41 | | // `package foo:bar { ... }` since that's an ast item instead. |
42 | 22.0k | let package_id = if package_name_tokens_peek.eat(Token::Package)? { |
43 | 16.2k | let name = PackageName::parse(&mut package_name_tokens_peek, docs)?; |
44 | 16.2k | if package_name_tokens_peek.eat(Token::Semicolon)? { |
45 | 16.2k | *tokens = package_name_tokens_peek; |
46 | 16.2k | Some(name) |
47 | | } else { |
48 | 0 | None |
49 | | } |
50 | | } else { |
51 | 5.77k | None |
52 | | }; |
53 | 22.0k | let decl_list = DeclList::parse_until(tokens, None)?; |
54 | 22.0k | Ok(PackageFile { |
55 | 22.0k | package_id, |
56 | 22.0k | decl_list, |
57 | 22.0k | }) |
58 | 22.0k | } |
59 | | |
60 | | /// Parse a nested package of the form `package foo:bar { ... }` |
61 | 466 | fn parse_nested( |
62 | 466 | tokens: &mut Tokenizer<'a>, |
63 | 466 | docs: Docs<'a>, |
64 | 466 | attributes: Vec<Attribute<'a>>, |
65 | 466 | ) -> Result<Self> { |
66 | 466 | let span = tokens.expect(Token::Package)?; |
67 | 466 | if !attributes.is_empty() { |
68 | 0 | bail!(Error::new( |
69 | 0 | span, |
70 | 0 | format!("cannot place attributes on nested packages"), |
71 | 0 | )); |
72 | 466 | } |
73 | 466 | let package_id = PackageName::parse(tokens, docs)?; |
74 | 466 | tokens.expect(Token::LeftBrace)?; |
75 | 466 | let decl_list = DeclList::parse_until(tokens, Some(Token::RightBrace))?; |
76 | 466 | Ok(PackageFile { |
77 | 466 | package_id: Some(package_id), |
78 | 466 | decl_list, |
79 | 466 | }) |
80 | 466 | } |
81 | | } |
82 | | |
83 | | /// Stores all of the declarations in a package's scope. In AST terms, this |
84 | | /// means everything except the `package` declaration that demarcates a package |
85 | | /// scope. In the traditional implicit format, these are all of the declarations |
86 | | /// non-`package` declarations in the file: |
87 | | /// |
88 | | /// ```wit |
89 | | /// package foo:name; |
90 | | /// |
91 | | /// /* START DECL LIST */ |
92 | | /// // Some comment... |
93 | | /// interface i {} |
94 | | /// world w {} |
95 | | /// /* END DECL LIST */ |
96 | | /// ``` |
97 | | /// |
98 | | /// In the nested package style, a [`DeclList`] is everything inside of each |
99 | | /// `package` element's brackets: |
100 | | /// |
101 | | /// ```wit |
102 | | /// package foo:name { |
103 | | /// /* START FIRST DECL LIST */ |
104 | | /// // Some comment... |
105 | | /// interface i {} |
106 | | /// world w {} |
107 | | /// /* END FIRST DECL LIST */ |
108 | | /// } |
109 | | /// |
110 | | /// package bar:name { |
111 | | /// /* START SECOND DECL LIST */ |
112 | | /// // Some comment... |
113 | | /// interface i {} |
114 | | /// world w {} |
115 | | /// /* END SECOND DECL LIST */ |
116 | | /// } |
117 | | /// ``` |
118 | | #[derive(Default)] |
119 | | pub struct DeclList<'a> { |
120 | | items: Vec<AstItem<'a>>, |
121 | | } |
122 | | |
123 | | impl<'a> DeclList<'a> { |
124 | 22.5k | fn parse_until(tokens: &mut Tokenizer<'a>, end: Option<Token>) -> Result<DeclList<'a>> { |
125 | 22.5k | let mut items = Vec::new(); |
126 | 22.5k | let mut docs = parse_docs(tokens)?; |
127 | | loop { |
128 | 94.0k | match end { |
129 | 1.02k | Some(end) => { |
130 | 1.02k | if tokens.eat(end)? { |
131 | 466 | break; |
132 | 554 | } |
133 | | } |
134 | | None => { |
135 | 93.0k | if tokens.clone().next()?.is_none() { |
136 | 22.0k | break; |
137 | 70.9k | } |
138 | | } |
139 | | } |
140 | 71.5k | items.push(AstItem::parse(tokens, docs)?); |
141 | 71.5k | docs = parse_docs(tokens)?; |
142 | | } |
143 | 22.5k | Ok(DeclList { items }) |
144 | 22.5k | } |
145 | | |
146 | 67.5k | fn for_each_path<'b>( |
147 | 67.5k | &'b self, |
148 | 67.5k | f: &mut dyn FnMut( |
149 | 67.5k | Option<&'b Id<'a>>, |
150 | 67.5k | &'b [Attribute<'a>], |
151 | 67.5k | &'b UsePath<'a>, |
152 | 67.5k | Option<&'b [UseName<'a>]>, |
153 | 67.5k | WorldOrInterface, |
154 | 67.5k | ) -> Result<()>, |
155 | 67.5k | ) -> Result<()> { |
156 | 213k | for item in self.items.iter() { |
157 | 213k | match item { |
158 | 50.5k | AstItem::World(world) => { |
159 | | // Visit imports here first before exports to help preserve |
160 | | // round-tripping of documents because printing a world puts |
161 | | // imports first but textually they can be listed with |
162 | | // exports first. |
163 | 50.5k | let mut imports = Vec::new(); |
164 | 50.5k | let mut exports = Vec::new(); |
165 | 78.6k | for item in world.items.iter() { |
166 | 78.6k | match item { |
167 | 4.43k | WorldItem::Use(u) => f( |
168 | 4.43k | None, |
169 | 4.43k | &u.attributes, |
170 | 4.43k | &u.from, |
171 | 4.43k | Some(&u.names), |
172 | 4.43k | WorldOrInterface::Interface, |
173 | 4.43k | )?, |
174 | 0 | WorldItem::Include(i) => f( |
175 | 0 | Some(&world.name), |
176 | 0 | &i.attributes, |
177 | 0 | &i.from, |
178 | 0 | None, |
179 | 0 | WorldOrInterface::World, |
180 | 0 | )?, |
181 | 50.5k | WorldItem::Type(_) => {} |
182 | | WorldItem::Import(Import { |
183 | 15.1k | kind, attributes, .. |
184 | 15.1k | }) => imports.push((kind, attributes)), |
185 | | WorldItem::Export(Export { |
186 | 8.49k | kind, attributes, .. |
187 | 8.49k | }) => exports.push((kind, attributes)), |
188 | | } |
189 | | } |
190 | | |
191 | 50.5k | let mut visit_kind = |
192 | 23.6k | |kind: &'b ExternKind<'a>, attrs: &'b [Attribute<'a>]| match kind { |
193 | 8.97k | ExternKind::Interface(_, items) => { |
194 | 19.7k | for item in items { |
195 | 19.7k | match item { |
196 | 1.44k | InterfaceItem::Use(u) => f( |
197 | 1.44k | None, |
198 | 1.44k | &u.attributes, |
199 | 1.44k | &u.from, |
200 | 1.44k | Some(&u.names), |
201 | 1.44k | WorldOrInterface::Interface, |
202 | 1.44k | )?, |
203 | 18.3k | _ => {} |
204 | | } |
205 | | } |
206 | 8.97k | Ok(()) |
207 | | } |
208 | 3.38k | ExternKind::Path(path) => { |
209 | 3.38k | f(None, attrs, path, None, WorldOrInterface::Interface) |
210 | | } |
211 | 11.3k | ExternKind::Func(..) => Ok(()), |
212 | 23.6k | }; |
213 | | |
214 | 50.5k | for (kind, attrs) in imports { |
215 | 15.1k | visit_kind(kind, attrs)?; |
216 | | } |
217 | 50.5k | for (kind, attrs) in exports { |
218 | 8.49k | visit_kind(kind, attrs)?; |
219 | | } |
220 | | } |
221 | 153k | AstItem::Interface(i) => { |
222 | 153k | for item in i.items.iter() { |
223 | 122k | match item { |
224 | 33.7k | InterfaceItem::Use(u) => f( |
225 | 33.7k | Some(&i.name), |
226 | 33.7k | &u.attributes, |
227 | 33.7k | &u.from, |
228 | 33.7k | Some(&u.names), |
229 | 33.7k | WorldOrInterface::Interface, |
230 | 33.7k | )?, |
231 | 88.3k | _ => {} |
232 | | } |
233 | | } |
234 | | } |
235 | 9.04k | AstItem::Use(u) => { |
236 | | // At the top-level, we don't know if this is a world or an interface |
237 | | // It is up to the resolver to decides how to handle this ambiguity. |
238 | 9.04k | f( |
239 | 9.04k | None, |
240 | 9.04k | &u.attributes, |
241 | 9.04k | &u.item, |
242 | 9.04k | None, |
243 | 9.04k | WorldOrInterface::Unknown, |
244 | 9.04k | )?; |
245 | | } |
246 | | |
247 | 0 | AstItem::Package(pkg) => pkg.decl_list.for_each_path(f)?, |
248 | | } |
249 | | } |
250 | 67.5k | Ok(()) |
251 | 67.5k | } |
252 | | } |
253 | | |
254 | | enum AstItem<'a> { |
255 | | Interface(Interface<'a>), |
256 | | World(World<'a>), |
257 | | Use(ToplevelUse<'a>), |
258 | | Package(PackageFile<'a>), |
259 | | } |
260 | | |
261 | | impl<'a> AstItem<'a> { |
262 | 71.5k | fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> { |
263 | 71.5k | let attributes = Attribute::parse_list(tokens)?; |
264 | 71.5k | match tokens.clone().next()? { |
265 | 51.1k | Some((_span, Token::Interface)) => { |
266 | 51.1k | Interface::parse(tokens, docs, attributes).map(Self::Interface) |
267 | | } |
268 | 16.8k | Some((_span, Token::World)) => World::parse(tokens, docs, attributes).map(Self::World), |
269 | 3.01k | Some((_span, Token::Use)) => ToplevelUse::parse(tokens, attributes).map(Self::Use), |
270 | 466 | Some((_span, Token::Package)) => { |
271 | 466 | PackageFile::parse_nested(tokens, docs, attributes).map(Self::Package) |
272 | | } |
273 | 0 | other => Err(err_expected(tokens, "`world`, `interface` or `use`", other).into()), |
274 | | } |
275 | 71.5k | } |
276 | | } |
277 | | |
278 | | #[derive(Debug, Clone)] |
279 | | struct PackageName<'a> { |
280 | | docs: Docs<'a>, |
281 | | span: Span, |
282 | | namespace: Id<'a>, |
283 | | name: Id<'a>, |
284 | | version: Option<(Span, Version)>, |
285 | | } |
286 | | |
287 | | impl<'a> PackageName<'a> { |
288 | 16.7k | fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> { |
289 | 16.7k | let namespace = parse_id(tokens)?; |
290 | 16.7k | tokens.expect(Token::Colon)?; |
291 | 16.7k | let name = parse_id(tokens)?; |
292 | 16.7k | let version = parse_opt_version(tokens)?; |
293 | | Ok(PackageName { |
294 | 16.7k | docs, |
295 | 16.7k | span: Span::new( |
296 | 16.7k | namespace.span.start(), |
297 | 16.7k | version |
298 | 16.7k | .as_ref() |
299 | 16.7k | .map(|(s, _)| s.end()) |
300 | 16.7k | .unwrap_or(name.span.end()), |
301 | | ), |
302 | 16.7k | namespace, |
303 | 16.7k | name, |
304 | 16.7k | version, |
305 | | }) |
306 | 16.7k | } |
307 | | |
308 | 47.8k | fn package_name(&self) -> crate::PackageName { |
309 | | crate::PackageName { |
310 | 47.8k | namespace: self.namespace.name.to_string(), |
311 | 47.8k | name: self.name.name.to_string(), |
312 | 47.8k | version: self.version.as_ref().map(|(_, v)| v.clone()), |
313 | | } |
314 | 47.8k | } |
315 | | } |
316 | | |
317 | | struct ToplevelUse<'a> { |
318 | | span: Span, |
319 | | attributes: Vec<Attribute<'a>>, |
320 | | item: UsePath<'a>, |
321 | | as_: Option<Id<'a>>, |
322 | | } |
323 | | |
324 | | impl<'a> ToplevelUse<'a> { |
325 | 3.01k | fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> { |
326 | 3.01k | let span = tokens.expect(Token::Use)?; |
327 | 3.01k | let item = UsePath::parse(tokens)?; |
328 | 3.01k | let as_ = if tokens.eat(Token::As)? { |
329 | 2.64k | Some(parse_id(tokens)?) |
330 | | } else { |
331 | 376 | None |
332 | | }; |
333 | 3.01k | tokens.expect_semicolon()?; |
334 | 3.01k | Ok(ToplevelUse { |
335 | 3.01k | span, |
336 | 3.01k | attributes, |
337 | 3.01k | item, |
338 | 3.01k | as_, |
339 | 3.01k | }) |
340 | 3.01k | } |
341 | | } |
342 | | |
343 | | struct World<'a> { |
344 | | docs: Docs<'a>, |
345 | | attributes: Vec<Attribute<'a>>, |
346 | | name: Id<'a>, |
347 | | items: Vec<WorldItem<'a>>, |
348 | | } |
349 | | |
350 | | impl<'a> World<'a> { |
351 | 16.8k | fn parse( |
352 | 16.8k | tokens: &mut Tokenizer<'a>, |
353 | 16.8k | docs: Docs<'a>, |
354 | 16.8k | attributes: Vec<Attribute<'a>>, |
355 | 16.8k | ) -> Result<Self> { |
356 | 16.8k | tokens.expect(Token::World)?; |
357 | 16.8k | let name = parse_id(tokens)?; |
358 | 16.8k | let items = Self::parse_items(tokens)?; |
359 | 16.8k | Ok(World { |
360 | 16.8k | docs, |
361 | 16.8k | attributes, |
362 | 16.8k | name, |
363 | 16.8k | items, |
364 | 16.8k | }) |
365 | 16.8k | } |
366 | | |
367 | 16.8k | fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<WorldItem<'a>>> { |
368 | 16.8k | tokens.expect(Token::LeftBrace)?; |
369 | 16.8k | let mut items = Vec::new(); |
370 | | loop { |
371 | 43.0k | let docs = parse_docs(tokens)?; |
372 | 43.0k | if tokens.eat(Token::RightBrace)? { |
373 | 16.8k | break; |
374 | 26.2k | } |
375 | 26.2k | let attributes = Attribute::parse_list(tokens)?; |
376 | 26.2k | items.push(WorldItem::parse(tokens, docs, attributes)?); |
377 | | } |
378 | 16.8k | Ok(items) |
379 | 16.8k | } |
380 | | } |
381 | | |
382 | | enum WorldItem<'a> { |
383 | | Import(Import<'a>), |
384 | | Export(Export<'a>), |
385 | | Use(Use<'a>), |
386 | | Type(TypeDef<'a>), |
387 | | Include(Include<'a>), |
388 | | } |
389 | | |
390 | | impl<'a> WorldItem<'a> { |
391 | 26.2k | fn parse( |
392 | 26.2k | tokens: &mut Tokenizer<'a>, |
393 | 26.2k | docs: Docs<'a>, |
394 | 26.2k | attributes: Vec<Attribute<'a>>, |
395 | 26.2k | ) -> Result<WorldItem<'a>> { |
396 | 26.2k | match tokens.clone().next()? { |
397 | 5.05k | Some((_span, Token::Import)) => { |
398 | 5.05k | Import::parse(tokens, docs, attributes).map(WorldItem::Import) |
399 | | } |
400 | 2.83k | Some((_span, Token::Export)) => { |
401 | 2.83k | Export::parse(tokens, docs, attributes).map(WorldItem::Export) |
402 | | } |
403 | 1.47k | Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(WorldItem::Use), |
404 | 569 | Some((_span, Token::Type)) => { |
405 | 569 | TypeDef::parse(tokens, docs, attributes).map(WorldItem::Type) |
406 | | } |
407 | 291 | Some((_span, Token::Flags)) => { |
408 | 291 | TypeDef::parse_flags(tokens, docs, attributes).map(WorldItem::Type) |
409 | | } |
410 | 737 | Some((_span, Token::Resource)) => { |
411 | 737 | TypeDef::parse_resource(tokens, docs, attributes).map(WorldItem::Type) |
412 | | } |
413 | 1.49k | Some((_span, Token::Record)) => { |
414 | 1.49k | TypeDef::parse_record(tokens, docs, attributes).map(WorldItem::Type) |
415 | | } |
416 | 409 | Some((_span, Token::Variant)) => { |
417 | 409 | TypeDef::parse_variant(tokens, docs, attributes).map(WorldItem::Type) |
418 | | } |
419 | 13.3k | Some((_span, Token::Enum)) => { |
420 | 13.3k | TypeDef::parse_enum(tokens, docs, attributes).map(WorldItem::Type) |
421 | | } |
422 | 0 | Some((_span, Token::Include)) => { |
423 | 0 | Include::parse(tokens, attributes).map(WorldItem::Include) |
424 | | } |
425 | 0 | other => Err(err_expected( |
426 | 0 | tokens, |
427 | 0 | "`import`, `export`, `include`, `use`, or type definition", |
428 | 0 | other, |
429 | 0 | ) |
430 | 0 | .into()), |
431 | | } |
432 | 26.2k | } |
433 | | } |
434 | | |
435 | | struct Import<'a> { |
436 | | docs: Docs<'a>, |
437 | | attributes: Vec<Attribute<'a>>, |
438 | | kind: ExternKind<'a>, |
439 | | } |
440 | | |
441 | | impl<'a> Import<'a> { |
442 | 5.05k | fn parse( |
443 | 5.05k | tokens: &mut Tokenizer<'a>, |
444 | 5.05k | docs: Docs<'a>, |
445 | 5.05k | attributes: Vec<Attribute<'a>>, |
446 | 5.05k | ) -> Result<Import<'a>> { |
447 | 5.05k | tokens.expect(Token::Import)?; |
448 | 5.05k | let kind = ExternKind::parse(tokens)?; |
449 | 5.05k | Ok(Import { |
450 | 5.05k | docs, |
451 | 5.05k | attributes, |
452 | 5.05k | kind, |
453 | 5.05k | }) |
454 | 5.05k | } |
455 | | } |
456 | | |
457 | | struct Export<'a> { |
458 | | docs: Docs<'a>, |
459 | | attributes: Vec<Attribute<'a>>, |
460 | | kind: ExternKind<'a>, |
461 | | } |
462 | | |
463 | | impl<'a> Export<'a> { |
464 | 2.83k | fn parse( |
465 | 2.83k | tokens: &mut Tokenizer<'a>, |
466 | 2.83k | docs: Docs<'a>, |
467 | 2.83k | attributes: Vec<Attribute<'a>>, |
468 | 2.83k | ) -> Result<Export<'a>> { |
469 | 2.83k | tokens.expect(Token::Export)?; |
470 | 2.83k | let kind = ExternKind::parse(tokens)?; |
471 | 2.83k | Ok(Export { |
472 | 2.83k | docs, |
473 | 2.83k | attributes, |
474 | 2.83k | kind, |
475 | 2.83k | }) |
476 | 2.83k | } |
477 | | } |
478 | | |
479 | | enum ExternKind<'a> { |
480 | | Interface(Id<'a>, Vec<InterfaceItem<'a>>), |
481 | | Path(UsePath<'a>), |
482 | | Func(Id<'a>, Func<'a>), |
483 | | } |
484 | | |
485 | | impl<'a> ExternKind<'a> { |
486 | 7.88k | fn parse(tokens: &mut Tokenizer<'a>) -> Result<ExternKind<'a>> { |
487 | | // Create a copy of the token stream to test out if this is a function |
488 | | // or an interface import. In those situations the token stream gets |
489 | | // reset to the state of the clone and we continue down those paths. |
490 | | // |
491 | | // If neither a function nor an interface appears here though then the |
492 | | // clone is thrown away and the original token stream is parsed for an |
493 | | // interface. This will redo the original ID parse and the original |
494 | | // colon parse, but that shouldn't be too bad perf-wise. |
495 | 7.88k | let mut clone = tokens.clone(); |
496 | 7.88k | let id = parse_id(&mut clone)?; |
497 | 7.88k | if clone.eat(Token::Colon)? { |
498 | | // import foo: async? func(...) |
499 | 7.20k | if clone.clone().eat(Token::Func)? || clone.clone().eat(Token::Async)? { |
500 | 3.76k | *tokens = clone; |
501 | 3.76k | let ret = ExternKind::Func(id, Func::parse(tokens)?); |
502 | 3.76k | tokens.expect_semicolon()?; |
503 | 3.76k | return Ok(ret); |
504 | 3.43k | } |
505 | | |
506 | | // import foo: interface { ... } |
507 | 3.43k | if clone.eat(Token::Interface)? { |
508 | 2.99k | *tokens = clone; |
509 | 2.99k | return Ok(ExternKind::Interface(id, Interface::parse_items(tokens)?)); |
510 | 444 | } |
511 | 683 | } |
512 | | |
513 | | // import foo |
514 | | // import foo/bar |
515 | | // import foo:bar/baz |
516 | 1.12k | let ret = ExternKind::Path(UsePath::parse(tokens)?); |
517 | 1.12k | tokens.expect_semicolon()?; |
518 | 1.12k | Ok(ret) |
519 | 7.88k | } |
520 | | |
521 | 0 | fn span(&self) -> Span { |
522 | 0 | match self { |
523 | 0 | ExternKind::Interface(id, _) => id.span, |
524 | 0 | ExternKind::Path(UsePath::Id(id)) => id.span, |
525 | 0 | ExternKind::Path(UsePath::Package { name, .. }) => name.span, |
526 | 0 | ExternKind::Func(id, _) => id.span, |
527 | | } |
528 | 0 | } |
529 | | } |
530 | | |
531 | | struct Interface<'a> { |
532 | | docs: Docs<'a>, |
533 | | attributes: Vec<Attribute<'a>>, |
534 | | name: Id<'a>, |
535 | | items: Vec<InterfaceItem<'a>>, |
536 | | } |
537 | | |
538 | | impl<'a> Interface<'a> { |
539 | 51.1k | fn parse( |
540 | 51.1k | tokens: &mut Tokenizer<'a>, |
541 | 51.1k | docs: Docs<'a>, |
542 | 51.1k | attributes: Vec<Attribute<'a>>, |
543 | 51.1k | ) -> Result<Self> { |
544 | 51.1k | tokens.expect(Token::Interface)?; |
545 | 51.1k | let name = parse_id(tokens)?; |
546 | 51.1k | let items = Self::parse_items(tokens)?; |
547 | 51.1k | Ok(Interface { |
548 | 51.1k | docs, |
549 | 51.1k | attributes, |
550 | 51.1k | name, |
551 | 51.1k | items, |
552 | 51.1k | }) |
553 | 51.1k | } |
554 | | |
555 | 54.1k | pub(super) fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<InterfaceItem<'a>>> { |
556 | 54.1k | tokens.expect(Token::LeftBrace)?; |
557 | 54.1k | let mut items = Vec::new(); |
558 | | loop { |
559 | 101k | let docs = parse_docs(tokens)?; |
560 | 101k | if tokens.eat(Token::RightBrace)? { |
561 | 54.1k | break; |
562 | 47.3k | } |
563 | 47.3k | let attributes = Attribute::parse_list(tokens)?; |
564 | 47.3k | items.push(InterfaceItem::parse(tokens, docs, attributes)?); |
565 | | } |
566 | 54.1k | Ok(items) |
567 | 54.1k | } |
568 | | } |
569 | | |
570 | | #[derive(Debug)] |
571 | | pub enum WorldOrInterface { |
572 | | World, |
573 | | Interface, |
574 | | Unknown, |
575 | | } |
576 | | |
577 | | enum InterfaceItem<'a> { |
578 | | TypeDef(TypeDef<'a>), |
579 | | Func(NamedFunc<'a>), |
580 | | Use(Use<'a>), |
581 | | } |
582 | | |
583 | | struct Use<'a> { |
584 | | attributes: Vec<Attribute<'a>>, |
585 | | from: UsePath<'a>, |
586 | | names: Vec<UseName<'a>>, |
587 | | } |
588 | | |
589 | | #[derive(Debug)] |
590 | | enum UsePath<'a> { |
591 | | Id(Id<'a>), |
592 | | Package { id: PackageName<'a>, name: Id<'a> }, |
593 | | } |
594 | | |
595 | | impl<'a> UsePath<'a> { |
596 | 17.3k | fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> { |
597 | 17.3k | let id = parse_id(tokens)?; |
598 | 17.3k | if tokens.eat(Token::Colon)? { |
599 | | // `foo:bar/baz@1.0` |
600 | 10.9k | let namespace = id; |
601 | 10.9k | let pkg_name = parse_id(tokens)?; |
602 | 10.9k | tokens.expect(Token::Slash)?; |
603 | 10.9k | let name = parse_id(tokens)?; |
604 | 10.9k | let version = parse_opt_version(tokens)?; |
605 | 10.9k | Ok(UsePath::Package { |
606 | 10.9k | id: PackageName { |
607 | 10.9k | docs: Default::default(), |
608 | 10.9k | span: Span::new(namespace.span.start(), pkg_name.span.end()), |
609 | 10.9k | namespace, |
610 | 10.9k | name: pkg_name, |
611 | 10.9k | version, |
612 | 10.9k | }, |
613 | 10.9k | name, |
614 | 10.9k | }) |
615 | | } else { |
616 | | // `foo` |
617 | 6.42k | Ok(UsePath::Id(id)) |
618 | | } |
619 | 17.3k | } |
620 | | |
621 | 6.03k | fn name(&self) -> &Id<'a> { |
622 | 6.03k | match self { |
623 | 2.61k | UsePath::Id(id) => id, |
624 | 3.41k | UsePath::Package { name, .. } => name, |
625 | | } |
626 | 6.03k | } |
627 | | } |
628 | | |
629 | | struct UseName<'a> { |
630 | | name: Id<'a>, |
631 | | as_: Option<Id<'a>>, |
632 | | } |
633 | | |
634 | | impl<'a> Use<'a> { |
635 | 13.2k | fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> { |
636 | 13.2k | tokens.expect(Token::Use)?; |
637 | 13.2k | let from = UsePath::parse(tokens)?; |
638 | 13.2k | tokens.expect(Token::Period)?; |
639 | 13.2k | tokens.expect(Token::LeftBrace)?; |
640 | | |
641 | 13.2k | let mut names = Vec::new(); |
642 | 15.4k | while !tokens.eat(Token::RightBrace)? { |
643 | 15.4k | let mut name = UseName { |
644 | 15.4k | name: parse_id(tokens)?, |
645 | 15.4k | as_: None, |
646 | | }; |
647 | 15.4k | if tokens.eat(Token::As)? { |
648 | 13.5k | name.as_ = Some(parse_id(tokens)?); |
649 | 1.85k | } |
650 | 15.4k | names.push(name); |
651 | 15.4k | if !tokens.eat(Token::Comma)? { |
652 | 13.2k | tokens.expect(Token::RightBrace)?; |
653 | 13.2k | break; |
654 | 2.22k | } |
655 | | } |
656 | 13.2k | tokens.expect_semicolon()?; |
657 | 13.2k | Ok(Use { |
658 | 13.2k | attributes, |
659 | 13.2k | from, |
660 | 13.2k | names, |
661 | 13.2k | }) |
662 | 13.2k | } |
663 | | } |
664 | | |
665 | | struct Include<'a> { |
666 | | from: UsePath<'a>, |
667 | | attributes: Vec<Attribute<'a>>, |
668 | | names: Vec<IncludeName<'a>>, |
669 | | } |
670 | | |
671 | | struct IncludeName<'a> { |
672 | | name: Id<'a>, |
673 | | as_: Id<'a>, |
674 | | } |
675 | | |
676 | | impl<'a> Include<'a> { |
677 | 0 | fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> { |
678 | 0 | tokens.expect(Token::Include)?; |
679 | 0 | let from = UsePath::parse(tokens)?; |
680 | | |
681 | 0 | let names = if tokens.eat(Token::With)? { |
682 | 0 | parse_list( |
683 | 0 | tokens, |
684 | 0 | Token::LeftBrace, |
685 | 0 | Token::RightBrace, |
686 | 0 | |_docs, tokens| { |
687 | 0 | let name = parse_id(tokens)?; |
688 | 0 | tokens.expect(Token::As)?; |
689 | 0 | let as_ = parse_id(tokens)?; |
690 | 0 | Ok(IncludeName { name, as_ }) |
691 | 0 | }, |
692 | 0 | )? |
693 | | } else { |
694 | 0 | tokens.expect_semicolon()?; |
695 | 0 | Vec::new() |
696 | | }; |
697 | | |
698 | 0 | Ok(Include { |
699 | 0 | attributes, |
700 | 0 | from, |
701 | 0 | names, |
702 | 0 | }) |
703 | 0 | } |
704 | | } |
705 | | |
706 | | #[derive(Debug, Clone)] |
707 | | pub struct Id<'a> { |
708 | | name: &'a str, |
709 | | span: Span, |
710 | | } |
711 | | |
712 | | impl<'a> From<&'a str> for Id<'a> { |
713 | 0 | fn from(s: &'a str) -> Id<'a> { |
714 | 0 | Id { |
715 | 0 | name: s.into(), |
716 | 0 | span: Default::default(), |
717 | 0 | } |
718 | 0 | } |
719 | | } |
720 | | |
721 | | #[derive(Debug, Clone)] |
722 | | pub struct Docs<'a> { |
723 | | docs: Vec<Cow<'a, str>>, |
724 | | span: Span, |
725 | | } |
726 | | |
727 | | impl<'a> Default for Docs<'a> { |
728 | 575k | fn default() -> Self { |
729 | 575k | Self { |
730 | 575k | docs: Default::default(), |
731 | 575k | span: Default::default(), |
732 | 575k | } |
733 | 575k | } |
734 | | } |
735 | | |
736 | | struct TypeDef<'a> { |
737 | | docs: Docs<'a>, |
738 | | attributes: Vec<Attribute<'a>>, |
739 | | name: Id<'a>, |
740 | | ty: Type<'a>, |
741 | | } |
742 | | |
743 | | enum Type<'a> { |
744 | | Bool(Span), |
745 | | U8(Span), |
746 | | U16(Span), |
747 | | U32(Span), |
748 | | U64(Span), |
749 | | S8(Span), |
750 | | S16(Span), |
751 | | S32(Span), |
752 | | S64(Span), |
753 | | F32(Span), |
754 | | F64(Span), |
755 | | Char(Span), |
756 | | String(Span), |
757 | | Name(Id<'a>), |
758 | | List(List<'a>), |
759 | | Map(Map<'a>), |
760 | | FixedLengthList(FixedLengthList<'a>), |
761 | | Handle(Handle<'a>), |
762 | | Resource(Resource<'a>), |
763 | | Record(Record<'a>), |
764 | | Flags(Flags<'a>), |
765 | | Variant(Variant<'a>), |
766 | | Tuple(Tuple<'a>), |
767 | | Enum(Enum<'a>), |
768 | | Option(Option_<'a>), |
769 | | Result(Result_<'a>), |
770 | | Future(Future<'a>), |
771 | | Stream(Stream<'a>), |
772 | | ErrorContext(Span), |
773 | | } |
774 | | |
775 | | enum Handle<'a> { |
776 | | Own { resource: Id<'a> }, |
777 | | Borrow { resource: Id<'a> }, |
778 | | } |
779 | | |
780 | | impl Handle<'_> { |
781 | 363 | fn span(&self) -> Span { |
782 | 363 | match self { |
783 | 363 | Handle::Own { resource } | Handle::Borrow { resource } => resource.span, |
784 | | } |
785 | 363 | } |
786 | | } |
787 | | |
788 | | struct Resource<'a> { |
789 | | span: Span, |
790 | | funcs: Vec<ResourceFunc<'a>>, |
791 | | } |
792 | | |
793 | | enum ResourceFunc<'a> { |
794 | | Method(NamedFunc<'a>), |
795 | | Static(NamedFunc<'a>), |
796 | | Constructor(NamedFunc<'a>), |
797 | | } |
798 | | |
799 | | impl<'a> ResourceFunc<'a> { |
800 | 4.06k | fn parse( |
801 | 4.06k | docs: Docs<'a>, |
802 | 4.06k | attributes: Vec<Attribute<'a>>, |
803 | 4.06k | tokens: &mut Tokenizer<'a>, |
804 | 4.06k | ) -> Result<Self> { |
805 | 4.06k | match tokens.clone().next()? { |
806 | 523 | Some((span, Token::Constructor)) => { |
807 | 523 | tokens.expect(Token::Constructor)?; |
808 | 523 | tokens.expect(Token::LeftParen)?; |
809 | 976 | let params = parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| { |
810 | 976 | let name = parse_id(tokens)?; |
811 | 976 | tokens.expect(Token::Colon)?; |
812 | 976 | let ty = Type::parse(tokens)?; |
813 | 976 | Ok((name, ty)) |
814 | 976 | })?; |
815 | 523 | let result = if tokens.eat(Token::RArrow)? { |
816 | 0 | let ty = Type::parse(tokens)?; |
817 | 0 | Some(ty) |
818 | | } else { |
819 | 523 | None |
820 | | }; |
821 | 523 | tokens.expect_semicolon()?; |
822 | 523 | Ok(ResourceFunc::Constructor(NamedFunc { |
823 | 523 | docs, |
824 | 523 | attributes, |
825 | 523 | name: Id { |
826 | 523 | span, |
827 | 523 | name: "constructor", |
828 | 523 | }, |
829 | 523 | func: Func { |
830 | 523 | span, |
831 | 523 | async_: false, |
832 | 523 | params, |
833 | 523 | result, |
834 | 523 | }, |
835 | 523 | })) |
836 | | } |
837 | 3.54k | Some((_span, Token::Id | Token::ExplicitId)) => { |
838 | 3.54k | let name = parse_id(tokens)?; |
839 | 3.54k | tokens.expect(Token::Colon)?; |
840 | 3.54k | let ctor = if tokens.eat(Token::Static)? { |
841 | 1.38k | ResourceFunc::Static |
842 | 2.15k | } else { |
843 | 2.15k | ResourceFunc::Method |
844 | 2.15k | }; |
845 | 3.54k | let func = Func::parse(tokens)?; |
846 | 3.54k | tokens.expect_semicolon()?; |
847 | 3.54k | Ok(ctor(NamedFunc { |
848 | 3.54k | docs, |
849 | 3.54k | attributes, |
850 | 3.54k | name, |
851 | 3.54k | func, |
852 | 3.54k | })) |
853 | | } |
854 | 0 | other => Err(err_expected(tokens, "`constructor` or identifier", other).into()), |
855 | | } |
856 | 4.06k | } |
857 | | |
858 | 4.06k | fn named_func(&self) -> &NamedFunc<'a> { |
859 | | use ResourceFunc::*; |
860 | 4.06k | match self { |
861 | 4.06k | Method(f) | Static(f) | Constructor(f) => f, |
862 | | } |
863 | 4.06k | } |
864 | | } |
865 | | |
866 | | struct Record<'a> { |
867 | | span: Span, |
868 | | fields: Vec<Field<'a>>, |
869 | | } |
870 | | |
871 | | struct Field<'a> { |
872 | | docs: Docs<'a>, |
873 | | name: Id<'a>, |
874 | | ty: Type<'a>, |
875 | | } |
876 | | |
877 | | struct Flags<'a> { |
878 | | span: Span, |
879 | | flags: Vec<Flag<'a>>, |
880 | | } |
881 | | |
882 | | struct Flag<'a> { |
883 | | docs: Docs<'a>, |
884 | | name: Id<'a>, |
885 | | } |
886 | | |
887 | | struct Variant<'a> { |
888 | | span: Span, |
889 | | cases: Vec<Case<'a>>, |
890 | | } |
891 | | |
892 | | struct Case<'a> { |
893 | | docs: Docs<'a>, |
894 | | name: Id<'a>, |
895 | | ty: Option<Type<'a>>, |
896 | | } |
897 | | |
898 | | struct Enum<'a> { |
899 | | span: Span, |
900 | | cases: Vec<EnumCase<'a>>, |
901 | | } |
902 | | |
903 | | struct EnumCase<'a> { |
904 | | docs: Docs<'a>, |
905 | | name: Id<'a>, |
906 | | } |
907 | | |
908 | | struct Option_<'a> { |
909 | | span: Span, |
910 | | ty: Box<Type<'a>>, |
911 | | } |
912 | | |
913 | | struct List<'a> { |
914 | | span: Span, |
915 | | ty: Box<Type<'a>>, |
916 | | } |
917 | | |
918 | | struct Map<'a> { |
919 | | span: Span, |
920 | | key: Box<Type<'a>>, |
921 | | value: Box<Type<'a>>, |
922 | | } |
923 | | |
924 | | struct FixedLengthList<'a> { |
925 | | span: Span, |
926 | | ty: Box<Type<'a>>, |
927 | | size: u32, |
928 | | } |
929 | | |
930 | | struct Future<'a> { |
931 | | span: Span, |
932 | | ty: Option<Box<Type<'a>>>, |
933 | | } |
934 | | |
935 | | struct Tuple<'a> { |
936 | | span: Span, |
937 | | types: Vec<Type<'a>>, |
938 | | } |
939 | | |
940 | | struct Result_<'a> { |
941 | | span: Span, |
942 | | ok: Option<Box<Type<'a>>>, |
943 | | err: Option<Box<Type<'a>>>, |
944 | | } |
945 | | |
946 | | struct Stream<'a> { |
947 | | span: Span, |
948 | | ty: Option<Box<Type<'a>>>, |
949 | | } |
950 | | |
951 | | struct NamedFunc<'a> { |
952 | | docs: Docs<'a>, |
953 | | attributes: Vec<Attribute<'a>>, |
954 | | name: Id<'a>, |
955 | | func: Func<'a>, |
956 | | } |
957 | | |
958 | | type ParamList<'a> = Vec<(Id<'a>, Type<'a>)>; |
959 | | |
960 | | struct Func<'a> { |
961 | | span: Span, |
962 | | async_: bool, |
963 | | params: ParamList<'a>, |
964 | | result: Option<Type<'a>>, |
965 | | } |
966 | | |
967 | | impl<'a> Func<'a> { |
968 | 22.8k | fn parse(tokens: &mut Tokenizer<'a>) -> Result<Func<'a>> { |
969 | 22.8k | fn parse_params<'a>(tokens: &mut Tokenizer<'a>, left_paren: bool) -> Result<ParamList<'a>> { |
970 | 22.8k | if left_paren { |
971 | 22.8k | tokens.expect(Token::LeftParen)?; |
972 | 0 | }; |
973 | 50.8k | parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| { |
974 | 50.8k | let name = parse_id(tokens)?; |
975 | 50.8k | tokens.expect(Token::Colon)?; |
976 | 50.8k | let ty = Type::parse(tokens)?; |
977 | 50.8k | Ok((name, ty)) |
978 | 50.8k | }) |
979 | 22.8k | } |
980 | | |
981 | 22.8k | let async_ = tokens.eat(Token::Async)?; |
982 | 22.8k | let span = tokens.expect(Token::Func)?; |
983 | 22.8k | let params = parse_params(tokens, true)?; |
984 | 22.8k | let result = if tokens.eat(Token::RArrow)? { |
985 | 16.4k | let ty = Type::parse(tokens)?; |
986 | 16.4k | Some(ty) |
987 | | } else { |
988 | 6.32k | None |
989 | | }; |
990 | 22.8k | Ok(Func { |
991 | 22.8k | span, |
992 | 22.8k | async_, |
993 | 22.8k | params, |
994 | 22.8k | result, |
995 | 22.8k | }) |
996 | 22.8k | } |
997 | | } |
998 | | |
999 | | impl<'a> InterfaceItem<'a> { |
1000 | 47.3k | fn parse( |
1001 | 47.3k | tokens: &mut Tokenizer<'a>, |
1002 | 47.3k | docs: Docs<'a>, |
1003 | 47.3k | attributes: Vec<Attribute<'a>>, |
1004 | 47.3k | ) -> Result<InterfaceItem<'a>> { |
1005 | 47.3k | match tokens.clone().next()? { |
1006 | 1.23k | Some((_span, Token::Type)) => { |
1007 | 1.23k | TypeDef::parse(tokens, docs, attributes).map(InterfaceItem::TypeDef) |
1008 | | } |
1009 | 957 | Some((_span, Token::Flags)) => { |
1010 | 957 | TypeDef::parse_flags(tokens, docs, attributes).map(InterfaceItem::TypeDef) |
1011 | | } |
1012 | 7.28k | Some((_span, Token::Enum)) => { |
1013 | 7.28k | TypeDef::parse_enum(tokens, docs, attributes).map(InterfaceItem::TypeDef) |
1014 | | } |
1015 | 6.46k | Some((_span, Token::Variant)) => { |
1016 | 6.46k | TypeDef::parse_variant(tokens, docs, attributes).map(InterfaceItem::TypeDef) |
1017 | | } |
1018 | 1.68k | Some((_span, Token::Resource)) => { |
1019 | 1.68k | TypeDef::parse_resource(tokens, docs, attributes).map(InterfaceItem::TypeDef) |
1020 | | } |
1021 | 2.44k | Some((_span, Token::Record)) => { |
1022 | 2.44k | TypeDef::parse_record(tokens, docs, attributes).map(InterfaceItem::TypeDef) |
1023 | | } |
1024 | 7.83k | Some((_span, Token::Id)) | Some((_span, Token::ExplicitId)) => { |
1025 | 15.5k | NamedFunc::parse(tokens, docs, attributes).map(InterfaceItem::Func) |
1026 | | } |
1027 | 11.7k | Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(InterfaceItem::Use), |
1028 | 0 | other => Err(err_expected(tokens, "`type`, `resource` or `func`", other).into()), |
1029 | | } |
1030 | 47.3k | } |
1031 | | } |
1032 | | |
impl<'a> TypeDef<'a> {
    /// Parses a type alias: `type <name> = <ty>;`.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Type)?;
        let name = parse_id(tokens)?;
        tokens.expect(Token::Equals)?;
        let ty = Type::parse(tokens)?;
        tokens.expect_semicolon()?;
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses a flags definition: `flags <name> { <flag>, ... }`.
    fn parse_flags(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Flags)?;
        let name = parse_id(tokens)?;
        let ty = Type::Flags(Flags {
            // The flags type's span is the span of its name.
            span: name.span,
            flags: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    Ok(Flag { docs, name })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses a resource definition, either the bare form `resource <name>;`
    /// or the braced form `resource <name> { <member funcs> }`.
    fn parse_resource(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Resource)?;
        let name = parse_id(tokens)?;
        let mut funcs = Vec::new();
        if tokens.eat(Token::LeftBrace)? {
            // Braced form: parse member functions (each with its own docs and
            // attributes) until the closing brace.
            while !tokens.eat(Token::RightBrace)? {
                let docs = parse_docs(tokens)?;
                let attributes = Attribute::parse_list(tokens)?;
                funcs.push(ResourceFunc::parse(docs, attributes, tokens)?);
            }
        } else {
            // Bare form requires a terminating semicolon.
            tokens.expect_semicolon()?;
        }
        let ty = Type::Resource(Resource {
            span: name.span,
            funcs,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses a record definition: `record <name> { <field>: <ty>, ... }`.
    fn parse_record(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Record)?;
        let name = parse_id(tokens)?;
        let ty = Type::Record(Record {
            span: name.span,
            fields: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::Colon)?;
                    let ty = Type::parse(tokens)?;
                    Ok(Field { docs, name, ty })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses a variant definition: `variant <name> { <case>[(<ty>)], ... }`.
    fn parse_variant(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Variant)?;
        let name = parse_id(tokens)?;
        let ty = Type::Variant(Variant {
            span: name.span,
            cases: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    // A case optionally carries a parenthesized payload type.
                    let ty = if tokens.eat(Token::LeftParen)? {
                        let ty = Type::parse(tokens)?;
                        tokens.expect(Token::RightParen)?;
                        Some(ty)
                    } else {
                        None
                    };
                    Ok(Case { docs, name, ty })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses an enum definition: `enum <name> { <case>, ... }`.
    fn parse_enum(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Enum)?;
        let name = parse_id(tokens)?;
        let ty = Type::Enum(Enum {
            span: name.span,
            cases: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    Ok(EnumCase { docs, name })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }
}
1198 | | |
1199 | | impl<'a> NamedFunc<'a> { |
1200 | 15.5k | fn parse( |
1201 | 15.5k | tokens: &mut Tokenizer<'a>, |
1202 | 15.5k | docs: Docs<'a>, |
1203 | 15.5k | attributes: Vec<Attribute<'a>>, |
1204 | 15.5k | ) -> Result<Self> { |
1205 | 15.5k | let name = parse_id(tokens)?; |
1206 | 15.5k | tokens.expect(Token::Colon)?; |
1207 | 15.5k | let func = Func::parse(tokens)?; |
1208 | 15.5k | tokens.expect_semicolon()?; |
1209 | 15.5k | Ok(NamedFunc { |
1210 | 15.5k | docs, |
1211 | 15.5k | attributes, |
1212 | 15.5k | name, |
1213 | 15.5k | func, |
1214 | 15.5k | }) |
1215 | 15.5k | } |
1216 | | } |
1217 | | |
1218 | 431k | fn parse_id<'a>(tokens: &mut Tokenizer<'a>) -> Result<Id<'a>> { |
1219 | 431k | match tokens.next()? { |
1220 | 169k | Some((span, Token::Id)) => Ok(Id { |
1221 | 169k | name: tokens.parse_id(span)?, |
1222 | 169k | span, |
1223 | | }), |
1224 | 262k | Some((span, Token::ExplicitId)) => Ok(Id { |
1225 | 262k | name: tokens.parse_explicit_id(span)?, |
1226 | 262k | span, |
1227 | | }), |
1228 | 0 | other => Err(err_expected(tokens, "an identifier or string", other).into()), |
1229 | | } |
1230 | 431k | } |
1231 | | |
1232 | 27.6k | fn parse_opt_version(tokens: &mut Tokenizer<'_>) -> Result<Option<(Span, Version)>> { |
1233 | 27.6k | if tokens.eat(Token::At)? { |
1234 | 21.3k | parse_version(tokens).map(Some) |
1235 | | } else { |
1236 | 6.34k | Ok(None) |
1237 | | } |
1238 | 27.6k | } |
1239 | | |
/// Parses a semver version `<major>.<minor>.<patch>`, optionally followed by
/// `-<pre-release>` and/or `+<build>` metadata (see the grammar notes below),
/// validating the final text with the `semver` crate.
fn parse_version(tokens: &mut Tokenizer<'_>) -> Result<(Span, Version)> {
    let start = tokens.expect(Token::Integer)?.start();
    tokens.expect(Token::Period)?;
    tokens.expect(Token::Integer)?;
    tokens.expect(Token::Period)?;
    let end = tokens.expect(Token::Integer)?.end();
    let mut span = Span::new(start, end);
    // Chomp the optional pre-release (`-...`) then build (`+...`) suffixes,
    // growing `span` to cover whatever was consumed.
    eat_ids(tokens, Token::Minus, &mut span)?;
    eat_ids(tokens, Token::Plus, &mut span)?;
    let string = tokens.get_span(span);
    let version = Version::parse(string).map_err(|e| Error::new(span, e.to_string()))?;
    return Ok((span, version));

    // According to `semver.org` this is what we're parsing:
    //
    // ```ebnf
    // <pre-release> ::= <dot-separated pre-release identifiers>
    //
    // <dot-separated pre-release identifiers> ::= <pre-release identifier>
    //                                           | <pre-release identifier> "." <dot-separated pre-release identifiers>
    //
    // <build> ::= <dot-separated build identifiers>
    //
    // <dot-separated build identifiers> ::= <build identifier>
    //                                     | <build identifier> "." <dot-separated build identifiers>
    //
    // <pre-release identifier> ::= <alphanumeric identifier>
    //                            | <numeric identifier>
    //
    // <build identifier> ::= <alphanumeric identifier>
    //                      | <digits>
    //
    // <alphanumeric identifier> ::= <non-digit>
    //                             | <non-digit> <identifier characters>
    //                             | <identifier characters> <non-digit>
    //                             | <identifier characters> <non-digit> <identifier characters>
    //
    // <numeric identifier> ::= "0"
    //                        | <positive digit>
    //                        | <positive digit> <digits>
    //
    // <identifier characters> ::= <identifier character>
    //                           | <identifier character> <identifier characters>
    //
    // <identifier character> ::= <digit>
    //                          | <non-digit>
    //
    // <non-digit> ::= <letter>
    //               | "-"
    //
    // <digits> ::= <digit>
    //            | <digit> <digits>
    // ```
    //
    // This is loosely based on WIT syntax and an approximation is parsed here:
    //
    // * This function starts by parsing the optional leading `-` and `+` which
    //   indicates pre-release and build metadata.
    // * Afterwards all of $id, $integer, `-`, and `.` are chomped. The only
    //   exception here is that if `.` isn't followed by $id, $integer, or `-`
    //   then it's assumed that it's something like `use a:b@1.0.0-a.{...}`
    //   where the `.` is part of WIT syntax, not semver.
    //
    // Note that this additionally doesn't try to return any first-class errors.
    // Instead this bails out on something unrecognized for something else in
    // the system to return an error.
    fn eat_ids(tokens: &mut Tokenizer<'_>, prefix: Token, end: &mut Span) -> Result<()> {
        // No prefix token means no suffix of this kind; nothing to do.
        if !tokens.eat(prefix)? {
            return Ok(());
        }
        loop {
            // Speculatively advance on a clone and only commit (`*tokens =
            // clone`) for tokens that belong to the version string.
            let mut clone = tokens.clone();
            match clone.next()? {
                Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                    end.set_end(span.end());
                    *tokens = clone;
                }
                Some((_span, Token::Period)) => match clone.next()? {
                    Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                        end.set_end(span.end());
                        *tokens = clone;
                    }
                    // A `.` not followed by version material belongs to the
                    // surrounding WIT syntax; leave it unconsumed.
                    _ => break Ok(()),
                },
                _ => break Ok(()),
            }
        }
    }
}
1329 | | |
/// Collects the doc comments (skipping interleaved whitespace) that precede
/// the next significant token, advancing `tokens` past what was consumed.
fn parse_docs<'a>(tokens: &mut Tokenizer<'a>) -> Result<Docs<'a>> {
    let mut docs = Docs::default();
    // Peek raw tokens (including whitespace/comments) on a clone; `tokens`
    // is only advanced after each whitespace/comment token below, so the
    // first non-doc token remains unconsumed.
    let mut clone = tokens.clone();
    // Whether the docs span's start position has been anchored yet.
    let mut started = false;
    while let Some((span, token)) = clone.next_raw()? {
        match token {
            Token::Whitespace => {}
            Token::Comment => {
                let comment = tokens.get_span(span);
                if !started {
                    docs.span.set_start(span.start());
                    started = true;
                }
                // Exclude the comment's trailing whitespace from the span.
                let trailing_ws = comment
                    .bytes()
                    .rev()
                    .take_while(|ch| ch.is_ascii_whitespace())
                    .count();
                docs.span.set_end(span.end() - (trailing_ws as u32));
                docs.docs.push(comment.into());
            }
            // Any other token ends the doc block.
            _ => break,
        };
        *tokens = clone.clone();
    }
    Ok(docs)
}
1357 | | |
impl<'a> Type<'a> {
    /// Parses a single WIT type: primitives, parameterized containers
    /// (`list`, `tuple`, `option`, `result`, ...), handles, or a named
    /// reference to a user-defined type.
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
        match tokens.next()? {
            // Primitive scalar types carry only their span.
            Some((span, Token::U8)) => Ok(Type::U8(span)),
            Some((span, Token::U16)) => Ok(Type::U16(span)),
            Some((span, Token::U32)) => Ok(Type::U32(span)),
            Some((span, Token::U64)) => Ok(Type::U64(span)),
            Some((span, Token::S8)) => Ok(Type::S8(span)),
            Some((span, Token::S16)) => Ok(Type::S16(span)),
            Some((span, Token::S32)) => Ok(Type::S32(span)),
            Some((span, Token::S64)) => Ok(Type::S64(span)),
            Some((span, Token::F32)) => Ok(Type::F32(span)),
            Some((span, Token::F64)) => Ok(Type::F64(span)),
            Some((span, Token::Char)) => Ok(Type::Char(span)),

            // tuple<T, U, ...>
            Some((span, Token::Tuple)) => {
                let types = parse_list(
                    tokens,
                    Token::LessThan,
                    Token::GreaterThan,
                    |_docs, tokens| Type::parse(tokens),
                )?;
                Ok(Type::Tuple(Tuple { span, types }))
            }

            Some((span, Token::Bool)) => Ok(Type::Bool(span)),
            Some((span, Token::String_)) => Ok(Type::String(span)),

            // list<T>
            // list<T, N>
            Some((span, Token::List)) => {
                tokens.expect(Token::LessThan)?;
                let ty = Type::parse(tokens)?;
                // An optional `, N` makes this a fixed-length list.
                let size = if tokens.eat(Token::Comma)? {
                    let number = tokens.next()?;
                    if let Some((span, Token::Integer)) = number {
                        let size: u32 = tokens.get_span(span).parse()?;
                        Some(size)
                    } else {
                        return Err(err_expected(tokens, "fixed-length", number).into());
                    }
                } else {
                    None
                };
                tokens.expect(Token::GreaterThan)?;
                if let Some(size) = size {
                    Ok(Type::FixedLengthList(FixedLengthList {
                        span,
                        ty: Box::new(ty),
                        size,
                    }))
                } else {
                    Ok(Type::List(List {
                        span,
                        ty: Box::new(ty),
                    }))
                }
            }

            // map<K, V>
            Some((span, Token::Map)) => {
                tokens.expect(Token::LessThan)?;
                let key = Type::parse(tokens)?;
                tokens.expect(Token::Comma)?;
                let value = Type::parse(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Map(Map {
                    span,
                    key: Box::new(key),
                    value: Box::new(value),
                }))
            }

            // option<T>
            Some((span, Token::Option_)) => {
                tokens.expect(Token::LessThan)?;
                let ty = Type::parse(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Option(Option_ {
                    span,
                    ty: Box::new(ty),
                }))
            }

            // result<T, E>
            // result<_, E>
            // result<T>
            // result
            Some((span, Token::Result_)) => {
                let mut ok = None;
                let mut err = None;

                if tokens.eat(Token::LessThan)? {
                    // `_` in the ok position means "no ok payload" but an
                    // err payload must follow.
                    if tokens.eat(Token::Underscore)? {
                        tokens.expect(Token::Comma)?;
                        err = Some(Box::new(Type::parse(tokens)?));
                    } else {
                        ok = Some(Box::new(Type::parse(tokens)?));
                        if tokens.eat(Token::Comma)? {
                            err = Some(Box::new(Type::parse(tokens)?));
                        }
                    };
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Result(Result_ { span, ok, err }))
            }

            // future<T>
            // future
            Some((span, Token::Future)) => {
                let mut ty = None;

                if tokens.eat(Token::LessThan)? {
                    ty = Some(Box::new(Type::parse(tokens)?));
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Future(Future { span, ty }))
            }

            // stream<T>
            // stream
            Some((span, Token::Stream)) => {
                let mut ty = None;

                if tokens.eat(Token::LessThan)? {
                    ty = Some(Box::new(Type::parse(tokens)?));
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Stream(Stream { span, ty }))
            }

            // error-context
            Some((span, Token::ErrorContext)) => Ok(Type::ErrorContext(span)),

            // own<T>
            Some((_span, Token::Own)) => {
                tokens.expect(Token::LessThan)?;
                let resource = parse_id(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Handle(Handle::Own { resource }))
            }

            // borrow<T>
            Some((_span, Token::Borrow)) => {
                tokens.expect(Token::LessThan)?;
                let resource = parse_id(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Handle(Handle::Borrow { resource }))
            }

            // `foo`
            Some((span, Token::Id)) => Ok(Type::Name(Id {
                name: tokens.parse_id(span)?.into(),
                span,
            })),
            // `%foo`
            Some((span, Token::ExplicitId)) => Ok(Type::Name(Id {
                name: tokens.parse_explicit_id(span)?.into(),
                span,
            })),

            other => Err(err_expected(tokens, "a type", other).into()),
        }
    }

    /// Returns the source span of this type.
    fn span(&self) -> Span {
        match self {
            Type::Bool(span)
            | Type::U8(span)
            | Type::U16(span)
            | Type::U32(span)
            | Type::U64(span)
            | Type::S8(span)
            | Type::S16(span)
            | Type::S32(span)
            | Type::S64(span)
            | Type::F32(span)
            | Type::F64(span)
            | Type::Char(span)
            | Type::String(span)
            | Type::ErrorContext(span) => *span,
            Type::Name(id) => id.span,
            Type::List(l) => l.span,
            Type::Map(m) => m.span,
            Type::FixedLengthList(l) => l.span,
            Type::Handle(h) => h.span(),
            Type::Resource(r) => r.span,
            Type::Record(r) => r.span,
            Type::Flags(f) => f.span,
            Type::Variant(v) => v.span,
            Type::Tuple(t) => t.span,
            Type::Enum(e) => e.span,
            Type::Option(o) => o.span,
            Type::Result(r) => r.span,
            Type::Future(f) => f.span,
            Type::Stream(s) => s.span,
        }
    }
}
1558 | | |
/// Expects the `start` delimiter, then parses a comma-separated list of items
/// (each with its preceding docs) until `end`, delegating to
/// `parse_list_trailer` for the body.
fn parse_list<'a, T>(
    tokens: &mut Tokenizer<'a>,
    start: Token,
    end: Token,
    parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
) -> Result<Vec<T>> {
    tokens.expect(start)?;
    parse_list_trailer(tokens, end, parse)
}
1568 | | |
/// Parses comma-separated items until the `end` token, permitting a trailing
/// comma; the opening delimiter must already have been consumed.
fn parse_list_trailer<'a, T>(
    tokens: &mut Tokenizer<'a>,
    end: Token,
    mut parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
) -> Result<Vec<T>> {
    let mut items = Vec::new();
    loop {
        // get docs before we skip them to try to eat the end token
        let docs = parse_docs(tokens)?;

        // if we found an end token then we're done
        if tokens.eat(end)? {
            break;
        }

        let item = parse(docs, tokens)?;
        items.push(item);

        // if there's no trailing comma then this is required to be the end,
        // otherwise we go through the loop to try to get another item
        if !tokens.eat(Token::Comma)? {
            tokens.expect(end)?;
            break;
        }
    }
    Ok(items)
}
1596 | | |
1597 | 0 | fn err_expected( |
1598 | 0 | tokens: &Tokenizer<'_>, |
1599 | 0 | expected: &'static str, |
1600 | 0 | found: Option<(Span, Token)>, |
1601 | 0 | ) -> Error { |
1602 | 0 | match found { |
1603 | 0 | Some((span, token)) => Error::new( |
1604 | 0 | span, |
1605 | 0 | format!("expected {}, found {}", expected, token.describe()), |
1606 | | ), |
1607 | 0 | None => Error::new(tokens.eof_span(), format!("expected {expected}, found eof")), |
1608 | | } |
1609 | 0 | } |
1610 | | |
/// A parsed `@attr(...)` annotation attached to an item in a WIT document.
///
/// Each variant records the span of the attribute's name identifier for use
/// in later diagnostics.
enum Attribute<'a> {
    /// Parsed from `@since(version = <semver>)`.
    Since { span: Span, version: Version },
    /// Parsed from `@unstable(feature = <id>)`.
    Unstable { span: Span, feature: Id<'a> },
    /// Parsed from `@deprecated(version = <semver>)`.
    Deprecated { span: Span, version: Version },
}
1616 | | |
1617 | | impl<'a> Attribute<'a> { |
1618 | 149k | fn parse_list(tokens: &mut Tokenizer<'a>) -> Result<Vec<Attribute<'a>>> { |
1619 | 149k | let mut ret = Vec::new(); |
1620 | 154k | while tokens.eat(Token::At)? { |
1621 | 5.18k | let id = parse_id(tokens)?; |
1622 | 5.18k | let attr = match id.name { |
1623 | 5.18k | "since" => { |
1624 | 3.21k | tokens.expect(Token::LeftParen)?; |
1625 | 3.21k | eat_id(tokens, "version")?; |
1626 | 3.21k | tokens.expect(Token::Equals)?; |
1627 | 3.21k | let (_span, version) = parse_version(tokens)?; |
1628 | 3.21k | tokens.expect(Token::RightParen)?; |
1629 | 3.21k | Attribute::Since { |
1630 | 3.21k | span: id.span, |
1631 | 3.21k | version, |
1632 | 3.21k | } |
1633 | | } |
1634 | 1.96k | "unstable" => { |
1635 | 1.04k | tokens.expect(Token::LeftParen)?; |
1636 | 1.04k | eat_id(tokens, "feature")?; |
1637 | 1.04k | tokens.expect(Token::Equals)?; |
1638 | 1.04k | let feature = parse_id(tokens)?; |
1639 | 1.04k | tokens.expect(Token::RightParen)?; |
1640 | 1.04k | Attribute::Unstable { |
1641 | 1.04k | span: id.span, |
1642 | 1.04k | feature, |
1643 | 1.04k | } |
1644 | | } |
1645 | 917 | "deprecated" => { |
1646 | 917 | tokens.expect(Token::LeftParen)?; |
1647 | 917 | eat_id(tokens, "version")?; |
1648 | 917 | tokens.expect(Token::Equals)?; |
1649 | 917 | let (_span, version) = parse_version(tokens)?; |
1650 | 917 | tokens.expect(Token::RightParen)?; |
1651 | 917 | Attribute::Deprecated { |
1652 | 917 | span: id.span, |
1653 | 917 | version, |
1654 | 917 | } |
1655 | | } |
1656 | 0 | other => { |
1657 | 0 | bail!(Error::new(id.span, format!("unknown attribute `{other}`"),)) |
1658 | | } |
1659 | | }; |
1660 | 5.18k | ret.push(attr); |
1661 | | } |
1662 | 149k | Ok(ret) |
1663 | 149k | } |
1664 | | |
1665 | 0 | fn span(&self) -> Span { |
1666 | 0 | match self { |
1667 | 0 | Attribute::Since { span, .. } |
1668 | 0 | | Attribute::Unstable { span, .. } |
1669 | 0 | | Attribute::Deprecated { span, .. } => *span, |
1670 | | } |
1671 | 0 | } |
1672 | | } |
1673 | | |
1674 | 5.18k | fn eat_id(tokens: &mut Tokenizer<'_>, expected: &str) -> Result<Span> { |
1675 | 5.18k | let id = parse_id(tokens)?; |
1676 | 5.18k | if id.name != expected { |
1677 | 0 | bail!(Error::new( |
1678 | 0 | id.span, |
1679 | 0 | format!("expected `{expected}`, found `{}`", id.name), |
1680 | 0 | )); |
1681 | 5.18k | } |
1682 | 5.18k | Ok(id.span) |
1683 | 5.18k | } |
1684 | | |
/// A listing of source files which are used to get parsed into an
/// [`UnresolvedPackage`].
///
/// [`UnresolvedPackage`]: crate::UnresolvedPackage
#[derive(Clone, Default, Debug)]
pub struct SourceMap {
    // Files added so far, in push order. Each carries the absolute byte
    // offset where its contents begin, so a span can be mapped back to its
    // file by binary-searching these offsets.
    sources: Vec<Source>,
    // Running total size of all pushed contents (including the forcibly
    // appended trailing newlines); the offset the next pushed file will get.
    offset: u32,
}
1694 | | |
/// A single file recorded in a [`SourceMap`].
#[derive(Clone, Debug)]
struct Source {
    // Absolute byte offset of this file's contents within the whole map.
    offset: u32,
    // Name used in error messages; not necessarily a real filesystem path.
    path: String,
    // File contents with one `\n` forcibly appended (see `push_str`).
    contents: String,
}
1701 | | |
impl SourceMap {
    /// Creates a new empty source map.
    pub fn new() -> SourceMap {
        SourceMap::default()
    }

    /// Reads the file `path` on the filesystem and appends its contents to this
    /// [`SourceMap`].
    #[cfg(feature = "std")]
    pub fn push_file(&mut self, path: &Path) -> Result<()> {
        let contents = std::fs::read_to_string(path)
            .with_context(|| format!("failed to read file {path:?}"))?;
        self.push(path, contents);
        Ok(())
    }

    /// Appends the given contents with the given path into this source map.
    ///
    /// The `path` provided is not read from the filesystem and is instead only
    /// used during error messages. Each file added to a [`SourceMap`] is
    /// used to create the final parsed package namely by unioning all the
    /// interfaces and worlds defined together. Note that each file has its own
    /// personal namespace, however, for top-level `use` and such.
    #[cfg(feature = "std")]
    pub fn push(&mut self, path: &Path, contents: impl Into<String>) {
        self.push_str(&path.display().to_string(), contents);
    }

    /// Appends the given contents with the given source name into this source map.
    ///
    /// The `path` provided is not read from the filesystem and is instead only
    /// used during error messages. Each file added to a [`SourceMap`] is
    /// used to create the final parsed package namely by unioning all the
    /// interfaces and worlds defined together. Note that each file has its own
    /// personal namespace, however, for top-level `use` and such.
    pub fn push_str(&mut self, path: &str, contents: impl Into<String>) {
        let mut contents = contents.into();
        // Guarantee that there's at least one character in these contents by
        // appending a single newline to the end. This is excluded from
        // tokenization below so it's only here to ensure that spans which point
        // one byte beyond the end of a file (eof) point to the same original
        // file.
        contents.push('\n');
        let new_offset = self.offset + u32::try_from(contents.len()).unwrap();
        self.sources.push(Source {
            offset: self.offset,
            path: path.to_string(),
            contents,
        });
        self.offset = new_offset;
    }

    /// Appends all sources from another `SourceMap` into this one.
    ///
    /// Returns the byte offset that should be added to all `Span.start` and
    /// `Span.end` values from the appended source map to make them valid
    /// in the combined source map.
    pub fn append(&mut self, other: SourceMap) -> u32 {
        let base = self.offset;
        for mut source in other.sources {
            // Rebase each appended file past everything already stored here.
            source.offset += base;
            self.sources.push(source);
        }
        // `other.offset` is the total size of `other`'s contents, so the
        // combined running offset is the sum of both.
        self.offset += other.offset;
        base
    }

    /// Parses the files added to this source map into a
    /// [`UnresolvedPackageGroup`].
    pub fn parse(self) -> Result<UnresolvedPackageGroup> {
        let mut nested = Vec::new();
        // `rewrite_error` decorates any error raised in the closure with
        // file/line/column highlights from this source map.
        let main = self.rewrite_error(|| {
            let mut resolver = Resolver::default();
            let mut srcs = self.sources.iter().collect::<Vec<_>>();
            // Sort by path so resolution order is deterministic regardless of
            // the order in which files were pushed.
            srcs.sort_by_key(|src| &src.path);

            // Parse each source file individually. A tokenizer is created here
            // from settings and then `PackageFile` is used to parse the whole
            // stream of tokens.
            for src in srcs {
                let mut tokens = Tokenizer::new(
                    // chop off the forcibly appended `\n` character when
                    // passing through the source to get tokenized.
                    &src.contents[..src.contents.len() - 1],
                    src.offset,
                )
                .with_context(|| format!("failed to tokenize path: {}", src.path))?;
                let mut file = PackageFile::parse(&mut tokens)?;

                // Filter out any nested packages and resolve them separately.
                // Nested packages have only a single "file" so only one item
                // is pushed into a `Resolver`. Note that a nested `Resolver`
                // is used here, not the outer one.
                //
                // Note that filtering out `Package` items is required due to
                // how the implementation of disallowing nested packages in
                // nested packages currently works.
                for item in mem::take(&mut file.decl_list.items) {
                    match item {
                        AstItem::Package(nested_pkg) => {
                            let mut resolve = Resolver::default();
                            resolve.push(nested_pkg).with_context(|| {
                                format!("failed to handle nested package in: {}", src.path)
                            })?;

                            nested.push(resolve.resolve()?);
                        }
                        other => file.decl_list.items.push(other),
                    }
                }

                // With nested packages handled push this file into the
                // resolver.
                resolver
                    .push(file)
                    .with_context(|| format!("failed to start resolving path: {}", src.path))?;
            }
            Ok(resolver.resolve()?)
        })?;
        Ok(UnresolvedPackageGroup {
            main,
            nested,
            source_map: self,
        })
    }

    /// Runs `f` and, on failure, attaches source highlights from this map to
    /// the error before propagating it.
    pub(crate) fn rewrite_error<F, T>(&self, f: F) -> Result<T>
    where
        F: FnOnce() -> Result<T>,
    {
        let mut err = match f() {
            Ok(t) => return Ok(t),
            Err(e) => e,
        };
        // Structured error types know how to highlight themselves in place.
        if let Some(parse) = err.downcast_mut::<Error>() {
            parse.highlight(self);
            return Err(err);
        }
        if let Some(notfound) = err.downcast_mut::<PackageNotFoundError>() {
            notfound.highlight(self);
            return Err(err);
        }

        // Lexer errors only carry a position, so render the snippet here and
        // replace the error with the rendered message.
        if let Some(lex) = err.downcast_ref::<lex::Error>() {
            let pos = match lex {
                lex::Error::Unexpected(at, _)
                | lex::Error::UnterminatedComment(at)
                | lex::Error::Wanted { at, .. }
                | lex::Error::InvalidCharInId(at, _)
                | lex::Error::IdPartEmpty(at)
                | lex::Error::InvalidEscape(at, _) => *at,
            };
            let msg = self.highlight_err(pos, None, lex);
            bail!("{msg}")
        }

        if let Some(sort) = err.downcast_mut::<toposort::Error>() {
            sort.highlight(self);
        }

        Err(err)
    }

    /// Renders `err` with a snippet of the source covered by `span`, or
    /// `None` when the span is unknown (e.g. synthesized items).
    pub(crate) fn highlight_span(&self, span: Span, err: impl fmt::Display) -> Option<String> {
        if !span.is_known() {
            return None;
        }
        Some(self.highlight_err(span.start(), Some(span.end()), err))
    }

    // Renders `err` as a rustc-style diagnostic: message, `--> file:line:col`
    // header, the offending source line, and a `^---` marker under the span.
    fn highlight_err(&self, start: u32, end: Option<u32>, err: impl fmt::Display) -> String {
        let src = self.source_for_offset(start);
        let start = src.to_relative_offset(start);
        let end = end.map(|end| src.to_relative_offset(end));
        let (line, col) = src.linecol(start);
        let snippet = src.contents.lines().nth(line).unwrap_or("");
        // Convert to 1-based numbering for display.
        let line = line + 1;
        let col = col + 1;

        // If the snippet is too large then don't overload output on a terminal
        // for example and instead just print the error. This also sidesteps
        // Rust's restriction that `>0$` below has to be less than `u16::MAX`.
        if snippet.len() > 500 {
            return format!("{}:{line}:{col}: {err}", src.path);
        }
        // `{marker:>0$}` right-aligns the `^` to column `col` of the snippet.
        let mut msg = format!(
            "\
{err}
     --> {file}:{line}:{col}
      |
 {line:4} | {snippet}
      | {marker:>0$}",
            col,
            file = src.path,
            marker = "^",
        );
        // Extend the marker with one `-` per remaining character of the span.
        if let Some(end) = end {
            if let Some(s) = src.contents.get(start..end) {
                for _ in s.chars().skip(1) {
                    msg.push('-');
                }
            }
        }
        return msg;
    }

    /// Renders a span as a human-readable location string (e.g., "file.wit:10:5").
    pub fn render_location(&self, span: Span) -> String {
        if !span.is_known() {
            return "<unknown>".to_string();
        }
        let start = span.start();
        let src = self.source_for_offset(start);
        let rel_start = src.to_relative_offset(start);
        let (line, col) = src.linecol(rel_start);
        format!(
            "{file}:{line}:{col}",
            file = src.path,
            line = line + 1,
            col = col + 1,
        )
    }

    // Finds the `Source` whose contents contain the absolute offset `start`.
    fn source_for_offset(&self, start: u32) -> &Source {
        let i = match self.sources.binary_search_by_key(&start, |src| src.offset) {
            Ok(i) => i,
            // `Err(i)` is the insertion point, so the containing source is
            // the previous entry. NOTE(review): this underflows/panics if
            // `start` precedes all sources (e.g. an empty map) — callers are
            // presumably only passing offsets produced by this map.
            Err(i) => i - 1,
        };
        &self.sources[i]
    }

    /// Returns an iterator over all filenames added to this source map.
    #[cfg(feature = "std")]
    pub fn source_files(&self) -> impl Iterator<Item = &Path> {
        self.sources.iter().map(|src| Path::new(&src.path))
    }

    /// Returns an iterator over all source names added to this source map.
    pub fn source_names(&self) -> impl Iterator<Item = &str> {
        self.sources.iter().map(|src| src.path.as_str())
    }
}
1944 | | |
1945 | | impl Source { |
1946 | 0 | fn to_relative_offset(&self, offset: u32) -> usize { |
1947 | 0 | usize::try_from(offset - self.offset).unwrap() |
1948 | 0 | } |
1949 | | |
1950 | 0 | fn linecol(&self, relative_offset: usize) -> (usize, usize) { |
1951 | 0 | let mut cur = 0; |
1952 | | // Use split_terminator instead of lines so that if there is a `\r`, |
1953 | | // it is included in the offset calculation. The `+1` values below |
1954 | | // account for the `\n`. |
1955 | 0 | for (i, line) in self.contents.split_terminator('\n').enumerate() { |
1956 | 0 | if cur + line.len() + 1 > relative_offset { |
1957 | 0 | return (i, relative_offset - cur); |
1958 | 0 | } |
1959 | 0 | cur += line.len() + 1; |
1960 | | } |
1961 | 0 | (self.contents.lines().count(), 0) |
1962 | 0 | } |
1963 | | } |
1964 | | |
/// Result of [`parse_use_path`]: a `use` path specifier in parsed form.
pub enum ParsedUsePath {
    /// A bare identifier naming an item directly.
    Name(String),
    /// A package-qualified path: the package name plus the item's name
    /// within that package.
    Package(crate::PackageName, String),
}
1969 | | |
1970 | 0 | pub fn parse_use_path(s: &str) -> Result<ParsedUsePath> { |
1971 | 0 | let mut tokens = Tokenizer::new(s, 0)?; |
1972 | 0 | let path = UsePath::parse(&mut tokens)?; |
1973 | 0 | if tokens.next()?.is_some() { |
1974 | 0 | bail!("trailing tokens in path specifier"); |
1975 | 0 | } |
1976 | 0 | Ok(match path { |
1977 | 0 | UsePath::Id(id) => ParsedUsePath::Name(id.name.to_string()), |
1978 | 0 | UsePath::Package { id, name } => { |
1979 | 0 | ParsedUsePath::Package(id.package_name(), name.name.to_string()) |
1980 | | } |
1981 | | }) |
1982 | 0 | } |