// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![macro_escape]
use abi;
use ast::{BareFnTy, ClosureTy};
use ast::{StaticRegionTyParamBound, OtherRegionTyParamBound, TraitTyParamBound};
use ast::{Provided, Public, FnStyle};
use ast::{Mod, BiAdd, Arg, Arm, Attribute, BindByRef, BindByValue};
use ast::{BiBitAnd, BiBitOr, BiBitXor, Block};
use ast::{BlockCheckMode, UnBox};
use ast::{Crate, CrateConfig, Decl, DeclItem};
use ast::{DeclLocal, DefaultBlock, UnDeref, BiDiv, EMPTY_CTXT, EnumDef, ExplicitSelf};
use ast::{Expr, Expr_, ExprAddrOf, ExprMatch, ExprAgain};
use ast::{ExprAssign, ExprAssignOp, ExprBinary, ExprBlock, ExprBox};
use ast::{ExprBreak, ExprCall, ExprCast};
use ast::{ExprField, ExprFnBlock, ExprIf, ExprIndex};
use ast::{ExprLit, ExprLoop, ExprMac};
use ast::{ExprMethodCall, ExprParen, ExprPath, ExprProc};
use ast::{ExprRepeat, ExprRet, ExprStruct, ExprTup, ExprUnary};
use ast::{ExprVec, ExprVstore, ExprVstoreSlice};
use ast::{ExprVstoreMutSlice, ExprWhile, ExprForLoop, Field, FnDecl};
use ast::{ExprVstoreUniq, Once, Many};
use ast::{ForeignItem, ForeignItemStatic, ForeignItemFn, ForeignMod};
use ast::{Ident, NormalFn, Inherited, Item, Item_, ItemStatic};
use ast::{ItemEnum, ItemFn, ItemForeignMod, ItemImpl};
use ast::{ItemMac, ItemMod, ItemStruct, ItemTrait, ItemTy, Lit, Lit_};
use ast::{LitBool, LitFloat, LitFloatUnsuffixed, LitInt, LitChar};
use ast::{LitIntUnsuffixed, LitNil, LitStr, LitUint, Local, LocalLet};
use ast::{MutImmutable, MutMutable, Mac_, MacInvocTT, Matcher, MatchNonterminal};
use ast::{MatchSeq, MatchTok, Method, MutTy, BiMul, Mutability};
use ast::{NamedField, UnNeg, NoReturn, UnNot, P, Pat, PatEnum};
use ast::{PatIdent, PatLit, PatRange, PatRegion, PatStruct};
use ast::{PatTup, PatBox, PatWild, PatWildMulti};
use ast::{BiRem, Required};
use ast::{RetStyle, Return, BiShl, BiShr, Stmt, StmtDecl};
use ast::{Sized, DynSize, StaticSize};
use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, StructField};
use ast::{StructVariantKind, BiSub};
use ast::StrStyle;
use ast::{SelfRegion, SelfStatic, SelfUniq, SelfValue};
use ast::{TokenTree, TraitMethod, TraitRef, TTDelim, TTSeq, TTTok};
use ast::{TTNonterminal, TupleVariantKind, Ty, Ty_, TyBot, TyBox};
use ast::{TypeField, TyFixedLengthVec, TyClosure, TyProc, TyBareFn};
use ast::{TyTypeof, TyInfer, TypeMethod};
use ast::{TyNil, TyParam, TyParamBound, TyPath, TyPtr, TyRptr};
use ast::{TyTup, TyU32, TyUnboxedFn, TyUniq, TyVec, UnUniq};
use ast::{UnboxedFnTy, UnboxedFnTyParamBound, UnnamedField, UnsafeBlock};
use ast::{UnsafeFn, ViewItem, ViewItem_, ViewItemExternCrate, ViewItemUse};
use ast::{ViewPath, ViewPathGlob, ViewPathList, ViewPathSimple};
use ast::Visibility;
use ast;
use ast_util::{as_prec, lit_is_str, operator_prec};
use ast_util;
use codemap::{Span, BytePos, Spanned, spanned, mk_sp};
use codemap;
use parse::attr::ParserAttr;
use parse::classify;
use parse::common::{SeqSep, seq_sep_none};
use parse::common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed};
use parse::lexer::Reader;
use parse::lexer::TokenAndSpan;
use parse::obsolete::*;
use parse::token::{INTERPOLATED, InternedString, can_begin_expr};
use parse::token::{is_ident, is_ident_or_path, is_plain_ident};
use parse::token::{keywords, special_idents, token_to_binop};
use parse::token;
use parse::{new_sub_parser_from_file, ParseSess};
use owned_slice::OwnedSlice;
use std::collections::HashSet;
use std::mem::replace;
use std::rc::Rc;
use std::gc::Gc;
#[allow(non_camel_case_types)]
#[deriving(PartialEq)]
pub enum restriction {
UNRESTRICTED,
RESTRICT_STMT_EXPR,
RESTRICT_NO_BAR_OP,
RESTRICT_NO_BAR_OR_DOUBLEBAR_OP,
}
type ItemInfo = (Ident, Item_, Option<Vec<Attribute> >);
/// How to parse a path. There are four different kinds of paths, all of which
/// are parsed somewhat differently.
#[deriving(PartialEq)]
pub enum PathParsingMode {
/// A path with no type parameters; e.g. `foo::bar::Baz`
NoTypesAllowed,
/// A path with a lifetime and type parameters, with no double colons
/// before the type parameters; e.g. `foo::bar<'a>::Baz<T>`
LifetimeAndTypesWithoutColons,
/// A path with a lifetime and type parameters with double colons before
/// the type parameters; e.g. `foo::bar::<'a>::Baz::<T>`
LifetimeAndTypesWithColons,
/// A path with a lifetime and type parameters with bounds before the last
/// set of type parameters only; e.g. `foo::bar<'a>::Baz:X+Y<T>`. This
/// form does not use extra double colons.
LifetimeAndTypesAndBounds,
}
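// Illustrative examples of where each mode applies (a sketch of typical use,
// not an exhaustive mapping):
//
//     use foo::bar::Baz;            // view path: NoTypesAllowed
//     let x: Foo<int> = ...;        // type position: LifetimeAndTypesWithoutColons
//     let y = Foo::<int>::new();    // expression position: LifetimeAndTypesWithColons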
/// A path paired with optional type bounds.
pub struct PathAndBounds {
pub path: ast::Path,
pub bounds: Option<OwnedSlice<TyParamBound>>,
}
enum ItemOrViewItem {
// Indicates a failure to parse any kind of item. The attributes are
// returned.
IoviNone(Vec<Attribute>),
IoviItem(Gc<Item>),
IoviForeignItem(Gc<ForeignItem>),
IoviViewItem(ViewItem)
}
// Possibly accept an `INTERPOLATED` expression (a pre-parsed expression
// dropped into the token stream, which happens while parsing the
// result of macro expansion)
/* The placement of these macros turns out not to be complicated; the
important thing is to make sure that lookahead doesn't balk
at INTERPOLATED tokens */
macro_rules! maybe_whole_expr (
($p:expr) => (
{
let found = match $p.token {
INTERPOLATED(token::NtExpr(e)) => {
Some(e)
}
INTERPOLATED(token::NtPath(_)) => {
// FIXME: The following avoids an issue with lexical borrowck scopes,
// but the clone is unfortunate.
let pt = match $p.token {
INTERPOLATED(token::NtPath(ref pt)) => (**pt).clone(),
_ => unreachable!()
};
Some($p.mk_expr($p.span.lo, $p.span.hi, ExprPath(pt)))
}
INTERPOLATED(token::NtBlock(b)) => {
Some($p.mk_expr($p.span.lo, $p.span.hi, ExprBlock(b)))
}
_ => None
};
match found {
Some(e) => {
$p.bump();
return e;
}
None => ()
}
}
)
)
// As above, but for things other than expressions
macro_rules! maybe_whole (
($p:expr, $constructor:ident) => (
{
let found = match ($p).token {
INTERPOLATED(token::$constructor(_)) => {
Some(($p).bump_and_get())
}
_ => None
};
match found {
Some(INTERPOLATED(token::$constructor(x))) => {
return x.clone()
}
_ => {}
}
}
);
(no_clone $p:expr, $constructor:ident) => (
{
let found = match ($p).token {
INTERPOLATED(token::$constructor(_)) => {
Some(($p).bump_and_get())
}
_ => None
};
match found {
Some(INTERPOLATED(token::$constructor(x))) => {
return x
}
_ => {}
}
}
);
(deref $p:expr, $constructor:ident) => (
{
let found = match ($p).token {
INTERPOLATED(token::$constructor(_)) => {
Some(($p).bump_and_get())
}
_ => None
};
match found {
Some(INTERPOLATED(token::$constructor(x))) => {
return (*x).clone()
}
_ => {}
}
}
);
(Some $p:expr, $constructor:ident) => (
{
let found = match ($p).token {
INTERPOLATED(token::$constructor(_)) => {
Some(($p).bump_and_get())
}
_ => None
};
match found {
Some(INTERPOLATED(token::$constructor(x))) => {
return Some(x.clone())
}
_ => {}
}
}
);
(iovi $p:expr, $constructor:ident) => (
{
let found = match ($p).token {
INTERPOLATED(token::$constructor(_)) => {
Some(($p).bump_and_get())
}
_ => None
};
match found {
Some(INTERPOLATED(token::$constructor(x))) => {
return IoviItem(x.clone())
}
_ => {}
}
}
);
(pair_empty $p:expr, $constructor:ident) => (
{
let found = match ($p).token {
INTERPOLATED(token::$constructor(_)) => {
Some(($p).bump_and_get())
}
_ => None
};
match found {
Some(INTERPOLATED(token::$constructor(x))) => {
return (Vec::new(), x)
}
_ => {}
}
}
)
)
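// Illustrative invocations (a sketch only; the exact call sites appear later
// in this module and may use other macro variants or nonterminal names):
//
//     maybe_whole_expr!(self);                 // short-circuit on an interpolated expression
//     maybe_whole!(no_clone self, NtItem);     // return an interpolated item by value
//     maybe_whole!(deref self, NtTy);          // clone out of an interpolated type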
fn maybe_append(lhs: Vec<Attribute> , rhs: Option<Vec<Attribute> >)
-> Vec<Attribute> {
match rhs {
None => lhs,
Some(ref attrs) => lhs.append(attrs.as_slice())
}
}
struct ParsedItemsAndViewItems {
attrs_remaining: Vec<Attribute>,
view_items: Vec<ViewItem>,
items: Vec<Gc<Item>>,
foreign_items: Vec<Gc<ForeignItem>>
}
/* ident is handled by common.rs */
pub struct Parser<'a> {
pub sess: &'a ParseSess,
// the current token:
pub token: token::Token,
// the span of the current token:
pub span: Span,
// the span of the prior token:
pub last_span: Span,
pub cfg: CrateConfig,
// the previous token or None (only stashed sometimes).
pub last_token: Option<Box<token::Token>>,
pub buffer: [TokenAndSpan, ..4],
pub buffer_start: int,
pub buffer_end: int,
pub tokens_consumed: uint,
pub restriction: restriction,
pub quote_depth: uint, // not (yet) related to the quasiquoter
pub reader: Box<Reader:>,
pub interner: Rc<token::IdentInterner>,
/// The set of seen errors about obsolete syntax. Used to suppress
/// extra detail when the same error is seen twice
pub obsolete_set: HashSet<ObsoleteSyntax>,
/// Used to determine the path to externally loaded source files
pub mod_path_stack: Vec<InternedString>,
/// Stack of spans of open delimiters. Used for error messages.
pub open_braces: Vec<Span>,
/// Flag if this parser "owns" the directory that it is currently parsing
/// in. This will affect how nested files are looked up.
pub owns_directory: bool,
/// Name of the root module this parser originated from. If `None`, then the
/// name is not known. This does not change while the parser is descending
/// into modules, and sub-parsers have new values for this name.
pub root_module_name: Option<String>,
}
fn is_plain_ident_or_underscore(t: &token::Token) -> bool {
is_plain_ident(t) || *t == token::UNDERSCORE
}
impl<'a> Parser<'a> {
pub fn new(sess: &'a ParseSess, cfg: ast::CrateConfig, mut rdr: Box<Reader:>) -> Parser<'a> {
let tok0 = rdr.next_token();
let span = tok0.sp;
let placeholder = TokenAndSpan {
tok: token::UNDERSCORE,
sp: span,
};
Parser {
reader: rdr,
interner: token::get_ident_interner(),
sess: sess,
cfg: cfg,
token: tok0.tok,
span: span,
last_span: span,
last_token: None,
buffer: [
placeholder.clone(),
placeholder.clone(),
placeholder.clone(),
placeholder.clone(),
],
buffer_start: 0,
buffer_end: 0,
tokens_consumed: 0,
restriction: UNRESTRICTED,
quote_depth: 0,
obsolete_set: HashSet::new(),
mod_path_stack: Vec::new(),
open_braces: Vec::new(),
owns_directory: true,
root_module_name: None,
}
}
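// Illustrative construction and driving loop (a sketch only; real entry points
// live in the `parse` module, and `sess`, `cfg`, and `rdr` here stand for
// values obtained there):
//
//     let mut p = Parser::new(sess, cfg, rdr);
//     while p.token != token::EOF {
//         // inspect p.token / p.span here, then advance
//         p.bump();
//     }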
// convert a token to a string
pub fn token_to_str(token: &token::Token) -> String {
token::to_str(token)
}
// convert the current token to a string
pub fn this_token_to_str(&mut self) -> String {
Parser::token_to_str(&self.token)
}
pub fn unexpected_last(&mut self, t: &token::Token) -> ! {
let token_str = Parser::token_to_str(t);
self.span_fatal(self.last_span, format!("unexpected token: `{}`",
token_str).as_slice());
}
pub fn unexpected(&mut self) -> ! {
let this_token = self.this_token_to_str();
self.fatal(format!("unexpected token: `{}`", this_token).as_slice());
}
// expect and consume the token t. Signal an error if
// the next token is not t.
pub fn expect(&mut self, t: &token::Token) {
if self.token == *t {
self.bump();
} else {
let token_str = Parser::token_to_str(t);
let this_token_str = self.this_token_to_str();
self.fatal(format!("expected `{}` but found `{}`",
token_str,
this_token_str).as_slice())
}
}
// Expect next token to be edible or inedible token. If edible,
// then consume it; if inedible, then return without consuming
// anything. Signal a fatal error if next token is unexpected.
pub fn expect_one_of(&mut self,
edible: &[token::Token],
inedible: &[token::Token]) {
fn tokens_to_str(tokens: &[token::Token]) -> String {
let mut i = tokens.iter();
// This might be a sign we need a connect method on Iterator.
let b = i.next()
.map_or("".to_string(), |t| Parser::token_to_str(t));
i.fold(b, |b,a| {
let mut b = b;
b.push_str("`, `");
b.push_str(Parser::token_to_str(a).as_slice());
b
})
}
if edible.contains(&self.token) {
self.bump();
} else if inedible.contains(&self.token) {
// leave it in the input
} else {
let expected = edible.iter().map(|x| (*x).clone()).collect::<Vec<_>>().append(inedible);
let expect = tokens_to_str(expected.as_slice());
let actual = self.this_token_to_str();
self.fatal(
(if expected.len() != 1 {
(format!("expected one of `{}` but found `{}`",
expect,
actual))
} else {
(format!("expected `{}` but found `{}`",
expect,
actual))
}).as_slice()
)
}
}
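// Illustrative call (an assumed call site, not quoted from one): after parsing
// a field in a braced list, accept a comma and tolerate, without consuming,
// the closing brace:
//
//     self.expect_one_of(&[token::COMMA], &[token::RBRACE]);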
// Check for erroneous `ident { }`; if matches, signal error and
// recover (without consuming any expected input token). Returns
// true if and only if input was consumed for recovery.
pub fn check_for_erroneous_unit_struct_expecting(&mut self, expected: &[token::Token]) -> bool {
if self.token == token::LBRACE
&& expected.iter().all(|t| *t != token::LBRACE)
&& self.look_ahead(1, |t| *t == token::RBRACE) {
// matched; signal non-fatal error and recover.
self.span_err(self.span,
"unit-like struct construction is written with no trailing `{ }`");
self.eat(&token::LBRACE);
self.eat(&token::RBRACE);
true
} else {
false
}
}
// Commit to parsing a complete expression `e` expected to be
// followed by some token from the set edible + inedible. Recover
// from anticipated input errors, discarding erroneous characters.
pub fn commit_expr(&mut self, e: Gc<Expr>, edible: &[token::Token],
inedible: &[token::Token]) {
debug!("commit_expr {:?}", e);
match e.node {
ExprPath(..) => {
// might be unit-struct construction; check for recoverable input error.
let expected = edible.iter().map(|x| (*x).clone()).collect::<Vec<_>>()
.append(inedible);
self.check_for_erroneous_unit_struct_expecting(
expected.as_slice());
}
_ => {}
}
self.expect_one_of(edible, inedible)
}
pub fn commit_expr_expecting(&mut self, e: Gc<Expr>, edible: token::Token) {
self.commit_expr(e, &[edible], &[])
}
// Commit to parsing a complete statement `s`, which expects to be
// followed by some token from the set edible + inedible. Check
// for recoverable input errors, discarding erroneous characters.
pub fn commit_stmt(&mut self, s: Gc<Stmt>, edible: &[token::Token],
inedible: &[token::Token]) {
debug!("commit_stmt {:?}", s);
let _s = s; // unused, but future checks might want to inspect `s`.
if self.last_token.as_ref().map_or(false, |t| is_ident_or_path(*t)) {
let expected = edible.iter().map(|x| (*x).clone()).collect::<Vec<_>>()
.append(inedible.as_slice());
self.check_for_erroneous_unit_struct_expecting(
expected.as_slice());
}
self.expect_one_of(edible, inedible)
}
pub fn commit_stmt_expecting(&mut self, s: Gc<Stmt>, edible: token::Token) {
self.commit_stmt(s, &[edible], &[])
}
pub fn parse_ident(&mut self) -> ast::Ident {
self.check_strict_keywords();
self.check_reserved_keywords();
match self.token {
token::IDENT(i, _) => {
self.bump();
i
}
token::INTERPOLATED(token::NtIdent(..)) => {
self.bug("ident interpolation not converted to real token");
}
_ => {
let token_str = self.this_token_to_str();
self.fatal((format!("expected ident, found `{}`",
token_str)).as_slice())
}
}
}
pub fn parse_path_list_ident(&mut self) -> ast::PathListIdent {
let lo = self.span.lo;
let ident = self.parse_ident();
let hi = self.last_span.hi;
spanned(lo, hi, ast::PathListIdent_ { name: ident,
id: ast::DUMMY_NODE_ID })
}
// consume token 'tok' if it exists. Returns true if the given
// token was present, false otherwise.
pub fn eat(&mut self, tok: &token::Token) -> bool {
let is_present = self.token == *tok;
if is_present { self.bump() }
is_present
}
pub fn is_keyword(&mut self, kw: keywords::Keyword) -> bool {
token::is_keyword(kw, &self.token)
}
// if the next token is the given keyword, eat it and return
// true. Otherwise, return false.
pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool {
let is_kw = match self.token {
token::IDENT(sid, false) => kw.to_ident().name == sid.name,
_ => false
};
if is_kw { self.bump() }
is_kw
}
// If the next token is the given keyword, eat it; otherwise,
// signal a fatal error.
pub fn expect_keyword(&mut self, kw: keywords::Keyword) {
if !self.eat_keyword(kw) {
let id_interned_str = token::get_ident(kw.to_ident());
let token_str = self.this_token_to_str();
self.fatal(format!("expected `{}`, found `{}`",
id_interned_str, token_str).as_slice())
}
}
// signal an error if the current token is a strict keyword
pub fn check_strict_keywords(&mut self) {
if token::is_strict_keyword(&self.token) {
let token_str = self.this_token_to_str();
self.span_err(self.span,
format!("found `{}` in ident position",
token_str).as_slice());
}
}
// signal an error if the current token is a reserved keyword
pub fn check_reserved_keywords(&mut self) {
if token::is_reserved_keyword(&self.token) {
let token_str = self.this_token_to_str();
self.fatal(format!("`{}` is a reserved keyword",
token_str).as_slice())
}
}
// Expect and consume an `&`. If `&&` is seen, replace it with a single
// `&` and continue. If an `&` is not seen, signal an error.
fn expect_and(&mut self) {
match self.token {
token::BINOP(token::AND) => self.bump(),
token::ANDAND => {
let lo = self.span.lo + BytePos(1);
self.replace_token(token::BINOP(token::AND), lo, self.span.hi)
}
_ => {
let found_token = self.this_token_to_str();
let token_str =
Parser::token_to_str(&token::BINOP(token::AND));
self.fatal(format!("expected `{}`, found `{}`",
token_str,
found_token).as_slice())
}
}
}
// Expect and consume a `|`. If `||` is seen, replace it with a single
// `|` and continue. If a `|` is not seen, signal an error.
fn expect_or(&mut self) {
match self.token {
token::BINOP(token::OR) => self.bump(),
token::OROR => {
let lo = self.span.lo + BytePos(1);
self.replace_token(token::BINOP(token::OR), lo, self.span.hi)
}
_ => {
let found_token = self.this_token_to_str();
let token_str =
Parser::token_to_str(&token::BINOP(token::OR));
self.fatal(format!("expected `{}`, found `{}`",
token_str,
found_token).as_slice())
}
}
}
// Attempt to consume a `<`. If `<<` is seen, replace it with a single
// `<` and continue. If a `<` is not seen, return false.
//
// This is meant to be used when parsing generics on a path to get the
// starting token. The `force` parameter is used to forcefully break up a
// `<<` token. If `force` is false, then `<<` is only broken when a lifetime
// shows up next. For example, consider the expression:
//
// foo as bar << test
//
// The parser needs to know if `bar <<` is the start of a generic path or if
// it's a left-shift token. If `test` were a lifetime, then it's impossible
// for the token to be a left-shift, but if it's not a lifetime, then it's
// considered a left-shift.
//
// The reason for this is that the only current ambiguity with `<<` is when
// parsing closure types:
//
// foo::<<'a> ||>();
// impl Foo<<'a> ||>() { ... }
fn eat_lt(&mut self, force: bool) -> bool {
match self.token {
token::LT => { self.bump(); true }
token::BINOP(token::SHL) => {
let next_lifetime = self.look_ahead(1, |t| match *t {
token::LIFETIME(..) => true,
_ => false,
});
if force || next_lifetime {
let lo = self.span.lo + BytePos(1);
self.replace_token(token::LT, lo, self.span.hi);
true
} else {
false
}
}
_ => false,
}
}
fn expect_lt(&mut self) {
if !self.eat_lt(true) {
let found_token = self.this_token_to_str();
let token_str = Parser::token_to_str(&token::LT);
self.fatal(format!("expected `{}`, found `{}`",
token_str,
found_token).as_slice())
}
}
// Parse a sequence bracketed by `|` and `|`, stopping before the `|`.
fn parse_seq_to_before_or<T>(
&mut self,
sep: &token::Token,
f: |&mut Parser| -> T)
-> Vec<T> {
let mut first = true;
let mut vector = Vec::new();
while self.token != token::BINOP(token::OR) &&
self.token != token::OROR {
if first {
first = false
} else {
self.expect(sep)
}
vector.push(f(self))
}
vector
}
// Expect and consume a `>`. If a `>>` is seen, replace it
// with a single `>` and continue. If a `>` is not seen,
// signal an error.
pub fn expect_gt(&mut self) {
match self.token {
token::GT => self.bump(),
token::BINOP(token::SHR) => {
let lo = self.span.lo + BytePos(1);
self.replace_token(token::GT, lo, self.span.hi)
}
_ => {
let gt_str = Parser::token_to_str(&token::GT);
let this_token_str = self.this_token_to_str();
self.fatal(format!("expected `{}`, found `{}`",
gt_str,
this_token_str).as_slice())
}
}
}
// Parse a sequence bracketed by `<` and `>`, stopping
// before the `>`.
pub fn parse_seq_to_before_gt<T>(
&mut self,
sep: Option<token::Token>,
f: |&mut Parser| -> T)
-> OwnedSlice<T> {
let mut first = true;
let mut v = Vec::new();
while self.token != token::GT
&& self.token != token::BINOP(token::SHR) {
match sep {
Some(ref t) => {
if first { first = false; }
else { self.expect(t); }
}
_ => ()
}
v.push(f(self));
}
return OwnedSlice::from_vec(v);
}
pub fn parse_seq_to_gt<T>(
&mut self,
sep: Option<token::Token>,
f: |&mut Parser| -> T)
-> OwnedSlice<T> {
let v = self.parse_seq_to_before_gt(sep, f);
self.expect_gt();
return v;
}
// parse a sequence, including the closing delimiter. The function
// f must consume tokens until reaching the next separator or
// closing bracket.
pub fn parse_seq_to_end<T>(
&mut self,
ket: &token::Token,
sep: SeqSep,
f: |&mut Parser| -> T)
-> Vec<T> {
let val = self.parse_seq_to_before_end(ket, sep, f);
self.bump();
val
}
// parse a sequence, not including the closing delimiter. The function
// f must consume tokens until reaching the next separator or
// closing bracket.
pub fn parse_seq_to_before_end<T>(
&mut self,
ket: &token::Token,
sep: SeqSep,
f: |&mut Parser| -> T)
-> Vec<T> {
let mut first: bool = true;
let mut v = vec!();
while self.token != *ket {
match sep.sep {
Some(ref t) => {
if first { first = false; }
else { self.expect(t); }
}
_ => ()
}
if sep.trailing_sep_allowed && self.token == *ket { break; }
v.push(f(self));
}
return v;
}
// parse a sequence bracketed by the tokens `bra` and `ket`, consuming both
// delimiters. The function f must consume tokens until reaching the next
// separator or closing bracket.
pub fn parse_unspanned_seq<T>(
&mut self,
bra: &token::Token,
ket: &token::Token,
sep: SeqSep,
f: |&mut Parser| -> T)
-> Vec<T> {
self.expect(bra);
let result = self.parse_seq_to_before_end(ket, sep, f);
self.bump();
result
}
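// Illustrative call (a sketch; real call sites may differ in details): parse a
// parenthesized, comma-separated list of expressions, allowing a trailing
// comma:
//
//     let args = self.parse_unspanned_seq(
//         &token::LPAREN,
//         &token::RPAREN,
//         seq_sep_trailing_allowed(token::COMMA),
//         |p| p.parse_expr());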
// parse the parenthesized argument sequence of an enum variant. For
// consistency, these should not be empty.
pub fn parse_enum_variant_seq<T>(
&mut self,
bra: &token::Token,
ket: &token::Token,
sep: SeqSep,
f: |&mut Parser| -> T)
-> Vec<T> {
let result = self.parse_unspanned_seq(bra, ket, sep, f);
if result.is_empty() {
self.span_err(self.last_span,
"nullary enum variants are written with no trailing `( )`");
}
result
}
// NB: Do not use this function unless you actually plan to place the
// spanned list in the AST.
pub fn parse_seq<T>(
&mut self,
bra: &token::Token,
ket: &token::Token,
sep: SeqSep,
f: |&mut Parser| -> T)
-> Spanned<Vec<T> > {
let lo = self.span.lo;
self.expect(bra);
let result = self.parse_seq_to_before_end(ket, sep, f);
let hi = self.span.hi;
self.bump();
spanned(lo, hi, result)
}
// advance the parser by one token
pub fn bump(&mut self) {
self.last_span = self.span;
// Stash token for error recovery (sometimes; clone is not necessarily cheap).
self.last_token = if is_ident_or_path(&self.token) {
Some(box self.token.clone())
} else {
None
};
let next = if self.buffer_start == self.buffer_end {
self.reader.next_token()
} else {
// Avoid token copies with `replace`.
let buffer_start = self.buffer_start as uint;
let next_index = (buffer_start + 1) & 3 as uint;
self.buffer_start = next_index as int;
let placeholder = TokenAndSpan {
tok: token::UNDERSCORE,
sp: self.span,
};
replace(&mut self.buffer[buffer_start], placeholder)
};
self.span = next.sp;
self.token = next.tok;
self.tokens_consumed += 1u;
}
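// The lookahead buffer is a fixed ring of four slots, so indices wrap with
// `& 3`; for example:
//
//     buffer_start = 3  =>  next_index = (3 + 1) & 3 = 0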
// Advance the parser by one token and return the bumped token.
pub fn bump_and_get(&mut self) -> token::Token {
let old_token = replace(&mut self.token, token::UNDERSCORE);
self.bump();
old_token
}
// EFFECT: replace the current token and span with the given one
pub fn replace_token(&mut self,
next: token::Token,
lo: BytePos,
hi: BytePos) {
self.last_span = mk_sp(self.span.lo, lo);
self.token = next;
self.span = mk_sp(lo, hi);
}
pub fn buffer_length(&mut self) -> int {
if self.buffer_start <= self.buffer_end {
return self.buffer_end - self.buffer_start;
}
return (4 - self.buffer_start) + self.buffer_end;
}
pub fn look_ahead<R>(&mut self, distance: uint, f: |&token::Token| -> R)
-> R {
let dist = distance as int;
while self.buffer_length() < dist {
self.buffer[self.buffer_end as uint] = self.reader.next_token();
self.buffer_end = (self.buffer_end + 1) & 3;
}
f(&self.buffer[((self.buffer_start + dist - 1) & 3) as uint].tok)
}
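// Illustrative use (this exact pattern appears in
// check_for_erroneous_unit_struct_expecting above): peek one token ahead
// without consuming anything:
//
//     self.look_ahead(1, |t| *t == token::RBRACE)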
pub fn fatal(&mut self, m: &str) -> ! {
self.sess.span_diagnostic.span_fatal(self.span, m)
}
pub fn span_fatal(&mut self, sp: Span, m: &str) -> ! {
self.sess.span_diagnostic.span_fatal(sp, m)
}
pub fn span_note(&mut self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_note(sp, m)
}
pub fn bug(&mut self, m: &str) -> ! {
self.sess.span_diagnostic.span_bug(self.span, m)
}
pub fn warn(&mut self, m: &str) {
self.sess.span_diagnostic.span_warn(self.span, m)
}
pub fn span_warn(&mut self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_warn(sp, m)
}
pub fn span_err(&mut self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_err(sp, m)
}
pub fn abort_if_errors(&mut self) {
self.sess.span_diagnostic.handler().abort_if_errors();
}
pub fn id_to_interned_str(&mut self, id: Ident) -> InternedString {
token::get_ident(id)
}
// Is the current token one of the keywords that signals a bare function
// type?
pub fn token_is_bare_fn_keyword(&mut self) -> bool {
if token::is_keyword(keywords::Fn, &self.token) {
return true
}
if token::is_keyword(keywords::Unsafe, &self.token) ||
token::is_keyword(keywords::Once, &self.token) {
return self.look_ahead(1, |t| token::is_keyword(keywords::Fn, t))
}
false
}
// Is the current token one of the keywords that signals a closure type?
pub fn token_is_closure_keyword(&mut self) -> bool {
token::is_keyword(keywords::Unsafe, &self.token) ||
token::is_keyword(keywords::Once, &self.token)
}
// Is the current token one of the keywords that signals an old-style
// closure type (with explicit sigil)?
pub fn token_is_old_style_closure_keyword(&mut self) -> bool {
token::is_keyword(keywords::Unsafe, &self.token) ||
token::is_keyword(keywords::Once, &self.token) ||
token::is_keyword(keywords::Fn, &self.token)
}
pub fn token_is_lifetime(tok: &token::Token) -> bool {
match *tok {
token::LIFETIME(..) => true,
_ => false,
}
}
pub fn get_lifetime(&mut self) -> ast::Ident {
match self.token {
token::LIFETIME(ref ident) => *ident,
_ => self.bug("not a lifetime"),
}
}
// parse a TyBareFn type:
pub fn parse_ty_bare_fn(&mut self) -> Ty_ {
/*
[unsafe] [extern "ABI"] fn <'lt> (S) -> T
^~~~^ ^~~~^ ^~~~^ ^~^ ^
| | | | |
| | | | Return type
| | | Argument types
| | Lifetimes
| ABI
Function Style
*/
let fn_style = self.parse_unsafety();
let abi = if self.eat_keyword(keywords::Extern) {
self.parse_opt_abi().unwrap_or(abi::C)
} else {
abi::Rust
};
self.expect_keyword(keywords::Fn);
let (decl, lifetimes) = self.parse_ty_fn_decl(true);
return TyBareFn(box(GC) BareFnTy {
abi: abi,
fn_style: fn_style,
lifetimes: lifetimes,
decl: decl
});
}
// Parses a procedure type (`proc`). The initial `proc` keyword must
// already have been parsed.
pub fn parse_proc_type(&mut self) -> Ty_ {
/*
proc <'lt> (S) [:Bounds] -> T
^~~^ ^~~~^ ^ ^~~~~~~~^ ^
| | | | |