1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
use range::Range;
use read_token::ReadToken;
use super::{
ret_err,
update,
};
use {
DebugId,
MetaData,
ParseError,
Rule,
};
use tokenizer::TokenizerState;
/// Stores information about an optional rule.
///
/// Wraps a sub rule whose failure does not fail the overall parse.
#[derive(Clone, Debug, PartialEq)]
pub struct Optional {
    /// The sub rule that is optional to match.
    pub rule: Rule,
    /// A debug id used to track down errors to this rule.
    pub debug_id: DebugId,
}
impl Optional {
    /// Parses the optional sub rule.
    ///
    /// On success, returns the consumed range, the sub rule's new
    /// tokenizer state and any error recorded while matching.
    /// On failure, returns a zero-length range at the start position,
    /// a clone of the caller's unchanged state and the wrapped error.
    pub fn parse(
        &self,
        tokens: &mut Vec<Range<MetaData>>,
        state: &TokenizerState,
        read_token: &ReadToken,
        refs: &[Rule]
    ) -> (Range, TokenizerState, Option<Range<ParseError>>) {
        let start = read_token;
        let mut cursor = *start;
        let mut opt_error = None;
        match self.rule.parse(tokens, state, &cursor, refs) {
            Ok((range, new_state, err)) => {
                // Advance the cursor past the matched range and keep
                // the furthest error for later reporting.
                update(range, err, &mut cursor, &mut opt_error);
                (cursor.subtract(start), new_state, opt_error)
            }
            Err(err) => (
                start.start(),
                state.clone(),
                Some(ret_err(err, opt_error)),
            ),
        }
    }
}
#[cfg(test)]
mod tests {
    use all::*;
    use all::tokenizer::*;
    use meta_rules::{ Number, Optional, Sequence, Text };
    use range::Range;
    use read_token::ReadToken;
    use std::sync::Arc;

    #[test]
    fn fail_but_continue() {
        // A sequence requiring a text token followed by a number;
        // the input "2" makes the leading text rule fail.
        let num: Arc<String> = Arc::new("num".into());
        let sub_rule = Rule::Sequence(Sequence {
            debug_id: 1,
            args: vec![
                Rule::Text(Text {
                    debug_id: 2,
                    allow_empty: true,
                    property: None
                }),
                Rule::Number(Number {
                    debug_id: 3,
                    property: Some(num.clone()),
                    allow_underscore: false,
                }),
            ],
        });
        let optional = Optional {
            debug_id: 0,
            rule: sub_rule,
        };

        let text = "2";
        let mut tokens = vec![];
        let s = TokenizerState::new();
        let res = optional.parse(&mut tokens, &s,
            &ReadToken::new(&text, 0), &[]);

        // The optional fails without consuming input, keeps the
        // original state and reports the text rule's error.
        assert_eq!(res, (Range::new(0, 0), TokenizerState(0),
            Some(Range::new(0, 0).wrap(ParseError::ExpectedText(2)))));
        assert_eq!(tokens.len(), 0);
    }
}