use range::Range;
use read_token::ReadToken;

use super::{
    ret_err,
    update,
    ParseResult,
};
use {
    DebugId,
    MetaData,
    Rule,
};
use tokenizer::TokenizerState;

/// Stores information about a sequence of rules.
#[derive(Clone, Debug, PartialEq)]
pub struct Sequence {
    /// The sequential rules.
    pub args: Vec<Rule>,
    /// A debug id to track down the rule generating an error.
    pub debug_id: DebugId,
}

impl Sequence {
    /// Parses a sequence of rules, applying each in order.
    /// Fails if any sub rule fails.
    pub fn parse(
        &self,
        tokens: &mut Vec<Range<MetaData>>,
        state: &TokenizerState,
        read_token: &ReadToken,
        refs: &[Rule]
    ) -> ParseResult<TokenizerState> {
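        // Keep the start position; `read_token` is shadowed below by a
        // mutable copy that advances as sub rules consume input.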
        let start = read_token;
        let mut read_token = *start;
        let mut state = state.clone();
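        // Tracks the deepest error reported by a successful sub rule,
        // used to improve the message if a later sub rule fails.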
        let mut opt_error = None;
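        // Apply each sub rule in order, threading the tokenizer state
        // from one sub rule to the next.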
        for sub_rule in &self.args {
            state = match sub_rule.parse(tokens, &state, &read_token, refs) {
                Ok((range, state, err)) => {
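                    // Advance past the consumed range and keep the
                    // deepest error seen so far.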
                    update(range, err, &mut read_token, &mut opt_error);
                    state
                }
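                // Any failing sub rule fails the whole sequence;
                // `ret_err` picks the deeper of the two errors.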
                Err(err) => {
                    return Err(ret_err(err, opt_error));
                }
            };
        }
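        // All sub rules succeeded: return the total consumed range,
        // the final state, and the deepest error for the caller.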
        Ok((read_token.subtract(start), state, opt_error))
    }
}
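
#[cfg(test)]
mod tests {
    // A minimal sketch of driving `Sequence::parse` directly. It assumes
    // the `TokenizerState::new()` and `ReadToken::new(src, offset)`
    // constructors from the surrounding crates; the crate's real tests
    // usually go through the top-level parser instead.
    use read_token::ReadToken;
    use tokenizer::TokenizerState;
    use super::Sequence;

    #[test]
    fn empty_sequence_consumes_nothing() {
        let sequence = Sequence { args: vec![], debug_id: 0 };
        let mut tokens = vec![];
        let state = TokenizerState::new();
        let read_token = ReadToken::new("abc", 0);
        let (range, _, err) = sequence
            .parse(&mut tokens, &state, &read_token, &[])
            .unwrap();
        // With no sub rules the loop body never runs, so nothing is
        // consumed and no error is recorded.
        assert_eq!(range.length, 0);
        assert!(err.is_none());
    }
}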