use range::Range;
use read_token::ReadToken;

use super::{
    ret_err,
    err_update,
    update,
    ParseResult,
};
use {
    DebugId,
    MetaData,
    Rule,
};
use tokenizer::TokenizerState;

/// Stores information about a repeated rule.
#[derive(Clone, Debug, PartialEq)]
pub struct Repeat {
    /// The rule to repeat.
    pub rule: Rule,
    /// Whether the rule is allowed to match zero times.
    pub optional: bool,
    /// A debug id to track down the rule generating an error.
    pub debug_id: DebugId,
}

impl Repeat {
    /// Parses the rule repeatedly until it no longer matches.
    pub fn parse(
        &self,
        tokens: &mut Vec<Range<MetaData>>,
        state: &TokenizerState,
        read_token: &ReadToken,
        refs: &[Rule]
    ) -> ParseResult<TokenizerState> {
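        // Remember the starting position so the total consumed range
        // can be computed when the loop ends.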
        let start = read_token;
        let mut read_token = *start;
        let mut state = state.clone();
        let mut opt_error = None;
        let mut first = true;
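        // Apply the sub-rule as many times as it matches.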
        loop {
            state = match self.rule.parse(tokens, &state, &read_token, refs) {
                Err(err) => {
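                    // A failure on the first attempt of a non-optional repeat
                    // is a hard error; any later failure just ends the loop.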
                    if first && !self.optional {
                        return Err(ret_err(err, opt_error));
                    } else {
                        err_update(Some(err), &mut opt_error);
                        break;
                    }
                }
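                // On success, advance past the matched range and keep
                // any error around for diagnostics.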
                Ok((range, state, err)) => {
                    update(range, err, &mut read_token, &mut opt_error);
                    state
                }
            };
            first = false;
        }
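        // Report the total range consumed since `start`, the final state,
        // and the error that ended the repetition (if any).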
        Ok((read_token.subtract(start), state, opt_error))
    }
}

#[cfg(test)]
mod tests {
    use all::*;
    use all::tokenizer::*;
    use meta_rules::{ Repeat, Tag };
    use std::sync::Arc;
    use range::Range;
    use read_token::ReadToken;

    #[test]
    fn fail() {
        let text = "[a][a][a]";
        let mut tokens = vec![];
        let s = TokenizerState::new();
        let token: Arc<String> = Arc::new("(a)".into());
        let rule = Repeat {
            debug_id: 0,
            optional: false,
            rule: Rule::Tag(Tag {
                debug_id: 1,
                text: token.clone(),
                not: false,
                inverted: false,
                property: None,
            })
        };
        let res = rule.parse(&mut tokens, &s, &ReadToken::new(&text, 0), &[]);
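        // The first attempt fails and the repeat is not optional,
        // so the whole parse fails with the tag error at offset 0.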
        assert_eq!(res, Err(Range::new(0, 0).wrap(
            ParseError::ExpectedTag(token.clone(), 1))))
    }

    #[test]
    fn success() {
        let text = "(a)(a)(a)";
        let mut tokens = vec![];
        let s = TokenizerState::new();
        let token: Arc<String> = Arc::new("(a)".into());
        let rule = Repeat {
            debug_id: 0,
            optional: false,
            rule: Rule::Tag(Tag {
                debug_id: 1,
                text: token.clone(),
                not: false,
                inverted: false,
                property: None,
            })
        };
        let res = rule.parse(&mut tokens, &s, &ReadToken::new(&text, 0), &[]);
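        // All three tags match; the trailing error records the failed
        // fourth attempt at offset 9 and is informational, not fatal.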
        assert_eq!(res, Ok((Range::new(0, 9), TokenizerState(0),
            Some(Range::new(9, 0).wrap(
                ParseError::ExpectedTag(token.clone(), 1))))))
    }
}