1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
use read_token::ReadToken;
use range::Range;
use std::sync::Arc;
use super::{
ParseResult,
};
use {
DebugId,
MetaData,
ParseError,
};
use tokenizer::{ read_data, TokenizerState };
/// Stores information about a text (quoted string) rule.
#[derive(Clone, Debug, PartialEq)]
pub struct Text {
    /// Whether an empty string literal (`""`) is accepted by `parse`.
    pub allow_empty: bool,
    /// The property name under which the parsed text is stored as
    /// `MetaData::String`; when `None` the text is matched but not recorded.
    pub property: Option<Arc<String>>,
    /// Debug id used to track down the rule that generated a `ParseError`.
    pub debug_id: DebugId,
}
impl Text {
    /// Parses a quoted string token from the current read position.
    ///
    /// On success, returns the consumed range, the resulting tokenizer state
    /// (updated via `read_data` when `property` is set, otherwise a clone of
    /// the input state) and no sub-rule result. Errors are reported as:
    /// `ExpectedText` when no string token is present, `EmptyTextNotAllowed`
    /// when the literal is empty and `allow_empty` is false, and
    /// `ParseStringError` when the literal itself fails to parse.
    pub fn parse(
        &self,
        tokens: &mut Vec<Range<MetaData>>,
        state: &TokenizerState,
        read_token: &ReadToken
    ) -> ParseResult<TokenizerState> {
        // Guard: a string token must start here.
        let range = match read_token.string() {
            Some(range) => range,
            None => return Err(read_token.start().wrap(
                ParseError::ExpectedText(self.debug_id))),
        };
        // Length 2 means the token is just the two quote characters,
        // i.e. an empty string literal.
        if !self.allow_empty && range.length == 2 {
            return Err(range.wrap(
                ParseError::EmptyTextNotAllowed(self.debug_id)));
        }
        // Decode the string contents (handles escapes etc.).
        let text = match read_token.parse_string(range.length) {
            Ok(text) => text,
            Err(range_err) => return Err(range_err.map(|err|
                ParseError::ParseStringError(err, self.debug_id))),
        };
        match self.property {
            // Record the text under the configured property.
            Some(ref property) => Ok((range, read_data(
                tokens,
                range.wrap(MetaData::String(property.clone(),
                    Arc::new(text))),
                state
            ), None)),
            // No property: the text is matched but not stored.
            None => Ok((range, state.clone(), None)),
        }
    }
}
#[cfg(test)]
mod tests {
    use all::*;
    use all::tokenizer::*;
    use meta_rules::Text;
    use range::Range;
    use read_token::ReadToken;
    use std::sync::Arc;

    /// Input that is not a quoted string yields `ExpectedText`
    /// at the start position with zero length.
    #[test]
    fn expected_text() {
        let source = "23";
        let mut tokens = vec![];
        let state = TokenizerState::new();
        let rule = Text {
            allow_empty: true,
            property: None,
            debug_id: 0,
        };
        let result = rule.parse(&mut tokens, &state, &ReadToken::new(&source, 0));
        assert_eq!(result,
            Err(Range::new(0, 0).wrap(ParseError::ExpectedText(0))));
    }

    /// An empty literal `""` is rejected when `allow_empty` is false;
    /// the error range covers both quote characters.
    #[test]
    fn empty_string() {
        let source = "\"\"";
        let mut tokens = vec![];
        let state = TokenizerState::new();
        let rule = Text {
            allow_empty: false,
            property: None,
            debug_id: 0,
        };
        let result = rule.parse(&mut tokens, &state, &ReadToken::new(&source, 0));
        assert_eq!(result,
            Err(Range::new(0, 2).wrap(ParseError::EmptyTextNotAllowed(0))));
    }

    /// A quoted string with a property set consumes the full literal
    /// (quotes included) and pushes a `MetaData::String` token.
    #[test]
    fn successful() {
        let source = "foo \"hello\"";
        let mut tokens = vec![];
        let state = TokenizerState::new();
        let name: Arc<String> = Arc::new("foo".into());
        let rule = Text {
            allow_empty: true,
            property: Some(Arc::clone(&name)),
            debug_id: 0,
        };
        // Start reading at offset 4, where the string literal begins.
        let result = rule.parse(&mut tokens, &state, &ReadToken::new(&source[4..], 4));
        assert_eq!(result, Ok((Range::new(4, 7), TokenizerState(1), None)));
        assert_eq!(tokens.len(), 1);
        assert_eq!(&tokens[0].data,
            &MetaData::String(Arc::clone(&name), Arc::new("hello".into())));
    }
}