diff --git a/saphyr/.travis.yml b/saphyr/.travis.yml index 5e1e170..3a542b8 100644 --- a/saphyr/.travis.yml +++ b/saphyr/.travis.yml @@ -7,5 +7,13 @@ rust: env: global: - secure: ZUcdcbS8xbpdII9FSPx7VtoVhEkJhWL2Hb75tDlKDHNhfXqmt1NyB9q/2qXJ5Ulp4MnYXwsI8LsDloR6gvdB4xElay3smuF/neGvMjrqcB15/2p0MSQ+kZjMsNB6mlb5kAlm8ahduXIscppmw/V+m5hn3Vo+RQz/Ng+pzv0nc8KEXPMYrfRFg+a7FaeIbRbb8ir9EfflUSqArLq2hbi2WdhM3hFMcCIAUt6DD4x5ubjEg60OnIof5FDu0mXMXzQvUfHWOeYnsNcD/DLyDnm6FuQEzk37M4EB8op2SdBUeQMQ5abR3i2rd//DZpbTTEjud0PseWohGAwTwL2aoFrqs7uYQMx+vcGlOzAyDUm4VemVUa3F2BECdzU5BiujcKOITJEVUYWongld93arQq34FuXG/TO/T1XrerxfG6LTkTkKS5Vz7W8z6Rloa99WrQLJg1ZJP6itEU7G7KsDFVgRhsg7rz4/dV/2+cV4UvIwd4HlGXKCFlH0SClqvM3/7i/qqCD0689SJW6Zip+ly38MXlGy2s/AmReEasXvFer9JkOEIuPa8QTBNAjDlw7bWXi6neQWBIZU1VhZcSssnrVmEFN8fNklShzpw5DyKCv8jPTx2O6Dw8B/LgIK8uo+eaTXiO6zz/T1c/qEdsYslvxPA2D3F+ONpPU7238ykT4eRog= +script: + - | + if [ "$TRAVIS_RUST_VERSION" = nightly ]; then + cargo build --features clippy --verbose + else + cargo build --verbose + fi + - cargo test --verbose after_script: - curl http://www.rust-ci.org/artifacts/put?t=$RUSTCI_TOKEN | sh diff --git a/saphyr/Cargo.toml b/saphyr/Cargo.toml index d427e4b..46d8c7a 100644 --- a/saphyr/Cargo.toml +++ b/saphyr/Cargo.toml @@ -7,3 +7,6 @@ documentation = "http://chyh1990.github.io/yaml-rust/doc/yaml_rust/" license = "MIT/Apache-2.0" description = "The missing YAML 1.2 parser for rust" repository = "https://github.com/chyh1990/yaml-rust" + +[dependencies] +clippy = { version = "^0.*", optional = true } diff --git a/saphyr/src/emitter.rs b/saphyr/src/emitter.rs index 8810c95..fe9a6e1 100644 --- a/saphyr/src/emitter.rs +++ b/saphyr/src/emitter.rs @@ -114,8 +114,8 @@ impl<'a> YamlEmitter<'a> { } fn emit_node(&mut self, node: &Yaml) -> EmitResult { - match node { - &Yaml::Array(ref v) => { + match *node { + Yaml::Array(ref v) => { if v.is_empty() { try!(write!(self.writer, "[]")); Ok(()) @@ -124,21 +124,19 @@ impl<'a> YamlEmitter<'a> { try!(write!(self.writer, "\n")); } self.level += 1; - let mut cnt = 0usize; - for x in v { + for (cnt, x) in v.iter().enumerate() { + if cnt > 0 { + try!(write!(self.writer, "\n")); + } try!(self.write_indent()); try!(write!(self.writer, "- ")); try!(self.emit_node(x)); - cnt += 1; - if cnt < v.len() { - try!(write!(self.writer, "\n")); - } } self.level -= 1; Ok(()) } }, - &Yaml::Hash(ref h) => { + Yaml::Hash(ref h) => { if h.is_empty() { try!(self.writer.write_str("{}")); Ok(()) @@ -147,32 +145,30 @@ impl<'a> YamlEmitter<'a> { try!(write!(self.writer, "\n")); } self.level += 1; - let mut cnt = 0usize; - for (k, v) in h { + for (cnt, (k, v)) in h.iter().enumerate() { + if cnt > 0 { + try!(write!(self.writer, "\n")); + } try!(self.write_indent()); - match k { + match *k { // complex key is not supported - &Yaml::Array(_) | &Yaml::Hash(_) => { + Yaml::Array(_) | Yaml::Hash(_) => { return Err(EmitError::BadHashmapKey); }, _ => { try!(self.emit_node(k)); } } try!(write!(self.writer, ": ")); try!(self.emit_node(v)); - cnt += 1; - if cnt < h.len() { - try!(write!(self.writer, "\n")); - } } self.level -= 1; Ok(()) } }, - &Yaml::String(ref v) => { + Yaml::String(ref v) => { try!(escape_str(self.writer, v)); Ok(()) }, - &Yaml::Boolean(v) => { + Yaml::Boolean(v) => { if v { try!(self.writer.write_str("true")); } else { @@ -180,15 +176,15 @@ impl<'a> YamlEmitter<'a> { } Ok(()) }, - &Yaml::Integer(v) => { + Yaml::Integer(v) => { try!(write!(self.writer, "{}", v)); Ok(()) }, - &Yaml::Real(ref v) => { + Yaml::Real(ref v) => { try!(write!(self.writer, "{}", v)); Ok(()) }, - 
            &Yaml::Null | &Yaml::BadValue => {
+            Yaml::Null | Yaml::BadValue => {
                 try!(write!(self.writer, "~"));
                 Ok(())
             },
diff --git a/saphyr/src/lib.rs b/saphyr/src/lib.rs
index 1cba925..dc66adc 100644
--- a/saphyr/src/lib.rs
+++ b/saphyr/src/lib.rs
@@ -36,6 +36,12 @@
 //!
 //! ```

+#![cfg_attr(feature="clippy", feature(plugin))]
+#![cfg_attr(feature="clippy", plugin(clippy))]
+#![cfg_attr(feature="clippy", deny(clippy))]
+#![cfg_attr(feature="clippy", warn(cyclomatic_complexity))]
+#![cfg_attr(feature="clippy", allow(match_same_arms))]
+
 pub mod yaml;
 pub mod scanner;
 pub mod parser;
@@ -89,7 +95,7 @@ mod tests {
             emitter.dump(doc).unwrap();
         }

-        assert!(writer.len() > 0);
+        assert!(!writer.is_empty());
     }

     fn try_fail(s: &str) -> Result<Vec<Yaml>, ScanError> {
diff --git a/saphyr/src/parser.rs b/saphyr/src/parser.rs
index 9a11abf..65924d7 100644
--- a/saphyr/src/parser.rs
+++ b/saphyr/src/parser.rs
@@ -55,11 +55,11 @@ pub enum Event {
 impl Event {
     fn empty_scalar() -> Event {
         // a null scalar
-        Event::Scalar("~".to_string(), TScalarStyle::Plain, 0, None)
+        Event::Scalar("~".to_owned(), TScalarStyle::Plain, 0, None)
     }

     fn empty_scalar_with_anchor(anchor: usize, tag: TokenType) -> Event {
-        Event::Scalar("".to_string(), TScalarStyle::Plain, anchor, Some(tag))
+        Event::Scalar("".to_owned(), TScalarStyle::Plain, anchor, Some(tag))
     }
 }

@@ -179,10 +179,7 @@ impl<T: Iterator<Item=char>> Parser<T> {

     fn load_node<R: EventReceiver>(&mut self, first_ev: &Event, recv: &mut R) -> Result<(), ScanError> {
         match *first_ev {
-            Event::Alias(..) => {
-                Ok(())
-            },
-            Event::Scalar(..) => {
+            Event::Alias(..) | Event::Scalar(..) => {
                 Ok(())
             },
             Event::SequenceStart(_) => {
@@ -270,12 +267,12 @@ impl<T: Iterator<Item=char>> Parser<T> {
         let tok = try!(self.peek());

         match tok.1 {
-            TokenType::StreamStartToken(_) => {
+            TokenType::StreamStart(_) => {
                 self.state = State::ImplicitDocumentStart;
                 self.skip();
                 Ok(Event::StreamStart)
             },
-            _ => return Err(ScanError::new(tok.0,
+            _ => Err(ScanError::new(tok.0,
                     "did not find expected <stream-start>")),
         }
     }
@@ -283,26 +280,21 @@ impl<T: Iterator<Item=char>> Parser<T> {
     fn document_start(&mut self, implicit: bool) -> ParseResult {
         let mut tok = try!(self.peek());
         if !implicit {
-            loop {
-                match tok.1 {
-                    TokenType::DocumentEndToken => {
-                        self.skip();
-                        tok = try!(self.peek());
-                    },
-                    _ => break
-                }
+            while let TokenType::DocumentEnd = tok.1 {
+                self.skip();
+                tok = try!(self.peek());
             }
         }

         match tok.1 {
-            TokenType::StreamEndToken => {
+            TokenType::StreamEnd => {
                 self.state = State::End;
                 self.skip();
-                return Ok(Event::StreamEnd);
+                Ok(Event::StreamEnd)
             },
-            TokenType::VersionDirectiveToken(..)
-                | TokenType::TagDirectiveToken(..)
-                | TokenType::DocumentStartToken => {
+            TokenType::VersionDirective(..)
+                | TokenType::TagDirective(..)
+                | TokenType::DocumentStart => {
                 // explicit document
                 self._explict_document_start()
             },
@@ -323,14 +315,14 @@ impl<T: Iterator<Item=char>> Parser<T> {
         loop {
             let tok = try!(self.peek());
             match tok.1 {
-                TokenType::VersionDirectiveToken(_, _) => {
+                TokenType::VersionDirective(_, _) => {
                     // XXX parsing with warning according to spec
                     //if major != 1 || minor > 2 {
                     //    return Err(ScanError::new(tok.0,
                     //        "found incompatible YAML document"));
                     //}
                 },
-                TokenType::TagDirectiveToken(..) => {
+                TokenType::TagDirective(..)
=> { // TODO add tag directive }, _ => break @@ -344,7 +336,7 @@ impl> Parser { fn _explict_document_start(&mut self) -> ParseResult { try!(self.parser_process_directives()); let tok = try!(self.peek()); - if tok.1 != TokenType::DocumentStartToken { + if tok.1 != TokenType::DocumentStart { return Err(ScanError::new(tok.0, "did not find expected ")); } self.push_state(State::DocumentEnd); @@ -356,11 +348,11 @@ impl> Parser { fn document_content(&mut self) -> ParseResult { let tok = try!(self.peek()); match tok.1 { - TokenType::VersionDirectiveToken(..) - |TokenType::TagDirectiveToken(..) - |TokenType::DocumentStartToken - |TokenType::DocumentEndToken - |TokenType::StreamEndToken => { + TokenType::VersionDirective(..) + |TokenType::TagDirective(..) + |TokenType::DocumentStart + |TokenType::DocumentEnd + |TokenType::StreamEnd => { self.pop_state(); // empty scalar Ok(Event::empty_scalar()) @@ -376,12 +368,9 @@ impl> Parser { let tok = try!(self.peek()); let _start_mark = tok.0; - match tok.1 { - TokenType::DocumentEndToken => { - self.skip(); - _implicit = false; - } - _ => {} + if let TokenType::DocumentEnd = tok.1 { + self.skip(); + _implicit = false; } // TODO tag handling @@ -389,7 +378,7 @@ impl> Parser { Ok(Event::DocumentEnd) } - fn register_anchor(&mut self, name: &String, _: &Marker) -> Result { + fn register_anchor(&mut self, name: &str, _: &Marker) -> Result { // anchors can be overrided/reused // if self.anchors.contains_key(name) { // return Err(ScanError::new(*mark, @@ -397,7 +386,7 @@ impl> Parser { // } let new_id = self.anchor_id; self.anchor_id += 1; - self.anchors.insert(name.clone(), new_id); + self.anchors.insert(name.to_owned(), new_id); Ok(new_id) } @@ -406,7 +395,7 @@ impl> Parser { let mut anchor_id = 0; let mut tag = None; match tok.1 { - TokenType::AliasToken(name) => { + TokenType::Alias(name) => { self.pop_state(); self.skip(); match self.anchors.get(&name) { @@ -414,21 +403,21 @@ impl> Parser { Some(id) => return Ok(Event::Alias(*id)) } }, - TokenType::AnchorToken(name) => { + TokenType::Anchor(name) => { anchor_id = try!(self.register_anchor(&name, &tok.0)); self.skip(); tok = try!(self.peek()); - if let TokenType::TagToken(_, _) = tok.1 { + if let TokenType::Tag(_, _) = tok.1 { tag = Some(tok.1); self.skip(); tok = try!(self.peek()); } }, - TokenType::TagToken(..) => { + TokenType::Tag(..) 
=> { tag = Some(tok.1); self.skip(); tok = try!(self.peek()); - if let TokenType::AnchorToken(name) = tok.1 { + if let TokenType::Anchor(name) = tok.1 { anchor_id = try!(self.register_anchor(&name, &tok.0)); self.skip(); tok = try!(self.peek()); @@ -437,28 +426,28 @@ impl> Parser { _ => {} } match tok.1 { - TokenType::BlockEntryToken if indentless_sequence => { + TokenType::BlockEntry if indentless_sequence => { self.state = State::IndentlessSequenceEntry; Ok(Event::SequenceStart(anchor_id)) }, - TokenType::ScalarToken(style, v) => { + TokenType::Scalar(style, v) => { self.pop_state(); self.skip(); Ok(Event::Scalar(v, style, anchor_id, tag)) }, - TokenType::FlowSequenceStartToken => { + TokenType::FlowSequenceStart => { self.state = State::FlowSequenceFirstEntry; Ok(Event::SequenceStart(anchor_id)) }, - TokenType::FlowMappingStartToken => { + TokenType::FlowMappingStart => { self.state = State::FlowMappingFirstKey; Ok(Event::MappingStart(anchor_id)) }, - TokenType::BlockSequenceStartToken if block => { + TokenType::BlockSequenceStart if block => { self.state = State::BlockSequenceFirstEntry; Ok(Event::SequenceStart(anchor_id)) }, - TokenType::BlockMappingStartToken if block => { + TokenType::BlockMappingStart if block => { self.state = State::BlockMappingFirstKey; Ok(Event::MappingStart(anchor_id)) }, @@ -472,7 +461,7 @@ impl> Parser { } fn block_mapping_key(&mut self, first: bool) -> ParseResult { - // skip BlockMappingStartToken + // skip BlockMappingStart if first { let _ = try!(self.peek()); //self.marks.push(tok.0); @@ -480,13 +469,13 @@ impl> Parser { } let tok = try!(self.peek()); match tok.1 { - TokenType::KeyToken => { + TokenType::Key => { self.skip(); let tok = try!(self.peek()); match tok.1 { - TokenType::KeyToken - | TokenType::ValueToken - | TokenType::BlockEndToken + TokenType::Key + | TokenType::Value + | TokenType::BlockEnd => { self.state = State::BlockMappingValue; // empty scalar @@ -499,11 +488,11 @@ impl> Parser { } }, // XXX(chenyh): libyaml failed to parse spec 1.2, ex8.18 - TokenType::ValueToken => { + TokenType::Value => { self.state = State::BlockMappingValue; Ok(Event::empty_scalar()) }, - TokenType::BlockEndToken => { + TokenType::BlockEnd => { self.pop_state(); self.skip(); Ok(Event::MappingEnd) @@ -517,11 +506,11 @@ impl> Parser { fn block_mapping_value(&mut self) -> ParseResult { let tok = try!(self.peek()); match tok.1 { - TokenType::ValueToken => { + TokenType::Value => { self.skip(); let tok = try!(self.peek()); match tok.1 { - TokenType::KeyToken | TokenType::ValueToken | TokenType::BlockEndToken + TokenType::Key | TokenType::Value | TokenType::BlockEnd => { self.state = State::BlockMappingKey; // empty scalar @@ -548,9 +537,9 @@ impl> Parser { } let mut tok = try!(self.peek()); - if tok.1 != TokenType::FlowMappingEndToken { + if tok.1 != TokenType::FlowMappingEnd { if !first { - if tok.1 == TokenType::FlowEntryToken { + if tok.1 == TokenType::FlowEntry { self.skip(); tok = try!(self.peek()); } else { @@ -559,13 +548,13 @@ impl> Parser { } } - if tok.1 == TokenType::KeyToken { + if tok.1 == TokenType::Key { self.skip(); tok = try!(self.peek()); match tok.1 { - TokenType::ValueToken - | TokenType::FlowEntryToken - | TokenType::FlowMappingEndToken => { + TokenType::Value + | TokenType::FlowEntry + | TokenType::FlowMappingEnd => { self.state = State::FlowMappingValue; return Ok(Event::empty_scalar()); }, @@ -575,10 +564,10 @@ impl> Parser { } } // XXX libyaml fail ex 7.3, empty key - } else if tok.1 == TokenType::ValueToken { + } else if tok.1 == 
TokenType::Value { self.state = State::FlowMappingValue; return Ok(Event::empty_scalar()); - } else if tok.1 != TokenType::FlowMappingEndToken { + } else if tok.1 != TokenType::FlowMappingEnd { self.push_state(State::FlowMappingEmptyValue); return self.parse_node(false, false); } @@ -596,12 +585,12 @@ impl> Parser { return Ok(Event::empty_scalar()); } - if tok.1 == TokenType::ValueToken { + if tok.1 == TokenType::Value { self.skip(); let tok = try!(self.peek()); match tok.1 { - TokenType::FlowEntryToken - | TokenType::FlowMappingEndToken => { }, + TokenType::FlowEntry + | TokenType::FlowMappingEnd => { }, _ => { self.push_state(State::FlowMappingKey); return self.parse_node(false, false); @@ -614,7 +603,7 @@ impl> Parser { } fn flow_sequence_entry(&mut self, first: bool) -> ParseResult { - // skip FlowMappingStartToken + // skip FlowMappingStart if first { let _ = try!(self.peek()); //self.marks.push(tok.0); @@ -622,12 +611,12 @@ impl> Parser { } let mut tok = try!(self.peek()); match tok.1 { - TokenType::FlowSequenceEndToken => { + TokenType::FlowSequenceEnd => { self.pop_state(); self.skip(); return Ok(Event::SequenceEnd); }, - TokenType::FlowEntryToken if !first => { + TokenType::FlowEntry if !first => { self.skip(); tok = try!(self.peek()); }, @@ -638,12 +627,12 @@ impl> Parser { _ => { /* next */ } } match tok.1 { - TokenType::FlowSequenceEndToken => { + TokenType::FlowSequenceEnd => { self.pop_state(); self.skip(); Ok(Event::SequenceEnd) }, - TokenType::KeyToken => { + TokenType::Key => { self.state = State::FlowSequenceEntryMappingKey; self.skip(); Ok(Event::MappingStart(0)) @@ -657,7 +646,7 @@ impl> Parser { fn indentless_sequence_entry(&mut self) -> ParseResult { let mut tok = try!(self.peek()); - if tok.1 != TokenType::BlockEntryToken { + if tok.1 != TokenType::BlockEntry { self.pop_state(); return Ok(Event::SequenceEnd); } @@ -665,10 +654,10 @@ impl> Parser { self.skip(); tok = try!(self.peek()); match tok.1 { - TokenType::BlockEntryToken - | TokenType::KeyToken - | TokenType::ValueToken - | TokenType::BlockEndToken => { + TokenType::BlockEntry + | TokenType::Key + | TokenType::Value + | TokenType::BlockEnd => { self.state = State::IndentlessSequenceEntry; Ok(Event::empty_scalar()) }, @@ -688,17 +677,17 @@ impl> Parser { } let mut tok = try!(self.peek()); match tok.1 { - TokenType::BlockEndToken => { + TokenType::BlockEnd => { self.pop_state(); self.skip(); Ok(Event::SequenceEnd) }, - TokenType::BlockEntryToken => { + TokenType::BlockEntry => { self.skip(); tok = try!(self.peek()); match tok.1 { - TokenType::BlockEntryToken - | TokenType::BlockEndToken => { + TokenType::BlockEntry + | TokenType::BlockEnd => { self.state = State::BlockSequenceEntry; Ok(Event::empty_scalar()) }, @@ -719,9 +708,9 @@ impl> Parser { let tok = try!(self.peek()); match tok.1 { - TokenType::ValueToken - | TokenType::FlowEntryToken - | TokenType::FlowSequenceEndToken => { + TokenType::Value + | TokenType::FlowEntry + | TokenType::FlowSequenceEnd => { self.skip(); self.state = State::FlowSequenceEntryMappingValue; Ok(Event::empty_scalar()) @@ -737,13 +726,13 @@ impl> Parser { let tok = try!(self.peek()); match tok.1 { - TokenType::ValueToken => { + TokenType::Value => { self.skip(); let tok = try!(self.peek()); self.state = State::FlowSequenceEntryMappingValue; match tok.1 { - TokenType::FlowEntryToken - | TokenType::FlowSequenceEndToken => { + TokenType::FlowEntry + | TokenType::FlowSequenceEnd => { self.state = State::FlowSequenceEntryMappingEnd; Ok(Event::empty_scalar()) }, diff --git 
a/saphyr/src/scanner.rs b/saphyr/src/scanner.rs index 4cfe2bc..d0b5e2b 100644 --- a/saphyr/src/scanner.rs +++ b/saphyr/src/scanner.rs @@ -45,7 +45,7 @@ impl ScanError { pub fn new(loc: Marker, info: &str) -> ScanError { ScanError { mark: loc, - info: info.to_string() + info: info.to_owned() } } } @@ -71,30 +71,30 @@ impl fmt::Display for ScanError { #[derive(Clone, PartialEq, Debug, Eq)] pub enum TokenType { NoToken, - StreamStartToken(TEncoding), - StreamEndToken, + StreamStart(TEncoding), + StreamEnd, /// major, minor - VersionDirectiveToken(u32, u32), + VersionDirective(u32, u32), /// handle, prefix - TagDirectiveToken(String, String), - DocumentStartToken, - DocumentEndToken, - BlockSequenceStartToken, - BlockMappingStartToken, - BlockEndToken, - FlowSequenceStartToken, - FlowSequenceEndToken, - FlowMappingStartToken, - FlowMappingEndToken, - BlockEntryToken, - FlowEntryToken, - KeyToken, - ValueToken, - AliasToken(String), - AnchorToken(String), + TagDirective(String, String), + DocumentStart, + DocumentEnd, + BlockSequenceStart, + BlockMappingStart, + BlockEnd, + FlowSequenceStart, + FlowSequenceEnd, + FlowMappingStart, + FlowMappingEnd, + BlockEntry, + FlowEntry, + Key, + Value, + Alias(String), + Anchor(String), /// handle, suffix - TagToken(String, String), - ScalarToken(TScalarStyle, String) + Tag(String, String), + Scalar(TScalarStyle, String) } #[derive(Clone, PartialEq, Debug, Eq)] @@ -233,7 +233,7 @@ impl> Scanner { } } - #[inline(always)] + #[inline] fn lookahead(&mut self, count: usize) { if self.buffer.len() >= count { return; @@ -348,7 +348,7 @@ impl> Scanner { && self.buffer[1] == '-' && self.buffer[2] == '-' && is_blankz(self.buffer[3]) { - try!(self.fetch_document_indicator(TokenType::DocumentStartToken)); + try!(self.fetch_document_indicator(TokenType::DocumentStart)); return Ok(()); } @@ -357,17 +357,17 @@ impl> Scanner { && self.buffer[1] == '.' && self.buffer[2] == '.' && is_blankz(self.buffer[3]) { - try!(self.fetch_document_indicator(TokenType::DocumentEndToken)); + try!(self.fetch_document_indicator(TokenType::DocumentEnd)); return Ok(()); } let c = self.buffer[0]; let nc = self.buffer[1]; match c { - '[' => self.fetch_flow_collection_start(TokenType::FlowSequenceStartToken), - '{' => self.fetch_flow_collection_start(TokenType::FlowMappingStartToken), - ']' => self.fetch_flow_collection_end(TokenType::FlowSequenceEndToken), - '}' => self.fetch_flow_collection_end(TokenType::FlowMappingEndToken), + '[' => self.fetch_flow_collection_start(TokenType::FlowSequenceStart), + '{' => self.fetch_flow_collection_start(TokenType::FlowMappingStart), + ']' => self.fetch_flow_collection_end(TokenType::FlowSequenceEnd), + '}' => self.fetch_flow_collection_end(TokenType::FlowMappingEnd), ',' => self.fetch_flow_entry(), '-' if is_blankz(nc) => self.fetch_block_entry(), '?' if self.flow_level > 0 || is_blankz(nc) => self.fetch_key(), @@ -386,7 +386,7 @@ impl> Scanner { // plain scalar '-' if !is_blankz(nc) => self.fetch_plain_scalar(), ':' | '?' 
if !is_blankz(nc) && self.flow_level == 0 => self.fetch_plain_scalar(), - '%' | '@' | '`' => return Err(ScanError::new(self.mark, + '%' | '@' | '`' => Err(ScanError::new(self.mark, &format!("unexpected character: `{}'", c))), _ => self.fetch_plain_scalar(), } @@ -404,9 +404,8 @@ impl> Scanner { self.token_available = false; self.tokens_parsed += 1; - match t.1 { - TokenType::StreamEndToken => self.stream_end_produced = true, - _ => {} + if let TokenType::StreamEnd = t.1 { + self.stream_end_produced = true; } Ok(Some(t)) } @@ -473,7 +472,7 @@ impl> Scanner { self.indent = -1; self.stream_start_produced = true; self.allow_simple_key(); - self.tokens.push_back(Token(mark, TokenType::StreamStartToken(TEncoding::Utf8))); + self.tokens.push_back(Token(mark, TokenType::StreamStart(TEncoding::Utf8))); self.simple_keys.push(SimpleKey::new(Marker::new(0,0,0))); } @@ -488,7 +487,7 @@ impl> Scanner { try!(self.remove_simple_key()); self.disallow_simple_key(); - self.tokens.push_back(Token(self.mark, TokenType::StreamEndToken)); + self.tokens.push_back(Token(self.mark, TokenType::StreamEnd)); Ok(()) } @@ -526,7 +525,7 @@ impl> Scanner { self.lookahead(1); } // XXX return an empty TagDirective token - Token(start_mark, TokenType::TagDirectiveToken(String::new(), String::new())) + Token(start_mark, TokenType::TagDirective(String::new(), String::new())) // return Err(ScanError::new(start_mark, // "while scanning a directive, found unknown directive name")) } @@ -578,7 +577,7 @@ impl> Scanner { let minor = try!(self.scan_version_directive_number(mark)); - Ok(Token(*mark, TokenType::VersionDirectiveToken(major, minor))) + Ok(Token(*mark, TokenType::VersionDirective(major, minor))) } fn scan_directive_name(&mut self) -> Result { @@ -652,7 +651,7 @@ impl> Scanner { Err(ScanError::new(*mark, "while scanning TAG, did not find expected whitespace or line break")) } else { - Ok(Token(*mark, TokenType::TagDirectiveToken(handle, prefix))) + Ok(Token(*mark, TokenType::TagDirective(handle, prefix))) } } @@ -697,12 +696,12 @@ impl> Scanner { suffix = try!(self.scan_tag_uri(false, secondary, &String::new(), &start_mark)); } else { suffix = try!(self.scan_tag_uri(false, false, &handle, &start_mark)); - handle = "!".to_string(); + handle = "!".to_owned(); // A special case: the '!' tag. Set the handle to '' and the // suffix to '!'. - if suffix.len() == 0 { + if suffix.is_empty() { handle.clear(); - suffix = "!".to_string(); + suffix = "!".to_owned(); } } } @@ -710,7 +709,7 @@ impl> Scanner { self.lookahead(1); if is_blankz(self.ch()) { // XXX: ex 7.2, an empty scalar can follow a secondary tag - Ok(Token(start_mark, TokenType::TagToken(handle, suffix))) + Ok(Token(start_mark, TokenType::Tag(handle, suffix))) } else { Err(ScanError::new(start_mark, "while scanning a tag, did not find expected whitespace or line break")) @@ -739,20 +738,18 @@ impl> Scanner { if self.ch() == '!' { string.push(self.ch()); self.skip(); - } else { + } else if directive && string != "!" { // It's either the '!' tag or not really a tag handle. If it's a %TAG // directive, it's an error. If it's a tag token, it must be a part of // URI. - if directive && string != "!" 
{ - return Err(ScanError::new(*mark, - "while parsing a tag directive, did not find expected '!'")); - } + return Err(ScanError::new(*mark, + "while parsing a tag directive, did not find expected '!'")); } Ok(string) } fn scan_tag_uri(&mut self, directive: bool, _is_secondary: bool, - head: &String, mark: &Marker) -> Result { + head: &str, mark: &Marker) -> Result { let mut length = head.len(); let mut string = String::new(); @@ -883,9 +880,9 @@ impl> Scanner { } if alias { - Ok(Token(start_mark, TokenType::AliasToken(string))) + Ok(Token(start_mark, TokenType::Alias(string))) } else { - Ok(Token(start_mark, TokenType::AnchorToken(string))) + Ok(Token(start_mark, TokenType::Anchor(string))) } } @@ -924,7 +921,7 @@ impl> Scanner { let start_mark = self.mark; self.skip(); - self.tokens.push_back(Token(start_mark, TokenType::FlowEntryToken)); + self.tokens.push_back(Token(start_mark, TokenType::FlowEntry)); Ok(()) } @@ -949,7 +946,7 @@ impl> Scanner { let mark = self.mark; // generate BLOCK-SEQUENCE-START if indented - self.roll_indent(mark.col, None, TokenType::BlockSequenceStartToken, mark); + self.roll_indent(mark.col, None, TokenType::BlockSequenceStart, mark); } else { // - * only allowed in block unreachable!(); @@ -960,7 +957,7 @@ impl> Scanner { let start_mark = self.mark; self.skip(); - self.tokens.push_back(Token(start_mark, TokenType::BlockEntryToken)); + self.tokens.push_back(Token(start_mark, TokenType::BlockEntry)); Ok(()) } @@ -1119,9 +1116,9 @@ impl> Scanner { } if literal { - Ok(Token(start_mark, TokenType::ScalarToken(TScalarStyle::Literal, string))) + Ok(Token(start_mark, TokenType::Scalar(TScalarStyle::Literal, string))) } else { - Ok(Token(start_mark, TokenType::ScalarToken(TScalarStyle::Foled, string))) + Ok(Token(start_mark, TokenType::Scalar(TScalarStyle::Foled, string))) } } @@ -1353,9 +1350,9 @@ impl> Scanner { self.skip(); if single { - Ok(Token(start_mark, TokenType::ScalarToken(TScalarStyle::SingleQuoted, string))) + Ok(Token(start_mark, TokenType::Scalar(TScalarStyle::SingleQuoted, string))) } else { - Ok(Token(start_mark, TokenType::ScalarToken(TScalarStyle::DoubleQuoted, string))) + Ok(Token(start_mark, TokenType::Scalar(TScalarStyle::DoubleQuoted, string))) } } @@ -1477,7 +1474,7 @@ impl> Scanner { self.allow_simple_key(); } - Ok(Token(start_mark, TokenType::ScalarToken(TScalarStyle::Plain, string))) + Ok(Token(start_mark, TokenType::Scalar(TScalarStyle::Plain, string))) } fn fetch_key(&mut self) -> ScanResult { @@ -1488,7 +1485,7 @@ impl> Scanner { return Err(ScanError::new(self.mark, "mapping keys are not allowed in this context")); } self.roll_indent(start_mark.col, None, - TokenType::BlockMappingStartToken, start_mark); + TokenType::BlockMappingStart, start_mark); } try!(self.remove_simple_key()); @@ -1500,7 +1497,7 @@ impl> Scanner { } self.skip(); - self.tokens.push_back(Token(start_mark, TokenType::KeyToken)); + self.tokens.push_back(Token(start_mark, TokenType::Key)); Ok(()) } @@ -1509,13 +1506,13 @@ impl> Scanner { let start_mark = self.mark; if sk.possible { // insert simple key - let tok = Token(sk.mark, TokenType::KeyToken); + let tok = Token(sk.mark, TokenType::Key); let tokens_parsed = self.tokens_parsed; self.insert_token(sk.token_number - tokens_parsed, tok); // Add the BLOCK-MAPPING-START token if needed. 
self.roll_indent(sk.mark.col, Some(sk.token_number), - TokenType::BlockMappingStartToken, start_mark); + TokenType::BlockMappingStart, start_mark); self.simple_keys.last_mut().unwrap().possible = false; self.disallow_simple_key(); @@ -1528,7 +1525,7 @@ impl> Scanner { } self.roll_indent(start_mark.col, None, - TokenType::BlockMappingStartToken, start_mark); + TokenType::BlockMappingStart, start_mark); } if self.flow_level == 0 { @@ -1538,7 +1535,7 @@ impl> Scanner { } } self.skip(); - self.tokens.push_back(Token(start_mark, TokenType::ValueToken)); + self.tokens.push_back(Token(start_mark, TokenType::Value)); Ok(()) } @@ -1565,7 +1562,7 @@ impl> Scanner { return; } while self.indent > col { - self.tokens.push_back(Token(self.mark, TokenType::BlockEndToken)); + self.tokens.push_back(Token(self.mark, TokenType::BlockEnd)); self.indent = self.indents.pop().unwrap(); } } @@ -1588,10 +1585,8 @@ impl> Scanner { fn remove_simple_key(&mut self) -> ScanResult { let last = self.simple_keys.last_mut().unwrap(); - if last.possible { - if last.required { - return Err(ScanError::new(self.mark, "simple key expected")); - } + if last.possible && last.required { + return Err(ScanError::new(self.mark, "simple key expected")); } last.possible = false; @@ -1620,7 +1615,7 @@ macro_rules! next_scalar { ($p:ident, $tk:expr, $v:expr) => {{ let tok = $p.next().unwrap(); match tok.1 { - ScalarToken(style, ref v) => { + Scalar(style, ref v) => { assert_eq!(style, $tk); assert_eq!(v, $v); }, @@ -1640,8 +1635,8 @@ macro_rules! end { fn test_empty() { let s = ""; let mut p = Scanner::new(s.chars()); - next!(p, StreamStartToken(..)); - next!(p, StreamEndToken); + next!(p, StreamStart(..)); + next!(p, StreamEnd); end!(p); } @@ -1649,9 +1644,9 @@ macro_rules! end { fn test_scalar() { let s = "a scalar"; let mut p = Scanner::new(s.chars()); - next!(p, StreamStartToken(..)); - next!(p, ScalarToken(TScalarStyle::Plain, _)); - next!(p, StreamEndToken); + next!(p, StreamStart(..)); + next!(p, Scalar(TScalarStyle::Plain, _)); + next!(p, StreamEnd); end!(p); } @@ -1663,11 +1658,11 @@ macro_rules! end { ... "; let mut p = Scanner::new(s.chars()); - next!(p, StreamStartToken(..)); - next!(p, DocumentStartToken); - next!(p, ScalarToken(TScalarStyle::SingleQuoted, _)); - next!(p, DocumentEndToken); - next!(p, StreamEndToken); + next!(p, StreamStart(..)); + next!(p, DocumentStart); + next!(p, Scalar(TScalarStyle::SingleQuoted, _)); + next!(p, DocumentEnd); + next!(p, StreamEnd); end!(p); } @@ -1682,13 +1677,13 @@ macro_rules! end { 'a scalar' "; let mut p = Scanner::new(s.chars()); - next!(p, StreamStartToken(..)); - next!(p, ScalarToken(TScalarStyle::SingleQuoted, _)); - next!(p, DocumentStartToken); - next!(p, ScalarToken(TScalarStyle::SingleQuoted, _)); - next!(p, DocumentStartToken); - next!(p, ScalarToken(TScalarStyle::SingleQuoted, _)); - next!(p, StreamEndToken); + next!(p, StreamStart(..)); + next!(p, Scalar(TScalarStyle::SingleQuoted, _)); + next!(p, DocumentStart); + next!(p, Scalar(TScalarStyle::SingleQuoted, _)); + next!(p, DocumentStart); + next!(p, Scalar(TScalarStyle::SingleQuoted, _)); + next!(p, StreamEnd); end!(p); } @@ -1696,15 +1691,15 @@ macro_rules! 
end { fn test_a_flow_sequence() { let s = "[item 1, item 2, item 3]"; let mut p = Scanner::new(s.chars()); - next!(p, StreamStartToken(..)); - next!(p, FlowSequenceStartToken); + next!(p, StreamStart(..)); + next!(p, FlowSequenceStart); next_scalar!(p, TScalarStyle::Plain, "item 1"); - next!(p, FlowEntryToken); - next!(p, ScalarToken(TScalarStyle::Plain, _)); - next!(p, FlowEntryToken); - next!(p, ScalarToken(TScalarStyle::Plain, _)); - next!(p, FlowSequenceEndToken); - next!(p, StreamEndToken); + next!(p, FlowEntry); + next!(p, Scalar(TScalarStyle::Plain, _)); + next!(p, FlowEntry); + next!(p, Scalar(TScalarStyle::Plain, _)); + next!(p, FlowSequenceEnd); + next!(p, StreamEnd); end!(p); } @@ -1718,20 +1713,20 @@ macro_rules! end { } "; let mut p = Scanner::new(s.chars()); - next!(p, StreamStartToken(..)); - next!(p, FlowMappingStartToken); - next!(p, KeyToken); - next!(p, ScalarToken(TScalarStyle::Plain, _)); - next!(p, ValueToken); - next!(p, ScalarToken(TScalarStyle::Plain, _)); - next!(p, FlowEntryToken); - next!(p, KeyToken); + next!(p, StreamStart(..)); + next!(p, FlowMappingStart); + next!(p, Key); + next!(p, Scalar(TScalarStyle::Plain, _)); + next!(p, Value); + next!(p, Scalar(TScalarStyle::Plain, _)); + next!(p, FlowEntry); + next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "a complex key"); - next!(p, ValueToken); - next!(p, ScalarToken(TScalarStyle::Plain, _)); - next!(p, FlowEntryToken); - next!(p, FlowMappingEndToken); - next!(p, StreamEndToken); + next!(p, Value); + next!(p, Scalar(TScalarStyle::Plain, _)); + next!(p, FlowEntry); + next!(p, FlowMappingEnd); + next!(p, StreamEnd); end!(p); } @@ -1749,32 +1744,32 @@ macro_rules! end { key 2: value 2 "; let mut p = Scanner::new(s.chars()); - next!(p, StreamStartToken(..)); - next!(p, BlockSequenceStartToken); - next!(p, BlockEntryToken); + next!(p, StreamStart(..)); + next!(p, BlockSequenceStart); + next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 1"); - next!(p, BlockEntryToken); + next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 2"); - next!(p, BlockEntryToken); - next!(p, BlockSequenceStartToken); - next!(p, BlockEntryToken); + next!(p, BlockEntry); + next!(p, BlockSequenceStart); + next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 3.1"); - next!(p, BlockEntryToken); + next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 3.2"); - next!(p, BlockEndToken); - next!(p, BlockEntryToken); - next!(p, BlockMappingStartToken); - next!(p, KeyToken); + next!(p, BlockEnd); + next!(p, BlockEntry); + next!(p, BlockMappingStart); + next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "key 1"); - next!(p, ValueToken); + next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "value 1"); - next!(p, KeyToken); + next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "key 2"); - next!(p, ValueToken); + next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "value 2"); - next!(p, BlockEndToken); - next!(p, BlockEndToken); - next!(p, StreamEndToken); + next!(p, BlockEnd); + next!(p, BlockEnd); + next!(p, StreamEnd); end!(p); } @@ -1793,40 +1788,40 @@ a sequence: - item 2 "; let mut p = Scanner::new(s.chars()); - next!(p, StreamStartToken(..)); - next!(p, BlockMappingStartToken); - next!(p, KeyToken); - next!(p, ScalarToken(_, _)); - next!(p, ValueToken); - next!(p, ScalarToken(_, _)); - next!(p, KeyToken); - next!(p, ScalarToken(_, _)); - next!(p, ValueToken); - next!(p, ScalarToken(_, _)); - next!(p, KeyToken); - next!(p, ScalarToken(_, _)); - next!(p, ValueToken); // libyaml comment seems to 
be wrong - next!(p, BlockMappingStartToken); - next!(p, KeyToken); - next!(p, ScalarToken(_, _)); - next!(p, ValueToken); - next!(p, ScalarToken(_, _)); - next!(p, KeyToken); - next!(p, ScalarToken(_, _)); - next!(p, ValueToken); - next!(p, ScalarToken(_, _)); - next!(p, BlockEndToken); - next!(p, KeyToken); - next!(p, ScalarToken(_, _)); - next!(p, ValueToken); - next!(p, BlockSequenceStartToken); - next!(p, BlockEntryToken); - next!(p, ScalarToken(_, _)); - next!(p, BlockEntryToken); - next!(p, ScalarToken(_, _)); - next!(p, BlockEndToken); - next!(p, BlockEndToken); - next!(p, StreamEndToken); + next!(p, StreamStart(..)); + next!(p, BlockMappingStart); + next!(p, Key); + next!(p, Scalar(_, _)); + next!(p, Value); + next!(p, Scalar(_, _)); + next!(p, Key); + next!(p, Scalar(_, _)); + next!(p, Value); + next!(p, Scalar(_, _)); + next!(p, Key); + next!(p, Scalar(_, _)); + next!(p, Value); // libyaml comment seems to be wrong + next!(p, BlockMappingStart); + next!(p, Key); + next!(p, Scalar(_, _)); + next!(p, Value); + next!(p, Scalar(_, _)); + next!(p, Key); + next!(p, Scalar(_, _)); + next!(p, Value); + next!(p, Scalar(_, _)); + next!(p, BlockEnd); + next!(p, Key); + next!(p, Scalar(_, _)); + next!(p, Value); + next!(p, BlockSequenceStart); + next!(p, BlockEntry); + next!(p, Scalar(_, _)); + next!(p, BlockEntry); + next!(p, Scalar(_, _)); + next!(p, BlockEnd); + next!(p, BlockEnd); + next!(p, StreamEnd); end!(p); } @@ -1840,17 +1835,17 @@ key: - item 2 "; let mut p = Scanner::new(s.chars()); - next!(p, StreamStartToken(..)); - next!(p, BlockMappingStartToken); - next!(p, KeyToken); + next!(p, StreamStart(..)); + next!(p, BlockMappingStart); + next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "key"); - next!(p, ValueToken); - next!(p, BlockEntryToken); + next!(p, Value); + next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 1"); - next!(p, BlockEntryToken); + next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 2"); - next!(p, BlockEndToken); - next!(p, StreamEndToken); + next!(p, BlockEnd); + next!(p, StreamEnd); end!(p); } @@ -1866,35 +1861,35 @@ key: : complex value "; let mut p = Scanner::new(s.chars()); - next!(p, StreamStartToken(..)); - next!(p, BlockSequenceStartToken); - next!(p, BlockEntryToken); - next!(p, BlockSequenceStartToken); - next!(p, BlockEntryToken); + next!(p, StreamStart(..)); + next!(p, BlockSequenceStart); + next!(p, BlockEntry); + next!(p, BlockSequenceStart); + next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 1"); - next!(p, BlockEntryToken); + next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 2"); - next!(p, BlockEndToken); - next!(p, BlockEntryToken); - next!(p, BlockMappingStartToken); - next!(p, KeyToken); + next!(p, BlockEnd); + next!(p, BlockEntry); + next!(p, BlockMappingStart); + next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "key 1"); - next!(p, ValueToken); + next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "value 1"); - next!(p, KeyToken); + next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "key 2"); - next!(p, ValueToken); + next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "value 2"); - next!(p, BlockEndToken); - next!(p, BlockEntryToken); - next!(p, BlockMappingStartToken); - next!(p, KeyToken); + next!(p, BlockEnd); + next!(p, BlockEntry); + next!(p, BlockMappingStart); + next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "complex key"); - next!(p, ValueToken); + next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "complex value"); - next!(p, BlockEndToken); - next!(p, 
BlockEndToken); - next!(p, StreamEndToken); + next!(p, BlockEnd); + next!(p, BlockEnd); + next!(p, StreamEnd); end!(p); } @@ -1910,32 +1905,32 @@ key: key 2: value 2 "; let mut p = Scanner::new(s.chars()); - next!(p, StreamStartToken(..)); - next!(p, BlockMappingStartToken); - next!(p, KeyToken); + next!(p, StreamStart(..)); + next!(p, BlockMappingStart); + next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "a sequence"); - next!(p, ValueToken); - next!(p, BlockSequenceStartToken); - next!(p, BlockEntryToken); + next!(p, Value); + next!(p, BlockSequenceStart); + next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 1"); - next!(p, BlockEntryToken); + next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "item 2"); - next!(p, BlockEndToken); - next!(p, KeyToken); + next!(p, BlockEnd); + next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "a mapping"); - next!(p, ValueToken); - next!(p, BlockMappingStartToken); - next!(p, KeyToken); + next!(p, Value); + next!(p, BlockMappingStart); + next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "key 1"); - next!(p, ValueToken); + next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "value 1"); - next!(p, KeyToken); + next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "key 2"); - next!(p, ValueToken); + next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "value 2"); - next!(p, BlockEndToken); - next!(p, BlockEndToken); - next!(p, StreamEndToken); + next!(p, BlockEnd); + next!(p, BlockEnd); + next!(p, StreamEnd); end!(p); } @@ -1949,17 +1944,17 @@ key: } "; let mut p = Scanner::new(s.chars()); - next!(p, StreamStartToken(..)); - next!(p, FlowMappingStartToken); - next!(p, KeyToken); + next!(p, StreamStart(..)); + next!(p, FlowMappingStart); + next!(p, Key); next_scalar!(p, TScalarStyle::Plain, "foo"); - next!(p, ValueToken); - next!(p, FlowEntryToken); - next!(p, ValueToken); + next!(p, Value); + next!(p, FlowEntry); + next!(p, Value); next_scalar!(p, TScalarStyle::Plain, "bar"); - next!(p, FlowEntryToken); - next!(p, FlowMappingEndToken); - next!(p, StreamEndToken); + next!(p, FlowEntry); + next!(p, FlowMappingEnd); + next!(p, StreamEnd); end!(p); } @@ -1967,15 +1962,15 @@ key: fn test_scanner_cr() { let s = "---\r\n- tok1\r\n- tok2"; let mut p = Scanner::new(s.chars()); - next!(p, StreamStartToken(..)); - next!(p, DocumentStartToken); - next!(p, BlockSequenceStartToken); - next!(p, BlockEntryToken); + next!(p, StreamStart(..)); + next!(p, DocumentStart); + next!(p, BlockSequenceStart); + next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "tok1"); - next!(p, BlockEntryToken); + next!(p, BlockEntry); next_scalar!(p, TScalarStyle::Plain, "tok2"); - next!(p, BlockEndToken); - next!(p, StreamEndToken); + next!(p, BlockEnd); + next!(p, StreamEnd); end!(p); } diff --git a/saphyr/src/yaml.rs b/saphyr/src/yaml.rs index 45f2fb8..6db2a1b 100644 --- a/saphyr/src/yaml.rs +++ b/saphyr/src/yaml.rs @@ -94,46 +94,43 @@ impl EventReceiver for YamlLoader { Event::Scalar(ref v, style, aid, ref tag) => { let node = if style != TScalarStyle::Plain { Yaml::String(v.clone()) - } else { - match tag { - &Some(TokenType::TagToken(ref handle, ref suffix)) => { - // XXX tag:yaml.org,2002: - if handle == "!!" 
{
-                                match suffix.as_ref() {
-                                    "bool" => {
-                                        // "true" or "false"
-                                        match v.parse::<bool>() {
-                                            Err(_) => Yaml::BadValue,
-                                            Ok(v) => Yaml::Boolean(v)
-                                        }
-                                    },
-                                    "int" => {
-                                        match v.parse::<i64>() {
-                                            Err(_) => Yaml::BadValue,
-                                            Ok(v) => Yaml::Integer(v)
-                                        }
-                                    },
-                                    "float" => {
-                                        match v.parse::<f64>() {
-                                            Err(_) => Yaml::BadValue,
-                                            Ok(_) => Yaml::Real(v.clone())
-                                        }
-                                    },
-                                    "null" => {
-                                        match v.as_ref() {
-                                            "~" | "null" => Yaml::Null,
-                                            _ => Yaml::BadValue,
-                                        }
-                                    }
-                                    _ => Yaml::String(v.clone()),
+                } else if let Some(TokenType::Tag(ref handle, ref suffix)) = *tag {
+                    // XXX tag:yaml.org,2002:
+                    if handle == "!!" {
+                        match suffix.as_ref() {
+                            "bool" => {
+                                // "true" or "false"
+                                match v.parse::<bool>() {
+                                    Err(_) => Yaml::BadValue,
+                                    Ok(v) => Yaml::Boolean(v)
+                                }
+                            },
+                            "int" => {
+                                match v.parse::<i64>() {
+                                    Err(_) => Yaml::BadValue,
+                                    Ok(v) => Yaml::Integer(v)
+                                }
+                            },
+                            "float" => {
+                                match v.parse::<f64>() {
+                                    Err(_) => Yaml::BadValue,
+                                    Ok(_) => Yaml::Real(v.clone())
+                                }
+                            },
+                            "null" => {
+                                match v.as_ref() {
+                                    "~" | "null" => Yaml::Null,
+                                    _ => Yaml::BadValue,
                                 }
-                            } else {
-                                Yaml::String(v.clone())
                             }
-                        },
-                        // Datatype is not specified, or unrecognized
-                        _ => { Yaml::from_str(v.as_ref()) }
+                            _ => Yaml::String(v.clone()),
+                        }
+                    } else {
+                        Yaml::String(v.clone())
                     }
+                } else {
+                    // Datatype is not specified, or unrecognized
+                    Yaml::from_str(v.as_ref())
                 };

                 self.insert_new_node((node, aid));
@@ -245,7 +242,12 @@ impl Yaml {
             _ => None
         }
     }
+}

+#[cfg_attr(feature="clippy", allow(should_implement_trait))]
+impl Yaml {
+    // Not implementing FromStr because there is no possibility of Error.
+    // This function falls back to Yaml::String if nothing else matches.
     pub fn from_str(v: &str) -> Yaml {
         if v.starts_with("0x") {
             let n = i64::from_str_radix(&v[2..], 16);
@@ -259,8 +261,8 @@ impl Yaml {
                 return Yaml::Integer(n.unwrap());
             }
         }
-        if v.starts_with("+") && v[1..].parse::<i64>().is_ok() {
-            return Yaml::Integer(v[1..].parse::<i64>().unwrap());
+        if v.starts_with('+') && v[1..].parse::<i64>().is_ok() {
+            return Yaml::Integer(v[1..].parse::<i64>().unwrap());
         }
         match v {
             "~" | "null" => Yaml::Null,
@@ -268,8 +270,8 @@ impl Yaml {
             "false" => Yaml::Boolean(false),
             _ if v.parse::<i64>().is_ok() => Yaml::Integer(v.parse::<i64>().unwrap()),
             // try parsing as f64
-            _ if v.parse::<f64>().is_ok() => Yaml::Real(v.to_string()),
-            _ => Yaml::String(v.to_string())
+            _ if v.parse::<f64>().is_ok() => Yaml::Real(v.to_owned()),
+            _ => Yaml::String(v.to_owned())
         }
     }
 }
@@ -279,7 +281,7 @@ impl<'a> Index<&'a str> for Yaml {
     type Output = Yaml;

     fn index(&self, idx: &'a str) -> &Yaml {
-        let key = Yaml::String(idx.to_string());
+        let key = Yaml::String(idx.to_owned());
         match self.as_hash() {
             Some(h) => h.get(&key).unwrap_or(&BAD_VALUE),
             None => &BAD_VALUE
@@ -333,7 +335,7 @@ a4:
 a5: 'single_quoted'
 a6: \"double_quoted\"
 a7: 你好
-".to_string();
+".to_owned();
         let out = YamlLoader::load_from_str(&s).unwrap();
         let doc = &out[0];
         assert_eq!(doc["a7"].as_str().unwrap(), "你好");
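For reference, a minimal sketch of how the renamed TokenType variants read at a call site, patterned on the scanner tests in this diff. The yaml_rust crate path and the iterator-style use of Scanner are assumptions rather than part of the change, and the optional lint pass added in Cargo.toml and .travis.yml is enabled with "cargo build --features clippy".

    // Hypothetical call site after the rename: Scanner yields Token values,
    // and tok.1 is the TokenType, the same access pattern the tests above use.
    extern crate yaml_rust;

    use yaml_rust::scanner::{Scanner, TScalarStyle, TokenType};

    fn main() {
        for tok in Scanner::new("- item 1\n- item 2".chars()) {
            match tok.1 {
                TokenType::StreamStart(..) => println!("stream start"),
                TokenType::BlockEntry => println!("block entry"),
                TokenType::Scalar(TScalarStyle::Plain, ref v) => println!("plain scalar: {}", v),
                TokenType::StreamEnd => break,
                _ => {}
            }
        }
    }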