From 5039af6862c76eeddf47d181d889331cd6c42ab3 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Sat, 15 Sep 2018 09:49:04 -0700 Subject: [PATCH] Format with rustfmt 0.99.4 --- parser/examples/dump_yaml.rs | 4 +- parser/src/emitter.rs | 146 +++++---- parser/src/lib.rs | 16 +- parser/src/parser.rs | 294 ++++++++++--------- parser/src/scanner.rs | 552 ++++++++++++++++++++--------------- parser/src/yaml.rs | 172 ++++++----- parser/tests/quickcheck.rs | 2 +- parser/tests/spec_test.rs | 131 +++++---- 8 files changed, 733 insertions(+), 584 deletions(-) diff --git a/parser/examples/dump_yaml.rs b/parser/examples/dump_yaml.rs index 5f2e306..8fce0f3 100644 --- a/parser/examples/dump_yaml.rs +++ b/parser/examples/dump_yaml.rs @@ -17,14 +17,14 @@ fn dump_node(doc: &yaml::Yaml, indent: usize) { for x in v { dump_node(x, indent + 1); } - }, + } yaml::Yaml::Hash(ref h) => { for (k, v) in h { print_indent(indent); println!("{:?}:", k); dump_node(v, indent + 1); } - }, + } _ => { print_indent(indent); println!("{:?}", doc); diff --git a/parser/src/emitter.rs b/parser/src/emitter.rs index 68fe8a4..cb238be 100644 --- a/parser/src/emitter.rs +++ b/parser/src/emitter.rs @@ -1,13 +1,12 @@ -use std::fmt::{self, Display}; use std::convert::From; use std::error::Error; +use std::fmt::{self, Display}; use yaml::{Hash, Yaml}; - #[derive(Copy, Clone, Debug)] pub enum EmitError { - FmtError(fmt::Error), - BadHashmapKey, + FmtError(fmt::Error), + BadHashmapKey, } impl Error for EmitError { @@ -91,7 +90,7 @@ fn escape_str(wr: &mut fmt::Write, v: &str) -> Result<(), fmt::Error> { b'\x1e' => "\\u001e", b'\x1f' => "\\u001f", b'\x7f' => "\\u007f", - _ => { continue; } + _ => continue, }; if start < i { @@ -118,7 +117,7 @@ impl<'a> YamlEmitter<'a> { best_indent: 2, compact: true, - level: -1 + level: -1, } } @@ -131,12 +130,12 @@ impl<'a> YamlEmitter<'a> { /// or tags), which should be OK, because this emitter doesn't /// (currently) emit those anyways. pub fn compact(&mut self, compact: bool) { - self.compact = compact; + self.compact = compact; } /// Determine if this emitter is using 'compact inline notation'. 
pub fn is_compact(&self) -> bool { - self.compact + self.compact } pub fn dump(&mut self, doc: &Yaml) -> EmitResult { @@ -147,7 +146,9 @@ impl<'a> YamlEmitter<'a> { } fn write_indent(&mut self) -> EmitResult { - if self.level <= 0 { return Ok(()); } + if self.level <= 0 { + return Ok(()); + } for _ in 0..self.level { for _ in 0..self.best_indent { try!(write!(self.writer, " ")); @@ -163,12 +164,11 @@ impl<'a> YamlEmitter<'a> { Yaml::String(ref v) => { if need_quotes(v) { try!(escape_str(self.writer, v)); - } - else { + } else { try!(write!(self.writer, "{}", v)); } Ok(()) - }, + } Yaml::Boolean(v) => { if v { try!(self.writer.write_str("true")); @@ -176,21 +176,21 @@ impl<'a> YamlEmitter<'a> { try!(self.writer.write_str("false")); } Ok(()) - }, + } Yaml::Integer(v) => { try!(write!(self.writer, "{}", v)); Ok(()) - }, + } Yaml::Real(ref v) => { try!(write!(self.writer, "{}", v)); Ok(()) - }, + } Yaml::Null | Yaml::BadValue => { try!(write!(self.writer, "~")); Ok(()) - }, + } // XXX(chenyh) Alias - _ => { Ok(()) } + _ => Ok(()), } } @@ -219,24 +219,24 @@ impl<'a> YamlEmitter<'a> { self.level += 1; for (cnt, (k, v)) in h.iter().enumerate() { let complex_key = match *k { - Yaml::Hash(_) | Yaml::Array(_) => true, - _ => false, + Yaml::Hash(_) | Yaml::Array(_) => true, + _ => false, }; if cnt > 0 { try!(write!(self.writer, "\n")); try!(self.write_indent()); } if complex_key { - try!(write!(self.writer, "?")); - try!(self.emit_val(true, k)); - try!(write!(self.writer, "\n")); - try!(self.write_indent()); - try!(write!(self.writer, ":")); - try!(self.emit_val(true, v)); + try!(write!(self.writer, "?")); + try!(self.emit_val(true, k)); + try!(write!(self.writer, "\n")); + try!(self.write_indent()); + try!(write!(self.writer, ":")); + try!(self.emit_val(true, v)); } else { - try!(self.emit_node(k)); - try!(write!(self.writer, ":")); - try!(self.emit_val(false, v)); + try!(self.emit_node(k)); + try!(write!(self.writer, ":")); + try!(self.emit_val(false, v)); } } self.level -= 1; @@ -260,7 +260,7 @@ impl<'a> YamlEmitter<'a> { self.level -= 1; } self.emit_array(v) - }, + } Yaml::Hash(ref h) => { if (inline && self.compact) || h.is_empty() { try!(write!(self.writer, " ")); @@ -271,7 +271,7 @@ impl<'a> YamlEmitter<'a> { self.level -= 1; } self.emit_hash(h) - }, + } _ => { try!(write!(self.writer, " ")); self.emit_node(val) @@ -296,37 +296,48 @@ impl<'a> YamlEmitter<'a> { /// * When the string looks like a date (e.g. 2014-12-31) (otherwise it would be automatically converted into a Unix timestamp). fn need_quotes(string: &str) -> bool { fn need_quotes_spaces(string: &str) -> bool { - string.starts_with(' ') - || string.ends_with(' ') + string.starts_with(' ') || string.ends_with(' ') } string == "" - || need_quotes_spaces(string) - || string.starts_with(|character: char| { - match character { + || need_quotes_spaces(string) + || string.starts_with(|character: char| match character { ':' | '&' | '*' | '?' | '|' | '-' | '<' | '>' | '=' | '!' | '%' | '@' => true, _ => false, - } - }) - || string.contains(|character: char| { - match character { - '{' | '}' | '[' | ']' | ',' | '#' | '`' | '\"' | '\'' | '\\' | '\0' ... '\x06' | '\t' | '\n' | '\r' | '\x0e' ... '\x1a' | '\x1c' ... 
'\x1f' => true, + }) + || string.contains(|character: char| match character { + '{' + | '}' + | '[' + | ']' + | ',' + | '#' + | '`' + | '\"' + | '\'' + | '\\' + | '\0'...'\x06' + | '\t' + | '\n' + | '\r' + | '\x0e'...'\x1a' + | '\x1c'...'\x1f' => true, _ => false, - } - }) - || [// http://yaml.org/type/bool.html - // Note: 'y', 'Y', 'n', 'N', is not quoted deliberately, as in libyaml. PyYAML also parse - // them as string, not booleans, although it is volating the YAML 1.1 specification. - // See https://github.com/dtolnay/serde-yaml/pull/83#discussion_r152628088. - "yes","Yes","YES","no","No","NO", - "True", "TRUE", "true", "False", "FALSE", "false", - "on","On","ON","off","Off","OFF", - // http://yaml.org/type/null.html - "null","Null","NULL", "~" - ].contains(&string) - || string.starts_with('.') - || string.parse::().is_ok() - || string.parse::().is_ok() + }) + || [ + // http://yaml.org/type/bool.html + // Note: 'y', 'Y', 'n', 'N', is not quoted deliberately, as in libyaml. PyYAML also parse + // them as string, not booleans, although it is volating the YAML 1.1 specification. + // See https://github.com/dtolnay/serde-yaml/pull/83#discussion_r152628088. + "yes", "Yes", "YES", "no", "No", "NO", "True", "TRUE", "true", "False", "FALSE", + "false", "on", "On", "ON", "off", "Off", "OFF", + // http://yaml.org/type/null.html + "null", "Null", "NULL", "~", + ] + .contains(&string) + || string.starts_with('.') + || string.parse::().is_ok() + || string.parse::().is_ok() } #[cfg(test)] @@ -349,7 +360,6 @@ a4: - 2 "; - let docs = YamlLoader::load_from_str(&s).unwrap(); let doc = &docs[0]; let mut writer = String::new(); @@ -361,7 +371,7 @@ a4: println!("emitted:\n{}", writer); let docs_new = match YamlLoader::load_from_str(&writer) { Ok(y) => y, - Err(e) => panic!(format!("{}", e)) + Err(e) => panic!(format!("{}", e)), }; let doc_new = &docs_new[0]; @@ -398,7 +408,7 @@ products: } let docs_new = match YamlLoader::load_from_str(&writer) { Ok(y) => y, - Err(e) => panic!(format!("{}", e)) + Err(e) => panic!(format!("{}", e)), }; let doc_new = &docs_new[0]; assert_eq!(doc, doc_new); @@ -506,21 +516,26 @@ bool1: false"#; emitter.dump(doc).unwrap(); } - assert_eq!(expected, writer, "expected:\n{}\nactual:\n{}\n", expected, writer); + assert_eq!( + expected, writer, + "expected:\n{}\nactual:\n{}\n", + expected, writer + ); } #[test] fn test_empty_and_nested() { - test_empty_and_nested_flag(false) + test_empty_and_nested_flag(false) } #[test] fn test_empty_and_nested_compact() { - test_empty_and_nested_flag(true) + test_empty_and_nested_flag(true) } fn test_empty_and_nested_flag(compact: bool) { - let s = if compact { r#"--- + let s = if compact { + r#"--- a: b: c: hello @@ -528,7 +543,9 @@ a: e: - f - g - - h: []"# } else { r#"--- + - h: []"# + } else { + r#"--- a: b: c: hello @@ -537,7 +554,8 @@ e: - f - g - - h: []"# }; + h: []"# + }; let docs = YamlLoader::load_from_str(&s).unwrap(); let doc = &docs[0]; diff --git a/parser/src/lib.rs b/parser/src/lib.rs index e16449c..36932d9 100644 --- a/parser/src/lib.rs +++ b/parser/src/lib.rs @@ -41,16 +41,16 @@ extern crate linked_hash_map; -pub mod yaml; -pub mod scanner; -pub mod parser; pub mod emitter; +pub mod parser; +pub mod scanner; +pub mod yaml; // reexport key APIs -pub use scanner::ScanError; +pub use emitter::{EmitError, YamlEmitter}; pub use parser::Event; +pub use scanner::ScanError; pub use yaml::{Yaml, YamlLoader}; -pub use emitter::{YamlEmitter, EmitError}; #[cfg(test)] mod tests { @@ -58,8 +58,7 @@ mod tests { #[test] fn test_api() { - let s = 
-" + let s = " # from yaml-cpp example - name: Ogre position: [0, 5, 0] @@ -104,8 +103,7 @@ mod tests { #[test] fn test_fail() { - let s = -" + let s = " # syntax error scalar key: [1, 2]] diff --git a/parser/src/parser.rs b/parser/src/parser.rs index f8b04b5..190d84a 100644 --- a/parser/src/parser.rs +++ b/parser/src/parser.rs @@ -26,7 +26,7 @@ enum State { FlowMappingKey, FlowMappingValue, FlowMappingEmptyValue, - End + End, } /// `Event` is used with the low-level event base parsing API, @@ -48,7 +48,7 @@ pub enum Event { SequenceEnd, /// Anchor ID MappingStart(usize), - MappingEnd + MappingEnd, } impl Event { @@ -74,12 +74,10 @@ pub struct Parser { anchor_id: usize, } - pub trait EventReceiver { fn on_event(&mut self, ev: Event); } - pub trait MarkedEventReceiver { fn on_event(&mut self, ev: Event, _mark: Marker); } @@ -92,7 +90,7 @@ impl MarkedEventReceiver for R { pub type ParseResult = Result<(Event, Marker), ScanError>; -impl> Parser { +impl> Parser { pub fn new(src: T) -> Parser { Parser { scanner: Scanner::new(src), @@ -121,39 +119,37 @@ impl> Parser { pub fn next(&mut self) -> ParseResult { match self.current { None => self.parse(), - Some(_) => { - Ok(self.current.take().unwrap()) - } + Some(_) => Ok(self.current.take().unwrap()), } } fn peek_token(&mut self) -> Result<&Token, ScanError> { match self.token { - None => { + None => { self.token = Some(try!(self.scan_next_token())); Ok(self.token.as_ref().unwrap()) - }, - Some(ref tok) => Ok(tok) + } + Some(ref tok) => Ok(tok), } } fn scan_next_token(&mut self) -> Result { let token = self.scanner.next(); match token { - None => - match self.scanner.get_error() { - None => Err(ScanError::new(self.scanner.mark(), "unexpected eof")), - Some(e) => Err(e), - }, - Some(tok) => Ok(tok) + None => match self.scanner.get_error() { + None => Err(ScanError::new(self.scanner.mark(), "unexpected eof")), + Some(e) => Err(e), + }, + Some(tok) => Ok(tok), } } fn fetch_token(&mut self) -> Token { - self.token.take().expect("fetch_token needs to be preceded by peek_token") + self.token + .take() + .expect("fetch_token needs to be preceded by peek_token") } - fn skip(&mut self) { self.token = None; //self.peek_token(); @@ -174,8 +170,11 @@ impl> Parser { Ok((ev, mark)) } - pub fn load(&mut self, recv: &mut R, multi: bool) - -> Result<(), ScanError> { + pub fn load( + &mut self, + recv: &mut R, + multi: bool, + ) -> Result<(), ScanError> { if !self.scanner.stream_started() { let (ev, mark) = try!(self.next()); assert_eq!(ev, Event::StreamStart); @@ -203,8 +202,12 @@ impl> Parser { Ok(()) } - fn load_document(&mut self, first_ev: Event, mark: Marker, recv: &mut R) - -> Result<(), ScanError> { + fn load_document( + &mut self, + first_ev: Event, + mark: Marker, + recv: &mut R, + ) -> Result<(), ScanError> { assert_eq!(first_ev, Event::DocumentStart); recv.on_event(first_ev, mark); @@ -219,28 +222,33 @@ impl> Parser { Ok(()) } - fn load_node(&mut self, first_ev: Event, mark: Marker, recv: &mut R) - -> Result<(), ScanError> { + fn load_node( + &mut self, + first_ev: Event, + mark: Marker, + recv: &mut R, + ) -> Result<(), ScanError> { match first_ev { Event::Alias(..) | Event::Scalar(..) 
=> { recv.on_event(first_ev, mark); Ok(()) - }, + } Event::SequenceStart(_) => { recv.on_event(first_ev, mark); self.load_sequence(recv) - }, + } Event::MappingStart(_) => { recv.on_event(first_ev, mark); self.load_mapping(recv) - }, - _ => { println!("UNREACHABLE EVENT: {:?}", first_ev); - unreachable!(); } + } + _ => { + println!("UNREACHABLE EVENT: {:?}", first_ev); + unreachable!(); + } } } - fn load_mapping(&mut self, recv: &mut R) - -> Result<(), ScanError> { + fn load_mapping(&mut self, recv: &mut R) -> Result<(), ScanError> { let (mut key_ev, mut key_mark) = try!(self.next()); while key_ev != Event::MappingEnd { // key @@ -254,14 +262,12 @@ impl> Parser { let (ev, mark) = try!(self.next()); key_ev = ev; key_mark = mark; - } recv.on_event(key_ev, key_mark); Ok(()) } - fn load_sequence(&mut self, recv: &mut R) - -> Result<(), ScanError> { + fn load_sequence(&mut self, recv: &mut R) -> Result<(), ScanError> { let (mut ev, mut mark) = try!(self.next()); while ev != Event::SequenceEnd { try!(self.load_node(ev, mark, recv)); @@ -289,7 +295,6 @@ impl> Parser { State::BlockNode => self.parse_node(true, false), // State::BlockNodeOrIndentlessSequence => self.parse_node(true, true), // State::FlowNode => self.parse_node(false, false), - State::BlockMappingFirstKey => self.block_mapping_key(true), State::BlockMappingKey => self.block_mapping_key(false), State::BlockMappingValue => self.block_mapping_value(), @@ -322,9 +327,8 @@ impl> Parser { self.state = State::ImplicitDocumentStart; self.skip(); Ok((Event::StreamStart, mark)) - }, - Token(mark, _) => Err(ScanError::new(mark, - "did not find expected ")), + } + Token(mark, _) => Err(ScanError::new(mark, "did not find expected ")), } } @@ -340,19 +344,19 @@ impl> Parser { self.state = State::End; self.skip(); Ok((Event::StreamEnd, mark)) - }, + } Token(_, TokenType::VersionDirective(..)) | Token(_, TokenType::TagDirective(..)) | Token(_, TokenType::DocumentStart) => { // explicit document self._explict_document_start() - }, + } Token(mark, _) if implicit => { try!(self.parser_process_directives()); self.push_state(State::DocumentEnd); self.state = State::BlockNode; Ok((Event::DocumentStart, mark)) - }, + } _ => { // explicit document self._explict_document_start() @@ -369,11 +373,11 @@ impl> Parser { // return Err(ScanError::new(tok.0, // "found incompatible YAML document")); //} - }, + } TokenType::TagDirective(..) 
=> { // TODO add tag directive - }, - _ => break + } + _ => break, } self.skip(); } @@ -389,9 +393,12 @@ impl> Parser { self.state = State::DocumentContent; self.skip(); Ok((Event::DocumentStart, mark)) - } - Token(mark, _) => Err(ScanError::new(mark, "did not find expected ")) - } + } + Token(mark, _) => Err(ScanError::new( + mark, + "did not find expected ", + )), + } } fn document_content(&mut self) -> ParseResult { @@ -404,10 +411,8 @@ impl> Parser { self.pop_state(); // empty scalar Ok((Event::empty_scalar(), mark)) - }, - _ => { - self.parse_node(true, false) } + _ => self.parse_node(true, false), } } @@ -418,10 +423,10 @@ impl> Parser { self.skip(); _implicit = false; mark - }, - Token(mark, _) => mark + } + Token(mark, _) => mark, }; - + // TODO tag handling self.state = State::DocumentStart; Ok((Event::DocumentEnd, marker)) @@ -447,13 +452,18 @@ impl> Parser { self.pop_state(); if let Token(mark, TokenType::Alias(name)) = self.fetch_token() { match self.anchors.get(&name) { - None => return Err(ScanError::new(mark, "while parsing node, found unknown anchor")), - Some(id) => return Ok((Event::Alias(*id), mark)) + None => { + return Err(ScanError::new( + mark, + "while parsing node, found unknown anchor", + )) + } + Some(id) => return Ok((Event::Alias(*id), mark)), } } else { unreachable!() } - }, + } Token(_, TokenType::Anchor(_)) => { if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() { anchor_id = try!(self.register_anchor(name, &mark)); @@ -467,7 +477,7 @@ impl> Parser { } else { unreachable!() } - }, + } Token(_, TokenType::Tag(..)) => { if let tg @ TokenType::Tag(..) = self.fetch_token().1 { tag = Some(tg); @@ -481,14 +491,14 @@ impl> Parser { } else { unreachable!() } - }, + } _ => {} } match *try!(self.peek_token()) { Token(mark, TokenType::BlockEntry) if indentless_sequence => { self.state = State::IndentlessSequenceEntry; Ok((Event::SequenceStart(anchor_id), mark)) - }, + } Token(_, TokenType::Scalar(..)) => { self.pop_state(); if let Token(mark, TokenType::Scalar(style, v)) = self.fetch_token() { @@ -496,29 +506,32 @@ impl> Parser { } else { unreachable!() } - }, + } Token(mark, TokenType::FlowSequenceStart) => { self.state = State::FlowSequenceFirstEntry; Ok((Event::SequenceStart(anchor_id), mark)) - }, + } Token(mark, TokenType::FlowMappingStart) => { self.state = State::FlowMappingFirstKey; Ok((Event::MappingStart(anchor_id), mark)) - }, + } Token(mark, TokenType::BlockSequenceStart) if block => { self.state = State::BlockSequenceFirstEntry; Ok((Event::SequenceStart(anchor_id), mark)) - }, + } Token(mark, TokenType::BlockMappingStart) if block => { self.state = State::BlockMappingFirstKey; Ok((Event::MappingStart(anchor_id), mark)) - }, + } // ex 7.2, an empty scalar can follow a secondary tag Token(mark, _) if tag.is_some() || anchor_id > 0 => { self.pop_state(); Ok((Event::empty_scalar_with_anchor(anchor_id, tag), mark)) - }, - Token(mark, _) => { Err(ScanError::new(mark, "while parsing a node, did not find expected node content")) } + } + Token(mark, _) => Err(ScanError::new( + mark, + "while parsing a node, did not find expected node content", + )), } } @@ -545,20 +558,21 @@ impl> Parser { self.parse_node(true, true) } } - }, + } // XXX(chenyh): libyaml failed to parse spec 1.2, ex8.18 Token(mark, TokenType::Value) => { self.state = State::BlockMappingValue; Ok((Event::empty_scalar(), mark)) - }, + } Token(mark, TokenType::BlockEnd) => { self.pop_state(); self.skip(); Ok((Event::MappingEnd, mark)) - }, - Token(mark, _) => { - Err(ScanError::new(mark, 
"while parsing a block mapping, did not find expected key")) } + Token(mark, _) => Err(ScanError::new( + mark, + "while parsing a block mapping, did not find expected key", + )), } } @@ -573,13 +587,13 @@ impl> Parser { self.state = State::BlockMappingKey; // empty scalar Ok((Event::empty_scalar(), mark)) - }, + } _ => { self.push_state(State::BlockMappingKey); self.parse_node(true, true) } } - }, + } Token(mark, _) => { self.state = State::BlockMappingKey; // empty scalar @@ -593,49 +607,50 @@ impl> Parser { let _ = try!(self.peek_token()); self.skip(); } - let marker: Marker = { - match *try!(self.peek_token()) { - Token(mark, TokenType::FlowMappingEnd) => mark, - Token(mark, _) => { - if !first { - match *try!(self.peek_token()) { + let marker: Marker = + { + match *try!(self.peek_token()) { + Token(mark, TokenType::FlowMappingEnd) => mark, + Token(mark, _) => { + if !first { + match *try!(self.peek_token()) { Token(_, TokenType::FlowEntry) => self.skip(), Token(mark, _) => return Err(ScanError::new(mark, "while parsing a flow mapping, did not find expected ',' or '}'")) } - } + } - match *try!(self.peek_token()) { - Token(_, TokenType::Key) => { - self.skip(); - match *try!(self.peek_token()) { - Token(mark, TokenType::Value) - | Token(mark, TokenType::FlowEntry) - | Token(mark, TokenType::FlowMappingEnd) => { - self.state = State::FlowMappingValue; - return Ok((Event::empty_scalar(), mark)); - }, - _ => { - self.push_state(State::FlowMappingValue); - return self.parse_node(false, false); + match *try!(self.peek_token()) { + Token(_, TokenType::Key) => { + self.skip(); + match *try!(self.peek_token()) { + Token(mark, TokenType::Value) + | Token(mark, TokenType::FlowEntry) + | Token(mark, TokenType::FlowMappingEnd) => { + self.state = State::FlowMappingValue; + return Ok((Event::empty_scalar(), mark)); + } + _ => { + self.push_state(State::FlowMappingValue); + return self.parse_node(false, false); + } } } - }, - Token(marker, TokenType::Value) => { - self.state = State::FlowMappingValue; - return Ok((Event::empty_scalar(), marker)); - }, - Token(_, TokenType::FlowMappingEnd) => (), - _ => { - self.push_state(State::FlowMappingEmptyValue); - return self.parse_node(false, false); + Token(marker, TokenType::Value) => { + self.state = State::FlowMappingValue; + return Ok((Event::empty_scalar(), marker)); + } + Token(_, TokenType::FlowMappingEnd) => (), + _ => { + self.push_state(State::FlowMappingEmptyValue); + return self.parse_node(false, false); + } } - } - mark + mark + } } - } - }; + }; self.pop_state(); self.skip(); @@ -653,20 +668,19 @@ impl> Parser { Token(marker, TokenType::Value) => { self.skip(); match try!(self.peek_token()).1 { - TokenType::FlowEntry - | TokenType::FlowMappingEnd => { }, + TokenType::FlowEntry | TokenType::FlowMappingEnd => {} _ => { self.push_state(State::FlowMappingKey); return self.parse_node(false, false); } } marker - }, - Token(marker, _) => marker + } + Token(marker, _) => marker, } } }; - + self.state = State::FlowMappingKey; Ok((Event::empty_scalar(), mark)) } @@ -683,13 +697,15 @@ impl> Parser { self.pop_state(); self.skip(); return Ok((Event::SequenceEnd, mark)); - }, + } Token(_, TokenType::FlowEntry) if !first => { self.skip(); - }, + } Token(mark, _) if !first => { - return Err(ScanError::new(mark, - "while parsing a flow sequence, expectd ',' or ']'")); + return Err(ScanError::new( + mark, + "while parsing a flow sequence, expectd ',' or ']'", + )); } _ => { /* next */ } } @@ -698,7 +714,7 @@ impl> Parser { self.pop_state(); self.skip(); 
Ok((Event::SequenceEnd, mark)) - }, + } Token(mark, TokenType::Key) => { self.state = State::FlowSequenceEntryMappingKey; self.skip(); @@ -727,7 +743,7 @@ impl> Parser { | Token(mark, TokenType::BlockEnd) => { self.state = State::IndentlessSequenceEntry; Ok((Event::empty_scalar(), mark)) - }, + } _ => { self.push_state(State::IndentlessSequenceEntry); self.parse_node(true, false) @@ -747,25 +763,24 @@ impl> Parser { self.pop_state(); self.skip(); Ok((Event::SequenceEnd, mark)) - }, + } Token(_, TokenType::BlockEntry) => { self.skip(); match *try!(self.peek_token()) { - Token(mark, TokenType::BlockEntry) - | Token(mark, TokenType::BlockEnd) => { + Token(mark, TokenType::BlockEntry) | Token(mark, TokenType::BlockEnd) => { self.state = State::BlockSequenceEntry; Ok((Event::empty_scalar(), mark)) - }, + } _ => { self.push_state(State::BlockSequenceEntry); self.parse_node(true, false) } } - }, - Token(mark, _) => { - Err(ScanError::new(mark, - "while parsing a block collection, did not find expected '-' indicator")) } + Token(mark, _) => Err(ScanError::new( + mark, + "while parsing a block collection, did not find expected '-' indicator", + )), } } @@ -777,7 +792,7 @@ impl> Parser { self.skip(); self.state = State::FlowSequenceEntryMappingValue; Ok((Event::empty_scalar(), mark)) - }, + } _ => { self.push_state(State::FlowSequenceEntryMappingValue); self.parse_node(false, false) @@ -788,20 +803,19 @@ impl> Parser { fn flow_sequence_entry_mapping_value(&mut self) -> ParseResult { match *try!(self.peek_token()) { Token(_, TokenType::Value) => { - self.skip(); - self.state = State::FlowSequenceEntryMappingValue; - match *try!(self.peek_token()) { - Token(mark, TokenType::FlowEntry) - | Token(mark, TokenType::FlowSequenceEnd) => { - self.state = State::FlowSequenceEntryMappingEnd; - Ok((Event::empty_scalar(), mark)) - }, - _ => { - self.push_state(State::FlowSequenceEntryMappingEnd); - self.parse_node(false, false) - } + self.skip(); + self.state = State::FlowSequenceEntryMappingValue; + match *try!(self.peek_token()) { + Token(mark, TokenType::FlowEntry) | Token(mark, TokenType::FlowSequenceEnd) => { + self.state = State::FlowSequenceEntryMappingEnd; + Ok((Event::empty_scalar(), mark)) } - }, + _ => { + self.push_state(State::FlowSequenceEntryMappingEnd); + self.parse_node(false, false) + } + } + } Token(mark, _) => { self.state = State::FlowSequenceEntryMappingEnd; Ok((Event::empty_scalar(), mark)) diff --git a/parser/src/scanner.rs b/parser/src/scanner.rs index 56496d3..366eee5 100644 --- a/parser/src/scanner.rs +++ b/parser/src/scanner.rs @@ -1,10 +1,10 @@ use std::collections::VecDeque; -use std::{char, fmt}; use std::error::Error; +use std::{char, fmt}; #[derive(Clone, Copy, PartialEq, Debug, Eq)] pub enum TEncoding { - Utf8 + Utf8, } #[derive(Clone, Copy, PartialEq, Debug, Eq)] @@ -15,7 +15,7 @@ pub enum TScalarStyle { DoubleQuoted, Literal, - Foled + Foled, } #[derive(Clone, Copy, PartialEq, Debug, Eq)] @@ -30,7 +30,7 @@ impl Marker { Marker { index: index, line: line, - col: col + col: col, } } @@ -57,7 +57,7 @@ impl ScanError { pub fn new(loc: Marker, info: &str) -> ScanError { ScanError { mark: loc, - info: info.to_owned() + info: info.to_owned(), } } @@ -79,8 +79,13 @@ impl Error for ScanError { impl fmt::Display for ScanError { // col starts from 0 fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - write!(formatter, "{} at line {} column {}", self.info, - self.mark.line, self.mark.col + 1) + write!( + formatter, + "{} at line {} column {}", + self.info, + self.mark.line, + 
self.mark.col + 1 + ) } } @@ -110,7 +115,7 @@ pub enum TokenType { Anchor(String), /// handle, suffix Tag(String, String), - Scalar(TScalarStyle, String) + Scalar(TScalarStyle, String), } #[derive(Clone, PartialEq, Debug, Eq)] @@ -154,7 +159,7 @@ pub struct Scanner { token_available: bool, } -impl> Iterator for Scanner { +impl> Iterator for Scanner { type Item = Token; fn next(&mut self) -> Option { if self.error.is_some() { @@ -199,14 +204,12 @@ fn is_alpha(c: char) -> bool { match c { '0'...'9' | 'a'...'z' | 'A'...'Z' => true, '_' | '-' => true, - _ => false + _ => false, } } #[inline] fn is_hex(c: char) -> bool { - (c >= '0' && c <= '9') - || (c >= 'a' && c <= 'f') - || (c >= 'A' && c <= 'F') + (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F') } #[inline] fn as_hex(c: char) -> u32 { @@ -214,13 +217,13 @@ fn as_hex(c: char) -> u32 { '0'...'9' => (c as u32) - ('0' as u32), 'a'...'f' => (c as u32) - ('a' as u32) + 10, 'A'...'F' => (c as u32) - ('A' as u32) + 10, - _ => unreachable!() + _ => unreachable!(), } } pub type ScanResult = Result<(), ScanError>; -impl> Scanner { +impl> Scanner { /// Creates the YAML tokenizer. pub fn new(rdr: T) -> Scanner { Scanner { @@ -326,10 +329,10 @@ impl> Scanner { } } fn allow_simple_key(&mut self) { - self.simple_key_allowed = true; + self.simple_key_allowed = true; } fn disallow_simple_key(&mut self) { - self.simple_key_allowed = false; + self.simple_key_allowed = false; } pub fn fetch_next_token(&mut self) -> ScanResult { @@ -363,7 +366,8 @@ impl> Scanner { && self.buffer[0] == '-' && self.buffer[1] == '-' && self.buffer[2] == '-' - && is_blankz(self.buffer[3]) { + && is_blankz(self.buffer[3]) + { try!(self.fetch_document_indicator(TokenType::DocumentStart)); return Ok(()); } @@ -372,7 +376,8 @@ impl> Scanner { && self.buffer[0] == '.' && self.buffer[1] == '.' && self.buffer[2] == '.' - && is_blankz(self.buffer[3]) { + && is_blankz(self.buffer[3]) + { try!(self.fetch_document_indicator(TokenType::DocumentEnd)); return Ok(()); } @@ -402,8 +407,10 @@ impl> Scanner { // plain scalar '-' if !is_blankz(nc) => self.fetch_plain_scalar(), ':' | '?' 
if !is_blankz(nc) && self.flow_level == 0 => self.fetch_plain_scalar(), - '%' | '@' | '`' => Err(ScanError::new(self.mark, - &format!("unexpected character: `{}'", c))), + '%' | '@' | '`' => Err(ScanError::new( + self.mark, + &format!("unexpected character: `{}'", c), + )), _ => self.fetch_plain_scalar(), } } @@ -442,7 +449,9 @@ impl> Scanner { } } - if !need_more { break; } + if !need_more { + break; + } try!(self.fetch_next_token()); } self.token_available = true; @@ -452,13 +461,14 @@ impl> Scanner { fn stale_simple_keys(&mut self) -> ScanResult { for sk in &mut self.simple_keys { - if sk.possible && (sk.mark.line < self.mark.line - || sk.mark.index + 1024 < self.mark.index) { - if sk.required { - return Err(ScanError::new(self.mark, "simple key expect ':'")); - } - sk.possible = false; + if sk.possible + && (sk.mark.line < self.mark.line || sk.mark.index + 1024 < self.mark.index) + { + if sk.required { + return Err(ScanError::new(self.mark, "simple key expect ':'")); } + sk.possible = false; + } } Ok(()) } @@ -476,9 +486,12 @@ impl> Scanner { if self.flow_level == 0 { self.allow_simple_key(); } + } + '#' => while !is_breakz(self.ch()) { + self.skip(); + self.lookahead(1); }, - '#' => while !is_breakz(self.ch()) { self.skip(); self.lookahead(1); }, - _ => break + _ => break, } } } @@ -488,8 +501,9 @@ impl> Scanner { self.indent = -1; self.stream_start_produced = true; self.allow_simple_key(); - self.tokens.push_back(Token(mark, TokenType::StreamStart(TEncoding::Utf8))); - self.simple_keys.push(SimpleKey::new(Marker::new(0,0,0))); + self.tokens + .push_back(Token(mark, TokenType::StreamStart(TEncoding::Utf8))); + self.simple_keys.push(SimpleKey::new(Marker::new(0, 0, 0))); } fn fetch_stream_end(&mut self) -> ScanResult { @@ -503,7 +517,8 @@ impl> Scanner { try!(self.remove_simple_key()); self.disallow_simple_key(); - self.tokens.push_back(Token(self.mark, TokenType::StreamEnd)); + self.tokens + .push_back(Token(self.mark, TokenType::StreamEnd)); Ok(()) } @@ -526,12 +541,8 @@ impl> Scanner { let name = try!(self.scan_directive_name()); let tok = match name.as_ref() { - "YAML" => { - try!(self.scan_version_directive_value(&start_mark)) - }, - "TAG" => { - try!(self.scan_tag_directive_value(&start_mark)) - }, + "YAML" => try!(self.scan_version_directive_value(&start_mark)), + "TAG" => try!(self.scan_tag_directive_value(&start_mark)), // XXX This should be a warning instead of an error _ => { // skip current line @@ -541,7 +552,10 @@ impl> Scanner { self.lookahead(1); } // XXX return an empty TagDirective token - Token(start_mark, TokenType::TagDirective(String::new(), String::new())) + Token( + start_mark, + TokenType::TagDirective(String::new(), String::new()), + ) // return Err(ScanError::new(start_mark, // "while scanning a directive, found unknown directive name")) } @@ -561,8 +575,10 @@ impl> Scanner { } if !is_breakz(self.ch()) { - return Err(ScanError::new(start_mark, - "while scanning a directive, did not find expected comment or line break")); + return Err(ScanError::new( + start_mark, + "while scanning a directive, did not find expected comment or line break", + )); } // Eat a line break @@ -585,8 +601,10 @@ impl> Scanner { let major = try!(self.scan_version_directive_number(mark)); if self.ch() != '.' { - return Err(ScanError::new(*mark, - "while scanning a YAML directive, did not find expected digit or '.' character")); + return Err(ScanError::new( + *mark, + "while scanning a YAML directive, did not find expected digit or '.' 
character", + )); } self.skip(); @@ -607,13 +625,17 @@ impl> Scanner { } if string.is_empty() { - return Err(ScanError::new(start_mark, - "while scanning a directive, could not find expected directive name")); + return Err(ScanError::new( + start_mark, + "while scanning a directive, could not find expected directive name", + )); } if !is_blankz(self.ch()) { - return Err(ScanError::new(start_mark, - "while scanning a directive, found unexpected non-alphabetical character")); + return Err(ScanError::new( + start_mark, + "while scanning a directive, found unexpected non-alphabetical character", + )); } Ok(string) @@ -625,8 +647,10 @@ impl> Scanner { self.lookahead(1); while is_digit(self.ch()) { if length + 1 > 9 { - return Err(ScanError::new(*mark, - "while scanning a YAML directive, found extremely long version number")); + return Err(ScanError::new( + *mark, + "while scanning a YAML directive, found extremely long version number", + )); } length += 1; val = val * 10 + ((self.ch() as u32) - ('0' as u32)); @@ -635,8 +659,10 @@ impl> Scanner { } if length == 0 { - return Err(ScanError::new(*mark, - "while scanning a YAML directive, did not find expected version number")); + return Err(ScanError::new( + *mark, + "while scanning a YAML directive, did not find expected version number", + )); } Ok(val) @@ -666,8 +692,10 @@ impl> Scanner { if is_blankz(self.ch()) { Ok(Token(*mark, TokenType::TagDirective(handle, prefix))) } else { - Err(ScanError::new(*mark, - "while scanning TAG, did not find expected whitespace or line break")) + Err(ScanError::new( + *mark, + "while scanning TAG, did not find expected whitespace or line break", + )) } } @@ -696,8 +724,10 @@ impl> Scanner { suffix = try!(self.scan_tag_uri(false, false, &String::new(), &start_mark)); if self.ch() != '>' { - return Err(ScanError::new(start_mark, - "while scanning a tag, did not find the expected '>'")); + return Err(ScanError::new( + start_mark, + "while scanning a tag, did not find the expected '>'", + )); } self.skip(); @@ -727,8 +757,10 @@ impl> Scanner { // XXX: ex 7.2, an empty scalar can follow a secondary tag Ok(Token(start_mark, TokenType::Tag(handle, suffix))) } else { - Err(ScanError::new(start_mark, - "while scanning a tag, did not find expected whitespace or line break")) + Err(ScanError::new( + start_mark, + "while scanning a tag, did not find expected whitespace or line break", + )) } } @@ -736,8 +768,10 @@ impl> Scanner { let mut string = String::new(); self.lookahead(1); if self.ch() != '!' { - return Err(ScanError::new(*mark, - "while scanning a tag, did not find expected '!'")); + return Err(ScanError::new( + *mark, + "while scanning a tag, did not find expected '!'", + )); } string.push(self.ch()); @@ -758,14 +792,21 @@ impl> Scanner { // It's either the '!' tag or not really a tag handle. If it's a %TAG // directive, it's an error. If it's a tag token, it must be a part of // URI. - return Err(ScanError::new(*mark, - "while parsing a tag directive, did not find expected '!'")); + return Err(ScanError::new( + *mark, + "while parsing a tag directive, did not find expected '!'", + )); } Ok(string) } - fn scan_tag_uri(&mut self, directive: bool, _is_secondary: bool, - head: &str, mark: &Marker) -> Result { + fn scan_tag_uri( + &mut self, + directive: bool, + _is_secondary: bool, + head: &str, + mark: &Marker, + ) -> Result { let mut length = head.len(); let mut string = String::new(); @@ -788,7 +829,7 @@ impl> Scanner { '=' | '+' | '$' | ',' | '.' | '!' 
| '~' | '*' | '\'' | '(' | ')' | '[' | ']' => true, '%' => true, c if is_alpha(c) => true, - _ => false + _ => false, } { // Check if it is a URI-escape sequence. if self.ch() == '%' { @@ -803,25 +844,26 @@ impl> Scanner { } if length == 0 { - return Err(ScanError::new(*mark, - "while parsing a tag, did not find expected tag URI")); + return Err(ScanError::new( + *mark, + "while parsing a tag, did not find expected tag URI", + )); } Ok(string) } - fn scan_uri_escapes(&mut self, _directive: bool, mark: &Marker) - -> Result { + fn scan_uri_escapes(&mut self, _directive: bool, mark: &Marker) -> Result { let mut width = 0usize; let mut code = 0u32; loop { self.lookahead(3); - if !(self.ch() == '%' - && is_hex(self.buffer[1]) - && is_hex(self.buffer[2])) { - return Err(ScanError::new(*mark, - "while parsing a tag, did not find URI escaped octet")); + if !(self.ch() == '%' && is_hex(self.buffer[1]) && is_hex(self.buffer[2])) { + return Err(ScanError::new( + *mark, + "while parsing a tag, did not find URI escaped octet", + )); } let octet = (as_hex(self.buffer[1]) << 4) + as_hex(self.buffer[2]); @@ -832,15 +874,19 @@ impl> Scanner { _ if octet & 0xF0 == 0xE0 => 3, _ if octet & 0xF8 == 0xF0 => 4, _ => { - return Err(ScanError::new(*mark, - "while parsing a tag, found an incorrect leading UTF-8 octet")); + return Err(ScanError::new( + *mark, + "while parsing a tag, found an incorrect leading UTF-8 octet", + )); } }; code = octet; } else { if octet & 0xc0 != 0x80 { - return Err(ScanError::new(*mark, - "while parsing a tag, found an incorrect trailing UTF-8 octet")); + return Err(ScanError::new( + *mark, + "while parsing a tag, found an incorrect trailing UTF-8 octet", + )); } code = (code << 8) + octet; } @@ -857,8 +903,10 @@ impl> Scanner { match char::from_u32(code) { Some(ch) => Ok(ch), - None => Err(ScanError::new(*mark, - "while parsing a tag, found an invalid UTF-8 codepoint")) + None => Err(ScanError::new( + *mark, + "while parsing a tag, found an invalid UTF-8 codepoint", + )), } } @@ -873,8 +921,7 @@ impl> Scanner { Ok(()) } - fn scan_anchor(&mut self, alias: bool) - -> Result { + fn scan_anchor(&mut self, alias: bool) -> Result { let mut string = String::new(); let start_mark = self.mark; @@ -886,12 +933,11 @@ impl> Scanner { self.lookahead(1); } - if string.is_empty() - || match self.ch() { - c if is_blankz(c) => false, - '?' | ':' | ',' | ']' | '}' | '%' | '@' | '`' => false, - _ => true - } { + if string.is_empty() || match self.ch() { + c if is_blankz(c) => false, + '?' | ':' | ',' | ']' | '}' | '%' | '@' | '`' => false, + _ => true, + } { return Err(ScanError::new(start_mark, "while scanning an anchor or alias, did not find expected alphabetic or numeric character")); } @@ -902,7 +948,7 @@ impl> Scanner { } } - fn fetch_flow_collection_start(&mut self, tok :TokenType) -> ScanResult { + fn fetch_flow_collection_start(&mut self, tok: TokenType) -> ScanResult { // The indicators '[' and '{' may start a simple key. 
try!(self.save_simple_key()); @@ -917,7 +963,7 @@ impl> Scanner { Ok(()) } - fn fetch_flow_collection_end(&mut self, tok :TokenType) -> ScanResult { + fn fetch_flow_collection_end(&mut self, tok: TokenType) -> ScanResult { try!(self.remove_simple_key()); self.decrease_flow_level(); @@ -937,13 +983,16 @@ impl> Scanner { let start_mark = self.mark; self.skip(); - self.tokens.push_back(Token(start_mark, TokenType::FlowEntry)); + self.tokens + .push_back(Token(start_mark, TokenType::FlowEntry)); Ok(()) } fn increase_flow_level(&mut self) -> ScanResult { - self.simple_keys.push(SimpleKey::new(Marker::new(0,0,0))); - self.flow_level = self.flow_level.checked_add(1) + self.simple_keys.push(SimpleKey::new(Marker::new(0, 0, 0))); + self.flow_level = self + .flow_level + .checked_add(1) .ok_or_else(|| ScanError::new(self.mark, "Recursion limit exceeded"))?; Ok(()) } @@ -958,8 +1007,10 @@ impl> Scanner { if self.flow_level == 0 { // Check if we are allowed to start a new entry. if !self.simple_key_allowed { - return Err(ScanError::new(self.mark, - "block sequence entries are not allowed in this context")); + return Err(ScanError::new( + self.mark, + "block sequence entries are not allowed in this context", + )); } let mark = self.mark; @@ -967,7 +1018,10 @@ impl> Scanner { self.roll_indent(mark.col, None, TokenType::BlockSequenceStart, mark); } else { // - * only allowed in block - return Err(ScanError::new(self.mark, r#""-" is only valid inside a block"#)) + return Err(ScanError::new( + self.mark, + r#""-" is only valid inside a block"#, + )); } try!(self.remove_simple_key()); self.allow_simple_key(); @@ -975,7 +1029,8 @@ impl> Scanner { let start_mark = self.mark; self.skip(); - self.tokens.push_back(Token(start_mark, TokenType::BlockEntry)); + self.tokens + .push_back(Token(start_mark, TokenType::BlockEntry)); Ok(()) } @@ -1029,16 +1084,20 @@ impl> Scanner { self.lookahead(1); if is_digit(self.ch()) { if self.ch() == '0' { - return Err(ScanError::new(start_mark, - "while scanning a block scalar, found an intendation indicator equal to 0")); + return Err(ScanError::new( + start_mark, + "while scanning a block scalar, found an intendation indicator equal to 0", + )); } increment = (self.ch() as usize) - ('0' as usize); self.skip(); } } else if is_digit(self.ch()) { if self.ch() == '0' { - return Err(ScanError::new(start_mark, - "while scanning a block scalar, found an intendation indicator equal to 0")); + return Err(ScanError::new( + start_mark, + "while scanning a block scalar, found an intendation indicator equal to 0", + )); } increment = (self.ch() as usize) - ('0' as usize); @@ -1071,8 +1130,10 @@ impl> Scanner { // Check if we are at the end of the line. if !is_breakz(self.ch()) { - return Err(ScanError::new(start_mark, - "while scanning a block scalar, did not find expected comment or line break")); + return Err(ScanError::new( + start_mark, + "while scanning a block scalar, did not find expected comment or line break", + )); } if is_break(self.ch()) { @@ -1081,7 +1142,11 @@ impl> Scanner { } if increment > 0 { - indent = if self.indent >= 0 { (self.indent + increment as isize) as usize } else { increment } + indent = if self.indent >= 0 { + (self.indent + increment as isize) as usize + } else { + increment + } } // Scan the leading line breaks and determine the indentation level if needed. 
try!(self.block_scalar_breaks(&mut indent, &mut trailing_breaks)); @@ -1093,12 +1158,11 @@ impl> Scanner { while self.mark.col == indent && !is_z(self.ch()) { // We are at the beginning of a non-empty line. trailing_blank = is_blank(self.ch()); - if !literal && !leading_break.is_empty() - && !leading_blank && !trailing_blank { - if trailing_breaks.is_empty() { - string.push(' '); - } - leading_break.clear(); + if !literal && !leading_break.is_empty() && !leading_blank && !trailing_blank { + if trailing_breaks.is_empty() { + string.push(' '); + } + leading_break.clear(); } else { string.push_str(&leading_break); leading_break.clear(); @@ -1115,7 +1179,9 @@ impl> Scanner { self.lookahead(1); } // break on EOF - if is_z(self.ch()) { break; } + if is_z(self.ch()) { + break; + } self.lookahead(2); self.read_break(&mut leading_break); @@ -1134,9 +1200,15 @@ impl> Scanner { } if literal { - Ok(Token(start_mark, TokenType::Scalar(TScalarStyle::Literal, string))) + Ok(Token( + start_mark, + TokenType::Scalar(TScalarStyle::Literal, string), + )) } else { - Ok(Token(start_mark, TokenType::Scalar(TScalarStyle::Foled, string))) + Ok(Token( + start_mark, + TokenType::Scalar(TScalarStyle::Foled, string), + )) } } @@ -1144,10 +1216,9 @@ impl> Scanner { let mut max_indent = 0; loop { self.lookahead(1); - while (*indent == 0 || self.mark.col < *indent) - && self.buffer[0] == ' ' { - self.skip(); - self.lookahead(1); + while (*indent == 0 || self.mark.col < *indent) && self.buffer[0] == ' ' { + self.skip(); + self.lookahead(1); } if self.mark.col > max_indent { @@ -1155,8 +1226,7 @@ impl> Scanner { } // Check for a tab character messing the intendation. - if (*indent == 0 || self.mark.col < *indent) - && self.buffer[0] == '\t' { + if (*indent == 0 || self.mark.col < *indent) && self.buffer[0] == '\t' { return Err(ScanError::new(self.mark, "while scanning a block scalar, found a tab character where an intendation space is expected")); } @@ -1208,21 +1278,24 @@ impl> Scanner { /* Check for a document indicator. */ self.lookahead(4); - if self.mark.col == 0 && - (((self.buffer[0] == '-') && - (self.buffer[1] == '-') && - (self.buffer[2] == '-')) || - ((self.buffer[0] == '.') && - (self.buffer[1] == '.') && - (self.buffer[2] == '.'))) && - is_blankz(self.buffer[3]) { - return Err(ScanError::new(start_mark, - "while scanning a quoted scalar, found unexpected document indicator")); - } + if self.mark.col == 0 + && (((self.buffer[0] == '-') && (self.buffer[1] == '-') && (self.buffer[2] == '-')) + || ((self.buffer[0] == '.') + && (self.buffer[1] == '.') + && (self.buffer[2] == '.'))) + && is_blankz(self.buffer[3]) + { + return Err(ScanError::new( + start_mark, + "while scanning a quoted scalar, found unexpected document indicator", + )); + } if is_z(self.ch()) { - return Err(ScanError::new(start_mark, - "while scanning a quoted scalar, found unexpected end of stream")); + return Err(ScanError::new( + start_mark, + "while scanning a quoted scalar, found unexpected end of stream", + )); } self.lookahead(2); @@ -1237,10 +1310,10 @@ impl> Scanner { string.push('\''); self.skip(); self.skip(); - }, + } // Check for the right quote. - '\'' if single => { break; }, - '"' if !single => { break; }, + '\'' if single => break, + '"' if !single => break, // Check for an escaped line break. 
'\\' if !single && is_break(self.buffer[1]) => { self.lookahead(3); @@ -1277,8 +1350,12 @@ impl> Scanner { 'x' => code_length = 2, 'u' => code_length = 4, 'U' => code_length = 8, - _ => return Err(ScanError::new(start_mark, - "while parsing a quoted scalar, found unknown escape character")) + _ => { + return Err(ScanError::new( + start_mark, + "while parsing a quoted scalar, found unknown escape character", + )) + } } self.skip(); self.skip(); @@ -1307,15 +1384,18 @@ impl> Scanner { self.skip(); } } - }, - c => { string.push(c); self.skip(); } + } + c => { + string.push(c); + self.skip(); + } } self.lookahead(2); } self.lookahead(1); match self.ch() { - '\'' if single => { break; }, - '"' if !single => { break; }, + '\'' if single => break, + '"' if !single => break, _ => {} } @@ -1368,9 +1448,15 @@ impl> Scanner { self.skip(); if single { - Ok(Token(start_mark, TokenType::Scalar(TScalarStyle::SingleQuoted, string))) + Ok(Token( + start_mark, + TokenType::Scalar(TScalarStyle::SingleQuoted, string), + )) } else { - Ok(Token(start_mark, TokenType::Scalar(TScalarStyle::DoubleQuoted, string))) + Ok(Token( + start_mark, + TokenType::Scalar(TScalarStyle::DoubleQuoted, string), + )) } } @@ -1398,28 +1484,30 @@ impl> Scanner { /* Check for a document indicator. */ self.lookahead(4); - if self.mark.col == 0 && - (((self.buffer[0] == '-') && - (self.buffer[1] == '-') && - (self.buffer[2] == '-')) || - ((self.buffer[0] == '.') && - (self.buffer[1] == '.') && - (self.buffer[2] == '.'))) && - is_blankz(self.buffer[3]) { - break; - } + if self.mark.col == 0 + && (((self.buffer[0] == '-') && (self.buffer[1] == '-') && (self.buffer[2] == '-')) + || ((self.buffer[0] == '.') + && (self.buffer[1] == '.') + && (self.buffer[2] == '.'))) + && is_blankz(self.buffer[3]) + { + break; + } - if self.ch() == '#' { break; } + if self.ch() == '#' { + break; + } while !is_blankz(self.ch()) { - if self.flow_level > 0 && self.ch() == ':' - && is_blankz(self.ch()) { - return Err(ScanError::new(start_mark, - "while scanning a plain scalar, found unexpected ':'")); - } + if self.flow_level > 0 && self.ch() == ':' && is_blankz(self.ch()) { + return Err(ScanError::new( + start_mark, + "while scanning a plain scalar, found unexpected ':'", + )); + } // indicators ends a plain scalar match self.ch() { ':' if is_blankz(self.buffer[1]) => break, - ',' | ':' | '?' | '[' | ']' |'{' |'}' if self.flow_level > 0 => break, + ',' | ':' | '?' | '[' | ']' | '{' | '}' if self.flow_level > 0 => break, _ => {} } @@ -1438,7 +1526,6 @@ impl> Scanner { trailing_breaks.clear(); } leading_break.clear(); - } leading_blanks = false; } else { @@ -1452,15 +1539,18 @@ impl> Scanner { self.lookahead(2); } // is the end? 
- if !(is_blank(self.ch()) || is_break(self.ch())) { break; } + if !(is_blank(self.ch()) || is_break(self.ch())) { + break; + } self.lookahead(1); while is_blank(self.ch()) || is_break(self.ch()) { if is_blank(self.ch()) { - if leading_blanks && (self.mark.col as isize) < indent - && self.ch() == '\t' { - return Err(ScanError::new(start_mark, - "while scanning a plain scalar, found a tab")); + if leading_blanks && (self.mark.col as isize) < indent && self.ch() == '\t' { + return Err(ScanError::new( + start_mark, + "while scanning a plain scalar, found a tab", + )); } if leading_blanks { @@ -1493,7 +1583,10 @@ impl> Scanner { self.allow_simple_key(); } - Ok(Token(start_mark, TokenType::Scalar(TScalarStyle::Plain, string))) + Ok(Token( + start_mark, + TokenType::Scalar(TScalarStyle::Plain, string), + )) } fn fetch_key(&mut self) -> ScanResult { @@ -1501,10 +1594,17 @@ impl> Scanner { if self.flow_level == 0 { // Check if we are allowed to start a new key (not nessesary simple). if !self.simple_key_allowed { - return Err(ScanError::new(self.mark, "mapping keys are not allowed in this context")); + return Err(ScanError::new( + self.mark, + "mapping keys are not allowed in this context", + )); } - self.roll_indent(start_mark.col, None, - TokenType::BlockMappingStart, start_mark); + self.roll_indent( + start_mark.col, + None, + TokenType::BlockMappingStart, + start_mark, + ); } try!(self.remove_simple_key()); @@ -1530,8 +1630,12 @@ impl> Scanner { self.insert_token(sk.token_number - tokens_parsed, tok); // Add the BLOCK-MAPPING-START token if needed. - self.roll_indent(sk.mark.col, Some(sk.token_number), - TokenType::BlockMappingStart, start_mark); + self.roll_indent( + sk.mark.col, + Some(sk.token_number), + TokenType::BlockMappingStart, + start_mark, + ); self.simple_keys.last_mut().unwrap().possible = false; self.disallow_simple_key(); @@ -1539,12 +1643,18 @@ impl> Scanner { // The ':' indicator follows a complex key. if self.flow_level == 0 { if !self.simple_key_allowed { - return Err(ScanError::new(start_mark, - "mapping values are not allowed in this context")); + return Err(ScanError::new( + start_mark, + "mapping values are not allowed in this context", + )); } - self.roll_indent(start_mark.col, None, - TokenType::BlockMappingStart, start_mark); + self.roll_indent( + start_mark.col, + None, + TokenType::BlockMappingStart, + start_mark, + ); } if self.flow_level == 0 { @@ -1559,8 +1669,7 @@ impl> Scanner { Ok(()) } - fn roll_indent(&mut self, col: usize, number: Option, - tok: TokenType, mark: Marker) { + fn roll_indent(&mut self, col: usize, number: Option, tok: TokenType, mark: Marker) { if self.flow_level > 0 { return; } @@ -1571,7 +1680,7 @@ impl> Scanner { let tokens_parsed = self.tokens_parsed; match number { Some(n) => self.insert_token(n - tokens_parsed, Token(mark, tok)), - None => self.tokens.push_back(Token(mark, tok)) + None => self.tokens.push_back(Token(mark, tok)), } } } @@ -1611,44 +1720,41 @@ impl> Scanner { last.possible = false; Ok(()) } - } #[cfg(test)] mod test { - use super::*; use super::TokenType::*; + use super::*; -macro_rules! next { - ($p:ident, $tk:pat) => {{ - let tok = $p.next().unwrap(); - match tok.1 { - $tk => {}, - _ => { panic!("unexpected token: {:?}", - tok) } - } - }} -} + macro_rules! next { + ($p:ident, $tk:pat) => {{ + let tok = $p.next().unwrap(); + match tok.1 { + $tk => {} + _ => panic!("unexpected token: {:?}", tok), + } + }}; + } -macro_rules! 
next_scalar { - ($p:ident, $tk:expr, $v:expr) => {{ - let tok = $p.next().unwrap(); - match tok.1 { - Scalar(style, ref v) => { - assert_eq!(style, $tk); - assert_eq!(v, $v); - }, - _ => { panic!("unexpected token: {:?}", - tok) } - } - }} -} + macro_rules! next_scalar { + ($p:ident, $tk:expr, $v:expr) => {{ + let tok = $p.next().unwrap(); + match tok.1 { + Scalar(style, ref v) => { + assert_eq!(style, $tk); + assert_eq!(v, $v); + } + _ => panic!("unexpected token: {:?}", tok), + } + }}; + } -macro_rules! end { - ($p:ident) => {{ - assert_eq!($p.next(), None); - }} -} + macro_rules! end { + ($p:ident) => {{ + assert_eq!($p.next(), None); + }}; + } /// test cases in libyaml scanner.c #[test] fn test_empty() { @@ -1671,8 +1777,7 @@ macro_rules! end { #[test] fn test_explicit_scalar() { - let s = -"--- + let s = "--- 'a scalar' ... "; @@ -1687,8 +1792,7 @@ macro_rules! end { #[test] fn test_multiple_documents() { - let s = -" + let s = " 'a scalar' --- 'a scalar' @@ -1724,8 +1828,7 @@ macro_rules! end { #[test] fn test_a_flow_mapping() { - let s = -" + let s = " { a simple key: a value, # Note that the KEY token is produced. ? a complex key: another value, @@ -1751,8 +1854,7 @@ macro_rules! end { #[test] fn test_block_sequences() { - let s = -" + let s = " - item 1 - item 2 - @@ -1794,8 +1896,7 @@ macro_rules! end { #[test] fn test_block_mappings() { - let s = -" + let s = " a simple key: a value # The KEY token is produced here. ? a complex key : another value @@ -1842,13 +1943,11 @@ a sequence: next!(p, BlockEnd); next!(p, StreamEnd); end!(p); - } #[test] fn test_no_block_sequence_start() { - let s = -" + let s = " key: - item 1 - item 2 @@ -1870,8 +1969,7 @@ key: #[test] fn test_collections_in_sequence() { - let s = -" + let s = " - - item 1 - item 2 - key 1: value 1 @@ -1914,8 +2012,7 @@ key: #[test] fn test_collections_in_mapping() { - let s = -" + let s = " ? a sequence : - item 1 - item 2 @@ -1955,8 +2052,7 @@ key: #[test] fn test_spec_ex7_3() { - let s = -" + let s = " { ? foo :, : bar, diff --git a/parser/src/yaml.rs b/parser/src/yaml.rs index f5967b0..7e14435 100644 --- a/parser/src/yaml.rs +++ b/parser/src/yaml.rs @@ -1,13 +1,13 @@ +use linked_hash_map::LinkedHashMap; +use parser::*; +use scanner::{Marker, ScanError, TScalarStyle, TokenType}; use std::collections::BTreeMap; +use std::f64; +use std::i64; +use std::mem; use std::ops::Index; use std::string; -use std::i64; -use std::f64; -use std::mem; use std::vec; -use parser::*; -use scanner::{TScalarStyle, ScanError, TokenType, Marker}; -use linked_hash_map::LinkedHashMap; /// A YAML node is stored as this `Yaml` enumeration, which provides an easy way to /// access your YAML document. 
@@ -62,7 +62,7 @@ fn parse_f64(v: &str) -> Option { ".inf" | ".Inf" | ".INF" | "+.inf" | "+.Inf" | "+.INF" => Some(f64::INFINITY), "-.inf" | "-.Inf" | "-.INF" => Some(f64::NEG_INFINITY), ".nan" | "NaN" | ".NAN" => Some(f64::NAN), - _ => v.parse::().ok() + _ => v.parse::().ok(), } } @@ -81,31 +81,31 @@ impl MarkedEventReceiver for YamlLoader { match ev { Event::DocumentStart => { // do nothing - }, + } Event::DocumentEnd => { match self.doc_stack.len() { // empty document 0 => self.docs.push(Yaml::BadValue), 1 => self.docs.push(self.doc_stack.pop().unwrap().0), - _ => unreachable!() + _ => unreachable!(), } - }, + } Event::SequenceStart(aid) => { self.doc_stack.push((Yaml::Array(Vec::new()), aid)); - }, + } Event::SequenceEnd => { let node = self.doc_stack.pop().unwrap(); self.insert_new_node(node); - }, + } Event::MappingStart(aid) => { self.doc_stack.push((Yaml::Hash(Hash::new()), aid)); self.key_stack.push(Yaml::BadValue); - }, + } Event::MappingEnd => { self.key_stack.pop().unwrap(); let node = self.doc_stack.pop().unwrap(); self.insert_new_node(node); - }, + } Event::Scalar(v, style, aid, tag) => { let node = if style != TScalarStyle::Plain { Yaml::String(v) @@ -117,28 +117,22 @@ impl MarkedEventReceiver for YamlLoader { // "true" or "false" match v.parse::() { Err(_) => Yaml::BadValue, - Ok(v) => Yaml::Boolean(v) - } - }, - "int" => { - match v.parse::() { - Err(_) => Yaml::BadValue, - Ok(v) => Yaml::Integer(v) - } - }, - "float" => { - match parse_f64(&v) { - Some(_) => Yaml::Real(v), - None => Yaml::BadValue, - } - }, - "null" => { - match v.as_ref() { - "~" | "null" => Yaml::Null, - _ => Yaml::BadValue, + Ok(v) => Yaml::Boolean(v), } } - _ => Yaml::String(v), + "int" => match v.parse::() { + Err(_) => Yaml::BadValue, + Ok(v) => Yaml::Integer(v), + }, + "float" => match parse_f64(&v) { + Some(_) => Yaml::Real(v), + None => Yaml::BadValue, + }, + "null" => match v.as_ref() { + "~" | "null" => Yaml::Null, + _ => Yaml::BadValue, + }, + _ => Yaml::String(v), } } else { Yaml::String(v) @@ -149,7 +143,7 @@ impl MarkedEventReceiver for YamlLoader { }; self.insert_new_node((node, aid)); - }, + } Event::Alias(id) => { let n = match self.anchor_map.get(&id) { Some(v) => v.clone(), @@ -186,13 +180,13 @@ impl YamlLoader { mem::swap(&mut newkey, cur_key); h.insert(newkey, node.0); } - }, + } _ => unreachable!(), } } } - pub fn load_from_str(source: &str) -> Result, ScanError>{ + pub fn load_from_str(source: &str) -> Result, ScanError> { let mut loader = YamlLoader { docs: Vec::new(), doc_stack: Vec::new(), @@ -255,35 +249,35 @@ impl Yaml { pub fn is_null(&self) -> bool { match *self { Yaml::Null => true, - _ => false + _ => false, } } pub fn is_badvalue(&self) -> bool { match *self { Yaml::BadValue => true, - _ => false + _ => false, } } pub fn is_array(&self) -> bool { match *self { Yaml::Array(_) => true, - _ => false + _ => false, } } pub fn as_f64(&self) -> Option { match *self { Yaml::Real(ref v) => parse_f64(v), - _ => None + _ => None, } } pub fn into_f64(self) -> Option { match self { Yaml::Real(ref v) => parse_f64(v), - _ => None + _ => None, } } } @@ -315,7 +309,7 @@ impl Yaml { _ if v.parse::().is_ok() => Yaml::Integer(v.parse::().unwrap()), // try parsing as f64 _ if parse_f64(v).is_some() => Yaml::Real(v.to_owned()), - _ => Yaml::String(v.to_owned()) + _ => Yaml::String(v.to_owned()), } } } @@ -328,7 +322,7 @@ impl<'a> Index<&'a str> for Yaml { let key = Yaml::String(idx.to_owned()); match self.as_hash() { Some(h) => h.get(&key).unwrap_or(&BAD_VALUE), - None => &BAD_VALUE + None => 
&BAD_VALUE, } } } @@ -354,8 +348,7 @@ impl IntoIterator for Yaml { fn into_iter(self) -> Self::IntoIter { YamlIter { - yaml: self.into_vec() - .unwrap_or_else(Vec::new).into_iter() + yaml: self.into_vec().unwrap_or_else(Vec::new).into_iter(), } } } @@ -374,8 +367,8 @@ impl Iterator for YamlIter { #[cfg(test)] mod test { - use yaml::*; use std::f64; + use yaml::*; #[test] fn test_coerce() { let s = "--- @@ -424,8 +417,7 @@ a7: 你好 #[test] fn test_multi_doc() { - let s = -" + let s = " 'a scalar' --- 'a scalar' @@ -438,8 +430,7 @@ a7: 你好 #[test] fn test_anchor() { - let s = -" + let s = " a1: &DEFAULT b1: 4 b2: d @@ -452,8 +443,7 @@ a2: *DEFAULT #[test] fn test_bad_anchor() { - let s = -" + let s = " a1: &DEFAULT b1: 4 b2: *DEFAULT @@ -461,7 +451,6 @@ a1: &DEFAULT let out = YamlLoader::load_from_str(&s).unwrap(); let doc = &out[0]; assert_eq!(doc["a1"]["b2"], Yaml::BadValue); - } #[test] @@ -475,8 +464,7 @@ a1: &DEFAULT #[test] fn test_plain_datatype() { - let s = -" + let s = " - 'string' - \"string\" - string @@ -555,15 +543,23 @@ a1: &DEFAULT #[test] fn test_bad_docstart() { assert!(YamlLoader::load_from_str("---This used to cause an infinite loop").is_ok()); - assert_eq!(YamlLoader::load_from_str("----"), Ok(vec![Yaml::String(String::from("----"))])); - assert_eq!(YamlLoader::load_from_str("--- #here goes a comment"), Ok(vec![Yaml::Null])); - assert_eq!(YamlLoader::load_from_str("---- #here goes a comment"), Ok(vec![Yaml::String(String::from("----"))])); + assert_eq!( + YamlLoader::load_from_str("----"), + Ok(vec![Yaml::String(String::from("----"))]) + ); + assert_eq!( + YamlLoader::load_from_str("--- #here goes a comment"), + Ok(vec![Yaml::Null]) + ); + assert_eq!( + YamlLoader::load_from_str("---- #here goes a comment"), + Ok(vec![Yaml::String(String::from("----"))]) + ); } #[test] fn test_plain_datatype_with_into_methods() { - let s = -" + let s = " - 'string' - \"string\" - string @@ -620,9 +616,18 @@ c: ~ let out = YamlLoader::load_from_str(&s).unwrap(); let first = out.into_iter().next().unwrap(); let mut iter = first.into_hash().unwrap().into_iter(); - assert_eq!(Some((Yaml::String("b".to_owned()), Yaml::Null)), iter.next()); - assert_eq!(Some((Yaml::String("a".to_owned()), Yaml::Null)), iter.next()); - assert_eq!(Some((Yaml::String("c".to_owned()), Yaml::Null)), iter.next()); + assert_eq!( + Some((Yaml::String("b".to_owned()), Yaml::Null)), + iter.next() + ); + assert_eq!( + Some((Yaml::String("a".to_owned()), Yaml::Null)), + iter.next() + ); + assert_eq!( + Some((Yaml::String("c".to_owned()), Yaml::Null)), + iter.next() + ); assert_eq!(None, iter.next()); } @@ -641,30 +646,49 @@ c: ~ #[test] fn test_indentation_equality() { - - let four_spaces = YamlLoader::load_from_str(r#" + let four_spaces = YamlLoader::load_from_str( + r#" hash: with: indentations -"#).unwrap().into_iter().next().unwrap(); +"#, + ).unwrap() + .into_iter() + .next() + .unwrap(); - let two_spaces = YamlLoader::load_from_str(r#" + let two_spaces = YamlLoader::load_from_str( + r#" hash: with: indentations -"#).unwrap().into_iter().next().unwrap(); +"#, + ).unwrap() + .into_iter() + .next() + .unwrap(); - let one_space = YamlLoader::load_from_str(r#" + let one_space = YamlLoader::load_from_str( + r#" hash: with: indentations -"#).unwrap().into_iter().next().unwrap(); +"#, + ).unwrap() + .into_iter() + .next() + .unwrap(); - let mixed_spaces = YamlLoader::load_from_str(r#" + let mixed_spaces = YamlLoader::load_from_str( + r#" hash: with: indentations -"#).unwrap().into_iter().next().unwrap(); +"#, + ).unwrap() + 
.into_iter()
+    .next()
+    .unwrap();
 
     assert_eq!(four_spaces, two_spaces);
     assert_eq!(two_spaces, one_space);
@@ -691,7 +715,7 @@ subcommands3:
     let doc = &out.into_iter().next().unwrap();
     println!("{:#?}", doc);
 
-    assert_eq!(doc["subcommands"][0]["server"], Yaml::Null);
+    assert_eq!(doc["subcommands"][0]["server"], Yaml::Null);
     assert!(doc["subcommands2"][0]["server"].as_hash().is_some());
     assert!(doc["subcommands3"][0]["server"].as_hash().is_some());
 }
diff --git a/parser/tests/quickcheck.rs b/parser/tests/quickcheck.rs
index 54be25d..43fd254 100644
--- a/parser/tests/quickcheck.rs
+++ b/parser/tests/quickcheck.rs
@@ -3,8 +3,8 @@ extern crate yaml_rust;
 extern crate quickcheck;
 
 use quickcheck::TestResult;
-use yaml_rust::{Yaml, YamlLoader, YamlEmitter};
 use std::error::Error;
+use yaml_rust::{Yaml, YamlEmitter, YamlLoader};
 
 quickcheck! {
     fn test_check_weird_keys(xs: Vec<String>) -> TestResult {
diff --git a/parser/tests/spec_test.rs b/parser/tests/spec_test.rs
index 5e881fc..442728f 100644
--- a/parser/tests/spec_test.rs
+++ b/parser/tests/spec_test.rs
@@ -2,7 +2,7 @@
 #![allow(non_upper_case_globals)]
 extern crate yaml_rust;
 
-use yaml_rust::parser::{Parser, EventReceiver, Event};
+use yaml_rust::parser::{Event, EventReceiver, Parser};
 use yaml_rust::scanner::TScalarStyle;
 
 // These names match the names used in the C++ test suite.
@@ -21,7 +21,7 @@ enum TestEvent {
 }
 
 struct YamlChecker {
-    pub evs: Vec<TestEvent>
+    pub evs: Vec<TestEvent>,
 }
 
 impl EventReceiver for YamlChecker {
@@ -33,36 +33,36 @@ impl EventReceiver for YamlChecker {
             Event::SequenceEnd => TestEvent::OnSequenceEnd,
             Event::MappingStart(..) => TestEvent::OnMapStart,
             Event::MappingEnd => TestEvent::OnMapEnd,
-            Event::Scalar(ref v, style, _, _)=> {
+            Event::Scalar(ref v, style, _, _) => {
                 if v == "~" && style == TScalarStyle::Plain {
                     TestEvent::OnNull
                 } else {
                     TestEvent::OnScalar
                 }
-            },
+            }
             Event::Alias(_) => TestEvent::OnAlias,
-            _ => { return } // ignore other events
+            _ => return, // ignore other events
        };
         self.evs.push(tev);
     }
 }
 
 fn str_to_test_events(docs: &str) -> Vec<TestEvent> {
-    let mut p = YamlChecker {
-        evs: Vec::new()
-    };
+    let mut p = YamlChecker { evs: Vec::new() };
     let mut parser = Parser::new(docs.chars());
     parser.load(&mut p, true).unwrap();
     p.evs
 }
 
 macro_rules! assert_next {
-    ($v:expr, $p:pat) => (
+    ($v:expr, $p:pat) => {
         match $v.next().unwrap() {
-            $p => {},
-            e => { panic!("unexpected event: {:?}", e); }
+            $p => {}
+            e => {
+                panic!("unexpected event: {:?}", e);
+            }
         }
-    )
+    };
 }
 
 // auto generated from handler_spec_test.cpp
@@ -76,66 +76,65 @@ include!("spec_test.rs.inc");
 
 #[test]
 fn test_mapvec_legal() {
-    use yaml_rust::yaml::{Array, Hash, Yaml};
-    use yaml_rust::{YamlLoader, YamlEmitter};
+    use yaml_rust::yaml::{Array, Hash, Yaml};
+    use yaml_rust::{YamlEmitter, YamlLoader};
 
-    // Emitting a `map<map<seq<_>>, _>` should result in legal yaml that
-    // we can parse.
+    // Emitting a `map<map<seq<_>>, _>` should result in legal yaml that
+    // we can parse.
 
- let mut key = Array::new(); - key.push(Yaml::Integer(1)); - key.push(Yaml::Integer(2)); - key.push(Yaml::Integer(3)); + let mut key = Array::new(); + key.push(Yaml::Integer(1)); + key.push(Yaml::Integer(2)); + key.push(Yaml::Integer(3)); - let mut keyhash = Hash::new(); - keyhash.insert(Yaml::String("key".into()), Yaml::Array(key)); + let mut keyhash = Hash::new(); + keyhash.insert(Yaml::String("key".into()), Yaml::Array(key)); - let mut val = Array::new(); - val.push(Yaml::Integer(4)); - val.push(Yaml::Integer(5)); - val.push(Yaml::Integer(6)); + let mut val = Array::new(); + val.push(Yaml::Integer(4)); + val.push(Yaml::Integer(5)); + val.push(Yaml::Integer(6)); - let mut hash = Hash::new(); - hash.insert(Yaml::Hash(keyhash), Yaml::Array(val)); + let mut hash = Hash::new(); + hash.insert(Yaml::Hash(keyhash), Yaml::Array(val)); - let mut out_str = String::new(); - { - let mut emitter = YamlEmitter::new(&mut out_str); - emitter.dump(&Yaml::Hash(hash)).unwrap(); - } + let mut out_str = String::new(); + { + let mut emitter = YamlEmitter::new(&mut out_str); + emitter.dump(&Yaml::Hash(hash)).unwrap(); + } - // At this point, we are tempted to naively render like this: - // - // ```yaml - // --- - // {key: - // - 1 - // - 2 - // - 3}: - // - 4 - // - 5 - // - 6 - // ``` - // - // However, this doesn't work, because the key sequence [1, 2, 3] is - // rendered in block mode, which is not legal (as far as I can tell) - // inside the flow mode of the key. We need to either fully render - // everything that's in a key in flow mode (which may make for some - // long lines), or use the explicit map identifier '?': - // - // ```yaml - // --- - // ? - // key: - // - 1 - // - 2 - // - 3 - // : - // - 4 - // - 5 - // - 6 - // ``` + // At this point, we are tempted to naively render like this: + // + // ```yaml + // --- + // {key: + // - 1 + // - 2 + // - 3}: + // - 4 + // - 5 + // - 6 + // ``` + // + // However, this doesn't work, because the key sequence [1, 2, 3] is + // rendered in block mode, which is not legal (as far as I can tell) + // inside the flow mode of the key. We need to either fully render + // everything that's in a key in flow mode (which may make for some + // long lines), or use the explicit map identifier '?': + // + // ```yaml + // --- + // ? + // key: + // - 1 + // - 2 + // - 3 + // : + // - 4 + // - 5 + // - 6 + // ``` - YamlLoader::load_from_str(&out_str).unwrap(); + YamlLoader::load_from_str(&out_str).unwrap(); } -
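For reference, a minimal standalone sketch (not part of the patch) of the round trip that `test_mapvec_legal` exercises: build a mapping whose key is itself a mapping containing a sequence, emit it with `YamlEmitter` (which has to use the explicit `?` key form discussed in the comments above to stay legal), and re-parse the output with `YamlLoader`. It only uses items that appear in this diff; the `main` wrapper is added for illustration.

```rust
extern crate yaml_rust;

use yaml_rust::yaml::{Hash, Yaml};
use yaml_rust::{YamlEmitter, YamlLoader};

fn main() {
    // Key: { key: [1, 2, 3] } -- a complex (non-scalar) mapping key.
    let mut key = Hash::new();
    key.insert(
        Yaml::String("key".into()),
        Yaml::Array(vec![Yaml::Integer(1), Yaml::Integer(2), Yaml::Integer(3)]),
    );

    // Root mapping: the complex key maps to the sequence [4, 5, 6].
    let mut root = Hash::new();
    root.insert(
        Yaml::Hash(key),
        Yaml::Array(vec![Yaml::Integer(4), Yaml::Integer(5), Yaml::Integer(6)]),
    );

    // Emit to a String, then parse the emitted text again; the output must be
    // legal YAML, which is what the test asserts.
    let mut out = String::new();
    YamlEmitter::new(&mut out).dump(&Yaml::Hash(root)).unwrap();
    let docs = YamlLoader::load_from_str(&out).unwrap();
    assert_eq!(docs.len(), 1);
    println!("{}", out);
}
```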