Format with rustfmt 0.99.4
This commit is contained in:
parent 124d237be0
commit 5039af6862
8 changed files with 733 additions and 584 deletions

@@ -17,14 +17,14 @@ fn dump_node(doc: &yaml::Yaml, indent: usize) {
for x in v {
dump_node(x, indent + 1);
}
},
}
yaml::Yaml::Hash(ref h) => {
for (k, v) in h {
print_indent(indent);
println!("{:?}:", k);
dump_node(v, indent + 1);
}
},
}
_ => {
print_indent(indent);
println!("{:?}", doc);

@@ -1,13 +1,12 @@
use std::fmt::{self, Display};
use std::convert::From;
use std::error::Error;
use std::fmt::{self, Display};
use yaml::{Hash, Yaml};

#[derive(Copy, Clone, Debug)]
pub enum EmitError {
FmtError(fmt::Error),
BadHashmapKey,
FmtError(fmt::Error),
BadHashmapKey,
}

impl Error for EmitError {

@@ -91,7 +90,7 @@ fn escape_str(wr: &mut fmt::Write, v: &str) -> Result<(), fmt::Error> {
b'\x1e' => "\\u001e",
b'\x1f' => "\\u001f",
b'\x7f' => "\\u007f",
_ => { continue; }
_ => continue,
};

if start < i {

@@ -118,7 +117,7 @@ impl<'a> YamlEmitter<'a> {
best_indent: 2,
compact: true,
level: -1
level: -1,
}
}

@@ -131,12 +130,12 @@ impl<'a> YamlEmitter<'a> {
/// or tags), which should be OK, because this emitter doesn't
/// (currently) emit those anyways.
pub fn compact(&mut self, compact: bool) {
self.compact = compact;
self.compact = compact;
}

/// Determine if this emitter is using 'compact inline notation'.
pub fn is_compact(&self) -> bool {
self.compact
self.compact
}

pub fn dump(&mut self, doc: &Yaml) -> EmitResult {
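
For orientation only, a minimal usage sketch of the setter shown in this hunk (YamlEmitter::new, compact, is_compact, dump), assuming the yaml-rust API as of this commit; it is not part of the diff:

    // Illustrative sketch, not part of this commit: round-trip a document and
    // toggle the emitter's 'compact inline notation' via the setter above.
    extern crate yaml_rust;
    use yaml_rust::{YamlEmitter, YamlLoader};

    fn main() {
        let docs = YamlLoader::load_from_str("a:\n  b: [1, 2]").unwrap();
        let mut out = String::new();
        {
            let mut emitter = YamlEmitter::new(&mut out);
            emitter.compact(false); // turn off 'compact inline notation' (see doc comment above)
            assert!(!emitter.is_compact());
            emitter.dump(&docs[0]).unwrap();
        }
        println!("{}", out);
    }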

@@ -147,7 +146,9 @@ impl<'a> YamlEmitter<'a> {
}

fn write_indent(&mut self) -> EmitResult {
if self.level <= 0 { return Ok(()); }
if self.level <= 0 {
return Ok(());
}
for _ in 0..self.level {
for _ in 0..self.best_indent {
try!(write!(self.writer, " "));

@@ -163,12 +164,11 @@ impl<'a> YamlEmitter<'a> {
Yaml::String(ref v) => {
if need_quotes(v) {
try!(escape_str(self.writer, v));
}
else {
} else {
try!(write!(self.writer, "{}", v));
}
Ok(())
},
}
Yaml::Boolean(v) => {
if v {
try!(self.writer.write_str("true"));

@@ -176,21 +176,21 @@ impl<'a> YamlEmitter<'a> {
try!(self.writer.write_str("false"));
}
Ok(())
},
}
Yaml::Integer(v) => {
try!(write!(self.writer, "{}", v));
Ok(())
},
}
Yaml::Real(ref v) => {
try!(write!(self.writer, "{}", v));
Ok(())
},
}
Yaml::Null | Yaml::BadValue => {
try!(write!(self.writer, "~"));
Ok(())
},
}
// XXX(chenyh) Alias
_ => { Ok(()) }
_ => Ok(()),
}
}

@@ -219,24 +219,24 @@ impl<'a> YamlEmitter<'a> {
self.level += 1;
for (cnt, (k, v)) in h.iter().enumerate() {
let complex_key = match *k {
Yaml::Hash(_) | Yaml::Array(_) => true,
_ => false,
Yaml::Hash(_) | Yaml::Array(_) => true,
_ => false,
};
if cnt > 0 {
try!(write!(self.writer, "\n"));
try!(self.write_indent());
}
if complex_key {
try!(write!(self.writer, "?"));
try!(self.emit_val(true, k));
try!(write!(self.writer, "\n"));
try!(self.write_indent());
try!(write!(self.writer, ":"));
try!(self.emit_val(true, v));
try!(write!(self.writer, "?"));
try!(self.emit_val(true, k));
try!(write!(self.writer, "\n"));
try!(self.write_indent());
try!(write!(self.writer, ":"));
try!(self.emit_val(true, v));
} else {
try!(self.emit_node(k));
try!(write!(self.writer, ":"));
try!(self.emit_val(false, v));
try!(self.emit_node(k));
try!(write!(self.writer, ":"));
try!(self.emit_val(false, v));
}
}
self.level -= 1;

@@ -260,7 +260,7 @@ impl<'a> YamlEmitter<'a> {
self.level -= 1;
}
self.emit_array(v)
},
}
Yaml::Hash(ref h) => {
if (inline && self.compact) || h.is_empty() {
try!(write!(self.writer, " "));

@@ -271,7 +271,7 @@ impl<'a> YamlEmitter<'a> {
self.level -= 1;
}
self.emit_hash(h)
},
}
_ => {
try!(write!(self.writer, " "));
self.emit_node(val)

@@ -296,37 +296,48 @@ impl<'a> YamlEmitter<'a> {
/// * When the string looks like a date (e.g. 2014-12-31) (otherwise it would be automatically converted into a Unix timestamp).
fn need_quotes(string: &str) -> bool {
fn need_quotes_spaces(string: &str) -> bool {
string.starts_with(' ')
|| string.ends_with(' ')
string.starts_with(' ') || string.ends_with(' ')
}

string == ""
|| need_quotes_spaces(string)
|| string.starts_with(|character: char| {
match character {
|| need_quotes_spaces(string)
|| string.starts_with(|character: char| match character {
':' | '&' | '*' | '?' | '|' | '-' | '<' | '>' | '=' | '!' | '%' | '@' => true,
_ => false,
}
})
|| string.contains(|character: char| {
match character {
'{' | '}' | '[' | ']' | ',' | '#' | '`' | '\"' | '\'' | '\\' | '\0' ... '\x06' | '\t' | '\n' | '\r' | '\x0e' ... '\x1a' | '\x1c' ... '\x1f' => true,
})
|| string.contains(|character: char| match character {
'{'
| '}'
| '['
| ']'
| ','
| '#'
| '`'
| '\"'
| '\''
| '\\'
| '\0'...'\x06'
| '\t'
| '\n'
| '\r'
| '\x0e'...'\x1a'
| '\x1c'...'\x1f' => true,
_ => false,
}
})
|| [// http://yaml.org/type/bool.html
// Note: 'y', 'Y', 'n', 'N', is not quoted deliberately, as in libyaml. PyYAML also parse
// them as string, not booleans, although it is volating the YAML 1.1 specification.
// See https://github.com/dtolnay/serde-yaml/pull/83#discussion_r152628088.
"yes","Yes","YES","no","No","NO",
"True", "TRUE", "true", "False", "FALSE", "false",
"on","On","ON","off","Off","OFF",
// http://yaml.org/type/null.html
"null","Null","NULL", "~"
].contains(&string)
|| string.starts_with('.')
|| string.parse::<i64>().is_ok()
|| string.parse::<f64>().is_ok()
})
|| [
// http://yaml.org/type/bool.html
// Note: 'y', 'Y', 'n', 'N', is not quoted deliberately, as in libyaml. PyYAML also parse
// them as string, not booleans, although it is volating the YAML 1.1 specification.
// See https://github.com/dtolnay/serde-yaml/pull/83#discussion_r152628088.
"yes", "Yes", "YES", "no", "No", "NO", "True", "TRUE", "true", "False", "FALSE",
"false", "on", "On", "ON", "off", "Off", "OFF",
// http://yaml.org/type/null.html
"null", "Null", "NULL", "~",
]
.contains(&string)
|| string.starts_with('.')
|| string.parse::<i64>().is_ok()
|| string.parse::<f64>().is_ok()
}

#[cfg(test)]
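
To make the quoting rules documented above concrete, here is a small illustrative sketch (not code from this commit); the helper name looks_ambiguous and the reduced rule set are assumptions for illustration only:

    // Illustrative sketch only: a reduced version of the quoting rules above
    // (empty string, leading/trailing space, indicator characters,
    // boolean/null-like words, and numeric-looking strings). The name
    // `looks_ambiguous` is hypothetical and not part of yaml-rust.
    fn looks_ambiguous(s: &str) -> bool {
        s.is_empty()
            || s.starts_with(' ')
            || s.ends_with(' ')
            || s.starts_with(|c: char| ":&*?|-<>=!%@".contains(c))
            || s.contains(|c: char| "{}[],#`\"'\\".contains(c))
            || ["true", "false", "yes", "no", "null", "~"].contains(&s)
            || s.parse::<f64>().is_ok()
    }

    fn main() {
        assert!(looks_ambiguous(""));       // empty scalar must be quoted
        assert!(looks_ambiguous("3.14"));   // would otherwise be read as a number
        assert!(looks_ambiguous("null"));   // would otherwise be read as null
        assert!(!looks_ambiguous("hello")); // plain scalar, no quoting needed
    }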

@@ -349,7 +360,6 @@ a4:
- 2
";

let docs = YamlLoader::load_from_str(&s).unwrap();
let doc = &docs[0];
let mut writer = String::new();

@@ -361,7 +371,7 @@ a4:
println!("emitted:\n{}", writer);
let docs_new = match YamlLoader::load_from_str(&writer) {
Ok(y) => y,
Err(e) => panic!(format!("{}", e))
Err(e) => panic!(format!("{}", e)),
};
let doc_new = &docs_new[0];

@@ -398,7 +408,7 @@ products:
}
let docs_new = match YamlLoader::load_from_str(&writer) {
Ok(y) => y,
Err(e) => panic!(format!("{}", e))
Err(e) => panic!(format!("{}", e)),
};
let doc_new = &docs_new[0];
assert_eq!(doc, doc_new);

@@ -506,21 +516,26 @@ bool1: false"#;
emitter.dump(doc).unwrap();
}

assert_eq!(expected, writer, "expected:\n{}\nactual:\n{}\n", expected, writer);
assert_eq!(
expected, writer,
"expected:\n{}\nactual:\n{}\n",
expected, writer
);
}

#[test]
fn test_empty_and_nested() {
test_empty_and_nested_flag(false)
test_empty_and_nested_flag(false)
}

#[test]
fn test_empty_and_nested_compact() {
test_empty_and_nested_flag(true)
test_empty_and_nested_flag(true)
}

fn test_empty_and_nested_flag(compact: bool) {
let s = if compact { r#"---
let s = if compact {
r#"---
a:
b:
c: hello

@@ -528,7 +543,9 @@ a:
e:
- f
- g
- h: []"# } else { r#"---
- h: []"#
} else {
r#"---
a:
b:
c: hello

@@ -537,7 +554,8 @@ e:
- f
- g
-
h: []"# };
h: []"#
};

let docs = YamlLoader::load_from_str(&s).unwrap();
let doc = &docs[0];

@@ -41,16 +41,16 @@
extern crate linked_hash_map;

pub mod yaml;
pub mod scanner;
pub mod parser;
pub mod emitter;
pub mod parser;
pub mod scanner;
pub mod yaml;

// reexport key APIs
pub use scanner::ScanError;
pub use emitter::{EmitError, YamlEmitter};
pub use parser::Event;
pub use scanner::ScanError;
pub use yaml::{Yaml, YamlLoader};
pub use emitter::{YamlEmitter, EmitError};

#[cfg(test)]
mod tests {
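
For orientation, a minimal sketch that exercises the re-exported API listed above (YamlLoader, Yaml indexing, ScanError); the inputs mirror this crate's own tests and the snippet is not part of the diff:

    // Illustrative only: load a document through the re-exported YamlLoader,
    // index into it, and observe a parse failure as the re-exported ScanError.
    extern crate yaml_rust;
    use yaml_rust::{ScanError, Yaml, YamlLoader};

    fn main() {
        let docs: Vec<Yaml> = YamlLoader::load_from_str("name: Ogre\nhp: 7").unwrap();
        assert_eq!(docs[0]["hp"], Yaml::Integer(7));

        // The malformed input below is taken from the test_fail case in this diff.
        let err: ScanError =
            YamlLoader::load_from_str("\n# syntax error\nscalar\nkey: [1, 2]]\n").unwrap_err();
        println!("parse failed: {}", err);
    }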

@@ -58,8 +58,7 @@ mod tests {

#[test]
fn test_api() {
let s =
"
let s = "
# from yaml-cpp example
- name: Ogre
position: [0, 5, 0]

@@ -104,8 +103,7 @@ mod tests {

#[test]
fn test_fail() {
let s =
"
let s = "
# syntax error
scalar
key: [1, 2]]

@@ -26,7 +26,7 @@ enum State {
FlowMappingKey,
FlowMappingValue,
FlowMappingEmptyValue,
End
End,
}

/// `Event` is used with the low-level event base parsing API,

@@ -48,7 +48,7 @@ pub enum Event {
SequenceEnd,
/// Anchor ID
MappingStart(usize),
MappingEnd
MappingEnd,
}

impl Event {

@@ -74,12 +74,10 @@ pub struct Parser<T> {
anchor_id: usize,
}

pub trait EventReceiver {
fn on_event(&mut self, ev: Event);
}

pub trait MarkedEventReceiver {
fn on_event(&mut self, ev: Event, _mark: Marker);
}

@@ -92,7 +90,7 @@ impl<R: EventReceiver> MarkedEventReceiver for R {

pub type ParseResult = Result<(Event, Marker), ScanError>;

impl<T: Iterator<Item=char>> Parser<T> {
impl<T: Iterator<Item = char>> Parser<T> {
pub fn new(src: T) -> Parser<T> {
Parser {
scanner: Scanner::new(src),
@ -121,39 +119,37 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
pub fn next(&mut self) -> ParseResult {
|
||||
match self.current {
|
||||
None => self.parse(),
|
||||
Some(_) => {
|
||||
Ok(self.current.take().unwrap())
|
||||
}
|
||||
Some(_) => Ok(self.current.take().unwrap()),
|
||||
}
|
||||
}
|
||||
|
||||
fn peek_token(&mut self) -> Result<&Token, ScanError> {
|
||||
match self.token {
|
||||
None => {
|
||||
None => {
|
||||
self.token = Some(try!(self.scan_next_token()));
|
||||
Ok(self.token.as_ref().unwrap())
|
||||
},
|
||||
Some(ref tok) => Ok(tok)
|
||||
}
|
||||
Some(ref tok) => Ok(tok),
|
||||
}
|
||||
}
|
||||
|
||||
fn scan_next_token(&mut self) -> Result<Token, ScanError> {
|
||||
let token = self.scanner.next();
|
||||
match token {
|
||||
None =>
|
||||
match self.scanner.get_error() {
|
||||
None => Err(ScanError::new(self.scanner.mark(), "unexpected eof")),
|
||||
Some(e) => Err(e),
|
||||
},
|
||||
Some(tok) => Ok(tok)
|
||||
None => match self.scanner.get_error() {
|
||||
None => Err(ScanError::new(self.scanner.mark(), "unexpected eof")),
|
||||
Some(e) => Err(e),
|
||||
},
|
||||
Some(tok) => Ok(tok),
|
||||
}
|
||||
}
|
||||
|
||||
fn fetch_token(&mut self) -> Token {
|
||||
self.token.take().expect("fetch_token needs to be preceded by peek_token")
|
||||
self.token
|
||||
.take()
|
||||
.expect("fetch_token needs to be preceded by peek_token")
|
||||
}
|
||||
|
||||
|
||||
fn skip(&mut self) {
|
||||
self.token = None;
|
||||
//self.peek_token();
|
||||
|
@ -174,8 +170,11 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
Ok((ev, mark))
|
||||
}
|
||||
|
||||
pub fn load<R: MarkedEventReceiver>(&mut self, recv: &mut R, multi: bool)
|
||||
-> Result<(), ScanError> {
|
||||
pub fn load<R: MarkedEventReceiver>(
|
||||
&mut self,
|
||||
recv: &mut R,
|
||||
multi: bool,
|
||||
) -> Result<(), ScanError> {
|
||||
if !self.scanner.stream_started() {
|
||||
let (ev, mark) = try!(self.next());
|
||||
assert_eq!(ev, Event::StreamStart);
|
||||
|
@ -203,8 +202,12 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn load_document<R: MarkedEventReceiver>(&mut self, first_ev: Event, mark: Marker, recv: &mut R)
|
||||
-> Result<(), ScanError> {
|
||||
fn load_document<R: MarkedEventReceiver>(
|
||||
&mut self,
|
||||
first_ev: Event,
|
||||
mark: Marker,
|
||||
recv: &mut R,
|
||||
) -> Result<(), ScanError> {
|
||||
assert_eq!(first_ev, Event::DocumentStart);
|
||||
recv.on_event(first_ev, mark);
|
||||
|
||||
|
@ -219,28 +222,33 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn load_node<R: MarkedEventReceiver>(&mut self, first_ev: Event, mark: Marker, recv: &mut R)
|
||||
-> Result<(), ScanError> {
|
||||
fn load_node<R: MarkedEventReceiver>(
|
||||
&mut self,
|
||||
first_ev: Event,
|
||||
mark: Marker,
|
||||
recv: &mut R,
|
||||
) -> Result<(), ScanError> {
|
||||
match first_ev {
|
||||
Event::Alias(..) | Event::Scalar(..) => {
|
||||
recv.on_event(first_ev, mark);
|
||||
Ok(())
|
||||
},
|
||||
}
|
||||
Event::SequenceStart(_) => {
|
||||
recv.on_event(first_ev, mark);
|
||||
self.load_sequence(recv)
|
||||
},
|
||||
}
|
||||
Event::MappingStart(_) => {
|
||||
recv.on_event(first_ev, mark);
|
||||
self.load_mapping(recv)
|
||||
},
|
||||
_ => { println!("UNREACHABLE EVENT: {:?}", first_ev);
|
||||
unreachable!(); }
|
||||
}
|
||||
_ => {
|
||||
println!("UNREACHABLE EVENT: {:?}", first_ev);
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn load_mapping<R: MarkedEventReceiver>(&mut self, recv: &mut R)
|
||||
-> Result<(), ScanError> {
|
||||
fn load_mapping<R: MarkedEventReceiver>(&mut self, recv: &mut R) -> Result<(), ScanError> {
|
||||
let (mut key_ev, mut key_mark) = try!(self.next());
|
||||
while key_ev != Event::MappingEnd {
|
||||
// key
|
||||
|
@ -254,14 +262,12 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
let (ev, mark) = try!(self.next());
|
||||
key_ev = ev;
|
||||
key_mark = mark;
|
||||
|
||||
}
|
||||
recv.on_event(key_ev, key_mark);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn load_sequence<R: MarkedEventReceiver>(&mut self, recv: &mut R)
|
||||
-> Result<(), ScanError> {
|
||||
fn load_sequence<R: MarkedEventReceiver>(&mut self, recv: &mut R) -> Result<(), ScanError> {
|
||||
let (mut ev, mut mark) = try!(self.next());
|
||||
while ev != Event::SequenceEnd {
|
||||
try!(self.load_node(ev, mark, recv));
|
||||
|
@ -289,7 +295,6 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
State::BlockNode => self.parse_node(true, false),
|
||||
// State::BlockNodeOrIndentlessSequence => self.parse_node(true, true),
|
||||
// State::FlowNode => self.parse_node(false, false),
|
||||
|
||||
State::BlockMappingFirstKey => self.block_mapping_key(true),
|
||||
State::BlockMappingKey => self.block_mapping_key(false),
|
||||
State::BlockMappingValue => self.block_mapping_value(),
|
||||
|
@ -322,9 +327,8 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
self.state = State::ImplicitDocumentStart;
|
||||
self.skip();
|
||||
Ok((Event::StreamStart, mark))
|
||||
},
|
||||
Token(mark, _) => Err(ScanError::new(mark,
|
||||
"did not find expected <stream-start>")),
|
||||
}
|
||||
Token(mark, _) => Err(ScanError::new(mark, "did not find expected <stream-start>")),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -340,19 +344,19 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
self.state = State::End;
|
||||
self.skip();
|
||||
Ok((Event::StreamEnd, mark))
|
||||
},
|
||||
}
|
||||
Token(_, TokenType::VersionDirective(..))
|
||||
| Token(_, TokenType::TagDirective(..))
|
||||
| Token(_, TokenType::DocumentStart) => {
|
||||
// explicit document
|
||||
self._explict_document_start()
|
||||
},
|
||||
}
|
||||
Token(mark, _) if implicit => {
|
||||
try!(self.parser_process_directives());
|
||||
self.push_state(State::DocumentEnd);
|
||||
self.state = State::BlockNode;
|
||||
Ok((Event::DocumentStart, mark))
|
||||
},
|
||||
}
|
||||
_ => {
|
||||
// explicit document
|
||||
self._explict_document_start()
|
||||
|
@ -369,11 +373,11 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
// return Err(ScanError::new(tok.0,
|
||||
// "found incompatible YAML document"));
|
||||
//}
|
||||
},
|
||||
}
|
||||
TokenType::TagDirective(..) => {
|
||||
// TODO add tag directive
|
||||
},
|
||||
_ => break
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
self.skip();
|
||||
}
|
||||
|
@ -390,7 +394,10 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
self.skip();
|
||||
Ok((Event::DocumentStart, mark))
|
||||
}
|
||||
Token(mark, _) => Err(ScanError::new(mark, "did not find expected <document start>"))
|
||||
Token(mark, _) => Err(ScanError::new(
|
||||
mark,
|
||||
"did not find expected <document start>",
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -404,10 +411,8 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
self.pop_state();
|
||||
// empty scalar
|
||||
Ok((Event::empty_scalar(), mark))
|
||||
},
|
||||
_ => {
|
||||
self.parse_node(true, false)
|
||||
}
|
||||
_ => self.parse_node(true, false),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -418,8 +423,8 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
self.skip();
|
||||
_implicit = false;
|
||||
mark
|
||||
},
|
||||
Token(mark, _) => mark
|
||||
}
|
||||
Token(mark, _) => mark,
|
||||
};
|
||||
|
||||
// TODO tag handling
|
||||
|
@ -447,13 +452,18 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
self.pop_state();
|
||||
if let Token(mark, TokenType::Alias(name)) = self.fetch_token() {
|
||||
match self.anchors.get(&name) {
|
||||
None => return Err(ScanError::new(mark, "while parsing node, found unknown anchor")),
|
||||
Some(id) => return Ok((Event::Alias(*id), mark))
|
||||
None => {
|
||||
return Err(ScanError::new(
|
||||
mark,
|
||||
"while parsing node, found unknown anchor",
|
||||
))
|
||||
}
|
||||
Some(id) => return Ok((Event::Alias(*id), mark)),
|
||||
}
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
},
|
||||
}
|
||||
Token(_, TokenType::Anchor(_)) => {
|
||||
if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() {
|
||||
anchor_id = try!(self.register_anchor(name, &mark));
|
||||
|
@ -467,7 +477,7 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
},
|
||||
}
|
||||
Token(_, TokenType::Tag(..)) => {
|
||||
if let tg @ TokenType::Tag(..) = self.fetch_token().1 {
|
||||
tag = Some(tg);
|
||||
|
@ -481,14 +491,14 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
},
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
match *try!(self.peek_token()) {
|
||||
Token(mark, TokenType::BlockEntry) if indentless_sequence => {
|
||||
self.state = State::IndentlessSequenceEntry;
|
||||
Ok((Event::SequenceStart(anchor_id), mark))
|
||||
},
|
||||
}
|
||||
Token(_, TokenType::Scalar(..)) => {
|
||||
self.pop_state();
|
||||
if let Token(mark, TokenType::Scalar(style, v)) = self.fetch_token() {
|
||||
|
@ -496,29 +506,32 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
},
|
||||
}
|
||||
Token(mark, TokenType::FlowSequenceStart) => {
|
||||
self.state = State::FlowSequenceFirstEntry;
|
||||
Ok((Event::SequenceStart(anchor_id), mark))
|
||||
},
|
||||
}
|
||||
Token(mark, TokenType::FlowMappingStart) => {
|
||||
self.state = State::FlowMappingFirstKey;
|
||||
Ok((Event::MappingStart(anchor_id), mark))
|
||||
},
|
||||
}
|
||||
Token(mark, TokenType::BlockSequenceStart) if block => {
|
||||
self.state = State::BlockSequenceFirstEntry;
|
||||
Ok((Event::SequenceStart(anchor_id), mark))
|
||||
},
|
||||
}
|
||||
Token(mark, TokenType::BlockMappingStart) if block => {
|
||||
self.state = State::BlockMappingFirstKey;
|
||||
Ok((Event::MappingStart(anchor_id), mark))
|
||||
},
|
||||
}
|
||||
// ex 7.2, an empty scalar can follow a secondary tag
|
||||
Token(mark, _) if tag.is_some() || anchor_id > 0 => {
|
||||
self.pop_state();
|
||||
Ok((Event::empty_scalar_with_anchor(anchor_id, tag), mark))
|
||||
},
|
||||
Token(mark, _) => { Err(ScanError::new(mark, "while parsing a node, did not find expected node content")) }
|
||||
}
|
||||
Token(mark, _) => Err(ScanError::new(
|
||||
mark,
|
||||
"while parsing a node, did not find expected node content",
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -545,20 +558,21 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
self.parse_node(true, true)
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
// XXX(chenyh): libyaml failed to parse spec 1.2, ex8.18
|
||||
Token(mark, TokenType::Value) => {
|
||||
self.state = State::BlockMappingValue;
|
||||
Ok((Event::empty_scalar(), mark))
|
||||
},
|
||||
}
|
||||
Token(mark, TokenType::BlockEnd) => {
|
||||
self.pop_state();
|
||||
self.skip();
|
||||
Ok((Event::MappingEnd, mark))
|
||||
},
|
||||
Token(mark, _) => {
|
||||
Err(ScanError::new(mark, "while parsing a block mapping, did not find expected key"))
|
||||
}
|
||||
Token(mark, _) => Err(ScanError::new(
|
||||
mark,
|
||||
"while parsing a block mapping, did not find expected key",
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -573,13 +587,13 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
self.state = State::BlockMappingKey;
|
||||
// empty scalar
|
||||
Ok((Event::empty_scalar(), mark))
|
||||
},
|
||||
}
|
||||
_ => {
|
||||
self.push_state(State::BlockMappingKey);
|
||||
self.parse_node(true, true)
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
Token(mark, _) => {
|
||||
self.state = State::BlockMappingKey;
|
||||
// empty scalar
|
||||
|
@ -593,49 +607,50 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
let _ = try!(self.peek_token());
|
||||
self.skip();
|
||||
}
|
||||
let marker: Marker = {
|
||||
match *try!(self.peek_token()) {
|
||||
Token(mark, TokenType::FlowMappingEnd) => mark,
|
||||
Token(mark, _) => {
|
||||
if !first {
|
||||
match *try!(self.peek_token()) {
|
||||
let marker: Marker =
|
||||
{
|
||||
match *try!(self.peek_token()) {
|
||||
Token(mark, TokenType::FlowMappingEnd) => mark,
|
||||
Token(mark, _) => {
|
||||
if !first {
|
||||
match *try!(self.peek_token()) {
|
||||
Token(_, TokenType::FlowEntry) => self.skip(),
|
||||
Token(mark, _) => return Err(ScanError::new(mark,
|
||||
"while parsing a flow mapping, did not find expected ',' or '}'"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match *try!(self.peek_token()) {
|
||||
Token(_, TokenType::Key) => {
|
||||
self.skip();
|
||||
match *try!(self.peek_token()) {
|
||||
Token(mark, TokenType::Value)
|
||||
| Token(mark, TokenType::FlowEntry)
|
||||
| Token(mark, TokenType::FlowMappingEnd) => {
|
||||
self.state = State::FlowMappingValue;
|
||||
return Ok((Event::empty_scalar(), mark));
|
||||
},
|
||||
_ => {
|
||||
self.push_state(State::FlowMappingValue);
|
||||
return self.parse_node(false, false);
|
||||
match *try!(self.peek_token()) {
|
||||
Token(_, TokenType::Key) => {
|
||||
self.skip();
|
||||
match *try!(self.peek_token()) {
|
||||
Token(mark, TokenType::Value)
|
||||
| Token(mark, TokenType::FlowEntry)
|
||||
| Token(mark, TokenType::FlowMappingEnd) => {
|
||||
self.state = State::FlowMappingValue;
|
||||
return Ok((Event::empty_scalar(), mark));
|
||||
}
|
||||
_ => {
|
||||
self.push_state(State::FlowMappingValue);
|
||||
return self.parse_node(false, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Token(marker, TokenType::Value) => {
|
||||
self.state = State::FlowMappingValue;
|
||||
return Ok((Event::empty_scalar(), marker));
|
||||
},
|
||||
Token(_, TokenType::FlowMappingEnd) => (),
|
||||
_ => {
|
||||
self.push_state(State::FlowMappingEmptyValue);
|
||||
return self.parse_node(false, false);
|
||||
Token(marker, TokenType::Value) => {
|
||||
self.state = State::FlowMappingValue;
|
||||
return Ok((Event::empty_scalar(), marker));
|
||||
}
|
||||
Token(_, TokenType::FlowMappingEnd) => (),
|
||||
_ => {
|
||||
self.push_state(State::FlowMappingEmptyValue);
|
||||
return self.parse_node(false, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mark
|
||||
mark
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
self.pop_state();
|
||||
self.skip();
|
||||
|
@ -653,16 +668,15 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
Token(marker, TokenType::Value) => {
|
||||
self.skip();
|
||||
match try!(self.peek_token()).1 {
|
||||
TokenType::FlowEntry
|
||||
| TokenType::FlowMappingEnd => { },
|
||||
TokenType::FlowEntry | TokenType::FlowMappingEnd => {}
|
||||
_ => {
|
||||
self.push_state(State::FlowMappingKey);
|
||||
return self.parse_node(false, false);
|
||||
}
|
||||
}
|
||||
marker
|
||||
},
|
||||
Token(marker, _) => marker
|
||||
}
|
||||
Token(marker, _) => marker,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -683,13 +697,15 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
self.pop_state();
|
||||
self.skip();
|
||||
return Ok((Event::SequenceEnd, mark));
|
||||
},
|
||||
}
|
||||
Token(_, TokenType::FlowEntry) if !first => {
|
||||
self.skip();
|
||||
},
|
||||
}
|
||||
Token(mark, _) if !first => {
|
||||
return Err(ScanError::new(mark,
|
||||
"while parsing a flow sequence, expectd ',' or ']'"));
|
||||
return Err(ScanError::new(
|
||||
mark,
|
||||
"while parsing a flow sequence, expectd ',' or ']'",
|
||||
));
|
||||
}
|
||||
_ => { /* next */ }
|
||||
}
|
||||
|
@ -698,7 +714,7 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
self.pop_state();
|
||||
self.skip();
|
||||
Ok((Event::SequenceEnd, mark))
|
||||
},
|
||||
}
|
||||
Token(mark, TokenType::Key) => {
|
||||
self.state = State::FlowSequenceEntryMappingKey;
|
||||
self.skip();
|
||||
|
@ -727,7 +743,7 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
| Token(mark, TokenType::BlockEnd) => {
|
||||
self.state = State::IndentlessSequenceEntry;
|
||||
Ok((Event::empty_scalar(), mark))
|
||||
},
|
||||
}
|
||||
_ => {
|
||||
self.push_state(State::IndentlessSequenceEntry);
|
||||
self.parse_node(true, false)
|
||||
|
@ -747,25 +763,24 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
self.pop_state();
|
||||
self.skip();
|
||||
Ok((Event::SequenceEnd, mark))
|
||||
},
|
||||
}
|
||||
Token(_, TokenType::BlockEntry) => {
|
||||
self.skip();
|
||||
match *try!(self.peek_token()) {
|
||||
Token(mark, TokenType::BlockEntry)
|
||||
| Token(mark, TokenType::BlockEnd) => {
|
||||
Token(mark, TokenType::BlockEntry) | Token(mark, TokenType::BlockEnd) => {
|
||||
self.state = State::BlockSequenceEntry;
|
||||
Ok((Event::empty_scalar(), mark))
|
||||
},
|
||||
}
|
||||
_ => {
|
||||
self.push_state(State::BlockSequenceEntry);
|
||||
self.parse_node(true, false)
|
||||
}
|
||||
}
|
||||
},
|
||||
Token(mark, _) => {
|
||||
Err(ScanError::new(mark,
|
||||
"while parsing a block collection, did not find expected '-' indicator"))
|
||||
}
|
||||
Token(mark, _) => Err(ScanError::new(
|
||||
mark,
|
||||
"while parsing a block collection, did not find expected '-' indicator",
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -777,7 +792,7 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
self.skip();
|
||||
self.state = State::FlowSequenceEntryMappingValue;
|
||||
Ok((Event::empty_scalar(), mark))
|
||||
},
|
||||
}
|
||||
_ => {
|
||||
self.push_state(State::FlowSequenceEntryMappingValue);
|
||||
self.parse_node(false, false)
|
||||
|
@ -788,20 +803,19 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
|||
fn flow_sequence_entry_mapping_value(&mut self) -> ParseResult {
|
||||
match *try!(self.peek_token()) {
|
||||
Token(_, TokenType::Value) => {
|
||||
self.skip();
|
||||
self.state = State::FlowSequenceEntryMappingValue;
|
||||
match *try!(self.peek_token()) {
|
||||
Token(mark, TokenType::FlowEntry)
|
||||
| Token(mark, TokenType::FlowSequenceEnd) => {
|
||||
self.state = State::FlowSequenceEntryMappingEnd;
|
||||
Ok((Event::empty_scalar(), mark))
|
||||
},
|
||||
_ => {
|
||||
self.push_state(State::FlowSequenceEntryMappingEnd);
|
||||
self.parse_node(false, false)
|
||||
}
|
||||
self.skip();
|
||||
self.state = State::FlowSequenceEntryMappingValue;
|
||||
match *try!(self.peek_token()) {
|
||||
Token(mark, TokenType::FlowEntry) | Token(mark, TokenType::FlowSequenceEnd) => {
|
||||
self.state = State::FlowSequenceEntryMappingEnd;
|
||||
Ok((Event::empty_scalar(), mark))
|
||||
}
|
||||
},
|
||||
_ => {
|
||||
self.push_state(State::FlowSequenceEntryMappingEnd);
|
||||
self.parse_node(false, false)
|
||||
}
|
||||
}
|
||||
}
|
||||
Token(mark, _) => {
|
||||
self.state = State::FlowSequenceEntryMappingEnd;
|
||||
Ok((Event::empty_scalar(), mark))
|
||||

File diff suppressed because it is too large.

@@ -1,13 +1,13 @@
use linked_hash_map::LinkedHashMap;
use parser::*;
use scanner::{Marker, ScanError, TScalarStyle, TokenType};
use std::collections::BTreeMap;
use std::f64;
use std::i64;
use std::mem;
use std::ops::Index;
use std::string;
use std::i64;
use std::f64;
use std::mem;
use std::vec;
use parser::*;
use scanner::{TScalarStyle, ScanError, TokenType, Marker};
use linked_hash_map::LinkedHashMap;

/// A YAML node is stored as this `Yaml` enumeration, which provides an easy way to
/// access your YAML document.

@@ -62,7 +62,7 @@ fn parse_f64(v: &str) -> Option<f64> {
".inf" | ".Inf" | ".INF" | "+.inf" | "+.Inf" | "+.INF" => Some(f64::INFINITY),
"-.inf" | "-.Inf" | "-.INF" => Some(f64::NEG_INFINITY),
".nan" | "NaN" | ".NAN" => Some(f64::NAN),
_ => v.parse::<f64>().ok()
_ => v.parse::<f64>().ok(),
}
}
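
As a quick illustration of the special float spellings handled by parse_f64 above, observed through the public Yaml::from_str and as_f64 API (an assumed sketch, not part of the diff):

    // Illustrative only: YAML 1.1 float forms recognised by parse_f64,
    // exercised via the public API. Assumes yaml-rust as of this commit.
    extern crate yaml_rust;
    use std::f64;
    use yaml_rust::Yaml;

    fn main() {
        assert_eq!(Yaml::from_str(".inf").as_f64(), Some(f64::INFINITY));
        assert_eq!(Yaml::from_str("-.Inf").as_f64(), Some(f64::NEG_INFINITY));
        assert!(Yaml::from_str(".nan").as_f64().unwrap().is_nan());
        assert_eq!(Yaml::from_str("1.5").as_f64(), Some(1.5));
    }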
@ -81,31 +81,31 @@ impl MarkedEventReceiver for YamlLoader {
|
|||
match ev {
|
||||
Event::DocumentStart => {
|
||||
// do nothing
|
||||
},
|
||||
}
|
||||
Event::DocumentEnd => {
|
||||
match self.doc_stack.len() {
|
||||
// empty document
|
||||
0 => self.docs.push(Yaml::BadValue),
|
||||
1 => self.docs.push(self.doc_stack.pop().unwrap().0),
|
||||
_ => unreachable!()
|
||||
_ => unreachable!(),
|
||||
}
|
||||
},
|
||||
}
|
||||
Event::SequenceStart(aid) => {
|
||||
self.doc_stack.push((Yaml::Array(Vec::new()), aid));
|
||||
},
|
||||
}
|
||||
Event::SequenceEnd => {
|
||||
let node = self.doc_stack.pop().unwrap();
|
||||
self.insert_new_node(node);
|
||||
},
|
||||
}
|
||||
Event::MappingStart(aid) => {
|
||||
self.doc_stack.push((Yaml::Hash(Hash::new()), aid));
|
||||
self.key_stack.push(Yaml::BadValue);
|
||||
},
|
||||
}
|
||||
Event::MappingEnd => {
|
||||
self.key_stack.pop().unwrap();
|
||||
let node = self.doc_stack.pop().unwrap();
|
||||
self.insert_new_node(node);
|
||||
},
|
||||
}
|
||||
Event::Scalar(v, style, aid, tag) => {
|
||||
let node = if style != TScalarStyle::Plain {
|
||||
Yaml::String(v)
|
||||
|
@ -117,28 +117,22 @@ impl MarkedEventReceiver for YamlLoader {
|
|||
// "true" or "false"
|
||||
match v.parse::<bool>() {
|
||||
Err(_) => Yaml::BadValue,
|
||||
Ok(v) => Yaml::Boolean(v)
|
||||
}
|
||||
},
|
||||
"int" => {
|
||||
match v.parse::<i64>() {
|
||||
Err(_) => Yaml::BadValue,
|
||||
Ok(v) => Yaml::Integer(v)
|
||||
}
|
||||
},
|
||||
"float" => {
|
||||
match parse_f64(&v) {
|
||||
Some(_) => Yaml::Real(v),
|
||||
None => Yaml::BadValue,
|
||||
}
|
||||
},
|
||||
"null" => {
|
||||
match v.as_ref() {
|
||||
"~" | "null" => Yaml::Null,
|
||||
_ => Yaml::BadValue,
|
||||
Ok(v) => Yaml::Boolean(v),
|
||||
}
|
||||
}
|
||||
_ => Yaml::String(v),
|
||||
"int" => match v.parse::<i64>() {
|
||||
Err(_) => Yaml::BadValue,
|
||||
Ok(v) => Yaml::Integer(v),
|
||||
},
|
||||
"float" => match parse_f64(&v) {
|
||||
Some(_) => Yaml::Real(v),
|
||||
None => Yaml::BadValue,
|
||||
},
|
||||
"null" => match v.as_ref() {
|
||||
"~" | "null" => Yaml::Null,
|
||||
_ => Yaml::BadValue,
|
||||
},
|
||||
_ => Yaml::String(v),
|
||||
}
|
||||
} else {
|
||||
Yaml::String(v)
|
||||
|
@ -149,7 +143,7 @@ impl MarkedEventReceiver for YamlLoader {
|
|||
};
|
||||
|
||||
self.insert_new_node((node, aid));
|
||||
},
|
||||
}
|
||||
Event::Alias(id) => {
|
||||
let n = match self.anchor_map.get(&id) {
|
||||
Some(v) => v.clone(),
|
||||
|
@ -186,13 +180,13 @@ impl YamlLoader {
|
|||
mem::swap(&mut newkey, cur_key);
|
||||
h.insert(newkey, node.0);
|
||||
}
|
||||
},
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn load_from_str(source: &str) -> Result<Vec<Yaml>, ScanError>{
|
||||
pub fn load_from_str(source: &str) -> Result<Vec<Yaml>, ScanError> {
|
||||
let mut loader = YamlLoader {
|
||||
docs: Vec::new(),
|
||||
doc_stack: Vec::new(),
|
||||
|
@ -255,35 +249,35 @@ impl Yaml {
|
|||
pub fn is_null(&self) -> bool {
|
||||
match *self {
|
||||
Yaml::Null => true,
|
||||
_ => false
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_badvalue(&self) -> bool {
|
||||
match *self {
|
||||
Yaml::BadValue => true,
|
||||
_ => false
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_array(&self) -> bool {
|
||||
match *self {
|
||||
Yaml::Array(_) => true,
|
||||
_ => false
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_f64(&self) -> Option<f64> {
|
||||
match *self {
|
||||
Yaml::Real(ref v) => parse_f64(v),
|
||||
_ => None
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_f64(self) -> Option<f64> {
|
||||
match self {
|
||||
Yaml::Real(ref v) => parse_f64(v),
|
||||
_ => None
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -315,7 +309,7 @@ impl Yaml {
|
|||
_ if v.parse::<i64>().is_ok() => Yaml::Integer(v.parse::<i64>().unwrap()),
|
||||
// try parsing as f64
|
||||
_ if parse_f64(v).is_some() => Yaml::Real(v.to_owned()),
|
||||
_ => Yaml::String(v.to_owned())
|
||||
_ => Yaml::String(v.to_owned()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -328,7 +322,7 @@ impl<'a> Index<&'a str> for Yaml {
|
|||
let key = Yaml::String(idx.to_owned());
|
||||
match self.as_hash() {
|
||||
Some(h) => h.get(&key).unwrap_or(&BAD_VALUE),
|
||||
None => &BAD_VALUE
|
||||
None => &BAD_VALUE,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -354,8 +348,7 @@ impl IntoIterator for Yaml {
|
|||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
YamlIter {
|
||||
yaml: self.into_vec()
|
||||
.unwrap_or_else(Vec::new).into_iter()
|
||||
yaml: self.into_vec().unwrap_or_else(Vec::new).into_iter(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -374,8 +367,8 @@ impl Iterator for YamlIter {
|
|||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use yaml::*;
|
||||
use std::f64;
|
||||
use yaml::*;
|
||||
#[test]
|
||||
fn test_coerce() {
|
||||
let s = "---
|
||||
|
@ -424,8 +417,7 @@ a7: 你好
|
|||
|
||||
#[test]
|
||||
fn test_multi_doc() {
|
||||
let s =
|
||||
"
|
||||
let s = "
|
||||
'a scalar'
|
||||
---
|
||||
'a scalar'
|
||||
|
@ -438,8 +430,7 @@ a7: 你好
|
|||
|
||||
#[test]
|
||||
fn test_anchor() {
|
||||
let s =
|
||||
"
|
||||
let s = "
|
||||
a1: &DEFAULT
|
||||
b1: 4
|
||||
b2: d
|
||||
|
@ -452,8 +443,7 @@ a2: *DEFAULT
|
|||
|
||||
#[test]
|
||||
fn test_bad_anchor() {
|
||||
let s =
|
||||
"
|
||||
let s = "
|
||||
a1: &DEFAULT
|
||||
b1: 4
|
||||
b2: *DEFAULT
|
||||
|
@ -461,7 +451,6 @@ a1: &DEFAULT
|
|||
let out = YamlLoader::load_from_str(&s).unwrap();
|
||||
let doc = &out[0];
|
||||
assert_eq!(doc["a1"]["b2"], Yaml::BadValue);
|
||||
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -475,8 +464,7 @@ a1: &DEFAULT
|
|||
|
||||
#[test]
|
||||
fn test_plain_datatype() {
|
||||
let s =
|
||||
"
|
||||
let s = "
|
||||
- 'string'
|
||||
- \"string\"
|
||||
- string
|
||||
|
@ -555,15 +543,23 @@ a1: &DEFAULT
|
|||
#[test]
|
||||
fn test_bad_docstart() {
|
||||
assert!(YamlLoader::load_from_str("---This used to cause an infinite loop").is_ok());
|
||||
assert_eq!(YamlLoader::load_from_str("----"), Ok(vec![Yaml::String(String::from("----"))]));
|
||||
assert_eq!(YamlLoader::load_from_str("--- #here goes a comment"), Ok(vec![Yaml::Null]));
|
||||
assert_eq!(YamlLoader::load_from_str("---- #here goes a comment"), Ok(vec![Yaml::String(String::from("----"))]));
|
||||
assert_eq!(
|
||||
YamlLoader::load_from_str("----"),
|
||||
Ok(vec![Yaml::String(String::from("----"))])
|
||||
);
|
||||
assert_eq!(
|
||||
YamlLoader::load_from_str("--- #here goes a comment"),
|
||||
Ok(vec![Yaml::Null])
|
||||
);
|
||||
assert_eq!(
|
||||
YamlLoader::load_from_str("---- #here goes a comment"),
|
||||
Ok(vec![Yaml::String(String::from("----"))])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_plain_datatype_with_into_methods() {
|
||||
let s =
|
||||
"
|
||||
let s = "
|
||||
- 'string'
|
||||
- \"string\"
|
||||
- string
|
||||
|
@ -620,9 +616,18 @@ c: ~
|
|||
let out = YamlLoader::load_from_str(&s).unwrap();
|
||||
let first = out.into_iter().next().unwrap();
|
||||
let mut iter = first.into_hash().unwrap().into_iter();
|
||||
assert_eq!(Some((Yaml::String("b".to_owned()), Yaml::Null)), iter.next());
|
||||
assert_eq!(Some((Yaml::String("a".to_owned()), Yaml::Null)), iter.next());
|
||||
assert_eq!(Some((Yaml::String("c".to_owned()), Yaml::Null)), iter.next());
|
||||
assert_eq!(
|
||||
Some((Yaml::String("b".to_owned()), Yaml::Null)),
|
||||
iter.next()
|
||||
);
|
||||
assert_eq!(
|
||||
Some((Yaml::String("a".to_owned()), Yaml::Null)),
|
||||
iter.next()
|
||||
);
|
||||
assert_eq!(
|
||||
Some((Yaml::String("c".to_owned()), Yaml::Null)),
|
||||
iter.next()
|
||||
);
|
||||
assert_eq!(None, iter.next());
|
||||
}
|
||||
|
||||
|
@ -641,30 +646,49 @@ c: ~
|
|||
|
||||
#[test]
|
||||
fn test_indentation_equality() {
|
||||
|
||||
let four_spaces = YamlLoader::load_from_str(r#"
|
||||
let four_spaces = YamlLoader::load_from_str(
|
||||
r#"
|
||||
hash:
|
||||
with:
|
||||
indentations
|
||||
"#).unwrap().into_iter().next().unwrap();
|
||||
"#,
|
||||
).unwrap()
|
||||
.into_iter()
|
||||
.next()
|
||||
.unwrap();
|
||||
|
||||
let two_spaces = YamlLoader::load_from_str(r#"
|
||||
let two_spaces = YamlLoader::load_from_str(
|
||||
r#"
|
||||
hash:
|
||||
with:
|
||||
indentations
|
||||
"#).unwrap().into_iter().next().unwrap();
|
||||
"#,
|
||||
).unwrap()
|
||||
.into_iter()
|
||||
.next()
|
||||
.unwrap();
|
||||
|
||||
let one_space = YamlLoader::load_from_str(r#"
|
||||
let one_space = YamlLoader::load_from_str(
|
||||
r#"
|
||||
hash:
|
||||
with:
|
||||
indentations
|
||||
"#).unwrap().into_iter().next().unwrap();
|
||||
"#,
|
||||
).unwrap()
|
||||
.into_iter()
|
||||
.next()
|
||||
.unwrap();
|
||||
|
||||
let mixed_spaces = YamlLoader::load_from_str(r#"
|
||||
let mixed_spaces = YamlLoader::load_from_str(
|
||||
r#"
|
||||
hash:
|
||||
with:
|
||||
indentations
|
||||
"#).unwrap().into_iter().next().unwrap();
|
||||
"#,
|
||||
).unwrap()
|
||||
.into_iter()
|
||||
.next()
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(four_spaces, two_spaces);
|
||||
assert_eq!(two_spaces, one_space);
|
||||
|
@ -691,7 +715,7 @@ subcommands3:
|
|||
let doc = &out.into_iter().next().unwrap();
|
||||
|
||||
println!("{:#?}", doc);
|
||||
assert_eq!(doc["subcommands"][0]["server"], Yaml::Null);
|
||||
assert_eq!(doc["subcommands"][0]["server"], Yaml::Null);
|
||||
assert!(doc["subcommands2"][0]["server"].as_hash().is_some());
|
||||
assert!(doc["subcommands3"][0]["server"].as_hash().is_some());
|
||||
}
|
||||
|
|
|

@@ -3,8 +3,8 @@ extern crate yaml_rust;
extern crate quickcheck;

use quickcheck::TestResult;
use yaml_rust::{Yaml, YamlLoader, YamlEmitter};
use std::error::Error;
use yaml_rust::{Yaml, YamlEmitter, YamlLoader};

quickcheck! {
fn test_check_weird_keys(xs: Vec<String>) -> TestResult {

@@ -2,7 +2,7 @@
#![allow(non_upper_case_globals)]
extern crate yaml_rust;

use yaml_rust::parser::{Parser, EventReceiver, Event};
use yaml_rust::parser::{Event, EventReceiver, Parser};
use yaml_rust::scanner::TScalarStyle;

// These names match the names used in the C++ test suite.

@@ -21,7 +21,7 @@ enum TestEvent {
}

struct YamlChecker {
pub evs: Vec<TestEvent>
pub evs: Vec<TestEvent>,
}

impl EventReceiver for YamlChecker {
@ -33,36 +33,36 @@ impl EventReceiver for YamlChecker {
|
|||
Event::SequenceEnd => TestEvent::OnSequenceEnd,
|
||||
Event::MappingStart(..) => TestEvent::OnMapStart,
|
||||
Event::MappingEnd => TestEvent::OnMapEnd,
|
||||
Event::Scalar(ref v, style, _, _)=> {
|
||||
Event::Scalar(ref v, style, _, _) => {
|
||||
if v == "~" && style == TScalarStyle::Plain {
|
||||
TestEvent::OnNull
|
||||
} else {
|
||||
TestEvent::OnScalar
|
||||
}
|
||||
},
|
||||
}
|
||||
Event::Alias(_) => TestEvent::OnAlias,
|
||||
_ => { return } // ignore other events
|
||||
_ => return, // ignore other events
|
||||
};
|
||||
self.evs.push(tev);
|
||||
}
|
||||
}
|
||||
|
||||
fn str_to_test_events(docs: &str) -> Vec<TestEvent> {
|
||||
let mut p = YamlChecker {
|
||||
evs: Vec::new()
|
||||
};
|
||||
let mut p = YamlChecker { evs: Vec::new() };
|
||||
let mut parser = Parser::new(docs.chars());
|
||||
parser.load(&mut p, true).unwrap();
|
||||
p.evs
|
||||
}
|
||||
|
||||
macro_rules! assert_next {
|
||||
($v:expr, $p:pat) => (
|
||||
($v:expr, $p:pat) => {
|
||||
match $v.next().unwrap() {
|
||||
$p => {},
|
||||
e => { panic!("unexpected event: {:?}", e); }
|
||||
$p => {}
|
||||
e => {
|
||||
panic!("unexpected event: {:?}", e);
|
||||
}
|
||||
}
|
||||
)
|
||||
};
|
||||
}
|
||||
|
||||
// auto generated from handler_spec_test.cpp
|
||||
|
@ -76,66 +76,65 @@ include!("spec_test.rs.inc");
|
|||
|
||||
#[test]
|
||||
fn test_mapvec_legal() {
|
||||
use yaml_rust::yaml::{Array, Hash, Yaml};
|
||||
use yaml_rust::{YamlLoader, YamlEmitter};
|
||||
use yaml_rust::yaml::{Array, Hash, Yaml};
|
||||
use yaml_rust::{YamlEmitter, YamlLoader};
|
||||
|
||||
// Emitting a `map<map<seq<_>>, _>` should result in legal yaml that
|
||||
// we can parse.
|
||||
// Emitting a `map<map<seq<_>>, _>` should result in legal yaml that
|
||||
// we can parse.
|
||||
|
||||
let mut key = Array::new();
|
||||
key.push(Yaml::Integer(1));
|
||||
key.push(Yaml::Integer(2));
|
||||
key.push(Yaml::Integer(3));
|
||||
let mut key = Array::new();
|
||||
key.push(Yaml::Integer(1));
|
||||
key.push(Yaml::Integer(2));
|
||||
key.push(Yaml::Integer(3));
|
||||
|
||||
let mut keyhash = Hash::new();
|
||||
keyhash.insert(Yaml::String("key".into()), Yaml::Array(key));
|
||||
let mut keyhash = Hash::new();
|
||||
keyhash.insert(Yaml::String("key".into()), Yaml::Array(key));
|
||||
|
||||
let mut val = Array::new();
|
||||
val.push(Yaml::Integer(4));
|
||||
val.push(Yaml::Integer(5));
|
||||
val.push(Yaml::Integer(6));
|
||||
let mut val = Array::new();
|
||||
val.push(Yaml::Integer(4));
|
||||
val.push(Yaml::Integer(5));
|
||||
val.push(Yaml::Integer(6));
|
||||
|
||||
let mut hash = Hash::new();
|
||||
hash.insert(Yaml::Hash(keyhash), Yaml::Array(val));
|
||||
let mut hash = Hash::new();
|
||||
hash.insert(Yaml::Hash(keyhash), Yaml::Array(val));
|
||||
|
||||
let mut out_str = String::new();
|
||||
{
|
||||
let mut emitter = YamlEmitter::new(&mut out_str);
|
||||
emitter.dump(&Yaml::Hash(hash)).unwrap();
|
||||
}
|
||||
let mut out_str = String::new();
|
||||
{
|
||||
let mut emitter = YamlEmitter::new(&mut out_str);
|
||||
emitter.dump(&Yaml::Hash(hash)).unwrap();
|
||||
}
|
||||
|
||||
// At this point, we are tempted to naively render like this:
|
||||
//
|
||||
// ```yaml
|
||||
// ---
|
||||
// {key:
|
||||
// - 1
|
||||
// - 2
|
||||
// - 3}:
|
||||
// - 4
|
||||
// - 5
|
||||
// - 6
|
||||
// ```
|
||||
//
|
||||
// However, this doesn't work, because the key sequence [1, 2, 3] is
|
||||
// rendered in block mode, which is not legal (as far as I can tell)
|
||||
// inside the flow mode of the key. We need to either fully render
|
||||
// everything that's in a key in flow mode (which may make for some
|
||||
// long lines), or use the explicit map identifier '?':
|
||||
//
|
||||
// ```yaml
|
||||
// ---
|
||||
// ?
|
||||
// key:
|
||||
// - 1
|
||||
// - 2
|
||||
// - 3
|
||||
// :
|
||||
// - 4
|
||||
// - 5
|
||||
// - 6
|
||||
// ```
|
||||
// At this point, we are tempted to naively render like this:
|
||||
//
|
||||
// ```yaml
|
||||
// ---
|
||||
// {key:
|
||||
// - 1
|
||||
// - 2
|
||||
// - 3}:
|
||||
// - 4
|
||||
// - 5
|
||||
// - 6
|
||||
// ```
|
||||
//
|
||||
// However, this doesn't work, because the key sequence [1, 2, 3] is
|
||||
// rendered in block mode, which is not legal (as far as I can tell)
|
||||
// inside the flow mode of the key. We need to either fully render
|
||||
// everything that's in a key in flow mode (which may make for some
|
||||
// long lines), or use the explicit map identifier '?':
|
||||
//
|
||||
// ```yaml
|
||||
// ---
|
||||
// ?
|
||||
// key:
|
||||
// - 1
|
||||
// - 2
|
||||
// - 3
|
||||
// :
|
||||
// - 4
|
||||
// - 5
|
||||
// - 6
|
||||
// ```
|
||||
|
||||
YamlLoader::load_from_str(&out_str).unwrap();
|
||||
YamlLoader::load_from_str(&out_str).unwrap();
|
||||
}