Format with rustfmt 0.99.4

parent 124d237be0
commit 5039af6862

8 changed files with 733 additions and 584 deletions
@@ -17,14 +17,14 @@ fn dump_node(doc: &yaml::Yaml, indent: usize) {
             for x in v {
                 dump_node(x, indent + 1);
             }
-        },
+        }
         yaml::Yaml::Hash(ref h) => {
             for (k, v) in h {
                 print_indent(indent);
                 println!("{:?}:", k);
                 dump_node(v, indent + 1);
             }
-        },
+        }
         _ => {
             print_indent(indent);
             println!("{:?}", doc);
@@ -1,13 +1,12 @@
-use std::fmt::{self, Display};
 use std::convert::From;
 use std::error::Error;
+use std::fmt::{self, Display};
 use yaml::{Hash, Yaml};
-
 
 #[derive(Copy, Clone, Debug)]
 pub enum EmitError {
     FmtError(fmt::Error),
     BadHashmapKey,
 }
 
 impl Error for EmitError {
@@ -91,7 +90,7 @@ fn escape_str(wr: &mut fmt::Write, v: &str) -> Result<(), fmt::Error> {
             b'\x1e' => "\\u001e",
             b'\x1f' => "\\u001f",
             b'\x7f' => "\\u007f",
-            _ => { continue; }
+            _ => continue,
         };
 
         if start < i {
@@ -118,7 +117,7 @@ impl<'a> YamlEmitter<'a> {
             best_indent: 2,
             compact: true,
 
-            level: -1
+            level: -1,
         }
     }
 
@@ -131,12 +130,12 @@ impl<'a> YamlEmitter<'a> {
     /// or tags), which should be OK, because this emitter doesn't
     /// (currently) emit those anyways.
     pub fn compact(&mut self, compact: bool) {
         self.compact = compact;
     }
 
     /// Determine if this emitter is using 'compact inline notation'.
     pub fn is_compact(&self) -> bool {
         self.compact
     }
 
     pub fn dump(&mut self, doc: &Yaml) -> EmitResult {
@@ -147,7 +146,9 @@ impl<'a> YamlEmitter<'a> {
     }
 
     fn write_indent(&mut self) -> EmitResult {
-        if self.level <= 0 { return Ok(()); }
+        if self.level <= 0 {
+            return Ok(());
+        }
         for _ in 0..self.level {
             for _ in 0..self.best_indent {
                 try!(write!(self.writer, " "));
@@ -163,12 +164,11 @@ impl<'a> YamlEmitter<'a> {
             Yaml::String(ref v) => {
                 if need_quotes(v) {
                     try!(escape_str(self.writer, v));
-                }
-                else {
+                } else {
                     try!(write!(self.writer, "{}", v));
                 }
                 Ok(())
-            },
+            }
             Yaml::Boolean(v) => {
                 if v {
                     try!(self.writer.write_str("true"));
@@ -176,21 +176,21 @@ impl<'a> YamlEmitter<'a> {
                     try!(self.writer.write_str("false"));
                 }
                 Ok(())
-            },
+            }
             Yaml::Integer(v) => {
                 try!(write!(self.writer, "{}", v));
                 Ok(())
-            },
+            }
             Yaml::Real(ref v) => {
                 try!(write!(self.writer, "{}", v));
                 Ok(())
-            },
+            }
             Yaml::Null | Yaml::BadValue => {
                 try!(write!(self.writer, "~"));
                 Ok(())
-            },
+            }
             // XXX(chenyh) Alias
-            _ => { Ok(()) }
+            _ => Ok(()),
         }
     }
 
@@ -219,24 +219,24 @@ impl<'a> YamlEmitter<'a> {
         self.level += 1;
         for (cnt, (k, v)) in h.iter().enumerate() {
             let complex_key = match *k {
                 Yaml::Hash(_) | Yaml::Array(_) => true,
                 _ => false,
             };
             if cnt > 0 {
                 try!(write!(self.writer, "\n"));
                 try!(self.write_indent());
             }
             if complex_key {
                 try!(write!(self.writer, "?"));
                 try!(self.emit_val(true, k));
                 try!(write!(self.writer, "\n"));
                 try!(self.write_indent());
                 try!(write!(self.writer, ":"));
                 try!(self.emit_val(true, v));
             } else {
                 try!(self.emit_node(k));
                 try!(write!(self.writer, ":"));
                 try!(self.emit_val(false, v));
             }
         }
         self.level -= 1;
@@ -260,7 +260,7 @@ impl<'a> YamlEmitter<'a> {
                     self.level -= 1;
                 }
                 self.emit_array(v)
-            },
+            }
             Yaml::Hash(ref h) => {
                 if (inline && self.compact) || h.is_empty() {
                     try!(write!(self.writer, " "));
@@ -271,7 +271,7 @@ impl<'a> YamlEmitter<'a> {
                     self.level -= 1;
                 }
                 self.emit_hash(h)
-            },
+            }
             _ => {
                 try!(write!(self.writer, " "));
                 self.emit_node(val)
@@ -296,37 +296,48 @@ impl<'a> YamlEmitter<'a> {
 /// * When the string looks like a date (e.g. 2014-12-31) (otherwise it would be automatically converted into a Unix timestamp).
 fn need_quotes(string: &str) -> bool {
     fn need_quotes_spaces(string: &str) -> bool {
-        string.starts_with(' ')
-            || string.ends_with(' ')
+        string.starts_with(' ') || string.ends_with(' ')
     }
 
     string == ""
-        || need_quotes_spaces(string)
-        || string.starts_with(|character: char| {
-            match character {
-                ':' | '&' | '*' | '?' | '|' | '-' | '<' | '>' | '=' | '!' | '%' | '@' => true,
-                _ => false,
-            }
-        })
-        || string.contains(|character: char| {
-            match character {
-                '{' | '}' | '[' | ']' | ',' | '#' | '`' | '\"' | '\'' | '\\' | '\0' ... '\x06' | '\t' | '\n' | '\r' | '\x0e' ... '\x1a' | '\x1c' ... '\x1f' => true,
-                _ => false,
-            }
-        })
-        || [// http://yaml.org/type/bool.html
-            // Note: 'y', 'Y', 'n', 'N', is not quoted deliberately, as in libyaml. PyYAML also parse
-            // them as string, not booleans, although it is volating the YAML 1.1 specification.
-            // See https://github.com/dtolnay/serde-yaml/pull/83#discussion_r152628088.
-            "yes","Yes","YES","no","No","NO",
-            "True", "TRUE", "true", "False", "FALSE", "false",
-            "on","On","ON","off","Off","OFF",
-            // http://yaml.org/type/null.html
-            "null","Null","NULL", "~"
-        ].contains(&string)
-        || string.starts_with('.')
-        || string.parse::<i64>().is_ok()
-        || string.parse::<f64>().is_ok()
+        || need_quotes_spaces(string)
+        || string.starts_with(|character: char| match character {
+            ':' | '&' | '*' | '?' | '|' | '-' | '<' | '>' | '=' | '!' | '%' | '@' => true,
+            _ => false,
+        })
+        || string.contains(|character: char| match character {
+            '{'
+            | '}'
+            | '['
+            | ']'
+            | ','
+            | '#'
+            | '`'
+            | '\"'
+            | '\''
+            | '\\'
+            | '\0'...'\x06'
+            | '\t'
+            | '\n'
+            | '\r'
+            | '\x0e'...'\x1a'
+            | '\x1c'...'\x1f' => true,
+            _ => false,
+        })
+        || [
+            // http://yaml.org/type/bool.html
+            // Note: 'y', 'Y', 'n', 'N', is not quoted deliberately, as in libyaml. PyYAML also parse
+            // them as string, not booleans, although it is volating the YAML 1.1 specification.
+            // See https://github.com/dtolnay/serde-yaml/pull/83#discussion_r152628088.
+            "yes", "Yes", "YES", "no", "No", "NO", "True", "TRUE", "true", "False", "FALSE",
+            "false", "on", "On", "ON", "off", "Off", "OFF",
+            // http://yaml.org/type/null.html
+            "null", "Null", "NULL", "~",
+        ]
+        .contains(&string)
+        || string.starts_with('.')
+        || string.parse::<i64>().is_ok()
+        || string.parse::<f64>().is_ok()
 }
 
 #[cfg(test)]
@@ -349,7 +360,6 @@ a4:
   - 2
 ";
 
-
         let docs = YamlLoader::load_from_str(&s).unwrap();
         let doc = &docs[0];
         let mut writer = String::new();
@@ -361,7 +371,7 @@ a4:
         println!("emitted:\n{}", writer);
         let docs_new = match YamlLoader::load_from_str(&writer) {
             Ok(y) => y,
-            Err(e) => panic!(format!("{}", e))
+            Err(e) => panic!(format!("{}", e)),
         };
         let doc_new = &docs_new[0];
 
@@ -398,7 +408,7 @@ products:
         }
         let docs_new = match YamlLoader::load_from_str(&writer) {
             Ok(y) => y,
-            Err(e) => panic!(format!("{}", e))
+            Err(e) => panic!(format!("{}", e)),
         };
         let doc_new = &docs_new[0];
         assert_eq!(doc, doc_new);
@@ -506,21 +516,26 @@ bool1: false"#;
             emitter.dump(doc).unwrap();
         }
 
-        assert_eq!(expected, writer, "expected:\n{}\nactual:\n{}\n", expected, writer);
+        assert_eq!(
+            expected, writer,
+            "expected:\n{}\nactual:\n{}\n",
+            expected, writer
+        );
     }
 
     #[test]
     fn test_empty_and_nested() {
         test_empty_and_nested_flag(false)
     }
 
     #[test]
     fn test_empty_and_nested_compact() {
         test_empty_and_nested_flag(true)
     }
 
     fn test_empty_and_nested_flag(compact: bool) {
-        let s = if compact { r#"---
+        let s = if compact {
+            r#"---
 a:
   b:
     c: hello
@@ -528,7 +543,9 @@ a:
 e:
   - f
   - g
-  - h: []"# } else { r#"---
+  - h: []"#
+        } else {
+            r#"---
 a:
   b:
     c: hello
@@ -537,7 +554,8 @@ e:
   - f
   - g
   -
-    h: []"# };
+    h: []"#
+        };
 
         let docs = YamlLoader::load_from_str(&s).unwrap();
         let doc = &docs[0];
@@ -41,16 +41,16 @@
 
 extern crate linked_hash_map;
 
-pub mod yaml;
-pub mod scanner;
-pub mod parser;
 pub mod emitter;
+pub mod parser;
+pub mod scanner;
+pub mod yaml;
 
 // reexport key APIs
-pub use scanner::ScanError;
+pub use emitter::{EmitError, YamlEmitter};
 pub use parser::Event;
+pub use scanner::ScanError;
 pub use yaml::{Yaml, YamlLoader};
-pub use emitter::{YamlEmitter, EmitError};
 
 #[cfg(test)]
 mod tests {
@@ -58,8 +58,7 @@ mod tests {
 
     #[test]
     fn test_api() {
-        let s =
-"
+        let s = "
 # from yaml-cpp example
 - name: Ogre
   position: [0, 5, 0]
@@ -104,8 +103,7 @@ mod tests {
 
     #[test]
     fn test_fail() {
-        let s =
-"
+        let s = "
 # syntax error
 scalar
 key: [1, 2]]
@@ -26,7 +26,7 @@ enum State {
     FlowMappingKey,
     FlowMappingValue,
     FlowMappingEmptyValue,
-    End
+    End,
 }
 
 /// `Event` is used with the low-level event base parsing API,
@@ -48,7 +48,7 @@ pub enum Event {
     SequenceEnd,
     /// Anchor ID
     MappingStart(usize),
-    MappingEnd
+    MappingEnd,
 }
 
 impl Event {
@@ -74,12 +74,10 @@ pub struct Parser<T> {
     anchor_id: usize,
 }
 
-
 pub trait EventReceiver {
     fn on_event(&mut self, ev: Event);
 }
 
-
 pub trait MarkedEventReceiver {
     fn on_event(&mut self, ev: Event, _mark: Marker);
 }
@@ -92,7 +90,7 @@ impl<R: EventReceiver> MarkedEventReceiver for R {
 
 pub type ParseResult = Result<(Event, Marker), ScanError>;
 
-impl<T: Iterator<Item=char>> Parser<T> {
+impl<T: Iterator<Item = char>> Parser<T> {
     pub fn new(src: T) -> Parser<T> {
         Parser {
             scanner: Scanner::new(src),
@@ -121,39 +119,37 @@ impl<T: Iterator<Item = char>> Parser<T> {
     pub fn next(&mut self) -> ParseResult {
         match self.current {
             None => self.parse(),
-            Some(_) => {
-                Ok(self.current.take().unwrap())
-            }
+            Some(_) => Ok(self.current.take().unwrap()),
         }
     }
 
     fn peek_token(&mut self) -> Result<&Token, ScanError> {
         match self.token {
             None => {
                 self.token = Some(try!(self.scan_next_token()));
                 Ok(self.token.as_ref().unwrap())
-            },
-            Some(ref tok) => Ok(tok)
+            }
+            Some(ref tok) => Ok(tok),
         }
     }
 
     fn scan_next_token(&mut self) -> Result<Token, ScanError> {
         let token = self.scanner.next();
         match token {
-            None =>
-                match self.scanner.get_error() {
-                    None => Err(ScanError::new(self.scanner.mark(), "unexpected eof")),
-                    Some(e) => Err(e),
-                },
-            Some(tok) => Ok(tok)
+            None => match self.scanner.get_error() {
+                None => Err(ScanError::new(self.scanner.mark(), "unexpected eof")),
+                Some(e) => Err(e),
+            },
+            Some(tok) => Ok(tok),
         }
     }
 
     fn fetch_token(&mut self) -> Token {
-        self.token.take().expect("fetch_token needs to be preceded by peek_token")
+        self.token
+            .take()
+            .expect("fetch_token needs to be preceded by peek_token")
     }
 
-
     fn skip(&mut self) {
         self.token = None;
         //self.peek_token();
@@ -174,8 +170,11 @@ impl<T: Iterator<Item = char>> Parser<T> {
         Ok((ev, mark))
     }
 
-    pub fn load<R: MarkedEventReceiver>(&mut self, recv: &mut R, multi: bool)
-        -> Result<(), ScanError> {
+    pub fn load<R: MarkedEventReceiver>(
+        &mut self,
+        recv: &mut R,
+        multi: bool,
+    ) -> Result<(), ScanError> {
         if !self.scanner.stream_started() {
             let (ev, mark) = try!(self.next());
             assert_eq!(ev, Event::StreamStart);
@@ -203,8 +202,12 @@ impl<T: Iterator<Item = char>> Parser<T> {
         Ok(())
     }
 
-    fn load_document<R: MarkedEventReceiver>(&mut self, first_ev: Event, mark: Marker, recv: &mut R)
-        -> Result<(), ScanError> {
+    fn load_document<R: MarkedEventReceiver>(
+        &mut self,
+        first_ev: Event,
+        mark: Marker,
+        recv: &mut R,
+    ) -> Result<(), ScanError> {
         assert_eq!(first_ev, Event::DocumentStart);
         recv.on_event(first_ev, mark);
 
@@ -219,28 +222,33 @@ impl<T: Iterator<Item = char>> Parser<T> {
         Ok(())
     }
 
-    fn load_node<R: MarkedEventReceiver>(&mut self, first_ev: Event, mark: Marker, recv: &mut R)
-        -> Result<(), ScanError> {
+    fn load_node<R: MarkedEventReceiver>(
+        &mut self,
+        first_ev: Event,
+        mark: Marker,
+        recv: &mut R,
+    ) -> Result<(), ScanError> {
         match first_ev {
             Event::Alias(..) | Event::Scalar(..) => {
                 recv.on_event(first_ev, mark);
                 Ok(())
-            },
+            }
             Event::SequenceStart(_) => {
                 recv.on_event(first_ev, mark);
                 self.load_sequence(recv)
-            },
+            }
             Event::MappingStart(_) => {
                 recv.on_event(first_ev, mark);
                 self.load_mapping(recv)
-            },
-            _ => { println!("UNREACHABLE EVENT: {:?}", first_ev);
-                unreachable!(); }
+            }
+            _ => {
+                println!("UNREACHABLE EVENT: {:?}", first_ev);
+                unreachable!();
+            }
         }
     }
 
-    fn load_mapping<R: MarkedEventReceiver>(&mut self, recv: &mut R)
-        -> Result<(), ScanError> {
+    fn load_mapping<R: MarkedEventReceiver>(&mut self, recv: &mut R) -> Result<(), ScanError> {
         let (mut key_ev, mut key_mark) = try!(self.next());
         while key_ev != Event::MappingEnd {
             // key
@@ -254,14 +262,12 @@ impl<T: Iterator<Item = char>> Parser<T> {
             let (ev, mark) = try!(self.next());
             key_ev = ev;
             key_mark = mark;
-
         }
         recv.on_event(key_ev, key_mark);
         Ok(())
     }
 
-    fn load_sequence<R: MarkedEventReceiver>(&mut self, recv: &mut R)
-        -> Result<(), ScanError> {
+    fn load_sequence<R: MarkedEventReceiver>(&mut self, recv: &mut R) -> Result<(), ScanError> {
         let (mut ev, mut mark) = try!(self.next());
         while ev != Event::SequenceEnd {
             try!(self.load_node(ev, mark, recv));
@@ -289,7 +295,6 @@ impl<T: Iterator<Item = char>> Parser<T> {
             State::BlockNode => self.parse_node(true, false),
             // State::BlockNodeOrIndentlessSequence => self.parse_node(true, true),
             // State::FlowNode => self.parse_node(false, false),
-
             State::BlockMappingFirstKey => self.block_mapping_key(true),
             State::BlockMappingKey => self.block_mapping_key(false),
             State::BlockMappingValue => self.block_mapping_value(),
@@ -322,9 +327,8 @@ impl<T: Iterator<Item = char>> Parser<T> {
                 self.state = State::ImplicitDocumentStart;
                 self.skip();
                 Ok((Event::StreamStart, mark))
-            },
-            Token(mark, _) => Err(ScanError::new(mark,
-                "did not find expected <stream-start>")),
+            }
+            Token(mark, _) => Err(ScanError::new(mark, "did not find expected <stream-start>")),
         }
     }
 
|
@ -340,19 +344,19 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
||||||
self.state = State::End;
|
self.state = State::End;
|
||||||
self.skip();
|
self.skip();
|
||||||
Ok((Event::StreamEnd, mark))
|
Ok((Event::StreamEnd, mark))
|
||||||
},
|
}
|
||||||
Token(_, TokenType::VersionDirective(..))
|
Token(_, TokenType::VersionDirective(..))
|
||||||
| Token(_, TokenType::TagDirective(..))
|
| Token(_, TokenType::TagDirective(..))
|
||||||
| Token(_, TokenType::DocumentStart) => {
|
| Token(_, TokenType::DocumentStart) => {
|
||||||
// explicit document
|
// explicit document
|
||||||
self._explict_document_start()
|
self._explict_document_start()
|
||||||
},
|
}
|
||||||
Token(mark, _) if implicit => {
|
Token(mark, _) if implicit => {
|
||||||
try!(self.parser_process_directives());
|
try!(self.parser_process_directives());
|
||||||
self.push_state(State::DocumentEnd);
|
self.push_state(State::DocumentEnd);
|
||||||
self.state = State::BlockNode;
|
self.state = State::BlockNode;
|
||||||
Ok((Event::DocumentStart, mark))
|
Ok((Event::DocumentStart, mark))
|
||||||
},
|
}
|
||||||
_ => {
|
_ => {
|
||||||
// explicit document
|
// explicit document
|
||||||
self._explict_document_start()
|
self._explict_document_start()
|
||||||
|
@@ -369,11 +373,11 @@ impl<T: Iterator<Item = char>> Parser<T> {
                     // return Err(ScanError::new(tok.0,
                     //     "found incompatible YAML document"));
                     //}
-                },
+                }
                 TokenType::TagDirective(..) => {
                     // TODO add tag directive
-                },
-                _ => break
+                }
+                _ => break,
             }
             self.skip();
         }
|
@ -389,9 +393,12 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
||||||
self.state = State::DocumentContent;
|
self.state = State::DocumentContent;
|
||||||
self.skip();
|
self.skip();
|
||||||
Ok((Event::DocumentStart, mark))
|
Ok((Event::DocumentStart, mark))
|
||||||
}
|
}
|
||||||
Token(mark, _) => Err(ScanError::new(mark, "did not find expected <document start>"))
|
Token(mark, _) => Err(ScanError::new(
|
||||||
}
|
mark,
|
||||||
|
"did not find expected <document start>",
|
||||||
|
)),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn document_content(&mut self) -> ParseResult {
|
fn document_content(&mut self) -> ParseResult {
|
||||||
|
@@ -404,10 +411,8 @@ impl<T: Iterator<Item = char>> Parser<T> {
                 self.pop_state();
                 // empty scalar
                 Ok((Event::empty_scalar(), mark))
-            },
-            _ => {
-                self.parse_node(true, false)
             }
+            _ => self.parse_node(true, false),
         }
     }
 
@@ -418,10 +423,10 @@ impl<T: Iterator<Item = char>> Parser<T> {
                 self.skip();
                 _implicit = false;
                 mark
-            },
-            Token(mark, _) => mark
+            }
+            Token(mark, _) => mark,
         };
 
         // TODO tag handling
         self.state = State::DocumentStart;
         Ok((Event::DocumentEnd, marker))
@@ -447,13 +452,18 @@ impl<T: Iterator<Item = char>> Parser<T> {
                 self.pop_state();
                 if let Token(mark, TokenType::Alias(name)) = self.fetch_token() {
                     match self.anchors.get(&name) {
-                        None => return Err(ScanError::new(mark, "while parsing node, found unknown anchor")),
-                        Some(id) => return Ok((Event::Alias(*id), mark))
+                        None => {
+                            return Err(ScanError::new(
+                                mark,
+                                "while parsing node, found unknown anchor",
+                            ))
+                        }
+                        Some(id) => return Ok((Event::Alias(*id), mark)),
                     }
                 } else {
                     unreachable!()
                 }
-            },
+            }
             Token(_, TokenType::Anchor(_)) => {
                 if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() {
                     anchor_id = try!(self.register_anchor(name, &mark));
@@ -467,7 +477,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
                 } else {
                     unreachable!()
                 }
-            },
+            }
             Token(_, TokenType::Tag(..)) => {
                 if let tg @ TokenType::Tag(..) = self.fetch_token().1 {
                     tag = Some(tg);
@@ -481,14 +491,14 @@ impl<T: Iterator<Item = char>> Parser<T> {
                 } else {
                     unreachable!()
                 }
-            },
+            }
             _ => {}
         }
         match *try!(self.peek_token()) {
             Token(mark, TokenType::BlockEntry) if indentless_sequence => {
                 self.state = State::IndentlessSequenceEntry;
                 Ok((Event::SequenceStart(anchor_id), mark))
-            },
+            }
             Token(_, TokenType::Scalar(..)) => {
                 self.pop_state();
                 if let Token(mark, TokenType::Scalar(style, v)) = self.fetch_token() {
@@ -496,29 +506,32 @@ impl<T: Iterator<Item = char>> Parser<T> {
                 } else {
                     unreachable!()
                 }
-            },
+            }
             Token(mark, TokenType::FlowSequenceStart) => {
                 self.state = State::FlowSequenceFirstEntry;
                 Ok((Event::SequenceStart(anchor_id), mark))
-            },
+            }
             Token(mark, TokenType::FlowMappingStart) => {
                 self.state = State::FlowMappingFirstKey;
                 Ok((Event::MappingStart(anchor_id), mark))
-            },
+            }
             Token(mark, TokenType::BlockSequenceStart) if block => {
                 self.state = State::BlockSequenceFirstEntry;
                 Ok((Event::SequenceStart(anchor_id), mark))
-            },
+            }
             Token(mark, TokenType::BlockMappingStart) if block => {
                 self.state = State::BlockMappingFirstKey;
                 Ok((Event::MappingStart(anchor_id), mark))
-            },
+            }
             // ex 7.2, an empty scalar can follow a secondary tag
             Token(mark, _) if tag.is_some() || anchor_id > 0 => {
                 self.pop_state();
                 Ok((Event::empty_scalar_with_anchor(anchor_id, tag), mark))
-            },
-            Token(mark, _) => { Err(ScanError::new(mark, "while parsing a node, did not find expected node content")) }
+            }
+            Token(mark, _) => Err(ScanError::new(
+                mark,
+                "while parsing a node, did not find expected node content",
+            )),
         }
     }
 
@@ -545,20 +558,21 @@ impl<T: Iterator<Item = char>> Parser<T> {
                         self.parse_node(true, true)
                     }
                 }
-            },
+            }
             // XXX(chenyh): libyaml failed to parse spec 1.2, ex8.18
             Token(mark, TokenType::Value) => {
                 self.state = State::BlockMappingValue;
                 Ok((Event::empty_scalar(), mark))
-            },
+            }
             Token(mark, TokenType::BlockEnd) => {
                 self.pop_state();
                 self.skip();
                 Ok((Event::MappingEnd, mark))
-            },
-            Token(mark, _) => {
-                Err(ScanError::new(mark, "while parsing a block mapping, did not find expected key"))
             }
+            Token(mark, _) => Err(ScanError::new(
+                mark,
+                "while parsing a block mapping, did not find expected key",
+            )),
         }
     }
 
@@ -573,13 +587,13 @@ impl<T: Iterator<Item = char>> Parser<T> {
                         self.state = State::BlockMappingKey;
                         // empty scalar
                         Ok((Event::empty_scalar(), mark))
-                    },
+                    }
                     _ => {
                         self.push_state(State::BlockMappingKey);
                         self.parse_node(true, true)
                     }
                 }
-            },
+            }
             Token(mark, _) => {
                 self.state = State::BlockMappingKey;
                 // empty scalar
@@ -593,49 +607,50 @@ impl<T: Iterator<Item = char>> Parser<T> {
             let _ = try!(self.peek_token());
             self.skip();
         }
         let marker: Marker =
             {
                 match *try!(self.peek_token()) {
                     Token(mark, TokenType::FlowMappingEnd) => mark,
                     Token(mark, _) => {
                         if !first {
                             match *try!(self.peek_token()) {
                                 Token(_, TokenType::FlowEntry) => self.skip(),
                                 Token(mark, _) => return Err(ScanError::new(mark,
                                     "while parsing a flow mapping, did not find expected ',' or '}'"))
                             }
                         }
 
                         match *try!(self.peek_token()) {
                             Token(_, TokenType::Key) => {
                                 self.skip();
                                 match *try!(self.peek_token()) {
                                     Token(mark, TokenType::Value)
                                     | Token(mark, TokenType::FlowEntry)
                                     | Token(mark, TokenType::FlowMappingEnd) => {
                                         self.state = State::FlowMappingValue;
                                         return Ok((Event::empty_scalar(), mark));
                                     }
                                     _ => {
                                         self.push_state(State::FlowMappingValue);
                                         return self.parse_node(false, false);
                                     }
                                 }
                             }
                             Token(marker, TokenType::Value) => {
                                 self.state = State::FlowMappingValue;
                                 return Ok((Event::empty_scalar(), marker));
                             }
                             Token(_, TokenType::FlowMappingEnd) => (),
                             _ => {
                                 self.push_state(State::FlowMappingEmptyValue);
                                 return self.parse_node(false, false);
                             }
                         }
 
                         mark
                     }
                 }
             };
 
         self.pop_state();
         self.skip();
@@ -653,20 +668,19 @@ impl<T: Iterator<Item = char>> Parser<T> {
             Token(marker, TokenType::Value) => {
                 self.skip();
                 match try!(self.peek_token()).1 {
-                    TokenType::FlowEntry
-                    | TokenType::FlowMappingEnd => { },
+                    TokenType::FlowEntry | TokenType::FlowMappingEnd => {}
                     _ => {
                         self.push_state(State::FlowMappingKey);
                         return self.parse_node(false, false);
                     }
                 }
                 marker
-            },
-            Token(marker, _) => marker
+            }
+            Token(marker, _) => marker,
                 }
             }
         };
 
         self.state = State::FlowMappingKey;
         Ok((Event::empty_scalar(), mark))
     }
@@ -683,13 +697,15 @@ impl<T: Iterator<Item = char>> Parser<T> {
                 self.pop_state();
                 self.skip();
                 return Ok((Event::SequenceEnd, mark));
-            },
+            }
             Token(_, TokenType::FlowEntry) if !first => {
                 self.skip();
-            },
+            }
             Token(mark, _) if !first => {
-                return Err(ScanError::new(mark,
-                    "while parsing a flow sequence, expectd ',' or ']'"));
+                return Err(ScanError::new(
+                    mark,
+                    "while parsing a flow sequence, expectd ',' or ']'",
+                ));
             }
             _ => { /* next */ }
         }
@@ -698,7 +714,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
                 self.pop_state();
                 self.skip();
                 Ok((Event::SequenceEnd, mark))
-            },
+            }
             Token(mark, TokenType::Key) => {
                 self.state = State::FlowSequenceEntryMappingKey;
                 self.skip();
@@ -727,7 +743,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
             | Token(mark, TokenType::BlockEnd) => {
                 self.state = State::IndentlessSequenceEntry;
                 Ok((Event::empty_scalar(), mark))
-            },
+            }
             _ => {
                 self.push_state(State::IndentlessSequenceEntry);
                 self.parse_node(true, false)
|
@ -747,25 +763,24 @@ impl<T: Iterator<Item=char>> Parser<T> {
|
||||||
self.pop_state();
|
self.pop_state();
|
||||||
self.skip();
|
self.skip();
|
||||||
Ok((Event::SequenceEnd, mark))
|
Ok((Event::SequenceEnd, mark))
|
||||||
},
|
}
|
||||||
Token(_, TokenType::BlockEntry) => {
|
Token(_, TokenType::BlockEntry) => {
|
||||||
self.skip();
|
self.skip();
|
||||||
match *try!(self.peek_token()) {
|
match *try!(self.peek_token()) {
|
||||||
Token(mark, TokenType::BlockEntry)
|
Token(mark, TokenType::BlockEntry) | Token(mark, TokenType::BlockEnd) => {
|
||||||
| Token(mark, TokenType::BlockEnd) => {
|
|
||||||
self.state = State::BlockSequenceEntry;
|
self.state = State::BlockSequenceEntry;
|
||||||
Ok((Event::empty_scalar(), mark))
|
Ok((Event::empty_scalar(), mark))
|
||||||
},
|
}
|
||||||
_ => {
|
_ => {
|
||||||
self.push_state(State::BlockSequenceEntry);
|
self.push_state(State::BlockSequenceEntry);
|
||||||
self.parse_node(true, false)
|
self.parse_node(true, false)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
|
||||||
Token(mark, _) => {
|
|
||||||
Err(ScanError::new(mark,
|
|
||||||
"while parsing a block collection, did not find expected '-' indicator"))
|
|
||||||
}
|
}
|
||||||
|
Token(mark, _) => Err(ScanError::new(
|
||||||
|
mark,
|
||||||
|
"while parsing a block collection, did not find expected '-' indicator",
|
||||||
|
)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -777,7 +792,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
                 self.skip();
                 self.state = State::FlowSequenceEntryMappingValue;
                 Ok((Event::empty_scalar(), mark))
-            },
+            }
             _ => {
                 self.push_state(State::FlowSequenceEntryMappingValue);
                 self.parse_node(false, false)
@@ -788,20 +803,19 @@ impl<T: Iterator<Item = char>> Parser<T> {
     fn flow_sequence_entry_mapping_value(&mut self) -> ParseResult {
         match *try!(self.peek_token()) {
             Token(_, TokenType::Value) => {
                 self.skip();
                 self.state = State::FlowSequenceEntryMappingValue;
                 match *try!(self.peek_token()) {
-                    Token(mark, TokenType::FlowEntry)
-                    | Token(mark, TokenType::FlowSequenceEnd) => {
+                    Token(mark, TokenType::FlowEntry) | Token(mark, TokenType::FlowSequenceEnd) => {
                         self.state = State::FlowSequenceEntryMappingEnd;
                         Ok((Event::empty_scalar(), mark))
-                    },
+                    }
                     _ => {
                         self.push_state(State::FlowSequenceEntryMappingEnd);
                         self.parse_node(false, false)
                     }
                 }
-            },
+            }
             Token(mark, _) => {
                 self.state = State::FlowSequenceEntryMappingEnd;
                 Ok((Event::empty_scalar(), mark))
File diff suppressed because it is too large
@@ -1,13 +1,13 @@
+use linked_hash_map::LinkedHashMap;
+use parser::*;
+use scanner::{Marker, ScanError, TScalarStyle, TokenType};
 use std::collections::BTreeMap;
+use std::f64;
+use std::i64;
+use std::mem;
 use std::ops::Index;
 use std::string;
-use std::i64;
-use std::f64;
-use std::mem;
 use std::vec;
-use parser::*;
-use scanner::{TScalarStyle, ScanError, TokenType, Marker};
-use linked_hash_map::LinkedHashMap;
 
 /// A YAML node is stored as this `Yaml` enumeration, which provides an easy way to
 /// access your YAML document.
@@ -62,7 +62,7 @@ fn parse_f64(v: &str) -> Option<f64> {
         ".inf" | ".Inf" | ".INF" | "+.inf" | "+.Inf" | "+.INF" => Some(f64::INFINITY),
         "-.inf" | "-.Inf" | "-.INF" => Some(f64::NEG_INFINITY),
         ".nan" | "NaN" | ".NAN" => Some(f64::NAN),
-        _ => v.parse::<f64>().ok()
+        _ => v.parse::<f64>().ok(),
     }
 }
 
@@ -81,31 +81,31 @@ impl MarkedEventReceiver for YamlLoader {
         match ev {
             Event::DocumentStart => {
                 // do nothing
-            },
+            }
             Event::DocumentEnd => {
                 match self.doc_stack.len() {
                     // empty document
                     0 => self.docs.push(Yaml::BadValue),
                     1 => self.docs.push(self.doc_stack.pop().unwrap().0),
-                    _ => unreachable!()
+                    _ => unreachable!(),
                 }
-            },
+            }
             Event::SequenceStart(aid) => {
                 self.doc_stack.push((Yaml::Array(Vec::new()), aid));
-            },
+            }
             Event::SequenceEnd => {
                 let node = self.doc_stack.pop().unwrap();
                 self.insert_new_node(node);
-            },
+            }
             Event::MappingStart(aid) => {
                 self.doc_stack.push((Yaml::Hash(Hash::new()), aid));
                 self.key_stack.push(Yaml::BadValue);
-            },
+            }
             Event::MappingEnd => {
                 self.key_stack.pop().unwrap();
                 let node = self.doc_stack.pop().unwrap();
                 self.insert_new_node(node);
-            },
+            }
             Event::Scalar(v, style, aid, tag) => {
                 let node = if style != TScalarStyle::Plain {
                     Yaml::String(v)
@@ -117,28 +117,22 @@ impl MarkedEventReceiver for YamlLoader {
                             // "true" or "false"
                             match v.parse::<bool>() {
                                 Err(_) => Yaml::BadValue,
-                                Ok(v) => Yaml::Boolean(v)
+                                Ok(v) => Yaml::Boolean(v),
                             }
-                        },
-                        "int" => {
-                            match v.parse::<i64>() {
-                                Err(_) => Yaml::BadValue,
-                                Ok(v) => Yaml::Integer(v)
-                            }
-                        },
-                        "float" => {
-                            match parse_f64(&v) {
-                                Some(_) => Yaml::Real(v),
-                                None => Yaml::BadValue,
-                            }
-                        },
-                        "null" => {
-                            match v.as_ref() {
-                                "~" | "null" => Yaml::Null,
-                                _ => Yaml::BadValue,
-                            }
-                        }
+                        }
+                        "int" => match v.parse::<i64>() {
+                            Err(_) => Yaml::BadValue,
+                            Ok(v) => Yaml::Integer(v),
+                        },
+                        "float" => match parse_f64(&v) {
+                            Some(_) => Yaml::Real(v),
+                            None => Yaml::BadValue,
+                        },
+                        "null" => match v.as_ref() {
+                            "~" | "null" => Yaml::Null,
+                            _ => Yaml::BadValue,
+                        },
                         _ => Yaml::String(v),
                     }
                 } else {
                     Yaml::String(v)
@@ -149,7 +143,7 @@ impl MarkedEventReceiver for YamlLoader {
                 };
 
                 self.insert_new_node((node, aid));
-            },
+            }
             Event::Alias(id) => {
                 let n = match self.anchor_map.get(&id) {
                     Some(v) => v.clone(),
@@ -186,13 +180,13 @@ impl YamlLoader {
                         mem::swap(&mut newkey, cur_key);
                         h.insert(newkey, node.0);
                     }
-                },
+                }
                 _ => unreachable!(),
             }
         }
     }
 
-    pub fn load_from_str(source: &str) -> Result<Vec<Yaml>, ScanError>{
+    pub fn load_from_str(source: &str) -> Result<Vec<Yaml>, ScanError> {
         let mut loader = YamlLoader {
             docs: Vec::new(),
             doc_stack: Vec::new(),
@@ -255,35 +249,35 @@ impl Yaml {
     pub fn is_null(&self) -> bool {
         match *self {
             Yaml::Null => true,
-            _ => false
+            _ => false,
         }
     }
 
     pub fn is_badvalue(&self) -> bool {
         match *self {
             Yaml::BadValue => true,
-            _ => false
+            _ => false,
         }
     }
 
     pub fn is_array(&self) -> bool {
         match *self {
             Yaml::Array(_) => true,
-            _ => false
+            _ => false,
         }
     }
 
     pub fn as_f64(&self) -> Option<f64> {
         match *self {
             Yaml::Real(ref v) => parse_f64(v),
-            _ => None
+            _ => None,
         }
     }
 
     pub fn into_f64(self) -> Option<f64> {
         match self {
             Yaml::Real(ref v) => parse_f64(v),
-            _ => None
+            _ => None,
         }
     }
 }
@@ -315,7 +309,7 @@ impl Yaml {
             _ if v.parse::<i64>().is_ok() => Yaml::Integer(v.parse::<i64>().unwrap()),
             // try parsing as f64
             _ if parse_f64(v).is_some() => Yaml::Real(v.to_owned()),
-            _ => Yaml::String(v.to_owned())
+            _ => Yaml::String(v.to_owned()),
         }
     }
 }
@@ -328,7 +322,7 @@ impl<'a> Index<&'a str> for Yaml {
         let key = Yaml::String(idx.to_owned());
         match self.as_hash() {
             Some(h) => h.get(&key).unwrap_or(&BAD_VALUE),
-            None => &BAD_VALUE
+            None => &BAD_VALUE,
         }
     }
 }
@@ -354,8 +348,7 @@ impl IntoIterator for Yaml {
 
     fn into_iter(self) -> Self::IntoIter {
         YamlIter {
-            yaml: self.into_vec()
-                .unwrap_or_else(Vec::new).into_iter()
+            yaml: self.into_vec().unwrap_or_else(Vec::new).into_iter(),
         }
     }
 }
@@ -374,8 +367,8 @@ impl Iterator for YamlIter {
 
 #[cfg(test)]
 mod test {
-    use yaml::*;
     use std::f64;
+    use yaml::*;
     #[test]
     fn test_coerce() {
         let s = "---
@@ -424,8 +417,7 @@ a7: 你好
 
     #[test]
     fn test_multi_doc() {
-        let s =
-"
+        let s = "
 'a scalar'
 ---
 'a scalar'
@@ -438,8 +430,7 @@ a7: 你好
 
     #[test]
     fn test_anchor() {
-        let s =
-"
+        let s = "
 a1: &DEFAULT
     b1: 4
     b2: d
@@ -452,8 +443,7 @@ a2: *DEFAULT
 
     #[test]
     fn test_bad_anchor() {
-        let s =
-"
+        let s = "
 a1: &DEFAULT
     b1: 4
     b2: *DEFAULT
@@ -461,7 +451,6 @@ a1: &DEFAULT
         let out = YamlLoader::load_from_str(&s).unwrap();
         let doc = &out[0];
         assert_eq!(doc["a1"]["b2"], Yaml::BadValue);
-
     }
 
     #[test]
@@ -475,8 +464,7 @@ a1: &DEFAULT
 
     #[test]
     fn test_plain_datatype() {
-        let s =
-"
+        let s = "
 - 'string'
 - \"string\"
 - string
@@ -555,15 +543,23 @@ a1: &DEFAULT
     #[test]
     fn test_bad_docstart() {
         assert!(YamlLoader::load_from_str("---This used to cause an infinite loop").is_ok());
-        assert_eq!(YamlLoader::load_from_str("----"), Ok(vec![Yaml::String(String::from("----"))]));
-        assert_eq!(YamlLoader::load_from_str("--- #here goes a comment"), Ok(vec![Yaml::Null]));
-        assert_eq!(YamlLoader::load_from_str("---- #here goes a comment"), Ok(vec![Yaml::String(String::from("----"))]));
+        assert_eq!(
+            YamlLoader::load_from_str("----"),
+            Ok(vec![Yaml::String(String::from("----"))])
+        );
+        assert_eq!(
+            YamlLoader::load_from_str("--- #here goes a comment"),
+            Ok(vec![Yaml::Null])
+        );
+        assert_eq!(
+            YamlLoader::load_from_str("---- #here goes a comment"),
+            Ok(vec![Yaml::String(String::from("----"))])
+        );
     }
 
     #[test]
     fn test_plain_datatype_with_into_methods() {
-        let s =
-"
+        let s = "
 - 'string'
 - \"string\"
 - string
@@ -620,9 +616,18 @@ c: ~
         let out = YamlLoader::load_from_str(&s).unwrap();
         let first = out.into_iter().next().unwrap();
         let mut iter = first.into_hash().unwrap().into_iter();
-        assert_eq!(Some((Yaml::String("b".to_owned()), Yaml::Null)), iter.next());
-        assert_eq!(Some((Yaml::String("a".to_owned()), Yaml::Null)), iter.next());
-        assert_eq!(Some((Yaml::String("c".to_owned()), Yaml::Null)), iter.next());
+        assert_eq!(
+            Some((Yaml::String("b".to_owned()), Yaml::Null)),
+            iter.next()
+        );
+        assert_eq!(
+            Some((Yaml::String("a".to_owned()), Yaml::Null)),
+            iter.next()
+        );
+        assert_eq!(
+            Some((Yaml::String("c".to_owned()), Yaml::Null)),
+            iter.next()
+        );
         assert_eq!(None, iter.next());
     }
 
@@ -641,30 +646,49 @@ c: ~
 
     #[test]
     fn test_indentation_equality() {
-        let four_spaces = YamlLoader::load_from_str(r#"
+        let four_spaces = YamlLoader::load_from_str(
+            r#"
 hash:
     with:
         indentations
-"#).unwrap().into_iter().next().unwrap();
+"#,
+        ).unwrap()
+        .into_iter()
+        .next()
+        .unwrap();
 
-        let two_spaces = YamlLoader::load_from_str(r#"
+        let two_spaces = YamlLoader::load_from_str(
+            r#"
 hash:
   with:
     indentations
-"#).unwrap().into_iter().next().unwrap();
+"#,
+        ).unwrap()
+        .into_iter()
+        .next()
+        .unwrap();
 
-        let one_space = YamlLoader::load_from_str(r#"
+        let one_space = YamlLoader::load_from_str(
+            r#"
 hash:
  with:
   indentations
-"#).unwrap().into_iter().next().unwrap();
+"#,
+        ).unwrap()
+        .into_iter()
+        .next()
+        .unwrap();
 
-        let mixed_spaces = YamlLoader::load_from_str(r#"
+        let mixed_spaces = YamlLoader::load_from_str(
+            r#"
 hash:
      with:
                indentations
-"#).unwrap().into_iter().next().unwrap();
+"#,
+        ).unwrap()
+        .into_iter()
+        .next()
+        .unwrap();
 
         assert_eq!(four_spaces, two_spaces);
         assert_eq!(two_spaces, one_space);
@@ -691,7 +715,7 @@ subcommands3:
let doc = &out.into_iter().next().unwrap();
let doc = &out.into_iter().next().unwrap();
println!("{:#?}", doc);
println!("{:#?}", doc);
assert_eq!(doc["subcommands"][0]["server"], Yaml::Null);
assert_eq!(doc["subcommands"][0]["server"], Yaml::Null);
assert!(doc["subcommands2"][0]["server"].as_hash().is_some());
assert!(doc["subcommands2"][0]["server"].as_hash().is_some());
assert!(doc["subcommands3"][0]["server"].as_hash().is_some());
assert!(doc["subcommands3"][0]["server"].as_hash().is_some());
}
}
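The three assertions above lean on yaml-rust's `Index` impls for `Yaml`: a key written with no value parses as null, so the nested lookup compares equal to `Yaml::Null`, while a key that is absent altogether resolves to `Yaml::BadValue`. A minimal sketch of that pattern (illustrative only, not code from this commit; the `subcommands` fixture is an assumed stand-in for the test's actual input):

```rust
extern crate yaml_rust;

use yaml_rust::{Yaml, YamlLoader};

#[test]
fn index_sketch() {
    // "server:" has no value, so the nested lookup resolves to Yaml::Null.
    let docs = YamlLoader::load_from_str("subcommands:\n  - server:\n").unwrap();
    let doc = &docs[0];
    assert_eq!(doc["subcommands"][0]["server"], Yaml::Null);
    // A key that does not exist at all resolves to Yaml::BadValue instead.
    assert_eq!(doc["missing"], Yaml::BadValue);
}
```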
@@ -3,8 +3,8 @@ extern crate yaml_rust
extern crate quickcheck;
extern crate quickcheck;
use quickcheck::TestResult;
use quickcheck::TestResult;
use yaml_rust::{Yaml, YamlLoader, YamlEmitter};
use std::error::Error;
use std::error::Error;
use yaml_rust::{Yaml, YamlEmitter, YamlLoader};
quickcheck! {
quickcheck! {
fn test_check_weird_keys(xs: Vec<String>) -> TestResult {
fn test_check_weird_keys(xs: Vec<String>) -> TestResult {
@@ -2,7 +2,7 @@
#![allow(non_upper_case_globals)]
#![allow(non_upper_case_globals)]
extern crate yaml_rust;
extern crate yaml_rust;
use yaml_rust::parser::{Parser, EventReceiver, Event};
use yaml_rust::parser::{Event, EventReceiver, Parser};
use yaml_rust::scanner::TScalarStyle;
use yaml_rust::scanner::TScalarStyle;
// These names match the names used in the C++ test suite.
// These names match the names used in the C++ test suite.
@@ -21,7 +21,7 @@ enum TestEvent {
}
}
struct YamlChecker {
struct YamlChecker {
pub evs: Vec<TestEvent>
pub evs: Vec<TestEvent>,
}
}
impl EventReceiver for YamlChecker {
impl EventReceiver for YamlChecker {
|
@ -33,36 +33,36 @@ impl EventReceiver for YamlChecker {
|
||||||
Event::SequenceEnd => TestEvent::OnSequenceEnd,
|
Event::SequenceEnd => TestEvent::OnSequenceEnd,
|
||||||
Event::MappingStart(..) => TestEvent::OnMapStart,
|
Event::MappingStart(..) => TestEvent::OnMapStart,
|
||||||
Event::MappingEnd => TestEvent::OnMapEnd,
|
Event::MappingEnd => TestEvent::OnMapEnd,
|
||||||
Event::Scalar(ref v, style, _, _)=> {
|
Event::Scalar(ref v, style, _, _) => {
|
||||||
if v == "~" && style == TScalarStyle::Plain {
|
if v == "~" && style == TScalarStyle::Plain {
|
||||||
TestEvent::OnNull
|
TestEvent::OnNull
|
||||||
} else {
|
} else {
|
||||||
TestEvent::OnScalar
|
TestEvent::OnScalar
|
||||||
}
|
}
|
||||||
},
|
}
|
||||||
Event::Alias(_) => TestEvent::OnAlias,
|
Event::Alias(_) => TestEvent::OnAlias,
|
||||||
_ => { return } // ignore other events
|
_ => return, // ignore other events
|
||||||
};
|
};
|
||||||
self.evs.push(tev);
|
self.evs.push(tev);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn str_to_test_events(docs: &str) -> Vec<TestEvent> {
|
fn str_to_test_events(docs: &str) -> Vec<TestEvent> {
|
||||||
let mut p = YamlChecker {
|
let mut p = YamlChecker { evs: Vec::new() };
|
||||||
evs: Vec::new()
|
|
||||||
};
|
|
||||||
let mut parser = Parser::new(docs.chars());
|
let mut parser = Parser::new(docs.chars());
|
||||||
parser.load(&mut p, true).unwrap();
|
parser.load(&mut p, true).unwrap();
|
||||||
p.evs
|
p.evs
|
||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! assert_next {
|
macro_rules! assert_next {
|
||||||
($v:expr, $p:pat) => (
|
($v:expr, $p:pat) => {
|
||||||
match $v.next().unwrap() {
|
match $v.next().unwrap() {
|
||||||
$p => {},
|
$p => {}
|
||||||
e => { panic!("unexpected event: {:?}", e); }
|
e => {
|
||||||
|
panic!("unexpected event: {:?}", e);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
)
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
// auto generated from handler_spec_test.cpp
|
// auto generated from handler_spec_test.cpp
|
||||||
|
@@ -76,66 +76,65 @@ include!("spec_test.rs.inc");
#[test]
#[test]
fn test_mapvec_legal() {
fn test_mapvec_legal() {
use yaml_rust::yaml::{Array, Hash, Yaml};
use yaml_rust::yaml::{Array, Hash, Yaml};
use yaml_rust::{YamlLoader, YamlEmitter};
use yaml_rust::{YamlEmitter, YamlLoader};
// Emitting a `map<map<seq<_>>, _>` should result in legal yaml that
// Emitting a `map<map<seq<_>>, _>` should result in legal yaml that
// we can parse.
// we can parse.
let mut key = Array::new();
let mut key = Array::new();
key.push(Yaml::Integer(1));
key.push(Yaml::Integer(1));
key.push(Yaml::Integer(2));
key.push(Yaml::Integer(2));
key.push(Yaml::Integer(3));
key.push(Yaml::Integer(3));
let mut keyhash = Hash::new();
let mut keyhash = Hash::new();
keyhash.insert(Yaml::String("key".into()), Yaml::Array(key));
keyhash.insert(Yaml::String("key".into()), Yaml::Array(key));
let mut val = Array::new();
let mut val = Array::new();
val.push(Yaml::Integer(4));
val.push(Yaml::Integer(4));
val.push(Yaml::Integer(5));
val.push(Yaml::Integer(5));
val.push(Yaml::Integer(6));
val.push(Yaml::Integer(6));
let mut hash = Hash::new();
let mut hash = Hash::new();
hash.insert(Yaml::Hash(keyhash), Yaml::Array(val));
hash.insert(Yaml::Hash(keyhash), Yaml::Array(val));
let mut out_str = String::new();
let mut out_str = String::new();
{
{
let mut emitter = YamlEmitter::new(&mut out_str);
let mut emitter = YamlEmitter::new(&mut out_str);
emitter.dump(&Yaml::Hash(hash)).unwrap();
emitter.dump(&Yaml::Hash(hash)).unwrap();
}
}
// At this point, we are tempted to naively render like this:
// At this point, we are tempted to naively render like this:
//
//
// ```yaml
// ```yaml
// ---
// ---
// {key:
// {key:
// - 1
// - 1
// - 2
// - 2
// - 3}:
// - 3}:
// - 4
// - 4
// - 5
// - 5
// - 6
// - 6
// ```
// ```
//
//
// However, this doesn't work, because the key sequence [1, 2, 3] is
// However, this doesn't work, because the key sequence [1, 2, 3] is
// rendered in block mode, which is not legal (as far as I can tell)
// rendered in block mode, which is not legal (as far as I can tell)
// inside the flow mode of the key. We need to either fully render
// inside the flow mode of the key. We need to either fully render
// everything that's in a key in flow mode (which may make for some
// everything that's in a key in flow mode (which may make for some
// long lines), or use the explicit map identifier '?':
// long lines), or use the explicit map identifier '?':
//
//
// ```yaml
// ```yaml
// ---
// ---
// ?
// ?
// key:
// key:
// - 1
// - 1
// - 2
// - 2
// - 3
// - 3
// :
// :
// - 4
// - 4
// - 5
// - 5
// - 6
// - 6
// ```
// ```
YamlLoader::load_from_str(&out_str).unwrap();
YamlLoader::load_from_str(&out_str).unwrap();
}
}
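For orientation, the generated cases pulled in by `include!("spec_test.rs.inc")` exercise `str_to_test_events` and `assert_next!` roughly as in the sketch below (illustrative only; the test name and input are assumptions, not drawn from the generated file):

```rust
// Sketch of a spec-style test using the helpers defined earlier in this file.
#[test]
fn spec_event_sketch() {
    let mut v = str_to_test_events("a: ~").into_iter();
    assert_next!(v, TestEvent::OnMapStart);
    assert_next!(v, TestEvent::OnScalar); // the key "a"
    assert_next!(v, TestEvent::OnNull); // a plain "~" is reported as OnNull
    assert_next!(v, TestEvent::OnMapEnd);
    assert!(v.next().is_none());
}
```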