Parser: Do not clone on peeking

This eliminates the calls to clone() and to_owned() in the parser.

- Peeking now returns a reference only
- To obtain the owned value, fetch_token must be called
- The parser was adapted accordingly
- Also: pass the anchor name by value to register_anchor
This commit is contained in:
Christian Hofer 2017-06-20 15:47:19 +02:00
parent 666af4c7fa
commit 75d1b53914

View file

@ -1,5 +1,7 @@
use scanner::*; use scanner::*;
use std::collections::HashMap; use std::collections::HashMap;
use std::mem::swap;
// use yaml::*; // use yaml::*;
#[derive(Clone, Copy, PartialEq, Debug, Eq)] #[derive(Clone, Copy, PartialEq, Debug, Eq)]
@ -107,22 +109,35 @@ impl<T: Iterator<Item=char>> Parser<T> {
} }
} }
fn peek(&mut self) -> Result<Token, ScanError> { fn peek(&mut self) -> Result<&Token, ScanError> {
if self.token.is_none() { match self.token {
self.token = self.scanner.next(); None => {
self.token = Some(self.scan_next_token()?);
Ok(self.token.as_ref().unwrap())
},
Some(ref tok) => Ok(tok)
} }
if self.token.is_none() {
match self.scanner.get_error() {
None =>
return Err(ScanError::new(self.scanner.mark(),
"unexpected eof")),
Some(e) => return Err(e),
}
}
// XXX better?
Ok(self.token.clone().unwrap())
} }
fn scan_next_token(&mut self) -> Result<Token, ScanError> {
let token = self.scanner.next();
match token {
None =>
match self.scanner.get_error() {
None => return Err(ScanError::new(self.scanner.mark(), "unexpected eof")),
Some(e) => return Err(e),
},
Some(tok) => Ok(tok)
}
}
fn fetch_token(&mut self) -> Token {
let mut token = None;
swap(&mut token, &mut self.token);
token.expect("fetch_token needs to be preceded by peek")
}
fn skip(&mut self) { fn skip(&mut self) {
self.token = None; self.token = None;
//self.peek(); //self.peek();
@ -286,45 +301,41 @@ impl<T: Iterator<Item=char>> Parser<T> {
} }
fn stream_start(&mut self) -> ParseResult { fn stream_start(&mut self) -> ParseResult {
let tok = try!(self.peek()); match *self.peek()? {
Token(mark, TokenType::StreamStart(_)) => {
match tok.1 {
TokenType::StreamStart(_) => {
self.state = State::ImplicitDocumentStart; self.state = State::ImplicitDocumentStart;
self.skip(); self.skip();
Ok((Event::StreamStart, tok.0)) Ok((Event::StreamStart, mark))
}, },
_ => Err(ScanError::new(tok.0, Token(mark, _) => Err(ScanError::new(mark,
"did not find expected <stream-start>")), "did not find expected <stream-start>")),
} }
} }
fn document_start(&mut self, implicit: bool) -> ParseResult { fn document_start(&mut self, implicit: bool) -> ParseResult {
let mut tok = try!(self.peek());
if !implicit { if !implicit {
while let TokenType::DocumentEnd = tok.1 { while let TokenType::DocumentEnd = self.peek()?.1 {
self.skip(); self.skip();
tok = try!(self.peek());
} }
} }
match tok.1 { match *self.peek()? {
TokenType::StreamEnd => { Token(mark, TokenType::StreamEnd) => {
self.state = State::End; self.state = State::End;
self.skip(); self.skip();
Ok((Event::StreamEnd, tok.0)) Ok((Event::StreamEnd, mark))
}, },
TokenType::VersionDirective(..) Token(_, TokenType::VersionDirective(..))
| TokenType::TagDirective(..) | Token(_, TokenType::TagDirective(..))
| TokenType::DocumentStart => { | Token(_, TokenType::DocumentStart) => {
// explicit document // explicit document
self._explict_document_start() self._explict_document_start()
}, },
_ if implicit => { Token(mark, _) if implicit => {
try!(self.parser_process_directives()); try!(self.parser_process_directives());
self.push_state(State::DocumentEnd); self.push_state(State::DocumentEnd);
self.state = State::BlockNode; self.state = State::BlockNode;
Ok((Event::DocumentStart, tok.0)) Ok((Event::DocumentStart, mark))
}, },
_ => { _ => {
// explicit document // explicit document
@ -335,8 +346,7 @@ impl<T: Iterator<Item=char>> Parser<T> {
fn parser_process_directives(&mut self) -> Result<(), ScanError> { fn parser_process_directives(&mut self) -> Result<(), ScanError> {
loop { loop {
let tok = try!(self.peek()); match self.peek()?.1 {
match tok.1 {
TokenType::VersionDirective(_, _) => { TokenType::VersionDirective(_, _) => {
// XXX parsing with warning according to spec // XXX parsing with warning according to spec
//if major != 1 || minor > 2 { //if major != 1 || minor > 2 {
@ -357,28 +367,28 @@ impl<T: Iterator<Item=char>> Parser<T> {
fn _explict_document_start(&mut self) -> ParseResult { fn _explict_document_start(&mut self) -> ParseResult {
try!(self.parser_process_directives()); try!(self.parser_process_directives());
let tok = try!(self.peek()); match *self.peek()? {
if tok.1 != TokenType::DocumentStart { Token(mark, TokenType::DocumentStart) => {
return Err(ScanError::new(tok.0, "did not find expected <document start>")); self.push_state(State::DocumentEnd);
} self.state = State::DocumentContent;
self.push_state(State::DocumentEnd); self.skip();
self.state = State::DocumentContent; Ok((Event::DocumentStart, mark))
self.skip(); }
Ok((Event::DocumentStart, tok.0)) Token(mark, _) => Err(ScanError::new(mark, "did not find expected <document start>"))
}
} }
fn document_content(&mut self) -> ParseResult { fn document_content(&mut self) -> ParseResult {
let tok = try!(self.peek()); match *self.peek()? {
match tok.1 { Token(mark, TokenType::VersionDirective(..))
TokenType::VersionDirective(..) | Token(mark, TokenType::TagDirective(..))
|TokenType::TagDirective(..) | Token(mark, TokenType::DocumentStart)
|TokenType::DocumentStart | Token(mark, TokenType::DocumentEnd)
|TokenType::DocumentEnd | Token(mark, TokenType::StreamEnd) => {
|TokenType::StreamEnd => { self.pop_state();
self.pop_state(); // empty scalar
// empty scalar Ok((Event::empty_scalar(), mark))
Ok((Event::empty_scalar(), tok.0)) },
},
_ => { _ => {
self.parse_node(true, false) self.parse_node(true, false)
} }
@ -387,20 +397,21 @@ impl<T: Iterator<Item=char>> Parser<T> {
fn document_end(&mut self) -> ParseResult { fn document_end(&mut self) -> ParseResult {
let mut _implicit = true; let mut _implicit = true;
let tok = try!(self.peek()); let marker: Marker = match *self.peek()? {
let _start_mark = tok.0; Token(mark, TokenType::DocumentEnd) => {
self.skip();
if let TokenType::DocumentEnd = tok.1 { _implicit = false;
self.skip(); mark
_implicit = false; },
} Token(mark, _) => mark
};
// TODO tag handling // TODO tag handling
self.state = State::DocumentStart; self.state = State::DocumentStart;
Ok((Event::DocumentEnd, tok.0)) Ok((Event::DocumentEnd, marker))
} }
fn register_anchor(&mut self, name: &str, _: &Marker) -> Result<usize, ScanError> { fn register_anchor(&mut self, name: String, _: &Marker) -> Result<usize, ScanError> {
// anchors can be overrided/reused // anchors can be overrided/reused
// if self.anchors.contains_key(name) { // if self.anchors.contains_key(name) {
// return Err(ScanError::new(*mark, // return Err(ScanError::new(*mark,
@ -408,77 +419,90 @@ impl<T: Iterator<Item=char>> Parser<T> {
// } // }
let new_id = self.anchor_id; let new_id = self.anchor_id;
self.anchor_id += 1; self.anchor_id += 1;
self.anchors.insert(name.to_owned(), new_id); self.anchors.insert(name, new_id);
Ok(new_id) Ok(new_id)
} }
fn parse_node(&mut self, block: bool, indentless_sequence: bool) -> ParseResult { fn parse_node(&mut self, block: bool, indentless_sequence: bool) -> ParseResult {
let mut tok = try!(self.peek());
let mut anchor_id = 0; let mut anchor_id = 0;
let mut tag = None; let mut tag = None;
match tok.1 { match *self.peek()? {
TokenType::Alias(name) => { Token(_, TokenType::Alias(_)) => {
self.pop_state(); self.pop_state();
self.skip(); if let Token(mark, TokenType::Alias(name)) = self.fetch_token() {
match self.anchors.get(&name) { match self.anchors.get(&name) {
None => return Err(ScanError::new(tok.0, "while parsing node, found unknown anchor")), None => return Err(ScanError::new(mark, "while parsing node, found unknown anchor")),
Some(id) => return Ok((Event::Alias(*id), tok.0)) Some(id) => return Ok((Event::Alias(*id), mark))
}
} else {
unreachable!()
} }
}, },
TokenType::Anchor(name) => { Token(_, TokenType::Anchor(_)) => {
anchor_id = try!(self.register_anchor(&name, &tok.0)); if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() {
self.skip(); anchor_id = try!(self.register_anchor(name, &mark));
tok = try!(self.peek()); if let TokenType::Tag(..) = self.peek()?.1 {
if let TokenType::Tag(_, _) = tok.1 { if let tg @ TokenType::Tag(..) = self.fetch_token().1 {
tag = Some(tok.1); tag = Some(tg);
self.skip(); } else {
tok = try!(self.peek()); unreachable!()
}
}
} else {
unreachable!()
} }
}, },
TokenType::Tag(..) => { Token(_, TokenType::Tag(..)) => {
tag = Some(tok.1); if let tg @ TokenType::Tag(..) = self.fetch_token().1 {
self.skip(); tag = Some(tg);
tok = try!(self.peek()); if let TokenType::Anchor(_) = self.peek()?.1 {
if let TokenType::Anchor(name) = tok.1 { if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() {
anchor_id = try!(self.register_anchor(&name, &tok.0)); anchor_id = try!(self.register_anchor(name, &mark));
self.skip(); } else {
tok = try!(self.peek()); unreachable!()
}
}
} else {
unreachable!()
} }
}, },
_ => {} _ => {}
} }
match tok.1 { match *self.peek()? {
TokenType::BlockEntry if indentless_sequence => { Token(mark, TokenType::BlockEntry) if indentless_sequence => {
self.state = State::IndentlessSequenceEntry; self.state = State::IndentlessSequenceEntry;
Ok((Event::SequenceStart(anchor_id), tok.0)) Ok((Event::SequenceStart(anchor_id), mark))
}, },
TokenType::Scalar(style, v) => { Token(_, TokenType::Scalar(..)) => {
self.pop_state(); self.pop_state();
self.skip(); if let Token(mark, TokenType::Scalar(style, v)) = self.fetch_token() {
Ok((Event::Scalar(v, style, anchor_id, tag), tok.0)) Ok((Event::Scalar(v, style, anchor_id, tag), mark))
} else {
unreachable!()
}
}, },
TokenType::FlowSequenceStart => { Token(mark, TokenType::FlowSequenceStart) => {
self.state = State::FlowSequenceFirstEntry; self.state = State::FlowSequenceFirstEntry;
Ok((Event::SequenceStart(anchor_id), tok.0)) Ok((Event::SequenceStart(anchor_id), mark))
}, },
TokenType::FlowMappingStart => { Token(mark, TokenType::FlowMappingStart) => {
self.state = State::FlowMappingFirstKey; self.state = State::FlowMappingFirstKey;
Ok((Event::MappingStart(anchor_id), tok.0)) Ok((Event::MappingStart(anchor_id), mark))
}, },
TokenType::BlockSequenceStart if block => { Token(mark, TokenType::BlockSequenceStart) if block => {
self.state = State::BlockSequenceFirstEntry; self.state = State::BlockSequenceFirstEntry;
Ok((Event::SequenceStart(anchor_id), tok.0)) Ok((Event::SequenceStart(anchor_id), mark))
}, },
TokenType::BlockMappingStart if block => { Token(mark, TokenType::BlockMappingStart) if block => {
self.state = State::BlockMappingFirstKey; self.state = State::BlockMappingFirstKey;
Ok((Event::MappingStart(anchor_id), tok.0)) Ok((Event::MappingStart(anchor_id), mark))
}, },
// ex 7.2, an empty scalar can follow a secondary tag // ex 7.2, an empty scalar can follow a secondary tag
_ if tag.is_some() || anchor_id > 0 => { Token(mark, _) if tag.is_some() || anchor_id > 0 => {
self.pop_state(); self.pop_state();
Ok((Event::empty_scalar_with_anchor(anchor_id, tag), tok.0)) Ok((Event::empty_scalar_with_anchor(anchor_id, tag), mark))
}, },
_ => { Err(ScanError::new(tok.0, "while parsing a node, did not find expected node content")) } Token(mark, _) => { Err(ScanError::new(mark, "while parsing a node, did not find expected node content")) }
} }
} }
@ -489,20 +513,17 @@ impl<T: Iterator<Item=char>> Parser<T> {
//self.marks.push(tok.0); //self.marks.push(tok.0);
self.skip(); self.skip();
} }
let tok = try!(self.peek()); match *self.peek()? {
match tok.1 { Token(_, TokenType::Key) => {
TokenType::Key => {
self.skip(); self.skip();
let tok = try!(self.peek()); match *self.peek()? {
match tok.1 { Token(mark, TokenType::Key)
TokenType::Key | Token(mark, TokenType::Value)
| TokenType::Value | Token(mark, TokenType::BlockEnd) => {
| TokenType::BlockEnd self.state = State::BlockMappingValue;
=> { // empty scalar
self.state = State::BlockMappingValue; Ok((Event::empty_scalar(), mark))
// empty scalar }
Ok((Event::empty_scalar(), tok.0))
}
_ => { _ => {
self.push_state(State::BlockMappingValue); self.push_state(State::BlockMappingValue);
self.parse_node(true, true) self.parse_node(true, true)
@ -510,46 +531,45 @@ impl<T: Iterator<Item=char>> Parser<T> {
} }
}, },
// XXX(chenyh): libyaml failed to parse spec 1.2, ex8.18 // XXX(chenyh): libyaml failed to parse spec 1.2, ex8.18
TokenType::Value => { Token(mark, TokenType::Value) => {
self.state = State::BlockMappingValue; self.state = State::BlockMappingValue;
Ok((Event::empty_scalar(), tok.0)) Ok((Event::empty_scalar(), mark))
}, },
TokenType::BlockEnd => { Token(mark, TokenType::BlockEnd) => {
self.pop_state(); self.pop_state();
self.skip(); self.skip();
Ok((Event::MappingEnd, tok.0)) Ok((Event::MappingEnd, mark))
}, },
_ => { Token(mark, _) => {
Err(ScanError::new(tok.0, "while parsing a block mapping, did not find expected key")) Err(ScanError::new(mark, "while parsing a block mapping, did not find expected key"))
} }
} }
} }
fn block_mapping_value(&mut self) -> ParseResult { fn block_mapping_value(&mut self) -> ParseResult {
let tok = try!(self.peek()); match *self.peek()? {
match tok.1 { Token(_, TokenType::Value) => {
TokenType::Value => { self.skip();
self.skip(); match *self.peek()? {
let tok = try!(self.peek()); Token(mark, TokenType::Key)
match tok.1 { | Token(mark, TokenType::Value)
TokenType::Key | TokenType::Value | TokenType::BlockEnd | Token(mark, TokenType::BlockEnd) => {
=> { self.state = State::BlockMappingKey;
self.state = State::BlockMappingKey; // empty scalar
// empty scalar Ok((Event::empty_scalar(), mark))
Ok((Event::empty_scalar(), tok.0)) },
} _ => {
_ => { self.push_state(State::BlockMappingKey);
self.push_state(State::BlockMappingKey); self.parse_node(true, true)
self.parse_node(true, true)
}
} }
},
_ => {
self.state = State::BlockMappingKey;
// empty scalar
Ok((Event::empty_scalar(), tok.0))
} }
},
Token(mark, _) => {
self.state = State::BlockMappingKey;
// empty scalar
Ok((Event::empty_scalar(), mark))
} }
}
} }
fn flow_mapping_key(&mut self, first: bool) -> ParseResult { fn flow_mapping_key(&mut self, first: bool) -> ParseResult {
@ -557,71 +577,82 @@ impl<T: Iterator<Item=char>> Parser<T> {
let _ = try!(self.peek()); let _ = try!(self.peek());
self.skip(); self.skip();
} }
let mut tok = try!(self.peek()); let marker: Marker = {
match *self.peek()? {
if tok.1 != TokenType::FlowMappingEnd { Token(mark, TokenType::FlowMappingEnd) => mark,
if !first { Token(mark, _) => {
if tok.1 == TokenType::FlowEntry { if !first {
self.skip(); match *self.peek()? {
tok = try!(self.peek()); Token(_, TokenType::FlowEntry) => self.skip(),
} else { Token(mark, _) => return Err(ScanError::new(mark,
return Err(ScanError::new(tok.0, "while parsing a flow mapping, did not find expected ',' or '}'"))
"while parsing a flow mapping, did not find expected ',' or '}'")); }
}
}
if tok.1 == TokenType::Key {
self.skip();
tok = try!(self.peek());
match tok.1 {
TokenType::Value
| TokenType::FlowEntry
| TokenType::FlowMappingEnd => {
self.state = State::FlowMappingValue;
return Ok((Event::empty_scalar(), tok.0));
},
_ => {
self.push_state(State::FlowMappingValue);
return self.parse_node(false, false);
} }
match *self.peek()? {
Token(_, TokenType::Key) => {
self.skip();
match *self.peek()? {
Token(mark, TokenType::Value)
| Token(mark, TokenType::FlowEntry)
| Token(mark, TokenType::FlowMappingEnd) => {
self.state = State::FlowMappingValue;
return Ok((Event::empty_scalar(), mark));
},
_ => {
self.push_state(State::FlowMappingValue);
return self.parse_node(false, false);
}
}
},
Token(marker, TokenType::Value) => {
self.state = State::FlowMappingValue;
return Ok((Event::empty_scalar(), marker));
},
Token(_, TokenType::FlowMappingEnd) => (),
_ => {
self.push_state(State::FlowMappingEmptyValue);
return self.parse_node(false, false);
}
}
mark
} }
// XXX libyaml fail ex 7.3, empty key
} else if tok.1 == TokenType::Value {
self.state = State::FlowMappingValue;
return Ok((Event::empty_scalar(), tok.0));
} else if tok.1 != TokenType::FlowMappingEnd {
self.push_state(State::FlowMappingEmptyValue);
return self.parse_node(false, false);
} }
} };
self.pop_state(); self.pop_state();
self.skip(); self.skip();
Ok((Event::MappingEnd, tok.0)) Ok((Event::MappingEnd, marker))
} }
fn flow_mapping_value(&mut self, empty: bool) -> ParseResult { fn flow_mapping_value(&mut self, empty: bool) -> ParseResult {
let tok = try!(self.peek()); let mark: Marker = {
if empty { if empty {
self.state = State::FlowMappingKey; let Token(mark, _) = *self.peek()?;
return Ok((Event::empty_scalar(), tok.0)); self.state = State::FlowMappingKey;
} return Ok((Event::empty_scalar(), mark));
} else {
if tok.1 == TokenType::Value { match *self.peek()? {
self.skip(); Token(marker, TokenType::Value) => {
let tok = try!(self.peek()); self.skip();
match tok.1 { match self.peek()?.1 {
TokenType::FlowEntry TokenType::FlowEntry
| TokenType::FlowMappingEnd => { }, | TokenType::FlowMappingEnd => { },
_ => { _ => {
self.push_state(State::FlowMappingKey); self.push_state(State::FlowMappingKey);
return self.parse_node(false, false); return self.parse_node(false, false);
}
}
marker
},
Token(marker, _) => marker
} }
} }
} };
self.state = State::FlowMappingKey; self.state = State::FlowMappingKey;
Ok((Event::empty_scalar(), tok.0)) Ok((Event::empty_scalar(), mark))
} }
fn flow_sequence_entry(&mut self, first: bool) -> ParseResult { fn flow_sequence_entry(&mut self, first: bool) -> ParseResult {
@ -631,33 +662,31 @@ impl<T: Iterator<Item=char>> Parser<T> {
//self.marks.push(tok.0); //self.marks.push(tok.0);
self.skip(); self.skip();
} }
let mut tok = try!(self.peek()); match *self.peek()? {
match tok.1 { Token(mark, TokenType::FlowSequenceEnd) => {
TokenType::FlowSequenceEnd => {
self.pop_state(); self.pop_state();
self.skip(); self.skip();
return Ok((Event::SequenceEnd, tok.0)); return Ok((Event::SequenceEnd, mark));
}, },
TokenType::FlowEntry if !first => { Token(_, TokenType::FlowEntry) if !first => {
self.skip(); self.skip();
tok = try!(self.peek());
}, },
_ if !first => { Token(mark, _) if !first => {
return Err(ScanError::new(tok.0, return Err(ScanError::new(mark,
"while parsing a flow sequence, expectd ',' or ']'")); "while parsing a flow sequence, expectd ',' or ']'"));
} }
_ => { /* next */ } _ => { /* next */ }
} }
match tok.1 { match *self.peek()? {
TokenType::FlowSequenceEnd => { Token(mark, TokenType::FlowSequenceEnd) => {
self.pop_state(); self.pop_state();
self.skip(); self.skip();
Ok((Event::SequenceEnd, tok.0)) Ok((Event::SequenceEnd, mark))
}, },
TokenType::Key => { Token(mark, TokenType::Key) => {
self.state = State::FlowSequenceEntryMappingKey; self.state = State::FlowSequenceEntryMappingKey;
self.skip(); self.skip();
Ok((Event::MappingStart(0), tok.0)) Ok((Event::MappingStart(0), mark))
} }
_ => { _ => {
self.push_state(State::FlowSequenceEntry); self.push_state(State::FlowSequenceEntry);
@ -667,21 +696,21 @@ impl<T: Iterator<Item=char>> Parser<T> {
} }
fn indentless_sequence_entry(&mut self) -> ParseResult { fn indentless_sequence_entry(&mut self) -> ParseResult {
let mut tok = try!(self.peek()); match *self.peek()? {
if tok.1 != TokenType::BlockEntry { Token(_, TokenType::BlockEntry) => (),
self.pop_state(); Token(mark, _) => {
return Ok((Event::SequenceEnd, tok.0)); self.pop_state();
return Ok((Event::SequenceEnd, mark));
}
} }
self.skip(); self.skip();
tok = try!(self.peek()); match *self.peek()? {
match tok.1 { Token(mark, TokenType::BlockEntry)
TokenType::BlockEntry | Token(mark, TokenType::Key)
| TokenType::Key | Token(mark, TokenType::Value)
| TokenType::Value | Token(mark, TokenType::BlockEnd) => {
| TokenType::BlockEnd => {
self.state = State::IndentlessSequenceEntry; self.state = State::IndentlessSequenceEntry;
Ok((Event::empty_scalar(), tok.0)) Ok((Event::empty_scalar(), mark))
}, },
_ => { _ => {
self.push_state(State::IndentlessSequenceEntry); self.push_state(State::IndentlessSequenceEntry);
@ -697,21 +726,19 @@ impl<T: Iterator<Item=char>> Parser<T> {
//self.marks.push(tok.0); //self.marks.push(tok.0);
self.skip(); self.skip();
} }
let mut tok = try!(self.peek()); match *self.peek()? {
match tok.1 { Token(mark, TokenType::BlockEnd) => {
TokenType::BlockEnd => {
self.pop_state(); self.pop_state();
self.skip(); self.skip();
Ok((Event::SequenceEnd, tok.0)) Ok((Event::SequenceEnd, mark))
}, },
TokenType::BlockEntry => { Token(_, TokenType::BlockEntry) => {
self.skip(); self.skip();
tok = try!(self.peek()); match *self.peek()? {
match tok.1 { Token(mark, TokenType::BlockEntry)
TokenType::BlockEntry | Token(mark, TokenType::BlockEnd) => {
| TokenType::BlockEnd => {
self.state = State::BlockSequenceEntry; self.state = State::BlockSequenceEntry;
Ok((Event::empty_scalar(), tok.0)) Ok((Event::empty_scalar(), mark))
}, },
_ => { _ => {
self.push_state(State::BlockSequenceEntry); self.push_state(State::BlockSequenceEntry);
@ -719,23 +746,21 @@ impl<T: Iterator<Item=char>> Parser<T> {
} }
} }
}, },
_ => { Token(mark, _) => {
Err(ScanError::new(tok.0, Err(ScanError::new(mark,
"while parsing a block collection, did not find expected '-' indicator")) "while parsing a block collection, did not find expected '-' indicator"))
} }
} }
} }
fn flow_sequence_entry_mapping_key(&mut self) -> ParseResult { fn flow_sequence_entry_mapping_key(&mut self) -> ParseResult {
let tok = try!(self.peek()); match *self.peek()? {
Token(mark, TokenType::Value)
match tok.1 { | Token(mark, TokenType::FlowEntry)
TokenType::Value | Token(mark, TokenType::FlowSequenceEnd) => {
| TokenType::FlowEntry self.skip();
| TokenType::FlowSequenceEnd => { self.state = State::FlowSequenceEntryMappingValue;
self.skip(); Ok((Event::empty_scalar(), mark))
self.state = State::FlowSequenceEntryMappingValue;
Ok((Event::empty_scalar(), tok.0))
}, },
_ => { _ => {
self.push_state(State::FlowSequenceEntryMappingValue); self.push_state(State::FlowSequenceEntryMappingValue);
@ -745,18 +770,15 @@ impl<T: Iterator<Item=char>> Parser<T> {
} }
fn flow_sequence_entry_mapping_value(&mut self) -> ParseResult { fn flow_sequence_entry_mapping_value(&mut self) -> ParseResult {
let tok = try!(self.peek()); match *self.peek()? {
Token(_, TokenType::Value) => {
match tok.1 {
TokenType::Value => {
self.skip(); self.skip();
let tok = try!(self.peek());
self.state = State::FlowSequenceEntryMappingValue; self.state = State::FlowSequenceEntryMappingValue;
match tok.1 { match *self.peek()? {
TokenType::FlowEntry Token(mark, TokenType::FlowEntry)
| TokenType::FlowSequenceEnd => { | Token(mark, TokenType::FlowSequenceEnd) => {
self.state = State::FlowSequenceEntryMappingEnd; self.state = State::FlowSequenceEntryMappingEnd;
Ok((Event::empty_scalar(), tok.0)) Ok((Event::empty_scalar(), mark))
}, },
_ => { _ => {
self.push_state(State::FlowSequenceEntryMappingEnd); self.push_state(State::FlowSequenceEntryMappingEnd);
@ -764,9 +786,9 @@ impl<T: Iterator<Item=char>> Parser<T> {
} }
} }
}, },
_ => { Token(mark, _) => {
self.state = State::FlowSequenceEntryMappingEnd; self.state = State::FlowSequenceEntryMappingEnd;
Ok((Event::empty_scalar(), tok.0)) Ok((Event::empty_scalar(), mark))
} }
} }
} }