Replace try! with question mark
This commit is contained in:
parent
1652cf524e
commit
34da7f5f01
5 changed files with 138 additions and 138 deletions
|
@ -49,7 +49,7 @@ pub type EmitResult = Result<(), EmitError>;
|
|||
|
||||
// from serialize::json
|
||||
fn escape_str(wr: &mut fmt::Write, v: &str) -> Result<(), fmt::Error> {
|
||||
try!(wr.write_str("\""));
|
||||
wr.write_str("\"")?;
|
||||
|
||||
let mut start = 0;
|
||||
|
||||
|
@ -94,19 +94,19 @@ fn escape_str(wr: &mut fmt::Write, v: &str) -> Result<(), fmt::Error> {
|
|||
};
|
||||
|
||||
if start < i {
|
||||
try!(wr.write_str(&v[start..i]));
|
||||
wr.write_str(&v[start..i])?;
|
||||
}
|
||||
|
||||
try!(wr.write_str(escaped));
|
||||
wr.write_str(escaped)?;
|
||||
|
||||
start = i + 1;
|
||||
}
|
||||
|
||||
if start != v.len() {
|
||||
try!(wr.write_str(&v[start..]));
|
||||
wr.write_str(&v[start..])?;
|
||||
}
|
||||
|
||||
try!(wr.write_str("\""));
|
||||
wr.write_str("\"")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -139,7 +139,7 @@ impl<'a> YamlEmitter<'a> {
|
|||
|
||||
pub fn dump(&mut self, doc: &Yaml) -> EmitResult {
|
||||
// write DocumentStart
|
||||
try!(write!(self.writer, "---\n"));
|
||||
write!(self.writer, "---\n")?;
|
||||
self.level = -1;
|
||||
self.emit_node(doc)
|
||||
}
|
||||
|
@ -150,7 +150,7 @@ impl<'a> YamlEmitter<'a> {
|
|||
}
|
||||
for _ in 0..self.level {
|
||||
for _ in 0..self.best_indent {
|
||||
try!(write!(self.writer, " "));
|
||||
write!(self.writer, " ")?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
@ -162,30 +162,30 @@ impl<'a> YamlEmitter<'a> {
|
|||
Yaml::Hash(ref h) => self.emit_hash(h),
|
||||
Yaml::String(ref v) => {
|
||||
if need_quotes(v) {
|
||||
try!(escape_str(self.writer, v));
|
||||
escape_str(self.writer, v)?;
|
||||
} else {
|
||||
try!(write!(self.writer, "{}", v));
|
||||
write!(self.writer, "{}", v)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Yaml::Boolean(v) => {
|
||||
if v {
|
||||
try!(self.writer.write_str("true"));
|
||||
self.writer.write_str("true")?;
|
||||
} else {
|
||||
try!(self.writer.write_str("false"));
|
||||
self.writer.write_str("false")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Yaml::Integer(v) => {
|
||||
try!(write!(self.writer, "{}", v));
|
||||
write!(self.writer, "{}", v)?;
|
||||
Ok(())
|
||||
}
|
||||
Yaml::Real(ref v) => {
|
||||
try!(write!(self.writer, "{}", v));
|
||||
write!(self.writer, "{}", v)?;
|
||||
Ok(())
|
||||
}
|
||||
Yaml::Null | Yaml::BadValue => {
|
||||
try!(write!(self.writer, "~"));
|
||||
write!(self.writer, "~")?;
|
||||
Ok(())
|
||||
}
|
||||
// XXX(chenyh) Alias
|
||||
|
@ -195,16 +195,16 @@ impl<'a> YamlEmitter<'a> {
|
|||
|
||||
fn emit_array(&mut self, v: &[Yaml]) -> EmitResult {
|
||||
if v.is_empty() {
|
||||
try!(write!(self.writer, "[]"));
|
||||
write!(self.writer, "[]")?;
|
||||
} else {
|
||||
self.level += 1;
|
||||
for (cnt, x) in v.iter().enumerate() {
|
||||
if cnt > 0 {
|
||||
try!(write!(self.writer, "\n"));
|
||||
try!(self.write_indent());
|
||||
write!(self.writer, "\n")?;
|
||||
self.write_indent()?;
|
||||
}
|
||||
try!(write!(self.writer, "-"));
|
||||
try!(self.emit_val(true, x));
|
||||
write!(self.writer, "-")?;
|
||||
self.emit_val(true, x)?;
|
||||
}
|
||||
self.level -= 1;
|
||||
}
|
||||
|
@ -213,7 +213,7 @@ impl<'a> YamlEmitter<'a> {
|
|||
|
||||
fn emit_hash(&mut self, h: &Hash) -> EmitResult {
|
||||
if h.is_empty() {
|
||||
try!(self.writer.write_str("{}"));
|
||||
self.writer.write_str("{}")?;
|
||||
} else {
|
||||
self.level += 1;
|
||||
for (cnt, (k, v)) in h.iter().enumerate() {
|
||||
|
@ -222,20 +222,20 @@ impl<'a> YamlEmitter<'a> {
|
|||
_ => false,
|
||||
};
|
||||
if cnt > 0 {
|
||||
try!(write!(self.writer, "\n"));
|
||||
try!(self.write_indent());
|
||||
write!(self.writer, "\n")?;
|
||||
self.write_indent()?;
|
||||
}
|
||||
if complex_key {
|
||||
try!(write!(self.writer, "?"));
|
||||
try!(self.emit_val(true, k));
|
||||
try!(write!(self.writer, "\n"));
|
||||
try!(self.write_indent());
|
||||
try!(write!(self.writer, ":"));
|
||||
try!(self.emit_val(true, v));
|
||||
write!(self.writer, "?")?;
|
||||
self.emit_val(true, k)?;
|
||||
write!(self.writer, "\n")?;
|
||||
self.write_indent()?;
|
||||
write!(self.writer, ":")?;
|
||||
self.emit_val(true, v)?;
|
||||
} else {
|
||||
try!(self.emit_node(k));
|
||||
try!(write!(self.writer, ":"));
|
||||
try!(self.emit_val(false, v));
|
||||
self.emit_node(k)?;
|
||||
write!(self.writer, ":")?;
|
||||
self.emit_val(false, v)?;
|
||||
}
|
||||
}
|
||||
self.level -= 1;
|
||||
|
@ -251,28 +251,28 @@ impl<'a> YamlEmitter<'a> {
|
|||
match *val {
|
||||
Yaml::Array(ref v) => {
|
||||
if (inline && self.compact) || v.is_empty() {
|
||||
try!(write!(self.writer, " "));
|
||||
write!(self.writer, " ")?;
|
||||
} else {
|
||||
try!(write!(self.writer, "\n"));
|
||||
write!(self.writer, "\n")?;
|
||||
self.level += 1;
|
||||
try!(self.write_indent());
|
||||
self.write_indent()?;
|
||||
self.level -= 1;
|
||||
}
|
||||
self.emit_array(v)
|
||||
}
|
||||
Yaml::Hash(ref h) => {
|
||||
if (inline && self.compact) || h.is_empty() {
|
||||
try!(write!(self.writer, " "));
|
||||
write!(self.writer, " ")?;
|
||||
} else {
|
||||
try!(write!(self.writer, "\n"));
|
||||
write!(self.writer, "\n")?;
|
||||
self.level += 1;
|
||||
try!(self.write_indent());
|
||||
self.write_indent()?;
|
||||
self.level -= 1;
|
||||
}
|
||||
self.emit_hash(h)
|
||||
}
|
||||
_ => {
|
||||
try!(write!(self.writer, " "));
|
||||
write!(self.writer, " ")?;
|
||||
self.emit_node(val)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -102,7 +102,7 @@ mod tests {
|
|||
}
|
||||
|
||||
fn try_fail(s: &str) -> Result<Vec<Yaml>, ScanError> {
|
||||
let t = try!(YamlLoader::load_from_str(s));
|
||||
let t = YamlLoader::load_from_str(s)?;
|
||||
Ok(t)
|
||||
}
|
||||
|
||||
|
|
|
@ -110,7 +110,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
match self.current {
|
||||
Some(ref x) => Ok(x),
|
||||
None => {
|
||||
self.current = Some(try!(self.next()));
|
||||
self.current = Some(self.next()?);
|
||||
self.peek()
|
||||
}
|
||||
}
|
||||
|
@ -126,7 +126,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
fn peek_token(&mut self) -> Result<&Token, ScanError> {
|
||||
match self.token {
|
||||
None => {
|
||||
self.token = Some(try!(self.scan_next_token()));
|
||||
self.token = Some(self.scan_next_token()?);
|
||||
Ok(self.token.as_ref().unwrap())
|
||||
}
|
||||
Some(ref tok) => Ok(tok),
|
||||
|
@ -165,7 +165,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
if self.state == State::End {
|
||||
return Ok((Event::StreamEnd, self.scanner.mark()));
|
||||
}
|
||||
let (ev, mark) = try!(self.state_machine());
|
||||
let (ev, mark) = self.state_machine()?;
|
||||
// println!("EV {:?}", ev);
|
||||
Ok((ev, mark))
|
||||
}
|
||||
|
@ -176,7 +176,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
multi: bool,
|
||||
) -> Result<(), ScanError> {
|
||||
if !self.scanner.stream_started() {
|
||||
let (ev, mark) = try!(self.next());
|
||||
let (ev, mark) = self.next()?;
|
||||
assert_eq!(ev, Event::StreamStart);
|
||||
recv.on_event(ev, mark);
|
||||
}
|
||||
|
@ -187,14 +187,14 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
return Ok(());
|
||||
}
|
||||
loop {
|
||||
let (ev, mark) = try!(self.next());
|
||||
let (ev, mark) = self.next()?;
|
||||
if ev == Event::StreamEnd {
|
||||
recv.on_event(ev, mark);
|
||||
return Ok(());
|
||||
}
|
||||
// clear anchors before a new document
|
||||
self.anchors.clear();
|
||||
try!(self.load_document(ev, mark, recv));
|
||||
self.load_document(ev, mark, recv)?;
|
||||
if !multi {
|
||||
break;
|
||||
}
|
||||
|
@ -211,11 +211,11 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
assert_eq!(first_ev, Event::DocumentStart);
|
||||
recv.on_event(first_ev, mark);
|
||||
|
||||
let (ev, mark) = try!(self.next());
|
||||
try!(self.load_node(ev, mark, recv));
|
||||
let (ev, mark) = self.next()?;
|
||||
self.load_node(ev, mark, recv)?;
|
||||
|
||||
// DOCUMENT-END is expected.
|
||||
let (ev, mark) = try!(self.next());
|
||||
let (ev, mark) = self.next()?;
|
||||
assert_eq!(ev, Event::DocumentEnd);
|
||||
recv.on_event(ev, mark);
|
||||
|
||||
|
@ -249,17 +249,17 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
|
||||
fn load_mapping<R: MarkedEventReceiver>(&mut self, recv: &mut R) -> Result<(), ScanError> {
|
||||
let (mut key_ev, mut key_mark) = try!(self.next());
|
||||
let (mut key_ev, mut key_mark) = self.next()?;
|
||||
while key_ev != Event::MappingEnd {
|
||||
// key
|
||||
try!(self.load_node(key_ev, key_mark, recv));
|
||||
self.load_node(key_ev, key_mark, recv)?;
|
||||
|
||||
// value
|
||||
let (ev, mark) = try!(self.next());
|
||||
try!(self.load_node(ev, mark, recv));
|
||||
let (ev, mark) = self.next()?;
|
||||
self.load_node(ev, mark, recv)?;
|
||||
|
||||
// next event
|
||||
let (ev, mark) = try!(self.next());
|
||||
let (ev, mark) = self.next()?;
|
||||
key_ev = ev;
|
||||
key_mark = mark;
|
||||
}
|
||||
|
@ -268,12 +268,12 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
|
||||
fn load_sequence<R: MarkedEventReceiver>(&mut self, recv: &mut R) -> Result<(), ScanError> {
|
||||
let (mut ev, mut mark) = try!(self.next());
|
||||
let (mut ev, mut mark) = self.next()?;
|
||||
while ev != Event::SequenceEnd {
|
||||
try!(self.load_node(ev, mark, recv));
|
||||
self.load_node(ev, mark, recv)?;
|
||||
|
||||
// next event
|
||||
let (next_ev, next_mark) = try!(self.next());
|
||||
let (next_ev, next_mark) = self.next()?;
|
||||
ev = next_ev;
|
||||
mark = next_mark;
|
||||
}
|
||||
|
@ -282,7 +282,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
|
||||
fn state_machine(&mut self) -> ParseResult {
|
||||
// let next_tok = try!(self.peek_token());
|
||||
// let next_tok = self.peek_token()?;
|
||||
// println!("cur_state {:?}, next tok: {:?}", self.state, next_tok);
|
||||
match self.state {
|
||||
State::StreamStart => self.stream_start(),
|
||||
|
@ -322,7 +322,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
|
||||
fn stream_start(&mut self) -> ParseResult {
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::StreamStart(_)) => {
|
||||
self.state = State::ImplicitDocumentStart;
|
||||
self.skip();
|
||||
|
@ -334,12 +334,12 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
|
||||
fn document_start(&mut self, implicit: bool) -> ParseResult {
|
||||
if !implicit {
|
||||
while let TokenType::DocumentEnd = try!(self.peek_token()).1 {
|
||||
while let TokenType::DocumentEnd = self.peek_token()?.1 {
|
||||
self.skip();
|
||||
}
|
||||
}
|
||||
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::StreamEnd) => {
|
||||
self.state = State::End;
|
||||
self.skip();
|
||||
|
@ -352,7 +352,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
self._explict_document_start()
|
||||
}
|
||||
Token(mark, _) if implicit => {
|
||||
try!(self.parser_process_directives());
|
||||
self.parser_process_directives()?;
|
||||
self.push_state(State::DocumentEnd);
|
||||
self.state = State::BlockNode;
|
||||
Ok((Event::DocumentStart, mark))
|
||||
|
@ -366,7 +366,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
|
||||
fn parser_process_directives(&mut self) -> Result<(), ScanError> {
|
||||
loop {
|
||||
match try!(self.peek_token()).1 {
|
||||
match self.peek_token()?.1 {
|
||||
TokenType::VersionDirective(_, _) => {
|
||||
// XXX parsing with warning according to spec
|
||||
//if major != 1 || minor > 2 {
|
||||
|
@ -386,8 +386,8 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
|
||||
fn _explict_document_start(&mut self) -> ParseResult {
|
||||
try!(self.parser_process_directives());
|
||||
match *try!(self.peek_token()) {
|
||||
self.parser_process_directives()?;
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::DocumentStart) => {
|
||||
self.push_state(State::DocumentEnd);
|
||||
self.state = State::DocumentContent;
|
||||
|
@ -402,7 +402,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
|
||||
fn document_content(&mut self) -> ParseResult {
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::VersionDirective(..))
|
||||
| Token(mark, TokenType::TagDirective(..))
|
||||
| Token(mark, TokenType::DocumentStart)
|
||||
|
@ -418,7 +418,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
|
||||
fn document_end(&mut self) -> ParseResult {
|
||||
let mut _implicit = true;
|
||||
let marker: Marker = match *try!(self.peek_token()) {
|
||||
let marker: Marker = match *self.peek_token()? {
|
||||
Token(mark, TokenType::DocumentEnd) => {
|
||||
self.skip();
|
||||
_implicit = false;
|
||||
|
@ -447,7 +447,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
fn parse_node(&mut self, block: bool, indentless_sequence: bool) -> ParseResult {
|
||||
let mut anchor_id = 0;
|
||||
let mut tag = None;
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(_, TokenType::Alias(_)) => {
|
||||
self.pop_state();
|
||||
if let Token(mark, TokenType::Alias(name)) = self.fetch_token() {
|
||||
|
@ -466,8 +466,8 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
Token(_, TokenType::Anchor(_)) => {
|
||||
if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() {
|
||||
anchor_id = try!(self.register_anchor(name, &mark));
|
||||
if let TokenType::Tag(..) = try!(self.peek_token()).1 {
|
||||
anchor_id = self.register_anchor(name, &mark)?;
|
||||
if let TokenType::Tag(..) = self.peek_token()?.1 {
|
||||
if let tg @ TokenType::Tag(..) = self.fetch_token().1 {
|
||||
tag = Some(tg);
|
||||
} else {
|
||||
|
@ -481,9 +481,9 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
Token(_, TokenType::Tag(..)) => {
|
||||
if let tg @ TokenType::Tag(..) = self.fetch_token().1 {
|
||||
tag = Some(tg);
|
||||
if let TokenType::Anchor(_) = try!(self.peek_token()).1 {
|
||||
if let TokenType::Anchor(_) = self.peek_token()?.1 {
|
||||
if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() {
|
||||
anchor_id = try!(self.register_anchor(name, &mark));
|
||||
anchor_id = self.register_anchor(name, &mark)?;
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
|
@ -494,7 +494,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
_ => {}
|
||||
}
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::BlockEntry) if indentless_sequence => {
|
||||
self.state = State::IndentlessSequenceEntry;
|
||||
Ok((Event::SequenceStart(anchor_id), mark))
|
||||
|
@ -538,14 +538,14 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
fn block_mapping_key(&mut self, first: bool) -> ParseResult {
|
||||
// skip BlockMappingStart
|
||||
if first {
|
||||
let _ = try!(self.peek_token());
|
||||
let _ = self.peek_token()?;
|
||||
//self.marks.push(tok.0);
|
||||
self.skip();
|
||||
}
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(_, TokenType::Key) => {
|
||||
self.skip();
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::Key)
|
||||
| Token(mark, TokenType::Value)
|
||||
| Token(mark, TokenType::BlockEnd) => {
|
||||
|
@ -577,10 +577,10 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
|
||||
fn block_mapping_value(&mut self) -> ParseResult {
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(_, TokenType::Value) => {
|
||||
self.skip();
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::Key)
|
||||
| Token(mark, TokenType::Value)
|
||||
| Token(mark, TokenType::BlockEnd) => {
|
||||
|
@ -604,26 +604,26 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
|
||||
fn flow_mapping_key(&mut self, first: bool) -> ParseResult {
|
||||
if first {
|
||||
let _ = try!(self.peek_token());
|
||||
let _ = self.peek_token()?;
|
||||
self.skip();
|
||||
}
|
||||
let marker: Marker =
|
||||
{
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::FlowMappingEnd) => mark,
|
||||
Token(mark, _) => {
|
||||
if !first {
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(_, TokenType::FlowEntry) => self.skip(),
|
||||
Token(mark, _) => return Err(ScanError::new(mark,
|
||||
"while parsing a flow mapping, did not find expected ',' or '}'"))
|
||||
}
|
||||
}
|
||||
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(_, TokenType::Key) => {
|
||||
self.skip();
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::Value)
|
||||
| Token(mark, TokenType::FlowEntry)
|
||||
| Token(mark, TokenType::FlowMappingEnd) => {
|
||||
|
@ -660,14 +660,14 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
fn flow_mapping_value(&mut self, empty: bool) -> ParseResult {
|
||||
let mark: Marker = {
|
||||
if empty {
|
||||
let Token(mark, _) = *try!(self.peek_token());
|
||||
let Token(mark, _) = *self.peek_token()?;
|
||||
self.state = State::FlowMappingKey;
|
||||
return Ok((Event::empty_scalar(), mark));
|
||||
} else {
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(marker, TokenType::Value) => {
|
||||
self.skip();
|
||||
match try!(self.peek_token()).1 {
|
||||
match self.peek_token()?.1 {
|
||||
TokenType::FlowEntry | TokenType::FlowMappingEnd => {}
|
||||
_ => {
|
||||
self.push_state(State::FlowMappingKey);
|
||||
|
@ -688,11 +688,11 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
fn flow_sequence_entry(&mut self, first: bool) -> ParseResult {
|
||||
// skip FlowMappingStart
|
||||
if first {
|
||||
let _ = try!(self.peek_token());
|
||||
let _ = self.peek_token()?;
|
||||
//self.marks.push(tok.0);
|
||||
self.skip();
|
||||
}
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::FlowSequenceEnd) => {
|
||||
self.pop_state();
|
||||
self.skip();
|
||||
|
@ -709,7 +709,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
_ => { /* next */ }
|
||||
}
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::FlowSequenceEnd) => {
|
||||
self.pop_state();
|
||||
self.skip();
|
||||
|
@ -728,7 +728,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
|
||||
fn indentless_sequence_entry(&mut self) -> ParseResult {
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(_, TokenType::BlockEntry) => (),
|
||||
Token(mark, _) => {
|
||||
self.pop_state();
|
||||
|
@ -736,7 +736,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
}
|
||||
self.skip();
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::BlockEntry)
|
||||
| Token(mark, TokenType::Key)
|
||||
| Token(mark, TokenType::Value)
|
||||
|
@ -754,11 +754,11 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
fn block_sequence_entry(&mut self, first: bool) -> ParseResult {
|
||||
// BLOCK-SEQUENCE-START
|
||||
if first {
|
||||
let _ = try!(self.peek_token());
|
||||
let _ = self.peek_token()?;
|
||||
//self.marks.push(tok.0);
|
||||
self.skip();
|
||||
}
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::BlockEnd) => {
|
||||
self.pop_state();
|
||||
self.skip();
|
||||
|
@ -766,7 +766,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
Token(_, TokenType::BlockEntry) => {
|
||||
self.skip();
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::BlockEntry) | Token(mark, TokenType::BlockEnd) => {
|
||||
self.state = State::BlockSequenceEntry;
|
||||
Ok((Event::empty_scalar(), mark))
|
||||
|
@ -785,7 +785,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
|
||||
fn flow_sequence_entry_mapping_key(&mut self) -> ParseResult {
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::Value)
|
||||
| Token(mark, TokenType::FlowEntry)
|
||||
| Token(mark, TokenType::FlowSequenceEnd) => {
|
||||
|
@ -801,11 +801,11 @@ impl<T: Iterator<Item = char>> Parser<T> {
|
|||
}
|
||||
|
||||
fn flow_sequence_entry_mapping_value(&mut self) -> ParseResult {
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(_, TokenType::Value) => {
|
||||
self.skip();
|
||||
self.state = State::FlowSequenceEntryMappingValue;
|
||||
match *try!(self.peek_token()) {
|
||||
match *self.peek_token()? {
|
||||
Token(mark, TokenType::FlowEntry) | Token(mark, TokenType::FlowSequenceEnd) => {
|
||||
self.state = State::FlowSequenceEntryMappingEnd;
|
||||
Ok((Event::empty_scalar(), mark))
|
||||
|
|
|
@ -341,7 +341,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
}
|
||||
self.skip_to_next_token();
|
||||
|
||||
try!(self.stale_simple_keys());
|
||||
self.stale_simple_keys()?;
|
||||
|
||||
let mark = self.mark;
|
||||
self.unroll_indent(mark.col as isize);
|
||||
|
@ -349,7 +349,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
self.lookahead(4);
|
||||
|
||||
if is_z(self.ch()) {
|
||||
try!(self.fetch_stream_end());
|
||||
self.fetch_stream_end()?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
|
@ -364,7 +364,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
&& self.buffer[2] == '-'
|
||||
&& is_blankz(self.buffer[3])
|
||||
{
|
||||
try!(self.fetch_document_indicator(TokenType::DocumentStart));
|
||||
self.fetch_document_indicator(TokenType::DocumentStart)?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
|
@ -374,7 +374,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
&& self.buffer[2] == '.'
|
||||
&& is_blankz(self.buffer[3])
|
||||
{
|
||||
try!(self.fetch_document_indicator(TokenType::DocumentEnd));
|
||||
self.fetch_document_indicator(TokenType::DocumentEnd)?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
|
@ -417,7 +417,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
}
|
||||
|
||||
if !self.token_available {
|
||||
try!(self.fetch_more_tokens());
|
||||
self.fetch_more_tokens()?;
|
||||
}
|
||||
let t = self.tokens.pop_front().unwrap();
|
||||
self.token_available = false;
|
||||
|
@ -436,7 +436,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
if self.tokens.is_empty() {
|
||||
need_more = true;
|
||||
} else {
|
||||
try!(self.stale_simple_keys());
|
||||
self.stale_simple_keys()?;
|
||||
for sk in &self.simple_keys {
|
||||
if sk.possible && sk.token_number == self.tokens_parsed {
|
||||
need_more = true;
|
||||
|
@ -448,7 +448,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
if !need_more {
|
||||
break;
|
||||
}
|
||||
try!(self.fetch_next_token());
|
||||
self.fetch_next_token()?;
|
||||
}
|
||||
self.token_available = true;
|
||||
|
||||
|
@ -510,7 +510,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
}
|
||||
|
||||
self.unroll_indent(-1);
|
||||
try!(self.remove_simple_key());
|
||||
self.remove_simple_key()?;
|
||||
self.disallow_simple_key();
|
||||
|
||||
self.tokens
|
||||
|
@ -520,11 +520,11 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
|
||||
fn fetch_directive(&mut self) -> ScanResult {
|
||||
self.unroll_indent(-1);
|
||||
try!(self.remove_simple_key());
|
||||
self.remove_simple_key()?;
|
||||
|
||||
self.disallow_simple_key();
|
||||
|
||||
let tok = try!(self.scan_directive());
|
||||
let tok = self.scan_directive()?;
|
||||
|
||||
self.tokens.push_back(tok);
|
||||
|
||||
|
@ -535,10 +535,10 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
let start_mark = self.mark;
|
||||
self.skip();
|
||||
|
||||
let name = try!(self.scan_directive_name());
|
||||
let name = self.scan_directive_name()?;
|
||||
let tok = match name.as_ref() {
|
||||
"YAML" => try!(self.scan_version_directive_value(&start_mark)),
|
||||
"TAG" => try!(self.scan_tag_directive_value(&start_mark)),
|
||||
"YAML" => self.scan_version_directive_value(&start_mark)?,
|
||||
"TAG" => self.scan_tag_directive_value(&start_mark)?,
|
||||
// XXX This should be a warning instead of an error
|
||||
_ => {
|
||||
// skip current line
|
||||
|
@ -594,7 +594,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
self.lookahead(1);
|
||||
}
|
||||
|
||||
let major = try!(self.scan_version_directive_number(mark));
|
||||
let major = self.scan_version_directive_number(mark)?;
|
||||
|
||||
if self.ch() != '.' {
|
||||
return Err(ScanError::new(
|
||||
|
@ -605,7 +605,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
|
||||
self.skip();
|
||||
|
||||
let minor = try!(self.scan_version_directive_number(mark));
|
||||
let minor = self.scan_version_directive_number(mark)?;
|
||||
|
||||
Ok(Token(*mark, TokenType::VersionDirective(major, minor)))
|
||||
}
|
||||
|
@ -671,7 +671,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
self.skip();
|
||||
self.lookahead(1);
|
||||
}
|
||||
let handle = try!(self.scan_tag_handle(true, mark));
|
||||
let handle = self.scan_tag_handle(true, mark)?;
|
||||
|
||||
self.lookahead(1);
|
||||
/* Eat whitespaces. */
|
||||
|
@ -681,7 +681,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
}
|
||||
|
||||
let is_secondary = handle == "!!";
|
||||
let prefix = try!(self.scan_tag_uri(true, is_secondary, &String::new(), mark));
|
||||
let prefix = self.scan_tag_uri(true, is_secondary, &String::new(), mark)?;
|
||||
|
||||
self.lookahead(1);
|
||||
|
||||
|
@ -696,10 +696,10 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
}
|
||||
|
||||
fn fetch_tag(&mut self) -> ScanResult {
|
||||
try!(self.save_simple_key());
|
||||
self.save_simple_key()?;
|
||||
self.disallow_simple_key();
|
||||
|
||||
let tok = try!(self.scan_tag());
|
||||
let tok = self.scan_tag()?;
|
||||
self.tokens.push_back(tok);
|
||||
Ok(())
|
||||
}
|
||||
|
@ -717,7 +717,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
// Eat '!<'
|
||||
self.skip();
|
||||
self.skip();
|
||||
suffix = try!(self.scan_tag_uri(false, false, &String::new(), &start_mark));
|
||||
suffix = self.scan_tag_uri(false, false, &String::new(), &start_mark)?;
|
||||
|
||||
if self.ch() != '>' {
|
||||
return Err(ScanError::new(
|
||||
|
@ -729,15 +729,15 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
self.skip();
|
||||
} else {
|
||||
// The tag has either the '!suffix' or the '!handle!suffix'
|
||||
handle = try!(self.scan_tag_handle(false, &start_mark));
|
||||
handle = self.scan_tag_handle(false, &start_mark)?;
|
||||
// Check if it is, indeed, handle.
|
||||
if handle.len() >= 2 && handle.starts_with('!') && handle.ends_with('!') {
|
||||
if handle == "!!" {
|
||||
secondary = true;
|
||||
}
|
||||
suffix = try!(self.scan_tag_uri(false, secondary, &String::new(), &start_mark));
|
||||
suffix = self.scan_tag_uri(false, secondary, &String::new(), &start_mark)?;
|
||||
} else {
|
||||
suffix = try!(self.scan_tag_uri(false, false, &handle, &start_mark));
|
||||
suffix = self.scan_tag_uri(false, false, &handle, &start_mark)?;
|
||||
handle = "!".to_owned();
|
||||
// A special case: the '!' tag. Set the handle to '' and the
|
||||
// suffix to '!'.
|
||||
|
@ -829,7 +829,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
} {
|
||||
// Check if it is a URI-escape sequence.
|
||||
if self.ch() == '%' {
|
||||
string.push(try!(self.scan_uri_escapes(directive, mark)));
|
||||
string.push(self.scan_uri_escapes(directive, mark)?);
|
||||
} else {
|
||||
string.push(self.ch());
|
||||
self.skip();
|
||||
|
@ -907,10 +907,10 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
}
|
||||
|
||||
fn fetch_anchor(&mut self, alias: bool) -> ScanResult {
|
||||
try!(self.save_simple_key());
|
||||
self.save_simple_key()?;
|
||||
self.disallow_simple_key();
|
||||
|
||||
let tok = try!(self.scan_anchor(alias));
|
||||
let tok = self.scan_anchor(alias)?;
|
||||
|
||||
self.tokens.push_back(tok);
|
||||
|
||||
|
@ -946,7 +946,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
|
||||
fn fetch_flow_collection_start(&mut self, tok: TokenType) -> ScanResult {
|
||||
// The indicators '[' and '{' may start a simple key.
|
||||
try!(self.save_simple_key());
|
||||
self.save_simple_key()?;
|
||||
|
||||
self.increase_flow_level()?;
|
||||
|
||||
|
@ -960,7 +960,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
}
|
||||
|
||||
fn fetch_flow_collection_end(&mut self, tok: TokenType) -> ScanResult {
|
||||
try!(self.remove_simple_key());
|
||||
self.remove_simple_key()?;
|
||||
self.decrease_flow_level();
|
||||
|
||||
self.disallow_simple_key();
|
||||
|
@ -973,7 +973,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
}
|
||||
|
||||
fn fetch_flow_entry(&mut self) -> ScanResult {
|
||||
try!(self.remove_simple_key());
|
||||
self.remove_simple_key()?;
|
||||
self.allow_simple_key();
|
||||
|
||||
let start_mark = self.mark;
|
||||
|
@ -1019,7 +1019,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
r#""-" is only valid inside a block"#,
|
||||
));
|
||||
}
|
||||
try!(self.remove_simple_key());
|
||||
self.remove_simple_key()?;
|
||||
self.allow_simple_key();
|
||||
|
||||
let start_mark = self.mark;
|
||||
|
@ -1032,7 +1032,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
|
||||
fn fetch_document_indicator(&mut self, t: TokenType) -> ScanResult {
|
||||
self.unroll_indent(-1);
|
||||
try!(self.remove_simple_key());
|
||||
self.remove_simple_key()?;
|
||||
self.disallow_simple_key();
|
||||
|
||||
let mark = self.mark;
|
||||
|
@ -1046,9 +1046,9 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
}
|
||||
|
||||
fn fetch_block_scalar(&mut self, literal: bool) -> ScanResult {
|
||||
try!(self.save_simple_key());
|
||||
self.save_simple_key()?;
|
||||
self.allow_simple_key();
|
||||
let tok = try!(self.scan_block_scalar(literal));
|
||||
let tok = self.scan_block_scalar(literal)?;
|
||||
|
||||
self.tokens.push_back(tok);
|
||||
Ok(())
|
||||
|
@ -1145,7 +1145,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
}
|
||||
}
|
||||
// Scan the leading line breaks and determine the indentation level if needed.
|
||||
try!(self.block_scalar_breaks(&mut indent, &mut trailing_breaks));
|
||||
self.block_scalar_breaks(&mut indent, &mut trailing_breaks)?;
|
||||
|
||||
self.lookahead(1);
|
||||
|
||||
|
@ -1183,7 +1183,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
self.read_break(&mut leading_break);
|
||||
|
||||
// Eat the following indentation spaces and line breaks.
|
||||
try!(self.block_scalar_breaks(&mut indent, &mut trailing_breaks));
|
||||
self.block_scalar_breaks(&mut indent, &mut trailing_breaks)?;
|
||||
}
|
||||
|
||||
// Chomp the tail.
|
||||
|
@ -1249,10 +1249,10 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
}
|
||||
|
||||
fn fetch_flow_scalar(&mut self, single: bool) -> ScanResult {
|
||||
try!(self.save_simple_key());
|
||||
self.save_simple_key()?;
|
||||
self.disallow_simple_key();
|
||||
|
||||
let tok = try!(self.scan_flow_scalar(single));
|
||||
let tok = self.scan_flow_scalar(single)?;
|
||||
|
||||
self.tokens.push_back(tok);
|
||||
Ok(())
|
||||
|
@ -1457,10 +1457,10 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
}
|
||||
|
||||
fn fetch_plain_scalar(&mut self) -> ScanResult {
|
||||
try!(self.save_simple_key());
|
||||
self.save_simple_key()?;
|
||||
self.disallow_simple_key();
|
||||
|
||||
let tok = try!(self.scan_plain_scalar());
|
||||
let tok = self.scan_plain_scalar()?;
|
||||
|
||||
self.tokens.push_back(tok);
|
||||
Ok(())
|
||||
|
@ -1603,7 +1603,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
);
|
||||
}
|
||||
|
||||
try!(self.remove_simple_key());
|
||||
self.remove_simple_key()?;
|
||||
|
||||
if self.flow_level == 0 {
|
||||
self.allow_simple_key();
|
||||
|
@ -1699,7 +1699,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
|
|||
sk.required = required;
|
||||
sk.token_number = self.tokens_parsed + self.tokens.len();
|
||||
|
||||
try!(self.remove_simple_key());
|
||||
self.remove_simple_key()?;
|
||||
|
||||
self.simple_keys.pop();
|
||||
self.simple_keys.push(sk);
|
||||
|
|
|
@ -194,7 +194,7 @@ impl YamlLoader {
|
|||
anchor_map: BTreeMap::new(),
|
||||
};
|
||||
let mut parser = Parser::new(source.chars());
|
||||
try!(parser.load(&mut loader, true));
|
||||
parser.load(&mut loader, true)?;
|
||||
Ok(loader.docs)
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue