Clippy set to pedantic.

Ethiraric 2023-08-12 01:54:46 +02:00
parent beae0f306b
commit 91ed5dca21
8 changed files with 273 additions and 269 deletions


@@ -21,13 +21,13 @@ fn dump_node(doc: &yaml::Yaml, indent: usize) {
         yaml::Yaml::Hash(ref h) => {
             for (k, v) in h {
                 print_indent(indent);
-                println!("{:?}:", k);
+                println!("{k:?}:");
                 dump_node(v, indent + 1);
             }
         }
         _ => {
             print_indent(indent);
-            println!("{:?}", doc);
+            println!("{doc:?}");
         }
     }
 }


@@ -1,7 +1,7 @@
+use crate::yaml::{Hash, Yaml};
 use std::convert::From;
 use std::error::Error;
 use std::fmt::{self, Display};
-use crate::yaml::{Hash, Yaml};
 
 #[derive(Copy, Clone, Debug)]
 pub enum EmitError {
@@ -30,6 +30,7 @@ impl From<fmt::Error> for EmitError {
     }
 }
 
+#[allow(clippy::module_name_repetitions)]
 pub struct YamlEmitter<'a> {
     writer: &'a mut dyn fmt::Write,
     best_indent: usize,
@@ -126,6 +127,7 @@ impl<'a> YamlEmitter<'a> {
     }
 
     /// Determine if this emitter is using 'compact inline notation'.
+    #[must_use]
     pub fn is_compact(&self) -> bool {
         self.compact
     }
@@ -157,7 +159,7 @@ impl<'a> YamlEmitter<'a> {
                 if need_quotes(v) {
                     escape_str(self.writer, v)?;
                 } else {
-                    write!(self.writer, "{}", v)?;
+                    write!(self.writer, "{v}")?;
                 }
                 Ok(())
             }
@@ -170,11 +172,11 @@ impl<'a> YamlEmitter<'a> {
                 Ok(())
             }
             Yaml::Integer(v) => {
-                write!(self.writer, "{}", v)?;
+                write!(self.writer, "{v}")?;
                 Ok(())
             }
             Yaml::Real(ref v) => {
-                write!(self.writer, "{}", v)?;
+                write!(self.writer, "{v}")?;
                 Ok(())
             }
             Yaml::Null | Yaml::BadValue => {
@@ -182,7 +184,7 @@ impl<'a> YamlEmitter<'a> {
                 Ok(())
             }
             // XXX(chenyh) Alias
-            _ => Ok(()),
+            Yaml::Alias(_) => Ok(()),
         }
     }
@@ -210,10 +212,7 @@ impl<'a> YamlEmitter<'a> {
         } else {
             self.level += 1;
             for (cnt, (k, v)) in h.iter().enumerate() {
-                let complex_key = match *k {
-                    Yaml::Hash(_) | Yaml::Array(_) => true,
-                    _ => false,
-                };
+                let complex_key = matches!(*k, Yaml::Hash(_) | Yaml::Array(_));
                 if cnt > 0 {
                     writeln!(self.writer)?;
                     self.write_indent()?;
@@ -286,19 +285,22 @@ impl<'a> YamlEmitter<'a> {
 /// * When the string is null or ~ (otherwise, it would be considered as a null value);
 /// * When the string looks like a number, such as integers (e.g. 2, 14, etc.), floats (e.g. 2.6, 14.9) and exponential numbers (e.g. 12e7, etc.) (otherwise, it would be treated as a numeric value);
 /// * When the string looks like a date (e.g. 2014-12-31) (otherwise it would be automatically converted into a Unix timestamp).
+#[allow(clippy::doc_markdown)]
 fn need_quotes(string: &str) -> bool {
     fn need_quotes_spaces(string: &str) -> bool {
         string.starts_with(' ') || string.ends_with(' ')
     }
 
-    string == ""
+    string.is_empty()
         || need_quotes_spaces(string)
-        || string.starts_with(|character: char| match character {
-            '&' | '*' | '?' | '|' | '-' | '<' | '>' | '=' | '!' | '%' | '@' => true,
-            _ => false,
+        || string.starts_with(|character: char| {
+            matches!(
+                character,
+                '&' | '*' | '?' | '|' | '-' | '<' | '>' | '=' | '!' | '%' | '@'
+            )
         })
-        || string.contains(|character: char| match character {
-            ':'
+        || string.contains(|character: char| {
+            matches!(character, ':'
             | '{'
             | '}'
            | '['
@@ -314,8 +316,7 @@ fn need_quotes(string: &str) -> bool {
             | '\n'
             | '\r'
             | '\x0e'..='\x1a'
-            | '\x1c'..='\x1f' => true,
-            _ => false,
+            | '\x1c'..='\x1f')
         })
         || [
             // http://yaml.org/type/bool.html
@@ -335,6 +336,7 @@ fn need_quotes(string: &str) -> bool {
 }
 
 #[cfg(test)]
+#[allow(clippy::similar_names)]
 mod test {
     use super::*;
     use crate::YamlLoader;
@@ -354,18 +356,18 @@ a4:
 - 2
 ";
-        let docs = YamlLoader::load_from_str(&s).unwrap();
+        let docs = YamlLoader::load_from_str(s).unwrap();
         let doc = &docs[0];
         let mut writer = String::new();
         {
             let mut emitter = YamlEmitter::new(&mut writer);
             emitter.dump(doc).unwrap();
         }
-        println!("original:\n{}", s);
-        println!("emitted:\n{}", writer);
+        println!("original:\n{s}");
+        println!("emitted:\n{writer}");
         let docs_new = match YamlLoader::load_from_str(&writer) {
             Ok(y) => y,
-            Err(e) => panic!(format!("{}", e)),
+            Err(e) => panic!("{}", e),
         };
         let doc_new = &docs_new[0];
@@ -393,7 +395,7 @@ products:
 {}:
 empty hash key
 "#;
-        let docs = YamlLoader::load_from_str(&s).unwrap();
+        let docs = YamlLoader::load_from_str(s).unwrap();
         let doc = &docs[0];
         let mut writer = String::new();
         {
@@ -402,7 +404,7 @@ products:
         }
         let docs_new = match YamlLoader::load_from_str(&writer) {
             Ok(y) => y,
-            Err(e) => panic!(format!("{}", e)),
+            Err(e) => panic!("{}", e),
         };
         let doc_new = &docs_new[0];
         assert_eq!(doc, doc_new);
@@ -444,7 +446,7 @@ x: test
 y: avoid quoting here
 z: string with spaces"#;
-        let docs = YamlLoader::load_from_str(&s).unwrap();
+        let docs = YamlLoader::load_from_str(s).unwrap();
         let doc = &docs[0];
         let mut writer = String::new();
         {
@@ -452,7 +454,7 @@ z: string with spaces"#;
             emitter.dump(doc).unwrap();
         }
-        assert_eq!(s, writer, "actual:\n\n{}\n", writer);
+        assert_eq!(s, writer, "actual:\n\n{writer}\n");
     }
 
     #[test]
@@ -502,7 +504,7 @@ null0: ~
 bool0: true
 bool1: false"#;
-        let docs = YamlLoader::load_from_str(&input).unwrap();
+        let docs = YamlLoader::load_from_str(input).unwrap();
         let doc = &docs[0];
         let mut writer = String::new();
         {
@@ -512,19 +514,18 @@ bool1: false"#;
         assert_eq!(
             expected, writer,
-            "expected:\n{}\nactual:\n{}\n",
-            expected, writer
+            "expected:\n{expected}\nactual:\n{writer}\n",
         );
     }
 
     #[test]
     fn test_empty_and_nested() {
-        test_empty_and_nested_flag(false)
+        test_empty_and_nested_flag(false);
     }
 
     #[test]
     fn test_empty_and_nested_compact() {
-        test_empty_and_nested_flag(true)
+        test_empty_and_nested_flag(true);
     }
 
     fn test_empty_and_nested_flag(compact: bool) {
@@ -551,7 +552,7 @@ e:
 h: []"#
         };
-        let docs = YamlLoader::load_from_str(&s).unwrap();
+        let docs = YamlLoader::load_from_str(s).unwrap();
         let doc = &docs[0];
         let mut writer = String::new();
         {
@@ -573,15 +574,15 @@ a:
 - - e
 - f"#;
-        let docs = YamlLoader::load_from_str(&s).unwrap();
+        let docs = YamlLoader::load_from_str(s).unwrap();
         let doc = &docs[0];
         let mut writer = String::new();
         {
             let mut emitter = YamlEmitter::new(&mut writer);
             emitter.dump(doc).unwrap();
         }
-        println!("original:\n{}", s);
-        println!("emitted:\n{}", writer);
+        println!("original:\n{s}");
+        println!("emitted:\n{writer}");
         assert_eq!(s, writer);
     }
@@ -597,15 +598,15 @@ a:
 - - f
 - - e"#;
-        let docs = YamlLoader::load_from_str(&s).unwrap();
+        let docs = YamlLoader::load_from_str(s).unwrap();
         let doc = &docs[0];
         let mut writer = String::new();
         {
             let mut emitter = YamlEmitter::new(&mut writer);
             emitter.dump(doc).unwrap();
         }
-        println!("original:\n{}", s);
-        println!("emitted:\n{}", writer);
+        println!("original:\n{s}");
+        println!("emitted:\n{writer}");
         assert_eq!(s, writer);
     }
@@ -619,17 +620,16 @@ a:
 d:
 e: f"#;
-        let docs = YamlLoader::load_from_str(&s).unwrap();
+        let docs = YamlLoader::load_from_str(s).unwrap();
         let doc = &docs[0];
         let mut writer = String::new();
         {
             let mut emitter = YamlEmitter::new(&mut writer);
             emitter.dump(doc).unwrap();
         }
-        println!("original:\n{}", s);
-        println!("emitted:\n{}", writer);
+        println!("original:\n{s}");
+        println!("emitted:\n{writer}");
         assert_eq!(s, writer);
     }
 }


@@ -37,11 +37,16 @@
 //! ```
 
 #![doc(html_root_url = "https://docs.rs/yaml-rust/0.4.5")]
-#![cfg_attr(feature = "cargo-clippy", allow(renamed_and_removed_lints))]
-#![cfg_attr(feature = "cargo-clippy", warn(cyclomatic_complexity))]
+#![cfg_attr(feature = "cargo-clippy", warn(clippy::pedantic))]
 #![cfg_attr(
     feature = "cargo-clippy",
-    allow(match_same_arms, should_implement_trait)
+    allow(
+        clippy::match_same_arms,
+        clippy::should_implement_trait,
+        clippy::missing_errors_doc,
+        clippy::missing_panics_doc,
+        clippy::redundant_else,
+    )
 )]
 
 extern crate linked_hash_map;
@@ -117,5 +122,4 @@ key1:a2
         assert!(YamlLoader::load_from_str(s).is_err());
         assert!(try_fail(s).is_err());
     }
 }


@@ -1,4 +1,4 @@
-use crate::scanner::*;
+use crate::scanner::{Marker, ScanError, Scanner, TScalarStyle, Token, TokenType};
 use std::collections::HashMap;
 
 #[derive(Clone, Copy, PartialEq, Debug, Eq)]
@@ -58,11 +58,12 @@ impl Event {
     }
     fn empty_scalar_with_anchor(anchor: usize, tag: Option<TokenType>) -> Event {
-        Event::Scalar("".to_owned(), TScalarStyle::Plain, anchor, tag)
+        Event::Scalar(String::new(), TScalarStyle::Plain, anchor, tag)
     }
 }
 
 #[derive(Debug)]
+#[allow(dead_code)]
 pub struct Parser<T> {
     scanner: Scanner<T>,
     states: Vec<State>,
@@ -84,7 +85,7 @@ pub trait MarkedEventReceiver {
 
 impl<R: EventReceiver> MarkedEventReceiver for R {
     fn on_event(&mut self, ev: Event, _mark: Marker) {
-        self.on_event(ev)
+        self.on_event(ev);
     }
 }
@@ -107,12 +108,11 @@ impl<T: Iterator<Item = char>> Parser<T> {
     }
 
     pub fn peek(&mut self) -> Result<&(Event, Marker), ScanError> {
-        match self.current {
-            Some(ref x) => Ok(x),
-            None => {
-                self.current = Some(self.next()?);
-                self.peek()
-            }
+        if let Some(ref x) = self.current {
+            Ok(x)
+        } else {
+            self.current = Some(self.next()?);
+            self.peek()
         }
     }
@@ -155,7 +155,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
         //self.peek_token();
     }
     fn pop_state(&mut self) {
-        self.state = self.states.pop().unwrap()
+        self.state = self.states.pop().unwrap();
     }
     fn push_state(&mut self, state: State) {
         self.states.push(state);
@@ -242,7 +242,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
                 self.load_mapping(recv)
             }
             _ => {
-                println!("UNREACHABLE EVENT: {:?}", first_ev);
+                println!("UNREACHABLE EVENT: {first_ev:?}");
                 unreachable!();
             }
         }
@@ -345,9 +345,12 @@ impl<T: Iterator<Item = char>> Parser<T> {
                 self.skip();
                 Ok((Event::StreamEnd, mark))
             }
-            Token(_, TokenType::VersionDirective(..))
-            | Token(_, TokenType::TagDirective(..))
-            | Token(_, TokenType::DocumentStart) => {
+            Token(
+                _,
+                TokenType::VersionDirective(..)
+                | TokenType::TagDirective(..)
+                | TokenType::DocumentStart,
+            ) => {
                 // explicit document
                 self._explicit_document_start()
             }
@@ -403,11 +406,14 @@ impl<T: Iterator<Item = char>> Parser<T> {
     fn document_content(&mut self) -> ParseResult {
         match *self.peek_token()? {
-            Token(mark, TokenType::VersionDirective(..))
-            | Token(mark, TokenType::TagDirective(..))
-            | Token(mark, TokenType::DocumentStart)
-            | Token(mark, TokenType::DocumentEnd)
-            | Token(mark, TokenType::StreamEnd) => {
+            Token(
+                mark,
+                TokenType::VersionDirective(..)
+                | TokenType::TagDirective(..)
+                | TokenType::DocumentStart
+                | TokenType::DocumentEnd
+                | TokenType::StreamEnd,
+            ) => {
                 self.pop_state();
                 // empty scalar
                 Ok((Event::empty_scalar(), mark))
@@ -417,11 +423,9 @@ impl<T: Iterator<Item = char>> Parser<T> {
     }
 
     fn document_end(&mut self) -> ParseResult {
-        let mut _implicit = true;
         let marker: Marker = match *self.peek_token()? {
             Token(mark, TokenType::DocumentEnd) => {
                 self.skip();
-                _implicit = false;
                 mark
             }
             Token(mark, _) => mark,
@@ -432,7 +436,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
         Ok((Event::DocumentEnd, marker))
     }
 
-    fn register_anchor(&mut self, name: String, _: &Marker) -> Result<usize, ScanError> {
+    fn register_anchor(&mut self, name: String, _: &Marker) -> usize {
         // anchors can be overridden/reused
         // if self.anchors.contains_key(name) {
         //     return Err(ScanError::new(*mark,
@@ -441,7 +445,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
         let new_id = self.anchor_id;
         self.anchor_id += 1;
         self.anchors.insert(name, new_id);
-        Ok(new_id)
+        new_id
     }
 
     fn parse_node(&mut self, block: bool, indentless_sequence: bool) -> ParseResult {
@@ -466,7 +470,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
             }
             Token(_, TokenType::Anchor(_)) => {
                 if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() {
-                    anchor_id = self.register_anchor(name, &mark)?;
+                    anchor_id = self.register_anchor(name, &mark);
                     if let TokenType::Tag(..) = self.peek_token()?.1 {
                         if let tg @ TokenType::Tag(..) = self.fetch_token().1 {
                             tag = Some(tg);
@@ -483,7 +487,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
                     tag = Some(tg);
                     if let TokenType::Anchor(_) = self.peek_token()?.1 {
                         if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() {
-                            anchor_id = self.register_anchor(name, &mark)?;
+                            anchor_id = self.register_anchor(name, &mark);
                         } else {
                             unreachable!()
                         }
@@ -545,18 +549,15 @@ impl<T: Iterator<Item = char>> Parser<T> {
         match *self.peek_token()? {
             Token(_, TokenType::Key) => {
                 self.skip();
-                match *self.peek_token()? {
-                    Token(mark, TokenType::Key)
-                    | Token(mark, TokenType::Value)
-                    | Token(mark, TokenType::BlockEnd) => {
-                        self.state = State::BlockMappingValue;
-                        // empty scalar
-                        Ok((Event::empty_scalar(), mark))
-                    }
-                    _ => {
-                        self.push_state(State::BlockMappingValue);
-                        self.parse_node(true, true)
-                    }
+                if let Token(mark, TokenType::Key | TokenType::Value | TokenType::BlockEnd) =
+                    *self.peek_token()?
+                {
+                    self.state = State::BlockMappingValue;
+                    // empty scalar
+                    Ok((Event::empty_scalar(), mark))
+                } else {
+                    self.push_state(State::BlockMappingValue);
+                    self.parse_node(true, true)
                 }
             }
             // XXX(chenyh): libyaml failed to parse spec 1.2, ex8.18
@@ -580,18 +581,15 @@ impl<T: Iterator<Item = char>> Parser<T> {
         match *self.peek_token()? {
             Token(_, TokenType::Value) => {
                 self.skip();
-                match *self.peek_token()? {
-                    Token(mark, TokenType::Key)
-                    | Token(mark, TokenType::Value)
-                    | Token(mark, TokenType::BlockEnd) => {
-                        self.state = State::BlockMappingKey;
-                        // empty scalar
-                        Ok((Event::empty_scalar(), mark))
-                    }
-                    _ => {
-                        self.push_state(State::BlockMappingKey);
-                        self.parse_node(true, true)
-                    }
+                if let Token(mark, TokenType::Key | TokenType::Value | TokenType::BlockEnd) =
+                    *self.peek_token()?
+                {
+                    self.state = State::BlockMappingKey;
+                    // empty scalar
+                    Ok((Event::empty_scalar(), mark))
+                } else {
+                    self.push_state(State::BlockMappingKey);
+                    self.parse_node(true, true)
                 }
             }
             Token(mark, _) => {
@@ -607,50 +605,50 @@ impl<T: Iterator<Item = char>> Parser<T> {
             let _ = self.peek_token()?;
             self.skip();
         }
-        let marker: Marker =
-            {
-                match *self.peek_token()? {
-                    Token(mark, TokenType::FlowMappingEnd) => mark,
-                    Token(mark, _) => {
-                        if !first {
-                            match *self.peek_token()? {
-                                Token(_, TokenType::FlowEntry) => self.skip(),
-                                Token(mark, _) => return Err(ScanError::new(mark,
-                                    "while parsing a flow mapping, did not find expected ',' or '}'"))
-                            }
-                        }
-                        match *self.peek_token()? {
-                            Token(_, TokenType::Key) => {
-                                self.skip();
-                                match *self.peek_token()? {
-                                    Token(mark, TokenType::Value)
-                                    | Token(mark, TokenType::FlowEntry)
-                                    | Token(mark, TokenType::FlowMappingEnd) => {
-                                        self.state = State::FlowMappingValue;
-                                        return Ok((Event::empty_scalar(), mark));
-                                    }
-                                    _ => {
-                                        self.push_state(State::FlowMappingValue);
-                                        return self.parse_node(false, false);
-                                    }
-                                }
-                            }
-                            Token(marker, TokenType::Value) => {
-                                self.state = State::FlowMappingValue;
-                                return Ok((Event::empty_scalar(), marker));
-                            }
-                            Token(_, TokenType::FlowMappingEnd) => (),
-                            _ => {
-                                self.push_state(State::FlowMappingEmptyValue);
-                                return self.parse_node(false, false);
-                            }
-                        }
-                        mark
-                    }
-                }
-            };
+        let marker: Marker = {
+            match *self.peek_token()? {
+                Token(mark, TokenType::FlowMappingEnd) => mark,
+                Token(mark, _) => {
+                    if !first {
+                        match *self.peek_token()? {
+                            Token(_, TokenType::FlowEntry) => self.skip(),
+                            Token(mark, _) => return Err(ScanError::new(
+                                mark,
+                                "while parsing a flow mapping, did not find expected ',' or '}'",
+                            )),
+                        }
+                    }
+                    match *self.peek_token()? {
+                        Token(_, TokenType::Key) => {
+                            self.skip();
+                            if let Token(
+                                mark,
+                                TokenType::Value | TokenType::FlowEntry | TokenType::FlowMappingEnd,
+                            ) = *self.peek_token()?
+                            {
+                                self.state = State::FlowMappingValue;
+                                return Ok((Event::empty_scalar(), mark));
+                            } else {
+                                self.push_state(State::FlowMappingValue);
+                                return self.parse_node(false, false);
+                            }
+                        }
+                        Token(marker, TokenType::Value) => {
+                            self.state = State::FlowMappingValue;
+                            return Ok((Event::empty_scalar(), marker));
+                        }
+                        Token(_, TokenType::FlowMappingEnd) => (),
+                        _ => {
+                            self.push_state(State::FlowMappingEmptyValue);
+                            return self.parse_node(false, false);
+                        }
+                    }
+                    mark
+                }
+            }
+        };
 
         self.pop_state();
         self.skip();
@@ -736,18 +734,16 @@ impl<T: Iterator<Item = char>> Parser<T> {
             }
         }
         self.skip();
-        match *self.peek_token()? {
-            Token(mark, TokenType::BlockEntry)
-            | Token(mark, TokenType::Key)
-            | Token(mark, TokenType::Value)
-            | Token(mark, TokenType::BlockEnd) => {
-                self.state = State::IndentlessSequenceEntry;
-                Ok((Event::empty_scalar(), mark))
-            }
-            _ => {
-                self.push_state(State::IndentlessSequenceEntry);
-                self.parse_node(true, false)
-            }
+        if let Token(
+            mark,
+            TokenType::BlockEntry | TokenType::Key | TokenType::Value | TokenType::BlockEnd,
+        ) = *self.peek_token()?
+        {
+            self.state = State::IndentlessSequenceEntry;
+            Ok((Event::empty_scalar(), mark))
+        } else {
+            self.push_state(State::IndentlessSequenceEntry);
+            self.parse_node(true, false)
         }
     }
@@ -766,15 +762,14 @@ impl<T: Iterator<Item = char>> Parser<T> {
             }
             Token(_, TokenType::BlockEntry) => {
                 self.skip();
-                match *self.peek_token()? {
-                    Token(mark, TokenType::BlockEntry) | Token(mark, TokenType::BlockEnd) => {
-                        self.state = State::BlockSequenceEntry;
-                        Ok((Event::empty_scalar(), mark))
-                    }
-                    _ => {
-                        self.push_state(State::BlockSequenceEntry);
-                        self.parse_node(true, false)
-                    }
+                if let Token(mark, TokenType::BlockEntry | TokenType::BlockEnd) =
+                    *self.peek_token()?
+                {
+                    self.state = State::BlockSequenceEntry;
+                    Ok((Event::empty_scalar(), mark))
+                } else {
+                    self.push_state(State::BlockSequenceEntry);
+                    self.parse_node(true, false)
                 }
             }
             Token(mark, _) => Err(ScanError::new(
@@ -785,18 +780,15 @@ impl<T: Iterator<Item = char>> Parser<T> {
     }
 
     fn flow_sequence_entry_mapping_key(&mut self) -> ParseResult {
-        match *self.peek_token()? {
-            Token(mark, TokenType::Value)
-            | Token(mark, TokenType::FlowEntry)
-            | Token(mark, TokenType::FlowSequenceEnd) => {
-                self.skip();
-                self.state = State::FlowSequenceEntryMappingValue;
-                Ok((Event::empty_scalar(), mark))
-            }
-            _ => {
-                self.push_state(State::FlowSequenceEntryMappingValue);
-                self.parse_node(false, false)
-            }
+        if let Token(mark, TokenType::Value | TokenType::FlowEntry | TokenType::FlowSequenceEnd) =
+            *self.peek_token()?
+        {
+            self.skip();
+            self.state = State::FlowSequenceEntryMappingValue;
+            Ok((Event::empty_scalar(), mark))
+        } else {
+            self.push_state(State::FlowSequenceEntryMappingValue);
+            self.parse_node(false, false)
        }
     }
@@ -805,15 +797,14 @@ impl<T: Iterator<Item = char>> Parser<T> {
             Token(_, TokenType::Value) => {
                 self.skip();
                 self.state = State::FlowSequenceEntryMappingValue;
-                match *self.peek_token()? {
-                    Token(mark, TokenType::FlowEntry) | Token(mark, TokenType::FlowSequenceEnd) => {
-                        self.state = State::FlowSequenceEntryMappingEnd;
-                        Ok((Event::empty_scalar(), mark))
-                    }
-                    _ => {
-                        self.push_state(State::FlowSequenceEntryMappingEnd);
-                        self.parse_node(false, false)
-                    }
+                if let Token(mark, TokenType::FlowEntry | TokenType::FlowSequenceEnd) =
+                    *self.peek_token()?
+                {
+                    self.state = State::FlowSequenceEntryMappingEnd;
+                    Ok((Event::empty_scalar(), mark))
+                } else {
+                    self.push_state(State::FlowSequenceEntryMappingEnd);
+                    self.parse_node(false, false)
                 }
             }
             Token(mark, _) => {
@@ -823,6 +814,7 @@ impl<T: Iterator<Item = char>> Parser<T> {
         }
     }
 
+    #[allow(clippy::unnecessary_wraps)]
     fn flow_sequence_entry_mapping_end(&mut self) -> ParseResult {
         self.state = State::FlowSequenceEntry;
         Ok((Event::MappingEnd, self.scanner.mark()))


@@ -1,3 +1,6 @@
+#![allow(clippy::cast_possible_wrap)]
+#![allow(clippy::cast_sign_loss)]
+
 use std::collections::VecDeque;
 use std::error::Error;
 use std::{char, fmt};
@@ -30,14 +33,17 @@ impl Marker {
         Marker { index, line, col }
     }
 
+    #[must_use]
     pub fn index(&self) -> usize {
         self.index
     }
 
+    #[must_use]
     pub fn line(&self) -> usize {
         self.line
     }
 
+    #[must_use]
     pub fn col(&self) -> usize {
         self.col
     }
@@ -50,6 +56,7 @@ pub struct ScanError {
 }
 
 impl ScanError {
+    #[must_use]
     pub fn new(loc: Marker, info: &str) -> ScanError {
         ScanError {
             mark: loc,
@@ -57,6 +64,7 @@ impl ScanError {
         }
     }
 
+    #[must_use]
     pub fn marker(&self) -> &Marker {
         &self.mark
     }
@@ -137,6 +145,7 @@ impl SimpleKey {
 }
 
 #[derive(Debug)]
+#[allow(clippy::struct_excessive_bools)]
 pub struct Scanner<T> {
     rdr: T,
     mark: Marker,
@@ -194,19 +203,15 @@ fn is_blankz(c: char) -> bool {
 }
 #[inline]
 fn is_digit(c: char) -> bool {
-    c >= '0' && c <= '9'
+    c.is_ascii_digit()
 }
 #[inline]
 fn is_alpha(c: char) -> bool {
-    match c {
-        '0'..='9' | 'a'..='z' | 'A'..='Z' => true,
-        '_' | '-' => true,
-        _ => false,
-    }
+    matches!(c, '0'..='9' | 'a'..='z' | 'A'..='Z' | '_' | '-')
 }
 #[inline]
 fn is_hex(c: char) -> bool {
-    (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')
+    c.is_ascii_digit() || ('a'..='f').contains(&c) || ('A'..='F').contains(&c)
 }
 #[inline]
 fn as_hex(c: char) -> u32 {
@@ -219,10 +224,7 @@ fn as_hex(c: char) -> u32 {
 }
 #[inline]
 fn is_flow(c: char) -> bool {
-    match c {
-        ',' | '[' | ']' | '{' | '}' => true,
-        _ => false,
-    }
+    matches!(c, ',' | '[' | ']' | '{' | '}')
 }
 
 pub type ScanResult = Result<(), ScanError>;
@@ -251,10 +253,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
     }
     #[inline]
     pub fn get_error(&self) -> Option<ScanError> {
-        match self.error {
-            None => None,
-            Some(ref e) => Some(e.clone()),
-        }
+        self.error.as_ref().map(std::clone::Clone::clone)
     }
 
     #[inline]
@@ -419,7 +418,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
             ':' | '?' if !is_blankz(nc) && self.flow_level == 0 => self.fetch_plain_scalar(),
             '%' | '@' | '`' => Err(ScanError::new(
                 self.mark,
-                &format!("unexpected character: `{}'", c),
+                &format!("unexpected character: `{c}'"),
             )),
             _ => self.fetch_plain_scalar(),
         }
@@ -697,7 +696,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
         }
         let is_secondary = handle == "!!";
-        let prefix = self.scan_tag_uri(true, is_secondary, &String::new(), mark)?;
+        let prefix = self.scan_tag_uri(true, is_secondary, "", mark)?;
         self.lookahead(1);
@@ -733,7 +732,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
             // Eat '!<'
             self.skip();
             self.skip();
-            suffix = self.scan_tag_uri(false, false, &String::new(), &start_mark)?;
+            suffix = self.scan_tag_uri(false, false, "", &start_mark)?;
             if self.ch() != '>' {
                 return Err(ScanError::new(
@@ -751,7 +750,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
                 if handle == "!!" {
                     secondary = true;
                 }
-                suffix = self.scan_tag_uri(false, secondary, &String::new(), &start_mark)?;
+                suffix = self.scan_tag_uri(false, secondary, "", &start_mark)?;
             } else {
                 suffix = self.scan_tag_uri(false, false, &handle, &start_mark)?;
                 handle = "!".to_owned();
@@ -1072,6 +1071,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
         Ok(())
     }
 
+    #[allow(clippy::too_many_lines)]
     fn scan_block_scalar(&mut self, literal: bool) -> Result<Token, ScanError> {
         let start_mark = self.mark;
         let mut chomping: i32 = 0;
@@ -1280,6 +1280,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
         Ok(())
     }
 
+    #[allow(clippy::too_many_lines)]
     fn scan_flow_scalar(&mut self, single: bool) -> Result<Token, ScanError> {
         let start_mark = self.mark;
@@ -1389,12 +1390,8 @@ impl<T: Iterator<Item = char>> Scanner<T> {
                             value = (value << 4) + as_hex(self.buffer[i]);
                         }
-                        let ch = match char::from_u32(value) {
-                            Some(v) => v,
-                            None => {
-                                return Err(ScanError::new(start_mark,
-                                    "while parsing a quoted scalar, found invalid Unicode character escape code"));
-                            }
+                        let Some(ch) = char::from_u32(value) else {
+                            return Err(ScanError::new(start_mark, "while parsing a quoted scalar, found invalid Unicode character escape code"));
                         };
                         string.push(ch);
@@ -1739,6 +1736,7 @@ impl<T: Iterator<Item = char>> Scanner<T> {
 }
 
 #[cfg(test)]
+#[allow(clippy::enum_glob_use)]
 mod test {
     use super::TokenType::*;
     use super::*;


@@ -1,6 +1,8 @@
-use linked_hash_map::LinkedHashMap;
-use crate::parser::*;
+#![allow(clippy::module_name_repetitions)]
+
+use crate::parser::{Event, MarkedEventReceiver, Parser};
 use crate::scanner::{Marker, ScanError, TScalarStyle, TokenType};
+use linked_hash_map::LinkedHashMap;
 use std::collections::BTreeMap;
 use std::f64;
 use std::i64;
@@ -201,6 +203,7 @@ impl YamlLoader {
 macro_rules! define_as (
     ($name:ident, $t:ident, $yt:ident) => (
+#[must_use]
 pub fn $name(&self) -> Option<$t> {
     match *self {
         Yaml::$yt(v) => Some(v),
@@ -212,6 +215,7 @@ pub fn $name(&self) -> Option<$t> {
 macro_rules! define_as_ref (
     ($name:ident, $t:ty, $yt:ident) => (
+#[must_use]
 pub fn $name(&self) -> Option<$t> {
     match *self {
         Yaml::$yt(ref v) => Some(v),
@@ -223,6 +227,7 @@ pub fn $name(&self) -> Option<$t> {
 macro_rules! define_into (
     ($name:ident, $t:ty, $yt:ident) => (
+#[must_use]
 pub fn $name(self) -> Option<$t> {
     match self {
         Yaml::$yt(v) => Some(v),
@@ -246,59 +251,58 @@ impl Yaml {
     define_into!(into_hash, Hash, Hash);
     define_into!(into_vec, Array, Array);
 
+    /// Returns the is null of this [`Yaml`].
+    #[must_use]
     pub fn is_null(&self) -> bool {
-        match *self {
-            Yaml::Null => true,
-            _ => false,
-        }
+        matches!(*self, Yaml::Null)
     }
 
+    /// Returns the is badvalue of this [`Yaml`].
+    #[must_use]
     pub fn is_badvalue(&self) -> bool {
-        match *self {
-            Yaml::BadValue => true,
-            _ => false,
-        }
+        matches!(*self, Yaml::BadValue)
     }
 
+    #[must_use]
     pub fn is_array(&self) -> bool {
-        match *self {
-            Yaml::Array(_) => true,
-            _ => false,
-        }
+        matches!(*self, Yaml::Array(_))
     }
 
+    #[must_use]
     pub fn as_f64(&self) -> Option<f64> {
-        match *self {
-            Yaml::Real(ref v) => parse_f64(v),
-            _ => None,
+        if let Yaml::Real(ref v) = self {
+            parse_f64(v)
+        } else {
+            None
         }
     }
 
+    #[must_use]
     pub fn into_f64(self) -> Option<f64> {
-        match self {
-            Yaml::Real(ref v) => parse_f64(v),
-            _ => None,
+        if let Yaml::Real(ref v) = self {
+            parse_f64(v)
+        } else {
+            None
         }
     }
 }
 
-#[cfg_attr(feature = "cargo-clippy", allow(should_implement_trait))]
+#[cfg_attr(feature = "cargo-clippy", allow(clippy::should_implement_trait))]
 impl Yaml {
     // Not implementing FromStr because there is no possibility of Error.
     // This function falls back to Yaml::String if nothing else matches.
+    #[must_use]
     pub fn from_str(v: &str) -> Yaml {
-        if v.starts_with("0x") {
-            if let Ok(i) = i64::from_str_radix(&v[2..], 16) {
+        if let Some(number) = v.strip_prefix("0x") {
+            if let Ok(i) = i64::from_str_radix(number, 16) {
                 return Yaml::Integer(i);
             }
-        }
-        if v.starts_with("0o") {
-            if let Ok(i) = i64::from_str_radix(&v[2..], 8) {
+        } else if let Some(number) = v.strip_prefix("0o") {
+            if let Ok(i) = i64::from_str_radix(number, 8) {
                 return Yaml::Integer(i);
             }
-        }
-        if v.starts_with('+') {
-            if let Ok(i) = v[1..].parse::<i64>() {
+        } else if let Some(number) = v.strip_prefix('+') {
+            if let Ok(i) = number.parse::<i64>() {
                 return Yaml::Integer(i);
            }
         }
@@ -306,10 +310,15 @@ impl Yaml {
             "~" | "null" => Yaml::Null,
             "true" => Yaml::Boolean(true),
             "false" => Yaml::Boolean(false),
-            _ if v.parse::<i64>().is_ok() => Yaml::Integer(v.parse::<i64>().unwrap()),
-            // try parsing as f64
-            _ if parse_f64(v).is_some() => Yaml::Real(v.to_owned()),
-            _ => Yaml::String(v.to_owned()),
+            _ => {
+                if let Ok(integer) = v.parse::<i64>() {
+                    Yaml::Integer(integer)
+                } else if parse_f64(v).is_some() {
+                    Yaml::Real(v.to_owned())
+                } else {
+                    Yaml::String(v.to_owned())
+                }
+            }
         }
     }
 }
@@ -348,7 +357,7 @@ impl IntoIterator for Yaml {
     fn into_iter(self) -> Self::IntoIter {
         YamlIter {
-            yaml: self.into_vec().unwrap_or_else(Vec::new).into_iter(),
+            yaml: self.into_vec().unwrap_or_default().into_iter(),
         }
     }
 }
@@ -366,9 +375,11 @@ impl Iterator for YamlIter {
 }
 
 #[cfg(test)]
+#[allow(clippy::bool_assert_comparison)]
+#[allow(clippy::float_cmp)]
 mod test {
+    use crate::yaml::{vec, Yaml, YamlLoader};
     use std::f64;
-    use crate::yaml::*;
     #[test]
     fn test_coerce() {
         let s = "---
@@ -376,7 +387,7 @@ a: 1
 b: 2.2
 c: [1, 2]
 ";
-        let out = YamlLoader::load_from_str(&s).unwrap();
+        let out = YamlLoader::load_from_str(s).unwrap();
         let doc = &out[0];
         assert_eq!(doc["a"].as_i64().unwrap(), 1i64);
         assert_eq!(doc["b"].as_f64().unwrap(), 2.2f64);
@@ -386,7 +397,7 @@ c: [1, 2]
     #[test]
     fn test_empty_doc() {
-        let s: String = "".to_owned();
+        let s: String = String::new();
         YamlLoader::load_from_str(&s).unwrap();
         let s: String = "---".to_owned();
         assert_eq!(YamlLoader::load_from_str(&s).unwrap()[0], Yaml::Null);
@@ -425,7 +436,7 @@ a7: 你好
 ---
 'a scalar'
 ";
-        let out = YamlLoader::load_from_str(&s).unwrap();
+        let out = YamlLoader::load_from_str(s).unwrap();
         assert_eq!(out.len(), 3);
     }
@@ -437,7 +448,7 @@ a1: &DEFAULT
 b2: d
 a2: *DEFAULT
 ";
-        let out = YamlLoader::load_from_str(&s).unwrap();
+        let out = YamlLoader::load_from_str(s).unwrap();
         let doc = &out[0];
         assert_eq!(doc["a2"]["b1"].as_i64().unwrap(), 4);
     }
@@ -449,7 +460,7 @@ a1: &DEFAULT
 b1: 4
 b2: *DEFAULT
 ";
-        let out = YamlLoader::load_from_str(&s).unwrap();
+        let out = YamlLoader::load_from_str(s).unwrap();
         let doc = &out[0];
         assert_eq!(doc["a1"]["b2"], Yaml::BadValue);
     }
@@ -458,7 +469,7 @@ a1: &DEFAULT
     fn test_github_27() {
         // https://github.com/chyh1990/yaml-rust/issues/27
         let s = "&a";
-        let out = YamlLoader::load_from_str(&s).unwrap();
+        let out = YamlLoader::load_from_str(s).unwrap();
         let doc = &out[0];
         assert_eq!(doc.as_str().unwrap(), "");
     }
@@ -494,7 +505,7 @@ a1: &DEFAULT
 - +12345
 - [ true, false ]
 ";
-        let out = YamlLoader::load_from_str(&s).unwrap();
+        let out = YamlLoader::load_from_str(s).unwrap();
         let doc = &out[0];
         assert_eq!(doc[0].as_str().unwrap(), "string");
@@ -531,14 +542,14 @@ a1: &DEFAULT
     fn test_bad_hyphen() {
         // See: https://github.com/chyh1990/yaml-rust/issues/23
         let s = "{-";
-        assert!(YamlLoader::load_from_str(&s).is_err());
+        assert!(YamlLoader::load_from_str(s).is_err());
     }
 
     #[test]
     fn test_issue_65() {
         // See: https://github.com/chyh1990/yaml-rust/issues/65
         let b = "\n\"ll\\\"ll\\\r\n\"ll\\\"ll\\\r\r\r\rU\r\r\rU";
-        assert!(YamlLoader::load_from_str(&b).is_err());
+        assert!(YamlLoader::load_from_str(b).is_err());
     }
 
     #[test]
@@ -582,7 +593,7 @@ a1: &DEFAULT
 - .NAN
 - !!float .INF
 ";
-        let mut out = YamlLoader::load_from_str(&s).unwrap().into_iter();
+        let mut out = YamlLoader::load_from_str(s).unwrap().into_iter();
         let mut doc = out.next().unwrap().into_iter();
         assert_eq!(doc.next().unwrap().into_string().unwrap(), "string");
@@ -614,7 +625,7 @@ b: ~
 a: ~
 c: ~
 ";
-        let out = YamlLoader::load_from_str(&s).unwrap();
+        let out = YamlLoader::load_from_str(s).unwrap();
         let first = out.into_iter().next().unwrap();
         let mut iter = first.into_hash().unwrap().into_iter();
         assert_eq!(
@@ -640,7 +651,7 @@ c: ~
 1:
 important: false
 ";
-        let out = YamlLoader::load_from_str(&s).unwrap();
+        let out = YamlLoader::load_from_str(s).unwrap();
         let first = out.into_iter().next().unwrap();
         assert_eq!(first[0]["important"].as_bool().unwrap(), true);
     }
@@ -716,10 +727,10 @@ subcommands3:
 about: server related commands
 "#;
-        let out = YamlLoader::load_from_str(&s).unwrap();
+        let out = YamlLoader::load_from_str(s).unwrap();
         let doc = &out.into_iter().next().unwrap();
-        println!("{:#?}", doc);
+        println!("{doc:#?}");
         assert_eq!(doc["subcommands"][0]["server"], Yaml::Null);
         assert!(doc["subcommands2"][0]["server"].as_hash().is_some());
         assert!(doc["subcommands3"][0]["server"].as_hash().is_some());


@@ -6,7 +6,7 @@ use yaml_rust::parser::{Event, EventReceiver, Parser};
 use yaml_rust::scanner::TScalarStyle;
 
 // These names match the names used in the C++ test suite.
-#[cfg_attr(feature = "cargo-clippy", allow(enum_variant_names))]
+#[cfg_attr(feature = "cargo-clippy", allow(clippy::enum_variant_names))]
 #[derive(Clone, PartialEq, PartialOrd, Debug)]
 enum TestEvent {
     OnDocumentStart,
@@ -76,24 +76,18 @@ include!("spec_test.rs.inc");
 #[test]
 fn test_mapvec_legal() {
-    use yaml_rust::yaml::{Array, Hash, Yaml};
+    use yaml_rust::yaml::{Hash, Yaml};
     use yaml_rust::{YamlEmitter, YamlLoader};
     // Emitting a `map<map<seq<_>>, _>` should result in legal yaml that
     // we can parse.
-    let mut key = Array::new();
-    key.push(Yaml::Integer(1));
-    key.push(Yaml::Integer(2));
-    key.push(Yaml::Integer(3));
+    let key = vec![Yaml::Integer(1), Yaml::Integer(2), Yaml::Integer(3)];
     let mut keyhash = Hash::new();
     keyhash.insert(Yaml::String("key".into()), Yaml::Array(key));
-    let mut val = Array::new();
-    val.push(Yaml::Integer(4));
-    val.push(Yaml::Integer(5));
-    val.push(Yaml::Integer(6));
+    let val = vec![Yaml::Integer(4), Yaml::Integer(5), Yaml::Integer(6)];
     let mut hash = Hash::new();
     hash.insert(Yaml::Hash(keyhash), Yaml::Array(val));


@@ -7,14 +7,14 @@ fn roundtrip(original: &Yaml) {
     YamlEmitter::new(&mut emitted).dump(original).unwrap();
     let documents = YamlLoader::load_from_str(&emitted).unwrap();
-    println!("emitted {}", emitted);
+    println!("emitted {emitted}");
     assert_eq!(documents.len(), 1);
     assert_eq!(documents[0], *original);
 }
 
 fn double_roundtrip(original: &str) {
-    let parsed = YamlLoader::load_from_str(&original).unwrap();
+    let parsed = YamlLoader::load_from_str(original).unwrap();
     let mut serialized = String::new();
     YamlEmitter::new(&mut serialized).dump(&parsed[0]).unwrap();
@@ -39,27 +39,32 @@ fn test_colon_in_string() {
 #[test]
 fn test_numberlike_strings() {
     let docs = [
-        r#"x: "1234""#, r#"x: "01234""#, r#""1234""#,
-        r#""01234""#, r#"" 01234""#, r#""0x1234""#,
+        r#"x: "1234""#,
+        r#"x: "01234""#,
+        r#""1234""#,
+        r#""01234""#,
+        r#"" 01234""#,
+        r#""0x1234""#,
         r#"" 0x1234""#,
     ];
     for doc in &docs {
-        roundtrip(&Yaml::String(doc.to_string()));
-        double_roundtrip(&doc);
+        roundtrip(&Yaml::String((*doc).to_string()));
+        double_roundtrip(doc);
     }
 }
 
-/// Example from https://github.com/chyh1990/yaml-rust/issues/133
+/// Example from <https://github.com/chyh1990/yaml-rust/issues/133>
 #[test]
 fn test_issue133() {
-    let doc = YamlLoader::load_from_str("\"0x123\"").unwrap().pop().unwrap();
+    let doc = YamlLoader::load_from_str("\"0x123\"")
+        .unwrap()
+        .pop()
+        .unwrap();
     assert_eq!(doc, Yaml::String("0x123".to_string()));
     let mut out_str = String::new();
     YamlEmitter::new(&mut out_str).dump(&doc).unwrap();
     let doc2 = YamlLoader::load_from_str(&out_str).unwrap().pop().unwrap();
     assert_eq!(doc, doc2); // This failed because the type has changed to a number now
 }