Add scan_tag_directive_value

Yuheng Chen 2015-05-29 02:57:41 +08:00
parent 9a917eaf29
commit 10b91f6a31
2 changed files with 48 additions and 10 deletions

src/parser.rs

@@ -291,7 +291,7 @@ impl<T: Iterator<Item=char>> Parser<T> {
                 return Ok(Event::StreamEnd);
             },
             TokenType::VersionDirectiveToken(..)
-                | TokenType::TagDirectiveToken
+                | TokenType::TagDirectiveToken(..)
                 | TokenType::DocumentStartToken => {
                     // explicit document
                     self._explict_document_start()
@@ -320,8 +320,9 @@ impl<T: Iterator<Item=char>> Parser<T> {
                     //        "found incompatible YAML document"));
                     //}
                 },
-                TokenType::TagDirectiveToken => {
-                    unimplemented!();
+                TokenType::TagDirectiveToken(..) => {
+                    // unimplemented!();
+                    // TODO add tag directive
                 },
                 _ => break
             }
@@ -347,7 +348,7 @@ impl<T: Iterator<Item=char>> Parser<T> {
         let tok = try!(self.peek());
         match tok.1 {
             TokenType::VersionDirectiveToken(..)
-                |TokenType::TagDirectiveToken
+                |TokenType::TagDirectiveToken(..)
                 |TokenType::DocumentStartToken
                 |TokenType::DocumentEndToken
                 |TokenType::StreamEndToken => {

src/scanner.rs

@@ -56,7 +56,8 @@ pub enum TokenType {
     StreamEndToken,
     /// major, minor
     VersionDirectiveToken(u32, u32),
-    TagDirectiveToken,
+    /// handle, prefix
+    TagDirectiveToken(String, String),
     DocumentStartToken,
     DocumentEndToken,
     BlockSequenceStartToken,
@@ -72,7 +73,7 @@ pub enum TokenType {
     ValueToken,
     AliasToken(String),
     AnchorToken(String),
-    // handle, suffix
+    /// handle, suffix
     TagToken(String, String),
     ScalarToken(TScalarStyle, String)
 }
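For reference, the two `String` fields added to `TagDirectiveToken` carry the handle and the prefix of a YAML `%TAG` directive. A minimal illustration of the intended mapping (the directive text and values below are examples, not taken from this commit; assumes `TokenType` is in scope):

    // Input directive:           %TAG !e! tag:example.com,2000:app/
    // Expected token, roughly:
    let tok = TokenType::TagDirectiveToken(
        "!e!".to_string(),                        // handle
        "tag:example.com,2000:app/".to_string(),  // prefix
    );
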
@@ -497,6 +498,7 @@ impl<T: Iterator<Item=char>> Scanner<T> {
             "TAG" => {
                 try!(self.scan_tag_directive_value(&start_mark))
             },
+            // XXX This should be a warning instead of an error
             _ => return Err(ScanError::new(start_mark,
                 "while scanning a directive, found unknown directive name"))
         };
@@ -597,7 +599,32 @@ impl<T: Iterator<Item=char>> Scanner<T> {
     }

     fn scan_tag_directive_value(&mut self, mark: &Marker) -> Result<Token, ScanError> {
-        unimplemented!();
+        self.lookahead(1);
+        /* Eat whitespaces. */
+        while is_blank(self.ch()) {
+            self.skip();
+            self.lookahead(1);
+        }
+
+        let handle = try!(self.scan_tag_handle(true, mark));
+
+        self.lookahead(1);
+        /* Eat whitespaces. */
+        while is_blank(self.ch()) {
+            self.skip();
+            self.lookahead(1);
+        }
+
+        let is_secondary = handle == "!!";
+        let prefix = try!(self.scan_tag_uri(true, is_secondary, &String::new(), mark));
+
+        self.lookahead(1);
+        if !is_blankz(self.ch()) {
+            Err(ScanError::new(*mark,
+                "while scanning TAG, did not find expected whitespace or line break"))
+        } else {
+            Ok(Token(*mark, TokenType::TagDirectiveToken(handle, prefix)))
+        }
     }

     fn fetch_tag(&mut self) -> ScanResult {
@@ -612,7 +639,7 @@ impl<T: Iterator<Item=char>> Scanner<T> {
     fn scan_tag(&mut self) -> Result<Token, ScanError> {
         let start_mark = self.mark;
         let mut handle = String::new();
-        let mut suffix = String::new();
+        let mut suffix;
         let mut secondary = false;

         // Check if the tag is in the canonical form (verbatim).
@@ -715,8 +742,8 @@ impl<T: Iterator<Item=char>> Scanner<T> {
          * '%'.
          */
         while match self.ch() {
-            ';' | '/' | '?' | ':' | '@' | '&' if !is_secondary => true,
-            '=' | '+' | '$' | ',' | '.' | '!' | '~' | '*' | '\'' | '(' | ')' | '[' | ']' if !is_secondary => true,
+            ';' | '/' | '?' | ':' | '@' | '&' => true,
+            '=' | '+' | '$' | ',' | '.' | '!' | '~' | '*' | '\'' | '(' | ')' | '[' | ']' => true,
             '%' => true,
             c if is_alpha(c) => true,
             _ => false
@@ -1853,5 +1880,15 @@ key:
         next!(p, StreamEndToken);
         end!(p);
     }
+
+    #[test]
+    fn test_uri() {
+        // TODO
+    }
+
+    #[test]
+    fn test_uri_escapes() {
+        // TODO
+    }

 }
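A possible follow-up test for the new code path, sketched here only and not part of this commit. It assumes, as the existing tests in this file suggest, that `Scanner::new` takes a char iterator and that the scanner's `Iterator` impl yields `Token` values; the test body would need to live alongside the other tests so that `Token`, `TokenType`, and `Scanner` are in scope:

    #[test]
    fn test_tag_directive_value() {
        let s = "%TAG !yaml! tag:yaml.org,2002:\n---\n";
        let mut p = Scanner::new(s.chars());
        // Walk the token stream until the directive token shows up,
        // then check the handle/prefix payload produced by
        // scan_tag_directive_value.
        loop {
            let Token(_, tok) = p.next().unwrap();
            if let TokenType::TagDirectiveToken(handle, prefix) = tok {
                assert_eq!(handle, "!yaml!");
                assert_eq!(prefix, "tag:yaml.org,2002:");
                break;
            }
        }
    }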