// third_party/rust/proc-macro2/tests/test.rs
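//
// Test suite for proc-macro2 covering `Ident` construction and validation,
// `Literal` stringification, `TokenStream` parsing and round-tripping, and
// (behind optional cfgs) span locations, span joining, and `Debug` output.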

use std::str::{self, FromStr};
use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};

#[test]
fn idents() {
    assert_eq!(
        Ident::new("String", Span::call_site()).to_string(),
        "String"
    );
    assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
    assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
}
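
// `Ident::new_raw` is semver-exempt API, so raw identifiers are only
// exercised when the `procmacro2_semver_exempt` cfg is set.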
#[test]
#[cfg(procmacro2_semver_exempt)]
fn raw_idents() {
    assert_eq!(
        Ident::new_raw("String", Span::call_site()).to_string(),
        "r#String"
    );
    assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn");
    assert_eq!(Ident::new_raw("_", Span::call_site()).to_string(), "r#_");
}
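
// `Ident::new` rejects invalid input by panicking; each test below asserts
// the expected panic message via `#[should_panic]`.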
#[test]
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
fn ident_empty() {
    Ident::new("", Span::call_site());
}

#[test]
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
fn ident_number() {
    Ident::new("255", Span::call_site());
}

#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn ident_invalid() {
    Ident::new("a#", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_empty() {
    Ident::new("r#", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_number() {
    Ident::new("r#255", Span::call_site());
}

#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
fn raw_ident_invalid() {
    Ident::new("r#a#", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_empty() {
    Ident::new("'", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_number() {
    Ident::new("'255", Span::call_site());
}

#[test]
#[should_panic(expected = r#""\'a#" is not a valid Ident"#)]
fn lifetime_invalid() {
    Ident::new("'a#", Span::call_site());
}
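
// `Literal` constructors must print as valid Rust literals, with quotes and
// special characters escaped where needed.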
#[test]
fn literal_string() {
    assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
    assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
    assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
}

#[test]
fn literal_character() {
    assert_eq!(Literal::character('x').to_string(), "'x'");
    assert_eq!(Literal::character('\'').to_string(), "'\\''");
    assert_eq!(Literal::character('"').to_string(), "'\"'");
}

#[test]
fn literal_float() {
    assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
}
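
// A literal and its suffix (even an arbitrary one such as `u256` or `s`)
// form a single token, whereas forms like `1.f32` or `1._0` tokenize into
// several tokens.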
#[test]
fn literal_suffix() {
    fn token_count(p: &str) -> usize {
        p.parse::<TokenStream>().unwrap().into_iter().count()
    }

    assert_eq!(token_count("999u256"), 1);
    assert_eq!(token_count("999r#u256"), 3);
    assert_eq!(token_count("1."), 1);
    assert_eq!(token_count("1.f32"), 3);
    assert_eq!(token_count("1.0_0"), 1);
    assert_eq!(token_count("1._0"), 3);
    assert_eq!(token_count("1._m"), 3);
    assert_eq!(token_count("\"\"s"), 1);
}
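
// Printing a parsed `TokenStream` and re-parsing the result must be stable:
// the second printing has to equal the first.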
#[test]
fn roundtrip() {
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        let s2 = s.to_string().parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip(
        "
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
        1x
        1u80
        1f320
        ",
    );
    roundtrip("'a");
    roundtrip("'_");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
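
// Inputs that must not parse as a `TokenStream` at all.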
#[test]
fn fail() {
    fn fail(p: &str) {
        if let Ok(s) = p.parse::<TokenStream>() {
            panic!("should have failed to parse: {}\n{:#?}", p, s);
        }
    }
    fail("' static");
    fail("r#1");
    fail("r#_");
}
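
// Line/column information is only available with the `span_locations` cfg.
// Each expected tuple is (start_line, start_column, end_line, end_column)
// for the corresponding token, with lines 1-indexed and columns 0-indexed.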
#[cfg(span_locations)]
#[test]
fn span_test() {
    use proc_macro2::TokenTree;

    fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
        let ts = p.parse::<TokenStream>().unwrap();
        check_spans_internal(ts, &mut lines);
    }

    fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
        for i in ts {
            if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
                *lines = rest;

                let start = i.span().start();
                assert_eq!(start.line, sline, "sline did not match for {}", i);
                assert_eq!(start.column, scol, "scol did not match for {}", i);

                let end = i.span().end();
                assert_eq!(end.line, eline, "eline did not match for {}", i);
                assert_eq!(end.column, ecol, "ecol did not match for {}", i);

                match i {
                    TokenTree::Group(ref g) => {
                        check_spans_internal(g.stream().clone(), lines);
                    }
                    _ => {}
                }
            }
        }
    }

    check_spans(
        "\
/// This is a document comment
testing 123
{
  testing 234
}",
        &[
            (1, 0, 1, 30),  // #
            (1, 0, 1, 30),  // [ ... ]
            (1, 0, 1, 30),  // doc
            (1, 0, 1, 30),  // =
            (1, 0, 1, 30),  // "This is..."
            (2, 0, 2, 7),   // testing
            (2, 8, 2, 11),  // 123
            (3, 0, 5, 1),   // { ... }
            (4, 2, 4, 9),   // testing
            (4, 10, 4, 13), // 234
        ],
    );
}
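
// Without real source text, a call-site span reports line 1, column 0 and an
// "<unspecified>" source file.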
#[cfg(procmacro2_semver_exempt)]
#[cfg(not(nightly))]
#[test]
fn default_span() {
    let start = Span::call_site().start();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    let end = Span::call_site().end();
    assert_eq!(end.line, 1);
    assert_eq!(end.column, 0);
    let source_file = Span::call_site().source_file();
    assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
    assert!(!source_file.is_real());
}
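
// Spans can only be joined when they originate from the same source file.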
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    let source1 = "aaa\nbbb"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();
    let source2 = "ccc\nddd"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();

    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    assert_eq!(
        source1[0].span().source_file(),
        source1[1].span().source_file()
    );

    let joined1 = source1[0].span().join(source1[1].span());
    let joined2 = source1[0].span().join(source2[0].span());
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    let start = joined1.unwrap().start();
    let end = joined1.unwrap().end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);

    assert_eq!(
        joined1.unwrap().source_file(),
        source1[0].span().source_file()
    );
}
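
// Malformed input (an invalid byte/character literal followed by NUL bytes)
// must produce a parse error rather than a panic.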
#[test]
fn no_panic() {
    let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
    assert!(s.parse::<proc_macro2::TokenStream>().is_err());
}
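
// Doc comments surface as `#[doc = "..."]` attributes: `/**/` produces no
// tokens, `/// doc` expands to an outer attribute (`#` plus a bracketed
// group), and `//! doc` expands to an inner attribute (`#`, `!`, group).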
#[test]
fn tricky_doc_comment() {
    let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.is_empty(), "not empty -- {:?}", tokens);

    let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
    match tokens[0] {
        proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
        _ => panic!("wrong token {:?}", tokens[0]),
    }
    let mut tokens = match tokens[1] {
        proc_macro2::TokenTree::Group(ref tt) => {
            assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
            tt.stream().into_iter()
        }
        _ => panic!("wrong token {:?}", tokens[1]),
    };

    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
        t => panic!("wrong token {:?}", t),
    }
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
        t => panic!("wrong token {:?}", t),
    }
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Literal(ref tt) => {
            assert_eq!(tt.to_string(), "\" doc\"");
        }
        t => panic!("wrong token {:?}", t),
    }
    assert!(tokens.next().is_none());

    let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
}
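
// A punct followed immediately by a line comment gets `Spacing::Alone`,
// since the comment separates it from any following token.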
#[test]
fn op_before_comment() {
    let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
    match tts.next().unwrap() {
        TokenTree::Punct(tt) => {
            assert_eq!(tt.as_char(), '~');
            assert_eq!(tt.spacing(), Spacing::Alone);
        }
        wrong => panic!("wrong token {:?}", wrong),
    }
}

#[test]
fn raw_identifier() {
    let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
    match tts.next().unwrap() {
        TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
        wrong => panic!("wrong token {:?}", wrong),
    }
    assert!(tts.next().is_none());
}
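
// `Debug` output for `Ident` and `TokenStream` differs depending on whether
// semver-exempt span information is compiled in, so both forms are checked.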
#[test]
fn test_debug_ident() {
    let ident = Ident::new("proc_macro", Span::call_site());

    #[cfg(not(procmacro2_semver_exempt))]
    let expected = "Ident(proc_macro)";

    #[cfg(procmacro2_semver_exempt)]
    let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";

    assert_eq!(expected, format!("{:?}", ident));
}

#[test]
fn test_debug_tokenstream() {
    let tts = TokenStream::from_str("[a + 1]").unwrap();

    #[cfg(not(procmacro2_semver_exempt))]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
            },
            Punct {
                op: '+',
                spacing: Alone,
            },
            Literal {
                lit: 1,
            },
        ],
    },
]\
";

    #[cfg(not(procmacro2_semver_exempt))]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a
            },
            Punct {
                op: '+',
                spacing: Alone
            },
            Literal {
                lit: 1
            }
        ]
    }
]\
";

    #[cfg(procmacro2_semver_exempt)]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3),
            },
            Punct {
                op: '+',
                spacing: Alone,
                span: bytes(4..5),
            },
            Literal {
                lit: 1,
                span: bytes(6..7),
            },
        ],
        span: bytes(1..8),
    },
]\
";

    #[cfg(procmacro2_semver_exempt)]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3)
            },
            Punct {
                op: '+',
                spacing: Alone,
                span: bytes(4..5)
            },
            Literal {
                lit: 1,
                span: bytes(6..7)
            }
        ],
        span: bytes(1..8)
    }
]\
";

    let actual = format!("{:#?}", tts);
    if actual.ends_with(",\n]") {
        assert_eq!(expected, actual);
    } else {
        assert_eq!(expected_before_trailing_commas, actual);
    }
}
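
// `TokenStream::default()` must be an empty stream.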
#[test]
fn default_tokenstream_is_empty() {
    let default_token_stream: TokenStream = Default::default();

    assert!(default_token_stream.is_empty());
}