/// Asserts that the `lhs` matches the tokens wrapped in braces on the `rhs`.
///
/// `lhs` needs to be an expression whose type implements
/// `IntoIterator<Item = TokenTree>`, e.g.
/// [`TokenStream`](proc_macro2::TokenStream) or
/// [`TokenParser`](crate::TokenParser).
///
/// ```
/// # use quote::quote;
/// # use proc_macro_utils::assert_tokens;
/// let some_tokens = quote! { ident, { group } };
///
/// assert_tokens! {some_tokens, { ident, {group} }};
/// ```
#[macro_export]
#[cfg(doc)]
macro_rules! assert_tokens {
    ($lhs:expr, { $($rhs:tt)* }) => {};
}

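// The actual implementation. It is hidden from rustdoc so that only the
// simplified signature above shows up in the documentation.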
#[macro_export]
#[cfg(not(doc))]
#[doc(hidden)]
#[allow(clippy::module_name_repetitions)]
macro_rules! assert_tokens {
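    // Entry point: wrap `$lhs` in a `TokenParser` and start matching the
    // expected tokens with an empty prefix.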
    ($lhs:expr, {$($rhs:tt)*}) => {
        let mut lhs = $crate::TokenParser::new($lhs);
        assert_tokens!(@O lhs, "", $($rhs)*);
    };
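    // @E: panic with the expected token(s), what was found instead (if
    // anything), and the prefix of tokens already matched.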
    (@E $prefix:expr, $expected:tt, $found:tt) => {
        panic!("expected\n    {}\nfound\n    {}\nat\n    {} {}", stringify!$expected, $found, $prefix, $found);
    };
    (@E $prefix:expr, $expected:tt) => {
        panic!("unexpected end, expected\n    {}\nafter\n    {}", stringify!$expected, $prefix);
    };
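    // @G: expect a delimited group. `$fn` is the `TokenParser` method for the
    // delimiter, `$sym` the opening delimiter char appended to the inner error
    // prefix, `$group` the expected group (with its original delimiters) used
    // in error messages, and the braced copy of the inner tokens is matched
    // recursively.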
    (@G $lhs:ident, $fn:ident, $aggr:expr, $sym:literal, $group:tt, {$($inner:tt)*}, $($rhs:tt)*) => {
        if let Some(lhs) = $lhs.$fn() {
            let mut lhs = $crate::TokenParser::from(lhs);
            assert_tokens!(@O lhs, concat!($aggr, ' ', $sym), $($inner)*);
        } else if let Some(lhs) = $lhs.next() {
            assert_tokens!(@E $aggr, ($group), lhs);
        } else {
            assert_tokens!(@E $aggr, ($group));
        }
        assert_tokens!(@O $lhs, assert_tokens!(@C $aggr, $group), $($rhs)*);
    };
    // @C: append the matched token to the prefix used in error messages.
    // These punctuation tokens don't add whitespace in front
    (@C $lhs:expr, ,) => {
        concat!($lhs, ',')
    };
    (@C $lhs:expr, :) => {
        concat!($lhs, ':')
    };
    (@C $lhs:expr, ;) => {
        concat!($lhs, ';')
    };
    (@C $lhs:expr, .) => {
        concat!($lhs, '.')
    };
    // All other tokens do
    (@C $lhs:expr, $rhs:tt) => {
        concat!($lhs, ' ', stringify!($rhs))
    };
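    // @O: match the expected tokens one token tree at a time, threading the
    // already-matched prefix through for error reporting; the empty case
    // checks that no tokens are left over.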
    (@O $lhs:ident, $aggr:expr,) => {
        assert!(
            $lhs.is_empty(),
            "unexpected leftover tokens `{}`",
            $lhs.into_token_stream()
        );
    };
    (@O $lhs:ident, $aggr:expr, ( $($inner:tt)* ) $($rhs:tt)*) => {
        assert_tokens!(@G $lhs, next_parenthesized, $aggr, '(', { $($inner)* }, { $($inner)* }, $($rhs)*);
    };
    (@O $lhs:ident, $aggr:expr, { $($inner:tt)* } $($rhs:tt)*) => {
        assert_tokens!(@G $lhs, next_braced, $aggr, '{', { $($inner)* }, { $($inner)* }, $($rhs)*);
    };
    (@O $lhs:ident, $aggr:expr, [ $($inner:tt)* ] $($rhs:tt)*) => {
        assert_tokens!(@G $lhs, next_bracketed, $aggr, '[', [ $($inner)* ], { $($inner)* }, $($rhs)*);
    };
    (@O $lhs:ident, $aggr:expr, $token:tt $($rhs:tt)*) => {
        if let Some(lhs) = $lhs
            .next_punctuation_group()
            .map(|t| t.to_string())
            .or_else(|| $lhs.next().map(|t| t.to_string()))
        {
            if lhs != stringify!($token) {
                assert_tokens!(@E $aggr, ($token), lhs);
            }
        } else {
            assert_tokens!(@E $aggr, ($token));
        }
        assert_tokens!(@O $lhs, assert_tokens!(@C $aggr, $token), $($rhs)*);
    };
}

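// Smoke test covering idents, nested groups, literals, and punctuation
// sequences.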
#[test]
fn test() {
    use quote::quote;
    assert_tokens!(quote!(ident ident, { group/test, vec![a, (a + b)] }, "literal" $), {
        ident ident, { group /test, vec![a,(a+b)] }, "literal" $
    });
    assert_tokens!(quote!(:::), {
        :::
    });
    assert_tokens!(quote!(more:::test::test:: hello :-D $$$ It should just work), {
        more ::: test ::test:: hello :-D $$$ It should just work
    });
}
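
// Illustrative sketch, not part of the original test suite: a mismatching
// token should take the `@E` error path and panic. The function name is
// chosen here for illustration only.
#[test]
#[should_panic]
fn test_mismatch_panics() {
    use quote::quote;
    assert_tokens!(quote!(ident other), { ident expected });
}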