use inflector::Inflector;
use proc_macro::TokenStream;
use quote::{format_ident, quote};
use syn::{
    braced,
    parse::{Parse, ParseStream, Result},
    parse_macro_input,
    punctuated::Punctuated,
    Attribute, Ident, Token, Visibility,
};

/// Required suffix for every token identifier listed in the macro input.
const TOKEN_SUFFIX: &str = "Token";

/// Parsed macro input: outer attributes, visibility, the struct name, and the
/// list of token items inside the braces.
struct Input {
    attrs: Vec<Attribute>,
    vis: Visibility,
    ident: Ident,
    tokens: Vec<Token>,
}

/// A single token item; `name` is the identifier with the `Token` suffix stripped.
struct Token {
    name: String,
}

impl Parse for Input {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let attrs = input.call(Attribute::parse_outer)?;
        let vis = input.parse()?;
        input.parse::<Token![struct]>()?;
        let ident = input.parse()?;
        let content;
        braced!(content in input);
        let tokens =
            content.call(Punctuated::<_, Token![,]>::parse_terminated)?.into_iter().collect();
        Ok(Self { attrs, vis, ident, tokens })
    }
}

impl Parse for Token {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let mut name = input.parse::<Ident>()?.to_string();
        if name.ends_with(TOKEN_SUFFIX) {
            name.truncate(name.len() - TOKEN_SUFFIX.len());
        } else {
            return Err(
                input.error(format!("Expected an ident which ends with `{}`", TOKEN_SUFFIX))
            );
        }
        Ok(Self { name })
    }
}

/// Generates the token-set struct inside a private wrapper module, implements
/// `drone_core::token::Token` for it, and re-exports it with the requested
/// visibility.
pub fn proc_macro(input: TokenStream) -> TokenStream {
    let Input { attrs, vis, ident, tokens } = parse_macro_input!(input);
    let wrapper = format_ident!("__{}_init_tokens", ident.to_string().to_snake_case());
    let mut def_tokens = Vec::new();
    let mut ctor_tokens = Vec::new();
    for Token { name } in tokens {
        let struct_ident = format_ident!("{}Token", name);
        let field_ident = format_ident!("{}", name.to_snake_case());
        def_tokens.push(quote! {
            #[allow(missing_docs)]
            pub #field_ident: #struct_ident,
        });
        ctor_tokens.push(quote! {
            #field_ident: ::drone_core::token::Token::take(),
        });
    }
    let expanded = quote! {
        mod #wrapper {
            use super::*;

            #(#attrs)*
            pub struct #ident {
                #(#def_tokens)*
                __priv: (),
            }

            unsafe impl ::drone_core::token::Token for #ident {
                #[inline]
                unsafe fn take() -> Self {
                    Self {
                        #(#ctor_tokens)*
                        __priv: (),
                    }
                }
            }
        }

        #vis use #wrapper::#ident;
    };
    expanded.into()
}
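
For orientation, below is a hedged sketch of what an invocation of this macro might look like and roughly what `proc_macro` above would generate for it. The macro name `simple_tokens!` is only a placeholder (the exported name that forwards to this function is not shown in this file), and `FooToken`/`BarToken` stand in for existing types that already implement `drone_core::token::Token`.

// Hypothetical invocation (placeholder macro name, assumed token types):
//
//     simple_tokens! {
//         /// The set of tokens available to the application.
//         pub struct AppTokens {
//             FooToken,
//             BarToken,
//         }
//     }
//
// Approximate expansion, following the `quote!` block above:

mod __app_tokens_init_tokens {
    use super::*;

    /// The set of tokens available to the application.
    pub struct AppTokens {
        #[allow(missing_docs)]
        pub foo: FooToken,
        #[allow(missing_docs)]
        pub bar: BarToken,
        __priv: (),
    }

    unsafe impl ::drone_core::token::Token for AppTokens {
        #[inline]
        unsafe fn take() -> Self {
            Self {
                foo: ::drone_core::token::Token::take(),
                bar: ::drone_core::token::Token::take(),
                __priv: (),
            }
        }
    }
}

pub use __app_tokens_init_tokens::AppTokens;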