Files
aho_corasick
anyhow
drone_config
drone_core
drone_core_macros
drone_ctypes
drone_macros_core
futures
futures_channel
futures_core
futures_io
futures_macro
futures_sink
futures_task
futures_util
if_chain
inflector
cases
camelcase
case
classcase
kebabcase
pascalcase
screamingsnakecase
sentencecase
snakecase
tablecase
titlecase
traincase
numbers
deordinalize
ordinalize
string
constants
deconstantize
demodulize
pluralize
singularize
suffix
foreignkey
lazy_static
memchr
pin_project_lite
pin_utils
proc_macro2
proc_macro_hack
proc_macro_nested
quote
regex
regex_syntax
serde
serde_derive
syn
toml
typenum
unicode_xid
  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
use proc_macro::TokenStream;
use proc_macro2::Span;
use quote::quote;
use std::collections::BTreeMap;
use syn::{
    braced,
    parse::{Parse, ParseStream, Result},
    parse_macro_input, Attribute, Ident, LitStr, Path, Token, Visibility,
};

/// Parsed input of the macro: the struct to generate plus the register
/// token definitions and exclusions it is built from.
struct Input {
    /// Outer attributes forwarded onto the generated struct.
    attrs: Vec<Attribute>,
    /// Visibility of the generated struct.
    vis: Visibility,
    /// Name of the generated struct.
    ident: Ident,
    /// Register token definitions (first braced block).
    defs: Vec<Def>,
    /// Names to exclude from `defs` (second braced block).
    undefs: Vec<Undef>,
}

/// A single register token definition: `#[attrs] NAME path::to::Reg;`.
struct Def {
    /// Outer attributes forwarded onto the generated field.
    attrs: Vec<Attribute>,
    /// Field name of the register token.
    ident: Ident,
    /// Path to the register type.
    path: Path,
}

/// A single exclusion entry: `NAME;` — removes the definition with the
/// same identifier from the generated output.
struct Undef {
    /// Name of the definition to remove.
    ident: Ident,
}

impl Parse for Input {
    /// Parses outer attributes, visibility, and a struct name, followed by
    /// two braced lists: register definitions, then exclusions.
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        // Both braced blocks follow the same shape: `{ item item ... }`.
        fn parse_braced_list<T: Parse>(input: ParseStream<'_>) -> Result<Vec<T>> {
            let block;
            braced!(block in input);
            let mut items = Vec::new();
            while !block.is_empty() {
                items.push(block.parse()?);
            }
            Ok(items)
        }
        let attrs = input.call(Attribute::parse_outer)?;
        let vis = input.parse()?;
        let ident = input.parse()?;
        let defs = parse_braced_list(input)?;
        let undefs = parse_braced_list(input)?;
        Ok(Self { attrs, vis, ident, defs, undefs })
    }
}

impl Parse for Def {
    /// Parses one definition of the form `#[attrs] NAME path::to::Reg;`.
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        // Struct-literal fields are evaluated left to right, so the
        // parse order (attrs, ident, path) is preserved.
        let def = Self {
            attrs: input.call(Attribute::parse_outer)?,
            ident: input.parse()?,
            path: input.parse()?,
        };
        input.parse::<Token![;]>()?;
        Ok(def)
    }
}

impl Parse for Undef {
    /// Parses one exclusion of the form `NAME;`.
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let undef = Self { ident: input.parse()? };
        input.parse::<Token![;]>()?;
        Ok(undef)
    }
}

/// Expands the macro input into a register-tokens struct, its unsafe
/// `Token::take` constructor, and per-register `assert_taken!` guards.
pub fn proc_macro(input: TokenStream) -> TokenStream {
    let Input { attrs, vis, ident, defs, undefs } = parse_macro_input!(input);
    // Keyed by the register's identifier string so exclusions can remove
    // entries; BTreeMap keeps the generated items in deterministic
    // (sorted) order.
    let mut fields = BTreeMap::new();
    let mut ctors = BTreeMap::new();
    let mut asserts = BTreeMap::new();
    for Def { attrs, ident, path } in &defs {
        let key = ident.to_string();
        let lit_str = LitStr::new(&key, Span::call_site());
        let field = quote! {
            #(#attrs)*
            #[allow(missing_docs)]
            pub #ident: #path<::drone_core::reg::tag::Srt>,
        };
        let ctor = quote! {
            #(#attrs)*
            #ident: ::drone_core::token::Token::take(),
        };
        let assert = quote! {
            ::drone_core::reg::assert_taken!(#lit_str);
        };
        fields.insert(key.clone(), field);
        ctors.insert(key.clone(), ctor);
        asserts.insert(key, assert);
    }
    // Drop every register listed in the exclusion block.
    for Undef { ident } in &undefs {
        let key = ident.to_string();
        fields.remove(&key);
        ctors.remove(&key);
        asserts.remove(&key);
    }
    let fields = fields.values();
    let ctors = ctors.values();
    let asserts = asserts.values();
    let expanded = quote! {
        #(#attrs)* #vis struct #ident {
            #(#fields)*
        }
        unsafe impl ::drone_core::token::Token for #ident {
            #[inline]
            unsafe fn take() -> Self {
                Self { #(#ctors)* }
            }
        }
        #(#asserts)*
    };
    expanded.into()
}