// casey/lib.rs

mod traits;

use proc_macro::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use traits::{PascalCaseExt, ShoutySnakeCaseExt, SnakeCaseExt};
4
5/// Apply a string transformation (`transform`) to the input `Ident`-
6/// However, it will not apply the transform to rust keywords.
7fn transform_non_keyword_ident<Transform>(ident: &Ident, transform: &Transform) -> Ident
8where
9    Transform: Fn(String) -> String,
10{
11    let ident_value = ident.to_string();
12    let is_keyword = syn::parse_str::<syn::Ident>(ident_value.as_str()).is_err();
13    if is_keyword {
14        ident.clone()
15    } else {
16        Ident::new(transform(ident_value).as_str(), Span::call_site())
17    }
18}
19
20fn transform_idents_in_stream2<Transform>(stream: TokenStream, transform: &Transform) -> TokenStream
21where
22    Transform: Fn(String) -> String,
23{
24    let mut transformed = TokenStream::new();
25    let mut attr_macro_hit = false;
26    for tt_in in stream {
27        let tt_out = match tt_in {
28            TokenTree::Punct(punct) => {
29                attr_macro_hit = punct.as_char() == '#';
30                punct.into()
31            }
32            TokenTree::Literal(l) => {
33                attr_macro_hit = attr_macro_hit && l.to_string() == "[";
34                l.into()
35            }
36            TokenTree::Ident(ref ident) => transform_non_keyword_ident(ident, transform).into(),
37            // find all idents in `TokenGroup` apply and reconstruct the group
38            TokenTree::Group(ref group) => TokenTree::Group(Group::new(
39                group.delimiter(),
40                group
41                    .stream()
42                    .into_iter()
43                    .map(|group_token_tree| {
44                        if let TokenTree::Ident(ref ident) = group_token_tree {
45                            if attr_macro_hit {
46                                attr_macro_hit = false;
47                                TokenTree::Ident(ident.clone())
48                            } else {
49                                transform_non_keyword_ident(ident, transform).into()
50                            }
51                        } else {
52                            group_token_tree
53                        }
54                    })
55                    .collect::<TokenStream>(),
56            )),
57        };
58        transformed.extend([tt_out]);
59    }
60    transformed
61}
62
63/// Expands idents in the input stream as UPPERCASE
64#[proc_macro]
65pub fn upper(stream: TokenStream) -> TokenStream {
66    transform_idents_in_stream2(stream, &|s: String| s.to_uppercase())
67}
68
69/// Expands idents in the input stream as lowercase
70#[proc_macro]
71pub fn lower(stream: TokenStream) -> TokenStream {
72    transform_idents_in_stream2(stream, &|s: String| s.to_lowercase())
73}
74
75/// Expands idents in the input stream as snake_case
76/// e.g. `HelloWorld` -> `hello_world`
77#[proc_macro]
78pub fn snake(stream: TokenStream) -> TokenStream {
79    transform_idents_in_stream2(stream, &|s: String| s.to_snake_case())
80}
81
82/// Expands idents in the input stream as PascalCase
83/// e.g. `helloWorld` -> `HelloWorld`
84#[proc_macro]
85pub fn pascal(stream: TokenStream) -> TokenStream {
86    transform_idents_in_stream2(stream, &|s: String| s.to_pascal_case())
87}
88
89/// Expands idents in the input stream as SHOUTY_CASE
90/// e.g. `HelloWorld` -> `HELLO_WORLD`
91#[proc_macro]
92pub fn shouty(stream: TokenStream) -> TokenStream {
93    transform_idents_in_stream2(stream, &|s: String| s.to_shouty_snake_case())
94}