// SPDX-License-Identifier: GPL-2.0

use proc_macro::{token_stream, Group, TokenStream, TokenTree};

pub(crate) fn try_ident(it: &mut token_stream::IntoIter) -> Option<String> {
    if let Some(TokenTree::Ident(ident)) = it.next() {
        Some(ident.to_string())
    } else {
        None
    }
}

pub(crate) fn try_literal(it: &mut token_stream::IntoIter) -> Option<String> {
    if let Some(TokenTree::Literal(literal)) = it.next() {
        Some(literal.to_string())
    } else {
        None
    }
}

pub(crate) fn try_string(it: &mut token_stream::IntoIter) -> Option<String> {
    try_literal(it).and_then(|string| {
        if string.starts_with('\"') && string.ends_with('\"') {
            let content = &string[1..string.len() - 1];
            if content.contains('\\') {
                panic!("Escape sequences in string literals not yet handled");
            }
            Some(content.to_string())
        } else if string.starts_with("r\"") {
            panic!("Raw string literals are not yet handled");
        } else {
            None
        }
    })
}

pub(crate) fn expect_ident(it: &mut token_stream::IntoIter) -> String {
    try_ident(it).expect("Expected Ident")
}

pub(crate) fn expect_punct(it: &mut token_stream::IntoIter) -> char {
    if let TokenTree::Punct(punct) = it.next().expect("Reached end of token stream for Punct") {
        punct.as_char()
    } else {
        panic!("Expected Punct");
    }
}

pub(crate) fn expect_string(it: &mut token_stream::IntoIter) -> String {
    try_string(it).expect("Expected string")
}

pub(crate) fn expect_string_ascii(it: &mut token_stream::IntoIter) -> String {
    let string = try_string(it).expect("Expected string");
    assert!(string.is_ascii(), "Expected ASCII string");
    string
}

pub(crate) fn expect_group(it: &mut token_stream::IntoIter) -> Group {
    if let TokenTree::Group(group) = it.next().expect("Reached end of token stream for Group") {
        group
    } else {
        panic!("Expected Group");
    }
}

pub(crate) fn expect_end(it: &mut token_stream::IntoIter) {
    if it.next().is_some() {
        panic!("Expected end");
    }
}
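
// A minimal usage sketch of the helpers above, assuming a hypothetical input of the
// form `name = "value"`; `example_parse_name_value` and that input syntax are
// illustrative only and not used by any macro in this file.
#[allow(dead_code)]
fn example_parse_name_value(input: TokenStream) -> (String, String) {
    let mut it = input.into_iter();
    // `name`
    let name = expect_ident(&mut it);
    // `=`
    assert_eq!(expect_punct(&mut it), '=', "Expected `=`");
    // `"value"`
    let value = expect_string(&mut it);
    // No tokens may remain.
    expect_end(&mut it);
    (name, value)
}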

/// Parsed generics.
///
/// See the field documentation for an explanation of what each of the fields represents.
///
/// # Examples
///
/// ```rust,ignore
/// # let input = todo!();
/// let (Generics { decl_generics, impl_generics, ty_generics }, rest) = parse_generics(input);
/// quote! {
///     struct Foo<$($decl_generics)*> {
///         // ...
///     }
///
///     impl<$impl_generics> Foo<$ty_generics> {
///         fn foo() {
///             // ...
///         }
///     }
/// }
/// ```
pub(crate) struct Generics {
    /// The generics with bounds and default values (e.g. `T: Clone, const N: usize = 0`).
    ///
    /// Use this on type definitions e.g. `struct Foo<$decl_generics> ...` (or `union`/`enum`).
    pub(crate) decl_generics: Vec<TokenTree>,
    /// The generics with bounds (e.g. `T: Clone, const N: usize`).
    ///
    /// Use this on `impl` blocks e.g. `impl<$impl_generics> Trait for ...`.
    pub(crate) impl_generics: Vec<TokenTree>,
    /// The generics without bounds and without default values (e.g. `T, N`).
    ///
    /// Use this when you use the type that is declared with these generics e.g.
    /// `Foo<$ty_generics>`.
    pub(crate) ty_generics: Vec<TokenTree>,
}
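
// For example, given the declaration `struct Foo<T: Clone, const N: usize = 0>` (a sample
// input matching the field documentation above), `parse_generics()` below produces
// `decl_generics` = `T: Clone, const N: usize = 0`, `impl_generics` = `T: Clone, const N: usize`
// and `ty_generics` = `T, N`.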

/// Parses the given `TokenStream` into `Generics` and the rest.
///
/// The generics are not present in the rest, but a `where` clause might remain.
pub(crate) fn parse_generics(input: TokenStream) -> (Generics, Vec<TokenTree>) {
    // The generics with bounds and default values.
    let mut decl_generics = vec![];
    // `impl_generics`, the declared generics with their bounds.
    let mut impl_generics = vec![];
    // Only the names of the generics, without any bounds.
    let mut ty_generics = vec![];
    // Tokens not related to the generics, e.g. the `where` token and the definition.
    let mut rest = vec![];
    // The current level of `<`.
    let mut nesting = 0;
    let mut toks = input.into_iter();
    // If we are at the beginning of a generic parameter.
    let mut at_start = true;
    let mut skip_until_comma = false;
    while let Some(tt) = toks.next() {
        if nesting == 1 && matches!(&tt, TokenTree::Punct(p) if p.as_char() == '>') {
            // Found the end of the generics.
            break;
        } else if nesting >= 1 {
            decl_generics.push(tt.clone());
        }
        match tt.clone() {
            TokenTree::Punct(p) if p.as_char() == '<' => {
                if nesting >= 1 && !skip_until_comma {
                    // This is inside of the generics and part of some bound.
                    impl_generics.push(tt);
                }
                nesting += 1;
            }
            TokenTree::Punct(p) if p.as_char() == '>' => {
                // This is a parsing error, so we just end it here.
                if nesting == 0 {
                    break;
                } else {
                    nesting -= 1;
                    if nesting >= 1 && !skip_until_comma {
                        // We are still inside of the generics and part of some bound.
                        impl_generics.push(tt);
                    }
                }
            }
            TokenTree::Punct(p) if skip_until_comma && p.as_char() == ',' => {
                if nesting == 1 {
                    impl_generics.push(tt.clone());
                    impl_generics.push(tt);
                    skip_until_comma = false;
                }
            }
            _ if !skip_until_comma => {
                match nesting {
                    // If we haven't entered the generics yet, we still want to keep these tokens.
                    0 => rest.push(tt),
                    1 => {
                        // Depending on the token, this might be a generic parameter name.
                        match tt.clone() {
                            TokenTree::Ident(i) if at_start && i.to_string() == "const" => {
                                let Some(name) = toks.next() else {
                                    // Parsing error.
                                    break;
                                };
                                impl_generics.push(tt);
                                impl_generics.push(name.clone());
                                ty_generics.push(name.clone());
                                decl_generics.push(name);
                                at_start = false;
                            }
                            TokenTree::Ident(_) if at_start => {
                                impl_generics.push(tt.clone());
                                ty_generics.push(tt);
                                at_start = false;
                            }
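                            // A top-level `,` ends the current generic parameter: it belongs in
                            // both `impl_generics` and `ty_generics`, and the next token starts
                            // a new parameter.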
                            TokenTree::Punct(p) if p.as_char() == ',' => {
                                impl_generics.push(tt.clone());
                                ty_generics.push(tt);
                                at_start = true;
                            }
                            // Lifetimes begin with `'`.
                            TokenTree::Punct(p) if p.as_char() == '\'' && at_start => {
                                impl_generics.push(tt.clone());
                                ty_generics.push(tt);
                            }
                            // Generics can have default values; we skip these.
                            TokenTree::Punct(p) if p.as_char() == '=' => {
                                skip_until_comma = true;
                            }
                            _ => impl_generics.push(tt),
                        }
                    }
                    _ => impl_generics.push(tt),
                }
            }
            _ => {}
        }
    }
    rest.extend(toks);
    (
        Generics {
            impl_generics,
            decl_generics,
            ty_generics,
        },
        rest,
    )
}
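
// A minimal sketch of a caller, assuming a hypothetical helper `example_strip_generics`
// that drops the generics from a declaration: for `struct Foo<T: Clone> where T: Default {}`
// it returns `struct Foo where T: Default {}`, since the tokens before `<` and everything
// after the matching `>` end up in `rest`.
#[allow(dead_code)]
fn example_strip_generics(input: TokenStream) -> TokenStream {
    let (_generics, rest) = parse_generics(input);
    // `rest` is a `Vec<TokenTree>`; collect it back into a `TokenStream`.
    rest.into_iter().collect()
}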