// SPDX-License-Identifier: GPL-2.0

use proc_macro::{token_stream, Group, TokenStream, TokenTree};

pub(crate) fn try_ident(it: &mut token_stream::IntoIter) -> Option<String> {
    if let Some(TokenTree::Ident(ident)) = it.next() {
        Some(ident.to_string())
    } else {
        None
    }
}

pub(crate) fn try_literal(it: &mut token_stream::IntoIter) -> Option<String> {
    if let Some(TokenTree::Literal(literal)) = it.next() {
        Some(literal.to_string())
    } else {
        None
    }
}

pub(crate) fn try_string(it: &mut token_stream::IntoIter) -> Option<String> {
    try_literal(it).and_then(|string| {
        if string.starts_with('\"') && string.ends_with('\"') {
            let content = &string[1..string.len() - 1];
            if content.contains('\\') {
                panic!("Escape sequences in string literals not yet handled");
            }
            Some(content.to_string())
        } else if string.starts_with("r\"") {
            panic!("Raw string literals are not yet handled");
        } else {
            None
        }
    })
}

pub(crate) fn expect_ident(it: &mut token_stream::IntoIter) -> String {
    try_ident(it).expect("Expected Ident")
}

pub(crate) fn expect_punct(it: &mut token_stream::IntoIter) -> char {
    if let TokenTree::Punct(punct) = it.next().expect("Reached end of token stream for Punct") {
        punct.as_char()
    } else {
        panic!("Expected Punct");
    }
}

pub(crate) fn expect_string(it: &mut token_stream::IntoIter) -> String {
    try_string(it).expect("Expected string")
}

pub(crate) fn expect_string_ascii(it: &mut token_stream::IntoIter) -> String {
    let string = try_string(it).expect("Expected string");
    assert!(string.is_ascii(), "Expected ASCII string");
    string
}

pub(crate) fn expect_group(it: &mut token_stream::IntoIter) -> Group {
    if let TokenTree::Group(group) = it.next().expect("Reached end of token stream for Group") {
        group
    } else {
        panic!("Expected Group");
    }
}

pub(crate) fn expect_end(it: &mut token_stream::IntoIter) {
    if it.next().is_some() {
        panic!("Expected end");
    }
}
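
// Example (an illustrative sketch only, not taken from the in-tree callers of these
// helpers): consuming a hypothetical `name: "foo",` argument list with the functions
// above. The `input` identifier and the expected token shapes are assumptions made
// for the sake of the example.
//
// ```rust,ignore
// let mut it = input.into_iter();
// let key = expect_ident(&mut it); // `name`
// assert_eq!(expect_punct(&mut it), ':');
// let value = expect_string(&mut it); // `foo` (surrounding quotes stripped)
// assert_eq!(expect_punct(&mut it), ',');
// expect_end(&mut it); // Panics if any tokens remain.
// ```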

/// Parsed generics.
///
/// See the field documentation for an explanation what each of the fields represents.
///
/// # Examples
///
/// ```rust,ignore
/// # let input = todo!();
/// let (Generics { decl_generics, impl_generics, ty_generics }, rest) = parse_generics(input);
/// quote! {
///     struct Foo<$($decl_generics)*> {
///         // ...
///     }
///
///     impl<$impl_generics> Foo<$ty_generics> {
///         fn foo() {
///             // ...
///         }
///     }
/// }
/// ```
pub(crate) struct Generics {
    /// The generics with bounds and default values (e.g. `T: Clone, const N: usize = 0`).
    ///
    /// Use this on type definitions e.g. `struct Foo<$decl_generics> ...`.
    pub(crate) decl_generics: Vec<TokenTree>,
    /// The generics with bounds (e.g. `T: Clone, const N: usize`).
    ///
    /// Use this on `impl` blocks e.g. `impl<$impl_generics> Trait for ...`.
    pub(crate) impl_generics: Vec<TokenTree>,
    /// The generics without bounds and without default values (e.g. `T, N`).
    ///
    /// Use this when you use the type that is declared with these generics e.g.
    /// `Foo<$ty_generics>`.
    pub(crate) ty_generics: Vec<TokenTree>,
}
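
// Worked example (hypothetical input, for illustration only): for
// `struct Foo<'a, T: Bound, const N: usize = 0> { /* ... */ }`, where `Bound` is just a
// placeholder trait, `parse_generics()` below yields roughly:
//
// - `decl_generics`: `'a, T: Bound, const N: usize = 0`
// - `impl_generics`: `'a, T: Bound, const N: usize`
// - `ty_generics`:   `'a, T, N`
// - rest:            `struct Foo { /* ... */ }` (the generics are stripped, everything
//                    else is kept).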

/// Parses the given `TokenStream` into `Generics` and the rest.
///
/// The generics are not present in the rest, but a where clause might remain.
pub(crate) fn parse_generics(input: TokenStream) -> (Generics, Vec<TokenTree>) {
    // The generics with bounds and default values.
    let mut decl_generics = vec![];
    // `impl_generics`, the declared generics with their bounds.
    let mut impl_generics = vec![];
    // Only the names of the generics, without any bounds.
    let mut ty_generics = vec![];
    // Tokens not related to the generics e.g. the `where` token and definition.
    let mut rest = vec![];
    // The current level of `<`.
    let mut nesting = 0;
    let mut toks = input.into_iter();
    // If we are at the beginning of a generic parameter.
    let mut at_start = true;
    let mut skip_until_comma = false;
    while let Some(tt) = toks.next() {
        if nesting == 1 && matches!(&tt, TokenTree::Punct(p) if p.as_char() == '>') {
            // Found the end of the generics.
            break;
        } else if nesting >= 1 {
            decl_generics.push(tt.clone());
        }
        match tt.clone() {
            TokenTree::Punct(p) if p.as_char() == '<' => {
                if nesting >= 1 && !skip_until_comma {
                    // This is inside of the generics and part of some bound.
                    impl_generics.push(tt);
                }
                nesting += 1;
            }
            TokenTree::Punct(p) if p.as_char() == '>' => {
                // This is a parsing error, so we just end it here.
                if nesting == 0 {
                    break;
                } else {
                    nesting -= 1;
                    if nesting >= 1 && !skip_until_comma {
                        // We are still inside of the generics and part of some bound.
                        impl_generics.push(tt);
                    }
                }
            }
            TokenTree::Punct(p) if skip_until_comma && p.as_char() == ',' => {
                if nesting == 1 {
                    impl_generics.push(tt.clone());
                    impl_generics.push(tt);
                    skip_until_comma = false;
                }
            }
            _ if !skip_until_comma => {
                match nesting {
                    // If we haven't entered the generics yet, we still want to keep these tokens.
                    0 => rest.push(tt),
                    1 => {
                        // Here depending on the token, it might be a generic variable name.
                        match tt.clone() {
                            TokenTree::Ident(i) if at_start && i.to_string() == "const" => {
                                let Some(name) = toks.next() else {
                                    // Parsing error.
                                    break;
                                };
                                impl_generics.push(tt);
                                impl_generics.push(name.clone());
                                ty_generics.push(name.clone());
                                decl_generics.push(name);
                                at_start = false;
                            }
                            TokenTree::Ident(_) if at_start => {
                                impl_generics.push(tt.clone());
                                ty_generics.push(tt);
                                at_start = false;
                            }
                            TokenTree::Punct(p) if p.as_char() == ',' => {
                                impl_generics.push(tt.clone());
                                ty_generics.push(tt);
                                at_start = true;
                            }
                            // Lifetimes begin with `'`.
                            TokenTree::Punct(p) if p.as_char() == '\'' && at_start => {
                                impl_generics.push(tt.clone());
                                ty_generics.push(tt);
                            }
                            // Generics can have default values, we skip these.
                            TokenTree::Punct(p) if p.as_char() == '=' => {
                                skip_until_comma = true;
                            }
                            _ => impl_generics.push(tt),
                        }
                    }
                    _ => impl_generics.push(tt),
                }
            }
            _ => {}
        }
    }
    rest.extend(toks);
    (
        Generics {
            impl_generics,
            decl_generics,
            ty_generics,
        },
        rest,
    )
}
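
// Sketch of the "a where clause might remain" note above (hypothetical input, for
// illustration only):
//
// ```rust,ignore
// // Input: struct Foo<T> where T: Bound { /* ... */ }
// // rest:  struct Foo where T: Bound { /* ... */ }
// //
// // Only the `<T>` part is consumed; the `where` clause stays in `rest` for the
// // caller to handle.
// ```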