// SPDX-License-Identifier: GPL-2.0

use proc_macro::{token_stream, Group, TokenStream, TokenTree};

pub(crate) fn try_ident(it: &mut token_stream::IntoIter) -> Option<String> {
    if let Some(TokenTree::Ident(ident)) = it.next() {
        Some(ident.to_string())
    } else {
        None
    }
}

pub(crate) fn try_literal(it: &mut token_stream::IntoIter) -> Option<String> {
    if let Some(TokenTree::Literal(literal)) = it.next() {
        Some(literal.to_string())
    } else {
        None
    }
}

pub(crate) fn try_string(it: &mut token_stream::IntoIter) -> Option<String> {
    try_literal(it).and_then(|string| {
        if string.starts_with('\"') && string.ends_with('\"') {
            let content = &string[1..string.len() - 1];
            if content.contains('\\') {
                panic!("Escape sequences in string literals are not yet handled");
            }
            Some(content.to_string())
        } else if string.starts_with("r\"") {
            panic!("Raw string literals are not yet handled");
        } else {
            None
        }
    })
}

pub(crate) fn expect_ident(it: &mut token_stream::IntoIter) -> String {
    try_ident(it).expect("Expected Ident")
}

pub(crate) fn expect_punct(it: &mut token_stream::IntoIter) -> char {
    if let TokenTree::Punct(punct) = it.next().expect("Reached end of token stream for Punct") {
        punct.as_char()
    } else {
        panic!("Expected Punct");
    }
}

pub(crate) fn expect_string(it: &mut token_stream::IntoIter) -> String {
    try_string(it).expect("Expected string")
}

pub(crate) fn expect_string_ascii(it: &mut token_stream::IntoIter) -> String {
    let string = try_string(it).expect("Expected string");
    assert!(string.is_ascii(), "Expected ASCII string");
    string
}

pub(crate) fn expect_group(it: &mut token_stream::IntoIter) -> Group {
    if let TokenTree::Group(group) = it.next().expect("Reached end of token stream for Group") {
        group
    } else {
        panic!("Expected Group");
    }
}

pub(crate) fn expect_end(it: &mut token_stream::IntoIter) {
    if it.next().is_some() {
        panic!("Expected end");
    }
}

/// Parsed generics.
///
/// See the field documentation for an explanation what each of the fields represents.
///
/// # Examples
///
/// ```rust,ignore
/// # let input = todo!();
/// let (Generics { decl_generics, impl_generics, ty_generics }, rest) = parse_generics(input);
/// quote! {
///     struct Foo<$($decl_generics)*> {
///         // ...
///     }
///
///     impl<$impl_generics> Foo<$ty_generics> {
///         fn foo() {
///             // ...
///         }
///     }
/// }
/// ```
pub(crate) struct Generics {
    /// The generics with bounds and default values (e.g. `T: Clone, const N: usize = 0`).
    ///
    /// Use this on type definitions e.g. `struct Foo<$decl_generics> ...`.
    pub(crate) decl_generics: Vec<TokenTree>,
    /// The generics with bounds (e.g. `T: Clone, const N: usize`).
    ///
    /// Use this on `impl` blocks e.g. `impl<$impl_generics> Trait for ...`.
    pub(crate) impl_generics: Vec<TokenTree>,
    /// The generics without bounds and without default values (e.g. `T, N`).
    ///
    /// Use this when you use the type that is declared with these generics e.g.
    /// `Foo<$ty_generics>`.
    pub(crate) ty_generics: Vec<TokenTree>,
}

/// Parses the given `TokenStream` into `Generics` and the rest.
///
/// The generics are not present in the rest, but a where clause might remain.
pub(crate) fn parse_generics(input: TokenStream) -> (Generics, Vec<TokenTree>) {
    // The generics with bounds and default values.
    let mut decl_generics = vec![];
    // `impl_generics`, the declared generics with their bounds.
    let mut impl_generics = vec![];
    // Only the names of the generics, without any bounds.
    let mut ty_generics = vec![];
    // Tokens not related to the generics e.g. the `where` clause.
    let mut rest = vec![];
    // The current level of `<`.
    let mut nesting = 0;
    let mut toks = input.into_iter();
    // If we are at the beginning of a generic parameter.
    let mut at_start = true;
    let mut skip_until_comma = false;
    while let Some(tt) = toks.next() {
        if nesting == 1 && matches!(&tt, TokenTree::Punct(p) if p.as_char() == '>') {
            // Found the end of the generics.
            break;
        } else if nesting >= 1 {
            decl_generics.push(tt.clone());
        }
        match tt.clone() {
            TokenTree::Punct(p) if p.as_char() == '<' => {
                if nesting >= 1 && !skip_until_comma {
                    // This is inside of the generics and part of some bound.
                    impl_generics.push(tt);
                }
                nesting += 1;
            }
            TokenTree::Punct(p) if p.as_char() == '>' => {
                // This is a parsing error, so we just end it here.
                if nesting == 0 {
                    break;
                } else {
                    nesting -= 1;
                    if nesting >= 1 && !skip_until_comma {
                        // We are still inside of the generics and part of some bound.
                        impl_generics.push(tt);
                    }
                }
            }
            TokenTree::Punct(p) if skip_until_comma && p.as_char() == ',' => {
                if nesting == 1 {
                    impl_generics.push(tt.clone());
                    impl_generics.push(tt);
                    skip_until_comma = false;
                }
            }
            _ if !skip_until_comma => {
                match nesting {
                    // If we haven't entered the generics yet, we still want to keep these tokens.
                    0 => rest.push(tt),
                    1 => {
                        // Here depending on the token, it might be a generic variable name.
                        match tt.clone() {
                            TokenTree::Ident(i) if at_start && i.to_string() == "const" => {
                                let Some(name) = toks.next() else {
                                    // Parsing error.
                                    break;
                                };
                                impl_generics.push(tt);
                                impl_generics.push(name.clone());
                                ty_generics.push(name.clone());
                                decl_generics.push(name);
                                at_start = false;
                            }
                            TokenTree::Ident(_) if at_start => {
                                impl_generics.push(tt.clone());
                                ty_generics.push(tt);
                                at_start = false;
                            }
                            TokenTree::Punct(p) if p.as_char() == ',' => {
                                impl_generics.push(tt.clone());
                                ty_generics.push(tt);
                                at_start = true;
                            }
                            // Lifetimes begin with `'`.
                            TokenTree::Punct(p) if p.as_char() == '\'' && at_start => {
                                impl_generics.push(tt.clone());
                                ty_generics.push(tt);
                            }
                            // Generics can have default values, we skip these.
                            TokenTree::Punct(p) if p.as_char() == '=' => {
                                skip_until_comma = true;
                            }
                            _ => impl_generics.push(tt),
                        }
                    }
                    _ => impl_generics.push(tt),
                }
            }
            _ => {}
        }
    }
    rest.extend(toks);
    (
        Generics {
            impl_generics,
            decl_generics,
            ty_generics,
        },
        rest,
    )
}
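
// The sketch below is not part of the original kernel file; it is a hypothetical,
// illustrative helper (name `example_strip_generics` is an assumption) showing how
// the pieces above are typically combined: `parse_generics` splits a definition
// into its three generics views plus the remaining tokens, which a macro can then
// splice back into generated code.
#[allow(dead_code)]
pub(crate) fn example_strip_generics(definition: TokenStream) -> TokenStream {
    let (
        Generics {
            decl_generics,
            impl_generics,
            ty_generics,
        },
        rest,
    ) = parse_generics(definition);
    // `decl_generics` keeps bounds and default values (for the type definition),
    // `impl_generics` keeps only the bounds (for `impl<...>` blocks), and
    // `ty_generics` keeps only the parameter names (for `Foo<...>` uses).
    let _ = (decl_generics, impl_generics, ty_generics);
    // `rest` holds the tokens before `<` and after the matching `>`, e.g.
    // `struct Foo { .. }` for an input of `struct Foo<T: Clone = ()> { .. }`.
    rest.into_iter().collect()
}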