// SPDX-License-Identifier: GPL-2.0

use proc_macro::{token_stream, Group, TokenStream, TokenTree};

/// Returns the next token as a `String` if it is an identifier.
pub(crate) fn try_ident(it: &mut token_stream::IntoIter) -> Option<String> {
    if let Some(TokenTree::Ident(ident)) = it.next() {
        Some(ident.to_string())
    } else {
        None
    }
}

/// Returns the next token as a `String` if it is a literal.
pub(crate) fn try_literal(it: &mut token_stream::IntoIter) -> Option<String> {
    if let Some(TokenTree::Literal(literal)) = it.next() {
        Some(literal.to_string())
    } else {
        None
    }
}

/// Returns the contents of the next token if it is a plain string literal.
pub(crate) fn try_string(it: &mut token_stream::IntoIter) -> Option<String> {
    try_literal(it).and_then(|string| {
        if string.starts_with('\"') && string.ends_with('\"') {
            let content = &string[1..string.len() - 1];
            if content.contains('\\') {
                panic!("Escape sequences in string literals not yet handled");
            }
            Some(content.to_string())
        } else if string.starts_with("r\"") {
            panic!("Raw string literals are not yet handled");
        } else {
            None
        }
    })
}

/// Like [`try_ident`], but panics if the next token is not an identifier.
pub(crate) fn expect_ident(it: &mut token_stream::IntoIter) -> String {
    try_ident(it).expect("Expected Ident")
}

/// Returns the next punctuation token as a `char`, panicking otherwise.
pub(crate) fn expect_punct(it: &mut token_stream::IntoIter) -> char {
    if let TokenTree::Punct(punct) = it.next().expect("Reached end of token stream for Punct") {
        punct.as_char()
    } else {
        panic!("Expected Punct");
    }
}

/// Like [`try_string`], but panics if the next token is not a string literal.
pub(crate) fn expect_string(it: &mut token_stream::IntoIter) -> String {
    try_string(it).expect("Expected string")
}

/// Like [`expect_string`], but also panics if the string is not ASCII.
pub(crate) fn expect_string_ascii(it: &mut token_stream::IntoIter) -> String {
    let string = try_string(it).expect("Expected string");
    assert!(string.is_ascii(), "Expected ASCII string");
    string
}

/// Returns the next token, panicking if it is not a [`Group`].
pub(crate) fn expect_group(it: &mut token_stream::IntoIter) -> Group {
    if let TokenTree::Group(group) = it.next().expect("Reached end of token stream for Group") {
        group
    } else {
        panic!("Expected Group");
    }
}

/// Panics if any tokens are left in the stream.
pub(crate) fn expect_end(it: &mut token_stream::IntoIter) {
    if it.next().is_some() {
        panic!("Expected end");
    }
}

/// Parsed generics.
///
/// See the field documentation for an explanation what each of the fields represents.
///
/// # Examples
///
/// ```rust,ignore
/// # let input = todo!();
/// let (Generics { decl_generics, impl_generics, ty_generics }, rest) = parse_generics(input);
/// quote! {
///     struct Foo<$($decl_generics)*> {
///         // ...
///     }
///
///     impl<$impl_generics> Foo<$ty_generics> {
///         fn foo() {
///             // ...
///         }
///     }
/// }
/// ```
pub(crate) struct Generics {
    /// The generics with bounds and default values (e.g. `T: Clone, const N: usize = 0`).
    ///
    /// Use this on type definitions e.g. `struct Foo<$decl_generics> ...`.
    pub(crate) decl_generics: Vec<TokenTree>,
    /// The generics with bounds (e.g. `T: Clone, const N: usize`).
    ///
    /// Use this on `impl` blocks e.g. `impl<$impl_generics> Trait for ...`.
    pub(crate) impl_generics: Vec<TokenTree>,
    /// The generics without bounds and without default values (e.g. `T, N`).
    ///
    /// Use this when you use the type that is declared with these generics e.g.
    /// `Foo<$ty_generics>`.
    pub(crate) ty_generics: Vec<TokenTree>,
}

/// Parses the given `TokenStream` into `Generics` and the rest.
///
/// The generics are not present in the rest, but a `where` clause might remain.
pub(crate) fn parse_generics(input: TokenStream) -> (Generics, Vec<TokenTree>) {
    // The generics with bounds and default values.
    let mut decl_generics = vec![];
    // `impl_generics`, the declared generics with their bounds.
    let mut impl_generics = vec![];
    // Only the names of the generics, without any bounds.
    let mut ty_generics = vec![];
    // Tokens not related to the generics e.g. the type name or the `where` clause.
    let mut rest = vec![];
    // The current level of `<`.
    let mut nesting = 0;
    let mut toks = input.into_iter();
    // If we are at the beginning of a generic parameter.
    let mut at_start = true;
    let mut skip_until_comma = false;
    while let Some(tt) = toks.next() {
        if nesting == 1 && matches!(&tt, TokenTree::Punct(p) if p.as_char() == '>') {
            // Found the end of the generics.
            break;
        } else if nesting >= 1 {
            decl_generics.push(tt.clone());
        }
        match tt.clone() {
            TokenTree::Punct(p) if p.as_char() == '<' => {
                if nesting >= 1 && !skip_until_comma {
                    // This is inside of the generics and part of some bound.
                    impl_generics.push(tt);
                }
                nesting += 1;
            }
            TokenTree::Punct(p) if p.as_char() == '>' => {
                // This is a parsing error, so we just end it here.
                if nesting == 0 {
                    break;
                } else {
                    nesting -= 1;
                    if nesting >= 1 && !skip_until_comma {
                        // We are still inside of the generics and part of some bound.
                        impl_generics.push(tt);
                    }
                }
            }
            TokenTree::Punct(p) if skip_until_comma && p.as_char() == ',' => {
                if nesting == 1 {
                    impl_generics.push(tt.clone());
                    impl_generics.push(tt);
                    skip_until_comma = false;
                }
            }
            _ if !skip_until_comma => {
                match nesting {
                    // If we haven't entered the generics yet, we still want to keep these tokens.
                    0 => rest.push(tt),
                    1 => {
                        // Here depending on the token, it might be a generic variable name.
                        match tt.clone() {
                            TokenTree::Ident(i) if at_start && i.to_string() == "const" => {
                                let Some(name) = toks.next() else {
                                    // Parsing error.
                                    break;
                                };
                                impl_generics.push(tt);
                                impl_generics.push(name.clone());
                                ty_generics.push(name.clone());
                                decl_generics.push(name);
                                at_start = false;
                            }
                            TokenTree::Ident(_) if at_start => {
                                impl_generics.push(tt.clone());
                                ty_generics.push(tt);
                                at_start = false;
                            }
                            TokenTree::Punct(p) if p.as_char() == ',' => {
                                impl_generics.push(tt.clone());
                                ty_generics.push(tt);
                                at_start = true;
                            }
                            // Lifetimes begin with `'`.
                            TokenTree::Punct(p) if p.as_char() == '\'' && at_start => {
                                impl_generics.push(tt.clone());
                                ty_generics.push(tt);
                            }
                            // Generics can have default values, we skip these.
                            TokenTree::Punct(p) if p.as_char() == '=' => {
                                skip_until_comma = true;
                            }
                            _ => impl_generics.push(tt),
                        }
                    }
                    _ => impl_generics.push(tt),
                }
            }
            _ => {}
        }
    }
    rest.extend(toks);
    (
        Generics {
            impl_generics,
            decl_generics,
            ty_generics,
        },
        rest,
    )
}
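// Illustrative sketch (not part of the upstream helpers): how the `expect_*`
// helpers above are typically combined inside a proc macro to consume input of
// the form `name: "value",`. The function name `parse_named_string` and the
// input shape are hypothetical and only demonstrate the calling pattern.
#[allow(dead_code)]
pub(crate) fn parse_named_string(it: &mut token_stream::IntoIter) -> (String, String) {
    // `name`
    let name = expect_ident(it);
    // `:`
    assert_eq!(expect_punct(it), ':');
    // `"value"` (panics on raw strings or escape sequences, see `try_string`)
    let value = expect_string(it);
    // Trailing `,`.
    assert_eq!(expect_punct(it), ',');
    (name, value)
}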