Add a new trait proc_macro::ToTokens
qinheping/verify-rust-std@80f0aa3
The commit adds the following new source file, defining the `ToTokens` trait and its implementations for token types, smart pointers, and common standard-library types:

```rust
use std::borrow::Cow;
use std::ffi::{CStr, CString};
use std::rc::Rc;

use crate::{ConcatTreesHelper, Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};

/// Types that can be interpolated inside a [`quote!`] invocation.
///
/// [`quote!`]: crate::quote!
#[unstable(feature = "proc_macro_totokens", issue = "130977")]
pub trait ToTokens {
    /// Write `self` to the given `TokenStream`.
    ///
    /// # Example
    ///
    /// Example implementation for a struct representing Rust paths like
    /// `std::cmp::PartialEq`:
    ///
    /// ```
    /// #![feature(proc_macro_totokens)]
    ///
    /// use std::iter;
    /// use proc_macro::{Spacing, Punct, TokenStream, TokenTree, ToTokens};
    ///
    /// pub struct Path {
    ///     pub global: bool,
    ///     pub segments: Vec<PathSegment>,
    /// }
    ///
    /// impl ToTokens for Path {
    ///     fn to_tokens(&self, tokens: &mut TokenStream) {
    ///         for (i, segment) in self.segments.iter().enumerate() {
    ///             if i > 0 || self.global {
    ///                 // Double colon `::`
    ///                 tokens.extend(iter::once(TokenTree::from(Punct::new(':', Spacing::Joint))));
    ///                 tokens.extend(iter::once(TokenTree::from(Punct::new(':', Spacing::Alone))));
    ///             }
    ///             segment.to_tokens(tokens);
    ///         }
    ///     }
    /// }
    /// #
    /// # pub struct PathSegment;
    /// #
    /// # impl ToTokens for PathSegment {
    /// #     fn to_tokens(&self, tokens: &mut TokenStream) {
    /// #         unimplemented!()
    /// #     }
    /// # }
    /// ```
    fn to_tokens(&self, tokens: &mut TokenStream);

    /// Convert `self` directly into a `TokenStream` object.
    ///
    /// This method is implicitly implemented using `to_tokens`, and acts as a
    /// convenience method for consumers of the `ToTokens` trait.
    fn to_token_stream(&self) -> TokenStream {
        let mut tokens = TokenStream::new();
        self.to_tokens(&mut tokens);
        tokens
    }

    /// Convert `self` directly into a `TokenStream` object.
    ///
    /// This method is implicitly implemented using `to_tokens`, and acts as a
    /// convenience method for consumers of the `ToTokens` trait.
    fn into_token_stream(self) -> TokenStream
    where
        Self: Sized,
    {
        self.to_token_stream()
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for TokenTree {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend_one(self.clone());
    }

    fn into_token_stream(self) -> TokenStream {
        let mut builder = ConcatTreesHelper::new(1);
        builder.push(self);
        builder.build()
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for TokenStream {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend(self.clone());
    }

    fn into_token_stream(self) -> TokenStream {
        self
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for Literal {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend_one(TokenTree::from(self.clone()));
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for Ident {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend_one(TokenTree::from(self.clone()));
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for Punct {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend_one(TokenTree::from(self.clone()));
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for Group {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend_one(TokenTree::from(self.clone()));
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl<T: ToTokens + ?Sized> ToTokens for &T {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        (**self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl<T: ToTokens + ?Sized> ToTokens for &mut T {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        (**self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl<T: ToTokens + ?Sized> ToTokens for Box<T> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        (**self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl<T: ToTokens + ?Sized> ToTokens for Rc<T> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        (**self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl<T: ToTokens + ToOwned + ?Sized> ToTokens for Cow<'_, T> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        (**self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl<T: ToTokens> ToTokens for Option<T> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        if let Some(t) = self {
            t.to_tokens(tokens);
        }
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for u8 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::u8_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for u16 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::u16_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for u32 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::u32_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for u64 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::u64_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for u128 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::u128_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for i8 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::i8_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for i16 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::i16_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for i32 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::i32_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for i64 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::i64_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for i128 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::i128_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for f32 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::f32_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for f64 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::f64_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for usize {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::usize_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for isize {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::isize_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for bool {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        let word = if *self { "true" } else { "false" };
        Ident::new(word, Span::call_site()).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for char {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::character(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for str {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::string(self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for String {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::string(self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for CStr {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::c_string(self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for CString {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::c_string(self).to_tokens(tokens)
    }
}
```
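To illustrate how the trait is meant to be consumed, here is a minimal sketch (not part of the change) of a function-like procedural macro that assembles its output through `ToTokens`. It assumes a proc-macro crate built with a nightly toolchain that enables `proc_macro_totokens`; the macro name `answer` and the generated constant are made up for illustration.

```rust
// Hypothetical illustration only: emits `const ANSWER: u32 = 42u32;`
// by writing tokens into the output stream via `ToTokens`.
#![feature(proc_macro_totokens)]

extern crate proc_macro;

use proc_macro::{Ident, Punct, Spacing, Span, ToTokens, TokenStream};

#[proc_macro]
pub fn answer(_input: TokenStream) -> TokenStream {
    let mut out = TokenStream::new();

    // `Ident` and `Punct` implement `ToTokens`, so they can be written
    // directly into the output stream.
    Ident::new("const", Span::call_site()).to_tokens(&mut out);
    Ident::new("ANSWER", Span::call_site()).to_tokens(&mut out);
    Punct::new(':', Spacing::Alone).to_tokens(&mut out);
    Ident::new("u32", Span::call_site()).to_tokens(&mut out);
    Punct::new('=', Spacing::Alone).to_tokens(&mut out);

    // The primitive impls above emit suffixed literals, so this produces `42u32`.
    42u32.to_tokens(&mut out);
    Punct::new(';', Spacing::Alone).to_tokens(&mut out);

    out
}
```

The blanket impls for `&T`, `Box<T>`, `Rc<T>`, `Cow<'_, T>`, and `Option<T>` mean such helper code can pass owned or borrowed values interchangeably, which is the same convenience the `quote` crate's `ToTokens` trait provides on stable.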