 #![feature(rustc_attrs)]
 #![feature(min_specialization)]
 #![feature(strict_provenance)]
+#![feature(extend_one)]
 #![recursion_limit = "256"]
 #![allow(internal_features)]
 #![deny(ffi_unwind_calls)]
@@ -44,6 +45,7 @@ pub mod bridge;
 
 mod diagnostic;
 mod escape;
+mod to_tokens;
 
 use std::ffi::CStr;
 use std::ops::{Range, RangeBounds};
@@ -53,6 +55,8 @@ use std::{error, fmt};
 
 #[unstable(feature = "proc_macro_diagnostic", issue = "54140")]
 pub use diagnostic::{Diagnostic, Level, MultiSpan};
+#[unstable(feature = "proc_macro_totokens", issue = "130977")]
+pub use to_tokens::ToTokens;
 
 use crate::escape::{EscapeOptions, escape_bytes};
 
@@ -279,6 +283,7 @@ impl ConcatTreesHelper {
         }
     }
 
+    #[allow(dead_code)]
     fn append_to(self, stream: &mut TokenStream) {
         if self.trees.is_empty() {
             return;
@@ -325,45 +330,22 @@ impl ConcatStreamsHelper {
     }
 }
 
-/// Collects a number of token trees into a single stream.
-#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
-impl FromIterator<TokenTree> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
-        let iter = trees.into_iter();
-        let mut builder = ConcatTreesHelper::new(iter.size_hint().0);
-        iter.for_each(|tree| builder.push(tree));
-        builder.build()
-    }
-}
-
-/// A "flattening" operation on token streams, collects token trees
-/// from multiple token streams into a single stream.
-#[stable(feature = "proc_macro_lib", since = "1.15.0")]
-impl FromIterator<TokenStream> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
-        let iter = streams.into_iter();
+#[stable(feature = "proc_macro_totokens_migration", since = "CURRENT_RUSTC_VERSION")]
+impl<T: ToTokens> FromIterator<T> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = T>>(t: I) -> Self {
+        let iter = t.into_iter();
         let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
-        iter.for_each(|stream| builder.push(stream));
+        iter.for_each(|t| builder.push(t.into_token_stream()));
         builder.build()
     }
 }
 
-#[stable(feature = "token_stream_extend", since = "1.30.0")]
-impl Extend<TokenTree> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
-        let iter = trees.into_iter();
-        let mut builder = ConcatTreesHelper::new(iter.size_hint().0);
-        iter.for_each(|tree| builder.push(tree));
-        builder.append_to(self);
-    }
-}
-
-#[stable(feature = "token_stream_extend", since = "1.30.0")]
-impl Extend<TokenStream> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
-        let iter = streams.into_iter();
+#[stable(feature = "proc_macro_totokens_migration", since = "CURRENT_RUSTC_VERSION")]
+impl<T: ToTokens> Extend<T> for TokenStream {
+    fn extend<I: IntoIterator<Item = T>>(&mut self, t: I) {
+        let iter = t.into_iter();
         let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
-        iter.for_each(|stream| builder.push(stream));
+        iter.for_each(|t| builder.push(t.into_token_stream()));
         builder.append_to(self);
     }
 }
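
For reference, a minimal sketch of what the generic impls enable from a proc-macro crate. It assumes the unstable `proc_macro_totokens` feature gate and that `ToTokens` is implemented for `TokenTree` and `TokenStream` (implied by the migration, since existing `collect()`/`extend()` call sites must keep compiling); the `demo` macro itself is purely illustrative, not part of this change.

// Sketch only: a proc-macro crate exercising the new blanket impls.
#![feature(proc_macro_totokens)]
extern crate proc_macro;
use proc_macro::{Ident, Punct, Spacing, Span, TokenStream, TokenTree};

#[proc_macro]
pub fn demo(_input: TokenStream) -> TokenStream {
    // `collect()` now resolves to `FromIterator<T: ToTokens>`: each item is
    // converted with `into_token_stream()` and concatenated by the helper.
    let mut out: TokenStream = [
        TokenTree::Ident(Ident::new("dbg", Span::call_site())),
        TokenTree::Punct(Punct::new('!', Spacing::Alone)),
    ]
    .into_iter()
    .collect();

    // `extend()` likewise accepts any iterator of `ToTokens` items,
    // here a whole `TokenStream` parsed from a string.
    out.extend(["(42)".parse::<TokenStream>().unwrap()]);
    out // expands to: dbg!(42)
}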