@@ -13,12 +13,11 @@
 )]
 #![feature(core_intrinsics)]
 #![feature(dropck_eyepatch)]
-#![feature(raw_vec_internals)]
+#![feature(new_uninit)]
+#![feature(maybe_uninit_slice)]
 #![cfg_attr(test, feature(test))]
 #![allow(deprecated)]
 
-extern crate alloc;
-
 use rustc_data_structures::cold_path;
 use smallvec::SmallVec;
 
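The two new feature gates replace `raw_vec_internals`. For context, a minimal nightly-only sketch (not part of this commit) of what they provide:

```rust
#![feature(new_uninit)]
#![feature(maybe_uninit_slice)]

use std::mem::MaybeUninit;

fn main() {
    // `new_uninit` gates Box::new_uninit_slice: heap storage whose
    // elements start out uninitialized, with no constructors run.
    let mut storage: Box<[MaybeUninit<u32>]> = Box::new_uninit_slice(4);

    // Initialize each slot by hand.
    for (i, slot) in storage.iter_mut().enumerate() {
        *slot = MaybeUninit::new(i as u32);
    }

    // `maybe_uninit_slice` gates slice_assume_init_ref/_mut, which view
    // a fully initialized slice of MaybeUninit<T> as a plain slice of T.
    let init: &[u32] = unsafe { MaybeUninit::slice_assume_init_ref(&storage) };
    assert_eq!(init, &[0, 1, 2, 3][..]);
}
```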
@@ -27,12 +26,10 @@ use std::cell::{Cell, RefCell};
 use std::cmp;
 use std::intrinsics;
 use std::marker::{PhantomData, Send};
-use std::mem;
+use std::mem::{self, MaybeUninit};
 use std::ptr;
 use std::slice;
 
-use alloc::raw_vec::RawVec;
-
 /// An arena that can hold objects of only one type.
 pub struct TypedArena<T> {
     /// A pointer to the next object to be allocated.
@@ -52,15 +49,15 @@ pub struct TypedArena<T> {
 
 struct TypedArenaChunk<T> {
     /// The raw storage for the arena chunk.
-    storage: RawVec<T>,
+    storage: Box<[MaybeUninit<T>]>,
     /// The number of valid entries in the chunk.
     entries: usize,
 }
 
 impl<T> TypedArenaChunk<T> {
     #[inline]
     unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
-        TypedArenaChunk { storage: RawVec::with_capacity(capacity), entries: 0 }
+        TypedArenaChunk { storage: Box::new_uninit_slice(capacity), entries: 0 }
     }
 
     /// Destroys this arena chunk.
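`Box::new_uninit_slice(capacity)` is the stand-in for `RawVec::with_capacity(capacity)`: it allocates space for `capacity` elements without constructing any of them. A small sketch of that property (nightly, same gates as above):

```rust
#![feature(new_uninit)]

use std::mem::MaybeUninit;

fn alloc_chunk<T>(capacity: usize) -> Box<[MaybeUninit<T>]> {
    // Allocates capacity * size_of::<T>() bytes; no T values exist yet,
    // and none will be dropped if the Box is freed as-is.
    Box::new_uninit_slice(capacity)
}

fn main() {
    let chunk: Box<[MaybeUninit<String>]> = alloc_chunk(8);
    // Unlike RawVec, the capacity is observable as the slice length.
    assert_eq!(chunk.len(), 8);
}
```

One behavioral difference worth noting: a `RawVec` can grow in place, but the arena never grows a chunk after creating it (it allocates a fresh, larger chunk instead), so a fixed-size boxed slice suffices.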
@@ -69,30 +66,25 @@ impl<T> TypedArenaChunk<T> {
         // The branch on needs_drop() is an -O1 performance optimization.
         // Without the branch, dropping TypedArena<u8> takes linear time.
         if mem::needs_drop::<T>() {
-            let mut start = self.start();
-            // Destroy all allocated objects.
-            for _ in 0..len {
-                ptr::drop_in_place(start);
-                start = start.offset(1);
-            }
+            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut self.storage[..len]));
         }
     }
 
     // Returns a pointer to the first allocated object.
     #[inline]
-    fn start(&self) -> *mut T {
-        self.storage.ptr()
+    fn start(&mut self) -> *mut T {
+        MaybeUninit::slice_as_mut_ptr(&mut self.storage)
     }
 
     // Returns a pointer to the end of the allocated space.
     #[inline]
-    fn end(&self) -> *mut T {
+    fn end(&mut self) -> *mut T {
         unsafe {
             if mem::size_of::<T>() == 0 {
                 // A pointer as large as possible for zero-sized elements.
                 !0 as *mut T
             } else {
-                self.start().add(self.storage.capacity())
+                self.start().add(self.storage.len())
             }
         }
     }
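The per-element drop loop collapses into one `drop_in_place` call over the initialized prefix, viewed as `&mut [T]`. A self-contained sketch of the pattern (nightly; `len` is the caller's count of initialized slots):

```rust
#![feature(new_uninit)]
#![feature(maybe_uninit_slice)]

use std::mem::MaybeUninit;
use std::ptr;

/// Drops the first `len` elements of `storage`.
///
/// Safety: exactly the first `len` slots must be initialized.
unsafe fn destroy_prefix<T>(storage: &mut [MaybeUninit<T>], len: usize) {
    // slice_assume_init_mut turns &mut [MaybeUninit<T>] into &mut [T];
    // drop_in_place on that slice then drops every element in it.
    ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut storage[..len]));
}

fn main() {
    let mut storage: Box<[MaybeUninit<String>]> = Box::new_uninit_slice(4);
    storage[0] = MaybeUninit::new("a".to_string());
    storage[1] = MaybeUninit::new("b".to_string());
    unsafe { destroy_prefix(&mut storage, 2) }; // drops "a" and "b" only
}
```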
@@ -226,10 +218,10 @@ impl<T> TypedArena<T> {
                 let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                 last_chunk.entries = used_bytes / mem::size_of::<T>();
 
-                // If the previous chunk's capacity is less than HUGE_PAGE
+                // If the previous chunk's len is less than HUGE_PAGE
                 // bytes, then this chunk will be at least double the previous
                 // chunk's size.
-                new_cap = last_chunk.storage.capacity();
+                new_cap = last_chunk.storage.len();
                 if new_cap < HUGE_PAGE / elem_size {
                     new_cap = new_cap.checked_mul(2).unwrap();
                 }
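Only the capacity query changes here: a boxed slice's length is its capacity. The surrounding doubling policy stays as it was. A standalone sketch of that policy, assuming `HUGE_PAGE` is the 2 MiB constant defined earlier in this file:

```rust
// Sketch of the chunk-growth policy, not the arena's actual code.
// HUGE_PAGE is assumed to mirror libarena's constant (2 MiB).
const HUGE_PAGE: usize = 2 * 1024 * 1024;

fn next_capacity(last_cap: usize, additional: usize, elem_size: usize) -> usize {
    let mut new_cap = last_cap;
    // Double the chunk size until one chunk would span HUGE_PAGE bytes;
    // past that point, new chunks stay at the previous size.
    if new_cap < HUGE_PAGE / elem_size {
        new_cap = new_cap.checked_mul(2).unwrap();
    }
    // Always leave room for the allocation that triggered the growth.
    new_cap.max(additional)
}

fn main() {
    assert_eq!(next_capacity(1024, 1, 8), 2048); // still doubling
    assert_eq!(next_capacity(HUGE_PAGE / 8, 1, 8), HUGE_PAGE / 8); // capped
}
```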
@@ -239,7 +231,7 @@ impl<T> TypedArena<T> {
             // Also ensure that this chunk can fit `additional`.
             new_cap = cmp::max(additional, new_cap);
 
-            let chunk = TypedArenaChunk::<T>::new(new_cap);
+            let mut chunk = TypedArenaChunk::<T>::new(new_cap);
             self.ptr.set(chunk.start());
             self.end.set(chunk.end());
             chunks.push(chunk);
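The new `mut` is forced by the signature change above: `start()` and `end()` now take `&mut self`, because the raw pointer they return comes from a `&mut` borrow of the boxed slice. A minimal illustration of why the binding must be mutable:

```rust
#![feature(new_uninit)]
#![feature(maybe_uninit_slice)]

use std::mem::MaybeUninit;

struct Chunk<T> {
    storage: Box<[MaybeUninit<T>]>,
}

impl<T> Chunk<T> {
    // The returned pointer is written through later, so it is derived
    // from a unique (&mut) borrow rather than a shared one.
    fn start(&mut self) -> *mut T {
        MaybeUninit::slice_as_mut_ptr(&mut self.storage)
    }
}

fn main() {
    let mut chunk = Chunk::<u32> { storage: Box::new_uninit_slice(4) };
    let _p = chunk.start(); // would not compile without `mut chunk`
}
```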
@@ -301,7 +293,7 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
                     chunk.destroy(chunk.entries);
                 }
             }
-            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
+            // Box handles deallocation of `last_chunk` and `self.chunks`.
         }
     }
 }
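The updated comment holds because `MaybeUninit<T>` has no drop glue: dropping a `Box<[MaybeUninit<T>]>` releases the allocation without touching the elements, so the arena remains responsible for running destructors via `destroy`. A quick demonstration (nightly):

```rust
#![feature(new_uninit)]

use std::mem::MaybeUninit;

fn main() {
    let mut storage: Box<[MaybeUninit<String>]> = Box::new_uninit_slice(2);
    storage[0] = MaybeUninit::new("hello".to_string());
    // Dropping the Box frees the slice's memory, but the String in slot 0
    // is never dropped (its heap buffer leaks here), and the untouched
    // slot 1 is never read. This is why TypedArena's Drop impl calls
    // destroy() before the chunks themselves are dropped.
    drop(storage);
}
```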
@@ -344,10 +336,10 @@ impl DroplessArena {
                 // There is no need to update `last_chunk.entries` because that
                 // field isn't used by `DroplessArena`.
 
-                // If the previous chunk's capacity is less than HUGE_PAGE
+                // If the previous chunk's len is less than HUGE_PAGE
                 // bytes, then this chunk will be at least double the previous
                 // chunk's size.
-                new_cap = last_chunk.storage.capacity();
+                new_cap = last_chunk.storage.len();
                 if new_cap < HUGE_PAGE {
                     new_cap = new_cap.checked_mul(2).unwrap();
                 }
@@ -357,7 +349,7 @@ impl DroplessArena {
             // Also ensure that this chunk can fit `additional`.
             new_cap = cmp::max(additional, new_cap);
 
-            let chunk = TypedArenaChunk::<u8>::new(new_cap);
+            let mut chunk = TypedArenaChunk::<u8>::new(new_cap);
             self.ptr.set(chunk.start());
             self.end.set(chunk.end());
             chunks.push(chunk);