@@ -20,6 +20,7 @@
 #![feature(rustc_attrs)]
 #![cfg_attr(test, feature(test))]
 #![feature(strict_provenance)]
+#![deny(unsafe_op_in_unsafe_fn)]
 #![deny(rustc::untranslatable_diagnostic)]
 #![deny(rustc::diagnostic_outside_of_impl)]
 #![allow(clippy::mut_from_ref)] // Arena allocators are one of the places where this pattern is fine.
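
With `unsafe_op_in_unsafe_fn` denied, the body of an `unsafe fn` no longer acts as one big implicit `unsafe` block: every unsafe operation needs its own explicit `unsafe { }`, which is what the hunks below add (each paired with a `// SAFETY:` comment, per rustc convention). A minimal standalone sketch of what the lint enforces; the function here is made up for illustration:

    #![deny(unsafe_op_in_unsafe_fn)]

    unsafe fn read_byte(ptr: *const u8) -> u8 {
        // A bare `*ptr` would now be rejected: being inside an `unsafe fn`
        // no longer licenses unsafe operations implicitly.
        // SAFETY: the caller must pass a pointer valid for reads.
        unsafe { *ptr }
    }
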
@@ -74,19 +75,27 @@ impl<T> ArenaChunk<T> {
     #[inline]
     unsafe fn new(capacity: usize) -> ArenaChunk<T> {
         ArenaChunk {
-            storage: NonNull::new_unchecked(Box::into_raw(Box::new_uninit_slice(capacity))),
+            storage: NonNull::from(Box::leak(Box::new_uninit_slice(capacity))),
             entries: 0,
         }
     }

     /// Destroys this arena chunk.
+    ///
+    /// # Safety
+    ///
+    /// The caller must ensure that `len` elements of this chunk have been initialized.
     #[inline]
     unsafe fn destroy(&mut self, len: usize) {
         // The branch on needs_drop() is an -O1 performance optimization.
-        // Without the branch, dropping TypedArena<u8> takes linear time.
+        // Without the branch, dropping TypedArena<T> takes linear time.
         if mem::needs_drop::<T>() {
-            let slice = self.storage.as_mut();
-            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut slice[..len]));
+            // SAFETY: The caller must ensure that `len` elements of this chunk have
+            // been initialized.
+            unsafe {
+                let slice = self.storage.as_mut();
+                ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut slice[..len]));
+            }
         }
     }
 
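The `storage` change is what lets `new` avoid an `unsafe` operation entirely: `Box::leak` hands back a `&mut [MaybeUninit<T>]`, and `NonNull::from` on a reference is safe and guaranteed non-null, whereas `NonNull::new_unchecked(Box::into_raw(...))` is an unsafe call. A standalone sketch of the safe construction (the function name is hypothetical; `Box::new_uninit_slice` still requires the nightly `new_uninit` feature at the time of this PR):

    use std::mem::MaybeUninit;
    use std::ptr::NonNull;

    fn alloc_chunk<T>(capacity: usize) -> NonNull<[MaybeUninit<T>]> {
        // Box::leak returns &'static mut [MaybeUninit<T>]; converting a
        // reference with NonNull::from is safe and can never produce null.
        NonNull::from(Box::leak(Box::new_uninit_slice(capacity)))
    }
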
@@ -255,7 +264,9 @@ impl<T> TypedArena<T> {
         self.ensure_capacity(len);

         let start_ptr = self.ptr.get();
-        self.ptr.set(start_ptr.add(len));
+        // SAFETY: `self.ensure_capacity` makes sure that there is enough space
+        // for `len` elements.
+        unsafe { self.ptr.set(start_ptr.add(len)) };
         start_ptr
     }
 
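Only the pointer arithmetic here is unsafe; `Cell::set` itself is safe, so the `unsafe` block can be scoped to the single expression containing `start_ptr.add(len)`. A minimal sketch of the bump-pointer pattern, with field and method names chosen for illustration rather than matching the arena's actual layout:

    use std::cell::Cell;

    struct Bump<T> {
        ptr: Cell<*mut T>,
    }

    impl<T> Bump<T> {
        /// # Safety
        /// The caller must have ensured capacity for `len` more elements.
        unsafe fn bump(&self, len: usize) -> *mut T {
            let start = self.ptr.get();
            // SAFETY: with capacity ensured, `start.add(len)` stays within
            // (or one past the end of) the same allocation.
            unsafe { self.ptr.set(start.add(len)) };
            start
        }
    }
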
@@ -483,6 +494,10 @@ impl DroplessArena {
         }
     }

+    /// # Safety
+    ///
+    /// The caller must ensure that `mem` is valid for writes up to
+    /// `size_of::<T>() * len`.
     #[inline]
     unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
         &self,
@@ -494,13 +509,18 @@ impl DroplessArena {
         // Use a manual loop since LLVM manages to optimize it better for
         // slice iterators
         loop {
-            let value = iter.next();
-            if i >= len || value.is_none() {
-                // We only return as many items as the iterator gave us, even
-                // though it was supposed to give us `len`
-                return slice::from_raw_parts_mut(mem, i);
+            // SAFETY: The caller must ensure that `mem` is valid for writes up to
+            // `size_of::<T>() * len`.
+            unsafe {
+                match iter.next() {
+                    Some(value) if i < len => mem.add(i).write(value),
+                    Some(_) | None => {
+                        // We only return as many items as the iterator gave us, even
+                        // though it was supposed to give us `len`
+                        return slice::from_raw_parts_mut(mem, i);
+                    }
+                }
             }
-            ptr::write(mem.add(i), value.unwrap());
             i += 1;
         }
     }
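
Restructuring the loop body as a `match` puts the bounds check (`i < len`) on the same arm as the write, so one `unsafe` block covers the single SAFETY argument, and the old `value.unwrap()` disappears. A self-contained sketch of the same pattern outside the arena; the name and explicit lifetime parameter are illustrative:

    use std::slice;

    /// # Safety
    /// `mem` must be valid for writes of `size_of::<T>() * len` bytes, and
    /// the returned slice must not outlive that allocation.
    unsafe fn fill_from_iter<'a, T, I: Iterator<Item = T>>(
        mut iter: I,
        len: usize,
        mem: *mut T,
    ) -> &'a mut [T] {
        let mut i = 0;
        loop {
            // SAFETY: `i < len` guards every write, so `mem.add(i)` stays in
            // bounds, and exactly `i` elements are initialized on return.
            unsafe {
                match iter.next() {
                    Some(value) if i < len => mem.add(i).write(value),
                    // Stop at `len` items or when the iterator runs dry;
                    // only the initialized prefix is handed back.
                    Some(_) | None => return slice::from_raw_parts_mut(mem, i),
                }
            }
            i += 1;
        }
    }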