 #![feature(alloc)]
 #![feature(box_syntax)]
 #![feature(core_intrinsics)]
+#![feature(drop_in_place)]
+#![feature(raw)]
 #![feature(heap_api)]
 #![feature(oom)]
-#![feature(ptr_as_ref)]
 #![feature(raw)]
 #![feature(staged_api)]
 #![feature(dropck_parametricity)]
@@ -47,9 +48,13 @@ use std::intrinsics;
 use std::marker;
 use std::mem;
 use std::ptr;
+use std::raw;
+use std::raw::Repr;
 use std::rc::Rc;
+use std::slice;
 
-use alloc::heap::{allocate, deallocate};
+use alloc::heap;
+use alloc::raw_vec::RawVec;
 
 // The way arena uses arrays is really deeply awful. The arrays are
 // allocated, and have capacities reserved, but the fill for the array
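
The import change is the heart of the refactor: chunk memory now comes from `alloc::raw_vec::RawVec` instead of hand-rolled `allocate`/`deallocate` calls. As a rough stable-Rust analogue (an illustrative sketch only; the real code uses the unstable `RawVec`, which reserves capacity without tracking a length), a chunk reserves space up front and hands the arena raw start/end pointers:

```rust
// Illustrative only: models RawVec's role with a stable Vec.
fn chunk_bounds<T>(capacity: usize) -> (*mut T, *mut T) {
    let mut storage: Vec<T> = Vec::with_capacity(capacity);
    let start = storage.as_mut_ptr();
    // wrapping_add keeps the sketch well-defined even for huge capacities.
    let end = start.wrapping_add(storage.capacity());
    std::mem::forget(storage); // the arena, not the Vec, now owns the block
    (start, end)
}
```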
@@ -366,115 +371,87 @@ fn test_arena_destructors_fail() {
 /// A faster arena that can hold objects of only one type.
 pub struct TypedArena<T> {
     /// A pointer to the next object to be allocated.
-    ptr: Cell<*const T>,
+    ptr: Cell<*mut T>,
 
     /// A pointer to the end of the allocated area. When this pointer is
     /// reached, a new chunk is allocated.
-    end: Cell<*const T>,
+    end: Cell<*mut T>,
 
-    /// A pointer to the first arena segment.
-    first: RefCell<*mut TypedArenaChunk<T>>,
+    /// A vector of arena segments.
+    chunks: RefCell<Vec<TypedArenaChunk<T>>>,
 
     /// Marker indicating that dropping the arena causes its owned
     /// instances of `T` to be dropped.
     _own: marker::PhantomData<T>,
 }
 
 struct TypedArenaChunk<T> {
-    marker: marker::PhantomData<T>,
-
     /// Pointer to the next arena segment.
-    next: *mut TypedArenaChunk<T>,
-
-    /// The number of elements that this chunk can hold.
-    capacity: usize,
-
-    // Objects follow here, suitably aligned.
-}
-
-fn calculate_size<T>(capacity: usize) -> usize {
-    let mut size = mem::size_of::<TypedArenaChunk<T>>();
-    size = round_up(size, mem::align_of::<T>());
-    let elem_size = mem::size_of::<T>();
-    let elems_size = elem_size.checked_mul(capacity).unwrap();
-    size = size.checked_add(elems_size).unwrap();
-    size
+    storage: RawVec<T>,
 }
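
With `storage: RawVec<T>`, the deleted `calculate_size`/`round_up` arithmetic becomes unnecessary: the old design placed the chunk header and the elements in a single allocation, so every size computation had to align past the header. A sketch of what that removed math did (`Header` is a hypothetical stand-in for the old inline chunk header):

```rust
// Reconstruction of the deleted layout math, for comparison only.
fn old_chunk_size<Header, T>(capacity: usize) -> usize {
    // Round `n` up to a power-of-two alignment, as the removed round_up did.
    fn round_up(n: usize, align: usize) -> usize {
        (n + align - 1) & !(align - 1)
    }
    let header = std::mem::size_of::<Header>();
    let elems_start = round_up(header, std::mem::align_of::<T>());
    elems_start + std::mem::size_of::<T>() * capacity
}
```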
 
 impl<T> TypedArenaChunk<T> {
     #[inline]
-    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize) -> *mut TypedArenaChunk<T> {
-        let size = calculate_size::<T>(capacity);
-        let chunk =
-            allocate(size, mem::align_of::<TypedArenaChunk<T>>()) as *mut TypedArenaChunk<T>;
-        if chunk.is_null() {
-            alloc::oom()
-        }
-        (*chunk).next = next;
-        (*chunk).capacity = capacity;
-        chunk
+    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
+        TypedArenaChunk { storage: RawVec::with_capacity(capacity) }
     }
 
-    /// Destroys this arena chunk. If the type descriptor is supplied, the
-    /// drop glue is called; otherwise, drop glue is not called.
+    /// Destroys this arena chunk.
     #[inline]
     unsafe fn destroy(&mut self, len: usize) {
-        // Destroy all the allocated objects.
+        // The branch on needs_drop() is an -O1 performance optimization.
+        // Without the branch, dropping TypedArena<u8> takes linear time.
         if intrinsics::needs_drop::<T>() {
             let mut start = self.start();
+            // Destroy all allocated objects.
             for _ in 0..len {
-                ptr::read(start as *const T); // run the destructor on the pointer
-                start = start.offset(mem::size_of::<T>() as isize)
+                ptr::drop_in_place(start);
+                start = start.offset(1);
             }
         }
-
-        // Destroy the next chunk.
-        let next = self.next;
-        let size = calculate_size::<T>(self.capacity);
-        let self_ptr: *mut TypedArenaChunk<T> = self;
-        deallocate(self_ptr as *mut u8,
-                   size,
-                   mem::align_of::<TypedArenaChunk<T>>());
-        if !next.is_null() {
-            let capacity = (*next).capacity;
-            (*next).destroy(capacity);
-        }
     }
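
`ptr::drop_in_place` replaces the old `ptr::read` idiom: it runs `T`'s destructor at the raw location without copying the value out, and the now-typed pointer advances with `offset(1)` instead of manual byte arithmetic. A standalone sketch of the same loop (not the arena's `destroy` itself):

```rust
use std::ptr;

// Run destructors for `len` values laid out contiguously from `start`.
unsafe fn drop_n<T>(start: *mut T, len: usize) {
    let mut p = start;
    for _ in 0..len {
        ptr::drop_in_place(p); // runs Drop in place, no move out
        p = p.offset(1);       // typed offset: advances by size_of::<T>()
    }
}
```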
 
     // Returns a pointer to the first allocated object.
     #[inline]
-    fn start(&self) -> *const u8 {
-        let this: *const TypedArenaChunk<T> = self;
-        unsafe { round_up(this.offset(1) as usize, mem::align_of::<T>()) as *const u8 }
+    fn start(&self) -> *mut T {
+        self.storage.ptr()
     }
 
     // Returns a pointer to the end of the allocated space.
     #[inline]
-    fn end(&self) -> *const u8 {
+    fn end(&self) -> *mut T {
         unsafe {
-            let size = mem::size_of::<T>().checked_mul(self.capacity).unwrap();
-            self.start().offset(size as isize)
+            if mem::size_of::<T>() == 0 {
+                // A pointer as large as possible for zero-sized elements.
+                !0 as *mut T
+            } else {
+                self.start().offset(self.storage.cap() as isize)
+            }
         }
     }
 }
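
For zero-sized types there is no meaningful end address, so `end()` returns `!0`, the largest possible pointer value; the bump pointer can then never appear to reach it, and `grow()` is never triggered for ZSTs. A standalone model of that choice:

```rust
// Pick the end-of-chunk sentinel the way the new end() does.
fn end_sentinel<T>(start: *mut T, cap: usize) -> *mut T {
    if std::mem::size_of::<T>() == 0 {
        !0 as *mut T // highest address: the bump pointer never reaches it
    } else {
        start.wrapping_add(cap)
    }
}
```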
 
+const PAGE: usize = 4096;
+
 impl<T> TypedArena<T> {
-    /// Creates a new `TypedArena` with preallocated space for eight objects.
+    /// Creates a new `TypedArena` with preallocated space for many objects.
     #[inline]
     pub fn new() -> TypedArena<T> {
-        TypedArena::with_capacity(8)
+        // Reserve at least one page.
+        let elem_size = cmp::max(1, mem::size_of::<T>());
+        TypedArena::with_capacity(PAGE / elem_size)
     }
 
     /// Creates a new `TypedArena` with preallocated space for the given number of
     /// objects.
     #[inline]
     pub fn with_capacity(capacity: usize) -> TypedArena<T> {
         unsafe {
-            let chunk = TypedArenaChunk::<T>::new(ptr::null_mut(), capacity);
+            let chunk = TypedArenaChunk::<T>::new(cmp::max(1, capacity));
             TypedArena {
-                ptr: Cell::new((*chunk).start() as *const T),
-                end: Cell::new((*chunk).end() as *const T),
-                first: RefCell::new(chunk),
+                ptr: Cell::new(chunk.start()),
+                end: Cell::new(chunk.end()),
+                chunks: RefCell::new(vec![chunk]),
                 _own: marker::PhantomData,
             }
         }
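
`new()` now sizes the first chunk to roughly one 4096-byte page rather than a fixed eight objects, clamping the element size to 1 so zero-sized types do not divide by zero. A sketch of the rule (mirroring `new()`, under the assumption that `PAGE` stays 4096):

```rust
// Default first-chunk capacity, per the page-sized rule in new().
fn default_capacity<T>() -> usize {
    const PAGE: usize = 4096;
    let elem_size = std::cmp::max(1, std::mem::size_of::<T>());
    PAGE / elem_size
}

// default_capacity::<u64>() == 512; default_capacity::<()>() == 4096.
```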
@@ -488,23 +465,39 @@ impl<T> TypedArena<T> {
         }
 
         unsafe {
-            let ptr: &mut T = &mut *(self.ptr.get() as *mut T);
-            ptr::write(ptr, object);
-            self.ptr.set(self.ptr.get().offset(1));
-            ptr
+            if mem::size_of::<T>() == 0 {
+                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
+                let ptr = heap::EMPTY as *mut T;
+                // Don't drop the object. This `write` is equivalent to `forget`.
+                ptr::write(ptr, object);
+                &mut *ptr
+            } else {
+                let ptr = self.ptr.get();
+                // Advance the pointer.
+                self.ptr.set(self.ptr.get().offset(1));
+                // Write into uninitialized memory.
+                ptr::write(ptr, object);
+                &mut *ptr
+            }
         }
     }
 
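
In the zero-sized branch of `alloc`, the pointer is really a counter bumped bytewise with `arith_offset`, and the `ptr::write` to `heap::EMPTY` copies zero bytes while still discarding `object` without running its destructor. A standalone demonstration of why that write behaves like `mem::forget` (assuming any aligned, non-null dangling pointer, which is what the era's `heap::EMPTY` sentinel provided):

```rust
use std::{mem, ptr};

// Writing a ZST through an aligned, non-null pointer stores no bytes
// and skips the destructor, exactly like mem::forget.
fn forget_zst<T>(value: T) -> *mut T {
    assert_eq!(mem::size_of::<T>(), 0);
    let p = mem::align_of::<T>() as *mut T; // aligned and non-null
    unsafe { ptr::write(p, value) }; // zero bytes copied, no drop runs
    p
}
```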
     /// Grows the arena.
     #[inline(never)]
+    #[cold]
     fn grow(&self) {
         unsafe {
-            let chunk = *self.first.borrow_mut();
-            let new_capacity = (*chunk).capacity.checked_mul(2).unwrap();
-            let chunk = TypedArenaChunk::<T>::new(chunk, new_capacity);
-            self.ptr.set((*chunk).start() as *const T);
-            self.end.set((*chunk).end() as *const T);
-            *self.first.borrow_mut() = chunk
+            let mut chunks = self.chunks.borrow_mut();
+            let prev_capacity = chunks.last().unwrap().storage.cap();
+            let new_capacity = prev_capacity.checked_mul(2).unwrap();
+            if chunks.last_mut().unwrap().storage.double_in_place() {
+                self.end.set(chunks.last().unwrap().end());
+            } else {
+                let chunk = TypedArenaChunk::<T>::new(new_capacity);
+                self.ptr.set(chunk.start());
+                self.end.set(chunk.end());
+                chunks.push(chunk);
+            }
         }
     }
 }
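
`grow()` keeps the doubling policy but first tries `RawVec::double_in_place`, which extends the last chunk without moving it; only when that fails does it push a fresh chunk of twice the previous capacity. Since `double_in_place` is an unstable internal API, this sketch only models the resulting capacity schedule:

```rust
// Each call to grow() doubles the current chunk capacity, whether the
// last chunk is extended in place or a new chunk is pushed.
fn next_capacity(prev: usize) -> usize {
    prev.checked_mul(2).expect("arena capacity overflow")
}

// Starting from a 512-element first chunk: 512 -> 1024 -> 2048 -> 4096 -> ...
```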
@@ -514,12 +507,26 @@ impl<T> Drop for TypedArena<T> {
     fn drop(&mut self) {
         unsafe {
             // Determine how much was filled.
-            let start = self.first.borrow().as_ref().unwrap().start() as usize;
+            let mut chunks_borrow = self.chunks.borrow_mut();
+            let mut last_chunk = chunks_borrow.pop().unwrap();
+            let start = last_chunk.start() as usize;
             let end = self.ptr.get() as usize;
-            let diff = (end - start) / mem::size_of::<T>();
+            let diff = if mem::size_of::<T>() == 0 {
+                // Avoid division by zero.
+                end - start
+            } else {
+                (end - start) / mem::size_of::<T>()
+            };
 
             // Pass that to the `destroy` method.
-            (**self.first.borrow_mut()).destroy(diff)
+            last_chunk.destroy(diff);
+            // Destroy this chunk.
+            let _: RawVec<T> = mem::transmute(last_chunk);
+
+            for chunk in chunks_borrow.iter_mut() {
+                let cap = chunk.storage.cap();
+                chunk.destroy(cap);
+            }
         }
     }
 }
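
Drop now runs in two phases: the popped last chunk may be only partially filled, so it destroys exactly `diff` elements, while every earlier chunk was filled to capacity before `grow()` pushed its successor and is destroyed whole. A standalone model of the fill accounting (including the ZST case, where the pointer difference is already an element count):

```rust
// Number of live elements between a chunk's start and the bump pointer.
fn live_elements<T>(start: usize, bump: usize) -> usize {
    match std::mem::size_of::<T>() {
        0 => bump - start, // for ZSTs the "pointer" is a plain counter
        size => (bump - start) / size,
    }
}
```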
@@ -657,4 +664,12 @@ mod tests {
             })
         })
     }
+
+    #[test]
+    pub fn test_zero_sized() {
+        let arena = TypedArena::new();
+        for _ in 0..100000 {
+            arena.alloc(());
+        }
+    }
 }