Simplify `Arena::provide`.

- Introduce the `Uninit` type.
- Indent comments properly.
ticki 8 years ago
parent commit 21c97d1d19
9 changed files with 76 additions and 66 deletions
  1. shim/src/log.rs (+5 -2)
  2. src/allocator.rs (+2 -2)
  3. src/arena.rs (+10 -43)
  4. src/bk/node.rs (+2 -2)
  5. src/bk/seek.rs (+3 -3)
  6. src/block.rs (+7 -1)
  7. src/leak.rs (+6 -3)
  8. src/prelude.rs (+1 -1)
  9. src/ptr.rs (+40 -9)

+ 5 - 2
shim/src/log.rs

@@ -54,9 +54,12 @@ struct BufWriter<'a> {
 
 impl<'a> fmt::Write for BufWriter<'a> {
     fn write_str(&mut self, s: &str) -> fmt::Result {
+        // Find the appropriate length of the copied subbuffer.
         let amt = cmp::min(s.len(), self.buffer.len());
-        let (a, b) = mem::replace(&mut self.buffer, &mut []).split_at_mut(amt);
-        a.copy_from_slice(&s.as_bytes()[..amt]);
+        // Split the buffer.
+        let buf = &mut mem::replace(&mut self.buffer, &mut [])[..amt];
+        // Memcpy the content of the string.
+        buf.copy_from_slice(&s.as_bytes()[..amt]);
 
         Ok(())
     }
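
A minimal self-contained sketch of the same take-split-copy pattern may help here; the `BufWriter` shape is reconstructed from the hunk above (the real shim has more around it), and the tail-saving line is an assumption about how subsequent writes advance the buffer:

    use std::cmp;
    use std::fmt::{self, Write};
    use std::mem;

    // Reconstructed for illustration; not the full shim type.
    struct BufWriter<'a> {
        buffer: &'a mut [u8],
    }

    impl<'a> fmt::Write for BufWriter<'a> {
        fn write_str(&mut self, s: &str) -> fmt::Result {
            // Find the appropriate length of the copied subbuffer.
            let amt = cmp::min(s.len(), self.buffer.len());
            // Take the buffer out of `self` so it can be split without
            // holding a borrow of `self`, then cut off the written prefix.
            let (a, b) = mem::replace(&mut self.buffer, &mut []).split_at_mut(amt);
            // Memcpy the content of the string into the prefix.
            a.copy_from_slice(&s.as_bytes()[..amt]);
            // Keep the unwritten tail around for subsequent writes.
            self.buffer = b;

            Ok(())
        }
    }

    fn main() {
        let mut storage = [0u8; 8];
        let mut writer = BufWriter { buffer: &mut storage };
        writer.write_str("hi").unwrap();
        writer.write_str(" there!!!").unwrap(); // Silently truncated to fit.
        assert_eq!(storage, *b"hi there");
    }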

+ 2 - 2
src/allocator.rs

@@ -39,7 +39,7 @@ tls! {
 /// This is a macro due to the lack of generic closure, which makes it impossible to have one
 /// closure for both cases (global and local).
 // TODO: Instead of falling back to the global allocator, the thread dtor should be set such that
-// it run after the TLS keys that might be declared.
+//       it runs after the TLS keys that might be declared.
 macro_rules! get_allocator {
     (|$v:ident| $b:expr) => {{
         // Get the thread allocator, if TLS is enabled
@@ -204,7 +204,7 @@ impl LocalAllocator {
             let global_alloc = global_alloc.get();
 
             // TODO: we know this is sorted, so we could abuse that fact for faster insertion into the
-            // global allocator.
+            //       global allocator.
 
             alloc.into_inner().inner.for_each(move |block| global_alloc.free(block));
         }

+ 10 - 43
src/arena.rs

@@ -86,6 +86,8 @@ impl<T> Arena<T> {
     #[inline]
     pub fn new() -> Arena<T> {
         // Make sure the links fit in.
+        // FIXME: When unsafe unions land, this assertion can be removed in favour of storing
+        //        values of some union type.
         assert!(mem::size_of::<T>() >= mem::size_of::<PointerList>(), "Arena list is unable to \
                 contain a link (type must be pointer sized or more).");
 
@@ -133,52 +135,17 @@ impl<T> Arena<T> {
         }
     }
 
-    /// Provide this arena with some block.
+    /// Provide this arena with some uninitialized segment.
     ///
-    /// This is used to fill the arena with memory from some source by essentially breaking the
-    /// block up and linking each piece together.
-    ///
-    /// # Panics
-    ///
-    /// This will hit an assertion if `T`'s size doesn't divide the block's size.
+    /// This is used to fill the arena with memory from some source by essentially linking each
+    /// piece together.
     #[inline]
-    pub fn provide(&mut self, block: Block) {
-        // Some assertions...
-        assert!(block.size() % mem::size_of::<T>() == 0, "`T`'s size does not divide the block's.");
-        assert!(block.aligned_to(mem::align_of::<T>()), "Block is unaligned to `T`.");
-
-        // We log for convenience.
-        log!(DEBUG, "Providing {:?} to arena.", block);
-
-        // Calculate the end pointer.
-        let end = Pointer::from(block.empty_right()).cast();
-        // Calculate the start pointer.
-        let mut ptr: Pointer<PointerList> = Pointer::from(block).cast();
-
-        loop {
-            // We offset the current pointer to get the pointer to the next piece, which will
-            // define the value we will put at `ptr`.
-            // NB: We do not use `offset` because it asserts that we're inbound. Breaking this
-            // invariant would lead to undefined behavior. Instead we do custom convert'n'add
-            // arithmetic on the pointer.
-            let next_ptr = Pointer::new((*ptr.clone() as usize + mem::size_of::<PointerList>()) as *mut PointerList);
-
-            // If the new pointer goes beyond the end, we're done.
-            if next_ptr == end {
-                // We reached the end, so we leave a `None`.
-                *ptr = PointerList {
-                    head: None,
-                };
-
-                break;
-            }
+    pub fn provide(&mut self, new: Uninit<[T]>) {
+        log!(DEBUG, "Providing {} elements to the arena.", new.as_ptr().len());
 
-            // Make the piece point to the next piece.
-            *ptr = PointerList {
-                head: Some(next_ptr),
-            };
-            // Update the pointer counter.
-            ptr = next_ptr;
+        for n in 0..new.as_ptr().len() {
+            // Push the nth element to the inner pointer list.
+            self.list.push(new.as_ptr().clone().cast::<T>().offset(n).cast());
         }
     }
 }
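
Since `Uninit` and `PointerList` are project types, a rough sketch with plain raw pointers shows what `provide` now amounts to; `PointerList` here is a hypothetical stand-in for ralloc's intrusive free list, not its actual definition:

    use std::mem;

    // Hypothetical stand-in: a singly linked list threaded through the
    // free slots themselves.
    struct PointerList {
        head: Option<*mut PointerList>,
    }

    impl PointerList {
        // Push a free slot onto the list by writing the old head into it.
        unsafe fn push(&mut self, slot: *mut PointerList) {
            (*slot).head = self.head.take();
            self.head = Some(slot);
        }
    }

    // What the new `provide` boils down to: walk the uninitialized slice
    // element by element and thread each slot onto the free list.
    unsafe fn provide<T>(list: &mut PointerList, ptr: *mut T, len: usize) {
        // Each slot must be big enough to hold a link, as the arena asserts.
        assert!(mem::size_of::<T>() >= mem::size_of::<PointerList>(),
                "Slot type too small to hold a link.");

        for n in 0..len {
            // Cast the nth slot to a link and push it.
            list.push(ptr.add(n) as *mut PointerList);
        }
    }

    fn main() {
        // `u128` is wide enough to hold a link on common targets.
        let mut backing = [0u128; 4];
        let mut list = PointerList { head: None };

        unsafe { provide(&mut list, backing.as_mut_ptr(), backing.len()) };

        // The head is now the last slot pushed.
        assert!(list.head.is_some());
    }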

+ 2 - 2
src/bk/node.rs

@@ -65,7 +65,7 @@ impl Node {
 
         // Follow the shortcuts until we reach `new_node`.
         // TODO: Unroll the first iteration of the loop below to avoid the unnecessary
-        // branch in the first iteration's call of `cmp::max`.
+        //       branch in the first iteration's call of `cmp::max`.
         for i in below {
             new_fat = cmp::max(i.fat, new_fat);
 
@@ -128,7 +128,7 @@ impl Node {
             //    (_/'~~      ''''(;
 
             // FIXME: The short-circuit in `calculate_fat_value` makes the check incomplete, if a
-            // larger element follows.
+            //        larger element follows.
 
             // Check the fat value of the bottom level.
             assert!(self.shortcuts[0].fat == self.calculate_fat_value_bottom(), "The bottom layer's \

+ 3 - 3
src/bk/seek.rs

@@ -20,7 +20,7 @@ struct Seek<'a> {
     ///
     /// So, the back look of this particular seek is `[6, 7, 7, ...]`.
     // FIXME: Find a more rustic way than raw pointers.
-    back_look: [*mut Shortcut; LEVELS.0],
+    back_look: [Pointer<Shortcut>; LEVELS.0],
     /// A pointer to a pointer to the found node.
     ///
     /// This is the node equal to or less than the target. The two layers of pointers are there to
@@ -116,7 +116,7 @@ impl<'a> Seek<'a> {
                 i.update_fat(self.node.block.size(), block::Size(0));
 
                 // TODO: Consider breaking this loop into two loops to avoid too many fat value
-                // updates.
+                //       updates.
             }
 
             // Finally, replace the useless node, and free it to the arena.
@@ -133,7 +133,7 @@ impl<'a> Seek<'a> {
     //
     // The new shortcut's fat value will be set to the block's size, and recomputation is likely
     // needed to update it.
-    fn update_shortcut(&mut self, lv: shortcut::Level) -> *mut Shortcut {
+    fn update_shortcut(&mut self, lv: shortcut::Level) -> Pointer<Shortcut> {
         // Make the old shortcut point to `self.node`.
         let old_next = mem::replace(&mut self.back_look[lv].next, Some(self.node));
         mem::replace(&mut self.back_look[lv], Shortcut {
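
The change from `*mut Shortcut` to `Pointer<Shortcut>` follows the FIXME above. The wrapper itself is project code, but the idea is a typed, non-null pointer newtype; a hypothetical sketch of the shape (ralloc's actual `Pointer` predates `NonNull` and differs in detail):

    use std::ptr::NonNull;

    // Hypothetical `Pointer<T>`-like wrapper: a typed, non-null raw pointer.
    struct Pointer<T> {
        inner: NonNull<T>,
    }

    impl<T> Pointer<T> {
        // Safety: `ptr` must be non-null and valid for the wrapper's use.
        unsafe fn new(ptr: *mut T) -> Pointer<T> {
            debug_assert!(!ptr.is_null(), "Null pointer.");

            Pointer { inner: NonNull::new_unchecked(ptr) }
        }

        fn get(&self) -> *mut T {
            self.inner.as_ptr()
        }
    }

    fn main() {
        let mut x = 7u32;
        let ptr = unsafe { Pointer::new(&mut x as *mut u32) };

        unsafe { *ptr.get() += 1; }
        assert_eq!(x, 8);
    }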

+ 7 - 1
src/block.rs

@@ -211,7 +211,7 @@ impl Block {
         log!(INTERNAL, "Padding {:?} to align {}", self, align);
 
         // FIXME: This function suffers from external fragmentation. Leaving bigger segments might
-        // increase performance.
+        //        increase performance.
 
         // Calculate the aligner, which defines the smallest size required as precursor to align
         // the block to `align`.
@@ -310,6 +310,12 @@ impl fmt::Debug for Block {
     }
 }
 
+impl Drop for Block {
+    fn drop(&mut self) {
+        debug_assert!(self.is_empty(), "Leaking a non-empty block.");
+    }
+}
+
 #[cfg(test)]
 mod test {
     use prelude::*;
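
The new `Drop` impl makes leaking a non-empty block loud in debug builds. In isolation, the guard pattern looks like the sketch below, with a hypothetical `Segment` standing in for `Block`:

    // A resource that must be explicitly consumed rather than silently
    // dropped, mirroring the `Block` drop guard above.
    #[derive(Debug)]
    struct Segment {
        size: usize,
    }

    impl Segment {
        fn is_empty(&self) -> bool {
            self.size == 0
        }

        // Consume the segment, handing its contents back to some owner.
        fn release(mut self) -> usize {
            // Mark the segment empty so the drop guard stays silent.
            std::mem::replace(&mut self.size, 0)
        }
    }

    impl Drop for Segment {
        fn drop(&mut self) {
            // In debug builds, dropping a non-empty segment is a bug: the
            // memory it tracked would be leaked.
            debug_assert!(self.is_empty(), "Leaking a non-empty segment.");
        }
    }

    fn main() {
        let seg = Segment { size: 4096 };
        let bytes = seg.release(); // Fine: emptied before the destructor runs.
        assert_eq!(bytes, 4096);
    }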

+ 6 - 3
src/leak.rs

@@ -5,13 +5,16 @@
 
 use prelude::*;
 
-/// Types that have no destructor.
+/// Types that have no (or a diverging) destructor.
 ///
-/// This trait holds the invariant that our type carries no destructor.
+/// This trait holds the invariant that our type is one of the following:
 ///
-/// Since one cannot define mutually exclusive traits, we have this as a temporary hack.
+/// 1. carries a diverging destructor.
+/// 2. carries a destructor which diverges if an (effectless) condition is true.
+/// 3. carries no destructor.
 pub unsafe trait Leak {}
 
 unsafe impl Leak for Block {}
 unsafe impl<T> Leak for Jar<T> {}
+unsafe impl<T> Leak for Uninit<T> {}
 unsafe impl<T> Leak for T where T: Copy {}
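
In reduced form, the contract `Leak` encodes is that skipping a value's destructor is acceptable. A sketch with only the `Copy` blanket impl (the other impls rely on project types):

    // Reduced, hypothetical version of the marker trait.
    unsafe trait Leak {}

    // `Copy` types cannot have destructors, so forgetting them is
    // trivially fine; this mirrors the blanket impl above.
    unsafe impl<T: Copy> Leak for T {}

    fn forget_it<T: Leak>(value: T) {
        // Skipping the destructor is fine by the trait's invariant.
        std::mem::forget(value);
    }

    fn main() {
        forget_it(42u32); // `u32: Copy`, hence `Leak`.
    }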

+ 1 - 1
src/prelude.rs

@@ -6,6 +6,6 @@ pub use block::Block;
 pub use cell::MoveCell;
 pub use lazy_init::LazyInit;
 pub use leak::Leak;
-pub use ptr::{Pointer, Jar};
+pub use ptr::{Pointer, Jar, Uninit};
 pub use sync::Mutex;
 pub use vec::Vec;

+ 40 - 9
src/ptr.rs

@@ -121,14 +121,9 @@ impl<T> ops::Deref for Pointer<T> {
 pub struct Jar<T: Leak> {
     /// The inner pointer.
     ///
-    /// This has four guarantees:
-    ///
-    /// 1. It is valid and initialized.
-    /// 2. The lifetime is tied to the ownership of the box (i.e. it is valid until manually
-    ///    deallocated).
-    /// 3. It is aligned to the alignment of `T`.
-    /// 4. It is non-aliased.
-    ptr: Pointer<T>,
+    /// Aside from the guarantees specified for `Uninit`, it has the additional guarantee of being
+    /// initialized.
+    ptr: Uninit<T>,
 }
 
 impl<T: Leak> Jar<T> {
@@ -137,7 +132,7 @@ impl<T: Leak> Jar<T> {
     /// # Safety
     ///
     /// Make sure the pointer is valid, initialized, non-aliased, and aligned. If any of these
-    /// invariants are broken, unsafety occurs.
+    /// invariants are broken, unsafety might occur.
     #[inline]
     pub unsafe fn from_raw(ptr: Pointer<T>) -> Jar<T> {
         debug_assert!(ptr.aligned_to(mem::align_of::<T>()), "`ptr` is unaligned to `T`.");
@@ -146,6 +141,42 @@ impl<T: Leak> Jar<T> {
     }
 }
 
+/// An owned, uninitialized pointer.
+#[must_use = "`Uninit` does not handle the destructor automatically, please free it into an arena \
+              to avoid memory leaks."]
+pub struct Uninit<T: Leak> {
+    /// The inner pointer.
+    ptr: Pointer<T>,
+}
+
+impl<T: Leak> Uninit<T> {
+    /// Create a new owned, uninitialized pointer.
+    ///
+    /// # Invariants
+    ///
+    /// Four invariants must be satisfied for the input pointer:
+    ///
+    /// 1. The pointer is valid.
+    /// 2. The lifetime is tied to the ownership of the box (i.e. it is valid until manually
+    ///    deallocated).
+    /// 3. It is aligned to `T`.
+    /// 4. It is non-aliased.
+    pub unsafe fn new(ptr: Pointer<T>) -> Uninit<T> {
+        debug_assert!(ptr.aligned_to(mem::align_of::<T>()), "Pointer not aligned to `T`.");
+
+        Uninit { ptr: ptr }
+    }
+
+    /// Get the inner pointer.
+    pub fn as_ptr(&self) -> &Pointer<T> {
+        &self.ptr
+    }
+}
+
+impl<T: Leak> Drop for Uninit<T> {
+    fn drop(&mut self) {
+        panic!("Leaking an `Uninit<T>`. This should likely have been freed instead.");
+    }
+}
+
+
 impl<T: Leak> ops::Deref for Jar<T> {
     type Target = T;
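
The `Uninit<T>`/`Jar<T>` split expresses the same contract that `std::mem::MaybeUninit` later standardized: the allocation guarantees hold throughout, and initialization is the extra step that upgrades one to the other. For comparison, the std version of that lifecycle (not ralloc API):

    use std::mem::MaybeUninit;

    fn main() {
        // Allocated and aligned, but holding no valid `u64` yet: the
        // `Uninit` stage.
        let mut slot: MaybeUninit<u64> = MaybeUninit::uninit();

        // Initialization is the step that separates `Uninit<T>` from
        // `Jar<T>` in the diff above.
        slot.write(1234);

        // Only after initialization may the value be observed.
        let value = unsafe { slot.assume_init() };
        assert_eq!(value, 1234);
    }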