Parcourir la source

Temporarily disable inplace reallocation for `reserve`, fix #9

ticki il y a 8 ans
Parent
commit
0c7b63819c
6 fichiers modifiés avec 85 ajouts et 70 suppressions
  1. 33 14
      src/block.rs
  2. 25 44
      src/bookkeeper.rs
  3. 1 0
      src/debug.rs
  4. 2 0
      src/leak.rs
  5. 8 6
      src/ptr.rs
  6. 16 6
      src/vec.rs

+ 33 - 14
src/block.rs

@@ -13,11 +13,18 @@ use core::{ptr, cmp, mem, fmt};
 ///
 /// This provides a number of guarantees,
 ///
-/// 1. The inner pointer is never aliased. No byte in the block is contained in another block
-///    (aliased in this case is defined purely based on liveliness).
-/// 2. The buffer is valid, but not necessarily initialized.
+/// 1. The buffer is valid for the block's lifetime, but not necessarily initialized.
+/// 2. The Block "owns" the inner data.
+/// 3. There is no interior mutability. Mutation requires either mutable access or ownership over
+///    the block.
+/// 4. The buffer is not aliased. That is, it does not overlap with other blocks, nor is it
+///    aliased in any other way.
 ///
-/// All this is enforced through the type system.
+/// All this is enforced through the type system. These invariants can only be broken through
+/// unsafe code.
+///
+/// Accessing it through an immutable reference does not break these guarantees. That is, you are
+/// not able to read/mutate without acquiring a _mutable_ reference.
 pub struct Block {
     /// The size of this block, in bytes.
     size: usize,
@@ -161,7 +168,12 @@ impl Block {
     /// Returns an `None` holding the intact block if `align` is out of bounds.
     #[inline]
     pub fn align(&mut self, align: usize) -> Option<(Block, Block)> {
-        let aligner = align - *self.ptr as usize % align;
+        // Calculate the aligner, which defines the smallest size required as precursor to align
+        // the block to `align`.
+        let aligner = (align - *self.ptr as usize % align) % align;
+        //                                                 ^^^^^^^^
+        // To avoid wasting space on the case where the block is already aligned, we calculate it
+        // modulo `align`.
 
         // Bound check.
         if aligner < self.size {
@@ -182,14 +194,13 @@ impl Block {
     }
 }
 
-impl !Sync for Block {}
-
 impl From<Block> for Pointer<u8> {
     fn from(from: Block) -> Pointer<u8> {
         from.ptr
     }
 }
 
+/// Compare the blocks' addresses.
 impl PartialOrd for Block {
     #[inline]
     fn partial_cmp(&self, other: &Block) -> Option<cmp::Ordering> {
@@ -237,16 +248,10 @@ mod test {
         assert_eq!(lorem.size() + rest.size(), arr.len());
         assert!(lorem < rest);
 
-        /* TODO
-        assert_eq!(unsafe {
-            slice::from_raw_parts(*lorem.into_ptr() as *const _, lorem.size())
-        }, b"Lorem");
-        */
-
         assert_eq!(lorem, lorem);
         assert!(rest.is_empty());
         assert!(lorem.align(2).unwrap().1.aligned_to(2));
-        assert!(rest.align(16).unwrap().1.aligned_to(16));
+        assert!(rest.align(15).unwrap().1.aligned_to(15));
         assert_eq!(*Pointer::from(lorem) as usize + 5, *Pointer::from(rest) as usize);
     }
 
@@ -276,4 +281,18 @@ mod test {
         // Test OOB.
         block.split(6);
     }
+
+    #[test]
+    fn test_mutate() {
+        let mut arr = [0u8, 2, 0, 0, 255, 255];
+
+        let block = unsafe {
+            Block::from_raw_parts(Pointer::new(&mut arr[0] as *mut u8), 6)
+        };
+
+        let (a, mut b) = block.split(2);
+        a.copy_to(&mut b);
+
+        assert_eq!(arr, [0, 2, 0, 2, 255, 255]);
+    }
 }

+ 25 - 44
src/bookkeeper.rs

@@ -408,6 +408,8 @@ impl Bookkeeper {
             // Make some handy assertions.
             #[cfg(features = "debug_tools")]
             assert!(entry != &mut block, "Double free.");
+            debug_assert!(block.is_empty() || entry <= &mut block, "Block merged in the wrong \
+                          direction.");
 
             entry.merge_right(&mut block).is_err()
         } || ind == 0 || self.pool[ind - 1].merge_right(&mut block).is_err() {
@@ -480,52 +482,32 @@ impl Bookkeeper {
     #[inline]
     fn reserve(&mut self, needed: usize) {
         if needed > self.pool.capacity() {
-            // Fool the borrowchecker.
-            let len = self.pool.len();
-
-            // Calculate the index.
-            let ind = self.find(&Block::empty(Pointer::from(&*self.pool).cast()));
-            // Temporarily steal the block, placing an empty vector in its place.
-            let block = Block::from(mem::replace(&mut self.pool, Vec::new()));
             // TODO allow BRK-free non-inplace reservations.
+            // TODO Enable inplace reallocation in this position.
 
             // Reallocate the block pool.
 
-            // We first try do it inplace.
-            match self.realloc_inplace_ind(ind, block, needed * mem::size_of::<Block>()) {
-                Ok(succ) => {
-                    // Inplace reallocation suceeeded, place the block back as the pool.
-                    self.pool = unsafe { Vec::from_raw_parts(succ, len) };
-                },
-                Err(block) => {
-                    // Inplace alloc failed, so we have to BRK some new space.
-
-                    // Reconstruct the vector.
-                    self.pool = unsafe { Vec::from_raw_parts(block, len) };
-
-                    // Make a fresh allocation.
-                    let size = needed.saturating_add(
-                        cmp::min(self.pool.capacity(), 200 + self.pool.capacity() / 2)
-                        // We add:
-                        + 1 // block for the alignment block.
-                        + 1 // block for the freed vector.
-                        + 1 // block for the excessive space.
-                    ) * mem::size_of::<Block>();
-                    let (alignment_block, alloc, excessive) = self.brk(size, mem::align_of::<Block>());
-
-                    // Refill the pool.
-                    let old = self.pool.refill(alloc);
-
-                    // Push the alignment block (note that it is in fact in the end of the pool,
-                    // due to BRK _extending_ the segment).
-                    self.push(alignment_block);
-                    // The excessive space.
-                    self.push(excessive);
-
-                    // Free the old vector.
-                    self.free_ind(ind, old);
-                },
-            }
+            // Make a fresh allocation.
+            let size = needed.saturating_add(
+                cmp::min(self.pool.capacity(), 200 + self.pool.capacity() / 2)
+                // We add:
+                + 1 // block for the alignment block.
+                + 1 // block for the freed vector.
+                + 1 // block for the excessive space.
+            ) * mem::size_of::<Block>();
+            let (alignment_block, alloc, excessive) = self.brk(size, mem::align_of::<Block>());
+
+            // Refill the pool.
+            let old = self.pool.refill(alloc);
+
+            // Push the alignment block (note that it is in fact in the end of the pool,
+            // due to BRK _extending_ the segment).
+            self.push(alignment_block);
+            // The excessive space.
+            self.push(excessive);
+
+            // Free the old vector.
+            self.free(old);
 
             // Check consistency.
             self.check();
@@ -613,8 +595,7 @@ impl Bookkeeper {
         debug_assert!(self.pool.is_empty() || block >= self.pool[ind + 1], "Inserting at {} will \
                       make the list unsorted.", ind);
         debug_assert!(self.find(&block) == ind, "Block is not inserted at the appropriate index.");
-
-        // TODO consider moving right before searching left.
+        debug_assert!(!block.is_empty(), "Inserting an empty block.");
 
         // Find the next gap, where a used block were.
         if let Some((n, _)) = self.pool.iter().skip(ind).enumerate().filter(|&(_, x)| !x.is_empty()).next() {

+ 1 - 0
src/debug.rs

@@ -15,6 +15,7 @@ pub struct Writer {
 }
 
 impl Writer {
+    /// Standard error output.
     pub fn stderr() -> Writer {
         Writer {
             fd: 2,

+ 2 - 0
src/leak.rs

@@ -15,3 +15,5 @@ pub trait Leak {}
 impl Leak for () {}
 impl Leak for Block {}
 impl Leak for u8 {}
+impl Leak for u16 {}
+impl Leak for i32 {}

+ 8 - 6
src/ptr.rs

@@ -73,12 +73,6 @@ impl<T> Pointer<T> {
 unsafe impl<T: Send> Send for Pointer<T> {}
 unsafe impl<T: Sync> Sync for Pointer<T> {}
 
-impl<'a, T> From<&'a [T]> for Pointer<T> {
-    fn from(from: &[T]) -> Pointer<T> {
-        unsafe { Pointer::new(from.as_ptr() as *mut T) }
-    }
-}
-
 impl<T> ops::Deref for Pointer<T> {
     type Target = *mut T;
 
@@ -102,6 +96,14 @@ mod test {
             assert_eq!(**ptr.clone().cast::<[u8; 1]>(), [b'a']);
             assert_eq!(**ptr.offset(1), b'b');
         }
+
+        let mut x = ['a', 'b'];
+
+        unsafe {
+            let ptr = Pointer::new(&mut x[0] as *mut char);
+            assert_eq!(**ptr.clone().cast::<[char; 1]>(), ['a']);
+            assert_eq!(**ptr.offset(1), 'b');
+        }
     }
 
     #[test]

+ 16 - 6
src/vec.rs

@@ -41,11 +41,8 @@ impl<T: Leak> Vec<T> {
     /// This is unsafe, since it won't initialize the buffer in any way, possibly breaking type
     /// safety, memory safety, and so on. Thus, care must be taken upon usage.
     #[inline]
+    #[cfg(test)]
     pub unsafe fn from_raw_parts(block: Block, len: usize) -> Vec<T> {
-        // Make some handy assertions.
-        debug_assert!(block.size() % mem::size_of::<T>() == 0, "The size of T does not divide the \
-                      block's size.");
-
         Vec {
             cap: block.size() / mem::size_of::<T>(),
             ptr: Pointer::new(*Pointer::from(block) as *mut T),
@@ -68,8 +65,7 @@ impl<T: Leak> Vec<T> {
 
         // Make some assertions.
         assert!(self.len <= new_cap, "Block not large enough to cover the vector.");
-        debug_assert!(new_cap * mem::size_of::<T>() == block.size(), "The size of T does not divide the \
-                      block's size.");
+        self.check(&block);
 
         let old = mem::replace(self, Vec::new());
         unsafe {
@@ -108,6 +104,20 @@ impl<T: Leak> Vec<T> {
             Ok(())
         }
     }
+
+    /// Check the validity of a block with respect to the vector.
+    ///
+    /// Blocks not passing this checks might lead to logic errors when used as buffer for the
+    /// vector.
+    ///
+    /// This is a NO-OP in release mode.
+    #[inline]
+    fn check(&self, block: &Block) {
+        debug_assert!(block.size() % mem::size_of::<T>() == 0, "The size of T does not divide the \
+                      block's size.");
+        debug_assert!(self.len <= block.size() / mem::size_of::<T>(), "Block not large enough to \
+                      cover the vector.");
+    }
 }
 
 /// Cast this vector to the respective block.