// c_vec.rs
  1. use core::iter::IntoIterator;
  2. use core::ops::{Deref, DerefMut};
  3. use core::ptr::{self, NonNull};
  4. use core::{cmp, mem, slice};
  5. use crate::platform::types::*;
  6. use crate::platform;
/// Error returned when a C-allocator call fails (NULL return) or when a
/// requested size would overflow / exceed the `isize::MAX` limit.
#[derive(Debug, Default, Hash, PartialEq, Eq, Clone, Copy)]
pub struct AllocError;
/// A normal vector allocated in Rust needs to be dropped from Rust
/// too, in order to avoid UB. This CVec is an abstraction that works
/// using only C allocation functions and can therefore be dropped
/// from C. Just like the Rust Vec, this does bounds checks to assure
/// you never reach isize::MAX. Unless you need to drop something from
/// C, prefer Rust's builtin Vec.
pub struct CVec<T> {
    // Buffer obtained from the C allocator; dangling when `cap == 0`.
    ptr: NonNull<T>,
    // Number of initialized elements; always <= `cap`.
    len: usize,
    // Allocated capacity, in elements (not bytes).
    cap: usize
}
  21. impl<T> CVec<T> {
  22. pub fn new() -> Self {
  23. Self {
  24. ptr: NonNull::dangling(),
  25. len: 0,
  26. cap: 0
  27. }
  28. }
  29. fn check_bounds(i: usize) -> Result<usize, AllocError> {
  30. if i > core::isize::MAX as usize {
  31. Err(AllocError)
  32. } else {
  33. Ok(i)
  34. }
  35. }
  36. fn check_mul(x: usize, y: usize) -> Result<usize, AllocError> {
  37. x.checked_mul(y).ok_or(AllocError)
  38. .and_then(Self::check_bounds)
  39. }
  40. pub fn with_capacity(cap: usize) -> Result<Self, AllocError> {
  41. if cap == 0 {
  42. return Ok(Self::new());
  43. }
  44. let size = Self::check_mul(cap, mem::size_of::<T>())?;
  45. let ptr = NonNull::new(unsafe { platform::alloc(size) as *mut T }).ok_or(AllocError)?;
  46. Ok(Self {
  47. ptr,
  48. len: 0,
  49. cap
  50. })
  51. }
  52. unsafe fn resize(&mut self, cap: usize) -> Result<(), AllocError> {
  53. let size = Self::check_mul(cap, mem::size_of::<T>())?;
  54. let ptr = NonNull::new(platform::realloc(self.ptr.as_ptr() as *mut c_void, size) as *mut T).ok_or(AllocError)?;
  55. self.ptr = ptr;
  56. self.cap = cap;
  57. Ok(())
  58. }
  59. unsafe fn drop_range(&mut self, start: usize, end: usize) {
  60. let mut start = self.ptr.as_ptr().add(start);
  61. let end = self.ptr.as_ptr().add(end);
  62. while start < end {
  63. ptr::drop_in_place(start);
  64. start = start.add(1);
  65. }
  66. }
  67. pub fn reserve(&mut self, required: usize) -> Result<(), AllocError> {
  68. let reserved_len = self.len.checked_add(required)
  69. .ok_or(AllocError)
  70. .and_then(Self::check_bounds)?;
  71. let new_cap = cmp::min(reserved_len.next_power_of_two(), core::isize::MAX as usize);
  72. if new_cap > self.cap {
  73. unsafe {
  74. self.resize(new_cap)?;
  75. }
  76. }
  77. Ok(())
  78. }
  79. pub fn push(&mut self, elem: T) -> Result<(), AllocError> {
  80. unsafe {
  81. self.reserve(1)?;
  82. ptr::write(self.ptr.as_ptr().add(self.len), elem);
  83. }
  84. self.len += 1; // no need to bounds check, as new len <= cap
  85. Ok(())
  86. }
  87. pub fn extend_from_slice(&mut self, elems: &[T]) -> Result<(), AllocError>
  88. where T: Copy
  89. {
  90. unsafe {
  91. self.reserve(elems.len())?;
  92. ptr::copy_nonoverlapping(elems.as_ptr(), self.ptr.as_ptr().add(self.len), elems.len());
  93. }
  94. self.len += elems.len(); // no need to bounds check, as new len <= cap
  95. Ok(())
  96. }
  97. pub fn append(&mut self, other: &mut Self) -> Result<(), AllocError> {
  98. unsafe {
  99. self.reserve(other.len())?;
  100. ptr::copy_nonoverlapping(other.as_ptr(), self.ptr.as_ptr().add(self.len), other.len());
  101. }
  102. self.len += other.len(); // no need to bounds check, as new len <= cap
  103. Ok(())
  104. }
  105. pub fn truncate(&mut self, len: usize) {
  106. if len < self.len {
  107. unsafe {
  108. let old_len = self.len;
  109. self.drop_range(len, old_len);
  110. }
  111. self.len = len;
  112. }
  113. }
  114. pub fn shrink_to_fit(&mut self) -> Result<(), AllocError> {
  115. if self.len < self.cap {
  116. unsafe {
  117. let new_cap = self.len;
  118. self.resize(new_cap)?;
  119. }
  120. }
  121. Ok(())
  122. }
  123. pub fn capacity(&self) -> usize {
  124. self.cap
  125. }
  126. pub fn as_ptr(&self) -> *const T {
  127. self.ptr.as_ptr()
  128. }
  129. pub fn as_mut_ptr(&mut self) -> *mut T {
  130. self.ptr.as_ptr()
  131. }
  132. /// Leaks the inner data. This is safe to drop from C!
  133. pub fn leak(mut self) -> *mut T {
  134. let ptr = self.as_mut_ptr();
  135. mem::forget(self);
  136. ptr
  137. }
  138. }
  139. impl<T> Deref for CVec<T> {
  140. type Target = [T];
  141. fn deref(&self) -> &Self::Target {
  142. unsafe {
  143. slice::from_raw_parts(self.ptr.as_ptr(), self.len)
  144. }
  145. }
  146. }
  147. impl<T> DerefMut for CVec<T> {
  148. fn deref_mut(&mut self) -> &mut Self::Target {
  149. unsafe {
  150. slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len)
  151. }
  152. }
  153. }
  154. impl<T> Drop for CVec<T> {
  155. fn drop(&mut self) {
  156. unsafe {
  157. let len = self.len;
  158. self.drop_range(0, len);
  159. }
  160. }
  161. }
  162. impl<'a, T> IntoIterator for &'a CVec<T> {
  163. type Item = <&'a [T] as IntoIterator>::Item;
  164. type IntoIter = <&'a [T] as IntoIterator>::IntoIter;
  165. fn into_iter(self) -> Self::IntoIter {
  166. <&[T]>::into_iter(&*self)
  167. }
  168. }
  169. impl<'a, T> IntoIterator for &'a mut CVec<T> {
  170. type Item = <&'a mut [T] as IntoIterator>::Item;
  171. type IntoIter = <&'a mut [T] as IntoIterator>::IntoIter;
  172. fn into_iter(self) -> Self::IntoIter {
  173. <&mut [T]>::into_iter(&mut *self)
  174. }
  175. }