c_vec.rs 5.4 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178
  1. use crate::platform::{self, types::*};
  2. use core::{
  3. cmp,
  4. iter::IntoIterator,
  5. mem,
  6. ops::{Deref, DerefMut},
  7. ptr::{self, NonNull},
  8. slice,
  9. };
/// Error that occurs when an allocation fails.
///
/// Returned by the fallible `CVec` operations; the C allocator reports
/// failure by returning NULL, which is surfaced as this error.
#[derive(Debug, Default, Hash, PartialEq, Eq, Clone, Copy)]
pub struct AllocError;
/// A normal vector allocated in Rust needs to be dropped from Rust
/// too, in order to avoid UB. This CVec is an abstraction that works
/// using only C allocation functions and can therefore be dropped
/// from C. Just like the Rust Vec, this does bounds checks to assure
/// you never reach isize::MAX. Unless you need to drop something from
/// C, prefer Rust's builtin Vec.
pub struct CVec<T> {
    // Pointer to the C-allocated buffer; dangling (never allocated)
    // while `cap == 0`.
    ptr: NonNull<T>,
    // Number of initialized elements; always `len <= cap`.
    len: usize,
    // Number of elements the buffer can hold without reallocating.
    cap: usize,
}
  24. impl<T> CVec<T> {
  25. pub fn new() -> Self {
  26. Self {
  27. ptr: NonNull::dangling(),
  28. len: 0,
  29. cap: 0,
  30. }
  31. }
  32. fn check_bounds(i: usize) -> Result<usize, AllocError> {
  33. if i > core::isize::MAX as usize {
  34. Err(AllocError)
  35. } else {
  36. Ok(i)
  37. }
  38. }
  39. fn check_mul(x: usize, y: usize) -> Result<usize, AllocError> {
  40. x.checked_mul(y)
  41. .ok_or(AllocError)
  42. .and_then(Self::check_bounds)
  43. }
  44. pub fn with_capacity(cap: usize) -> Result<Self, AllocError> {
  45. if cap == 0 {
  46. return Ok(Self::new());
  47. }
  48. let size = Self::check_mul(cap, mem::size_of::<T>())?;
  49. let ptr = NonNull::new(unsafe { platform::alloc(size) as *mut T }).ok_or(AllocError)?;
  50. Ok(Self { ptr, len: 0, cap })
  51. }
  52. unsafe fn resize(&mut self, cap: usize) -> Result<(), AllocError> {
  53. let size = Self::check_mul(cap, mem::size_of::<T>())?;
  54. let ptr = NonNull::new(platform::realloc(self.ptr.as_ptr() as *mut c_void, size) as *mut T)
  55. .ok_or(AllocError)?;
  56. self.ptr = ptr;
  57. self.cap = cap;
  58. Ok(())
  59. }
  60. unsafe fn drop_range(&mut self, start: usize, end: usize) {
  61. let mut start = self.ptr.as_ptr().add(start);
  62. let end = self.ptr.as_ptr().add(end);
  63. while start < end {
  64. ptr::drop_in_place(start);
  65. start = start.add(1);
  66. }
  67. }
  68. pub fn reserve(&mut self, required: usize) -> Result<(), AllocError> {
  69. let reserved_len = self
  70. .len
  71. .checked_add(required)
  72. .ok_or(AllocError)
  73. .and_then(Self::check_bounds)?;
  74. let new_cap = cmp::min(reserved_len.next_power_of_two(), core::isize::MAX as usize);
  75. if new_cap > self.cap {
  76. unsafe {
  77. self.resize(new_cap)?;
  78. }
  79. }
  80. Ok(())
  81. }
  82. pub fn push(&mut self, elem: T) -> Result<(), AllocError> {
  83. unsafe {
  84. self.reserve(1)?;
  85. ptr::write(self.ptr.as_ptr().add(self.len), elem);
  86. }
  87. self.len += 1; // no need to bounds check, as new len <= cap
  88. Ok(())
  89. }
  90. pub fn extend_from_slice(&mut self, elems: &[T]) -> Result<(), AllocError>
  91. where
  92. T: Copy,
  93. {
  94. unsafe {
  95. self.reserve(elems.len())?;
  96. ptr::copy_nonoverlapping(elems.as_ptr(), self.ptr.as_ptr().add(self.len), elems.len());
  97. }
  98. self.len += elems.len(); // no need to bounds check, as new len <= cap
  99. Ok(())
  100. }
  101. pub fn append(&mut self, other: &mut Self) -> Result<(), AllocError> {
  102. unsafe {
  103. self.reserve(other.len())?;
  104. ptr::copy_nonoverlapping(other.as_ptr(), self.ptr.as_ptr().add(self.len), other.len());
  105. }
  106. self.len += other.len(); // no need to bounds check, as new len <= cap
  107. Ok(())
  108. }
  109. pub fn truncate(&mut self, len: usize) {
  110. if len < self.len {
  111. unsafe {
  112. let old_len = self.len;
  113. self.drop_range(len, old_len);
  114. }
  115. self.len = len;
  116. }
  117. }
  118. pub fn shrink_to_fit(&mut self) -> Result<(), AllocError> {
  119. if self.len < self.cap {
  120. unsafe {
  121. let new_cap = self.len;
  122. self.resize(new_cap)?;
  123. }
  124. }
  125. Ok(())
  126. }
  127. pub fn capacity(&self) -> usize {
  128. self.cap
  129. }
  130. pub fn as_ptr(&self) -> *const T {
  131. self.ptr.as_ptr()
  132. }
  133. pub fn as_mut_ptr(&mut self) -> *mut T {
  134. self.ptr.as_ptr()
  135. }
  136. /// Leaks the inner data. This is safe to drop from C!
  137. pub fn leak(mut self) -> *mut T {
  138. let ptr = self.as_mut_ptr();
  139. mem::forget(self);
  140. ptr
  141. }
  142. }
  143. impl<T> Deref for CVec<T> {
  144. type Target = [T];
  145. fn deref(&self) -> &Self::Target {
  146. unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
  147. }
  148. }
  149. impl<T> DerefMut for CVec<T> {
  150. fn deref_mut(&mut self) -> &mut Self::Target {
  151. unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
  152. }
  153. }
  154. impl<T> Drop for CVec<T> {
  155. fn drop(&mut self) {
  156. unsafe {
  157. let len = self.len;
  158. self.drop_range(0, len);
  159. }
  160. }
  161. }
  162. impl<'a, T> IntoIterator for &'a CVec<T> {
  163. type Item = <&'a [T] as IntoIterator>::Item;
  164. type IntoIter = <&'a [T] as IntoIterator>::IntoIter;
  165. fn into_iter(self) -> Self::IntoIter {
  166. <&[T]>::into_iter(&*self)
  167. }
  168. }
  169. impl<'a, T> IntoIterator for &'a mut CVec<T> {
  170. type Item = <&'a mut [T] as IntoIterator>::Item;
  171. type IntoIter = <&'a mut [T] as IntoIterator>::IntoIter;
  172. fn into_iter(self) -> Self::IntoIter {
  173. <&mut [T]>::into_iter(&mut *self)
  174. }
  175. }