// tests.rs — unit tests and benchmarks for the slab allocator.
  1. use env_logger;
  2. use rand;
  3. use std::alloc;
  4. use std::alloc::Layout;
  5. use std::collections::HashSet;
  6. use std::mem::{size_of, transmute};
  7. use std::prelude::v1::*;
  8. use crate::*;
  9. use test::Bencher;
/// A simple page allocator based on GlobalAlloc (for testing purposes).
struct Pager {
    // Base addresses of every page handed out and not yet returned; used by
    // dealloc_page to detect double-frees and foreign pointers.
    base_pages: HashSet<*mut u8>, // probably should be hash-tables
}
// SAFETY(review): `Pager` contains raw pointers, which suppress the auto
// Send/Sync impls. These assert the tests never access a `Pager` from more
// than one thread at a time — TODO confirm no concurrent use in this file.
unsafe impl Send for Pager {}
unsafe impl Sync for Pager {}
  16. impl Pager {
  17. pub fn new() -> Pager {
  18. Pager {
  19. base_pages: HashSet::with_capacity(1024),
  20. }
  21. }
  22. }
impl Pager {
    /// Number of base pages currently handed out and not yet released.
    pub fn currently_allocated(&self) -> usize {
        self.base_pages.len()
    }
    /// Allocates one `page_size`-byte block aligned to `page_size` via the
    /// global allocator. Returns `None` if the allocator fails. Any size
    /// other than `OBJECT_PAGE_SIZE` panics via `unreachable!`.
    fn alloc_page(&mut self, page_size: usize) -> Option<*mut u8> {
        let r =
            unsafe { std::alloc::alloc(Layout::from_size_align(page_size, page_size).unwrap()) };
        if !r.is_null() {
            // Record the page so dealloc_page can validate it later.
            match page_size {
                OBJECT_PAGE_SIZE => self.base_pages.insert(r),
                _ => unreachable!("invalid page-size supplied"),
            };
            Some(r)
        } else {
            None
        }
    }
    /// Returns a page previously obtained from `alloc_page` to the global
    /// allocator. Panics if `ptr` was not handed out by this `Pager` or if
    /// `page_size` is not `OBJECT_PAGE_SIZE`.
    fn dealloc_page(&mut self, ptr: *mut u8, page_size: usize) {
        let layout = match page_size {
            OBJECT_PAGE_SIZE => {
                // Guard against double-free / foreign pointers.
                assert!(
                    self.base_pages.contains(&ptr),
                    "Trying to deallocate invalid base-page"
                );
                self.base_pages.remove(&ptr);
                Layout::from_size_align(OBJECT_PAGE_SIZE, OBJECT_PAGE_SIZE).unwrap()
            }
            _ => unreachable!("invalid page-size supplied"),
        };
        // SAFETY(review): ptr was allocated by alloc_page with this exact
        // layout (checked via base_pages above).
        unsafe { std::alloc::dealloc(ptr, layout) };
    }
}
/// Source of backing pages for the allocators under test.
trait PageProvider<'a>: Send {
    /// Hands out a fresh `ObjectPage`, or `None` if memory is exhausted.
    fn allocate_page(&mut self) -> Option<&'a mut ObjectPage<'a>>;
    /// Returns a previously allocated page to the provider.
    fn release_page(&mut self, page: &'a mut ObjectPage<'a>);
}
impl<'a> PageProvider<'a> for Pager {
    /// Allocates a new ObjectPage from the system.
    ///
    /// Uses the global allocator (not `mmap`, despite the name `Pager`) to
    /// obtain a page-sized, page-aligned block and casts it to an ObjectPage.
    fn allocate_page(&mut self) -> Option<&'a mut ObjectPage<'a>> {
        self.alloc_page(OBJECT_PAGE_SIZE)
            // SAFETY(review): the block is OBJECT_PAGE_SIZE bytes and
            // page-aligned, which the transmute to &mut ObjectPage relies on.
            .map(|r| unsafe { transmute(r as usize) })
    }
    /// Releases an ObjectPage back to the system.
    ///
    /// The page goes back to the global allocator (not `munmap`).
    fn release_page(&mut self, p: &'a mut ObjectPage<'a>) {
        self.dealloc_page(p as *const ObjectPage as *mut u8, OBJECT_PAGE_SIZE);
    }
}
  74. #[test]
  75. fn check_size() {
  76. assert_eq!(
  77. OBJECT_PAGE_SIZE as usize,
  78. size_of::<ObjectPage>(),
  79. "ObjectPage should be exactly the size of a single page."
  80. );
  81. }
  82. #[test]
  83. fn test_mmap_allocator() {
  84. let mut mmap = Pager::new();
  85. match mmap.allocate_page() {
  86. Some(sp) => {
  87. sp.bitfield.initialize(8, OBJECT_PAGE_SIZE - 80);
  88. assert!(!sp.is_full(), "Got empty slab");
  89. assert!(sp.is_empty(6 * 64), "Got empty slab");
  90. mmap.release_page(sp)
  91. }
  92. None => panic!("failed to allocate ObjectPage"),
  93. }
  94. }
/// Generates an allocation stress test for one size class.
///
/// `$test` — test function name; `$size` — object size in bytes;
/// `$alignment` — requested alignment; `$allocations` — number of objects
/// to allocate; `$type` — page type backing the `SCAllocator`.
///
/// The test allocates, pattern-fills, verifies, deallocates, re-allocates
/// (checking no extra pages are needed), deallocates again, and finally
/// reclaims all pages, asserting the pager got everything back.
macro_rules! test_sc_allocation {
    ($test:ident, $size:expr, $alignment:expr, $allocations:expr, $type:ty) => {
        #[test]
        fn $test() {
            let _ = env_logger::try_init();
            let mut mmap = Pager::new();
            {
                let mut sa: SCAllocator<$type> = SCAllocator::new($size);
                let alignment = $alignment;
                let mut objects: Vec<NonNull<u8>> = Vec::new();
                // Each allocation is also viewed as a &mut [usize; $size / 8]
                // so it can be filled with a random pattern and checked for
                // aliasing later.
                let mut vec: Vec<(usize, &mut [usize; $size / 8])> = Vec::new();
                let layout = Layout::from_size_align($size, alignment).unwrap();
                for _ in 0..$allocations {
                    loop {
                        match sa.allocate(layout) {
                            // Allocation was successful
                            Ok(nptr) => {
                                unsafe {
                                    vec.push((rand::random::<usize>(), transmute(nptr.as_ptr())))
                                };
                                objects.push(nptr);
                                break;
                            }
                            // Couldn't allocate need to refill first
                            Err(AllocationError::OutOfMemory) => {
                                let page = mmap.allocate_page().unwrap();
                                unsafe {
                                    sa.refill(page);
                                }
                            }
                            // Unexpected errors
                            Err(AllocationError::InvalidLayout) => unreachable!("Unexpected error"),
                        }
                    }
                }
                // Write the objects with a random pattern
                for item in vec.iter_mut() {
                    let (pattern, ref mut obj) = *item;
                    assert!(obj.len() == $size / 8);
                    for i in 0..obj.len() {
                        obj[i] = pattern;
                    }
                }
                // Re-read every object: if two allocations aliased the same
                // memory, a later fill would have clobbered the pattern.
                for item in vec.iter() {
                    let (pattern, ref obj) = *item;
                    for i in 0..obj.len() {
                        assert_eq!(
                            obj[i], pattern,
                            "No two allocations point to the same memory."
                        );
                    }
                }
                // Make sure we can correctly deallocate:
                let pages_allocated = sa.slabs.elements;
                // Deallocate all the objects
                for item in objects.iter_mut() {
                    unsafe {
                        sa.deallocate(*item, layout).expect("Can't deallocate");
                    }
                }
                objects.clear();
                sa.check_page_assignments();
                // then allocate everything again,
                for _ in 0..$allocations {
                    loop {
                        match sa.allocate(layout) {
                            // Allocation was successful
                            Ok(nptr) => {
                                unsafe {
                                    vec.push((rand::random::<usize>(), transmute(nptr.as_ptr())))
                                };
                                objects.push(nptr);
                                break;
                            }
                            // Couldn't allocate need to refill first
                            Err(AllocationError::OutOfMemory) => {
                                let page = mmap.allocate_page().unwrap();
                                unsafe {
                                    sa.refill(page);
                                }
                            }
                            // Unexpected errors
                            Err(AllocationError::InvalidLayout) => unreachable!("Unexpected error"),
                        }
                    }
                }
                // and make sure we do not request more pages than what we had previously
                // println!("{} {}", pages_allocated, sa.slabs.elements);
                assert_eq!(
                    pages_allocated, sa.slabs.elements,
                    "Did not use more memory for 2nd allocation run."
                );
                // Deallocate everything once more
                for item in objects.iter_mut() {
                    unsafe {
                        sa.deallocate(*item, layout).expect("Can't deallocate");
                    }
                }
                // Drain the slab-allocator and give unused pages back to the OS
                sa.try_reclaim_pages(usize::MAX, &mut |p: *mut ObjectPage| unsafe {
                    mmap.release_page(&mut *p)
                });
            }
            // Check that we released everything to our page allocator:
            assert_eq!(
                mmap.currently_allocated(),
                0,
                "Released all pages to the underlying memory manager."
            );
        }
    };
}
// Instantiate the stress test across a spread of object sizes, alignments,
// and allocation counts. Name scheme: op_<allocations>_size<bytes>_alignment<align>.
test_sc_allocation!(op_512_size8_alignment1, 8, 1, 512, ObjectPage);
test_sc_allocation!(op_4096_size8_alignment8, 8, 8, 4096, ObjectPage);
test_sc_allocation!(op_500_size8_alignment64, 8, 64, 500, ObjectPage);
test_sc_allocation!(op_4096_size12_alignment1, 12, 1, 4096, ObjectPage);
test_sc_allocation!(op_4096_size13_alignment1, 13, 1, 4096, ObjectPage);
test_sc_allocation!(op_2000_size14_alignment1, 14, 1, 2000, ObjectPage);
test_sc_allocation!(op_4096_size15_alignment1, 15, 1, 4096, ObjectPage);
test_sc_allocation!(op_8000_size16_alignment1, 16, 1, 8000, ObjectPage);
test_sc_allocation!(op_1024_size24_alignment1, 24, 1, 1024, ObjectPage);
test_sc_allocation!(op_3090_size32_alignment1, 32, 1, 3090, ObjectPage);
test_sc_allocation!(op_4096_size64_alignment1, 64, 1, 4096, ObjectPage);
test_sc_allocation!(op_1000_size512_alignment1, 512, 1, 1000, ObjectPage);
test_sc_allocation!(op_4096_size1024_alignment1, 1024, 1, 4096, ObjectPage);
test_sc_allocation!(op_10_size2048_alignment1, 2048, 1, 10, ObjectPage);
test_sc_allocation!(op_10000_size512_alignment1, 512, 1, 10000, ObjectPage);
  222. #[test]
  223. #[should_panic]
  224. fn invalid_alignment() {
  225. let _layout = Layout::from_size_align(10, 3).unwrap();
  226. }
  227. #[test]
  228. fn test_readme() -> Result<(), AllocationError> {
  229. let object_size = 12;
  230. let alignment = 4;
  231. let layout = Layout::from_size_align(object_size, alignment).unwrap();
  232. // We need something that can provide backing memory
  233. // (4 KiB and 2 MiB pages) to our ZoneAllocator
  234. // (see tests.rs for a dummy implementation).
  235. let mut pager = Pager::new();
  236. let page = pager.allocate_page().expect("Can't allocate a page");
  237. let mut zone: ZoneAllocator = Default::default();
  238. // Prematurely fill the ZoneAllocator with memory.
  239. // Alternatively, the allocate call would return an
  240. // error which we can capture to refill on-demand.
  241. unsafe { zone.refill(layout, page)? };
  242. let allocated = zone.allocate(layout)?;
  243. unsafe { zone.deallocate(allocated, layout, &SlabCallback) }?;
  244. Ok(())
  245. }
  246. #[test]
  247. fn test_readme2() -> Result<(), AllocationError> {
  248. let object_size = 10;
  249. let alignment = 8;
  250. let layout = Layout::from_size_align(object_size, alignment).unwrap();
  251. // We need something that can provide backing memory
  252. // (4 KiB and 2 MiB pages) to our ZoneAllocator
  253. // (see tests.rs for a dummy implementation).
  254. let mut pager = Pager::new();
  255. let page = pager.allocate_page().expect("Can't allocate a page");
  256. let mut sa: SCAllocator<ObjectPage> = SCAllocator::new(object_size);
  257. // Prematurely fill the SCAllocator with memory.
  258. // Alternatively, the allocate call would return an
  259. // error which we can capture to refill on-demand.
  260. unsafe { sa.refill(page) };
  261. sa.allocate(layout)?;
  262. Ok(())
  263. }
  264. #[test]
  265. fn test_bug1() -> Result<(), AllocationError> {
  266. let _ = env_logger::try_init();
  267. let mut mmap = Pager::new();
  268. let page = mmap.allocate_page();
  269. let mut sa: SCAllocator<ObjectPage> = SCAllocator::new(8);
  270. unsafe {
  271. sa.refill(page.unwrap());
  272. }
  273. let ptr1 = sa.allocate(Layout::from_size_align(1, 1).unwrap())?;
  274. let ptr2 = sa.allocate(Layout::from_size_align(2, 1).unwrap())?;
  275. unsafe { sa.deallocate(ptr1, Layout::from_size_align(1, 1).unwrap()) }?;
  276. let _ptr3 = sa.allocate(Layout::from_size_align(4, 1).unwrap())?;
  277. unsafe {
  278. sa.deallocate(ptr2, Layout::from_size_align(2, 1).unwrap())
  279. .map(|_| ())
  280. }
  281. }
  282. #[bench]
  283. fn slabmalloc_allocate_deallocate(b: &mut Bencher) {
  284. let _ = env_logger::try_init();
  285. let mut mmap = Pager::new();
  286. let mut sa: SCAllocator<ObjectPage> = SCAllocator::new(8);
  287. let layout = Layout::from_size_align(8, 1).unwrap();
  288. let page = mmap.allocate_page();
  289. unsafe {
  290. sa.refill(page.unwrap());
  291. }
  292. let ptr = sa.allocate(layout).expect("Can't allocate");
  293. test::black_box(ptr);
  294. b.iter(|| {
  295. let ptr = sa.allocate(layout).expect("Can't allocate");
  296. test::black_box(ptr);
  297. unsafe { sa.deallocate(ptr, layout).expect("Can't deallocate") };
  298. });
  299. }
  300. #[bench]
  301. fn slabmalloc_allocate_deallocate_big(b: &mut Bencher) {
  302. let _ = env_logger::try_init();
  303. let mut mmap = Pager::new();
  304. let mut sa: SCAllocator<ObjectPage> = SCAllocator::new(512);
  305. let page = mmap.allocate_page();
  306. unsafe {
  307. sa.refill(page.unwrap());
  308. }
  309. let layout = Layout::from_size_align(512, 1).unwrap();
  310. let ptr = sa.allocate(layout).expect("Can't allocate");
  311. test::black_box(ptr);
  312. b.iter(|| {
  313. let ptr = sa.allocate(layout).expect("Can't allocate");
  314. test::black_box(ptr);
  315. unsafe { sa.deallocate(ptr, layout).expect("Can't deallocate") };
  316. });
  317. }
  318. #[bench]
  319. fn jemalloc_allocate_deallocate(b: &mut Bencher) {
  320. let layout = Layout::from_size_align(8, 1).unwrap();
  321. let ptr = unsafe { alloc::alloc(layout) };
  322. test::black_box(ptr);
  323. b.iter(|| unsafe {
  324. let ptr = alloc::alloc(layout);
  325. test::black_box(ptr);
  326. alloc::dealloc(ptr, layout);
  327. });
  328. }
  329. #[bench]
  330. fn jemalloc_allocate_deallocate_big(b: &mut Bencher) {
  331. let layout = Layout::from_size_align(512, 1).unwrap();
  332. let ptr = unsafe { alloc::alloc(layout) };
  333. test::black_box(ptr);
  334. b.iter(|| unsafe {
  335. let ptr = alloc::alloc(layout);
  336. test::black_box(ptr);
  337. alloc::dealloc(ptr, layout);
  338. });
  339. }
  340. #[test]
  341. pub fn check_first_fit() {
  342. let op: ObjectPage = Default::default();
  343. let layout = Layout::from_size_align(8, 8).unwrap();
  344. println!("{:?}", op.first_fit(layout));
  345. }
  346. #[test]
  347. fn list_pop() {
  348. let mut op1: ObjectPage = Default::default();
  349. let op1_ptr = &op1 as *const ObjectPage<'_>;
  350. let mut op2: ObjectPage = Default::default();
  351. let op2_ptr = &op2 as *const ObjectPage<'_>;
  352. let mut op3: ObjectPage = Default::default();
  353. let op3_ptr = &op3 as *const ObjectPage<'_>;
  354. let mut op4: ObjectPage = Default::default();
  355. let op4_ptr = &op4 as *const ObjectPage<'_>;
  356. let mut list: PageList<ObjectPage> = PageList::new();
  357. list.insert_front(&mut op1);
  358. list.insert_front(&mut op2);
  359. list.insert_front(&mut op3);
  360. assert!(list.contains(op1_ptr));
  361. assert!(list.contains(op2_ptr));
  362. assert!(list.contains(op3_ptr));
  363. assert!(!list.contains(op4_ptr));
  364. let popped = list.pop();
  365. assert_eq!(popped.unwrap() as *const ObjectPage, op3_ptr);
  366. assert!(!list.contains(op3_ptr));
  367. let popped = list.pop();
  368. assert_eq!(popped.unwrap() as *const ObjectPage, op2_ptr);
  369. assert!(!list.contains(op2_ptr));
  370. list.insert_front(&mut op4);
  371. assert!(list.contains(op4_ptr));
  372. let popped = list.pop();
  373. assert_eq!(popped.unwrap() as *const ObjectPage, op4_ptr);
  374. assert!(!list.contains(op4_ptr));
  375. let popped = list.pop();
  376. assert_eq!(popped.unwrap() as *const ObjectPage, op1_ptr);
  377. assert!(!list.contains(op1_ptr));
  378. let popped = list.pop();
  379. assert!(popped.is_none());
  380. assert!(!list.contains(op1_ptr));
  381. assert!(!list.contains(op2_ptr));
  382. assert!(!list.contains(op3_ptr));
  383. assert!(!list.contains(op4_ptr));
  384. }
  385. #[test]
  386. pub fn iter_empty_list() {
  387. let mut new_head1: ObjectPage = Default::default();
  388. let mut l = PageList::new();
  389. l.insert_front(&mut new_head1);
  390. for _p in l.iter_mut() {}
  391. }
  392. #[test]
  393. pub fn check_is_full_8() {
  394. let _r = env_logger::try_init();
  395. let layout = Layout::from_size_align(8, 1).unwrap();
  396. let mut page: ObjectPage = Default::default();
  397. page.bitfield.initialize(8, OBJECT_PAGE_SIZE - 80);
  398. let obj_per_page = core::cmp::min((OBJECT_PAGE_SIZE - 80) / 8, 8 * 64);
  399. let mut allocs = 0;
  400. loop {
  401. if page.allocate(layout).is_null() {
  402. break;
  403. }
  404. allocs += 1;
  405. if allocs < obj_per_page {
  406. assert!(
  407. !page.is_full(),
  408. "Page mistakenly considered full after {} allocs",
  409. allocs
  410. );
  411. assert!(!page.is_empty(obj_per_page));
  412. }
  413. }
  414. assert_eq!(allocs, obj_per_page, "Can use all bitmap space");
  415. assert!(page.is_full());
  416. }
  417. // Test for bug that reports pages not as full when
  418. // the entire bitfield wasn't allocated.
  419. #[test]
  420. pub fn check_is_full_512() {
  421. let _r = env_logger::try_init();
  422. let mut page: ObjectPage = Default::default();
  423. page.bitfield.initialize(512, OBJECT_PAGE_SIZE - 80);
  424. let layout = Layout::from_size_align(512, 1).unwrap();
  425. let obj_per_page = core::cmp::min((OBJECT_PAGE_SIZE - 80) / 512, 6 * 64);
  426. let mut allocs = 0;
  427. loop {
  428. if page.allocate(layout).is_null() {
  429. break;
  430. }
  431. allocs += 1;
  432. if allocs < (OBJECT_PAGE_SIZE - 80) / 512 {
  433. assert!(!page.is_full());
  434. assert!(!page.is_empty(obj_per_page));
  435. }
  436. }
  437. assert!(page.is_full());
  438. }
  439. #[test]
  440. pub fn issue_9() -> Result<(), AllocationError> {
  441. let mut pager = Pager::new();
  442. let mut zone: ZoneAllocator = Default::default();
  443. // size: 256 align: 1 | my pager gets called
  444. let l1 = Layout::from_size_align(256, 1).unwrap();
  445. assert!(zone.allocate(l1).is_err(), "my pager gets called");
  446. let page = pager.allocate_page().expect("Can't allocate a page");
  447. unsafe { zone.refill(l1, page)? };
  448. let p1 = zone.allocate(l1)?;
  449. // size: 48 align: 8 | my pager gets called
  450. let l2 = Layout::from_size_align(48, 8).unwrap();
  451. assert!(zone.allocate(l2).is_err(), "my pager gets called");
  452. let page = pager.allocate_page().expect("Can't allocate a page");
  453. unsafe { zone.refill(l2, page)? };
  454. let p2 = zone.allocate(l2)?;
  455. assert_eq!(p2.as_ptr() as usize % l2.align(), 0);
  456. assert_ne!(p2, p1);
  457. // size: 6 align: 1 | my pager gets called and returns the properly aligned address X
  458. let l3 = Layout::from_size_align(6, 1).unwrap();
  459. assert!(
  460. zone.allocate(l3).is_err(),
  461. "my pager gets called and returns the properly aligned address X"
  462. );
  463. let page = pager.allocate_page().expect("Can't allocate a page");
  464. unsafe { zone.refill(l3, page)? };
  465. let p3 = zone.allocate(l3)?;
  466. assert_eq!(p3.as_ptr() as usize % l3.align(), 0);
  467. assert_ne!(p3, p2);
  468. assert_ne!(p3, p1);
  469. //size: 8 align: 1 | my pager doesn't get called
  470. let l4 = Layout::from_size_align(8, 1).unwrap();
  471. // my pager doesn't get called
  472. let p4 = zone.allocate(l4)?;
  473. assert_eq!(p4.as_ptr() as usize % l4.align(), 0);
  474. assert_ne!(p4, p3);
  475. assert_ne!(p4, p2);
  476. assert_ne!(p4, p1);
  477. // size: 16 align: 1 | my pager gets called
  478. let l5 = Layout::from_size_align(16, 1).unwrap();
  479. assert!(zone.allocate(l5).is_err(), "my pager gets called");
  480. let page = pager.allocate_page().expect("Can't allocate a page");
  481. unsafe { zone.refill(l5, page)? };
  482. let p5 = zone.allocate(l5)?;
  483. assert_eq!(p5.as_ptr() as usize % l5.align(), 0);
  484. assert_ne!(p5, p1);
  485. assert_ne!(p5, p2);
  486. assert_ne!(p5, p3);
  487. assert_ne!(p5, p4);
  488. Ok(())
  489. }
/// Callback used to return a slab_page to the buddy/backing allocator.
/// In these tests it only verifies invariants; the `Pager` owns the memory.
struct SlabCallback;
impl CallBack for SlabCallback {
    unsafe fn free_slab_page(&self, base_addr: *mut u8, size: usize) {
        assert_eq!(base_addr as usize & (OBJECT_PAGE_SIZE - 1), 0); // address must be OBJECT_PAGE_SIZE (4 KiB) aligned
        assert_eq!(size, OBJECT_PAGE_SIZE); // a freed slab_page must be exactly one page
    }
}
  497. }