loom.rs

pub(crate) use self::inner::*;

#[cfg(all(test, loom))]
mod inner {
    pub(crate) mod atomic {
        pub use loom::sync::atomic::*;
        pub use std::sync::atomic::Ordering;
    }

    pub(crate) use loom::{cell, future, hint, sync, thread};
    use std::{cell::RefCell, fmt::Write};

    pub(crate) mod model {
        #[allow(unused_imports)]
        pub(crate) use loom::model::Builder;
    }

    std::thread_local! {
        static TRACE_BUF: RefCell<String> = RefCell::new(String::new());
    }

    pub(crate) fn traceln(args: std::fmt::Arguments) {
        let mut args = Some(args);
        TRACE_BUF
            .try_with(|buf| {
                let mut buf = buf.borrow_mut();
                let _ = buf.write_fmt(args.take().unwrap());
                let _ = buf.write_char('\n');
            })
            .unwrap_or_else(|_| println!("{}", args.take().unwrap()))
    }
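
    // Hedged usage sketch (not in the original source): callers are assumed to
    // pass a `format_args!` invocation; the line is buffered in `TRACE_BUF` and
    // only printed (by the panic hook below) if an iteration panics, e.g.:
    //
    //     traceln(format_args!("woke task {:?}", id));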

    #[track_caller]
    pub(crate) fn run_builder(
        builder: loom::model::Builder,
        model: impl Fn() + Sync + Send + std::panic::UnwindSafe + 'static,
    ) {
        use std::{
            env, io,
            sync::{
                atomic::{AtomicBool, AtomicUsize, Ordering},
                Once,
            },
        };
        use tracing_subscriber::{filter::Targets, fmt, prelude::*};

        static IS_NOCAPTURE: AtomicBool = AtomicBool::new(false);
        static SETUP_TRACE: Once = Once::new();

        SETUP_TRACE.call_once(|| {
            // set up tracing for loom.
            const LOOM_LOG: &str = "LOOM_LOG";

            struct TracebufWriter;

            impl io::Write for TracebufWriter {
                fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
                    let len = buf.len();
                    let s = std::str::from_utf8(buf)
                        .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;
                    TRACE_BUF.with(|buf| buf.borrow_mut().push_str(s));
                    Ok(len)
                }

                fn flush(&mut self) -> io::Result<()> {
                    Ok(())
                }
            }

            let filter = env::var(LOOM_LOG)
                .ok()
                .and_then(|var| match var.parse::<Targets>() {
                    Err(e) => {
                        eprintln!("invalid {}={:?}: {}", LOOM_LOG, var, e);
                        None
                    }
                    Ok(targets) => Some(targets),
                })
                .unwrap_or_else(|| Targets::new().with_target("loom", tracing::Level::INFO));
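
            // Hedged usage note (not in the original source): `LOOM_LOG` is
            // parsed as a `tracing_subscriber` `Targets` filter, so something
            // like `LOOM_LOG=loom=debug` (or any `target=level` list) selects
            // per-target levels; when unset or unparseable, only `loom=info`
            // events are captured.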

            fmt::Subscriber::builder()
                .with_writer(|| TracebufWriter)
                .without_time()
                .with_max_level(tracing::Level::TRACE)
                .finish()
                .with(filter)
                .init();

            if std::env::args().any(|arg| arg == "--nocapture") {
                IS_NOCAPTURE.store(true, Ordering::Relaxed);
            }

            let default_hook = std::panic::take_hook();
            std::panic::set_hook(Box::new(move |panic| {
                // try to print the trace buffer.
                TRACE_BUF
                    .try_with(|buf| {
                        if let Ok(mut buf) = buf.try_borrow_mut() {
                            eprint!("{}", buf);
                            buf.clear();
                        } else {
                            eprint!("trace buf already mutably borrowed?");
                        }
                    })
                    .unwrap_or_else(|e| eprintln!("trace buf already torn down: {}", e));

                // let the default panic hook do the rest...
                default_hook(panic);
            }))
        });

        // wrap the loom model with `catch_unwind` to avoid potentially losing
        // test output on double panics.
        let current_iteration = std::sync::Arc::new(AtomicUsize::new(1));
        let iteration = current_iteration.clone();
        let test_name = match std::thread::current().name() {
            Some("main") | None => "test".to_string(),
            Some(name) => name.to_string(),
        };
        builder.check(move || {
            let iteration = current_iteration.fetch_add(1, Ordering::Relaxed);
            traceln(format_args!(
                "\n---- {} iteration {} ----",
                test_name, iteration,
            ));
            model();
            // if this iteration succeeded, clear the buffer for the
            // next iteration...
            TRACE_BUF.with(|buf| buf.borrow_mut().clear());
        });

        // Only print iterations on test completion in nocapture mode; otherwise
        // they'll just get all mangled.
        if IS_NOCAPTURE.load(Ordering::Relaxed) {
            print!("({} iterations) ", iteration.load(Ordering::Relaxed));
        }
    }

    #[track_caller]
    pub(crate) fn model(model: impl Fn() + std::panic::UnwindSafe + Sync + Send + 'static) {
        run_builder(Default::default(), model)
    }
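
    // Hedged example (not part of the original file): a sketch of how a test in
    // this crate might drive the `model` helper above. The `crate::loom` path,
    // the test body, and the `Arc`/`AtomicUsize` choices are illustrative
    // assumptions only.
    //
    //     #[test]
    //     fn example_loom_test() {
    //         crate::loom::model(|| {
    //             use crate::loom::{
    //                 sync::{
    //                     atomic::{AtomicUsize, Ordering},
    //                     Arc,
    //                 },
    //                 thread,
    //             };
    //
    //             let count = Arc::new(AtomicUsize::new(0));
    //             let also_count = count.clone();
    //             let t = thread::spawn(move || {
    //                 also_count.fetch_add(1, Ordering::SeqCst);
    //             });
    //             count.fetch_add(1, Ordering::SeqCst);
    //             t.join().unwrap();
    //             assert_eq!(count.load(Ordering::SeqCst), 2);
    //         });
    //     }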

    pub(crate) mod alloc {
        #![allow(dead_code)]
        use loom::alloc;
        use std::fmt;

        /// Track allocations, detecting leaks
        ///
        /// This is a version of `loom::alloc::Track` that adds the missing
        /// `Default` and `Clone` impls.
        pub struct Track<T>(alloc::Track<T>);

        impl<T> Track<T> {
            /// Track a value for leaks
            #[inline(always)]
            pub fn new(value: T) -> Track<T> {
                Track(alloc::Track::new(value))
            }

            /// Get a reference to the value
            #[inline(always)]
            pub fn get_ref(&self) -> &T {
                self.0.get_ref()
            }

            /// Get a mutable reference to the value
            #[inline(always)]
            pub fn get_mut(&mut self) -> &mut T {
                self.0.get_mut()
            }

            /// Stop tracking the value for leaks
            #[inline(always)]
            pub fn into_inner(self) -> T {
                self.0.into_inner()
            }
        }

        impl<T: fmt::Debug> fmt::Debug for Track<T> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                self.0.fmt(f)
            }
        }

        impl<T: Default> Default for Track<T> {
            fn default() -> Self {
                Self::new(T::default())
            }
        }

        impl<T: Clone> Clone for Track<T> {
            fn clone(&self) -> Self {
                Self::new(self.get_ref().clone())
            }

            fn clone_from(&mut self, source: &Self) {
                self.get_mut().clone_from(source.get_ref());
            }
        }
    }
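
    // Hedged usage sketch (not in the original source): under loom, a tracked
    // value that is never dropped by the end of a test iteration is reported as
    // a leak; `into_inner` stops tracking and hands the value back, e.g.:
    //
    //     let tracked = alloc::Track::new(vec![1, 2, 3]);
    //     // ... move the tracked value through the data structure under test ...
    //     let value = tracked.into_inner(); // tracking ends here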
}

#[cfg(not(all(loom, test)))]
mod inner {
    #![allow(dead_code)]

    pub(crate) mod sync {
        pub use core::sync::*;

        #[cfg(feature = "alloc")]
        pub use alloc::sync::*;
    }

    pub(crate) use core::sync::atomic;

    #[cfg(feature = "std")]
    pub use std::thread;

    pub(crate) mod hint {
        #[inline(always)]
        pub(crate) fn spin_loop() {
            // MSRV: std::hint::spin_loop() stabilized in 1.49.0
            #[allow(deprecated)]
            super::atomic::spin_loop_hint()
        }
    }

    pub(crate) mod cell {
        #[derive(Debug)]
        pub(crate) struct UnsafeCell<T>(core::cell::UnsafeCell<T>);

        impl<T> UnsafeCell<T> {
            pub const fn new(data: T) -> UnsafeCell<T> {
                UnsafeCell(core::cell::UnsafeCell::new(data))
            }

            #[inline(always)]
            pub fn with<F, R>(&self, f: F) -> R
            where
                F: FnOnce(*const T) -> R,
            {
                f(self.0.get())
            }

            #[inline(always)]
            pub fn with_mut<F, R>(&self, f: F) -> R
            where
                F: FnOnce(*mut T) -> R,
            {
                f(self.0.get())
            }

            #[inline(always)]
            pub(crate) fn get_mut(&self) -> MutPtr<T> {
                MutPtr(self.0.get())
            }
        }

        #[derive(Debug)]
        pub(crate) struct MutPtr<T: ?Sized>(*mut T);

        impl<T: ?Sized> MutPtr<T> {
            // Clippy knows that it's Bad and Wrong to construct a mutable reference
            // from an immutable one...but this function is intended to simulate a raw
            // pointer, so we have to do that here.
            #[allow(clippy::mut_from_ref)]
            #[inline(always)]
            pub(crate) unsafe fn deref(&self) -> &mut T {
                &mut *self.0
            }

            #[inline(always)]
            pub fn with<F, R>(&self, f: F) -> R
            where
                F: FnOnce(*mut T) -> R,
            {
                f(self.0)
            }
        }
    }
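
    // Hedged usage sketch (not in the original source): this shim mirrors the
    // closure-based `loom::cell::UnsafeCell` API, so the same call sites compile
    // both with and without loom, e.g.:
    //
    //     let cell = cell::UnsafeCell::new(0u32);
    //     cell.with_mut(|ptr| unsafe { *ptr += 1 });
    //     let value = cell.with(|ptr| unsafe { *ptr });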

    pub(crate) mod alloc {
        /// Track allocations, detecting leaks
        #[derive(Debug, Default)]
        pub struct Track<T> {
            value: T,
        }

        impl<T> Track<T> {
            /// Track a value for leaks
            #[inline(always)]
            pub fn new(value: T) -> Track<T> {
                Track { value }
            }

            /// Get a reference to the value
            #[inline(always)]
            pub fn get_ref(&self) -> &T {
                &self.value
            }

            /// Get a mutable reference to the value
            #[inline(always)]
            pub fn get_mut(&mut self) -> &mut T {
                &mut self.value
            }

            /// Stop tracking the value for leaks
            #[inline(always)]
            pub fn into_inner(self) -> T {
                self.value
            }
        }
    }

    #[cfg(feature = "std")]
    pub(crate) fn traceln(args: std::fmt::Arguments) {
        eprintln!("{}", args);
    }

    #[cfg(not(feature = "std"))]
    pub(crate) fn traceln(_: core::fmt::Arguments) {}
}