// binary.rs — RISC-V SBI specification, Chapter 3 "Binary Encoding".
  1. //! Chapter 3. Binary Encoding.
  2. use core::marker::PhantomData;
/// SBI functions return type.
///
/// > SBI functions must return a pair of values in a0 and a1,
/// > with a0 returning an error code.
/// > This is analogous to returning the C structure `SbiRet`.
///
/// Note: if this structure is used in function return on conventional
/// Rust code, it would not require pinning memory representation as
/// extern C. The `repr(C)` is set in case that some users want to use
/// this structure in FFI code.
#[derive(Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct SbiRet<T = usize> {
    /// Error number; `RET_SUCCESS` (zero) when the call succeeded.
    pub error: T,
    /// Result value; the error constructors set this to zero.
    pub value: T,
}
// Module-level convenience constants: the standard SBI return codes for the
// default `usize` register type, mirroring `<usize as SbiRegister>`.
/// SBI success state return value.
#[doc(alias = "SBI_SUCCESS")]
pub const RET_SUCCESS: usize = <usize as SbiRegister>::RET_SUCCESS;
/// Error for SBI call failed for unknown reasons.
#[doc(alias = "SBI_ERR_FAILED")]
pub const RET_ERR_FAILED: usize = <usize as SbiRegister>::RET_ERR_FAILED;
/// Error for target operation not supported.
#[doc(alias = "SBI_ERR_NOT_SUPPORTED")]
pub const RET_ERR_NOT_SUPPORTED: usize = <usize as SbiRegister>::RET_ERR_NOT_SUPPORTED;
/// Error for invalid parameter.
#[doc(alias = "SBI_ERR_INVALID_PARAM")]
pub const RET_ERR_INVALID_PARAM: usize = <usize as SbiRegister>::RET_ERR_INVALID_PARAM;
/// Error for denied.
#[doc(alias = "SBI_ERR_DENIED")]
pub const RET_ERR_DENIED: usize = <usize as SbiRegister>::RET_ERR_DENIED;
/// Error for invalid address.
#[doc(alias = "SBI_ERR_INVALID_ADDRESS")]
pub const RET_ERR_INVALID_ADDRESS: usize = <usize as SbiRegister>::RET_ERR_INVALID_ADDRESS;
/// Error for resource already available.
#[doc(alias = "SBI_ERR_ALREADY_AVAILABLE")]
pub const RET_ERR_ALREADY_AVAILABLE: usize = <usize as SbiRegister>::RET_ERR_ALREADY_AVAILABLE;
/// Error for resource already started.
#[doc(alias = "SBI_ERR_ALREADY_STARTED")]
pub const RET_ERR_ALREADY_STARTED: usize = <usize as SbiRegister>::RET_ERR_ALREADY_STARTED;
/// Error for resource already stopped.
#[doc(alias = "SBI_ERR_ALREADY_STOPPED")]
pub const RET_ERR_ALREADY_STOPPED: usize = <usize as SbiRegister>::RET_ERR_ALREADY_STOPPED;
/// Error for shared memory not available.
#[doc(alias = "SBI_ERR_NO_SHMEM")]
pub const RET_ERR_NO_SHMEM: usize = <usize as SbiRegister>::RET_ERR_NO_SHMEM;
/// Error for invalid state.
#[doc(alias = "SBI_ERR_INVALID_STATE")]
pub const RET_ERR_INVALID_STATE: usize = <usize as SbiRegister>::RET_ERR_INVALID_STATE;
/// Error for bad or invalid range.
#[doc(alias = "SBI_ERR_BAD_RANGE")]
pub const RET_ERR_BAD_RANGE: usize = <usize as SbiRegister>::RET_ERR_BAD_RANGE;
/// Error for failed due to timeout.
#[doc(alias = "SBI_ERR_TIMEOUT")]
pub const RET_ERR_TIMEOUT: usize = <usize as SbiRegister>::RET_ERR_TIMEOUT;
/// Error for input or output error.
#[doc(alias = "SBI_ERR_IO")]
pub const RET_ERR_IO: usize = <usize as SbiRegister>::RET_ERR_IO;
/// Data type of register that can be passed to the RISC-V SBI ABI.
///
/// This trait defines the requirements for types that are used as the underlying
/// representation for both the `value` and `error` fields in the `SbiRet` structure.
/// In most cases, this trait is implemented for primitive integer types (e.g., `usize`),
/// but it can also be implemented for other types that satisfy the constraints.
///
/// # Examples
///
/// This trait is implemented for the primitive integer types `usize`, `isize`,
/// `u32`, `i32`, `u64`, `i64`, `u128` and `i128` via the `impl_sbi_register!`
/// macro below; any custom implementation must also satisfy the
/// `Copy + Eq + Ord + Debug` bounds.
pub trait SbiRegister: Copy + Eq + Ord + core::fmt::Debug {
    /// SBI success state return value.
    const RET_SUCCESS: Self;
    /// Error for SBI call failed for unknown reasons.
    const RET_ERR_FAILED: Self;
    /// Error for target operation not supported.
    const RET_ERR_NOT_SUPPORTED: Self;
    /// Error for invalid parameter.
    const RET_ERR_INVALID_PARAM: Self;
    /// Error for denied.
    const RET_ERR_DENIED: Self;
    /// Error for invalid address.
    const RET_ERR_INVALID_ADDRESS: Self;
    /// Error for resource already available.
    const RET_ERR_ALREADY_AVAILABLE: Self;
    /// Error for resource already started.
    const RET_ERR_ALREADY_STARTED: Self;
    /// Error for resource already stopped.
    const RET_ERR_ALREADY_STOPPED: Self;
    /// Error for shared memory not available.
    const RET_ERR_NO_SHMEM: Self;
    /// Error for invalid state.
    const RET_ERR_INVALID_STATE: Self;
    /// Error for bad or invalid range.
    const RET_ERR_BAD_RANGE: Self;
    /// Error for failed due to timeout.
    const RET_ERR_TIMEOUT: Self;
    /// Error for input or output error.
    const RET_ERR_IO: Self;
    /// Zero value for this type; this is used on `value` fields once `SbiRet` returns an error.
    const ZERO: Self;
    /// Full-ones value for this type; this is used on SBI mask structures like `CounterMask`
    /// and `HartMask`.
    const FULL_MASK: Self;
    /// Converts an `SbiRet` of this type to a `Result` of self and `Error`.
    fn into_result(ret: SbiRet<Self>) -> Result<Self, Error<Self>>;
}
// Implements `SbiRegister` for a (possibly unsigned) register type `$ty`,
// using its signed counterpart `$signed` to spell the negative error codes.
//
// Note on `-1 as $signed as $ty`: unary minus binds looser than `as`, so this
// parses as `((-1) as $signed) as $ty`, i.e. the two's-complement bit pattern
// of the negative code reinterpreted in the register type.
macro_rules! impl_sbi_register {
    ($ty:ty, $signed:ty) => {
        impl SbiRegister for $ty {
            const RET_SUCCESS: Self = 0;
            const RET_ERR_FAILED: Self = -1 as $signed as $ty;
            const RET_ERR_NOT_SUPPORTED: Self = -2 as $signed as $ty;
            const RET_ERR_INVALID_PARAM: Self = -3 as $signed as $ty;
            const RET_ERR_DENIED: Self = -4 as $signed as $ty;
            const RET_ERR_INVALID_ADDRESS: Self = -5 as $signed as $ty;
            const RET_ERR_ALREADY_AVAILABLE: Self = -6 as $signed as $ty;
            const RET_ERR_ALREADY_STARTED: Self = -7 as $signed as $ty;
            const RET_ERR_ALREADY_STOPPED: Self = -8 as $signed as $ty;
            const RET_ERR_NO_SHMEM: Self = -9 as $signed as $ty;
            const RET_ERR_INVALID_STATE: Self = -10 as $signed as $ty;
            const RET_ERR_BAD_RANGE: Self = -11 as $signed as $ty;
            const RET_ERR_TIMEOUT: Self = -12 as $signed as $ty;
            const RET_ERR_IO: Self = -13 as $signed as $ty;
            const ZERO: Self = 0;
            const FULL_MASK: Self = !0;
            // Maps each standard error code to its `Error` variant; codes the
            // spec does not define are preserved in `Error::Custom`.
            fn into_result(ret: SbiRet<Self>) -> Result<Self, Error<Self>> {
                match ret.error {
                    Self::RET_SUCCESS => Ok(ret.value),
                    Self::RET_ERR_FAILED => Err(Error::Failed),
                    Self::RET_ERR_NOT_SUPPORTED => Err(Error::NotSupported),
                    Self::RET_ERR_INVALID_PARAM => Err(Error::InvalidParam),
                    Self::RET_ERR_DENIED => Err(Error::Denied),
                    Self::RET_ERR_INVALID_ADDRESS => Err(Error::InvalidAddress),
                    Self::RET_ERR_ALREADY_AVAILABLE => Err(Error::AlreadyAvailable),
                    Self::RET_ERR_ALREADY_STARTED => Err(Error::AlreadyStarted),
                    Self::RET_ERR_ALREADY_STOPPED => Err(Error::AlreadyStopped),
                    Self::RET_ERR_NO_SHMEM => Err(Error::NoShmem),
                    Self::RET_ERR_INVALID_STATE => Err(Error::InvalidState),
                    Self::RET_ERR_BAD_RANGE => Err(Error::BadRange),
                    Self::RET_ERR_TIMEOUT => Err(Error::Timeout),
                    Self::RET_ERR_IO => Err(Error::Io),
                    unknown => Err(Error::Custom(unknown as _)),
                }
            }
        }
    };
}
// Implement `SbiRegister` for every primitive integer width an SBI register
// may use: pointer-sized, 32-, 64- and 128-bit, both unsigned and signed.
impl_sbi_register!(usize, isize);
impl_sbi_register!(isize, isize);
impl_sbi_register!(u32, i32);
impl_sbi_register!(i32, i32);
impl_sbi_register!(u64, i64);
impl_sbi_register!(i64, i64);
impl_sbi_register!(u128, i128);
impl_sbi_register!(i128, i128);
  159. impl<T: SbiRegister + core::fmt::LowerHex> core::fmt::Debug for SbiRet<T> {
  160. fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
  161. match T::into_result(*self) {
  162. Ok(value) => write!(f, "{:?}", value),
  163. Err(err) => match err {
  164. Error::Failed => write!(f, "<SBI call failed>"),
  165. Error::NotSupported => write!(f, "<SBI feature not supported>"),
  166. Error::InvalidParam => write!(f, "<SBI invalid parameter>"),
  167. Error::Denied => write!(f, "<SBI denied>"),
  168. Error::InvalidAddress => write!(f, "<SBI invalid address>"),
  169. Error::AlreadyAvailable => write!(f, "<SBI already available>"),
  170. Error::AlreadyStarted => write!(f, "<SBI already started>"),
  171. Error::AlreadyStopped => write!(f, "<SBI already stopped>"),
  172. Error::NoShmem => write!(f, "<SBI shared memory not available>"),
  173. Error::InvalidState => write!(f, "<SBI invalid state>"),
  174. Error::BadRange => write!(f, "<SBI bad range>"),
  175. Error::Timeout => write!(f, "<SBI timeout>"),
  176. Error::Io => write!(f, "<SBI input/output error>"),
  177. Error::Custom(unknown) => write!(f, "[SBI Unknown error: {:#x}]", unknown),
  178. },
  179. }
  180. }
  181. }
/// RISC-V SBI error in enumeration.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Error<T = usize> {
    /// Error for SBI call failed for unknown reasons.
    Failed,
    /// Error for target operation not supported.
    NotSupported,
    /// Error for invalid parameter.
    InvalidParam,
    /// Error for denied.
    Denied,
    /// Error for invalid address.
    InvalidAddress,
    /// Error for resource already available.
    AlreadyAvailable,
    /// Error for resource already started.
    AlreadyStarted,
    /// Error for resource already stopped.
    AlreadyStopped,
    /// Error for shared memory not available.
    NoShmem,
    /// Error for invalid state.
    InvalidState,
    /// Error for bad or invalid range.
    BadRange,
    /// Error for failed due to timeout.
    Timeout,
    /// Error for input or output error.
    Io,
    /// Custom error code, carrying the raw register value that did not match
    /// any standard SBI error constant.
    Custom(T),
}
  214. impl<T: SbiRegister> SbiRet<T> {
  215. /// Returns success SBI state with given `value`.
  216. #[inline]
  217. pub const fn success(value: T) -> Self {
  218. Self {
  219. error: T::RET_SUCCESS,
  220. value,
  221. }
  222. }
  223. /// The SBI call request failed for unknown reasons.
  224. #[inline]
  225. pub const fn failed() -> Self {
  226. Self {
  227. error: T::RET_ERR_FAILED,
  228. value: T::ZERO,
  229. }
  230. }
  231. /// SBI call failed due to not supported by target ISA,
  232. /// operation type not supported,
  233. /// or target operation type not implemented on purpose.
  234. #[inline]
  235. pub const fn not_supported() -> Self {
  236. Self {
  237. error: T::RET_ERR_NOT_SUPPORTED,
  238. value: T::ZERO,
  239. }
  240. }
  241. /// SBI call failed due to invalid hart mask parameter,
  242. /// invalid target hart id,
  243. /// invalid operation type,
  244. /// or invalid resource index.
  245. #[inline]
  246. pub const fn invalid_param() -> Self {
  247. Self {
  248. error: T::RET_ERR_INVALID_PARAM,
  249. value: T::ZERO,
  250. }
  251. }
  252. /// SBI call denied for unsatisfied entry criteria, or insufficient access
  253. /// permission to debug console or CPPC register.
  254. #[inline]
  255. pub const fn denied() -> Self {
  256. Self {
  257. error: T::RET_ERR_DENIED,
  258. value: T::ZERO,
  259. }
  260. }
  261. /// SBI call failed for invalid mask start address,
  262. /// not a valid physical address parameter,
  263. /// or the target address is prohibited by PMP to run in supervisor mode.
  264. #[inline]
  265. pub const fn invalid_address() -> Self {
  266. Self {
  267. error: T::RET_ERR_INVALID_ADDRESS,
  268. value: T::ZERO,
  269. }
  270. }
  271. /// SBI call failed for the target resource is already available,
  272. /// e.g., the target hart is already started when caller still requests it to start.
  273. #[inline]
  274. pub const fn already_available() -> Self {
  275. Self {
  276. error: T::RET_ERR_ALREADY_AVAILABLE,
  277. value: T::ZERO,
  278. }
  279. }
  280. /// SBI call failed for the target resource is already started,
  281. /// e.g., target performance counter is started.
  282. #[inline]
  283. pub const fn already_started() -> Self {
  284. Self {
  285. error: T::RET_ERR_ALREADY_STARTED,
  286. value: T::ZERO,
  287. }
  288. }
  289. /// SBI call failed for the target resource is already stopped,
  290. /// e.g., target performance counter is stopped.
  291. #[inline]
  292. pub const fn already_stopped() -> Self {
  293. Self {
  294. error: T::RET_ERR_ALREADY_STOPPED,
  295. value: T::ZERO,
  296. }
  297. }
  298. /// SBI call failed for shared memory is not available,
  299. /// e.g. nested acceleration shared memory is not available.
  300. #[inline]
  301. pub const fn no_shmem() -> Self {
  302. Self {
  303. error: T::RET_ERR_NO_SHMEM,
  304. value: T::ZERO,
  305. }
  306. }
  307. /// SBI call failed for invalid state,
  308. /// e.g. register a software event but the event is not in unused state.
  309. #[inline]
  310. pub const fn invalid_state() -> Self {
  311. Self {
  312. error: T::RET_ERR_INVALID_STATE,
  313. value: T::ZERO,
  314. }
  315. }
  316. /// SBI call failed for bad or invalid range,
  317. /// e.g. the software event is not exist in the specified range.
  318. #[inline]
  319. pub const fn bad_range() -> Self {
  320. Self {
  321. error: T::RET_ERR_BAD_RANGE,
  322. value: T::ZERO,
  323. }
  324. }
  325. /// SBI call failed for timeout,
  326. /// e.g. message send timeout.
  327. #[inline]
  328. pub const fn timeout() -> Self {
  329. Self {
  330. error: T::RET_ERR_TIMEOUT,
  331. value: T::ZERO,
  332. }
  333. }
  334. /// SBI call failed for input or output error.
  335. #[inline]
  336. pub const fn io() -> Self {
  337. Self {
  338. error: T::RET_ERR_IO,
  339. value: T::ZERO,
  340. }
  341. }
  342. }
  343. impl<T: SbiRegister> From<Error<T>> for SbiRet<T> {
  344. #[inline]
  345. fn from(value: Error<T>) -> Self {
  346. match value {
  347. Error::Failed => SbiRet::failed(),
  348. Error::NotSupported => SbiRet::not_supported(),
  349. Error::InvalidParam => SbiRet::invalid_param(),
  350. Error::Denied => SbiRet::denied(),
  351. Error::InvalidAddress => SbiRet::invalid_address(),
  352. Error::AlreadyAvailable => SbiRet::already_available(),
  353. Error::AlreadyStarted => SbiRet::already_started(),
  354. Error::AlreadyStopped => SbiRet::already_stopped(),
  355. Error::NoShmem => SbiRet::no_shmem(),
  356. Error::InvalidState => SbiRet::invalid_state(),
  357. Error::BadRange => SbiRet::bad_range(),
  358. Error::Timeout => SbiRet::timeout(),
  359. Error::Io => SbiRet::io(),
  360. Error::Custom(error) => SbiRet {
  361. error,
  362. value: T::ZERO,
  363. },
  364. }
  365. }
  366. }
  367. impl SbiRet {
    /// Converts to a [`Result`] of value and error.
    ///
    /// Decodes the raw `error` register against the module-level `RET_*`
    /// constant patterns; any code the spec does not define is preserved
    /// verbatim in [`Error::Custom`].
    #[inline]
    pub const fn into_result(self) -> Result<usize, Error> {
        match self.error {
            RET_SUCCESS => Ok(self.value),
            RET_ERR_FAILED => Err(Error::Failed),
            RET_ERR_NOT_SUPPORTED => Err(Error::NotSupported),
            RET_ERR_INVALID_PARAM => Err(Error::InvalidParam),
            RET_ERR_DENIED => Err(Error::Denied),
            RET_ERR_INVALID_ADDRESS => Err(Error::InvalidAddress),
            RET_ERR_ALREADY_AVAILABLE => Err(Error::AlreadyAvailable),
            RET_ERR_ALREADY_STARTED => Err(Error::AlreadyStarted),
            RET_ERR_ALREADY_STOPPED => Err(Error::AlreadyStopped),
            RET_ERR_NO_SHMEM => Err(Error::NoShmem),
            RET_ERR_INVALID_STATE => Err(Error::InvalidState),
            RET_ERR_BAD_RANGE => Err(Error::BadRange),
            RET_ERR_TIMEOUT => Err(Error::Timeout),
            RET_ERR_IO => Err(Error::Io),
            unknown => Err(Error::Custom(unknown as _)),
        }
    }
  389. /// Returns `true` if current SBI return succeeded.
  390. ///
  391. /// # Examples
  392. ///
  393. /// Basic usage:
  394. ///
  395. /// ```
  396. /// # use sbi_spec::binary::SbiRet;
  397. /// let x = SbiRet::success(0);
  398. /// assert_eq!(x.is_ok(), true);
  399. ///
  400. /// let x = SbiRet::failed();
  401. /// assert_eq!(x.is_ok(), false);
  402. /// ```
  403. #[must_use = "if you intended to assert that this is ok, consider `.unwrap()` instead"]
  404. #[inline]
  405. pub const fn is_ok(&self) -> bool {
  406. matches!(self.error, RET_SUCCESS)
  407. }
  408. /// Returns `true` if the SBI call succeeded and the value inside of it matches a predicate.
  409. ///
  410. /// # Examples
  411. ///
  412. /// Basic usage:
  413. ///
  414. /// ```
  415. /// # use sbi_spec::binary::SbiRet;
  416. /// let x = SbiRet::success(2);
  417. /// assert_eq!(x.is_ok_and(|x| x > 1), true);
  418. ///
  419. /// let x = SbiRet::success(0);
  420. /// assert_eq!(x.is_ok_and(|x| x > 1), false);
  421. ///
  422. /// let x = SbiRet::no_shmem();
  423. /// assert_eq!(x.is_ok_and(|x| x > 1), false);
  424. /// ```
  425. #[must_use]
  426. #[inline]
  427. pub fn is_ok_and(self, f: impl FnOnce(usize) -> bool) -> bool {
  428. self.into_result().is_ok_and(f)
  429. }
  430. /// Returns `true` if current SBI return is an error.
  431. ///
  432. /// # Examples
  433. ///
  434. /// Basic usage:
  435. ///
  436. /// ```
  437. /// # use sbi_spec::binary::SbiRet;
  438. /// let x = SbiRet::success(0);
  439. /// assert_eq!(x.is_err(), false);
  440. ///
  441. /// let x = SbiRet::not_supported();
  442. /// assert_eq!(x.is_err(), true);
  443. /// ```
  444. #[must_use = "if you intended to assert that this is err, consider `.unwrap_err()` instead"]
  445. #[inline]
  446. pub const fn is_err(&self) -> bool {
  447. !self.is_ok()
  448. }
  449. /// Returns `true` if the result is an error and the value inside of it matches a predicate.
  450. ///
  451. /// # Examples
  452. ///
  453. /// ```
  454. /// # use sbi_spec::binary::{SbiRet, Error};
  455. /// let x = SbiRet::denied();
  456. /// assert_eq!(x.is_err_and(|x| x == Error::Denied), true);
  457. ///
  458. /// let x = SbiRet::invalid_address();
  459. /// assert_eq!(x.is_err_and(|x| x == Error::Denied), false);
  460. ///
  461. /// let x = SbiRet::success(0);
  462. /// assert_eq!(x.is_err_and(|x| x == Error::Denied), false);
  463. /// ```
  464. #[must_use]
  465. #[inline]
  466. pub fn is_err_and(self, f: impl FnOnce(Error) -> bool) -> bool {
  467. self.into_result().is_err_and(f)
  468. }
  469. /// Converts from `SbiRet` to [`Option<usize>`].
  470. ///
  471. /// Converts `self` into an [`Option<usize>`], consuming `self`,
  472. /// and discarding the error, if any.
  473. ///
  474. /// # Examples
  475. ///
  476. /// Basic usage:
  477. ///
  478. /// ```
  479. /// # use sbi_spec::binary::SbiRet;
  480. /// let x = SbiRet::success(2);
  481. /// assert_eq!(x.ok(), Some(2));
  482. ///
  483. /// let x = SbiRet::invalid_param();
  484. /// assert_eq!(x.ok(), None);
  485. /// ```
  486. // fixme: should be pub const fn once this function in Result is stabilized in constant
  487. #[inline]
  488. pub fn ok(self) -> Option<usize> {
  489. self.into_result().ok()
  490. }
  491. /// Converts from `SbiRet` to [`Option<Error>`].
  492. ///
  493. /// Converts `self` into an [`Option<Error>`], consuming `self`,
  494. /// and discarding the success value, if any.
  495. ///
  496. /// # Examples
  497. ///
  498. /// Basic usage:
  499. ///
  500. /// ```
  501. /// # use sbi_spec::binary::{SbiRet, Error};
  502. /// let x = SbiRet::success(2);
  503. /// assert_eq!(x.err(), None);
  504. ///
  505. /// let x = SbiRet::denied();
  506. /// assert_eq!(x.err(), Some(Error::Denied));
  507. /// ```
  508. // fixme: should be pub const fn once this function in Result is stabilized in constant
  509. #[inline]
  510. pub fn err(self) -> Option<Error> {
  511. self.into_result().err()
  512. }
    /// Maps a `SbiRet` to `Result<U, Error>` by applying a function to a
    /// contained success value, leaving an error value untouched.
    ///
    /// This function can be used to compose the results of two functions.
    ///
    /// # Examples
    ///
    /// Gets detail of a PMU counter and judge if it is a firmware counter.
    ///
    /// ```
    /// # use sbi_spec::binary::SbiRet;
    /// # use core::mem::size_of;
    /// # mod sbi_rt {
    /// #     use sbi_spec::binary::SbiRet;
    /// #     const TYPE_MASK: usize = 1 << (core::mem::size_of::<usize>() * 8 - 1);
    /// #     pub fn pmu_counter_get_info(_: usize) -> SbiRet { SbiRet::success(TYPE_MASK) }
    /// # }
    /// // We assume that counter index 42 is a firmware counter.
    /// let counter_idx = 42;
    /// // Masks PMU counter type by setting highest bit in `usize`.
    /// // Note `size_of` is in bytes, so the highest bit index is 8 * size - 1.
    /// const TYPE_MASK: usize = 1 << (size_of::<usize>() * 8 - 1);
    /// // Highest bit of returned `counter_info` represents whether it's
    /// // a firmware counter or a hardware counter.
    /// let is_firmware_counter = sbi_rt::pmu_counter_get_info(counter_idx)
    ///     .map(|counter_info| counter_info & TYPE_MASK != 0);
    /// // If that bit is set, it is a firmware counter.
    /// assert_eq!(is_firmware_counter, Ok(true));
    /// ```
    #[inline]
    pub fn map<U, F: FnOnce(usize) -> U>(self, op: F) -> Result<U, Error> {
        self.into_result().map(op)
    }
  545. /// Returns the provided default (if error),
  546. /// or applies a function to the contained value (if success).
  547. ///
  548. /// Arguments passed to `map_or` are eagerly evaluated;
  549. /// if you are passing the result of a function call,
  550. /// it is recommended to use [`map_or_else`],
  551. /// which is lazily evaluated.
  552. ///
  553. /// [`map_or_else`]: SbiRet::map_or_else
  554. ///
  555. /// # Examples
  556. ///
  557. /// ```
  558. /// # use sbi_spec::binary::SbiRet;
  559. /// let x = SbiRet::success(3);
  560. /// assert_eq!(x.map_or(42, |v| v & 0b1), 1);
  561. ///
  562. /// let x = SbiRet::invalid_address();
  563. /// assert_eq!(x.map_or(42, |v| v & 0b1), 42);
  564. /// ```
  565. #[inline]
  566. pub fn map_or<U, F: FnOnce(usize) -> U>(self, default: U, f: F) -> U {
  567. self.into_result().map_or(default, f)
  568. }
  569. /// Maps a `SbiRet` to `usize` value by applying fallback function `default` to
  570. /// a contained error, or function `f` to a contained success value.
  571. ///
  572. /// This function can be used to unpack a successful result
  573. /// while handling an error.
  574. ///
  575. /// # Examples
  576. ///
  577. /// Basic usage:
  578. ///
  579. /// ```
  580. /// # use sbi_spec::binary::SbiRet;
  581. /// let k = 21;
  582. ///
  583. /// let x = SbiRet::success(3);
  584. /// assert_eq!(x.map_or_else(|e| k * 2, |v| v & 0b1), 1);
  585. ///
  586. /// let x = SbiRet::already_available();
  587. /// assert_eq!(x.map_or_else(|e| k * 2, |v| v & 0b1), 42);
  588. /// ```
  589. #[inline]
  590. pub fn map_or_else<U, D: FnOnce(Error) -> U, F: FnOnce(usize) -> U>(
  591. self,
  592. default: D,
  593. f: F,
  594. ) -> U {
  595. self.into_result().map_or_else(default, f)
  596. }
  597. /// Maps a `SbiRet` to `Result<T, F>` by applying a function to a
  598. /// contained error as [`Error`] struct, leaving success value untouched.
  599. ///
  600. /// This function can be used to pass through a successful result while handling
  601. /// an error.
  602. ///
  603. /// # Examples
  604. ///
  605. /// Basic usage:
  606. ///
  607. /// ```
  608. /// # use sbi_spec::binary::{SbiRet, Error};
  609. /// fn stringify(x: Error) -> String {
  610. /// if x == Error::AlreadyStarted {
  611. /// "error: already started!".to_string()
  612. /// } else {
  613. /// "error: other error!".to_string()
  614. /// }
  615. /// }
  616. ///
  617. /// let x = SbiRet::success(2);
  618. /// assert_eq!(x.map_err(stringify), Ok(2));
  619. ///
  620. /// let x = SbiRet::already_started();
  621. /// assert_eq!(x.map_err(stringify), Err("error: already started!".to_string()));
  622. /// ```
  623. #[inline]
  624. pub fn map_err<F, O: FnOnce(Error) -> F>(self, op: O) -> Result<usize, F> {
  625. self.into_result().map_err(op)
  626. }
  627. /// Calls a function with a reference to the contained value if current SBI call succeeded.
  628. ///
  629. /// Returns the original result.
  630. ///
  631. /// # Examples
  632. ///
  633. /// ```
  634. /// # use sbi_spec::binary::SbiRet;
  635. /// // Assume that SBI debug console have read 512 bytes into a buffer.
  636. /// let ret = SbiRet::success(512);
  637. /// // Inspect the SBI DBCN call result.
  638. /// let idx = ret
  639. /// .inspect(|x| println!("bytes written: {x}"))
  640. /// .map(|x| x - 1)
  641. /// .expect("SBI DBCN call failed");
  642. /// assert_eq!(idx, 511);
  643. /// ```
  644. #[inline]
  645. pub fn inspect<F: FnOnce(&usize)>(self, f: F) -> Self {
  646. if let Ok(ref t) = self.into_result() {
  647. f(t);
  648. }
  649. self
  650. }
    /// Calls a function with a reference to the contained error if current SBI result is an error.
    ///
    /// Returns the original result.
    ///
    /// # Examples
    ///
    /// ```
    /// # use sbi_spec::binary::SbiRet;
    /// // Assume that SBI debug console write operation failed for invalid parameter.
    /// let ret = SbiRet::invalid_param();
    /// // Print the error if SBI DBCN call failed.
    /// let ret = ret.inspect_err(|e| eprintln!("failed to write to SBI console: {e:?}"));
    /// ```
    #[inline]
    pub fn inspect_err<F: FnOnce(&Error)>(self, f: F) -> Self {
        if let Err(ref e) = self.into_result() {
            f(e);
        }
        self
    }
  671. // TODO: pub fn iter(&self) -> Iter
  672. // TODO: pub fn iter_mut(&mut self) -> IterMut
    /// Returns the contained success value, consuming the `self` value.
    ///
    /// # Panics
    ///
    /// Panics if self is an SBI error with a panic message including the
    /// passed message, and the content of the SBI state.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```should_panic
    /// # use sbi_spec::binary::SbiRet;
    /// let x = SbiRet::already_stopped();
    /// x.expect("Testing expect"); // panics with `Testing expect`
    /// ```
    #[inline]
    pub fn expect(self, msg: &str) -> usize {
        // Delegates to `Result::expect` so the panic message format matches
        // the standard library's.
        self.into_result().expect(msg)
    }
    /// Returns the contained success value, consuming the `self` value.
    ///
    /// # Panics
    ///
    /// Panics if self is an SBI error, with a panic message provided by the
    /// SBI error converted into [`Error`] struct.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// # use sbi_spec::binary::SbiRet;
    /// let x = SbiRet::success(2);
    /// assert_eq!(x.unwrap(), 2);
    /// ```
    ///
    /// ```should_panic
    /// # use sbi_spec::binary::SbiRet;
    /// let x = SbiRet::failed();
    /// x.unwrap(); // panics
    /// ```
    #[inline]
    pub fn unwrap(self) -> usize {
        self.into_result().unwrap()
    }
  719. // Note: No unwrap_or_default as we cannot determine a meaningful default value for a successful SbiRet.
    /// Returns the contained error as [`Error`] struct, consuming the `self` value.
    ///
    /// # Panics
    ///
    /// Panics if the self is SBI success value, with a panic message
    /// including the passed message, and the content of the success value.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```should_panic
    /// # use sbi_spec::binary::SbiRet;
    /// let x = SbiRet::success(10);
    /// x.expect_err("Testing expect_err"); // panics with `Testing expect_err`
    /// ```
    #[inline]
    pub fn expect_err(self, msg: &str) -> Error {
        self.into_result().expect_err(msg)
    }
    /// Returns the contained error as [`Error`] struct, consuming the `self` value.
    ///
    /// # Panics
    ///
    /// Panics if the self is SBI success value, with a custom panic message provided
    /// by the success value.
    ///
    /// # Examples
    ///
    /// ```should_panic
    /// # use sbi_spec::binary::SbiRet;
    /// let x = SbiRet::success(2);
    /// x.unwrap_err(); // panics with `2`
    /// ```
    ///
    /// ```
    /// # use sbi_spec::binary::{SbiRet, Error};
    /// let x = SbiRet::not_supported();
    /// assert_eq!(x.unwrap_err(), Error::NotSupported);
    /// ```
    #[inline]
    pub fn unwrap_err(self) -> Error {
        self.into_result().unwrap_err()
    }
  764. // TODO: pub fn into_ok(self) -> usize and pub fn into_err(self) -> Error
  765. // once `unwrap_infallible` is stabilized
    /// Returns `res` if self is success value, otherwise returns the contained error
    /// of `self` as [`Error`] struct.
    ///
    /// Arguments passed to `and` are eagerly evaluated; if you are passing the
    /// result of a function call, it is recommended to use [`and_then`], which is
    /// lazily evaluated.
    ///
    /// [`and_then`]: SbiRet::and_then
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// # use sbi_spec::binary::{SbiRet, Error};
    /// let x = SbiRet::success(2);
    /// let y = SbiRet::invalid_param().into_result();
    /// assert_eq!(x.and(y), Err(Error::InvalidParam));
    ///
    /// let x = SbiRet::denied();
    /// let y = SbiRet::success(3).into_result();
    /// assert_eq!(x.and(y), Err(Error::Denied));
    ///
    /// let x = SbiRet::invalid_address();
    /// let y = SbiRet::already_available().into_result();
    /// assert_eq!(x.and(y), Err(Error::InvalidAddress));
    ///
    /// let x = SbiRet::success(4);
    /// let y = SbiRet::success(5).into_result();
    /// assert_eq!(x.and(y), Ok(5));
    /// ```
    // fixme: should be pub const fn once this function in Result is stabilized in constant
    // fixme: should parameter be `res: SbiRet`?
    #[inline]
    pub fn and<U>(self, res: Result<U, Error>) -> Result<U, Error> {
        self.into_result().and(res)
    }
    /// Calls `op` if self is success value, otherwise returns the contained error
    /// as [`Error`] struct.
    ///
    /// This function can be used for control flow based on `SbiRet` values.
    ///
    /// # Examples
    ///
    /// ```
    /// # use sbi_spec::binary::{SbiRet, Error};
    /// fn sq_then_to_string(x: usize) -> Result<String, Error> {
    ///     x.checked_mul(x).map(|sq| sq.to_string()).ok_or(Error::Failed)
    /// }
    ///
    /// assert_eq!(SbiRet::success(2).and_then(sq_then_to_string), Ok(4.to_string()));
    /// assert_eq!(SbiRet::success(1_000_000_000_000).and_then(sq_then_to_string), Err(Error::Failed));
    /// assert_eq!(SbiRet::invalid_param().and_then(sq_then_to_string), Err(Error::InvalidParam));
    /// ```
    #[inline]
    pub fn and_then<U, F: FnOnce(usize) -> Result<U, Error>>(self, op: F) -> Result<U, Error> {
        self.into_result().and_then(op)
    }
    /// Returns `res` if self is SBI error, otherwise returns the success value of `self`.
    ///
    /// Arguments passed to `or` are eagerly evaluated; if you are passing the
    /// result of a function call, it is recommended to use [`or_else`], which is
    /// lazily evaluated.
    ///
    /// [`or_else`]: SbiRet::or_else
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// # use sbi_spec::binary::{SbiRet, Error};
    /// let x = SbiRet::success(2);
    /// let y = SbiRet::invalid_param().into_result();
    /// assert_eq!(x.or(y), Ok(2));
    ///
    /// let x = SbiRet::denied();
    /// let y = SbiRet::success(3).into_result();
    /// assert_eq!(x.or(y), Ok(3));
    ///
    /// let x = SbiRet::invalid_address();
    /// let y = SbiRet::already_available().into_result();
    /// assert_eq!(x.or(y), Err(Error::AlreadyAvailable));
    ///
    /// let x = SbiRet::success(4);
    /// let y = SbiRet::success(100).into_result();
    /// assert_eq!(x.or(y), Ok(4));
    /// ```
    // fixme: should be pub const fn once this function in Result is stabilized in constant
    // fixme: should parameter be `res: SbiRet`?
    #[inline]
    pub fn or<F>(self, res: Result<usize, F>) -> Result<usize, F> {
        self.into_result().or(res)
    }
    /// Calls `op` if self is SBI error, otherwise returns the success value of `self`.
    ///
    /// This function can be used for control flow based on result values.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// # use sbi_spec::binary::{SbiRet, Error};
    /// fn is_failed(x: Error) -> Result<usize, bool> { Err(x == Error::Failed) }
    ///
    /// assert_eq!(SbiRet::success(2).or_else(is_failed), Ok(2));
    /// assert_eq!(SbiRet::failed().or_else(is_failed), Err(true));
    /// ```
    #[inline]
    pub fn or_else<F, O: FnOnce(Error) -> Result<usize, F>>(self, op: O) -> Result<usize, F> {
        self.into_result().or_else(op)
    }
    /// Returns the contained success value or a provided default.
    ///
    /// Arguments passed to `unwrap_or` are eagerly evaluated; if you are passing
    /// the result of a function call, it is recommended to use [`unwrap_or_else`],
    /// which is lazily evaluated.
    ///
    /// [`unwrap_or_else`]: SbiRet::unwrap_or_else
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// # use sbi_spec::binary::SbiRet;
    /// let default = 2;
    /// let x = SbiRet::success(9);
    /// assert_eq!(x.unwrap_or(default), 9);
    ///
    /// let x = SbiRet::invalid_param();
    /// assert_eq!(x.unwrap_or(default), default);
    /// ```
    // fixme: should be pub const fn once this function in Result is stabilized in constant
    #[inline]
    pub fn unwrap_or(self, default: usize) -> usize {
        self.into_result().unwrap_or(default)
    }
    /// Returns the contained success value or computes it from a closure.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// # use sbi_spec::binary::{SbiRet, Error};
    /// fn invalid_use_zero(x: Error) -> usize { if x == Error::InvalidParam { 0 } else { 3 } }
    ///
    /// assert_eq!(SbiRet::success(2).unwrap_or_else(invalid_use_zero), 2);
    /// assert_eq!(SbiRet::invalid_param().unwrap_or_else(invalid_use_zero), 0);
    /// ```
    #[inline]
    pub fn unwrap_or_else<F: FnOnce(Error) -> usize>(self, op: F) -> usize {
        self.into_result().unwrap_or_else(op)
    }
    /// Returns the contained success value, consuming the `self` value,
    /// without checking whether the `SbiRet` is an error value.
    ///
    /// # Safety
    ///
    /// Calling this method on an `SbiRet` containing an error value results
    /// in *undefined behavior*.
    ///
    /// # Examples
    ///
    /// ```
    /// # use sbi_spec::binary::{SbiRet, Error};
    /// let x = SbiRet::success(3);
    /// assert_eq!(unsafe { x.unwrap_unchecked() }, 3);
    /// ```
    ///
    /// ```no_run
    /// # use sbi_spec::binary::SbiRet;
    /// let x = SbiRet::no_shmem();
    /// unsafe { x.unwrap_unchecked(); } // Undefined behavior!
    /// ```
    #[inline]
    pub unsafe fn unwrap_unchecked(self) -> usize {
        // SAFETY: the caller guarantees that `self` holds a success value.
        unsafe { self.into_result().unwrap_unchecked() }
    }
    /// Returns the contained `Error` value, consuming the `self` value,
    /// without checking that the `SbiRet` does not contain a success value.
    ///
    /// # Safety
    ///
    /// Calling this method on an `SbiRet` containing a success value results
    /// in *undefined behavior*.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use sbi_spec::binary::SbiRet;
    /// let x = SbiRet::success(4);
    /// unsafe { x.unwrap_err_unchecked(); } // Undefined behavior!
    /// ```
    ///
    /// ```
    /// # use sbi_spec::binary::{SbiRet, Error};
    /// let x = SbiRet::failed();
    /// assert_eq!(unsafe { x.unwrap_err_unchecked() }, Error::Failed);
    /// ```
    #[inline]
    pub unsafe fn unwrap_err_unchecked(self) -> Error {
        // SAFETY: the caller guarantees that `self` holds an error value.
        unsafe { self.into_result().unwrap_err_unchecked() }
    }
  973. }
impl IntoIterator for SbiRet {
    type Item = usize;

    type IntoIter = core::result::IntoIter<usize>;

    /// Returns a consuming iterator over the possibly contained value.
    ///
    /// The iterator yields one value if the result contains a success value, otherwise none.
    ///
    /// # Examples
    ///
    /// ```
    /// # use sbi_spec::binary::SbiRet;
    /// let x = SbiRet::success(5);
    /// let v: Vec<usize> = x.into_iter().collect();
    /// assert_eq!(v, [5]);
    ///
    /// let x = SbiRet::not_supported();
    /// let v: Vec<usize> = x.into_iter().collect();
    /// assert_eq!(v, []);
    /// ```
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.into_result().into_iter()
    }
}
// TODO: implement Try and FromResidual for SbiRet once those traits are stabilized
  999. /*
  1000. impl core::ops::Try for SbiRet {
  1001. type Output = usize;
  1002. type Residual = Result<core::convert::Infallible, Error>;
  1003. #[inline]
  1004. fn from_output(output: Self::Output) -> Self {
  1005. SbiRet::success(output)
  1006. }
  1007. #[inline]
  1008. fn branch(self) -> core::ops::ControlFlow<Self::Residual, Self::Output> {
  1009. self.into_result().branch()
  1010. }
  1011. }
  1012. impl core::ops::FromResidual<Result<core::convert::Infallible, Error>> for SbiRet {
  1013. #[inline]
  1014. #[track_caller]
  1015. fn from_residual(residual: Result<core::convert::Infallible, Error>) -> Self {
  1016. match residual {
  1017. Err(e) => e.into(),
  1018. }
  1019. }
  1020. }
  1021. /// ```
  1022. /// # use sbi_spec::binary::SbiRet;
  1023. /// fn test() -> SbiRet {
  1024. /// let value = SbiRet::failed()?;
  1025. /// SbiRet::success(0)
  1026. /// }
  1027. /// assert_eq!(test(), SbiRet::failed());
  1028. /// ```
  1029. mod test_try_trait_for_sbiret {}
  1030. */
  1031. /// Check if the implementation can contains the provided `bit`.
  1032. #[inline]
  1033. pub(crate) const fn valid_bit(base: usize, bit: usize) -> bool {
  1034. if bit < base {
  1035. // invalid index, under minimum range.
  1036. false
  1037. } else if (bit - base) >= usize::BITS as usize {
  1038. // invalid index, over max range.
  1039. false
  1040. } else {
  1041. true
  1042. }
  1043. }
  1044. /// Check if the implementation contains the provided `bit`.
  1045. ///
  1046. /// ## Parameters
  1047. ///
  1048. /// - `mask`: bitmask defining the range of bits.
  1049. /// - `base`: the starting bit index. (default: `0`)
  1050. /// - `ignore`: if `base` is equal to this value, ignore the `mask` parameter, and consider all `bit`s set.
  1051. /// - `bit`: the bit index to check for membership in the `mask`.
  1052. #[inline]
  1053. pub(crate) const fn has_bit(mask: usize, base: usize, ignore: usize, bit: usize) -> bool {
  1054. if base == ignore {
  1055. // ignore the `mask`, consider all `bit`s as set.
  1056. true
  1057. } else if !valid_bit(base, bit) {
  1058. false
  1059. } else {
  1060. // index is in range, check if it is set in the mask.
  1061. mask & (1 << (bit - base)) != 0
  1062. }
  1063. }
/// Hart mask structure in SBI function calls.
#[repr(C)]
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct HartMask<T = usize> {
    // Bitmask of selected harts, relative to `hart_mask_base`.
    hart_mask: T,
    // Starting hart id that bit 0 of `hart_mask` refers to.
    hart_mask_base: T,
}
  1071. impl<T: SbiRegister> HartMask<T> {
  1072. /// Special value to ignore the `mask`, and consider all `bit`s as set.
  1073. pub const IGNORE_MASK: T = T::FULL_MASK;
  1074. /// Construct a [HartMask] from mask value and base hart id.
  1075. #[inline]
  1076. pub const fn from_mask_base(hart_mask: T, hart_mask_base: T) -> Self {
  1077. Self {
  1078. hart_mask,
  1079. hart_mask_base,
  1080. }
  1081. }
  1082. /// Construct a [HartMask] that selects all available harts on the current environment.
  1083. ///
  1084. /// According to the RISC-V SBI Specification, `hart_mask_base` can be set to `-1` (i.e. `usize::MAX`)
  1085. /// to indicate that `hart_mask` shall be ignored and all available harts must be considered.
  1086. /// In case of this function in the `sbi-spec` crate, we fill in `usize::MAX` in `hart_mask_base`
  1087. /// parameter to match the RISC-V SBI standard, while choosing 0 as the ignored `hart_mask` value.
  1088. #[inline]
  1089. pub const fn all() -> Self {
  1090. Self {
  1091. hart_mask: T::ZERO,
  1092. hart_mask_base: T::FULL_MASK,
  1093. }
  1094. }
  1095. /// Gets the special value for ignoring the `mask` parameter.
  1096. #[inline]
  1097. pub const fn ignore_mask(&self) -> T {
  1098. Self::IGNORE_MASK
  1099. }
  1100. /// Returns `mask` and `base` parameters from the [HartMask].
  1101. #[inline]
  1102. pub const fn into_inner(self) -> (T, T) {
  1103. (self.hart_mask, self.hart_mask_base)
  1104. }
  1105. }
  1106. // FIXME: implement for T: SbiRegister once we can implement this using const traits.
  1107. // Ref: https://rust-lang.github.io/rust-project-goals/2024h2/const-traits.html
impl HartMask<usize> {
    /// Returns whether the [HartMask] contains the provided `hart_id`.
    #[inline]
    pub const fn has_bit(self, hart_id: usize) -> bool {
        has_bit(
            self.hart_mask,
            self.hart_mask_base,
            Self::IGNORE_MASK,
            hart_id,
        )
    }

    /// Insert a hart id into this [HartMask].
    ///
    /// Returns error when `hart_id` is invalid.
    /// When the mask is ignored (`hart_mask_base == IGNORE_MASK`) every hart is
    /// already considered selected, so insertion is a successful no-op.
    #[inline]
    pub const fn insert(&mut self, hart_id: usize) -> Result<(), MaskError> {
        if self.hart_mask_base == Self::IGNORE_MASK {
            Ok(())
        } else if valid_bit(self.hart_mask_base, hart_id) {
            self.hart_mask |= 1usize << (hart_id - self.hart_mask_base);
            Ok(())
        } else {
            Err(MaskError::InvalidBit)
        }
    }

    /// Remove a hart id from this [HartMask].
    ///
    /// Returns error when `hart_id` is invalid, or it has been ignored.
    #[inline]
    pub const fn remove(&mut self, hart_id: usize) -> Result<(), MaskError> {
        if self.hart_mask_base == Self::IGNORE_MASK {
            // An ignored mask selects all harts; a single id cannot be removed.
            Err(MaskError::Ignored)
        } else if valid_bit(self.hart_mask_base, hart_id) {
            self.hart_mask &= !(1usize << (hart_id - self.hart_mask_base));
            Ok(())
        } else {
            Err(MaskError::InvalidBit)
        }
    }

    /// Returns [HartIds] of self.
    #[inline]
    pub const fn iter(&self) -> HartIds {
        HartIds {
            inner: match self.hart_mask_base {
                // Ignored mask: iterate over every possible hart id.
                Self::IGNORE_MASK => UnvisitedMask::Range(0, usize::MAX),
                _ => UnvisitedMask::MaskBase(self.hart_mask, self.hart_mask_base),
            },
        }
    }
}
impl IntoIterator for HartMask {
    type Item = usize;

    type IntoIter = HartIds;

    /// Consumes the mask, yielding each selected hart id from low to high.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
/// Iterator structure for `HartMask`.
///
/// It will iterate hart id from low to high.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct HartIds {
    // The hart ids not yet yielded by this iterator.
    inner: UnvisitedMask,
}
// Internal state of `HartIds`: the set of hart ids still to be visited.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
enum UnvisitedMask {
    // Remaining mask bits and the base hart id they are relative to.
    MaskBase(usize, usize),
    // Contiguous span of remaining hart ids; used when the mask is ignored.
    Range(usize, usize),
}
impl Iterator for HartIds {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        match &mut self.inner {
            // Every mask bit has already been visited.
            UnvisitedMask::MaskBase(0, _base) => None,
            UnvisitedMask::MaskBase(unvisited_mask, base) => {
                // Yield the lowest remaining set bit as a hart id, then clear it.
                let low_bit = unvisited_mask.trailing_zeros();
                let hart_id = usize::try_from(low_bit).unwrap() + *base;
                *unvisited_mask &= !(1usize << low_bit);
                Some(hart_id)
            }
            UnvisitedMask::Range(start, end) => {
                assert!(start <= end);
                if *start < *end {
                    // Yield `start` and shrink the range from the front;
                    // `end` is exclusive.
                    let ans = *start;
                    *start += 1;
                    Some(ans)
                } else {
                    None
                }
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        match self.inner {
            UnvisitedMask::MaskBase(unvisited_mask, _base) => {
                // One hart id per remaining set bit, so popcount is exact.
                let exact_popcnt = usize::try_from(unvisited_mask.count_ones()).unwrap();
                (exact_popcnt, Some(exact_popcnt))
            }
            UnvisitedMask::Range(start, end) => {
                assert!(start <= end);
                let exact_num_harts = end - start;
                (exact_num_harts, Some(exact_num_harts))
            }
        }
    }

    #[inline]
    fn count(self) -> usize {
        // `size_hint` is exact for both variants; no need to drain the iterator.
        self.size_hint().0
    }

    #[inline]
    fn last(mut self) -> Option<Self::Item> {
        // Ids are yielded in ascending order, so the last one is the back element.
        self.next_back()
    }

    #[inline]
    fn min(mut self) -> Option<Self::Item> {
        // Ascending order: the first element is the minimum.
        self.next()
    }

    #[inline]
    fn max(mut self) -> Option<Self::Item> {
        // Ascending order: the back element is the maximum.
        self.next_back()
    }

    #[inline]
    fn is_sorted(self) -> bool {
        // Forward iteration always yields ascending hart ids.
        true
    }

    // TODO: implement fn advance_by once it's stabilized: https://github.com/rust-lang/rust/issues/77404
    // #[inline]
    // fn advance_by(&mut self, n: usize) -> Result<(), core::num::NonZero<usize>> { ... }
}
  1240. impl DoubleEndedIterator for HartIds {
  1241. #[inline]
  1242. fn next_back(&mut self) -> Option<Self::Item> {
  1243. match &mut self.inner {
  1244. UnvisitedMask::MaskBase(0, _base) => None,
  1245. UnvisitedMask::MaskBase(unvisited_mask, base) => {
  1246. let high_bit = unvisited_mask.leading_zeros();
  1247. let hart_id = usize::try_from(usize::BITS - high_bit - 1).unwrap() + *base;
  1248. *unvisited_mask &= !(1usize << (usize::BITS - high_bit - 1));
  1249. Some(hart_id)
  1250. }
  1251. UnvisitedMask::Range(start, end) => {
  1252. assert!(start <= end);
  1253. if *start < *end {
  1254. let ans = *end;
  1255. *end -= 1;
  1256. Some(ans)
  1257. } else {
  1258. None
  1259. }
  1260. }
  1261. }
  1262. }
  1263. // TODO: implement advance_back_by once stablized.
  1264. // #[inline]
  1265. // fn advance_back_by(&mut self, n: usize) -> Result<(), core::num::NonZero<usize>> { ... }
  1266. }
// `size_hint` always returns an exact element count, so the length is known.
impl ExactSizeIterator for HartIds {}

// Once `next` returns `None` (mask exhausted or range emptied), it keeps
// returning `None`, satisfying the `FusedIterator` contract.
impl core::iter::FusedIterator for HartIds {}
/// Error of mask modification.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum MaskError {
    /// The whole mask is ignored; individual bits cannot be modified.
    Ignored,
    /// The requested bit index is outside the range covered by this mask.
    InvalidBit,
}
/// Counter index mask structure in SBI function calls for the `PMU` extension §11.
#[repr(C)]
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct CounterMask<T = usize> {
    // Bitmask of selected counters, relative to `counter_idx_base`.
    counter_idx_mask: T,
    // Starting counter index that bit 0 of `counter_idx_mask` refers to.
    counter_idx_base: T,
}
  1284. impl<T: SbiRegister> CounterMask<T> {
  1285. /// Special value to ignore the `mask`, and consider all `bit`s as set.
  1286. pub const IGNORE_MASK: T = T::FULL_MASK;
  1287. /// Construct a [CounterMask] from mask value and base counter index.
  1288. #[inline]
  1289. pub const fn from_mask_base(counter_idx_mask: T, counter_idx_base: T) -> Self {
  1290. Self {
  1291. counter_idx_mask,
  1292. counter_idx_base,
  1293. }
  1294. }
  1295. /// Gets the special value for ignoring the `mask` parameter.
  1296. #[inline]
  1297. pub const fn ignore_mask(&self) -> T {
  1298. Self::IGNORE_MASK
  1299. }
  1300. /// Returns `mask` and `base` parameters from the [CounterMask].
  1301. #[inline]
  1302. pub const fn into_inner(self) -> (T, T) {
  1303. (self.counter_idx_mask, self.counter_idx_base)
  1304. }
  1305. }
  1306. // FIXME: implement for T: SbiRegister once we can implement this using const traits.
  1307. // Ref: https://rust-lang.github.io/rust-project-goals/2024h2/const-traits.html
  1308. impl CounterMask<usize> {
  1309. /// Returns whether the [CounterMask] contains the provided `counter`.
  1310. #[inline]
  1311. pub const fn has_bit(self, counter: usize) -> bool {
  1312. has_bit(
  1313. self.counter_idx_mask,
  1314. self.counter_idx_base,
  1315. Self::IGNORE_MASK,
  1316. counter,
  1317. )
  1318. }
  1319. }
/// Debug trigger mask structure for the `DBTR` extension §19.
#[repr(C)]
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct TriggerMask<T = usize> {
    // NOTE(review): `#[repr(C)]` field order here is base-then-mask, the
    // reverse of `HartMask`/`CounterMask` — presumably matching the DBTR
    // call's parameter layout; confirm against SBI spec §19.
    trig_idx_base: T,
    trig_idx_mask: T,
}
  1327. impl<T: SbiRegister> TriggerMask<T> {
  1328. /// Construct a [TriggerMask] from mask value and base counter index.
  1329. ///
  1330. /// The `trig_idx_base` specifies the starting trigger index, while the `trig_idx_mask` is a
  1331. /// bitmask indicating which triggers, relative to the base, are to be operated.
  1332. #[inline]
  1333. pub const fn from_mask_base(trig_idx_mask: T, trig_idx_base: T) -> Self {
  1334. Self {
  1335. trig_idx_mask,
  1336. trig_idx_base,
  1337. }
  1338. }
  1339. /// Returns `mask` and `base` parameters from the [TriggerMask].
  1340. #[inline]
  1341. pub const fn into_inner(self) -> (T, T) {
  1342. (self.trig_idx_mask, self.trig_idx_base)
  1343. }
  1344. }
/// Physical slice wrapper with type annotation.
///
/// This struct wraps slices in RISC-V physical memory by low and high part of the
/// physical base address as well as its length. It is usually used by SBI extensions
/// as parameter types to pass base address and length parameters on physical memory
/// other than a virtual one.
///
/// Generic parameter `P` represents a hint of how this physical slice would be used.
/// For example, `Physical<&[u8]>` represents an immutable reference to physical byte slice,
/// while `Physical<&mut [u8]>` represents a mutable one.
///
/// An SBI implementation should load or store memory using both `phys_addr_lo` and
/// `phys_addr_hi` combined as base address. A supervisor program (kernels etc.)
/// should provide continuous physical memory, wrapping its reference using this structure
/// before passing into SBI runtime.
#[derive(Clone, Copy)]
pub struct Physical<P> {
    // Length of the physical memory slice in bytes.
    num_bytes: usize,
    // Low part of the physical base address.
    phys_addr_lo: usize,
    // High part of the physical base address.
    phys_addr_hi: usize,
    // Marker for the usage-hint type parameter; no data is stored.
    _marker: PhantomData<P>,
}
  1367. impl<P> Physical<P> {
  1368. /// Create a physical memory slice by length and physical address.
  1369. #[inline]
  1370. pub const fn new(num_bytes: usize, phys_addr_lo: usize, phys_addr_hi: usize) -> Self {
  1371. Self {
  1372. num_bytes,
  1373. phys_addr_lo,
  1374. phys_addr_hi,
  1375. _marker: core::marker::PhantomData,
  1376. }
  1377. }
  1378. /// Returns length of the physical memory slice.
  1379. #[inline]
  1380. pub const fn num_bytes(&self) -> usize {
  1381. self.num_bytes
  1382. }
  1383. /// Returns low-part base address of physical memory slice.
  1384. #[inline]
  1385. pub const fn phys_addr_lo(&self) -> usize {
  1386. self.phys_addr_lo
  1387. }
  1388. /// Returns high-part base address of physical memory slice.
  1389. #[inline]
  1390. pub const fn phys_addr_hi(&self) -> usize {
  1391. self.phys_addr_hi
  1392. }
  1393. }
  1394. /// Shared memory physical address raw pointer with type annotation.
  1395. ///
  1396. /// This is a structure wrapping a raw pointer to the value of the type `T` without
  1397. /// a pointer metadata. `SharedPtr`'s are _thin_; they won't include metadata
  1398. /// as RISC-V SBI does not provide an approach to pass them via SBI calls,
  1399. /// thus the length of type `T` should be decided independently of raw
  1400. /// pointer structure.
  1401. ///
  1402. /// `SharedPtr` can be used as a parameter to pass the shared memory physical pointer
  1403. /// with a given base address in RISC-V SBI calls. For example, a `SharedPtr<[u8; 64]>`
  1404. /// would represent a fixed-size 64 byte array on a RISC-V SBI function argument
  1405. /// type.
  1406. ///
  1407. /// This structure cannot be dereferenced directly with physical addresses,
  1408. /// because on RISC-V systems the physical address space could be larger than the
  1409. /// virtual ones. Hence, this structure describes the physical memory range by
  1410. /// two `usize` values: the upper `phys_addr_hi` and lower `phys_addr_lo`.
  1411. ///
  1412. /// RISC-V SBI extensions may declare special pointer values for shared memory
  1413. /// raw pointers. For example, SBI STA declares that steal-time information
  1414. /// should stop from reporting when the SBI call is invoked using all-ones
  1415. /// bitwise shared pointer, i.e. `phys_addr_hi` and `phys_addr_lo` both equals
  1416. /// `usize::MAX`. `SharedPtr` can be constructed using such special values
  1417. /// by providing them to the `SharedPtr::new` function.
  1418. ///
  1419. /// # Requirements
  1420. ///
  1421. /// If an SBI function needs to pass a shared memory physical address range to
  1422. /// the SBI implementation (or higher privilege mode), then this physical memory
  1423. /// address range MUST satisfy the following requirements:
  1424. ///
  1425. /// * The SBI implementation MUST check that the supervisor-mode software is
  1426. /// allowed to access the specified physical memory range with the access
  1427. /// type requested (read and/or write).
  1428. /// * The SBI implementation MUST access the specified physical memory range
  1429. /// using the PMA attributes.
  1430. /// * The data in the shared memory MUST follow little-endian byte ordering.
  1431. ///
  1432. /// *NOTE:* If the supervisor-mode software accesses the same physical memory
  1433. /// range using a memory type different from the PMA, then a loss of coherence
  1434. /// or unexpected memory ordering may occur. The invoking software should
  1435. /// follow the rules and sequences defined in the RISC-V Svpbmt specification
  1436. /// to prevent the loss of coherence and memory ordering.
  1437. ///
  1438. /// It is recommended that a memory physical address passed to an SBI function
  1439. /// should use at least two `usize` parameters to support platforms
  1440. /// which have memory physical addresses wider than `XLEN` bits.
// FIXME: should constrain with `T: Thin` once ptr_metadata feature is stabilized;
  1442. // RISC-V SBI does not provide an approach to pass pointer metadata by SBI calls.
pub struct SharedPtr<T> {
    // Low part of the shared memory physical address.
    phys_addr_lo: usize,
    // High part of the shared memory physical address.
    phys_addr_hi: usize,
    // Ties the pointer to `T` without storing any data.
    _marker: PhantomData<*mut T>,
}
// FIXME: we should consider strict provenance rules for this pointer-like structure
// once feature strict_provenance is stabilized.
impl<T> SharedPtr<T> {
    /// Create a shared physical memory pointer by physical address.
    #[inline]
    pub const fn new(phys_addr_lo: usize, phys_addr_hi: usize) -> Self {
        Self {
            phys_addr_lo,
            phys_addr_hi,
            _marker: PhantomData,
        }
    }

    /// Returns low-part physical address of the shared physical memory pointer.
    #[inline]
    pub const fn phys_addr_lo(self) -> usize {
        self.phys_addr_lo
    }

    /// Returns high-part physical address of the shared physical memory pointer.
    #[inline]
    pub const fn phys_addr_hi(self) -> usize {
        self.phys_addr_hi
    }
}
// Manual `Clone`/`Copy` impls: `derive` would add `T: Clone`/`T: Copy` bounds,
// which are unnecessary because only a `PhantomData<*mut T>` is stored.
impl<T> Clone for SharedPtr<T> {
    #[inline(always)]
    fn clone(&self) -> Self {
        *self
    }
}

impl<T> Copy for SharedPtr<T> {}
  1478. #[cfg(test)]
  1479. mod tests {
  1480. use super::*;
    #[test]
    #[rustfmt::skip]
    fn rustsbi_sbi_ret_constructors() {
        // `usize::MAX - n + 1` spells the two's-complement encoding of the
        // negative SBI error code `-n` without overflowing a literal.
        assert_eq!(SbiRet::success(0), SbiRet { value: 0, error: 0 });
        assert_eq!(SbiRet::success(1037), SbiRet { value: 1037, error: 0 });
        assert_eq!(SbiRet::success(usize::MAX), SbiRet { value: usize::MAX, error: 0 });

        assert_eq!(SbiRet::failed(), SbiRet { value: 0, error: usize::MAX - 1 + 1 });
        assert_eq!(SbiRet::not_supported(), SbiRet { value: 0, error: usize::MAX - 2 + 1 });
        assert_eq!(SbiRet::invalid_param(), SbiRet { value: 0, error: usize::MAX - 3 + 1 });
        assert_eq!(SbiRet::denied(), SbiRet { value: 0, error: usize::MAX - 4 + 1 });
        assert_eq!(SbiRet::invalid_address(), SbiRet { value: 0, error: usize::MAX - 5 + 1 });
        assert_eq!(SbiRet::already_available(), SbiRet { value: 0, error: usize::MAX - 6 + 1 });
        assert_eq!(SbiRet::already_started(), SbiRet { value: 0, error: usize::MAX - 7 + 1 });
        assert_eq!(SbiRet::already_stopped(), SbiRet { value: 0, error: usize::MAX - 8 + 1 });
        assert_eq!(SbiRet::no_shmem(), SbiRet { value: 0, error: usize::MAX - 9 + 1 });
        assert_eq!(SbiRet::invalid_state(), SbiRet { value: 0, error: usize::MAX - 10 + 1 });
        assert_eq!(SbiRet::bad_range(), SbiRet { value: 0, error: usize::MAX - 11 + 1 });
        assert_eq!(SbiRet::timeout(), SbiRet { value: 0, error: usize::MAX - 12 + 1 });
        assert_eq!(SbiRet::io(), SbiRet { value: 0, error: usize::MAX - 13 + 1 });
    }
  1501. #[test]
  1502. fn rustsbi_hart_mask() {
  1503. let mask = HartMask::from_mask_base(0b1, 400);
  1504. assert!(!mask.has_bit(0));
  1505. assert!(mask.has_bit(400));
  1506. assert!(!mask.has_bit(401));
  1507. let mask = HartMask::from_mask_base(0b110, 500);
  1508. assert!(!mask.has_bit(0));
  1509. assert!(!mask.has_bit(500));
  1510. assert!(mask.has_bit(501));
  1511. assert!(mask.has_bit(502));
  1512. assert!(!mask.has_bit(500 + (usize::BITS as usize)));
  1513. let max_bit = 1 << (usize::BITS - 1);
  1514. let mask = HartMask::from_mask_base(max_bit, 600);
  1515. assert!(mask.has_bit(600 + (usize::BITS as usize) - 1));
  1516. assert!(!mask.has_bit(600 + (usize::BITS as usize)));
  1517. let mask = HartMask::from_mask_base(0b11, usize::MAX - 1);
  1518. assert!(!mask.has_bit(usize::MAX - 2));
  1519. assert!(mask.has_bit(usize::MAX - 1));
  1520. assert!(mask.has_bit(usize::MAX));
  1521. assert!(!mask.has_bit(0));
  1522. // hart_mask_base == usize::MAX is special, it means hart_mask should be ignored
  1523. // and this hart mask contains all harts available
  1524. let mask = HartMask::from_mask_base(0, usize::MAX);
  1525. for i in 0..5 {
  1526. assert!(mask.has_bit(i));
  1527. }
  1528. assert!(mask.has_bit(usize::MAX));
  1529. let mut mask = HartMask::from_mask_base(0, 1);
  1530. assert!(!mask.has_bit(1));
  1531. assert!(mask.insert(1).is_ok());
  1532. assert!(mask.has_bit(1));
  1533. assert!(mask.remove(1).is_ok());
  1534. assert!(!mask.has_bit(1));
  1535. }
#[test]
fn rustsbi_hart_ids_iterator() {
    // Mask bits 0, 1, 3 and 5 over base 1 select hart ids 1, 2, 4 and 6.
    let mask = HartMask::from_mask_base(0b101011, 1);
    // Test the `next` method of `HartIds` structure.
    let mut hart_ids = mask.iter();
    assert_eq!(hart_ids.next(), Some(1));
    assert_eq!(hart_ids.next(), Some(2));
    assert_eq!(hart_ids.next(), Some(4));
    assert_eq!(hart_ids.next(), Some(6));
    assert_eq!(hart_ids.next(), None);
    // `HartIds` structures are fused, meaning they return `None` forever once iteration finished.
    assert_eq!(hart_ids.next(), None);
    // Test `for` loop on mask (`HartMask`) as `IntoIterator`.
    let mut ans = [0; 4];
    let mut idx = 0;
    for hart_id in mask {
        ans[idx] = hart_id;
        idx += 1;
    }
    assert_eq!(ans, [1, 2, 4, 6]);
    // Test `Iterator` methods on `HartIds`.
    // `size_hint` shrinks by one per consumed item and bottoms out at (0, 0).
    let mut hart_ids = mask.iter();
    assert_eq!(hart_ids.size_hint(), (4, Some(4)));
    let _ = hart_ids.next();
    assert_eq!(hart_ids.size_hint(), (3, Some(3)));
    let _ = hart_ids.next();
    let _ = hart_ids.next();
    assert_eq!(hart_ids.size_hint(), (1, Some(1)));
    let _ = hart_ids.next();
    assert_eq!(hart_ids.size_hint(), (0, Some(0)));
    let _ = hart_ids.next();
    assert_eq!(hart_ids.size_hint(), (0, Some(0)));
    // `count` reflects the number of ids remaining.
    // NOTE(review): `count` consumes its receiver by value, so reusing
    // `hart_ids` afterwards relies on `HartIds` being `Copy` — confirm on the
    // type definition.
    let mut hart_ids = mask.iter();
    assert_eq!(hart_ids.count(), 4);
    let _ = hart_ids.next();
    assert_eq!(hart_ids.count(), 3);
    let _ = hart_ids.next();
    let _ = hart_ids.next();
    let _ = hart_ids.next();
    assert_eq!(hart_ids.count(), 0);
    let _ = hart_ids.next();
    assert_eq!(hart_ids.count(), 0);
    // `last`, `nth` and `step_by` walk the sorted sequence 1, 2, 4, 6.
    let hart_ids = mask.iter();
    assert_eq!(hart_ids.last(), Some(6));
    let mut hart_ids = mask.iter();
    assert_eq!(hart_ids.nth(2), Some(4));
    let mut hart_ids = mask.iter();
    assert_eq!(hart_ids.nth(0), Some(1));
    let mut iter = mask.iter().step_by(2);
    assert_eq!(iter.next(), Some(1));
    assert_eq!(iter.next(), Some(4));
    assert_eq!(iter.next(), None);
    // `chain` and `zip` against a second mask; bits 0, 2, 3 and 6 over base 64
    // select hart ids 64, 66, 67 and 70.
    let mask_2 = HartMask::from_mask_base(0b1001101, 64);
    let mut iter = mask.iter().chain(mask_2);
    assert_eq!(iter.next(), Some(1));
    assert_eq!(iter.next(), Some(2));
    assert_eq!(iter.next(), Some(4));
    assert_eq!(iter.next(), Some(6));
    assert_eq!(iter.next(), Some(64));
    assert_eq!(iter.next(), Some(66));
    assert_eq!(iter.next(), Some(67));
    assert_eq!(iter.next(), Some(70));
    assert_eq!(iter.next(), None);
    let mut iter = mask.iter().zip(mask_2);
    assert_eq!(iter.next(), Some((1, 64)));
    assert_eq!(iter.next(), Some((2, 66)));
    assert_eq!(iter.next(), Some((4, 67)));
    assert_eq!(iter.next(), Some((6, 70)));
    assert_eq!(iter.next(), None);
    // `map` with a plain function item.
    fn to_plic_context_id(hart_id_machine: usize) -> usize {
        hart_id_machine * 2
    }
    let mut iter = mask.iter().map(to_plic_context_id);
    assert_eq!(iter.next(), Some(2));
    assert_eq!(iter.next(), Some(4));
    assert_eq!(iter.next(), Some(8));
    assert_eq!(iter.next(), Some(12));
    assert_eq!(iter.next(), None);
    // `for_each` with a state-capturing closure.
    let mut channel_received = [0; 4];
    let mut idx = 0;
    let mut channel_send = |hart_id| {
        channel_received[idx] = hart_id;
        idx += 1;
    };
    mask.iter().for_each(|value| channel_send(value));
    assert_eq!(channel_received, [1, 2, 4, 6]);
    // `filter`, `filter_map` and `enumerate` adaptors.
    let is_in_cluster_1 = |hart_id: &usize| *hart_id >= 4 && *hart_id < 7;
    let mut iter = mask.iter().filter(is_in_cluster_1);
    assert_eq!(iter.next(), Some(4));
    assert_eq!(iter.next(), Some(6));
    assert_eq!(iter.next(), None);
    let if_in_cluster_1_get_plic_context_id = |hart_id: usize| {
        if hart_id >= 4 && hart_id < 7 {
            Some(hart_id * 2)
        } else {
            None
        }
    };
    let mut iter = mask.iter().filter_map(if_in_cluster_1_get_plic_context_id);
    assert_eq!(iter.next(), Some(8));
    assert_eq!(iter.next(), Some(12));
    assert_eq!(iter.next(), None);
    let mut iter = mask.iter().enumerate();
    assert_eq!(iter.next(), Some((0, 1)));
    assert_eq!(iter.next(), Some((1, 2)));
    assert_eq!(iter.next(), Some((2, 4)));
    assert_eq!(iter.next(), Some((3, 6)));
    assert_eq!(iter.next(), None);
    let mut ans = [(0, 0); 4];
    let mut idx = 0;
    for (i, hart_id) in mask.iter().enumerate() {
        ans[idx] = (i, hart_id);
        idx += 1;
    }
    assert_eq!(ans, [(0, 1), (1, 2), (2, 4), (3, 6)]);
    // `peekable` looks ahead without consuming.
    let mut iter = mask.iter().peekable();
    assert_eq!(iter.peek(), Some(&1));
    assert_eq!(iter.next(), Some(1));
    assert_eq!(iter.peek(), Some(&2));
    assert_eq!(iter.next(), Some(2));
    assert_eq!(iter.peek(), Some(&4));
    assert_eq!(iter.next(), Some(4));
    assert_eq!(iter.peek(), Some(&6));
    assert_eq!(iter.next(), Some(6));
    assert_eq!(iter.peek(), None);
    assert_eq!(iter.next(), None);
    // TODO: other iterator tests.
    assert!(mask.iter().is_sorted());
    assert!(mask.iter().is_sorted_by(|a, b| a <= b));
    // Reverse iterator as `DoubleEndedIterator`.
    let mut iter = mask.iter().rev();
    assert_eq!(iter.next(), Some(6));
    assert_eq!(iter.next(), Some(4));
    assert_eq!(iter.next(), Some(2));
    assert_eq!(iter.next(), Some(1));
    assert_eq!(iter.next(), None);
    // Special iterator values.
    // A zero mask word yields nothing; a full mask word yields exactly
    // `base..base + usize::BITS`.
    let nothing = HartMask::from_mask_base(0, 1000);
    assert!(nothing.iter().eq([]));
    let all_mask_bits_set = HartMask::from_mask_base(usize::MAX, 1000);
    let range = 1000..(1000 + usize::BITS as usize);
    assert!(all_mask_bits_set.iter().eq(range));
    // `HartMask::all` iterates every id from 0 up to and including usize::MAX.
    let all_harts = HartMask::all();
    let mut iter = all_harts.iter();
    assert_eq!(iter.size_hint(), (usize::MAX, Some(usize::MAX)));
    // Don't use `Iterator::eq` here; it would literally run `Iterator::try_for_each` from 0 to usize::MAX
    // which will cost us forever to run the test.
    assert_eq!(iter.next(), Some(0));
    assert_eq!(iter.size_hint(), (usize::MAX - 1, Some(usize::MAX - 1)));
    assert_eq!(iter.next(), Some(1));
    assert_eq!(iter.next(), Some(2));
    // skip 500 elements
    let _ = iter.nth(500 - 1);
    assert_eq!(iter.next(), Some(503));
    assert_eq!(iter.size_hint(), (usize::MAX - 504, Some(usize::MAX - 504)));
    assert_eq!(iter.next_back(), Some(usize::MAX));
    assert_eq!(iter.next_back(), Some(usize::MAX - 1));
    assert_eq!(iter.size_hint(), (usize::MAX - 506, Some(usize::MAX - 506)));
    // A common usage of `HartMask::all`, we assume that this platform filters out hart 0..=3.
    let environment_available_hart_ids = 4..128;
    // `hart_mask_iter` contains 64..=usize::MAX.
    let hart_mask_iter = all_harts.iter().skip(64);
    let filtered_iter = environment_available_hart_ids.filter(|&x| {
        hart_mask_iter
            .clone()
            .find(|&y| y >= x)
            .map_or(false, |y| y == x)
    });
    assert!(filtered_iter.eq(64..128));
    // The following operations should have O(1) complexity.
    let all_harts = HartMask::all();
    assert_eq!(all_harts.iter().count(), usize::MAX);
    assert_eq!(all_harts.iter().last(), Some(usize::MAX));
    assert_eq!(all_harts.iter().min(), Some(0));
    assert_eq!(all_harts.iter().max(), Some(usize::MAX));
    assert!(all_harts.iter().is_sorted());
    // Consume 65536 ids from the front and 4096 from the back, then check the
    // summaries of the remaining middle section.
    let partial_all_harts = {
        let mut ans = HartMask::all().iter();
        let _ = ans.nth(65536 - 1);
        let _ = ans.nth_back(4096 - 1);
        ans
    };
    assert_eq!(partial_all_harts.clone().count(), usize::MAX - 65536 - 4096);
    assert_eq!(partial_all_harts.clone().last(), Some(usize::MAX - 4096));
    assert_eq!(partial_all_harts.clone().min(), Some(65536));
    assert_eq!(partial_all_harts.clone().max(), Some(usize::MAX - 4096));
    assert!(partial_all_harts.is_sorted());
    let nothing = HartMask::from_mask_base(0, 1000);
    assert_eq!(nothing.iter().count(), 0);
    assert_eq!(nothing.iter().last(), None);
    assert_eq!(nothing.iter().min(), None);
    assert_eq!(nothing.iter().max(), None);
    assert!(nothing.iter().is_sorted());
    let mask = HartMask::from_mask_base(0b101011, 1);
    assert_eq!(mask.iter().count(), 4);
    assert_eq!(mask.iter().last(), Some(6));
    assert_eq!(mask.iter().min(), Some(1));
    assert_eq!(mask.iter().max(), Some(6));
    assert!(mask.iter().is_sorted());
    let all_mask_bits_set = HartMask::from_mask_base(usize::MAX, 1000);
    let last = 1000 + usize::BITS as usize - 1;
    assert_eq!(all_mask_bits_set.iter().count(), usize::BITS as usize);
    assert_eq!(all_mask_bits_set.iter().last(), Some(last));
    assert_eq!(all_mask_bits_set.iter().min(), Some(1000));
    assert_eq!(all_mask_bits_set.iter().max(), Some(last));
    assert!(all_mask_bits_set.iter().is_sorted());
}
  1743. #[test]
  1744. fn rustsbi_counter_index_mask() {
  1745. let mask = CounterMask::from_mask_base(0b1, 400);
  1746. assert!(!mask.has_bit(0));
  1747. assert!(mask.has_bit(400));
  1748. assert!(!mask.has_bit(401));
  1749. let mask = CounterMask::from_mask_base(0b110, 500);
  1750. assert!(!mask.has_bit(0));
  1751. assert!(!mask.has_bit(500));
  1752. assert!(mask.has_bit(501));
  1753. assert!(mask.has_bit(502));
  1754. assert!(!mask.has_bit(500 + (usize::BITS as usize)));
  1755. let max_bit = 1 << (usize::BITS - 1);
  1756. let mask = CounterMask::from_mask_base(max_bit, 600);
  1757. assert!(mask.has_bit(600 + (usize::BITS as usize) - 1));
  1758. assert!(!mask.has_bit(600 + (usize::BITS as usize)));
  1759. let mask = CounterMask::from_mask_base(0b11, usize::MAX - 1);
  1760. assert!(!mask.has_bit(usize::MAX - 2));
  1761. assert!(mask.has_bit(usize::MAX - 1));
  1762. assert!(mask.has_bit(usize::MAX));
  1763. assert!(!mask.has_bit(0));
  1764. let mask = CounterMask::from_mask_base(0, usize::MAX);
  1765. let null_mask = CounterMask::from_mask_base(0, 0);
  1766. (0..=usize::BITS as usize).for_each(|i| {
  1767. assert!(mask.has_bit(i));
  1768. assert!(!null_mask.has_bit(i));
  1769. });
  1770. assert!(mask.has_bit(usize::MAX));
  1771. }
  1772. #[test]
  1773. fn rustsbi_mask_non_usize() {
  1774. assert_eq!(CounterMask::<i32>::IGNORE_MASK, -1);
  1775. assert_eq!(CounterMask::<i64>::IGNORE_MASK, -1);
  1776. assert_eq!(CounterMask::<i128>::IGNORE_MASK, -1);
  1777. assert_eq!(CounterMask::<u32>::IGNORE_MASK, u32::MAX);
  1778. assert_eq!(CounterMask::<u64>::IGNORE_MASK, u64::MAX);
  1779. assert_eq!(CounterMask::<u128>::IGNORE_MASK, u128::MAX);
  1780. assert_eq!(HartMask::<i32>::IGNORE_MASK, -1);
  1781. assert_eq!(HartMask::<i64>::IGNORE_MASK, -1);
  1782. assert_eq!(HartMask::<i128>::IGNORE_MASK, -1);
  1783. assert_eq!(HartMask::<u32>::IGNORE_MASK, u32::MAX);
  1784. assert_eq!(HartMask::<u64>::IGNORE_MASK, u64::MAX);
  1785. assert_eq!(HartMask::<u128>::IGNORE_MASK, u128::MAX);
  1786. assert_eq!(HartMask::<i32>::all(), HartMask::from_mask_base(0, -1));
  1787. }
  1788. }