linux_bindings_riscv64.rs

/* automatically generated by rust-bindgen 0.71.1 */

#[repr(C)]
#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct __BindgenBitfieldUnit<Storage> {
    storage: Storage,
}
impl<Storage> __BindgenBitfieldUnit<Storage> {
    #[inline]
    pub const fn new(storage: Storage) -> Self {
        Self { storage }
    }
}
impl<Storage> __BindgenBitfieldUnit<Storage>
where
    Storage: AsRef<[u8]> + AsMut<[u8]>,
{
    #[inline]
    fn extract_bit(byte: u8, index: usize) -> bool {
        let bit_index = if cfg!(target_endian = "big") {
            7 - (index % 8)
        } else {
            index % 8
        };
        let mask = 1 << bit_index;
        byte & mask == mask
    }
    #[inline]
    pub fn get_bit(&self, index: usize) -> bool {
        debug_assert!(index / 8 < self.storage.as_ref().len());
        let byte_index = index / 8;
        let byte = self.storage.as_ref()[byte_index];
        Self::extract_bit(byte, index)
    }
    #[inline]
    pub unsafe fn raw_get_bit(this: *const Self, index: usize) -> bool {
        debug_assert!(index / 8 < core::mem::size_of::<Storage>());
        let byte_index = index / 8;
        let byte = *(core::ptr::addr_of!((*this).storage) as *const u8).offset(byte_index as isize);
        Self::extract_bit(byte, index)
    }
    #[inline]
    fn change_bit(byte: u8, index: usize, val: bool) -> u8 {
        let bit_index = if cfg!(target_endian = "big") {
            7 - (index % 8)
        } else {
            index % 8
        };
        let mask = 1 << bit_index;
        if val {
            byte | mask
        } else {
            byte & !mask
        }
    }
    #[inline]
    pub fn set_bit(&mut self, index: usize, val: bool) {
        debug_assert!(index / 8 < self.storage.as_ref().len());
        let byte_index = index / 8;
        let byte = &mut self.storage.as_mut()[byte_index];
        *byte = Self::change_bit(*byte, index, val);
    }
    #[inline]
    pub unsafe fn raw_set_bit(this: *mut Self, index: usize, val: bool) {
        debug_assert!(index / 8 < core::mem::size_of::<Storage>());
        let byte_index = index / 8;
        let byte =
            (core::ptr::addr_of_mut!((*this).storage) as *mut u8).offset(byte_index as isize);
        *byte = Self::change_bit(*byte, index, val);
    }
    #[inline]
    pub fn get(&self, bit_offset: usize, bit_width: u8) -> u64 {
        debug_assert!(bit_width <= 64);
        debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
        debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len());
        let mut val = 0;
        for i in 0..(bit_width as usize) {
            if self.get_bit(i + bit_offset) {
                let index = if cfg!(target_endian = "big") {
                    bit_width as usize - 1 - i
                } else {
                    i
                };
                val |= 1 << index;
            }
        }
        val
    }
    #[inline]
    pub unsafe fn raw_get(this: *const Self, bit_offset: usize, bit_width: u8) -> u64 {
        debug_assert!(bit_width <= 64);
        debug_assert!(bit_offset / 8 < core::mem::size_of::<Storage>());
        debug_assert!((bit_offset + (bit_width as usize)) / 8 <= core::mem::size_of::<Storage>());
        let mut val = 0;
        for i in 0..(bit_width as usize) {
            if Self::raw_get_bit(this, i + bit_offset) {
                let index = if cfg!(target_endian = "big") {
                    bit_width as usize - 1 - i
                } else {
                    i
                };
                val |= 1 << index;
            }
        }
        val
    }
    #[inline]
    pub fn set(&mut self, bit_offset: usize, bit_width: u8, val: u64) {
        debug_assert!(bit_width <= 64);
        debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
        debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len());
        for i in 0..(bit_width as usize) {
            let mask = 1 << i;
            let val_bit_is_set = val & mask == mask;
            let index = if cfg!(target_endian = "big") {
                bit_width as usize - 1 - i
            } else {
                i
            };
            self.set_bit(index + bit_offset, val_bit_is_set);
        }
    }
    #[inline]
    pub unsafe fn raw_set(this: *mut Self, bit_offset: usize, bit_width: u8, val: u64) {
        debug_assert!(bit_width <= 64);
        debug_assert!(bit_offset / 8 < core::mem::size_of::<Storage>());
        debug_assert!((bit_offset + (bit_width as usize)) / 8 <= core::mem::size_of::<Storage>());
        for i in 0..(bit_width as usize) {
            let mask = 1 << i;
            let val_bit_is_set = val & mask == mask;
            let index = if cfg!(target_endian = "big") {
                bit_width as usize - 1 - i
            } else {
                i
            };
            Self::raw_set_bit(this, index + bit_offset, val_bit_is_set);
        }
    }
}
#[repr(C)]
#[derive(Default)]
pub struct __IncompleteArrayField<T>(::core::marker::PhantomData<T>, [T; 0]);
impl<T> __IncompleteArrayField<T> {
    #[inline]
    pub const fn new() -> Self {
        __IncompleteArrayField(::core::marker::PhantomData, [])
    }
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        self as *const _ as *const T
    }
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self as *mut _ as *mut T
    }
    #[inline]
    pub unsafe fn as_slice(&self, len: usize) -> &[T] {
        ::core::slice::from_raw_parts(self.as_ptr(), len)
    }
    #[inline]
    pub unsafe fn as_mut_slice(&mut self, len: usize) -> &mut [T] {
        ::core::slice::from_raw_parts_mut(self.as_mut_ptr(), len)
    }
}
impl<T> ::core::fmt::Debug for __IncompleteArrayField<T> {
    fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
        fmt.write_str("__IncompleteArrayField")
    }
}
pub const SO_ATTACH_BPF: u32 = 50;
pub const SO_DETACH_BPF: u32 = 27;
pub const BPF_LD: u32 = 0;
pub const BPF_LDX: u32 = 1;
pub const BPF_ST: u32 = 2;
pub const BPF_STX: u32 = 3;
pub const BPF_ALU: u32 = 4;
pub const BPF_JMP: u32 = 5;
pub const BPF_W: u32 = 0;
pub const BPF_H: u32 = 8;
pub const BPF_B: u32 = 16;
pub const BPF_K: u32 = 0;
pub const BPF_ALU64: u32 = 7;
pub const BPF_DW: u32 = 24;
pub const BPF_CALL: u32 = 128;
pub const BPF_F_ALLOW_OVERRIDE: u32 = 1;
pub const BPF_F_ALLOW_MULTI: u32 = 2;
pub const BPF_F_REPLACE: u32 = 4;
pub const BPF_F_BEFORE: u32 = 8;
pub const BPF_F_AFTER: u32 = 16;
pub const BPF_F_ID: u32 = 32;
pub const BPF_F_STRICT_ALIGNMENT: u32 = 1;
pub const BPF_F_ANY_ALIGNMENT: u32 = 2;
pub const BPF_F_TEST_RND_HI32: u32 = 4;
pub const BPF_F_TEST_STATE_FREQ: u32 = 8;
pub const BPF_F_SLEEPABLE: u32 = 16;
pub const BPF_F_XDP_HAS_FRAGS: u32 = 32;
pub const BPF_F_XDP_DEV_BOUND_ONLY: u32 = 64;
pub const BPF_F_TEST_REG_INVARIANTS: u32 = 128;
pub const BPF_F_NETFILTER_IP_DEFRAG: u32 = 1;
pub const BPF_PSEUDO_MAP_FD: u32 = 1;
pub const BPF_PSEUDO_MAP_IDX: u32 = 5;
pub const BPF_PSEUDO_MAP_VALUE: u32 = 2;
pub const BPF_PSEUDO_MAP_IDX_VALUE: u32 = 6;
pub const BPF_PSEUDO_BTF_ID: u32 = 3;
pub const BPF_PSEUDO_FUNC: u32 = 4;
pub const BPF_PSEUDO_CALL: u32 = 1;
pub const BPF_PSEUDO_KFUNC_CALL: u32 = 2;
pub const BPF_F_QUERY_EFFECTIVE: u32 = 1;
pub const BPF_F_TEST_RUN_ON_CPU: u32 = 1;
pub const BPF_F_TEST_XDP_LIVE_FRAMES: u32 = 2;
pub const BTF_INT_SIGNED: u32 = 1;
pub const BTF_INT_CHAR: u32 = 2;
pub const BTF_INT_BOOL: u32 = 4;
pub const NLMSG_ALIGNTO: u32 = 4;
pub const XDP_FLAGS_UPDATE_IF_NOEXIST: u32 = 1;
pub const XDP_FLAGS_SKB_MODE: u32 = 2;
pub const XDP_FLAGS_DRV_MODE: u32 = 4;
pub const XDP_FLAGS_HW_MODE: u32 = 8;
pub const XDP_FLAGS_REPLACE: u32 = 16;
pub const XDP_FLAGS_MODES: u32 = 14;
pub const XDP_FLAGS_MASK: u32 = 31;
pub const PERF_EVENT_IOC_ENABLE: u32 = 9216;
pub const PERF_EVENT_IOC_DISABLE: u32 = 9217;
pub const PERF_EVENT_IOC_REFRESH: u32 = 9218;
pub const PERF_EVENT_IOC_RESET: u32 = 9219;
pub const PERF_EVENT_IOC_PERIOD: u32 = 1074275332;
pub const PERF_EVENT_IOC_SET_OUTPUT: u32 = 9221;
pub const PERF_EVENT_IOC_SET_FILTER: u32 = 1074275334;
pub const PERF_EVENT_IOC_ID: u32 = 2148017159;
pub const PERF_EVENT_IOC_SET_BPF: u32 = 1074013192;
pub const PERF_EVENT_IOC_PAUSE_OUTPUT: u32 = 1074013193;
pub const PERF_EVENT_IOC_QUERY_BPF: u32 = 3221758986;
pub const PERF_EVENT_IOC_MODIFY_ATTRIBUTES: u32 = 1074275339;
pub const PERF_MAX_STACK_DEPTH: u32 = 127;
pub const PERF_MAX_CONTEXTS_PER_STACK: u32 = 8;
pub const PERF_FLAG_FD_NO_GROUP: u32 = 1;
pub const PERF_FLAG_FD_OUTPUT: u32 = 2;
pub const PERF_FLAG_PID_CGROUP: u32 = 4;
pub const PERF_FLAG_FD_CLOEXEC: u32 = 8;
pub const TC_H_MAJ_MASK: u32 = 4294901760;
pub const TC_H_MIN_MASK: u32 = 65535;
pub const TC_H_UNSPEC: u32 = 0;
pub const TC_H_ROOT: u32 = 4294967295;
pub const TC_H_INGRESS: u32 = 4294967281;
pub const TC_H_CLSACT: u32 = 4294967281;
pub const TC_H_MIN_PRIORITY: u32 = 65504;
pub const TC_H_MIN_INGRESS: u32 = 65522;
pub const TC_H_MIN_EGRESS: u32 = 65523;
pub const TCA_BPF_FLAG_ACT_DIRECT: u32 = 1;
pub type __u8 = ::core::ffi::c_uchar;
pub type __s16 = ::core::ffi::c_short;
pub type __u16 = ::core::ffi::c_ushort;
pub type __s32 = ::core::ffi::c_int;
pub type __u32 = ::core::ffi::c_uint;
pub type __s64 = ::core::ffi::c_longlong;
pub type __u64 = ::core::ffi::c_ulonglong;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_insn {
    pub code: __u8,
    pub _bitfield_align_1: [u8; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
    pub off: __s16,
    pub imm: __s32,
}
impl bpf_insn {
    #[inline]
    pub fn dst_reg(&self) -> __u8 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 4u8) as u8) }
    }
    #[inline]
    pub fn set_dst_reg(&mut self, val: __u8) {
        unsafe {
            let val: u8 = ::core::mem::transmute(val);
            self._bitfield_1.set(0usize, 4u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn dst_reg_raw(this: *const Self) -> __u8 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 1usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                0usize,
                4u8,
            ) as u8)
        }
    }
    #[inline]
    pub unsafe fn set_dst_reg_raw(this: *mut Self, val: __u8) {
        unsafe {
            let val: u8 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 1usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                0usize,
                4u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn src_reg(&self) -> __u8 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 4u8) as u8) }
    }
    #[inline]
    pub fn set_src_reg(&mut self, val: __u8) {
        unsafe {
            let val: u8 = ::core::mem::transmute(val);
            self._bitfield_1.set(4usize, 4u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn src_reg_raw(this: *const Self) -> __u8 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 1usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                4usize,
                4u8,
            ) as u8)
        }
    }
    #[inline]
    pub unsafe fn set_src_reg_raw(this: *mut Self, val: __u8) {
        unsafe {
            let val: u8 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 1usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                4usize,
                4u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn new_bitfield_1(dst_reg: __u8, src_reg: __u8) -> __BindgenBitfieldUnit<[u8; 1usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
        __bindgen_bitfield_unit.set(0usize, 4u8, {
            let dst_reg: u8 = unsafe { ::core::mem::transmute(dst_reg) };
            dst_reg as u64
        });
        __bindgen_bitfield_unit.set(4usize, 4u8, {
            let src_reg: u8 = unsafe { ::core::mem::transmute(src_reg) };
            src_reg as u64
        });
        __bindgen_bitfield_unit
    }
}
#[repr(C)]
#[derive(Debug)]
pub struct bpf_lpm_trie_key {
    pub prefixlen: __u32,
    pub data: __IncompleteArrayField<__u8>,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_cgroup_iter_order {
    BPF_CGROUP_ITER_ORDER_UNSPEC = 0,
    BPF_CGROUP_ITER_SELF_ONLY = 1,
    BPF_CGROUP_ITER_DESCENDANTS_PRE = 2,
    BPF_CGROUP_ITER_DESCENDANTS_POST = 3,
    BPF_CGROUP_ITER_ANCESTORS_UP = 4,
}
impl bpf_cmd {
    pub const BPF_PROG_RUN: bpf_cmd = bpf_cmd::BPF_PROG_TEST_RUN;
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_cmd {
    BPF_MAP_CREATE = 0,
    BPF_MAP_LOOKUP_ELEM = 1,
    BPF_MAP_UPDATE_ELEM = 2,
    BPF_MAP_DELETE_ELEM = 3,
    BPF_MAP_GET_NEXT_KEY = 4,
    BPF_PROG_LOAD = 5,
    BPF_OBJ_PIN = 6,
    BPF_OBJ_GET = 7,
    BPF_PROG_ATTACH = 8,
    BPF_PROG_DETACH = 9,
    BPF_PROG_TEST_RUN = 10,
    BPF_PROG_GET_NEXT_ID = 11,
    BPF_MAP_GET_NEXT_ID = 12,
    BPF_PROG_GET_FD_BY_ID = 13,
    BPF_MAP_GET_FD_BY_ID = 14,
    BPF_OBJ_GET_INFO_BY_FD = 15,
    BPF_PROG_QUERY = 16,
    BPF_RAW_TRACEPOINT_OPEN = 17,
    BPF_BTF_LOAD = 18,
    BPF_BTF_GET_FD_BY_ID = 19,
    BPF_TASK_FD_QUERY = 20,
    BPF_MAP_LOOKUP_AND_DELETE_ELEM = 21,
    BPF_MAP_FREEZE = 22,
    BPF_BTF_GET_NEXT_ID = 23,
    BPF_MAP_LOOKUP_BATCH = 24,
    BPF_MAP_LOOKUP_AND_DELETE_BATCH = 25,
    BPF_MAP_UPDATE_BATCH = 26,
    BPF_MAP_DELETE_BATCH = 27,
    BPF_LINK_CREATE = 28,
    BPF_LINK_UPDATE = 29,
    BPF_LINK_GET_FD_BY_ID = 30,
    BPF_LINK_GET_NEXT_ID = 31,
    BPF_ENABLE_STATS = 32,
    BPF_ITER_CREATE = 33,
    BPF_LINK_DETACH = 34,
    BPF_PROG_BIND_MAP = 35,
    BPF_TOKEN_CREATE = 36,
    __MAX_BPF_CMD = 37,
}
impl bpf_map_type {
    pub const BPF_MAP_TYPE_CGROUP_STORAGE: bpf_map_type =
        bpf_map_type::BPF_MAP_TYPE_CGROUP_STORAGE_DEPRECATED;
}
impl bpf_map_type {
    pub const BPF_MAP_TYPE_PERCPU_CGROUP_STORAGE: bpf_map_type =
        bpf_map_type::BPF_MAP_TYPE_PERCPU_CGROUP_STORAGE_DEPRECATED;
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_map_type {
    BPF_MAP_TYPE_UNSPEC = 0,
    BPF_MAP_TYPE_HASH = 1,
    BPF_MAP_TYPE_ARRAY = 2,
    BPF_MAP_TYPE_PROG_ARRAY = 3,
    BPF_MAP_TYPE_PERF_EVENT_ARRAY = 4,
    BPF_MAP_TYPE_PERCPU_HASH = 5,
    BPF_MAP_TYPE_PERCPU_ARRAY = 6,
    BPF_MAP_TYPE_STACK_TRACE = 7,
    BPF_MAP_TYPE_CGROUP_ARRAY = 8,
    BPF_MAP_TYPE_LRU_HASH = 9,
    BPF_MAP_TYPE_LRU_PERCPU_HASH = 10,
    BPF_MAP_TYPE_LPM_TRIE = 11,
    BPF_MAP_TYPE_ARRAY_OF_MAPS = 12,
    BPF_MAP_TYPE_HASH_OF_MAPS = 13,
    BPF_MAP_TYPE_DEVMAP = 14,
    BPF_MAP_TYPE_SOCKMAP = 15,
    BPF_MAP_TYPE_CPUMAP = 16,
    BPF_MAP_TYPE_XSKMAP = 17,
    BPF_MAP_TYPE_SOCKHASH = 18,
    BPF_MAP_TYPE_CGROUP_STORAGE_DEPRECATED = 19,
    BPF_MAP_TYPE_REUSEPORT_SOCKARRAY = 20,
    BPF_MAP_TYPE_PERCPU_CGROUP_STORAGE_DEPRECATED = 21,
    BPF_MAP_TYPE_QUEUE = 22,
    BPF_MAP_TYPE_STACK = 23,
    BPF_MAP_TYPE_SK_STORAGE = 24,
    BPF_MAP_TYPE_DEVMAP_HASH = 25,
    BPF_MAP_TYPE_STRUCT_OPS = 26,
    BPF_MAP_TYPE_RINGBUF = 27,
    BPF_MAP_TYPE_INODE_STORAGE = 28,
    BPF_MAP_TYPE_TASK_STORAGE = 29,
    BPF_MAP_TYPE_BLOOM_FILTER = 30,
    BPF_MAP_TYPE_USER_RINGBUF = 31,
    BPF_MAP_TYPE_CGRP_STORAGE = 32,
    BPF_MAP_TYPE_ARENA = 33,
    __MAX_BPF_MAP_TYPE = 34,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_prog_type {
    BPF_PROG_TYPE_UNSPEC = 0,
    BPF_PROG_TYPE_SOCKET_FILTER = 1,
    BPF_PROG_TYPE_KPROBE = 2,
    BPF_PROG_TYPE_SCHED_CLS = 3,
    BPF_PROG_TYPE_SCHED_ACT = 4,
    BPF_PROG_TYPE_TRACEPOINT = 5,
    BPF_PROG_TYPE_XDP = 6,
    BPF_PROG_TYPE_PERF_EVENT = 7,
    BPF_PROG_TYPE_CGROUP_SKB = 8,
    BPF_PROG_TYPE_CGROUP_SOCK = 9,
    BPF_PROG_TYPE_LWT_IN = 10,
    BPF_PROG_TYPE_LWT_OUT = 11,
    BPF_PROG_TYPE_LWT_XMIT = 12,
    BPF_PROG_TYPE_SOCK_OPS = 13,
    BPF_PROG_TYPE_SK_SKB = 14,
    BPF_PROG_TYPE_CGROUP_DEVICE = 15,
    BPF_PROG_TYPE_SK_MSG = 16,
    BPF_PROG_TYPE_RAW_TRACEPOINT = 17,
    BPF_PROG_TYPE_CGROUP_SOCK_ADDR = 18,
    BPF_PROG_TYPE_LWT_SEG6LOCAL = 19,
    BPF_PROG_TYPE_LIRC_MODE2 = 20,
    BPF_PROG_TYPE_SK_REUSEPORT = 21,
    BPF_PROG_TYPE_FLOW_DISSECTOR = 22,
    BPF_PROG_TYPE_CGROUP_SYSCTL = 23,
    BPF_PROG_TYPE_RAW_TRACEPOINT_WRITABLE = 24,
    BPF_PROG_TYPE_CGROUP_SOCKOPT = 25,
    BPF_PROG_TYPE_TRACING = 26,
    BPF_PROG_TYPE_STRUCT_OPS = 27,
    BPF_PROG_TYPE_EXT = 28,
    BPF_PROG_TYPE_LSM = 29,
    BPF_PROG_TYPE_SK_LOOKUP = 30,
    BPF_PROG_TYPE_SYSCALL = 31,
    BPF_PROG_TYPE_NETFILTER = 32,
    __MAX_BPF_PROG_TYPE = 33,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_attach_type {
    BPF_CGROUP_INET_INGRESS = 0,
    BPF_CGROUP_INET_EGRESS = 1,
    BPF_CGROUP_INET_SOCK_CREATE = 2,
    BPF_CGROUP_SOCK_OPS = 3,
    BPF_SK_SKB_STREAM_PARSER = 4,
    BPF_SK_SKB_STREAM_VERDICT = 5,
    BPF_CGROUP_DEVICE = 6,
    BPF_SK_MSG_VERDICT = 7,
    BPF_CGROUP_INET4_BIND = 8,
    BPF_CGROUP_INET6_BIND = 9,
    BPF_CGROUP_INET4_CONNECT = 10,
    BPF_CGROUP_INET6_CONNECT = 11,
    BPF_CGROUP_INET4_POST_BIND = 12,
    BPF_CGROUP_INET6_POST_BIND = 13,
    BPF_CGROUP_UDP4_SENDMSG = 14,
    BPF_CGROUP_UDP6_SENDMSG = 15,
    BPF_LIRC_MODE2 = 16,
    BPF_FLOW_DISSECTOR = 17,
    BPF_CGROUP_SYSCTL = 18,
    BPF_CGROUP_UDP4_RECVMSG = 19,
    BPF_CGROUP_UDP6_RECVMSG = 20,
    BPF_CGROUP_GETSOCKOPT = 21,
    BPF_CGROUP_SETSOCKOPT = 22,
    BPF_TRACE_RAW_TP = 23,
    BPF_TRACE_FENTRY = 24,
    BPF_TRACE_FEXIT = 25,
    BPF_MODIFY_RETURN = 26,
    BPF_LSM_MAC = 27,
    BPF_TRACE_ITER = 28,
    BPF_CGROUP_INET4_GETPEERNAME = 29,
    BPF_CGROUP_INET6_GETPEERNAME = 30,
    BPF_CGROUP_INET4_GETSOCKNAME = 31,
    BPF_CGROUP_INET6_GETSOCKNAME = 32,
    BPF_XDP_DEVMAP = 33,
    BPF_CGROUP_INET_SOCK_RELEASE = 34,
    BPF_XDP_CPUMAP = 35,
    BPF_SK_LOOKUP = 36,
    BPF_XDP = 37,
    BPF_SK_SKB_VERDICT = 38,
    BPF_SK_REUSEPORT_SELECT = 39,
    BPF_SK_REUSEPORT_SELECT_OR_MIGRATE = 40,
    BPF_PERF_EVENT = 41,
    BPF_TRACE_KPROBE_MULTI = 42,
    BPF_LSM_CGROUP = 43,
    BPF_STRUCT_OPS = 44,
    BPF_NETFILTER = 45,
    BPF_TCX_INGRESS = 46,
    BPF_TCX_EGRESS = 47,
    BPF_TRACE_UPROBE_MULTI = 48,
    BPF_CGROUP_UNIX_CONNECT = 49,
    BPF_CGROUP_UNIX_SENDMSG = 50,
    BPF_CGROUP_UNIX_RECVMSG = 51,
    BPF_CGROUP_UNIX_GETPEERNAME = 52,
    BPF_CGROUP_UNIX_GETSOCKNAME = 53,
    BPF_NETKIT_PRIMARY = 54,
    BPF_NETKIT_PEER = 55,
    __MAX_BPF_ATTACH_TYPE = 56,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_link_type {
    BPF_LINK_TYPE_UNSPEC = 0,
    BPF_LINK_TYPE_RAW_TRACEPOINT = 1,
    BPF_LINK_TYPE_TRACING = 2,
    BPF_LINK_TYPE_CGROUP = 3,
    BPF_LINK_TYPE_ITER = 4,
    BPF_LINK_TYPE_NETNS = 5,
    BPF_LINK_TYPE_XDP = 6,
    BPF_LINK_TYPE_PERF_EVENT = 7,
    BPF_LINK_TYPE_KPROBE_MULTI = 8,
    BPF_LINK_TYPE_STRUCT_OPS = 9,
    BPF_LINK_TYPE_NETFILTER = 10,
    BPF_LINK_TYPE_TCX = 11,
    BPF_LINK_TYPE_UPROBE_MULTI = 12,
    BPF_LINK_TYPE_NETKIT = 13,
    __MAX_BPF_LINK_TYPE = 14,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_perf_event_type {
    BPF_PERF_EVENT_UNSPEC = 0,
    BPF_PERF_EVENT_UPROBE = 1,
    BPF_PERF_EVENT_URETPROBE = 2,
    BPF_PERF_EVENT_KPROBE = 3,
    BPF_PERF_EVENT_KRETPROBE = 4,
    BPF_PERF_EVENT_TRACEPOINT = 5,
    BPF_PERF_EVENT_EVENT = 6,
}
pub const BPF_F_KPROBE_MULTI_RETURN: _bindgen_ty_2 = 1;
pub type _bindgen_ty_2 = ::core::ffi::c_uint;
pub const BPF_F_UPROBE_MULTI_RETURN: _bindgen_ty_3 = 1;
pub type _bindgen_ty_3 = ::core::ffi::c_uint;
pub const BPF_ANY: _bindgen_ty_4 = 0;
pub const BPF_NOEXIST: _bindgen_ty_4 = 1;
pub const BPF_EXIST: _bindgen_ty_4 = 2;
pub const BPF_F_LOCK: _bindgen_ty_4 = 4;
pub type _bindgen_ty_4 = ::core::ffi::c_uint;
pub const BPF_F_NO_PREALLOC: _bindgen_ty_5 = 1;
pub const BPF_F_NO_COMMON_LRU: _bindgen_ty_5 = 2;
pub const BPF_F_NUMA_NODE: _bindgen_ty_5 = 4;
pub const BPF_F_RDONLY: _bindgen_ty_5 = 8;
pub const BPF_F_WRONLY: _bindgen_ty_5 = 16;
pub const BPF_F_STACK_BUILD_ID: _bindgen_ty_5 = 32;
pub const BPF_F_ZERO_SEED: _bindgen_ty_5 = 64;
pub const BPF_F_RDONLY_PROG: _bindgen_ty_5 = 128;
pub const BPF_F_WRONLY_PROG: _bindgen_ty_5 = 256;
pub const BPF_F_CLONE: _bindgen_ty_5 = 512;
pub const BPF_F_MMAPABLE: _bindgen_ty_5 = 1024;
pub const BPF_F_PRESERVE_ELEMS: _bindgen_ty_5 = 2048;
pub const BPF_F_INNER_MAP: _bindgen_ty_5 = 4096;
pub const BPF_F_LINK: _bindgen_ty_5 = 8192;
pub const BPF_F_PATH_FD: _bindgen_ty_5 = 16384;
pub const BPF_F_VTYPE_BTF_OBJ_FD: _bindgen_ty_5 = 32768;
pub const BPF_F_TOKEN_FD: _bindgen_ty_5 = 65536;
pub const BPF_F_SEGV_ON_FAULT: _bindgen_ty_5 = 131072;
pub const BPF_F_NO_USER_CONV: _bindgen_ty_5 = 262144;
pub type _bindgen_ty_5 = ::core::ffi::c_uint;
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_stats_type {
    BPF_STATS_RUN_TIME = 0,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr {
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_1,
    pub __bindgen_anon_2: bpf_attr__bindgen_ty_2,
    pub batch: bpf_attr__bindgen_ty_3,
    pub __bindgen_anon_3: bpf_attr__bindgen_ty_4,
    pub __bindgen_anon_4: bpf_attr__bindgen_ty_5,
    pub __bindgen_anon_5: bpf_attr__bindgen_ty_6,
    pub test: bpf_attr__bindgen_ty_7,
    pub __bindgen_anon_6: bpf_attr__bindgen_ty_8,
    pub info: bpf_attr__bindgen_ty_9,
    pub query: bpf_attr__bindgen_ty_10,
    pub raw_tracepoint: bpf_attr__bindgen_ty_11,
    pub __bindgen_anon_7: bpf_attr__bindgen_ty_12,
    pub task_fd_query: bpf_attr__bindgen_ty_13,
    pub link_create: bpf_attr__bindgen_ty_14,
    pub link_update: bpf_attr__bindgen_ty_15,
    pub link_detach: bpf_attr__bindgen_ty_16,
    pub enable_stats: bpf_attr__bindgen_ty_17,
    pub iter_create: bpf_attr__bindgen_ty_18,
    pub prog_bind_map: bpf_attr__bindgen_ty_19,
    pub token_create: bpf_attr__bindgen_ty_20,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_1 {
    pub map_type: __u32,
    pub key_size: __u32,
    pub value_size: __u32,
    pub max_entries: __u32,
    pub map_flags: __u32,
    pub inner_map_fd: __u32,
    pub numa_node: __u32,
    pub map_name: [::core::ffi::c_char; 16usize],
    pub map_ifindex: __u32,
    pub btf_fd: __u32,
    pub btf_key_type_id: __u32,
    pub btf_value_type_id: __u32,
    pub btf_vmlinux_value_type_id: __u32,
    pub map_extra: __u64,
    pub value_type_btf_obj_fd: __s32,
    pub map_token_fd: __s32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_2 {
    pub map_fd: __u32,
    pub key: __u64,
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_2__bindgen_ty_1,
    pub flags: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_2__bindgen_ty_1 {
    pub value: __u64,
    pub next_key: __u64,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_3 {
    pub in_batch: __u64,
    pub out_batch: __u64,
    pub keys: __u64,
    pub values: __u64,
    pub count: __u32,
    pub map_fd: __u32,
    pub elem_flags: __u64,
    pub flags: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_4 {
    pub prog_type: __u32,
    pub insn_cnt: __u32,
    pub insns: __u64,
    pub license: __u64,
    pub log_level: __u32,
    pub log_size: __u32,
    pub log_buf: __u64,
    pub kern_version: __u32,
    pub prog_flags: __u32,
    pub prog_name: [::core::ffi::c_char; 16usize],
    pub prog_ifindex: __u32,
    pub expected_attach_type: __u32,
    pub prog_btf_fd: __u32,
    pub func_info_rec_size: __u32,
    pub func_info: __u64,
    pub func_info_cnt: __u32,
    pub line_info_rec_size: __u32,
    pub line_info: __u64,
    pub line_info_cnt: __u32,
    pub attach_btf_id: __u32,
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_4__bindgen_ty_1,
    pub core_relo_cnt: __u32,
    pub fd_array: __u64,
    pub core_relos: __u64,
    pub core_relo_rec_size: __u32,
    pub log_true_size: __u32,
    pub prog_token_fd: __s32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_4__bindgen_ty_1 {
    pub attach_prog_fd: __u32,
    pub attach_btf_obj_fd: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_5 {
    pub pathname: __u64,
    pub bpf_fd: __u32,
    pub file_flags: __u32,
    pub path_fd: __s32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_6 {
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_6__bindgen_ty_1,
    pub attach_bpf_fd: __u32,
    pub attach_type: __u32,
    pub attach_flags: __u32,
    pub replace_bpf_fd: __u32,
    pub __bindgen_anon_2: bpf_attr__bindgen_ty_6__bindgen_ty_2,
    pub expected_revision: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_6__bindgen_ty_1 {
    pub target_fd: __u32,
    pub target_ifindex: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_6__bindgen_ty_2 {
    pub relative_fd: __u32,
    pub relative_id: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_7 {
    pub prog_fd: __u32,
    pub retval: __u32,
    pub data_size_in: __u32,
    pub data_size_out: __u32,
    pub data_in: __u64,
    pub data_out: __u64,
    pub repeat: __u32,
    pub duration: __u32,
    pub ctx_size_in: __u32,
    pub ctx_size_out: __u32,
    pub ctx_in: __u64,
    pub ctx_out: __u64,
    pub flags: __u32,
    pub cpu: __u32,
    pub batch_size: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_8 {
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_8__bindgen_ty_1,
    pub next_id: __u32,
    pub open_flags: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_8__bindgen_ty_1 {
    pub start_id: __u32,
    pub prog_id: __u32,
    pub map_id: __u32,
    pub btf_id: __u32,
    pub link_id: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_9 {
    pub bpf_fd: __u32,
    pub info_len: __u32,
    pub info: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_10 {
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_10__bindgen_ty_1,
    pub attach_type: __u32,
    pub query_flags: __u32,
    pub attach_flags: __u32,
    pub prog_ids: __u64,
    pub __bindgen_anon_2: bpf_attr__bindgen_ty_10__bindgen_ty_2,
    pub _bitfield_align_1: [u8; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
    pub prog_attach_flags: __u64,
    pub link_ids: __u64,
    pub link_attach_flags: __u64,
    pub revision: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_10__bindgen_ty_1 {
    pub target_fd: __u32,
    pub target_ifindex: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_10__bindgen_ty_2 {
    pub prog_cnt: __u32,
    pub count: __u32,
}
impl bpf_attr__bindgen_ty_10 {
    #[inline]
    pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 4usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
        __bindgen_bitfield_unit
    }
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_11 {
    pub name: __u64,
    pub prog_fd: __u32,
    pub _bitfield_align_1: [u8; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
    pub cookie: __u64,
}
impl bpf_attr__bindgen_ty_11 {
    #[inline]
    pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 4usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
        __bindgen_bitfield_unit
    }
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_12 {
    pub btf: __u64,
    pub btf_log_buf: __u64,
    pub btf_size: __u32,
    pub btf_log_size: __u32,
    pub btf_log_level: __u32,
    pub btf_log_true_size: __u32,
    pub btf_flags: __u32,
    pub btf_token_fd: __s32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_13 {
    pub pid: __u32,
    pub fd: __u32,
    pub flags: __u32,
    pub buf_len: __u32,
    pub buf: __u64,
    pub prog_id: __u32,
    pub fd_type: __u32,
    pub probe_offset: __u64,
    pub probe_addr: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14 {
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_14__bindgen_ty_1,
    pub __bindgen_anon_2: bpf_attr__bindgen_ty_14__bindgen_ty_2,
    pub attach_type: __u32,
    pub flags: __u32,
    pub __bindgen_anon_3: bpf_attr__bindgen_ty_14__bindgen_ty_3,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_14__bindgen_ty_1 {
    pub prog_fd: __u32,
    pub map_fd: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_14__bindgen_ty_2 {
    pub target_fd: __u32,
    pub target_ifindex: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_14__bindgen_ty_3 {
    pub target_btf_id: __u32,
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_1,
    pub perf_event: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_2,
    pub kprobe_multi: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_3,
    pub tracing: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_4,
    pub netfilter: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_5,
    pub tcx: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_6,
    pub uprobe_multi: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_7,
    pub netkit: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_8,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_1 {
    pub iter_info: __u64,
    pub iter_info_len: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_2 {
    pub bpf_cookie: __u64,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_3 {
    pub flags: __u32,
    pub cnt: __u32,
    pub syms: __u64,
    pub addrs: __u64,
    pub cookies: __u64,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_4 {
    pub target_btf_id: __u32,
    pub cookie: __u64,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_5 {
    pub pf: __u32,
    pub hooknum: __u32,
    pub priority: __s32,
    pub flags: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_6 {
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_6__bindgen_ty_1,
    pub expected_revision: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_6__bindgen_ty_1 {
    pub relative_fd: __u32,
    pub relative_id: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_7 {
    pub path: __u64,
    pub offsets: __u64,
    pub ref_ctr_offsets: __u64,
    pub cookies: __u64,
    pub cnt: __u32,
    pub flags: __u32,
    pub pid: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_8 {
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_8__bindgen_ty_1,
    pub expected_revision: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_8__bindgen_ty_1 {
    pub relative_fd: __u32,
    pub relative_id: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_15 {
    pub link_fd: __u32,
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_15__bindgen_ty_1,
    pub flags: __u32,
    pub __bindgen_anon_2: bpf_attr__bindgen_ty_15__bindgen_ty_2,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_15__bindgen_ty_1 {
    pub new_prog_fd: __u32,
    pub new_map_fd: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_15__bindgen_ty_2 {
    pub old_prog_fd: __u32,
    pub old_map_fd: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_16 {
    pub link_fd: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_17 {
    pub type_: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_18 {
    pub link_fd: __u32,
    pub flags: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_19 {
    pub prog_fd: __u32,
    pub map_fd: __u32,
    pub flags: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_20 {
    pub flags: __u32,
    pub bpffs_fd: __u32,
}
  1024. pub const BPF_F_RECOMPUTE_CSUM: _bindgen_ty_6 = 1;
  1025. pub const BPF_F_INVALIDATE_HASH: _bindgen_ty_6 = 2;
  1026. pub type _bindgen_ty_6 = ::core::ffi::c_uint;
  1027. pub const BPF_F_HDR_FIELD_MASK: _bindgen_ty_7 = 15;
  1028. pub type _bindgen_ty_7 = ::core::ffi::c_uint;
  1029. pub const BPF_F_PSEUDO_HDR: _bindgen_ty_8 = 16;
  1030. pub const BPF_F_MARK_MANGLED_0: _bindgen_ty_8 = 32;
  1031. pub const BPF_F_MARK_ENFORCE: _bindgen_ty_8 = 64;
  1032. pub type _bindgen_ty_8 = ::core::ffi::c_uint;
  1033. pub const BPF_F_INGRESS: _bindgen_ty_9 = 1;
  1034. pub type _bindgen_ty_9 = ::core::ffi::c_uint;
  1035. pub const BPF_F_TUNINFO_IPV6: _bindgen_ty_10 = 1;
  1036. pub type _bindgen_ty_10 = ::core::ffi::c_uint;
  1037. pub const BPF_F_SKIP_FIELD_MASK: _bindgen_ty_11 = 255;
  1038. pub const BPF_F_USER_STACK: _bindgen_ty_11 = 256;
  1039. pub const BPF_F_FAST_STACK_CMP: _bindgen_ty_11 = 512;
  1040. pub const BPF_F_REUSE_STACKID: _bindgen_ty_11 = 1024;
  1041. pub const BPF_F_USER_BUILD_ID: _bindgen_ty_11 = 2048;
  1042. pub type _bindgen_ty_11 = ::core::ffi::c_uint;
  1043. pub const BPF_F_ZERO_CSUM_TX: _bindgen_ty_12 = 2;
  1044. pub const BPF_F_DONT_FRAGMENT: _bindgen_ty_12 = 4;
  1045. pub const BPF_F_SEQ_NUMBER: _bindgen_ty_12 = 8;
  1046. pub const BPF_F_NO_TUNNEL_KEY: _bindgen_ty_12 = 16;
  1047. pub type _bindgen_ty_12 = ::core::ffi::c_uint;
  1048. pub const BPF_F_TUNINFO_FLAGS: _bindgen_ty_13 = 16;
  1049. pub type _bindgen_ty_13 = ::core::ffi::c_uint;
  1050. pub const BPF_F_INDEX_MASK: _bindgen_ty_14 = 4294967295;
  1051. pub const BPF_F_CURRENT_CPU: _bindgen_ty_14 = 4294967295;
  1052. pub const BPF_F_CTXLEN_MASK: _bindgen_ty_14 = 4503595332403200;
  1053. pub type _bindgen_ty_14 = ::core::ffi::c_ulong;
  1054. pub const BPF_F_CURRENT_NETNS: _bindgen_ty_15 = -1;
  1055. pub type _bindgen_ty_15 = ::core::ffi::c_int;
  1056. pub const BPF_F_ADJ_ROOM_FIXED_GSO: _bindgen_ty_17 = 1;
  1057. pub const BPF_F_ADJ_ROOM_ENCAP_L3_IPV4: _bindgen_ty_17 = 2;
  1058. pub const BPF_F_ADJ_ROOM_ENCAP_L3_IPV6: _bindgen_ty_17 = 4;
  1059. pub const BPF_F_ADJ_ROOM_ENCAP_L4_GRE: _bindgen_ty_17 = 8;
  1060. pub const BPF_F_ADJ_ROOM_ENCAP_L4_UDP: _bindgen_ty_17 = 16;
  1061. pub const BPF_F_ADJ_ROOM_NO_CSUM_RESET: _bindgen_ty_17 = 32;
  1062. pub const BPF_F_ADJ_ROOM_ENCAP_L2_ETH: _bindgen_ty_17 = 64;
  1063. pub const BPF_F_ADJ_ROOM_DECAP_L3_IPV4: _bindgen_ty_17 = 128;
  1064. pub const BPF_F_ADJ_ROOM_DECAP_L3_IPV6: _bindgen_ty_17 = 256;
  1065. pub type _bindgen_ty_17 = ::core::ffi::c_uint;
  1066. pub const BPF_F_SYSCTL_BASE_NAME: _bindgen_ty_19 = 1;
  1067. pub type _bindgen_ty_19 = ::core::ffi::c_uint;
  1068. pub const BPF_F_GET_BRANCH_RECORDS_SIZE: _bindgen_ty_21 = 1;
  1069. pub type _bindgen_ty_21 = ::core::ffi::c_uint;
  1070. pub const BPF_RINGBUF_BUSY_BIT: _bindgen_ty_24 = 2147483648;
  1071. pub const BPF_RINGBUF_DISCARD_BIT: _bindgen_ty_24 = 1073741824;
  1072. pub const BPF_RINGBUF_HDR_SZ: _bindgen_ty_24 = 8;
  1073. pub type _bindgen_ty_24 = ::core::ffi::c_uint;
  1074. pub const BPF_F_BPRM_SECUREEXEC: _bindgen_ty_26 = 1;
  1075. pub type _bindgen_ty_26 = ::core::ffi::c_uint;
  1076. pub const BPF_F_BROADCAST: _bindgen_ty_27 = 8;
  1077. pub const BPF_F_EXCLUDE_INGRESS: _bindgen_ty_27 = 16;
  1078. pub type _bindgen_ty_27 = ::core::ffi::c_uint;
  1079. #[repr(C)]
  1080. #[derive(Copy, Clone)]
  1081. pub struct bpf_devmap_val {
  1082. pub ifindex: __u32,
  1083. pub bpf_prog: bpf_devmap_val__bindgen_ty_1,
  1084. }
  1085. #[repr(C)]
  1086. #[derive(Copy, Clone)]
  1087. pub union bpf_devmap_val__bindgen_ty_1 {
  1088. pub fd: ::core::ffi::c_int,
  1089. pub id: __u32,
  1090. }
  1091. #[repr(C)]
  1092. #[derive(Copy, Clone)]
  1093. pub struct bpf_cpumap_val {
  1094. pub qsize: __u32,
  1095. pub bpf_prog: bpf_cpumap_val__bindgen_ty_1,
  1096. }
  1097. #[repr(C)]
  1098. #[derive(Copy, Clone)]
  1099. pub union bpf_cpumap_val__bindgen_ty_1 {
  1100. pub fd: ::core::ffi::c_int,
  1101. pub id: __u32,
  1102. }
  1103. #[repr(C)]
  1104. #[derive(Debug, Copy, Clone)]
  1105. pub struct bpf_prog_info {
  1106. pub type_: __u32,
  1107. pub id: __u32,
  1108. pub tag: [__u8; 8usize],
  1109. pub jited_prog_len: __u32,
  1110. pub xlated_prog_len: __u32,
  1111. pub jited_prog_insns: __u64,
  1112. pub xlated_prog_insns: __u64,
  1113. pub load_time: __u64,
  1114. pub created_by_uid: __u32,
  1115. pub nr_map_ids: __u32,
  1116. pub map_ids: __u64,
  1117. pub name: [::core::ffi::c_char; 16usize],
  1118. pub ifindex: __u32,
  1119. pub _bitfield_align_1: [u8; 0],
  1120. pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
  1121. pub netns_dev: __u64,
  1122. pub netns_ino: __u64,
  1123. pub nr_jited_ksyms: __u32,
  1124. pub nr_jited_func_lens: __u32,
  1125. pub jited_ksyms: __u64,
  1126. pub jited_func_lens: __u64,
  1127. pub btf_id: __u32,
  1128. pub func_info_rec_size: __u32,
  1129. pub func_info: __u64,
  1130. pub nr_func_info: __u32,
  1131. pub nr_line_info: __u32,
  1132. pub line_info: __u64,
  1133. pub jited_line_info: __u64,
  1134. pub nr_jited_line_info: __u32,
  1135. pub line_info_rec_size: __u32,
  1136. pub jited_line_info_rec_size: __u32,
  1137. pub nr_prog_tags: __u32,
  1138. pub prog_tags: __u64,
  1139. pub run_time_ns: __u64,
  1140. pub run_cnt: __u64,
  1141. pub recursion_misses: __u64,
  1142. pub verified_insns: __u32,
  1143. pub attach_btf_obj_id: __u32,
  1144. pub attach_btf_id: __u32,
  1145. }
  1146. impl bpf_prog_info {
  1147. #[inline]
  1148. pub fn gpl_compatible(&self) -> __u32 {
  1149. unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
  1150. }
  1151. #[inline]
  1152. pub fn set_gpl_compatible(&mut self, val: __u32) {
  1153. unsafe {
  1154. let val: u32 = ::core::mem::transmute(val);
  1155. self._bitfield_1.set(0usize, 1u8, val as u64)
  1156. }
  1157. }
  1158. #[inline]
  1159. pub unsafe fn gpl_compatible_raw(this: *const Self) -> __u32 {
  1160. unsafe {
  1161. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 4usize]>>::raw_get(
  1162. ::core::ptr::addr_of!((*this)._bitfield_1),
  1163. 0usize,
  1164. 1u8,
  1165. ) as u32)
  1166. }
  1167. }
  1168. #[inline]
  1169. pub unsafe fn set_gpl_compatible_raw(this: *mut Self, val: __u32) {
  1170. unsafe {
  1171. let val: u32 = ::core::mem::transmute(val);
  1172. <__BindgenBitfieldUnit<[u8; 4usize]>>::raw_set(
  1173. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  1174. 0usize,
  1175. 1u8,
  1176. val as u64,
  1177. )
  1178. }
  1179. }
  1180. #[inline]
  1181. pub fn new_bitfield_1(gpl_compatible: __u32) -> __BindgenBitfieldUnit<[u8; 4usize]> {
  1182. let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
  1183. __bindgen_bitfield_unit.set(0usize, 1u8, {
  1184. let gpl_compatible: u32 = unsafe { ::core::mem::transmute(gpl_compatible) };
  1185. gpl_compatible as u64
  1186. });
  1187. __bindgen_bitfield_unit
  1188. }
  1189. }
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_map_info {
    pub type_: __u32,
    pub id: __u32,
    pub key_size: __u32,
    pub value_size: __u32,
    pub max_entries: __u32,
    pub map_flags: __u32,
    pub name: [::core::ffi::c_char; 16usize],
    pub ifindex: __u32,
    pub btf_vmlinux_value_type_id: __u32,
    pub netns_dev: __u64,
    pub netns_ino: __u64,
    pub btf_id: __u32,
    pub btf_key_type_id: __u32,
    pub btf_value_type_id: __u32,
    pub btf_vmlinux_id: __u32,
    pub map_extra: __u64,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_btf_info {
    pub btf: __u64,
    pub btf_size: __u32,
    pub id: __u32,
    pub name: __u64,
    pub name_len: __u32,
    pub kernel_btf: __u32,
}
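// Illustrative sketch (editor-added): in bpf_btf_info the `name` field is a
// user-space pointer carried in a __u64, paired with `name_len`. A caller
// typically points it at a local buffer before issuing BPF_OBJ_GET_INFO_BY_FD
// so the kernel can copy the BTF object name out; the buffer must of course
// outlive the syscall.
#[allow(dead_code)]
fn example_btf_info_name_buffer(buf: &mut [u8]) -> bpf_btf_info {
    let mut info: bpf_btf_info = unsafe { ::core::mem::zeroed() };
    info.name = buf.as_mut_ptr() as usize as __u64;
    info.name_len = buf.len() as __u32;
    info
}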
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_link_info {
    pub type_: __u32,
    pub id: __u32,
    pub prog_id: __u32,
    pub __bindgen_anon_1: bpf_link_info__bindgen_ty_1,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_link_info__bindgen_ty_1 {
    pub raw_tracepoint: bpf_link_info__bindgen_ty_1__bindgen_ty_1,
    pub tracing: bpf_link_info__bindgen_ty_1__bindgen_ty_2,
    pub cgroup: bpf_link_info__bindgen_ty_1__bindgen_ty_3,
    pub iter: bpf_link_info__bindgen_ty_1__bindgen_ty_4,
    pub netns: bpf_link_info__bindgen_ty_1__bindgen_ty_5,
    pub xdp: bpf_link_info__bindgen_ty_1__bindgen_ty_6,
    pub struct_ops: bpf_link_info__bindgen_ty_1__bindgen_ty_7,
    pub netfilter: bpf_link_info__bindgen_ty_1__bindgen_ty_8,
    pub kprobe_multi: bpf_link_info__bindgen_ty_1__bindgen_ty_9,
    pub uprobe_multi: bpf_link_info__bindgen_ty_1__bindgen_ty_10,
    pub perf_event: bpf_link_info__bindgen_ty_1__bindgen_ty_11,
    pub tcx: bpf_link_info__bindgen_ty_1__bindgen_ty_12,
    pub netkit: bpf_link_info__bindgen_ty_1__bindgen_ty_13,
}
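// Illustrative sketch (editor-added): which variant of the anonymous union in
// bpf_link_info is valid depends on the link's `type_` discriminant, so the
// union has to be read through an unsafe field access. The BPF_LINK_TYPE_*
// constants live elsewhere in the kernel headers; the literal 6 used here for
// an XDP link is an assumption made for the sake of the example.
#[allow(dead_code)]
fn example_xdp_link_ifindex(info: &bpf_link_info) -> Option<__u32> {
    if info.type_ == 6 {
        Some(unsafe { info.__bindgen_anon_1.xdp.ifindex })
    } else {
        None
    }
}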
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_1 {
    pub tp_name: __u64,
    pub tp_name_len: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_2 {
    pub attach_type: __u32,
    pub target_obj_id: __u32,
    pub target_btf_id: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_3 {
    pub cgroup_id: __u64,
    pub attach_type: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_4 {
    pub target_name: __u64,
    pub target_name_len: __u32,
    pub __bindgen_anon_1: bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_1,
    pub __bindgen_anon_2: bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_2,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_1 {
    pub map: bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_1__bindgen_ty_1,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_1__bindgen_ty_1 {
    pub map_id: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_2 {
    pub cgroup: bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_2__bindgen_ty_1,
    pub task: bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_2__bindgen_ty_2,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_2__bindgen_ty_1 {
    pub cgroup_id: __u64,
    pub order: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_2__bindgen_ty_2 {
    pub tid: __u32,
    pub pid: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_5 {
    pub netns_ino: __u32,
    pub attach_type: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_6 {
    pub ifindex: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_7 {
    pub map_id: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_8 {
    pub pf: __u32,
    pub hooknum: __u32,
    pub priority: __s32,
    pub flags: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_9 {
    pub addrs: __u64,
    pub count: __u32,
    pub flags: __u32,
    pub missed: __u64,
    pub cookies: __u64,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_10 {
    pub path: __u64,
    pub offsets: __u64,
    pub ref_ctr_offsets: __u64,
    pub cookies: __u64,
    pub path_size: __u32,
    pub count: __u32,
    pub flags: __u32,
    pub pid: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_11 {
    pub type_: __u32,
    pub _bitfield_align_1: [u8; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
    pub __bindgen_anon_1: bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1 {
    pub uprobe: bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_1,
    pub kprobe: bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_2,
    pub tracepoint: bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_3,
    pub event: bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_4,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_1 {
    pub file_name: __u64,
    pub name_len: __u32,
    pub offset: __u32,
    pub cookie: __u64,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_2 {
    pub func_name: __u64,
    pub name_len: __u32,
    pub offset: __u32,
    pub addr: __u64,
    pub missed: __u64,
    pub cookie: __u64,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_3 {
    pub tp_name: __u64,
    pub name_len: __u32,
    pub _bitfield_align_1: [u8; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
    pub cookie: __u64,
}
impl bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_3 {
    #[inline]
    pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 4usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
        __bindgen_bitfield_unit
    }
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_4 {
    pub config: __u64,
    pub type_: __u32,
    pub _bitfield_align_1: [u8; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
    pub cookie: __u64,
}
impl bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_4 {
    #[inline]
    pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 4usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
        __bindgen_bitfield_unit
    }
}
impl bpf_link_info__bindgen_ty_1__bindgen_ty_11 {
    #[inline]
    pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 4usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
        __bindgen_bitfield_unit
    }
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_12 {
    pub ifindex: __u32,
    pub attach_type: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_13 {
    pub ifindex: __u32,
    pub attach_type: __u32,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_task_fd_type {
    BPF_FD_TYPE_RAW_TRACEPOINT = 0,
    BPF_FD_TYPE_TRACEPOINT = 1,
    BPF_FD_TYPE_KPROBE = 2,
    BPF_FD_TYPE_KRETPROBE = 3,
    BPF_FD_TYPE_UPROBE = 4,
    BPF_FD_TYPE_URETPROBE = 5,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_func_info {
    pub insn_off: __u32,
    pub type_id: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_line_info {
    pub insn_off: __u32,
    pub file_name_off: __u32,
    pub line_off: __u32,
    pub line_col: __u32,
}
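// Illustrative sketch (editor-added): `line_col` in bpf_line_info packs the
// source line and column into one __u32 (line in the upper bits, column in the
// low 10 bits), mirroring the BPF_LINE_INFO_LINE_NUM/LINE_COL helpers in the
// kernel headers; the split below is an assumption based on those macros.
#[allow(dead_code)]
fn example_split_line_col(li: &bpf_line_info) -> (__u32, __u32) {
    (li.line_col >> 10, li.line_col & 0x3ff)
}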
pub const BPF_F_TIMER_ABS: _bindgen_ty_41 = 1;
pub const BPF_F_TIMER_CPU_PIN: _bindgen_ty_41 = 2;
pub type _bindgen_ty_41 = ::core::ffi::c_uint;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct btf_header {
    pub magic: __u16,
    pub version: __u8,
    pub flags: __u8,
    pub hdr_len: __u32,
    pub type_off: __u32,
    pub type_len: __u32,
    pub str_off: __u32,
    pub str_len: __u32,
}
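// Illustrative sketch (editor-added): a BTF blob starts with this header.
// 0xeB9F is the BTF magic from the kernel's btf.h, written here as a literal
// rather than referencing a named constant, which is an assumption for the
// sake of the example.
#[allow(dead_code)]
fn example_btf_header_looks_valid(hdr: &btf_header) -> bool {
    hdr.magic == 0xeB9F && hdr.hdr_len as usize >= ::core::mem::size_of::<btf_header>()
}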
#[repr(C)]
#[derive(Copy, Clone)]
pub struct btf_type {
    pub name_off: __u32,
    pub info: __u32,
    pub __bindgen_anon_1: btf_type__bindgen_ty_1,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union btf_type__bindgen_ty_1 {
    pub size: __u32,
    pub type_: __u32,
}
pub const BTF_KIND_UNKN: _bindgen_ty_42 = 0;
pub const BTF_KIND_INT: _bindgen_ty_42 = 1;
pub const BTF_KIND_PTR: _bindgen_ty_42 = 2;
pub const BTF_KIND_ARRAY: _bindgen_ty_42 = 3;
pub const BTF_KIND_STRUCT: _bindgen_ty_42 = 4;
pub const BTF_KIND_UNION: _bindgen_ty_42 = 5;
pub const BTF_KIND_ENUM: _bindgen_ty_42 = 6;
pub const BTF_KIND_FWD: _bindgen_ty_42 = 7;
pub const BTF_KIND_TYPEDEF: _bindgen_ty_42 = 8;
pub const BTF_KIND_VOLATILE: _bindgen_ty_42 = 9;
pub const BTF_KIND_CONST: _bindgen_ty_42 = 10;
pub const BTF_KIND_RESTRICT: _bindgen_ty_42 = 11;
pub const BTF_KIND_FUNC: _bindgen_ty_42 = 12;
pub const BTF_KIND_FUNC_PROTO: _bindgen_ty_42 = 13;
pub const BTF_KIND_VAR: _bindgen_ty_42 = 14;
pub const BTF_KIND_DATASEC: _bindgen_ty_42 = 15;
pub const BTF_KIND_FLOAT: _bindgen_ty_42 = 16;
pub const BTF_KIND_DECL_TAG: _bindgen_ty_42 = 17;
pub const BTF_KIND_TYPE_TAG: _bindgen_ty_42 = 18;
pub const BTF_KIND_ENUM64: _bindgen_ty_42 = 19;
pub const NR_BTF_KINDS: _bindgen_ty_42 = 20;
pub const BTF_KIND_MAX: _bindgen_ty_42 = 19;
pub type _bindgen_ty_42 = ::core::ffi::c_uint;
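// Illustrative sketch (editor-added): `btf_type.info` packs vlen, the kind and
// the kind_flag bit; the shifts below mirror the BTF_INFO_KIND/VLEN/KFLAG
// helpers from the kernel's btf.h. The extracted kind can then be compared
// against the BTF_KIND_* constants above.
#[allow(dead_code)]
fn example_btf_info_fields(t: &btf_type) -> (__u32, __u32, bool) {
    let kind = (t.info >> 24) & 0x1f;
    let vlen = t.info & 0xffff;
    let kind_flag = (t.info >> 31) != 0;
    (kind, vlen, kind_flag)
}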
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct btf_enum {
    pub name_off: __u32,
    pub val: __s32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct btf_array {
    pub type_: __u32,
    pub index_type: __u32,
    pub nelems: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct btf_member {
    pub name_off: __u32,
    pub type_: __u32,
    pub offset: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct btf_param {
    pub name_off: __u32,
    pub type_: __u32,
}
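// Illustrative sketch (editor-added): when the enclosing struct's kind_flag is
// set, `btf_member.offset` packs the bitfield size (upper 8 bits) and the bit
// offset (lower 24 bits), as in the kernel's BTF_MEMBER_* helpers; otherwise it
// is a plain bit offset. This interpretation is an assumption based on btf.h.
#[allow(dead_code)]
fn example_btf_member_offset(m: &btf_member, kind_flag: bool) -> (__u32, __u32) {
    if kind_flag {
        (m.offset >> 24, m.offset & 0x00ff_ffff)
    } else {
        (0, m.offset)
    }
}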
pub const BTF_VAR_STATIC: _bindgen_ty_43 = 0;
pub const BTF_VAR_GLOBAL_ALLOCATED: _bindgen_ty_43 = 1;
pub const BTF_VAR_GLOBAL_EXTERN: _bindgen_ty_43 = 2;
pub type _bindgen_ty_43 = ::core::ffi::c_uint;
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum btf_func_linkage {
    BTF_FUNC_STATIC = 0,
    BTF_FUNC_GLOBAL = 1,
    BTF_FUNC_EXTERN = 2,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct btf_var {
    pub linkage: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct btf_var_secinfo {
    pub type_: __u32,
    pub offset: __u32,
    pub size: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct btf_decl_tag {
    pub component_idx: __s32,
}
impl nlmsgerr_attrs {
    pub const NLMSGERR_ATTR_MAX: nlmsgerr_attrs = nlmsgerr_attrs::NLMSGERR_ATTR_COOKIE;
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum nlmsgerr_attrs {
    NLMSGERR_ATTR_UNUSED = 0,
    NLMSGERR_ATTR_MSG = 1,
    NLMSGERR_ATTR_OFFS = 2,
    NLMSGERR_ATTR_COOKIE = 3,
    __NLMSGERR_ATTR_MAX = 4,
}
pub const IFLA_XDP_UNSPEC: _bindgen_ty_92 = 0;
pub const IFLA_XDP_FD: _bindgen_ty_92 = 1;
pub const IFLA_XDP_ATTACHED: _bindgen_ty_92 = 2;
pub const IFLA_XDP_FLAGS: _bindgen_ty_92 = 3;
pub const IFLA_XDP_PROG_ID: _bindgen_ty_92 = 4;
pub const IFLA_XDP_DRV_PROG_ID: _bindgen_ty_92 = 5;
pub const IFLA_XDP_SKB_PROG_ID: _bindgen_ty_92 = 6;
pub const IFLA_XDP_HW_PROG_ID: _bindgen_ty_92 = 7;
pub const IFLA_XDP_EXPECTED_FD: _bindgen_ty_92 = 8;
pub const __IFLA_XDP_MAX: _bindgen_ty_92 = 9;
pub type _bindgen_ty_92 = ::core::ffi::c_uint;
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum nf_inet_hooks {
    NF_INET_PRE_ROUTING = 0,
    NF_INET_LOCAL_IN = 1,
    NF_INET_FORWARD = 2,
    NF_INET_LOCAL_OUT = 3,
    NF_INET_POST_ROUTING = 4,
    NF_INET_NUMHOOKS = 5,
}
pub const NFPROTO_UNSPEC: _bindgen_ty_99 = 0;
pub const NFPROTO_INET: _bindgen_ty_99 = 1;
pub const NFPROTO_IPV4: _bindgen_ty_99 = 2;
pub const NFPROTO_ARP: _bindgen_ty_99 = 3;
pub const NFPROTO_NETDEV: _bindgen_ty_99 = 5;
pub const NFPROTO_BRIDGE: _bindgen_ty_99 = 7;
pub const NFPROTO_IPV6: _bindgen_ty_99 = 10;
pub const NFPROTO_DECNET: _bindgen_ty_99 = 12;
pub const NFPROTO_NUMPROTO: _bindgen_ty_99 = 13;
pub type _bindgen_ty_99 = ::core::ffi::c_uint;
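// Illustrative sketch (editor-added): the netfilter variant of bpf_link_info
// reports the protocol family and hook as raw integers; they correspond to the
// NFPROTO_* constants and nf_inet_hooks values defined just above.
#[allow(dead_code)]
fn example_is_ipv4_prerouting(nf: &bpf_link_info__bindgen_ty_1__bindgen_ty_8) -> bool {
    nf.pf == NFPROTO_IPV4 && nf.hooknum == nf_inet_hooks::NF_INET_PRE_ROUTING as __u32
}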
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum perf_type_id {
    PERF_TYPE_HARDWARE = 0,
    PERF_TYPE_SOFTWARE = 1,
    PERF_TYPE_TRACEPOINT = 2,
    PERF_TYPE_HW_CACHE = 3,
    PERF_TYPE_RAW = 4,
    PERF_TYPE_BREAKPOINT = 5,
    PERF_TYPE_MAX = 6,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum perf_hw_id {
    PERF_COUNT_HW_CPU_CYCLES = 0,
    PERF_COUNT_HW_INSTRUCTIONS = 1,
    PERF_COUNT_HW_CACHE_REFERENCES = 2,
    PERF_COUNT_HW_CACHE_MISSES = 3,
    PERF_COUNT_HW_BRANCH_INSTRUCTIONS = 4,
    PERF_COUNT_HW_BRANCH_MISSES = 5,
    PERF_COUNT_HW_BUS_CYCLES = 6,
    PERF_COUNT_HW_STALLED_CYCLES_FRONTEND = 7,
    PERF_COUNT_HW_STALLED_CYCLES_BACKEND = 8,
    PERF_COUNT_HW_REF_CPU_CYCLES = 9,
    PERF_COUNT_HW_MAX = 10,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum perf_hw_cache_id {
    PERF_COUNT_HW_CACHE_L1D = 0,
    PERF_COUNT_HW_CACHE_L1I = 1,
    PERF_COUNT_HW_CACHE_LL = 2,
    PERF_COUNT_HW_CACHE_DTLB = 3,
    PERF_COUNT_HW_CACHE_ITLB = 4,
    PERF_COUNT_HW_CACHE_BPU = 5,
    PERF_COUNT_HW_CACHE_NODE = 6,
    PERF_COUNT_HW_CACHE_MAX = 7,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum perf_hw_cache_op_id {
    PERF_COUNT_HW_CACHE_OP_READ = 0,
    PERF_COUNT_HW_CACHE_OP_WRITE = 1,
    PERF_COUNT_HW_CACHE_OP_PREFETCH = 2,
    PERF_COUNT_HW_CACHE_OP_MAX = 3,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum perf_hw_cache_op_result_id {
    PERF_COUNT_HW_CACHE_RESULT_ACCESS = 0,
    PERF_COUNT_HW_CACHE_RESULT_MISS = 1,
    PERF_COUNT_HW_CACHE_RESULT_MAX = 2,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum perf_sw_ids {
    PERF_COUNT_SW_CPU_CLOCK = 0,
    PERF_COUNT_SW_TASK_CLOCK = 1,
    PERF_COUNT_SW_PAGE_FAULTS = 2,
    PERF_COUNT_SW_CONTEXT_SWITCHES = 3,
    PERF_COUNT_SW_CPU_MIGRATIONS = 4,
    PERF_COUNT_SW_PAGE_FAULTS_MIN = 5,
    PERF_COUNT_SW_PAGE_FAULTS_MAJ = 6,
    PERF_COUNT_SW_ALIGNMENT_FAULTS = 7,
    PERF_COUNT_SW_EMULATION_FAULTS = 8,
    PERF_COUNT_SW_DUMMY = 9,
    PERF_COUNT_SW_BPF_OUTPUT = 10,
    PERF_COUNT_SW_CGROUP_SWITCHES = 11,
    PERF_COUNT_SW_MAX = 12,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum perf_event_sample_format {
    PERF_SAMPLE_IP = 1,
    PERF_SAMPLE_TID = 2,
    PERF_SAMPLE_TIME = 4,
    PERF_SAMPLE_ADDR = 8,
    PERF_SAMPLE_READ = 16,
    PERF_SAMPLE_CALLCHAIN = 32,
    PERF_SAMPLE_ID = 64,
    PERF_SAMPLE_CPU = 128,
    PERF_SAMPLE_PERIOD = 256,
    PERF_SAMPLE_STREAM_ID = 512,
    PERF_SAMPLE_RAW = 1024,
    PERF_SAMPLE_BRANCH_STACK = 2048,
    PERF_SAMPLE_REGS_USER = 4096,
    PERF_SAMPLE_STACK_USER = 8192,
    PERF_SAMPLE_WEIGHT = 16384,
    PERF_SAMPLE_DATA_SRC = 32768,
    PERF_SAMPLE_IDENTIFIER = 65536,
    PERF_SAMPLE_TRANSACTION = 131072,
    PERF_SAMPLE_REGS_INTR = 262144,
    PERF_SAMPLE_PHYS_ADDR = 524288,
    PERF_SAMPLE_AUX = 1048576,
    PERF_SAMPLE_CGROUP = 2097152,
    PERF_SAMPLE_DATA_PAGE_SIZE = 4194304,
    PERF_SAMPLE_CODE_PAGE_SIZE = 8388608,
    PERF_SAMPLE_WEIGHT_STRUCT = 16777216,
    PERF_SAMPLE_MAX = 33554432,
}
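// Illustrative sketch (editor-added): perf_event_sample_format values are bit
// flags, so several of them are typically OR-ed together into the
// `sample_type` field of perf_event_attr (declared just below).
#[allow(dead_code)]
fn example_sample_type() -> __u64 {
    (perf_event_sample_format::PERF_SAMPLE_IP as __u64)
        | (perf_event_sample_format::PERF_SAMPLE_TID as __u64)
        | (perf_event_sample_format::PERF_SAMPLE_TIME as __u64)
}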
#[repr(C)]
#[derive(Copy, Clone)]
pub struct perf_event_attr {
    pub type_: __u32,
    pub size: __u32,
    pub config: __u64,
    pub __bindgen_anon_1: perf_event_attr__bindgen_ty_1,
    pub sample_type: __u64,
    pub read_format: __u64,
    pub _bitfield_align_1: [u32; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 8usize]>,
    pub __bindgen_anon_2: perf_event_attr__bindgen_ty_2,
    pub bp_type: __u32,
    pub __bindgen_anon_3: perf_event_attr__bindgen_ty_3,
    pub __bindgen_anon_4: perf_event_attr__bindgen_ty_4,
    pub branch_sample_type: __u64,
    pub sample_regs_user: __u64,
    pub sample_stack_user: __u32,
    pub clockid: __s32,
    pub sample_regs_intr: __u64,
    pub aux_watermark: __u32,
    pub sample_max_stack: __u16,
    pub __reserved_2: __u16,
    pub aux_sample_size: __u32,
    pub __reserved_3: __u32,
    pub sig_data: __u64,
    pub config3: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union perf_event_attr__bindgen_ty_1 {
    pub sample_period: __u64,
    pub sample_freq: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union perf_event_attr__bindgen_ty_2 {
    pub wakeup_events: __u32,
    pub wakeup_watermark: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union perf_event_attr__bindgen_ty_3 {
    pub bp_addr: __u64,
    pub kprobe_func: __u64,
    pub uprobe_path: __u64,
    pub config1: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union perf_event_attr__bindgen_ty_4 {
    pub bp_len: __u64,
    pub kprobe_addr: __u64,
    pub probe_offset: __u64,
    pub config2: __u64,
}
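// Illustrative sketch (editor-added, not bindgen output): a minimal
// perf_event_attr for a software "CPU clock" sampling event, assembled with the
// generated bitfield setters defined in the impl below. Passing it to
// perf_event_open(2) is not shown; setting `size` to the struct's own size is
// an assumption (callers may also use one of the historical fixed sizes).
#[allow(dead_code)]
fn example_perf_event_attr() -> perf_event_attr {
    let mut attr: perf_event_attr = unsafe { ::core::mem::zeroed() };
    attr.type_ = perf_type_id::PERF_TYPE_SOFTWARE as __u32;
    attr.config = perf_sw_ids::PERF_COUNT_SW_CPU_CLOCK as __u64;
    attr.size = ::core::mem::size_of::<perf_event_attr>() as __u32;
    attr.__bindgen_anon_1.sample_period = 1_000_000;
    attr.set_disabled(1);
    attr.set_exclude_kernel(1);
    attr
}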
  1758. impl perf_event_attr {
  1759. #[inline]
  1760. pub fn disabled(&self) -> __u64 {
  1761. unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u64) }
  1762. }
  1763. #[inline]
  1764. pub fn set_disabled(&mut self, val: __u64) {
  1765. unsafe {
  1766. let val: u64 = ::core::mem::transmute(val);
  1767. self._bitfield_1.set(0usize, 1u8, val as u64)
  1768. }
  1769. }
  1770. #[inline]
  1771. pub unsafe fn disabled_raw(this: *const Self) -> __u64 {
  1772. unsafe {
  1773. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  1774. ::core::ptr::addr_of!((*this)._bitfield_1),
  1775. 0usize,
  1776. 1u8,
  1777. ) as u64)
  1778. }
  1779. }
  1780. #[inline]
  1781. pub unsafe fn set_disabled_raw(this: *mut Self, val: __u64) {
  1782. unsafe {
  1783. let val: u64 = ::core::mem::transmute(val);
  1784. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  1785. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  1786. 0usize,
  1787. 1u8,
  1788. val as u64,
  1789. )
  1790. }
  1791. }
  1792. #[inline]
  1793. pub fn inherit(&self) -> __u64 {
  1794. unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u64) }
  1795. }
  1796. #[inline]
  1797. pub fn set_inherit(&mut self, val: __u64) {
  1798. unsafe {
  1799. let val: u64 = ::core::mem::transmute(val);
  1800. self._bitfield_1.set(1usize, 1u8, val as u64)
  1801. }
  1802. }
  1803. #[inline]
  1804. pub unsafe fn inherit_raw(this: *const Self) -> __u64 {
  1805. unsafe {
  1806. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  1807. ::core::ptr::addr_of!((*this)._bitfield_1),
  1808. 1usize,
  1809. 1u8,
  1810. ) as u64)
  1811. }
  1812. }
  1813. #[inline]
  1814. pub unsafe fn set_inherit_raw(this: *mut Self, val: __u64) {
  1815. unsafe {
  1816. let val: u64 = ::core::mem::transmute(val);
  1817. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  1818. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  1819. 1usize,
  1820. 1u8,
  1821. val as u64,
  1822. )
  1823. }
  1824. }
  1825. #[inline]
  1826. pub fn pinned(&self) -> __u64 {
  1827. unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u64) }
  1828. }
  1829. #[inline]
  1830. pub fn set_pinned(&mut self, val: __u64) {
  1831. unsafe {
  1832. let val: u64 = ::core::mem::transmute(val);
  1833. self._bitfield_1.set(2usize, 1u8, val as u64)
  1834. }
  1835. }
  1836. #[inline]
  1837. pub unsafe fn pinned_raw(this: *const Self) -> __u64 {
  1838. unsafe {
  1839. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  1840. ::core::ptr::addr_of!((*this)._bitfield_1),
  1841. 2usize,
  1842. 1u8,
  1843. ) as u64)
  1844. }
  1845. }
  1846. #[inline]
  1847. pub unsafe fn set_pinned_raw(this: *mut Self, val: __u64) {
  1848. unsafe {
  1849. let val: u64 = ::core::mem::transmute(val);
  1850. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  1851. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  1852. 2usize,
  1853. 1u8,
  1854. val as u64,
  1855. )
  1856. }
  1857. }
  1858. #[inline]
  1859. pub fn exclusive(&self) -> __u64 {
  1860. unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u64) }
  1861. }
  1862. #[inline]
  1863. pub fn set_exclusive(&mut self, val: __u64) {
  1864. unsafe {
  1865. let val: u64 = ::core::mem::transmute(val);
  1866. self._bitfield_1.set(3usize, 1u8, val as u64)
  1867. }
  1868. }
  1869. #[inline]
  1870. pub unsafe fn exclusive_raw(this: *const Self) -> __u64 {
  1871. unsafe {
  1872. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  1873. ::core::ptr::addr_of!((*this)._bitfield_1),
  1874. 3usize,
  1875. 1u8,
  1876. ) as u64)
  1877. }
  1878. }
  1879. #[inline]
  1880. pub unsafe fn set_exclusive_raw(this: *mut Self, val: __u64) {
  1881. unsafe {
  1882. let val: u64 = ::core::mem::transmute(val);
  1883. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  1884. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  1885. 3usize,
  1886. 1u8,
  1887. val as u64,
  1888. )
  1889. }
  1890. }
  1891. #[inline]
  1892. pub fn exclude_user(&self) -> __u64 {
  1893. unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u64) }
  1894. }
  1895. #[inline]
  1896. pub fn set_exclude_user(&mut self, val: __u64) {
  1897. unsafe {
  1898. let val: u64 = ::core::mem::transmute(val);
  1899. self._bitfield_1.set(4usize, 1u8, val as u64)
  1900. }
  1901. }
  1902. #[inline]
  1903. pub unsafe fn exclude_user_raw(this: *const Self) -> __u64 {
  1904. unsafe {
  1905. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  1906. ::core::ptr::addr_of!((*this)._bitfield_1),
  1907. 4usize,
  1908. 1u8,
  1909. ) as u64)
  1910. }
  1911. }
  1912. #[inline]
  1913. pub unsafe fn set_exclude_user_raw(this: *mut Self, val: __u64) {
  1914. unsafe {
  1915. let val: u64 = ::core::mem::transmute(val);
  1916. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  1917. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  1918. 4usize,
  1919. 1u8,
  1920. val as u64,
  1921. )
  1922. }
  1923. }
  1924. #[inline]
  1925. pub fn exclude_kernel(&self) -> __u64 {
  1926. unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u64) }
  1927. }
  1928. #[inline]
  1929. pub fn set_exclude_kernel(&mut self, val: __u64) {
  1930. unsafe {
  1931. let val: u64 = ::core::mem::transmute(val);
  1932. self._bitfield_1.set(5usize, 1u8, val as u64)
  1933. }
  1934. }
  1935. #[inline]
  1936. pub unsafe fn exclude_kernel_raw(this: *const Self) -> __u64 {
  1937. unsafe {
  1938. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  1939. ::core::ptr::addr_of!((*this)._bitfield_1),
  1940. 5usize,
  1941. 1u8,
  1942. ) as u64)
  1943. }
  1944. }
  1945. #[inline]
  1946. pub unsafe fn set_exclude_kernel_raw(this: *mut Self, val: __u64) {
  1947. unsafe {
  1948. let val: u64 = ::core::mem::transmute(val);
  1949. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  1950. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  1951. 5usize,
  1952. 1u8,
  1953. val as u64,
  1954. )
  1955. }
  1956. }
  1957. #[inline]
  1958. pub fn exclude_hv(&self) -> __u64 {
  1959. unsafe { ::core::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u64) }
  1960. }
  1961. #[inline]
  1962. pub fn set_exclude_hv(&mut self, val: __u64) {
  1963. unsafe {
  1964. let val: u64 = ::core::mem::transmute(val);
  1965. self._bitfield_1.set(6usize, 1u8, val as u64)
  1966. }
  1967. }
  1968. #[inline]
  1969. pub unsafe fn exclude_hv_raw(this: *const Self) -> __u64 {
  1970. unsafe {
  1971. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  1972. ::core::ptr::addr_of!((*this)._bitfield_1),
  1973. 6usize,
  1974. 1u8,
  1975. ) as u64)
  1976. }
  1977. }
  1978. #[inline]
  1979. pub unsafe fn set_exclude_hv_raw(this: *mut Self, val: __u64) {
  1980. unsafe {
  1981. let val: u64 = ::core::mem::transmute(val);
  1982. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  1983. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  1984. 6usize,
  1985. 1u8,
  1986. val as u64,
  1987. )
  1988. }
  1989. }
  1990. #[inline]
  1991. pub fn exclude_idle(&self) -> __u64 {
  1992. unsafe { ::core::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u64) }
  1993. }
  1994. #[inline]
  1995. pub fn set_exclude_idle(&mut self, val: __u64) {
  1996. unsafe {
  1997. let val: u64 = ::core::mem::transmute(val);
  1998. self._bitfield_1.set(7usize, 1u8, val as u64)
  1999. }
  2000. }
  2001. #[inline]
  2002. pub unsafe fn exclude_idle_raw(this: *const Self) -> __u64 {
  2003. unsafe {
  2004. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2005. ::core::ptr::addr_of!((*this)._bitfield_1),
  2006. 7usize,
  2007. 1u8,
  2008. ) as u64)
  2009. }
  2010. }
  2011. #[inline]
  2012. pub unsafe fn set_exclude_idle_raw(this: *mut Self, val: __u64) {
  2013. unsafe {
  2014. let val: u64 = ::core::mem::transmute(val);
  2015. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2016. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2017. 7usize,
  2018. 1u8,
  2019. val as u64,
  2020. )
  2021. }
  2022. }
  2023. #[inline]
  2024. pub fn mmap(&self) -> __u64 {
  2025. unsafe { ::core::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u64) }
  2026. }
  2027. #[inline]
  2028. pub fn set_mmap(&mut self, val: __u64) {
  2029. unsafe {
  2030. let val: u64 = ::core::mem::transmute(val);
  2031. self._bitfield_1.set(8usize, 1u8, val as u64)
  2032. }
  2033. }
  2034. #[inline]
  2035. pub unsafe fn mmap_raw(this: *const Self) -> __u64 {
  2036. unsafe {
  2037. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2038. ::core::ptr::addr_of!((*this)._bitfield_1),
  2039. 8usize,
  2040. 1u8,
  2041. ) as u64)
  2042. }
  2043. }
  2044. #[inline]
  2045. pub unsafe fn set_mmap_raw(this: *mut Self, val: __u64) {
  2046. unsafe {
  2047. let val: u64 = ::core::mem::transmute(val);
  2048. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2049. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2050. 8usize,
  2051. 1u8,
  2052. val as u64,
  2053. )
  2054. }
  2055. }
  2056. #[inline]
  2057. pub fn comm(&self) -> __u64 {
  2058. unsafe { ::core::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u64) }
  2059. }
  2060. #[inline]
  2061. pub fn set_comm(&mut self, val: __u64) {
  2062. unsafe {
  2063. let val: u64 = ::core::mem::transmute(val);
  2064. self._bitfield_1.set(9usize, 1u8, val as u64)
  2065. }
  2066. }
  2067. #[inline]
  2068. pub unsafe fn comm_raw(this: *const Self) -> __u64 {
  2069. unsafe {
  2070. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2071. ::core::ptr::addr_of!((*this)._bitfield_1),
  2072. 9usize,
  2073. 1u8,
  2074. ) as u64)
  2075. }
  2076. }
  2077. #[inline]
  2078. pub unsafe fn set_comm_raw(this: *mut Self, val: __u64) {
  2079. unsafe {
  2080. let val: u64 = ::core::mem::transmute(val);
  2081. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2082. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2083. 9usize,
  2084. 1u8,
  2085. val as u64,
  2086. )
  2087. }
  2088. }
  2089. #[inline]
  2090. pub fn freq(&self) -> __u64 {
  2091. unsafe { ::core::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u64) }
  2092. }
  2093. #[inline]
  2094. pub fn set_freq(&mut self, val: __u64) {
  2095. unsafe {
  2096. let val: u64 = ::core::mem::transmute(val);
  2097. self._bitfield_1.set(10usize, 1u8, val as u64)
  2098. }
  2099. }
  2100. #[inline]
  2101. pub unsafe fn freq_raw(this: *const Self) -> __u64 {
  2102. unsafe {
  2103. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2104. ::core::ptr::addr_of!((*this)._bitfield_1),
  2105. 10usize,
  2106. 1u8,
  2107. ) as u64)
  2108. }
  2109. }
  2110. #[inline]
  2111. pub unsafe fn set_freq_raw(this: *mut Self, val: __u64) {
  2112. unsafe {
  2113. let val: u64 = ::core::mem::transmute(val);
  2114. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2115. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2116. 10usize,
  2117. 1u8,
  2118. val as u64,
  2119. )
  2120. }
  2121. }
  2122. #[inline]
  2123. pub fn inherit_stat(&self) -> __u64 {
  2124. unsafe { ::core::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u64) }
  2125. }
  2126. #[inline]
  2127. pub fn set_inherit_stat(&mut self, val: __u64) {
  2128. unsafe {
  2129. let val: u64 = ::core::mem::transmute(val);
  2130. self._bitfield_1.set(11usize, 1u8, val as u64)
  2131. }
  2132. }
  2133. #[inline]
  2134. pub unsafe fn inherit_stat_raw(this: *const Self) -> __u64 {
  2135. unsafe {
  2136. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2137. ::core::ptr::addr_of!((*this)._bitfield_1),
  2138. 11usize,
  2139. 1u8,
  2140. ) as u64)
  2141. }
  2142. }
  2143. #[inline]
  2144. pub unsafe fn set_inherit_stat_raw(this: *mut Self, val: __u64) {
  2145. unsafe {
  2146. let val: u64 = ::core::mem::transmute(val);
  2147. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2148. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2149. 11usize,
  2150. 1u8,
  2151. val as u64,
  2152. )
  2153. }
  2154. }
  2155. #[inline]
  2156. pub fn enable_on_exec(&self) -> __u64 {
  2157. unsafe { ::core::mem::transmute(self._bitfield_1.get(12usize, 1u8) as u64) }
  2158. }
  2159. #[inline]
  2160. pub fn set_enable_on_exec(&mut self, val: __u64) {
  2161. unsafe {
  2162. let val: u64 = ::core::mem::transmute(val);
  2163. self._bitfield_1.set(12usize, 1u8, val as u64)
  2164. }
  2165. }
  2166. #[inline]
  2167. pub unsafe fn enable_on_exec_raw(this: *const Self) -> __u64 {
  2168. unsafe {
  2169. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2170. ::core::ptr::addr_of!((*this)._bitfield_1),
  2171. 12usize,
  2172. 1u8,
  2173. ) as u64)
  2174. }
  2175. }
  2176. #[inline]
  2177. pub unsafe fn set_enable_on_exec_raw(this: *mut Self, val: __u64) {
  2178. unsafe {
  2179. let val: u64 = ::core::mem::transmute(val);
  2180. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2181. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2182. 12usize,
  2183. 1u8,
  2184. val as u64,
  2185. )
  2186. }
  2187. }
  2188. #[inline]
  2189. pub fn task(&self) -> __u64 {
  2190. unsafe { ::core::mem::transmute(self._bitfield_1.get(13usize, 1u8) as u64) }
  2191. }
  2192. #[inline]
  2193. pub fn set_task(&mut self, val: __u64) {
  2194. unsafe {
  2195. let val: u64 = ::core::mem::transmute(val);
  2196. self._bitfield_1.set(13usize, 1u8, val as u64)
  2197. }
  2198. }
  2199. #[inline]
  2200. pub unsafe fn task_raw(this: *const Self) -> __u64 {
  2201. unsafe {
  2202. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2203. ::core::ptr::addr_of!((*this)._bitfield_1),
  2204. 13usize,
  2205. 1u8,
  2206. ) as u64)
  2207. }
  2208. }
  2209. #[inline]
  2210. pub unsafe fn set_task_raw(this: *mut Self, val: __u64) {
  2211. unsafe {
  2212. let val: u64 = ::core::mem::transmute(val);
  2213. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2214. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2215. 13usize,
  2216. 1u8,
  2217. val as u64,
  2218. )
  2219. }
  2220. }
  2221. #[inline]
  2222. pub fn watermark(&self) -> __u64 {
  2223. unsafe { ::core::mem::transmute(self._bitfield_1.get(14usize, 1u8) as u64) }
  2224. }
  2225. #[inline]
  2226. pub fn set_watermark(&mut self, val: __u64) {
  2227. unsafe {
  2228. let val: u64 = ::core::mem::transmute(val);
  2229. self._bitfield_1.set(14usize, 1u8, val as u64)
  2230. }
  2231. }
  2232. #[inline]
  2233. pub unsafe fn watermark_raw(this: *const Self) -> __u64 {
  2234. unsafe {
  2235. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2236. ::core::ptr::addr_of!((*this)._bitfield_1),
  2237. 14usize,
  2238. 1u8,
  2239. ) as u64)
  2240. }
  2241. }
  2242. #[inline]
  2243. pub unsafe fn set_watermark_raw(this: *mut Self, val: __u64) {
  2244. unsafe {
  2245. let val: u64 = ::core::mem::transmute(val);
  2246. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2247. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2248. 14usize,
  2249. 1u8,
  2250. val as u64,
  2251. )
  2252. }
  2253. }
  2254. #[inline]
  2255. pub fn precise_ip(&self) -> __u64 {
  2256. unsafe { ::core::mem::transmute(self._bitfield_1.get(15usize, 2u8) as u64) }
  2257. }
  2258. #[inline]
  2259. pub fn set_precise_ip(&mut self, val: __u64) {
  2260. unsafe {
  2261. let val: u64 = ::core::mem::transmute(val);
  2262. self._bitfield_1.set(15usize, 2u8, val as u64)
  2263. }
  2264. }
  2265. #[inline]
  2266. pub unsafe fn precise_ip_raw(this: *const Self) -> __u64 {
  2267. unsafe {
  2268. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2269. ::core::ptr::addr_of!((*this)._bitfield_1),
  2270. 15usize,
  2271. 2u8,
  2272. ) as u64)
  2273. }
  2274. }
  2275. #[inline]
  2276. pub unsafe fn set_precise_ip_raw(this: *mut Self, val: __u64) {
  2277. unsafe {
  2278. let val: u64 = ::core::mem::transmute(val);
  2279. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2280. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2281. 15usize,
  2282. 2u8,
  2283. val as u64,
  2284. )
  2285. }
  2286. }
  2287. #[inline]
  2288. pub fn mmap_data(&self) -> __u64 {
  2289. unsafe { ::core::mem::transmute(self._bitfield_1.get(17usize, 1u8) as u64) }
  2290. }
  2291. #[inline]
  2292. pub fn set_mmap_data(&mut self, val: __u64) {
  2293. unsafe {
  2294. let val: u64 = ::core::mem::transmute(val);
  2295. self._bitfield_1.set(17usize, 1u8, val as u64)
  2296. }
  2297. }
  2298. #[inline]
  2299. pub unsafe fn mmap_data_raw(this: *const Self) -> __u64 {
  2300. unsafe {
  2301. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2302. ::core::ptr::addr_of!((*this)._bitfield_1),
  2303. 17usize,
  2304. 1u8,
  2305. ) as u64)
  2306. }
  2307. }
  2308. #[inline]
  2309. pub unsafe fn set_mmap_data_raw(this: *mut Self, val: __u64) {
  2310. unsafe {
  2311. let val: u64 = ::core::mem::transmute(val);
  2312. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2313. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2314. 17usize,
  2315. 1u8,
  2316. val as u64,
  2317. )
  2318. }
  2319. }
  2320. #[inline]
  2321. pub fn sample_id_all(&self) -> __u64 {
  2322. unsafe { ::core::mem::transmute(self._bitfield_1.get(18usize, 1u8) as u64) }
  2323. }
  2324. #[inline]
  2325. pub fn set_sample_id_all(&mut self, val: __u64) {
  2326. unsafe {
  2327. let val: u64 = ::core::mem::transmute(val);
  2328. self._bitfield_1.set(18usize, 1u8, val as u64)
  2329. }
  2330. }
  2331. #[inline]
  2332. pub unsafe fn sample_id_all_raw(this: *const Self) -> __u64 {
  2333. unsafe {
  2334. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2335. ::core::ptr::addr_of!((*this)._bitfield_1),
  2336. 18usize,
  2337. 1u8,
  2338. ) as u64)
  2339. }
  2340. }
  2341. #[inline]
  2342. pub unsafe fn set_sample_id_all_raw(this: *mut Self, val: __u64) {
  2343. unsafe {
  2344. let val: u64 = ::core::mem::transmute(val);
  2345. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2346. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2347. 18usize,
  2348. 1u8,
  2349. val as u64,
  2350. )
  2351. }
  2352. }
  2353. #[inline]
  2354. pub fn exclude_host(&self) -> __u64 {
  2355. unsafe { ::core::mem::transmute(self._bitfield_1.get(19usize, 1u8) as u64) }
  2356. }
  2357. #[inline]
  2358. pub fn set_exclude_host(&mut self, val: __u64) {
  2359. unsafe {
  2360. let val: u64 = ::core::mem::transmute(val);
  2361. self._bitfield_1.set(19usize, 1u8, val as u64)
  2362. }
  2363. }
  2364. #[inline]
  2365. pub unsafe fn exclude_host_raw(this: *const Self) -> __u64 {
  2366. unsafe {
  2367. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2368. ::core::ptr::addr_of!((*this)._bitfield_1),
  2369. 19usize,
  2370. 1u8,
  2371. ) as u64)
  2372. }
  2373. }
  2374. #[inline]
  2375. pub unsafe fn set_exclude_host_raw(this: *mut Self, val: __u64) {
  2376. unsafe {
  2377. let val: u64 = ::core::mem::transmute(val);
  2378. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2379. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2380. 19usize,
  2381. 1u8,
  2382. val as u64,
  2383. )
  2384. }
  2385. }
  2386. #[inline]
  2387. pub fn exclude_guest(&self) -> __u64 {
  2388. unsafe { ::core::mem::transmute(self._bitfield_1.get(20usize, 1u8) as u64) }
  2389. }
  2390. #[inline]
  2391. pub fn set_exclude_guest(&mut self, val: __u64) {
  2392. unsafe {
  2393. let val: u64 = ::core::mem::transmute(val);
  2394. self._bitfield_1.set(20usize, 1u8, val as u64)
  2395. }
  2396. }
  2397. #[inline]
  2398. pub unsafe fn exclude_guest_raw(this: *const Self) -> __u64 {
  2399. unsafe {
  2400. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2401. ::core::ptr::addr_of!((*this)._bitfield_1),
  2402. 20usize,
  2403. 1u8,
  2404. ) as u64)
  2405. }
  2406. }
  2407. #[inline]
  2408. pub unsafe fn set_exclude_guest_raw(this: *mut Self, val: __u64) {
  2409. unsafe {
  2410. let val: u64 = ::core::mem::transmute(val);
  2411. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2412. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2413. 20usize,
  2414. 1u8,
  2415. val as u64,
  2416. )
  2417. }
  2418. }
  2419. #[inline]
  2420. pub fn exclude_callchain_kernel(&self) -> __u64 {
  2421. unsafe { ::core::mem::transmute(self._bitfield_1.get(21usize, 1u8) as u64) }
  2422. }
  2423. #[inline]
  2424. pub fn set_exclude_callchain_kernel(&mut self, val: __u64) {
  2425. unsafe {
  2426. let val: u64 = ::core::mem::transmute(val);
  2427. self._bitfield_1.set(21usize, 1u8, val as u64)
  2428. }
  2429. }
  2430. #[inline]
  2431. pub unsafe fn exclude_callchain_kernel_raw(this: *const Self) -> __u64 {
  2432. unsafe {
  2433. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2434. ::core::ptr::addr_of!((*this)._bitfield_1),
  2435. 21usize,
  2436. 1u8,
  2437. ) as u64)
  2438. }
  2439. }
  2440. #[inline]
  2441. pub unsafe fn set_exclude_callchain_kernel_raw(this: *mut Self, val: __u64) {
  2442. unsafe {
  2443. let val: u64 = ::core::mem::transmute(val);
  2444. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2445. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2446. 21usize,
  2447. 1u8,
  2448. val as u64,
  2449. )
  2450. }
  2451. }
  2452. #[inline]
  2453. pub fn exclude_callchain_user(&self) -> __u64 {
  2454. unsafe { ::core::mem::transmute(self._bitfield_1.get(22usize, 1u8) as u64) }
  2455. }
  2456. #[inline]
  2457. pub fn set_exclude_callchain_user(&mut self, val: __u64) {
  2458. unsafe {
  2459. let val: u64 = ::core::mem::transmute(val);
  2460. self._bitfield_1.set(22usize, 1u8, val as u64)
  2461. }
  2462. }
  2463. #[inline]
  2464. pub unsafe fn exclude_callchain_user_raw(this: *const Self) -> __u64 {
  2465. unsafe {
  2466. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2467. ::core::ptr::addr_of!((*this)._bitfield_1),
  2468. 22usize,
  2469. 1u8,
  2470. ) as u64)
  2471. }
  2472. }
  2473. #[inline]
  2474. pub unsafe fn set_exclude_callchain_user_raw(this: *mut Self, val: __u64) {
  2475. unsafe {
  2476. let val: u64 = ::core::mem::transmute(val);
  2477. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2478. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2479. 22usize,
  2480. 1u8,
  2481. val as u64,
  2482. )
  2483. }
  2484. }
  2485. #[inline]
  2486. pub fn mmap2(&self) -> __u64 {
  2487. unsafe { ::core::mem::transmute(self._bitfield_1.get(23usize, 1u8) as u64) }
  2488. }
  2489. #[inline]
  2490. pub fn set_mmap2(&mut self, val: __u64) {
  2491. unsafe {
  2492. let val: u64 = ::core::mem::transmute(val);
  2493. self._bitfield_1.set(23usize, 1u8, val as u64)
  2494. }
  2495. }
  2496. #[inline]
  2497. pub unsafe fn mmap2_raw(this: *const Self) -> __u64 {
  2498. unsafe {
  2499. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2500. ::core::ptr::addr_of!((*this)._bitfield_1),
  2501. 23usize,
  2502. 1u8,
  2503. ) as u64)
  2504. }
  2505. }
  2506. #[inline]
  2507. pub unsafe fn set_mmap2_raw(this: *mut Self, val: __u64) {
  2508. unsafe {
  2509. let val: u64 = ::core::mem::transmute(val);
  2510. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2511. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2512. 23usize,
  2513. 1u8,
  2514. val as u64,
  2515. )
  2516. }
  2517. }
  2518. #[inline]
  2519. pub fn comm_exec(&self) -> __u64 {
  2520. unsafe { ::core::mem::transmute(self._bitfield_1.get(24usize, 1u8) as u64) }
  2521. }
  2522. #[inline]
  2523. pub fn set_comm_exec(&mut self, val: __u64) {
  2524. unsafe {
  2525. let val: u64 = ::core::mem::transmute(val);
  2526. self._bitfield_1.set(24usize, 1u8, val as u64)
  2527. }
  2528. }
  2529. #[inline]
  2530. pub unsafe fn comm_exec_raw(this: *const Self) -> __u64 {
  2531. unsafe {
  2532. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2533. ::core::ptr::addr_of!((*this)._bitfield_1),
  2534. 24usize,
  2535. 1u8,
  2536. ) as u64)
  2537. }
  2538. }
  2539. #[inline]
  2540. pub unsafe fn set_comm_exec_raw(this: *mut Self, val: __u64) {
  2541. unsafe {
  2542. let val: u64 = ::core::mem::transmute(val);
  2543. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2544. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2545. 24usize,
  2546. 1u8,
  2547. val as u64,
  2548. )
  2549. }
  2550. }
  2551. #[inline]
  2552. pub fn use_clockid(&self) -> __u64 {
  2553. unsafe { ::core::mem::transmute(self._bitfield_1.get(25usize, 1u8) as u64) }
  2554. }
  2555. #[inline]
  2556. pub fn set_use_clockid(&mut self, val: __u64) {
  2557. unsafe {
  2558. let val: u64 = ::core::mem::transmute(val);
  2559. self._bitfield_1.set(25usize, 1u8, val as u64)
  2560. }
  2561. }
  2562. #[inline]
  2563. pub unsafe fn use_clockid_raw(this: *const Self) -> __u64 {
  2564. unsafe {
  2565. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2566. ::core::ptr::addr_of!((*this)._bitfield_1),
  2567. 25usize,
  2568. 1u8,
  2569. ) as u64)
  2570. }
  2571. }
  2572. #[inline]
  2573. pub unsafe fn set_use_clockid_raw(this: *mut Self, val: __u64) {
  2574. unsafe {
  2575. let val: u64 = ::core::mem::transmute(val);
  2576. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2577. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2578. 25usize,
  2579. 1u8,
  2580. val as u64,
  2581. )
  2582. }
  2583. }
  2584. #[inline]
  2585. pub fn context_switch(&self) -> __u64 {
  2586. unsafe { ::core::mem::transmute(self._bitfield_1.get(26usize, 1u8) as u64) }
  2587. }
  2588. #[inline]
  2589. pub fn set_context_switch(&mut self, val: __u64) {
  2590. unsafe {
  2591. let val: u64 = ::core::mem::transmute(val);
  2592. self._bitfield_1.set(26usize, 1u8, val as u64)
  2593. }
  2594. }
  2595. #[inline]
  2596. pub unsafe fn context_switch_raw(this: *const Self) -> __u64 {
  2597. unsafe {
  2598. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2599. ::core::ptr::addr_of!((*this)._bitfield_1),
  2600. 26usize,
  2601. 1u8,
  2602. ) as u64)
  2603. }
  2604. }
  2605. #[inline]
  2606. pub unsafe fn set_context_switch_raw(this: *mut Self, val: __u64) {
  2607. unsafe {
  2608. let val: u64 = ::core::mem::transmute(val);
  2609. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2610. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2611. 26usize,
  2612. 1u8,
  2613. val as u64,
  2614. )
  2615. }
  2616. }
  2617. #[inline]
  2618. pub fn write_backward(&self) -> __u64 {
  2619. unsafe { ::core::mem::transmute(self._bitfield_1.get(27usize, 1u8) as u64) }
  2620. }
  2621. #[inline]
  2622. pub fn set_write_backward(&mut self, val: __u64) {
  2623. unsafe {
  2624. let val: u64 = ::core::mem::transmute(val);
  2625. self._bitfield_1.set(27usize, 1u8, val as u64)
  2626. }
  2627. }
  2628. #[inline]
  2629. pub unsafe fn write_backward_raw(this: *const Self) -> __u64 {
  2630. unsafe {
  2631. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2632. ::core::ptr::addr_of!((*this)._bitfield_1),
  2633. 27usize,
  2634. 1u8,
  2635. ) as u64)
  2636. }
  2637. }
  2638. #[inline]
  2639. pub unsafe fn set_write_backward_raw(this: *mut Self, val: __u64) {
  2640. unsafe {
  2641. let val: u64 = ::core::mem::transmute(val);
  2642. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2643. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2644. 27usize,
  2645. 1u8,
  2646. val as u64,
  2647. )
  2648. }
  2649. }
  2650. #[inline]
  2651. pub fn namespaces(&self) -> __u64 {
  2652. unsafe { ::core::mem::transmute(self._bitfield_1.get(28usize, 1u8) as u64) }
  2653. }
  2654. #[inline]
  2655. pub fn set_namespaces(&mut self, val: __u64) {
  2656. unsafe {
  2657. let val: u64 = ::core::mem::transmute(val);
  2658. self._bitfield_1.set(28usize, 1u8, val as u64)
  2659. }
  2660. }
  2661. #[inline]
  2662. pub unsafe fn namespaces_raw(this: *const Self) -> __u64 {
  2663. unsafe {
  2664. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2665. ::core::ptr::addr_of!((*this)._bitfield_1),
  2666. 28usize,
  2667. 1u8,
  2668. ) as u64)
  2669. }
  2670. }
  2671. #[inline]
  2672. pub unsafe fn set_namespaces_raw(this: *mut Self, val: __u64) {
  2673. unsafe {
  2674. let val: u64 = ::core::mem::transmute(val);
  2675. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2676. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2677. 28usize,
  2678. 1u8,
  2679. val as u64,
  2680. )
  2681. }
  2682. }
  2683. #[inline]
  2684. pub fn ksymbol(&self) -> __u64 {
  2685. unsafe { ::core::mem::transmute(self._bitfield_1.get(29usize, 1u8) as u64) }
  2686. }
  2687. #[inline]
  2688. pub fn set_ksymbol(&mut self, val: __u64) {
  2689. unsafe {
  2690. let val: u64 = ::core::mem::transmute(val);
  2691. self._bitfield_1.set(29usize, 1u8, val as u64)
  2692. }
  2693. }
  2694. #[inline]
  2695. pub unsafe fn ksymbol_raw(this: *const Self) -> __u64 {
  2696. unsafe {
  2697. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2698. ::core::ptr::addr_of!((*this)._bitfield_1),
  2699. 29usize,
  2700. 1u8,
  2701. ) as u64)
  2702. }
  2703. }
  2704. #[inline]
  2705. pub unsafe fn set_ksymbol_raw(this: *mut Self, val: __u64) {
  2706. unsafe {
  2707. let val: u64 = ::core::mem::transmute(val);
  2708. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2709. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2710. 29usize,
  2711. 1u8,
  2712. val as u64,
  2713. )
  2714. }
  2715. }
  2716. #[inline]
  2717. pub fn bpf_event(&self) -> __u64 {
  2718. unsafe { ::core::mem::transmute(self._bitfield_1.get(30usize, 1u8) as u64) }
  2719. }
  2720. #[inline]
  2721. pub fn set_bpf_event(&mut self, val: __u64) {
  2722. unsafe {
  2723. let val: u64 = ::core::mem::transmute(val);
  2724. self._bitfield_1.set(30usize, 1u8, val as u64)
  2725. }
  2726. }
  2727. #[inline]
  2728. pub unsafe fn bpf_event_raw(this: *const Self) -> __u64 {
  2729. unsafe {
  2730. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2731. ::core::ptr::addr_of!((*this)._bitfield_1),
  2732. 30usize,
  2733. 1u8,
  2734. ) as u64)
  2735. }
  2736. }
  2737. #[inline]
  2738. pub unsafe fn set_bpf_event_raw(this: *mut Self, val: __u64) {
  2739. unsafe {
  2740. let val: u64 = ::core::mem::transmute(val);
  2741. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2742. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2743. 30usize,
  2744. 1u8,
  2745. val as u64,
  2746. )
  2747. }
  2748. }
  2749. #[inline]
  2750. pub fn aux_output(&self) -> __u64 {
  2751. unsafe { ::core::mem::transmute(self._bitfield_1.get(31usize, 1u8) as u64) }
  2752. }
  2753. #[inline]
  2754. pub fn set_aux_output(&mut self, val: __u64) {
  2755. unsafe {
  2756. let val: u64 = ::core::mem::transmute(val);
  2757. self._bitfield_1.set(31usize, 1u8, val as u64)
  2758. }
  2759. }
  2760. #[inline]
  2761. pub unsafe fn aux_output_raw(this: *const Self) -> __u64 {
  2762. unsafe {
  2763. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2764. ::core::ptr::addr_of!((*this)._bitfield_1),
  2765. 31usize,
  2766. 1u8,
  2767. ) as u64)
  2768. }
  2769. }
  2770. #[inline]
  2771. pub unsafe fn set_aux_output_raw(this: *mut Self, val: __u64) {
  2772. unsafe {
  2773. let val: u64 = ::core::mem::transmute(val);
  2774. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2775. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2776. 31usize,
  2777. 1u8,
  2778. val as u64,
  2779. )
  2780. }
  2781. }
  2782. #[inline]
  2783. pub fn cgroup(&self) -> __u64 {
  2784. unsafe { ::core::mem::transmute(self._bitfield_1.get(32usize, 1u8) as u64) }
  2785. }
  2786. #[inline]
  2787. pub fn set_cgroup(&mut self, val: __u64) {
  2788. unsafe {
  2789. let val: u64 = ::core::mem::transmute(val);
  2790. self._bitfield_1.set(32usize, 1u8, val as u64)
  2791. }
  2792. }
  2793. #[inline]
  2794. pub unsafe fn cgroup_raw(this: *const Self) -> __u64 {
  2795. unsafe {
  2796. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2797. ::core::ptr::addr_of!((*this)._bitfield_1),
  2798. 32usize,
  2799. 1u8,
  2800. ) as u64)
  2801. }
  2802. }
  2803. #[inline]
  2804. pub unsafe fn set_cgroup_raw(this: *mut Self, val: __u64) {
  2805. unsafe {
  2806. let val: u64 = ::core::mem::transmute(val);
  2807. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2808. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2809. 32usize,
  2810. 1u8,
  2811. val as u64,
  2812. )
  2813. }
  2814. }
  2815. #[inline]
  2816. pub fn text_poke(&self) -> __u64 {
  2817. unsafe { ::core::mem::transmute(self._bitfield_1.get(33usize, 1u8) as u64) }
  2818. }
  2819. #[inline]
  2820. pub fn set_text_poke(&mut self, val: __u64) {
  2821. unsafe {
  2822. let val: u64 = ::core::mem::transmute(val);
  2823. self._bitfield_1.set(33usize, 1u8, val as u64)
  2824. }
  2825. }
    #[inline]
    pub unsafe fn text_poke_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                33usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_text_poke_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                33usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn build_id(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(34usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_build_id(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(34usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn build_id_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                34usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_build_id_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                34usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn inherit_thread(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(35usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_inherit_thread(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(35usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn inherit_thread_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                35usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_inherit_thread_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                35usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn remove_on_exec(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(36usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_remove_on_exec(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(36usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn remove_on_exec_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                36usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_remove_on_exec_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                36usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn sigtrap(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(37usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_sigtrap(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(37usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn sigtrap_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                37usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_sigtrap_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                37usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn __reserved_1(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(38usize, 26u8) as u64) }
    }
    #[inline]
    pub fn set___reserved_1(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(38usize, 26u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn __reserved_1_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                38usize,
                26u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set___reserved_1_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                38usize,
                26u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn new_bitfield_1(
        disabled: __u64,
        inherit: __u64,
        pinned: __u64,
        exclusive: __u64,
        exclude_user: __u64,
        exclude_kernel: __u64,
        exclude_hv: __u64,
        exclude_idle: __u64,
        mmap: __u64,
        comm: __u64,
        freq: __u64,
        inherit_stat: __u64,
        enable_on_exec: __u64,
        task: __u64,
        watermark: __u64,
        precise_ip: __u64,
        mmap_data: __u64,
        sample_id_all: __u64,
        exclude_host: __u64,
        exclude_guest: __u64,
        exclude_callchain_kernel: __u64,
        exclude_callchain_user: __u64,
        mmap2: __u64,
        comm_exec: __u64,
        use_clockid: __u64,
        context_switch: __u64,
        write_backward: __u64,
        namespaces: __u64,
        ksymbol: __u64,
        bpf_event: __u64,
        aux_output: __u64,
        cgroup: __u64,
        text_poke: __u64,
        build_id: __u64,
        inherit_thread: __u64,
        remove_on_exec: __u64,
        sigtrap: __u64,
        __reserved_1: __u64,
    ) -> __BindgenBitfieldUnit<[u8; 8usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 8usize]> = Default::default();
        __bindgen_bitfield_unit.set(0usize, 1u8, {
            let disabled: u64 = unsafe { ::core::mem::transmute(disabled) };
            disabled as u64
        });
        __bindgen_bitfield_unit.set(1usize, 1u8, {
            let inherit: u64 = unsafe { ::core::mem::transmute(inherit) };
            inherit as u64
        });
        __bindgen_bitfield_unit.set(2usize, 1u8, {
            let pinned: u64 = unsafe { ::core::mem::transmute(pinned) };
            pinned as u64
        });
        __bindgen_bitfield_unit.set(3usize, 1u8, {
            let exclusive: u64 = unsafe { ::core::mem::transmute(exclusive) };
            exclusive as u64
        });
        __bindgen_bitfield_unit.set(4usize, 1u8, {
            let exclude_user: u64 = unsafe { ::core::mem::transmute(exclude_user) };
            exclude_user as u64
        });
        __bindgen_bitfield_unit.set(5usize, 1u8, {
            let exclude_kernel: u64 = unsafe { ::core::mem::transmute(exclude_kernel) };
            exclude_kernel as u64
        });
        __bindgen_bitfield_unit.set(6usize, 1u8, {
            let exclude_hv: u64 = unsafe { ::core::mem::transmute(exclude_hv) };
            exclude_hv as u64
        });
        __bindgen_bitfield_unit.set(7usize, 1u8, {
            let exclude_idle: u64 = unsafe { ::core::mem::transmute(exclude_idle) };
            exclude_idle as u64
        });
        __bindgen_bitfield_unit.set(8usize, 1u8, {
            let mmap: u64 = unsafe { ::core::mem::transmute(mmap) };
            mmap as u64
        });
        __bindgen_bitfield_unit.set(9usize, 1u8, {
            let comm: u64 = unsafe { ::core::mem::transmute(comm) };
            comm as u64
        });
        __bindgen_bitfield_unit.set(10usize, 1u8, {
            let freq: u64 = unsafe { ::core::mem::transmute(freq) };
            freq as u64
        });
        __bindgen_bitfield_unit.set(11usize, 1u8, {
            let inherit_stat: u64 = unsafe { ::core::mem::transmute(inherit_stat) };
            inherit_stat as u64
        });
        __bindgen_bitfield_unit.set(12usize, 1u8, {
            let enable_on_exec: u64 = unsafe { ::core::mem::transmute(enable_on_exec) };
            enable_on_exec as u64
        });
        __bindgen_bitfield_unit.set(13usize, 1u8, {
            let task: u64 = unsafe { ::core::mem::transmute(task) };
            task as u64
        });
        __bindgen_bitfield_unit.set(14usize, 1u8, {
            let watermark: u64 = unsafe { ::core::mem::transmute(watermark) };
            watermark as u64
        });
        __bindgen_bitfield_unit.set(15usize, 2u8, {
            let precise_ip: u64 = unsafe { ::core::mem::transmute(precise_ip) };
            precise_ip as u64
        });
        __bindgen_bitfield_unit.set(17usize, 1u8, {
            let mmap_data: u64 = unsafe { ::core::mem::transmute(mmap_data) };
            mmap_data as u64
        });
        __bindgen_bitfield_unit.set(18usize, 1u8, {
            let sample_id_all: u64 = unsafe { ::core::mem::transmute(sample_id_all) };
            sample_id_all as u64
        });
        __bindgen_bitfield_unit.set(19usize, 1u8, {
            let exclude_host: u64 = unsafe { ::core::mem::transmute(exclude_host) };
            exclude_host as u64
        });
        __bindgen_bitfield_unit.set(20usize, 1u8, {
            let exclude_guest: u64 = unsafe { ::core::mem::transmute(exclude_guest) };
            exclude_guest as u64
        });
        __bindgen_bitfield_unit.set(21usize, 1u8, {
            let exclude_callchain_kernel: u64 =
                unsafe { ::core::mem::transmute(exclude_callchain_kernel) };
            exclude_callchain_kernel as u64
        });
        __bindgen_bitfield_unit.set(22usize, 1u8, {
            let exclude_callchain_user: u64 =
                unsafe { ::core::mem::transmute(exclude_callchain_user) };
            exclude_callchain_user as u64
        });
        __bindgen_bitfield_unit.set(23usize, 1u8, {
            let mmap2: u64 = unsafe { ::core::mem::transmute(mmap2) };
            mmap2 as u64
        });
        __bindgen_bitfield_unit.set(24usize, 1u8, {
            let comm_exec: u64 = unsafe { ::core::mem::transmute(comm_exec) };
            comm_exec as u64
        });
        __bindgen_bitfield_unit.set(25usize, 1u8, {
            let use_clockid: u64 = unsafe { ::core::mem::transmute(use_clockid) };
            use_clockid as u64
        });
        __bindgen_bitfield_unit.set(26usize, 1u8, {
            let context_switch: u64 = unsafe { ::core::mem::transmute(context_switch) };
            context_switch as u64
        });
        __bindgen_bitfield_unit.set(27usize, 1u8, {
            let write_backward: u64 = unsafe { ::core::mem::transmute(write_backward) };
            write_backward as u64
        });
        __bindgen_bitfield_unit.set(28usize, 1u8, {
            let namespaces: u64 = unsafe { ::core::mem::transmute(namespaces) };
            namespaces as u64
        });
        __bindgen_bitfield_unit.set(29usize, 1u8, {
            let ksymbol: u64 = unsafe { ::core::mem::transmute(ksymbol) };
            ksymbol as u64
        });
        __bindgen_bitfield_unit.set(30usize, 1u8, {
            let bpf_event: u64 = unsafe { ::core::mem::transmute(bpf_event) };
            bpf_event as u64
        });
        __bindgen_bitfield_unit.set(31usize, 1u8, {
            let aux_output: u64 = unsafe { ::core::mem::transmute(aux_output) };
            aux_output as u64
        });
        __bindgen_bitfield_unit.set(32usize, 1u8, {
            let cgroup: u64 = unsafe { ::core::mem::transmute(cgroup) };
            cgroup as u64
        });
        __bindgen_bitfield_unit.set(33usize, 1u8, {
            let text_poke: u64 = unsafe { ::core::mem::transmute(text_poke) };
            text_poke as u64
        });
        __bindgen_bitfield_unit.set(34usize, 1u8, {
            let build_id: u64 = unsafe { ::core::mem::transmute(build_id) };
            build_id as u64
        });
        __bindgen_bitfield_unit.set(35usize, 1u8, {
            let inherit_thread: u64 = unsafe { ::core::mem::transmute(inherit_thread) };
            inherit_thread as u64
        });
        __bindgen_bitfield_unit.set(36usize, 1u8, {
            let remove_on_exec: u64 = unsafe { ::core::mem::transmute(remove_on_exec) };
            remove_on_exec as u64
        });
        __bindgen_bitfield_unit.set(37usize, 1u8, {
            let sigtrap: u64 = unsafe { ::core::mem::transmute(sigtrap) };
            sigtrap as u64
        });
        __bindgen_bitfield_unit.set(38usize, 26u8, {
            let __reserved_1: u64 = unsafe { ::core::mem::transmute(__reserved_1) };
            __reserved_1 as u64
        });
        __bindgen_bitfield_unit
    }
}
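// Illustrative usage sketch (not generated by bindgen): the safe setters above are
// the usual way to populate the event-attribute bitfield, assuming this impl block
// belongs to the `perf_event_attr` struct defined earlier in this file, e.g.:
//
//     let mut attr: perf_event_attr = unsafe { core::mem::zeroed() };
//     attr.set_disabled(1);
//     attr.set_exclude_kernel(1);
//     attr.set_exclude_hv(1);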
#[repr(C)]
#[derive(Copy, Clone)]
pub struct perf_event_mmap_page {
    pub version: __u32,
    pub compat_version: __u32,
    pub lock: __u32,
    pub index: __u32,
    pub offset: __s64,
    pub time_enabled: __u64,
    pub time_running: __u64,
    pub __bindgen_anon_1: perf_event_mmap_page__bindgen_ty_1,
    pub pmc_width: __u16,
    pub time_shift: __u16,
    pub time_mult: __u32,
    pub time_offset: __u64,
    pub time_zero: __u64,
    pub size: __u32,
    pub __reserved_1: __u32,
    pub time_cycles: __u64,
    pub time_mask: __u64,
    pub __reserved: [__u8; 928usize],
    pub data_head: __u64,
    pub data_tail: __u64,
    pub data_offset: __u64,
    pub data_size: __u64,
    pub aux_head: __u64,
    pub aux_tail: __u64,
    pub aux_offset: __u64,
    pub aux_size: __u64,
}
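// Note (illustrative, not generated by bindgen): `data_head` and `data_tail` delimit
// the perf ring buffer that follows this page. A user-space reader loads `data_head`,
// consumes records up to it, then publishes the new `data_tail`; real code must pair
// these accesses with the appropriate memory barriers. A minimal sketch:
//
//     let page: *mut perf_event_mmap_page = todo!("pointer to the mmap'd page");
//     let head = unsafe { core::ptr::read_volatile(&(*page).data_head) };
//     // ... parse records in the byte range [data_tail, head) ...
//     unsafe { core::ptr::write_volatile(&mut (*page).data_tail, head) };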
#[repr(C)]
#[derive(Copy, Clone)]
pub union perf_event_mmap_page__bindgen_ty_1 {
    pub capabilities: __u64,
    pub __bindgen_anon_1: perf_event_mmap_page__bindgen_ty_1__bindgen_ty_1,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct perf_event_mmap_page__bindgen_ty_1__bindgen_ty_1 {
    pub _bitfield_align_1: [u64; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 8usize]>,
}
impl perf_event_mmap_page__bindgen_ty_1__bindgen_ty_1 {
    #[inline]
    pub fn cap_bit0(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_cap_bit0(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(0usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cap_bit0_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                0usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cap_bit0_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                0usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn cap_bit0_is_deprecated(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_cap_bit0_is_deprecated(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(1usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cap_bit0_is_deprecated_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                1usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cap_bit0_is_deprecated_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                1usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn cap_user_rdpmc(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_cap_user_rdpmc(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(2usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cap_user_rdpmc_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                2usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cap_user_rdpmc_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                2usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn cap_user_time(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_cap_user_time(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(3usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cap_user_time_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                3usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cap_user_time_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                3usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn cap_user_time_zero(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_cap_user_time_zero(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(4usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cap_user_time_zero_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                4usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cap_user_time_zero_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                4usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn cap_user_time_short(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_cap_user_time_short(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(5usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cap_user_time_short_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                5usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cap_user_time_short_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                5usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn cap_____res(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(6usize, 58u8) as u64) }
    }
    #[inline]
    pub fn set_cap_____res(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(6usize, 58u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cap_____res_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                6usize,
                58u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cap_____res_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                6usize,
                58u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn new_bitfield_1(
        cap_bit0: __u64,
        cap_bit0_is_deprecated: __u64,
        cap_user_rdpmc: __u64,
        cap_user_time: __u64,
        cap_user_time_zero: __u64,
        cap_user_time_short: __u64,
        cap_____res: __u64,
    ) -> __BindgenBitfieldUnit<[u8; 8usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 8usize]> = Default::default();
        __bindgen_bitfield_unit.set(0usize, 1u8, {
            let cap_bit0: u64 = unsafe { ::core::mem::transmute(cap_bit0) };
            cap_bit0 as u64
        });
        __bindgen_bitfield_unit.set(1usize, 1u8, {
            let cap_bit0_is_deprecated: u64 =
                unsafe { ::core::mem::transmute(cap_bit0_is_deprecated) };
            cap_bit0_is_deprecated as u64
        });
        __bindgen_bitfield_unit.set(2usize, 1u8, {
            let cap_user_rdpmc: u64 = unsafe { ::core::mem::transmute(cap_user_rdpmc) };
            cap_user_rdpmc as u64
        });
        __bindgen_bitfield_unit.set(3usize, 1u8, {
            let cap_user_time: u64 = unsafe { ::core::mem::transmute(cap_user_time) };
            cap_user_time as u64
        });
        __bindgen_bitfield_unit.set(4usize, 1u8, {
            let cap_user_time_zero: u64 = unsafe { ::core::mem::transmute(cap_user_time_zero) };
            cap_user_time_zero as u64
        });
        __bindgen_bitfield_unit.set(5usize, 1u8, {
            let cap_user_time_short: u64 = unsafe { ::core::mem::transmute(cap_user_time_short) };
            cap_user_time_short as u64
        });
        __bindgen_bitfield_unit.set(6usize, 58u8, {
            let cap_____res: u64 = unsafe { ::core::mem::transmute(cap_____res) };
            cap_____res as u64
        });
        __bindgen_bitfield_unit
    }
}
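// Illustrative usage sketch (not generated by bindgen): the capability bits are
// reached through the anonymous union on `perf_event_mmap_page`, which requires an
// unsafe union field access, e.g.:
//
//     let page: &perf_event_mmap_page = todo!("reference to the mmap'd page");
//     let caps = unsafe { page.__bindgen_anon_1.__bindgen_anon_1 };
//     if caps.cap_user_time_zero() != 0 {
//         // time_zero / time_mult / time_shift can be used to convert raw
//         // counter values to perf time in user space.
//     }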
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct perf_event_header {
    pub type_: __u32,
    pub misc: __u16,
    pub size: __u16,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum perf_event_type {
    PERF_RECORD_MMAP = 1,
    PERF_RECORD_LOST = 2,
    PERF_RECORD_COMM = 3,
    PERF_RECORD_EXIT = 4,
    PERF_RECORD_THROTTLE = 5,
    PERF_RECORD_UNTHROTTLE = 6,
    PERF_RECORD_FORK = 7,
    PERF_RECORD_READ = 8,
    PERF_RECORD_SAMPLE = 9,
    PERF_RECORD_MMAP2 = 10,
    PERF_RECORD_AUX = 11,
    PERF_RECORD_ITRACE_START = 12,
    PERF_RECORD_LOST_SAMPLES = 13,
    PERF_RECORD_SWITCH = 14,
    PERF_RECORD_SWITCH_CPU_WIDE = 15,
    PERF_RECORD_NAMESPACES = 16,
    PERF_RECORD_KSYMBOL = 17,
    PERF_RECORD_BPF_EVENT = 18,
    PERF_RECORD_CGROUP = 19,
    PERF_RECORD_TEXT_POKE = 20,
    PERF_RECORD_AUX_OUTPUT_HW_ID = 21,
    PERF_RECORD_MAX = 22,
}
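// Illustrative usage sketch (not generated by bindgen): every record in the perf
// ring buffer begins with a `perf_event_header`, and `type_` carries one of the
// `perf_event_type` values above, e.g.:
//
//     let header: &perf_event_header = todo!("header at the current read offset");
//     if header.type_ == perf_event_type::PERF_RECORD_SAMPLE as u32 {
//         // `header.size` bytes cover the whole record, header included.
//     }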
pub const TCA_BPF_UNSPEC: _bindgen_ty_154 = 0;
pub const TCA_BPF_ACT: _bindgen_ty_154 = 1;
pub const TCA_BPF_POLICE: _bindgen_ty_154 = 2;
pub const TCA_BPF_CLASSID: _bindgen_ty_154 = 3;
pub const TCA_BPF_OPS_LEN: _bindgen_ty_154 = 4;
pub const TCA_BPF_OPS: _bindgen_ty_154 = 5;
pub const TCA_BPF_FD: _bindgen_ty_154 = 6;
pub const TCA_BPF_NAME: _bindgen_ty_154 = 7;
pub const TCA_BPF_FLAGS: _bindgen_ty_154 = 8;
pub const TCA_BPF_FLAGS_GEN: _bindgen_ty_154 = 9;
pub const TCA_BPF_TAG: _bindgen_ty_154 = 10;
pub const TCA_BPF_ID: _bindgen_ty_154 = 11;
pub const __TCA_BPF_MAX: _bindgen_ty_154 = 12;
pub type _bindgen_ty_154 = ::core::ffi::c_uint;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ifinfomsg {
    pub ifi_family: ::core::ffi::c_uchar,
    pub __ifi_pad: ::core::ffi::c_uchar,
    pub ifi_type: ::core::ffi::c_ushort,
    pub ifi_index: ::core::ffi::c_int,
    pub ifi_flags: ::core::ffi::c_uint,
    pub ifi_change: ::core::ffi::c_uint,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct tcmsg {
    pub tcm_family: ::core::ffi::c_uchar,
    pub tcm__pad1: ::core::ffi::c_uchar,
    pub tcm__pad2: ::core::ffi::c_ushort,
    pub tcm_ifindex: ::core::ffi::c_int,
    pub tcm_handle: __u32,
    pub tcm_parent: __u32,
    pub tcm_info: __u32,
}
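// Note (not part of the generated bindings): `tcmsg` is the fixed rtnetlink header
// used for traffic-control requests (e.g. RTM_NEWQDISC / RTM_NEWTFILTER). It is
// followed by netlink attributes keyed by the TCA_* constants below, with
// filter-specific options such as the TCA_BPF_* attributes above nested under
// TCA_OPTIONS.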
pub const TCA_UNSPEC: _bindgen_ty_172 = 0;
pub const TCA_KIND: _bindgen_ty_172 = 1;
pub const TCA_OPTIONS: _bindgen_ty_172 = 2;
pub const TCA_STATS: _bindgen_ty_172 = 3;
pub const TCA_XSTATS: _bindgen_ty_172 = 4;
pub const TCA_RATE: _bindgen_ty_172 = 5;
pub const TCA_FCNT: _bindgen_ty_172 = 6;
pub const TCA_STATS2: _bindgen_ty_172 = 7;
pub const TCA_STAB: _bindgen_ty_172 = 8;
pub const TCA_PAD: _bindgen_ty_172 = 9;
pub const TCA_DUMP_INVISIBLE: _bindgen_ty_172 = 10;
pub const TCA_CHAIN: _bindgen_ty_172 = 11;
pub const TCA_HW_OFFLOAD: _bindgen_ty_172 = 12;
pub const TCA_INGRESS_BLOCK: _bindgen_ty_172 = 13;
pub const TCA_EGRESS_BLOCK: _bindgen_ty_172 = 14;
pub const __TCA_MAX: _bindgen_ty_172 = 15;
pub type _bindgen_ty_172 = ::core::ffi::c_uint;
pub const AYA_PERF_EVENT_IOC_ENABLE: ::core::ffi::c_int = 9216;
pub const AYA_PERF_EVENT_IOC_DISABLE: ::core::ffi::c_int = 9217;
pub const AYA_PERF_EVENT_IOC_SET_BPF: ::core::ffi::c_int = 1074013192;
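// Illustrative usage sketch (not generated by bindgen): the AYA_PERF_EVENT_IOC_*
// values correspond to the kernel's PERF_EVENT_IOC_ENABLE, PERF_EVENT_IOC_DISABLE
// and PERF_EVENT_IOC_SET_BPF ioctl requests. Assuming a crate that also links
// `libc`, attaching a BPF program to a perf event fd and enabling it might look like:
//
//     let ret = unsafe {
//         libc::ioctl(perf_fd, AYA_PERF_EVENT_IOC_SET_BPF as libc::c_ulong, prog_fd)
//     };
//     assert_eq!(ret, 0);
//     let ret = unsafe {
//         libc::ioctl(perf_fd, AYA_PERF_EVENT_IOC_ENABLE as libc::c_ulong, 0)
//     };
//     assert_eq!(ret, 0);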