linux_bindings_armv7.rs

/* automatically generated by rust-bindgen 0.71.1 */
#[repr(C)]
#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct __BindgenBitfieldUnit<Storage> {
    storage: Storage,
}
impl<Storage> __BindgenBitfieldUnit<Storage> {
    #[inline]
    pub const fn new(storage: Storage) -> Self {
        Self { storage }
    }
}
impl<Storage> __BindgenBitfieldUnit<Storage>
where
    Storage: AsRef<[u8]> + AsMut<[u8]>,
{
    #[inline]
    fn extract_bit(byte: u8, index: usize) -> bool {
        let bit_index = if cfg!(target_endian = "big") {
            7 - (index % 8)
        } else {
            index % 8
        };
        let mask = 1 << bit_index;
        byte & mask == mask
    }
    #[inline]
    pub fn get_bit(&self, index: usize) -> bool {
        debug_assert!(index / 8 < self.storage.as_ref().len());
        let byte_index = index / 8;
        let byte = self.storage.as_ref()[byte_index];
        Self::extract_bit(byte, index)
    }
    #[inline]
    pub unsafe fn raw_get_bit(this: *const Self, index: usize) -> bool {
        debug_assert!(index / 8 < core::mem::size_of::<Storage>());
        let byte_index = index / 8;
        let byte = *(core::ptr::addr_of!((*this).storage) as *const u8).offset(byte_index as isize);
        Self::extract_bit(byte, index)
    }
    #[inline]
    fn change_bit(byte: u8, index: usize, val: bool) -> u8 {
        let bit_index = if cfg!(target_endian = "big") {
            7 - (index % 8)
        } else {
            index % 8
        };
        let mask = 1 << bit_index;
        if val {
            byte | mask
        } else {
            byte & !mask
        }
    }
    #[inline]
    pub fn set_bit(&mut self, index: usize, val: bool) {
        debug_assert!(index / 8 < self.storage.as_ref().len());
        let byte_index = index / 8;
        let byte = &mut self.storage.as_mut()[byte_index];
        *byte = Self::change_bit(*byte, index, val);
    }
    #[inline]
    pub unsafe fn raw_set_bit(this: *mut Self, index: usize, val: bool) {
        debug_assert!(index / 8 < core::mem::size_of::<Storage>());
        let byte_index = index / 8;
        let byte =
            (core::ptr::addr_of_mut!((*this).storage) as *mut u8).offset(byte_index as isize);
        *byte = Self::change_bit(*byte, index, val);
    }
    #[inline]
    pub fn get(&self, bit_offset: usize, bit_width: u8) -> u64 {
        debug_assert!(bit_width <= 64);
        debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
        debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len());
        let mut val = 0;
        for i in 0..(bit_width as usize) {
            if self.get_bit(i + bit_offset) {
                let index = if cfg!(target_endian = "big") {
                    bit_width as usize - 1 - i
                } else {
                    i
                };
                val |= 1 << index;
            }
        }
        val
    }
    #[inline]
    pub unsafe fn raw_get(this: *const Self, bit_offset: usize, bit_width: u8) -> u64 {
        debug_assert!(bit_width <= 64);
        debug_assert!(bit_offset / 8 < core::mem::size_of::<Storage>());
        debug_assert!((bit_offset + (bit_width as usize)) / 8 <= core::mem::size_of::<Storage>());
        let mut val = 0;
        for i in 0..(bit_width as usize) {
            if Self::raw_get_bit(this, i + bit_offset) {
                let index = if cfg!(target_endian = "big") {
                    bit_width as usize - 1 - i
                } else {
                    i
                };
                val |= 1 << index;
            }
        }
        val
    }
    #[inline]
    pub fn set(&mut self, bit_offset: usize, bit_width: u8, val: u64) {
        debug_assert!(bit_width <= 64);
        debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
        debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len());
        for i in 0..(bit_width as usize) {
            let mask = 1 << i;
            let val_bit_is_set = val & mask == mask;
            let index = if cfg!(target_endian = "big") {
                bit_width as usize - 1 - i
            } else {
                i
            };
            self.set_bit(index + bit_offset, val_bit_is_set);
        }
    }
    #[inline]
    pub unsafe fn raw_set(this: *mut Self, bit_offset: usize, bit_width: u8, val: u64) {
        debug_assert!(bit_width <= 64);
        debug_assert!(bit_offset / 8 < core::mem::size_of::<Storage>());
        debug_assert!((bit_offset + (bit_width as usize)) / 8 <= core::mem::size_of::<Storage>());
        for i in 0..(bit_width as usize) {
            let mask = 1 << i;
            let val_bit_is_set = val & mask == mask;
            let index = if cfg!(target_endian = "big") {
                bit_width as usize - 1 - i
            } else {
                i
            };
            Self::raw_set_bit(this, index + bit_offset, val_bit_is_set);
        }
    }
}
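// Illustrative sketch, not emitted by bindgen: exercises the getter/setter pair
// above on a one-byte storage unit. The field layout used here (a 3-bit field at
// bit offset 0 and a 5-bit field at bit offset 3) is an assumption made purely
// for the example; it is how bindgen later packs sub-byte C bitfields.
#[cfg(test)]
mod bindgen_bitfield_unit_demo {
    use super::__BindgenBitfieldUnit;

    #[test]
    fn packs_and_unpacks_subbyte_fields() {
        let mut unit: __BindgenBitfieldUnit<[u8; 1]> = __BindgenBitfieldUnit::new([0u8; 1]);
        // Write two adjacent bit ranges, then read them back.
        unit.set(0, 3, 0b101);
        unit.set(3, 5, 0b10011);
        assert_eq!(unit.get(0, 3), 0b101);
        assert_eq!(unit.get(3, 5), 0b10011);
        // Individual bits are addressable as well.
        assert!(unit.get_bit(0));
        assert!(!unit.get_bit(1));
    }
}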
#[repr(C)]
#[derive(Default)]
pub struct __IncompleteArrayField<T>(::core::marker::PhantomData<T>, [T; 0]);
impl<T> __IncompleteArrayField<T> {
    #[inline]
    pub const fn new() -> Self {
        __IncompleteArrayField(::core::marker::PhantomData, [])
    }
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        self as *const _ as *const T
    }
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self as *mut _ as *mut T
    }
    #[inline]
    pub unsafe fn as_slice(&self, len: usize) -> &[T] {
        ::core::slice::from_raw_parts(self.as_ptr(), len)
    }
    #[inline]
    pub unsafe fn as_mut_slice(&mut self, len: usize) -> &mut [T] {
        ::core::slice::from_raw_parts_mut(self.as_mut_ptr(), len)
    }
}
impl<T> ::core::fmt::Debug for __IncompleteArrayField<T> {
    fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
        fmt.write_str("__IncompleteArrayField")
    }
}
pub const SO_ATTACH_BPF: u32 = 50;
pub const SO_DETACH_BPF: u32 = 27;
pub const BPF_LD: u32 = 0;
pub const BPF_LDX: u32 = 1;
pub const BPF_ST: u32 = 2;
pub const BPF_STX: u32 = 3;
pub const BPF_ALU: u32 = 4;
pub const BPF_JMP: u32 = 5;
pub const BPF_W: u32 = 0;
pub const BPF_H: u32 = 8;
pub const BPF_B: u32 = 16;
pub const BPF_IMM: u32 = 0;
pub const BPF_MEM: u32 = 96;
pub const BPF_SUB: u32 = 16;
pub const BPF_K: u32 = 0;
pub const BPF_X: u32 = 8;
pub const BPF_ALU64: u32 = 7;
pub const BPF_DW: u32 = 24;
pub const BPF_MOV: u32 = 176;
pub const BPF_CALL: u32 = 128;
pub const BPF_EXIT: u32 = 144;
pub const BPF_F_ALLOW_OVERRIDE: u32 = 1;
pub const BPF_F_ALLOW_MULTI: u32 = 2;
pub const BPF_F_REPLACE: u32 = 4;
pub const BPF_F_BEFORE: u32 = 8;
pub const BPF_F_AFTER: u32 = 16;
pub const BPF_F_ID: u32 = 32;
pub const BPF_F_STRICT_ALIGNMENT: u32 = 1;
pub const BPF_F_ANY_ALIGNMENT: u32 = 2;
pub const BPF_F_TEST_RND_HI32: u32 = 4;
pub const BPF_F_TEST_STATE_FREQ: u32 = 8;
pub const BPF_F_SLEEPABLE: u32 = 16;
pub const BPF_F_XDP_HAS_FRAGS: u32 = 32;
pub const BPF_F_XDP_DEV_BOUND_ONLY: u32 = 64;
pub const BPF_F_TEST_REG_INVARIANTS: u32 = 128;
pub const BPF_F_NETFILTER_IP_DEFRAG: u32 = 1;
pub const BPF_PSEUDO_MAP_FD: u32 = 1;
pub const BPF_PSEUDO_MAP_IDX: u32 = 5;
pub const BPF_PSEUDO_MAP_VALUE: u32 = 2;
pub const BPF_PSEUDO_MAP_IDX_VALUE: u32 = 6;
pub const BPF_PSEUDO_BTF_ID: u32 = 3;
pub const BPF_PSEUDO_FUNC: u32 = 4;
pub const BPF_PSEUDO_CALL: u32 = 1;
pub const BPF_PSEUDO_KFUNC_CALL: u32 = 2;
pub const BPF_F_QUERY_EFFECTIVE: u32 = 1;
pub const BPF_F_TEST_RUN_ON_CPU: u32 = 1;
pub const BPF_F_TEST_XDP_LIVE_FRAMES: u32 = 2;
pub const BTF_INT_SIGNED: u32 = 1;
pub const BTF_INT_CHAR: u32 = 2;
pub const BTF_INT_BOOL: u32 = 4;
pub const NLMSG_ALIGNTO: u32 = 4;
pub const XDP_FLAGS_UPDATE_IF_NOEXIST: u32 = 1;
pub const XDP_FLAGS_SKB_MODE: u32 = 2;
pub const XDP_FLAGS_DRV_MODE: u32 = 4;
pub const XDP_FLAGS_HW_MODE: u32 = 8;
pub const XDP_FLAGS_REPLACE: u32 = 16;
pub const XDP_FLAGS_MODES: u32 = 14;
pub const XDP_FLAGS_MASK: u32 = 31;
pub const PERF_EVENT_IOC_ENABLE: u32 = 9216;
pub const PERF_EVENT_IOC_DISABLE: u32 = 9217;
pub const PERF_EVENT_IOC_REFRESH: u32 = 9218;
pub const PERF_EVENT_IOC_RESET: u32 = 9219;
pub const PERF_EVENT_IOC_PERIOD: u32 = 1074275332;
pub const PERF_EVENT_IOC_SET_OUTPUT: u32 = 9221;
pub const PERF_EVENT_IOC_SET_FILTER: u32 = 1074013190;
pub const PERF_EVENT_IOC_ID: u32 = 2147755015;
pub const PERF_EVENT_IOC_SET_BPF: u32 = 1074013192;
pub const PERF_EVENT_IOC_PAUSE_OUTPUT: u32 = 1074013193;
pub const PERF_EVENT_IOC_QUERY_BPF: u32 = 3221496842;
pub const PERF_EVENT_IOC_MODIFY_ATTRIBUTES: u32 = 1074013195;
pub const PERF_MAX_STACK_DEPTH: u32 = 127;
pub const PERF_MAX_CONTEXTS_PER_STACK: u32 = 8;
pub const PERF_FLAG_FD_NO_GROUP: u32 = 1;
pub const PERF_FLAG_FD_OUTPUT: u32 = 2;
pub const PERF_FLAG_PID_CGROUP: u32 = 4;
pub const PERF_FLAG_FD_CLOEXEC: u32 = 8;
pub const TC_H_MAJ_MASK: u32 = 4294901760;
pub const TC_H_MIN_MASK: u32 = 65535;
pub const TC_H_UNSPEC: u32 = 0;
pub const TC_H_ROOT: u32 = 4294967295;
pub const TC_H_INGRESS: u32 = 4294967281;
pub const TC_H_CLSACT: u32 = 4294967281;
pub const TC_H_MIN_PRIORITY: u32 = 65504;
pub const TC_H_MIN_INGRESS: u32 = 65522;
pub const TC_H_MIN_EGRESS: u32 = 65523;
pub const TCA_BPF_FLAG_ACT_DIRECT: u32 = 1;
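// Illustrative sketch, not emitted by bindgen: compile-time checks documenting
// how the composite constants above relate to the individual flags. Useful as a
// sanity check when the bindings are regenerated against newer kernel headers.
const _: () = assert!(XDP_FLAGS_MODES == XDP_FLAGS_SKB_MODE | XDP_FLAGS_DRV_MODE | XDP_FLAGS_HW_MODE);
const _: () = assert!(XDP_FLAGS_MASK == XDP_FLAGS_UPDATE_IF_NOEXIST | XDP_FLAGS_MODES | XDP_FLAGS_REPLACE);
const _: () = assert!(TC_H_ROOT == TC_H_MAJ_MASK | TC_H_MIN_MASK);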
pub type __u8 = ::core::ffi::c_uchar;
pub type __s16 = ::core::ffi::c_short;
pub type __u16 = ::core::ffi::c_ushort;
pub type __s32 = ::core::ffi::c_int;
pub type __u32 = ::core::ffi::c_uint;
pub type __s64 = ::core::ffi::c_longlong;
pub type __u64 = ::core::ffi::c_ulonglong;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_insn {
    pub code: __u8,
    pub _bitfield_align_1: [u8; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
    pub off: __s16,
    pub imm: __s32,
}
impl bpf_insn {
    #[inline]
    pub fn dst_reg(&self) -> __u8 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 4u8) as u8) }
    }
    #[inline]
    pub fn set_dst_reg(&mut self, val: __u8) {
        unsafe {
            let val: u8 = ::core::mem::transmute(val);
            self._bitfield_1.set(0usize, 4u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn dst_reg_raw(this: *const Self) -> __u8 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 1usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                0usize,
                4u8,
            ) as u8)
        }
    }
    #[inline]
    pub unsafe fn set_dst_reg_raw(this: *mut Self, val: __u8) {
        unsafe {
            let val: u8 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 1usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                0usize,
                4u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn src_reg(&self) -> __u8 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 4u8) as u8) }
    }
    #[inline]
    pub fn set_src_reg(&mut self, val: __u8) {
        unsafe {
            let val: u8 = ::core::mem::transmute(val);
            self._bitfield_1.set(4usize, 4u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn src_reg_raw(this: *const Self) -> __u8 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 1usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                4usize,
                4u8,
            ) as u8)
        }
    }
    #[inline]
    pub unsafe fn set_src_reg_raw(this: *mut Self, val: __u8) {
        unsafe {
            let val: u8 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 1usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                4usize,
                4u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn new_bitfield_1(dst_reg: __u8, src_reg: __u8) -> __BindgenBitfieldUnit<[u8; 1usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
        __bindgen_bitfield_unit.set(0usize, 4u8, {
            let dst_reg: u8 = unsafe { ::core::mem::transmute(dst_reg) };
            dst_reg as u64
        });
        __bindgen_bitfield_unit.set(4usize, 4u8, {
            let src_reg: u8 = unsafe { ::core::mem::transmute(src_reg) };
            src_reg as u64
        });
        __bindgen_bitfield_unit
    }
}
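// Illustrative sketch, not emitted by bindgen: hand-assembles the two-instruction
// eBPF program `r0 = 0; exit` from the opcode constants above and the bitfield
// helper generated for `bpf_insn`. Using register 0 as the destination is an
// assumption made for the example.
#[cfg(test)]
mod bpf_insn_demo {
    use super::*;

    #[test]
    fn encodes_mov_and_exit() {
        // r0 = 0  (BPF_ALU64 | BPF_MOV | BPF_K, dst = r0, imm = 0)
        let mov = bpf_insn {
            code: (BPF_ALU64 | BPF_MOV | BPF_K) as __u8,
            _bitfield_align_1: [],
            _bitfield_1: bpf_insn::new_bitfield_1(0, 0),
            off: 0,
            imm: 0,
        };
        // exit  (BPF_JMP | BPF_EXIT)
        let exit = bpf_insn {
            code: (BPF_JMP | BPF_EXIT) as __u8,
            _bitfield_align_1: [],
            _bitfield_1: bpf_insn::new_bitfield_1(0, 0),
            off: 0,
            imm: 0,
        };
        // dst_reg/src_reg come back out of the packed bitfield byte.
        assert_eq!(mov.dst_reg(), 0);
        assert_eq!(mov.src_reg(), 0);
        assert_eq!(exit.code, 0x95);
    }
}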
#[repr(C)]
#[derive(Debug)]
pub struct bpf_lpm_trie_key {
    pub prefixlen: __u32,
    pub data: __IncompleteArrayField<__u8>,
}
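// Illustrative sketch, not emitted by bindgen: `data` is a C flexible array
// member, so a usable key must live in a buffer sized for the header plus the
// prefix bytes, accessed through `__IncompleteArrayField`. The 4-byte IPv4
// prefix and /24 length below are assumptions made for the example.
#[cfg(test)]
mod lpm_trie_key_demo {
    use super::bpf_lpm_trie_key;
    use core::mem::size_of;

    #[test]
    fn writes_and_reads_flexible_array_data() {
        const KEY_LEN: usize = 4; // one IPv4 address
        // u32 backing storage keeps the 4-byte alignment bpf_lpm_trie_key needs.
        let mut buf = [0u32; (size_of::<bpf_lpm_trie_key>() + KEY_LEN + 3) / 4];
        let key = buf.as_mut_ptr() as *mut bpf_lpm_trie_key;
        unsafe {
            (*key).prefixlen = 24;
            (*key).data.as_mut_slice(KEY_LEN).copy_from_slice(&[192, 168, 1, 0]);
            assert_eq!((*key).prefixlen, 24);
            assert_eq!((*key).data.as_slice(KEY_LEN), &[192u8, 168, 1, 0]);
        }
    }
}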
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_cgroup_iter_order {
    BPF_CGROUP_ITER_ORDER_UNSPEC = 0,
    BPF_CGROUP_ITER_SELF_ONLY = 1,
    BPF_CGROUP_ITER_DESCENDANTS_PRE = 2,
    BPF_CGROUP_ITER_DESCENDANTS_POST = 3,
    BPF_CGROUP_ITER_ANCESTORS_UP = 4,
}
impl bpf_cmd {
    pub const BPF_PROG_RUN: bpf_cmd = bpf_cmd::BPF_PROG_TEST_RUN;
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_cmd {
    BPF_MAP_CREATE = 0,
    BPF_MAP_LOOKUP_ELEM = 1,
    BPF_MAP_UPDATE_ELEM = 2,
    BPF_MAP_DELETE_ELEM = 3,
    BPF_MAP_GET_NEXT_KEY = 4,
    BPF_PROG_LOAD = 5,
    BPF_OBJ_PIN = 6,
    BPF_OBJ_GET = 7,
    BPF_PROG_ATTACH = 8,
    BPF_PROG_DETACH = 9,
    BPF_PROG_TEST_RUN = 10,
    BPF_PROG_GET_NEXT_ID = 11,
    BPF_MAP_GET_NEXT_ID = 12,
    BPF_PROG_GET_FD_BY_ID = 13,
    BPF_MAP_GET_FD_BY_ID = 14,
    BPF_OBJ_GET_INFO_BY_FD = 15,
    BPF_PROG_QUERY = 16,
    BPF_RAW_TRACEPOINT_OPEN = 17,
    BPF_BTF_LOAD = 18,
    BPF_BTF_GET_FD_BY_ID = 19,
    BPF_TASK_FD_QUERY = 20,
    BPF_MAP_LOOKUP_AND_DELETE_ELEM = 21,
    BPF_MAP_FREEZE = 22,
    BPF_BTF_GET_NEXT_ID = 23,
    BPF_MAP_LOOKUP_BATCH = 24,
    BPF_MAP_LOOKUP_AND_DELETE_BATCH = 25,
    BPF_MAP_UPDATE_BATCH = 26,
    BPF_MAP_DELETE_BATCH = 27,
    BPF_LINK_CREATE = 28,
    BPF_LINK_UPDATE = 29,
    BPF_LINK_GET_FD_BY_ID = 30,
    BPF_LINK_GET_NEXT_ID = 31,
    BPF_ENABLE_STATS = 32,
    BPF_ITER_CREATE = 33,
    BPF_LINK_DETACH = 34,
    BPF_PROG_BIND_MAP = 35,
    BPF_TOKEN_CREATE = 36,
    __MAX_BPF_CMD = 37,
}
impl bpf_map_type {
    pub const BPF_MAP_TYPE_CGROUP_STORAGE: bpf_map_type =
        bpf_map_type::BPF_MAP_TYPE_CGROUP_STORAGE_DEPRECATED;
}
impl bpf_map_type {
    pub const BPF_MAP_TYPE_PERCPU_CGROUP_STORAGE: bpf_map_type =
        bpf_map_type::BPF_MAP_TYPE_PERCPU_CGROUP_STORAGE_DEPRECATED;
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_map_type {
    BPF_MAP_TYPE_UNSPEC = 0,
    BPF_MAP_TYPE_HASH = 1,
    BPF_MAP_TYPE_ARRAY = 2,
    BPF_MAP_TYPE_PROG_ARRAY = 3,
    BPF_MAP_TYPE_PERF_EVENT_ARRAY = 4,
    BPF_MAP_TYPE_PERCPU_HASH = 5,
    BPF_MAP_TYPE_PERCPU_ARRAY = 6,
    BPF_MAP_TYPE_STACK_TRACE = 7,
    BPF_MAP_TYPE_CGROUP_ARRAY = 8,
    BPF_MAP_TYPE_LRU_HASH = 9,
    BPF_MAP_TYPE_LRU_PERCPU_HASH = 10,
    BPF_MAP_TYPE_LPM_TRIE = 11,
    BPF_MAP_TYPE_ARRAY_OF_MAPS = 12,
    BPF_MAP_TYPE_HASH_OF_MAPS = 13,
    BPF_MAP_TYPE_DEVMAP = 14,
    BPF_MAP_TYPE_SOCKMAP = 15,
    BPF_MAP_TYPE_CPUMAP = 16,
    BPF_MAP_TYPE_XSKMAP = 17,
    BPF_MAP_TYPE_SOCKHASH = 18,
    BPF_MAP_TYPE_CGROUP_STORAGE_DEPRECATED = 19,
    BPF_MAP_TYPE_REUSEPORT_SOCKARRAY = 20,
    BPF_MAP_TYPE_PERCPU_CGROUP_STORAGE_DEPRECATED = 21,
    BPF_MAP_TYPE_QUEUE = 22,
    BPF_MAP_TYPE_STACK = 23,
    BPF_MAP_TYPE_SK_STORAGE = 24,
    BPF_MAP_TYPE_DEVMAP_HASH = 25,
    BPF_MAP_TYPE_STRUCT_OPS = 26,
    BPF_MAP_TYPE_RINGBUF = 27,
    BPF_MAP_TYPE_INODE_STORAGE = 28,
    BPF_MAP_TYPE_TASK_STORAGE = 29,
    BPF_MAP_TYPE_BLOOM_FILTER = 30,
    BPF_MAP_TYPE_USER_RINGBUF = 31,
    BPF_MAP_TYPE_CGRP_STORAGE = 32,
    BPF_MAP_TYPE_ARENA = 33,
    __MAX_BPF_MAP_TYPE = 34,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_prog_type {
    BPF_PROG_TYPE_UNSPEC = 0,
    BPF_PROG_TYPE_SOCKET_FILTER = 1,
    BPF_PROG_TYPE_KPROBE = 2,
    BPF_PROG_TYPE_SCHED_CLS = 3,
    BPF_PROG_TYPE_SCHED_ACT = 4,
    BPF_PROG_TYPE_TRACEPOINT = 5,
    BPF_PROG_TYPE_XDP = 6,
    BPF_PROG_TYPE_PERF_EVENT = 7,
    BPF_PROG_TYPE_CGROUP_SKB = 8,
    BPF_PROG_TYPE_CGROUP_SOCK = 9,
    BPF_PROG_TYPE_LWT_IN = 10,
    BPF_PROG_TYPE_LWT_OUT = 11,
    BPF_PROG_TYPE_LWT_XMIT = 12,
    BPF_PROG_TYPE_SOCK_OPS = 13,
    BPF_PROG_TYPE_SK_SKB = 14,
    BPF_PROG_TYPE_CGROUP_DEVICE = 15,
    BPF_PROG_TYPE_SK_MSG = 16,
    BPF_PROG_TYPE_RAW_TRACEPOINT = 17,
    BPF_PROG_TYPE_CGROUP_SOCK_ADDR = 18,
    BPF_PROG_TYPE_LWT_SEG6LOCAL = 19,
    BPF_PROG_TYPE_LIRC_MODE2 = 20,
    BPF_PROG_TYPE_SK_REUSEPORT = 21,
    BPF_PROG_TYPE_FLOW_DISSECTOR = 22,
    BPF_PROG_TYPE_CGROUP_SYSCTL = 23,
    BPF_PROG_TYPE_RAW_TRACEPOINT_WRITABLE = 24,
    BPF_PROG_TYPE_CGROUP_SOCKOPT = 25,
    BPF_PROG_TYPE_TRACING = 26,
    BPF_PROG_TYPE_STRUCT_OPS = 27,
    BPF_PROG_TYPE_EXT = 28,
    BPF_PROG_TYPE_LSM = 29,
    BPF_PROG_TYPE_SK_LOOKUP = 30,
    BPF_PROG_TYPE_SYSCALL = 31,
    BPF_PROG_TYPE_NETFILTER = 32,
    __MAX_BPF_PROG_TYPE = 33,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_attach_type {
    BPF_CGROUP_INET_INGRESS = 0,
    BPF_CGROUP_INET_EGRESS = 1,
    BPF_CGROUP_INET_SOCK_CREATE = 2,
    BPF_CGROUP_SOCK_OPS = 3,
    BPF_SK_SKB_STREAM_PARSER = 4,
    BPF_SK_SKB_STREAM_VERDICT = 5,
    BPF_CGROUP_DEVICE = 6,
    BPF_SK_MSG_VERDICT = 7,
    BPF_CGROUP_INET4_BIND = 8,
    BPF_CGROUP_INET6_BIND = 9,
    BPF_CGROUP_INET4_CONNECT = 10,
    BPF_CGROUP_INET6_CONNECT = 11,
    BPF_CGROUP_INET4_POST_BIND = 12,
    BPF_CGROUP_INET6_POST_BIND = 13,
    BPF_CGROUP_UDP4_SENDMSG = 14,
    BPF_CGROUP_UDP6_SENDMSG = 15,
    BPF_LIRC_MODE2 = 16,
    BPF_FLOW_DISSECTOR = 17,
    BPF_CGROUP_SYSCTL = 18,
    BPF_CGROUP_UDP4_RECVMSG = 19,
    BPF_CGROUP_UDP6_RECVMSG = 20,
    BPF_CGROUP_GETSOCKOPT = 21,
    BPF_CGROUP_SETSOCKOPT = 22,
    BPF_TRACE_RAW_TP = 23,
    BPF_TRACE_FENTRY = 24,
    BPF_TRACE_FEXIT = 25,
    BPF_MODIFY_RETURN = 26,
    BPF_LSM_MAC = 27,
    BPF_TRACE_ITER = 28,
    BPF_CGROUP_INET4_GETPEERNAME = 29,
    BPF_CGROUP_INET6_GETPEERNAME = 30,
    BPF_CGROUP_INET4_GETSOCKNAME = 31,
    BPF_CGROUP_INET6_GETSOCKNAME = 32,
    BPF_XDP_DEVMAP = 33,
    BPF_CGROUP_INET_SOCK_RELEASE = 34,
    BPF_XDP_CPUMAP = 35,
    BPF_SK_LOOKUP = 36,
    BPF_XDP = 37,
    BPF_SK_SKB_VERDICT = 38,
    BPF_SK_REUSEPORT_SELECT = 39,
    BPF_SK_REUSEPORT_SELECT_OR_MIGRATE = 40,
    BPF_PERF_EVENT = 41,
    BPF_TRACE_KPROBE_MULTI = 42,
    BPF_LSM_CGROUP = 43,
    BPF_STRUCT_OPS = 44,
    BPF_NETFILTER = 45,
    BPF_TCX_INGRESS = 46,
    BPF_TCX_EGRESS = 47,
    BPF_TRACE_UPROBE_MULTI = 48,
    BPF_CGROUP_UNIX_CONNECT = 49,
    BPF_CGROUP_UNIX_SENDMSG = 50,
    BPF_CGROUP_UNIX_RECVMSG = 51,
    BPF_CGROUP_UNIX_GETPEERNAME = 52,
    BPF_CGROUP_UNIX_GETSOCKNAME = 53,
    BPF_NETKIT_PRIMARY = 54,
    BPF_NETKIT_PEER = 55,
    __MAX_BPF_ATTACH_TYPE = 56,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_link_type {
    BPF_LINK_TYPE_UNSPEC = 0,
    BPF_LINK_TYPE_RAW_TRACEPOINT = 1,
    BPF_LINK_TYPE_TRACING = 2,
    BPF_LINK_TYPE_CGROUP = 3,
    BPF_LINK_TYPE_ITER = 4,
    BPF_LINK_TYPE_NETNS = 5,
    BPF_LINK_TYPE_XDP = 6,
    BPF_LINK_TYPE_PERF_EVENT = 7,
    BPF_LINK_TYPE_KPROBE_MULTI = 8,
    BPF_LINK_TYPE_STRUCT_OPS = 9,
    BPF_LINK_TYPE_NETFILTER = 10,
    BPF_LINK_TYPE_TCX = 11,
    BPF_LINK_TYPE_UPROBE_MULTI = 12,
    BPF_LINK_TYPE_NETKIT = 13,
    __MAX_BPF_LINK_TYPE = 14,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_perf_event_type {
    BPF_PERF_EVENT_UNSPEC = 0,
    BPF_PERF_EVENT_UPROBE = 1,
    BPF_PERF_EVENT_URETPROBE = 2,
    BPF_PERF_EVENT_KPROBE = 3,
    BPF_PERF_EVENT_KRETPROBE = 4,
    BPF_PERF_EVENT_TRACEPOINT = 5,
    BPF_PERF_EVENT_EVENT = 6,
}
pub const BPF_F_KPROBE_MULTI_RETURN: _bindgen_ty_2 = 1;
pub type _bindgen_ty_2 = ::core::ffi::c_uint;
pub const BPF_F_UPROBE_MULTI_RETURN: _bindgen_ty_3 = 1;
pub type _bindgen_ty_3 = ::core::ffi::c_uint;
pub const BPF_ANY: _bindgen_ty_4 = 0;
pub const BPF_NOEXIST: _bindgen_ty_4 = 1;
pub const BPF_EXIST: _bindgen_ty_4 = 2;
pub const BPF_F_LOCK: _bindgen_ty_4 = 4;
pub type _bindgen_ty_4 = ::core::ffi::c_uint;
pub const BPF_F_NO_PREALLOC: _bindgen_ty_5 = 1;
pub const BPF_F_NO_COMMON_LRU: _bindgen_ty_5 = 2;
pub const BPF_F_NUMA_NODE: _bindgen_ty_5 = 4;
pub const BPF_F_RDONLY: _bindgen_ty_5 = 8;
pub const BPF_F_WRONLY: _bindgen_ty_5 = 16;
pub const BPF_F_STACK_BUILD_ID: _bindgen_ty_5 = 32;
pub const BPF_F_ZERO_SEED: _bindgen_ty_5 = 64;
pub const BPF_F_RDONLY_PROG: _bindgen_ty_5 = 128;
pub const BPF_F_WRONLY_PROG: _bindgen_ty_5 = 256;
pub const BPF_F_CLONE: _bindgen_ty_5 = 512;
pub const BPF_F_MMAPABLE: _bindgen_ty_5 = 1024;
pub const BPF_F_PRESERVE_ELEMS: _bindgen_ty_5 = 2048;
pub const BPF_F_INNER_MAP: _bindgen_ty_5 = 4096;
pub const BPF_F_LINK: _bindgen_ty_5 = 8192;
pub const BPF_F_PATH_FD: _bindgen_ty_5 = 16384;
pub const BPF_F_VTYPE_BTF_OBJ_FD: _bindgen_ty_5 = 32768;
pub const BPF_F_TOKEN_FD: _bindgen_ty_5 = 65536;
pub const BPF_F_SEGV_ON_FAULT: _bindgen_ty_5 = 131072;
pub const BPF_F_NO_USER_CONV: _bindgen_ty_5 = 262144;
pub type _bindgen_ty_5 = ::core::ffi::c_uint;
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_stats_type {
    BPF_STATS_RUN_TIME = 0,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr {
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_1,
    pub __bindgen_anon_2: bpf_attr__bindgen_ty_2,
    pub batch: bpf_attr__bindgen_ty_3,
    pub __bindgen_anon_3: bpf_attr__bindgen_ty_4,
    pub __bindgen_anon_4: bpf_attr__bindgen_ty_5,
    pub __bindgen_anon_5: bpf_attr__bindgen_ty_6,
    pub test: bpf_attr__bindgen_ty_7,
    pub __bindgen_anon_6: bpf_attr__bindgen_ty_8,
    pub info: bpf_attr__bindgen_ty_9,
    pub query: bpf_attr__bindgen_ty_10,
    pub raw_tracepoint: bpf_attr__bindgen_ty_11,
    pub __bindgen_anon_7: bpf_attr__bindgen_ty_12,
    pub task_fd_query: bpf_attr__bindgen_ty_13,
    pub link_create: bpf_attr__bindgen_ty_14,
    pub link_update: bpf_attr__bindgen_ty_15,
    pub link_detach: bpf_attr__bindgen_ty_16,
    pub enable_stats: bpf_attr__bindgen_ty_17,
    pub iter_create: bpf_attr__bindgen_ty_18,
    pub prog_bind_map: bpf_attr__bindgen_ty_19,
    pub token_create: bpf_attr__bindgen_ty_20,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_1 {
    pub map_type: __u32,
    pub key_size: __u32,
    pub value_size: __u32,
    pub max_entries: __u32,
    pub map_flags: __u32,
    pub inner_map_fd: __u32,
    pub numa_node: __u32,
    pub map_name: [::core::ffi::c_char; 16usize],
    pub map_ifindex: __u32,
    pub btf_fd: __u32,
    pub btf_key_type_id: __u32,
    pub btf_value_type_id: __u32,
    pub btf_vmlinux_value_type_id: __u32,
    pub map_extra: __u64,
    pub value_type_btf_obj_fd: __s32,
    pub map_token_fd: __s32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_2 {
    pub map_fd: __u32,
    pub key: __u64,
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_2__bindgen_ty_1,
    pub flags: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_2__bindgen_ty_1 {
    pub value: __u64,
    pub next_key: __u64,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_3 {
    pub in_batch: __u64,
    pub out_batch: __u64,
    pub keys: __u64,
    pub values: __u64,
    pub count: __u32,
    pub map_fd: __u32,
    pub elem_flags: __u64,
    pub flags: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_4 {
    pub prog_type: __u32,
    pub insn_cnt: __u32,
    pub insns: __u64,
    pub license: __u64,
    pub log_level: __u32,
    pub log_size: __u32,
    pub log_buf: __u64,
    pub kern_version: __u32,
    pub prog_flags: __u32,
    pub prog_name: [::core::ffi::c_char; 16usize],
    pub prog_ifindex: __u32,
    pub expected_attach_type: __u32,
    pub prog_btf_fd: __u32,
    pub func_info_rec_size: __u32,
    pub func_info: __u64,
    pub func_info_cnt: __u32,
    pub line_info_rec_size: __u32,
    pub line_info: __u64,
    pub line_info_cnt: __u32,
    pub attach_btf_id: __u32,
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_4__bindgen_ty_1,
    pub core_relo_cnt: __u32,
    pub fd_array: __u64,
    pub core_relos: __u64,
    pub core_relo_rec_size: __u32,
    pub log_true_size: __u32,
    pub prog_token_fd: __s32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_4__bindgen_ty_1 {
    pub attach_prog_fd: __u32,
    pub attach_btf_obj_fd: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_5 {
    pub pathname: __u64,
    pub bpf_fd: __u32,
    pub file_flags: __u32,
    pub path_fd: __s32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_6 {
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_6__bindgen_ty_1,
    pub attach_bpf_fd: __u32,
    pub attach_type: __u32,
    pub attach_flags: __u32,
    pub replace_bpf_fd: __u32,
    pub __bindgen_anon_2: bpf_attr__bindgen_ty_6__bindgen_ty_2,
    pub expected_revision: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_6__bindgen_ty_1 {
    pub target_fd: __u32,
    pub target_ifindex: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_6__bindgen_ty_2 {
    pub relative_fd: __u32,
    pub relative_id: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_7 {
    pub prog_fd: __u32,
    pub retval: __u32,
    pub data_size_in: __u32,
    pub data_size_out: __u32,
    pub data_in: __u64,
    pub data_out: __u64,
    pub repeat: __u32,
    pub duration: __u32,
    pub ctx_size_in: __u32,
    pub ctx_size_out: __u32,
    pub ctx_in: __u64,
    pub ctx_out: __u64,
    pub flags: __u32,
    pub cpu: __u32,
    pub batch_size: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_8 {
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_8__bindgen_ty_1,
    pub next_id: __u32,
    pub open_flags: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_8__bindgen_ty_1 {
    pub start_id: __u32,
    pub prog_id: __u32,
    pub map_id: __u32,
    pub btf_id: __u32,
    pub link_id: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_9 {
    pub bpf_fd: __u32,
    pub info_len: __u32,
    pub info: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_10 {
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_10__bindgen_ty_1,
    pub attach_type: __u32,
    pub query_flags: __u32,
    pub attach_flags: __u32,
    pub prog_ids: __u64,
    pub __bindgen_anon_2: bpf_attr__bindgen_ty_10__bindgen_ty_2,
    pub _bitfield_align_1: [u8; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
    pub prog_attach_flags: __u64,
    pub link_ids: __u64,
    pub link_attach_flags: __u64,
    pub revision: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_10__bindgen_ty_1 {
    pub target_fd: __u32,
    pub target_ifindex: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_10__bindgen_ty_2 {
    pub prog_cnt: __u32,
    pub count: __u32,
}
impl bpf_attr__bindgen_ty_10 {
    #[inline]
    pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 4usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
        __bindgen_bitfield_unit
    }
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_11 {
    pub name: __u64,
    pub prog_fd: __u32,
    pub _bitfield_align_1: [u8; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
    pub cookie: __u64,
}
impl bpf_attr__bindgen_ty_11 {
    #[inline]
    pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 4usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
        __bindgen_bitfield_unit
    }
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_12 {
    pub btf: __u64,
    pub btf_log_buf: __u64,
    pub btf_size: __u32,
    pub btf_log_size: __u32,
    pub btf_log_level: __u32,
    pub btf_log_true_size: __u32,
    pub btf_flags: __u32,
    pub btf_token_fd: __s32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_13 {
    pub pid: __u32,
    pub fd: __u32,
    pub flags: __u32,
    pub buf_len: __u32,
    pub buf: __u64,
    pub prog_id: __u32,
    pub fd_type: __u32,
    pub probe_offset: __u64,
    pub probe_addr: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14 {
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_14__bindgen_ty_1,
    pub __bindgen_anon_2: bpf_attr__bindgen_ty_14__bindgen_ty_2,
    pub attach_type: __u32,
    pub flags: __u32,
    pub __bindgen_anon_3: bpf_attr__bindgen_ty_14__bindgen_ty_3,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_14__bindgen_ty_1 {
    pub prog_fd: __u32,
    pub map_fd: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_14__bindgen_ty_2 {
    pub target_fd: __u32,
    pub target_ifindex: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_14__bindgen_ty_3 {
    pub target_btf_id: __u32,
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_1,
    pub perf_event: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_2,
    pub kprobe_multi: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_3,
    pub tracing: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_4,
    pub netfilter: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_5,
    pub tcx: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_6,
    pub uprobe_multi: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_7,
    pub netkit: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_8,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_1 {
    pub iter_info: __u64,
    pub iter_info_len: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_2 {
    pub bpf_cookie: __u64,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_3 {
    pub flags: __u32,
    pub cnt: __u32,
    pub syms: __u64,
    pub addrs: __u64,
    pub cookies: __u64,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_4 {
    pub target_btf_id: __u32,
    pub cookie: __u64,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_5 {
    pub pf: __u32,
    pub hooknum: __u32,
    pub priority: __s32,
    pub flags: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_6 {
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_6__bindgen_ty_1,
    pub expected_revision: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_6__bindgen_ty_1 {
    pub relative_fd: __u32,
    pub relative_id: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_7 {
    pub path: __u64,
    pub offsets: __u64,
    pub ref_ctr_offsets: __u64,
    pub cookies: __u64,
    pub cnt: __u32,
    pub flags: __u32,
    pub pid: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_8 {
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_8__bindgen_ty_1,
    pub expected_revision: __u64,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_14__bindgen_ty_3__bindgen_ty_8__bindgen_ty_1 {
    pub relative_fd: __u32,
    pub relative_id: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct bpf_attr__bindgen_ty_15 {
    pub link_fd: __u32,
    pub __bindgen_anon_1: bpf_attr__bindgen_ty_15__bindgen_ty_1,
    pub flags: __u32,
    pub __bindgen_anon_2: bpf_attr__bindgen_ty_15__bindgen_ty_2,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_15__bindgen_ty_1 {
    pub new_prog_fd: __u32,
    pub new_map_fd: __u32,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union bpf_attr__bindgen_ty_15__bindgen_ty_2 {
    pub old_prog_fd: __u32,
    pub old_map_fd: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_16 {
    pub link_fd: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_17 {
    pub type_: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_18 {
    pub link_fd: __u32,
    pub flags: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_19 {
    pub prog_fd: __u32,
    pub map_fd: __u32,
    pub flags: __u32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct bpf_attr__bindgen_ty_20 {
    pub flags: __u32,
    pub bpffs_fd: __u32,
}
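// Illustrative sketch, not emitted by bindgen: `bpf_attr` is the argument union
// of the bpf(2) syscall, and each command only reads the variant it cares about.
// This fills the map-creation variant for a hypothetical 4-byte-key /
// 8-byte-value hash map; no syscall is issued here.
#[cfg(test)]
mod bpf_attr_demo {
    use super::*;

    #[test]
    fn fills_map_create_variant() {
        // All-zero is a valid starting point: the kernel expects unused fields
        // and trailing union bytes to be zero.
        let mut map_create: bpf_attr__bindgen_ty_1 = unsafe { core::mem::zeroed() };
        map_create.map_type = bpf_map_type::BPF_MAP_TYPE_HASH as __u32;
        map_create.key_size = 4;
        map_create.value_size = 8;
        map_create.max_entries = 1024;
        let mut attr: bpf_attr = unsafe { core::mem::zeroed() };
        // Writing a whole Copy union field is safe; reading one back is not.
        attr.__bindgen_anon_1 = map_create;
        unsafe {
            assert_eq!(attr.__bindgen_anon_1.map_type, bpf_map_type::BPF_MAP_TYPE_HASH as __u32);
            assert_eq!(attr.__bindgen_anon_1.key_size, 4);
        }
    }
}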
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum bpf_func_id {
    BPF_FUNC_unspec = 0,
    BPF_FUNC_map_lookup_elem = 1,
    BPF_FUNC_map_update_elem = 2,
    BPF_FUNC_map_delete_elem = 3,
    BPF_FUNC_probe_read = 4,
    BPF_FUNC_ktime_get_ns = 5,
    BPF_FUNC_trace_printk = 6,
    BPF_FUNC_get_prandom_u32 = 7,
    BPF_FUNC_get_smp_processor_id = 8,
    BPF_FUNC_skb_store_bytes = 9,
    BPF_FUNC_l3_csum_replace = 10,
    BPF_FUNC_l4_csum_replace = 11,
    BPF_FUNC_tail_call = 12,
    BPF_FUNC_clone_redirect = 13,
    BPF_FUNC_get_current_pid_tgid = 14,
    BPF_FUNC_get_current_uid_gid = 15,
    BPF_FUNC_get_current_comm = 16,
    BPF_FUNC_get_cgroup_classid = 17,
    BPF_FUNC_skb_vlan_push = 18,
    BPF_FUNC_skb_vlan_pop = 19,
    BPF_FUNC_skb_get_tunnel_key = 20,
    BPF_FUNC_skb_set_tunnel_key = 21,
    BPF_FUNC_perf_event_read = 22,
    BPF_FUNC_redirect = 23,
    BPF_FUNC_get_route_realm = 24,
    BPF_FUNC_perf_event_output = 25,
    BPF_FUNC_skb_load_bytes = 26,
    BPF_FUNC_get_stackid = 27,
    BPF_FUNC_csum_diff = 28,
    BPF_FUNC_skb_get_tunnel_opt = 29,
    BPF_FUNC_skb_set_tunnel_opt = 30,
    BPF_FUNC_skb_change_proto = 31,
    BPF_FUNC_skb_change_type = 32,
    BPF_FUNC_skb_under_cgroup = 33,
    BPF_FUNC_get_hash_recalc = 34,
    BPF_FUNC_get_current_task = 35,
    BPF_FUNC_probe_write_user = 36,
    BPF_FUNC_current_task_under_cgroup = 37,
    BPF_FUNC_skb_change_tail = 38,
    BPF_FUNC_skb_pull_data = 39,
    BPF_FUNC_csum_update = 40,
    BPF_FUNC_set_hash_invalid = 41,
    BPF_FUNC_get_numa_node_id = 42,
    BPF_FUNC_skb_change_head = 43,
    BPF_FUNC_xdp_adjust_head = 44,
    BPF_FUNC_probe_read_str = 45,
    BPF_FUNC_get_socket_cookie = 46,
    BPF_FUNC_get_socket_uid = 47,
    BPF_FUNC_set_hash = 48,
    BPF_FUNC_setsockopt = 49,
    BPF_FUNC_skb_adjust_room = 50,
    BPF_FUNC_redirect_map = 51,
    BPF_FUNC_sk_redirect_map = 52,
    BPF_FUNC_sock_map_update = 53,
    BPF_FUNC_xdp_adjust_meta = 54,
    BPF_FUNC_perf_event_read_value = 55,
    BPF_FUNC_perf_prog_read_value = 56,
    BPF_FUNC_getsockopt = 57,
    BPF_FUNC_override_return = 58,
    BPF_FUNC_sock_ops_cb_flags_set = 59,
    BPF_FUNC_msg_redirect_map = 60,
    BPF_FUNC_msg_apply_bytes = 61,
    BPF_FUNC_msg_cork_bytes = 62,
    BPF_FUNC_msg_pull_data = 63,
    BPF_FUNC_bind = 64,
    BPF_FUNC_xdp_adjust_tail = 65,
    BPF_FUNC_skb_get_xfrm_state = 66,
    BPF_FUNC_get_stack = 67,
    BPF_FUNC_skb_load_bytes_relative = 68,
    BPF_FUNC_fib_lookup = 69,
    BPF_FUNC_sock_hash_update = 70,
    BPF_FUNC_msg_redirect_hash = 71,
    BPF_FUNC_sk_redirect_hash = 72,
    BPF_FUNC_lwt_push_encap = 73,
    BPF_FUNC_lwt_seg6_store_bytes = 74,
    BPF_FUNC_lwt_seg6_adjust_srh = 75,
    BPF_FUNC_lwt_seg6_action = 76,
    BPF_FUNC_rc_repeat = 77,
    BPF_FUNC_rc_keydown = 78,
    BPF_FUNC_skb_cgroup_id = 79,
    BPF_FUNC_get_current_cgroup_id = 80,
    BPF_FUNC_get_local_storage = 81,
    BPF_FUNC_sk_select_reuseport = 82,
    BPF_FUNC_skb_ancestor_cgroup_id = 83,
    BPF_FUNC_sk_lookup_tcp = 84,
    BPF_FUNC_sk_lookup_udp = 85,
    BPF_FUNC_sk_release = 86,
    BPF_FUNC_map_push_elem = 87,
    BPF_FUNC_map_pop_elem = 88,
    BPF_FUNC_map_peek_elem = 89,
    BPF_FUNC_msg_push_data = 90,
    BPF_FUNC_msg_pop_data = 91,
    BPF_FUNC_rc_pointer_rel = 92,
    BPF_FUNC_spin_lock = 93,
    BPF_FUNC_spin_unlock = 94,
    BPF_FUNC_sk_fullsock = 95,
    BPF_FUNC_tcp_sock = 96,
    BPF_FUNC_skb_ecn_set_ce = 97,
    BPF_FUNC_get_listener_sock = 98,
    BPF_FUNC_skc_lookup_tcp = 99,
    BPF_FUNC_tcp_check_syncookie = 100,
    BPF_FUNC_sysctl_get_name = 101,
    BPF_FUNC_sysctl_get_current_value = 102,
    BPF_FUNC_sysctl_get_new_value = 103,
    BPF_FUNC_sysctl_set_new_value = 104,
    BPF_FUNC_strtol = 105,
    BPF_FUNC_strtoul = 106,
    BPF_FUNC_sk_storage_get = 107,
    BPF_FUNC_sk_storage_delete = 108,
    BPF_FUNC_send_signal = 109,
    BPF_FUNC_tcp_gen_syncookie = 110,
    BPF_FUNC_skb_output = 111,
    BPF_FUNC_probe_read_user = 112,
    BPF_FUNC_probe_read_kernel = 113,
    BPF_FUNC_probe_read_user_str = 114,
    BPF_FUNC_probe_read_kernel_str = 115,
    BPF_FUNC_tcp_send_ack = 116,
    BPF_FUNC_send_signal_thread = 117,
    BPF_FUNC_jiffies64 = 118,
    BPF_FUNC_read_branch_records = 119,
    BPF_FUNC_get_ns_current_pid_tgid = 120,
    BPF_FUNC_xdp_output = 121,
    BPF_FUNC_get_netns_cookie = 122,
    BPF_FUNC_get_current_ancestor_cgroup_id = 123,
    BPF_FUNC_sk_assign = 124,
    BPF_FUNC_ktime_get_boot_ns = 125,
    BPF_FUNC_seq_printf = 126,
    BPF_FUNC_seq_write = 127,
    BPF_FUNC_sk_cgroup_id = 128,
    BPF_FUNC_sk_ancestor_cgroup_id = 129,
    BPF_FUNC_ringbuf_output = 130,
    BPF_FUNC_ringbuf_reserve = 131,
    BPF_FUNC_ringbuf_submit = 132,
    BPF_FUNC_ringbuf_discard = 133,
    BPF_FUNC_ringbuf_query = 134,
    BPF_FUNC_csum_level = 135,
    BPF_FUNC_skc_to_tcp6_sock = 136,
    BPF_FUNC_skc_to_tcp_sock = 137,
    BPF_FUNC_skc_to_tcp_timewait_sock = 138,
    BPF_FUNC_skc_to_tcp_request_sock = 139,
    BPF_FUNC_skc_to_udp6_sock = 140,
    BPF_FUNC_get_task_stack = 141,
    BPF_FUNC_load_hdr_opt = 142,
    BPF_FUNC_store_hdr_opt = 143,
    BPF_FUNC_reserve_hdr_opt = 144,
    BPF_FUNC_inode_storage_get = 145,
    BPF_FUNC_inode_storage_delete = 146,
    BPF_FUNC_d_path = 147,
    BPF_FUNC_copy_from_user = 148,
    BPF_FUNC_snprintf_btf = 149,
    BPF_FUNC_seq_printf_btf = 150,
    BPF_FUNC_skb_cgroup_classid = 151,
    BPF_FUNC_redirect_neigh = 152,
    BPF_FUNC_per_cpu_ptr = 153,
    BPF_FUNC_this_cpu_ptr = 154,
    BPF_FUNC_redirect_peer = 155,
    BPF_FUNC_task_storage_get = 156,
    BPF_FUNC_task_storage_delete = 157,
    BPF_FUNC_get_current_task_btf = 158,
    BPF_FUNC_bprm_opts_set = 159,
    BPF_FUNC_ktime_get_coarse_ns = 160,
    BPF_FUNC_ima_inode_hash = 161,
    BPF_FUNC_sock_from_file = 162,
    BPF_FUNC_check_mtu = 163,
    BPF_FUNC_for_each_map_elem = 164,
    BPF_FUNC_snprintf = 165,
    BPF_FUNC_sys_bpf = 166,
    BPF_FUNC_btf_find_by_name_kind = 167,
    BPF_FUNC_sys_close = 168,
    BPF_FUNC_timer_init = 169,
    BPF_FUNC_timer_set_callback = 170,
    BPF_FUNC_timer_start = 171,
    BPF_FUNC_timer_cancel = 172,
    BPF_FUNC_get_func_ip = 173,
    BPF_FUNC_get_attach_cookie = 174,
    BPF_FUNC_task_pt_regs = 175,
    BPF_FUNC_get_branch_snapshot = 176,
    BPF_FUNC_trace_vprintk = 177,
    BPF_FUNC_skc_to_unix_sock = 178,
    BPF_FUNC_kallsyms_lookup_name = 179,
    BPF_FUNC_find_vma = 180,
    BPF_FUNC_loop = 181,
    BPF_FUNC_strncmp = 182,
    BPF_FUNC_get_func_arg = 183,
    BPF_FUNC_get_func_ret = 184,
    BPF_FUNC_get_func_arg_cnt = 185,
    BPF_FUNC_get_retval = 186,
    BPF_FUNC_set_retval = 187,
    BPF_FUNC_xdp_get_buff_len = 188,
    BPF_FUNC_xdp_load_bytes = 189,
    BPF_FUNC_xdp_store_bytes = 190,
    BPF_FUNC_copy_from_user_task = 191,
    BPF_FUNC_skb_set_tstamp = 192,
    BPF_FUNC_ima_file_hash = 193,
    BPF_FUNC_kptr_xchg = 194,
    BPF_FUNC_map_lookup_percpu_elem = 195,
    BPF_FUNC_skc_to_mptcp_sock = 196,
    BPF_FUNC_dynptr_from_mem = 197,
    BPF_FUNC_ringbuf_reserve_dynptr = 198,
    BPF_FUNC_ringbuf_submit_dynptr = 199,
    BPF_FUNC_ringbuf_discard_dynptr = 200,
    BPF_FUNC_dynptr_read = 201,
    BPF_FUNC_dynptr_write = 202,
    BPF_FUNC_dynptr_data = 203,
    BPF_FUNC_tcp_raw_gen_syncookie_ipv4 = 204,
    BPF_FUNC_tcp_raw_gen_syncookie_ipv6 = 205,
    BPF_FUNC_tcp_raw_check_syncookie_ipv4 = 206,
    BPF_FUNC_tcp_raw_check_syncookie_ipv6 = 207,
    BPF_FUNC_ktime_get_tai_ns = 208,
    BPF_FUNC_user_ringbuf_drain = 209,
    BPF_FUNC_cgrp_storage_get = 210,
    BPF_FUNC_cgrp_storage_delete = 211,
    __BPF_FUNC_MAX_ID = 212,
}
pub const BPF_F_RECOMPUTE_CSUM: _bindgen_ty_6 = 1;
pub const BPF_F_INVALIDATE_HASH: _bindgen_ty_6 = 2;
pub type _bindgen_ty_6 = ::core::ffi::c_uint;
pub const BPF_F_HDR_FIELD_MASK: _bindgen_ty_7 = 15;
pub type _bindgen_ty_7 = ::core::ffi::c_uint;
pub const BPF_F_PSEUDO_HDR: _bindgen_ty_8 = 16;
pub const BPF_F_MARK_MANGLED_0: _bindgen_ty_8 = 32;
pub const BPF_F_MARK_ENFORCE: _bindgen_ty_8 = 64;
pub type _bindgen_ty_8 = ::core::ffi::c_uint;
pub const BPF_F_INGRESS: _bindgen_ty_9 = 1;
pub type _bindgen_ty_9 = ::core::ffi::c_uint;
pub const BPF_F_TUNINFO_IPV6: _bindgen_ty_10 = 1;
pub type _bindgen_ty_10 = ::core::ffi::c_uint;
pub const BPF_F_SKIP_FIELD_MASK: _bindgen_ty_11 = 255;
pub const BPF_F_USER_STACK: _bindgen_ty_11 = 256;
pub const BPF_F_FAST_STACK_CMP: _bindgen_ty_11 = 512;
pub const BPF_F_REUSE_STACKID: _bindgen_ty_11 = 1024;
pub const BPF_F_USER_BUILD_ID: _bindgen_ty_11 = 2048;
pub type _bindgen_ty_11 = ::core::ffi::c_uint;
pub const BPF_F_ZERO_CSUM_TX: _bindgen_ty_12 = 2;
pub const BPF_F_DONT_FRAGMENT: _bindgen_ty_12 = 4;
pub const BPF_F_SEQ_NUMBER: _bindgen_ty_12 = 8;
pub const BPF_F_NO_TUNNEL_KEY: _bindgen_ty_12 = 16;
pub type _bindgen_ty_12 = ::core::ffi::c_uint;
pub const BPF_F_TUNINFO_FLAGS: _bindgen_ty_13 = 16;
pub type _bindgen_ty_13 = ::core::ffi::c_uint;
pub const BPF_F_INDEX_MASK: _bindgen_ty_14 = 4294967295;
  1274. pub const BPF_F_CURRENT_CPU: _bindgen_ty_14 = 4294967295;
  1275. pub const BPF_F_CTXLEN_MASK: _bindgen_ty_14 = 4503595332403200;
  1276. pub type _bindgen_ty_14 = ::core::ffi::c_ulonglong;
  1277. pub const BPF_F_CURRENT_NETNS: _bindgen_ty_15 = -1;
  1278. pub type _bindgen_ty_15 = ::core::ffi::c_int;
  1279. pub const BPF_F_ADJ_ROOM_FIXED_GSO: _bindgen_ty_17 = 1;
  1280. pub const BPF_F_ADJ_ROOM_ENCAP_L3_IPV4: _bindgen_ty_17 = 2;
  1281. pub const BPF_F_ADJ_ROOM_ENCAP_L3_IPV6: _bindgen_ty_17 = 4;
  1282. pub const BPF_F_ADJ_ROOM_ENCAP_L4_GRE: _bindgen_ty_17 = 8;
  1283. pub const BPF_F_ADJ_ROOM_ENCAP_L4_UDP: _bindgen_ty_17 = 16;
  1284. pub const BPF_F_ADJ_ROOM_NO_CSUM_RESET: _bindgen_ty_17 = 32;
  1285. pub const BPF_F_ADJ_ROOM_ENCAP_L2_ETH: _bindgen_ty_17 = 64;
  1286. pub const BPF_F_ADJ_ROOM_DECAP_L3_IPV4: _bindgen_ty_17 = 128;
  1287. pub const BPF_F_ADJ_ROOM_DECAP_L3_IPV6: _bindgen_ty_17 = 256;
  1288. pub type _bindgen_ty_17 = ::core::ffi::c_uint;
  1289. pub const BPF_F_SYSCTL_BASE_NAME: _bindgen_ty_19 = 1;
  1290. pub type _bindgen_ty_19 = ::core::ffi::c_uint;
  1291. pub const BPF_F_GET_BRANCH_RECORDS_SIZE: _bindgen_ty_21 = 1;
  1292. pub type _bindgen_ty_21 = ::core::ffi::c_uint;
  1293. pub const BPF_RINGBUF_BUSY_BIT: _bindgen_ty_24 = 2147483648;
  1294. pub const BPF_RINGBUF_DISCARD_BIT: _bindgen_ty_24 = 1073741824;
  1295. pub const BPF_RINGBUF_HDR_SZ: _bindgen_ty_24 = 8;
  1296. pub type _bindgen_ty_24 = ::core::ffi::c_uint;
  1297. pub const BPF_F_BPRM_SECUREEXEC: _bindgen_ty_26 = 1;
  1298. pub type _bindgen_ty_26 = ::core::ffi::c_uint;
  1299. pub const BPF_F_BROADCAST: _bindgen_ty_27 = 8;
  1300. pub const BPF_F_EXCLUDE_INGRESS: _bindgen_ty_27 = 16;
  1301. pub type _bindgen_ty_27 = ::core::ffi::c_uint;
  1302. #[repr(C)]
  1303. #[derive(Copy, Clone)]
  1304. pub struct bpf_devmap_val {
  1305. pub ifindex: __u32,
  1306. pub bpf_prog: bpf_devmap_val__bindgen_ty_1,
  1307. }
  1308. #[repr(C)]
  1309. #[derive(Copy, Clone)]
  1310. pub union bpf_devmap_val__bindgen_ty_1 {
  1311. pub fd: ::core::ffi::c_int,
  1312. pub id: __u32,
  1313. }
  1314. #[repr(C)]
  1315. #[derive(Copy, Clone)]
  1316. pub struct bpf_cpumap_val {
  1317. pub qsize: __u32,
  1318. pub bpf_prog: bpf_cpumap_val__bindgen_ty_1,
  1319. }
  1320. #[repr(C)]
  1321. #[derive(Copy, Clone)]
  1322. pub union bpf_cpumap_val__bindgen_ty_1 {
  1323. pub fd: ::core::ffi::c_int,
  1324. pub id: __u32,
  1325. }
  1326. #[repr(C)]
  1327. #[derive(Debug, Copy, Clone)]
  1328. pub struct bpf_prog_info {
  1329. pub type_: __u32,
  1330. pub id: __u32,
  1331. pub tag: [__u8; 8usize],
  1332. pub jited_prog_len: __u32,
  1333. pub xlated_prog_len: __u32,
  1334. pub jited_prog_insns: __u64,
  1335. pub xlated_prog_insns: __u64,
  1336. pub load_time: __u64,
  1337. pub created_by_uid: __u32,
  1338. pub nr_map_ids: __u32,
  1339. pub map_ids: __u64,
  1340. pub name: [::core::ffi::c_char; 16usize],
  1341. pub ifindex: __u32,
  1342. pub _bitfield_align_1: [u8; 0],
  1343. pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
  1344. pub netns_dev: __u64,
  1345. pub netns_ino: __u64,
  1346. pub nr_jited_ksyms: __u32,
  1347. pub nr_jited_func_lens: __u32,
  1348. pub jited_ksyms: __u64,
  1349. pub jited_func_lens: __u64,
  1350. pub btf_id: __u32,
  1351. pub func_info_rec_size: __u32,
  1352. pub func_info: __u64,
  1353. pub nr_func_info: __u32,
  1354. pub nr_line_info: __u32,
  1355. pub line_info: __u64,
  1356. pub jited_line_info: __u64,
  1357. pub nr_jited_line_info: __u32,
  1358. pub line_info_rec_size: __u32,
  1359. pub jited_line_info_rec_size: __u32,
  1360. pub nr_prog_tags: __u32,
  1361. pub prog_tags: __u64,
  1362. pub run_time_ns: __u64,
  1363. pub run_cnt: __u64,
  1364. pub recursion_misses: __u64,
  1365. pub verified_insns: __u32,
  1366. pub attach_btf_obj_id: __u32,
  1367. pub attach_btf_id: __u32,
  1368. }
  1369. impl bpf_prog_info {
  1370. #[inline]
  1371. pub fn gpl_compatible(&self) -> __u32 {
  1372. unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
  1373. }
  1374. #[inline]
  1375. pub fn set_gpl_compatible(&mut self, val: __u32) {
  1376. unsafe {
  1377. let val: u32 = ::core::mem::transmute(val);
  1378. self._bitfield_1.set(0usize, 1u8, val as u64)
  1379. }
  1380. }
  1381. #[inline]
  1382. pub unsafe fn gpl_compatible_raw(this: *const Self) -> __u32 {
  1383. unsafe {
  1384. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 4usize]>>::raw_get(
  1385. ::core::ptr::addr_of!((*this)._bitfield_1),
  1386. 0usize,
  1387. 1u8,
  1388. ) as u32)
  1389. }
  1390. }
  1391. #[inline]
  1392. pub unsafe fn set_gpl_compatible_raw(this: *mut Self, val: __u32) {
  1393. unsafe {
  1394. let val: u32 = ::core::mem::transmute(val);
  1395. <__BindgenBitfieldUnit<[u8; 4usize]>>::raw_set(
  1396. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  1397. 0usize,
  1398. 1u8,
  1399. val as u64,
  1400. )
  1401. }
  1402. }
  1403. #[inline]
  1404. pub fn new_bitfield_1(gpl_compatible: __u32) -> __BindgenBitfieldUnit<[u8; 4usize]> {
  1405. let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
  1406. __bindgen_bitfield_unit.set(0usize, 1u8, {
  1407. let gpl_compatible: u32 = unsafe { ::core::mem::transmute(gpl_compatible) };
  1408. gpl_compatible as u64
  1409. });
  1410. __bindgen_bitfield_unit
  1411. }
  1412. }
  1413. #[repr(C)]
  1414. #[derive(Debug, Copy, Clone)]
  1415. pub struct bpf_map_info {
  1416. pub type_: __u32,
  1417. pub id: __u32,
  1418. pub key_size: __u32,
  1419. pub value_size: __u32,
  1420. pub max_entries: __u32,
  1421. pub map_flags: __u32,
  1422. pub name: [::core::ffi::c_char; 16usize],
  1423. pub ifindex: __u32,
  1424. pub btf_vmlinux_value_type_id: __u32,
  1425. pub netns_dev: __u64,
  1426. pub netns_ino: __u64,
  1427. pub btf_id: __u32,
  1428. pub btf_key_type_id: __u32,
  1429. pub btf_value_type_id: __u32,
  1430. pub btf_vmlinux_id: __u32,
  1431. pub map_extra: __u64,
  1432. }
  1433. #[repr(C)]
  1434. #[derive(Debug, Copy, Clone)]
  1435. pub struct bpf_btf_info {
  1436. pub btf: __u64,
  1437. pub btf_size: __u32,
  1438. pub id: __u32,
  1439. pub name: __u64,
  1440. pub name_len: __u32,
  1441. pub kernel_btf: __u32,
  1442. }
  1443. #[repr(C)]
  1444. #[derive(Copy, Clone)]
  1445. pub struct bpf_link_info {
  1446. pub type_: __u32,
  1447. pub id: __u32,
  1448. pub prog_id: __u32,
  1449. pub __bindgen_anon_1: bpf_link_info__bindgen_ty_1,
  1450. }
  1451. #[repr(C)]
  1452. #[derive(Copy, Clone)]
  1453. pub union bpf_link_info__bindgen_ty_1 {
  1454. pub raw_tracepoint: bpf_link_info__bindgen_ty_1__bindgen_ty_1,
  1455. pub tracing: bpf_link_info__bindgen_ty_1__bindgen_ty_2,
  1456. pub cgroup: bpf_link_info__bindgen_ty_1__bindgen_ty_3,
  1457. pub iter: bpf_link_info__bindgen_ty_1__bindgen_ty_4,
  1458. pub netns: bpf_link_info__bindgen_ty_1__bindgen_ty_5,
  1459. pub xdp: bpf_link_info__bindgen_ty_1__bindgen_ty_6,
  1460. pub struct_ops: bpf_link_info__bindgen_ty_1__bindgen_ty_7,
  1461. pub netfilter: bpf_link_info__bindgen_ty_1__bindgen_ty_8,
  1462. pub kprobe_multi: bpf_link_info__bindgen_ty_1__bindgen_ty_9,
  1463. pub uprobe_multi: bpf_link_info__bindgen_ty_1__bindgen_ty_10,
  1464. pub perf_event: bpf_link_info__bindgen_ty_1__bindgen_ty_11,
  1465. pub tcx: bpf_link_info__bindgen_ty_1__bindgen_ty_12,
  1466. pub netkit: bpf_link_info__bindgen_ty_1__bindgen_ty_13,
  1467. }
  1468. #[repr(C)]
  1469. #[derive(Debug, Copy, Clone)]
  1470. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_1 {
  1471. pub tp_name: __u64,
  1472. pub tp_name_len: __u32,
  1473. }
  1474. #[repr(C)]
  1475. #[derive(Debug, Copy, Clone)]
  1476. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_2 {
  1477. pub attach_type: __u32,
  1478. pub target_obj_id: __u32,
  1479. pub target_btf_id: __u32,
  1480. }
  1481. #[repr(C)]
  1482. #[derive(Debug, Copy, Clone)]
  1483. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_3 {
  1484. pub cgroup_id: __u64,
  1485. pub attach_type: __u32,
  1486. }
  1487. #[repr(C)]
  1488. #[derive(Copy, Clone)]
  1489. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_4 {
  1490. pub target_name: __u64,
  1491. pub target_name_len: __u32,
  1492. pub __bindgen_anon_1: bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_1,
  1493. pub __bindgen_anon_2: bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_2,
  1494. }
  1495. #[repr(C)]
  1496. #[derive(Copy, Clone)]
  1497. pub union bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_1 {
  1498. pub map: bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_1__bindgen_ty_1,
  1499. }
  1500. #[repr(C)]
  1501. #[derive(Debug, Copy, Clone)]
  1502. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_1__bindgen_ty_1 {
  1503. pub map_id: __u32,
  1504. }
  1505. #[repr(C)]
  1506. #[derive(Copy, Clone)]
  1507. pub union bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_2 {
  1508. pub cgroup: bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_2__bindgen_ty_1,
  1509. pub task: bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_2__bindgen_ty_2,
  1510. }
  1511. #[repr(C)]
  1512. #[derive(Debug, Copy, Clone)]
  1513. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_2__bindgen_ty_1 {
  1514. pub cgroup_id: __u64,
  1515. pub order: __u32,
  1516. }
  1517. #[repr(C)]
  1518. #[derive(Debug, Copy, Clone)]
  1519. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_4__bindgen_ty_2__bindgen_ty_2 {
  1520. pub tid: __u32,
  1521. pub pid: __u32,
  1522. }
  1523. #[repr(C)]
  1524. #[derive(Debug, Copy, Clone)]
  1525. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_5 {
  1526. pub netns_ino: __u32,
  1527. pub attach_type: __u32,
  1528. }
  1529. #[repr(C)]
  1530. #[derive(Debug, Copy, Clone)]
  1531. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_6 {
  1532. pub ifindex: __u32,
  1533. }
  1534. #[repr(C)]
  1535. #[derive(Debug, Copy, Clone)]
  1536. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_7 {
  1537. pub map_id: __u32,
  1538. }
  1539. #[repr(C)]
  1540. #[derive(Debug, Copy, Clone)]
  1541. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_8 {
  1542. pub pf: __u32,
  1543. pub hooknum: __u32,
  1544. pub priority: __s32,
  1545. pub flags: __u32,
  1546. }
  1547. #[repr(C)]
  1548. #[derive(Debug, Copy, Clone)]
  1549. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_9 {
  1550. pub addrs: __u64,
  1551. pub count: __u32,
  1552. pub flags: __u32,
  1553. pub missed: __u64,
  1554. pub cookies: __u64,
  1555. }
  1556. #[repr(C)]
  1557. #[derive(Debug, Copy, Clone)]
  1558. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_10 {
  1559. pub path: __u64,
  1560. pub offsets: __u64,
  1561. pub ref_ctr_offsets: __u64,
  1562. pub cookies: __u64,
  1563. pub path_size: __u32,
  1564. pub count: __u32,
  1565. pub flags: __u32,
  1566. pub pid: __u32,
  1567. }
  1568. #[repr(C)]
  1569. #[derive(Copy, Clone)]
  1570. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_11 {
  1571. pub type_: __u32,
  1572. pub _bitfield_align_1: [u8; 0],
  1573. pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
  1574. pub __bindgen_anon_1: bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1,
  1575. }
  1576. #[repr(C)]
  1577. #[derive(Copy, Clone)]
  1578. pub union bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1 {
  1579. pub uprobe: bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_1,
  1580. pub kprobe: bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_2,
  1581. pub tracepoint: bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_3,
  1582. pub event: bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_4,
  1583. }
  1584. #[repr(C)]
  1585. #[derive(Debug, Copy, Clone)]
  1586. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_1 {
  1587. pub file_name: __u64,
  1588. pub name_len: __u32,
  1589. pub offset: __u32,
  1590. pub cookie: __u64,
  1591. }
  1592. #[repr(C)]
  1593. #[derive(Debug, Copy, Clone)]
  1594. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_2 {
  1595. pub func_name: __u64,
  1596. pub name_len: __u32,
  1597. pub offset: __u32,
  1598. pub addr: __u64,
  1599. pub missed: __u64,
  1600. pub cookie: __u64,
  1601. }
  1602. #[repr(C)]
  1603. #[derive(Debug, Copy, Clone)]
  1604. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_3 {
  1605. pub tp_name: __u64,
  1606. pub name_len: __u32,
  1607. pub _bitfield_align_1: [u8; 0],
  1608. pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
  1609. pub cookie: __u64,
  1610. }
  1611. impl bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_3 {
  1612. #[inline]
  1613. pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 4usize]> {
  1614. let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
  1615. __bindgen_bitfield_unit
  1616. }
  1617. }
  1618. #[repr(C)]
  1619. #[derive(Debug, Copy, Clone)]
  1620. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_4 {
  1621. pub config: __u64,
  1622. pub type_: __u32,
  1623. pub _bitfield_align_1: [u8; 0],
  1624. pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
  1625. pub cookie: __u64,
  1626. }
  1627. impl bpf_link_info__bindgen_ty_1__bindgen_ty_11__bindgen_ty_1__bindgen_ty_4 {
  1628. #[inline]
  1629. pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 4usize]> {
  1630. let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
  1631. __bindgen_bitfield_unit
  1632. }
  1633. }
  1634. impl bpf_link_info__bindgen_ty_1__bindgen_ty_11 {
  1635. #[inline]
  1636. pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 4usize]> {
  1637. let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
  1638. __bindgen_bitfield_unit
  1639. }
  1640. }
  1641. #[repr(C)]
  1642. #[derive(Debug, Copy, Clone)]
  1643. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_12 {
  1644. pub ifindex: __u32,
  1645. pub attach_type: __u32,
  1646. }
  1647. #[repr(C)]
  1648. #[derive(Debug, Copy, Clone)]
  1649. pub struct bpf_link_info__bindgen_ty_1__bindgen_ty_13 {
  1650. pub ifindex: __u32,
  1651. pub attach_type: __u32,
  1652. }
  1653. #[repr(u32)]
  1654. #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
  1655. pub enum bpf_task_fd_type {
  1656. BPF_FD_TYPE_RAW_TRACEPOINT = 0,
  1657. BPF_FD_TYPE_TRACEPOINT = 1,
  1658. BPF_FD_TYPE_KPROBE = 2,
  1659. BPF_FD_TYPE_KRETPROBE = 3,
  1660. BPF_FD_TYPE_UPROBE = 4,
  1661. BPF_FD_TYPE_URETPROBE = 5,
  1662. }
  1663. #[repr(C)]
  1664. #[derive(Debug, Copy, Clone)]
  1665. pub struct bpf_func_info {
  1666. pub insn_off: __u32,
  1667. pub type_id: __u32,
  1668. }
  1669. #[repr(C)]
  1670. #[derive(Debug, Copy, Clone)]
  1671. pub struct bpf_line_info {
  1672. pub insn_off: __u32,
  1673. pub file_name_off: __u32,
  1674. pub line_off: __u32,
  1675. pub line_col: __u32,
  1676. }
  1677. pub const BPF_F_TIMER_ABS: _bindgen_ty_41 = 1;
  1678. pub const BPF_F_TIMER_CPU_PIN: _bindgen_ty_41 = 2;
  1679. pub type _bindgen_ty_41 = ::core::ffi::c_uint;
  1680. #[repr(C)]
  1681. #[derive(Debug, Copy, Clone)]
  1682. pub struct btf_header {
  1683. pub magic: __u16,
  1684. pub version: __u8,
  1685. pub flags: __u8,
  1686. pub hdr_len: __u32,
  1687. pub type_off: __u32,
  1688. pub type_len: __u32,
  1689. pub str_off: __u32,
  1690. pub str_len: __u32,
  1691. }
  1692. #[repr(C)]
  1693. #[derive(Copy, Clone)]
  1694. pub struct btf_type {
  1695. pub name_off: __u32,
  1696. pub info: __u32,
  1697. pub __bindgen_anon_1: btf_type__bindgen_ty_1,
  1698. }
  1699. #[repr(C)]
  1700. #[derive(Copy, Clone)]
  1701. pub union btf_type__bindgen_ty_1 {
  1702. pub size: __u32,
  1703. pub type_: __u32,
  1704. }
  1705. pub const BTF_KIND_UNKN: _bindgen_ty_42 = 0;
  1706. pub const BTF_KIND_INT: _bindgen_ty_42 = 1;
  1707. pub const BTF_KIND_PTR: _bindgen_ty_42 = 2;
  1708. pub const BTF_KIND_ARRAY: _bindgen_ty_42 = 3;
  1709. pub const BTF_KIND_STRUCT: _bindgen_ty_42 = 4;
  1710. pub const BTF_KIND_UNION: _bindgen_ty_42 = 5;
  1711. pub const BTF_KIND_ENUM: _bindgen_ty_42 = 6;
  1712. pub const BTF_KIND_FWD: _bindgen_ty_42 = 7;
  1713. pub const BTF_KIND_TYPEDEF: _bindgen_ty_42 = 8;
  1714. pub const BTF_KIND_VOLATILE: _bindgen_ty_42 = 9;
  1715. pub const BTF_KIND_CONST: _bindgen_ty_42 = 10;
  1716. pub const BTF_KIND_RESTRICT: _bindgen_ty_42 = 11;
  1717. pub const BTF_KIND_FUNC: _bindgen_ty_42 = 12;
  1718. pub const BTF_KIND_FUNC_PROTO: _bindgen_ty_42 = 13;
  1719. pub const BTF_KIND_VAR: _bindgen_ty_42 = 14;
  1720. pub const BTF_KIND_DATASEC: _bindgen_ty_42 = 15;
  1721. pub const BTF_KIND_FLOAT: _bindgen_ty_42 = 16;
  1722. pub const BTF_KIND_DECL_TAG: _bindgen_ty_42 = 17;
  1723. pub const BTF_KIND_TYPE_TAG: _bindgen_ty_42 = 18;
  1724. pub const BTF_KIND_ENUM64: _bindgen_ty_42 = 19;
  1725. pub const NR_BTF_KINDS: _bindgen_ty_42 = 20;
  1726. pub const BTF_KIND_MAX: _bindgen_ty_42 = 19;
  1727. pub type _bindgen_ty_42 = ::core::ffi::c_uint;
  1728. #[repr(C)]
  1729. #[derive(Debug, Copy, Clone)]
  1730. pub struct btf_enum {
  1731. pub name_off: __u32,
  1732. pub val: __s32,
  1733. }
  1734. #[repr(C)]
  1735. #[derive(Debug, Copy, Clone)]
  1736. pub struct btf_array {
  1737. pub type_: __u32,
  1738. pub index_type: __u32,
  1739. pub nelems: __u32,
  1740. }
  1741. #[repr(C)]
  1742. #[derive(Debug, Copy, Clone)]
  1743. pub struct btf_member {
  1744. pub name_off: __u32,
  1745. pub type_: __u32,
  1746. pub offset: __u32,
  1747. }
  1748. #[repr(C)]
  1749. #[derive(Debug, Copy, Clone)]
  1750. pub struct btf_param {
  1751. pub name_off: __u32,
  1752. pub type_: __u32,
  1753. }
  1754. pub const BTF_VAR_STATIC: _bindgen_ty_43 = 0;
  1755. pub const BTF_VAR_GLOBAL_ALLOCATED: _bindgen_ty_43 = 1;
  1756. pub const BTF_VAR_GLOBAL_EXTERN: _bindgen_ty_43 = 2;
  1757. pub type _bindgen_ty_43 = ::core::ffi::c_uint;
  1758. #[repr(u32)]
  1759. #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
  1760. pub enum btf_func_linkage {
  1761. BTF_FUNC_STATIC = 0,
  1762. BTF_FUNC_GLOBAL = 1,
  1763. BTF_FUNC_EXTERN = 2,
  1764. }
  1765. #[repr(C)]
  1766. #[derive(Debug, Copy, Clone)]
  1767. pub struct btf_var {
  1768. pub linkage: __u32,
  1769. }
  1770. #[repr(C)]
  1771. #[derive(Debug, Copy, Clone)]
  1772. pub struct btf_var_secinfo {
  1773. pub type_: __u32,
  1774. pub offset: __u32,
  1775. pub size: __u32,
  1776. }
  1777. #[repr(C)]
  1778. #[derive(Debug, Copy, Clone)]
  1779. pub struct btf_decl_tag {
  1780. pub component_idx: __s32,
  1781. }
  1782. impl nlmsgerr_attrs {
  1783. pub const NLMSGERR_ATTR_MAX: nlmsgerr_attrs = nlmsgerr_attrs::NLMSGERR_ATTR_COOKIE;
  1784. }
  1785. #[repr(u32)]
  1786. #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
  1787. pub enum nlmsgerr_attrs {
  1788. NLMSGERR_ATTR_UNUSED = 0,
  1789. NLMSGERR_ATTR_MSG = 1,
  1790. NLMSGERR_ATTR_OFFS = 2,
  1791. NLMSGERR_ATTR_COOKIE = 3,
  1792. __NLMSGERR_ATTR_MAX = 4,
  1793. }
  1794. pub const IFLA_XDP_UNSPEC: _bindgen_ty_92 = 0;
  1795. pub const IFLA_XDP_FD: _bindgen_ty_92 = 1;
  1796. pub const IFLA_XDP_ATTACHED: _bindgen_ty_92 = 2;
  1797. pub const IFLA_XDP_FLAGS: _bindgen_ty_92 = 3;
  1798. pub const IFLA_XDP_PROG_ID: _bindgen_ty_92 = 4;
  1799. pub const IFLA_XDP_DRV_PROG_ID: _bindgen_ty_92 = 5;
  1800. pub const IFLA_XDP_SKB_PROG_ID: _bindgen_ty_92 = 6;
  1801. pub const IFLA_XDP_HW_PROG_ID: _bindgen_ty_92 = 7;
  1802. pub const IFLA_XDP_EXPECTED_FD: _bindgen_ty_92 = 8;
  1803. pub const __IFLA_XDP_MAX: _bindgen_ty_92 = 9;
  1804. pub type _bindgen_ty_92 = ::core::ffi::c_uint;
  1805. #[repr(u32)]
  1806. #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
  1807. pub enum nf_inet_hooks {
  1808. NF_INET_PRE_ROUTING = 0,
  1809. NF_INET_LOCAL_IN = 1,
  1810. NF_INET_FORWARD = 2,
  1811. NF_INET_LOCAL_OUT = 3,
  1812. NF_INET_POST_ROUTING = 4,
  1813. NF_INET_NUMHOOKS = 5,
  1814. }
  1815. pub const NFPROTO_UNSPEC: _bindgen_ty_99 = 0;
  1816. pub const NFPROTO_INET: _bindgen_ty_99 = 1;
  1817. pub const NFPROTO_IPV4: _bindgen_ty_99 = 2;
  1818. pub const NFPROTO_ARP: _bindgen_ty_99 = 3;
  1819. pub const NFPROTO_NETDEV: _bindgen_ty_99 = 5;
  1820. pub const NFPROTO_BRIDGE: _bindgen_ty_99 = 7;
  1821. pub const NFPROTO_IPV6: _bindgen_ty_99 = 10;
  1822. pub const NFPROTO_DECNET: _bindgen_ty_99 = 12;
  1823. pub const NFPROTO_NUMPROTO: _bindgen_ty_99 = 13;
  1824. pub type _bindgen_ty_99 = ::core::ffi::c_uint;
  1825. #[repr(u32)]
  1826. #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
  1827. pub enum perf_type_id {
  1828. PERF_TYPE_HARDWARE = 0,
  1829. PERF_TYPE_SOFTWARE = 1,
  1830. PERF_TYPE_TRACEPOINT = 2,
  1831. PERF_TYPE_HW_CACHE = 3,
  1832. PERF_TYPE_RAW = 4,
  1833. PERF_TYPE_BREAKPOINT = 5,
  1834. PERF_TYPE_MAX = 6,
  1835. }
  1836. #[repr(u32)]
  1837. #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
  1838. pub enum perf_hw_id {
  1839. PERF_COUNT_HW_CPU_CYCLES = 0,
  1840. PERF_COUNT_HW_INSTRUCTIONS = 1,
  1841. PERF_COUNT_HW_CACHE_REFERENCES = 2,
  1842. PERF_COUNT_HW_CACHE_MISSES = 3,
  1843. PERF_COUNT_HW_BRANCH_INSTRUCTIONS = 4,
  1844. PERF_COUNT_HW_BRANCH_MISSES = 5,
  1845. PERF_COUNT_HW_BUS_CYCLES = 6,
  1846. PERF_COUNT_HW_STALLED_CYCLES_FRONTEND = 7,
  1847. PERF_COUNT_HW_STALLED_CYCLES_BACKEND = 8,
  1848. PERF_COUNT_HW_REF_CPU_CYCLES = 9,
  1849. PERF_COUNT_HW_MAX = 10,
  1850. }
  1851. #[repr(u32)]
  1852. #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
  1853. pub enum perf_hw_cache_id {
  1854. PERF_COUNT_HW_CACHE_L1D = 0,
  1855. PERF_COUNT_HW_CACHE_L1I = 1,
  1856. PERF_COUNT_HW_CACHE_LL = 2,
  1857. PERF_COUNT_HW_CACHE_DTLB = 3,
  1858. PERF_COUNT_HW_CACHE_ITLB = 4,
  1859. PERF_COUNT_HW_CACHE_BPU = 5,
  1860. PERF_COUNT_HW_CACHE_NODE = 6,
  1861. PERF_COUNT_HW_CACHE_MAX = 7,
  1862. }
  1863. #[repr(u32)]
  1864. #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
  1865. pub enum perf_hw_cache_op_id {
  1866. PERF_COUNT_HW_CACHE_OP_READ = 0,
  1867. PERF_COUNT_HW_CACHE_OP_WRITE = 1,
  1868. PERF_COUNT_HW_CACHE_OP_PREFETCH = 2,
  1869. PERF_COUNT_HW_CACHE_OP_MAX = 3,
  1870. }
  1871. #[repr(u32)]
  1872. #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
  1873. pub enum perf_hw_cache_op_result_id {
  1874. PERF_COUNT_HW_CACHE_RESULT_ACCESS = 0,
  1875. PERF_COUNT_HW_CACHE_RESULT_MISS = 1,
  1876. PERF_COUNT_HW_CACHE_RESULT_MAX = 2,
  1877. }
  1878. #[repr(u32)]
  1879. #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
  1880. pub enum perf_sw_ids {
  1881. PERF_COUNT_SW_CPU_CLOCK = 0,
  1882. PERF_COUNT_SW_TASK_CLOCK = 1,
  1883. PERF_COUNT_SW_PAGE_FAULTS = 2,
  1884. PERF_COUNT_SW_CONTEXT_SWITCHES = 3,
  1885. PERF_COUNT_SW_CPU_MIGRATIONS = 4,
  1886. PERF_COUNT_SW_PAGE_FAULTS_MIN = 5,
  1887. PERF_COUNT_SW_PAGE_FAULTS_MAJ = 6,
  1888. PERF_COUNT_SW_ALIGNMENT_FAULTS = 7,
  1889. PERF_COUNT_SW_EMULATION_FAULTS = 8,
  1890. PERF_COUNT_SW_DUMMY = 9,
  1891. PERF_COUNT_SW_BPF_OUTPUT = 10,
  1892. PERF_COUNT_SW_CGROUP_SWITCHES = 11,
  1893. PERF_COUNT_SW_MAX = 12,
  1894. }
  1895. #[repr(u32)]
  1896. #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
  1897. pub enum perf_event_sample_format {
  1898. PERF_SAMPLE_IP = 1,
  1899. PERF_SAMPLE_TID = 2,
  1900. PERF_SAMPLE_TIME = 4,
  1901. PERF_SAMPLE_ADDR = 8,
  1902. PERF_SAMPLE_READ = 16,
  1903. PERF_SAMPLE_CALLCHAIN = 32,
  1904. PERF_SAMPLE_ID = 64,
  1905. PERF_SAMPLE_CPU = 128,
  1906. PERF_SAMPLE_PERIOD = 256,
  1907. PERF_SAMPLE_STREAM_ID = 512,
  1908. PERF_SAMPLE_RAW = 1024,
  1909. PERF_SAMPLE_BRANCH_STACK = 2048,
  1910. PERF_SAMPLE_REGS_USER = 4096,
  1911. PERF_SAMPLE_STACK_USER = 8192,
  1912. PERF_SAMPLE_WEIGHT = 16384,
  1913. PERF_SAMPLE_DATA_SRC = 32768,
  1914. PERF_SAMPLE_IDENTIFIER = 65536,
  1915. PERF_SAMPLE_TRANSACTION = 131072,
  1916. PERF_SAMPLE_REGS_INTR = 262144,
  1917. PERF_SAMPLE_PHYS_ADDR = 524288,
  1918. PERF_SAMPLE_AUX = 1048576,
  1919. PERF_SAMPLE_CGROUP = 2097152,
  1920. PERF_SAMPLE_DATA_PAGE_SIZE = 4194304,
  1921. PERF_SAMPLE_CODE_PAGE_SIZE = 8388608,
  1922. PERF_SAMPLE_WEIGHT_STRUCT = 16777216,
  1923. PERF_SAMPLE_MAX = 33554432,
  1924. }
  1925. #[repr(C)]
  1926. #[derive(Copy, Clone)]
  1927. pub struct perf_event_attr {
  1928. pub type_: __u32,
  1929. pub size: __u32,
  1930. pub config: __u64,
  1931. pub __bindgen_anon_1: perf_event_attr__bindgen_ty_1,
  1932. pub sample_type: __u64,
  1933. pub read_format: __u64,
  1934. pub _bitfield_align_1: [u32; 0],
  1935. pub _bitfield_1: __BindgenBitfieldUnit<[u8; 8usize]>,
  1936. pub __bindgen_anon_2: perf_event_attr__bindgen_ty_2,
  1937. pub bp_type: __u32,
  1938. pub __bindgen_anon_3: perf_event_attr__bindgen_ty_3,
  1939. pub __bindgen_anon_4: perf_event_attr__bindgen_ty_4,
  1940. pub branch_sample_type: __u64,
  1941. pub sample_regs_user: __u64,
  1942. pub sample_stack_user: __u32,
  1943. pub clockid: __s32,
  1944. pub sample_regs_intr: __u64,
  1945. pub aux_watermark: __u32,
  1946. pub sample_max_stack: __u16,
  1947. pub __reserved_2: __u16,
  1948. pub aux_sample_size: __u32,
  1949. pub __reserved_3: __u32,
  1950. pub sig_data: __u64,
  1951. pub config3: __u64,
  1952. }
  1953. #[repr(C)]
  1954. #[derive(Copy, Clone)]
  1955. pub union perf_event_attr__bindgen_ty_1 {
  1956. pub sample_period: __u64,
  1957. pub sample_freq: __u64,
  1958. }
  1959. #[repr(C)]
  1960. #[derive(Copy, Clone)]
  1961. pub union perf_event_attr__bindgen_ty_2 {
  1962. pub wakeup_events: __u32,
  1963. pub wakeup_watermark: __u32,
  1964. }
  1965. #[repr(C)]
  1966. #[derive(Copy, Clone)]
  1967. pub union perf_event_attr__bindgen_ty_3 {
  1968. pub bp_addr: __u64,
  1969. pub kprobe_func: __u64,
  1970. pub uprobe_path: __u64,
  1971. pub config1: __u64,
  1972. }
  1973. #[repr(C)]
  1974. #[derive(Copy, Clone)]
  1975. pub union perf_event_attr__bindgen_ty_4 {
  1976. pub bp_len: __u64,
  1977. pub kprobe_addr: __u64,
  1978. pub probe_offset: __u64,
  1979. pub config2: __u64,
  1980. }
  1981. impl perf_event_attr {
  1982. #[inline]
  1983. pub fn disabled(&self) -> __u64 {
  1984. unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u64) }
  1985. }
  1986. #[inline]
  1987. pub fn set_disabled(&mut self, val: __u64) {
  1988. unsafe {
  1989. let val: u64 = ::core::mem::transmute(val);
  1990. self._bitfield_1.set(0usize, 1u8, val as u64)
  1991. }
  1992. }
  1993. #[inline]
  1994. pub unsafe fn disabled_raw(this: *const Self) -> __u64 {
  1995. unsafe {
  1996. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  1997. ::core::ptr::addr_of!((*this)._bitfield_1),
  1998. 0usize,
  1999. 1u8,
  2000. ) as u64)
  2001. }
  2002. }
  2003. #[inline]
  2004. pub unsafe fn set_disabled_raw(this: *mut Self, val: __u64) {
  2005. unsafe {
  2006. let val: u64 = ::core::mem::transmute(val);
  2007. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2008. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2009. 0usize,
  2010. 1u8,
  2011. val as u64,
  2012. )
  2013. }
  2014. }
  2015. #[inline]
  2016. pub fn inherit(&self) -> __u64 {
  2017. unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u64) }
  2018. }
  2019. #[inline]
  2020. pub fn set_inherit(&mut self, val: __u64) {
  2021. unsafe {
  2022. let val: u64 = ::core::mem::transmute(val);
  2023. self._bitfield_1.set(1usize, 1u8, val as u64)
  2024. }
  2025. }
  2026. #[inline]
  2027. pub unsafe fn inherit_raw(this: *const Self) -> __u64 {
  2028. unsafe {
  2029. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2030. ::core::ptr::addr_of!((*this)._bitfield_1),
  2031. 1usize,
  2032. 1u8,
  2033. ) as u64)
  2034. }
  2035. }
  2036. #[inline]
  2037. pub unsafe fn set_inherit_raw(this: *mut Self, val: __u64) {
  2038. unsafe {
  2039. let val: u64 = ::core::mem::transmute(val);
  2040. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2041. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2042. 1usize,
  2043. 1u8,
  2044. val as u64,
  2045. )
  2046. }
  2047. }
  2048. #[inline]
  2049. pub fn pinned(&self) -> __u64 {
  2050. unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u64) }
  2051. }
  2052. #[inline]
  2053. pub fn set_pinned(&mut self, val: __u64) {
  2054. unsafe {
  2055. let val: u64 = ::core::mem::transmute(val);
  2056. self._bitfield_1.set(2usize, 1u8, val as u64)
  2057. }
  2058. }
  2059. #[inline]
  2060. pub unsafe fn pinned_raw(this: *const Self) -> __u64 {
  2061. unsafe {
  2062. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2063. ::core::ptr::addr_of!((*this)._bitfield_1),
  2064. 2usize,
  2065. 1u8,
  2066. ) as u64)
  2067. }
  2068. }
  2069. #[inline]
  2070. pub unsafe fn set_pinned_raw(this: *mut Self, val: __u64) {
  2071. unsafe {
  2072. let val: u64 = ::core::mem::transmute(val);
  2073. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2074. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2075. 2usize,
  2076. 1u8,
  2077. val as u64,
  2078. )
  2079. }
  2080. }
  2081. #[inline]
  2082. pub fn exclusive(&self) -> __u64 {
  2083. unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u64) }
  2084. }
  2085. #[inline]
  2086. pub fn set_exclusive(&mut self, val: __u64) {
  2087. unsafe {
  2088. let val: u64 = ::core::mem::transmute(val);
  2089. self._bitfield_1.set(3usize, 1u8, val as u64)
  2090. }
  2091. }
  2092. #[inline]
  2093. pub unsafe fn exclusive_raw(this: *const Self) -> __u64 {
  2094. unsafe {
  2095. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2096. ::core::ptr::addr_of!((*this)._bitfield_1),
  2097. 3usize,
  2098. 1u8,
  2099. ) as u64)
  2100. }
  2101. }
  2102. #[inline]
  2103. pub unsafe fn set_exclusive_raw(this: *mut Self, val: __u64) {
  2104. unsafe {
  2105. let val: u64 = ::core::mem::transmute(val);
  2106. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2107. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2108. 3usize,
  2109. 1u8,
  2110. val as u64,
  2111. )
  2112. }
  2113. }
  2114. #[inline]
  2115. pub fn exclude_user(&self) -> __u64 {
  2116. unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u64) }
  2117. }
  2118. #[inline]
  2119. pub fn set_exclude_user(&mut self, val: __u64) {
  2120. unsafe {
  2121. let val: u64 = ::core::mem::transmute(val);
  2122. self._bitfield_1.set(4usize, 1u8, val as u64)
  2123. }
  2124. }
  2125. #[inline]
  2126. pub unsafe fn exclude_user_raw(this: *const Self) -> __u64 {
  2127. unsafe {
  2128. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2129. ::core::ptr::addr_of!((*this)._bitfield_1),
  2130. 4usize,
  2131. 1u8,
  2132. ) as u64)
  2133. }
  2134. }
  2135. #[inline]
  2136. pub unsafe fn set_exclude_user_raw(this: *mut Self, val: __u64) {
  2137. unsafe {
  2138. let val: u64 = ::core::mem::transmute(val);
  2139. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2140. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2141. 4usize,
  2142. 1u8,
  2143. val as u64,
  2144. )
  2145. }
  2146. }
  2147. #[inline]
  2148. pub fn exclude_kernel(&self) -> __u64 {
  2149. unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u64) }
  2150. }
  2151. #[inline]
  2152. pub fn set_exclude_kernel(&mut self, val: __u64) {
  2153. unsafe {
  2154. let val: u64 = ::core::mem::transmute(val);
  2155. self._bitfield_1.set(5usize, 1u8, val as u64)
  2156. }
  2157. }
  2158. #[inline]
  2159. pub unsafe fn exclude_kernel_raw(this: *const Self) -> __u64 {
  2160. unsafe {
  2161. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2162. ::core::ptr::addr_of!((*this)._bitfield_1),
  2163. 5usize,
  2164. 1u8,
  2165. ) as u64)
  2166. }
  2167. }
  2168. #[inline]
  2169. pub unsafe fn set_exclude_kernel_raw(this: *mut Self, val: __u64) {
  2170. unsafe {
  2171. let val: u64 = ::core::mem::transmute(val);
  2172. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2173. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2174. 5usize,
  2175. 1u8,
  2176. val as u64,
  2177. )
  2178. }
  2179. }
  2180. #[inline]
  2181. pub fn exclude_hv(&self) -> __u64 {
  2182. unsafe { ::core::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u64) }
  2183. }
  2184. #[inline]
  2185. pub fn set_exclude_hv(&mut self, val: __u64) {
  2186. unsafe {
  2187. let val: u64 = ::core::mem::transmute(val);
  2188. self._bitfield_1.set(6usize, 1u8, val as u64)
  2189. }
  2190. }
  2191. #[inline]
  2192. pub unsafe fn exclude_hv_raw(this: *const Self) -> __u64 {
  2193. unsafe {
  2194. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2195. ::core::ptr::addr_of!((*this)._bitfield_1),
  2196. 6usize,
  2197. 1u8,
  2198. ) as u64)
  2199. }
  2200. }
  2201. #[inline]
  2202. pub unsafe fn set_exclude_hv_raw(this: *mut Self, val: __u64) {
  2203. unsafe {
  2204. let val: u64 = ::core::mem::transmute(val);
  2205. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2206. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2207. 6usize,
  2208. 1u8,
  2209. val as u64,
  2210. )
  2211. }
  2212. }
  2213. #[inline]
  2214. pub fn exclude_idle(&self) -> __u64 {
  2215. unsafe { ::core::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u64) }
  2216. }
  2217. #[inline]
  2218. pub fn set_exclude_idle(&mut self, val: __u64) {
  2219. unsafe {
  2220. let val: u64 = ::core::mem::transmute(val);
  2221. self._bitfield_1.set(7usize, 1u8, val as u64)
  2222. }
  2223. }
  2224. #[inline]
  2225. pub unsafe fn exclude_idle_raw(this: *const Self) -> __u64 {
  2226. unsafe {
  2227. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2228. ::core::ptr::addr_of!((*this)._bitfield_1),
  2229. 7usize,
  2230. 1u8,
  2231. ) as u64)
  2232. }
  2233. }
  2234. #[inline]
  2235. pub unsafe fn set_exclude_idle_raw(this: *mut Self, val: __u64) {
  2236. unsafe {
  2237. let val: u64 = ::core::mem::transmute(val);
  2238. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2239. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2240. 7usize,
  2241. 1u8,
  2242. val as u64,
  2243. )
  2244. }
  2245. }
  2246. #[inline]
  2247. pub fn mmap(&self) -> __u64 {
  2248. unsafe { ::core::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u64) }
  2249. }
  2250. #[inline]
  2251. pub fn set_mmap(&mut self, val: __u64) {
  2252. unsafe {
  2253. let val: u64 = ::core::mem::transmute(val);
  2254. self._bitfield_1.set(8usize, 1u8, val as u64)
  2255. }
  2256. }
  2257. #[inline]
  2258. pub unsafe fn mmap_raw(this: *const Self) -> __u64 {
  2259. unsafe {
  2260. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2261. ::core::ptr::addr_of!((*this)._bitfield_1),
  2262. 8usize,
  2263. 1u8,
  2264. ) as u64)
  2265. }
  2266. }
  2267. #[inline]
  2268. pub unsafe fn set_mmap_raw(this: *mut Self, val: __u64) {
  2269. unsafe {
  2270. let val: u64 = ::core::mem::transmute(val);
  2271. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2272. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2273. 8usize,
  2274. 1u8,
  2275. val as u64,
  2276. )
  2277. }
  2278. }
  2279. #[inline]
  2280. pub fn comm(&self) -> __u64 {
  2281. unsafe { ::core::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u64) }
  2282. }
  2283. #[inline]
  2284. pub fn set_comm(&mut self, val: __u64) {
  2285. unsafe {
  2286. let val: u64 = ::core::mem::transmute(val);
  2287. self._bitfield_1.set(9usize, 1u8, val as u64)
  2288. }
  2289. }
  2290. #[inline]
  2291. pub unsafe fn comm_raw(this: *const Self) -> __u64 {
  2292. unsafe {
  2293. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2294. ::core::ptr::addr_of!((*this)._bitfield_1),
  2295. 9usize,
  2296. 1u8,
  2297. ) as u64)
  2298. }
  2299. }
  2300. #[inline]
  2301. pub unsafe fn set_comm_raw(this: *mut Self, val: __u64) {
  2302. unsafe {
  2303. let val: u64 = ::core::mem::transmute(val);
  2304. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2305. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2306. 9usize,
  2307. 1u8,
  2308. val as u64,
  2309. )
  2310. }
  2311. }
  2312. #[inline]
  2313. pub fn freq(&self) -> __u64 {
  2314. unsafe { ::core::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u64) }
  2315. }
  2316. #[inline]
  2317. pub fn set_freq(&mut self, val: __u64) {
  2318. unsafe {
  2319. let val: u64 = ::core::mem::transmute(val);
  2320. self._bitfield_1.set(10usize, 1u8, val as u64)
  2321. }
  2322. }
  2323. #[inline]
  2324. pub unsafe fn freq_raw(this: *const Self) -> __u64 {
  2325. unsafe {
  2326. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2327. ::core::ptr::addr_of!((*this)._bitfield_1),
  2328. 10usize,
  2329. 1u8,
  2330. ) as u64)
  2331. }
  2332. }
  2333. #[inline]
  2334. pub unsafe fn set_freq_raw(this: *mut Self, val: __u64) {
  2335. unsafe {
  2336. let val: u64 = ::core::mem::transmute(val);
  2337. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2338. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2339. 10usize,
  2340. 1u8,
  2341. val as u64,
  2342. )
  2343. }
  2344. }
  2345. #[inline]
  2346. pub fn inherit_stat(&self) -> __u64 {
  2347. unsafe { ::core::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u64) }
  2348. }
  2349. #[inline]
  2350. pub fn set_inherit_stat(&mut self, val: __u64) {
  2351. unsafe {
  2352. let val: u64 = ::core::mem::transmute(val);
  2353. self._bitfield_1.set(11usize, 1u8, val as u64)
  2354. }
  2355. }
  2356. #[inline]
  2357. pub unsafe fn inherit_stat_raw(this: *const Self) -> __u64 {
  2358. unsafe {
  2359. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2360. ::core::ptr::addr_of!((*this)._bitfield_1),
  2361. 11usize,
  2362. 1u8,
  2363. ) as u64)
  2364. }
  2365. }
  2366. #[inline]
  2367. pub unsafe fn set_inherit_stat_raw(this: *mut Self, val: __u64) {
  2368. unsafe {
  2369. let val: u64 = ::core::mem::transmute(val);
  2370. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2371. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2372. 11usize,
  2373. 1u8,
  2374. val as u64,
  2375. )
  2376. }
  2377. }
  2378. #[inline]
  2379. pub fn enable_on_exec(&self) -> __u64 {
  2380. unsafe { ::core::mem::transmute(self._bitfield_1.get(12usize, 1u8) as u64) }
  2381. }
  2382. #[inline]
  2383. pub fn set_enable_on_exec(&mut self, val: __u64) {
  2384. unsafe {
  2385. let val: u64 = ::core::mem::transmute(val);
  2386. self._bitfield_1.set(12usize, 1u8, val as u64)
  2387. }
  2388. }
  2389. #[inline]
  2390. pub unsafe fn enable_on_exec_raw(this: *const Self) -> __u64 {
  2391. unsafe {
  2392. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2393. ::core::ptr::addr_of!((*this)._bitfield_1),
  2394. 12usize,
  2395. 1u8,
  2396. ) as u64)
  2397. }
  2398. }
  2399. #[inline]
  2400. pub unsafe fn set_enable_on_exec_raw(this: *mut Self, val: __u64) {
  2401. unsafe {
  2402. let val: u64 = ::core::mem::transmute(val);
  2403. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2404. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2405. 12usize,
  2406. 1u8,
  2407. val as u64,
  2408. )
  2409. }
  2410. }
  2411. #[inline]
  2412. pub fn task(&self) -> __u64 {
  2413. unsafe { ::core::mem::transmute(self._bitfield_1.get(13usize, 1u8) as u64) }
  2414. }
  2415. #[inline]
  2416. pub fn set_task(&mut self, val: __u64) {
  2417. unsafe {
  2418. let val: u64 = ::core::mem::transmute(val);
  2419. self._bitfield_1.set(13usize, 1u8, val as u64)
  2420. }
  2421. }
  2422. #[inline]
  2423. pub unsafe fn task_raw(this: *const Self) -> __u64 {
  2424. unsafe {
  2425. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2426. ::core::ptr::addr_of!((*this)._bitfield_1),
  2427. 13usize,
  2428. 1u8,
  2429. ) as u64)
  2430. }
  2431. }
  2432. #[inline]
  2433. pub unsafe fn set_task_raw(this: *mut Self, val: __u64) {
  2434. unsafe {
  2435. let val: u64 = ::core::mem::transmute(val);
  2436. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2437. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2438. 13usize,
  2439. 1u8,
  2440. val as u64,
  2441. )
  2442. }
  2443. }
  2444. #[inline]
  2445. pub fn watermark(&self) -> __u64 {
  2446. unsafe { ::core::mem::transmute(self._bitfield_1.get(14usize, 1u8) as u64) }
  2447. }
  2448. #[inline]
  2449. pub fn set_watermark(&mut self, val: __u64) {
  2450. unsafe {
  2451. let val: u64 = ::core::mem::transmute(val);
  2452. self._bitfield_1.set(14usize, 1u8, val as u64)
  2453. }
  2454. }
  2455. #[inline]
  2456. pub unsafe fn watermark_raw(this: *const Self) -> __u64 {
  2457. unsafe {
  2458. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2459. ::core::ptr::addr_of!((*this)._bitfield_1),
  2460. 14usize,
  2461. 1u8,
  2462. ) as u64)
  2463. }
  2464. }
  2465. #[inline]
  2466. pub unsafe fn set_watermark_raw(this: *mut Self, val: __u64) {
  2467. unsafe {
  2468. let val: u64 = ::core::mem::transmute(val);
  2469. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2470. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2471. 14usize,
  2472. 1u8,
  2473. val as u64,
  2474. )
  2475. }
  2476. }
  2477. #[inline]
  2478. pub fn precise_ip(&self) -> __u64 {
  2479. unsafe { ::core::mem::transmute(self._bitfield_1.get(15usize, 2u8) as u64) }
  2480. }
  2481. #[inline]
  2482. pub fn set_precise_ip(&mut self, val: __u64) {
  2483. unsafe {
  2484. let val: u64 = ::core::mem::transmute(val);
  2485. self._bitfield_1.set(15usize, 2u8, val as u64)
  2486. }
  2487. }
  2488. #[inline]
  2489. pub unsafe fn precise_ip_raw(this: *const Self) -> __u64 {
  2490. unsafe {
  2491. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2492. ::core::ptr::addr_of!((*this)._bitfield_1),
  2493. 15usize,
  2494. 2u8,
  2495. ) as u64)
  2496. }
  2497. }
  2498. #[inline]
  2499. pub unsafe fn set_precise_ip_raw(this: *mut Self, val: __u64) {
  2500. unsafe {
  2501. let val: u64 = ::core::mem::transmute(val);
  2502. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2503. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2504. 15usize,
  2505. 2u8,
  2506. val as u64,
  2507. )
  2508. }
  2509. }
  2510. #[inline]
  2511. pub fn mmap_data(&self) -> __u64 {
  2512. unsafe { ::core::mem::transmute(self._bitfield_1.get(17usize, 1u8) as u64) }
  2513. }
  2514. #[inline]
  2515. pub fn set_mmap_data(&mut self, val: __u64) {
  2516. unsafe {
  2517. let val: u64 = ::core::mem::transmute(val);
  2518. self._bitfield_1.set(17usize, 1u8, val as u64)
  2519. }
  2520. }
  2521. #[inline]
  2522. pub unsafe fn mmap_data_raw(this: *const Self) -> __u64 {
  2523. unsafe {
  2524. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2525. ::core::ptr::addr_of!((*this)._bitfield_1),
  2526. 17usize,
  2527. 1u8,
  2528. ) as u64)
  2529. }
  2530. }
  2531. #[inline]
  2532. pub unsafe fn set_mmap_data_raw(this: *mut Self, val: __u64) {
  2533. unsafe {
  2534. let val: u64 = ::core::mem::transmute(val);
  2535. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2536. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2537. 17usize,
  2538. 1u8,
  2539. val as u64,
  2540. )
  2541. }
  2542. }
  2543. #[inline]
  2544. pub fn sample_id_all(&self) -> __u64 {
  2545. unsafe { ::core::mem::transmute(self._bitfield_1.get(18usize, 1u8) as u64) }
  2546. }
  2547. #[inline]
  2548. pub fn set_sample_id_all(&mut self, val: __u64) {
  2549. unsafe {
  2550. let val: u64 = ::core::mem::transmute(val);
  2551. self._bitfield_1.set(18usize, 1u8, val as u64)
  2552. }
  2553. }
  2554. #[inline]
  2555. pub unsafe fn sample_id_all_raw(this: *const Self) -> __u64 {
  2556. unsafe {
  2557. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2558. ::core::ptr::addr_of!((*this)._bitfield_1),
  2559. 18usize,
  2560. 1u8,
  2561. ) as u64)
  2562. }
  2563. }
  2564. #[inline]
  2565. pub unsafe fn set_sample_id_all_raw(this: *mut Self, val: __u64) {
  2566. unsafe {
  2567. let val: u64 = ::core::mem::transmute(val);
  2568. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2569. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2570. 18usize,
  2571. 1u8,
  2572. val as u64,
  2573. )
  2574. }
  2575. }
  2576. #[inline]
  2577. pub fn exclude_host(&self) -> __u64 {
  2578. unsafe { ::core::mem::transmute(self._bitfield_1.get(19usize, 1u8) as u64) }
  2579. }
  2580. #[inline]
  2581. pub fn set_exclude_host(&mut self, val: __u64) {
  2582. unsafe {
  2583. let val: u64 = ::core::mem::transmute(val);
  2584. self._bitfield_1.set(19usize, 1u8, val as u64)
  2585. }
  2586. }
  2587. #[inline]
  2588. pub unsafe fn exclude_host_raw(this: *const Self) -> __u64 {
  2589. unsafe {
  2590. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2591. ::core::ptr::addr_of!((*this)._bitfield_1),
  2592. 19usize,
  2593. 1u8,
  2594. ) as u64)
  2595. }
  2596. }
  2597. #[inline]
  2598. pub unsafe fn set_exclude_host_raw(this: *mut Self, val: __u64) {
  2599. unsafe {
  2600. let val: u64 = ::core::mem::transmute(val);
  2601. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2602. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2603. 19usize,
  2604. 1u8,
  2605. val as u64,
  2606. )
  2607. }
  2608. }
  2609. #[inline]
  2610. pub fn exclude_guest(&self) -> __u64 {
  2611. unsafe { ::core::mem::transmute(self._bitfield_1.get(20usize, 1u8) as u64) }
  2612. }
  2613. #[inline]
  2614. pub fn set_exclude_guest(&mut self, val: __u64) {
  2615. unsafe {
  2616. let val: u64 = ::core::mem::transmute(val);
  2617. self._bitfield_1.set(20usize, 1u8, val as u64)
  2618. }
  2619. }
  2620. #[inline]
  2621. pub unsafe fn exclude_guest_raw(this: *const Self) -> __u64 {
  2622. unsafe {
  2623. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2624. ::core::ptr::addr_of!((*this)._bitfield_1),
  2625. 20usize,
  2626. 1u8,
  2627. ) as u64)
  2628. }
  2629. }
  2630. #[inline]
  2631. pub unsafe fn set_exclude_guest_raw(this: *mut Self, val: __u64) {
  2632. unsafe {
  2633. let val: u64 = ::core::mem::transmute(val);
  2634. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2635. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2636. 20usize,
  2637. 1u8,
  2638. val as u64,
  2639. )
  2640. }
  2641. }
  2642. #[inline]
  2643. pub fn exclude_callchain_kernel(&self) -> __u64 {
  2644. unsafe { ::core::mem::transmute(self._bitfield_1.get(21usize, 1u8) as u64) }
  2645. }
  2646. #[inline]
  2647. pub fn set_exclude_callchain_kernel(&mut self, val: __u64) {
  2648. unsafe {
  2649. let val: u64 = ::core::mem::transmute(val);
  2650. self._bitfield_1.set(21usize, 1u8, val as u64)
  2651. }
  2652. }
  2653. #[inline]
  2654. pub unsafe fn exclude_callchain_kernel_raw(this: *const Self) -> __u64 {
  2655. unsafe {
  2656. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2657. ::core::ptr::addr_of!((*this)._bitfield_1),
  2658. 21usize,
  2659. 1u8,
  2660. ) as u64)
  2661. }
  2662. }
  2663. #[inline]
  2664. pub unsafe fn set_exclude_callchain_kernel_raw(this: *mut Self, val: __u64) {
  2665. unsafe {
  2666. let val: u64 = ::core::mem::transmute(val);
  2667. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2668. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2669. 21usize,
  2670. 1u8,
  2671. val as u64,
  2672. )
  2673. }
  2674. }
  2675. #[inline]
  2676. pub fn exclude_callchain_user(&self) -> __u64 {
  2677. unsafe { ::core::mem::transmute(self._bitfield_1.get(22usize, 1u8) as u64) }
  2678. }
  2679. #[inline]
  2680. pub fn set_exclude_callchain_user(&mut self, val: __u64) {
  2681. unsafe {
  2682. let val: u64 = ::core::mem::transmute(val);
  2683. self._bitfield_1.set(22usize, 1u8, val as u64)
  2684. }
  2685. }
  2686. #[inline]
  2687. pub unsafe fn exclude_callchain_user_raw(this: *const Self) -> __u64 {
  2688. unsafe {
  2689. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2690. ::core::ptr::addr_of!((*this)._bitfield_1),
  2691. 22usize,
  2692. 1u8,
  2693. ) as u64)
  2694. }
  2695. }
  2696. #[inline]
  2697. pub unsafe fn set_exclude_callchain_user_raw(this: *mut Self, val: __u64) {
  2698. unsafe {
  2699. let val: u64 = ::core::mem::transmute(val);
  2700. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2701. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2702. 22usize,
  2703. 1u8,
  2704. val as u64,
  2705. )
  2706. }
  2707. }
  2708. #[inline]
  2709. pub fn mmap2(&self) -> __u64 {
  2710. unsafe { ::core::mem::transmute(self._bitfield_1.get(23usize, 1u8) as u64) }
  2711. }
  2712. #[inline]
  2713. pub fn set_mmap2(&mut self, val: __u64) {
  2714. unsafe {
  2715. let val: u64 = ::core::mem::transmute(val);
  2716. self._bitfield_1.set(23usize, 1u8, val as u64)
  2717. }
  2718. }
  2719. #[inline]
  2720. pub unsafe fn mmap2_raw(this: *const Self) -> __u64 {
  2721. unsafe {
  2722. ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
  2723. ::core::ptr::addr_of!((*this)._bitfield_1),
  2724. 23usize,
  2725. 1u8,
  2726. ) as u64)
  2727. }
  2728. }
  2729. #[inline]
  2730. pub unsafe fn set_mmap2_raw(this: *mut Self, val: __u64) {
  2731. unsafe {
  2732. let val: u64 = ::core::mem::transmute(val);
  2733. <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
  2734. ::core::ptr::addr_of_mut!((*this)._bitfield_1),
  2735. 23usize,
  2736. 1u8,
  2737. val as u64,
  2738. )
  2739. }
  2740. }
    #[inline]
    pub fn comm_exec(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(24usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_comm_exec(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(24usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn comm_exec_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                24usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_comm_exec_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                24usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn use_clockid(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(25usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_use_clockid(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(25usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn use_clockid_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                25usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_use_clockid_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                25usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn context_switch(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(26usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_context_switch(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(26usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn context_switch_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                26usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_context_switch_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                26usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn write_backward(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(27usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_write_backward(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(27usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn write_backward_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                27usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_write_backward_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                27usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn namespaces(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(28usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_namespaces(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(28usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn namespaces_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                28usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_namespaces_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                28usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn ksymbol(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(29usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_ksymbol(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(29usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn ksymbol_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                29usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_ksymbol_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                29usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn bpf_event(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(30usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_bpf_event(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(30usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn bpf_event_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                30usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_bpf_event_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                30usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn aux_output(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(31usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_aux_output(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(31usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn aux_output_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                31usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_aux_output_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                31usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn cgroup(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(32usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_cgroup(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(32usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cgroup_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                32usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cgroup_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                32usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn text_poke(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(33usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_text_poke(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(33usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn text_poke_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                33usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_text_poke_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                33usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn build_id(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(34usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_build_id(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(34usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn build_id_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                34usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_build_id_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                34usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn inherit_thread(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(35usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_inherit_thread(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(35usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn inherit_thread_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                35usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_inherit_thread_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                35usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn remove_on_exec(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(36usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_remove_on_exec(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(36usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn remove_on_exec_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                36usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_remove_on_exec_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                36usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn sigtrap(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(37usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_sigtrap(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(37usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn sigtrap_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                37usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_sigtrap_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                37usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn __reserved_1(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(38usize, 26u8) as u64) }
    }
    #[inline]
    pub fn set___reserved_1(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(38usize, 26u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn __reserved_1_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                38usize,
                26u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set___reserved_1_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                38usize,
                26u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn new_bitfield_1(
        disabled: __u64,
        inherit: __u64,
        pinned: __u64,
        exclusive: __u64,
        exclude_user: __u64,
        exclude_kernel: __u64,
        exclude_hv: __u64,
        exclude_idle: __u64,
        mmap: __u64,
        comm: __u64,
        freq: __u64,
        inherit_stat: __u64,
        enable_on_exec: __u64,
        task: __u64,
        watermark: __u64,
        precise_ip: __u64,
        mmap_data: __u64,
        sample_id_all: __u64,
        exclude_host: __u64,
        exclude_guest: __u64,
        exclude_callchain_kernel: __u64,
        exclude_callchain_user: __u64,
        mmap2: __u64,
        comm_exec: __u64,
        use_clockid: __u64,
        context_switch: __u64,
        write_backward: __u64,
        namespaces: __u64,
        ksymbol: __u64,
        bpf_event: __u64,
        aux_output: __u64,
        cgroup: __u64,
        text_poke: __u64,
        build_id: __u64,
        inherit_thread: __u64,
        remove_on_exec: __u64,
        sigtrap: __u64,
        __reserved_1: __u64,
    ) -> __BindgenBitfieldUnit<[u8; 8usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 8usize]> = Default::default();
        __bindgen_bitfield_unit.set(0usize, 1u8, {
            let disabled: u64 = unsafe { ::core::mem::transmute(disabled) };
            disabled as u64
        });
        __bindgen_bitfield_unit.set(1usize, 1u8, {
            let inherit: u64 = unsafe { ::core::mem::transmute(inherit) };
            inherit as u64
        });
        __bindgen_bitfield_unit.set(2usize, 1u8, {
            let pinned: u64 = unsafe { ::core::mem::transmute(pinned) };
            pinned as u64
        });
        __bindgen_bitfield_unit.set(3usize, 1u8, {
            let exclusive: u64 = unsafe { ::core::mem::transmute(exclusive) };
            exclusive as u64
        });
        __bindgen_bitfield_unit.set(4usize, 1u8, {
            let exclude_user: u64 = unsafe { ::core::mem::transmute(exclude_user) };
            exclude_user as u64
        });
        __bindgen_bitfield_unit.set(5usize, 1u8, {
            let exclude_kernel: u64 = unsafe { ::core::mem::transmute(exclude_kernel) };
            exclude_kernel as u64
        });
        __bindgen_bitfield_unit.set(6usize, 1u8, {
            let exclude_hv: u64 = unsafe { ::core::mem::transmute(exclude_hv) };
            exclude_hv as u64
        });
        __bindgen_bitfield_unit.set(7usize, 1u8, {
            let exclude_idle: u64 = unsafe { ::core::mem::transmute(exclude_idle) };
            exclude_idle as u64
        });
        __bindgen_bitfield_unit.set(8usize, 1u8, {
            let mmap: u64 = unsafe { ::core::mem::transmute(mmap) };
            mmap as u64
        });
        __bindgen_bitfield_unit.set(9usize, 1u8, {
            let comm: u64 = unsafe { ::core::mem::transmute(comm) };
            comm as u64
        });
        __bindgen_bitfield_unit.set(10usize, 1u8, {
            let freq: u64 = unsafe { ::core::mem::transmute(freq) };
            freq as u64
        });
        __bindgen_bitfield_unit.set(11usize, 1u8, {
            let inherit_stat: u64 = unsafe { ::core::mem::transmute(inherit_stat) };
            inherit_stat as u64
        });
        __bindgen_bitfield_unit.set(12usize, 1u8, {
            let enable_on_exec: u64 = unsafe { ::core::mem::transmute(enable_on_exec) };
            enable_on_exec as u64
        });
        __bindgen_bitfield_unit.set(13usize, 1u8, {
            let task: u64 = unsafe { ::core::mem::transmute(task) };
            task as u64
        });
        __bindgen_bitfield_unit.set(14usize, 1u8, {
            let watermark: u64 = unsafe { ::core::mem::transmute(watermark) };
            watermark as u64
        });
        __bindgen_bitfield_unit.set(15usize, 2u8, {
            let precise_ip: u64 = unsafe { ::core::mem::transmute(precise_ip) };
            precise_ip as u64
        });
        __bindgen_bitfield_unit.set(17usize, 1u8, {
            let mmap_data: u64 = unsafe { ::core::mem::transmute(mmap_data) };
            mmap_data as u64
        });
        __bindgen_bitfield_unit.set(18usize, 1u8, {
            let sample_id_all: u64 = unsafe { ::core::mem::transmute(sample_id_all) };
            sample_id_all as u64
        });
        __bindgen_bitfield_unit.set(19usize, 1u8, {
            let exclude_host: u64 = unsafe { ::core::mem::transmute(exclude_host) };
            exclude_host as u64
        });
        __bindgen_bitfield_unit.set(20usize, 1u8, {
            let exclude_guest: u64 = unsafe { ::core::mem::transmute(exclude_guest) };
            exclude_guest as u64
        });
        __bindgen_bitfield_unit.set(21usize, 1u8, {
            let exclude_callchain_kernel: u64 =
                unsafe { ::core::mem::transmute(exclude_callchain_kernel) };
            exclude_callchain_kernel as u64
        });
        __bindgen_bitfield_unit.set(22usize, 1u8, {
            let exclude_callchain_user: u64 =
                unsafe { ::core::mem::transmute(exclude_callchain_user) };
            exclude_callchain_user as u64
        });
        __bindgen_bitfield_unit.set(23usize, 1u8, {
            let mmap2: u64 = unsafe { ::core::mem::transmute(mmap2) };
            mmap2 as u64
        });
        __bindgen_bitfield_unit.set(24usize, 1u8, {
            let comm_exec: u64 = unsafe { ::core::mem::transmute(comm_exec) };
            comm_exec as u64
        });
        __bindgen_bitfield_unit.set(25usize, 1u8, {
            let use_clockid: u64 = unsafe { ::core::mem::transmute(use_clockid) };
            use_clockid as u64
        });
        __bindgen_bitfield_unit.set(26usize, 1u8, {
            let context_switch: u64 = unsafe { ::core::mem::transmute(context_switch) };
            context_switch as u64
        });
        __bindgen_bitfield_unit.set(27usize, 1u8, {
            let write_backward: u64 = unsafe { ::core::mem::transmute(write_backward) };
            write_backward as u64
        });
        __bindgen_bitfield_unit.set(28usize, 1u8, {
            let namespaces: u64 = unsafe { ::core::mem::transmute(namespaces) };
            namespaces as u64
        });
        __bindgen_bitfield_unit.set(29usize, 1u8, {
            let ksymbol: u64 = unsafe { ::core::mem::transmute(ksymbol) };
            ksymbol as u64
        });
        __bindgen_bitfield_unit.set(30usize, 1u8, {
            let bpf_event: u64 = unsafe { ::core::mem::transmute(bpf_event) };
            bpf_event as u64
        });
        __bindgen_bitfield_unit.set(31usize, 1u8, {
            let aux_output: u64 = unsafe { ::core::mem::transmute(aux_output) };
            aux_output as u64
        });
        __bindgen_bitfield_unit.set(32usize, 1u8, {
            let cgroup: u64 = unsafe { ::core::mem::transmute(cgroup) };
            cgroup as u64
        });
        __bindgen_bitfield_unit.set(33usize, 1u8, {
            let text_poke: u64 = unsafe { ::core::mem::transmute(text_poke) };
            text_poke as u64
        });
        __bindgen_bitfield_unit.set(34usize, 1u8, {
            let build_id: u64 = unsafe { ::core::mem::transmute(build_id) };
            build_id as u64
        });
        __bindgen_bitfield_unit.set(35usize, 1u8, {
            let inherit_thread: u64 = unsafe { ::core::mem::transmute(inherit_thread) };
            inherit_thread as u64
        });
        __bindgen_bitfield_unit.set(36usize, 1u8, {
            let remove_on_exec: u64 = unsafe { ::core::mem::transmute(remove_on_exec) };
            remove_on_exec as u64
        });
        __bindgen_bitfield_unit.set(37usize, 1u8, {
            let sigtrap: u64 = unsafe { ::core::mem::transmute(sigtrap) };
            sigtrap as u64
        });
        __bindgen_bitfield_unit.set(38usize, 26u8, {
            let __reserved_1: u64 = unsafe { ::core::mem::transmute(__reserved_1) };
            __reserved_1 as u64
        });
        __bindgen_bitfield_unit
    }
}
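// Editorial note, not emitted by bindgen: the accessors above pack the
// perf_event_attr flag bits (comm_exec .. sigtrap plus the reserved tail)
// into one 64-bit unit, mirroring the flag bitfield in the Linux UAPI
// <linux/perf_event.h>. A minimal usage sketch, assuming the surrounding
// impl block belongs to `perf_event_attr` (defined earlier in this file)
// and that `attr` was zero-initialized by the caller:
#[allow(dead_code)]
fn example_enable_common_flags(attr: &mut perf_event_attr) {
    attr.set_disabled(1); // create the event disabled; enable it later via ioctl
    attr.set_exclude_kernel(1); // restrict counting to user space
    attr.set_sample_id_all(1); // attach sample_id metadata to every record
}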
#[repr(C)]
#[derive(Copy, Clone)]
pub struct perf_event_mmap_page {
    pub version: __u32,
    pub compat_version: __u32,
    pub lock: __u32,
    pub index: __u32,
    pub offset: __s64,
    pub time_enabled: __u64,
    pub time_running: __u64,
    pub __bindgen_anon_1: perf_event_mmap_page__bindgen_ty_1,
    pub pmc_width: __u16,
    pub time_shift: __u16,
    pub time_mult: __u32,
    pub time_offset: __u64,
    pub time_zero: __u64,
    pub size: __u32,
    pub __reserved_1: __u32,
    pub time_cycles: __u64,
    pub time_mask: __u64,
    pub __reserved: [__u8; 928usize],
    pub data_head: __u64,
    pub data_tail: __u64,
    pub data_offset: __u64,
    pub data_size: __u64,
    pub aux_head: __u64,
    pub aux_tail: __u64,
    pub aux_offset: __u64,
    pub aux_size: __u64,
}
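// Editorial note, not emitted by bindgen: in the mmap'd control page above,
// `data_head` is advanced by the kernel as it writes records into the ring
// buffer and `data_tail` is advanced by user space as it consumes them. A
// minimal reader-side sketch, assuming `page` points at a live control page:
#[allow(dead_code)]
unsafe fn example_read_data_head(page: *const perf_event_mmap_page) -> __u64 {
    unsafe {
        // Volatile load of the kernel-published head, followed by an acquire
        // fence so reads of the data area are not reordered before it.
        let head = ::core::ptr::read_volatile(::core::ptr::addr_of!((*page).data_head));
        ::core::sync::atomic::fence(::core::sync::atomic::Ordering::Acquire);
        head
    }
}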
#[repr(C)]
#[derive(Copy, Clone)]
pub union perf_event_mmap_page__bindgen_ty_1 {
    pub capabilities: __u64,
    pub __bindgen_anon_1: perf_event_mmap_page__bindgen_ty_1__bindgen_ty_1,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct perf_event_mmap_page__bindgen_ty_1__bindgen_ty_1 {
    pub _bitfield_align_1: [u64; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 8usize]>,
}
impl perf_event_mmap_page__bindgen_ty_1__bindgen_ty_1 {
    #[inline]
    pub fn cap_bit0(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_cap_bit0(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(0usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cap_bit0_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                0usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cap_bit0_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                0usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn cap_bit0_is_deprecated(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_cap_bit0_is_deprecated(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(1usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cap_bit0_is_deprecated_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                1usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cap_bit0_is_deprecated_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                1usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn cap_user_rdpmc(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_cap_user_rdpmc(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(2usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cap_user_rdpmc_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                2usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cap_user_rdpmc_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                2usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn cap_user_time(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_cap_user_time(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(3usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cap_user_time_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                3usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cap_user_time_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                3usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn cap_user_time_zero(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_cap_user_time_zero(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(4usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cap_user_time_zero_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                4usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cap_user_time_zero_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                4usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn cap_user_time_short(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u64) }
    }
    #[inline]
    pub fn set_cap_user_time_short(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(5usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cap_user_time_short_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                5usize,
                1u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cap_user_time_short_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                5usize,
                1u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn cap_____res(&self) -> __u64 {
        unsafe { ::core::mem::transmute(self._bitfield_1.get(6usize, 58u8) as u64) }
    }
    #[inline]
    pub fn set_cap_____res(&mut self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            self._bitfield_1.set(6usize, 58u8, val as u64)
        }
    }
    #[inline]
    pub unsafe fn cap_____res_raw(this: *const Self) -> __u64 {
        unsafe {
            ::core::mem::transmute(<__BindgenBitfieldUnit<[u8; 8usize]>>::raw_get(
                ::core::ptr::addr_of!((*this)._bitfield_1),
                6usize,
                58u8,
            ) as u64)
        }
    }
    #[inline]
    pub unsafe fn set_cap_____res_raw(this: *mut Self, val: __u64) {
        unsafe {
            let val: u64 = ::core::mem::transmute(val);
            <__BindgenBitfieldUnit<[u8; 8usize]>>::raw_set(
                ::core::ptr::addr_of_mut!((*this)._bitfield_1),
                6usize,
                58u8,
                val as u64,
            )
        }
    }
    #[inline]
    pub fn new_bitfield_1(
        cap_bit0: __u64,
        cap_bit0_is_deprecated: __u64,
        cap_user_rdpmc: __u64,
        cap_user_time: __u64,
        cap_user_time_zero: __u64,
        cap_user_time_short: __u64,
        cap_____res: __u64,
    ) -> __BindgenBitfieldUnit<[u8; 8usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 8usize]> = Default::default();
        __bindgen_bitfield_unit.set(0usize, 1u8, {
            let cap_bit0: u64 = unsafe { ::core::mem::transmute(cap_bit0) };
            cap_bit0 as u64
        });
        __bindgen_bitfield_unit.set(1usize, 1u8, {
            let cap_bit0_is_deprecated: u64 =
                unsafe { ::core::mem::transmute(cap_bit0_is_deprecated) };
            cap_bit0_is_deprecated as u64
        });
        __bindgen_bitfield_unit.set(2usize, 1u8, {
            let cap_user_rdpmc: u64 = unsafe { ::core::mem::transmute(cap_user_rdpmc) };
            cap_user_rdpmc as u64
        });
        __bindgen_bitfield_unit.set(3usize, 1u8, {
            let cap_user_time: u64 = unsafe { ::core::mem::transmute(cap_user_time) };
            cap_user_time as u64
        });
        __bindgen_bitfield_unit.set(4usize, 1u8, {
            let cap_user_time_zero: u64 = unsafe { ::core::mem::transmute(cap_user_time_zero) };
            cap_user_time_zero as u64
        });
        __bindgen_bitfield_unit.set(5usize, 1u8, {
            let cap_user_time_short: u64 = unsafe { ::core::mem::transmute(cap_user_time_short) };
            cap_user_time_short as u64
        });
        __bindgen_bitfield_unit.set(6usize, 58u8, {
            let cap_____res: u64 = unsafe { ::core::mem::transmute(cap_____res) };
            cap_____res as u64
        });
        __bindgen_bitfield_unit
    }
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct perf_event_header {
    pub type_: __u32,
    pub misc: __u16,
    pub size: __u16,
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum perf_event_type {
    PERF_RECORD_MMAP = 1,
    PERF_RECORD_LOST = 2,
    PERF_RECORD_COMM = 3,
    PERF_RECORD_EXIT = 4,
    PERF_RECORD_THROTTLE = 5,
    PERF_RECORD_UNTHROTTLE = 6,
    PERF_RECORD_FORK = 7,
    PERF_RECORD_READ = 8,
    PERF_RECORD_SAMPLE = 9,
    PERF_RECORD_MMAP2 = 10,
    PERF_RECORD_AUX = 11,
    PERF_RECORD_ITRACE_START = 12,
    PERF_RECORD_LOST_SAMPLES = 13,
    PERF_RECORD_SWITCH = 14,
    PERF_RECORD_SWITCH_CPU_WIDE = 15,
    PERF_RECORD_NAMESPACES = 16,
    PERF_RECORD_KSYMBOL = 17,
    PERF_RECORD_BPF_EVENT = 18,
    PERF_RECORD_CGROUP = 19,
    PERF_RECORD_TEXT_POKE = 20,
    PERF_RECORD_AUX_OUTPUT_HW_ID = 21,
    PERF_RECORD_MAX = 22,
}
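// Editorial note, not emitted by bindgen: every record in the perf ring
// buffer begins with a `perf_event_header`, and `type_` carries one of the
// PERF_RECORD_* discriminants above. A minimal dispatch sketch:
#[allow(dead_code)]
fn example_is_sample_record(header: &perf_event_header) -> bool {
    header.type_ == perf_event_type::PERF_RECORD_SAMPLE as __u32
}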
pub const TCA_BPF_UNSPEC: _bindgen_ty_154 = 0;
pub const TCA_BPF_ACT: _bindgen_ty_154 = 1;
pub const TCA_BPF_POLICE: _bindgen_ty_154 = 2;
pub const TCA_BPF_CLASSID: _bindgen_ty_154 = 3;
pub const TCA_BPF_OPS_LEN: _bindgen_ty_154 = 4;
pub const TCA_BPF_OPS: _bindgen_ty_154 = 5;
pub const TCA_BPF_FD: _bindgen_ty_154 = 6;
pub const TCA_BPF_NAME: _bindgen_ty_154 = 7;
pub const TCA_BPF_FLAGS: _bindgen_ty_154 = 8;
pub const TCA_BPF_FLAGS_GEN: _bindgen_ty_154 = 9;
pub const TCA_BPF_TAG: _bindgen_ty_154 = 10;
pub const TCA_BPF_ID: _bindgen_ty_154 = 11;
pub const __TCA_BPF_MAX: _bindgen_ty_154 = 12;
pub type _bindgen_ty_154 = ::core::ffi::c_uint;
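// Editorial note, not emitted by bindgen: the TCA_BPF_* values above are the
// netlink attribute types understood by the tc "bpf" classifier (cls_bpf,
// <linux/pkt_cls.h>); TCA_BPF_FD, TCA_BPF_NAME and TCA_BPF_FLAGS are the
// attributes typically sent when attaching an eBPF classifier by file
// descriptor.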
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ifinfomsg {
    pub ifi_family: ::core::ffi::c_uchar,
    pub __ifi_pad: ::core::ffi::c_uchar,
    pub ifi_type: ::core::ffi::c_ushort,
    pub ifi_index: ::core::ffi::c_int,
    pub ifi_flags: ::core::ffi::c_uint,
    pub ifi_change: ::core::ffi::c_uint,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct tcmsg {
    pub tcm_family: ::core::ffi::c_uchar,
    pub tcm__pad1: ::core::ffi::c_uchar,
    pub tcm__pad2: ::core::ffi::c_ushort,
    pub tcm_ifindex: ::core::ffi::c_int,
    pub tcm_handle: __u32,
    pub tcm_parent: __u32,
    pub tcm_info: __u32,
}
pub const TCA_UNSPEC: _bindgen_ty_172 = 0;
pub const TCA_KIND: _bindgen_ty_172 = 1;
pub const TCA_OPTIONS: _bindgen_ty_172 = 2;
pub const TCA_STATS: _bindgen_ty_172 = 3;
pub const TCA_XSTATS: _bindgen_ty_172 = 4;
pub const TCA_RATE: _bindgen_ty_172 = 5;
pub const TCA_FCNT: _bindgen_ty_172 = 6;
pub const TCA_STATS2: _bindgen_ty_172 = 7;
pub const TCA_STAB: _bindgen_ty_172 = 8;
pub const TCA_PAD: _bindgen_ty_172 = 9;
pub const TCA_DUMP_INVISIBLE: _bindgen_ty_172 = 10;
pub const TCA_CHAIN: _bindgen_ty_172 = 11;
pub const TCA_HW_OFFLOAD: _bindgen_ty_172 = 12;
pub const TCA_INGRESS_BLOCK: _bindgen_ty_172 = 13;
pub const TCA_EGRESS_BLOCK: _bindgen_ty_172 = 14;
pub const __TCA_MAX: _bindgen_ty_172 = 15;
pub type _bindgen_ty_172 = ::core::ffi::c_uint;
pub const AYA_PERF_EVENT_IOC_ENABLE: ::core::ffi::c_int = 9216;
pub const AYA_PERF_EVENT_IOC_DISABLE: ::core::ffi::c_int = 9217;
pub const AYA_PERF_EVENT_IOC_SET_BPF: ::core::ffi::c_int = 1074013192;
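// Editorial note, not emitted by bindgen: the AYA_PERF_EVENT_IOC_* constants
// are precomputed perf_event_open(2) ioctl request numbers for this ABI:
// 9216 == 0x2400 (PERF_EVENT_IOC_ENABLE), 9217 == 0x2401
// (PERF_EVENT_IOC_DISABLE), and 1074013192 == 0x40042408
// (PERF_EVENT_IOC_SET_BPF, i.e. _IOW('$', 8, __u32)), so callers can issue
// the ioctls without evaluating the _IO* macros at runtime.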