linker.rs

use alloc::boxed::Box;
use alloc::collections::BTreeMap;
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::{mem, ptr, slice};
use goblin::elf::{program_header, reloc, sym, Elf};
use goblin::error::{Error, Result};

use c_str::CString;
use fs::File;
use header::{fcntl, sys_mman, unistd};
use io::Read;
use platform::types::c_void;

use super::tcb::{Master, Tcb};
use super::PAGE_SIZE;

#[cfg(target_os = "redox")]
const PATH_SEP: char = ';';

#[cfg(target_os = "linux")]
const PATH_SEP: char = ':';
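
/// Dynamic linker state: the configured library search path and the raw bytes
/// of each loaded object, keyed by object name.
///
/// Typical call order: create a `Linker` with `new`, bring in the primary
/// object and its dependencies with `load`/`load_library`, then call
/// `link(primary)` to map, relocate, protect, and obtain the entry point.
/// A minimal, illustrative sketch (the name and path below are hypothetical):
///
/// ```ignore
/// let mut linker = Linker::new("/lib");
/// linker.load("example", "/path/to/example")?;
/// let entry = linker.link("example")?;
/// ```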
pub struct Linker {
    library_path: String,
    objects: BTreeMap<String, Box<[u8]>>,
}

impl Linker {
    pub fn new(library_path: &str) -> Self {
        Self {
            library_path: library_path.to_string(),
            objects: BTreeMap::new(),
        }
    }
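
    /// Read the object at `path` from the filesystem and register it under
    /// `name` via `load_data`, which also pulls in its dependencies.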
    pub fn load(&mut self, name: &str, path: &str) -> Result<()> {
        println!("load {}: {}", name, path);

        let mut data = Vec::new();

        let path_c = CString::new(path)
            .map_err(|err| Error::Malformed(format!("invalid path '{}': {}", path, err)))?;

        {
            let flags = fcntl::O_RDONLY | fcntl::O_CLOEXEC;
            let mut file = File::open(&path_c, flags)
                .map_err(|err| Error::Malformed(format!("failed to open '{}': {}", path, err)))?;
            file.read_to_end(&mut data)
                .map_err(|err| Error::Malformed(format!("failed to read '{}': {}", path, err)))?;
        }

        self.load_data(name, data.into_boxed_slice())
    }
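
    /// Parse `data` as an ELF object, recursively load any shared libraries it
    /// depends on, then store the data under `name`.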
    pub fn load_data(&mut self, name: &str, data: Box<[u8]>) -> Result<()> {
        //TODO: Prevent failures due to recursion
        {
            let elf = Elf::parse(&data)?;
            //println!("{:#?}", elf);
            for library in elf.libraries.iter() {
                if !self.objects.contains_key(&library.to_string()) {
                    self.load_library(library)?;
                }
            }
        }

        self.objects.insert(name.to_string(), data);

        Ok(())
    }
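
    /// Load `name` directly if it contains a path separator; otherwise search
    /// each entry of the library path for an accessible file with that name.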
    pub fn load_library(&mut self, name: &str) -> Result<()> {
        if name.contains('/') {
            self.load(name, name)
        } else {
            let library_path = self.library_path.clone();
            for part in library_path.split(PATH_SEP) {
                let path = if part.is_empty() {
                    format!("./{}", name)
                } else {
                    format!("{}/{}", part, name)
                };

                println!("check {}", path);

                let access = unsafe {
                    let path_c = CString::new(path.as_bytes()).map_err(|err| {
                        Error::Malformed(format!("invalid path '{}': {}", path, err))
                    })?;

                    // TODO: Use R_OK | X_OK
                    unistd::access(path_c.as_ptr(), unistd::F_OK) == 0
                };

                if access {
                    self.load(name, &path)?;
                    return Ok(());
                }
            }

            Err(Error::Malformed(format!("failed to locate '{}'", name)))
        }
    }
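
    /// Map every loaded object into memory, resolve global symbols, set up TLS,
    /// perform relocations, apply page protections, and return the entry point
    /// address of the `primary` object.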
    pub fn link(&mut self, primary: &str) -> Result<usize> {
        let elfs = {
            let mut elfs = BTreeMap::new();
            for (name, data) in self.objects.iter() {
                elfs.insert(name.as_str(), Elf::parse(&data)?);
            }
            elfs
        };

        // Load all ELF files into memory and find all globals
        let mut tls_primary = 0;
        let mut tls_size = 0;
        let mut mmaps = BTreeMap::new();
        let mut globals = BTreeMap::new();
        for (elf_name, elf) in elfs.iter() {
            println!("map {}", elf_name);

            let object = match self.objects.get(*elf_name) {
                Some(some) => some,
                None => continue,
            };

            // Calculate virtual memory bounds
            let bounds = {
                let mut bounds_opt: Option<(usize, usize)> = None;
                for ph in elf.program_headers.iter() {
                    let voff = ph.p_vaddr as usize % PAGE_SIZE;
                    let vaddr = ph.p_vaddr as usize - voff;
                    let vsize =
                        ((ph.p_memsz as usize + voff + PAGE_SIZE - 1) / PAGE_SIZE) * PAGE_SIZE;

                    match ph.p_type {
                        program_header::PT_LOAD => {
                            println!(" load {:#x}, {:#x}: {:x?}", vaddr, vsize, ph);

                            if let Some(ref mut bounds) = bounds_opt {
                                if vaddr < bounds.0 {
                                    bounds.0 = vaddr;
                                }
                                if vaddr + vsize > bounds.1 {
                                    bounds.1 = vaddr + vsize;
                                }
                            } else {
                                bounds_opt = Some((vaddr, vaddr + vsize));
                            }
                        }
                        program_header::PT_TLS => {
                            println!(" load tls {:#x}: {:x?}", vsize, ph);
                            tls_size += vsize;
                            if *elf_name == primary {
                                tls_primary += vsize;
                            }
                        }
                        _ => (),
                    }
                }
                match bounds_opt {
                    Some(some) => some,
                    None => continue,
                }
            };
            println!(" bounds {:#x}, {:#x}", bounds.0, bounds.1);

            // Allocate memory
            let mmap = unsafe {
                let size = bounds.1 /* - bounds.0 */;
                let ptr = sys_mman::mmap(
                    ptr::null_mut(),
                    size,
                    //TODO: Make it possible to not specify PROT_EXEC on Redox
                    sys_mman::PROT_READ | sys_mman::PROT_WRITE,
                    sys_mman::MAP_ANONYMOUS | sys_mman::MAP_PRIVATE,
                    -1,
                    0,
                );
                if ptr as usize == !0
                /* MAP_FAILED */
                {
                    return Err(Error::Malformed(format!("failed to map {}", elf_name)));
                }
                slice::from_raw_parts_mut(ptr as *mut u8, size)
            };
            println!(" mmap {:p}, {:#x}", mmap.as_mut_ptr(), mmap.len());

            // Locate all globals
            for sym in elf.dynsyms.iter() {
                if sym.st_bind() == sym::STB_GLOBAL && sym.st_value != 0 {
                    if let Some(name_res) = elf.dynstrtab.get(sym.st_name) {
                        let name = name_res?;
                        let value = mmap.as_ptr() as usize + sym.st_value as usize;
                        // println!(" global {}: {:x?} = {:#x}", name, sym, value);
                        globals.insert(name, value);
                    }
                }
            }

            mmaps.insert(elf_name, mmap);
        }
        // Allocate TLS
        let tcb = unsafe { Tcb::new(tls_size)? };
        println!("tcb {:x?}", tcb);

        // Copy data
        let mut tls_offset = tls_primary;
        let mut tcb_masters = Vec::new();
        let mut tls_index = 0;
        let mut tls_ranges = BTreeMap::new();
        for (elf_name, elf) in elfs.iter() {
            let object = match self.objects.get(*elf_name) {
                Some(some) => some,
                None => continue,
            };

            let mmap = match mmaps.get_mut(elf_name) {
                Some(some) => some,
                None => continue,
            };

            println!("load {}", elf_name);

            // Copy data
            for ph in elf.program_headers.iter() {
                let voff = ph.p_vaddr as usize % PAGE_SIZE;
                let vaddr = ph.p_vaddr as usize - voff;
                let vsize = ((ph.p_memsz as usize + voff + PAGE_SIZE - 1) / PAGE_SIZE) * PAGE_SIZE;

                match ph.p_type {
                    program_header::PT_LOAD => {
                        let obj_data = {
                            let range = ph.file_range();
                            match object.get(range.clone()) {
                                Some(some) => some,
                                None => {
                                    return Err(Error::Malformed(format!(
                                        "failed to read {:?}",
                                        range
                                    )))
                                }
                            }
                        };

                        let mmap_data = {
                            let range = ph.p_vaddr as usize..ph.p_vaddr as usize + obj_data.len();
                            match mmap.get_mut(range.clone()) {
                                Some(some) => some,
                                None => {
                                    return Err(Error::Malformed(format!(
                                        "failed to write {:?}",
                                        range
                                    )))
                                }
                            }
                        };

                        println!(
                            " copy {:#x}, {:#x}: {:#x}, {:#x}",
                            vaddr,
                            vsize,
                            voff,
                            obj_data.len()
                        );

                        mmap_data.copy_from_slice(obj_data);
                    }
                    program_header::PT_TLS => {
                        let valign = if ph.p_align > 0 {
                            ((ph.p_memsz + (ph.p_align - 1)) / ph.p_align) * ph.p_align
                        } else {
                            ph.p_memsz
                        } as usize;

                        let mut tcb_master = Master {
                            ptr: unsafe { mmap.as_ptr().add(ph.p_vaddr as usize) },
                            len: ph.p_filesz as usize,
                            offset: tls_size - valign,
                        };

                        println!(
                            " tls master {:p}, {:#x}: {:#x}, {:#x}",
                            tcb_master.ptr, tcb_master.len, tcb_master.offset, valign,
                        );

                        if *elf_name == primary {
                            tls_ranges.insert(elf_name, (0, tcb_master.range()));
                            tcb_masters.insert(0, tcb_master);
                        } else {
                            tcb_master.offset -= tls_offset;
                            tls_offset += vsize;
                            tls_index += 1;
                            tls_ranges.insert(elf_name, (tls_index, tcb_master.range()));
                            tcb_masters.push(tcb_master);
                        }
                    }
                    _ => (),
                }
            }
        }

        // Set master images for TLS and copy TLS data
        unsafe {
            tcb.set_masters(tcb_masters.into_boxed_slice());
            tcb.copy_masters()?;
        }
        // Perform relocations, and protect pages
        for (elf_name, elf) in elfs.iter() {
            let mmap = match mmaps.get_mut(elf_name) {
                Some(some) => some,
                None => continue,
            };

            println!("link {}", elf_name);

            // Relocate
            for rel in elf
                .dynrelas
                .iter()
                .chain(elf.dynrels.iter())
                .chain(elf.pltrelocs.iter())
            {
                // println!(" rel {}: {:x?}",
                //     reloc::r_to_str(rel.r_type, elf.header.e_machine),
                //     rel
                // );

                let a = rel.r_addend.unwrap_or(0) as usize;

                let b = mmap.as_mut_ptr() as usize;

                let s = if rel.r_sym > 0 {
                    let sym = elf.dynsyms.get(rel.r_sym).ok_or(Error::Malformed(format!(
                        "missing symbol for relocation {:?}",
                        rel
                    )))?;

                    let name =
                        elf.dynstrtab
                            .get(sym.st_name)
                            .ok_or(Error::Malformed(format!(
                                "missing name for symbol {:?}",
                                sym
                            )))??;

                    if let Some(value) = globals.get(name) {
                        // println!(" sym {}: {:x?} = {:#x}", name, sym, value);
                        *value
                    } else {
                        // println!(" sym {}: {:x?} = undefined", name, sym);
                        0
                    }
                } else {
                    0
                };

                let (tm, t) = if let Some((tls_index, tls_range)) = tls_ranges.get(elf_name) {
                    (*tls_index, tls_range.start)
                } else {
                    (0, 0)
                };

                let ptr = unsafe { mmap.as_mut_ptr().add(rel.r_offset as usize) };

                let set_u64 = |value| {
                    // println!(" set_u64 {:#x}", value);
                    unsafe {
                        *(ptr as *mut u64) = value;
                    }
                };
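                // Relocation formulas follow the System V x86-64 psABI:
                // S is the resolved symbol value, A the addend, B the image
                // base; the TLS variants use the module index (tm) and the
                // offset of this object's TLS block (t) computed above.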
                match rel.r_type {
                    reloc::R_X86_64_64 => {
                        set_u64((s + a) as u64);
                    }
                    reloc::R_X86_64_DTPMOD64 => {
                        set_u64(tm as u64);
                    }
                    reloc::R_X86_64_GLOB_DAT | reloc::R_X86_64_JUMP_SLOT => {
                        set_u64(s as u64);
                    }
                    reloc::R_X86_64_RELATIVE => {
                        set_u64((b + a) as u64);
                    }
                    reloc::R_X86_64_TPOFF64 => {
                        set_u64((s + a).wrapping_sub(t) as u64);
                    }
                    reloc::R_X86_64_IRELATIVE => (), // Handled below
                    _ => {
                        println!(
                            " {} unsupported",
                            reloc::r_to_str(rel.r_type, elf.header.e_machine)
                        );
                    }
                }
            }

            // Protect pages
            for ph in elf.program_headers.iter() {
                if ph.p_type == program_header::PT_LOAD {
                    let voff = ph.p_vaddr as usize % PAGE_SIZE;
                    let vaddr = ph.p_vaddr as usize - voff;
                    let vsize =
                        ((ph.p_memsz as usize + voff + PAGE_SIZE - 1) / PAGE_SIZE) * PAGE_SIZE;

                    let mut prot = 0;

                    if ph.p_flags & program_header::PF_R == program_header::PF_R {
                        prot |= sys_mman::PROT_READ;
                    }

                    // W ^ X. If it is executable, do not allow it to be writable, even if requested
                    if ph.p_flags & program_header::PF_X == program_header::PF_X {
                        prot |= sys_mman::PROT_EXEC;
                    } else if ph.p_flags & program_header::PF_W == program_header::PF_W {
                        prot |= sys_mman::PROT_WRITE;
                    }

                    let res = unsafe {
                        let ptr = mmap.as_mut_ptr().add(vaddr);
                        println!(" prot {:#x}, {:#x}: {:p}, {:#x}", vaddr, vsize, ptr, prot);
                        sys_mman::mprotect(ptr as *mut c_void, vsize, prot)
                    };

                    if res < 0 {
                        return Err(Error::Malformed(format!("failed to mprotect {}", elf_name)));
                    }
                }
            }
        }

        // Activate TLS
        unsafe {
            tcb.activate();
        }
        // Perform indirect relocations (necessary evil), gather entry point
        let mut entry_opt = None;
        for (elf_name, elf) in elfs.iter() {
            let mmap = match mmaps.get_mut(elf_name) {
                Some(some) => some,
                None => continue,
            };

            println!("entry {}", elf_name);

            if *elf_name == primary {
                entry_opt = Some(mmap.as_mut_ptr() as usize + elf.header.e_entry as usize);
            }

            // Relocate
            for rel in elf
                .dynrelas
                .iter()
                .chain(elf.dynrels.iter())
                .chain(elf.pltrelocs.iter())
            {
                // println!(" rel {}: {:x?}",
                //     reloc::r_to_str(rel.r_type, elf.header.e_machine),
                //     rel
                // );

                let a = rel.r_addend.unwrap_or(0) as usize;

                let b = mmap.as_mut_ptr() as usize;

                let ptr = unsafe { mmap.as_mut_ptr().add(rel.r_offset as usize) };

                let set_u64 = |value| {
                    // println!(" set_u64 {:#x}", value);
                    unsafe {
                        *(ptr as *mut u64) = value;
                    }
                };
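                // IRELATIVE relocations call a resolver function inside the
                // freshly mapped object, so they are only performed here, after
                // page protections have been applied and TLS has been activated.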
                if rel.r_type == reloc::R_X86_64_IRELATIVE {
                    unsafe {
                        let f: unsafe extern "C" fn() -> u64 = mem::transmute(b + a);
                        set_u64(f());
                    }
                }
            }

            // Protect pages
            for ph in elf.program_headers.iter() {
                if let program_header::PT_LOAD = ph.p_type {
                    let voff = ph.p_vaddr as usize % PAGE_SIZE;
                    let vaddr = ph.p_vaddr as usize - voff;
                    let vsize =
                        ((ph.p_memsz as usize + voff + PAGE_SIZE - 1) / PAGE_SIZE) * PAGE_SIZE;

                    let mut prot = 0;

                    if ph.p_flags & program_header::PF_R == program_header::PF_R {
                        prot |= sys_mman::PROT_READ;
                    }

                    // W ^ X. If it is executable, do not allow it to be writable, even if requested
                    if ph.p_flags & program_header::PF_X == program_header::PF_X {
                        prot |= sys_mman::PROT_EXEC;
                    } else if ph.p_flags & program_header::PF_W == program_header::PF_W {
                        prot |= sys_mman::PROT_WRITE;
                    }

                    let res = unsafe {
                        let ptr = mmap.as_mut_ptr().add(vaddr);
                        println!(" prot {:#x}, {:#x}: {:p}, {:#x}", vaddr, vsize, ptr, prot);
                        sys_mman::mprotect(ptr as *mut c_void, vsize, prot)
                    };

                    if res < 0 {
                        return Err(Error::Malformed(format!("failed to mprotect {}", elf_name)));
                    }
                }
            }
        }

        entry_opt.ok_or(Error::Malformed(format!("missing entry for {}", primary)))
    }
}