
Add "volatile" and "memory" clobber to asm! that doesn't fall through.

Use the "volatile" option and the "memory" clobber on inline asm that does
things like return directly, to reduce the chances of compilers rearranging
the code.
Dan Gohman, 7 years ago (commit a6ecd1a73f)
4 changed files with 18 additions and 13 deletions
  1. src/arm.rs (+4, -4)
  2. src/probestack.rs (+2, -2)
  3. src/x86.rs (+4, -3)
  4. src/x86_64.rs (+8, -4)
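
For context, here is a minimal sketch of the old five-position asm! form this commit edits, written against a nightly toolchain of that era with the asm, core_intrinsics, and naked_functions feature gates; the function name is illustrative and not part of the patch. The two empty positions after the template string are outputs and inputs, "memory" goes in the clobber list, and "volatile" goes in the options list, which tells the compiler the block has side effects and may touch memory, so it must not delete it or move memory accesses across it.

    #![feature(asm)]             // old pre-2021 asm! syntax, as this crate used at the time
    #![feature(core_intrinsics)]
    #![feature(naked_functions)]
    #![no_std]

    // Illustrative stub only: a naked function whose asm returns directly and
    // never falls through, mirroring the pattern changed in the diffs below.
    #[naked]
    pub unsafe fn example_direct_return() {
        asm!("ret"          // leaves the function from inside the asm block
             :              // no outputs
             :              // no inputs
             : "memory"     // clobbers: memory may be read or written
             : "volatile"); // options: keep the asm even though it has no outputs
        ::core::intrinsics::unreachable();
    }

In current Rust the same intent is expressed with core::arch::asm! and options(noreturn), but at the time of this commit the crate used the older syntax shown above.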

src/arm.rs (+4, -4)

@@ -11,7 +11,7 @@ pub unsafe fn __aeabi_uidivmod() {
          bl __udivmodsi4
          ldr r1, [sp]
          add sp, sp, #4
-          pop {pc}");
+          pop {pc}" ::: "memory" : "volatile");
     intrinsics::unreachable();
 }

@@ -26,7 +26,7 @@ pub unsafe fn __aeabi_uldivmod() {
          ldr r2, [sp, #8]
          ldr r3, [sp, #12]
          add sp, sp, #16
-          pop {r4, pc}");
+          pop {r4, pc}" ::: "memory" : "volatile");
     intrinsics::unreachable();
 }

@@ -38,7 +38,7 @@ pub unsafe fn __aeabi_idivmod() {
          pop {r1, r2}
          muls r2, r2, r0
          subs r1, r1, r2
-          pop {r4, pc}");
+          pop {r4, pc}" ::: "memory" : "volatile");
     intrinsics::unreachable();
 }

@@ -53,7 +53,7 @@ pub unsafe fn __aeabi_ldivmod() {
          ldr r2, [sp, #8]
          ldr r3, [sp, #12]
          add sp, sp, #16
-          pop {r4, pc}");
+          pop {r4, pc}" ::: "memory" : "volatile");
     intrinsics::unreachable();
 }

src/probestack.rs (+2, -2)

@@ -82,7 +82,7 @@ pub unsafe extern fn __rust_probestack() {
         add    %rax,%rsp

         ret
-    ");
+    " ::: "memory" : "volatile");
     ::core::intrinsics::unreachable();
 }

@@ -111,6 +111,6 @@ pub unsafe extern fn __rust_probestack() {
         add    %eax,%esp
         pop    %ecx
         ret
-    ");
+    " ::: "memory" : "volatile");
     ::core::intrinsics::unreachable();
 }

src/x86.rs (+4, -3)

@@ -29,7 +29,7 @@ pub unsafe fn ___chkstk_ms() {
         test   %ecx,(%ecx)
         pop    %eax
         pop    %ecx
-        ret");
+        ret" ::: "memory" : "volatile");
     intrinsics::unreachable();
 }

@@ -38,7 +38,8 @@ pub unsafe fn ___chkstk_ms() {
 #[naked]
 #[no_mangle]
 pub unsafe fn __alloca() {
-    asm!("jmp ___chkstk   // Jump to ___chkstk since fallthrough may be unreliable");
+    asm!("jmp ___chkstk   // Jump to ___chkstk since fallthrough may be unreliable"
+         ::: "memory" : "volatile");
     intrinsics::unreachable();
 }

@@ -66,6 +67,6 @@ pub unsafe fn ___chkstk() {
         mov    -4(%eax),%ecx    // restore ecx
         push   (%eax)           // push return address onto the stack
         sub    %esp,%eax        // restore the original value in eax
-        ret");
+        ret" ::: "memory" : "volatile");
     intrinsics::unreachable();
 }

src/x86_64.rs (+8, -4)

@@ -29,7 +29,7 @@ pub unsafe fn ___chkstk_ms() {
         test   %rcx,(%rcx)
         pop    %rax
         pop    %rcx
-        ret");
+        ret" ::: "memory" : "volatile");
     intrinsics::unreachable();
 }

@@ -38,7 +38,8 @@ pub unsafe fn ___chkstk_ms() {
 #[no_mangle]
 pub unsafe fn __alloca() {
     asm!("mov    %rcx,%rax  // x64 _alloca is a normal function with parameter in rcx
-          jmp    ___chkstk  // Jump to ___chkstk since fallthrough may be unreliable");
+          jmp    ___chkstk  // Jump to ___chkstk since fallthrough may be unreliable"
+         ::: "memory" : "volatile");
     intrinsics::unreachable();
 }

@@ -46,7 +47,8 @@ pub unsafe fn __alloca() {
 #[naked]
 #[no_mangle]
 pub unsafe fn ___chkstk() {
-    asm!("
+    asm!(
+        "
         push   %rcx
         cmp    $$0x1000,%rax
         lea    16(%rsp),%rcx  // rsp before calling this routine -> rcx
@@ -66,6 +68,8 @@ pub unsafe fn ___chkstk() {
         mov    -8(%rax),%rcx  // restore rcx
         push   (%rax)         // push return address onto the stack
         sub    %rsp,%rax      // restore the original value in rax
-        ret");
+        ret"
+        ::: "memory" : "volatile"
+    );
     intrinsics::unreachable();
 }