0 | |
Patch to fix asm-related FTBFS by switching from asm! to llvm_asm!
|
1 | |
|
2 | |
This patch is based on the upstream commit referenced below with
|
3 | |
non-asm-related changes removed.
|
4 | |
|
5 | |
commit cde22bc180391e75de1c189fe29f442ada86ccde
|
6 | |
Author: Alex Crichton <alex@alexcrichton.com>
|
7 | |
Date: Wed Apr 29 15:30:10 2020 -0500
|
8 | |
|
9 | |
diff --git a/examples/intrinsics.rs b/examples/intrinsics.rs
|
10 | |
index 5ceebe1..82762e0 100644
|
11 | |
--- a/examples/intrinsics.rs
|
12 | |
+++ b/examples/intrinsics.rs
|
13 | |
@@ -6,7 +6,7 @@
|
14 | |
#![allow(unused_features)]
|
15 | |
#![cfg_attr(thumb, no_main)]
|
16 | |
#![deny(dead_code)]
|
17 | |
-#![feature(asm)]
|
18 | |
+#![feature(llvm_asm)]
|
19 | |
#![feature(lang_items)]
|
20 | |
#![feature(start)]
|
21 | |
#![feature(allocator_api)]
|
22 | |
@@ -280,7 +280,7 @@ fn run() {
|
23 | |
|
24 | |
// A copy of "test::black_box". Used to prevent LLVM from optimizing away the intrinsics during LTO
|
25 | |
fn bb<T>(dummy: T) -> T {
|
26 | |
- unsafe { asm!("" : : "r"(&dummy)) }
|
27 | |
+ unsafe { llvm_asm!("" : : "r"(&dummy)) }
|
28 | |
dummy
|
29 | |
}
|
30 | |
|
31 | |
diff --git a/src/arm.rs b/src/arm.rs
|
32 | |
index 4cf73ef..190bba7 100644
|
33 | |
--- a/src/arm.rs
|
34 | |
+++ b/src/arm.rs
|
35 | |
@@ -8,13 +8,15 @@ use core::intrinsics;
|
36 | |
#[naked]
|
37 | |
#[cfg_attr(not(feature = "mangled-names"), no_mangle)]
|
38 | |
pub unsafe fn __aeabi_uidivmod() {
|
39 | |
- asm!("push {lr}
|
40 | |
- sub sp, sp, #4
|
41 | |
- mov r2, sp
|
42 | |
- bl __udivmodsi4
|
43 | |
- ldr r1, [sp]
|
44 | |
- add sp, sp, #4
|
45 | |
- pop {pc}" ::: "memory" : "volatile");
|
46 | |
+ llvm_asm!("
|
47 | |
+ push {lr}
|
48 | |
+ sub sp, sp, #4
|
49 | |
+ mov r2, sp
|
50 | |
+ bl __udivmodsi4
|
51 | |
+ ldr r1, [sp]
|
52 | |
+ add sp, sp, #4
|
53 | |
+ pop {pc}
|
54 | |
+ " ::: "memory" : "volatile");
|
55 | |
intrinsics::unreachable();
|
56 | |
}
|
57 | |
|
58 | |
@@ -22,13 +24,15 @@ pub unsafe fn __aeabi_uidivmod() {
|
59 | |
#[naked]
|
60 | |
#[cfg_attr(not(feature = "mangled-names"), no_mangle)]
|
61 | |
pub unsafe fn __aeabi_uidivmod() {
|
62 | |
- asm!("push {lr}
|
63 | |
- sub sp, sp, #4
|
64 | |
- mov r2, sp
|
65 | |
- bl ___udivmodsi4
|
66 | |
- ldr r1, [sp]
|
67 | |
- add sp, sp, #4
|
68 | |
- pop {pc}" ::: "memory" : "volatile");
|
69 | |
+ llvm_asm!("
|
70 | |
+ push {lr}
|
71 | |
+ sub sp, sp, #4
|
72 | |
+ mov r2, sp
|
73 | |
+ bl ___udivmodsi4
|
74 | |
+ ldr r1, [sp]
|
75 | |
+ add sp, sp, #4
|
76 | |
+ pop {pc}
|
77 | |
+ " ::: "memory" : "volatile");
|
78 | |
intrinsics::unreachable();
|
79 | |
}
|
80 | |
|
81 | |
@@ -36,15 +40,17 @@ pub unsafe fn __aeabi_uidivmod() {
|
82 | |
#[naked]
|
83 | |
#[cfg_attr(not(feature = "mangled-names"), no_mangle)]
|
84 | |
pub unsafe fn __aeabi_uldivmod() {
|
85 | |
- asm!("push {r4, lr}
|
86 | |
- sub sp, sp, #16
|
87 | |
- add r4, sp, #8
|
88 | |
- str r4, [sp]
|
89 | |
- bl __udivmoddi4
|
90 | |
- ldr r2, [sp, #8]
|
91 | |
- ldr r3, [sp, #12]
|
92 | |
- add sp, sp, #16
|
93 | |
- pop {r4, pc}" ::: "memory" : "volatile");
|
94 | |
+ llvm_asm!("
|
95 | |
+ push {r4, lr}
|
96 | |
+ sub sp, sp, #16
|
97 | |
+ add r4, sp, #8
|
98 | |
+ str r4, [sp]
|
99 | |
+ bl __udivmoddi4
|
100 | |
+ ldr r2, [sp, #8]
|
101 | |
+ ldr r3, [sp, #12]
|
102 | |
+ add sp, sp, #16
|
103 | |
+ pop {r4, pc}
|
104 | |
+ " ::: "memory" : "volatile");
|
105 | |
intrinsics::unreachable();
|
106 | |
}
|
107 | |
|
108 | |
@@ -52,15 +58,17 @@ pub unsafe fn __aeabi_uldivmod() {
|
109 | |
#[naked]
|
110 | |
#[cfg_attr(not(feature = "mangled-names"), no_mangle)]
|
111 | |
pub unsafe fn __aeabi_uldivmod() {
|
112 | |
- asm!("push {r4, lr}
|
113 | |
- sub sp, sp, #16
|
114 | |
- add r4, sp, #8
|
115 | |
- str r4, [sp]
|
116 | |
- bl ___udivmoddi4
|
117 | |
- ldr r2, [sp, #8]
|
118 | |
- ldr r3, [sp, #12]
|
119 | |
- add sp, sp, #16
|
120 | |
- pop {r4, pc}" ::: "memory" : "volatile");
|
121 | |
+ llvm_asm!("
|
122 | |
+ push {r4, lr}
|
123 | |
+ sub sp, sp, #16
|
124 | |
+ add r4, sp, #8
|
125 | |
+ str r4, [sp]
|
126 | |
+ bl ___udivmoddi4
|
127 | |
+ ldr r2, [sp, #8]
|
128 | |
+ ldr r3, [sp, #12]
|
129 | |
+ add sp, sp, #16
|
130 | |
+ pop {r4, pc}
|
131 | |
+ " ::: "memory" : "volatile");
|
132 | |
intrinsics::unreachable();
|
133 | |
}
|
134 | |
|
135 | |
@@ -68,12 +76,14 @@ pub unsafe fn __aeabi_uldivmod() {
|
136 | |
#[naked]
|
137 | |
#[cfg_attr(not(feature = "mangled-names"), no_mangle)]
|
138 | |
pub unsafe fn __aeabi_idivmod() {
|
139 | |
- asm!("push {r0, r1, r4, lr}
|
140 | |
- bl __aeabi_idiv
|
141 | |
- pop {r1, r2}
|
142 | |
- muls r2, r2, r0
|
143 | |
- subs r1, r1, r2
|
144 | |
- pop {r4, pc}" ::: "memory" : "volatile");
|
145 | |
+ llvm_asm!("
|
146 | |
+ push {r0, r1, r4, lr}
|
147 | |
+ bl __aeabi_idiv
|
148 | |
+ pop {r1, r2}
|
149 | |
+ muls r2, r2, r0
|
150 | |
+ subs r1, r1, r2
|
151 | |
+ pop {r4, pc}
|
152 | |
+ " ::: "memory" : "volatile");
|
153 | |
intrinsics::unreachable();
|
154 | |
}
|
155 | |
|
156 | |
@@ -81,12 +91,14 @@ pub unsafe fn __aeabi_idivmod() {
|
157 | |
#[naked]
|
158 | |
#[cfg_attr(not(feature = "mangled-names"), no_mangle)]
|
159 | |
pub unsafe fn __aeabi_idivmod() {
|
160 | |
- asm!("push {r0, r1, r4, lr}
|
161 | |
- bl ___aeabi_idiv
|
162 | |
- pop {r1, r2}
|
163 | |
- muls r2, r2, r0
|
164 | |
- subs r1, r1, r2
|
165 | |
- pop {r4, pc}" ::: "memory" : "volatile");
|
166 | |
+ llvm_asm!("
|
167 | |
+ push {r0, r1, r4, lr}
|
168 | |
+ bl ___aeabi_idiv
|
169 | |
+ pop {r1, r2}
|
170 | |
+ muls r2, r2, r0
|
171 | |
+ subs r1, r1, r2
|
172 | |
+ pop {r4, pc}
|
173 | |
+ " ::: "memory" : "volatile");
|
174 | |
intrinsics::unreachable();
|
175 | |
}
|
176 | |
|
177 | |
@@ -94,15 +106,17 @@ pub unsafe fn __aeabi_idivmod() {
|
178 | |
#[naked]
|
179 | |
#[cfg_attr(not(feature = "mangled-names"), no_mangle)]
|
180 | |
pub unsafe fn __aeabi_ldivmod() {
|
181 | |
- asm!("push {r4, lr}
|
182 | |
- sub sp, sp, #16
|
183 | |
- add r4, sp, #8
|
184 | |
- str r4, [sp]
|
185 | |
- bl __divmoddi4
|
186 | |
- ldr r2, [sp, #8]
|
187 | |
- ldr r3, [sp, #12]
|
188 | |
- add sp, sp, #16
|
189 | |
- pop {r4, pc}" ::: "memory" : "volatile");
|
190 | |
+ llvm_asm!("
|
191 | |
+ push {r4, lr}
|
192 | |
+ sub sp, sp, #16
|
193 | |
+ add r4, sp, #8
|
194 | |
+ str r4, [sp]
|
195 | |
+ bl __divmoddi4
|
196 | |
+ ldr r2, [sp, #8]
|
197 | |
+ ldr r3, [sp, #12]
|
198 | |
+ add sp, sp, #16
|
199 | |
+ pop {r4, pc}
|
200 | |
+ " ::: "memory" : "volatile");
|
201 | |
intrinsics::unreachable();
|
202 | |
}
|
203 | |
|
204 | |
@@ -110,15 +124,17 @@ pub unsafe fn __aeabi_ldivmod() {
|
205 | |
#[naked]
|
206 | |
#[cfg_attr(not(feature = "mangled-names"), no_mangle)]
|
207 | |
pub unsafe fn __aeabi_ldivmod() {
|
208 | |
- asm!("push {r4, lr}
|
209 | |
- sub sp, sp, #16
|
210 | |
- add r4, sp, #8
|
211 | |
- str r4, [sp]
|
212 | |
- bl ___divmoddi4
|
213 | |
- ldr r2, [sp, #8]
|
214 | |
- ldr r3, [sp, #12]
|
215 | |
- add sp, sp, #16
|
216 | |
- pop {r4, pc}" ::: "memory" : "volatile");
|
217 | |
+ llvm_asm!("
|
218 | |
+ push {r4, lr}
|
219 | |
+ sub sp, sp, #16
|
220 | |
+ add r4, sp, #8
|
221 | |
+ str r4, [sp]
|
222 | |
+ bl ___divmoddi4
|
223 | |
+ ldr r2, [sp, #8]
|
224 | |
+ ldr r3, [sp, #12]
|
225 | |
+ add sp, sp, #16
|
226 | |
+ pop {r4, pc}
|
227 | |
+ " ::: "memory" : "volatile");
|
228 | |
intrinsics::unreachable();
|
229 | |
}
|
230 | |
|
231 | |
diff --git a/src/lib.rs b/src/lib.rs
|
232 | |
index e57a5ef..0ca770b 100644
|
233 | |
--- a/src/lib.rs
|
234 | |
+++ b/src/lib.rs
|
235 | |
@@ -1,6 +1,6 @@
|
236 | |
#![cfg_attr(feature = "compiler-builtins", compiler_builtins)]
|
237 | |
#![feature(abi_unadjusted)]
|
238 | |
-#![feature(asm)]
|
239 | |
+#![feature(llvm_asm)]
|
240 | |
#![feature(global_asm)]
|
241 | |
#![feature(cfg_target_has_atomic)]
|
242 | |
#![feature(compiler_builtins)]
|
243 | |
diff --git a/src/x86.rs b/src/x86.rs
|
244 | |
index 035c0a3..5511c45 100644
|
245 | |
--- a/src/x86.rs
|
246 | |
+++ b/src/x86.rs
|
247 | |
@@ -12,7 +12,7 @@ use core::intrinsics;
|
248 | |
#[naked]
|
249 | |
#[no_mangle]
|
250 | |
pub unsafe fn ___chkstk_ms() {
|
251 | |
- asm!("
|
252 | |
+ llvm_asm!("
|
253 | |
push %ecx
|
254 | |
push %eax
|
255 | |
cmp $$0x1000,%eax
|
256 | |
@@ -38,7 +38,7 @@ pub unsafe fn ___chkstk_ms() {
|
257 | |
#[naked]
|
258 | |
#[no_mangle]
|
259 | |
pub unsafe fn __alloca() {
|
260 | |
- asm!("jmp ___chkstk // Jump to ___chkstk since fallthrough may be unreliable"
|
261 | |
+ llvm_asm!("jmp ___chkstk // Jump to ___chkstk since fallthrough may be unreliable"
|
262 | |
::: "memory" : "volatile");
|
263 | |
intrinsics::unreachable();
|
264 | |
}
|
265 | |
@@ -47,7 +47,7 @@ pub unsafe fn __alloca() {
|
266 | |
#[naked]
|
267 | |
#[no_mangle]
|
268 | |
pub unsafe fn ___chkstk() {
|
269 | |
- asm!("
|
270 | |
+ llvm_asm!("
|
271 | |
push %ecx
|
272 | |
cmp $$0x1000,%eax
|
273 | |
lea 8(%esp),%ecx // esp before calling this routine -> ecx
|
274 | |
diff --git a/src/x86_64.rs b/src/x86_64.rs
|
275 | |
index 6940f8d..6a0cd56 100644
|
276 | |
--- a/src/x86_64.rs
|
277 | |
+++ b/src/x86_64.rs
|
278 | |
@@ -12,7 +12,7 @@ use core::intrinsics;
|
279 | |
#[naked]
|
280 | |
#[no_mangle]
|
281 | |
pub unsafe fn ___chkstk_ms() {
|
282 | |
- asm!("
|
283 | |
+ llvm_asm!("
|
284 | |
push %rcx
|
285 | |
push %rax
|
286 | |
cmp $$0x1000,%rax
|
287 | |
@@ -37,7 +37,7 @@ pub unsafe fn ___chkstk_ms() {
|
288 | |
#[naked]
|
289 | |
#[no_mangle]
|
290 | |
pub unsafe fn __alloca() {
|
291 | |
- asm!("mov %rcx,%rax // x64 _alloca is a normal function with parameter in rcx
|
292 | |
+ llvm_asm!("mov %rcx,%rax // x64 _alloca is a normal function with parameter in rcx
|
293 | |
jmp ___chkstk // Jump to ___chkstk since fallthrough may be unreliable"
|
294 | |
::: "memory" : "volatile");
|
295 | |
intrinsics::unreachable();
|
296 | |
@@ -47,7 +47,7 @@ pub unsafe fn __alloca() {
|
297 | |
#[naked]
|
298 | |
#[no_mangle]
|
299 | |
pub unsafe fn ___chkstk() {
|
300 | |
- asm!(
|
301 | |
+ llvm_asm!(
|
302 | |
"
|
303 | |
push %rcx
|
304 | |
cmp $$0x1000,%rax
|