Auto merge of #123351 - beetrees:x86-ret-snan-rust, r=nikic,workingju… · rust-lang/rust@c6727fc

//@ assembly-output: emit-asm
//@ only-x86
// FIXME(#114479): LLVM miscompiles loading and storing `f32` and `f64` when SSE is disabled.
// There's no compiletest directive to ignore a test on i586 only, so just always explicitly enable
// SSE2.
// Use the same target CPU as `i686` so that LLVM orders the instructions in the same order.
//@ compile-flags: -Ctarget-feature=+sse2 -Ctarget-cpu=pentium4
// Force frame pointers to make ASM more consistent between targets
//@ compile-flags: -O -C force-frame-pointers
//@ filecheck-flags: --implicit-check-not fld --implicit-check-not fst
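// Note: `--implicit-check-not` makes FileCheck reject the given pattern anywhere in the output
// that is not explicitly matched, so the two flags above ban every x87 load/store mnemonic
// (`fld*`, `fst*`) across the whole file, not just around the explicit checks below.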
//@ revisions: unix windows
//@[unix] ignore-windows
//@[windows] only-windows
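// The two revisions exist so that the `unix`- and `windows`-prefixed checks below can account for
// the targets addressing the spilled call results differently (see e.g. `call_f64_f64`).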

#![crate_type = "lib"]
#![feature(f16, f128)]

// Tests that returning `f32` and `f64` with the "Rust" ABI on 32-bit x86 doesn't use the x87
// floating point stack, as loading and storing `f32`s and `f64`s to and from the x87 stack quietens
// signalling NaNs.
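// Illustrative example (not itself checked by this test): the `f32` bit pattern 0x7FA0_0000 is a
// signalling NaN; a round trip through the x87 stack (`fld`/`fstp`) sets the quiet bit and yields
// 0x7FE0_0000 instead, silently changing the payload seen by `f32::to_bits`.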

// Returning individual floats

// CHECK-LABEL: return_f32:
#[no_mangle]
pub fn return_f32(x: f32) -> f32 {
    // CHECK: movl {{.*}}(%ebp), %eax
    // CHECK-NOT: ax
    // CHECK: retl
    x
}
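// The checks above expect the `f32` to come back in the integer register %eax; by contrast, the
// 32-bit C ABI returns floats on the x87 stack in st(0).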

// CHECK-LABEL: return_f64:
#[no_mangle]
pub fn return_f64(x: f64) -> f64 {
    // CHECK: movl [[#%d,OFFSET:]](%ebp), %[[PTR:.*]]
    // CHECK-NEXT: movsd [[#%d,OFFSET+4]](%ebp), %[[VAL:.*]]
    // CHECK-NEXT: movsd %[[VAL]], (%[[PTR]])
    // CHECK: retl
    x
}
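// An `f64` doesn't fit in a 32-bit GPR, so the checks above expect it to be written through a
// return pointer with SSE2 `movsd` rather than loaded onto the x87 stack with `fld`.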

// Returning scalar pairs containing floats

// CHECK-LABEL: return_f32_f32:
#[no_mangle]
pub fn return_f32_f32(x: (f32, f32)) -> (f32, f32) {
    // CHECK: movl [[#%d,OFFSET:]](%ebp), %[[PTR:.*]]
    // CHECK-NEXT: movss [[#%d,OFFSET+4]](%ebp), %[[VAL1:.*]]
    // CHECK-NEXT: movss [[#%d,OFFSET+8]](%ebp), %[[VAL2:.*]]
    // CHECK-NEXT: movss %[[VAL1]], (%[[PTR]])
    // CHECK-NEXT: movss %[[VAL2]], 4(%[[PTR]])
    // CHECK: retl
    x
}
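// As with `return_f64`, the pair is returned through a pointer: both halves are loaded from the
// argument area and stored through %[[PTR]] with `movss`, never touching the x87 stack.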

// CHECK-LABEL: return_f64_f64:
#[no_mangle]
pub fn return_f64_f64(x: (f64, f64)) -> (f64, f64) {
    // CHECK: movl [[#%d,OFFSET:]](%ebp), %[[PTR:.*]]
    // CHECK-NEXT: movsd [[#%d,OFFSET+4]](%ebp), %[[VAL1:.*]]
    // CHECK-NEXT: movsd [[#%d,OFFSET+12]](%ebp), %[[VAL2:.*]]
    // CHECK-NEXT: movsd %[[VAL1]], (%[[PTR]])
    // CHECK-NEXT: movsd %[[VAL2]], 8(%[[PTR]])
    // CHECK: retl
    x
}

// CHECK-LABEL: return_f32_f64:
#[no_mangle]
pub fn return_f32_f64(x: (f32, f64)) -> (f32, f64) {
    // CHECK: movl [[#%d,OFFSET:]](%ebp), %[[PTR:.*]]
    // CHECK-NEXT: movss [[#%d,OFFSET+4]](%ebp), %[[VAL1:.*]]
    // CHECK-NEXT: movsd [[#%d,OFFSET+8]](%ebp), %[[VAL2:.*]]
    // CHECK-NEXT: movss %[[VAL1]], (%[[PTR]])
    // CHECK-NEXT: movsd %[[VAL2]], {{4|8}}(%[[PTR]])
    // CHECK: retl
    x
}
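// The `{{4|8}}` store offset accepts either layout of `(f32, f64)`, presumably because `f64` is
// only 4-byte aligned on i386 System V but 8-byte aligned on Windows.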

// CHECK-LABEL: return_f64_f32:
#[no_mangle]
pub fn return_f64_f32(x: (f64, f32)) -> (f64, f32) {
    // CHECK: movl [[#%d,OFFSET:]](%ebp), %[[PTR:.*]]
    // CHECK-NEXT: movsd [[#%d,OFFSET+4]](%ebp), %[[VAL1:.*]]
    // CHECK-NEXT: movss [[#%d,OFFSET+12]](%ebp), %[[VAL2:.*]]
    // CHECK-NEXT: movsd %[[VAL1]], (%[[PTR]])
    // CHECK-NEXT: movss %[[VAL2]], 8(%[[PTR]])
    // CHECK: retl
    x
}

// CHECK-LABEL: return_f32_other:
#[no_mangle]
pub fn return_f32_other(x: (f32, usize)) -> (f32, usize) {
    // CHECK: movl [[#%d,OFFSET:]](%ebp), %[[PTR:.*]]
    // CHECK-NEXT: movss [[#%d,OFFSET+4]](%ebp), %[[VAL1:.*]]
    // CHECK-NEXT: movl [[#%d,OFFSET+8]](%ebp), %[[VAL2:.*]]
    // CHECK-NEXT: movss %[[VAL1]], (%[[PTR]])
    // CHECK-NEXT: movl %[[VAL2]], 4(%[[PTR]])
    // CHECK: retl
    x
}

// CHECK-LABEL: return_f64_other:
#[no_mangle]
pub fn return_f64_other(x: (f64, usize)) -> (f64, usize) {
    // CHECK: movl [[#%d,OFFSET:]](%ebp), %[[PTR:.*]]
    // CHECK-NEXT: movsd [[#%d,OFFSET+4]](%ebp), %[[VAL1:.*]]
    // CHECK-NEXT: movl [[#%d,OFFSET+12]](%ebp), %[[VAL2:.*]]
    // CHECK-NEXT: movsd %[[VAL1]], (%[[PTR]])
    // CHECK-NEXT: movl %[[VAL2]], 8(%[[PTR]])
    // CHECK: retl
    x
}

// CHECK-LABEL: return_other_f32:
#[no_mangle]
pub fn return_other_f32(x: (usize, f32)) -> (usize, f32) {
    // CHECK: movl [[#%d,OFFSET:]](%ebp), %[[PTR:.*]]
    // CHECK-NEXT: movl [[#%d,OFFSET+4]](%ebp), %[[VAL1:.*]]
    // CHECK-NEXT: movss [[#%d,OFFSET+8]](%ebp), %[[VAL2:.*]]
    // CHECK-NEXT: movl %[[VAL1]], (%[[PTR]])
    // CHECK-NEXT: movss %[[VAL2]], 4(%[[PTR]])
    // CHECK: retl
    x
}

// CHECK-LABEL: return_other_f64:
#[no_mangle]
pub fn return_other_f64(x: (usize, f64)) -> (usize, f64) {
    // CHECK: movl [[#%d,OFFSET:]](%ebp), %[[PTR:.*]]
    // CHECK-NEXT: movl [[#%d,OFFSET+4]](%ebp), %[[VAL1:.*]]
    // CHECK-NEXT: movsd [[#%d,OFFSET+8]](%ebp), %[[VAL2:.*]]
    // CHECK-NEXT: movl %[[VAL1]], (%[[PTR]])
    // CHECK-NEXT: movsd %[[VAL2]], {{4|8}}(%[[PTR]])
    // CHECK: retl
    x
}

// Calling functions returning floats

// CHECK-LABEL: call_f32:
#[no_mangle]
pub unsafe fn call_f32(x: &mut f32) {
    extern "Rust" {
        fn get_f32() -> f32;
    }
    // CHECK: movl {{.*}}(%ebp), %[[PTR:.*]]
    // CHECK: calll {{()|_}}get_f32
    // CHECK-NEXT: movl %eax, (%[[PTR]])
    *x = get_f32();
}
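// The returned `f32` arrives in %eax and is stored with an integer `movl`, so no x87 instructions
// are needed on either side of the call.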

// CHECK-LABEL: call_f64:
#[no_mangle]
pub unsafe fn call_f64(x: &mut f64) {
    extern "Rust" {
        fn get_f64() -> f64;
    }
    // CHECK: movl {{.*}}(%ebp), %[[PTR:.*]]
    // CHECK: calll {{()|_}}get_f64
    // CHECK: movsd {{.*}}(%{{ebp|esp}}), %[[VAL:.*]]
    // CHECK-NEXT: movsd %[[VAL:.*]], (%[[PTR]])
    *x = get_f64();
}
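// The `f64` result is read back from a stack slot with SSE2 `movsd` and stored through the
// pointer; an x87 `fld`/`fstp` pair never appears.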

// Calling functions returning scalar pairs containing floats

// CHECK-LABEL: call_f32_f32:
#[no_mangle]
pub unsafe fn call_f32_f32(x: &mut (f32, f32)) {
    extern "Rust" {
        fn get_f32_f32() -> (f32, f32);
    }
    // CHECK: movl {{.*}}(%ebp), %[[PTR:.*]]
    // CHECK: calll {{()|_}}get_f32_f32
    // CHECK: movss [[#%d,OFFSET:]](%ebp), %[[VAL1:.*]]
    // CHECK-NEXT: movss [[#%d,OFFSET+4]](%ebp), %[[VAL2:.*]]
    // CHECK-NEXT: movss %[[VAL1]], (%[[PTR]])
    // CHECK-NEXT: movss %[[VAL2]], 4(%[[PTR]])
    *x = get_f32_f32();
}

// CHECK-LABEL: call_f64_f64:
#[no_mangle]
pub unsafe fn call_f64_f64(x: &mut (f64, f64)) {
    extern "Rust" {
        fn get_f64_f64() -> (f64, f64);
    }
    // CHECK: movl {{.*}}(%ebp), %[[PTR:.*]]
    // CHECK: calll {{()|_}}get_f64_f64
    // unix: movsd [[#%d,OFFSET:]](%ebp), %[[VAL1:.*]]
    // unix-NEXT: movsd [[#%d,OFFSET+8]](%ebp), %[[VAL2:.*]]
    // windows: movsd (%esp), %[[VAL1:.*]]
    // windows-NEXT: movsd 8(%esp), %[[VAL2:.*]]
    // CHECK-NEXT: movsd %[[VAL1]], (%[[PTR]])
    // CHECK-NEXT: movsd %[[VAL2]], 8(%[[PTR]])
    *x = get_f64_f64();
}
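// The `unix` and `windows` checks above differ only in how the temporary holding the returned pair
// is addressed (an %ebp-relative slot vs an %esp-relative one); the `movsd` copies themselves are
// the same, and neither revision goes through the x87 stack.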

// CHECK-LABEL: call_f32_f64:
#[no_mangle]
pub unsafe fn call_f32_f64(x: &mut (f32, f64)) {
    extern "Rust" {
        fn get_f32_f64() -> (f32, f64);
    }
    // CHECK: movl {{.*}}(%ebp), %[[PTR:.*]]
    // CHECK: calll {{()|_}}get_f32_f64
    // unix: movss [[#%d,OFFSET:]](%ebp), %[[VAL1:.*]]
    // unix-NEXT: movsd [[#%d,OFFSET+4]](%ebp), %[[VAL2:.*]]
    // windows: movss (%esp), %[[VAL1:.*]]
    // windows-NEXT: movsd 8(%esp), %[[VAL2:.*]]
    // CHECK-NEXT: movss %[[VAL1]], (%[[PTR]])
    // unix-NEXT: movsd %[[VAL2]], 4(%[[PTR]])
    // windows-NEXT: movsd %[[VAL2]], 8(%[[PTR]])
    *x = get_f32_f64();
}

// CHECK-LABEL: call_f64_f32:
#[no_mangle]
pub unsafe fn call_f64_f32(x: &mut (f64, f32)) {
    extern "Rust" {
        fn get_f64_f32() -> (f64, f32);
    }
    // CHECK: movl {{.*}}(%ebp), %[[PTR:.*]]
    // CHECK: calll {{()|_}}get_f64_f32
    // unix: movsd [[#%d,OFFSET:]](%ebp), %[[VAL1:.*]]
    // unix-NEXT: movss [[#%d,OFFSET+8]](%ebp), %[[VAL2:.*]]
    // windows: movsd (%esp), %[[VAL1:.*]]
    // windows-NEXT: movss 8(%esp), %[[VAL2:.*]]
    // CHECK-NEXT: movsd %[[VAL1]], (%[[PTR]])
    // CHECK-NEXT: movss %[[VAL2]], 8(%[[PTR]])
    *x = get_f64_f32();
}

// CHECK-LABEL: call_f32_other:
#[no_mangle]
pub unsafe fn call_f32_other(x: &mut (f32, usize)) {
    extern "Rust" {
        fn get_f32_other() -> (f32, usize);
    }
    // CHECK: movl {{.*}}(%ebp), %[[PTR:.*]]
    // CHECK: calll {{()|_}}get_f32_other
    // CHECK: movss [[#%d,OFFSET:]](%ebp), %[[VAL1:.*]]
    // CHECK-NEXT: movl [[#%d,OFFSET+4]](%ebp), %[[VAL2:.*]]
    // CHECK-NEXT: movss %[[VAL1]], (%[[PTR]])
    // CHECK-NEXT: movl %[[VAL2]], 4(%[[PTR]])
    *x = get_f32_other();
}

// CHECK-LABEL: call_f64_other:
#[no_mangle]
pub unsafe fn call_f64_other(x: &mut (f64, usize)) {
    extern "Rust" {
        fn get_f64_other() -> (f64, usize);
    }
    // CHECK: movl {{.*}}(%ebp), %[[PTR:.*]]
    // CHECK: calll {{()|_}}get_f64_other
    // unix: movsd [[#%d,OFFSET:]](%ebp), %[[VAL1:.*]]
    // unix-NEXT: movl [[#%d,OFFSET+8]](%ebp), %[[VAL2:.*]]
    // windows: movsd (%esp), %[[VAL1:.*]]
    // windows-NEXT: movl 8(%esp), %[[VAL2:.*]]
    // CHECK-NEXT: movsd %[[VAL1]], (%[[PTR]])
    // CHECK-NEXT: movl %[[VAL2]], 8(%[[PTR]])
    *x = get_f64_other();
}

// CHECK-LABEL: call_other_f32:
#[no_mangle]
pub unsafe fn call_other_f32(x: &mut (usize, f32)) {
    extern "Rust" {
        fn get_other_f32() -> (usize, f32);
    }
    // CHECK: movl {{.*}}(%ebp), %[[PTR:.*]]
    // CHECK: calll {{()|_}}get_other_f32
    // CHECK: movl [[#%d,OFFSET:]](%ebp), %[[VAL1:.*]]
    // CHECK-NEXT: movss [[#%d,OFFSET+4]](%ebp), %[[VAL2:.*]]
    // CHECK-NEXT: movl %[[VAL1]], (%[[PTR]])
    // CHECK-NEXT: movss %[[VAL2]], 4(%[[PTR]])
    *x = get_other_f32();
}

// CHECK-LABEL: call_other_f64:
#[no_mangle]
pub unsafe fn call_other_f64(x: &mut (usize, f64)) {
    extern "Rust" {
        fn get_other_f64() -> (usize, f64);
    }
    // CHECK: movl {{.*}}(%ebp), %[[PTR:.*]]
    // CHECK: calll {{()|_}}get_other_f64
    // unix: movl [[#%d,OFFSET:]](%ebp), %[[VAL1:.*]]
    // unix-NEXT: movsd [[#%d,OFFSET+4]](%ebp), %[[VAL2:.*]]
    // windows: movl (%esp), %[[VAL1:.*]]
    // windows-NEXT: movsd 8(%esp), %[[VAL2:.*]]
    // CHECK-NEXT: movl %[[VAL1]], (%[[PTR]])
    // unix-NEXT: movsd %[[VAL2]], 4(%[[PTR]])
    // windows-NEXT: movsd %[[VAL2]], 8(%[[PTR]])
    *x = get_other_f64();
}

// The "C" ABI for `f16` and `f128` on x86 has never used the x87 floating point stack. Do some
// basic checks to ensure this remains the case for the "Rust" ABI.

// CHECK-LABEL: return_f16:
#[no_mangle]
pub fn return_f16(x: f16) -> f16 {
    // CHECK: pinsrw $0, {{.*}}(%ebp), %xmm0
    // CHECK-NOT: xmm0
    // CHECK: retl
    x
}
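// Per the checks above, the `f16` comes back in %xmm0 (`pinsrw` inserts the 16-bit value into the
// SSE register), so the x87 stack is not involved.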

// CHECK-LABEL: return_f128:
#[no_mangle]
pub fn return_f128(x: f128) -> f128 {
    // CHECK: movl [[#%d,OFFSET:]](%ebp), %[[PTR:.*]]
    // CHECK-NEXT: movl [[#%d,OFFSET+16]](%ebp), %[[VAL4:.*]]
    // CHECK-NEXT: movl [[#%d,OFFSET+4]](%ebp), %[[VAL1:.*]]
    // CHECK-NEXT: movl [[#%d,OFFSET+8]](%ebp), %[[VAL2:.*]]
    // CHECK-NEXT: movl [[#%d,OFFSET+12]](%ebp), %[[VAL3:.*]]
    // CHECK-NEXT: movl %[[VAL4:.*]] 12(%[[PTR]])
    // CHECK-NEXT: movl %[[VAL3:.*]] 8(%[[PTR]])
    // CHECK-NEXT: movl %[[VAL2:.*]] 4(%[[PTR]])
    // CHECK-NEXT: movl %[[VAL1:.*]] (%[[PTR]])
    // CHECK: retl
    x
}
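// Per the checks above, `f128` is returned indirectly: its four 32-bit words are copied through
// the return pointer with plain `movl` stores, again bypassing the x87 stack.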