Auto merge of #147893 - fee1-dead-contrib:constheapheapheap, r= · rust-lang/rust@b3f7ade
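A diff to the `Global` allocator implementation in `alloc`: each `Allocator` operation is split into a `*_runtime` path and a `*_const` path, dispatched with `core::intrinsics::const_eval_select`, so that `Global` can implement `const Allocator` under the `const_heap` feature.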
```diff
@@ -5,8 +5,8 @@
 #[stable(feature = "alloc_module", since = "1.28.0")]
 #[doc(inline)]
 pub use core::alloc::*;
-use core::hint;
 use core::ptr::{self, NonNull};
+use core::{cmp, hint};
 
 unsafe extern "Rust" {
     // These are the magic symbols to call the global allocator. rustc generates
```
```diff
@@ -182,7 +182,7 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
 impl Global {
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
-    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
+    fn alloc_impl_runtime(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
         match layout.size() {
             0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
             // SAFETY: `layout` is non-zero in size,
```
```diff
@@ -194,10 +194,26 @@ impl Global {
         }
     }
 
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    fn deallocate_impl_runtime(ptr: NonNull<u8>, layout: Layout) {
+        if layout.size() != 0 {
+            // SAFETY:
+            // * We have checked that `layout` is non-zero in size.
+            // * The caller is obligated to provide a layout that "fits", and in this case,
+            //   "fit" always means a layout that is equal to the original, because our
+            //   `allocate()`, `grow()`, and `shrink()` implementations never return a larger
+            //   allocation than requested.
+            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
+            //   safety documentation.
+            unsafe { dealloc(ptr.as_ptr(), layout) }
+        }
+    }
+
     // SAFETY: Same as `Allocator::grow`
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
-    unsafe fn grow_impl(
+    fn grow_impl_runtime(
         &self,
         ptr: NonNull<u8>,
         old_layout: Layout,
```
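The zero-size check moves with the code: `deallocate_impl_runtime` still treats size-0 layouts as no-ops, mirroring how `allocate` hands out a dangling pointer for them. A minimal illustration of that contract from the caller's side (not part of the diff):

```rust
#![feature(allocator_api)]

use std::alloc::{Allocator, Global, Layout};

fn main() {
    let zst = Layout::new::<()>();
    // Zero-size allocations never reach the system allocator; they return a
    // dangling, well-aligned pointer of length 0.
    let p = Global.allocate(zst).unwrap();
    assert_eq!(p.len(), 0);
    // SAFETY: `p` was allocated by `Global` with layout `zst`.
    unsafe { Global.deallocate(p.cast(), zst) }; // no-op, size is 0
}
```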
```diff
@@ -241,10 +257,172 @@ impl Global {
             },
         }
     }
+
+    // SAFETY: Same as `Allocator::shrink`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    fn shrink_impl_runtime(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+        _zeroed: bool,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        debug_assert!(
+            new_layout.size() <= old_layout.size(),
+            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
+        );
+
+        match new_layout.size() {
+            // SAFETY: conditions must be upheld by the caller
+            0 => unsafe {
+                self.deallocate(ptr, old_layout);
+                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
+            },
+
+            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
+            new_size if old_layout.align() == new_layout.align() => unsafe {
+                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
+                hint::assert_unchecked(new_size <= old_layout.size());
+
+                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
+                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
+                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
+            },
+
+            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
+            // both the old and new memory allocation are valid for reads and writes for `new_size`
+            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
+            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
+            // for `dealloc` must be upheld by the caller.
+            new_size => unsafe {
+                let new_ptr = self.allocate(new_layout)?;
+                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
+                self.deallocate(ptr, old_layout);
+                Ok(new_ptr)
+            },
+        }
+    }
```
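`shrink_impl_runtime` is the old `Allocator::shrink` body: size 0 deallocates, matching alignments take the `realloc` fast path, and a changed alignment falls back to allocate-copy-deallocate. A hypothetical caller hitting the fast path (illustrative only, not from the diff):

```rust
#![feature(allocator_api)]

use std::alloc::{Allocator, Global, Layout};

fn main() {
    let old = Layout::array::<u8>(16).unwrap();
    let new = Layout::array::<u8>(8).unwrap(); // same alignment, smaller size

    let ptr = Global.allocate(old).expect("allocation failed");
    // SAFETY: `ptr` was allocated with `old`, and new.size() <= old.size().
    let shrunk = unsafe { Global.shrink(ptr.cast(), old, new) }.expect("shrink failed");
    // SAFETY: after a successful shrink the allocation is owned through
    // `shrunk` and must be freed with the new layout.
    unsafe { Global.deallocate(shrunk.cast(), new) };
}
```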
```diff
+
+    // SAFETY: Same as `Allocator::allocate`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
+        core::intrinsics::const_eval_select(
+            (layout, zeroed),
+            Global::alloc_impl_const,
+            Global::alloc_impl_runtime,
+        )
+    }
+
+    // SAFETY: Same as `Allocator::deallocate`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn deallocate_impl(&self, ptr: NonNull<u8>, layout: Layout) {
+        core::intrinsics::const_eval_select(
+            (ptr, layout),
+            Global::deallocate_impl_const,
+            Global::deallocate_impl_runtime,
+        )
+    }
+
+    // SAFETY: Same as `Allocator::grow`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn grow_impl(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+        zeroed: bool,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        core::intrinsics::const_eval_select(
+            (self, ptr, old_layout, new_layout, zeroed),
+            Global::grow_shrink_impl_const,
+            Global::grow_impl_runtime,
+        )
+    }
+
+    // SAFETY: Same as `Allocator::shrink`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn shrink_impl(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        core::intrinsics::const_eval_select(
+            (self, ptr, old_layout, new_layout, false),
+            Global::grow_shrink_impl_const,
+            Global::shrink_impl_runtime,
+        )
+    }
+
```
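Each dispatcher above follows one pattern: a `const fn` passes its arguments as a tuple to `core::intrinsics::const_eval_select`, which runs the first function during const evaluation and the second at runtime. A stripped-down sketch of that pattern with hypothetical function names (nightly-only):

```rust
#![feature(core_intrinsics)]
#![allow(internal_features)]

// Both arms take the same arguments and should return the same value;
// const_eval_select only picks which body runs in which context.
const fn mul_two(x: u32) -> u32 {
    const fn compile_time(x: u32) -> u32 {
        x * 2 // simple form the const evaluator can always handle
    }
    fn runtime(x: u32) -> u32 {
        x << 1 // runtime-only tricks are fine here
    }
    core::intrinsics::const_eval_select((x,), compile_time, runtime)
}

const AT_COMPILE_TIME: u32 = mul_two(21);

fn main() {
    // Both paths agree: 42.
    assert_eq!(AT_COMPILE_TIME, mul_two(21));
}
```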
```diff
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn alloc_impl_const(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
+        match layout.size() {
+            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
+            // SAFETY: `layout` is non-zero in size,
+            size => unsafe {
+                let raw_ptr = core::intrinsics::const_allocate(layout.size(), layout.align());
+                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
+                if zeroed {
+                    // SAFETY: the pointer returned by `const_allocate` is valid to write to.
+                    ptr.write_bytes(0, size);
+                }
+                Ok(NonNull::slice_from_raw_parts(ptr, size))
+            },
+        }
+    }
+
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn deallocate_impl_const(ptr: NonNull<u8>, layout: Layout) {
+        if layout.size() != 0 {
+            // SAFETY: We checked for nonzero size; other preconditions must be upheld by caller.
+            unsafe {
+                core::intrinsics::const_deallocate(ptr.as_ptr(), layout.size(), layout.align());
+            }
+        }
+    }
+
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn grow_shrink_impl_const(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+        zeroed: bool,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        let new_ptr = self.alloc_impl(new_layout, zeroed)?;
+        // SAFETY: both pointers are valid and this operation is in bounds.
+        unsafe {
+            ptr::copy_nonoverlapping(
+                ptr.as_ptr(),
+                new_ptr.as_mut_ptr(),
+                cmp::min(old_layout.size(), new_layout.size()),
+            );
+        }
+        unsafe {
+            self.deallocate_impl(ptr, old_layout);
+        }
+        Ok(new_ptr)
+    }
```
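The const arms bottom out in the `const_allocate` and `const_deallocate` intrinsics, whose memory exists only inside the const evaluator. A standalone sketch of the same calls (illustrative only, nightly-only):

```rust
#![feature(core_intrinsics, const_heap)]
#![allow(internal_features)]

use core::intrinsics::{const_allocate, const_deallocate};

// Scratch memory allocated and freed entirely during const evaluation;
// nothing here survives into the compiled binary.
const ANSWER: i32 = unsafe {
    let ptr = const_allocate(4, 4) as *mut i32; // size 4, align 4
    ptr.write(42);
    let value = ptr.read();
    const_deallocate(ptr as *mut u8, 4, 4);
    value
};

fn main() {
    assert_eq!(ANSWER, 42);
}
```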
```diff
 }
 
 #[unstable(feature = "allocator_api", issue = "32838")]
-unsafe impl Allocator for Global {
+#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+unsafe impl const Allocator for Global {
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
```
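`unsafe impl const Allocator for Global` relies on const trait impls: marking the impl `const` makes its methods callable in const contexts. A toy version of the syntax with a hypothetical trait (nightly-only; the exact feature gating is an assumption):

```rust
#![feature(const_trait_impl)]

#[const_trait]
trait Doubled {
    fn doubled(&self) -> i32;
}

struct N(i32);

// `impl const` promises every method of this impl is const-evaluable.
impl const Doubled for N {
    fn doubled(&self) -> i32 {
        self.0 * 2
    }
}

// Callable at compile time...
const X: i32 = N(21).doubled();

fn main() {
    // ...and at runtime.
    assert_eq!(X, N(21).doubled());
}
```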
```diff
@@ -260,17 +438,8 @@ unsafe impl Allocator for Global {
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
-        if layout.size() != 0 {
-            // SAFETY:
-            // * We have checked that `layout` is non-zero in size.
-            // * The caller is obligated to provide a layout that "fits", and in this case,
-            //   "fit" always means a layout that is equal to the original, because our
-            //   `allocate()`, `grow()`, and `shrink()` implementations never return a larger
-            //   allocation than requested.
-            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
-            //   safety documentation.
-            unsafe { dealloc(ptr.as_ptr(), layout) }
-        }
+        // SAFETY: all conditions must be upheld by the caller
+        unsafe { self.deallocate_impl(ptr, layout) }
     }
 
     #[inline]
```
```diff
@@ -305,40 +474,8 @@ unsafe impl Allocator for Global {
         old_layout: Layout,
         new_layout: Layout,
     ) -> Result<NonNull<[u8]>, AllocError> {
-        debug_assert!(
-            new_layout.size() <= old_layout.size(),
-            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
-        );
-
-        match new_layout.size() {
-            // SAFETY: conditions must be upheld by the caller
-            0 => unsafe {
-                self.deallocate(ptr, old_layout);
-                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
-            },
-
-            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
-            new_size if old_layout.align() == new_layout.align() => unsafe {
-                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
-                hint::assert_unchecked(new_size <= old_layout.size());
-
-                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
-                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
-                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
-            },
-
-            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
-            // both the old and new memory allocation are valid for reads and writes for `new_size`
-            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
-            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
-            // for `dealloc` must be upheld by the caller.
-            new_size => unsafe {
-                let new_ptr = self.allocate(new_layout)?;
-                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
-                self.deallocate(ptr, old_layout);
-                Ok(new_ptr)
-            },
-        }
+        // SAFETY: all conditions must be upheld by the caller
+        unsafe { self.shrink_impl(ptr, old_layout, new_layout) }
     }
 }
 
```
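Taken together, the change lets const code route allocations through `Global` exactly like runtime code, as long as everything is freed before evaluation finishes. A sketch of the kind of code this enables (the exact set of feature gates is an assumption and may differ by nightly):

```rust
#![feature(allocator_api, const_heap, const_trait_impl)]

use std::alloc::{Allocator, Global, Layout};

// Allocate scratch space during const evaluation via the `const Allocator`
// impl, then free it before the constant finishes evaluating.
const LEN: usize = {
    let layout = Layout::new::<u64>();
    let ptr = match Global.allocate(layout) {
        Ok(p) => p,
        Err(_) => panic!("const allocation failed"),
    };
    let len = ptr.len();
    // SAFETY: `ptr` was just allocated with `layout`.
    unsafe { Global.deallocate(ptr.cast(), layout) };
    len
};

fn main() {
    assert_eq!(LEN, size_of::<u64>());
}
```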