// SPDX-License-Identifier: Apache-2.0 OR MIT

//! Memory allocation APIs

#![stable(feature = "alloc_module", since = "1.28.0")]

#[cfg(not(test))]
use core::intrinsics;
use core::intrinsics::{min_align_of_val, size_of_val};

use core::ptr::Unique;
#[cfg(not(test))]
use core::ptr::{self, NonNull};

#[stable(feature = "alloc_module", since = "1.28.0")]
#[doc(inline)]
pub use core::alloc::*;

#[cfg(test)]
mod tests;

extern "Rust" {
    // These are the magic symbols to call the global allocator. rustc generates
    // them to call `__rg_alloc` etc. if there is a `#[global_allocator]` attribute
    // (the code expanding that attribute macro generates those functions), or to call
    // the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
    // otherwise.
    // The rustc fork of LLVM 14 and earlier also special-cases these function names
    // to be able to optimize them like `malloc`, `realloc`, and `free`, respectively.
    #[rustc_allocator]
    #[rustc_nounwind]
    fn __rust_alloc(size: usize, align: usize) -> *mut u8;
    #[rustc_deallocator]
    #[rustc_nounwind]
    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
    #[rustc_reallocator]
    #[rustc_nounwind]
    fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
    #[rustc_allocator_zeroed]
    #[rustc_nounwind]
    fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;

    #[cfg(not(bootstrap))]
    static __rust_no_alloc_shim_is_unstable: u8;
}

/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](self#functions).
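///
/// # Examples
///
/// A minimal sketch of using `Global` through the unstable [`Allocator`]
/// trait (nightly-only; `#![feature(allocator_api)]` is assumed):
///
/// ```
/// #![feature(allocator_api)]
///
/// use std::alloc::{Allocator, Global, Layout};
///
/// let layout = Layout::new::<u32>();
/// // `allocate` returns a `NonNull<[u8]>` spanning at least `layout.size()` bytes.
/// let ptr = Global.allocate(layout).expect("allocation failed");
/// // SAFETY: `ptr` denotes a live block allocated by `Global` with `layout`.
/// unsafe { Global.deallocate(ptr.cast(), layout) };
/// ```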
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
#[cfg(not(test))]
pub struct Global;

#[cfg(test)]
pub use std::alloc::Global;

/// Allocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `alloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     *(ptr as *mut u16) = 42;
///     assert_eq!(*(ptr as *mut u16), 42);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        #[cfg(not(bootstrap))]
        core::ptr::read_volatile(&__rust_no_alloc_shim_is_unstable);

        __rust_alloc(layout.size(), layout.align())
    }
}

/// Deallocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `dealloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::dealloc`].
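///
/// # Examples
///
/// A minimal sketch of the expected pairing: the pointer and layout passed
/// here must come from an earlier allocation with the same layout:
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u64>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///     // The pointer and layout passed to `dealloc` must match the allocation.
///     dealloc(ptr, layout);
/// }
/// ```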
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
    unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
}

/// Reallocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::realloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `realloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::realloc`].
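///
/// # Examples
///
/// A minimal sketch of growing a 16-byte allocation to 32 bytes. Note that
/// `layout` must describe the *existing* allocation; only `new_size` is new:
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, realloc, Layout};
///
/// unsafe {
///     let layout = Layout::array::<u8>(16).unwrap();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     // Grow the block from 16 to 32 bytes, keeping the original alignment.
///     let new_size = 32;
///     let new_layout = Layout::from_size_align(new_size, layout.align()).unwrap();
///     let new_ptr = realloc(ptr, layout, new_size);
///     if new_ptr.is_null() {
///         handle_alloc_error(new_layout);
///     }
///
///     // After a successful `realloc`, the block must be freed with the new layout.
///     dealloc(new_ptr, new_layout);
/// }
/// ```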
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
    unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
}

/// Allocate zero-initialized memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `alloc_zeroed` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc_zeroed`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc_zeroed, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc_zeroed(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     assert_eq!(*(ptr as *mut u16), 0);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
    unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
}

#[cfg(not(test))]
impl Global {
    #[inline]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
            // SAFETY: `layout` is non-zero in size.
            size => unsafe {
                let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    // SAFETY: Same as `Allocator::grow`
    #[inline]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero, as it is greater than or equal to `old_size`,
            // which is non-zero in this arm, as required by safety conditions. Other
            // conditions must be upheld by the caller
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                intrinsics::assume(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
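
// Note that zero-sized requests never reach the global allocator: `alloc_impl`
// hands out the layout's dangling pointer for them, and `deallocate` ignores
// such pointers, since the `Allocator` contract (unlike `GlobalAlloc`'s)
// supports zero-sized allocations.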
#[unstable(feature = "allocator_api", issue = "32838")]
#[cfg(not(test))]
unsafe impl Allocator for Global {
    #[inline]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size,
            // other conditions must be upheld by the caller
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    #[inline]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                intrinsics::assume(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}

/// The allocator for unique pointers.
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[lang = "exchange_malloc"]
#[inline]
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
    let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
    match Global.allocate(layout) {
        Ok(ptr) => ptr.as_mut_ptr(),
        Err(_) => handle_alloc_error(layout),
    }
}

#[cfg_attr(not(test), lang = "box_free")]
#[inline]
// This signature has to be the same as `Box`'s, otherwise an ICE will happen.
// When an additional parameter to `Box` is added (like `A: Allocator`), this has to be
// added here as well.
// For example, if `Box` is changed to `struct Box<T: ?Sized, A: Allocator>(Unique<T>, A)`,
// this function has to be changed to `fn box_free<T: ?Sized, A: Allocator>(Unique<T>, A)` as well.
pub(crate) unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A) {
    unsafe {
        let size = size_of_val(ptr.as_ref());
        let align = min_align_of_val(ptr.as_ref());
        let layout = Layout::from_size_align_unchecked(size, align);
        alloc.deallocate(From::from(ptr.cast()), layout)
    }
}

// # Allocation error handler

#[cfg(not(no_global_oom_handling))]
extern "Rust" {
    // This is the magic symbol to call the global alloc error handler. rustc generates
    // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
    // default implementations below (`__rdl_oom`) otherwise.
    fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}

/// Abort on memory allocation failure.
///
/// Callers of memory allocation APIs wishing to abort computation
/// in response to an allocation error are encouraged to call this function,
/// rather than directly invoking `panic!` or similar.
///
/// The default behavior of this function is to print a message to standard error
/// and abort the process.
/// It can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
///
/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
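///
/// # Examples
///
/// A minimal sketch of the intended usage pattern, diverging via
/// `handle_alloc_error` when the global allocator returns null:
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<[u8; 64]>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         // Never returns: invokes the allocation error handler.
///         handle_alloc_error(layout);
///     }
///     dealloc(ptr, layout);
/// }
/// ```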
#[stable(feature = "global_alloc", since = "1.28.0")]
#[rustc_const_unstable(feature = "const_alloc_error", issue = "92523")]
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[cold]
pub const fn handle_alloc_error(layout: Layout) -> ! {
    const fn ct_error(_: Layout) -> ! {
        panic!("allocation failed");
    }

    fn rt_error(layout: Layout) -> ! {
        unsafe {
            __rust_alloc_error_handler(layout.size(), layout.align());
        }
    }

    unsafe { core::intrinsics::const_eval_select((layout,), ct_error, rt_error) }
}

// For the alloc test suite, `std::alloc::handle_alloc_error` can be used directly.
#[cfg(all(not(no_global_oom_handling), test))]
pub use std::alloc::handle_alloc_error;

#[cfg(all(not(no_global_oom_handling), not(test)))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
pub mod __alloc_error_handler {
    // Called via the generated `__rust_alloc_error_handler` if there is no
    // `#[alloc_error_handler]`.
    #[rustc_std_internal_symbol]
    pub unsafe fn __rdl_oom(size: usize, _align: usize) -> ! {
        extern "Rust" {
            // This symbol is emitted by rustc next to `__rust_alloc_error_handler`.
            // Its value depends on the `-Zoom={panic,abort}` compiler option.
            static __rust_alloc_error_handler_should_panic: u8;
        }

        #[allow(unused_unsafe)]
        if unsafe { __rust_alloc_error_handler_should_panic != 0 } {
            panic!("memory allocation of {size} bytes failed")
        } else {
            core::panicking::panic_nounwind_fmt(format_args!(
                "memory allocation of {size} bytes failed"
            ))
        }
    }
}

/// Specialize clones into pre-allocated, uninitialized memory.
/// Used by `Box::clone` and `Rc`/`Arc::make_mut`.
pub(crate) trait WriteCloneIntoRaw: Sized {
    unsafe fn write_clone_into_raw(&self, target: *mut Self);
}

impl<T: Clone> WriteCloneIntoRaw for T {
    #[inline]
    default unsafe fn write_clone_into_raw(&self, target: *mut Self) {
        // Having allocated *first* may allow the optimizer to create
        // the cloned value in-place, skipping the local and move.
        unsafe { target.write(self.clone()) };
    }
}

impl<T: Copy> WriteCloneIntoRaw for T {
    #[inline]
    unsafe fn write_clone_into_raw(&self, target: *mut Self) {
        // We can always copy in-place, without ever involving a local value.
        unsafe { target.copy_from_nonoverlapping(self, 1) };
    }
}