// SPDX-License-Identifier: Apache-2.0 OR MIT

//! Memory allocation APIs

#![stable(feature = "alloc_module", since = "1.28.0")]

#[cfg(not(test))]
use core::intrinsics;

#[cfg(not(test))]
use core::ptr::{self, NonNull};

#[stable(feature = "alloc_module", since = "1.28.0")]
#[doc(inline)]
pub use core::alloc::*;

#[cfg(test)]
mod tests;

extern "Rust" {
    // These are the magic symbols to call the global allocator. rustc generates
    // them to call `__rg_alloc` etc. if there is a `#[global_allocator]` attribute
    // (the code expanding that attribute macro generates those functions), or to call
    // the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
    // otherwise.
    // The rustc fork of LLVM 14 and earlier also special-cases these function names to be
    // able to optimize them like `malloc`, `realloc`, and `free`, respectively.
    #[rustc_allocator]
    #[rustc_nounwind]
    fn __rust_alloc(size: usize, align: usize) -> *mut u8;
    #[rustc_deallocator]
    #[rustc_nounwind]
    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
    #[rustc_reallocator]
    #[rustc_nounwind]
    fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
    #[rustc_allocator_zeroed]
    #[rustc_nounwind]
    fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;

    static __rust_no_alloc_shim_is_unstable: u8;
}

/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](self#functions).
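///
/// # Examples
///
/// A minimal sketch of allocating and freeing through the unstable
/// [`Allocator`] API (the element type and value here are illustrative):
///
/// ```
/// #![feature(allocator_api)]
///
/// use std::alloc::{Allocator, Global, Layout};
///
/// let layout = Layout::new::<u32>();
/// // `allocate` returns a `NonNull<[u8]>` of at least `layout.size()` bytes.
/// let ptr = Global.allocate(layout).expect("allocation failed");
///
/// unsafe {
///     // SAFETY: `ptr` is valid for writes of `layout.size()` suitably aligned bytes.
///     ptr.cast::<u32>().as_ptr().write(42);
///     assert_eq!(ptr.cast::<u32>().as_ptr().read(), 42);
///     // SAFETY: `ptr` was allocated by `Global` with this `layout`.
///     Global.deallocate(ptr.cast(), layout);
/// }
/// ```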
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
#[cfg(not(test))]
pub struct Global;

#[cfg(test)]
pub use std::alloc::Global;

/// Allocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `alloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     *(ptr as *mut u16) = 42;
///     assert_eq!(*(ptr as *mut u16), 42);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        core::ptr::read_volatile(&__rust_no_alloc_shim_is_unstable);

        __rust_alloc(layout.size(), layout.align())
    }
}

/// Deallocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `dealloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::dealloc`].
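///
/// # Examples
///
/// A sketch of the usual pairing with [`alloc`], mirroring the example there:
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     // SAFETY: `ptr` was allocated by the global allocator with `layout`.
///     dealloc(ptr, layout);
/// }
/// ```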
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
    unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
}

/// Reallocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::realloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `realloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::realloc`].
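///
/// # Examples
///
/// A sketch of growing an allocation (the sizes are illustrative; `realloc`
/// returns a null pointer on failure, which must be handled):
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, realloc, Layout};
///
/// unsafe {
///     let layout = Layout::array::<u8>(16).unwrap();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     // Grow the block from 16 to 32 bytes. On success, ownership moves to
///     // `new_ptr` and the block's layout becomes `new_layout`; on failure,
///     // the old block is left untouched.
///     let new_layout = Layout::from_size_align(32, layout.align()).unwrap();
///     let new_ptr = realloc(ptr, layout, new_layout.size());
///     if new_ptr.is_null() {
///         dealloc(ptr, layout);
///         handle_alloc_error(new_layout);
///     }
///
///     // SAFETY: the block now has layout `new_layout`.
///     dealloc(new_ptr, new_layout);
/// }
/// ```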
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
    unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
}

/// Allocate zero-initialized memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `alloc_zeroed` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc_zeroed`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc_zeroed, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc_zeroed(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     assert_eq!(*(ptr as *mut u16), 0);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
    unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
}

#[cfg(not(test))]
impl Global {
    #[inline]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
            // SAFETY: `layout` is non-zero in size; other conditions must be
            // upheld by the caller.
            size => unsafe {
                let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    // SAFETY: Same as `Allocator::grow`
    #[inline]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero, as it is greater than or equal to
            // `old_size` and `old_size` is non-zero in this arm. Other conditions
            // must be upheld by the caller.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                intrinsics::assume(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
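
/// This impl forwards to the free functions above. A sketch of growing an
/// allocation through the unstable `allocator_api` (the layouts below are
/// illustrative):
///
/// ```
/// #![feature(allocator_api)]
///
/// use std::alloc::{Allocator, Global, Layout};
///
/// let old_layout = Layout::array::<u8>(16).unwrap();
/// let new_layout = Layout::array::<u8>(32).unwrap();
///
/// let ptr = Global.allocate(old_layout).expect("allocation failed");
/// unsafe {
///     // SAFETY: `ptr` was allocated by `Global` with `old_layout`, and
///     // `new_layout.size() >= old_layout.size()`.
///     let grown = Global.grow(ptr.cast(), old_layout, new_layout).expect("grow failed");
///     // SAFETY: after a successful `grow`, the block has `new_layout`.
///     Global.deallocate(grown.cast(), new_layout);
/// }
/// ```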
#[unstable(feature = "allocator_api", issue = "32838")]
#[cfg(not(test))]
unsafe impl Allocator for Global {
    #[inline]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size,
            // other conditions must be upheld by the caller
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    #[inline]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                intrinsics::assume(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}

/// The allocator for unique pointers.
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[lang = "exchange_malloc"]
#[inline]
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
    let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
    match Global.allocate(layout) {
        Ok(ptr) => ptr.as_mut_ptr(),
        Err(_) => handle_alloc_error(layout),
    }
}

// # Allocation error handler

#[cfg(not(no_global_oom_handling))]
extern "Rust" {
    // This is the magic symbol to call the global alloc error handler. rustc generates
    // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
    // default implementations below (`__rdl_oom`) otherwise.
    fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}

/// Abort on memory allocation error or failure.
///
/// Callers of memory allocation APIs wishing to abort computation
/// in response to an allocation error are encouraged to call this function,
/// rather than directly invoking `panic!` or similar.
///
/// The default behavior of this function is to print a message to standard error
/// and abort the process.
/// It can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
///
/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
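///
/// # Examples
///
/// A sketch of the intended calling pattern (the small allocation here normally
/// succeeds, so the handler is not actually invoked):
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u64>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         // Diverges: invokes the registered (or default) handler and aborts.
///         handle_alloc_error(layout);
///     }
///     dealloc(ptr, layout);
/// }
/// ```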
#[stable(feature = "global_alloc", since = "1.28.0")]
#[rustc_const_unstable(feature = "const_alloc_error", issue = "92523")]
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[cold]
pub const fn handle_alloc_error(layout: Layout) -> ! {
    const fn ct_error(_: Layout) -> ! {
        panic!("allocation failed");
    }

    fn rt_error(layout: Layout) -> ! {
        unsafe {
            __rust_alloc_error_handler(layout.size(), layout.align());
        }
    }

    unsafe { core::intrinsics::const_eval_select((layout,), ct_error, rt_error) }
}

// For alloc test `std::alloc::handle_alloc_error` can be used directly.
#[cfg(all(not(no_global_oom_handling), test))]
pub use std::alloc::handle_alloc_error;

#[cfg(all(not(no_global_oom_handling), not(test)))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
pub mod __alloc_error_handler {
    // Called via the generated `__rust_alloc_error_handler` if there is no
    // `#[alloc_error_handler]`.
    #[rustc_std_internal_symbol]
    pub unsafe fn __rdl_oom(size: usize, _align: usize) -> ! {
        extern "Rust" {
            // This symbol is emitted by rustc next to `__rust_alloc_error_handler`.
            // Its value depends on the `-Zoom={panic,abort}` compiler option.
            static __rust_alloc_error_handler_should_panic: u8;
        }

        if unsafe { __rust_alloc_error_handler_should_panic != 0 } {
            panic!("memory allocation of {size} bytes failed")
        } else {
            core::panicking::panic_nounwind_fmt(format_args!(
                "memory allocation of {size} bytes failed"
            ))
        }
    }
}

/// Specialize clones into pre-allocated, uninitialized memory.
/// Used by `Box::clone` and `Rc`/`Arc::make_mut`.
pub(crate) trait WriteCloneIntoRaw: Sized {
    unsafe fn write_clone_into_raw(&self, target: *mut Self);
}

impl<T: Clone> WriteCloneIntoRaw for T {
    #[inline]
    default unsafe fn write_clone_into_raw(&self, target: *mut Self) {
        // Having allocated *first* may allow the optimizer to create
        // the cloned value in-place, skipping the local and move.
        unsafe { target.write(self.clone()) };
    }
}

impl<T: Copy> WriteCloneIntoRaw for T {
    #[inline]
    unsafe fn write_clone_into_raw(&self, target: *mut Self) {
        // We can always copy in-place, without ever involving a local value.
        unsafe { target.copy_from_nonoverlapping(self, 1) };
    }
}