@@ -1,12 +1,12 @@
-use core::alloc::Allocator;
+use core::alloc::{AllocError, Allocator};
 use core::cell::UnsafeCell;
 use core::marker::PhantomData;
 use core::mem;
 use core::num::NonZeroUsize;
 use core::ops::{Deref, DerefMut};
 use core::ptr::{self, NonNull};
 
-use crate::raw_rc::rc_layout::RcLayout;
+use crate::raw_rc::rc_layout::{RcLayout, RcLayoutExt};
 use crate::raw_rc::rc_value_pointer::RcValuePointer;
 use crate::raw_rc::{RefCounter, RefCounts};
 
@@ -355,3 +355,116 @@ where
         unsafe { RcValuePointer::new(self.ptr.cast()) }
     }
 }
+
+impl<T, A> RawWeak<T, A> {
+    pub(crate) fn new_dangling() -> Self
+    where
+        A: Default,
+    {
+        Self::new_dangling_in(A::default())
+    }
+
+    pub(crate) const fn new_dangling_in(alloc: A) -> Self {
+        unsafe { Self::from_raw_parts(NonNull::without_provenance(DANGLING_WEAK_ADDRESS), alloc) }
+    }
+
+    pub(crate) fn try_new_uninit<const STRONG_COUNT: usize>() -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        super::try_allocate_uninit::<A, STRONG_COUNT>(T::RC_LAYOUT)
+            .map(|(ptr, alloc)| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) })
+    }
+
+    pub(crate) fn try_new_uninit_in<const STRONG_COUNT: usize>(alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        super::try_allocate_uninit_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT)
+            .map(|ptr| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) })
+    }
+
+    pub(crate) fn try_new_zeroed<const STRONG_COUNT: usize>() -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        super::try_allocate_zeroed::<A, STRONG_COUNT>(T::RC_LAYOUT)
+            .map(|(ptr, alloc)| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) })
+    }
+
+    pub(crate) fn try_new_zeroed_in<const STRONG_COUNT: usize>(alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        super::try_allocate_zeroed_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT)
+            .map(|ptr| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) })
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_uninit<const STRONG_COUNT: usize>() -> Self
+    where
+        A: Allocator + Default,
+    {
+        let (ptr, alloc) = super::allocate_uninit::<A, STRONG_COUNT>(T::RC_LAYOUT);
+
+        unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_uninit_in<const STRONG_COUNT: usize>(alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe {
+            Self::from_raw_parts(
+                super::allocate_uninit_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT).as_ptr().cast(),
+                alloc,
+            )
+        }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_zeroed<const STRONG_COUNT: usize>() -> Self
+    where
+        A: Allocator + Default,
+    {
+        let (ptr, alloc) = super::allocate_zeroed::<A, STRONG_COUNT>(T::RC_LAYOUT);
+
+        unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_zeroed_in<const STRONG_COUNT: usize>(alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe {
+            Self::from_raw_parts(
+                super::allocate_zeroed_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT).as_ptr().cast(),
+                alloc,
+            )
+        }
+    }
+
+    /// Consumes the `RawWeak` object and returns the contained value, assuming the value is
+    /// initialized.
+    ///
+    /// # Safety
+    ///
+    /// - `self` is non-dangling.
+    /// - The value pointed to by `self` is initialized.
+    /// - The strong reference count is zero.
+    pub(super) unsafe fn assume_init_into_inner<R>(mut self) -> T
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        unsafe {
+            let result = self.ptr.read();
+
+            self.drop_unchecked::<R>();
+
+            result
+        }
+    }
+}
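
For context on the `STRONG_COUNT` const parameter threaded through the constructors above: each allocation helper writes the initial strong count into the reference-count header, so a single code path can produce either an allocation with no live strong references yet (`STRONG_COUNT = 0`, leaving the value to be initialized before the first strong reference exists) or one backing a freshly created strong pointer (`STRONG_COUNT = 1`). Below is a minimal, self-contained sketch of that idea; the field order of `RefCounts` and the helper itself are hypothetical stand-ins, not the actual `raw_rc` internals.

use std::alloc::{Layout, alloc, handle_alloc_error};
use std::cell::Cell;

// Hypothetical stand-in for `raw_rc::RefCounts`; the real field order
// and representation may differ.
#[repr(C)]
struct RefCounts {
    weak: Cell<usize>,
    strong: Cell<usize>,
}

// Sketch of an allocation helper parameterized over the initial strong
// count, mirroring `allocate_uninit::<A, STRONG_COUNT>` above. Returns
// a pointer to the still-uninitialized value slot behind the header.
unsafe fn allocate_uninit_sketch<T, const STRONG_COUNT: usize>() -> *mut T {
    // Header followed by the value, padded for alignment.
    let (layout, value_offset) =
        Layout::new::<RefCounts>().extend(Layout::new::<T>()).expect("layout overflow");
    let layout = layout.pad_to_align();

    let base = unsafe { alloc(layout) };
    if base.is_null() {
        handle_alloc_error(layout);
    }

    unsafe {
        // One implicit weak reference keeps the allocation itself alive;
        // the strong count starts at whatever the caller requested.
        base.cast::<RefCounts>()
            .write(RefCounts { weak: Cell::new(1), strong: Cell::new(STRONG_COUNT) });
        base.add(value_offset).cast::<T>()
    }
}

// A strong constructor would pass 1 and then initialize the slot:
//     let slot = unsafe { allocate_uninit_sketch::<String, 1>() };
//     unsafe { slot.write(String::from("hi")) };

`assume_init_into_inner` in the diff is the reverse of this construction: with the strong count already at zero, it moves the value out with a raw `read` and then frees the allocation through the weak-side `drop_unchecked` path.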