                    unsafe { T::dec(candidate) };
                }
                // We got a (possibly) different pointer out. But that one is already protected and
                // the slot is paid back.
                unsafe { Self::new(replacement as *mut _, None) }
            }
        }
    }

    #[inline]
    fn as_ptr(&self) -> *const T::Base {
        T::as_ptr(self.ptr.deref())
    }
}

impl<T: RefCnt> Drop for HybridProtection<T> {
    #[inline]
    fn drop(&mut self) {
        match self.debt.take() {
            // We have our own copy of Arc, so we don't need a protection. Do nothing (but release
            // the Arc below).
            None => (),
            // If we owed something, just return the debt. We don't have a pointer owned, so
            // nothing to release.
            Some(debt) => {
                let ptr = T::as_ptr(&self.ptr);
                if debt.pay::<T>(ptr) {
                    return;
                }
                // But if the debt was already paid for us, we need to release the pointer, as we
                // were effectively already in the Unprotected mode.
            }
        }
        // Equivalent to T::dec(ptr)
        unsafe { ManuallyDrop::drop(&mut self.ptr) };
    }
}

impl<T: RefCnt> Protected<T> for HybridProtection<T> {
    #[inline]
    fn from_inner(ptr: T) -> Self {
        Self {
            debt: None,
            ptr: ManuallyDrop::new(ptr),
        }
    }

    #[inline]
    fn into_inner(mut self) -> T {
        // Drop any debt and release any lock held by the given guard and return a
        // full-featured value that can even outlive the ArcSwap it originated from.
        match self.debt.take() {
            None => (), // We have a fully loaded ref-counted pointer.
            Some(debt) => {
                let ptr = T::inc(&self.ptr);
                if !debt.pay::<T>(ptr) {
                    unsafe { T::dec(ptr) };
                }
            }
        }

        // The ptr::read & forget is something like a cheating move. We can't move it out, because
        // we have a destructor and Rust doesn't allow us to do that.
        let inner = unsafe { ptr::read(self.ptr.deref()) };
        mem::forget(self);
        inner
    }
}

impl<T: RefCnt> Borrow<T> for HybridProtection<T> {
    #[inline]
    fn borrow(&self) -> &T {
        &self.ptr
    }
}

pub trait Config {
    // Mostly for testing, a way to disable the fast slot
    const USE_FAST: bool;
}

#[derive(Clone, Default)]
pub struct DefaultConfig;

impl Config for DefaultConfig {
    const USE_FAST: bool = true;
}

#[derive(Clone, Default)]
pub struct HybridStrategy<Cfg> {
    pub(crate) _config: Cfg,
}

impl<T, Cfg> InnerStrategy<T> for HybridStrategy<Cfg>
where
    T: RefCnt,
    Cfg: Config,
{
    type Protected = HybridProtection<T>;

    unsafe fn load(&self, storage: &AtomicPtr<T::Base>) -> Self::Protected {
        LocalNode::with(|node| {
            let fast = if Cfg::USE_FAST {
                HybridProtection::attempt(node, storage)
            } else {
                None
            };
            fast.unwrap_or_else(|| HybridProtection::fallback(node, storage))
        })
    }

    unsafe fn wait_for_readers(&self, old: *const T::Base, storage: &AtomicPtr<T::Base>) {
        // The pay_all may need to provide fresh replacement values if someone else is loading from
        // this particular storage. We do so in the exact same way, by `load`; it's OK, a writer
        // does not hold a slot and the reader doesn't recurse back into the writer, so we won't
        // run out of slots.
        let replacement = || self.load(storage).into_inner();
        Debt::pay_all::<T, _>(old, storage as *const _ as usize, replacement);
    }
}

impl<T, Cfg> CaS<T> for HybridStrategy<Cfg>
where
    T: RefCnt,
    Cfg: Config,
{
    unsafe fn compare_and_swap<C: AsRaw<T::Base>>(
        &self,
        storage: &AtomicPtr<T::Base>,
        current: C,
        new: T,
    ) -> Self::Protected {
        loop {
            let old = <Self as InnerStrategy<T>>::load(self, storage);
            // Observation of their inequality is enough to make a verdict
            if old.as_ptr() != current.as_raw() {
                return old;
            }
            // If they are still equal, put the new one in.
            let new_raw = T::as_ptr(&new);
            if storage
                .compare_exchange_weak(current.as_raw(), new_raw, SeqCst, Relaxed)
                .is_ok()
            {
                // We successfully put the new value in. The ref count went in there too.
                T::into_ptr(new);
                <Self as InnerStrategy<T>>::wait_for_readers(self, old.as_ptr(), storage);
                // We just got one ref count out of the storage and we have one in old. We don't
                // need two.
                T::dec(old.as_ptr());
                return old;
            }
        }
    }
}
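
// ---------------------------------------------------------------------------
// A minimal usage sketch, kept as a test-only illustration rather than as part
// of the strategy itself. It assumes the crate's public `ArcSwap` alias (whose
// default strategy is `HybridStrategy<DefaultConfig>`) together with its
// `from_pointee`, `load`, `store` and `compare_and_swap` methods; those names
// come from the crate's public API rather than from this module, so treat the
// snippet as a sketch of how the code paths above are exercised.
// ---------------------------------------------------------------------------
#[cfg(test)]
mod usage_sketch {
    use std::sync::Arc;

    use crate::ArcSwap;

    #[test]
    fn hybrid_load_store_cas() {
        let shared = ArcSwap::from_pointee(1usize);

        // `load` runs through `InnerStrategy::load` above: the fast debt slot
        // when one is free, the helping fallback otherwise.
        let first = shared.load();
        assert_eq!(**first, 1);

        // `store` swaps the pointer and then calls `wait_for_readers`, so the
        // old value is only dropped once every outstanding debt on it is paid.
        shared.store(Arc::new(2));
        assert_eq!(**shared.load(), 2);

        // `compare_and_swap` exercises the `CaS` implementation above; on
        // success it returns a guard still holding the previous value.
        let current = shared.load();
        let prev = shared.compare_and_swap(&*current, Arc::new(3));
        assert_eq!(**prev, 2);
        assert_eq!(**shared.load(), 3);
    }
}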