pub struct AtomicU8(/* private fields */);

Expand description
Creusot wrapper around std::sync::atomic::AtomicU8.
Implementations§
Source§impl AtomicU8
impl AtomicU8
Source
pub fn new(
val: u8,
sync_view: Ghost<&mut SyncView>,
) -> (Self, Ghost<Perm<AtomicU8>>)
pub fn new( val: u8, sync_view: Ghost<&mut SyncView>, ) -> (Self, Ghost<Perm<AtomicU8>>)
ensures
result.1.val() == FMap::singleton(result.0.get_timestamp(^sync_view), (val, **sync_view))
ensures
*result.1.ward() == result.0
terminates
Source
pub fn into_inner(
self,
own: Ghost<Perm<AtomicU8>>,
sync_view: Ghost<&mut SyncView>,
) -> (u8, Ghost<Timestamp>)
pub fn into_inner( self, own: Ghost<Perm<AtomicU8>>, sync_view: Ghost<&mut SyncView>, ) -> (u8, Ghost<Timestamp>)
Wrapper for std::sync::atomic::AtomicU8::into_inner.
requires
self == *own.ward()
ensures
match own.val().get(*result.1) { Some((v, _)) => result.0 == v, None => false }
ensures
self.get_timestamp(^sync_view) == *result.1
ensures
forall<t> match own.val().get(t) { Some((_, view)) => t <= *result.1 && view <= ^sync_view, None => true }
Source
pub fn refresh(
&mut self,
own: Ghost<&mut Perm<AtomicU8>>,
sync_view: Ghost<&mut SyncView>,
) -> Ghost<Timestamp>
pub fn refresh( &mut self, own: Ghost<&mut Perm<AtomicU8>>, sync_view: Ghost<&mut SyncView>, ) -> Ghost<Timestamp>
Clear the old unusable history, thanks to the full ownership of the atomic.
requires
*self == *own.ward()
ensures
match (*own).val().get(*result) { Some((v, _)) => (^own).val() == FMap::singleton(*result, (v, **sync_view)), None => false }
ensures
self.get_timestamp(^sync_view) == *result
ensures
forall<t> match own.val().get(t) { Some((_, view)) => t <= *result && view <= ^sync_view , None => true }
ensures
*self == ^self
terminates
Source
pub fn compare_exchange<F, Success: Ordering, Failure: Ordering>(
&self,
current: u8,
new: u8,
f: Ghost<F>,
) -> Result<u8, u8>
pub fn compare_exchange<F, Success: Ordering, Failure: Ordering>( &self, current: u8, new: u8, f: Ghost<F>, ) -> Result<u8, u8>
Wrapper for std::sync::atomic::AtomicU8::compare_exchange.
The load and the store are always sequentially consistent.
requires
Success::ORDERING != Ordering::SeqCst::ORDERING
requires
Failure::ORDERING == Ordering::Acquire::ORDERING || Failure::ORDERING == Ordering::Relaxed::ORDERING
requires
forall<c: &mut Committer<Self, $type, _, _>> !c.shot_store() ==> c.ward() == *self ==> c.val_load().deep_model() == current.deep_model() ==> c.val_store() == new ==> f.precondition((Ok(c),)) && (f.postcondition_once((Ok(c),), ()) ==> (^c).shot_store())
requires
forall<c: &Committer<Self, $type, _, _>> !c.shot_store() ==> c.ward() == *self ==> // NOTE: This following line is not present for `weak` c.val_load().deep_model() != current.deep_model() ==> f.precondition((Err(c),))
ensures
match result { Ok(result) => { exists<c: &mut Committer<Self, $type, _, _>> !c.shot_store() && c.ward() == *self && c.val_load().deep_model() == current.deep_model() && c.val_store() == new && result == c.val_load() && f.postcondition_once((Ok(c),), ()) }, Err(result) => { exists<c: &Committer<Self, $type, _, _>> !c.shot_store() && c.ward() == *self && // NOTE: This following line is not present for `weak` c.val_load().deep_model() != current.deep_model() && result == c.val_load() && f.postcondition_once((Err(c),), ()) } }
Source
pub fn compare_exchange_weak<F, Success: Ordering, Failure: Ordering>(
&self,
current: u8,
new: u8,
f: Ghost<F>,
) -> Result<u8, u8>
pub fn compare_exchange_weak<F, Success: Ordering, Failure: Ordering>( &self, current: u8, new: u8, f: Ghost<F>, ) -> Result<u8, u8>
Wrapper for std::sync::atomic::AtomicU8::compare_exchange_weak.
The load and the store are always sequentially consistent.
requires
Success::ORDERING != Ordering::SeqCst::ORDERING
requires
Failure::ORDERING == Ordering::Acquire::ORDERING || Failure::ORDERING == Ordering::Relaxed::ORDERING
requires
forall<c: &mut Committer<Self, $type, _, _>> // TODO: [VL] Wrong permission here (Success == Ordering::RelAcq) !c.shot_store() ==> c.ward() == *self ==> c.val_load().deep_model() == current.deep_model() ==> c.val_store() == new ==> f.precondition((Ok(c),)) && (f.postcondition_once((Ok(c),), ()) ==> (^c).shot_store())
requires
forall<c: &Committer<Self, $type, _, _>> !c.shot_store() ==> c.ward() == *self ==> f.precondition((Err(c),))
ensures
match result { Ok(result) => { exists<c: &mut Committer<Self, $type, _, _>> !c.shot_store() && c.ward() == *self && c.val_load().deep_model() == current.deep_model() && c.val_store() == new && result == c.val_load() && f.postcondition_once((Ok(c),), ()) }, Err(result) => { exists<c: &Committer<Self, $type, _, _>> !c.shot_store() && c.ward() == *self && result == c.val_load() && f.postcondition_once((Err(c),), ()) } }
Source
pub fn load<F, Load: Ordering>(&self, f: Ghost<F>) -> u8
pub fn load<F, Load: Ordering>(&self, f: Ghost<F>) -> u8
Wrapper for std::sync::atomic::AtomicU8::load.
requires
Load::ORDERING == Ordering::Acquire::ORDERING || Load::ORDERING == Ordering::Relaxed::ORDERING
requires
forall<c: &Committer<Self, $type, Load, Ordering::None>> !c.shot_store() ==> c.ward() == *self ==> f.precondition((c,))
ensures
exists<c: &Committer<Self, $type, Load, Ordering::None>> !c.shot_store() && c.ward() == *self && c.val_load() == result && f.postcondition_once((c,), ())
Source
pub fn store<F, Store: Ordering>(&self, val: u8, f: Ghost<F>)
pub fn store<F, Store: Ordering>(&self, val: u8, f: Ghost<F>)
Wrapper for std::sync::atomic::AtomicU8::store.
requires
Store::ORDERING == Ordering::Release::ORDERING || Store::ORDERING == Ordering::Relaxed::ORDERING
requires
forall<c: &mut Committer<Self, $type, Ordering::None, Store>> !c.shot_store() ==> c.ward() == *self ==> c.val_store() == val ==> f.precondition((c,)) && (f.postcondition_once((c,), ()) ==> (^c).shot_store())
ensures
exists<c: &mut Committer<Self, $type, Ordering::None, Store>> !c.shot_store() && c.ward() == *self && c.val_store() == val && f.postcondition_once((c,), ())
Source§impl AtomicU8
impl AtomicU8
Source
pub fn fetch_add<F, Ord: Ordering>(&self, val: u8, f: Ghost<F>) -> u8
pub fn fetch_add<F, Ord: Ordering>(&self, val: u8, f: Ghost<F>) -> u8
Wrapper for std::sync::atomic::AtomicU8::fetch_add.
requires
forall<c: &mut Committer<Self, $int_type, Ord::Load, Ord::Store>> !c.shot_store() ==> c.ward() == *self ==> c.val_store() == val + c.val_load() ==> f.precondition((c,)) && (f.postcondition_once((c,), ()) ==> (^c).shot_store())
ensures
exists<c: &mut Committer<Self, $int_type, Ord::Load, Ord::Store>> !c.shot_store() && c.ward() == *self && c.val_store() == val + c.val_load() && c.val_load() == result && f.postcondition_once((c,), ())
Trait Implementations§
Source§impl HasTimestamp for AtomicU8
impl HasTimestamp for AtomicU8
Source§
fn get_timestamp(self, _: SyncView) -> Timestamp
fn get_timestamp(self, _: SyncView) -> Timestamp
logic(opaque) ⚠
Source§
fn get_timestamp_monotonic(self, x: SyncView, y: SyncView)
fn get_timestamp_monotonic(self, x: SyncView, y: SyncView)
logic(law) ⚠
requires
x <= y
ensures
self.get_timestamp(x) <= self.get_timestamp(y)