Skip to main content

AtomicU32

Struct AtomicU32 

Source
pub struct AtomicU32(/* private fields */);
Expand description

Creusot wrapper around std::sync::atomic::AtomicU32.

Implementations§

Source§

impl AtomicU32

Source

pub fn new( val: u32, sync_view: Ghost<&mut SyncView>, ) -> (Self, Ghost<Perm<AtomicU32>>)

ensures

result.1.val() == FMap::singleton(result.0.get_timestamp(^sync_view), (val, **sync_view))

ensures

*result.1.ward() == result.0

terminates

Source

pub fn into_inner( self, own: Ghost<Perm<AtomicU32>>, sync_view: Ghost<&mut SyncView>, ) -> (u32, Ghost<Timestamp>)

Wrapper for std::sync::atomic::AtomicU32::into_inner.

requires

self == *own.ward()

ensures

match own.val().get(*result.1) { Some((v, _)) => result.0 == v, None => false }

ensures

self.get_timestamp(^sync_view) == *result.1

ensures

forall<t> match own.val().get(t) {
    Some((_, view)) => t <= *result.1 && view <= ^sync_view,
    None => true
}
Source

pub fn refresh( &mut self, own: Ghost<&mut Perm<AtomicU32>>, sync_view: Ghost<&mut SyncView>, ) -> Ghost<Timestamp>

Clears the old, no-longer-usable history; this is possible thanks to full ownership of the atomic.

requires

*self == *own.ward()

ensures

match (*own).val().get(*result) {
    Some((v, _)) => (^own).val() == FMap::singleton(*result, (v, **sync_view)),
    None => false
}

ensures

self.get_timestamp(^sync_view) == *result

ensures

forall<t> match own.val().get(t) {
    Some((_, view)) => t <= *result && view <= ^sync_view,
    None => true
}

ensures

*self == ^self

terminates

Source

pub fn compare_exchange<F, Success: Ordering, Failure: Ordering>( &self, current: u32, new: u32, f: Ghost<F>, ) -> Result<u32, u32>
where F: FnGhost + FnOnce(Result<&mut Committer<Self, u32, Success::Load, Success::Store>, &Committer<Self, u32, Failure, None>>),

Wrapper for std::sync::atomic::AtomicU32::compare_exchange.

The load and the store are always sequentially consistent.

requires

Success::ORDERING != Ordering::SeqCst::ORDERING

requires

Failure::ORDERING == Ordering::Acquire::ORDERING || Failure::ORDERING == Ordering::Relaxed::ORDERING

requires

forall<c: &mut Committer<Self, $type, _, _>>
    !c.shot_store() ==> c.ward() == *self ==>
    c.val_load().deep_model() == current.deep_model() ==>
    c.val_store() == new ==>
    f.precondition((Ok(c),)) && (f.postcondition_once((Ok(c),), ()) ==> (^c).shot_store())

requires

forall<c: &Committer<Self, $type, _, _>>
    !c.shot_store() ==> c.ward() == *self ==>
    // NOTE: The following line is not present for the `weak` variant (`compare_exchange_weak`)
    c.val_load().deep_model() != current.deep_model() ==>
    f.precondition((Err(c),))

ensures

match result {
    Ok(result) => {
        exists<c: &mut Committer<Self, $type, _, _>>
            !c.shot_store() && c.ward() == *self &&
            c.val_load().deep_model() == current.deep_model() &&
            c.val_store() == new &&
            result == c.val_load() &&
            f.postcondition_once((Ok(c),), ())
    },
    Err(result) => {
       exists<c: &Committer<Self, $type, _, _>>
            !c.shot_store() && c.ward() == *self &&
            // NOTE: The following line is not present for the `weak` variant (`compare_exchange_weak`)
            c.val_load().deep_model() != current.deep_model() &&
            result == c.val_load() &&
            f.postcondition_once((Err(c),), ())
    }
}
Source

pub fn compare_exchange_weak<F, Success: Ordering, Failure: Ordering>( &self, current: u32, new: u32, f: Ghost<F>, ) -> Result<u32, u32>
where F: FnGhost + FnOnce(Result<&mut Committer<Self, u32, Success::Load, Success::Store>, &Committer<Self, u32, Failure, None>>),

Wrapper for std::sync::atomic::AtomicU32::compare_exchange_weak.

The load and the store are always sequentially consistent.

requires

Success::ORDERING != Ordering::SeqCst::ORDERING

requires

Failure::ORDERING == Ordering::Acquire::ORDERING || Failure::ORDERING == Ordering::Relaxed::ORDERING

requires

forall<c: &mut Committer<Self, $type, _, _>> // TODO: [VL] Wrong permission here (Success == Ordering::RelAcq)
    !c.shot_store() ==> c.ward() == *self ==>
    c.val_load().deep_model() == current.deep_model() ==>
    c.val_store() == new ==>
    f.precondition((Ok(c),)) && (f.postcondition_once((Ok(c),), ()) ==> (^c).shot_store())

requires

forall<c: &Committer<Self, $type, _, _>>
    !c.shot_store() ==> c.ward() == *self ==>
    f.precondition((Err(c),))

ensures

match result {
    Ok(result) => {
        exists<c: &mut Committer<Self, $type, _, _>>
            !c.shot_store() && c.ward() == *self &&
            c.val_load().deep_model() == current.deep_model() &&
            c.val_store() == new &&
            result == c.val_load() &&
            f.postcondition_once((Ok(c),), ())
    },
    Err(result) => {
       exists<c: &Committer<Self, $type, _, _>>
            !c.shot_store() && c.ward() == *self &&
            result == c.val_load() &&
            f.postcondition_once((Err(c),), ())
    }
}
Source

pub fn load<F, Load: Ordering>(&self, f: Ghost<F>) -> u32
where F: FnGhost + FnOnce(&Committer<Self, u32, Load, None>),

Wrapper for std::sync::atomic::AtomicU32::load.

requires

Load::ORDERING == Ordering::Acquire::ORDERING || Load::ORDERING == Ordering::Relaxed::ORDERING

requires

forall<c: &Committer<Self, $type, Load, Ordering::None>>
    !c.shot_store() ==> c.ward() == *self ==> f.precondition((c,))

ensures

exists<c: &Committer<Self, $type, Load, Ordering::None>>
    !c.shot_store() && c.ward() == *self && c.val_load() == result && f.postcondition_once((c,), ())
Source

pub fn store<F, Store: Ordering>(&self, val: u32, f: Ghost<F>)
where F: FnGhost + FnOnce(&mut Committer<Self, u32, None, Store>),

Wrapper for std::sync::atomic::AtomicU32::store.

requires

Store::ORDERING == Ordering::Release::ORDERING || Store::ORDERING == Ordering::Relaxed::ORDERING

requires

forall<c: &mut Committer<Self, $type, Ordering::None, Store>>
    !c.shot_store() ==> c.ward() == *self ==> c.val_store() == val ==>
    f.precondition((c,)) && (f.postcondition_once((c,), ()) ==> (^c).shot_store())

ensures

exists<c: &mut Committer<Self, $type, Ordering::None, Store>>
    !c.shot_store() && c.ward() == *self && c.val_store() == val &&
    f.postcondition_once((c,), ())
Source§

impl AtomicU32

Source

pub fn fetch_add<F, Ord: Ordering>(&self, val: u32, f: Ghost<F>) -> u32
where F: FnGhost + FnOnce(&mut Committer<Self, u32, Ord::Load, Ord::Store>),

Wrapper for std::sync::atomic::AtomicU32::fetch_add.

requires

forall<c: &mut Committer<Self, $int_type, Ord::Load, Ord::Store>>
    !c.shot_store() ==> c.ward() == *self ==> c.val_store() == val + c.val_load() ==>
    f.precondition((c,)) && (f.postcondition_once((c,), ()) ==> (^c).shot_store())

ensures

exists<c: &mut Committer<Self, $int_type, Ord::Load, Ord::Store>>
    !c.shot_store() && c.ward() == *self && c.val_store() == val + c.val_load() &&
    c.val_load() == result && f.postcondition_once((c,), ())

Trait Implementations§

Source§

impl HasTimestamp for AtomicU32

Source§

fn get_timestamp(self, _: SyncView) -> Timestamp

logic(opaque)

Source§

fn get_timestamp_monotonic(self, x: SyncView, y: SyncView)

logic(law)

requires

x <= y

ensures

self.get_timestamp(x) <= self.get_timestamp(y)

Source§

impl PermTarget for AtomicU32

Source§

type Value<'a> = FMap<Int, (u32, SyncView)> where Self: 'a

Source§

type PermPayload = ()

Source§

fn is_disjoint( &self, _self_val: Self::Value<'_>, other: &Self, _other_val: Self::Value<'_>, ) -> bool

logic(open, inline) Read more

Auto Trait Implementations§

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.