Make some linked list operations unsafe, and document their safety at usage

Emil Fresk 2023-02-18 09:43:06 +01:00 committed by Henrik Tjäder
parent 002d0b0d16
commit 1cda61fbda
6 changed files with 66 additions and 40 deletions


@@ -54,7 +54,8 @@ impl<T> Arbiter<T> {
pub async fn access(&self) -> ExclusiveAccess<'_, T> {
let mut link_ptr: Option<Link<Waker>> = None;
// Make this future `Drop`-safe, also shadow the original definition so we can't abuse it.
// Make this future `Drop`-safe.
// SAFETY(link_ptr): Shadow the original definition of `link_ptr` so we can't abuse it.
let mut link_ptr = LinkPtr(&mut link_ptr as *mut Option<Link<Waker>>);
let mut link_ptr2 = link_ptr.clone();
@@ -89,10 +90,13 @@ impl<T> Arbiter<T> {
// Place the link in the wait queue on first run.
let link_ref = link.insert(Link::new(cx.waker().clone()));
// SAFETY: The address to the link is stable as it is hidden behind
// `link_ptr`, and `link_ptr` shadows the original making it unmovable.
self.wait_queue
.push(unsafe { Pin::new_unchecked(link_ref) });
// SAFETY(new_unchecked): The address to the link is stable as it is defined
// outside this stack frame.
// SAFETY(push): `link_ref` lifetime comes from `link_ptr` that is shadowed,
// and we make sure in `dropper` that the link is removed from the queue
// before dropping `link_ptr` AND `dropper` makes sure that the shadowed
// `link_ptr` lives until the end of the stack frame.
unsafe { self.wait_queue.push(Pin::new_unchecked(link_ref)) };
}
Poll::Pending
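For orientation, a minimal self-contained sketch of the drop-order argument these SAFETY comments rely on (not part of this diff; `OnDrop` and `drop_order_sketch` are illustrative stand-ins for the crate's `dropper` machinery): the guard is declared after the link storage, so it is dropped first and can unlink the node while the storage is still alive.

    struct OnDrop<F: FnOnce()>(Option<F>);

    impl<F: FnOnce()> Drop for OnDrop<F> {
        fn drop(&mut self) {
            if let Some(f) = self.0.take() {
                f();
            }
        }
    }

    fn drop_order_sketch() {
        // Storage that a queue may point into; it must outlive its time in the queue.
        let mut slot: Option<u32> = None;
        // Shadow the binding behind a raw pointer so the original can no longer be
        // moved or re-borrowed by accident (the `link_ptr` trick above).
        let slot = &mut slot as *mut Option<u32>;
        let slot_for_guard = slot;

        // Declared after the storage, so it runs *before* the storage is freed.
        let _unlink_on_drop = OnDrop(Some(move || {
            // SAFETY: points into this same frame, which is still alive here.
            if let Some(val) = unsafe { &mut *slot_for_guard } {
                // ... this is where the link would be removed from the queue ...
                let _ = val;
            }
        }));

        // ... the real future writes a Link into the slot and pushes it here ...
        let _ = slot;
    }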


@@ -240,7 +240,8 @@ impl<'a, T, const N: usize> Sender<'a, T, N> {
pub async fn send(&mut self, val: T) -> Result<(), NoReceiver<T>> {
let mut link_ptr: Option<Link<Waker>> = None;
// Make this future `Drop`-safe, also shadow the original definition so we can't abuse it.
// Make this future `Drop`-safe.
// SAFETY(link_ptr): Shadow the original definition of `link_ptr` so we can't abuse it.
let mut link_ptr = LinkPtr(&mut link_ptr as *mut Option<Link<Waker>>);
let mut link_ptr2 = link_ptr.clone();
@@ -276,11 +277,13 @@ impl<'a, T, const N: usize> Sender<'a, T, N> {
// Place the link in the wait queue on first run.
let link_ref = link.insert(Link::new(cx.waker().clone()));
// SAFETY: The address to the link is stable as it is hidden behind
// `link_ptr`, and `link_ptr` shadows the original making it unmovable.
self.0
.wait_queue
.push(unsafe { Pin::new_unchecked(link_ref) });
// SAFETY(new_unchecked): The address to the link is stable as it is defined
// outside this stack frame.
// SAFETY(push): `link_ref` lifetime comes from `link_ptr` that is shadowed,
// and we make sure in `dropper` that the link is removed from the queue
// before dropping `link_ptr` AND `dropper` makes sure that the shadowed
// `link_ptr` lives until the end of the stack frame.
unsafe { self.0.wait_queue.push(Pin::new_unchecked(link_ref)) };
return None;
}


@@ -4,6 +4,10 @@
#![deny(missing_docs)]
//deny_warnings_placeholder_for_ci
#[cfg(test)]
#[macro_use]
extern crate std;
pub mod dropper;
pub mod wait_queue;
pub mod waker_registration;


@@ -68,7 +68,9 @@ impl<T: Clone> LinkedList<T> {
}
/// Put an element at the back of the queue.
pub fn push(&self, link: Pin<&mut Link<T>>) {
///
/// SAFETY: The link must live until it is removed from the queue.
pub unsafe fn push(&self, link: Pin<&Link<T>>) {
cs::with(|_| {
// Make sure all previous writes are visible
core::sync::atomic::fence(Ordering::SeqCst);
@@ -76,17 +78,17 @@ impl<T: Clone> LinkedList<T> {
let tail = self.tail.load(Self::R);
// SAFETY: This datastructure does not move the underlying value.
let link = unsafe { link.get_unchecked_mut() };
let link = link.get_ref();
if let Some(tail_ref) = unsafe { tail.as_ref() } {
// Queue is not empty
link.prev.store(tail, Self::R);
self.tail.store(link, Self::R);
tail_ref.next.store(link, Self::R);
self.tail.store(link as *const _ as *mut _, Self::R);
tail_ref.next.store(link as *const _ as *mut _, Self::R);
} else {
// Queue is empty
self.tail.store(link, Self::R);
self.head.store(link, Self::R);
self.tail.store(link as *const _ as *mut _, Self::R);
self.head.store(link as *const _ as *mut _, Self::R);
}
});
}
@@ -126,7 +128,7 @@ impl<T: Clone> Link<T> {
}
/// Remove this link from a linked list.
pub fn remove_from_list(&mut self, list: &LinkedList<T>) {
pub fn remove_from_list(&self, list: &LinkedList<T>) {
cs::with(|_| {
// Make sure all previous writes are visible
core::sync::atomic::fence(Ordering::SeqCst);
@@ -230,17 +232,17 @@ mod tests {
fn linked_list() {
let wq = LinkedList::<u32>::new();
let mut i1 = Link::new(10);
let mut i2 = Link::new(11);
let mut i3 = Link::new(12);
let mut i4 = Link::new(13);
let mut i5 = Link::new(14);
let i1 = Link::new(10);
let i2 = Link::new(11);
let i3 = Link::new(12);
let i4 = Link::new(13);
let i5 = Link::new(14);
wq.push(unsafe { Pin::new_unchecked(&mut i1) });
wq.push(unsafe { Pin::new_unchecked(&mut i2) });
wq.push(unsafe { Pin::new_unchecked(&mut i3) });
wq.push(unsafe { Pin::new_unchecked(&mut i4) });
wq.push(unsafe { Pin::new_unchecked(&mut i5) });
unsafe { wq.push(Pin::new_unchecked(&i1)) };
unsafe { wq.push(Pin::new_unchecked(&i2)) };
unsafe { wq.push(Pin::new_unchecked(&i3)) };
unsafe { wq.push(Pin::new_unchecked(&i4)) };
unsafe { wq.push(Pin::new_unchecked(&i5)) };
wq.print();
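For illustration (not part of this diff, and written as if inside the same module as the test above), a caller-side sketch of the contract the new `push` documentation states: the pinned link stays put and is taken off the list again before its stack storage goes away. This is the same obligation the async callers above discharge through `dropper`.

    use core::pin::Pin;

    fn enqueue_for_a_while(list: &LinkedList<u32>) {
        // The link's storage lives in this stack frame.
        let link = Link::new(42);

        // SAFETY: `link` is never moved while it is queued, and it is removed
        // from `list` below, before this frame (and the link's storage) ends.
        unsafe { list.push(Pin::new_unchecked(&link)) };

        // ... do whatever needs the link to sit in the queue ...

        // Uphold the contract: unlink before `link` is dropped.
        link.remove_from_list(list);
    }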


@@ -208,7 +208,8 @@ impl<Mono: Monotonic> TimerQueue<Mono> {
let mut link_ptr: Option<linked_list::Link<WaitingWaker<Mono>>> = None;
// Make this future `Drop`-safe, also shadow the original definition so we can't abuse it.
// Make this future `Drop`-safe
// SAFETY(link_ptr): Shadow the original definition of `link_ptr` so we can't abuse it.
let mut link_ptr =
LinkPtr(&mut link_ptr as *mut Option<linked_list::Link<WaitingWaker<Mono>>>);
let mut link_ptr2 = link_ptr.clone();
@@ -226,18 +227,24 @@ impl<Mono: Monotonic> TimerQueue<Mono> {
}
// SAFETY: This pointer is only dereferenced here and on drop of the future
// which happens outside this `poll_fn`'s stack frame.
// which happens outside this `poll_fn`'s stack frame, so this mutable access cannot
// happen at the same time as `dropper` runs.
let link = unsafe { link_ptr2.get() };
if link.is_none() {
let mut link_ref = link.insert(Link::new(WaitingWaker {
let link_ref = link.insert(Link::new(WaitingWaker {
waker: cx.waker().clone(),
release_at: instant,
was_poped: AtomicBool::new(false),
}));
// SAFETY: The address to the link is stable as it is defined outside this stack
// frame.
let (was_empty, addr) = queue.insert(unsafe { Pin::new_unchecked(&mut link_ref) });
// SAFETY(new_unchecked): The address to the link is stable as it is defined
// outside this stack frame.
// SAFETY(insert): `link_ref` lifetime comes from `link_ptr` that is shadowed, and
// we make sure in `dropper` that the link is removed from the queue before
// dropping `link_ptr` AND `dropper` makes sure that the shadowed `link_ptr` lives
// until the end of the stack frame.
let (was_empty, addr) = unsafe { queue.insert(Pin::new_unchecked(&link_ref)) };
marker.store(addr, Ordering::Relaxed);
if was_empty {


@@ -93,10 +93,12 @@ impl<T: PartialOrd + Clone> LinkedList<T> {
/// Insert a new link into the linked list.
/// The return is (was_empty, address), where the address of the link is for use with `delete`.
pub fn insert(&self, val: Pin<&mut Link<T>>) -> (bool, usize) {
///
/// SAFETY: The pinned link must live until it is removed from this list.
pub unsafe fn insert(&self, val: Pin<&Link<T>>) -> (bool, usize) {
cs::with(|_| {
// SAFETY: This datastructure does not move the underlying value.
let val = unsafe { val.get_unchecked_mut() };
let val = val.get_ref();
let addr = val as *const _ as usize;
// Make sure all previous writes are visible
@@ -111,7 +113,8 @@ impl<T: PartialOrd + Clone> LinkedList<T> {
let head_ref = if let Some(head_ref) = unsafe { head.as_ref() } {
head_ref
} else {
self.head.store(val, Ordering::Relaxed);
self.head
.store(val as *const _ as *mut _, Ordering::Relaxed);
return (true, addr);
};
@@ -121,7 +124,8 @@ impl<T: PartialOrd + Clone> LinkedList<T> {
val.next.store(head, Ordering::Relaxed);
// `val` is now first in the queue
self.head.store(val, Ordering::Relaxed);
self.head
.store(val as *const _ as *mut _, Ordering::Relaxed);
return (false, addr);
}
@@ -139,7 +143,8 @@ impl<T: PartialOrd + Clone> LinkedList<T> {
val.next.store(next, Ordering::Relaxed);
// Insert `val`
curr.next.store(val, Ordering::Relaxed);
curr.next
.store(val as *const _ as *mut _, Ordering::Relaxed);
return (false, addr);
}
@@ -150,7 +155,8 @@ impl<T: PartialOrd + Clone> LinkedList<T> {
}
// No next, write link to last position in list
curr.next.store(val, Ordering::Relaxed);
curr.next
.store(val as *const _ as *mut _, Ordering::Relaxed);
(false, addr)
})
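
As with `push`, a caller-side sketch of the new `insert` contract (not part of this diff; the `delete` call is an assumption based on the doc comment, which says the returned address is for use with `delete`):

    use core::pin::Pin;

    fn insert_two(list: &LinkedList<u32>) {
        // Storage for both links lives in this stack frame; the list keeps links
        // sorted by value, so insertion order need not match their final order.
        let a = Link::new(20);
        let b = Link::new(10);

        // SAFETY: neither link is moved while linked, and both are removed from
        // `list` below, before their storage goes away.
        let (was_empty, addr_a) = unsafe { list.insert(Pin::new_unchecked(&a)) };
        let (_, addr_b) = unsafe { list.insert(Pin::new_unchecked(&b)) };
        let _ = was_empty; // true only for the first insert into an empty list

        // Uphold the contract before `a` and `b` drop, using the addresses as the
        // doc comment describes (the exact `delete` signature is assumed here).
        list.delete(addr_b);
        list.delete(addr_a);
    }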