[PATCH v3 7/9] rust: workqueue: implement `WorkItemPointer` for pointer types
From: Alice Ryhl
Date: Tue Jul 11 2023 - 05:49:40 EST
This implements the `WorkItemPointer` trait for the pointer types that
you are most likely to use with the workqueue. The `Arc` type is for
reference-counted objects, and the `Pin<Box<T>>` type is for objects
where the caller has exclusive ownership.
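As a rough sketch of how these impls are meant to be used (not part of
this patch: `MyStruct`, its `value` field and `print_later` are made-up
names, and the sketch assumes the `new_work!`/`impl_has_work!` macros
and the `Queue::enqueue` method from the other patches in this series):

    // Illustrative sketch only; names and helpers below are assumptions,
    // not code added by this patch.
    use kernel::prelude::*;
    use kernel::sync::Arc;
    use kernel::workqueue::{self, impl_has_work, new_work, Work, WorkItem};

    #[pin_data]
    struct MyStruct {
        value: i32,
        #[pin]
        work: Work<MyStruct>,
    }

    impl_has_work! {
        impl HasWork<Self> for MyStruct { self.work }
    }

    impl MyStruct {
        fn new(value: i32) -> Result<Arc<Self>> {
            // The `Work` field is initialised in place, pinned inside the `Arc`.
            Arc::pin_init(pin_init!(MyStruct {
                value,
                work <- new_work!("MyStruct::work"),
            }))
        }
    }

    impl WorkItem for MyStruct {
        // Enqueueing consumes an `Arc<MyStruct>`, and `run` receives it back.
        type Pointer = Arc<MyStruct>;

        fn run(this: Arc<MyStruct>) {
            pr_info!("The value is: {}\n", this.value);
        }
    }

    fn print_later(val: Arc<MyStruct>) {
        // For `Arc`, enqueueing returns `Err(val)` if the item is already queued.
        let _ = workqueue::system().enqueue(val);
    }

With `Pin<Box<T>>` as the `Pointer` type the shape is the same, but
`__enqueue` returns `()` instead of a `Result`, since exclusive
ownership of the box guarantees that the work item cannot already be on
a queue.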
Co-developed-by: Gary Guo <gary@xxxxxxxxxxx>
Signed-off-by: Gary Guo <gary@xxxxxxxxxxx>
Signed-off-by: Alice Ryhl <aliceryhl@xxxxxxxxxx>
Reviewed-by: Martin Rodriguez Reboredo <yakoyoku@xxxxxxxxx>
Reviewed-by: Andreas Hindborg (Samsung) <nmi@xxxxxxxxxxxx>
Reviewed-by: Benno Lossin <benno.lossin@xxxxxxxxx>
---
v2 -> v3:
* Use `cast_mut` to cast pointer in `impl RawWorkItem for Arc`
* Add Reviewed-by from Martin, Andreas, Benno.
rust/kernel/workqueue.rs | 97 +++++++++++++++++++++++++++++++++++++++-
1 file changed, 96 insertions(+), 1 deletion(-)
diff --git a/rust/kernel/workqueue.rs b/rust/kernel/workqueue.rs
index a7c972bda0ed..e760040762bd 100644
--- a/rust/kernel/workqueue.rs
+++ b/rust/kernel/workqueue.rs
@@ -28,8 +28,10 @@
//!
//! C header: [`include/linux/workqueue.h`](../../../../include/linux/workqueue.h)
-use crate::{bindings, prelude::*, sync::LockClassKey, types::Opaque};
+use crate::{bindings, prelude::*, sync::Arc, sync::LockClassKey, types::Opaque};
+use alloc::boxed::Box;
use core::marker::PhantomData;
+use core::pin::Pin;
/// A kernel work queue.
///
@@ -345,6 +347,99 @@ unsafe fn raw_get_work(ptr: *mut Self) -> *mut $crate::workqueue::Work<$work_typ
)*};
}
+unsafe impl<T, const ID: u64> WorkItemPointer<ID> for Arc<T>
+where
+    T: WorkItem<ID, Pointer = Self>,
+    T: HasWork<T, ID>,
+{
+    unsafe extern "C" fn run(ptr: *mut bindings::work_struct) {
+        // SAFETY: The `__enqueue` method always uses a `work_struct` stored in a `Work<T, ID>`.
+        let ptr = ptr as *mut Work<T, ID>;
+        // SAFETY: This computes the pointer that `__enqueue` got from `Arc::into_raw`.
+        let ptr = unsafe { T::work_container_of(ptr) };
+        // SAFETY: This pointer comes from `Arc::into_raw` and we've been given back ownership.
+        let arc = unsafe { Arc::from_raw(ptr) };
+
+        T::run(arc)
+    }
+}
+
+unsafe impl<T, const ID: u64> RawWorkItem<ID> for Arc<T>
+where
+    T: WorkItem<ID, Pointer = Self>,
+    T: HasWork<T, ID>,
+{
+    type EnqueueOutput = Result<(), Self>;
+
+    unsafe fn __enqueue<F>(self, queue_work_on: F) -> Self::EnqueueOutput
+    where
+        F: FnOnce(*mut bindings::work_struct) -> bool,
+    {
+        // Casting between const and mut is not a problem as long as the pointer is a raw pointer.
+        let ptr = Arc::into_raw(self).cast_mut();
+
+        // SAFETY: Pointers into an `Arc` point at a valid value.
+        let work_ptr = unsafe { T::raw_get_work(ptr) };
+        // SAFETY: `raw_get_work` returns a pointer to a valid value.
+        let work_ptr = unsafe { Work::raw_get(work_ptr) };
+
+        if queue_work_on(work_ptr) {
+            Ok(())
+        } else {
+            // SAFETY: The work queue has not taken ownership of the pointer.
+            Err(unsafe { Arc::from_raw(ptr) })
+        }
+    }
+}
+
+unsafe impl<T, const ID: u64> WorkItemPointer<ID> for Pin<Box<T>>
+where
+    T: WorkItem<ID, Pointer = Self>,
+    T: HasWork<T, ID>,
+{
+    unsafe extern "C" fn run(ptr: *mut bindings::work_struct) {
+        // SAFETY: The `__enqueue` method always uses a `work_struct` stored in a `Work<T, ID>`.
+        let ptr = ptr as *mut Work<T, ID>;
+        // SAFETY: This computes the pointer that `__enqueue` got from `Box::into_raw`.
+        let ptr = unsafe { T::work_container_of(ptr) };
+        // SAFETY: This pointer comes from `Box::into_raw` and we've been given back ownership.
+        let boxed = unsafe { Box::from_raw(ptr) };
+        // SAFETY: The box was already pinned when it was enqueued.
+        let pinned = unsafe { Pin::new_unchecked(boxed) };
+
+        T::run(pinned)
+    }
+}
+
+unsafe impl<T, const ID: u64> RawWorkItem<ID> for Pin<Box<T>>
+where
+    T: WorkItem<ID, Pointer = Self>,
+    T: HasWork<T, ID>,
+{
+    type EnqueueOutput = ();
+
+    unsafe fn __enqueue<F>(self, queue_work_on: F) -> Self::EnqueueOutput
+    where
+        F: FnOnce(*mut bindings::work_struct) -> bool,
+    {
+        // SAFETY: We're not going to move `self` or any of its fields, so it's okay to
+        // temporarily remove the `Pin` wrapper.
+        let boxed = unsafe { Pin::into_inner_unchecked(self) };
+        let ptr = Box::into_raw(boxed);
+
+        // SAFETY: Pointers into a `Box` point at a valid value.
+        let work_ptr = unsafe { T::raw_get_work(ptr) };
+        // SAFETY: `raw_get_work` returns a pointer to a valid value.
+        let work_ptr = unsafe { Work::raw_get(work_ptr) };
+
+        if !queue_work_on(work_ptr) {
+            // SAFETY: This method requires exclusive ownership of the box, so it cannot be in a
+            // workqueue.
+            unsafe { ::core::hint::unreachable_unchecked() }
+        }
+    }
+}
+
/// Returns the system work queue (`system_wq`).
///
/// It is the one used by `schedule[_delayed]_work[_on]()`. Multi-CPU multi-threaded. There are
--
2.41.0.255.g8b1d071c50-goog