[Sub-issue of #378] Add PinnedKernel, and use Pin in locks, MruArena, and MruEntry #394

Merged · 14 commits · Feb 4, 2021
56 changes: 56 additions & 0 deletions kernel-rs/Cargo.lock


1 change: 1 addition & 0 deletions kernel-rs/Cargo.toml
@@ -29,6 +29,7 @@ spin = { version = "0.7.1", default-features = false }
array-macro = "2.0.0"
static_assertions = "1.1.0"
itertools = { version = "0.10.0", default-features = false }
pin-project = "1"

# Compiler options for sysroot packages.
# Cargo currently warns that the following packages are not dependencies.
42 changes: 28 additions & 14 deletions kernel-rs/src/arena.rs
@@ -3,7 +3,9 @@ use crate::spinlock::{Spinlock, SpinlockGuard};
use core::marker::PhantomData;
use core::mem::{self, ManuallyDrop};
use core::ops::Deref;
use core::pin::Pin;
use core::ptr::{self, NonNull};
use pin_project::pin_project;

/// A homogeneous memory allocator, equipped with the box type representing an allocation.
pub trait Arena: Sized {
@@ -96,16 +98,20 @@ impl<'s, T> ArrayPtr<'s, T> {
}
}

#[pin_project]
#[repr(C)]
pub struct MruEntry<T> {
#[pin]
list_entry: ListEntry,
refcnt: usize,
data: T,
}

/// A homogeneous memory allocator equipped with reference counts.
#[pin_project]
pub struct MruArena<T, const CAPACITY: usize> {
entries: [MruEntry<T>; CAPACITY],
#[pin]
head: ListEntry,
}
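
For context, `#[pin_project]` generates a `project()` method that splits a pinned struct into per-field references, keeping `#[pin]` fields pinned and handing out plain `&mut` for the rest. A minimal sketch of the projection these types rely on (the function name `project_example` is illustrative, and the code would have to live inside `arena.rs` since the fields are private):

```rust
use core::pin::Pin;

fn project_example<T>(entry: Pin<&mut MruEntry<T>>) {
    let this = entry.project();
    // The `#[pin]` field is projected as a pinned reference, so the
    // intrusive `ListEntry` can never be moved out from behind the `Pin`.
    let _list_entry: Pin<&mut ListEntry> = this.list_entry;
    // Non-`#[pin]` fields are projected as ordinary mutable references.
    let _refcnt: &mut usize = this.refcnt;
    let _data: &mut T = this.data;
}
```

`MruArena::init` and `dealloc` below use exactly this projection to reach `head` and `list_entry` as `Pin<&mut ListEntry>`.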

@@ -155,7 +161,9 @@ impl<T> Drop for ArrayPtr<'_, T> {
}
}

impl<T: 'static + ArenaObject, const CAPACITY: usize> Arena for Spinlock<ArrayArena<T, CAPACITY>> {
impl<T: 'static + ArenaObject + Unpin, const CAPACITY: usize> Arena
for Spinlock<ArrayArena<T, CAPACITY>>
{
type Data = T;
type Handle<'s> = ArrayPtr<'s, T>;
type Guard<'s> = SpinlockGuard<'s, ArrayArena<T, CAPACITY>>;
@@ -253,7 +261,7 @@ impl<T> MruEntry<T> {
Self {
refcnt: 0,
data,
list_entry: ListEntry::new(),
list_entry: unsafe { ListEntry::new() },
}
}
}
@@ -263,15 +271,21 @@ impl<T, const CAPACITY: usize> MruArena<T, CAPACITY> {
pub const fn new(entries: [MruEntry<T>; CAPACITY]) -> Self {
Self {
entries,
head: ListEntry::new(),
head: unsafe { ListEntry::new() },
}
}

pub fn init(&mut self) {
self.head.init();
pub fn init(self: Pin<&mut Self>) {
let mut this = self.project();

for entry in &mut self.entries {
self.head.prepend(&mut entry.list_entry);
this.head.as_mut().init();
// TODO: Pin of array's element?
for entry in this.entries {
unsafe {
// Safe since `MruEntry` is accessed only through `Pin`.
let entry = Pin::new_unchecked(&mut entry.list_entry);
this.head.as_mut().prepend(entry);
}
}
}
}
@@ -388,16 +402,16 @@ impl<T: 'static + ArenaObject, const CAPACITY: usize> Arena for Spinlock<MruAren
unsafe fn dealloc(&self, mut handle: Self::Handle<'_>) {
let mut this = self.lock();

let entry = unsafe { handle.ptr.as_mut() };
if entry.refcnt == 1 {
// Safe since `MruEntry` is accessed only through `Pin`.
let mut entry = unsafe { Pin::new_unchecked(handle.ptr.as_mut()).project() };
if *entry.refcnt == 1 {
entry.data.finalize::<Self>(&mut this);
}
*entry.refcnt -= 1;

entry.refcnt -= 1;

if entry.refcnt == 0 {
entry.list_entry.remove();
this.head.prepend(&mut entry.list_entry);
if *entry.refcnt == 0 {
entry.list_entry.as_mut().remove();
this.get_pin().project().head.prepend(entry.list_entry);
}

mem::forget(handle);
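The `dealloc` path above also calls `this.get_pin()` on a `SpinlockGuard`; that accessor is not part of this excerpt. It presumably mirrors the `SleepablelockGuard::get_pin()` added further down; a hedged sketch of what it might look like (the `lock`/`data` field names are assumptions, not shown in this diff):

```rust
use core::pin::Pin;

impl<T> SpinlockGuard<'_, T> {
    /// Returns a pinned mutable reference to the inner data.
    pub fn get_pin(&mut self) -> Pin<&mut T> {
        // Safe since for `T: !Unpin`, we only provide pinned references and don't move `T`.
        unsafe { Pin::new_unchecked(&mut *self.lock.data.get()) }
    }
}
```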
2 changes: 1 addition & 1 deletion kernel-rs/src/kernel.rs
@@ -204,7 +204,7 @@ pub unsafe fn kernel_main() -> ! {
unsafe { plicinithart() };

// Buffer cache.
unsafe { KERNEL.bcache.get_mut().init() };
unsafe { KERNEL.bcache.get_pin().init() };

// Emulated hard disk.
unsafe { KERNEL.file_system.disk.get_mut().init() };
63 changes: 45 additions & 18 deletions kernel-rs/src/list.rs
@@ -2,24 +2,44 @@
//! `ListEntry`s must first be initialized with `init()`
//! before calling their member functions.

use core::marker::PhantomPinned;
use core::pin::Pin;
use core::ptr;

pub struct ListEntry {
next: *mut ListEntry,
prev: *mut ListEntry,
// Add `PhantomPinned` to mark this struct as `!Unpin`, since `ListEntry`s must not move.
_marker: PhantomPinned,
}

/// A list entry for doubly linked, circular, intrusive lists.
///
/// # Safety
///
/// A `ListEntry` must be used only after initializing it with `ListEntry::init()`,
/// or after appending/prepending it to another initialized `ListEntry`.
/// After this, `ListEntry::{prev, next}` always refer to a valid, initialized `ListEntry`.
impl ListEntry {
pub const fn new() -> Self {
/// Returns an uninitialized `ListEntry`.
///
/// # Safety
///
/// A `ListEntry` must be used only after initializing it with `ListEntry::init()`,
/// or after appending/prepending it to another initialized `ListEntry`.
pub const unsafe fn new() -> Self {
Self {
prev: ptr::null_mut(),
next: ptr::null_mut(),
_marker: PhantomPinned,
}
}

pub fn init(&mut self) {
self.next = self;
self.prev = self;
pub fn init(self: Pin<&mut Self>) {
// Safe since we don't move the inner data and don't leak the mutable reference.
let this = unsafe { self.get_unchecked_mut() };
this.next = this;
this.prev = this;
}

pub fn prev(&self) -> &Self {
@@ -31,42 +51,49 @@ impl ListEntry {
}

/// `e` <-> `this`
pub fn append(&mut self, e: &mut ListEntry) {
e.next = self;
e.prev = self.prev;
pub fn append(self: Pin<&mut Self>, e: Pin<&mut Self>) {
// Safe since we don't move the inner data and don't leak the mutable reference.
let this = unsafe { self.get_unchecked_mut() };
let elem = unsafe { e.get_unchecked_mut() };

elem.next = this;
elem.prev = this.prev;
unsafe {
(*e.next).prev = e;
(*e.prev).next = e;
(*elem.next).prev = elem;
(*elem.prev).next = elem;
}
}

/// `this` <-> `e`
pub fn prepend(&mut self, e: &mut ListEntry) {
e.next = self.next;
e.prev = self;
pub fn prepend(self: Pin<&mut Self>, e: Pin<&mut Self>) {
// Safe since we don't move the inner data and don't leak the mutable reference.
let this = unsafe { self.get_unchecked_mut() };
let elem = unsafe { e.get_unchecked_mut() };

elem.next = this.next;
elem.prev = this;
unsafe {
(*e.next).prev = e;
(*e.prev).next = e;
(*elem.next).prev = elem;
(*elem.prev).next = elem;
}
}

pub fn is_empty(&self) -> bool {
ptr::eq(self.next, self)
}

pub fn remove(&mut self) {
pub fn remove(self: Pin<&mut Self>) {
unsafe {
(*self.prev).next = self.next;
(*self.next).prev = self.prev;
}
self.init();
}

pub fn list_pop_front(&self) -> &ListEntry {
let result = unsafe { &mut *self.next };
result.remove();
pub fn list_pop_front(self: Pin<&mut Self>) -> Pin<&mut Self> {
// Safe since we don't move the inner data and don't leak the mutable reference.
let mut result = unsafe { Pin::new_unchecked(&mut *self.next) };
result.as_mut().remove();
result
}
}
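
A short usage sketch of the pinned `ListEntry` API (the free function `link_two` is illustrative; in rv6 these entries are only reached through pinned `MruEntry`/`MruArena` fields):

```rust
use core::pin::Pin;

fn link_two(mut head: Pin<&mut ListEntry>, node: Pin<&mut ListEntry>) {
    head.as_mut().init();        // `head` now points to itself: an empty list
    head.as_mut().prepend(node); // list is now head <-> node <-> head
    assert!(!head.is_empty());
}
```

As documented above, `node` needs no prior `init()` of its own: prepending it to an initialized entry is enough, which is exactly the pattern `MruArena::init` uses for its entries.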
1 change: 1 addition & 0 deletions kernel-rs/src/proc.rs
@@ -453,6 +453,7 @@ impl Deref for ProcGuard {
}
}

// TODO(travis1829): Change &mut Target -> Pin<&mut Target>
Member:

I think this should be done in this PR.

Collaborator (author):

I held off on this because it seems to overlap heavily with the PR that 손서윤 is currently working on. In particular, once PR #385 is finished, the way Proc's internals are accessed will likely change somewhat, so for now I only marked it as a TODO.

impl DerefMut for ProcGuard {
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { &mut *(self.ptr as *mut _) }
29 changes: 24 additions & 5 deletions kernel-rs/src/sleepablelock.rs
@@ -4,6 +4,7 @@ use crate::spinlock::RawSpinlock;
use core::cell::UnsafeCell;
use core::marker::PhantomData;
use core::ops::{Deref, DerefMut};
use core::pin::Pin;

pub struct SleepablelockGuard<'s, T> {
lock: &'s Sleepablelock<T>,
@@ -32,10 +33,6 @@ impl<T> Sleepablelock<T> {
}
}

pub fn into_inner(self) -> T {
self.data.into_inner()
}

pub fn lock(&self) -> SleepablelockGuard<'_, T> {
self.lock.acquire();

@@ -45,6 +42,19 @@
}
}

/// Returns a pinned mutable reference to the inner data.
pub fn get_pin(&mut self) -> Pin<&mut T> {
// Safe since for `T: !Unpin`, we only provide pinned references and don't move `T`.
unsafe { Pin::new_unchecked(&mut *self.data.get()) }
}
}

impl<T: Unpin> Sleepablelock<T> {
pub fn into_inner(self) -> T {
self.data.into_inner()
}

/// Returns a mutable reference to the inner data.
/// # Safety
///
/// `self` must not be shared by other threads. Use this function only in the middle of
@@ -54,6 +64,7 @@ impl<T> Sleepablelock<T> {
unsafe { &mut *self.data.get() }
}

/// Returns a mutable reference to the inner data.
pub fn get_mut(&mut self) -> &mut T {
unsafe { &mut *self.data.get() }
}
@@ -67,6 +78,12 @@
pub fn wakeup(&self) {
self.lock.waitchannel.wakeup();
}

/// Returns a pinned mutable reference to the inner data.
pub fn get_pin(&mut self) -> Pin<&mut T> {
// Safe since for `T: !Unpin`, we only provide pinned references and don't move `T`.
unsafe { Pin::new_unchecked(&mut *self.lock.data.get()) }
}
}

impl<T> Waitable for SleepablelockGuard<'_, T> {
@@ -91,7 +108,9 @@ impl<T> Deref for SleepablelockGuard<'_, T> {
}
}

impl<T> DerefMut for SleepablelockGuard<'_, T> {
// We can mutably dereference the guard only when `T: Unpin`.
// If `T: !Unpin`, use `SleepablelockGuard::get_pin()` instead.
impl<T: Unpin> DerefMut for SleepablelockGuard<'_, T> {
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { &mut *self.lock.data.get() }
}
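A hedged usage sketch of the new guard API for a `T: !Unpin` type (the `Flag` type below is made up purely for illustration):

```rust
use core::marker::PhantomPinned;
use core::pin::Pin;

struct Flag {
    value: usize,
    _marker: PhantomPinned, // makes `Flag: !Unpin`
}

fn peek(lock: &Sleepablelock<Flag>) -> usize {
    let mut guard = lock.lock();
    // `guard.value = 1` would no longer compile: `DerefMut` now requires `T: Unpin`.
    let pinned: Pin<&mut Flag> = guard.get_pin();
    // Mutation has to go through the pinned reference (e.g. via projection);
    // reads are still possible through `Deref` (here, `Pin`'s `Deref` to `&Flag`).
    pinned.value
}
```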