From 3ec99966393ea274c958551a06335e828cbe0780 Mon Sep 17 00:00:00 2001
From: ptitSeb
Date: Tue, 6 Sep 2022 10:08:19 +0200
Subject: [PATCH] Added helper functions for WaitNotify opcodes (for #3155)

---
 lib/types/src/libcalls.rs  |  24 +++++
 lib/types/src/vmoffsets.rs |  26 ++++-
 lib/vm/src/instance/mod.rs | 205 ++++++++++++++++++++++++++++++++++++-
 lib/vm/src/libcalls.rs     | 151 +++++++++++++++++++++++++++
 lib/vm/src/vmcontext.rs    |  70 +++++++++++++
 5 files changed, 470 insertions(+), 6 deletions(-)

diff --git a/lib/types/src/libcalls.rs b/lib/types/src/libcalls.rs
index f794eab5a0f..e58d52ff817 100644
--- a/lib/types/src/libcalls.rs
+++ b/lib/types/src/libcalls.rs
@@ -115,6 +115,24 @@ pub enum LibCall {
     /// probe for stack overflow. These are emitted for functions which need
     /// when the `enable_probestack` setting is true.
     Probestack,
+
+    /// memory.atomic.wait32 for local memories
+    Memory32AtomicWait32,
+
+    /// memory.atomic.wait32 for imported memories
+    ImportedMemory32AtomicWait32,
+
+    /// memory.atomic.wait64 for local memories
+    Memory32AtomicWait64,
+
+    /// memory.atomic.wait64 for imported memories
+    ImportedMemory32AtomicWait64,
+
+    /// memory.atomic.notify for local memories
+    Memory32AtomicNotify,
+
+    /// memory.atomic.notify for imported memories
+    ImportedMemory32AtomicNotify,
 }
 
 impl LibCall {
@@ -157,6 +175,12 @@ impl LibCall {
             Self::Probestack => "_wasmer_vm_probestack",
             #[cfg(not(target_vendor = "apple"))]
             Self::Probestack => "wasmer_vm_probestack",
+            Self::Memory32AtomicWait32 => "wasmer_vm_memory32_atomic_wait32",
+            Self::ImportedMemory32AtomicWait32 => "wasmer_vm_imported_memory32_atomic_wait32",
+            Self::Memory32AtomicWait64 => "wasmer_vm_memory32_atomic_wait64",
+            Self::ImportedMemory32AtomicWait64 => "wasmer_vm_imported_memory32_atomic_wait64",
+            Self::Memory32AtomicNotify => "wasmer_vm_memory32_atomic_notify",
+            Self::ImportedMemory32AtomicNotify => "wasmer_vm_imported_memory32_atomic_notify",
         }
     }
 }
diff --git a/lib/types/src/vmoffsets.rs b/lib/types/src/vmoffsets.rs
index 81cd0b0422c..5be5ee2ddfd 100644
--- a/lib/types/src/vmoffsets.rs
+++ b/lib/types/src/vmoffsets.rs
@@ -115,9 +115,33 @@ impl VMBuiltinFunctionIndex {
     pub const fn get_table_fill_index() -> Self {
         Self(23)
     }
+    /// Returns an index for wasm's local `memory.atomic.wait32` builtin function.
+    pub const fn get_memory_atomic_wait32_index() -> Self {
+        Self(24)
+    }
+    /// Returns an index for wasm's imported `memory.atomic.wait32` builtin function.
+    pub const fn get_imported_memory_atomic_wait32_index() -> Self {
+        Self(25)
+    }
+    /// Returns an index for wasm's local `memory.atomic.wait64` builtin function.
+    pub const fn get_memory_atomic_wait64_index() -> Self {
+        Self(26)
+    }
+    /// Returns an index for wasm's imported `memory.atomic.wait64` builtin function.
+    pub const fn get_imported_memory_atomic_wait64_index() -> Self {
+        Self(27)
+    }
+    /// Returns an index for wasm's local `memory.atomic.notify` builtin function.
+    pub const fn get_memory_atomic_notify_index() -> Self {
+        Self(28)
+    }
+    /// Returns an index for wasm's imported `memory.atomic.notify` builtin function.
+    pub const fn get_imported_memory_atomic_notify_index() -> Self {
+        Self(29)
+    }
     /// Returns the total number of builtin functions.
     pub const fn builtin_functions_total_number() -> u32 {
-        24
+        30
     }
 
     /// Return the index as an u32 number.
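Each builtin above occupies a fixed slot in a per-instance table of function pointers that compiled code reaches through the VMContext: the pointer for a builtin lives at `base + index * pointer_size`. The following is a minimal, self-contained sketch of that addressing scheme only; `BuiltinIndex`, `POINTER_SIZE`, and `builtins_base` are illustrative placeholders, not the actual wasmer-types `VMOffsets` API.

    // Illustrative sketch of the index -> slot mapping; not the real VMOffsets code.
    const POINTER_SIZE: u32 = 8; // assuming a 64-bit target

    struct BuiltinIndex(u32);

    impl BuiltinIndex {
        // Mirrors VMBuiltinFunctionIndex::get_memory_atomic_wait32_index() above.
        const fn memory_atomic_wait32() -> Self {
            Self(24)
        }
        // Byte offset of this builtin's pointer, given a hypothetical base
        // offset of the builtin table inside the VMContext.
        const fn slot_offset(&self, builtins_base: u32) -> u32 {
            builtins_base + self.0 * POINTER_SIZE
        }
    }

    fn main() {
        let idx = BuiltinIndex::memory_atomic_wait32();
        assert_eq!(idx.slot_offset(0), 192); // slot 24 * 8 bytes
    }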
diff --git a/lib/vm/src/instance/mod.rs b/lib/vm/src/instance/mod.rs
index 60f9f1ea80e..ea6433a3c1e 100644
--- a/lib/vm/src/instance/mod.rs
+++ b/lib/vm/src/instance/mod.rs
@@ -14,10 +14,10 @@ use crate::store::{InternalStoreHandle, StoreObjects};
 use crate::table::TableElement;
 use crate::trap::{catch_traps, Trap, TrapCode};
 use crate::vmcontext::{
-    memory_copy, memory_fill, VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext,
-    VMFunctionContext, VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport,
-    VMMemoryDefinition, VMMemoryImport, VMSharedSignatureIndex, VMTableDefinition, VMTableImport,
-    VMTrampoline,
+    memory32_atomic_check32, memory32_atomic_check64, memory_copy, memory_fill,
+    VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
+    VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
+    VMMemoryImport, VMSharedSignatureIndex, VMTableDefinition, VMTableImport, VMTrampoline,
 };
 use crate::LinearMemory;
 use crate::{FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMFunctionBody};
@@ -33,7 +33,8 @@ use std::fmt;
 use std::mem;
 use std::ptr::{self, NonNull};
 use std::slice;
-use std::sync::Arc;
+use std::sync::{Arc, Mutex};
+use std::thread::{current, park, park_timeout, Thread};
 use wasmer_types::entity::{packed_option::ReservedValue, BoxedSlice, EntityRef, PrimaryMap};
 use wasmer_types::{
     DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
@@ -48,6 +49,7 @@ use wasmer_types::{
 /// to ensure that the `vmctx` field is last. See the documentation of
 /// the `vmctx` field to learn more.
 #[repr(C)]
+#[allow(clippy::type_complexity)]
 pub(crate) struct Instance {
     /// The `ModuleInfo` this `Instance` was instantiated from.
     module: Arc<ModuleInfo>,
@@ -89,6 +91,9 @@ pub(crate) struct Instance {
     /// will point to elements here for functions imported by this instance.
     imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
 
+    /// The HashMap of waiter threads (one list per memory location), used by the wait/notify opcodes
+    conditions: Arc<Mutex<HashMap<(u32, u32), Vec<(Thread, bool)>>>>,
+
     /// Additional context used by compiled WebAssembly code. This
     /// field is last, and represents a dynamically-sized array that
     /// extends beyond the nominal end of the struct (similar to a
@@ -768,6 +773,195 @@ impl Instance {
             self.imported_table(table_index).handle
         }
     }
+
+    // To implement wait/notify, a HashMap, behind a mutex, is used
+    // to track the waiters.
+    // The hashmap key is built from the memory index and the waited-on address,
+    // and waiter threads are "park"ed (with or without a timeout).
+    // Notify wakes the waiters by simply "unpark"ing each thread,
+    // as the Thread handle is stored in the HashMap.
+    // Once unparked, a waiter thread removes its own entry from the HashMap.
+    // Timeout vs. wakeup is tracked with a boolean in the HashMap,
+    // because `park_timeout` doesn't give any information on why it returned.
+    fn do_wait(&mut self, index: u32, dst: u32, timeout: i64) -> u32 {
+        // fetch (or create) the list of waiters for this location
+        let key = (index, dst);
+        let mut conds = self.conditions.lock().unwrap();
+        conds.entry(key).or_insert_with(Vec::new);
+        let v = conds.get_mut(&key).unwrap();
+        v.push((current(), false));
+        drop(conds);
+        if timeout < 0 {
+            park();
+        } else {
+            park_timeout(std::time::Duration::from_nanos(timeout as u64));
+        }
+        let mut conds = self.conditions.lock().unwrap();
+        let v = conds.get_mut(&key).unwrap();
+        let id = current().id();
+        let mut ret = 0;
+        v.retain(|cond| {
+            if cond.0.id() == id {
+                ret = if cond.1 { 0 } else { 2 };
+                false
+            } else {
+                true
+            }
+        });
+        if v.is_empty() {
+            conds.remove(&key);
+        }
+        ret
+    }
+
+    /// Perform an Atomic.Wait32 on a local memory
+    pub(crate) fn local_memory_wait32(
+        &mut self,
+        memory_index: LocalMemoryIndex,
+        dst: u32,
+        val: u32,
+        timeout: i64,
+    ) -> Result<u32, Trap> {
+        let memory = self.memory(memory_index);
+        //if !memory.shared {
+        //    We should trap according to the spec, but the official tests rely on not trapping...
+        //}
+
+        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };
+
+        if let Ok(mut ret) = ret {
+            if ret == 0 {
+                ret = self.do_wait(memory_index.as_u32(), dst, timeout);
+            }
+            Ok(ret)
+        } else {
+            ret
+        }
+    }
+
+    /// Perform an Atomic.Wait32 on an imported memory
+    pub(crate) fn imported_memory_wait32(
+        &mut self,
+        memory_index: MemoryIndex,
+        dst: u32,
+        val: u32,
+        timeout: i64,
+    ) -> Result<u32, Trap> {
+        let import = self.imported_memory(memory_index);
+        let memory = unsafe { import.definition.as_ref() };
+        //if !memory.shared {
+        //    We should trap according to the spec, but the official tests rely on not trapping...
+        //}
+
+        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };
+
+        if let Ok(mut ret) = ret {
+            if ret == 0 {
+                ret = self.do_wait(memory_index.as_u32(), dst, timeout);
+            }
+            Ok(ret)
+        } else {
+            ret
+        }
+    }
+
+    /// Perform an Atomic.Wait64 on a local memory
+    pub(crate) fn local_memory_wait64(
+        &mut self,
+        memory_index: LocalMemoryIndex,
+        dst: u32,
+        val: u64,
+        timeout: i64,
+    ) -> Result<u32, Trap> {
+        let memory = self.memory(memory_index);
+        //if !memory.shared {
+        //    We should trap according to the spec, but the official tests rely on not trapping...
+        //}
+
+        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };
+
+        if let Ok(mut ret) = ret {
+            if ret == 0 {
+                ret = self.do_wait(memory_index.as_u32(), dst, timeout);
+            }
+            Ok(ret)
+        } else {
+            ret
+        }
+    }
+
+    /// Perform an Atomic.Wait64 on an imported memory
+    pub(crate) fn imported_memory_wait64(
+        &mut self,
+        memory_index: MemoryIndex,
+        dst: u32,
+        val: u64,
+        timeout: i64,
+    ) -> Result<u32, Trap> {
+        let import = self.imported_memory(memory_index);
+        let memory = unsafe { import.definition.as_ref() };
+        //if !memory.shared {
+        //    We should trap according to the spec, but the official tests rely on not trapping...
+        //}
+
+        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };
+
+        if let Ok(mut ret) = ret {
+            if ret == 0 {
+                ret = self.do_wait(memory_index.as_u32(), dst, timeout);
+            }
+            Ok(ret)
+        } else {
+            ret
+        }
+    }
+
+    /// Perform an Atomic.Notify on a local memory
+    pub(crate) fn local_memory_notify(
+        &mut self,
+        memory_index: LocalMemoryIndex,
+        dst: u32,
+    ) -> Result<(), Trap> {
+        //let memory = self.memory(memory_index);
+        //if !memory.shared {
+        //    We should trap according to the spec, but the official tests rely on not trapping...
+        //}
+
+        // fetch the waiters for this location
+        let key = (memory_index.as_u32(), dst);
+        let mut conds = self.conditions.lock().unwrap();
+        if conds.contains_key(&key) {
+            let v = conds.get_mut(&key).unwrap();
+            for (t, b) in v {
+                *b = true; // mark as woken up
+                t.unpark(); // wake up!
+            }
+        }
+        Ok(())
+    }
+
+    /// Perform an Atomic.Notify on an imported memory
+    pub(crate) fn imported_memory_notify(
+        &mut self,
+        memory_index: MemoryIndex,
+        dst: u32,
+    ) -> Result<(), Trap> {
+        //let import = self.imported_memory(memory_index);
+        //let memory = unsafe { import.definition.as_ref() };
+        //if !memory.shared {
+        //    We should trap according to the spec, but the official tests rely on not trapping...
+        //}
+
+        // fetch the waiters for this location
+        let key = (memory_index.as_u32(), dst);
+        let mut conds = self.conditions.lock().unwrap();
+        if conds.contains_key(&key) {
+            let v = conds.get_mut(&key).unwrap();
+            for (t, b) in v {
+                *b = true; // mark as woken up
+                t.unpark(); // wake up!
+            }
+        }
+        Ok(())
+    }
 }
 
 /// A handle holding an `Instance` of a WebAssembly module.
@@ -860,6 +1054,7 @@ impl InstanceHandle {
             funcrefs,
             imported_funcrefs,
             vmctx: VMContext {},
+            conditions: Arc::new(Mutex::new(HashMap::new())),
         };
         let mut instance_handle = allocator.write_instance(instance);
diff --git a/lib/vm/src/libcalls.rs b/lib/vm/src/libcalls.rs
index 9274237f167..a3f0107859c 100644
--- a/lib/vm/src/libcalls.rs
+++ b/lib/vm/src/libcalls.rs
@@ -667,6 +667,151 @@ pub unsafe extern "C" fn wasmer_vm_raise_trap(trap_code: TrapCode) -> ! {
 #[no_mangle]
 pub static wasmer_vm_probestack: unsafe extern "C" fn() = PROBESTACK;
 
+/// Implementation of `memory.atomic.wait32` for locally-defined 32-bit memories.
+///
+/// # Safety
+///
+/// `vmctx` must be dereferenceable.
+#[no_mangle]
+pub unsafe extern "C" fn wasmer_vm_memory32_atomic_wait32(
+    vmctx: *mut VMContext,
+    memory_index: u32,
+    dst: u32,
+    val: u32,
+    timeout: i64,
+) -> u32 {
+    let result = {
+        let instance = (*vmctx).instance_mut();
+        let memory_index = LocalMemoryIndex::from_u32(memory_index);
+
+        instance.local_memory_wait32(memory_index, dst, val, timeout)
+    };
+    if let Err(trap) = result {
+        raise_lib_trap(trap);
+    }
+    result.unwrap()
+}
+
+/// Implementation of `memory.atomic.wait32` for imported 32-bit memories.
+///
+/// # Safety
+///
+/// `vmctx` must be dereferenceable.
+#[no_mangle]
+pub unsafe extern "C" fn wasmer_vm_imported_memory32_atomic_wait32(
+    vmctx: *mut VMContext,
+    memory_index: u32,
+    dst: u32,
+    val: u32,
+    timeout: i64,
+) -> u32 {
+    let result = {
+        let instance = (*vmctx).instance_mut();
+        let memory_index = MemoryIndex::from_u32(memory_index);
+
+        instance.imported_memory_wait32(memory_index, dst, val, timeout)
+    };
+    if let Err(trap) = result {
+        raise_lib_trap(trap);
+    }
+    result.unwrap()
+}
+
+/// Implementation of `memory.atomic.wait64` for locally-defined 32-bit memories.
+///
+/// # Safety
+///
+/// `vmctx` must be dereferenceable.
+#[no_mangle]
+pub unsafe extern "C" fn wasmer_vm_memory32_atomic_wait64(
+    vmctx: *mut VMContext,
+    memory_index: u32,
+    dst: u32,
+    val: u64,
+    timeout: i64,
+) -> u32 {
+    let result = {
+        let instance = (*vmctx).instance_mut();
+        let memory_index = LocalMemoryIndex::from_u32(memory_index);
+
+        instance.local_memory_wait64(memory_index, dst, val, timeout)
+    };
+    if let Err(trap) = result {
+        raise_lib_trap(trap);
+    }
+    result.unwrap()
+}
+
+/// Implementation of `memory.atomic.wait64` for imported 32-bit memories.
+///
+/// # Safety
+///
+/// `vmctx` must be dereferenceable.
+#[no_mangle]
+pub unsafe extern "C" fn wasmer_vm_imported_memory32_atomic_wait64(
+    vmctx: *mut VMContext,
+    memory_index: u32,
+    dst: u32,
+    val: u64,
+    timeout: i64,
+) -> u32 {
+    let result = {
+        let instance = (*vmctx).instance_mut();
+        let memory_index = MemoryIndex::from_u32(memory_index);
+
+        instance.imported_memory_wait64(memory_index, dst, val, timeout)
+    };
+    if let Err(trap) = result {
+        raise_lib_trap(trap);
+    }
+    result.unwrap()
+}
+
+/// Implementation of `memory.atomic.notify` for locally-defined 32-bit memories.
+///
+/// # Safety
+///
+/// `vmctx` must be dereferenceable.
+#[no_mangle]
+pub unsafe extern "C" fn wasmer_vm_memory32_atomic_notify(
+    vmctx: *mut VMContext,
+    memory_index: u32,
+    dst: u32,
+) {
+    let result = {
+        let instance = (*vmctx).instance_mut();
+        let memory_index = LocalMemoryIndex::from_u32(memory_index);
+
+        instance.local_memory_notify(memory_index, dst)
+    };
+    if let Err(trap) = result {
+        raise_lib_trap(trap);
+    }
+}
+
+/// Implementation of `memory.atomic.notify` for imported 32-bit memories.
+///
+/// # Safety
+///
+/// `vmctx` must be dereferenceable.
+#[no_mangle]
+pub unsafe extern "C" fn wasmer_vm_imported_memory32_atomic_notify(
+    vmctx: *mut VMContext,
+    memory_index: u32,
+    dst: u32,
+) {
+    let result = {
+        let instance = (*vmctx).instance_mut();
+        let memory_index = MemoryIndex::from_u32(memory_index);
+
+        instance.imported_memory_notify(memory_index, dst)
+    };
+    if let Err(trap) = result {
+        raise_lib_trap(trap);
+    }
+}
+
 /// The function pointer to a libcall
 pub fn function_pointer(libcall: LibCall) -> usize {
     match libcall {
@@ -701,5 +846,11 @@ pub fn function_pointer(libcall: LibCall) -> usize {
         LibCall::DataDrop => wasmer_vm_data_drop as usize,
         LibCall::Probestack => wasmer_vm_probestack as usize,
         LibCall::RaiseTrap => wasmer_vm_raise_trap as usize,
+        LibCall::Memory32AtomicWait32 => wasmer_vm_memory32_atomic_wait32 as usize,
+        LibCall::ImportedMemory32AtomicWait32 => wasmer_vm_imported_memory32_atomic_wait32 as usize,
+        LibCall::Memory32AtomicWait64 => wasmer_vm_memory32_atomic_wait64 as usize,
+        LibCall::ImportedMemory32AtomicWait64 => wasmer_vm_imported_memory32_atomic_wait64 as usize,
+        LibCall::Memory32AtomicNotify => wasmer_vm_memory32_atomic_notify as usize,
+        LibCall::ImportedMemory32AtomicNotify => wasmer_vm_imported_memory32_atomic_notify as usize,
     }
 }
diff --git a/lib/vm/src/vmcontext.rs b/lib/vm/src/vmcontext.rs
index 766a8708d1d..eb16620096a 100644
--- a/lib/vm/src/vmcontext.rs
+++ b/lib/vm/src/vmcontext.rs
@@ -14,6 +14,7 @@ use crate::VMTable;
 use crate::{VMBuiltinFunctionIndex, VMFunction};
 use std::convert::TryFrom;
 use std::ptr::{self, NonNull};
+use std::sync::atomic::{AtomicU32, AtomicU64, Ordering};
 use std::u32;
 use wasmer_types::RawValue;
 
@@ -376,6 +377,62 @@ pub(crate) unsafe fn memory_fill(
     Ok(())
 }
 
+/// Perform the `memory32.atomic.check32` operation for the memory.
+/// Returns 0 if the value at `dst` equals `val`, 1 if it differs.
+///
+/// # Errors
+///
+/// Returns a `Trap` error if the memory range is out of bounds.
+///
+/// # Safety
+/// The memory is accessed through raw pointers, so the caller must ensure
+/// `mem` describes a valid linear memory.
+pub(crate) unsafe fn memory32_atomic_check32(
+    mem: &VMMemoryDefinition,
+    dst: u32,
+    val: u32,
+) -> Result<u32, Trap> {
+    if usize::try_from(dst)
+        .unwrap()
+        .checked_add(4)
+        .map_or(true, |end| end > mem.current_length)
+    {
+        return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
+    }
+
+    let dst = isize::try_from(dst).unwrap();
+
+    // Bounds and casts are checked above, so the 4-byte read below stays
+    // inside the linear memory. `dst` is assumed to be 4-byte aligned;
+    // the unaligned-atomic trap required by the spec is not handled here.
+    let dst = mem.base.offset(dst) as *const AtomicU32;
+    let read_val = (*dst).load(Ordering::Acquire);
+    let ret = if read_val == val { 0 } else { 1 };
+    Ok(ret)
+}
+
+/// Perform the `memory32.atomic.check64` operation for the memory.
+/// Returns 0 if the value at `dst` equals `val`, 1 if it differs.
+///
+/// # Errors
+///
+/// Returns a `Trap` error if the memory range is out of bounds.
+///
+/// # Safety
+/// The memory is accessed through raw pointers, so the caller must ensure
+/// `mem` describes a valid linear memory.
+pub(crate) unsafe fn memory32_atomic_check64(
+    mem: &VMMemoryDefinition,
+    dst: u32,
+    val: u64,
+) -> Result<u32, Trap> {
+    if usize::try_from(dst)
+        .unwrap()
+        .checked_add(8)
+        .map_or(true, |end| end > mem.current_length)
+    {
+        return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
+    }
+
+    let dst = isize::try_from(dst).unwrap();
+
+    // Bounds and casts are checked above, so the 8-byte read below stays
+    // inside the linear memory. `dst` is assumed to be 8-byte aligned;
+    // the unaligned-atomic trap required by the spec is not handled here.
+    let dst = mem.base.offset(dst) as *const AtomicU64;
+    let read_val = (*dst).load(Ordering::Acquire);
+    let ret = if read_val == val { 0 } else { 1 };
+    Ok(ret)
+}
+
 /// The fields compiled code needs to access to utilize a WebAssembly table
 /// defined within the instance.
 #[derive(Debug, Clone, Copy)]
@@ -634,6 +691,19 @@ impl VMBuiltinFunctionsArray {
         ptrs[VMBuiltinFunctionIndex::get_table_fill_index().index() as usize] =
             wasmer_vm_table_fill as usize;
 
+        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_wait32_index().index() as usize] =
+            wasmer_vm_memory32_atomic_wait32 as usize;
+        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index().index() as usize] =
+            wasmer_vm_imported_memory32_atomic_wait32 as usize;
+        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_wait64_index().index() as usize] =
+            wasmer_vm_memory32_atomic_wait64 as usize;
+        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index().index() as usize] =
+            wasmer_vm_imported_memory32_atomic_wait64 as usize;
+        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_notify_index().index() as usize] =
+            wasmer_vm_memory32_atomic_notify as usize;
+        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index().index() as usize] =
+            wasmer_vm_imported_memory32_atomic_notify as usize;
+
         debug_assert!(ptrs.iter().cloned().all(|p| p != 0));
 
         Self { ptrs }
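As a companion to `do_wait` and the notify helpers above, here is a self-contained sketch of the same park/unpark protocol, runnable outside of Wasmer. The `Waiters` alias and the free functions `wait` and `notify` are illustrative names, not part of the patch; the return codes (0 = woken, 2 = timed out) and the woken-flag bookkeeping mirror the patch's logic.

    use std::collections::HashMap;
    use std::sync::{Arc, Mutex};
    use std::thread::{self, Thread};
    use std::time::Duration;

    // (memory index, address) -> list of (waiter thread, woken flag)
    type Waiters = Arc<Mutex<HashMap<(u32, u32), Vec<(Thread, bool)>>>>;

    // Returns 0 if woken by notify, 2 if the timeout expired.
    fn wait(conds: &Waiters, key: (u32, u32), timeout: Option<Duration>) -> u32 {
        // Register the current thread as a waiter on `key`.
        conds
            .lock()
            .unwrap()
            .entry(key)
            .or_insert_with(Vec::new)
            .push((thread::current(), false));
        match timeout {
            Some(d) => thread::park_timeout(d),
            None => thread::park(),
        }
        // `park_timeout` doesn't report why it returned, so inspect the
        // flag the notifier may have set, and drop our own entry.
        let mut conds = conds.lock().unwrap();
        let v = conds.get_mut(&key).unwrap();
        let id = thread::current().id();
        let mut ret = 2;
        v.retain(|(t, woken)| {
            if t.id() == id {
                ret = if *woken { 0 } else { 2 };
                false
            } else {
                true
            }
        });
        if v.is_empty() {
            conds.remove(&key);
        }
        ret
    }

    fn notify(conds: &Waiters, key: (u32, u32)) {
        if let Some(v) = conds.lock().unwrap().get_mut(&key) {
            for (t, woken) in v.iter_mut() {
                *woken = true; // mark as woken, then wake the thread
                t.unpark();
            }
        }
    }

    fn main() {
        let conds: Waiters = Arc::new(Mutex::new(HashMap::new()));
        let c2 = Arc::clone(&conds);
        let waiter = thread::spawn(move || wait(&c2, (0, 64), Some(Duration::from_secs(5))));
        // Make sure the waiter has registered itself before notifying.
        while conds.lock().unwrap().is_empty() {
            thread::yield_now();
        }
        notify(&conds, (0, 64));
        assert_eq!(waiter.join().unwrap(), 0);
    }

Note that `park` may also return spuriously; like the patch, the sketch treats any return with the flag unset as a timeout, and a production version would re-park in a loop until notified or the deadline passes.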