Skip to content

Commit

Permalink
Use NonNull for VMFuncRef in more places (#9677)
Browse files Browse the repository at this point in the history
* Use `NonNull` for `VMFuncRef` in more places

This commit switches to using `NonNull<VMFuncRef>` in more places
instead of `*mut VMFuncRef`. A few minor locations benefited from this
by removing some otherwise extraneous checks but most places have been
updated to mostly do the same as before. The goal of this commit is to
make it clearer which APIs return a nullable reference and which never
do. Additionally some constructors now
unconditionally work with `NonNull<T>` instead of checking for null
internally.

* Fix a refactoring typo
  • Loading branch information
alexcrichton authored Nov 26, 2024
1 parent 8e8ad73 commit c52b941
Show file tree
Hide file tree
Showing 11 changed files with 56 additions and 56 deletions.
2 changes: 1 addition & 1 deletion crates/wasmtime/src/runtime/externals/table.rs
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ impl Table {
unsafe {
match (*table).get(gc_store, index)? {
runtime::TableElement::FuncRef(f) => {
let func = Func::from_vm_func_ref(&mut store, f);
let func = f.map(|f| Func::from_vm_func_ref(&mut store, f));
Some(func.into())
}

Expand Down
9 changes: 4 additions & 5 deletions crates/wasmtime/src/runtime/func.rs
Original file line number Diff line number Diff line change
Expand Up @@ -573,12 +573,11 @@ impl Func {

pub(crate) unsafe fn from_vm_func_ref(
store: &mut StoreOpaque,
raw: *mut VMFuncRef,
) -> Option<Func> {
let func_ref = NonNull::new(raw)?;
func_ref: NonNull<VMFuncRef>,
) -> Func {
debug_assert!(func_ref.as_ref().type_index != VMSharedTypeIndex::default());
let export = ExportFunction { func_ref };
Some(Func::from_wasmtime_function(export, store))
Func::from_wasmtime_function(export, store)
}

/// Creates a new `Func` from the given Rust closure.
Expand Down Expand Up @@ -1091,7 +1090,7 @@ impl Func {
}

pub(crate) unsafe fn _from_raw(store: &mut StoreOpaque, raw: *mut c_void) -> Option<Func> {
Func::from_vm_func_ref(store, raw.cast())
Some(Func::from_vm_func_ref(store, NonNull::new(raw.cast())?))
}

/// Extracts the raw value of this `Func`, which is owned by `store`.
Expand Down
10 changes: 5 additions & 5 deletions crates/wasmtime/src/runtime/func/typed.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use core::ffi::c_void;
use core::marker;
use core::mem::{self, MaybeUninit};
use core::num::NonZeroUsize;
use core::ptr::{self};
use core::ptr::{self, NonNull};
use wasmtime_environ::VMSharedTypeIndex;

/// A statically typed WebAssembly function.
Expand Down Expand Up @@ -566,9 +566,8 @@ unsafe impl WasmTy for Func {

#[inline]
unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
let p = ptr.get_funcref();
debug_assert!(!p.is_null());
Func::from_vm_func_ref(store, p.cast()).unwrap()
let p = NonNull::new(ptr.get_funcref()).unwrap().cast();
Func::from_vm_func_ref(store, p)
}
}

Expand Down Expand Up @@ -617,7 +616,8 @@ unsafe impl WasmTy for Option<Func> {

#[inline]
unsafe fn load(store: &mut AutoAssertNoGc<'_>, ptr: &ValRaw) -> Self {
Func::from_vm_func_ref(store, ptr.get_funcref().cast())
let ptr = NonNull::new(ptr.get_funcref())?.cast();
Some(Func::from_vm_func_ref(store, ptr))
}
}

Expand Down
4 changes: 2 additions & 2 deletions crates/wasmtime/src/runtime/values.rs
Original file line number Diff line number Diff line change
Expand Up @@ -974,14 +974,14 @@ impl Ref {
match (self, ty.heap_type().top()) {
(Ref::Func(None), HeapType::Func) => {
assert!(ty.is_nullable());
Ok(TableElement::FuncRef(ptr::null_mut()))
Ok(TableElement::FuncRef(None))
}
(Ref::Func(Some(f)), HeapType::Func) => {
debug_assert!(
f.comes_from_same_store(&store),
"checked in `ensure_matches_ty`"
);
Ok(TableElement::FuncRef(f.vm_func_ref(&mut store).as_ptr()))
Ok(TableElement::FuncRef(Some(f.vm_func_ref(&mut store))))
}

(Ref::Extern(e), HeapType::Extern) => match e {
Expand Down
2 changes: 1 addition & 1 deletion crates/wasmtime/src/runtime/vm/const_expr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ impl<'a> ConstEvalContext<'a> {

fn ref_func(&mut self, index: FuncIndex) -> Result<ValRaw> {
Ok(ValRaw::funcref(
self.instance.get_func_ref(index).unwrap().cast(),
self.instance.get_func_ref(index).unwrap().as_ptr().cast(),
))
}

Expand Down
5 changes: 1 addition & 4 deletions crates/wasmtime/src/runtime/vm/gc/enabled/arrayref.rs
Original file line number Diff line number Diff line change
Expand Up @@ -172,10 +172,7 @@ impl VMArrayRef {
.func_ref_table
.get_untyped(func_ref_id);
Val::FuncRef(unsafe {
Func::from_vm_func_ref(
store,
func_ref.map_or(core::ptr::null_mut(), |f| f.as_ptr()),
)
func_ref.map(|p| Func::from_vm_func_ref(store, p.as_non_null()))
})
}
otherwise => unreachable!("not a top type: {otherwise:?}"),
Expand Down
5 changes: 1 addition & 4 deletions crates/wasmtime/src/runtime/vm/gc/enabled/structref.rs
Original file line number Diff line number Diff line change
Expand Up @@ -166,10 +166,7 @@ impl VMStructRef {
.func_ref_table
.get_untyped(func_ref_id);
Val::FuncRef(unsafe {
Func::from_vm_func_ref(
store,
func_ref.map_or(core::ptr::null_mut(), |f| f.as_ptr()),
)
func_ref.map(|p| Func::from_vm_func_ref(store, p.as_non_null()))
})
}
otherwise => unreachable!("not a top type: {otherwise:?}"),
Expand Down
28 changes: 11 additions & 17 deletions crates/wasmtime/src/runtime/vm/instance.rs
Original file line number Diff line number Diff line change
Expand Up @@ -650,7 +650,6 @@ impl Instance {

fn get_exported_func(&mut self, index: FuncIndex) -> ExportFunction {
let func_ref = self.get_func_ref(index).unwrap();
let func_ref = NonNull::new(func_ref as *const VMFuncRef as *mut _).unwrap();
ExportFunction { func_ref }
}

Expand Down Expand Up @@ -880,7 +879,7 @@ impl Instance {
///
/// The returned reference is a stable reference that won't be moved and can
/// be passed into JIT code.
pub(crate) fn get_func_ref(&mut self, index: FuncIndex) -> Option<*mut VMFuncRef> {
pub(crate) fn get_func_ref(&mut self, index: FuncIndex) -> Option<NonNull<VMFuncRef>> {
if index == FuncIndex::reserved_value() {
return None;
}
Expand Down Expand Up @@ -918,7 +917,7 @@ impl Instance {
.vmctx_plus_offset_mut::<VMFuncRef>(self.offsets().vmctx_func_ref(func.func_ref));
self.construct_func_ref(index, sig, func_ref);

Some(func_ref)
Some(NonNull::new(func_ref).unwrap())
}
}

Expand Down Expand Up @@ -1010,12 +1009,7 @@ impl Instance {
.get(src..)
.and_then(|s| s.get(..len))
.ok_or(Trap::TableOutOfBounds)?;
table.init_func(
dst,
elements
.iter()
.map(|idx| self.get_func_ref(*idx).unwrap_or(ptr::null_mut())),
)?;
table.init_func(dst, elements.iter().map(|idx| self.get_func_ref(*idx)))?;
}
TableSegmentElements::Expressions(exprs) => {
let exprs = exprs
Expand Down Expand Up @@ -1045,11 +1039,13 @@ impl Instance {
WasmHeapTopType::Func => table.init_func(
dst,
exprs.iter().map(|expr| unsafe {
const_evaluator
.eval(store, &mut context, expr)
.expect("const expr should be valid")
.get_funcref()
.cast()
NonNull::new(
const_evaluator
.eval(store, &mut context, expr)
.expect("const expr should be valid")
.get_funcref()
.cast(),
)
}),
)?,
}
Expand Down Expand Up @@ -1283,9 +1279,7 @@ impl Instance {
};
// Panicking here helps catch bugs rather than silently truncating by accident.
let func_index = precomputed.get(usize::try_from(i).unwrap()).cloned();
let func_ref = func_index
.and_then(|func_index| self.get_func_ref(func_index))
.unwrap_or(ptr::null_mut());
let func_ref = func_index.and_then(|func_index| self.get_func_ref(func_index));
self.tables[idx]
.1
.set(i, TableElement::FuncRef(func_ref))
Expand Down
3 changes: 2 additions & 1 deletion crates/wasmtime/src/runtime/vm/instance/allocator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ use crate::runtime::vm::table::Table;
use crate::runtime::vm::{CompiledModuleId, ModuleRuntimeInfo, VMFuncRef, VMGcRef, VMStore};
use crate::store::{AutoAssertNoGc, StoreOpaque};
use crate::vm::VMGlobalDefinition;
use core::ptr::NonNull;
use core::{any::Any, mem, ptr};
use wasmtime_environ::{
DefinedMemoryIndex, DefinedTableIndex, HostPtr, InitMemory, MemoryInitialization,
Expand Down Expand Up @@ -594,7 +595,7 @@ fn initialize_tables(
}

WasmHeapTopType::Func => {
let funcref = raw.get_funcref().cast::<VMFuncRef>();
let funcref = NonNull::new(raw.get_funcref().cast::<VMFuncRef>());
let items = (0..table.size()).map(|_| funcref);
table.init_func(0, items).err2anyhow()?;
}
Expand Down
29 changes: 20 additions & 9 deletions crates/wasmtime/src/runtime/vm/libcalls.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ use crate::prelude::*;
use crate::runtime::vm::table::{Table, TableElementType};
use crate::runtime::vm::vmcontext::VMFuncRef;
use crate::runtime::vm::{Instance, TrapReason, VMGcRef, VMStore};
use core::ptr::NonNull;
#[cfg(feature = "threads")]
use core::time::Duration;
use wasmtime_environ::Unsigned;
Expand Down Expand Up @@ -87,6 +88,7 @@ pub mod raw {
#![allow(unused_doc_comments, unused_attributes)]

use crate::runtime::vm::{InstanceAndStore, TrapReason, VMContext};
use core::ptr::NonNull;

macro_rules! libcall {
(
Expand Down Expand Up @@ -184,6 +186,13 @@ pub mod raw {
}
}

impl LibcallResult for NonNull<u8> {
type Abi = *mut u8;
unsafe fn convert(self) -> *mut u8 {
self.as_ptr()
}
}

impl LibcallResult for bool {
type Abi = u32;
unsafe fn convert(self) -> u32 {
Expand Down Expand Up @@ -217,7 +226,7 @@ unsafe fn table_grow_func_ref(
let table_index = TableIndex::from_u32(table_index);

let element = match instance.table_element_type(table_index) {
TableElementType::Func => (init_value as *mut VMFuncRef).into(),
TableElementType::Func => NonNull::new(init_value.cast::<VMFuncRef>()).into(),
TableElementType::GcRef => unreachable!(),
};

Expand Down Expand Up @@ -271,7 +280,7 @@ unsafe fn table_fill_func_ref(
let table = &mut *instance.get_table(table_index);
match table.element_type() {
TableElementType::Func => {
let val = val.cast::<VMFuncRef>();
let val = NonNull::new(val.cast::<VMFuncRef>());
table
.fill(store.optional_gc_store_mut()?, dst, val.into(), len)
.err2anyhow()?;
Expand Down Expand Up @@ -401,7 +410,7 @@ fn memory_init(
}

// Implementation of `ref.func`.
fn ref_func(_store: &mut dyn VMStore, instance: &mut Instance, func_index: u32) -> *mut u8 {
fn ref_func(_store: &mut dyn VMStore, instance: &mut Instance, func_index: u32) -> NonNull<u8> {
instance
.get_func_ref(FuncIndex::from_u32(func_index))
.expect("ref_func: funcref should always be available for given func index")
Expand All @@ -427,7 +436,10 @@ unsafe fn table_get_lazy_init_func_ref(
.get(None, index)
.expect("table access already bounds-checked");

elem.into_func_ref_asserting_initialized().cast()
match elem.into_func_ref_asserting_initialized() {
Some(ptr) => ptr.as_ptr().cast(),
None => core::ptr::null_mut(),
}
}

/// Drop a GC reference.
Expand Down Expand Up @@ -802,9 +814,8 @@ unsafe fn array_new_elem(
.ok_or_else(|| Trap::TableOutOfBounds.into_anyhow())?
.iter()
.map(|f| {
let raw_func_ref =
instance.get_func_ref(*f).unwrap_or(core::ptr::null_mut());
let func = Func::from_vm_func_ref(store, raw_func_ref);
let raw_func_ref = instance.get_func_ref(*f);
let func = raw_func_ref.map(|p| Func::from_vm_func_ref(store, p));
Val::FuncRef(func)
}),
);
Expand Down Expand Up @@ -910,8 +921,8 @@ unsafe fn array_init_elem(
.ok_or_else(|| Trap::TableOutOfBounds.into_anyhow())?
.iter()
.map(|f| {
let raw_func_ref = instance.get_func_ref(*f).unwrap_or(core::ptr::null_mut());
let func = Func::from_vm_func_ref(&mut store, raw_func_ref);
let raw_func_ref = instance.get_func_ref(*f);
let func = raw_func_ref.map(|p| Func::from_vm_func_ref(&mut store, p));
Val::FuncRef(func)
})
.collect::<Vec<_>>(),
Expand Down
15 changes: 8 additions & 7 deletions crates/wasmtime/src/runtime/vm/table.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ use wasmtime_environ::{
/// `ptr::null_mut`.
pub enum TableElement {
/// A `funcref`.
FuncRef(*mut VMFuncRef),
FuncRef(Option<NonNull<VMFuncRef>>),

/// A GC reference.
GcRef(Option<VMGcRef>),
Expand Down Expand Up @@ -67,7 +67,7 @@ impl TableElement {
/// # Safety
///
/// The same warnings as for `into_table_values()` apply.
pub(crate) unsafe fn into_func_ref_asserting_initialized(self) -> *mut VMFuncRef {
pub(crate) unsafe fn into_func_ref_asserting_initialized(self) -> Option<NonNull<VMFuncRef>> {
match self {
Self::FuncRef(e) => e,
Self::UninitFunc => panic!("Uninitialized table element value outside of table slot"),
Expand All @@ -85,8 +85,8 @@ impl TableElement {
}
}

impl From<*mut VMFuncRef> for TableElement {
fn from(f: *mut VMFuncRef) -> TableElement {
impl From<Option<NonNull<VMFuncRef>>> for TableElement {
fn from(f: Option<NonNull<VMFuncRef>>) -> TableElement {
TableElement::FuncRef(f)
}
}
Expand All @@ -112,7 +112,8 @@ impl TaggedFuncRef {

/// Converts the given `ptr`, a valid funcref pointer, into a tagged pointer
/// by adding in the `FUNCREF_INIT_BIT`.
fn from(ptr: *mut VMFuncRef, lazy_init: bool) -> Self {
fn from(ptr: Option<NonNull<VMFuncRef>>, lazy_init: bool) -> Self {
let ptr = ptr.map(|p| p.as_ptr()).unwrap_or(ptr::null_mut());
if lazy_init {
let masked = Strict::map_addr(ptr, |a| a | FUNCREF_INIT_BIT);
TaggedFuncRef(masked)
Expand All @@ -131,7 +132,7 @@ impl TaggedFuncRef {
// Masking off the tag bit is harmless whether the table uses lazy
// init or not.
let unmasked = Strict::map_addr(ptr, |a| a & FUNCREF_MASK);
TableElement::FuncRef(unmasked)
TableElement::FuncRef(NonNull::new(unmasked))
}
}
}
Expand Down Expand Up @@ -442,7 +443,7 @@ impl Table {
pub fn init_func(
&mut self,
dst: u64,
items: impl ExactSizeIterator<Item = *mut VMFuncRef>,
items: impl ExactSizeIterator<Item = Option<NonNull<VMFuncRef>>>,
) -> Result<(), Trap> {
let dst = usize::try_from(dst).map_err(|_| Trap::TableOutOfBounds)?;

Expand Down

0 comments on commit c52b941

Please sign in to comment.