Skip to content

Commit 3948f66

Browse files
authored
Use VmSafe trait in VMComponentContext (#10097)
An additional extension of #10043 to migrate components to `VmSafe` as well.
1 parent 887e5c9 commit 3948f66

File tree

3 files changed: +37 additions, −27 deletions

crates/wasmtime/src/runtime/component/func/host.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -92,10 +92,10 @@ impl HostFunc {
9292
}
9393

9494
pub fn lowering(&self) -> VMLowering {
95-
let data = &*self.func as *const (dyn Any + Send + Sync) as *mut u8;
95+
let data = NonNull::from(&*self.func).cast();
9696
VMLowering {
9797
callee: self.entrypoint,
98-
data,
98+
data: data.into(),
9999
}
100100
}
101101
}

crates/wasmtime/src/runtime/vm/component.rs

Lines changed: 30 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
use crate::prelude::*;
1010
use crate::runtime::vm::{
1111
SendSyncPtr, VMArrayCallFunction, VMFuncRef, VMGlobalDefinition, VMMemoryDefinition,
12-
VMOpaqueContext, VMStore, VMStoreRawPtr, VMWasmCallFunction, ValRaw, VmPtr,
12+
VMOpaqueContext, VMStore, VMStoreRawPtr, VMWasmCallFunction, ValRaw, VmPtr, VmSafe,
1313
};
1414
use alloc::alloc::Layout;
1515
use alloc::sync::Arc;
@@ -130,9 +130,12 @@ pub struct VMLowering {
130130
/// invoked.
131131
pub callee: VMLoweringCallee,
132132
/// The host data pointer (think void* pointer) to get passed to `callee`.
133-
pub data: *mut u8,
133+
pub data: VmPtr<u8>,
134134
}
135135

136+
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
137+
unsafe impl VmSafe for VMLowering {}
138+
136139
/// This is a marker type to represent the underlying allocation of a
137140
/// `VMComponentContext`.
138141
///
@@ -237,14 +240,14 @@ impl ComponentInstance {
237240
NonNull::new(ret).unwrap()
238241
}
239242

240-
unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *const T {
243+
unsafe fn vmctx_plus_offset<T: VmSafe>(&self, offset: u32) -> *const T {
241244
self.vmctx()
242245
.as_ptr()
243246
.byte_add(usize::try_from(offset).unwrap())
244247
.cast()
245248
}
246249

247-
unsafe fn vmctx_plus_offset_mut<T>(&mut self, offset: u32) -> *mut T {
250+
unsafe fn vmctx_plus_offset_mut<T: VmSafe>(&mut self, offset: u32) -> *mut T {
248251
self.vmctx()
249252
.as_ptr()
250253
.byte_add(usize::try_from(offset).unwrap())
@@ -275,9 +278,9 @@ impl ComponentInstance {
275278
/// during the instantiation process of a component.
276279
pub fn runtime_memory(&self, idx: RuntimeMemoryIndex) -> *mut VMMemoryDefinition {
277280
unsafe {
278-
let ret = *self.vmctx_plus_offset(self.offsets.runtime_memory(idx));
279-
debug_assert!(ret as usize != INVALID_PTR);
280-
ret
281+
let ret = *self.vmctx_plus_offset::<VmPtr<_>>(self.offsets.runtime_memory(idx));
282+
debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
283+
ret.as_ptr()
281284
}
282285
}
283286

@@ -287,9 +290,9 @@ impl ComponentInstance {
287290
/// during the instantiation process of a component.
288291
pub fn runtime_realloc(&self, idx: RuntimeReallocIndex) -> NonNull<VMFuncRef> {
289292
unsafe {
290-
let ret = *self.vmctx_plus_offset::<NonNull<_>>(self.offsets.runtime_realloc(idx));
293+
let ret = *self.vmctx_plus_offset::<VmPtr<_>>(self.offsets.runtime_realloc(idx));
291294
debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
292-
ret
295+
ret.as_non_null()
293296
}
294297
}
295298

@@ -299,9 +302,9 @@ impl ComponentInstance {
299302
/// during the instantiation process of a component.
300303
pub fn runtime_post_return(&self, idx: RuntimePostReturnIndex) -> NonNull<VMFuncRef> {
301304
unsafe {
302-
let ret = *self.vmctx_plus_offset::<NonNull<_>>(self.offsets.runtime_post_return(idx));
305+
let ret = *self.vmctx_plus_offset::<VmPtr<_>>(self.offsets.runtime_post_return(idx));
303306
debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
304-
ret
307+
ret.as_non_null()
305308
}
306309
}
307310

@@ -314,7 +317,7 @@ impl ComponentInstance {
314317
unsafe {
315318
let ret = *self.vmctx_plus_offset::<VMLowering>(self.offsets.lowering(idx));
316319
debug_assert!(ret.callee as usize != INVALID_PTR);
317-
debug_assert!(ret.data as usize != INVALID_PTR);
320+
debug_assert!(ret.data.as_ptr() as usize != INVALID_PTR);
318321
ret
319322
}
320323
}
@@ -354,20 +357,21 @@ impl ComponentInstance {
354357
ptr: NonNull<VMMemoryDefinition>,
355358
) {
356359
unsafe {
357-
let storage = self.vmctx_plus_offset_mut::<NonNull<VMMemoryDefinition>>(
360+
let storage = self.vmctx_plus_offset_mut::<VmPtr<VMMemoryDefinition>>(
358361
self.offsets.runtime_memory(idx),
359362
);
360363
debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
361-
*storage = ptr;
364+
*storage = ptr.into();
362365
}
363366
}
364367

365368
/// Same as `set_runtime_memory` but for realloc function pointers.
366369
pub fn set_runtime_realloc(&mut self, idx: RuntimeReallocIndex, ptr: NonNull<VMFuncRef>) {
367370
unsafe {
368-
let storage = self.vmctx_plus_offset_mut(self.offsets.runtime_realloc(idx));
369-
debug_assert!(*storage as usize == INVALID_PTR);
370-
*storage = ptr.as_ptr();
371+
let storage =
372+
self.vmctx_plus_offset_mut::<VmPtr<VMFuncRef>>(self.offsets.runtime_realloc(idx));
373+
debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
374+
*storage = ptr.into();
371375
}
372376
}
373377

@@ -378,9 +382,10 @@ impl ComponentInstance {
378382
ptr: NonNull<VMFuncRef>,
379383
) {
380384
unsafe {
381-
let storage = self.vmctx_plus_offset_mut(self.offsets.runtime_post_return(idx));
382-
debug_assert!(*storage as usize == INVALID_PTR);
383-
*storage = ptr.as_ptr();
385+
let storage = self
386+
.vmctx_plus_offset_mut::<VmPtr<VMFuncRef>>(self.offsets.runtime_post_return(idx));
387+
debug_assert!((*storage).as_ptr() as usize == INVALID_PTR);
388+
*storage = ptr.into();
384389
}
385390
}
386391

@@ -431,7 +436,7 @@ impl ComponentInstance {
431436
unsafe {
432437
let offset = self.offsets.resource_destructor(idx);
433438
debug_assert!(*self.vmctx_plus_offset::<usize>(offset) == INVALID_PTR);
434-
*self.vmctx_plus_offset_mut(offset) = dtor;
439+
*self.vmctx_plus_offset_mut(offset) = dtor.map(VmPtr::from);
435440
}
436441
}
437442

@@ -443,15 +448,16 @@ impl ComponentInstance {
443448
unsafe {
444449
let offset = self.offsets.resource_destructor(idx);
445450
debug_assert!(*self.vmctx_plus_offset::<usize>(offset) != INVALID_PTR);
446-
*self.vmctx_plus_offset(offset)
451+
(*self.vmctx_plus_offset::<Option<VmPtr<VMFuncRef>>>(offset)).map(|p| p.as_non_null())
447452
}
448453
}
449454

450455
unsafe fn initialize_vmctx(&mut self) {
451456
*self.vmctx_plus_offset_mut(self.offsets.magic()) = VMCOMPONENT_MAGIC;
452-
*self.vmctx_plus_offset_mut(self.offsets.builtins()) = &libcalls::VMComponentBuiltins::INIT;
457+
*self.vmctx_plus_offset_mut(self.offsets.builtins()) =
458+
VmPtr::from(NonNull::from(&libcalls::VMComponentBuiltins::INIT));
453459
*self.vmctx_plus_offset_mut(self.offsets.limits()) =
454-
self.store.0.as_ref().vmruntime_limits();
460+
VmPtr::from(self.store.0.as_ref().vmruntime_limits());
455461

456462
for i in 0..self.offsets.num_runtime_component_instances {
457463
let i = RuntimeComponentInstanceIndex::from_u32(i);

crates/wasmtime/src/runtime/vm/component/libcalls.rs

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
33
use crate::prelude::*;
44
use crate::runtime::vm::component::{ComponentInstance, VMComponentContext};
5-
use crate::runtime::vm::HostResultHasUnwindSentinel;
5+
use crate::runtime::vm::{HostResultHasUnwindSentinel, VmSafe};
66
use core::cell::Cell;
77
use core::convert::Infallible;
88
use core::ptr::NonNull;
@@ -43,6 +43,10 @@ macro_rules! define_builtins {
4343
)*
4444
}
4545

46+
// SAFETY: the above structure is repr(C) and only contains `VmSafe`
47+
// fields.
48+
unsafe impl VmSafe for VMComponentBuiltins {}
49+
4650
impl VMComponentBuiltins {
4751
pub const INIT: VMComponentBuiltins = VMComponentBuiltins {
4852
$($name: trampolines::$name,)*

0 commit comments

Comments (0)