Skip to content

Commit 9f2f1d4

Browse files
committed
Deny unsafe_op_in_unsafe_fn in wasmtime::runtime::vm
Slowly expanding this lint to more of the crate. prtest:full
1 parent ef3d260 commit 9f2f1d4

32 files changed

+1061
-785
lines changed

crates/wasmtime/src/runtime/vm.rs

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,10 @@
44
// See documentation in crates/wasmtime/src/runtime.rs for why this is
55
// selectively enabled here.
66
#![warn(clippy::cast_sign_loss)]
7+
#![warn(
8+
unsafe_op_in_unsafe_fn,
9+
reason = "opt-in until the crate opts-in as a whole -- #11180"
10+
)]
711

812
// Polyfill `std::simd::i8x16` etc. until they're stable.
913
#[cfg(all(target_arch = "x86_64", target_feature = "sse"))]
@@ -282,7 +286,7 @@ impl dyn VMStore + '_ {
282286
/// This method is not safe as there's no static guarantee that `T` is
283287
/// correct for this store.
284288
pub(crate) unsafe fn unchecked_context_mut<T>(&mut self) -> StoreContextMut<'_, T> {
285-
StoreContextMut(&mut *(self as *mut dyn VMStore as *mut StoreInner<T>))
289+
unsafe { StoreContextMut(&mut *(self as *mut dyn VMStore as *mut StoreInner<T>)) }
286290
}
287291
}
288292

crates/wasmtime/src/runtime/vm/component.rs

Lines changed: 123 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -222,16 +222,28 @@ impl ComponentInstance {
222222
///
223223
/// This is `unsafe` because `vmctx` cannot be guaranteed to be a valid
224224
/// pointer and it cannot be proven statically that it's safe to get a
225-
/// mutable reference at this time to the instance from `vmctx`.
225+
/// mutable reference at this time to the instance from `vmctx`. Note that
226+
/// it must be also safe to borrow the store mutably, meaning it can't
227+
/// already be in use elsewhere.
226228
pub unsafe fn from_vmctx<R>(
227229
vmctx: NonNull<VMComponentContext>,
228230
f: impl FnOnce(&mut dyn VMStore, Instance) -> R,
229231
) -> R {
230-
let mut ptr = vmctx
231-
.byte_sub(mem::size_of::<ComponentInstance>())
232-
.cast::<ComponentInstance>();
233-
let reference = ptr.as_mut();
234-
let store = &mut *reference.store.0.as_ptr();
232+
// SAFETY: it's a contract of this function that `vmctx` is a valid
233+
// allocation which can go backwards to a `ComponentInstance`.
234+
let mut ptr = unsafe {
235+
vmctx
236+
.byte_sub(mem::size_of::<ComponentInstance>())
237+
.cast::<ComponentInstance>()
238+
};
239+
// SAFETY: it's a contract of this function that it's safe to use `ptr`
240+
// as a mutable reference.
241+
let reference = unsafe { ptr.as_mut() };
242+
243+
// SAFETY: it's a contract of this function that it's safe to use the
244+
// store mutably at this time.
245+
let store = unsafe { &mut *reference.store.0.as_ptr() };
246+
235247
let instance = Instance::from_wasmtime(store, reference.id);
236248
f(store, instance)
237249
}
@@ -245,11 +257,15 @@ impl ComponentInstance {
245257
pub(crate) unsafe fn vmctx_instance_id(
246258
vmctx: NonNull<VMComponentContext>,
247259
) -> ComponentInstanceId {
248-
vmctx
249-
.byte_sub(mem::size_of::<ComponentInstance>())
250-
.cast::<ComponentInstance>()
251-
.as_ref()
252-
.id
260+
// SAFETY: it's a contract of this function that `vmctx` is a valid
261+
// pointer with a `ComponentInstance` in front which can be read.
262+
unsafe {
263+
vmctx
264+
.byte_sub(mem::size_of::<ComponentInstance>())
265+
.cast::<ComponentInstance>()
266+
.as_ref()
267+
.id
268+
}
253269
}
254270

255271
/// Returns the layout corresponding to what would be an allocation of a
@@ -567,69 +583,118 @@ impl ComponentInstance {
567583

568584
unsafe fn initialize_vmctx(mut self: Pin<&mut Self>) {
569585
let offset = self.offsets.magic();
570-
*self.as_mut().vmctx_plus_offset_mut(offset) = VMCOMPONENT_MAGIC;
586+
// SAFETY: it's safe to write the magic value during initialization and
587+
// this is also the right type of value to write.
588+
unsafe {
589+
*self.as_mut().vmctx_plus_offset_mut(offset) = VMCOMPONENT_MAGIC;
590+
}
591+
571592
// Initialize the built-in functions
593+
//
594+
// SAFETY: it's safe to initialize the vmctx in this function and this
595+
// is also the right type of value to store in the vmctx.
572596
static BUILTINS: libcalls::VMComponentBuiltins = libcalls::VMComponentBuiltins::INIT;
573597
let ptr = BUILTINS.expose_provenance();
574598
let offset = self.offsets.builtins();
575-
*self.as_mut().vmctx_plus_offset_mut(offset) = VmPtr::from(ptr);
599+
unsafe {
600+
*self.as_mut().vmctx_plus_offset_mut(offset) = VmPtr::from(ptr);
601+
}
602+
603+
// SAFETY: it's safe to initialize the vmctx in this function and this
604+
// is also the right type of value to store in the vmctx.
576605
let offset = self.offsets.vm_store_context();
577-
*self.as_mut().vmctx_plus_offset_mut(offset) =
578-
VmPtr::from(self.store.0.as_ref().vm_store_context_ptr());
606+
unsafe {
607+
*self.as_mut().vmctx_plus_offset_mut(offset) =
608+
VmPtr::from(self.store.0.as_ref().vm_store_context_ptr());
609+
}
579610

580611
for i in 0..self.offsets.num_runtime_component_instances {
581612
let i = RuntimeComponentInstanceIndex::from_u32(i);
582613
let mut def = VMGlobalDefinition::new();
583-
*def.as_i32_mut() = FLAG_MAY_ENTER | FLAG_MAY_LEAVE;
584-
self.instance_flags(i).as_raw().write(def);
614+
// SAFETY: this is a valid initialization of all globals which are
615+
// 32-bit values.
616+
unsafe {
617+
*def.as_i32_mut() = FLAG_MAY_ENTER | FLAG_MAY_LEAVE;
618+
self.instance_flags(i).as_raw().write(def);
619+
}
585620
}
586621

587622
// In debug mode set non-null bad values to all "pointer looking" bits
588623
// and pieces related to lowering and such. This'll help detect any
589624
// erroneous usage and enable debug assertions above as well to prevent
590625
// loading these before they're configured or setting them twice.
626+
//
627+
// SAFETY: it's valid to write a garbage pointer during initialization
628+
// when this is otherwise uninitialized memory
591629
if cfg!(debug_assertions) {
592630
for i in 0..self.offsets.num_lowerings {
593631
let i = LoweredIndex::from_u32(i);
594632
let offset = self.offsets.lowering_callee(i);
595-
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
633+
// SAFETY: see above
634+
unsafe {
635+
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
636+
}
596637
let offset = self.offsets.lowering_data(i);
597-
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
638+
// SAFETY: see above
639+
unsafe {
640+
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
641+
}
598642
}
599643
for i in 0..self.offsets.num_trampolines {
600644
let i = TrampolineIndex::from_u32(i);
601645
let offset = self.offsets.trampoline_func_ref(i);
602-
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
646+
// SAFETY: see above
647+
unsafe {
648+
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
649+
}
603650
}
604651
for i in 0..self.offsets.num_runtime_memories {
605652
let i = RuntimeMemoryIndex::from_u32(i);
606653
let offset = self.offsets.runtime_memory(i);
607-
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
654+
// SAFETY: see above
655+
unsafe {
656+
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
657+
}
608658
}
609659
for i in 0..self.offsets.num_runtime_reallocs {
610660
let i = RuntimeReallocIndex::from_u32(i);
611661
let offset = self.offsets.runtime_realloc(i);
612-
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
662+
// SAFETY: see above
663+
unsafe {
664+
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
665+
}
613666
}
614667
for i in 0..self.offsets.num_runtime_callbacks {
615668
let i = RuntimeCallbackIndex::from_u32(i);
616669
let offset = self.offsets.runtime_callback(i);
617-
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
670+
// SAFETY: see above
671+
unsafe {
672+
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
673+
}
618674
}
619675
for i in 0..self.offsets.num_runtime_post_returns {
620676
let i = RuntimePostReturnIndex::from_u32(i);
621677
let offset = self.offsets.runtime_post_return(i);
622-
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
678+
// SAFETY: see above
679+
unsafe {
680+
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
681+
}
623682
}
624683
for i in 0..self.offsets.num_resources {
625684
let i = ResourceIndex::from_u32(i);
626685
let offset = self.offsets.resource_destructor(i);
627-
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
686+
// SAFETY: see above
687+
unsafe {
688+
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
689+
}
628690
}
629691
for i in 0..self.offsets.num_runtime_tables {
630692
let i = RuntimeTableIndex::from_u32(i);
631693
let offset = self.offsets.runtime_table(i);
632-
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
694+
// SAFETY: see above
695+
unsafe {
696+
*self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
697+
}
633698
}
634699
}
635700
}
@@ -869,10 +934,20 @@ impl VMComponentContext {
869934

870935
/// Helper function to cast between context types using a debug assertion to
871936
/// protect against some mistakes.
937+
///
938+
/// # Safety
939+
///
940+
/// The `opaque` value must be a valid pointer where it's safe to read its
941+
/// "magic" value.
872942
#[inline]
873943
pub unsafe fn from_opaque(opaque: NonNull<VMOpaqueContext>) -> NonNull<VMComponentContext> {
874944
// See comments in `VMContext::from_opaque` for this debug assert
875-
debug_assert_eq!(opaque.as_ref().magic, VMCOMPONENT_MAGIC);
945+
//
946+
// SAFETY: it's a contract of this function that it's safe to read
947+
// `opaque`.
948+
unsafe {
949+
debug_assert_eq!(opaque.as_ref().magic, VMCOMPONENT_MAGIC);
950+
}
876951
opaque.cast()
877952
}
878953
}
@@ -902,43 +977,49 @@ impl InstanceFlags {
902977

903978
#[inline]
904979
pub unsafe fn may_leave(&self) -> bool {
905-
*self.as_raw().as_ref().as_i32() & FLAG_MAY_LEAVE != 0
980+
unsafe { *self.as_raw().as_ref().as_i32() & FLAG_MAY_LEAVE != 0 }
906981
}
907982

908983
#[inline]
909984
pub unsafe fn set_may_leave(&mut self, val: bool) {
910-
if val {
911-
*self.as_raw().as_mut().as_i32_mut() |= FLAG_MAY_LEAVE;
912-
} else {
913-
*self.as_raw().as_mut().as_i32_mut() &= !FLAG_MAY_LEAVE;
985+
unsafe {
986+
if val {
987+
*self.as_raw().as_mut().as_i32_mut() |= FLAG_MAY_LEAVE;
988+
} else {
989+
*self.as_raw().as_mut().as_i32_mut() &= !FLAG_MAY_LEAVE;
990+
}
914991
}
915992
}
916993

917994
#[inline]
918995
pub unsafe fn may_enter(&self) -> bool {
919-
*self.as_raw().as_ref().as_i32() & FLAG_MAY_ENTER != 0
996+
unsafe { *self.as_raw().as_ref().as_i32() & FLAG_MAY_ENTER != 0 }
920997
}
921998

922999
#[inline]
9231000
pub unsafe fn set_may_enter(&mut self, val: bool) {
924-
if val {
925-
*self.as_raw().as_mut().as_i32_mut() |= FLAG_MAY_ENTER;
926-
} else {
927-
*self.as_raw().as_mut().as_i32_mut() &= !FLAG_MAY_ENTER;
1001+
unsafe {
1002+
if val {
1003+
*self.as_raw().as_mut().as_i32_mut() |= FLAG_MAY_ENTER;
1004+
} else {
1005+
*self.as_raw().as_mut().as_i32_mut() &= !FLAG_MAY_ENTER;
1006+
}
9281007
}
9291008
}
9301009

9311010
#[inline]
9321011
pub unsafe fn needs_post_return(&self) -> bool {
933-
*self.as_raw().as_ref().as_i32() & FLAG_NEEDS_POST_RETURN != 0
1012+
unsafe { *self.as_raw().as_ref().as_i32() & FLAG_NEEDS_POST_RETURN != 0 }
9341013
}
9351014

9361015
#[inline]
9371016
pub unsafe fn set_needs_post_return(&mut self, val: bool) {
938-
if val {
939-
*self.as_raw().as_mut().as_i32_mut() |= FLAG_NEEDS_POST_RETURN;
940-
} else {
941-
*self.as_raw().as_mut().as_i32_mut() &= !FLAG_NEEDS_POST_RETURN;
1017+
unsafe {
1018+
if val {
1019+
*self.as_raw().as_mut().as_i32_mut() |= FLAG_NEEDS_POST_RETURN;
1020+
} else {
1021+
*self.as_raw().as_mut().as_i32_mut() &= !FLAG_NEEDS_POST_RETURN;
1022+
}
9421023
}
9431024
}
9441025

0 commit comments

Comments (0)