@@ -222,16 +222,28 @@ impl ComponentInstance {
     ///
     /// This is `unsafe` because `vmctx` cannot be guaranteed to be a valid
     /// pointer and it cannot be proven statically that it's safe to get a
-    /// mutable reference at this time to the instance from `vmctx`.
+    /// mutable reference at this time to the instance from `vmctx`. Note that
+    /// it must also be safe to borrow the store mutably, meaning it can't
+    /// already be in use elsewhere.
     pub unsafe fn from_vmctx<R>(
         vmctx: NonNull<VMComponentContext>,
         f: impl FnOnce(&mut dyn VMStore, Instance) -> R,
     ) -> R {
-        let mut ptr = vmctx
-            .byte_sub(mem::size_of::<ComponentInstance>())
-            .cast::<ComponentInstance>();
-        let reference = ptr.as_mut();
-        let store = &mut *reference.store.0.as_ptr();
+        // SAFETY: it's a contract of this function that `vmctx` is a valid
+        // allocation which can go backwards to a `ComponentInstance`.
+        let mut ptr = unsafe {
+            vmctx
+                .byte_sub(mem::size_of::<ComponentInstance>())
+                .cast::<ComponentInstance>()
+        };
+        // SAFETY: it's a contract of this function that it's safe to use `ptr`
+        // as a mutable reference.
+        let reference = unsafe { ptr.as_mut() };
+
+        // SAFETY: it's a contract of this function that it's safe to use the
+        // store mutably at this time.
+        let store = unsafe { &mut *reference.store.0.as_ptr() };
+
         let instance = Instance::from_wasmtime(store, reference.id);
         f(store, instance)
     }
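One detail of `from_vmctx` worth calling out: rather than returning the reconstituted `&mut dyn VMStore` (whose lifetime the compiler could not bound), it lends the borrow to an `FnOnce` closure, so the unsafely-created reference provably ends when the call returns. A minimal standalone sketch of that pattern, with hypothetical `Store`/`with_store` names:

```rust
struct Store {
    fuel: u64,
}

/// Run `f` with mutable access to the `Store` behind `ptr`.
///
/// # Safety
///
/// `ptr` must point at a live `Store` that is not borrowed elsewhere for the
/// duration of this call.
unsafe fn with_store<R>(ptr: *mut Store, f: impl FnOnce(&mut Store) -> R) -> R {
    // SAFETY: upheld by the caller contract above; the `&mut` cannot escape
    // because it is only ever lent to `f`.
    let store = unsafe { &mut *ptr };
    f(store)
}

fn main() {
    let mut store = Store { fuel: 100 };
    let ptr = &raw mut store;
    // SAFETY: `ptr` points at `store`, which is not otherwise borrowed here.
    let remaining = unsafe {
        with_store(ptr, |s| {
            s.fuel -= 10;
            s.fuel
        })
    };
    assert_eq!(remaining, 90);
    assert_eq!(store.fuel, 90);
}
```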
@@ -245,11 +257,15 @@ impl ComponentInstance {
     pub(crate) unsafe fn vmctx_instance_id(
         vmctx: NonNull<VMComponentContext>,
     ) -> ComponentInstanceId {
-        vmctx
-            .byte_sub(mem::size_of::<ComponentInstance>())
-            .cast::<ComponentInstance>()
-            .as_ref()
-            .id
+        // SAFETY: it's a contract of this function that `vmctx` is a valid
+        // pointer with a `ComponentInstance` in front which can be read.
+        unsafe {
+            vmctx
+                .byte_sub(mem::size_of::<ComponentInstance>())
+                .cast::<ComponentInstance>()
+                .as_ref()
+                .id
+        }
     }

     /// Returns the layout corresponding to what would be an allocation of a
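Both `from_vmctx` and `vmctx_instance_id` rely on the same layout invariant: the `VMComponentContext` lives immediately after its owning `ComponentInstance` in one allocation, so stepping a context pointer back by `size_of::<ComponentInstance>()` lands on the instance. A self-contained sketch of that recovery, with hypothetical `Header`/`Ctx`/`Alloc` types standing in for the real ones:

```rust
use std::mem;
use std::ptr::NonNull;

// `Header` plays the role of `ComponentInstance` and `Ctx` the role of
// `VMComponentContext`; `#[repr(C)]` makes `ctx` sit at a predictable offset.
#[repr(C)]
struct Header {
    id: u32,
}

#[repr(C)]
struct Ctx {
    magic: u32,
}

#[repr(C)]
struct Alloc {
    header: Header,
    ctx: Ctx, // lives exactly `size_of::<Header>()` bytes after `header`
}

/// Recover the enclosing `Header` from a pointer to its trailing `Ctx`.
///
/// # Safety
///
/// `ctx` must point at the `ctx` field of a live `Alloc`, so stepping back by
/// `size_of::<Header>()` stays inside the same allocation.
unsafe fn header_from_ctx(ctx: NonNull<Ctx>) -> NonNull<Header> {
    // SAFETY: upheld by the caller contract above.
    unsafe { ctx.byte_sub(mem::size_of::<Header>()).cast() }
}

fn main() {
    let mut alloc = Alloc {
        header: Header { id: 7 },
        ctx: Ctx { magic: 0xC0FF_EE },
    };
    // Derive the `ctx` pointer from the whole-`Alloc` pointer so stepping back
    // to `header` stays within its provenance.
    let alloc_ptr = NonNull::from(&mut alloc);
    // SAFETY: `ctx` is in-bounds of the `Alloc` allocation.
    let ctx = unsafe { alloc_ptr.byte_add(mem::size_of::<Header>()).cast::<Ctx>() };
    // SAFETY: `ctx` points into `alloc`, which is not borrowed elsewhere.
    let id = unsafe { header_from_ctx(ctx).as_ref().id };
    assert_eq!(id, 7);
    assert_eq!(alloc.header.id, 7);
    assert_eq!(alloc.ctx.magic, 0xC0FF_EE);
}
```

The real code pairs this with an allocation-layout guarantee (the vmctx is the trailing member of the instance allocation) rather than a single Rust struct, but the pointer arithmetic and its safety argument are the same.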
@@ -567,69 +583,118 @@ impl ComponentInstance {

     unsafe fn initialize_vmctx(mut self: Pin<&mut Self>) {
         let offset = self.offsets.magic();
-        *self.as_mut().vmctx_plus_offset_mut(offset) = VMCOMPONENT_MAGIC;
+        // SAFETY: it's safe to write the magic value during initialization and
+        // this is also the right type of value to write.
+        unsafe {
+            *self.as_mut().vmctx_plus_offset_mut(offset) = VMCOMPONENT_MAGIC;
+        }
+
         // Initialize the built-in functions
+        //
+        // SAFETY: it's safe to initialize the vmctx in this function and this
+        // is also the right type of value to store in the vmctx.
         static BUILTINS: libcalls::VMComponentBuiltins = libcalls::VMComponentBuiltins::INIT;
         let ptr = BUILTINS.expose_provenance();
         let offset = self.offsets.builtins();
-        *self.as_mut().vmctx_plus_offset_mut(offset) = VmPtr::from(ptr);
+        unsafe {
+            *self.as_mut().vmctx_plus_offset_mut(offset) = VmPtr::from(ptr);
+        }
+
+        // SAFETY: it's safe to initialize the vmctx in this function and this
+        // is also the right type of value to store in the vmctx.
         let offset = self.offsets.vm_store_context();
-        *self.as_mut().vmctx_plus_offset_mut(offset) =
-            VmPtr::from(self.store.0.as_ref().vm_store_context_ptr());
+        unsafe {
+            *self.as_mut().vmctx_plus_offset_mut(offset) =
+                VmPtr::from(self.store.0.as_ref().vm_store_context_ptr());
+        }

         for i in 0..self.offsets.num_runtime_component_instances {
             let i = RuntimeComponentInstanceIndex::from_u32(i);
             let mut def = VMGlobalDefinition::new();
-            *def.as_i32_mut() = FLAG_MAY_ENTER | FLAG_MAY_LEAVE;
-            self.instance_flags(i).as_raw().write(def);
+            // SAFETY: this is a valid initialization of all globals which are
+            // 32-bit values.
+            unsafe {
+                *def.as_i32_mut() = FLAG_MAY_ENTER | FLAG_MAY_LEAVE;
+                self.instance_flags(i).as_raw().write(def);
+            }
         }

         // In debug mode set non-null bad values to all "pointer looking" bits
         // and pieces related to lowering and such. This'll help detect any
         // erroneous usage and enable debug assertions above as well to prevent
         // loading these before they're configured or setting them twice.
+        //
+        // SAFETY: it's valid to write a garbage pointer during initialization
+        // when this is otherwise uninitialized memory.
         if cfg!(debug_assertions) {
             for i in 0..self.offsets.num_lowerings {
                 let i = LoweredIndex::from_u32(i);
                 let offset = self.offsets.lowering_callee(i);
-                *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                // SAFETY: see above
+                unsafe {
+                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                }
                 let offset = self.offsets.lowering_data(i);
-                *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                // SAFETY: see above
+                unsafe {
+                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                }
             }
             for i in 0..self.offsets.num_trampolines {
                 let i = TrampolineIndex::from_u32(i);
                 let offset = self.offsets.trampoline_func_ref(i);
-                *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                // SAFETY: see above
+                unsafe {
+                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                }
             }
             for i in 0..self.offsets.num_runtime_memories {
                 let i = RuntimeMemoryIndex::from_u32(i);
                 let offset = self.offsets.runtime_memory(i);
-                *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                // SAFETY: see above
+                unsafe {
+                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                }
             }
             for i in 0..self.offsets.num_runtime_reallocs {
                 let i = RuntimeReallocIndex::from_u32(i);
                 let offset = self.offsets.runtime_realloc(i);
-                *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                // SAFETY: see above
+                unsafe {
+                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                }
             }
             for i in 0..self.offsets.num_runtime_callbacks {
                 let i = RuntimeCallbackIndex::from_u32(i);
                 let offset = self.offsets.runtime_callback(i);
-                *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                // SAFETY: see above
+                unsafe {
+                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                }
             }
             for i in 0..self.offsets.num_runtime_post_returns {
                 let i = RuntimePostReturnIndex::from_u32(i);
                 let offset = self.offsets.runtime_post_return(i);
-                *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                // SAFETY: see above
+                unsafe {
+                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                }
             }
             for i in 0..self.offsets.num_resources {
                 let i = ResourceIndex::from_u32(i);
                 let offset = self.offsets.resource_destructor(i);
-                *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                // SAFETY: see above
+                unsafe {
+                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                }
             }
             for i in 0..self.offsets.num_runtime_tables {
                 let i = RuntimeTableIndex::from_u32(i);
                 let offset = self.offsets.runtime_table(i);
-                *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                // SAFETY: see above
+                unsafe {
+                    *self.as_mut().vmctx_plus_offset_mut(offset) = INVALID_PTR;
+                }
            }
        }
    }
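The `INVALID_PTR` writes above are a debug-only tripwire: every pointer-shaped slot starts out holding recognizable garbage, so reading a slot before it's configured, or configuring it twice, trips an assertion instead of silently dereferencing null or stale data. A safe, self-contained sketch of the idea; the names and the sentinel value here are illustrative:

```rust
// Recognizable non-null sentinel (illustrative; the real constant is a wider
// "dead beef" pattern).
const INVALID_PTR: usize = 0xdead_beef;

struct Slots {
    funcs: Vec<usize>,
}

impl Slots {
    fn new(n: usize) -> Slots {
        let mut slots = Slots { funcs: vec![0; n] };
        // Debug builds poison every slot; release builds skip the work.
        if cfg!(debug_assertions) {
            for slot in &mut slots.funcs {
                *slot = INVALID_PTR;
            }
        }
        slots
    }

    fn set(&mut self, i: usize, ptr: usize) {
        // Catch double-initialization: the slot must still hold the sentinel.
        debug_assert_eq!(self.funcs[i], INVALID_PTR);
        self.funcs[i] = ptr;
    }

    fn get(&self, i: usize) -> usize {
        let ptr = self.funcs[i];
        // Catch use-before-initialization.
        debug_assert_ne!(ptr, INVALID_PTR);
        ptr
    }
}

fn main() {
    let mut slots = Slots::new(4);
    slots.set(0, 0x1000);
    assert_eq!(slots.get(0), 0x1000);
}
```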
@@ -869,10 +934,20 @@ impl VMComponentContext {

     /// Helper function to cast between context types using a debug assertion to
     /// protect against some mistakes.
+    ///
+    /// # Safety
+    ///
+    /// The `opaque` value must be a valid pointer where it's safe to read its
+    /// "magic" value.
     #[inline]
     pub unsafe fn from_opaque(opaque: NonNull<VMOpaqueContext>) -> NonNull<VMComponentContext> {
         // See comments in `VMContext::from_opaque` for this debug assert
+        //
+        // SAFETY: it's a contract of this function that it's safe to read
+        // `opaque`.
+        unsafe {
+            debug_assert_eq!(opaque.as_ref().magic, VMCOMPONENT_MAGIC);
+        }
         opaque.cast()
     }
 }
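The `magic` field guards against type confusion between context kinds: each concrete context type begins with a distinctive 32-bit value, and `from_opaque` checks it in debug builds before committing to the cast. A standalone sketch of the scheme, with an illustrative magic constant and hypothetical context types:

```rust
use std::ptr::NonNull;

// Illustrative magic value; the real ones are distinct per context type.
const COMPONENT_MAGIC: u32 = u32::from_le_bytes(*b"comp");

// Every concrete context begins with its magic so an opaque pointer can be
// checked before casting.
#[repr(C)]
struct OpaqueCtx {
    magic: u32,
}

#[repr(C)]
struct ComponentCtx {
    magic: u32,
    // ... the rest of the context would follow
}

impl ComponentCtx {
    /// # Safety
    ///
    /// `opaque` must be a valid pointer whose `magic` field can be read.
    unsafe fn from_opaque(opaque: NonNull<OpaqueCtx>) -> NonNull<ComponentCtx> {
        // SAFETY: reading the magic is the caller's contract; the check is a
        // debug-only guard against casting the wrong kind of context.
        unsafe {
            debug_assert_eq!(opaque.as_ref().magic, COMPONENT_MAGIC);
        }
        opaque.cast()
    }
}

fn main() {
    let mut ctx = ComponentCtx { magic: COMPONENT_MAGIC };
    let opaque = NonNull::from(&mut ctx).cast::<OpaqueCtx>();
    // SAFETY: `opaque` points at a live `ComponentCtx` we just created.
    let back = unsafe { ComponentCtx::from_opaque(opaque) };
    assert_eq!(unsafe { back.as_ref() }.magic, COMPONENT_MAGIC);
}
```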
@@ -902,43 +977,49 @@ impl InstanceFlags {

     #[inline]
     pub unsafe fn may_leave(&self) -> bool {
-        *self.as_raw().as_ref().as_i32() & FLAG_MAY_LEAVE != 0
+        unsafe { *self.as_raw().as_ref().as_i32() & FLAG_MAY_LEAVE != 0 }
     }

     #[inline]
     pub unsafe fn set_may_leave(&mut self, val: bool) {
-        if val {
-            *self.as_raw().as_mut().as_i32_mut() |= FLAG_MAY_LEAVE;
-        } else {
-            *self.as_raw().as_mut().as_i32_mut() &= !FLAG_MAY_LEAVE;
+        unsafe {
+            if val {
+                *self.as_raw().as_mut().as_i32_mut() |= FLAG_MAY_LEAVE;
+            } else {
+                *self.as_raw().as_mut().as_i32_mut() &= !FLAG_MAY_LEAVE;
+            }
         }
     }

     #[inline]
     pub unsafe fn may_enter(&self) -> bool {
-        *self.as_raw().as_ref().as_i32() & FLAG_MAY_ENTER != 0
+        unsafe { *self.as_raw().as_ref().as_i32() & FLAG_MAY_ENTER != 0 }
     }

     #[inline]
     pub unsafe fn set_may_enter(&mut self, val: bool) {
-        if val {
-            *self.as_raw().as_mut().as_i32_mut() |= FLAG_MAY_ENTER;
-        } else {
-            *self.as_raw().as_mut().as_i32_mut() &= !FLAG_MAY_ENTER;
+        unsafe {
+            if val {
+                *self.as_raw().as_mut().as_i32_mut() |= FLAG_MAY_ENTER;
+            } else {
+                *self.as_raw().as_mut().as_i32_mut() &= !FLAG_MAY_ENTER;
+            }
         }
     }

     #[inline]
     pub unsafe fn needs_post_return(&self) -> bool {
-        *self.as_raw().as_ref().as_i32() & FLAG_NEEDS_POST_RETURN != 0
+        unsafe { *self.as_raw().as_ref().as_i32() & FLAG_NEEDS_POST_RETURN != 0 }
     }

     #[inline]
     pub unsafe fn set_needs_post_return(&mut self, val: bool) {
-        if val {
-            *self.as_raw().as_mut().as_i32_mut() |= FLAG_NEEDS_POST_RETURN;
-        } else {
-            *self.as_raw().as_mut().as_i32_mut() &= !FLAG_NEEDS_POST_RETURN;
+        unsafe {
+            if val {
+                *self.as_raw().as_mut().as_i32_mut() |= FLAG_NEEDS_POST_RETURN;
+            } else {
+                *self.as_raw().as_mut().as_i32_mut() &= !FLAG_NEEDS_POST_RETURN;
+            }
         }
     }
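`InstanceFlags` packs these per-instance bits into a single `i32`-typed global: each getter masks one bit, and each setter ORs it in or ANDs it out. A safe sketch of the same scheme, with illustrative flag values and the `FLAG_MAY_ENTER | FLAG_MAY_LEAVE` starting state used by `initialize_vmctx`:

```rust
// Illustrative bit assignments; each flag occupies one bit of a shared i32.
const FLAG_MAY_LEAVE: i32 = 1 << 0;
const FLAG_MAY_ENTER: i32 = 1 << 1;

#[derive(Clone, Copy)]
struct Flags(i32);

impl Flags {
    fn new() -> Flags {
        // Mirrors `initialize_vmctx`: instances start able to enter and leave.
        Flags(FLAG_MAY_ENTER | FLAG_MAY_LEAVE)
    }

    fn may_enter(&self) -> bool {
        self.0 & FLAG_MAY_ENTER != 0
    }

    fn set_may_enter(&mut self, val: bool) {
        if val {
            self.0 |= FLAG_MAY_ENTER; // set the bit
        } else {
            self.0 &= !FLAG_MAY_ENTER; // clear the bit
        }
    }
}

fn main() {
    let mut flags = Flags::new();
    assert!(flags.may_enter());
    flags.set_may_enter(false);
    assert!(!flags.may_enter());
    // Other bits are untouched by the setter.
    assert_eq!(flags.0 & FLAG_MAY_LEAVE, FLAG_MAY_LEAVE);
}
```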