Skip to content

Commit 8dc83a8

Browse files
alexcrichton and bongjunj
authored and committed
Deny unsafe_op_in_unsafe_fn in wasmtime::vm::instance (bytecodealliance#11267)
Turn the lint on and add some safety comments where appropriate. cc bytecodealliance#11180
1 parent 71b71d8 commit 8dc83a8

File tree

6 files changed

+304
-154
lines changed

6 files changed

+304
-154
lines changed

crates/wasmtime/src/runtime/vm/instance.rs

Lines changed: 196 additions & 112 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,11 @@
22
//! wasm module (except its callstack and register state). An
33
//! `InstanceHandle` is a reference-counting handle for an `Instance`.
44
5+
#![warn(
6+
unsafe_op_in_unsafe_fn,
7+
reason = "opt-in until the crate opts-in as a whole -- #11180"
8+
)]
9+
510
use crate::prelude::*;
611
use crate::runtime::vm::const_expr::{ConstEvalContext, ConstExprEvaluator};
712
use crate::runtime::vm::export::Export;
@@ -125,11 +130,18 @@ impl InstanceAndStore {
125130
f: impl for<'a> FnOnce(&'a mut Self) -> R,
126131
) -> R {
127132
const _: () = assert!(mem::size_of::<InstanceAndStore>() == mem::size_of::<Instance>());
128-
let mut ptr = vmctx
129-
.byte_sub(mem::size_of::<Instance>())
130-
.cast::<InstanceAndStore>();
133+
// SAFETY: The validity of this `byte_sub` relies on `vmctx` being a
134+
// valid allocation which is itself a contract of this function.
135+
let mut ptr = unsafe {
136+
vmctx
137+
.byte_sub(mem::size_of::<Instance>())
138+
.cast::<InstanceAndStore>()
139+
};
131140

132-
f(ptr.as_mut())
141+
// SAFETY: the ability to interpret `vmctx` as a safe pointer and
142+
// continue on is a contract of this function itself, so the safety here
143+
// is effectively up to callers.
144+
unsafe { f(ptr.as_mut()) }
133145
}
134146

135147
/// Unpacks this `InstanceAndStore` into its underlying `Instance` and `dyn
@@ -327,10 +339,18 @@ impl Instance {
327339
/// from the store that `self` belongs to.
328340
#[inline]
329341
unsafe fn sibling_vmctx<'a>(&'a self, vmctx: NonNull<VMContext>) -> &'a Instance {
330-
let ptr = vmctx
331-
.byte_sub(mem::size_of::<Instance>())
332-
.cast::<Instance>();
333-
ptr.as_ref()
342+
// SAFETY: it's a contract of this function itself that `vmctx` is a
343+
// valid pointer such that this pointer arithmetic is valid.
344+
let ptr = unsafe {
345+
vmctx
346+
.byte_sub(mem::size_of::<Instance>())
347+
.cast::<Instance>()
348+
};
349+
// SAFETY: it's a contract of this function itself that `vmctx` is a
350+
// valid pointer to dereference. Additionally the lifetime of the return
351+
// value is constrained to be the same as `self` to avoid granting a
352+
// too-long lifetime.
353+
unsafe { ptr.as_ref() }
334354
}
335355

336356
/// Same as [`Self::sibling_vmctx`], but the mutable version.
@@ -344,10 +364,20 @@ impl Instance {
344364
self: Pin<&'a mut Self>,
345365
vmctx: NonNull<VMContext>,
346366
) -> Pin<&'a mut Instance> {
347-
let mut ptr = vmctx
348-
.byte_sub(mem::size_of::<Instance>())
349-
.cast::<Instance>();
350-
Pin::new_unchecked(ptr.as_mut())
367+
// SAFETY: it's a contract of this function itself that `vmctx` is a
368+
// valid pointer such that this pointer arithmetic is valid.
369+
let mut ptr = unsafe {
370+
vmctx
371+
.byte_sub(mem::size_of::<Instance>())
372+
.cast::<Instance>()
373+
};
374+
375+
// SAFETY: it's a contract of this function itself that `vmctx` is a
376+
// valid pointer to dereference. Additionally the lifetime of the return
377+
// value is constrained to be the same as `self` to avoid granting a
378+
// too-long lifetime. Finally mutable references to an instance are
379+
// always through `Pin`, so it's safe to create a pin-pointer here.
380+
unsafe { Pin::new_unchecked(ptr.as_mut()) }
351381
}
352382

353383
pub(crate) fn env_module(&self) -> &Arc<wasmtime_environ::Module> {
@@ -527,38 +557,42 @@ impl Instance {
527557
}
528558

529559
pub(crate) unsafe fn set_store(mut self: Pin<&mut Self>, store: Option<NonNull<dyn VMStore>>) {
530-
*self.as_mut().store_mut() = store.map(VMStoreRawPtr);
531-
if let Some(mut store) = store {
532-
let store = store.as_mut();
533-
self.vm_store_context()
534-
.write(Some(store.vm_store_context_ptr().into()));
535-
#[cfg(target_has_atomic = "64")]
536-
{
537-
*self.as_mut().epoch_ptr() =
538-
Some(NonNull::from(store.engine().epoch_counter()).into());
539-
}
560+
// FIXME: should be more targeted ideally with the `unsafe` than just
561+
// throwing this entire function in a large `unsafe` block.
562+
unsafe {
563+
*self.as_mut().store_mut() = store.map(VMStoreRawPtr);
564+
if let Some(mut store) = store {
565+
let store = store.as_mut();
566+
self.vm_store_context()
567+
.write(Some(store.vm_store_context_ptr().into()));
568+
#[cfg(target_has_atomic = "64")]
569+
{
570+
*self.as_mut().epoch_ptr() =
571+
Some(NonNull::from(store.engine().epoch_counter()).into());
572+
}
540573

541-
if self.env_module().needs_gc_heap {
542-
self.as_mut().set_gc_heap(Some(store.gc_store().expect(
543-
"if we need a GC heap, then `Instance::new_raw` should have already \
574+
if self.env_module().needs_gc_heap {
575+
self.as_mut().set_gc_heap(Some(store.gc_store().expect(
576+
"if we need a GC heap, then `Instance::new_raw` should have already \
544577
allocated it for us",
545-
)));
578+
)));
579+
} else {
580+
self.as_mut().set_gc_heap(None);
581+
}
546582
} else {
583+
self.vm_store_context().write(None);
584+
#[cfg(target_has_atomic = "64")]
585+
{
586+
*self.as_mut().epoch_ptr() = None;
587+
}
547588
self.as_mut().set_gc_heap(None);
548589
}
549-
} else {
550-
self.vm_store_context().write(None);
551-
#[cfg(target_has_atomic = "64")]
552-
{
553-
*self.as_mut().epoch_ptr() = None;
554-
}
555-
self.as_mut().set_gc_heap(None);
556590
}
557591
}
558592

559593
unsafe fn set_gc_heap(self: Pin<&mut Self>, gc_store: Option<&GcStore>) {
560594
if let Some(gc_store) = gc_store {
561-
*self.gc_heap_data() = Some(gc_store.gc_heap.vmctx_gc_heap_data().into());
595+
*self.gc_heap_data() = Some(unsafe { gc_store.gc_heap.vmctx_gc_heap_data().into() });
562596
} else {
563597
*self.gc_heap_data() = None;
564598
}
@@ -1347,115 +1381,165 @@ impl Instance {
13471381
) {
13481382
assert!(ptr::eq(module, self.env_module().as_ref()));
13491383

1350-
self.vmctx_plus_offset_raw::<u32>(offsets.ptr.vmctx_magic())
1351-
.write(VMCONTEXT_MAGIC);
1352-
self.as_mut().set_store(store.as_raw());
1384+
// SAFETY: the type of the magic field is indeed `u32` and this function
1385+
// is initializing its value.
1386+
unsafe {
1387+
self.vmctx_plus_offset_raw::<u32>(offsets.ptr.vmctx_magic())
1388+
.write(VMCONTEXT_MAGIC);
1389+
}
1390+
1391+
// SAFETY: it's up to the caller to provide a valid store pointer here.
1392+
unsafe {
1393+
self.as_mut().set_store(store.as_raw());
1394+
}
13531395

13541396
// Initialize shared types
1355-
let types = NonNull::from(self.runtime_info.type_ids());
1356-
self.type_ids_array().write(types.cast().into());
1397+
//
1398+
// SAFETY: validity of the vmctx means it should be safe to write to it
1399+
// here.
1400+
unsafe {
1401+
let types = NonNull::from(self.runtime_info.type_ids());
1402+
self.type_ids_array().write(types.cast().into());
1403+
}
13571404

13581405
// Initialize the built-in functions
1359-
static BUILTINS: VMBuiltinFunctionsArray = VMBuiltinFunctionsArray::INIT;
1360-
let ptr = BUILTINS.expose_provenance();
1361-
self.vmctx_plus_offset_raw(offsets.ptr.vmctx_builtin_functions())
1362-
.write(VmPtr::from(ptr));
1406+
//
1407+
// SAFETY: the type of the builtin functions field is indeed a pointer
1408+
// and the pointer being filled in here, plus the vmctx is valid to
1409+
// write to during initialization.
1410+
unsafe {
1411+
static BUILTINS: VMBuiltinFunctionsArray = VMBuiltinFunctionsArray::INIT;
1412+
let ptr = BUILTINS.expose_provenance();
1413+
self.vmctx_plus_offset_raw(offsets.ptr.vmctx_builtin_functions())
1414+
.write(VmPtr::from(ptr));
1415+
}
13631416

13641417
// Initialize the imports
1418+
//
1419+
// SAFETY: the vmctx is safe to initialize during this function and
1420+
// validity of each item itself is a contract the caller must uphold.
13651421
debug_assert_eq!(imports.functions.len(), module.num_imported_funcs);
1366-
ptr::copy_nonoverlapping(
1367-
imports.functions.as_ptr(),
1368-
self.vmctx_plus_offset_raw(offsets.vmctx_imported_functions_begin())
1369-
.as_ptr(),
1370-
imports.functions.len(),
1371-
);
1372-
debug_assert_eq!(imports.tables.len(), module.num_imported_tables);
1373-
ptr::copy_nonoverlapping(
1374-
imports.tables.as_ptr(),
1375-
self.vmctx_plus_offset_raw(offsets.vmctx_imported_tables_begin())
1376-
.as_ptr(),
1377-
imports.tables.len(),
1378-
);
1379-
debug_assert_eq!(imports.memories.len(), module.num_imported_memories);
1380-
ptr::copy_nonoverlapping(
1381-
imports.memories.as_ptr(),
1382-
self.vmctx_plus_offset_raw(offsets.vmctx_imported_memories_begin())
1383-
.as_ptr(),
1384-
imports.memories.len(),
1385-
);
1386-
debug_assert_eq!(imports.globals.len(), module.num_imported_globals);
1387-
ptr::copy_nonoverlapping(
1388-
imports.globals.as_ptr(),
1389-
self.vmctx_plus_offset_raw(offsets.vmctx_imported_globals_begin())
1390-
.as_ptr(),
1391-
imports.globals.len(),
1392-
);
1393-
1394-
debug_assert_eq!(imports.tags.len(), module.num_imported_tags);
1395-
ptr::copy_nonoverlapping(
1396-
imports.tags.as_ptr(),
1397-
self.vmctx_plus_offset_raw(offsets.vmctx_imported_tags_begin())
1398-
.as_ptr(),
1399-
imports.tags.len(),
1400-
);
1422+
unsafe {
1423+
ptr::copy_nonoverlapping(
1424+
imports.functions.as_ptr(),
1425+
self.vmctx_plus_offset_raw(offsets.vmctx_imported_functions_begin())
1426+
.as_ptr(),
1427+
imports.functions.len(),
1428+
);
1429+
debug_assert_eq!(imports.tables.len(), module.num_imported_tables);
1430+
ptr::copy_nonoverlapping(
1431+
imports.tables.as_ptr(),
1432+
self.vmctx_plus_offset_raw(offsets.vmctx_imported_tables_begin())
1433+
.as_ptr(),
1434+
imports.tables.len(),
1435+
);
1436+
debug_assert_eq!(imports.memories.len(), module.num_imported_memories);
1437+
ptr::copy_nonoverlapping(
1438+
imports.memories.as_ptr(),
1439+
self.vmctx_plus_offset_raw(offsets.vmctx_imported_memories_begin())
1440+
.as_ptr(),
1441+
imports.memories.len(),
1442+
);
1443+
debug_assert_eq!(imports.globals.len(), module.num_imported_globals);
1444+
ptr::copy_nonoverlapping(
1445+
imports.globals.as_ptr(),
1446+
self.vmctx_plus_offset_raw(offsets.vmctx_imported_globals_begin())
1447+
.as_ptr(),
1448+
imports.globals.len(),
1449+
);
1450+
debug_assert_eq!(imports.tags.len(), module.num_imported_tags);
1451+
ptr::copy_nonoverlapping(
1452+
imports.tags.as_ptr(),
1453+
self.vmctx_plus_offset_raw(offsets.vmctx_imported_tags_begin())
1454+
.as_ptr(),
1455+
imports.tags.len(),
1456+
);
1457+
}
14011458

14021459
// N.B.: there is no need to initialize the funcrefs array because we
14031460
// eagerly construct each element in it whenever asked for a reference
14041461
// to that element. In other words, there is no state needed to track
14051462
// the lazy-init, so we don't need to initialize any state now.
14061463

14071464
// Initialize the defined tables
1408-
let mut ptr = self.vmctx_plus_offset_raw(offsets.vmctx_tables_begin());
1409-
let tables = self.as_mut().tables_mut();
1410-
for i in 0..module.num_defined_tables() {
1411-
ptr.write(tables[DefinedTableIndex::new(i)].1.vmtable());
1412-
ptr = ptr.add(1);
1465+
//
1466+
// SAFETY: it's safe to initialize these tables during initialization
1467+
// here and the various types of pointers and such here should all be
1468+
// valid.
1469+
unsafe {
1470+
let mut ptr = self.vmctx_plus_offset_raw(offsets.vmctx_tables_begin());
1471+
let tables = self.as_mut().tables_mut();
1472+
for i in 0..module.num_defined_tables() {
1473+
ptr.write(tables[DefinedTableIndex::new(i)].1.vmtable());
1474+
ptr = ptr.add(1);
1475+
}
14131476
}
14141477

14151478
// Initialize the defined memories. This fills in both the
14161479
// `defined_memories` table and the `owned_memories` table at the same
14171480
// time. Entries in `defined_memories` hold a pointer to a definition
14181481
// (all memories) whereas the `owned_memories` hold the actual
14191482
// definitions of memories owned (not shared) in the module.
1420-
let mut ptr = self.vmctx_plus_offset_raw(offsets.vmctx_memories_begin());
1421-
let mut owned_ptr = self.vmctx_plus_offset_raw(offsets.vmctx_owned_memories_begin());
1422-
let memories = self.as_mut().memories_mut();
1423-
for i in 0..module.num_defined_memories() {
1424-
let defined_memory_index = DefinedMemoryIndex::new(i);
1425-
let memory_index = module.memory_index(defined_memory_index);
1426-
if module.memories[memory_index].shared {
1427-
let def_ptr = memories[defined_memory_index]
1428-
.1
1429-
.as_shared_memory()
1430-
.unwrap()
1431-
.vmmemory_ptr();
1432-
ptr.write(VmPtr::from(def_ptr));
1433-
} else {
1434-
owned_ptr.write(memories[defined_memory_index].1.vmmemory());
1435-
ptr.write(VmPtr::from(owned_ptr));
1436-
owned_ptr = owned_ptr.add(1);
1483+
//
1484+
// SAFETY: it's safe to initialize these memories during initialization
1485+
// here and the various types of pointers and such here should all be
1486+
// valid.
1487+
unsafe {
1488+
let mut ptr = self.vmctx_plus_offset_raw(offsets.vmctx_memories_begin());
1489+
let mut owned_ptr = self.vmctx_plus_offset_raw(offsets.vmctx_owned_memories_begin());
1490+
let memories = self.as_mut().memories_mut();
1491+
for i in 0..module.num_defined_memories() {
1492+
let defined_memory_index = DefinedMemoryIndex::new(i);
1493+
let memory_index = module.memory_index(defined_memory_index);
1494+
if module.memories[memory_index].shared {
1495+
let def_ptr = memories[defined_memory_index]
1496+
.1
1497+
.as_shared_memory()
1498+
.unwrap()
1499+
.vmmemory_ptr();
1500+
ptr.write(VmPtr::from(def_ptr));
1501+
} else {
1502+
owned_ptr.write(memories[defined_memory_index].1.vmmemory());
1503+
ptr.write(VmPtr::from(owned_ptr));
1504+
owned_ptr = owned_ptr.add(1);
1505+
}
1506+
ptr = ptr.add(1);
14371507
}
1438-
ptr = ptr.add(1);
14391508
}
14401509

14411510
// Zero-initialize the globals so that nothing is uninitialized memory
14421511
// after this function returns. The globals are actually initialized
14431512
// with their const expression initializers after the instance is fully
14441513
// allocated.
1445-
for (index, _init) in module.global_initializers.iter() {
1446-
self.global_ptr(index).write(VMGlobalDefinition::new());
1514+
//
1515+
// SAFETY: it's safe to initialize globals during initialization
1516+
// here. Note that while the value being written is not valid for all
1517+
// types of globals it's initializing the memory to zero instead of
1518+
// being in an undefined state. So it's still unsafe to access globals
1519+
// after this, but if it's read then it'd hopefully crash faster than
1520+
// leaving this undefined.
1521+
unsafe {
1522+
for (index, _init) in module.global_initializers.iter() {
1523+
self.global_ptr(index).write(VMGlobalDefinition::new());
1524+
}
14471525
}
14481526

14491527
// Initialize the defined tags
1450-
let mut ptr = self.vmctx_plus_offset_raw(offsets.vmctx_tags_begin());
1451-
for i in 0..module.num_defined_tags() {
1452-
let defined_index = DefinedTagIndex::new(i);
1453-
let tag_index = module.tag_index(defined_index);
1454-
let tag = module.tags[tag_index];
1455-
ptr.write(VMTagDefinition::new(
1456-
tag.signature.unwrap_engine_type_index(),
1457-
));
1458-
ptr = ptr.add(1);
1528+
//
1529+
// SAFETY: it's safe to initialize these tags during initialization
1530+
// here and the various types of pointers and such here should all be
1531+
// valid.
1532+
unsafe {
1533+
let mut ptr = self.vmctx_plus_offset_raw(offsets.vmctx_tags_begin());
1534+
for i in 0..module.num_defined_tags() {
1535+
let defined_index = DefinedTagIndex::new(i);
1536+
let tag_index = module.tag_index(defined_index);
1537+
let tag = module.tags[tag_index];
1538+
ptr.write(VMTagDefinition::new(
1539+
tag.signature.unwrap_engine_type_index(),
1540+
));
1541+
ptr = ptr.add(1);
1542+
}
14591543
}
14601544
}
14611545

0 commit comments

Comments (0)