Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Use checked arithmetic in LocalsPlus and DataStack allocators
  • Loading branch information
youknowone committed Mar 4, 2026
commit e8d94bb74e58c2bab2d47f023374202cbb019b7c
5 changes: 4 additions & 1 deletion crates/common/src/lock.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,10 @@ cfg_if::cfg_if! {
pub use std::sync::LazyLock;
} else {
Comment thread
coderabbitai[bot] marked this conversation as resolved.
/// Minimal `LazyLock` shim for no_std builds, wrapping `core::cell::LazyCell`.
/// Mirrors the API subset of `std::sync::LazyLock` used by this crate.
pub struct LazyLock<T, F = fn() -> T>(core::cell::LazyCell<T, F>);
// SAFETY: This branch is only active when both "std" and "threading"
// features are absent — i.e., truly single-threaded no_std environments
// (e.g., embedded or bare-metal WASM). Without std, the Rust runtime
// cannot spawn threads, so Sync is trivially satisfied.
unsafe impl<T, F> Sync for LazyLock<T, F> {}
Comment thread
coderabbitai[bot] marked this conversation as resolved.

impl<T, F: FnOnce() -> T> LazyLock<T, F> {
Expand Down
11 changes: 8 additions & 3 deletions crates/vm/src/datastack.rs
Original file line number Diff line number Diff line change
Expand Up @@ -102,10 +102,15 @@ impl DataStack {
#[inline(never)]
fn push_slow(&mut self, aligned_size: usize) -> *mut u8 {
let mut chunk_size = MIN_CHUNK_SIZE;
let needed =
aligned_size + MINIMUM_OVERHEAD + core::mem::size_of::<DataStackChunk>() + ALIGN;
let needed = aligned_size
.checked_add(MINIMUM_OVERHEAD)
.and_then(|v| v.checked_add(core::mem::size_of::<DataStackChunk>()))
.and_then(|v| v.checked_add(ALIGN))
.expect("DataStack chunk size overflow");
while chunk_size < needed {
chunk_size *= 2;
chunk_size = chunk_size
.checked_mul(2)
.expect("DataStack chunk size overflow");
}
// Save current position in old chunk.
unsafe {
Expand Down
91 changes: 59 additions & 32 deletions crates/vm/src/frame.rs
Original file line number Diff line number Diff line change
Expand Up @@ -163,10 +163,13 @@ const _: () = {
impl LocalsPlus {
/// Create a new heap-backed LocalsPlus. All slots start as None (0).
fn new(nlocalsplus: usize, stacksize: usize) -> Self {
    // Checked add: an adversarial/huge code object must not wrap the
    // slot count and under-allocate the backing buffer.
    let capacity = nlocalsplus
        .checked_add(stacksize)
        .expect("LocalsPlus capacity overflow");
    // Checked narrowing instead of a silently-truncating `as` cast.
    let nlocalsplus_u32 = u32::try_from(nlocalsplus).expect("nlocalsplus exceeds u32");
    Self {
        // 0 = None for both PyObjectRef and PyStackRef slots.
        data: LocalsPlusData::Heap(vec![0usize; capacity].into_boxed_slice()),
        nlocalsplus: nlocalsplus_u32,
        stack_top: 0,
    }
}
Expand All @@ -177,14 +180,19 @@ impl LocalsPlus {
/// The caller must call `materialize_localsplus()` when the frame finishes
/// to migrate data to the heap, then `datastack_pop()` to free the memory.
fn new_on_datastack(nlocalsplus: usize, stacksize: usize, vm: &VirtualMachine) -> Self {
    // Checked arithmetic: overflow here would under-size the datastack
    // allocation and corrupt memory, so fail loudly instead of wrapping.
    let capacity = nlocalsplus
        .checked_add(stacksize)
        .expect("LocalsPlus capacity overflow");
    let byte_size = capacity
        .checked_mul(core::mem::size_of::<usize>())
        .expect("LocalsPlus byte size overflow");
    // Checked narrowing instead of a silently-truncating `as` cast.
    let nlocalsplus_u32 = u32::try_from(nlocalsplus).expect("nlocalsplus exceeds u32");
    let ptr = vm.datastack_push(byte_size) as *mut usize;
    // Zero-initialize all slots (0 = None for both PyObjectRef and PyStackRef).
    // SAFETY: `ptr` points to a freshly pushed region of at least
    // `capacity * size_of::<usize>()` bytes, as computed above.
    unsafe { core::ptr::write_bytes(ptr, 0, capacity) };
    Self {
        data: LocalsPlusData::DataStack { ptr, capacity },
        nlocalsplus: nlocalsplus_u32,
        stack_top: 0,
    }
}
Expand Down Expand Up @@ -2362,18 +2370,16 @@ impl ExecutingFrame<'_> {
// Same as LoadFast but explicitly checks for unbound locals
// (LoadFast in RustPython already does this check)
let idx = idx.get(arg) as usize;
let x = self.localsplus.fastlocals()[idx]
.clone()
.ok_or_else(|| {
vm.new_exception_msg(
vm.ctx.exceptions.unbound_local_error.to_owned(),
format!(
"local variable '{}' referenced before assignment",
self.code.varnames[idx]
)
.into(),
let x = self.localsplus.fastlocals()[idx].clone().ok_or_else(|| {
vm.new_exception_msg(
vm.ctx.exceptions.unbound_local_error.to_owned(),
format!(
"local variable '{}' referenced before assignment",
self.code.varnames[idx]
)
})?;
.into(),
)
})?;
self.push_value(x);
Ok(None)
}
Expand Down Expand Up @@ -2413,18 +2419,16 @@ impl ExecutingFrame<'_> {
// lifetime issues at yield/exception points are resolved.
Instruction::LoadFastBorrow { var_num: idx } => {
let idx = idx.get(arg) as usize;
let x = self.localsplus.fastlocals()[idx]
.clone()
.ok_or_else(|| {
vm.new_exception_msg(
vm.ctx.exceptions.unbound_local_error.to_owned(),
format!(
"local variable '{}' referenced before assignment",
self.code.varnames[idx]
)
.into(),
let x = self.localsplus.fastlocals()[idx].clone().ok_or_else(|| {
vm.new_exception_msg(
vm.ctx.exceptions.unbound_local_error.to_owned(),
format!(
"local variable '{}' referenced before assignment",
self.code.varnames[idx]
)
})?;
.into(),
)
})?;
self.push_value(x);
Ok(None)
}
Expand Down Expand Up @@ -4312,7 +4316,10 @@ impl ExecutingFrame<'_> {
let nargs: u32 = arg.into();
let callable = self.nth_value(nargs + 1);
let stack_len = self.localsplus.stack_len();
let self_or_null_is_some = self.localsplus.stack_index(stack_len - nargs as usize - 1).is_some();
let self_or_null_is_some = self
.localsplus
.stack_index(stack_len - nargs as usize - 1)
.is_some();
if !self_or_null_is_some
&& cached_version != 0
&& let Some(cls) = callable.downcast_ref::<PyType>()
Expand Down Expand Up @@ -5994,11 +6001,21 @@ impl ExecutingFrame<'_> {
args_vec.push(self_val);
}
for stack_idx in args_start..stack_len {
let val = self.localsplus.stack_index_mut(stack_idx).take().unwrap().to_pyobj();
let val = self
.localsplus
.stack_index_mut(stack_idx)
.take()
.unwrap()
.to_pyobj();
args_vec.push(val);
}

let callable_obj = self.localsplus.stack_index_mut(callable_idx).take().unwrap().to_pyobj();
let callable_obj = self
.localsplus
.stack_index_mut(callable_idx)
.take()
.unwrap()
.to_pyobj();
self.localsplus.stack_truncate(callable_idx);

// invoke_vectorcall falls back to FuncArgs if no vectorcall slot
Expand Down Expand Up @@ -6049,11 +6066,21 @@ impl ExecutingFrame<'_> {
args_vec.push(self_val);
}
for stack_idx in args_start..stack_len {
let val = self.localsplus.stack_index_mut(stack_idx).take().unwrap().to_pyobj();
let val = self
.localsplus
.stack_index_mut(stack_idx)
.take()
.unwrap()
.to_pyobj();
args_vec.push(val);
}

let callable_obj = self.localsplus.stack_index_mut(callable_idx).take().unwrap().to_pyobj();
let callable_obj = self
.localsplus
.stack_index_mut(callable_idx)
.take()
.unwrap()
.to_pyobj();
self.localsplus.stack_truncate(callable_idx);

// invoke_vectorcall falls back to FuncArgs if no vectorcall slot
Expand Down