Skip to content
Merged
Changes from 1 commit
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
687e99f
Add debug_assert to invoke_exact_args, lazy func_version reassignment
youknowone Mar 2, 2026
81d307b
working
youknowone Mar 1, 2026
0176223
Add COMPARE_OP, TO_BOOL, FOR_ITER, LOAD_GLOBAL specialization
youknowone Mar 1, 2026
9bb0c46
Add BINARY_SUBSCR, CONTAINS_OP, UNPACK_SEQUENCE, STORE_ATTR specializ…
youknowone Mar 1, 2026
1c07777
Add STORE_SUBSCR, BinaryOpAddUnicode, ToBoolAlwaysTrue, CallLen, Call…
youknowone Mar 1, 2026
240f3ac
Add BinaryOpSubscrStrInt, CallStr1, CallTuple1 specialization
youknowone Mar 1, 2026
cadb9be
Add BinaryOpInplaceAddUnicode specialization
youknowone Mar 1, 2026
fd098fe
Add LoadAttrModule, CallBuiltinO, CallPyGeneral, CallBoundMethodGener…
youknowone Mar 2, 2026
dd29113
Add LoadAttrNondescriptor*, CallMethodDescriptor* specialization
youknowone Mar 2, 2026
b238a27
Add CallBuiltinFast, CallNonPyGeneral specialization
youknowone Mar 2, 2026
d950035
Add SendGen specialization for generator/coroutine send
youknowone Mar 2, 2026
32376d5
Add LoadAttrSlot, StoreAttrSlot specialization for __slots__ access
youknowone Mar 2, 2026
a7c179c
Add LoadSuperAttrAttr, LoadSuperAttrMethod, CallBuiltinClass, CallBui…
youknowone Mar 2, 2026
e1289f1
Add LoadAttrProperty specialization for property descriptor access
youknowone Mar 2, 2026
2350bc1
Add LoadAttrClass specialization for class attribute access
youknowone Mar 2, 2026
ba9d528
Add BinaryOpSubscrListSlice specialization
youknowone Mar 2, 2026
3c88368
Add CallKwPy, CallKwBoundMethod, CallKwNonPy specialization
youknowone Mar 2, 2026
ab6bbb6
Clean up comments in specialization code
youknowone Mar 2, 2026
48fd5c7
fix check_signals
youknowone Mar 2, 2026
51accdb
fix import
youknowone Mar 3, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Add SendGen specialization for generator/coroutine send
- SendGen: direct coro.send() for generator/coroutine receivers
- Add adaptive counter to Send instruction
- specialize_send checks builtin_coro for PyGenerator/PyCoroutine
  • Loading branch information
youknowone committed Mar 3, 2026
commit d9500354828db05f1faf167aa35fb1e3eb3e0be6
91 changes: 85 additions & 6 deletions crates/vm/src/frame.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2799,22 +2799,83 @@ impl ExecutingFrame<'_> {
}
Instruction::Send { .. } => {
// (receiver, v -- receiver, retval)
// Pops v, sends it to receiver. On yield, pushes retval
// (so stack = [..., receiver, retval]). On return/StopIteration,
// also pushes retval and jumps to END_SEND which will pop receiver.
// Relative forward: target = lasti + caches(1) + delta
//
// Adaptive specialization: the inline-cache slot immediately after
// this instruction (instr_idx + 1) holds a countdown counter. Each
// execution decrements it; once it hits zero, specialize_send is
// given a chance to rewrite this SEND into a specialized opcode
// (e.g. SendGen) based on the receiver currently on the stack.
// NOTE(review): lasti() appears to have already advanced past this
// instruction, hence the `- 1` to recover the instruction index —
// confirm against the dispatch loop.
let instr_idx = self.lasti() as usize - 1;
let cache_base = instr_idx + 1;
let counter = self.code.instructions.read_cache_u16(cache_base);
if counter > 0 {
// Not yet time to specialize: just tick the counter down.
// SAFETY-relevant: writes the cache slot in place; presumably
// single-threaded frame execution makes this sound — verify.
unsafe {
self.code
.instructions
.write_cache_u16(cache_base, counter - 1);
}
} else {
// Counter exhausted: attempt specialization (or reset backoff).
self.specialize_send(instr_idx, cache_base);
}
// Jump target on return/StopIteration: skip the cache entry (+1)
// plus the encoded forward delta.
let exit_label = bytecode::Label(self.lasti() + 1 + u32::from(arg));
let val = self.pop_value();
let receiver = self.top_value();

// Generic (unspecialized) send path.
match self._send(receiver, val, vm)? {
PyIterReturn::Return(value) => {
// Receiver yielded: push the yielded value and fall through.
self.push_value(value);
Ok(None)
}
PyIterReturn::StopIteration(value) => {
// Receiver finished. If tracing is active, fire an 'exception'
// trace event for the StopIteration before unwrapping it.
if vm.use_tracing.get() && !vm.is_none(&self.object.trace.lock()) {
let stop_exc = vm.new_stop_iteration(value.clone());
self.fire_exception_trace(&stop_exc, vm)?;
}
// Push the StopIteration payload (or None) and jump to END_SEND.
let value = vm.unwrap_or_none(value);
self.push_value(value);
self.jump(exit_label);
Ok(None)
}
}
}
Instruction::SendGen => {
let exit_label = bytecode::Label(self.lasti() + 1 + u32::from(arg));
// Stack: [receiver, val] — peek receiver before popping
let receiver = self.nth_value(1);
let is_coro = self.builtin_coro(receiver).is_some();
let val = self.pop_value();
let receiver = self.top_value();

if is_coro {
let coro = self.builtin_coro(receiver).unwrap();
match coro.send(receiver, val, vm)? {
PyIterReturn::Return(value) => {
self.push_value(value);
return Ok(None);
}
PyIterReturn::StopIteration(value) => {
if vm.use_tracing.get() && !vm.is_none(&self.object.trace.lock()) {
let stop_exc = vm.new_stop_iteration(value.clone());
self.fire_exception_trace(&stop_exc, vm)?;
}
let value = vm.unwrap_or_none(value);
self.push_value(value);
self.jump(exit_label);
return Ok(None);
}
}
}
// Deoptimize
let instr_idx = self.lasti() as usize - 1;
let cache_base = instr_idx + 1;
unsafe {
self.code
.instructions
.replace_op(instr_idx, Instruction::Send { target: Arg::marker() });
self.code
.instructions
.write_adaptive_counter(cache_base, ADAPTIVE_BACKOFF_VALUE);
}
match self._send(receiver, val, vm)? {
PyIterReturn::Return(value) => {
self.push_value(value);
Ok(None)
}
PyIterReturn::StopIteration(value) => {
// Fire 'exception' trace event for StopIteration,
// matching SEND's exception handling.
if vm.use_tracing.get() && !vm.is_none(&self.object.trace.lock()) {
let stop_exc = vm.new_stop_iteration(value.clone());
self.fire_exception_trace(&stop_exc, vm)?;
Expand Down Expand Up @@ -6200,6 +6261,24 @@ impl ExecutingFrame<'_> {
}
}

/// Try to specialize a generic SEND instruction based on the receiver
/// currently on the stack (stack layout: `[receiver, val]`, receiver one
/// below the top).
///
/// If the receiver is a builtin generator/coroutine, the instruction at
/// `instr_idx` is rewritten in place to the specialized `SendGen` opcode.
/// Otherwise the adaptive counter at `cache_base` is reset to the backoff
/// value so specialization is retried later.
fn specialize_send(&mut self, instr_idx: usize, cache_base: usize) {
    let candidate = self.nth_value(1);
    if self.builtin_coro(candidate).is_none() {
        // Receiver is not a generator/coroutine: back off before the
        // next specialization attempt.
        unsafe {
            self.code
                .instructions
                .write_adaptive_counter(cache_base, ADAPTIVE_BACKOFF_VALUE);
        }
        return;
    }
    // Generator/coroutine receiver: rewrite SEND -> SEND_GEN in place.
    unsafe {
        self.code
            .instructions
            .replace_op(instr_idx, Instruction::SendGen);
    }
}

fn specialize_compare_op(
&mut self,
vm: &VirtualMachine,
Expand Down