@@ -3508,7 +3508,7 @@ impl ExecutingFrame<'_> {
35083508 let callable = self . pop_value ( ) ;
35093509 let callable_tag = & * callable as * const PyObject as u32 ;
35103510 let is_len_callable = callable
3511- . downcast_ref :: < PyNativeFunction > ( )
3511+ . downcast_ref_if_exact :: < PyNativeFunction > ( vm )
35123512 . is_some_and ( |native| native. zelf . is_none ( ) && native. value . name == "len" ) ;
35133513 if null. is_none ( ) && cached_tag == callable_tag && is_len_callable {
35143514 let len = obj. length ( vm) ?;
@@ -3536,7 +3536,7 @@ impl ExecutingFrame<'_> {
35363536 let callable = self . nth_value ( nargs + 1 ) ;
35373537 let callable_tag = callable as * const PyObject as u32 ;
35383538 let is_isinstance_callable = callable
3539- . downcast_ref :: < PyNativeFunction > ( )
3539+ . downcast_ref_if_exact :: < PyNativeFunction > ( vm )
35403540 . is_some_and ( |native| {
35413541 native. zelf . is_none ( ) && native. value . name == "isinstance"
35423542 } ) ;
@@ -3626,7 +3626,11 @@ impl ExecutingFrame<'_> {
36263626 let self_or_null_is_some = stack[ stack_len - nargs as usize - 1 ] . is_some ( ) ;
36273627 let effective_nargs = nargs + u32:: from ( self_or_null_is_some) ;
36283628 let callable = self . nth_value ( nargs + 1 ) ;
3629- if callable. downcast_ref :: < PyNativeFunction > ( ) . is_some ( ) && effective_nargs == 1 {
3629+ if callable
3630+ . downcast_ref_if_exact :: < PyNativeFunction > ( vm)
3631+ . is_some ( )
3632+ && effective_nargs == 1
3633+ {
36303634 let nargs_usize = nargs as usize ;
36313635 let pos_args: Vec < PyObjectRef > = self . pop_multiple ( nargs_usize) . collect ( ) ;
36323636 let self_or_null = self . pop_value_opt ( ) ;
@@ -3651,7 +3655,10 @@ impl ExecutingFrame<'_> {
36513655 let self_or_null_is_some = stack[ stack_len - nargs as usize - 1 ] . is_some ( ) ;
36523656 let effective_nargs = nargs + u32:: from ( self_or_null_is_some) ;
36533657 let callable = self . nth_value ( nargs + 1 ) ;
3654- if callable. downcast_ref :: < PyNativeFunction > ( ) . is_some ( ) {
3658+ if callable
3659+ . downcast_ref_if_exact :: < PyNativeFunction > ( vm)
3660+ . is_some ( )
3661+ {
36553662 let nargs_usize = nargs as usize ;
36563663 let pos_args: Vec < PyObjectRef > = self . pop_multiple ( nargs_usize) . collect ( ) ;
36573664 let self_or_null = self . pop_value_opt ( ) ;
@@ -3921,6 +3928,7 @@ impl ExecutingFrame<'_> {
39213928 // Look up __init__ (guarded by type_version)
39223929 if let Some ( init) = cls. get_attr ( identifier ! ( vm, __init__) )
39233930 && let Some ( init_func) = init. downcast_ref :: < PyFunction > ( )
3931+ && init_func. can_specialize_call ( nargs + 1 )
39243932 {
39253933 // Allocate object directly (tp_new == object.__new__)
39263934 let dict = if cls
@@ -4003,7 +4011,10 @@ impl ExecutingFrame<'_> {
40034011 let self_or_null_is_some = stack[ stack_len - nargs as usize - 1 ] . is_some ( ) ;
40044012 let effective_nargs = nargs + u32:: from ( self_or_null_is_some) ;
40054013 let callable = self . nth_value ( nargs + 1 ) ;
4006- if callable. downcast_ref :: < PyNativeFunction > ( ) . is_some ( ) {
4014+ if callable
4015+ . downcast_ref_if_exact :: < PyNativeFunction > ( vm)
4016+ . is_some ( )
4017+ {
40074018 let nargs_usize = nargs as usize ;
40084019 let pos_args: Vec < PyObjectRef > = self . pop_multiple ( nargs_usize) . collect ( ) ;
40094020 let self_or_null = self . pop_value_opt ( ) ;
@@ -4601,23 +4612,12 @@ impl ExecutingFrame<'_> {
46014612 }
46024613 }
46034614 Instruction :: ForIterGen => {
4615+ // ForIterGen is not faithfully implementable without inline
4616+ // generator frame resumption (as CPython does). Fall through
4617+ // to the generic path so the debugger sees StopIteration.
46044618 let target = bytecode:: Label ( self . lasti ( ) + 1 + u32:: from ( arg) ) ;
4605- let iter = self . top_value ( ) ;
4606- if let Some ( generator) = iter. downcast_ref_if_exact :: < PyGenerator > ( vm) {
4607- match generator. as_coro ( ) . send ( iter, vm. ctx . none ( ) , vm) {
4608- Ok ( PyIterReturn :: Return ( value) ) => {
4609- self . push_value ( value) ;
4610- }
4611- Ok ( PyIterReturn :: StopIteration ( _) ) => {
4612- self . for_iter_jump_on_exhausted ( target) ;
4613- }
4614- Err ( e) => return Err ( e) ,
4615- }
4616- Ok ( None )
4617- } else {
4618- self . execute_for_iter ( vm, target) ?;
4619- Ok ( None )
4620- }
4619+ self . execute_for_iter ( vm, target) ?;
4620+ Ok ( None )
46214621 }
46224622 Instruction :: LoadGlobalModule => {
46234623 let oparg = u32:: from ( arg) ;
@@ -6964,7 +6964,7 @@ impl ExecutingFrame<'_> {
69646964 }
69656965
69666966 // Try to specialize builtin calls
6967- if let Some ( native) = callable. downcast_ref :: < PyNativeFunction > ( ) {
6967+ if let Some ( native) = callable. downcast_ref_if_exact :: < PyNativeFunction > ( vm ) {
69686968 let effective_nargs = nargs + u32:: from ( self_or_null_is_some) ;
69696969 let callable_tag = callable as * const PyObject as u32 ;
69706970 let new_op = if native. zelf . is_none ( )
@@ -7299,8 +7299,6 @@ impl ExecutingFrame<'_> {
72997299 Some ( Instruction :: ForIterList )
73007300 } else if iter. downcast_ref_if_exact :: < PyTupleIterator > ( vm) . is_some ( ) {
73017301 Some ( Instruction :: ForIterTuple )
7302- } else if iter. downcast_ref_if_exact :: < PyGenerator > ( vm) . is_some ( ) {
7303- Some ( Instruction :: ForIterGen )
73047302 } else {
73057303 None
73067304 } ;