4141#include "bc.h"
4242#include "objgenerator.h"
4343
44- // With these macros you can tune the maximum number of state slots
44+ // With these macros you can tune the maximum number of function state bytes
4545// that will be allocated on the stack. Any function that needs more
4646// than this will use the heap.
47- #define VM_MAX_STATE_ON_STACK (10)
48- #define VM_MAX_EXC_STATE_ON_STACK (4)
47+ #define VM_MAX_STATE_ON_STACK (40)
4948
5049#define DETECT_VM_STACK_OVERFLOW (0)
5150#if 0
@@ -121,67 +120,65 @@ mp_vm_return_kind_t mp_execute_bytecode(const byte *code, const mp_obj_t *args,
121120#if DETECT_VM_STACK_OVERFLOW
122121 n_state += 1 ;
123122#endif
124- mp_obj_t * state ;
125- if (n_state > VM_MAX_STATE_ON_STACK ) {
126- state = m_new (mp_obj_t , n_state );
127- } else {
128- state = alloca (sizeof (mp_obj_t ) * n_state );
129- }
130- mp_obj_t * sp = & state [0 ] - 1 ;
131123
132- // allocate state for exceptions
133- mp_exc_stack_t * exc_stack ;
134- if (n_exc_stack > VM_MAX_EXC_STATE_ON_STACK ) {
135- exc_stack = m_new ( mp_exc_stack_t , n_exc_stack );
124+ int state_size = n_state * sizeof ( mp_obj_t ) + n_exc_stack * sizeof ( mp_exc_stack_t );
125+ mp_code_state * code_state ;
126+ if (state_size > VM_MAX_STATE_ON_STACK ) {
127+ code_state = m_new_obj_var ( mp_code_state , byte , state_size );
136128 } else {
137- exc_stack = alloca (sizeof (mp_exc_stack_t ) * n_exc_stack );
129+ code_state = alloca (sizeof (mp_code_state ) + state_size );
138130 }
139- mp_exc_stack_t * exc_sp = & exc_stack [0 ] - 1 ;
131+
132+ code_state -> code_info = code ;
133+ code_state -> sp = & code_state -> state [0 ] - 1 ;
134+ code_state -> exc_sp = (mp_exc_stack_t * )(code_state -> state + n_state ) - 1 ;
135+ code_state -> n_state = n_state ;
140136
141137 // init args
142138 for (uint i = 0 ; i < n_args ; i ++ ) {
143- state [n_state - 1 - i ] = args [i ];
139+ code_state -> state [n_state - 1 - i ] = args [i ];
144140 }
145141 for (uint i = 0 ; i < n_args2 ; i ++ ) {
146- state [n_state - 1 - n_args - i ] = args2 [i ];
142+ code_state -> state [n_state - 1 - n_args - i ] = args2 [i ];
147143 }
148144
149145 // set rest of state to MP_OBJ_NULL
150146 for (uint i = 0 ; i < n_state - n_args - n_args2 ; i ++ ) {
151- state [i ] = MP_OBJ_NULL ;
147+ code_state -> state [i ] = MP_OBJ_NULL ;
152148 }
153149
154150 // bytecode prelude: initialise closed over variables
155151 for (uint n_local = * ip ++ ; n_local > 0 ; n_local -- ) {
156152 uint local_num = * ip ++ ;
157- state [n_state - 1 - local_num ] = mp_obj_new_cell (state [n_state - 1 - local_num ]);
153+ code_state -> state [n_state - 1 - local_num ] = mp_obj_new_cell (code_state -> state [n_state - 1 - local_num ]);
158154 }
159155
156+ code_state -> ip = ip ;
157+
160158 // execute the byte code
161- mp_vm_return_kind_t vm_return_kind = mp_execute_bytecode2 (code , & ip , & state [ n_state - 1 ], & sp , exc_stack , & exc_sp , MP_OBJ_NULL );
159+ mp_vm_return_kind_t vm_return_kind = mp_execute_bytecode2 (code_state , MP_OBJ_NULL );
162160
163161#if DETECT_VM_STACK_OVERFLOW
164162 if (vm_return_kind == MP_VM_RETURN_NORMAL ) {
165- if (sp != state ) {
166- printf ("Stack misalign: %d\n" , sp - state );
163+ if (code_state -> sp != code_state -> state ) {
164+ printf ("Stack misalign: %d\n" , code_state -> sp - code_state -> state );
167165 assert (0 );
168166 }
169167 }
170-
171168 // We can't check the case when an exception is returned in state[n_state - 1]
172169 // and there are no arguments, because in this case our detection slot may have
173170 // been overwritten by the returned exception (which is allowed).
174171 if (!(vm_return_kind == MP_VM_RETURN_EXCEPTION && n_args == 0 && n_args2 == 0 )) {
175172 // Just check to see that we have at least 1 null object left in the state.
176173 bool overflow = true;
177174 for (uint i = 0 ; i < n_state - n_args - n_args2 ; i ++ ) {
178- if (state [i ] == MP_OBJ_NULL ) {
175+ if (code_state -> state [i ] == MP_OBJ_NULL ) {
179176 overflow = false;
180177 break ;
181178 }
182179 }
183180 if (overflow ) {
184- printf ("VM stack overflow state=%p n_state+1=%u\n" , state , n_state );
181+ printf ("VM stack overflow state=%p n_state+1=%u\n" , code_state -> state , n_state );
185182 assert (0 );
186183 }
187184 }
@@ -191,13 +188,13 @@ mp_vm_return_kind_t mp_execute_bytecode(const byte *code, const mp_obj_t *args,
191188 switch (vm_return_kind ) {
192189 case MP_VM_RETURN_NORMAL :
193190 // return value is in *sp
194- * ret = * sp ;
191+ * ret = * code_state -> sp ;
195192 ret_kind = MP_VM_RETURN_NORMAL ;
196193 break ;
197194
198195 case MP_VM_RETURN_EXCEPTION :
199196 // return value is in state[n_state - 1]
200- * ret = state [n_state - 1 ];
197+ * ret = code_state -> state [n_state - 1 ];
201198 ret_kind = MP_VM_RETURN_EXCEPTION ;
202199 break ;
203200
@@ -209,15 +206,9 @@ mp_vm_return_kind_t mp_execute_bytecode(const byte *code, const mp_obj_t *args,
209206 }
210207
211208 // free the state if it was allocated on the heap
212- if (n_state > VM_MAX_STATE_ON_STACK ) {
213- m_free ( state , n_state );
209+ if (state_size > VM_MAX_STATE_ON_STACK ) {
210+ m_del_var ( mp_code_state , byte , state_size , code_state );
214211 }
215-
216- // free the exception state if it was allocated on the heap
217- if (n_exc_stack > VM_MAX_EXC_STATE_ON_STACK ) {
218- m_free (exc_stack , n_exc_stack );
219- }
220-
221212 return ret_kind ;
222213}
223214
@@ -227,10 +218,7 @@ mp_vm_return_kind_t mp_execute_bytecode(const byte *code, const mp_obj_t *args,
227218// MP_VM_RETURN_NORMAL, sp valid, return value in *sp
228219// MP_VM_RETURN_YIELD, ip, sp valid, yielded value in *sp
229220// MP_VM_RETURN_EXCEPTION, exception in fastn[0]
230- mp_vm_return_kind_t mp_execute_bytecode2 (const byte * code_info , const byte * * ip_in_out ,
231- mp_obj_t * fastn , mp_obj_t * * sp_in_out ,
232- mp_exc_stack_t * exc_stack , mp_exc_stack_t * * exc_sp_in_out ,
233- volatile mp_obj_t inject_exc ) {
221+ mp_vm_return_kind_t mp_execute_bytecode2 (mp_code_state * code_state , volatile mp_obj_t inject_exc ) {
234222#if MICROPY_OPT_COMPUTED_GOTO
235223 #include "vmentrytable.h"
236224 #define DISPATCH () do { \
@@ -252,20 +240,24 @@ mp_vm_return_kind_t mp_execute_bytecode2(const byte *code_info, const byte **ip_
252240 // loop and the exception handler, leading to very obscure bugs.
253241 #define RAISE (o ) do { nlr_pop(); nlr.ret_val = o; goto exception_handler; } while(0)
254242
243+ // Pointers which are constant for particular invocation of mp_execute_bytecode2()
244+ mp_obj_t * const fastn = & code_state -> state [code_state -> n_state - 1 ];
245+ mp_exc_stack_t * const exc_stack = (mp_exc_stack_t * )(code_state -> state + code_state -> n_state );
246+
255247 // variables that are visible to the exception handler (declared volatile)
256- volatile bool currently_in_except_block = MP_TAGPTR_TAG (* exc_sp_in_out ); // 0 or 1, to detect nested exceptions
257- mp_exc_stack_t * volatile exc_sp = MP_TAGPTR_PTR (* exc_sp_in_out ); // stack grows up, exc_sp points to top of stack
258- const byte * volatile save_ip = * ip_in_out ; // this is so we can access ip in the exception handler without making ip volatile (which means the compiler can't keep it in a register in the main loop)
259- mp_obj_t * volatile save_sp = * sp_in_out ; // this is so we can access sp in the exception handler when needed
248+ volatile bool currently_in_except_block = MP_TAGPTR_TAG (code_state -> exc_sp ); // 0 or 1, to detect nested exceptions
249+ mp_exc_stack_t * volatile exc_sp = MP_TAGPTR_PTR (code_state -> exc_sp ); // stack grows up, exc_sp points to top of stack
250+ const byte * volatile save_ip = code_state -> ip ; // this is so we can access ip in the exception handler without making ip volatile (which means the compiler can't keep it in a register in the main loop)
251+ mp_obj_t * volatile save_sp = code_state -> sp ; // this is so we can access sp in the exception handler when needed
260252
261253 // outer exception handling loop
262254 for (;;) {
263255 nlr_buf_t nlr ;
264256outer_dispatch_loop :
265257 if (nlr_push (& nlr ) == 0 ) {
266258 // local variables that are not visible to the exception handler
267- const byte * ip = * ip_in_out ;
268- mp_obj_t * sp = * sp_in_out ;
259+ const byte * ip = code_state -> ip ;
260+ mp_obj_t * sp = code_state -> sp ;
269261 machine_uint_t unum ;
270262 mp_obj_t obj_shared ;
271263
@@ -908,7 +900,7 @@ mp_vm_return_kind_t mp_execute_bytecode2(const byte *code_info, const byte **ip_
908900 exc_sp -- ;
909901 }
910902 nlr_pop ();
911- * sp_in_out = sp ;
903+ code_state -> sp = sp ;
912904 assert (exc_sp == exc_stack - 1 );
913905 return MP_VM_RETURN_NORMAL ;
914906
@@ -939,9 +931,9 @@ mp_vm_return_kind_t mp_execute_bytecode2(const byte *code_info, const byte **ip_
939931 ENTRY (MP_BC_YIELD_VALUE ):
940932yield :
941933 nlr_pop ();
942- * ip_in_out = ip ;
943- * sp_in_out = sp ;
944- * exc_sp_in_out = MP_TAGPTR_MAKE (exc_sp , currently_in_except_block );
934+ code_state -> ip = ip ;
935+ code_state -> sp = sp ;
936+ code_state -> exc_sp = MP_TAGPTR_MAKE (exc_sp , currently_in_except_block );
945937 return MP_VM_RETURN_YIELD ;
946938
947939 ENTRY (MP_BC_YIELD_FROM ): {
@@ -1035,8 +1027,8 @@ mp_vm_return_kind_t mp_execute_bytecode2(const byte *code_info, const byte **ip_
10351027 const byte * ip = save_ip + 1 ;
10361028 machine_uint_t unum ;
10371029 DECODE_ULABEL ; // the jump offset if iteration finishes; for labels are always forward
1038- * ip_in_out = ip + unum ; // jump to after for-block
1039- * sp_in_out = save_sp - 1 ; // pop the exhausted iterator
1030+ code_state -> ip = ip + unum ; // jump to after for-block
1031+ code_state -> sp = save_sp - 1 ; // pop the exhausted iterator
10401032 goto outer_dispatch_loop ; // continue with dispatch loop
10411033 }
10421034
@@ -1045,6 +1037,7 @@ mp_vm_return_kind_t mp_execute_bytecode2(const byte *code_info, const byte **ip_
10451037 // But consider how to handle nested exceptions.
10461038 // TODO need a better way of not adding traceback to constant objects (right now, just GeneratorExit_obj and MemoryError_obj)
10471039 if (mp_obj_is_exception_instance (nlr .ret_val ) && nlr .ret_val != & mp_const_GeneratorExit_obj && nlr .ret_val != & mp_const_MemoryError_obj ) {
1040+ const byte * code_info = code_state -> code_info ;
10481041 machine_uint_t code_info_size = code_info [0 ] | (code_info [1 ] << 8 ) | (code_info [2 ] << 16 ) | (code_info [3 ] << 24 );
10491042 qstr source_file = code_info [4 ] | (code_info [5 ] << 8 ) | (code_info [6 ] << 16 ) | (code_info [7 ] << 24 );
10501043 qstr block_name = code_info [8 ] | (code_info [9 ] << 8 ) | (code_info [10 ] << 16 ) | (code_info [11 ] << 24 );
@@ -1075,15 +1068,15 @@ mp_vm_return_kind_t mp_execute_bytecode2(const byte *code_info, const byte **ip_
10751068 currently_in_except_block = 1 ;
10761069
10771070 // catch exception and pass to byte code
1078- * ip_in_out = exc_sp -> handler ;
1071+ code_state -> ip = exc_sp -> handler ;
10791072 mp_obj_t * sp = MP_TAGPTR_PTR (exc_sp -> val_sp );
10801073 // save this exception in the stack so it can be used in a reraise, if needed
10811074 exc_sp -> prev_exc = nlr .ret_val ;
10821075 // push(traceback, exc-val, exc-type)
10831076 PUSH (mp_const_none );
10841077 PUSH (nlr .ret_val );
10851078 PUSH (mp_obj_get_type (nlr .ret_val ));
1086- * sp_in_out = sp ;
1079+ code_state -> sp = sp ;
10871080
10881081 } else {
10891082 // propagate exception to higher level