1- use crate :: common:: atomic:: { Ordering , PyAtomic , Radium } ;
1+ use crate :: common:: atomic:: { OncePtr , PyAtomic , Radium } ;
22use crate :: common:: linked_list:: { Link , LinkedList , Pointers } ;
33use crate :: common:: lock:: { PyMutex , PyMutexGuard , PyRwLock } ;
44use crate :: common:: refcount:: RefCount ;
@@ -15,11 +15,6 @@ use std::mem::ManuallyDrop;
1515use std:: ops:: Deref ;
1616use std:: ptr:: { self , NonNull } ;
1717
18- #[ cfg( feature = "threading" ) ]
19- use once_cell:: race:: OnceBox ;
20- #[ cfg( not( feature = "threading" ) ) ]
21- type OnceBox < T > = once_cell:: unsync:: OnceCell < Box < T > > ;
22-
2318// so, PyObjectRef is basically equivalent to `PyRc<PyGenericObject<dyn PyObjectPayload>>`, except it's
2419// only one pointer in width rather than 2. We do that by manually creating a vtable, and putting
2520// a &'static reference to it inside the `PyRc` rather than adjacent to it, like trait objects do.
@@ -108,7 +103,7 @@ impl<T: fmt::Debug> fmt::Debug for PyInner<T> {
108103}
109104
110105struct WeakRefList {
111- inner : OnceBox < PyMutex < WeakListInner > > ,
106+ inner : OncePtr < PyMutex < WeakListInner > > ,
112107}
113108
114109impl fmt:: Debug for WeakRefList {
@@ -117,10 +112,21 @@ impl fmt::Debug for WeakRefList {
117112 }
118113}
119114
120- #[ derive( Default ) ]
121115struct WeakListInner {
122116 list : LinkedList < WeakLink , PyObjectView < PyWeak > > ,
123117 obj : Option < NonNull < PyObject > > ,
118+ // one for each live PyWeak with a reference to this, + 1 for the referent object if it's not dead
119+ ref_count : usize ,
120+ }
121+
122+ impl Default for WeakListInner {
123+ fn default ( ) -> Self {
124+ Self {
125+ list : LinkedList :: default ( ) ,
126+ obj : None ,
127+ ref_count : 1 ,
128+ }
129+ }
124130}
125131
126132cfg_if:: cfg_if! {
@@ -133,19 +139,13 @@ cfg_if::cfg_if! {
133139impl WeakRefList {
134140 pub fn new ( ) -> Self {
135141 WeakRefList {
136- inner : OnceBox :: new ( ) ,
142+ inner : OncePtr :: new ( ) ,
137143 }
138144 }
139145
140146 /// returns None if there have never been any weakrefs in this list
141147 fn try_lock ( & self ) -> Option < PyMutexGuard < ' _ , WeakListInner > > {
142- self . inner . get ( ) . map ( |mu| mu. lock ( ) )
143- }
144- fn lock ( & self ) -> PyMutexGuard < ' _ , WeakListInner > {
145- self . inner
146- . get ( )
147- . expect ( "weakref should be initialized" )
148- . lock ( )
148+ self . inner . get ( ) . map ( |mu| unsafe { mu. as_ref ( ) . lock ( ) } )
149149 }
150150
151151 fn add (
@@ -155,70 +155,85 @@ impl WeakRefList {
155155 callback : Option < PyObjectRef > ,
156156 dict : Option < PyDictRef > ,
157157 ) -> PyObjectWeak {
158- let mut inner = self . inner . get_or_init ( Default :: default) . lock ( ) ;
158+ let inner_ptr = self . inner . get_or_init ( Default :: default) ;
159+ let mut inner = unsafe { inner_ptr. as_ref ( ) . lock ( ) } ;
159160 if inner. obj . is_none ( ) {
160161 inner. obj = Some ( NonNull :: from ( obj) ) ;
161162 }
162163 let obj = PyWeak {
163164 pointers : Pointers :: new ( ) ,
164- parent : Radium :: new ( self as * const WeakRefList as * mut WeakRefList ) ,
165+ parent : inner_ptr ,
165166 callback : UnsafeCell :: new ( callback) ,
166167 hash : Radium :: new ( -1 ) ,
167168 } ;
168169 let weak = PyRef :: new_ref ( obj, cls, dict) ;
169170 // SAFETY: we don't actually own the PyObjectWeaks inside `list`, and every time we take
170- // one out of the list we immediately wrap it in ManuallyDrop
171+ // one out of the list we immediately wrap it in ManuallyDrop or forget it
171172 inner. list . push_front ( unsafe { ptr:: read ( & weak) } ) ;
173+ inner. ref_count += 1 ;
172174 PyObjectWeak { weak }
173175 }
174176
175177 fn clear ( & self ) {
176- let mut inner = match self . try_lock ( ) {
177- Some ( inner) => inner,
178- None => return ,
179- } ;
180- inner. obj = None ;
181- // TODO: can be an arrayvec
182- let mut v = Vec :: with_capacity ( 16 ) ;
183- loop {
184- let iter = inner
185- . list
186- . drain_filter ( |_| true )
187- . filter_map ( |wr| {
188- // we don't have actual ownership of the reference counts in the list.
189- // but, now we do want ownership (and so incref these *while the lock
190- // is held*) to avoid weird things if PyWeakObj::drop happens after
191- // this but before we reach the loop body below
192- let wr = ManuallyDrop :: new ( wr) ;
193-
194- wr. parent . store ( ptr:: null_mut ( ) , Ordering :: SeqCst ) ;
195-
196- // if strong_count == 0, drop_slow has been called but lock_parent is blocking
197- // in PyWeak::Drop OR there's some reentrancy going on. either way we don't
198- // want to call the callback
199- ( wr. as_object ( ) . strong_count ( ) > 0 ) . then ( || ( * wr) . clone ( ) )
178+ let to_dealloc = {
179+ let ptr = match self . inner . get ( ) {
180+ Some ( ptr) => ptr,
181+ None => return ,
182+ } ;
183+ let mut inner = unsafe { ptr. as_ref ( ) . lock ( ) } ;
184+ inner. obj = None ;
185+ // TODO: can be an arrayvec
186+ let mut v = Vec :: with_capacity ( 16 ) ;
187+ loop {
188+ let iter = inner
189+ . list
190+ . drain_filter ( |_| true )
191+ . filter_map ( |wr| {
192+ // we don't have actual ownership of the reference counts in the list.
193+ // but, now we do want ownership (and so incref these *while the lock
194+ // is held*) to avoid weird things if PyWeakObj::drop happens after
195+ // this but before we reach the loop body below
196+ let wr = ManuallyDrop :: new ( wr) ;
197+
198+ // if strong_count == 0 there's some reentrancy going on. we don't
199+ // want to call the callback
200+ ( wr. as_object ( ) . strong_count ( ) > 0 ) . then ( || ( * wr) . clone ( ) )
201+ } )
202+ . take ( 16 ) ;
203+ v. extend ( iter) ;
204+ if v. is_empty ( ) {
205+ break ;
206+ }
207+ PyMutexGuard :: unlocked ( & mut inner, || {
208+ for wr in v. drain ( ..) {
209+ let cb = unsafe { wr. callback . get ( ) . replace ( None ) } ;
210+ if let Some ( cb) = cb {
211+ crate :: vm:: thread:: with_vm ( & cb, |vm| {
212+ // TODO: handle unraisable exception
213+ let _ = vm. invoke ( & cb, ( wr. clone ( ) , ) ) ;
214+ } ) ;
215+ }
216+ }
200217 } )
201- . take ( 16 ) ;
202- v. extend ( iter) ;
203- if v. is_empty ( ) {
204- break ;
205218 }
206- PyMutexGuard :: unlocked ( & mut inner, || {
207- for wr in v. drain ( ..) {
208- let cb = unsafe { wr. callback . get ( ) . replace ( None ) } ;
209- if let Some ( cb) = cb {
210- crate :: vm:: thread:: with_vm ( & cb, |vm| {
211- // TODO: handle unraisable exception
212- let _ = vm. invoke ( & cb, ( wr. clone ( ) , ) ) ;
213- } ) ;
214- }
215- }
216- } )
219+ inner. ref_count -= 1 ;
220+ ( inner. ref_count == 0 ) . then ( || ptr)
221+ } ;
222+ if let Some ( ptr) = to_dealloc {
223+ unsafe { WeakRefList :: dealloc ( ptr) }
217224 }
218225 }
219226
220227 fn count ( & self ) -> usize {
221- self . try_lock ( ) . map_or ( 0 , |inner| inner. list . count ( ) )
228+ self . try_lock ( )
229+ // we assume the object is still alive (and this is only
230+ // called from PyObject::weak_count so it should be)
231+ . map ( |inner| inner. ref_count - 1 )
232+ . unwrap_or ( 0 )
233+ }
234+
235+ unsafe fn dealloc ( ptr : NonNull < PyMutex < WeakListInner > > ) {
236+ Box :: from_raw ( ptr. as_ptr ( ) ) ;
222237 }
223238}
224239
@@ -251,7 +266,8 @@ unsafe impl Link for WeakLink {
251266#[ derive( Debug ) ]
252267pub struct PyWeak {
253268 pointers : Pointers < PyObjectView < PyWeak > > ,
254- parent : PyAtomic < * mut WeakRefList > ,
269+ parent : NonNull < PyMutex < WeakListInner > > ,
270+ // this is treated as part of parent's mutex - you must hold that lock to access it
255271 callback : UnsafeCell < Option < PyObjectRef > > ,
256272 pub ( crate ) hash : PyAtomic < crate :: common:: hash:: PyHash > ,
257273}
@@ -266,7 +282,7 @@ cfg_if::cfg_if! {
266282impl PyWeak {
267283 pub ( crate ) fn upgrade ( & self ) -> Option < PyObjectRef > {
268284 // TODO: figure out orderings for here, in drop, and the store in WeakRefList::clear
269- let guard = unsafe { self . parent . load ( Ordering :: SeqCst ) . as_ref ( ) ? . lock ( ) } ;
285+ let guard = unsafe { self . parent . as_ref ( ) . lock ( ) } ;
270286 let obj_ptr = guard. obj ?;
271287 unsafe {
272288 obj_ptr. as_ref ( ) . 0 . refcount . incref ( ) ;
@@ -276,24 +292,20 @@ impl PyWeak {
276292
277293 #[ inline( always) ]
278294 fn drop_inner ( & self ) {
279- let parent_ptr = self . parent . load ( Ordering :: SeqCst ) ;
280- let mut guard = match unsafe { parent_ptr. as_ref ( ) } {
281- Some ( parent) => parent. lock ( ) ,
282- // the object is dead - we don't have to worry about removing ourselves from the list
283- None => return ,
295+ let dealloc = {
296+ let mut guard = unsafe { self . parent . as_ref ( ) . lock ( ) } ;
297+ let offset = memoffset:: offset_of!( PyInner <PyWeak >, payload) ;
298+ let pyinner = ( self as * const Self as usize - offset) as * const PyInner < Self > ;
299+ let node_ptr = unsafe { NonNull :: new_unchecked ( pyinner as * mut PyObjectView < Self > ) } ;
300+ // the list doesn't have ownership over its PyRef<PyWeak>! we're being dropped
301+ // right now so that should be obvious!!
302+ std:: mem:: forget ( unsafe { guard. list . remove ( node_ptr) } ) ;
303+ guard. ref_count -= 1 ;
304+ guard. ref_count == 0
284305 } ;
285- if self . parent . load ( Ordering :: SeqCst ) . is_null ( ) {
286- // another thread is calling WeakRefList::clear(), and between the previous load and
287- // the lock() it reached our weakref in the .drain_filter() iterator. because of that,
288- // we know we've now been removed from the list, so we don't have to do anything
289- return ;
306+ if dealloc {
307+ unsafe { WeakRefList :: dealloc ( self . parent ) }
290308 }
291- let offset = memoffset:: offset_of!( PyInner <PyWeak >, payload) ;
292- let pyinner = ( self as * const Self as usize - offset) as * const PyInner < Self > ;
293- let node_ptr = unsafe { NonNull :: new_unchecked ( pyinner as * mut PyObjectView < Self > ) } ;
294- // the list doesn't have ownership over its PyRef<PyWeak>! we're being dropped
295- // right now so that should be obvious!!
296- std:: mem:: forget ( unsafe { guard. list . remove ( node_ptr) } ) ;
297309 }
298310}
299311