@@ -117,6 +117,16 @@ impl<T> Arc<T> {
         // these contents.
         unsafe { &*self._ptr }
     }
+
+    /// Get the number of weak references to this value.
+    #[inline]
+    #[experimental]
+    pub fn weak_count(&self) -> uint { self.inner().weak.load(atomic::SeqCst) - 1 }
+
+    /// Get the number of strong references to this value.
+    #[inline]
+    #[experimental]
+    pub fn strong_count(&self) -> uint { self.inner().strong.load(atomic::SeqCst) }
 }
 
 #[unstable = "waiting on stability of Clone"]
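Why the `- 1` in `weak_count`? As the comment added in the next hunk explains, the strong references collectively hold one implicit weak reference, so the raw `weak` field is always the user-visible weak count plus one. A minimal sketch of that bookkeeping, modeled with plain atomics rather than this module's internals (the `strong`/`weak` names mirror the fields used above; this is an illustration, not the module's code):

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

fn main() {
    // After `Arc::new`: one strong reference, plus the implicit
    // weak reference shared by all strong references.
    let strong = AtomicUsize::new(1);
    let weak = AtomicUsize::new(1);

    // `downgrade()` adds a user-visible weak reference.
    weak.fetch_add(1, Ordering::SeqCst);

    // `weak_count()` reports the raw counter minus the implicit
    // weak reference; `strong_count()` reports `strong` as-is.
    assert_eq!(weak.load(Ordering::SeqCst) - 1, 1);
    assert_eq!(strong.load(Ordering::SeqCst), 1);
}
```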
@@ -247,6 +257,29 @@ impl<T: Sync + Send> Weak<T> {
         // See comments above for why this is "safe"
         unsafe { &*self._ptr }
     }
+
+    // Why is there no `weak_count()`?
+    //
+    // It is not possible to accurately determine the number of weak references with only a weak
+    // reference in a wait-free manner. This is because we have a data race with the last strong
+    // reference's `drop` method. If that operation pauses between decrementing the strong
+    // reference count to 0 and removing the implicit weak reference that the strong references
+    // share, then we will incorrectly think there is one more weak reference than there really is.
+    //
+    // We cannot get around this without making parts of this object no longer wait-free, since we
+    // would either need to use locks to get mutual exclusion with `drop` or make it so that the
+    // weak and strong reference counts can be modified atomically together. The first option
+    // destroys wait-freedom by adding a lock and the second (in addition to being annoying to
+    // implement) would make many operations (at least `downgrade` and both `clone`s) go from being
+    // wait-free to merely lock-free, as we would need to do a manual CAS loop to get around other
+    // threads modifying the other value in each of these cases.
+
+    /// Get the number of strong references to this value.
+    ///
+    /// If this function returns 0 then the value has been freed.
+    #[inline]
+    #[experimental]
+    pub fn strong_count(&self) -> uint { self.inner().strong.load(atomic::SeqCst) }
 }
 
 #[experimental = "Weak pointers may not belong in this module."]
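To make the data race described in that comment concrete: the last strong reference's `drop` decrements `strong` to 0 and only afterwards removes the implicit weak reference. A hypothetical `Weak::weak_count()` reading both counters between those two steps would over-count by one. A sketch of the window with plain atomics (the `implicit`/`observed` names are illustrative and not part of the module):

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

fn main() {
    // One strong reference and no user-visible weak references;
    // `weak` holds only the implicit weak reference.
    let strong = AtomicUsize::new(1);
    let weak = AtomicUsize::new(1);

    // Step 1 of the last strong reference's `drop`:
    // the strong count reaches 0.
    strong.fetch_sub(1, Ordering::SeqCst);

    // A reader pausing here sees `strong == 0`, so it cannot tell
    // that the implicit weak reference has not been removed yet:
    let implicit = if strong.load(Ordering::SeqCst) > 0 { 1 } else { 0 };
    let observed = weak.load(Ordering::SeqCst) - implicit;
    assert_eq!(observed, 1); // over-counts: no weak references exist

    // Step 2 of `drop`: the implicit weak reference is removed,
    // too late for the reader paused above.
    weak.fetch_sub(1, Ordering::SeqCst);
}
```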
@@ -465,6 +498,47 @@ mod tests {
         drop(arc_weak);
     }
 
+    #[test]
+    fn test_strong_count() {
+        let a = Arc::new(0u32);
+        assert!(a.strong_count() == 1);
+        let w = a.downgrade();
+        assert!(a.strong_count() == 1);
+        let b = w.upgrade().expect("");
+        assert!(b.strong_count() == 2);
+        assert!(a.strong_count() == 2);
+        drop(w);
+        drop(a);
+        assert!(b.strong_count() == 1);
+        let c = b.clone();
+        assert!(b.strong_count() == 2);
+        assert!(c.strong_count() == 2);
+    }
+
+    #[test]
+    fn test_weak_count() {
+        let a = Arc::new(0u32);
+        assert!(a.strong_count() == 1);
+        assert!(a.weak_count() == 0);
+        let w = a.downgrade();
+        assert!(a.strong_count() == 1);
+        assert!(w.strong_count() == 1);
+        assert!(a.weak_count() == 1);
+        drop(w);
+        assert!(a.strong_count() == 1);
+        assert!(a.weak_count() == 0);
+        let c = a.clone();
+        assert!(a.strong_count() == 2);
+        assert!(a.weak_count() == 0);
+        let d = c.downgrade();
+        assert!(c.weak_count() == 1);
+        assert!(c.strong_count() == 2);
+
+        drop(a);
+        drop(c);
+        drop(d);
+    }
+
     #[test]
     fn show_arc() {
         let a = Arc::new(5u32);