Auto merge of rust-lang#27174 - Gankro:rc-sat, r=alexcrichton
bors committed Jul 30, 2015
2 parents 8b9ada5 + 22e2100 commit db2af71
Showing 2 changed files with 42 additions and 7 deletions.
30 changes: 26 additions & 4 deletions src/liballoc/arc.rs
@@ -78,16 +78,18 @@ use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
 use core::fmt;
 use core::cmp::Ordering;
 use core::mem::{align_of_val, size_of_val};
-use core::intrinsics::drop_in_place;
+use core::intrinsics::{drop_in_place, abort};
 use core::mem;
 use core::nonzero::NonZero;
 use core::ops::{Deref, CoerceUnsized};
 use core::ptr;
 use core::marker::Unsize;
 use core::hash::{Hash, Hasher};
-use core::usize;
+use core::{usize, isize};
 use heap::deallocate;
 
+const MAX_REFCOUNT: usize = (isize::MAX) as usize;
+
 /// An atomically reference counted wrapper for shared state.
 ///
 /// # Examples
@@ -312,7 +314,21 @@ impl<T: ?Sized> Clone for Arc<T> {
         // another must already provide any required synchronization.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        self.inner().strong.fetch_add(1, Relaxed);
+        let old_size = self.inner().strong.fetch_add(1, Relaxed);
+
+        // However we need to guard against massive refcounts in case someone
+        // is `mem::forget`ing Arcs. If we don't do this the count can overflow
+        // and users will use-after-free. We racily saturate to `isize::MAX` on
+        // the assumption that there aren't ~2 billion threads incrementing
+        // the reference count at once. This branch will never be taken in
+        // any realistic program.
+        //
+        // We abort because such a program is incredibly degenerate, and we
+        // don't care to support it.
+        if old_size > MAX_REFCOUNT {
+            unsafe { abort(); }
+        }
+
         Arc { _ptr: self._ptr }
     }
 }
@@ -617,7 +633,13 @@ impl<T: ?Sized> Clone for Weak<T> {
         // fetch_add (ignoring the lock) because the weak count is only locked
         // when there are *no other* weak pointers in existence. (So we can't
         // be running this code in that case).
-        self.inner().weak.fetch_add(1, Relaxed);
+        let old_size = self.inner().weak.fetch_add(1, Relaxed);
+
+        // See comments in Arc::clone() for why we do this (for mem::forget).
+        if old_size > MAX_REFCOUNT {
+            unsafe { abort(); }
+        }
+
         return Weak { _ptr: self._ptr }
     }
 }
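The guard added to `Arc::clone` (and `Weak::clone`) above can be illustrated outside liballoc. The sketch below is an approximation, not the library's code: a bare `AtomicUsize` stands in for the strong count, and the stable `std::process::abort` replaces the `core::intrinsics::abort` intrinsic, which is only usable inside the standard library.

use std::process::abort;
use std::sync::atomic::{AtomicUsize, Ordering};

// Saturation threshold, as in the patch: stopping at isize::MAX leaves
// an enormous gap below usize::MAX where the count can sit safely.
const MAX_REFCOUNT: usize = isize::MAX as usize;

// Increment a strong count the way the patched Arc::clone does.
fn clone_guarded(strong: &AtomicUsize) {
    // Relaxed suffices: the caller already holds a reference, so the
    // allocation cannot disappear while we increment.
    let old_size = strong.fetch_add(1, Ordering::Relaxed);

    // Racy saturation: each running thread can overshoot MAX_REFCOUNT by
    // at most one increment before some thread notices and aborts, so the
    // count can never climb the rest of the way to a wrap-around.
    if old_size > MAX_REFCOUNT {
        abort();
    }
}

fn main() {
    let strong = AtomicUsize::new(1);
    clone_guarded(&strong);
    assert_eq!(strong.load(Ordering::Relaxed), 2);
}

Checking after the `fetch_add` rather than before keeps the common path a single uncontended atomic add; the headroom between `isize::MAX` and `usize::MAX` is what makes the after-the-fact check sound.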
19 changes: 16 additions & 3 deletions src/liballoc/rc.rs
@@ -161,7 +161,7 @@ use core::cell::Cell;
 use core::cmp::Ordering;
 use core::fmt;
 use core::hash::{Hasher, Hash};
-use core::intrinsics::{assume, drop_in_place};
+use core::intrinsics::{assume, drop_in_place, abort};
 use core::marker::{self, Unsize};
 use core::mem::{self, align_of, size_of, align_of_val, size_of_val, forget};
 use core::nonzero::NonZero;
@@ -858,6 +858,15 @@ impl<T: ?Sized+fmt::Debug> fmt::Debug for Weak<T> {
     }
 }
 
+// NOTE: We use checked_add here to deal with mem::forget safety. In particular
+// if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then
+// you can free the allocation while outstanding Rcs (or Weaks) exist.
+// We abort because this is such a degenerate scenario that we don't care about
+// what happens -- no real program should ever experience this.
+//
+// This should have negligible overhead since you don't actually need to
+// clone these much in Rust thanks to ownership and move-semantics.
+
 #[doc(hidden)]
 trait RcBoxPtr<T: ?Sized> {
     fn inner(&self) -> &RcBox<T>;
@@ -866,7 +875,9 @@ trait RcBoxPtr<T: ?Sized> {
     fn strong(&self) -> usize { self.inner().strong.get() }
 
     #[inline]
-    fn inc_strong(&self) { self.inner().strong.set(self.strong() + 1); }
+    fn inc_strong(&self) {
+        self.inner().strong.set(self.strong().checked_add(1).unwrap_or_else(|| unsafe { abort() }));
+    }
 
     #[inline]
     fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); }
@@ -875,7 +886,9 @@ trait RcBoxPtr<T: ?Sized> {
     fn weak(&self) -> usize { self.inner().weak.get() }
 
     #[inline]
-    fn inc_weak(&self) { self.inner().weak.set(self.weak() + 1); }
+    fn inc_weak(&self) {
+        self.inner().weak.set(self.weak().checked_add(1).unwrap_or_else(|| unsafe { abort() }));
+    }
 
     #[inline]
     fn dec_weak(&self) { self.inner().weak.set(self.weak() - 1); }
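`Rc` is single-threaded, so its counts live in plain `Cell<usize>` fields and overflow can be checked exactly rather than racily. Below is a minimal sketch of the `checked_add` pattern used by `inc_strong` and `inc_weak` above, under the same substitutions as before (a bare `Cell<usize>` for the count field, `std::process::abort` for the intrinsic).

use std::cell::Cell;
use std::process::abort;

// Increment a single-threaded reference count, aborting on overflow,
// mirroring the patched RcBoxPtr::inc_strong / inc_weak.
fn inc(count: &Cell<usize>) {
    // checked_add returns None on overflow; abort() returns `!`, which
    // coerces to usize, so the unwrap_or_else closure type-checks.
    count.set(count.get().checked_add(1).unwrap_or_else(|| abort()));
}

fn main() {
    let strong = Cell::new(1usize);
    inc(&strong);
    assert_eq!(strong.get(), 2);
}

The cost is one overflow check on a value already in a register, which is why the NOTE above calls the overhead negligible: ownership and move semantics mean Rust code clones `Rc`s comparatively rarely anyway.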
