Remove #[cfg(stage0)] items.

Eduard Burtescu 2015-05-25 20:21:29 +03:00
parent 07503dfb8b
commit 6e8e4f847c
17 changed files with 14 additions and 1353 deletions
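For context, the pattern this commit deletes is the usual bootstrap dance: an item is written twice, once under #[cfg(stage0)] so the old snapshot compiler can still build the crate, and once under #[cfg(not(stage0))] for every later stage. After a new snapshot lands (see the "SNAP 857ef6e" markers in the intrinsics hunk below), the stage0 copies are dead code. A minimal sketch of the shape being removed, modelled on the hunk below that swaps between `pub mod arc` and the `arc_stage0` fallback:

// Sketch of the duplication removed by this commit (illustrative, not a verbatim hunk).
// The stage0 (snapshot) compiler builds the fallback module; every later stage
// builds the real one. Once the snapshot is updated, only the first item survives.
#[cfg(not(stage0))]
pub mod arc;            // the real module, needs post-snapshot compiler features

#[cfg(stage0)]
mod arc_stage0;         // fallback the snapshot compiler can still compile
#[cfg(stage0)]
pub mod arc {
    pub use arc_stage0::*;
}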


@@ -1,686 +0,0 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![stable(feature = "rust1", since = "1.0.0")]
//! Threadsafe reference-counted boxes (the `Arc<T>` type).
//!
//! The `Arc<T>` type provides shared ownership of an immutable value.
//! Destruction is deterministic, and will occur as soon as the last owner is
//! gone. It is marked as `Send` because it uses atomic reference counting.
//!
//! If you do not need thread-safety, and just need shared ownership, consider
//! the [`Rc<T>` type](../rc/struct.Rc.html). It is the same as `Arc<T>`, but
//! does not use atomics, making it both thread-unsafe as well as significantly
//! faster when updating the reference count.
//!
//! The `downgrade` method can be used to create a non-owning `Weak<T>` pointer
//! to the box. A `Weak<T>` pointer can be upgraded to an `Arc<T>` pointer, but
//! will return `None` if the value has already been dropped.
//!
//! For example, a tree with parent pointers can be represented by putting the
//! nodes behind strong `Arc<T>` pointers, and then storing the parent pointers
//! as `Weak<T>` pointers.
//!
//! # Examples
//!
//! Sharing some immutable data between threads:
//!
//! ```no_run
//! use std::sync::Arc;
//! use std::thread;
//!
//! let five = Arc::new(5);
//!
//! for _ in 0..10 {
//! let five = five.clone();
//!
//! thread::spawn(move || {
//! println!("{:?}", five);
//! });
//! }
//! ```
//!
//! Sharing mutable data safely between threads with a `Mutex`:
//!
//! ```no_run
//! use std::sync::{Arc, Mutex};
//! use std::thread;
//!
//! let five = Arc::new(Mutex::new(5));
//!
//! for _ in 0..10 {
//! let five = five.clone();
//!
//! thread::spawn(move || {
//! let mut number = five.lock().unwrap();
//!
//! *number += 1;
//!
//! println!("{}", *number); // prints 6
//! });
//! }
//! ```
use boxed::Box;
use core::prelude::*;
use core::atomic;
use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
use core::fmt;
use core::cmp::Ordering;
use core::mem::{min_align_of, size_of};
use core::mem;
use core::nonzero::NonZero;
use core::ops::Deref;
use core::ptr;
use core::hash::{Hash, Hasher};
use heap::deallocate;
/// An atomically reference counted wrapper for shared state.
///
/// # Examples
///
/// In this example, a large vector of floats is shared between several threads.
/// With simple pipes, without `Arc`, a copy would have to be made for each
/// thread.
///
/// When you clone an `Arc<T>`, it will create another pointer to the data and
/// increase the reference counter.
///
/// ```
/// # #![feature(alloc, core)]
/// use std::sync::Arc;
/// use std::thread;
///
/// fn main() {
/// let numbers: Vec<_> = (0..100u32).collect();
/// let shared_numbers = Arc::new(numbers);
///
/// for _ in 0..10 {
/// let child_numbers = shared_numbers.clone();
///
/// thread::spawn(move || {
/// let local_numbers = &child_numbers[..];
///
/// // Work with the local numbers
/// });
/// }
/// }
/// ```
#[unsafe_no_drop_flag]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Arc<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: NonZero<*mut ArcInner<T>>,
}
unsafe impl<T: Sync + Send> Send for Arc<T> { }
unsafe impl<T: Sync + Send> Sync for Arc<T> { }
/// A weak pointer to an `Arc`.
///
/// Weak pointers will not keep the data inside of the `Arc` alive, and can be
/// used to break cycles between `Arc` pointers.
#[unsafe_no_drop_flag]
#[unstable(feature = "alloc",
reason = "Weak pointers may not belong in this module.")]
pub struct Weak<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: NonZero<*mut ArcInner<T>>,
}
unsafe impl<T: Sync + Send> Send for Weak<T> { }
unsafe impl<T: Sync + Send> Sync for Weak<T> { }
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug> fmt::Debug for Weak<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "(Weak)")
}
}
struct ArcInner<T> {
strong: atomic::AtomicUsize,
weak: atomic::AtomicUsize,
data: T,
}
unsafe impl<T: Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: Sync + Send> Sync for ArcInner<T> {}
impl<T> Arc<T> {
/// Constructs a new `Arc<T>`.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new(data: T) -> Arc<T> {
// Start the weak pointer count as 1 which is the weak pointer that's
// held by all the strong pointers (kinda), see std/rc.rs for more info
let x: Box<_> = box ArcInner {
strong: atomic::AtomicUsize::new(1),
weak: atomic::AtomicUsize::new(1),
data: data,
};
Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
}
/// Downgrades the `Arc<T>` to a `Weak<T>` reference.
///
/// # Examples
///
/// ```
/// # #![feature(alloc)]
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
///
/// let weak_five = five.downgrade();
/// ```
#[unstable(feature = "alloc",
reason = "Weak pointers may not belong in this module.")]
pub fn downgrade(&self) -> Weak<T> {
// See the clone() impl for why this is relaxed
self.inner().weak.fetch_add(1, Relaxed);
Weak { _ptr: self._ptr }
}
}
impl<T> Arc<T> {
#[inline]
fn inner(&self) -> &ArcInner<T> {
// This unsafety is ok because while this arc is alive we're guaranteed
// that the inner pointer is valid. Furthermore, we know that the
// `ArcInner` structure itself is `Sync` because the inner data is
// `Sync` as well, so we're ok loaning out an immutable pointer to these
// contents.
unsafe { &**self._ptr }
}
// Non-inlined part of `drop`.
#[inline(never)]
unsafe fn drop_slow(&mut self) {
let ptr = *self._ptr;
// Destroy the data at this time, even though we may not free the box
// allocation itself (there may still be weak pointers lying around).
drop(ptr::read(&self.inner().data));
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(), min_align_of::<ArcInner<T>>())
}
}
}
/// Get the number of weak references to this value.
#[inline]
#[unstable(feature = "alloc")]
pub fn weak_count<T>(this: &Arc<T>) -> usize { this.inner().weak.load(SeqCst) - 1 }
/// Get the number of strong references to this value.
#[inline]
#[unstable(feature = "alloc")]
pub fn strong_count<T>(this: &Arc<T>) -> usize { this.inner().strong.load(SeqCst) }
/// Returns a mutable reference to the contained value if the `Arc<T>` is unique.
///
/// Returns `None` if the `Arc<T>` is not unique.
///
/// # Examples
///
/// ```
/// # #![feature(alloc)]
/// extern crate alloc;
/// # fn main() {
/// use alloc::arc::{Arc, get_mut};
///
/// let mut x = Arc::new(3);
/// *get_mut(&mut x).unwrap() = 4;
/// assert_eq!(*x, 4);
///
/// let _y = x.clone();
/// assert!(get_mut(&mut x).is_none());
/// # }
/// ```
#[inline]
#[unstable(feature = "alloc")]
pub fn get_mut<T>(this: &mut Arc<T>) -> Option<&mut T> {
if strong_count(this) == 1 && weak_count(this) == 0 {
// This unsafety is ok because we're guaranteed that the pointer
// returned is the *only* pointer that will ever be returned to T. Our
// reference count is guaranteed to be 1 at this point, and we required
// the Arc itself to be `mut`, so we're returning the only possible
// reference to the inner data.
let inner = unsafe { &mut **this._ptr };
Some(&mut inner.data)
} else {
None
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Arc<T> {
/// Makes a clone of the `Arc<T>`.
///
/// This increases the strong reference count.
///
/// # Examples
///
/// ```
/// # #![feature(alloc)]
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
///
/// five.clone();
/// ```
#[inline]
fn clone(&self) -> Arc<T> {
// Using a relaxed ordering is alright here, as knowledge of the
// original reference prevents other threads from erroneously deleting
// the object.
//
// As explained in the [Boost documentation][1], Increasing the
// reference counter can always be done with memory_order_relaxed: New
// references to an object can only be formed from an existing
// reference, and passing an existing reference from one thread to
// another must already provide any required synchronization.
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
self.inner().strong.fetch_add(1, Relaxed);
Arc { _ptr: self._ptr }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Deref for Arc<T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
&self.inner().data
}
}
impl<T: Clone> Arc<T> {
/// Make a mutable reference from the given `Arc<T>`.
///
/// This is also referred to as a copy-on-write operation because the inner
/// data is cloned if the reference count is greater than one.
///
/// # Examples
///
/// ```
/// # #![feature(alloc)]
/// use std::sync::Arc;
///
/// let mut five = Arc::new(5);
///
/// let mut_five = five.make_unique();
/// ```
#[inline]
#[unstable(feature = "alloc")]
pub fn make_unique(&mut self) -> &mut T {
// Note that we hold a strong reference, which also counts as a weak
// reference, so we only clone if there is an additional reference of
// either kind.
if self.inner().strong.load(SeqCst) != 1 ||
self.inner().weak.load(SeqCst) != 1 {
*self = Arc::new((**self).clone())
}
// As with `get_mut()`, the unsafety is ok because our reference was
// either unique to begin with, or became one upon cloning the contents.
let inner = unsafe { &mut **self._ptr };
&mut inner.data
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Drop for Arc<T> {
/// Drops the `Arc<T>`.
///
/// This will decrement the strong reference count. If the strong reference
/// count becomes zero and the only other references are `Weak<T>` ones,
/// `drop`s the inner value.
///
/// # Examples
///
/// ```
/// # #![feature(alloc)]
/// use std::sync::Arc;
///
/// {
/// let five = Arc::new(5);
///
/// // stuff
///
/// drop(five); // explicit drop
/// }
/// {
/// let five = Arc::new(5);
///
/// // stuff
///
/// } // implicit drop
/// ```
#[inline]
fn drop(&mut self) {
// This structure has #[unsafe_no_drop_flag], so this drop glue may run
// more than once (but it is guaranteed to be zeroed after the first if
// it's run more than once)
let ptr = *self._ptr;
// if ptr.is_null() { return }
if ptr.is_null() || ptr as usize == mem::POST_DROP_USIZE { return }
// Because `fetch_sub` is already atomic, we do not need to synchronize
// with other threads unless we are going to delete the object. This
// same logic applies to the below `fetch_sub` to the `weak` count.
if self.inner().strong.fetch_sub(1, Release) != 1 { return }
// This fence is needed to prevent reordering of use of the data and
// deletion of the data. Because it is marked `Release`, the decreasing
// of the reference count synchronizes with this `Acquire` fence. This
// means that use of the data happens before decreasing the reference
// count, which happens before this fence, which happens before the
// deletion of the data.
//
// As explained in the [Boost documentation][1],
//
// > It is important to enforce any possible access to the object in one
// > thread (through an existing reference) to *happen before* deleting
// > the object in a different thread. This is achieved by a "release"
// > operation after dropping a reference (any access to the object
// > through this reference must obviously happened before), and an
// > "acquire" operation before deleting the object.
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
atomic::fence(Acquire);
unsafe {
self.drop_slow()
}
}
}
#[unstable(feature = "alloc",
reason = "Weak pointers may not belong in this module.")]
impl<T> Weak<T> {
/// Upgrades a weak reference to a strong reference.
///
/// Upgrades the `Weak<T>` reference to an `Arc<T>`, if possible.
///
/// Returns `None` if there were no strong references and the data was
/// destroyed.
///
/// # Examples
///
/// ```
/// # #![feature(alloc)]
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
///
/// let weak_five = five.downgrade();
///
/// let strong_five: Option<Arc<_>> = weak_five.upgrade();
/// ```
pub fn upgrade(&self) -> Option<Arc<T>> {
// We use a CAS loop to increment the strong count instead of a
// fetch_add because once the count hits 0 it must never be above 0.
let inner = self.inner();
loop {
let n = inner.strong.load(SeqCst);
if n == 0 { return None }
let old = inner.strong.compare_and_swap(n, n + 1, SeqCst);
if old == n { return Some(Arc { _ptr: self._ptr }) }
}
}
#[inline]
fn inner(&self) -> &ArcInner<T> {
// See comments above for why this is "safe"
unsafe { &**self._ptr }
}
}
#[unstable(feature = "alloc",
reason = "Weak pointers may not belong in this module.")]
impl<T> Clone for Weak<T> {
/// Makes a clone of the `Weak<T>`.
///
/// This increases the weak reference count.
///
/// # Examples
///
/// ```
/// # #![feature(alloc)]
/// use std::sync::Arc;
///
/// let weak_five = Arc::new(5).downgrade();
///
/// weak_five.clone();
/// ```
#[inline]
fn clone(&self) -> Weak<T> {
// See comments in Arc::clone() for why this is relaxed
self.inner().weak.fetch_add(1, Relaxed);
Weak { _ptr: self._ptr }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Drop for Weak<T> {
/// Drops the `Weak<T>`.
///
/// This will decrement the weak reference count.
///
/// # Examples
///
/// ```
/// # #![feature(alloc)]
/// use std::sync::Arc;
///
/// {
/// let five = Arc::new(5);
/// let weak_five = five.downgrade();
///
/// // stuff
///
/// drop(weak_five); // explicit drop
/// }
/// {
/// let five = Arc::new(5);
/// let weak_five = five.downgrade();
///
/// // stuff
///
/// } // implicit drop
/// ```
fn drop(&mut self) {
let ptr = *self._ptr;
// see comments above for why this check is here
if ptr.is_null() || ptr as usize == mem::POST_DROP_USIZE { return }
// If we find out that we were the last weak pointer, then its time to
// deallocate the data entirely. See the discussion in Arc::drop() about
// the memory orderings
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialEq> PartialEq for Arc<T> {
/// Equality for two `Arc<T>`s.
///
/// Two `Arc<T>`s are equal if their inner value are equal.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
///
/// five == Arc::new(5);
/// ```
fn eq(&self, other: &Arc<T>) -> bool { *(*self) == *(*other) }
/// Inequality for two `Arc<T>`s.
///
/// Two `Arc<T>`s are unequal if their inner value are unequal.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
///
/// five != Arc::new(5);
/// ```
fn ne(&self, other: &Arc<T>) -> bool { *(*self) != *(*other) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for Arc<T> {
/// Partial comparison for two `Arc<T>`s.
///
/// The two are compared by calling `partial_cmp()` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
///
/// five.partial_cmp(&Arc::new(5));
/// ```
fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
/// Less-than comparison for two `Arc<T>`s.
///
/// The two are compared by calling `<` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
///
/// five < Arc::new(5);
/// ```
fn lt(&self, other: &Arc<T>) -> bool { *(*self) < *(*other) }
/// 'Less-than or equal to' comparison for two `Arc<T>`s.
///
/// The two are compared by calling `<=` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
///
/// five <= Arc::new(5);
/// ```
fn le(&self, other: &Arc<T>) -> bool { *(*self) <= *(*other) }
/// Greater-than comparison for two `Arc<T>`s.
///
/// The two are compared by calling `>` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
///
/// five > Arc::new(5);
/// ```
fn gt(&self, other: &Arc<T>) -> bool { *(*self) > *(*other) }
/// 'Greater-than or equal to' comparison for two `Arc<T>`s.
///
/// The two are compared by calling `>=` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
///
/// five >= Arc::new(5);
/// ```
fn ge(&self, other: &Arc<T>) -> bool { *(*self) >= *(*other) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for Arc<T> {
fn cmp(&self, other: &Arc<T>) -> Ordering { (**self).cmp(&**other) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for Arc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Display> fmt::Display for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug> fmt::Debug for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> fmt::Pointer for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Pointer::fmt(&*self._ptr, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Arc<T> {
#[stable(feature = "rust1", since = "1.0.0")]
fn default() -> Arc<T> { Arc::new(Default::default()) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Hash> Hash for Arc<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state)
}
}
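The module docs above suggest storing parent links as `Weak<T>` so that parents and children never keep each other alive, but stop short of showing it. A hedged sketch using today's std::sync surface (Weak::new and Arc::downgrade as an associated function are later std additions, not part of the API shown in this file):

use std::sync::{Arc, Mutex, Weak};

// Children hold their parent through a Weak pointer, so parent and child
// never form a strong reference cycle.
struct Node {
    value: i32,
    parent: Mutex<Weak<Node>>,
}

fn main() {
    let parent = Arc::new(Node { value: 1, parent: Mutex::new(Weak::new()) });
    let child = Arc::new(Node {
        value: 2,
        parent: Mutex::new(Arc::downgrade(&parent)),
    });

    // Upgrading succeeds while the parent is alive...
    assert_eq!(child.parent.lock().unwrap().upgrade().unwrap().value, 1);

    drop(parent);

    // ...and yields None once it is gone: the back-link kept nothing alive.
    assert!(child.parent.lock().unwrap().upgrade().is_none());
    assert_eq!(child.value, 2);
}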


@@ -57,16 +57,12 @@
use core::cmp::Ordering;
use core::fmt;
use core::hash::{self, Hash};
use core::marker::Unsize;
use core::mem;
use core::ops::{Deref, DerefMut};
use core::ops::{CoerceUnsized, Deref, DerefMut};
use core::ptr::{Unique};
use core::raw::{TraitObject};
#[cfg(not(stage0))]
use core::marker::Unsize;
#[cfg(not(stage0))]
use core::ops::CoerceUnsized;
/// A value that represents the heap. This is the default place that the `box`
/// keyword allocates into when no place is supplied.
///
@@ -392,5 +388,4 @@ extern "rust-call" fn call_once(self, args: A) -> R {
}
}
#[cfg(not(stage0))]
impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}


@@ -112,14 +112,7 @@
mod boxed { pub use std::boxed::{Box, HEAP}; }
#[cfg(test)]
mod boxed_test;
#[cfg(not(stage0))]
pub mod arc;
#[cfg(stage0)]
mod arc_stage0;
#[cfg(stage0)]
pub mod arc {
pub use arc_stage0::*;
}
pub mod rc;
/// Common out-of-memory routine


@@ -159,36 +159,19 @@
use core::default::Default;
use core::fmt;
use core::hash::{Hasher, Hash};
use core::marker::{self, Sized};
use core::mem::{self, min_align_of, size_of, forget};
use core::intrinsics::{assume, drop_in_place};
use core::marker::{self, Sized, Unsize};
use core::mem::{self, min_align_of, size_of, min_align_of_val, size_of_val, forget};
use core::nonzero::NonZero;
use core::ops::{Deref, Drop};
use core::ops::{CoerceUnsized, Deref, Drop};
use core::option::Option;
use core::option::Option::{Some, None};
use core::ptr;
use core::result::Result;
use core::result::Result::{Ok, Err};
use core::intrinsics::assume;
#[cfg(not(stage0))]
use core::intrinsics::drop_in_place;
#[cfg(not(stage0))]
use core::marker::Unsize;
#[cfg(not(stage0))]
use core::mem::{min_align_of_val, size_of_val};
#[cfg(not(stage0))]
use core::ops::CoerceUnsized;
use heap::deallocate;
#[cfg(stage0)]
struct RcBox<T> {
strong: Cell<usize>,
weak: Cell<usize>,
value: T,
}
#[cfg(not(stage0))]
struct RcBox<T: ?Sized> {
strong: Cell<usize>,
weak: Cell<usize>,
@@ -199,15 +182,6 @@ struct RcBox<T: ?Sized> {
/// A reference-counted pointer type over an immutable value.
///
/// See the [module level documentation](./index.html) for more details.
#[cfg(stage0)]
#[unsafe_no_drop_flag]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Rc<T> {
// FIXME #12808: strange names to try to avoid interfering with field
// accesses of the contained type via Deref
_ptr: NonZero<*mut RcBox<T>>,
}
#[cfg(not(stage0))]
#[unsafe_no_drop_flag]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Rc<T: ?Sized> {
@@ -216,19 +190,9 @@ pub struct Rc<T: ?Sized> {
_ptr: NonZero<*mut RcBox<T>>,
}
#[cfg(stage0)]
impl<T> !marker::Send for Rc<T> {}
#[cfg(not(stage0))]
impl<T: ?Sized> !marker::Send for Rc<T> {}
#[cfg(stage0)]
impl<T> !marker::Sync for Rc<T> {}
#[cfg(not(stage0))]
impl<T: ?Sized> !marker::Sync for Rc<T> {}
#[cfg(not(stage0))]
impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {}
impl<T> Rc<T> {
@@ -259,7 +223,6 @@ pub fn new(value: T) -> Rc<T> {
}
}
#[cfg(not(stage0))]
impl<T: ?Sized> Rc<T> {
/// Downgrades the `Rc<T>` to a `Weak<T>` reference.
///
@@ -281,44 +244,12 @@ pub fn downgrade(&self) -> Weak<T> {
}
}
#[cfg(stage0)]
impl<T> Rc<T> {
/// Downgrades the `Rc<T>` to a `Weak<T>` reference.
///
/// # Examples
///
/// ```
/// # #![feature(alloc)]
/// use std::rc::Rc;
///
/// let five = Rc::new(5);
///
/// let weak_five = five.downgrade();
/// ```
#[unstable(feature = "alloc",
reason = "Weak pointers may not belong in this module")]
pub fn downgrade(&self) -> Weak<T> {
self.inc_weak();
Weak { _ptr: self._ptr }
}
}
/// Get the number of weak references to this value.
#[cfg(stage0)]
#[inline]
#[unstable(feature = "alloc")]
pub fn weak_count<T>(this: &Rc<T>) -> usize { this.weak() - 1 }
#[cfg(not(stage0))]
#[inline]
#[unstable(feature = "alloc")]
pub fn weak_count<T: ?Sized>(this: &Rc<T>) -> usize { this.weak() - 1 }
/// Get the number of strong references to this value.
#[cfg(stage0)]
#[inline]
#[unstable(feature = "alloc")]
pub fn strong_count<T>(this: &Rc<T>) -> usize { this.strong() }
#[cfg(not(stage0))]
#[inline]
#[unstable(feature = "alloc")]
pub fn strong_count<T: ?Sized>(this: &Rc<T>) -> usize { this.strong() }
@@ -438,17 +369,6 @@ pub fn make_unique(&mut self) -> &mut T {
}
}
#[cfg(stage0)]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Deref for Rc<T> {
type Target = T;
#[inline(always)]
fn deref(&self) -> &T {
&self.inner().value
}
}
#[cfg(not(stage0))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Deref for Rc<T> {
type Target = T;
@@ -459,58 +379,6 @@ fn deref(&self) -> &T {
}
}
#[cfg(stage0)]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Drop for Rc<T> {
/// Drops the `Rc<T>`.
///
/// This will decrement the strong reference count. If the strong reference
/// count becomes zero and the only other references are `Weak<T>` ones,
/// `drop`s the inner value.
///
/// # Examples
///
/// ```
/// # #![feature(alloc)]
/// use std::rc::Rc;
///
/// {
/// let five = Rc::new(5);
///
/// // stuff
///
/// drop(five); // explicit drop
/// }
/// {
/// let five = Rc::new(5);
///
/// // stuff
///
/// } // implicit drop
/// ```
fn drop(&mut self) {
unsafe {
let ptr = *self._ptr;
if !ptr.is_null() && ptr as usize != mem::POST_DROP_USIZE {
self.dec_strong();
if self.strong() == 0 {
ptr::read(&**self); // destroy the contained object
// remove the implicit "strong weak" pointer now that we've
// destroyed the contents.
self.dec_weak();
if self.weak() == 0 {
deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
}
}
}
}
}
}
#[cfg(not(stage0))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Drop for Rc<T> {
/// Drops the `Rc<T>`.
@@ -564,32 +432,6 @@ fn drop(&mut self) {
}
}
#[cfg(stage0)]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Rc<T> {
/// Makes a clone of the `Rc<T>`.
///
/// When you clone an `Rc<T>`, it will create another pointer to the data and
/// increase the strong reference counter.
///
/// # Examples
///
/// ```
/// # #![feature(alloc)]
/// use std::rc::Rc;
///
/// let five = Rc::new(5);
///
/// five.clone();
/// ```
#[inline]
fn clone(&self) -> Rc<T> {
self.inc_strong();
Rc { _ptr: self._ptr }
}
}
#[cfg(not(stage0))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Clone for Rc<T> {
@@ -634,17 +476,6 @@ fn default() -> Rc<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(stage0)]
impl<T: PartialEq> PartialEq for Rc<T> {
#[inline(always)]
fn eq(&self, other: &Rc<T>) -> bool { **self == **other }
#[inline(always)]
fn ne(&self, other: &Rc<T>) -> bool { **self != **other }
}
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(stage0))]
impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
/// Equality for two `Rc<T>`s.
///
@@ -680,34 +511,9 @@ fn ne(&self, other: &Rc<T>) -> bool { **self != **other }
}
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(stage0)]
impl<T: Eq> Eq for Rc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(stage0))]
impl<T: ?Sized + Eq> Eq for Rc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(stage0)]
impl<T: PartialOrd> PartialOrd for Rc<T> {
#[inline(always)]
fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
#[inline(always)]
fn lt(&self, other: &Rc<T>) -> bool { **self < **other }
#[inline(always)]
fn le(&self, other: &Rc<T>) -> bool { **self <= **other }
#[inline(always)]
fn gt(&self, other: &Rc<T>) -> bool { **self > **other }
#[inline(always)]
fn ge(&self, other: &Rc<T>) -> bool { **self >= **other }
}
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(stage0))]
impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// Partial comparison for two `Rc<T>`s.
///
@@ -793,13 +599,6 @@ fn ge(&self, other: &Rc<T>) -> bool { **self >= **other }
}
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(stage0)]
impl<T: Ord> Ord for Rc<T> {
#[inline]
fn cmp(&self, other: &Rc<T>) -> Ordering { (**self).cmp(&**other) }
}
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(stage0))]
impl<T: ?Sized + Ord> Ord for Rc<T> {
/// Comparison for two `Rc<T>`s.
///
@@ -818,14 +617,6 @@ impl<T: ?Sized + Ord> Ord for Rc<T> {
fn cmp(&self, other: &Rc<T>) -> Ordering { (**self).cmp(&**other) }
}
#[cfg(stage0)]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Hash> Hash for Rc<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state);
}
}
#[cfg(not(stage0))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized+Hash> Hash for Rc<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
@@ -833,14 +624,6 @@ fn hash<H: Hasher>(&self, state: &mut H) {
}
}
#[cfg(stage0)]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Display> fmt::Display for Rc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
#[cfg(not(stage0))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized+fmt::Display> fmt::Display for Rc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
@@ -848,14 +631,6 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
}
}
#[cfg(stage0)]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug> fmt::Debug for Rc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
#[cfg(not(stage0))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized+fmt::Debug> fmt::Debug for Rc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
@@ -876,16 +651,6 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
/// dropped.
///
/// See the [module level documentation](./index.html) for more.
#[cfg(stage0)]
#[unsafe_no_drop_flag]
#[unstable(feature = "alloc",
reason = "Weak pointers may not belong in this module.")]
pub struct Weak<T> {
// FIXME #12808: strange names to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: NonZero<*mut RcBox<T>>,
}
#[cfg(not(stage0))]
#[unsafe_no_drop_flag]
#[unstable(feature = "alloc",
reason = "Weak pointers may not belong in this module.")]
@@ -895,51 +660,9 @@ pub struct Weak<T: ?Sized> {
_ptr: NonZero<*mut RcBox<T>>,
}
#[cfg(stage0)]
impl<T> !marker::Send for Weak<T> {}
#[cfg(not(stage0))]
impl<T: ?Sized> !marker::Send for Weak<T> {}
#[cfg(stage0)]
impl<T> !marker::Sync for Weak<T> {}
#[cfg(not(stage0))]
impl<T: ?Sized> !marker::Sync for Weak<T> {}
#[cfg(stage0)]
#[unstable(feature = "alloc",
reason = "Weak pointers may not belong in this module.")]
impl<T> Weak<T> {
/// Upgrades a weak reference to a strong reference.
///
/// Upgrades the `Weak<T>` reference to an `Rc<T>`, if possible.
///
/// Returns `None` if there were no strong references and the data was
/// destroyed.
///
/// # Examples
///
/// ```
/// # #![feature(alloc)]
/// use std::rc::Rc;
///
/// let five = Rc::new(5);
///
/// let weak_five = five.downgrade();
///
/// let strong_five: Option<Rc<_>> = weak_five.upgrade();
/// ```
pub fn upgrade(&self) -> Option<Rc<T>> {
if self.strong() == 0 {
None
} else {
self.inc_strong();
Some(Rc { _ptr: self._ptr })
}
}
}
#[cfg(not(stage0))]
#[unstable(feature = "alloc",
reason = "Weak pointers may not belong in this module.")]
impl<T: ?Sized> Weak<T> {
@@ -973,52 +696,6 @@ pub fn upgrade(&self) -> Option<Rc<T>> {
}
}
#[cfg(stage0)]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Drop for Weak<T> {
/// Drops the `Weak<T>`.
///
/// This will decrement the weak reference count.
///
/// # Examples
///
/// ```
/// # #![feature(alloc)]
/// use std::rc::Rc;
///
/// {
/// let five = Rc::new(5);
/// let weak_five = five.downgrade();
///
/// // stuff
///
/// drop(weak_five); // explicit drop
/// }
/// {
/// let five = Rc::new(5);
/// let weak_five = five.downgrade();
///
/// // stuff
///
/// } // implicit drop
/// ```
fn drop(&mut self) {
unsafe {
let ptr = *self._ptr;
if !ptr.is_null() && ptr as usize != mem::POST_DROP_USIZE {
self.dec_weak();
// the weak count starts at 1, and will only go to zero if all
// the strong pointers have disappeared.
if self.weak() == 0 {
deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
}
}
}
}
}
#[cfg(not(stage0))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Drop for Weak<T> {
/// Drops the `Weak<T>`.
@@ -1064,32 +741,6 @@ fn drop(&mut self) {
}
}
#[cfg(stage0)]
#[unstable(feature = "alloc",
reason = "Weak pointers may not belong in this module.")]
impl<T> Clone for Weak<T> {
/// Makes a clone of the `Weak<T>`.
///
/// This increases the weak reference count.
///
/// # Examples
///
/// ```
/// # #![feature(alloc)]
/// use std::rc::Rc;
///
/// let weak_five = Rc::new(5).downgrade();
///
/// weak_five.clone();
/// ```
#[inline]
fn clone(&self) -> Weak<T> {
self.inc_weak();
Weak { _ptr: self._ptr }
}
}
#[cfg(not(stage0))]
#[unstable(feature = "alloc",
reason = "Weak pointers may not belong in this module.")]
impl<T: ?Sized> Clone for Weak<T> {
@@ -1115,14 +766,6 @@ fn clone(&self) -> Weak<T> {
}
}
#[cfg(stage0)]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug> fmt::Debug for Weak<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "(Weak)")
}
}
#[cfg(not(stage0))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized+fmt::Debug> fmt::Debug for Weak<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
@@ -1130,30 +773,6 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
}
}
#[cfg(stage0)]
#[doc(hidden)]
trait RcBoxPtr<T> {
fn inner(&self) -> &RcBox<T>;
#[inline]
fn strong(&self) -> usize { self.inner().strong.get() }
#[inline]
fn inc_strong(&self) { self.inner().strong.set(self.strong() + 1); }
#[inline]
fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); }
#[inline]
fn weak(&self) -> usize { self.inner().weak.get() }
#[inline]
fn inc_weak(&self) { self.inner().weak.set(self.weak() + 1); }
#[inline]
fn dec_weak(&self) { self.inner().weak.set(self.weak() - 1); }
}
#[cfg(not(stage0))]
#[doc(hidden)]
trait RcBoxPtr<T: ?Sized> {
fn inner(&self) -> &RcBox<T>;
@@ -1177,21 +796,6 @@ fn weak(&self) -> usize { self.inner().weak.get() }
fn dec_weak(&self) { self.inner().weak.set(self.weak() - 1); }
}
#[cfg(stage0)]
impl<T> RcBoxPtr<T> for Rc<T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
unsafe {
// Safe to assume this here, as if it weren't true, we'd be breaking
// the contract anyway.
// This allows the null check to be elided in the destructor if we
// manipulated the reference count in the same function.
assume(!(*(&self._ptr as *const _ as *const *const ())).is_null());
&(**self._ptr)
}
}
}
#[cfg(not(stage0))]
impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
@@ -1206,21 +810,6 @@ fn inner(&self) -> &RcBox<T> {
}
}
#[cfg(stage0)]
impl<T> RcBoxPtr<T> for Weak<T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
unsafe {
// Safe to assume this here, as if it weren't true, we'd be breaking
// the contract anyway.
// This allows the null check to be elided in the destructor if we
// manipulated the reference count in the same function.
assume(!(*(&self._ptr as *const _ as *const *const ())).is_null());
&(**self._ptr)
}
}
}
#[cfg(not(stage0))]
impl<T: ?Sized> RcBoxPtr<T> for Weak<T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {


@@ -116,17 +116,11 @@ impl<'a, T: ?Sized> BorrowMut<T> for &'a mut T {
fn borrow_mut(&mut self) -> &mut T { &mut **self }
}
#[cfg(stage0)]
impl<T> Borrow<T> for rc::Rc<T> {
fn borrow(&self) -> &T { &**self }
}
#[cfg(not(stage0))]
impl<T: ?Sized> Borrow<T> for rc::Rc<T> {
fn borrow(&self) -> &T { &**self }
}
impl<T> Borrow<T> for arc::Arc<T> {
impl<T: ?Sized> Borrow<T> for arc::Arc<T> {
fn borrow(&self) -> &T { &**self }
}


@@ -19,7 +19,6 @@
use core::prelude::*;
use core::cmp::Ordering::{Greater, Less, Equal};
#[cfg(not(stage0))]
use core::intrinsics::arith_offset;
use core::iter::Zip;
use core::marker::PhantomData;
@@ -207,22 +206,6 @@ unsafe fn from_slice(slice: &[T]) -> RawItems<T> {
RawItems::from_parts(slice.as_ptr(), slice.len())
}
#[cfg(stage0)]
unsafe fn from_parts(ptr: *const T, len: usize) -> RawItems<T> {
if mem::size_of::<T>() == 0 {
RawItems {
head: ptr,
tail: (ptr as usize + len) as *const T,
}
} else {
RawItems {
head: ptr,
tail: ptr.offset(len as isize),
}
}
}
#[cfg(not(stage0))]
unsafe fn from_parts(ptr: *const T, len: usize) -> RawItems<T> {
if mem::size_of::<T>() == 0 {
RawItems {
@@ -237,18 +220,6 @@ unsafe fn from_parts(ptr: *const T, len: usize) -> RawItems<T> {
}
}
#[cfg(stage0)]
unsafe fn push(&mut self, val: T) {
ptr::write(self.tail as *mut T, val);
if mem::size_of::<T>() == 0 {
self.tail = (self.tail as usize + 1) as *const T;
} else {
self.tail = self.tail.offset(1);
}
}
#[cfg(not(stage0))]
unsafe fn push(&mut self, val: T) {
ptr::write(self.tail as *mut T, val);
@@ -263,26 +234,6 @@ unsafe fn push(&mut self, val: T) {
impl<T> Iterator for RawItems<T> {
type Item = T;
#[cfg(stage0)]
fn next(&mut self) -> Option<T> {
if self.head == self.tail {
None
} else {
unsafe {
let ret = Some(ptr::read(self.head));
if mem::size_of::<T>() == 0 {
self.head = (self.head as usize + 1) as *const T;
} else {
self.head = self.head.offset(1);
}
ret
}
}
}
#[cfg(not(stage0))]
fn next(&mut self) -> Option<T> {
if self.head == self.tail {
None
@@ -303,24 +254,6 @@ fn next(&mut self) -> Option<T> {
}
impl<T> DoubleEndedIterator for RawItems<T> {
#[cfg(stage0)]
fn next_back(&mut self) -> Option<T> {
if self.head == self.tail {
None
} else {
unsafe {
if mem::size_of::<T>() == 0 {
self.tail = (self.tail as usize - 1) as *const T;
} else {
self.tail = self.tail.offset(-1);
}
Some(ptr::read(self.tail))
}
}
}
#[cfg(not(stage0))]
fn next_back(&mut self) -> Option<T> {
if self.head == self.tail {
None


@@ -66,9 +66,7 @@
use core::cmp::Ordering;
use core::fmt;
use core::hash::{self, Hash};
use core::intrinsics::assume;
#[cfg(not(stage0))]
use core::intrinsics::arith_offset;
use core::intrinsics::{arith_offset, assume};
use core::iter::{repeat, FromIterator};
use core::marker::PhantomData;
use core::mem;
@@ -1526,25 +1524,6 @@ impl<T> IntoIterator for Vec<T> {
/// }
/// ```
#[inline]
#[cfg(stage0)]
fn into_iter(self) -> IntoIter<T> {
unsafe {
let ptr = *self.ptr;
assume(!ptr.is_null());
let cap = self.cap;
let begin = ptr as *const T;
let end = if mem::size_of::<T>() == 0 {
(ptr as usize + self.len()) as *const T
} else {
ptr.offset(self.len() as isize) as *const T
};
mem::forget(self);
IntoIter { allocation: ptr, cap: cap, ptr: begin, end: end }
}
}
#[inline]
#[cfg(not(stage0))]
fn into_iter(self) -> IntoIter<T> {
unsafe {
let ptr = *self.ptr;
@@ -1764,32 +1743,6 @@ impl<T> Iterator for IntoIter<T> {
type Item = T;
#[inline]
#[cfg(stage0)]
fn next(&mut self) -> Option<T> {
unsafe {
if self.ptr == self.end {
None
} else {
if mem::size_of::<T>() == 0 {
// purposefully don't use 'ptr.offset' because for
// vectors with 0-size elements this would return the
// same pointer.
self.ptr = mem::transmute(self.ptr as usize + 1);
// Use a non-null pointer value
Some(ptr::read(EMPTY as *mut T))
} else {
let old = self.ptr;
self.ptr = self.ptr.offset(1);
Some(ptr::read(old))
}
}
}
}
#[inline]
#[cfg(not(stage0))]
fn next(&mut self) -> Option<T> {
unsafe {
if self.ptr == self.end {
@@ -1830,29 +1783,6 @@ fn count(self) -> usize {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> DoubleEndedIterator for IntoIter<T> {
#[inline]
#[cfg(stage0)]
fn next_back(&mut self) -> Option<T> {
unsafe {
if self.end == self.ptr {
None
} else {
if mem::size_of::<T>() == 0 {
// See above for why 'ptr.offset' isn't used
self.end = mem::transmute(self.end as usize - 1);
// Use a non-null pointer value
Some(ptr::read(EMPTY as *mut T))
} else {
self.end = self.end.offset(-1);
Some(ptr::read(mem::transmute(self.end)))
}
}
}
}
#[inline]
#[cfg(not(stage0))]
fn next_back(&mut self) -> Option<T> {
unsafe {
if self.end == self.ptr {


@@ -145,13 +145,9 @@
/// but no instructions will be emitted for it. This is appropriate for operations
/// on the same thread that may be preempted, such as when interacting with signal
/// handlers.
#[cfg(not(stage0))] // SNAP 857ef6e
pub fn atomic_singlethreadfence();
#[cfg(not(stage0))] // SNAP 857ef6e
pub fn atomic_singlethreadfence_acq();
#[cfg(not(stage0))] // SNAP 857ef6e
pub fn atomic_singlethreadfence_rel();
#[cfg(not(stage0))] // SNAP 857ef6e
pub fn atomic_singlethreadfence_acqrel();
/// Aborts the execution of the process.
@@ -193,11 +189,8 @@
pub fn min_align_of<T>() -> usize;
pub fn pref_align_of<T>() -> usize;
#[cfg(not(stage0))]
pub fn size_of_val<T: ?Sized>(_: &T) -> usize;
#[cfg(not(stage0))]
pub fn min_align_of_val<T: ?Sized>(_: &T) -> usize;
#[cfg(not(stage0))]
pub fn drop_in_place<T: ?Sized>(_: *mut T);
/// Gets a static string slice containing the name of a type.
@@ -294,7 +287,6 @@
/// resulting pointer to point into or one byte past the end of an allocated
/// object, and it wraps with two's complement arithmetic. The resulting
/// value is not necessarily valid to be used to actually access memory.
#[cfg(not(stage0))]
pub fn arith_offset<T>(dst: *const T, offset: isize) -> *const T;
/// Copies `count * size_of<T>` bytes from `src` to `dst`. The source
@@ -592,13 +584,6 @@ pub fn volatile_copy_nonoverlapping_memory<T>(dst: *mut T, src: *const T,
/// Returns (a * b) mod 2^N, where N is the width of N in bits.
pub fn overflowing_mul<T>(a: T, b: T) -> T;
/// Returns the value of the discriminant for the variant in 'v',
/// cast to a `u64`; if `T` has no discriminant, returns 0.
pub fn discriminant_value<T>(v: &T) -> u64;
}
#[cfg(not(stage0))]
extern "rust-intrinsic" {
/// Performs an unchecked signed division, which results in undefined behavior,
/// in cases where y == 0, or x == int::MIN and y == -1
pub fn unchecked_sdiv<T>(x: T, y: T) -> T;
@@ -612,4 +597,8 @@ pub fn volatile_copy_nonoverlapping_memory<T>(dst: *mut T, src: *const T,
/// Returns the remainder of an unchecked signed division, which results in
/// undefined behavior, in cases where y == 0
pub fn unchecked_srem<T>(x: T, y: T) -> T;
/// Returns the value of the discriminant for the variant in 'v',
/// cast to a `u64`; if `T` has no discriminant, returns 0.
pub fn discriminant_value<T>(v: &T) -> u64;
}
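Several hunks in this commit (the Vec iterators, the btree RawItems code, the slice_offset macro) move their zero-sized-type pointer stepping onto the arith_offset intrinsic declared above: plain wrapping address arithmetic, with none of ptr::offset's in-bounds requirements. A hedged sketch of the trick on today's stable surface, where wrapping_offset is the stable wrapper over this intrinsic; the stage0 fallbacks being deleted spell the same step out by hand as (ptr as usize + 1) as *const T:

// For zero-sized T, ptr.offset(1) would hand back the same address, so the
// iterators keep their "position" in the pointer's integer value instead.
// wrapping_offset (backed by the arith_offset intrinsic) is used because the
// result is a counter, not a pointer that must stay inside an allocation.
fn bump_zst<T>(ptr: *const T) -> *const T {
    (ptr as *const i8).wrapping_offset(1) as *const T
}

fn main() {
    // Any non-null address serves as the starting pointer for a ZST iterator.
    let start = 0x1 as *const ();
    let next = bump_zst(start);
    assert_eq!(next as usize, 2); // the element count lives in the address itself
}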


@@ -55,7 +55,6 @@ pub trait Sized {
/// Types that can be "unsized" to a dynamically sized type.
#[unstable(feature = "core")]
#[cfg(not(stage0))]
#[lang="unsize"]
pub trait Unsize<T> {
// Empty.


@@ -95,29 +95,12 @@ pub fn size_of<T>() -> usize {
///
/// assert_eq!(4, mem::size_of_val(&5i32));
/// ```
#[cfg(not(stage0))]
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn size_of_val<T: ?Sized>(val: &T) -> usize {
unsafe { intrinsics::size_of_val(val) }
}
/// Returns the size of the type that `_val` points to in bytes.
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// assert_eq!(4, mem::size_of_val(&5i32));
/// ```
#[cfg(stage0)]
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn size_of_val<T>(_val: &T) -> usize {
size_of::<T>()
}
/// Returns the ABI-required minimum alignment of a type
///
/// This is the alignment used for struct fields. It may be smaller than the preferred alignment.
@@ -144,29 +127,12 @@ pub fn min_align_of<T>() -> usize {
///
/// assert_eq!(4, mem::min_align_of_val(&5i32));
/// ```
#[cfg(not(stage0))]
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn min_align_of_val<T: ?Sized>(val: &T) -> usize {
unsafe { intrinsics::min_align_of_val(val) }
}
/// Returns the ABI-required minimum alignment of the type of the value that `_val` points to
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// assert_eq!(4, mem::min_align_of_val(&5i32));
/// ```
#[cfg(stage0)]
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn min_align_of_val<T>(_val: &T) -> usize {
min_align_of::<T>()
}
/// Returns the alignment in memory for a type.
///
/// This function will return the alignment, in bytes, of a type in memory. If the alignment


@@ -11,9 +11,7 @@
//! Exposes the NonZero lang item which provides optimization hints.
use marker::Sized;
use ops::Deref;
#[cfg(not(stage0))]
use ops::CoerceUnsized;
use ops::{CoerceUnsized, Deref};
/// Unsafe trait to indicate what types are usable with the NonZero struct
pub unsafe trait Zeroable {}
@@ -57,5 +55,4 @@ fn deref<'a>(&'a self) -> &'a T {
}
}
#[cfg(not(stage0))]
impl<T: Zeroable+CoerceUnsized<U>, U: Zeroable> CoerceUnsized<NonZero<U>> for NonZero<T> {}


@@ -67,12 +67,9 @@
#![stable(feature = "rust1", since = "1.0.0")]
use marker::Sized;
use marker::{Sized, Unsize};
use fmt;
#[cfg(not(stage0))]
use marker::Unsize;
/// The `Drop` trait is used to run some code when a value goes out of scope. This
/// is sometimes called a 'destructor'.
///
@@ -1214,39 +1211,29 @@ extern "rust-call" fn call_once(mut self, args: A) -> F::Output {
/// Trait that indicates that this is a pointer or a wrapper for one,
/// where unsizing can be performed on the pointee.
#[unstable(feature = "core")]
#[cfg(not(stage0))]
#[lang="coerce_unsized"]
pub trait CoerceUnsized<T> {
// Empty.
}
// &mut T -> &mut U
#[cfg(not(stage0))]
impl<'a, T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<&'a mut U> for &'a mut T {}
// &mut T -> &U
#[cfg(not(stage0))]
impl<'a, 'b: 'a, T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b mut T {}
// &mut T -> *mut U
#[cfg(not(stage0))]
impl<'a, T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for &'a mut T {}
// &mut T -> *const U
#[cfg(not(stage0))]
impl<'a, T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for &'a mut T {}
// &T -> &U
#[cfg(not(stage0))]
impl<'a, 'b: 'a, T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
// &T -> *const U
#[cfg(not(stage0))]
impl<'a, T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for &'a T {}
// *mut T -> *mut U
#[cfg(not(stage0))]
impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
// *mut T -> *const U
#[cfg(not(stage0))]
impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *mut T {}
// *const T -> *const U
#[cfg(not(stage0))]
impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {}
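The Unsize bound (the marker trait hunk above) and these CoerceUnsized impls are what drive implicit unsized coercions, and this commit un-gates the Box, Rc and NonZero impls now that stage0 no longer needs to build without them. A small usage sketch of the coercions those impls enable, written against today's surface syntax (nothing below names the unstable traits directly):

// [i32; 3]: Unsize<[i32]>, so each impl above licenses one of these steps.
fn total(xs: &[i32]) -> i32 {
    xs.iter().fold(0, |acc, &x| acc + x)
}

fn main() {
    let array: [i32; 3] = [1, 2, 3];

    let slice: &[i32] = &array;                   // &T -> &U
    let raw_sized: *const [i32; 3] = &array;
    let raw: *const [i32] = raw_sized;            // *const T -> *const U
    let boxed: Box<[i32]> = Box::new([4, 5, 6]);  // Box<T> -> Box<U>, the Box impl earlier in this commit

    assert_eq!(total(slice), 6);
    assert_eq!(total(&boxed), 15);
    assert_eq!(unsafe { (*raw).len() }, 3);
}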


@@ -125,19 +125,6 @@ fn rsplitn_mut<P>(&mut self, n: usize, pred: P) -> RSplitNMut<Self::Item, P>
}
// Use macros to be generic over const/mut
#[cfg(stage0)]
macro_rules! slice_offset {
($ptr:expr, $by:expr) => {{
let ptr = $ptr;
if size_from_ptr(ptr) == 0 {
transmute((ptr as isize).wrapping_add($by))
} else {
ptr.offset($by)
}
}};
}
#[cfg(not(stage0))]
macro_rules! slice_offset {
($ptr:expr, $by:expr) => {{
let ptr = $ptr;


@@ -167,7 +167,4 @@ mod rustc {
}
// Build the diagnostics array at the end so that the metadata includes error use sites.
#[cfg(stage0)]
__build_diagnostic_array! { DIAGNOSTICS }
#[cfg(not(stage0))]
__build_diagnostic_array! { librustc, DIAGNOSTICS }


@@ -47,7 +47,4 @@
pub mod graphviz;
#[cfg(stage0)]
__build_diagnostic_array! { DIAGNOSTICS }
#[cfg(not(stage0))]
__build_diagnostic_array! { librustc_borrowck, DIAGNOSTICS }


@@ -3723,7 +3723,4 @@ pub fn resolve_crate<'a, 'tcx>(session: &'a Session,
}
}
#[cfg(stage0)]
__build_diagnostic_array! { DIAGNOSTICS }
#[cfg(not(stage0))]
__build_diagnostic_array! { librustc_resolve, DIAGNOSTICS }


@@ -344,7 +344,4 @@ pub fn check_crate(tcx: &ty::ctxt, trait_map: ty::TraitMap) {
tcx.sess.abort_if_errors();
}
#[cfg(stage0)]
__build_diagnostic_array! { DIAGNOSTICS }
#[cfg(not(stage0))]
__build_diagnostic_array! { librustc_typeck, DIAGNOSTICS }