Auto merge of #40867 - alexcrichton:rollup, r=alexcrichton

Rollup of 19 pull requests

- Successful merges: #40317, #40516, #40524, #40606, #40683, #40751, #40778, #40813, #40818, #40819, #40824, #40828, #40832, #40833, #40837, #40849, #40852, #40853, #40865
- Failed merges:
This commit is contained in:
bors 2017-03-28 21:10:07 +00:00
commit 07a34293fa
93 changed files with 974 additions and 592 deletions

View file

@ -46,13 +46,13 @@ environment:
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --enable-ninja
SCRIPT: python x.py test
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-win32-dwarf-rt_v5-rev1.7z
MINGW_ARCHIVE: i686-6.2.0-release-win32-dwarf-rt_v5-rev1.7z
MINGW_DIR: mingw32
- MSYS_BITS: 64
SCRIPT: python x.py test
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu --enable-ninja
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
MINGW_ARCHIVE: x86_64-6.3.0-release-win32-seh-rt_v5-rev1.7z
MINGW_ARCHIVE: x86_64-6.2.0-release-win32-seh-rt_v5-rev1.7z
MINGW_DIR: mingw64
# 32/64 bit MSVC and GNU deployment
@ -71,14 +71,14 @@ environment:
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --enable-extended --enable-ninja
SCRIPT: python x.py dist
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-win32-dwarf-rt_v5-rev1.7z
MINGW_ARCHIVE: i686-6.2.0-release-win32-dwarf-rt_v5-rev1.7z
MINGW_DIR: mingw32
DEPLOY: 1
- MSYS_BITS: 64
SCRIPT: python x.py dist
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu --enable-extended --enable-ninja
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
MINGW_ARCHIVE: x86_64-6.3.0-release-win32-seh-rt_v5-rev1.7z
MINGW_ARCHIVE: x86_64-6.2.0-release-win32-seh-rt_v5-rev1.7z
MINGW_DIR: mingw64
DEPLOY: 1

2
cargo

@ -1 +1 @@
Subproject commit c995e9eb5acf3976ae8674a0dc6d9e958053d9fd
Subproject commit 4e95c6b41eca3388f54dd5f7787366ad2df637b5

View file

@ -40,6 +40,14 @@ fn main() {
.arg(sysroot)
.env(bootstrap::util::dylib_path_var(),
env::join_paths(&dylib_path).unwrap());
// Pass the `rustbuild` feature flag to crates which rustbuild is
// building. See the comment in bootstrap/lib.rs where this env var is
// set for more details.
if env::var_os("RUSTBUILD_UNSTABLE").is_some() {
cmd.arg("--cfg").arg("rustbuild");
}
std::process::exit(match cmd.status() {
Ok(s) => s.code().unwrap_or(1),
Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),

View file

@ -586,7 +586,7 @@ fn android_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
.arg(ADB_TEST_DIR));
let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]]));
build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir]));
for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
let f = t!(f);

View file

@ -196,7 +196,7 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, token::Token>, surrogate_
let toknum = &s[content_end + 3 .. toknum_end];
let not_found = format!("didn't find token {:?} in the map", toknum);
let proto_tok = tokens.get(toknum).expect(&not_found[..]);
let proto_tok = tokens.get(toknum).expect(&not_found);
let nm = Symbol::intern(content);
@ -304,14 +304,14 @@ fn main() {
let mut token_file = File::open(&Path::new(&args.next().unwrap())).unwrap();
let mut token_list = String::new();
token_file.read_to_string(&mut token_list).unwrap();
let token_map = parse_token_list(&token_list[..]);
let token_map = parse_token_list(&token_list);
let stdin = std::io::stdin();
let lock = stdin.lock();
let lines = lock.lines();
let antlr_tokens = lines.map(|l| parse_antlr_token(l.unwrap().trim(),
&token_map,
&surrogate_pairs_pos[..],
&surrogate_pairs_pos,
has_bom));
for antlr_tok in antlr_tokens {

View file

@ -1376,7 +1376,7 @@ mod tests {
thread::spawn(move || {
check_links(&n);
let a: &[_] = &[&1, &2, &3];
assert_eq!(a, &n.iter().collect::<Vec<_>>()[..]);
assert_eq!(a, &*n.iter().collect::<Vec<_>>());
})
.join()
.ok()

View file

@ -196,7 +196,7 @@ pub trait Drop {
fn drop(&mut self);
}
/// The `Add` trait is used to specify the functionality of `+`.
/// The addition operator `+`.
///
/// # Examples
///
@ -269,7 +269,7 @@ macro_rules! add_impl {
add_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
/// The `Sub` trait is used to specify the functionality of `-`.
/// The subtraction operator `-`.
///
/// # Examples
///
@ -342,7 +342,7 @@ macro_rules! sub_impl {
sub_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
/// The `Mul` trait is used to specify the functionality of `*`.
/// The multiplication operator `*`.
///
/// # Examples
///
@ -464,7 +464,7 @@ macro_rules! mul_impl {
mul_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
/// The `Div` trait is used to specify the functionality of `/`.
/// The division operator `/`.
///
/// # Examples
///
@ -609,7 +609,7 @@ macro_rules! div_impl_float {
div_impl_float! { f32 f64 }
/// The `Rem` trait is used to specify the functionality of `%`.
/// The remainder operator `%`.
///
/// # Examples
///
@ -689,7 +689,7 @@ macro_rules! rem_impl_float {
rem_impl_float! { f32 f64 }
/// The `Neg` trait is used to specify the functionality of unary `-`.
/// The unary negation operator `-`.
///
/// # Examples
///
@ -768,7 +768,7 @@ macro_rules! neg_impl_unsigned {
// neg_impl_unsigned! { usize u8 u16 u32 u64 }
neg_impl_numeric! { isize i8 i16 i32 i64 i128 f32 f64 }
/// The `Not` trait is used to specify the functionality of unary `!`.
/// The unary logical negation operator `!`.
///
/// # Examples
///
@ -826,7 +826,7 @@ macro_rules! not_impl {
not_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// The `BitAnd` trait is used to specify the functionality of `&`.
/// The bitwise AND operator `&`.
///
/// # Examples
///
@ -909,7 +909,7 @@ macro_rules! bitand_impl {
bitand_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// The `BitOr` trait is used to specify the functionality of `|`.
/// The bitwise OR operator `|`.
///
/// # Examples
///
@ -992,7 +992,7 @@ macro_rules! bitor_impl {
bitor_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// The `BitXor` trait is used to specify the functionality of `^`.
/// The bitwise XOR operator `^`.
///
/// # Examples
///
@ -1078,7 +1078,7 @@ macro_rules! bitxor_impl {
bitxor_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// The `Shl` trait is used to specify the functionality of `<<`.
/// The left shift operator `<<`.
///
/// # Examples
///
@ -1181,7 +1181,7 @@ macro_rules! shl_impl_all {
shl_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 isize i128 }
/// The `Shr` trait is used to specify the functionality of `>>`.
/// The right shift operator `>>`.
///
/// # Examples
///
@ -1284,7 +1284,7 @@ macro_rules! shr_impl_all {
shr_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize }
/// The `AddAssign` trait is used to specify the functionality of `+=`.
/// The addition assignment operator `+=`.
///
/// # Examples
///
@ -1340,7 +1340,7 @@ macro_rules! add_assign_impl {
add_assign_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
/// The `SubAssign` trait is used to specify the functionality of `-=`.
/// The subtraction assignment operator `-=`.
///
/// # Examples
///
@ -1396,7 +1396,7 @@ macro_rules! sub_assign_impl {
sub_assign_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
/// The `MulAssign` trait is used to specify the functionality of `*=`.
/// The multiplication assignment operator `*=`.
///
/// # Examples
///
@ -1441,7 +1441,7 @@ macro_rules! mul_assign_impl {
mul_assign_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
/// The `DivAssign` trait is used to specify the functionality of `/=`.
/// The division assignment operator `/=`.
///
/// # Examples
///
@ -1485,7 +1485,7 @@ macro_rules! div_assign_impl {
div_assign_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
/// The `RemAssign` trait is used to specify the functionality of `%=`.
/// The remainder assignment operator `%=`.
///
/// # Examples
///
@ -1529,7 +1529,7 @@ macro_rules! rem_assign_impl {
rem_assign_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
/// The `BitAndAssign` trait is used to specify the functionality of `&=`.
/// The bitwise AND assignment operator `&=`.
///
/// # Examples
///
@ -1615,7 +1615,7 @@ macro_rules! bitand_assign_impl {
bitand_assign_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// The `BitOrAssign` trait is used to specify the functionality of `|=`.
/// The bitwise OR assignment operator `|=`.
///
/// # Examples
///
@ -1659,7 +1659,7 @@ macro_rules! bitor_assign_impl {
bitor_assign_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// The `BitXorAssign` trait is used to specify the functionality of `^=`.
/// The bitwise XOR assignment operator `^=`.
///
/// # Examples
///
@ -1703,7 +1703,7 @@ macro_rules! bitxor_assign_impl {
bitxor_assign_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// The `ShlAssign` trait is used to specify the functionality of `<<=`.
/// The left shift assignment operator `<<=`.
///
/// # Examples
///
@ -1768,7 +1768,7 @@ macro_rules! shl_assign_impl_all {
shl_assign_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize }
/// The `ShrAssign` trait is used to specify the functionality of `>>=`.
/// The right shift assignment operator `>>=`.
///
/// # Examples
///

View file

@ -498,32 +498,40 @@ fn partition_equal<T, F>(v: &mut [T], pivot: usize, is_less: &mut F) -> usize
#[cold]
fn break_patterns<T>(v: &mut [T]) {
let len = v.len();
if len >= 8 {
// A random number will be taken modulo this one. The modulus is a power of two so that we
// can simply take bitwise "and", thus avoiding costly CPU operations.
let modulus = (len / 4).next_power_of_two();
debug_assert!(modulus >= 1 && modulus <= len / 2);
// Pseudorandom number generator from the "Xorshift RNGs" paper by George Marsaglia.
let mut random = len as u32;
let mut gen_u32 = || {
random ^= random << 13;
random ^= random >> 17;
random ^= random << 5;
random
};
let mut gen_usize = || {
if mem::size_of::<usize>() <= 4 {
gen_u32() as usize
} else {
(((gen_u32() as u64) << 32) | (gen_u32() as u64)) as usize
}
};
// Pseudorandom number generation from the "Xorshift RNGs" paper by George Marsaglia.
let mut random = len;
random ^= random << 13;
random ^= random >> 17;
random ^= random << 5;
random &= modulus - 1;
debug_assert!(random < len / 2);
// Take random numbers modulo this number.
// The number fits into `usize` because `len` is not greater than `isize::MAX`.
let modulus = len.next_power_of_two();
// The first index.
let a = len / 4 * 2;
debug_assert!(a >= 1 && a < len - 2);
// Some pivot candidates will be in the nearby of this index. Let's randomize them.
let pos = len / 4 * 2;
// The second index.
let b = len / 4 + random;
debug_assert!(b >= 1 && b < len - 2);
// Swap neighbourhoods of `a` and `b`.
for i in 0..3 {
v.swap(a - 1 + i, b - 1 + i);
// Generate a random number modulo `len`. However, in order to avoid costly operations
// we first take it modulo a power of two, and then decrease by `len` until it fits
// into the range `[0, len - 1]`.
let mut other = gen_usize() & (modulus - 1);
while other >= len {
other -= len;
}
v.swap(pos - 1 + i, other);
}
}
}

View file

@ -35,6 +35,39 @@ pub mod pattern;
/// [`from_str`]: #tymethod.from_str
/// [`str`]: ../../std/primitive.str.html
/// [`parse`]: ../../std/primitive.str.html#method.parse
///
/// # Examples
///
/// Basic implementation of `FromStr` on an example `Point` type:
///
/// ```
/// use std::str::FromStr;
/// use std::num::ParseIntError;
///
/// #[derive(Debug, PartialEq)]
/// struct Point {
/// x: i32,
/// y: i32
/// }
///
/// impl FromStr for Point {
/// type Err = ParseIntError;
///
/// fn from_str(s: &str) -> Result<Self, Self::Err> {
/// let coords: Vec<&str> = s.trim_matches(|p| p == '(' || p == ')' )
/// .split(",")
/// .collect();
///
/// let x_fromstr = coords[0].parse::<i32>()?;
/// let y_fromstr = coords[1].parse::<i32>()?;
///
/// Ok(Point { x: x_fromstr, y: y_fromstr })
/// }
/// }
///
/// let p = Point::from_str("(1,2)");
/// assert_eq!(p.unwrap(), Point{ x: 1, y: 2} )
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait FromStr: Sized {
/// The associated error which can be returned from parsing.
@ -101,7 +134,9 @@ impl FromStr for bool {
}
}
/// An error returned when parsing a `bool` from a string fails.
/// An error returned when parsing a `bool` using [`from_str`] fails.
///
/// [`from_str`]: ../../std/primitive.bool.html#method.from_str
#[derive(Debug, Clone, PartialEq, Eq)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ParseBoolError { _priv: () }

View file

@ -554,7 +554,7 @@ impl<'a> LabelText<'a> {
pub fn to_dot_string(&self) -> String {
match self {
&LabelStr(ref s) => format!("\"{}\"", s.escape_default()),
&EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s[..])),
&EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s)),
&HtmlStr(ref s) => format!("<{}>", s),
}
}
@ -587,7 +587,7 @@ impl<'a> LabelText<'a> {
let mut prefix = self.pre_escaped_content().into_owned();
let suffix = suffix.pre_escaped_content();
prefix.push_str(r"\n\n");
prefix.push_str(&suffix[..]);
prefix.push_str(&suffix);
EscStr(prefix.into_cow())
}
}
@ -878,7 +878,7 @@ mod tests {
type Node = Node;
type Edge = &'a Edge;
fn graph_id(&'a self) -> Id<'a> {
Id::new(&self.name[..]).unwrap()
Id::new(self.name).unwrap()
}
fn node_id(&'a self, n: &Node) -> Id<'a> {
id_name(n)

@ -1 +1 @@
Subproject commit 64d954c6a76e896fbf7ed5c17e77c40e388abe84
Subproject commit 05a2d197356ef253dfd985166576619ac9b6947f

View file

@ -75,7 +75,6 @@ pub enum DepNode<D: Clone + Debug> {
CoherenceCheckImpl(D),
CoherenceOverlapCheck(D),
CoherenceOverlapCheckSpecial(D),
CoherenceOverlapInherentCheck(D),
CoherenceOrphanCheck(D),
Variance,
WfCheck(D),
@ -252,7 +251,6 @@ impl<D: Clone + Debug> DepNode<D> {
CoherenceCheckImpl(ref d) => op(d).map(CoherenceCheckImpl),
CoherenceOverlapCheck(ref d) => op(d).map(CoherenceOverlapCheck),
CoherenceOverlapCheckSpecial(ref d) => op(d).map(CoherenceOverlapCheckSpecial),
CoherenceOverlapInherentCheck(ref d) => op(d).map(CoherenceOverlapInherentCheck),
CoherenceOrphanCheck(ref d) => op(d).map(CoherenceOrphanCheck),
WfCheck(ref d) => op(d).map(WfCheck),
TypeckItemType(ref d) => op(d).map(TypeckItemType),

View file

@ -81,21 +81,6 @@ impl<M: DepTrackingMapConfig> DepTrackingMap<M> {
pub fn keys(&self) -> Vec<M::Key> {
self.map.keys().cloned().collect()
}
/// Append `elem` to the vector stored for `k`, creating a new vector if needed.
/// This is considered a write to `k`.
///
/// NOTE: Caution is required when using this method. You should
/// be sure that nobody is **reading from the vector** while you
/// are writing to it. Eventually, it'd be nice to remove this.
pub fn push<E: Clone>(&mut self, k: M::Key, elem: E)
where M: DepTrackingMapConfig<Value=Vec<E>>
{
self.write(&k);
self.map.entry(k)
.or_insert(Vec::new())
.push(elem);
}
}
impl<M: DepTrackingMapConfig> MemoizationMap for RefCell<DepTrackingMap<M>> {

View file

@ -1336,7 +1336,7 @@ trait SecondTrait : FirstTrait {
E0398: r##"
In Rust 1.3, the default object lifetime bounds are expected to change, as
described in RFC #1156 [1]. You are getting a warning because the compiler
described in [RFC 1156]. You are getting a warning because the compiler
thinks it is possible that this change will cause a compilation error in your
code. It is possible, though unlikely, that this is a false alarm.
@ -1365,7 +1365,7 @@ fn foo<'a>(arg: &Box<SomeTrait+'a>) { ... }
This explicitly states that you expect the trait object `SomeTrait` to contain
references (with a maximum lifetime of `'a`).
[1]: https://github.com/rust-lang/rfcs/pull/1156
[RFC 1156]: https://github.com/rust-lang/rfcs/blob/master/text/1156-adjust-default-object-bounds.md
"##,
E0452: r##"
@ -1771,6 +1771,7 @@ This pattern is incorrect because the type of `foo` is a function
**item** (`typeof(foo)`), which is zero-sized, and the target type (`fn()`)
is a function pointer, which is not zero-sized.
This pattern should be rewritten. There are a few possible ways to do this:
- change the original fn declaration to match the expected signature,
and do the cast in the fn body (the preferred option)
- cast the fn item to a fn pointer before calling transmute, as shown here:

View file

@ -55,7 +55,7 @@ impl Fingerprint {
impl Encodable for Fingerprint {
#[inline]
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
for &byte in &self.0[..] {
for &byte in &self.0 {
s.emit_u8(byte)?;
}
Ok(())
@ -66,7 +66,7 @@ impl Decodable for Fingerprint {
#[inline]
fn decode<D: Decoder>(d: &mut D) -> Result<Fingerprint, D::Error> {
let mut result = Fingerprint([0u8; FINGERPRINT_LENGTH]);
for byte in &mut result.0[..] {
for byte in &mut result.0 {
*byte = d.read_u8()?;
}
Ok(result)

View file

@ -40,7 +40,6 @@ use std::cmp;
use std::default::Default as StdDefault;
use std::mem;
use std::fmt;
use std::ops::Deref;
use syntax::attr;
use syntax::ast;
use syntax::symbol::Symbol;
@ -485,7 +484,7 @@ pub fn raw_struct_lint<'a, S>(sess: &'a Session,
Allow => bug!("earlier conditional return should handle Allow case")
};
let hyphen_case_lint_name = name.replace("_", "-");
if lint_flag_val.as_str().deref() == name {
if lint_flag_val.as_str() == name {
err.note(&format!("requested on the command line with `{} {}`",
flag, hyphen_case_lint_name));
} else {
@ -496,7 +495,7 @@ pub fn raw_struct_lint<'a, S>(sess: &'a Session,
},
Node(lint_attr_name, src) => {
def = Some(src);
if lint_attr_name.as_str().deref() != name {
if lint_attr_name.as_str() != name {
let level_str = level.as_str();
err.note(&format!("#[{}({})] implied by #[{}({})]",
level_str, name, level_str, lint_attr_name));

View file

@ -176,7 +176,6 @@ pub trait CrateStore {
fn item_generics_cloned(&self, def: DefId) -> ty::Generics;
fn item_attrs(&self, def_id: DefId) -> Vec<ast::Attribute>;
fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name>;
fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId>;
// trait info
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>;
@ -310,7 +309,6 @@ impl CrateStore for DummyCrateStore {
{ bug!("item_generics_cloned") }
fn item_attrs(&self, def_id: DefId) -> Vec<ast::Attribute> { bug!("item_attrs") }
fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name> { bug!("fn_arg_names") }
fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId> { vec![] }
// trait info
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId> { vec![] }

View file

@ -536,7 +536,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
if !self.stability.borrow().active_features.contains(feature) {
let msg = match *reason {
Some(ref r) => format!("use of unstable library feature '{}': {}",
&feature.as_str(), &r),
feature.as_str(), &r),
None => format!("use of unstable library feature '{}'", &feature)
};
emit_feature_err(&self.sess.parse_sess, &feature.as_str(), span,

View file

@ -139,6 +139,21 @@ pub enum AutoBorrow<'tcx> {
RawPtr(hir::Mutability),
}
/// Information for `CoerceUnsized` impls, storing information we
/// have computed about the coercion.
///
/// This struct can be obtained via the `coerce_impl_info` query.
/// Demanding this struct also has the side-effect of reporting errors
/// for inappropriate impls.
#[derive(Clone, Copy, RustcEncodable, RustcDecodable, Debug)]
pub struct CoerceUnsizedInfo {
/// If this is a "custom coerce" impl, then what kind of custom
/// coercion is it? This applies to impls of `CoerceUnsized` for
/// structs, primarily, where we store a bit of info about which
/// fields need to be coerced.
pub custom_kind: Option<CustomCoerceUnsized>
}
#[derive(Clone, Copy, RustcEncodable, RustcDecodable, Debug)]
pub enum CustomCoerceUnsized {
/// Records the index of the field being coerced.

View file

@ -13,7 +13,7 @@ use hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use middle::const_val::ConstVal;
use middle::privacy::AccessLevels;
use mir;
use ty::{self, Ty, TyCtxt};
use ty::{self, CrateInherentImpls, Ty, TyCtxt};
use rustc_data_structures::indexed_vec::IndexVec;
use std::cell::{RefCell, RefMut};
@ -177,9 +177,15 @@ impl<'tcx> QueryDescription for queries::coherent_trait<'tcx> {
}
}
impl<'tcx> QueryDescription for queries::coherent_inherent_impls<'tcx> {
impl<'tcx> QueryDescription for queries::crate_inherent_impls<'tcx> {
fn describe(_: TyCtxt, k: CrateNum) -> String {
format!("all inherent impls defined in crate `{:?}`", k)
}
}
impl<'tcx> QueryDescription for queries::crate_inherent_impls_overlap_check<'tcx> {
fn describe(_: TyCtxt, _: CrateNum) -> String {
format!("coherence checking all inherent impls")
format!("check for overlap between inherent impls defined in this crate")
}
}
@ -375,7 +381,7 @@ define_maps! { <'tcx>
/// Maps a DefId of a type to a list of its inherent impls.
/// Contains implementations of methods that are inherent to a type.
/// Methods in these implementations don't need to be exported.
pub inherent_impls: InherentImpls(DefId) -> Vec<DefId>,
pub inherent_impls: InherentImpls(DefId) -> Rc<Vec<DefId>>,
/// Maps from the def-id of a function/method or const/static
/// to its MIR. Mutation is done at an item granularity to
@ -400,14 +406,22 @@ define_maps! { <'tcx>
pub closure_type: ItemSignature(DefId) -> ty::PolyFnSig<'tcx>,
/// Caches CoerceUnsized kinds for impls on custom types.
pub custom_coerce_unsized_kind: ItemSignature(DefId)
-> ty::adjustment::CustomCoerceUnsized,
pub coerce_unsized_info: ItemSignature(DefId)
-> ty::adjustment::CoerceUnsizedInfo,
pub typeck_tables: TypeckTables(DefId) -> &'tcx ty::TypeckTables<'tcx>,
pub coherent_trait: coherent_trait_dep_node((CrateNum, DefId)) -> (),
pub coherent_inherent_impls: coherent_inherent_impls_dep_node(CrateNum) -> (),
/// Gets a complete map from all types to their inherent impls.
/// Not meant to be used directly outside of coherence.
/// (Defined only for LOCAL_CRATE)
pub crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum) -> CrateInherentImpls,
/// Checks all types in the krate for overlap in their inherent impls. Reports errors.
/// Not meant to be used directly outside of coherence.
/// (Defined only for LOCAL_CRATE)
pub crate_inherent_impls_overlap_check: crate_inherent_impls_dep_node(CrateNum) -> (),
/// Results of evaluating monomorphic constants embedded in
/// other items, such as enum variant explicit discriminants.
@ -423,7 +437,7 @@ fn coherent_trait_dep_node((_, def_id): (CrateNum, DefId)) -> DepNode<DefId> {
DepNode::CoherenceCheckTrait(def_id)
}
fn coherent_inherent_impls_dep_node(_: CrateNum) -> DepNode<DefId> {
fn crate_inherent_impls_dep_node(_: CrateNum) -> DepNode<DefId> {
DepNode::Coherence
}

View file

@ -31,7 +31,7 @@ use ty::subst::{Subst, Substs};
use ty::util::IntTypeExt;
use ty::walk::TypeWalker;
use util::common::MemoizationMap;
use util::nodemap::{NodeSet, FxHashMap};
use util::nodemap::{NodeSet, DefIdMap, FxHashMap};
use serialize::{self, Encodable, Encoder};
use std::borrow::Cow;
@ -2057,8 +2057,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
})
}
pub fn custom_coerce_unsized_kind(self, did: DefId) -> adjustment::CustomCoerceUnsized {
queries::custom_coerce_unsized_kind::get(self, DUMMY_SP, did)
pub fn coerce_unsized_info(self, did: DefId) -> adjustment::CoerceUnsizedInfo {
queries::coerce_unsized_info::get(self, DUMMY_SP, did)
}
pub fn associated_item(self, def_id: DefId) -> AssociatedItem {
@ -2348,34 +2348,6 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
def.flags.get().intersects(TraitFlags::HAS_DEFAULT_IMPL)
}
/// Populates the type context with all the inherent implementations for
/// the given type if necessary.
pub fn populate_inherent_implementations_for_type_if_necessary(self,
span: Span,
type_id: DefId) {
if type_id.is_local() {
// Make sure coherence of inherent impls ran already.
ty::queries::coherent_inherent_impls::force(self, span, LOCAL_CRATE);
return
}
// The type is not local, hence we are reading this out of
// metadata and don't need to track edges.
let _ignore = self.dep_graph.in_ignore();
if self.populated_external_types.borrow().contains(&type_id) {
return
}
debug!("populate_inherent_implementations_for_type_if_necessary: searching for {:?}",
type_id);
let inherent_impls = self.sess.cstore.inherent_implementations_for_type(type_id);
self.maps.inherent_impls.borrow_mut().insert(type_id, inherent_impls);
self.populated_external_types.borrow_mut().insert(type_id);
}
/// Populates the type context with all the implementations for the given
/// trait if necessary.
pub fn populate_implementations_for_trait_if_necessary(self, trait_id: DefId) {
@ -2640,3 +2612,16 @@ pub fn provide(providers: &mut ty::maps::Providers) {
..*providers
};
}
/// A map for the local crate mapping each type to a vector of its
/// inherent impls. This is not meant to be used outside of coherence;
/// rather, you should request the vector for a specific type via
/// `ty::queries::inherent_impls::get(def_id)` so as to minimize your
/// dependencies (constructing this map requires touching the entire
/// crate).
#[derive(Clone, Debug)]
pub struct CrateInherentImpls {
pub inherent_impls: DefIdMap<Rc<Vec<DefId>>>,
}

View file

@ -267,11 +267,11 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx
// First, filter out duplicates
moved.sort();
moved.dedup();
debug!("fragments 1 moved: {:?}", path_lps(&moved[..]));
debug!("fragments 1 moved: {:?}", path_lps(&moved));
assigned.sort();
assigned.dedup();
debug!("fragments 1 assigned: {:?}", path_lps(&assigned[..]));
debug!("fragments 1 assigned: {:?}", path_lps(&assigned));
// Second, build parents from the moved and assigned.
for m in &moved {
@ -291,14 +291,14 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx
parents.sort();
parents.dedup();
debug!("fragments 2 parents: {:?}", path_lps(&parents[..]));
debug!("fragments 2 parents: {:?}", path_lps(&parents));
// Third, filter the moved and assigned fragments down to just the non-parents
moved.retain(|f| non_member(*f, &parents[..]));
debug!("fragments 3 moved: {:?}", path_lps(&moved[..]));
moved.retain(|f| non_member(*f, &parents));
debug!("fragments 3 moved: {:?}", path_lps(&moved));
assigned.retain(|f| non_member(*f, &parents[..]));
debug!("fragments 3 assigned: {:?}", path_lps(&assigned[..]));
assigned.retain(|f| non_member(*f, &parents));
debug!("fragments 3 assigned: {:?}", path_lps(&assigned));
// Fourth, build the leftover from the moved, assigned, and parents.
for m in &moved {
@ -316,16 +316,16 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx
unmoved.sort();
unmoved.dedup();
debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[..]));
debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved));
// Fifth, filter the leftover fragments down to its core.
unmoved.retain(|f| match *f {
AllButOneFrom(_) => true,
Just(mpi) => non_member(mpi, &parents[..]) &&
non_member(mpi, &moved[..]) &&
non_member(mpi, &assigned[..])
Just(mpi) => non_member(mpi, &parents) &&
non_member(mpi, &moved) &&
non_member(mpi, &assigned)
});
debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[..]));
debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved));
// Swap contents back in.
fragments.unmoved_fragments = unmoved;

View file

@ -112,7 +112,7 @@ fn borrowck_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, body_id: hir::BodyId) {
&flowed_moves.move_data,
owner_id);
check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans[..], body);
check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans, body);
}
fn build_borrowck_dataflow_data<'a, 'tcx>(this: &mut BorrowckCtxt<'a, 'tcx>,

View file

@ -88,7 +88,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
set.push_str(", ");
}
let loan_str = self.borrowck_ctxt.loan_path_to_string(&lp);
set.push_str(&loan_str[..]);
set.push_str(&loan_str);
saw_some = true;
true
});

View file

@ -680,10 +680,10 @@ fn is_useful_specialized<'p, 'a:'p, 'tcx: 'a>(
}).collect();
let wild_patterns: Vec<_> = wild_patterns_owned.iter().collect();
let matrix = Matrix(m.iter().flat_map(|r| {
specialize(cx, &r[..], &ctor, &wild_patterns)
specialize(cx, &r, &ctor, &wild_patterns)
}).collect());
match specialize(cx, v, &ctor, &wild_patterns) {
Some(v) => match is_useful(cx, &matrix, &v[..], witness) {
Some(v) => match is_useful(cx, &matrix, &v, witness) {
UsefulWithWitness(witnesses) => UsefulWithWitness(
witnesses.into_iter()
.map(|witness| witness.apply_constructor(cx, &ctor, lty))

View file

@ -311,7 +311,7 @@ fn check_arms<'a, 'tcx>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
for &(pat, hir_pat) in pats {
let v = vec![pat];
match is_useful(cx, &seen, &v[..], LeaveOutWitness) {
match is_useful(cx, &seen, &v, LeaveOutWitness) {
NotUseful => {
match source {
hir::MatchSource::IfLetDesugar { .. } => {

View file

@ -91,8 +91,8 @@ impl<A: Array> Deref for AccumulateVec<A> {
type Target = [A::Element];
fn deref(&self) -> &Self::Target {
match *self {
AccumulateVec::Array(ref v) => &v[..],
AccumulateVec::Heap(ref v) => &v[..],
AccumulateVec::Array(ref v) => v,
AccumulateVec::Heap(ref v) => v,
}
}
}
@ -100,8 +100,8 @@ impl<A: Array> Deref for AccumulateVec<A> {
impl<A: Array> DerefMut for AccumulateVec<A> {
fn deref_mut(&mut self) -> &mut [A::Element] {
match *self {
AccumulateVec::Array(ref mut v) => &mut v[..],
AccumulateVec::Heap(ref mut v) => &mut v[..],
AccumulateVec::Array(ref mut v) => v,
AccumulateVec::Heap(ref mut v) => v,
}
}
}

View file

@ -48,7 +48,7 @@ pub fn encode(n: u64, base: u64) -> String {
#[test]
fn test_encode() {
fn test(n: u64, base: u64) {
assert_eq!(Ok(n), u64::from_str_radix(&encode(n, base)[..], base as u32));
assert_eq!(Ok(n), u64::from_str_radix(&encode(n, base), base as u32));
}
for base in 2..37 {

View file

@ -35,7 +35,7 @@ pub struct Blake2bCtx {
impl ::std::fmt::Debug for Blake2bCtx {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {
try!(write!(fmt, "hash: "));
for v in &self.h[..] {
for v in &self.h {
try!(write!(fmt, "{:x}", v));
}
Ok(())

View file

@ -91,13 +91,13 @@ impl<T: Idx> IdxSet<T> {
impl<T: Idx> Deref for IdxSetBuf<T> {
type Target = IdxSet<T>;
fn deref(&self) -> &IdxSet<T> {
unsafe { IdxSet::from_slice(&self.bits[..]) }
unsafe { IdxSet::from_slice(&self.bits) }
}
}
impl<T: Idx> DerefMut for IdxSetBuf<T> {
fn deref_mut(&mut self) -> &mut IdxSet<T> {
unsafe { IdxSet::from_slice_mut(&mut self.bits[..]) }
unsafe { IdxSet::from_slice_mut(&mut self.bits) }
}
}
@ -135,11 +135,11 @@ impl<T: Idx> IdxSet<T> {
}
pub fn words(&self) -> &[Word] {
&self.bits[..]
&self.bits
}
pub fn words_mut(&mut self) -> &mut [Word] {
&mut self.bits[..]
&mut self.bits
}
pub fn clone_from(&mut self, other: &IdxSet<T>) {

View file

@ -258,10 +258,7 @@ fn keep_hygiene_data(sess: &Session) -> bool {
}
fn keep_ast(sess: &Session) -> bool {
sess.opts.debugging_opts.keep_ast ||
sess.opts.debugging_opts.save_analysis ||
sess.opts.debugging_opts.save_analysis_csv ||
sess.opts.debugging_opts.save_analysis_api
sess.opts.debugging_opts.keep_ast || ::save_analysis(sess)
}
/// The name used for source code that doesn't originate in a file

View file

@ -67,6 +67,7 @@ use pretty::{PpMode, UserIdentifiedItem};
use rustc_resolve as resolve;
use rustc_save_analysis as save;
use rustc_save_analysis::DumpHandler;
use rustc_trans::back::link;
use rustc_trans::back::write::{create_target_machine, RELOC_MODEL_ARGS, CODE_GEN_MODEL_ARGS};
use rustc::dep_graph::DepGraph;
@ -233,7 +234,7 @@ fn make_output(matches: &getopts::Matches) -> (Option<PathBuf>, Option<PathBuf>)
// Extract input (string or file and optional path) from matches.
fn make_input(free_matches: &[String]) -> Option<(Input, Option<PathBuf>)> {
if free_matches.len() == 1 {
let ifile = &free_matches[0][..];
let ifile = &free_matches[0];
if ifile == "-" {
let mut src = String::new();
io::stdin().read_to_string(&mut src).unwrap();
@ -507,8 +508,9 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls {
state.expanded_crate.unwrap(),
state.analysis.unwrap(),
state.crate_name.unwrap(),
state.out_dir,
save_analysis_format(state.session))
DumpHandler::new(save_analysis_format(state.session),
state.out_dir,
state.crate_name.unwrap()))
});
};
control.after_analysis.run_callback_on_error = true;
@ -800,7 +802,7 @@ Available lint options:
for lint in lints {
let name = lint.name_lower().replace("_", "-");
println!(" {} {:7.7} {}",
padded(&name[..]),
padded(&name),
lint.default_level.as_str(),
lint.desc);
}
@ -838,7 +840,7 @@ Available lint options:
.map(|x| x.to_string().replace("_", "-"))
.collect::<Vec<String>>()
.join(", ");
println!(" {} {}", padded(&name[..]), desc);
println!(" {} {}", padded(&name), desc);
}
println!("\n");
};
@ -945,7 +947,7 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
.into_iter()
.map(|x| x.opt_group)
.collect();
let matches = match getopts::getopts(&args[..], &all_groups) {
let matches = match getopts::getopts(&args, &all_groups) {
Ok(m) => m,
Err(f) => early_error(ErrorOutputType::default(), &f.to_string()),
};
@ -1084,7 +1086,7 @@ pub fn monitor<F: FnOnce() + Send + 'static>(f: F) {
format!("we would appreciate a bug report: {}", BUG_REPORT_URL)];
for note in &xs {
handler.emit(&MultiSpan::new(),
&note[..],
&note,
errors::Level::Note);
}
if match env::var_os("RUST_BACKTRACE") {

View file

@ -589,7 +589,7 @@ impl UserIdentifiedItem {
-> NodesMatchingUII<'a, 'hir> {
match *self {
ItemViaNode(node_id) => NodesMatchingDirect(Some(node_id).into_iter()),
ItemViaPath(ref parts) => NodesMatchingSuffix(map.nodes_matching_suffix(&parts[..])),
ItemViaPath(ref parts) => NodesMatchingSuffix(map.nodes_matching_suffix(&parts)),
}
}
@ -600,7 +600,7 @@ impl UserIdentifiedItem {
user_option,
self.reconstructed_input(),
is_wrong_because);
sess.fatal(&message[..])
sess.fatal(&message)
};
let mut saw_node = ast::DUMMY_NODE_ID;
@ -771,7 +771,7 @@ fn print_flowgraph<'a, 'tcx, W: Write>(variants: Vec<borrowck_dot::Variant>,
fn expand_err_details(r: io::Result<()>) -> io::Result<()> {
r.map_err(|ioerr| {
io::Error::new(io::ErrorKind::Other,
&format!("graphviz::render failed: {}", ioerr)[..])
format!("graphviz::render failed: {}", ioerr))
})
}
}

View file

@ -289,7 +289,7 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> {
pub fn t_param(&self, index: u32) -> Ty<'tcx> {
let name = format!("T{}", index);
self.infcx.tcx.mk_param(index, Symbol::intern(&name[..]))
self.infcx.tcx.mk_param(index, Symbol::intern(&name))
}
pub fn re_early_bound(&self, index: u32, name: &'static str) -> &'tcx ty::Region {

View file

@ -99,9 +99,9 @@ pub fn read_file(sess: &Session, path: &Path) -> io::Result<Option<Vec<u8>>> {
let rustc_version_str_len = rustc_version_str_len[0] as usize;
let mut buffer = Vec::with_capacity(rustc_version_str_len);
buffer.resize(rustc_version_str_len, 0);
file.read_exact(&mut buffer[..])?;
file.read_exact(&mut buffer)?;
if &buffer[..] != rustc_version().as_bytes() {
if buffer != rustc_version().as_bytes() {
report_format_mismatch(sess, path, "Different compiler version");
return Ok(None);
}

View file

@ -88,7 +88,7 @@ impl NonCamelCaseTypes {
} else {
format!("{} `{}` should have a camel case name such as `{}`", sort, name, c)
};
cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[..]);
cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m);
}
}
}

View file

@ -334,7 +334,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingDoc {
attr.check_name("doc") &&
match attr.meta_item_list() {
None => false,
Some(l) => attr::list_contains_name(&l[..], "hidden"),
Some(l) => attr::list_contains_name(&l, "hidden"),
}
});
self.doc_hidden_stack.push(doc_hidden);

View file

@ -146,7 +146,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults {
ty::TyBool => return,
ty::TyAdt(def, _) => {
let attrs = cx.tcx.get_attrs(def.did);
check_must_use(cx, &attrs[..], s.span)
check_must_use(cx, &attrs, s.span)
}
_ => false,
};

View file

@ -140,7 +140,7 @@ fn main() {
cfg.flag(flag);
}
for component in &components[..] {
for component in &components {
let mut flag = String::from("-DLLVM_COMPONENT_");
flag.push_str(&component.to_uppercase());
cfg.flag(&flag);
@ -173,7 +173,7 @@ fn main() {
if !is_crossed {
cmd.arg("--system-libs");
}
cmd.args(&components[..]);
cmd.args(&components);
for lib in output(&mut cmd).split_whitespace() {
let name = if lib.starts_with("-l") {

View file

@ -669,7 +669,7 @@ impl<'a> CrateLoader<'a> {
name,
config::host_triple(),
self.sess.opts.target_triple);
span_fatal!(self.sess, span, E0456, "{}", &message[..]);
span_fatal!(self.sess, span, E0456, "{}", &message);
}
let root = ekrate.metadata.get_root();

View file

@ -88,9 +88,9 @@ provide! { <'tcx> tcx, def_id, cdata
}
associated_item => { cdata.get_associated_item(def_id.index) }
impl_trait_ref => { cdata.get_impl_trait(def_id.index, tcx) }
custom_coerce_unsized_kind => {
cdata.get_custom_coerce_unsized_kind(def_id.index).unwrap_or_else(|| {
bug!("custom_coerce_unsized_kind: `{:?}` is missing its kind", def_id);
coerce_unsized_info => {
cdata.get_coerce_unsized_info(def_id.index).unwrap_or_else(|| {
bug!("coerce_unsized_info: `{:?}` is missing its info", def_id);
})
}
mir => {
@ -109,6 +109,7 @@ provide! { <'tcx> tcx, def_id, cdata
typeck_tables => { cdata.item_body_tables(def_id.index, tcx) }
closure_kind => { cdata.closure_kind(def_id.index) }
closure_type => { cdata.closure_ty(def_id.index, tcx) }
inherent_impls => { Rc::new(cdata.get_inherent_implementations_for_type(def_id.index)) }
}
impl CrateStore for cstore::CStore {
@ -162,12 +163,6 @@ impl CrateStore for cstore::CStore {
self.get_crate_data(did.krate).get_fn_arg_names(did.index)
}
fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId>
{
self.dep_graph.read(DepNode::MetaData(def_id));
self.get_crate_data(def_id.krate).get_inherent_implementations_for_type(def_id.index)
}
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>
{
if let Some(def_id) = filter {

View file

@ -643,10 +643,10 @@ impl<'a, 'tcx> CrateMetadata {
self.get_impl_data(id).polarity
}
pub fn get_custom_coerce_unsized_kind(&self,
id: DefIndex)
-> Option<ty::adjustment::CustomCoerceUnsized> {
self.get_impl_data(id).coerce_unsized_kind
pub fn get_coerce_unsized_info(&self,
id: DefIndex)
-> Option<ty::adjustment::CoerceUnsizedInfo> {
self.get_impl_data(id).coerce_unsized_info
}
pub fn get_impl_trait(&self,

View file

@ -693,7 +693,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
let data = ImplData {
polarity: hir::ImplPolarity::Positive,
parent_impl: None,
coerce_unsized_kind: None,
coerce_unsized_info: None,
trait_ref: tcx.impl_trait_ref(def_id).map(|trait_ref| self.lazy(&trait_ref)),
};
@ -713,13 +713,21 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
None
};
// if this is an impl of `CoerceUnsized`, create its
// "unsized info", else just store None
let coerce_unsized_info =
trait_ref.and_then(|t| {
if Some(t.def_id) == tcx.lang_items.coerce_unsized_trait() {
Some(ty::queries::coerce_unsized_info::get(tcx, item.span, def_id))
} else {
None
}
});
let data = ImplData {
polarity: polarity,
parent_impl: parent,
coerce_unsized_kind: tcx.maps.custom_coerce_unsized_kind
.borrow()
.get(&def_id)
.cloned(),
coerce_unsized_info: coerce_unsized_info,
trait_ref: trait_ref.map(|trait_ref| self.lazy(&trait_ref)),
};
@ -918,14 +926,14 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
self.encode_fields(def_id);
}
hir::ItemImpl(..) => {
for &trait_item_def_id in &self.tcx.associated_item_def_ids(def_id)[..] {
for &trait_item_def_id in self.tcx.associated_item_def_ids(def_id).iter() {
self.record(trait_item_def_id,
EncodeContext::encode_info_for_impl_item,
trait_item_def_id);
}
}
hir::ItemTrait(..) => {
for &item_def_id in &self.tcx.associated_item_def_ids(def_id)[..] {
for &item_def_id in self.tcx.associated_item_def_ids(def_id).iter() {
self.record(item_def_id,
EncodeContext::encode_info_for_trait_item,
item_def_id);

View file

@ -477,15 +477,15 @@ impl<'a> Context<'a> {
Some(file) => file,
};
let (hash, found_kind) =
if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rlib") {
if file.starts_with(&rlib_prefix) && file.ends_with(".rlib") {
(&file[(rlib_prefix.len())..(file.len() - ".rlib".len())], CrateFlavor::Rlib)
} else if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rmeta") {
} else if file.starts_with(&rlib_prefix) && file.ends_with(".rmeta") {
(&file[(rlib_prefix.len())..(file.len() - ".rmeta".len())], CrateFlavor::Rmeta)
} else if file.starts_with(&dylib_prefix) &&
file.ends_with(&dypair.1) {
(&file[(dylib_prefix.len())..(file.len() - dypair.1.len())], CrateFlavor::Dylib)
} else {
if file.starts_with(&staticlib_prefix[..]) && file.ends_with(&staticpair.1) {
if file.starts_with(&staticlib_prefix) && file.ends_with(&staticpair.1) {
staticlibs.push(CrateMismatch {
path: path.to_path_buf(),
got: "static".to_string(),

View file

@ -285,7 +285,9 @@ pub struct TraitData<'tcx> {
pub struct ImplData<'tcx> {
pub polarity: hir::ImplPolarity,
pub parent_impl: Option<DefId>,
pub coerce_unsized_kind: Option<ty::adjustment::CustomCoerceUnsized>,
/// This is `Some` only for impls of `CoerceUnsized`.
pub coerce_unsized_info: Option<ty::adjustment::CoerceUnsizedInfo>,
pub trait_ref: Option<Lazy<ty::TraitRef<'tcx>>>,
}

View file

@ -126,19 +126,19 @@ impl<'a> PluginLoader<'a> {
// inside this crate, so continue would spew "macro undefined"
// errors
Err(err) => {
self.sess.span_fatal(span, &err[..])
self.sess.span_fatal(span, &err)
}
};
unsafe {
let registrar =
match lib.symbol(&symbol[..]) {
match lib.symbol(&symbol) {
Ok(registrar) => {
mem::transmute::<*mut u8,PluginRegistrarFun>(registrar)
}
// again fatal if we can't register macros
Err(err) => {
self.sess.span_fatal(span, &err[..])
self.sess.span_fatal(span, &err)
}
};

View file

@ -890,19 +890,23 @@ match (A, B, C) {
E0422: r##"
You are trying to use an identifier that is either undefined or not a struct.
Erroneous code example:
``` compile_fail,E0422
```compile_fail,E0422
fn main () {
let x = Foo { x: 1, y: 2 };
}
```
In this case, `Foo` is undefined, so it inherently isn't anything, and
definitely not a struct.
```compile_fail
fn main () {
let foo = 1;
let x = foo { x: 1, y: 2 };
}
```
In this case, `foo` is defined, but is not a struct, so Rust can't use it as
one.
"##,

View file

@ -922,6 +922,10 @@ impl<'a> ModuleData<'a> {
fn is_local(&self) -> bool {
self.normal_ancestor_id.is_local()
}
fn nearest_item_scope(&'a self) -> Module<'a> {
if self.is_trait() { self.parent.unwrap() } else { self }
}
}
impl<'a> fmt::Debug for ModuleData<'a> {

View file

@ -172,7 +172,6 @@ impl<'a> base::Resolver for Resolver<'a> {
expansion: mark,
};
expansion.visit_with(&mut visitor);
self.current_module.unresolved_invocations.borrow_mut().remove(&mark);
invocation.expansion.set(visitor.legacy_scope);
}
@ -390,7 +389,7 @@ impl<'a> Resolver<'a> {
Err(Determinacy::Determined)
},
};
self.current_module.macro_resolutions.borrow_mut()
self.current_module.nearest_item_scope().macro_resolutions.borrow_mut()
.push((path.into_boxed_slice(), span));
return def;
}
@ -410,7 +409,7 @@ impl<'a> Resolver<'a> {
}
};
self.current_module.legacy_macro_resolutions.borrow_mut()
self.current_module.nearest_item_scope().legacy_macro_resolutions.borrow_mut()
.push((scope, path[0], span, kind));
result

View file

@ -423,7 +423,7 @@ fn make_values_str(pairs: &[(&'static str, &str)]) -> String {
let strs = pairs.map(|(f, v)| format!(",{},\"{}\"", f, escape(String::from(v))));
strs.fold(String::new(), |mut s, ss| {
s.push_str(&ss[..]);
s.push_str(&ss);
s
})
}

View file

@ -22,22 +22,52 @@ use external_data::*;
use data::{self, VariableKind};
use dump::Dump;
pub struct JsonDumper<'b, W: Write + 'b> {
output: &'b mut W,
pub struct JsonDumper<O: DumpOutput> {
result: Analysis,
output: O,
}
impl<'b, W: Write> JsonDumper<'b, W> {
pub fn new(writer: &'b mut W) -> JsonDumper<'b, W> {
JsonDumper { output: writer, result: Analysis::new() }
pub trait DumpOutput {
fn dump(&mut self, result: &Analysis);
}
pub struct WriteOutput<'b, W: Write + 'b> {
output: &'b mut W,
}
impl<'b, W: Write> DumpOutput for WriteOutput<'b, W> {
fn dump(&mut self, result: &Analysis) {
if let Err(_) = write!(self.output, "{}", as_json(&result)) {
error!("Error writing output");
}
}
}
impl<'b, W: Write> Drop for JsonDumper<'b, W> {
pub struct CallbackOutput<'b> {
callback: &'b mut FnMut(&Analysis),
}
impl<'b> DumpOutput for CallbackOutput<'b> {
fn dump(&mut self, result: &Analysis) {
(self.callback)(result)
}
}
impl<'b, W: Write> JsonDumper<WriteOutput<'b, W>> {
pub fn new(writer: &'b mut W) -> JsonDumper<WriteOutput<'b, W>> {
JsonDumper { output: WriteOutput { output: writer }, result: Analysis::new() }
}
}
impl<'b> JsonDumper<CallbackOutput<'b>> {
pub fn with_callback(callback: &'b mut FnMut(&Analysis)) -> JsonDumper<CallbackOutput<'b>> {
JsonDumper { output: CallbackOutput { callback: callback }, result: Analysis::new() }
}
}
impl<O: DumpOutput> Drop for JsonDumper<O> {
fn drop(&mut self) {
if let Err(_) = write!(self.output, "{}", as_json(&self.result)) {
error!("Error writing output");
}
self.output.dump(&self.result);
}
}
@ -49,7 +79,7 @@ macro_rules! impl_fn {
}
}
impl<'b, W: Write + 'b> Dump for JsonDumper<'b, W> {
impl<'b, O: DumpOutput + 'b> Dump for JsonDumper<O> {
fn crate_prelude(&mut self, data: CratePreludeData) {
self.result.prelude = Some(data)
}

View file

@ -48,6 +48,7 @@ use rustc::hir::def::Def;
use rustc::hir::map::Node;
use rustc::hir::def_id::DefId;
use rustc::session::config::CrateType::CrateTypeExecutable;
use rustc::session::Session;
use rustc::ty::{self, TyCtxt};
use std::env;
@ -866,56 +867,132 @@ impl Format {
}
}
pub fn process_crate<'l, 'tcx>(tcx: TyCtxt<'l, 'tcx, 'tcx>,
krate: &ast::Crate,
analysis: &'l ty::CrateAnalysis,
cratename: &str,
odir: Option<&Path>,
format: Format) {
/// Defines what to do with the results of saving the analysis.
pub trait SaveHandler {
fn save<'l, 'tcx>(&mut self,
save_ctxt: SaveContext<'l, 'tcx>,
krate: &ast::Crate,
cratename: &str);
}
/// Dump the save-analysis results to a file.
pub struct DumpHandler<'a> {
format: Format,
odir: Option<&'a Path>,
cratename: String
}
impl<'a> DumpHandler<'a> {
pub fn new(format: Format, odir: Option<&'a Path>, cratename: &str) -> DumpHandler<'a> {
DumpHandler {
format: format,
odir: odir,
cratename: cratename.to_owned()
}
}
fn output_file(&self, sess: &Session) -> File {
let mut root_path = match env::var_os("RUST_SAVE_ANALYSIS_FOLDER") {
Some(val) => PathBuf::from(val),
None => match self.odir {
Some(val) => val.join("save-analysis"),
None => PathBuf::from("save-analysis-temp"),
},
};
if let Err(e) = std::fs::create_dir_all(&root_path) {
error!("Could not create directory {}: {}", root_path.display(), e);
}
{
let disp = root_path.display();
info!("Writing output to {}", disp);
}
let executable = sess.crate_types.borrow().iter().any(|ct| *ct == CrateTypeExecutable);
let mut out_name = if executable {
"".to_owned()
} else {
"lib".to_owned()
};
out_name.push_str(&self.cratename);
out_name.push_str(&sess.opts.cg.extra_filename);
out_name.push_str(self.format.extension());
root_path.push(&out_name);
let output_file = File::create(&root_path).unwrap_or_else(|e| {
let disp = root_path.display();
sess.fatal(&format!("Could not open {}: {}", disp, e));
});
root_path.pop();
output_file
}
}
impl<'a> SaveHandler for DumpHandler<'a> {
fn save<'l, 'tcx>(&mut self,
save_ctxt: SaveContext<'l, 'tcx>,
krate: &ast::Crate,
cratename: &str) {
macro_rules! dump {
($new_dumper: expr) => {{
let mut dumper = $new_dumper;
let mut visitor = DumpVisitor::new(save_ctxt, &mut dumper);
visitor.dump_crate_info(cratename, krate);
visit::walk_crate(&mut visitor, krate);
}}
}
let output = &mut self.output_file(&save_ctxt.tcx.sess);
match self.format {
Format::Csv => dump!(CsvDumper::new(output)),
Format::Json => dump!(JsonDumper::new(output)),
Format::JsonApi => dump!(JsonApiDumper::new(output)),
}
}
}
/// Call a callback with the results of save-analysis.
pub struct CallbackHandler<'b> {
pub callback: &'b mut FnMut(&rls_data::Analysis),
}
impl<'b> SaveHandler for CallbackHandler<'b> {
fn save<'l, 'tcx>(&mut self,
save_ctxt: SaveContext<'l, 'tcx>,
krate: &ast::Crate,
cratename: &str) {
macro_rules! dump {
($new_dumper: expr) => {{
let mut dumper = $new_dumper;
let mut visitor = DumpVisitor::new(save_ctxt, &mut dumper);
visitor.dump_crate_info(cratename, krate);
visit::walk_crate(&mut visitor, krate);
}}
}
// We're using the JsonDumper here because it has the format of the
// save-analysis results that we will pass to the callback. IOW, we are
// using the JsonDumper to collect the save-analysis results, but not
// actually to dump them to a file. This is all a bit convoluted and
// there is certainly a simpler design here trying to get out (FIXME).
dump!(JsonDumper::with_callback(self.callback))
}
}
pub fn process_crate<'l, 'tcx, H: SaveHandler>(tcx: TyCtxt<'l, 'tcx, 'tcx>,
krate: &ast::Crate,
analysis: &'l ty::CrateAnalysis,
cratename: &str,
mut handler: H) {
let _ignore = tcx.dep_graph.in_ignore();
assert!(analysis.glob_map.is_some());
info!("Dumping crate {}", cratename);
// find a path to dump our data to
let mut root_path = match env::var_os("RUST_SAVE_ANALYSIS_FOLDER") {
Some(val) => PathBuf::from(val),
None => match odir {
Some(val) => val.join("save-analysis"),
None => PathBuf::from("save-analysis-temp"),
},
};
if let Err(e) = std::fs::create_dir_all(&root_path) {
tcx.sess.err(&format!("Could not create directory {}: {}",
root_path.display(),
e));
}
{
let disp = root_path.display();
info!("Writing output to {}", disp);
}
// Create output file.
let executable = tcx.sess.crate_types.borrow().iter().any(|ct| *ct == CrateTypeExecutable);
let mut out_name = if executable {
"".to_owned()
} else {
"lib".to_owned()
};
out_name.push_str(&cratename);
out_name.push_str(&tcx.sess.opts.cg.extra_filename);
out_name.push_str(format.extension());
root_path.push(&out_name);
let mut output_file = File::create(&root_path).unwrap_or_else(|e| {
let disp = root_path.display();
tcx.sess.fatal(&format!("Could not open {}: {}", disp, e));
});
root_path.pop();
let output = &mut output_file;
let save_ctxt = SaveContext {
tcx: tcx,
tables: &ty::TypeckTables::empty(),
@ -923,21 +1000,7 @@ pub fn process_crate<'l, 'tcx>(tcx: TyCtxt<'l, 'tcx, 'tcx>,
span_utils: SpanUtils::new(&tcx.sess),
};
macro_rules! dump {
($new_dumper: expr) => {{
let mut dumper = $new_dumper;
let mut visitor = DumpVisitor::new(save_ctxt, &mut dumper);
visitor.dump_crate_info(cratename, krate);
visit::walk_crate(&mut visitor, krate);
}}
}
match format {
Format::Csv => dump!(CsvDumper::new(output)),
Format::Json => dump!(JsonDumper::new(output)),
Format::JsonApi => dump!(JsonApiDumper::new(output)),
}
handler.save(save_ctxt, krate, cratename)
}
// Utility functions for the module.

View file

@ -369,7 +369,7 @@ impl FnType {
match sig.inputs().last().unwrap().sty {
ty::TyTuple(ref tupled_arguments, _) => {
inputs = &sig.inputs()[0..sig.inputs().len() - 1];
&tupled_arguments[..]
&tupled_arguments
}
_ => {
bug!("argument to function with \"rust-call\" ABI \

View file

@ -229,11 +229,11 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
variant_fill].iter().cloned().collect();
match name {
None => {
Type::struct_(cx, &fields[..], false)
Type::struct_(cx, &fields, false)
}
Some(name) => {
let mut llty = Type::named_struct(cx, name);
llty.set_struct_body(&fields[..], false);
llty.set_struct_body(&fields, false);
llty
}
}
@ -330,7 +330,7 @@ fn struct_wrapped_nullable_bitdiscr(
alignment: Alignment,
) -> ValueRef {
let llptrptr = bcx.gepi(scrutinee,
&discrfield.iter().map(|f| *f as usize).collect::<Vec<_>>()[..]);
&discrfield.iter().map(|f| *f as usize).collect::<Vec<_>>());
let llptr = bcx.load(llptrptr, alignment.to_align());
let cmp = if nndiscr == 0 { IntEQ } else { IntNE };
bcx.icmp(cmp, llptr, C_null(val_ty(llptr)))
@ -402,7 +402,7 @@ pub fn trans_set_discr<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, t: Ty<'tcx>, val: Valu
base::call_memset(bcx, llptr, fill_byte, size, align, false);
} else {
let path = discrfield.iter().map(|&i| i as usize).collect::<Vec<_>>();
let llptrptr = bcx.gepi(val, &path[..]);
let llptrptr = bcx.gepi(val, &path);
let llptrty = val_ty(llptrptr).element_type();
bcx.store(C_null(llptrty), llptrptr, None);
}

View file

@ -77,14 +77,14 @@ pub fn trans_inline_asm<'a, 'tcx>(
.chain(arch_clobbers.iter().map(|s| s.to_string()))
.collect::<Vec<String>>().join(",");
debug!("Asm Constraints: {}", &all_constraints[..]);
debug!("Asm Constraints: {}", &all_constraints);
// Depending on how many outputs we have, the return type is different
let num_outputs = output_types.len();
let output_type = match num_outputs {
0 => Type::void(bcx.ccx),
1 => output_types[0],
_ => Type::struct_(bcx.ccx, &output_types[..], false)
_ => Type::struct_(bcx.ccx, &output_types, false)
};
let dialect = match ia.dialect {

View file

@ -65,10 +65,10 @@ pub fn find_library(name: &str, search_paths: &[PathBuf], sess: &Session)
for path in search_paths {
debug!("looking for {} inside {:?}", name, path);
let test = path.join(&oslibname[..]);
let test = path.join(&oslibname);
if test.exists() { return test }
if oslibname != unixlibname {
let test = path.join(&unixlibname[..]);
let test = path.join(&unixlibname);
if test.exists() { return test }
}
}

View file

@ -91,7 +91,7 @@ pub fn find_crate_name(sess: Option<&Session>,
attrs: &[ast::Attribute],
input: &Input) -> String {
let validate = |s: String, span: Option<Span>| {
cstore::validate_crate_name(sess, &s[..], span);
cstore::validate_crate_name(sess, &s, span);
s
};
@ -109,7 +109,7 @@ pub fn find_crate_name(sess: Option<&Session>,
let msg = format!("--crate-name and #[crate_name] are \
required to match, but `{}` != `{}`",
s, name);
sess.span_err(attr.span, &msg[..]);
sess.span_err(attr.span, &msg);
}
}
return validate(s.clone(), None);
@ -417,7 +417,7 @@ fn object_filenames(trans: &CrateTranslation,
outputs: &OutputFilenames)
-> Vec<PathBuf> {
trans.modules.iter().map(|module| {
outputs.temp_path(OutputType::Object, Some(&module.name[..]))
outputs.temp_path(OutputType::Object, Some(&module.name))
}).collect()
}
@ -551,7 +551,7 @@ fn link_rlib<'a>(sess: &'a Session,
e))
}
let bc_data_deflated = flate::deflate_bytes(&bc_data[..]);
let bc_data_deflated = flate::deflate_bytes(&bc_data);
let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) {
Ok(file) => file,
@ -819,12 +819,12 @@ fn link_natively(sess: &Session,
pname,
prog.status))
.note(&format!("{:?}", &cmd))
.note(&escape_string(&output[..]))
.note(&escape_string(&output))
.emit();
sess.abort_if_errors();
}
info!("linker stderr:\n{}", escape_string(&prog.stderr[..]));
info!("linker stdout:\n{}", escape_string(&prog.stdout[..]));
info!("linker stderr:\n{}", escape_string(&prog.stderr));
info!("linker stdout:\n{}", escape_string(&prog.stdout));
},
Err(e) => {
sess.struct_err(&format!("could not exec the linker `{}`: {}", pname, e))

View file

@ -61,7 +61,7 @@ pub fn run(sess: &session::Session,
}
let export_threshold =
symbol_export::crates_export_threshold(&sess.crate_types.borrow()[..]);
symbol_export::crates_export_threshold(&sess.crate_types.borrow());
let symbol_filter = &|&(ref name, level): &(String, _)| {
if symbol_export::is_below_threshold(level, export_threshold) {
@ -147,7 +147,7 @@ pub fn run(sess: &session::Session,
bc_decoded.len() as libc::size_t) {
write::llvm_err(sess.diagnostic(),
format!("failed to load bc of `{}`",
&name[..]));
name));
}
});
}

View file

@ -37,8 +37,8 @@ pub fn get_rpath_flags(config: &mut RPathConfig) -> Vec<String> {
let libs = config.used_crates.clone();
let libs = libs.into_iter().filter_map(|(_, l)| l.option()).collect::<Vec<_>>();
let rpaths = get_rpaths(config, &libs[..]);
flags.extend_from_slice(&rpaths_to_flags(&rpaths[..]));
let rpaths = get_rpaths(config, &libs);
flags.extend_from_slice(&rpaths_to_flags(&rpaths));
// Use DT_RUNPATH instead of DT_RPATH if available
if config.linker_is_gnu {
@ -84,14 +84,14 @@ fn get_rpaths(config: &mut RPathConfig, libs: &[PathBuf]) -> Vec<String> {
}
}
log_rpaths("relative", &rel_rpaths[..]);
log_rpaths("fallback", &fallback_rpaths[..]);
log_rpaths("relative", &rel_rpaths);
log_rpaths("fallback", &fallback_rpaths);
let mut rpaths = rel_rpaths;
rpaths.extend_from_slice(&fallback_rpaths[..]);
rpaths.extend_from_slice(&fallback_rpaths);
// Remove duplicates
let rpaths = minimize_rpaths(&rpaths[..]);
let rpaths = minimize_rpaths(&rpaths);
return rpaths;
}
@ -177,7 +177,7 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec<String> {
let mut set = HashSet::new();
let mut minimized = Vec::new();
for rpath in rpaths {
if set.insert(&rpath[..]) {
if set.insert(rpath) {
minimized.push(rpath.clone());
}
}

View file

@ -154,7 +154,7 @@ impl ExportedSymbols {
cnum: CrateNum)
-> &[(String, SymbolExportLevel)] {
match self.exports.get(&cnum) {
Some(exports) => &exports[..],
Some(exports) => exports,
None => &[]
}
}
@ -167,7 +167,7 @@ impl ExportedSymbols {
{
for &(ref name, export_level) in self.exported_symbols(cnum) {
if is_below_threshold(export_level, export_threshold) {
f(&name[..], export_level)
f(&name, export_level)
}
}
}

View file

@ -341,7 +341,7 @@ pub fn sanitize(s: &str) -> String {
if !result.is_empty() &&
result.as_bytes()[0] != '_' as u8 &&
! (result.as_bytes()[0] as char).is_xid_start() {
return format!("_{}", &result[..]);
return format!("_{}", result);
}
return result;

View file

@ -105,7 +105,7 @@ impl SharedEmitter {
Some(ref code) => {
handler.emit_with_code(&MultiSpan::new(),
&diag.msg,
&code[..],
&code,
diag.lvl);
},
None => {
@ -189,8 +189,8 @@ pub fn create_target_machine(sess: &Session) -> TargetMachineRef {
let fdata_sections = ffunction_sections;
let code_model_arg = match sess.opts.cg.code_model {
Some(ref s) => &s[..],
None => &sess.target.target.options.code_model[..],
Some(ref s) => &s,
None => &sess.target.target.options.code_model,
};
let code_model = match CODE_GEN_MODEL_ARGS.iter().find(
@ -397,7 +397,7 @@ unsafe extern "C" fn inline_asm_handler(diag: SMDiagnosticRef,
let msg = llvm::build_string(|s| llvm::LLVMRustWriteSMDiagnosticToString(diag, s))
.expect("non-UTF8 SMDiagnostic");
report_inline_asm(cgcx, &msg[..], cookie);
report_inline_asm(cgcx, &msg, cookie);
}
unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_void) {
@ -823,7 +823,7 @@ pub fn run_passes(sess: &Session,
if trans.modules.len() == 1 {
// 1) Only one codegen unit. In this case it's no difficulty
// to copy `foo.0.x` to `foo.x`.
let module_name = Some(&(trans.modules[0].name)[..]);
let module_name = Some(&trans.modules[0].name[..]);
let path = crate_output.temp_path(output_type, module_name);
copy_gracefully(&path,
&crate_output.path(output_type));
@ -939,7 +939,7 @@ pub fn run_passes(sess: &Session,
if metadata_config.emit_bc && !user_wants_bitcode {
let path = crate_output.temp_path(OutputType::Bitcode,
Some(&trans.metadata_module.name[..]));
Some(&trans.metadata_module.name));
remove(sess, &path);
}
}

View file

@ -514,7 +514,7 @@ pub fn call_memcpy<'a, 'tcx>(b: &Builder<'a, 'tcx>,
n_bytes: ValueRef,
align: u32) {
let ccx = b.ccx;
let ptr_width = &ccx.sess().target.target.target_pointer_width[..];
let ptr_width = &ccx.sess().target.target.target_pointer_width;
let key = format!("llvm.memcpy.p0i8.p0i8.i{}", ptr_width);
let memcpy = ccx.get_intrinsic(&key);
let src_ptr = b.pointercast(src, Type::i8p(ccx));
@ -550,7 +550,7 @@ pub fn call_memset<'a, 'tcx>(b: &Builder<'a, 'tcx>,
size: ValueRef,
align: ValueRef,
volatile: bool) -> ValueRef {
let ptr_width = &b.ccx.sess().target.target.target_pointer_width[..];
let ptr_width = &b.ccx.sess().target.target.target_pointer_width;
let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
let llintrinsicfn = b.ccx.get_intrinsic(&intrinsic_key);
let volatile = C_bool(b.ccx, volatile);
@ -765,7 +765,7 @@ fn write_metadata(cx: &SharedCrateContext,
let mut compressed = cstore.metadata_encoding_version().to_vec();
compressed.extend_from_slice(&flate::deflate_bytes(&metadata));
let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[..]);
let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed);
let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false);
let name = cx.metadata_symbol_name();
let buf = CString::new(name).unwrap();
@ -796,7 +796,7 @@ fn internalize_symbols<'a, 'tcx>(sess: &Session,
symbol_map: &SymbolMap<'tcx>,
exported_symbols: &ExportedSymbols) {
let export_threshold =
symbol_export::crates_export_threshold(&sess.crate_types.borrow()[..]);
symbol_export::crates_export_threshold(&sess.crate_types.borrow());
let exported_symbols = exported_symbols
.exported_symbols(LOCAL_CRATE)
@ -1035,7 +1035,7 @@ pub fn find_exported_symbols(tcx: TyCtxt, reachable: NodeSet) -> NodeSet {
(generics.parent_types == 0 && generics.types.is_empty()) &&
// Functions marked with #[inline] are only ever translated
// with "internal" linkage and are never exported.
!attr::requests_inline(&attributes[..])
!attr::requests_inline(&attributes)
}
_ => false
@ -1574,7 +1574,7 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
cgus.dedup();
for &(ref cgu_name, linkage) in cgus.iter() {
output.push_str(" ");
output.push_str(&cgu_name[..]);
output.push_str(&cgu_name);
let linkage_abbrev = match linkage {
llvm::Linkage::ExternalLinkage => "External",

View file

@ -627,7 +627,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
} else {
let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::<Vec<ValueRef>>();
self.count_insn("gepi");
self.inbounds_gep(base, &v[..])
self.inbounds_gep(base, &v)
}
}
@ -835,8 +835,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let s = format!("{} ({})",
text,
self.ccx.sess().codemap().span_to_string(sp));
debug!("{}", &s[..]);
self.add_comment(&s[..]);
debug!("{}", s);
self.add_comment(&s);
}
}

View file

@ -287,7 +287,7 @@ pub fn custom_coerce_unsize_info<'scx, 'tcx>(scx: &SharedCrateContext<'scx, 'tcx
match fulfill_obligation(scx, DUMMY_SP, trait_ref) {
traits::VtableImpl(traits::VtableImplData { impl_def_id, .. }) => {
scx.tcx().custom_coerce_unsized_kind(impl_def_id)
scx.tcx().coerce_unsized_info(impl_def_id).custom_kind.unwrap()
}
vtable => {
bug!("invalid CoerceUnsized vtable: {:?}", vtable);

View file

@ -479,14 +479,9 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
}
fn assemble_inherent_impl_candidates_for_type(&mut self, def_id: DefId) {
// Read the inherent implementation candidates for this type from the
// metadata if necessary.
self.tcx.populate_inherent_implementations_for_type_if_necessary(self.span, def_id);
if let Some(impl_infos) = self.tcx.maps.inherent_impls.borrow().get(&def_id) {
for &impl_def_id in impl_infos.iter() {
self.assemble_inherent_impl_probe(impl_def_id);
}
let impl_def_ids = ty::queries::inherent_impls::get(self.tcx, self.span, def_id);
for &impl_def_id in impl_def_ids.iter() {
self.assemble_inherent_impl_probe(impl_def_id);
}
}

View file

@ -18,6 +18,7 @@ use rustc::traits::{self, ObligationCause, Reveal};
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::ParameterEnvironment;
use rustc::ty::TypeFoldable;
use rustc::ty::adjustment::CoerceUnsizedInfo;
use rustc::ty::subst::Subst;
use rustc::ty::util::CopyImplementationError;
use rustc::infer;
@ -159,11 +160,26 @@ fn visit_implementation_of_copy<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
coerce_unsized_trait: DefId,
_: DefId,
impl_did: DefId) {
debug!("visit_implementation_of_coerce_unsized: impl_did={:?}",
impl_did);
// Just compute this for the side-effects, in particular reporting
// errors; other parts of the code may demand it for the info of
// course.
if impl_did.is_local() {
let span = tcx.def_span(impl_did);
ty::queries::coerce_unsized_info::get(tcx, span, impl_did);
}
}
pub fn coerce_unsized_info<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
impl_did: DefId)
-> CoerceUnsizedInfo {
debug!("compute_coerce_unsized_info(impl_did={:?})", impl_did);
let coerce_unsized_trait = tcx.lang_items.coerce_unsized_trait().unwrap();
let unsize_trait = match tcx.lang_items.require(UnsizeTraitLangItem) {
Ok(id) => id,
Err(err) => {
@ -171,16 +187,14 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
};
let impl_node_id = if let Some(n) = tcx.hir.as_local_node_id(impl_did) {
n
} else {
debug!("visit_implementation_of_coerce_unsized(): impl not \
in this crate");
return;
};
// this provider should only get invoked for local def-ids
let impl_node_id = tcx.hir.as_local_node_id(impl_did).unwrap_or_else(|| {
bug!("coerce_unsized_info: invoked for non-local def-id {:?}", impl_did)
});
let source = tcx.item_type(impl_did);
let trait_ref = tcx.impl_trait_ref(impl_did).unwrap();
assert_eq!(trait_ref.def_id, coerce_unsized_trait);
let target = trait_ref.substs.type_at(1);
debug!("visit_implementation_of_coerce_unsized: {:?} -> {:?} (bound)",
source,
@ -192,6 +206,8 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let target = target.subst(tcx, &param_env.free_substs);
assert!(!source.has_escaping_regions());
let err_info = CoerceUnsizedInfo { custom_kind: None };
debug!("visit_implementation_of_coerce_unsized: {:?} -> {:?} (free)",
source,
target);
@ -234,7 +250,7 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
definition; expected {}, found {}",
source_path,
target_path);
return;
return err_info;
}
let fields = &def_a.struct_variant().fields;
@ -268,7 +284,7 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
"the trait `CoerceUnsized` may only be implemented \
for a coercion between structures with one field \
being coerced, none found");
return;
return err_info;
} else if diff_fields.len() > 1 {
let item = tcx.hir.expect_item(impl_node_id);
let span = if let ItemImpl(.., Some(ref t), _, _) = item.node {
@ -295,7 +311,7 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
.join(", ")));
err.span_label(span, &format!("requires multiple coercions"));
err.emit();
return;
return err_info;
}
let (i, a, b) = diff_fields[0];
@ -309,7 +325,7 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
E0376,
"the trait `CoerceUnsized` may only be implemented \
for a coercion between structures");
return;
return err_info;
}
};
@ -331,8 +347,8 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
.caller_bounds);
infcx.resolve_regions_and_report_errors(&free_regions, impl_node_id);
if let Some(kind) = kind {
tcx.maps.custom_coerce_unsized_kind.borrow_mut().insert(impl_did, kind);
CoerceUnsizedInfo {
custom_kind: kind
}
});
})
}

View file

@ -8,19 +8,82 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The code in this module gathers up all of the inherent impls in
//! the current crate and organizes them in a map. It winds up
//! touching the whole crate and thus must be recomputed completely
//! for any change, but it is very cheap to compute. In practice, most
//! code in the compiler never *directly* requests this map. Instead,
//! it requests the inherent impls specific to some type (via
//! `ty::queries::inherent_impls::get(def_id)`). That value, however,
//! is computed by selecting an idea from this table.
use rustc::dep_graph::DepNode;
use rustc::hir::def_id::DefId;
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::lint;
use rustc::traits::{self, Reveal};
use rustc::ty::{self, TyCtxt};
use rustc::ty::{self, CrateInherentImpls, TyCtxt};
use rustc::util::nodemap::DefIdMap;
use std::rc::Rc;
use syntax::ast;
use syntax_pos::Span;
use syntax_pos::{DUMMY_SP, Span};
/// On-demand query: yields a map containing all types mapped to their inherent impls.
pub fn crate_inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
crate_num: CrateNum)
-> CrateInherentImpls {
assert_eq!(crate_num, LOCAL_CRATE);
let krate = tcx.hir.krate();
let mut collect = InherentCollect {
tcx,
impls_map: CrateInherentImpls {
inherent_impls: DefIdMap()
}
};
krate.visit_all_item_likes(&mut collect);
collect.impls_map
}
/// On-demand query: yields a vector of the inherent impls for a specific type.
pub fn inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ty_def_id: DefId)
-> Rc<Vec<DefId>> {
assert!(ty_def_id.is_local());
// NB. Until we adopt the red-green dep-tracking algorithm (see
// [the plan] for details on that), we do some hackery here to get
// the dependencies correct. Basically, we use a `with_ignore` to
// read the result we want. If we didn't have the `with_ignore`,
// we would wind up with a dependency on the entire crate, which
// we don't want. Then we go and add dependencies on all the impls
// in the result (which is what we wanted).
//
// The result is a graph with an edge from `Hir(I)` for every impl
// `I` defined on some type `T` to `CoherentInherentImpls(T)`,
// thus ensuring that if any of those impls change, the set of
// inherent impls is considered dirty.
//
// [the plan]: https://github.com/rust-lang/rust-roadmap/issues/4
let result = tcx.dep_graph.with_ignore(|| {
let crate_map = ty::queries::crate_inherent_impls::get(tcx, DUMMY_SP, ty_def_id.krate);
match crate_map.inherent_impls.get(&ty_def_id) {
Some(v) => v.clone(),
None => Rc::new(vec![]),
}
});
for &impl_def_id in &result[..] {
tcx.dep_graph.read(DepNode::Hir(impl_def_id));
}
result
}
struct InherentCollect<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>
tcx: TyCtxt<'a, 'tcx, 'tcx>,
impls_map: CrateInherentImpls,
}
impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentCollect<'a, 'tcx> {
@ -209,25 +272,19 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentCollect<'a, 'tcx> {
}
impl<'a, 'tcx> InherentCollect<'a, 'tcx> {
fn check_def_id(&self, item: &hir::Item, def_id: DefId) {
fn check_def_id(&mut self, item: &hir::Item, def_id: DefId) {
if def_id.is_local() {
// Add the implementation to the mapping from implementation to base
// type def ID, if there is a base type for this implementation and
// the implementation does not have any associated traits.
let impl_def_id = self.tcx.hir.local_def_id(item.id);
let mut rc_vec = self.impls_map.inherent_impls
.entry(def_id)
.or_insert_with(|| Rc::new(vec![]));
// Subtle: it'd be better to collect these into a local map
// and then write the vector only once all items are known,
// but that leads to degenerate dep-graphs. The problem is
// that the write of that big vector winds up having reads
// from *all* impls in the krate, since we've lost the
// precision basically. This would be ok in the firewall
// model so once we've made progess towards that we can modify
// the strategy here. In the meantime, using `push` is ok
// because we are doing this as a pre-pass before anyone
// actually reads from `inherent_impls` -- and we know this is
// true beacuse we hold the refcell lock.
self.tcx.maps.inherent_impls.borrow_mut().push(def_id, impl_def_id);
// At this point, there should not be any clones of the
// `Rc`, so we can still safely push into it in place:
Rc::get_mut(&mut rc_vec).unwrap().push(impl_def_id);
} else {
struct_span_err!(self.tcx.sess,
item.span,
@ -266,91 +323,3 @@ impl<'a, 'tcx> InherentCollect<'a, 'tcx> {
}
}
struct InherentOverlapChecker<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>
}
impl<'a, 'tcx> InherentOverlapChecker<'a, 'tcx> {
fn check_for_common_items_in_impls(&self, impl1: DefId, impl2: DefId) {
#[derive(Copy, Clone, PartialEq)]
enum Namespace {
Type,
Value,
}
let name_and_namespace = |def_id| {
let item = self.tcx.associated_item(def_id);
(item.name, match item.kind {
ty::AssociatedKind::Type => Namespace::Type,
ty::AssociatedKind::Const |
ty::AssociatedKind::Method => Namespace::Value,
})
};
let impl_items1 = self.tcx.associated_item_def_ids(impl1);
let impl_items2 = self.tcx.associated_item_def_ids(impl2);
for &item1 in &impl_items1[..] {
let (name, namespace) = name_and_namespace(item1);
for &item2 in &impl_items2[..] {
if (name, namespace) == name_and_namespace(item2) {
let msg = format!("duplicate definitions with name `{}`", name);
let node_id = self.tcx.hir.as_local_node_id(item1).unwrap();
self.tcx.sess.add_lint(lint::builtin::OVERLAPPING_INHERENT_IMPLS,
node_id,
self.tcx.span_of_impl(item1).unwrap(),
msg);
}
}
}
}
fn check_for_overlapping_inherent_impls(&self, ty_def_id: DefId) {
let _task = self.tcx.dep_graph.in_task(DepNode::CoherenceOverlapInherentCheck(ty_def_id));
let inherent_impls = self.tcx.maps.inherent_impls.borrow();
let impls = match inherent_impls.get(&ty_def_id) {
Some(impls) => impls,
None => return,
};
for (i, &impl1_def_id) in impls.iter().enumerate() {
for &impl2_def_id in &impls[(i + 1)..] {
self.tcx.infer_ctxt((), Reveal::UserFacing).enter(|infcx| {
if traits::overlapping_impls(&infcx, impl1_def_id, impl2_def_id).is_some() {
self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id)
}
});
}
}
}
}
impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentOverlapChecker<'a, 'tcx> {
fn visit_item(&mut self, item: &'v hir::Item) {
match item.node {
hir::ItemEnum(..) |
hir::ItemStruct(..) |
hir::ItemTrait(..) |
hir::ItemUnion(..) => {
let type_def_id = self.tcx.hir.local_def_id(item.id);
self.check_for_overlapping_inherent_impls(type_def_id);
}
_ => {}
}
}
fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {
}
fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {
}
}
pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
tcx.visit_all_item_likes_in_krate(DepNode::CoherenceCheckImpl,
&mut InherentCollect { tcx });
tcx.visit_all_item_likes_in_krate(DepNode::CoherenceOverlapCheckSpecial,
&mut InherentOverlapChecker { tcx });
}

View file

@ -0,0 +1,102 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::lint;
use rustc::traits::{self, Reveal};
use rustc::ty::{self, TyCtxt};
use syntax_pos::DUMMY_SP;
pub fn crate_inherent_impls_overlap_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
crate_num: CrateNum) {
assert_eq!(crate_num, LOCAL_CRATE);
let krate = tcx.hir.krate();
krate.visit_all_item_likes(&mut InherentOverlapChecker { tcx });
}
struct InherentOverlapChecker<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>
}
impl<'a, 'tcx> InherentOverlapChecker<'a, 'tcx> {
fn check_for_common_items_in_impls(&self, impl1: DefId, impl2: DefId) {
#[derive(Copy, Clone, PartialEq)]
enum Namespace {
Type,
Value,
}
let name_and_namespace = |def_id| {
let item = self.tcx.associated_item(def_id);
(item.name, match item.kind {
ty::AssociatedKind::Type => Namespace::Type,
ty::AssociatedKind::Const |
ty::AssociatedKind::Method => Namespace::Value,
})
};
let impl_items1 = self.tcx.associated_item_def_ids(impl1);
let impl_items2 = self.tcx.associated_item_def_ids(impl2);
for &item1 in &impl_items1[..] {
let (name, namespace) = name_and_namespace(item1);
for &item2 in &impl_items2[..] {
if (name, namespace) == name_and_namespace(item2) {
let msg = format!("duplicate definitions with name `{}`", name);
let node_id = self.tcx.hir.as_local_node_id(item1).unwrap();
self.tcx.sess.add_lint(lint::builtin::OVERLAPPING_INHERENT_IMPLS,
node_id,
self.tcx.span_of_impl(item1).unwrap(),
msg);
}
}
}
}
fn check_for_overlapping_inherent_impls(&self, ty_def_id: DefId) {
let impls = ty::queries::inherent_impls::get(self.tcx, DUMMY_SP, ty_def_id);
for (i, &impl1_def_id) in impls.iter().enumerate() {
for &impl2_def_id in &impls[(i + 1)..] {
self.tcx.infer_ctxt((), Reveal::UserFacing).enter(|infcx| {
if traits::overlapping_impls(&infcx, impl1_def_id, impl2_def_id).is_some() {
self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id)
}
});
}
}
}
}
impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentOverlapChecker<'a, 'tcx> {
fn visit_item(&mut self, item: &'v hir::Item) {
match item.node {
hir::ItemEnum(..) |
hir::ItemStruct(..) |
hir::ItemTrait(..) |
hir::ItemUnion(..) => {
let type_def_id = self.tcx.hir.local_def_id(item.id);
self.check_for_overlapping_inherent_impls(type_def_id);
}
_ => {}
}
}
fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {
}
fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {
}
}

View file

@ -24,7 +24,8 @@ use syntax::ast;
use syntax_pos::DUMMY_SP;
mod builtin;
mod inherent;
mod inherent_impls;
mod inherent_impls_overlap;
mod orphan;
mod overlap;
mod unsafety;
@ -102,9 +103,16 @@ fn enforce_trait_manually_implementable(tcx: TyCtxt, impl_def_id: DefId, trait_d
}
pub fn provide(providers: &mut Providers) {
use self::builtin::coerce_unsized_info;
use self::inherent_impls::{crate_inherent_impls, inherent_impls};
use self::inherent_impls_overlap::crate_inherent_impls_overlap_check;
*providers = Providers {
coherent_trait,
coherent_inherent_impls,
crate_inherent_impls,
inherent_impls,
crate_inherent_impls_overlap_check,
coerce_unsized_info,
..*providers
};
}
@ -123,10 +131,6 @@ fn coherent_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
builtin::check_trait(tcx, def_id);
}
fn coherent_inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, _: CrateNum) {
inherent::check(tcx);
}
pub fn check_coherence<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let _task = tcx.dep_graph.in_task(DepNode::Coherence);
for &trait_def_id in tcx.hir.krate().trait_impls.keys() {
@ -137,5 +141,7 @@ pub fn check_coherence<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
orphan::check(tcx);
overlap::check_default_impls(tcx);
ty::queries::coherent_inherent_impls::get(tcx, DUMMY_SP, LOCAL_CRATE);
// these queries are executed for side-effects (error reporting):
ty::queries::crate_inherent_impls::get(tcx, DUMMY_SP, LOCAL_CRATE);
ty::queries::crate_inherent_impls_overlap_check::get(tcx, DUMMY_SP, LOCAL_CRATE);
}

View file

@ -790,7 +790,7 @@ Furthermore, the syntax is changing to use `in` instead of `box`. See [RFC 470]
and [RFC 809] for more details.
[RFC 470]: https://github.com/rust-lang/rfcs/pull/470
[RFC 809]: https://github.com/rust-lang/rfcs/pull/809
[RFC 809]: https://github.com/rust-lang/rfcs/blob/master/text/0809-box-and-in-for-stdlib.md
"##,
E0067: r##"
@ -1428,7 +1428,7 @@ type X = u32; // this compiles
```
Note that type parameters for enum-variant constructors go after the variant,
not after the enum (Option::None::<u32>, not Option::<u32>::None).
not after the enum (`Option::None::<u32>`, not `Option::<u32>::None`).
"##,
E0110: r##"
@ -1521,7 +1521,7 @@ impl Bar for u32 {
For information on the design of the orphan rules, see [RFC 1023].
[RFC 1023]: https://github.com/rust-lang/rfcs/pull/1023
[RFC 1023]: https://github.com/rust-lang/rfcs/blob/master/text/1023-rebalancing-coherence.md
"##,
E0118: r##"
@ -1911,8 +1911,9 @@ type Foo = Trait<Bar=i32>; // ok!
E0192: r##"
Negative impls are only allowed for traits with default impls. For more
information see the [opt-in builtin traits RFC](https://github.com/rust-lang/
rfcs/blob/master/text/0019-opt-in-builtin-traits.md).
information see the [opt-in builtin traits RFC][RFC 19].
[RFC 19]: https://github.com/rust-lang/rfcs/blob/master/text/0019-opt-in-builtin-traits.md
"##,
E0193: r##"
@ -2147,7 +2148,7 @@ E0202: r##"
Inherent associated types were part of [RFC 195] but are not yet implemented.
See [the tracking issue][iss8995] for the status of this implementation.
[RFC 195]: https://github.com/rust-lang/rfcs/pull/195
[RFC 195]: https://github.com/rust-lang/rfcs/blob/master/text/0195-associated-items.md
[iss8995]: https://github.com/rust-lang/rust/issues/8995
"##,
@ -2424,7 +2425,7 @@ such that `Ti` is a local type. Then no type parameter can appear in any of the
For information on the design of the orphan rules, see [RFC 1023].
[RFC 1023]: https://github.com/rust-lang/rfcs/pull/1023
[RFC 1023]: https://github.com/rust-lang/rfcs/blob/master/text/1023-rebalancing-coherence.md
"##,
/*
@ -2799,8 +2800,9 @@ verify this assertion; therefore we must tag this `impl` as unsafe.
E0318: r##"
Default impls for a trait must be located in the same crate where the trait was
defined. For more information see the [opt-in builtin traits RFC](https://github
.com/rust-lang/rfcs/blob/master/text/0019-opt-in-builtin-traits.md).
defined. For more information see the [opt-in builtin traits RFC][RFC 19].
[RFC 19]: https://github.com/rust-lang/rfcs/blob/master/text/0019-opt-in-builtin-traits.md
"##,
E0321: r##"
@ -3018,10 +3020,8 @@ impl<T> Unsize<T> for MyType {}
```
If you are defining your own smart pointer type and would like to enable
conversion from a sized to an unsized type with the [DST coercion system]
(https://github.com/rust-lang/rfcs/blob/master/text/0982-dst-coercion.md), use
[`CoerceUnsized`](https://doc.rust-lang.org/std/ops/trait.CoerceUnsized.html)
instead.
conversion from a sized to an unsized type with the
[DST coercion system][RFC 982], use [`CoerceUnsized`] instead.
```
#![feature(coerce_unsized)]
@ -3035,6 +3035,9 @@ pub struct MyType<T: ?Sized> {
impl<T, U> CoerceUnsized<MyType<U>> for MyType<T>
where T: CoerceUnsized<U> {}
```
[RFC 982]: https://github.com/rust-lang/rfcs/blob/master/text/0982-dst-coercion.md
[`CoerceUnsized`]: https://doc.rust-lang.org/std/ops/trait.CoerceUnsized.html
"##,
E0329: r##"
@ -3438,8 +3441,9 @@ struct.
E0380: r##"
Default impls are only allowed for traits with no methods or associated items.
For more information see the [opt-in builtin traits RFC](https://github.com/rust
-lang/rfcs/blob/master/text/0019-opt-in-builtin-traits.md).
For more information see the [opt-in builtin traits RFC][RFC 19].
[RFC 19]: https://github.com/rust-lang/rfcs/blob/master/text/0019-opt-in-builtin-traits.md
"##,
E0390: r##"

View file

@ -232,14 +232,12 @@ fn build_type_alias(cx: &DocContext, did: DefId) -> clean::Typedef {
pub fn build_impls(cx: &DocContext, did: DefId) -> Vec<clean::Item> {
let tcx = cx.tcx;
tcx.populate_inherent_implementations_for_type_if_necessary(DUMMY_SP, did);
let mut impls = Vec::new();
if let Some(i) = tcx.maps.inherent_impls.borrow().get(&did) {
for &did in i.iter() {
build_impl(cx, did, &mut impls);
}
for &did in ty::queries::inherent_impls::get(tcx, DUMMY_SP, did).iter() {
build_impl(cx, did, &mut impls);
}
// If this is the first time we've inlined something from another crate, then
// we inline *all* impls from all the crates into this crate. Note that there's
// currently no way for us to filter this based on type, and we likely need

View file

@ -2611,7 +2611,7 @@ fn render_attribute(attr: &ast::MetaItem) -> Option<String> {
if attr.is_word() {
Some(format!("{}", name))
} else if let Some(v) = attr.value_str() {
Some(format!("{} = {:?}", name, &v.as_str()[..]))
Some(format!("{} = {:?}", name, v.as_str()))
} else if let Some(values) = attr.meta_item_list() {
let display: Vec<_> = values.iter().filter_map(|attr| {
attr.meta_item().and_then(|mi| render_attribute(mi))
@ -2642,7 +2642,7 @@ fn render_attributes(w: &mut fmt::Formatter, it: &clean::Item) -> fmt::Result {
for attr in &it.attrs.other_attrs {
let name = attr.name().unwrap();
if !ATTRIBUTE_WHITELIST.contains(&&name.as_str()[..]) {
if !ATTRIBUTE_WHITELIST.contains(&&*name.as_str()) {
continue;
}
if let Some(s) = render_attribute(&attr.meta().unwrap()) {

View file

@ -28,20 +28,22 @@ use html::markdown;
use html::markdown::{Markdown, MarkdownWithToc, find_testable_code};
use test::{TestOptions, Collector};
/// Separate any lines at the start of the file that begin with `%`.
/// Separate any lines at the start of the file that begin with `# ` or `%`.
fn extract_leading_metadata<'a>(s: &'a str) -> (Vec<&'a str>, &'a str) {
let mut metadata = Vec::new();
let mut count = 0;
for line in s.lines() {
if line.starts_with("%") {
// remove %<whitespace>
if line.starts_with("# ") || line.starts_with("%") {
// trim the whitespace after the symbol
metadata.push(line[1..].trim_left());
count += line.len() + 1;
} else {
return (metadata, &s[count..]);
}
}
// if we're here, then all lines were metadata % lines.
// if we're here, then all lines were metadata `# ` or `%` lines.
(metadata, "")
}
@ -83,7 +85,7 @@ pub fn render(input: &str, mut output: PathBuf, matches: &getopts::Matches,
if metadata.is_empty() {
let _ = writeln!(
&mut io::stderr(),
"rustdoc: invalid markdown file: expecting initial line with `% ...TITLE...`"
"rustdoc: invalid markdown file: no initial lines starting with `# ` or `%`"
);
return 5;
}

View file

@ -17,7 +17,7 @@ use mem;
use ops::Range;
use iter::FusedIterator;
/// Extension methods for ASCII-subset only operations on string slices.
/// Extension methods for ASCII-subset only operations.
///
/// Be aware that operations on seemingly non-ASCII characters can sometimes
/// have unexpected results. Consider this example:
@ -54,19 +54,21 @@ pub trait AsciiExt {
///
/// let ascii = 'a';
/// let utf8 = '❤';
/// let int_ascii = 97;
///
/// assert!(ascii.is_ascii());
/// assert!(!utf8.is_ascii());
/// assert!(int_ascii.is_ascii());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn is_ascii(&self) -> bool;
/// Makes a copy of the string in ASCII upper case.
/// Makes a copy of the value in its ASCII upper case equivalent.
///
/// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
/// but non-ASCII letters are unchanged.
///
/// To uppercase the string in-place, use [`make_ascii_uppercase`].
/// To uppercase the value in-place, use [`make_ascii_uppercase`].
///
/// To uppercase ASCII characters in addition to non-ASCII characters, use
/// [`str::to_uppercase`].
@ -78,9 +80,11 @@ pub trait AsciiExt {
///
/// let ascii = 'a';
/// let utf8 = '❤';
/// let int_ascii = 97;
///
/// assert_eq!('A', ascii.to_ascii_uppercase());
/// assert_eq!('❤', utf8.to_ascii_uppercase());
/// assert_eq!(65, int_ascii.to_ascii_uppercase());
/// ```
///
/// [`make_ascii_uppercase`]: #tymethod.make_ascii_uppercase
@ -88,12 +92,12 @@ pub trait AsciiExt {
#[stable(feature = "rust1", since = "1.0.0")]
fn to_ascii_uppercase(&self) -> Self::Owned;
/// Makes a copy of the string in ASCII lower case.
/// Makes a copy of the value in its ASCII lower case equivalent.
///
/// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
/// but non-ASCII letters are unchanged.
///
/// To lowercase the string in-place, use [`make_ascii_lowercase`].
/// To lowercase the value in-place, use [`make_ascii_lowercase`].
///
/// To lowercase ASCII characters in addition to non-ASCII characters, use
/// [`str::to_lowercase`].
@ -105,9 +109,11 @@ pub trait AsciiExt {
///
/// let ascii = 'A';
/// let utf8 = '❤';
/// let int_ascii = 65;
///
/// assert_eq!('a', ascii.to_ascii_lowercase());
/// assert_eq!('❤', utf8.to_ascii_lowercase());
/// assert_eq!(97, int_ascii.to_ascii_lowercase());
/// ```
///
/// [`make_ascii_lowercase`]: #tymethod.make_ascii_lowercase
@ -115,10 +121,10 @@ pub trait AsciiExt {
#[stable(feature = "rust1", since = "1.0.0")]
fn to_ascii_lowercase(&self) -> Self::Owned;
/// Checks that two strings are an ASCII case-insensitive match.
/// Checks that two values are an ASCII case-insensitive match.
///
/// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`,
/// but without allocating and copying temporary strings.
/// but without allocating and copying temporaries.
///
/// # Examples
///
@ -142,7 +148,7 @@ pub trait AsciiExt {
/// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
/// but non-ASCII letters are unchanged.
///
/// To return a new uppercased string without modifying the existing one, use
/// To return a new uppercased value without modifying the existing one, use
/// [`to_ascii_uppercase`].
///
/// # Examples
@ -166,7 +172,7 @@ pub trait AsciiExt {
/// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
/// but non-ASCII letters are unchanged.
///
/// To return a new lowercased string without modifying the existing one, use
/// To return a new lowercased value without modifying the existing one, use
/// [`to_ascii_lowercase`].
///
/// # Examples

View file

@ -14,22 +14,24 @@
use fmt;
#[cfg(any(target_os = "android",
target_os = "emscripten",
#[cfg(any(target_os = "emscripten",
all(target_os = "linux", any(target_arch = "aarch64",
target_arch = "arm",
target_arch = "powerpc",
target_arch = "powerpc64",
target_arch = "s390x")),
all(target_os = "android", any(target_arch = "aarch64",
target_arch = "arm")),
all(target_os = "fuchsia", target_arch = "aarch64")))]
#[stable(feature = "raw_os", since = "1.1.0")] pub type c_char = u8;
#[cfg(not(any(target_os = "android",
target_os = "emscripten",
#[cfg(not(any(target_os = "emscripten",
all(target_os = "linux", any(target_arch = "aarch64",
target_arch = "arm",
target_arch = "powerpc",
target_arch = "powerpc64",
target_arch = "s390x")),
all(target_os = "android", any(target_arch = "aarch64",
target_arch = "arm")),
all(target_os = "fuchsia", target_arch = "aarch64"))))]
#[stable(feature = "raw_os", since = "1.1.0")] pub type c_char = i8;
#[stable(feature = "raw_os", since = "1.1.0")] pub type c_schar = i8;

View file

@ -641,7 +641,7 @@ impl UnixListener {
let inner = Socket::new_raw(libc::AF_UNIX, libc::SOCK_STREAM)?;
let (addr, len) = sockaddr_un(path)?;
cvt(libc::bind(*inner.as_inner(), &addr as *const _ as *const _, len))?;
cvt(libc::bind(*inner.as_inner(), &addr as *const _ as *const _, len as _))?;
cvt(libc::listen(*inner.as_inner(), 128))?;
Ok(UnixListener(inner))
@ -920,7 +920,7 @@ impl UnixDatagram {
let socket = UnixDatagram::unbound()?;
let (addr, len) = sockaddr_un(path)?;
cvt(libc::bind(*socket.0.as_inner(), &addr as *const _ as *const _, len))?;
cvt(libc::bind(*socket.0.as_inner(), &addr as *const _ as *const _, len as _))?;
Ok(socket)
}

View file

@ -417,12 +417,26 @@ mod tests {
}
}
// Android with api less than 21 define sig* functions inline, so it is not
// available for dynamic link. Implementing sigemptyset and sigaddset allow us
// to support older Android version (independent of libc version).
// The following implementations are based on https://git.io/vSkNf
#[cfg(not(target_os = "android"))]
extern {
#[cfg_attr(target_os = "netbsd", link_name = "__sigemptyset14")]
fn sigemptyset(set: *mut libc::sigset_t) -> libc::c_int;
#[cfg_attr(target_os = "netbsd", link_name = "__sigaddset14")]
fn sigaddset(set: *mut libc::sigset_t, signum: libc::c_int) -> libc::c_int;
}
#[cfg(target_os = "android")]
unsafe fn sigemptyset(set: *mut libc::sigset_t) -> libc::c_int {
libc::memset(set as *mut _, 0, mem::size_of::<libc::sigset_t>());
return 0;
}
#[cfg(target_os = "android")]
unsafe fn sigaddset(set: *mut libc::sigset_t, signum: libc::c_int) -> libc::c_int {
use slice;
@ -450,7 +464,7 @@ mod tests {
let mut set: libc::sigset_t = mem::uninitialized();
let mut old_set: libc::sigset_t = mem::uninitialized();
t!(cvt(libc::sigemptyset(&mut set)));
t!(cvt(sigemptyset(&mut set)));
t!(cvt(sigaddset(&mut set, libc::SIGINT)));
t!(cvt(libc::pthread_sigmask(libc::SIG_SETMASK, &set, &mut old_set)));

View file

@ -193,7 +193,16 @@ impl Command {
// need to clean things up now to avoid confusing the program
// we're about to run.
let mut set: libc::sigset_t = mem::uninitialized();
t!(cvt(libc::sigemptyset(&mut set)));
if cfg!(target_os = "android") {
// Implementing sigemptyset allow us to support older Android
// versions. See the comment about Android and sig* functions in
// process_common.rs
libc::memset(&mut set as *mut _ as *mut _,
0,
mem::size_of::<libc::sigset_t>());
} else {
t!(cvt(libc::sigemptyset(&mut set)));
}
t!(cvt(libc::pthread_sigmask(libc::SIG_SETMASK, &set,
ptr::null_mut())));
let ret = sys::signal(libc::SIGPIPE, libc::SIG_DFL);

View file

@ -257,8 +257,13 @@ impl Stdio {
// INVALID_HANDLE_VALUE.
Stdio::Inherit => {
match stdio::get(stdio_id) {
Ok(io) => io.handle().duplicate(0, true,
c::DUPLICATE_SAME_ACCESS),
Ok(io) => {
let io = Handle::new(io.handle());
let ret = io.duplicate(0, true,
c::DUPLICATE_SAME_ACCESS);
io.into_raw();
return ret
}
Err(..) => Ok(Handle::new(c::INVALID_HANDLE_VALUE)),
}
}

View file

@ -22,42 +22,43 @@ use sys::cvt;
use sys::handle::Handle;
use sys_common::io::read_to_end_uninitialized;
pub struct NoClose(Option<Handle>);
pub enum Output {
Console(NoClose),
Pipe(NoClose),
Console(c::HANDLE),
Pipe(c::HANDLE),
}
pub struct Stdin {
handle: Output,
utf8: Mutex<io::Cursor<Vec<u8>>>,
}
pub struct Stdout(Output);
pub struct Stderr(Output);
pub struct Stdout;
pub struct Stderr;
pub fn get(handle: c::DWORD) -> io::Result<Output> {
let handle = unsafe { c::GetStdHandle(handle) };
if handle == c::INVALID_HANDLE_VALUE {
Err(io::Error::last_os_error())
} else if handle.is_null() {
Err(io::Error::new(io::ErrorKind::Other,
"no stdio handle available for this process"))
Err(io::Error::from_raw_os_error(c::ERROR_INVALID_HANDLE as i32))
} else {
let ret = NoClose::new(handle);
let mut out = 0;
match unsafe { c::GetConsoleMode(handle, &mut out) } {
0 => Ok(Output::Pipe(ret)),
_ => Ok(Output::Console(ret)),
0 => Ok(Output::Pipe(handle)),
_ => Ok(Output::Console(handle)),
}
}
}
fn write(out: &Output, data: &[u8]) -> io::Result<usize> {
let handle = match *out {
Output::Console(ref c) => c.get().raw(),
Output::Pipe(ref p) => return p.get().write(data),
fn write(handle: c::DWORD, data: &[u8]) -> io::Result<usize> {
let handle = match try!(get(handle)) {
Output::Console(c) => c,
Output::Pipe(p) => {
let handle = Handle::new(p);
let ret = handle.write(data);
handle.into_raw();
return ret
}
};
// As with stdin on windows, stdout often can't handle writes of large
// sizes. For an example, see #14940. For this reason, don't try to
// write the entire output buffer on windows.
@ -93,18 +94,20 @@ fn write(out: &Output, data: &[u8]) -> io::Result<usize> {
impl Stdin {
pub fn new() -> io::Result<Stdin> {
get(c::STD_INPUT_HANDLE).map(|handle| {
Stdin {
handle: handle,
utf8: Mutex::new(Cursor::new(Vec::new())),
}
Ok(Stdin {
utf8: Mutex::new(Cursor::new(Vec::new())),
})
}
pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {
let handle = match self.handle {
Output::Console(ref c) => c.get().raw(),
Output::Pipe(ref p) => return p.get().read(buf),
let handle = match try!(get(c::STD_INPUT_HANDLE)) {
Output::Console(c) => c,
Output::Pipe(p) => {
let handle = Handle::new(p);
let ret = handle.read(buf);
handle.into_raw();
return ret
}
};
let mut utf8 = self.utf8.lock().unwrap();
// Read more if the buffer is empty
@ -125,11 +128,9 @@ impl Stdin {
Ok(utf8) => utf8.into_bytes(),
Err(..) => return Err(invalid_encoding()),
};
if let Output::Console(_) = self.handle {
if let Some(&last_byte) = data.last() {
if last_byte == CTRL_Z {
data.pop();
}
if let Some(&last_byte) = data.last() {
if last_byte == CTRL_Z {
data.pop();
}
}
*utf8 = Cursor::new(data);
@ -158,11 +159,11 @@ impl<'a> Read for &'a Stdin {
impl Stdout {
pub fn new() -> io::Result<Stdout> {
get(c::STD_OUTPUT_HANDLE).map(Stdout)
Ok(Stdout)
}
pub fn write(&self, data: &[u8]) -> io::Result<usize> {
write(&self.0, data)
write(c::STD_OUTPUT_HANDLE, data)
}
pub fn flush(&self) -> io::Result<()> {
@ -172,11 +173,11 @@ impl Stdout {
impl Stderr {
pub fn new() -> io::Result<Stderr> {
get(c::STD_ERROR_HANDLE).map(Stderr)
Ok(Stderr)
}
pub fn write(&self, data: &[u8]) -> io::Result<usize> {
write(&self.0, data)
write(c::STD_ERROR_HANDLE, data)
}
pub fn flush(&self) -> io::Result<()> {
@ -197,27 +198,12 @@ impl io::Write for Stderr {
}
}
impl NoClose {
fn new(handle: c::HANDLE) -> NoClose {
NoClose(Some(Handle::new(handle)))
}
fn get(&self) -> &Handle { self.0.as_ref().unwrap() }
}
impl Drop for NoClose {
fn drop(&mut self) {
self.0.take().unwrap().into_raw();
}
}
impl Output {
pub fn handle(&self) -> &Handle {
let nc = match *self {
Output::Console(ref c) => c,
Output::Pipe(ref c) => c,
};
nc.0.as_ref().unwrap()
pub fn handle(&self) -> c::HANDLE {
match *self {
Output::Console(c) => c,
Output::Pipe(c) => c,
}
}
}

View file

@ -339,7 +339,7 @@ impl TcpListener {
// Bind our new socket
let (addrp, len) = addr.into_inner();
cvt(unsafe { c::bind(*sock.as_inner(), addrp, len) })?;
cvt(unsafe { c::bind(*sock.as_inner(), addrp, len as _) })?;
// Start listening
cvt(unsafe { c::listen(*sock.as_inner(), 128) })?;
@ -430,7 +430,7 @@ impl UdpSocket {
let sock = Socket::new(addr, c::SOCK_DGRAM)?;
let (addrp, len) = addr.into_inner();
cvt(unsafe { c::bind(*sock.as_inner(), addrp, len) })?;
cvt(unsafe { c::bind(*sock.as_inner(), addrp, len as _) })?;
Ok(UdpSocket { inner: sock })
}

View file

@ -842,11 +842,11 @@ impl char {
/// Returns an iterator that yields the uppercase equivalent of a `char`
/// as one or more `char`s.
///
/// If a character does not have a uppercase equivalent, the same character
/// If a character does not have an uppercase equivalent, the same character
/// will be returned back by the iterator.
///
/// This performs complex unconditional mappings with no tailoring: it maps
/// one Unicode character to its lowercase equivalent according to the
/// one Unicode character to its uppercase equivalent according to the
/// [Unicode database] and the additional complex mappings
/// [`SpecialCasing.txt`]. Conditional mappings (based on context or
/// language) are not considered here.

View file

@ -106,8 +106,8 @@ impl<'a, 'b> PlaceholderExpander<'a, 'b> {
impl<'a, 'b> Folder for PlaceholderExpander<'a, 'b> {
fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {
match item.node {
ast::ItemKind::Mac(ref mac) if !mac.node.path.segments.is_empty() => {}
ast::ItemKind::Mac(_) => return self.remove(item.id).make_items(),
ast::ItemKind::MacroDef(_) => return SmallVector::one(item),
_ => {}
}

View file

@ -119,7 +119,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
};
let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), false);
p.root_module_name = cx.current_expansion.module.mod_path.last()
.map(|id| (*id.name.as_str()).to_owned());
.map(|id| id.name.as_str().to_string());
p.check_unknown_macro_variable();
// Let the context choose how to interpret the result.

View file

@ -818,7 +818,7 @@ pub struct GatedCfg {
impl GatedCfg {
pub fn gate(cfg: &ast::MetaItem) -> Option<GatedCfg> {
let name = &*cfg.name().as_str();
let name = cfg.name().as_str();
GATED_CFGS.iter()
.position(|info| info.0 == name)
.map(|idx| {
@ -865,8 +865,7 @@ macro_rules! gate_feature {
impl<'a> Context<'a> {
fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) {
debug!("check_attribute(attr = {:?})", attr);
let name = unwrap_or!(attr.name(), return);
let name = unwrap_or!(attr.name(), return).as_str();
for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES {
if name == n {
if let &Gated(_, ref name, ref desc, ref has_feature) = gateage {
@ -885,12 +884,12 @@ impl<'a> Context<'a> {
return;
}
}
if name.as_str().starts_with("rustc_") {
if name.starts_with("rustc_") {
gate_feature!(self, rustc_attrs, attr.span,
"unless otherwise specified, attributes \
with the prefix `rustc_` \
are reserved for internal compiler diagnostics");
} else if name.as_str().starts_with("derive_") {
} else if name.starts_with("derive_") {
gate_feature!(self, custom_derive, attr.span, EXPLAIN_DERIVE_UNDERSCORE);
} else if !attr::is_known(attr) {
// Only run the custom attribute lint during regular

View file

@ -5151,15 +5151,15 @@ impl<'a> Parser<'a> {
fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) {
if let Some(path) = attr::first_attr_value_str_by_name(attrs, "path") {
self.directory.path.push(&*path.as_str());
self.directory.path.push(&path.as_str());
self.directory.ownership = DirectoryOwnership::Owned;
} else {
self.directory.path.push(&*id.name.as_str());
self.directory.path.push(&id.name.as_str());
}
}
pub fn submod_path_from_attr(attrs: &[ast::Attribute], dir_path: &Path) -> Option<PathBuf> {
attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d.as_str()))
attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&d.as_str()))
}
/// Returns either a path to a module, or .

View file

@ -72,9 +72,9 @@ impl Decodable for Symbol {
}
}
impl<'a> PartialEq<&'a str> for Symbol {
fn eq(&self, other: &&str) -> bool {
*self.as_str() == **other
impl<T: ::std::ops::Deref<Target=str>> PartialEq<T> for Symbol {
fn eq(&self, other: &T) -> bool {
self.as_str() == other.deref()
}
}
@ -244,11 +244,47 @@ fn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T {
/// destroyed. In particular, they must not access string contents. This can
/// be fixed in the future by just leaking all strings until thread death
/// somehow.
#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)]
#[derive(Clone, Hash, PartialOrd, Eq, Ord)]
pub struct InternedString {
string: &'static str,
}
// Let `InternedString` be used wherever a `&str`-convertible type is
// expected (e.g. `AsRef<Path>`, `AsRef<OsStr>`) by delegating to the
// wrapped `&'static str`.
impl<U: ?Sized> ::std::convert::AsRef<U> for InternedString where str: ::std::convert::AsRef<U> {
    fn as_ref(&self) -> &U {
        self.string.as_ref()
    }
}

// Compare an `InternedString` on the left against anything that derefs to
// `str` (`String`, `&str`, other interned strings, ...) by contents.
impl<T: ::std::ops::Deref<Target = str>> ::std::cmp::PartialEq<T> for InternedString {
    fn eq(&self, other: &T) -> bool {
        self.string == other.deref()
    }
}

// Symmetric impls: allow the plain string types on the left-hand side to
// compare against an `InternedString` on the right, again by contents.
impl ::std::cmp::PartialEq<InternedString> for str {
    fn eq(&self, other: &InternedString) -> bool {
        self == other.string
    }
}

impl<'a> ::std::cmp::PartialEq<InternedString> for &'a str {
    fn eq(&self, other: &InternedString) -> bool {
        *self == other.string
    }
}

impl ::std::cmp::PartialEq<InternedString> for String {
    fn eq(&self, other: &InternedString) -> bool {
        self == other.string
    }
}

impl<'a> ::std::cmp::PartialEq<InternedString> for &'a String {
    fn eq(&self, other: &InternedString) -> bool {
        *self == other.string
    }
}
impl !Send for InternedString { }
impl ::std::ops::Deref for InternedString {

View file

@ -616,7 +616,7 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> {
fn is_test_crate(krate: &ast::Crate) -> bool {
match attr::find_crate_name(&krate.attrs) {
Some(s) if "test" == &*s.as_str() => true,
Some(s) if "test" == s.as_str() => true,
_ => false
}
}

View file

@ -0,0 +1,16 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Invoking an unresolvable macro in item position inside a trait body and
// inside an inherent impl must produce a clean resolution error (not an
// ICE). The `//~ ERROR` annotations are compiletest directives and must
// match the compiler's diagnostic text exactly — do not edit them.
trait T { m!(); } //~ ERROR cannot find macro `m!` in this scope
struct S;
impl S { m!(); } //~ ERROR cannot find macro `m!` in this scope
fn main() {}

View file

@ -0,0 +1,64 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(rustc_private)]
extern crate rustc_back;
use std::fs::File;
use std::io::{Read, Write};
use rustc_back::tempdir::TempDir;
// Redirect this process's stdout (file descriptor 1) to `file` via `dup2`,
// so that everything written to stdout afterwards lands in the file.
// Unix-only counterpart of the Windows version below.
#[cfg(unix)]
fn switch_stdout_to(file: File) {
    use std::os::unix::prelude::*;

    // Declared by hand rather than depending on the `libc` crate.
    extern {
        fn dup2(old: i32, new: i32) -> i32;
    }

    unsafe {
        // On success `dup2` returns the target descriptor — 1 here.
        assert_eq!(dup2(file.as_raw_fd(), 1), 1);
    }
}
// Windows counterpart: install `file`'s raw handle as the process's
// standard-output handle via `SetStdHandle`.
#[cfg(windows)]
fn switch_stdout_to(file: File) {
    use std::os::windows::prelude::*;

    extern "system" {
        fn SetStdHandle(nStdHandle: u32, handle: *mut u8) -> i32;
    }

    // Win32 defines STD_OUTPUT_HANDLE as (DWORD)-11.
    const STD_OUTPUT_HANDLE: u32 = (-11i32) as u32;

    unsafe {
        // `into_raw_handle` gives up ownership, so the handle is NOT closed
        // when `file` goes out of scope — the stdout slot now owns it.
        let rc = SetStdHandle(STD_OUTPUT_HANDLE,
                              file.into_raw_handle() as *mut _);
        // SetStdHandle returns nonzero on success.
        assert!(rc != 0);
    }
}
/// Run-pass test: print to the inherited stdout, swap stdout for a file,
/// and verify that only the output written *after* the swap ends up in
/// the file.
fn main() {
    let tmp = TempDir::new("foo").unwrap();
    let out_path = tmp.path().join("bar");
    let redirect_target = File::create(&out_path).unwrap();

    // Goes to the original stdout; must NOT appear in the file.
    println!("foo");
    std::io::stdout().flush().unwrap();

    switch_stdout_to(redirect_target);

    // Goes to the redirected stdout, i.e. into `out_path`.
    println!("bar");
    std::io::stdout().flush().unwrap();

    let mut captured = String::new();
    File::open(&out_path)
        .unwrap()
        .read_to_string(&mut captured)
        .unwrap();
    assert_eq!(captured, "bar\n");
}

View file

@ -0,0 +1,19 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// A macro that generates another `macro_rules!` definition, splicing the
// captured `$e` expression into the generated macro's body. `$e` must be
// substituted when `m!` expands — not reinterpreted as a metavariable of
// the inner macro `n!`.
macro_rules! m {
    ($e:expr) => {
        macro_rules! n { () => { $e } }
    }
}
// `m!(foo!())` only *defines* `n!`; `foo!` is never expanded here, so this
// must compile even though no macro `foo!` exists.
fn main() {
    m!(foo!());
}

View file

@ -58,24 +58,8 @@ pub fn run(lib_path: &str,
let mut cmd = Command::new(prog);
cmd.args(args)
.stdout(Stdio::piped())
.stderr(Stdio::piped());
// Why oh why do we sometimes make a pipe and sometimes inherit the stdin
// stream, well that's an excellent question! In theory it should suffice to
// always create a pipe here and be done with it. Unfortunately though
// there's apparently something odd with the gdb that comes with gcc 6.3.0
// on MinGW. Tracked at rust-lang/rust#40184 when stdin is piped here
// (unconditionally) then all gdb tests will fail on MinGW when using gcc
6.3.0. When using an inherited stdin though they happen to all work!
//
// As to why this fixes the issue, well, I have no idea. If you can remove
// this branch and unconditionally use `piped` and it gets past @bors please
// feel free to send a PR!
if input.is_some() || !cfg!(windows) {
cmd.stdin(Stdio::piped());
} else {
cmd.stdin(Stdio::inherit());
}
.stderr(Stdio::piped())
.stdin(Stdio::piped());
add_target_env(&mut cmd, lib_path, aux_path);
for (key, val) in env {