Merge upstream/master: Fix a weird bug

This commit is contained in:
ubsan 2016-07-05 20:12:37 -07:00
commit 297e3969e2
185 changed files with 3642 additions and 2133 deletions

View file

@ -10,6 +10,7 @@
import argparse
import contextlib
import datetime
import hashlib
import os
import shutil
@ -18,6 +19,8 @@ import sys
import tarfile
import tempfile
from time import time
def get(url, path, verbose=False):
sha_url = url + ".sha256"
@ -118,6 +121,9 @@ def stage0_data(rust_root):
data[a] = b
return data
def format_build_time(duration):
return str(datetime.timedelta(seconds=int(duration)))
class RustBuild:
def download_stage0(self):
cache_dst = os.path.join(self.build_dir, "cache")
@ -265,7 +271,7 @@ class RustBuild:
try:
ostype = subprocess.check_output(['uname', '-s']).strip()
cputype = subprocess.check_output(['uname', '-m']).strip()
except subprocess.CalledProcessError:
except (subprocess.CalledProcessError, WindowsError):
if sys.platform == 'win32':
return 'x86_64-pc-windows-msvc'
err = "uname not found"
@ -372,6 +378,8 @@ def main():
rb._rustc_channel, rb._rustc_date = data['rustc'].split('-', 1)
rb._cargo_channel, rb._cargo_date = data['cargo'].split('-', 1)
start_time = time()
# Fetch/build the bootstrap
rb.build = rb.build_triple()
rb.download_stage0()
@ -390,5 +398,9 @@ def main():
env["BOOTSTRAP_PARENT_ID"] = str(os.getpid())
rb.run(args, env)
end_time = time()
print("Build completed in %s" % format_build_time(end_time - start_time))
if __name__ == '__main__':
main()

View file

@ -90,6 +90,7 @@ fn set_compiler(cfg: &mut gcc::Config,
// compiler already takes into account the triple in question.
t if t.contains("android") => {
if let Some(ndk) = config.and_then(|c| c.ndk.as_ref()) {
let target = target.replace("armv7", "arm");
let compiler = format!("{}-{}", target, gnu_compiler);
cfg.compiler(ndk.join("bin").join(compiler));
}

View file

@ -23,6 +23,9 @@ use build_helper::output;
use bootstrap::{dylib_path, dylib_path_var};
use build::{Build, Compiler, Mode};
use build::util;
const ADB_TEST_DIR: &'static str = "/data/tmp";
/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
///
@ -88,6 +91,7 @@ pub fn compiletest(build: &Build,
target: &str,
mode: &str,
suite: &str) {
println!("Check compiletest {} ({} -> {})", suite, compiler.host, target);
let mut cmd = build.tool_cmd(compiler, "compiletest");
// compiletest currently has... a lot of arguments, so let's just pass all
@ -105,21 +109,23 @@ pub fn compiletest(build: &Build,
cmd.arg("--host").arg(compiler.host);
cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.config.build));
let mut flags = format!("-Crpath");
let mut flags = vec!["-Crpath".to_string()];
if build.config.rust_optimize_tests {
flags.push_str(" -O");
flags.push("-O".to_string());
}
if build.config.rust_debuginfo_tests {
flags.push_str(" -g");
flags.push("-g".to_string());
}
cmd.arg("--host-rustcflags").arg(&flags);
let mut hostflags = build.rustc_flags(&compiler.host);
hostflags.extend(flags.clone());
cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
let linkflag = format!("-Lnative={}", build.test_helpers_out(target).display());
cmd.arg("--target-rustcflags").arg(format!("{} {}", flags, linkflag));
// FIXME: needs android support
cmd.arg("--android-cross-path").arg("");
let mut targetflags = build.rustc_flags(&target);
targetflags.extend(flags);
targetflags.push(format!("-Lnative={}",
build.test_helpers_out(target).display()));
cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
// FIXME: CFG_PYTHON should probably be detected more robustly elsewhere
let python_default = "python";
@ -180,6 +186,16 @@ pub fn compiletest(build: &Build,
}
build.add_bootstrap_key(compiler, &mut cmd);
cmd.arg("--adb-path").arg("adb");
cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
if target.contains("android") {
// Assume that cc for this target comes from the android sysroot
cmd.arg("--android-cross-path")
.arg(build.cc(target).parent().unwrap().parent().unwrap());
} else {
cmd.arg("--android-cross-path").arg("");
}
build.run(&mut cmd);
}
@ -302,7 +318,97 @@ pub fn krate(build: &Build,
let mut dylib_path = dylib_path();
dylib_path.insert(0, build.sysroot_libdir(compiler, target));
cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
cargo.args(&build.flags.args);
build.run(&mut cargo);
if target.contains("android") {
build.run(cargo.arg("--no-run"));
krate_android(build, compiler, target, mode);
} else {
cargo.args(&build.flags.args);
build.run(&mut cargo);
}
}
fn krate_android(build: &Build,
compiler: &Compiler,
target: &str,
mode: Mode) {
let mut tests = Vec::new();
let out_dir = build.cargo_out(compiler, mode, target);
find_tests(&out_dir, target, &mut tests);
find_tests(&out_dir.join("deps"), target, &mut tests);
for test in tests {
build.run(Command::new("adb").arg("push").arg(&test).arg(ADB_TEST_DIR));
let test_file_name = test.file_name().unwrap().to_string_lossy();
let log = format!("{}/check-stage{}-T-{}-H-{}-{}.log",
ADB_TEST_DIR,
compiler.stage,
target,
compiler.host,
test_file_name);
let program = format!("(cd {dir}; \
LD_LIBRARY_PATH=./{target} ./{test} \
--logfile {log} \
{args})",
dir = ADB_TEST_DIR,
target = target,
test = test_file_name,
log = log,
args = build.flags.args.join(" "));
let output = output(Command::new("adb").arg("shell").arg(&program));
println!("{}", output);
build.run(Command::new("adb")
.arg("pull")
.arg(&log)
.arg(build.out.join("tmp")));
build.run(Command::new("adb").arg("shell").arg("rm").arg(&log));
if !output.contains("result: ok") {
panic!("some tests failed");
}
}
}
fn find_tests(dir: &Path,
target: &str,
dst: &mut Vec<PathBuf>) {
for e in t!(dir.read_dir()).map(|e| t!(e)) {
let file_type = t!(e.file_type());
if !file_type.is_file() {
continue
}
let filename = e.file_name().into_string().unwrap();
if (target.contains("windows") && filename.ends_with(".exe")) ||
(!target.contains("windows") && !filename.contains(".")) {
dst.push(e.path());
}
}
}
pub fn android_copy_libs(build: &Build,
compiler: &Compiler,
target: &str) {
println!("Android copy libs to emulator ({})", target);
build.run(Command::new("adb").arg("remount"));
build.run(Command::new("adb").args(&["shell", "rm", "-r", ADB_TEST_DIR]));
build.run(Command::new("adb").args(&["shell", "mkdir", ADB_TEST_DIR]));
build.run(Command::new("adb")
.arg("push")
.arg(build.src.join("src/etc/adb_run_wrapper.sh"))
.arg(ADB_TEST_DIR));
let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]]));
for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
let f = t!(f);
let name = f.file_name().into_string().unwrap();
if util::is_dylib(&name) {
build.run(Command::new("adb")
.arg("push")
.arg(f.path())
.arg(&target_dir));
}
}
}

View file

@ -368,13 +368,13 @@ impl Config {
target.ndk = Some(PathBuf::from(value));
}
"CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => {
let target = "i686-linux-androideabi".to_string();
let target = "i686-linux-android".to_string();
let target = self.target_config.entry(target)
.or_insert(Target::default());
target.ndk = Some(PathBuf::from(value));
}
"CFG_AARCH64_LINUX_ANDROID_NDK" if value.len() > 0 => {
let target = "aarch64-linux-androideabi".to_string();
let target = "aarch64-linux-android".to_string();
let target = self.target_config.entry(target)
.or_insert(Target::default());
target.ndk = Some(PathBuf::from(value));

View file

@ -135,7 +135,6 @@ pub fn rustc(build: &Build, stage: u32, host: &str) {
// Prepare the overlay which is part of the tarball but won't actually be
// installed
t!(fs::create_dir_all(&overlay));
let cp = |file: &str| {
install(&build.src.join(file), &overlay, 0o644);
};
@ -199,7 +198,6 @@ pub fn rustc(build: &Build, stage: u32, host: &str) {
// Copy runtime DLLs needed by the compiler
if libdir != "bin" {
t!(fs::create_dir_all(image.join(libdir)));
for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) {
let name = entry.file_name();
if let Some(s) = name.to_str() {
@ -221,7 +219,6 @@ pub fn rustc(build: &Build, stage: u32, host: &str) {
let cp = |file: &str| {
install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
};
t!(fs::create_dir_all(&image.join("share/doc/rust")));
cp("COPYRIGHT");
cp("LICENSE-APACHE");
cp("LICENSE-MIT");
@ -289,6 +286,7 @@ pub fn std(build: &Build, compiler: &Compiler, target: &str) {
fn install(src: &Path, dstdir: &Path, perms: u32) {
let dst = dstdir.join(src.file_name().unwrap());
t!(fs::create_dir_all(dstdir));
t!(fs::copy(src, &dst));
chmod(&dst, perms);
}

View file

@ -128,6 +128,7 @@ pub struct Build {
///
/// These entries currently correspond to the various output directories of the
/// build system, with each mod generating output in a different directory.
#[derive(Clone, Copy)]
pub enum Mode {
/// This cargo is going to build the standard library, placing output in the
/// "stageN-std" directory.
@ -383,8 +384,7 @@ impl Build {
"ui", "ui");
}
CheckDebuginfo { compiler } => {
if target.target.contains("msvc") ||
target.target.contains("android") {
if target.target.contains("msvc") {
// nothing to do
} else if target.target.contains("apple") {
check::compiletest(self, &compiler, target.target,
@ -434,8 +434,14 @@ impl Build {
target.target);
}
AndroidCopyLibs { compiler } => {
check::android_copy_libs(self, &compiler, target.target);
}
// pseudo-steps
Dist { .. } |
Doc { .. } | // pseudo-steps
Doc { .. } |
CheckTarget { .. } |
Check { .. } => {}
}
}

View file

@ -49,6 +49,8 @@ pub fn llvm(build: &Build, target: &str) {
return
}
println!("Building LLVM for {}", target);
let _ = fs::remove_dir_all(&dst.join("build"));
t!(fs::create_dir_all(&dst.join("build")));
let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
@ -167,8 +169,10 @@ pub fn compiler_rt(build: &Build, target: &str) {
"arm" if target.contains("eabihf") => "armhf",
_ => arch,
};
let target = format!("clang_rt.builtins-{}{}", builtins_arch, os_extra);
("linux".to_string(), target.clone(), target)
let target = format!("clang_rt.builtins-{}", builtins_arch);
("linux".to_string(),
target.clone(),
format!("{}{}", target, os_extra))
} else if target.contains("apple-darwin") {
let builtins_arch = match arch {
"i686" => "i386",

View file

@ -139,6 +139,10 @@ $ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
");
}
}
if target.contains("arm-linux-android") {
need_cmd("adb".as_ref());
}
}
for host in build.flags.host.iter() {

View file

@ -102,6 +102,7 @@ macro_rules! targets {
// Steps for running tests. The 'check' target is just a pseudo
// target to depend on a bunch of others.
(check, Check { stage: u32, compiler: Compiler<'a> }),
(check_target, CheckTarget { stage: u32, compiler: Compiler<'a> }),
(check_linkcheck, CheckLinkcheck { stage: u32 }),
(check_cargotest, CheckCargoTest { stage: u32 }),
(check_tidy, CheckTidy { stage: u32 }),
@ -138,6 +139,9 @@ macro_rules! targets {
(dist_mingw, DistMingw { _dummy: () }),
(dist_rustc, DistRustc { stage: u32 }),
(dist_std, DistStd { compiler: Compiler<'a> }),
// Misc targets
(android_copy_libs, AndroidCopyLibs { compiler: Compiler<'a> }),
}
}
}
@ -382,37 +386,80 @@ impl<'a> Step<'a> {
self.doc_error_index(stage)]
}
Source::Check { stage, compiler } => {
vec![
// Check is just a pseudo step which means check all targets,
// so just depend on checking all targets.
build.config.target.iter().map(|t| {
self.target(t).check_target(stage, compiler)
}).collect()
}
Source::CheckTarget { stage, compiler } => {
// CheckTarget here means run all possible test suites for this
// target. Most of the time, however, we can't actually run
// anything if we're not the build triple as we could be cross
// compiling.
//
// As a result, the base set of targets here is quite stripped
// down from the standard set of targets. These suites have
// their own internal logic to run in cross-compiled situations
// if they'll run at all. For example compiletest knows that
// when testing Android targets we ship artifacts to the
// emulator.
//
// When in doubt the rule of thumb for adding to this list is
// "should this test suite run on the android bot?"
let mut base = vec![
self.check_rpass(compiler),
self.check_rpass_full(compiler),
self.check_rfail(compiler),
self.check_rfail_full(compiler),
self.check_cfail(compiler),
self.check_cfail_full(compiler),
self.check_pfail(compiler),
self.check_incremental(compiler),
self.check_ui(compiler),
self.check_crate_std(compiler),
self.check_crate_test(compiler),
self.check_crate_rustc(compiler),
self.check_codegen(compiler),
self.check_codegen_units(compiler),
self.check_debuginfo(compiler),
self.check_rustdoc(compiler),
self.check_pretty(compiler),
self.check_pretty_rpass(compiler),
self.check_pretty_rpass_full(compiler),
self.check_pretty_rfail(compiler),
self.check_pretty_rfail_full(compiler),
self.check_pretty_rpass_valgrind(compiler),
self.check_rpass_valgrind(compiler),
self.check_error_index(compiler),
self.check_docs(compiler),
self.check_rmake(compiler),
self.check_linkcheck(stage),
self.check_tidy(stage),
self.dist(stage),
]
];
// If we're testing the build triple, then we know we can
// actually run binaries and such, so we run all possible tests
// that we know about.
if self.target == build.config.build {
base.extend(vec![
// docs-related
self.check_docs(compiler),
self.check_error_index(compiler),
self.check_rustdoc(compiler),
// UI-related
self.check_cfail(compiler),
self.check_pfail(compiler),
self.check_ui(compiler),
// codegen-related
self.check_incremental(compiler),
self.check_codegen(compiler),
self.check_codegen_units(compiler),
// misc compiletest-test suites
self.check_rpass_full(compiler),
self.check_rfail_full(compiler),
self.check_cfail_full(compiler),
self.check_pretty_rpass_full(compiler),
self.check_pretty_rfail_full(compiler),
self.check_rpass_valgrind(compiler),
self.check_rmake(compiler),
// crates
self.check_crate_rustc(compiler),
// pretty
self.check_pretty(compiler),
self.check_pretty_rpass(compiler),
self.check_pretty_rfail(compiler),
self.check_pretty_rpass_valgrind(compiler),
// misc
self.check_linkcheck(stage),
self.check_tidy(stage),
]);
}
return base
}
Source::CheckLinkcheck { stage } => {
vec![self.tool_linkchecker(stage), self.doc(stage)]
@ -437,16 +484,20 @@ impl<'a> Step<'a> {
Source::CheckCFail { compiler } |
Source::CheckRPassValgrind { compiler } |
Source::CheckRPass { compiler } => {
vec![
let mut base = vec![
self.libtest(compiler),
self.tool_compiletest(compiler.stage),
self.target(compiler.host).tool_compiletest(compiler.stage),
self.test_helpers(()),
]
];
if self.target.contains("android") {
base.push(self.android_copy_libs(compiler));
}
base
}
Source::CheckDebuginfo { compiler } => {
vec![
self.libtest(compiler),
self.tool_compiletest(compiler.stage),
self.target(compiler.host).tool_compiletest(compiler.stage),
self.test_helpers(()),
self.debugger_scripts(compiler.stage),
]
@ -459,13 +510,14 @@ impl<'a> Step<'a> {
Source::CheckPrettyRPassValgrind { compiler } |
Source::CheckRMake { compiler } => {
vec![self.librustc(compiler),
self.tool_compiletest(compiler.stage)]
self.target(compiler.host).tool_compiletest(compiler.stage)]
}
Source::CheckDocs { compiler } => {
vec![self.libstd(compiler)]
}
Source::CheckErrorIndex { compiler } => {
vec![self.libstd(compiler), self.tool_error_index(compiler.stage)]
vec![self.libstd(compiler),
self.target(compiler.host).tool_error_index(compiler.stage)]
}
Source::CheckCrateStd { compiler } => {
vec![self.libtest(compiler)]
@ -529,6 +581,10 @@ impl<'a> Step<'a> {
}
return base
}
Source::AndroidCopyLibs { compiler } => {
vec![self.libtest(compiler)]
}
}
}
}

View file

@ -25,6 +25,11 @@ all:
clean:
$(Q)$(BOOTSTRAP) --clean
rustc-stage1:
$(Q)$(BOOTSTRAP) --step libtest --stage 1
rustc-stage2:
$(Q)$(BOOTSTRAP) --step libtest --stage 2
docs: doc
doc:
$(Q)$(BOOTSTRAP) --step doc

@ -1 +1 @@
Subproject commit a1ef94b76029780a510bc2dc9c6a791bd091ff19
Subproject commit ac3d1cda612edccb6f1da53cbf7716e248405f3b

View file

@ -521,14 +521,14 @@ against `libc` and `libm` by default.
# The "nullable pointer optimization"
Certain types are defined to not be `null`. This includes references (`&T`,
Certain types are defined to not be NULL. This includes references (`&T`,
`&mut T`), boxes (`Box<T>`), and function pointers (`extern "abi" fn()`).
When interfacing with C, pointers that might be null are often used.
When interfacing with C, pointers that might be NULL are often used.
As a special case, a generic `enum` that contains exactly two variants, one of
which contains no data and the other containing a single field, is eligible
for the "nullable pointer optimization". When such an enum is instantiated
with one of the non-nullable types, it is represented as a single pointer,
and the non-data variant is represented as the null pointer. So
and the non-data variant is represented as the NULL pointer. So
`Option<extern "C" fn(c_int) -> c_int>` is how one represents a nullable
function pointer using the C ABI.

View file

@ -46,6 +46,12 @@ must abide by that constraint.
[traits]: traits.html
### Combinators
Combinators are higher-order functions that apply only functions and
earlier defined combinators to provide a result from their arguments.
They can be used to manage control flow in a modular fashion.
### DST (Dynamically Sized Type)
A type without a statically known size or alignment. ([more info][link])

View file

@ -290,7 +290,7 @@ lifetime parameters using three easily memorizable and unambiguous rules. This m
acts as a shorthand for writing an item signature, while not hiding
away the actual types involved as full local inference would if applied to it.
When talking about lifetime elision, we use the term *input lifetime* and
When talking about lifetime elision, we use the terms *input lifetime* and
*output lifetime*. An *input lifetime* is a lifetime associated with a parameter
of a function, and an *output lifetime* is a lifetime associated with the return
value of a function. For example, this function has an input lifetime:
@ -335,11 +335,13 @@ fn print<'a>(s: &'a str); // expanded
fn debug(lvl: u32, s: &str); // elided
fn debug<'a>(lvl: u32, s: &'a str); // expanded
```
// In the preceding example, `lvl` doesnt need a lifetime because its not a
// reference (`&`). Only things relating to references (such as a `struct`
// which contains a reference) need lifetimes.
In the preceding example, `lvl` doesnt need a lifetime because its not a
reference (`&`). Only things relating to references (such as a `struct`
which contains a reference) need lifetimes.
```rust,ignore
fn substr(s: &str, until: u32) -> &str; // elided
fn substr<'a>(s: &'a str, until: u32) -> &'a str; // expanded

View file

@ -214,7 +214,7 @@ But, unlike a move, we can still use `v` afterward. This is because an `i32`
has no pointers to data somewhere else, copying it is a full copy.
All primitive types implement the `Copy` trait and their ownership is
therefore not moved like one would assume, following the ´ownership rules´.
therefore not moved like one would assume, following the ownership rules.
To give an example, the two following snippets of code only compile because the
`i32` and `bool` types implement the `Copy` trait.
@ -290,6 +290,6 @@ let (v1, v2, answer) = foo(v1, v2);
Ugh! The return type, return line, and calling the function gets way more
complicated.
Luckily, Rust offers a feature, borrowing, which helps us solve this problem.
Its the topic of the next section!
Luckily, Rust offers a feature which helps us solve this problem.
Its called borrowing and is the topic of the next section!

View file

@ -17,7 +17,7 @@ Here are some things to remember about raw pointers that are different than
other pointer types. They:
- are not guaranteed to point to valid memory and are not even
guaranteed to be non-null (unlike both `Box` and `&`);
guaranteed to be non-NULL (unlike both `Box` and `&`);
- do not have any automatic clean-up, unlike `Box`, and so require
manual resource management;
- are plain-old-data, that is, they don't move ownership, again unlike

View file

@ -123,7 +123,7 @@ let v = vec![];
foo(&v);
```
errors with:
will give us this error:
```text
error: cannot borrow immutable borrowed content `*v` as mutable
@ -152,8 +152,8 @@ the thing `y` points at. Youll notice that `x` had to be marked `mut` as well
If it wasnt, we couldnt take a mutable borrow to an immutable value.
You'll also notice we added an asterisk (`*`) in front of `y`, making it `*y`,
this is because `y` is a `&mut` reference. You'll also need to use them for
accessing the contents of a reference as well.
this is because `y` is a `&mut` reference. You'll need to use asterisks to
access the contents of a reference as well.
Otherwise, `&mut` references are like references. There _is_ a large
difference between the two, and how they interact, though. You can tell
@ -179,7 +179,7 @@ As it turns out, there are rules.
# The Rules
Heres the rules about borrowing in Rust:
Here are the rules for borrowing in Rust:
First, any borrow must last for a scope no greater than that of the owner.
Second, you may have one or the other of these two kinds of borrows, but not
@ -208,12 +208,14 @@ With this in mind, lets consider our example again.
Heres the code:
```rust,ignore
let mut x = 5;
let y = &mut x;
fn main() {
let mut x = 5;
let y = &mut x;
*y += 1;
*y += 1;
println!("{}", x);
println!("{}", x);
}
```
This code gives us this error:
@ -225,7 +227,7 @@ error: cannot borrow `x` as immutable because it is also borrowed as mutable
```
This is because weve violated the rules: we have a `&mut T` pointing to `x`,
and so we arent allowed to create any `&T`s. One or the other. The note
and so we arent allowed to create any `&T`s. It's one or the other. The note
hints at how to think about this problem:
```text
@ -243,14 +245,16 @@ In Rust, borrowing is tied to the scope that the borrow is valid for. And our
scopes look like this:
```rust,ignore
let mut x = 5;
fn main() {
let mut x = 5;
let y = &mut x; // -+ &mut borrow of x starts here
// |
*y += 1; // |
// |
println!("{}", x); // -+ - try to borrow x here
// -+ &mut borrow of x ends here
let y = &mut x; // -+ &mut borrow of x starts here
// |
*y += 1; // |
// |
println!("{}", x); // -+ - try to borrow x here
} // -+ &mut borrow of x ends here
```
The scopes conflict: we cant make an `&x` while `y` is in scope.
@ -269,12 +273,12 @@ println!("{}", x); // <- try to borrow x here
```
Theres no problem. Our mutable borrow goes out of scope before we create an
immutable one. But scope is the key to seeing how long a borrow lasts for.
immutable one. So scope is the key to seeing how long a borrow lasts for.
## Issues borrowing prevents
Why have these restrictive rules? Well, as we noted, these rules prevent data
races. What kinds of issues do data races cause? Heres a few.
races. What kinds of issues do data races cause? Here are a few.
### Iterator invalidation
@ -323,7 +327,7 @@ for i in &v {
We cant modify `v` because its borrowed by the loop.
### use after free
### Use after free
References must not live longer than the resource they refer to. Rust will
check the scopes of your references to ensure that this is true.

View file

@ -9,7 +9,7 @@ strings also work differently than in some other systems languages, such as C.
Lets dig into the details. A string is a sequence of Unicode scalar values
encoded as a stream of UTF-8 bytes. All strings are guaranteed to be a valid
encoding of UTF-8 sequences. Additionally, unlike some systems languages,
strings are not null-terminated and can contain null bytes.
strings are not NUL-terminated and can contain NUL bytes.
Rust has two main types of strings: `&str` and `String`. Lets talk about
`&str` first. These are called string slices. A string slice has a fixed

View file

@ -63,7 +63,7 @@ In addition, the following are all undefined behaviors in Rust, and must be
avoided, even when writing `unsafe` code:
* Data races
* Dereferencing a null/dangling raw pointer
* Dereferencing a NULL/dangling raw pointer
* Reads of [undef][undef] (uninitialized) memory
* Breaking the [pointer aliasing rules][aliasing] with raw pointers.
* `&mut T` and `&T` follow LLVMs scoped [noalias][noalias] model, except if
@ -77,7 +77,7 @@ avoided, even when writing `unsafe` code:
* Using `std::ptr::copy_nonoverlapping_memory` (`memcpy32`/`memcpy64`
intrinsics) on overlapping buffers
* Invalid values in primitive types, even in private fields/locals:
* Null/dangling references or boxes
* NULL/dangling references or boxes
* A value other than `false` (0) or `true` (1) in a `bool`
* A discriminant in an `enum` not included in its type definition
* A value in a `char` which is a surrogate or above `char::MAX`

View file

@ -139,7 +139,7 @@ class Type(object):
return TYPE_KIND_STR_SLICE
# REGULAR SLICE
if (unqualified_type_name.startswith("&[") and
if (unqualified_type_name.startswith(("&[", "&mut [")) and
unqualified_type_name.endswith("]") and
self.__conforms_to_field_layout(SLICE_FIELD_NAMES)):
return TYPE_KIND_SLICE

View file

@ -211,15 +211,12 @@ class RustSlicePrinter:
("(len: %i)" % length))
def children(self):
cs = []
(length, data_ptr) = rustpp.extract_length_and_ptr_from_slice(self.__val)
assert data_ptr.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_PTR
raw_ptr = data_ptr.get_wrapped_value()
for index in range(0, length):
cs.append((str(index), (raw_ptr + index).dereference()))
return cs
yield (str(index), (raw_ptr + index).dereference())
class RustStringSlicePrinter:
@ -245,12 +242,10 @@ class RustStdVecPrinter:
("(len: %i, cap: %i)" % (length, cap)))
def children(self):
cs = []
(length, data_ptr, cap) = rustpp.extract_length_ptr_and_cap_from_std_vec(self.__val)
gdb_ptr = data_ptr.get_wrapped_value()
for index in range(0, length):
cs.append((str(index), (gdb_ptr + index).dereference()))
return cs
yield (str(index), (gdb_ptr + index).dereference())
class RustStdStringPrinter:

View file

@ -1,6 +1,6 @@
{
"platform": "aarch64",
"intrinsic_prefix": "aarch64_v",
"platform": "aarch64_v",
"intrinsic_prefix": "",
"llvm_prefix": "llvm.aarch64.neon.",
"number_info": {
"signed": {

View file

@ -1,6 +1,6 @@
{
"platform": "arm",
"intrinsic_prefix": "arm_v",
"platform": "arm_v",
"intrinsic_prefix": "",
"llvm_prefix": "llvm.neon.v",
"number_info": {
"signed": {

View file

@ -26,10 +26,9 @@ SPEC = re.compile(
class PlatformInfo(object):
def __init__(self, json):
self._platform = json['platform']
self._intrinsic_prefix = json['intrinsic_prefix']
def intrinsic_prefix(self):
return self._intrinsic_prefix
def platform_prefix(self):
return self._platform
class IntrinsicSet(object):
def __init__(self, platform, json):
@ -38,6 +37,7 @@ class IntrinsicSet(object):
self._intrinsics = json['intrinsics']
self._widths = json['width_info']
self._platform = platform
self._intrinsic_prefix = json['intrinsic_prefix']
def intrinsics(self):
for raw in self._intrinsics:
@ -48,6 +48,9 @@ class IntrinsicSet(object):
def platform(self):
return self._platform
def intrinsic_prefix(self):
return self._intrinsic_prefix
def llvm_prefix(self):
return self._llvm_prefix
@ -538,8 +541,14 @@ class MonomorphicIntrinsic(object):
*self._args,
width = self._width)
def platform_prefix(self):
return self._platform.platform().platform_prefix()
def intrinsic_set_name(self):
return self._platform.intrinsic_prefix()
def intrinsic_name(self):
return self._platform.platform().intrinsic_prefix() + self.intrinsic_suffix()
return self._platform.intrinsic_prefix() + self.intrinsic_suffix()
def compiler_args(self):
return ', '.join(arg.compiler_ctor_ref() for arg in self._args_raw)
@ -561,6 +570,27 @@ def parse_args():
formatter_class = argparse.RawDescriptionHelpFormatter,
description = 'Render an intrinsic definition JSON to various formats.',
epilog = textwrap.dedent('''\
Quick How-To:
There are two operating modes: single file and multiple files.
For example, ARM is specified as a single file. To generate the
compiler-definitions for ARM just pass the script the "arm.json" file:
python generator.py --format compiler-defs arm.json
The X86 architecture is specified as multiple files (for the different
instruction sets that x86 supports). To generate the compiler
definitions one needs to pass the script a "platform information file"
(with the -i flag) next to the files of the different intruction sets.
For example, to generate the X86 compiler-definitions for SSE4.2, just:
python generator.py --format compiler-defs -i x86/info.json sse42.json
And to generate the compiler-definitions for SSE4.1 and SSE4.2, just:
python generator.py --format compiler-defs -i x86/info.json sse41.json sse42.json
An intrinsic definition consists of a map with fields:
- intrinsic: pattern for the name(s) of the vendor's C intrinsic(s)
- llvm: pattern for the name(s) of the internal llvm intrinsic(s)
@ -730,8 +760,9 @@ class ExternBlock(object):
return 'extern "platform-intrinsic" {'
def render(self, mono):
return ' fn {}{};'.format(mono.intrinsic_name(),
mono.intrinsic_signature())
return ' fn {}{}{};'.format(mono.platform_prefix(),
mono.intrinsic_name(),
mono.intrinsic_signature())
def close(self):
return '}'
@ -765,7 +796,7 @@ use IntrinsicDef::Named;
#[inline(never)]
pub fn find(name: &str) -> Option<Intrinsic> {{
if !name.starts_with("{0}") {{ return None }}
Some(match &name["{0}".len()..] {{'''.format(platform.intrinsic_prefix())
Some(match &name["{0}".len()..] {{'''.format(platform.platform_prefix())
def render(self, mono):
return '''\
@ -773,7 +804,7 @@ pub fn find(name: &str) -> Option<Intrinsic> {{
inputs: {{ static INPUTS: [&'static Type; {}] = [{}]; &INPUTS }},
output: {},
definition: Named("{}")
}},'''.format(mono.intrinsic_suffix(),
}},'''.format(mono.intrinsic_set_name() + mono.intrinsic_suffix(),
len(mono._args_raw),
mono.compiler_args(),
mono.compiler_ret(),

View file

@ -1,4 +1,5 @@
{
"intrinsic_prefix": "_mm",
"llvm_prefix": "llvm.x86.avx.",
"intrinsics": [
{

View file

@ -1,4 +1,5 @@
{
"intrinsic_prefix": "_mm",
"llvm_prefix": "llvm.x86.avx2.",
"intrinsics": [
{

View file

@ -0,0 +1,13 @@
{
"intrinsic_prefix": "_bmi",
"llvm_prefix": "llvm.x86.bmi.",
"intrinsics": [
{
"intrinsic": "_bextr_{0.bitwidth}",
"width": ["0"],
"llvm": "bextr.{0.bitwidth}",
"ret": "S(32-64)u",
"args": ["0", "0"]
}
]
}

View file

@ -0,0 +1,27 @@
{
"intrinsic_prefix": "_bmi2",
"llvm_prefix": "llvm.x86.bmi.",
"intrinsics": [
{
"intrinsic": "_bzhi_{0.bitwidth}",
"width": ["0"],
"llvm": "bzhi.{0.bitwidth}",
"ret": "S(32-64)u",
"args": ["0", "0"]
},
{
"intrinsic": "_pdep_{0.bitwidth}",
"width": ["0"],
"llvm": "pdep.{0.bitwidth}",
"ret": "S(32-64)u",
"args": ["0", "0"]
},
{
"intrinsic": "_pext_{0.bitwidth}",
"width": ["0"],
"llvm": "pext.{0.bitwidth}",
"ret": "S(32-64)u",
"args": ["0", "0"]
}
]
}

View file

@ -1,4 +1,5 @@
{
"intrinsic_prefix": "_mm",
"llvm_prefix": "llvm.x86.fma.",
"intrinsics": [
{

View file

@ -1,26 +1,30 @@
{
"platform": "x86",
"intrinsic_prefix": "x86_mm",
"number_info": {
"signed": {
"kind": "s",
"kind_short": "",
"data_type": { "pattern": "epi{bitwidth}" },
"bitwidth": { "pattern": "{bitwidth}" },
"data_type_short": { "8": "b", "16": "w", "32": "d", "64": "q" }
},
"unsigned": {
"kind": "u",
"kind_short": "u",
"data_type": { "pattern": "epu{bitwidth}" },
"bitwidth": { "pattern": "{bitwidth}" },
"data_type_short": { "8": "b", "16": "w", "32": "d", "64": "q" }
},
"float": {
"kind": "f",
"data_type": { "32": "ps", "64": "pd" },
"bitwidth": { "pattern": "{bitwidth}" },
"data_type_short": { "32": "ps", "64": "pd" }
}
},
"width_info": {
"32": { "width_mm": "32", "width_suffix": "" },
"64": { "width_mm": "64", "width_suffix": "" },
"128": { "width_mm": "", "width_suffix": "" },
"256": { "width_mm": "256", "width_suffix": ".256" },
"512": { "width_mm": "512", "width_suffix": ".512" }

View file

@ -1,4 +1,5 @@
{
"intrinsic_prefix": "_mm",
"llvm_prefix": "llvm.x86.sse.",
"intrinsics": [
{

View file

@ -1,4 +1,5 @@
{
"intrinsic_prefix": "_mm",
"llvm_prefix": "llvm.x86.sse2.",
"intrinsics": [
{

View file

@ -1,4 +1,5 @@
{
"intrinsic_prefix": "_mm",
"llvm_prefix": "llvm.x86.sse3.",
"intrinsics": [
{

View file

@ -1,4 +1,5 @@
{
"intrinsic_prefix": "_mm",
"llvm_prefix": "llvm.x86.sse41.",
"intrinsics": [
{

View file

@ -1,4 +1,5 @@
{
"intrinsic_prefix": "_mm",
"llvm_prefix": "llvm.x86.sse42.",
"intrinsics": [
{

View file

@ -1,4 +1,5 @@
{
"intrinsic_prefix": "_mm",
"llvm_prefix": "llvm.x86.ssse3.",
"intrinsics": [
{

View file

@ -0,0 +1,13 @@
{
"intrinsic_prefix": "_tbm",
"llvm_prefix": "llvm.x86.tbm.",
"intrinsics": [
{
"intrinsic": "_bextri_u{0.bitwidth}",
"width": ["0"],
"llvm": "bextri.u{0.bitwidth}",
"ret": "S(32-64)u",
"args": ["0", "0"]
}
]
}

View file

@ -12,6 +12,17 @@
# Exit if anything fails
set -e
LLDB_VERSION=`lldb --version 2>/dev/null | head -1 | cut -d. -f1`
if [ "$LLDB_VERSION" = "lldb-350" ]
then
echo "***"
echo \
"WARNING: This version of LLDB has known issues with Rust and cannot \
display the contents of local variables!"
echo "***"
fi
# Create a tempfile containing the LLDB script we want to execute on startup
TMPFILE=`mktemp /tmp/rust-lldb-commands.XXXXXX`

View file

@ -825,8 +825,6 @@ impl<T: Ord> BinaryHeap<T> {
/// Basic usage:
///
/// ```
/// #![feature(binary_heap_append)]
///
/// use std::collections::BinaryHeap;
///
/// let v = vec![-10, 1, 2, 3, 3];
@ -840,9 +838,7 @@ impl<T: Ord> BinaryHeap<T> {
/// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
/// assert!(b.is_empty());
/// ```
#[unstable(feature = "binary_heap_append",
reason = "needs to be audited",
issue = "32526")]
#[stable(feature = "binary_heap_append", since = "1.11.0")]
pub fn append(&mut self, other: &mut Self) {
if self.len() < other.len() {
swap(self, other);

View file

@ -559,7 +559,6 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// # Examples
///
/// ```
/// #![feature(btree_append)]
/// use std::collections::BTreeMap;
///
/// let mut a = BTreeMap::new();
@ -583,8 +582,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// assert_eq!(a[&4], "e");
/// assert_eq!(a[&5], "f");
/// ```
#[unstable(feature = "btree_append", reason = "recently added as part of collections reform 2",
issue = "34152")]
#[stable(feature = "btree_append", since = "1.11.0")]
pub fn append(&mut self, other: &mut Self) {
// Do we have to append anything at all?
if other.len() == 0 {
@ -914,7 +912,6 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// Basic usage:
///
/// ```
/// #![feature(btree_split_off)]
/// use std::collections::BTreeMap;
///
/// let mut a = BTreeMap::new();
@ -936,9 +933,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// assert_eq!(b[&17], "d");
/// assert_eq!(b[&41], "e");
/// ```
#[unstable(feature = "btree_split_off",
reason = "recently added as part of collections reform 2",
issue = "34152")]
#[stable(feature = "btree_split_off", since = "1.11.0")]
pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
where K: Borrow<Q>
{

View file

@ -551,7 +551,6 @@ impl<T: Ord> BTreeSet<T> {
/// # Examples
///
/// ```
/// #![feature(btree_append)]
/// use std::collections::BTreeSet;
///
/// let mut a = BTreeSet::new();
@ -575,8 +574,7 @@ impl<T: Ord> BTreeSet<T> {
/// assert!(a.contains(&4));
/// assert!(a.contains(&5));
/// ```
#[unstable(feature = "btree_append", reason = "recently added as part of collections reform 2",
issue = "34152")]
#[stable(feature = "btree_append", since = "1.11.0")]
pub fn append(&mut self, other: &mut Self) {
self.map.append(&mut other.map);
}
@ -589,7 +587,6 @@ impl<T: Ord> BTreeSet<T> {
/// Basic usage:
///
/// ```
/// #![feature(btree_split_off)]
/// use std::collections::BTreeMap;
///
/// let mut a = BTreeMap::new();
@ -611,9 +608,7 @@ impl<T: Ord> BTreeSet<T> {
/// assert_eq!(b[&17], "d");
/// assert_eq!(b[&41], "e");
/// ```
#[unstable(feature = "btree_split_off",
reason = "recently added as part of collections reform 2",
issue = "34152")]
#[stable(feature = "btree_split_off", since = "1.11.0")]
pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self where T: Borrow<Q> {
BTreeSet { map: self.map.split_off(key) }
}

View file

@ -38,7 +38,6 @@
#![feature(fmt_internals)]
#![feature(heap_api)]
#![feature(inclusive_range)]
#![feature(iter_arith)]
#![feature(lang_items)]
#![feature(nonzero)]
#![feature(pattern)]

View file

@ -11,18 +11,14 @@
#![deny(warnings)]
#![feature(binary_heap_extras)]
#![feature(binary_heap_append)]
#![feature(binary_heap_peek_mut)]
#![feature(box_syntax)]
#![feature(btree_append)]
#![feature(btree_split_off)]
#![feature(btree_range)]
#![feature(collections)]
#![feature(collections_bound)]
#![feature(const_fn)]
#![feature(fn_traits)]
#![feature(enumset)]
#![feature(iter_arith)]
#![feature(linked_list_contains)]
#![feature(pattern)]
#![feature(rand)]

View file

@ -238,7 +238,7 @@ impl<T:Copy> Cell<T> {
/// This call borrows `Cell` mutably (at compile-time) which guarantees
/// that we possess the only reference.
#[inline]
#[unstable(feature = "cell_get_mut", issue = "33444")]
#[stable(feature = "cell_get_mut", since = "1.11.0")]
pub fn get_mut(&mut self) -> &mut T {
unsafe {
&mut *self.value.get()
@ -509,7 +509,7 @@ impl<T: ?Sized> RefCell<T> {
/// This call borrows `RefCell` mutably (at compile-time) so there is no
/// need for dynamic checks.
#[inline]
#[unstable(feature = "cell_get_mut", issue="33444")]
#[stable(feature = "cell_get_mut", since = "1.11.0")]
pub fn get_mut(&mut self) -> &mut T {
unsafe {
&mut *self.value.get()

View file

@ -840,11 +840,8 @@ pub fn write(output: &mut Write, args: Arguments) -> Result {
}
// There can be only one trailing string piece left.
match pieces.next() {
Some(piece) => {
formatter.buf.write_str(*piece)?;
}
None => {}
if let Some(piece) = pieces.next() {
formatter.buf.write_str(*piece)?;
}
Ok(())

View file

@ -11,18 +11,16 @@
use clone::Clone;
use cmp::{Ord, PartialOrd, PartialEq, Ordering};
use default::Default;
use num::{Zero, One};
use ops::{Add, FnMut, Mul};
use ops::FnMut;
use option::Option::{self, Some, None};
use marker::Sized;
use super::{Chain, Cycle, Cloned, Enumerate, Filter, FilterMap, FlatMap, Fuse,
Inspect, Map, Peekable, Scan, Skip, SkipWhile, Take, TakeWhile, Rev,
Zip};
use super::{Chain, Cycle, Cloned, Enumerate, Filter, FilterMap, FlatMap, Fuse};
use super::{Inspect, Map, Peekable, Scan, Skip, SkipWhile, Take, TakeWhile, Rev};
use super::{Zip, Sum, Product};
use super::ChainState;
use super::{DoubleEndedIterator, ExactSizeIterator, Extend, FromIterator,
IntoIterator};
use super::ZipImpl;
use super::{DoubleEndedIterator, ExactSizeIterator, Extend, FromIterator};
use super::{IntoIterator, ZipImpl};
fn _assert_is_object_safe(_: &Iterator<Item=()>) {}
@ -1820,36 +1818,41 @@ pub trait Iterator {
///
/// An empty iterator returns the zero value of the type.
///
/// # Panics
///
/// When calling `sum` and a primitive integer type is being returned, this
/// method will panic if the computation overflows.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// #![feature(iter_arith)]
///
/// let a = [1, 2, 3];
/// let sum: i32 = a.iter().sum();
///
/// assert_eq!(sum, 6);
/// ```
#[unstable(feature = "iter_arith", reason = "bounds recently changed",
issue = "27739")]
fn sum<S>(self) -> S where
S: Add<Self::Item, Output=S> + Zero,
Self: Sized,
#[stable(feature = "iter_arith", since = "1.11.0")]
fn sum<S>(self) -> S
where Self: Sized,
S: Sum<Self::Item>,
{
self.fold(Zero::zero(), |s, e| s + e)
Sum::sum(self)
}
/// Iterates over the entire iterator, multiplying all the elements
///
/// An empty iterator returns the one value of the type.
///
/// # Panics
///
/// When calling `product` and a primitive integer type is being returned,
/// this method will panic if the computation overflows.
///
/// # Examples
///
/// ```
/// #![feature(iter_arith)]
///
/// fn factorial(n: u32) -> u32 {
/// (1..).take_while(|&i| i <= n).product()
/// }
@ -1857,13 +1860,12 @@ pub trait Iterator {
/// assert_eq!(factorial(1), 1);
/// assert_eq!(factorial(5), 120);
/// ```
#[unstable(feature="iter_arith", reason = "bounds recently changed",
issue = "27739")]
fn product<P>(self) -> P where
P: Mul<Self::Item, Output=P> + One,
Self: Sized,
#[stable(feature = "iter_arith", since = "1.11.0")]
fn product<P>(self) -> P
where Self: Sized,
P: Product<Self::Item>,
{
self.fold(One::one(), |p, e| p * e)
Product::product(self)
}
/// Lexicographically compares the elements of this `Iterator` with those

View file

@ -327,8 +327,9 @@ pub use self::sources::{Empty, empty};
pub use self::sources::{Once, once};
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::traits::{FromIterator, IntoIterator, DoubleEndedIterator, Extend,
ExactSizeIterator};
pub use self::traits::{FromIterator, IntoIterator, DoubleEndedIterator, Extend};
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::traits::{ExactSizeIterator, Sum, Product};
mod iterator;
mod range;

View file

@ -11,7 +11,6 @@
use clone::Clone;
use cmp::PartialOrd;
use mem;
use num::{Zero, One};
use ops::{self, Add, Sub};
use option::Option::{self, Some, None};
use marker::Sized;
@ -36,6 +35,24 @@ pub trait Step: PartialOrd + Sized {
/// Returns `None` if it is not possible to calculate `steps_between`
/// without overflow.
fn steps_between(start: &Self, end: &Self, by: &Self) -> Option<usize>;
/// Same as `steps_between`, but with a `by` of 1
fn steps_between_by_one(start: &Self, end: &Self) -> Option<usize>;
/// Tests whether this step is negative or not (going backwards)
fn is_negative(&self) -> bool;
/// Replaces this step with `1`, returning itself
fn replace_one(&mut self) -> Self;
/// Replaces this step with `0`, returning itself
fn replace_zero(&mut self) -> Self;
/// Adds one to this step, returning the result
fn add_one(&self) -> Self;
/// Subtracts one to this step, returning the result
fn sub_one(&self) -> Self;
}
macro_rules! step_impl_unsigned {
@ -65,6 +82,36 @@ macro_rules! step_impl_unsigned {
Some(0)
}
}
#[inline]
fn is_negative(&self) -> bool {
false
}
#[inline]
fn replace_one(&mut self) -> Self {
mem::replace(self, 0)
}
#[inline]
fn replace_zero(&mut self) -> Self {
mem::replace(self, 1)
}
#[inline]
fn add_one(&self) -> Self {
*self + 1
}
#[inline]
fn sub_one(&self) -> Self {
*self - 1
}
#[inline]
fn steps_between_by_one(start: &Self, end: &Self) -> Option<usize> {
Self::steps_between(start, end, &1)
}
}
)*)
}
@ -106,6 +153,36 @@ macro_rules! step_impl_signed {
Some(diff / by_u)
}
}
#[inline]
fn is_negative(&self) -> bool {
*self < 0
}
#[inline]
fn replace_one(&mut self) -> Self {
mem::replace(self, 0)
}
#[inline]
fn replace_zero(&mut self) -> Self {
mem::replace(self, 1)
}
#[inline]
fn add_one(&self) -> Self {
*self + 1
}
#[inline]
fn sub_one(&self) -> Self {
*self - 1
}
#[inline]
fn steps_between_by_one(start: &Self, end: &Self) -> Option<usize> {
Self::steps_between(start, end, &1)
}
}
)*)
}
@ -124,6 +201,37 @@ macro_rules! step_impl_no_between {
fn steps_between(_a: &$t, _b: &$t, _by: &$t) -> Option<usize> {
None
}
#[inline]
#[allow(unused_comparisons)]
fn is_negative(&self) -> bool {
*self < 0
}
#[inline]
fn replace_one(&mut self) -> Self {
mem::replace(self, 0)
}
#[inline]
fn replace_zero(&mut self) -> Self {
mem::replace(self, 1)
}
#[inline]
fn add_one(&self) -> Self {
*self + 1
}
#[inline]
fn sub_one(&self) -> Self {
*self - 1
}
#[inline]
fn steps_between_by_one(start: &Self, end: &Self) -> Option<usize> {
Self::steps_between(start, end, &1)
}
}
)*)
}
@ -269,12 +377,12 @@ impl<A> Iterator for StepBy<A, ops::RangeFrom<A>> where
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Step + Zero + Clone> Iterator for StepBy<A, ops::Range<A>> {
impl<A: Step + Clone> Iterator for StepBy<A, ops::Range<A>> {
type Item = A;
#[inline]
fn next(&mut self) -> Option<A> {
let rev = self.step_by < A::zero();
let rev = self.step_by.is_negative();
if (rev && self.range.start > self.range.end) ||
(!rev && self.range.start < self.range.end)
{
@ -308,7 +416,7 @@ impl<A: Step + Zero + Clone> Iterator for StepBy<A, ops::Range<A>> {
#[unstable(feature = "inclusive_range",
reason = "recently added, follows RFC",
issue = "28237")]
impl<A: Step + Zero + Clone> Iterator for StepBy<A, ops::RangeInclusive<A>> {
impl<A: Step + Clone> Iterator for StepBy<A, ops::RangeInclusive<A>> {
type Item = A;
#[inline]
@ -322,8 +430,7 @@ impl<A: Step + Zero + Clone> Iterator for StepBy<A, ops::RangeInclusive<A>> {
Empty { .. } => return None, // empty iterators yield no values
NonEmpty { ref mut start, ref mut end } => {
let zero = A::zero();
let rev = self.step_by < zero;
let rev = self.step_by.is_negative();
// march start towards (maybe past!) end and yield the old value
if (rev && start >= end) ||
@ -342,7 +449,7 @@ impl<A: Step + Zero + Clone> Iterator for StepBy<A, ops::RangeInclusive<A>> {
}
} else {
// found range in inconsistent state (start at or past end), so become empty
(Some(mem::replace(end, zero)), None)
(Some(end.replace_zero()), None)
}
}
};
@ -386,7 +493,7 @@ macro_rules! range_exact_iter_impl {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Step + One> Iterator for ops::Range<A> where
impl<A: Step> Iterator for ops::Range<A> where
for<'a> &'a A: Add<&'a A, Output = A>
{
type Item = A;
@ -394,7 +501,7 @@ impl<A: Step + One> Iterator for ops::Range<A> where
#[inline]
fn next(&mut self) -> Option<A> {
if self.start < self.end {
let mut n = &self.start + &A::one();
let mut n = self.start.add_one();
mem::swap(&mut n, &mut self.start);
Some(n)
} else {
@ -404,7 +511,7 @@ impl<A: Step + One> Iterator for ops::Range<A> where
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match Step::steps_between(&self.start, &self.end, &A::one()) {
match Step::steps_between_by_one(&self.start, &self.end) {
Some(hint) => (hint, Some(hint)),
None => (0, None)
}
@ -416,14 +523,14 @@ impl<A: Step + One> Iterator for ops::Range<A> where
range_exact_iter_impl!(usize u8 u16 u32 isize i8 i16 i32);
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Step + One + Clone> DoubleEndedIterator for ops::Range<A> where
impl<A: Step + Clone> DoubleEndedIterator for ops::Range<A> where
for<'a> &'a A: Add<&'a A, Output = A>,
for<'a> &'a A: Sub<&'a A, Output = A>
{
#[inline]
fn next_back(&mut self) -> Option<A> {
if self.start < self.end {
self.end = &self.end - &A::one();
self.end = self.end.sub_one();
Some(self.end.clone())
} else {
None
@ -432,21 +539,21 @@ impl<A: Step + One + Clone> DoubleEndedIterator for ops::Range<A> where
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Step + One> Iterator for ops::RangeFrom<A> where
impl<A: Step> Iterator for ops::RangeFrom<A> where
for<'a> &'a A: Add<&'a A, Output = A>
{
type Item = A;
#[inline]
fn next(&mut self) -> Option<A> {
let mut n = &self.start + &A::one();
let mut n = self.start.add_one();
mem::swap(&mut n, &mut self.start);
Some(n)
}
}
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
impl<A: Step + One> Iterator for ops::RangeInclusive<A> where
impl<A: Step> Iterator for ops::RangeInclusive<A> where
for<'a> &'a A: Add<&'a A, Output = A>
{
type Item = A;
@ -463,23 +570,22 @@ impl<A: Step + One> Iterator for ops::RangeInclusive<A> where
NonEmpty { ref mut start, ref mut end } => {
if start == end {
(Some(mem::replace(end, A::one())), Some(mem::replace(start, A::one())))
(Some(end.replace_one()), Some(start.replace_one()))
} else if start < end {
let one = A::one();
let mut n = &*start + &one;
let mut n = start.add_one();
mem::swap(&mut n, start);
// if the iterator is done iterating, it will change from NonEmpty to Empty
// to avoid unnecessary drops or clones, we'll reuse either start or end
// (they are equal now, so it doesn't matter which)
// to pull out end, we need to swap something back in -- use the previously
// created A::one() as a dummy value
// if the iterator is done iterating, it will change from
// NonEmpty to Empty to avoid unnecessary drops or clones,
// we'll reuse either start or end (they are equal now, so
// it doesn't matter which) to pull out end, we need to swap
// something back in
(if n == *end { Some(mem::replace(end, one)) } else { None },
(if n == *end { Some(end.replace_one()) } else { None },
// ^ are we done yet?
Some(n)) // < the value to output
} else {
(Some(mem::replace(start, A::one())), None)
(Some(start.replace_one()), None)
}
}
};
@ -500,7 +606,7 @@ impl<A: Step + One> Iterator for ops::RangeInclusive<A> where
Empty { .. } => (0, Some(0)),
NonEmpty { ref start, ref end } =>
match Step::steps_between(start, end, &A::one()) {
match Step::steps_between_by_one(start, end) {
Some(hint) => (hint.saturating_add(1), hint.checked_add(1)),
None => (0, None),
}
@ -509,7 +615,7 @@ impl<A: Step + One> Iterator for ops::RangeInclusive<A> where
}
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
impl<A: Step + One> DoubleEndedIterator for ops::RangeInclusive<A> where
impl<A: Step> DoubleEndedIterator for ops::RangeInclusive<A> where
for<'a> &'a A: Add<&'a A, Output = A>,
for<'a> &'a A: Sub<&'a A, Output = A>
{
@ -524,16 +630,15 @@ impl<A: Step + One> DoubleEndedIterator for ops::RangeInclusive<A> where
NonEmpty { ref mut start, ref mut end } => {
if start == end {
(Some(mem::replace(start, A::one())), Some(mem::replace(end, A::one())))
(Some(start.replace_one()), Some(end.replace_one()))
} else if start < end {
let one = A::one();
let mut n = &*end - &one;
let mut n = end.sub_one();
mem::swap(&mut n, end);
(if n == *start { Some(mem::replace(start, one)) } else { None },
(if n == *start { Some(start.replace_one()) } else { None },
Some(n))
} else {
(Some(mem::replace(end, A::one())), None)
(Some(end.replace_one()), None)
}
}
};

View file

@ -524,3 +524,104 @@ pub trait ExactSizeIterator: Iterator {
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, I: ExactSizeIterator + ?Sized> ExactSizeIterator for &'a mut I {}
/// Trait to represent types that can be created by summing up an iterator.
///
/// This trait is used to implement the `sum` method on iterators. Types which
/// implement the trait can be generated by the `sum` method. Like
/// `FromIterator` this trait should rarely be called directly and instead
/// interacted with through `Iterator::sum`.
#[unstable(feature = "iter_arith_traits", issue = "34529")]
pub trait Sum<A = Self>: Sized {
/// Method which takes an iterator and generates `Self` from the elements by
/// "summing up" the items.
fn sum<I: Iterator<Item=A>>(iter: I) -> Self;
}
/// Trait to represent types that can be created by multiplying elements of an
/// iterator.
///
/// This trait is used to implement the `product` method on iterators. Types
/// which implement the trait can be generated by the `product` method. Like
/// `FromIterator` this trait should rarely be called directly and instead
/// interacted with through `Iterator::product`.
#[unstable(feature = "iter_arith_traits", issue = "34529")]
pub trait Product<A = Self>: Sized {
/// Method which takes an iterator and generates `Self` from the elements by
/// multiplying the items.
fn product<I: Iterator<Item=A>>(iter: I) -> Self;
}
macro_rules! integer_sum_product {
($($a:ident)*) => ($(
#[unstable(feature = "iter_arith_traits", issue = "34529")]
impl Sum for $a {
fn sum<I: Iterator<Item=$a>>(iter: I) -> $a {
iter.fold(0, |a, b| {
a.checked_add(b).expect("overflow in sum")
})
}
}
#[unstable(feature = "iter_arith_traits", issue = "34529")]
impl Product for $a {
fn product<I: Iterator<Item=$a>>(iter: I) -> $a {
iter.fold(1, |a, b| {
a.checked_mul(b).expect("overflow in product")
})
}
}
#[unstable(feature = "iter_arith_traits", issue = "34529")]
impl<'a> Sum<&'a $a> for $a {
fn sum<I: Iterator<Item=&'a $a>>(iter: I) -> $a {
iter.fold(0, |a, b| {
a.checked_add(*b).expect("overflow in sum")
})
}
}
#[unstable(feature = "iter_arith_traits", issue = "34529")]
impl<'a> Product<&'a $a> for $a {
fn product<I: Iterator<Item=&'a $a>>(iter: I) -> $a {
iter.fold(1, |a, b| {
a.checked_mul(*b).expect("overflow in product")
})
}
}
)*)
}
macro_rules! float_sum_product {
($($a:ident)*) => ($(
#[unstable(feature = "iter_arith_traits", issue = "34529")]
impl Sum for $a {
fn sum<I: Iterator<Item=$a>>(iter: I) -> $a {
iter.fold(0.0, |a, b| a + b)
}
}
#[unstable(feature = "iter_arith_traits", issue = "34529")]
impl Product for $a {
fn product<I: Iterator<Item=$a>>(iter: I) -> $a {
iter.fold(1.0, |a, b| a * b)
}
}
#[unstable(feature = "iter_arith_traits", issue = "34529")]
impl<'a> Sum<&'a $a> for $a {
fn sum<I: Iterator<Item=&'a $a>>(iter: I) -> $a {
iter.fold(0.0, |a, b| a + *b)
}
}
#[unstable(feature = "iter_arith_traits", issue = "34529")]
impl<'a> Product<&'a $a> for $a {
fn product<I: Iterator<Item=&'a $a>>(iter: I) -> $a {
iter.fold(1.0, |a, b| a * *b)
}
}
)*)
}
integer_sum_product! { i8 i16 i32 i64 isize u8 u16 u32 u64 usize }
float_sum_product! { f32 f64 }

View file

@ -59,6 +59,8 @@
#![deny(missing_debug_implementations)]
#![cfg_attr(not(stage0), deny(warnings))]
#![cfg_attr(stage0, allow(unused_attributes))]
#![feature(allow_internal_unstable)]
#![feature(asm)]
#![feature(associated_type_defaults)]

View file

@ -321,7 +321,7 @@ pub fn algorithm_m<T: RawFloat>(f: &Big, e: i16) -> T {
return underflow(x, v, rem);
}
if k > T::max_exp_int() {
return T::infinity();
return T::infinity2();
}
if x < min_sig {
u.mul_pow2(1);

View file

@ -215,11 +215,11 @@ fn dec2flt<T: RawFloat>(s: &str) -> Result<T, ParseFloatError> {
let (sign, s) = extract_sign(s);
let flt = match parse_decimal(s) {
ParseResult::Valid(decimal) => convert(decimal)?,
ParseResult::ShortcutToInf => T::infinity(),
ParseResult::ShortcutToZero => T::zero(),
ParseResult::ShortcutToInf => T::infinity2(),
ParseResult::ShortcutToZero => T::zero2(),
ParseResult::Invalid => match s {
"inf" => T::infinity(),
"NaN" => T::nan(),
"inf" => T::infinity2(),
"NaN" => T::nan2(),
_ => { return Err(pfe_invalid()); }
}
};
@ -316,7 +316,7 @@ fn bound_intermediate_digits(decimal: &Decimal, e: i64) -> u64 {
fn trivial_cases<T: RawFloat>(decimal: &Decimal) -> Option<T> {
// There were zeros but they were stripped by simplify()
if decimal.integral.is_empty() && decimal.fractional.is_empty() {
return Some(T::zero());
return Some(T::zero2());
}
// This is a crude approximation of ceil(log10(the real value)). We don't need to worry too
// much about overflow here because the input length is tiny (at least compared to 2^64) and
@ -324,9 +324,9 @@ fn trivial_cases<T: RawFloat>(decimal: &Decimal) -> Option<T> {
// (which is still 10^19 short of 2^64).
let max_place = decimal.exp + decimal.integral.len() as i64;
if max_place > T::inf_cutoff() {
return Some(T::infinity());
return Some(T::infinity2());
} else if max_place < T::zero_cutoff() {
return Some(T::zero());
return Some(T::zero2());
}
None
}

View file

@ -61,6 +61,27 @@ impl Unpacked {
pub trait RawFloat : Float + Copy + Debug + LowerExp
+ Mul<Output=Self> + Div<Output=Self> + Neg<Output=Self>
{
// suffix of "2" because Float::infinity is deprecated
#[allow(deprecated)]
fn infinity2() -> Self {
Float::infinity()
}
// suffix of "2" because Float::nan is deprecated
#[allow(deprecated)]
fn nan2() -> Self {
Float::nan()
}
// suffix of "2" because Float::zero is deprecated
fn zero2() -> Self;
// suffix of "2" because Float::integer_decode is deprecated
#[allow(deprecated)]
fn integer_decode2(self) -> (u64, i16, i8) {
Float::integer_decode(self)
}
/// Get the raw binary representation of the float.
fn transmute(self) -> u64;
@ -146,6 +167,10 @@ pub trait RawFloat : Float + Copy + Debug + LowerExp
}
impl RawFloat for f32 {
fn zero2() -> Self {
0.0
}
fn sig_bits() -> u8 {
24
}
@ -169,7 +194,7 @@ impl RawFloat for f32 {
}
fn unpack(self) -> Unpacked {
let (sig, exp, _sig) = self.integer_decode();
let (sig, exp, _sig) = self.integer_decode2();
Unpacked::new(sig, exp)
}
@ -198,6 +223,10 @@ impl RawFloat for f32 {
impl RawFloat for f64 {
fn zero2() -> Self {
0.0
}
fn sig_bits() -> u8 {
53
}
@ -220,7 +249,7 @@ impl RawFloat for f64 {
}
fn unpack(self) -> Unpacked {
let (sig, exp, _sig) = self.integer_decode();
let (sig, exp, _sig) = self.integer_decode2();
Unpacked::new(sig, exp)
}
@ -351,7 +380,7 @@ pub fn prev_float<T: RawFloat>(x: T) -> T {
pub fn next_float<T: RawFloat>(x: T) -> T {
match x.classify() {
Nan => panic!("next_float: argument is NaN"),
Infinite => T::infinity(),
Infinite => T::infinity2(),
// This seems too good to be true, but it works.
// 0.0 is encoded as the all-zero word. Subnormals are 0x000m...m where m is the mantissa.
// In particular, the smallest subnormal is 0x0...01 and the largest is 0x000F...F.

View file

@ -168,7 +168,7 @@ impl Float for f32 {
/// Returns `true` if the number is infinite.
#[inline]
fn is_infinite(self) -> bool {
self == Float::infinity() || self == Float::neg_infinity()
self == INFINITY || self == NEG_INFINITY
}
/// Returns `true` if the number is neither infinite or NaN.
@ -230,7 +230,7 @@ impl Float for f32 {
#[inline]
fn signum(self) -> f32 {
if self.is_nan() {
Float::nan()
NAN
} else {
unsafe { intrinsics::copysignf32(1.0, self) }
}
@ -240,14 +240,14 @@ impl Float for f32 {
/// `Float::infinity()`.
#[inline]
fn is_sign_positive(self) -> bool {
self > 0.0 || (1.0 / self) == Float::infinity()
self > 0.0 || (1.0 / self) == INFINITY
}
/// Returns `true` if `self` is negative, including `-0.0` and
/// `Float::neg_infinity()`.
#[inline]
fn is_sign_negative(self) -> bool {
self < 0.0 || (1.0 / self) == Float::neg_infinity()
self < 0.0 || (1.0 / self) == NEG_INFINITY
}
/// Returns the reciprocal (multiplicative inverse) of the number.

View file

@ -168,7 +168,7 @@ impl Float for f64 {
/// Returns `true` if the number is infinite.
#[inline]
fn is_infinite(self) -> bool {
self == Float::infinity() || self == Float::neg_infinity()
self == INFINITY || self == NEG_INFINITY
}
/// Returns `true` if the number is neither infinite or NaN.
@ -230,7 +230,7 @@ impl Float for f64 {
#[inline]
fn signum(self) -> f64 {
if self.is_nan() {
Float::nan()
NAN
} else {
unsafe { intrinsics::copysignf64(1.0, self) }
}
@ -240,14 +240,14 @@ impl Float for f64 {
/// `Float::infinity()`.
#[inline]
fn is_sign_positive(self) -> bool {
self > 0.0 || (1.0 / self) == Float::infinity()
self > 0.0 || (1.0 / self) == INFINITY
}
/// Returns `true` if `self` is negative, including `-0.0` and
/// `Float::neg_infinity()`.
#[inline]
fn is_sign_negative(self) -> bool {
self < 0.0 || (1.0 / self) == Float::neg_infinity()
self < 0.0 || (1.0 / self) == NEG_INFINITY
}
/// Returns the reciprocal (multiplicative inverse) of the number.

View file

@ -13,7 +13,8 @@
use prelude::v1::*;
use {f32, f64};
use num::{Float, FpCategory};
use num::FpCategory;
use num::dec2flt::rawfp::RawFloat;
/// Decoded unsigned finite value, such that:
///
@ -52,7 +53,7 @@ pub enum FullDecoded {
}
/// A floating point type which can be `decode`d.
pub trait DecodableFloat: Float + Copy {
pub trait DecodableFloat: RawFloat + Copy {
/// The minimum positive normalized value.
fn min_pos_norm_value() -> Self;
}
@ -68,7 +69,7 @@ impl DecodableFloat for f64 {
/// Returns a sign (true when negative) and `FullDecoded` value
/// from given floating point number.
pub fn decode<T: DecodableFloat>(v: T) -> (/*negative?*/ bool, FullDecoded) {
let (mant, exp, sign) = v.integer_decode();
let (mant, exp, sign) = v.integer_decode2();
let even = (mant & 1) == 0;
let decoded = match v.classify() {
FpCategory::Nan => FullDecoded::Nan,
@ -82,7 +83,7 @@ pub fn decode<T: DecodableFloat>(v: T) -> (/*negative?*/ bool, FullDecoded) {
exp: exp, inclusive: even })
}
FpCategory::Normal => {
let minnorm = <T as DecodableFloat>::min_pos_norm_value().integer_decode();
let minnorm = <T as DecodableFloat>::min_pos_norm_value().integer_decode2();
if mant == minnorm.0 {
// neighbors: (maxmant, exp - 1) -- (minnormmant, exp) -- (minnormmant + 1, exp)
// where maxmant = minnormmant * 2 - 1

View file

@ -109,6 +109,8 @@ pub mod diy_float;
#[unstable(feature = "zero_one",
reason = "unsure of placement, wants to use associated constants",
issue = "27739")]
#[rustc_deprecated(since = "1.11.0", reason = "no longer used for \
Iterator::sum")]
pub trait Zero: Sized {
/// The "zero" (usually, additive identity) for this type.
fn zero() -> Self;
@ -121,6 +123,8 @@ pub trait Zero: Sized {
#[unstable(feature = "zero_one",
reason = "unsure of placement, wants to use associated constants",
issue = "27739")]
#[rustc_deprecated(since = "1.11.0", reason = "no longer used for \
Iterator::product")]
pub trait One: Sized {
/// The "one" (usually, multiplicative identity) for this type.
fn one() -> Self;
@ -131,6 +135,7 @@ macro_rules! zero_one_impl {
#[unstable(feature = "zero_one",
reason = "unsure of placement, wants to use associated constants",
issue = "27739")]
#[allow(deprecated)]
impl Zero for $t {
#[inline]
fn zero() -> Self { 0 }
@ -138,6 +143,7 @@ macro_rules! zero_one_impl {
#[unstable(feature = "zero_one",
reason = "unsure of placement, wants to use associated constants",
issue = "27739")]
#[allow(deprecated)]
impl One for $t {
#[inline]
fn one() -> Self { 1 }
@ -151,6 +157,7 @@ macro_rules! zero_one_impl_float {
#[unstable(feature = "zero_one",
reason = "unsure of placement, wants to use associated constants",
issue = "27739")]
#[allow(deprecated)]
impl Zero for $t {
#[inline]
fn zero() -> Self { 0.0 }
@ -158,6 +165,7 @@ macro_rules! zero_one_impl_float {
#[unstable(feature = "zero_one",
reason = "unsure of placement, wants to use associated constants",
issue = "27739")]
#[allow(deprecated)]
impl One for $t {
#[inline]
fn one() -> Self { 1.0 }
@ -604,7 +612,7 @@ macro_rules! int_impl {
pub fn saturating_add(self, other: Self) -> Self {
match self.checked_add(other) {
Some(x) => x,
None if other >= Self::zero() => Self::max_value(),
None if other >= 0 => Self::max_value(),
None => Self::min_value(),
}
}
@ -625,7 +633,7 @@ macro_rules! int_impl {
pub fn saturating_sub(self, other: Self) -> Self {
match self.checked_sub(other) {
Some(x) => x,
None if other >= Self::zero() => Self::min_value(),
None if other >= 0 => Self::min_value(),
None => Self::max_value(),
}
}
@ -1064,7 +1072,7 @@ macro_rules! int_impl {
#[rustc_inherit_overflow_checks]
pub fn pow(self, mut exp: u32) -> Self {
let mut base = self;
let mut acc = Self::one();
let mut acc = 1;
while exp > 1 {
if (exp & 1) == 1 {
@ -2092,7 +2100,7 @@ macro_rules! uint_impl {
#[rustc_inherit_overflow_checks]
pub fn pow(self, mut exp: u32) -> Self {
let mut base = self;
let mut acc = Self::one();
let mut acc = 1;
let mut prev_base = self;
let mut base_oflo = false;
@ -2129,8 +2137,7 @@ macro_rules! uint_impl {
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_power_of_two(self) -> bool {
(self.wrapping_sub(Self::one())) & self == Self::zero() &&
!(self == Self::zero())
(self.wrapping_sub(1)) & self == 0 && !(self == 0)
}
/// Returns the smallest power of two greater than or equal to `self`.
@ -2148,7 +2155,7 @@ macro_rules! uint_impl {
#[inline]
pub fn next_power_of_two(self) -> Self {
let bits = size_of::<Self>() * 8;
let one: Self = Self::one();
let one: Self = 1;
one << ((bits - self.wrapping_sub(one).leading_zeros() as usize) % bits)
}
@ -2303,26 +2310,44 @@ pub trait Float: Sized {
/// Returns the NaN value.
#[unstable(feature = "float_extras", reason = "needs removal",
issue = "27752")]
#[rustc_deprecated(since = "1.11.0",
reason = "never really came to fruition and easily \
implementable outside the standard library")]
fn nan() -> Self;
/// Returns the infinite value.
#[unstable(feature = "float_extras", reason = "needs removal",
issue = "27752")]
#[rustc_deprecated(since = "1.11.0",
reason = "never really came to fruition and easily \
implementable outside the standard library")]
fn infinity() -> Self;
/// Returns the negative infinite value.
#[unstable(feature = "float_extras", reason = "needs removal",
issue = "27752")]
#[rustc_deprecated(since = "1.11.0",
reason = "never really came to fruition and easily \
implementable outside the standard library")]
fn neg_infinity() -> Self;
/// Returns -0.0.
#[unstable(feature = "float_extras", reason = "needs removal",
issue = "27752")]
#[rustc_deprecated(since = "1.11.0",
reason = "never really came to fruition and easily \
implementable outside the standard library")]
fn neg_zero() -> Self;
/// Returns 0.0.
#[unstable(feature = "float_extras", reason = "needs removal",
issue = "27752")]
#[rustc_deprecated(since = "1.11.0",
reason = "never really came to fruition and easily \
implementable outside the standard library")]
fn zero() -> Self;
/// Returns 1.0.
#[unstable(feature = "float_extras", reason = "needs removal",
issue = "27752")]
#[rustc_deprecated(since = "1.11.0",
reason = "never really came to fruition and easily \
implementable outside the standard library")]
fn one() -> Self;
/// Returns true if this value is NaN and false otherwise.
@ -2345,6 +2370,9 @@ pub trait Float: Sized {
/// Returns the mantissa, exponent and sign as integers, respectively.
#[unstable(feature = "float_extras", reason = "signature is undecided",
issue = "27752")]
#[rustc_deprecated(since = "1.11.0",
reason = "never really came to fruition and easily \
implementable outside the standard library")]
fn integer_decode(self) -> (u64, i16, i8);
/// Computes the absolute value of `self`. Returns `Float::nan()` if the
@ -2379,12 +2407,10 @@ pub trait Float: Sized {
fn powi(self, n: i32) -> Self;
/// Convert radians to degrees.
#[unstable(feature = "float_extras", reason = "desirability is unclear",
issue = "27752")]
#[stable(feature = "deg_rad_conversions", since="1.7.0")]
fn to_degrees(self) -> Self;
/// Convert degrees to radians.
#[unstable(feature = "float_extras", reason = "desirability is unclear",
issue = "27752")]
#[stable(feature = "deg_rad_conversions", since="1.7.0")]
fn to_radians(self) -> Self;
}

View file

@ -69,9 +69,7 @@
use cmp::PartialOrd;
use fmt;
use convert::From;
use marker::{Sized, Unsize};
use num::One;
/// The `Drop` trait is used to run some code when a value goes out of scope.
/// This is sometimes called a 'destructor'.
@ -1494,7 +1492,6 @@ impl fmt::Debug for RangeFull {
/// # Examples
///
/// ```
/// #![feature(iter_arith)]
/// fn main() {
/// assert_eq!((3..5), std::ops::Range{ start: 3, end: 5 });
/// assert_eq!(3+4+5, (3..6).sum());
@ -1558,7 +1555,6 @@ impl<Idx: PartialOrd<Idx>> Range<Idx> {
/// # Examples
///
/// ```
/// #![feature(iter_arith)]
/// fn main() {
/// assert_eq!((2..), std::ops::RangeFrom{ start: 2 });
/// assert_eq!(2+3+4, (2..).take(3).sum());
@ -1660,7 +1656,7 @@ impl<Idx: PartialOrd<Idx>> RangeTo<Idx> {
/// # Examples
///
/// ```
/// #![feature(inclusive_range,inclusive_range_syntax,iter_arith)]
/// #![feature(inclusive_range,inclusive_range_syntax)]
/// fn main() {
/// assert_eq!((3...5), std::ops::RangeInclusive::NonEmpty{ start: 3, end: 5 });
/// assert_eq!(3+4+5, (3...5).sum());
@ -1714,24 +1710,6 @@ impl<Idx: fmt::Debug> fmt::Debug for RangeInclusive<Idx> {
}
}
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
impl<Idx: PartialOrd + One + Sub<Output=Idx>> From<Range<Idx>> for RangeInclusive<Idx> {
fn from(range: Range<Idx>) -> RangeInclusive<Idx> {
use self::RangeInclusive::*;
if range.start < range.end {
NonEmpty {
start: range.start,
end: range.end - Idx::one() // can't underflow because end > start >= MIN
}
} else {
Empty {
at: range.start
}
}
}
}
#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
impl<Idx: PartialOrd<Idx>> RangeInclusive<Idx> {
/// # Examples

View file

@ -175,8 +175,11 @@
//! }
//!
//! fn write_info(info: &Info) -> io::Result<()> {
//! let mut file = try!(File::create("my_best_friends.txt"));
//! // Early return on error
//! let mut file = match File::create("my_best_friends.txt") {
//! Err(e) => return Err(e),
//! Ok(f) => f,
//! };
//! if let Err(e) = file.write_all(format!("name: {}\n", info.name).as_bytes()) {
//! return Err(e)
//! }

View file

@ -19,9 +19,7 @@
#![feature(core_private_diy_float)]
#![feature(dec2flt)]
#![feature(fixed_size_array)]
#![feature(float_extras)]
#![feature(flt2dec)]
#![feature(iter_arith)]
#![feature(libc)]
#![feature(nonzero)]
#![feature(rand)]

View file

@ -9,9 +9,24 @@
// except according to those terms.
use std::f64;
use std::mem;
use core::num::diy_float::Fp;
use core::num::dec2flt::rawfp::{fp_to_float, prev_float, next_float, round_normal};
fn integer_decode(f: f64) -> (u64, i16, i8) {
let bits: u64 = unsafe { mem::transmute(f) };
let sign: i8 = if bits >> 63 == 0 { 1 } else { -1 };
let mut exponent: i16 = ((bits >> 52) & 0x7ff) as i16;
let mantissa = if exponent == 0 {
(bits & 0xfffffffffffff) << 1
} else {
(bits & 0xfffffffffffff) | 0x10000000000000
};
// Exponent bias + mantissa shift
exponent -= 1023 + 52;
(mantissa, exponent, sign)
}
#[test]
fn fp_to_float_half_to_even() {
fn is_normalized(sig: u64) -> bool {
@ -21,12 +36,12 @@ fn fp_to_float_half_to_even() {
fn conv(sig: u64) -> u64 {
// The significands are perfectly in range, so the exponent should not matter
let (m1, e1, _) = fp_to_float::<f64>(Fp { f: sig, e: 0 }).integer_decode();
let (m1, e1, _) = integer_decode(fp_to_float::<f64>(Fp { f: sig, e: 0 }));
assert_eq!(e1, 0 + 64 - 53);
let (m2, e2, _) = fp_to_float::<f64>(Fp { f: sig, e: 55 }).integer_decode();
let (m2, e2, _) = integer_decode(fp_to_float::<f64>(Fp { f: sig, e: 55 }));
assert_eq!(e2, 55 + 64 - 53);
assert_eq!(m2, m1);
let (m3, e3, _) = fp_to_float::<f64>(Fp { f: sig, e: -78 }).integer_decode();
let (m3, e3, _) = integer_decode(fp_to_float::<f64>(Fp { f: sig, e: -78 }));
assert_eq!(e3, -78 + 64 - 53);
assert_eq!(m3, m2);
m3
@ -65,7 +80,7 @@ const SOME_FLOATS: [f64; 9] =
#[test]
fn human_f64_roundtrip() {
for &x in &SOME_FLOATS {
let (f, e, _) = x.integer_decode();
let (f, e, _) = integer_decode(x);
let fp = Fp { f: f, e: e};
assert_eq!(fp_to_float::<f64>(fp), x);
}

View file

@ -8,7 +8,6 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::f64;
use core::num::flt2dec::estimator::*;
#[test]
@ -54,7 +53,7 @@ fn test_estimate_scaling_factor() {
assert_almost_eq!(estimate_scaling_factor(0x1fffffffffffff, 971), 309);
for i in -1074..972 {
let expected = f64::ldexp(1.0, i).log10().ceil();
let expected = super::ldexp_f64(1.0, i).log10().ceil();
assert_almost_eq!(estimate_scaling_factor(1, i as i16), expected as i16);
}
}

View file

@ -89,6 +89,17 @@ macro_rules! try_fixed {
})
}
fn ldexp_f32(a: f32, b: i32) -> f32 {
ldexp_f64(a as f64, b) as f32
}
fn ldexp_f64(a: f64, b: i32) -> f64 {
extern {
fn ldexp(x: f64, n: i32) -> f64;
}
unsafe { ldexp(a, b) }
}
fn check_exact<F, T>(mut f: F, v: T, vstr: &str, expected: &[u8], expectedk: i16)
where T: DecodableFloat, F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16) {
// use a large enough buffer
@ -237,7 +248,7 @@ pub fn f32_shortest_sanity_test<F>(mut f: F) where F: FnMut(&Decoded, &mut [u8])
// 10^8 * 0.3355443
// 10^8 * 0.33554432
// 10^8 * 0.33554436
check_shortest!(f(f32::ldexp(1.0, 25)) => b"33554432", 8);
check_shortest!(f(ldexp_f32(1.0, 25)) => b"33554432", 8);
// 10^39 * 0.340282326356119256160033759537265639424
// 10^39 * 0.34028234663852885981170418348451692544
@ -252,13 +263,13 @@ pub fn f32_shortest_sanity_test<F>(mut f: F) where F: FnMut(&Decoded, &mut [u8])
// 10^-44 * 0
// 10^-44 * 0.1401298464324817070923729583289916131280...
// 10^-44 * 0.2802596928649634141847459166579832262560...
let minf32 = f32::ldexp(1.0, -149);
let minf32 = ldexp_f32(1.0, -149);
check_shortest!(f(minf32) => b"1", -44);
}
pub fn f32_exact_sanity_test<F>(mut f: F)
where F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16) {
let minf32 = f32::ldexp(1.0, -149);
let minf32 = ldexp_f32(1.0, -149);
check_exact!(f(0.1f32) => b"100000001490116119384765625 ", 0);
check_exact!(f(0.5f32) => b"5 ", 0);
@ -336,7 +347,7 @@ pub fn f64_shortest_sanity_test<F>(mut f: F) where F: FnMut(&Decoded, &mut [u8])
// 10^20 * 0.18446744073709549568
// 10^20 * 0.18446744073709551616
// 10^20 * 0.18446744073709555712
check_shortest!(f(f64::ldexp(1.0, 64)) => b"18446744073709552", 20);
check_shortest!(f(ldexp_f64(1.0, 64)) => b"18446744073709552", 20);
// pathological case: high = 10^23 (exact). tie breaking should always prefer that.
// 10^24 * 0.099999999999999974834176
@ -357,13 +368,13 @@ pub fn f64_shortest_sanity_test<F>(mut f: F) where F: FnMut(&Decoded, &mut [u8])
// 10^-323 * 0
// 10^-323 * 0.4940656458412465441765687928682213723650...
// 10^-323 * 0.9881312916824930883531375857364427447301...
let minf64 = f64::ldexp(1.0, -1074);
let minf64 = ldexp_f64(1.0, -1074);
check_shortest!(f(minf64) => b"5", -323);
}
pub fn f64_exact_sanity_test<F>(mut f: F)
where F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16) {
let minf64 = f64::ldexp(1.0, -1074);
let minf64 = ldexp_f64(1.0, -1074);
check_exact!(f(0.1f64) => b"1000000000000000055511151231257827021181", 0);
check_exact!(f(0.45f64) => b"4500000000000000111022302462515654042363", 0);
@ -616,7 +627,7 @@ pub fn to_shortest_str_test<F>(mut f_: F)
assert_eq!(to_string(f, f32::MAX, Minus, 1, false), format!("34028235{:0>31}.0", ""));
assert_eq!(to_string(f, f32::MAX, Minus, 8, false), format!("34028235{:0>31}.00000000", ""));
let minf32 = f32::ldexp(1.0, -149);
let minf32 = ldexp_f32(1.0, -149);
assert_eq!(to_string(f, minf32, Minus, 0, false), format!("0.{:0>44}1", ""));
assert_eq!(to_string(f, minf32, Minus, 45, false), format!("0.{:0>44}1", ""));
assert_eq!(to_string(f, minf32, Minus, 46, false), format!("0.{:0>44}10", ""));
@ -628,7 +639,7 @@ pub fn to_shortest_str_test<F>(mut f_: F)
assert_eq!(to_string(f, f64::MAX, Minus, 8, false),
format!("17976931348623157{:0>292}.00000000", ""));
let minf64 = f64::ldexp(1.0, -1074);
let minf64 = ldexp_f64(1.0, -1074);
assert_eq!(to_string(f, minf64, Minus, 0, false), format!("0.{:0>323}5", ""));
assert_eq!(to_string(f, minf64, Minus, 324, false), format!("0.{:0>323}5", ""));
assert_eq!(to_string(f, minf64, Minus, 325, false), format!("0.{:0>323}50", ""));
@ -730,7 +741,7 @@ pub fn to_shortest_exp_str_test<F>(mut f_: F)
assert_eq!(to_string(f, f32::MAX, Minus, (-39, 38), false), "3.4028235e38");
assert_eq!(to_string(f, f32::MAX, Minus, (-38, 39), false), format!("34028235{:0>31}", ""));
let minf32 = f32::ldexp(1.0, -149);
let minf32 = ldexp_f32(1.0, -149);
assert_eq!(to_string(f, minf32, Minus, ( -4, 16), false), "1e-45");
assert_eq!(to_string(f, minf32, Minus, (-44, 45), false), "1e-45");
assert_eq!(to_string(f, minf32, Minus, (-45, 44), false), format!("0.{:0>44}1", ""));
@ -742,7 +753,7 @@ pub fn to_shortest_exp_str_test<F>(mut f_: F)
assert_eq!(to_string(f, f64::MAX, Minus, (-309, 308), false),
"1.7976931348623157e308");
let minf64 = f64::ldexp(1.0, -1074);
let minf64 = ldexp_f64(1.0, -1074);
assert_eq!(to_string(f, minf64, Minus, ( -4, 16), false), "5e-324");
assert_eq!(to_string(f, minf64, Minus, (-324, 323), false), format!("0.{:0>323}5", ""));
assert_eq!(to_string(f, minf64, Minus, (-323, 324), false), "5e-324");
@ -874,7 +885,7 @@ pub fn to_exact_exp_str_test<F>(mut f_: F)
assert_eq!(to_string(f, f32::MAX, Minus, 64, false),
"3.402823466385288598117041834845169254400000000000000000000000000e38");
let minf32 = f32::ldexp(1.0, -149);
let minf32 = ldexp_f32(1.0, -149);
assert_eq!(to_string(f, minf32, Minus, 1, false), "1e-45");
assert_eq!(to_string(f, minf32, Minus, 2, false), "1.4e-45");
assert_eq!(to_string(f, minf32, Minus, 4, false), "1.401e-45");
@ -914,7 +925,7 @@ pub fn to_exact_exp_str_test<F>(mut f_: F)
0000000000000000000000000000000000000000000000000000000000000000e308");
// okay, this is becoming tough. fortunately for us, this is almost the worst case.
let minf64 = f64::ldexp(1.0, -1074);
let minf64 = ldexp_f64(1.0, -1074);
assert_eq!(to_string(f, minf64, Minus, 1, false), "5e-324");
assert_eq!(to_string(f, minf64, Minus, 2, false), "4.9e-324");
assert_eq!(to_string(f, minf64, Minus, 4, false), "4.941e-324");
@ -1120,7 +1131,7 @@ pub fn to_exact_fixed_str_test<F>(mut f_: F)
assert_eq!(to_string(f, f32::MAX, Minus, 2, false),
"340282346638528859811704183484516925440.00");
let minf32 = f32::ldexp(1.0, -149);
let minf32 = ldexp_f32(1.0, -149);
assert_eq!(to_string(f, minf32, Minus, 0, false), "0");
assert_eq!(to_string(f, minf32, Minus, 1, false), "0.0");
assert_eq!(to_string(f, minf32, Minus, 2, false), "0.00");
@ -1152,7 +1163,7 @@ pub fn to_exact_fixed_str_test<F>(mut f_: F)
9440758685084551339423045832369032229481658085593321233482747978\
26204144723168738177180919299881250404026184124858368.0000000000");
let minf64 = f64::ldexp(1.0, -1074);
let minf64 = ldexp_f64(1.0, -1074);
assert_eq!(to_string(f, minf64, Minus, 0, false), "0");
assert_eq!(to_string(f, minf64, Minus, 1, false), "0.0");
assert_eq!(to_string(f, minf64, Minus, 10, false), "0.0000000000");

View file

@ -144,9 +144,8 @@ impl Rand for char {
// Rejection sampling. About 0.2% of numbers with at most
// 21-bits are invalid codepoints (surrogates), so this
// will succeed first go almost every time.
match char::from_u32(rng.next_u32() & CHAR_MASK) {
Some(c) => return c,
None => {}
if let Some(c) = char::from_u32(rng.next_u32() & CHAR_MASK) {
return c;
}
}
}

View file

@ -118,8 +118,6 @@ impl DepGraphThreadData {
/// the buffer is full, this may swap.)
#[inline]
pub fn enqueue(&self, message: DepMessage) {
debug!("enqueue: {:?} tasks_pushed={}", message, self.tasks_pushed.get());
// Regardless of whether dep graph construction is enabled, we
// still want to check that we always have a valid task on the
// stack when a read/write/etc event occurs.

View file

@ -1697,13 +1697,10 @@ impl<'a> State<'a> {
self.commasep(Inconsistent, &data.inputs, |s, ty| s.print_type(&ty))?;
word(&mut self.s, ")")?;
match data.output {
None => {}
Some(ref ty) => {
self.space_if_not_bol()?;
self.word_space("->")?;
self.print_type(&ty)?;
}
if let Some(ref ty) = data.output {
self.space_if_not_bol()?;
self.word_space("->")?;
self.print_type(&ty)?;
}
}
}

View file

@ -842,11 +842,8 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
where F: FnMut(&RegionVarBindings<'a, 'gcx, 'tcx>, Region, Region)
{
let vars = TwoRegions { a: a, b: b };
match self.combine_map(t).borrow().get(&vars) {
Some(&c) => {
return ReVar(c);
}
None => {}
if let Some(&c) = self.combine_map(t).borrow().get(&vars) {
return ReVar(c);
}
let c = self.new_region_var(MiscVariable(origin.span()));
self.combine_map(t).borrow_mut().insert(vars, c);

View file

@ -30,7 +30,6 @@
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(enumset)]
#![feature(iter_arith)]
#![feature(libc)]
#![feature(nonzero)]
#![feature(quote)]

View file

@ -1055,13 +1055,10 @@ impl<'a> ast_visit::Visitor for EarlyContext<'a> {
// Output any lints that were previously added to the session.
impl<'a, 'tcx> IdVisitingOperation for LateContext<'a, 'tcx> {
fn visit_id(&mut self, id: ast::NodeId) {
match self.sess().lints.borrow_mut().remove(&id) {
None => {}
Some(lints) => {
debug!("LateContext::visit_id: id={:?} lints={:?}", id, lints);
for (lint_id, span, msg) in lints {
self.span_lint(lint_id.lint, span, &msg[..])
}
if let Some(lints) = self.sess().lints.borrow_mut().remove(&id) {
debug!("LateContext::visit_id: id={:?} lints={:?}", id, lints);
for (lint_id, span, msg) in lints {
self.span_lint(lint_id.lint, span, &msg[..])
}
}
}

View file

@ -168,9 +168,8 @@ fn build_nodeid_to_index(decl: Option<&hir::FnDecl>,
// into cfg itself? i.e. introduce a fn-based flow-graph in
// addition to the current block-based flow-graph, rather than
// have to put traversals like this here?
match decl {
None => {}
Some(decl) => add_entries_from_fn_decl(&mut index, decl, cfg.entry)
if let Some(decl) = decl {
add_entries_from_fn_decl(&mut index, decl, cfg.entry);
}
cfg.graph.each_node(|node_idx, node| {

View file

@ -105,9 +105,8 @@ fn calculate_type(sess: &session::Session,
// If the global prefer_dynamic switch is turned off, first attempt
// static linkage (this can fail).
config::CrateTypeExecutable if !sess.opts.cg.prefer_dynamic => {
match attempt_static(sess) {
Some(v) => return v,
None => {}
if let Some(v) = attempt_static(sess) {
return v;
}
}
@ -119,9 +118,8 @@ fn calculate_type(sess: &session::Session,
// to be found, we generate some nice pretty errors.
config::CrateTypeStaticlib |
config::CrateTypeCdylib => {
match attempt_static(sess) {
Some(v) => return v,
None => {}
if let Some(v) = attempt_static(sess) {
return v;
}
for cnum in sess.cstore.crates() {
let src = sess.cstore.used_crate_source(cnum);
@ -136,9 +134,8 @@ fn calculate_type(sess: &session::Session,
// to try to eagerly statically link all dependencies. This is normally
// done for end-product dylibs, not intermediate products.
config::CrateTypeDylib if !sess.opts.cg.prefer_dynamic => {
match attempt_static(sess) {
Some(v) => return v,
None => {}
if let Some(v) = attempt_static(sess) {
return v;
}
}

View file

@ -735,26 +735,23 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
for i in 0..autoderefs {
let deref_id = ty::MethodCall::autoderef(expr.id, i as u32);
match self.mc.infcx.node_method_ty(deref_id) {
None => {}
Some(method_ty) => {
let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));
if let Some(method_ty) = self.mc.infcx.node_method_ty(deref_id) {
let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));
// the method call infrastructure should have
// replaced all late-bound regions with variables:
let self_ty = method_ty.fn_sig().input(0);
let self_ty = self.tcx().no_late_bound_regions(&self_ty).unwrap();
// the method call infrastructure should have
// replaced all late-bound regions with variables:
let self_ty = method_ty.fn_sig().input(0);
let self_ty = self.tcx().no_late_bound_regions(&self_ty).unwrap();
let (m, r) = match self_ty.sty {
ty::TyRef(r, ref m) => (m.mutbl, r),
_ => span_bug!(expr.span,
"bad overloaded deref type {:?}",
method_ty)
};
let bk = ty::BorrowKind::from_mutbl(m);
self.delegate.borrow(expr.id, expr.span, cmt,
*r, bk, AutoRef);
}
let (m, r) = match self_ty.sty {
ty::TyRef(r, ref m) => (m.mutbl, r),
_ => span_bug!(expr.span,
"bad overloaded deref type {:?}",
method_ty)
};
let bk = ty::BorrowKind::from_mutbl(m);
self.delegate.borrow(expr.id, expr.span, cmt,
*r, bk, AutoRef);
}
}
}

View file

@ -598,11 +598,8 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
fn arm_pats_bindings<F>(&mut self, pat: Option<&hir::Pat>, f: F) where
F: FnMut(&mut Liveness<'a, 'tcx>, LiveNode, Variable, Span, NodeId),
{
match pat {
Some(pat) => {
self.pat_bindings(pat, f);
}
None => {}
if let Some(pat) = pat {
self.pat_bindings(pat, f);
}
}

View file

@ -284,9 +284,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for LifetimeContext<'a, 'tcx> {
fn visit_generics(&mut self, generics: &hir::Generics) {
for ty_param in generics.ty_params.iter() {
walk_list!(self, visit_ty_param_bound, &ty_param.bounds);
match ty_param.default {
Some(ref ty) => self.visit_ty(&ty),
None => {}
if let Some(ref ty) = ty_param.default {
self.visit_ty(&ty);
}
}
for predicate in &generics.where_clause.predicates {

View file

@ -123,9 +123,8 @@ impl<'a> Context<'a> {
impl<'a, 'v> Visitor<'v> for Context<'a> {
fn visit_foreign_item(&mut self, i: &hir::ForeignItem) {
match lang_items::extract(&i.attrs) {
None => {}
Some(lang_item) => self.register(&lang_item, i.span),
if let Some(lang_item) = lang_items::extract(&i.attrs) {
self.register(&lang_item, i.span);
}
intravisit::walk_foreign_item(self, i)
}

View file

@ -250,15 +250,12 @@ impl Session {
msg: String) {
let lint_id = lint::LintId::of(lint);
let mut lints = self.lints.borrow_mut();
match lints.get_mut(&id) {
Some(arr) => {
let tuple = (lint_id, sp, msg);
if !arr.contains(&tuple) {
arr.push(tuple);
}
return;
if let Some(arr) = lints.get_mut(&id) {
let tuple = (lint_id, sp, msg);
if !arr.contains(&tuple) {
arr.push(tuple);
}
None => {}
return;
}
lints.insert(id, vec!((lint_id, sp, msg)));
}

View file

@ -908,6 +908,9 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
err.note("only the last field of a struct or enum variant \
may have a dynamically sized type");
}
ObligationCauseCode::ConstSized => {
err.note("constant expressions must have a statically known size");
}
ObligationCauseCode::SharedStatic => {
err.note("shared static variables must have a type that implements `Sync`");
}

View file

@ -127,6 +127,9 @@ pub enum ObligationCauseCode<'tcx> {
// Types of fields (other than the last) in a struct must be sized.
FieldSized,
// Constant expressions must be sized.
ConstSized,
// static items must have `Sync` type
SharedStatic,

View file

@ -168,13 +168,12 @@ impl<'a, 'tcx> ty::TyS<'tcx> {
// which is incorrect. This value was computed based on the crutch
// value for the type contents of list. The correct value is
// TC::OwnsOwned. This manifested as issue #4821.
match cache.get(&ty) {
Some(tc) => { return *tc; }
None => {}
if let Some(tc) = cache.get(&ty) {
return *tc;
}
match tcx.tc_cache.borrow().get(&ty) { // Must check both caches!
Some(tc) => { return *tc; }
None => {}
// Must check both caches!
if let Some(tc) = tcx.tc_cache.borrow().get(&ty) {
return *tc;
}
cache.insert(ty, TC::None);

View file

@ -521,9 +521,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.0 }
fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
match self.tcx().normalized_cache.borrow().get(&ty).cloned() {
None => {}
Some(u) => return u
if let Some(u) = self.tcx().normalized_cache.borrow().get(&ty).cloned() {
return u;
}
// FIXME(eddyb) should local contexts have a cache too?
@ -714,4 +713,3 @@ impl<'tcx> TypeVisitor<'tcx> for LateBoundRegionsCollector {
false
}
}

View file

@ -712,16 +712,13 @@ impl<'a, 'tcx> ty::TyS<'tcx> {
// struct Foo;
// struct Bar<T> { x: Bar<Foo> }
match iter.next() {
Some(&seen_type) => {
if same_struct_or_enum(seen_type, def) {
debug!("SelfRecursive: {:?} contains {:?}",
seen_type,
ty);
return Representability::SelfRecursive;
}
if let Some(&seen_type) = iter.next() {
if same_struct_or_enum(seen_type, def) {
debug!("SelfRecursive: {:?} contains {:?}",
seen_type,
ty);
return Representability::SelfRecursive;
}
None => {}
}
// We also need to know whether the first item contains other types

View file

@ -274,11 +274,8 @@ impl<'a, 'tcx> MoveData<'tcx> {
/// `lp` and any of its base paths that do not yet have an index.
pub fn move_path(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
lp: Rc<LoanPath<'tcx>>) -> MovePathIndex {
match self.path_map.borrow().get(&lp) {
Some(&index) => {
return index;
}
None => {}
if let Some(&index) = self.path_map.borrow().get(&lp) {
return index;
}
let index = match lp.kind {

View file

@ -176,9 +176,8 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &hir::Expr) {
// Second, if there is a guard on each arm, make sure it isn't
// assigning or borrowing anything mutably.
match arm.guard {
Some(ref guard) => check_for_mutation_in_guard(cx, &guard),
None => {}
if let Some(ref guard) = arm.guard {
check_for_mutation_in_guard(cx, &guard);
}
}

View file

@ -27,7 +27,6 @@
#![feature(staged_api)]
#![feature(rustc_diagnostic_macros)]
#![feature(slice_patterns)]
#![feature(iter_arith)]
#![feature(question_mark)]
#![feature(box_patterns)]
#![feature(box_syntax)]

View file

@ -15,6 +15,7 @@ use std::marker::PhantomData;
use std::ops::{Index, IndexMut, Range};
use std::fmt;
use std::vec;
use std::u32;
use rustc_serialize as serialize;
@ -31,6 +32,11 @@ impl Idx for usize {
fn index(self) -> usize { self }
}
impl Idx for u32 {
fn new(idx: usize) -> Self { assert!(idx <= u32::MAX as usize); idx as u32 }
fn index(self) -> usize { self as usize }
}
#[derive(Clone)]
pub struct IndexVec<I: Idx, T> {
pub raw: Vec<T>,

View file

@ -208,11 +208,17 @@ impl<O: ForestObligation> ObligationForest<O> {
///
/// This CAN be done in a snapshot
pub fn register_obligation(&mut self, obligation: O) {
self.register_obligation_at(obligation, None)
// Ignore errors here - there is no guarantee of success.
let _ = self.register_obligation_at(obligation, None);
}
fn register_obligation_at(&mut self, obligation: O, parent: Option<NodeIndex>) {
if self.done_cache.contains(obligation.as_predicate()) { return }
// returns Err(()) if we already know this obligation failed.
fn register_obligation_at(&mut self, obligation: O, parent: Option<NodeIndex>)
-> Result<(), ()>
{
if self.done_cache.contains(obligation.as_predicate()) {
return Ok(())
}
match self.waiting_cache.entry(obligation.as_predicate().clone()) {
Entry::Occupied(o) => {
@ -226,6 +232,11 @@ impl<O: ForestObligation> ObligationForest<O> {
self.nodes[o.get().get()].dependents.push(parent);
}
}
if let NodeState::Error = self.nodes[o.get().get()].state.get() {
Err(())
} else {
Ok(())
}
}
Entry::Vacant(v) => {
debug!("register_obligation_at({:?}, {:?}) - ok",
@ -233,8 +244,9 @@ impl<O: ForestObligation> ObligationForest<O> {
v.insert(NodeIndex::new(self.nodes.len()));
self.cache_list.push(obligation.as_predicate().clone());
self.nodes.push(Node::new(parent, obligation));
Ok(())
}
};
}
}
/// Convert all remaining obligations to the given error.
@ -306,12 +318,19 @@ impl<O: ForestObligation> ObligationForest<O> {
Ok(Some(children)) => {
// if we saw a Some(_) result, we are not (yet) stalled
stalled = false;
for child in children {
self.register_obligation_at(child,
Some(NodeIndex::new(index)));
}
self.nodes[index].state.set(NodeState::Success);
for child in children {
let st = self.register_obligation_at(
child,
Some(NodeIndex::new(index))
);
if let Err(()) = st {
// error already reported - propagate it
// to our node.
self.error_at(index);
}
}
}
Err(err) => {
let backtrace = self.error_at(index);

View file

@ -418,3 +418,43 @@ fn orphan() {
let errors = forest.to_errors(());
assert_eq!(errors.len(), 0);
}
#[test]
fn simultaneous_register_and_error() {
// check that registering a failed obligation works correctly
let mut forest = ObligationForest::new();
forest.register_obligation("A");
forest.register_obligation("B");
let Outcome { completed: ok, errors: err, .. } =
forest.process_obligations(&mut C(|obligation| {
match *obligation {
"A" => Err("An error"),
"B" => Ok(Some(vec!["A"])),
_ => unreachable!(),
}
}, |_|{}));
assert_eq!(ok.len(), 0);
assert_eq!(err, vec![super::Error {
error: "An error",
backtrace: vec!["A"]
}]);
let mut forest = ObligationForest::new();
forest.register_obligation("B");
forest.register_obligation("A");
let Outcome { completed: ok, errors: err, .. } =
forest.process_obligations(&mut C(|obligation| {
match *obligation {
"A" => Err("An error"),
"B" => Ok(Some(vec!["A"])),
_ => unreachable!(),
}
}, |_|{}));
assert_eq!(ok.len(), 0);
assert_eq!(err, vec![super::Error {
error: "An error",
backtrace: vec!["A"]
}]);
}

View file

@ -116,34 +116,20 @@ pub fn compile_input(sess: &Session,
let outputs = build_output_filenames(input, outdir, output, &krate.attrs, sess);
let id = link::find_crate_name(Some(sess), &krate.attrs, input);
let ExpansionResult { expanded_crate, defs, analysis, resolutions, mut hir_forest } = {
let make_glob_map = control.make_glob_map;
phase_2_configure_and_expand(sess, &cstore, krate, &id, addl_plugins, make_glob_map)?
phase_2_configure_and_expand(
sess, &cstore, krate, &id, addl_plugins, control.make_glob_map,
|expanded_crate| {
let mut state = CompileState::state_after_expand(
input, sess, outdir, output, &cstore, expanded_crate, &id,
);
controller_entry_point!(after_expand, sess, state, Ok(()));
Ok(())
}
)?
};
controller_entry_point!(after_expand,
sess,
CompileState::state_after_expand(input,
sess,
outdir,
output,
&cstore,
&expanded_crate,
&id),
Ok(()));
write_out_deps(sess, &outputs, &id);
controller_entry_point!(after_write_deps,
sess,
CompileState::state_after_write_deps(input,
sess,
outdir,
output,
&cstore,
&expanded_crate,
&id),
Ok(()));
let arenas = ty::CtxtArenas::new();
// Construct the HIR map
@ -239,8 +225,15 @@ pub fn compile_input(sess: &Session,
phase5_result);
phase5_result?;
write::cleanup_llvm(&trans);
phase_6_link_output(sess, &trans, &outputs);
controller_entry_point!(compilation_done,
sess,
CompileState::state_when_compilation_done(input, sess, outdir, output),
Ok(()));
Ok(())
}
@ -285,10 +278,10 @@ pub fn source_name(input: &Input) -> String {
pub struct CompileController<'a> {
pub after_parse: PhaseController<'a>,
pub after_expand: PhaseController<'a>,
pub after_write_deps: PhaseController<'a>,
pub after_hir_lowering: PhaseController<'a>,
pub after_analysis: PhaseController<'a>,
pub after_llvm: PhaseController<'a>,
pub compilation_done: PhaseController<'a>,
pub make_glob_map: MakeGlobMap,
}
@ -298,10 +291,10 @@ impl<'a> CompileController<'a> {
CompileController {
after_parse: PhaseController::basic(),
after_expand: PhaseController::basic(),
after_write_deps: PhaseController::basic(),
after_hir_lowering: PhaseController::basic(),
after_analysis: PhaseController::basic(),
after_llvm: PhaseController::basic(),
compilation_done: PhaseController::basic(),
make_glob_map: MakeGlobMap::No,
}
}
@ -406,23 +399,6 @@ impl<'a, 'b, 'ast, 'tcx> CompileState<'a, 'b, 'ast, 'tcx> {
}
}
fn state_after_write_deps(input: &'a Input,
session: &'ast Session,
out_dir: &'a Option<PathBuf>,
out_file: &'a Option<PathBuf>,
cstore: &'a CStore,
krate: &'a ast::Crate,
crate_name: &'a str)
-> CompileState<'a, 'b, 'ast, 'tcx> {
CompileState {
crate_name: Some(crate_name),
cstore: Some(cstore),
expanded_crate: Some(krate),
out_file: out_file.as_ref().map(|s| &**s),
..CompileState::empty(input, session, out_dir)
}
}
fn state_after_hir_lowering(input: &'a Input,
session: &'ast Session,
out_dir: &'a Option<PathBuf>,
@ -486,6 +462,17 @@ impl<'a, 'b, 'ast, 'tcx> CompileState<'a, 'b, 'ast, 'tcx> {
..CompileState::empty(input, session, out_dir)
}
}
fn state_when_compilation_done(input: &'a Input,
session: &'ast Session,
out_dir: &'a Option<PathBuf>,
out_file: &'a Option<PathBuf>)
-> CompileState<'a, 'b, 'ast, 'tcx> {
CompileState {
out_file: out_file.as_ref().map(|s| &**s),
..CompileState::empty(input, session, out_dir)
}
}
}
pub fn phase_1_parse_input<'a>(sess: &'a Session,
@ -556,13 +543,16 @@ pub struct ExpansionResult<'a> {
/// standard library and prelude, and name resolution.
///
/// Returns `None` if we're aborting after handling -W help.
pub fn phase_2_configure_and_expand<'a>(sess: &Session,
cstore: &CStore,
mut krate: ast::Crate,
crate_name: &'a str,
addl_plugins: Option<Vec<String>>,
make_glob_map: MakeGlobMap)
-> Result<ExpansionResult<'a>, usize> {
pub fn phase_2_configure_and_expand<'a, F>(sess: &Session,
cstore: &CStore,
mut krate: ast::Crate,
crate_name: &'a str,
addl_plugins: Option<Vec<String>>,
make_glob_map: MakeGlobMap,
after_expand: F)
-> Result<ExpansionResult<'a>, usize>
where F: FnOnce(&ast::Crate) -> CompileResult,
{
let time_passes = sess.time_passes();
// strip before anything else because crate metadata may use #[cfg_attr]
@ -745,9 +735,23 @@ pub fn phase_2_configure_and_expand<'a>(sess: &Session,
"AST validation",
|| ast_validation::check_crate(sess, &krate));
time(sess.time_passes(), "name resolution", || {
time(sess.time_passes(), "name resolution", || -> CompileResult {
// Currently, we ignore the name resolution data structures for the purposes of dependency
// tracking. Instead we will run name resolution and include its output in the hash of each
// item, much like we do for macro expansion. In other words, the hash reflects not just
// its contents but the results of name resolution on those contents. Hopefully we'll push
// this back at some point.
let _ignore = sess.dep_graph.in_ignore();
resolver.build_reduced_graph(&krate);
resolver.resolve_imports();
// Since import resolution will eventually happen in expansion,
// don't perform `after_expand` until after import resolution.
after_expand(&krate)?;
resolver.resolve_crate(&krate);
});
Ok(())
})?;
// Lower ast -> hir.
let hir_forest = time(sess.time_passes(), "lowering ast -> hir", || {

View file

@ -511,7 +511,7 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls {
}
if sess.opts.no_analysis || sess.opts.debugging_opts.ast_json {
control.after_write_deps.stop = Compilation::Stop;
control.after_hir_lowering.stop = Compilation::Stop;
}
if !sess.opts.output_types.keys().any(|&i| i == OutputType::Exe) {

View file

@ -30,12 +30,15 @@ const ARM_WHITELIST: &'static [&'static str] = &[
const X86_WHITELIST: &'static [&'static str] = &[
"avx\0",
"avx2\0",
"bmi\0",
"bmi2\0",
"sse\0",
"sse2\0",
"sse3\0",
"sse4.1\0",
"sse4.2\0",
"ssse3\0",
"tbm\0",
];
/// Add `target_feature = "..."` cfgs for a variety of platform

View file

@ -116,9 +116,11 @@ fn test_env<F>(source_string: &str,
input: source_string.to_string(),
};
let krate = driver::phase_1_parse_input(&sess, krate_config, &input).unwrap();
let driver::ExpansionResult { defs, resolutions, mut hir_forest, .. } =
driver::phase_2_configure_and_expand(&sess, &cstore, krate, "test", None, MakeGlobMap::No)
.expect("phase 2 aborted");
let driver::ExpansionResult { defs, resolutions, mut hir_forest, .. } = {
driver::phase_2_configure_and_expand(
&sess, &cstore, krate, "test", None, MakeGlobMap::No, |_| Ok(()),
).expect("phase 2 aborted")
};
let _ignore = dep_graph.in_ignore();
let arenas = ty::CtxtArenas::new();

View file

@ -150,12 +150,9 @@ impl LateLintPass for UnusedResults {
if attr.check_name("must_use") {
let mut msg = "unused result which must be used".to_string();
// check for #[must_use="..."]
match attr.value_str() {
None => {}
Some(s) => {
msg.push_str(": ");
msg.push_str(&s);
}
if let Some(s) = attr.value_str() {
msg.push_str(": ");
msg.push_str(&s);
}
cx.span_lint(UNUSED_MUST_USE, sp, &msg);
return true;

View file

@ -24,19 +24,17 @@ fn main() {
let llvm_config = env::var_os("LLVM_CONFIG")
.map(PathBuf::from)
.unwrap_or_else(|| {
match env::var_os("CARGO_TARGET_DIR").map(PathBuf::from) {
Some(dir) => {
let to_test = dir.parent()
.unwrap()
.parent()
.unwrap()
.join(&target)
.join("llvm/bin/llvm-config");
if Command::new(&to_test).output().is_ok() {
return to_test;
}
if let Some(dir) = env::var_os("CARGO_TARGET_DIR")
.map(PathBuf::from) {
let to_test = dir.parent()
.unwrap()
.parent()
.unwrap()
.join(&target)
.join("llvm/bin/llvm-config");
if Command::new(&to_test).output().is_ok() {
return to_test;
}
None => {}
}
PathBuf::from("llvm-config")
});

View file

@ -16,6 +16,7 @@ rustc = { path = "../librustc" }
rustc_back = { path = "../librustc_back" }
rustc_bitflags = { path = "../librustc_bitflags" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_errors = { path = "../librustc_errors" }
rustc_llvm = { path = "../librustc_llvm" }
serialize = { path = "../libserialize" }

View file

@ -60,7 +60,7 @@ use rustc_serialize::{Encodable, EncoderHelpers};
struct DecodeContext<'a, 'b, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
cdata: &'b cstore::crate_metadata,
cdata: &'b cstore::CrateMetadata,
from_id_range: IdRange,
to_id_range: IdRange,
// Cache the last used filemap for translating spans as an optimization.
@ -121,7 +121,7 @@ impl<'a, 'b, 'c, 'tcx> ast_map::FoldOps for &'a DecodeContext<'b, 'c, 'tcx> {
/// Decodes an item from its AST in the cdata's metadata and adds it to the
/// ast-map.
pub fn decode_inlined_item<'a, 'tcx>(cdata: &cstore::crate_metadata,
pub fn decode_inlined_item<'a, 'tcx>(cdata: &cstore::CrateMetadata,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
parent_def_path: ast_map::DefPath,
parent_did: DefId,
@ -246,7 +246,7 @@ impl<S:serialize::Encoder> def_id_encoder_helpers for S
trait def_id_decoder_helpers {
fn read_def_id(&mut self, dcx: &DecodeContext) -> DefId;
fn read_def_id_nodcx(&mut self,
cdata: &cstore::crate_metadata) -> DefId;
cdata: &cstore::CrateMetadata) -> DefId;
}
impl<D:serialize::Decoder> def_id_decoder_helpers for D
@ -258,7 +258,7 @@ impl<D:serialize::Decoder> def_id_decoder_helpers for D
}
fn read_def_id_nodcx(&mut self,
cdata: &cstore::crate_metadata)
cdata: &cstore::CrateMetadata)
-> DefId {
let did: DefId = Decodable::decode(self).unwrap();
decoder::translate_def_id(cdata, did)
@ -858,17 +858,17 @@ trait rbml_decoder_decoder_helpers<'tcx> {
// Versions of the type reading functions that don't need the full
// DecodeContext.
fn read_ty_nodcx<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
cdata: &cstore::crate_metadata) -> Ty<'tcx>;
cdata: &cstore::CrateMetadata) -> Ty<'tcx>;
fn read_tys_nodcx<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
cdata: &cstore::crate_metadata) -> Vec<Ty<'tcx>>;
cdata: &cstore::CrateMetadata) -> Vec<Ty<'tcx>>;
fn read_substs_nodcx<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
cdata: &cstore::crate_metadata)
cdata: &cstore::CrateMetadata)
-> subst::Substs<'tcx>;
}
impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> {
fn read_ty_nodcx<'b>(&mut self, tcx: TyCtxt<'b, 'tcx, 'tcx>,
cdata: &cstore::crate_metadata)
cdata: &cstore::CrateMetadata)
-> Ty<'tcx> {
self.read_opaque(|_, doc| {
Ok(
@ -879,7 +879,7 @@ impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> {
}
fn read_tys_nodcx<'b>(&mut self, tcx: TyCtxt<'b, 'tcx, 'tcx>,
cdata: &cstore::crate_metadata) -> Vec<Ty<'tcx>> {
cdata: &cstore::CrateMetadata) -> Vec<Ty<'tcx>> {
self.read_to_vec(|this| Ok(this.read_ty_nodcx(tcx, cdata)) )
.unwrap()
.into_iter()
@ -887,7 +887,7 @@ impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> {
}
fn read_substs_nodcx<'b>(&mut self, tcx: TyCtxt<'b, 'tcx, 'tcx>,
cdata: &cstore::crate_metadata)
cdata: &cstore::CrateMetadata)
-> subst::Substs<'tcx>
{
self.read_opaque(|_, doc| {

View file

@ -252,3 +252,7 @@ pub fn rustc_version() -> String {
}
pub const tag_panic_strategy: usize = 0x114;
// NB: increment this if you change the format of metadata such that
// rustc_version can't be found.
pub const metadata_encoding_version : &'static [u8] = &[b'r', b'u', b's', b't', 0, 0, 0, 2];

View file

@ -12,7 +12,6 @@
//! Validates all used crates and extern libraries and loads their metadata
use common::rustc_version;
use cstore::{self, CStore, CrateSource, MetadataBlob};
use decoder;
use loader::{self, CratePaths};
@ -24,7 +23,7 @@ use rustc::session::{config, Session};
use rustc::session::config::PanicStrategy;
use rustc::session::search_paths::PathKind;
use rustc::middle::cstore::{CrateStore, validate_crate_name, ExternCrate};
use rustc::util::nodemap::FnvHashMap;
use rustc::util::nodemap::{FnvHashMap, FnvHashSet};
use rustc::hir::map as hir_map;
use std::cell::{RefCell, Cell};
@ -132,7 +131,7 @@ struct ExtensionCrate {
}
enum PMDSource {
Registered(Rc<cstore::crate_metadata>),
Registered(Rc<cstore::CrateMetadata>),
Owned(MetadataBlob),
}
@ -236,25 +235,6 @@ impl<'a> CrateReader<'a> {
return ret;
}
fn verify_rustc_version(&self,
name: &str,
span: Span,
metadata: &MetadataBlob) {
let crate_rustc_version = decoder::crate_rustc_version(metadata.as_slice());
if crate_rustc_version != Some(rustc_version()) {
let mut err = struct_span_fatal!(self.sess, span, E0514,
"the crate `{}` has been compiled with {}, which is \
incompatible with this version of rustc",
name,
crate_rustc_version
.as_ref().map(|s| &**s)
.unwrap_or("an old version of rustc"));
err.help("consider removing the compiled binaries and recompiling \
with your current version of rustc");
err.emit();
}
}
fn verify_no_symbol_conflicts(&self,
span: Span,
metadata: &MetadataBlob) {
@ -294,9 +274,8 @@ impl<'a> CrateReader<'a> {
span: Span,
lib: loader::Library,
explicitly_linked: bool)
-> (ast::CrateNum, Rc<cstore::crate_metadata>,
-> (ast::CrateNum, Rc<cstore::CrateMetadata>,
cstore::CrateSource) {
self.verify_rustc_version(name, span, &lib.metadata);
self.verify_no_symbol_conflicts(span, &lib.metadata);
// Claim this crate number and cache it
@ -318,10 +297,10 @@ impl<'a> CrateReader<'a> {
let loader::Library { dylib, rlib, metadata } = lib;
let cnum_map = self.resolve_crate_deps(root, metadata.as_slice(), span);
let cnum_map = self.resolve_crate_deps(root, metadata.as_slice(), cnum, span);
let staged_api = self.is_staged_api(metadata.as_slice());
let cmeta = Rc::new(cstore::crate_metadata {
let cmeta = Rc::new(cstore::CrateMetadata {
name: name.to_string(),
extern_crate: Cell::new(None),
index: decoder::load_index(metadata.as_slice()),
@ -364,7 +343,7 @@ impl<'a> CrateReader<'a> {
span: Span,
kind: PathKind,
explicitly_linked: bool)
-> (ast::CrateNum, Rc<cstore::crate_metadata>, cstore::CrateSource) {
-> (ast::CrateNum, Rc<cstore::CrateMetadata>, cstore::CrateSource) {
let result = match self.existing_match(name, hash, kind) {
Some(cnum) => LoadResult::Previous(cnum),
None => {
@ -381,6 +360,7 @@ impl<'a> CrateReader<'a> {
rejected_via_hash: vec!(),
rejected_via_triple: vec!(),
rejected_via_kind: vec!(),
rejected_via_version: vec!(),
should_match_name: true,
};
match self.load(&mut load_ctxt) {
@ -438,8 +418,11 @@ impl<'a> CrateReader<'a> {
fn update_extern_crate(&mut self,
cnum: ast::CrateNum,
mut extern_crate: ExternCrate)
mut extern_crate: ExternCrate,
visited: &mut FnvHashSet<(ast::CrateNum, bool)>)
{
if !visited.insert((cnum, extern_crate.direct)) { return }
let cmeta = self.cstore.get_crate_data(cnum);
let old_extern_crate = cmeta.extern_crate.get();
@ -458,11 +441,10 @@ impl<'a> CrateReader<'a> {
}
cmeta.extern_crate.set(Some(extern_crate));
// Propagate the extern crate info to dependencies.
extern_crate.direct = false;
for &dep_cnum in cmeta.cnum_map.borrow().values() {
self.update_extern_crate(dep_cnum, extern_crate);
for &dep_cnum in cmeta.cnum_map.borrow().iter() {
self.update_extern_crate(dep_cnum, extern_crate, visited);
}
}
@ -470,12 +452,13 @@ impl<'a> CrateReader<'a> {
fn resolve_crate_deps(&mut self,
root: &Option<CratePaths>,
cdata: &[u8],
span : Span)
-> cstore::cnum_map {
krate: ast::CrateNum,
span: Span)
-> cstore::CrateNumMap {
debug!("resolving deps of external crate");
// The map from crate numbers in the crate we're resolving to local crate
// numbers
decoder::get_crate_deps(cdata).iter().map(|dep| {
let map: FnvHashMap<_, _> = decoder::get_crate_deps(cdata).iter().map(|dep| {
debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
let (local_cnum, _, _) = self.resolve_crate(root,
&dep.name,
@ -485,7 +468,13 @@ impl<'a> CrateReader<'a> {
PathKind::Dependency,
dep.explicitly_linked);
(dep.cnum, local_cnum)
}).collect()
}).collect();
let max_cnum = map.values().cloned().max().unwrap_or(0);
// we map 0 and all other holes in the map to our parent crate. The "additional"
// self-dependencies should be harmless.
(0..max_cnum+1).map(|cnum| map.get(&cnum).cloned().unwrap_or(krate)).collect()
}
fn read_extension_crate(&mut self, span: Span, info: &CrateInfo) -> ExtensionCrate {
@ -508,6 +497,7 @@ impl<'a> CrateReader<'a> {
rejected_via_hash: vec!(),
rejected_via_triple: vec!(),
rejected_via_kind: vec!(),
rejected_via_version: vec!(),
should_match_name: true,
};
let library = self.load(&mut load_ctxt).or_else(|| {
@ -826,7 +816,7 @@ impl<'a> CrateReader<'a> {
fn inject_dependency_if(&self,
krate: ast::CrateNum,
what: &str,
needs_dep: &Fn(&cstore::crate_metadata) -> bool) {
needs_dep: &Fn(&cstore::CrateMetadata) -> bool) {
// don't perform this validation if the session has errors, as one of
// those errors may indicate a circular dependency which could cause
// this to stack overflow.
@ -837,7 +827,17 @@ impl<'a> CrateReader<'a> {
// Before we inject any dependencies, make sure we don't inject a
// circular dependency by validating that this crate doesn't
// transitively depend on any crates satisfying `needs_dep`.
validate(self, krate, krate, what, needs_dep);
for dep in self.cstore.crate_dependencies_in_rpo(krate) {
let data = self.cstore.get_crate_data(dep);
if needs_dep(&data) {
self.sess.err(&format!("the crate `{}` cannot depend \
on a crate that needs {}, but \
it depends on `{}`",
self.cstore.get_crate_data(krate).name(),
what,
data.name()));
}
}
// All crates satisfying `needs_dep` do not explicitly depend on the
// crate provided for this compile, but in order for this compilation to
@ -849,32 +849,8 @@ impl<'a> CrateReader<'a> {
}
info!("injecting a dep from {} to {}", cnum, krate);
let mut cnum_map = data.cnum_map.borrow_mut();
let remote_cnum = cnum_map.len() + 1;
let prev = cnum_map.insert(remote_cnum as ast::CrateNum, krate);
assert!(prev.is_none());
data.cnum_map.borrow_mut().push(krate);
});
fn validate(me: &CrateReader,
krate: ast::CrateNum,
root: ast::CrateNum,
what: &str,
needs_dep: &Fn(&cstore::crate_metadata) -> bool) {
let data = me.cstore.get_crate_data(krate);
if needs_dep(&data) {
let krate_name = data.name();
let data = me.cstore.get_crate_data(root);
let root_name = data.name();
me.sess.err(&format!("the crate `{}` cannot depend \
on a crate that needs {}, but \
it depends on `{}`", root_name, what,
krate_name));
}
for (_, &dep) in data.cnum_map.borrow().iter() {
validate(me, dep, root, what, needs_dep);
}
}
}
}
@ -948,7 +924,8 @@ impl<'a> LocalCrateReader<'a> {
span: i.span,
direct: true,
path_len: len,
});
},
&mut FnvHashSet());
self.cstore.add_extern_mod_stmt_cnum(info.id, cnum);
}
}

View file

@ -9,6 +9,7 @@
// except according to those terms.
use cstore;
use common;
use decoder;
use encoder;
use loader;
@ -588,7 +589,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore {
fn metadata_encoding_version(&self) -> &[u8]
{
encoder::metadata_encoding_version
common::metadata_encoding_version
}
/// Returns a map from a sufficiently visible external item (i.e. an external item that is

View file

@ -15,6 +15,7 @@
pub use self::MetadataBlob::*;
use common;
use creader;
use decoder;
use index;
@ -26,6 +27,7 @@ use rustc::hir::map::DefKey;
use rustc::hir::svh::Svh;
use rustc::middle::cstore::{ExternCrate};
use rustc::session::config::PanicStrategy;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc::util::nodemap::{FnvHashMap, NodeMap, NodeSet, DefIdMap};
use std::cell::{RefCell, Ref, Cell};
@ -46,7 +48,7 @@ pub use middle::cstore::{CrateSource, LinkMeta};
// local crate numbers (as generated during this session). Each external
// crate may refer to types in other external crates, and each has their
// own crate numbers.
pub type cnum_map = FnvHashMap<ast::CrateNum, ast::CrateNum>;
pub type CrateNumMap = IndexVec<ast::CrateNum, ast::CrateNum>;
pub enum MetadataBlob {
MetadataVec(Bytes),
@ -64,7 +66,7 @@ pub struct ImportedFileMap {
pub translated_filemap: Rc<syntax_pos::FileMap>
}
pub struct crate_metadata {
pub struct CrateMetadata {
pub name: String,
/// Information about the extern crate that caused this crate to
@ -73,7 +75,7 @@ pub struct crate_metadata {
pub extern_crate: Cell<Option<ExternCrate>>,
pub data: MetadataBlob,
pub cnum_map: RefCell<cnum_map>,
pub cnum_map: RefCell<CrateNumMap>,
pub cnum: ast::CrateNum,
pub codemap_import_info: RefCell<Vec<ImportedFileMap>>,
pub staged_api: bool,
@ -97,7 +99,7 @@ pub struct crate_metadata {
pub struct CStore {
pub dep_graph: DepGraph,
metas: RefCell<FnvHashMap<ast::CrateNum, Rc<crate_metadata>>>,
metas: RefCell<FnvHashMap<ast::CrateNum, Rc<CrateMetadata>>>,
/// Map from NodeId's of local extern crate statements to crate numbers
extern_mod_crate_map: RefCell<NodeMap<ast::CrateNum>>,
used_crate_sources: RefCell<Vec<CrateSource>>,
@ -128,7 +130,7 @@ impl CStore {
self.metas.borrow().len() as ast::CrateNum + 1
}
pub fn get_crate_data(&self, cnum: ast::CrateNum) -> Rc<crate_metadata> {
pub fn get_crate_data(&self, cnum: ast::CrateNum) -> Rc<CrateMetadata> {
self.metas.borrow().get(&cnum).unwrap().clone()
}
@ -137,12 +139,12 @@ impl CStore {
decoder::get_crate_hash(cdata.data())
}
pub fn set_crate_data(&self, cnum: ast::CrateNum, data: Rc<crate_metadata>) {
pub fn set_crate_data(&self, cnum: ast::CrateNum, data: Rc<CrateMetadata>) {
self.metas.borrow_mut().insert(cnum, data);
}
pub fn iter_crate_data<I>(&self, mut i: I) where
I: FnMut(ast::CrateNum, &Rc<crate_metadata>),
I: FnMut(ast::CrateNum, &Rc<CrateMetadata>),
{
for (&k, v) in self.metas.borrow().iter() {
i(k, v);
@ -151,7 +153,7 @@ impl CStore {
/// Like `iter_crate_data`, but passes source paths (if available) as well.
pub fn iter_crate_data_origins<I>(&self, mut i: I) where
I: FnMut(ast::CrateNum, &crate_metadata, Option<CrateSource>),
I: FnMut(ast::CrateNum, &CrateMetadata, Option<CrateSource>),
{
for (&k, v) in self.metas.borrow().iter() {
let origin = self.opt_used_crate_source(k);
@ -182,6 +184,30 @@ impl CStore {
self.statically_included_foreign_items.borrow_mut().clear();
}
pub fn crate_dependencies_in_rpo(&self, krate: ast::CrateNum) -> Vec<ast::CrateNum>
{
let mut ordering = Vec::new();
self.push_dependencies_in_postorder(&mut ordering, krate);
ordering.reverse();
ordering
}
pub fn push_dependencies_in_postorder(&self,
ordering: &mut Vec<ast::CrateNum>,
krate: ast::CrateNum)
{
if ordering.contains(&krate) { return }
let data = self.get_crate_data(krate);
for &dep in data.cnum_map.borrow().iter() {
if dep != krate {
self.push_dependencies_in_postorder(ordering, dep);
}
}
ordering.push(krate);
}
// This method is used when generating the command line to pass through to
// system linker. The linker expects undefined symbols on the left of the
// command line to be defined in libraries on the right, not the other way
@ -194,17 +220,8 @@ impl CStore {
pub fn do_get_used_crates(&self, prefer: LinkagePreference)
-> Vec<(ast::CrateNum, Option<PathBuf>)> {
let mut ordering = Vec::new();
fn visit(cstore: &CStore, cnum: ast::CrateNum,
ordering: &mut Vec<ast::CrateNum>) {
if ordering.contains(&cnum) { return }
let meta = cstore.get_crate_data(cnum);
for (_, &dep) in meta.cnum_map.borrow().iter() {
visit(cstore, dep, ordering);
}
ordering.push(cnum);
}
for (&num, _) in self.metas.borrow().iter() {
visit(self, num, &mut ordering);
self.push_dependencies_in_postorder(&mut ordering, num);
}
info!("topological ordering: {:?}", ordering);
ordering.reverse();
@ -264,7 +281,7 @@ impl CStore {
}
}
impl crate_metadata {
impl CrateMetadata {
pub fn data<'a>(&'a self) -> &'a [u8] { self.data.as_slice() }
pub fn name(&self) -> &str { decoder::get_crate_name(self.data()) }
pub fn hash(&self) -> Svh { decoder::get_crate_hash(self.data()) }
@ -312,20 +329,25 @@ impl crate_metadata {
}
impl MetadataBlob {
pub fn as_slice<'a>(&'a self) -> &'a [u8] {
let slice = match *self {
pub fn as_slice_raw<'a>(&'a self) -> &'a [u8] {
match *self {
MetadataVec(ref vec) => &vec[..],
MetadataArchive(ref ar) => ar.as_slice(),
};
if slice.len() < 4 {
}
}
pub fn as_slice<'a>(&'a self) -> &'a [u8] {
let slice = self.as_slice_raw();
let len_offset = 4 + common::metadata_encoding_version.len();
if slice.len() < len_offset+4 {
&[] // corrupt metadata
} else {
let len = (((slice[0] as u32) << 24) |
((slice[1] as u32) << 16) |
((slice[2] as u32) << 8) |
((slice[3] as u32) << 0)) as usize;
if len + 4 <= slice.len() {
&slice[4.. len + 4]
let len = (((slice[len_offset+0] as u32) << 24) |
((slice[len_offset+1] as u32) << 16) |
((slice[len_offset+2] as u32) << 8) |
((slice[len_offset+3] as u32) << 0)) as usize;
if len <= slice.len() - 4 - len_offset {
&slice[len_offset + 4..len_offset + len + 4]
} else {
&[] // corrupt or old metadata
}

View file

@ -15,7 +15,7 @@
use self::Family::*;
use astencode::decode_inlined_item;
use cstore::{self, crate_metadata};
use cstore::{self, CrateMetadata};
use common::*;
use def_key;
use encoder::def_to_u64;
@ -30,7 +30,7 @@ use rustc::util::nodemap::FnvHashMap;
use rustc::hir;
use rustc::session::config::PanicStrategy;
use middle::cstore::{LOCAL_CRATE, FoundAst, InlinedItem, LinkagePreference};
use middle::cstore::{FoundAst, InlinedItem, LinkagePreference};
use middle::cstore::{DefLike, DlDef, DlField, DlImpl, tls};
use rustc::hir::def::Def;
use rustc::hir::def_id::{DefId, DefIndex};
@ -61,9 +61,9 @@ use syntax::print::pprust;
use syntax::ptr::P;
use syntax_pos::{self, Span, BytePos, NO_EXPANSION};
pub type Cmd<'a> = &'a crate_metadata;
pub type Cmd<'a> = &'a CrateMetadata;
impl crate_metadata {
impl CrateMetadata {
fn get_item(&self, item_id: DefIndex) -> Option<rbml::Doc> {
self.index.lookup_item(self.data(), item_id).map(|pos| {
reader::doc_at(self.data(), pos as usize).unwrap().doc
@ -663,7 +663,7 @@ fn each_child_of_item_or_crate<F, G>(intr: Rc<IdentInterner>,
mut get_crate_data: G,
mut callback: F) where
F: FnMut(DefLike, ast::Name, ty::Visibility),
G: FnMut(ast::CrateNum) -> Rc<crate_metadata>,
G: FnMut(ast::CrateNum) -> Rc<CrateMetadata>,
{
// Iterate over all children.
for child_info_doc in reader::tagged_docs(item_doc, tag_mod_child) {
@ -682,15 +682,12 @@ fn each_child_of_item_or_crate<F, G>(intr: Rc<IdentInterner>,
};
// Get the item.
match crate_data.get_item(child_def_id.index) {
None => {}
Some(child_item_doc) => {
// Hand off the item to the callback.
let child_name = item_name(&intr, child_item_doc);
let def_like = item_to_def_like(crate_data, child_item_doc, child_def_id);
let visibility = item_visibility(child_item_doc);
callback(def_like, child_name, visibility);
}
if let Some(child_item_doc) = crate_data.get_item(child_def_id.index) {
// Hand off the item to the callback.
let child_name = item_name(&intr, child_item_doc);
let def_like = item_to_def_like(crate_data, child_item_doc, child_def_id);
let visibility = item_visibility(child_item_doc);
callback(def_like, child_name, visibility);
}
}
@ -758,7 +755,7 @@ pub fn each_child_of_item<F, G>(intr: Rc<IdentInterner>,
get_crate_data: G,
callback: F) where
F: FnMut(DefLike, ast::Name, ty::Visibility),
G: FnMut(ast::CrateNum) -> Rc<crate_metadata>,
G: FnMut(ast::CrateNum) -> Rc<CrateMetadata>,
{
// Find the item.
let item_doc = match cdata.get_item(id) {
@ -779,7 +776,7 @@ pub fn each_top_level_item_of_crate<F, G>(intr: Rc<IdentInterner>,
get_crate_data: G,
callback: F) where
F: FnMut(DefLike, ast::Name, ty::Visibility),
G: FnMut(ast::CrateNum) -> Rc<crate_metadata>,
G: FnMut(ast::CrateNum) -> Rc<CrateMetadata>,
{
let root_doc = rbml::Doc::new(cdata.data());
let misc_info_doc = reader::get_doc(root_doc, tag_misc_info);
@ -1348,25 +1345,16 @@ pub fn translate_def_id(cdata: Cmd, did: DefId) -> DefId {
return DefId { krate: cdata.cnum, index: did.index };
}
match cdata.cnum_map.borrow().get(&did.krate) {
Some(&n) => {
DefId {
krate: n,
index: did.index,
}
}
None => bug!("didn't find a crate in the cnum_map")
DefId {
krate: cdata.cnum_map.borrow()[did.krate],
index: did.index
}
}
// Translate a DefId from the current compilation environment to a DefId
// for an external crate.
fn reverse_translate_def_id(cdata: Cmd, did: DefId) -> Option<DefId> {
if did.krate == cdata.cnum {
return Some(DefId { krate: LOCAL_CRATE, index: did.index });
}
for (&local, &global) in cdata.cnum_map.borrow().iter() {
for (local, &global) in cdata.cnum_map.borrow().iter_enumerated() {
if global == did.krate {
return Some(DefId { krate: local, index: did.index });
}
@ -1545,10 +1533,7 @@ pub fn get_dylib_dependency_formats(cdata: Cmd)
let cnum = spec.split(':').nth(0).unwrap();
let link = spec.split(':').nth(1).unwrap();
let cnum: ast::CrateNum = cnum.parse().unwrap();
let cnum = match cdata.cnum_map.borrow().get(&cnum) {
Some(&n) => n,
None => bug!("didn't find a crate in the cnum_map")
};
let cnum = cdata.cnum_map.borrow()[cnum];
result.push((cnum, if link == "d" {
LinkagePreference::RequireDynamic
} else {

Some files were not shown because too many files have changed in this diff Show more