Merge remote-tracking branch 'upstream/io' into io

Conflicts:
	src/rt/rustrt.def.in
This commit is contained in:
Eric Reed 2013-06-25 11:45:44 -07:00
commit 4870dce3eb
246 changed files with 5086 additions and 5376 deletions

View file

@ -4,8 +4,8 @@ Source layout:
librustc/ The self-hosted compiler
libcore/ The core library (imported and linked by default)
libstd/ The standard library (slightly more peripheral code)
libstd/ The standard library (imported and linked by default)
libextra/ The "extras" library (slightly more peripheral code)
libsyntax/ The Rust parser and pretty-printer
rt/ The runtime system

View file

@ -23,7 +23,7 @@ fn target_env(lib_path: &str, prog: &str) -> ~[(~str,~str)] {
assert!(prog.ends_with(".exe"));
let aux_path = prog.slice(0u, prog.len() - 4u).to_owned() + ".libaux";
env = do vec::map(env) |pair| {
env = do env.map() |pair| {
let (k,v) = copy *pair;
if k == ~"PATH" { (~"PATH", v + ";" + lib_path + ";" + aux_path) }
else { (k,v) }

View file

@ -24,7 +24,6 @@ use util::logv;
use core::io;
use core::os;
use core::str;
use core::uint;
use core::vec;

View file

@ -16,9 +16,6 @@ extern mod core(name = "std", vers = "0.7-pre");
#[cfg(rustpkg)]
extern mod this(name = "rustpkg");
#[cfg(fuzzer)]
extern mod this(name = "fuzzer");
#[cfg(rustdoc)]
extern mod this(name = "rustdoc");

View file

@ -26,29 +26,29 @@ download_unpack_base = os.path.join(download_dir_base, "unpack")
snapshot_files = {
"linux": ["bin/rustc",
"lib/libcore-*.so",
"lib/libstd-*.so",
"lib/libextra-*.so",
"lib/librustc-*.so",
"lib/libsyntax-*.so",
"lib/librustrt.so",
"lib/librustllvm.so"],
"macos": ["bin/rustc",
"lib/libcore-*.dylib",
"lib/libstd-*.dylib",
"lib/libextra-*.dylib",
"lib/librustc-*.dylib",
"lib/libsyntax-*.dylib",
"lib/librustrt.dylib",
"lib/librustllvm.dylib"],
"winnt": ["bin/rustc.exe",
"bin/core-*.dll",
"bin/std-*.dll",
"bin/extra-*.dll",
"bin/rustc-*.dll",
"bin/syntax-*.dll",
"bin/rustrt.dll",
"bin/rustllvm.dll"],
"freebsd": ["bin/rustc",
"lib/libcore-*.so",
"lib/libstd-*.so",
"lib/libextra-*.so",
"lib/librustc-*.so",
"lib/libsyntax-*.so",
"lib/librustrt.so",

View file

@ -58,7 +58,7 @@ pub struct Condvar<'self> {
impl<'self> Condvar<'self> {
/// Atomically exit the associated ARC and block until a signal is sent.
#[inline(always)]
#[inline]
pub fn wait(&self) { self.wait_on(0) }
/**
@ -67,7 +67,7 @@ impl<'self> Condvar<'self> {
*
* wait() is equivalent to wait_on(0).
*/
#[inline(always)]
#[inline]
pub fn wait_on(&self, condvar_id: uint) {
assert!(!*self.failed);
self.cond.wait_on(condvar_id);
@ -76,28 +76,28 @@ impl<'self> Condvar<'self> {
}
/// Wake up a blocked task. Returns false if there was no blocked task.
#[inline(always)]
#[inline]
pub fn signal(&self) -> bool { self.signal_on(0) }
/**
* Wake up a blocked task on a specified condvar (as
* sync::cond.signal_on). Returns false if there was no blocked task.
*/
#[inline(always)]
#[inline]
pub fn signal_on(&self, condvar_id: uint) -> bool {
assert!(!*self.failed);
self.cond.signal_on(condvar_id)
}
/// Wake up all blocked tasks. Returns the number of tasks woken.
#[inline(always)]
#[inline]
pub fn broadcast(&self) -> uint { self.broadcast_on(0) }
/**
* Wake up all blocked tasks on a specified condvar (as
* sync::cond.broadcast_on). Returns the number of tasks woken.
*/
#[inline(always)]
#[inline]
pub fn broadcast_on(&self, condvar_id: uint) -> uint {
assert!(!*self.failed);
self.cond.broadcast_on(condvar_id)
@ -198,22 +198,20 @@ impl<T:Owned> MutexARC<T> {
* any tasks that subsequently try to access it (including those already
* blocked on the mutex) will also fail immediately.
*/
#[inline(always)]
#[inline]
pub unsafe fn access<U>(&self, blk: &fn(x: &mut T) -> U) -> U {
unsafe {
let state = self.x.get();
// Borrowck would complain about this if the function were
// not already unsafe. See borrow_rwlock, far below.
do (&(*state).lock).lock {
check_poison(true, (*state).failed);
let _z = PoisonOnFail(&mut (*state).failed);
blk(&mut (*state).data)
}
let state = self.x.get();
// Borrowck would complain about this if the function were
// not already unsafe. See borrow_rwlock, far below.
do (&(*state).lock).lock {
check_poison(true, (*state).failed);
let _z = PoisonOnFail(&mut (*state).failed);
blk(&mut (*state).data)
}
}
/// As access(), but with a condvar, as sync::mutex.lock_cond().
#[inline(always)]
#[inline]
pub unsafe fn access_cond<'x, 'c, U>(&self,
blk: &fn(x: &'x mut T,
c: &'c Condvar) -> U)
@ -231,7 +229,7 @@ impl<T:Owned> MutexARC<T> {
}
// Common code for {mutex.access,rwlock.write}{,_cond}.
#[inline(always)]
#[inline]
#[doc(hidden)]
fn check_poison(is_mutex: bool, failed: bool) {
if failed {
@ -322,7 +320,7 @@ impl<T:Const + Owned> RWARC<T> {
* that other tasks won't block forever. As MutexARC.access, it will also
* poison the ARC, so subsequent readers and writers will both also fail.
*/
#[inline(always)]
#[inline]
pub fn write<U>(&self, blk: &fn(x: &mut T) -> U) -> U {
unsafe {
let state = self.x.get();
@ -335,7 +333,7 @@ impl<T:Const + Owned> RWARC<T> {
}
/// As write(), but with a condvar, as sync::rwlock.write_cond().
#[inline(always)]
#[inline]
pub fn write_cond<'x, 'c, U>(&self,
blk: &fn(x: &'x mut T, c: &'c Condvar) -> U)
-> U {

View file

@ -119,7 +119,7 @@ pub fn Arena() -> Arena {
arena_with_size(32u)
}
#[inline(always)]
#[inline]
fn round_up_to(base: uint, align: uint) -> uint {
(base + (align - 1)) & !(align - 1)
}
@ -156,12 +156,12 @@ unsafe fn destroy_chunk(chunk: &Chunk) {
// initialized in the arena in the low bit of the tydesc pointer. This
// is necessary in order to properly do cleanup if a failure occurs
// during an initializer.
#[inline(always)]
#[inline]
unsafe fn bitpack_tydesc_ptr(p: *TypeDesc, is_done: bool) -> uint {
let p_bits: uint = transmute(p);
p_bits | (is_done as uint)
}
#[inline(always)]
#[inline]
unsafe fn un_bitpack_tydesc_ptr(p: uint) -> (*TypeDesc, bool) {
(transmute(p & !1), p & 1 == 1)
}
@ -179,7 +179,7 @@ impl Arena {
return self.alloc_pod_inner(n_bytes, align);
}
#[inline(always)]
#[inline]
fn alloc_pod_inner(&mut self, n_bytes: uint, align: uint) -> *u8 {
unsafe {
// XXX: Borrow check
@ -199,7 +199,7 @@ impl Arena {
}
}
#[inline(always)]
#[inline]
fn alloc_pod<'a, T>(&'a mut self, op: &fn() -> T) -> &'a T {
unsafe {
let tydesc = sys::get_type_desc::<T>();
@ -223,7 +223,7 @@ impl Arena {
return self.alloc_nonpod_inner(n_bytes, align);
}
#[inline(always)]
#[inline]
fn alloc_nonpod_inner(&mut self, n_bytes: uint, align: uint)
-> (*u8, *u8) {
unsafe {
@ -246,7 +246,7 @@ impl Arena {
}
}
#[inline(always)]
#[inline]
fn alloc_nonpod<'a, T>(&'a mut self, op: &fn() -> T) -> &'a T {
unsafe {
let tydesc = sys::get_type_desc::<T>();
@ -268,7 +268,7 @@ impl Arena {
}
// The external interface
#[inline(always)]
#[inline]
pub fn alloc<'a, T>(&'a mut self, op: &fn() -> T) -> &'a T {
unsafe {
// XXX: Borrow check

View file

@ -229,8 +229,6 @@ impl<'self> FromBase64 for &'self str {
#[cfg(test)]
mod tests {
use core::str;
#[test]
fn test_to_base64() {
assert_eq!("".to_base64(), ~"");

View file

@ -23,7 +23,7 @@ struct SmallBitv {
}
/// a mask that has a 1 for each defined bit in a small_bitv, assuming n bits
#[inline(always)]
#[inline]
fn small_mask(nbits: uint) -> uint {
(1 << nbits) - 1
}
@ -33,7 +33,7 @@ impl SmallBitv {
SmallBitv {bits: bits}
}
#[inline(always)]
#[inline]
pub fn bits_op(&mut self,
right_bits: uint,
nbits: uint,
@ -46,32 +46,32 @@ impl SmallBitv {
mask & old_b != mask & new_b
}
#[inline(always)]
#[inline]
pub fn union(&mut self, s: &SmallBitv, nbits: uint) -> bool {
self.bits_op(s.bits, nbits, |u1, u2| u1 | u2)
}
#[inline(always)]
#[inline]
pub fn intersect(&mut self, s: &SmallBitv, nbits: uint) -> bool {
self.bits_op(s.bits, nbits, |u1, u2| u1 & u2)
}
#[inline(always)]
#[inline]
pub fn become(&mut self, s: &SmallBitv, nbits: uint) -> bool {
self.bits_op(s.bits, nbits, |_u1, u2| u2)
}
#[inline(always)]
#[inline]
pub fn difference(&mut self, s: &SmallBitv, nbits: uint) -> bool {
self.bits_op(s.bits, nbits, |u1, u2| u1 & !u2)
}
#[inline(always)]
#[inline]
pub fn get(&self, i: uint) -> bool {
(self.bits & (1 << i)) != 0
}
#[inline(always)]
#[inline]
pub fn set(&mut self, i: uint, x: bool) {
if x {
self.bits |= 1<<i;
@ -81,29 +81,29 @@ impl SmallBitv {
}
}
#[inline(always)]
#[inline]
pub fn equals(&self, b: &SmallBitv, nbits: uint) -> bool {
let mask = small_mask(nbits);
mask & self.bits == mask & b.bits
}
#[inline(always)]
#[inline]
pub fn clear(&mut self) { self.bits = 0; }
#[inline(always)]
#[inline]
pub fn set_all(&mut self) { self.bits = !0; }
#[inline(always)]
#[inline]
pub fn is_true(&self, nbits: uint) -> bool {
small_mask(nbits) & !self.bits == 0
}
#[inline(always)]
#[inline]
pub fn is_false(&self, nbits: uint) -> bool {
small_mask(nbits) & self.bits == 0
}
#[inline(always)]
#[inline]
pub fn invert(&mut self) { self.bits = !self.bits; }
}
@ -115,7 +115,7 @@ struct BigBitv {
* a mask that has a 1 for each defined bit in the nth element of a big_bitv,
* assuming n bits.
*/
#[inline(always)]
#[inline]
fn big_mask(nbits: uint, elem: uint) -> uint {
let rmd = nbits % uint::bits;
let nelems = nbits/uint::bits + if rmd == 0 {0} else {1};
@ -132,7 +132,7 @@ impl BigBitv {
BigBitv {storage: storage}
}
#[inline(always)]
#[inline]
pub fn process(&mut self,
b: &BigBitv,
nbits: uint,
@ -154,35 +154,35 @@ impl BigBitv {
changed
}
#[inline(always)]
#[inline]
pub fn each_storage(&mut self, op: &fn(v: &mut uint) -> bool) -> bool {
uint::range(0, self.storage.len(), |i| op(&mut self.storage[i]))
}
#[inline(always)]
#[inline]
pub fn invert(&mut self) { for self.each_storage |w| { *w = !*w } }
#[inline(always)]
#[inline]
pub fn union(&mut self, b: &BigBitv, nbits: uint) -> bool {
self.process(b, nbits, |w1, w2| w1 | w2)
}
#[inline(always)]
#[inline]
pub fn intersect(&mut self, b: &BigBitv, nbits: uint) -> bool {
self.process(b, nbits, |w1, w2| w1 & w2)
}
#[inline(always)]
#[inline]
pub fn become(&mut self, b: &BigBitv, nbits: uint) -> bool {
self.process(b, nbits, |_, w| w)
}
#[inline(always)]
#[inline]
pub fn difference(&mut self, b: &BigBitv, nbits: uint) -> bool {
self.process(b, nbits, |w1, w2| w1 & !w2)
}
#[inline(always)]
#[inline]
pub fn get(&self, i: uint) -> bool {
let w = i / uint::bits;
let b = i % uint::bits;
@ -190,7 +190,7 @@ impl BigBitv {
x == 1
}
#[inline(always)]
#[inline]
pub fn set(&mut self, i: uint, x: bool) {
let w = i / uint::bits;
let b = i % uint::bits;
@ -199,7 +199,7 @@ impl BigBitv {
else { self.storage[w] & !flag };
}
#[inline(always)]
#[inline]
pub fn equals(&self, b: &BigBitv, nbits: uint) -> bool {
let len = b.storage.len();
for uint::iterate(0, len) |i| {
@ -229,7 +229,7 @@ fn die() -> ! {
}
impl Bitv {
#[inline(always)]
#[inline]
fn do_op(&mut self, op: Op, other: &Bitv) -> bool {
if self.nbits != other.nbits {
die();
@ -279,7 +279,7 @@ impl Bitv {
* Sets `self` to the union of `self` and `v1`. Both bitvectors must be
* the same length. Returns 'true' if `self` changed.
*/
#[inline(always)]
#[inline]
pub fn union(&mut self, v1: &Bitv) -> bool { self.do_op(Union, v1) }
/**
@ -288,7 +288,7 @@ impl Bitv {
* Sets `self` to the intersection of `self` and `v1`. Both bitvectors
* must be the same length. Returns 'true' if `self` changed.
*/
#[inline(always)]
#[inline]
pub fn intersect(&mut self, v1: &Bitv) -> bool {
self.do_op(Intersect, v1)
}
@ -299,11 +299,11 @@ impl Bitv {
* Both bitvectors must be the same length. Returns `true` if `self` was
* changed
*/
#[inline(always)]
#[inline]
pub fn assign(&mut self, v: &Bitv) -> bool { self.do_op(Assign, v) }
/// Retrieve the value at index `i`
#[inline(always)]
#[inline]
pub fn get(&self, i: uint) -> bool {
assert!((i < self.nbits));
match self.rep {
@ -317,7 +317,7 @@ impl Bitv {
*
* `i` must be less than the length of the bitvector.
*/
#[inline(always)]
#[inline]
pub fn set(&mut self, i: uint, x: bool) {
assert!((i < self.nbits));
match self.rep {
@ -332,7 +332,7 @@ impl Bitv {
* Both bitvectors must be the same length. Returns `true` if both
* bitvectors contain identical elements.
*/
#[inline(always)]
#[inline]
pub fn equal(&self, v1: &Bitv) -> bool {
if self.nbits != v1.nbits { return false; }
match self.rep {
@ -348,7 +348,7 @@ impl Bitv {
}
/// Set all bits to 0
#[inline(always)]
#[inline]
pub fn clear(&mut self) {
match self.rep {
Small(ref mut b) => b.clear(),
@ -357,7 +357,7 @@ impl Bitv {
}
/// Set all bits to 1
#[inline(always)]
#[inline]
pub fn set_all(&mut self) {
match self.rep {
Small(ref mut b) => b.set_all(),
@ -365,7 +365,7 @@ impl Bitv {
}
/// Invert all bits
#[inline(always)]
#[inline]
pub fn invert(&mut self) {
match self.rep {
Small(ref mut b) => b.invert(),
@ -381,13 +381,13 @@ impl Bitv {
*
* Returns `true` if `v0` was changed.
*/
#[inline(always)]
#[inline]
pub fn difference(&mut self, v: &Bitv) -> bool {
self.do_op(Difference, v)
}
/// Returns true if all bits are 1
#[inline(always)]
#[inline]
pub fn is_true(&self) -> bool {
match self.rep {
Small(ref b) => b.is_true(self.nbits),
@ -398,7 +398,7 @@ impl Bitv {
}
}
#[inline(always)]
#[inline]
pub fn each(&self, f: &fn(bool) -> bool) -> bool {
let mut i = 0;
while i < self.nbits {
@ -508,7 +508,7 @@ impl Bitv {
impl Clone for Bitv {
/// Makes a copy of a bitvector
#[inline(always)]
#[inline]
fn clone(&self) -> Bitv {
match self.rep {
Small(ref b) => {
@ -562,7 +562,7 @@ impl ops::Index<uint,bool> for Bitv {
}
}
#[inline(always)]
#[inline]
fn iterate_bits(base: uint, bits: uint, f: &fn(uint) -> bool) -> bool {
if bits == 0 {
return true;
@ -623,7 +623,7 @@ impl BitvSet {
return Bitv{ nbits:cap, rep: Big(~bitv) };
}
#[inline(always)]
#[inline]
fn other_op(&mut self, other: &BitvSet, f: &fn(uint, uint) -> uint) {
fn nbits(mut w: uint) -> uint {
let mut bits = 0;

View file

@ -122,7 +122,7 @@ pub unsafe fn c_vec_with_dtor<T>(base: *mut T, len: uint, dtor: @fn())
*/
pub fn get<T:Copy>(t: CVec<T>, ofs: uint) -> T {
assert!(ofs < len(t));
return unsafe { *ptr::mut_offset(t.base, ofs) };
return unsafe { copy *ptr::mut_offset(t.base, ofs) };
}
/**

View file

@ -264,31 +264,31 @@ mod tests {
fn test_parameterized<T:Copy + Eq>(a: T, b: T, c: T, d: T) {
let mut deq = Deque::new();
assert_eq!(deq.len(), 0);
deq.add_front(a);
deq.add_front(b);
deq.add_back(c);
deq.add_front(copy a);
deq.add_front(copy b);
deq.add_back(copy c);
assert_eq!(deq.len(), 3);
deq.add_back(d);
deq.add_back(copy d);
assert_eq!(deq.len(), 4);
assert_eq!(*deq.peek_front(), b);
assert_eq!(*deq.peek_back(), d);
assert_eq!(deq.pop_front(), b);
assert_eq!(deq.pop_back(), d);
assert_eq!(deq.pop_back(), c);
assert_eq!(deq.pop_back(), a);
assert_eq!(copy *deq.peek_front(), copy b);
assert_eq!(copy *deq.peek_back(), copy d);
assert_eq!(deq.pop_front(), copy b);
assert_eq!(deq.pop_back(), copy d);
assert_eq!(deq.pop_back(), copy c);
assert_eq!(deq.pop_back(), copy a);
assert_eq!(deq.len(), 0);
deq.add_back(c);
deq.add_back(copy c);
assert_eq!(deq.len(), 1);
deq.add_front(b);
deq.add_front(copy b);
assert_eq!(deq.len(), 2);
deq.add_back(d);
deq.add_back(copy d);
assert_eq!(deq.len(), 3);
deq.add_front(a);
deq.add_front(copy a);
assert_eq!(deq.len(), 4);
assert_eq!(*deq.get(0), a);
assert_eq!(*deq.get(1), b);
assert_eq!(*deq.get(2), c);
assert_eq!(*deq.get(3), d);
assert_eq!(copy *deq.get(0), copy a);
assert_eq!(copy *deq.get(1), copy b);
assert_eq!(copy *deq.get(2), copy c);
assert_eq!(copy *deq.get(3), copy d);
}
#[deriving(Eq)]

View file

@ -111,7 +111,8 @@ pub fn from_elem<T>(data: T) -> @mut DList<T> {
/// Creates a new dlist from a vector of elements, maintaining the same order
pub fn from_vec<T:Copy>(vec: &[T]) -> @mut DList<T> {
do vec.iter().fold(DList()) |list,data| {
list.push(*data); // Iterating left-to-right -- add newly to the tail.
// Iterating left-to-right -- add newly to the tail.
list.push(copy *data);
list
}
}
@ -159,7 +160,7 @@ impl<T> DList<T> {
}
// Link two nodes together. If either of them are 'none', also sets
// the head and/or tail pointers appropriately.
#[inline(always)]
#[inline]
fn link(&mut self, before: DListLink<T>, after: DListLink<T>) {
match before {
Some(neighbour) => neighbour.next = after,
@ -460,35 +461,35 @@ impl<T> DList<T> {
impl<T:Copy> DList<T> {
/// Remove data from the head of the list. O(1).
pub fn pop(@mut self) -> Option<T> {
self.pop_n().map(|nobe| nobe.data)
self.pop_n().map(|nobe| copy nobe.data)
}
/// Remove data from the tail of the list. O(1).
pub fn pop_tail(@mut self) -> Option<T> {
self.pop_tail_n().map(|nobe| nobe.data)
self.pop_tail_n().map(|nobe| copy nobe.data)
}
/// Get data at the list's head. O(1).
pub fn peek(@mut self) -> Option<T> {
self.peek_n().map(|nobe| nobe.data)
self.peek_n().map(|nobe| copy nobe.data)
}
/// Get data at the list's tail. O(1).
pub fn peek_tail(@mut self) -> Option<T> {
self.peek_tail_n().map (|nobe| nobe.data)
self.peek_tail_n().map (|nobe| copy nobe.data)
}
/// Get data at the list's head, failing if empty. O(1).
pub fn head(@mut self) -> T { self.head_n().data }
pub fn head(@mut self) -> T { copy self.head_n().data }
/// Get data at the list's tail, failing if empty. O(1).
pub fn tail(@mut self) -> T { self.tail_n().data }
pub fn tail(@mut self) -> T { copy self.tail_n().data }
/// Get the elements of the list as a vector. O(n).
pub fn to_vec(@mut self) -> ~[T] {
let mut v = vec::with_capacity(self.size);
for old_iter::eachi(&self) |index,data| {
v[index] = *data;
v[index] = copy *data;
}
v
}
@ -531,7 +532,7 @@ impl<T> BaseIter<T> for @mut DList<T> {
return true;
}
#[inline(always)]
#[inline]
fn size_hint(&self) -> Option<uint> { Some(self.len()) }
}

View file

@ -516,7 +516,6 @@ pub mod bytepipes {
use core::comm::{Port, Chan};
use core::comm;
use core::io::{Writer, Reader, ReaderUtil};
use core::vec;
pub struct ReaderBytePort<R> {
reader: R
@ -583,12 +582,12 @@ pub mod bytepipes {
impl BytePort for PipeBytePort {
fn try_recv(&self, count: uint) -> Option<~[u8]> {
if vec::uniq_len(&const *self.buf) >= count {
if self.buf.len() >= count {
let mut bytes = ::core::util::replace(&mut *self.buf, ~[]);
*self.buf = bytes.slice(count, bytes.len()).to_owned();
bytes.truncate(count);
return Some(bytes);
} else if vec::uniq_len(&const *self.buf) > 0 {
} else if !self.buf.is_empty() {
let mut bytes = ::core::util::replace(&mut *self.buf, ~[]);
assert!(count > bytes.len());
match self.try_recv(count - bytes.len()) {
@ -598,7 +597,7 @@ pub mod bytepipes {
}
None => return None
}
} else if vec::uniq_len(&const *self.buf) == 0 {
} else /* empty */ {
match self.port.try_recv() {
Some(buf) => {
assert!(!buf.is_empty());
@ -607,8 +606,6 @@ pub mod bytepipes {
}
None => return None
}
} else {
::core::util::unreachable()
}
}
}

View file

@ -57,7 +57,7 @@ priv enum FutureState<A> {
impl<A:Copy> Future<A> {
pub fn get(&mut self) -> A {
//! Get the value of the future.
*(self.get_ref())
copy *(self.get_ref())
}
}

View file

@ -27,7 +27,7 @@ pub enum MutList<T> {
/// Create a list from a vector
pub fn from_vec<T:Copy>(v: &[T]) -> @List<T> {
v.rev_iter().fold(@Nil::<T>, |t, h| @Cons(*h, t))
v.rev_iter().fold(@Nil::<T>, |t, h| @Cons(copy *h, t))
}
/**
@ -61,7 +61,7 @@ pub fn find<T:Copy>(ls: @List<T>, f: &fn(&T) -> bool) -> Option<T> {
loop {
ls = match *ls {
Cons(ref hd, tl) => {
if f(hd) { return Some(*hd); }
if f(hd) { return Some(copy *hd); }
tl
}
Nil => return None

View file

@ -181,90 +181,88 @@ pub fn connect(input_ip: ip::IpAddr, port: uint,
debug!("stream_handle_ptr outside interact %?",
stream_handle_ptr);
do iotask::interact(iotask) |loop_ptr| {
unsafe {
debug!("in interact cb for tcp client connect..");
debug!("stream_handle_ptr in interact %?",
stream_handle_ptr);
match uv::ll::tcp_init( loop_ptr, stream_handle_ptr) {
0i32 => {
debug!("tcp_init successful");
debug!("dealing w/ ipv4 connection..");
let connect_req_ptr: *uv::ll::uv_connect_t =
&(*socket_data_ptr).connect_req;
let addr_str = ip::format_addr(&input_ip);
let connect_result = match input_ip {
ip::Ipv4(ref addr) => {
// have to "recreate" the
// sockaddr_in/6 since the ip_addr
// discards the port info.. should
// probably add an additional rust
// type that actually is closer to
// what the libuv API expects (ip str
// + port num)
debug!("addr: %?", addr);
let in_addr = uv::ll::ip4_addr(addr_str,
port as int);
uv::ll::tcp_connect(
connect_req_ptr,
stream_handle_ptr,
&in_addr,
tcp_connect_on_connect_cb)
}
ip::Ipv6(ref addr) => {
debug!("addr: %?", addr);
let in_addr = uv::ll::ip6_addr(addr_str,
port as int);
uv::ll::tcp_connect6(
connect_req_ptr,
stream_handle_ptr,
&in_addr,
tcp_connect_on_connect_cb)
}
};
match connect_result {
0i32 => {
debug!("tcp_connect successful: \
stream %x,
socket data %x",
stream_handle_ptr as uint,
socket_data_ptr as uint);
// reusable data that we'll have for the
// duration..
uv::ll::set_data_for_uv_handle(
stream_handle_ptr,
socket_data_ptr as
*libc::c_void);
// just so the connect_cb can send the
// outcome..
uv::ll::set_data_for_req(connect_req_ptr,
conn_data_ptr);
debug!("leaving tcp_connect interact cb...");
// let tcp_connect_on_connect_cb send on
// the result_ch, now..
}
_ => {
// immediate connect
// failure.. probably a garbage ip or
// somesuch
let err_data =
uv::ll::get_last_err_data(loop_ptr);
let result_ch = (*conn_data_ptr)
.result_ch.clone();
result_ch.send(ConnFailure(err_data));
uv::ll::set_data_for_uv_handle(
stream_handle_ptr,
conn_data_ptr);
uv::ll::close(stream_handle_ptr,
stream_error_close_cb);
}
debug!("in interact cb for tcp client connect..");
debug!("stream_handle_ptr in interact %?",
stream_handle_ptr);
match uv::ll::tcp_init( loop_ptr, stream_handle_ptr) {
0i32 => {
debug!("tcp_init successful");
debug!("dealing w/ ipv4 connection..");
let connect_req_ptr: *uv::ll::uv_connect_t =
&(*socket_data_ptr).connect_req;
let addr_str = ip::format_addr(&input_ip);
let connect_result = match input_ip {
ip::Ipv4(ref addr) => {
// have to "recreate" the
// sockaddr_in/6 since the ip_addr
// discards the port info.. should
// probably add an additional rust
// type that actually is closer to
// what the libuv API expects (ip str
// + port num)
debug!("addr: %?", addr);
let in_addr = uv::ll::ip4_addr(addr_str,
port as int);
uv::ll::tcp_connect(
connect_req_ptr,
stream_handle_ptr,
&in_addr,
tcp_connect_on_connect_cb)
}
ip::Ipv6(ref addr) => {
debug!("addr: %?", addr);
let in_addr = uv::ll::ip6_addr(addr_str,
port as int);
uv::ll::tcp_connect6(
connect_req_ptr,
stream_handle_ptr,
&in_addr,
tcp_connect_on_connect_cb)
}
};
match connect_result {
0i32 => {
debug!("tcp_connect successful: \
stream %x,
socket data %x",
stream_handle_ptr as uint,
socket_data_ptr as uint);
// reusable data that we'll have for the
// duration..
uv::ll::set_data_for_uv_handle(
stream_handle_ptr,
socket_data_ptr as
*libc::c_void);
// just so the connect_cb can send the
// outcome..
uv::ll::set_data_for_req(connect_req_ptr,
conn_data_ptr);
debug!("leaving tcp_connect interact cb...");
// let tcp_connect_on_connect_cb send on
// the result_ch, now..
}
_ => {
// immediate connect
// failure.. probably a garbage ip or
// somesuch
let err_data =
uv::ll::get_last_err_data(loop_ptr);
let result_ch = (*conn_data_ptr)
.result_ch.clone();
result_ch.send(ConnFailure(err_data));
uv::ll::set_data_for_uv_handle(
stream_handle_ptr,
conn_data_ptr);
uv::ll::close(stream_handle_ptr,
stream_error_close_cb);
}
}
_ => {
// failure to create a tcp handle
let err_data = uv::ll::get_last_err_data(loop_ptr);
let result_ch = (*conn_data_ptr).result_ch.clone();
result_ch.send(ConnFailure(err_data));
}
}
_ => {
// failure to create a tcp handle
let err_data = uv::ll::get_last_err_data(loop_ptr);
let result_ch = (*conn_data_ptr).result_ch.clone();
result_ch.send(ConnFailure(err_data));
}
}
}
@ -879,8 +877,7 @@ impl io::Reader for TcpSocketBuf {
// If possible, copy up to `len` bytes from the internal
// `data.buf` into `buf`
let nbuffered = vec::uniq_len(&const self.data.buf) -
self.data.buf_off;
let nbuffered = self.data.buf.len() - self.data.buf_off;
let needed = len - count;
if nbuffered > 0 {
unsafe {
@ -934,7 +931,7 @@ impl io::Reader for TcpSocketBuf {
}
fn read_byte(&self) -> int {
loop {
if vec::uniq_len(&const self.data.buf) > self.data.buf_off {
if self.data.buf.len() > self.data.buf_off {
let c = self.data.buf[self.data.buf_off];
self.data.buf_off += 1;
return c as int
@ -1016,14 +1013,12 @@ fn tear_down_socket_data(socket_data: @TcpSocketData) {
let close_data_ptr: *TcpSocketCloseData = &close_data;
let stream_handle_ptr = (*socket_data).stream_handle_ptr;
do iotask::interact(&(*socket_data).iotask) |loop_ptr| {
unsafe {
debug!(
"interact dtor for tcp_socket stream %? loop %?",
stream_handle_ptr, loop_ptr);
uv::ll::set_data_for_uv_handle(stream_handle_ptr,
close_data_ptr);
uv::ll::close(stream_handle_ptr, tcp_socket_dtor_close_cb);
}
debug!(
"interact dtor for tcp_socket stream %? loop %?",
stream_handle_ptr, loop_ptr);
uv::ll::set_data_for_uv_handle(stream_handle_ptr,
close_data_ptr);
uv::ll::close(stream_handle_ptr, tcp_socket_dtor_close_cb);
};
closed_po.recv();
//the line below will most likely crash
@ -1083,19 +1078,17 @@ fn read_stop_common_impl(socket_data: *TcpSocketData) ->
let stream_handle_ptr = (*socket_data).stream_handle_ptr;
let (stop_po, stop_ch) = stream::<Option<TcpErrData>>();
do iotask::interact(&(*socket_data).iotask) |loop_ptr| {
unsafe {
debug!("in interact cb for tcp::read_stop");
match uv::ll::read_stop(stream_handle_ptr
as *uv::ll::uv_stream_t) {
0i32 => {
debug!("successfully called uv_read_stop");
stop_ch.send(None);
}
_ => {
debug!("failure in calling uv_read_stop");
let err_data = uv::ll::get_last_err_data(loop_ptr);
stop_ch.send(Some(err_data.to_tcp_err()));
}
debug!("in interact cb for tcp::read_stop");
match uv::ll::read_stop(stream_handle_ptr
as *uv::ll::uv_stream_t) {
0i32 => {
debug!("successfully called uv_read_stop");
stop_ch.send(None);
}
_ => {
debug!("failure in calling uv_read_stop");
let err_data = uv::ll::get_last_err_data(loop_ptr);
stop_ch.send(Some(err_data.to_tcp_err()));
}
}
}
@ -1115,22 +1108,20 @@ fn read_start_common_impl(socket_data: *TcpSocketData)
let (start_po, start_ch) = stream::<Option<uv::ll::uv_err_data>>();
debug!("in tcp::read_start before interact loop");
do iotask::interact(&(*socket_data).iotask) |loop_ptr| {
unsafe {
debug!("in tcp::read_start interact cb %?",
loop_ptr);
match uv::ll::read_start(stream_handle_ptr
as *uv::ll::uv_stream_t,
on_alloc_cb,
on_tcp_read_cb) {
0i32 => {
debug!("success doing uv_read_start");
start_ch.send(None);
}
_ => {
debug!("error attempting uv_read_start");
let err_data = uv::ll::get_last_err_data(loop_ptr);
start_ch.send(Some(err_data));
}
debug!("in tcp::read_start interact cb %?",
loop_ptr);
match uv::ll::read_start(stream_handle_ptr
as *uv::ll::uv_stream_t,
on_alloc_cb,
on_tcp_read_cb) {
0i32 => {
debug!("success doing uv_read_start");
start_ch.send(None);
}
_ => {
debug!("error attempting uv_read_start");
let err_data = uv::ll::get_last_err_data(loop_ptr);
start_ch.send(Some(err_data));
}
}
}
@ -1167,24 +1158,22 @@ fn write_common_impl(socket_data_ptr: *TcpSocketData,
};
let write_data_ptr: *WriteReqData = &write_data;
do iotask::interact(&(*socket_data_ptr).iotask) |loop_ptr| {
unsafe {
debug!("in interact cb for tcp::write %?",
loop_ptr);
match uv::ll::write(write_req_ptr,
stream_handle_ptr,
write_buf_vec_ptr,
tcp_write_complete_cb) {
0i32 => {
debug!("uv_write() invoked successfully");
uv::ll::set_data_for_req(write_req_ptr,
write_data_ptr);
}
_ => {
debug!("error invoking uv_write()");
let err_data = uv::ll::get_last_err_data(loop_ptr);
let result_ch = (*write_data_ptr).result_ch.clone();
result_ch.send(TcpWriteError(err_data.to_tcp_err()));
}
debug!("in interact cb for tcp::write %?",
loop_ptr);
match uv::ll::write(write_req_ptr,
stream_handle_ptr,
write_buf_vec_ptr,
tcp_write_complete_cb) {
0i32 => {
debug!("uv_write() invoked successfully");
uv::ll::set_data_for_req(write_req_ptr,
write_data_ptr);
}
_ => {
debug!("error invoking uv_write()");
let err_data = uv::ll::get_last_err_data(loop_ptr);
let result_ch = (*write_data_ptr).result_ch.clone();
result_ch.send(TcpWriteError(err_data.to_tcp_err()));
}
}
}

View file

@ -80,19 +80,17 @@ impl<T: Clone + Num> Cmplx<T> {
}
}
#[cfg(not(stage0))] // Fixed by #4228
impl<T: Clone + Algebraic + Num> Cmplx<T> {
/// Calculate |self|
#[inline(always)]
#[inline]
pub fn norm(&self) -> T {
self.re.hypot(&self.im)
}
}
#[cfg(not(stage0))] // Fixed by #4228
impl<T: Clone + Trigonometric + Algebraic + Num> Cmplx<T> {
/// Calculate the principal Arg of self.
#[inline(always)]
#[inline]
pub fn arg(&self) -> T {
self.im.atan2(&self.re)
}
@ -222,6 +220,8 @@ mod test {
}
#[test]
#[ignore(cfg(target_arch = "x86"))]
// FIXME #7158: (maybe?) currently failing on x86.
fn test_norm() {
fn test(c: Complex, ns: float) {
assert_eq!(c.norm_sqr(), ns);

View file

@ -36,19 +36,19 @@ pub type BigRational = Ratio<BigInt>;
impl<T: Clone + Integer + Ord>
Ratio<T> {
/// Create a ratio representing the integer `t`.
#[inline(always)]
#[inline]
pub fn from_integer(t: T) -> Ratio<T> {
Ratio::new_raw(t, One::one())
}
/// Create a ratio without checking for `denom == 0` or reducing.
#[inline(always)]
#[inline]
pub fn new_raw(numer: T, denom: T) -> Ratio<T> {
Ratio { numer: numer, denom: denom }
}
/// Create a new Ratio. Fails if `denom == 0`.
#[inline(always)]
#[inline]
pub fn new(numer: T, denom: T) -> Ratio<T> {
if denom == Zero::zero() {
fail!("denominator == 0");
@ -206,7 +206,7 @@ impl<T: Clone + Integer + Ord>
}
}
#[inline(always)]
#[inline]
fn round(&self) -> Ratio<T> {
if *self < Zero::zero() {
Ratio::from_integer((self.numer - self.denom + One::one()) / self.denom)
@ -215,7 +215,7 @@ impl<T: Clone + Integer + Ord>
}
}
#[inline(always)]
#[inline]
fn trunc(&self) -> Ratio<T> {
Ratio::from_integer(self.numer / self.denom)
}

View file

@ -35,10 +35,10 @@ impl<T:Ord> BaseIter<T> for PriorityQueue<T> {
impl<T:Ord> Container for PriorityQueue<T> {
/// Returns the length of the queue
fn len(&const self) -> uint { vec::uniq_len(&const self.data) }
fn len(&self) -> uint { self.data.len() }
/// Returns true if a queue contains no elements
fn is_empty(&const self) -> bool { self.len() == 0 }
fn is_empty(&self) -> bool { self.len() == 0 }
}
impl<T:Ord> Mutable for PriorityQueue<T> {

View file

@ -60,7 +60,7 @@ pub fn rc_from_const<T: Const>(value: T) -> Rc<T> {
}
impl<T> Rc<T> {
#[inline(always)]
#[inline]
pub fn borrow<'r>(&'r self) -> &'r T {
unsafe { cast::copy_lifetime(self, &(*self.ptr).value) }
}

View file

@ -1271,7 +1271,6 @@ mod tests {
use rope::*;
use core::str;
use core::uint;
use core::vec;

View file

@ -29,7 +29,7 @@ pub enum Identifier {
}
impl cmp::Ord for Identifier {
#[inline(always)]
#[inline]
fn lt(&self, other: &Identifier) -> bool {
match (self, other) {
(&Numeric(a), &Numeric(b)) => a < b,
@ -38,22 +38,22 @@ impl cmp::Ord for Identifier {
(&AlphaNumeric(_), _) => false
}
}
#[inline(always)]
#[inline]
fn le(&self, other: &Identifier) -> bool {
! (other < self)
}
#[inline(always)]
#[inline]
fn gt(&self, other: &Identifier) -> bool {
other < self
}
#[inline(always)]
#[inline]
fn ge(&self, other: &Identifier) -> bool {
! (self < other)
}
}
impl ToStr for Identifier {
#[inline(always)]
#[inline]
fn to_str(&self) -> ~str {
match self {
&Numeric(n) => n.to_str(),
@ -73,7 +73,7 @@ pub struct Version {
}
impl ToStr for Version {
#[inline(always)]
#[inline]
fn to_str(&self) -> ~str {
let s = fmt!("%u.%u.%u", self.major, self.minor, self.patch);
let s = if self.pre.is_empty() {
@ -90,7 +90,7 @@ impl ToStr for Version {
}
impl cmp::Ord for Version {
#[inline(always)]
#[inline]
fn lt(&self, other: &Version) -> bool {
self.major < other.major ||
@ -123,15 +123,15 @@ impl cmp::Ord for Version {
self.build < other.build)
}
#[inline(always)]
#[inline]
fn le(&self, other: &Version) -> bool {
! (other < self)
}
#[inline(always)]
#[inline]
fn gt(&self, other: &Version) -> bool {
other < self
}
#[inline(always)]
#[inline]
fn ge(&self, other: &Version) -> bool {
! (self < other)
}

View file

@ -24,7 +24,6 @@
use core::prelude::*;
use core::str;
use core::uint;
use core::vec;
@ -93,7 +92,7 @@ pub fn sha1() -> @Sha1 {
}
fn process_msg_block(st: &mut Sha1State) {
assert_eq!(st.h.len(), digest_buf_len);
assert_eq!(vec::uniq_len(st.work_buf), work_buf_len);
assert_eq!(st.work_buf.len(), work_buf_len);
let mut t: int; // Loop counter
let w = st.work_buf;
@ -279,8 +278,6 @@ pub fn sha1() -> @Sha1 {
mod tests {
use sha1;
use core::vec;
#[test]
fn test() {
struct Test {

View file

@ -32,9 +32,9 @@ pub struct SmallIntMap<T> {
impl<V> Container for SmallIntMap<V> {
/// Return the number of elements in the map
fn len(&const self) -> uint {
fn len(&self) -> uint {
let mut sz = 0;
for uint::range(0, vec::uniq_len(&const self.v)) |i| {
for uint::range(0, self.v.len()) |i| {
match self.v[i] {
Some(_) => sz += 1,
None => {}
@ -44,7 +44,7 @@ impl<V> Container for SmallIntMap<V> {
}
/// Return true if the map contains no elements
fn is_empty(&const self) -> bool { self.len() == 0 }
fn is_empty(&self) -> bool { self.len() == 0 }
}
impl<V> Mutable for SmallIntMap<V> {
@ -179,7 +179,7 @@ impl<V:Copy> SmallIntMap<V> {
ff: &fn(uint, V, V) -> V) -> bool {
let new_val = match self.find(&key) {
None => val,
Some(orig) => ff(key, *orig, val)
Some(orig) => ff(key, copy *orig, val)
};
self.insert(key, new_val)
}
@ -199,12 +199,12 @@ pub struct SmallIntSet {
impl Container for SmallIntSet {
/// Return the number of elements in the map
fn len(&const self) -> uint {
fn len(&self) -> uint {
self.map.len()
}
/// Return true if the map contains no elements
fn is_empty(&const self) -> bool { self.len() == 0 }
fn is_empty(&self) -> bool { self.len() == 0 }
}
impl Mutable for SmallIntSet {
@ -294,11 +294,6 @@ mod tests {
use super::SmallIntMap;
use core::local_data;
use core::rand;
use core::uint;
use core::vec;
#[test]
fn test_find_mut() {
let mut m = SmallIntMap::new();

View file

@ -37,7 +37,7 @@ pub fn merge_sort<T:Copy>(v: &[T], le: Le<T>) -> ~[T] {
let v_len = end - begin;
if v_len == 0 { return ~[]; }
if v_len == 1 { return ~[v[begin]]; }
if v_len == 1 { return ~[copy v[begin]]; }
let mid = v_len / 2 + begin;
let a = (begin, mid);
@ -53,9 +53,9 @@ pub fn merge_sort<T:Copy>(v: &[T], le: Le<T>) -> ~[T] {
let mut b_ix = 0;
while a_ix < a_len && b_ix < b_len {
if le(&a[a_ix], &b[b_ix]) {
rs.push(a[a_ix]);
rs.push(copy a[a_ix]);
a_ix += 1;
} else { rs.push(b[b_ix]); b_ix += 1; }
} else { rs.push(copy b[b_ix]); b_ix += 1; }
}
rs.push_all(vec::slice(a, a_ix, a_len));
rs.push_all(vec::slice(b, b_ix, b_len));
@ -106,7 +106,7 @@ pub fn quick_sort<T>(arr: &mut [T], compare_func: Le<T>) {
fn qsort3<T:Copy + Ord + Eq>(arr: &mut [T], left: int, right: int) {
if right <= left { return; }
let v: T = arr[right];
let v: T = copy arr[right];
let mut i: int = left - 1;
let mut j: int = right;
let mut p: int = i;
@ -233,7 +233,7 @@ fn binarysort<T:Copy + Ord>(array: &mut [T], start: uint) {
if start == 0 { start += 1; }
while start < size {
let pivot = array[start];
let pivot = copy array[start];
let mut left = 0;
let mut right = start;
assert!(left <= right);
@ -470,7 +470,7 @@ impl<T:Copy + Ord> MergeState<T> {
let mut tmp = ~[];
for uint::range(base1, base1+len1) |i| {
tmp.push(array[i]);
tmp.push(copy array[i]);
}
let mut c1 = 0;
@ -580,7 +580,7 @@ impl<T:Copy + Ord> MergeState<T> {
let mut tmp = ~[];
for uint::range(base2, base2+len2) |i| {
tmp.push(array[i]);
tmp.push(copy array[i]);
}
let mut c1 = base1 + len1 - 1;
@ -725,18 +725,18 @@ impl<T:Copy + Ord> MergeState<T> {
}
}
#[inline(always)]
#[inline]
fn copy_vec<T:Copy>(dest: &mut [T],
s1: uint,
from: &[T]) {
assert!(s1+from.len() <= dest.len());
for from.eachi |i, v| {
dest[s1+i] = *v;
dest[s1+i] = copy *v;
}
}
#[inline(always)]
#[inline]
fn shift_vec<T:Copy>(dest: &mut [T],
s1: uint,
s2: uint,
@ -751,7 +751,6 @@ fn shift_vec<T:Copy>(dest: &mut [T],
mod test_qsort3 {
use sort::*;
use core::vec;
fn check_sort(v1: &mut [int], v2: &mut [int]) {
let len = v1.len();
@ -861,8 +860,6 @@ mod tests {
use sort::*;
use core::vec;
fn check_sort(v1: &[int], v2: &[int]) {
let len = v1.len();
pub fn le(a: &int, b: &int) -> bool { *a <= *b }
@ -1048,7 +1045,7 @@ mod big_tests {
fn multiplyVec<T:Copy>(arr: &[T], num: uint) -> ~[T] {
let size = arr.len();
let res = do vec::from_fn(num) |i| {
arr[i % size]
copy arr[i % size]
};
res
}

View file

@ -12,7 +12,6 @@
use core::prelude::*;
use core::vec;
use core::f64;
use core::cmp;
use core::num;

View file

@ -10,34 +10,32 @@
/*!
The Rust standard library.
Rust extras.
The Rust standand library provides a number of useful features that are
not required in or otherwise suitable for the core library.
The `extra` crate is a set of useful modules for a variety of
purposes, including collections, numerics, I/O, serialization,
and concurrency.
Rust extras are part of the standard Rust distribution.
*/
#[link(name = "extra",
vers = "0.7-pre",
uuid = "122bed0b-c19b-4b82-b0b7-7ae8aead7297",
url = "https://github.com/mozilla/rust/tree/master/src/libstd")];
url = "https://github.com/mozilla/rust/tree/master/src/libextra")];
#[comment = "The Rust standard library"];
#[comment = "Rust extras"];
#[license = "MIT/ASL2"];
#[crate_type = "lib"];
#[deny(non_camel_case_types)];
#[deny(missing_doc)];
// NOTE: remove these two attributes after the next snapshot
#[no_core]; // for stage0
#[allow(unrecognized_lint)]; // otherwise stage0 is seriously ugly
#[no_std];
extern mod core(name = "std", vers = "0.7-pre");
use core::{str, unstable};
use core::str::{StrSlice, OwnedStr};
pub use core::os;
@ -127,7 +125,7 @@ pub mod test;
pub mod serialize;
// A curious inner-module that's not exported that contains the binding
// 'extra' so that macro-expanded references to std::serialize and such
// 'extra' so that macro-expanded references to extra::serialize and such
// can be resolved within libextra.
#[doc(hidden)]
pub mod std {

View file

@ -363,7 +363,7 @@ impl<'self> Condvar<'self> {
// Checks whether a condvar ID was out of bounds, and fails if so, or does
// something else next on success.
#[inline(always)]
#[inline]
#[doc(hidden)]
fn check_cvar_bounds<U>(out_of_bounds: Option<uint>, id: uint, act: &str,
blk: &fn() -> U) -> U {

View file

@ -36,7 +36,6 @@ mod tests {
use tempfile::mkdtemp;
use core::os;
use core::str;
#[test]
fn test_mkdtemp() {

View file

@ -224,9 +224,9 @@ pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables)
}
} else { return Err(~"stack is empty") },
'i' => match (copy mparams[0], copy mparams[1]) {
(Number(ref mut x), Number(ref mut y)) => {
*x += 1;
*y += 1;
(Number(x), Number(y)) => {
mparams[0] = Number(x+1);
mparams[1] = Number(y+1);
},
(_, _) => return Err(~"first two params not numbers with %i")
},
@ -352,6 +352,7 @@ pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables)
#[cfg(test)]
mod test {
use super::*;
use core::result::Ok;
#[test]
fn test_basic_setabf() {
@ -366,6 +367,16 @@ mod test {
bytes!("21").to_owned());
}
#[test]
fn test_op_i() {
let mut vars = Variables::new();
assert_eq!(expand(bytes!("%p1%d%p2%d%p3%d%i%p1%d%p2%d%p3%d"),
[Number(1),Number(2),Number(3)], &mut vars),
Ok(bytes!("123233").to_owned()));
assert_eq!(expand(bytes!("%p1%d%p2%d%i%p1%d%p2%d"), [], &mut vars),
Ok(bytes!("0011").to_owned()));
}
#[test]
fn test_param_stack_failure_conditions() {
let mut varstruct = Variables::new();

View file

@ -313,7 +313,6 @@ pub fn parse(file: @Reader, longnames: bool) -> Result<~TermInfo, ~str> {
#[cfg(test)]
mod test {
use super::*;
use p = core::path::Path;
#[test]
fn test_veclens() {

View file

@ -26,20 +26,11 @@ use core::either;
use core::io;
use core::option;
use core::result;
use core::str;
use core::task;
use core::to_str::ToStr;
use core::uint;
use core::vec;
pub mod rustrt {
use core::libc::size_t;
#[abi = "cdecl"]
pub extern {
pub unsafe fn rust_sched_threads() -> size_t;
}
}
// The name of a test. By convention this follows the rules for rust
// paths; i.e. it should be a series of identifiers separated by double
@ -365,7 +356,7 @@ pub fn run_tests_console(opts: &TestOpts,
fn print_failures(st: &ConsoleTestState) {
st.out.write_line("\nfailures:");
let mut failures = ~[];
for uint::range(0, vec::uniq_len(&const st.failures)) |i| {
for uint::range(0, st.failures.len()) |i| {
let name = copy st.failures[i].name;
failures.push(name.to_str());
}
@ -489,11 +480,10 @@ static sched_overcommit : uint = 1;
static sched_overcommit : uint = 4u;
fn get_concurrency() -> uint {
unsafe {
let threads = rustrt::rust_sched_threads() as uint;
if threads == 1 { 1 }
else { threads * sched_overcommit }
}
use core::rt;
let threads = rt::util::default_sched_threads();
if threads == 1 { 1 }
else { threads * sched_overcommit }
}
#[allow(non_implicitly_copyable_typarams)]
@ -542,7 +532,7 @@ pub fn filter_tests(
// Sort the tests alphabetically
fn lteq(t1: &TestDescAndFn, t2: &TestDescAndFn) -> bool {
str::le(t1.desc.name.to_str(), t2.desc.name.to_str())
t1.desc.name.to_str() < t2.desc.name.to_str()
}
sort::quick_sort(filtered, lteq);

View file

@ -275,7 +275,7 @@ priv fn do_strptime(s: &str, format: &str) -> Result<Tm, ~str> {
let mut i = 0u;
let len = strs.len();
while i < len {
match strs[i] { // can't use let due to stage0 bugs
match strs[i] { // can't use let due to let-pattern bugs
(ref needle, value) => {
if match_str(ss, pos, *needle) {
return Some((value, pos + needle.len()));

View file

@ -75,13 +75,13 @@ fn lt<K: Ord + TotalOrd, V>(a: &TreeMap<K, V>,
}
impl<K: Ord + TotalOrd, V> Ord for TreeMap<K, V> {
#[inline(always)]
#[inline]
fn lt(&self, other: &TreeMap<K, V>) -> bool { lt(self, other) }
#[inline(always)]
#[inline]
fn le(&self, other: &TreeMap<K, V>) -> bool { !lt(other, self) }
#[inline(always)]
#[inline]
fn ge(&self, other: &TreeMap<K, V>) -> bool { !lt(self, other) }
#[inline(always)]
#[inline]
fn gt(&self, other: &TreeMap<K, V>) -> bool { lt(other, self) }
}
@ -145,7 +145,7 @@ impl<K: TotalOrd, V> Map<K, V> for TreeMap<K, V> {
}
/// Return a mutable reference to the value corresponding to the key
#[inline(always)]
#[inline]
fn find_mut<'a>(&'a mut self, key: &K) -> Option<&'a mut V> {
find_mut(&mut self.root, key)
}
@ -236,7 +236,7 @@ impl<'self, K, V> Iterator<(&'self K, &'self V)> for TreeMapIterator<'self, K, V
impl<'self, T> Iterator<&'self T> for TreeSetIterator<'self, T> {
/// Advance the iterator to the next node (in order). If there are no more nodes, return `None`.
#[inline(always)]
#[inline]
fn next(&mut self) -> Option<&'self T> {
do self.iter.next().map |&(value, _)| { value }
}
@ -251,69 +251,69 @@ pub struct TreeSet<T> {
impl<T: TotalOrd> BaseIter<T> for TreeSet<T> {
/// Visit all values in order
#[inline(always)]
#[inline]
fn each(&self, f: &fn(&T) -> bool) -> bool { self.map.each_key(f) }
#[inline(always)]
#[inline]
fn size_hint(&self) -> Option<uint> { Some(self.len()) }
}
impl<T: TotalOrd> ReverseIter<T> for TreeSet<T> {
/// Visit all values in reverse order
#[inline(always)]
#[inline]
fn each_reverse(&self, f: &fn(&T) -> bool) -> bool {
self.map.each_key_reverse(f)
}
}
impl<T: Eq + TotalOrd> Eq for TreeSet<T> {
#[inline(always)]
#[inline]
fn eq(&self, other: &TreeSet<T>) -> bool { self.map == other.map }
#[inline(always)]
#[inline]
fn ne(&self, other: &TreeSet<T>) -> bool { self.map != other.map }
}
impl<T: Ord + TotalOrd> Ord for TreeSet<T> {
#[inline(always)]
#[inline]
fn lt(&self, other: &TreeSet<T>) -> bool { self.map < other.map }
#[inline(always)]
#[inline]
fn le(&self, other: &TreeSet<T>) -> bool { self.map <= other.map }
#[inline(always)]
#[inline]
fn ge(&self, other: &TreeSet<T>) -> bool { self.map >= other.map }
#[inline(always)]
#[inline]
fn gt(&self, other: &TreeSet<T>) -> bool { self.map > other.map }
}
impl<T: TotalOrd> Container for TreeSet<T> {
/// Return the number of elements in the set
#[inline(always)]
#[inline]
fn len(&const self) -> uint { self.map.len() }
/// Return true if the set contains no elements
#[inline(always)]
#[inline]
fn is_empty(&const self) -> bool { self.map.is_empty() }
}
impl<T: TotalOrd> Mutable for TreeSet<T> {
/// Clear the set, removing all values.
#[inline(always)]
#[inline]
fn clear(&mut self) { self.map.clear() }
}
impl<T: TotalOrd> Set<T> for TreeSet<T> {
/// Return true if the set contains a value
#[inline(always)]
#[inline]
fn contains(&self, value: &T) -> bool {
self.map.contains_key(value)
}
/// Add a value to the set. Return true if the value was not already
/// present in the set.
#[inline(always)]
#[inline]
fn insert(&mut self, value: T) -> bool { self.map.insert(value, ()) }
/// Remove a value from the set. Return true if the value was
/// present in the set.
#[inline(always)]
#[inline]
fn remove(&mut self, value: &T) -> bool { self.map.remove(value) }
/// Return true if the set has no elements in common with `other`.
@ -336,7 +336,7 @@ impl<T: TotalOrd> Set<T> for TreeSet<T> {
}
/// Return true if the set is a subset of another
#[inline(always)]
#[inline]
fn is_subset(&self, other: &TreeSet<T>) -> bool {
other.is_superset(self)
}
@ -490,12 +490,12 @@ impl<T: TotalOrd> Set<T> for TreeSet<T> {
impl<T: TotalOrd> TreeSet<T> {
/// Create an empty TreeSet
#[inline(always)]
#[inline]
pub fn new() -> TreeSet<T> { TreeSet{map: TreeMap::new()} }
/// Get a lazy iterator over the values in the set.
/// Requires that it be frozen (immutable).
#[inline(always)]
#[inline]
pub fn iter<'a>(&'a self) -> TreeSetIterator<'a, T> {
TreeSetIterator{iter: self.map.iter()}
}
@ -518,7 +518,7 @@ struct TreeNode<K, V> {
impl<K: TotalOrd, V> TreeNode<K, V> {
/// Creates a new tree node.
#[inline(always)]
#[inline]
pub fn new(key: K, value: V) -> TreeNode<K, V> {
TreeNode{key: key, value: value, left: None, right: None, level: 1}
}
@ -710,7 +710,6 @@ mod test_treemap {
use core::rand::RngUtil;
use core::rand;
use core::str;
use core::vec;
#[test]

View file

@ -104,7 +104,7 @@ struct WorkKey {
}
impl to_bytes::IterBytes for WorkKey {
#[inline(always)]
#[inline]
fn iter_bytes(&self, lsb0: bool, f: to_bytes::Cb) -> bool {
self.kind.iter_bytes(lsb0, f) && self.name.iter_bytes(lsb0, f)
}

View file

@ -1,42 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::prelude::*;
use vec;
fn vec_equal<T>(v: ~[T],
u: ~[T],
element_equality_test: @fn(&&T, &&T) -> bool) ->
bool {
let Lv = v.len();
if Lv != u.len() { return false; }
let i = 0u;
while i < Lv {
if !element_equality_test(v[i], u[i]) { return false; }
i += 1u;
}
return true;
}
fn builtin_equal<T>(&&a: T, &&b: T) -> bool { return a == b; }
fn builtin_equal_int(&&a: int, &&b: int) -> bool { return a == b; }
fn main() {
assert!((builtin_equal(5, 5)));
assert!((!builtin_equal(5, 4)));
assert!((!vec_equal(~[5, 5], ~[5], bind builtin_equal(_, _))));
assert!((!vec_equal(~[5, 5], ~[5], builtin_equal_int)));
assert!((!vec_equal(~[5, 5], ~[5, 4], builtin_equal_int)));
assert!((!vec_equal(~[5, 5], ~[4, 5], builtin_equal_int)));
assert!((vec_equal(~[5, 5], ~[5, 5], builtin_equal_int)));
error!("Pass");
}

View file

@ -1,108 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::prelude::*;
use extra;
use extra::rand;
use uint::range;
// random uint less than n
fn under(r : rand::rng, n : uint) -> uint {
assert!(n != 0u); r.next() as uint % n
}
// random choice from a vec
fn choice<T:copy>(r : rand::rng, v : ~[const T]) -> T {
assert!(v.len() != 0u); v[under(r, v.len())]
}
// k in n chance of being true
fn likelihood(r : rand::rng, k : uint, n : uint) -> bool { under(r, n) < k }
static iters : uint = 1000u;
static vlen : uint = 100u;
enum maybe_pointy {
none,
p(@pointy)
}
type pointy = {
mut a : maybe_pointy,
mut b : ~maybe_pointy,
mut c : @maybe_pointy,
mut f : @fn()->(),
mut g : ~fn()->(),
mut m : ~[maybe_pointy],
mut n : ~[maybe_pointy],
mut o : {x : int, y : maybe_pointy}
};
// To add: objects; traits; anything type-parameterized?
fn empty_pointy() -> @pointy {
return @{
mut a : none,
mut b : ~none,
mut c : @none,
mut f : || {},
mut g : || {},
mut m : ~[],
mut n : ~[],
mut o : {x : 0, y : none}
}
}
fn nopP(_x : @pointy) { }
fn nop<T>(_x: T) { }
fn test_cycles(r : rand::rng, k: uint, n: uint)
{
let mut v : ~[@pointy] = ~[];
// Create a graph with no edges
range(0u, vlen) {|_i|
v.push(empty_pointy());
}
// Fill in the graph with random edges, with density k/n
range(0u, vlen) {|i|
if (likelihood(r, k, n)) { v[i].a = p(choice(r, v)); }
if (likelihood(r, k, n)) { v[i].b = ~p(choice(r, v)); }
if (likelihood(r, k, n)) { v[i].c = @p(choice(r, v)); }
if (likelihood(r, k, n)) { v[i].f = bind nopP(choice(r, v)); }
//if (false) { v[i].g = bind (|_: @pointy| { })(
// choice(r, v)); }
// https://github.com/mozilla/rust/issues/1899
if (likelihood(r, k, n)) { v[i].m = [p(choice(r, v))]; }
if (likelihood(r, k, n)) { v[i].n.push(mut p(choice(r, v))); }
if (likelihood(r, k, n)) { v[i].o = {x: 0, y: p(choice(r, v))}; }
}
// Drop refs one at a time
range(0u, vlen) {|i|
v[i] = empty_pointy()
}
}
fn main()
{
let r = rand::rng();
range(0u, iters) {|i|
test_cycles(r, i, iters);
}
}

View file

@ -1,713 +0,0 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[link(name = "fuzzer",
vers = "0.7-pre",
uuid = "d6418797-2736-4833-bd82-d3c684b7c1b0",
url = "https://github.com/mozilla/rust/tree/master/src/libfuzzer")];
#[comment = "The Rust fuzzer library"];
#[license = "MIT/ASL2"];
#[crate_type = "lib"];
#[allow(non_camel_case_types)];
#[no_std];
extern mod std(name = "std", vers = "0.7-pre");
extern mod extra(name = "extra", vers = "0.7-pre");
extern mod syntax(vers = "0.7-pre");
use std::prelude::*;
use std::int;
use std::io;
use std::option;
use std::os;
use std::result;
use std::run;
use std::str;
use std::uint;
use syntax::diagnostic;
use syntax::parse::token::ident_interner;
use syntax::parse::token;
use syntax::parse;
use syntax::print::pprust;
use syntax::{ast, fold, visit, codemap};
#[deriving(Eq)]
pub enum test_mode { tm_converge, tm_run, }
pub struct Context { mode: test_mode } // + rng
pub fn write_file(filename: &Path, content: &str) {
result::get(&io::file_writer(filename, [io::Create, io::Truncate]))
.write_str(content);
}
pub fn contains(haystack: &str, needle: &str) -> bool {
haystack.contains(needle)
}
pub fn find_rust_files(files: &mut ~[Path], path: &Path) {
if path.filetype() == Some(~".rs") && !contains(path.to_str(), "utf8") {
// ignoring "utf8" tests because something is broken
files.push(path.clone());
} else if os::path_is_dir(path)
&& !contains(path.to_str(), "compile-fail")
&& !contains(path.to_str(), "build") {
for os::list_dir_path(path).each |p| {
find_rust_files(&mut *files, *p);
}
}
}
pub fn common_exprs() -> ~[@ast::expr] {
fn dse(e: ast::expr_) -> @ast::expr {
@ast::expr {
id: 0,
node: e,
span: codemap::dummy_sp(),
}
}
fn dsl(l: ast::lit_) -> ast::lit {
codemap::spanned { node: l, span: codemap::dummy_sp() }
}
~[dse(ast::expr_break(option::None)),
dse(ast::expr_again(option::None)),
dse(ast::expr_ret(option::None)),
dse(ast::expr_lit(@dsl(ast::lit_nil))),
dse(ast::expr_lit(@dsl(ast::lit_bool(false)))),
dse(ast::expr_lit(@dsl(ast::lit_bool(true)))),
dse(ast::expr_unary(-1, ast::box(ast::m_imm),
dse(ast::expr_lit(@dsl(ast::lit_bool(true)))))),
dse(ast::expr_unary(-1, ast::uniq(ast::m_imm),
dse(ast::expr_lit(@dsl(ast::lit_bool(true))))))
]
}
pub fn safe_to_steal_expr(e: @ast::expr, tm: test_mode) -> bool {
safe_to_use_expr(e, tm)
}
pub fn safe_to_use_expr(e: @ast::expr, tm: test_mode) -> bool {
match tm {
tm_converge => {
match e.node {
// If the fuzzer moves a block-ending-in-semicolon into callee
// position, the pretty-printer can't preserve this even by
// parenthesizing!! See email to marijn.
ast::expr_if(*) | ast::expr_block(*)
| ast::expr_match(*) | ast::expr_while(*) => { false }
// https://github.com/mozilla/rust/issues/929
ast::expr_cast(*) | ast::expr_binary(*) | ast::expr_assign(*) |
ast::expr_assign_op(*) => { false }
ast::expr_ret(option::None) => { false }
// https://github.com/mozilla/rust/issues/953
//ast::expr_fail(option::Some(_)) => { false }
// https://github.com/mozilla/rust/issues/928
//ast::expr_cast(_, _) { false }
// https://github.com/mozilla/rust/issues/1458
ast::expr_call(*) => { false }
_ => { true }
}
}
tm_run => { true }
}
}
pub fn safe_to_steal_ty(t: @ast::Ty, tm: test_mode) -> bool {
// Restrictions happen to be the same.
safe_to_replace_ty(&t.node, tm)
}
// Not type-parameterized: https://github.com/mozilla/rust/issues/898 (FIXED)
pub fn stash_expr_if(c: @fn(@ast::expr, test_mode)->bool,
es: @mut ~[@ast::expr],
e: @ast::expr,
tm: test_mode) {
if c(e, tm) {
*es = *es + [e];
} else {
/* now my indices are wrong :( */
}
}
pub fn stash_ty_if(c: @fn(@ast::Ty, test_mode) -> bool,
es: @mut ~[@ast::Ty],
e: @ast::Ty,
tm: test_mode) {
if c(e, tm) {
es.push(e);
} else {
/* now my indices are wrong :( */
}
}
pub struct StolenStuff {
exprs: ~[@ast::expr],
tys: ~[@ast::Ty]
}
pub fn steal(crate: @ast::crate, tm: test_mode) -> StolenStuff {
let exprs = @mut ~[];
let tys = @mut ~[];
let v = visit::mk_simple_visitor(@visit::SimpleVisitor {
visit_expr: |a| stash_expr_if(safe_to_steal_expr, exprs, a, tm),
visit_ty: |a| stash_ty_if(safe_to_steal_ty, tys, a, tm),
.. *visit::default_simple_visitor()
});
visit::visit_crate(crate, ((), v));
StolenStuff {
exprs: (*exprs).clone(),
tys: (*tys).clone(),
}
}
pub fn safe_to_replace_expr(e: &ast::expr_, _tm: test_mode) -> bool {
match *e {
// https://github.com/mozilla/rust/issues/652
ast::expr_if(*) => false,
ast::expr_block(_) => false,
// expr_call is also missing a constraint
ast::expr_fn_block(*) => false,
_ => true,
}
}
pub fn safe_to_replace_ty(t: &ast::ty_, _tm: test_mode) -> bool {
match *t {
ast::ty_infer => { false } // always implicit, always top level
ast::ty_bot => { false } // in source, can only appear
// as the out type of a function
ast::ty_mac(_) => { false }
_ => { true }
}
}
// Replace the |i|th expr (in fold order) of |crate| with |newexpr|.
pub fn replace_expr_in_crate(crate: @ast::crate,
i: uint,
newexpr: @ast::expr,
tm: test_mode)
-> @ast::crate {
let j: @mut uint = @mut 0u;
fn fold_expr_rep(j_: @mut uint,
i_: uint,
newexpr_: &ast::expr_,
original: &ast::expr_,
fld: @fold::ast_fold,
tm_: test_mode)
-> ast::expr_ {
*j_ += 1;
if i_ + 1 == *j_ && safe_to_replace_expr(original, tm_) {
copy *newexpr_
} else {
fold::noop_fold_expr(original, fld)
}
}
let afp = @fold::AstFoldFns {
fold_expr: fold::wrap(|a,b| {
fold_expr_rep(j, i, &newexpr.node, a, b, tm)
}),
.. *fold::default_ast_fold()
};
let af = fold::make_fold(afp);
let crate2: @ast::crate = @af.fold_crate(crate);
crate2
}
// Replace the |i|th ty (in fold order) of |crate| with |newty|.
pub fn replace_ty_in_crate(crate: @ast::crate,
i: uint,
newty: @ast::Ty,
tm: test_mode)
-> @ast::crate {
let j: @mut uint = @mut 0u;
fn fold_ty_rep(j_: @mut uint,
i_: uint,
newty_: &ast::ty_,
original: &ast::ty_,
fld: @fold::ast_fold,
tm_: test_mode)
-> ast::ty_ {
*j_ += 1;
if i_ + 1 == *j_ && safe_to_replace_ty(original, tm_) {
copy *newty_
} else {
fold::noop_fold_ty(original, fld)
}
}
let afp = @fold::AstFoldFns {
fold_ty: fold::wrap(|a,b| fold_ty_rep(j, i, &newty.node, a, b, tm)),
.. *fold::default_ast_fold()
};
let af = fold::make_fold(afp);
let crate2: @ast::crate = @af.fold_crate(crate);
crate2
}
pub fn under(n: uint, it: &fn(uint)) {
let mut i: uint = 0u;
while i < n { it(i); i += 1u; }
}
pub fn as_str(f: @fn(x: @io::Writer)) -> ~str {
io::with_str_writer(f)
}
pub fn check_variants_of_ast(crate: @ast::crate,
codemap: @codemap::CodeMap,
filename: &Path,
cx: Context) {
let stolen = steal(crate, cx.mode);
let extra_exprs = do common_exprs().filtered |&a| {
safe_to_use_expr(a, cx.mode)
};
check_variants_T(crate,
codemap,
filename,
~"expr",
extra_exprs + stolen.exprs,
pprust::expr_to_str,
replace_expr_in_crate,
cx);
check_variants_T(crate,
codemap,
filename,
~"ty",
stolen.tys,
pprust::ty_to_str,
replace_ty_in_crate,
cx);
}
pub fn check_variants_T<T:Copy>(crate: @ast::crate,
codemap: @codemap::CodeMap,
filename: &Path,
thing_label: ~str,
things: &[T],
stringifier: @fn(T, @ident_interner) -> ~str,
replacer: @fn(@ast::crate,
uint,
T,
test_mode)
-> @ast::crate,
cx: Context) {
error!("%s contains %u %s objects", filename.to_str(),
things.len(), thing_label);
// Assuming we're not generating any token_trees
let intr = syntax::parse::token::mk_fake_ident_interner();
let L = things.len();
if L < 100 {
do under(uint::min(L, 20)) |i| {
error!("Replacing... #%?", uint::to_str(i));
let fname = str::to_owned(filename.to_str());
do under(uint::min(L, 30)) |j| {
let fname = fname.to_str();
error!("With... %?", stringifier(things[j], intr));
let crate2 = replacer(crate, i, things[j], cx.mode);
// It would be best to test the *crate* for stability, but
// testing the string for stability is easier and ok for now.
let handler = diagnostic::mk_handler(None);
let str3 = do io::with_str_reader("") |rdr| {
let fname = fname.to_str();
let string = do as_str |a| {
let span_handler =
diagnostic::mk_span_handler(handler, codemap);
pprust::print_crate(codemap,
intr,
span_handler,
crate2,
fname.to_managed(),
rdr,
a,
pprust::no_ann(),
false)
};
string.to_managed()
};
match cx.mode {
tm_converge => check_roundtrip_convergence(str3, 1),
tm_run => {
let file_label = fmt!("rusttmp/%s_%s_%u_%u",
last_part(filename.to_str()),
thing_label,
i,
j);
let safe_to_run = !(content_is_dangerous_to_run(str3)
|| has_raw_pointers(crate2));
check_whole_compiler(str3,
&Path(file_label),
safe_to_run);
}
}
}
}
}
}
pub fn last_part(filename: ~str) -> ~str {
let ix = filename.rfind('/').get();
filename.slice(ix + 1u, filename.len() - 3u).to_owned()
}
pub enum happiness {
passed,
cleanly_rejected(~str),
known_bug(~str),
failed(~str),
}
// We'd find more bugs if we could take an AST here, but
// - that would find many "false positives" or unimportant bugs
// - that would be tricky, requiring use of tasks or serialization
// or randomness.
// This seems to find plenty of bugs as it is :)
pub fn check_whole_compiler(code: &str,
suggested_filename_prefix: &Path,
allow_running: bool) {
let filename = &suggested_filename_prefix.with_filetype("rs");
write_file(filename, code);
let compile_result = check_compiling(filename);
let run_result = match (compile_result, allow_running) {
(passed, true) => { check_running(suggested_filename_prefix) }
(h, _) => { h }
};
match run_result {
passed | cleanly_rejected(_) | known_bug(_) => {
removeIfExists(suggested_filename_prefix);
removeIfExists(&suggested_filename_prefix.with_filetype("rs"));
removeDirIfExists(&suggested_filename_prefix.with_filetype("dSYM"));
}
failed(s) => {
error!("check_whole_compiler failure: %?", s);
error!("Saved as: %?", filename.to_str());
}
}
}
pub fn removeIfExists(filename: &Path) {
// So sketchy!
assert!(!contains(filename.to_str(), " "));
run::process_status("bash", [~"-c", ~"rm " + filename.to_str()]);
}
pub fn removeDirIfExists(filename: &Path) {
// So sketchy!
assert!(!contains(filename.to_str(), " "));
run::process_status("bash", [~"-c", ~"rm -r " + filename.to_str()]);
}
pub fn check_running(exe_filename: &Path) -> happiness {
let p = run::process_output(
"/Users/jruderman/scripts/timed_run_rust_program.py",
[exe_filename.to_str()]);
let comb = str::from_bytes(p.output) + "\n" + str::from_bytes(p.error);
if comb.len() > 1u {
error!("comb comb comb: %?", comb);
}
if contains(comb, "Assertion failed:") {
failed(~"C++ assertion failure")
} else if contains(comb, "leaked memory in rust main loop") {
// might also use exit code 134
//failed("Leaked")
known_bug(~"https://github.com/mozilla/rust/issues/910")
} else if contains(comb, "src/rt/") {
failed(~"Mentioned src/rt/")
} else if contains(comb, "malloc") {
failed(~"Mentioned malloc")
} else {
match p.status {
0 => { passed }
100 => { cleanly_rejected(~"running: explicit fail") }
101 | 247 => { cleanly_rejected(~"running: timed out") }
245 | 246 | 138 | 252 => {
known_bug(~"https://github.com/mozilla/rust/issues/1466")
}
136 | 248 => {
known_bug(
~"SIGFPE - https://github.com/mozilla/rust/issues/944")
}
rc => {
failed(~"Rust program ran but exited with status " +
int::to_str(rc))
}
}
}
}
pub fn check_compiling(filename: &Path) -> happiness {
let p = run::process_output(
"/Users/jruderman/code/rust/build/x86_64-apple-darwin/stage1/bin/rustc",
[filename.to_str()]);
let out = str::from_bytes(p.output);
let err = str::from_bytes(p.error);
//error!("Status: %d", p.status);
if p.status == 0 {
passed
} else if !err.is_empty() {
if err.contains("error:") {
cleanly_rejected(~"rejected with span_error")
} else {
error!("Stderr: %?", err);
failed(~"Unfamiliar error message")
}
} else if out.contains("Assertion") && out.contains("failed") {
error!("Stdout: %?", out);
failed(~"Looks like an llvm assertion failure")
} else if out.contains("internal compiler error unimplemented") {
known_bug(~"Something unimplemented")
} else if out.contains("internal compiler error") {
error!("Stdout: %?", out);
failed(~"internal compiler error")
} else {
error!("%?", p.status);
error!("!Stdout: %?", out);
failed(~"What happened?")
}
}
pub fn parse_and_print(code: @str) -> @str {
let filename = Path("tmp.rs");
let sess = parse::new_parse_sess(option::None);
write_file(&filename, code);
let crate = parse::parse_crate_from_source_str(filename.to_str().to_managed(),
code,
~[],
sess);
do io::with_str_reader(code) |rdr| {
let filename = filename.to_str();
do as_str |a| {
pprust::print_crate(sess.cm,
// Assuming there are no token_trees
token::mk_fake_ident_interner(),
copy sess.span_diagnostic,
crate,
filename.to_managed(),
rdr,
a,
pprust::no_ann(),
false)
}.to_managed()
}
}
pub fn has_raw_pointers(c: @ast::crate) -> bool {
let has_rp = @mut false;
fn visit_ty(flag: @mut bool, t: @ast::Ty) {
match t.node {
ast::ty_ptr(_) => { *flag = true; }
_ => { }
}
}
let v =
visit::mk_simple_visitor(@visit::SimpleVisitor {
visit_ty: |a| visit_ty(has_rp, a),
.. *visit::default_simple_visitor()});
visit::visit_crate(c, ((), v));
return *has_rp;
}
pub fn content_is_dangerous_to_run(code: &str) -> bool {
let dangerous_patterns =
~[~"xfail-test",
~"import", // espeically fs, run
~"extern",
~"unsafe",
~"log"]; // python --> rust pipe deadlock?
for dangerous_patterns.each |p| { if contains(code, *p) { return true; } }
return false;
}
pub fn content_is_dangerous_to_compile(code: &str) -> bool {
let dangerous_patterns =
~[~"xfail-test"];
for dangerous_patterns.each |p| { if contains(code, *p) { return true; } }
return false;
}
pub fn content_might_not_converge(code: &str) -> bool {
let confusing_patterns =
~[~"xfail-test",
~"xfail-pretty",
~"self", // crazy rules enforced by parser not typechecker?
~"spawn", // precedence issues?
~"bind", // precedence issues?
~" be ", // don't want to replace its child with a non-call:
// "Non-call expression in tail call"
~"\n\n\n\n\n" // https://github.com/mozilla/rust/issues/850
];
for confusing_patterns.each |p| { if contains(code, *p) { return true; } }
return false;
}
/// Returns true if `filename` matches a file on the known-bad list —
/// specific test files whose pretty-printed form legitimately differs
/// from the original (so the round-trip test would spuriously fail).
pub fn file_might_not_converge(filename: &Path) -> bool {
    let confusing_files = ~[
      ~"expr-alt.rs",         // pretty-printing "(a = b) = c"
                              // vs "a = b = c" and wrapping
      ~"block-arg-in-ternary.rs", // wrapping
      ~"move-3-unique.rs",    // 0 becomes (0), but both seem reasonable. wtf?
      ~"move-3.rs"            // 0 becomes (0), but both seem reasonable. wtf?
    ];
    // Substring match on the full path, so any path containing one of the
    // listed file names is skipped.
    for confusing_files.each |f| {
        if contains(filename.to_str(), *f) {
            return true;
        }
    }
    return false;
}
/// Repeatedly parse-and-pretty-print `code` until the output stops
/// changing, up to `maxIters` iterations. If the fixed point is never
/// reached, the last two versions are written to round-trip-a.rs /
/// round-trip-b.rs, a whitespace-insensitive diff is shown, and the
/// process aborts via fail!.
pub fn check_roundtrip_convergence(code: @str, maxIters: uint) {
    let mut i = 0u;
    let mut newv = code;
    let mut oldv = code;
    while i < maxIters {
        oldv = newv;
        // Intermediate output may itself acquire a known-non-converging
        // pattern; bail out silently rather than report a false failure.
        if content_might_not_converge(oldv) { return; }
        newv = parse_and_print(oldv);
        if oldv == newv { break; }
        i += 1u;
    }
    if oldv == newv {
        error!("Converged after %u iterations", i);
    } else {
        error!("Did not converge after %u iterations!", i);
        write_file(&Path("round-trip-a.rs"), oldv);
        write_file(&Path("round-trip-b.rs"), newv);
        // -w ignores whitespace; -u gives a unified diff for readability.
        run::process_status("diff", [~"-w", ~"-u", ~"round-trip-a.rs", ~"round-trip-b.rs"]);
        fail!("Mismatch");
    }
}
/// Runs the parse/pretty-print round-trip convergence check over each
/// file, skipping files (by name) and contents (by pattern) known not
/// to converge.
pub fn check_convergence(files: &[Path]) {
    error!("pp convergence tests: %u files", files.len());
    for files.each |file| {
        if !file_might_not_converge(file) {
            let s = result::get(&io::read_whole_file_str(file)).to_managed();
            if !content_might_not_converge(s) {
                error!("pp converge: %s", file.to_str());
                // Change from 7u to 2u once
                // https://github.com/mozilla/rust/issues/850 is fixed
                check_roundtrip_convergence(s, 7u);
            }
        }
    }
}
/// For each input file: parse it, echo the pretty-printed crate to the
/// error log, then hand the AST to check_variants_of_ast for mutation
/// testing. Files are skipped according to the test mode (`tm_converge`
/// vs `tm_run`) and the danger/convergence heuristics above.
pub fn check_variants(files: &[Path], cx: Context) {
    for files.each |file| {
        if cx.mode == tm_converge &&
            file_might_not_converge(file) {
            error!("Skipping convergence test based on\
                    file_might_not_converge");
            loop; // old-Rust `continue`
        }
        let s = result::get(&io::read_whole_file_str(file)).to_managed();
        if s.contains_char('#') {
            loop; // Macros are confusing
        }
        if cx.mode == tm_converge && content_might_not_converge(s) {
            loop;
        }
        if cx.mode == tm_run && content_is_dangerous_to_compile(s) {
            loop;
        }
        let file_str = file.to_str();
        error!("check_variants: %?", file_str);
        let sess = parse::new_parse_sess(None);
        let crate = parse::parse_crate_from_source_str(file_str.to_managed(),
                                                       s,
                                                       ~[],
                                                       sess);
        // Log the pretty-printed form of the unmutated crate before any
        // variants are generated, to aid debugging of mutation failures.
        io::with_str_reader(s, |rdr| {
            let file_str = file_str.to_str();
            error!("%s",
                   as_str(|a| {
                       pprust::print_crate(
                           sess.cm,
                           // Assuming no token_trees
                           token::mk_fake_ident_interner(),
                           copy sess.span_diagnostic,
                           crate,
                           file_str.to_managed(),
                           rdr,
                           a,
                           pprust::no_ann(),
                           false)
                   }))
        });
        check_variants_of_ast(crate, sess.cm, file, cx);
    }
}
/// Fuzzer entry point: expects a single argument naming a directory of
/// Rust test files, then runs the pretty-print convergence check and
/// both variant-checking modes over every file found.
pub fn main() {
    let args = os::args();
    if args.len() != 2u {
        error!("usage: %s <testdir>", args[0]);
        return;
    }
    let mut files = ~[];
    let root = Path(args[1]);
    // Gather candidate .rs files under the test directory
    // (find_rust_files is defined elsewhere in this file).
    find_rust_files(&mut files, &root);
    error!("== check_convergence ==");
    check_convergence(files);
    error!("== check_variants: converge ==");
    check_variants(files, Context { mode: tm_converge });
    error!("== check_variants: run ==");
    check_variants(files, Context { mode: tm_run });
    error!("Fuzzer done");
}
// For bootstrapping purposes: this crate links the real std under the
// name `core` (see the `extern mod core(name = "std", ...)` declaration
// at the top of the file), so re-export the std items used here under
// that module name.
pub mod core {
    pub use std::cmp;
    pub use std::sys;
}

View file

@ -1,121 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*
Idea: provide functions for 'exhaustive' and 'random' modification of vecs.
two functions, "return all edits" and "return a random edit" = move-
leaning toward this model or two functions, "return the number of
possible edits" and "return edit #n"
It would be nice if this could be data-driven, so the two functions
could share information:
type vec_modifier = rec(fn (<T> v, uint i) -> ~[T] fun, uint lo, uint di);
const ~[vec_modifier] vec_modifiers = ~[rec(fun=vec_omit, 0u, 1u), ...]/~;
But that gives me "error: internal compiler error unimplemented consts
that's not a plain literal".
https://github.com/graydon/rust/issues/570
vec_edits is not an iter because iters might go away.
*/
use std::prelude::*;
use vec::slice;
use vec::len;
fn vec_omit<T:copy>(v: ~[T], i: uint) -> ~[T] {
slice(v, 0u, i) + slice(v, i + 1u, len(v))
}
fn vec_dup<T:copy>(v: ~[T], i: uint) -> ~[T] {
slice(v, 0u, i) + [v[i]] + slice(v, i, len(v))
}
fn vec_swadj<T:copy>(v: ~[T], i: uint) -> ~[T] {
slice(v, 0u, i) + [v[i + 1u], v[i]] + slice(v, i + 2u, len(v))
}
fn vec_prefix<T:copy>(v: ~[T], i: uint) -> ~[T] { slice(v, 0u, i) }
fn vec_suffix<T:copy>(v: ~[T], i: uint) -> ~[T] { slice(v, i, len(v)) }
fn vec_poke<T:copy>(v: ~[T], i: uint, x: T) -> ~[T] {
slice(v, 0u, i) + ~[x] + slice(v, i + 1u, len(v))
}
fn vec_insert<T:copy>(v: ~[T], i: uint, x: T) -> ~[T] {
slice(v, 0u, i) + ~[x] + slice(v, i, len(v))
}
// Iterates over 0...length, skipping the specified number on each side.
fn ix(skip_low: uint, skip_high: uint, length: uint, it: block(uint)) {
let i: uint = skip_low;
while i + skip_high <= length { it(i); i += 1u; }
}
// Returns a bunch of modified versions of v, some of which introduce
// new elements (borrowed from xs).
fn vec_edits<T:copy>(v: ~[T], xs: ~[T]) -> ~[~[T]] {
let edits: ~[~[T]] = ~[];
let Lv: uint = len(v);
if Lv != 1u {
// When Lv == 1u, this is redundant with omit.
edits.push(~[]);
}
if Lv >= 3u {
// When Lv == 2u, this is redundant with swap.
edits.push(vec::reversed(v));
}
ix(0u, 1u, Lv) {|i| edits += ~[vec_omit(v, i)]; }
ix(0u, 1u, Lv) {|i| edits += ~[vec_dup(v, i)]; }
ix(0u, 2u, Lv) {|i| edits += ~[vec_swadj(v, i)]; }
ix(1u, 2u, Lv) {|i| edits += ~[vec_prefix(v, i)]; }
ix(2u, 1u, Lv) {|i| edits += ~[vec_suffix(v, i)]; }
ix(0u, 1u, len(xs)) {|j|
ix(0u, 1u, Lv) {|i|
edits.push(vec_poke(v, i, xs[j]));
}
ix(0u, 0u, Lv) {|i|
edits.push(vec_insert(v, i, xs[j]));
}
}
edits
}
// Would be nice if this were built in:
// https://github.com/graydon/rust/issues/424
fn vec_to_str(v: ~[int]) -> str {
let i = 0u;
let s = "[";
while i < len(v) {
s += int::str(v[i]);
if i + 1u < len(v) { s += ", "; }
i += 1u;
}
return s + "]";
}
fn show_edits(a: ~[int], xs: ~[int]) {
log(error, "=== Edits of " + vec_to_str(a) + " ===");
let b = vec_edits(a, xs);
ix(0u, 1u, len(b)) {|i| log(error, vec_to_str(b[i])); }
}
fn demo_edits() {
let xs = ~[7, 8];
show_edits(~[], xs);
show_edits(~[1], xs);
show_edits(~[1, 2], xs);
show_edits(~[1, 2, 3], xs);
show_edits(~[1, 2, 3, 4], xs);
}
fn main() { demo_edits(); }

View file

@ -1,106 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::prelude::*;
use extra::rand;
// random uint less than n
fn under(r : rand::rng, n : uint) -> uint {
assert!(n != 0u); r.next() as uint % n
}
// random choice from a vec
fn choice<T:copy>(r : rand::rng, v : ~[T]) -> T {
assert!(v.len() != 0u); v[under(r, v.len())]
}
// 1 in n chance of being true
fn unlikely(r : rand::rng, n : uint) -> bool { under(r, n) == 0u }
// shuffle a vec in place
fn shuffle<T>(r : rand::rng, &v : ~[T]) {
let i = v.len();
while i >= 2u {
// Loop invariant: elements with index >= i have been locked in place.
i -= 1u;
vec::swap(v, i, under(r, i + 1u)); // Lock element i in place.
}
}
// create a shuffled copy of a vec
fn shuffled<T:copy>(r : rand::rng, v : ~[T]) -> ~[T] {
let w = vec::to_mut(v);
shuffle(r, w);
vec::from_mut(w) // Shouldn't this happen automatically?
}
// sample from a population without replacement
//fn sample<T>(r : rand::rng, pop : ~[T], k : uint) -> ~[T] { fail!() }
// Two ways to make a weighted choice.
// * weighted_choice is O(number of choices) time
// * weighted_vec is O(total weight) space
type weighted<T> = { weight: uint, item: T };
fn weighted_choice<T:copy>(r : rand::rng, v : ~[weighted<T>]) -> T {
assert!(v.len() != 0u);
let total = 0u;
for {weight: weight, item: _} in v {
total += weight;
}
assert!(total >= 0u);
let chosen = under(r, total);
let so_far = 0u;
for {weight: weight, item: item} in v {
so_far += weight;
if so_far > chosen {
return item;
}
}
std::unreachable();
}
fn weighted_vec<T:copy>(v : ~[weighted<T>]) -> ~[T] {
let r = ~[];
for {weight: weight, item: item} in v {
let i = 0u;
while i < weight {
r.push(item);
i += 1u;
}
}
r
}
fn main()
{
let r = rand::mk_rng();
log(error, under(r, 5u));
log(error, choice(r, ~[10, 20, 30]));
log(error, if unlikely(r, 5u) { "unlikely" } else { "likely" });
let mut a = ~[1, 2, 3];
shuffle(r, a);
log(error, a);
let i = 0u;
let v = ~[
{weight:1u, item:"low"},
{weight:8u, item:"middle"},
{weight:1u, item:"high"}
];
let w = weighted_vec(v);
while i < 1000u {
log(error, "Immed: " + weighted_choice(r, v));
log(error, "Fast: " + choice(r, w));
i += 1u;
}
}

View file

@ -221,7 +221,7 @@ pub static analysis_passes : &'static [(&'static str, &'static str)] = &'static
/** Transformation Passes */
pub static transform_passes : &'static [(&'static str, &'static str)] = &'static [
("adce", "Aggressive Dead Code Elimination"),
("always-inline", "Inliner for #[inline(always)] functions"),
("always-inline", "Inliner for #[inline] functions"),
("argpromotion", "Promote 'by reference' arguments to scalars"),
("bb-vectorize", "Basic-Block Vectorization"),
("block-placement", "Profile Guided Basic Block Placement"),
@ -299,18 +299,27 @@ fn passes_exist() {
let mut failed = ~[];
unsafe { llvm::LLVMInitializePasses(); }
for analysis_passes.each() |&(name,_)| {
if !create_pass(name).is_some() {
let pass = create_pass(name);
if !pass.is_some() {
failed.push(name);
} else {
unsafe { llvm::LLVMDestroyPass(pass.get()) }
}
}
for transform_passes.each() |&(name,_)| {
if !create_pass(name).is_some() {
let pass = create_pass(name);
if !pass.is_some() {
failed.push(name);
} else {
unsafe { llvm::LLVMDestroyPass(pass.get()) }
}
}
for utility_passes.each() |&(name,_)| {
if !create_pass(name).is_some() {
let pass = create_pass(name);
if !pass.is_some() {
failed.push(name);
} else {
unsafe { llvm::LLVMDestroyPass(pass.get()) }
}
}

View file

@ -77,6 +77,10 @@ fn get_rpaths(os: session::os,
// crates they depend on.
let rel_rpaths = get_rpaths_relative_to_output(os, output, libs);
// Make backup absolute paths to the libraries. Binaries can
// be moved as long as the crates they link against don't move.
let abs_rpaths = get_absolute_rpaths(libs);
// And a final backup rpath to the global library location.
let fallback_rpaths = ~[get_install_prefix_rpath(target_triple)];
@ -88,9 +92,11 @@ fn get_rpaths(os: session::os,
}
log_rpaths("relative", rel_rpaths);
log_rpaths("absolute", abs_rpaths);
log_rpaths("fallback", fallback_rpaths);
let mut rpaths = rel_rpaths;
rpaths.push_all(abs_rpaths);
rpaths.push_all(fallback_rpaths);
// Remove duplicates
@ -160,19 +166,14 @@ pub fn get_relative_to(abs1: &Path, abs2: &Path) -> Path {
}
}
#[cfg(stage0)]
pub fn get_install_prefix_rpath(target_triple: &str) -> Path {
let install_prefix = env!("CFG_PREFIX");
if install_prefix.is_empty() {
fail!("rustc compiled without CFG_PREFIX environment variable");
}
let tlib = filesearch::relative_target_lib_path(target_triple);
os::make_absolute(&Path(install_prefix).push_rel(&tlib))
fn get_absolute_rpaths(libs: &[Path]) -> ~[Path] {
vec::map(libs, |a| get_absolute_rpath(a) )
}
pub fn get_absolute_rpath(lib: &Path) -> Path {
os::make_absolute(lib).dir_path()
}
#[cfg(not(stage0))]
pub fn get_install_prefix_rpath(target_triple: &str) -> Path {
let install_prefix = env!("CFG_PREFIX");
@ -198,15 +199,13 @@ pub fn minimize_rpaths(rpaths: &[Path]) -> ~[Path] {
#[cfg(unix, test)]
mod test {
use core::prelude::*;
use core::os;
use core::str;
// FIXME(#2119): the outer attribute should be #[cfg(unix, test)], then
// these redundant #[cfg(test)] blocks can be removed
#[cfg(test)]
#[cfg(test)]
use back::rpath::{get_install_prefix_rpath};
use back::rpath::{get_absolute_rpath, get_install_prefix_rpath};
use back::rpath::{get_relative_to, get_rpath_relative_to_output};
use back::rpath::{minimize_rpaths, rpaths_to_flags};
use driver::session;
@ -350,4 +349,14 @@ mod test {
&Path("lib/libstd.so"));
assert_eq!(res.to_str(), ~"@executable_path/../lib");
}
#[test]
fn test_get_absolute_rpath() {
let res = get_absolute_rpath(&Path("lib/libstd.so"));
debug!("test_get_absolute_rpath: %s vs. %s",
res.to_str(),
os::make_absolute(&Path("lib")).to_str());
assert_eq!(res, os::make_absolute(&Path("lib")));
}
}

View file

@ -65,34 +65,24 @@ pub fn source_name(input: &input) -> @str {
pub fn default_configuration(sess: Session, argv0: @str, input: &input) ->
ast::crate_cfg {
let libc = match sess.targ_cfg.os {
session::os_win32 => @"msvcrt.dll",
session::os_macos => @"libc.dylib",
session::os_linux => @"libc.so.6",
session::os_android => @"libc.so",
session::os_freebsd => @"libc.so.7"
// _ { "libc.so" }
let (libc, tos) = match sess.targ_cfg.os {
session::os_win32 => (@"msvcrt.dll", @"win32"),
session::os_macos => (@"libc.dylib", @"macos"),
session::os_linux => (@"libc.so.6", @"linux"),
session::os_android => (@"libc.so", @"android"),
session::os_freebsd => (@"libc.so.7", @"freebsd")
};
let tos = match sess.targ_cfg.os {
session::os_win32 => @"win32",
session::os_macos => @"macos",
session::os_linux => @"linux",
session::os_android => @"android",
session::os_freebsd => @"freebsd"
// _ { "libc.so" }
};
let mk = attr::mk_name_value_item_str;
// ARM is bi-endian, however using NDK seems to default
// to little-endian unless a flag is provided.
let (end,arch,wordsz) = match sess.targ_cfg.arch {
abi::X86 => (@"little",@"x86",@"32"),
abi::X86_64 => (@"little",@"x86_64",@"64"),
abi::Arm => (@"little",@"arm",@"32"),
abi::Mips => (@"big",@"mips",@"32")
abi::X86 => (@"little", @"x86", @"32"),
abi::X86_64 => (@"little", @"x86_64", @"64"),
abi::Arm => (@"little", @"arm", @"32"),
abi::Mips => (@"big", @"mips", @"32")
};
let mk = attr::mk_name_value_item_str;
return ~[ // Target bindings.
attr::mk_word_item(os::FAMILY.to_managed()),
mk(@"target_os", tos),
@ -463,36 +453,38 @@ pub fn pretty_print_input(sess: Session, cfg: ast::crate_cfg, input: &input,
}
pub fn get_os(triple: &str) -> Option<session::os> {
if triple.contains("win32") ||
triple.contains("mingw32") {
Some(session::os_win32)
} else if triple.contains("darwin") {
Some(session::os_macos)
} else if triple.contains("android") {
Some(session::os_android)
} else if triple.contains("linux") {
Some(session::os_linux)
} else if triple.contains("freebsd") {
Some(session::os_freebsd)
} else { None }
for os_names.each |&(name, os)| {
if triple.contains(name) { return Some(os) }
}
None
}
static os_names : &'static [(&'static str, session::os)] = &'static [
("mingw32", session::os_win32),
("win32", session::os_win32),
("darwin", session::os_macos),
("android", session::os_android),
("linux", session::os_linux),
("freebsd", session::os_freebsd)];
pub fn get_arch(triple: &str) -> Option<abi::Architecture> {
if triple.contains("i386") ||
triple.contains("i486") ||
triple.contains("i586") ||
triple.contains("i686") ||
triple.contains("i786") {
Some(abi::X86)
} else if triple.contains("x86_64") {
Some(abi::X86_64)
} else if triple.contains("arm") ||
triple.contains("xscale") {
Some(abi::Arm)
} else if triple.contains("mips") {
Some(abi::Mips)
} else { None }
for architecture_abis.each |&(arch, abi)| {
if triple.contains(arch) { return Some(abi) }
}
None
}
static architecture_abis : &'static [(&'static str, abi::Architecture)] = &'static [
("i386", abi::X86),
("i486", abi::X86),
("i586", abi::X86),
("i686", abi::X86),
("i786", abi::X86),
("x86_64", abi::X86_64),
("arm", abi::Arm),
("xscale", abi::Arm),
("mips", abi::Mips)];
pub fn build_target_config(sopts: @session::options,
demitter: diagnostic::Emitter)
@ -529,25 +521,6 @@ pub fn build_target_config(sopts: @session::options,
return target_cfg;
}
#[cfg(stage0)]
pub fn host_triple() -> ~str {
// Get the host triple out of the build environment. This ensures that our
// idea of the host triple is the same as for the set of libraries we've
// actually built. We can't just take LLVM's host triple because they
// normalize all ix86 architectures to i386.
//
// Instead of grabbing the host triple (for the current host), we grab (at
// compile time) the target triple that this rustc is built with and
// calling that (at runtime) the host triple.
let ht = env!("CFG_COMPILER_TRIPLE");
return if ht != ~"" {
ht
} else {
fail!("rustc built without CFG_COMPILER_TRIPLE")
};
}
#[cfg(not(stage0))]
pub fn host_triple() -> ~str {
// Get the host triple out of the build environment. This ensures that our
// idea of the host triple is the same as for the set of libraries we've

View file

@ -224,13 +224,50 @@ pub type SectionIteratorRef = *SectionIterator_opaque;
pub enum Pass_opaque {}
pub type PassRef = *Pass_opaque;
pub mod debuginfo {
use super::{ValueRef};
pub enum DIBuilder_opaque {}
pub type DIBuilderRef = *DIBuilder_opaque;
pub type DIDescriptor = ValueRef;
pub type DIScope = DIDescriptor;
pub type DILocation = DIDescriptor;
pub type DIFile = DIScope;
pub type DILexicalBlock = DIScope;
pub type DISubprogram = DIScope;
pub type DIType = DIDescriptor;
pub type DIBasicType = DIType;
pub type DIDerivedType = DIType;
pub type DICompositeType = DIDerivedType;
pub type DIVariable = DIDescriptor;
pub type DIArray = DIDescriptor;
pub type DISubrange = DIDescriptor;
pub enum DIDescriptorFlags {
FlagPrivate = 1 << 0,
FlagProtected = 1 << 1,
FlagFwdDecl = 1 << 2,
FlagAppleBlock = 1 << 3,
FlagBlockByrefStruct = 1 << 4,
FlagVirtual = 1 << 5,
FlagArtificial = 1 << 6,
FlagExplicit = 1 << 7,
FlagPrototyped = 1 << 8,
FlagObjcClassComplete = 1 << 9,
FlagObjectPointer = 1 << 10,
FlagVector = 1 << 11,
FlagStaticMember = 1 << 12
}
}
pub mod llvm {
use super::{AtomicBinOp, AtomicOrdering, BasicBlockRef, ExecutionEngineRef};
use super::{Bool, BuilderRef, ContextRef, MemoryBufferRef, ModuleRef};
use super::{ObjectFileRef, Opcode, PassManagerRef, PassManagerBuilderRef};
use super::{SectionIteratorRef, TargetDataRef, TypeKind, TypeRef, UseRef};
use super::{ValueRef,PassRef};
use super::{ValueRef, PassRef};
use super::debuginfo::*;
use core::libc::{c_char, c_int, c_longlong, c_ushort, c_uint, c_ulonglong};
#[link_args = "-Lrustllvm -lrustllvm"]
@ -929,6 +966,12 @@ pub mod llvm {
#[fast_ffi]
pub unsafe fn LLVMDeleteBasicBlock(BB: BasicBlockRef);
#[fast_ffi]
pub unsafe fn LLVMMoveBasicBlockAfter(BB: BasicBlockRef, MoveAfter: BasicBlockRef);
#[fast_ffi]
pub unsafe fn LLVMMoveBasicBlockBefore(BB: BasicBlockRef, MoveBefore: BasicBlockRef);
/* Operations on instructions */
#[fast_ffi]
pub unsafe fn LLVMGetInstructionParent(Inst: ValueRef)
@ -1641,6 +1684,9 @@ pub mod llvm {
#[fast_ffi]
pub unsafe fn LLVMCreatePass(PassName:*c_char) -> PassRef;
#[fast_ffi]
pub unsafe fn LLVMDestroyPass(P: PassRef);
/** Adds a verification pass. */
#[fast_ffi]
pub unsafe fn LLVMAddVerifierPass(PM: PassManagerRef);
@ -1885,6 +1931,164 @@ pub mod llvm {
AlignStack: Bool, Dialect: c_uint)
-> ValueRef;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderCreate(M: ModuleRef) -> DIBuilderRef;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderDispose(Builder: DIBuilderRef);
#[fast_ffi]
pub unsafe fn LLVMDIBuilderFinalize(Builder: DIBuilderRef);
#[fast_ffi]
pub unsafe fn LLVMDIBuilderCreateCompileUnit(
Builder: DIBuilderRef,
Lang: c_uint,
File: *c_char,
Dir: *c_char,
Producer: *c_char,
isOptimized: bool,
Flags: *c_char,
RuntimeVer: c_uint,
SplitName: *c_char);
#[fast_ffi]
pub unsafe fn LLVMDIBuilderCreateFile(
Builder: DIBuilderRef,
Filename: *c_char,
Directory: *c_char) -> DIFile;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderCreateSubroutineType(
Builder: DIBuilderRef,
File: DIFile,
ParameterTypes: DIArray) -> DICompositeType;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderCreateFunction(
Builder: DIBuilderRef,
Scope: DIDescriptor,
Name: *c_char,
LinkageName: *c_char,
File: DIFile,
LineNo: c_uint,
Ty: DIType,
isLocalToUnit: bool,
isDefinition: bool,
ScopeLine: c_uint,
Flags: c_uint,
isOptimized: bool,
Fn: ValueRef,
TParam: ValueRef,
Decl: ValueRef) -> DISubprogram;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderCreateBasicType(
Builder: DIBuilderRef,
Name: *c_char,
SizeInBits: c_ulonglong,
AlignInBits: c_ulonglong,
Encoding: c_uint) -> DIBasicType;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderCreatePointerType(
Builder: DIBuilderRef,
PointeeTy: DIType,
SizeInBits: c_ulonglong,
AlignInBits: c_ulonglong,
Name: *c_char) -> DIDerivedType;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderCreateStructType(
Builder: DIBuilderRef,
Scope: DIDescriptor,
Name: *c_char,
File: DIFile,
LineNumber: c_uint,
SizeInBits: c_ulonglong,
AlignInBits: c_ulonglong,
Flags: c_uint,
DerivedFrom: DIType,
Elements: DIArray,
RunTimeLang: c_uint,
VTableHolder: ValueRef) -> DICompositeType;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderCreateMemberType(
Builder: DIBuilderRef,
Scope: DIDescriptor,
Name: *c_char,
File: DIFile,
LineNo: c_uint,
SizeInBits: c_ulonglong,
AlignInBits: c_ulonglong,
OffsetInBits: c_ulonglong,
Flags: c_uint,
Ty: DIType) -> DIDerivedType;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderCreateLexicalBlock(
Builder: DIBuilderRef,
Scope: DIDescriptor,
File: DIFile,
Line: c_uint,
Col: c_uint) -> DILexicalBlock;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderCreateLocalVariable(
Builder: DIBuilderRef,
Tag: c_uint,
Scope: DIDescriptor,
Name: *c_char,
File: DIFile,
LineNo: c_uint,
Ty: DIType,
AlwaysPreserve: bool,
Flags: c_uint,
ArgNo: c_uint) -> DIVariable;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderCreateArrayType(
Builder: DIBuilderRef,
Size: c_ulonglong,
AlignInBits: c_ulonglong,
Ty: DIType,
Subscripts: DIArray) -> DIType;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderCreateVectorType(
Builder: DIBuilderRef,
Size: c_ulonglong,
AlignInBits: c_ulonglong,
Ty: DIType,
Subscripts: DIArray) -> DIType;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderGetOrCreateSubrange(
Builder: DIBuilderRef,
Lo: c_longlong,
Count: c_longlong) -> DISubrange;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderGetOrCreateArray(
Builder: DIBuilderRef,
Ptr: *DIDescriptor,
Count: c_uint) -> DIArray;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderInsertDeclareAtEnd(
Builder: DIBuilderRef,
Val: ValueRef,
VarInfo: DIVariable,
InsertAtEnd: BasicBlockRef) -> ValueRef;
#[fast_ffi]
pub unsafe fn LLVMDIBuilderInsertDeclareBefore(
Builder: DIBuilderRef,
Val: ValueRef,
VarInfo: DIVariable,
InsertBefore: ValueRef) -> ValueRef;
}
}
@ -1988,9 +2192,7 @@ pub fn type_to_str_inner(names: @TypeNames, outer0: &[TypeRef], ty: TypeRef)
let out_ty: TypeRef = llvm::LLVMGetReturnType(ty);
let n_args = llvm::LLVMCountParamTypes(ty) as uint;
let args = vec::from_elem(n_args, 0 as TypeRef);
unsafe {
llvm::LLVMGetParamTypes(ty, vec::raw::to_ptr(args));
}
llvm::LLVMGetParamTypes(ty, vec::raw::to_ptr(args));
// See [Note at-str]
return fmt!("fn(%s) -> %s",
tys_str(names, outer, args),

View file

@ -1184,7 +1184,7 @@ fn create_index<T:Copy + Hash + IterBytes>(index: ~[entry<T>]) ->
for uint::range(0u, 256u) |_i| { buckets.push(@mut ~[]); };
for index.each |elt| {
let h = elt.val.hash() as uint;
buckets[h % 256].push(*elt);
buckets[h % 256].push(copy *elt);
}
let mut buckets_frozen = ~[];

View file

@ -204,35 +204,33 @@ fn get_metadata_section(os: os,
let si = mk_section_iter(of.llof);
while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {
let name_buf = llvm::LLVMGetSectionName(si.llsi);
let name = unsafe { str::raw::from_c_str(name_buf) };
let name = str::raw::from_c_str(name_buf);
debug!("get_metadata_section: name %s", name);
if name == read_meta_section_name(os) {
let cbuf = llvm::LLVMGetSectionContents(si.llsi);
let csz = llvm::LLVMGetSectionSize(si.llsi) as uint;
let mut found = None;
unsafe {
let cvbuf: *u8 = cast::transmute(cbuf);
let vlen = encoder::metadata_encoding_version.len();
debug!("checking %u bytes of metadata-version stamp",
vlen);
let minsz = uint::min(vlen, csz);
let mut version_ok = false;
do vec::raw::buf_as_slice(cvbuf, minsz) |buf0| {
version_ok = (buf0 ==
encoder::metadata_encoding_version);
}
if !version_ok { return None; }
let cvbuf: *u8 = cast::transmute(cbuf);
let vlen = encoder::metadata_encoding_version.len();
debug!("checking %u bytes of metadata-version stamp",
vlen);
let minsz = uint::min(vlen, csz);
let mut version_ok = false;
do vec::raw::buf_as_slice(cvbuf, minsz) |buf0| {
version_ok = (buf0 ==
encoder::metadata_encoding_version);
}
if !version_ok { return None; }
let cvbuf1 = ptr::offset(cvbuf, vlen);
debug!("inflating %u bytes of compressed metadata",
csz - vlen);
do vec::raw::buf_as_slice(cvbuf1, csz-vlen) |bytes| {
let inflated = flate::inflate_bytes(bytes);
found = Some(@(inflated));
}
if found != None {
return found;
}
let cvbuf1 = ptr::offset(cvbuf, vlen);
debug!("inflating %u bytes of compressed metadata",
csz - vlen);
do vec::raw::buf_as_slice(cvbuf1, csz-vlen) |bytes| {
let inflated = flate::inflate_bytes(bytes);
found = Some(@(inflated));
}
if found != None {
return found;
}
}
llvm::LLVMMoveToNextSection(si.llsi);

View file

@ -15,7 +15,6 @@ use cstore = metadata::cstore;
use driver::session::Session;
use e = metadata::encoder;
use metadata::decoder;
use metadata::encoder;
use metadata::tydecode;
use metadata::tydecode::{DefIdSource, NominalType, TypeWithId, TypeParameter};
use metadata::tyencode;

View file

@ -82,7 +82,7 @@ impl<'self> CheckLoanCtxt<'self> {
//! are issued for future scopes and thus they may have been
//! *issued* but not yet be in effect.
for self.dfcx_loans.each_bit_on_entry(scope_id) |loan_index| {
for self.dfcx_loans.each_bit_on_entry_frozen(scope_id) |loan_index| {
let loan = &self.all_loans[loan_index];
if !op(loan) {
return false;
@ -134,7 +134,7 @@ impl<'self> CheckLoanCtxt<'self> {
//! we encounter `scope_id`.
let mut result = ~[];
for self.dfcx_loans.each_gen_bit(scope_id) |loan_index| {
for self.dfcx_loans.each_gen_bit_frozen(scope_id) |loan_index| {
result.push(loan_index);
}
return result;

View file

@ -105,7 +105,8 @@ fn check_is_legal_to_move_from(bccx: @BorrowckCtxt,
mc::cat_implicit_self(*) |
mc::cat_copied_upvar(*) |
mc::cat_deref(_, _, mc::region_ptr(*)) |
mc::cat_deref(_, _, mc::gc_ptr(*)) => {
mc::cat_deref(_, _, mc::gc_ptr(*)) |
mc::cat_deref(_, _, mc::unsafe_ptr(*)) => {
bccx.span_err(
cmt0.span,
fmt!("cannot move out of %s",
@ -129,8 +130,7 @@ fn check_is_legal_to_move_from(bccx: @BorrowckCtxt,
mc::cat_rvalue(*) |
mc::cat_local(*) |
mc::cat_arg(*) |
mc::cat_self(*) |
mc::cat_deref(_, _, mc::unsafe_ptr(*)) => {
mc::cat_self(*) => {
true
}

View file

@ -775,17 +775,17 @@ impl BorrowckCtxt {
}
impl DataFlowOperator for LoanDataFlowOperator {
#[inline(always)]
#[inline]
fn initial_value(&self) -> bool {
false // no loans in scope by default
}
#[inline(always)]
#[inline]
fn join(&self, succ: uint, pred: uint) -> uint {
succ | pred // loans from both preds are in scope
}
#[inline(always)]
#[inline]
fn walk_closures(&self) -> bool {
true
}

View file

@ -504,7 +504,7 @@ impl FlowedMoveData {
let opt_loan_path_index = self.move_data.existing_move_path(loan_path);
for self.dfcx_moves.each_bit_on_entry(id) |index| {
for self.dfcx_moves.each_bit_on_entry_frozen(id) |index| {
let move = &self.move_data.moves[index];
let moved_path = move.path;
if base_indices.contains(&moved_path) {
@ -560,7 +560,7 @@ impl FlowedMoveData {
}
};
for self.dfcx_assign.each_bit_on_entry(id) |index| {
for self.dfcx_assign.each_bit_on_entry_frozen(id) |index| {
let assignment = &self.move_data.var_assignments[index];
if assignment.path == loan_path_index && !f(assignment) {
return false;
@ -571,34 +571,34 @@ impl FlowedMoveData {
}
impl DataFlowOperator for MoveDataFlowOperator {
#[inline(always)]
#[inline]
fn initial_value(&self) -> bool {
false // no loans in scope by default
}
#[inline(always)]
#[inline]
fn join(&self, succ: uint, pred: uint) -> uint {
succ | pred // moves from both preds are in scope
}
#[inline(always)]
#[inline]
fn walk_closures(&self) -> bool {
true
}
}
impl DataFlowOperator for AssignDataFlowOperator {
#[inline(always)]
#[inline]
fn initial_value(&self) -> bool {
false // no assignments in scope by default
}
#[inline(always)]
#[inline]
fn join(&self, succ: uint, pred: uint) -> uint {
succ | pred // moves from both preds are in scope
}
#[inline(always)]
#[inline]
fn walk_closures(&self) -> bool {
true
}

View file

@ -22,6 +22,7 @@ use core::cast;
use core::io;
use core::uint;
use core::vec;
use core::hashmap::HashMap;
use syntax::ast;
use syntax::ast_util;
use syntax::ast_util::id_range;
@ -37,9 +38,6 @@ pub struct DataFlowContext<O> {
/// the data flow operator
priv oper: O,
/// range of ids that appear within the item in question
priv id_range: id_range,
/// number of bits to propagate per id
priv bits_per_id: uint,
@ -47,6 +45,9 @@ pub struct DataFlowContext<O> {
/// equal to bits_per_id/uint::bits rounded up.
priv words_per_id: uint,
// mapping from node to bitset index.
priv nodeid_to_bitset: HashMap<ast::node_id,uint>,
// Bit sets per id. The following three fields (`gens`, `kills`,
// and `on_entry`) all have the same structure. For each id in
// `id_range`, there is a range of words equal to `words_per_id`.
@ -108,19 +109,17 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
debug!("DataFlowContext::new(id_range=%?, bits_per_id=%?, words_per_id=%?)",
id_range, bits_per_id, words_per_id);
let len = (id_range.max - id_range.min) as uint * words_per_id;
let gens = vec::from_elem(len, 0);
let kills = vec::from_elem(len, 0);
let elem = if oper.initial_value() {uint::max_value} else {0};
let on_entry = vec::from_elem(len, elem);
let gens = ~[];
let kills = ~[];
let on_entry = ~[];
DataFlowContext {
tcx: tcx,
method_map: method_map,
words_per_id: words_per_id,
nodeid_to_bitset: HashMap::new(),
bits_per_id: bits_per_id,
oper: oper,
id_range: id_range,
gens: gens,
kills: kills,
on_entry: on_entry
@ -149,7 +148,7 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
}
}
fn apply_gen_kill(&self, id: ast::node_id, bits: &mut [uint]) {
fn apply_gen_kill(&mut self, id: ast::node_id, bits: &mut [uint]) {
//! Applies the gen and kill sets for `id` to `bits`
debug!("apply_gen_kill(id=%?, bits=%s) [before]",
@ -164,7 +163,7 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
id, mut_bits_to_str(bits));
}
fn apply_kill(&self, id: ast::node_id, bits: &mut [uint]) {
fn apply_kill(&mut self, id: ast::node_id, bits: &mut [uint]) {
debug!("apply_kill(id=%?, bits=%s) [before]",
id, mut_bits_to_str(bits));
let (start, end) = self.compute_id_range(id);
@ -174,18 +173,56 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
id, mut_bits_to_str(bits));
}
fn compute_id_range(&self, absolute_id: ast::node_id) -> (uint, uint) {
assert!(absolute_id >= self.id_range.min);
assert!(absolute_id < self.id_range.max);
let relative_id = absolute_id - self.id_range.min;
let start = (relative_id as uint) * self.words_per_id;
fn compute_id_range_frozen(&self, id: ast::node_id) -> (uint, uint) {
let n = *self.nodeid_to_bitset.get(&id);
let start = n * self.words_per_id;
let end = start + self.words_per_id;
(start, end)
}
fn compute_id_range(&mut self, id: ast::node_id) -> (uint, uint) {
let mut expanded = false;
let len = self.nodeid_to_bitset.len();
let n = do self.nodeid_to_bitset.find_or_insert_with(id) |_| {
expanded = true;
len
};
if expanded {
let entry = if self.oper.initial_value() { uint::max_value } else {0};
for self.words_per_id.times {
self.gens.push(0);
self.kills.push(0);
self.on_entry.push(entry);
}
}
let start = *n * self.words_per_id;
let end = start + self.words_per_id;
pub fn each_bit_on_entry(&self,
assert!(start < self.gens.len());
assert!(end <= self.gens.len());
assert!(self.gens.len() == self.kills.len());
assert!(self.gens.len() == self.on_entry.len());
(start, end)
}
pub fn each_bit_on_entry_frozen(&self,
id: ast::node_id,
f: &fn(uint) -> bool) -> bool {
//! Iterates through each bit that is set on entry to `id`.
//! Only useful after `propagate()` has been called.
if !self.nodeid_to_bitset.contains_key(&id) {
return true;
}
let (start, end) = self.compute_id_range_frozen(id);
let on_entry = vec::slice(self.on_entry, start, end);
debug!("each_bit_on_entry_frozen(id=%?, on_entry=%s)",
id, bits_to_str(on_entry));
self.each_bit(on_entry, f)
}
pub fn each_bit_on_entry(&mut self,
id: ast::node_id,
f: &fn(uint) -> bool) -> bool {
//! Iterates through each bit that is set on entry to `id`.
@ -198,7 +235,7 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
self.each_bit(on_entry, f)
}
pub fn each_gen_bit(&self,
pub fn each_gen_bit(&mut self,
id: ast::node_id,
f: &fn(uint) -> bool) -> bool {
//! Iterates through each bit in the gen set for `id`.
@ -210,6 +247,20 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
self.each_bit(gens, f)
}
pub fn each_gen_bit_frozen(&self,
id: ast::node_id,
f: &fn(uint) -> bool) -> bool {
//! Iterates through each bit in the gen set for `id`.
if !self.nodeid_to_bitset.contains_key(&id) {
return true;
}
let (start, end) = self.compute_id_range_frozen(id);
let gens = vec::slice(self.gens, start, end);
debug!("each_gen_bit(id=%?, gens=%s)",
id, bits_to_str(gens));
self.each_bit(gens, f)
}
fn each_bit(&self,
words: &[uint],
f: &fn(uint) -> bool) -> bool {
@ -285,8 +336,8 @@ impl<O:DataFlowOperator+Copy+'static> DataFlowContext<O> {
pprust::node_pat(ps, pat) => (ps, pat.id)
};
if id >= self.id_range.min || id < self.id_range.max {
let (start, end) = self.compute_id_range(id);
if self.nodeid_to_bitset.contains_key(&id) {
let (start, end) = self.compute_id_range_frozen(id);
let on_entry = vec::slice(self.on_entry, start, end);
let entry_str = bits_to_str(on_entry);
@ -965,7 +1016,7 @@ fn join_bits<O:DataFlowOperator>(oper: &O,
bitwise(out_vec, in_vec, |a, b| oper.join(a, b))
}
#[inline(always)]
#[inline]
fn bitwise(out_vec: &mut [uint],
in_vec: &[uint],
op: &fn(uint, uint) -> uint) -> bool {

View file

@ -749,11 +749,7 @@ impl Liveness {
None => {
// Vanilla 'break' or 'loop', so use the enclosing
// loop scope
let len = { // FIXME(#5074) stage0
let loop_scope = &mut *self.loop_scope;
loop_scope.len()
};
if len == 0 {
if self.loop_scope.len() == 0 {
self.tcx.sess.span_bug(sp, "break outside loop");
} else {
// FIXME(#5275): this shouldn't have to be a method...

View file

@ -4845,7 +4845,7 @@ impl Resolver {
let mut smallest = 0;
for maybes.eachi |i, &other| {
values[i] = str::levdistance(name, other);
values[i] = name.lev_distance(other);
if values[i] <= values[smallest] {
smallest = i;

View file

@ -47,7 +47,6 @@
use core::container::Map;
use core::libc::c_ulonglong;
use core::option::{Option, Some, None};
use core::str;
use core::vec;
use lib::llvm::{ValueRef, TypeRef, True, IntEQ, IntNE};
@ -165,7 +164,7 @@ fn represent_type_uncached(cx: &mut CrateContext, t: ty::t) -> Repr {
if cases.all(|c| c.tys.len() == 0) {
// All bodies empty -> intlike
let discrs = cases.map(|c| c.discr);
return CEnum(discrs.min(), discrs.max());
return CEnum(*discrs.iter().min().unwrap(), *discrs.iter().max().unwrap());
}
if cases.len() == 1 {
@ -509,7 +508,7 @@ pub fn trans_const(ccx: &mut CrateContext, r: &Repr, discr: int,
}
General(ref cases) => {
let case = &cases[discr as uint];
let max_sz = cases.map(|s| s.size).max();
let max_sz = cases.iter().transform(|x| x.size).max().unwrap();
let discr_ty = C_int(ccx, discr);
let contents = build_const_struct(ccx, case,
~[discr_ty] + vals);
@ -577,7 +576,7 @@ fn padding(size: u64) -> ValueRef {
}
// XXX this utility routine should be somewhere more general
#[inline(always)]
#[inline]
fn roundup(x: u64, a: u64) -> u64 { ((x + (a - 1)) / a) * a }
/// Get the discriminant of a constant value. (Not currently used.)

View file

@ -71,7 +71,6 @@ use core::libc::c_uint;
use core::str;
use core::uint;
use core::vec;
use core::local_data;
use extra::time;
use syntax::ast::ident;
use syntax::ast_map::{path, path_elt_to_str, path_name};
@ -1318,26 +1317,38 @@ pub fn cleanup_and_leave(bcx: block,
match cur.kind {
block_scope(inf) if !inf.empty_cleanups() => {
let (sub_cx, inf_cleanups) = {
let inf = &mut *inf; // FIXME(#5074) workaround stage0
let (sub_cx, dest, inf_cleanups) = {
let inf = &mut *inf;
let mut skip = 0;
let mut dest = None;
{
let r = vec::find((*inf).cleanup_paths, |cp| cp.target == leave);
let r = vec::rfind((*inf).cleanup_paths, |cp| cp.target == leave);
for r.iter().advance |cp| {
Br(bcx, cp.dest);
return;
if cp.size == inf.cleanups.len() {
Br(bcx, cp.dest);
return;
}
skip = cp.size;
dest = Some(cp.dest);
}
}
let sub_cx = sub_block(bcx, "cleanup");
Br(bcx, sub_cx.llbb);
inf.cleanup_paths.push(cleanup_path {
target: leave,
size: inf.cleanups.len(),
dest: sub_cx.llbb
});
(sub_cx, copy inf.cleanups)
(sub_cx, dest, inf.cleanups.tailn(skip).to_owned())
};
bcx = trans_block_cleanups_(sub_cx,
inf_cleanups,
is_lpad);
for dest.iter().advance |&dest| {
Br(bcx, dest);
return;
}
}
_ => ()
}
@ -1897,6 +1908,12 @@ pub fn trans_closure(ccx: @mut CrateContext,
finish(bcx);
cleanup_and_Br(bcx, bcx_top, fcx.llreturn);
// Put return block after all other blocks.
// This somewhat improves single-stepping experience in debugger.
unsafe {
llvm::LLVMMoveBasicBlockAfter(fcx.llreturn, bcx.llbb);
}
// Insert the mandatory first few basic blocks before lltop.
finish_fn(fcx, lltop);
}
@ -3033,7 +3050,7 @@ pub fn write_metadata(cx: &mut CrateContext, crate: &ast::crate) {
// Writes the current ABI version into the crate.
pub fn write_abi_version(ccx: &mut CrateContext) {
mk_global(ccx, ~"rust_abi_version", C_uint(ccx, abi::abi_version),
mk_global(ccx, "rust_abi_version", C_uint(ccx, abi::abi_version),
false);
}
@ -3091,6 +3108,9 @@ pub fn trans_crate(sess: session::Session,
fill_crate_map(ccx, ccx.crate_map);
glue::emit_tydescs(ccx);
write_abi_version(ccx);
if ccx.sess.opts.debuginfo {
debuginfo::finalize(ccx);
}
// Translate the metadata.
write_metadata(ccx, crate);
@ -3120,4 +3140,3 @@ pub fn trans_crate(sess: session::Session,
return (llcx, llmod, link_meta);
}

View file

@ -618,13 +618,11 @@ pub fn InBoundsGEP(cx: block, Pointer: ValueRef, Indices: &[ValueRef]) ->
ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(T_ptr(T_nil())); }
unsafe {
count_insn(cx, "inboundsgep");
count_insn(cx, "inboundsgep");
return llvm::LLVMBuildInBoundsGEP(B(cx), Pointer,
vec::raw::to_ptr(Indices),
Indices.len() as c_uint,
noname());
}
}
}
@ -1072,11 +1070,9 @@ pub fn Trap(cx: block) {
});
assert!((T as int != 0));
let Args: ~[ValueRef] = ~[];
unsafe {
count_insn(cx, "trap");
llvm::LLVMBuildCall(b, T, vec::raw::to_ptr(Args),
Args.len() as c_uint, noname());
}
count_insn(cx, "trap");
llvm::LLVMBuildCall(b, T, vec::raw::to_ptr(Args),
Args.len() as c_uint, noname());
}
}

View file

@ -553,7 +553,7 @@ pub fn make_opaque_cbox_drop_glue(
ast::BorrowedSigil => bcx,
ast::ManagedSigil => {
glue::decr_refcnt_maybe_free(
bcx, Load(bcx, cboxptr),
bcx, Load(bcx, cboxptr), Some(cboxptr),
ty::mk_opaque_closure_ptr(bcx.tcx(), sigil))
}
ast::OwnedSigil => {

View file

@ -325,11 +325,17 @@ pub enum cleanup {
// target: none means the path ends in an resume instruction
pub struct cleanup_path {
target: Option<BasicBlockRef>,
size: uint,
dest: BasicBlockRef
}
pub fn scope_clean_changed(scope_info: &mut scope_info) {
if scope_info.cleanup_paths.len() > 0u { scope_info.cleanup_paths = ~[]; }
pub fn shrink_scope_clean(scope_info: &mut scope_info, size: uint) {
scope_info.landing_pad = None;
scope_info.cleanup_paths = scope_info.cleanup_paths.iter()
.take_while(|&cu| cu.size <= size).transform(|&x|x).collect();
}
pub fn grow_scope_clean(scope_info: &mut scope_info) {
scope_info.landing_pad = None;
}
@ -374,7 +380,7 @@ pub fn add_clean(bcx: block, val: ValueRef, t: ty::t) {
scope_info.cleanups.push(
clean(|a| glue::drop_ty_root(a, root, rooted, t),
cleanup_type));
scope_clean_changed(scope_info);
grow_scope_clean(scope_info);
}
}
@ -388,7 +394,7 @@ pub fn add_clean_temp_immediate(cx: block, val: ValueRef, ty: ty::t) {
scope_info.cleanups.push(
clean_temp(val, |a| glue::drop_ty_immediate(a, val, ty),
cleanup_type));
scope_clean_changed(scope_info);
grow_scope_clean(scope_info);
}
}
pub fn add_clean_temp_mem(bcx: block, val: ValueRef, t: ty::t) {
@ -402,7 +408,7 @@ pub fn add_clean_temp_mem(bcx: block, val: ValueRef, t: ty::t) {
scope_info.cleanups.push(
clean_temp(val, |a| glue::drop_ty_root(a, root, rooted, t),
cleanup_type));
scope_clean_changed(scope_info);
grow_scope_clean(scope_info);
}
}
pub fn add_clean_return_to_mut(bcx: block,
@ -434,7 +440,7 @@ pub fn add_clean_return_to_mut(bcx: block,
filename_val,
line_val),
normal_exit_only));
scope_clean_changed(scope_info);
grow_scope_clean(scope_info);
}
}
pub fn add_clean_free(cx: block, ptr: ValueRef, heap: heap) {
@ -451,7 +457,7 @@ pub fn add_clean_free(cx: block, ptr: ValueRef, heap: heap) {
do in_scope_cx(cx) |scope_info| {
scope_info.cleanups.push(clean_temp(ptr, free_fn,
normal_exit_and_unwind));
scope_clean_changed(scope_info);
grow_scope_clean(scope_info);
}
}
@ -474,7 +480,7 @@ pub fn revoke_clean(cx: block, val: ValueRef) {
vec::slice(scope_info.cleanups,
*i + 1u,
scope_info.cleanups.len()));
scope_clean_changed(scope_info);
shrink_scope_clean(scope_info, *i);
}
}
}

View file

@ -148,7 +148,7 @@ impl CrateContext {
lib::llvm::associate_type(tn, @"tydesc", tydesc_type);
let crate_map = decl_crate_map(sess, link_meta, llmod);
let dbg_cx = if sess.opts.debuginfo {
Some(debuginfo::mk_ctxt(name.to_owned()))
Some(debuginfo::DebugContext::new(llmod, name.to_owned()))
} else {
None
};
@ -211,9 +211,7 @@ impl CrateContext {
int_type: int_type,
float_type: float_type,
opaque_vec_type: T_opaque_vec(targ_cfg),
builder: BuilderRef_res(unsafe {
llvm::LLVMCreateBuilderInContext(llcx)
}),
builder: BuilderRef_res(llvm::LLVMCreateBuilderInContext(llcx)),
shape_cx: mk_ctxt(llmod),
crate_map: crate_map,
uses_gc: false,

File diff suppressed because it is too large Load diff

View file

@ -103,7 +103,7 @@ pub fn drop_ty_immediate(bcx: block, v: ValueRef, t: ty::t) -> block {
ty::ty_box(_) | ty::ty_opaque_box |
ty::ty_evec(_, ty::vstore_box) |
ty::ty_estr(ty::vstore_box) => {
decr_refcnt_maybe_free(bcx, v, t)
decr_refcnt_maybe_free(bcx, v, None, t)
}
_ => bcx.tcx().sess.bug("drop_ty_immediate: non-box ty")
}
@ -419,15 +419,6 @@ pub fn make_free_glue(bcx: block, v: ValueRef, t: ty::t) {
ty::ty_opaque_closure_ptr(ck) => {
closure::make_opaque_cbox_free_glue(bcx, ck, v)
}
ty::ty_struct(did, ref substs) => {
// Call the dtor if there is one
match ty::ty_dtor(bcx.tcx(), did) {
ty::NoDtor => bcx,
ty::TraitDtor(ref dt_id) => {
trans_struct_drop(bcx, t, v, *dt_id, did, substs)
}
}
}
_ => bcx
};
build_return(bcx);
@ -489,7 +480,7 @@ pub fn make_drop_glue(bcx: block, v0: ValueRef, t: ty::t) {
let bcx = match ty::get(t).sty {
ty::ty_box(_) | ty::ty_opaque_box |
ty::ty_estr(ty::vstore_box) | ty::ty_evec(_, ty::vstore_box) => {
decr_refcnt_maybe_free(bcx, Load(bcx, v0), t)
decr_refcnt_maybe_free(bcx, Load(bcx, v0), Some(v0), t)
}
ty::ty_uniq(_) |
ty::ty_evec(_, ty::vstore_uniq) | ty::ty_estr(ty::vstore_uniq) => {
@ -514,8 +505,10 @@ pub fn make_drop_glue(bcx: block, v0: ValueRef, t: ty::t) {
closure::make_closure_glue(bcx, v0, t, drop_ty)
}
ty::ty_trait(_, _, ty::BoxTraitStore, _) => {
let llbox = Load(bcx, GEPi(bcx, v0, [0u, abi::trt_field_box]));
decr_refcnt_maybe_free(bcx, llbox, ty::mk_opaque_box(ccx.tcx))
let llbox_ptr = GEPi(bcx, v0, [0u, abi::trt_field_box]);
let llbox = Load(bcx, llbox_ptr);
decr_refcnt_maybe_free(bcx, llbox, Some(llbox_ptr),
ty::mk_opaque_box(ccx.tcx))
}
ty::ty_trait(_, _, ty::UniqTraitStore, _) => {
let lluniquevalue = GEPi(bcx, v0, [0, abi::trt_field_box]);
@ -549,7 +542,10 @@ pub fn make_drop_glue(bcx: block, v0: ValueRef, t: ty::t) {
build_return(bcx);
}
pub fn decr_refcnt_maybe_free(bcx: block, box_ptr: ValueRef, t: ty::t)
// box_ptr_ptr is optional, it is constructed if not supplied.
pub fn decr_refcnt_maybe_free(bcx: block, box_ptr: ValueRef,
box_ptr_ptr: Option<ValueRef>,
t: ty::t)
-> block {
let _icx = bcx.insn_ctxt("decr_refcnt_maybe_free");
let ccx = bcx.ccx();
@ -559,7 +555,12 @@ pub fn decr_refcnt_maybe_free(bcx: block, box_ptr: ValueRef, t: ty::t)
let rc = Sub(bcx, Load(bcx, rc_ptr), C_int(ccx, 1));
Store(bcx, rc, rc_ptr);
let zero_test = ICmp(bcx, lib::llvm::IntEQ, C_int(ccx, 0), rc);
with_cond(bcx, zero_test, |bcx| free_ty_immediate(bcx, box_ptr, t))
do with_cond(bcx, zero_test) |bcx| {
match box_ptr_ptr {
Some(p) => free_ty(bcx, p, t),
None => free_ty_immediate(bcx, box_ptr, t)
}
}
}
}

View file

@ -1094,62 +1094,62 @@ fn mk_t(cx: ctxt, st: sty) -> t {
}
}
#[inline(always)]
#[inline]
pub fn mk_prim_t(primitive: &'static t_box_) -> t {
unsafe {
cast::transmute::<&'static t_box_, t>(primitive)
}
}
#[inline(always)]
#[inline]
pub fn mk_nil() -> t { mk_prim_t(&primitives::TY_NIL) }
#[inline(always)]
#[inline]
pub fn mk_err() -> t { mk_prim_t(&primitives::TY_ERR) }
#[inline(always)]
#[inline]
pub fn mk_bot() -> t { mk_prim_t(&primitives::TY_BOT) }
#[inline(always)]
#[inline]
pub fn mk_bool() -> t { mk_prim_t(&primitives::TY_BOOL) }
#[inline(always)]
#[inline]
pub fn mk_int() -> t { mk_prim_t(&primitives::TY_INT) }
#[inline(always)]
#[inline]
pub fn mk_i8() -> t { mk_prim_t(&primitives::TY_I8) }
#[inline(always)]
#[inline]
pub fn mk_i16() -> t { mk_prim_t(&primitives::TY_I16) }
#[inline(always)]
#[inline]
pub fn mk_i32() -> t { mk_prim_t(&primitives::TY_I32) }
#[inline(always)]
#[inline]
pub fn mk_i64() -> t { mk_prim_t(&primitives::TY_I64) }
#[inline(always)]
#[inline]
pub fn mk_float() -> t { mk_prim_t(&primitives::TY_FLOAT) }
#[inline(always)]
#[inline]
pub fn mk_f32() -> t { mk_prim_t(&primitives::TY_F32) }
#[inline(always)]
#[inline]
pub fn mk_f64() -> t { mk_prim_t(&primitives::TY_F64) }
#[inline(always)]
#[inline]
pub fn mk_uint() -> t { mk_prim_t(&primitives::TY_UINT) }
#[inline(always)]
#[inline]
pub fn mk_u8() -> t { mk_prim_t(&primitives::TY_U8) }
#[inline(always)]
#[inline]
pub fn mk_u16() -> t { mk_prim_t(&primitives::TY_U16) }
#[inline(always)]
#[inline]
pub fn mk_u32() -> t { mk_prim_t(&primitives::TY_U32) }
#[inline(always)]
#[inline]
pub fn mk_u64() -> t { mk_prim_t(&primitives::TY_U64) }
pub fn mk_mach_int(tm: ast::int_ty) -> t {
@ -1181,7 +1181,7 @@ pub fn mk_mach_float(tm: ast::float_ty) -> t {
}
}
#[inline(always)]
#[inline]
pub fn mk_char() -> t { mk_prim_t(&primitives::TY_CHAR) }
pub fn mk_estr(cx: ctxt, t: vstore) -> t {
@ -2273,7 +2273,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
for type_param_def.bounds.builtin_bounds.each |bound| {
debug!("tc = %s, bound = %?", tc.to_str(), bound);
tc = tc - match bound {
BoundCopy => TypeContents::nonimplicitly_copyable(cx),
BoundCopy => TypeContents::noncopyable(cx),
BoundStatic => TypeContents::nonstatic(cx),
BoundOwned => TypeContents::nonowned(cx),
BoundConst => TypeContents::nonconst(cx),
@ -3694,7 +3694,7 @@ fn lookup_locally_or_in_crate_store<V:Copy>(
*/
match map.find(&def_id) {
Some(&v) => { return v; }
Some(&ref v) => { return copy *v; }
None => { }
}
@ -3702,8 +3702,8 @@ fn lookup_locally_or_in_crate_store<V:Copy>(
fail!("No def'n found for %? in tcx.%s", def_id, descr);
}
let v = load_external();
map.insert(def_id, v);
return v;
map.insert(def_id, copy v);
return copy v;
}
pub fn trait_method(cx: ctxt, trait_did: ast::def_id, idx: uint) -> @Method {

View file

@ -725,7 +725,7 @@ impl FnCtxt {
ty::re_scope(self.region_lb)
}
#[inline(always)]
#[inline]
pub fn write_ty(&self, node_id: ast::node_id, ty: ty::t) {
debug!("write_ty(%d, %s) in fcx %s",
node_id, ppaux::ty_to_str(self.tcx(), ty), self.tag());

View file

@ -10,7 +10,6 @@
use core::prelude::*;
use middle::resolve::Impl;
use middle::ty::param_ty;
use middle::ty;
use middle::typeck::check::{FnCtxt, impl_self_ty};
@ -27,7 +26,6 @@ use util::ppaux;
use core::hashmap::HashSet;
use core::result;
use core::uint;
use syntax::ast;
use syntax::ast_util;
use syntax::codemap::span;
@ -248,16 +246,9 @@ fn lookup_vtable(vcx: &VtableContext,
// Nothing found. Continue.
}
Some(implementations) => {
let len = { // FIXME(#5074): stage0 requires it
let implementations: &mut ~[@Impl] = *implementations;
implementations.len()
};
// implementations is the list of all impls in scope for
// trait_ref. (Usually, there's just one.)
for uint::range(0, len) |i| {
let im = implementations[i];
for implementations.iter().advance |im| {
// im is one specific impl of trait_ref.
// First, ensure we haven't processed this impl yet.

View file

@ -520,12 +520,8 @@ impl CoherenceChecker {
match extension_methods.find(&trait_def_id) {
Some(impls) => {
let len = { // FIXME(#5074) stage0 requires this
let impls: &mut ~[@Impl] = *impls;
impls.len()
};
for uint::range(0, len) |i| {
f(impls[i]);
for impls.iter().advance |&im| {
f(im);
}
}
None => { /* no impls? */ }

View file

@ -89,10 +89,10 @@ impl CombineFields {
// Need to make sub_id a subtype of sup_id.
let node_a = self.infcx.get(a_id);
let node_b = self.infcx.get(b_id);
let a_id = node_a.root;
let b_id = node_b.root;
let a_bounds = node_a.possible_types;
let b_bounds = node_b.possible_types;
let a_id = copy node_a.root;
let b_id = copy node_b.root;
let a_bounds = copy node_a.possible_types;
let b_bounds = copy node_b.possible_types;
debug!("vars(%s=%s <: %s=%s)",
a_id.to_str(), a_bounds.inf_str(self.infcx),
@ -102,8 +102,8 @@ impl CombineFields {
// If both A's UB and B's LB have already been bound to types,
// see if we can make those types subtypes.
match (a_bounds.ub, b_bounds.lb) {
(Some(ref a_ub), Some(ref b_lb)) => {
match (&a_bounds.ub, &b_bounds.lb) {
(&Some(ref a_ub), &Some(ref b_lb)) => {
let r = self.infcx.try(
|| LatticeValue::sub(self, a_ub, b_lb));
match r {
@ -138,9 +138,9 @@ impl CombineFields {
* Make a variable (`a_id`) a subtype of the concrete type `b` */
let node_a = self.infcx.get(a_id);
let a_id = node_a.root;
let a_id = copy node_a.root;
let a_bounds = &node_a.possible_types;
let b_bounds = &Bounds { lb: None, ub: Some(b) };
let b_bounds = &Bounds { lb: None, ub: Some(copy b) };
debug!("var_sub_t(%s=%s <: %s)",
a_id.to_str(),
@ -161,9 +161,9 @@ impl CombineFields {
*
* Make a concrete type (`a`) a subtype of the variable `b_id` */
let a_bounds = &Bounds { lb: Some(a), ub: None };
let a_bounds = &Bounds { lb: Some(copy a), ub: None };
let node_b = self.infcx.get(b_id);
let b_id = node_b.root;
let b_id = copy node_b.root;
let b_bounds = &node_b.possible_types;
debug!("t_sub_var(%s <: %s=%s)",
@ -190,11 +190,11 @@ impl CombineFields {
b.inf_str(self.infcx));
let _r = indenter();
match (*a, *b) {
(None, None) => Ok(None),
(Some(_), None) => Ok(*a),
(None, Some(_)) => Ok(*b),
(Some(ref v_a), Some(ref v_b)) => {
match (a, b) {
(&None, &None) => Ok(None),
(&Some(_), &None) => Ok(copy *a),
(&None, &Some(_)) => Ok(copy *b),
(&Some(ref v_a), &Some(ref v_b)) => {
do lattice_op(self, v_a, v_b).chain |v| {
Ok(Some(v))
}
@ -272,13 +272,13 @@ impl CombineFields {
b.inf_str(self.infcx));
let _r = indenter();
match (*a, *b) {
(None, None) |
(Some(_), None) |
(None, Some(_)) => {
match (a, b) {
(&None, &None) |
(&Some(_), &None) |
(&None, &Some(_)) => {
uok()
}
(Some(ref t_a), Some(ref t_b)) => {
(&Some(ref t_a), &Some(ref t_b)) => {
LatticeValue::sub(self, t_a, t_b)
}
}
@ -303,9 +303,9 @@ pub trait TyLatticeDir {
impl LatticeDir for Lub {
fn combine_fields(&self) -> CombineFields { **self }
fn bnd<T:Copy>(&self, b: &Bounds<T>) -> Option<T> { b.ub }
fn bnd<T:Copy>(&self, b: &Bounds<T>) -> Option<T> { copy b.ub }
fn with_bnd<T:Copy>(&self, b: &Bounds<T>, t: T) -> Bounds<T> {
Bounds { ub: Some(t), ..*b }
Bounds { ub: Some(t), ..copy *b }
}
}
@ -317,9 +317,9 @@ impl TyLatticeDir for Lub {
impl LatticeDir for Glb {
fn combine_fields(&self) -> CombineFields { **self }
fn bnd<T:Copy>(&self, b: &Bounds<T>) -> Option<T> { b.lb }
fn bnd<T:Copy>(&self, b: &Bounds<T>) -> Option<T> { copy b.lb }
fn with_bnd<T:Copy>(&self, b: &Bounds<T>, t: T) -> Bounds<T> {
Bounds { lb: Some(t), ..*b }
Bounds { lb: Some(t), ..copy *b }
}
}
@ -405,8 +405,8 @@ pub fn lattice_vars<L:LatticeDir + Combine,
-> cres<LatticeVarResult<V,T>> {
let nde_a = this.infcx().get(a_vid);
let nde_b = this.infcx().get(b_vid);
let a_vid = nde_a.root;
let b_vid = nde_b.root;
let a_vid = copy nde_a.root;
let b_vid = copy nde_b.root;
let a_bounds = &nde_a.possible_types;
let b_bounds = &nde_b.possible_types;
@ -436,8 +436,8 @@ pub fn lattice_vars<L:LatticeDir + Combine,
// Otherwise, we need to merge A and B into one variable. We can
// then use either variable as an upper bound:
let cf = this.combine_fields();
do cf.var_sub_var(a_vid, b_vid).then {
Ok(VarResult(a_vid))
do cf.var_sub_var(copy a_vid, copy b_vid).then {
Ok(VarResult(copy a_vid))
}
}
@ -450,7 +450,7 @@ pub fn lattice_var_and_t<L:LatticeDir + Combine,
lattice_dir_op: LatticeDirOp<T>)
-> cres<T> {
let nde_a = this.infcx().get(a_id);
let a_id = nde_a.root;
let a_id = copy nde_a.root;
let a_bounds = &nde_a.possible_types;
// The comments in this function are written for LUB, but they
@ -472,10 +472,11 @@ pub fn lattice_var_and_t<L:LatticeDir + Combine,
// If a does not have an upper bound, make b the upper bound of a
// and then return b.
debug!("bnd=None");
let a_bounds = this.with_bnd(a_bounds, *b);
let a_bounds = this.with_bnd(a_bounds, copy *b);
do this.combine_fields().bnds(&a_bounds.lb, &a_bounds.ub).then {
this.infcx().set(a_id, Root(a_bounds, nde_a.rank));
Ok(*b)
this.infcx().set(copy a_id,
Root(copy a_bounds, copy nde_a.rank));
Ok(copy *b)
}
}
}

View file

@ -504,9 +504,9 @@ trait CresCompare<T> {
impl<T:Copy + Eq> CresCompare<T> for cres<T> {
fn compare(&self, t: T, f: &fn() -> ty::type_err) -> cres<T> {
do self.chain |s| {
do (copy *self).chain |s| {
if s == t {
*self
copy *self
} else {
Err(f())
}

View file

@ -61,7 +61,7 @@ impl InferCtxt {
{
let vid_u = vid.to_uint();
let var_val = match vb.vals.find(&vid_u) {
Some(&var_val) => var_val,
Some(&ref var_val) => copy *var_val,
None => {
tcx.sess.bug(fmt!(
"failed lookup of vid `%u`", vid_u));
@ -69,11 +69,11 @@ impl InferCtxt {
};
match var_val {
Redirect(vid) => {
let node: Node<V,T> = helper(tcx, vb, vid);
let node: Node<V,T> = helper(tcx, vb, copy vid);
if node.root != vid {
// Path compression
vb.vals.insert(vid.to_uint(),
Redirect(node.root));
Redirect(copy node.root));
}
node
}
@ -96,12 +96,10 @@ impl InferCtxt {
debug!("Updating variable %s to %s",
vid.to_str(), new_v.inf_str(self));
{ // FIXME(#4903)---borrow checker is not flow sensitive
let vb = UnifyVid::appropriate_vals_and_bindings(self);
let old_v = { *vb.vals.get(&vid.to_uint()) }; // FIXME(#4903)
vb.bindings.push((vid, old_v));
vb.vals.insert(vid.to_uint(), new_v);
}
let vb = UnifyVid::appropriate_vals_and_bindings(self);
let old_v = copy *vb.vals.get(&vid.to_uint());
vb.bindings.push((copy vid, old_v));
vb.vals.insert(vid.to_uint(), new_v);
}
pub fn unify<T:Copy + InferStr,
@ -120,18 +118,18 @@ impl InferCtxt {
if node_a.rank > node_b.rank {
// a has greater rank, so a should become b's parent,
// i.e., b should redirect to a.
self.set(node_b.root, Redirect(node_a.root));
(node_a.root, node_a.rank)
self.set(copy node_b.root, Redirect(copy node_a.root));
(copy node_a.root, node_a.rank)
} else if node_a.rank < node_b.rank {
// b has greater rank, so a should redirect to b.
self.set(node_a.root, Redirect(node_b.root));
(node_b.root, node_b.rank)
self.set(copy node_a.root, Redirect(copy node_b.root));
(copy node_b.root, node_b.rank)
} else {
// If equal, redirect one to the other and increment the
// other's rank.
assert_eq!(node_a.rank, node_b.rank);
self.set(node_b.root, Redirect(node_a.root));
(node_a.root, node_a.rank + 1)
self.set(copy node_b.root, Redirect(copy node_a.root));
(copy node_a.root, node_a.rank + 1)
}
}
@ -174,20 +172,20 @@ impl InferCtxt {
let node_a = self.get(a_id);
let node_b = self.get(b_id);
let a_id = node_a.root;
let b_id = node_b.root;
let a_id = copy node_a.root;
let b_id = copy node_b.root;
if a_id == b_id { return uok(); }
let combined = match (&node_a.possible_types, &node_b.possible_types)
{
(&None, &None) => None,
(&Some(ref v), &None) | (&None, &Some(ref v)) => Some(*v),
(&Some(ref v), &None) | (&None, &Some(ref v)) => Some(copy *v),
(&Some(ref v1), &Some(ref v2)) => {
if *v1 != *v2 {
return mk_err(a_is_expected, *v1, *v2);
return mk_err(a_is_expected, copy *v1, copy *v2);
}
Some(*v1)
Some(copy *v1)
}
};
@ -211,7 +209,7 @@ impl InferCtxt {
* `b`. */
let node_a = self.get(a_id);
let a_id = node_a.root;
let a_id = copy node_a.root;
match node_a.possible_types {
None => {
@ -223,7 +221,7 @@ impl InferCtxt {
if *a_t == b {
return uok();
} else {
return mk_err(a_is_expected, *a_t, b);
return mk_err(a_is_expected, copy *a_t, b);
}
}
}

View file

@ -28,16 +28,8 @@ extern mod core(name = "std");
extern mod extra(name = "extra");
extern mod syntax;
// For deriving(Encodable) purposes...
#[cfg(stage0)]
extern mod std(name = "extra", vers = "0.7-pre");
#[cfg(not(stage0))]
extern mod std(name = "std", vers = "0.7-pre");
// For bootstrapping purposes.
#[cfg(stage0)]
pub use core::unstable;
use core::prelude::*;
use driver::driver::{host_triple, optgroups, early_error};

View file

@ -32,8 +32,6 @@ use syntax::ast;
use syntax::ast_map;
use syntax;
#[cfg(test)] use core::vec;
pub struct Ctxt {
ast: @ast::crate,
ast_map: ast_map::map

View file

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -152,6 +152,6 @@ mod test {
fn should_concatenate_multiple_doc_comments() {
let source = @"/// foo\n/// bar";
let desc = parse_desc(parse_attributes(source));
assert!(desc == Some(~"foo\nbar"));
assert!(desc == Some(~" foo\n bar"));
}
}

View file

@ -24,7 +24,6 @@ use fold::Fold;
use fold;
use pass::Pass;
use core::str;
use core::util;
pub fn mk_pass() -> Pass {
@ -129,25 +128,21 @@ fn first_sentence_(s: &str) -> ~str {
}
});
match idx {
Some(idx) if idx > 2u => {
str::to_owned(s.slice(0, idx - 1))
}
Some(idx) if idx > 2u => s.slice(0, idx - 1).to_owned(),
_ => {
if s.ends_with(".") {
str::to_owned(s)
s.to_owned()
} else {
str::to_owned(s)
s.to_owned()
}
}
}
}
pub fn paragraphs(s: &str) -> ~[~str] {
let mut lines = ~[];
for str::each_line_any(s) |line| { lines.push(line.to_owned()); }
let mut whitespace_lines = 0;
let mut accum = ~"";
let paras = do lines.iter().fold(~[]) |paras, line| {
let paras = do s.any_line_iter().fold(~[]) |paras, line| {
let mut res = paras;
if line.is_whitespace() {
@ -163,9 +158,9 @@ pub fn paragraphs(s: &str) -> ~[~str] {
whitespace_lines = 0;
accum = if accum.is_empty() {
copy *line
line.to_owned()
} else {
accum + "\n" + *line
fmt!("%s\n%s", accum, line)
}
}

View file

@ -285,8 +285,6 @@ mod test {
use extract::{extract, from_srv};
use parse;
use core::vec;
fn mk_doc(source: @str) -> doc::Doc {
let ast = parse::from_str(source);
extract(ast, ~"")

View file

@ -466,10 +466,7 @@ fn write_variant(ctxt: &Ctxt, doc: doc::VariantDoc) {
}
fn list_item_indent(item: &str) -> ~str {
let mut indented = ~[];
for str::each_line_any(item) |line| {
indented.push(line);
}
let indented = item.any_line_iter().collect::<~[&str]>();
// separate markdown elements within `*` lists must be indented by four
// spaces, or they will escape the list context. indenting everything
@ -539,8 +536,6 @@ mod test {
use tystr_pass;
use unindent_pass;
use core::str;
fn render(source: ~str) -> ~str {
let (srv, doc) = create_doc_srv(source);
let markdown = write_markdown_str_srv(srv, doc);

View file

@ -157,7 +157,6 @@ mod test {
use doc;
use extract;
use page_pass::run;
use core::vec;
fn mk_doc_(
output_style: config::OutputStyle,

View file

@ -77,8 +77,6 @@ mod test {
#[test]
fn should_prune_hidden_items() {
use core::vec;
let doc = mk_doc(~"#[doc(hidden)] mod a { }");
assert!(doc.cratemod().mods().is_empty())
}

View file

@ -162,7 +162,6 @@ mod test {
use extract;
use tystr_pass;
use prune_private_pass::run;
use core::vec;
fn mk_doc(source: ~str) -> doc::Doc {
do astsrv::from_str(copy source) |srv| {

View file

@ -19,7 +19,7 @@ use fold::Fold;
use fold;
use pass::Pass;
use core::str;
use core::iterator::IteratorUtil;
pub fn mk_pass() -> Pass {
Pass {
@ -104,21 +104,19 @@ fn sectionalize(desc: Option<~str>) -> (Option<~str>, ~[doc::Section]) {
if desc.is_none() {
return (None, ~[]);
}
let mut lines = ~[];
for str::each_line_any(*desc.get_ref()) |line| { lines.push(line.to_owned()); }
let mut new_desc = None::<~str>;
let mut current_section = None;
let mut sections = ~[];
for lines.each |line| {
match parse_header(copy *line) {
for desc.get_ref().any_line_iter().advance |line| {
match parse_header(line) {
Some(header) => {
if current_section.is_some() {
sections += [copy *current_section.get_ref()];
sections.push(copy *current_section.get_ref());
}
current_section = Some(doc::Section {
header: header,
header: header.to_owned(),
body: ~""
});
}
@ -126,17 +124,17 @@ fn sectionalize(desc: Option<~str>) -> (Option<~str>, ~[doc::Section]) {
match copy current_section {
Some(section) => {
current_section = Some(doc::Section {
body: section.body + "\n" + *line,
body: fmt!("%s\n%s", section.body, line),
.. section
});
}
None => {
new_desc = match copy new_desc {
Some(desc) => {
Some(desc + "\n" + *line)
Some(fmt!("%s\n%s", desc, line))
}
None => {
Some(copy *line)
Some(line.to_owned())
}
};
}
@ -146,15 +144,15 @@ fn sectionalize(desc: Option<~str>) -> (Option<~str>, ~[doc::Section]) {
}
if current_section.is_some() {
sections += [current_section.get()];
sections.push(current_section.unwrap());
}
(new_desc, sections)
}
fn parse_header(line: ~str) -> Option<~str> {
fn parse_header<'a>(line: &'a str) -> Option<&'a str> {
if line.starts_with("# ") {
Some(line.slice(2u, line.len()).to_owned())
Some(line.slice_from(2))
} else {
None
}
@ -172,9 +170,6 @@ mod test {
use extract;
use sectionalize_pass::run;
use core::str;
use core::vec;
fn mk_doc(source: ~str) -> doc::Doc {
do astsrv::from_str(copy source) |srv| {
let doc = extract::from_srv(srv.clone(), ~"");

View file

@ -149,8 +149,6 @@ mod test {
use sectionalize_pass;
use text_pass::mk_pass;
use core::str;
fn mk_doc(source: ~str) -> doc::Doc {
do astsrv::from_str(copy source) |srv| {
let doc = extract::from_srv(srv.clone(), ~"");

View file

@ -21,7 +21,6 @@ middle of a line, and each of the following lines is indented.
use core::prelude::*;
use core::str;
use core::uint;
use pass::Pass;
use text_pass;
@ -31,8 +30,7 @@ pub fn mk_pass() -> Pass {
}
fn unindent(s: &str) -> ~str {
let mut lines = ~[];
for str::each_line_any(s) |line| { lines.push(line.to_owned()); }
let lines = s.any_line_iter().collect::<~[&str]>();
let mut saw_first_line = false;
let mut saw_second_line = false;
let min_indent = do lines.iter().fold(uint::max_value)
@ -76,19 +74,20 @@ fn unindent(s: &str) -> ~str {
}
};
if !lines.is_empty() {
let unindented = ~[lines.head().trim().to_owned()]
+ do lines.tail().map |line| {
if line.is_whitespace() {
copy *line
} else {
assert!(line.len() >= min_indent);
line.slice(min_indent, line.len()).to_owned()
}
};
unindented.connect("\n")
} else {
s.to_str()
match lines {
[head, .. tail] => {
let mut unindented = ~[ head.trim() ];
unindented.push_all(do tail.map |&line| {
if line.is_whitespace() {
line
} else {
assert!(line.len() >= min_indent);
line.slice_from(min_indent)
}
});
unindented.connect("\n")
}
[] => s.to_owned()
}
}

View file

@ -370,7 +370,7 @@ fn run_cmd(repl: &mut Repl, _in: @io::Reader, _out: @io::Writer,
if arg.ends_with(".rs") || arg.ends_with(".rc") {
(arg.slice_to(arg.len() - 3).to_owned(), copy *arg)
} else {
(copy *arg, arg + ".rs")
(copy *arg, *arg + ".rs")
};
match compile_crate(filename, copy repl.binary) {
Some(_) => loaded_crates.push(crate),
@ -537,6 +537,9 @@ mod tests {
}
#[test]
// FIXME: #7220 rusti on 32bit mac doesn't work.
#[cfg(not(target_word_size="32",
target_os="macos"))]
fn run_all() {
// FIXME(#7071):
// By default, unit tests are run in parallel. Rusti, on the other hand,
@ -648,6 +651,9 @@ mod tests {
}
#[test]
// FIXME: #7220 rusti on 32bit mac doesn't work.
#[cfg(not(target_word_size="32",
target_os="macos"))]
fn exit_quits() {
let mut r = repl();
assert!(r.running);

View file

@ -309,7 +309,7 @@ fn frob_source_file(workspace: &Path, pkgid: &PkgId) {
}
}
#[test]
#[test] #[ignore] //FIXME(#7249)
fn test_all() {
// FIXME(#7071): these tests use rustc, so they can't be run in parallel
// until this issue is resolved

View file

@ -23,22 +23,8 @@ use vec;
/// Code for dealing with @-vectors. This is pretty incomplete, and
/// contains a bunch of duplication from the code for ~-vectors.
pub mod rustrt {
use libc;
use sys;
use vec;
#[abi = "cdecl"]
#[link_name = "rustrt"]
pub extern {
pub unsafe fn vec_reserve_shared_actual(t: *sys::TypeDesc,
v: **vec::raw::VecRepr,
n: libc::size_t);
}
}
/// Returns the number of elements the vector can hold without reallocating
#[inline(always)]
#[inline]
pub fn capacity<T>(v: @[T]) -> uint {
unsafe {
let repr: **raw::VecRepr = transmute(&v);
@ -58,7 +44,7 @@ pub fn capacity<T>(v: @[T]) -> uint {
* as an argument a function that will push an element
* onto the vector being constructed.
*/
#[inline(always)]
#[inline]
pub fn build_sized<A>(size: uint, builder: &fn(push: &fn(v: A))) -> @[A] {
let mut vec: @[A] = @[];
unsafe { raw::reserve(&mut vec, size); }
@ -76,7 +62,7 @@ pub fn build_sized<A>(size: uint, builder: &fn(push: &fn(v: A))) -> @[A] {
* as an argument a function that will push an element
* onto the vector being constructed.
*/
#[inline(always)]
#[inline]
pub fn build<A>(builder: &fn(push: &fn(v: A))) -> @[A] {
build_sized(4, builder)
}
@ -93,7 +79,7 @@ pub fn build<A>(builder: &fn(push: &fn(v: A))) -> @[A] {
* as an argument a function that will push an element
* onto the vector being constructed.
*/
#[inline(always)]
#[inline]
pub fn build_sized_opt<A>(size: Option<uint>,
builder: &fn(push: &fn(v: A)))
-> @[A] {
@ -104,11 +90,11 @@ pub fn build_sized_opt<A>(size: Option<uint>,
/// Iterates over the `rhs` vector, copying each element and appending it to the
/// `lhs`. Afterwards, the `lhs` is then returned for use again.
#[inline(always)]
#[inline]
pub fn append<T:Copy>(lhs: @[T], rhs: &const [T]) -> @[T] {
do build_sized(lhs.len() + rhs.len()) |push| {
for lhs.each |x| { push(*x); }
for uint::range(0, rhs.len()) |i| { push(rhs[i]); }
for lhs.each |x| { push(copy *x); }
for uint::range(0, rhs.len()) |i| { push(copy rhs[i]); }
}
}
@ -168,7 +154,7 @@ pub fn to_managed_consume<T>(v: ~[T]) -> @[T] {
* elements of a slice.
*/
pub fn to_managed<T:Copy>(v: &[T]) -> @[T] {
from_fn(v.len(), |i| v[i])
from_fn(v.len(), |i| copy v[i])
}
#[cfg(not(test))]
@ -178,7 +164,7 @@ pub mod traits {
use ops::Add;
impl<'self,T:Copy> Add<&'self const [T],@[T]> for @[T] {
#[inline(always)]
#[inline]
fn add(&self, rhs: & &'self const [T]) -> @[T] {
append(*self, (*rhs))
}
@ -189,7 +175,7 @@ pub mod traits {
pub mod traits {}
pub mod raw {
use at_vec::{capacity, rustrt};
use at_vec::capacity;
use cast::{transmute, transmute_copy};
use libc;
use ptr;
@ -197,6 +183,8 @@ pub mod raw {
use uint;
use unstable::intrinsics::{move_val_init};
use vec;
use vec::UnboxedVecRepr;
use sys::TypeDesc;
pub type VecRepr = vec::raw::VecRepr;
pub type SliceRepr = vec::raw::SliceRepr;
@ -208,7 +196,7 @@ pub mod raw {
* modifing its buffers, so it is up to the caller to ensure that
* the vector is actually the specified size.
*/
#[inline(always)]
#[inline]
pub unsafe fn set_len<T>(v: @[T], new_len: uint) {
let repr: **mut VecRepr = transmute(&v);
(**repr).unboxed.fill = new_len * sys::size_of::<T>();
@ -217,7 +205,7 @@ pub mod raw {
/**
* Pushes a new value onto this vector.
*/
#[inline(always)]
#[inline]
pub unsafe fn push<T>(v: &mut @[T], initval: T) {
let repr: **VecRepr = transmute_copy(&v);
let fill = (**repr).unboxed.fill;
@ -228,7 +216,7 @@ pub mod raw {
}
}
#[inline(always)] // really pretty please
#[inline] // really pretty please
unsafe fn push_fast<T>(v: &mut @[T], initval: T) {
let repr: **mut VecRepr = ::cast::transmute(v);
let fill = (**repr).unboxed.fill;
@ -257,9 +245,47 @@ pub mod raw {
pub unsafe fn reserve<T>(v: &mut @[T], n: uint) {
// Only make the (slow) call into the runtime if we have to
if capacity(*v) < n {
let ptr: **VecRepr = transmute(v);
rustrt::vec_reserve_shared_actual(sys::get_type_desc::<T>(),
ptr, n as libc::size_t);
let ptr: *mut *mut VecRepr = transmute(v);
let ty = sys::get_type_desc::<T>();
return reserve_raw(ty, ptr, n);
}
}
// Implementation detail. Shouldn't be public
#[allow(missing_doc)]
pub fn reserve_raw(ty: *TypeDesc, ptr: *mut *mut VecRepr, n: uint) {
unsafe {
let size_in_bytes = n * (*ty).size;
if size_in_bytes > (**ptr).unboxed.alloc {
let total_size = size_in_bytes + sys::size_of::<UnboxedVecRepr>();
// XXX: UnboxedVecRepr has an extra u8 at the end
let total_size = total_size - sys::size_of::<u8>();
(*ptr) = local_realloc(*ptr as *(), total_size) as *mut VecRepr;
(**ptr).unboxed.alloc = size_in_bytes;
}
}
fn local_realloc(ptr: *(), size: uint) -> *() {
use rt;
use rt::OldTaskContext;
use rt::local::Local;
use rt::task::Task;
if rt::context() == OldTaskContext {
unsafe {
return rust_local_realloc(ptr, size as libc::size_t);
}
extern {
#[fast_ffi]
fn rust_local_realloc(ptr: *(), size: libc::size_t) -> *();
}
} else {
do Local::borrow::<Task, *()> |task| {
task.heap.realloc(ptr as *libc::c_void, size) as *()
}
}
}
}

View file

@ -212,7 +212,7 @@ impl FromStr for bool {
* ~~~
*/
impl ToStr for bool {
#[inline(always)]
#[inline]
fn to_str(&self) -> ~str {
if *self { ~"true" } else { ~"false" }
}
@ -250,24 +250,24 @@ pub fn all_values(blk: &fn(v: bool)) {
* 0
* ~~~
*/
#[inline(always)]
#[inline]
pub fn to_bit(v: bool) -> u8 { if v { 1u8 } else { 0u8 } }
#[cfg(not(test))]
impl Ord for bool {
#[inline(always)]
#[inline]
fn lt(&self, other: &bool) -> bool { to_bit(*self) < to_bit(*other) }
#[inline(always)]
#[inline]
fn le(&self, other: &bool) -> bool { to_bit(*self) <= to_bit(*other) }
#[inline(always)]
#[inline]
fn gt(&self, other: &bool) -> bool { to_bit(*self) > to_bit(*other) }
#[inline(always)]
#[inline]
fn ge(&self, other: &bool) -> bool { to_bit(*self) >= to_bit(*other) }
}
#[cfg(not(test))]
impl TotalOrd for bool {
#[inline(always)]
#[inline]
fn cmp(&self, other: &bool) -> Ordering { to_bit(*self).cmp(&to_bit(*other)) }
}
@ -298,9 +298,9 @@ impl TotalOrd for bool {
*/
#[cfg(not(test))]
impl Eq for bool {
#[inline(always)]
#[inline]
fn eq(&self, other: &bool) -> bool { (*self) == (*other) }
#[inline(always)]
#[inline]
fn ne(&self, other: &bool) -> bool { (*self) != (*other) }
}

View file

@ -14,13 +14,13 @@
use prelude::*;
/// Cast a region pointer - &T - to a uint.
#[inline(always)]
#[inline]
pub fn to_uint<T>(thing: &T) -> uint {
thing as *T as uint
}
/// Determine if two borrowed pointers point to the same thing.
#[inline(always)]
#[inline]
pub fn ref_eq<'a, 'b, T>(thing: &'a T, other: &'b T) -> bool {
to_uint(thing) == to_uint(other)
}
@ -28,11 +28,11 @@ pub fn ref_eq<'a, 'b, T>(thing: &'a T, other: &'b T) -> bool {
// Equality for region pointers
#[cfg(not(test))]
impl<'self, T: Eq> Eq for &'self T {
#[inline(always)]
#[inline]
fn eq(&self, other: & &'self T) -> bool {
*(*self) == *(*other)
}
#[inline(always)]
#[inline]
fn ne(&self, other: & &'self T) -> bool {
*(*self) != *(*other)
}
@ -41,19 +41,19 @@ impl<'self, T: Eq> Eq for &'self T {
// Comparison for region pointers
#[cfg(not(test))]
impl<'self, T: Ord> Ord for &'self T {
#[inline(always)]
#[inline]
fn lt(&self, other: & &'self T) -> bool {
*(*self) < *(*other)
}
#[inline(always)]
#[inline]
fn le(&self, other: & &'self T) -> bool {
*(*self) <= *(*other)
}
#[inline(always)]
#[inline]
fn ge(&self, other: & &'self T) -> bool {
*(*self) >= *(*other)
}
#[inline(always)]
#[inline]
fn gt(&self, other: & &'self T) -> bool {
*(*self) > *(*other)
}

View file

@ -14,22 +14,9 @@ use sys;
use unstable::intrinsics;
/// Casts the value at `src` to U. The two types must have the same length.
#[cfg(stage0)]
pub unsafe fn transmute_copy<T, U>(src: &T) -> U {
let mut dest: U = intrinsics::uninit();
{
let dest_ptr: *mut u8 = transmute(&mut dest);
let src_ptr: *u8 = transmute(src);
intrinsics::memmove64(dest_ptr,
src_ptr,
sys::size_of::<U>() as u64);
}
dest
}
/// Casts the value at `src` to U. The two types must have the same length.
#[cfg(target_word_size = "32", not(stage0))]
#[inline(always)]
#[cfg(target_word_size = "32")]
#[inline]
pub unsafe fn transmute_copy<T, U>(src: &T) -> U {
let mut dest: U = intrinsics::uninit();
let dest_ptr: *mut u8 = transmute(&mut dest);
@ -39,8 +26,8 @@ pub unsafe fn transmute_copy<T, U>(src: &T) -> U {
}
/// Casts the value at `src` to U. The two types must have the same length.
#[cfg(target_word_size = "64", not(stage0))]
#[inline(always)]
#[cfg(target_word_size = "64")]
#[inline]
pub unsafe fn transmute_copy<T, U>(src: &T) -> U {
let mut dest: U = intrinsics::uninit();
let dest_ptr: *mut u8 = transmute(&mut dest);
@ -54,19 +41,16 @@ pub unsafe fn transmute_copy<T, U>(src: &T) -> U {
*
* The forget function will take ownership of the provided value but neglect
* to run any required cleanup or memory-management operations on it. This
* can be used for various acts of magick, particularly when using
* reinterpret_cast on pointer types.
* can be used for various acts of magick.
*/
#[inline(always)]
#[inline]
pub unsafe fn forget<T>(thing: T) { intrinsics::forget(thing); }
/**
* Force-increment the reference count on a shared box. If used
* carelessly, this can leak the box. Use this in conjunction with transmute
* and/or reinterpret_cast when such calls would otherwise scramble a box's
* reference count
* carelessly, this can leak the box.
*/
#[inline(always)]
#[inline]
pub unsafe fn bump_box_refcount<T>(t: @T) { forget(t); }
/**
@ -77,59 +61,59 @@ pub unsafe fn bump_box_refcount<T>(t: @T) { forget(t); }
*
* assert!(transmute("L") == ~[76u8, 0u8]);
*/
#[inline(always)]
#[inline]
pub unsafe fn transmute<L, G>(thing: L) -> G {
intrinsics::transmute(thing)
}
/// Coerce an immutable reference to be mutable.
#[inline(always)]
#[inline]
pub unsafe fn transmute_mut<'a,T>(ptr: &'a T) -> &'a mut T { transmute(ptr) }
/// Coerce a mutable reference to be immutable.
#[inline(always)]
#[inline]
pub unsafe fn transmute_immut<'a,T>(ptr: &'a mut T) -> &'a T {
transmute(ptr)
}
/// Coerce a borrowed pointer to have an arbitrary associated region.
#[inline(always)]
#[inline]
pub unsafe fn transmute_region<'a,'b,T>(ptr: &'a T) -> &'b T {
transmute(ptr)
}
/// Coerce an immutable reference to be mutable.
#[inline(always)]
#[inline]
pub unsafe fn transmute_mut_unsafe<T>(ptr: *const T) -> *mut T {
transmute(ptr)
}
/// Coerce an immutable reference to be mutable.
#[inline(always)]
#[inline]
pub unsafe fn transmute_immut_unsafe<T>(ptr: *const T) -> *T {
transmute(ptr)
}
/// Coerce a borrowed mutable pointer to have an arbitrary associated region.
#[inline(always)]
#[inline]
pub unsafe fn transmute_mut_region<'a,'b,T>(ptr: &'a mut T) -> &'b mut T {
transmute(ptr)
}
/// Transforms lifetime of the second pointer to match the first.
#[inline(always)]
#[inline]
pub unsafe fn copy_lifetime<'a,S,T>(_ptr: &'a S, ptr: &T) -> &'a T {
transmute_region(ptr)
}
/// Transforms lifetime of the second pointer to match the first.
#[inline(always)]
#[inline]
pub unsafe fn copy_mut_lifetime<'a,S,T>(_ptr: &'a mut S, ptr: &mut T) -> &'a mut T {
transmute_mut_region(ptr)
}
/// Transforms lifetime of the second pointer to match the first.
#[inline(always)]
#[inline]
pub unsafe fn copy_lifetime_vec<'a,S,T>(_ptr: &'a [S], ptr: &T) -> &'a T {
transmute_region(ptr)
}

View file

@ -17,8 +17,8 @@ use u32;
use uint;
use unicode::{derived_property, general_category};
#[cfg(not(test))]
use cmp::{Eq, Ord};
#[cfg(not(test))] use cmp::{Eq, Ord};
#[cfg(not(test))] use num::Zero;
/*
Lu Uppercase_Letter an uppercase letter
@ -65,14 +65,14 @@ pub fn is_XID_continue(c: char) -> bool { derived_property::XID_Continue(c) }
/// Indicates whether a character is in lower case, defined
/// in terms of the Unicode General Category 'Ll'
///
#[inline(always)]
#[inline]
pub fn is_lowercase(c: char) -> bool { general_category::Ll(c) }
///
/// Indicates whether a character is in upper case, defined
/// in terms of the Unicode General Category 'Lu'.
///
#[inline(always)]
#[inline]
pub fn is_uppercase(c: char) -> bool { general_category::Lu(c) }
///
@ -80,7 +80,7 @@ pub fn is_uppercase(c: char) -> bool { general_category::Lu(c) }
/// terms of the Unicode General Categories 'Zs', 'Zl', 'Zp'
/// additional 'Cc'-category control codes in the range [0x09, 0x0d]
///
#[inline(always)]
#[inline]
pub fn is_whitespace(c: char) -> bool {
('\x09' <= c && c <= '\x0d')
|| general_category::Zs(c)
@ -93,7 +93,7 @@ pub fn is_whitespace(c: char) -> bool {
/// defined in terms of the Unicode General Categories 'Nd', 'Nl', 'No'
/// and the Derived Core Property 'Alphabetic'.
///
#[inline(always)]
#[inline]
pub fn is_alphanumeric(c: char) -> bool {
derived_property::Alphabetic(c)
|| general_category::Nd(c)
@ -102,7 +102,7 @@ pub fn is_alphanumeric(c: char) -> bool {
}
/// Indicates whether the character is numeric (Nd, Nl, or No)
#[inline(always)]
#[inline]
pub fn is_digit(c: char) -> bool {
general_category::Nd(c)
|| general_category::Nl(c)
@ -127,7 +127,7 @@ pub fn is_digit(c: char) -> bool {
///
/// This just wraps `to_digit()`.
///
#[inline(always)]
#[inline]
pub fn is_digit_radix(c: char, radix: uint) -> bool {
match to_digit(c, radix) {
Some(_) => true,
@ -310,24 +310,30 @@ impl Char for char {
#[cfg(not(test))]
impl Eq for char {
#[inline(always)]
#[inline]
fn eq(&self, other: &char) -> bool { (*self) == (*other) }
#[inline(always)]
#[inline]
fn ne(&self, other: &char) -> bool { (*self) != (*other) }
}
#[cfg(not(test))]
impl Ord for char {
#[inline(always)]
#[inline]
fn lt(&self, other: &char) -> bool { *self < *other }
#[inline(always)]
#[inline]
fn le(&self, other: &char) -> bool { *self <= *other }
#[inline(always)]
#[inline]
fn gt(&self, other: &char) -> bool { *self > *other }
#[inline(always)]
#[inline]
fn ge(&self, other: &char) -> bool { *self >= *other }
}
#[cfg(not(test))]
impl Zero for char {
fn zero() -> char { 0 as char }
fn is_zero(&self) -> bool { *self == 0 as char }
}
#[test]
fn test_is_lowercase() {
assert!('a'.is_lowercase());

View file

@ -13,107 +13,14 @@
use libc::{c_char, c_void, intptr_t, uintptr_t};
use ptr::mut_null;
use repr::BoxRepr;
use rt;
use rt::OldTaskContext;
use sys::TypeDesc;
use cast::transmute;
#[cfg(not(test))] use unstable::lang::clear_task_borrow_list;
#[cfg(not(test))] use ptr::to_unsafe_ptr;
/**
* Runtime structures
*
* NB: These must match the representation in the C++ runtime.
*/
type DropGlue<'self> = &'self fn(**TypeDesc, *c_void);
type FreeGlue<'self> = &'self fn(**TypeDesc, *c_void);
type TaskID = uintptr_t;
struct StackSegment { priv opaque: () }
struct Scheduler { priv opaque: () }
struct SchedulerLoop { priv opaque: () }
struct Kernel { priv opaque: () }
struct Env { priv opaque: () }
struct AllocHeader { priv opaque: () }
struct MemoryRegion { priv opaque: () }
#[cfg(target_arch="x86")]
struct Registers {
data: [u32, ..16]
}
#[cfg(target_arch="arm")]
#[cfg(target_arch="mips")]
struct Registers {
data: [u32, ..32]
}
#[cfg(target_arch="x86")]
#[cfg(target_arch="arm")]
#[cfg(target_arch="mips")]
struct Context {
regs: Registers,
next: *Context,
pad: [u32, ..3]
}
#[cfg(target_arch="x86_64")]
struct Registers {
data: [u64, ..22]
}
#[cfg(target_arch="x86_64")]
struct Context {
regs: Registers,
next: *Context,
pad: uintptr_t
}
struct BoxedRegion {
env: *Env,
backing_region: *MemoryRegion,
live_allocs: *BoxRepr
}
#[cfg(target_arch="x86")]
#[cfg(target_arch="arm")]
#[cfg(target_arch="mips")]
struct Task {
// Public fields
refcount: intptr_t, // 0
id: TaskID, // 4
pad: [u32, ..2], // 8
ctx: Context, // 16
stack_segment: *StackSegment, // 96
runtime_sp: uintptr_t, // 100
scheduler: *Scheduler, // 104
scheduler_loop: *SchedulerLoop, // 108
// Fields known only to the runtime
kernel: *Kernel, // 112
name: *c_char, // 116
list_index: i32, // 120
boxed_region: BoxedRegion // 128
}
#[cfg(target_arch="x86_64")]
struct Task {
// Public fields
refcount: intptr_t,
id: TaskID,
ctx: Context,
stack_segment: *StackSegment,
runtime_sp: uintptr_t,
scheduler: *Scheduler,
scheduler_loop: *SchedulerLoop,
// Fields known only to the runtime
kernel: *Kernel,
name: *c_char,
list_index: i32,
boxed_region: BoxedRegion
}
/*
* Box annihilation
@ -132,9 +39,9 @@ unsafe fn each_live_alloc(read_next_before: bool,
//! Walks the internal list of allocations
use managed;
use rt::local_heap;
let task: *Task = transmute(rustrt::rust_get_task());
let box = (*task).boxed_region.live_allocs;
let box = local_heap::live_allocs();
let mut box: *mut BoxRepr = transmute(copy box);
while box != mut_null() {
let next_before = transmute(copy (*box).header.next);
@ -156,7 +63,11 @@ unsafe fn each_live_alloc(read_next_before: bool,
#[cfg(unix)]
fn debug_mem() -> bool {
::rt::env::get().debug_mem
// XXX: Need to port the environment struct to newsched
match rt::context() {
OldTaskContext => ::rt::env::get().debug_mem,
_ => false
}
}
#[cfg(windows)]
@ -165,13 +76,12 @@ fn debug_mem() -> bool {
}
/// Destroys all managed memory (i.e. @ boxes) held by the current task.
#[cfg(not(test))]
#[lang="annihilate"]
pub unsafe fn annihilate() {
use unstable::lang::local_free;
use rt::local_heap::local_free;
use io::WriterUtil;
use io;
use libc;
use rt::borrowck;
use sys;
use managed;
@ -183,7 +93,7 @@ pub unsafe fn annihilate() {
// Quick hack: we need to free this list upon task exit, and this
// is a convenient place to do it.
clear_task_borrow_list();
borrowck::clear_task_borrow_list();
// Pass 1: Make all boxes immortal.
//
@ -207,7 +117,7 @@ pub unsafe fn annihilate() {
if !uniq {
let tydesc: *TypeDesc = transmute(copy (*box).header.type_desc);
let drop_glue: DropGlue = transmute(((*tydesc).drop_glue, 0));
drop_glue(to_unsafe_ptr(&tydesc), transmute(&(*box).data));
drop_glue(&tydesc, transmute(&(*box).data));
}
}

View file

@ -34,25 +34,25 @@ pub trait Clone {
impl<T: Clone> Clone for ~T {
/// Return a deep copy of the owned box.
#[inline(always)]
#[inline]
fn clone(&self) -> ~T { ~(**self).clone() }
}
impl<T> Clone for @T {
/// Return a shallow copy of the managed box.
#[inline(always)]
#[inline]
fn clone(&self) -> @T { *self }
}
impl<T> Clone for @mut T {
/// Return a shallow copy of the managed box.
#[inline(always)]
#[inline]
fn clone(&self) -> @mut T { *self }
}
impl<'self, T> Clone for &'self T {
/// Return a shallow copy of the borrowed pointer.
#[inline(always)]
#[inline]
fn clone(&self) -> &'self T { *self }
}
@ -60,7 +60,7 @@ macro_rules! clone_impl(
($t:ty) => {
impl Clone for $t {
/// Return a deep copy of the value.
#[inline(always)]
#[inline]
fn clone(&self) -> $t { *self }
}
}
@ -96,7 +96,7 @@ pub trait DeepClone {
impl<T: DeepClone> DeepClone for ~T {
/// Return a deep copy of the owned box.
#[inline(always)]
#[inline]
fn deep_clone(&self) -> ~T { ~(**self).deep_clone() }
}
@ -104,7 +104,7 @@ impl<T: DeepClone> DeepClone for ~T {
impl<T: Const + DeepClone> DeepClone for @T {
/// Return a deep copy of the managed box. The `Const` trait is required to prevent performing
/// a deep clone of a potentially cyclical type.
#[inline(always)]
#[inline]
fn deep_clone(&self) -> @T { @(**self).deep_clone() }
}
@ -112,7 +112,7 @@ impl<T: Const + DeepClone> DeepClone for @T {
impl<T: Const + DeepClone> DeepClone for @mut T {
/// Return a deep copy of the managed box. The `Const` trait is required to prevent performing
/// a deep clone of a potentially cyclical type.
#[inline(always)]
#[inline]
fn deep_clone(&self) -> @mut T { @mut (**self).deep_clone() }
}
@ -120,7 +120,7 @@ macro_rules! deep_clone_impl(
($t:ty) => {
impl DeepClone for $t {
/// Return a deep copy of the value.
#[inline(always)]
#[inline]
fn deep_clone(&self) -> $t { *self }
}
}

View file

@ -45,7 +45,7 @@ pub trait TotalEq {
macro_rules! totaleq_impl(
($t:ty) => {
impl TotalEq for $t {
#[inline(always)]
#[inline]
fn equals(&self, other: &$t) -> bool { *self == *other }
}
}
@ -84,27 +84,27 @@ pub trait TotalOrd: TotalEq {
}
impl TotalOrd for Ordering {
#[inline(always)]
#[inline]
fn cmp(&self, other: &Ordering) -> Ordering {
(*self as int).cmp(&(*other as int))
}
}
impl Ord for Ordering {
#[inline(always)]
#[inline]
fn lt(&self, other: &Ordering) -> bool { (*self as int) < (*other as int) }
#[inline(always)]
#[inline]
fn le(&self, other: &Ordering) -> bool { (*self as int) <= (*other as int) }
#[inline(always)]
#[inline]
fn gt(&self, other: &Ordering) -> bool { (*self as int) > (*other as int) }
#[inline(always)]
#[inline]
fn ge(&self, other: &Ordering) -> bool { (*self as int) >= (*other as int) }
}
macro_rules! totalord_impl(
($t:ty) => {
impl TotalOrd for $t {
#[inline(always)]
#[inline]
fn cmp(&self, other: &$t) -> Ordering {
if *self < *other { Less }
else if *self > *other { Greater }
@ -146,7 +146,7 @@ Return `o1` if it is not `Equal`, otherwise `o2`. Simulates the
lexical ordering on a type `(int, int)`.
*/
// used in deriving code in libsyntax
#[inline(always)]
#[inline]
pub fn lexical_ordering(o1: Ordering, o2: Ordering) -> Ordering {
match o1 {
Equal => o2,
@ -180,12 +180,12 @@ pub trait Equiv<T> {
fn equiv(&self, other: &T) -> bool;
}
#[inline(always)]
#[inline]
pub fn min<T:Ord>(v1: T, v2: T) -> T {
if v1 < v2 { v1 } else { v2 }
}
#[inline(always)]
#[inline]
pub fn max<T:Ord>(v1: T, v2: T) -> T {
if v1 > v2 { v1 } else { v2 }
}

View file

@ -20,7 +20,6 @@ use either::{Either, Left, Right};
use kinds::Owned;
use option::{Option, Some, None};
use uint;
use vec;
use vec::OwnedVector;
use util::replace;
use unstable::sync::{Exclusive, exclusive};
@ -209,7 +208,7 @@ impl<T: Owned> Peekable<T> for PortSet<T> {
fn peek(&self) -> bool {
// It'd be nice to use self.port.each, but that version isn't
// pure.
for uint::range(0, vec::uniq_len(&const self.ports)) |i| {
for uint::range(0, self.ports.len()) |i| {
let port: &pipesy::Port<T> = &self.ports[i];
if port.peek() {
return true;
@ -221,7 +220,7 @@ impl<T: Owned> Peekable<T> for PortSet<T> {
/// A channel that can be shared between many senders.
pub struct SharedChan<T> {
ch: Exclusive<pipesy::Chan<T>>
inner: Either<Exclusive<pipesy::Chan<T>>, rtcomm::SharedChan<T>>
}
impl<T: Owned> SharedChan<T> {
@ -229,40 +228,50 @@ impl<T: Owned> SharedChan<T> {
pub fn new(c: Chan<T>) -> SharedChan<T> {
let Chan { inner } = c;
let c = match inner {
Left(c) => c,
Right(_) => fail!("SharedChan not implemented")
Left(c) => Left(exclusive(c)),
Right(c) => Right(rtcomm::SharedChan::new(c))
};
SharedChan { ch: exclusive(c) }
SharedChan { inner: c }
}
}
impl<T: Owned> GenericChan<T> for SharedChan<T> {
fn send(&self, x: T) {
unsafe {
let mut xx = Some(x);
do self.ch.with_imm |chan| {
let x = replace(&mut xx, None);
chan.send(x.unwrap())
match self.inner {
Left(ref chan) => {
unsafe {
let mut xx = Some(x);
do chan.with_imm |chan| {
let x = replace(&mut xx, None);
chan.send(x.unwrap())
}
}
}
Right(ref chan) => chan.send(x)
}
}
}
impl<T: Owned> GenericSmartChan<T> for SharedChan<T> {
fn try_send(&self, x: T) -> bool {
unsafe {
let mut xx = Some(x);
do self.ch.with_imm |chan| {
let x = replace(&mut xx, None);
chan.try_send(x.unwrap())
match self.inner {
Left(ref chan) => {
unsafe {
let mut xx = Some(x);
do chan.with_imm |chan| {
let x = replace(&mut xx, None);
chan.try_send(x.unwrap())
}
}
}
Right(ref chan) => chan.try_send(x)
}
}
}
impl<T: Owned> ::clone::Clone for SharedChan<T> {
fn clone(&self) -> SharedChan<T> {
SharedChan { ch: self.ch.clone() }
SharedChan { inner: self.inner.clone() }
}
}
@ -625,7 +634,7 @@ mod pipesy {
}
impl<T: Owned> GenericChan<T> for Chan<T> {
#[inline(always)]
#[inline]
fn send(&self, x: T) {
unsafe {
let self_endp = transmute_mut(&self.endp);
@ -636,7 +645,7 @@ mod pipesy {
}
impl<T: Owned> GenericSmartChan<T> for Chan<T> {
#[inline(always)]
#[inline]
fn try_send(&self, x: T) -> bool {
unsafe {
let self_endp = transmute_mut(&self.endp);
@ -653,7 +662,7 @@ mod pipesy {
}
impl<T: Owned> GenericPort<T> for Port<T> {
#[inline(always)]
#[inline]
fn recv(&self) -> T {
unsafe {
let self_endp = transmute_mut(&self.endp);
@ -664,7 +673,7 @@ mod pipesy {
}
}
#[inline(always)]
#[inline]
fn try_recv(&self) -> Option<T> {
unsafe {
let self_endp = transmute_mut(&self.endp);
@ -681,7 +690,7 @@ mod pipesy {
}
impl<T: Owned> Peekable<T> for Port<T> {
#[inline(always)]
#[inline]
fn peek(&self) -> bool {
unsafe {
let self_endp = transmute_mut(&self.endp);

View file

@ -16,10 +16,10 @@ use option::Option;
/// knowledge known is the number of elements contained within.
pub trait Container {
/// Return the number of elements in the container
fn len(&const self) -> uint;
fn len(&self) -> uint;
/// Return true if the container contains no elements
fn is_empty(&const self) -> bool;
fn is_empty(&self) -> bool;
}
/// A trait to represent mutable containers

View file

@ -12,19 +12,20 @@
# The Rust standard library
The Rust standard library provides runtime features required by the language,
including the task scheduler and memory allocators, as well as library
support for Rust built-in types, platform abstractions, and other commonly
used features.
The Rust standard library is a group of interrelated modules defining
the core language traits, operations on built-in data types, collections,
platform abstractions, the task scheduler, runtime support for language
features and other common functionality.
`std` includes modules corresponding to each of the integer types, each of
the floating point types, the `bool` type, tuples, characters, strings
(`str`), vectors (`vec`), managed boxes (`managed`), owned boxes (`owned`),
and unsafe and borrowed pointers (`ptr`). Additionally, `std` provides
pervasive types (`option` and `result`), task creation and communication
primitives (`task`, `comm`), platform abstractions (`os` and `path`), basic
I/O abstractions (`io`), common traits (`kinds`, `ops`, `cmp`, `num`,
`to_str`), and complete bindings to the C standard library (`libc`).
`std` includes modules corresponding to each of the integer types,
each of the floating point types, the `bool` type, tuples, characters,
strings (`str`), vectors (`vec`), managed boxes (`managed`), owned
boxes (`owned`), and unsafe and borrowed pointers (`ptr`, `borrowed`).
Additionally, `std` provides pervasive types (`option` and `result`),
task creation and communication primitives (`task`, `comm`), platform
abstractions (`os` and `path`), basic I/O abstractions (`io`), common
traits (`kinds`, `ops`, `cmp`, `num`, `to_str`), and complete bindings
to the C standard library (`libc`).
# Standard library injection and the Rust prelude
@ -38,7 +39,7 @@ with the `std::` path prefix, as in `use std::vec`, `use std::task::spawn`,
etc.
Additionally, `std` contains a `prelude` module that reexports many of the
most common std modules, types and traits. The contents of the prelude are
most common types, traits and functions. The contents of the prelude are
imported into every *module* by default. Implicitly, all modules behave as if
they contained the following prologue:
@ -56,17 +57,13 @@ they contained the following prologue:
#[license = "MIT/ASL2"];
#[crate_type = "lib"];
// NOTE: remove these two attributes after the next snapshot
#[no_core]; // for stage0
#[allow(unrecognized_lint)]; // otherwise stage0 is seriously ugly
// Don't link to std. We are std.
#[no_std];
#[deny(non_camel_case_types)];
#[deny(missing_doc)];
// Make core testable by not duplicating lang items. See #2912
// Make std testable by not duplicating lang items. See #2912
#[cfg(test)] extern mod realstd(name = "std");
#[cfg(test)] pub use kinds = realstd::kinds;
#[cfg(test)] pub use ops = realstd::ops;

View file

@ -33,7 +33,7 @@ pub enum Either<T, U> {
/// If `value` is left(T) then `f_left` is applied to its contents, if
/// `value` is right(U) then `f_right` is applied to its contents, and the
/// result is returned.
#[inline(always)]
#[inline]
pub fn either<T, U, V>(f_left: &fn(&T) -> V,
f_right: &fn(&U) -> V, value: &Either<T, U>) -> V {
match *value {
@ -47,7 +47,7 @@ pub fn lefts<T:Copy,U>(eithers: &[Either<T, U>]) -> ~[T] {
do vec::build_sized(eithers.len()) |push| {
for eithers.each |elt| {
match *elt {
Left(ref l) => { push(*l); }
Left(ref l) => { push(copy *l); }
_ => { /* fallthrough */ }
}
}
@ -59,7 +59,7 @@ pub fn rights<T, U: Copy>(eithers: &[Either<T, U>]) -> ~[U] {
do vec::build_sized(eithers.len()) |push| {
for eithers.each |elt| {
match *elt {
Right(ref r) => { push(*r); }
Right(ref r) => { push(copy *r); }
_ => { /* fallthrough */ }
}
}
@ -83,7 +83,7 @@ pub fn partition<T, U>(eithers: ~[Either<T, U>]) -> (~[T], ~[U]) {
}
/// Flips between left and right of a given either
#[inline(always)]
#[inline]
pub fn flip<T, U>(eith: Either<T, U>) -> Either<U, T> {
match eith {
Right(r) => Left(r),
@ -95,7 +95,7 @@ pub fn flip<T, U>(eith: Either<T, U>) -> Either<U, T> {
///
/// Converts an `either` type to a `result` type, making the "right" choice
/// an ok result, and the "left" choice a fail
#[inline(always)]
#[inline]
pub fn to_result<T, U>(eith: Either<T, U>) -> Result<U, T> {
match eith {
Right(r) => result::Ok(r),
@ -104,7 +104,7 @@ pub fn to_result<T, U>(eith: Either<T, U>) -> Result<U, T> {
}
/// Checks whether the given value is a left
#[inline(always)]
#[inline]
pub fn is_left<T, U>(eith: &Either<T, U>) -> bool {
match *eith {
Left(_) => true,
@ -113,7 +113,7 @@ pub fn is_left<T, U>(eith: &Either<T, U>) -> bool {
}
/// Checks whether the given value is a right
#[inline(always)]
#[inline]
pub fn is_right<T, U>(eith: &Either<T, U>) -> bool {
match *eith {
Right(_) => true,
@ -122,7 +122,7 @@ pub fn is_right<T, U>(eith: &Either<T, U>) -> bool {
}
/// Retrieves the value in the left branch. Fails if the either is Right.
#[inline(always)]
#[inline]
pub fn unwrap_left<T,U>(eith: Either<T,U>) -> T {
match eith {
Left(x) => x,
@ -131,7 +131,7 @@ pub fn unwrap_left<T,U>(eith: Either<T,U>) -> T {
}
/// Retrieves the value in the right branch. Fails if the either is Left.
#[inline(always)]
#[inline]
pub fn unwrap_right<T,U>(eith: Either<T,U>) -> U {
match eith {
Right(x) => x,
@ -140,27 +140,27 @@ pub fn unwrap_right<T,U>(eith: Either<T,U>) -> U {
}
impl<T, U> Either<T, U> {
#[inline(always)]
#[inline]
pub fn either<V>(&self, f_left: &fn(&T) -> V, f_right: &fn(&U) -> V) -> V {
either(f_left, f_right, self)
}
#[inline(always)]
#[inline]
pub fn flip(self) -> Either<U, T> { flip(self) }
#[inline(always)]
#[inline]
pub fn to_result(self) -> Result<U, T> { to_result(self) }
#[inline(always)]
#[inline]
pub fn is_left(&self) -> bool { is_left(self) }
#[inline(always)]
#[inline]
pub fn is_right(&self) -> bool { is_right(self) }
#[inline(always)]
#[inline]
pub fn unwrap_left(self) -> T { unwrap_left(self) }
#[inline(always)]
#[inline]
pub fn unwrap_right(self) -> U { unwrap_right(self) }
}

View file

@ -64,7 +64,7 @@ pub trait HashUtil {
}
impl<A:Hash> HashUtil for A {
#[inline(always)]
#[inline]
fn hash(&self) -> u64 { self.hash_keyed(0,0) }
}
@ -79,7 +79,7 @@ pub trait Streaming {
}
impl<A:IterBytes> Hash for A {
#[inline(always)]
#[inline]
fn hash_keyed(&self, k0: u64, k1: u64) -> u64 {
let mut s = State::new(k0, k1);
for self.iter_bytes(true) |bytes| {
@ -176,7 +176,7 @@ fn hash_keyed_5<A: IterBytes,
s.result_u64()
}
#[inline(always)]
#[inline]
pub fn default_state() -> State {
State::new(0, 0)
}
@ -194,7 +194,7 @@ struct SipState {
}
impl SipState {
#[inline(always)]
#[inline]
fn new(key0: u64, key1: u64) -> SipState {
let mut state = SipState {
k0: key0,
@ -248,7 +248,7 @@ macro_rules! compress (
impl Writer for SipState {
// Methods for io::writer
#[inline(always)]
#[inline]
fn write(&mut self, msg: &[u8]) {
let length = msg.len();
self.length += length;
@ -315,12 +315,12 @@ impl Writer for SipState {
}
impl Streaming for SipState {
#[inline(always)]
#[inline]
fn input(&mut self, buf: &[u8]) {
self.write(buf);
}
#[inline(always)]
#[inline]
fn result_u64(&mut self) -> u64 {
let mut v0 = self.v0;
let mut v1 = self.v1;
@ -373,7 +373,7 @@ impl Streaming for SipState {
s
}
#[inline(always)]
#[inline]
fn reset(&mut self) {
self.length = 0;
self.v0 = self.k0 ^ 0x736f6d6570736575;
@ -558,4 +558,15 @@ mod tests {
val & !(0xff << (byte * 8))
}
}
#[test]
fn test_float_hashes_differ() {
assert!(0.0.hash() != 1.0.hash());
assert!(1.0.hash() != (-1.0).hash());
}
#[test]
fn test_float_hashes_of_zero() {
assert_eq!(0.0.hash(), (-0.0).hash());
}
}

View file

@ -59,7 +59,7 @@ enum SearchResult {
FoundEntry(uint), FoundHole(uint), TableFull
}
#[inline(always)]
#[inline]
fn resize_at(capacity: uint) -> uint {
((capacity as float) * 3. / 4.) as uint
}
@ -85,19 +85,19 @@ fn linear_map_with_capacity_and_keys<K:Eq + Hash,V>(
}
impl<K:Hash + Eq,V> HashMap<K, V> {
#[inline(always)]
#[inline]
fn to_bucket(&self, h: uint) -> uint {
// A good hash function with entropy spread over all of the
// bits is assumed. SipHash is more than good enough.
h % self.buckets.len()
}
#[inline(always)]
#[inline]
fn next_bucket(&self, idx: uint, len_buckets: uint) -> uint {
(idx + 1) % len_buckets
}
#[inline(always)]
#[inline]
fn bucket_sequence(&self, hash: uint,
op: &fn(uint) -> bool) -> bool {
let start_idx = self.to_bucket(hash);
@ -112,20 +112,20 @@ impl<K:Hash + Eq,V> HashMap<K, V> {
}
}
#[inline(always)]
#[inline]
fn bucket_for_key(&self, k: &K) -> SearchResult {
let hash = k.hash_keyed(self.k0, self.k1) as uint;
self.bucket_for_key_with_hash(hash, k)
}
#[inline(always)]
#[inline]
fn bucket_for_key_equiv<Q:Hash + Equiv<K>>(&self, k: &Q)
-> SearchResult {
let hash = k.hash_keyed(self.k0, self.k1) as uint;
self.bucket_for_key_with_hash_equiv(hash, k)
}
#[inline(always)]
#[inline]
fn bucket_for_key_with_hash(&self,
hash: uint,
k: &K)
@ -141,7 +141,7 @@ impl<K:Hash + Eq,V> HashMap<K, V> {
TableFull
}
#[inline(always)]
#[inline]
fn bucket_for_key_with_hash_equiv<Q:Equiv<K>>(&self,
hash: uint,
k: &Q)
@ -161,7 +161,7 @@ impl<K:Hash + Eq,V> HashMap<K, V> {
/// Expand the capacity of the array to the next power of two
/// and re-insert each of the existing buckets.
#[inline(always)]
#[inline]
fn expand(&mut self) {
let new_capacity = self.buckets.len() * 2;
self.resize(new_capacity);
@ -190,7 +190,7 @@ impl<K:Hash + Eq,V> HashMap<K, V> {
}
}
#[inline(always)]
#[inline]
fn value_for_bucket<'a>(&'a self, idx: uint) -> &'a V {
match self.buckets[idx] {
Some(ref bkt) => &bkt.value,
@ -198,7 +198,7 @@ impl<K:Hash + Eq,V> HashMap<K, V> {
}
}
#[inline(always)]
#[inline]
fn mut_value_for_bucket<'a>(&'a mut self, idx: uint) -> &'a mut V {
match self.buckets[idx] {
Some(ref mut bkt) => &mut bkt.value,

View file

@ -1654,9 +1654,7 @@ impl Writer for BytesWriter {
vec::reserve(bytes, count);
unsafe {
// Silly stage0 borrow check workaround...
let casted: &mut ~[u8] = cast::transmute_copy(&bytes);
vec::raw::set_len(casted, count);
vec::raw::set_len(bytes, count);
let view = vec::mut_slice(*bytes, *self.pos, count);
vec::bytes::copy_memory(view, v, v_len);
@ -1667,7 +1665,7 @@ impl Writer for BytesWriter {
fn seek(&self, offset: int, whence: SeekStyle) {
let pos = *self.pos;
let len = vec::uniq_len(&const *self.bytes);
let len = self.bytes.len();
*self.pos = seek_in_buf(offset, pos, len, whence);
}
@ -1779,7 +1777,7 @@ pub mod fsync {
None => (),
Some(level) => {
// fail hard if not succesful
assert!(((self.arg.fsync_fn)(self.arg.val, level)
assert!(((self.arg.fsync_fn)(copy self.arg.val, level)
!= -1));
}
}

View file

@ -73,7 +73,7 @@ pub trait FromIter<T> {
* assert!(!any(|&x: &uint| x > 5, |f| xs.each(f)));
* ~~~
*/
#[inline(always)]
#[inline]
pub fn any<T>(predicate: &fn(T) -> bool,
iter: &fn(f: &fn(T) -> bool) -> bool) -> bool {
for iter |x| {
@ -94,7 +94,7 @@ pub fn any<T>(predicate: &fn(T) -> bool,
* assert!(!all(|&x: &uint| x < 5, |f| uint::range(1, 6, f)));
* ~~~
*/
#[inline(always)]
#[inline]
pub fn all<T>(predicate: &fn(T) -> bool,
iter: &fn(f: &fn(T) -> bool) -> bool) -> bool {
// If we ever break, iter will return false, so this will only return true
@ -112,7 +112,7 @@ pub fn all<T>(predicate: &fn(T) -> bool,
* assert_eq!(*find(|& &x: & &uint| x > 3, |f| xs.each(f)).unwrap(), 4);
* ~~~
*/
#[inline(always)]
#[inline]
pub fn find<T>(predicate: &fn(&T) -> bool,
iter: &fn(f: &fn(T) -> bool) -> bool) -> Option<T> {
for iter |x| {
@ -226,7 +226,7 @@ pub fn fold_ref<T, U>(start: T, iter: &fn(f: &fn(&U) -> bool) -> bool, f: &fn(&m
* assert_eq!(do sum |f| { xs.each(f) }, 10);
* ~~~
*/
#[inline(always)]
#[inline]
pub fn sum<T: Zero + Add<T, T>>(iter: &fn(f: &fn(&T) -> bool) -> bool) -> T {
fold_ref(Zero::zero::<T>(), iter, |a, x| *a = a.add(x))
}
@ -241,7 +241,7 @@ pub fn sum<T: Zero + Add<T, T>>(iter: &fn(f: &fn(&T) -> bool) -> bool) -> T {
* assert_eq!(do product |f| { xs.each(f) }, 24);
* ~~~
*/
#[inline(always)]
#[inline]
pub fn product<T: One + Mul<T, T>>(iter: &fn(f: &fn(&T) -> bool) -> bool) -> T {
fold_ref(One::one::<T>(), iter, |a, x| *a = a.mul(x))
}

View file

@ -308,6 +308,12 @@ pub trait IteratorUtil<A> {
/// assert!(!it.any_(|&x| *x == 3));
/// ~~~
fn any_(&mut self, f: &fn(A) -> bool) -> bool;
/// Return the first element satisfying the specified predicate
fn find_(&mut self, predicate: &fn(&A) -> bool) -> Option<A>;
/// Return the index of the first element satisfying the specified predicate
fn position_(&mut self, predicate: &fn(A) -> bool) -> Option<uint>;
}
/// Iterator adaptors provided for every `Iterator` implementation. The adaptor objects are also
@ -315,59 +321,59 @@ pub trait IteratorUtil<A> {
///
/// In the future these will be default methods instead of a utility trait.
impl<A, T: Iterator<A>> IteratorUtil<A> for T {
#[inline(always)]
#[inline]
fn chain_<U: Iterator<A>>(self, other: U) -> ChainIterator<A, T, U> {
ChainIterator{a: self, b: other, flag: false}
}
#[inline(always)]
#[inline]
fn zip<B, U: Iterator<B>>(self, other: U) -> ZipIterator<A, T, B, U> {
ZipIterator{a: self, b: other}
}
// FIXME: #5898: should be called map
#[inline(always)]
#[inline]
fn transform<'r, B>(self, f: &'r fn(A) -> B) -> MapIterator<'r, A, B, T> {
MapIterator{iter: self, f: f}
}
#[inline(always)]
#[inline]
fn filter<'r>(self, predicate: &'r fn(&A) -> bool) -> FilterIterator<'r, A, T> {
FilterIterator{iter: self, predicate: predicate}
}
#[inline(always)]
#[inline]
fn filter_map<'r, B>(self, f: &'r fn(A) -> Option<B>) -> FilterMapIterator<'r, A, B, T> {
FilterMapIterator { iter: self, f: f }
}
#[inline(always)]
#[inline]
fn enumerate(self) -> EnumerateIterator<A, T> {
EnumerateIterator{iter: self, count: 0}
}
#[inline(always)]
#[inline]
fn skip_while<'r>(self, predicate: &'r fn(&A) -> bool) -> SkipWhileIterator<'r, A, T> {
SkipWhileIterator{iter: self, flag: false, predicate: predicate}
}
#[inline(always)]
#[inline]
fn take_while<'r>(self, predicate: &'r fn(&A) -> bool) -> TakeWhileIterator<'r, A, T> {
TakeWhileIterator{iter: self, flag: false, predicate: predicate}
}
#[inline(always)]
#[inline]
fn skip(self, n: uint) -> SkipIterator<A, T> {
SkipIterator{iter: self, n: n}
}
// FIXME: #5898: should be called take
#[inline(always)]
#[inline]
fn take_(self, n: uint) -> TakeIterator<A, T> {
TakeIterator{iter: self, n: n}
}
#[inline(always)]
#[inline]
fn scan<'r, St, B>(self, initial_state: St, f: &'r fn(&mut St, A) -> Option<B>)
-> ScanIterator<'r, A, B, T, St> {
ScanIterator{iter: self, f: f, state: initial_state}
@ -386,13 +392,13 @@ impl<A, T: Iterator<A>> IteratorUtil<A> for T {
}
}
#[inline(always)]
#[inline]
fn collect<B: FromIter<A>>(&mut self) -> B {
FromIter::from_iter::<A, B>(|f| self.advance(f))
}
/// Return the `n`th item yielded by an iterator.
#[inline(always)]
#[inline]
fn nth(&mut self, mut n: uint) -> Option<A> {
loop {
match self.next() {
@ -404,7 +410,7 @@ impl<A, T: Iterator<A>> IteratorUtil<A> for T {
}
/// Return the last item yielded by an iterator.
#[inline(always)]
#[inline]
fn last_(&mut self) -> Option<A> {
let mut last = None;
for self.advance |x| { last = Some(x); }
@ -421,23 +427,45 @@ impl<A, T: Iterator<A>> IteratorUtil<A> for T {
None => { break; }
}
}
return accum;
accum
}
/// Count the number of items yielded by an iterator
#[inline(always)]
#[inline]
fn count(&mut self) -> uint { self.fold(0, |cnt, _x| cnt + 1) }
#[inline(always)]
#[inline]
fn all(&mut self, f: &fn(A) -> bool) -> bool {
for self.advance |x| { if !f(x) { return false; } }
return true;
true
}
#[inline(always)]
#[inline]
fn any_(&mut self, f: &fn(A) -> bool) -> bool {
for self.advance |x| { if f(x) { return true; } }
return false;
false
}
/// Return the first element satisfying the specified predicate
#[inline]
fn find_(&mut self, predicate: &fn(&A) -> bool) -> Option<A> {
for self.advance |x| {
if predicate(&x) { return Some(x) }
}
None
}
/// Return the index of the first element satisfying the specified predicate
#[inline]
fn position_(&mut self, predicate: &fn(A) -> bool) -> Option<uint> {
let mut i = 0;
for self.advance |x| {
if predicate(x) {
return Some(i);
}
i += 1;
}
None
}
}
@ -456,7 +484,7 @@ pub trait AdditiveIterator<A> {
}
impl<A: Add<A, A> + Zero, T: Iterator<A>> AdditiveIterator<A> for T {
#[inline(always)]
#[inline]
fn sum(&mut self) -> A { self.fold(Zero::zero::<A>(), |s, x| s + x) }
}
@ -481,7 +509,7 @@ pub trait MultiplicativeIterator<A> {
}
impl<A: Mul<A, A> + One, T: Iterator<A>> MultiplicativeIterator<A> for T {
#[inline(always)]
#[inline]
fn product(&mut self) -> A { self.fold(One::one::<A>(), |p, x| p * x) }
}
@ -510,7 +538,7 @@ pub trait OrdIterator<A> {
}
impl<A: Ord, T: Iterator<A>> OrdIterator<A> for T {
#[inline(always)]
#[inline]
fn max(&mut self) -> Option<A> {
self.fold(None, |max, x| {
match max {
@ -520,7 +548,7 @@ impl<A: Ord, T: Iterator<A>> OrdIterator<A> for T {
})
}
#[inline(always)]
#[inline]
fn min(&mut self) -> Option<A> {
self.fold(None, |min, x| {
match min {
@ -788,8 +816,8 @@ impl<'self, A, St> UnfoldrIterator<'self, A, St> {
/// Creates a new iterator with the specified closure as the "iterator
/// function" and an initial state to eventually pass to the iterator
#[inline]
pub fn new(f: &'self fn(&mut St) -> Option<A>, initial_state: St)
-> UnfoldrIterator<'self, A, St> {
pub fn new<'a>(f: &'a fn(&mut St) -> Option<A>, initial_state: St)
-> UnfoldrIterator<'a, A, St> {
UnfoldrIterator {
f: f,
state: initial_state
@ -815,14 +843,14 @@ pub struct Counter<A> {
impl<A> Counter<A> {
/// Creates a new counter with the specified start/step
#[inline(always)]
#[inline]
pub fn new(start: A, step: A) -> Counter<A> {
Counter{state: start, step: step}
}
}
impl<A: Add<A, A> + Clone> Iterator<A> for Counter<A> {
#[inline(always)]
#[inline]
fn next(&mut self) -> Option<A> {
let result = self.state.clone();
self.state = self.state.add(&self.step); // FIXME: #6050
@ -1055,4 +1083,20 @@ mod tests {
assert!(!v.iter().any_(|&x| x > 100));
assert!(!v.slice(0, 0).iter().any_(|_| fail!()));
}
#[test]
fn test_find() {
let v = &[1, 3, 9, 27, 103, 14, 11];
assert_eq!(*v.iter().find_(|x| *x & 1 == 0).unwrap(), 14);
assert_eq!(*v.iter().find_(|x| *x % 3 == 0).unwrap(), 3);
assert!(v.iter().find_(|x| *x % 12 == 0).is_none());
}
#[test]
fn test_position() {
let v = &[1, 3, 9, 27, 103, 14, 11];
assert_eq!(v.iter().position_(|x| *x & 1 == 0).unwrap(), 5);
assert_eq!(v.iter().position_(|x| *x % 3 == 0).unwrap(), 1);
assert!(v.iter().position_(|x| *x % 12 == 0).is_none());
}
}

Some files were not shown because too many files have changed in this diff Show more