auto merge of #7451 : cmr/rust/rewrite-each-path, r=pcwalton

This commit is contained in:
bors 2013-06-28 12:05:12 -07:00
commit f44b951a1e
193 changed files with 3456 additions and 1885 deletions

View file

@ -2862,13 +2862,13 @@ call to the method `make_string`.
Types in Rust are categorized into kinds, based on various properties of the components of the type.
The kinds are:
`Const`
`Freeze`
: Types of this kind are deeply immutable;
they contain no mutable memory locations directly or indirectly via pointers.
`Owned`
`Send`
: Types of this kind can be safely sent between tasks.
This kind includes scalars, owning pointers, owned closures, and
structural types containing only other owned types. All `Owned` types are `Static`.
structural types containing only other owned types. All `Send` types are `Static`.
`Static`
: Types of this kind do not contain any borrowed pointers;
this can be a useful guarantee for code that breaks borrowing assumptions using [`unsafe` operations](#unsafe-functions).
@ -2882,7 +2882,7 @@ The kinds are:
trait provides a single method `finalize` that takes no parameters, and is run
when values of the type are dropped. Such a method is called a "destructor",
and is always executed in "top-down" order: a value is completely destroyed
before any of the values it owns run their destructors. Only `Owned` types
before any of the values it owns run their destructors. Only `Send` types
that do not implement `Copy` can implement `Drop`.
> **Note:** The `finalize` method may be renamed in future versions of Rust.
@ -2968,10 +2968,10 @@ frame they are allocated within.
A task owns all memory it can *safely* reach through local variables,
as well as managed, owning and borrowed pointers.
When a task sends a value that has the `Owned` trait to another task,
When a task sends a value that has the `Send` trait to another task,
it loses ownership of the value sent and can no longer refer to it.
This is statically guaranteed by the combined use of "move semantics",
and the compiler-checked _meaning_ of the `Owned` trait:
and the compiler-checked _meaning_ of the `Send` trait:
it is only instantiated for (transitively) sendable kinds of data constructor and pointers,
never including managed or borrowed pointers.
@ -3116,7 +3116,7 @@ These include:
- read-only and read-write shared variables with various safe mutual exclusion patterns
- simple locks and semaphores
When such facilities carry values, the values are restricted to the [`Owned` type-kind](#type-kinds).
When such facilities carry values, the values are restricted to the [`Send` type-kind](#type-kinds).
Restricting communication interfaces to this kind ensures that no borrowed or managed pointers move between tasks.
Thus access to an entire data structure can be mediated through its owning "root" value;
no further locking or copying is required to avoid data races within the substructure of such a value.

View file

@ -159,7 +159,7 @@ pub struct Unique<T> {
priv ptr: *mut T
}
impl<T: Owned> Unique<T> {
impl<T: Send> Unique<T> {
pub fn new(value: T) -> Unique<T> {
unsafe {
let ptr = malloc(std::sys::size_of::<T>() as size_t) as *mut T;
@ -182,7 +182,7 @@ impl<T: Owned> Unique<T> {
}
#[unsafe_destructor]
impl<T: Owned> Drop for Unique<T> {
impl<T: Send> Drop for Unique<T> {
fn drop(&self) {
unsafe {
let x = intrinsics::init(); // dummy value to swap in

View file

@ -1281,9 +1281,9 @@ let your_crayons = ~[BananaMania, Beaver, Bittersweet];
// Add two vectors to create a new one
let our_crayons = my_crayons + your_crayons;
// += will append to a vector, provided it lives in a mutable slot
// .push_all() will append to a vector, provided it lives in a mutable slot
let mut my_crayons = my_crayons;
my_crayons += your_crayons;
my_crayons.push_all(your_crayons);
~~~~
> ***Note:*** The above examples of vector addition use owned

View file

@ -21,7 +21,7 @@ pub fn load_errors(testfile: &Path) -> ~[ExpectedError] {
let mut line_num = 1u;
while !rdr.eof() {
let ln = rdr.read_line();
error_patterns += parse_expected(line_num, ln);
error_patterns.push_all_move(parse_expected(line_num, ln));
line_num += 1u;
}
return error_patterns;

View file

@ -226,8 +226,8 @@ actual:\n\
~"-L", config.build_base.to_str(),
~"-L",
aux_output_dir_name(config, testfile).to_str()];
args += split_maybe_args(&config.rustcflags);
args += split_maybe_args(&props.compile_flags);
args.push_all_move(split_maybe_args(&config.rustcflags));
args.push_all_move(split_maybe_args(&props.compile_flags));
return ProcArgs {prog: config.rustc_path.to_str(), args: args};
}
}
@ -581,8 +581,8 @@ fn make_compile_args(config: &config, props: &TestProps, extras: ~[~str],
~"-o", xform(config, testfile).to_str(),
~"-L", config.build_base.to_str()]
+ extras;
args += split_maybe_args(&config.rustcflags);
args += split_maybe_args(&props.compile_flags);
args.push_all_move(split_maybe_args(&config.rustcflags));
args.push_all_move(split_maybe_args(&props.compile_flags));
return ProcArgs {prog: config.rustc_path.to_str(), args: args};
}

View file

@ -112,7 +112,7 @@ impl<'self> Condvar<'self> {
pub struct ARC<T> { x: UnsafeAtomicRcBox<T> }
/// Create an atomically reference counted wrapper.
pub fn ARC<T:Const + Owned>(data: T) -> ARC<T> {
pub fn ARC<T:Freeze + Send>(data: T) -> ARC<T> {
ARC { x: UnsafeAtomicRcBox::new(data) }
}
@ -120,7 +120,7 @@ pub fn ARC<T:Const + Owned>(data: T) -> ARC<T> {
* Access the underlying data in an atomically reference counted
* wrapper.
*/
impl<T:Const+Owned> ARC<T> {
impl<T:Freeze+Send> ARC<T> {
pub fn get<'a>(&'a self) -> &'a T {
unsafe { &*self.x.get_immut() }
}
@ -133,7 +133,7 @@ impl<T:Const+Owned> ARC<T> {
* object. However, one of the `arc` objects can be sent to another task,
* allowing them to share the underlying data.
*/
impl<T:Const + Owned> Clone for ARC<T> {
impl<T:Freeze + Send> Clone for ARC<T> {
fn clone(&self) -> ARC<T> {
ARC { x: self.x.clone() }
}
@ -149,14 +149,14 @@ struct MutexARCInner<T> { lock: Mutex, failed: bool, data: T }
struct MutexARC<T> { x: UnsafeAtomicRcBox<MutexARCInner<T>> }
/// Create a mutex-protected ARC with the supplied data.
pub fn MutexARC<T:Owned>(user_data: T) -> MutexARC<T> {
pub fn MutexARC<T:Send>(user_data: T) -> MutexARC<T> {
mutex_arc_with_condvars(user_data, 1)
}
/**
* Create a mutex-protected ARC with the supplied data and a specified number
* of condvars (as sync::mutex_with_condvars).
*/
pub fn mutex_arc_with_condvars<T:Owned>(user_data: T,
pub fn mutex_arc_with_condvars<T:Send>(user_data: T,
num_condvars: uint) -> MutexARC<T> {
let data =
MutexARCInner { lock: mutex_with_condvars(num_condvars),
@ -164,7 +164,7 @@ pub fn mutex_arc_with_condvars<T:Owned>(user_data: T,
MutexARC { x: UnsafeAtomicRcBox::new(data) }
}
impl<T:Owned> Clone for MutexARC<T> {
impl<T:Send> Clone for MutexARC<T> {
/// Duplicate a mutex-protected ARC, as arc::clone.
fn clone(&self) -> MutexARC<T> {
// NB: Cloning the underlying mutex is not necessary. Its reference
@ -173,7 +173,7 @@ impl<T:Owned> Clone for MutexARC<T> {
}
}
impl<T:Owned> MutexARC<T> {
impl<T:Send> MutexARC<T> {
/**
* Access the underlying mutable data with mutual exclusion from other
@ -282,14 +282,14 @@ struct RWARC<T> {
}
/// Create a reader/writer ARC with the supplied data.
pub fn RWARC<T:Const + Owned>(user_data: T) -> RWARC<T> {
pub fn RWARC<T:Freeze + Send>(user_data: T) -> RWARC<T> {
rw_arc_with_condvars(user_data, 1)
}
/**
* Create a reader/writer ARC with the supplied data and a specified number
* of condvars (as sync::rwlock_with_condvars).
*/
pub fn rw_arc_with_condvars<T:Const + Owned>(
pub fn rw_arc_with_condvars<T:Freeze + Send>(
user_data: T,
num_condvars: uint) -> RWARC<T>
{
@ -299,7 +299,7 @@ pub fn rw_arc_with_condvars<T:Const + Owned>(
RWARC { x: UnsafeAtomicRcBox::new(data), }
}
impl<T:Const + Owned> RWARC<T> {
impl<T:Freeze + Send> RWARC<T> {
/// Duplicate a rwlock-protected ARC, as arc::clone.
pub fn clone(&self) -> RWARC<T> {
RWARC {
@ -309,7 +309,7 @@ impl<T:Const + Owned> RWARC<T> {
}
impl<T:Const + Owned> RWARC<T> {
impl<T:Freeze + Send> RWARC<T> {
/**
* Access the underlying data mutably. Locks the rwlock in write mode;
* other readers and writers will block.
@ -435,7 +435,7 @@ impl<T:Const + Owned> RWARC<T> {
// lock it. This wraps the unsafety, with the justification that the 'lock'
// field is never overwritten; only 'failed' and 'data'.
#[doc(hidden)]
fn borrow_rwlock<T:Const + Owned>(state: *const RWARCInner<T>) -> *RWlock {
fn borrow_rwlock<T:Freeze + Send>(state: *const RWARCInner<T>) -> *RWlock {
unsafe { cast::transmute(&const (*state).lock) }
}
@ -452,7 +452,7 @@ pub struct RWReadMode<'self, T> {
token: sync::RWlockReadMode<'self>,
}
impl<'self, T:Const + Owned> RWWriteMode<'self, T> {
impl<'self, T:Freeze + Send> RWWriteMode<'self, T> {
/// Access the pre-downgrade RWARC in write mode.
pub fn write<U>(&mut self, blk: &fn(x: &mut T) -> U) -> U {
match *self {
@ -493,7 +493,7 @@ impl<'self, T:Const + Owned> RWWriteMode<'self, T> {
}
}
impl<'self, T:Const + Owned> RWReadMode<'self, T> {
impl<'self, T:Freeze + Send> RWReadMode<'self, T> {
/// Access the post-downgrade rwlock in read mode.
pub fn read<U>(&self, blk: &fn(x: &T) -> U) -> U {
match *self {

View file

@ -186,20 +186,18 @@ impl Arena {
#[inline]
fn alloc_pod_inner(&mut self, n_bytes: uint, align: uint) -> *u8 {
unsafe {
// XXX: Borrow check
let head = transmute_mut_region(&mut self.pod_head);
let start = round_up_to(head.fill, align);
let this = transmute_mut_region(self);
let start = round_up_to(this.pod_head.fill, align);
let end = start + n_bytes;
if end > at_vec::capacity(head.data) {
return self.alloc_pod_grow(n_bytes, align);
if end > at_vec::capacity(this.pod_head.data) {
return this.alloc_pod_grow(n_bytes, align);
}
head.fill = end;
this.pod_head.fill = end;
//debug!("idx = %u, size = %u, align = %u, fill = %u",
// start, n_bytes, align, head.fill);
ptr::offset(vec::raw::to_ptr(head.data), start)
ptr::offset(vec::raw::to_ptr(this.pod_head.data), start)
}
}
@ -231,21 +229,31 @@ impl Arena {
fn alloc_nonpod_inner(&mut self, n_bytes: uint, align: uint)
-> (*u8, *u8) {
unsafe {
let head = transmute_mut_region(&mut self.head);
let start;
let end;
let tydesc_start;
let after_tydesc;
let tydesc_start = head.fill;
let after_tydesc = head.fill + sys::size_of::<*TyDesc>();
let start = round_up_to(after_tydesc, align);
let end = start + n_bytes;
if end > at_vec::capacity(head.data) {
{
let head = transmute_mut_region(&mut self.head);
tydesc_start = head.fill;
after_tydesc = head.fill + sys::size_of::<*TyDesc>();
start = round_up_to(after_tydesc, align);
end = start + n_bytes;
}
if end > at_vec::capacity(self.head.data) {
return self.alloc_nonpod_grow(n_bytes, align);
}
let head = transmute_mut_region(&mut self.head);
head.fill = round_up_to(end, sys::pref_align_of::<*TyDesc>());
//debug!("idx = %u, size = %u, align = %u, fill = %u",
// start, n_bytes, align, head.fill);
let buf = vec::raw::to_ptr(head.data);
let buf = vec::raw::to_ptr(self.head.data);
return (ptr::offset(buf, tydesc_start), ptr::offset(buf, start));
}
}

View file

@ -476,9 +476,15 @@ impl Bitv {
* character is either '0' or '1'.
*/
pub fn to_str(&self) -> ~str {
let mut rs = ~"";
for self.each() |i| { if i { rs += "1"; } else { rs += "0"; } };
rs
let mut rs = ~"";
for self.each() |i| {
if i {
rs.push_char('1');
} else {
rs.push_char('0');
}
};
rs
}

View file

@ -30,7 +30,7 @@ pub struct DuplexStream<T, U> {
}
// Allow these methods to be used without import:
impl<T:Owned,U:Owned> DuplexStream<T, U> {
impl<T:Send,U:Send> DuplexStream<T, U> {
pub fn send(&self, x: T) {
self.chan.send(x)
}
@ -48,19 +48,19 @@ impl<T:Owned,U:Owned> DuplexStream<T, U> {
}
}
impl<T:Owned,U:Owned> GenericChan<T> for DuplexStream<T, U> {
impl<T:Send,U:Send> GenericChan<T> for DuplexStream<T, U> {
fn send(&self, x: T) {
self.chan.send(x)
}
}
impl<T:Owned,U:Owned> GenericSmartChan<T> for DuplexStream<T, U> {
impl<T:Send,U:Send> GenericSmartChan<T> for DuplexStream<T, U> {
fn try_send(&self, x: T) -> bool {
self.chan.try_send(x)
}
}
impl<T:Owned,U:Owned> GenericPort<U> for DuplexStream<T, U> {
impl<T:Send,U:Send> GenericPort<U> for DuplexStream<T, U> {
fn recv(&self) -> U {
self.port.recv()
}
@ -70,20 +70,20 @@ impl<T:Owned,U:Owned> GenericPort<U> for DuplexStream<T, U> {
}
}
impl<T:Owned,U:Owned> Peekable<U> for DuplexStream<T, U> {
impl<T:Send,U:Send> Peekable<U> for DuplexStream<T, U> {
fn peek(&self) -> bool {
self.port.peek()
}
}
impl<T:Owned,U:Owned> Selectable for DuplexStream<T, U> {
impl<T:Send,U:Send> Selectable for DuplexStream<T, U> {
fn header(&mut self) -> *mut pipes::PacketHeader {
self.port.header()
}
}
/// Creates a bidirectional stream.
pub fn DuplexStream<T:Owned,U:Owned>()
pub fn DuplexStream<T:Send,U:Send>()
-> (DuplexStream<T, U>, DuplexStream<U, T>)
{
let (p1, c2) = comm::stream();

View file

@ -49,9 +49,9 @@ fn to_hex(rr: &[u8]) -> ~str {
for rr.iter().advance() |b| {
let hex = uint::to_str_radix(*b as uint, 16u);
if hex.len() == 1 {
s += "0";
s.push_char('0');
}
s += hex;
s.push_str(hex);
}
return s;
}

View file

@ -12,6 +12,8 @@
use core::prelude::*;
use core::str;
// Simple Extensible Binary Markup Language (ebml) reader and writer on a
// cursor model. See the specification here:
// http://www.matroska.org/technical/specs/rfc/index.html
@ -34,6 +36,20 @@ pub struct Doc {
end: uint,
}
impl Doc {
pub fn get(&self, tag: uint) -> Doc {
reader::get_doc(*self, tag)
}
pub fn as_str_slice<'a>(&'a self) -> &'a str {
str::from_bytes_slice(self.data.slice(self.start, self.end))
}
pub fn as_str(&self) -> ~str {
self.as_str_slice().to_owned()
}
}
pub struct TaggedDoc {
tag: uint,
doc: Doc,
@ -78,36 +94,18 @@ pub mod reader {
use serialize;
use core::prelude::*;
use core::cast::transmute;
use core::int;
use core::io;
use core::str;
#[cfg(target_arch = "x86")]
#[cfg(target_arch = "x86_64")]
use core::option::{None, Option, Some};
use core::ptr::offset;
#[cfg(target_arch = "x86")]
#[cfg(target_arch = "x86_64")]
use core::unstable::intrinsics::bswap32;
// ebml reading
impl Doc {
pub fn get(&self, tag: uint) -> Doc {
get_doc(*self, tag)
}
pub fn as_str_slice<'a>(&'a self) -> &'a str {
str::from_bytes_slice(self.data.slice(self.start, self.end))
}
pub fn as_str(&self) -> ~str {
self.as_str_slice().to_owned()
}
}
struct Res {
val: uint,
next: uint

View file

@ -166,8 +166,8 @@ Constructors for flat pipes that send POD types using memcpy.
# Safety Note
This module is currently unsafe because it uses `Copy Owned` as a type
parameter bounds meaning POD (plain old data), but `Copy Owned` and
This module is currently unsafe because it uses `Copy Send` as a type
parameter bounds meaning POD (plain old data), but `Copy Send` and
POD are not equivalent.
*/
@ -191,7 +191,7 @@ pub mod pod {
pub type PipeChan<T> = FlatChan<T, PodFlattener<T>, PipeByteChan>;
/// Create a `FlatPort` from a `Reader`
pub fn reader_port<T:Copy + Owned,R:Reader>(
pub fn reader_port<T:Copy + Send,R:Reader>(
reader: R
) -> ReaderPort<T, R> {
let unflat: PodUnflattener<T> = PodUnflattener::new();
@ -200,7 +200,7 @@ pub mod pod {
}
/// Create a `FlatChan` from a `Writer`
pub fn writer_chan<T:Copy + Owned,W:Writer>(
pub fn writer_chan<T:Copy + Send,W:Writer>(
writer: W
) -> WriterChan<T, W> {
let flat: PodFlattener<T> = PodFlattener::new();
@ -209,21 +209,21 @@ pub mod pod {
}
/// Create a `FlatPort` from a `Port<~[u8]>`
pub fn pipe_port<T:Copy + Owned>(port: Port<~[u8]>) -> PipePort<T> {
pub fn pipe_port<T:Copy + Send>(port: Port<~[u8]>) -> PipePort<T> {
let unflat: PodUnflattener<T> = PodUnflattener::new();
let byte_port = PipeBytePort::new(port);
FlatPort::new(unflat, byte_port)
}
/// Create a `FlatChan` from a `Chan<~[u8]>`
pub fn pipe_chan<T:Copy + Owned>(chan: Chan<~[u8]>) -> PipeChan<T> {
pub fn pipe_chan<T:Copy + Send>(chan: Chan<~[u8]>) -> PipeChan<T> {
let flat: PodFlattener<T> = PodFlattener::new();
let byte_chan = PipeByteChan::new(chan);
FlatChan::new(flat, byte_chan)
}
/// Create a pair of `FlatChan` and `FlatPort`, backed by pipes
pub fn pipe_stream<T:Copy + Owned>() -> (PipePort<T>, PipeChan<T>) {
pub fn pipe_stream<T:Copy + Send>() -> (PipePort<T>, PipeChan<T>) {
let (port, chan) = comm::stream();
return (pipe_port(port), pipe_chan(chan));
}
@ -352,7 +352,7 @@ pub mod flatteners {
use core::sys::size_of;
use core::vec;
// FIXME #4074: Copy + Owned != POD
// FIXME #4074: Copy + Send != POD
pub struct PodUnflattener<T> {
bogus: ()
}
@ -361,7 +361,7 @@ pub mod flatteners {
bogus: ()
}
impl<T:Copy + Owned> Unflattener<T> for PodUnflattener<T> {
impl<T:Copy + Send> Unflattener<T> for PodUnflattener<T> {
fn unflatten(&self, buf: ~[u8]) -> T {
assert!(size_of::<T>() != 0);
assert_eq!(size_of::<T>(), buf.len());
@ -371,7 +371,7 @@ pub mod flatteners {
}
}
impl<T:Copy + Owned> Flattener<T> for PodFlattener<T> {
impl<T:Copy + Send> Flattener<T> for PodFlattener<T> {
fn flatten(&self, val: T) -> ~[u8] {
assert!(size_of::<T>() != 0);
let val: *T = ptr::to_unsafe_ptr(&val);
@ -380,7 +380,7 @@ pub mod flatteners {
}
}
impl<T:Copy + Owned> PodUnflattener<T> {
impl<T:Copy + Send> PodUnflattener<T> {
pub fn new() -> PodUnflattener<T> {
PodUnflattener {
bogus: ()
@ -388,7 +388,7 @@ pub mod flatteners {
}
}
impl<T:Copy + Owned> PodFlattener<T> {
impl<T:Copy + Send> PodFlattener<T> {
pub fn new() -> PodFlattener<T> {
PodFlattener {
bogus: ()

View file

@ -101,7 +101,7 @@ pub fn from_value<A>(val: A) -> Future<A> {
Future {state: Forced(val)}
}
pub fn from_port<A:Owned>(port: PortOne<A>) -> Future<A> {
pub fn from_port<A:Send>(port: PortOne<A>) -> Future<A> {
/*!
* Create a future from a port
*
@ -127,7 +127,7 @@ pub fn from_fn<A>(f: ~fn() -> A) -> Future<A> {
Future {state: Pending(f)}
}
pub fn spawn<A:Owned>(blk: ~fn() -> A) -> Future<A> {
pub fn spawn<A:Send>(blk: ~fn() -> A) -> Future<A> {
/*!
* Create a future from a unique closure.
*

View file

@ -606,33 +606,47 @@ pub mod groups {
let mut row = " ".repeat(4);
// short option
row += match short_name.len() {
0 => ~"",
1 => ~"-" + short_name + " ",
match short_name.len() {
0 => {}
1 => {
row.push_char('-');
row.push_str(short_name);
row.push_char(' ');
}
_ => fail!("the short name should only be 1 ascii char long"),
};
}
// long option
row += match long_name.len() {
0 => ~"",
_ => ~"--" + long_name + " ",
};
match long_name.len() {
0 => {}
_ => {
row.push_str("--");
row.push_str(long_name);
row.push_char(' ');
}
}
// arg
row += match hasarg {
No => ~"",
Yes => hint,
Maybe => ~"[" + hint + "]",
};
match hasarg {
No => {}
Yes => row.push_str(hint),
Maybe => {
row.push_char('[');
row.push_str(hint);
row.push_char(']');
}
}
// FIXME: #5516
// here we just need to indent the start of the description
let rowlen = row.len();
row += if rowlen < 24 {
" ".repeat(24 - rowlen)
if rowlen < 24 {
for (24 - rowlen).times {
row.push_char(' ')
}
} else {
copy desc_sep
};
row.push_str(desc_sep)
}
// Normalize desc to contain words separated by one space character
let mut desc_normalized_whitespace = ~"";
@ -649,7 +663,7 @@ pub mod groups {
// FIXME: #5516
// wrapped description
row += desc_rows.connect(desc_sep);
row.push_str(desc_rows.connect(desc_sep));
row
});

View file

@ -60,25 +60,27 @@ fn escape_str(s: &str) -> ~str {
let mut escaped = ~"\"";
for s.iter().advance |c| {
match c {
'"' => escaped += "\\\"",
'\\' => escaped += "\\\\",
'\x08' => escaped += "\\b",
'\x0c' => escaped += "\\f",
'\n' => escaped += "\\n",
'\r' => escaped += "\\r",
'\t' => escaped += "\\t",
_ => escaped += str::from_char(c)
'"' => escaped.push_str("\\\""),
'\\' => escaped.push_str("\\\\"),
'\x08' => escaped.push_str("\\b"),
'\x0c' => escaped.push_str("\\f"),
'\n' => escaped.push_str("\\n"),
'\r' => escaped.push_str("\\r"),
'\t' => escaped.push_str("\\t"),
_ => escaped.push_char(c),
}
};
escaped += "\"";
escaped.push_char('"');
escaped
}
fn spaces(n: uint) -> ~str {
let mut ss = ~"";
for n.times { ss.push_str(" "); }
for n.times {
ss.push_str(" ");
}
return ss;
}

View file

@ -59,7 +59,8 @@ pub fn md4(msg: &[u8]) -> Quad {
while i < e {
let (aa, bb, cc, dd) = (a, b, c, d);
let mut (j, base) = (0u, i);
let mut j = 0u;
let mut base = i;
while j < 16u {
x[j] = (msg[base] as u32) + (msg[base + 1u] as u32 << 8u32) +
(msg[base + 2u] as u32 << 16u32) +
@ -118,8 +119,10 @@ pub fn md4_str(msg: &[u8]) -> ~str {
let mut i = 0u32;
while i < 4u32 {
let byte = (u >> (i * 8u32)) as u8;
if byte <= 16u8 { result += "0"; }
result += uint::to_str_radix(byte as uint, 16u);
if byte <= 16u8 {
result.push_char('0')
}
result.push_str(uint::to_str_radix(byte as uint, 16u));
i += 1u32;
}
}

View file

@ -93,10 +93,10 @@ fn encode_inner(s: &str, full_url: bool) -> ~str {
out.push_char(ch);
}
_ => out += fmt!("%%%X", ch as uint)
_ => out.push_str(fmt!("%%%X", ch as uint))
}
} else {
out += fmt!("%%%X", ch as uint);
out.push_str(fmt!("%%%X", ch as uint));
}
}
}
@ -192,7 +192,7 @@ fn encode_plus(s: &str) -> ~str {
out.push_char(ch);
}
' ' => out.push_char('+'),
_ => out += fmt!("%%%X", ch as uint)
_ => out.push_str(fmt!("%%%X", ch as uint))
}
}
@ -218,7 +218,7 @@ pub fn encode_form_urlencoded(m: &HashMap<~str, ~[~str]>) -> ~str {
first = false;
}
out += fmt!("%s=%s", key, encode_plus(*value));
out.push_str(fmt!("%s=%s", key, encode_plus(*value)));
}
}
@ -415,7 +415,9 @@ fn get_authority(rawurl: &str) ->
let mut port = None;
let mut colon_count = 0;
let mut (pos, begin, end) = (0, 2, len);
let mut pos = 0;
let mut begin = 2;
let mut end = len;
for rawurl.iter().enumerate().advance |(i,c)| {
if i < 2 { loop; } // ignore the leading //

View file

@ -380,7 +380,8 @@ impl Integer for BigUint {
let mut d = Zero::zero::<BigUint>();
let mut n = 1;
while m >= b {
let mut (d0, d_unit, b_unit) = div_estimate(&m, &b, n);
let (d0, d_unit, b_unit) = div_estimate(&m, &b, n);
let mut d0 = d0;
let mut prod = b * d0;
while prod > m {
// FIXME(#6050): overloaded operators force moves with generic types
@ -442,7 +443,8 @@ impl Integer for BigUint {
fn gcd(&self, other: &BigUint) -> BigUint {
// Use Euclid's algorithm
let mut (m, n) = (copy *self, copy *other);
let mut m = copy *self;
let mut n = copy *other;
while !m.is_zero() {
let temp = m;
m = n % temp;
@ -506,11 +508,11 @@ impl ToStrRadix for BigUint {
let mut m = n;
while m > divider {
let (d, m0) = m.div_mod_floor(&divider);
result += [m0.to_uint() as BigDigit];
result.push(m0.to_uint() as BigDigit);
m = d;
}
if !m.is_zero() {
result += [m.to_uint() as BigDigit];
result.push(m.to_uint() as BigDigit);
}
return result;
}

View file

@ -33,7 +33,7 @@ static min_granularity : uint = 1024u;
* This is used to build most of the other parallel vector functions,
* like map or alli.
*/
fn map_slices<A:Copy + Owned,B:Copy + Owned>(
fn map_slices<A:Copy + Send,B:Copy + Send>(
xs: &[A],
f: &fn() -> ~fn(uint, v: &[A]) -> B)
-> ~[B] {
@ -88,7 +88,7 @@ fn map_slices<A:Copy + Owned,B:Copy + Owned>(
}
/// A parallel version of map.
pub fn map<A:Copy + Owned,B:Copy + Owned>(
pub fn map<A:Copy + Send,B:Copy + Send>(
xs: &[A], fn_factory: &fn() -> ~fn(&A) -> B) -> ~[B] {
vec::concat(map_slices(xs, || {
let f = fn_factory();
@ -99,7 +99,7 @@ pub fn map<A:Copy + Owned,B:Copy + Owned>(
}
/// A parallel version of mapi.
pub fn mapi<A:Copy + Owned,B:Copy + Owned>(
pub fn mapi<A:Copy + Send,B:Copy + Send>(
xs: &[A],
fn_factory: &fn() -> ~fn(uint, &A) -> B) -> ~[B] {
let slices = map_slices(xs, || {
@ -118,7 +118,7 @@ pub fn mapi<A:Copy + Owned,B:Copy + Owned>(
}
/// Returns true if the function holds for all elements in the vector.
pub fn alli<A:Copy + Owned>(
pub fn alli<A:Copy + Send>(
xs: &[A],
fn_factory: &fn() -> ~fn(uint, &A) -> bool) -> bool
{
@ -133,7 +133,7 @@ pub fn alli<A:Copy + Owned>(
}
/// Returns true if the function holds for any elements in the vector.
pub fn any<A:Copy + Owned>(
pub fn any<A:Copy + Send>(
xs: &[A],
fn_factory: &fn() -> ~fn(&A) -> bool) -> bool {
let mapped = map_slices(xs, || {

View file

@ -13,10 +13,10 @@
/** Task-local reference counted smart pointers
Task-local reference counted smart pointers are an alternative to managed boxes with deterministic
destruction. They are restricted to containing types that are either `Owned` or `Const` (or both) to
destruction. They are restricted to containing types that are either `Send` or `Freeze` (or both) to
prevent cycles.
Neither `Rc<T>` or `RcMut<T>` is ever `Owned` and `RcMut<T>` is never `Const`. If `T` is `Const`, a
Neither `Rc<T>` or `RcMut<T>` is ever `Send` and `RcMut<T>` is never `Freeze`. If `T` is `Freeze`, a
cycle cannot be created with `Rc<T>` because there is no way to modify it after creation.
*/
@ -35,8 +35,8 @@ struct RcBox<T> {
}
/// Immutable reference counted pointer type
#[non_owned]
#[unsafe_no_drop_flag]
#[non_sendable]
pub struct Rc<T> {
priv ptr: *mut RcBox<T>,
}
@ -51,12 +51,12 @@ impl<T> Rc<T> {
}
// FIXME: #6516: should be a static method
pub fn rc_from_owned<T: Owned>(value: T) -> Rc<T> {
pub fn rc_from_owned<T: Send>(value: T) -> Rc<T> {
unsafe { Rc::new(value) }
}
// FIXME: #6516: should be a static method
pub fn rc_from_const<T: Const>(value: T) -> Rc<T> {
pub fn rc_from_const<T: Freeze>(value: T) -> Rc<T> {
unsafe { Rc::new(value) }
}
@ -168,6 +168,7 @@ struct RcMutBox<T> {
/// Mutable reference counted pointer type
#[non_owned]
#[non_sendable]
#[mutable]
#[unsafe_no_drop_flag]
pub struct RcMut<T> {
@ -184,12 +185,12 @@ impl<T> RcMut<T> {
}
// FIXME: #6516: should be a static method
pub fn rc_mut_from_owned<T: Owned>(value: T) -> RcMut<T> {
pub fn rc_mut_from_owned<T: Send>(value: T) -> RcMut<T> {
unsafe { RcMut::new(value) }
}
// FIXME: #6516: should be a static method
pub fn rc_mut_from_const<T: Const>(value: T) -> RcMut<T> {
pub fn rc_mut_from_const<T: Freeze>(value: T) -> RcMut<T> {
unsafe { RcMut::new(value) }
}

View file

@ -86,7 +86,7 @@ struct SemInner<Q> {
struct Sem<Q>(Exclusive<SemInner<Q>>);
#[doc(hidden)]
fn new_sem<Q:Owned>(count: int, q: Q) -> Sem<Q> {
fn new_sem<Q:Send>(count: int, q: Q) -> Sem<Q> {
Sem(exclusive(SemInner {
count: count, waiters: new_waitqueue(), blocked: q }))
}
@ -101,7 +101,7 @@ fn new_sem_and_signal(count: int, num_condvars: uint)
}
#[doc(hidden)]
impl<Q:Owned> Sem<Q> {
impl<Q:Send> Sem<Q> {
pub fn acquire(&self) {
unsafe {
let mut waiter_nobe = None;
@ -153,7 +153,7 @@ impl Sem<()> {
#[doc(hidden)]
impl Sem<~[Waitqueue]> {
pub fn access<U>(&self, blk: &fn() -> U) -> U {
pub fn access_waitqueue<U>(&self, blk: &fn() -> U) -> U {
let mut release = None;
unsafe {
do task::unkillable {
@ -175,7 +175,7 @@ struct SemReleaseGeneric<'self, Q> { sem: &'self Sem<Q> }
#[doc(hidden)]
#[unsafe_destructor]
impl<'self, Q:Owned> Drop for SemReleaseGeneric<'self, Q> {
impl<'self, Q:Send> Drop for SemReleaseGeneric<'self, Q> {
fn drop(&self) {
self.sem.release();
}
@ -381,7 +381,7 @@ impl Sem<~[Waitqueue]> {
// The only other places that condvars get built are rwlock.write_cond()
// and rwlock_write_mode.
pub fn access_cond<U>(&self, blk: &fn(c: &Condvar) -> U) -> U {
do self.access {
do self.access_waitqueue {
blk(&Condvar { sem: self, order: Nothing })
}
}
@ -456,7 +456,9 @@ impl Clone for Mutex {
impl Mutex {
/// Run a function with ownership of the mutex.
pub fn lock<U>(&self, blk: &fn() -> U) -> U { (&self.sem).access(blk) }
pub fn lock<U>(&self, blk: &fn() -> U) -> U {
(&self.sem).access_waitqueue(blk)
}
/// Run a function with ownership of the mutex and a handle to a condvar.
pub fn lock_cond<U>(&self, blk: &fn(c: &Condvar) -> U) -> U {
@ -559,7 +561,7 @@ impl RWlock {
unsafe {
do task::unkillable {
(&self.order_lock).acquire();
do (&self.access_lock).access {
do (&self.access_lock).access_waitqueue {
(&self.order_lock).release();
task::rekillable(blk)
}

View file

@ -470,13 +470,14 @@ priv fn format(val: Param, op: FormatOp, flags: Flags) -> Result<~[u8],~str> {
FormatHex|FormatHEX => 16,
FormatString => util::unreachable()
};
let mut (s,_) = match op {
let (s,_) = match op {
FormatDigit => {
let sign = if flags.sign { SignAll } else { SignNeg };
to_str_bytes_common(&d, radix, false, sign, DigAll)
}
_ => to_str_bytes_common(&(d as uint), radix, false, SignNone, DigAll)
};
let mut s = s;
if flags.precision > s.len() {
let mut s_ = vec::with_capacity(flags.precision);
let n = flags.precision - s.len();

View file

@ -19,15 +19,21 @@ use core::prelude::*;
use getopts;
use sort;
use stats::Stats;
use term;
use time::precise_time_ns;
use core::comm::{stream, SharedChan};
use core::either;
use core::io;
use core::num;
use core::option;
use core::rand::RngUtil;
use core::rand;
use core::result;
use core::task;
use core::to_str::ToStr;
use core::u64;
use core::uint;
use core::vec;
@ -609,153 +615,144 @@ fn calc_result(desc: &TestDesc, task_succeeded: bool) -> TestResult {
}
}
pub mod bench {
use core::prelude::*;
use core::num;
use core::rand::RngUtil;
use core::rand;
use core::u64;
use core::vec;
use stats::Stats;
use test::{BenchHarness, BenchSamples};
use time::precise_time_ns;
impl BenchHarness {
/// Callback for benchmark functions to run in their body.
pub fn iter(&mut self, inner:&fn()) {
self.ns_start = precise_time_ns();
let k = self.iterations;
for u64::range(0, k) |_| {
inner();
}
self.ns_end = precise_time_ns();
impl BenchHarness {
/// Callback for benchmark functions to run in their body.
pub fn iter(&mut self, inner:&fn()) {
self.ns_start = precise_time_ns();
let k = self.iterations;
for u64::range(0, k) |_| {
inner();
}
self.ns_end = precise_time_ns();
}
pub fn ns_elapsed(&mut self) -> u64 {
if self.ns_start == 0 || self.ns_end == 0 {
0
} else {
self.ns_end - self.ns_start
}
}
pub fn ns_per_iter(&mut self) -> u64 {
if self.iterations == 0 {
0
} else {
self.ns_elapsed() / self.iterations
}
}
pub fn bench_n(&mut self, n: u64, f: &fn(&mut BenchHarness)) {
self.iterations = n;
debug!("running benchmark for %u iterations",
n as uint);
f(self);
}
// This is the Go benchmark algorithm. It produces a single
// datapoint and always tries to run for 1s.
pub fn go_bench(&mut self, f: &fn(&mut BenchHarness)) {
// Rounds a number down to the nearest power of 10.
fn round_down_10(n: u64) -> u64 {
let mut n = n;
let mut res = 1;
while n > 10 {
n = n / 10;
res *= 10;
}
res
}
// Rounds x up to a number of the form [1eX, 2eX, 5eX].
fn round_up(n: u64) -> u64 {
let base = round_down_10(n);
if n < (2 * base) {
2 * base
} else if n < (5 * base) {
5 * base
} else {
10 * base
}
}
// Initial bench run to get ballpark figure.
let mut n = 1_u64;
self.bench_n(n, f);
while n < 1_000_000_000 &&
self.ns_elapsed() < 1_000_000_000 {
let last = n;
// Try to estimate iter count for 1s falling back to 1bn
// iterations if first run took < 1ns.
if self.ns_per_iter() == 0 {
n = 1_000_000_000;
} else {
n = 1_000_000_000 / self.ns_per_iter();
}
n = u64::max(u64::min(n+n/2, 100*last), last+1);
n = round_up(n);
self.bench_n(n, f);
}
}
// This is a more statistics-driven benchmark algorithm.
// It stops as quickly as 50ms, so long as the statistical
// properties are satisfactory. If those properties are
// not met, it may run as long as the Go algorithm.
pub fn auto_bench(&mut self, f: &fn(&mut BenchHarness)) -> ~[f64] {
let mut rng = rand::rng();
let mut magnitude = 10;
let mut prev_madp = 0.0;
loop {
let n_samples = rng.gen_uint_range(50, 60);
let n_iter = rng.gen_uint_range(magnitude,
magnitude * 2);
let samples = do vec::from_fn(n_samples) |_| {
self.bench_n(n_iter as u64, f);
self.ns_per_iter() as f64
};
// Eliminate outliers
let med = samples.median();
let mad = samples.median_abs_dev();
let samples = do vec::filter(samples) |f| {
num::abs(*f - med) <= 3.0 * mad
};
debug!("%u samples, median %f, MAD=%f, %u survived filter",
n_samples, med as float, mad as float,
samples.len());
if samples.len() != 0 {
// If we have _any_ cluster of signal...
let curr_madp = samples.median_abs_dev_pct();
if self.ns_elapsed() > 1_000_000 &&
(curr_madp < 1.0 ||
num::abs(curr_madp - prev_madp) < 0.1) {
return samples;
}
prev_madp = curr_madp;
if n_iter > 20_000_000 ||
self.ns_elapsed() > 20_000_000 {
return samples;
}
}
magnitude *= 2;
}
pub fn ns_elapsed(&mut self) -> u64 {
if self.ns_start == 0 || self.ns_end == 0 {
0
} else {
self.ns_end - self.ns_start
}
}
pub fn ns_per_iter(&mut self) -> u64 {
if self.iterations == 0 {
0
} else {
self.ns_elapsed() / self.iterations
}
}
pub fn bench_n(&mut self, n: u64, f: &fn(&mut BenchHarness)) {
self.iterations = n;
debug!("running benchmark for %u iterations",
n as uint);
f(self);
}
// This is the Go benchmark algorithm. It produces a single
// datapoint and always tries to run for 1s.
pub fn go_bench(&mut self, f: &fn(&mut BenchHarness)) {
// Rounds a number down to the nearest power of 10.
fn round_down_10(n: u64) -> u64 {
let mut n = n;
let mut res = 1;
while n > 10 {
n = n / 10;
res *= 10;
}
res
}
// Rounds x up to a number of the form [1eX, 2eX, 5eX].
fn round_up(n: u64) -> u64 {
let base = round_down_10(n);
if n < (2 * base) {
2 * base
} else if n < (5 * base) {
5 * base
} else {
10 * base
}
}
// Initial bench run to get ballpark figure.
let mut n = 1_u64;
self.bench_n(n, f);
while n < 1_000_000_000 &&
self.ns_elapsed() < 1_000_000_000 {
let last = n;
// Try to estimate iter count for 1s falling back to 1bn
// iterations if first run took < 1ns.
if self.ns_per_iter() == 0 {
n = 1_000_000_000;
} else {
n = 1_000_000_000 / self.ns_per_iter();
}
n = u64::max(u64::min(n+n/2, 100*last), last+1);
n = round_up(n);
self.bench_n(n, f);
}
}
// This is a more statistics-driven benchmark algorithm.
// It stops as quickly as 50ms, so long as the statistical
// properties are satisfactory. If those properties are
// not met, it may run as long as the Go algorithm.
pub fn auto_bench(&mut self, f: &fn(&mut BenchHarness)) -> ~[f64] {
let mut rng = rand::rng();
let mut magnitude = 10;
let mut prev_madp = 0.0;
loop {
let n_samples = rng.gen_uint_range(50, 60);
let n_iter = rng.gen_uint_range(magnitude,
magnitude * 2);
let samples = do vec::from_fn(n_samples) |_| {
self.bench_n(n_iter as u64, f);
self.ns_per_iter() as f64
};
// Eliminate outliers
let med = samples.median();
let mad = samples.median_abs_dev();
let samples = do vec::filter(samples) |f| {
num::abs(*f - med) <= 3.0 * mad
};
debug!("%u samples, median %f, MAD=%f, %u survived filter",
n_samples, med as float, mad as float,
samples.len());
if samples.len() != 0 {
// If we have _any_ cluster of signal...
let curr_madp = samples.median_abs_dev_pct();
if self.ns_elapsed() > 1_000_000 &&
(curr_madp < 1.0 ||
num::abs(curr_madp - prev_madp) < 0.1) {
return samples;
}
prev_madp = curr_madp;
if n_iter > 20_000_000 ||
self.ns_elapsed() > 20_000_000 {
return samples;
}
}
magnitude *= 2;
}
}
}
pub mod bench {
use test::{BenchHarness, BenchSamples};
pub fn benchmark(f: &fn(&mut BenchHarness)) -> BenchSamples {
let mut bs = BenchHarness {

View file

@ -849,7 +849,7 @@ priv fn do_strftime(format: &str, tm: &Tm) -> ~str {
do io::with_str_reader(format) |rdr| {
while !rdr.eof() {
match rdr.read_char() {
'%' => buf += parse_type(rdr.read_char(), tm),
'%' => buf.push_str(parse_type(rdr.read_char(), tm)),
ch => buf.push_char(ch)
}
}

View file

@ -39,7 +39,7 @@ use core::libc;
* * ch - a channel of type T to send a `val` on
* * val - a value of type T to send over the provided `ch`
*/
pub fn delayed_send<T:Owned>(iotask: &IoTask,
pub fn delayed_send<T:Send>(iotask: &IoTask,
msecs: uint,
ch: &Chan<T>,
val: T) {
@ -119,11 +119,12 @@ pub fn sleep(iotask: &IoTask, msecs: uint) {
* on the provided port in the allotted timeout period, then the result will
* be a `Some(T)`. If not, then `None` will be returned.
*/
pub fn recv_timeout<T:Copy + Owned>(iotask: &IoTask,
pub fn recv_timeout<T:Copy + Send>(iotask: &IoTask,
msecs: uint,
wait_po: &Port<T>)
-> Option<T> {
let mut (timeout_po, timeout_ch) = stream::<()>();
let (timeout_po, timeout_ch) = stream::<()>();
let mut timeout_po = timeout_po;
delayed_send(iotask, msecs, &timeout_ch, ());
// XXX: Workaround due to ports and channels not being &mut. They should

View file

@ -272,7 +272,7 @@ impl Context {
}
}
pub fn prep<T:Owned +
pub fn prep<T:Send +
Encodable<json::Encoder> +
Decodable<json::Decoder>>(@self, // FIXME(#5121)
fn_name:&str,
@ -292,7 +292,7 @@ trait TPrep {
fn declare_input(&mut self, kind:&str, name:&str, val:&str);
fn is_fresh(&self, cat:&str, kind:&str, name:&str, val:&str) -> bool;
fn all_fresh(&self, cat:&str, map:&WorkMap) -> bool;
fn exec<T:Owned +
fn exec<T:Send +
Encodable<json::Encoder> +
Decodable<json::Decoder>>( // FIXME(#5121)
&self, blk: ~fn(&Exec) -> T) -> Work<T>;
@ -328,7 +328,7 @@ impl TPrep for Prep {
return true;
}
fn exec<T:Owned +
fn exec<T:Send +
Encodable<json::Encoder> +
Decodable<json::Decoder>>( // FIXME(#5121)
&self, blk: ~fn(&Exec) -> T) -> Work<T> {
@ -365,7 +365,7 @@ impl TPrep for Prep {
}
}
impl<T:Owned +
impl<T:Send +
Encodable<json::Encoder> +
Decodable<json::Decoder>> Work<T> { // FIXME(#5121)
pub fn new(p: @mut Prep, e: Either<T,PortOne<(Exec,T)>>) -> Work<T> {
@ -374,7 +374,7 @@ impl<T:Owned +
}
// FIXME (#3724): movable self. This should be in impl Work.
fn unwrap<T:Owned +
fn unwrap<T:Send +
Encodable<json::Encoder> +
Decodable<json::Decoder>>( // FIXME(#5121)
w: Work<T>) -> T {

View file

@ -642,15 +642,15 @@ pub fn sanitize(s: &str) -> ~str {
for s.iter().advance |c| {
match c {
// Escape these with $ sequences
'@' => result += "$SP$",
'~' => result += "$UP$",
'*' => result += "$RP$",
'&' => result += "$BP$",
'<' => result += "$LT$",
'>' => result += "$GT$",
'(' => result += "$LP$",
')' => result += "$RP$",
',' => result += "$C$",
'@' => result.push_str("$SP$"),
'~' => result.push_str("$UP$"),
'*' => result.push_str("$RP$"),
'&' => result.push_str("$BP$"),
'<' => result.push_str("$LT$"),
'>' => result.push_str("$GT$"),
'(' => result.push_str("$LP$"),
')' => result.push_str("$RP$"),
',' => result.push_str("$C$"),
// '.' doesn't occur in types and functions, so reuse it
// for ':'
@ -686,12 +686,14 @@ pub fn mangle(sess: Session, ss: path) -> ~str {
let mut n = ~"_ZN"; // Begin name-sequence.
for ss.iter().advance |s| {
match *s { path_name(s) | path_mod(s) => {
let sani = sanitize(sess.str_of(s));
n += fmt!("%u%s", sani.len(), sani);
} }
match *s {
path_name(s) | path_mod(s) => {
let sani = sanitize(sess.str_of(s));
n.push_str(fmt!("%u%s", sani.len(), sani));
}
}
}
n += "E"; // End name-sequence.
n.push_char('E'); // End name-sequence.
n
}

View file

@ -19,7 +19,7 @@ use front;
use lib::llvm::llvm;
use metadata::{creader, cstore, filesearch};
use metadata;
use middle::{trans, freevars, kind, ty, typeck, lint, astencode};
use middle::{trans, freevars, kind, ty, typeck, lint, astencode, reachable};
use middle;
use util::common::time;
use util::ppaux;
@ -299,10 +299,16 @@ pub fn compile_rest(sess: Session,
time(time_passes, ~"kind checking", ||
kind::check_crate(ty_cx, method_map, crate));
let reachable_map =
time(time_passes, ~"reachability checking", ||
reachable::find_reachable(ty_cx, method_map, crate));
time(time_passes, ~"lint checking", ||
lint::check_crate(ty_cx, crate));
if phases.to == cu_no_trans { return (Some(crate), Some(ty_cx)); }
if phases.to == cu_no_trans {
return (Some(crate), Some(ty_cx));
}
let maps = astencode::Maps {
root_map: root_map,
@ -315,9 +321,13 @@ pub fn compile_rest(sess: Session,
let outputs = outputs.get_ref();
time(time_passes, ~"translation", ||
trans::base::trans_crate(sess, crate, ty_cx,
trans::base::trans_crate(sess,
crate,
ty_cx,
&outputs.obj_filename,
exp_map2, maps))
exp_map2,
reachable_map,
maps))
};
let outputs = outputs.get_ref();

View file

@ -403,8 +403,12 @@ mod test {
fn make_crate(with_bin: bool, with_lib: bool) -> @ast::crate {
let mut attrs = ~[];
if with_bin { attrs += [make_crate_type_attr(@"bin")]; }
if with_lib { attrs += [make_crate_type_attr(@"lib")]; }
if with_bin {
attrs.push(make_crate_type_attr(@"bin"));
}
if with_lib {
attrs.push(make_crate_type_attr(@"lib"));
}
@codemap::respan(codemap::dummy_sp(), ast::crate_ {
module: ast::_mod { view_items: ~[], items: ~[] },
attrs: attrs,

View file

@ -176,6 +176,10 @@ pub static tag_item_method_tps: uint = 0x7b;
pub static tag_item_method_fty: uint = 0x7c;
pub static tag_item_method_transformed_self_ty: uint = 0x7d;
pub static tag_mod_child: uint = 0x7e;
pub static tag_misc_info: uint = 0x7f;
pub static tag_misc_info_crate_items: uint = 0x80;
pub struct LinkMeta {
name: @str,
vers: @str,

View file

@ -97,18 +97,14 @@ pub fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id)
return decoder::get_enum_variants(cstore.intr, cdata, def.node, tcx)
}
pub fn get_impls_for_mod(cstore: @mut cstore::CStore, def: ast::def_id,
name: Option<ast::ident>)
-> @~[@resolve::Impl] {
let cdata = cstore::get_crate_data(cstore, def.crate);
do decoder::get_impls_for_mod(cstore.intr, cdata, def.node, name) |cnum| {
cstore::get_crate_data(cstore, cnum)
}
/// Returns information about the given implementation.
pub fn get_impl(cstore: @mut cstore::CStore, impl_def_id: ast::def_id)
-> resolve::Impl {
let cdata = cstore::get_crate_data(cstore, impl_def_id.crate);
decoder::get_impl(cstore.intr, cdata, impl_def_id.node)
}
pub fn get_method(tcx: ty::ctxt,
def: ast::def_id) -> ty::Method
{
pub fn get_method(tcx: ty::ctxt, def: ast::def_id) -> ty::Method {
let cdata = cstore::get_crate_data(tcx.cstore, def.crate);
decoder::get_method(tcx.cstore.intr, cdata, def.node, tcx)
}

View file

@ -458,64 +458,192 @@ pub fn each_lang_item(cdata: cmd, f: &fn(ast::node_id, uint) -> bool) -> bool {
return true;
}
/// Iterates over all the paths in the given crate.
pub fn each_path(intr: @ident_interner,
cdata: cmd,
get_crate_data: GetCrateDataCb,
f: &fn(&str, def_like, ast::visibility) -> bool)
-> bool {
// FIXME #4572: This function needs to be nuked, as it's impossible to make fast.
// It's the source of most of the performance problems when compiling small crates.
struct EachItemContext<'self> {
intr: @ident_interner,
cdata: cmd,
get_crate_data: GetCrateDataCb<'self>,
path_builder: &'self mut ~str,
callback: &'self fn(&str, def_like, ast::visibility) -> bool,
}
let root = reader::Doc(cdata.data);
let items = reader::get_doc(root, tag_items);
let items_data = reader::get_doc(items, tag_items_data);
impl<'self> EachItemContext<'self> {
// Pushes the given name and returns the old length.
fn push_name(&mut self, string: &str) -> uint {
let path_len = self.path_builder.len();
if path_len != 0 {
self.path_builder.push_str("::")
}
self.path_builder.push_str(string);
path_len
}
// First, go through all the explicit items.
for reader::tagged_docs(items_data, tag_items_data_item) |item_doc| {
let path = ast_map::path_to_str(item_path(item_doc), intr);
let path_is_empty = path.is_empty();
if !path_is_empty {
// Extract the def ID.
let def_id = item_def_id(item_doc, cdata);
// Pops the given name.
fn pop_name(&mut self, old_len: uint) {
// XXX(pcwalton): There's no safe function to do this. :(
unsafe {
str::raw::set_len(self.path_builder, old_len)
}
}
// Construct the def for this item.
debug!("(each_path) yielding explicit item: %s", path);
let def_like = item_to_def_like(item_doc, def_id, cdata.cnum);
let vis = item_visibility(item_doc);
// Hand the information off to the iteratee.
if !f(path, def_like, vis) {
return false;
fn process_item_and_pop_name(&mut self,
doc: ebml::Doc,
def_id: ast::def_id,
old_len: uint)
-> bool {
let def_like = item_to_def_like(doc, def_id, self.cdata.cnum);
match def_like {
dl_def(def) => {
debug!("(iterating over each item of a module) processing \
`%s` (def %?)",
*self.path_builder,
def);
}
_ => {
debug!("(iterating over each item of a module) processing \
`%s` (%d:%d)",
*self.path_builder,
def_id.crate,
def_id.node);
}
}
// If this is a module, find the reexports.
for each_reexport(item_doc) |reexport_doc| {
let def_id_doc =
reader::get_doc(reexport_doc,
tag_items_data_item_reexport_def_id);
let def_id = reader::with_doc_data(def_id_doc, parse_def_id);
let def_id = translate_def_id(cdata, def_id);
let vis = item_visibility(doc);
let reexport_name_doc =
reader::get_doc(reexport_doc,
tag_items_data_item_reexport_name);
let reexport_name = reexport_name_doc.as_str_slice();
let mut continue = (self.callback)(*self.path_builder, def_like, vis);
let reexport_path;
if path_is_empty {
reexport_path = reexport_name.to_owned();
} else {
reexport_path = path + "::" + reexport_name;
let family = item_family(doc);
if family == ForeignMod {
// These are unnamed; pop the name now.
self.pop_name(old_len)
}
if continue {
// Recurse if necessary.
match family {
Mod | ForeignMod | Trait | Impl => {
continue = self.each_item_of_module(def_id);
}
ImmStatic | MutStatic | Struct | UnsafeFn | Fn | ForeignFn |
UnsafeStaticMethod | StaticMethod | Type | ForeignType |
Variant | Enum | PublicField | PrivateField |
InheritedField => {}
}
}
// This reexport may be in yet another crate
let other_crates_items = if def_id.crate == cdata.cnum {
items
if family != ForeignMod {
self.pop_name(old_len)
}
continue
}
fn each_item_of_module(&mut self, def_id: ast::def_id) -> bool {
// This item might not be in this crate. If it's not, look it up.
let (cdata, items) = if def_id.crate == self.cdata.cnum {
let items = reader::get_doc(reader::Doc(self.cdata.data),
tag_items);
(self.cdata, items)
} else {
let crate_data = (self.get_crate_data)(def_id.crate);
let root = reader::Doc(crate_data.data);
(crate_data, reader::get_doc(root, tag_items))
};
// Look up the item.
let item_doc = match maybe_find_item(def_id.node, items) {
None => return false,
Some(item_doc) => item_doc,
};
self.each_child_of_module_or_crate(item_doc)
}
fn each_child_of_module_or_crate(&mut self, item_doc: ebml::Doc) -> bool {
let mut continue = true;
// Iterate over all children.
for reader::tagged_docs(item_doc, tag_mod_child) |child_info_doc| {
let child_def_id = reader::with_doc_data(child_info_doc,
parse_def_id);
let child_def_id = translate_def_id(self.cdata, child_def_id);
// This item may be in yet another crate, if it was the child of
// a reexport.
let other_crates_items = if child_def_id.crate ==
self.cdata.cnum {
reader::get_doc(reader::Doc(self.cdata.data), tag_items)
} else {
let crate_data = get_crate_data(def_id.crate);
let crate_data = (self.get_crate_data)(child_def_id.crate);
let root = reader::Doc(crate_data.data);
reader::get_doc(root, tag_items)
};
debug!("(iterating over each item of a module) looking up item \
%d:%d in `%s`, crate %d",
child_def_id.crate,
child_def_id.node,
*self.path_builder,
self.cdata.cnum);
// Get the item.
match maybe_find_item(child_def_id.node, other_crates_items) {
None => {}
Some(child_item_doc) => {
// Push the name.
let child_name = item_name(self.intr, child_item_doc);
debug!("(iterating over each item of a module) pushing \
name `%s` onto `%s`",
token::ident_to_str(&child_name),
*self.path_builder);
let old_len =
self.push_name(token::ident_to_str(&child_name));
// Process this item.
continue = self.process_item_and_pop_name(child_item_doc,
child_def_id,
old_len);
if !continue {
break
}
}
}
}
if !continue {
return false
}
// Iterate over reexports.
for each_reexport(item_doc) |reexport_doc| {
let def_id_doc = reader::get_doc(
reexport_doc,
tag_items_data_item_reexport_def_id);
let orig_def_id = reader::with_doc_data(def_id_doc, parse_def_id);
// NB: was "cdata"
let def_id = translate_def_id(self.cdata, orig_def_id);
let name_doc = reader::get_doc(reexport_doc,
tag_items_data_item_reexport_name);
let name = name_doc.as_str_slice();
// Push the name.
debug!("(iterating over each item of a module) pushing \
reexported name `%s` onto `%s` (crate %d, orig %d, \
in crate %d)",
name,
*self.path_builder,
def_id.crate,
orig_def_id.crate,
self.cdata.cnum);
let old_len = self.push_name(name);
// This reexport may be in yet another crate.
let other_crates_items = if def_id.crate == self.cdata.cnum {
reader::get_doc(reader::Doc(self.cdata.data), tag_items)
} else {
let crate_data = (self.get_crate_data)(def_id.crate);
let root = reader::Doc(crate_data.data);
reader::get_doc(root, tag_items)
};
@ -523,29 +651,53 @@ pub fn each_path(intr: @ident_interner,
// Get the item.
match maybe_find_item(def_id.node, other_crates_items) {
None => {}
Some(item_doc) => {
// Construct the def for this item.
let def_like = item_to_def_like(item_doc,
def_id,
cdata.cnum);
// Hand the information off to the iteratee.
debug!("(each_path) yielding reexported \
item: %s", reexport_path);
if (!f(reexport_path, def_like, ast::public)) {
return false;
}
Some(reexported_item_doc) => {
continue = self.process_item_and_pop_name(
reexported_item_doc,
def_id,
old_len);
}
}
}
}
return true;
if !continue {
break
}
}
continue
}
}
pub fn get_item_path(cdata: cmd, id: ast::node_id)
-> ast_map::path {
/// Iterates over all the paths in the given crate.
pub fn each_path(intr: @ident_interner,
cdata: cmd,
get_crate_data: GetCrateDataCb,
f: &fn(&str, def_like, ast::visibility) -> bool)
-> bool {
// FIXME #4572: This function needs to be nuked, as it's impossible to
// make fast. It's the source of most of the performance problems when
// compiling small crates.
let root_doc = reader::Doc(cdata.data);
let misc_info_doc = reader::get_doc(root_doc, tag_misc_info);
let crate_items_doc = reader::get_doc(misc_info_doc,
tag_misc_info_crate_items);
let mut path_builder = ~"";
let mut context = EachItemContext {
intr: intr,
cdata: cdata,
get_crate_data: get_crate_data,
path_builder: &mut path_builder,
callback: f,
};
// Iterate over all top-level crate items.
context.each_child_of_module_or_crate(crate_items_doc)
}
pub fn get_item_path(cdata: cmd, id: ast::node_id) -> ast_map::path {
item_path(lookup_item(id, cdata.data))
}
@ -661,35 +813,20 @@ fn item_impl_methods(intr: @ident_interner, cdata: cmd, item: ebml::Doc,
rslt
}
pub fn get_impls_for_mod(intr: @ident_interner,
cdata: cmd,
m_id: ast::node_id,
name: Option<ast::ident>,
get_cdata: &fn(ast::crate_num) -> cmd)
-> @~[@resolve::Impl] {
/// Returns information about the given implementation.
pub fn get_impl(intr: @ident_interner, cdata: cmd, impl_id: ast::node_id)
-> resolve::Impl {
let data = cdata.data;
let mod_item = lookup_item(m_id, data);
let mut result = ~[];
for reader::tagged_docs(mod_item, tag_mod_impl) |doc| {
let did = reader::with_doc_data(doc, parse_def_id);
let local_did = translate_def_id(cdata, did);
debug!("(get impls for mod) getting did %? for '%?'",
local_did, name);
// The impl may be defined in a different crate. Ask the caller
// to give us the metadata
let impl_cdata = get_cdata(local_did.crate);
let impl_data = impl_cdata.data;
let item = lookup_item(local_did.node, impl_data);
let nm = item_name(intr, item);
if match name { Some(n) => { n == nm } None => { true } } {
let base_tps = item_ty_param_count(item);
result.push(@resolve::Impl {
did: local_did, ident: nm,
methods: item_impl_methods(intr, impl_cdata, item, base_tps)
});
};
let impl_item = lookup_item(impl_id, data);
let base_tps = item_ty_param_count(impl_item);
resolve::Impl {
did: ast::def_id {
crate: cdata.cnum,
node: impl_id,
},
ident: item_name(intr, impl_item),
methods: item_impl_methods(intr, cdata, impl_item, base_tps),
}
@result
}
pub fn get_method_name_and_explicit_self(

View file

@ -16,14 +16,13 @@ use metadata::common::*;
use metadata::cstore;
use metadata::decoder;
use metadata::tyencode;
use middle::trans::reachable;
use middle::ty::node_id_to_type;
use middle::ty;
use middle;
use util::ppaux::ty_to_str;
use core::hash::HashUtil;
use core::hashmap::HashMap;
use core::hashmap::{HashMap, HashSet};
use core::int;
use core::io;
use core::str;
@ -60,13 +59,13 @@ pub type encode_inlined_item<'self> = &'self fn(ecx: &EncodeContext,
pub struct EncodeParams<'self> {
diag: @span_handler,
tcx: ty::ctxt,
reachable: reachable::map,
reexports2: middle::resolve::ExportMap2,
item_symbols: &'self HashMap<ast::node_id, ~str>,
discrim_symbols: &'self HashMap<ast::node_id, @str>,
link_meta: &'self LinkMeta,
cstore: @mut cstore::CStore,
encode_inlined_item: encode_inlined_item<'self>
encode_inlined_item: encode_inlined_item<'self>,
reachable: @mut HashSet<ast::node_id>,
}
struct Stats {
@ -75,6 +74,7 @@ struct Stats {
dep_bytes: uint,
lang_item_bytes: uint,
link_args_bytes: uint,
misc_bytes: uint,
item_bytes: uint,
index_bytes: uint,
zero_bytes: uint,
@ -87,14 +87,14 @@ pub struct EncodeContext<'self> {
diag: @span_handler,
tcx: ty::ctxt,
stats: @mut Stats,
reachable: reachable::map,
reexports2: middle::resolve::ExportMap2,
item_symbols: &'self HashMap<ast::node_id, ~str>,
discrim_symbols: &'self HashMap<ast::node_id, @str>,
link_meta: &'self LinkMeta,
cstore: &'self cstore::CStore,
encode_inlined_item: encode_inlined_item<'self>,
type_abbrevs: abbrev_map
type_abbrevs: abbrev_map,
reachable: @mut HashSet<ast::node_id>,
}
pub fn reachable(ecx: &EncodeContext, id: node_id) -> bool {
@ -157,8 +157,8 @@ fn encode_trait_ref(ebml_w: &mut writer::Encoder,
diag: ecx.diag,
ds: def_to_str,
tcx: ecx.tcx,
reachable: |a| r.contains(&a),
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)
};
ebml_w.start_tag(tag);
tyencode::enc_trait_ref(ebml_w.writer, ty_str_ctxt, trait_ref);
@ -185,8 +185,8 @@ fn encode_ty_type_param_defs(ebml_w: &mut writer::Encoder,
diag: ecx.diag,
ds: def_to_str,
tcx: ecx.tcx,
reachable: |a| r.contains(&a),
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)
};
for params.iter().advance |param| {
ebml_w.start_tag(tag);
tyencode::enc_type_param_def(ebml_w.writer, ty_str_ctxt, param);
@ -218,8 +218,8 @@ pub fn write_type(ecx: &EncodeContext,
diag: ecx.diag,
ds: def_to_str,
tcx: ecx.tcx,
reachable: |a| r.contains(&a),
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)
};
tyencode::enc_ty(ebml_w.writer, ty_str_ctxt, typ);
}
@ -231,8 +231,8 @@ pub fn write_vstore(ecx: &EncodeContext,
diag: ecx.diag,
ds: def_to_str,
tcx: ecx.tcx,
reachable: |a| r.contains(&a),
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)
};
tyencode::enc_vstore(ebml_w.writer, ty_str_ctxt, vstore);
}
@ -264,8 +264,8 @@ fn encode_method_fty(ecx: &EncodeContext,
diag: ecx.diag,
ds: def_to_str,
tcx: ecx.tcx,
reachable: |a| r.contains(&a),
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)
};
tyencode::enc_bare_fn_ty(ebml_w.writer, ty_str_ctxt, typ);
ebml_w.end_tag();
@ -473,40 +473,45 @@ fn encode_reexported_static_methods(ecx: &EncodeContext,
}
}
fn encode_info_for_mod(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder,
md: &_mod,
id: node_id,
path: &[ast_map::path_elt],
name: ident) {
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(id));
encode_family(ebml_w, 'm');
encode_name(ecx, ebml_w, name);
debug!("(encoding info for module) encoding info for module ID %d", id);
// Encode info about all the module children.
for md.items.iter().advance |item| {
match item.node {
item_impl(*) => {
let (ident, did) = (item.ident, item.id);
debug!("(encoding info for module) ... encoding impl %s \
(%?/%?)",
ecx.tcx.sess.str_of(ident),
did,
ast_map::node_id_to_str(ecx.tcx.items, did, token::get_ident_interner()));
ebml_w.start_tag(tag_mod_impl);
ebml_w.wr_str(def_to_str(local_def(did)));
ebml_w.end_tag();
/// Iterates through "auxiliary node IDs", which are node IDs that describe
/// top-level items that are sub-items of the given item. Specifically:
///
/// * For enums, iterates through the node IDs of the variants.
///
/// * For newtype structs, iterates through the node ID of the constructor.
fn each_auxiliary_node_id(item: @item, callback: &fn(node_id) -> bool)
-> bool {
let mut continue = true;
match item.node {
item_enum(ref enum_def, _) => {
for enum_def.variants.iter().advance |variant| {
continue = callback(variant.node.id);
if !continue {
break
}
}
_ => {} // FIXME #4573: Encode these too.
}
item_struct(struct_def, _) => {
// If this is a newtype struct, return the constructor.
match struct_def.ctor_id {
Some(ctor_id) if struct_def.fields.len() > 0 &&
struct_def.fields[0].node.kind ==
ast::unnamed_field => {
continue = callback(ctor_id);
}
_ => {}
}
}
_ => {}
}
encode_path(ecx, ebml_w, path, ast_map::path_mod(name));
continue
}
// Encode the reexports of this module.
fn encode_reexports(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder,
id: node_id,
path: &[ast_map::path_elt]) {
debug!("(encoding info for module) encoding reexports for %d", id);
match ecx.reexports2.find(&id) {
Some(ref exports) => {
@ -530,6 +535,57 @@ fn encode_info_for_mod(ecx: &EncodeContext,
id);
}
}
}
fn encode_info_for_mod(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder,
md: &_mod,
id: node_id,
path: &[ast_map::path_elt],
name: ident,
vis: visibility) {
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(id));
encode_family(ebml_w, 'm');
encode_name(ecx, ebml_w, name);
debug!("(encoding info for module) encoding info for module ID %d", id);
// Encode info about all the module children.
for md.items.iter().advance |item| {
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(item.id)));
ebml_w.end_tag();
for each_auxiliary_node_id(*item) |auxiliary_node_id| {
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(auxiliary_node_id)));
ebml_w.end_tag();
}
match item.node {
item_impl(*) => {
let (ident, did) = (item.ident, item.id);
debug!("(encoding info for module) ... encoding impl %s \
(%?/%?)",
ecx.tcx.sess.str_of(ident),
did,
ast_map::node_id_to_str(ecx.tcx.items, did, token::get_ident_interner()));
ebml_w.start_tag(tag_mod_impl);
ebml_w.wr_str(def_to_str(local_def(did)));
ebml_w.end_tag();
}
_ => {}
}
}
encode_path(ecx, ebml_w, path, ast_map::path_mod(name));
// Encode the reexports of this module, if this module is public.
if vis == public {
debug!("(encoding info for module) encoding reexports for %d", id);
encode_reexports(ecx, ebml_w, id, path);
}
ebml_w.end_tag();
}
@ -780,13 +836,6 @@ fn encode_info_for_item(ecx: &EncodeContext,
index: @mut ~[entry<int>],
path: &[ast_map::path_elt]) {
let tcx = ecx.tcx;
let must_write =
match item.node {
item_enum(_, _) | item_impl(*) | item_trait(*) | item_struct(*) |
item_mod(*) | item_foreign_mod(*) | item_static(*) => true,
_ => false
};
if !must_write && !reachable(ecx, item.id) { return; }
fn add_to_index_(item: @item, ebml_w: &writer::Encoder,
index: @mut ~[entry<int>]) {
@ -809,6 +858,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
}
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_symbol(ecx, ebml_w, item.id);
encode_name(ecx, ebml_w, item.ident);
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
(ecx.encode_inlined_item)(ecx, ebml_w, path, ii_item(item));
ebml_w.end_tag();
@ -821,6 +871,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
let tps_len = generics.ty_params.len();
encode_type_param_bounds(ebml_w, ecx, &generics.ty_params);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ecx, ebml_w, item.ident);
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
encode_attributes(ebml_w, item.attrs);
if tps_len > 0u || should_inline(item.attrs) {
@ -832,15 +883,29 @@ fn encode_info_for_item(ecx: &EncodeContext,
}
item_mod(ref m) => {
add_to_index();
encode_info_for_mod(ecx, ebml_w, m, item.id, path, item.ident);
encode_info_for_mod(ecx,
ebml_w,
m,
item.id,
path,
item.ident,
item.vis);
}
item_foreign_mod(_) => {
item_foreign_mod(ref fm) => {
add_to_index();
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
encode_family(ebml_w, 'n');
encode_name(ecx, ebml_w, item.ident);
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
// Encode all the items in this module.
for fm.items.iter().advance |foreign_item| {
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(foreign_item.id)));
ebml_w.end_tag();
}
ebml_w.end_tag();
}
item_ty(_, ref generics) => {
@ -898,23 +963,6 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_type_param_bounds(ebml_w, ecx, &generics.ty_params);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
// If this is a tuple- or enum-like struct, encode the type of the
// constructor.
if struct_def.fields.len() > 0 &&
struct_def.fields[0].node.kind == ast::unnamed_field {
let ctor_id = match struct_def.ctor_id {
Some(ctor_id) => ctor_id,
None => ecx.tcx.sess.bug("struct def didn't have ctor id"),
};
encode_info_for_struct_ctor(ecx,
ebml_w,
path,
item.ident,
ctor_id,
index);
}
encode_name(ecx, ebml_w, item.ident);
encode_attributes(ebml_w, item.attrs);
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
@ -944,6 +992,23 @@ fn encode_info_for_item(ecx: &EncodeContext,
let bkts = create_index(idx);
encode_index(ebml_w, bkts, write_int);
ebml_w.end_tag();
// If this is a tuple- or enum-like struct, encode the type of the
// constructor.
if struct_def.fields.len() > 0 &&
struct_def.fields[0].node.kind == ast::unnamed_field {
let ctor_id = match struct_def.ctor_id {
Some(ctor_id) => ctor_id,
None => ecx.tcx.sess.bug("struct def didn't have ctor id"),
};
encode_info_for_struct_ctor(ecx,
ebml_w,
path,
item.ident,
ctor_id,
index);
}
}
item_impl(ref generics, opt_trait, ty, ref methods) => {
add_to_index();
@ -979,7 +1044,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
// >:-<
let mut impl_path = vec::append(~[], path);
impl_path += [ast_map::path_name(item.ident)];
impl_path.push(ast_map::path_name(item.ident));
for methods.iter().advance |m| {
index.push(entry {val: m.id, pos: ebml_w.writer.tell()});
@ -1008,6 +1073,10 @@ fn encode_info_for_item(ecx: &EncodeContext,
ebml_w.start_tag(tag_item_trait_method);
encode_def_id(ebml_w, method_def_id);
ebml_w.end_tag();
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(method_def_id));
ebml_w.end_tag();
}
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
for super_traits.iter().advance |ast_trait_ref| {
@ -1092,7 +1161,6 @@ fn encode_info_for_foreign_item(ecx: &EncodeContext,
index: @mut ~[entry<int>],
path: ast_map::path,
abi: AbiSet) {
if !reachable(ecx, nitem.id) { return; }
index.push(entry { val: nitem.id, pos: ebml_w.writer.tell() });
ebml_w.start_tag(tag_items_data_item);
@ -1102,6 +1170,7 @@ fn encode_info_for_foreign_item(ecx: &EncodeContext,
encode_family(ebml_w, purity_fn_family(purity));
encode_type_param_bounds(ebml_w, ecx, &generics.ty_params);
encode_type(ecx, ebml_w, node_id_to_type(ecx.tcx, nitem.id));
encode_name(ecx, ebml_w, nitem.ident);
if abi.is_intrinsic() {
(ecx.encode_inlined_item)(ecx, ebml_w, path, ii_foreign(nitem));
} else {
@ -1118,6 +1187,7 @@ fn encode_info_for_foreign_item(ecx: &EncodeContext,
}
encode_type(ecx, ebml_w, node_id_to_type(ecx.tcx, nitem.id));
encode_symbol(ecx, ebml_w, nitem.id);
encode_name(ecx, ebml_w, nitem.ident);
encode_path(ecx, ebml_w, path, ast_map::path_name(nitem.ident));
}
}
@ -1131,9 +1201,13 @@ fn encode_info_for_items(ecx: &EncodeContext,
let index = @mut ~[];
ebml_w.start_tag(tag_items_data);
index.push(entry { val: crate_node_id, pos: ebml_w.writer.tell() });
encode_info_for_mod(ecx, ebml_w, &crate.node.module,
crate_node_id, [],
syntax::parse::token::special_idents::invalid);
encode_info_for_mod(ecx,
ebml_w,
&crate.node.module,
crate_node_id,
[],
syntax::parse::token::special_idents::invalid,
public);
let items = ecx.tcx.items;
// See comment in `encode_side_tables_for_ii` in astencode
@ -1162,6 +1236,12 @@ fn encode_info_for_items(ecx: &EncodeContext,
visit::visit_foreign_item(ni, (cx, v));
match items.get_copy(&ni.id) {
ast_map::node_foreign_item(_, abi, _, pt) => {
debug!("writing foreign item %s::%s",
ast_map::path_to_str(
*pt,
token::get_ident_interner()),
token::ident_to_str(&ni.ident));
let mut ebml_w = copy ebml_w;
// See above
let ecx : &EncodeContext = unsafe { cast::transmute(ecx_ptr) };
@ -1421,6 +1501,30 @@ fn encode_link_args(ecx: &EncodeContext, ebml_w: &mut writer::Encoder) {
ebml_w.end_tag();
}
fn encode_misc_info(ecx: &EncodeContext,
crate: &crate,
ebml_w: &mut writer::Encoder) {
ebml_w.start_tag(tag_misc_info);
ebml_w.start_tag(tag_misc_info_crate_items);
for crate.node.module.items.iter().advance |&item| {
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(item.id)));
ebml_w.end_tag();
for each_auxiliary_node_id(item) |auxiliary_node_id| {
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(auxiliary_node_id)));
ebml_w.end_tag();
}
}
// Encode reexports for the root module.
encode_reexports(ecx, ebml_w, 0, []);
ebml_w.end_tag();
ebml_w.end_tag();
}
fn encode_crate_dep(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder,
dep: decoder::crate_dep) {
@ -1460,29 +1564,39 @@ pub fn encode_metadata(parms: EncodeParams, crate: &crate) -> ~[u8] {
dep_bytes: 0,
lang_item_bytes: 0,
link_args_bytes: 0,
misc_bytes: 0,
item_bytes: 0,
index_bytes: 0,
zero_bytes: 0,
total_bytes: 0,
n_inlines: 0
};
let EncodeParams{item_symbols, diag, tcx, reachable, reexports2,
discrim_symbols, cstore, encode_inlined_item,
link_meta, _} = parms;
let EncodeParams {
item_symbols,
diag,
tcx,
reexports2,
discrim_symbols,
cstore,
encode_inlined_item,
link_meta,
reachable,
_
} = parms;
let type_abbrevs = @mut HashMap::new();
let stats = @mut stats;
let ecx = EncodeContext {
diag: diag,
tcx: tcx,
stats: stats,
reachable: reachable,
reexports2: reexports2,
item_symbols: item_symbols,
discrim_symbols: discrim_symbols,
link_meta: link_meta,
cstore: cstore,
encode_inlined_item: encode_inlined_item,
type_abbrevs: type_abbrevs
type_abbrevs: type_abbrevs,
reachable: reachable,
};
let mut ebml_w = writer::Encoder(wr as @io::Writer);
@ -1508,6 +1622,11 @@ pub fn encode_metadata(parms: EncodeParams, crate: &crate) -> ~[u8] {
encode_link_args(&ecx, &mut ebml_w);
ecx.stats.link_args_bytes = *wr.pos - i;
// Encode miscellaneous info.
i = *wr.pos;
encode_misc_info(&ecx, crate, &mut ebml_w);
ecx.stats.misc_bytes = *wr.pos - i;
// Encode and index the items.
ebml_w.start_tag(tag_items);
i = *wr.pos;
@ -1535,6 +1654,7 @@ pub fn encode_metadata(parms: EncodeParams, crate: &crate) -> ~[u8] {
io::println(fmt!(" dep bytes: %u", ecx.stats.dep_bytes));
io::println(fmt!(" lang item bytes: %u", ecx.stats.lang_item_bytes));
io::println(fmt!(" link args bytes: %u", ecx.stats.link_args_bytes));
io::println(fmt!(" misc bytes: %u", ecx.stats.misc_bytes));
io::println(fmt!(" item bytes: %u", ecx.stats.item_bytes));
io::println(fmt!(" index bytes: %u", ecx.stats.index_bytes));
io::println(fmt!(" zero bytes: %u", ecx.stats.zero_bytes));
@ -1557,7 +1677,6 @@ pub fn encoded_ty(tcx: ty::ctxt, t: ty::t) -> ~str {
diag: tcx.diag,
ds: def_to_str,
tcx: tcx,
reachable: |_id| false,
abbrevs: tyencode::ac_no_abbrevs};
do io::with_str_writer |wr| {
tyencode::enc_ty(wr, cx, t);

View file

@ -261,7 +261,9 @@ fn parse_opt<T>(st: &mut PState, f: &fn(&mut PState) -> T) -> Option<T> {
fn parse_str(st: &mut PState, term: char) -> ~str {
let mut result = ~"";
while peek(st) != term {
result += str::from_byte(next_byte(st));
unsafe {
str::raw::push_byte(&mut result, next_byte(st));
}
}
next(st);
return result;
@ -554,13 +556,13 @@ fn parse_bounds(st: &mut PState, conv: conv_did) -> ty::ParamBounds {
loop {
match next(st) {
'S' => {
param_bounds.builtin_bounds.add(ty::BoundOwned);
param_bounds.builtin_bounds.add(ty::BoundSend);
}
'C' => {
param_bounds.builtin_bounds.add(ty::BoundCopy);
}
'K' => {
param_bounds.builtin_bounds.add(ty::BoundConst);
param_bounds.builtin_bounds.add(ty::BoundFreeze);
}
'O' => {
param_bounds.builtin_bounds.add(ty::BoundStatic);

View file

@ -31,7 +31,6 @@ pub struct ctxt {
ds: @fn(def_id) -> ~str,
// The type context.
tcx: ty::ctxt,
reachable: @fn(node_id) -> bool,
abbrevs: abbrev_ctxt
}
@ -401,9 +400,9 @@ fn enc_fn_sig(w: @io::Writer, cx: @ctxt, fsig: &ty::FnSig) {
fn enc_bounds(w: @io::Writer, cx: @ctxt, bs: &ty::ParamBounds) {
for bs.builtin_bounds.each |bound| {
match bound {
ty::BoundOwned => w.write_char('S'),
ty::BoundSend => w.write_char('S'),
ty::BoundCopy => w.write_char('C'),
ty::BoundConst => w.write_char('K'),
ty::BoundFreeze => w.write_char('K'),
ty::BoundStatic => w.write_char('O'),
ty::BoundSized => w.write_char('Z'),
}

View file

@ -368,14 +368,17 @@ impl tr for ast::def {
ast::def_static_method(did.tr(xcx),
did2_opt.map(|did2| did2.tr(xcx)),
p)
},
ast::def_self_ty(nid) => ast::def_self_ty(xcx.tr_id(nid)),
ast::def_self(nid, i) => ast::def_self(xcx.tr_id(nid), i),
ast::def_mod(did) => ast::def_mod(did.tr(xcx)),
ast::def_foreign_mod(did) => ast::def_foreign_mod(did.tr(xcx)),
ast::def_static(did, m) => ast::def_static(did.tr(xcx), m),
ast::def_arg(nid, b) => ast::def_arg(xcx.tr_id(nid), b),
ast::def_local(nid, b) => ast::def_local(xcx.tr_id(nid), b),
}
ast::def_method(did0, did1) => {
ast::def_method(did0.tr(xcx), did1.map(|did1| did1.tr(xcx)))
}
ast::def_self_ty(nid) => { ast::def_self_ty(xcx.tr_id(nid)) }
ast::def_self(nid, i) => { ast::def_self(xcx.tr_id(nid), i) }
ast::def_mod(did) => { ast::def_mod(did.tr(xcx)) }
ast::def_foreign_mod(did) => { ast::def_foreign_mod(did.tr(xcx)) }
ast::def_static(did, m) => { ast::def_static(did.tr(xcx), m) }
ast::def_arg(nid, b) => { ast::def_arg(xcx.tr_id(nid), b) }
ast::def_local(nid, b) => { ast::def_local(xcx.tr_id(nid), b) }
ast::def_variant(e_did, v_did) => {
ast::def_variant(e_did.tr(xcx), v_did.tr(xcx))
},
@ -692,12 +695,12 @@ trait get_ty_str_ctxt {
impl<'self> get_ty_str_ctxt for e::EncodeContext<'self> {
fn ty_str_ctxt(&self) -> @tyencode::ctxt {
let r = self.reachable;
@tyencode::ctxt {diag: self.tcx.sess.diagnostic(),
ds: e::def_to_str,
tcx: self.tcx,
reachable: |a| r.contains(&a),
abbrevs: tyencode::ac_use_abbrevs(self.type_abbrevs)}
@tyencode::ctxt {
diag: self.tcx.sess.diagnostic(),
ds: e::def_to_str,
tcx: self.tcx,
abbrevs: tyencode::ac_use_abbrevs(self.type_abbrevs)
}
}
}

View file

@ -359,7 +359,7 @@ of its owner:
LIFETIME(LV.f, LT, MQ) // L-Field
LIFETIME(LV, LT, MQ)
LIFETIME(*LV, LT, MQ) // L-Deref-Owned
LIFETIME(*LV, LT, MQ) // L-Deref-Send
TYPE(LV) = ~Ty
LIFETIME(LV, LT, MQ)
@ -504,7 +504,7 @@ must prevent the owned pointer `LV` from being mutated, which means
that we always add `MUTATE` and `CLAIM` to the restriction set imposed
on `LV`:
RESTRICTIONS(*LV, ACTIONS) = RS, (*LV, ACTIONS) // R-Deref-Owned-Pointer
RESTRICTIONS(*LV, ACTIONS) = RS, (*LV, ACTIONS) // R-Deref-Send-Pointer
TYPE(LV) = ~Ty
RESTRICTIONS(LV, ACTIONS|MUTATE|CLAIM) = RS
@ -539,14 +539,14 @@ mutable borrowed pointers.
### Restrictions for loans of const aliasable pointees
Const pointers are read-only. There may be `&mut` or `&` aliases, and
Freeze pointers are read-only. There may be `&mut` or `&` aliases, and
we can not prevent *anything* but moves in that case. So the
`RESTRICTIONS` function is only defined if `ACTIONS` is the empty set.
Because moves from a `&const` or `@const` lvalue are never legal, it
is not necessary to add any restrictions at all to the final
result.
RESTRICTIONS(*LV, []) = [] // R-Deref-Const-Borrowed
RESTRICTIONS(*LV, []) = [] // R-Deref-Freeze-Borrowed
TYPE(LV) = &const Ty or @const Ty
### Restrictions for loans of mutable borrowed pointees

View file

@ -109,7 +109,7 @@ impl GuaranteeLifetimeContext {
}
mc::cat_downcast(base) |
mc::cat_deref(base, _, mc::uniq_ptr(*)) | // L-Deref-Owned
mc::cat_deref(base, _, mc::uniq_ptr(*)) | // L-Deref-Send
mc::cat_interior(base, _) => { // L-Field
self.check(base, discr_scope)
}

View file

@ -103,7 +103,7 @@ impl RestrictionsContext {
}
mc::cat_deref(cmt_base, _, mc::uniq_ptr(*)) => {
// R-Deref-Owned-Pointer
// R-Deref-Send-Pointer
//
// When we borrow the interior of an owned pointer, we
// cannot permit the base to be mutated, because that
@ -125,7 +125,7 @@ impl RestrictionsContext {
mc::cat_deref(_, _, mc::region_ptr(m_const, _)) |
mc::cat_deref(_, _, mc::gc_ptr(m_const)) => {
// R-Deref-Const-Borrowed
// R-Deref-Freeze-Borrowed
self.check_no_mutability_control(cmt, restrictions);
Safe
}

View file

@ -31,21 +31,21 @@ use syntax::{visit, ast_util};
//
// send: Things that can be sent on channels or included in spawned closures.
// copy: Things that can be copied.
// const: Things thare are deeply immutable. They are guaranteed never to
// freeze: Things thare are deeply immutable. They are guaranteed never to
// change, and can be safely shared without copying between tasks.
// owned: Things that do not contain borrowed pointers.
// 'static: Things that do not contain borrowed pointers.
//
// Send includes scalar types as well as classes and unique types containing
// only sendable types.
//
// Copy includes boxes, closure and unique types containing copyable types.
//
// Const include scalar types, things without non-const fields, and pointers
// to const things.
// Freeze include scalar types, things without non-const fields, and pointers
// to freezable things.
//
// This pass ensures that type parameters are only instantiated with types
// whose kinds are equal or less general than the way the type parameter was
// annotated (with the `send`, `copy` or `const` keyword).
// annotated (with the `Send`, `Copy` or `Freeze` bound).
//
// It also verifies that noncopyable kinds are not copied. Sendability is not
// applied, since none of our language primitives send. Instead, the sending
@ -90,10 +90,10 @@ fn check_struct_safe_for_destructor(cx: Context,
self_ty: None,
tps: ~[]
});
if !ty::type_is_owned(cx.tcx, struct_ty) {
if !ty::type_is_sendable(cx.tcx, struct_ty) {
cx.tcx.sess.span_err(span,
"cannot implement a destructor on a struct \
that is not Owned");
"cannot implement a destructor on a \
structure that does not satisfy Send");
cx.tcx.sess.span_note(span,
"use \"#[unsafe_destructor]\" on the \
implementation to force the compiler to \
@ -101,7 +101,7 @@ fn check_struct_safe_for_destructor(cx: Context,
}
} else {
cx.tcx.sess.span_err(span,
"cannot implement a destructor on a struct \
"cannot implement a destructor on a structure \
with type parameters");
cx.tcx.sess.span_note(span,
"use \"#[unsafe_destructor]\" on the \
@ -438,10 +438,10 @@ fn check_copy(cx: Context, ty: ty::t, sp: span, reason: &str) {
}
}
pub fn check_owned(cx: Context, ty: ty::t, sp: span) -> bool {
if !ty::type_is_owned(cx.tcx, ty) {
pub fn check_send(cx: Context, ty: ty::t, sp: span) -> bool {
if !ty::type_is_sendable(cx.tcx, ty) {
cx.tcx.sess.span_err(
sp, fmt!("value has non-owned type `%s`",
sp, fmt!("value has non-sendable type `%s`",
ty_to_str(cx.tcx, ty)));
false
} else {
@ -489,7 +489,7 @@ pub fn check_durable(tcx: ty::ctxt, ty: ty::t, sp: span) -> bool {
/// `deque<T>`, then whatever borrowed ptrs may appear in `T` also
/// appear in `deque<T>`.
///
/// (3) The type parameter is owned (and therefore does not contain
/// (3) The type parameter is sendable (and therefore does not contain
/// borrowed ptrs).
///
/// FIXME(#5723)---This code should probably move into regionck.
@ -528,7 +528,7 @@ pub fn check_cast_for_escaping_regions(
}
// Assuming the trait instance can escape, then ensure that each parameter
// either appears in the trait type or is owned.
// either appears in the trait type or is sendable.
let target_params = ty::param_tys_in_type(target_ty);
let source_ty = ty::expr_ty(cx.tcx, source);
ty::walk_regions_and_ty(
@ -574,3 +574,4 @@ pub fn check_cast_for_escaping_regions(
cx.tcx.region_maps.is_subregion_of(r_sub, r_sup)
}
}

View file

@ -13,9 +13,9 @@
// Language items are items that represent concepts intrinsic to the language
// itself. Examples are:
//
// * Traits that specify "kinds"; e.g. "const", "copy", "owned".
// * Traits that specify "kinds"; e.g. "Freeze", "Copy", "Send".
//
// * Traits that represent operators; e.g. "add", "sub", "index".
// * Traits that represent operators; e.g. "Add", "Sub", "Index".
//
// * Functions called by the compiler itself.
@ -33,9 +33,9 @@ use syntax::visit::visit_crate;
use core::hashmap::HashMap;
pub enum LangItem {
ConstTraitLangItem, // 0
FreezeTraitLangItem, // 0
CopyTraitLangItem, // 1
OwnedTraitLangItem, // 2
SendTraitLangItem, // 2
SizedTraitLangItem, // 3
DropTraitLangItem, // 4
@ -99,9 +99,9 @@ impl LanguageItems {
pub fn item_name(index: uint) -> &'static str {
match index {
0 => "const",
0 => "freeze",
1 => "copy",
2 => "owned",
2 => "send",
3 => "sized",
4 => "drop",
@ -152,14 +152,14 @@ impl LanguageItems {
// FIXME #4621: Method macros sure would be nice here.
pub fn const_trait(&const self) -> def_id {
self.items[ConstTraitLangItem as uint].get()
pub fn freeze_trait(&const self) -> def_id {
self.items[FreezeTraitLangItem as uint].get()
}
pub fn copy_trait(&const self) -> def_id {
self.items[CopyTraitLangItem as uint].get()
}
pub fn owned_trait(&const self) -> def_id {
self.items[OwnedTraitLangItem as uint].get()
pub fn send_trait(&const self) -> def_id {
self.items[SendTraitLangItem as uint].get()
}
pub fn sized_trait(&const self) -> def_id {
self.items[SizedTraitLangItem as uint].get()
@ -291,13 +291,13 @@ struct LanguageItemCollector<'self> {
}
impl<'self> LanguageItemCollector<'self> {
pub fn new<'a>(crate: &'a crate, session: Session) -> LanguageItemCollector<'a> {
pub fn new<'a>(crate: &'a crate, session: Session)
-> LanguageItemCollector<'a> {
let mut item_refs = HashMap::new();
item_refs.insert(@"const", ConstTraitLangItem as uint);
item_refs.insert(@"freeze", FreezeTraitLangItem as uint);
item_refs.insert(@"copy", CopyTraitLangItem as uint);
item_refs.insert(@"owned", OwnedTraitLangItem as uint);
item_refs.insert(@"send", SendTraitLangItem as uint);
item_refs.insert(@"sized", SizedTraitLangItem as uint);
item_refs.insert(@"drop", DropTraitLangItem as uint);

View file

@ -452,7 +452,7 @@ impl mem_categorization_ctxt {
ast::def_trait(_) | ast::def_ty(_) | ast::def_prim_ty(_) |
ast::def_ty_param(*) | ast::def_struct(*) |
ast::def_typaram_binder(*) | ast::def_region(_) |
ast::def_label(_) | ast::def_self_ty(*) => {
ast::def_label(_) | ast::def_self_ty(*) | ast::def_method(*) => {
@cmt_ {
id:id,
span:span,

View file

@ -0,0 +1,438 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Finds items that are externally reachable, to determine which items
// need to have their metadata (and possibly their AST) serialized.
// All items that can be referred to through an exported name are
// reachable, and when a reachable thing is inline or generic, it
// makes all other generics or inline functions that it references
// reachable as well.
use core::prelude::*;
use core::iterator::IteratorUtil;
use middle::resolve;
use middle::ty;
use middle::typeck;
use core::hashmap::HashSet;
use syntax::ast::*;
use syntax::ast;
use syntax::ast_map;
use syntax::ast_util::def_id_of_def;
use syntax::attr;
use syntax::codemap;
use syntax::parse::token;
use syntax::visit::Visitor;
use syntax::visit;
// Returns true if the given set of attributes contains the `#[inline]`
// attribute.
fn attributes_specify_inlining(attrs: &[attribute]) -> bool {
attr::attrs_contains_name(attrs, "inline")
}
// Returns true if the given set of generics implies that the item it's
// associated with must be inlined.
fn generics_require_inlining(generics: &Generics) -> bool {
!generics.ty_params.is_empty()
}
// Returns true if the given item must be inlined because it may be
// monomorphized or it was marked with `#[inline]`. This will only return
// true for functions.
fn item_might_be_inlined(item: @item) -> bool {
if attributes_specify_inlining(item.attrs) {
return true
}
match item.node {
item_fn(_, _, _, ref generics, _) => {
generics_require_inlining(generics)
}
_ => false,
}
}
// Returns true if the given type method must be inlined because it may be
// monomorphized or it was marked with `#[inline]`.
fn ty_method_might_be_inlined(ty_method: &ty_method) -> bool {
attributes_specify_inlining(ty_method.attrs) ||
generics_require_inlining(&ty_method.generics)
}
// Returns true if the given trait method must be inlined because it may be
// monomorphized or it was marked with `#[inline]`.
fn trait_method_might_be_inlined(trait_method: &trait_method) -> bool {
match *trait_method {
required(ref ty_method) => ty_method_might_be_inlined(ty_method),
provided(_) => true
}
}
// The context we're in. If we're in a public context, then public symbols are
// marked reachable. If we're in a private context, then only trait
// implementations are marked reachable.
#[deriving(Eq)]
enum PrivacyContext {
PublicContext,
PrivateContext,
}
// Information needed while computing reachability.
struct ReachableContext {
// The type context.
tcx: ty::ctxt,
// The method map, which links node IDs of method call expressions to the
// methods they've been resolved to.
method_map: typeck::method_map,
// The set of items which must be exported in the linkage sense.
reachable_symbols: @mut HashSet<node_id>,
// A worklist of item IDs. Each item ID in this worklist will be inlined
// and will be scanned for further references.
worklist: @mut ~[node_id],
}
impl ReachableContext {
// Creates a new reachability computation context.
fn new(tcx: ty::ctxt, method_map: typeck::method_map)
-> ReachableContext {
ReachableContext {
tcx: tcx,
method_map: method_map,
reachable_symbols: @mut HashSet::new(),
worklist: @mut ~[],
}
}
// Step 1: Mark all public symbols, and add all public symbols that might
// be inlined to a worklist.
fn mark_public_symbols(&self, crate: @crate) {
let reachable_symbols = self.reachable_symbols;
let worklist = self.worklist;
let visitor = visit::mk_vt(@Visitor {
visit_item: |item, (privacy_context, visitor):
(PrivacyContext, visit::vt<PrivacyContext>)| {
match item.node {
item_fn(*) => {
if privacy_context == PublicContext {
reachable_symbols.insert(item.id);
}
if item_might_be_inlined(item) {
worklist.push(item.id)
}
}
item_struct(ref struct_def, _) => {
match struct_def.ctor_id {
Some(ctor_id) if
privacy_context == PublicContext => {
reachable_symbols.insert(ctor_id);
}
Some(_) | None => {}
}
}
item_enum(ref enum_def, _) => {
if privacy_context == PublicContext {
for enum_def.variants.iter().advance |variant| {
reachable_symbols.insert(variant.node.id);
}
}
}
item_impl(ref generics, trait_ref, _, ref methods) => {
// XXX(pcwalton): We conservatively assume any methods
// on a trait implementation are reachable, when this
// is not the case. We could be more precise by only
// treating implementations of reachable or cross-
// crate traits as reachable.
let should_be_considered_public = |method: @method| {
(method.vis == public &&
privacy_context == PublicContext) ||
trait_ref.is_some()
};
// Mark all public methods as reachable.
for methods.iter().advance |&method| {
if should_be_considered_public(method) {
reachable_symbols.insert(method.id);
}
}
if generics_require_inlining(generics) {
// If the impl itself has generics, add all public
// symbols to the worklist.
for methods.iter().advance |&method| {
if should_be_considered_public(method) {
worklist.push(method.id)
}
}
} else {
// Otherwise, add only public methods that have
// generics to the worklist.
for methods.iter().advance |method| {
let generics = &method.generics;
let attrs = &method.attrs;
if generics_require_inlining(generics) ||
attributes_specify_inlining(*attrs) ||
should_be_considered_public(*method) {
worklist.push(method.id)
}
}
}
}
item_trait(_, _, ref trait_methods) => {
// Mark all provided methods as reachable.
if privacy_context == PublicContext {
for trait_methods.iter().advance |trait_method| {
match *trait_method {
provided(method) => {
reachable_symbols.insert(method.id);
worklist.push(method.id)
}
required(_) => {}
}
}
}
}
_ => {}
}
if item.vis == public && privacy_context == PublicContext {
visit::visit_item(item, (PublicContext, visitor))
} else {
visit::visit_item(item, (PrivateContext, visitor))
}
},
.. *visit::default_visitor()
});
visit::visit_crate(crate, (PublicContext, visitor))
}
// Returns true if the given def ID represents a local item that is
// eligible for inlining and false otherwise.
fn def_id_represents_local_inlined_item(tcx: ty::ctxt, def_id: def_id)
-> bool {
if def_id.crate != local_crate {
return false
}
let node_id = def_id.node;
match tcx.items.find(&node_id) {
Some(&ast_map::node_item(item, _)) => {
match item.node {
item_fn(*) => item_might_be_inlined(item),
_ => false,
}
}
Some(&ast_map::node_trait_method(trait_method, _, _)) => {
match *trait_method {
required(_) => false,
provided(_) => true,
}
}
Some(&ast_map::node_method(method, impl_did, _)) => {
if generics_require_inlining(&method.generics) ||
attributes_specify_inlining(method.attrs) {
true
} else {
// Check the impl. If the generics on the self type of the
// impl require inlining, this method does too.
assert!(impl_did.crate == local_crate);
match tcx.items.find(&impl_did.node) {
Some(&ast_map::node_item(item, _)) => {
match item.node {
item_impl(ref generics, _, _, _) => {
generics_require_inlining(generics)
}
_ => false
}
}
Some(_) => {
tcx.sess.span_bug(method.span,
"method is not inside an \
impl?!")
}
None => {
tcx.sess.span_bug(method.span,
"the impl that this method is \
supposedly inside of doesn't \
exist in the AST map?!")
}
}
}
}
Some(_) => false,
None => false // This will happen for default methods.
}
}
// Helper function to set up a visitor for `propagate()` below.
fn init_visitor(&self) -> visit::vt<()> {
let (worklist, method_map) = (self.worklist, self.method_map);
let (tcx, reachable_symbols) = (self.tcx, self.reachable_symbols);
visit::mk_vt(@visit::Visitor {
visit_expr: |expr, (_, visitor)| {
match expr.node {
expr_path(_) => {
let def = match tcx.def_map.find(&expr.id) {
Some(&def) => def,
None => {
tcx.sess.span_bug(expr.span,
"def ID not in def map?!")
}
};
let def_id = def_id_of_def(def);
if ReachableContext::
def_id_represents_local_inlined_item(tcx,
def_id) {
worklist.push(def_id.node)
}
reachable_symbols.insert(def_id.node);
}
expr_method_call(*) => {
match method_map.find(&expr.id) {
Some(&typeck::method_map_entry {
origin: typeck::method_static(def_id),
_
}) => {
if ReachableContext::
def_id_represents_local_inlined_item(
tcx,
def_id) {
worklist.push(def_id.node)
}
reachable_symbols.insert(def_id.node);
}
Some(_) => {}
None => {
tcx.sess.span_bug(expr.span,
"method call expression \
not in method map?!")
}
}
}
_ => {}
}
visit::visit_expr(expr, ((), visitor))
},
..*visit::default_visitor()
})
}
// Step 2: Mark all symbols that the symbols on the worklist touch.
fn propagate(&self) {
let visitor = self.init_visitor();
let mut scanned = HashSet::new();
while self.worklist.len() > 0 {
let search_item = self.worklist.pop();
if scanned.contains(&search_item) {
loop
}
scanned.insert(search_item);
self.reachable_symbols.insert(search_item);
// Find the AST block corresponding to the item and visit it,
// marking all path expressions that resolve to something
// interesting.
match self.tcx.items.find(&search_item) {
Some(&ast_map::node_item(item, _)) => {
match item.node {
item_fn(_, _, _, _, ref search_block) => {
visit::visit_block(search_block, ((), visitor))
}
_ => {
self.tcx.sess.span_bug(item.span,
"found non-function item \
in worklist?!")
}
}
}
Some(&ast_map::node_trait_method(trait_method, _, _)) => {
match *trait_method {
required(ref ty_method) => {
self.tcx.sess.span_bug(ty_method.span,
"found required method in \
worklist?!")
}
provided(ref method) => {
visit::visit_block(&method.body, ((), visitor))
}
}
}
Some(&ast_map::node_method(ref method, _, _)) => {
visit::visit_block(&method.body, ((), visitor))
}
Some(_) => {
let ident_interner = token::get_ident_interner();
let desc = ast_map::node_id_to_str(self.tcx.items,
search_item,
ident_interner);
self.tcx.sess.bug(fmt!("found unexpected thingy in \
worklist: %s",
desc))
}
None => {
self.tcx.sess.bug(fmt!("found unmapped ID in worklist: \
%d",
search_item))
}
}
}
}
// Step 3: Mark all destructors as reachable.
//
// XXX(pcwalton): This is a conservative overapproximation, but fixing
// this properly would result in the necessity of computing *type*
// reachability, which might result in a compile time loss.
fn mark_destructors_reachable(&self) {
for self.tcx.destructor_for_type.iter().advance
|(_, destructor_def_id)| {
if destructor_def_id.crate == local_crate {
self.reachable_symbols.insert(destructor_def_id.node);
}
}
}
}
pub fn find_reachable(tcx: ty::ctxt,
method_map: typeck::method_map,
crate: @crate)
-> @mut HashSet<node_id> {
// XXX(pcwalton): We only need to mark symbols that are exported. But this
// is more complicated than just looking at whether the symbol is `pub`,
// because it might be the target of a `pub use` somewhere. For now, I
// think we are fine, because you can't `pub use` something that wasn't
// exported due to the bug whereby `use` only looks through public
// modules even if you're inside the module the `use` appears in. When
// this bug is fixed, however, this code will need to be updated. Probably
// the easiest way to fix this (although a conservative overapproximation)
// is to have the name resolution pass mark all targets of a `pub use` as
// "must be reachable".
let reachable_context = ReachableContext::new(tcx, method_map);
// Step 1: Mark all public symbols, and add all public symbols that might
// be inlined to a worklist.
reachable_context.mark_public_symbols(crate);
// Step 2: Mark all symbols that the symbols on the worklist touch.
reachable_context.propagate();
// Step 3: Mark all destructors as reachable.
reachable_context.mark_destructors_reachable();
// Return the set of reachable symbols.
reachable_context.reachable_symbols
}

View file

@ -652,19 +652,17 @@ impl NameBindings {
match self.type_def {
None => None,
Some(ref type_def) => {
// FIXME (#3784): This is reallllly questionable.
// Perhaps the right thing to do is to merge def_mod
// and def_ty.
match (*type_def).type_def {
Some(type_def) => Some(type_def),
None => {
match (*type_def).module_def {
Some(module_def) => {
let module_def = &mut *module_def;
module_def.def_id.map(|def_id|
def_mod(*def_id))
match type_def.module_def {
Some(module) => {
match module.def_id {
Some(did) => Some(def_mod(did)),
None => None,
}
}
None => None
None => None,
}
}
}
@ -1230,49 +1228,29 @@ impl Resolver {
visit_item(item, (new_parent, visitor));
}
item_impl(_, trait_ref_opt, ty, ref methods) => {
// If this implements an anonymous trait and it has static
// methods, then add all the static methods within to a new
// module, if the type was defined within this module.
item_impl(_, None, ty, ref methods) => {
// If this implements an anonymous trait, then add all the
// methods within to a new module, if the type was defined
// within this module.
//
// FIXME (#3785): This is quite unsatisfactory. Perhaps we
// should modify anonymous traits to only be implementable in
// the same module that declared the type.
// Bail out early if there are no static methods.
let mut methods_seen = HashMap::new();
let mut has_static_methods = false;
for methods.iter().advance |method| {
match method.explicit_self.node {
sty_static => has_static_methods = true,
_ => {
// Make sure you can't define duplicate methods
let ident = method.ident;
let span = method.span;
let old_sp = methods_seen.find_or_insert(ident, span);
if *old_sp != span {
self.session.span_err(span,
fmt!("duplicate definition of method `%s`",
self.session.str_of(ident)));
self.session.span_note(*old_sp,
fmt!("first definition of method `%s` here",
self.session.str_of(ident)));
}
}
}
}
// If there are static methods, then create the module
// and add them.
match (trait_ref_opt, ty) {
(None, @Ty { node: ty_path(path, _, _), _ }) if
has_static_methods && path.idents.len() == 1 => {
// Create the module and add all methods.
match *ty {
Ty {
node: ty_path(path, _, _),
_
} if path.idents.len() == 1 => {
let name = path_to_ident(path);
let new_parent = match parent.children.find(&name) {
// It already exists
Some(&child) if child.get_module_if_available().is_some() &&
child.get_module().kind == ImplModuleKind => {
Some(&child) if child.get_module_if_available()
.is_some() &&
child.get_module().kind ==
ImplModuleKind => {
ModuleReducedGraphParent(child.get_module())
}
// Create the module
@ -1283,8 +1261,8 @@ impl Resolver {
ForbidDuplicateModules,
sp);
let parent_link = self.get_parent_link(new_parent,
ident);
let parent_link =
self.get_parent_link(new_parent, ident);
let def_id = local_def(item.id);
name_bindings.define_module(Public,
parent_link,
@ -1292,30 +1270,36 @@ impl Resolver {
ImplModuleKind,
sp);
ModuleReducedGraphParent(name_bindings.get_module())
ModuleReducedGraphParent(
name_bindings.get_module())
}
};
// For each static method...
// For each method...
for methods.iter().advance |method| {
match method.explicit_self.node {
// Add the method to the module.
let ident = method.ident;
let (method_name_bindings, _) =
self.add_child(ident,
new_parent,
ForbidDuplicateValues,
method.span);
let def = match method.explicit_self.node {
sty_static => {
// Add the static method to the
// module.
let ident = method.ident;
let (method_name_bindings, _) =
self.add_child(
ident,
new_parent,
ForbidDuplicateValues,
method.span);
let def = def_fn(local_def(method.id),
method.purity);
method_name_bindings.define_value(
Public, def, method.span);
// Static methods become `def_fn`s.
def_fn(local_def(method.id),
method.purity)
}
_ => {}
}
_ => {
// Non-static methods become
// `def_method`s.
def_method(local_def(method.id), None)
}
};
method_name_bindings.define_value(Public,
def,
method.span);
}
}
_ => {}
@ -1324,41 +1308,23 @@ impl Resolver {
visit_item(item, (parent, visitor));
}
item_impl(_, Some(_), ty, ref methods) => {
visit_item(item, (parent, visitor));
}
item_trait(_, _, ref methods) => {
let (name_bindings, new_parent) =
self.add_child(ident, parent, ForbidDuplicateTypes, sp);
// If the trait has static methods, then add all the static
// methods within to a new module.
//
// We only need to create the module if the trait has static
// methods, so check that first.
let mut has_static_methods = false;
for (*methods).iter().advance |method| {
let ty_m = trait_method_to_ty_method(method);
match ty_m.explicit_self.node {
sty_static => {
has_static_methods = true;
break;
}
_ => {}
}
}
// Create the module if necessary.
let module_parent_opt;
if has_static_methods {
let parent_link = self.get_parent_link(parent, ident);
name_bindings.define_module(privacy,
parent_link,
Some(local_def(item.id)),
TraitModuleKind,
sp);
module_parent_opt = Some(ModuleReducedGraphParent(
name_bindings.get_module()));
} else {
module_parent_opt = None;
}
// Add all the methods within to a new module.
let parent_link = self.get_parent_link(parent, ident);
name_bindings.define_module(privacy,
parent_link,
Some(local_def(item.id)),
TraitModuleKind,
sp);
let module_parent = ModuleReducedGraphParent(name_bindings.
get_module());
// Add the names of all the methods to the trait info.
let mut method_names = HashMap::new();
@ -1366,35 +1332,34 @@ impl Resolver {
let ty_m = trait_method_to_ty_method(method);
let ident = ty_m.ident;
// Add it to the trait info if not static,
// add it as a name in the trait module otherwise.
match ty_m.explicit_self.node {
sty_static => {
let def = def_static_method(
local_def(ty_m.id),
Some(local_def(item.id)),
ty_m.purity);
let (method_name_bindings, _) =
self.add_child(ident,
module_parent_opt.get(),
ForbidDuplicateValues,
ty_m.span);
method_name_bindings.define_value(Public,
def,
ty_m.span);
// Add it as a name in the trait module.
let def = match ty_m.explicit_self.node {
sty_static => {
// Static methods become `def_static_method`s.
def_static_method(local_def(ty_m.id),
Some(local_def(item.id)),
ty_m.purity)
}
_ => {
// Make sure you can't define duplicate methods
let old_sp = method_names.find_or_insert(ident, ty_m.span);
if *old_sp != ty_m.span {
self.session.span_err(ty_m.span,
fmt!("duplicate definition of method `%s`",
self.session.str_of(ident)));
self.session.span_note(*old_sp,
fmt!("first definition of method `%s` here",
self.session.str_of(ident)));
}
// Non-static methods become `def_method`s.
def_method(local_def(ty_m.id),
Some(local_def(item.id)))
}
};
let (method_name_bindings, _) =
self.add_child(ident,
module_parent,
ForbidDuplicateValues,
ty_m.span);
method_name_bindings.define_value(Public, def, ty_m.span);
// Add it to the trait info if not static.
match ty_m.explicit_self.node {
sty_static => {}
_ => {
method_names.insert(ident, ());
}
}
}
@ -1751,6 +1716,9 @@ impl Resolver {
child_name_bindings.define_type(privacy, def, dummy_sp());
self.structs.insert(def_id);
}
def_method(*) => {
// Ignored; handled elsewhere.
}
def_self(*) | def_arg(*) | def_local(*) |
def_prim_ty(*) | def_ty_param(*) | def_binding(*) |
def_use(*) | def_upvar(*) | def_region(*) |
@ -2091,8 +2059,12 @@ impl Resolver {
let mut first = true;
let mut result = ~"";
for idents.iter().advance |ident| {
if first { first = false; } else { result += "::" };
result += self.session.str_of(*ident);
if first {
first = false
} else {
result.push_str("::")
}
result.push_str(self.session.str_of(*ident));
};
return result;
}
@ -2387,7 +2359,8 @@ impl Resolver {
}
match type_result {
BoundResult(target_module, name_bindings) => {
debug!("(resolving single import) found type target");
debug!("(resolving single import) found type target: %?",
name_bindings.type_def.get().type_def);
import_resolution.type_target =
Some(Target(target_module, name_bindings));
import_resolution.type_id = directive.id;
@ -3186,12 +3159,14 @@ impl Resolver {
Some(def_id) if def_id.crate == local_crate => {
// OK. Continue.
debug!("(recording exports for module subtree) recording \
exports for local module");
exports for local module `%s`",
self.module_to_str(module_));
}
None => {
// Record exports for the root module.
debug!("(recording exports for module subtree) recording \
exports for root module");
exports for root module `%s`",
self.module_to_str(module_));
}
Some(_) => {
// Bail out.
@ -3265,22 +3240,8 @@ impl Resolver {
pub fn add_exports_for_module(@mut self,
exports2: &mut ~[Export2],
module_: @mut Module) {
for module_.children.iter().advance |(ident, namebindings)| {
debug!("(computing exports) maybe export '%s'",
self.session.str_of(*ident));
self.add_exports_of_namebindings(&mut *exports2,
*ident,
*namebindings,
TypeNS,
false);
self.add_exports_of_namebindings(&mut *exports2,
*ident,
*namebindings,
ValueNS,
false);
}
for module_.import_resolutions.iter().advance |(ident, importresolution)| {
for module_.import_resolutions.iter().advance |(ident,
importresolution)| {
if importresolution.privacy != Public {
debug!("(computing exports) not reexporting private `%s`",
self.session.str_of(*ident));
@ -4514,8 +4475,8 @@ impl Resolver {
if path.global {
return self.resolve_crate_relative_path(path,
self.xray_context,
namespace);
self.xray_context,
namespace);
}
if path.idents.len() > 1 {
@ -4943,6 +4904,22 @@ impl Resolver {
// Write the result into the def map.
debug!("(resolving expr) resolved `%s`",
self.idents_to_str(path.idents));
// First-class methods are not supported yet; error
// out here.
match def {
def_method(*) => {
self.session.span_err(expr.span,
"first-class methods \
are not supported");
self.session.span_note(expr.span,
"call the method \
using the `.` \
syntax");
}
_ => {}
}
self.record_def(expr.id, def);
}
None => {
@ -5072,6 +5049,9 @@ impl Resolver {
self.trait_map.insert(expr.id, @mut traits);
}
expr_method_call(_, _, ident, _, _, _) => {
debug!("(recording candidate traits for expr) recording \
traits for %d",
expr.id);
let traits = self.search_for_traits_containing_method(ident);
self.trait_map.insert(expr.id, @mut traits);
}
@ -5147,7 +5127,6 @@ impl Resolver {
debug!("(searching for traits containing method) looking for '%s'",
self.session.str_of(name));
let mut found_traits = ~[];
let mut search_module = self.current_module;
match self.method_map.find(&name) {
@ -5411,7 +5390,7 @@ pub fn resolve_crate(session: Session,
-> CrateMap {
let resolver = @mut Resolver(session, lang_items, crate);
resolver.resolve();
let Resolver{def_map, export_map2, trait_map, _} = copy *resolver;
let Resolver { def_map, export_map2, trait_map, _ } = copy *resolver;
CrateMap {
def_map: def_map,
exp_map2: export_map2,

View file

@ -98,15 +98,15 @@ pub fn trans_inline_asm(bcx: block, ia: &ast::inline_asm) -> block {
if !ia.clobbers.is_empty() && !clobbers.is_empty() {
clobbers = fmt!("%s,%s", ia.clobbers, clobbers);
} else {
clobbers += ia.clobbers;
clobbers.push_str(ia.clobbers);
};
// Add the clobbers to our constraints list
if !clobbers.is_empty() && !constraints.is_empty() {
constraints += ",";
constraints += clobbers;
if clobbers.len() != 0 && constraints.len() != 0 {
constraints.push_char(',');
constraints.push_str(clobbers);
} else {
constraints += clobbers;
constraints.push_str(clobbers);
}
debug!("Asm Constraints: %?", constraints);

View file

@ -54,7 +54,6 @@ use middle::trans::machine;
use middle::trans::machine::{llalign_of_min, llsize_of};
use middle::trans::meth;
use middle::trans::monomorphize;
use middle::trans::reachable;
use middle::trans::tvec;
use middle::trans::type_of;
use middle::trans::type_of::*;
@ -65,7 +64,7 @@ use util::ppaux::{Repr, ty_to_str};
use middle::trans::type_::Type;
use core::hash;
use core::hashmap::{HashMap};
use core::hashmap::{HashMap, HashSet};
use core::int;
use core::io;
use core::libc::c_uint;
@ -2437,7 +2436,6 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::node_id) -> ValueRef {
}
}
ast_map::node_method(m, _, pth) => {
exprt = true;
register_method(ccx, id, pth, m)
}
ast_map::node_foreign_item(ni, _, _, pth) => {
@ -2511,7 +2509,7 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::node_id) -> ValueRef {
variant))
}
};
if !(exprt || ccx.reachable.contains(&id)) {
if !exprt && !ccx.reachable.contains(&id) {
lib::llvm::SetLinkage(val, lib::llvm::InternalLinkage);
}
ccx.item_vals.insert(id, val);
@ -2818,13 +2816,13 @@ pub fn crate_ctxt_to_encode_parms<'r>(cx: &'r CrateContext, ie: encoder::encode_
encoder::EncodeParams {
diag: diag,
tcx: cx.tcx,
reachable: cx.reachable,
reexports2: cx.exp_map2,
item_symbols: item_symbols,
discrim_symbols: discrim_symbols,
link_meta: link_meta,
cstore: cx.sess.cstore,
encode_inlined_item: ie
encode_inlined_item: ie,
reachable: cx.reachable,
}
}
@ -2890,16 +2888,12 @@ pub fn trans_crate(sess: session::Session,
tcx: ty::ctxt,
output: &Path,
emap2: resolve::ExportMap2,
maps: astencode::Maps) -> (ContextRef, ModuleRef, LinkMeta) {
reachable_map: @mut HashSet<ast::node_id>,
maps: astencode::Maps)
-> (ContextRef, ModuleRef, LinkMeta) {
let mut symbol_hasher = hash::default_state();
let link_meta = link::build_link_meta(sess, crate, output, &mut symbol_hasher);
let reachable = reachable::find_reachable(
&crate.node.module,
emap2,
tcx,
maps.method_map
);
// Append ".rc" to crate name as LLVM module identifier.
//
@ -2917,8 +2911,15 @@ pub fn trans_crate(sess: session::Session,
// sess.bug("couldn't enable multi-threaded LLVM");
// }
let ccx = @mut CrateContext::new(sess, llmod_id, tcx, emap2, maps,
symbol_hasher, link_meta, reachable);
let ccx = @mut CrateContext::new(sess,
llmod_id,
tcx,
emap2,
maps,
symbol_hasher,
link_meta,
reachable_map);
{
let _icx = push_ctxt("data");
trans_constants(ccx, crate);

View file

@ -68,13 +68,13 @@ pub fn count_insn(cx: block, category: &str) {
i = 0u;
while i < len {
i = *mm.get(&v[i]);
s += "/";
s += v[i];
s.push_char('/');
s.push_str(v[i]);
i += 1u;
}
s += "/";
s += category;
s.push_char('/');
s.push_str(category);
let n = match h.find(&s) {
Some(&n) => n,
@ -610,12 +610,21 @@ pub fn GEP(cx: block, Pointer: ValueRef, Indices: &[ValueRef]) -> ValueRef {
// Simple wrapper around GEP that takes an array of ints and wraps them
// in C_i32()
//
// FIXME #6571: Use a small-vector optimization to avoid allocations here.
#[inline]
pub fn GEPi(cx: block, base: ValueRef, ixs: &[uint]) -> ValueRef {
let v = do vec::map(ixs) |i| { C_i32(*i as i32) };
count_insn(cx, "gepi");
return InBoundsGEP(cx, base, v);
// Small vector optimization. This should catch 100% of the cases that
// we care about.
if ixs.len() < 16 {
let mut small_vec = [ C_i32(0), ..16 ];
for ixs.iter().enumerate().advance |(i, &ix)| {
small_vec[i] = C_i32(ix as i32)
}
InBoundsGEP(cx, base, small_vec.slice(0, ixs.len()))
} else {
let v = do vec::map(ixs) |i| { C_i32(*i as i32) };
count_insn(cx, "gepi");
InBoundsGEP(cx, base, v)
}
}
pub fn InBoundsGEP(cx: block, Pointer: ValueRef, Indices: &[ValueRef]) -> ValueRef {

View file

@ -139,12 +139,14 @@ impl ABIInfo for ARM_ABIInfo {
attrs.push(attr);
}
let mut (ret_ty, ret_attr) = if ret_def {
let (ret_ty, ret_attr) = if ret_def {
classify_ret_ty(rty)
} else {
(LLVMType { cast: false, ty: Type::void() }, None)
};
let mut ret_ty = ret_ty;
let sret = ret_attr.is_some();
if sret {
arg_tys.unshift(ret_ty);

View file

@ -178,12 +178,14 @@ impl ABIInfo for MIPS_ABIInfo {
atys: &[Type],
rty: Type,
ret_def: bool) -> FnType {
let mut (ret_ty, ret_attr) = if ret_def {
let (ret_ty, ret_attr) = if ret_def {
classify_ret_ty(rty)
} else {
(LLVMType { cast: false, ty: Type::void() }, None)
};
let mut ret_ty = ret_ty;
let sret = ret_attr.is_some();
let mut arg_tys = ~[];
let mut attrs = ~[];

View file

@ -39,7 +39,11 @@ enum RegClass {
Memory
}
impl Type {
trait TypeMethods {
fn is_reg_ty(&self) -> bool;
}
impl TypeMethods for Type {
fn is_reg_ty(&self) -> bool {
match self.kind() {
Integer | Pointer | Float | Double => true,
@ -360,8 +364,9 @@ fn x86_64_tys(atys: &[Type],
arg_tys.push(ty);
attrs.push(attr);
}
let mut (ret_ty, ret_attr) = x86_64_ty(rty, |cls| cls.is_ret_bysret(),
let (ret_ty, ret_attr) = x86_64_ty(rty, |cls| cls.is_ret_bysret(),
StructRetAttribute);
let mut ret_ty = ret_ty;
let sret = ret_attr.is_some();
if sret {
arg_tys = vec::append(~[ret_ty], arg_tys);

View file

@ -146,7 +146,7 @@ pub fn trans(bcx: block, expr: @ast::expr) -> Callee {
ast::def_static(*) | ast::def_ty(*) | ast::def_prim_ty(*) |
ast::def_use(*) | ast::def_typaram_binder(*) |
ast::def_region(*) | ast::def_label(*) | ast::def_ty_param(*) |
ast::def_self_ty(*) => {
ast::def_self_ty(*) | ast::def_method(*) => {
bcx.tcx().sess.span_bug(
ref_expr.span,
fmt!("Cannot translate def %? \
@ -319,9 +319,10 @@ pub fn trans_fn_ref_with_vtables(
// Should be either intra-crate or inlined.
assert_eq!(def_id.crate, ast::local_crate);
let mut (val, must_cast) =
let (val, must_cast) =
monomorphize::monomorphic_fn(ccx, def_id, &substs,
vtables, opt_impl_did, Some(ref_id));
let mut val = val;
if must_cast && ref_id != 0 {
// Monotype of the REFERENCE to the function (type params
// are subst'd)

View file

@ -964,9 +964,12 @@ pub fn path_str(sess: session::Session, p: &[path_elt]) -> ~str {
for p.iter().advance |e| {
match *e {
ast_map::path_name(s) | ast_map::path_mod(s) => {
if first { first = false; }
else { r += "::"; }
r += sess.str_of(s);
if first {
first = false
} else {
r.push_str("::")
}
r.push_str(sess.str_of(s));
}
}
}

View file

@ -21,7 +21,6 @@ use middle::resolve;
use middle::trans::adt;
use middle::trans::base;
use middle::trans::debuginfo;
use middle::trans::reachable;
use middle::trans::type_use;
use middle::ty;
@ -48,7 +47,7 @@ pub struct CrateContext {
intrinsics: HashMap<&'static str, ValueRef>,
item_vals: HashMap<ast::node_id, ValueRef>,
exp_map2: resolve::ExportMap2,
reachable: reachable::map,
reachable: @mut HashSet<ast::node_id>,
item_symbols: HashMap<ast::node_id, ~str>,
link_meta: LinkMeta,
enum_sizes: HashMap<ty::t, uint>,
@ -115,10 +114,15 @@ pub struct CrateContext {
}
impl CrateContext {
pub fn new(sess: session::Session, name: &str, tcx: ty::ctxt,
emap2: resolve::ExportMap2, maps: astencode::Maps,
symbol_hasher: hash::State, link_meta: LinkMeta,
reachable: reachable::map) -> CrateContext {
pub fn new(sess: session::Session,
name: &str,
tcx: ty::ctxt,
emap2: resolve::ExportMap2,
maps: astencode::Maps,
symbol_hasher: hash::State,
link_meta: LinkMeta,
reachable: @mut HashSet<ast::node_id>)
-> CrateContext {
unsafe {
let llcx = llvm::LLVMContextCreate();
set_task_llcx(llcx);

View file

@ -907,9 +907,12 @@ fn trans_lvalue_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
let scaled_ix = Mul(bcx, ix_val, vt.llunit_size);
base::maybe_name_value(bcx.ccx(), scaled_ix, "scaled_ix");
let mut (bcx, base, len) =
let (bcx, base, len) =
base_datum.get_vec_base_and_len(bcx, index_expr.span,
index_expr.id, 0);
let mut bcx = bcx;
let mut base = base;
let mut len = len;
if ty::type_is_str(base_ty) {
// account for null terminator in the case of string

View file

@ -37,7 +37,6 @@ pub mod foreign;
pub mod reflect;
pub mod debuginfo;
pub mod type_use;
pub mod reachable;
pub mod machine;
pub mod adt;
pub mod asm;

View file

@ -1,246 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Finds items that are externally reachable, to determine which items
// need to have their metadata (and possibly their AST) serialized.
// All items that can be referred to through an exported name are
// reachable, and when a reachable thing is inline or generic, it
// makes all other generics or inline functions that it references
// reachable as well.
use core::prelude::*;
use middle::resolve;
use middle::ty;
use middle::typeck;
use core::hashmap::HashSet;
use syntax::ast;
use syntax::ast::*;
use syntax::ast_util::def_id_of_def;
use syntax::attr;
use syntax::codemap;
use syntax::print::pprust::expr_to_str;
use syntax::{visit, ast_map};
// The result of the reachability pass: the set of node ids whose metadata
// (and possibly AST) must be serialized because external crates can reach them.
pub type map = @HashSet<node_id>;

// Shared state threaded through every traverse_* function below.
struct ctx<'self> {
    // Per-module export lists computed by resolve.
    exp_map2: resolve::ExportMap2,
    // Type context, used to map def ids back to AST nodes.
    tcx: ty::ctxt,
    // Method-resolution results; needed to chase statically-dispatched calls.
    method_map: typeck::method_map,
    // The reachable set being accumulated (aliased mutably for the pass).
    rmap: &'self mut HashSet<node_id>,
}
// Entry point of the pass. Walks the crate's module tree and returns the
// set of node ids reachable through exported names; per the file header,
// reachable inline/generic bodies also pull in everything they reference.
pub fn find_reachable(crate_mod: &_mod, exp_map2: resolve::ExportMap2,
                      tcx: ty::ctxt, method_map: typeck::method_map) -> map {
    let mut rmap = HashSet::new();
    {
        // Scope the @mut ctx so the &mut borrow of rmap ends before we
        // move rmap into the returned managed box.
        let cx = @mut ctx {
            exp_map2: exp_map2,
            tcx: tcx,
            method_map: method_map,
            rmap: &mut rmap
        };
        traverse_public_mod(cx, ast::crate_node_id, crate_mod);
        // Impls are visited separately: see traverse_all_resources_and_impls.
        traverse_all_resources_and_impls(cx, crate_mod);
    }
    return @rmap;
}
// Marks every recorded export of module `mod_id` as reachable.
// Returns true iff the module had at least one export in the export map;
// a false return tells the caller the module has no explicit export list.
fn traverse_exports(cx: @mut ctx, mod_id: node_id) -> bool {
    let mut found_export = false;
    match cx.exp_map2.find(&mod_id) {
        Some(ref exp2s) => {
            for (*exp2s).iter().advance |e2| {
                found_export = true;
                traverse_def_id(cx, e2.def_id)
            };
        }
        None => ()
    }
    return found_export;
}
// Marks the definition `did` reachable, dispatching on what kind of AST
// node it maps to. Cross-crate defs are ignored: only local items need
// their metadata serialized by this crate.
fn traverse_def_id(cx: @mut ctx, did: def_id) {
    if did.crate != local_crate { return; }
    match cx.tcx.items.find(&did.node) {
        None => (), // This can happen for self, for example
        Some(&ast_map::node_item(item, _)) => traverse_public_item(cx, item),
        // A method is reachable via its impl, so recurse on the impl's id.
        Some(&ast_map::node_method(_, impl_id, _)) => traverse_def_id(cx, impl_id),
        Some(&ast_map::node_foreign_item(item, _, _, _)) => {
            let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
            cx.rmap.insert(item.id);
        }
        Some(&ast_map::node_variant(ref v, _, _)) => {
            let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
            cx.rmap.insert(v.node.id);
        }
        _ => ()
    }
}
// Traverses a module: if it has an explicit export list, only the exports
// are reachable (handled inside traverse_exports); otherwise every item
// in the module is implicitly exported and must be traversed.
fn traverse_public_mod(cx: @mut ctx, mod_id: node_id, m: &_mod) {
    if !traverse_exports(cx, mod_id) {
        // No exports, so every local item is exported
        for m.items.iter().advance |item| {
            traverse_public_item(cx, *item);
        }
    }
}
// Marks one exported item reachable and recurses into whatever it exposes:
// module contents, foreign items, inline/generic function bodies, impl
// methods, struct ctors, and the types named by a type alias.
fn traverse_public_item(cx: @mut ctx, item: @item) {
    {
        // FIXME #6021: naming rmap shouldn't be necessary
        let cx = &mut *cx;
        let rmap: &mut HashSet<node_id> = cx.rmap;
        // Memoize: each item is processed at most once, which also breaks
        // cycles between mutually-referencing items.
        if rmap.contains(&item.id) { return; }
        rmap.insert(item.id);
    }
    match item.node {
        item_mod(ref m) => traverse_public_mod(cx, item.id, m),
        item_foreign_mod(ref nm) => {
            if !traverse_exports(cx, item.id) {
                for nm.items.iter().advance |item| {
                    let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
                    cx.rmap.insert(item.id);
                }
            }
        }
        item_fn(_, _, _, ref generics, ref blk) => {
            // Generic or #[inline] fns are instantiated in downstream
            // crates, so everything their bodies reference is reachable too.
            if generics.ty_params.len() > 0u ||
               attr::find_inline_attr(item.attrs) != attr::ia_none {
                traverse_inline_body(cx, blk);
            }
        }
        item_impl(ref generics, _, _, ref ms) => {
            for ms.iter().advance |m| {
                // Same rule as item_fn, checking both the impl's and the
                // method's own type parameters.
                if generics.ty_params.len() > 0u ||
                   m.generics.ty_params.len() > 0u ||
                   attr::find_inline_attr(m.attrs) != attr::ia_none
                {
                    {
                        let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
                        cx.rmap.insert(m.id);
                    }
                    traverse_inline_body(cx, &m.body);
                }
            }
        }
        item_struct(ref struct_def, _) => {
            // A tuple-like struct's ctor is a callable, so mark it if present.
            for struct_def.ctor_id.iter().advance |&ctor_id| {
                let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
                cx.rmap.insert(ctor_id);
            }
        }
        item_ty(t, _) => {
            // A type alias makes the aliased type's defs reachable; walk it
            // with a visitor that only overrides visit_ty.
            traverse_ty(t, (cx,
                visit::mk_vt(@visit::Visitor {visit_ty: traverse_ty,
                                              ..*visit::default_visitor()})))
        }
        // These carry no further reachable code of their own here.
        item_static(*) |
        item_enum(*) | item_trait(*) => (),
        item_mac(*) => fail!("item macros unimplemented")
    }
}
// Visitor callback: marks a type node reachable and follows any paths it
// names (e.g. the target of a type alias) plus their type arguments.
fn traverse_ty<'a>(ty: @Ty, (cx, v): (@mut ctx<'a>, visit::vt<@mut ctx<'a>>)) {
    {
        let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
        // Memoize on the type node's id to avoid rewalking shared types.
        if cx.rmap.contains(&ty.id) { return; }
        cx.rmap.insert(ty.id);
    }
    match ty.node {
        ty_path(p, _bounds, p_id) => {
            match cx.tcx.def_map.find(&p_id) {
                // Kind of a hack to check this here, but I'm not sure what else
                // to do
                Some(&def_prim_ty(_)) => { /* do nothing */ }
                Some(&d) => traverse_def_id(cx, def_id_of_def(d)),
                None => { /* do nothing -- but should we fail here? */ }
            }
            // Also walk the path's type arguments (e.g. T in Foo<T>).
            for p.types.iter().advance |t| {
                (v.visit_ty)(*t, (cx, v));
            }
        }
        _ => visit::visit_ty(ty, (cx, v))
    }
}
// Walks the body of an inline/generic function, marking reachable every
// def referenced by path expressions and every statically-dispatched
// method call, since downstream crates will instantiate this body.
fn traverse_inline_body(cx: @mut ctx, body: &blk) {
    // Expression callback: chase paths and static method calls.
    fn traverse_expr<'a>(e: @expr, (cx, v): (@mut ctx<'a>,
                                             visit::vt<@mut ctx<'a>>)) {
        match e.node {
            expr_path(_) => {
                match cx.tcx.def_map.find(&e.id) {
                    Some(&d) => {
                        traverse_def_id(cx, def_id_of_def(d));
                    }
                    // A path expr with no resolution is a compiler bug by
                    // this phase, so report it loudly.
                    None => cx.tcx.sess.span_bug(
                        e.span,
                        fmt!("Unbound node id %? while traversing %s",
                             e.id,
                             expr_to_str(e, cx.tcx.sess.intr())))
                }
            }
            expr_method_call(*) => {
                match cx.method_map.find(&e.id) {
                    // Only statically-resolved targets are known defs;
                    // dynamic dispatch has no single callee to mark.
                    Some(&typeck::method_map_entry {
                        origin: typeck::method_static(did),
                        _
                    }) => {
                        traverse_def_id(cx, did);
                    }
                    Some(_) => {}
                    None => {
                        cx.tcx.sess.span_bug(e.span, "expr_method_call not in \
                                                      method map");
                    }
                }
            }
            _ => ()
        }
        visit::visit_expr(e, (cx, v));
    }
    // Don't ignore nested items: for example if a generic fn contains a
    // generic impl (as in deque::create), we need to monomorphize the
    // impl as well
    fn traverse_item(i: @item, (cx, _v): (@mut ctx, visit::vt<@mut ctx>)) {
        traverse_public_item(cx, i);
    }
    visit::visit_block(body, (cx, visit::mk_vt(@visit::Visitor {
        visit_expr: traverse_expr,
        visit_item: traverse_item,
        ..*visit::default_visitor()
    })));
}
// Sweeps the whole crate for impl items and traverses each one, regardless
// of export status — impls aren't named by export lists, so the main
// export-driven walk would otherwise miss them. Expressions are skipped
// (visit_expr is a no-op); only item structure is visited.
fn traverse_all_resources_and_impls(cx: @mut ctx, crate_mod: &_mod) {
    visit::visit_mod(
        crate_mod,
        codemap::dummy_sp(),
        0,
        (cx,
         visit::mk_vt(@visit::Visitor {
            visit_expr: |_e, (_cx, _v)| { },
            visit_item: |i, (cx, v)| {
                // Recurse first so nested impls inside modules are found.
                visit::visit_item(i, (cx, v));
                match i.node {
                    item_impl(*) => {
                        traverse_public_item(cx, i);
                    }
                    _ => ()
                }
            },
            ..*visit::default_visitor()
        })));
}

View file

@ -686,8 +686,8 @@ pub type BuiltinBounds = EnumSet<BuiltinBound>;
pub enum BuiltinBound {
BoundCopy,
BoundStatic,
BoundOwned,
BoundConst,
BoundSend,
BoundFreeze,
BoundSized,
}
@ -699,8 +699,8 @@ pub fn AllBuiltinBounds() -> BuiltinBounds {
let mut set = EnumSet::empty();
set.add(BoundCopy);
set.add(BoundStatic);
set.add(BoundOwned);
set.add(BoundConst);
set.add(BoundSend);
set.add(BoundFreeze);
set.add(BoundSized);
set
}
@ -1838,8 +1838,8 @@ impl TypeContents {
match bb {
BoundCopy => self.is_copy(cx),
BoundStatic => self.is_static(cx),
BoundConst => self.is_const(cx),
BoundOwned => self.is_owned(cx),
BoundFreeze => self.is_freezable(cx),
BoundSend => self.is_sendable(cx),
BoundSized => self.is_sized(cx),
}
}
@ -1865,23 +1865,23 @@ impl TypeContents {
TC_BORROWED_POINTER
}
pub fn is_owned(&self, cx: ctxt) -> bool {
!self.intersects(TypeContents::nonowned(cx))
pub fn is_sendable(&self, cx: ctxt) -> bool {
!self.intersects(TypeContents::nonsendable(cx))
}
pub fn nonowned(_cx: ctxt) -> TypeContents {
TC_MANAGED + TC_BORROWED_POINTER + TC_NON_OWNED
pub fn nonsendable(_cx: ctxt) -> TypeContents {
TC_MANAGED + TC_BORROWED_POINTER + TC_NON_SENDABLE
}
pub fn contains_managed(&self) -> bool {
self.intersects(TC_MANAGED)
}
pub fn is_const(&self, cx: ctxt) -> bool {
!self.intersects(TypeContents::nonconst(cx))
pub fn is_freezable(&self, cx: ctxt) -> bool {
!self.intersects(TypeContents::nonfreezable(cx))
}
pub fn nonconst(_cx: ctxt) -> TypeContents {
pub fn nonfreezable(_cx: ctxt) -> TypeContents {
TC_MUTABLE
}
@ -1908,12 +1908,12 @@ impl TypeContents {
// this assertion.
assert!(self.intersects(TC_OWNED_POINTER));
}
let tc = TC_MANAGED + TC_DTOR + TypeContents::owned(cx);
let tc = TC_MANAGED + TC_DTOR + TypeContents::sendable(cx);
self.intersects(tc)
}
pub fn owned(_cx: ctxt) -> TypeContents {
//! Any kind of owned contents.
pub fn sendable(_cx: ctxt) -> TypeContents {
//! Any kind of sendable contents.
TC_OWNED_POINTER + TC_OWNED_VEC
}
}
@ -1969,8 +1969,8 @@ static TC_ONCE_CLOSURE: TypeContents = TypeContents{bits: 0b0001_0000_0000};
/// An enum with no variants.
static TC_EMPTY_ENUM: TypeContents = TypeContents{bits: 0b0010_0000_0000};
/// Contains a type marked with `#[non_owned]`
static TC_NON_OWNED: TypeContents = TypeContents{bits: 0b0100_0000_0000};
/// Contains a type marked with `#[non_sendable]`
static TC_NON_SENDABLE: TypeContents = TypeContents{bits: 0b0100_0000_0000};
/// Is a bare vector, str, function, trait, etc (only relevant at top level).
static TC_DYNAMIC_SIZE: TypeContents = TypeContents{bits: 0b1000_0000_0000};
@ -1986,12 +1986,12 @@ pub fn type_is_static(cx: ctxt, t: ty::t) -> bool {
type_contents(cx, t).is_static(cx)
}
pub fn type_is_owned(cx: ctxt, t: ty::t) -> bool {
type_contents(cx, t).is_owned(cx)
pub fn type_is_sendable(cx: ctxt, t: ty::t) -> bool {
type_contents(cx, t).is_sendable(cx)
}
pub fn type_is_const(cx: ctxt, t: ty::t) -> bool {
type_contents(cx, t).is_const(cx)
pub fn type_is_freezable(cx: ctxt, t: ty::t) -> bool {
type_contents(cx, t).is_freezable(cx)
}
pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
@ -2045,7 +2045,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
let _i = indenter();
let result = match get(ty).sty {
// Scalar and unique types are sendable, constant, and owned
// Scalar and unique types are sendable, freezable, and durable
ty_nil | ty_bot | ty_bool | ty_int(_) | ty_uint(_) | ty_float(_) |
ty_bare_fn(_) | ty_ptr(_) => {
TC_NONE
@ -2060,7 +2060,8 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
}
ty_box(mt) => {
TC_MANAGED + statically_sized(nonowned(tc_mt(cx, mt, cache)))
TC_MANAGED +
statically_sized(nonsendable(tc_mt(cx, mt, cache)))
}
ty_trait(_, _, store, mutbl, bounds) => {
@ -2069,7 +2070,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
ty_rptr(r, mt) => {
borrowed_contents(r, mt.mutbl) +
statically_sized(nonowned(tc_mt(cx, mt, cache)))
statically_sized(nonsendable(tc_mt(cx, mt, cache)))
}
ty_uniq(mt) => {
@ -2081,12 +2082,13 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
}
ty_evec(mt, vstore_box) => {
TC_MANAGED + statically_sized(nonowned(tc_mt(cx, mt, cache)))
TC_MANAGED +
statically_sized(nonsendable(tc_mt(cx, mt, cache)))
}
ty_evec(mt, vstore_slice(r)) => {
borrowed_contents(r, mt.mutbl) +
statically_sized(nonowned(tc_mt(cx, mt, cache)))
statically_sized(nonsendable(tc_mt(cx, mt, cache)))
}
ty_evec(mt, vstore_fixed(_)) => {
@ -2118,7 +2120,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
TC_NONE,
|tc, f| tc + tc_mt(cx, f.mt, cache));
if ty::has_dtor(cx, did) {
res += TC_DTOR;
res = res + TC_DTOR;
}
apply_tc_attr(cx, did, res)
}
@ -2203,10 +2205,10 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
fn apply_tc_attr(cx: ctxt, did: def_id, mut tc: TypeContents) -> TypeContents {
if has_attr(cx, did, "mutable") {
tc += TC_MUTABLE;
tc = tc + TC_MUTABLE;
}
if has_attr(cx, did, "non_owned") {
tc += TC_NON_OWNED;
if has_attr(cx, did, "non_sendable") {
tc = tc + TC_NON_SENDABLE;
}
tc
}
@ -2227,7 +2229,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
mc + rc
}
fn nonowned(pointee: TypeContents) -> TypeContents {
fn nonsendable(pointee: TypeContents) -> TypeContents {
/*!
*
* Given a non-owning pointer to some type `T` with
@ -2291,11 +2293,11 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
BoundCopy if store == UniqTraitStore
=> TC_NONCOPY_TRAIT,
BoundCopy => TC_NONE, // @Trait/&Trait are copyable either way
BoundStatic if bounds.contains_elem(BoundOwned)
=> TC_NONE, // Owned bound implies static bound.
BoundStatic if bounds.contains_elem(BoundSend)
=> TC_NONE, // Send bound implies static bound.
BoundStatic => TC_BORROWED_POINTER, // Useful for "@Trait:'static"
BoundOwned => TC_NON_OWNED,
BoundConst => TC_MUTABLE,
BoundSend => TC_NON_SENDABLE,
BoundFreeze => TC_MUTABLE,
BoundSized => TC_NONE, // don't care if interior is sized
};
}
@ -2314,8 +2316,8 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
tc = tc - match bound {
BoundCopy => TypeContents::noncopyable(cx),
BoundStatic => TypeContents::nonstatic(cx),
BoundOwned => TypeContents::nonowned(cx),
BoundConst => TypeContents::nonconst(cx),
BoundSend => TypeContents::nonsendable(cx),
BoundFreeze => TypeContents::nonfreezable(cx),
// The dynamic-size bit can be removed at pointer-level, etc.
BoundSized => TypeContents::dynamically_sized(cx),
};

View file

@ -753,7 +753,7 @@ fn conv_builtin_bounds(tcx: ty::ctxt, ast_bounds: &Option<OptVec<ast::TyParamBou
//! Converts a list of bounds from the AST into a `BuiltinBounds`
//! struct. Reports an error if any of the bounds that appear
//! in the AST refer to general traits and not the built-in traits
//! like `Copy` or `Owned`. Used to translate the bounds that
//! like `Copy` or `Send`. Used to translate the bounds that
//! appear in closure and trait types, where only builtin bounds are
//! legal.
//! If no bounds were specified, we choose a "default" bound based on
@ -788,9 +788,9 @@ fn conv_builtin_bounds(tcx: ty::ctxt, ast_bounds: &Option<OptVec<ast::TyParamBou
}
builtin_bounds
},
// ~Trait is sugar for ~Trait:Owned.
// ~Trait is sugar for ~Trait:Send.
(&None, ty::UniqTraitStore) => {
let mut set = ty::EmptyBuiltinBounds(); set.add(ty::BoundOwned); set
let mut set = ty::EmptyBuiltinBounds(); set.add(ty::BoundSend); set
}
// @Trait is sugar for @Trait:'static.
// &'static Trait is sugar for &'static Trait:'static.
@ -807,19 +807,19 @@ pub fn try_add_builtin_trait(tcx: ty::ctxt,
trait_def_id: ast::def_id,
builtin_bounds: &mut ty::BuiltinBounds) -> bool {
//! Checks whether `trait_ref` refers to one of the builtin
//! traits, like `Copy` or `Owned`, and adds the corresponding
//! traits, like `Copy` or `Send`, and adds the corresponding
//! bound to the set `builtin_bounds` if so. Returns true if `trait_ref`
//! is a builtin trait.
let li = &tcx.lang_items;
if trait_def_id == li.owned_trait() {
builtin_bounds.add(ty::BoundOwned);
if trait_def_id == li.send_trait() {
builtin_bounds.add(ty::BoundSend);
true
} else if trait_def_id == li.copy_trait() {
builtin_bounds.add(ty::BoundCopy);
true
} else if trait_def_id == li.const_trait() {
builtin_bounds.add(ty::BoundConst);
} else if trait_def_id == li.freeze_trait() {
builtin_bounds.add(ty::BoundFreeze);
true
} else if trait_def_id == li.sized_trait() {
builtin_bounds.add(ty::BoundSized);

View file

@ -538,7 +538,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::pat, expected: ty::t) {
check_pointer_pat(pcx, Managed, inner, pat.id, pat.span, expected);
}
ast::pat_uniq(inner) => {
check_pointer_pat(pcx, Owned, inner, pat.id, pat.span, expected);
check_pointer_pat(pcx, Send, inner, pat.id, pat.span, expected);
}
ast::pat_region(inner) => {
check_pointer_pat(pcx, Borrowed, inner, pat.id, pat.span, expected);
@ -624,7 +624,7 @@ pub fn check_pointer_pat(pcx: &pat_ctxt,
ty::ty_box(e_inner) if pointer_kind == Managed => {
check_inner(e_inner);
}
ty::ty_uniq(e_inner) if pointer_kind == Owned => {
ty::ty_uniq(e_inner) if pointer_kind == Send => {
check_inner(e_inner);
}
ty::ty_rptr(_, e_inner) if pointer_kind == Borrowed => {
@ -641,7 +641,7 @@ pub fn check_pointer_pat(pcx: &pat_ctxt,
Some(expected),
fmt!("%s pattern", match pointer_kind {
Managed => "an @-box",
Owned => "a ~-box",
Send => "a ~-box",
Borrowed => "an &-pointer"
}),
None);
@ -651,4 +651,4 @@ pub fn check_pointer_pat(pcx: &pat_ctxt,
}
#[deriving(Eq)]
enum PointerKind { Managed, Owned, Borrowed }
enum PointerKind { Managed, Send, Borrowed }

View file

@ -1088,16 +1088,19 @@ impl<'self> LookupContext<'self> {
_ => {}
}
return match candidate.method_ty.explicit_self {
let result = match candidate.method_ty.explicit_self {
sty_static => {
debug!("(is relevant?) explicit self is static");
false
}
sty_value => {
debug!("(is relevant?) explicit self is by-value");
self.fcx.can_mk_subty(rcvr_ty, candidate.rcvr_ty).is_ok()
}
sty_region(_, m) => {
debug!("(is relevant?) explicit self is a region");
match ty::get(rcvr_ty).sty {
ty::ty_rptr(_, mt) => {
mutability_matches(mt.mutbl, m) &&
@ -1109,6 +1112,7 @@ impl<'self> LookupContext<'self> {
}
sty_box(m) => {
debug!("(is relevant?) explicit self is a box");
match ty::get(rcvr_ty).sty {
ty::ty_box(mt) => {
mutability_matches(mt.mutbl, m) &&
@ -1120,6 +1124,7 @@ impl<'self> LookupContext<'self> {
}
sty_uniq(m) => {
debug!("(is relevant?) explicit self is a unique pointer");
match ty::get(rcvr_ty).sty {
ty::ty_uniq(mt) => {
mutability_matches(mt.mutbl, m) &&
@ -1131,6 +1136,10 @@ impl<'self> LookupContext<'self> {
}
};
debug!("(is relevant?) %s", if result { "yes" } else { "no" });
return result;
fn mutability_matches(self_mutbl: ast::mutability,
candidate_mutbl: ast::mutability) -> bool {
//! True if `self_mutbl <: candidate_mutbl`

View file

@ -213,6 +213,13 @@ impl PurityState {
}
}
/// Whether `check_binop` allows overloaded operators to be invoked.
#[deriving(Eq)]
enum AllowOverloadedOperatorsFlag {
AllowOverloadedOperators,
DontAllowOverloadedOperators,
}
pub struct FnCtxt {
// Number of errors that had been reported when we started
// checking this function. On exit, if we find that *more* errors
@ -784,10 +791,6 @@ impl FnCtxt {
ast_ty_to_ty(self, self, ast_t)
}
pub fn expr_to_str(&self, expr: @ast::expr) -> ~str {
expr.repr(self.tcx())
}
pub fn pat_to_str(&self, pat: @ast::pat) -> ~str {
pat.repr(self.tcx())
}
@ -796,9 +799,8 @@ impl FnCtxt {
match self.inh.node_types.find(&ex.id) {
Some(&t) => t,
None => {
self.tcx().sess.bug(
fmt!("no type for %s in fcx %s",
self.expr_to_str(ex), self.tag()));
self.tcx().sess.bug(fmt!("no type for expr in fcx %s",
self.tag()));
}
}
}
@ -1138,7 +1140,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
expr: @ast::expr,
expected: Option<ty::t>,
unifier: &fn()) {
debug!(">> typechecking %s", fcx.expr_to_str(expr));
debug!(">> typechecking");
fn check_method_argument_types(
fcx: @mut FnCtxt,
@ -1391,6 +1393,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
method_map.insert(expr.id, (*entry));
}
None => {
debug!("(checking method call) failing expr is %d", expr.id);
fcx.type_error_message(expr.span,
|actual| {
fmt!("type `%s` does not implement any method in scope \
@ -1487,7 +1491,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
lhs: @ast::expr,
rhs: @ast::expr,
// Used only in the error case
expected_result: Option<ty::t>
expected_result: Option<ty::t>,
allow_overloaded_operators: AllowOverloadedOperatorsFlag
) {
let tcx = fcx.ccx.tcx;
@ -1537,8 +1542,30 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
}
let result_t = check_user_binop(fcx, callee_id, expr, lhs, lhs_t, op, rhs,
expected_result);
// Check for overloaded operators if allowed.
let result_t;
if allow_overloaded_operators == AllowOverloadedOperators {
result_t = check_user_binop(fcx,
callee_id,
expr,
lhs,
lhs_t,
op,
rhs,
expected_result);
} else {
fcx.type_error_message(expr.span,
|actual| {
fmt!("binary operation %s cannot be \
applied to type `%s`",
ast_util::binop_to_str(op),
actual)
},
lhs_t,
None);
result_t = ty::mk_err();
}
fcx.write_ty(expr.id, result_t);
if ty::type_is_error(result_t) {
fcx.write_ty(rhs.id, result_t);
@ -1704,8 +1731,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
ty::mk_closure(tcx, fn_ty_copy)
};
debug!("check_expr_fn_with_unifier %s fty=%s",
fcx.expr_to_str(expr),
debug!("check_expr_fn_with_unifier fty=%s",
fcx.infcx().ty_to_str(fty));
fcx.write_ty(expr.id, fty);
@ -2229,7 +2255,15 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
fcx.write_ty(id, typ);
}
ast::expr_binary(callee_id, op, lhs, rhs) => {
check_binop(fcx, callee_id, expr, op, lhs, rhs, expected);
check_binop(fcx,
callee_id,
expr,
op,
lhs,
rhs,
expected,
AllowOverloadedOperators);
let lhs_ty = fcx.expr_ty(lhs);
let rhs_ty = fcx.expr_ty(rhs);
if ty::type_is_error(lhs_ty) ||
@ -2242,7 +2276,15 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
}
}
ast::expr_assign_op(callee_id, op, lhs, rhs) => {
check_binop(fcx, callee_id, expr, op, lhs, rhs, expected);
check_binop(fcx,
callee_id,
expr,
op,
lhs,
rhs,
expected,
DontAllowOverloadedOperators);
let lhs_t = fcx.expr_ty(lhs);
let result_t = fcx.expr_ty(expr);
demand::suptype(fcx, expr.span, result_t, lhs_t);
@ -3247,6 +3289,9 @@ pub fn ty_param_bounds_and_ty_for_def(fcx: @mut FnCtxt,
ast::def_self_ty(*) => {
fcx.ccx.tcx.sess.span_bug(sp, "expected value but found self ty");
}
ast::def_method(*) => {
fcx.ccx.tcx.sess.span_bug(sp, "expected value but found method");
}
}
}

View file

@ -230,7 +230,7 @@ fn constrain_bindings_in_pat(pat: @ast::pat, rcx: @mut Rcx) {
}
fn visit_expr(expr: @ast::expr, (rcx, v): (@mut Rcx, rvt)) {
debug!("regionck::visit_expr(e=%s)", rcx.fcx.expr_to_str(expr));
debug!("regionck::visit_expr(e=?)");
let has_method_map = rcx.fcx.inh.method_map.contains_key(&expr.id);
@ -520,8 +520,7 @@ fn constrain_derefs(rcx: @mut Rcx,
let tcx = rcx.fcx.tcx();
let r_deref_expr = ty::re_scope(deref_expr.id);
for uint::range(0, derefs) |i| {
debug!("constrain_derefs(deref_expr=%s, derefd_ty=%s, derefs=%?/%?",
rcx.fcx.expr_to_str(deref_expr),
debug!("constrain_derefs(deref_expr=?, derefd_ty=%s, derefs=%?/%?",
rcx.fcx.infcx().ty_to_str(derefd_ty),
i, derefs);
@ -576,8 +575,7 @@ fn constrain_index(rcx: @mut Rcx,
let tcx = rcx.fcx.tcx();
debug!("constrain_index(index_expr=%s, indexed_ty=%s",
rcx.fcx.expr_to_str(index_expr),
debug!("constrain_index(index_expr=?, indexed_ty=%s",
rcx.fcx.infcx().ty_to_str(indexed_ty));
let r_index_expr = ty::re_scope(index_expr.id);
@ -808,7 +806,7 @@ pub mod guarantor {
* to the lifetime of its guarantor (if any).
*/
debug!("guarantor::for_addr_of(base=%s)", rcx.fcx.expr_to_str(base));
debug!("guarantor::for_addr_of(base=?)");
let guarantor = guarantor(rcx, base);
link(rcx, expr.span, expr.id, guarantor);
@ -842,8 +840,7 @@ pub mod guarantor {
* region pointers.
*/
debug!("guarantor::for_autoref(expr=%s, autoref=%?)",
rcx.fcx.expr_to_str(expr), autoref);
debug!("guarantor::for_autoref(autoref=%?)", autoref);
let mut expr_ct = categorize_unadjusted(rcx, expr);
debug!(" unadjusted cat=%?", expr_ct.cat);
@ -970,7 +967,7 @@ pub mod guarantor {
* `&expr`).
*/
debug!("guarantor(expr=%s)", rcx.fcx.expr_to_str(expr));
debug!("guarantor()");
match expr.node {
ast::expr_unary(_, ast::deref, b) => {
let cat = categorize(rcx, b);
@ -1034,7 +1031,7 @@ pub mod guarantor {
}
fn categorize(rcx: @mut Rcx, expr: @ast::expr) -> ExprCategorization {
debug!("categorize(expr=%s)", rcx.fcx.expr_to_str(expr));
debug!("categorize()");
let mut expr_ct = categorize_unadjusted(rcx, expr);
debug!("before adjustments, cat=%?", expr_ct.cat);
@ -1086,7 +1083,7 @@ pub mod guarantor {
fn categorize_unadjusted(rcx: @mut Rcx,
expr: @ast::expr)
-> ExprCategorizationType {
debug!("categorize_unadjusted(expr=%s)", rcx.fcx.expr_to_str(expr));
debug!("categorize_unadjusted()");
let guarantor = {
if rcx.fcx.inh.method_map.contains_key(&expr.id) {

View file

@ -17,7 +17,7 @@
use core::prelude::*;
use metadata::csearch::{each_path, get_impl_trait};
use metadata::csearch::{get_impls_for_mod};
use metadata::csearch;
use metadata::cstore::{CStore, iter_crate_data};
use metadata::decoder::{dl_def, dl_field, dl_impl};
use middle::resolve::{Impl, MethodInfo};
@ -855,92 +855,81 @@ impl CoherenceChecker {
// External crate handling
pub fn add_impls_for_module(&self,
impls_seen: &mut HashSet<def_id>,
crate_store: @mut CStore,
module_def_id: def_id) {
let implementations = get_impls_for_mod(crate_store,
module_def_id,
None);
for implementations.iter().advance |implementation| {
debug!("coherence: adding impl from external crate: %s",
ty::item_path_str(self.crate_context.tcx,
implementation.did));
pub fn add_external_impl(&self,
impls_seen: &mut HashSet<def_id>,
crate_store: @mut CStore,
impl_def_id: def_id) {
let implementation = csearch::get_impl(crate_store, impl_def_id);
// Make sure we don't visit the same implementation
// multiple times.
if !impls_seen.insert(implementation.did) {
// Skip this one.
loop;
}
// Good. Continue.
debug!("coherence: adding impl from external crate: %s",
ty::item_path_str(self.crate_context.tcx, implementation.did));
let self_type = lookup_item_type(self.crate_context.tcx,
implementation.did);
let associated_traits = get_impl_trait(self.crate_context.tcx,
implementation.did);
// Make sure we don't visit the same implementation multiple times.
if !impls_seen.insert(implementation.did) {
// Skip this one.
return
}
// Good. Continue.
// Do a sanity check to make sure that inherent methods have base
// types.
if associated_traits.is_none() {
match get_base_type_def_id(self.inference_context,
dummy_sp(),
self_type.ty) {
None => {
let session = self.crate_context.tcx.sess;
session.bug(fmt!(
"no base type for external impl \
with no trait: %s (type %s)!",
session.str_of(implementation.ident),
ty_to_str(self.crate_context.tcx,self_type.ty)));
}
Some(_) => {
// Nothing to do.
}
}
}
let mut implementation = *implementation;
// Record all the trait methods.
for associated_traits.iter().advance |trait_ref| {
self.instantiate_default_methods(implementation.did,
&**trait_ref);
// Could we avoid these copies when we don't need them?
let mut methods = /*bad?*/ copy implementation.methods;
self.add_provided_methods_to_impl(
&mut methods,
&trait_ref.def_id,
&implementation.did);
implementation = @Impl { methods: methods,
.. *implementation };
self.add_trait_method(trait_ref.def_id, implementation);
}
// Add the implementation to the mapping from
// implementation to base type def ID, if there is a base
// type for this implementation.
let self_type = lookup_item_type(self.crate_context.tcx,
implementation.did);
let associated_traits = get_impl_trait(self.crate_context.tcx,
implementation.did);
// Do a sanity check to make sure that inherent methods have base
// types.
if associated_traits.is_none() {
match get_base_type_def_id(self.inference_context,
dummy_sp(),
self_type.ty) {
dummy_sp(),
self_type.ty) {
None => {
// Nothing to do.
let session = self.crate_context.tcx.sess;
session.bug(fmt!("no base type for external impl with no \
trait: %s (type %s)!",
session.str_of(implementation.ident),
ty_to_str(self.crate_context.tcx,
self_type.ty)));
}
Some(base_type_def_id) => {
// inherent methods apply to `impl Type` but not
// `impl Trait for Type`:
if associated_traits.is_none() {
self.add_inherent_method(base_type_def_id,
implementation);
}
Some(_) => {} // Nothing to do.
}
}
self.base_type_def_ids.insert(implementation.did,
base_type_def_id);
// Record all the trait methods.
let mut implementation = @implementation;
for associated_traits.iter().advance |trait_ref| {
self.instantiate_default_methods(implementation.did,
*trait_ref);
// XXX(sully): We could probably avoid this copy if there are no
// default methods.
let mut methods = copy implementation.methods;
self.add_provided_methods_to_impl(&mut methods,
&trait_ref.def_id,
&implementation.did);
implementation = @Impl {
methods: methods,
..*implementation
};
self.add_trait_method(trait_ref.def_id, implementation);
}
// Add the implementation to the mapping from implementation to base
// type def ID, if there is a base type for this implementation.
match get_base_type_def_id(self.inference_context,
dummy_sp(),
self_type.ty) {
None => {} // Nothing to do.
Some(base_type_def_id) => {
// inherent methods apply to `impl Type` but not
// `impl Trait for Type`:
if associated_traits.is_none() {
self.add_inherent_method(base_type_def_id,
implementation);
}
self.base_type_def_ids.insert(implementation.did,
base_type_def_id);
}
}
}
@ -952,22 +941,14 @@ impl CoherenceChecker {
let crate_store = self.crate_context.tcx.sess.cstore;
do iter_crate_data(crate_store) |crate_number, _crate_metadata| {
self.add_impls_for_module(&mut impls_seen,
crate_store,
def_id { crate: crate_number,
node: 0 });
for each_path(crate_store, crate_number) |_, def_like, _| {
match def_like {
dl_def(def_mod(def_id)) => {
self.add_impls_for_module(&mut impls_seen,
crate_store,
def_id);
}
dl_def(_) | dl_impl(_) | dl_field => {
// Skip this.
loop;
dl_impl(def_id) => {
self.add_external_impl(&mut impls_seen,
crate_store,
def_id)
}
dl_def(_) | dl_field => loop, // Skip this.
}
}
}

View file

@ -81,10 +81,20 @@ pub fn collect_item_types(ccx: @mut CrateCtxt, crate: &ast::crate) {
})));
}
impl CrateCtxt {
pub trait ToTy {
fn to_ty<RS:region_scope + Copy + 'static>(
&self, rs: &RS, ast_ty: &ast::Ty) -> ty::t
{
&self,
rs: &RS,
ast_ty: &ast::Ty)
-> ty::t;
}
impl ToTy for CrateCtxt {
fn to_ty<RS:region_scope + Copy + 'static>(
&self,
rs: &RS,
ast_ty: &ast::Ty)
-> ty::t {
ast_ty_to_ty(self, rs, ast_ty)
}
}
@ -1165,7 +1175,7 @@ pub fn ty_generics(ccx: &CrateCtxt,
* enum consisting of a newtyped Ty or a region) to ty's
* notion of ty param bounds, which can either be user-defined
* traits, or one of the four built-in traits (formerly known
* as kinds): Const, Copy, and Send.
* as kinds): Freeze, Copy, and Send.
*/
let mut param_bounds = ty::ParamBounds {

View file

@ -64,7 +64,8 @@ use middle::typeck::infer::glb::Glb;
use middle::typeck::infer::lub::Lub;
use middle::typeck::infer::sub::Sub;
use middle::typeck::infer::to_str::InferStr;
use middle::typeck::infer::{cres, InferCtxt, ures};
use middle::typeck::infer::unify::{InferCtxtMethods, UnifyInferCtxtMethods};
use middle::typeck::infer::{InferCtxt, cres, ures};
use util::common::indent;
use core::result::{iter_vec2, map_vec2};

View file

@ -71,15 +71,53 @@ impl LatticeValue for ty::t {
}
}
impl CombineFields {
pub fn var_sub_var<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(&self,
a_id:
V,
b_id:
V)
->
ures {
pub trait CombineFieldsLatticeMethods {
fn var_sub_var<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(&self,
a_id: V,
b_id: V)
-> ures;
/// make variable a subtype of T
fn var_sub_t<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
&self,
a_id: V,
b: T)
-> ures;
fn t_sub_var<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
&self,
a: T,
b_id: V)
-> ures;
fn merge_bnd<T:Copy + InferStr + LatticeValue>(
&self,
a: &Bound<T>,
b: &Bound<T>,
lattice_op: LatticeOp<T>)
-> cres<Bound<T>>;
fn set_var_to_merged_bounds<T:Copy + InferStr + LatticeValue,
V:Copy+Eq+ToStr+Vid+UnifyVid<Bounds<T>>>(
&self,
v_id: V,
a: &Bounds<T>,
b: &Bounds<T>,
rank: uint)
-> ures;
fn bnds<T:Copy + InferStr + LatticeValue>(
&self,
a: &Bound<T>,
b: &Bound<T>)
-> ures;
}
impl CombineFieldsLatticeMethods for CombineFields {
fn var_sub_var<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
&self,
a_id: V,
b_id: V)
-> ures {
/*!
*
* Make one variable a subtype of another variable. This is a
@ -127,12 +165,12 @@ impl CombineFields {
}
/// make variable a subtype of T
pub fn var_sub_t<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(&self,
a_id: V,
b: T)
-> ures
{
fn var_sub_t<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
&self,
a_id: V,
b: T)
-> ures {
/*!
*
* Make a variable (`a_id`) a subtype of the concrete type `b` */
@ -151,12 +189,12 @@ impl CombineFields {
a_id, a_bounds, b_bounds, node_a.rank)
}
pub fn t_sub_var<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(&self,
a: T,
b_id: V)
-> ures
{
fn t_sub_var<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
&self,
a: T,
b_id: V)
-> ures {
/*!
*
* Make a concrete type (`a`) a subtype of the variable `b_id` */
@ -175,12 +213,12 @@ impl CombineFields {
b_id, a_bounds, b_bounds, node_b.rank)
}
pub fn merge_bnd<T:Copy + InferStr + LatticeValue>(&self,
a: &Bound<T>,
b: &Bound<T>,
lattice_op:
LatticeOp<T>)
-> cres<Bound<T>> {
fn merge_bnd<T:Copy + InferStr + LatticeValue>(
&self,
a: &Bound<T>,
b: &Bound<T>,
lattice_op: LatticeOp<T>)
-> cres<Bound<T>> {
/*!
*
* Combines two bounds into a more general bound. */
@ -202,14 +240,14 @@ impl CombineFields {
}
}
pub fn set_var_to_merged_bounds<T:Copy + InferStr + LatticeValue,
V:Copy+Eq+ToStr+Vid+UnifyVid<Bounds<T>>>(
&self,
v_id: V,
a: &Bounds<T>,
b: &Bounds<T>,
rank: uint)
-> ures {
fn set_var_to_merged_bounds<T:Copy + InferStr + LatticeValue,
V:Copy+Eq+ToStr+Vid+UnifyVid<Bounds<T>>>(
&self,
v_id: V,
a: &Bounds<T>,
b: &Bounds<T>,
rank: uint)
-> ures {
/*!
*
* Updates the bounds for the variable `v_id` to be the intersection
@ -264,10 +302,10 @@ impl CombineFields {
uok()
}
pub fn bnds<T:Copy + InferStr + LatticeValue>(&self,
a: &Bound<T>,
b: &Bound<T>)
-> ures {
fn bnds<T:Copy + InferStr + LatticeValue>(&self,
a: &Bound<T>,
b: &Bound<T>)
-> ures {
debug!("bnds(%s <: %s)", a.inf_str(self.infcx),
b.inf_str(self.infcx));
let _r = indenter();

View file

@ -54,7 +54,7 @@ use middle::ty;
use middle::typeck::infer::{Bounds, cyclic_ty, fixup_err, fres, InferCtxt};
use middle::typeck::infer::{region_var_bound_by_region_var, unresolved_ty};
use middle::typeck::infer::to_str::InferStr;
use middle::typeck::infer::unify::Root;
use middle::typeck::infer::unify::{Root, UnifyInferCtxtMethods};
use util::common::{indent, indenter};
use util::ppaux::ty_to_str;

View file

@ -18,6 +18,7 @@ use middle::typeck::infer::combine::*;
use middle::typeck::infer::cres;
use middle::typeck::infer::glb::Glb;
use middle::typeck::infer::InferCtxt;
use middle::typeck::infer::lattice::CombineFieldsLatticeMethods;
use middle::typeck::infer::lub::Lub;
use middle::typeck::infer::to_str::InferStr;
use util::common::{indent, indenter};

View file

@ -40,9 +40,31 @@ pub trait UnifyVid<T> {
-> &'v mut ValsAndBindings<Self, T>;
}
impl InferCtxt {
pub fn get<T:Copy, V:Copy+Eq+Vid+UnifyVid<T>>(&mut self, vid: V)
-> Node<V, T> {
pub trait UnifyInferCtxtMethods {
fn get<T:Copy,
V:Copy + Eq + Vid + UnifyVid<T>>(
&mut self,
vid: V)
-> Node<V, T>;
fn set<T:Copy + InferStr,
V:Copy + Vid + ToStr + UnifyVid<T>>(
&mut self,
vid: V,
new_v: VarValue<V, T>);
fn unify<T:Copy + InferStr,
V:Copy + Vid + ToStr + UnifyVid<T>>(
&mut self,
node_a: &Node<V, T>,
node_b: &Node<V, T>)
-> (V, uint);
}
impl UnifyInferCtxtMethods for InferCtxt {
fn get<T:Copy,
V:Copy + Eq + Vid + UnifyVid<T>>(
&mut self,
vid: V)
-> Node<V, T> {
/*!
*
* Find the root node for `vid`. This uses the standard
@ -84,10 +106,11 @@ impl InferCtxt {
}
}
pub fn set<T:Copy + InferStr,
V:Copy + Vid + ToStr + UnifyVid<T>>(&mut self,
vid: V,
new_v: VarValue<V, T>) {
fn set<T:Copy + InferStr,
V:Copy + Vid + ToStr + UnifyVid<T>>(
&mut self,
vid: V,
new_v: VarValue<V, T>) {
/*!
*
* Sets the value for `vid` to `new_v`. `vid` MUST be a root node!
@ -102,11 +125,12 @@ impl InferCtxt {
vb.vals.insert(vid.to_uint(), new_v);
}
pub fn unify<T:Copy + InferStr,
V:Copy + Vid + ToStr + UnifyVid<T>>(&mut self,
node_a: &Node<V, T>,
node_b: &Node<V, T>)
-> (V, uint) {
fn unify<T:Copy + InferStr,
V:Copy + Vid + ToStr + UnifyVid<T>>(
&mut self,
node_a: &Node<V, T>,
node_b: &Node<V, T>)
-> (V, uint) {
// Rank optimization: if you don't know what it is, check
// out <http://en.wikipedia.org/wiki/Disjoint-set_data_structure>
@ -155,14 +179,31 @@ pub fn mk_err<T:SimplyUnifiable>(a_is_expected: bool,
}
}
impl InferCtxt {
pub fn simple_vars<T:Copy+Eq+InferStr+SimplyUnifiable,
V:Copy+Eq+Vid+ToStr+UnifyVid<Option<T>>>(&mut self,
a_is_expected:
bool,
a_id: V,
b_id: V)
-> ures {
pub trait InferCtxtMethods {
fn simple_vars<T:Copy + Eq + InferStr + SimplyUnifiable,
V:Copy + Eq + Vid + ToStr + UnifyVid<Option<T>>>(
&mut self,
a_is_expected: bool,
a_id: V,
b_id: V)
-> ures;
fn simple_var_t<T:Copy + Eq + InferStr + SimplyUnifiable,
V:Copy + Eq + Vid + ToStr + UnifyVid<Option<T>>>(
&mut self,
a_is_expected: bool,
a_id: V,
b: T)
-> ures;
}
impl InferCtxtMethods for InferCtxt {
fn simple_vars<T:Copy + Eq + InferStr + SimplyUnifiable,
V:Copy + Eq + Vid + ToStr + UnifyVid<Option<T>>>(
&mut self,
a_is_expected: bool,
a_id: V,
b_id: V)
-> ures {
/*!
*
* Unifies two simple variables. Because simple variables do
@ -194,13 +235,13 @@ impl InferCtxt {
return uok();
}
pub fn simple_var_t<T:Copy+Eq+InferStr+SimplyUnifiable,
V:Copy+Eq+Vid+ToStr+UnifyVid<Option<T>>>(&mut self,
a_is_expected
: bool,
a_id: V,
b: T)
-> ures {
fn simple_var_t<T:Copy + Eq + InferStr + SimplyUnifiable,
V:Copy + Eq + Vid + ToStr + UnifyVid<Option<T>>>(
&mut self,
a_is_expected: bool,
a_id: V,
b: T)
-> ures {
/*!
*
* Sets the value of the variable `a_id` to `b`. Because

View file

@ -80,6 +80,7 @@ pub mod middle {
pub mod moves;
pub mod entry;
pub mod effect;
pub mod reachable;
}
pub mod front {

View file

@ -488,7 +488,9 @@ pub fn parameterized(cx: ctxt,
}
};
strs += vec::map(tps, |t| ty_to_str(cx, *t));
for tps.iter().advance |t| {
strs.push(ty_to_str(cx, *t))
}
if strs.len() > 0u {
fmt!("%s<%s>", base, strs.connect(","))
@ -575,8 +577,8 @@ impl Repr for ty::ParamBounds {
res.push(match b {
ty::BoundCopy => ~"Copy",
ty::BoundStatic => ~"'static",
ty::BoundOwned => ~"Owned",
ty::BoundConst => ~"Const",
ty::BoundSend => ~"Send",
ty::BoundFreeze => ~"Freeze",
ty::BoundSized => ~"Sized",
});
}
@ -781,8 +783,8 @@ impl UserString for ty::BuiltinBound {
match *self {
ty::BoundCopy => ~"Copy",
ty::BoundStatic => ~"'static",
ty::BoundOwned => ~"Owned",
ty::BoundConst => ~"Const",
ty::BoundSend => ~"Send",
ty::BoundFreeze => ~"Freeze",
ty::BoundSized => ~"Sized",
}
}

View file

@ -99,7 +99,7 @@ fn act(po: &Port<Msg>, source: @str, parse: Parser) {
}
}
pub fn exec<T:Owned>(
pub fn exec<T:Send>(
srv: Srv,
f: ~fn(ctxt: Ctxt) -> T
) -> T {

View file

@ -101,7 +101,7 @@ fn fold_item(
}
}
fn parse_item_attrs<T:Owned>(
fn parse_item_attrs<T:Send>(
srv: astsrv::Srv,
id: doc::AstId,
parse_attrs: ~fn(a: ~[ast::attribute]) -> T) -> T {

View file

@ -152,7 +152,7 @@ pub fn header_kind(doc: doc::ItemTag) -> ~str {
~"Function"
}
doc::ConstTag(_) => {
~"Const"
~"Freeze"
}
doc::EnumTag(_) => {
~"Enum"
@ -192,11 +192,11 @@ pub fn header_name(doc: doc::ItemTag) -> ~str {
let mut trait_part = ~"";
for doc.trait_types.iter().enumerate().advance |(i, trait_type)| {
if i == 0 {
trait_part += " of ";
trait_part.push_str(" of ");
} else {
trait_part += ", ";
trait_part.push_str(", ");
}
trait_part += *trait_type;
trait_part.push_str(*trait_type);
}
fmt!("%s for %s%s", trait_part, *self_ty, bounds)
}
@ -786,7 +786,7 @@ mod test {
#[test]
fn should_write_const_header() {
let markdown = render(~"static a: bool = true;");
assert!(markdown.contains("## Const `a`\n\n"));
assert!(markdown.contains("## Freeze `a`\n\n"));
}
#[test]

View file

@ -130,7 +130,7 @@ fn generic_writer(process: ~fn(markdown: ~str)) -> Writer {
let mut keep_going = true;
while keep_going {
match po.recv() {
Write(s) => markdown += s,
Write(s) => markdown.push_str(s),
Done => keep_going = false
}
}
@ -214,7 +214,7 @@ fn future_writer() -> (Writer, future::Future<~str>) {
let mut res = ~"";
loop {
match port.recv() {
Write(s) => res += s,
Write(s) => res.push_str(s),
Done => break
}
}

View file

@ -70,7 +70,7 @@ fn make_doc_from_pages(page_port: &PagePort) -> doc::Doc {
loop {
let val = page_port.recv();
if val.is_some() {
pages += [val.unwrap()];
pages.push(val.unwrap());
} else {
break;
}

666
src/librusti/rusti.rc Normal file
View file

@ -0,0 +1,666 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* rusti - A REPL using the JIT backend
*
* Rusti works by serializing state between lines of input. This means that each
* line can be run in a separate task, and the only limiting factor is that all
* local bound variables are encodable.
*
* This is accomplished by feeding in generated input to rustc for execution in
* the JIT compiler. Currently input actually gets fed in three times to get
* information about the program.
*
* - Pass #1
* In this pass, the input is simply thrown at the parser and the input comes
* back. This validates the structure of the program, and at this stage the
* global items (fns, structs, impls, traits, etc.) are filtered from the
* input into the "global namespace". These declarations shadow all previous
* declarations of an item by the same name.
*
* - Pass #2
* After items have been stripped, the remaining input is passed to rustc
* along with all local variables declared (initialized to nothing). This pass
* runs up to typechecking. From this, we can learn about the types of each
* bound variable, what variables are bound, and also ensure that all the
* types are encodable (the input can actually be run).
*
* - Pass #3
* Finally, a program is generated to deserialize the local variable state,
* run the code input, and then reserialize all bindings back into a local
* hash map. Once this code runs, the input has fully been run and the REPL
* waits for new input.
*
* Encoding/decoding is done with EBML, and there is simply a map of ~str ->
* ~[u8] maintaining the values of each local binding (by name).
*/
#[link(name = "rusti",
vers = "0.7-pre",
uuid = "7fb5bf52-7d45-4fee-8325-5ad3311149fc",
url = "https://github.com/mozilla/rust/tree/master/src/rusti")];
#[license = "MIT/ASL2"];
#[crate_type = "lib"];
extern mod extra;
extern mod rustc;
extern mod syntax;
use std::{libc, io, os, task, vec};
use std::cell::Cell;
use extra::rl;
use rustc::driver::{driver, session};
use syntax::{ast, diagnostic};
use syntax::ast_util::*;
use syntax::parse::token;
use syntax::print::pprust;
use program::Program;
use utils::*;
mod program;
pub mod utils;
/**
 * A structure shared across REPL instances for storing history
 * such as statements and view items. I wish the AST was sendable.
 */
pub struct Repl {
    prompt: ~str,               // prompt shown before each input line ("rusti> ")
    binary: ~str,               // path of the running rusti executable (argv[0])
    running: bool,              // main loop flag; set to false by the :exit command
    lib_search_paths: ~[~str],  // extra library search paths accumulated via :load
    program: Program,           // accumulated program state (items, locals) fed to rustc
}
// Action to do after reading a :command
enum CmdAction {
    action_none,            // command fully handled; nothing further to do
    action_run_line(~str),  // evaluate the carried string as a regular line of input
}
/// Run an input string in a Repl, returning the new Repl.
///
/// Implements the three-pass scheme described in the module docs: parse and
/// filter the input into the program (stage 1), typecheck a generated test
/// program to learn the new locals' types (stage 2), JIT-run the real code
/// (stage 3), then fold the resulting bindings back into the Repl (stage 4).
fn run(mut repl: Repl, input: ~str) -> Repl {
    // Build some necessary rustc boilerplate for compiling things
    let binary = repl.binary.to_managed();
    let options = @session::options {
        crate_type: session::unknown_crate,
        binary: binary,
        addl_lib_search_paths: @mut repl.lib_search_paths.map(|p| Path(*p)),
        jit: true,
        .. copy *session::basic_options()
    };
    // Because we assume that everything is encodable (and assert so), add some
    // extra helpful information if the error crops up. Otherwise people are
    // bound to be very confused when they find out code is running that they
    // never typed in...
    let sess = driver::build_session(options, |cm, msg, lvl| {
        diagnostic::emit(cm, msg, lvl);
        if msg.contains("failed to find an implementation of trait") &&
           msg.contains("extra::serialize::Encodable") {
            diagnostic::emit(cm,
                             "Currrently rusti serializes bound locals between \
                              different lines of input. This means that all \
                              values of local variables need to be encodable, \
                              and this type isn't encodable",
                             diagnostic::note);
        }
    });
    let intr = token::get_ident_interner();

    //
    // Stage 1: parse the input and filter it into the program (as necessary)
    //
    debug!("parsing: %s", input);
    let crate = parse_input(sess, binary, input);
    let mut to_run = ~[];       // statements to run (emitted back into code)
    let new_locals = @mut ~[];  // new locals being defined
    let mut result = None;      // resultant expression (to print via pp)
    do find_main(crate, sess) |blk| {
        // Fish out all the view items, be sure to record 'extern mod' items
        // differently because they must appear before all 'use' statements
        for blk.node.view_items.iter().advance |vi| {
            let s = do with_pp(intr) |pp, _| {
                pprust::print_view_item(pp, *vi);
            };
            match vi.node {
                ast::view_item_extern_mod(*) => {
                    repl.program.record_extern(s);
                }
                ast::view_item_use(*) => { repl.program.record_view_item(s); }
            }
        }

        // Iterate through all of the block's statements, inserting them into
        // the correct portions of the program
        for blk.node.stmts.iter().advance |stmt| {
            let s = do with_pp(intr) |pp, _| { pprust::print_stmt(pp, *stmt); };
            match stmt.node {
                ast::stmt_decl(d, _) => {
                    match d.node {
                        ast::decl_item(it) => {
                            let name = sess.str_of(it.ident);
                            match it.node {
                                // Structs are treated specially because to make
                                // them at all usable they need to be decorated
                                // with #[deriving(Encodable, Decodable)]
                                ast::item_struct(*) => {
                                    repl.program.record_struct(name, s);
                                }
                                // Item declarations are hoisted out of main()
                                _ => { repl.program.record_item(name, s); }
                            }
                        }
                        // Local declarations must be specially dealt with,
                        // record all local declarations for use later on
                        ast::decl_local(l) => {
                            let mutbl = l.node.is_mutbl;
                            // record every binding name introduced by the
                            // pattern (a single `let` may bind several)
                            do each_binding(l) |path, _| {
                                let s = do with_pp(intr) |pp, _| {
                                    pprust::print_path(pp, path, false);
                                };
                                new_locals.push((s, mutbl));
                            }
                            to_run.push(s);
                        }
                    }
                }
                // run statements with expressions (they have effects)
                ast::stmt_mac(*) | ast::stmt_semi(*) | ast::stmt_expr(*) => {
                    to_run.push(s);
                }
            }
        }
        // A trailing block expression, if any, becomes the value to print
        result = do blk.node.expr.map_consume |e| {
            do with_pp(intr) |pp, _| { pprust::print_expr(pp, e); }
        };
    }

    // return fast for empty inputs
    if to_run.len() == 0 && result.is_none() {
        return repl;
    }

    //
    // Stage 2: run everything up to typeck to learn the types of the new
    //          variables introduced into the program
    //
    info!("Learning about the new types in the program");
    repl.program.set_cache(); // before register_new_vars (which changes them)
    let input = to_run.connect("\n");
    let test = repl.program.test_code(input, &result, *new_locals);
    debug!("testing with ^^^^^^ %?", (||{ println(test) })());
    let dinput = driver::str_input(test.to_managed());
    let cfg = driver::build_configuration(sess, binary, &dinput);
    let outputs = driver::build_output_filenames(&dinput, &None, &None, [], sess);
    // stop at cu_typeck: we only need the type context, not code generation
    let (crate, tcx) = driver::compile_upto(sess, copy cfg, &dinput,
                                            driver::cu_typeck, Some(outputs));

    // Once we're typechecked, record the types of all local variables defined
    // in this input
    do find_main(crate.expect("crate after cu_typeck"), sess) |blk| {
        repl.program.register_new_vars(blk, tcx.expect("tcx after cu_typeck"));
    }

    //
    // Stage 3: Actually run the code in the JIT
    //
    info!("actually running code");
    let code = repl.program.code(input, &result);
    debug!("actually running ^^^^^^ %?", (||{ println(code) })());
    let input = driver::str_input(code.to_managed());
    let cfg = driver::build_configuration(sess, binary, &input);
    let outputs = driver::build_output_filenames(&input, &None, &None, [], sess);
    // fresh session for the real run, with plain diagnostics this time
    let sess = driver::build_session(options, diagnostic::emit);
    driver::compile_upto(sess, cfg, &input, driver::cu_everything,
                         Some(outputs));

    //
    // Stage 4: Inform the program that computation is done so it can update all
    //          local variable bindings.
    //
    info!("cleaning up after code");
    repl.program.consume_cache();

    return repl;

    // Wrap the raw input in `fn main() { ... }` and parse it, returning the
    // resulting crate (parse-only: compilation stops at cu_parse).
    fn parse_input(sess: session::Session, binary: @str,
                   input: &str) -> @ast::crate {
        let code = fmt!("fn main() {\n %s \n}", input);
        let input = driver::str_input(code.to_managed());
        let cfg = driver::build_configuration(sess, binary, &input);
        let outputs = driver::build_output_filenames(&input, &None, &None, [], sess);
        let (crate, _) = driver::compile_upto(sess, cfg, &input,
                                              driver::cu_parse, Some(outputs));
        crate.expect("parsing should return a crate")
    }

    // Locate the `main` function in the crate's top-level module and invoke
    // `f` with its block; fails if no `main` exists (we always generate one).
    fn find_main(crate: @ast::crate, sess: session::Session,
                 f: &fn(&ast::blk)) {
        for crate.node.module.items.iter().advance |item| {
            match item.node {
                ast::item_fn(_, _, _, _, ref blk) => {
                    if item.ident == sess.ident_of("main") {
                        return f(blk);
                    }
                }
                _ => {}
            }
        }
        fail!("main function was expected somewhere...");
    }
}
// Compiles a crate given by the filename as a library if the compiled
// version doesn't exist or is older than the source file. Binary is
// the name of the compiling executable. Returns Some(true) if it
// successfully compiled, Some(false) if the crate wasn't compiled
// because it already exists and is newer than the source file, or
// None if there were compile errors.
fn compile_crate(src_filename: ~str, binary: ~str) -> Option<bool> {
    // Run the whole compilation in a child task: rustc signals errors by
    // failing, and task::try converts that failure into Err(_).
    match do task::try {
        let src_path = Path(src_filename);
        let binary = binary.to_managed();
        let options = @session::options {
            binary: binary,
            addl_lib_search_paths: @mut ~[os::getcwd()],
            .. copy *session::basic_options()
        };
        let input = driver::file_input(copy src_path);
        let sess = driver::build_session(options, diagnostic::emit);
        *sess.building_library = true;
        let cfg = driver::build_configuration(sess, binary, &input);
        let outputs = driver::build_output_filenames(
            &input, &None, &None, [], sess);
        // If the library already exists and is newer than the source
        // file, skip compilation (the overall result is then Some(false)).
        let mut should_compile = true;
        let dir = os::list_dir_path(&Path(outputs.out_filename.dirname()));
        let maybe_lib_path = do dir.iter().find_ |file| {
            // The actual file's name has a hash value and version
            // number in it which is unknown at this time, so looking
            // for a file that matches out_filename won't work,
            // instead we guess which file is the library by matching
            // the prefix and suffix of out_filename to files in the
            // directory.
            let file_str = file.filename().get();
            file_str.starts_with(outputs.out_filename.filestem().get())
                && file_str.ends_with(outputs.out_filename.filetype().get())
        };
        match maybe_lib_path {
            Some(lib_path) => {
                // compare modification times; seconds precision is enough here
                let (src_mtime, _) = src_path.get_mtime().get();
                let (lib_mtime, _) = lib_path.get_mtime().get();
                if lib_mtime >= src_mtime {
                    should_compile = false;
                }
            },
            None => { },
        }
        if (should_compile) {
            println(fmt!("compiling %s...", src_filename));
            driver::compile_upto(sess, cfg, &input, driver::cu_everything,
                                 Some(outputs));
            true
        } else { false }
    } {
        Ok(true) => Some(true),
        Ok(false) => Some(false),
        Err(_) => None,
    }
}
/// Tries to get a line from rl after outputting a prompt. Returns
/// None if no input was read (e.g. EOF was reached).
///
/// When `use_rl` is false (stdin is not a tty), reads a plain line from
/// stdin instead and the prompt is not printed.
fn get_line(use_rl: bool, prompt: &str) -> Option<~str> {
    if use_rl {
        let result = unsafe { rl::read(prompt) };
        match result {
            None => None,
            Some(line) => {
                // every successfully read line goes into readline history
                unsafe { rl::add_history(line) };
                Some(line)
            }
        }
    } else {
        if io::stdin().eof() {
            None
        } else {
            Some(io::stdin().read_line())
        }
    }
}
/// Run a command, e.g. :clear, :exit, etc.
///
/// Mutates `repl` in place (e.g. :exit clears `running`, :load extends the
/// library search paths) and returns a follow-up `CmdAction`: `action_none`
/// for fully-handled commands, or `action_run_line` carrying the body of a
/// `:{ ... :}` multiline command to be executed as ordinary input.
fn run_cmd(repl: &mut Repl, _in: @io::Reader, _out: @io::Writer,
           cmd: ~str, args: ~[~str], use_rl: bool) -> CmdAction {
    let mut action = action_none;
    match cmd {
        ~"exit" => repl.running = false,
        ~"clear" => {
            repl.program.clear();

            // XXX: Win32 version of linenoise can't do this
            //rl::clear();
        }
        ~"help" => {
            println(
                ":{\\n ..lines.. \\n:}\\n - execute multiline command\n\
                 :load <crate> ... - loads given crates as dynamic libraries\n\
                 :clear - clear the bindings\n\
                 :exit - exit from the repl\n\
                 :help - show this message");
        }
        ~"load" => {
            let mut loaded_crates: ~[~str] = ~[];
            for args.iter().advance |arg| {
                // accept either a bare crate name or a .rs/.rc filename
                let (crate, filename) =
                    if arg.ends_with(".rs") || arg.ends_with(".rc") {
                        (arg.slice_to(arg.len() - 3).to_owned(), copy *arg)
                    } else {
                        (copy *arg, *arg + ".rs")
                    };
                // compile_crate returns None on compile errors; only
                // successfully compiled (or already fresh) crates are recorded
                match compile_crate(filename, copy repl.binary) {
                    Some(_) => loaded_crates.push(crate),
                    None => { }
                }
            }
            for loaded_crates.iter().advance |crate| {
                let crate_path = Path(*crate);
                let crate_dir = crate_path.dirname();
                repl.program.record_extern(fmt!("extern mod %s;", *crate));
                // remember the crate's directory as a search path, once
                if !repl.lib_search_paths.iter().any_(|x| x == &crate_dir) {
                    repl.lib_search_paths.push(crate_dir);
                }
            }
            if loaded_crates.is_empty() {
                println("no crates loaded");
            } else {
                println(fmt!("crates loaded: %s",
                             loaded_crates.connect(", ")));
            }
        }
        ~"{" => {
            // collect lines until a lone ":}" terminates the multiline block
            let mut multiline_cmd = ~"";
            let mut end_multiline = false;
            while (!end_multiline) {
                match get_line(use_rl, "rusti| ") {
                    None => fail!("unterminated multiline command :{ .. :}"),
                    Some(line) => {
                        if line.trim() == ":}" {
                            end_multiline = true;
                        } else {
                            multiline_cmd.push_str(line);
                            multiline_cmd.push_char('\n');
                        }
                    }
                }
            }
            action = action_run_line(multiline_cmd);
        }
        _ => println(~"unknown cmd: " + cmd)
    }
    return action;
}
/// Executes a line of input, which may either be rust code or a
/// :command. Returns a new Repl if it has changed.
///
/// A leading ':' dispatches to `run_cmd`; a multiline `:{ .. :}` command
/// re-enters this function with the collected text. Plain code is run on
/// a copy of the Repl inside `task::try` so a failing snippet cannot take
/// down the REPL itself — `None` is returned on failure and the caller
/// keeps the old Repl state.
pub fn run_line(repl: &mut Repl, in: @io::Reader, out: @io::Writer, line: ~str,
                use_rl: bool)
    -> Option<Repl> {
    if line.starts_with(":") {
        // drop the : and the \n (one byte each)
        let full = line.slice(1, line.len());
        // Split the remainder into the command word plus its arguments.
        let split: ~[~str] = full.word_iter().transform(|s| s.to_owned()).collect();
        let len = split.len();
        if len > 0 {
            let cmd = copy split[0];

            if !cmd.is_empty() {
                let args = if len > 1 {
                    vec::slice(split, 1, len).to_owned()
                } else { ~[] };
                match run_cmd(repl, in, out, cmd, args, use_rl) {
                    action_none => { }
                    action_run_line(multiline_cmd) => {
                        // A :{ .. :} block produced code to execute;
                        // recurse to run it as ordinary input.
                        if !multiline_cmd.is_empty() {
                            return run_line(repl, in, out, multiline_cmd, use_rl);
                        }
                    }
                }
                return None;
            }
        }
    }

    // Cells move the line and a copy of the Repl into the child task;
    // running under task::try isolates failures in user code.
    let line = Cell::new(line);
    let r = Cell::new(copy *repl);
    let result = do task::try {
        run(r.take(), line.take())
    };

    if result.is_ok() {
        return Some(result.get());
    }
    return None;
}
/// Entry point for the rusti REPL: builds the initial `Repl` state,
/// prints the interactive banner and registers tab-completion when stdin
/// is a tty, then reads and executes lines until EOF or `:exit`.
pub fn main() {
    let args = os::args();
    let in = io::stdin();
    let out = io::stdout();
    let mut repl = Repl {
        prompt: ~"rusti> ",
        binary: copy args[0],
        running: true,
        lib_search_paths: ~[],
        program: Program::new(),
    };

    // A tty means a human is typing; piped input skips banner/prompt noise.
    let istty = unsafe { libc::isatty(libc::STDIN_FILENO as i32) } != 0;

    // only print this stuff if the user is actually typing into rusti
    if istty {
        println("WARNING: The Rust REPL is experimental and may be");
        println("unstable. If you encounter problems, please use the");
        println("compiler instead. Type :help for help.");
        unsafe {
            // Register a completion callback with linenoise: offer the
            // built-in :commands when the line starts with ':'.
            do rl::complete |line, suggest| {
                if line.starts_with(":") {
                    suggest(~":clear");
                    suggest(~":exit");
                    suggest(~":help");
                    suggest(~":load");
                }
            }
        }
    }

    while repl.running {
        match get_line(istty, repl.prompt) {
            None => break,
            Some(line) => {
                if line.is_empty() {
                    if istty {
                        println("()");
                    }
                    // `loop;` is this era's `continue`: skip empty input.
                    loop;
                }
                // Only adopt the new Repl state if the line succeeded;
                // on failure (None) the previous bindings are retained.
                match run_line(&mut repl, in, out, line, istty) {
                    Some(new_repl) => repl = new_repl,
                    None => { }
                }
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use std::io;
    use std::iterator::IteratorUtil;
    use program::Program;
    use super::*;

    // Builds a fresh Repl with the same defaults main() uses, but a fixed
    // binary name so tests don't depend on os::args().
    fn repl() -> Repl {
        Repl {
            prompt: ~"rusti> ",
            binary: ~"rusti",
            running: true,
            lib_search_paths: ~[],
            program: Program::new(),
        }
    }

    // Feeds `prog` to the REPL one line at a time (use_rl = false, so
    // input never touches linenoise) and fails the test if any line
    // fails to execute.
    fn run_program(prog: &str) {
        let mut r = repl();
        for prog.split_iter('\n').advance |cmd| {
            let result = run_line(&mut r, io::stdin(), io::stdout(),
                                  cmd.to_owned(), false);
            r = result.expect(fmt!("the command '%s' failed", cmd));
        }
    }

    #[test]
    // FIXME: #7220 rusti on 32bit mac doesn't work.
    #[cfg(not(target_word_size="32",
              target_os="macos"))]
    fn run_all() {
        // FIXME(#7071):
        // By default, unit tests are run in parallel. Rusti, on the other hand,
        // does not enjoy doing this. I suspect that it is because the LLVM
        // bindings are not thread-safe (when running parallel tests, some tests
        // were triggering assertions in LLVM (or segfaults). Hence, this
        // function exists to run everything serially (sadface).
        //
        // To get some interesting output, run with RUST_LOG=rusti::tests

        debug!("hopefully this runs");
        run_program("");

        debug!("regression test for #5937");
        run_program("use std::hashmap;");

        debug!("regression test for #5784");
        run_program("let a = 3;");

        // XXX: can't spawn new tasks because the JIT code is cleaned up
        //      after the main function is done.
        // debug!("regression test for #5803");
        // run_program("
        //     spawn( || println(\"Please don't segfault\") );
        //     do spawn { println(\"Please?\"); }
        // ");

        debug!("inferred integers are usable");
        run_program("let a = 2;\n()\n");
        run_program("
            let a = 3;
            let b = 4u;
            assert!((a as uint) + b == 7)
        ");

        debug!("local variables can be shadowed");
        run_program("
            let a = 3;
            let a = 5;
            assert!(a == 5)
        ");

        debug!("strings are usable");
        run_program("
            let a = ~\"\";
            let b = \"\";
            let c = @\"\";
            let d = a + b + c;
            assert!(d.len() == 0);
        ");

        debug!("vectors are usable");
        run_program("
            let a = ~[1, 2, 3];
            let b = &[1, 2, 3];
            let c = @[1, 2, 3];
            let d = a + b + c;
            assert!(d.len() == 9);
            let e: &[int] = [];
        ");

        debug!("structs are usable");
        run_program("
            struct A{ a: int }
            let b = A{ a: 3 };
            assert!(b.a == 3)
        ");

        debug!("mutable variables");
        run_program("
            let mut a = 3;
            a = 5;
            let mut b = std::hashmap::HashSet::new::<int>();
            b.insert(a);
            assert!(b.contains(&5))
            assert!(b.len() == 1)
        ");

        debug!("functions are cached");
        run_program("
            fn fib(x: int) -> int { if x < 2 {x} else { fib(x - 1) + fib(x - 2) } }
            let a = fib(3);
            let a = a + fib(4);
            assert!(a == 5)
        ");

        debug!("modules are cached");
        run_program("
            mod b { pub fn foo() -> uint { 3 } }
            assert!(b::foo() == 3)
        ");

        debug!("multiple function definitions are allowed");
        run_program("
            fn f() {}
            fn f() {}
            f()
        ");

        debug!("multiple item definitions are allowed");
        run_program("
            fn f() {}
            mod f {}
            struct f;
            enum f {}
            fn f() {}
            f()
        ");
    }

    #[test]
    // FIXME: #7220 rusti on 32bit mac doesn't work.
    #[cfg(not(target_word_size="32",
              target_os="macos"))]
    fn exit_quits() {
        // :exit should flip `running` off and, being a command rather
        // than code, yield no new Repl state.
        let mut r = repl();
        assert!(r.running);
        let result = run_line(&mut r, io::stdin(), io::stdout(),
                              ~":exit", false);
        assert!(result.is_none());
        assert!(!r.running);
    }
}

View file

@ -402,7 +402,8 @@ fn run_cmd(repl: &mut Repl, _in: @io::Reader, _out: @io::Writer,
if line.trim() == ":}" {
end_multiline = true;
} else {
multiline_cmd += line + "\n";
multiline_cmd.push_str(line);
multiline_cmd.push_char('\n');
}
}
}

View file

@ -185,7 +185,21 @@ impl<'self> PkgScript<'self> {
}
impl Ctx {
pub trait CtxMethods {
fn run(&self, cmd: &str, args: ~[~str]);
fn do_cmd(&self, _cmd: &str, _pkgname: &str);
fn build(&self, workspace: &Path, pkgid: &PkgId);
fn clean(&self, workspace: &Path, id: &PkgId);
fn info(&self);
fn install(&self, workspace: &Path, id: &PkgId);
fn install_no_build(&self, workspace: &Path, id: &PkgId);
fn prefer(&self, _id: &str, _vers: Option<~str>);
fn test(&self);
fn uninstall(&self, _id: &str, _vers: Option<~str>);
fn unprefer(&self, _id: &str, _vers: Option<~str>);
}
impl CtxMethods for Ctx {
fn run(&self, cmd: &str, args: ~[~str]) {
match cmd {

View file

@ -248,7 +248,7 @@ fn command_line_test_output(args: &[~str]) -> ~[~str] {
let p_output = command_line_test(args, &os::getcwd());
let test_output = str::from_bytes(p_output.output);
for test_output.split_iter('\n').advance |s| {
result += [s.to_owned()];
result.push(s.to_owned());
}
result
}

View file

@ -10,6 +10,7 @@
//! Utilities for manipulating the char type
use container::Container;
use option::{None, Option, Some};
use str;
use str::{StrSlice, OwnedStr};

View file

@ -22,7 +22,7 @@ by convention implementing the `Clone` trait and calling the
*/
use core::kinds::Const;
use core::kinds::Freeze;
/// A common trait for cloning an object.
pub trait Clone {
@ -112,17 +112,17 @@ impl<T: DeepClone> DeepClone for ~T {
fn deep_clone(&self) -> ~T { ~(**self).deep_clone() }
}
// FIXME: #6525: should also be implemented for `T: Owned + DeepClone`
impl<T: Const + DeepClone> DeepClone for @T {
/// Return a deep copy of the managed box. The `Const` trait is required to prevent performing
// FIXME: #6525: should also be implemented for `T: Send + DeepClone`
impl<T: Freeze + DeepClone> DeepClone for @T {
/// Return a deep copy of the managed box. The `Freeze` trait is required to prevent performing
/// a deep clone of a potentially cyclical type.
#[inline]
fn deep_clone(&self) -> @T { @(**self).deep_clone() }
}
// FIXME: #6525: should also be implemented for `T: Owned + DeepClone`
impl<T: Const + DeepClone> DeepClone for @mut T {
/// Return a deep copy of the managed box. The `Const` trait is required to prevent performing
// FIXME: #6525: should also be implemented for `T: Send + DeepClone`
impl<T: Freeze + DeepClone> DeepClone for @mut T {
/// Return a deep copy of the managed box. The `Freeze` trait is required to prevent performing
/// a deep clone of a potentially cyclical type.
#[inline]
fn deep_clone(&self) -> @mut T { @mut (**self).deep_clone() }

View file

@ -17,7 +17,7 @@ Message passing
use cast::{transmute, transmute_mut};
use container::Container;
use either::{Either, Left, Right};
use kinds::Owned;
use kinds::Send;
use option::{Option, Some, None};
use uint;
use vec::OwnedVector;
@ -77,7 +77,7 @@ pub struct Port<T> {
These allow sending or receiving an unlimited number of messages.
*/
pub fn stream<T:Owned>() -> (Port<T>, Chan<T>) {
pub fn stream<T:Send>() -> (Port<T>, Chan<T>) {
let (port, chan) = match rt::context() {
rt::OldTaskContext => match pipesy::stream() {
(p, c) => (Left(p), Left(c))
@ -91,7 +91,7 @@ pub fn stream<T:Owned>() -> (Port<T>, Chan<T>) {
return (port, chan);
}
impl<T: Owned> GenericChan<T> for Chan<T> {
impl<T: Send> GenericChan<T> for Chan<T> {
fn send(&self, x: T) {
match self.inner {
Left(ref chan) => chan.send(x),
@ -100,7 +100,7 @@ impl<T: Owned> GenericChan<T> for Chan<T> {
}
}
impl<T: Owned> GenericSmartChan<T> for Chan<T> {
impl<T: Send> GenericSmartChan<T> for Chan<T> {
fn try_send(&self, x: T) -> bool {
match self.inner {
Left(ref chan) => chan.try_send(x),
@ -109,7 +109,7 @@ impl<T: Owned> GenericSmartChan<T> for Chan<T> {
}
}
impl<T: Owned> GenericPort<T> for Port<T> {
impl<T: Send> GenericPort<T> for Port<T> {
fn recv(&self) -> T {
match self.inner {
Left(ref port) => port.recv(),
@ -125,7 +125,7 @@ impl<T: Owned> GenericPort<T> for Port<T> {
}
}
impl<T: Owned> Peekable<T> for Port<T> {
impl<T: Send> Peekable<T> for Port<T> {
fn peek(&self) -> bool {
match self.inner {
Left(ref port) => port.peek(),
@ -134,7 +134,7 @@ impl<T: Owned> Peekable<T> for Port<T> {
}
}
impl<T: Owned> Selectable for Port<T> {
impl<T: Send> Selectable for Port<T> {
fn header(&mut self) -> *mut PacketHeader {
match self.inner {
Left(ref mut port) => port.header(),
@ -149,7 +149,7 @@ pub struct PortSet<T> {
ports: ~[pipesy::Port<T>],
}
impl<T: Owned> PortSet<T> {
impl<T: Send> PortSet<T> {
pub fn new() -> PortSet<T> {
PortSet {
ports: ~[]
@ -175,7 +175,7 @@ impl<T: Owned> PortSet<T> {
}
}
impl<T:Owned> GenericPort<T> for PortSet<T> {
impl<T:Send> GenericPort<T> for PortSet<T> {
fn try_recv(&self) -> Option<T> {
unsafe {
let self_ports = transmute_mut(&self.ports);
@ -204,7 +204,7 @@ impl<T:Owned> GenericPort<T> for PortSet<T> {
}
}
impl<T: Owned> Peekable<T> for PortSet<T> {
impl<T: Send> Peekable<T> for PortSet<T> {
fn peek(&self) -> bool {
// It'd be nice to use self.port.each, but that version isn't
// pure.
@ -223,7 +223,7 @@ pub struct SharedChan<T> {
ch: Exclusive<pipesy::Chan<T>>
}
impl<T: Owned> SharedChan<T> {
impl<T: Send> SharedChan<T> {
/// Converts a `chan` into a `shared_chan`.
pub fn new(c: Chan<T>) -> SharedChan<T> {
let Chan { inner } = c;
@ -235,7 +235,7 @@ impl<T: Owned> SharedChan<T> {
}
}
impl<T: Owned> GenericChan<T> for SharedChan<T> {
impl<T: Send> GenericChan<T> for SharedChan<T> {
fn send(&self, x: T) {
unsafe {
let mut xx = Some(x);
@ -247,7 +247,7 @@ impl<T: Owned> GenericChan<T> for SharedChan<T> {
}
}
impl<T: Owned> GenericSmartChan<T> for SharedChan<T> {
impl<T: Send> GenericSmartChan<T> for SharedChan<T> {
fn try_send(&self, x: T) -> bool {
unsafe {
let mut xx = Some(x);
@ -259,7 +259,7 @@ impl<T: Owned> GenericSmartChan<T> for SharedChan<T> {
}
}
impl<T: Owned> ::clone::Clone for SharedChan<T> {
impl<T: Send> ::clone::Clone for SharedChan<T> {
fn clone(&self) -> SharedChan<T> {
SharedChan { ch: self.ch.clone() }
}
@ -273,7 +273,7 @@ pub struct ChanOne<T> {
inner: Either<pipesy::ChanOne<T>, rtcomm::ChanOne<T>>
}
pub fn oneshot<T: Owned>() -> (PortOne<T>, ChanOne<T>) {
pub fn oneshot<T: Send>() -> (PortOne<T>, ChanOne<T>) {
let (port, chan) = match rt::context() {
rt::OldTaskContext => match pipesy::oneshot() {
(p, c) => (Left(p), Left(c)),
@ -287,7 +287,7 @@ pub fn oneshot<T: Owned>() -> (PortOne<T>, ChanOne<T>) {
return (port, chan);
}
impl<T: Owned> PortOne<T> {
impl<T: Send> PortOne<T> {
pub fn recv(self) -> T {
let PortOne { inner } = self;
match inner {
@ -305,7 +305,7 @@ impl<T: Owned> PortOne<T> {
}
}
impl<T: Owned> ChanOne<T> {
impl<T: Send> ChanOne<T> {
pub fn send(self, data: T) {
let ChanOne { inner } = self;
match inner {
@ -323,7 +323,7 @@ impl<T: Owned> ChanOne<T> {
}
}
pub fn recv_one<T: Owned>(port: PortOne<T>) -> T {
pub fn recv_one<T: Send>(port: PortOne<T>) -> T {
let PortOne { inner } = port;
match inner {
Left(p) => pipesy::recv_one(p),
@ -331,7 +331,7 @@ pub fn recv_one<T: Owned>(port: PortOne<T>) -> T {
}
}
pub fn try_recv_one<T: Owned>(port: PortOne<T>) -> Option<T> {
pub fn try_recv_one<T: Send>(port: PortOne<T>) -> Option<T> {
let PortOne { inner } = port;
match inner {
Left(p) => pipesy::try_recv_one(p),
@ -339,7 +339,7 @@ pub fn try_recv_one<T: Owned>(port: PortOne<T>) -> Option<T> {
}
}
pub fn send_one<T: Owned>(chan: ChanOne<T>, data: T) {
pub fn send_one<T: Send>(chan: ChanOne<T>, data: T) {
let ChanOne { inner } = chan;
match inner {
Left(c) => pipesy::send_one(c, data),
@ -347,7 +347,7 @@ pub fn send_one<T: Owned>(chan: ChanOne<T>, data: T) {
}
}
pub fn try_send_one<T: Owned>(chan: ChanOne<T>, data: T) -> bool {
pub fn try_send_one<T: Send>(chan: ChanOne<T>, data: T) -> bool {
let ChanOne { inner } = chan;
match inner {
Left(c) => pipesy::try_send_one(c, data),
@ -357,7 +357,7 @@ pub fn try_send_one<T: Owned>(chan: ChanOne<T>, data: T) -> bool {
mod pipesy {
use kinds::Owned;
use kinds::Send;
use option::{Option, Some, None};
use pipes::{recv, try_recv, peek, PacketHeader};
use super::{GenericChan, GenericSmartChan, GenericPort, Peekable, Selectable};
@ -365,17 +365,17 @@ mod pipesy {
use util::replace;
/*proto! oneshot (
Oneshot:send<T:Owned> {
Oneshot:send<T:Send> {
send(T) -> !
}
)*/
#[allow(non_camel_case_types)]
pub mod oneshot {
priv use core::kinds::Owned;
priv use core::kinds::Send;
use ptr::to_mut_unsafe_ptr;
pub fn init<T: Owned>() -> (server::Oneshot<T>, client::Oneshot<T>) {
pub fn init<T: Send>() -> (server::Oneshot<T>, client::Oneshot<T>) {
pub use core::pipes::HasBuffer;
let buffer = ~::core::pipes::Buffer {
@ -399,10 +399,10 @@ mod pipesy {
#[allow(non_camel_case_types)]
pub mod client {
priv use core::kinds::Owned;
priv use core::kinds::Send;
#[allow(non_camel_case_types)]
pub fn try_send<T: Owned>(pipe: Oneshot<T>, x_0: T) ->
pub fn try_send<T: Send>(pipe: Oneshot<T>, x_0: T) ->
::core::option::Option<()> {
{
use super::send;
@ -414,7 +414,7 @@ mod pipesy {
}
#[allow(non_camel_case_types)]
pub fn send<T: Owned>(pipe: Oneshot<T>, x_0: T) {
pub fn send<T: Send>(pipe: Oneshot<T>, x_0: T) {
{
use super::send;
let message = send(x_0);
@ -464,12 +464,12 @@ mod pipesy {
}
/// Initialiase a (send-endpoint, recv-endpoint) oneshot pipe pair.
pub fn oneshot<T: Owned>() -> (PortOne<T>, ChanOne<T>) {
pub fn oneshot<T: Send>() -> (PortOne<T>, ChanOne<T>) {
let (port, chan) = oneshot::init();
(PortOne::new(port), ChanOne::new(chan))
}
impl<T: Owned> PortOne<T> {
impl<T: Send> PortOne<T> {
pub fn recv(self) -> T { recv_one(self) }
pub fn try_recv(self) -> Option<T> { try_recv_one(self) }
pub fn unwrap(self) -> oneshot::server::Oneshot<T> {
@ -479,7 +479,7 @@ mod pipesy {
}
}
impl<T: Owned> ChanOne<T> {
impl<T: Send> ChanOne<T> {
pub fn send(self, data: T) { send_one(self, data) }
pub fn try_send(self, data: T) -> bool { try_send_one(self, data) }
pub fn unwrap(self) -> oneshot::client::Oneshot<T> {
@ -493,7 +493,7 @@ mod pipesy {
* Receive a message from a oneshot pipe, failing if the connection was
* closed.
*/
pub fn recv_one<T: Owned>(port: PortOne<T>) -> T {
pub fn recv_one<T: Send>(port: PortOne<T>) -> T {
match port {
PortOne { contents: port } => {
let oneshot::send(message) = recv(port);
@ -503,7 +503,7 @@ mod pipesy {
}
/// Receive a message from a oneshot pipe unless the connection was closed.
pub fn try_recv_one<T: Owned> (port: PortOne<T>) -> Option<T> {
pub fn try_recv_one<T: Send> (port: PortOne<T>) -> Option<T> {
match port {
PortOne { contents: port } => {
let message = try_recv(port);
@ -519,7 +519,7 @@ mod pipesy {
}
/// Send a message on a oneshot pipe, failing if the connection was closed.
pub fn send_one<T: Owned>(chan: ChanOne<T>, data: T) {
pub fn send_one<T: Send>(chan: ChanOne<T>, data: T) {
match chan {
ChanOne { contents: chan } => oneshot::client::send(chan, data),
}
@ -529,7 +529,7 @@ mod pipesy {
* Send a message on a oneshot pipe, or return false if the connection was
* closed.
*/
pub fn try_send_one<T: Owned>(chan: ChanOne<T>, data: T) -> bool {
pub fn try_send_one<T: Send>(chan: ChanOne<T>, data: T) -> bool {
match chan {
ChanOne { contents: chan } => {
oneshot::client::try_send(chan, data).is_some()
@ -540,16 +540,16 @@ mod pipesy {
// Streams - Make pipes a little easier in general.
/*proto! streamp (
Open:send<T: Owned> {
Open:send<T: Send> {
data(T) -> Open<T>
}
)*/
#[allow(non_camel_case_types)]
pub mod streamp {
priv use core::kinds::Owned;
priv use core::kinds::Send;
pub fn init<T: Owned>() -> (server::Open<T>, client::Open<T>) {
pub fn init<T: Send>() -> (server::Open<T>, client::Open<T>) {
pub use core::pipes::HasBuffer;
::core::pipes::entangle()
}
@ -559,10 +559,10 @@ mod pipesy {
#[allow(non_camel_case_types)]
pub mod client {
priv use core::kinds::Owned;
priv use core::kinds::Send;
#[allow(non_camel_case_types)]
pub fn try_data<T: Owned>(pipe: Open<T>, x_0: T) ->
pub fn try_data<T: Send>(pipe: Open<T>, x_0: T) ->
::core::option::Option<Open<T>> {
{
use super::data;
@ -575,7 +575,7 @@ mod pipesy {
}
#[allow(non_camel_case_types)]
pub fn data<T: Owned>(pipe: Open<T>, x_0: T) -> Open<T> {
pub fn data<T: Send>(pipe: Open<T>, x_0: T) -> Open<T> {
{
use super::data;
let (s, c) = ::core::pipes::entangle();
@ -613,7 +613,7 @@ mod pipesy {
These allow sending or receiving an unlimited number of messages.
*/
pub fn stream<T:Owned>() -> (Port<T>, Chan<T>) {
pub fn stream<T:Send>() -> (Port<T>, Chan<T>) {
let (s, c) = streamp::init();
(Port {
@ -623,7 +623,7 @@ mod pipesy {
})
}
impl<T: Owned> GenericChan<T> for Chan<T> {
impl<T: Send> GenericChan<T> for Chan<T> {
#[inline]
fn send(&self, x: T) {
unsafe {
@ -634,7 +634,7 @@ mod pipesy {
}
}
impl<T: Owned> GenericSmartChan<T> for Chan<T> {
impl<T: Send> GenericSmartChan<T> for Chan<T> {
#[inline]
fn try_send(&self, x: T) -> bool {
unsafe {
@ -651,7 +651,7 @@ mod pipesy {
}
}
impl<T: Owned> GenericPort<T> for Port<T> {
impl<T: Send> GenericPort<T> for Port<T> {
#[inline]
fn recv(&self) -> T {
unsafe {
@ -679,7 +679,7 @@ mod pipesy {
}
}
impl<T: Owned> Peekable<T> for Port<T> {
impl<T: Send> Peekable<T> for Port<T> {
#[inline]
fn peek(&self) -> bool {
unsafe {
@ -695,7 +695,7 @@ mod pipesy {
}
}
impl<T: Owned> Selectable for Port<T> {
impl<T: Send> Selectable for Port<T> {
fn header(&mut self) -> *mut PacketHeader {
match self.endp {
Some(ref mut endp) => endp.header(),
@ -723,15 +723,15 @@ pub fn select2i<A:Selectable, B:Selectable>(a: &mut A, b: &mut B)
}
/// Receive a message from one of two endpoints.
pub trait Select2<T: Owned, U: Owned> {
pub trait Select2<T: Send, U: Send> {
/// Receive a message or return `None` if a connection closes.
fn try_select(&mut self) -> Either<Option<T>, Option<U>>;
/// Receive a message or fail if a connection closes.
fn select(&mut self) -> Either<T, U>;
}
impl<T:Owned,
U:Owned,
impl<T:Send,
U:Send,
Left:Selectable + GenericPort<T>,
Right:Selectable + GenericPort<U>>
Select2<T, U>

View file

@ -24,6 +24,7 @@
use container::Container;
use iterator::IteratorUtil;
use rt::io::Writer;
use str::OwnedStr;
use to_bytes::IterBytes;
use uint;
use vec::ImmutableVector;
@ -369,7 +370,7 @@ impl Streaming for SipState {
let r = self.result_bytes();
let mut s = ~"";
for r.iter().advance |b| {
s += uint::to_str_radix(*b as uint, 16u);
s.push_str(uint::to_str_radix(*b as uint, 16u));
}
s
}
@ -471,7 +472,7 @@ mod tests {
fn to_hex_str(r: &[u8, ..8]) -> ~str {
let mut s = ~"";
for r.iter().advance |b| {
s += uint::to_str_radix(*b as uint, 16u);
s.push_str(uint::to_str_radix(*b as uint, 16u));
}
s
}
@ -492,7 +493,7 @@ mod tests {
assert!(f == i && f == v);
buf += [t as u8];
buf.push(t as u8);
stream_inc.input([t as u8]);
t += 1;

View file

@ -771,7 +771,9 @@ impl<T:Reader> ReaderUtil for T {
fn read_le_uint_n(&self, nbytes: uint) -> u64 {
assert!(nbytes > 0 && nbytes <= 8);
let mut (val, pos, i) = (0u64, 0, nbytes);
let mut val = 0u64;
let mut pos = 0;
let mut i = nbytes;
while i > 0 {
val += (self.read_u8() as u64) << pos;
pos += 8;
@ -787,7 +789,8 @@ impl<T:Reader> ReaderUtil for T {
fn read_be_uint_n(&self, nbytes: uint) -> u64 {
assert!(nbytes > 0 && nbytes <= 8);
let mut (val, i) = (0u64, nbytes);
let mut val = 0u64;
let mut i = nbytes;
while i > 0 {
i -= 1;
val += (self.read_u8() as u64) << i * 8;

View file

@ -24,11 +24,10 @@ The 4 kinds are
scalar types and managed pointers, and exludes owned pointers. It
also excludes types that implement `Drop`.
* Owned - owned types and types containing owned types. These types
* Send - owned types and types containing owned types. These types
may be transferred across task boundaries.
* Const - types that are deeply immutable. Const types are used for
freezable data structures.
* Freeze - types that are deeply immutable.
`Copy` types include both implicitly copyable types that the compiler
will copy automatically and non-implicitly copyable types that require
@ -44,14 +43,28 @@ pub trait Copy {
// Empty.
}
#[cfg(stage0)]
#[lang="owned"]
pub trait Owned {
// Empty.
pub trait Send {
// empty.
}
#[cfg(not(stage0))]
#[lang="send"]
pub trait Send {
// empty.
}
#[cfg(stage0)]
#[lang="const"]
pub trait Const {
// Empty.
pub trait Freeze {
// empty.
}
#[cfg(not(stage0))]
#[lang="freeze"]
pub trait Freeze {
// empty.
}
#[lang="sized"]

View file

@ -400,7 +400,8 @@ impl Integer for $T {
#[inline]
fn gcd(&self, other: &$T) -> $T {
// Use Euclid's algorithm
let mut (m, n) = (*self, *other);
let mut m = *self;
let mut n = *other;
while m != 0 {
let temp = m;
m = n % temp;

View file

@ -412,7 +412,7 @@ pub fn pow_with_uint<T:NumCast+One+Zero+Copy+Div<T,T>+Mul<T,T>>(radix: uint, pow
if my_pow % 2u == 1u {
total = total * multiplier;
}
my_pow = my_pow / 2u;
my_pow = my_pow / 2u;
multiplier = multiplier * multiplier;
}
total

View file

@ -237,7 +237,8 @@ impl Integer for $T {
#[inline]
fn gcd(&self, other: &$T) -> $T {
// Use Euclid's algorithm
let mut (m, n) = (*self, *other);
let mut m = *self;
let mut n = *other;
while m != 0 {
let temp = m;
m = n % temp;

View file

@ -29,6 +29,7 @@
#[allow(missing_doc)];
use cast;
use container::Container;
use io;
use iterator::IteratorUtil;
use libc;
@ -145,7 +146,7 @@ pub mod win32 {
pub fn as_utf16_p<T>(s: &str, f: &fn(*u16) -> T) -> T {
let mut t = s.to_utf16();
// Null terminate before passing on.
t += [0u16];
t.push(0u16);
vec::as_imm_buf(t, |buf, _len| f(buf))
}
}
@ -1500,7 +1501,10 @@ mod tests {
fn test_getenv_big() {
let mut s = ~"";
let mut i = 0;
while i < 100 { s += "aaaaaaaaaa"; i += 1; }
while i < 100 {
s = s + "aaaaaaaaaa";
i += 1;
}
let n = make_rand_name();
setenv(n, s);
debug!(copy s);

View file

@ -21,8 +21,8 @@ use cmp::Eq;
use iterator::IteratorUtil;
use libc;
use option::{None, Option, Some};
use str::{OwnedStr, Str, StrSlice, StrVector};
use str;
use str::{Str, StrSlice, StrVector};
use to_str::ToStr;
use ascii::{AsciiCast, AsciiStr};
use vec::{OwnedVector, ImmutableVector};
@ -335,8 +335,8 @@ mod stat {
}
}
impl Path {
#[cfg(target_os = "win32")]
impl WindowsPath {
pub fn stat(&self) -> Option<libc::stat> {
unsafe {
do str::as_c_str(self.to_str()) |buf| {
@ -349,12 +349,35 @@ impl Path {
}
}
#[cfg(unix)]
pub fn lstat(&self) -> Option<libc::stat> {
pub fn exists(&self) -> bool {
match self.stat() {
None => false,
Some(_) => true,
}
}
pub fn get_size(&self) -> Option<i64> {
match self.stat() {
None => None,
Some(ref st) => Some(st.st_size as i64),
}
}
pub fn get_mode(&self) -> Option<uint> {
match self.stat() {
None => None,
Some(ref st) => Some(st.st_mode as uint),
}
}
}
#[cfg(not(target_os = "win32"))]
impl PosixPath {
pub fn stat(&self) -> Option<libc::stat> {
unsafe {
do str::as_c_str(self.to_str()) |buf| {
do str::as_c_str(self.to_str()) |buf| {
let mut st = stat::arch::default_stat();
match libc::lstat(buf, &mut st) {
match libc::stat(buf, &mut st) {
0 => Some(st),
_ => None,
}
@ -396,7 +419,7 @@ impl Path {
#[cfg(target_os = "freebsd")]
#[cfg(target_os = "linux")]
#[cfg(target_os = "macos")]
impl Path {
impl PosixPath {
pub fn get_atime(&self) -> Option<(i64, int)> {
match self.stat() {
None => None,
@ -428,9 +451,24 @@ impl Path {
}
}
#[cfg(unix)]
impl PosixPath {
pub fn lstat(&self) -> Option<libc::stat> {
unsafe {
do str::as_c_str(self.to_str()) |buf| {
let mut st = stat::arch::default_stat();
match libc::lstat(buf, &mut st) {
0 => Some(st),
_ => None,
}
}
}
}
}
#[cfg(target_os = "freebsd")]
#[cfg(target_os = "macos")]
impl Path {
impl PosixPath {
pub fn get_birthtime(&self) -> Option<(i64, int)> {
match self.stat() {
None => None,
@ -443,7 +481,7 @@ impl Path {
}
#[cfg(target_os = "win32")]
impl Path {
impl WindowsPath {
pub fn get_atime(&self) -> Option<(i64, int)> {
match self.stat() {
None => None,
@ -470,13 +508,21 @@ impl Path {
}
}
}
/// Execute a function on p as well as all of its ancestors
pub fn each_parent(&self, f: &fn(&Path)) {
if !self.components.is_empty() {
f(self);
self.pop().each_parent(f);
}
}
}
impl ToStr for PosixPath {
fn to_str(&self) -> ~str {
let mut s = ~"";
if self.is_absolute {
s += "/";
s.push_str("/");
}
s + self.components.connect("/")
}
@ -655,15 +701,21 @@ impl ToStr for WindowsPath {
fn to_str(&self) -> ~str {
let mut s = ~"";
match self.host {
Some(ref h) => { s += "\\\\"; s += *h; }
Some(ref h) => {
s.push_str("\\\\");
s.push_str(*h);
}
None => { }
}
match self.device {
Some(ref d) => { s += *d; s += ":"; }
Some(ref d) => {
s.push_str(*d);
s.push_str(":");
}
None => { }
}
if self.is_absolute {
s += "\\";
s.push_str("\\");
}
s + self.components.connect("\\")
}

View file

@ -88,7 +88,7 @@ use container::Container;
use cast::{forget, transmute, transmute_copy, transmute_mut};
use either::{Either, Left, Right};
use iterator::IteratorUtil;
use kinds::Owned;
use kinds::Send;
use libc;
use ops::Drop;
use option::{None, Option, Some};
@ -177,7 +177,7 @@ impl PacketHeader {
transmute_copy(&self.buffer)
}
pub fn set_buffer<T:Owned>(&mut self, b: ~Buffer<T>) {
pub fn set_buffer<T:Send>(&mut self, b: ~Buffer<T>) {
unsafe {
self.buffer = transmute_copy(&b);
}
@ -193,13 +193,13 @@ pub trait HasBuffer {
fn set_buffer(&mut self, b: *libc::c_void);
}
impl<T:Owned> HasBuffer for Packet<T> {
impl<T:Send> HasBuffer for Packet<T> {
fn set_buffer(&mut self, b: *libc::c_void) {
self.header.buffer = b;
}
}
pub fn mk_packet<T:Owned>() -> Packet<T> {
pub fn mk_packet<T:Send>() -> Packet<T> {
Packet {
header: PacketHeader(),
payload: None,
@ -230,7 +230,7 @@ pub fn packet<T>() -> *mut Packet<T> {
p
}
pub fn entangle_buffer<T:Owned,Tstart:Owned>(
pub fn entangle_buffer<T:Send,Tstart:Send>(
mut buffer: ~Buffer<T>,
init: &fn(*libc::c_void, x: &mut T) -> *mut Packet<Tstart>)
-> (RecvPacketBuffered<Tstart, T>, SendPacketBuffered<Tstart, T>) {
@ -396,7 +396,7 @@ pub fn send<T,Tbuffer>(mut p: SendPacketBuffered<T,Tbuffer>,
Fails if the sender closes the connection.
*/
pub fn recv<T:Owned,Tbuffer:Owned>(
pub fn recv<T:Send,Tbuffer:Send>(
p: RecvPacketBuffered<T, Tbuffer>) -> T {
try_recv(p).expect("connection closed")
}
@ -407,7 +407,7 @@ Returns `None` if the sender has closed the connection without sending
a message, or `Some(T)` if a message was received.
*/
pub fn try_recv<T:Owned,Tbuffer:Owned>(mut p: RecvPacketBuffered<T, Tbuffer>)
pub fn try_recv<T:Send,Tbuffer:Send>(mut p: RecvPacketBuffered<T, Tbuffer>)
-> Option<T> {
let p_ = p.unwrap();
let p = unsafe { &mut *p_ };
@ -427,7 +427,7 @@ pub fn try_recv<T:Owned,Tbuffer:Owned>(mut p: RecvPacketBuffered<T, Tbuffer>)
}
}
fn try_recv_<T:Owned>(p: &mut Packet<T>) -> Option<T> {
fn try_recv_<T:Send>(p: &mut Packet<T>) -> Option<T> {
// optimistic path
match p.header.state {
Full => {
@ -511,7 +511,7 @@ fn try_recv_<T:Owned>(p: &mut Packet<T>) -> Option<T> {
}
/// Returns true if messages are available.
pub fn peek<T:Owned,Tb:Owned>(p: &mut RecvPacketBuffered<T, Tb>) -> bool {
pub fn peek<T:Send,Tb:Send>(p: &mut RecvPacketBuffered<T, Tb>) -> bool {
unsafe {
match (*p.header()).state {
Empty | Terminated => false,
@ -521,7 +521,7 @@ pub fn peek<T:Owned,Tb:Owned>(p: &mut RecvPacketBuffered<T, Tb>) -> bool {
}
}
fn sender_terminate<T:Owned>(p: *mut Packet<T>) {
fn sender_terminate<T:Send>(p: *mut Packet<T>) {
let p = unsafe {
&mut *p
};
@ -553,7 +553,7 @@ fn sender_terminate<T:Owned>(p: *mut Packet<T>) {
}
}
fn receiver_terminate<T:Owned>(p: *mut Packet<T>) {
fn receiver_terminate<T:Send>(p: *mut Packet<T>) {
let p = unsafe {
&mut *p
};
@ -671,7 +671,7 @@ pub struct SendPacketBuffered<T, Tbuffer> {
}
#[unsafe_destructor]
impl<T:Owned,Tbuffer:Owned> Drop for SendPacketBuffered<T,Tbuffer> {
impl<T:Send,Tbuffer:Send> Drop for SendPacketBuffered<T,Tbuffer> {
fn drop(&self) {
unsafe {
let this: &mut SendPacketBuffered<T,Tbuffer> = transmute(self);
@ -729,7 +729,7 @@ pub struct RecvPacketBuffered<T, Tbuffer> {
}
#[unsafe_destructor]
impl<T:Owned,Tbuffer:Owned> Drop for RecvPacketBuffered<T,Tbuffer> {
impl<T:Send,Tbuffer:Send> Drop for RecvPacketBuffered<T,Tbuffer> {
fn drop(&self) {
unsafe {
let this: &mut RecvPacketBuffered<T,Tbuffer> = transmute(self);
@ -741,7 +741,7 @@ impl<T:Owned,Tbuffer:Owned> Drop for RecvPacketBuffered<T,Tbuffer> {
}
}
impl<T:Owned,Tbuffer:Owned> RecvPacketBuffered<T, Tbuffer> {
impl<T:Send,Tbuffer:Send> RecvPacketBuffered<T, Tbuffer> {
pub fn unwrap(&mut self) -> *mut Packet<T> {
replace(&mut self.p, None).unwrap()
}
@ -751,7 +751,7 @@ impl<T:Owned,Tbuffer:Owned> RecvPacketBuffered<T, Tbuffer> {
}
}
impl<T:Owned,Tbuffer:Owned> Selectable for RecvPacketBuffered<T, Tbuffer> {
impl<T:Send,Tbuffer:Send> Selectable for RecvPacketBuffered<T, Tbuffer> {
fn header(&mut self) -> *mut PacketHeader {
match self.p {
Some(packet) => unsafe {
@ -807,7 +807,7 @@ Sometimes messages will be available on both endpoints at once. In
this case, `select2` may return either `left` or `right`.
*/
pub fn select2<A:Owned,Ab:Owned,B:Owned,Bb:Owned>(
pub fn select2<A:Send,Ab:Send,B:Send,Bb:Send>(
mut a: RecvPacketBuffered<A, Ab>,
mut b: RecvPacketBuffered<B, Bb>)
-> Either<(Option<A>, RecvPacketBuffered<B, Bb>),
@ -847,7 +847,7 @@ pub fn select2i<A:Selectable,B:Selectable>(a: &mut A, b: &mut B)
/// Waits on a set of endpoints. Returns a message, its index, and a
/// list of the remaining endpoints.
pub fn select<T:Owned,Tb:Owned>(mut endpoints: ~[RecvPacketBuffered<T, Tb>])
pub fn select<T:Send,Tb:Send>(mut endpoints: ~[RecvPacketBuffered<T, Tb>])
-> (uint,
Option<T>,
~[RecvPacketBuffered<T, Tb>]) {

View file

@ -29,7 +29,8 @@ Rust's prelude has three main parts:
// Reexported core operators
pub use either::{Either, Left, Right};
pub use kinds::{Const, Copy, Owned, Sized};
pub use kinds::{Copy, Sized};
pub use kinds::{Freeze, Send};
pub use ops::{Add, Sub, Mul, Div, Rem, Neg, Not};
pub use ops::{BitAnd, BitOr, BitXor};
pub use ops::{Drop};

View file

@ -42,6 +42,7 @@ fn main () {
use cast;
use cmp;
use container::Container;
use int;
use iterator::IteratorUtil;
use local_data;
@ -720,7 +721,8 @@ impl IsaacRng {
fn isaac(&mut self) {
self.c += 1;
// abbreviations
let mut (a, b) = (self.a, self.b + self.c);
let mut a = self.a;
let mut b = self.b + self.c;
static midpoint: uint = RAND_SIZE as uint / 2;

View file

@ -89,7 +89,8 @@ impl Rand for StandardNormal {
// do-while, so the condition should be true on the first
// run, they get overwritten anyway (0 < 1, so these are
// good).
let mut (x, y) = (1.0, 0.0);
let mut x = 1.0;
let mut y = 0.0;
// XXX infinities?
while -2.0*y < x * x {

View file

@ -19,7 +19,7 @@ use option::*;
use cast;
use util;
use ops::Drop;
use kinds::Owned;
use kinds::Send;
use rt::sched::{Scheduler, Coroutine};
use rt::local::Local;
use unstable::intrinsics::{atomic_xchg, atomic_load};
@ -68,7 +68,7 @@ pub struct PortOneHack<T> {
suppress_finalize: bool
}
pub fn oneshot<T: Owned>() -> (PortOne<T>, ChanOne<T>) {
pub fn oneshot<T: Send>() -> (PortOne<T>, ChanOne<T>) {
let packet: ~Packet<T> = ~Packet {
state: STATE_BOTH,
payload: None
@ -307,20 +307,20 @@ pub struct Port<T> {
next: Cell<PortOne<StreamPayload<T>>>
}
pub fn stream<T: Owned>() -> (Port<T>, Chan<T>) {
pub fn stream<T: Send>() -> (Port<T>, Chan<T>) {
let (pone, cone) = oneshot();
let port = Port { next: Cell::new(pone) };
let chan = Chan { next: Cell::new(cone) };
return (port, chan);
}
impl<T: Owned> GenericChan<T> for Chan<T> {
impl<T: Send> GenericChan<T> for Chan<T> {
fn send(&self, val: T) {
self.try_send(val);
}
}
impl<T: Owned> GenericSmartChan<T> for Chan<T> {
impl<T: Send> GenericSmartChan<T> for Chan<T> {
fn try_send(&self, val: T) -> bool {
let (next_pone, next_cone) = oneshot();
let cone = self.next.take();

View file

@ -343,7 +343,9 @@ impl<T: Reader> ReaderByteConversions for T {
fn read_le_uint_n(&mut self, nbytes: uint) -> u64 {
assert!(nbytes > 0 && nbytes <= 8);
let mut (val, pos, i) = (0u64, 0, nbytes);
let mut val = 0u64;
let mut pos = 0;
let mut i = nbytes;
while i > 0 {
val += (self.read_u8() as u64) << pos;
pos += 8;
@ -359,7 +361,8 @@ impl<T: Reader> ReaderByteConversions for T {
fn read_be_uint_n(&mut self, nbytes: uint) -> u64 {
assert!(nbytes > 0 && nbytes <= 8);
let mut (val, i) = (0u64, nbytes);
let mut val = 0u64;
let mut i = nbytes;
while i > 0 {
i -= 1;
val += (self.read_u8() as u64) << i * 8;

View file

@ -9,7 +9,7 @@
// except according to those terms.
use container::Container;
use kinds::Owned;
use kinds::Send;
use vec::OwnedVector;
use cell::Cell;
use option::*;
@ -21,7 +21,7 @@ pub struct MessageQueue<T> {
priv queue: ~Exclusive<~[T]>
}
impl<T: Owned> MessageQueue<T> {
impl<T: Send> MessageQueue<T> {
pub fn new() -> MessageQueue<T> {
MessageQueue {
queue: ~exclusive(~[])

View file

@ -13,7 +13,7 @@ use option::*;
use vec::OwnedVector;
use unstable::sync::{Exclusive, exclusive};
use cell::Cell;
use kinds::Owned;
use kinds::Send;
use clone::Clone;
pub struct WorkQueue<T> {
@ -21,7 +21,7 @@ pub struct WorkQueue<T> {
priv queue: ~Exclusive<~[T]>
}
impl<T: Owned> WorkQueue<T> {
impl<T: Send> WorkQueue<T> {
pub fn new() -> WorkQueue<T> {
WorkQueue {
queue: ~exclusive(~[])

View file

@ -357,7 +357,8 @@ impl<'self> Iterator<(uint, uint)> for StrMatchesIndexIterator<'self> {
fn next(&mut self) -> Option<(uint, uint)> {
// See Issue #1932 for why this is a naive search
let (h_len, n_len) = (self.haystack.len(), self.needle.len());
let mut (match_start, match_i) = (0, 0);
let mut match_start = 0;
let mut match_i = 0;
while self.position < h_len {
if self.haystack[self.position] == self.needle[match_i] {
@ -473,6 +474,31 @@ pub fn each_split_within<'a>(ss: &'a str,
return cont;
}
/**
 * Replace all occurrences of one string with another
 *
 * # Arguments
 *
 * * s - The string containing substrings to replace
 * * from - The string to replace
 * * to - The replacement string
 *
 * # Return value
 *
 * The original string with all occurrences of `from` replaced with `to`
 */
pub fn replace(s: &str, from: &str, to: &str) -> ~str {
let mut result = ~"";
let mut last_end = 0;
// Walk every (start, end) byte range where `from` matches in `s`,
// copying the unmatched gap and then the replacement text.
// NOTE(review): `raw::slice_bytes` is safe here presumably because
// match boundaries fall on char boundaries — confirm in `raw` docs.
for s.matches_index_iter(from).advance |(start, end)| {
result.push_str(unsafe{raw::slice_bytes(s, last_end, start)});
result.push_str(to);
last_end = end;
}
// Append the tail after the final match (the whole string if no match).
result.push_str(unsafe{raw::slice_bytes(s, last_end, s.len())});
result
}
/*
Section: Comparing strings
*/
@ -631,6 +657,48 @@ pub fn with_capacity(capacity: uint) -> ~str {
buf
}
/**
 * As `char_len` but for a slice of a string
 *
 * # Arguments
 *
 * * s - A valid string
 * * start - The position inside `s` where to start counting, in bytes
 * * end - The position where to stop counting, in bytes
 *
 * # Return value
 *
 * The number of Unicode characters in `s` between the given indices.
 */
pub fn count_chars(s: &str, start: uint, end: uint) -> uint {
// Both byte indices must lie on UTF-8 character boundaries.
assert!(s.is_char_boundary(start));
assert!(s.is_char_boundary(end));
let mut i = start;
let mut len = 0u;
// Hop from one character boundary to the next, counting each hop.
while i < end {
let next = s.char_range_at(i).next;
len += 1u;
i = next;
}
return len;
}
/// Counts the number of bytes taken by the first `n` chars in `s`
/// starting from `start`.
///
/// `start` must lie on a character boundary; asserts (rather than
/// returning an error) if `s` holds fewer than `n` chars past `start`.
pub fn count_bytes<'b>(s: &'b str, start: uint, n: uint) -> uint {
assert!(s.is_char_boundary(start));
let mut end = start;
let mut cnt = n;
let l = s.len();
// Advance one character at a time until `n` chars have been consumed.
while cnt > 0u {
assert!(end < l);
let next = s.char_range_at(end).next;
cnt -= 1u;
end = next;
}
// Difference of the two byte offsets is the byte length of those chars.
end - start
}
/// Given a first byte, determine how many bytes are in this UTF-8 character
pub fn utf8_char_width(b: u8) -> uint {
let byte: uint = b as uint;
@ -737,7 +805,8 @@ pub mod raw {
/// Create a Rust string from a null-terminated *u8 buffer
pub unsafe fn from_buf(buf: *u8) -> ~str {
let mut (curr, i) = (buf, 0u);
let mut curr = buf;
let mut i = 0u;
while *curr != 0u8 {
i += 1u;
curr = ptr::offset(buf, i);
@ -790,7 +859,8 @@ pub mod raw {
/// invalidated later.
pub unsafe fn c_str_to_static_slice(s: *libc::c_char) -> &'static str {
let s = s as *u8;
let mut (curr, len) = (s, 0u);
let mut curr = s;
let mut len = 0u;
while *curr != 0u8 {
len += 1u;
curr = ptr::offset(s, len);
@ -1070,6 +1140,17 @@ impl<'self> Str for @str {
}
}
impl<'self> Container for &'self str {
/// Returns the size in bytes, not counting the null terminator that
/// the underlying buffer carries (hence the `n - 1u`).
#[inline]
fn len(&self) -> uint {
do as_buf(*self) |_p, n| { n - 1u }
}
/// Returns true if the string has length 0.
#[inline]
fn is_empty(&self) -> bool {
self.len() == 0
}
}
#[allow(missing_doc)]
pub trait StrSlice<'self> {
fn contains<'a>(&self, needle: &'a str) -> bool;
@ -1088,10 +1169,8 @@ pub trait StrSlice<'self> {
fn any_line_iter(&self) -> AnyLineIterator<'self>;
fn word_iter(&self) -> WordIterator<'self>;
fn ends_with(&self, needle: &str) -> bool;
fn is_empty(&self) -> bool;
fn is_whitespace(&self) -> bool;
fn is_alphanumeric(&self) -> bool;
fn len(&self) -> uint;
fn char_len(&self) -> uint;
fn slice(&self, begin: uint, end: uint) -> &'self str;
@ -1292,9 +1371,6 @@ impl<'self> StrSlice<'self> for &'self str {
self.split_iter(char::is_whitespace).filter(|s| !s.is_empty())
}
/// Returns true if the string has length 0
#[inline]
fn is_empty(&self) -> bool { self.len() == 0 }
/**
* Returns true if the string contains only whitespace
*
@ -1309,11 +1385,6 @@ impl<'self> StrSlice<'self> for &'self str {
*/
#[inline]
fn is_alphanumeric(&self) -> bool { self.iter().all(char::is_alphanumeric) }
/// Returns the size in bytes not counting the null terminator
#[inline]
fn len(&self) -> uint {
do as_buf(*self) |_p, n| { n - 1u }
}
/// Returns the number of characters that a string holds
#[inline]
fn char_len(&self) -> uint { self.iter().len_() }
@ -1357,7 +1428,8 @@ impl<'self> StrSlice<'self> for &'self str {
fn slice_chars(&self, begin: uint, end: uint) -> &'self str {
assert!(begin <= end);
// not sure how to use the iterators for this nicely.
let mut (position, count) = (0, 0);
let mut position = 0;
let mut count = 0;
let l = self.len();
while count < begin && position < l {
position = self.char_range_at(position).next;
@ -1505,7 +1577,8 @@ impl<'self> StrSlice<'self> for &'self str {
* The original string with all occurrences of `from` replaced with `to`
*/
pub fn replace(&self, from: &str, to: &str) -> ~str {
let mut (result, last_end) = (~"", 0);
let mut result = ~"";
let mut last_end = 0;
for self.matches_index_iter(from).advance |(start, end)| {
result.push_str(unsafe{raw::slice_bytes(*self, last_end, start)});
result.push_str(to);

Some files were not shown because too many files have changed in this diff Show more