auto merge of #11974 : huonw/rust/no-at-vec, r=pcwalton

This removes `@[]` from the parser, along with as much of the handling of it (and `@str`) in the compiler as I could find.

I've just rebased @pcwalton's (already reviewed) `@str` removal (and fixed the problems in a separate commit); the only new work is the trailing commits with my authorship.

Closes #11967
This commit is contained in:
bors 2014-02-01 11:16:24 -08:00
commit 2bcd951749
162 changed files with 2081 additions and 2642 deletions

View file

@ -3079,7 +3079,7 @@ A value of type `str` is a Unicode string,
represented as a vector of 8-bit unsigned bytes holding a sequence of UTF-8 codepoints.
Since `str` is of unknown size, it is not a _first class_ type,
but can only be instantiated through a pointer type,
such as `&str`, `@str` or `~str`.
such as `&str` or `~str`.
### Tuple types
@ -3115,7 +3115,7 @@ Such a definite-sized vector type is a first-class type, since its size is known
A vector without such a size is said to be of _indefinite_ size,
and is therefore not a _first-class_ type.
An indefinite-size vector can only be instantiated through a pointer type,
such as `&[T]`, `@[T]` or `~[T]`.
such as `&[T]` or `~[T]`.
The kind of a vector type depends on the kind of its element type,
as with other simple structural types.

View file

@ -27,7 +27,6 @@ extern mod extra;
use extra::list::{List, Cons, Nil};
use extra::list;
use std::at_vec;
use std::cast::{transmute, transmute_mut, transmute_mut_region};
use std::cast;
use std::cell::{Cell, RefCell};
@ -35,20 +34,31 @@ use std::num;
use std::ptr;
use std::kinds::marker;
use std::mem;
use std::rc::Rc;
use std::rt::global_heap;
use std::unstable::intrinsics::{TyDesc, get_tydesc};
use std::unstable::intrinsics;
use std::util;
use std::vec;
// The way arena uses arrays is really deeply awful. The arrays are
// allocated, and have capacities reserved, but the fill for the array
// will always stay at 0.
#[deriving(Clone)]
struct Chunk {
data: RefCell<@[u8]>,
data: Rc<RefCell<~[u8]>>,
fill: Cell<uint>,
is_pod: Cell<bool>,
}
impl Chunk {
fn capacity(&self) -> uint {
self.data.borrow().borrow().get().capacity()
}
unsafe fn as_ptr(&self) -> *u8 {
self.data.borrow().borrow().get().as_ptr()
}
}
// Arenas are used to quickly allocate objects that share a
// lifetime. The arena uses ~[u8] vectors as a backing store to
@ -97,10 +107,8 @@ impl Arena {
}
fn chunk(size: uint, is_pod: bool) -> Chunk {
let mut v: @[u8] = @[];
unsafe { at_vec::raw::reserve(&mut v, size); }
Chunk {
data: RefCell::new(unsafe { cast::transmute(v) }),
data: Rc::new(RefCell::new(vec::with_capacity(size))),
fill: Cell::new(0u),
is_pod: Cell::new(is_pod),
}
@ -131,10 +139,7 @@ fn round_up(base: uint, align: uint) -> uint {
// in it.
unsafe fn destroy_chunk(chunk: &Chunk) {
let mut idx = 0;
let buf = {
let data = chunk.data.borrow();
data.get().as_ptr()
};
let buf = chunk.as_ptr();
let fill = chunk.fill.get();
while idx < fill {
@ -172,11 +177,13 @@ unsafe fn un_bitpack_tydesc_ptr(p: uint) -> (*TyDesc, bool) {
}
impl Arena {
fn chunk_size(&self) -> uint {
self.pod_head.capacity()
}
// Functions for the POD part of the arena
fn alloc_pod_grow(&mut self, n_bytes: uint, align: uint) -> *u8 {
// Allocate a new chunk.
let chunk_size = at_vec::capacity(self.pod_head.data.get());
let new_min_chunk_size = num::max(n_bytes, chunk_size);
let new_min_chunk_size = num::max(n_bytes, self.chunk_size());
self.chunks.set(@Cons(self.pod_head.clone(), self.chunks.get()));
self.pod_head =
chunk(num::next_power_of_two(new_min_chunk_size + 1u), true);
@ -190,7 +197,7 @@ impl Arena {
let this = transmute_mut_region(self);
let start = round_up(this.pod_head.fill.get(), align);
let end = start + n_bytes;
if end > at_vec::capacity(this.pod_head.data.get()) {
if end > self.chunk_size() {
return this.alloc_pod_grow(n_bytes, align);
}
this.pod_head.fill.set(end);
@ -198,7 +205,7 @@ impl Arena {
//debug!("idx = {}, size = {}, align = {}, fill = {}",
// start, n_bytes, align, head.fill.get());
ptr::offset(this.pod_head.data.get().as_ptr(), start as int)
this.pod_head.as_ptr().offset(start as int)
}
}
@ -217,8 +224,7 @@ impl Arena {
fn alloc_nonpod_grow(&mut self, n_bytes: uint, align: uint)
-> (*u8, *u8) {
// Allocate a new chunk.
let chunk_size = at_vec::capacity(self.head.data.get());
let new_min_chunk_size = num::max(n_bytes, chunk_size);
let new_min_chunk_size = num::max(n_bytes, self.chunk_size());
self.chunks.set(@Cons(self.head.clone(), self.chunks.get()));
self.head =
chunk(num::next_power_of_two(new_min_chunk_size + 1u), false);
@ -244,7 +250,7 @@ impl Arena {
end = start + n_bytes;
}
if end > at_vec::capacity(self.head.data.get()) {
if end > self.head.capacity() {
return self.alloc_nonpod_grow(n_bytes, align);
}
@ -254,7 +260,7 @@ impl Arena {
//debug!("idx = {}, size = {}, align = {}, fill = {}",
// start, n_bytes, align, head.fill);
let buf = self.head.data.get().as_ptr();
let buf = self.head.as_ptr();
return (ptr::offset(buf, tydesc_start as int), ptr::offset(buf, start as int));
}
}
@ -606,5 +612,3 @@ mod test {
})
}
}

View file

@ -80,7 +80,7 @@ pub mod BigDigit {
/**
A big unsigned integer type.
A `BigUint`-typed value `BigUint { data: @[a, b, c] }` represents a number
A `BigUint`-typed value `BigUint { data: ~[a, b, c] }` represents a number
`(a + b * BigDigit::base + c * BigDigit::base^2)`.
*/
#[deriving(Clone)]

View file

@ -18,7 +18,6 @@ Core encoding and decoding interfaces.
#[forbid(non_camel_case_types)];
use std::at_vec;
use std::hashmap::{HashMap, HashSet};
use std::rc::Rc;
use std::trie::{TrieMap, TrieSet};
@ -310,18 +309,6 @@ impl<D:Decoder> Decodable<D> for ~str {
}
}
impl<S:Encoder> Encodable<S> for @str {
fn encode(&self, s: &mut S) {
s.emit_str(*self)
}
}
impl<D:Decoder> Decodable<D> for @str {
fn decode(d: &mut D) -> @str {
d.read_str().to_managed()
}
}
impl<S:Encoder> Encodable<S> for f32 {
fn encode(&self, s: &mut S) {
s.emit_f32(*self)
@ -456,26 +443,6 @@ impl<D:Decoder,T:Decodable<D>> Decodable<D> for ~[T] {
}
}
impl<S:Encoder,T:Encodable<S>> Encodable<S> for @[T] {
fn encode(&self, s: &mut S) {
s.emit_seq(self.len(), |s| {
for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s))
}
})
}
}
impl<D:Decoder,T:Decodable<D>> Decodable<D> for @[T] {
fn decode(d: &mut D) -> @[T] {
d.read_seq(|d, len| {
at_vec::from_fn(len, |i| {
d.read_seq_elt(i, |d| Decodable::decode(d))
})
})
}
}
impl<S:Encoder,T:Encodable<S>> Encodable<S> for Option<T> {
fn encode(&self, s: &mut S) {
s.emit_option(|s| {

View file

@ -473,10 +473,10 @@ pub fn build_link_meta(sess: Session,
symbol_hasher: &mut Sha256)
-> LinkMeta {
// This calculates CMH as defined above
fn crate_hash(symbol_hasher: &mut Sha256, crateid: &CrateId) -> @str {
fn crate_hash(symbol_hasher: &mut Sha256, crateid: &CrateId) -> ~str {
symbol_hasher.reset();
symbol_hasher.input_str(crateid.to_str());
truncated_hash_result(symbol_hasher).to_managed()
truncated_hash_result(symbol_hasher)
}
let crateid = match attr::find_crateid(attrs) {
@ -510,7 +510,8 @@ fn truncated_hash_result(symbol_hasher: &mut Sha256) -> ~str {
pub fn symbol_hash(tcx: ty::ctxt,
symbol_hasher: &mut Sha256,
t: ty::t,
link_meta: &LinkMeta) -> @str {
link_meta: &LinkMeta)
-> ~str {
// NB: do *not* use abbrevs here as we want the symbol names
// to be independent of one another in the crate.
@ -523,15 +524,14 @@ pub fn symbol_hash(tcx: ty::ctxt,
let mut hash = truncated_hash_result(symbol_hasher);
// Prefix with 'h' so that it never blends into adjacent digits
hash.unshift_char('h');
// tjc: allocation is unfortunate; need to change std::hash
hash.to_managed()
hash
}
pub fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> @str {
pub fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> ~str {
{
let type_hashcodes = ccx.type_hashcodes.borrow();
match type_hashcodes.get().find(&t) {
Some(&h) => return h,
Some(h) => return h.to_str(),
None => {}
}
}
@ -539,7 +539,7 @@ pub fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> @str {
let mut type_hashcodes = ccx.type_hashcodes.borrow_mut();
let mut symbol_hasher = ccx.symbol_hasher.borrow_mut();
let hash = symbol_hash(ccx.tcx, symbol_hasher.get(), t, &ccx.link_meta);
type_hashcodes.get().insert(t, hash);
type_hashcodes.get().insert(t, hash.clone());
hash
}
@ -963,7 +963,7 @@ fn link_staticlib(sess: Session, obj_filename: &Path, out_filename: &Path) {
let crates = sess.cstore.get_used_crates(cstore::RequireStatic);
for &(cnum, ref path) in crates.iter() {
let name = sess.cstore.get_crate_data(cnum).name;
let name = sess.cstore.get_crate_data(cnum).name.clone();
let p = match *path {
Some(ref p) => p.clone(), None => {
sess.err(format!("could not find rlib for: `{}`", name));
@ -1221,7 +1221,7 @@ fn add_upstream_rust_crates(args: &mut ~[~str], sess: Session,
// If we're not doing LTO, then our job is simply to just link
// against the archive.
if sess.lto() {
let name = sess.cstore.get_crate_data(cnum).name;
let name = sess.cstore.get_crate_data(cnum).name.clone();
time(sess.time_passes(), format!("altering {}.rlib", name),
(), |()| {
let dst = tmpdir.join(cratepath.filename().unwrap());

View file

@ -42,7 +42,7 @@ pub fn run(sess: session::Session, llmod: ModuleRef,
// module that we've got.
let crates = sess.cstore.get_used_crates(cstore::RequireStatic);
for (cnum, path) in crates.move_iter() {
let name = sess.cstore.get_crate_data(cnum).name;
let name = sess.cstore.get_crate_data(cnum).name.clone();
let path = match path {
Some(p) => p,
None => {

View file

@ -44,6 +44,7 @@ use syntax::codemap;
use syntax::diagnostic;
use syntax::ext::base::CrateLoader;
use syntax::parse;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::print::{pp, pprust};
use syntax;
@ -60,12 +61,14 @@ pub enum PpMode {
* The name used for source code that doesn't originate in a file
* (e.g. source from stdin or a string)
*/
pub fn anon_src() -> @str { @"<anon>" }
pub fn anon_src() -> ~str {
"<anon>".to_str()
}
pub fn source_name(input: &Input) -> @str {
pub fn source_name(input: &Input) -> ~str {
match *input {
// FIXME (#9639): This needs to handle non-utf8 paths
FileInput(ref ifile) => ifile.as_str().unwrap().to_managed(),
FileInput(ref ifile) => ifile.as_str().unwrap().to_str(),
StrInput(_) => anon_src()
}
}
@ -73,39 +76,41 @@ pub fn source_name(input: &Input) -> @str {
pub fn default_configuration(sess: Session) ->
ast::CrateConfig {
let tos = match sess.targ_cfg.os {
abi::OsWin32 => @"win32",
abi::OsMacos => @"macos",
abi::OsLinux => @"linux",
abi::OsAndroid => @"android",
abi::OsFreebsd => @"freebsd"
abi::OsWin32 => InternedString::new("win32"),
abi::OsMacos => InternedString::new("macos"),
abi::OsLinux => InternedString::new("linux"),
abi::OsAndroid => InternedString::new("android"),
abi::OsFreebsd => InternedString::new("freebsd"),
};
// ARM is bi-endian, however using NDK seems to default
// to little-endian unless a flag is provided.
let (end,arch,wordsz) = match sess.targ_cfg.arch {
abi::X86 => (@"little", @"x86", @"32"),
abi::X86_64 => (@"little", @"x86_64", @"64"),
abi::Arm => (@"little", @"arm", @"32"),
abi::Mips => (@"big", @"mips", @"32")
abi::X86 => ("little", "x86", "32"),
abi::X86_64 => ("little", "x86_64", "64"),
abi::Arm => ("little", "arm", "32"),
abi::Mips => ("big", "mips", "32")
};
let fam = match sess.targ_cfg.os {
abi::OsWin32 => @"windows",
_ => @"unix"
abi::OsWin32 => InternedString::new("windows"),
_ => InternedString::new("unix")
};
let mk = attr::mk_name_value_item_str;
return ~[ // Target bindings.
attr::mk_word_item(fam),
mk(@"target_os", tos),
mk(@"target_family", fam),
mk(@"target_arch", arch),
mk(@"target_endian", end),
mk(@"target_word_size", wordsz),
attr::mk_word_item(fam.clone()),
mk(InternedString::new("target_os"), tos),
mk(InternedString::new("target_family"), fam),
mk(InternedString::new("target_arch"), InternedString::new(arch)),
mk(InternedString::new("target_endian"), InternedString::new(end)),
mk(InternedString::new("target_word_size"),
InternedString::new(wordsz)),
];
}
pub fn append_configuration(cfg: &mut ast::CrateConfig, name: @str) {
pub fn append_configuration(cfg: &mut ast::CrateConfig,
name: InternedString) {
if !cfg.iter().any(|mi| mi.name() == name) {
cfg.push(attr::mk_word_item(name))
}
@ -118,9 +123,15 @@ pub fn build_configuration(sess: Session) ->
let default_cfg = default_configuration(sess);
let mut user_cfg = sess.opts.cfg.clone();
// If the user wants a test runner, then add the test cfg
if sess.opts.test { append_configuration(&mut user_cfg, @"test") }
if sess.opts.test {
append_configuration(&mut user_cfg, InternedString::new("test"))
}
// If the user requested GC, then add the GC cfg
append_configuration(&mut user_cfg, if sess.opts.gc { @"gc" } else { @"nogc" });
append_configuration(&mut user_cfg, if sess.opts.gc {
InternedString::new("gc")
} else {
InternedString::new("nogc")
});
return vec::append(user_cfg, default_cfg);
}
@ -129,7 +140,7 @@ fn parse_cfgspecs(cfgspecs: ~[~str], demitter: @diagnostic::Emitter)
-> ast::CrateConfig {
cfgspecs.move_iter().map(|s| {
let sess = parse::new_parse_sess(Some(demitter));
parse::parse_meta_from_source_str(@"cfgspec", s.to_managed(), ~[], sess)
parse::parse_meta_from_source_str("cfgspec".to_str(), s, ~[], sess)
}).collect::<ast::CrateConfig>()
}
@ -137,8 +148,7 @@ pub enum Input {
/// Load source from file
FileInput(Path),
/// The string is the source
// FIXME (#2319): Don't really want to box the source string
StrInput(@str)
StrInput(~str)
}
pub fn phase_1_parse_input(sess: Session, cfg: ast::CrateConfig, input: &Input)
@ -148,9 +158,11 @@ pub fn phase_1_parse_input(sess: Session, cfg: ast::CrateConfig, input: &Input)
FileInput(ref file) => {
parse::parse_crate_from_file(&(*file), cfg.clone(), sess.parse_sess)
}
StrInput(src) => {
parse::parse_crate_from_source_str(
anon_src(), src, cfg.clone(), sess.parse_sess)
StrInput(ref src) => {
parse::parse_crate_from_source_str(anon_src(),
(*src).clone(),
cfg.clone(),
sess.parse_sess)
}
}
})
@ -474,13 +486,13 @@ fn write_out_deps(sess: Session, input: &Input, outputs: &OutputFilenames, crate
// Build a list of files used to compile the output and
// write Makefile-compatible dependency rules
let files: ~[@str] = {
let files: ~[~str] = {
let files = sess.codemap.files.borrow();
files.get()
.iter()
.filter_map(|fmap| {
if fmap.is_real_file() {
Some(fmap.name)
Some(fmap.name.clone())
} else {
None
}
@ -615,7 +627,7 @@ pub fn pretty_print_input(sess: Session,
_ => @pprust::NoAnn as @pprust::PpAnn,
};
let src = sess.codemap.get_filemap(source_name(input)).src;
let src = &sess.codemap.get_filemap(source_name(input)).src;
let mut rdr = MemReader::new(src.as_bytes().to_owned());
let stdout = io::stdout();
pprust::print_crate(sess.codemap,
@ -1100,17 +1112,17 @@ pub fn build_output_filenames(input: &Input,
let mut stem = match *input {
// FIXME (#9639): This needs to handle non-utf8 paths
FileInput(ref ifile) => (*ifile).filestem_str().unwrap().to_managed(),
StrInput(_) => @"rust_out"
FileInput(ref ifile) => {
(*ifile).filestem_str().unwrap().to_str()
}
StrInput(_) => ~"rust_out"
};
// If a crateid is present, we use it as the link name
let crateid = attr::find_crateid(attrs);
match crateid {
None => {}
Some(crateid) => {
stem = crateid.name.to_managed()
}
Some(crateid) => stem = crateid.name.to_str(),
}
if sess.building_library.get() {
@ -1201,7 +1213,7 @@ mod test {
let sessopts = build_session_options(~"rustc", matches, @diagnostic::DefaultEmitter);
let sess = build_session(sessopts, None, @diagnostic::DefaultEmitter);
let cfg = build_configuration(sess);
let mut test_items = cfg.iter().filter(|m| "test" == m.name());
let mut test_items = cfg.iter().filter(|m| m.name().equiv(&("test")));
assert!(test_items.next().is_some());
assert!(test_items.next().is_none());
}

View file

@ -352,9 +352,11 @@ impl Session_ {
self.debugging_opt(NO_LANDING_PADS)
}
// pointless function, now...
pub fn str_of(&self, id: ast::Ident) -> @str {
token::ident_to_str(&id)
// DEPRECATED. This function results in a lot of allocations when they
// are not necessary.
pub fn str_of(&self, id: ast::Ident) -> ~str {
let string = token::get_ident(id.name);
string.get().to_str()
}
// pointless function, now...
@ -417,7 +419,12 @@ pub fn building_library(options: &Options, crate: &ast::Crate) -> bool {
}
}
match syntax::attr::first_attr_value_str_by_name(crate.attrs, "crate_type") {
Some(s) => "lib" == s || "rlib" == s || "dylib" == s || "staticlib" == s,
Some(s) => {
s.equiv(&("lib")) ||
s.equiv(&("rlib")) ||
s.equiv(&("dylib")) ||
s.equiv(&("staticlib"))
}
_ => false
}
}
@ -435,16 +442,22 @@ pub fn collect_outputs(session: &Session,
}
let mut base = session.opts.outputs.clone();
let mut iter = attrs.iter().filter_map(|a| {
if "crate_type" == a.name() {
if a.name().equiv(&("crate_type")) {
match a.value_str() {
Some(n) if "rlib" == n => Some(OutputRlib),
Some(n) if "dylib" == n => Some(OutputDylib),
Some(n) if "lib" == n => Some(default_lib_output()),
Some(n) if "staticlib" == n => Some(OutputStaticlib),
Some(n) if "bin" == n => Some(OutputExecutable),
Some(ref n) if n.equiv(&("rlib")) => Some(OutputRlib),
Some(ref n) if n.equiv(&("dylib")) => Some(OutputDylib),
Some(ref n) if n.equiv(&("lib")) => {
Some(default_lib_output())
}
Some(ref n) if n.equiv(&("staticlib")) => {
Some(OutputStaticlib)
}
Some(ref n) if n.equiv(&("bin")) => Some(OutputExecutable),
Some(_) => {
session.add_lint(lint::UnknownCrateType, ast::CRATE_NODE_ID,
a.span, ~"invalid `crate_type` value");
session.add_lint(lint::UnknownCrateType,
ast::CRATE_NODE_ID,
a.span,
~"invalid `crate_type` value");
None
}
_ => {

View file

@ -98,7 +98,8 @@ impl Context {
impl Visitor<()> for Context {
fn visit_ident(&mut self, sp: Span, id: ast::Ident, _: ()) {
let s = token::ident_to_str(&id);
let string = token::get_ident(id.name);
let s = string.get();
if !s.is_ascii() {
self.gate_feature("non_ascii_idents", sp,
@ -122,7 +123,7 @@ impl Visitor<()> for Context {
}
ast::ViewItemExternMod(..) => {
for attr in i.attrs.iter() {
if "phase" == attr.name() {
if attr.name().get() == "phase"{
self.gate_feature("phase", attr.span,
"compile time crate loading is \
experimental and possibly buggy");
@ -135,7 +136,7 @@ impl Visitor<()> for Context {
fn visit_item(&mut self, i: &ast::Item, _:()) {
for attr in i.attrs.iter() {
if "thread_local" == attr.name() {
if attr.name().equiv(&("thread_local")) {
self.gate_feature("thread_local", i.span,
"`#[thread_local]` is an experimental feature, and does not \
currently handle destructors. There is no corresponding \
@ -227,8 +228,7 @@ impl Visitor<()> for Context {
fn visit_expr(&mut self, e: &ast::Expr, _: ()) {
match e.node {
ast::ExprUnary(_, ast::UnBox, _) |
ast::ExprVstore(_, ast::ExprVstoreBox) => {
ast::ExprUnary(_, ast::UnBox, _) => {
self.gate_box(e.span);
}
_ => {}
@ -258,7 +258,9 @@ pub fn check_crate(sess: Session, crate: &ast::Crate) {
};
for attr in crate.attrs.iter() {
if "feature" != attr.name() { continue }
if !attr.name().equiv(&("feature")) {
continue
}
match attr.meta_item_list() {
None => {
@ -268,14 +270,16 @@ pub fn check_crate(sess: Session, crate: &ast::Crate) {
Some(list) => {
for &mi in list.iter() {
let name = match mi.node {
ast::MetaWord(word) => word,
ast::MetaWord(ref word) => (*word).clone(),
_ => {
sess.span_err(mi.span, "malformed feature, expected \
just one word");
sess.span_err(mi.span,
"malformed feature, expected just \
one word");
continue
}
};
match KNOWN_FEATURES.iter().find(|& &(n, _)| n == name) {
match KNOWN_FEATURES.iter()
.find(|& &(n, _)| name.equiv(&n)) {
Some(&(name, Active)) => { cx.features.push(name); }
Some(&(_, Removed)) => {
sess.span_err(mi.span, "feature has been removed");

View file

@ -19,6 +19,8 @@ use syntax::codemap;
use syntax::fold::Folder;
use syntax::fold;
use syntax::opt_vec;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::util::small_vector::SmallVector;
pub static VERSION: &'static str = "0.10-pre";
@ -56,11 +58,13 @@ struct StandardLibraryInjector {
sess: Session,
}
pub fn with_version(crate: &str) -> Option<(@str, ast::StrStyle)> {
pub fn with_version(crate: &str) -> Option<(InternedString, ast::StrStyle)> {
match option_env!("CFG_DISABLE_INJECT_STD_VERSION") {
Some("1") => None,
_ => {
Some((format!("{}\\#{}", crate, VERSION).to_managed(),
Some((token::intern_and_get_ident(format!("{}\\#{}",
crate,
VERSION)),
ast::CookedStr))
}
}
@ -73,9 +77,12 @@ impl fold::Folder for StandardLibraryInjector {
with_version("std"),
ast::DUMMY_NODE_ID),
attrs: ~[
attr::mk_attr(attr::mk_list_item(@"phase",
~[attr::mk_word_item(@"syntax"),
attr::mk_word_item(@"link")]))
attr::mk_attr(attr::mk_list_item(
InternedString::new("phase"),
~[
attr::mk_word_item(InternedString::new("syntax")),
attr::mk_word_item(InternedString::new("link")
)]))
],
vis: ast::Inherited,
span: DUMMY_SP

View file

@ -27,6 +27,8 @@ use syntax::ext::base::ExtCtxt;
use syntax::fold::Folder;
use syntax::fold;
use syntax::opt_vec;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::print::pprust;
use syntax::{ast, ast_util};
use syntax::util::small_vector::SmallVector;
@ -132,7 +134,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
if !cx.sess.building_library.get() {
@ast::Item {
attrs: item.attrs.iter().filter_map(|attr| {
if "main" != attr.name() {
if !attr.name().equiv(&("main")) {
Some(*attr)
} else {
None
@ -169,7 +171,7 @@ fn generate_test_harness(sess: session::Session, crate: ast::Crate)
cx.ext_cx.bt_push(ExpnInfo {
call_site: DUMMY_SP,
callee: NameAndSpan {
name: @"test",
name: ~"test",
format: MacroAttribute,
span: None
}
@ -248,7 +250,7 @@ fn is_bench_fn(i: @ast::Item) -> bool {
fn is_ignored(cx: &TestCtxt, i: @ast::Item) -> bool {
i.attrs.iter().any(|attr| {
// check ignore(cfg(foo, bar))
"ignore" == attr.name() && match attr.meta_item_list() {
attr.name().equiv(&("ignore")) && match attr.meta_item_list() {
Some(ref cfgs) => attr::test_cfg(cx.config, cfgs.iter().map(|x| *x)),
None => true
}
@ -330,8 +332,9 @@ fn mk_test_module(cx: &TestCtxt) -> @ast::Item {
let item_ = ast::ItemMod(testmod);
// This attribute tells resolve to let us call unexported functions
let resolve_unexported_str = InternedString::new("!resolve_unexported");
let resolve_unexported_attr =
attr::mk_attr(attr::mk_word_item(@"!resolve_unexported"));
attr::mk_attr(attr::mk_word_item(resolve_unexported_str));
let item = ast::Item {
ident: cx.sess.ident_of("__test"),
@ -424,7 +427,8 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr {
debug!("encoding {}", ast_util::path_name_i(path));
let name_lit: ast::Lit =
nospan(ast::LitStr(ast_util::path_name_i(path).to_managed(), ast::CookedStr));
nospan(ast::LitStr(token::intern_and_get_ident(
ast_util::path_name_i(path)), ast::CookedStr));
let name_expr = @ast::Expr {
id: ast::DUMMY_NODE_ID,

View file

@ -235,9 +235,10 @@ pub fn run_compiler(args: &[~str], demitter: @diagnostic::Emitter) {
0u => d::early_error(demitter, "no input filename given"),
1u => {
let ifile = matches.free[0].as_slice();
if "-" == ifile {
let src = str::from_utf8_owned(io::stdin().read_to_end()).unwrap();
(d::StrInput(src.to_managed()), None)
if ifile == "-" {
let src =
str::from_utf8_owned(io::stdin().read_to_end()).unwrap();
(d::StrInput(src), None)
} else {
(d::FileInput(Path::new(ifile)), Some(Path::new(ifile)))
}
@ -319,9 +320,11 @@ fn parse_crate_attrs(sess: session::Session,
d::FileInput(ref ifile) => {
parse::parse_crate_attrs_from_file(ifile, ~[], sess.parse_sess)
}
d::StrInput(src) => {
parse::parse_crate_attrs_from_source_str(
d::anon_src(), src, ~[], sess.parse_sess)
d::StrInput(ref src) => {
parse::parse_crate_attrs_from_source_str(d::anon_src(),
(*src).clone(),
~[],
sess.parse_sess)
}
}
}

View file

@ -212,5 +212,5 @@ pub static tag_macro_def: uint = 0x112;
#[deriving(Clone)]
pub struct LinkMeta {
crateid: CrateId,
crate_hash: @str,
crate_hash: ~str,
}

View file

@ -27,8 +27,8 @@ use syntax::attr::AttrMetaMethods;
use syntax::codemap::{Span, DUMMY_SP};
use syntax::diagnostic::SpanHandler;
use syntax::ext::base::{CrateLoader, MacroCrate};
use syntax::parse::token::{IdentInterner, InternedString};
use syntax::parse::token;
use syntax::parse::token::IdentInterner;
use syntax::crateid::CrateId;
use syntax::visit;
@ -76,7 +76,7 @@ impl<'a> visit::Visitor<()> for ReadCrateVisitor<'a> {
struct cache_entry {
cnum: ast::CrateNum,
span: Span,
hash: @str,
hash: ~str,
crateid: CrateId,
}
@ -124,19 +124,17 @@ struct Env {
fn visit_crate(e: &Env, c: &ast::Crate) {
let cstore = e.sess.cstore;
for a in c.attrs.iter().filter(|m| "link_args" == m.name()) {
for a in c.attrs.iter().filter(|m| m.name().equiv(&("link_args"))) {
match a.value_str() {
Some(ref linkarg) => {
cstore.add_used_link_args(*linkarg);
}
None => {/* fallthrough */ }
Some(ref linkarg) => cstore.add_used_link_args(linkarg.get()),
None => { /* fallthrough */ }
}
}
}
fn visit_view_item(e: &mut Env, i: &ast::ViewItem) {
let should_load = i.attrs.iter().all(|attr| {
"phase" != attr.name() ||
attr.name().get() != "phase" ||
attr.meta_item_list().map_or(false, |phases| {
attr::contains_name(phases, "link")
})
@ -148,8 +146,12 @@ fn visit_view_item(e: &mut Env, i: &ast::ViewItem) {
match extract_crate_info(i) {
Some(info) => {
let cnum = resolve_crate(e, info.ident, info.name, info.version,
@"", i.span);
let cnum = resolve_crate(e,
info.ident.clone(),
info.name.clone(),
info.version.clone(),
~"",
i.span);
e.sess.cstore.add_extern_mod_stmt_cnum(info.id, cnum);
}
None => ()
@ -157,36 +159,36 @@ fn visit_view_item(e: &mut Env, i: &ast::ViewItem) {
}
struct CrateInfo {
ident: @str,
name: @str,
version: @str,
ident: ~str,
name: ~str,
version: ~str,
id: ast::NodeId,
}
fn extract_crate_info(i: &ast::ViewItem) -> Option<CrateInfo> {
match i.node {
ast::ViewItemExternMod(ident, path_opt, id) => {
let ident = token::ident_to_str(&ident);
ast::ViewItemExternMod(ref ident, ref path_opt, id) => {
let ident = token::get_ident(ident.name);
debug!("resolving extern mod stmt. ident: {:?} path_opt: {:?}",
ident, path_opt);
let (name, version) = match path_opt {
Some((path_str, _)) => {
let crateid: Option<CrateId> = from_str(path_str);
ident.get(), path_opt);
let (name, version) = match *path_opt {
Some((ref path_str, _)) => {
let crateid: Option<CrateId> = from_str(path_str.get());
match crateid {
None => (@"", @""),
None => (~"", ~""),
Some(crateid) => {
let version = match crateid.version {
None => @"",
Some(ref ver) => ver.to_managed(),
None => ~"",
Some(ref ver) => ver.to_str(),
};
(crateid.name.to_managed(), version)
(crateid.name.to_str(), version)
}
}
}
None => (ident, @""),
None => (ident.get().to_str(), ~""),
};
Some(CrateInfo {
ident: ident,
ident: ident.get().to_str(),
name: name,
version: version,
id: id,
@ -206,13 +208,15 @@ fn visit_item(e: &Env, i: &ast::Item) {
// First, add all of the custom link_args attributes
let cstore = e.sess.cstore;
let link_args = i.attrs.iter()
.filter_map(|at| if "link_args" == at.name() {Some(at)} else {None})
.filter_map(|at| if at.name().equiv(&("link_args")) {
Some(at)
} else {
None
})
.to_owned_vec();
for m in link_args.iter() {
match m.value_str() {
Some(linkarg) => {
cstore.add_used_link_args(linkarg);
}
Some(linkarg) => cstore.add_used_link_args(linkarg.get()),
None => { /* fallthrough */ }
}
}
@ -220,22 +224,26 @@ fn visit_item(e: &Env, i: &ast::Item) {
// Next, process all of the #[link(..)]-style arguments
let cstore = e.sess.cstore;
let link_args = i.attrs.iter()
.filter_map(|at| if "link" == at.name() {Some(at)} else {None})
.filter_map(|at| if at.name().equiv(&("link")) {
Some(at)
} else {
None
})
.to_owned_vec();
for m in link_args.iter() {
match m.meta_item_list() {
Some(items) => {
let kind = items.iter().find(|k| {
"kind" == k.name()
k.name().equiv(&("kind"))
}).and_then(|a| a.value_str());
let kind = match kind {
Some(k) => {
if "static" == k {
if k.equiv(&("static")) {
cstore::NativeStatic
} else if e.sess.targ_cfg.os == abi::OsMacos &&
"framework" == k {
k.equiv(&("framework")) {
cstore::NativeFramework
} else if "framework" == k {
} else if k.equiv(&("framework")) {
e.sess.span_err(m.span,
"native frameworks are only available \
on OSX targets");
@ -249,7 +257,7 @@ fn visit_item(e: &Env, i: &ast::Item) {
None => cstore::NativeUnknown
};
let n = items.iter().find(|n| {
"name" == n.name()
n.name().equiv(&("name"))
}).and_then(|a| a.value_str());
let n = match n {
Some(n) => n,
@ -257,13 +265,13 @@ fn visit_item(e: &Env, i: &ast::Item) {
e.sess.span_err(m.span,
"#[link(...)] specified without \
`name = \"foo\"`");
@"foo"
InternedString::new("foo")
}
};
if n.is_empty() {
if n.get().is_empty() {
e.sess.span_err(m.span, "#[link(name = \"\")] given with empty name");
} else {
cstore.add_used_library(n.to_owned(), kind);
cstore.add_used_library(n.get().to_owned(), kind);
}
}
None => {}
@ -274,14 +282,14 @@ fn visit_item(e: &Env, i: &ast::Item) {
}
}
fn existing_match(e: &Env, name: @str, version: @str, hash: &str) -> Option<ast::CrateNum> {
fn existing_match(e: &Env, name: ~str, version: ~str, hash: &str) -> Option<ast::CrateNum> {
let crate_cache = e.crate_cache.borrow();
for c in crate_cache.get().iter() {
let crateid_version = match c.crateid.version {
None => @"0.0",
Some(ref ver) => ver.to_managed(),
None => ~"0.0",
Some(ref ver) => ver.to_str(),
};
if (name.is_empty() || c.crateid.name.to_managed() == name) &&
if (name.is_empty() || c.crateid.name == name) &&
(version.is_empty() || crateid_version == version) &&
(hash.is_empty() || c.hash.as_slice() == hash) {
return Some(c.cnum);
@ -291,19 +299,19 @@ fn existing_match(e: &Env, name: @str, version: @str, hash: &str) -> Option<ast:
}
fn resolve_crate(e: &mut Env,
ident: @str,
name: @str,
version: @str,
hash: @str,
ident: ~str,
name: ~str,
version: ~str,
hash: ~str,
span: Span)
-> ast::CrateNum {
match existing_match(e, name, version, hash) {
match existing_match(e, name.clone(), version.clone(), hash.clone()) {
None => {
let load_ctxt = loader::Context {
sess: e.sess,
span: span,
ident: ident,
name: name,
name: name.clone(),
version: version,
hash: hash,
os: e.os,
@ -364,10 +372,13 @@ fn resolve_crate_deps(e: &mut Env, cdata: &[u8]) -> cstore::cnum_map {
let r = decoder::get_crate_deps(cdata);
for dep in r.iter() {
let extrn_cnum = dep.cnum;
let cname_str = token::ident_to_str(&dep.name);
let cname_str = token::get_ident(dep.name.name);
debug!("resolving dep crate {} ver: {} hash: {}",
cname_str, dep.vers, dep.hash);
match existing_match(e, cname_str, dep.vers, dep.hash) {
match existing_match(e,
cname_str.get().to_str(),
dep.vers.clone(),
dep.hash.clone()) {
Some(local_cnum) => {
debug!("already have it");
// We've already seen this crate
@ -379,8 +390,12 @@ fn resolve_crate_deps(e: &mut Env, cdata: &[u8]) -> cstore::cnum_map {
// FIXME (#2404): Need better error reporting than just a bogus
// span.
let fake_span = DUMMY_SP;
let local_cnum = resolve_crate(e, cname_str, cname_str, dep.vers,
dep.hash, fake_span);
let local_cnum = resolve_crate(e,
cname_str.get().to_str(),
cname_str.get().to_str(),
dep.vers.clone(),
dep.hash.clone(),
fake_span);
cnum_map.insert(extrn_cnum, local_cnum);
}
}
@ -411,8 +426,12 @@ impl Loader {
impl CrateLoader for Loader {
fn load_crate(&mut self, crate: &ast::ViewItem) -> MacroCrate {
let info = extract_crate_info(crate).unwrap();
let cnum = resolve_crate(&mut self.env, info.ident, info.name,
info.version, @"", crate.span);
let cnum = resolve_crate(&mut self.env,
info.ident.clone(),
info.name.clone(),
info.version.clone(),
~"",
crate.span);
let library = self.env.sess.cstore.get_used_crate_source(cnum).unwrap();
MacroCrate {
lib: library.dylib,

View file

@ -18,6 +18,7 @@ use middle::ty;
use middle::typeck;
use std::vec;
use std::rc::Rc;
use reader = extra::ebml::reader;
use syntax::ast;
use syntax::ast_map;
@ -221,8 +222,8 @@ pub fn get_field_type(tcx: ty::ctxt, class_id: ast::DefId,
class_id, def) );
let ty = decoder::item_type(def, the_field, tcx, cdata);
ty::ty_param_bounds_and_ty {
generics: ty::Generics {type_param_defs: @~[],
region_param_defs: @[]},
generics: ty::Generics {type_param_defs: Rc::new(~[]),
region_param_defs: Rc::new(~[])},
ty: ty
}
}

View file

@ -32,7 +32,7 @@ pub enum MetadataBlob {
}
pub struct crate_metadata {
name: @str,
name: ~str,
data: MetadataBlob,
cnum_map: cnum_map,
cnum: ast::CrateNum
@ -89,12 +89,12 @@ impl CStore {
*metas.get().get(&cnum)
}
pub fn get_crate_hash(&self, cnum: ast::CrateNum) -> @str {
pub fn get_crate_hash(&self, cnum: ast::CrateNum) -> ~str {
let cdata = self.get_crate_data(cnum);
decoder::get_crate_hash(cdata.data())
}
pub fn get_crate_vers(&self, cnum: ast::CrateNum) -> @str {
pub fn get_crate_vers(&self, cnum: ast::CrateNum) -> ~str {
let cdata = self.get_crate_data(cnum);
decoder::get_crate_vers(cdata.data())
}
@ -192,7 +192,7 @@ impl CStore {
// returns hashes of crates directly used by this crate. Hashes are sorted by
// (crate name, crate version, crate hash) in lexicographic order (not semver)
pub fn get_dep_hashes(&self) -> ~[@str] {
pub fn get_dep_hashes(&self) -> ~[~str] {
let mut result = ~[];
let extern_mod_crate_map = self.extern_mod_crate_map.borrow();
@ -202,7 +202,7 @@ impl CStore {
let vers = decoder::get_crate_vers(cdata.data());
debug!("Add hash[{}]: {} {}", cdata.name, vers, hash);
result.push(crate_hash {
name: cdata.name,
name: cdata.name.clone(),
vers: vers,
hash: hash
});
@ -215,15 +215,15 @@ impl CStore {
debug!(" hash[{}]: {}", x.name, x.hash);
}
result.map(|ch| ch.hash)
result.move_iter().map(|crate_hash { hash, ..}| hash).collect()
}
}
#[deriving(Clone, TotalEq, TotalOrd)]
struct crate_hash {
name: @str,
vers: @str,
hash: @str,
name: ~str,
vers: ~str,
hash: ~str,
}
impl crate_metadata {

View file

@ -25,11 +25,11 @@ use middle::ty;
use middle::typeck;
use middle::astencode::vtable_decoder_helpers;
use std::at_vec;
use std::u64;
use std::io;
use std::io::extensions::u64_from_be_bytes;
use std::option;
use std::rc::Rc;
use std::vec;
use extra::ebml::reader;
use extra::ebml;
@ -246,7 +246,7 @@ fn item_ty_param_defs(item: ebml::Doc,
tcx: ty::ctxt,
cdata: Cmd,
tag: uint)
-> @~[ty::TypeParameterDef] {
-> Rc<~[ty::TypeParameterDef]> {
let mut bounds = ~[];
reader::tagged_docs(item, tag, |p| {
let bd = parse_type_param_def_data(
@ -255,15 +255,15 @@ fn item_ty_param_defs(item: ebml::Doc,
bounds.push(bd);
true
});
@bounds
Rc::new(bounds)
}
fn item_region_param_defs(item_doc: ebml::Doc,
tcx: ty::ctxt,
cdata: Cmd)
-> @[ty::RegionParameterDef] {
at_vec::build(None, |push| {
reader::tagged_docs(item_doc, tag_region_param_def, |rp_doc| {
-> Rc<~[ty::RegionParameterDef]> {
let mut v = ~[];
reader::tagged_docs(item_doc, tag_region_param_def, |rp_doc| {
let ident_str_doc = reader::get_doc(rp_doc,
tag_region_param_def_ident);
let ident = item_name(tcx.sess.intr(), ident_str_doc);
@ -271,11 +271,11 @@ fn item_region_param_defs(item_doc: ebml::Doc,
tag_region_param_def_def_id);
let def_id = reader::with_doc_data(def_id_doc, parse_def_id);
let def_id = translate_def_id(cdata, def_id);
push(ty::RegionParameterDef { ident: ident,
def_id: def_id });
v.push(ty::RegionParameterDef { ident: ident,
def_id: def_id });
true
});
})
Rc::new(v)
}
fn item_ty_param_count(item: ebml::Doc) -> uint {
@ -1059,15 +1059,15 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] {
let mut items: ~[@ast::MetaItem] = ~[];
reader::tagged_docs(md, tag_meta_item_word, |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let n = nd.as_str_slice().to_managed();
let n = token::intern_and_get_ident(nd.as_str_slice());
items.push(attr::mk_word_item(n));
true
});
reader::tagged_docs(md, tag_meta_item_name_value, |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let vd = reader::get_doc(meta_item_doc, tag_meta_item_value);
let n = nd.as_str_slice().to_managed();
let v = vd.as_str_slice().to_managed();
let n = token::intern_and_get_ident(nd.as_str_slice());
let v = token::intern_and_get_ident(vd.as_str_slice());
// FIXME (#623): Should be able to decode MetaNameValue variants,
// but currently the encoder just drops them
items.push(attr::mk_name_value_item_str(n, v));
@ -1075,7 +1075,7 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] {
});
reader::tagged_docs(md, tag_meta_item_list, |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let n = nd.as_str_slice().to_managed();
let n = token::intern_and_get_ident(nd.as_str_slice());
let subitems = get_meta_items(meta_item_doc);
items.push(attr::mk_list_item(n, subitems));
true
@ -1130,8 +1130,8 @@ pub fn get_crate_attributes(data: &[u8]) -> ~[ast::Attribute] {
pub struct CrateDep {
cnum: ast::CrateNum,
name: ast::Ident,
vers: @str,
hash: @str
vers: ~str,
hash: ~str
}
pub fn get_crate_deps(data: &[u8]) -> ~[CrateDep] {
@ -1139,9 +1139,9 @@ pub fn get_crate_deps(data: &[u8]) -> ~[CrateDep] {
let cratedoc = reader::Doc(data);
let depsdoc = reader::get_doc(cratedoc, tag_crate_deps);
let mut crate_num = 1;
fn docstr(doc: ebml::Doc, tag_: uint) -> @str {
fn docstr(doc: ebml::Doc, tag_: uint) -> ~str {
let d = reader::get_doc(doc, tag_);
d.as_str_slice().to_managed()
d.as_str_slice().to_str()
}
reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| {
deps.push(CrateDep {cnum: crate_num,
@ -1159,24 +1159,29 @@ fn list_crate_deps(data: &[u8], out: &mut io::Writer) {
let r = get_crate_deps(data);
for dep in r.iter() {
write!(out, "{} {}-{}-{}\n",
dep.cnum, token::ident_to_str(&dep.name), dep.hash, dep.vers);
let string = token::get_ident(dep.name.name);
write!(out,
"{} {}-{}-{}\n",
dep.cnum,
string.get(),
dep.hash,
dep.vers);
}
write!(out, "\n");
}
pub fn get_crate_hash(data: &[u8]) -> @str {
pub fn get_crate_hash(data: &[u8]) -> ~str {
let cratedoc = reader::Doc(data);
let hashdoc = reader::get_doc(cratedoc, tag_crate_hash);
hashdoc.as_str_slice().to_managed()
hashdoc.as_str_slice().to_str()
}
pub fn get_crate_vers(data: &[u8]) -> @str {
pub fn get_crate_vers(data: &[u8]) -> ~str {
let attrs = decoder::get_crate_attributes(data);
match attr::find_crateid(attrs) {
None => @"0.0",
Some(crateid) => crateid.version_or_default().to_managed(),
None => ~"0.0",
Some(crateid) => crateid.version_or_default().to_str(),
}
}

View file

@ -21,29 +21,28 @@ use middle::ty;
use middle::typeck;
use middle;
use extra::serialize::Encodable;
use std::cast;
use std::cell::{Cell, RefCell};
use std::hashmap::{HashMap, HashSet};
use std::io::MemWriter;
use std::str;
use std::vec;
use extra::serialize::Encodable;
use syntax::abi::AbiSet;
use syntax::ast::*;
use syntax::ast;
use syntax::ast_map;
use syntax::ast_util::*;
use syntax::attr;
use syntax::ast_util;
use syntax::attr::AttrMetaMethods;
use syntax::attr;
use syntax::codemap;
use syntax::diagnostic::SpanHandler;
use syntax::parse::token::InternedString;
use syntax::parse::token::special_idents;
use syntax::ast_util;
use syntax::parse::token;
use syntax::visit::Visitor;
use syntax::visit;
use syntax::parse::token;
use syntax;
use writer = extra::ebml::writer;
@ -172,7 +171,7 @@ pub fn def_to_str(did: DefId) -> ~str {
fn encode_ty_type_param_defs(ebml_w: &mut writer::Encoder,
ecx: &EncodeContext,
params: @~[ty::TypeParameterDef],
params: &[ty::TypeParameterDef],
tag: uint) {
let ty_str_ctxt = @tyencode::ctxt {
diag: ecx.diag,
@ -189,7 +188,7 @@ fn encode_ty_type_param_defs(ebml_w: &mut writer::Encoder,
fn encode_region_param_defs(ebml_w: &mut writer::Encoder,
ecx: &EncodeContext,
params: @[ty::RegionParameterDef]) {
params: &[ty::RegionParameterDef]) {
for param in params.iter() {
ebml_w.start_tag(tag_region_param_def);
@ -216,9 +215,9 @@ fn encode_item_variances(ebml_w: &mut writer::Encoder,
fn encode_bounds_and_type(ebml_w: &mut writer::Encoder,
ecx: &EncodeContext,
tpt: &ty::ty_param_bounds_and_ty) {
encode_ty_type_param_defs(ebml_w, ecx, tpt.generics.type_param_defs,
encode_ty_type_param_defs(ebml_w, ecx, tpt.generics.type_param_defs(),
tag_items_data_item_ty_param_bounds);
encode_region_param_defs(ebml_w, ecx, tpt.generics.region_param_defs);
encode_region_param_defs(ebml_w, ecx, tpt.generics.region_param_defs());
encode_type(ecx, ebml_w, tpt.ty);
}
@ -491,7 +490,7 @@ fn encode_reexported_static_methods(ecx: &EncodeContext,
exp: &middle::resolve::Export2) {
match ecx.tcx.items.find(exp.def_id.node) {
Some(ast_map::NodeItem(item, path)) => {
let original_name = ecx.tcx.sess.str_of(item.ident);
let original_name = token::get_ident(item.ident.name);
//
// We don't need to reexport static methods on items
@ -503,7 +502,7 @@ fn encode_reexported_static_methods(ecx: &EncodeContext,
// encoded metadata for static methods relative to Bar,
// but not yet for Foo.
//
if mod_path != *path || exp.name != original_name {
if mod_path != *path || original_name.get() != exp.name {
if !encode_reexported_static_base_methods(ecx, ebml_w, exp) {
if encode_reexported_static_trait_methods(ecx, ebml_w, exp) {
debug!("(encode reexported static methods) {} \
@ -793,7 +792,7 @@ fn encode_method_ty_fields(ecx: &EncodeContext,
encode_def_id(ebml_w, method_ty.def_id);
encode_name(ecx, ebml_w, method_ty.ident);
encode_ty_type_param_defs(ebml_w, ecx,
method_ty.generics.type_param_defs,
method_ty.generics.type_param_defs(),
tag_item_method_tps);
encode_method_fty(ecx, ebml_w, &method_ty.fty);
encode_visibility(ebml_w, method_ty.vis);
@ -834,7 +833,7 @@ fn encode_info_for_method(ecx: &EncodeContext,
}
for &ast_method in ast_method_opt.iter() {
let num_params = tpt.generics.type_param_defs.len();
let num_params = tpt.generics.type_param_defs().len();
if num_params > 0u || is_default_impl
|| should_inline(ast_method.attrs) {
(ecx.encode_inlined_item)(
@ -1178,10 +1177,10 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_item_variances(ebml_w, ecx, item.id);
let trait_def = ty::lookup_trait_def(tcx, def_id);
encode_ty_type_param_defs(ebml_w, ecx,
trait_def.generics.type_param_defs,
trait_def.generics.type_param_defs(),
tag_items_data_item_ty_param_bounds);
encode_region_param_defs(ebml_w, ecx,
trait_def.generics.region_param_defs);
trait_def.generics.region_param_defs());
encode_trait_ref(ebml_w, ecx, trait_def.trait_ref, tag_item_trait_ref);
encode_name(ecx, ebml_w, item.ident);
encode_attributes(ebml_w, item.attrs);
@ -1357,11 +1356,10 @@ fn my_visit_foreign_item(ni: &ForeignItem,
index: @RefCell<~[entry<i64>]>) {
match items.get(ni.id) {
ast_map::NodeForeignItem(_, abi, _, pt) => {
let string = token::get_ident(ni.ident.name);
debug!("writing foreign item {}::{}",
ast_map::path_to_str(
*pt,
token::get_ident_interner()),
token::ident_to_str(&ni.ident));
ast_map::path_to_str(*pt, token::get_ident_interner()),
string.get());
let mut ebml_w = unsafe {
ebml_w.unsafe_clone()
@ -1513,32 +1511,32 @@ fn write_i64(writer: &mut MemWriter, &n: &i64) {
fn encode_meta_item(ebml_w: &mut writer::Encoder, mi: @MetaItem) {
match mi.node {
MetaWord(name) => {
MetaWord(ref name) => {
ebml_w.start_tag(tag_meta_item_word);
ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(name.as_bytes());
ebml_w.writer.write(name.get().as_bytes());
ebml_w.end_tag();
ebml_w.end_tag();
}
MetaNameValue(name, value) => {
MetaNameValue(ref name, ref value) => {
match value.node {
LitStr(value, _) => {
LitStr(ref value, _) => {
ebml_w.start_tag(tag_meta_item_name_value);
ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(name.as_bytes());
ebml_w.writer.write(name.get().as_bytes());
ebml_w.end_tag();
ebml_w.start_tag(tag_meta_item_value);
ebml_w.writer.write(value.as_bytes());
ebml_w.writer.write(value.get().as_bytes());
ebml_w.end_tag();
ebml_w.end_tag();
}
_ => {/* FIXME (#623): encode other variants */ }
}
}
MetaList(name, ref items) => {
MetaList(ref name, ref items) => {
ebml_w.start_tag(tag_meta_item_list);
ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(name.as_bytes());
ebml_w.writer.write(name.get().as_bytes());
ebml_w.end_tag();
for inner_item in items.iter() {
encode_meta_item(ebml_w, *inner_item);
@ -1569,13 +1567,13 @@ fn synthesize_crate_attrs(ecx: &EncodeContext,
attr::mk_attr(
attr::mk_name_value_item_str(
@"crate_id",
ecx.link_meta.crateid.to_str().to_managed()))
InternedString::new("crate_id"),
token::intern_and_get_ident(ecx.link_meta.crateid.to_str())))
}
let mut attrs = ~[];
for attr in crate.attrs.iter() {
if "crate_id" != attr.name() {
if !attr.name().equiv(&("crate_id")) {
attrs.push(*attr);
}
}
@ -1621,7 +1619,7 @@ fn encode_crate_deps(ecx: &EncodeContext,
ebml_w.start_tag(tag_crate_deps);
let r = get_ordered_deps(ecx, cstore);
for dep in r.iter() {
encode_crate_dep(ecx, ebml_w, *dep);
encode_crate_dep(ecx, ebml_w, (*dep).clone());
}
ebml_w.end_tag();
}

View file

@ -46,10 +46,10 @@ pub enum Os {
pub struct Context {
sess: Session,
span: Span,
ident: @str,
name: @str,
version: @str,
hash: @str,
ident: ~str,
name: ~str,
version: ~str,
hash: ~str,
os: Os,
intr: @IdentInterner
}
@ -80,7 +80,7 @@ impl Context {
fn find_library_crate(&self) -> Option<Library> {
let filesearch = self.sess.filesearch;
let crate_name = self.name;
let crate_name = self.name.clone();
let (dyprefix, dysuffix) = self.dylibname();
// want: crate_name.dir_part() + prefix + crate_name.file_part + "-"
@ -109,8 +109,10 @@ impl Context {
} else if candidate {
match get_metadata_section(self.os, path) {
Some(cvec) =>
if crate_matches(cvec.as_slice(), self.name,
self.version, self.hash) {
if crate_matches(cvec.as_slice(),
self.name.clone(),
self.version.clone(),
self.hash.clone()) {
debug!("found {} with matching crate_id",
path.display());
let (rlib, dylib) = if file.ends_with(".rlib") {
@ -235,9 +237,9 @@ pub fn note_crateid_attr(diag: @SpanHandler, crateid: &CrateId) {
}
fn crate_matches(crate_data: &[u8],
name: @str,
version: @str,
hash: @str) -> bool {
name: ~str,
version: ~str,
hash: ~str) -> bool {
let attrs = decoder::get_crate_attributes(crate_data);
match attr::find_crateid(attrs) {
None => false,
@ -246,8 +248,9 @@ fn crate_matches(crate_data: &[u8],
let chash = decoder::get_crate_hash(crate_data);
if chash != hash { return false; }
}
name == crateid.name.to_managed() &&
(version.is_empty() || version == crateid.version_or_default().to_managed())
name == crateid.name &&
(version.is_empty() ||
crateid.version_or_default() == version)
}
}
}

View file

@ -156,7 +156,6 @@ fn parse_vstore(st: &mut PState, conv: conv_did) -> ty::vstore {
match next(st) {
'~' => ty::vstore_uniq,
'@' => ty::vstore_box,
'&' => ty::vstore_slice(parse_region(st, conv)),
c => st.tcx.sess.bug(format!("parse_vstore(): bad input '{}'", c))
}

View file

@ -45,7 +45,7 @@ pub struct ctxt {
pub struct ty_abbrev {
pos: uint,
len: uint,
s: @str
s: ~str
}
pub enum abbrev_ctxt {
@ -65,19 +65,21 @@ pub fn enc_ty(w: &mut MemWriter, cx: @ctxt, t: ty::t) {
let short_names_cache = cx.tcx.short_names_cache.borrow();
result_str_opt = short_names_cache.get()
.find(&t)
.map(|result| *result);
.map(|result| {
(*result).clone()
});
}
let result_str = match result_str_opt {
Some(s) => s,
None => {
let wr = &mut MemWriter::new();
enc_sty(wr, cx, &ty::get(t).sty);
let s = str::from_utf8(wr.get_ref()).unwrap().to_managed();
let s = str::from_utf8(wr.get_ref()).unwrap();
let mut short_names_cache = cx.tcx
.short_names_cache
.borrow_mut();
short_names_cache.get().insert(t, s);
s
short_names_cache.get().insert(t, s.to_str());
s.to_str()
}
};
w.write(result_str.as_bytes());
@ -103,7 +105,7 @@ pub fn enc_ty(w: &mut MemWriter, cx: @ctxt, t: ty::t) {
let abbrev_len = 3 + estimate_sz(pos) + estimate_sz(len);
if abbrev_len < len {
// I.e. it's actually an abbreviation.
let s = format!("\\#{:x}:{:x}\\#", pos, len).to_managed();
let s = format!("\\#{:x}:{:x}\\#", pos, len);
let a = ty_abbrev { pos: pos as uint,
len: len as uint,
s: s };
@ -217,7 +219,6 @@ pub fn enc_vstore(w: &mut MemWriter, cx: @ctxt, v: ty::vstore) {
match v {
ty::vstore_fixed(u) => mywrite!(w, "{}|", u),
ty::vstore_uniq => mywrite!(w, "~"),
ty::vstore_box => mywrite!(w, "@"),
ty::vstore_slice(r) => {
mywrite!(w, "&");
enc_region(w, cx, r);

View file

@ -32,10 +32,10 @@ use syntax::fold::Folder;
use syntax::parse::token;
use syntax;
use std::at_vec;
use std::libc;
use std::cast;
use std::io::Seek;
use std::rc::Rc;
use extra::ebml::reader;
use extra::ebml;
@ -812,13 +812,13 @@ impl<'a> ebml_writer_helpers for writer::Encoder<'a> {
this.emit_struct_field("generics", 0, |this| {
this.emit_struct("Generics", 2, |this| {
this.emit_struct_field("type_param_defs", 0, |this| {
this.emit_from_vec(*tpbt.generics.type_param_defs,
this.emit_from_vec(tpbt.generics.type_param_defs(),
|this, type_param_def| {
this.emit_type_param_def(ecx, type_param_def);
})
});
this.emit_struct_field("region_param_defs", 1, |this| {
tpbt.generics.region_param_defs.encode(this);
tpbt.generics.region_param_defs().encode(this);
})
})
});
@ -997,7 +997,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
ebml_w.tag(c::tag_table_tcache, |ebml_w| {
ebml_w.id(id);
ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_tpbt(ecx, *tpbt);
ebml_w.emit_tpbt(ecx, tpbt.clone());
})
})
}
@ -1064,7 +1064,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
ebml_w.tag(c::tag_table_capture_map, |ebml_w| {
ebml_w.id(id);
ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_from_vec(*cap_vars, |ebml_w, cap_var| {
ebml_w.emit_from_vec(*cap_vars.borrow(), |ebml_w, cap_var| {
cap_var.encode(ebml_w);
})
})
@ -1183,8 +1183,8 @@ impl<'a> ebml_decoder_decoder_helpers for reader::Decoder<'a> {
this.read_struct_field("type_param_defs",
0,
|this| {
@this.read_to_vec(|this|
this.read_type_param_def(xcx))
Rc::new(this.read_to_vec(|this|
this.read_type_param_def(xcx)))
}),
region_param_defs:
this.read_struct_field("region_param_defs",
@ -1382,13 +1382,11 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
}
c::tag_table_capture_map => {
let cvars =
at_vec::to_managed_move(
val_dsr.read_to_vec(
|val_dsr| val_dsr.read_capture_var(xcx)));
val_dsr.read_to_vec(|val_dsr| val_dsr.read_capture_var(xcx));
let mut capture_map = dcx.maps
.capture_map
.borrow_mut();
capture_map.get().insert(id, cvars);
capture_map.get().insert(id, Rc::new(cvars));
}
_ => {
xcx.dcx.tcx.sess.bug(

View file

@ -733,7 +733,7 @@ fn check_loans_in_fn<'a>(this: &mut CheckLoanCtxt<'a>,
span: Span) {
let capture_map = this.bccx.capture_map.borrow();
let cap_vars = capture_map.get().get(&closure_id);
for cap_var in cap_vars.iter() {
for cap_var in cap_vars.borrow().iter() {
let var_id = ast_util::def_id_of_def(cap_var.def).node;
let var_path = @LpVar(var_id);
this.check_if_path_is_moved(closure_id, span,
@ -839,4 +839,3 @@ fn check_loans_in_block<'a>(this: &mut CheckLoanCtxt<'a>,
visit::walk_block(this, blk, ());
this.check_for_conflicting_loans(blk.id);
}

View file

@ -69,7 +69,7 @@ pub fn gather_captures(bccx: &BorrowckCtxt,
closure_expr: &ast::Expr) {
let capture_map = bccx.capture_map.borrow();
let captured_vars = capture_map.get().get(&closure_expr.id);
for captured_var in captured_vars.iter() {
for captured_var in captured_vars.borrow().iter() {
match captured_var.mode {
moves::CapMove => {
let fvar_id = ast_util::def_id_of_def(captured_var.def).node;

View file

@ -194,16 +194,6 @@ pub struct BorrowStats {
//
// Note that there is no entry with derefs:3---the type of that expression
// is T, which is not a box.
//
// Note that implicit dereferences also occur with indexing of `@[]`,
// `@str`, etc. The same rules apply. So, for example, given a
// variable `x` of type `@[@[...]]`, if I have an instance of the
// expression `x[0]` which is then auto-slice'd, there would be two
// potential entries in the root map, both with the id of the `x[0]`
// expression. The entry with `derefs==0` refers to the deref of `x`
// used as part of evaluating `x[0]`. The entry with `derefs==1`
// refers to the deref of the `x[0]` that occurs as part of the
// auto-slice.
#[deriving(Eq, IterBytes)]
pub struct root_map_key {
id: ast::NodeId,
@ -774,7 +764,8 @@ impl BorrowckCtxt {
match pat.node {
ast::PatIdent(_, ref path, _) => {
let ident = ast_util::path_to_ident(path);
out.push_str(token::ident_to_str(&ident));
let string = token::get_ident(ident.name);
out.push_str(string.get());
}
_ => {
self.tcx.sess.bug(
@ -795,8 +786,9 @@ impl BorrowckCtxt {
self.append_loan_path_to_str_from_interior(lp_base, out);
match fname {
mc::NamedField(ref fname) => {
let string = token::get_ident(*fname);
out.push_char('.');
out.push_str(token::interner_get(*fname));
out.push_str(string.get());
}
mc::PositionalField(idx) => {
out.push_char('#'); // invent a notation here

View file

@ -192,8 +192,7 @@ pub fn check_expr(v: &mut CheckCrateVisitor,
"references in constants may only refer to \
immutable values");
},
ExprVstore(_, ExprVstoreUniq) |
ExprVstore(_, ExprVstoreBox) => {
ExprVstore(_, ExprVstoreUniq) => {
sess.span_err(e.span, "cannot allocate vectors in constant expressions")
},

View file

@ -176,8 +176,8 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) {
match ty::get(ty).sty {
ty::ty_bool => {
match *ctor {
val(const_bool(true)) => Some(@"true"),
val(const_bool(false)) => Some(@"false"),
val(const_bool(true)) => Some(~"true"),
val(const_bool(false)) => Some(~"false"),
_ => None
}
}
@ -197,7 +197,7 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) {
}
ty::ty_unboxed_vec(..) | ty::ty_vec(..) => {
match *ctor {
vec(n) => Some(format!("vectors of length {}", n).to_managed()),
vec(n) => Some(format!("vectors of length {}", n)),
_ => None
}
}
@ -214,9 +214,14 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) {
type matrix = ~[~[@Pat]];
enum useful { useful(ty::t, ctor), useful_, not_useful }
#[deriving(Clone)]
enum useful {
useful(ty::t, ctor),
useful_,
not_useful,
}
#[deriving(Eq)]
#[deriving(Clone, Eq)]
enum ctor {
single,
variant(DefId),
@ -261,7 +266,7 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
val(const_bool(false)),
0u, left_ty)
}
ref u => *u,
ref u => (*u).clone(),
}
}
ty::ty_enum(eid, _) => {
@ -269,7 +274,7 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
match is_useful_specialized(cx, m, v, variant(va.id),
va.args.len(), left_ty) {
not_useful => (),
ref u => return *u,
ref u => return (*u).clone(),
}
}
not_useful
@ -289,7 +294,7 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
for n in iter::range(0u, max_len + 1) {
match is_useful_specialized(cx, m, v, vec(n), n, left_ty) {
not_useful => (),
ref u => return *u,
ref u => return (*u).clone(),
}
}
not_useful
@ -304,15 +309,15 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
match is_useful(cx,
&m.iter().filter_map(|r| default(cx, *r)).collect::<matrix>(),
v.tail()) {
useful_ => useful(left_ty, *ctor),
ref u => *u,
useful_ => useful(left_ty, (*ctor).clone()),
ref u => (*u).clone(),
}
}
}
}
Some(ref v0_ctor) => {
let arity = ctor_arity(cx, v0_ctor, left_ty);
is_useful_specialized(cx, m, v, *v0_ctor, arity, left_ty)
is_useful_specialized(cx, m, v, (*v0_ctor).clone(), arity, left_ty)
}
}
}
@ -329,7 +334,7 @@ fn is_useful_specialized(cx: &MatchCheckCtxt,
cx, &ms, specialize(cx, v, &ctor, arity, lty).unwrap());
match could_be_useful {
useful_ => useful(lty, ctor),
ref u => *u,
ref u => (*u).clone(),
}
}
@ -407,7 +412,7 @@ fn missing_ctor(cx: &MatchCheckCtxt,
let r = pat_ctor_id(cx, r[0]);
for id in r.iter() {
if !found.contains(id) {
found.push(*id);
found.push((*id).clone());
}
}
}
@ -770,8 +775,8 @@ fn specialize(cx: &MatchCheckCtxt,
}
PatRange(lo, hi) => {
let (c_lo, c_hi) = match *ctor_id {
val(ref v) => (*v, *v),
range(ref lo, ref hi) => (*lo, *hi),
val(ref v) => ((*v).clone(), (*v).clone()),
range(ref lo, ref hi) => ((*lo).clone(), (*hi).clone()),
single => return Some(r.tail().to_owned()),
_ => fail!("type error")
};

View file

@ -16,13 +16,15 @@ use middle::ty;
use middle::typeck::astconv;
use middle;
use syntax::{ast, ast_map, ast_util};
use syntax::visit;
use syntax::visit::Visitor;
use syntax::ast::*;
use syntax::parse::token::InternedString;
use syntax::visit::Visitor;
use syntax::visit;
use syntax::{ast, ast_map, ast_util};
use std::cell::RefCell;
use std::hashmap::HashMap;
use std::rc::Rc;
//
// This pass classifies expressions by their constant-ness.
@ -238,7 +240,6 @@ impl ConstEvalVisitor {
match vstore {
ast::ExprVstoreSlice => self.classify(e),
ast::ExprVstoreUniq |
ast::ExprVstoreBox |
ast::ExprVstoreMutSlice => non_const
}
}
@ -319,8 +320,8 @@ pub enum const_val {
const_float(f64),
const_int(i64),
const_uint(u64),
const_str(@str),
const_binary(@[u8]),
const_str(InternedString),
const_binary(Rc<~[u8]>),
const_bool(bool)
}
@ -508,15 +509,15 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
pub fn lit_to_const(lit: &Lit) -> const_val {
match lit.node {
LitStr(s, _) => const_str(s),
LitBinary(data) => const_binary(data),
LitStr(ref s, _) => const_str((*s).clone()),
LitBinary(ref data) => const_binary(data.clone()),
LitChar(n) => const_uint(n as u64),
LitInt(n, _) => const_int(n),
LitUint(n, _) => const_uint(n),
LitIntUnsuffixed(n) => const_int(n),
LitFloat(n, _) => const_float(from_str::<f64>(n).unwrap() as f64),
LitFloatUnsuffixed(n) =>
const_float(from_str::<f64>(n).unwrap() as f64),
LitFloat(ref n, _) | LitFloatUnsuffixed(ref n) => {
const_float(from_str::<f64>(n.get()).unwrap() as f64)
}
LitNil => const_int(0i64),
LitBool(b) => const_bool(b)
}
@ -530,7 +531,7 @@ pub fn compare_const_vals(a: &const_val, b: &const_val) -> Option<int> {
(&const_int(a), &const_int(b)) => compare_vals(a, b),
(&const_uint(a), &const_uint(b)) => compare_vals(a, b),
(&const_float(a), &const_float(b)) => compare_vals(a, b),
(&const_str(a), &const_str(b)) => compare_vals(a, b),
(&const_str(ref a), &const_str(ref b)) => compare_vals(a, b),
(&const_bool(a), &const_bool(b)) => compare_vals(a, b),
_ => None
}

View file

@ -360,9 +360,10 @@ impl DeadVisitor {
fn warn_dead_code(&mut self, id: ast::NodeId,
span: codemap::Span, ident: &ast::Ident) {
let string = token::get_ident(ident.name);
self.tcx.sess.add_lint(DeadCode, id, span,
format!("code is never used: `{}`",
token::ident_to_str(ident)));
string.get()));
}
}

View file

@ -277,7 +277,7 @@ pub fn check_expr(cx: &mut Context, e: &Expr) {
ExprPath(_) => {
let did = ast_util::def_id_of_def(def_map.get()
.get_copy(&e.id));
ty::lookup_item_type(cx.tcx, did).generics.type_param_defs
ty::lookup_item_type(cx.tcx, did).generics.type_param_defs.clone()
}
_ => {
// Type substitutions should only occur on paths and
@ -289,6 +289,7 @@ pub fn check_expr(cx: &mut Context, e: &Expr) {
"non path/method call expr has type substs??")
}
};
let type_param_defs = type_param_defs.borrow();
if ts.len() != type_param_defs.len() {
// Fail earlier to make debugging easier
fail!("internal error: in kind::check_expr, length \
@ -362,8 +363,8 @@ fn check_ty(cx: &mut Context, aty: &Ty) {
for ts in r.iter() {
let def_map = cx.tcx.def_map.borrow();
let did = ast_util::def_id_of_def(def_map.get().get_copy(&id));
let type_param_defs =
ty::lookup_item_type(cx.tcx, did).generics.type_param_defs;
let generics = ty::lookup_item_type(cx.tcx, did).generics;
let type_param_defs = generics.type_param_defs();
for (&ty, type_param_def) in ts.iter().zip(type_param_defs.iter()) {
check_typaram_bounds(cx, aty.id, aty.span, ty, type_param_def)
}

View file

@ -26,8 +26,9 @@ use middle::ty::{BuiltinBound, BoundFreeze, BoundPod, BoundSend, BoundSized};
use syntax::ast;
use syntax::ast_util::local_def;
use syntax::attr::AttrMetaMethods;
use syntax::visit;
use syntax::parse::token::InternedString;
use syntax::visit::Visitor;
use syntax::visit;
use std::hashmap::HashMap;
use std::iter::Enumerate;
@ -182,11 +183,11 @@ impl LanguageItemCollector {
}
}
pub fn extract(attrs: &[ast::Attribute]) -> Option<@str> {
pub fn extract(attrs: &[ast::Attribute]) -> Option<InternedString> {
for attribute in attrs.iter() {
match attribute.name_str_pair() {
Some((key, value)) if "lang" == key => {
return Some(value);
Some((ref key, ref value)) if key.equiv(&("lang")) => {
return Some((*value).clone());
}
Some(..) | None => {}
}

View file

@ -34,18 +34,17 @@
//! Context itself, span_lint should be used instead of add_lint.
use driver::session;
use metadata::csearch;
use middle::dead::DEAD_CODE_LINT_STR;
use middle::pat_util;
use middle::privacy;
use middle::trans::adt; // for `adt::is_ffi_safe`
use middle::ty;
use middle::typeck;
use middle::pat_util;
use metadata::csearch;
use util::ppaux::{ty_to_str};
use std::to_str::ToStr;
use middle::typeck::infer;
use middle::typeck::astconv::{ast_ty_to_ty, AstConv};
use middle::typeck::infer;
use middle::typeck;
use std::to_str::ToStr;
use util::ppaux::{ty_to_str};
use std::cmp;
use std::hashmap::HashMap;
@ -59,13 +58,14 @@ use std::u64;
use std::u8;
use extra::smallintmap::SmallIntMap;
use syntax::ast_map;
use syntax::attr;
use syntax::attr::{AttrMetaMethods, AttributeMethods};
use syntax::codemap::Span;
use syntax::parse::token;
use syntax::{ast, ast_util, visit};
use syntax::ast_util::IdVisitingOperation;
use syntax::attr::{AttrMetaMethods, AttributeMethods};
use syntax::attr;
use syntax::codemap::Span;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::visit::Visitor;
use syntax::{ast, ast_util, visit};
#[deriving(Clone, Eq, Ord, TotalEq, TotalOrd)]
pub enum Lint {
@ -540,10 +540,16 @@ impl<'a> Context<'a> {
});
let old_is_doc_hidden = self.is_doc_hidden;
self.is_doc_hidden = self.is_doc_hidden ||
attrs.iter().any(|attr| ("doc" == attr.name() && match attr.meta_item_list()
{ None => false,
Some(l) => attr::contains_name(l, "hidden") }));
self.is_doc_hidden =
self.is_doc_hidden ||
attrs.iter()
.any(|attr| {
attr.name().equiv(&("doc")) &&
match attr.meta_item_list() {
None => false,
Some(l) => attr::contains_name(l, "hidden")
}
});
f(self);
@ -569,12 +575,12 @@ impl<'a> Context<'a> {
// Return true if that's the case. Otherwise return false.
pub fn each_lint(sess: session::Session,
attrs: &[ast::Attribute],
f: |@ast::MetaItem, level, @str| -> bool)
f: |@ast::MetaItem, level, InternedString| -> bool)
-> bool {
let xs = [allow, warn, deny, forbid];
for &level in xs.iter() {
let level_name = level_to_str(level);
for attr in attrs.iter().filter(|m| level_name == m.name()) {
for attr in attrs.iter().filter(|m| m.name().equiv(&level_name)) {
let meta = attr.node.value;
let metas = match meta.node {
ast::MetaList(_, ref metas) => metas,
@ -585,8 +591,8 @@ pub fn each_lint(sess: session::Session,
};
for meta in metas.iter() {
match meta.node {
ast::MetaWord(lintname) => {
if !f(*meta, level, lintname) {
ast::MetaWord(ref lintname) => {
if !f(*meta, level, (*lintname).clone()) {
return false;
}
}
@ -603,15 +609,17 @@ pub fn each_lint(sess: session::Session,
// Check from a list of attributes if it contains the appropriate
// `#[level(lintname)]` attribute (e.g. `#[allow(dead_code)]).
pub fn contains_lint(attrs: &[ast::Attribute],
level: level, lintname: &'static str) -> bool {
level: level,
lintname: &'static str)
-> bool {
let level_name = level_to_str(level);
for attr in attrs.iter().filter(|m| level_name == m.name()) {
for attr in attrs.iter().filter(|m| m.name().equiv(&level_name)) {
if attr.meta_item_list().is_none() {
continue
}
let list = attr.meta_item_list().unwrap();
for meta_item in list.iter() {
if lintname == meta_item.name() {
if meta_item.name().equiv(&lintname) {
return true;
}
}
@ -879,8 +887,7 @@ fn check_heap_type(cx: &Context, span: Span, ty: ty::t) {
let mut n_uniq = 0;
ty::fold_ty(cx.tcx, ty, |t| {
match ty::get(t).sty {
ty::ty_box(_) | ty::ty_str(ty::vstore_box) |
ty::ty_vec(_, ty::vstore_box) |
ty::ty_box(_) |
ty::ty_trait(_, _, ty::BoxTraitStore, _, _) => {
n_box += 1;
}
@ -1240,8 +1247,7 @@ fn check_unnecessary_allocation(cx: &Context, e: &ast::Expr) {
// Warn if string and vector literals with sigils, or boxing expressions,
// are immediately borrowed.
let allocation = match e.node {
ast::ExprVstore(e2, ast::ExprVstoreUniq) |
ast::ExprVstore(e2, ast::ExprVstoreBox) => {
ast::ExprVstore(e2, ast::ExprVstoreUniq) => {
match e2.node {
ast::ExprLit(lit) if ast_util::lit_is_str(lit) => {
VectorAllocation
@ -1314,7 +1320,7 @@ fn check_missing_doc_attrs(cx: &Context,
let has_doc = attrs.iter().any(|a| {
match a.node.value.node {
ast::MetaNameValue(ref name, _) if "doc" == *name => true,
ast::MetaNameValue(ref name, _) if name.equiv(&("doc")) => true,
_ => false
}
});

View file

@ -120,6 +120,7 @@ use std::vec;
use syntax::ast::*;
use syntax::codemap::Span;
use syntax::parse::token::special_idents;
use syntax::parse::token;
use syntax::print::pprust::{expr_to_str, block_to_str};
use syntax::{visit, ast_util};
use syntax::visit::{Visitor, FnKind};
@ -332,13 +333,14 @@ impl IrMaps {
}
}
pub fn variable_name(&self, var: Variable) -> @str {
pub fn variable_name(&self, var: Variable) -> ~str {
let var_kinds = self.var_kinds.borrow();
match var_kinds.get()[var.get()] {
Local(LocalInfo { ident: nm, .. }) | Arg(_, nm) => {
self.tcx.sess.str_of(nm)
let string = token::get_ident(nm.name);
string.get().to_str()
},
ImplicitRet => @"<implicit-ret>"
ImplicitRet => ~"<implicit-ret>"
}
}
@ -500,7 +502,7 @@ fn visit_expr(v: &mut LivenessVisitor, expr: &Expr, this: @IrMaps) {
let capture_map = this.capture_map.borrow();
let cvs = capture_map.get().get(&expr.id);
let mut call_caps = ~[];
for cv in cvs.iter() {
for cv in cvs.borrow().iter() {
match moves::moved_variable_node_id_from_def(cv.def) {
Some(rv) => {
let cv_ln = this.add_live_node(FreeVarNode(cv.span));
@ -1669,7 +1671,7 @@ impl Liveness {
}
}
pub fn should_warn(&self, var: Variable) -> Option<@str> {
pub fn should_warn(&self, var: Variable) -> Option<~str> {
let name = self.ir.variable_name(var);
if name.len() == 0 || name[0] == ('_' as u8) { None } else { Some(name) }
}

View file

@ -178,9 +178,7 @@ pub fn opt_deref_kind(t: ty::t) -> Option<deref_kind> {
}
ty::ty_box(_) |
ty::ty_vec(_, ty::vstore_box) |
ty::ty_trait(_, _, ty::BoxTraitStore, _, _) |
ty::ty_str(ty::vstore_box) => {
ty::ty_trait(_, _, ty::BoxTraitStore, _, _) => {
Some(deref_ptr(gc_ptr))
}
@ -1233,7 +1231,10 @@ pub fn ptr_sigil(ptr: PointerKind) -> ~str {
impl Repr for InteriorKind {
fn repr(&self, _tcx: ty::ctxt) -> ~str {
match *self {
InteriorField(NamedField(fld)) => token::interner_get(fld).to_owned(),
InteriorField(NamedField(fld)) => {
let string = token::get_ident(fld);
string.get().to_owned()
}
InteriorField(PositionalField(i)) => format!("\\#{:?}", i),
InteriorElement(_) => ~"[]",
}

View file

@ -136,9 +136,9 @@ use util::ppaux::Repr;
use util::common::indenter;
use util::ppaux::UserString;
use std::at_vec;
use std::cell::RefCell;
use std::hashmap::{HashSet, HashMap};
use std::rc::Rc;
use syntax::ast::*;
use syntax::ast_util;
use syntax::visit;
@ -159,7 +159,7 @@ pub struct CaptureVar {
mode: CaptureMode // How variable is being accessed
}
pub type CaptureMap = @RefCell<HashMap<NodeId, @[CaptureVar]>>;
pub type CaptureMap = @RefCell<HashMap<NodeId, Rc<~[CaptureVar]>>>;
pub type MovesMap = @RefCell<HashSet<NodeId>>;
@ -681,23 +681,22 @@ impl VisitContext {
self.consume_expr(arg_expr)
}
pub fn compute_captures(&mut self, fn_expr_id: NodeId) -> @[CaptureVar] {
pub fn compute_captures(&mut self, fn_expr_id: NodeId) -> Rc<~[CaptureVar]> {
debug!("compute_capture_vars(fn_expr_id={:?})", fn_expr_id);
let _indenter = indenter();
let fn_ty = ty::node_id_to_type(self.tcx, fn_expr_id);
let sigil = ty::ty_closure_sigil(fn_ty);
let freevars = freevars::get_freevars(self.tcx, fn_expr_id);
if sigil == BorrowedSigil {
let v = if sigil == BorrowedSigil {
// || captures everything by ref
at_vec::from_fn(freevars.len(), |i| {
let fvar = &freevars[i];
CaptureVar {def: fvar.def, span: fvar.span, mode: CapRef}
})
freevars.iter()
.map(|fvar| CaptureVar {def: fvar.def, span: fvar.span, mode: CapRef})
.collect()
} else {
// @fn() and ~fn() capture by copy or by move depending on type
at_vec::from_fn(freevars.len(), |i| {
let fvar = &freevars[i];
freevars.iter()
.map(|fvar| {
let fvar_def_id = ast_util::def_id_of_def(fvar.def).node;
let fvar_ty = ty::node_id_to_type(self.tcx, fvar_def_id);
debug!("fvar_def_id={:?} fvar_ty={}",
@ -708,7 +707,9 @@ impl VisitContext {
CapCopy
};
CaptureVar {def: fvar.def, span: fvar.span, mode:mode}
})
}
}).collect()
};
Rc::new(v)
}
}

View file

@ -530,8 +530,10 @@ impl<'a> PrivacyVisitor<'a> {
ast::ItemTrait(..) => "trait",
_ => return false,
};
let msg = format!("{} `{}` is private", desc,
token::ident_to_str(&item.ident));
let string = token::get_ident(item.ident.name);
let msg = format!("{} `{}` is private",
desc,
string.get());
self.tcx.sess.span_note(span, msg);
}
Some(..) | None => {}
@ -588,8 +590,10 @@ impl<'a> PrivacyVisitor<'a> {
if struct_vis != ast::Public && field.vis == ast::Public { break }
if !is_local(field.id) ||
!self.private_accessible(field.id.node) {
self.tcx.sess.span_err(span, format!("field `{}` is private",
token::ident_to_str(&ident)));
let string = token::get_ident(ident.name);
self.tcx.sess.span_err(span,
format!("field `{}` is private",
string.get()))
}
break;
}
@ -603,8 +607,11 @@ impl<'a> PrivacyVisitor<'a> {
let method_id = ty::method(self.tcx, method_id).provided_source
.unwrap_or(method_id);
self.ensure_public(span, method_id, None,
format!("method `{}`", token::ident_to_str(name)));
let string = token::get_ident(name.name);
self.ensure_public(span,
method_id,
None,
format!("method `{}`", string.get()));
}
// Checks that a path is in scope.
@ -617,10 +624,17 @@ impl<'a> PrivacyVisitor<'a> {
match *self.last_private_map.get(&path_id) {
resolve::AllPublic => {},
resolve::DependsOn(def) => {
let name = token::ident_to_str(&path.segments.last().unwrap()
.identifier);
self.ensure_public(span, def, Some(origdid),
format!("{} `{}`", tyname, name));
let name = token::get_ident(path.segments
.last()
.unwrap()
.identifier
.name);
self.ensure_public(span,
def,
Some(origdid),
format!("{} `{}`",
tyname,
name.get()));
}
}
};

View file

@ -20,9 +20,8 @@ use syntax::ast::*;
use syntax::ast;
use syntax::ast_util::{def_id_of_def, local_def, mtwt_resolve};
use syntax::ast_util::{path_to_ident, walk_pat, trait_method_to_ty_method};
use syntax::parse::token::{IdentInterner, special_idents};
use syntax::parse::token;
use syntax::parse::token::{IdentInterner, interner_get};
use syntax::parse::token::special_idents;
use syntax::print::pprust::path_to_str;
use syntax::codemap::{Span, DUMMY_SP, Pos};
use syntax::opt_vec::OptVec;
@ -53,7 +52,7 @@ pub type TraitMap = HashMap<NodeId,@RefCell<~[DefId]>>;
pub type ExportMap2 = @RefCell<HashMap<NodeId, ~[Export2]>>;
pub struct Export2 {
name: @str, // The name of the target.
name: ~str, // The name of the target.
def_id: DefId, // The definition of the target.
}
@ -1894,8 +1893,9 @@ impl Resolver {
csearch::each_child_of_item(self.session.cstore,
def_id,
|def_like, child_ident, visibility| {
let child_ident_string = token::get_ident(child_ident.name);
debug!("(populating external module) ... found ident: {}",
token::ident_to_str(&child_ident));
child_ident_string.get());
self.build_reduced_graph_for_external_crate_def(module,
def_like,
child_ident,
@ -2114,24 +2114,26 @@ impl Resolver {
}
fn import_directive_subclass_to_str(&mut self,
subclass: ImportDirectiveSubclass)
-> @str {
subclass: ImportDirectiveSubclass)
-> ~str {
match subclass {
SingleImport(_target, source) => self.session.str_of(source),
GlobImport => @"*"
SingleImport(_target, source) => {
self.session.str_of(source).to_str()
}
GlobImport => ~"*"
}
}
fn import_path_to_str(&mut self,
idents: &[Ident],
subclass: ImportDirectiveSubclass)
-> @str {
idents: &[Ident],
subclass: ImportDirectiveSubclass)
-> ~str {
if idents.is_empty() {
self.import_directive_subclass_to_str(subclass)
} else {
(format!("{}::{}",
self.idents_to_str(idents),
self.import_directive_subclass_to_str(subclass))).to_managed()
self.idents_to_str(idents),
self.import_directive_subclass_to_str(subclass)))
}
}
@ -2584,7 +2586,7 @@ impl Resolver {
debug!("(resolving glob import) writing resolution `{}` in `{}` \
to `{}`",
interner_get(name),
token::get_ident(name).get().to_str(),
self.module_to_str(containing_module),
self.module_to_str(module_));
@ -3101,11 +3103,12 @@ impl Resolver {
// top of the crate otherwise.
let mut containing_module;
let mut i;
if "self" == token::ident_to_str(&module_path[0]) {
let first_module_path_string = token::get_ident(module_path[0].name);
if "self" == first_module_path_string.get() {
containing_module =
self.get_nearest_normal_module_parent_or_self(module_);
i = 1;
} else if "super" == token::ident_to_str(&module_path[0]) {
} else if "super" == first_module_path_string.get() {
containing_module =
self.get_nearest_normal_module_parent_or_self(module_);
i = 0; // We'll handle `super` below.
@ -3114,8 +3117,11 @@ impl Resolver {
}
// Now loop through all the `super`s we find.
while i < module_path.len() &&
"super" == token::ident_to_str(&module_path[i]) {
while i < module_path.len() {
let string = token::get_ident(module_path[i].name);
if "super" != string.get() {
break
}
debug!("(resolving module prefix) resolving `super` at {}",
self.module_to_str(containing_module));
match self.get_nearest_normal_module_parent(containing_module) {
@ -3354,10 +3360,10 @@ impl Resolver {
match namebindings.def_for_namespace(ns) {
Some(d) => {
debug!("(computing exports) YES: export '{}' => {:?}",
interner_get(name),
token::get_ident(name).get().to_str(),
def_id_of_def(d));
exports2.push(Export2 {
name: interner_get(name),
name: token::get_ident(name).get().to_str(),
def_id: def_id_of_def(d)
});
}
@ -3380,7 +3386,7 @@ impl Resolver {
match importresolution.target_for_namespace(ns) {
Some(target) => {
debug!("(computing exports) maybe export '{}'",
interner_get(*name));
token::get_ident(*name).get().to_str());
self.add_exports_of_namebindings(exports2,
*name,
target.bindings,
@ -4155,19 +4161,23 @@ impl Resolver {
for (&key, &binding_0) in map_0.iter() {
match map_i.find(&key) {
None => {
let string = token::get_ident(key);
self.resolve_error(
p.span,
format!("variable `{}` from pattern \\#1 is \
not bound in pattern \\#{}",
interner_get(key), i + 1));
string.get(),
i + 1));
}
Some(binding_i) => {
if binding_0.binding_mode != binding_i.binding_mode {
let string = token::get_ident(key);
self.resolve_error(
binding_i.span,
format!("variable `{}` is bound with different \
mode in pattern \\#{} than in pattern \\#1",
interner_get(key), i + 1));
string.get(),
i + 1));
}
}
}
@ -4175,11 +4185,13 @@ impl Resolver {
for (&key, &binding) in map_i.iter() {
if !map_0.contains_key(&key) {
let string = token::get_ident(key);
self.resolve_error(
binding.span,
format!("variable `{}` from pattern \\#{} is \
not bound in pattern \\#1",
interner_get(key), i + 1));
string.get(),
i + 1));
}
}
}
@ -4371,9 +4383,10 @@ impl Resolver {
match self.resolve_bare_identifier_pattern(ident) {
FoundStructOrEnumVariant(def, lp)
if mode == RefutableMode => {
let string = token::get_ident(renamed);
debug!("(resolving pattern) resolving `{}` to \
struct or enum variant",
interner_get(renamed));
string.get());
self.enforce_default_binding_mode(
pattern,
@ -4382,17 +4395,19 @@ impl Resolver {
self.record_def(pattern.id, (def, lp));
}
FoundStructOrEnumVariant(..) => {
let string = token::get_ident(renamed);
self.resolve_error(pattern.span,
format!("declaration of `{}` \
shadows an enum \
variant or unit-like \
struct in scope",
interner_get(renamed)));
string.get()));
}
FoundConst(def, lp) if mode == RefutableMode => {
let string = token::get_ident(renamed);
debug!("(resolving pattern) resolving `{}` to \
constant",
interner_get(renamed));
string.get());
self.enforce_default_binding_mode(
pattern,
@ -4406,8 +4421,9 @@ impl Resolver {
allowed here");
}
BareIdentifierPatternUnresolved => {
let string = token::get_ident(renamed);
debug!("(resolving pattern) binding `{}`",
interner_get(renamed));
string.get());
let def = match mode {
RefutableMode => {
@ -5009,10 +5025,10 @@ impl Resolver {
}
fn find_best_match_for_name(&mut self, name: &str, max_distance: uint)
-> Option<@str> {
-> Option<~str> {
let this = &mut *self;
let mut maybes: ~[@str] = ~[];
let mut maybes: ~[~str] = ~[];
let mut values: ~[uint] = ~[];
let mut j = {
@ -5024,14 +5040,15 @@ impl Resolver {
let value_ribs = this.value_ribs.borrow();
let bindings = value_ribs.get()[j].bindings.borrow();
for (&k, _) in bindings.get().iter() {
maybes.push(interner_get(k));
let string = token::get_ident(k);
maybes.push(string.get().to_str());
values.push(uint::MAX);
}
}
let mut smallest = 0;
for (i, &other) in maybes.iter().enumerate() {
values[i] = name.lev_distance(other);
for (i, other) in maybes.iter().enumerate() {
values[i] = name.lev_distance(*other);
if values[i] <= values[smallest] {
smallest = i;
@ -5190,7 +5207,9 @@ impl Resolver {
self.resolve_error(expr.span,
format!("use of undeclared label \
`{}`",
interner_get(label))),
token::get_ident(label)
.get()
.to_str())),
Some(DlDef(def @ DefLabel(_))) => {
// FIXME: is AllPublic correct?
self.record_def(expr.id, (def, AllPublic))
@ -5510,7 +5529,7 @@ impl Resolver {
self.populate_module_if_necessary(module_);
let children = module_.children.borrow();
for (&name, _) in children.get().iter() {
debug!("* {}", interner_get(name));
debug!("* {}", token::get_ident(name).get().to_str());
}
debug!("Import resolutions:");
@ -5534,7 +5553,7 @@ impl Resolver {
}
}
debug!("* {}:{}{}", interner_get(*name),
debug!("* {}:{}{}", token::get_ident(*name).get().to_str(),
value_repr, type_repr);
}
}

View file

@ -13,8 +13,9 @@
use middle::ty;
use middle::ty_fold;
use middle::ty_fold::TypeFolder;
use std::rc::Rc;
use syntax::opt_vec::OptVec;
use std::at_vec;
///////////////////////////////////////////////////////////////////////////
// Public trait `Subst`
@ -84,10 +85,9 @@ impl<T:Subst> Subst for ~[T] {
self.map(|t| t.subst(tcx, substs))
}
}
impl<T:Subst> Subst for @[T] {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> @[T] {
at_vec::map(*self, |t| t.subst(tcx, substs))
impl<T:Subst> Subst for Rc<T> {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Rc<T> {
Rc::new(self.borrow().subst(tcx, substs))
}
}

View file

@ -229,6 +229,7 @@ use syntax::ast::Ident;
use syntax::ast_util::path_to_ident;
use syntax::ast_util;
use syntax::codemap::{Span, DUMMY_SP};
use syntax::parse::token::InternedString;
// An option identifying a literal: either a unit-like struct or an
// expression.
@ -1031,7 +1032,6 @@ fn match_datum(bcx: &Block,
fn extract_vec_elems<'a>(
bcx: &'a Block<'a>,
pat_span: Span,
pat_id: ast::NodeId,
elem_count: uint,
slice: Option<uint>,
@ -1040,7 +1040,7 @@ fn extract_vec_elems<'a>(
-> ExtractedBlock<'a> {
let _icx = push_ctxt("match::extract_vec_elems");
let vec_datum = match_datum(bcx, val, pat_id);
let (bcx, base, len) = vec_datum.get_vec_base_and_len(bcx, pat_span, pat_id, 0);
let (base, len) = vec_datum.get_vec_base_and_len(bcx);
let vt = tvec::vec_types(bcx, node_id_type(bcx, pat_id));
let mut elems = vec::from_fn(elem_count, |i| {
@ -1174,7 +1174,7 @@ fn any_tuple_struct_pat(bcx: &Block, m: &[Match], col: uint) -> bool {
struct DynamicFailureHandler<'a> {
bcx: &'a Block<'a>,
sp: Span,
msg: @str,
msg: InternedString,
finished: @Cell<Option<BasicBlockRef>>,
}
@ -1187,7 +1187,7 @@ impl<'a> DynamicFailureHandler<'a> {
let fcx = self.bcx.fcx;
let fail_cx = fcx.new_block(false, "case_fallthrough", None);
controlflow::trans_fail(fail_cx, Some(self.sp), self.msg);
controlflow::trans_fail(fail_cx, Some(self.sp), self.msg.clone());
self.finished.set(Some(fail_cx.llbb));
fail_cx.llbb
}
@ -1511,13 +1511,11 @@ fn compile_submatch_continue<'r,
vals.slice(col + 1u, vals.len()));
let ccx = bcx.fcx.ccx;
let mut pat_id = 0;
let mut pat_span = DUMMY_SP;
for br in m.iter() {
// Find a real id (we're adding placeholder wildcard patterns, but
// each column is guaranteed to have at least one real pattern)
if pat_id == 0 {
pat_id = br.pats[col].id;
pat_span = br.pats[col].span;
}
}
@ -1766,7 +1764,7 @@ fn compile_submatch_continue<'r,
vec_len_ge(i) => (n + 1u, Some(i)),
vec_len_eq => (n, None)
};
let args = extract_vec_elems(opt_cx, pat_span, pat_id, n,
let args = extract_vec_elems(opt_cx, pat_id, n,
slice, val, test_val);
size = args.vals.len();
unpacked = args.vals.clone();
@ -1891,7 +1889,8 @@ fn trans_match_inner<'a>(scope_cx: &'a Block<'a>,
let fail_handler = ~DynamicFailureHandler {
bcx: scope_cx,
sp: discr_expr.span,
msg: @"scrutinizing value that can't exist",
msg: InternedString::new("scrutinizing value that can't \
exist"),
finished: fail_cx,
};
DynamicFailureHandlerClass(fail_handler)

View file

@ -38,8 +38,8 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
let temp_scope = fcx.push_custom_cleanup_scope();
// Prepare the output operands
let outputs = ia.outputs.map(|&(c, out)| {
constraints.push(c);
let outputs = ia.outputs.map(|&(ref c, out)| {
constraints.push((*c).clone());
let out_datum = unpack_datum!(bcx, expr::trans(bcx, out));
output_types.push(type_of::type_of(bcx.ccx(), out_datum.ty));
@ -48,8 +48,8 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
});
// Now the input operands
let inputs = ia.inputs.map(|&(c, input)| {
constraints.push(c);
let inputs = ia.inputs.map(|&(ref c, input)| {
constraints.push((*c).clone());
unpack_result!(bcx, {
callee::trans_arg_expr(bcx,
@ -63,13 +63,13 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
// no failure occurred preparing operands, no need to cleanup
fcx.pop_custom_cleanup_scope(temp_scope);
let mut constraints = constraints.connect(",");
let mut constraints = constraints.map(|s| s.get().to_str()).connect(",");
let mut clobbers = getClobbers();
if !ia.clobbers.is_empty() && !clobbers.is_empty() {
clobbers = format!("{},{}", ia.clobbers, clobbers);
if !ia.clobbers.get().is_empty() && !clobbers.is_empty() {
clobbers = format!("{},{}", ia.clobbers.get(), clobbers);
} else {
clobbers.push_str(ia.clobbers);
clobbers.push_str(ia.clobbers.get());
}
// Add the clobbers to our constraints list
@ -98,7 +98,7 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
ast::AsmIntel => lib::llvm::AD_Intel
};
let r = ia.asm.with_c_str(|a| {
let r = ia.asm.get().with_c_str(|a| {
constraints.with_c_str(|c| {
InlineAsmCall(bcx, a, c, inputs, output_type, ia.volatile, ia.alignstack, dialect)
})

View file

@ -76,16 +76,17 @@ use std::hashmap::HashMap;
use std::libc::c_uint;
use std::vec;
use std::local_data;
use syntax::abi::{X86, X86_64, Arm, Mips, Rust, RustIntrinsic, OsWin32};
use syntax::ast_map::{PathName, PathPrettyName, path_elem_to_str};
use syntax::ast_util::{local_def, is_local};
use syntax::attr::AttrMetaMethods;
use syntax::attr;
use syntax::codemap::Span;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::{ast, ast_util, ast_map};
use syntax::attr::AttrMetaMethods;
use syntax::abi::{X86, X86_64, Arm, Mips, Rust, RustIntrinsic, OsWin32};
use syntax::visit;
use syntax::visit::Visitor;
use syntax::visit;
use syntax::{ast, ast_util, ast_map};
pub use middle::trans::context::task_llcx;
@ -359,7 +360,7 @@ pub fn malloc_raw_dyn<'a>(
None);
rslt(r.bcx, PointerCast(r.bcx, r.val, llty_value.ptr_to()))
} else {
// we treat ~fn, @fn and @[] as @ here, which isn't ideal
// we treat ~fn as @ here, which isn't ideal
let langcall = match heap {
heap_managed => {
require_alloc_fn(bcx, t, MallocFnLangItem)
@ -509,7 +510,7 @@ pub fn set_no_split_stack(f: ValueRef) {
// Double-check that we never ask LLVM to declare the same symbol twice. It
// silently mangles such symbols, breaking our linkage model.
pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: @str) {
pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: ~str) {
let mut all_llvm_symbols = ccx.all_llvm_symbols.borrow_mut();
if all_llvm_symbols.get().contains(&sym) {
ccx.sess.bug(~"duplicate LLVM symbol: " + sym);
@ -604,7 +605,8 @@ pub fn compare_scalar_types<'a>(
rslt(
controlflow::trans_fail(
cx, None,
@"attempt to compare values of type type"),
InternedString::new("attempt to compare values of type \
type")),
C_nil())
}
_ => {
@ -856,9 +858,9 @@ pub fn fail_if_zero<'a>(
rhs_t: ty::t)
-> &'a Block<'a> {
let text = if divrem == ast::BiDiv {
@"attempted to divide by zero"
"attempted to divide by zero"
} else {
@"attempted remainder with a divisor of zero"
"attempted remainder with a divisor of zero"
};
let is_zero = match ty::get(rhs_t).sty {
ty::ty_int(t) => {
@ -875,7 +877,7 @@ pub fn fail_if_zero<'a>(
}
};
with_cond(cx, is_zero, |bcx| {
controlflow::trans_fail(bcx, Some(span), text)
controlflow::trans_fail(bcx, Some(span), InternedString::new(text))
})
}
@ -1951,7 +1953,7 @@ fn exported_name(ccx: &CrateContext, path: ast_map::Path,
ty: ty::t, attrs: &[ast::Attribute]) -> ~str {
match attr::first_attr_value_str_by_name(attrs, "export_name") {
// Use provided name
Some(name) => name.to_owned(),
Some(name) => name.get().to_owned(),
// Don't mangle
_ if attr::contains_name(attrs, "no_mangle")
@ -2099,7 +2101,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
match attr::first_attr_value_str_by_name(i.attrs, "link_section") {
Some(sect) => unsafe {
sect.with_c_str(|buf| {
sect.get().with_c_str(|buf| {
llvm::LLVMSetSection(v, buf);
})
},
@ -2161,9 +2163,9 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
ccx.crate_map
}
} else {
let ident = foreign::link_name(ccx, ni);
let ident = foreign::link_name(ni);
unsafe {
ident.with_c_str(|buf| {
ident.get().with_c_str(|buf| {
let ty = type_of(ccx, ty);
llvm::LLVMAddGlobal(ccx.llmod,
ty.to_ref(), buf)
@ -2476,21 +2478,21 @@ pub fn create_module_map(ccx: &CrateContext) -> (ValueRef, uint) {
let mut keys = ~[];
let module_data = ccx.module_data.borrow();
for (k, _) in module_data.get().iter() {
keys.push(k.to_managed());
keys.push(k.clone());
}
keys
};
for key in keys.iter() {
let llstrval = C_str_slice(ccx, *key);
let module_data = ccx.module_data.borrow();
let val = *module_data.get().find_equiv(key).unwrap();
let v_ptr = p2i(ccx, val);
let elt = C_struct([
llstrval,
v_ptr
], false);
elts.push(elt);
let llstrval = C_str_slice(ccx, token::intern_and_get_ident(*key));
let module_data = ccx.module_data.borrow();
let val = *module_data.get().find_equiv(key).unwrap();
let v_ptr = p2i(ccx, val);
let elt = C_struct([
llstrval,
v_ptr
], false);
elts.push(elt);
}
unsafe {
llvm::LLVMSetInitializer(map, C_array(elttype, elts));

View file

@ -220,7 +220,7 @@ fn resolve_default_method_vtables(bcx: &Block,
bcx.tcx(), param_substs, impl_res.trait_vtables);
// Now we pull any vtables for parameters on the actual method.
let num_method_vtables = method.generics.type_param_defs.len();
let num_method_vtables = method.generics.type_param_defs().len();
let method_vtables = match impl_vtables {
Some(vtables) => {
let num_impl_type_parameters =

View file

@ -403,11 +403,11 @@ pub fn trans_expr_fn<'a>(
let capture_map = ccx.maps.capture_map.borrow();
capture_map.get().get_copy(&user_id)
};
let ClosureResult {llbox, cdata_ty, bcx} = build_closure(bcx, cap_vars, sigil);
let ClosureResult {llbox, cdata_ty, bcx} = build_closure(bcx, *cap_vars.borrow(), sigil);
trans_closure(ccx, sub_path, decl, body, llfn,
bcx.fcx.param_substs, user_id,
[], ty::ty_fn_ret(fty),
|bcx| load_environment(bcx, cdata_ty, cap_vars, sigil));
|bcx| load_environment(bcx, cdata_ty, *cap_vars.borrow(), sigil));
fill_fn_pair(bcx, dest_addr, llfn, llbox);
bcx

View file

@ -30,7 +30,6 @@ use middle::ty;
use middle::typeck;
use util::ppaux::Repr;
use arena::TypedArena;
use std::c_str::ToCStr;
use std::cast::transmute;
@ -41,6 +40,7 @@ use std::libc::{c_uint, c_longlong, c_ulonglong, c_char};
use syntax::ast::{Ident};
use syntax::ast_map::{Path, PathElem, PathPrettyName};
use syntax::codemap::Span;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::{ast, ast_map};
@ -446,8 +446,9 @@ impl<'a> Block<'a> {
}
pub fn sess(&self) -> Session { self.fcx.ccx.sess }
pub fn ident(&self, ident: Ident) -> @str {
token::ident_to_str(&ident)
pub fn ident(&self, ident: Ident) -> ~str {
let string = token::get_ident(ident.name);
string.get().to_str()
}
pub fn node_id_to_str(&self, id: ast::NodeId) -> ~str {
@ -597,18 +598,19 @@ pub fn C_u8(i: uint) -> ValueRef {
// This is a 'c-like' raw string, which differs from
// our boxed-and-length-annotated strings.
pub fn C_cstr(cx: &CrateContext, s: @str) -> ValueRef {
pub fn C_cstr(cx: &CrateContext, s: InternedString) -> ValueRef {
unsafe {
{
let const_cstr_cache = cx.const_cstr_cache.borrow();
match const_cstr_cache.get().find_equiv(&s) {
match const_cstr_cache.get().find(&s) {
Some(&llval) => return llval,
None => ()
}
}
let sc = llvm::LLVMConstStringInContext(cx.llcx,
s.as_ptr() as *c_char, s.len() as c_uint,
s.get().as_ptr() as *c_char,
s.get().len() as c_uint,
False);
let gsym = token::gensym("str");
@ -627,9 +629,9 @@ pub fn C_cstr(cx: &CrateContext, s: @str) -> ValueRef {
// NB: Do not use `do_spill_noroot` to make this into a constant string, or
// you will be kicked off fast isel. See issue #4352 for an example of this.
pub fn C_str_slice(cx: &CrateContext, s: @str) -> ValueRef {
pub fn C_str_slice(cx: &CrateContext, s: InternedString) -> ValueRef {
unsafe {
let len = s.len();
let len = s.get().len();
let cs = llvm::LLVMConstPointerCast(C_cstr(cx, s), Type::i8p().to_ref());
C_struct([cs, C_uint(cx, len)], false)
}
@ -766,7 +768,6 @@ pub fn mono_data_classify(t: ty::t) -> MonoDataClass {
ty::ty_float(_) => MonoFloat,
ty::ty_rptr(..) | ty::ty_uniq(..) | ty::ty_box(..) |
ty::ty_str(ty::vstore_uniq) | ty::ty_vec(_, ty::vstore_uniq) |
ty::ty_str(ty::vstore_box) | ty::ty_vec(_, ty::vstore_box) |
ty::ty_bare_fn(..) => MonoNonNull,
// Is that everything? Would closures or slices qualify?
_ => MonoBits
@ -970,7 +971,8 @@ pub fn dummy_substs(tps: ~[ty::t]) -> ty::substs {
pub fn filename_and_line_num_from_span(bcx: &Block, span: Span)
-> (ValueRef, ValueRef) {
let loc = bcx.sess().parse_sess.cm.lookup_char_pos(span.lo);
let filename_cstr = C_cstr(bcx.ccx(), loc.file.name);
let filename_cstr = C_cstr(bcx.ccx(),
token::intern_and_get_ident(loc.file.name));
let filename = build::PointerCast(bcx, filename_cstr, Type::i8p());
let line = C_int(bcx.ccx(), loc.line as int);
(filename, line)

View file

@ -57,12 +57,14 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit)
ty_to_str(cx.tcx, lit_int_ty)))
}
}
ast::LitFloat(fs, t) => C_floating(fs, Type::float_from_ty(t)),
ast::LitFloatUnsuffixed(fs) => {
ast::LitFloat(ref fs, t) => {
C_floating(fs.get(), Type::float_from_ty(t))
}
ast::LitFloatUnsuffixed(ref fs) => {
let lit_float_ty = ty::node_id_to_type(cx.tcx, e.id);
match ty::get(lit_float_ty).sty {
ty::ty_float(t) => {
C_floating(fs, Type::float_from_ty(t))
C_floating(fs.get(), Type::float_from_ty(t))
}
_ => {
cx.sess.span_bug(lit.span,
@ -72,8 +74,8 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit)
}
ast::LitBool(b) => C_bool(b),
ast::LitNil => C_nil(),
ast::LitStr(s, _) => C_str_slice(cx, s),
ast::LitBinary(data) => C_binary_slice(cx, data),
ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()),
ast::LitBinary(ref data) => C_binary_slice(cx, *data.borrow()),
}
}
@ -312,7 +314,9 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr,
unsafe {
let _icx = push_ctxt("const_expr");
return match e.node {
ast::ExprLit(lit) => (consts::const_lit(cx, e, *lit), true),
ast::ExprLit(lit) => {
(consts::const_lit(cx, e, (*lit).clone()), true)
}
ast::ExprBinary(_, b, e1, e2) => {
let (te1, _) = const_expr(cx, e1, is_local);
let (te2, _) = const_expr(cx, e2, is_local);

View file

@ -19,12 +19,12 @@ use middle::resolve;
use middle::trans::adt;
use middle::trans::base;
use middle::trans::builder::Builder;
use middle::trans::debuginfo;
use middle::trans::common::{C_i32, C_null};
use middle::ty;
use middle::trans::common::{mono_id,ExternMap,tydesc_info,BuilderRef_res,Stats};
use middle::trans::base::{decl_crate_map};
use middle::trans::debuginfo;
use middle::trans::type_::Type;
use middle::ty;
use util::sha2::Sha256;
use std::cell::{Cell, RefCell};
@ -33,10 +33,7 @@ use std::hashmap::{HashMap, HashSet};
use std::local_data;
use std::libc::c_uint;
use syntax::ast;
use middle::trans::common::{mono_id,ExternMap,tydesc_info,BuilderRef_res,Stats};
use middle::trans::base::{decl_crate_map};
use syntax::parse::token::InternedString;
pub struct CrateContext {
sess: session::Session,
@ -71,7 +68,7 @@ pub struct CrateContext {
// Cache generated vtables
vtables: RefCell<HashMap<(ty::t, mono_id), ValueRef>>,
// Cache of constant strings,
const_cstr_cache: RefCell<HashMap<@str, ValueRef>>,
const_cstr_cache: RefCell<HashMap<InternedString, ValueRef>>,
// Reverse-direction for const ptrs cast from globals.
// Key is an int, cast from a ValueRef holding a *T,
@ -99,8 +96,8 @@ pub struct CrateContext {
llsizingtypes: RefCell<HashMap<ty::t, Type>>,
adt_reprs: RefCell<HashMap<ty::t, @adt::Repr>>,
symbol_hasher: RefCell<Sha256>,
type_hashcodes: RefCell<HashMap<ty::t, @str>>,
all_llvm_symbols: RefCell<HashSet<@str>>,
type_hashcodes: RefCell<HashMap<ty::t, ~str>>,
all_llvm_symbols: RefCell<HashSet<~str>>,
tcx: ty::ctxt,
maps: astencode::Maps,
stats: @Stats,

View file

@ -28,6 +28,8 @@ use syntax::ast;
use syntax::ast::Name;
use syntax::ast_util;
use syntax::codemap::Span;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::visit::Visitor;
pub fn trans_stmt<'a>(cx: &'a Block<'a>,
@ -332,7 +334,7 @@ pub fn trans_fail_expr<'a>(
unpack_datum!(bcx, expr::trans_to_lvalue(bcx, arg_expr, "fail"));
if ty::type_is_str(arg_datum.ty) {
let (lldata, _) = arg_datum.get_vec_base_and_len_no_root(bcx);
let (lldata, _) = arg_datum.get_vec_base_and_len(bcx);
return trans_fail_value(bcx, sp_opt, lldata);
} else if bcx.unreachable.get() || ty::type_is_bot(arg_datum.ty) {
return bcx;
@ -342,14 +344,14 @@ pub fn trans_fail_expr<'a>(
ppaux::ty_to_str(tcx, arg_datum.ty));
}
}
_ => trans_fail(bcx, sp_opt, @"explicit failure")
_ => trans_fail(bcx, sp_opt, InternedString::new("explicit failure"))
}
}
pub fn trans_fail<'a>(
bcx: &'a Block<'a>,
sp_opt: Option<Span>,
fail_str: @str)
fail_str: InternedString)
-> &'a Block<'a> {
let _icx = push_ctxt("trans_fail");
let V_fail_str = C_cstr(bcx.ccx(), fail_str);
@ -367,11 +369,11 @@ fn trans_fail_value<'a>(
Some(sp) => {
let sess = bcx.sess();
let loc = sess.parse_sess.cm.lookup_char_pos(sp.lo);
(C_cstr(bcx.ccx(), loc.file.name),
(C_cstr(bcx.ccx(), token::intern_and_get_ident(loc.file.name)),
loc.line as int)
}
None => {
(C_cstr(bcx.ccx(), @"<runtime>"), 0)
(C_cstr(bcx.ccx(), InternedString::new("<runtime>")), 0)
}
};
let V_str = PointerCast(bcx, V_fail_str, Type::i8p());

View file

@ -528,49 +528,8 @@ impl Datum<Lvalue> {
}
}
pub fn get_vec_base_and_byte_len<'a>(
&self,
mut bcx: &'a Block<'a>,
span: Span,
expr_id: ast::NodeId,
derefs: uint)
-> (&'a Block<'a>, ValueRef, ValueRef) {
//! Converts a vector into the slice pair. Performs rooting
//! and write guards checks.
// only imp't for @[] and @str, but harmless
bcx = write_guard::root_and_write_guard(self, bcx, span, expr_id, derefs);
let (base, len) = self.get_vec_base_and_byte_len_no_root(bcx);
(bcx, base, len)
}
pub fn get_vec_base_and_byte_len_no_root(&self, bcx: &Block)
-> (ValueRef, ValueRef) {
//! Converts a vector into the slice pair. Des not root
//! nor perform write guard checks.
tvec::get_base_and_byte_len(bcx, self.val, self.ty)
}
pub fn get_vec_base_and_len<'a>(&self,
mut bcx: &'a Block<'a>,
span: Span,
expr_id: ast::NodeId,
derefs: uint)
-> (&'a Block<'a>, ValueRef, ValueRef) {
//! Converts a vector into the slice pair. Performs rooting
//! and write guards checks.
// only imp't for @[] and @str, but harmless
bcx = write_guard::root_and_write_guard(self, bcx, span, expr_id, derefs);
let (base, len) = self.get_vec_base_and_len_no_root(bcx);
(bcx, base, len)
}
pub fn get_vec_base_and_len_no_root<'a>(&self, bcx: &'a Block<'a>)
-> (ValueRef, ValueRef) {
//! Converts a vector into the slice pair. Des not root
//! nor perform write guard checks.
pub fn get_vec_base_and_len<'a>(&self, bcx: &'a Block<'a>) -> (ValueRef, ValueRef) {
//! Converts a vector into the slice pair.
tvec::get_base_and_len(bcx, self.val, self.ty)
}

View file

@ -622,7 +622,8 @@ pub fn create_function_debug_context(cx: &CrateContext,
};
// get_template_parameters() will append a `<...>` clause to the function name if necessary.
let mut function_name = token::ident_to_str(&ident).to_owned();
let function_name_string = token::get_ident(ident.name);
let mut function_name = function_name_string.get().to_owned();
let template_parameters = get_template_parameters(cx,
generics,
param_substs,
@ -791,7 +792,9 @@ pub fn create_function_debug_context(cx: &CrateContext,
let ident = special_idents::type_self;
let param_metadata = token::ident_to_str(&ident).with_c_str(|name| {
let param_metadata_string = token::get_ident(ident.name);
let param_metadata = param_metadata_string.get()
.with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreateTemplateTypeParameter(
DIB(cx),
@ -829,7 +832,9 @@ pub fn create_function_debug_context(cx: &CrateContext,
// Again, only create type information if extra_debuginfo is enabled
if cx.sess.opts.extra_debuginfo {
let actual_type_metadata = type_metadata(cx, actual_type, codemap::DUMMY_SP);
let param_metadata = token::ident_to_str(&ident).with_c_str(|name| {
let param_metadata_string = token::get_ident(ident.name);
let param_metadata = param_metadata_string.get()
.with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreateTemplateTypeParameter(
DIB(cx),
@ -931,10 +936,11 @@ fn declare_local(bcx: &Block,
span: Span) {
let cx: &CrateContext = bcx.ccx();
let filename = span_start(cx, span).file.name;
let filename = span_start(cx, span).file.name.clone();
let file_metadata = file_metadata(cx, filename);
let name: &str = token::ident_to_str(&variable_ident);
let variable_ident_string = token::get_ident(variable_ident.name);
let name: &str = variable_ident_string.get();
let loc = span_start(cx, span);
let type_metadata = type_metadata(cx, variable_type, span);
@ -1139,9 +1145,10 @@ impl MemberDescriptionFactory for StructMemberDescriptionFactory {
-> ~[MemberDescription] {
self.fields.map(|field| {
let name = if field.ident.name == special_idents::unnamed_field.name {
@""
~""
} else {
token::ident_to_str(&field.ident)
let string = token::get_ident(field.ident.name);
string.get().to_str()
};
MemberDescription {
@ -1165,7 +1172,7 @@ fn prepare_struct_metadata(cx: &CrateContext,
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id, span);
let file_name = span_start(cx, definition_span).file.name;
let file_name = span_start(cx, definition_span).file.name.clone();
let file_metadata = file_metadata(cx, file_name);
let struct_metadata_stub = create_struct_stub(cx,
@ -1244,7 +1251,7 @@ impl MemberDescriptionFactory for TupleMemberDescriptionFactory {
-> ~[MemberDescription] {
self.component_types.map(|&component_type| {
MemberDescription {
name: @"",
name: ~"",
llvm_type: type_of::type_of(cx, component_type),
type_metadata: type_metadata(cx, component_type, self.span),
offset: ComputedMemberOffset,
@ -1322,7 +1329,7 @@ impl MemberDescriptionFactory for GeneralMemberDescriptionFactory {
self.file_metadata,
codemap::DUMMY_SP);
MemberDescription {
name: @"",
name: ~"",
llvm_type: variant_llvm_type,
type_metadata: variant_type_metadata,
offset: FixedMemberOffset { bytes: 0 },
@ -1332,7 +1339,7 @@ impl MemberDescriptionFactory for GeneralMemberDescriptionFactory {
}
struct EnumVariantMemberDescriptionFactory {
args: ~[(@str, ty::t)],
args: ~[(~str, ty::t)],
discriminant_type_metadata: Option<DIType>,
span: Span,
}
@ -1340,9 +1347,9 @@ struct EnumVariantMemberDescriptionFactory {
impl MemberDescriptionFactory for EnumVariantMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &CrateContext)
-> ~[MemberDescription] {
self.args.iter().enumerate().map(|(i, &(name, ty))| {
self.args.iter().enumerate().map(|(i, &(ref name, ty))| {
MemberDescription {
name: name,
name: name.to_str(),
llvm_type: type_of::type_of(cx, ty),
type_metadata: match self.discriminant_type_metadata {
Some(metadata) if i == 0 => metadata,
@ -1362,7 +1369,8 @@ fn describe_enum_variant(cx: &CrateContext,
file_metadata: DIFile,
span: Span)
-> (DICompositeType, Type, @MemberDescriptionFactory) {
let variant_name = token::ident_to_str(&variant_info.name);
let variant_info_string = token::get_ident(variant_info.name.name);
let variant_name = variant_info_string.get();
let variant_llvm_type = Type::struct_(struct_def.fields.map(|&t| type_of::type_of(cx, t)),
struct_def.packed);
// Could some consistency checks here: size, align, field count, discr type
@ -1395,19 +1403,24 @@ fn describe_enum_variant(cx: &CrateContext,
// Get the argument names from the enum variant info
let mut arg_names = match variant_info.arg_names {
Some(ref names) => names.map(|ident| token::ident_to_str(ident)),
None => variant_info.args.map(|_| @"")
Some(ref names) => {
names.map(|ident| {
let string = token::get_ident(ident.name);
string.get().to_str()
})
}
None => variant_info.args.map(|_| ~"")
};
// If this is not a univariant enum, there is also the (unnamed) discriminant field
if discriminant_type_metadata.is_some() {
arg_names.insert(0, @"");
arg_names.insert(0, ~"");
}
// Build an array of (field name, field type) pairs to be captured in the factory closure.
let args: ~[(@str, ty::t)] = arg_names.iter()
let args: ~[(~str, ty::t)] = arg_names.iter()
.zip(struct_def.fields.iter())
.map(|(&s, &t)| (s, t))
.map(|(s, &t)| (s.to_str(), t))
.collect();
let member_description_factory =
@ -1452,7 +1465,8 @@ fn prepare_enum_metadata(cx: &CrateContext,
let enumerators_metadata: ~[DIDescriptor] = variants
.iter()
.map(|v| {
let name: &str = token::ident_to_str(&v.name);
let string = token::get_ident(v.name.name);
let name: &str = string.get();
let discriminant_value = v.disr_val as c_ulonglong;
name.with_c_str(|name| {
@ -1580,7 +1594,7 @@ enum MemberOffset {
}
struct MemberDescription {
name: @str,
name: ~str,
llvm_type: Type,
type_metadata: DIType,
offset: MemberOffset,
@ -1737,31 +1751,31 @@ fn boxed_type_metadata(cx: &CrateContext,
let member_descriptions = [
MemberDescription {
name: @"refcnt",
name: ~"refcnt",
llvm_type: member_llvm_types[0],
type_metadata: type_metadata(cx, int_type, codemap::DUMMY_SP),
offset: ComputedMemberOffset,
},
MemberDescription {
name: @"tydesc",
name: ~"tydesc",
llvm_type: member_llvm_types[1],
type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: @"prev",
name: ~"prev",
llvm_type: member_llvm_types[2],
type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: @"next",
name: ~"next",
llvm_type: member_llvm_types[3],
type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: @"val",
name: ~"val",
llvm_type: member_llvm_types[4],
type_metadata: content_type_metadata,
offset: ComputedMemberOffset,
@ -1848,19 +1862,19 @@ fn vec_metadata(cx: &CrateContext,
let member_descriptions = [
MemberDescription {
name: @"fill",
name: ~"fill",
llvm_type: member_llvm_types[0],
type_metadata: int_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: @"alloc",
name: ~"alloc",
llvm_type: member_llvm_types[1],
type_metadata: int_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: @"elements",
name: ~"elements",
llvm_type: member_llvm_types[2],
type_metadata: array_type_metadata,
offset: ComputedMemberOffset,
@ -1882,23 +1896,6 @@ fn vec_metadata(cx: &CrateContext,
span);
}
fn boxed_vec_metadata(cx: &CrateContext,
element_type: ty::t,
span: Span)
-> DICompositeType {
let element_llvm_type = type_of::type_of(cx, element_type);
let vec_llvm_type = Type::vec(cx.sess.targ_cfg.arch, &element_llvm_type);
let vec_type_name: &str = format!("[{}]", ppaux::ty_to_str(cx.tcx, element_type));
let vec_metadata = vec_metadata(cx, element_type, span);
return boxed_type_metadata(
cx,
Some(vec_type_name),
vec_llvm_type,
vec_metadata,
span);
}
fn vec_slice_metadata(cx: &CrateContext,
vec_type: ty::t,
element_type: ty::t,
@ -1917,13 +1914,13 @@ fn vec_slice_metadata(cx: &CrateContext,
let member_descriptions = [
MemberDescription {
name: @"data_ptr",
name: ~"data_ptr",
llvm_type: member_llvm_types[0],
type_metadata: type_metadata(cx, data_ptr_type, span),
offset: ComputedMemberOffset,
},
MemberDescription {
name: @"length",
name: ~"length",
llvm_type: member_llvm_types[1],
type_metadata: type_metadata(cx, ty::mk_uint(), span),
offset: ComputedMemberOffset,
@ -1996,9 +1993,10 @@ fn trait_metadata(cx: &CrateContext,
// the trait's methods.
let path = ty::item_path(cx.tcx, def_id);
let ident = path.last().unwrap().ident();
let ident_string = token::get_ident(ident.name);
let name = ppaux::trait_store_to_str(cx.tcx, trait_store) +
ppaux::mutability_to_str(mutability) +
token::ident_to_str(&ident);
ident_string.get();
// Add type and region parameters
let name = ppaux::parameterized(cx.tcx, name, &substs.regions,
substs.tps, def_id, true);
@ -2006,7 +2004,7 @@ fn trait_metadata(cx: &CrateContext,
let (containing_scope, definition_span) =
get_namespace_and_span_for_item(cx, def_id, usage_site_span);
let file_name = span_start(cx, definition_span).file.name;
let file_name = span_start(cx, definition_span).file.name.clone();
let file_metadata = file_metadata(cx, file_name);
let trait_llvm_type = type_of::type_of(cx, trait_type);
@ -2078,10 +2076,6 @@ fn type_metadata(cx: &CrateContext,
let vec_metadata = vec_metadata(cx, i8_t, usage_site_span);
pointer_type_metadata(cx, t, vec_metadata)
}
ty::vstore_box => {
let boxed_vec_metadata = boxed_vec_metadata(cx, i8_t, usage_site_span);
pointer_type_metadata(cx, t, boxed_vec_metadata)
}
ty::vstore_slice(_region) => {
vec_slice_metadata(cx, t, i8_t, usage_site_span)
}
@ -2102,10 +2096,6 @@ fn type_metadata(cx: &CrateContext,
let vec_metadata = vec_metadata(cx, mt.ty, usage_site_span);
pointer_type_metadata(cx, t, vec_metadata)
}
ty::vstore_box => {
let boxed_vec_metadata = boxed_vec_metadata(cx, mt.ty, usage_site_span);
pointer_type_metadata(cx, t, boxed_vec_metadata)
}
ty::vstore_slice(_) => {
vec_slice_metadata(cx, t, mt.ty, usage_site_span)
}
@ -2714,7 +2704,7 @@ fn populate_scope_map(cx: &CrateContext,
ast::ExprInlineAsm(ast::InlineAsm { inputs: ref inputs,
outputs: ref outputs,
.. }) => {
// inputs, outputs: ~[(@str, @expr)]
// inputs, outputs: ~[(~str, @expr)]
for &(_, exp) in inputs.iter() {
walk_expr(cx, exp, scope_stack, scope_map);
}
@ -2755,8 +2745,10 @@ impl NamespaceTreeNode {
}
None => {}
}
let name = token::ident_to_str(&node.ident);
output.push_str(format!("{}{}", name.len(), name));
let string = token::get_ident(node.ident.name);
output.push_str(format!("{}{}",
string.get().len(),
string.get()));
}
}
}
@ -2807,7 +2799,8 @@ fn namespace_for_item(cx: &CrateContext,
Some(node) => node.scope,
None => ptr::null()
};
let namespace_name = token::ident_to_str(&ident);
let namespace_name_string = token::get_ident(ident.name);
let namespace_name = namespace_name_string.get();
let namespace_metadata = unsafe {
namespace_name.with_c_str(|namespace_name| {

View file

@ -202,12 +202,10 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>,
unpack_datum!(bcx, auto_ref(bcx, datum, expr))
}
Some(AutoBorrowVec(..)) => {
unpack_datum!(bcx, auto_slice(bcx, adj.autoderefs,
expr, datum))
unpack_datum!(bcx, auto_slice(bcx, expr, datum))
}
Some(AutoBorrowVecRef(..)) => {
unpack_datum!(bcx, auto_slice_and_ref(bcx, adj.autoderefs,
expr, datum))
unpack_datum!(bcx, auto_slice_and_ref(bcx, expr, datum))
}
Some(AutoBorrowFn(..)) => {
let adjusted_ty = ty::adjust_ty(bcx.tcx(), expr.span,
@ -271,7 +269,6 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>,
fn auto_slice<'a>(
bcx: &'a Block<'a>,
autoderefs: uint,
expr: &ast::Expr,
datum: Datum<Expr>)
-> DatumBlock<'a, Expr> {
@ -290,8 +287,7 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>,
let datum = unpack_datum!(
bcx, datum.to_lvalue_datum(bcx, "auto_slice", expr.id));
let (bcx, base, len) =
datum.get_vec_base_and_len(bcx, expr.span, expr.id, autoderefs+1);
let (base, len) = datum.get_vec_base_and_len(bcx);
// this type may have a different region/mutability than the
// real one, but it will have the same runtime representation
@ -323,11 +319,10 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>,
fn auto_slice_and_ref<'a>(
bcx: &'a Block<'a>,
autoderefs: uint,
expr: &ast::Expr,
datum: Datum<Expr>)
-> DatumBlock<'a, Expr> {
let DatumBlock { bcx, datum } = auto_slice(bcx, autoderefs, expr, datum);
let DatumBlock { bcx, datum } = auto_slice(bcx, expr, datum);
auto_ref(bcx, datum, expr)
}
@ -519,19 +514,10 @@ fn trans_datum_unadjusted<'a>(bcx: &'a Block<'a>,
ast::ExprIndex(_, base, idx) => {
trans_index(bcx, expr, base, idx)
}
ast::ExprVstore(contents, ast::ExprVstoreBox) => {
fcx.push_ast_cleanup_scope(contents.id);
let datum = unpack_datum!(
bcx, tvec::trans_uniq_or_managed_vstore(bcx, heap_managed,
expr, contents));
bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, contents.id);
DatumBlock(bcx, datum)
}
ast::ExprVstore(contents, ast::ExprVstoreUniq) => {
fcx.push_ast_cleanup_scope(contents.id);
let datum = unpack_datum!(
bcx, tvec::trans_uniq_or_managed_vstore(bcx, heap_exchange,
expr, contents));
bcx, tvec::trans_uniq_vstore(bcx, expr, contents));
bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, contents.id);
DatumBlock(bcx, datum)
}
@ -543,9 +529,7 @@ fn trans_datum_unadjusted<'a>(bcx: &'a Block<'a>,
let heap = heap_exchange;
return trans_boxed_expr(bcx, box_ty, contents, contents_ty, heap)
}
ast::ExprLit(lit) => {
trans_immediate_lit(bcx, expr, *lit)
}
ast::ExprLit(lit) => trans_immediate_lit(bcx, expr, (*lit).clone()),
ast::ExprBinary(_, op, lhs, rhs) => {
// if overloaded, would be RvalueDpsExpr
{
@ -636,8 +620,7 @@ fn trans_index<'a>(bcx: &'a Block<'a>,
let vt = tvec::vec_types(bcx, base_datum.ty);
base::maybe_name_value(bcx.ccx(), vt.llunit_size, "unit_sz");
let (bcx, base, len) =
base_datum.get_vec_base_and_len(bcx, index_expr.span, index_expr.id, 0);
let (base, len) = base_datum.get_vec_base_and_len(bcx);
debug!("trans_index: base {}", bcx.val_to_str(base));
debug!("trans_index: len {}", bcx.val_to_str(len));
@ -836,8 +819,8 @@ fn trans_rvalue_dps_unadjusted<'a>(bcx: &'a Block<'a>,
}
ast::ExprLit(lit) => {
match lit.node {
ast::LitStr(s, _) => {
tvec::trans_lit_str(bcx, expr, s, dest)
ast::LitStr(ref s, _) => {
tvec::trans_lit_str(bcx, expr, (*s).clone(), dest)
}
_ => {
bcx.tcx()
@ -1799,9 +1782,9 @@ fn trans_log_level<'a>(bcx: &'a Block<'a>)
let external_srcs = ccx.external_srcs.borrow();
srccrate = match external_srcs.get().find(&bcx.fcx.id) {
Some(&src) => {
ccx.sess.cstore.get_crate_data(src.crate).name
ccx.sess.cstore.get_crate_data(src.crate).name.clone()
}
None => ccx.link_meta.crateid.name.to_managed(),
None => ccx.link_meta.crateid.name.to_str(),
};
};
let mut modpath = ~[PathMod(ccx.sess.ident_of(srccrate))];
@ -2032,4 +2015,3 @@ fn deref_once<'a>(bcx: &'a Block<'a>,
DatumBlock { bcx: bcx, datum: datum }
}
}

View file

@ -31,7 +31,8 @@ use std::vec;
use syntax::abi::{Cdecl, Aapcs, C, AbiSet, Win64};
use syntax::abi::{RustIntrinsic, Rust, Stdcall, Fastcall, System};
use syntax::codemap::Span;
use syntax::parse::token::special_idents;
use syntax::parse::token::{InternedString, special_idents};
use syntax::parse::token;
use syntax::{ast};
use syntax::{attr, ast_map};
use util::ppaux::{Repr, UserString};
@ -135,7 +136,7 @@ pub fn register_foreign_item_fn(ccx: @CrateContext,
};
// Register the function as a C extern fn
let lname = link_name(ccx, foreign_item);
let lname = link_name(foreign_item);
let tys = foreign_types_for_id(ccx, foreign_item.id);
// Make sure the calling convention is right for variadic functions
@ -150,8 +151,12 @@ pub fn register_foreign_item_fn(ccx: @CrateContext,
let llfn;
{
let mut externs = ccx.externs.borrow_mut();
llfn = base::get_extern_fn(externs.get(), ccx.llmod, lname,
cc, llfn_ty, tys.fn_sig.output);
llfn = base::get_extern_fn(externs.get(),
ccx.llmod,
lname.get(),
cc,
llfn_ty,
tys.fn_sig.output);
};
add_argument_attributes(&tys, llfn);
@ -372,9 +377,9 @@ pub fn trans_foreign_mod(ccx: @CrateContext,
_ => ()
}
let lname = link_name(ccx, foreign_item);
let lname = link_name(foreign_item);
let mut item_symbols = ccx.item_symbols.borrow_mut();
item_symbols.get().insert(foreign_item.id, lname.to_owned());
item_symbols.get().insert(foreign_item.id, lname.get().to_owned());
}
}
@ -726,10 +731,10 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext,
// This code is kind of a confused mess and needs to be reworked given
// the massive simplifications that have occurred.
pub fn link_name(ccx: &CrateContext, i: @ast::ForeignItem) -> @str {
pub fn link_name(i: @ast::ForeignItem) -> InternedString {
match attr::first_attr_value_str_by_name(i.attrs, "link_name") {
None => ccx.sess.str_of(i.ident),
Some(ln) => ln,
None => token::get_ident(i.ident.name),
Some(ln) => ln.clone(),
}
}

View file

@ -15,32 +15,32 @@
use back::abi;
use back::link::*;
use lib;
use lib::llvm::{llvm, ValueRef, True};
use lib;
use middle::lang_items::{FreeFnLangItem, ExchangeFreeFnLangItem};
use middle::trans::adt;
use middle::trans::base::*;
use middle::trans::build::*;
use middle::trans::callee;
use middle::trans::cleanup;
use middle::trans::cleanup::CleanupMethods;
use middle::trans::common::*;
use middle::trans::build::*;
use middle::trans::expr;
use middle::trans::machine::*;
use middle::trans::reflect;
use middle::trans::tvec;
use middle::trans::type_::Type;
use middle::trans::type_of::type_of;
use middle::ty;
use util::ppaux;
use util::ppaux::ty_to_short_str;
use middle::trans::type_::Type;
use util::ppaux;
use arena::TypedArena;
use std::c_str::ToCStr;
use std::cell::Cell;
use std::libc::c_uint;
use syntax::ast;
use syntax::parse::token;
pub fn trans_free<'a>(cx: &'a Block<'a>, v: ValueRef) -> &'a Block<'a> {
let _icx = push_ctxt("trans_free");
@ -64,10 +64,7 @@ pub fn take_ty<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
// NB: v is an *alias* of type t here, not a direct value.
let _icx = push_ctxt("take_ty");
match ty::get(t).sty {
ty::ty_box(_) |
ty::ty_vec(_, ty::vstore_box) | ty::ty_str(ty::vstore_box) => {
incr_refcnt_of_boxed(bcx, v)
}
ty::ty_box(_) => incr_refcnt_of_boxed(bcx, v),
ty::ty_trait(_, _, ty::BoxTraitStore, _, _) => {
incr_refcnt_of_boxed(bcx, GEPi(bcx, v, [0u, abi::trt_field_box]))
}
@ -113,10 +110,6 @@ fn simplified_glue_type(tcx: ty::ctxt, field: uint, t: ty::t) -> ty::t {
if !ty::type_needs_drop(tcx, typ) =>
return ty::mk_box(tcx, ty::mk_nil()),
ty::ty_vec(mt, ty::vstore_box)
if !ty::type_needs_drop(tcx, mt.ty) =>
return ty::mk_box(tcx, ty::mk_nil()),
ty::ty_uniq(typ)
if !ty::type_needs_drop(tcx, typ) =>
return ty::mk_uniq(tcx, ty::mk_nil()),
@ -326,11 +319,6 @@ fn make_drop_glue<'a>(bcx: &'a Block<'a>, v0: ValueRef, t: ty::t) -> &'a Block<'
ty::ty_box(body_ty) => {
decr_refcnt_maybe_free(bcx, v0, Some(body_ty))
}
ty::ty_str(ty::vstore_box) | ty::ty_vec(_, ty::vstore_box) => {
let unit_ty = ty::sequence_element_type(ccx.tcx, t);
let unboxed_vec_ty = ty::mk_mut_unboxed_vec(ccx.tcx, unit_ty);
decr_refcnt_maybe_free(bcx, v0, Some(unboxed_vec_ty))
}
ty::ty_uniq(content_ty) => {
let llbox = Load(bcx, v0);
let not_null = IsNotNull(bcx, llbox);
@ -471,16 +459,17 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> @tydesc_info {
let llsize = llsize_of(ccx, llty);
let llalign = llalign_of(ccx, llty);
let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc").to_managed();
note_unique_llvm_symbol(ccx, name);
let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc");
debug!("+++ declare_tydesc {} {}", ppaux::ty_to_str(ccx.tcx, t), name);
let gvar = name.with_c_str(|buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type.to_ref(), buf)
}
});
note_unique_llvm_symbol(ccx, name);
let ty_name = C_str_slice(ccx, ppaux::ty_to_str(ccx.tcx, t).to_managed());
let ty_name = token::intern_and_get_ident(ppaux::ty_to_str(ccx.tcx, t));
let ty_name = C_str_slice(ccx, ty_name);
let inf = @tydesc_info {
ty: t,
@ -498,10 +487,10 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> @tydesc_info {
fn declare_generic_glue(ccx: &CrateContext, t: ty::t, llfnty: Type,
name: &str) -> ValueRef {
let _icx = push_ctxt("declare_generic_glue");
let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, (~"glue_" + name)).to_managed();
let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, ~"glue_" + name);
debug!("{} is for type {}", fn_nm, ppaux::ty_to_str(ccx.tcx, t));
note_unique_llvm_symbol(ccx, fn_nm);
let llfn = decl_cdecl_fn(ccx.llmod, fn_nm, llfnty, ty::mk_nil());
note_unique_llvm_symbol(ccx, fn_nm);
return llfn;
}

View file

@ -152,7 +152,7 @@ pub fn maybe_instantiate_inline(ccx: @CrateContext, fn_id: ast::DefId)
let impl_tpt = ty::lookup_item_type(ccx.tcx, impl_did);
let num_type_params =
impl_tpt.generics.type_param_defs.len() +
impl_tpt.generics.type_param_defs().len() +
mth.generics.ty_params.len();
if num_type_params == 0 {

View file

@ -337,8 +337,10 @@ pub fn trans_intrinsic(ccx: @CrateContext,
Ret(bcx, td);
}
"type_id" => {
let hash = ty::hash_crate_independent(ccx.tcx, substs.tys[0],
ccx.link_meta.crate_hash);
let hash = ty::hash_crate_independent(
ccx.tcx,
substs.tys[0],
ccx.link_meta.crate_hash.clone());
// NB: This needs to be kept in lockstep with the TypeId struct in
// libstd/unstable/intrinsics.rs
let val = C_named_struct(type_of::type_of(ccx, output_type), [C_u64(hash)]);

View file

@ -182,7 +182,7 @@ pub fn trans_static_method_callee(bcx: &Block,
// out which impl the `Trait<T1...Tn>` bound on the type `self` was
// bound to.
let bound_index = ty::lookup_trait_def(bcx.tcx(), trait_id).
generics.type_param_defs.len();
generics.type_param_defs().len();
let mname = if method_id.crate == ast::LOCAL_CRATE {
{
@ -318,7 +318,7 @@ pub fn combine_impl_and_methods_tps(bcx: &Block,
let ccx = bcx.ccx();
let method = ty::method(ccx.tcx, mth_did);
let n_m_tps = method.generics.type_param_defs.len();
let n_m_tps = method.generics.type_param_defs().len();
let node_substs = node_id_type_params(bcx, callee_id);
debug!("rcvr_substs={:?}", rcvr_substs.repr(ccx.tcx));
let ty_substs

View file

@ -20,6 +20,7 @@ use middle::trans::datum::*;
use middle::trans::glue;
use middle::trans::machine;
use middle::trans::meth;
use middle::trans::type_::Type;
use middle::trans::type_of::*;
use middle::ty;
use util::ppaux::ty_to_str;
@ -31,9 +32,8 @@ use std::vec;
use syntax::ast::DefId;
use syntax::ast;
use syntax::ast_map::PathName;
use syntax::parse::token::special_idents;
use middle::trans::type_::Type;
use syntax::parse::token::{InternedString, special_idents};
use syntax::parse::token;
pub struct Reflector<'a> {
visitor_val: ValueRef,
@ -56,14 +56,14 @@ impl<'a> Reflector<'a> {
C_bool(b)
}
pub fn c_slice(&mut self, s: @str) -> ValueRef {
pub fn c_slice(&mut self, s: InternedString) -> ValueRef {
// We're careful to not use first class aggregates here because that
// will kick us off fast isel. (Issue #4352.)
let bcx = self.bcx;
let str_vstore = ty::vstore_slice(ty::ReStatic);
let str_ty = ty::mk_str(bcx.tcx(), str_vstore);
let scratch = rvalue_scratch_datum(bcx, str_ty, "");
let len = C_uint(bcx.ccx(), s.len());
let len = C_uint(bcx.ccx(), s.get().len());
let c_str = PointerCast(bcx, C_cstr(bcx.ccx(), s), Type::i8p());
Store(bcx, c_str, GEPi(bcx, scratch.val, [ 0, 0 ]));
Store(bcx, len, GEPi(bcx, scratch.val, [ 0, 1 ]));
@ -140,7 +140,6 @@ impl<'a> Reflector<'a> {
}
ty::vstore_slice(_) => (~"slice", ~[]),
ty::vstore_uniq => (~"uniq", ~[]),
ty::vstore_box => (~"box", ~[])
}
}
@ -260,15 +259,19 @@ impl<'a> Reflector<'a> {
fields[0].ident.name != special_idents::unnamed_field.name;
}
let extra = ~[self.c_slice(ty_to_str(tcx, t).to_managed()),
self.c_bool(named_fields),
self.c_uint(fields.len())] + self.c_size_and_align(t);
let extra = ~[
self.c_slice(token::intern_and_get_ident(ty_to_str(tcx,
t))),
self.c_bool(named_fields),
self.c_uint(fields.len())
] + self.c_size_and_align(t);
self.bracketed("class", extra, |this| {
for (i, field) in fields.iter().enumerate() {
let extra = ~[this.c_uint(i),
this.c_slice(bcx.ccx().sess.str_of(field.ident)),
this.c_bool(named_fields)]
+ this.c_mt(&field.mt);
let extra = ~[
this.c_uint(i),
this.c_slice(token::get_ident(field.ident.name)),
this.c_bool(named_fields)
] + this.c_mt(&field.mt);
this.visit("class_field", extra);
}
})
@ -330,7 +333,7 @@ impl<'a> Reflector<'a> {
+ self.c_size_and_align(t);
self.bracketed("enum", enum_args, |this| {
for (i, v) in variants.iter().enumerate() {
let name = ccx.sess.str_of(v.name);
let name = token::get_ident(v.name.name);
let variant_args = ~[this.c_uint(i),
C_u64(v.disr_val),
this.c_uint(v.args.len()),
@ -352,7 +355,9 @@ impl<'a> Reflector<'a> {
}
ty::ty_trait(_, _, _, _, _) => {
let extra = [self.c_slice(ty_to_str(tcx, t).to_managed())];
let extra = [
self.c_slice(token::intern_and_get_ident(ty_to_str(tcx, t)))
];
self.visit("trait", extra);
}

View file

@ -31,6 +31,7 @@ use middle::ty;
use util::ppaux::ty_to_str;
use syntax::ast;
use syntax::parse::token::InternedString;
// Boxed vector types are in some sense currently a "shorthand" for a box
// containing an unboxed vector. This expands a boxed vector type into such an
@ -43,9 +44,6 @@ pub fn expand_boxed_vec_ty(tcx: ty::ctxt, t: ty::t) -> ty::t {
ty::ty_str(ty::vstore_uniq) | ty::ty_vec(_, ty::vstore_uniq) => {
ty::mk_uniq(tcx, unboxed_vec_ty)
}
ty::ty_str(ty::vstore_box) | ty::ty_vec(_, ty::vstore_box) => {
ty::mk_box(tcx, unboxed_vec_ty)
}
_ => tcx.sess.bug("non boxed-vec type \
in tvec::expand_boxed_vec_ty")
}
@ -64,21 +62,6 @@ pub fn get_alloc(bcx: &Block, vptr: ValueRef) -> ValueRef {
Load(bcx, GEPi(bcx, vptr, [0u, abi::vec_elt_alloc]))
}
pub fn get_bodyptr(bcx: &Block, vptr: ValueRef, t: ty::t) -> ValueRef {
let vt = vec_types(bcx, t);
let managed = match ty::get(vt.vec_ty).sty {
ty::ty_str(ty::vstore_box) | ty::ty_vec(_, ty::vstore_box) => true,
_ => false
};
if managed {
GEPi(bcx, vptr, [0u, abi::box_field_body])
} else {
vptr
}
}
pub fn get_dataptr(bcx: &Block, vptr: ValueRef) -> ValueRef {
let _icx = push_ctxt("tvec::get_dataptr");
GEPi(bcx, vptr, [0u, abi::vec_elt_elems, 0u])
@ -127,11 +110,10 @@ pub fn alloc_uniq_raw<'a>(
alloc_raw(bcx, unit_ty, fill, alloc, heap_exchange)
}
pub fn alloc_vec<'a>(
pub fn alloc_uniq_vec<'a>(
bcx: &'a Block<'a>,
unit_ty: ty::t,
elts: uint,
heap: heap)
elts: uint)
-> Result<'a> {
let _icx = push_ctxt("tvec::alloc_uniq");
let ccx = bcx.ccx();
@ -142,7 +124,7 @@ pub fn alloc_vec<'a>(
let alloc = if elts < 4u { Mul(bcx, C_int(ccx, 4), unit_sz) }
else { fill };
let Result {bcx: bcx, val: vptr} =
alloc_raw(bcx, unit_ty, fill, alloc, heap);
alloc_raw(bcx, unit_ty, fill, alloc, heap_exchange);
return rslt(bcx, vptr);
}
@ -231,8 +213,11 @@ pub fn trans_slice_vstore<'a>(
match content_expr.node {
ast::ExprLit(lit) => {
match lit.node {
ast::LitStr(s, _) => {
return trans_lit_str(bcx, content_expr, s, dest);
ast::LitStr(ref s, _) => {
return trans_lit_str(bcx,
content_expr,
s.clone(),
dest)
}
_ => {}
}
@ -284,7 +269,7 @@ pub fn trans_slice_vstore<'a>(
pub fn trans_lit_str<'a>(
bcx: &'a Block<'a>,
lit_expr: &ast::Expr,
str_lit: @str,
str_lit: InternedString,
dest: Dest)
-> &'a Block<'a> {
/*!
@ -301,7 +286,7 @@ pub fn trans_lit_str<'a>(
Ignore => bcx,
SaveIn(lldest) => {
unsafe {
let bytes = str_lit.len();
let bytes = str_lit.get().len();
let llbytes = C_uint(bcx.ccx(), bytes);
let llcstr = C_cstr(bcx.ccx(), str_lit);
let llcstr = llvm::LLVMConstPointerCast(llcstr, Type::i8p().to_ref());
@ -316,66 +301,62 @@ pub fn trans_lit_str<'a>(
}
pub fn trans_uniq_or_managed_vstore<'a>(bcx: &'a Block<'a>,
heap: heap,
vstore_expr: &ast::Expr,
content_expr: &ast::Expr)
-> DatumBlock<'a, Expr> {
pub fn trans_uniq_vstore<'a>(bcx: &'a Block<'a>,
vstore_expr: &ast::Expr,
content_expr: &ast::Expr)
-> DatumBlock<'a, Expr> {
/*!
* @[...] or ~[...] (also @"..." or ~"...") allocate boxes in the
* appropriate heap and write the array elements into them.
* ~[...] and ~"..." allocate boxes in the exchange heap and write
* the array elements into them.
*/
debug!("trans_uniq_or_managed_vstore(vstore_expr={}, heap={:?})",
bcx.expr_to_str(vstore_expr), heap);
debug!("trans_uniq_vstore(vstore_expr={})", bcx.expr_to_str(vstore_expr));
let fcx = bcx.fcx;
// Handle ~"".
match heap {
heap_exchange => {
match content_expr.node {
ast::ExprLit(lit) => {
match lit.node {
ast::LitStr(s, _) => {
let llptrval = C_cstr(bcx.ccx(), s);
let llptrval = PointerCast(bcx, llptrval, Type::i8p());
let llsizeval = C_uint(bcx.ccx(), s.len());
let typ = ty::mk_str(bcx.tcx(), ty::vstore_uniq);
let lldestval = rvalue_scratch_datum(bcx, typ, "");
let alloc_fn = langcall(bcx,
Some(lit.span),
"",
StrDupUniqFnLangItem);
let bcx = callee::trans_lang_call(
bcx,
alloc_fn,
[ llptrval, llsizeval ],
Some(expr::SaveIn(lldestval.val))).bcx;
return DatumBlock(bcx, lldestval).to_expr_datumblock();
}
_ => {}
}
match content_expr.node {
ast::ExprLit(lit) => {
match lit.node {
ast::LitStr(ref s, _) => {
let llptrval = C_cstr(bcx.ccx(), (*s).clone());
let llptrval = PointerCast(bcx,
llptrval,
Type::i8p());
let llsizeval = C_uint(bcx.ccx(), s.get().len());
let typ = ty::mk_str(bcx.tcx(), ty::vstore_uniq);
let lldestval = rvalue_scratch_datum(bcx,
typ,
"");
let alloc_fn = langcall(bcx,
Some(lit.span),
"",
StrDupUniqFnLangItem);
let bcx = callee::trans_lang_call(
bcx,
alloc_fn,
[ llptrval, llsizeval ],
Some(expr::SaveIn(lldestval.val))).bcx;
return DatumBlock(bcx, lldestval).to_expr_datumblock();
}
_ => {}
}
}
heap_exchange_closure => fail!("vectors use exchange_alloc"),
heap_managed => {}
_ => {}
}
let vt = vec_types_from_expr(bcx, vstore_expr);
let count = elements_required(bcx, content_expr);
let Result {bcx, val} = alloc_vec(bcx, vt.unit_ty, count, heap);
let Result {bcx, val} = alloc_uniq_vec(bcx, vt.unit_ty, count);
// Create a temporary scope lest execution should fail while
// constructing the vector.
let temp_scope = fcx.push_custom_cleanup_scope();
fcx.schedule_free_value(cleanup::CustomScope(temp_scope), val, heap);
fcx.schedule_free_value(cleanup::CustomScope(temp_scope), val, heap_exchange);
let dataptr = get_dataptr(bcx, get_bodyptr(bcx, val, vt.vec_ty));
let dataptr = get_dataptr(bcx, val);
debug!("alloc_vec() returned val={}, dataptr={}",
debug!("alloc_uniq_vec() returned val={}, dataptr={}",
bcx.val_to_str(val), bcx.val_to_str(dataptr));
let bcx = write_content(bcx, &vt, vstore_expr,
@ -405,15 +386,13 @@ pub fn write_content<'a>(
match content_expr.node {
ast::ExprLit(lit) => {
match lit.node {
ast::LitStr(s, _) => {
ast::LitStr(ref s, _) => {
match dest {
Ignore => {
return bcx;
}
Ignore => return bcx,
SaveIn(lldest) => {
let bytes = s.len();
let bytes = s.get().len();
let llbytes = C_uint(bcx.ccx(), bytes);
let llcstr = C_cstr(bcx.ccx(), s);
let llcstr = C_cstr(bcx.ccx(), (*s).clone());
base::call_memcpy(bcx,
lldest,
llcstr,
@ -516,7 +495,7 @@ pub fn elements_required(bcx: &Block, content_expr: &ast::Expr) -> uint {
match content_expr.node {
ast::ExprLit(lit) => {
match lit.node {
ast::LitStr(s, _) => s.len(),
ast::LitStr(ref s, _) => s.get().len(),
_ => {
bcx.tcx().sess.span_bug(content_expr.span,
"Unexpected evec content")
@ -564,10 +543,9 @@ pub fn get_base_and_byte_len(bcx: &Block,
let len = Mul(bcx, count, vt.llunit_size);
(base, len)
}
ty::vstore_uniq | ty::vstore_box => {
ty::vstore_uniq => {
assert!(type_is_immediate(bcx.ccx(), vt.vec_ty));
let llval = Load(bcx, llval);
let body = get_bodyptr(bcx, llval, vec_ty);
let body = Load(bcx, llval);
(get_dataptr(bcx, body), get_fill(bcx, body))
}
}
@ -604,10 +582,9 @@ pub fn get_base_and_len(bcx: &Block,
let count = Load(bcx, GEPi(bcx, llval, [0u, abi::slice_elt_len]));
(base, count)
}
ty::vstore_uniq | ty::vstore_box => {
ty::vstore_uniq => {
assert!(type_is_immediate(bcx.ccx(), vt.vec_ty));
let llval = Load(bcx, llval);
let body = get_bodyptr(bcx, llval, vec_ty);
let body = Load(bcx, llval);
(get_dataptr(bcx, body), UDiv(bcx, get_fill(bcx, body), vt.llunit_size))
}
}
@ -724,7 +701,7 @@ pub fn iter_vec_uniq<'r,
f: iter_vec_block<'r,'b>)
-> &'b Block<'b> {
let _icx = push_ctxt("tvec::iter_vec_uniq");
let data_ptr = get_dataptr(bcx, get_bodyptr(bcx, vptr, vec_ty));
let data_ptr = get_dataptr(bcx, vptr);
iter_vec_raw(bcx, data_ptr, vec_ty, fill, f)
}

View file

@ -115,9 +115,7 @@ pub fn sizing_type_of(cx: &CrateContext, t: ty::t) -> Type {
ty::ty_float(t) => Type::float_from_ty(t),
ty::ty_str(ty::vstore_uniq) |
ty::ty_str(ty::vstore_box) |
ty::ty_vec(_, ty::vstore_uniq) |
ty::ty_vec(_, ty::vstore_box) |
ty::ty_box(..) |
ty::ty_uniq(..) |
ty::ty_ptr(..) |
@ -221,13 +219,6 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type {
let name = llvm_type_name(cx, an_enum, did, substs.tps);
adt::incomplete_type_of(cx, repr, name)
}
ty::ty_str(ty::vstore_box) => {
Type::at_box(cx, Type::vec(cx.sess.targ_cfg.arch, &Type::i8())).ptr_to()
}
ty::ty_vec(ref mt, ty::vstore_box) => {
let e_ty = type_of(cx, mt.ty);
Type::at_box(cx, Type::vec(cx.sess.targ_cfg.arch, &e_ty)).ptr_to()
}
ty::ty_box(typ) => {
Type::at_box(cx, type_of(cx, typ)).ptr_to()
}

View file

@ -46,10 +46,10 @@ fn root<'a, K:KindOps>(datum: &Datum<K>,
_span: Span,
root_key: root_map_key,
root_info: RootInfo) -> &'a Block<'a> {
//! In some cases, borrowck will decide that an @T/@[]/@str
//! value must be rooted for the program to be safe. In that
//! case, we will call this function, which will stash a copy
//! away until we exit the scope `scope_id`.
//! In some cases, borrowck will decide that an @T value must be
//! rooted for the program to be safe. In that case, we will call
//! this function, which will stash a copy away until we exit the
//! scope `scope_id`.
debug!("write_guard::root(root_key={:?}, root_info={:?}, datum={:?})",
root_key, root_info, datum.to_str(bcx.ccx()));
@ -62,4 +62,3 @@ fn root<'a, K:KindOps>(datum: &Datum<K>,
cleanup::AstScope(root_info.scope), (),
|(), bcx, llval| datum.shallow_copy_and_take(bcx, llval)).bcx
}

View file

@ -34,6 +34,7 @@ use std::cmp;
use std::hashmap::{HashMap, HashSet};
use std::ops;
use std::ptr::to_unsafe_ptr;
use std::rc::Rc;
use std::to_bytes;
use std::to_str::ToStr;
use std::vec;
@ -129,7 +130,6 @@ pub struct mt {
pub enum vstore {
vstore_fixed(uint),
vstore_uniq,
vstore_box,
vstore_slice(Region)
}
@ -226,10 +226,10 @@ pub enum AutoRef {
/// Convert from T to &T
AutoPtr(Region, ast::Mutability),
/// Convert from @[]/~[]/&[] to &[] (or str)
/// Convert from ~[]/&[] to &[] (or str)
AutoBorrowVec(Region, ast::Mutability),
/// Convert from @[]/~[]/&[] to &&[] (or str)
/// Convert from ~[]/&[] to &&[] (or str)
AutoBorrowVecRef(Region, ast::Mutability),
/// Convert from @fn()/~fn()/|| to ||
@ -291,7 +291,7 @@ pub struct ctxt_ {
freevars: RefCell<freevars::freevar_map>,
tcache: type_cache,
rcache: creader_cache,
short_names_cache: RefCell<HashMap<t, @str>>,
short_names_cache: RefCell<HashMap<t, ~str>>,
needs_unwind_cleanup_cache: RefCell<HashMap<t, bool>>,
tc_cache: RefCell<HashMap<uint, TypeContents>>,
ast_ty_to_ty_cache: RefCell<HashMap<NodeId, ast_ty_to_ty_cache_entry>>,
@ -870,15 +870,21 @@ pub struct RegionParameterDef {
#[deriving(Clone)]
pub struct Generics {
/// List of type parameters declared on the item.
type_param_defs: @~[TypeParameterDef],
type_param_defs: Rc<~[TypeParameterDef]>,
/// List of region parameters declared on the item.
region_param_defs: @[RegionParameterDef],
region_param_defs: Rc<~[RegionParameterDef]>,
}
impl Generics {
pub fn has_type_params(&self) -> bool {
!self.type_param_defs.is_empty()
!self.type_param_defs.borrow().is_empty()
}
pub fn type_param_defs<'a>(&'a self) -> &'a [TypeParameterDef] {
self.type_param_defs.borrow().as_slice()
}
pub fn region_param_defs<'a>(&'a self) -> &'a [RegionParameterDef] {
self.region_param_defs.borrow().as_slice()
}
}
@ -1551,7 +1557,7 @@ pub fn type_is_box(ty: t) -> bool {
pub fn type_is_boxed(ty: t) -> bool {
match get(ty).sty {
ty_box(_) | ty_vec(_, vstore_box) | ty_str(vstore_box) => true,
ty_box(_) => true,
_ => false
}
}
@ -1675,10 +1681,7 @@ fn type_needs_unwind_cleanup_(cx: ctxt, ty: t,
}
ty_uniq(_) |
ty_str(vstore_uniq) |
ty_str(vstore_box) |
ty_vec(_, vstore_uniq) |
ty_vec(_, vstore_box)
=> {
ty_vec(_, vstore_uniq) => {
// Once we're inside a box, the annihilator will find
// it and destroy it.
if !encountered_box {
@ -2021,10 +2024,6 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
tc_mt(cx, mt, cache).owned_pointer()
}
ty_vec(mt, vstore_box) => {
tc_mt(cx, mt, cache).managed_pointer()
}
ty_vec(ref mt, vstore_slice(r)) => {
tc_ty(cx, mt.ty, cache).reference(
borrowed_contents(r, mt.mutbl))
@ -2034,10 +2033,6 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
tc_mt(cx, mt, cache)
}
ty_str(vstore_box) => {
TC::Managed
}
ty_str(vstore_slice(r)) => {
borrowed_contents(r, ast::MutImmutable)
}
@ -2523,8 +2518,8 @@ pub fn type_is_pod(cx: ctxt, ty: t) -> bool {
ty_type | ty_ptr(_) | ty_bare_fn(_) => result = true,
// Boxed types
ty_box(_) | ty_uniq(_) | ty_closure(_) |
ty_str(vstore_uniq) | ty_str(vstore_box) |
ty_vec(_, vstore_uniq) | ty_vec(_, vstore_box) |
ty_str(vstore_uniq) |
ty_vec(_, vstore_uniq) |
ty_trait(_, _, _, _, _) | ty_rptr(_,_) => result = false,
// Structural types
ty_enum(did, ref substs) => {
@ -3105,7 +3100,7 @@ pub fn expr_has_ty_params(cx: ctxt, expr: &ast::Expr) -> bool {
pub fn method_call_type_param_defs(tcx: ctxt,
method_map: typeck::method_map,
id: ast::NodeId)
-> Option<@~[TypeParameterDef]> {
-> Option<Rc<~[TypeParameterDef]>> {
let method_map = method_map.borrow();
method_map.get().find(&id).map(|method| {
match method.origin {
@ -3125,12 +3120,12 @@ pub fn method_call_type_param_defs(tcx: ctxt,
// method bounds, so we must preprend the tps from the
// trait itself. This ought to be harmonized.
let trait_type_param_defs =
lookup_trait_def(tcx, trt_id).generics.type_param_defs;
@vec::append(
(*trait_type_param_defs).clone(),
*ty::trait_method(tcx,
trt_id,
n_mth).generics.type_param_defs)
lookup_trait_def(tcx, trt_id).generics.type_param_defs();
Rc::new(vec::append(
trait_type_param_defs.to_owned(),
ty::trait_method(tcx,
trt_id,
n_mth).generics.type_param_defs()))
}
}
})
@ -3296,7 +3291,6 @@ pub fn expr_kind(tcx: ctxt,
ast::ExprUnary(..) |
ast::ExprAddrOf(..) |
ast::ExprBinary(..) |
ast::ExprVstore(_, ast::ExprVstoreBox) |
ast::ExprVstore(_, ast::ExprVstoreUniq) => {
RvalueDatumExpr
}
@ -3344,9 +3338,10 @@ pub fn field_idx_strict(tcx: ty::ctxt, name: ast::Name, fields: &[field])
-> uint {
let mut i = 0u;
for f in fields.iter() { if f.ident.name == name { return i; } i += 1u; }
let string = token::get_ident(name);
tcx.sess.bug(format!(
"No field named `{}` found in the list of fields `{:?}`",
token::interner_get(name),
string.get(),
fields.map(|f| tcx.sess.str_of(f.ident))));
}
@ -4165,7 +4160,7 @@ pub fn each_attr(tcx: ctxt, did: DefId, f: |@MetaItem| -> bool) -> bool {
pub fn has_attr(tcx: ctxt, did: DefId, attr: &str) -> bool {
let mut found = false;
each_attr(tcx, did, |item| {
if attr == item.name() {
if item.name().equiv(&attr) {
found = true;
false
} else {
@ -4211,7 +4206,7 @@ pub fn lookup_field_type(tcx: ctxt,
Some(&ty_param_bounds_and_ty {ty, ..}) => ty,
None => {
let tpt = csearch::get_field_type(tcx, struct_id, id);
tcache.get().insert(id, tpt);
tcache.get().insert(id, tpt.clone());
tpt.ty
}
}
@ -4419,7 +4414,7 @@ pub fn normalize_ty(cx: ctxt, t: t) -> t {
fn fold_vstore(&mut self, vstore: vstore) -> vstore {
match vstore {
vstore_fixed(..) | vstore_uniq | vstore_box => vstore,
vstore_fixed(..) | vstore_uniq => vstore,
vstore_slice(_) => vstore_slice(ReStatic)
}
}
@ -4834,7 +4829,7 @@ pub fn trait_method_of_method(tcx: ctxt,
/// Creates a hash of the type `t` which will be the same no matter what crate
/// context it's calculated within. This is used by the `type_id` intrinsic.
pub fn hash_crate_independent(tcx: ctxt, t: t, local_hash: @str) -> u64 {
pub fn hash_crate_independent(tcx: ctxt, t: t, local_hash: ~str) -> u64 {
use std::hash::{SipState, Streaming};
let mut hash = SipState::new(0, 0);
@ -4856,7 +4851,6 @@ pub fn hash_crate_independent(tcx: ctxt, t: t, local_hash: @str) -> u64 {
match v {
vstore_fixed(_) => hash.input([0]),
vstore_uniq => hash.input([1]),
vstore_box => hash.input([2]),
vstore_slice(r) => {
hash.input([3]);
region(hash, r);
@ -4865,7 +4859,7 @@ pub fn hash_crate_independent(tcx: ctxt, t: t, local_hash: @str) -> u64 {
};
let did = |hash: &mut SipState, did: DefId| {
let h = if ast_util::is_local(did) {
local_hash
local_hash.clone()
} else {
tcx.sess.cstore.get_crate_hash(did.crate)
};

View file

@ -202,7 +202,6 @@ pub fn super_fold_vstore<T:TypeFolder>(this: &mut T,
match vstore {
ty::vstore_fixed(i) => ty::vstore_fixed(i),
ty::vstore_uniq => ty::vstore_uniq,
ty::vstore_box => ty::vstore_box,
ty::vstore_slice(r) => ty::vstore_slice(this.fold_region(r)),
}
}

View file

@ -170,7 +170,7 @@ fn ast_path_substs<AC:AstConv,RS:RegionScope>(
// If the type is parameterized by the this region, then replace this
// region with the current anon region binding (in other words,
// whatever & would get replaced with).
let expected_num_region_params = decl_generics.region_param_defs.len();
let expected_num_region_params = decl_generics.region_param_defs().len();
let supplied_num_region_params = path.segments.last().unwrap().lifetimes.len();
let regions = if expected_num_region_params == supplied_num_region_params {
path.segments.last().unwrap().lifetimes.map(
@ -197,8 +197,8 @@ fn ast_path_substs<AC:AstConv,RS:RegionScope>(
// Convert the type parameters supplied by the user.
let supplied_ty_param_count = path.segments.iter().flat_map(|s| s.types.iter()).len();
let formal_ty_param_count = decl_generics.type_param_defs.len();
let required_ty_param_count = decl_generics.type_param_defs.iter()
let formal_ty_param_count = decl_generics.type_param_defs().len();
let required_ty_param_count = decl_generics.type_param_defs().iter()
.take_while(|x| x.default.is_none())
.len();
if supplied_ty_param_count < required_ty_param_count {
@ -228,7 +228,7 @@ fn ast_path_substs<AC:AstConv,RS:RegionScope>(
~"provided type arguments with defaults");
}
let defaults = decl_generics.type_param_defs.slice_from(supplied_ty_param_count)
let defaults = decl_generics.type_param_defs().slice_from(supplied_ty_param_count)
.iter().map(|&x| x.default.unwrap());
let tps = path.segments.iter().flat_map(|s| s.types.iter())
.map(|&a_t| ast_ty_to_ty(this, rscope, a_t))
@ -384,6 +384,23 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
ty::mt {ty: ast_ty_to_ty(this, rscope, mt.ty), mutbl: mt.mutbl}
}
enum PointerTy {
Box,
VStore(ty::vstore)
}
impl PointerTy {
fn expect_vstore(&self, tcx: ty::ctxt, span: Span, ty: &str) -> ty::vstore {
match *self {
Box => {
tcx.sess.span_err(span, format!("managed {} are not supported", ty));
// everything can be ~, so this is a worth substitute
ty::vstore_uniq
}
VStore(vst) => vst
}
}
}
// Handle @, ~, and & being able to mean strs and vecs.
// If a_seq_ty is a str or a vec, make it a str/vec.
// Also handle first-class trait types.
@ -392,17 +409,18 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
this: &AC,
rscope: &RS,
a_seq_ty: &ast::MutTy,
vst: ty::vstore,
ptr_ty: PointerTy,
constr: |ty::mt| -> ty::t)
-> ty::t {
let tcx = this.tcx();
debug!("mk_pointer(vst={:?})", vst);
debug!("mk_pointer(ptr_ty={:?})", ptr_ty);
match a_seq_ty.ty.node {
ast::TyVec(ty) => {
let vst = ptr_ty.expect_vstore(tcx, a_seq_ty.ty.span, "vectors");
let mut mt = ast_ty_to_mt(this, rscope, ty);
if a_seq_ty.mutbl == ast::MutMutable {
mt = ty::mt { ty: mt.ty, mutbl: a_seq_ty.mutbl };
mt.mutbl = ast::MutMutable;
}
debug!("&[]: vst={:?}", vst);
return ty::mk_vec(tcx, mt, vst);
@ -413,20 +431,22 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
// will run after this as long as the path isn't a trait.
let def_map = tcx.def_map.borrow();
match def_map.get().find(&id) {
Some(&ast::DefPrimTy(ast::TyStr)) if a_seq_ty.mutbl == ast::MutImmutable => {
Some(&ast::DefPrimTy(ast::TyStr)) if
a_seq_ty.mutbl == ast::MutImmutable => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
let vst = ptr_ty.expect_vstore(tcx, path.span, "strings");
return ty::mk_str(tcx, vst);
}
Some(&ast::DefTrait(trait_def_id)) => {
let result = ast_path_to_trait_ref(
this, rscope, trait_def_id, None, path);
let trait_store = match vst {
ty::vstore_box => ty::BoxTraitStore,
ty::vstore_uniq => ty::UniqTraitStore,
ty::vstore_slice(r) => {
let trait_store = match ptr_ty {
Box => ty::BoxTraitStore,
VStore(ty::vstore_uniq) => ty::UniqTraitStore,
VStore(ty::vstore_slice(r)) => {
ty::RegionTraitStore(r)
}
ty::vstore_fixed(..) => {
VStore(ty::vstore_fixed(..)) => {
tcx.sess.span_err(
path.span,
"@trait, ~trait or &trait are the only supported \
@ -474,12 +494,11 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
ast::TyBot => ty::mk_bot(),
ast::TyBox(ty) => {
let mt = ast::MutTy { ty: ty, mutbl: ast::MutImmutable };
mk_pointer(this, rscope, &mt, ty::vstore_box,
|tmt| ty::mk_box(tcx, tmt.ty))
mk_pointer(this, rscope, &mt, Box, |tmt| ty::mk_box(tcx, tmt.ty))
}
ast::TyUniq(ty) => {
let mt = ast::MutTy { ty: ty, mutbl: ast::MutImmutable };
mk_pointer(this, rscope, &mt, ty::vstore_uniq,
mk_pointer(this, rscope, &mt, VStore(ty::vstore_uniq),
|tmt| ty::mk_uniq(tcx, tmt.ty))
}
ast::TyVec(ty) => {
@ -493,7 +512,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
ast::TyRptr(ref region, ref mt) => {
let r = opt_ast_region_to_region(this, rscope, ast_ty.span, region);
debug!("ty_rptr r={}", r.repr(this.tcx()));
mk_pointer(this, rscope, mt, ty::vstore_slice(r),
mk_pointer(this, rscope, mt, VStore(ty::vstore_slice(r)),
|tmt| ty::mk_rptr(tcx, r, tmt))
}
ast::TyTup(ref fields) => {

View file

@ -147,7 +147,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
ty::enum_variant_with_id(tcx, enm, var);
let var_tpt = ty::lookup_item_type(tcx, var);
vinfo.args.map(|t| {
if var_tpt.generics.type_param_defs.len() ==
if var_tpt.generics.type_param_defs().len() ==
expected_substs.tps.len()
{
ty::subst(tcx, expected_substs, *t)
@ -172,7 +172,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
None);
fcx.write_error(pat.id);
kind_name = "[error]";
arg_types = (*subpats).clone()
arg_types = subpats.clone()
.unwrap_or_default()
.map(|_| ty::mk_err());
}
@ -221,7 +221,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
None);
fcx.write_error(pat.id);
kind_name = "[error]";
arg_types = (*subpats).clone()
arg_types = subpats.clone()
.unwrap_or_default()
.map(|_| ty::mk_err());
}
@ -339,9 +339,11 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
if found_fields.contains(&i) {
continue;
}
let string = token::get_ident(field.name);
tcx.sess.span_err(span,
format!("pattern does not mention field `{}`",
token::interner_get(field.name)));
string.get()));
}
}
}
@ -602,7 +604,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
ty::ty_vec(mt, vstore) => {
let region_var = match vstore {
ty::vstore_slice(r) => r,
ty::vstore_box | ty::vstore_uniq | ty::vstore_fixed(_) => {
ty::vstore_uniq | ty::vstore_fixed(_) => {
default_region_var
}
};
@ -697,4 +699,3 @@ pub fn check_pointer_pat(pcx: &pat_ctxt,
#[deriving(Eq)]
enum PointerKind { Send, Borrowed }

View file

@ -555,8 +555,10 @@ impl<'a> LookupContext<'a> {
return; // already visited
}
}
let method_name = token::get_ident(self.m_name);
debug!("push_candidates_from_impl: {} {} {}",
token::interner_get(self.m_name),
method_name.get(),
impl_info.ident.repr(self.tcx()),
impl_info.methods.map(|m| m.ident).repr(self.tcx()));
@ -697,7 +699,6 @@ impl<'a> LookupContext<'a> {
let tcx = self.tcx();
let sty = ty::get(self_ty).sty.clone();
match sty {
ty_vec(mt, vstore_box) |
ty_vec(mt, vstore_uniq) |
ty_vec(mt, vstore_slice(_)) | // NDM(#3148)
ty_vec(mt, vstore_fixed(_)) => {
@ -726,7 +727,6 @@ impl<'a> LookupContext<'a> {
})
}
ty_str(vstore_box) |
ty_str(vstore_uniq) |
ty_str(vstore_fixed(_)) => {
let entry = self.search_for_some_kind_of_autorefd_method(
@ -952,7 +952,7 @@ impl<'a> LookupContext<'a> {
// If they were not explicitly supplied, just construct fresh
// type variables.
let num_supplied_tps = self.supplied_tps.len();
let num_method_tps = candidate.method_ty.generics.type_param_defs.len();
let num_method_tps = candidate.method_ty.generics.type_param_defs().len();
let m_substs = {
if num_supplied_tps == 0u {
self.fcx.infcx().next_ty_vars(num_method_tps)

View file

@ -564,7 +564,7 @@ pub fn check_item(ccx: @CrateCtxt, it: &ast::Item) {
let param_env = ty::construct_parameter_environment(
ccx.tcx,
None,
*fn_tpt.generics.type_param_defs,
fn_tpt.generics.type_param_defs(),
[],
[],
body.id);
@ -674,9 +674,9 @@ fn check_method_body(ccx: @CrateCtxt,
ty::construct_parameter_environment(
ccx.tcx,
self_bound,
*item_generics.type_param_defs,
*method_generics.type_param_defs,
item_generics.region_param_defs,
item_generics.type_param_defs(),
method_generics.type_param_defs(),
item_generics.region_param_defs(),
method.body.id);
// Compute the fty from point of view of inside fn
@ -776,7 +776,7 @@ fn compare_impl_method(tcx: ty::ctxt,
debug!("compare_impl_method()");
let infcx = infer::new_infer_ctxt(tcx);
let impl_tps = impl_generics.type_param_defs.len();
let impl_tps = impl_generics.type_param_defs().len();
// Try to give more informative error messages about self typing
// mismatches. Note that any mismatch will also be detected
@ -812,8 +812,8 @@ fn compare_impl_method(tcx: ty::ctxt,
}
}
let num_impl_m_type_params = impl_m.generics.type_param_defs.len();
let num_trait_m_type_params = trait_m.generics.type_param_defs.len();
let num_impl_m_type_params = impl_m.generics.type_param_defs().len();
let num_trait_m_type_params = trait_m.generics.type_param_defs().len();
if num_impl_m_type_params != num_trait_m_type_params {
tcx.sess.span_err(
impl_m_span,
@ -838,10 +838,10 @@ fn compare_impl_method(tcx: ty::ctxt,
return;
}
for (i, trait_param_def) in trait_m.generics.type_param_defs.iter().enumerate() {
// For each of the corresponding impl ty param's bounds...
let impl_param_def = &impl_m.generics.type_param_defs[i];
let it = trait_m.generics.type_param_defs().iter()
.zip(impl_m.generics.type_param_defs().iter());
for (i, (trait_param_def, impl_param_def)) in it.enumerate() {
// Check that the impl does not require any builtin-bounds
// that the trait does not guarantee:
let extra_bounds =
@ -886,15 +886,15 @@ fn compare_impl_method(tcx: ty::ctxt,
// impl type is "&'a str", then this would replace the self
// type with a free region `self`.
let dummy_impl_tps: ~[ty::t] =
impl_generics.type_param_defs.iter().enumerate().
impl_generics.type_param_defs().iter().enumerate().
map(|(i,t)| ty::mk_param(tcx, i, t.def_id)).
collect();
let dummy_method_tps: ~[ty::t] =
impl_m.generics.type_param_defs.iter().enumerate().
impl_m.generics.type_param_defs().iter().enumerate().
map(|(i,t)| ty::mk_param(tcx, i + impl_tps, t.def_id)).
collect();
let dummy_impl_regions: OptVec<ty::Region> =
impl_generics.region_param_defs.iter().
impl_generics.region_param_defs().iter().
map(|l| ty::ReFree(ty::FreeRegion {
scope_id: impl_m_body_id,
bound_region: ty::BrNamed(l.def_id, l.ident)})).
@ -1374,8 +1374,8 @@ pub fn impl_self_ty(vcx: &VtableContext,
let (n_tps, n_rps, raw_ty) = {
let ity = ty::lookup_item_type(tcx, did);
(ity.generics.type_param_defs.len(),
ity.generics.region_param_defs.len(),
(ity.generics.type_param_defs().len(),
ity.generics.region_param_defs().len(),
ity.ty)
};
@ -1419,10 +1419,10 @@ fn generics_of_static_method_container(type_context: ty::ctxt,
-> ty::Generics {
match provenance {
ast::FromTrait(trait_def_id) => {
ty::lookup_trait_def(type_context, trait_def_id).generics
ty::lookup_trait_def(type_context, trait_def_id).generics.clone()
}
ast::FromImpl(impl_def_id) => {
ty::lookup_item_type(type_context, impl_def_id).generics
ty::lookup_item_type(type_context, impl_def_id).generics.clone()
}
}
}
@ -1485,7 +1485,7 @@ fn check_type_parameter_positions_in_path(function_context: @FnCtxt,
// Make sure lifetime parameterization agrees with the trait or
// implementation type.
let trait_region_parameter_count = generics.region_param_defs.len();
let trait_region_parameter_count = generics.region_param_defs().len();
let supplied_region_parameter_count = trait_segment.lifetimes.len();
if trait_region_parameter_count != supplied_region_parameter_count
&& supplied_region_parameter_count != 0 {
@ -1501,8 +1501,8 @@ fn check_type_parameter_positions_in_path(function_context: @FnCtxt,
// Make sure the number of type parameters supplied on the trait
// or implementation segment equals the number of type parameters
// on the trait or implementation definition.
let formal_ty_param_count = generics.type_param_defs.len();
let required_ty_param_count = generics.type_param_defs.iter()
let formal_ty_param_count = generics.type_param_defs().len();
let required_ty_param_count = generics.type_param_defs().iter()
.take_while(|x| x.default.is_none())
.len();
let supplied_ty_param_count = trait_segment.types.len();
@ -1517,7 +1517,7 @@ fn check_type_parameter_positions_in_path(function_context: @FnCtxt,
} else {
"s"
};
let needs = if required_ty_param_count < generics.type_param_defs.len() {
let needs = if required_ty_param_count < generics.type_param_defs().len() {
"needs at least"
} else {
"needs"
@ -1539,7 +1539,7 @@ fn check_type_parameter_positions_in_path(function_context: @FnCtxt,
} else {
"s"
};
let needs = if required_ty_param_count < generics.type_param_defs.len() {
let needs = if required_ty_param_count < generics.type_param_defs().len() {
"needs at most"
} else {
"needs"
@ -2335,9 +2335,11 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
fcx.type_error_message(
expr.span,
|actual| {
let string = token::get_ident(field);
format!("attempted to take value of method `{}` on type `{}` \
(try writing an anonymous function)",
token::interner_get(field), actual)
(try writing an anonymous function)",
string.get(),
actual)
},
expr_t, None);
}
@ -2346,9 +2348,11 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
fcx.type_error_message(
expr.span,
|actual| {
let string = token::get_ident(field);
format!("attempted access of field `{}` on type `{}`, \
but no field with that name was found",
token::interner_get(field), actual)
but no field with that name was found",
string.get(),
actual)
},
expr_t, None);
}
@ -2428,8 +2432,8 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
let name = class_field.name;
let (_, seen) = *class_field_map.get(&name);
if !seen {
missing_fields.push(
~"`" + token::interner_get(name) + "`");
let string = token::get_ident(name);
missing_fields.push(~"`" + string.get() + "`");
}
}
@ -2461,8 +2465,8 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
// Look up the number of type parameters and the raw type, and
// determine whether the class is region-parameterized.
let item_type = ty::lookup_item_type(tcx, class_id);
let type_parameter_count = item_type.generics.type_param_defs.len();
let region_parameter_count = item_type.generics.region_param_defs.len();
let type_parameter_count = item_type.generics.type_param_defs().len();
let region_parameter_count = item_type.generics.region_param_defs().len();
let raw_type = item_type.ty;
// Generate the struct type.
@ -2519,8 +2523,8 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
// Look up the number of type parameters and the raw type, and
// determine whether the enum is region-parameterized.
let item_type = ty::lookup_item_type(tcx, enum_id);
let type_parameter_count = item_type.generics.type_param_defs.len();
let region_parameter_count = item_type.generics.region_param_defs.len();
let type_parameter_count = item_type.generics.type_param_defs().len();
let region_parameter_count = item_type.generics.region_param_defs().len();
let raw_type = item_type.ty;
// Generate the enum type.
@ -3706,8 +3710,8 @@ pub fn instantiate_path(fcx: @FnCtxt,
node_id: ast::NodeId) {
debug!(">>> instantiate_path");
let ty_param_count = tpt.generics.type_param_defs.len();
let ty_param_req = tpt.generics.type_param_defs.iter()
let ty_param_count = tpt.generics.type_param_defs().len();
let ty_param_req = tpt.generics.type_param_defs().iter()
.take_while(|x| x.default.is_none())
.len();
let mut ty_substs_len = 0;
@ -3722,7 +3726,7 @@ pub fn instantiate_path(fcx: @FnCtxt,
// determine the region parameters, using the value given by the user
// (if any) and otherwise using a fresh region variable
let num_expected_regions = tpt.generics.region_param_defs.len();
let num_expected_regions = tpt.generics.region_param_defs().len();
let num_supplied_regions = pth.segments.last().unwrap().lifetimes.len();
let regions = if num_expected_regions == num_supplied_regions {
pth.segments.last().unwrap().lifetimes.map(
@ -3751,7 +3755,7 @@ pub fn instantiate_path(fcx: @FnCtxt,
ast::DefStaticMethod(_, provenance @ ast::FromTrait(_), _) => {
let generics = generics_of_static_method_container(fcx.ccx.tcx,
provenance);
(ty_param_count - 1, ty_param_req - 1, Some(generics.type_param_defs.len()))
(ty_param_count - 1, ty_param_req - 1, Some(generics.type_param_defs().len()))
}
_ => (ty_param_count, ty_param_req, None),
};
@ -3796,7 +3800,7 @@ pub fn instantiate_path(fcx: @FnCtxt,
// at the appropriate position.
let mut result = ~[];
let mut pushed = false;
let defaults = tpt.generics.type_param_defs.iter()
let defaults = tpt.generics.type_param_defs().iter()
.enumerate().filter_map(|(i, x)| {
match self_parameter_index {
Some(index) if index == i => None,
@ -3905,7 +3909,6 @@ pub fn ast_expr_vstore_to_vstore(fcx: @FnCtxt,
-> ty::vstore {
match v {
ast::ExprVstoreUniq => ty::vstore_uniq,
ast::ExprVstoreBox => ty::vstore_box,
ast::ExprVstoreSlice | ast::ExprVstoreMutSlice => {
match e.node {
ast::ExprLit(..) |
@ -4301,7 +4304,7 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: &ast::ForeignItem) {
variadic: false}
});
let i_ty = ty::lookup_item_type(ccx.tcx, local_def(it.id));
let i_n_tps = i_ty.generics.type_param_defs.len();
let i_n_tps = i_ty.generics.type_param_defs().len();
if i_n_tps != n_tps {
tcx.sess.span_err(it.span, format!("intrinsic has wrong number \
of type parameters: found {}, \

View file

@ -1215,9 +1215,7 @@ pub mod guarantor {
}
ty::ty_box(..) |
ty::ty_ptr(..) |
ty::ty_vec(_, ty::vstore_box) |
ty::ty_trait(_, _, ty::BoxTraitStore, _, _) |
ty::ty_str(ty::vstore_box) => {
ty::ty_trait(_, _, ty::BoxTraitStore, _, _) => {
OtherPointer
}
ty::ty_closure(ref closure_ty) => {
@ -1301,7 +1299,6 @@ pub mod guarantor {
let guarantor1 = match vstore {
ty::vstore_fixed(_) | ty::vstore_uniq => guarantor,
ty::vstore_slice(r) => Some(r),
ty::vstore_box => None
};
link_ref_bindings_in_pats(rcx, before, guarantor1);

View file

@ -423,7 +423,7 @@ fn search_for_vtable(vcx: &VtableContext,
let im_generics =
ty::lookup_item_type(tcx, im.did).generics;
let subres = lookup_vtables(vcx, location_info,
*im_generics.type_param_defs, &substs,
im_generics.type_param_defs(), &substs,
is_early);
@ -688,12 +688,12 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: @FnCtxt, is_early: bool) {
let item_ty = ty::lookup_item_type(cx.tcx, did);
debug!("early resolve expr: def {:?} {:?}, {:?}, {}", ex.id, did, def,
fcx.infcx().ty_to_str(item_ty.ty));
if has_trait_bounds(*item_ty.generics.type_param_defs) {
if has_trait_bounds(item_ty.generics.type_param_defs()) {
debug!("early_resolve_expr: looking up vtables for type params {}",
item_ty.generics.type_param_defs.repr(fcx.tcx()));
item_ty.generics.type_param_defs().repr(fcx.tcx()));
let vcx = fcx.vtable_context();
let vtbls = lookup_vtables(&vcx, &location_info_for_expr(ex),
*item_ty.generics.type_param_defs,
item_ty.generics.type_param_defs(),
substs, is_early);
if !is_early {
insert_vtables(fcx, ex.id, vtbls);
@ -717,11 +717,11 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: @FnCtxt, is_early: bool) {
Some(type_param_defs) => {
debug!("vtable resolution on parameter bounds for method call {}",
ex.repr(fcx.tcx()));
if has_trait_bounds(*type_param_defs) {
if has_trait_bounds(*type_param_defs.borrow()) {
let substs = fcx.node_ty_substs(callee_id);
let vcx = fcx.vtable_context();
let vtbls = lookup_vtables(&vcx, &location_info_for_expr(ex),
*type_param_defs, &substs, is_early);
*type_param_defs.borrow(), &substs, is_early);
if !is_early {
insert_vtables(fcx, callee_id, vtbls);
}
@ -784,9 +784,9 @@ pub fn resolve_impl(ccx: @CrateCtxt,
let param_env = ty::construct_parameter_environment(
ccx.tcx,
None,
*impl_generics.type_param_defs,
impl_generics.type_param_defs(),
[],
impl_generics.region_param_defs,
impl_generics.region_param_defs(),
impl_item.id);
let impl_trait_ref = @impl_trait_ref.subst(ccx.tcx, &param_env.free_substs);
@ -800,7 +800,7 @@ pub fn resolve_impl(ccx: @CrateCtxt,
let trait_def = ty::lookup_trait_def(ccx.tcx, impl_trait_ref.def_id);
let vtbls = lookup_vtables(&vcx,
&loc_info,
*trait_def.generics.type_param_defs,
trait_def.generics.type_param_defs(),
&impl_trait_ref.substs,
false);

View file

@ -47,13 +47,13 @@ use syntax::visit;
use std::cell::RefCell;
use std::hashmap::HashSet;
use std::result::Ok;
use std::rc::Rc;
use std::vec;
pub struct UniversalQuantificationResult {
monotype: t,
type_variables: ~[ty::t],
type_param_defs: @~[ty::TypeParameterDef]
type_param_defs: Rc<~[ty::TypeParameterDef]>
}
pub fn get_base_type(inference_context: @InferCtxt,
@ -356,11 +356,11 @@ impl CoherenceChecker {
// construct the polytype for the method based on the method_ty
let new_generics = ty::Generics {
type_param_defs:
@vec::append(
(*impl_poly_type.generics.type_param_defs).clone(),
*new_method_ty.generics.type_param_defs),
Rc::new(vec::append(
impl_poly_type.generics.type_param_defs().to_owned(),
new_method_ty.generics.type_param_defs())),
region_param_defs:
impl_poly_type.generics.region_param_defs
impl_poly_type.generics.region_param_defs.clone()
};
let new_polytype = ty::ty_param_bounds_and_ty {
generics: new_generics,
@ -449,7 +449,7 @@ impl CoherenceChecker {
let polytype_b = self.get_self_type_for_implementation(
implementation_b);
if self.polytypes_unify(polytype_a, polytype_b) {
if self.polytypes_unify(polytype_a.clone(), polytype_b) {
let session = self.crate_context.tcx.sess;
session.span_err(
self.span_of_impl(implementation_b),
@ -497,13 +497,13 @@ impl CoherenceChecker {
pub fn universally_quantify_polytype(&self,
polytype: ty_param_bounds_and_ty)
-> UniversalQuantificationResult {
let region_parameter_count = polytype.generics.region_param_defs.len();
let region_parameter_count = polytype.generics.region_param_defs().len();
let region_parameters =
self.inference_context.next_region_vars(
infer::BoundRegionInCoherence,
region_parameter_count);
let bounds_count = polytype.generics.type_param_defs.len();
let bounds_count = polytype.generics.type_param_defs().len();
let type_parameters = self.inference_context.next_ty_vars(bounds_count);
let substitutions = substs {
@ -518,7 +518,7 @@ impl CoherenceChecker {
UniversalQuantificationResult {
monotype: monotype,
type_variables: substitutions.tps,
type_param_defs: polytype.generics.type_param_defs
type_param_defs: polytype.generics.type_param_defs.clone()
}
}
@ -770,7 +770,7 @@ pub fn make_substs_for_receiver_types(tcx: ty::ctxt,
// determine how many type parameters were declared on the impl
let num_impl_type_parameters = {
let impl_polytype = ty::lookup_item_type(tcx, impl_id);
impl_polytype.generics.type_param_defs.len()
impl_polytype.generics.type_param_defs().len()
};
// determine how many type parameters appear on the trait
@ -778,7 +778,7 @@ pub fn make_substs_for_receiver_types(tcx: ty::ctxt,
// the current method type has the type parameters from the trait + method
let num_method_type_parameters =
num_trait_type_parameters + method.generics.type_param_defs.len();
num_trait_type_parameters + method.generics.type_param_defs().len();
// the new method type will have the type parameters from the impl + method
let combined_tps = vec::from_fn(num_method_type_parameters, |i| {
@ -789,7 +789,7 @@ pub fn make_substs_for_receiver_types(tcx: ty::ctxt,
// replace type parameters that belong to method with another
// type parameter, this time with the index adjusted
let method_index = i - num_trait_type_parameters;
let type_param_def = &method.generics.type_param_defs[method_index];
let type_param_def = &method.generics.type_param_defs()[method_index];
let new_index = num_impl_type_parameters + method_index;
ty::mk_param(tcx, new_index, type_param_def.def_id)
}

View file

@ -44,6 +44,7 @@ use middle::typeck::{CrateCtxt, lookup_def_tcx, no_params, write_ty_to_tcx};
use util::ppaux;
use util::ppaux::Repr;
use std::rc::Rc;
use std::vec;
use syntax::abi::AbiSet;
use syntax::ast::{RegionTyParamBound, TraitTyParamBound};
@ -285,9 +286,9 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
let dummy_defid = ast::DefId {crate: 0, node: 0};
// Represents [A',B',C']
let num_trait_bounds = trait_ty_generics.type_param_defs.len();
let num_trait_bounds = trait_ty_generics.type_param_defs().len();
let non_shifted_trait_tps = vec::from_fn(num_trait_bounds, |i| {
ty::mk_param(tcx, i, trait_ty_generics.type_param_defs[i].def_id)
ty::mk_param(tcx, i, trait_ty_generics.type_param_defs()[i].def_id)
});
// Represents [D']
@ -295,18 +296,18 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
dummy_defid);
// Represents [E',F',G']
let num_method_bounds = m.generics.type_param_defs.len();
let num_method_bounds = m.generics.type_param_defs().len();
let shifted_method_tps = vec::from_fn(num_method_bounds, |i| {
ty::mk_param(tcx, i + num_trait_bounds + 1,
m.generics.type_param_defs[i].def_id)
m.generics.type_param_defs()[i].def_id)
});
// Convert the regions 'a, 'b, 'c defined on the trait into
// bound regions on the fn. Note that because these appear in the
// bound for `Self` they must be early bound.
let new_early_region_param_defs = trait_ty_generics.region_param_defs;
let new_early_region_param_defs = trait_ty_generics.region_param_defs.clone();
let rps_from_trait =
trait_ty_generics.region_param_defs.iter().
trait_ty_generics.region_param_defs().iter().
enumerate().
map(|(index,d)| ty::ReEarlyBound(d.def_id.node, index, d.ident)).
collect();
@ -334,7 +335,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
let mut new_type_param_defs = ~[];
let substd_type_param_defs =
trait_ty_generics.type_param_defs.subst(tcx, &substs);
new_type_param_defs.push_all(*substd_type_param_defs);
new_type_param_defs.push_all(*substd_type_param_defs.borrow());
// add in the "self" type parameter
let self_trait_def = get_trait_def(ccx, local_def(trait_id));
@ -351,7 +352,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
// add in the type parameters from the method
let substd_type_param_defs = m.generics.type_param_defs.subst(tcx, &substs);
new_type_param_defs.push_all(*substd_type_param_defs);
new_type_param_defs.push_all(*substd_type_param_defs.borrow());
debug!("static method {} type_param_defs={} ty={}, substs={}",
m.def_id.repr(tcx),
@ -363,7 +364,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
tcache.get().insert(m.def_id,
ty_param_bounds_and_ty {
generics: ty::Generics {
type_param_defs: @new_type_param_defs,
type_param_defs: Rc::new(new_type_param_defs),
region_param_defs: new_early_region_param_defs
},
ty: ty
@ -383,7 +384,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
let trait_self_ty = ty::mk_self(this.tcx, local_def(trait_id));
let fty = astconv::ty_of_method(this, *m_id, *m_purity, trait_self_ty,
*m_explicit_self, m_decl);
let num_trait_type_params = trait_generics.type_param_defs.len();
let num_trait_type_params = trait_generics.type_param_defs().len();
ty::Method::new(
*m_ident,
// FIXME(#5121) -- distinguish early vs late lifetime params
@ -466,7 +467,7 @@ fn convert_methods(ccx: &CrateCtxt,
{
let tcx = ccx.tcx;
for m in ms.iter() {
let num_rcvr_ty_params = rcvr_ty_generics.type_param_defs.len();
let num_rcvr_ty_params = rcvr_ty_generics.type_param_defs().len();
let m_ty_generics = ty_generics(ccx, &m.generics, num_rcvr_ty_params);
let mty = @ty_of_method(ccx,
container,
@ -489,10 +490,10 @@ fn convert_methods(ccx: &CrateCtxt,
// itself
ty_param_bounds_and_ty {
generics: ty::Generics {
type_param_defs: @vec::append(
(*rcvr_ty_generics.type_param_defs).clone(),
*m_ty_generics.type_param_defs),
region_param_defs: rcvr_ty_generics.region_param_defs,
type_param_defs: Rc::new(vec::append(
rcvr_ty_generics.type_param_defs().to_owned(),
m_ty_generics.type_param_defs())),
region_param_defs: rcvr_ty_generics.region_param_defs.clone(),
},
ty: fty
});
@ -574,7 +575,7 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) {
let mut tcache = tcx.tcache.borrow_mut();
tcache.get().insert(local_def(it.id),
ty_param_bounds_and_ty {
generics: i_ty_generics,
generics: i_ty_generics.clone(),
ty: selfty});
}
@ -637,7 +638,7 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) {
{
let mut tcache = tcx.tcache.borrow_mut();
tcache.get().insert(local_def(it.id), tpt);
tcache.get().insert(local_def(it.id), tpt.clone());
}
convert_struct(ccx, struct_def, tpt, it.id);
@ -816,7 +817,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item)
{
let tcache = tcx.tcache.borrow();
match tcache.get().find(&def_id) {
Some(&tpt) => return tpt,
Some(tpt) => return tpt.clone(),
_ => {}
}
}
@ -826,7 +827,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item)
let tpt = no_params(typ);
let mut tcache = tcx.tcache.borrow_mut();
tcache.get().insert(local_def(it.id), tpt);
tcache.get().insert(local_def(it.id), tpt.clone());
return tpt;
}
ast::ItemFn(decl, purity, abi, ref generics, _) => {
@ -838,8 +839,8 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item)
decl);
let tpt = ty_param_bounds_and_ty {
generics: ty::Generics {
type_param_defs: ty_generics.type_param_defs,
region_param_defs: @[],
type_param_defs: ty_generics.type_param_defs.clone(),
region_param_defs: Rc::new(~[]),
},
ty: ty::mk_bare_fn(ccx.tcx, tofd)
};
@ -849,14 +850,14 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item)
ppaux::ty_to_str(tcx, tpt.ty));
let mut tcache = ccx.tcx.tcache.borrow_mut();
tcache.get().insert(local_def(it.id), tpt);
tcache.get().insert(local_def(it.id), tpt.clone());
return tpt;
}
ast::ItemTy(t, ref generics) => {
{
let mut tcache = tcx.tcache.borrow_mut();
match tcache.get().find(&local_def(it.id)) {
Some(&tpt) => return tpt,
Some(tpt) => return tpt.clone(),
None => { }
}
}
@ -870,7 +871,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item)
};
let mut tcache = tcx.tcache.borrow_mut();
tcache.get().insert(local_def(it.id), tpt);
tcache.get().insert(local_def(it.id), tpt.clone());
return tpt;
}
ast::ItemEnum(_, ref generics) => {
@ -884,7 +885,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item)
};
let mut tcache = tcx.tcache.borrow_mut();
tcache.get().insert(local_def(it.id), tpt);
tcache.get().insert(local_def(it.id), tpt.clone());
return tpt;
}
ast::ItemTrait(..) => {
@ -902,7 +903,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item)
};
let mut tcache = tcx.tcache.borrow_mut();
tcache.get().insert(local_def(it.id), tpt);
tcache.get().insert(local_def(it.id), tpt.clone());
return tpt;
}
ast::ItemImpl(..) | ast::ItemMod(_) |
@ -925,8 +926,8 @@ pub fn ty_of_foreign_item(ccx: &CrateCtxt,
ast::ForeignItemStatic(t, _) => {
ty::ty_param_bounds_and_ty {
generics: ty::Generics {
type_param_defs: @~[],
region_param_defs: @[],
type_param_defs: Rc::new(~[]),
region_param_defs: Rc::new(~[]),
},
ty: ast_ty_to_ty(ccx, &ExplicitRscope, t)
}
@ -938,11 +939,11 @@ pub fn ty_generics(ccx: &CrateCtxt,
generics: &ast::Generics,
base_index: uint) -> ty::Generics {
return ty::Generics {
region_param_defs: generics.lifetimes.iter().map(|l| {
region_param_defs: Rc::new(generics.lifetimes.iter().map(|l| {
ty::RegionParameterDef { ident: l.ident,
def_id: local_def(l.id) }
}).collect(),
type_param_defs: @generics.ty_params.mapi_to_vec(|offset, param| {
}).collect()),
type_param_defs: Rc::new(generics.ty_params.mapi_to_vec(|offset, param| {
let existing_def_opt = {
let ty_param_defs = ccx.tcx.ty_param_defs.borrow();
ty_param_defs.get().find(&param.id).map(|def| *def)
@ -969,7 +970,7 @@ pub fn ty_generics(ccx: &CrateCtxt,
def
}
}
})
}))
};
fn compute_bounds(
@ -1040,7 +1041,7 @@ pub fn ty_of_foreign_fn_decl(ccx: &CrateCtxt,
};
let mut tcache = ccx.tcx.tcache.borrow_mut();
tcache.get().insert(def_id, tpt);
tcache.get().insert(def_id, tpt.clone());
return tpt;
}
@ -1049,11 +1050,11 @@ pub fn mk_item_substs(ccx: &CrateCtxt,
self_ty: Option<ty::t>) -> ty::substs
{
let params: ~[ty::t] =
ty_generics.type_param_defs.iter().enumerate().map(
ty_generics.type_param_defs().iter().enumerate().map(
|(i, t)| ty::mk_param(ccx.tcx, i, t.def_id)).collect();
let regions: OptVec<ty::Region> =
ty_generics.region_param_defs.iter().enumerate().map(
ty_generics.region_param_defs().iter().enumerate().map(
|(i, l)| ty::ReEarlyBound(l.def_id.node, i, l.ident)).collect();
substs {regions: ty::NonerasedRegions(regions),

View file

@ -67,7 +67,7 @@ we may want to adjust precisely when coercions occur.
use middle::ty::{AutoPtr, AutoBorrowVec, AutoBorrowFn, AutoBorrowObj};
use middle::ty::{AutoDerefRef};
use middle::ty::{vstore_slice, vstore_box, vstore_uniq};
use middle::ty::{vstore_slice, vstore_uniq};
use middle::ty::{mt};
use middle::ty;
use middle::typeck::infer::{CoerceResult, resolve_type, Coercion};
@ -272,7 +272,6 @@ impl Coerce {
b.inf_str(self.get_ref().infcx));
match *sty_a {
ty::ty_str(vstore_box) |
ty::ty_str(vstore_uniq) => {}
_ => {
return self.subtype(a, b);

View file

@ -70,6 +70,7 @@ use util::ppaux;
use std::cell::RefCell;
use std::hashmap::HashMap;
use std::rc::Rc;
use std::result;
use extra::list::List;
use extra::list;
@ -271,8 +272,8 @@ pub fn lookup_def_ccx(ccx: &CrateCtxt, sp: Span, id: ast::NodeId)
pub fn no_params(t: ty::t) -> ty::ty_param_bounds_and_ty {
ty::ty_param_bounds_and_ty {
generics: ty::Generics {type_param_defs: @~[],
region_param_defs: @[]},
generics: ty::Generics {type_param_defs: Rc::new(~[]),
region_param_defs: Rc::new(~[])},
ty: t
}
}

View file

@ -736,7 +736,7 @@ impl<'a> ConstraintContext<'a> {
self.add_constraints_from_region(r, contra);
}
ty::vstore_fixed(_) | ty::vstore_uniq | ty::vstore_box => {
ty::vstore_fixed(_) | ty::vstore_uniq => {
}
}
}
@ -750,7 +750,7 @@ impl<'a> ConstraintContext<'a> {
variance: VarianceTermPtr<'a>) {
debug!("add_constraints_from_substs(def_id={:?})", def_id);
for (i, p) in generics.type_param_defs.iter().enumerate() {
for (i, p) in generics.type_param_defs().iter().enumerate() {
let variance_decl =
self.declared_variance(p.def_id, def_id, TypeParam, i);
let variance_i = self.xform(variance, variance_decl);
@ -760,7 +760,7 @@ impl<'a> ConstraintContext<'a> {
match substs.regions {
ty::ErasedRegions => {}
ty::NonerasedRegions(ref rps) => {
for (i, p) in generics.region_param_defs.iter().enumerate() {
for (i, p) in generics.region_param_defs().iter().enumerate() {
let variance_decl =
self.declared_variance(p.def_id, def_id, RegionParam, i);
let variance_i = self.xform(variance, variance_decl);

View file

@ -260,7 +260,6 @@ pub fn vstore_to_str(cx: ctxt, vs: ty::vstore) -> ~str {
match vs {
ty::vstore_fixed(n) => format!("{}", n),
ty::vstore_uniq => ~"~",
ty::vstore_box => ~"@",
ty::vstore_slice(r) => region_ptr_to_str(cx, r)
}
}
@ -522,11 +521,11 @@ pub fn parameterized(cx: ctxt,
}
let generics = if is_trait {
ty::lookup_trait_def(cx, did).generics
ty::lookup_trait_def(cx, did).generics.clone()
} else {
ty::lookup_item_type(cx, did).generics
};
let ty_params = generics.type_param_defs.iter();
let ty_params = generics.type_param_defs().iter();
let num_defaults = ty_params.zip(tps.iter()).rev().take_while(|&(def, &actual)| {
match def.default {
Some(default) => default == actual,
@ -789,8 +788,8 @@ impl Repr for ty::ty_param_bounds_and_ty {
impl Repr for ty::Generics {
fn repr(&self, tcx: ctxt) -> ~str {
format!("Generics(type_param_defs: {}, region_param_defs: {})",
self.type_param_defs.repr(tcx),
self.region_param_defs.repr(tcx))
self.type_param_defs().repr(tcx),
self.region_param_defs().repr(tcx))
}
}
@ -824,7 +823,8 @@ impl Repr for ty::Method {
impl Repr for ast::Ident {
fn repr(&self, _tcx: ctxt) -> ~str {
token::ident_to_str(self).to_owned()
let string = token::get_ident(self.name);
string.get().to_str()
}
}

View file

@ -11,8 +11,6 @@
//! This module contains the "cleaned" pieces of the AST, and the functions
//! that clean them.
use its = syntax::parse::token::ident_to_str;
use syntax;
use syntax::ast;
use syntax::ast_map;
@ -20,6 +18,8 @@ use syntax::ast_util;
use syntax::attr;
use syntax::attr::AttributeMethods;
use syntax::codemap::Pos;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use rustc::metadata::cstore;
use rustc::metadata::csearch;
@ -223,9 +223,13 @@ pub enum Attribute {
impl Clean<Attribute> for ast::MetaItem {
fn clean(&self) -> Attribute {
match self.node {
ast::MetaWord(s) => Word(s.to_owned()),
ast::MetaList(ref s, ref l) => List(s.to_owned(), l.clean()),
ast::MetaNameValue(s, ref v) => NameValue(s.to_owned(), lit_to_str(v))
ast::MetaWord(ref s) => Word(s.get().to_owned()),
ast::MetaList(ref s, ref l) => {
List(s.get().to_owned(), l.clean())
}
ast::MetaNameValue(ref s, ref v) => {
NameValue(s.get().to_owned(), lit_to_str(v))
}
}
}
}
@ -238,21 +242,24 @@ impl Clean<Attribute> for ast::Attribute {
// This is a rough approximation that gets us what we want.
impl<'a> attr::AttrMetaMethods for &'a Attribute {
fn name(&self) -> @str {
fn name(&self) -> InternedString {
match **self {
Word(ref n) | List(ref n, _) | NameValue(ref n, _) =>
n.to_managed()
Word(ref n) | List(ref n, _) | NameValue(ref n, _) => {
token::intern_and_get_ident(*n)
}
}
}
fn value_str(&self) -> Option<@str> {
fn value_str(&self) -> Option<InternedString> {
match **self {
NameValue(_, ref v) => Some(v.to_managed()),
NameValue(_, ref v) => Some(token::intern_and_get_ident(*v)),
_ => None,
}
}
fn meta_item_list<'a>(&'a self) -> Option<&'a [@ast::MetaItem]> { None }
fn name_str_pair(&self) -> Option<(@str, @str)> { None }
fn name_str_pair(&self) -> Option<(InternedString, InternedString)> {
None
}
}
#[deriving(Clone, Encodable, Decodable)]
@ -867,24 +874,25 @@ impl Clean<PathSegment> for ast::PathSegment {
}
fn path_to_str(p: &ast::Path) -> ~str {
use syntax::parse::token::interner_get;
use syntax::parse::token;
let mut s = ~"";
let mut first = true;
for i in p.segments.iter().map(|x| interner_get(x.identifier.name)) {
for i in p.segments.iter().map(|x| token::get_ident(x.identifier.name)) {
if !first || p.global {
s.push_str("::");
} else {
first = false;
}
s.push_str(i);
s.push_str(i.get());
}
s
}
impl Clean<~str> for ast::Ident {
fn clean(&self) -> ~str {
its(self).to_owned()
let string = token::get_ident(self.name);
string.get().to_owned()
}
}
@ -1030,8 +1038,13 @@ pub enum ViewItemInner {
impl Clean<ViewItemInner> for ast::ViewItem_ {
fn clean(&self) -> ViewItemInner {
match self {
&ast::ViewItemExternMod(ref i, ref p, ref id) =>
ExternMod(i.clean(), p.map(|(ref x, _)| x.to_owned()), *id),
&ast::ViewItemExternMod(ref i, ref p, ref id) => {
let string = match *p {
None => None,
Some((ref x, _)) => Some(x.get().to_owned()),
};
ExternMod(i.clean(), string, *id)
}
&ast::ViewItemUse(ref vp) => Import(vp.clean())
}
}
@ -1137,14 +1150,14 @@ impl ToSource for syntax::codemap::Span {
fn lit_to_str(lit: &ast::Lit) -> ~str {
match lit.node {
ast::LitStr(st, _) => st.to_owned(),
ast::LitBinary(data) => format!("{:?}", data.as_slice()),
ast::LitStr(ref st, _) => st.get().to_owned(),
ast::LitBinary(ref data) => format!("{:?}", data.borrow().as_slice()),
ast::LitChar(c) => ~"'" + std::char::from_u32(c).unwrap().to_str() + "'",
ast::LitInt(i, _t) => i.to_str(),
ast::LitUint(u, _t) => u.to_str(),
ast::LitIntUnsuffixed(i) => i.to_str(),
ast::LitFloat(f, _t) => f.to_str(),
ast::LitFloatUnsuffixed(f) => f.to_str(),
ast::LitFloat(ref f, _t) => f.get().to_str(),
ast::LitFloatUnsuffixed(ref f) => f.get().to_str(),
ast::LitBool(b) => b.to_str(),
ast::LitNil => ~"",
}

View file

@ -15,6 +15,7 @@ use rustc::middle::privacy;
use syntax::ast;
use syntax::diagnostic;
use syntax::parse::token;
use syntax::parse;
use syntax;
@ -71,7 +72,8 @@ fn get_ast_and_resolve(cpath: &Path,
let mut cfg = build_configuration(sess);
for cfg_ in cfgs.move_iter() {
cfg.push(@dummy_spanned(ast::MetaWord(cfg_.to_managed())));
let cfg_ = token::intern_and_get_ident(cfg_);
cfg.push(@dummy_spanned(ast::MetaWord(cfg_)));
}
let crate = phase_1_parse_input(sess, cfg.clone(), &input);

View file

@ -45,6 +45,7 @@ use extra::arc::Arc;
use extra::json::ToJson;
use syntax::ast;
use syntax::attr;
use syntax::parse::token::InternedString;
use clean;
use doctree;
@ -803,12 +804,13 @@ impl<'a> Item<'a> {
impl<'a> fmt::Default for Item<'a> {
fn fmt(it: &Item<'a>, fmt: &mut fmt::Formatter) {
match attr::find_stability(it.item.attrs.iter()) {
Some(stability) => {
Some(ref stability) => {
write!(fmt.buf,
"<a class='stability {lvl}' title='{reason}'>{lvl}</a>",
lvl = stability.level.to_str(),
reason = match stability.text {
Some(s) => s, None => @"",
Some(ref s) => (*s).clone(),
None => InternedString::new(""),
});
}
None => {}

View file

@ -137,7 +137,7 @@ fn runtest(test: &str, cratename: &str, libs: HashSet<Path>) {
}
}
fn maketest(s: &str, cratename: &str) -> @str {
fn maketest(s: &str, cratename: &str) -> ~str {
let mut prog = ~r"
#[deny(warnings)];
#[allow(unused_variable, dead_assignment, unused_mut, attribute_usage, dead_code)];
@ -156,7 +156,7 @@ fn maketest(s: &str, cratename: &str) -> @str {
prog.push_str("\n}");
}
return prog.to_managed();
return prog;
}
pub struct Collector {

View file

@ -30,6 +30,8 @@ use syntax::ext::base::{ExtCtxt, MacroCrate};
use syntax::{ast, attr, codemap, diagnostic, fold, visit};
use syntax::attr::AttrMetaMethods;
use syntax::fold::Folder;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::visit::Visitor;
use syntax::util::small_vector::SmallVector;
use syntax::crateid::CrateId;
@ -77,7 +79,7 @@ fn fold_mod(m: &ast::Mod, fold: &mut CrateSetup) -> ast::Mod {
fn strip_main(item: @ast::Item) -> @ast::Item {
@ast::Item {
attrs: item.attrs.iter().filter_map(|attr| {
if "main" != attr.name() {
if !attr.name().equiv(&("main")) {
Some(*attr)
} else {
None
@ -101,13 +103,15 @@ fn fold_item(item: @ast::Item, fold: &mut CrateSetup)
let mut had_pkg_do = false;
for attr in item.attrs.iter() {
if "pkg_do" == attr.name() {
if attr.name().equiv(&("pkg_do")) {
had_pkg_do = true;
match attr.node.value.node {
ast::MetaList(_, ref mis) => {
for mi in mis.iter() {
match mi.node {
ast::MetaWord(cmd) => cmds.push(cmd.to_owned()),
ast::MetaWord(ref cmd) => {
cmds.push(cmd.get().to_owned())
}
_ => {}
};
}
@ -314,7 +318,9 @@ pub fn compile_input(context: &BuildContext,
if !attr::contains_name(crate.attrs, "crate_id") {
// FIXME (#9639): This needs to handle non-utf8 paths
let crateid_attr =
attr::mk_name_value_item_str(@"crate_id", crate_id.to_str().to_managed());
attr::mk_name_value_item_str(
InternedString::new("crate_id"),
token::intern_and_get_ident(crate_id.to_str()));
debug!("crateid attr: {:?}", crateid_attr);
crate.attrs.push(attr::mk_attr(crateid_attr));
@ -466,13 +472,14 @@ impl<'a> CrateInstaller<'a> {
match vi.node {
// ignore metadata, I guess
ast::ViewItemExternMod(lib_ident, path_opt, _) => {
let lib_name = match path_opt {
Some((p, _)) => p,
None => self.sess.str_of(lib_ident)
ast::ViewItemExternMod(ref lib_ident, ref path_opt, _) => {
let lib_name = match *path_opt {
Some((ref p, _)) => (*p).clone(),
None => token::get_ident(lib_ident.name),
};
debug!("Finding and installing... {}", lib_name);
let crate_id: CrateId = from_str(lib_name).expect("valid crate id");
let crate_id: CrateId =
from_str(lib_name.get()).expect("valid crate id");
// Check standard Rust library path first
let whatever = system_library(&self.context.sysroot_to_use(), &crate_id);
debug!("system library returned {:?}", whatever);
@ -642,7 +649,7 @@ pub fn find_and_install_dependencies(installer: &mut CrateInstaller,
visit::walk_crate(installer, c, ())
}
pub fn mk_string_lit(s: @str) -> ast::Lit {
pub fn mk_string_lit(s: InternedString) -> ast::Lit {
Spanned {
node: ast::LitStr(s, ast::CookedStr),
span: DUMMY_SP

View file

@ -1,424 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Operations on managed vectors (`@[T]` type)
use clone::Clone;
use container::Container;
use iter::{Iterator, FromIterator};
use option::{Option, Some, None};
use mem;
use unstable::raw::Repr;
use vec::{ImmutableVector, OwnedVector};
/// Code for dealing with @-vectors. This is pretty incomplete, and
/// contains a bunch of duplication from the code for ~-vectors.
/// Returns the number of elements the vector can hold without reallocating
#[inline]
pub fn capacity<T>(v: @[T]) -> uint {
unsafe {
let managed_box = v.repr();
(*managed_box).data.alloc / mem::size_of::<T>()
}
}
/**
* Builds a vector by calling a provided function with an argument
* function that pushes an element to the back of a vector.
* The initial size for the vector may optionally be specified
*
* # Arguments
*
* * size - An option, maybe containing initial size of the vector to reserve
* * builder - A function that will construct the vector. It receives
* as an argument a function that will push an element
* onto the vector being constructed.
*/
#[inline]
pub fn build<A>(size: Option<uint>, builder: |push: |v: A||) -> @[A] {
let mut vec = @[];
unsafe { raw::reserve(&mut vec, size.unwrap_or(4)); }
builder(|x| unsafe { raw::push(&mut vec, x) });
vec
}
// Appending
/// Iterates over the `rhs` vector, copying each element and appending it to the
/// `lhs`. Afterwards, the `lhs` is then returned for use again.
#[inline]
pub fn append<T:Clone>(lhs: @[T], rhs: &[T]) -> @[T] {
build(Some(lhs.len() + rhs.len()), |push| {
for x in lhs.iter() {
push((*x).clone());
}
for elt in rhs.iter() {
push(elt.clone());
}
})
}
/// Apply a function to each element of a vector and return the results
#[inline]
pub fn map<T, U>(v: &[T], f: |x: &T| -> U) -> @[U] {
build(Some(v.len()), |push| {
for elem in v.iter() {
push(f(elem));
}
})
}
/**
* Creates and initializes an immutable vector.
*
* Creates an immutable vector of size `n_elts` and initializes the elements
* to the value returned by the function `op`.
*/
#[inline]
pub fn from_fn<T>(n_elts: uint, op: |uint| -> T) -> @[T] {
build(Some(n_elts), |push| {
let mut i: uint = 0u;
while i < n_elts { push(op(i)); i += 1u; }
})
}
/**
* Creates and initializes an immutable vector.
*
* Creates an immutable vector of size `n_elts` and initializes the elements
* to the value `t`.
*/
#[inline]
pub fn from_elem<T:Clone>(n_elts: uint, t: T) -> @[T] {
build(Some(n_elts), |push| {
let mut i: uint = 0u;
while i < n_elts {
push(t.clone());
i += 1u;
}
})
}
/**
* Creates and initializes an immutable managed vector by moving all the
* elements from an owned vector.
*/
#[inline]
pub fn to_managed_move<T>(v: ~[T]) -> @[T] {
let mut av = @[];
unsafe {
raw::reserve(&mut av, v.len());
for x in v.move_iter() {
raw::push(&mut av, x);
}
av
}
}
/**
* Creates and initializes an immutable managed vector by copying all the
* elements of a slice.
*/
#[inline]
pub fn to_managed<T:Clone>(v: &[T]) -> @[T] {
from_fn(v.len(), |i| v[i].clone())
}
impl<T> Clone for @[T] {
fn clone(&self) -> @[T] {
*self
}
}
impl<A> FromIterator<A> for @[A] {
#[inline]
fn from_iterator<T: Iterator<A>>(iterator: &mut T) -> @[A] {
let (lower, _) = iterator.size_hint();
build(Some(lower), |push| {
for x in *iterator {
push(x);
}
})
}
}
#[cfg(not(test))]
#[allow(missing_doc)]
pub mod traits {
use at_vec::append;
use clone::Clone;
use ops::Add;
use vec::Vector;
impl<'a,T:Clone, V: Vector<T>> Add<V,@[T]> for @[T] {
#[inline]
fn add(&self, rhs: &V) -> @[T] {
append(*self, rhs.as_slice())
}
}
}
#[cfg(test)]
pub mod traits {}
#[allow(missing_doc)]
pub mod raw {
use at_vec::capacity;
use cast;
use cast::{transmute, transmute_copy};
use container::Container;
use option::None;
use mem;
use num::next_power_of_two;
use ptr;
use unstable::intrinsics::{move_val_init, TyDesc};
use unstable::intrinsics;
use unstable::raw::{Box, Vec};
/**
* Sets the length of a vector
*
* This will explicitly set the size of the vector, without actually
* modifying its buffers, so it is up to the caller to ensure that
* the vector is actually the specified size.
*/
#[inline]
pub unsafe fn set_len<T>(v: &mut @[T], new_len: uint) {
let repr: *mut Box<Vec<T>> = cast::transmute_copy(v);
(*repr).data.fill = new_len * mem::size_of::<T>();
}
/**
* Pushes a new value onto this vector.
*/
#[inline]
pub unsafe fn push<T>(v: &mut @[T], initval: T) {
let full = {
let repr: *Box<Vec<T>> = cast::transmute_copy(v);
(*repr).data.alloc > (*repr).data.fill
};
if full {
push_fast(v, initval);
} else {
push_slow(v, initval);
}
}
#[inline] // really pretty please
unsafe fn push_fast<T>(v: &mut @[T], initval: T) {
let repr: *mut Box<Vec<T>> = cast::transmute_copy(v);
let amt = v.len();
(*repr).data.fill += mem::size_of::<T>();
let p = ptr::offset(&(*repr).data.data as *T, amt as int) as *mut T;
move_val_init(&mut(*p), initval);
}
#[inline]
unsafe fn push_slow<T>(v: &mut @[T], initval: T) {
reserve_at_least(v, v.len() + 1u);
push_fast(v, initval);
}
/**
* Reserves capacity for exactly `n` elements in the given vector.
*
* If the capacity for `v` is already equal to or greater than the
* requested capacity, then no action is taken.
*
* # Arguments
*
* * v - A vector
* * n - The number of elements to reserve space for
*/
#[inline]
pub unsafe fn reserve<T>(v: &mut @[T], n: uint) {
// Only make the (slow) call into the runtime if we have to
if capacity(*v) < n {
let ptr: *mut *mut Box<Vec<()>> = transmute(v);
let ty = intrinsics::get_tydesc::<T>();
return reserve_raw(ty, ptr, n);
}
}
// Implementation detail. Shouldn't be public
#[allow(missing_doc)]
#[inline]
pub fn reserve_raw(ty: *TyDesc, ptr: *mut *mut Box<Vec<()>>, n: uint) {
// check for `uint` overflow
unsafe {
if n > (**ptr).data.alloc / (*ty).size {
let alloc = n * (*ty).size;
let total_size = alloc + mem::size_of::<Vec<()>>();
if alloc / (*ty).size != n || total_size < alloc {
fail!("vector size is too large: {}", n);
}
(*ptr) = local_realloc(*ptr as *(), total_size) as *mut Box<Vec<()>>;
(**ptr).data.alloc = alloc;
}
}
#[inline]
fn local_realloc(ptr: *(), size: uint) -> *() {
use rt::local::Local;
use rt::task::Task;
let mut task = Local::borrow(None::<Task>);
task.get().heap.realloc(ptr as *mut Box<()>, size) as *()
}
}
/**
* Reserves capacity for at least `n` elements in the given vector.
*
* This function will over-allocate in order to amortize the
* allocation costs in scenarios where the caller may need to
* repeatedly reserve additional space.
*
* If the capacity for `v` is already equal to or greater than the
* requested capacity, then no action is taken.
*
* # Arguments
*
* * v - A vector
* * n - The number of elements to reserve space for
*/
#[inline]
pub unsafe fn reserve_at_least<T>(v: &mut @[T], n: uint) {
reserve(v, next_power_of_two(n));
}
}
#[cfg(test)]
mod test {
use super::*;
use prelude::*;
use bh = extra::test::BenchHarness;
#[test]
fn test() {
// Some code that could use that, then:
fn seq_range(lo: uint, hi: uint) -> @[uint] {
build(None, |push| {
for i in range(lo, hi) {
push(i);
}
})
}
assert_eq!(seq_range(10, 15), @[10, 11, 12, 13, 14]);
assert_eq!(from_fn(5, |x| x+1), @[1, 2, 3, 4, 5]);
assert_eq!(from_elem(5, 3.14), @[3.14, 3.14, 3.14, 3.14, 3.14]);
}
#[test]
fn append_test() {
assert_eq!(@[1,2,3] + &[4,5,6], @[1,2,3,4,5,6]);
}
#[test]
fn test_to_managed_move() {
assert_eq!(to_managed_move::<int>(~[]), @[]);
assert_eq!(to_managed_move(~[true]), @[true]);
assert_eq!(to_managed_move(~[1, 2, 3, 4, 5]), @[1, 2, 3, 4, 5]);
assert_eq!(to_managed_move(~[~"abc", ~"123"]), @[~"abc", ~"123"]);
assert_eq!(to_managed_move(~[~[42]]), @[~[42]]);
}
#[test]
fn test_to_managed() {
assert_eq!(to_managed::<int>([]), @[]);
assert_eq!(to_managed([true]), @[true]);
assert_eq!(to_managed([1, 2, 3, 4, 5]), @[1, 2, 3, 4, 5]);
assert_eq!(to_managed([@"abc", @"123"]), @[@"abc", @"123"]);
assert_eq!(to_managed([@[42]]), @[@[42]]);
}
#[bench]
fn bench_capacity(b: &mut bh) {
let x = @[1, 2, 3];
b.iter(|| {
let _ = capacity(x);
});
}
#[bench]
fn bench_build_sized(b: &mut bh) {
let len = 64;
b.iter(|| {
build(Some(len), |push| for i in range(0, 1024) { push(i) });
});
}
#[bench]
fn bench_build(b: &mut bh) {
b.iter(|| {
for i in range(0, 95) {
build(None, |push| push(i));
}
});
}
#[bench]
fn bench_append(b: &mut bh) {
let lhs = @[7, ..128];
let rhs = range(0, 256).to_owned_vec();
b.iter(|| {
let _ = append(lhs, rhs);
})
}
#[bench]
fn bench_map(b: &mut bh) {
let elts = range(0, 256).to_owned_vec();
b.iter(|| {
let _ = map(elts, |x| x*2);
})
}
#[bench]
fn bench_from_fn(b: &mut bh) {
b.iter(|| {
let _ = from_fn(1024, |x| x);
});
}
#[bench]
fn bench_from_elem(b: &mut bh) {
b.iter(|| {
let _ = from_elem(1024, 0u64);
});
}
#[bench]
fn bench_to_managed_move(b: &mut bh) {
b.iter(|| {
let elts = range(0, 1024).to_owned_vec(); // yikes! can't move out of capture, though
to_managed_move(elts);
})
}
#[bench]
fn bench_to_managed(b: &mut bh) {
let elts = range(0, 1024).to_owned_vec();
b.iter(|| {
let _ = to_managed(elts);
});
}
#[bench]
fn bench_clone(b: &mut bh) {
let elts = to_managed(range(0, 1024).to_owned_vec());
b.iter(|| {
let _ = elts.clone();
});
}
}

View file

@ -1167,7 +1167,6 @@ delegate!( u8 to Unsigned)
delegate!( u16 to Unsigned)
delegate!( u32 to Unsigned)
delegate!( u64 to Unsigned)
delegate!(@str to String)
delegate!(~str to String)
delegate!(&'a str to String)
delegate!(bool to Bool)

View file

@ -35,7 +35,6 @@ pub struct Gc<T> {
}
#[cfg(test)]
#[no_send]
pub struct Gc<T> {
priv ptr: @T,
priv marker: marker::NoSend,

View file

@ -114,7 +114,6 @@ pub mod tuple;
pub mod vec;
pub mod vec_ng;
pub mod at_vec;
pub mod str;
pub mod ascii;

View file

@ -604,19 +604,6 @@ impl BytesContainer for ~str {
fn is_str(_: Option<~str>) -> bool { true }
}
impl BytesContainer for @str {
#[inline]
fn container_as_bytes<'a>(&'a self) -> &'a [u8] {
self.as_bytes()
}
#[inline]
fn container_as_str<'a>(&'a self) -> Option<&'a str> {
Some(self.as_slice())
}
#[inline]
fn is_str(_: Option<@str>) -> bool { true }
}
impl<'a> BytesContainer for &'a [u8] {
#[inline]
fn container_as_bytes<'a>(&'a self) -> &'a [u8] {
@ -635,13 +622,6 @@ impl BytesContainer for ~[u8] {
}
}
impl BytesContainer for @[u8] {
#[inline]
fn container_as_bytes<'a>(&'a self) -> &'a [u8] {
self.as_slice()
}
}
impl BytesContainer for CString {
#[inline]
fn container_as_bytes<'a>(&'a self) -> &'a [u8] {

View file

@ -807,8 +807,6 @@ mod tests {
#[test]
fn test_push_many() {
use to_man = at_vec::to_managed_move;
macro_rules! t(
(s: $path:expr, $push:expr, $exp:expr) => (
{
@ -830,12 +828,9 @@ mod tests {
t!(s: "a/b/c", ["d", "/e"], "/e");
t!(s: "a/b/c", ["d", "/e", "f"], "/e/f");
t!(s: "a/b/c", [~"d", ~"e"], "a/b/c/d/e");
t!(s: "a/b/c", [@"d", @"e"], "a/b/c/d/e");
t!(v: b!("a/b/c"), [b!("d"), b!("e")], b!("a/b/c/d/e"));
t!(v: b!("a/b/c"), [b!("d"), b!("/e"), b!("f")], b!("/e/f"));
t!(v: b!("a/b/c"), [b!("d").to_owned(), b!("e").to_owned()], b!("a/b/c/d/e"));
t!(v: b!("a/b/c"), [to_man(b!("d").to_owned()), to_man(b!("e").to_owned())],
b!("a/b/c/d/e"));
}
#[test]
@ -917,8 +912,6 @@ mod tests {
#[test]
fn test_join_many() {
use to_man = at_vec::to_managed_move;
macro_rules! t(
(s: $path:expr, $join:expr, $exp:expr) => (
{
@ -940,11 +933,8 @@ mod tests {
t!(s: "a/b/c", ["..", "d"], "a/b/d");
t!(s: "a/b/c", ["d", "/e", "f"], "/e/f");
t!(s: "a/b/c", [~"d", ~"e"], "a/b/c/d/e");
t!(s: "a/b/c", [@"d", @"e"], "a/b/c/d/e");
t!(v: b!("a/b/c"), [b!("d"), b!("e")], b!("a/b/c/d/e"));
t!(v: b!("a/b/c"), [b!("d").to_owned(), b!("e").to_owned()], b!("a/b/c/d/e"));
t!(v: b!("a/b/c"), [to_man(b!("d").to_owned()), to_man(b!("e").to_owned())],
b!("a/b/c/d/e"));
}
#[test]

View file

@ -1587,8 +1587,6 @@ mod tests {
#[test]
fn test_push_many() {
use to_man = at_vec::to_managed_move;
macro_rules! t(
(s: $path:expr, $push:expr, $exp:expr) => (
{
@ -1610,12 +1608,9 @@ mod tests {
t!(s: "a\\b\\c", ["d", "\\e"], "\\e");
t!(s: "a\\b\\c", ["d", "\\e", "f"], "\\e\\f");
t!(s: "a\\b\\c", [~"d", ~"e"], "a\\b\\c\\d\\e");
t!(s: "a\\b\\c", [@"d", @"e"], "a\\b\\c\\d\\e");
t!(v: b!("a\\b\\c"), [b!("d"), b!("e")], b!("a\\b\\c\\d\\e"));
t!(v: b!("a\\b\\c"), [b!("d"), b!("\\e"), b!("f")], b!("\\e\\f"));
t!(v: b!("a\\b\\c"), [b!("d").to_owned(), b!("e").to_owned()], b!("a\\b\\c\\d\\e"));
t!(v: b!("a\\b\\c"), [to_man(b!("d").to_owned()), to_man(b!("e").to_owned())],
b!("a\\b\\c\\d\\e"));
}
#[test]
@ -1732,8 +1727,6 @@ mod tests {
#[test]
fn test_join_many() {
use to_man = at_vec::to_managed_move;
macro_rules! t(
(s: $path:expr, $join:expr, $exp:expr) => (
{
@ -1755,11 +1748,8 @@ mod tests {
t!(s: "a\\b\\c", ["..", "d"], "a\\b\\d");
t!(s: "a\\b\\c", ["d", "\\e", "f"], "\\e\\f");
t!(s: "a\\b\\c", [~"d", ~"e"], "a\\b\\c\\d\\e");
t!(s: "a\\b\\c", [@"d", @"e"], "a\\b\\c\\d\\e");
t!(v: b!("a\\b\\c"), [b!("d"), b!("e")], b!("a\\b\\c\\d\\e"));
t!(v: b!("a\\b\\c"), [b!("d").to_owned(), b!("e").to_owned()], b!("a\\b\\c\\d\\e"));
t!(v: b!("a\\b\\c"), [to_man(b!("d").to_owned()), to_man(b!("e").to_owned())],
b!("a\\b\\c\\d\\e"));
}
#[test]

View file

@ -183,9 +183,6 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
}
fn visit_estr_box(&mut self) -> bool {
self.align_to::<@str>();
if ! self.inner.visit_estr_box() { return false; }
self.bump_past::<@str>();
true
}
@ -253,10 +250,7 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
true
}
fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<@[u8]>();
if ! self.inner.visit_evec_box(mtbl, inner) { return false; }
self.bump_past::<@[u8]>();
fn visit_evec_box(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool {
true
}

View file

@ -272,10 +272,7 @@ impl<'a> TyVisitor for ReprVisitor<'a> {
}
fn visit_estr_box(&mut self) -> bool {
self.get::<@str>(|this, s| {
this.writer.write(['@' as u8]);
this.write_escaped_slice(*s);
})
true
}
fn visit_estr_uniq(&mut self) -> bool {
@ -628,7 +625,6 @@ fn test_repr() {
exact_test(&false, "false");
exact_test(&1.234, "1.234f64");
exact_test(&(&"hello"), "\"hello\"");
exact_test(&(@"hello"), "@\"hello\"");
exact_test(&(~"he\u10f3llo"), "~\"he\\u10f3llo\"");
exact_test(&(@10), "@10");
@ -641,12 +637,6 @@ fn test_repr() {
exact_test(&(0 as *mut ()), "(0x0 as *mut ())");
exact_test(&(1,), "(1,)");
exact_test(&(@[1,2,3,4,5,6,7,8]),
"@[1, 2, 3, 4, 5, 6, 7, 8]");
exact_test(&(@[1u8,2u8,3u8,4u8]),
"@[1u8, 2u8, 3u8, 4u8]");
exact_test(&(@["hi", "there"]),
"@[\"hi\", \"there\"]");
exact_test(&(~["hi", "there"]),
"~[\"hi\", \"there\"]");
exact_test(&(&["hi", "there"]),

View file

@ -332,6 +332,6 @@ mod bench {
#[bench]
fn alloc_managed_big(bh: &mut BenchHarness) {
bh.iter(|| { @[10, ..1000]; });
bh.iter(|| { @([10, ..1000]); });
}
}

View file

@ -185,7 +185,6 @@ mod tests {
assert_eq!(s.len(), 5);
assert_eq!(s.as_slice(), "abcde");
assert_eq!(s.to_str(), ~"abcde");
assert!(s.equiv(&@"abcde"));
assert!(s.lt(&SendStrOwned(~"bcdef")));
assert_eq!(SendStrStatic(""), Default::default());
@ -193,7 +192,6 @@ mod tests {
assert_eq!(o.len(), 5);
assert_eq!(o.as_slice(), "abcde");
assert_eq!(o.to_str(), ~"abcde");
assert!(o.equiv(&@"abcde"));
assert!(o.lt(&SendStrStatic("bcdef")));
assert_eq!(SendStrOwned(~""), Default::default());

View file

@ -17,46 +17,35 @@ Unicode string manipulation (`str` type)
Rust's string type is one of the core primitive types of the language. While
represented by the name `str`, the name `str` is not actually a valid type in
Rust. Each string must also be decorated with its ownership. This means that
there are three common kinds of strings in rust:
there are two common kinds of strings in rust:
* `~str` - This is an owned string. This type obeys all of the normal semantics
of the `~T` types, meaning that it has one, and only one, owner. This
type cannot be implicitly copied, and is moved out of when passed to
other functions.
* `@str` - This is a managed string. Similarly to `@T`, this type can be
implicitly copied, and each implicit copy will increment the
reference count to the string. This means that there is no "true
owner" of the string, and the string will be deallocated when the
reference count reaches 0.
* `&str` - Finally, this is the borrowed string type. This type of string can
only be created from one of the other two kinds of strings. As the
name "borrowed" implies, this type of string is owned elsewhere, and
this string cannot be moved out of.
* `&str` - This is the borrowed string type. This type of string can only be
created from the other kind of string. As the name "borrowed"
implies, this type of string is owned elsewhere, and this string
cannot be moved out of.
As an example, here's a few different kinds of strings.
```rust
#[feature(managed_boxes)];
fn main() {
let owned_string = ~"I am an owned string";
let managed_string = @"This string is garbage-collected";
let borrowed_string1 = "This string is borrowed with the 'static lifetime";
let borrowed_string2: &str = owned_string; // owned strings can be borrowed
let borrowed_string3: &str = managed_string; // managed strings can also be borrowed
}
```
From the example above, you can see that rust has 3 different kinds of string
literals. The owned/managed literals correspond to the owned/managed string
types, but the "borrowed literal" is actually more akin to C's concept of a
static string.
From the example above, you can see that rust has 2 different kinds of string
literals. The owned literals correspond to the owned string types, but the
"borrowed literal" is actually more akin to C's concept of a static string.
When a string is declared without a `~` or `@` sigil, then the string is
allocated statically in the rodata of the executable/library. The string then
has the type `&'static str` meaning that the string is valid for the `'static`
When a string is declared without a `~` sigil, then the string is allocated
statically in the rodata of the executable/library. The string then has the
type `&'static str` meaning that the string is valid for the `'static`
lifetime, otherwise known as the lifetime of the entire program. As can be
inferred from the type, these static strings are not mutable.
@ -89,11 +78,9 @@ The actual representation of strings have direct mappings to vectors:
* `~str` is the same as `~[u8]`
* `&str` is the same as `&[u8]`
* `@str` is the same as `@[u8]`
*/
use at_vec;
use cast;
use cast::transmute;
use char;
@ -157,16 +144,6 @@ impl<'a> ToStr for &'a str {
fn to_str(&self) -> ~str { self.to_owned() }
}
impl ToStr for @str {
#[inline]
fn to_str(&self) -> ~str { self.to_owned() }
}
impl<'a> FromStr for @str {
#[inline]
fn from_str(s: &str) -> Option<@str> { Some(s.to_managed()) }
}
/// Convert a byte to a UTF-8 string
///
/// # Failure
@ -1140,11 +1117,6 @@ pub mod traits {
fn cmp(&self, other: &~str) -> Ordering { self.as_slice().cmp(&other.as_slice()) }
}
impl TotalOrd for @str {
#[inline]
fn cmp(&self, other: &@str) -> Ordering { self.as_slice().cmp(&other.as_slice()) }
}
impl<'a> Eq for &'a str {
#[inline]
fn eq(&self, other: & &'a str) -> bool {
@ -1161,13 +1133,6 @@ pub mod traits {
}
}
impl Eq for @str {
#[inline]
fn eq(&self, other: &@str) -> bool {
eq_slice((*self), (*other))
}
}
impl<'a> TotalEq for &'a str {
#[inline]
fn equals(&self, other: & &'a str) -> bool {
@ -1182,13 +1147,6 @@ pub mod traits {
}
}
impl TotalEq for @str {
#[inline]
fn equals(&self, other: &@str) -> bool {
eq_slice((*self), (*other))
}
}
impl<'a> Ord for &'a str {
#[inline]
fn lt(&self, other: & &'a str) -> bool { self.cmp(other) == Less }
@ -1199,21 +1157,11 @@ pub mod traits {
fn lt(&self, other: &~str) -> bool { self.cmp(other) == Less }
}
impl Ord for @str {
#[inline]
fn lt(&self, other: &@str) -> bool { self.cmp(other) == Less }
}
impl<'a, S: Str> Equiv<S> for &'a str {
#[inline]
fn equiv(&self, other: &S) -> bool { eq_slice(*self, other.as_slice()) }
}
impl<'a, S: Str> Equiv<S> for @str {
#[inline]
fn equiv(&self, other: &S) -> bool { eq_slice(*self, other.as_slice()) }
}
impl<'a, S: Str> Equiv<S> for ~str {
#[inline]
fn equiv(&self, other: &S) -> bool { eq_slice(*self, other.as_slice()) }
@ -1250,16 +1198,6 @@ impl<'a> Str for ~str {
fn into_owned(self) -> ~str { self }
}
impl<'a> Str for @str {
#[inline]
fn as_slice<'a>(&'a self) -> &'a str {
let s: &'a str = *self; s
}
#[inline]
fn into_owned(self) -> ~str { self.to_owned() }
}
impl<'a> Container for &'a str {
#[inline]
fn len(&self) -> uint {
@ -1272,11 +1210,6 @@ impl Container for ~str {
fn len(&self) -> uint { self.as_slice().len() }
}
impl Container for @str {
#[inline]
fn len(&self) -> uint { self.as_slice().len() }
}
impl Mutable for ~str {
/// Remove all content, make the string empty
#[inline]
@ -1734,9 +1667,6 @@ pub trait StrSlice<'a> {
/// Copy a slice into a new owned str.
fn to_owned(&self) -> ~str;
/// Copy a slice into a new managed str.
fn to_managed(&self) -> @str;
/// Converts to a vector of `u16` encoded as UTF-16.
fn to_utf16(&self) -> ~[u16];
@ -2246,14 +2176,6 @@ impl<'a> StrSlice<'a> for &'a str {
}
}
#[inline]
fn to_managed(&self) -> @str {
unsafe {
let v: *&[u8] = cast::transmute(self);
cast::transmute(at_vec::to_managed(*v))
}
}
fn to_utf16(&self) -> ~[u16] {
let mut u = ~[];
for ch in self.chars() {
@ -2682,20 +2604,6 @@ impl DeepClone for ~str {
}
}
impl Clone for @str {
#[inline]
fn clone(&self) -> @str {
*self
}
}
impl DeepClone for @str {
#[inline]
fn deep_clone(&self) -> @str {
*self
}
}
impl FromIterator<char> for ~str {
#[inline]
fn from_iterator<T: Iterator<char>>(iterator: &mut T) -> ~str {
@ -2727,10 +2635,6 @@ impl Default for ~str {
fn default() -> ~str { ~"" }
}
impl Default for @str {
fn default() -> @str { @"" }
}
#[cfg(test)]
mod tests {
use iter::AdditiveIterator;
@ -3536,12 +3440,6 @@ mod tests {
assert_eq!("\U0001d4ea\r".escape_default(), ~"\\U0001d4ea\\r");
}
#[test]
fn test_to_managed() {
assert_eq!("abc".to_managed(), @"abc");
assert_eq!("abcdef".slice(1, 5).to_managed(), @"bcde");
}
#[test]
fn test_total_ord() {
"1234".cmp(& &"123") == Greater;
@ -3579,15 +3477,12 @@ mod tests {
let e = $e;
assert_eq!(s1 + s2, e.to_owned());
assert_eq!(s1.to_owned() + s2, e.to_owned());
assert_eq!(s1.to_managed() + s2, e.to_owned());
} }
);
t!("foo", "bar", "foobar");
t!("foo", @"bar", "foobar");
t!("foo", ~"bar", "foobar");
t!("ศไทย中", "华Việt Nam", "ศไทย中华Việt Nam");
t!("ศไทย中", @"华Việt Nam", "ศไทย中华Việt Nam");
t!("ศไทย中", ~"华Việt Nam", "ศไทย中华Việt Nam");
}
@ -3874,7 +3769,6 @@ mod tests {
}
t::<&str>();
t::<@str>();
t::<~str>();
}
@ -3886,7 +3780,6 @@ mod tests {
let s = ~"01234";
assert_eq!(5, sum_len(["012", "", "34"]));
assert_eq!(5, sum_len([@"01", @"2", @"34", @""]));
assert_eq!(5, sum_len([~"01", ~"2", ~"34", ~""]));
assert_eq!(5, sum_len([s.as_slice()]));
}
@ -3957,8 +3850,6 @@ mod tests {
fn test_from_str() {
let owned: Option<~str> = from_str(&"string");
assert_eq!(owned, Some(~"string"));
let managed: Option<@str> = from_str(&"string");
assert_eq!(managed, Some(@"string"));
}
}

View file

@ -266,13 +266,6 @@ impl<A:IterBytes> IterBytes for ~[A] {
}
}
impl<A:IterBytes> IterBytes for @[A] {
#[inline]
fn iter_bytes(&self, lsb0: bool, f: Cb) -> bool {
self.as_slice().iter_bytes(lsb0, f)
}
}
impl<'a> IterBytes for &'a str {
#[inline]
fn iter_bytes(&self, _lsb0: bool, f: Cb) -> bool {
@ -288,13 +281,6 @@ impl IterBytes for ~str {
}
}
impl IterBytes for @str {
#[inline]
fn iter_bytes(&self, lsb0: bool, f: Cb) -> bool {
self.as_slice().iter_bytes(lsb0, f)
}
}
impl<A:IterBytes> IterBytes for Option<A> {
#[inline]
fn iter_bytes(&self, lsb0: bool, f: Cb) -> bool {

View file

@ -159,25 +159,6 @@ impl<A:ToStr> ToStr for ~[A] {
}
}
impl<A:ToStr> ToStr for @[A] {
#[inline]
fn to_str(&self) -> ~str {
let mut acc = ~"[";
let mut first = true;
for elt in self.iter() {
if first {
first = false;
}
else {
acc.push_str(", ");
}
acc.push_str(elt.to_str());
}
acc.push_char(']');
acc
}
}
#[cfg(test)]
mod tests {
use hashmap::HashMap;
@ -195,7 +176,6 @@ mod tests {
assert_eq!(false.to_str(), ~"false");
assert_eq!(().to_str(), ~"()");
assert_eq!((~"hi").to_str(), ~"hi");
assert_eq!((@"hi").to_str(), ~"hi");
}
#[test]

View file

@ -56,10 +56,8 @@ pub trait Repr<T> {
impl<'a, T> Repr<Slice<T>> for &'a [T] {}
impl<'a> Repr<Slice<u8>> for &'a str {}
impl<T> Repr<*Box<T>> for @T {}
impl<T> Repr<*Box<Vec<T>>> for @[T] {}
impl<T> Repr<*Vec<T>> for ~[T] {}
impl Repr<*String> for ~str {}
impl Repr<*Box<String>> for @str {}
// sure would be nice to have this
// impl<T> Repr<*Vec<T>> for ~[T] {}

View file

@ -646,13 +646,6 @@ pub mod traits {
fn ne(&self, other: &~[T]) -> bool { !self.eq(other) }
}
impl<T:Eq> Eq for @[T] {
#[inline]
fn eq(&self, other: &@[T]) -> bool { self.as_slice() == *other }
#[inline]
fn ne(&self, other: &@[T]) -> bool { !self.eq(other) }
}
impl<'a,T:TotalEq> TotalEq for &'a [T] {
fn equals(&self, other: & &'a [T]) -> bool {
self.len() == other.len() &&
@ -665,11 +658,6 @@ pub mod traits {
fn equals(&self, other: &~[T]) -> bool { self.as_slice().equals(&other.as_slice()) }
}
impl<T:TotalEq> TotalEq for @[T] {
#[inline]
fn equals(&self, other: &@[T]) -> bool { self.as_slice().equals(&other.as_slice()) }
}
impl<'a,T:Eq, V: Vector<T>> Equiv<V> for &'a [T] {
#[inline]
fn equiv(&self, other: &V) -> bool { self.as_slice() == other.as_slice() }
@ -680,11 +668,6 @@ pub mod traits {
fn equiv(&self, other: &V) -> bool { self.as_slice() == other.as_slice() }
}
impl<'a,T:Eq, V: Vector<T>> Equiv<V> for @[T] {
#[inline]
fn equiv(&self, other: &V) -> bool { self.as_slice() == other.as_slice() }
}
impl<'a,T:TotalOrd> TotalOrd for &'a [T] {
fn cmp(&self, other: & &'a [T]) -> Ordering {
order::cmp(self.iter(), other.iter())
@ -696,11 +679,6 @@ pub mod traits {
fn cmp(&self, other: &~[T]) -> Ordering { self.as_slice().cmp(&other.as_slice()) }
}
impl<T: TotalOrd> TotalOrd for @[T] {
#[inline]
fn cmp(&self, other: &@[T]) -> Ordering { self.as_slice().cmp(&other.as_slice()) }
}
impl<'a, T: Eq + Ord> Ord for &'a [T] {
fn lt(&self, other: & &'a [T]) -> bool {
order::lt(self.iter(), other.iter())
@ -730,17 +708,6 @@ pub mod traits {
fn gt(&self, other: &~[T]) -> bool { self.as_slice() > other.as_slice() }
}
impl<T: Eq + Ord> Ord for @[T] {
#[inline]
fn lt(&self, other: &@[T]) -> bool { self.as_slice() < other.as_slice() }
#[inline]
fn le(&self, other: &@[T]) -> bool { self.as_slice() <= other.as_slice() }
#[inline]
fn ge(&self, other: &@[T]) -> bool { self.as_slice() >= other.as_slice() }
#[inline]
fn gt(&self, other: &@[T]) -> bool { self.as_slice() > other.as_slice() }
}
impl<'a,T:Clone, V: Vector<T>> Add<V, ~[T]> for &'a [T] {
#[inline]
fn add(&self, rhs: &V) -> ~[T] {
@ -778,11 +745,6 @@ impl<T> Vector<T> for ~[T] {
fn as_slice<'a>(&'a self) -> &'a [T] { let v: &'a [T] = *self; v }
}
impl<T> Vector<T> for @[T] {
#[inline(always)]
fn as_slice<'a>(&'a self) -> &'a [T] { let v: &'a [T] = *self; v }
}
impl<'a, T> Container for &'a [T] {
/// Returns the length of a vector
#[inline]
@ -833,15 +795,6 @@ impl<T: Clone> CloneableVector<T> for ~[T] {
fn into_owned(self) -> ~[T] { self }
}
/// Extension methods for managed vectors
impl<T: Clone> CloneableVector<T> for @[T] {
#[inline]
fn to_owned(&self) -> ~[T] { self.as_slice().to_owned() }
#[inline(always)]
fn into_owned(self) -> ~[T] { self.to_owned() }
}
/// Extension methods for vectors
pub trait ImmutableVector<'a, T> {
/**
@ -2637,10 +2590,6 @@ impl<A> Default for ~[A] {
fn default() -> ~[A] { ~[] }
}
impl<A> Default for @[A] {
fn default() -> @[A] { @[] }
}
macro_rules! iterator {
(struct $name:ident -> $ptr:ty, $elem:ty) => {
/// An iterator for iterating over a vector.
@ -3117,14 +3066,6 @@ mod tests {
assert_eq!(v_b[0], 2);
assert_eq!(v_b[1], 3);
// Test on managed heap.
let vec_managed = @[1, 2, 3, 4, 5];
let v_c = vec_managed.slice(0u, 3u).to_owned();
assert_eq!(v_c.len(), 3u);
assert_eq!(v_c[0], 1);
assert_eq!(v_c[1], 2);
assert_eq!(v_c[2], 3);
// Test on exchange heap.
let vec_unique = ~[1, 2, 3, 4, 5, 6];
let v_d = vec_unique.slice(1u, 6u).to_owned();
@ -4060,7 +4001,6 @@ mod tests {
);
t!(&[int]);
t!(@[int]);
t!(~[int]);
}

View file

@ -14,11 +14,13 @@ use codemap::{Span, Spanned, DUMMY_SP};
use abi::AbiSet;
use ast_util;
use opt_vec::OptVec;
use parse::token::{interner_get, str_to_ident, special_idents};
use parse::token::{InternedString, special_idents, str_to_ident};
use parse::token;
use std::cell::RefCell;
use std::hashmap::HashMap;
use std::option::Option;
use std::rc::Rc;
use std::to_str::ToStr;
use extra::serialize::{Encodable, Decodable, Encoder, Decoder};
@ -125,7 +127,8 @@ pub type Mrk = u32;
impl<S:Encoder> Encodable<S> for Ident {
fn encode(&self, s: &mut S) {
s.emit_str(interner_get(self.name));
let string = token::get_ident(self.name);
s.emit_str(string.get());
}
}
@ -295,9 +298,9 @@ pub type MetaItem = Spanned<MetaItem_>;
#[deriving(Clone, Encodable, Decodable, IterBytes)]
pub enum MetaItem_ {
MetaWord(@str),
MetaList(@str, ~[@MetaItem]),
MetaNameValue(@str, Lit),
MetaWord(InternedString),
MetaList(InternedString, ~[@MetaItem]),
MetaNameValue(InternedString, Lit),
}
// can't be derived because the MetaList requires an unordered comparison
@ -402,19 +405,9 @@ impl ToStr for Sigil {
}
}
#[deriving(Eq, Encodable, Decodable, IterBytes)]
pub enum Vstore {
// FIXME (#3469): Change uint to @expr (actually only constant exprs)
VstoreFixed(Option<uint>), // [1,2,3,4]
VstoreUniq, // ~[1,2,3,4]
VstoreBox, // @[1,2,3,4]
VstoreSlice(Option<Lifetime>) // &'foo? [1,2,3,4]
}
#[deriving(Clone, Eq, Encodable, Decodable, IterBytes)]
pub enum ExprVstore {
ExprVstoreUniq, // ~[1,2,3,4]
ExprVstoreBox, // @[1,2,3,4]
ExprVstoreSlice, // &[1,2,3,4]
ExprVstoreMutSlice, // &mut [1,2,3,4]
}
@ -721,14 +714,14 @@ pub type Lit = Spanned<Lit_>;
#[deriving(Clone, Eq, Encodable, Decodable, IterBytes)]
pub enum Lit_ {
LitStr(@str, StrStyle),
LitBinary(@[u8]),
LitStr(InternedString, StrStyle),
LitBinary(Rc<~[u8]>),
LitChar(u32),
LitInt(i64, IntTy),
LitUint(u64, UintTy),
LitIntUnsuffixed(i64),
LitFloat(@str, FloatTy),
LitFloatUnsuffixed(@str),
LitFloat(InternedString, FloatTy),
LitFloatUnsuffixed(InternedString),
LitNil,
LitBool(bool),
}
@ -897,11 +890,11 @@ pub enum AsmDialect {
#[deriving(Clone, Eq, Encodable, Decodable, IterBytes)]
pub struct InlineAsm {
asm: @str,
asm: InternedString,
asm_str_style: StrStyle,
clobbers: @str,
inputs: ~[(@str, @Expr)],
outputs: ~[(@str, @Expr)],
clobbers: InternedString,
inputs: ~[(InternedString, @Expr)],
outputs: ~[(InternedString, @Expr)],
volatile: bool,
alignstack: bool,
dialect: AsmDialect
@ -1074,7 +1067,7 @@ pub enum ViewItem_ {
// optional @str: if present, this is a location (containing
// arbitrary characters) from which to fetch the crate sources
// For example, extern mod whatever = "github.com/mozilla/rust"
ViewItemExternMod(Ident, Option<(@str, StrStyle)>, NodeId),
ViewItemExternMod(Ident, Option<(InternedString,StrStyle)>, NodeId),
ViewItemUse(~[@ViewPath]),
}

View file

@ -62,9 +62,10 @@ pub fn path_to_str_with_sep(p: &[PathElem], sep: &str, itr: @IdentInterner)
pub fn path_ident_to_str(p: &Path, i: Ident, itr: @IdentInterner) -> ~str {
if p.is_empty() {
itr.get(i.name).to_owned()
itr.get(i.name).into_owned()
} else {
format!("{}::{}", path_to_str(*p, itr), itr.get(i.name))
let string = itr.get(i.name);
format!("{}::{}", path_to_str(*p, itr), string.as_slice())
}
}
@ -75,7 +76,7 @@ pub fn path_to_str(p: &[PathElem], itr: @IdentInterner) -> ~str {
pub fn path_elem_to_str(pe: PathElem, itr: @IdentInterner) -> ~str {
match pe {
PathMod(s) | PathName(s) | PathPrettyName(s, _) => {
itr.get(s.name).to_owned()
itr.get(s.name).into_owned()
}
}
}
@ -105,7 +106,11 @@ fn pretty_ty(ty: &Ty, itr: @IdentInterner, out: &mut ~str) {
// need custom handling.
TyNil => { out.push_str("$NIL$"); return }
TyPath(ref path, _, _) => {
out.push_str(itr.get(path.segments.last().unwrap().identifier.name));
out.push_str(itr.get(path.segments
.last()
.unwrap()
.identifier
.name).as_slice());
return
}
TyTup(ref tys) => {
@ -138,7 +143,8 @@ pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: &Ty) -> PathElem {
match *trait_ref {
None => pretty = ~"",
Some(ref trait_ref) => {
pretty = itr.get(trait_ref.path.segments.last().unwrap().identifier.name).to_owned();
pretty = itr.get(trait_ref.path.segments.last().unwrap().identifier.name)
.into_owned();
pretty.push_char('$');
}
};
@ -489,17 +495,21 @@ pub fn node_id_to_str(map: Map, id: NodeId, itr: @IdentInterner) -> ~str {
path_ident_to_str(path, item.ident, itr), abi, id)
}
Some(NodeMethod(m, _, path)) => {
let name = itr.get(m.ident.name);
format!("method {} in {} (id={})",
itr.get(m.ident.name), path_to_str(*path, itr), id)
name.as_slice(), path_to_str(*path, itr), id)
}
Some(NodeTraitMethod(ref tm, _, path)) => {
let m = ast_util::trait_method_to_ty_method(&**tm);
let name = itr.get(m.ident.name);
format!("method {} in {} (id={})",
itr.get(m.ident.name), path_to_str(*path, itr), id)
name.as_slice(), path_to_str(*path, itr), id)
}
Some(NodeVariant(ref variant, _, path)) => {
let name = itr.get(variant.node.name.name);
format!("variant {} in {} (id={})",
itr.get(variant.node.name.name), path_to_str(*path, itr), id)
name.as_slice(),
path_to_str(*path, itr), id)
}
Some(NodeExpr(expr)) => {
format!("expr {} (id={})", pprust::expr_to_str(expr, itr), id)

View file

@ -25,7 +25,10 @@ use std::num;
pub fn path_name_i(idents: &[Ident]) -> ~str {
// FIXME: Bad copies (#2543 -- same for everything else that says "bad")
idents.map(|i| token::interner_get(i.name)).connect("::")
idents.map(|i| {
let string = token::get_ident(i.name);
string.get().to_str()
}).connect("::")
}
// totally scary function: ignores all but the last element, should have

View file

@ -16,24 +16,26 @@ use codemap::{Span, Spanned, spanned, dummy_spanned};
use codemap::BytePos;
use diagnostic::SpanHandler;
use parse::comments::{doc_comment_style, strip_doc_comment_decoration};
use parse::token::InternedString;
use parse::token;
use crateid::CrateId;
use std::hashmap::HashSet;
pub trait AttrMetaMethods {
// This could be changed to `fn check_name(&self, name: @str) ->
// This could be changed to `fn check_name(&self, name: InternedString) ->
// bool` which would facilitate a side table recording which
// attributes/meta items are used/unused.
/// Retrieve the name of the meta item, e.g. foo in #[foo],
/// #[foo="bar"] and #[foo(bar)]
fn name(&self) -> @str;
fn name(&self) -> InternedString;
/**
* Gets the string value if self is a MetaNameValue variant
* containing a string, otherwise None.
*/
fn value_str(&self) -> Option<@str>;
fn value_str(&self) -> Option<InternedString>;
/// Gets a list of inner meta items from a list MetaItem type.
fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]>;
@ -41,32 +43,36 @@ pub trait AttrMetaMethods {
* If the meta item is a name-value type with a string value then returns
* a tuple containing the name and string value, otherwise `None`
*/
fn name_str_pair(&self) -> Option<(@str, @str)>;
fn name_str_pair(&self) -> Option<(InternedString,InternedString)>;
}
impl AttrMetaMethods for Attribute {
fn name(&self) -> @str { self.meta().name() }
fn value_str(&self) -> Option<@str> { self.meta().value_str() }
fn name(&self) -> InternedString { self.meta().name() }
fn value_str(&self) -> Option<InternedString> {
self.meta().value_str()
}
fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]> {
self.node.value.meta_item_list()
}
fn name_str_pair(&self) -> Option<(@str, @str)> { self.meta().name_str_pair() }
fn name_str_pair(&self) -> Option<(InternedString,InternedString)> {
self.meta().name_str_pair()
}
}
impl AttrMetaMethods for MetaItem {
fn name(&self) -> @str {
fn name(&self) -> InternedString {
match self.node {
MetaWord(n) => n,
MetaNameValue(n, _) => n,
MetaList(n, _) => n
MetaWord(ref n) => (*n).clone(),
MetaNameValue(ref n, _) => (*n).clone(),
MetaList(ref n, _) => (*n).clone(),
}
}
fn value_str(&self) -> Option<@str> {
fn value_str(&self) -> Option<InternedString> {
match self.node {
MetaNameValue(_, ref v) => {
match v.node {
ast::LitStr(s, _) => Some(s),
ast::LitStr(ref s, _) => Some((*s).clone()),
_ => None,
}
},
@ -81,19 +87,21 @@ impl AttrMetaMethods for MetaItem {
}
}
fn name_str_pair(&self) -> Option<(@str, @str)> {
fn name_str_pair(&self) -> Option<(InternedString,InternedString)> {
self.value_str().map(|s| (self.name(), s))
}
}
// Annoying, but required to get test_cfg to work
impl AttrMetaMethods for @MetaItem {
fn name(&self) -> @str { (**self).name() }
fn value_str(&self) -> Option<@str> { (**self).value_str() }
fn name(&self) -> InternedString { (**self).name() }
fn value_str(&self) -> Option<InternedString> { (**self).value_str() }
fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]> {
(**self).meta_item_list()
}
fn name_str_pair(&self) -> Option<(@str, @str)> { (**self).name_str_pair() }
fn name_str_pair(&self) -> Option<(InternedString,InternedString)> {
(**self).name_str_pair()
}
}
@ -114,8 +122,10 @@ impl AttributeMethods for Attribute {
fn desugar_doc(&self) -> Attribute {
if self.node.is_sugared_doc {
let comment = self.value_str().unwrap();
let meta = mk_name_value_item_str(@"doc",
strip_doc_comment_decoration(comment).to_managed());
let meta = mk_name_value_item_str(
InternedString::new("doc"),
token::intern_and_get_ident(strip_doc_comment_decoration(
comment.get())));
mk_attr(meta)
} else {
*self
@ -125,20 +135,22 @@ impl AttributeMethods for Attribute {
/* Constructors */
pub fn mk_name_value_item_str(name: @str, value: @str) -> @MetaItem {
pub fn mk_name_value_item_str(name: InternedString, value: InternedString)
-> @MetaItem {
let value_lit = dummy_spanned(ast::LitStr(value, ast::CookedStr));
mk_name_value_item(name, value_lit)
}
pub fn mk_name_value_item(name: @str, value: ast::Lit) -> @MetaItem {
pub fn mk_name_value_item(name: InternedString, value: ast::Lit)
-> @MetaItem {
@dummy_spanned(MetaNameValue(name, value))
}
pub fn mk_list_item(name: @str, items: ~[@MetaItem]) -> @MetaItem {
pub fn mk_list_item(name: InternedString, items: ~[@MetaItem]) -> @MetaItem {
@dummy_spanned(MetaList(name, items))
}
pub fn mk_word_item(name: @str) -> @MetaItem {
pub fn mk_word_item(name: InternedString) -> @MetaItem {
@dummy_spanned(MetaWord(name))
}
@ -150,12 +162,14 @@ pub fn mk_attr(item: @MetaItem) -> Attribute {
})
}
pub fn mk_sugared_doc_attr(text: @str, lo: BytePos, hi: BytePos) -> Attribute {
let style = doc_comment_style(text);
pub fn mk_sugared_doc_attr(text: InternedString, lo: BytePos, hi: BytePos)
-> Attribute {
let style = doc_comment_style(text.get());
let lit = spanned(lo, hi, ast::LitStr(text, ast::CookedStr));
let attr = Attribute_ {
style: style,
value: @spanned(lo, hi, MetaNameValue(@"doc", lit)),
value: @spanned(lo, hi, MetaNameValue(InternedString::new("doc"),
lit)),
is_sugared_doc: true
};
spanned(lo, hi, attr)
@ -178,20 +192,22 @@ pub fn contains_name<AM: AttrMetaMethods>(metas: &[AM], name: &str) -> bool {
debug!("attr::contains_name (name={})", name);
metas.iter().any(|item| {
debug!(" testing: {}", item.name());
name == item.name()
item.name().equiv(&name)
})
}
pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str)
-> Option<@str> {
-> Option<InternedString> {
attrs.iter()
.find(|at| name == at.name())
.find(|at| at.name().equiv(&name))
.and_then(|at| at.value_str())
}
pub fn last_meta_item_value_str_by_name(items: &[@MetaItem], name: &str)
-> Option<@str> {
items.rev_iter().find(|mi| name == mi.name()).and_then(|i| i.value_str())
-> Option<InternedString> {
items.rev_iter()
.find(|mi| mi.name().equiv(&name))
.and_then(|i| i.value_str())
}
/* Higher-level applications */
@ -201,16 +217,16 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] {
// human-readable strings.
let mut v = items.iter()
.map(|&mi| (mi.name(), mi))
.collect::<~[(@str, @MetaItem)]>();
.collect::<~[(InternedString, @MetaItem)]>();
v.sort_by(|&(a, _), &(b, _)| a.cmp(&b));
v.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b));
// There doesn't seem to be a more optimal way to do this
v.move_iter().map(|(_, m)| {
match m.node {
MetaList(n, ref mis) => {
MetaList(ref n, ref mis) => {
@Spanned {
node: MetaList(n, sort_meta_items(*mis)),
node: MetaList((*n).clone(), sort_meta_items(*mis)),
.. /*bad*/ (*m).clone()
}
}
@ -225,7 +241,7 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] {
*/
pub fn find_linkage_metas(attrs: &[Attribute]) -> ~[@MetaItem] {
let mut result = ~[];
for attr in attrs.iter().filter(|at| "link" == at.name()) {
for attr in attrs.iter().filter(|at| at.name().equiv(&("link"))) {
match attr.meta().node {
MetaList(_, ref items) => result.push_all(*items),
_ => ()
@ -237,7 +253,7 @@ pub fn find_linkage_metas(attrs: &[Attribute]) -> ~[@MetaItem] {
pub fn find_crateid(attrs: &[Attribute]) -> Option<CrateId> {
match first_attr_value_str_by_name(attrs, "crate_id") {
None => None,
Some(id) => from_str::<CrateId>(id),
Some(id) => from_str::<CrateId>(id.get()),
}
}
@ -254,8 +270,8 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr {
// FIXME (#2809)---validate the usage of #[inline] and #[inline]
attrs.iter().fold(InlineNone, |ia,attr| {
match attr.node.value.node {
MetaWord(n) if "inline" == n => InlineHint,
MetaList(n, ref items) if "inline" == n => {
MetaWord(ref n) if n.equiv(&("inline")) => InlineHint,
MetaList(ref n, ref items) if n.equiv(&("inline")) => {
if contains_name(*items, "always") {
InlineAlways
} else if contains_name(*items, "never") {
@ -284,7 +300,7 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
// this doesn't work.
let some_cfg_matches = metas.any(|mi| {
debug!("testing name: {}", mi.name());
if "cfg" == mi.name() { // it is a #[cfg()] attribute
if mi.name().equiv(&("cfg")) { // it is a #[cfg()] attribute
debug!("is cfg");
no_cfgs = false;
// only #[cfg(...)] ones are understood.
@ -294,7 +310,8 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
cfg_meta.iter().all(|cfg_mi| {
debug!("cfg({}[...])", cfg_mi.name());
match cfg_mi.node {
ast::MetaList(s, ref not_cfgs) if "not" == s => {
ast::MetaList(ref s, ref not_cfgs)
if s.equiv(&("not")) => {
debug!("not!");
// inside #[cfg(not(...))], so these need to all
// not match.
@ -320,7 +337,7 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
/// Represents the #[deprecated="foo"] (etc) attributes.
pub struct Stability {
level: StabilityLevel,
text: Option<@str>
text: Option<InternedString>
}
/// The available stability levels.
@ -335,9 +352,10 @@ pub enum StabilityLevel {
}
/// Find the first stability attribute. `None` if none exists.
pub fn find_stability<AM: AttrMetaMethods, It: Iterator<AM>>(mut metas: It) -> Option<Stability> {
pub fn find_stability<AM: AttrMetaMethods, It: Iterator<AM>>(mut metas: It)
-> Option<Stability> {
for m in metas {
let level = match m.name().as_slice() {
let level = match m.name().get() {
"deprecated" => Deprecated,
"experimental" => Experimental,
"unstable" => Unstable,
@ -360,7 +378,7 @@ pub fn require_unique_names(diagnostic: @SpanHandler, metas: &[@MetaItem]) {
for meta in metas.iter() {
let name = meta.name();
if !set.insert(name) {
if !set.insert(name.clone()) {
diagnostic.span_fatal(meta.span,
format!("duplicate meta item `{}`", name));
}
@ -384,14 +402,14 @@ pub fn find_repr_attr(diagnostic: @SpanHandler, attr: @ast::MetaItem, acc: ReprA
-> ReprAttr {
let mut acc = acc;
match attr.node {
ast::MetaList(s, ref items) if "repr" == s => {
ast::MetaList(ref s, ref items) if s.equiv(&("repr")) => {
for item in items.iter() {
match item.node {
ast::MetaWord(word) => {
let hint = match word.as_slice() {
ast::MetaWord(ref word) => {
let hint = match word.get() {
// Can't use "extern" because it's not a lexical identifier.
"C" => ReprExtern,
_ => match int_type_of_word(word) {
_ => match int_type_of_word(word.get()) {
Some(ity) => ReprInt(item.span, ity),
None => {
// Not a word we recognize

View file

@ -160,7 +160,7 @@ pub struct LocWithOpt {
pub struct FileMapAndLine {fm: @FileMap, line: uint}
pub struct FileMapAndBytePos {fm: @FileMap, pos: BytePos}
#[deriving(IterBytes)]
#[deriving(Clone, IterBytes)]
pub enum MacroFormat {
// e.g. #[deriving(...)] <item>
MacroAttribute,
@ -168,9 +168,9 @@ pub enum MacroFormat {
MacroBang
}
#[deriving(IterBytes)]
#[deriving(Clone, IterBytes)]
pub struct NameAndSpan {
name: @str,
name: ~str,
// the format with which the macro was invoked.
format: MacroFormat,
span: Option<Span>
@ -183,7 +183,7 @@ pub struct ExpnInfo {
callee: NameAndSpan
}
pub type FileName = @str;
pub type FileName = ~str;
pub struct FileLines
{
@ -206,7 +206,7 @@ pub struct FileMap {
/// e.g. `<anon>`
name: FileName,
/// The complete source code
src: @str,
src: ~str,
/// The start position of this source in the CodeMap
start_pos: BytePos,
/// Locations of lines beginnings in the source code
@ -267,7 +267,7 @@ impl CodeMap {
}
}
pub fn new_filemap(&self, filename: FileName, src: @str) -> @FileMap {
pub fn new_filemap(&self, filename: FileName, src: ~str) -> @FileMap {
let mut files = self.files.borrow_mut();
let start_pos = match files.get().last() {
None => 0,
@ -301,7 +301,7 @@ impl CodeMap {
pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt {
let loc = self.lookup_char_pos(pos);
LocWithOpt {
filename: loc.file.name,
filename: loc.file.name.to_str(),
line: loc.line,
col: loc.col,
file: Some(loc.file)
@ -324,7 +324,7 @@ impl CodeMap {
pub fn span_to_filename(&self, sp: Span) -> FileName {
let lo = self.lookup_char_pos(sp.lo);
lo.file.name
lo.file.name.to_str()
}
pub fn span_to_lines(&self, sp: Span) -> @FileLines {
@ -468,7 +468,7 @@ mod test {
#[test]
fn t1 () {
let cm = CodeMap::new();
let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line");
let fm = cm.new_filemap(~"blork.rs",~"first line.\nsecond line");
fm.next_line(BytePos(0));
assert_eq!(&fm.get_line(0),&~"first line.");
// TESTING BROKEN BEHAVIOR:
@ -480,7 +480,7 @@ mod test {
#[should_fail]
fn t2 () {
let cm = CodeMap::new();
let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line");
let fm = cm.new_filemap(~"blork.rs",~"first line.\nsecond line");
// TESTING *REALLY* BROKEN BEHAVIOR:
fm.next_line(BytePos(0));
fm.next_line(BytePos(10));

View file

@ -17,6 +17,7 @@ use codemap::Span;
use ext::base;
use ext::base::*;
use parse;
use parse::token::InternedString;
use parse::token;
enum State {
@ -43,7 +44,7 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
cx.cfg(),
tts.to_owned());
let mut asm = @"";
let mut asm = InternedString::new("");
let mut asm_str_style = None;
let mut outputs = ~[];
let mut inputs = ~[];
@ -79,10 +80,10 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
let (constraint, _str_style) = p.parse_str();
if constraint.starts_with("+") {
if constraint.get().starts_with("+") {
cx.span_unimpl(p.last_span,
"'+' (read+write) output operand constraint modifier");
} else if !constraint.starts_with("=") {
} else if !constraint.get().starts_with("=") {
cx.span_err(p.last_span, "output operand constraint lacks '='");
}
@ -104,9 +105,9 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
let (constraint, _str_style) = p.parse_str();
if constraint.starts_with("=") {
if constraint.get().starts_with("=") {
cx.span_err(p.last_span, "input operand constraint contains '='");
} else if constraint.starts_with("+") {
} else if constraint.get().starts_with("+") {
cx.span_err(p.last_span, "input operand constraint contains '+'");
}
@ -137,11 +138,11 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
Options => {
let (option, _str_style) = p.parse_str();
if "volatile" == option {
if option.equiv(&("volatile")) {
volatile = true;
} else if "alignstack" == option {
} else if option.equiv(&("alignstack")) {
alignstack = true;
} else if "intel" == option {
} else if option.equiv(&("intel")) {
dialect = ast::AsmIntel;
}
@ -191,9 +192,9 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
MRExpr(@ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprInlineAsm(ast::InlineAsm {
asm: asm,
asm: token::intern_and_get_ident(asm.get()),
asm_str_style: asm_str_style.unwrap(),
clobbers: cons.to_managed(),
clobbers: token::intern_and_get_ident(cons),
inputs: inputs,
outputs: outputs,
volatile: volatile,

Some files were not shown because too many files have changed in this diff Show more