Fallout of std::str stabilization
This commit is contained in:
parent
4908017d59
commit
082bfde412
193 changed files with 2143 additions and 2230 deletions
|
|
@@ -251,17 +251,17 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
|
|||
Err(e) => {
|
||||
err(format!("couldn't read {}: {}",
|
||||
path.display(),
|
||||
e).as_slice());
|
||||
e)[]);
|
||||
unreachable!()
|
||||
}
|
||||
};
|
||||
match str::from_utf8(bytes.as_slice()) {
|
||||
match str::from_utf8(bytes[]).ok() {
|
||||
Some(s) => {
|
||||
return string_to_filemap(sess, s.to_string(),
|
||||
path.as_str().unwrap().to_string())
|
||||
}
|
||||
None => {
|
||||
err(format!("{} is not UTF-8 encoded", path.display()).as_slice())
|
||||
err(format!("{} is not UTF-8 encoded", path.display())[])
|
||||
}
|
||||
}
|
||||
unreachable!()
|
||||
|
|
@@ -391,10 +391,10 @@ pub fn char_lit(lit: &str) -> (char, int) {
|
|||
}
|
||||
|
||||
let msg = format!("lexer should have rejected a bad character escape {}", lit);
|
||||
let msg2 = msg.as_slice();
|
||||
let msg2 = msg[];
|
||||
|
||||
fn esc(len: uint, lit: &str) -> Option<(char, int)> {
|
||||
num::from_str_radix(lit.slice(2, len), 16)
|
||||
num::from_str_radix(lit[2..len], 16)
|
||||
.and_then(char::from_u32)
|
||||
.map(|x| (x, len as int))
|
||||
}
|
||||
|
|
@@ -402,10 +402,10 @@ pub fn char_lit(lit: &str) -> (char, int) {
|
|||
let unicode_escape: || -> Option<(char, int)> = ||
|
||||
if lit.as_bytes()[2] == b'{' {
|
||||
let idx = lit.find('}').expect(msg2);
|
||||
let subslice = lit.slice(3, idx);
|
||||
let subslice = lit[3..idx];
|
||||
num::from_str_radix(subslice, 16)
|
||||
.and_then(char::from_u32)
|
||||
.map(|x| (x, subslice.char_len() as int + 4))
|
||||
.map(|x| (x, subslice.chars().count() as int + 4))
|
||||
} else {
|
||||
esc(6, lit)
|
||||
};
|
||||
|
|
@@ -429,7 +429,7 @@ pub fn str_lit(lit: &str) -> String {
|
|||
let error = |i| format!("lexer should have rejected {} at {}", lit, i);
|
||||
|
||||
/// Eat everything up to a non-whitespace
|
||||
fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharOffsets<'a>>) {
|
||||
fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharIndices<'a>>) {
|
||||
loop {
|
||||
match it.peek().map(|x| x.1) {
|
||||
Some(' ') | Some('\n') | Some('\r') | Some('\t') => {
|
||||
|
|
@@ -464,7 +464,7 @@ pub fn str_lit(lit: &str) -> String {
|
|||
eat(&mut chars);
|
||||
} else {
|
||||
// otherwise, a normal escape
|
||||
let (c, n) = char_lit(lit.slice_from(i));
|
||||
let (c, n) = char_lit(lit[i..]);
|
||||
for _ in range(0, n - 1) { // we don't need to move past the first \
|
||||
chars.next();
|
||||
}
|
||||
|
|
@@ -527,7 +527,7 @@ pub fn raw_str_lit(lit: &str) -> String {
|
|||
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
|
||||
s.len() > 1 &&
|
||||
first_chars.contains(&s.char_at(0)) &&
|
||||
s.slice_from(1).chars().all(|c| '0' <= c && c <= '9')
|
||||
s[1..].chars().all(|c| '0' <= c && c <= '9')
|
||||
}
|
||||
|
||||
fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>,
|
||||
|
|
@@ -540,7 +540,7 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>,
|
|||
if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
|
||||
// if it looks like a width, lets try to be helpful.
|
||||
sd.span_err(sp, &*format!("illegal width `{}` for float literal, \
|
||||
valid widths are 32 and 64", suf.slice_from(1)));
|
||||
valid widths are 32 and 64", suf[1..]));
|
||||
} else {
|
||||
sd.span_err(sp, &*format!("illegal suffix `{}` for float literal, \
|
||||
valid suffixes are `f32` and `f64`", suf));
|
||||
|
|
@@ -576,7 +576,7 @@ pub fn byte_lit(lit: &str) -> (u8, uint) {
|
|||
b'\'' => b'\'',
|
||||
b'0' => b'\0',
|
||||
_ => {
|
||||
match ::std::num::from_str_radix::<u64>(lit.slice(2, 4), 16) {
|
||||
match ::std::num::from_str_radix::<u64>(lit[2..4], 16) {
|
||||
Some(c) =>
|
||||
if c > 0xFF {
|
||||
panic!(err(2))
|
||||
|
|
@@ -626,7 +626,7 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> {
|
|||
}
|
||||
_ => {
|
||||
// otherwise, a normal escape
|
||||
let (c, n) = byte_lit(lit.slice_from(i));
|
||||
let (c, n) = byte_lit(lit[i..]);
|
||||
// we don't need to move past the first \
|
||||
for _ in range(0, n - 1) {
|
||||
chars.next();
|
||||
|
|
@@ -655,7 +655,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) ->
|
|||
// s can only be ascii, byte indexing is fine
|
||||
|
||||
let s2 = s.chars().filter(|&c| c != '_').collect::<String>();
|
||||
let mut s = s2.as_slice();
|
||||
let mut s = s2[];
|
||||
|
||||
debug!("integer_lit: {}, {}", s, suffix);
|
||||
|
||||
|
|
@@ -688,7 +688,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) ->
|
|||
}
|
||||
|
||||
if base != 10 {
|
||||
s = s.slice_from(2);
|
||||
s = s[2..];
|
||||
}
|
||||
|
||||
if let Some(suf) = suffix {
|
||||
|
|
@@ -710,7 +710,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) ->
|
|||
if looks_like_width_suffix(&['i', 'u'], suf) {
|
||||
sd.span_err(sp, &*format!("illegal width `{}` for integer literal; \
|
||||
valid widths are 8, 16, 32 and 64",
|
||||
suf.slice_from(1)));
|
||||
suf[1..]));
|
||||
} else {
|
||||
sd.span_err(sp, &*format!("illegal suffix `{}` for numeric literal", suf));
|
||||
}
|
||||
|
|
@@ -808,7 +808,7 @@ mod test {
|
|||
#[test]
|
||||
fn string_to_tts_macro () {
|
||||
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
|
||||
let tts: &[ast::TokenTree] = tts.as_slice();
|
||||
let tts: &[ast::TokenTree] = tts[];
|
||||
match tts {
|
||||
[ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)),
|
||||
ast::TtToken(_, token::Not),
|
||||
|
|
@@ -816,19 +816,19 @@ mod test {
|
|||
ast::TtDelimited(_, ref macro_delimed)]
|
||||
if name_macro_rules.as_str() == "macro_rules"
|
||||
&& name_zip.as_str() == "zip" => {
|
||||
match macro_delimed.tts.as_slice() {
|
||||
match macro_delimed.tts[] {
|
||||
[ast::TtDelimited(_, ref first_delimed),
|
||||
ast::TtToken(_, token::FatArrow),
|
||||
ast::TtDelimited(_, ref second_delimed)]
|
||||
if macro_delimed.delim == token::Paren => {
|
||||
match first_delimed.tts.as_slice() {
|
||||
match first_delimed.tts[] {
|
||||
[ast::TtToken(_, token::Dollar),
|
||||
ast::TtToken(_, token::Ident(name, token::Plain))]
|
||||
if first_delimed.delim == token::Paren
|
||||
&& name.as_str() == "a" => {},
|
||||
_ => panic!("value 3: {}", **first_delimed),
|
||||
}
|
||||
match second_delimed.tts.as_slice() {
|
||||
match second_delimed.tts[] {
|
||||
[ast::TtToken(_, token::Dollar),
|
||||
ast::TtToken(_, token::Ident(name, token::Plain))]
|
||||
if second_delimed.delim == token::Paren
|
||||
|
|
@@ -1106,24 +1106,24 @@ mod test {
|
|||
let use_s = "use foo::bar::baz;";
|
||||
let vitem = string_to_view_item(use_s.to_string());
|
||||
let vitem_s = view_item_to_string(&vitem);
|
||||
assert_eq!(vitem_s.as_slice(), use_s);
|
||||
assert_eq!(vitem_s[], use_s);
|
||||
|
||||
let use_s = "use foo::bar as baz;";
|
||||
let vitem = string_to_view_item(use_s.to_string());
|
||||
let vitem_s = view_item_to_string(&vitem);
|
||||
assert_eq!(vitem_s.as_slice(), use_s);
|
||||
assert_eq!(vitem_s[], use_s);
|
||||
}
|
||||
|
||||
#[test] fn parse_extern_crate() {
|
||||
let ex_s = "extern crate foo;";
|
||||
let vitem = string_to_view_item(ex_s.to_string());
|
||||
let vitem_s = view_item_to_string(&vitem);
|
||||
assert_eq!(vitem_s.as_slice(), ex_s);
|
||||
assert_eq!(vitem_s[], ex_s);
|
||||
|
||||
let ex_s = "extern crate \"foo\" as bar;";
|
||||
let vitem = string_to_view_item(ex_s.to_string());
|
||||
let vitem_s = view_item_to_string(&vitem);
|
||||
assert_eq!(vitem_s.as_slice(), ex_s);
|
||||
assert_eq!(vitem_s[], ex_s);
|
||||
}
|
||||
|
||||
fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
|
||||
|
|
@@ -1161,9 +1161,9 @@ mod test {
|
|||
for &src in srcs.iter() {
|
||||
let spans = get_spans_of_pat_idents(src);
|
||||
let Span{lo:lo,hi:hi,..} = spans[0];
|
||||
assert!("self" == src.slice(lo.to_uint(), hi.to_uint()),
|
||||
assert!("self" == src[lo.to_uint()..hi.to_uint()],
|
||||
"\"{}\" != \"self\". src=\"{}\"",
|
||||
src.slice(lo.to_uint(), hi.to_uint()), src)
|
||||
src[lo.to_uint()..hi.to_uint()], src)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@@ -1202,7 +1202,7 @@ mod test {
|
|||
let docs = item.attrs.iter().filter(|a| a.name().get() == "doc")
|
||||
.map(|a| a.value_str().unwrap().get().to_string()).collect::<Vec<_>>();
|
||||
let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
|
||||
assert_eq!(docs.as_slice(), b);
|
||||
assert_eq!(docs[], b);
|
||||
|
||||
let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
|
||||
let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap();
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue