diff --git a/doc/rust.md b/doc/rust.md
index 503d1a1072b91..0b817539b2988 100644
--- a/doc/rust.md
+++ b/doc/rust.md
@@ -3079,7 +3079,7 @@ A value of type `str` is a Unicode string,
 represented as a vector of 8-bit unsigned bytes holding a sequence of UTF-8 codepoints.
 Since `str` is of unknown size, it is not a _first class_ type,
 but can only be instantiated through a pointer type,
-such as `&str`, `@str` or `~str`.
+such as `&str` or `~str`.
 
 ### Tuple types
 
@@ -3115,7 +3115,7 @@ Such a definite-sized vector type is a first-class type, since its size is known
 A vector without such a size is said to be of _indefinite_ size,
 and is therefore not a _first-class_ type.
 An indefinite-size vector can only be instantiated through a pointer type,
-such as `&[T]`, `@[T]` or `~[T]`.
+such as `&[T]` or `~[T]`.
 The kind of a vector type depends on the kind of its element type,
 as with other simple structural types.
 
diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs
index ac290de0d7218..9bd4c1d58fc6b 100644
--- a/src/libarena/lib.rs
+++ b/src/libarena/lib.rs
@@ -27,7 +27,6 @@ extern mod extra;
 
 use extra::list::{List, Cons, Nil};
 use extra::list;
-use std::at_vec;
 use std::cast::{transmute, transmute_mut, transmute_mut_region};
 use std::cast;
 use std::cell::{Cell, RefCell};
@@ -35,20 +34,31 @@ use std::num;
 use std::ptr;
 use std::kinds::marker;
 use std::mem;
+use std::rc::Rc;
 use std::rt::global_heap;
 use std::unstable::intrinsics::{TyDesc, get_tydesc};
 use std::unstable::intrinsics;
 use std::util;
+use std::vec;
 
 // The way arena uses arrays is really deeply awful. The arrays are
 // allocated, and have capacities reserved, but the fill for the array
 // will always stay at 0.
 #[deriving(Clone)]
 struct Chunk {
-    data: RefCell<@[u8]>,
+    data: Rc<RefCell<~[u8]>>,
     fill: Cell<uint>,
     is_pod: Cell<bool>,
 }
+impl Chunk {
+    fn capacity(&self) -> uint {
+        self.data.borrow().borrow().get().capacity()
+    }
+
+    unsafe fn as_ptr(&self) -> *u8 {
+        self.data.borrow().borrow().get().as_ptr()
+    }
+}
 
 // Arenas are used to quickly allocate objects that share a
 // lifetime. The arena uses ~[u8] vectors as a backing store to
@@ -97,10 +107,8 @@ impl Arena {
 }
 
 fn chunk(size: uint, is_pod: bool) -> Chunk {
-    let mut v: @[u8] = @[];
-    unsafe { at_vec::raw::reserve(&mut v, size); }
     Chunk {
-        data: RefCell::new(unsafe { cast::transmute(v) }),
+        data: Rc::new(RefCell::new(vec::with_capacity(size))),
        fill: Cell::new(0u),
        is_pod: Cell::new(is_pod),
    }
@@ -131,10 +139,7 @@ fn round_up(base: uint, align: uint) -> uint {
 // in it.
 unsafe fn destroy_chunk(chunk: &Chunk) {
     let mut idx = 0;
-    let buf = {
-        let data = chunk.data.borrow();
-        data.get().as_ptr()
-    };
+    let buf = chunk.as_ptr();
     let fill = chunk.fill.get();
 
     while idx < fill {
@@ -172,11 +177,13 @@ unsafe fn un_bitpack_tydesc_ptr(p: uint) -> (*TyDesc, bool) {
 }
 
 impl Arena {
+    fn chunk_size(&self) -> uint {
+        self.pod_head.capacity()
+    }
     // Functions for the POD part of the arena
     fn alloc_pod_grow(&mut self, n_bytes: uint, align: uint) -> *u8 {
         // Allocate a new chunk.
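// A minimal sketch (editor-added, not part of the patch): with `@str` and
// `@[T]` gone, the doc/rust.md text above leaves only the owned (`~`) and
// borrowed (`&`) pointer forms for strings and indefinite-size vectors.
let s: ~str = ~"hello";        // owned string
let t: &str = "hello";         // borrowed string slice
let v: ~[int] = ~[1, 2, 3];    // owned vector
let w: &[int] = &[1, 2, 3];    // borrowed vector slice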
- let chunk_size = at_vec::capacity(self.pod_head.data.get()); - let new_min_chunk_size = num::max(n_bytes, chunk_size); + let new_min_chunk_size = num::max(n_bytes, self.chunk_size()); self.chunks.set(@Cons(self.pod_head.clone(), self.chunks.get())); self.pod_head = chunk(num::next_power_of_two(new_min_chunk_size + 1u), true); @@ -190,7 +197,7 @@ impl Arena { let this = transmute_mut_region(self); let start = round_up(this.pod_head.fill.get(), align); let end = start + n_bytes; - if end > at_vec::capacity(this.pod_head.data.get()) { + if end > self.chunk_size() { return this.alloc_pod_grow(n_bytes, align); } this.pod_head.fill.set(end); @@ -198,7 +205,7 @@ impl Arena { //debug!("idx = {}, size = {}, align = {}, fill = {}", // start, n_bytes, align, head.fill.get()); - ptr::offset(this.pod_head.data.get().as_ptr(), start as int) + this.pod_head.as_ptr().offset(start as int) } } @@ -217,8 +224,7 @@ impl Arena { fn alloc_nonpod_grow(&mut self, n_bytes: uint, align: uint) -> (*u8, *u8) { // Allocate a new chunk. - let chunk_size = at_vec::capacity(self.head.data.get()); - let new_min_chunk_size = num::max(n_bytes, chunk_size); + let new_min_chunk_size = num::max(n_bytes, self.chunk_size()); self.chunks.set(@Cons(self.head.clone(), self.chunks.get())); self.head = chunk(num::next_power_of_two(new_min_chunk_size + 1u), false); @@ -244,7 +250,7 @@ impl Arena { end = start + n_bytes; } - if end > at_vec::capacity(self.head.data.get()) { + if end > self.head.capacity() { return self.alloc_nonpod_grow(n_bytes, align); } @@ -254,7 +260,7 @@ impl Arena { //debug!("idx = {}, size = {}, align = {}, fill = {}", // start, n_bytes, align, head.fill); - let buf = self.head.data.get().as_ptr(); + let buf = self.head.as_ptr(); return (ptr::offset(buf, tydesc_start as int), ptr::offset(buf, start as int)); } } @@ -606,5 +612,3 @@ mod test { }) } } - - diff --git a/src/libextra/num/bigint.rs b/src/libextra/num/bigint.rs index c1959843d598b..cea899b18c01a 100644 --- a/src/libextra/num/bigint.rs +++ b/src/libextra/num/bigint.rs @@ -80,7 +80,7 @@ pub mod BigDigit { /** A big unsigned integer type. -A `BigUint`-typed value `BigUint { data: @[a, b, c] }` represents a number +A `BigUint`-typed value `BigUint { data: ~[a, b, c] }` represents a number `(a + b * BigDigit::base + c * BigDigit::base^2)`. */ #[deriving(Clone)] diff --git a/src/libextra/serialize.rs b/src/libextra/serialize.rs index 020404057fb1f..9b1b1e0548e07 100644 --- a/src/libextra/serialize.rs +++ b/src/libextra/serialize.rs @@ -18,7 +18,6 @@ Core encoding and decoding interfaces. 
#[forbid(non_camel_case_types)]; -use std::at_vec; use std::hashmap::{HashMap, HashSet}; use std::rc::Rc; use std::trie::{TrieMap, TrieSet}; @@ -310,18 +309,6 @@ impl Decodable for ~str { } } -impl Encodable for @str { - fn encode(&self, s: &mut S) { - s.emit_str(*self) - } -} - -impl Decodable for @str { - fn decode(d: &mut D) -> @str { - d.read_str().to_managed() - } -} - impl Encodable for f32 { fn encode(&self, s: &mut S) { s.emit_f32(*self) @@ -456,26 +443,6 @@ impl> Decodable for ~[T] { } } -impl> Encodable for @[T] { - fn encode(&self, s: &mut S) { - s.emit_seq(self.len(), |s| { - for (i, e) in self.iter().enumerate() { - s.emit_seq_elt(i, |s| e.encode(s)) - } - }) - } -} - -impl> Decodable for @[T] { - fn decode(d: &mut D) -> @[T] { - d.read_seq(|d, len| { - at_vec::from_fn(len, |i| { - d.read_seq_elt(i, |d| Decodable::decode(d)) - }) - }) - } -} - impl> Encodable for Option { fn encode(&self, s: &mut S) { s.emit_option(|s| { diff --git a/src/librustc/back/link.rs b/src/librustc/back/link.rs index a81302035ee1c..fc38fa25a2146 100644 --- a/src/librustc/back/link.rs +++ b/src/librustc/back/link.rs @@ -473,10 +473,10 @@ pub fn build_link_meta(sess: Session, symbol_hasher: &mut Sha256) -> LinkMeta { // This calculates CMH as defined above - fn crate_hash(symbol_hasher: &mut Sha256, crateid: &CrateId) -> @str { + fn crate_hash(symbol_hasher: &mut Sha256, crateid: &CrateId) -> ~str { symbol_hasher.reset(); symbol_hasher.input_str(crateid.to_str()); - truncated_hash_result(symbol_hasher).to_managed() + truncated_hash_result(symbol_hasher) } let crateid = match attr::find_crateid(attrs) { @@ -510,7 +510,8 @@ fn truncated_hash_result(symbol_hasher: &mut Sha256) -> ~str { pub fn symbol_hash(tcx: ty::ctxt, symbol_hasher: &mut Sha256, t: ty::t, - link_meta: &LinkMeta) -> @str { + link_meta: &LinkMeta) + -> ~str { // NB: do *not* use abbrevs here as we want the symbol names // to be independent of one another in the crate. @@ -523,15 +524,14 @@ pub fn symbol_hash(tcx: ty::ctxt, let mut hash = truncated_hash_result(symbol_hasher); // Prefix with 'h' so that it never blends into adjacent digits hash.unshift_char('h'); - // tjc: allocation is unfortunate; need to change std::hash - hash.to_managed() + hash } -pub fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> @str { +pub fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> ~str { { let type_hashcodes = ccx.type_hashcodes.borrow(); match type_hashcodes.get().find(&t) { - Some(&h) => return h, + Some(h) => return h.to_str(), None => {} } } @@ -539,7 +539,7 @@ pub fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> @str { let mut type_hashcodes = ccx.type_hashcodes.borrow_mut(); let mut symbol_hasher = ccx.symbol_hasher.borrow_mut(); let hash = symbol_hash(ccx.tcx, symbol_hasher.get(), t, &ccx.link_meta); - type_hashcodes.get().insert(t, hash); + type_hashcodes.get().insert(t, hash.clone()); hash } @@ -963,7 +963,7 @@ fn link_staticlib(sess: Session, obj_filename: &Path, out_filename: &Path) { let crates = sess.cstore.get_used_crates(cstore::RequireStatic); for &(cnum, ref path) in crates.iter() { - let name = sess.cstore.get_crate_data(cnum).name; + let name = sess.cstore.get_crate_data(cnum).name.clone(); let p = match *path { Some(ref p) => p.clone(), None => { sess.err(format!("could not find rlib for: `{}`", name)); @@ -1221,7 +1221,7 @@ fn add_upstream_rust_crates(args: &mut ~[~str], sess: Session, // If we're not doing LTO, then our job is simply to just link // against the archive. 
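// A minimal sketch of the serialize.rs impls that remain once the `@str` and
// `@[T]` impls above are removed; the type parameters are written out here
// for clarity and mirror the retained `~str` impls in that file.
impl<S: Encoder> Encodable<S> for ~str {
    fn encode(&self, s: &mut S) {
        s.emit_str(*self)
    }
}

impl<D: Decoder> Decodable<D> for ~str {
    fn decode(d: &mut D) -> ~str {
        d.read_str()
    }
}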
if sess.lto() { - let name = sess.cstore.get_crate_data(cnum).name; + let name = sess.cstore.get_crate_data(cnum).name.clone(); time(sess.time_passes(), format!("altering {}.rlib", name), (), |()| { let dst = tmpdir.join(cratepath.filename().unwrap()); diff --git a/src/librustc/back/lto.rs b/src/librustc/back/lto.rs index ced8fa68f59cd..3fbcd377b8b1c 100644 --- a/src/librustc/back/lto.rs +++ b/src/librustc/back/lto.rs @@ -42,7 +42,7 @@ pub fn run(sess: session::Session, llmod: ModuleRef, // module that we've got. let crates = sess.cstore.get_used_crates(cstore::RequireStatic); for (cnum, path) in crates.move_iter() { - let name = sess.cstore.get_crate_data(cnum).name; + let name = sess.cstore.get_crate_data(cnum).name.clone(); let path = match path { Some(p) => p, None => { diff --git a/src/librustc/driver/driver.rs b/src/librustc/driver/driver.rs index 211d60f7e2d53..b779c7e73b1c9 100644 --- a/src/librustc/driver/driver.rs +++ b/src/librustc/driver/driver.rs @@ -44,6 +44,7 @@ use syntax::codemap; use syntax::diagnostic; use syntax::ext::base::CrateLoader; use syntax::parse; +use syntax::parse::token::InternedString; use syntax::parse::token; use syntax::print::{pp, pprust}; use syntax; @@ -60,12 +61,14 @@ pub enum PpMode { * The name used for source code that doesn't originate in a file * (e.g. source from stdin or a string) */ -pub fn anon_src() -> @str { @"" } +pub fn anon_src() -> ~str { + "".to_str() +} -pub fn source_name(input: &Input) -> @str { +pub fn source_name(input: &Input) -> ~str { match *input { // FIXME (#9639): This needs to handle non-utf8 paths - FileInput(ref ifile) => ifile.as_str().unwrap().to_managed(), + FileInput(ref ifile) => ifile.as_str().unwrap().to_str(), StrInput(_) => anon_src() } } @@ -73,39 +76,41 @@ pub fn source_name(input: &Input) -> @str { pub fn default_configuration(sess: Session) -> ast::CrateConfig { let tos = match sess.targ_cfg.os { - abi::OsWin32 => @"win32", - abi::OsMacos => @"macos", - abi::OsLinux => @"linux", - abi::OsAndroid => @"android", - abi::OsFreebsd => @"freebsd" + abi::OsWin32 => InternedString::new("win32"), + abi::OsMacos => InternedString::new("macos"), + abi::OsLinux => InternedString::new("linux"), + abi::OsAndroid => InternedString::new("android"), + abi::OsFreebsd => InternedString::new("freebsd"), }; // ARM is bi-endian, however using NDK seems to default // to little-endian unless a flag is provided. let (end,arch,wordsz) = match sess.targ_cfg.arch { - abi::X86 => (@"little", @"x86", @"32"), - abi::X86_64 => (@"little", @"x86_64", @"64"), - abi::Arm => (@"little", @"arm", @"32"), - abi::Mips => (@"big", @"mips", @"32") + abi::X86 => ("little", "x86", "32"), + abi::X86_64 => ("little", "x86_64", "64"), + abi::Arm => ("little", "arm", "32"), + abi::Mips => ("big", "mips", "32") }; let fam = match sess.targ_cfg.os { - abi::OsWin32 => @"windows", - _ => @"unix" + abi::OsWin32 => InternedString::new("windows"), + _ => InternedString::new("unix") }; let mk = attr::mk_name_value_item_str; return ~[ // Target bindings. 
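// A minimal sketch (assumed values) of how a cfg entry is now built for the
// default_configuration change above: names and values are InternedString
// atoms rather than `@str` literals.
let fam = InternedString::new("unix");
let _word = attr::mk_word_item(fam.clone());
let _pair = attr::mk_name_value_item_str(InternedString::new("target_family"),
                                         fam);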
- attr::mk_word_item(fam), - mk(@"target_os", tos), - mk(@"target_family", fam), - mk(@"target_arch", arch), - mk(@"target_endian", end), - mk(@"target_word_size", wordsz), + attr::mk_word_item(fam.clone()), + mk(InternedString::new("target_os"), tos), + mk(InternedString::new("target_family"), fam), + mk(InternedString::new("target_arch"), InternedString::new(arch)), + mk(InternedString::new("target_endian"), InternedString::new(end)), + mk(InternedString::new("target_word_size"), + InternedString::new(wordsz)), ]; } -pub fn append_configuration(cfg: &mut ast::CrateConfig, name: @str) { +pub fn append_configuration(cfg: &mut ast::CrateConfig, + name: InternedString) { if !cfg.iter().any(|mi| mi.name() == name) { cfg.push(attr::mk_word_item(name)) } @@ -118,9 +123,15 @@ pub fn build_configuration(sess: Session) -> let default_cfg = default_configuration(sess); let mut user_cfg = sess.opts.cfg.clone(); // If the user wants a test runner, then add the test cfg - if sess.opts.test { append_configuration(&mut user_cfg, @"test") } + if sess.opts.test { + append_configuration(&mut user_cfg, InternedString::new("test")) + } // If the user requested GC, then add the GC cfg - append_configuration(&mut user_cfg, if sess.opts.gc { @"gc" } else { @"nogc" }); + append_configuration(&mut user_cfg, if sess.opts.gc { + InternedString::new("gc") + } else { + InternedString::new("nogc") + }); return vec::append(user_cfg, default_cfg); } @@ -129,7 +140,7 @@ fn parse_cfgspecs(cfgspecs: ~[~str], demitter: @diagnostic::Emitter) -> ast::CrateConfig { cfgspecs.move_iter().map(|s| { let sess = parse::new_parse_sess(Some(demitter)); - parse::parse_meta_from_source_str(@"cfgspec", s.to_managed(), ~[], sess) + parse::parse_meta_from_source_str("cfgspec".to_str(), s, ~[], sess) }).collect::() } @@ -137,8 +148,7 @@ pub enum Input { /// Load source from file FileInput(Path), /// The string is the source - // FIXME (#2319): Don't really want to box the source string - StrInput(@str) + StrInput(~str) } pub fn phase_1_parse_input(sess: Session, cfg: ast::CrateConfig, input: &Input) @@ -148,9 +158,11 @@ pub fn phase_1_parse_input(sess: Session, cfg: ast::CrateConfig, input: &Input) FileInput(ref file) => { parse::parse_crate_from_file(&(*file), cfg.clone(), sess.parse_sess) } - StrInput(src) => { - parse::parse_crate_from_source_str( - anon_src(), src, cfg.clone(), sess.parse_sess) + StrInput(ref src) => { + parse::parse_crate_from_source_str(anon_src(), + (*src).clone(), + cfg.clone(), + sess.parse_sess) } } }) @@ -474,13 +486,13 @@ fn write_out_deps(sess: Session, input: &Input, outputs: &OutputFilenames, crate // Build a list of files used to compile the output and // write Makefile-compatible dependency rules - let files: ~[@str] = { + let files: ~[~str] = { let files = sess.codemap.files.borrow(); files.get() .iter() .filter_map(|fmap| { if fmap.is_real_file() { - Some(fmap.name) + Some(fmap.name.clone()) } else { None } @@ -615,7 +627,7 @@ pub fn pretty_print_input(sess: Session, _ => @pprust::NoAnn as @pprust::PpAnn, }; - let src = sess.codemap.get_filemap(source_name(input)).src; + let src = &sess.codemap.get_filemap(source_name(input)).src; let mut rdr = MemReader::new(src.as_bytes().to_owned()); let stdout = io::stdout(); pprust::print_crate(sess.codemap, @@ -1100,17 +1112,17 @@ pub fn build_output_filenames(input: &Input, let mut stem = match *input { // FIXME (#9639): This needs to handle non-utf8 paths - FileInput(ref ifile) => (*ifile).filestem_str().unwrap().to_managed(), - StrInput(_) => @"rust_out" + 
FileInput(ref ifile) => { + (*ifile).filestem_str().unwrap().to_str() + } + StrInput(_) => ~"rust_out" }; // If a crateid is present, we use it as the link name let crateid = attr::find_crateid(attrs); match crateid { None => {} - Some(crateid) => { - stem = crateid.name.to_managed() - } + Some(crateid) => stem = crateid.name.to_str(), } if sess.building_library.get() { @@ -1201,7 +1213,7 @@ mod test { let sessopts = build_session_options(~"rustc", matches, @diagnostic::DefaultEmitter); let sess = build_session(sessopts, None, @diagnostic::DefaultEmitter); let cfg = build_configuration(sess); - let mut test_items = cfg.iter().filter(|m| "test" == m.name()); + let mut test_items = cfg.iter().filter(|m| m.name().equiv(&("test"))); assert!(test_items.next().is_some()); assert!(test_items.next().is_none()); } diff --git a/src/librustc/driver/session.rs b/src/librustc/driver/session.rs index 5cda81836a44b..cebc25c4845d4 100644 --- a/src/librustc/driver/session.rs +++ b/src/librustc/driver/session.rs @@ -352,9 +352,11 @@ impl Session_ { self.debugging_opt(NO_LANDING_PADS) } - // pointless function, now... - pub fn str_of(&self, id: ast::Ident) -> @str { - token::ident_to_str(&id) + // DEPRECATED. This function results in a lot of allocations when they + // are not necessary. + pub fn str_of(&self, id: ast::Ident) -> ~str { + let string = token::get_ident(id.name); + string.get().to_str() } // pointless function, now... @@ -417,7 +419,12 @@ pub fn building_library(options: &Options, crate: &ast::Crate) -> bool { } } match syntax::attr::first_attr_value_str_by_name(crate.attrs, "crate_type") { - Some(s) => "lib" == s || "rlib" == s || "dylib" == s || "staticlib" == s, + Some(s) => { + s.equiv(&("lib")) || + s.equiv(&("rlib")) || + s.equiv(&("dylib")) || + s.equiv(&("staticlib")) + } _ => false } } @@ -435,16 +442,22 @@ pub fn collect_outputs(session: &Session, } let mut base = session.opts.outputs.clone(); let mut iter = attrs.iter().filter_map(|a| { - if "crate_type" == a.name() { + if a.name().equiv(&("crate_type")) { match a.value_str() { - Some(n) if "rlib" == n => Some(OutputRlib), - Some(n) if "dylib" == n => Some(OutputDylib), - Some(n) if "lib" == n => Some(default_lib_output()), - Some(n) if "staticlib" == n => Some(OutputStaticlib), - Some(n) if "bin" == n => Some(OutputExecutable), + Some(ref n) if n.equiv(&("rlib")) => Some(OutputRlib), + Some(ref n) if n.equiv(&("dylib")) => Some(OutputDylib), + Some(ref n) if n.equiv(&("lib")) => { + Some(default_lib_output()) + } + Some(ref n) if n.equiv(&("staticlib")) => { + Some(OutputStaticlib) + } + Some(ref n) if n.equiv(&("bin")) => Some(OutputExecutable), Some(_) => { - session.add_lint(lint::UnknownCrateType, ast::CRATE_NODE_ID, - a.span, ~"invalid `crate_type` value"); + session.add_lint(lint::UnknownCrateType, + ast::CRATE_NODE_ID, + a.span, + ~"invalid `crate_type` value"); None } _ => { diff --git a/src/librustc/front/feature_gate.rs b/src/librustc/front/feature_gate.rs index bfb2759410854..ed4455c2f89a1 100644 --- a/src/librustc/front/feature_gate.rs +++ b/src/librustc/front/feature_gate.rs @@ -98,7 +98,8 @@ impl Context { impl Visitor<()> for Context { fn visit_ident(&mut self, sp: Span, id: ast::Ident, _: ()) { - let s = token::ident_to_str(&id); + let string = token::get_ident(id.name); + let s = string.get(); if !s.is_ascii() { self.gate_feature("non_ascii_idents", sp, @@ -122,7 +123,7 @@ impl Visitor<()> for Context { } ast::ViewItemExternMod(..) 
=> { for attr in i.attrs.iter() { - if "phase" == attr.name() { + if attr.name().get() == "phase"{ self.gate_feature("phase", attr.span, "compile time crate loading is \ experimental and possibly buggy"); @@ -135,7 +136,7 @@ impl Visitor<()> for Context { fn visit_item(&mut self, i: &ast::Item, _:()) { for attr in i.attrs.iter() { - if "thread_local" == attr.name() { + if attr.name().equiv(&("thread_local")) { self.gate_feature("thread_local", i.span, "`#[thread_local]` is an experimental feature, and does not \ currently handle destructors. There is no corresponding \ @@ -227,8 +228,7 @@ impl Visitor<()> for Context { fn visit_expr(&mut self, e: &ast::Expr, _: ()) { match e.node { - ast::ExprUnary(_, ast::UnBox, _) | - ast::ExprVstore(_, ast::ExprVstoreBox) => { + ast::ExprUnary(_, ast::UnBox, _) => { self.gate_box(e.span); } _ => {} @@ -258,7 +258,9 @@ pub fn check_crate(sess: Session, crate: &ast::Crate) { }; for attr in crate.attrs.iter() { - if "feature" != attr.name() { continue } + if !attr.name().equiv(&("feature")) { + continue + } match attr.meta_item_list() { None => { @@ -268,14 +270,16 @@ pub fn check_crate(sess: Session, crate: &ast::Crate) { Some(list) => { for &mi in list.iter() { let name = match mi.node { - ast::MetaWord(word) => word, + ast::MetaWord(ref word) => (*word).clone(), _ => { - sess.span_err(mi.span, "malformed feature, expected \ - just one word"); + sess.span_err(mi.span, + "malformed feature, expected just \ + one word"); continue } }; - match KNOWN_FEATURES.iter().find(|& &(n, _)| n == name) { + match KNOWN_FEATURES.iter() + .find(|& &(n, _)| name.equiv(&n)) { Some(&(name, Active)) => { cx.features.push(name); } Some(&(_, Removed)) => { sess.span_err(mi.span, "feature has been removed"); diff --git a/src/librustc/front/std_inject.rs b/src/librustc/front/std_inject.rs index 71a82536aee0c..4eb36b0f3fbd1 100644 --- a/src/librustc/front/std_inject.rs +++ b/src/librustc/front/std_inject.rs @@ -19,6 +19,8 @@ use syntax::codemap; use syntax::fold::Folder; use syntax::fold; use syntax::opt_vec; +use syntax::parse::token::InternedString; +use syntax::parse::token; use syntax::util::small_vector::SmallVector; pub static VERSION: &'static str = "0.10-pre"; @@ -56,11 +58,13 @@ struct StandardLibraryInjector { sess: Session, } -pub fn with_version(crate: &str) -> Option<(@str, ast::StrStyle)> { +pub fn with_version(crate: &str) -> Option<(InternedString, ast::StrStyle)> { match option_env!("CFG_DISABLE_INJECT_STD_VERSION") { Some("1") => None, _ => { - Some((format!("{}\\#{}", crate, VERSION).to_managed(), + Some((token::intern_and_get_ident(format!("{}\\#{}", + crate, + VERSION)), ast::CookedStr)) } } @@ -73,9 +77,12 @@ impl fold::Folder for StandardLibraryInjector { with_version("std"), ast::DUMMY_NODE_ID), attrs: ~[ - attr::mk_attr(attr::mk_list_item(@"phase", - ~[attr::mk_word_item(@"syntax"), - attr::mk_word_item(@"link")])) + attr::mk_attr(attr::mk_list_item( + InternedString::new("phase"), + ~[ + attr::mk_word_item(InternedString::new("syntax")), + attr::mk_word_item(InternedString::new("link") + )])) ], vis: ast::Inherited, span: DUMMY_SP diff --git a/src/librustc/front/test.rs b/src/librustc/front/test.rs index 0714c1c620bb1..2704e828ea463 100644 --- a/src/librustc/front/test.rs +++ b/src/librustc/front/test.rs @@ -27,6 +27,8 @@ use syntax::ext::base::ExtCtxt; use syntax::fold::Folder; use syntax::fold; use syntax::opt_vec; +use syntax::parse::token::InternedString; +use syntax::parse::token; use syntax::print::pprust; use syntax::{ast, ast_util}; use 
syntax::util::small_vector::SmallVector; @@ -132,7 +134,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { if !cx.sess.building_library.get() { @ast::Item { attrs: item.attrs.iter().filter_map(|attr| { - if "main" != attr.name() { + if !attr.name().equiv(&("main")) { Some(*attr) } else { None @@ -169,7 +171,7 @@ fn generate_test_harness(sess: session::Session, crate: ast::Crate) cx.ext_cx.bt_push(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { - name: @"test", + name: ~"test", format: MacroAttribute, span: None } @@ -248,7 +250,7 @@ fn is_bench_fn(i: @ast::Item) -> bool { fn is_ignored(cx: &TestCtxt, i: @ast::Item) -> bool { i.attrs.iter().any(|attr| { // check ignore(cfg(foo, bar)) - "ignore" == attr.name() && match attr.meta_item_list() { + attr.name().equiv(&("ignore")) && match attr.meta_item_list() { Some(ref cfgs) => attr::test_cfg(cx.config, cfgs.iter().map(|x| *x)), None => true } @@ -330,8 +332,9 @@ fn mk_test_module(cx: &TestCtxt) -> @ast::Item { let item_ = ast::ItemMod(testmod); // This attribute tells resolve to let us call unexported functions + let resolve_unexported_str = InternedString::new("!resolve_unexported"); let resolve_unexported_attr = - attr::mk_attr(attr::mk_word_item(@"!resolve_unexported")); + attr::mk_attr(attr::mk_word_item(resolve_unexported_str)); let item = ast::Item { ident: cx.sess.ident_of("__test"), @@ -424,7 +427,8 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr { debug!("encoding {}", ast_util::path_name_i(path)); let name_lit: ast::Lit = - nospan(ast::LitStr(ast_util::path_name_i(path).to_managed(), ast::CookedStr)); + nospan(ast::LitStr(token::intern_and_get_ident( + ast_util::path_name_i(path)), ast::CookedStr)); let name_expr = @ast::Expr { id: ast::DUMMY_NODE_ID, diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index c840faecb559b..7ba96516bf91b 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -235,9 +235,10 @@ pub fn run_compiler(args: &[~str], demitter: @diagnostic::Emitter) { 0u => d::early_error(demitter, "no input filename given"), 1u => { let ifile = matches.free[0].as_slice(); - if "-" == ifile { - let src = str::from_utf8_owned(io::stdin().read_to_end()).unwrap(); - (d::StrInput(src.to_managed()), None) + if ifile == "-" { + let src = + str::from_utf8_owned(io::stdin().read_to_end()).unwrap(); + (d::StrInput(src), None) } else { (d::FileInput(Path::new(ifile)), Some(Path::new(ifile))) } @@ -319,9 +320,11 @@ fn parse_crate_attrs(sess: session::Session, d::FileInput(ref ifile) => { parse::parse_crate_attrs_from_file(ifile, ~[], sess.parse_sess) } - d::StrInput(src) => { - parse::parse_crate_attrs_from_source_str( - d::anon_src(), src, ~[], sess.parse_sess) + d::StrInput(ref src) => { + parse::parse_crate_attrs_from_source_str(d::anon_src(), + (*src).clone(), + ~[], + sess.parse_sess) } } } diff --git a/src/librustc/metadata/common.rs b/src/librustc/metadata/common.rs index e8cfa97c0e1c8..e9732f5c9960d 100644 --- a/src/librustc/metadata/common.rs +++ b/src/librustc/metadata/common.rs @@ -210,5 +210,5 @@ pub static tag_macro_def: uint = 0x112; #[deriving(Clone)] pub struct LinkMeta { crateid: CrateId, - crate_hash: @str, + crate_hash: ~str, } diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs index fa2e94b6f8f20..9c2c5a5745848 100644 --- a/src/librustc/metadata/creader.rs +++ b/src/librustc/metadata/creader.rs @@ -27,8 +27,8 @@ use syntax::attr::AttrMetaMethods; use syntax::codemap::{Span, DUMMY_SP}; use syntax::diagnostic::SpanHandler; use 
syntax::ext::base::{CrateLoader, MacroCrate}; +use syntax::parse::token::{IdentInterner, InternedString}; use syntax::parse::token; -use syntax::parse::token::IdentInterner; use syntax::crateid::CrateId; use syntax::visit; @@ -76,7 +76,7 @@ impl<'a> visit::Visitor<()> for ReadCrateVisitor<'a> { struct cache_entry { cnum: ast::CrateNum, span: Span, - hash: @str, + hash: ~str, crateid: CrateId, } @@ -124,19 +124,17 @@ struct Env { fn visit_crate(e: &Env, c: &ast::Crate) { let cstore = e.sess.cstore; - for a in c.attrs.iter().filter(|m| "link_args" == m.name()) { + for a in c.attrs.iter().filter(|m| m.name().equiv(&("link_args"))) { match a.value_str() { - Some(ref linkarg) => { - cstore.add_used_link_args(*linkarg); - } - None => {/* fallthrough */ } + Some(ref linkarg) => cstore.add_used_link_args(linkarg.get()), + None => { /* fallthrough */ } } } } fn visit_view_item(e: &mut Env, i: &ast::ViewItem) { let should_load = i.attrs.iter().all(|attr| { - "phase" != attr.name() || + attr.name().get() != "phase" || attr.meta_item_list().map_or(false, |phases| { attr::contains_name(phases, "link") }) @@ -148,8 +146,12 @@ fn visit_view_item(e: &mut Env, i: &ast::ViewItem) { match extract_crate_info(i) { Some(info) => { - let cnum = resolve_crate(e, info.ident, info.name, info.version, - @"", i.span); + let cnum = resolve_crate(e, + info.ident.clone(), + info.name.clone(), + info.version.clone(), + ~"", + i.span); e.sess.cstore.add_extern_mod_stmt_cnum(info.id, cnum); } None => () @@ -157,36 +159,36 @@ fn visit_view_item(e: &mut Env, i: &ast::ViewItem) { } struct CrateInfo { - ident: @str, - name: @str, - version: @str, + ident: ~str, + name: ~str, + version: ~str, id: ast::NodeId, } fn extract_crate_info(i: &ast::ViewItem) -> Option { match i.node { - ast::ViewItemExternMod(ident, path_opt, id) => { - let ident = token::ident_to_str(&ident); + ast::ViewItemExternMod(ref ident, ref path_opt, id) => { + let ident = token::get_ident(ident.name); debug!("resolving extern mod stmt. 
ident: {:?} path_opt: {:?}", - ident, path_opt); - let (name, version) = match path_opt { - Some((path_str, _)) => { - let crateid: Option = from_str(path_str); + ident.get(), path_opt); + let (name, version) = match *path_opt { + Some((ref path_str, _)) => { + let crateid: Option = from_str(path_str.get()); match crateid { - None => (@"", @""), + None => (~"", ~""), Some(crateid) => { let version = match crateid.version { - None => @"", - Some(ref ver) => ver.to_managed(), + None => ~"", + Some(ref ver) => ver.to_str(), }; - (crateid.name.to_managed(), version) + (crateid.name.to_str(), version) } } } - None => (ident, @""), + None => (ident.get().to_str(), ~""), }; Some(CrateInfo { - ident: ident, + ident: ident.get().to_str(), name: name, version: version, id: id, @@ -206,13 +208,15 @@ fn visit_item(e: &Env, i: &ast::Item) { // First, add all of the custom link_args attributes let cstore = e.sess.cstore; let link_args = i.attrs.iter() - .filter_map(|at| if "link_args" == at.name() {Some(at)} else {None}) + .filter_map(|at| if at.name().equiv(&("link_args")) { + Some(at) + } else { + None + }) .to_owned_vec(); for m in link_args.iter() { match m.value_str() { - Some(linkarg) => { - cstore.add_used_link_args(linkarg); - } + Some(linkarg) => cstore.add_used_link_args(linkarg.get()), None => { /* fallthrough */ } } } @@ -220,22 +224,26 @@ fn visit_item(e: &Env, i: &ast::Item) { // Next, process all of the #[link(..)]-style arguments let cstore = e.sess.cstore; let link_args = i.attrs.iter() - .filter_map(|at| if "link" == at.name() {Some(at)} else {None}) + .filter_map(|at| if at.name().equiv(&("link")) { + Some(at) + } else { + None + }) .to_owned_vec(); for m in link_args.iter() { match m.meta_item_list() { Some(items) => { let kind = items.iter().find(|k| { - "kind" == k.name() + k.name().equiv(&("kind")) }).and_then(|a| a.value_str()); let kind = match kind { Some(k) => { - if "static" == k { + if k.equiv(&("static")) { cstore::NativeStatic } else if e.sess.targ_cfg.os == abi::OsMacos && - "framework" == k { + k.equiv(&("framework")) { cstore::NativeFramework - } else if "framework" == k { + } else if k.equiv(&("framework")) { e.sess.span_err(m.span, "native frameworks are only available \ on OSX targets"); @@ -249,7 +257,7 @@ fn visit_item(e: &Env, i: &ast::Item) { None => cstore::NativeUnknown }; let n = items.iter().find(|n| { - "name" == n.name() + n.name().equiv(&("name")) }).and_then(|a| a.value_str()); let n = match n { Some(n) => n, @@ -257,13 +265,13 @@ fn visit_item(e: &Env, i: &ast::Item) { e.sess.span_err(m.span, "#[link(...)] specified without \ `name = \"foo\"`"); - @"foo" + InternedString::new("foo") } }; - if n.is_empty() { + if n.get().is_empty() { e.sess.span_err(m.span, "#[link(name = \"\")] given with empty name"); } else { - cstore.add_used_library(n.to_owned(), kind); + cstore.add_used_library(n.get().to_owned(), kind); } } None => {} @@ -274,14 +282,14 @@ fn visit_item(e: &Env, i: &ast::Item) { } } -fn existing_match(e: &Env, name: @str, version: @str, hash: &str) -> Option { +fn existing_match(e: &Env, name: ~str, version: ~str, hash: &str) -> Option { let crate_cache = e.crate_cache.borrow(); for c in crate_cache.get().iter() { let crateid_version = match c.crateid.version { - None => @"0.0", - Some(ref ver) => ver.to_managed(), + None => ~"0.0", + Some(ref ver) => ver.to_str(), }; - if (name.is_empty() || c.crateid.name.to_managed() == name) && + if (name.is_empty() || c.crateid.name == name) && (version.is_empty() || crateid_version == version) && 
(hash.is_empty() || c.hash.as_slice() == hash) { return Some(c.cnum); @@ -291,19 +299,19 @@ fn existing_match(e: &Env, name: @str, version: @str, hash: &str) -> Option ast::CrateNum { - match existing_match(e, name, version, hash) { + match existing_match(e, name.clone(), version.clone(), hash.clone()) { None => { let load_ctxt = loader::Context { sess: e.sess, span: span, ident: ident, - name: name, + name: name.clone(), version: version, hash: hash, os: e.os, @@ -364,10 +372,13 @@ fn resolve_crate_deps(e: &mut Env, cdata: &[u8]) -> cstore::cnum_map { let r = decoder::get_crate_deps(cdata); for dep in r.iter() { let extrn_cnum = dep.cnum; - let cname_str = token::ident_to_str(&dep.name); + let cname_str = token::get_ident(dep.name.name); debug!("resolving dep crate {} ver: {} hash: {}", cname_str, dep.vers, dep.hash); - match existing_match(e, cname_str, dep.vers, dep.hash) { + match existing_match(e, + cname_str.get().to_str(), + dep.vers.clone(), + dep.hash.clone()) { Some(local_cnum) => { debug!("already have it"); // We've already seen this crate @@ -379,8 +390,12 @@ fn resolve_crate_deps(e: &mut Env, cdata: &[u8]) -> cstore::cnum_map { // FIXME (#2404): Need better error reporting than just a bogus // span. let fake_span = DUMMY_SP; - let local_cnum = resolve_crate(e, cname_str, cname_str, dep.vers, - dep.hash, fake_span); + let local_cnum = resolve_crate(e, + cname_str.get().to_str(), + cname_str.get().to_str(), + dep.vers.clone(), + dep.hash.clone(), + fake_span); cnum_map.insert(extrn_cnum, local_cnum); } } @@ -411,8 +426,12 @@ impl Loader { impl CrateLoader for Loader { fn load_crate(&mut self, crate: &ast::ViewItem) -> MacroCrate { let info = extract_crate_info(crate).unwrap(); - let cnum = resolve_crate(&mut self.env, info.ident, info.name, - info.version, @"", crate.span); + let cnum = resolve_crate(&mut self.env, + info.ident.clone(), + info.name.clone(), + info.version.clone(), + ~"", + crate.span); let library = self.env.sess.cstore.get_used_crate_source(cnum).unwrap(); MacroCrate { lib: library.dylib, diff --git a/src/librustc/metadata/csearch.rs b/src/librustc/metadata/csearch.rs index 8d5be0cfb8284..1a65b326bbdea 100644 --- a/src/librustc/metadata/csearch.rs +++ b/src/librustc/metadata/csearch.rs @@ -18,6 +18,7 @@ use middle::ty; use middle::typeck; use std::vec; +use std::rc::Rc; use reader = extra::ebml::reader; use syntax::ast; use syntax::ast_map; @@ -221,8 +222,8 @@ pub fn get_field_type(tcx: ty::ctxt, class_id: ast::DefId, class_id, def) ); let ty = decoder::item_type(def, the_field, tcx, cdata); ty::ty_param_bounds_and_ty { - generics: ty::Generics {type_param_defs: @~[], - region_param_defs: @[]}, + generics: ty::Generics {type_param_defs: Rc::new(~[]), + region_param_defs: Rc::new(~[])}, ty: ty } } diff --git a/src/librustc/metadata/cstore.rs b/src/librustc/metadata/cstore.rs index 024e214a2fe71..33625000e4ad0 100644 --- a/src/librustc/metadata/cstore.rs +++ b/src/librustc/metadata/cstore.rs @@ -32,7 +32,7 @@ pub enum MetadataBlob { } pub struct crate_metadata { - name: @str, + name: ~str, data: MetadataBlob, cnum_map: cnum_map, cnum: ast::CrateNum @@ -89,12 +89,12 @@ impl CStore { *metas.get().get(&cnum) } - pub fn get_crate_hash(&self, cnum: ast::CrateNum) -> @str { + pub fn get_crate_hash(&self, cnum: ast::CrateNum) -> ~str { let cdata = self.get_crate_data(cnum); decoder::get_crate_hash(cdata.data()) } - pub fn get_crate_vers(&self, cnum: ast::CrateNum) -> @str { + pub fn get_crate_vers(&self, cnum: ast::CrateNum) -> ~str { let cdata = 
self.get_crate_data(cnum); decoder::get_crate_vers(cdata.data()) } @@ -192,7 +192,7 @@ impl CStore { // returns hashes of crates directly used by this crate. Hashes are sorted by // (crate name, crate version, crate hash) in lexicographic order (not semver) - pub fn get_dep_hashes(&self) -> ~[@str] { + pub fn get_dep_hashes(&self) -> ~[~str] { let mut result = ~[]; let extern_mod_crate_map = self.extern_mod_crate_map.borrow(); @@ -202,7 +202,7 @@ impl CStore { let vers = decoder::get_crate_vers(cdata.data()); debug!("Add hash[{}]: {} {}", cdata.name, vers, hash); result.push(crate_hash { - name: cdata.name, + name: cdata.name.clone(), vers: vers, hash: hash }); @@ -215,15 +215,15 @@ impl CStore { debug!(" hash[{}]: {}", x.name, x.hash); } - result.map(|ch| ch.hash) + result.move_iter().map(|crate_hash { hash, ..}| hash).collect() } } #[deriving(Clone, TotalEq, TotalOrd)] struct crate_hash { - name: @str, - vers: @str, - hash: @str, + name: ~str, + vers: ~str, + hash: ~str, } impl crate_metadata { diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index 11fab9cced7f7..8514025f9dd32 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -25,11 +25,11 @@ use middle::ty; use middle::typeck; use middle::astencode::vtable_decoder_helpers; -use std::at_vec; use std::u64; use std::io; use std::io::extensions::u64_from_be_bytes; use std::option; +use std::rc::Rc; use std::vec; use extra::ebml::reader; use extra::ebml; @@ -246,7 +246,7 @@ fn item_ty_param_defs(item: ebml::Doc, tcx: ty::ctxt, cdata: Cmd, tag: uint) - -> @~[ty::TypeParameterDef] { + -> Rc<~[ty::TypeParameterDef]> { let mut bounds = ~[]; reader::tagged_docs(item, tag, |p| { let bd = parse_type_param_def_data( @@ -255,15 +255,15 @@ fn item_ty_param_defs(item: ebml::Doc, bounds.push(bd); true }); - @bounds + Rc::new(bounds) } fn item_region_param_defs(item_doc: ebml::Doc, tcx: ty::ctxt, cdata: Cmd) - -> @[ty::RegionParameterDef] { - at_vec::build(None, |push| { - reader::tagged_docs(item_doc, tag_region_param_def, |rp_doc| { + -> Rc<~[ty::RegionParameterDef]> { + let mut v = ~[]; + reader::tagged_docs(item_doc, tag_region_param_def, |rp_doc| { let ident_str_doc = reader::get_doc(rp_doc, tag_region_param_def_ident); let ident = item_name(tcx.sess.intr(), ident_str_doc); @@ -271,11 +271,11 @@ fn item_region_param_defs(item_doc: ebml::Doc, tag_region_param_def_def_id); let def_id = reader::with_doc_data(def_id_doc, parse_def_id); let def_id = translate_def_id(cdata, def_id); - push(ty::RegionParameterDef { ident: ident, - def_id: def_id }); + v.push(ty::RegionParameterDef { ident: ident, + def_id: def_id }); true }); - }) + Rc::new(v) } fn item_ty_param_count(item: ebml::Doc) -> uint { @@ -1042,15 +1042,15 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] { let mut items: ~[@ast::MetaItem] = ~[]; reader::tagged_docs(md, tag_meta_item_word, |meta_item_doc| { let nd = reader::get_doc(meta_item_doc, tag_meta_item_name); - let n = nd.as_str_slice().to_managed(); + let n = token::intern_and_get_ident(nd.as_str_slice()); items.push(attr::mk_word_item(n)); true }); reader::tagged_docs(md, tag_meta_item_name_value, |meta_item_doc| { let nd = reader::get_doc(meta_item_doc, tag_meta_item_name); let vd = reader::get_doc(meta_item_doc, tag_meta_item_value); - let n = nd.as_str_slice().to_managed(); - let v = vd.as_str_slice().to_managed(); + let n = token::intern_and_get_ident(nd.as_str_slice()); + let v = token::intern_and_get_ident(vd.as_str_slice()); // FIXME (#623): Should be 
able to decode MetaNameValue variants, // but currently the encoder just drops them items.push(attr::mk_name_value_item_str(n, v)); @@ -1058,7 +1058,7 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] { }); reader::tagged_docs(md, tag_meta_item_list, |meta_item_doc| { let nd = reader::get_doc(meta_item_doc, tag_meta_item_name); - let n = nd.as_str_slice().to_managed(); + let n = token::intern_and_get_ident(nd.as_str_slice()); let subitems = get_meta_items(meta_item_doc); items.push(attr::mk_list_item(n, subitems)); true @@ -1113,8 +1113,8 @@ pub fn get_crate_attributes(data: &[u8]) -> ~[ast::Attribute] { pub struct CrateDep { cnum: ast::CrateNum, name: ast::Ident, - vers: @str, - hash: @str + vers: ~str, + hash: ~str } pub fn get_crate_deps(data: &[u8]) -> ~[CrateDep] { @@ -1122,9 +1122,9 @@ pub fn get_crate_deps(data: &[u8]) -> ~[CrateDep] { let cratedoc = reader::Doc(data); let depsdoc = reader::get_doc(cratedoc, tag_crate_deps); let mut crate_num = 1; - fn docstr(doc: ebml::Doc, tag_: uint) -> @str { + fn docstr(doc: ebml::Doc, tag_: uint) -> ~str { let d = reader::get_doc(doc, tag_); - d.as_str_slice().to_managed() + d.as_str_slice().to_str() } reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| { deps.push(CrateDep {cnum: crate_num, @@ -1142,24 +1142,29 @@ fn list_crate_deps(data: &[u8], out: &mut io::Writer) { let r = get_crate_deps(data); for dep in r.iter() { - write!(out, "{} {}-{}-{}\n", - dep.cnum, token::ident_to_str(&dep.name), dep.hash, dep.vers); + let string = token::get_ident(dep.name.name); + write!(out, + "{} {}-{}-{}\n", + dep.cnum, + string.get(), + dep.hash, + dep.vers); } write!(out, "\n"); } -pub fn get_crate_hash(data: &[u8]) -> @str { +pub fn get_crate_hash(data: &[u8]) -> ~str { let cratedoc = reader::Doc(data); let hashdoc = reader::get_doc(cratedoc, tag_crate_hash); - hashdoc.as_str_slice().to_managed() + hashdoc.as_str_slice().to_str() } -pub fn get_crate_vers(data: &[u8]) -> @str { +pub fn get_crate_vers(data: &[u8]) -> ~str { let attrs = decoder::get_crate_attributes(data); match attr::find_crateid(attrs) { - None => @"0.0", - Some(crateid) => crateid.version_or_default().to_managed(), + None => ~"0.0", + Some(crateid) => crateid.version_or_default().to_str(), } } diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index ac3ee78fb8642..4170d572f20ba 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -21,29 +21,28 @@ use middle::ty; use middle::typeck; use middle; +use extra::serialize::Encodable; use std::cast; use std::cell::{Cell, RefCell}; use std::hashmap::{HashMap, HashSet}; use std::io::MemWriter; use std::str; use std::vec; - -use extra::serialize::Encodable; - use syntax::abi::AbiSet; use syntax::ast::*; use syntax::ast; use syntax::ast_map; use syntax::ast_util::*; -use syntax::attr; +use syntax::ast_util; use syntax::attr::AttrMetaMethods; +use syntax::attr; use syntax::codemap; use syntax::diagnostic::SpanHandler; +use syntax::parse::token::InternedString; use syntax::parse::token::special_idents; -use syntax::ast_util; +use syntax::parse::token; use syntax::visit::Visitor; use syntax::visit; -use syntax::parse::token; use syntax; use writer = extra::ebml::writer; @@ -172,7 +171,7 @@ pub fn def_to_str(did: DefId) -> ~str { fn encode_ty_type_param_defs(ebml_w: &mut writer::Encoder, ecx: &EncodeContext, - params: @~[ty::TypeParameterDef], + params: &[ty::TypeParameterDef], tag: uint) { let ty_str_ctxt = @tyencode::ctxt { diag: ecx.diag, @@ -189,7 +188,7 @@ fn 
encode_ty_type_param_defs(ebml_w: &mut writer::Encoder, fn encode_region_param_defs(ebml_w: &mut writer::Encoder, ecx: &EncodeContext, - params: @[ty::RegionParameterDef]) { + params: &[ty::RegionParameterDef]) { for param in params.iter() { ebml_w.start_tag(tag_region_param_def); @@ -216,9 +215,9 @@ fn encode_item_variances(ebml_w: &mut writer::Encoder, fn encode_bounds_and_type(ebml_w: &mut writer::Encoder, ecx: &EncodeContext, tpt: &ty::ty_param_bounds_and_ty) { - encode_ty_type_param_defs(ebml_w, ecx, tpt.generics.type_param_defs, + encode_ty_type_param_defs(ebml_w, ecx, tpt.generics.type_param_defs(), tag_items_data_item_ty_param_bounds); - encode_region_param_defs(ebml_w, ecx, tpt.generics.region_param_defs); + encode_region_param_defs(ebml_w, ecx, tpt.generics.region_param_defs()); encode_type(ecx, ebml_w, tpt.ty); } @@ -491,7 +490,7 @@ fn encode_reexported_static_methods(ecx: &EncodeContext, exp: &middle::resolve::Export2) { match ecx.tcx.items.find(exp.def_id.node) { Some(ast_map::NodeItem(item, path)) => { - let original_name = ecx.tcx.sess.str_of(item.ident); + let original_name = token::get_ident(item.ident.name); // // We don't need to reexport static methods on items @@ -503,7 +502,7 @@ fn encode_reexported_static_methods(ecx: &EncodeContext, // encoded metadata for static methods relative to Bar, // but not yet for Foo. // - if mod_path != *path || exp.name != original_name { + if mod_path != *path || original_name.get() != exp.name { if !encode_reexported_static_base_methods(ecx, ebml_w, exp) { if encode_reexported_static_trait_methods(ecx, ebml_w, exp) { debug!("(encode reexported static methods) {} \ @@ -787,7 +786,7 @@ fn encode_method_ty_fields(ecx: &EncodeContext, encode_def_id(ebml_w, method_ty.def_id); encode_name(ecx, ebml_w, method_ty.ident); encode_ty_type_param_defs(ebml_w, ecx, - method_ty.generics.type_param_defs, + method_ty.generics.type_param_defs(), tag_item_method_tps); encode_method_fty(ecx, ebml_w, &method_ty.fty); encode_visibility(ebml_w, method_ty.vis); @@ -828,7 +827,7 @@ fn encode_info_for_method(ecx: &EncodeContext, } for &ast_method in ast_method_opt.iter() { - let num_params = tpt.generics.type_param_defs.len(); + let num_params = tpt.generics.type_param_defs().len(); if num_params > 0u || is_default_impl || should_inline(ast_method.attrs) { (ecx.encode_inlined_item)( @@ -1172,10 +1171,10 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_item_variances(ebml_w, ecx, item.id); let trait_def = ty::lookup_trait_def(tcx, def_id); encode_ty_type_param_defs(ebml_w, ecx, - trait_def.generics.type_param_defs, + trait_def.generics.type_param_defs(), tag_items_data_item_ty_param_bounds); encode_region_param_defs(ebml_w, ecx, - trait_def.generics.region_param_defs); + trait_def.generics.region_param_defs()); encode_trait_ref(ebml_w, ecx, trait_def.trait_ref, tag_item_trait_ref); encode_name(ecx, ebml_w, item.ident); encode_attributes(ebml_w, item.attrs); @@ -1351,11 +1350,10 @@ fn my_visit_foreign_item(ni: &ForeignItem, index: @RefCell<~[entry]>) { match items.get(ni.id) { ast_map::NodeForeignItem(_, abi, _, pt) => { + let string = token::get_ident(ni.ident.name); debug!("writing foreign item {}::{}", - ast_map::path_to_str( - *pt, - token::get_ident_interner()), - token::ident_to_str(&ni.ident)); + ast_map::path_to_str(*pt, token::get_ident_interner()), + string.get()); let mut ebml_w = unsafe { ebml_w.unsafe_clone() @@ -1507,32 +1505,32 @@ fn write_i64(writer: &mut MemWriter, &n: &i64) { fn encode_meta_item(ebml_w: &mut writer::Encoder, mi: @MetaItem) 
{ match mi.node { - MetaWord(name) => { + MetaWord(ref name) => { ebml_w.start_tag(tag_meta_item_word); ebml_w.start_tag(tag_meta_item_name); - ebml_w.writer.write(name.as_bytes()); + ebml_w.writer.write(name.get().as_bytes()); ebml_w.end_tag(); ebml_w.end_tag(); } - MetaNameValue(name, value) => { + MetaNameValue(ref name, ref value) => { match value.node { - LitStr(value, _) => { + LitStr(ref value, _) => { ebml_w.start_tag(tag_meta_item_name_value); ebml_w.start_tag(tag_meta_item_name); - ebml_w.writer.write(name.as_bytes()); + ebml_w.writer.write(name.get().as_bytes()); ebml_w.end_tag(); ebml_w.start_tag(tag_meta_item_value); - ebml_w.writer.write(value.as_bytes()); + ebml_w.writer.write(value.get().as_bytes()); ebml_w.end_tag(); ebml_w.end_tag(); } _ => {/* FIXME (#623): encode other variants */ } } } - MetaList(name, ref items) => { + MetaList(ref name, ref items) => { ebml_w.start_tag(tag_meta_item_list); ebml_w.start_tag(tag_meta_item_name); - ebml_w.writer.write(name.as_bytes()); + ebml_w.writer.write(name.get().as_bytes()); ebml_w.end_tag(); for inner_item in items.iter() { encode_meta_item(ebml_w, *inner_item); @@ -1563,13 +1561,13 @@ fn synthesize_crate_attrs(ecx: &EncodeContext, attr::mk_attr( attr::mk_name_value_item_str( - @"crate_id", - ecx.link_meta.crateid.to_str().to_managed())) + InternedString::new("crate_id"), + token::intern_and_get_ident(ecx.link_meta.crateid.to_str()))) } let mut attrs = ~[]; for attr in crate.attrs.iter() { - if "crate_id" != attr.name() { + if !attr.name().equiv(&("crate_id")) { attrs.push(*attr); } } @@ -1615,7 +1613,7 @@ fn encode_crate_deps(ecx: &EncodeContext, ebml_w.start_tag(tag_crate_deps); let r = get_ordered_deps(ecx, cstore); for dep in r.iter() { - encode_crate_dep(ecx, ebml_w, *dep); + encode_crate_dep(ecx, ebml_w, (*dep).clone()); } ebml_w.end_tag(); } diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs index 046184bef58bb..8e557560b95f1 100644 --- a/src/librustc/metadata/loader.rs +++ b/src/librustc/metadata/loader.rs @@ -46,10 +46,10 @@ pub enum Os { pub struct Context { sess: Session, span: Span, - ident: @str, - name: @str, - version: @str, - hash: @str, + ident: ~str, + name: ~str, + version: ~str, + hash: ~str, os: Os, intr: @IdentInterner } @@ -80,7 +80,7 @@ impl Context { fn find_library_crate(&self) -> Option { let filesearch = self.sess.filesearch; - let crate_name = self.name; + let crate_name = self.name.clone(); let (dyprefix, dysuffix) = self.dylibname(); // want: crate_name.dir_part() + prefix + crate_name.file_part + "-" @@ -109,8 +109,10 @@ impl Context { } else if candidate { match get_metadata_section(self.os, path) { Some(cvec) => - if crate_matches(cvec.as_slice(), self.name, - self.version, self.hash) { + if crate_matches(cvec.as_slice(), + self.name.clone(), + self.version.clone(), + self.hash.clone()) { debug!("found {} with matching crate_id", path.display()); let (rlib, dylib) = if file.ends_with(".rlib") { @@ -235,9 +237,9 @@ pub fn note_crateid_attr(diag: @SpanHandler, crateid: &CrateId) { } fn crate_matches(crate_data: &[u8], - name: @str, - version: @str, - hash: @str) -> bool { + name: ~str, + version: ~str, + hash: ~str) -> bool { let attrs = decoder::get_crate_attributes(crate_data); match attr::find_crateid(attrs) { None => false, @@ -246,8 +248,9 @@ fn crate_matches(crate_data: &[u8], let chash = decoder::get_crate_hash(crate_data); if chash != hash { return false; } } - name == crateid.name.to_managed() && - (version.is_empty() || version == 
crateid.version_or_default().to_managed()) + name == crateid.name && + (version.is_empty() || + crateid.version_or_default() == version) } } } diff --git a/src/librustc/metadata/tydecode.rs b/src/librustc/metadata/tydecode.rs index c986016206047..92af631c2bd3e 100644 --- a/src/librustc/metadata/tydecode.rs +++ b/src/librustc/metadata/tydecode.rs @@ -156,7 +156,6 @@ fn parse_vstore(st: &mut PState, conv: conv_did) -> ty::vstore { match next(st) { '~' => ty::vstore_uniq, - '@' => ty::vstore_box, '&' => ty::vstore_slice(parse_region(st, conv)), c => st.tcx.sess.bug(format!("parse_vstore(): bad input '{}'", c)) } diff --git a/src/librustc/metadata/tyencode.rs b/src/librustc/metadata/tyencode.rs index 9da19d666673f..d17a45165432c 100644 --- a/src/librustc/metadata/tyencode.rs +++ b/src/librustc/metadata/tyencode.rs @@ -45,7 +45,7 @@ pub struct ctxt { pub struct ty_abbrev { pos: uint, len: uint, - s: @str + s: ~str } pub enum abbrev_ctxt { @@ -65,19 +65,21 @@ pub fn enc_ty(w: &mut MemWriter, cx: @ctxt, t: ty::t) { let short_names_cache = cx.tcx.short_names_cache.borrow(); result_str_opt = short_names_cache.get() .find(&t) - .map(|result| *result); + .map(|result| { + (*result).clone() + }); } let result_str = match result_str_opt { Some(s) => s, None => { let wr = &mut MemWriter::new(); enc_sty(wr, cx, &ty::get(t).sty); - let s = str::from_utf8(wr.get_ref()).unwrap().to_managed(); + let s = str::from_utf8(wr.get_ref()).unwrap(); let mut short_names_cache = cx.tcx .short_names_cache .borrow_mut(); - short_names_cache.get().insert(t, s); - s + short_names_cache.get().insert(t, s.to_str()); + s.to_str() } }; w.write(result_str.as_bytes()); @@ -103,7 +105,7 @@ pub fn enc_ty(w: &mut MemWriter, cx: @ctxt, t: ty::t) { let abbrev_len = 3 + estimate_sz(pos) + estimate_sz(len); if abbrev_len < len { // I.e. it's actually an abbreviation. 
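// Worked example for the abbreviation check above (illustrative numbers, and
// assuming estimate_sz counts hex digits): a type encoding starting at
// pos = 0x1a2 with len = 0x28 gives abbrev_len = 3 + 3 + 2 = 8, which is less
// than 0x28, so the short pos:len abbreviation is cached in short_names_cache
// and emitted instead of the full encoding.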
- let s = format!("\\#{:x}:{:x}\\#", pos, len).to_managed(); + let s = format!("\\#{:x}:{:x}\\#", pos, len); let a = ty_abbrev { pos: pos as uint, len: len as uint, s: s }; @@ -217,7 +219,6 @@ pub fn enc_vstore(w: &mut MemWriter, cx: @ctxt, v: ty::vstore) { match v { ty::vstore_fixed(u) => mywrite!(w, "{}|", u), ty::vstore_uniq => mywrite!(w, "~"), - ty::vstore_box => mywrite!(w, "@"), ty::vstore_slice(r) => { mywrite!(w, "&"); enc_region(w, cx, r); diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index a601b624c1979..29ea3475d3441 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -32,10 +32,10 @@ use syntax::fold::Folder; use syntax::parse::token; use syntax; -use std::at_vec; use std::libc; use std::cast; use std::io::Seek; +use std::rc::Rc; use extra::ebml::reader; use extra::ebml; @@ -812,13 +812,13 @@ impl<'a> ebml_writer_helpers for writer::Encoder<'a> { this.emit_struct_field("generics", 0, |this| { this.emit_struct("Generics", 2, |this| { this.emit_struct_field("type_param_defs", 0, |this| { - this.emit_from_vec(*tpbt.generics.type_param_defs, + this.emit_from_vec(tpbt.generics.type_param_defs(), |this, type_param_def| { this.emit_type_param_def(ecx, type_param_def); }) }); this.emit_struct_field("region_param_defs", 1, |this| { - tpbt.generics.region_param_defs.encode(this); + tpbt.generics.region_param_defs().encode(this); }) }) }); @@ -997,7 +997,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext, ebml_w.tag(c::tag_table_tcache, |ebml_w| { ebml_w.id(id); ebml_w.tag(c::tag_table_val, |ebml_w| { - ebml_w.emit_tpbt(ecx, *tpbt); + ebml_w.emit_tpbt(ecx, tpbt.clone()); }) }) } @@ -1064,7 +1064,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext, ebml_w.tag(c::tag_table_capture_map, |ebml_w| { ebml_w.id(id); ebml_w.tag(c::tag_table_val, |ebml_w| { - ebml_w.emit_from_vec(*cap_vars, |ebml_w, cap_var| { + ebml_w.emit_from_vec(*cap_vars.borrow(), |ebml_w, cap_var| { cap_var.encode(ebml_w); }) }) @@ -1183,8 +1183,8 @@ impl<'a> ebml_decoder_decoder_helpers for reader::Decoder<'a> { this.read_struct_field("type_param_defs", 0, |this| { - @this.read_to_vec(|this| - this.read_type_param_def(xcx)) + Rc::new(this.read_to_vec(|this| + this.read_type_param_def(xcx))) }), region_param_defs: this.read_struct_field("region_param_defs", @@ -1382,13 +1382,11 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext, } c::tag_table_capture_map => { let cvars = - at_vec::to_managed_move( - val_dsr.read_to_vec( - |val_dsr| val_dsr.read_capture_var(xcx))); + val_dsr.read_to_vec(|val_dsr| val_dsr.read_capture_var(xcx)); let mut capture_map = dcx.maps .capture_map .borrow_mut(); - capture_map.get().insert(id, cvars); + capture_map.get().insert(id, Rc::new(cvars)); } _ => { xcx.dcx.tcx.sess.bug( diff --git a/src/librustc/middle/borrowck/check_loans.rs b/src/librustc/middle/borrowck/check_loans.rs index 59310d722e426..b280da31c4537 100644 --- a/src/librustc/middle/borrowck/check_loans.rs +++ b/src/librustc/middle/borrowck/check_loans.rs @@ -733,7 +733,7 @@ fn check_loans_in_fn<'a>(this: &mut CheckLoanCtxt<'a>, span: Span) { let capture_map = this.bccx.capture_map.borrow(); let cap_vars = capture_map.get().get(&closure_id); - for cap_var in cap_vars.iter() { + for cap_var in cap_vars.borrow().iter() { let var_id = ast_util::def_id_of_def(cap_var.def).node; let var_path = @LpVar(var_id); this.check_if_path_is_moved(closure_id, span, @@ -839,4 +839,3 @@ fn check_loans_in_block<'a>(this: &mut CheckLoanCtxt<'a>, visit::walk_block(this, blk, 
()); this.check_for_conflicting_loans(blk.id); } - diff --git a/src/librustc/middle/borrowck/gather_loans/gather_moves.rs b/src/librustc/middle/borrowck/gather_loans/gather_moves.rs index 78350a993b483..0d9b4b0b171cc 100644 --- a/src/librustc/middle/borrowck/gather_loans/gather_moves.rs +++ b/src/librustc/middle/borrowck/gather_loans/gather_moves.rs @@ -69,7 +69,7 @@ pub fn gather_captures(bccx: &BorrowckCtxt, closure_expr: &ast::Expr) { let capture_map = bccx.capture_map.borrow(); let captured_vars = capture_map.get().get(&closure_expr.id); - for captured_var in captured_vars.iter() { + for captured_var in captured_vars.borrow().iter() { match captured_var.mode { moves::CapMove => { let fvar_id = ast_util::def_id_of_def(captured_var.def).node; diff --git a/src/librustc/middle/borrowck/mod.rs b/src/librustc/middle/borrowck/mod.rs index 90c9a61b18b17..8f6d4de476b13 100644 --- a/src/librustc/middle/borrowck/mod.rs +++ b/src/librustc/middle/borrowck/mod.rs @@ -194,16 +194,6 @@ pub struct BorrowStats { // // Note that there is no entry with derefs:3---the type of that expression // is T, which is not a box. -// -// Note that implicit dereferences also occur with indexing of `@[]`, -// `@str`, etc. The same rules apply. So, for example, given a -// variable `x` of type `@[@[...]]`, if I have an instance of the -// expression `x[0]` which is then auto-slice'd, there would be two -// potential entries in the root map, both with the id of the `x[0]` -// expression. The entry with `derefs==0` refers to the deref of `x` -// used as part of evaluating `x[0]`. The entry with `derefs==1` -// refers to the deref of the `x[0]` that occurs as part of the -// auto-slice. #[deriving(Eq, IterBytes)] pub struct root_map_key { id: ast::NodeId, @@ -774,7 +764,8 @@ impl BorrowckCtxt { match pat.node { ast::PatIdent(_, ref path, _) => { let ident = ast_util::path_to_ident(path); - out.push_str(token::ident_to_str(&ident)); + let string = token::get_ident(ident.name); + out.push_str(string.get()); } _ => { self.tcx.sess.bug( @@ -795,8 +786,9 @@ impl BorrowckCtxt { self.append_loan_path_to_str_from_interior(lp_base, out); match fname { mc::NamedField(ref fname) => { + let string = token::get_ident(*fname); out.push_char('.'); - out.push_str(token::interner_get(*fname)); + out.push_str(string.get()); } mc::PositionalField(idx) => { out.push_char('#'); // invent a notation here diff --git a/src/librustc/middle/check_const.rs b/src/librustc/middle/check_const.rs index 2af40163588b5..d52ae81328f63 100644 --- a/src/librustc/middle/check_const.rs +++ b/src/librustc/middle/check_const.rs @@ -192,8 +192,7 @@ pub fn check_expr(v: &mut CheckCrateVisitor, "references in constants may only refer to \ immutable values"); }, - ExprVstore(_, ExprVstoreUniq) | - ExprVstore(_, ExprVstoreBox) => { + ExprVstore(_, ExprVstoreUniq) => { sess.span_err(e.span, "cannot allocate vectors in constant expressions") }, diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs index 38376de4346df..d402305401eb9 100644 --- a/src/librustc/middle/check_match.rs +++ b/src/librustc/middle/check_match.rs @@ -176,8 +176,8 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) { match ty::get(ty).sty { ty::ty_bool => { match *ctor { - val(const_bool(true)) => Some(@"true"), - val(const_bool(false)) => Some(@"false"), + val(const_bool(true)) => Some(~"true"), + val(const_bool(false)) => Some(~"false"), _ => None } } @@ -197,7 +197,7 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) { } 
ty::ty_unboxed_vec(..) | ty::ty_vec(..) => { match *ctor { - vec(n) => Some(format!("vectors of length {}", n).to_managed()), + vec(n) => Some(format!("vectors of length {}", n)), _ => None } } @@ -214,9 +214,14 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) { type matrix = ~[~[@Pat]]; -enum useful { useful(ty::t, ctor), useful_, not_useful } +#[deriving(Clone)] +enum useful { + useful(ty::t, ctor), + useful_, + not_useful, +} -#[deriving(Eq)] +#[deriving(Clone, Eq)] enum ctor { single, variant(DefId), @@ -261,7 +266,7 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful { val(const_bool(false)), 0u, left_ty) } - ref u => *u, + ref u => (*u).clone(), } } ty::ty_enum(eid, _) => { @@ -269,7 +274,7 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful { match is_useful_specialized(cx, m, v, variant(va.id), va.args.len(), left_ty) { not_useful => (), - ref u => return *u, + ref u => return (*u).clone(), } } not_useful @@ -289,7 +294,7 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful { for n in iter::range(0u, max_len + 1) { match is_useful_specialized(cx, m, v, vec(n), n, left_ty) { not_useful => (), - ref u => return *u, + ref u => return (*u).clone(), } } not_useful @@ -304,15 +309,15 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful { match is_useful(cx, &m.iter().filter_map(|r| default(cx, *r)).collect::(), v.tail()) { - useful_ => useful(left_ty, *ctor), - ref u => *u, + useful_ => useful(left_ty, (*ctor).clone()), + ref u => (*u).clone(), } } } } Some(ref v0_ctor) => { let arity = ctor_arity(cx, v0_ctor, left_ty); - is_useful_specialized(cx, m, v, *v0_ctor, arity, left_ty) + is_useful_specialized(cx, m, v, (*v0_ctor).clone(), arity, left_ty) } } } @@ -329,7 +334,7 @@ fn is_useful_specialized(cx: &MatchCheckCtxt, cx, &ms, specialize(cx, v, &ctor, arity, lty).unwrap()); match could_be_useful { useful_ => useful(lty, ctor), - ref u => *u, + ref u => (*u).clone(), } } @@ -407,7 +412,7 @@ fn missing_ctor(cx: &MatchCheckCtxt, let r = pat_ctor_id(cx, r[0]); for id in r.iter() { if !found.contains(id) { - found.push(*id); + found.push((*id).clone()); } } } @@ -770,8 +775,8 @@ fn specialize(cx: &MatchCheckCtxt, } PatRange(lo, hi) => { let (c_lo, c_hi) = match *ctor_id { - val(ref v) => (*v, *v), - range(ref lo, ref hi) => (*lo, *hi), + val(ref v) => ((*v).clone(), (*v).clone()), + range(ref lo, ref hi) => ((*lo).clone(), (*hi).clone()), single => return Some(r.tail().to_owned()), _ => fail!("type error") }; diff --git a/src/librustc/middle/const_eval.rs b/src/librustc/middle/const_eval.rs index 5905712855536..cbe1e8f72d933 100644 --- a/src/librustc/middle/const_eval.rs +++ b/src/librustc/middle/const_eval.rs @@ -16,13 +16,15 @@ use middle::ty; use middle::typeck::astconv; use middle; -use syntax::{ast, ast_map, ast_util}; -use syntax::visit; -use syntax::visit::Visitor; use syntax::ast::*; +use syntax::parse::token::InternedString; +use syntax::visit::Visitor; +use syntax::visit; +use syntax::{ast, ast_map, ast_util}; use std::cell::RefCell; use std::hashmap::HashMap; +use std::rc::Rc; // // This pass classifies expressions by their constant-ness. 
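The check_match.rs hunks above all follow one pattern: now that `ctor` and `const_val` carry owned strings and `Rc` data rather than copyable `@`-boxes, `Clone` is derived and every `*u` / `*ctor` copy becomes an explicit `.clone()`. A minimal sketch of that pattern, using an illustrative enum rather than the compiler's own types:

    #[deriving(Clone)]
    enum Message {
        Text(~str),   // owned payload: it can no longer be copied out implicitly
        Code(uint),
    }

    fn summary(m: &Message) -> ~str {
        match *m {
            // Clone the owned string out of the borrow, just as the
            // `(*v).clone()` and `(*u).clone()` arms above do.
            Text(ref s) => (*s).clone(),
            Code(n) => format!("code {}", n),
        }
    }
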
@@ -238,7 +240,6 @@ impl ConstEvalVisitor { match vstore { ast::ExprVstoreSlice => self.classify(e), ast::ExprVstoreUniq | - ast::ExprVstoreBox | ast::ExprVstoreMutSlice => non_const } } @@ -319,8 +320,8 @@ pub enum const_val { const_float(f64), const_int(i64), const_uint(u64), - const_str(@str), - const_binary(@[u8]), + const_str(InternedString), + const_binary(Rc<~[u8]>), const_bool(bool) } @@ -508,15 +509,15 @@ pub fn eval_const_expr_partial(tcx: &T, e: &Expr) pub fn lit_to_const(lit: &Lit) -> const_val { match lit.node { - LitStr(s, _) => const_str(s), - LitBinary(data) => const_binary(data), + LitStr(ref s, _) => const_str((*s).clone()), + LitBinary(ref data) => const_binary(data.clone()), LitChar(n) => const_uint(n as u64), LitInt(n, _) => const_int(n), LitUint(n, _) => const_uint(n), LitIntUnsuffixed(n) => const_int(n), - LitFloat(n, _) => const_float(from_str::(n).unwrap() as f64), - LitFloatUnsuffixed(n) => - const_float(from_str::(n).unwrap() as f64), + LitFloat(ref n, _) | LitFloatUnsuffixed(ref n) => { + const_float(from_str::(n.get()).unwrap() as f64) + } LitNil => const_int(0i64), LitBool(b) => const_bool(b) } @@ -530,7 +531,7 @@ pub fn compare_const_vals(a: &const_val, b: &const_val) -> Option { (&const_int(a), &const_int(b)) => compare_vals(a, b), (&const_uint(a), &const_uint(b)) => compare_vals(a, b), (&const_float(a), &const_float(b)) => compare_vals(a, b), - (&const_str(a), &const_str(b)) => compare_vals(a, b), + (&const_str(ref a), &const_str(ref b)) => compare_vals(a, b), (&const_bool(a), &const_bool(b)) => compare_vals(a, b), _ => None } diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index c333bc58feee1..08ab8edf750ca 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -360,9 +360,10 @@ impl DeadVisitor { fn warn_dead_code(&mut self, id: ast::NodeId, span: codemap::Span, ident: &ast::Ident) { + let string = token::get_ident(ident.name); self.tcx.sess.add_lint(DeadCode, id, span, format!("code is never used: `{}`", - token::ident_to_str(ident))); + string.get())); } } diff --git a/src/librustc/middle/kind.rs b/src/librustc/middle/kind.rs index c05ca3e876950..0d6dc6572c36e 100644 --- a/src/librustc/middle/kind.rs +++ b/src/librustc/middle/kind.rs @@ -277,7 +277,7 @@ pub fn check_expr(cx: &mut Context, e: &Expr) { ExprPath(_) => { let did = ast_util::def_id_of_def(def_map.get() .get_copy(&e.id)); - ty::lookup_item_type(cx.tcx, did).generics.type_param_defs + ty::lookup_item_type(cx.tcx, did).generics.type_param_defs.clone() } _ => { // Type substitutions should only occur on paths and @@ -289,6 +289,7 @@ pub fn check_expr(cx: &mut Context, e: &Expr) { "non path/method call expr has type substs??") } }; + let type_param_defs = type_param_defs.borrow(); if ts.len() != type_param_defs.len() { // Fail earlier to make debugging easier fail!("internal error: in kind::check_expr, length \ @@ -362,8 +363,8 @@ fn check_ty(cx: &mut Context, aty: &Ty) { for ts in r.iter() { let def_map = cx.tcx.def_map.borrow(); let did = ast_util::def_id_of_def(def_map.get().get_copy(&id)); - let type_param_defs = - ty::lookup_item_type(cx.tcx, did).generics.type_param_defs; + let generics = ty::lookup_item_type(cx.tcx, did).generics; + let type_param_defs = generics.type_param_defs(); for (&ty, type_param_def) in ts.iter().zip(type_param_defs.iter()) { check_typaram_bounds(cx, aty.id, aty.span, ty, type_param_def) } diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index 82c927a1c2d58..2b100ffa4c98d 100644 --- 
a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -26,8 +26,9 @@ use middle::ty::{BuiltinBound, BoundFreeze, BoundPod, BoundSend, BoundSized}; use syntax::ast; use syntax::ast_util::local_def; use syntax::attr::AttrMetaMethods; -use syntax::visit; +use syntax::parse::token::InternedString; use syntax::visit::Visitor; +use syntax::visit; use std::hashmap::HashMap; use std::iter::Enumerate; @@ -182,11 +183,11 @@ impl LanguageItemCollector { } } -pub fn extract(attrs: &[ast::Attribute]) -> Option<@str> { +pub fn extract(attrs: &[ast::Attribute]) -> Option { for attribute in attrs.iter() { match attribute.name_str_pair() { - Some((key, value)) if "lang" == key => { - return Some(value); + Some((ref key, ref value)) if key.equiv(&("lang")) => { + return Some((*value).clone()); } Some(..) | None => {} } diff --git a/src/librustc/middle/lint.rs b/src/librustc/middle/lint.rs index 7cb549d91a8a1..e674af6b3b3e3 100644 --- a/src/librustc/middle/lint.rs +++ b/src/librustc/middle/lint.rs @@ -34,18 +34,17 @@ //! Context itself, span_lint should be used instead of add_lint. use driver::session; +use metadata::csearch; use middle::dead::DEAD_CODE_LINT_STR; +use middle::pat_util; use middle::privacy; use middle::trans::adt; // for `adt::is_ffi_safe` use middle::ty; +use middle::typeck::astconv::{ast_ty_to_ty, AstConv}; +use middle::typeck::infer; use middle::typeck; -use middle::pat_util; -use metadata::csearch; -use util::ppaux::{ty_to_str}; use std::to_str::ToStr; - -use middle::typeck::infer; -use middle::typeck::astconv::{ast_ty_to_ty, AstConv}; +use util::ppaux::{ty_to_str}; use std::cmp; use std::hashmap::HashMap; @@ -59,13 +58,14 @@ use std::u64; use std::u8; use extra::smallintmap::SmallIntMap; use syntax::ast_map; -use syntax::attr; +use syntax::ast_util::IdVisitingOperation; use syntax::attr::{AttrMetaMethods, AttributeMethods}; +use syntax::attr; use syntax::codemap::Span; +use syntax::parse::token::InternedString; use syntax::parse::token; -use syntax::{ast, ast_util, visit}; -use syntax::ast_util::IdVisitingOperation; use syntax::visit::Visitor; +use syntax::{ast, ast_util, visit}; #[deriving(Clone, Eq, Ord, TotalEq, TotalOrd)] pub enum Lint { @@ -540,10 +540,16 @@ impl<'a> Context<'a> { }); let old_is_doc_hidden = self.is_doc_hidden; - self.is_doc_hidden = self.is_doc_hidden || - attrs.iter().any(|attr| ("doc" == attr.name() && match attr.meta_item_list() - { None => false, - Some(l) => attr::contains_name(l, "hidden") })); + self.is_doc_hidden = + self.is_doc_hidden || + attrs.iter() + .any(|attr| { + attr.name().equiv(&("doc")) && + match attr.meta_item_list() { + None => false, + Some(l) => attr::contains_name(l, "hidden") + } + }); f(self); @@ -569,12 +575,12 @@ impl<'a> Context<'a> { // Return true if that's the case. Otherwise return false. 
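Both the lang_items.rs and lint.rs hunks above stop comparing attribute names with `==` on `@str` and instead compare the returned `InternedString` through `equiv`. A rough sketch of the new shape (the free function below is hypothetical; only `name()` and `equiv` are taken from the changes above):

    use syntax::ast;
    use syntax::attr::AttrMetaMethods;

    fn is_lang_attr(attr: &ast::Attribute) -> bool {
        // `name()` now yields an InternedString, so compare against a
        // literal via `equiv` rather than `==`.
        attr.name().equiv(&("lang"))
    }
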
pub fn each_lint(sess: session::Session, attrs: &[ast::Attribute], - f: |@ast::MetaItem, level, @str| -> bool) + f: |@ast::MetaItem, level, InternedString| -> bool) -> bool { let xs = [allow, warn, deny, forbid]; for &level in xs.iter() { let level_name = level_to_str(level); - for attr in attrs.iter().filter(|m| level_name == m.name()) { + for attr in attrs.iter().filter(|m| m.name().equiv(&level_name)) { let meta = attr.node.value; let metas = match meta.node { ast::MetaList(_, ref metas) => metas, @@ -585,8 +591,8 @@ pub fn each_lint(sess: session::Session, }; for meta in metas.iter() { match meta.node { - ast::MetaWord(lintname) => { - if !f(*meta, level, lintname) { + ast::MetaWord(ref lintname) => { + if !f(*meta, level, (*lintname).clone()) { return false; } } @@ -603,15 +609,17 @@ pub fn each_lint(sess: session::Session, // Check from a list of attributes if it contains the appropriate // `#[level(lintname)]` attribute (e.g. `#[allow(dead_code)]). pub fn contains_lint(attrs: &[ast::Attribute], - level: level, lintname: &'static str) -> bool { + level: level, + lintname: &'static str) + -> bool { let level_name = level_to_str(level); - for attr in attrs.iter().filter(|m| level_name == m.name()) { + for attr in attrs.iter().filter(|m| m.name().equiv(&level_name)) { if attr.meta_item_list().is_none() { continue } let list = attr.meta_item_list().unwrap(); for meta_item in list.iter() { - if lintname == meta_item.name() { + if meta_item.name().equiv(&lintname) { return true; } } @@ -879,8 +887,7 @@ fn check_heap_type(cx: &Context, span: Span, ty: ty::t) { let mut n_uniq = 0; ty::fold_ty(cx.tcx, ty, |t| { match ty::get(t).sty { - ty::ty_box(_) | ty::ty_str(ty::vstore_box) | - ty::ty_vec(_, ty::vstore_box) | + ty::ty_box(_) | ty::ty_trait(_, _, ty::BoxTraitStore, _, _) => { n_box += 1; } @@ -1240,8 +1247,7 @@ fn check_unnecessary_allocation(cx: &Context, e: &ast::Expr) { // Warn if string and vector literals with sigils, or boxing expressions, // are immediately borrowed. let allocation = match e.node { - ast::ExprVstore(e2, ast::ExprVstoreUniq) | - ast::ExprVstore(e2, ast::ExprVstoreBox) => { + ast::ExprVstore(e2, ast::ExprVstoreUniq) => { match e2.node { ast::ExprLit(lit) if ast_util::lit_is_str(lit) => { VectorAllocation @@ -1314,7 +1320,7 @@ fn check_missing_doc_attrs(cx: &Context, let has_doc = attrs.iter().any(|a| { match a.node.value.node { - ast::MetaNameValue(ref name, _) if "doc" == *name => true, + ast::MetaNameValue(ref name, _) if name.equiv(&("doc")) => true, _ => false } }); diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 6a1fa488121bb..fb5e548e12f6e 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -120,6 +120,7 @@ use std::vec; use syntax::ast::*; use syntax::codemap::Span; use syntax::parse::token::special_idents; +use syntax::parse::token; use syntax::print::pprust::{expr_to_str, block_to_str}; use syntax::{visit, ast_util}; use syntax::visit::{Visitor, FnKind}; @@ -332,13 +333,14 @@ impl IrMaps { } } - pub fn variable_name(&self, var: Variable) -> @str { + pub fn variable_name(&self, var: Variable) -> ~str { let var_kinds = self.var_kinds.borrow(); match var_kinds.get()[var.get()] { Local(LocalInfo { ident: nm, .. 
}) | Arg(_, nm) => { - self.tcx.sess.str_of(nm) + let string = token::get_ident(nm.name); + string.get().to_str() }, - ImplicitRet => @"" + ImplicitRet => ~"" } } @@ -500,7 +502,7 @@ fn visit_expr(v: &mut LivenessVisitor, expr: &Expr, this: @IrMaps) { let capture_map = this.capture_map.borrow(); let cvs = capture_map.get().get(&expr.id); let mut call_caps = ~[]; - for cv in cvs.iter() { + for cv in cvs.borrow().iter() { match moves::moved_variable_node_id_from_def(cv.def) { Some(rv) => { let cv_ln = this.add_live_node(FreeVarNode(cv.span)); @@ -1669,7 +1671,7 @@ impl Liveness { } } - pub fn should_warn(&self, var: Variable) -> Option<@str> { + pub fn should_warn(&self, var: Variable) -> Option<~str> { let name = self.ir.variable_name(var); if name.len() == 0 || name[0] == ('_' as u8) { None } else { Some(name) } } diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index ce1840283b2e4..72fd98e56ed67 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -178,9 +178,7 @@ pub fn opt_deref_kind(t: ty::t) -> Option { } ty::ty_box(_) | - ty::ty_vec(_, ty::vstore_box) | - ty::ty_trait(_, _, ty::BoxTraitStore, _, _) | - ty::ty_str(ty::vstore_box) => { + ty::ty_trait(_, _, ty::BoxTraitStore, _, _) => { Some(deref_ptr(gc_ptr)) } @@ -1233,7 +1231,10 @@ pub fn ptr_sigil(ptr: PointerKind) -> ~str { impl Repr for InteriorKind { fn repr(&self, _tcx: ty::ctxt) -> ~str { match *self { - InteriorField(NamedField(fld)) => token::interner_get(fld).to_owned(), + InteriorField(NamedField(fld)) => { + let string = token::get_ident(fld); + string.get().to_owned() + } InteriorField(PositionalField(i)) => format!("\\#{:?}", i), InteriorElement(_) => ~"[]", } diff --git a/src/librustc/middle/moves.rs b/src/librustc/middle/moves.rs index b0171eafeb3fa..d73d7f3d6f1db 100644 --- a/src/librustc/middle/moves.rs +++ b/src/librustc/middle/moves.rs @@ -136,9 +136,9 @@ use util::ppaux::Repr; use util::common::indenter; use util::ppaux::UserString; -use std::at_vec; use std::cell::RefCell; use std::hashmap::{HashSet, HashMap}; +use std::rc::Rc; use syntax::ast::*; use syntax::ast_util; use syntax::visit; @@ -159,7 +159,7 @@ pub struct CaptureVar { mode: CaptureMode // How variable is being accessed } -pub type CaptureMap = @RefCell>; +pub type CaptureMap = @RefCell>>; pub type MovesMap = @RefCell>; @@ -681,23 +681,22 @@ impl VisitContext { self.consume_expr(arg_expr) } - pub fn compute_captures(&mut self, fn_expr_id: NodeId) -> @[CaptureVar] { + pub fn compute_captures(&mut self, fn_expr_id: NodeId) -> Rc<~[CaptureVar]> { debug!("compute_capture_vars(fn_expr_id={:?})", fn_expr_id); let _indenter = indenter(); let fn_ty = ty::node_id_to_type(self.tcx, fn_expr_id); let sigil = ty::ty_closure_sigil(fn_ty); let freevars = freevars::get_freevars(self.tcx, fn_expr_id); - if sigil == BorrowedSigil { + let v = if sigil == BorrowedSigil { // || captures everything by ref - at_vec::from_fn(freevars.len(), |i| { - let fvar = &freevars[i]; - CaptureVar {def: fvar.def, span: fvar.span, mode: CapRef} - }) + freevars.iter() + .map(|fvar| CaptureVar {def: fvar.def, span: fvar.span, mode: CapRef}) + .collect() } else { // @fn() and ~fn() capture by copy or by move depending on type - at_vec::from_fn(freevars.len(), |i| { - let fvar = &freevars[i]; + freevars.iter() + .map(|fvar| { let fvar_def_id = ast_util::def_id_of_def(fvar.def).node; let fvar_ty = ty::node_id_to_type(self.tcx, fvar_def_id); debug!("fvar_def_id={:?} fvar_ty={}", @@ -708,7 
+707,9 @@ impl VisitContext { CapCopy }; CaptureVar {def: fvar.def, span: fvar.span, mode:mode} - }) - } + + }).collect() + }; + Rc::new(v) } } diff --git a/src/librustc/middle/privacy.rs b/src/librustc/middle/privacy.rs index 2562c34b54b00..ae1b71f5ccad1 100644 --- a/src/librustc/middle/privacy.rs +++ b/src/librustc/middle/privacy.rs @@ -530,8 +530,10 @@ impl<'a> PrivacyVisitor<'a> { ast::ItemTrait(..) => "trait", _ => return false, }; - let msg = format!("{} `{}` is private", desc, - token::ident_to_str(&item.ident)); + let string = token::get_ident(item.ident.name); + let msg = format!("{} `{}` is private", + desc, + string.get()); self.tcx.sess.span_note(span, msg); } Some(..) | None => {} @@ -588,8 +590,10 @@ impl<'a> PrivacyVisitor<'a> { if struct_vis != ast::Public && field.vis == ast::Public { break } if !is_local(field.id) || !self.private_accessible(field.id.node) { - self.tcx.sess.span_err(span, format!("field `{}` is private", - token::ident_to_str(&ident))); + let string = token::get_ident(ident.name); + self.tcx.sess.span_err(span, + format!("field `{}` is private", + string.get())) } break; } @@ -603,8 +607,11 @@ impl<'a> PrivacyVisitor<'a> { let method_id = ty::method(self.tcx, method_id).provided_source .unwrap_or(method_id); - self.ensure_public(span, method_id, None, - format!("method `{}`", token::ident_to_str(name))); + let string = token::get_ident(name.name); + self.ensure_public(span, + method_id, + None, + format!("method `{}`", string.get())); } // Checks that a path is in scope. @@ -617,10 +624,17 @@ impl<'a> PrivacyVisitor<'a> { match *self.last_private_map.get(&path_id) { resolve::AllPublic => {}, resolve::DependsOn(def) => { - let name = token::ident_to_str(&path.segments.last().unwrap() - .identifier); - self.ensure_public(span, def, Some(origdid), - format!("{} `{}`", tyname, name)); + let name = token::get_ident(path.segments + .last() + .unwrap() + .identifier + .name); + self.ensure_public(span, + def, + Some(origdid), + format!("{} `{}`", + tyname, + name.get())); } } }; diff --git a/src/librustc/middle/resolve.rs b/src/librustc/middle/resolve.rs index c28306a7aaca3..6ee44c03a8041 100644 --- a/src/librustc/middle/resolve.rs +++ b/src/librustc/middle/resolve.rs @@ -20,9 +20,8 @@ use syntax::ast::*; use syntax::ast; use syntax::ast_util::{def_id_of_def, local_def, mtwt_resolve}; use syntax::ast_util::{path_to_ident, walk_pat, trait_method_to_ty_method}; +use syntax::parse::token::{IdentInterner, special_idents}; use syntax::parse::token; -use syntax::parse::token::{IdentInterner, interner_get}; -use syntax::parse::token::special_idents; use syntax::print::pprust::path_to_str; use syntax::codemap::{Span, DUMMY_SP, Pos}; use syntax::opt_vec::OptVec; @@ -53,7 +52,7 @@ pub type TraitMap = HashMap>; pub type ExportMap2 = @RefCell>; pub struct Export2 { - name: @str, // The name of the target. + name: ~str, // The name of the target. def_id: DefId, // The definition of the target. } @@ -1894,8 +1893,9 @@ impl Resolver { csearch::each_child_of_item(self.session.cstore, def_id, |def_like, child_ident, visibility| { + let child_ident_string = token::get_ident(child_ident.name); debug!("(populating external module) ... 
found ident: {}", - token::ident_to_str(&child_ident)); + child_ident_string.get()); self.build_reduced_graph_for_external_crate_def(module, def_like, child_ident, @@ -2114,24 +2114,26 @@ impl Resolver { } fn import_directive_subclass_to_str(&mut self, - subclass: ImportDirectiveSubclass) - -> @str { + subclass: ImportDirectiveSubclass) + -> ~str { match subclass { - SingleImport(_target, source) => self.session.str_of(source), - GlobImport => @"*" + SingleImport(_target, source) => { + self.session.str_of(source).to_str() + } + GlobImport => ~"*" } } fn import_path_to_str(&mut self, - idents: &[Ident], - subclass: ImportDirectiveSubclass) - -> @str { + idents: &[Ident], + subclass: ImportDirectiveSubclass) + -> ~str { if idents.is_empty() { self.import_directive_subclass_to_str(subclass) } else { (format!("{}::{}", - self.idents_to_str(idents), - self.import_directive_subclass_to_str(subclass))).to_managed() + self.idents_to_str(idents), + self.import_directive_subclass_to_str(subclass))) } } @@ -2584,7 +2586,7 @@ impl Resolver { debug!("(resolving glob import) writing resolution `{}` in `{}` \ to `{}`", - interner_get(name), + token::get_ident(name).get().to_str(), self.module_to_str(containing_module), self.module_to_str(module_)); @@ -3101,11 +3103,12 @@ impl Resolver { // top of the crate otherwise. let mut containing_module; let mut i; - if "self" == token::ident_to_str(&module_path[0]) { + let first_module_path_string = token::get_ident(module_path[0].name); + if "self" == first_module_path_string.get() { containing_module = self.get_nearest_normal_module_parent_or_self(module_); i = 1; - } else if "super" == token::ident_to_str(&module_path[0]) { + } else if "super" == first_module_path_string.get() { containing_module = self.get_nearest_normal_module_parent_or_self(module_); i = 0; // We'll handle `super` below. @@ -3114,8 +3117,11 @@ impl Resolver { } // Now loop through all the `super`s we find. 
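A detail worth noting in the resolve.rs conversion above: `token::get_ident` hands back an owning `InternedString`, so the result is usually bound to a local and `.get()` then borrows a `&str` out of it; when the borrow is consumed immediately, the call can be chained. Roughly (both helper functions here are illustrative, not part of the patch):

    use syntax::ast;
    use syntax::parse::token;

    fn is_self_segment(ident: ast::Ident) -> bool {
        // Keep the InternedString alive in a local before borrowing from it.
        let string = token::get_ident(ident.name);
        "self" == string.get()
    }

    fn ident_to_owned(ident: ast::Ident) -> ~str {
        // The borrow is consumed immediately, so chaining is fine here.
        token::get_ident(ident.name).get().to_str()
    }
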
- while i < module_path.len() && - "super" == token::ident_to_str(&module_path[i]) { + while i < module_path.len() { + let string = token::get_ident(module_path[i].name); + if "super" != string.get() { + break + } debug!("(resolving module prefix) resolving `super` at {}", self.module_to_str(containing_module)); match self.get_nearest_normal_module_parent(containing_module) { @@ -3354,10 +3360,10 @@ impl Resolver { match namebindings.def_for_namespace(ns) { Some(d) => { debug!("(computing exports) YES: export '{}' => {:?}", - interner_get(name), + token::get_ident(name).get().to_str(), def_id_of_def(d)); exports2.push(Export2 { - name: interner_get(name), + name: token::get_ident(name).get().to_str(), def_id: def_id_of_def(d) }); } @@ -3380,7 +3386,7 @@ impl Resolver { match importresolution.target_for_namespace(ns) { Some(target) => { debug!("(computing exports) maybe export '{}'", - interner_get(*name)); + token::get_ident(*name).get().to_str()); self.add_exports_of_namebindings(exports2, *name, target.bindings, @@ -4155,19 +4161,23 @@ impl Resolver { for (&key, &binding_0) in map_0.iter() { match map_i.find(&key) { None => { + let string = token::get_ident(key); self.resolve_error( p.span, format!("variable `{}` from pattern \\#1 is \ not bound in pattern \\#{}", - interner_get(key), i + 1)); + string.get(), + i + 1)); } Some(binding_i) => { if binding_0.binding_mode != binding_i.binding_mode { + let string = token::get_ident(key); self.resolve_error( binding_i.span, format!("variable `{}` is bound with different \ mode in pattern \\#{} than in pattern \\#1", - interner_get(key), i + 1)); + string.get(), + i + 1)); } } } @@ -4175,11 +4185,13 @@ impl Resolver { for (&key, &binding) in map_i.iter() { if !map_0.contains_key(&key) { + let string = token::get_ident(key); self.resolve_error( binding.span, format!("variable `{}` from pattern \\#{} is \ not bound in pattern \\#1", - interner_get(key), i + 1)); + string.get(), + i + 1)); } } } @@ -4371,9 +4383,10 @@ impl Resolver { match self.resolve_bare_identifier_pattern(ident) { FoundStructOrEnumVariant(def, lp) if mode == RefutableMode => { + let string = token::get_ident(renamed); debug!("(resolving pattern) resolving `{}` to \ struct or enum variant", - interner_get(renamed)); + string.get()); self.enforce_default_binding_mode( pattern, @@ -4382,17 +4395,19 @@ impl Resolver { self.record_def(pattern.id, (def, lp)); } FoundStructOrEnumVariant(..) 
=> { + let string = token::get_ident(renamed); self.resolve_error(pattern.span, format!("declaration of `{}` \ shadows an enum \ variant or unit-like \ struct in scope", - interner_get(renamed))); + string.get())); } FoundConst(def, lp) if mode == RefutableMode => { + let string = token::get_ident(renamed); debug!("(resolving pattern) resolving `{}` to \ constant", - interner_get(renamed)); + string.get()); self.enforce_default_binding_mode( pattern, @@ -4406,8 +4421,9 @@ impl Resolver { allowed here"); } BareIdentifierPatternUnresolved => { + let string = token::get_ident(renamed); debug!("(resolving pattern) binding `{}`", - interner_get(renamed)); + string.get()); let def = match mode { RefutableMode => { @@ -5009,10 +5025,10 @@ impl Resolver { } fn find_best_match_for_name(&mut self, name: &str, max_distance: uint) - -> Option<@str> { + -> Option<~str> { let this = &mut *self; - let mut maybes: ~[@str] = ~[]; + let mut maybes: ~[~str] = ~[]; let mut values: ~[uint] = ~[]; let mut j = { @@ -5024,14 +5040,15 @@ impl Resolver { let value_ribs = this.value_ribs.borrow(); let bindings = value_ribs.get()[j].bindings.borrow(); for (&k, _) in bindings.get().iter() { - maybes.push(interner_get(k)); + let string = token::get_ident(k); + maybes.push(string.get().to_str()); values.push(uint::MAX); } } let mut smallest = 0; - for (i, &other) in maybes.iter().enumerate() { - values[i] = name.lev_distance(other); + for (i, other) in maybes.iter().enumerate() { + values[i] = name.lev_distance(*other); if values[i] <= values[smallest] { smallest = i; @@ -5190,7 +5207,9 @@ impl Resolver { self.resolve_error(expr.span, format!("use of undeclared label \ `{}`", - interner_get(label))), + token::get_ident(label) + .get() + .to_str())), Some(DlDef(def @ DefLabel(_))) => { // FIXME: is AllPublic correct? 
                        self.record_def(expr.id, (def, AllPublic))
@@ -5510,7 +5529,7 @@ impl Resolver {
         self.populate_module_if_necessary(module_);
         let children = module_.children.borrow();
         for (&name, _) in children.get().iter() {
-            debug!("* {}", interner_get(name));
+            debug!("* {}", token::get_ident(name).get().to_str());
         }

         debug!("Import resolutions:");
@@ -5534,7 +5553,7 @@ impl Resolver {
                 }
             }

-            debug!("* {}:{}{}", interner_get(*name),
+            debug!("* {}:{}{}", token::get_ident(*name).get().to_str(),
                    value_repr, type_repr);
         }
     }
diff --git a/src/librustc/middle/subst.rs b/src/librustc/middle/subst.rs
index 067082992c6fc..c4a30171687d2 100644
--- a/src/librustc/middle/subst.rs
+++ b/src/librustc/middle/subst.rs
@@ -13,8 +13,9 @@
 use middle::ty;
 use middle::ty_fold;
 use middle::ty_fold::TypeFolder;
+
+use std::rc::Rc;
 use syntax::opt_vec::OptVec;
-use std::at_vec;

 ///////////////////////////////////////////////////////////////////////////
 // Public trait `Subst`
@@ -84,10 +85,9 @@ impl<T:Subst> Subst for ~[T] {
         self.map(|t| t.subst(tcx, substs))
     }
 }
-
-impl<T:Subst> Subst for @[T] {
-    fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> @[T] {
-        at_vec::map(*self, |t| t.subst(tcx, substs))
+impl<T:Subst> Subst for Rc<T> {
+    fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Rc<T> {
+        Rc::new(self.borrow().subst(tcx, substs))
     }
 }

diff --git a/src/librustc/middle/trans/_match.rs b/src/librustc/middle/trans/_match.rs
index 7bc37d4e69512..135b16b22abd2 100644
--- a/src/librustc/middle/trans/_match.rs
+++ b/src/librustc/middle/trans/_match.rs
@@ -229,6 +229,7 @@ use syntax::ast::Ident;
 use syntax::ast_util::path_to_ident;
 use syntax::ast_util;
 use syntax::codemap::{Span, DUMMY_SP};
+use syntax::parse::token::InternedString;

 // An option identifying a literal: either a unit-like struct or an
 // expression.
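The new `Subst` impl for `Rc` in subst.rs above forwards substitution through the shared contents and re-wraps the result, since the shared value itself is never mutated. The same forwarding shape, shown on a hypothetical stand-in trait so it reads on its own:

    use std::rc::Rc;

    // Stand-in trait; `Subst` plays this role in the patch above.
    trait Rename {
        fn rename(&self) -> Self;
    }

    impl<T: Rename> Rename for Rc<T> {
        fn rename(&self) -> Rc<T> {
            // Borrow the shared value, transform it, and wrap the result in
            // a fresh Rc; the original allocation is left untouched.
            Rc::new(self.borrow().rename())
        }
    }
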
@@ -1031,7 +1032,6 @@ fn match_datum(bcx: &Block, fn extract_vec_elems<'a>( bcx: &'a Block<'a>, - pat_span: Span, pat_id: ast::NodeId, elem_count: uint, slice: Option, @@ -1040,7 +1040,7 @@ fn extract_vec_elems<'a>( -> ExtractedBlock<'a> { let _icx = push_ctxt("match::extract_vec_elems"); let vec_datum = match_datum(bcx, val, pat_id); - let (bcx, base, len) = vec_datum.get_vec_base_and_len(bcx, pat_span, pat_id, 0); + let (base, len) = vec_datum.get_vec_base_and_len(bcx); let vt = tvec::vec_types(bcx, node_id_type(bcx, pat_id)); let mut elems = vec::from_fn(elem_count, |i| { @@ -1174,7 +1174,7 @@ fn any_tuple_struct_pat(bcx: &Block, m: &[Match], col: uint) -> bool { struct DynamicFailureHandler<'a> { bcx: &'a Block<'a>, sp: Span, - msg: @str, + msg: InternedString, finished: @Cell>, } @@ -1187,7 +1187,7 @@ impl<'a> DynamicFailureHandler<'a> { let fcx = self.bcx.fcx; let fail_cx = fcx.new_block(false, "case_fallthrough", None); - controlflow::trans_fail(fail_cx, Some(self.sp), self.msg); + controlflow::trans_fail(fail_cx, Some(self.sp), self.msg.clone()); self.finished.set(Some(fail_cx.llbb)); fail_cx.llbb } @@ -1511,13 +1511,11 @@ fn compile_submatch_continue<'r, vals.slice(col + 1u, vals.len())); let ccx = bcx.fcx.ccx; let mut pat_id = 0; - let mut pat_span = DUMMY_SP; for br in m.iter() { // Find a real id (we're adding placeholder wildcard patterns, but // each column is guaranteed to have at least one real pattern) if pat_id == 0 { pat_id = br.pats[col].id; - pat_span = br.pats[col].span; } } @@ -1766,7 +1764,7 @@ fn compile_submatch_continue<'r, vec_len_ge(i) => (n + 1u, Some(i)), vec_len_eq => (n, None) }; - let args = extract_vec_elems(opt_cx, pat_span, pat_id, n, + let args = extract_vec_elems(opt_cx, pat_id, n, slice, val, test_val); size = args.vals.len(); unpacked = args.vals.clone(); @@ -1891,7 +1889,8 @@ fn trans_match_inner<'a>(scope_cx: &'a Block<'a>, let fail_handler = ~DynamicFailureHandler { bcx: scope_cx, sp: discr_expr.span, - msg: @"scrutinizing value that can't exist", + msg: InternedString::new("scrutinizing value that can't \ + exist"), finished: fail_cx, }; DynamicFailureHandlerClass(fail_handler) diff --git a/src/librustc/middle/trans/asm.rs b/src/librustc/middle/trans/asm.rs index bae35f68ada56..db99bd53704eb 100644 --- a/src/librustc/middle/trans/asm.rs +++ b/src/librustc/middle/trans/asm.rs @@ -38,8 +38,8 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm) let temp_scope = fcx.push_custom_cleanup_scope(); // Prepare the output operands - let outputs = ia.outputs.map(|&(c, out)| { - constraints.push(c); + let outputs = ia.outputs.map(|&(ref c, out)| { + constraints.push((*c).clone()); let out_datum = unpack_datum!(bcx, expr::trans(bcx, out)); output_types.push(type_of::type_of(bcx.ccx(), out_datum.ty)); @@ -48,8 +48,8 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm) }); // Now the input operands - let inputs = ia.inputs.map(|&(c, input)| { - constraints.push(c); + let inputs = ia.inputs.map(|&(ref c, input)| { + constraints.push((*c).clone()); unpack_result!(bcx, { callee::trans_arg_expr(bcx, @@ -63,13 +63,13 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm) // no failure occurred preparing operands, no need to cleanup fcx.pop_custom_cleanup_scope(temp_scope); - let mut constraints = constraints.connect(","); + let mut constraints = constraints.map(|s| s.get().to_str()).connect(","); let mut clobbers = getClobbers(); - if !ia.clobbers.is_empty() && !clobbers.is_empty() { - clobbers = 
format!("{},{}", ia.clobbers, clobbers); + if !ia.clobbers.get().is_empty() && !clobbers.is_empty() { + clobbers = format!("{},{}", ia.clobbers.get(), clobbers); } else { - clobbers.push_str(ia.clobbers); + clobbers.push_str(ia.clobbers.get()); } // Add the clobbers to our constraints list @@ -98,7 +98,7 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm) ast::AsmIntel => lib::llvm::AD_Intel }; - let r = ia.asm.with_c_str(|a| { + let r = ia.asm.get().with_c_str(|a| { constraints.with_c_str(|c| { InlineAsmCall(bcx, a, c, inputs, output_type, ia.volatile, ia.alignstack, dialect) }) diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index 38b34cd13bc6e..7bd2653f86990 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -76,16 +76,17 @@ use std::hashmap::HashMap; use std::libc::c_uint; use std::vec; use std::local_data; +use syntax::abi::{X86, X86_64, Arm, Mips, Rust, RustIntrinsic, OsWin32}; use syntax::ast_map::{PathName, PathPrettyName, path_elem_to_str}; use syntax::ast_util::{local_def, is_local}; +use syntax::attr::AttrMetaMethods; use syntax::attr; use syntax::codemap::Span; +use syntax::parse::token::InternedString; use syntax::parse::token; -use syntax::{ast, ast_util, ast_map}; -use syntax::attr::AttrMetaMethods; -use syntax::abi::{X86, X86_64, Arm, Mips, Rust, RustIntrinsic, OsWin32}; -use syntax::visit; use syntax::visit::Visitor; +use syntax::visit; +use syntax::{ast, ast_util, ast_map}; pub use middle::trans::context::task_llcx; @@ -359,7 +360,7 @@ pub fn malloc_raw_dyn<'a>( None); rslt(r.bcx, PointerCast(r.bcx, r.val, llty_value.ptr_to())) } else { - // we treat ~fn, @fn and @[] as @ here, which isn't ideal + // we treat ~fn as @ here, which isn't ideal let langcall = match heap { heap_managed => { require_alloc_fn(bcx, t, MallocFnLangItem) @@ -509,7 +510,7 @@ pub fn set_no_split_stack(f: ValueRef) { // Double-check that we never ask LLVM to declare the same symbol twice. It // silently mangles such symbols, breaking our linkage model. 
-pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: @str) { +pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: ~str) { let mut all_llvm_symbols = ccx.all_llvm_symbols.borrow_mut(); if all_llvm_symbols.get().contains(&sym) { ccx.sess.bug(~"duplicate LLVM symbol: " + sym); @@ -604,7 +605,8 @@ pub fn compare_scalar_types<'a>( rslt( controlflow::trans_fail( cx, None, - @"attempt to compare values of type type"), + InternedString::new("attempt to compare values of type \ + type")), C_nil()) } _ => { @@ -856,9 +858,9 @@ pub fn fail_if_zero<'a>( rhs_t: ty::t) -> &'a Block<'a> { let text = if divrem == ast::BiDiv { - @"attempted to divide by zero" + "attempted to divide by zero" } else { - @"attempted remainder with a divisor of zero" + "attempted remainder with a divisor of zero" }; let is_zero = match ty::get(rhs_t).sty { ty::ty_int(t) => { @@ -875,7 +877,7 @@ pub fn fail_if_zero<'a>( } }; with_cond(cx, is_zero, |bcx| { - controlflow::trans_fail(bcx, Some(span), text) + controlflow::trans_fail(bcx, Some(span), InternedString::new(text)) }) } @@ -1951,7 +1953,7 @@ fn exported_name(ccx: &CrateContext, path: ast_map::Path, ty: ty::t, attrs: &[ast::Attribute]) -> ~str { match attr::first_attr_value_str_by_name(attrs, "export_name") { // Use provided name - Some(name) => name.to_owned(), + Some(name) => name.get().to_owned(), // Don't mangle _ if attr::contains_name(attrs, "no_mangle") @@ -2099,7 +2101,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef { match attr::first_attr_value_str_by_name(i.attrs, "link_section") { Some(sect) => unsafe { - sect.with_c_str(|buf| { + sect.get().with_c_str(|buf| { llvm::LLVMSetSection(v, buf); }) }, @@ -2161,9 +2163,9 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef { ccx.crate_map } } else { - let ident = foreign::link_name(ccx, ni); + let ident = foreign::link_name(ni); unsafe { - ident.with_c_str(|buf| { + ident.get().with_c_str(|buf| { let ty = type_of(ccx, ty); llvm::LLVMAddGlobal(ccx.llmod, ty.to_ref(), buf) @@ -2476,21 +2478,21 @@ pub fn create_module_map(ccx: &CrateContext) -> (ValueRef, uint) { let mut keys = ~[]; let module_data = ccx.module_data.borrow(); for (k, _) in module_data.get().iter() { - keys.push(k.to_managed()); + keys.push(k.clone()); } keys }; for key in keys.iter() { - let llstrval = C_str_slice(ccx, *key); - let module_data = ccx.module_data.borrow(); - let val = *module_data.get().find_equiv(key).unwrap(); - let v_ptr = p2i(ccx, val); - let elt = C_struct([ - llstrval, - v_ptr - ], false); - elts.push(elt); + let llstrval = C_str_slice(ccx, token::intern_and_get_ident(*key)); + let module_data = ccx.module_data.borrow(); + let val = *module_data.get().find_equiv(key).unwrap(); + let v_ptr = p2i(ccx, val); + let elt = C_struct([ + llstrval, + v_ptr + ], false); + elts.push(elt); } unsafe { llvm::LLVMSetInitializer(map, C_array(elttype, elts)); diff --git a/src/librustc/middle/trans/callee.rs b/src/librustc/middle/trans/callee.rs index e10c7af7f313d..80d92d856864c 100644 --- a/src/librustc/middle/trans/callee.rs +++ b/src/librustc/middle/trans/callee.rs @@ -220,7 +220,7 @@ fn resolve_default_method_vtables(bcx: &Block, bcx.tcx(), param_substs, impl_res.trait_vtables); // Now we pull any vtables for parameters on the actual method. 
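The base.rs changes above use the two ways of obtaining an `InternedString`: string literals go through `InternedString::new`, while strings only known at runtime (module-map keys, file names) are interned with `token::intern_and_get_ident`. A small sketch of both paths; the helper functions are hypothetical, and `intern_and_get_ident` is assumed to accept a `&str`, as the call sites above suggest:

    use syntax::parse::token;
    use syntax::parse::token::InternedString;

    // Static failure messages can be wrapped directly.
    fn div_or_rem_msg(is_rem: bool) -> InternedString {
        if is_rem {
            InternedString::new("attempted remainder with a divisor of zero")
        } else {
            InternedString::new("attempted to divide by zero")
        }
    }

    // Runtime-built strings must be pushed through the interner first.
    fn intern_file_name(name: &str) -> InternedString {
        token::intern_and_get_ident(name)
    }
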
- let num_method_vtables = method.generics.type_param_defs.len(); + let num_method_vtables = method.generics.type_param_defs().len(); let method_vtables = match impl_vtables { Some(vtables) => { let num_impl_type_parameters = diff --git a/src/librustc/middle/trans/closure.rs b/src/librustc/middle/trans/closure.rs index d1979c3fee71d..c9631a72ee7c9 100644 --- a/src/librustc/middle/trans/closure.rs +++ b/src/librustc/middle/trans/closure.rs @@ -403,11 +403,11 @@ pub fn trans_expr_fn<'a>( let capture_map = ccx.maps.capture_map.borrow(); capture_map.get().get_copy(&user_id) }; - let ClosureResult {llbox, cdata_ty, bcx} = build_closure(bcx, cap_vars, sigil); + let ClosureResult {llbox, cdata_ty, bcx} = build_closure(bcx, *cap_vars.borrow(), sigil); trans_closure(ccx, sub_path, decl, body, llfn, bcx.fcx.param_substs, user_id, [], ty::ty_fn_ret(fty), - |bcx| load_environment(bcx, cdata_ty, cap_vars, sigil)); + |bcx| load_environment(bcx, cdata_ty, *cap_vars.borrow(), sigil)); fill_fn_pair(bcx, dest_addr, llfn, llbox); bcx diff --git a/src/librustc/middle/trans/common.rs b/src/librustc/middle/trans/common.rs index d35f9a28a835b..78affcc69e113 100644 --- a/src/librustc/middle/trans/common.rs +++ b/src/librustc/middle/trans/common.rs @@ -30,7 +30,6 @@ use middle::ty; use middle::typeck; use util::ppaux::Repr; - use arena::TypedArena; use std::c_str::ToCStr; use std::cast::transmute; @@ -41,6 +40,7 @@ use std::libc::{c_uint, c_longlong, c_ulonglong, c_char}; use syntax::ast::{Ident}; use syntax::ast_map::{Path, PathElem, PathPrettyName}; use syntax::codemap::Span; +use syntax::parse::token::InternedString; use syntax::parse::token; use syntax::{ast, ast_map}; @@ -446,8 +446,9 @@ impl<'a> Block<'a> { } pub fn sess(&self) -> Session { self.fcx.ccx.sess } - pub fn ident(&self, ident: Ident) -> @str { - token::ident_to_str(&ident) + pub fn ident(&self, ident: Ident) -> ~str { + let string = token::get_ident(ident.name); + string.get().to_str() } pub fn node_id_to_str(&self, id: ast::NodeId) -> ~str { @@ -597,18 +598,19 @@ pub fn C_u8(i: uint) -> ValueRef { // This is a 'c-like' raw string, which differs from // our boxed-and-length-annotated strings. -pub fn C_cstr(cx: &CrateContext, s: @str) -> ValueRef { +pub fn C_cstr(cx: &CrateContext, s: InternedString) -> ValueRef { unsafe { { let const_cstr_cache = cx.const_cstr_cache.borrow(); - match const_cstr_cache.get().find_equiv(&s) { + match const_cstr_cache.get().find(&s) { Some(&llval) => return llval, None => () } } let sc = llvm::LLVMConstStringInContext(cx.llcx, - s.as_ptr() as *c_char, s.len() as c_uint, + s.get().as_ptr() as *c_char, + s.get().len() as c_uint, False); let gsym = token::gensym("str"); @@ -627,9 +629,9 @@ pub fn C_cstr(cx: &CrateContext, s: @str) -> ValueRef { // NB: Do not use `do_spill_noroot` to make this into a constant string, or // you will be kicked off fast isel. See issue #4352 for an example of this. -pub fn C_str_slice(cx: &CrateContext, s: @str) -> ValueRef { +pub fn C_str_slice(cx: &CrateContext, s: InternedString) -> ValueRef { unsafe { - let len = s.len(); + let len = s.get().len(); let cs = llvm::LLVMConstPointerCast(C_cstr(cx, s), Type::i8p().to_ref()); C_struct([cs, C_uint(cx, len)], false) } @@ -766,7 +768,6 @@ pub fn mono_data_classify(t: ty::t) -> MonoDataClass { ty::ty_float(_) => MonoFloat, ty::ty_rptr(..) | ty::ty_uniq(..) | ty::ty_box(..) | ty::ty_str(ty::vstore_uniq) | ty::ty_vec(_, ty::vstore_uniq) | - ty::ty_str(ty::vstore_box) | ty::ty_vec(_, ty::vstore_box) | ty::ty_bare_fn(..) 
=> MonoNonNull, // Is that everything? Would closures or slices qualify? _ => MonoBits @@ -970,7 +971,8 @@ pub fn dummy_substs(tps: ~[ty::t]) -> ty::substs { pub fn filename_and_line_num_from_span(bcx: &Block, span: Span) -> (ValueRef, ValueRef) { let loc = bcx.sess().parse_sess.cm.lookup_char_pos(span.lo); - let filename_cstr = C_cstr(bcx.ccx(), loc.file.name); + let filename_cstr = C_cstr(bcx.ccx(), + token::intern_and_get_ident(loc.file.name)); let filename = build::PointerCast(bcx, filename_cstr, Type::i8p()); let line = C_int(bcx.ccx(), loc.line as int); (filename, line) diff --git a/src/librustc/middle/trans/consts.rs b/src/librustc/middle/trans/consts.rs index 79aa536a0dbdc..a8b0da8026689 100644 --- a/src/librustc/middle/trans/consts.rs +++ b/src/librustc/middle/trans/consts.rs @@ -57,12 +57,14 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit) ty_to_str(cx.tcx, lit_int_ty))) } } - ast::LitFloat(fs, t) => C_floating(fs, Type::float_from_ty(t)), - ast::LitFloatUnsuffixed(fs) => { + ast::LitFloat(ref fs, t) => { + C_floating(fs.get(), Type::float_from_ty(t)) + } + ast::LitFloatUnsuffixed(ref fs) => { let lit_float_ty = ty::node_id_to_type(cx.tcx, e.id); match ty::get(lit_float_ty).sty { ty::ty_float(t) => { - C_floating(fs, Type::float_from_ty(t)) + C_floating(fs.get(), Type::float_from_ty(t)) } _ => { cx.sess.span_bug(lit.span, @@ -72,8 +74,8 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit) } ast::LitBool(b) => C_bool(b), ast::LitNil => C_nil(), - ast::LitStr(s, _) => C_str_slice(cx, s), - ast::LitBinary(data) => C_binary_slice(cx, data), + ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()), + ast::LitBinary(ref data) => C_binary_slice(cx, *data.borrow()), } } @@ -312,7 +314,9 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr, unsafe { let _icx = push_ctxt("const_expr"); return match e.node { - ast::ExprLit(lit) => (consts::const_lit(cx, e, *lit), true), + ast::ExprLit(lit) => { + (consts::const_lit(cx, e, (*lit).clone()), true) + } ast::ExprBinary(_, b, e1, e2) => { let (te1, _) = const_expr(cx, e1, is_local); let (te2, _) = const_expr(cx, e2, is_local); diff --git a/src/librustc/middle/trans/context.rs b/src/librustc/middle/trans/context.rs index 8503ce9066bc3..2872d5566339b 100644 --- a/src/librustc/middle/trans/context.rs +++ b/src/librustc/middle/trans/context.rs @@ -19,12 +19,12 @@ use middle::resolve; use middle::trans::adt; use middle::trans::base; use middle::trans::builder::Builder; -use middle::trans::debuginfo; use middle::trans::common::{C_i32, C_null}; -use middle::ty; - +use middle::trans::common::{mono_id,ExternMap,tydesc_info,BuilderRef_res,Stats}; +use middle::trans::base::{decl_crate_map}; +use middle::trans::debuginfo; use middle::trans::type_::Type; - +use middle::ty; use util::sha2::Sha256; use std::cell::{Cell, RefCell}; @@ -33,10 +33,7 @@ use std::hashmap::{HashMap, HashSet}; use std::local_data; use std::libc::c_uint; use syntax::ast; - -use middle::trans::common::{mono_id,ExternMap,tydesc_info,BuilderRef_res,Stats}; - -use middle::trans::base::{decl_crate_map}; +use syntax::parse::token::InternedString; pub struct CrateContext { sess: session::Session, @@ -71,7 +68,7 @@ pub struct CrateContext { // Cache generated vtables vtables: RefCell>, // Cache of constant strings, - const_cstr_cache: RefCell>, + const_cstr_cache: RefCell>, // Reverse-direction for const ptrs cast from globals. 
// Key is an int, cast from a ValueRef holding a *T, @@ -99,8 +96,8 @@ pub struct CrateContext { llsizingtypes: RefCell>, adt_reprs: RefCell>, symbol_hasher: RefCell, - type_hashcodes: RefCell>, - all_llvm_symbols: RefCell>, + type_hashcodes: RefCell>, + all_llvm_symbols: RefCell>, tcx: ty::ctxt, maps: astencode::Maps, stats: @Stats, diff --git a/src/librustc/middle/trans/controlflow.rs b/src/librustc/middle/trans/controlflow.rs index 8c8c6829e49fd..d361c36fbdacd 100644 --- a/src/librustc/middle/trans/controlflow.rs +++ b/src/librustc/middle/trans/controlflow.rs @@ -28,6 +28,8 @@ use syntax::ast; use syntax::ast::Name; use syntax::ast_util; use syntax::codemap::Span; +use syntax::parse::token::InternedString; +use syntax::parse::token; use syntax::visit::Visitor; pub fn trans_stmt<'a>(cx: &'a Block<'a>, @@ -332,7 +334,7 @@ pub fn trans_fail_expr<'a>( unpack_datum!(bcx, expr::trans_to_lvalue(bcx, arg_expr, "fail")); if ty::type_is_str(arg_datum.ty) { - let (lldata, _) = arg_datum.get_vec_base_and_len_no_root(bcx); + let (lldata, _) = arg_datum.get_vec_base_and_len(bcx); return trans_fail_value(bcx, sp_opt, lldata); } else if bcx.unreachable.get() || ty::type_is_bot(arg_datum.ty) { return bcx; @@ -342,14 +344,14 @@ pub fn trans_fail_expr<'a>( ppaux::ty_to_str(tcx, arg_datum.ty)); } } - _ => trans_fail(bcx, sp_opt, @"explicit failure") + _ => trans_fail(bcx, sp_opt, InternedString::new("explicit failure")) } } pub fn trans_fail<'a>( bcx: &'a Block<'a>, sp_opt: Option, - fail_str: @str) + fail_str: InternedString) -> &'a Block<'a> { let _icx = push_ctxt("trans_fail"); let V_fail_str = C_cstr(bcx.ccx(), fail_str); @@ -367,11 +369,11 @@ fn trans_fail_value<'a>( Some(sp) => { let sess = bcx.sess(); let loc = sess.parse_sess.cm.lookup_char_pos(sp.lo); - (C_cstr(bcx.ccx(), loc.file.name), + (C_cstr(bcx.ccx(), token::intern_and_get_ident(loc.file.name)), loc.line as int) } None => { - (C_cstr(bcx.ccx(), @""), 0) + (C_cstr(bcx.ccx(), InternedString::new("")), 0) } }; let V_str = PointerCast(bcx, V_fail_str, Type::i8p()); diff --git a/src/librustc/middle/trans/datum.rs b/src/librustc/middle/trans/datum.rs index 4888c2ce4d5dc..2a2421077a8c6 100644 --- a/src/librustc/middle/trans/datum.rs +++ b/src/librustc/middle/trans/datum.rs @@ -528,49 +528,8 @@ impl Datum { } } - pub fn get_vec_base_and_byte_len<'a>( - &self, - mut bcx: &'a Block<'a>, - span: Span, - expr_id: ast::NodeId, - derefs: uint) - -> (&'a Block<'a>, ValueRef, ValueRef) { - //! Converts a vector into the slice pair. Performs rooting - //! and write guards checks. - - // only imp't for @[] and @str, but harmless - bcx = write_guard::root_and_write_guard(self, bcx, span, expr_id, derefs); - let (base, len) = self.get_vec_base_and_byte_len_no_root(bcx); - (bcx, base, len) - } - - pub fn get_vec_base_and_byte_len_no_root(&self, bcx: &Block) - -> (ValueRef, ValueRef) { - //! Converts a vector into the slice pair. Des not root - //! nor perform write guard checks. - - tvec::get_base_and_byte_len(bcx, self.val, self.ty) - } - - pub fn get_vec_base_and_len<'a>(&self, - mut bcx: &'a Block<'a>, - span: Span, - expr_id: ast::NodeId, - derefs: uint) - -> (&'a Block<'a>, ValueRef, ValueRef) { - //! Converts a vector into the slice pair. Performs rooting - //! and write guards checks. 
- - // only imp't for @[] and @str, but harmless - bcx = write_guard::root_and_write_guard(self, bcx, span, expr_id, derefs); - let (base, len) = self.get_vec_base_and_len_no_root(bcx); - (bcx, base, len) - } - - pub fn get_vec_base_and_len_no_root<'a>(&self, bcx: &'a Block<'a>) - -> (ValueRef, ValueRef) { - //! Converts a vector into the slice pair. Des not root - //! nor perform write guard checks. + pub fn get_vec_base_and_len<'a>(&self, bcx: &'a Block<'a>) -> (ValueRef, ValueRef) { + //! Converts a vector into the slice pair. tvec::get_base_and_len(bcx, self.val, self.ty) } diff --git a/src/librustc/middle/trans/debuginfo.rs b/src/librustc/middle/trans/debuginfo.rs index 36cc6f3afd16e..3cdb1c52d3c5b 100644 --- a/src/librustc/middle/trans/debuginfo.rs +++ b/src/librustc/middle/trans/debuginfo.rs @@ -622,7 +622,8 @@ pub fn create_function_debug_context(cx: &CrateContext, }; // get_template_parameters() will append a `<...>` clause to the function name if necessary. - let mut function_name = token::ident_to_str(&ident).to_owned(); + let function_name_string = token::get_ident(ident.name); + let mut function_name = function_name_string.get().to_owned(); let template_parameters = get_template_parameters(cx, generics, param_substs, @@ -791,7 +792,9 @@ pub fn create_function_debug_context(cx: &CrateContext, let ident = special_idents::type_self; - let param_metadata = token::ident_to_str(&ident).with_c_str(|name| { + let param_metadata_string = token::get_ident(ident.name); + let param_metadata = param_metadata_string.get() + .with_c_str(|name| { unsafe { llvm::LLVMDIBuilderCreateTemplateTypeParameter( DIB(cx), @@ -829,7 +832,9 @@ pub fn create_function_debug_context(cx: &CrateContext, // Again, only create type information if extra_debuginfo is enabled if cx.sess.opts.extra_debuginfo { let actual_type_metadata = type_metadata(cx, actual_type, codemap::DUMMY_SP); - let param_metadata = token::ident_to_str(&ident).with_c_str(|name| { + let param_metadata_string = token::get_ident(ident.name); + let param_metadata = param_metadata_string.get() + .with_c_str(|name| { unsafe { llvm::LLVMDIBuilderCreateTemplateTypeParameter( DIB(cx), @@ -931,10 +936,11 @@ fn declare_local(bcx: &Block, span: Span) { let cx: &CrateContext = bcx.ccx(); - let filename = span_start(cx, span).file.name; + let filename = span_start(cx, span).file.name.clone(); let file_metadata = file_metadata(cx, filename); - let name: &str = token::ident_to_str(&variable_ident); + let variable_ident_string = token::get_ident(variable_ident.name); + let name: &str = variable_ident_string.get(); let loc = span_start(cx, span); let type_metadata = type_metadata(cx, variable_type, span); @@ -1139,9 +1145,10 @@ impl MemberDescriptionFactory for StructMemberDescriptionFactory { -> ~[MemberDescription] { self.fields.map(|field| { let name = if field.ident.name == special_idents::unnamed_field.name { - @"" + ~"" } else { - token::ident_to_str(&field.ident) + let string = token::get_ident(field.ident.name); + string.get().to_str() }; MemberDescription { @@ -1165,7 +1172,7 @@ fn prepare_struct_metadata(cx: &CrateContext, let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id, span); - let file_name = span_start(cx, definition_span).file.name; + let file_name = span_start(cx, definition_span).file.name.clone(); let file_metadata = file_metadata(cx, file_name); let struct_metadata_stub = create_struct_stub(cx, @@ -1244,7 +1251,7 @@ impl MemberDescriptionFactory for TupleMemberDescriptionFactory { -> 
~[MemberDescription] { self.component_types.map(|&component_type| { MemberDescription { - name: @"", + name: ~"", llvm_type: type_of::type_of(cx, component_type), type_metadata: type_metadata(cx, component_type, self.span), offset: ComputedMemberOffset, @@ -1322,7 +1329,7 @@ impl MemberDescriptionFactory for GeneralMemberDescriptionFactory { self.file_metadata, codemap::DUMMY_SP); MemberDescription { - name: @"", + name: ~"", llvm_type: variant_llvm_type, type_metadata: variant_type_metadata, offset: FixedMemberOffset { bytes: 0 }, @@ -1332,7 +1339,7 @@ impl MemberDescriptionFactory for GeneralMemberDescriptionFactory { } struct EnumVariantMemberDescriptionFactory { - args: ~[(@str, ty::t)], + args: ~[(~str, ty::t)], discriminant_type_metadata: Option, span: Span, } @@ -1340,9 +1347,9 @@ struct EnumVariantMemberDescriptionFactory { impl MemberDescriptionFactory for EnumVariantMemberDescriptionFactory { fn create_member_descriptions(&self, cx: &CrateContext) -> ~[MemberDescription] { - self.args.iter().enumerate().map(|(i, &(name, ty))| { + self.args.iter().enumerate().map(|(i, &(ref name, ty))| { MemberDescription { - name: name, + name: name.to_str(), llvm_type: type_of::type_of(cx, ty), type_metadata: match self.discriminant_type_metadata { Some(metadata) if i == 0 => metadata, @@ -1362,7 +1369,8 @@ fn describe_enum_variant(cx: &CrateContext, file_metadata: DIFile, span: Span) -> (DICompositeType, Type, @MemberDescriptionFactory) { - let variant_name = token::ident_to_str(&variant_info.name); + let variant_info_string = token::get_ident(variant_info.name.name); + let variant_name = variant_info_string.get(); let variant_llvm_type = Type::struct_(struct_def.fields.map(|&t| type_of::type_of(cx, t)), struct_def.packed); // Could some consistency checks here: size, align, field count, discr type @@ -1395,19 +1403,24 @@ fn describe_enum_variant(cx: &CrateContext, // Get the argument names from the enum variant info let mut arg_names = match variant_info.arg_names { - Some(ref names) => names.map(|ident| token::ident_to_str(ident)), - None => variant_info.args.map(|_| @"") + Some(ref names) => { + names.map(|ident| { + let string = token::get_ident(ident.name); + string.get().to_str() + }) + } + None => variant_info.args.map(|_| ~"") }; // If this is not a univariant enum, there is also the (unnamed) discriminant field if discriminant_type_metadata.is_some() { - arg_names.insert(0, @""); + arg_names.insert(0, ~""); } // Build an array of (field name, field type) pairs to be captured in the factory closure. 
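Throughout the debuginfo.rs changes above, member and variant names become plain owned `~str` values built from idents on demand. The conversion used for enum variant argument names looks roughly like this (the helper and its signature are illustrative only):

    use std::vec;
    use syntax::ast;
    use syntax::parse::token;

    // Turn optional per-argument idents into owned debug-info names,
    // falling back to empty strings when no names were recorded.
    fn variant_arg_names(names: Option<~[ast::Ident]>, arg_count: uint) -> ~[~str] {
        match names {
            Some(idents) => idents.map(|ident| {
                let string = token::get_ident(ident.name);
                string.get().to_str()
            }),
            None => vec::from_elem(arg_count, ~""),
        }
    }
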
- let args: ~[(@str, ty::t)] = arg_names.iter() + let args: ~[(~str, ty::t)] = arg_names.iter() .zip(struct_def.fields.iter()) - .map(|(&s, &t)| (s, t)) + .map(|(s, &t)| (s.to_str(), t)) .collect(); let member_description_factory = @@ -1452,7 +1465,8 @@ fn prepare_enum_metadata(cx: &CrateContext, let enumerators_metadata: ~[DIDescriptor] = variants .iter() .map(|v| { - let name: &str = token::ident_to_str(&v.name); + let string = token::get_ident(v.name.name); + let name: &str = string.get(); let discriminant_value = v.disr_val as c_ulonglong; name.with_c_str(|name| { @@ -1580,7 +1594,7 @@ enum MemberOffset { } struct MemberDescription { - name: @str, + name: ~str, llvm_type: Type, type_metadata: DIType, offset: MemberOffset, @@ -1737,31 +1751,31 @@ fn boxed_type_metadata(cx: &CrateContext, let member_descriptions = [ MemberDescription { - name: @"refcnt", + name: ~"refcnt", llvm_type: member_llvm_types[0], type_metadata: type_metadata(cx, int_type, codemap::DUMMY_SP), offset: ComputedMemberOffset, }, MemberDescription { - name: @"tydesc", + name: ~"tydesc", llvm_type: member_llvm_types[1], type_metadata: nil_pointer_type_metadata, offset: ComputedMemberOffset, }, MemberDescription { - name: @"prev", + name: ~"prev", llvm_type: member_llvm_types[2], type_metadata: nil_pointer_type_metadata, offset: ComputedMemberOffset, }, MemberDescription { - name: @"next", + name: ~"next", llvm_type: member_llvm_types[3], type_metadata: nil_pointer_type_metadata, offset: ComputedMemberOffset, }, MemberDescription { - name: @"val", + name: ~"val", llvm_type: member_llvm_types[4], type_metadata: content_type_metadata, offset: ComputedMemberOffset, @@ -1848,19 +1862,19 @@ fn vec_metadata(cx: &CrateContext, let member_descriptions = [ MemberDescription { - name: @"fill", + name: ~"fill", llvm_type: member_llvm_types[0], type_metadata: int_type_metadata, offset: ComputedMemberOffset, }, MemberDescription { - name: @"alloc", + name: ~"alloc", llvm_type: member_llvm_types[1], type_metadata: int_type_metadata, offset: ComputedMemberOffset, }, MemberDescription { - name: @"elements", + name: ~"elements", llvm_type: member_llvm_types[2], type_metadata: array_type_metadata, offset: ComputedMemberOffset, @@ -1882,23 +1896,6 @@ fn vec_metadata(cx: &CrateContext, span); } -fn boxed_vec_metadata(cx: &CrateContext, - element_type: ty::t, - span: Span) - -> DICompositeType { - let element_llvm_type = type_of::type_of(cx, element_type); - let vec_llvm_type = Type::vec(cx.sess.targ_cfg.arch, &element_llvm_type); - let vec_type_name: &str = format!("[{}]", ppaux::ty_to_str(cx.tcx, element_type)); - let vec_metadata = vec_metadata(cx, element_type, span); - - return boxed_type_metadata( - cx, - Some(vec_type_name), - vec_llvm_type, - vec_metadata, - span); -} - fn vec_slice_metadata(cx: &CrateContext, vec_type: ty::t, element_type: ty::t, @@ -1917,13 +1914,13 @@ fn vec_slice_metadata(cx: &CrateContext, let member_descriptions = [ MemberDescription { - name: @"data_ptr", + name: ~"data_ptr", llvm_type: member_llvm_types[0], type_metadata: type_metadata(cx, data_ptr_type, span), offset: ComputedMemberOffset, }, MemberDescription { - name: @"length", + name: ~"length", llvm_type: member_llvm_types[1], type_metadata: type_metadata(cx, ty::mk_uint(), span), offset: ComputedMemberOffset, @@ -1996,9 +1993,10 @@ fn trait_metadata(cx: &CrateContext, // the trait's methods. 
let path = ty::item_path(cx.tcx, def_id); let ident = path.last().unwrap().ident(); + let ident_string = token::get_ident(ident.name); let name = ppaux::trait_store_to_str(cx.tcx, trait_store) + ppaux::mutability_to_str(mutability) + - token::ident_to_str(&ident); + ident_string.get(); // Add type and region parameters let name = ppaux::parameterized(cx.tcx, name, &substs.regions, substs.tps, def_id, true); @@ -2006,7 +2004,7 @@ fn trait_metadata(cx: &CrateContext, let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id, usage_site_span); - let file_name = span_start(cx, definition_span).file.name; + let file_name = span_start(cx, definition_span).file.name.clone(); let file_metadata = file_metadata(cx, file_name); let trait_llvm_type = type_of::type_of(cx, trait_type); @@ -2078,10 +2076,6 @@ fn type_metadata(cx: &CrateContext, let vec_metadata = vec_metadata(cx, i8_t, usage_site_span); pointer_type_metadata(cx, t, vec_metadata) } - ty::vstore_box => { - let boxed_vec_metadata = boxed_vec_metadata(cx, i8_t, usage_site_span); - pointer_type_metadata(cx, t, boxed_vec_metadata) - } ty::vstore_slice(_region) => { vec_slice_metadata(cx, t, i8_t, usage_site_span) } @@ -2102,10 +2096,6 @@ fn type_metadata(cx: &CrateContext, let vec_metadata = vec_metadata(cx, mt.ty, usage_site_span); pointer_type_metadata(cx, t, vec_metadata) } - ty::vstore_box => { - let boxed_vec_metadata = boxed_vec_metadata(cx, mt.ty, usage_site_span); - pointer_type_metadata(cx, t, boxed_vec_metadata) - } ty::vstore_slice(_) => { vec_slice_metadata(cx, t, mt.ty, usage_site_span) } @@ -2714,7 +2704,7 @@ fn populate_scope_map(cx: &CrateContext, ast::ExprInlineAsm(ast::InlineAsm { inputs: ref inputs, outputs: ref outputs, .. }) => { - // inputs, outputs: ~[(@str, @expr)] + // inputs, outputs: ~[(~str, @expr)] for &(_, exp) in inputs.iter() { walk_expr(cx, exp, scope_stack, scope_map); } @@ -2755,8 +2745,10 @@ impl NamespaceTreeNode { } None => {} } - let name = token::ident_to_str(&node.ident); - output.push_str(format!("{}{}", name.len(), name)); + let string = token::get_ident(node.ident.name); + output.push_str(format!("{}{}", + string.get().len(), + string.get())); } } } @@ -2807,7 +2799,8 @@ fn namespace_for_item(cx: &CrateContext, Some(node) => node.scope, None => ptr::null() }; - let namespace_name = token::ident_to_str(&ident); + let namespace_name_string = token::get_ident(ident.name); + let namespace_name = namespace_name_string.get(); let namespace_metadata = unsafe { namespace_name.with_c_str(|namespace_name| { diff --git a/src/librustc/middle/trans/expr.rs b/src/librustc/middle/trans/expr.rs index d0a01f56b530c..517f9e1a31d81 100644 --- a/src/librustc/middle/trans/expr.rs +++ b/src/librustc/middle/trans/expr.rs @@ -202,12 +202,10 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>, unpack_datum!(bcx, auto_ref(bcx, datum, expr)) } Some(AutoBorrowVec(..)) => { - unpack_datum!(bcx, auto_slice(bcx, adj.autoderefs, - expr, datum)) + unpack_datum!(bcx, auto_slice(bcx, expr, datum)) } Some(AutoBorrowVecRef(..)) => { - unpack_datum!(bcx, auto_slice_and_ref(bcx, adj.autoderefs, - expr, datum)) + unpack_datum!(bcx, auto_slice_and_ref(bcx, expr, datum)) } Some(AutoBorrowFn(..)) => { let adjusted_ty = ty::adjust_ty(bcx.tcx(), expr.span, @@ -271,7 +269,6 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>, fn auto_slice<'a>( bcx: &'a Block<'a>, - autoderefs: uint, expr: &ast::Expr, datum: Datum) -> DatumBlock<'a, Expr> { @@ -290,8 +287,7 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>, let datum = 
unpack_datum!( bcx, datum.to_lvalue_datum(bcx, "auto_slice", expr.id)); - let (bcx, base, len) = - datum.get_vec_base_and_len(bcx, expr.span, expr.id, autoderefs+1); + let (base, len) = datum.get_vec_base_and_len(bcx); // this type may have a different region/mutability than the // real one, but it will have the same runtime representation @@ -323,11 +319,10 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>, fn auto_slice_and_ref<'a>( bcx: &'a Block<'a>, - autoderefs: uint, expr: &ast::Expr, datum: Datum) -> DatumBlock<'a, Expr> { - let DatumBlock { bcx, datum } = auto_slice(bcx, autoderefs, expr, datum); + let DatumBlock { bcx, datum } = auto_slice(bcx, expr, datum); auto_ref(bcx, datum, expr) } @@ -519,19 +514,10 @@ fn trans_datum_unadjusted<'a>(bcx: &'a Block<'a>, ast::ExprIndex(_, base, idx) => { trans_index(bcx, expr, base, idx) } - ast::ExprVstore(contents, ast::ExprVstoreBox) => { - fcx.push_ast_cleanup_scope(contents.id); - let datum = unpack_datum!( - bcx, tvec::trans_uniq_or_managed_vstore(bcx, heap_managed, - expr, contents)); - bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, contents.id); - DatumBlock(bcx, datum) - } ast::ExprVstore(contents, ast::ExprVstoreUniq) => { fcx.push_ast_cleanup_scope(contents.id); let datum = unpack_datum!( - bcx, tvec::trans_uniq_or_managed_vstore(bcx, heap_exchange, - expr, contents)); + bcx, tvec::trans_uniq_vstore(bcx, expr, contents)); bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, contents.id); DatumBlock(bcx, datum) } @@ -543,9 +529,7 @@ fn trans_datum_unadjusted<'a>(bcx: &'a Block<'a>, let heap = heap_exchange; return trans_boxed_expr(bcx, box_ty, contents, contents_ty, heap) } - ast::ExprLit(lit) => { - trans_immediate_lit(bcx, expr, *lit) - } + ast::ExprLit(lit) => trans_immediate_lit(bcx, expr, (*lit).clone()), ast::ExprBinary(_, op, lhs, rhs) => { // if overloaded, would be RvalueDpsExpr { @@ -636,8 +620,7 @@ fn trans_index<'a>(bcx: &'a Block<'a>, let vt = tvec::vec_types(bcx, base_datum.ty); base::maybe_name_value(bcx.ccx(), vt.llunit_size, "unit_sz"); - let (bcx, base, len) = - base_datum.get_vec_base_and_len(bcx, index_expr.span, index_expr.id, 0); + let (base, len) = base_datum.get_vec_base_and_len(bcx); debug!("trans_index: base {}", bcx.val_to_str(base)); debug!("trans_index: len {}", bcx.val_to_str(len)); @@ -836,8 +819,8 @@ fn trans_rvalue_dps_unadjusted<'a>(bcx: &'a Block<'a>, } ast::ExprLit(lit) => { match lit.node { - ast::LitStr(s, _) => { - tvec::trans_lit_str(bcx, expr, s, dest) + ast::LitStr(ref s, _) => { + tvec::trans_lit_str(bcx, expr, (*s).clone(), dest) } _ => { bcx.tcx() @@ -1799,9 +1782,9 @@ fn trans_log_level<'a>(bcx: &'a Block<'a>) let external_srcs = ccx.external_srcs.borrow(); srccrate = match external_srcs.get().find(&bcx.fcx.id) { Some(&src) => { - ccx.sess.cstore.get_crate_data(src.crate).name + ccx.sess.cstore.get_crate_data(src.crate).name.clone() } - None => ccx.link_meta.crateid.name.to_managed(), + None => ccx.link_meta.crateid.name.to_str(), }; }; let mut modpath = ~[PathMod(ccx.sess.ident_of(srccrate))]; @@ -2032,4 +2015,3 @@ fn deref_once<'a>(bcx: &'a Block<'a>, DatumBlock { bcx: bcx, datum: datum } } } - diff --git a/src/librustc/middle/trans/foreign.rs b/src/librustc/middle/trans/foreign.rs index d9a34e1da7d51..bc9dd767ec670 100644 --- a/src/librustc/middle/trans/foreign.rs +++ b/src/librustc/middle/trans/foreign.rs @@ -31,7 +31,8 @@ use std::vec; use syntax::abi::{Cdecl, Aapcs, C, AbiSet, Win64}; use syntax::abi::{RustIntrinsic, Rust, Stdcall, Fastcall, System}; use syntax::codemap::Span; -use 
syntax::parse::token::special_idents; +use syntax::parse::token::{InternedString, special_idents}; +use syntax::parse::token; use syntax::{ast}; use syntax::{attr, ast_map}; use util::ppaux::{Repr, UserString}; @@ -135,7 +136,7 @@ pub fn register_foreign_item_fn(ccx: @CrateContext, }; // Register the function as a C extern fn - let lname = link_name(ccx, foreign_item); + let lname = link_name(foreign_item); let tys = foreign_types_for_id(ccx, foreign_item.id); // Make sure the calling convention is right for variadic functions @@ -150,8 +151,12 @@ pub fn register_foreign_item_fn(ccx: @CrateContext, let llfn; { let mut externs = ccx.externs.borrow_mut(); - llfn = base::get_extern_fn(externs.get(), ccx.llmod, lname, - cc, llfn_ty, tys.fn_sig.output); + llfn = base::get_extern_fn(externs.get(), + ccx.llmod, + lname.get(), + cc, + llfn_ty, + tys.fn_sig.output); }; add_argument_attributes(&tys, llfn); @@ -372,9 +377,9 @@ pub fn trans_foreign_mod(ccx: @CrateContext, _ => () } - let lname = link_name(ccx, foreign_item); + let lname = link_name(foreign_item); let mut item_symbols = ccx.item_symbols.borrow_mut(); - item_symbols.get().insert(foreign_item.id, lname.to_owned()); + item_symbols.get().insert(foreign_item.id, lname.get().to_owned()); } } @@ -726,10 +731,10 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext, // This code is kind of a confused mess and needs to be reworked given // the massive simplifications that have occurred. -pub fn link_name(ccx: &CrateContext, i: @ast::ForeignItem) -> @str { +pub fn link_name(i: @ast::ForeignItem) -> InternedString { match attr::first_attr_value_str_by_name(i.attrs, "link_name") { - None => ccx.sess.str_of(i.ident), - Some(ln) => ln, + None => token::get_ident(i.ident.name), + Some(ln) => ln.clone(), } } diff --git a/src/librustc/middle/trans/glue.rs b/src/librustc/middle/trans/glue.rs index 705501c82235f..1bfbb3f99b199 100644 --- a/src/librustc/middle/trans/glue.rs +++ b/src/librustc/middle/trans/glue.rs @@ -15,32 +15,32 @@ use back::abi; use back::link::*; -use lib; use lib::llvm::{llvm, ValueRef, True}; +use lib; use middle::lang_items::{FreeFnLangItem, ExchangeFreeFnLangItem}; use middle::trans::adt; use middle::trans::base::*; +use middle::trans::build::*; use middle::trans::callee; use middle::trans::cleanup; use middle::trans::cleanup::CleanupMethods; use middle::trans::common::*; -use middle::trans::build::*; use middle::trans::expr; use middle::trans::machine::*; use middle::trans::reflect; use middle::trans::tvec; +use middle::trans::type_::Type; use middle::trans::type_of::type_of; use middle::ty; -use util::ppaux; use util::ppaux::ty_to_short_str; - -use middle::trans::type_::Type; +use util::ppaux; use arena::TypedArena; use std::c_str::ToCStr; use std::cell::Cell; use std::libc::c_uint; use syntax::ast; +use syntax::parse::token; pub fn trans_free<'a>(cx: &'a Block<'a>, v: ValueRef) -> &'a Block<'a> { let _icx = push_ctxt("trans_free"); @@ -64,10 +64,7 @@ pub fn take_ty<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t) // NB: v is an *alias* of type t here, not a direct value. 
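With `@[T]` and `@str` gone, the only refcounted values left are `@T` boxes and `@Trait` objects, which is why the `vstore_box` arms of `take_ty` below simply disappear. A hedged sketch of the resulting dispatch; the trailing arm is an assumption about the unchanged remainder of the function, which the hunk does not show:

```rust
match ty::get(t).sty {
    // Plain managed boxes still bump a refcount.
    ty::ty_box(_) => incr_refcnt_of_boxed(bcx, v),
    // @Trait objects bump the refcount of the box stored inside the object.
    ty::ty_trait(_, _, ty::BoxTraitStore, _, _) => {
        incr_refcnt_of_boxed(bcx, GEPi(bcx, v, [0u, abi::trt_field_box]))
    }
    // Everything else is assumed to need no take glue here.
    _ => bcx,
}
```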
let _icx = push_ctxt("take_ty"); match ty::get(t).sty { - ty::ty_box(_) | - ty::ty_vec(_, ty::vstore_box) | ty::ty_str(ty::vstore_box) => { - incr_refcnt_of_boxed(bcx, v) - } + ty::ty_box(_) => incr_refcnt_of_boxed(bcx, v), ty::ty_trait(_, _, ty::BoxTraitStore, _, _) => { incr_refcnt_of_boxed(bcx, GEPi(bcx, v, [0u, abi::trt_field_box])) } @@ -113,10 +110,6 @@ fn simplified_glue_type(tcx: ty::ctxt, field: uint, t: ty::t) -> ty::t { if !ty::type_needs_drop(tcx, typ) => return ty::mk_box(tcx, ty::mk_nil()), - ty::ty_vec(mt, ty::vstore_box) - if !ty::type_needs_drop(tcx, mt.ty) => - return ty::mk_box(tcx, ty::mk_nil()), - ty::ty_uniq(typ) if !ty::type_needs_drop(tcx, typ) => return ty::mk_uniq(tcx, ty::mk_nil()), @@ -326,11 +319,6 @@ fn make_drop_glue<'a>(bcx: &'a Block<'a>, v0: ValueRef, t: ty::t) -> &'a Block<' ty::ty_box(body_ty) => { decr_refcnt_maybe_free(bcx, v0, Some(body_ty)) } - ty::ty_str(ty::vstore_box) | ty::ty_vec(_, ty::vstore_box) => { - let unit_ty = ty::sequence_element_type(ccx.tcx, t); - let unboxed_vec_ty = ty::mk_mut_unboxed_vec(ccx.tcx, unit_ty); - decr_refcnt_maybe_free(bcx, v0, Some(unboxed_vec_ty)) - } ty::ty_uniq(content_ty) => { let llbox = Load(bcx, v0); let not_null = IsNotNull(bcx, llbox); @@ -471,16 +459,17 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> @tydesc_info { let llsize = llsize_of(ccx, llty); let llalign = llalign_of(ccx, llty); - let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc").to_managed(); - note_unique_llvm_symbol(ccx, name); + let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc"); debug!("+++ declare_tydesc {} {}", ppaux::ty_to_str(ccx.tcx, t), name); let gvar = name.with_c_str(|buf| { unsafe { llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type.to_ref(), buf) } }); + note_unique_llvm_symbol(ccx, name); - let ty_name = C_str_slice(ccx, ppaux::ty_to_str(ccx.tcx, t).to_managed()); + let ty_name = token::intern_and_get_ident(ppaux::ty_to_str(ccx.tcx, t)); + let ty_name = C_str_slice(ccx, ty_name); let inf = @tydesc_info { ty: t, @@ -498,10 +487,10 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> @tydesc_info { fn declare_generic_glue(ccx: &CrateContext, t: ty::t, llfnty: Type, name: &str) -> ValueRef { let _icx = push_ctxt("declare_generic_glue"); - let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, (~"glue_" + name)).to_managed(); + let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, ~"glue_" + name); debug!("{} is for type {}", fn_nm, ppaux::ty_to_str(ccx.tcx, t)); - note_unique_llvm_symbol(ccx, fn_nm); let llfn = decl_cdecl_fn(ccx.llmod, fn_nm, llfnty, ty::mk_nil()); + note_unique_llvm_symbol(ccx, fn_nm); return llfn; } diff --git a/src/librustc/middle/trans/inline.rs b/src/librustc/middle/trans/inline.rs index 3a0b97813df50..c60199b3c0d0a 100644 --- a/src/librustc/middle/trans/inline.rs +++ b/src/librustc/middle/trans/inline.rs @@ -152,7 +152,7 @@ pub fn maybe_instantiate_inline(ccx: @CrateContext, fn_id: ast::DefId) let impl_tpt = ty::lookup_item_type(ccx.tcx, impl_did); let num_type_params = - impl_tpt.generics.type_param_defs.len() + + impl_tpt.generics.type_param_defs().len() + mth.generics.ty_params.len(); if num_type_params == 0 { diff --git a/src/librustc/middle/trans/intrinsic.rs b/src/librustc/middle/trans/intrinsic.rs index da3b9202d9215..49f1b073f873e 100644 --- a/src/librustc/middle/trans/intrinsic.rs +++ b/src/librustc/middle/trans/intrinsic.rs @@ -337,8 +337,10 @@ pub fn trans_intrinsic(ccx: @CrateContext, Ret(bcx, td); } "type_id" => { - let hash = 
ty::hash_crate_independent(ccx.tcx, substs.tys[0], - ccx.link_meta.crate_hash); + let hash = ty::hash_crate_independent( + ccx.tcx, + substs.tys[0], + ccx.link_meta.crate_hash.clone()); // NB: This needs to be kept in lockstep with the TypeId struct in // libstd/unstable/intrinsics.rs let val = C_named_struct(type_of::type_of(ccx, output_type), [C_u64(hash)]); diff --git a/src/librustc/middle/trans/meth.rs b/src/librustc/middle/trans/meth.rs index 09bfa36ddc172..b13a8800cee84 100644 --- a/src/librustc/middle/trans/meth.rs +++ b/src/librustc/middle/trans/meth.rs @@ -182,7 +182,7 @@ pub fn trans_static_method_callee(bcx: &Block, // out which impl the `Trait` bound on the type `self` was // bound to. let bound_index = ty::lookup_trait_def(bcx.tcx(), trait_id). - generics.type_param_defs.len(); + generics.type_param_defs().len(); let mname = if method_id.crate == ast::LOCAL_CRATE { { @@ -318,7 +318,7 @@ pub fn combine_impl_and_methods_tps(bcx: &Block, let ccx = bcx.ccx(); let method = ty::method(ccx.tcx, mth_did); - let n_m_tps = method.generics.type_param_defs.len(); + let n_m_tps = method.generics.type_param_defs().len(); let node_substs = node_id_type_params(bcx, callee_id); debug!("rcvr_substs={:?}", rcvr_substs.repr(ccx.tcx)); let ty_substs diff --git a/src/librustc/middle/trans/reflect.rs b/src/librustc/middle/trans/reflect.rs index 0e245de60193c..b9d23c47fcdce 100644 --- a/src/librustc/middle/trans/reflect.rs +++ b/src/librustc/middle/trans/reflect.rs @@ -20,6 +20,7 @@ use middle::trans::datum::*; use middle::trans::glue; use middle::trans::machine; use middle::trans::meth; +use middle::trans::type_::Type; use middle::trans::type_of::*; use middle::ty; use util::ppaux::ty_to_str; @@ -31,9 +32,8 @@ use std::vec; use syntax::ast::DefId; use syntax::ast; use syntax::ast_map::PathName; -use syntax::parse::token::special_idents; - -use middle::trans::type_::Type; +use syntax::parse::token::{InternedString, special_idents}; +use syntax::parse::token; pub struct Reflector<'a> { visitor_val: ValueRef, @@ -56,14 +56,14 @@ impl<'a> Reflector<'a> { C_bool(b) } - pub fn c_slice(&mut self, s: @str) -> ValueRef { + pub fn c_slice(&mut self, s: InternedString) -> ValueRef { // We're careful to not use first class aggregates here because that // will kick us off fast isel. (Issue #4352.) 
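What `c_slice` materializes below is just the two-word runtime layout of a string slice; the code stores each field through a separate GEP rather than building a first-class aggregate, for the fast-isel reason noted in the comment above. A small sketch of the layout being written, with illustrative field names rather than the real compiler types:

```rust
// Runtime shape of an &'static str as written into the scratch datum below.
struct StrSliceRepr {
    data: *u8,  // pointer to the interned bytes (C_cstr above)
    len: uint,  // byte length, now taken from s.get().len()
}
```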
let bcx = self.bcx; let str_vstore = ty::vstore_slice(ty::ReStatic); let str_ty = ty::mk_str(bcx.tcx(), str_vstore); let scratch = rvalue_scratch_datum(bcx, str_ty, ""); - let len = C_uint(bcx.ccx(), s.len()); + let len = C_uint(bcx.ccx(), s.get().len()); let c_str = PointerCast(bcx, C_cstr(bcx.ccx(), s), Type::i8p()); Store(bcx, c_str, GEPi(bcx, scratch.val, [ 0, 0 ])); Store(bcx, len, GEPi(bcx, scratch.val, [ 0, 1 ])); @@ -140,7 +140,6 @@ impl<'a> Reflector<'a> { } ty::vstore_slice(_) => (~"slice", ~[]), ty::vstore_uniq => (~"uniq", ~[]), - ty::vstore_box => (~"box", ~[]) } } @@ -260,15 +259,19 @@ impl<'a> Reflector<'a> { fields[0].ident.name != special_idents::unnamed_field.name; } - let extra = ~[self.c_slice(ty_to_str(tcx, t).to_managed()), - self.c_bool(named_fields), - self.c_uint(fields.len())] + self.c_size_and_align(t); + let extra = ~[ + self.c_slice(token::intern_and_get_ident(ty_to_str(tcx, + t))), + self.c_bool(named_fields), + self.c_uint(fields.len()) + ] + self.c_size_and_align(t); self.bracketed("class", extra, |this| { for (i, field) in fields.iter().enumerate() { - let extra = ~[this.c_uint(i), - this.c_slice(bcx.ccx().sess.str_of(field.ident)), - this.c_bool(named_fields)] - + this.c_mt(&field.mt); + let extra = ~[ + this.c_uint(i), + this.c_slice(token::get_ident(field.ident.name)), + this.c_bool(named_fields) + ] + this.c_mt(&field.mt); this.visit("class_field", extra); } }) @@ -330,7 +333,7 @@ impl<'a> Reflector<'a> { + self.c_size_and_align(t); self.bracketed("enum", enum_args, |this| { for (i, v) in variants.iter().enumerate() { - let name = ccx.sess.str_of(v.name); + let name = token::get_ident(v.name.name); let variant_args = ~[this.c_uint(i), C_u64(v.disr_val), this.c_uint(v.args.len()), @@ -352,7 +355,9 @@ impl<'a> Reflector<'a> { } ty::ty_trait(_, _, _, _, _) => { - let extra = [self.c_slice(ty_to_str(tcx, t).to_managed())]; + let extra = [ + self.c_slice(token::intern_and_get_ident(ty_to_str(tcx, t))) + ]; self.visit("trait", extra); } diff --git a/src/librustc/middle/trans/tvec.rs b/src/librustc/middle/trans/tvec.rs index 5754a9ba88b99..a0a1ff20a9a9c 100644 --- a/src/librustc/middle/trans/tvec.rs +++ b/src/librustc/middle/trans/tvec.rs @@ -31,6 +31,7 @@ use middle::ty; use util::ppaux::ty_to_str; use syntax::ast; +use syntax::parse::token::InternedString; // Boxed vector types are in some sense currently a "shorthand" for a box // containing an unboxed vector. 
This expands a boxed vector type into such an @@ -43,9 +44,6 @@ pub fn expand_boxed_vec_ty(tcx: ty::ctxt, t: ty::t) -> ty::t { ty::ty_str(ty::vstore_uniq) | ty::ty_vec(_, ty::vstore_uniq) => { ty::mk_uniq(tcx, unboxed_vec_ty) } - ty::ty_str(ty::vstore_box) | ty::ty_vec(_, ty::vstore_box) => { - ty::mk_box(tcx, unboxed_vec_ty) - } _ => tcx.sess.bug("non boxed-vec type \ in tvec::expand_boxed_vec_ty") } @@ -64,21 +62,6 @@ pub fn get_alloc(bcx: &Block, vptr: ValueRef) -> ValueRef { Load(bcx, GEPi(bcx, vptr, [0u, abi::vec_elt_alloc])) } -pub fn get_bodyptr(bcx: &Block, vptr: ValueRef, t: ty::t) -> ValueRef { - let vt = vec_types(bcx, t); - - let managed = match ty::get(vt.vec_ty).sty { - ty::ty_str(ty::vstore_box) | ty::ty_vec(_, ty::vstore_box) => true, - _ => false - }; - - if managed { - GEPi(bcx, vptr, [0u, abi::box_field_body]) - } else { - vptr - } -} - pub fn get_dataptr(bcx: &Block, vptr: ValueRef) -> ValueRef { let _icx = push_ctxt("tvec::get_dataptr"); GEPi(bcx, vptr, [0u, abi::vec_elt_elems, 0u]) @@ -127,11 +110,10 @@ pub fn alloc_uniq_raw<'a>( alloc_raw(bcx, unit_ty, fill, alloc, heap_exchange) } -pub fn alloc_vec<'a>( +pub fn alloc_uniq_vec<'a>( bcx: &'a Block<'a>, unit_ty: ty::t, - elts: uint, - heap: heap) + elts: uint) -> Result<'a> { let _icx = push_ctxt("tvec::alloc_uniq"); let ccx = bcx.ccx(); @@ -142,7 +124,7 @@ pub fn alloc_vec<'a>( let alloc = if elts < 4u { Mul(bcx, C_int(ccx, 4), unit_sz) } else { fill }; let Result {bcx: bcx, val: vptr} = - alloc_raw(bcx, unit_ty, fill, alloc, heap); + alloc_raw(bcx, unit_ty, fill, alloc, heap_exchange); return rslt(bcx, vptr); } @@ -231,8 +213,11 @@ pub fn trans_slice_vstore<'a>( match content_expr.node { ast::ExprLit(lit) => { match lit.node { - ast::LitStr(s, _) => { - return trans_lit_str(bcx, content_expr, s, dest); + ast::LitStr(ref s, _) => { + return trans_lit_str(bcx, + content_expr, + s.clone(), + dest) } _ => {} } @@ -284,7 +269,7 @@ pub fn trans_slice_vstore<'a>( pub fn trans_lit_str<'a>( bcx: &'a Block<'a>, lit_expr: &ast::Expr, - str_lit: @str, + str_lit: InternedString, dest: Dest) -> &'a Block<'a> { /*! @@ -301,7 +286,7 @@ pub fn trans_lit_str<'a>( Ignore => bcx, SaveIn(lldest) => { unsafe { - let bytes = str_lit.len(); + let bytes = str_lit.get().len(); let llbytes = C_uint(bcx.ccx(), bytes); let llcstr = C_cstr(bcx.ccx(), str_lit); let llcstr = llvm::LLVMConstPointerCast(llcstr, Type::i8p().to_ref()); @@ -316,66 +301,62 @@ pub fn trans_lit_str<'a>( } -pub fn trans_uniq_or_managed_vstore<'a>(bcx: &'a Block<'a>, - heap: heap, - vstore_expr: &ast::Expr, - content_expr: &ast::Expr) - -> DatumBlock<'a, Expr> { +pub fn trans_uniq_vstore<'a>(bcx: &'a Block<'a>, + vstore_expr: &ast::Expr, + content_expr: &ast::Expr) + -> DatumBlock<'a, Expr> { /*! - * @[...] or ~[...] (also @"..." or ~"...") allocate boxes in the - * appropriate heap and write the array elements into them. + * ~[...] and ~"..." allocate boxes in the exchange heap and write + * the array elements into them. */ - debug!("trans_uniq_or_managed_vstore(vstore_expr={}, heap={:?})", - bcx.expr_to_str(vstore_expr), heap); + debug!("trans_uniq_vstore(vstore_expr={})", bcx.expr_to_str(vstore_expr)); let fcx = bcx.fcx; // Handle ~"". 
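Since `trans_uniq_or_managed_vstore` collapses into `trans_uniq_vstore` here, the only vector and string stores the translator still allocates are unique ones on the exchange heap, plus borrowed slices handled elsewhere. A small pre-1.0 illustration of the surviving forms, assuming the `@[..]` and `@".."` expressions this patch removes are already rejected upstream:

```rust
fn main() {
    let v: ~[int] = ~[1, 2, 3];  // exchange heap; @[1, 2, 3] no longer exists
    let s: ~str = ~"hello";      // goes through the StrDupUniq path below
    let borrowed: &[int] = v;    // &[] slices still borrow, they do not allocate
    println!("{} {} {}", v.len(), s, borrowed.len());
}
```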
- match heap { - heap_exchange => { - match content_expr.node { - ast::ExprLit(lit) => { - match lit.node { - ast::LitStr(s, _) => { - let llptrval = C_cstr(bcx.ccx(), s); - let llptrval = PointerCast(bcx, llptrval, Type::i8p()); - let llsizeval = C_uint(bcx.ccx(), s.len()); - let typ = ty::mk_str(bcx.tcx(), ty::vstore_uniq); - let lldestval = rvalue_scratch_datum(bcx, typ, ""); - let alloc_fn = langcall(bcx, - Some(lit.span), - "", - StrDupUniqFnLangItem); - let bcx = callee::trans_lang_call( - bcx, - alloc_fn, - [ llptrval, llsizeval ], - Some(expr::SaveIn(lldestval.val))).bcx; - return DatumBlock(bcx, lldestval).to_expr_datumblock(); - } - _ => {} - } + match content_expr.node { + ast::ExprLit(lit) => { + match lit.node { + ast::LitStr(ref s, _) => { + let llptrval = C_cstr(bcx.ccx(), (*s).clone()); + let llptrval = PointerCast(bcx, + llptrval, + Type::i8p()); + let llsizeval = C_uint(bcx.ccx(), s.get().len()); + let typ = ty::mk_str(bcx.tcx(), ty::vstore_uniq); + let lldestval = rvalue_scratch_datum(bcx, + typ, + ""); + let alloc_fn = langcall(bcx, + Some(lit.span), + "", + StrDupUniqFnLangItem); + let bcx = callee::trans_lang_call( + bcx, + alloc_fn, + [ llptrval, llsizeval ], + Some(expr::SaveIn(lldestval.val))).bcx; + return DatumBlock(bcx, lldestval).to_expr_datumblock(); } _ => {} } } - heap_exchange_closure => fail!("vectors use exchange_alloc"), - heap_managed => {} + _ => {} } let vt = vec_types_from_expr(bcx, vstore_expr); let count = elements_required(bcx, content_expr); - let Result {bcx, val} = alloc_vec(bcx, vt.unit_ty, count, heap); + let Result {bcx, val} = alloc_uniq_vec(bcx, vt.unit_ty, count); // Create a temporary scope lest execution should fail while // constructing the vector. let temp_scope = fcx.push_custom_cleanup_scope(); - fcx.schedule_free_value(cleanup::CustomScope(temp_scope), val, heap); + fcx.schedule_free_value(cleanup::CustomScope(temp_scope), val, heap_exchange); - let dataptr = get_dataptr(bcx, get_bodyptr(bcx, val, vt.vec_ty)); + let dataptr = get_dataptr(bcx, val); - debug!("alloc_vec() returned val={}, dataptr={}", + debug!("alloc_uniq_vec() returned val={}, dataptr={}", bcx.val_to_str(val), bcx.val_to_str(dataptr)); let bcx = write_content(bcx, &vt, vstore_expr, @@ -405,15 +386,13 @@ pub fn write_content<'a>( match content_expr.node { ast::ExprLit(lit) => { match lit.node { - ast::LitStr(s, _) => { + ast::LitStr(ref s, _) => { match dest { - Ignore => { - return bcx; - } + Ignore => return bcx, SaveIn(lldest) => { - let bytes = s.len(); + let bytes = s.get().len(); let llbytes = C_uint(bcx.ccx(), bytes); - let llcstr = C_cstr(bcx.ccx(), s); + let llcstr = C_cstr(bcx.ccx(), (*s).clone()); base::call_memcpy(bcx, lldest, llcstr, @@ -516,7 +495,7 @@ pub fn elements_required(bcx: &Block, content_expr: &ast::Expr) -> uint { match content_expr.node { ast::ExprLit(lit) => { match lit.node { - ast::LitStr(s, _) => s.len(), + ast::LitStr(ref s, _) => s.get().len(), _ => { bcx.tcx().sess.span_bug(content_expr.span, "Unexpected evec content") @@ -564,10 +543,9 @@ pub fn get_base_and_byte_len(bcx: &Block, let len = Mul(bcx, count, vt.llunit_size); (base, len) } - ty::vstore_uniq | ty::vstore_box => { + ty::vstore_uniq => { assert!(type_is_immediate(bcx.ccx(), vt.vec_ty)); - let llval = Load(bcx, llval); - let body = get_bodyptr(bcx, llval, vec_ty); + let body = Load(bcx, llval); (get_dataptr(bcx, body), get_fill(bcx, body)) } } @@ -604,10 +582,9 @@ pub fn get_base_and_len(bcx: &Block, let count = Load(bcx, GEPi(bcx, llval, [0u, abi::slice_elt_len])); (base, 
count) } - ty::vstore_uniq | ty::vstore_box => { + ty::vstore_uniq => { assert!(type_is_immediate(bcx.ccx(), vt.vec_ty)); - let llval = Load(bcx, llval); - let body = get_bodyptr(bcx, llval, vec_ty); + let body = Load(bcx, llval); (get_dataptr(bcx, body), UDiv(bcx, get_fill(bcx, body), vt.llunit_size)) } } @@ -724,7 +701,7 @@ pub fn iter_vec_uniq<'r, f: iter_vec_block<'r,'b>) -> &'b Block<'b> { let _icx = push_ctxt("tvec::iter_vec_uniq"); - let data_ptr = get_dataptr(bcx, get_bodyptr(bcx, vptr, vec_ty)); + let data_ptr = get_dataptr(bcx, vptr); iter_vec_raw(bcx, data_ptr, vec_ty, fill, f) } diff --git a/src/librustc/middle/trans/type_of.rs b/src/librustc/middle/trans/type_of.rs index 86456187d1ad1..0bc5ffd10196f 100644 --- a/src/librustc/middle/trans/type_of.rs +++ b/src/librustc/middle/trans/type_of.rs @@ -115,9 +115,7 @@ pub fn sizing_type_of(cx: &CrateContext, t: ty::t) -> Type { ty::ty_float(t) => Type::float_from_ty(t), ty::ty_str(ty::vstore_uniq) | - ty::ty_str(ty::vstore_box) | ty::ty_vec(_, ty::vstore_uniq) | - ty::ty_vec(_, ty::vstore_box) | ty::ty_box(..) | ty::ty_uniq(..) | ty::ty_ptr(..) | @@ -221,13 +219,6 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type { let name = llvm_type_name(cx, an_enum, did, substs.tps); adt::incomplete_type_of(cx, repr, name) } - ty::ty_str(ty::vstore_box) => { - Type::at_box(cx, Type::vec(cx.sess.targ_cfg.arch, &Type::i8())).ptr_to() - } - ty::ty_vec(ref mt, ty::vstore_box) => { - let e_ty = type_of(cx, mt.ty); - Type::at_box(cx, Type::vec(cx.sess.targ_cfg.arch, &e_ty)).ptr_to() - } ty::ty_box(typ) => { Type::at_box(cx, type_of(cx, typ)).ptr_to() } diff --git a/src/librustc/middle/trans/write_guard.rs b/src/librustc/middle/trans/write_guard.rs index 5b310feb58db5..0f4b11bde707b 100644 --- a/src/librustc/middle/trans/write_guard.rs +++ b/src/librustc/middle/trans/write_guard.rs @@ -46,10 +46,10 @@ fn root<'a, K:KindOps>(datum: &Datum, _span: Span, root_key: root_map_key, root_info: RootInfo) -> &'a Block<'a> { - //! In some cases, borrowck will decide that an @T/@[]/@str - //! value must be rooted for the program to be safe. In that - //! case, we will call this function, which will stash a copy - //! away until we exit the scope `scope_id`. + //! In some cases, borrowck will decide that an @T value must be + //! rooted for the program to be safe. In that case, we will call + //! this function, which will stash a copy away until we exit the + //! scope `scope_id`. 
debug!("write_guard::root(root_key={:?}, root_info={:?}, datum={:?})", root_key, root_info, datum.to_str(bcx.ccx())); @@ -62,4 +62,3 @@ fn root<'a, K:KindOps>(datum: &Datum, cleanup::AstScope(root_info.scope), (), |(), bcx, llval| datum.shallow_copy_and_take(bcx, llval)).bcx } - diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index 9062949b000d9..003a10eb48258 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -34,6 +34,7 @@ use std::cmp; use std::hashmap::{HashMap, HashSet}; use std::ops; use std::ptr::to_unsafe_ptr; +use std::rc::Rc; use std::to_bytes; use std::to_str::ToStr; use std::vec; @@ -129,7 +130,6 @@ pub struct mt { pub enum vstore { vstore_fixed(uint), vstore_uniq, - vstore_box, vstore_slice(Region) } @@ -226,10 +226,10 @@ pub enum AutoRef { /// Convert from T to &T AutoPtr(Region, ast::Mutability), - /// Convert from @[]/~[]/&[] to &[] (or str) + /// Convert from ~[]/&[] to &[] (or str) AutoBorrowVec(Region, ast::Mutability), - /// Convert from @[]/~[]/&[] to &&[] (or str) + /// Convert from ~[]/&[] to &&[] (or str) AutoBorrowVecRef(Region, ast::Mutability), /// Convert from @fn()/~fn()/|| to || @@ -291,7 +291,7 @@ pub struct ctxt_ { freevars: RefCell, tcache: type_cache, rcache: creader_cache, - short_names_cache: RefCell>, + short_names_cache: RefCell>, needs_unwind_cleanup_cache: RefCell>, tc_cache: RefCell>, ast_ty_to_ty_cache: RefCell>, @@ -870,15 +870,21 @@ pub struct RegionParameterDef { #[deriving(Clone)] pub struct Generics { /// List of type parameters declared on the item. - type_param_defs: @~[TypeParameterDef], + type_param_defs: Rc<~[TypeParameterDef]>, /// List of region parameters declared on the item. - region_param_defs: @[RegionParameterDef], + region_param_defs: Rc<~[RegionParameterDef]>, } impl Generics { pub fn has_type_params(&self) -> bool { - !self.type_param_defs.is_empty() + !self.type_param_defs.borrow().is_empty() + } + pub fn type_param_defs<'a>(&'a self) -> &'a [TypeParameterDef] { + self.type_param_defs.borrow().as_slice() + } + pub fn region_param_defs<'a>(&'a self) -> &'a [RegionParameterDef] { + self.region_param_defs.borrow().as_slice() } } @@ -1551,7 +1557,7 @@ pub fn type_is_box(ty: t) -> bool { pub fn type_is_boxed(ty: t) -> bool { match get(ty).sty { - ty_box(_) | ty_vec(_, vstore_box) | ty_str(vstore_box) => true, + ty_box(_) => true, _ => false } } @@ -1675,10 +1681,7 @@ fn type_needs_unwind_cleanup_(cx: ctxt, ty: t, } ty_uniq(_) | ty_str(vstore_uniq) | - ty_str(vstore_box) | - ty_vec(_, vstore_uniq) | - ty_vec(_, vstore_box) - => { + ty_vec(_, vstore_uniq) => { // Once we're inside a box, the annihilator will find // it and destroy it. 
if !encountered_box { @@ -2021,10 +2024,6 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents { tc_mt(cx, mt, cache).owned_pointer() } - ty_vec(mt, vstore_box) => { - tc_mt(cx, mt, cache).managed_pointer() - } - ty_vec(ref mt, vstore_slice(r)) => { tc_ty(cx, mt.ty, cache).reference( borrowed_contents(r, mt.mutbl)) @@ -2034,10 +2033,6 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents { tc_mt(cx, mt, cache) } - ty_str(vstore_box) => { - TC::Managed - } - ty_str(vstore_slice(r)) => { borrowed_contents(r, ast::MutImmutable) } @@ -2523,8 +2518,8 @@ pub fn type_is_pod(cx: ctxt, ty: t) -> bool { ty_type | ty_ptr(_) | ty_bare_fn(_) => result = true, // Boxed types ty_box(_) | ty_uniq(_) | ty_closure(_) | - ty_str(vstore_uniq) | ty_str(vstore_box) | - ty_vec(_, vstore_uniq) | ty_vec(_, vstore_box) | + ty_str(vstore_uniq) | + ty_vec(_, vstore_uniq) | ty_trait(_, _, _, _, _) | ty_rptr(_,_) => result = false, // Structural types ty_enum(did, ref substs) => { @@ -3105,7 +3100,7 @@ pub fn expr_has_ty_params(cx: ctxt, expr: &ast::Expr) -> bool { pub fn method_call_type_param_defs(tcx: ctxt, method_map: typeck::method_map, id: ast::NodeId) - -> Option<@~[TypeParameterDef]> { + -> Option> { let method_map = method_map.borrow(); method_map.get().find(&id).map(|method| { match method.origin { @@ -3125,12 +3120,12 @@ pub fn method_call_type_param_defs(tcx: ctxt, // method bounds, so we must preprend the tps from the // trait itself. This ought to be harmonized. let trait_type_param_defs = - lookup_trait_def(tcx, trt_id).generics.type_param_defs; - @vec::append( - (*trait_type_param_defs).clone(), - *ty::trait_method(tcx, - trt_id, - n_mth).generics.type_param_defs) + lookup_trait_def(tcx, trt_id).generics.type_param_defs(); + Rc::new(vec::append( + trait_type_param_defs.to_owned(), + ty::trait_method(tcx, + trt_id, + n_mth).generics.type_param_defs())) } } }) @@ -3296,7 +3291,6 @@ pub fn expr_kind(tcx: ctxt, ast::ExprUnary(..) | ast::ExprAddrOf(..) | ast::ExprBinary(..) | - ast::ExprVstore(_, ast::ExprVstoreBox) | ast::ExprVstore(_, ast::ExprVstoreUniq) => { RvalueDatumExpr } @@ -3344,9 +3338,10 @@ pub fn field_idx_strict(tcx: ty::ctxt, name: ast::Name, fields: &[field]) -> uint { let mut i = 0u; for f in fields.iter() { if f.ident.name == name { return i; } i += 1u; } + let string = token::get_ident(name); tcx.sess.bug(format!( "No field named `{}` found in the list of fields `{:?}`", - token::interner_get(name), + string.get(), fields.map(|f| tcx.sess.str_of(f.ident)))); } @@ -4165,7 +4160,7 @@ pub fn each_attr(tcx: ctxt, did: DefId, f: |@MetaItem| -> bool) -> bool { pub fn has_attr(tcx: ctxt, did: DefId, attr: &str) -> bool { let mut found = false; each_attr(tcx, did, |item| { - if attr == item.name() { + if item.name().equiv(&attr) { found = true; false } else { @@ -4211,7 +4206,7 @@ pub fn lookup_field_type(tcx: ctxt, Some(&ty_param_bounds_and_ty {ty, ..}) => ty, None => { let tpt = csearch::get_field_type(tcx, struct_id, id); - tcache.get().insert(id, tpt); + tcache.get().insert(id, tpt.clone()); tpt.ty } } @@ -4419,7 +4414,7 @@ pub fn normalize_ty(cx: ctxt, t: t) -> t { fn fold_vstore(&mut self, vstore: vstore) -> vstore { match vstore { - vstore_fixed(..) | vstore_uniq | vstore_box => vstore, + vstore_fixed(..) | vstore_uniq => vstore, vstore_slice(_) => vstore_slice(ReStatic) } } @@ -4834,7 +4829,7 @@ pub fn trait_method_of_method(tcx: ctxt, /// Creates a hash of the type `t` which will be the same no matter what crate /// context it's calculated within. 
This is used by the `type_id` intrinsic. -pub fn hash_crate_independent(tcx: ctxt, t: t, local_hash: @str) -> u64 { +pub fn hash_crate_independent(tcx: ctxt, t: t, local_hash: ~str) -> u64 { use std::hash::{SipState, Streaming}; let mut hash = SipState::new(0, 0); @@ -4856,7 +4851,6 @@ pub fn hash_crate_independent(tcx: ctxt, t: t, local_hash: @str) -> u64 { match v { vstore_fixed(_) => hash.input([0]), vstore_uniq => hash.input([1]), - vstore_box => hash.input([2]), vstore_slice(r) => { hash.input([3]); region(hash, r); @@ -4865,7 +4859,7 @@ pub fn hash_crate_independent(tcx: ctxt, t: t, local_hash: @str) -> u64 { }; let did = |hash: &mut SipState, did: DefId| { let h = if ast_util::is_local(did) { - local_hash + local_hash.clone() } else { tcx.sess.cstore.get_crate_hash(did.crate) }; diff --git a/src/librustc/middle/ty_fold.rs b/src/librustc/middle/ty_fold.rs index e322792c99680..63d6e2ae4428a 100644 --- a/src/librustc/middle/ty_fold.rs +++ b/src/librustc/middle/ty_fold.rs @@ -202,7 +202,6 @@ pub fn super_fold_vstore(this: &mut T, match vstore { ty::vstore_fixed(i) => ty::vstore_fixed(i), ty::vstore_uniq => ty::vstore_uniq, - ty::vstore_box => ty::vstore_box, ty::vstore_slice(r) => ty::vstore_slice(this.fold_region(r)), } } diff --git a/src/librustc/middle/typeck/astconv.rs b/src/librustc/middle/typeck/astconv.rs index 9f44aa1a0237e..90abdc5ac502f 100644 --- a/src/librustc/middle/typeck/astconv.rs +++ b/src/librustc/middle/typeck/astconv.rs @@ -170,7 +170,7 @@ fn ast_path_substs( // If the type is parameterized by the this region, then replace this // region with the current anon region binding (in other words, // whatever & would get replaced with). - let expected_num_region_params = decl_generics.region_param_defs.len(); + let expected_num_region_params = decl_generics.region_param_defs().len(); let supplied_num_region_params = path.segments.last().unwrap().lifetimes.len(); let regions = if expected_num_region_params == supplied_num_region_params { path.segments.last().unwrap().lifetimes.map( @@ -197,8 +197,8 @@ fn ast_path_substs( // Convert the type parameters supplied by the user. let supplied_ty_param_count = path.segments.iter().flat_map(|s| s.types.iter()).len(); - let formal_ty_param_count = decl_generics.type_param_defs.len(); - let required_ty_param_count = decl_generics.type_param_defs.iter() + let formal_ty_param_count = decl_generics.type_param_defs().len(); + let required_ty_param_count = decl_generics.type_param_defs().iter() .take_while(|x| x.default.is_none()) .len(); if supplied_ty_param_count < required_ty_param_count { @@ -228,7 +228,7 @@ fn ast_path_substs( ~"provided type arguments with defaults"); } - let defaults = decl_generics.type_param_defs.slice_from(supplied_ty_param_count) + let defaults = decl_generics.type_param_defs().slice_from(supplied_ty_param_count) .iter().map(|&x| x.default.unwrap()); let tps = path.segments.iter().flat_map(|s| s.types.iter()) .map(|&a_t| ast_ty_to_ty(this, rscope, a_t)) @@ -384,6 +384,23 @@ pub fn ast_ty_to_ty( ty::mt {ty: ast_ty_to_ty(this, rscope, mt.ty), mutbl: mt.mutbl} } + enum PointerTy { + Box, + VStore(ty::vstore) + } + impl PointerTy { + fn expect_vstore(&self, tcx: ty::ctxt, span: Span, ty: &str) -> ty::vstore { + match *self { + Box => { + tcx.sess.span_err(span, format!("managed {} are not supported", ty)); + // everything can be ~, so this is a worth substitute + ty::vstore_uniq + } + VStore(vst) => vst + } + } + } + // Handle @, ~, and & being able to mean strs and vecs. 
// If a_seq_ty is a str or a vec, make it a str/vec. // Also handle first-class trait types. @@ -392,17 +409,18 @@ pub fn ast_ty_to_ty( this: &AC, rscope: &RS, a_seq_ty: &ast::MutTy, - vst: ty::vstore, + ptr_ty: PointerTy, constr: |ty::mt| -> ty::t) -> ty::t { let tcx = this.tcx(); - debug!("mk_pointer(vst={:?})", vst); + debug!("mk_pointer(ptr_ty={:?})", ptr_ty); match a_seq_ty.ty.node { ast::TyVec(ty) => { + let vst = ptr_ty.expect_vstore(tcx, a_seq_ty.ty.span, "vectors"); let mut mt = ast_ty_to_mt(this, rscope, ty); if a_seq_ty.mutbl == ast::MutMutable { - mt = ty::mt { ty: mt.ty, mutbl: a_seq_ty.mutbl }; + mt.mutbl = ast::MutMutable; } debug!("&[]: vst={:?}", vst); return ty::mk_vec(tcx, mt, vst); @@ -413,20 +431,22 @@ pub fn ast_ty_to_ty( // will run after this as long as the path isn't a trait. let def_map = tcx.def_map.borrow(); match def_map.get().find(&id) { - Some(&ast::DefPrimTy(ast::TyStr)) if a_seq_ty.mutbl == ast::MutImmutable => { + Some(&ast::DefPrimTy(ast::TyStr)) if + a_seq_ty.mutbl == ast::MutImmutable => { check_path_args(tcx, path, NO_TPS | NO_REGIONS); + let vst = ptr_ty.expect_vstore(tcx, path.span, "strings"); return ty::mk_str(tcx, vst); } Some(&ast::DefTrait(trait_def_id)) => { let result = ast_path_to_trait_ref( this, rscope, trait_def_id, None, path); - let trait_store = match vst { - ty::vstore_box => ty::BoxTraitStore, - ty::vstore_uniq => ty::UniqTraitStore, - ty::vstore_slice(r) => { + let trait_store = match ptr_ty { + Box => ty::BoxTraitStore, + VStore(ty::vstore_uniq) => ty::UniqTraitStore, + VStore(ty::vstore_slice(r)) => { ty::RegionTraitStore(r) } - ty::vstore_fixed(..) => { + VStore(ty::vstore_fixed(..)) => { tcx.sess.span_err( path.span, "@trait, ~trait or &trait are the only supported \ @@ -474,12 +494,11 @@ pub fn ast_ty_to_ty( ast::TyBot => ty::mk_bot(), ast::TyBox(ty) => { let mt = ast::MutTy { ty: ty, mutbl: ast::MutImmutable }; - mk_pointer(this, rscope, &mt, ty::vstore_box, - |tmt| ty::mk_box(tcx, tmt.ty)) + mk_pointer(this, rscope, &mt, Box, |tmt| ty::mk_box(tcx, tmt.ty)) } ast::TyUniq(ty) => { let mt = ast::MutTy { ty: ty, mutbl: ast::MutImmutable }; - mk_pointer(this, rscope, &mt, ty::vstore_uniq, + mk_pointer(this, rscope, &mt, VStore(ty::vstore_uniq), |tmt| ty::mk_uniq(tcx, tmt.ty)) } ast::TyVec(ty) => { @@ -493,7 +512,7 @@ pub fn ast_ty_to_ty( ast::TyRptr(ref region, ref mt) => { let r = opt_ast_region_to_region(this, rscope, ast_ty.span, region); debug!("ty_rptr r={}", r.repr(this.tcx())); - mk_pointer(this, rscope, mt, ty::vstore_slice(r), + mk_pointer(this, rscope, mt, VStore(ty::vstore_slice(r)), |tmt| ty::mk_rptr(tcx, r, tmt)) } ast::TyTup(ref fields) => { diff --git a/src/librustc/middle/typeck/check/_match.rs b/src/librustc/middle/typeck/check/_match.rs index 9303bf80208a7..97b07186f4e09 100644 --- a/src/librustc/middle/typeck/check/_match.rs +++ b/src/librustc/middle/typeck/check/_match.rs @@ -147,7 +147,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path, ty::enum_variant_with_id(tcx, enm, var); let var_tpt = ty::lookup_item_type(tcx, var); vinfo.args.map(|t| { - if var_tpt.generics.type_param_defs.len() == + if var_tpt.generics.type_param_defs().len() == expected_substs.tps.len() { ty::subst(tcx, expected_substs, *t) @@ -172,7 +172,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path, None); fcx.write_error(pat.id); kind_name = "[error]"; - arg_types = (*subpats).clone() + arg_types = subpats.clone() .unwrap_or_default() .map(|_| ty::mk_err()); } @@ -221,7 +221,7 @@ pub fn 
check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path, None); fcx.write_error(pat.id); kind_name = "[error]"; - arg_types = (*subpats).clone() + arg_types = subpats.clone() .unwrap_or_default() .map(|_| ty::mk_err()); } @@ -339,9 +339,11 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt, if found_fields.contains(&i) { continue; } + + let string = token::get_ident(field.name); tcx.sess.span_err(span, format!("pattern does not mention field `{}`", - token::interner_get(field.name))); + string.get())); } } } @@ -602,7 +604,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) { ty::ty_vec(mt, vstore) => { let region_var = match vstore { ty::vstore_slice(r) => r, - ty::vstore_box | ty::vstore_uniq | ty::vstore_fixed(_) => { + ty::vstore_uniq | ty::vstore_fixed(_) => { default_region_var } }; @@ -697,4 +699,3 @@ pub fn check_pointer_pat(pcx: &pat_ctxt, #[deriving(Eq)] enum PointerKind { Send, Borrowed } - diff --git a/src/librustc/middle/typeck/check/method.rs b/src/librustc/middle/typeck/check/method.rs index 398b4cca015b4..d32d51c251a86 100644 --- a/src/librustc/middle/typeck/check/method.rs +++ b/src/librustc/middle/typeck/check/method.rs @@ -555,8 +555,10 @@ impl<'a> LookupContext<'a> { return; // already visited } } + + let method_name = token::get_ident(self.m_name); debug!("push_candidates_from_impl: {} {} {}", - token::interner_get(self.m_name), + method_name.get(), impl_info.ident.repr(self.tcx()), impl_info.methods.map(|m| m.ident).repr(self.tcx())); @@ -697,7 +699,6 @@ impl<'a> LookupContext<'a> { let tcx = self.tcx(); let sty = ty::get(self_ty).sty.clone(); match sty { - ty_vec(mt, vstore_box) | ty_vec(mt, vstore_uniq) | ty_vec(mt, vstore_slice(_)) | // NDM(#3148) ty_vec(mt, vstore_fixed(_)) => { @@ -726,7 +727,6 @@ impl<'a> LookupContext<'a> { }) } - ty_str(vstore_box) | ty_str(vstore_uniq) | ty_str(vstore_fixed(_)) => { let entry = self.search_for_some_kind_of_autorefd_method( @@ -952,7 +952,7 @@ impl<'a> LookupContext<'a> { // If they were not explicitly supplied, just construct fresh // type variables. let num_supplied_tps = self.supplied_tps.len(); - let num_method_tps = candidate.method_ty.generics.type_param_defs.len(); + let num_method_tps = candidate.method_ty.generics.type_param_defs().len(); let m_substs = { if num_supplied_tps == 0u { self.fcx.infcx().next_ty_vars(num_method_tps) diff --git a/src/librustc/middle/typeck/check/mod.rs b/src/librustc/middle/typeck/check/mod.rs index 7e8fa4e66713c..43179aa3c928a 100644 --- a/src/librustc/middle/typeck/check/mod.rs +++ b/src/librustc/middle/typeck/check/mod.rs @@ -564,7 +564,7 @@ pub fn check_item(ccx: @CrateCtxt, it: &ast::Item) { let param_env = ty::construct_parameter_environment( ccx.tcx, None, - *fn_tpt.generics.type_param_defs, + fn_tpt.generics.type_param_defs(), [], [], body.id); @@ -674,9 +674,9 @@ fn check_method_body(ccx: @CrateCtxt, ty::construct_parameter_environment( ccx.tcx, self_bound, - *item_generics.type_param_defs, - *method_generics.type_param_defs, - item_generics.region_param_defs, + item_generics.type_param_defs(), + method_generics.type_param_defs(), + item_generics.region_param_defs(), method.body.id); // Compute the fty from point of view of inside fn @@ -776,7 +776,7 @@ fn compare_impl_method(tcx: ty::ctxt, debug!("compare_impl_method()"); let infcx = infer::new_infer_ctxt(tcx); - let impl_tps = impl_generics.type_param_defs.len(); + let impl_tps = impl_generics.type_param_defs().len(); // Try to give more informative error messages about self typing // mismatches. 
Note that any mismatch will also be detected @@ -812,8 +812,8 @@ fn compare_impl_method(tcx: ty::ctxt, } } - let num_impl_m_type_params = impl_m.generics.type_param_defs.len(); - let num_trait_m_type_params = trait_m.generics.type_param_defs.len(); + let num_impl_m_type_params = impl_m.generics.type_param_defs().len(); + let num_trait_m_type_params = trait_m.generics.type_param_defs().len(); if num_impl_m_type_params != num_trait_m_type_params { tcx.sess.span_err( impl_m_span, @@ -838,10 +838,10 @@ fn compare_impl_method(tcx: ty::ctxt, return; } - for (i, trait_param_def) in trait_m.generics.type_param_defs.iter().enumerate() { - // For each of the corresponding impl ty param's bounds... - let impl_param_def = &impl_m.generics.type_param_defs[i]; + let it = trait_m.generics.type_param_defs().iter() + .zip(impl_m.generics.type_param_defs().iter()); + for (i, (trait_param_def, impl_param_def)) in it.enumerate() { // Check that the impl does not require any builtin-bounds // that the trait does not guarantee: let extra_bounds = @@ -886,15 +886,15 @@ fn compare_impl_method(tcx: ty::ctxt, // impl type is "&'a str", then this would replace the self // type with a free region `self`. let dummy_impl_tps: ~[ty::t] = - impl_generics.type_param_defs.iter().enumerate(). + impl_generics.type_param_defs().iter().enumerate(). map(|(i,t)| ty::mk_param(tcx, i, t.def_id)). collect(); let dummy_method_tps: ~[ty::t] = - impl_m.generics.type_param_defs.iter().enumerate(). + impl_m.generics.type_param_defs().iter().enumerate(). map(|(i,t)| ty::mk_param(tcx, i + impl_tps, t.def_id)). collect(); let dummy_impl_regions: OptVec = - impl_generics.region_param_defs.iter(). + impl_generics.region_param_defs().iter(). map(|l| ty::ReFree(ty::FreeRegion { scope_id: impl_m_body_id, bound_region: ty::BrNamed(l.def_id, l.ident)})). @@ -1374,8 +1374,8 @@ pub fn impl_self_ty(vcx: &VtableContext, let (n_tps, n_rps, raw_ty) = { let ity = ty::lookup_item_type(tcx, did); - (ity.generics.type_param_defs.len(), - ity.generics.region_param_defs.len(), + (ity.generics.type_param_defs().len(), + ity.generics.region_param_defs().len(), ity.ty) }; @@ -1419,10 +1419,10 @@ fn generics_of_static_method_container(type_context: ty::ctxt, -> ty::Generics { match provenance { ast::FromTrait(trait_def_id) => { - ty::lookup_trait_def(type_context, trait_def_id).generics + ty::lookup_trait_def(type_context, trait_def_id).generics.clone() } ast::FromImpl(impl_def_id) => { - ty::lookup_item_type(type_context, impl_def_id).generics + ty::lookup_item_type(type_context, impl_def_id).generics.clone() } } } @@ -1485,7 +1485,7 @@ fn check_type_parameter_positions_in_path(function_context: @FnCtxt, // Make sure lifetime parameterization agrees with the trait or // implementation type. - let trait_region_parameter_count = generics.region_param_defs.len(); + let trait_region_parameter_count = generics.region_param_defs().len(); let supplied_region_parameter_count = trait_segment.lifetimes.len(); if trait_region_parameter_count != supplied_region_parameter_count && supplied_region_parameter_count != 0 { @@ -1501,8 +1501,8 @@ fn check_type_parameter_positions_in_path(function_context: @FnCtxt, // Make sure the number of type parameters supplied on the trait // or implementation segment equals the number of type parameters // on the trait or implementation definition. 
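The counting logic touched just below distinguishes the formal number of type parameters from the required number, that is, the prefix of parameter definitions without defaults, and both counts are now read through the `type_param_defs()` accessor. A hedged sketch of that computation as a free function; the helper itself is hypothetical, but it mirrors the `take_while` expression used in these hunks:

```rust
fn required_ty_param_count(generics: &ty::Generics) -> uint {
    generics.type_param_defs()
            .iter()
            .take_while(|x| x.default.is_none())
            .len()
}
```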
- let formal_ty_param_count = generics.type_param_defs.len(); - let required_ty_param_count = generics.type_param_defs.iter() + let formal_ty_param_count = generics.type_param_defs().len(); + let required_ty_param_count = generics.type_param_defs().iter() .take_while(|x| x.default.is_none()) .len(); let supplied_ty_param_count = trait_segment.types.len(); @@ -1517,7 +1517,7 @@ fn check_type_parameter_positions_in_path(function_context: @FnCtxt, } else { "s" }; - let needs = if required_ty_param_count < generics.type_param_defs.len() { + let needs = if required_ty_param_count < generics.type_param_defs().len() { "needs at least" } else { "needs" @@ -1539,7 +1539,7 @@ fn check_type_parameter_positions_in_path(function_context: @FnCtxt, } else { "s" }; - let needs = if required_ty_param_count < generics.type_param_defs.len() { + let needs = if required_ty_param_count < generics.type_param_defs().len() { "needs at most" } else { "needs" @@ -2335,9 +2335,11 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt, fcx.type_error_message( expr.span, |actual| { + let string = token::get_ident(field); format!("attempted to take value of method `{}` on type `{}` \ - (try writing an anonymous function)", - token::interner_get(field), actual) + (try writing an anonymous function)", + string.get(), + actual) }, expr_t, None); } @@ -2346,9 +2348,11 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt, fcx.type_error_message( expr.span, |actual| { + let string = token::get_ident(field); format!("attempted access of field `{}` on type `{}`, \ - but no field with that name was found", - token::interner_get(field), actual) + but no field with that name was found", + string.get(), + actual) }, expr_t, None); } @@ -2428,8 +2432,8 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt, let name = class_field.name; let (_, seen) = *class_field_map.get(&name); if !seen { - missing_fields.push( - ~"`" + token::interner_get(name) + "`"); + let string = token::get_ident(name); + missing_fields.push(~"`" + string.get() + "`"); } } @@ -2461,8 +2465,8 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt, // Look up the number of type parameters and the raw type, and // determine whether the class is region-parameterized. let item_type = ty::lookup_item_type(tcx, class_id); - let type_parameter_count = item_type.generics.type_param_defs.len(); - let region_parameter_count = item_type.generics.region_param_defs.len(); + let type_parameter_count = item_type.generics.type_param_defs().len(); + let region_parameter_count = item_type.generics.region_param_defs().len(); let raw_type = item_type.ty; // Generate the struct type. @@ -2519,8 +2523,8 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt, // Look up the number of type parameters and the raw type, and // determine whether the enum is region-parameterized. let item_type = ty::lookup_item_type(tcx, enum_id); - let type_parameter_count = item_type.generics.type_param_defs.len(); - let region_parameter_count = item_type.generics.region_param_defs.len(); + let type_parameter_count = item_type.generics.type_param_defs().len(); + let region_parameter_count = item_type.generics.region_param_defs().len(); let raw_type = item_type.ty; // Generate the enum type. 
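The `.clone()` calls sprinkled through these typeck hunks are cheap: `Generics` now stores its parameter lists behind `Rc`, so cloning bumps a reference count rather than copying the slices, which is what the old `@~[...]` fields gave for free. A hedged sketch of the intended shape, with field types as introduced in the middle/ty.rs hunk earlier in this patch; `item_generics` is a hypothetical existing `ty::Generics` value:

```rust
let generics = ty::Generics {
    type_param_defs: item_generics.type_param_defs.clone(),     // Rc clone, shares the ~[TypeParameterDef]
    region_param_defs: item_generics.region_param_defs.clone(), // Rc clone, shares the ~[RegionParameterDef]
};
```

Equivalently, since `Generics` derives `Clone`, whole-value clones such as `...generics.clone()` in these hunks amount to two reference-count bumps.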
@@ -3706,8 +3710,8 @@ pub fn instantiate_path(fcx: @FnCtxt, node_id: ast::NodeId) { debug!(">>> instantiate_path"); - let ty_param_count = tpt.generics.type_param_defs.len(); - let ty_param_req = tpt.generics.type_param_defs.iter() + let ty_param_count = tpt.generics.type_param_defs().len(); + let ty_param_req = tpt.generics.type_param_defs().iter() .take_while(|x| x.default.is_none()) .len(); let mut ty_substs_len = 0; @@ -3722,7 +3726,7 @@ pub fn instantiate_path(fcx: @FnCtxt, // determine the region parameters, using the value given by the user // (if any) and otherwise using a fresh region variable - let num_expected_regions = tpt.generics.region_param_defs.len(); + let num_expected_regions = tpt.generics.region_param_defs().len(); let num_supplied_regions = pth.segments.last().unwrap().lifetimes.len(); let regions = if num_expected_regions == num_supplied_regions { pth.segments.last().unwrap().lifetimes.map( @@ -3751,7 +3755,7 @@ pub fn instantiate_path(fcx: @FnCtxt, ast::DefStaticMethod(_, provenance @ ast::FromTrait(_), _) => { let generics = generics_of_static_method_container(fcx.ccx.tcx, provenance); - (ty_param_count - 1, ty_param_req - 1, Some(generics.type_param_defs.len())) + (ty_param_count - 1, ty_param_req - 1, Some(generics.type_param_defs().len())) } _ => (ty_param_count, ty_param_req, None), }; @@ -3796,7 +3800,7 @@ pub fn instantiate_path(fcx: @FnCtxt, // at the appropriate position. let mut result = ~[]; let mut pushed = false; - let defaults = tpt.generics.type_param_defs.iter() + let defaults = tpt.generics.type_param_defs().iter() .enumerate().filter_map(|(i, x)| { match self_parameter_index { Some(index) if index == i => None, @@ -3905,7 +3909,6 @@ pub fn ast_expr_vstore_to_vstore(fcx: @FnCtxt, -> ty::vstore { match v { ast::ExprVstoreUniq => ty::vstore_uniq, - ast::ExprVstoreBox => ty::vstore_box, ast::ExprVstoreSlice | ast::ExprVstoreMutSlice => { match e.node { ast::ExprLit(..) | @@ -4301,7 +4304,7 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: &ast::ForeignItem) { variadic: false} }); let i_ty = ty::lookup_item_type(ccx.tcx, local_def(it.id)); - let i_n_tps = i_ty.generics.type_param_defs.len(); + let i_n_tps = i_ty.generics.type_param_defs().len(); if i_n_tps != n_tps { tcx.sess.span_err(it.span, format!("intrinsic has wrong number \ of type parameters: found {}, \ diff --git a/src/librustc/middle/typeck/check/regionck.rs b/src/librustc/middle/typeck/check/regionck.rs index f134fb3b6ae85..6d46cd38087c7 100644 --- a/src/librustc/middle/typeck/check/regionck.rs +++ b/src/librustc/middle/typeck/check/regionck.rs @@ -1215,9 +1215,7 @@ pub mod guarantor { } ty::ty_box(..) | ty::ty_ptr(..) 
| - ty::ty_vec(_, ty::vstore_box) | - ty::ty_trait(_, _, ty::BoxTraitStore, _, _) | - ty::ty_str(ty::vstore_box) => { + ty::ty_trait(_, _, ty::BoxTraitStore, _, _) => { OtherPointer } ty::ty_closure(ref closure_ty) => { @@ -1301,7 +1299,6 @@ pub mod guarantor { let guarantor1 = match vstore { ty::vstore_fixed(_) | ty::vstore_uniq => guarantor, ty::vstore_slice(r) => Some(r), - ty::vstore_box => None }; link_ref_bindings_in_pats(rcx, before, guarantor1); diff --git a/src/librustc/middle/typeck/check/vtable.rs b/src/librustc/middle/typeck/check/vtable.rs index 0a231be5d459b..4db43b2c91370 100644 --- a/src/librustc/middle/typeck/check/vtable.rs +++ b/src/librustc/middle/typeck/check/vtable.rs @@ -423,7 +423,7 @@ fn search_for_vtable(vcx: &VtableContext, let im_generics = ty::lookup_item_type(tcx, im.did).generics; let subres = lookup_vtables(vcx, location_info, - *im_generics.type_param_defs, &substs, + im_generics.type_param_defs(), &substs, is_early); @@ -688,12 +688,12 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: @FnCtxt, is_early: bool) { let item_ty = ty::lookup_item_type(cx.tcx, did); debug!("early resolve expr: def {:?} {:?}, {:?}, {}", ex.id, did, def, fcx.infcx().ty_to_str(item_ty.ty)); - if has_trait_bounds(*item_ty.generics.type_param_defs) { + if has_trait_bounds(item_ty.generics.type_param_defs()) { debug!("early_resolve_expr: looking up vtables for type params {}", - item_ty.generics.type_param_defs.repr(fcx.tcx())); + item_ty.generics.type_param_defs().repr(fcx.tcx())); let vcx = fcx.vtable_context(); let vtbls = lookup_vtables(&vcx, &location_info_for_expr(ex), - *item_ty.generics.type_param_defs, + item_ty.generics.type_param_defs(), substs, is_early); if !is_early { insert_vtables(fcx, ex.id, vtbls); @@ -717,11 +717,11 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: @FnCtxt, is_early: bool) { Some(type_param_defs) => { debug!("vtable resolution on parameter bounds for method call {}", ex.repr(fcx.tcx())); - if has_trait_bounds(*type_param_defs) { + if has_trait_bounds(*type_param_defs.borrow()) { let substs = fcx.node_ty_substs(callee_id); let vcx = fcx.vtable_context(); let vtbls = lookup_vtables(&vcx, &location_info_for_expr(ex), - *type_param_defs, &substs, is_early); + *type_param_defs.borrow(), &substs, is_early); if !is_early { insert_vtables(fcx, callee_id, vtbls); } @@ -784,9 +784,9 @@ pub fn resolve_impl(ccx: @CrateCtxt, let param_env = ty::construct_parameter_environment( ccx.tcx, None, - *impl_generics.type_param_defs, + impl_generics.type_param_defs(), [], - impl_generics.region_param_defs, + impl_generics.region_param_defs(), impl_item.id); let impl_trait_ref = @impl_trait_ref.subst(ccx.tcx, ¶m_env.free_substs); @@ -800,7 +800,7 @@ pub fn resolve_impl(ccx: @CrateCtxt, let trait_def = ty::lookup_trait_def(ccx.tcx, impl_trait_ref.def_id); let vtbls = lookup_vtables(&vcx, &loc_info, - *trait_def.generics.type_param_defs, + trait_def.generics.type_param_defs(), &impl_trait_ref.substs, false); diff --git a/src/librustc/middle/typeck/coherence.rs b/src/librustc/middle/typeck/coherence.rs index 0dcb3c7329ba5..5ca879fea4a67 100644 --- a/src/librustc/middle/typeck/coherence.rs +++ b/src/librustc/middle/typeck/coherence.rs @@ -47,13 +47,13 @@ use syntax::visit; use std::cell::RefCell; use std::hashmap::HashSet; -use std::result::Ok; +use std::rc::Rc; use std::vec; pub struct UniversalQuantificationResult { monotype: t, type_variables: ~[ty::t], - type_param_defs: @~[ty::TypeParameterDef] + type_param_defs: Rc<~[ty::TypeParameterDef]> } pub fn 
get_base_type(inference_context: @InferCtxt, @@ -356,11 +356,11 @@ impl CoherenceChecker { // construct the polytype for the method based on the method_ty let new_generics = ty::Generics { type_param_defs: - @vec::append( - (*impl_poly_type.generics.type_param_defs).clone(), - *new_method_ty.generics.type_param_defs), + Rc::new(vec::append( + impl_poly_type.generics.type_param_defs().to_owned(), + new_method_ty.generics.type_param_defs())), region_param_defs: - impl_poly_type.generics.region_param_defs + impl_poly_type.generics.region_param_defs.clone() }; let new_polytype = ty::ty_param_bounds_and_ty { generics: new_generics, @@ -449,7 +449,7 @@ impl CoherenceChecker { let polytype_b = self.get_self_type_for_implementation( implementation_b); - if self.polytypes_unify(polytype_a, polytype_b) { + if self.polytypes_unify(polytype_a.clone(), polytype_b) { let session = self.crate_context.tcx.sess; session.span_err( self.span_of_impl(implementation_b), @@ -497,13 +497,13 @@ impl CoherenceChecker { pub fn universally_quantify_polytype(&self, polytype: ty_param_bounds_and_ty) -> UniversalQuantificationResult { - let region_parameter_count = polytype.generics.region_param_defs.len(); + let region_parameter_count = polytype.generics.region_param_defs().len(); let region_parameters = self.inference_context.next_region_vars( infer::BoundRegionInCoherence, region_parameter_count); - let bounds_count = polytype.generics.type_param_defs.len(); + let bounds_count = polytype.generics.type_param_defs().len(); let type_parameters = self.inference_context.next_ty_vars(bounds_count); let substitutions = substs { @@ -518,7 +518,7 @@ impl CoherenceChecker { UniversalQuantificationResult { monotype: monotype, type_variables: substitutions.tps, - type_param_defs: polytype.generics.type_param_defs + type_param_defs: polytype.generics.type_param_defs.clone() } } @@ -770,7 +770,7 @@ pub fn make_substs_for_receiver_types(tcx: ty::ctxt, // determine how many type parameters were declared on the impl let num_impl_type_parameters = { let impl_polytype = ty::lookup_item_type(tcx, impl_id); - impl_polytype.generics.type_param_defs.len() + impl_polytype.generics.type_param_defs().len() }; // determine how many type parameters appear on the trait @@ -778,7 +778,7 @@ pub fn make_substs_for_receiver_types(tcx: ty::ctxt, // the current method type has the type parameters from the trait + method let num_method_type_parameters = - num_trait_type_parameters + method.generics.type_param_defs.len(); + num_trait_type_parameters + method.generics.type_param_defs().len(); // the new method type will have the type parameters from the impl + method let combined_tps = vec::from_fn(num_method_type_parameters, |i| { @@ -789,7 +789,7 @@ pub fn make_substs_for_receiver_types(tcx: ty::ctxt, // replace type parameters that belong to method with another // type parameter, this time with the index adjusted let method_index = i - num_trait_type_parameters; - let type_param_def = &method.generics.type_param_defs[method_index]; + let type_param_def = &method.generics.type_param_defs()[method_index]; let new_index = num_impl_type_parameters + method_index; ty::mk_param(tcx, new_index, type_param_def.def_id) } diff --git a/src/librustc/middle/typeck/collect.rs b/src/librustc/middle/typeck/collect.rs index d9a9d9f8fe123..c1a6e53069efe 100644 --- a/src/librustc/middle/typeck/collect.rs +++ b/src/librustc/middle/typeck/collect.rs @@ -44,6 +44,7 @@ use middle::typeck::{CrateCtxt, lookup_def_tcx, no_params, write_ty_to_tcx}; use util::ppaux; use 
util::ppaux::Repr; +use std::rc::Rc; use std::vec; use syntax::abi::AbiSet; use syntax::ast::{RegionTyParamBound, TraitTyParamBound}; @@ -285,9 +286,9 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) { let dummy_defid = ast::DefId {crate: 0, node: 0}; // Represents [A',B',C'] - let num_trait_bounds = trait_ty_generics.type_param_defs.len(); + let num_trait_bounds = trait_ty_generics.type_param_defs().len(); let non_shifted_trait_tps = vec::from_fn(num_trait_bounds, |i| { - ty::mk_param(tcx, i, trait_ty_generics.type_param_defs[i].def_id) + ty::mk_param(tcx, i, trait_ty_generics.type_param_defs()[i].def_id) }); // Represents [D'] @@ -295,18 +296,18 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) { dummy_defid); // Represents [E',F',G'] - let num_method_bounds = m.generics.type_param_defs.len(); + let num_method_bounds = m.generics.type_param_defs().len(); let shifted_method_tps = vec::from_fn(num_method_bounds, |i| { ty::mk_param(tcx, i + num_trait_bounds + 1, - m.generics.type_param_defs[i].def_id) + m.generics.type_param_defs()[i].def_id) }); // Convert the regions 'a, 'b, 'c defined on the trait into // bound regions on the fn. Note that because these appear in the // bound for `Self` they must be early bound. - let new_early_region_param_defs = trait_ty_generics.region_param_defs; + let new_early_region_param_defs = trait_ty_generics.region_param_defs.clone(); let rps_from_trait = - trait_ty_generics.region_param_defs.iter(). + trait_ty_generics.region_param_defs().iter(). enumerate(). map(|(index,d)| ty::ReEarlyBound(d.def_id.node, index, d.ident)). collect(); @@ -334,7 +335,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) { let mut new_type_param_defs = ~[]; let substd_type_param_defs = trait_ty_generics.type_param_defs.subst(tcx, &substs); - new_type_param_defs.push_all(*substd_type_param_defs); + new_type_param_defs.push_all(*substd_type_param_defs.borrow()); // add in the "self" type parameter let self_trait_def = get_trait_def(ccx, local_def(trait_id)); @@ -351,7 +352,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) { // add in the type parameters from the method let substd_type_param_defs = m.generics.type_param_defs.subst(tcx, &substs); - new_type_param_defs.push_all(*substd_type_param_defs); + new_type_param_defs.push_all(*substd_type_param_defs.borrow()); debug!("static method {} type_param_defs={} ty={}, substs={}", m.def_id.repr(tcx), @@ -363,7 +364,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) { tcache.get().insert(m.def_id, ty_param_bounds_and_ty { generics: ty::Generics { - type_param_defs: @new_type_param_defs, + type_param_defs: Rc::new(new_type_param_defs), region_param_defs: new_early_region_param_defs }, ty: ty @@ -383,7 +384,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) { let trait_self_ty = ty::mk_self(this.tcx, local_def(trait_id)); let fty = astconv::ty_of_method(this, *m_id, *m_purity, trait_self_ty, *m_explicit_self, m_decl); - let num_trait_type_params = trait_generics.type_param_defs.len(); + let num_trait_type_params = trait_generics.type_param_defs().len(); ty::Method::new( *m_ident, // FIXME(#5121) -- distinguish early vs late lifetime params @@ -466,7 +467,7 @@ fn convert_methods(ccx: &CrateCtxt, { let tcx = ccx.tcx; for m in ms.iter() { - let num_rcvr_ty_params = rcvr_ty_generics.type_param_defs.len(); + let num_rcvr_ty_params = rcvr_ty_generics.type_param_defs().len(); let m_ty_generics = ty_generics(ccx, 
&m.generics, num_rcvr_ty_params); let mty = @ty_of_method(ccx, container, @@ -489,10 +490,10 @@ fn convert_methods(ccx: &CrateCtxt, // itself ty_param_bounds_and_ty { generics: ty::Generics { - type_param_defs: @vec::append( - (*rcvr_ty_generics.type_param_defs).clone(), - *m_ty_generics.type_param_defs), - region_param_defs: rcvr_ty_generics.region_param_defs, + type_param_defs: Rc::new(vec::append( + rcvr_ty_generics.type_param_defs().to_owned(), + m_ty_generics.type_param_defs())), + region_param_defs: rcvr_ty_generics.region_param_defs.clone(), }, ty: fty }); @@ -574,7 +575,7 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) { let mut tcache = tcx.tcache.borrow_mut(); tcache.get().insert(local_def(it.id), ty_param_bounds_and_ty { - generics: i_ty_generics, + generics: i_ty_generics.clone(), ty: selfty}); } @@ -637,7 +638,7 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) { { let mut tcache = tcx.tcache.borrow_mut(); - tcache.get().insert(local_def(it.id), tpt); + tcache.get().insert(local_def(it.id), tpt.clone()); } convert_struct(ccx, struct_def, tpt, it.id); @@ -816,7 +817,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item) { let tcache = tcx.tcache.borrow(); match tcache.get().find(&def_id) { - Some(&tpt) => return tpt, + Some(tpt) => return tpt.clone(), _ => {} } } @@ -826,7 +827,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item) let tpt = no_params(typ); let mut tcache = tcx.tcache.borrow_mut(); - tcache.get().insert(local_def(it.id), tpt); + tcache.get().insert(local_def(it.id), tpt.clone()); return tpt; } ast::ItemFn(decl, purity, abi, ref generics, _) => { @@ -838,8 +839,8 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item) decl); let tpt = ty_param_bounds_and_ty { generics: ty::Generics { - type_param_defs: ty_generics.type_param_defs, - region_param_defs: @[], + type_param_defs: ty_generics.type_param_defs.clone(), + region_param_defs: Rc::new(~[]), }, ty: ty::mk_bare_fn(ccx.tcx, tofd) }; @@ -849,14 +850,14 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item) ppaux::ty_to_str(tcx, tpt.ty)); let mut tcache = ccx.tcx.tcache.borrow_mut(); - tcache.get().insert(local_def(it.id), tpt); + tcache.get().insert(local_def(it.id), tpt.clone()); return tpt; } ast::ItemTy(t, ref generics) => { { let mut tcache = tcx.tcache.borrow_mut(); match tcache.get().find(&local_def(it.id)) { - Some(&tpt) => return tpt, + Some(tpt) => return tpt.clone(), None => { } } } @@ -870,7 +871,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item) }; let mut tcache = tcx.tcache.borrow_mut(); - tcache.get().insert(local_def(it.id), tpt); + tcache.get().insert(local_def(it.id), tpt.clone()); return tpt; } ast::ItemEnum(_, ref generics) => { @@ -884,7 +885,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item) }; let mut tcache = tcx.tcache.borrow_mut(); - tcache.get().insert(local_def(it.id), tpt); + tcache.get().insert(local_def(it.id), tpt.clone()); return tpt; } ast::ItemTrait(..) => { @@ -902,7 +903,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item) }; let mut tcache = tcx.tcache.borrow_mut(); - tcache.get().insert(local_def(it.id), tpt); + tcache.get().insert(local_def(it.id), tpt.clone()); return tpt; } ast::ItemImpl(..) 
| ast::ItemMod(_) | @@ -925,8 +926,8 @@ pub fn ty_of_foreign_item(ccx: &CrateCtxt, ast::ForeignItemStatic(t, _) => { ty::ty_param_bounds_and_ty { generics: ty::Generics { - type_param_defs: @~[], - region_param_defs: @[], + type_param_defs: Rc::new(~[]), + region_param_defs: Rc::new(~[]), }, ty: ast_ty_to_ty(ccx, &ExplicitRscope, t) } @@ -938,11 +939,11 @@ pub fn ty_generics(ccx: &CrateCtxt, generics: &ast::Generics, base_index: uint) -> ty::Generics { return ty::Generics { - region_param_defs: generics.lifetimes.iter().map(|l| { + region_param_defs: Rc::new(generics.lifetimes.iter().map(|l| { ty::RegionParameterDef { ident: l.ident, def_id: local_def(l.id) } - }).collect(), - type_param_defs: @generics.ty_params.mapi_to_vec(|offset, param| { + }).collect()), + type_param_defs: Rc::new(generics.ty_params.mapi_to_vec(|offset, param| { let existing_def_opt = { let ty_param_defs = ccx.tcx.ty_param_defs.borrow(); ty_param_defs.get().find(¶m.id).map(|def| *def) @@ -969,7 +970,7 @@ pub fn ty_generics(ccx: &CrateCtxt, def } } - }) + })) }; fn compute_bounds( @@ -1040,7 +1041,7 @@ pub fn ty_of_foreign_fn_decl(ccx: &CrateCtxt, }; let mut tcache = ccx.tcx.tcache.borrow_mut(); - tcache.get().insert(def_id, tpt); + tcache.get().insert(def_id, tpt.clone()); return tpt; } @@ -1049,11 +1050,11 @@ pub fn mk_item_substs(ccx: &CrateCtxt, self_ty: Option) -> ty::substs { let params: ~[ty::t] = - ty_generics.type_param_defs.iter().enumerate().map( + ty_generics.type_param_defs().iter().enumerate().map( |(i, t)| ty::mk_param(ccx.tcx, i, t.def_id)).collect(); let regions: OptVec = - ty_generics.region_param_defs.iter().enumerate().map( + ty_generics.region_param_defs().iter().enumerate().map( |(i, l)| ty::ReEarlyBound(l.def_id.node, i, l.ident)).collect(); substs {regions: ty::NonerasedRegions(regions), diff --git a/src/librustc/middle/typeck/infer/coercion.rs b/src/librustc/middle/typeck/infer/coercion.rs index 6a4de959c2bb9..adf36cf973bac 100644 --- a/src/librustc/middle/typeck/infer/coercion.rs +++ b/src/librustc/middle/typeck/infer/coercion.rs @@ -67,7 +67,7 @@ we may want to adjust precisely when coercions occur. 
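The typeck changes above all follow one pattern: `Generics` now holds its parameter definitions behind an `Rc`, call sites clone the handle instead of copying an `@`-box, and the new `type_param_defs()`/`region_param_defs()` accessors hand out borrowed slices. A minimal sketch of that pattern, using hypothetical stand-in types and written in current Rust syntax rather than the pre-1.0 dialect of the patch:

```rust
use std::rc::Rc;

// Hypothetical stand-ins for the compiler's types, only to show the shape
// of the refactoring; they are not the real definitions.
struct TypeParameterDef {
    name: String,
}

#[derive(Clone)]
struct Generics {
    type_param_defs: Rc<Vec<TypeParameterDef>>,
}

impl Generics {
    // Accessor handing out a borrowed slice, mirroring the new
    // `type_param_defs()` calls that replace `*generics.type_param_defs`.
    fn type_param_defs(&self) -> &[TypeParameterDef] {
        self.type_param_defs.as_slice()
    }
}

fn main() {
    let g = Generics {
        type_param_defs: Rc::new(vec![TypeParameterDef { name: "T".to_string() }]),
    };
    let g2 = g.clone(); // bumps a reference count instead of copying the list
    assert_eq!(g.type_param_defs().len(), g2.type_param_defs().len());
    assert_eq!(g.type_param_defs()[0].name, "T");
}
```

Cloning the `Rc` is also what lets the `tcache` lookups above return `tpt.clone()` cheaply where they previously copied a managed pointer.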
use middle::ty::{AutoPtr, AutoBorrowVec, AutoBorrowFn, AutoBorrowObj}; use middle::ty::{AutoDerefRef}; -use middle::ty::{vstore_slice, vstore_box, vstore_uniq}; +use middle::ty::{vstore_slice, vstore_uniq}; use middle::ty::{mt}; use middle::ty; use middle::typeck::infer::{CoerceResult, resolve_type, Coercion}; @@ -272,7 +272,6 @@ impl Coerce { b.inf_str(self.get_ref().infcx)); match *sty_a { - ty::ty_str(vstore_box) | ty::ty_str(vstore_uniq) => {} _ => { return self.subtype(a, b); diff --git a/src/librustc/middle/typeck/mod.rs b/src/librustc/middle/typeck/mod.rs index 8837a978f439a..a6fc91899f067 100644 --- a/src/librustc/middle/typeck/mod.rs +++ b/src/librustc/middle/typeck/mod.rs @@ -70,6 +70,7 @@ use util::ppaux; use std::cell::RefCell; use std::hashmap::HashMap; +use std::rc::Rc; use std::result; use extra::list::List; use extra::list; @@ -271,8 +272,8 @@ pub fn lookup_def_ccx(ccx: &CrateCtxt, sp: Span, id: ast::NodeId) pub fn no_params(t: ty::t) -> ty::ty_param_bounds_and_ty { ty::ty_param_bounds_and_ty { - generics: ty::Generics {type_param_defs: @~[], - region_param_defs: @[]}, + generics: ty::Generics {type_param_defs: Rc::new(~[]), + region_param_defs: Rc::new(~[])}, ty: t } } diff --git a/src/librustc/middle/typeck/variance.rs b/src/librustc/middle/typeck/variance.rs index 83be5fd2a305a..51364addfed23 100644 --- a/src/librustc/middle/typeck/variance.rs +++ b/src/librustc/middle/typeck/variance.rs @@ -736,7 +736,7 @@ impl<'a> ConstraintContext<'a> { self.add_constraints_from_region(r, contra); } - ty::vstore_fixed(_) | ty::vstore_uniq | ty::vstore_box => { + ty::vstore_fixed(_) | ty::vstore_uniq => { } } } @@ -750,7 +750,7 @@ impl<'a> ConstraintContext<'a> { variance: VarianceTermPtr<'a>) { debug!("add_constraints_from_substs(def_id={:?})", def_id); - for (i, p) in generics.type_param_defs.iter().enumerate() { + for (i, p) in generics.type_param_defs().iter().enumerate() { let variance_decl = self.declared_variance(p.def_id, def_id, TypeParam, i); let variance_i = self.xform(variance, variance_decl); @@ -760,7 +760,7 @@ impl<'a> ConstraintContext<'a> { match substs.regions { ty::ErasedRegions => {} ty::NonerasedRegions(ref rps) => { - for (i, p) in generics.region_param_defs.iter().enumerate() { + for (i, p) in generics.region_param_defs().iter().enumerate() { let variance_decl = self.declared_variance(p.def_id, def_id, RegionParam, i); let variance_i = self.xform(variance, variance_decl); diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index f391239df3304..eba99c7fb5a02 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -260,7 +260,6 @@ pub fn vstore_to_str(cx: ctxt, vs: ty::vstore) -> ~str { match vs { ty::vstore_fixed(n) => format!("{}", n), ty::vstore_uniq => ~"~", - ty::vstore_box => ~"@", ty::vstore_slice(r) => region_ptr_to_str(cx, r) } } @@ -522,11 +521,11 @@ pub fn parameterized(cx: ctxt, } let generics = if is_trait { - ty::lookup_trait_def(cx, did).generics + ty::lookup_trait_def(cx, did).generics.clone() } else { ty::lookup_item_type(cx, did).generics }; - let ty_params = generics.type_param_defs.iter(); + let ty_params = generics.type_param_defs().iter(); let num_defaults = ty_params.zip(tps.iter()).rev().take_while(|&(def, &actual)| { match def.default { Some(default) => default == actual, @@ -789,8 +788,8 @@ impl Repr for ty::ty_param_bounds_and_ty { impl Repr for ty::Generics { fn repr(&self, tcx: ctxt) -> ~str { format!("Generics(type_param_defs: {}, region_param_defs: {})", - self.type_param_defs.repr(tcx), - 
self.region_param_defs.repr(tcx)) + self.type_param_defs().repr(tcx), + self.region_param_defs().repr(tcx)) } } @@ -824,7 +823,8 @@ impl Repr for ty::Method { impl Repr for ast::Ident { fn repr(&self, _tcx: ctxt) -> ~str { - token::ident_to_str(self).to_owned() + let string = token::get_ident(self.name); + string.get().to_str() } } diff --git a/src/librustdoc/clean.rs b/src/librustdoc/clean.rs index e86122fb7d1e6..8220be1046166 100644 --- a/src/librustdoc/clean.rs +++ b/src/librustdoc/clean.rs @@ -11,8 +11,6 @@ //! This module contains the "cleaned" pieces of the AST, and the functions //! that clean them. -use its = syntax::parse::token::ident_to_str; - use syntax; use syntax::ast; use syntax::ast_map; @@ -20,6 +18,8 @@ use syntax::ast_util; use syntax::attr; use syntax::attr::AttributeMethods; use syntax::codemap::Pos; +use syntax::parse::token::InternedString; +use syntax::parse::token; use rustc::metadata::cstore; use rustc::metadata::csearch; @@ -223,9 +223,13 @@ pub enum Attribute { impl Clean for ast::MetaItem { fn clean(&self) -> Attribute { match self.node { - ast::MetaWord(s) => Word(s.to_owned()), - ast::MetaList(ref s, ref l) => List(s.to_owned(), l.clean()), - ast::MetaNameValue(s, ref v) => NameValue(s.to_owned(), lit_to_str(v)) + ast::MetaWord(ref s) => Word(s.get().to_owned()), + ast::MetaList(ref s, ref l) => { + List(s.get().to_owned(), l.clean()) + } + ast::MetaNameValue(ref s, ref v) => { + NameValue(s.get().to_owned(), lit_to_str(v)) + } } } } @@ -238,21 +242,24 @@ impl Clean for ast::Attribute { // This is a rough approximation that gets us what we want. impl<'a> attr::AttrMetaMethods for &'a Attribute { - fn name(&self) -> @str { + fn name(&self) -> InternedString { match **self { - Word(ref n) | List(ref n, _) | NameValue(ref n, _) => - n.to_managed() + Word(ref n) | List(ref n, _) | NameValue(ref n, _) => { + token::intern_and_get_ident(*n) + } } } - fn value_str(&self) -> Option<@str> { + fn value_str(&self) -> Option { match **self { - NameValue(_, ref v) => Some(v.to_managed()), + NameValue(_, ref v) => Some(token::intern_and_get_ident(*v)), _ => None, } } fn meta_item_list<'a>(&'a self) -> Option<&'a [@ast::MetaItem]> { None } - fn name_str_pair(&self) -> Option<(@str, @str)> { None } + fn name_str_pair(&self) -> Option<(InternedString, InternedString)> { + None + } } #[deriving(Clone, Encodable, Decodable)] @@ -867,24 +874,25 @@ impl Clean for ast::PathSegment { } fn path_to_str(p: &ast::Path) -> ~str { - use syntax::parse::token::interner_get; + use syntax::parse::token; let mut s = ~""; let mut first = true; - for i in p.segments.iter().map(|x| interner_get(x.identifier.name)) { + for i in p.segments.iter().map(|x| token::get_ident(x.identifier.name)) { if !first || p.global { s.push_str("::"); } else { first = false; } - s.push_str(i); + s.push_str(i.get()); } s } impl Clean<~str> for ast::Ident { fn clean(&self) -> ~str { - its(self).to_owned() + let string = token::get_ident(self.name); + string.get().to_owned() } } @@ -1030,8 +1038,13 @@ pub enum ViewItemInner { impl Clean for ast::ViewItem_ { fn clean(&self) -> ViewItemInner { match self { - &ast::ViewItemExternMod(ref i, ref p, ref id) => - ExternMod(i.clean(), p.map(|(ref x, _)| x.to_owned()), *id), + &ast::ViewItemExternMod(ref i, ref p, ref id) => { + let string = match *p { + None => None, + Some((ref x, _)) => Some(x.get().to_owned()), + }; + ExternMod(i.clean(), string, *id) + } &ast::ViewItemUse(ref vp) => Import(vp.clean()) } } @@ -1137,14 +1150,14 @@ impl ToSource for syntax::codemap::Span { 
fn lit_to_str(lit: &ast::Lit) -> ~str { match lit.node { - ast::LitStr(st, _) => st.to_owned(), - ast::LitBinary(data) => format!("{:?}", data.as_slice()), + ast::LitStr(ref st, _) => st.get().to_owned(), + ast::LitBinary(ref data) => format!("{:?}", data.borrow().as_slice()), ast::LitChar(c) => ~"'" + std::char::from_u32(c).unwrap().to_str() + "'", ast::LitInt(i, _t) => i.to_str(), ast::LitUint(u, _t) => u.to_str(), ast::LitIntUnsuffixed(i) => i.to_str(), - ast::LitFloat(f, _t) => f.to_str(), - ast::LitFloatUnsuffixed(f) => f.to_str(), + ast::LitFloat(ref f, _t) => f.get().to_str(), + ast::LitFloatUnsuffixed(ref f) => f.get().to_str(), ast::LitBool(b) => b.to_str(), ast::LitNil => ~"", } diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index e4260e367a879..0e2d6c972ae60 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -15,6 +15,7 @@ use rustc::middle::privacy; use syntax::ast; use syntax::diagnostic; +use syntax::parse::token; use syntax::parse; use syntax; @@ -71,7 +72,8 @@ fn get_ast_and_resolve(cpath: &Path, let mut cfg = build_configuration(sess); for cfg_ in cfgs.move_iter() { - cfg.push(@dummy_spanned(ast::MetaWord(cfg_.to_managed()))); + let cfg_ = token::intern_and_get_ident(cfg_); + cfg.push(@dummy_spanned(ast::MetaWord(cfg_))); } let crate = phase_1_parse_input(sess, cfg.clone(), &input); diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index 2885d05a5bd5c..86b6a5734a5d8 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -45,6 +45,7 @@ use extra::arc::Arc; use extra::json::ToJson; use syntax::ast; use syntax::attr; +use syntax::parse::token::InternedString; use clean; use doctree; @@ -803,12 +804,13 @@ impl<'a> Item<'a> { impl<'a> fmt::Default for Item<'a> { fn fmt(it: &Item<'a>, fmt: &mut fmt::Formatter) { match attr::find_stability(it.item.attrs.iter()) { - Some(stability) => { + Some(ref stability) => { write!(fmt.buf, "{lvl}", lvl = stability.level.to_str(), reason = match stability.text { - Some(s) => s, None => @"", + Some(ref s) => (*s).clone(), + None => InternedString::new(""), }); } None => {} diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 9271af9d575ea..12874d1b502a2 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -137,7 +137,7 @@ fn runtest(test: &str, cratename: &str, libs: HashSet) { } } -fn maketest(s: &str, cratename: &str) -> @str { +fn maketest(s: &str, cratename: &str) -> ~str { let mut prog = ~r" #[deny(warnings)]; #[allow(unused_variable, dead_assignment, unused_mut, attribute_usage, dead_code)]; @@ -156,7 +156,7 @@ fn maketest(s: &str, cratename: &str) -> @str { prog.push_str("\n}"); } - return prog.to_managed(); + return prog; } pub struct Collector { diff --git a/src/librustpkg/util.rs b/src/librustpkg/util.rs index 1f8962fbd3af0..ba31699a7d04e 100644 --- a/src/librustpkg/util.rs +++ b/src/librustpkg/util.rs @@ -30,6 +30,8 @@ use syntax::ext::base::{ExtCtxt, MacroCrate}; use syntax::{ast, attr, codemap, diagnostic, fold, visit}; use syntax::attr::AttrMetaMethods; use syntax::fold::Folder; +use syntax::parse::token::InternedString; +use syntax::parse::token; use syntax::visit::Visitor; use syntax::util::small_vector::SmallVector; use syntax::crateid::CrateId; @@ -77,7 +79,7 @@ fn fold_mod(m: &ast::Mod, fold: &mut CrateSetup) -> ast::Mod { fn strip_main(item: @ast::Item) -> @ast::Item { @ast::Item { attrs: item.attrs.iter().filter_map(|attr| { - if "main" != attr.name() { + if !attr.name().equiv(&("main")) { Some(*attr) } else { 
None @@ -101,13 +103,15 @@ fn fold_item(item: @ast::Item, fold: &mut CrateSetup) let mut had_pkg_do = false; for attr in item.attrs.iter() { - if "pkg_do" == attr.name() { + if attr.name().equiv(&("pkg_do")) { had_pkg_do = true; match attr.node.value.node { ast::MetaList(_, ref mis) => { for mi in mis.iter() { match mi.node { - ast::MetaWord(cmd) => cmds.push(cmd.to_owned()), + ast::MetaWord(ref cmd) => { + cmds.push(cmd.get().to_owned()) + } _ => {} }; } @@ -314,7 +318,9 @@ pub fn compile_input(context: &BuildContext, if !attr::contains_name(crate.attrs, "crate_id") { // FIXME (#9639): This needs to handle non-utf8 paths let crateid_attr = - attr::mk_name_value_item_str(@"crate_id", crate_id.to_str().to_managed()); + attr::mk_name_value_item_str( + InternedString::new("crate_id"), + token::intern_and_get_ident(crate_id.to_str())); debug!("crateid attr: {:?}", crateid_attr); crate.attrs.push(attr::mk_attr(crateid_attr)); @@ -466,13 +472,14 @@ impl<'a> CrateInstaller<'a> { match vi.node { // ignore metadata, I guess - ast::ViewItemExternMod(lib_ident, path_opt, _) => { - let lib_name = match path_opt { - Some((p, _)) => p, - None => self.sess.str_of(lib_ident) + ast::ViewItemExternMod(ref lib_ident, ref path_opt, _) => { + let lib_name = match *path_opt { + Some((ref p, _)) => (*p).clone(), + None => token::get_ident(lib_ident.name), }; debug!("Finding and installing... {}", lib_name); - let crate_id: CrateId = from_str(lib_name).expect("valid crate id"); + let crate_id: CrateId = + from_str(lib_name.get()).expect("valid crate id"); // Check standard Rust library path first let whatever = system_library(&self.context.sysroot_to_use(), &crate_id); debug!("system library returned {:?}", whatever); @@ -642,7 +649,7 @@ pub fn find_and_install_dependencies(installer: &mut CrateInstaller, visit::walk_crate(installer, c, ()) } -pub fn mk_string_lit(s: @str) -> ast::Lit { +pub fn mk_string_lit(s: InternedString) -> ast::Lit { Spanned { node: ast::LitStr(s, ast::CookedStr), span: DUMMY_SP diff --git a/src/libstd/at_vec.rs b/src/libstd/at_vec.rs deleted file mode 100644 index 55e90248e1c3b..0000000000000 --- a/src/libstd/at_vec.rs +++ /dev/null @@ -1,424 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Operations on managed vectors (`@[T]` type) - -use clone::Clone; -use container::Container; -use iter::{Iterator, FromIterator}; -use option::{Option, Some, None}; -use mem; -use unstable::raw::Repr; -use vec::{ImmutableVector, OwnedVector}; - -/// Code for dealing with @-vectors. This is pretty incomplete, and -/// contains a bunch of duplication from the code for ~-vectors. - -/// Returns the number of elements the vector can hold without reallocating -#[inline] -pub fn capacity(v: @[T]) -> uint { - unsafe { - let managed_box = v.repr(); - (*managed_box).data.alloc / mem::size_of::() - } -} - -/** - * Builds a vector by calling a provided function with an argument - * function that pushes an element to the back of a vector. - * The initial size for the vector may optionally be specified - * - * # Arguments - * - * * size - An option, maybe containing initial size of the vector to reserve - * * builder - A function that will construct the vector. 
It receives - * as an argument a function that will push an element - * onto the vector being constructed. - */ -#[inline] -pub fn build(size: Option, builder: |push: |v: A||) -> @[A] { - let mut vec = @[]; - unsafe { raw::reserve(&mut vec, size.unwrap_or(4)); } - builder(|x| unsafe { raw::push(&mut vec, x) }); - vec -} - -// Appending - -/// Iterates over the `rhs` vector, copying each element and appending it to the -/// `lhs`. Afterwards, the `lhs` is then returned for use again. -#[inline] -pub fn append(lhs: @[T], rhs: &[T]) -> @[T] { - build(Some(lhs.len() + rhs.len()), |push| { - for x in lhs.iter() { - push((*x).clone()); - } - for elt in rhs.iter() { - push(elt.clone()); - } - }) -} - - -/// Apply a function to each element of a vector and return the results -#[inline] -pub fn map(v: &[T], f: |x: &T| -> U) -> @[U] { - build(Some(v.len()), |push| { - for elem in v.iter() { - push(f(elem)); - } - }) -} - -/** - * Creates and initializes an immutable vector. - * - * Creates an immutable vector of size `n_elts` and initializes the elements - * to the value returned by the function `op`. - */ -#[inline] -pub fn from_fn(n_elts: uint, op: |uint| -> T) -> @[T] { - build(Some(n_elts), |push| { - let mut i: uint = 0u; - while i < n_elts { push(op(i)); i += 1u; } - }) -} - -/** - * Creates and initializes an immutable vector. - * - * Creates an immutable vector of size `n_elts` and initializes the elements - * to the value `t`. - */ -#[inline] -pub fn from_elem(n_elts: uint, t: T) -> @[T] { - build(Some(n_elts), |push| { - let mut i: uint = 0u; - while i < n_elts { - push(t.clone()); - i += 1u; - } - }) -} - -/** - * Creates and initializes an immutable managed vector by moving all the - * elements from an owned vector. - */ -#[inline] -pub fn to_managed_move(v: ~[T]) -> @[T] { - let mut av = @[]; - unsafe { - raw::reserve(&mut av, v.len()); - for x in v.move_iter() { - raw::push(&mut av, x); - } - av - } -} - -/** - * Creates and initializes an immutable managed vector by copying all the - * elements of a slice. - */ -#[inline] -pub fn to_managed(v: &[T]) -> @[T] { - from_fn(v.len(), |i| v[i].clone()) -} - -impl Clone for @[T] { - fn clone(&self) -> @[T] { - *self - } -} - -impl FromIterator for @[A] { - #[inline] - fn from_iterator>(iterator: &mut T) -> @[A] { - let (lower, _) = iterator.size_hint(); - build(Some(lower), |push| { - for x in *iterator { - push(x); - } - }) - } -} - -#[cfg(not(test))] -#[allow(missing_doc)] -pub mod traits { - use at_vec::append; - use clone::Clone; - use ops::Add; - use vec::Vector; - - impl<'a,T:Clone, V: Vector> Add for @[T] { - #[inline] - fn add(&self, rhs: &V) -> @[T] { - append(*self, rhs.as_slice()) - } - } -} - -#[cfg(test)] -pub mod traits {} - -#[allow(missing_doc)] -pub mod raw { - use at_vec::capacity; - use cast; - use cast::{transmute, transmute_copy}; - use container::Container; - use option::None; - use mem; - use num::next_power_of_two; - use ptr; - use unstable::intrinsics::{move_val_init, TyDesc}; - use unstable::intrinsics; - use unstable::raw::{Box, Vec}; - - /** - * Sets the length of a vector - * - * This will explicitly set the size of the vector, without actually - * modifying its buffers, so it is up to the caller to ensure that - * the vector is actually the specified size. - */ - #[inline] - pub unsafe fn set_len(v: &mut @[T], new_len: uint) { - let repr: *mut Box> = cast::transmute_copy(v); - (*repr).data.fill = new_len * mem::size_of::(); - } - - /** - * Pushes a new value onto this vector. 
- */ - #[inline] - pub unsafe fn push(v: &mut @[T], initval: T) { - let full = { - let repr: *Box> = cast::transmute_copy(v); - (*repr).data.alloc > (*repr).data.fill - }; - if full { - push_fast(v, initval); - } else { - push_slow(v, initval); - } - } - - #[inline] // really pretty please - unsafe fn push_fast(v: &mut @[T], initval: T) { - let repr: *mut Box> = cast::transmute_copy(v); - let amt = v.len(); - (*repr).data.fill += mem::size_of::(); - let p = ptr::offset(&(*repr).data.data as *T, amt as int) as *mut T; - move_val_init(&mut(*p), initval); - } - - #[inline] - unsafe fn push_slow(v: &mut @[T], initval: T) { - reserve_at_least(v, v.len() + 1u); - push_fast(v, initval); - } - - /** - * Reserves capacity for exactly `n` elements in the given vector. - * - * If the capacity for `v` is already equal to or greater than the - * requested capacity, then no action is taken. - * - * # Arguments - * - * * v - A vector - * * n - The number of elements to reserve space for - */ - #[inline] - pub unsafe fn reserve(v: &mut @[T], n: uint) { - // Only make the (slow) call into the runtime if we have to - if capacity(*v) < n { - let ptr: *mut *mut Box> = transmute(v); - let ty = intrinsics::get_tydesc::(); - return reserve_raw(ty, ptr, n); - } - } - - // Implementation detail. Shouldn't be public - #[allow(missing_doc)] - #[inline] - pub fn reserve_raw(ty: *TyDesc, ptr: *mut *mut Box>, n: uint) { - // check for `uint` overflow - unsafe { - if n > (**ptr).data.alloc / (*ty).size { - let alloc = n * (*ty).size; - let total_size = alloc + mem::size_of::>(); - if alloc / (*ty).size != n || total_size < alloc { - fail!("vector size is too large: {}", n); - } - (*ptr) = local_realloc(*ptr as *(), total_size) as *mut Box>; - (**ptr).data.alloc = alloc; - } - } - - #[inline] - fn local_realloc(ptr: *(), size: uint) -> *() { - use rt::local::Local; - use rt::task::Task; - - let mut task = Local::borrow(None::); - task.get().heap.realloc(ptr as *mut Box<()>, size) as *() - } - } - - /** - * Reserves capacity for at least `n` elements in the given vector. - * - * This function will over-allocate in order to amortize the - * allocation costs in scenarios where the caller may need to - * repeatedly reserve additional space. - * - * If the capacity for `v` is already equal to or greater than the - * requested capacity, then no action is taken. 
- * - * # Arguments - * - * * v - A vector - * * n - The number of elements to reserve space for - */ - #[inline] - pub unsafe fn reserve_at_least(v: &mut @[T], n: uint) { - reserve(v, next_power_of_two(n)); - } -} - -#[cfg(test)] -mod test { - use super::*; - use prelude::*; - use bh = extra::test::BenchHarness; - - #[test] - fn test() { - // Some code that could use that, then: - fn seq_range(lo: uint, hi: uint) -> @[uint] { - build(None, |push| { - for i in range(lo, hi) { - push(i); - } - }) - } - - assert_eq!(seq_range(10, 15), @[10, 11, 12, 13, 14]); - assert_eq!(from_fn(5, |x| x+1), @[1, 2, 3, 4, 5]); - assert_eq!(from_elem(5, 3.14), @[3.14, 3.14, 3.14, 3.14, 3.14]); - } - - #[test] - fn append_test() { - assert_eq!(@[1,2,3] + &[4,5,6], @[1,2,3,4,5,6]); - } - - #[test] - fn test_to_managed_move() { - assert_eq!(to_managed_move::(~[]), @[]); - assert_eq!(to_managed_move(~[true]), @[true]); - assert_eq!(to_managed_move(~[1, 2, 3, 4, 5]), @[1, 2, 3, 4, 5]); - assert_eq!(to_managed_move(~[~"abc", ~"123"]), @[~"abc", ~"123"]); - assert_eq!(to_managed_move(~[~[42]]), @[~[42]]); - } - - #[test] - fn test_to_managed() { - assert_eq!(to_managed::([]), @[]); - assert_eq!(to_managed([true]), @[true]); - assert_eq!(to_managed([1, 2, 3, 4, 5]), @[1, 2, 3, 4, 5]); - assert_eq!(to_managed([@"abc", @"123"]), @[@"abc", @"123"]); - assert_eq!(to_managed([@[42]]), @[@[42]]); - } - - #[bench] - fn bench_capacity(b: &mut bh) { - let x = @[1, 2, 3]; - b.iter(|| { - let _ = capacity(x); - }); - } - - #[bench] - fn bench_build_sized(b: &mut bh) { - let len = 64; - b.iter(|| { - build(Some(len), |push| for i in range(0, 1024) { push(i) }); - }); - } - - #[bench] - fn bench_build(b: &mut bh) { - b.iter(|| { - for i in range(0, 95) { - build(None, |push| push(i)); - } - }); - } - - #[bench] - fn bench_append(b: &mut bh) { - let lhs = @[7, ..128]; - let rhs = range(0, 256).to_owned_vec(); - b.iter(|| { - let _ = append(lhs, rhs); - }) - } - - #[bench] - fn bench_map(b: &mut bh) { - let elts = range(0, 256).to_owned_vec(); - b.iter(|| { - let _ = map(elts, |x| x*2); - }) - } - - #[bench] - fn bench_from_fn(b: &mut bh) { - b.iter(|| { - let _ = from_fn(1024, |x| x); - }); - } - - #[bench] - fn bench_from_elem(b: &mut bh) { - b.iter(|| { - let _ = from_elem(1024, 0u64); - }); - } - - #[bench] - fn bench_to_managed_move(b: &mut bh) { - b.iter(|| { - let elts = range(0, 1024).to_owned_vec(); // yikes! 
can't move out of capture, though - to_managed_move(elts); - }) - } - - #[bench] - fn bench_to_managed(b: &mut bh) { - let elts = range(0, 1024).to_owned_vec(); - b.iter(|| { - let _ = to_managed(elts); - }); - } - - #[bench] - fn bench_clone(b: &mut bh) { - let elts = to_managed(range(0, 1024).to_owned_vec()); - b.iter(|| { - let _ = elts.clone(); - }); - } -} diff --git a/src/libstd/fmt/mod.rs b/src/libstd/fmt/mod.rs index 111eb70eb204a..13e6d80809584 100644 --- a/src/libstd/fmt/mod.rs +++ b/src/libstd/fmt/mod.rs @@ -1167,7 +1167,6 @@ delegate!( u8 to Unsigned) delegate!( u16 to Unsigned) delegate!( u32 to Unsigned) delegate!( u64 to Unsigned) -delegate!(@str to String) delegate!(~str to String) delegate!(&'a str to String) delegate!(bool to Bool) diff --git a/src/libstd/gc.rs b/src/libstd/gc.rs index 8ec07290a3164..fa7c94ac9948a 100644 --- a/src/libstd/gc.rs +++ b/src/libstd/gc.rs @@ -35,7 +35,6 @@ pub struct Gc { } #[cfg(test)] -#[no_send] pub struct Gc { priv ptr: @T, priv marker: marker::NoSend, diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs index d7a7011319ae9..adce11fed2dad 100644 --- a/src/libstd/lib.rs +++ b/src/libstd/lib.rs @@ -114,7 +114,6 @@ pub mod tuple; pub mod vec; pub mod vec_ng; -pub mod at_vec; pub mod str; pub mod ascii; diff --git a/src/libstd/path/mod.rs b/src/libstd/path/mod.rs index 11f23b22c51fa..c5482811a94d6 100644 --- a/src/libstd/path/mod.rs +++ b/src/libstd/path/mod.rs @@ -604,19 +604,6 @@ impl BytesContainer for ~str { fn is_str(_: Option<~str>) -> bool { true } } -impl BytesContainer for @str { - #[inline] - fn container_as_bytes<'a>(&'a self) -> &'a [u8] { - self.as_bytes() - } - #[inline] - fn container_as_str<'a>(&'a self) -> Option<&'a str> { - Some(self.as_slice()) - } - #[inline] - fn is_str(_: Option<@str>) -> bool { true } -} - impl<'a> BytesContainer for &'a [u8] { #[inline] fn container_as_bytes<'a>(&'a self) -> &'a [u8] { @@ -635,13 +622,6 @@ impl BytesContainer for ~[u8] { } } -impl BytesContainer for @[u8] { - #[inline] - fn container_as_bytes<'a>(&'a self) -> &'a [u8] { - self.as_slice() - } -} - impl BytesContainer for CString { #[inline] fn container_as_bytes<'a>(&'a self) -> &'a [u8] { diff --git a/src/libstd/path/posix.rs b/src/libstd/path/posix.rs index 707ba18378a83..ba0cd0bb521dc 100644 --- a/src/libstd/path/posix.rs +++ b/src/libstd/path/posix.rs @@ -807,8 +807,6 @@ mod tests { #[test] fn test_push_many() { - use to_man = at_vec::to_managed_move; - macro_rules! t( (s: $path:expr, $push:expr, $exp:expr) => ( { @@ -830,12 +828,9 @@ mod tests { t!(s: "a/b/c", ["d", "/e"], "/e"); t!(s: "a/b/c", ["d", "/e", "f"], "/e/f"); t!(s: "a/b/c", [~"d", ~"e"], "a/b/c/d/e"); - t!(s: "a/b/c", [@"d", @"e"], "a/b/c/d/e"); t!(v: b!("a/b/c"), [b!("d"), b!("e")], b!("a/b/c/d/e")); t!(v: b!("a/b/c"), [b!("d"), b!("/e"), b!("f")], b!("/e/f")); t!(v: b!("a/b/c"), [b!("d").to_owned(), b!("e").to_owned()], b!("a/b/c/d/e")); - t!(v: b!("a/b/c"), [to_man(b!("d").to_owned()), to_man(b!("e").to_owned())], - b!("a/b/c/d/e")); } #[test] @@ -917,8 +912,6 @@ mod tests { #[test] fn test_join_many() { - use to_man = at_vec::to_managed_move; - macro_rules! 
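With `at_vec.rs` deleted, the `capacity`/`reserve`/`push` helpers its comments describe have no `@[T]` counterpart left; the owned vector type covers the same ground. A rough equivalent in current syntax (`Vec<T>`, the successor of `~[T]`), shown only for orientation and not as part of the patch:

```rust
fn main() {
    // Owned-vector equivalents of the removed @[T] helpers.
    let mut v: Vec<u32> = Vec::with_capacity(4); // pre-allocate the backing store
    assert!(v.capacity() >= 4);
    v.reserve(16); // make room for at least 16 more elements
    v.push(42);    // no unsafe raw::push needed
    assert_eq!(v.len(), 1);
}
```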
t( (s: $path:expr, $join:expr, $exp:expr) => ( { @@ -940,11 +933,8 @@ mod tests { t!(s: "a/b/c", ["..", "d"], "a/b/d"); t!(s: "a/b/c", ["d", "/e", "f"], "/e/f"); t!(s: "a/b/c", [~"d", ~"e"], "a/b/c/d/e"); - t!(s: "a/b/c", [@"d", @"e"], "a/b/c/d/e"); t!(v: b!("a/b/c"), [b!("d"), b!("e")], b!("a/b/c/d/e")); t!(v: b!("a/b/c"), [b!("d").to_owned(), b!("e").to_owned()], b!("a/b/c/d/e")); - t!(v: b!("a/b/c"), [to_man(b!("d").to_owned()), to_man(b!("e").to_owned())], - b!("a/b/c/d/e")); } #[test] diff --git a/src/libstd/path/windows.rs b/src/libstd/path/windows.rs index a07471afc1a67..eec6f37b627a2 100644 --- a/src/libstd/path/windows.rs +++ b/src/libstd/path/windows.rs @@ -1587,8 +1587,6 @@ mod tests { #[test] fn test_push_many() { - use to_man = at_vec::to_managed_move; - macro_rules! t( (s: $path:expr, $push:expr, $exp:expr) => ( { @@ -1610,12 +1608,9 @@ mod tests { t!(s: "a\\b\\c", ["d", "\\e"], "\\e"); t!(s: "a\\b\\c", ["d", "\\e", "f"], "\\e\\f"); t!(s: "a\\b\\c", [~"d", ~"e"], "a\\b\\c\\d\\e"); - t!(s: "a\\b\\c", [@"d", @"e"], "a\\b\\c\\d\\e"); t!(v: b!("a\\b\\c"), [b!("d"), b!("e")], b!("a\\b\\c\\d\\e")); t!(v: b!("a\\b\\c"), [b!("d"), b!("\\e"), b!("f")], b!("\\e\\f")); t!(v: b!("a\\b\\c"), [b!("d").to_owned(), b!("e").to_owned()], b!("a\\b\\c\\d\\e")); - t!(v: b!("a\\b\\c"), [to_man(b!("d").to_owned()), to_man(b!("e").to_owned())], - b!("a\\b\\c\\d\\e")); } #[test] @@ -1732,8 +1727,6 @@ mod tests { #[test] fn test_join_many() { - use to_man = at_vec::to_managed_move; - macro_rules! t( (s: $path:expr, $join:expr, $exp:expr) => ( { @@ -1755,11 +1748,8 @@ mod tests { t!(s: "a\\b\\c", ["..", "d"], "a\\b\\d"); t!(s: "a\\b\\c", ["d", "\\e", "f"], "\\e\\f"); t!(s: "a\\b\\c", [~"d", ~"e"], "a\\b\\c\\d\\e"); - t!(s: "a\\b\\c", [@"d", @"e"], "a\\b\\c\\d\\e"); t!(v: b!("a\\b\\c"), [b!("d"), b!("e")], b!("a\\b\\c\\d\\e")); t!(v: b!("a\\b\\c"), [b!("d").to_owned(), b!("e").to_owned()], b!("a\\b\\c\\d\\e")); - t!(v: b!("a\\b\\c"), [to_man(b!("d").to_owned()), to_man(b!("e").to_owned())], - b!("a\\b\\c\\d\\e")); } #[test] diff --git a/src/libstd/reflect.rs b/src/libstd/reflect.rs index 87655f5911fe7..d6b4d3f565629 100644 --- a/src/libstd/reflect.rs +++ b/src/libstd/reflect.rs @@ -183,9 +183,6 @@ impl TyVisitor for MovePtrAdaptor { } fn visit_estr_box(&mut self) -> bool { - self.align_to::<@str>(); - if ! self.inner.visit_estr_box() { return false; } - self.bump_past::<@str>(); true } @@ -253,10 +250,7 @@ impl TyVisitor for MovePtrAdaptor { true } - fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool { - self.align_to::<@[u8]>(); - if ! 
self.inner.visit_evec_box(mtbl, inner) { return false; } - self.bump_past::<@[u8]>(); + fn visit_evec_box(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } diff --git a/src/libstd/repr.rs b/src/libstd/repr.rs index 1ecc31ec2f4e9..41ddf02778768 100644 --- a/src/libstd/repr.rs +++ b/src/libstd/repr.rs @@ -272,10 +272,7 @@ impl<'a> TyVisitor for ReprVisitor<'a> { } fn visit_estr_box(&mut self) -> bool { - self.get::<@str>(|this, s| { - this.writer.write(['@' as u8]); - this.write_escaped_slice(*s); - }) + true } fn visit_estr_uniq(&mut self) -> bool { @@ -628,7 +625,6 @@ fn test_repr() { exact_test(&false, "false"); exact_test(&1.234, "1.234f64"); exact_test(&(&"hello"), "\"hello\""); - exact_test(&(@"hello"), "@\"hello\""); exact_test(&(~"he\u10f3llo"), "~\"he\\u10f3llo\""); exact_test(&(@10), "@10"); @@ -641,12 +637,6 @@ fn test_repr() { exact_test(&(0 as *mut ()), "(0x0 as *mut ())"); exact_test(&(1,), "(1,)"); - exact_test(&(@[1,2,3,4,5,6,7,8]), - "@[1, 2, 3, 4, 5, 6, 7, 8]"); - exact_test(&(@[1u8,2u8,3u8,4u8]), - "@[1u8, 2u8, 3u8, 4u8]"); - exact_test(&(@["hi", "there"]), - "@[\"hi\", \"there\"]"); exact_test(&(~["hi", "there"]), "~[\"hi\", \"there\"]"); exact_test(&(&["hi", "there"]), diff --git a/src/libstd/rt/local_heap.rs b/src/libstd/rt/local_heap.rs index 42a7e7867f94d..79936b4afad0a 100644 --- a/src/libstd/rt/local_heap.rs +++ b/src/libstd/rt/local_heap.rs @@ -332,6 +332,6 @@ mod bench { #[bench] fn alloc_managed_big(bh: &mut BenchHarness) { - bh.iter(|| { @[10, ..1000]; }); + bh.iter(|| { @([10, ..1000]); }); } } diff --git a/src/libstd/send_str.rs b/src/libstd/send_str.rs index 2599a74a748cb..b075b75b70acc 100644 --- a/src/libstd/send_str.rs +++ b/src/libstd/send_str.rs @@ -185,7 +185,6 @@ mod tests { assert_eq!(s.len(), 5); assert_eq!(s.as_slice(), "abcde"); assert_eq!(s.to_str(), ~"abcde"); - assert!(s.equiv(&@"abcde")); assert!(s.lt(&SendStrOwned(~"bcdef"))); assert_eq!(SendStrStatic(""), Default::default()); @@ -193,7 +192,6 @@ mod tests { assert_eq!(o.len(), 5); assert_eq!(o.as_slice(), "abcde"); assert_eq!(o.to_str(), ~"abcde"); - assert!(o.equiv(&@"abcde")); assert!(o.lt(&SendStrStatic("bcdef"))); assert_eq!(SendStrOwned(~""), Default::default()); diff --git a/src/libstd/str.rs b/src/libstd/str.rs index 3cc199ce19571..0315e560bb783 100644 --- a/src/libstd/str.rs +++ b/src/libstd/str.rs @@ -17,46 +17,35 @@ Unicode string manipulation (`str` type) Rust's string type is one of the core primitive types of the language. While represented by the name `str`, the name `str` is not actually a valid type in Rust. Each string must also be decorated with its ownership. This means that -there are three common kinds of strings in rust: +there are two common kinds of strings in rust: * `~str` - This is an owned string. This type obeys all of the normal semantics of the `~T` types, meaning that it has one, and only one, owner. This type cannot be implicitly copied, and is moved out of when passed to other functions. -* `@str` - This is a managed string. Similarly to `@T`, this type can be - implicitly copied, and each implicit copy will increment the - reference count to the string. This means that there is no "true - owner" of the string, and the string will be deallocated when the - reference count reaches 0. - -* `&str` - Finally, this is the borrowed string type. This type of string can - only be created from one of the other two kinds of strings. As the - name "borrowed" implies, this type of string is owned elsewhere, and - this string cannot be moved out of. 
+* `&str` - This is the borrowed string type. This type of string can only be + created from the other kind of string. As the name "borrowed" + implies, this type of string is owned elsewhere, and this string + cannot be moved out of. As an example, here's a few different kinds of strings. ```rust -#[feature(managed_boxes)]; - fn main() { let owned_string = ~"I am an owned string"; - let managed_string = @"This string is garbage-collected"; let borrowed_string1 = "This string is borrowed with the 'static lifetime"; let borrowed_string2: &str = owned_string; // owned strings can be borrowed - let borrowed_string3: &str = managed_string; // managed strings can also be borrowed } ``` -From the example above, you can see that rust has 3 different kinds of string -literals. The owned/managed literals correspond to the owned/managed string -types, but the "borrowed literal" is actually more akin to C's concept of a -static string. +From the example above, you can see that rust has 2 different kinds of string +literals. The owned literals correspond to the owned string types, but the +"borrowed literal" is actually more akin to C's concept of a static string. -When a string is declared without a `~` or `@` sigil, then the string is -allocated statically in the rodata of the executable/library. The string then -has the type `&'static str` meaning that the string is valid for the `'static` +When a string is declared without a `~` sigil, then the string is allocated +statically in the rodata of the executable/library. The string then has the +type `&'static str` meaning that the string is valid for the `'static` lifetime, otherwise known as the lifetime of the entire program. As can be inferred from the type, these static strings are not mutable. @@ -89,11 +78,9 @@ The actual representation of strings have direct mappings to vectors: * `~str` is the same as `~[u8]` * `&str` is the same as `&[u8]` -* `@str` is the same as `@[u8]` */ -use at_vec; use cast; use cast::transmute; use char; @@ -157,16 +144,6 @@ impl<'a> ToStr for &'a str { fn to_str(&self) -> ~str { self.to_owned() } } -impl ToStr for @str { - #[inline] - fn to_str(&self) -> ~str { self.to_owned() } -} - -impl<'a> FromStr for @str { - #[inline] - fn from_str(s: &str) -> Option<@str> { Some(s.to_managed()) } -} - /// Convert a byte to a UTF-8 string /// /// # Failure @@ -1140,11 +1117,6 @@ pub mod traits { fn cmp(&self, other: &~str) -> Ordering { self.as_slice().cmp(&other.as_slice()) } } - impl TotalOrd for @str { - #[inline] - fn cmp(&self, other: &@str) -> Ordering { self.as_slice().cmp(&other.as_slice()) } - } - impl<'a> Eq for &'a str { #[inline] fn eq(&self, other: & &'a str) -> bool { @@ -1161,13 +1133,6 @@ pub mod traits { } } - impl Eq for @str { - #[inline] - fn eq(&self, other: &@str) -> bool { - eq_slice((*self), (*other)) - } - } - impl<'a> TotalEq for &'a str { #[inline] fn equals(&self, other: & &'a str) -> bool { @@ -1182,13 +1147,6 @@ pub mod traits { } } - impl TotalEq for @str { - #[inline] - fn equals(&self, other: &@str) -> bool { - eq_slice((*self), (*other)) - } - } - impl<'a> Ord for &'a str { #[inline] fn lt(&self, other: & &'a str) -> bool { self.cmp(other) == Less } @@ -1199,21 +1157,11 @@ pub mod traits { fn lt(&self, other: &~str) -> bool { self.cmp(other) == Less } } - impl Ord for @str { - #[inline] - fn lt(&self, other: &@str) -> bool { self.cmp(other) == Less } - } - impl<'a, S: Str> Equiv for &'a str { #[inline] fn equiv(&self, other: &S) -> bool { eq_slice(*self, other.as_slice()) } } - impl<'a, S: Str> 
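The revised `str` docs above leave two string kinds. For orientation only, the same distinction in current syntax (where `~str` has since become `String`); this mirrors the docs' own example and is not part of the patch:

```rust
fn main() {
    let owned: String = String::from("I am an owned string");
    let borrowed_static: &str = "borrowed with the 'static lifetime";
    let borrowed_from_owned: &str = &owned; // owned strings can be borrowed
    println!("{} / {} / {}", owned, borrowed_static, borrowed_from_owned);
}
```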
Equiv for @str { - #[inline] - fn equiv(&self, other: &S) -> bool { eq_slice(*self, other.as_slice()) } - } - impl<'a, S: Str> Equiv for ~str { #[inline] fn equiv(&self, other: &S) -> bool { eq_slice(*self, other.as_slice()) } @@ -1250,16 +1198,6 @@ impl<'a> Str for ~str { fn into_owned(self) -> ~str { self } } -impl<'a> Str for @str { - #[inline] - fn as_slice<'a>(&'a self) -> &'a str { - let s: &'a str = *self; s - } - - #[inline] - fn into_owned(self) -> ~str { self.to_owned() } -} - impl<'a> Container for &'a str { #[inline] fn len(&self) -> uint { @@ -1272,11 +1210,6 @@ impl Container for ~str { fn len(&self) -> uint { self.as_slice().len() } } -impl Container for @str { - #[inline] - fn len(&self) -> uint { self.as_slice().len() } -} - impl Mutable for ~str { /// Remove all content, make the string empty #[inline] @@ -1734,9 +1667,6 @@ pub trait StrSlice<'a> { /// Copy a slice into a new owned str. fn to_owned(&self) -> ~str; - /// Copy a slice into a new managed str. - fn to_managed(&self) -> @str; - /// Converts to a vector of `u16` encoded as UTF-16. fn to_utf16(&self) -> ~[u16]; @@ -2246,14 +2176,6 @@ impl<'a> StrSlice<'a> for &'a str { } } - #[inline] - fn to_managed(&self) -> @str { - unsafe { - let v: *&[u8] = cast::transmute(self); - cast::transmute(at_vec::to_managed(*v)) - } - } - fn to_utf16(&self) -> ~[u16] { let mut u = ~[]; for ch in self.chars() { @@ -2682,20 +2604,6 @@ impl DeepClone for ~str { } } -impl Clone for @str { - #[inline] - fn clone(&self) -> @str { - *self - } -} - -impl DeepClone for @str { - #[inline] - fn deep_clone(&self) -> @str { - *self - } -} - impl FromIterator for ~str { #[inline] fn from_iterator>(iterator: &mut T) -> ~str { @@ -2727,10 +2635,6 @@ impl Default for ~str { fn default() -> ~str { ~"" } } -impl Default for @str { - fn default() -> @str { @"" } -} - #[cfg(test)] mod tests { use iter::AdditiveIterator; @@ -3536,12 +3440,6 @@ mod tests { assert_eq!("\U0001d4ea\r".escape_default(), ~"\\U0001d4ea\\r"); } - #[test] - fn test_to_managed() { - assert_eq!("abc".to_managed(), @"abc"); - assert_eq!("abcdef".slice(1, 5).to_managed(), @"bcde"); - } - #[test] fn test_total_ord() { "1234".cmp(& &"123") == Greater; @@ -3579,15 +3477,12 @@ mod tests { let e = $e; assert_eq!(s1 + s2, e.to_owned()); assert_eq!(s1.to_owned() + s2, e.to_owned()); - assert_eq!(s1.to_managed() + s2, e.to_owned()); } } ); t!("foo", "bar", "foobar"); - t!("foo", @"bar", "foobar"); t!("foo", ~"bar", "foobar"); t!("ศไทย中", "华Việt Nam", "ศไทย中华Việt Nam"); - t!("ศไทย中", @"华Việt Nam", "ศไทย中华Việt Nam"); t!("ศไทย中", ~"华Việt Nam", "ศไทย中华Việt Nam"); } @@ -3874,7 +3769,6 @@ mod tests { } t::<&str>(); - t::<@str>(); t::<~str>(); } @@ -3886,7 +3780,6 @@ mod tests { let s = ~"01234"; assert_eq!(5, sum_len(["012", "", "34"])); - assert_eq!(5, sum_len([@"01", @"2", @"34", @""])); assert_eq!(5, sum_len([~"01", ~"2", ~"34", ~""])); assert_eq!(5, sum_len([s.as_slice()])); } @@ -3957,8 +3850,6 @@ mod tests { fn test_from_str() { let owned: Option<~str> = from_str(&"string"); assert_eq!(owned, Some(~"string")); - let managed: Option<@str> = from_str(&"string"); - assert_eq!(managed, Some(@"string")); } } diff --git a/src/libstd/to_bytes.rs b/src/libstd/to_bytes.rs index 8df028f56d509..4c545de73b48b 100644 --- a/src/libstd/to_bytes.rs +++ b/src/libstd/to_bytes.rs @@ -266,13 +266,6 @@ impl IterBytes for ~[A] { } } -impl IterBytes for @[A] { - #[inline] - fn iter_bytes(&self, lsb0: bool, f: Cb) -> bool { - self.as_slice().iter_bytes(lsb0, f) - } -} - impl<'a> IterBytes for &'a str { #[inline] 
fn iter_bytes(&self, _lsb0: bool, f: Cb) -> bool { @@ -288,13 +281,6 @@ impl IterBytes for ~str { } } -impl IterBytes for @str { - #[inline] - fn iter_bytes(&self, lsb0: bool, f: Cb) -> bool { - self.as_slice().iter_bytes(lsb0, f) - } -} - impl IterBytes for Option { #[inline] fn iter_bytes(&self, lsb0: bool, f: Cb) -> bool { diff --git a/src/libstd/to_str.rs b/src/libstd/to_str.rs index a58b09d8ecde3..87d59f0979194 100644 --- a/src/libstd/to_str.rs +++ b/src/libstd/to_str.rs @@ -159,25 +159,6 @@ impl ToStr for ~[A] { } } -impl ToStr for @[A] { - #[inline] - fn to_str(&self) -> ~str { - let mut acc = ~"["; - let mut first = true; - for elt in self.iter() { - if first { - first = false; - } - else { - acc.push_str(", "); - } - acc.push_str(elt.to_str()); - } - acc.push_char(']'); - acc - } -} - #[cfg(test)] mod tests { use hashmap::HashMap; @@ -195,7 +176,6 @@ mod tests { assert_eq!(false.to_str(), ~"false"); assert_eq!(().to_str(), ~"()"); assert_eq!((~"hi").to_str(), ~"hi"); - assert_eq!((@"hi").to_str(), ~"hi"); } #[test] diff --git a/src/libstd/unstable/raw.rs b/src/libstd/unstable/raw.rs index 8aee26c24b299..63208b3f2d707 100644 --- a/src/libstd/unstable/raw.rs +++ b/src/libstd/unstable/raw.rs @@ -56,10 +56,8 @@ pub trait Repr { impl<'a, T> Repr> for &'a [T] {} impl<'a> Repr> for &'a str {} impl Repr<*Box> for @T {} -impl Repr<*Box>> for @[T] {} impl Repr<*Vec> for ~[T] {} impl Repr<*String> for ~str {} -impl Repr<*Box> for @str {} // sure would be nice to have this // impl Repr<*Vec> for ~[T] {} diff --git a/src/libstd/vec.rs b/src/libstd/vec.rs index 15cd5ce334323..2679ef0d46e8e 100644 --- a/src/libstd/vec.rs +++ b/src/libstd/vec.rs @@ -646,13 +646,6 @@ pub mod traits { fn ne(&self, other: &~[T]) -> bool { !self.eq(other) } } - impl Eq for @[T] { - #[inline] - fn eq(&self, other: &@[T]) -> bool { self.as_slice() == *other } - #[inline] - fn ne(&self, other: &@[T]) -> bool { !self.eq(other) } - } - impl<'a,T:TotalEq> TotalEq for &'a [T] { fn equals(&self, other: & &'a [T]) -> bool { self.len() == other.len() && @@ -665,11 +658,6 @@ pub mod traits { fn equals(&self, other: &~[T]) -> bool { self.as_slice().equals(&other.as_slice()) } } - impl TotalEq for @[T] { - #[inline] - fn equals(&self, other: &@[T]) -> bool { self.as_slice().equals(&other.as_slice()) } - } - impl<'a,T:Eq, V: Vector> Equiv for &'a [T] { #[inline] fn equiv(&self, other: &V) -> bool { self.as_slice() == other.as_slice() } @@ -680,11 +668,6 @@ pub mod traits { fn equiv(&self, other: &V) -> bool { self.as_slice() == other.as_slice() } } - impl<'a,T:Eq, V: Vector> Equiv for @[T] { - #[inline] - fn equiv(&self, other: &V) -> bool { self.as_slice() == other.as_slice() } - } - impl<'a,T:TotalOrd> TotalOrd for &'a [T] { fn cmp(&self, other: & &'a [T]) -> Ordering { order::cmp(self.iter(), other.iter()) @@ -696,11 +679,6 @@ pub mod traits { fn cmp(&self, other: &~[T]) -> Ordering { self.as_slice().cmp(&other.as_slice()) } } - impl TotalOrd for @[T] { - #[inline] - fn cmp(&self, other: &@[T]) -> Ordering { self.as_slice().cmp(&other.as_slice()) } - } - impl<'a, T: Eq + Ord> Ord for &'a [T] { fn lt(&self, other: & &'a [T]) -> bool { order::lt(self.iter(), other.iter()) @@ -730,17 +708,6 @@ pub mod traits { fn gt(&self, other: &~[T]) -> bool { self.as_slice() > other.as_slice() } } - impl Ord for @[T] { - #[inline] - fn lt(&self, other: &@[T]) -> bool { self.as_slice() < other.as_slice() } - #[inline] - fn le(&self, other: &@[T]) -> bool { self.as_slice() <= other.as_slice() } - #[inline] - fn ge(&self, other: &@[T]) -> bool { 
self.as_slice() >= other.as_slice() } - #[inline] - fn gt(&self, other: &@[T]) -> bool { self.as_slice() > other.as_slice() } - } - impl<'a,T:Clone, V: Vector> Add for &'a [T] { #[inline] fn add(&self, rhs: &V) -> ~[T] { @@ -778,11 +745,6 @@ impl Vector for ~[T] { fn as_slice<'a>(&'a self) -> &'a [T] { let v: &'a [T] = *self; v } } -impl Vector for @[T] { - #[inline(always)] - fn as_slice<'a>(&'a self) -> &'a [T] { let v: &'a [T] = *self; v } -} - impl<'a, T> Container for &'a [T] { /// Returns the length of a vector #[inline] @@ -833,15 +795,6 @@ impl CloneableVector for ~[T] { fn into_owned(self) -> ~[T] { self } } -/// Extension methods for managed vectors -impl CloneableVector for @[T] { - #[inline] - fn to_owned(&self) -> ~[T] { self.as_slice().to_owned() } - - #[inline(always)] - fn into_owned(self) -> ~[T] { self.to_owned() } -} - /// Extension methods for vectors pub trait ImmutableVector<'a, T> { /** @@ -2629,10 +2582,6 @@ impl Default for ~[A] { fn default() -> ~[A] { ~[] } } -impl Default for @[A] { - fn default() -> @[A] { @[] } -} - macro_rules! iterator { (struct $name:ident -> $ptr:ty, $elem:ty) => { /// An iterator for iterating over a vector. @@ -3109,14 +3058,6 @@ mod tests { assert_eq!(v_b[0], 2); assert_eq!(v_b[1], 3); - // Test on managed heap. - let vec_managed = @[1, 2, 3, 4, 5]; - let v_c = vec_managed.slice(0u, 3u).to_owned(); - assert_eq!(v_c.len(), 3u); - assert_eq!(v_c[0], 1); - assert_eq!(v_c[1], 2); - assert_eq!(v_c[2], 3); - // Test on exchange heap. let vec_unique = ~[1, 2, 3, 4, 5, 6]; let v_d = vec_unique.slice(1u, 6u).to_owned(); @@ -4052,7 +3993,6 @@ mod tests { ); t!(&[int]); - t!(@[int]); t!(~[int]); } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 1513946e401d3..228329cbda12a 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -14,11 +14,13 @@ use codemap::{Span, Spanned, DUMMY_SP}; use abi::AbiSet; use ast_util; use opt_vec::OptVec; -use parse::token::{interner_get, str_to_ident, special_idents}; +use parse::token::{InternedString, special_idents, str_to_ident}; +use parse::token; use std::cell::RefCell; use std::hashmap::HashMap; use std::option::Option; +use std::rc::Rc; use std::to_str::ToStr; use extra::serialize::{Encodable, Decodable, Encoder, Decoder}; @@ -125,7 +127,8 @@ pub type Mrk = u32; impl Encodable for Ident { fn encode(&self, s: &mut S) { - s.emit_str(interner_get(self.name)); + let string = token::get_ident(self.name); + s.emit_str(string.get()); } } @@ -295,9 +298,9 @@ pub type MetaItem = Spanned; #[deriving(Clone, Encodable, Decodable, IterBytes)] pub enum MetaItem_ { - MetaWord(@str), - MetaList(@str, ~[@MetaItem]), - MetaNameValue(@str, Lit), + MetaWord(InternedString), + MetaList(InternedString, ~[@MetaItem]), + MetaNameValue(InternedString, Lit), } // can't be derived because the MetaList requires an unordered comparison @@ -402,19 +405,9 @@ impl ToStr for Sigil { } } -#[deriving(Eq, Encodable, Decodable, IterBytes)] -pub enum Vstore { - // FIXME (#3469): Change uint to @expr (actually only constant exprs) - VstoreFixed(Option), // [1,2,3,4] - VstoreUniq, // ~[1,2,3,4] - VstoreBox, // @[1,2,3,4] - VstoreSlice(Option) // &'foo? 
[1,2,3,4] -} - #[deriving(Clone, Eq, Encodable, Decodable, IterBytes)] pub enum ExprVstore { ExprVstoreUniq, // ~[1,2,3,4] - ExprVstoreBox, // @[1,2,3,4] ExprVstoreSlice, // &[1,2,3,4] ExprVstoreMutSlice, // &mut [1,2,3,4] } @@ -721,14 +714,14 @@ pub type Lit = Spanned; #[deriving(Clone, Eq, Encodable, Decodable, IterBytes)] pub enum Lit_ { - LitStr(@str, StrStyle), - LitBinary(@[u8]), + LitStr(InternedString, StrStyle), + LitBinary(Rc<~[u8]>), LitChar(u32), LitInt(i64, IntTy), LitUint(u64, UintTy), LitIntUnsuffixed(i64), - LitFloat(@str, FloatTy), - LitFloatUnsuffixed(@str), + LitFloat(InternedString, FloatTy), + LitFloatUnsuffixed(InternedString), LitNil, LitBool(bool), } @@ -897,11 +890,11 @@ pub enum AsmDialect { #[deriving(Clone, Eq, Encodable, Decodable, IterBytes)] pub struct InlineAsm { - asm: @str, + asm: InternedString, asm_str_style: StrStyle, - clobbers: @str, - inputs: ~[(@str, @Expr)], - outputs: ~[(@str, @Expr)], + clobbers: InternedString, + inputs: ~[(InternedString, @Expr)], + outputs: ~[(InternedString, @Expr)], volatile: bool, alignstack: bool, dialect: AsmDialect @@ -1074,7 +1067,7 @@ pub enum ViewItem_ { // optional @str: if present, this is a location (containing // arbitrary characters) from which to fetch the crate sources // For example, extern mod whatever = "github.com/mozilla/rust" - ViewItemExternMod(Ident, Option<(@str, StrStyle)>, NodeId), + ViewItemExternMod(Ident, Option<(InternedString,StrStyle)>, NodeId), ViewItemUse(~[@ViewPath]), } diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index bb66d620d2910..89209ab2104f0 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -62,9 +62,10 @@ pub fn path_to_str_with_sep(p: &[PathElem], sep: &str, itr: @IdentInterner) pub fn path_ident_to_str(p: &Path, i: Ident, itr: @IdentInterner) -> ~str { if p.is_empty() { - itr.get(i.name).to_owned() + itr.get(i.name).into_owned() } else { - format!("{}::{}", path_to_str(*p, itr), itr.get(i.name)) + let string = itr.get(i.name); + format!("{}::{}", path_to_str(*p, itr), string.as_slice()) } } @@ -75,7 +76,7 @@ pub fn path_to_str(p: &[PathElem], itr: @IdentInterner) -> ~str { pub fn path_elem_to_str(pe: PathElem, itr: @IdentInterner) -> ~str { match pe { PathMod(s) | PathName(s) | PathPrettyName(s, _) => { - itr.get(s.name).to_owned() + itr.get(s.name).into_owned() } } } @@ -105,7 +106,11 @@ fn pretty_ty(ty: &Ty, itr: @IdentInterner, out: &mut ~str) { // need custom handling. 
TyNil => { out.push_str("$NIL$"); return } TyPath(ref path, _, _) => { - out.push_str(itr.get(path.segments.last().unwrap().identifier.name)); + out.push_str(itr.get(path.segments + .last() + .unwrap() + .identifier + .name).as_slice()); return } TyTup(ref tys) => { @@ -138,7 +143,8 @@ pub fn impl_pretty_name(trait_ref: &Option, ty: &Ty) -> PathElem { match *trait_ref { None => pretty = ~"", Some(ref trait_ref) => { - pretty = itr.get(trait_ref.path.segments.last().unwrap().identifier.name).to_owned(); + pretty = itr.get(trait_ref.path.segments.last().unwrap().identifier.name) + .into_owned(); pretty.push_char('$'); } }; @@ -489,17 +495,21 @@ pub fn node_id_to_str(map: Map, id: NodeId, itr: @IdentInterner) -> ~str { path_ident_to_str(path, item.ident, itr), abi, id) } Some(NodeMethod(m, _, path)) => { + let name = itr.get(m.ident.name); format!("method {} in {} (id={})", - itr.get(m.ident.name), path_to_str(*path, itr), id) + name.as_slice(), path_to_str(*path, itr), id) } Some(NodeTraitMethod(ref tm, _, path)) => { let m = ast_util::trait_method_to_ty_method(&**tm); + let name = itr.get(m.ident.name); format!("method {} in {} (id={})", - itr.get(m.ident.name), path_to_str(*path, itr), id) + name.as_slice(), path_to_str(*path, itr), id) } Some(NodeVariant(ref variant, _, path)) => { + let name = itr.get(variant.node.name.name); format!("variant {} in {} (id={})", - itr.get(variant.node.name.name), path_to_str(*path, itr), id) + name.as_slice(), + path_to_str(*path, itr), id) } Some(NodeExpr(expr)) => { format!("expr {} (id={})", pprust::expr_to_str(expr, itr), id) diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 405de5c5542d0..afedb62105ba7 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -25,7 +25,10 @@ use std::num; pub fn path_name_i(idents: &[Ident]) -> ~str { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") - idents.map(|i| token::interner_get(i.name)).connect("::") + idents.map(|i| { + let string = token::get_ident(i.name); + string.get().to_str() + }).connect("::") } // totally scary function: ignores all but the last element, should have diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index c44861bd7d7f7..78e9d3bd46f8c 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -16,24 +16,26 @@ use codemap::{Span, Spanned, spanned, dummy_spanned}; use codemap::BytePos; use diagnostic::SpanHandler; use parse::comments::{doc_comment_style, strip_doc_comment_decoration}; +use parse::token::InternedString; +use parse::token; use crateid::CrateId; use std::hashmap::HashSet; pub trait AttrMetaMethods { - // This could be changed to `fn check_name(&self, name: @str) -> + // This could be changed to `fn check_name(&self, name: InternedString) -> // bool` which would facilitate a side table recording which // attributes/meta items are used/unused. /// Retrieve the name of the meta item, e.g. foo in #[foo], /// #[foo="bar"] and #[foo(bar)] - fn name(&self) -> @str; + fn name(&self) -> InternedString; /** * Gets the string value if self is a MetaNameValue variant * containing a string, otherwise None. */ - fn value_str(&self) -> Option<@str>; + fn value_str(&self) -> Option; /// Gets a list of inner meta items from a list MetaItem type. 
fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]>; @@ -41,32 +43,36 @@ pub trait AttrMetaMethods { * If the meta item is a name-value type with a string value then returns * a tuple containing the name and string value, otherwise `None` */ - fn name_str_pair(&self) -> Option<(@str, @str)>; + fn name_str_pair(&self) -> Option<(InternedString,InternedString)>; } impl AttrMetaMethods for Attribute { - fn name(&self) -> @str { self.meta().name() } - fn value_str(&self) -> Option<@str> { self.meta().value_str() } + fn name(&self) -> InternedString { self.meta().name() } + fn value_str(&self) -> Option { + self.meta().value_str() + } fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]> { self.node.value.meta_item_list() } - fn name_str_pair(&self) -> Option<(@str, @str)> { self.meta().name_str_pair() } + fn name_str_pair(&self) -> Option<(InternedString,InternedString)> { + self.meta().name_str_pair() + } } impl AttrMetaMethods for MetaItem { - fn name(&self) -> @str { + fn name(&self) -> InternedString { match self.node { - MetaWord(n) => n, - MetaNameValue(n, _) => n, - MetaList(n, _) => n + MetaWord(ref n) => (*n).clone(), + MetaNameValue(ref n, _) => (*n).clone(), + MetaList(ref n, _) => (*n).clone(), } } - fn value_str(&self) -> Option<@str> { + fn value_str(&self) -> Option { match self.node { MetaNameValue(_, ref v) => { match v.node { - ast::LitStr(s, _) => Some(s), + ast::LitStr(ref s, _) => Some((*s).clone()), _ => None, } }, @@ -81,19 +87,21 @@ impl AttrMetaMethods for MetaItem { } } - fn name_str_pair(&self) -> Option<(@str, @str)> { + fn name_str_pair(&self) -> Option<(InternedString,InternedString)> { self.value_str().map(|s| (self.name(), s)) } } // Annoying, but required to get test_cfg to work impl AttrMetaMethods for @MetaItem { - fn name(&self) -> @str { (**self).name() } - fn value_str(&self) -> Option<@str> { (**self).value_str() } + fn name(&self) -> InternedString { (**self).name() } + fn value_str(&self) -> Option { (**self).value_str() } fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]> { (**self).meta_item_list() } - fn name_str_pair(&self) -> Option<(@str, @str)> { (**self).name_str_pair() } + fn name_str_pair(&self) -> Option<(InternedString,InternedString)> { + (**self).name_str_pair() + } } @@ -114,8 +122,10 @@ impl AttributeMethods for Attribute { fn desugar_doc(&self) -> Attribute { if self.node.is_sugared_doc { let comment = self.value_str().unwrap(); - let meta = mk_name_value_item_str(@"doc", - strip_doc_comment_decoration(comment).to_managed()); + let meta = mk_name_value_item_str( + InternedString::new("doc"), + token::intern_and_get_ident(strip_doc_comment_decoration( + comment.get()))); mk_attr(meta) } else { *self @@ -125,20 +135,22 @@ impl AttributeMethods for Attribute { /* Constructors */ -pub fn mk_name_value_item_str(name: @str, value: @str) -> @MetaItem { +pub fn mk_name_value_item_str(name: InternedString, value: InternedString) + -> @MetaItem { let value_lit = dummy_spanned(ast::LitStr(value, ast::CookedStr)); mk_name_value_item(name, value_lit) } -pub fn mk_name_value_item(name: @str, value: ast::Lit) -> @MetaItem { +pub fn mk_name_value_item(name: InternedString, value: ast::Lit) + -> @MetaItem { @dummy_spanned(MetaNameValue(name, value)) } -pub fn mk_list_item(name: @str, items: ~[@MetaItem]) -> @MetaItem { +pub fn mk_list_item(name: InternedString, items: ~[@MetaItem]) -> @MetaItem { @dummy_spanned(MetaList(name, items)) } -pub fn mk_word_item(name: @str) -> @MetaItem { +pub fn mk_word_item(name: InternedString) -> 
@MetaItem { @dummy_spanned(MetaWord(name)) } @@ -150,12 +162,14 @@ pub fn mk_attr(item: @MetaItem) -> Attribute { }) } -pub fn mk_sugared_doc_attr(text: @str, lo: BytePos, hi: BytePos) -> Attribute { - let style = doc_comment_style(text); +pub fn mk_sugared_doc_attr(text: InternedString, lo: BytePos, hi: BytePos) + -> Attribute { + let style = doc_comment_style(text.get()); let lit = spanned(lo, hi, ast::LitStr(text, ast::CookedStr)); let attr = Attribute_ { style: style, - value: @spanned(lo, hi, MetaNameValue(@"doc", lit)), + value: @spanned(lo, hi, MetaNameValue(InternedString::new("doc"), + lit)), is_sugared_doc: true }; spanned(lo, hi, attr) @@ -178,20 +192,22 @@ pub fn contains_name(metas: &[AM], name: &str) -> bool { debug!("attr::contains_name (name={})", name); metas.iter().any(|item| { debug!(" testing: {}", item.name()); - name == item.name() + item.name().equiv(&name) }) } pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str) - -> Option<@str> { + -> Option { attrs.iter() - .find(|at| name == at.name()) + .find(|at| at.name().equiv(&name)) .and_then(|at| at.value_str()) } pub fn last_meta_item_value_str_by_name(items: &[@MetaItem], name: &str) - -> Option<@str> { - items.rev_iter().find(|mi| name == mi.name()).and_then(|i| i.value_str()) + -> Option { + items.rev_iter() + .find(|mi| mi.name().equiv(&name)) + .and_then(|i| i.value_str()) } /* Higher-level applications */ @@ -201,16 +217,16 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] { // human-readable strings. let mut v = items.iter() .map(|&mi| (mi.name(), mi)) - .collect::<~[(@str, @MetaItem)]>(); + .collect::<~[(InternedString, @MetaItem)]>(); - v.sort_by(|&(a, _), &(b, _)| a.cmp(&b)); + v.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b)); // There doesn't seem to be a more optimal way to do this v.move_iter().map(|(_, m)| { match m.node { - MetaList(n, ref mis) => { + MetaList(ref n, ref mis) => { @Spanned { - node: MetaList(n, sort_meta_items(*mis)), + node: MetaList((*n).clone(), sort_meta_items(*mis)), .. /*bad*/ (*m).clone() } } @@ -225,7 +241,7 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] { */ pub fn find_linkage_metas(attrs: &[Attribute]) -> ~[@MetaItem] { let mut result = ~[]; - for attr in attrs.iter().filter(|at| "link" == at.name()) { + for attr in attrs.iter().filter(|at| at.name().equiv(&("link"))) { match attr.meta().node { MetaList(_, ref items) => result.push_all(*items), _ => () @@ -237,7 +253,7 @@ pub fn find_linkage_metas(attrs: &[Attribute]) -> ~[@MetaItem] { pub fn find_crateid(attrs: &[Attribute]) -> Option { match first_attr_value_str_by_name(attrs, "crate_id") { None => None, - Some(id) => from_str::(id), + Some(id) => from_str::(id.get()), } } @@ -254,8 +270,8 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr { // FIXME (#2809)---validate the usage of #[inline] and #[inline] attrs.iter().fold(InlineNone, |ia,attr| { match attr.node.value.node { - MetaWord(n) if "inline" == n => InlineHint, - MetaList(n, ref items) if "inline" == n => { + MetaWord(ref n) if n.equiv(&("inline")) => InlineHint, + MetaList(ref n, ref items) if n.equiv(&("inline")) => { if contains_name(*items, "always") { InlineAlways } else if contains_name(*items, "never") { @@ -284,7 +300,7 @@ pub fn test_cfg> // this doesn't work. 
let some_cfg_matches = metas.any(|mi| { debug!("testing name: {}", mi.name()); - if "cfg" == mi.name() { // it is a #[cfg()] attribute + if mi.name().equiv(&("cfg")) { // it is a #[cfg()] attribute debug!("is cfg"); no_cfgs = false; // only #[cfg(...)] ones are understood. @@ -294,7 +310,8 @@ pub fn test_cfg> cfg_meta.iter().all(|cfg_mi| { debug!("cfg({}[...])", cfg_mi.name()); match cfg_mi.node { - ast::MetaList(s, ref not_cfgs) if "not" == s => { + ast::MetaList(ref s, ref not_cfgs) + if s.equiv(&("not")) => { debug!("not!"); // inside #[cfg(not(...))], so these need to all // not match. @@ -320,7 +337,7 @@ pub fn test_cfg> /// Represents the #[deprecated="foo"] (etc) attributes. pub struct Stability { level: StabilityLevel, - text: Option<@str> + text: Option } /// The available stability levels. @@ -335,9 +352,10 @@ pub enum StabilityLevel { } /// Find the first stability attribute. `None` if none exists. -pub fn find_stability>(mut metas: It) -> Option { +pub fn find_stability>(mut metas: It) + -> Option { for m in metas { - let level = match m.name().as_slice() { + let level = match m.name().get() { "deprecated" => Deprecated, "experimental" => Experimental, "unstable" => Unstable, @@ -360,7 +378,7 @@ pub fn require_unique_names(diagnostic: @SpanHandler, metas: &[@MetaItem]) { for meta in metas.iter() { let name = meta.name(); - if !set.insert(name) { + if !set.insert(name.clone()) { diagnostic.span_fatal(meta.span, format!("duplicate meta item `{}`", name)); } @@ -384,14 +402,14 @@ pub fn find_repr_attr(diagnostic: @SpanHandler, attr: @ast::MetaItem, acc: ReprA -> ReprAttr { let mut acc = acc; match attr.node { - ast::MetaList(s, ref items) if "repr" == s => { + ast::MetaList(ref s, ref items) if s.equiv(&("repr")) => { for item in items.iter() { match item.node { - ast::MetaWord(word) => { - let hint = match word.as_slice() { + ast::MetaWord(ref word) => { + let hint = match word.get() { // Can't use "extern" because it's not a lexical identifier. "C" => ReprExtern, - _ => match int_type_of_word(word) { + _ => match int_type_of_word(word.get()) { Some(ity) => ReprInt(item.span, ity), None => { // Not a word we recognize diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index d4a412bbe9ff2..2ada3ac16ea66 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -160,7 +160,7 @@ pub struct LocWithOpt { pub struct FileMapAndLine {fm: @FileMap, line: uint} pub struct FileMapAndBytePos {fm: @FileMap, pos: BytePos} -#[deriving(IterBytes)] +#[deriving(Clone, IterBytes)] pub enum MacroFormat { // e.g. #[deriving(...)] MacroAttribute, @@ -168,9 +168,9 @@ pub enum MacroFormat { MacroBang } -#[deriving(IterBytes)] +#[deriving(Clone, IterBytes)] pub struct NameAndSpan { - name: @str, + name: ~str, // the format with which the macro was invoked. format: MacroFormat, span: Option @@ -183,7 +183,7 @@ pub struct ExpnInfo { callee: NameAndSpan } -pub type FileName = @str; +pub type FileName = ~str; pub struct FileLines { @@ -206,7 +206,7 @@ pub struct FileMap { /// e.g. 
`` name: FileName, /// The complete source code - src: @str, + src: ~str, /// The start position of this source in the CodeMap start_pos: BytePos, /// Locations of lines beginnings in the source code @@ -267,7 +267,7 @@ impl CodeMap { } } - pub fn new_filemap(&self, filename: FileName, src: @str) -> @FileMap { + pub fn new_filemap(&self, filename: FileName, src: ~str) -> @FileMap { let mut files = self.files.borrow_mut(); let start_pos = match files.get().last() { None => 0, @@ -301,7 +301,7 @@ impl CodeMap { pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt { let loc = self.lookup_char_pos(pos); LocWithOpt { - filename: loc.file.name, + filename: loc.file.name.to_str(), line: loc.line, col: loc.col, file: Some(loc.file) @@ -324,7 +324,7 @@ impl CodeMap { pub fn span_to_filename(&self, sp: Span) -> FileName { let lo = self.lookup_char_pos(sp.lo); - lo.file.name + lo.file.name.to_str() } pub fn span_to_lines(&self, sp: Span) -> @FileLines { @@ -468,7 +468,7 @@ mod test { #[test] fn t1 () { let cm = CodeMap::new(); - let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line"); + let fm = cm.new_filemap(~"blork.rs",~"first line.\nsecond line"); fm.next_line(BytePos(0)); assert_eq!(&fm.get_line(0),&~"first line."); // TESTING BROKEN BEHAVIOR: @@ -480,7 +480,7 @@ mod test { #[should_fail] fn t2 () { let cm = CodeMap::new(); - let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line"); + let fm = cm.new_filemap(~"blork.rs",~"first line.\nsecond line"); // TESTING *REALLY* BROKEN BEHAVIOR: fm.next_line(BytePos(0)); fm.next_line(BytePos(10)); diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs index 021f0d29d9e23..1a3ebf3ce5d1c 100644 --- a/src/libsyntax/ext/asm.rs +++ b/src/libsyntax/ext/asm.rs @@ -17,6 +17,7 @@ use codemap::Span; use ext::base; use ext::base::*; use parse; +use parse::token::InternedString; use parse::token; enum State { @@ -43,7 +44,7 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.cfg(), tts.to_owned()); - let mut asm = @""; + let mut asm = InternedString::new(""); let mut asm_str_style = None; let mut outputs = ~[]; let mut inputs = ~[]; @@ -79,10 +80,10 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let (constraint, _str_style) = p.parse_str(); - if constraint.starts_with("+") { + if constraint.get().starts_with("+") { cx.span_unimpl(p.last_span, "'+' (read+write) output operand constraint modifier"); - } else if !constraint.starts_with("=") { + } else if !constraint.get().starts_with("=") { cx.span_err(p.last_span, "output operand constraint lacks '='"); } @@ -104,9 +105,9 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let (constraint, _str_style) = p.parse_str(); - if constraint.starts_with("=") { + if constraint.get().starts_with("=") { cx.span_err(p.last_span, "input operand constraint contains '='"); - } else if constraint.starts_with("+") { + } else if constraint.get().starts_with("+") { cx.span_err(p.last_span, "input operand constraint contains '+'"); } @@ -137,11 +138,11 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Options => { let (option, _str_style) = p.parse_str(); - if "volatile" == option { + if option.equiv(&("volatile")) { volatile = true; - } else if "alignstack" == option { + } else if option.equiv(&("alignstack")) { alignstack = true; - } else if "intel" == option { + } else if option.equiv(&("intel")) { dialect = ast::AsmIntel; } @@ -191,9 +192,9 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: 
&[ast::TokenTree]) MRExpr(@ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprInlineAsm(ast::InlineAsm { - asm: asm, + asm: token::intern_and_get_ident(asm.get()), asm_str_style: asm_str_style.unwrap(), - clobbers: cons.to_managed(), + clobbers: token::intern_and_get_ident(cons), inputs: inputs, outputs: outputs, volatile: volatile, diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index adf1eabf9d94d..08098b71ce450 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -16,7 +16,7 @@ use ext; use ext::expand; use parse; use parse::token; -use parse::token::{ident_to_str, intern, str_to_ident}; +use parse::token::{InternedString, intern, str_to_ident}; use util::small_vector::SmallVector; use std::hashmap::HashMap; @@ -31,7 +31,7 @@ use std::unstable::dynamic_lib::DynamicLibrary; // ast::MacInvocTT. pub struct MacroDef { - name: @str, + name: ~str, ext: SyntaxExtension } @@ -335,7 +335,8 @@ impl<'a> ExtCtxt<'a> { Some(@ExpnInfo { call_site: Span {lo: cs.lo, hi: cs.hi, expn_info: self.backtrace}, - callee: *callee}); + callee: (*callee).clone() + }); } } } @@ -396,9 +397,6 @@ impl<'a> ExtCtxt<'a> { pub fn set_trace_macros(&mut self, x: bool) { self.trace_mac = x } - pub fn str_of(&self, id: ast::Ident) -> @str { - ident_to_str(&id) - } pub fn ident_of(&self, st: &str) -> ast::Ident { str_to_ident(st) } @@ -407,11 +405,11 @@ impl<'a> ExtCtxt<'a> { /// Extract a string literal from `expr`, emitting `err_msg` if `expr` /// is not a string literal. This does not stop compilation on error, /// merely emits a non-fatal error and returns None. -pub fn expr_to_str(cx: &ExtCtxt, expr: @ast::Expr, - err_msg: &str) -> Option<(@str, ast::StrStyle)> { +pub fn expr_to_str(cx: &ExtCtxt, expr: @ast::Expr, err_msg: &str) + -> Option<(InternedString, ast::StrStyle)> { match expr.node { ast::ExprLit(l) => match l.node { - ast::LitStr(s, style) => return Some((s, style)), + ast::LitStr(ref s, style) => return Some(((*s).clone(), style)), _ => cx.span_err(l.span, err_msg) }, _ => cx.span_err(expr.span, err_msg) @@ -424,7 +422,9 @@ pub fn expr_to_str(cx: &ExtCtxt, expr: @ast::Expr, /// compilation should call /// `cx.parse_sess.span_diagnostic.abort_if_errors()` (this should be /// done as rarely as possible). 
-pub fn check_zero_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree], +pub fn check_zero_tts(cx: &ExtCtxt, + sp: Span, + tts: &[ast::TokenTree], name: &str) { if tts.len() != 0 { cx.span_err(sp, format!("{} takes no arguments", name)); @@ -437,13 +437,16 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree], name: &str) - -> Option<@str> { + -> Option<~str> { if tts.len() != 1 { cx.span_err(sp, format!("{} takes 1 argument.", name)); } else { match tts[0] { ast::TTTok(_, token::LIT_STR(ident)) - | ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => return Some(cx.str_of(ident)), + | ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => { + let interned_str = token::get_ident(ident.name); + return Some(interned_str.get().to_str()) + } _ => cx.span_err(sp, format!("{} requires a string.", name)), } } diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 9ad4f4f7fac2d..c5ee19484668e 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -19,6 +19,7 @@ use fold::Folder; use opt_vec; use opt_vec::OptVec; use parse::token::special_idents; +use parse::token; pub struct Field { ident: ast::Ident, @@ -134,13 +135,13 @@ pub trait AstBuilder { fn expr_vec(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; fn expr_vec_uniq(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; fn expr_vec_slice(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; - fn expr_str(&self, sp: Span, s: @str) -> @ast::Expr; - fn expr_str_uniq(&self, sp: Span, s: @str) -> @ast::Expr; + fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr; + fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr; fn expr_some(&self, sp: Span, expr: @ast::Expr) -> @ast::Expr; fn expr_none(&self, sp: Span) -> @ast::Expr; - fn expr_fail(&self, span: Span, msg: @str) -> @ast::Expr; + fn expr_fail(&self, span: Span, msg: InternedString) -> @ast::Expr; fn expr_unreachable(&self, span: Span) -> @ast::Expr; fn pat(&self, span: Span, pat: ast::Pat_) -> @ast::Pat; @@ -228,9 +229,17 @@ pub trait AstBuilder { fn attribute(&self, sp: Span, mi: @ast::MetaItem) -> ast::Attribute; - fn meta_word(&self, sp: Span, w: @str) -> @ast::MetaItem; - fn meta_list(&self, sp: Span, name: @str, mis: ~[@ast::MetaItem]) -> @ast::MetaItem; - fn meta_name_value(&self, sp: Span, name: @str, value: ast::Lit_) -> @ast::MetaItem; + fn meta_word(&self, sp: Span, w: InternedString) -> @ast::MetaItem; + fn meta_list(&self, + sp: Span, + name: InternedString, + mis: ~[@ast::MetaItem]) + -> @ast::MetaItem; + fn meta_name_value(&self, + sp: Span, + name: InternedString, + value: ast::Lit_) + -> @ast::MetaItem; fn view_use(&self, sp: Span, vis: ast::Visibility, vp: ~[@ast::ViewPath]) -> ast::ViewItem; @@ -581,10 +590,10 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn expr_vec_slice(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr { self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreSlice) } - fn expr_str(&self, sp: Span, s: @str) -> @ast::Expr { + fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr { self.expr_lit(sp, ast::LitStr(s, ast::CookedStr)) } - fn expr_str_uniq(&self, sp: Span, s: @str) -> @ast::Expr { + fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr { self.expr_vstore(sp, self.expr_str(sp, s), ast::ExprVstoreUniq) } @@ -612,7 +621,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.expr_path(none) } - fn expr_fail(&self, span: Span, msg: @str) -> @ast::Expr { + fn expr_fail(&self, span: Span, msg: InternedString) -> @ast::Expr { let loc = 
self.codemap().lookup_char_pos(span.lo); self.expr_call_global( span, @@ -623,13 +632,16 @@ impl<'a> AstBuilder for ExtCtxt<'a> { ], ~[ self.expr_str(span, msg), - self.expr_str(span, loc.file.name), + self.expr_str(span, + token::intern_and_get_ident(loc.file.name)), self.expr_uint(span, loc.line), ]) } fn expr_unreachable(&self, span: Span) -> @ast::Expr { - self.expr_fail(span, @"internal error: entered unreachable code") + self.expr_fail(span, + InternedString::new( + "internal error: entered unreachable code")) } @@ -866,13 +878,21 @@ impl<'a> AstBuilder for ExtCtxt<'a> { }) } - fn meta_word(&self, sp: Span, w: @str) -> @ast::MetaItem { + fn meta_word(&self, sp: Span, w: InternedString) -> @ast::MetaItem { @respan(sp, ast::MetaWord(w)) } - fn meta_list(&self, sp: Span, name: @str, mis: ~[@ast::MetaItem]) -> @ast::MetaItem { + fn meta_list(&self, + sp: Span, + name: InternedString, + mis: ~[@ast::MetaItem]) + -> @ast::MetaItem { @respan(sp, ast::MetaList(name, mis)) } - fn meta_name_value(&self, sp: Span, name: @str, value: ast::Lit_) -> @ast::MetaItem { + fn meta_name_value(&self, + sp: Span, + name: InternedString, + value: ast::Lit_) + -> @ast::MetaItem { @respan(sp, ast::MetaNameValue(name, respan(sp, value))) } diff --git a/src/libsyntax/ext/bytes.rs b/src/libsyntax/ext/bytes.rs index 0c9a23be558c8..6852a0cec33ac 100644 --- a/src/libsyntax/ext/bytes.rs +++ b/src/libsyntax/ext/bytes.rs @@ -31,8 +31,8 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> // expression is a literal ast::ExprLit(lit) => match lit.node { // string literal, push each byte to vector expression - ast::LitStr(s, _) => { - for byte in s.bytes() { + ast::LitStr(ref s, _) => { + for byte in s.get().bytes() { bytes.push(cx.expr_u8(expr.span, byte)); } } diff --git a/src/libsyntax/ext/cfg.rs b/src/libsyntax/ext/cfg.rs index 9af295c0b113c..295c456c9d0bc 100644 --- a/src/libsyntax/ext/cfg.rs +++ b/src/libsyntax/ext/cfg.rs @@ -21,9 +21,10 @@ use ext::base; use ext::build::AstBuilder; use attr; use attr::*; -use parse; -use parse::token; use parse::attr::ParserAttr; +use parse::token::InternedString; +use parse::token; +use parse; pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { let mut p = parse::new_parser_from_tts(cx.parse_sess(), @@ -39,7 +40,7 @@ pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::M } // test_cfg searches for meta items looking like `cfg(foo, ...)` - let in_cfg = &[cx.meta_list(sp, @"cfg", cfgs)]; + let in_cfg = &[cx.meta_list(sp, InternedString::new("cfg"), cfgs)]; let matches_cfg = attr::test_cfg(cx.cfg(), in_cfg.iter().map(|&x| x)); let e = cx.expr_bool(sp, matches_cfg); diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs index 2a68674af952c..c13f9bf92af02 100644 --- a/src/libsyntax/ext/concat.rs +++ b/src/libsyntax/ext/concat.rs @@ -14,6 +14,7 @@ use ast; use codemap; use ext::base; use ext::build::AstBuilder; +use parse::token; pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, sp: codemap::Span, @@ -28,9 +29,10 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, match e.node { ast::ExprLit(lit) => { match lit.node { - ast::LitStr(s, _) | ast::LitFloat(s, _) - | ast::LitFloatUnsuffixed(s) => { - accumulator.push_str(s); + ast::LitStr(ref s, _) | + ast::LitFloat(ref s, _) | + ast::LitFloatUnsuffixed(ref s) => { + accumulator.push_str(s.get()); } ast::LitChar(c) => { accumulator.push_char(char::from_u32(c).unwrap()); @@ -55,5 +57,5 @@ pub fn expand_syntax_ext(cx: &mut 
base::ExtCtxt, } } } - return base::MRExpr(cx.expr_str(sp, accumulator.to_managed())); + base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(accumulator))) } diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 9dcb5b4cb4c2a..e0d53add6489f 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -30,7 +30,10 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } } else { match *e { - ast::TTTok(_, token::IDENT(ident,_)) => res_str.push_str(cx.str_of(ident)), + ast::TTTok(_, token::IDENT(ident,_)) => { + let interned_str = token::get_ident(ident.name); + res_str.push_str(interned_str.get()) + } _ => { cx.span_err(sp, "concat_idents! requires ident args."); return MacResult::dummy_expr(); diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs index a9268d85c9154..019a4dfe7cca4 100644 --- a/src/libsyntax/ext/deriving/decodable.rs +++ b/src/libsyntax/ext/deriving/decodable.rs @@ -18,6 +18,8 @@ use codemap::Span; use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use parse::token::InternedString; +use parse::token; pub fn expand_deriving_decodable(cx: &ExtCtxt, span: Span, @@ -82,10 +84,15 @@ fn decodable_substructure(cx: &ExtCtxt, trait_span: Span, cx.expr_uint(span, field), lambdadecode]) }); - cx.expr_method_call(trait_span, decoder, cx.ident_of("read_struct"), - ~[cx.expr_str(trait_span, cx.str_of(substr.type_ident)), - cx.expr_uint(trait_span, nfields), - cx.lambda_expr_1(trait_span, result, blkarg)]) + cx.expr_method_call(trait_span, + decoder, + cx.ident_of("read_struct"), + ~[ + cx.expr_str(trait_span, + token::get_ident(substr.type_ident.name)), + cx.expr_uint(trait_span, nfields), + cx.lambda_expr_1(trait_span, result, blkarg) + ]) } StaticEnum(_, ref fields) => { let variant = cx.ident_of("i"); @@ -95,7 +102,8 @@ fn decodable_substructure(cx: &ExtCtxt, trait_span: Span, let rvariant_arg = cx.ident_of("read_enum_variant_arg"); for (i, &(name, v_span, ref parts)) in fields.iter().enumerate() { - variants.push(cx.expr_str(v_span, cx.str_of(name))); + variants.push(cx.expr_str(v_span, + token::get_ident(name.name))); let decoded = decode_static_fields(cx, v_span, @@ -120,9 +128,14 @@ fn decodable_substructure(cx: &ExtCtxt, trait_span: Span, let result = cx.expr_method_call(trait_span, blkdecoder, cx.ident_of("read_enum_variant"), ~[variant_vec, lambda]); - cx.expr_method_call(trait_span, decoder, cx.ident_of("read_enum"), - ~[cx.expr_str(trait_span, cx.str_of(substr.type_ident)), - cx.lambda_expr_1(trait_span, result, blkarg)]) + cx.expr_method_call(trait_span, + decoder, + cx.ident_of("read_enum"), + ~[ + cx.expr_str(trait_span, + token::get_ident(substr.type_ident.name)), + cx.lambda_expr_1(trait_span, result, blkarg) + ]) } _ => cx.bug("expected StaticEnum or StaticStruct in deriving(Decodable)") }; @@ -135,7 +148,7 @@ fn decode_static_fields(cx: &ExtCtxt, trait_span: Span, outer_pat_ident: Ident, fields: &StaticFields, - getarg: |Span, @str, uint| -> @Expr) + getarg: |Span, InternedString, uint| -> @Expr) -> @Expr { match *fields { Unnamed(ref fields) => { @@ -143,7 +156,10 @@ fn decode_static_fields(cx: &ExtCtxt, cx.expr_ident(trait_span, outer_pat_ident) } else { let fields = fields.iter().enumerate().map(|(i, &span)| { - getarg(span, format!("_field{}", i).to_managed(), i) + getarg(span, + token::intern_and_get_ident(format!("_field{}", + i)), + i) }).collect(); cx.expr_call_ident(trait_span, 
outer_pat_ident, fields) @@ -152,7 +168,9 @@ fn decode_static_fields(cx: &ExtCtxt, Named(ref fields) => { // use the field's span to get nicer error messages. let fields = fields.iter().enumerate().map(|(i, &(name, span))| { - cx.field_imm(span, name, getarg(span, cx.str_of(name), i)) + cx.field_imm(span, + name, + getarg(span, token::get_ident(name.name), i)) }).collect(); cx.expr_struct_ident(trait_span, outer_pat_ident, fields) } diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs index 9a8861f2e70e2..c50c9f18389c2 100644 --- a/src/libsyntax/ext/deriving/encodable.rs +++ b/src/libsyntax/ext/deriving/encodable.rs @@ -80,6 +80,7 @@ use codemap::Span; use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use parse::token; pub fn expand_deriving_encodable(cx: &ExtCtxt, span: Span, @@ -125,10 +126,17 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span, Struct(ref fields) => { let emit_struct_field = cx.ident_of("emit_struct_field"); let mut stmts = ~[]; - for (i, &FieldInfo { name, self_, span, .. }) in fields.iter().enumerate() { + for (i, &FieldInfo { + name, + self_, + span, + .. + }) in fields.iter().enumerate() { let name = match name { - Some(id) => cx.str_of(id), - None => format!("_field{}", i).to_managed() + Some(id) => token::get_ident(id.name), + None => { + token::intern_and_get_ident(format!("_field{}", i)) + } }; let enc = cx.expr_method_call(span, self_, encode, ~[blkencoder]); let lambda = cx.lambda_expr_1(span, enc, blkarg); @@ -141,10 +149,15 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span, } let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg); - cx.expr_method_call(trait_span, encoder, cx.ident_of("emit_struct"), - ~[cx.expr_str(trait_span, cx.str_of(substr.type_ident)), - cx.expr_uint(trait_span, fields.len()), - blk]) + cx.expr_method_call(trait_span, + encoder, + cx.ident_of("emit_struct"), + ~[ + cx.expr_str(trait_span, + token::get_ident(substr.type_ident.name)), + cx.expr_uint(trait_span, fields.len()), + blk + ]) } EnumMatching(idx, variant, ref fields) => { @@ -167,7 +180,8 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span, } let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg); - let name = cx.expr_str(trait_span, cx.str_of(variant.node.name)); + let name = cx.expr_str(trait_span, + token::get_ident(variant.node.name.name)); let call = cx.expr_method_call(trait_span, blkencoder, cx.ident_of("emit_enum_variant"), ~[name, @@ -175,11 +189,14 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span, cx.expr_uint(trait_span, fields.len()), blk]); let blk = cx.lambda_expr_1(trait_span, call, blkarg); - let ret = cx.expr_method_call(trait_span, encoder, + let ret = cx.expr_method_call(trait_span, + encoder, cx.ident_of("emit_enum"), - ~[cx.expr_str(trait_span, - cx.str_of(substr.type_ident)), - blk]); + ~[ + cx.expr_str(trait_span, + token::get_ident(substr.type_ident.name)), + blk + ]); cx.expr_block(cx.block(trait_span, ~[me], Some(ret))) } diff --git a/src/libsyntax/ext/deriving/generic.rs b/src/libsyntax/ext/deriving/generic.rs index 6449d0aab5e54..8eaff592765b2 100644 --- a/src/libsyntax/ext/deriving/generic.rs +++ b/src/libsyntax/ext/deriving/generic.rs @@ -184,6 +184,8 @@ use ext::build::AstBuilder; use codemap; use codemap::Span; use opt_vec; +use parse::token::InternedString; +use parse::token; use std::vec; @@ -396,8 +398,10 @@ impl<'a> TraitDef<'a> { let doc_attr = cx.attribute( self.span, cx.meta_name_value(self.span, - @"doc", - 
ast::LitStr(@"Automatically derived.", ast::CookedStr))); + InternedString::new("doc"), + ast::LitStr(token::intern_and_get_ident( + "Automatically derived."), + ast::CookedStr))); cx.item( self.span, ::parse::token::special_idents::clownshoes_extensions, @@ -567,7 +571,14 @@ impl<'a> MethodDef<'a> { let body_block = trait_.cx.block_expr(body); let attrs = if self.inline { - ~[trait_.cx.attribute(trait_.span, trait_.cx.meta_word(trait_.span, @"inline"))] + ~[ + trait_.cx + .attribute(trait_.span, + trait_.cx + .meta_word(trait_.span, + InternedString::new( + "inline"))) + ] } else { ~[] }; @@ -933,7 +944,7 @@ impl<'a> TraitDef<'a> { to_set.expn_info = Some(@codemap::ExpnInfo { call_site: to_set, callee: codemap::NameAndSpan { - name: format!("deriving({})", trait_name).to_managed(), + name: format!("deriving({})", trait_name), format: codemap::MacroAttribute, span: Some(self.span) } diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs index 652f5ebe6c70c..9c487146639bb 100644 --- a/src/libsyntax/ext/deriving/mod.rs +++ b/src/libsyntax/ext/deriving/mod.rs @@ -75,12 +75,12 @@ pub fn expand_meta_deriving(cx: &ExtCtxt, MetaList(_, ref titems) => { titems.rev_iter().fold(in_items, |in_items, &titem| { match titem.node { - MetaNameValue(tname, _) | - MetaList(tname, _) | - MetaWord(tname) => { + MetaNameValue(ref tname, _) | + MetaList(ref tname, _) | + MetaWord(ref tname) => { macro_rules! expand(($func:path) => ($func(cx, titem.span, titem, in_items))); - match tname.as_slice() { + match tname.get() { "Clone" => expand!(clone::expand_deriving_clone), "DeepClone" => expand!(clone::expand_deriving_deep_clone), diff --git a/src/libsyntax/ext/deriving/primitive.rs b/src/libsyntax/ext/deriving/primitive.rs index a4e606f53c0c2..e2f72e8708551 100644 --- a/src/libsyntax/ext/deriving/primitive.rs +++ b/src/libsyntax/ext/deriving/primitive.rs @@ -14,6 +14,7 @@ use codemap::Span; use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use parse::token::InternedString; pub fn expand_deriving_from_primitive(cx: &ExtCtxt, span: Span, @@ -73,13 +74,13 @@ fn cs_from(name: &str, cx: &ExtCtxt, trait_span: Span, substr: &Substructure) -> match *substr.fields { StaticStruct(..) 
=> { cx.span_err(trait_span, "`FromPrimitive` cannot be derived for structs"); - return cx.expr_fail(trait_span, @""); + return cx.expr_fail(trait_span, InternedString::new("")); } StaticEnum(enum_def, _) => { if enum_def.variants.is_empty() { cx.span_err(trait_span, "`FromPrimitive` cannot be derived for enums with no variants"); - return cx.expr_fail(trait_span, @""); + return cx.expr_fail(trait_span, InternedString::new("")); } let mut arms = ~[]; @@ -91,7 +92,8 @@ fn cs_from(name: &str, cx: &ExtCtxt, trait_span: Span, substr: &Substructure) -> cx.span_err(trait_span, "`FromPrimitive` cannot be derived for \ enum variants with arguments"); - return cx.expr_fail(trait_span, @""); + return cx.expr_fail(trait_span, + InternedString::new("")); } let span = variant.span; @@ -117,7 +119,8 @@ fn cs_from(name: &str, cx: &ExtCtxt, trait_span: Span, substr: &Substructure) -> cx.span_err(trait_span, "`FromPrimitive` cannot be derived for enums \ with struct variants"); - return cx.expr_fail(trait_span, @""); + return cx.expr_fail(trait_span, + InternedString::new("")); } } } diff --git a/src/libsyntax/ext/deriving/to_str.rs b/src/libsyntax/ext/deriving/to_str.rs index 81453a5a10b05..6101d647ca5dd 100644 --- a/src/libsyntax/ext/deriving/to_str.rs +++ b/src/libsyntax/ext/deriving/to_str.rs @@ -14,6 +14,8 @@ use codemap::Span; use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use parse::token::InternedString; +use parse::token; pub fn expand_deriving_to_str(cx: &ExtCtxt, span: Span, @@ -47,18 +49,22 @@ pub fn expand_deriving_to_str(cx: &ExtCtxt, // doesn't invoke the to_str() method on each field. Hence we mirror // the logic of the repr_to_str() method, but with tweaks to call to_str() // on sub-fields. -fn to_str_substructure(cx: &ExtCtxt, span: Span, - substr: &Substructure) -> @Expr { +fn to_str_substructure(cx: &ExtCtxt, span: Span, substr: &Substructure) + -> @Expr { let to_str = cx.ident_of("to_str"); - let doit = |start: &str, end: @str, name: ast::Ident, + let doit = |start: &str, + end: InternedString, + name: ast::Ident, fields: &[FieldInfo]| { if fields.len() == 0 { - cx.expr_str_uniq(span, cx.str_of(name)) + cx.expr_str_uniq(span, token::get_ident(name.name)) } else { let buf = cx.ident_of("buf"); - let start = cx.str_of(name) + start; - let init = cx.expr_str_uniq(span, start.to_managed()); + let interned_str = token::get_ident(name.name); + let start = + token::intern_and_get_ident(interned_str.get() + start); + let init = cx.expr_str_uniq(span, start); let mut stmts = ~[cx.stmt_let(span, true, buf, init)]; let push_str = cx.ident_of("push_str"); @@ -70,38 +76,53 @@ fn to_str_substructure(cx: &ExtCtxt, span: Span, for (i, &FieldInfo {name, span, self_, .. 
}) in fields.iter().enumerate() { if i > 0 { - push(cx.expr_str(span, @", ")); + push(cx.expr_str(span, InternedString::new(", "))); } match name { None => {} Some(id) => { - let name = cx.str_of(id) + ": "; - push(cx.expr_str(span, name.to_managed())); + let interned_id = token::get_ident(id.name); + let name = interned_id.get() + ": "; + push(cx.expr_str(span, + token::intern_and_get_ident(name))); } } push(cx.expr_method_call(span, self_, to_str, ~[])); } push(cx.expr_str(span, end)); - cx.expr_block(cx.block(span, stmts, Some(cx.expr_ident(span, buf)))) + cx.expr_block(cx.block(span, stmts, Some(cx.expr_ident(span, + buf)))) } }; return match *substr.fields { Struct(ref fields) => { if fields.len() == 0 || fields[0].name.is_none() { - doit("(", @")", substr.type_ident, *fields) + doit("(", + InternedString::new(")"), + substr.type_ident, + *fields) } else { - doit("{", @"}", substr.type_ident, *fields) + doit("{", + InternedString::new("}"), + substr.type_ident, + *fields) } } EnumMatching(_, variant, ref fields) => { match variant.node.kind { ast::TupleVariantKind(..) => - doit("(", @")", variant.node.name, *fields), + doit("(", + InternedString::new(")"), + variant.node.name, + *fields), ast::StructVariantKind(..) => - doit("{", @"}", variant.node.name, *fields), + doit("{", + InternedString::new("}"), + variant.node.name, + *fields), } } diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index a9b40ea7ec638..c23a1ce1e28d5 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -19,6 +19,7 @@ use codemap::Span; use ext::base::*; use ext::base; use ext::build::AstBuilder; +use parse::token; use std::os; @@ -52,7 +53,11 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Some((v, _style)) => v }; let msg = match exprs.len() { - 1 => format!("environment variable `{}` not defined", var).to_managed(), + 1 => { + token::intern_and_get_ident(format!("environment variable `{}` \ + not defined", + var)) + } 2 => { match expr_to_str(cx, exprs[1], "expected string literal") { None => return MacResult::dummy_expr(), @@ -65,12 +70,12 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } }; - let e = match os::getenv(var) { + let e = match os::getenv(var.get()) { None => { - cx.span_err(sp, msg); + cx.span_err(sp, msg.get()); cx.expr_uint(sp, 0) } - Some(s) => cx.expr_str(sp, s.to_managed()) + Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s)) }; MRExpr(e) } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 1ffff03a80f4a..d8d98b2779316 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -22,8 +22,8 @@ use codemap::{Span, Spanned, ExpnInfo, NameAndSpan, MacroBang, MacroAttribute}; use ext::base::*; use fold::*; use parse; +use parse::token::{fresh_mark, fresh_name, intern}; use parse::token; -use parse::token::{fresh_mark, fresh_name, ident_to_str, intern}; use visit; use visit::Visitor; use util::small_vector::SmallVector; @@ -54,13 +54,14 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { return e; } let extname = &pth.segments[0].identifier; - let extnamestr = ident_to_str(extname); + let extnamestr = token::get_ident(extname.name); // leaving explicit deref here to highlight unbox op: let marked_after = match fld.extsbox.find(&extname.name) { None => { fld.cx.span_err( pth.span, - format!("macro undefined: '{}'", extnamestr)); + format!("macro undefined: '{}'", + extnamestr.get())); // let compilation continue return e; @@ -69,7 +70,7 
@@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { fld.cx.bt_push(ExpnInfo { call_site: e.span, callee: NameAndSpan { - name: extnamestr, + name: extnamestr.get().to_str(), format: MacroBang, span: exp_span, }, @@ -94,7 +95,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { pth.span, format!( "non-expr macro in expr pos: {}", - extnamestr + extnamestr.get() ) ); return e; @@ -107,7 +108,8 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { _ => { fld.cx.span_err( pth.span, - format!("'{}' is not a tt-style macro", extnamestr) + format!("'{}' is not a tt-style macro", + extnamestr.get()) ); return e; } @@ -221,12 +223,12 @@ pub fn expand_mod_items(module_: &ast::Mod, fld: &mut MacroExpander) -> ast::Mod item.attrs.rev_iter().fold(~[*item], |items, attr| { let mname = attr.name(); - match fld.extsbox.find(&intern(mname)) { + match fld.extsbox.find(&intern(mname.get())) { Some(&ItemDecorator(dec_fn)) => { fld.cx.bt_push(ExpnInfo { call_site: attr.span, callee: NameAndSpan { - name: mname, + name: mname.get().to_str(), format: MacroAttribute, span: None } @@ -295,28 +297,31 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) }; let extname = &pth.segments[0].identifier; - let extnamestr = ident_to_str(extname); + let extnamestr = token::get_ident(extname.name); let fm = fresh_mark(); let expanded = match fld.extsbox.find(&extname.name) { None => { fld.cx.span_err(pth.span, - format!("macro undefined: '{}!'", extnamestr)); + format!("macro undefined: '{}!'", + extnamestr.get())); // let compilation continue return SmallVector::zero(); } Some(&NormalTT(ref expander, span)) => { if it.ident.name != parse::token::special_idents::invalid.name { + let string = token::get_ident(it.ident.name); fld.cx.span_err(pth.span, format!("macro {}! expects no ident argument, \ - given '{}'", extnamestr, - ident_to_str(&it.ident))); + given '{}'", + extnamestr.get(), + string.get())); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { call_site: it.span, callee: NameAndSpan { - name: extnamestr, + name: extnamestr.get().to_str(), format: MacroBang, span: span } @@ -328,13 +333,14 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) Some(&IdentTT(ref expander, span)) => { if it.ident.name == parse::token::special_idents::invalid.name { fld.cx.span_err(pth.span, - format!("macro {}! expects an ident argument", extnamestr)); + format!("macro {}! expects an ident argument", + extnamestr.get())); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { call_site: it.span, callee: NameAndSpan { - name: extnamestr, + name: extnamestr.get().to_str(), format: MacroBang, span: span } @@ -344,7 +350,9 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) expander.expand(fld.cx, it.span, it.ident, marked_tts) } _ => { - fld.cx.span_err(it.span, format!("{}! is not legal in item position", extnamestr)); + fld.cx.span_err(it.span, + format!("{}! 
is not legal in item position", + extnamestr.get())); return SmallVector::zero(); } }; @@ -356,7 +364,9 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) .collect() } MRExpr(_) => { - fld.cx.span_err(pth.span, format!("expr macro in item position: {}", extnamestr)); + fld.cx.span_err(pth.span, + format!("expr macro in item position: {}", + extnamestr.get())); return SmallVector::zero(); } MRAny(any_macro) => { @@ -385,7 +395,7 @@ pub fn expand_view_item(vi: &ast::ViewItem, fld: &mut MacroExpander) -> ast::ViewItem { let should_load = vi.attrs.iter().any(|attr| { - "phase" == attr.name() && + attr.name().get() == "phase" && attr.meta_item_list().map_or(false, |phases| { attr::contains_name(phases, "syntax") }) @@ -402,15 +412,18 @@ fn load_extern_macros(crate: &ast::ViewItem, fld: &mut MacroExpander) { let MacroCrate { lib, cnum } = fld.cx.loader.load_crate(crate); let crate_name = match crate.node { - ast::ViewItemExternMod(ref name, _, _) => token::ident_to_str(name), + ast::ViewItemExternMod(ref name, _, _) => { + let string = token::get_ident(name.name); + string.get().to_str() + }, _ => unreachable!(), }; - let name = format!("<{} macros>", crate_name).to_managed(); + let name = format!("<{} macros>", crate_name); let exported_macros = fld.cx.loader.get_exported_macros(cnum); for source in exported_macros.iter() { - let item = parse::parse_item_from_source_str(name, - source.to_managed(), + let item = parse::parse_item_from_source_str(name.clone(), + (*source).clone(), fld.cx.cfg(), fld.cx.parse_sess()) .expect("expected a serialized item"); @@ -475,10 +488,11 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> { return SmallVector::zero(); } let extname = &pth.segments[0].identifier; - let extnamestr = ident_to_str(extname); + let extnamestr = token::get_ident(extname.name); let marked_after = match fld.extsbox.find(&extname.name) { None => { - fld.cx.span_err(pth.span, format!("macro undefined: '{}'", extnamestr)); + fld.cx.span_err(pth.span, format!("macro undefined: '{}'", + extnamestr.get())); return SmallVector::zero(); } @@ -486,7 +500,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> { fld.cx.bt_push(ExpnInfo { call_site: s.span, callee: NameAndSpan { - name: extnamestr, + name: extnamestr.get().to_str(), format: MacroBang, span: exp_span, } @@ -511,7 +525,8 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> { MRAny(any_macro) => any_macro.make_stmt(), _ => { fld.cx.span_err(pth.span, - format!("non-stmt macro in stmt pos: {}", extnamestr)); + format!("non-stmt macro in stmt pos: {}", + extnamestr.get())); return SmallVector::zero(); } }; @@ -520,7 +535,8 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> { } _ => { - fld.cx.span_err(pth.span, format!("'{}' is not a tt-style macro", extnamestr)); + fld.cx.span_err(pth.span, format!("'{}' is not a tt-style macro", + extnamestr.get())); return SmallVector::zero(); } }; @@ -945,7 +961,7 @@ mod test { use fold::*; use ext::base::{CrateLoader, MacroCrate}; use parse; - use parse::token::{fresh_mark, gensym, intern, ident_to_str}; + use parse::token::{fresh_mark, gensym, intern}; use parse::token; use util::parser_testing::{string_to_crate, string_to_crate_and_sess}; use util::parser_testing::{string_to_pat, string_to_tts, strs_to_idents}; @@ -1009,11 +1025,11 @@ mod test { // make sure that macros can leave scope #[should_fail] #[test] fn macros_cant_escape_fns_test () { - let src = @"fn bogus() 
{macro_rules! z (() => (3+4))}\ + let src = ~"fn bogus() {macro_rules! z (() => (3+4))}\ fn inty() -> int { z!() }"; let sess = parse::new_parse_sess(None); let crate_ast = parse::parse_crate_from_source_str( - @"", + ~"", src, ~[],sess); // should fail: @@ -1024,11 +1040,11 @@ mod test { // make sure that macros can leave scope for modules #[should_fail] #[test] fn macros_cant_escape_mods_test () { - let src = @"mod foo {macro_rules! z (() => (3+4))}\ + let src = ~"mod foo {macro_rules! z (() => (3+4))}\ fn inty() -> int { z!() }"; let sess = parse::new_parse_sess(None); let crate_ast = parse::parse_crate_from_source_str( - @"", + ~"", src, ~[],sess); // should fail: @@ -1038,22 +1054,22 @@ mod test { // macro_escape modules shouldn't cause macros to leave scope #[test] fn macros_can_escape_flattened_mods_test () { - let src = @"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\ + let src = ~"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\ fn inty() -> int { z!() }"; let sess = parse::new_parse_sess(None); let crate_ast = parse::parse_crate_from_source_str( - @"", + ~"", src, ~[], sess); // should fail: let mut loader = ErrLoader; - expand_crate(sess,&mut loader,~[],crate_ast); + expand_crate(sess, &mut loader, ~[], crate_ast); } #[test] fn test_contains_flatten (){ - let attr1 = make_dummy_attr (@"foo"); - let attr2 = make_dummy_attr (@"bar"); - let escape_attr = make_dummy_attr (@"macro_escape"); + let attr1 = make_dummy_attr ("foo"); + let attr2 = make_dummy_attr ("bar"); + let escape_attr = make_dummy_attr ("macro_escape"); let attrs1 = ~[attr1, escape_attr, attr2]; assert_eq!(contains_macro_escape (attrs1),true); let attrs2 = ~[attr1,attr2]; @@ -1061,13 +1077,13 @@ mod test { } // make a MetaWord outer attribute with the given name - fn make_dummy_attr(s: @str) -> ast::Attribute { + fn make_dummy_attr(s: &str) -> ast::Attribute { Spanned { span:codemap::DUMMY_SP, node: Attribute_ { style: AttrOuter, value: @Spanned { - node: MetaWord(s), + node: MetaWord(token::intern_and_get_ident(s)), span: codemap::DUMMY_SP, }, is_sugared_doc: false, @@ -1077,7 +1093,7 @@ mod test { #[test] fn renaming () { - let item_ast = string_to_crate(@"fn f() -> int { a }"); + let item_ast = string_to_crate(~"fn f() -> int { a }"); let a_name = intern("a"); let a2_name = gensym("a2"); let mut renamer = new_rename_folder(ast::Ident{name:a_name,ctxt:EMPTY_CTXT}, @@ -1116,7 +1132,7 @@ mod test { // pprust::print_crate_(&mut s, crate); //} - fn expand_crate_str(crate_str: @str) -> ast::Crate { + fn expand_crate_str(crate_str: ~str) -> ast::Crate { let (crate_ast,ps) = string_to_crate_and_sess(crate_str); // the cfg argument actually does matter, here... let mut loader = ErrLoader; @@ -1134,7 +1150,7 @@ mod test { //} #[test] fn macro_tokens_should_match(){ - expand_crate_str(@"macro_rules! m((a)=>(13)) fn main(){m!(a);}"); + expand_crate_str(~"macro_rules! 
m((a)=>(13)) fn main(){m!(a);}"); } // renaming tests expand a crate and then check that the bindings match @@ -1208,9 +1224,9 @@ mod test { fn run_renaming_test(t: &RenamingTest, test_idx: uint) { let invalid_name = token::special_idents::invalid.name; let (teststr, bound_connections, bound_ident_check) = match *t { - (ref str,ref conns, bic) => (str.to_managed(), conns.clone(), bic) + (ref str,ref conns, bic) => (str.to_owned(), conns.clone(), bic) }; - let cr = expand_crate_str(teststr.to_managed()); + let cr = expand_crate_str(teststr.to_owned()); // find the bindings: let mut name_finder = new_name_finder(~[]); visit::walk_crate(&mut name_finder,&cr,()); @@ -1260,9 +1276,12 @@ mod test { println!("uh oh, matches but shouldn't:"); println!("varref: {:?}",varref); // good lord, you can't make a path with 0 segments, can you? + let string = token::get_ident(varref.segments[0] + .identifier + .name); println!("varref's first segment's uint: {}, and string: \"{}\"", varref.segments[0].identifier.name, - ident_to_str(&varref.segments[0].identifier)); + string.get()); println!("binding: {:?}", bindings[binding_idx]); ast_util::display_sctable(get_sctable()); } @@ -1273,7 +1292,7 @@ mod test { } #[test] fn fmt_in_macro_used_inside_module_macro() { - let crate_str = @"macro_rules! fmt_wrap(($b:expr)=>($b.to_str())) + let crate_str = ~"macro_rules! fmt_wrap(($b:expr)=>($b.to_str())) macro_rules! foo_module (() => (mod generated { fn a() { let xx = 147; fmt_wrap!(xx);}})) foo_module!() "; @@ -1284,7 +1303,10 @@ foo_module!() let bindings = name_finder.ident_accumulator; let cxbinds : ~[&ast::Ident] = - bindings.iter().filter(|b|{@"xx" == (ident_to_str(*b))}).collect(); + bindings.iter().filter(|b| { + let string = token::get_ident(b.name); + "xx" == string.get() + }).collect(); let cxbind = match cxbinds { [b] => b, _ => fail!("expected just one binding for ext_cx") @@ -1296,9 +1318,13 @@ foo_module!() let varrefs = path_finder.path_accumulator; // the xx binding should bind all of the xx varrefs: - for (idx,v) in varrefs.iter().filter(|p|{ p.segments.len() == 1 - && (@"xx" == (ident_to_str(&p.segments[0].identifier))) - }).enumerate() { + for (idx,v) in varrefs.iter().filter(|p|{ + p.segments.len() == 1 + && { + let string = token::get_ident(p.segments[0].identifier.name); + "xx" == string.get() + } + }).enumerate() { if (mtwt_resolve(v.segments[0].identifier) != resolved_binding) { println!("uh oh, xx binding didn't match xx varref:"); println!("this is xx varref \\# {:?}",idx); @@ -1323,7 +1349,7 @@ foo_module!() #[test] fn pat_idents(){ - let pat = string_to_pat(@"(a,Foo{x:c @ (b,9),y:Bar(4,d)})"); + let pat = string_to_pat(~"(a,Foo{x:c @ (b,9),y:Bar(4,d)})"); let mut pat_idents = new_name_finder(~[]); pat_idents.visit_pat(pat, ()); assert_eq!(pat_idents.ident_accumulator, diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index bbf6f7fff7f9e..ba1d5efdd49cd 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -14,23 +14,24 @@ use codemap::{Span, respan}; use ext::base::*; use ext::base; use ext::build::AstBuilder; -use rsparse = parse; -use parse::token; use opt_vec; +use parse::token::InternedString; +use parse::token; +use rsparse = parse; use std::fmt::parse; use std::hashmap::{HashMap, HashSet}; use std::vec; #[deriving(Eq)] enum ArgumentType { - Known(@str), + Known(~str), Unsigned, String, } enum Position { Exact(uint), - Named(@str), + Named(~str), } struct Context<'a> { @@ -42,12 +43,12 @@ struct Context<'a> { args: ~[@ast::Expr], 
arg_types: ~[Option], // Parsed named expressions and the types that we've found for them so far - names: HashMap<@str, @ast::Expr>, - name_types: HashMap<@str, ArgumentType>, + names: HashMap<~str, @ast::Expr>, + name_types: HashMap<~str, ArgumentType>, // Collection of the compiled `rt::Piece` structures pieces: ~[@ast::Expr], - name_positions: HashMap<@str, uint>, + name_positions: HashMap<~str, uint>, method_statics: ~[@ast::Item], // Updated as arguments are consumed or methods are entered @@ -104,10 +105,11 @@ impl<'a> Context<'a> { return (extra, None); } }; - let name = self.ecx.str_of(ident); + let interned_name = token::get_ident(ident.name); + let name = interned_name.get(); p.expect(&token::EQ); let e = p.parse_expr(); - match self.names.find(&name) { + match self.names.find_equiv(&name) { None => {} Some(prev) => { self.ecx.span_err(e.span, format!("duplicate argument \ @@ -117,7 +119,7 @@ impl<'a> Context<'a> { continue } } - self.names.insert(name, e); + self.names.insert(name.to_str(), e); } else { self.args.push(p.parse_expr()); self.arg_types.push(None); @@ -156,13 +158,13 @@ impl<'a> Context<'a> { Exact(i) } parse::ArgumentIs(i) => Exact(i), - parse::ArgumentNamed(s) => Named(s.to_managed()), + parse::ArgumentNamed(s) => Named(s.to_str()), }; // and finally the method being applied match arg.method { None => { - let ty = Known(arg.format.ty.to_managed()); + let ty = Known(arg.format.ty.to_str()); self.verify_arg_type(pos, ty); } Some(ref method) => { self.verify_method(pos, *method); } @@ -184,7 +186,7 @@ impl<'a> Context<'a> { self.verify_arg_type(Exact(i), Unsigned); } parse::CountIsName(s) => { - self.verify_arg_type(Named(s.to_managed()), Unsigned); + self.verify_arg_type(Named(s.to_str()), Unsigned); } parse::CountIsNextParam => { if self.check_positional_ok() { @@ -259,7 +261,13 @@ impl<'a> Context<'a> { self.ecx.span_err(self.fmtsp, msg); return; } - self.verify_same(self.args[arg].span, ty, self.arg_types[arg]); + { + let arg_type = match self.arg_types[arg] { + None => None, + Some(ref x) => Some(x) + }; + self.verify_same(self.args[arg].span, &ty, arg_type); + } if self.arg_types[arg].is_none() { self.arg_types[arg] = Some(ty); } @@ -274,10 +282,9 @@ impl<'a> Context<'a> { return; } }; - self.verify_same(span, ty, - self.name_types.find(&name).map(|&x| x)); + self.verify_same(span, &ty, self.name_types.find(&name)); if !self.name_types.contains_key(&name) { - self.name_types.insert(name, ty); + self.name_types.insert(name.clone(), ty); } // Assign this named argument a slot in the arguments array if // it hasn't already been assigned a slot. 
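// A minimal sketch (not part of the patch above) of the map-key pattern the
// format! hunks adopt: named-argument tables keyed by owned `~str`, queried
// with `find_equiv` and a borrowed `&str`, so a name obtained from an
// interned string via `.get()` can be looked up without allocating a fresh
// owned key. The `name_slots` map and the literal name are hypothetical.
use std::hashmap::HashMap;

fn named_slot_demo(interned_name: &str) -> uint {
    let mut name_slots: HashMap<~str, uint> = HashMap::new();
    // Insertion still owns its key, mirroring `self.names.insert(name.to_str(), e)`.
    name_slots.insert(~"count", 0u);

    // Lookup borrows, mirroring `self.names.find_equiv(&name)` above.
    match name_slots.find_equiv(&interned_name) {
        Some(&slot) => slot,
        None => 0u, // error already reported elsewhere in the real code
    }
}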
@@ -297,30 +304,36 @@ impl<'a> Context<'a> { /// /// Obviously `Some(Some(x)) != Some(Some(y))`, but we consider it true /// that: `Some(None) == Some(Some(x))` - fn verify_same(&self, sp: Span, ty: ArgumentType, - before: Option) { + fn verify_same(&self, + sp: Span, + ty: &ArgumentType, + before: Option<&ArgumentType>) { let cur = match before { None => return, Some(t) => t, }; - if ty == cur { return } + if *ty == *cur { + return + } match (cur, ty) { - (Known(cur), Known(ty)) => { + (&Known(ref cur), &Known(ref ty)) => { self.ecx.span_err(sp, format!("argument redeclared with type `{}` when \ - it was previously `{}`", ty, cur)); + it was previously `{}`", + *ty, + *cur)); } - (Known(cur), _) => { + (&Known(ref cur), _) => { self.ecx.span_err(sp, format!("argument used to format with `{}` was \ attempted to not be used for formatting", - cur)); + *cur)); } - (_, Known(ty)) => { + (_, &Known(ref ty)) => { self.ecx.span_err(sp, format!("argument previously used as a format \ argument attempted to be used as `{}`", - ty)); + *ty)); } (_, _) => { self.ecx.span_err(sp, "argument declared with multiple formats"); @@ -333,13 +346,18 @@ impl<'a> Context<'a> { fn static_attrs(&self) -> ~[ast::Attribute] { // Flag statics as `address_insignificant` so LLVM can merge duplicate // globals as much as possible (which we're generating a whole lot of). - let unnamed = self.ecx.meta_word(self.fmtsp, @"address_insignificant"); + let unnamed = self.ecx + .meta_word(self.fmtsp, + InternedString::new( + "address_insignificant")); let unnamed = self.ecx.attribute(self.fmtsp, unnamed); // Do not warn format string as dead code - let dead_code = self.ecx.meta_word(self.fmtsp, @"dead_code"); + let dead_code = self.ecx.meta_word(self.fmtsp, + InternedString::new("dead_code")); let allow_dead_code = self.ecx.meta_list(self.fmtsp, - @"allow", ~[dead_code]); + InternedString::new("allow"), + ~[dead_code]); let allow_dead_code = self.ecx.attribute(self.fmtsp, allow_dead_code); return ~[unnamed, allow_dead_code]; } @@ -391,9 +409,8 @@ impl<'a> Context<'a> { self.ecx.expr_path(path) } parse::CountIsName(n) => { - let n = n.to_managed(); - let i = match self.name_positions.find_copy(&n) { - Some(i) => i, + let i = match self.name_positions.find_equiv(&n) { + Some(&i) => i, None => 0, // error already emitted elsewhere }; let i = i + self.args.len(); @@ -410,7 +427,7 @@ impl<'a> Context<'a> { let result = arm.result.iter().map(|p| { self.trans_piece(p) }).collect(); - let s = arm.selector.to_managed(); + let s = token::intern_and_get_ident(arm.selector); let selector = self.ecx.expr_str(sp, s); self.ecx.expr_struct(sp, p, ~[ self.ecx.field_imm(sp, @@ -486,8 +503,12 @@ impl<'a> Context<'a> { match *piece { parse::String(s) => { - self.ecx.expr_call_global(sp, rtpath("String"), - ~[self.ecx.expr_str(sp, s.to_managed())]) + let s = token::intern_and_get_ident(s); + self.ecx.expr_call_global(sp, + rtpath("String"), + ~[ + self.ecx.expr_str(sp, s) + ]) } parse::CurrentArgument => { let nil = self.ecx.expr_lit(sp, ast::LitNil); @@ -509,9 +530,8 @@ impl<'a> Context<'a> { // Named arguments are converted to positional arguments at // the end of the list of arguments parse::ArgumentNamed(n) => { - let n = n.to_managed(); - let i = match self.name_positions.find_copy(&n) { - Some(i) => i, + let i = match self.name_positions.find_equiv(&n) { + Some(&i) => i, None => 0, // error already emitted elsewhere }; let i = i + self.args.len(); @@ -623,14 +643,17 @@ impl<'a> Context<'a> { locals.push(self.format_arg(e.span, Exact(i), 
self.ecx.expr_ident(e.span, name))); } - for (&name, &e) in self.names.iter() { - if !self.name_types.contains_key(&name) { continue } + for (name, &e) in self.names.iter() { + if !self.name_types.contains_key(name) { + continue + } - let lname = self.ecx.ident_of(format!("__arg{}", name)); + let lname = self.ecx.ident_of(format!("__arg{}", *name)); let e = self.ecx.expr_addr_of(e.span, e); lets.push(self.ecx.stmt_let(e.span, false, lname, e)); - names[*self.name_positions.get(&name)] = - Some(self.format_arg(e.span, Named(name), + names[*self.name_positions.get(name)] = + Some(self.format_arg(e.span, + Named((*name).clone()), self.ecx.expr_ident(e.span, lname))); } @@ -672,16 +695,16 @@ impl<'a> Context<'a> { Some(result))) } - fn format_arg(&self, sp: Span, argno: Position, - arg: @ast::Expr) -> @ast::Expr { + fn format_arg(&self, sp: Span, argno: Position, arg: @ast::Expr) + -> @ast::Expr { let ty = match argno { - Exact(i) => self.arg_types[i].unwrap(), - Named(s) => *self.name_types.get(&s) + Exact(ref i) => self.arg_types[*i].get_ref(), + Named(ref s) => self.name_types.get(s) }; - let fmt_trait = match ty { - Known(tyname) => { - match tyname.as_slice() { + let fmt_trait = match *ty { + Known(ref tyname) => { + match (*tyname).as_slice() { "" => "Default", "?" => "Poly", "b" => "Bool", @@ -698,8 +721,9 @@ impl<'a> Context<'a> { "x" => "LowerHex", "X" => "UpperHex", _ => { - self.ecx.span_err(sp, format!("unknown format trait \ - `{}`", tyname)); + self.ecx.span_err(sp, + format!("unknown format trait `{}`", + *tyname)); "Dummy" } } @@ -757,8 +781,9 @@ pub fn expand_args(ecx: &mut ExtCtxt, sp: Span, // Be sure to recursively expand macros just in case the format string uses // a macro to build the format expression. let expr = cx.ecx.expand_expr(efmt); - let fmt = match expr_to_str(cx.ecx, expr, - "format argument must be a string literal.") { + let fmt = match expr_to_str(cx.ecx, + expr, + "format argument must be a string literal.") { Some((fmt, _)) => fmt, None => return MacResult::dummy_expr() }; @@ -770,7 +795,7 @@ pub fn expand_args(ecx: &mut ExtCtxt, sp: Span, cx.ecx.span_err(efmt.span, m); } }).inside(|| { - for piece in parse::Parser::new(fmt) { + for piece in parse::Parser::new(fmt.get()) { if !err { cx.verify_piece(&piece); let piece = cx.trans_piece(&piece); diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 6faed270875c4..bd1ac616f52d2 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -31,6 +31,7 @@ use parse; pub mod rt { use ast; use ext::base::ExtCtxt; + use parse::token; use parse; use print::pprust; @@ -65,132 +66,134 @@ pub mod rt { pub trait ToSource { // Takes a thing and generates a string containing rust code for it. 
- fn to_source(&self) -> @str; + fn to_source(&self) -> ~str; } impl ToSource for ast::Ident { - fn to_source(&self) -> @str { - ident_to_str(self) + fn to_source(&self) -> ~str { + let this = get_ident(self.name); + this.get().to_owned() } } impl ToSource for @ast::Item { - fn to_source(&self) -> @str { - pprust::item_to_str(*self, get_ident_interner()).to_managed() + fn to_source(&self) -> ~str { + pprust::item_to_str(*self, get_ident_interner()) } } impl<'a> ToSource for &'a [@ast::Item] { - fn to_source(&self) -> @str { - self.map(|i| i.to_source()).connect("\n\n").to_managed() + fn to_source(&self) -> ~str { + self.map(|i| i.to_source()).connect("\n\n") } } impl ToSource for ast::Ty { - fn to_source(&self) -> @str { - pprust::ty_to_str(self, get_ident_interner()).to_managed() + fn to_source(&self) -> ~str { + pprust::ty_to_str(self, get_ident_interner()) } } impl<'a> ToSource for &'a [ast::Ty] { - fn to_source(&self) -> @str { - self.map(|i| i.to_source()).connect(", ").to_managed() + fn to_source(&self) -> ~str { + self.map(|i| i.to_source()).connect(", ") } } impl ToSource for Generics { - fn to_source(&self) -> @str { - pprust::generics_to_str(self, get_ident_interner()).to_managed() + fn to_source(&self) -> ~str { + pprust::generics_to_str(self, get_ident_interner()) } } impl ToSource for @ast::Expr { - fn to_source(&self) -> @str { - pprust::expr_to_str(*self, get_ident_interner()).to_managed() + fn to_source(&self) -> ~str { + pprust::expr_to_str(*self, get_ident_interner()) } } impl ToSource for ast::Block { - fn to_source(&self) -> @str { - pprust::block_to_str(self, get_ident_interner()).to_managed() + fn to_source(&self) -> ~str { + pprust::block_to_str(self, get_ident_interner()) } } impl<'a> ToSource for &'a str { - fn to_source(&self) -> @str { - let lit = dummy_spanned(ast::LitStr(self.to_managed(), ast::CookedStr)); - pprust::lit_to_str(&lit).to_managed() + fn to_source(&self) -> ~str { + let lit = dummy_spanned(ast::LitStr( + token::intern_and_get_ident(*self), ast::CookedStr)); + pprust::lit_to_str(&lit) } } impl ToSource for int { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for i8 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI8)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for i16 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI16)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for i32 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI32)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for i64 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI64)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for uint { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for u8 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU8)); - 
pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for u16 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU16)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for u32 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU32)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for u64 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU64)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } @@ -237,52 +240,49 @@ pub mod rt { impl_to_tokens!(u64) pub trait ExtParseUtils { - fn parse_item(&self, s: @str) -> @ast::Item; - fn parse_expr(&self, s: @str) -> @ast::Expr; - fn parse_stmt(&self, s: @str) -> @ast::Stmt; - fn parse_tts(&self, s: @str) -> ~[ast::TokenTree]; + fn parse_item(&self, s: ~str) -> @ast::Item; + fn parse_expr(&self, s: ~str) -> @ast::Expr; + fn parse_stmt(&self, s: ~str) -> @ast::Stmt; + fn parse_tts(&self, s: ~str) -> ~[ast::TokenTree]; } impl<'a> ExtParseUtils for ExtCtxt<'a> { - fn parse_item(&self, s: @str) -> @ast::Item { + fn parse_item(&self, s: ~str) -> @ast::Item { let res = parse::parse_item_from_source_str( - @"", + "".to_str(), s, self.cfg(), self.parse_sess()); match res { Some(ast) => ast, None => { - error!("Parse error with ```\n{}\n```", s); + error!("Parse error"); fail!() } } } - fn parse_stmt(&self, s: @str) -> @ast::Stmt { - parse::parse_stmt_from_source_str( - @"", - s, - self.cfg(), - ~[], - self.parse_sess()) + fn parse_stmt(&self, s: ~str) -> @ast::Stmt { + parse::parse_stmt_from_source_str("".to_str(), + s, + self.cfg(), + ~[], + self.parse_sess()) } - fn parse_expr(&self, s: @str) -> @ast::Expr { - parse::parse_expr_from_source_str( - @"", - s, - self.cfg(), - self.parse_sess()) + fn parse_expr(&self, s: ~str) -> @ast::Expr { + parse::parse_expr_from_source_str("".to_str(), + s, + self.cfg(), + self.parse_sess()) } - fn parse_tts(&self, s: @str) -> ~[ast::TokenTree] { - parse::parse_tts_from_source_str( - @"", - s, - self.cfg(), - self.parse_sess()) + fn parse_tts(&self, s: ~str) -> ~[ast::TokenTree] { + parse::parse_tts_from_source_str("".to_str(), + s, + self.cfg(), + self.parse_sess()) } } @@ -349,7 +349,7 @@ fn id_ext(str: &str) -> ast::Ident { // Lift an ident to the expr that evaluates to that ident. 
fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> @ast::Expr { - let e_str = cx.expr_str(sp, cx.str_of(ident)); + let e_str = cx.expr_str(sp, token::get_ident(ident.name)); cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("ident_of"), diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index a9f94da7a98cb..f3f947ec00d3d 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -16,11 +16,13 @@ use ext::base::*; use ext::base; use ext::build::AstBuilder; use parse; -use parse::token::{get_ident_interner}; +use parse::token::get_ident_interner; +use parse::token; use print::pprust; use std::io; use std::io::File; +use std::rc::Rc; use std::str; // These macros all relate to the file system; they either return @@ -57,21 +59,26 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let topmost = topmost_expn_info(cx.backtrace().unwrap()); let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo); - let filename = loc.file.name; + let filename = token::intern_and_get_ident(loc.file.name); base::MRExpr(cx.expr_str(topmost.call_site, filename)) } pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { let s = pprust::tts_to_str(tts, get_ident_interner()); - base::MRExpr(cx.expr_str(sp, s.to_managed())) + base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(s))) } pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { base::check_zero_tts(cx, sp, tts, "module_path!"); - base::MRExpr(cx.expr_str(sp, - cx.mod_path().map(|x| cx.str_of(*x)).connect("::").to_managed())) + let string = cx.mod_path() + .map(|x| { + let interned_str = token::get_ident(x.name); + interned_str.get().to_str() + }) + .connect("::"); + base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(string))) } // include! : parse the given file as an expr @@ -113,11 +120,11 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Some(src) => { // Add this input file to the code map to make it available as // dependency information - let src = src.to_managed(); - let filename = file.display().to_str().to_managed(); + let filename = file.display().to_str(); + let interned = token::intern_and_get_ident(src); cx.parse_sess.cm.new_filemap(filename, src); - base::MRExpr(cx.expr_str(sp, src)) + base::MRExpr(cx.expr_str(sp, interned)) } None => { cx.span_err(sp, format!("{} wasn't a utf-8 file", file.display())); @@ -129,8 +136,6 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) pub fn expand_include_bin(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { - use std::at_vec; - let file = match get_single_str_from_tts(cx, sp, tts, "include_bin!") { Some(f) => f, None => return MacResult::dummy_expr() @@ -142,8 +147,7 @@ pub fn expand_include_bin(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) return MacResult::dummy_expr(); } Ok(bytes) => { - let bytes = at_vec::to_managed_move(bytes); - base::MRExpr(cx.expr_lit(sp, ast::LitBinary(bytes))) + base::MRExpr(cx.expr_lit(sp, ast::LitBinary(Rc::new(bytes)))) } } } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index d5a30a7cf1186..6d1b8dd235854 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -18,7 +18,7 @@ use parse::lexer::*; //resolve bug? 
use parse::ParseSess; use parse::attr::ParserAttr; use parse::parser::{LifetimeAndTypesWithoutColons, Parser}; -use parse::token::{Token, EOF, to_str, Nonterminal, get_ident_interner, ident_to_str}; +use parse::token::{Token, EOF, to_str, Nonterminal, get_ident_interner}; use parse::token; use std::hashmap::HashMap; @@ -183,8 +183,9 @@ pub fn nameize(p_s: @ParseSess, ms: &[Matcher], res: &[@NamedMatch]) node: MatchNonterminal(ref bind_name, _, idx), span: sp } => { if ret_val.contains_key(bind_name) { - p_s.span_diagnostic.span_fatal(sp, - "Duplicated bind name: "+ ident_to_str(bind_name)) + let string = token::get_ident(bind_name.name); + p_s.span_diagnostic + .span_fatal(sp, "Duplicated bind name: " + string.get()) } ret_val.insert(*bind_name, res[idx]); } @@ -364,8 +365,11 @@ pub fn parse(sess: @ParseSess, let nts = bb_eis.map(|ei| { match ei.elts[ei.idx].node { MatchNonterminal(ref bind,ref name,_) => { - format!("{} ('{}')", ident_to_str(name), - ident_to_str(bind)) + let bind_string = token::get_ident(bind.name); + let name_string = token::get_ident(name.name); + format!("{} ('{}')", + name_string.get(), + bind_string.get()) } _ => fail!() } }).connect(" or "); @@ -388,8 +392,9 @@ pub fn parse(sess: @ParseSess, let mut ei = bb_eis.pop().unwrap(); match ei.elts[ei.idx].node { MatchNonterminal(_, ref name, idx) => { + let name_string = token::get_ident(name.name); ei.matches[idx].push(@MatchedNonterminal( - parse_nt(&mut rust_parser, ident_to_str(name)))); + parse_nt(&mut rust_parser, name_string.get()))); ei.idx += 1u; } _ => fail!() diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index aabd9c694f7c0..c179e9959e0be 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -21,8 +21,9 @@ use ext::tt::macro_parser::{parse, parse_or_else}; use parse::lexer::{new_tt_reader, Reader}; use parse::parser::Parser; use parse::attr::ParserAttr; -use parse::token::{get_ident_interner, special_idents, gensym_ident, ident_to_str}; +use parse::token::{get_ident_interner, special_idents, gensym_ident}; use parse::token::{FAT_ARROW, SEMI, NtMatchers, NtTT, EOF}; +use parse::token; use print; use std::cell::RefCell; use util::small_vector::SmallVector; @@ -112,10 +113,11 @@ fn generic_extension(cx: &ExtCtxt, rhses: &[@NamedMatch]) -> MacResult { if cx.trace_macros() { + let interned_name = token::get_ident(name.name); println!("{}! \\{ {} \\}", - cx.str_of(name), - print::pprust::tt_to_str(&TTDelim(@arg.to_owned()), - get_ident_interner())); + interned_name.get(), + print::pprust::tt_to_str(&TTDelim(@arg.to_owned()), + get_ident_interner())); } // Which arm's failure should we report? 
(the one furthest along) @@ -229,7 +231,7 @@ pub fn add_new_extension(cx: &mut ExtCtxt, }; return MRDef(MacroDef { - name: ident_to_str(&name), + name: token::get_ident(name.name).get().to_str(), ext: NormalTT(exp, Some(sp)) }); } diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 87a2f374c9005..d2fa24b1cfede 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -14,7 +14,7 @@ use codemap::{Span, DUMMY_SP}; use diagnostic::SpanHandler; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use parse::token::{EOF, INTERPOLATED, IDENT, Token, NtIdent}; -use parse::token::{ident_to_str}; +use parse::token; use parse::lexer::TokenAndSpan; use std::cell::{Cell, RefCell}; @@ -122,9 +122,10 @@ fn lookup_cur_matched(r: &TtReader, name: Ident) -> @NamedMatch { match matched_opt { Some(s) => lookup_cur_matched_by_matched(r, s), None => { + let name_string = token::get_ident(name.name); r.sp_diag.span_fatal(r.cur_span.get(), format!("unknown macro variable `{}`", - ident_to_str(&name))); + name_string.get())); } } } @@ -145,11 +146,11 @@ fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize { LisContradiction(_) => rhs.clone(), LisConstraint(r_len, _) if l_len == r_len => lhs.clone(), LisConstraint(r_len, ref r_id) => { - let l_n = ident_to_str(l_id); - let r_n = ident_to_str(r_id); + let l_n = token::get_ident(l_id.name); + let r_n = token::get_ident(r_id.name); LisContradiction(format!("Inconsistent lockstep iteration: \ '{}' has {} items, but '{}' has {}", - l_n, l_len, r_n, r_len)) + l_n.get(), l_len, r_n.get(), r_len)) } } } @@ -313,10 +314,11 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan { return ret_val; } MatchedSeq(..) => { + let string = token::get_ident(ident.name); r.sp_diag.span_fatal( r.cur_span.get(), /* blame the macro writer */ format!("variable '{}' is still repeating at this depth", - ident_to_str(&ident))); + string.get())); } } } diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 8dac13f1e31a9..8f5bbc2cdad18 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -321,15 +321,14 @@ fn fold_meta_item_(mi: @MetaItem, fld: &mut T) -> @MetaItem { @Spanned { node: match mi.node { - MetaWord(id) => MetaWord(id), - MetaList(id, ref mis) => { + MetaWord(ref id) => MetaWord((*id).clone()), + MetaList(ref id, ref mis) => { let fold_meta_item = |x| fold_meta_item_(x, fld); - MetaList( - id, - mis.map(|e| fold_meta_item(*e)) - ) + MetaList((*id).clone(), mis.map(|e| fold_meta_item(*e))) + } + MetaNameValue(ref id, ref s) => { + MetaNameValue((*id).clone(), (*s).clone()) } - MetaNameValue(id, s) => MetaNameValue(id, s) }, span: fld.new_span(mi.span) } } @@ -498,12 +497,10 @@ fn fold_variant_arg_(va: &VariantArg, folder: &mut T) -> VariantArg { pub fn noop_fold_view_item(vi: &ViewItem, folder: &mut T) -> ViewItem{ let inner_view_item = match vi.node { - ViewItemExternMod(ref ident, - string, - node_id) => { + ViewItemExternMod(ref ident, ref string, node_id) => { ViewItemExternMod(ident.clone(), - string, - folder.new_id(node_id)) + (*string).clone(), + folder.new_id(node_id)) } ViewItemUse(ref view_paths) => { ViewItemUse(folder.fold_view_paths(*view_paths)) @@ -815,8 +812,12 @@ pub fn noop_fold_expr(e: @Expr, folder: &mut T) -> @Expr { } ExprInlineAsm(ref a) => { ExprInlineAsm(InlineAsm { - inputs: a.inputs.map(|&(c, input)| (c, folder.fold_expr(input))), - outputs: a.outputs.map(|&(c, out)| (c, folder.fold_expr(out))), + inputs: 
a.inputs.map(|&(ref c, input)| { + ((*c).clone(), folder.fold_expr(input)) + }), + outputs: a.outputs.map(|&(ref c, out)| { + ((*c).clone(), folder.fold_expr(out)) + }), .. (*a).clone() }) } @@ -898,7 +899,8 @@ mod test { // make sure idents get transformed everywhere #[test] fn ident_transformation () { let mut zz_fold = ToZzIdentFolder; - let ast = string_to_crate(@"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}"); + let ast = string_to_crate( + ~"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}"); assert_pred!(matches_codepattern, "matches_codepattern", pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate, @@ -909,8 +911,9 @@ mod test { // even inside macro defs.... #[test] fn ident_transformation_in_defs () { let mut zz_fold = ToZzIdentFolder; - let ast = string_to_crate(@"macro_rules! a {(b $c:expr $(d $e:token)f+ -=> (g $(d $d $e)+))} "); + let ast = string_to_crate( + ~"macro_rules! a {(b $c:expr $(d $e:token)f+ => \ + (g $(d $d $e)+))} "); assert_pred!(matches_codepattern, "matches_codepattern", pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate, diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index e7630a668558d..c9bea78d02db5 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -45,7 +45,7 @@ impl ParserAttr for Parser { } token::DOC_COMMENT(s) => { let attr = ::attr::mk_sugared_doc_attr( - self.id_to_str(s), + self.id_to_interned_str(s), self.span.lo, self.span.hi ); @@ -133,7 +133,7 @@ impl ParserAttr for Parser { } token::DOC_COMMENT(s) => { self.bump(); - ::attr::mk_sugared_doc_attr(self.id_to_str(s), + ::attr::mk_sugared_doc_attr(self.id_to_interned_str(s), self.span.lo, self.span.hi) } @@ -157,7 +157,7 @@ impl ParserAttr for Parser { fn parse_meta_item(&mut self) -> @ast::MetaItem { let lo = self.span.lo; let ident = self.parse_ident(); - let name = self.id_to_str(ident); + let name = self.id_to_interned_str(ident); match self.token { token::EQ => { self.bump(); diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index aa5e4e01ae0a0..7165e7b404f1c 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -54,7 +54,6 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle { } pub fn strip_doc_comment_decoration(comment: &str) -> ~str { - /// remove whitespace-only lines from the start/end of lines fn vertical_trim(lines: ~[~str]) -> ~[~str] { let mut i = 0u; @@ -348,10 +347,10 @@ pub struct Literal { // probably not a good thing. 
pub fn gather_comments_and_literals(span_diagnostic: @diagnostic::SpanHandler, - path: @str, + path: ~str, srdr: &mut io::Reader) -> (~[Comment], ~[Literal]) { - let src = str::from_utf8_owned(srdr.read_to_end()).unwrap().to_managed(); + let src = str::from_utf8_owned(srdr.read_to_end()).unwrap(); let cm = CodeMap::new(); let filemap = cm.new_filemap(path, src); let rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap); diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index 2521bb515f769..8c55990289aa8 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -42,7 +42,6 @@ pub struct TokenAndSpan { pub struct StringReader { span_diagnostic: @SpanHandler, - src: @str, // The absolute offset within the codemap of the next character to read pos: Cell, // The absolute offset within the codemap of the last character read(curr) @@ -73,7 +72,6 @@ pub fn new_low_level_string_reader(span_diagnostic: @SpanHandler, let initial_char = '\n'; let r = @StringReader { span_diagnostic: span_diagnostic, - src: filemap.src, pos: Cell::new(filemap.start_pos), last_pos: Cell::new(filemap.start_pos), col: Cell::new(CharPos(0)), @@ -93,7 +91,6 @@ pub fn new_low_level_string_reader(span_diagnostic: @SpanHandler, fn dup_string_reader(r: @StringReader) -> @StringReader { @StringReader { span_diagnostic: r.span_diagnostic, - src: r.src, pos: Cell::new(r.pos.get()), last_pos: Cell::new(r.last_pos.get()), col: Cell::new(r.col.get()), @@ -188,7 +185,7 @@ fn fatal_span_verbose(rdr: @StringReader, -> ! { let mut m = m; m.push_str(": "); - let s = rdr.src.slice( + let s = rdr.filemap.src.slice( byte_offset(rdr, from_pos).to_uint(), byte_offset(rdr, to_pos).to_uint()); m.push_str(s); @@ -239,7 +236,7 @@ fn with_str_from_to( end: BytePos, f: |s: &str| -> T) -> T { - f(rdr.src.slice( + f(rdr.filemap.src.slice( byte_offset(rdr, start).to_uint(), byte_offset(rdr, end).to_uint())) } @@ -249,12 +246,12 @@ fn with_str_from_to( pub fn bump(rdr: &StringReader) { rdr.last_pos.set(rdr.pos.get()); let current_byte_offset = byte_offset(rdr, rdr.pos.get()).to_uint(); - if current_byte_offset < (rdr.src).len() { + if current_byte_offset < (rdr.filemap.src).len() { assert!(rdr.curr.get() != unsafe { transmute(-1u32) }); // FIXME: #8971: unsound let last_char = rdr.curr.get(); - let next = rdr.src.char_range_at(current_byte_offset); + let next = rdr.filemap.src.char_range_at(current_byte_offset); let byte_offset_diff = next.next - current_byte_offset; rdr.pos.set(rdr.pos.get() + Pos::from_uint(byte_offset_diff)); rdr.curr.set(next.ch); @@ -277,8 +274,8 @@ pub fn is_eof(rdr: @StringReader) -> bool { } pub fn nextch(rdr: @StringReader) -> char { let offset = byte_offset(rdr, rdr.pos.get()).to_uint(); - if offset < (rdr.src).len() { - return rdr.src.char_at(offset); + if offset < (rdr.filemap.src).len() { + return rdr.filemap.src.char_at(offset); } else { return unsafe { transmute(-1u32) }; } // FIXME: #8971: unsound } @@ -975,9 +972,9 @@ mod test { } // open a string reader for the given string - fn setup(teststr: @str) -> Env { + fn setup(teststr: ~str) -> Env { let cm = CodeMap::new(); - let fm = cm.new_filemap(@"zebra.rs", teststr); + let fm = cm.new_filemap(~"zebra.rs", teststr); let span_handler = diagnostic::mk_span_handler(diagnostic::mk_handler(None),@cm); Env { @@ -987,7 +984,7 @@ mod test { #[test] fn t1 () { let Env {string_reader} = - setup(@"/* my source file */ \ + setup(~"/* my source file */ \ fn main() { println!(\"zebra\"); }\n"); let id = str_to_ident("fn"); let 
tok1 = string_reader.next_token(); @@ -1023,14 +1020,14 @@ mod test { } #[test] fn doublecolonparsing () { - let env = setup (@"a b"); + let env = setup (~"a b"); check_tokenization (env, ~[mk_ident("a",false), mk_ident("b",false)]); } #[test] fn dcparsing_2 () { - let env = setup (@"a::b"); + let env = setup (~"a::b"); check_tokenization (env, ~[mk_ident("a",true), token::MOD_SEP, @@ -1038,7 +1035,7 @@ mod test { } #[test] fn dcparsing_3 () { - let env = setup (@"a ::b"); + let env = setup (~"a ::b"); check_tokenization (env, ~[mk_ident("a",false), token::MOD_SEP, @@ -1046,7 +1043,7 @@ mod test { } #[test] fn dcparsing_4 () { - let env = setup (@"a:: b"); + let env = setup (~"a:: b"); check_tokenization (env, ~[mk_ident("a",true), token::MOD_SEP, @@ -1054,28 +1051,28 @@ mod test { } #[test] fn character_a() { - let env = setup(@"'a'"); + let env = setup(~"'a'"); let TokenAndSpan {tok, sp: _} = env.string_reader.next_token(); assert_eq!(tok,token::LIT_CHAR('a' as u32)); } #[test] fn character_space() { - let env = setup(@"' '"); + let env = setup(~"' '"); let TokenAndSpan {tok, sp: _} = env.string_reader.next_token(); assert_eq!(tok, token::LIT_CHAR(' ' as u32)); } #[test] fn character_escaped() { - let env = setup(@"'\\n'"); + let env = setup(~"'\\n'"); let TokenAndSpan {tok, sp: _} = env.string_reader.next_token(); assert_eq!(tok, token::LIT_CHAR('\n' as u32)); } #[test] fn lifetime_name() { - let env = setup(@"'abc"); + let env = setup(~"'abc"); let TokenAndSpan {tok, sp: _} = env.string_reader.next_token(); let id = token::str_to_ident("abc"); @@ -1083,7 +1080,7 @@ mod test { } #[test] fn raw_string() { - let env = setup(@"r###\"\"#a\\b\x00c\"\"###"); + let env = setup(~"r###\"\"#a\\b\x00c\"\"###"); let TokenAndSpan {tok, sp: _} = env.string_reader.next_token(); let id = token::str_to_ident("\"#a\\b\x00c\""); @@ -1097,7 +1094,7 @@ mod test { } #[test] fn nested_block_comments() { - let env = setup(@"/* /* */ */'a'"); + let env = setup(~"/* /* */ */'a'"); let TokenAndSpan {tok, sp: _} = env.string_reader.next_token(); assert_eq!(tok,token::LIT_CHAR('a' as u32)); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index e026a11cafe37..cec9f7c2d9f11 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -89,12 +89,11 @@ pub fn parse_crate_attrs_from_file( return inner; } -pub fn parse_crate_from_source_str( - name: @str, - source: @str, - cfg: ast::CrateConfig, - sess: @ParseSess -) -> ast::Crate { +pub fn parse_crate_from_source_str(name: ~str, + source: ~str, + cfg: ast::CrateConfig, + sess: @ParseSess) + -> ast::Crate { let mut p = new_parser_from_source_str(sess, /*bad*/ cfg.clone(), name, @@ -102,12 +101,11 @@ pub fn parse_crate_from_source_str( maybe_aborted(p.parse_crate_mod(),p) } -pub fn parse_crate_attrs_from_source_str( - name: @str, - source: @str, - cfg: ast::CrateConfig, - sess: @ParseSess -) -> ~[ast::Attribute] { +pub fn parse_crate_attrs_from_source_str(name: ~str, + source: ~str, + cfg: ast::CrateConfig, + sess: @ParseSess) + -> ~[ast::Attribute] { let mut p = new_parser_from_source_str(sess, /*bad*/ cfg.clone(), name, @@ -116,44 +114,40 @@ pub fn parse_crate_attrs_from_source_str( return inner; } -pub fn parse_expr_from_source_str( - name: @str, - source: @str, - cfg: ast::CrateConfig, - sess: @ParseSess -) -> @ast::Expr { +pub fn parse_expr_from_source_str(name: ~str, + source: ~str, + cfg: ast::CrateConfig, + sess: @ParseSess) + -> @ast::Expr { let mut p = new_parser_from_source_str(sess, cfg, name, source); 
maybe_aborted(p.parse_expr(), p) } -pub fn parse_item_from_source_str( - name: @str, - source: @str, - cfg: ast::CrateConfig, - sess: @ParseSess -) -> Option<@ast::Item> { +pub fn parse_item_from_source_str(name: ~str, + source: ~str, + cfg: ast::CrateConfig, + sess: @ParseSess) + -> Option<@ast::Item> { let mut p = new_parser_from_source_str(sess, cfg, name, source); let attrs = p.parse_outer_attributes(); maybe_aborted(p.parse_item(attrs),p) } -pub fn parse_meta_from_source_str( - name: @str, - source: @str, - cfg: ast::CrateConfig, - sess: @ParseSess -) -> @ast::MetaItem { +pub fn parse_meta_from_source_str(name: ~str, + source: ~str, + cfg: ast::CrateConfig, + sess: @ParseSess) + -> @ast::MetaItem { let mut p = new_parser_from_source_str(sess, cfg, name, source); maybe_aborted(p.parse_meta_item(),p) } -pub fn parse_stmt_from_source_str( - name: @str, - source: @str, - cfg: ast::CrateConfig, - attrs: ~[ast::Attribute], - sess: @ParseSess -) -> @ast::Stmt { +pub fn parse_stmt_from_source_str(name: ~str, + source: ~str, + cfg: ast::CrateConfig, + attrs: ~[ast::Attribute], + sess: @ParseSess) + -> @ast::Stmt { let mut p = new_parser_from_source_str( sess, cfg, @@ -163,12 +157,11 @@ pub fn parse_stmt_from_source_str( maybe_aborted(p.parse_stmt(attrs),p) } -pub fn parse_tts_from_source_str( - name: @str, - source: @str, - cfg: ast::CrateConfig, - sess: @ParseSess -) -> ~[ast::TokenTree] { +pub fn parse_tts_from_source_str(name: ~str, + source: ~str, + cfg: ast::CrateConfig, + sess: @ParseSess) + -> ~[ast::TokenTree] { let mut p = new_parser_from_source_str( sess, cfg, @@ -183,9 +176,9 @@ pub fn parse_tts_from_source_str( // Create a new parser from a source string pub fn new_parser_from_source_str(sess: @ParseSess, cfg: ast::CrateConfig, - name: @str, - source: @str) - -> Parser { + name: ~str, + source: ~str) + -> Parser { filemap_to_parser(sess,string_to_filemap(sess,source,name),cfg) } @@ -248,20 +241,17 @@ pub fn file_to_filemap(sess: @ParseSess, path: &Path, spanopt: Option) }; match str::from_utf8_owned(bytes) { Some(s) => { - return string_to_filemap(sess, s.to_managed(), - path.as_str().unwrap().to_managed()); - } - None => { - err(format!("{} is not UTF-8 encoded", path.display())) + return string_to_filemap(sess, s, path.as_str().unwrap().to_str()) } + None => err(format!("{} is not UTF-8 encoded", path.display())), } unreachable!() } // given a session and a string, add the string to // the session's codemap and return the new filemap -pub fn string_to_filemap(sess: @ParseSess, source: @str, path: @str) - -> @FileMap { +pub fn string_to_filemap(sess: @ParseSess, source: ~str, path: ~str) + -> @FileMap { sess.cm.new_filemap(path, source) } @@ -324,7 +314,7 @@ mod test { } #[test] fn path_exprs_1() { - assert_eq!(string_to_expr(@"a"), + assert_eq!(string_to_expr(~"a"), @ast::Expr{ id: ast::DUMMY_NODE_ID, node: ast::ExprPath(ast::Path { @@ -343,7 +333,7 @@ mod test { } #[test] fn path_exprs_2 () { - assert_eq!(string_to_expr(@"::a::b"), + assert_eq!(string_to_expr(~"::a::b"), @ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprPath(ast::Path { @@ -368,12 +358,12 @@ mod test { #[should_fail] #[test] fn bad_path_expr_1() { - string_to_expr(@"::abc::def::return"); + string_to_expr(~"::abc::def::return"); } // check the token-tree-ization of macros #[test] fn string_to_tts_macro () { - let tts = string_to_tts(@"macro_rules! zip (($a)=>($a))"); + let tts = string_to_tts(~"macro_rules! 
zip (($a)=>($a))"); match tts { [ast::TTTok(_,_), ast::TTTok(_,token::NOT), @@ -417,7 +407,7 @@ mod test { } #[test] fn string_to_tts_1 () { - let tts = string_to_tts(@"fn a (b : int) { b; }"); + let tts = string_to_tts(~"fn a (b : int) { b; }"); assert_eq!(to_json_str(&tts), ~"[\ {\ @@ -546,7 +536,7 @@ mod test { } #[test] fn ret_expr() { - assert_eq!(string_to_expr(@"return d"), + assert_eq!(string_to_expr(~"return d"), @ast::Expr{ id: ast::DUMMY_NODE_ID, node:ast::ExprRet(Some(@ast::Expr{ @@ -569,7 +559,7 @@ mod test { } #[test] fn parse_stmt_1 () { - assert_eq!(string_to_stmt(@"b;"), + assert_eq!(string_to_stmt(~"b;"), @Spanned{ node: ast::StmtExpr(@ast::Expr { id: ast::DUMMY_NODE_ID, @@ -595,7 +585,7 @@ mod test { } #[test] fn parse_ident_pat () { - let mut parser = string_to_parser(@"b"); + let mut parser = string_to_parser(~"b"); assert_eq!(parser.parse_pat(), @ast::Pat{id: ast::DUMMY_NODE_ID, node: ast::PatIdent( @@ -619,7 +609,7 @@ mod test { // check the contents of the tt manually: #[test] fn parse_fundecl () { // this test depends on the intern order of "fn" and "int" - assert_eq!(string_to_item(@"fn a (b : int) { b; }"), + assert_eq!(string_to_item(~"fn a (b : int) { b; }"), Some( @ast::Item{ident:str_to_ident("a"), attrs:~[], @@ -711,12 +701,12 @@ mod test { #[test] fn parse_exprs () { // just make sure that they parse.... - string_to_expr(@"3 + 4"); - string_to_expr(@"a::z.froob(b,@(987+3))"); + string_to_expr(~"3 + 4"); + string_to_expr(~"a::z.froob(b,@(987+3))"); } #[test] fn attrs_fix_bug () { - string_to_item(@"pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) + string_to_item(~"pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) -> Result<@Writer, ~str> { #[cfg(windows)] fn wb() -> c_int { diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index c4887d55e2a29..b85d89cf804a8 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -22,7 +22,6 @@ use codemap::{Span, respan}; use parse::parser::Parser; use parse::token; -use std::str; use std::to_bytes; /// The specific types of unsupported syntax @@ -45,6 +44,8 @@ pub enum ObsoleteSyntax { ObsoleteMultipleImport, ObsoleteExternModAttributesInParens, ObsoleteManagedPattern, + ObsoleteManagedString, + ObsoleteManagedVec, } impl to_bytes::IterBytes for ObsoleteSyntax { @@ -150,6 +151,14 @@ impl ParserObsoleteMethods for Parser { "use a nested `match` expression instead of a managed box \ pattern" ), + ObsoleteManagedString => ( + "managed string", + "use `Rc<~str>` instead of a managed string" + ), + ObsoleteManagedVec => ( + "managed vector", + "use `Rc<~[T]>` instead of a managed vector" + ), }; self.report(sp, kind, kind_str, desc); @@ -178,7 +187,8 @@ impl ParserObsoleteMethods for Parser { fn is_obsolete_ident(&mut self, ident: &str) -> bool { match self.token { token::IDENT(sid, _) => { - str::eq_slice(self.id_to_str(sid), ident) + let interned_string = token::get_ident(sid.name); + interned_string.equiv(&ident) } _ => false } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 642624adfb2b5..dd7cc3a231435 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -29,7 +29,7 @@ use ast::{ExprField, ExprFnBlock, ExprIf, ExprIndex}; use ast::{ExprLit, ExprLogLevel, ExprLoop, ExprMac}; use ast::{ExprMethodCall, ExprParen, ExprPath, ExprProc}; use ast::{ExprRepeat, ExprRet, ExprStruct, ExprTup, ExprUnary}; -use ast::{ExprVec, ExprVstore, ExprVstoreSlice, ExprVstoreBox}; +use ast::{ExprVec, ExprVstore, 
ExprVstoreSlice}; use ast::{ExprVstoreMutSlice, ExprWhile, ExprForLoop, ExternFn, Field, FnDecl}; use ast::{ExprVstoreUniq, Onceness, Once, Many}; use ast::{ForeignItem, ForeignItemStatic, ForeignItemFn, ForeignMod}; @@ -71,10 +71,9 @@ use parse::common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed}; use parse::lexer::Reader; use parse::lexer::TokenAndSpan; use parse::obsolete::*; -use parse::token::{can_begin_expr, get_ident_interner, ident_to_str, is_ident}; -use parse::token::{is_ident_or_path}; -use parse::token::{is_plain_ident, INTERPOLATED, keywords, special_idents}; -use parse::token::{token_to_binop}; +use parse::token::{INTERPOLATED, InternedString, can_begin_expr, get_ident}; +use parse::token::{get_ident_interner, is_ident, is_ident_or_path}; +use parse::token::{is_plain_ident, keywords, special_idents, token_to_binop}; use parse::token; use parse::{new_sub_parser_from_file, ParseSess}; use opt_vec; @@ -345,7 +344,7 @@ pub struct Parser { /// extra detail when the same error is seen twice obsolete_set: HashSet, /// Used to determine the path to externally loaded source files - mod_path_stack: ~[@str], + mod_path_stack: ~[InternedString], /// Stack of spans of open delimiters. Used for error message. open_braces: ~[Span], /* do not copy the parser; its state is tied to outside state */ @@ -531,10 +530,11 @@ impl Parser { // otherwise, eat it. pub fn expect_keyword(&mut self, kw: keywords::Keyword) { if !self.eat_keyword(kw) { - let id_str = self.id_to_str(kw.to_ident()).to_str(); + let id_ident = kw.to_ident(); + let id_interned_str = token::get_ident(id_ident.name); let token_str = self.this_token_to_str(); self.fatal(format!("expected `{}`, found `{}`", - id_str, + id_interned_str.get(), token_str)) } } @@ -802,8 +802,8 @@ impl Parser { self.sess.span_diagnostic.handler().abort_if_errors(); } - pub fn id_to_str(&mut self, id: Ident) -> @str { - get_ident_interner().get(id.name) + pub fn id_to_interned_str(&mut self, id: Ident) -> InternedString { + get_ident(id.name) } // Is the current token one of the keywords that signals a bare function @@ -1291,7 +1291,7 @@ impl Parser { } // other things are parsed as @/~ + a type. Note that constructs like - // @[] and @str will be resolved during typeck to slices and so forth, + // ~[] and ~str will be resolved during typeck to slices and so forth, // rather than boxed ptrs. But the special casing of str/vec is not // reflected in the AST type. if sigil == OwnedSigil { @@ -1401,11 +1401,18 @@ impl Parser { token::LIT_INT(i, it) => LitInt(i, it), token::LIT_UINT(u, ut) => LitUint(u, ut), token::LIT_INT_UNSUFFIXED(i) => LitIntUnsuffixed(i), - token::LIT_FLOAT(s, ft) => LitFloat(self.id_to_str(s), ft), - token::LIT_FLOAT_UNSUFFIXED(s) => - LitFloatUnsuffixed(self.id_to_str(s)), - token::LIT_STR(s) => LitStr(self.id_to_str(s), ast::CookedStr), - token::LIT_STR_RAW(s, n) => LitStr(self.id_to_str(s), ast::RawStr(n)), + token::LIT_FLOAT(s, ft) => { + LitFloat(self.id_to_interned_str(s), ft) + } + token::LIT_FLOAT_UNSUFFIXED(s) => { + LitFloatUnsuffixed(self.id_to_interned_str(s)) + } + token::LIT_STR(s) => { + LitStr(self.id_to_interned_str(s), ast::CookedStr) + } + token::LIT_STR_RAW(s, n) => { + LitStr(self.id_to_interned_str(s), ast::RawStr(n)) + } token::LPAREN => { self.expect(&token::RPAREN); LitNil }, _ => { self.unexpected_last(tok); } } @@ -2284,11 +2291,19 @@ impl Parser { self.bump(); let e = self.parse_prefix_expr(); hi = e.span.hi; - // HACK: turn @[...] 
into a @-vec + // HACK: pretending @[] is a (removed) @-vec ex = match e.node { ExprVec(..) | - ExprRepeat(..) => ExprVstore(e, ExprVstoreBox), - ExprLit(lit) if lit_is_str(lit) => ExprVstore(e, ExprVstoreBox), + ExprRepeat(..) => { + self.obsolete(e.span, ObsoleteManagedVec); + // the above error means that no-one will know we're + // lying... hopefully. + ExprVstore(e, ExprVstoreUniq) + } + ExprLit(lit) if lit_is_str(lit) => { + self.obsolete(self.last_span, ObsoleteManagedString); + ExprVstore(e, ExprVstoreUniq) + } _ => self.mk_unary(UnBox, e) }; } @@ -2806,34 +2821,11 @@ impl Parser { token::AT => { self.bump(); let sub = self.parse_pat(); - hi = sub.span.hi; - // HACK: parse @"..." as a literal of a vstore @str - pat = match sub.node { - PatLit(e) => { - match e.node { - ExprLit(lit) if lit_is_str(lit) => { - let vst = @Expr { - id: ast::DUMMY_NODE_ID, - node: ExprVstore(e, ExprVstoreBox), - span: mk_sp(lo, hi), - }; - PatLit(vst) - } - _ => { - self.obsolete(self.span, ObsoleteManagedPattern); - PatUniq(sub) - } - } - } - _ => { - self.obsolete(self.span, ObsoleteManagedPattern); - PatUniq(sub) - } - }; - hi = self.last_span.hi; + self.obsolete(self.span, ObsoleteManagedPattern); + let hi = self.last_span.hi; return @ast::Pat { id: ast::DUMMY_NODE_ID, - node: pat, + node: PatUniq(sub), span: mk_sp(lo, hi) } } @@ -3429,7 +3421,9 @@ impl Parser { loop { match self.token { token::LIFETIME(lifetime) => { - if "static" == self.id_to_str(lifetime) { + let lifetime_interned_string = + token::get_ident(lifetime.name); + if lifetime_interned_string.equiv(&("static")) { result.push(RegionTyParamBound); } else { self.span_err(self.span, @@ -3970,8 +3964,9 @@ impl Parser { fields.push(self.parse_struct_decl_field()); } if fields.len() == 0 { + let string = get_ident_interner().get(class_name.name); self.fatal(format!("Unit-like struct definition should be written as `struct {};`", - get_ident_interner().get(class_name.name))); + string.as_slice())); } self.bump(); } else if self.token == token::LPAREN { @@ -4142,11 +4137,11 @@ impl Parser { } fn push_mod_path(&mut self, id: Ident, attrs: &[Attribute]) { - let default_path = token::interner_get(id.name); + let default_path = self.id_to_interned_str(id); let file_path = match ::attr::first_attr_value_str_by_name(attrs, "path") { Some(d) => d, - None => default_path + None => default_path, }; self.mod_path_stack.push(file_path) } @@ -4169,7 +4164,8 @@ impl Parser { outer_attrs, "path") { Some(d) => dir_path.join(d), None => { - let mod_name = token::interner_get(id.name).to_owned(); + let mod_string = token::get_ident(id.name); + let mod_name = mod_string.get().to_owned(); let default_path_str = mod_name + ".rs"; let secondary_path_str = mod_name + "/mod.rs"; let default_path = dir_path.join(default_path_str.as_slice()); @@ -4524,7 +4520,8 @@ impl Parser { token::LIT_STR(s) | token::LIT_STR_RAW(s, _) => { self.bump(); - let the_string = ident_to_str(&s); + let identifier_string = token::get_ident(s.name); + let the_string = identifier_string.get(); let mut abis = AbiSet::empty(); for word in the_string.words() { match abi::lookup(word) { @@ -4860,7 +4857,6 @@ impl Parser { let first_ident = self.parse_ident(); let mut path = ~[first_ident]; - debug!("parsed view path: {}", self.id_to_str(first_ident)); match self.token { token::EQ => { // x = foo::bar @@ -5119,17 +5115,20 @@ impl Parser { } } - pub fn parse_optional_str(&mut self) -> Option<(@str, ast::StrStyle)> { + pub fn parse_optional_str(&mut self) + -> Option<(InternedString, ast::StrStyle)> 
{ let (s, style) = match self.token { - token::LIT_STR(s) => (s, ast::CookedStr), - token::LIT_STR_RAW(s, n) => (s, ast::RawStr(n)), + token::LIT_STR(s) => (self.id_to_interned_str(s), ast::CookedStr), + token::LIT_STR_RAW(s, n) => { + (self.id_to_interned_str(s), ast::RawStr(n)) + } _ => return None }; self.bump(); - Some((ident_to_str(&s), style)) + Some((s, style)) } - pub fn parse_str(&mut self) -> (@str, StrStyle) { + pub fn parse_str(&mut self) -> (InternedString, StrStyle) { match self.parse_optional_str() { Some(s) => { s } _ => self.fatal("expected string literal") diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 68e2f44ebb184..d6edccd33a498 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -12,12 +12,15 @@ use ast; use ast::{P, Name, Mrk}; use ast_util; use parse::token; -use util::interner::StrInterner; +use util::interner::{RcStr, StrInterner}; use util::interner; +use extra::serialize::{Decodable, Decoder, Encodable, Encoder}; use std::cast; use std::char; +use std::fmt; use std::local_data; +use std::path::BytesContainer; #[allow(non_camel_case_types)] #[deriving(Clone, Encodable, Decodable, Eq, IterBytes)] @@ -185,32 +188,44 @@ pub fn to_str(input: @IdentInterner, t: &Token) -> ~str { } LIT_INT_UNSUFFIXED(i) => { i.to_str() } LIT_FLOAT(ref s, t) => { - let mut body = ident_to_str(s).to_owned(); + let body_string = get_ident(s.name); + let mut body = body_string.get().to_str(); if body.ends_with(".") { body.push_char('0'); // `10.f` is not a float literal } body + ast_util::float_ty_to_str(t) } LIT_FLOAT_UNSUFFIXED(ref s) => { - let mut body = ident_to_str(s).to_owned(); + let body_string = get_ident(s.name); + let mut body = body_string.get().to_owned(); if body.ends_with(".") { body.push_char('0'); // `10.f` is not a float literal } body } - LIT_STR(ref s) => { format!("\"{}\"", ident_to_str(s).escape_default()) } + LIT_STR(ref s) => { + let literal_string = get_ident(s.name); + format!("\"{}\"", literal_string.get().escape_default()) + } LIT_STR_RAW(ref s, n) => { + let literal_string = get_ident(s.name); format!("r{delim}\"{string}\"{delim}", - delim="#".repeat(n), string=ident_to_str(s)) + delim="#".repeat(n), string=literal_string.get()) } /* Name components */ - IDENT(s, _) => input.get(s.name).to_owned(), - LIFETIME(s) => format!("'{}", input.get(s.name)), + IDENT(s, _) => input.get(s.name).into_owned(), + LIFETIME(s) => { + let name = input.get(s.name); + format!("'{}", name.as_slice()) + } UNDERSCORE => ~"_", /* Other */ - DOC_COMMENT(ref s) => ident_to_str(s).to_owned(), + DOC_COMMENT(ref s) => { + let comment_string = get_ident(s.name); + comment_string.get().to_str() + } EOF => ~"", INTERPOLATED(ref nt) => { match nt { @@ -525,6 +540,93 @@ pub fn get_ident_interner() -> @IdentInterner { } } +/// Represents a string stored in the task-local interner. Because the +/// interner lives for the life of the task, this can be safely treated as an +/// immortal string, as long as it never crosses between tasks. +/// +/// FIXME(pcwalton): You must be careful about what you do in the destructors +/// of objects stored in TLS, because they may run after the interner is +/// destroyed. In particular, they must not access string contents. This can +/// be fixed in the future by just leaking all strings until task death +/// somehow. 
+#[deriving(Clone, Eq, IterBytes, Ord, TotalEq, TotalOrd)]
+pub struct InternedString {
+    priv string: RcStr,
+}
+
+impl InternedString {
+    #[inline]
+    pub fn new(string: &'static str) -> InternedString {
+        InternedString {
+            string: RcStr::new(string),
+        }
+    }
+
+    #[inline]
+    fn new_from_rc_str(string: RcStr) -> InternedString {
+        InternedString {
+            string: string,
+        }
+    }
+
+    #[inline]
+    pub fn get<'a>(&'a self) -> &'a str {
+        self.string.as_slice()
+    }
+}
+
+impl BytesContainer for InternedString {
+    fn container_as_bytes<'a>(&'a self) -> &'a [u8] {
+        // FIXME(pcwalton): This is a workaround for the incorrect signature
+        // of `BytesContainer`, which is itself a workaround for the lack of
+        // DST.
+        unsafe {
+            let this = self.get();
+            cast::transmute(this.container_as_bytes())
+        }
+    }
+}
+
+impl fmt::Default for InternedString {
+    fn fmt(obj: &InternedString, f: &mut fmt::Formatter) {
+        write!(f.buf, "{}", obj.string.as_slice());
+    }
+}
+
+impl<'a> Equiv<&'a str> for InternedString {
+    fn equiv(&self, other: & &'a str) -> bool {
+        (*other) == self.string.as_slice()
+    }
+}
+
+impl<D: Decoder> Decodable<D> for InternedString {
+    fn decode(d: &mut D) -> InternedString {
+        let interner = get_ident_interner();
+        get_ident(interner.intern(d.read_str()))
+    }
+}
+
+impl<E: Encoder> Encodable<E> for InternedString {
+    fn encode(&self, e: &mut E) {
+        e.emit_str(self.string.as_slice())
+    }
+}
+
+/// Returns the string contents of an identifier, using the task-local
+/// interner.
+#[inline]
+pub fn get_ident(idx: Name) -> InternedString {
+    let interner = get_ident_interner();
+    InternedString::new_from_rc_str(interner.get(idx))
+}
+
+/// Interns and returns the string contents of an identifier, using the
+/// task-local interner.
+#[inline]
+pub fn intern_and_get_ident(s: &str) -> InternedString {
+    get_ident(intern(s))
+}
+
 /* for when we don't care about the contents; doesn't interact with TLD or
    serialization */
 pub fn mk_fake_ident_interner() -> @IdentInterner {
@@ -532,6 +634,7 @@ pub fn mk_fake_ident_interner() -> @IdentInterner {
 }
 
 // maps a string to its interned representation
+#[inline]
 pub fn intern(str : &str) -> Name {
     let interner = get_ident_interner();
     interner.intern(str)
@@ -543,16 +646,6 @@ pub fn intern(str : &str) -> Name {
     interner.gensym(str)
 }
 
-// map an interned representation back to a string
-pub fn interner_get(name : Name) -> @str {
-    get_ident_interner().get(name)
-}
-
-// maps an identifier to the string that it corresponds to
-pub fn ident_to_str(id : &ast::Ident) -> @str {
-    interner_get(id.name)
-}
-
 // maps a string to an identifier with an empty syntax context
 pub fn str_to_ident(str : &str) -> ast::Ident {
     ast::Ident::new(intern(str))
@@ -576,28 +669,6 @@ pub fn str_to_ident(str : &str) -> ast::Ident {
     gensym(format!("{}_{}",ident_to_str(src),num))*/
 }
 
-// it looks like there oughta be a str_ptr_eq fn, but no one bothered to implement it?
-
-// determine whether two @str values are pointer-equal
-pub fn str_ptr_eq(a : @str, b : @str) -> bool {
-    unsafe {
-        let p : uint = cast::transmute(a);
-        let q : uint = cast::transmute(b);
-        let result = p == q;
-        // got to transmute them back, to make sure the ref count is correct:
-        let _junk1 : @str = cast::transmute(p);
-        let _junk2 : @str = cast::transmute(q);
-        result
-    }
-}
-
-// return true when two identifiers refer (through the intern table) to the same ptr_eq
-// string. This is used to compare identifiers in places where hygienic comparison is
-// not wanted (i.e. not lexical vars).
-pub fn ident_spelling_eq(a : &ast::Ident, b : &ast::Ident) -> bool { - str_ptr_eq(interner_get(a.name),interner_get(b.name)) -} - // create a fresh mark. pub fn fresh_mark() -> Mrk { gensym("mark") @@ -669,23 +740,4 @@ mod test { let a1 = mark_ident(a,92); assert!(mtwt_token_eq(&IDENT(a,true),&IDENT(a1,false))); } - - - #[test] fn str_ptr_eq_tests(){ - let a = @"abc"; - let b = @"abc"; - let c = a; - assert!(str_ptr_eq(a,c)); - assert!(!str_ptr_eq(a,b)); - } - - #[test] fn fresh_name_pointer_sharing() { - let ghi = str_to_ident("ghi"); - assert_eq!(ident_to_str(&ghi),@"ghi"); - assert!(str_ptr_eq(ident_to_str(&ghi),ident_to_str(&ghi))) - let fresh = ast::Ident::new(fresh_name(&ghi)); - assert_eq!(ident_to_str(&fresh),@"ghi"); - assert!(str_ptr_eq(ident_to_str(&ghi),ident_to_str(&fresh))); - } - } diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 902d9e1c28468..3e1f5b4cfb353 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -84,7 +84,7 @@ pub struct BeginToken { #[deriving(Clone)] pub enum Token { - String(@str, int), + String(~str, int), Break(BreakToken), Begin(BeginToken), End, @@ -131,7 +131,7 @@ pub fn buf_str(toks: ~[Token], szs: ~[int], left: uint, right: uint, if i != left { s.push_str(", "); } - s.push_str(format!("{}={}", szs[i], tok_str(toks[i]))); + s.push_str(format!("{}={}", szs[i], tok_str(toks[i].clone()))); i += 1u; i %= n; } @@ -285,7 +285,9 @@ pub struct Printer { } impl Printer { - pub fn last_token(&mut self) -> Token { self.token[self.right] } + pub fn last_token(&mut self) -> Token { + self.token[self.right].clone() + } // be very careful with this! pub fn replace_last_token(&mut self, t: Token) { self.token[self.right] = t; @@ -296,8 +298,8 @@ impl Printer { Eof => { if !self.scan_stack_empty { self.check_stack(0); - self.advance_left(self.token[self.left], - self.size[self.left]); + let left = self.token[self.left].clone(); + self.advance_left(left, self.size[self.left]); } self.indent(0); } @@ -341,16 +343,16 @@ impl Printer { self.size[self.right] = -self.right_total; self.right_total += b.blank_space; } - String(s, len) => { + String(ref s, len) => { if self.scan_stack_empty { debug!("pp String('{}')/print ~[{},{}]", - s, self.left, self.right); - self.print(t, len); + *s, self.left, self.right); + self.print(t.clone(), len); } else { debug!("pp String('{}')/buffer ~[{},{}]", - s, self.left, self.right); + *s, self.left, self.right); self.advance_right(); - self.token[self.right] = t; + self.token[self.right] = t.clone(); self.size[self.right] = len; self.right_total += len; self.check_stream(); @@ -370,7 +372,8 @@ impl Printer { self.size[self.scan_pop_bottom()] = SIZE_INFINITY; } } - self.advance_left(self.token[self.left], self.size[self.left]); + let left = self.token[self.left].clone(); + self.advance_left(left, self.size[self.left]); if self.left != self.right { self.check_stream(); } } } @@ -414,7 +417,7 @@ impl Printer { debug!("advnce_left ~[{},{}], sizeof({})={}", self.left, self.right, self.left, L); if L >= 0 { - self.print(x, L); + self.print(x.clone(), L); match x { Break(b) => self.left_total += b.blank_space, String(_, len) => { @@ -425,8 +428,8 @@ impl Printer { if self.left != self.right { self.left += 1u; self.left %= self.buf_len; - self.advance_left(self.token[self.left], - self.size[self.left]); + let left = self.token[self.left].clone(); + self.advance_left(left, self.size[self.left]); } } } @@ -483,7 +486,7 @@ impl Printer { write!(self.out, "{}", s); } pub fn print(&mut self, x: Token, L: 
int) { - debug!("print {} {} (remaining line space={})", tok_str(x), L, + debug!("print {} {} (remaining line space={})", tok_str(x.clone()), L, self.space); debug!("{}", buf_str(self.token.clone(), self.size.clone(), @@ -583,15 +586,15 @@ pub fn end(p: &mut Printer) { p.pretty_print(End); } pub fn eof(p: &mut Printer) { p.pretty_print(Eof); } pub fn word(p: &mut Printer, wrd: &str) { - p.pretty_print(String(/* bad */ wrd.to_managed(), wrd.len() as int)); + p.pretty_print(String(/* bad */ wrd.to_str(), wrd.len() as int)); } pub fn huge_word(p: &mut Printer, wrd: &str) { - p.pretty_print(String(/* bad */ wrd.to_managed(), SIZE_INFINITY)); + p.pretty_print(String(/* bad */ wrd.to_str(), SIZE_INFINITY)); } pub fn zero_word(p: &mut Printer, wrd: &str) { - p.pretty_print(String(/* bad */ wrd.to_managed(), 0)); + p.pretty_print(String(/* bad */ wrd.to_str(), 0)); } pub fn spaces(p: &mut Printer, n: uint) { break_offset(p, n, 0); } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 2783284ea8b05..037c69eb918d1 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -19,7 +19,7 @@ use codemap::{CodeMap, BytePos}; use codemap; use diagnostic; use parse::classify::expr_is_simple_block; -use parse::token::{IdentInterner, ident_to_str, interner_get}; +use parse::token::IdentInterner; use parse::{comments, token}; use parse; use print::pp::{break_offset, word, space, zerobreak, hardbreak}; @@ -117,7 +117,7 @@ pub fn print_crate(cm: @CodeMap, intr: @IdentInterner, span_diagnostic: @diagnostic::SpanHandler, crate: &ast::Crate, - filename: @str, + filename: ~str, input: &mut io::Reader, out: ~io::Writer, ann: @PpAnn, @@ -897,7 +897,7 @@ pub fn print_attribute(s: &mut State, attr: &ast::Attribute) { maybe_print_comment(s, attr.span.lo); if attr.node.is_sugared_doc { let comment = attr.value_str().unwrap(); - word(&mut s.s, comment); + word(&mut s.s, comment.get()); } else { word(&mut s.s, "#["); print_meta_item(s, attr.meta()); @@ -1058,23 +1058,9 @@ pub fn print_mac(s: &mut State, m: &ast::Mac) { } } -pub fn print_vstore(s: &mut State, t: ast::Vstore) { - match t { - ast::VstoreFixed(Some(i)) => word(&mut s.s, format!("{}", i)), - ast::VstoreFixed(None) => word(&mut s.s, "_"), - ast::VstoreUniq => word(&mut s.s, "~"), - ast::VstoreBox => word(&mut s.s, "@"), - ast::VstoreSlice(ref r) => { - word(&mut s.s, "&"); - print_opt_lifetime(s, r); - } - } -} - pub fn print_expr_vstore(s: &mut State, t: ast::ExprVstore) { match t { ast::ExprVstoreUniq => word(&mut s.s, "~"), - ast::ExprVstoreBox => word(&mut s.s, "@"), ast::ExprVstoreSlice => word(&mut s.s, "&"), ast::ExprVstoreMutSlice => { word(&mut s.s, "&"); @@ -1466,25 +1452,25 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) { word(&mut s.s, "asm!"); } popen(s); - print_string(s, a.asm, a.asm_str_style); + print_string(s, a.asm.get(), a.asm_str_style); word_space(s, ":"); - for &(co, o) in a.outputs.iter() { - print_string(s, co, ast::CookedStr); + for &(ref co, o) in a.outputs.iter() { + print_string(s, co.get(), ast::CookedStr); popen(s); print_expr(s, o); pclose(s); word_space(s, ","); } word_space(s, ":"); - for &(co, o) in a.inputs.iter() { - print_string(s, co, ast::CookedStr); + for &(ref co, o) in a.inputs.iter() { + print_string(s, co.get(), ast::CookedStr); popen(s); print_expr(s, o); pclose(s); word_space(s, ","); } word_space(s, ":"); - print_string(s, a.clobbers, ast::CookedStr); + print_string(s, a.clobbers.get(), ast::CookedStr); pclose(s); } ast::ExprMac(ref m) => print_mac(s, m), @@ 
-1539,11 +1525,13 @@ pub fn print_decl(s: &mut State, decl: &ast::Decl) { } pub fn print_ident(s: &mut State, ident: ast::Ident) { - word(&mut s.s, ident_to_str(&ident)); + let string = token::get_ident(ident.name); + word(&mut s.s, string.get()); } pub fn print_name(s: &mut State, name: ast::Name) { - word(&mut s.s, interner_get(name)); + let string = token::get_ident(name); + word(&mut s.s, string.get()); } pub fn print_for_decl(s: &mut State, loc: &ast::Local, coll: &ast::Expr) { @@ -1930,14 +1918,14 @@ pub fn print_generics(s: &mut State, generics: &ast::Generics) { pub fn print_meta_item(s: &mut State, item: &ast::MetaItem) { ibox(s, indent_unit); match item.node { - ast::MetaWord(name) => word(&mut s.s, name), - ast::MetaNameValue(name, value) => { - word_space(s, name); + ast::MetaWord(ref name) => word(&mut s.s, name.get()), + ast::MetaNameValue(ref name, ref value) => { + word_space(s, name.get()); word_space(s, "="); - print_literal(s, &value); + print_literal(s, value); } - ast::MetaList(name, ref items) => { - word(&mut s.s, name); + ast::MetaList(ref name, ref items) => { + word(&mut s.s, name.get()); popen(s); commasep(s, Consistent, @@ -1998,7 +1986,7 @@ pub fn print_view_item(s: &mut State, item: &ast::ViewItem) { space(&mut s.s); word(&mut s.s, "="); space(&mut s.s); - print_string(s, *p, style); + print_string(s, p.get(), style); } } @@ -2172,7 +2160,7 @@ pub fn print_literal(s: &mut State, lit: &ast::Lit) { _ => () } match lit.node { - ast::LitStr(st, style) => print_string(s, st, style), + ast::LitStr(ref st, style) => print_string(s, st.get(), style), ast::LitChar(ch) => { let mut res = ~"'"; char::from_u32(ch).unwrap().escape_default(|c| res.push_char(c)); @@ -2202,18 +2190,18 @@ pub fn print_literal(s: &mut State, lit: &ast::Lit) { word(&mut s.s, (i as u64).to_str_radix(10u)); } } - ast::LitFloat(f, t) => { - word(&mut s.s, f.to_owned() + ast_util::float_ty_to_str(t)); + ast::LitFloat(ref f, t) => { + word(&mut s.s, f.get() + ast_util::float_ty_to_str(t)); } - ast::LitFloatUnsuffixed(f) => word(&mut s.s, f), + ast::LitFloatUnsuffixed(ref f) => word(&mut s.s, f.get()), ast::LitNil => word(&mut s.s, "()"), ast::LitBool(val) => { if val { word(&mut s.s, "true"); } else { word(&mut s.s, "false"); } } - ast::LitBinary(arr) => { + ast::LitBinary(ref arr) => { ibox(s, indent_unit); word(&mut s.s, "["); - commasep_cmnt(s, Inconsistent, arr, |s, u| word(&mut s.s, format!("{}", *u)), + commasep_cmnt(s, Inconsistent, *arr.borrow(), |s, u| word(&mut s.s, format!("{}", *u)), |_| lit.span); word(&mut s.s, "]"); end(s); diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index fdc54f1f140a6..fc3e55dcde256 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -14,9 +14,11 @@ use ast::Name; +use std::cast; use std::cell::RefCell; use std::cmp::Equiv; use std::hashmap::HashMap; +use std::rc::Rc; pub struct Interner { priv map: @RefCell>, @@ -82,11 +84,49 @@ impl Interner { } } +#[deriving(Clone, Eq, IterBytes, Ord)] +pub struct RcStr { + priv string: Rc<~str>, +} + +impl TotalEq for RcStr { + fn equals(&self, other: &RcStr) -> bool { + self.as_slice().equals(&other.as_slice()) + } +} + +impl TotalOrd for RcStr { + fn cmp(&self, other: &RcStr) -> Ordering { + self.as_slice().cmp(&other.as_slice()) + } +} + +impl Str for RcStr { + #[inline] + fn as_slice<'a>(&'a self) -> &'a str { + let s: &'a str = *self.string.borrow(); + s + } + + #[inline] + fn into_owned(self) -> ~str { + self.string.borrow().to_owned() + } +} + +impl RcStr { + 
pub fn new(string: &str) -> RcStr { + RcStr { + string: Rc::new(string.to_owned()), + } + } +} + // A StrInterner differs from Interner in that it accepts // references rather than @ ones, resulting in less allocation. pub struct StrInterner { - priv map: @RefCell>, - priv vect: @RefCell<~[@str]>, + priv map: @RefCell>, + priv vect: @RefCell<~[RcStr]>, } // when traits can extend traits, we should extend index to get [] @@ -112,8 +152,8 @@ impl StrInterner { } let new_idx = self.len() as Name; - let val = val.to_managed(); - map.get().insert(val, new_idx); + let val = RcStr::new(val); + map.get().insert(val.clone(), new_idx); let mut vect = self.vect.borrow_mut(); vect.get().push(val); new_idx @@ -123,7 +163,7 @@ impl StrInterner { let new_idx = self.len() as Name; // leave out of .map to avoid colliding let mut vect = self.vect.borrow_mut(); - vect.get().push(val.to_managed()); + vect.get().push(RcStr::new(val)); new_idx } @@ -141,14 +181,24 @@ impl StrInterner { let new_idx = self.len() as Name; // leave out of map to avoid colliding let mut vect = self.vect.borrow_mut(); - let existing = vect.get()[idx]; + let existing = vect.get()[idx].clone(); vect.get().push(existing); new_idx } - pub fn get(&self, idx: Name) -> @str { + pub fn get(&self, idx: Name) -> RcStr { let vect = self.vect.borrow(); - vect.get()[idx] + vect.get()[idx].clone() + } + + /// Returns this string with lifetime tied to the interner. Since + /// strings may never be removed from the interner, this is safe. + pub fn get_ref<'a>(&'a self, idx: Name) -> &'a str { + let vect = self.vect.borrow(); + let s: &str = vect.get()[idx].as_slice(); + unsafe { + cast::transmute(s) + } } pub fn len(&self) -> uint { @@ -156,7 +206,7 @@ impl StrInterner { vect.get().len() } - pub fn find_equiv>(&self, val: &Q) + pub fn find_equiv>(&self, val: &Q) -> Option { let map = self.map.borrow(); match map.get().find_equiv(val) { @@ -172,42 +222,46 @@ mod tests { #[test] #[should_fail] fn i1 () { - let i : Interner<@str> = Interner::new(); + let i : Interner = Interner::new(); i.get(13); } #[test] fn interner_tests () { - let i : Interner<@str> = Interner::new(); + let i : Interner = Interner::new(); // first one is zero: - assert_eq!(i.intern(@"dog"), 0); + assert_eq!(i.intern(RcStr::new("dog")), 0); // re-use gets the same entry: - assert_eq!(i.intern(@"dog"), 0); + assert_eq!(i.intern(RcStr::new("dog")), 0); // different string gets a different #: - assert_eq!(i.intern(@"cat"), 1); - assert_eq!(i.intern(@"cat"), 1); + assert_eq!(i.intern(RcStr::new("cat")), 1); + assert_eq!(i.intern(RcStr::new("cat")), 1); // dog is still at zero - assert_eq!(i.intern(@"dog"), 0); + assert_eq!(i.intern(RcStr::new("dog")), 0); // gensym gets 3 - assert_eq!(i.gensym(@"zebra" ), 2); + assert_eq!(i.gensym(RcStr::new("zebra") ), 2); // gensym of same string gets new number : - assert_eq!(i.gensym (@"zebra" ), 3); + assert_eq!(i.gensym (RcStr::new("zebra") ), 3); // gensym of *existing* string gets new number: - assert_eq!(i.gensym(@"dog"), 4); - assert_eq!(i.get(0), @"dog"); - assert_eq!(i.get(1), @"cat"); - assert_eq!(i.get(2), @"zebra"); - assert_eq!(i.get(3), @"zebra"); - assert_eq!(i.get(4), @"dog"); + assert_eq!(i.gensym(RcStr::new("dog")), 4); + assert_eq!(i.get(0), RcStr::new("dog")); + assert_eq!(i.get(1), RcStr::new("cat")); + assert_eq!(i.get(2), RcStr::new("zebra")); + assert_eq!(i.get(3), RcStr::new("zebra")); + assert_eq!(i.get(4), RcStr::new("dog")); } #[test] fn i3 () { - let i : Interner<@str> = Interner::prefill([@"Alan",@"Bob",@"Carol"]); - 
assert_eq!(i.get(0), @"Alan"); - assert_eq!(i.get(1), @"Bob"); - assert_eq!(i.get(2), @"Carol"); - assert_eq!(i.intern(@"Bob"), 1); + let i : Interner = Interner::prefill([ + RcStr::new("Alan"), + RcStr::new("Bob"), + RcStr::new("Carol") + ]); + assert_eq!(i.get(0), RcStr::new("Alan")); + assert_eq!(i.get(1), RcStr::new("Bob")); + assert_eq!(i.get(2), RcStr::new("Carol")); + assert_eq!(i.intern(RcStr::new("Bob")), 1); } #[test] @@ -230,13 +284,13 @@ mod tests { assert_eq!(i.gensym("dog"), 4); // gensym tests again with gensym_copy: assert_eq!(i.gensym_copy(2), 5); - assert_eq!(i.get(5), @"zebra"); + assert_eq!(i.get(5), RcStr::new("zebra")); assert_eq!(i.gensym_copy(2), 6); - assert_eq!(i.get(6), @"zebra"); - assert_eq!(i.get(0), @"dog"); - assert_eq!(i.get(1), @"cat"); - assert_eq!(i.get(2), @"zebra"); - assert_eq!(i.get(3), @"zebra"); - assert_eq!(i.get(4), @"dog"); + assert_eq!(i.get(6), RcStr::new("zebra")); + assert_eq!(i.get(0), RcStr::new("dog")); + assert_eq!(i.get(1), RcStr::new("cat")); + assert_eq!(i.get(2), RcStr::new("zebra")); + assert_eq!(i.get(3), RcStr::new("zebra")); + assert_eq!(i.get(4), RcStr::new("dog")); } } diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index dd3ae168149eb..58c2bed7a45a7 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -17,29 +17,29 @@ use parse::token; // map a string to tts, using a made-up filename: return both the TokenTree's // and the ParseSess -pub fn string_to_tts_and_sess (source_str : @str) -> (~[ast::TokenTree], @ParseSess) { +pub fn string_to_tts_and_sess (source_str : ~str) -> (~[ast::TokenTree], @ParseSess) { let ps = new_parse_sess(None); - (filemap_to_tts(ps,string_to_filemap(ps,source_str,@"bogofile")),ps) + (filemap_to_tts(ps,string_to_filemap(ps,source_str,~"bogofile")),ps) } // map a string to tts, using a made-up filename: -pub fn string_to_tts(source_str : @str) -> ~[ast::TokenTree] { +pub fn string_to_tts(source_str : ~str) -> ~[ast::TokenTree] { let (tts,_) = string_to_tts_and_sess(source_str); tts } -pub fn string_to_parser_and_sess(source_str: @str) -> (Parser,@ParseSess) { +pub fn string_to_parser_and_sess(source_str: ~str) -> (Parser,@ParseSess) { let ps = new_parse_sess(None); - (new_parser_from_source_str(ps,~[],@"bogofile",source_str),ps) + (new_parser_from_source_str(ps,~[],~"bogofile",source_str),ps) } // map string to parser (via tts) -pub fn string_to_parser(source_str: @str) -> Parser { +pub fn string_to_parser(source_str: ~str) -> Parser { let (p,_) = string_to_parser_and_sess(source_str); p } -fn with_error_checking_parse(s: @str, f: |&mut Parser| -> T) -> T { +fn with_error_checking_parse(s: ~str, f: |&mut Parser| -> T) -> T { let mut p = string_to_parser(s); let x = f(&mut p); p.abort_if_errors(); @@ -47,34 +47,34 @@ fn with_error_checking_parse(s: @str, f: |&mut Parser| -> T) -> T { } // parse a string, return a crate. 
-pub fn string_to_crate (source_str : @str) -> ast::Crate { +pub fn string_to_crate (source_str : ~str) -> ast::Crate { with_error_checking_parse(source_str, |p| { p.parse_crate_mod() }) } // parse a string, return a crate and the ParseSess -pub fn string_to_crate_and_sess (source_str : @str) -> (ast::Crate,@ParseSess) { +pub fn string_to_crate_and_sess (source_str : ~str) -> (ast::Crate,@ParseSess) { let (mut p,ps) = string_to_parser_and_sess(source_str); (p.parse_crate_mod(),ps) } // parse a string, return an expr -pub fn string_to_expr (source_str : @str) -> @ast::Expr { +pub fn string_to_expr (source_str : ~str) -> @ast::Expr { with_error_checking_parse(source_str, |p| { p.parse_expr() }) } // parse a string, return an item -pub fn string_to_item (source_str : @str) -> Option<@ast::Item> { +pub fn string_to_item (source_str : ~str) -> Option<@ast::Item> { with_error_checking_parse(source_str, |p| { p.parse_item(~[]) }) } // parse a string, return a stmt -pub fn string_to_stmt(source_str : @str) -> @ast::Stmt { +pub fn string_to_stmt(source_str : ~str) -> @ast::Stmt { with_error_checking_parse(source_str, |p| { p.parse_stmt(~[]) }) @@ -82,7 +82,7 @@ pub fn string_to_stmt(source_str : @str) -> @ast::Stmt { // parse a string, return a pat. Uses "irrefutable"... which doesn't // (currently) affect parsing. -pub fn string_to_pat(source_str : @str) -> @ast::Pat { +pub fn string_to_pat(source_str : ~str) -> @ast::Pat { string_to_parser(source_str).parse_pat() } diff --git a/src/test/compile-fail/auto-ref-slice-plus-ref.rs b/src/test/compile-fail/auto-ref-slice-plus-ref.rs index 311becc63eff1..6a0f5a39202a9 100644 --- a/src/test/compile-fail/auto-ref-slice-plus-ref.rs +++ b/src/test/compile-fail/auto-ref-slice-plus-ref.rs @@ -17,7 +17,7 @@ fn main() { // reference. That would allow creating a mutable pointer to a // temporary, which would be a source of confusion - let mut a = @[0]; + let mut a = ~[0]; a.test_mut(); //~ ERROR does not implement any method in scope named `test_mut` } diff --git a/src/test/compile-fail/drop-on-non-struct.rs b/src/test/compile-fail/drop-on-non-struct.rs index ff901f986e68d..0d01fe4e8c732 100644 --- a/src/test/compile-fail/drop-on-non-struct.rs +++ b/src/test/compile-fail/drop-on-non-struct.rs @@ -10,7 +10,7 @@ #[feature(managed_boxes)]; -type Foo = @[u8]; +type Foo = ~[u8]; impl Drop for Foo { //~ ERROR the Drop trait may only be implemented //~^ ERROR cannot provide an extension implementation diff --git a/src/test/compile-fail/estr-subtyping.rs b/src/test/compile-fail/estr-subtyping.rs index 7dc99074f72f1..d99d29fb81080 100644 --- a/src/test/compile-fail/estr-subtyping.rs +++ b/src/test/compile-fail/estr-subtyping.rs @@ -8,26 +8,15 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-#[feature(managed_boxes)]; - -fn wants_box(x: @str) { } fn wants_uniq(x: ~str) { } fn wants_slice(x: &str) { } -fn has_box(x: @str) { - wants_box(x); - wants_uniq(x); //~ ERROR str storage differs: expected `~` but found `@` - wants_slice(x); -} - fn has_uniq(x: ~str) { - wants_box(x); //~ ERROR str storage differs: expected `@` but found `~` wants_uniq(x); wants_slice(x); } fn has_slice(x: &str) { - wants_box(x); //~ ERROR str storage differs: expected `@` but found `&` wants_uniq(x); //~ ERROR str storage differs: expected `~` but found `&` wants_slice(x); } diff --git a/src/test/compile-fail/evec-subtyping.rs b/src/test/compile-fail/evec-subtyping.rs index da324e1dc202e..9a0227b7d31a2 100644 --- a/src/test/compile-fail/evec-subtyping.rs +++ b/src/test/compile-fail/evec-subtyping.rs @@ -10,30 +10,20 @@ #[feature(managed_boxes)]; -fn wants_box(x: @[uint]) { } fn wants_uniq(x: ~[uint]) { } fn wants_three(x: [uint, ..3]) { } -fn has_box(x: @[uint]) { - wants_box(x); - wants_uniq(x); //~ ERROR [] storage differs: expected `~` but found `@` - wants_three(x); //~ ERROR [] storage differs: expected `3` but found `@` -} - fn has_uniq(x: ~[uint]) { - wants_box(x); //~ ERROR [] storage differs: expected `@` but found `~` wants_uniq(x); wants_three(x); //~ ERROR [] storage differs: expected `3` but found `~` } fn has_three(x: [uint, ..3]) { - wants_box(x); //~ ERROR [] storage differs: expected `@` but found `3` wants_uniq(x); //~ ERROR [] storage differs: expected `~` but found `3` wants_three(x); } fn has_four(x: [uint, ..4]) { - wants_box(x); //~ ERROR [] storage differs: expected `@` but found `4` wants_uniq(x); //~ ERROR [] storage differs: expected `~` but found `4` wants_three(x); //~ ERROR [] storage differs: expected `3` but found `4` } diff --git a/src/test/compile-fail/issue-10487.rs b/src/test/compile-fail/issue-10487.rs index 302e883942e6c..3fc6106f7480b 100644 --- a/src/test/compile-fail/issue-10487.rs +++ b/src/test/compile-fail/issue-10487.rs @@ -11,6 +11,5 @@ #[feature(managed_boxes)]; static x: ~[int] = ~[123, 456]; //~ ERROR: cannot allocate vectors in constant expressions -static y: @[int] = @[123, 456]; //~ ERROR: cannot allocate vectors in constant expressions fn main() {} diff --git a/src/test/compile-fail/lint-heap-memory.rs b/src/test/compile-fail/lint-heap-memory.rs index c02da1beeb732..8899f3f5dbbcf 100644 --- a/src/test/compile-fail/lint-heap-memory.rs +++ b/src/test/compile-fail/lint-heap-memory.rs @@ -22,11 +22,8 @@ fn main() { let _x : Bar = Bar {x : ~10}; //~ ERROR type uses owned @2; //~ ERROR type uses managed - @[1]; //~ ERROR type uses managed - //~^ ERROR type uses managed + fn f(_: @Clone) {} //~ ERROR type uses managed - @""; //~ ERROR type uses managed - //~^ ERROR type uses managed ~2; //~ ERROR type uses owned ~[1]; //~ ERROR type uses owned diff --git a/src/test/compile-fail/moves-based-on-type-exprs.rs b/src/test/compile-fail/moves-based-on-type-exprs.rs index 34e506ed015c8..4e2391ea25ff2 100644 --- a/src/test/compile-fail/moves-based-on-type-exprs.rs +++ b/src/test/compile-fail/moves-based-on-type-exprs.rs @@ -72,12 +72,6 @@ fn f80() { touch(&x); //~ ERROR use of moved value: `x` } -fn f90() { - let x = ~"hi"; - let _y = @[x]; - touch(&x); //~ ERROR use of moved value: `x` -} - fn f100() { let x = ~[~"hi"]; let _y = x[0]; diff --git a/src/test/debug-info/boxed-vec.rs b/src/test/debug-info/boxed-vec.rs index d7756c00dabbd..2c0edb783f830 100644 --- a/src/test/debug-info/boxed-vec.rs +++ b/src/test/debug-info/boxed-vec.rs @@ -17,21 +17,15 @@ // 
debugger:run // debugger:finish -// debugger:print managed->val.fill -// check:$1 = 24 -// debugger:print *((uint64_t[3]*)(managed->val.elements)) -// check:$2 = {7, 8, 9} - // debugger:print unique->fill -// check:$3 = 32 +// check:$1 = 32 // debugger:print *((uint64_t[4]*)(unique->elements)) -// check:$4 = {10, 11, 12, 13} +// check:$2 = {10, 11, 12, 13} #[allow(unused_variable)]; fn main() { - let managed: @[i64] = @[7, 8, 9]; let unique: ~[i64] = ~[10, 11, 12, 13]; zzz(); diff --git a/src/test/run-pass/auto-encode.rs b/src/test/run-pass/auto-encode.rs index 5f697b7e51440..f3af7d652cde4 100644 --- a/src/test/run-pass/auto-encode.rs +++ b/src/test/run-pass/auto-encode.rs @@ -144,9 +144,6 @@ pub fn main() { let a = &Point {x: 3u, y: 5u}; test_ebml(a); - let a = &@[1u, 2u, 3u]; - test_ebml(a); - let a = &Top(22u); test_ebml(a); diff --git a/src/test/run-pass/auto-ref-slice-plus-ref.rs b/src/test/run-pass/auto-ref-slice-plus-ref.rs index eae791b6b08a6..c22e25e5d95be 100644 --- a/src/test/run-pass/auto-ref-slice-plus-ref.rs +++ b/src/test/run-pass/auto-ref-slice-plus-ref.rs @@ -30,11 +30,9 @@ pub fn main() { ([1]).test_imm(); (~[1]).test_imm(); - (@[1]).test_imm(); (&[1]).test_imm(); ("test").test_imm(); (~"test").test_imm(); - (@"test").test_imm(); (&"test").test_imm(); // FIXME: Other types of mutable vecs don't currently exist diff --git a/src/test/run-pass/borrowck-borrow-from-at-vec.rs b/src/test/run-pass/borrowck-borrow-from-at-vec.rs deleted file mode 100644 index 5ae959ef16999..0000000000000 --- a/src/test/run-pass/borrowck-borrow-from-at-vec.rs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -#[feature(managed_boxes)]; - -fn sum_slice(x: &[int]) -> int { - let mut sum = 0; - for i in x.iter() { sum += *i; } - return sum; -} - -pub fn main() { - let x = @[1, 2, 3]; - assert_eq!(sum_slice(x), 6); -} diff --git a/src/test/run-pass/borrowed-ptr-pattern-infallible.rs b/src/test/run-pass/borrowed-ptr-pattern-infallible.rs index 77484b8da4a7d..07a13e5395fc3 100644 --- a/src/test/run-pass/borrowed-ptr-pattern-infallible.rs +++ b/src/test/run-pass/borrowed-ptr-pattern-infallible.rs @@ -11,8 +11,7 @@ #[feature(managed_boxes)]; pub fn main() { - let (&x, &y, &z) = (&3, &'a', &@"No pets!"); + let (&x, &y) = (&3, &'a'); assert_eq!(x, 3); assert_eq!(y, 'a'); - assert_eq!(z, @"No pets!"); } diff --git a/src/test/run-pass/borrowed-ptr-pattern.rs b/src/test/run-pass/borrowed-ptr-pattern.rs index 11751ed6ade3b..7ccb40c8e7b37 100644 --- a/src/test/run-pass/borrowed-ptr-pattern.rs +++ b/src/test/run-pass/borrowed-ptr-pattern.rs @@ -17,5 +17,4 @@ fn foo(x: &T) -> T{ pub fn main() { assert_eq!(foo(&3), 3); assert_eq!(foo(&'a'), 'a'); - assert_eq!(foo(&@"Dogs rule, cats drool"), @"Dogs rule, cats drool"); } diff --git a/src/test/run-pass/estr-shared.rs b/src/test/run-pass/estr-shared.rs deleted file mode 100644 index 73837a46df707..0000000000000 --- a/src/test/run-pass/estr-shared.rs +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. 
-// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -#[feature(managed_boxes)]; - -pub fn main() { - let _x : @str = @"hello"; -} diff --git a/src/test/run-pass/expr-repeat-vstore.rs b/src/test/run-pass/expr-repeat-vstore.rs index 28fd5dbfa8dec..c841297e19f2f 100644 --- a/src/test/run-pass/expr-repeat-vstore.rs +++ b/src/test/run-pass/expr-repeat-vstore.rs @@ -7,10 +7,4 @@ pub fn main() { println!("{}", v[2]); println!("{}", v[3]); println!("{}", v[4]); - let v: @[int] = @[ 2, ..5 ]; - println!("{}", v[0]); - println!("{}", v[1]); - println!("{}", v[2]); - println!("{}", v[3]); - println!("{}", v[4]); } diff --git a/src/test/run-pass/ifmt.rs b/src/test/run-pass/ifmt.rs index 610cba1eb1fa6..cc59ce5d8b245 100644 --- a/src/test/run-pass/ifmt.rs +++ b/src/test/run-pass/ifmt.rs @@ -58,7 +58,6 @@ pub fn main() { t!(format!("{}", 1.0f64), "1"); t!(format!("{}", "a"), "a"); t!(format!("{}", ~"a"), "a"); - t!(format!("{}", @"a"), "a"); t!(format!("{}", false), "false"); t!(format!("{}", 'a'), "a"); @@ -73,7 +72,6 @@ pub fn main() { t!(format!("{:X}", 10u), "A"); t!(format!("{:s}", "foo"), "foo"); t!(format!("{:s}", ~"foo"), "foo"); - t!(format!("{:s}", @"foo"), "foo"); t!(format!("{:p}", 0x1234 as *int), "0x1234"); t!(format!("{:p}", 0x1234 as *mut int), "0x1234"); t!(format!("{:d}", A), "aloha"); diff --git a/src/test/run-pass/issue-3574.rs b/src/test/run-pass/issue-3574.rs index eb59b3e12b610..ace27c5ea11f6 100644 --- a/src/test/run-pass/issue-3574.rs +++ b/src/test/run-pass/issue-3574.rs @@ -26,5 +26,4 @@ pub fn main() { assert!(compare("foo", "foo")); assert!(compare(~"foo", ~"foo")); - assert!(compare(@"foo", @"foo")); } diff --git a/src/test/run-pass/issue-4092.rs b/src/test/run-pass/issue-4092.rs deleted file mode 100644 index 62174a70d07fe..0000000000000 --- a/src/test/run-pass/issue-4092.rs +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -#[feature(managed_boxes)]; - -use std::hashmap::HashMap; - -pub fn main() { - let mut x = HashMap::new(); - x.insert((@"abc", 0), 0); -} diff --git a/src/test/run-pass/issue-5926.rs b/src/test/run-pass/issue-5926.rs deleted file mode 100644 index ffb7a0a5bb30e..0000000000000 --- a/src/test/run-pass/issue-5926.rs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
- -#[feature(managed_boxes)]; -#[allow(unused_mut)]; - -pub fn main() { - let mut your_favorite_numbers = @[1,2,3]; - let mut my_favorite_numbers = @[4,5,6]; - let f = your_favorite_numbers + my_favorite_numbers; - println!("The third favorite number is {:?}.", f) -} - diff --git a/src/test/run-pass/issue-9382.rs b/src/test/run-pass/issue-9382.rs index f6bbd8ebef86a..c5123f2311625 100644 --- a/src/test/run-pass/issue-9382.rs +++ b/src/test/run-pass/issue-9382.rs @@ -35,10 +35,6 @@ pub fn main() { baz: ~[], bar: ~32, }; - let _t1_at = Thing1 { - baz: @[], - bar: ~32, - }; let _t2_fixed = Thing2 { baz: &[], bar: 32, @@ -47,8 +43,4 @@ pub fn main() { baz: ~[], bar: 32, }; - let _t2_at = Thing2 { - baz: @[], - bar: 32, - }; } diff --git a/src/test/run-pass/match-borrowed_str.rs b/src/test/run-pass/match-borrowed_str.rs index acff2de548eed..b0f31f70f53c0 100644 --- a/src/test/run-pass/match-borrowed_str.rs +++ b/src/test/run-pass/match-borrowed_str.rs @@ -43,19 +43,15 @@ fn g2(ref_1: &str, ref_2: &str) -> ~str { } pub fn main() { - assert_eq!(f1(@"a"), ~"found a"); assert_eq!(f1(~"b"), ~"found b"); assert_eq!(f1(&"c"), ~"not found"); assert_eq!(f1("d"), ~"not found"); - assert_eq!(f2(@"a"), ~"found a"); assert_eq!(f2(~"b"), ~"found b"); assert_eq!(f2(&"c"), ~"not found (c)"); assert_eq!(f2("d"), ~"not found (d)"); - assert_eq!(g1(@"a", @"b"), ~"found a,b"); assert_eq!(g1(~"b", ~"c"), ~"found b,c"); assert_eq!(g1(&"c", &"d"), ~"not found"); assert_eq!(g1("d", "e"), ~"not found"); - assert_eq!(g2(@"a", @"b"), ~"found a,b"); assert_eq!(g2(~"b", ~"c"), ~"found b,c"); assert_eq!(g2(&"c", &"d"), ~"not found (c, d)"); assert_eq!(g2("d", "e"), ~"not found (d, e)"); diff --git a/src/test/run-pass/nullable-pointer-iotareduction.rs b/src/test/run-pass/nullable-pointer-iotareduction.rs index acb7fe12360e1..7d8d5d635f904 100644 --- a/src/test/run-pass/nullable-pointer-iotareduction.rs +++ b/src/test/run-pass/nullable-pointer-iotareduction.rs @@ -79,10 +79,7 @@ pub fn main() { check_type!(~18: ~int); check_type!(@19: @int); check_type!(~"foo": ~str); - check_type!(@"bar": @str); check_type!(~[20, 22]: ~[int]); - check_type!(@[]: @[int]); - check_type!(@[24, 26]: @[int]); let mint: uint = unsafe { cast::transmute(main) }; check_type!(main: extern fn(), |pthing| { assert!(mint == unsafe { cast::transmute(*pthing) }) diff --git a/src/test/run-pass/nullable-pointer-size.rs b/src/test/run-pass/nullable-pointer-size.rs index 228b91a4532cd..84a6baa5de8a9 100644 --- a/src/test/run-pass/nullable-pointer-size.rs +++ b/src/test/run-pass/nullable-pointer-size.rs @@ -41,8 +41,6 @@ pub fn main() { check_type!(~int); check_type!(@int); check_type!(~str); - check_type!(@str); check_type!(~[int]); - check_type!(@[int]); check_type!(extern fn()); } diff --git a/src/test/run-pass/packed-struct-generic-size.rs b/src/test/run-pass/packed-struct-generic-size.rs index cba923ef646f8..0b6ab579e6b73 100644 --- a/src/test/run-pass/packed-struct-generic-size.rs +++ b/src/test/run-pass/packed-struct-generic-size.rs @@ -22,6 +22,6 @@ pub fn main() { assert_eq!(mem::size_of::>(), 11); - assert_eq!(mem::size_of::>(), - 1 + mem::size_of::<~str>() + mem::size_of::<@[int]>()); + assert_eq!(mem::size_of::>(), + 1 + mem::size_of::<~str>() + mem::size_of::<~[int]>()); } diff --git a/src/test/run-pass/reflect-visit-data.rs b/src/test/run-pass/reflect-visit-data.rs index 6dec5fdaa1cba..6a817bf03d401 100644 --- a/src/test/run-pass/reflect-visit-data.rs +++ b/src/test/run-pass/reflect-visit-data.rs @@ -180,9 +180,6 @@ impl TyVisitor for 
ptr_visit_adaptor { } fn visit_estr_box(&mut self) -> bool { - self.align_to::<@str>(); - if ! self.inner().visit_estr_box() { return false; } - self.bump_past::<@str>(); true } @@ -255,9 +252,6 @@ impl TyVisitor for ptr_visit_adaptor { } fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool { - self.align_to::<@[u8]>(); - if ! self.inner().visit_evec_box(mtbl, inner) { return false; } - self.bump_past::<@[u8]>(); true } diff --git a/src/test/run-pass/regions-borrow-evec-at.rs b/src/test/run-pass/regions-borrow-evec-at.rs deleted file mode 100644 index 3c0fcba2064fb..0000000000000 --- a/src/test/run-pass/regions-borrow-evec-at.rs +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -#[feature(managed_boxes)]; - -fn foo(x: &[uint]) -> uint { - x[0] -} - -pub fn main() { - let p = @[22u]; - let r = foo(p); - assert_eq!(r, 22u); -} diff --git a/src/test/run-pass/repeated-vector-syntax.rs b/src/test/run-pass/repeated-vector-syntax.rs index 03497de0d554f..9f2069908437b 100644 --- a/src/test/run-pass/repeated-vector-syntax.rs +++ b/src/test/run-pass/repeated-vector-syntax.rs @@ -16,7 +16,7 @@ struct Foo { } pub fn main() { - let x = [ @[true], ..512 ]; + let x = [ [true], ..512 ]; let y = [ 0, ..1 ]; error!("{:?}", x); diff --git a/src/test/run-pass/send_str_hashmap.rs b/src/test/run-pass/send_str_hashmap.rs index 1e3bd5897a9d5..dc7e51c3c2364 100644 --- a/src/test/run-pass/send_str_hashmap.rs +++ b/src/test/run-pass/send_str_hashmap.rs @@ -63,11 +63,6 @@ pub fn main() { assert_eq!(map.find_equiv(&(~"cde")), Some(&c)); assert_eq!(map.find_equiv(&(~"def")), Some(&d)); - assert_eq!(map.find_equiv(&(@"abc")), Some(&a)); - assert_eq!(map.find_equiv(&(@"bcd")), Some(&b)); - assert_eq!(map.find_equiv(&(@"cde")), Some(&c)); - assert_eq!(map.find_equiv(&(@"def")), Some(&d)); - assert_eq!(map.find_equiv(&SendStrStatic("abc")), Some(&a)); assert_eq!(map.find_equiv(&SendStrStatic("bcd")), Some(&b)); assert_eq!(map.find_equiv(&SendStrStatic("cde")), Some(&c)); diff --git a/src/test/run-pass/vec-matching-autoslice.rs b/src/test/run-pass/vec-matching-autoslice.rs index cd9d9603ffb1e..68d2ce364631c 100644 --- a/src/test/run-pass/vec-matching-autoslice.rs +++ b/src/test/run-pass/vec-matching-autoslice.rs @@ -1,7 +1,5 @@ -#[feature(managed_boxes)]; - pub fn main() { - let x = @[1, 2, 3]; + let x = ~[1, 2, 3]; match x { [2, ..] => fail!(), [1, ..tail] => { diff --git a/src/test/run-pass/vec-to_str.rs b/src/test/run-pass/vec-to_str.rs index 16a895f723162..e25b4de0a11c9 100644 --- a/src/test/run-pass/vec-to_str.rs +++ b/src/test/run-pass/vec-to_str.rs @@ -8,19 +8,13 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#[feature(managed_boxes)]; - pub fn main() { assert_eq!((~[0, 1]).to_str(), ~"[0, 1]"); assert_eq!((&[1, 2]).to_str(), ~"[1, 2]"); - assert_eq!((@[2, 3]).to_str(), ~"[2, 3]"); let foo = ~[3, 4]; let bar = &[4, 5]; - let baz = @[5, 6]; assert_eq!(foo.to_str(), ~"[3, 4]"); assert_eq!(bar.to_str(), ~"[4, 5]"); - assert_eq!(baz.to_str(), ~"[5, 6]"); - }