diff --git a/AUTHORS.txt b/AUTHORS.txt index 08a3ec77807c8..90690a0a3eaf3 100644 --- a/AUTHORS.txt +++ b/AUTHORS.txt @@ -72,7 +72,9 @@ Kevin Cantu Lennart Kudling Lindsey Kuper Luca Bruno +Luqman Aden Magnus Auvinen +Mahmut Bulut Margaret Meyerhofer Marijn Haverbeke Matt Brubeck diff --git a/Makefile.in b/Makefile.in index 383793be6f894..fb674878f556f 100644 --- a/Makefile.in +++ b/Makefile.in @@ -144,8 +144,9 @@ ifneq ($(wildcard $(CFG_GIT_DIR)),) endif endif -ifdef CFG_DISABLE_VALGRIND - $(info cfg: disabling valgrind (CFG_DISABLE_VALGRIND)) +ifdef CFG_ENABLE_VALGRIND + $(info cfg: enabling valgrind (CFG_ENABLE_VALGRIND)) +else CFG_VALGRIND := endif ifdef CFG_BAD_VALGRIND diff --git a/configure b/configure index e67ea3af5f382..9da652762a741 100755 --- a/configure +++ b/configure @@ -187,6 +187,7 @@ need_cmd cmp need_cmd mkdir need_cmd printf need_cmd cut +need_cmd head need_cmd grep need_cmd xargs need_cmd cp @@ -257,6 +258,16 @@ case $CFG_CPUTYPE in err "unknown CPU type: $CFG_CPUTYPE" esac +# Detect 64 bit linux systems with 32 bit userland and force 32 bit compilation +if [ $CFG_OSTYPE = unknown-linux-gnu -a $CFG_CPUTYPE = x86_64 ] +then + file -L "$SHELL" | grep -q "x86[_-]64" + if [ $? != 0 ]; then + CFG_CPUTYPE=i686 + fi +fi + + DEFAULT_HOST_TRIPLE="${CFG_CPUTYPE}-${CFG_OSTYPE}" CFG_SRC_DIR="$(cd $(dirname $0) && pwd)/" @@ -283,7 +294,7 @@ else fi opt sharedstd 1 "build libstd as a shared library" -opt valgrind 1 "run tests with valgrind (memcheck by default)" +opt valgrind 0 "run tests with valgrind (memcheck by default)" opt helgrind 0 "run tests with helgrind instead of memcheck" opt docs 1 "build documentation" opt optimize 1 "build optimized rust code" @@ -352,11 +363,16 @@ fi if [ ! -z "$CFG_PANDOC" ] then - PV=$(pandoc --version | awk '/^pandoc/ {print $2}') - if [ "$PV" \< "1.8" ] + PANDOC_VER_LINE=$(pandoc --version | grep '^pandoc ') + PANDOC_VER=${PANDOC_VER_LINE#pandoc } + PV_MAJOR_MINOR=${PANDOC_VER%.[0-9]*} + PV_MAJOR=${PV_MAJOR_MINOR%%[.][0-9]*} + PV_MINOR=${PV_MAJOR_MINOR#[0-9]*[.]} + PV_MINOR=${PV_MINOR%%[.][0-9]*} + if [ "$PV_MAJOR" -lt "1" ] || [ "$PV_MINOR" -lt "8" ] then - step_msg "pandoc $PV is too old. disabling" - BAD_PANDOC=1 + step_msg "pandoc $PV_MAJOR.$PV_MINOR is too old. disabling" + BAD_PANDOC=1 fi fi diff --git a/doc/tutorial-macros.md b/doc/tutorial-macros.md index 995944b2fbd89..c7e1ada648e9b 100644 --- a/doc/tutorial-macros.md +++ b/doc/tutorial-macros.md @@ -155,17 +155,22 @@ If it is under more, it'll be repeated, as appropriate. ## Parsing limitations -The macro parser will parse Rust syntax with two limitations: -1. The parser will always parse as much as possible. For example, if the comma -were omitted from the syntax of `early_return!` above, `input_1 [` would've -been interpreted as the beginning of an array index. In fact, invoking the -macro would have been impossible. -2. The parser must have eliminated all ambiguity by the time it reaches a +For technical reasons, there are two limitations to the treatment of syntax +fragments by the macro parser: + +1. The parser will always parse as much as possible of a Rust syntactic +fragment. For example, if the comma were omitted from the syntax of +`early_return!` above, `input_1 [` would've been interpreted as the beginning +of an array index. In fact, invoking the macro would have been impossible. +2. The parser must have eliminated all ambiguity by the time it reaches a `$name:fragment_specifier` declaration. 
This limitation can result in parse errors when declarations occur at the beginning of, or immediately after, -a `$(...)*`. Changing the invocation syntax to require a distinctive -token in front can solve the problem. +a `$(...)*`. For example, the grammar `$($t:ty)* $e:expr` will always fail to +parse because the parser would be forced to choose between parsing `t` and +parsing `e`. Changing the invocation syntax to require a distinctive token in +front can solve the problem. In the above example, `$(T $t:ty)* E $e:exp` +solves the problem. ## A final note diff --git a/doc/tutorial.md b/doc/tutorial.md index 8746cf026f9ec..02927c4ddd1b5 100644 --- a/doc/tutorial.md +++ b/doc/tutorial.md @@ -1502,9 +1502,9 @@ and [`core::str`]. Here are some examples. # fn unwrap_crayon(c: Crayon) -> int { 0 } # fn eat_crayon_wax(i: int) { } # fn store_crayon_in_nasal_cavity(i: uint, c: Crayon) { } -# fn crayon_to_str(c: Crayon) -> ~str { ~"" } +# fn crayon_to_str(c: Crayon) -> &str { "" } -let crayons = &[Almond, AntiqueBrass, Apricot]; +let crayons = [Almond, AntiqueBrass, Apricot]; // Check the length of the vector assert crayons.len() == 3; @@ -1569,7 +1569,7 @@ let bloop = |well, oh: mygoodness| -> what_the { fail oh(well) }; ~~~~ There are several forms of closure, each with its own role. The most -common, called a _stack closure_, has type `fn&` and can directly +common, called a _stack closure_, has type `&fn` and can directly access local variables in the enclosing scope. ~~~~ @@ -1591,7 +1591,7 @@ pervasively in Rust code. When you need to store a closure in a data structure, a stack closure will not do, since the compiler will refuse to let you store it. For this purpose, Rust provides a type of closure that has an arbitrary -lifetime, written `fn@` (boxed closure, analogous to the `@` pointer +lifetime, written `@fn` (boxed closure, analogous to the `@` pointer type described earlier). This type of closure *is* first-class. A managed closure does not directly access its environment, but merely @@ -1604,8 +1604,9 @@ returns it from a function, and then calls it: ~~~~ # extern mod std; -fn mk_appender(suffix: ~str) -> fn@(~str) -> ~str { - return fn@(s: ~str) -> ~str { s + suffix }; +fn mk_appender(suffix: ~str) -> @fn(~str) -> ~str { + // The compiler knows that we intend this closure to be of type @fn + return |s| s + suffix; } fn main() { @@ -1614,22 +1615,9 @@ fn main() { } ~~~~ -This example uses the long closure syntax, `fn@(s: ~str) ...`. Using -this syntax makes it explicit that we are declaring a boxed -closure. In practice, boxed closures are usually defined with the -short closure syntax introduced earlier, in which case the compiler -infers the type of closure. Thus our managed closure example could -also be written: - -~~~~ -fn mk_appender(suffix: ~str) -> fn@(~str) -> ~str { - return |s| s + suffix; -} -~~~~ - ## Owned closures -Owned closures, written `fn~` in analogy to the `~` pointer type, +Owned closures, written `~fn` in analogy to the `~` pointer type, hold on to things that can safely be sent between processes. They copy the values they close over, much like managed closures, but they also own them: that is, no other code can access @@ -1649,12 +1637,10 @@ callers may pass any kind of closure. 
~~~~ fn call_twice(f: fn()) { f(); f(); } -call_twice(|| { ~"I am an inferred stack closure"; } ); -call_twice(fn&() { ~"I am also a stack closure"; } ); -call_twice(fn@() { ~"I am a managed closure"; }); -call_twice(fn~() { ~"I am an owned closure"; }); -fn bare_function() { ~"I am a plain function"; } -call_twice(bare_function); +let closure = || { "I'm a closure, and it doesn't matter what type I am"; }; +fn function() { "I'm a normal function"; } +call_twice(closure); +call_twice(function); ~~~~ > ***Note:*** Both the syntax and the semantics will be changing @@ -1693,7 +1679,7 @@ structure. ~~~~ # fn each(v: &[int], op: fn(v: &int)) { } # fn do_some_work(i: &int) { } -each(&[1, 2, 3], |n| { +each([1, 2, 3], |n| { do_some_work(n); }); ~~~~ @@ -1704,7 +1690,7 @@ call that can be written more like a built-in control structure: ~~~~ # fn each(v: &[int], op: fn(v: &int)) { } # fn do_some_work(i: &int) { } -do each(&[1, 2, 3]) |n| { +do each([1, 2, 3]) |n| { do_some_work(n); } ~~~~ @@ -1715,7 +1701,7 @@ parentheses, where it looks more like a typical block of code. `do` is a convenient way to create tasks with the `task::spawn` -function. `spawn` has the signature `spawn(fn: fn~())`. In other +function. `spawn` has the signature `spawn(fn: ~fn())`. In other words, it is a function that takes an owned closure that takes no arguments. @@ -1765,9 +1751,9 @@ And using this function to iterate over a vector: ~~~~ # use each = vec::each; # use println = io::println; -each(&[2, 4, 8, 5, 16], |n| { +each([2, 4, 8, 5, 16], |n| { if *n % 2 != 0 { - println(~"found odd number!"); + println("found odd number!"); false } else { true } }); @@ -1782,9 +1768,9 @@ to the next iteration, write `loop`. ~~~~ # use each = vec::each; # use println = io::println; -for each(&[2, 4, 8, 5, 16]) |n| { +for each([2, 4, 8, 5, 16]) |n| { if *n % 2 != 0 { - println(~"found odd number!"); + println("found odd number!"); break; } } @@ -1967,12 +1953,12 @@ impl int: Printable { fn print() { io::println(fmt!("%d", self)) } } -impl ~str: Printable { +impl &str: Printable { fn print() { io::println(self) } } # 1.print(); -# (~"foo").print(); +# ("foo").print(); ~~~~ Methods defined in an implementation of a trait may be called just like @@ -2120,7 +2106,7 @@ impl @Rectangle: Drawable { fn draw() { ... } } let c: @Circle = @new_circle(); let r: @Rectangle = @new_rectangle(); -draw_all(&[c as @Drawable, r as @Drawable]); +draw_all([c as @Drawable, r as @Drawable]); ~~~~ We omit the code for `new_circle` and `new_rectangle`; imagine that @@ -2162,8 +2148,8 @@ additional modules. 
~~~~ mod farm { - pub fn chicken() -> ~str { ~"cluck cluck" } - pub fn cow() -> ~str { ~"mooo" } + pub fn chicken() -> &str { "cluck cluck" } + pub fn cow() -> &str { "mooo" } } fn main() { @@ -2360,13 +2346,13 @@ these two files: ~~~~ // world.rs #[link(name = "world", vers = "1.0")]; -fn explore() -> ~str { ~"world" } +pub fn explore() -> &str { "world" } ~~~~ ~~~~ {.xfail-test} // main.rs extern mod world; -fn main() { io::println(~"hello " + world::explore()); } +fn main() { io::println("hello " + world::explore()); } ~~~~ Now compile and run like this (adjust to your platform if necessary): diff --git a/src/cargo/cargo.rs b/src/cargo/cargo.rs index 5f39eb6b960e2..a06eb19910be9 100644 --- a/src/cargo/cargo.rs +++ b/src/cargo/cargo.rs @@ -1162,20 +1162,20 @@ fn sync_one_file(c: &Cargo, dir: &Path, src: @Source) -> bool { } match (src.key, src.keyfp) { (Some(_), Some(f)) => { - let r = pgp::verify(&c.root, &pkgfile, &sigfile, f); + let r = pgp::verify(&c.root, &pkgfile, &sigfile); if !r { - error(fmt!("signature verification failed for source %s", - name)); + error(fmt!("signature verification failed for source %s with \ + key %s", name, f)); return false; } if has_src_file { - let e = pgp::verify(&c.root, &srcfile, &srcsigfile, f); + let e = pgp::verify(&c.root, &srcfile, &srcsigfile); if !e { - error(fmt!("signature verification failed for source %s", - name)); + error(fmt!("signature verification failed for source %s \ + with key %s", name, f)); return false; } } @@ -1273,21 +1273,21 @@ fn sync_one_git(c: &Cargo, dir: &Path, src: @Source) -> bool { } match (src.key, src.keyfp) { (Some(_), Some(f)) => { - let r = pgp::verify(&c.root, &pkgfile, &sigfile, f); + let r = pgp::verify(&c.root, &pkgfile, &sigfile); if !r { - error(fmt!("signature verification failed for source %s", - name)); + error(fmt!("signature verification failed for source %s with \ + key %s", name, f)); rollback(name, dir, false); return false; } if has_src_file { - let e = pgp::verify(&c.root, &srcfile, &srcsigfile, f); + let e = pgp::verify(&c.root, &srcfile, &srcsigfile); if !e { - error(fmt!("signature verification failed for source %s", - name)); + error(fmt!("signature verification failed for source %s \ + with key %s", name, f)); rollback(name, dir, false); return false; } @@ -1370,11 +1370,11 @@ fn sync_one_curl(c: &Cargo, dir: &Path, src: @Source) -> bool { return false; } - let r = pgp::verify(&c.root, &pkgfile, &sigfile, f); + let r = pgp::verify(&c.root, &pkgfile, &sigfile); if !r { - error(fmt!("signature verification failed for source %s", - name)); + error(fmt!("signature verification failed for source %s with \ + key %s", name, f)); return false; } @@ -1390,11 +1390,11 @@ fn sync_one_curl(c: &Cargo, dir: &Path, src: @Source) -> bool { return false; } - let e = pgp::verify(&c.root, &srcfile, &srcsigfile, f); + let e = pgp::verify(&c.root, &srcfile, &srcsigfile); if !e { error(~"signature verification failed for " + - ~"source " + name); + ~"source " + name + ~" with key " + f); return false; } } @@ -1463,8 +1463,7 @@ fn cmd_init(c: &Cargo) { return; } - let r = pgp::verify(&c.root, &srcfile, &sigfile, - pgp::signing_key_fp()); + let r = pgp::verify(&c.root, &srcfile, &sigfile); if !r { error(fmt!("signature verification failed for '%s'", srcfile.to_str())); diff --git a/src/cargo/pgp.rs b/src/cargo/pgp.rs index 17cb8dc648789..5fbfa55838c8e 100644 --- a/src/cargo/pgp.rs +++ b/src/cargo/pgp.rs @@ -1,5 +1,5 @@ -fn gpg(args: ~[~str]) -> { status: int, out: ~str, err: ~str } { - return 
run::program_output(~"gpg", args); +fn gpgv(args: ~[~str]) -> { status: int, out: ~str, err: ~str } { + return run::program_output(~"gpgv", args); } fn signing_key() -> ~str { @@ -59,7 +59,7 @@ fn signing_key_fp() -> ~str { } fn supported() -> bool { - let r = gpg(~[~"--version"]); + let r = gpgv(~[~"--version"]); r.status == 0 } @@ -88,15 +88,14 @@ fn add(root: &Path, key: &Path) { } } -fn verify(root: &Path, data: &Path, sig: &Path, keyfp: ~str) -> bool { +fn verify(root: &Path, data: &Path, sig: &Path) -> bool { let path = root.push("gpg"); - let p = gpg(~[~"--homedir", path.to_str(), - ~"--with-fingerprint", - ~"--verify", sig.to_str(), - data.to_str()]); - let res = ~"Primary key fingerprint: " + keyfp; - for str::split_char_each(p.err, '\n') |line| { - if line == res { return true; } + let res = gpgv(~[~"--homedir", path.to_str(), + ~"--keyring", ~"pubring.gpg", + ~"--verbose", + sig.to_str(), data.to_str()]); + if res.status != 0 { + return false; } - return false; + return true; } diff --git a/src/fuzzer/fuzzer.rs b/src/fuzzer/fuzzer.rs index 3e31287e3cd19..a4968382cf478 100644 --- a/src/fuzzer/fuzzer.rs +++ b/src/fuzzer/fuzzer.rs @@ -104,7 +104,7 @@ pure fn safe_to_use_expr(e: ast::expr, tm: test_mode) -> bool { } } -fn safe_to_steal_ty(t: @ast::ty, tm: test_mode) -> bool { +fn safe_to_steal_ty(t: @ast::Ty, tm: test_mode) -> bool { // Restrictions happen to be the same. safe_to_replace_ty(t.node, tm) } @@ -119,16 +119,16 @@ fn stash_expr_if(c: fn@(@ast::expr, test_mode)->bool, } else {/* now my indices are wrong :( */ } } -fn stash_ty_if(c: fn@(@ast::ty, test_mode)->bool, - es: @mut ~[ast::ty], - e: @ast::ty, +fn stash_ty_if(c: fn@(@ast::Ty, test_mode)->bool, + es: @mut ~[ast::Ty], + e: @ast::Ty, tm: test_mode) { if c(e, tm) { es.push(*e); } else {/* now my indices are wrong :( */ } } -type stolen_stuff = {exprs: ~[ast::expr], tys: ~[ast::ty]}; +type stolen_stuff = {exprs: ~[ast::expr], tys: ~[ast::Ty]}; fn steal(crate: ast::crate, tm: test_mode) -> stolen_stuff { let exprs = @mut ~[]; @@ -195,7 +195,7 @@ fn replace_expr_in_crate(crate: ast::crate, i: uint, // Replace the |i|th ty (in fold order) of |crate| with |newty|. 
-fn replace_ty_in_crate(crate: ast::crate, i: uint, newty: ast::ty, +fn replace_ty_in_crate(crate: ast::crate, i: uint, newty: ast::Ty, tm: test_mode) -> ast::crate { let j: @mut uint = @mut 0u; fn fold_ty_rep(j_: @mut uint, i_: uint, newty_: ast::ty_, @@ -225,7 +225,7 @@ fn as_str(f: fn@(+x: io::Writer)) -> ~str { io::with_str_writer(f) } -fn check_variants_of_ast(crate: ast::crate, codemap: codemap::codemap, +fn check_variants_of_ast(crate: ast::crate, codemap: codemap::CodeMap, filename: &Path, cx: context) { let stolen = steal(crate, cx.mode); let extra_exprs = vec::filter(common_exprs(), @@ -239,7 +239,7 @@ fn check_variants_of_ast(crate: ast::crate, codemap: codemap::codemap, fn check_variants_T( crate: ast::crate, - codemap: codemap::codemap, + codemap: codemap::CodeMap, filename: &Path, thing_label: ~str, things: ~[T], @@ -296,7 +296,7 @@ fn check_variants_T( } fn last_part(filename: ~str) -> ~str { - let ix = option::get(&str::rfind_char(filename, '/')); + let ix = option::get(str::rfind_char(filename, '/')); str::slice(filename, ix + 1u, str::len(filename) - 3u) } @@ -444,7 +444,7 @@ fn parse_and_print(code: @~str) -> ~str { fn has_raw_pointers(c: ast::crate) -> bool { let has_rp = @mut false; - fn visit_ty(flag: @mut bool, t: @ast::ty) { + fn visit_ty(flag: @mut bool, t: @ast::Ty) { match t.node { ast::ty_ptr(_) => { *flag = true; } _ => { } diff --git a/src/libcore/cmath.rs b/src/libcore/cmath.rs index b0aeb78afaa83..46ac90413a648 100644 --- a/src/libcore/cmath.rs +++ b/src/libcore/cmath.rs @@ -12,7 +12,7 @@ use libc::c_double; #[link_name = "m"] #[abi = "cdecl"] -pub extern mod c_double { +pub extern mod c_double_utils { // Alpabetically sorted by link_name @@ -87,7 +87,7 @@ pub extern mod c_double { #[link_name = "m"] #[abi = "cdecl"] -pub extern mod c_float { +pub extern mod c_float_utils { // Alpabetically sorted by link_name diff --git a/src/libcore/condition.rs b/src/libcore/condition.rs new file mode 100644 index 0000000000000..77bd88e04d786 --- /dev/null +++ b/src/libcore/condition.rs @@ -0,0 +1,300 @@ +// helper for transmutation, shown below. +type RustClosure = (int,int); + +struct Condition { + key: task::local_data::LocalDataKey> +} + +struct Handler { + handle: RustClosure +} + + +struct ProtectBlock { + cond: &Condition, + inner: RustClosure +} + +struct Guard { + cond: &Condition, + prev: Option<@Handler>, + drop { + match self.prev { + None => (), + Some(p) => + unsafe { + debug!("Guard: popping handler from TLS"); + task::local_data::local_data_set(self.cond.key, p) + } + } + } +} + +struct HandleBlock { + pb: &ProtectBlock, + prev: Option<@Handler>, + handler: @Handler, + drop { + unsafe { + debug!("HandleBlock: pushing handler to TLS"); + let _g = Guard { cond: self.pb.cond, + prev: self.prev }; + task::local_data::local_data_set(self.pb.cond.key, + self.handler); + // transmutation to avoid copying non-copyable, should + // be fixable by tracking closure pointees in regionck. 
+ let f : &fn() = ::cast::transmute(self.pb.inner); + debug!("HandleBlock: invoking protected code"); + f(); + debug!("HandleBlock: returned from protected code"); + } + } +} + +struct Trap { + cond: &Condition, + handler: @Handler +} + +impl ProtectBlock { + fn handle(&self, h: &self/fn(&T) ->U) -> HandleBlock/&self { + unsafe { + debug!("ProtectBlock.handle: setting up handler block"); + let p : *RustClosure = ::cast::transmute(&h); + let prev = task::local_data::local_data_get(self.cond.key); + HandleBlock { pb: self, + prev: prev, + handler: @Handler{handle: *p} } + } + } +} + + + +impl Trap { + fn in(&self, inner: &self/fn() -> V) -> V { + unsafe { + let prev = task::local_data::local_data_get(self.cond.key); + let _g = Guard { cond: self.cond, + prev: prev }; + debug!("Trap: pushing handler to TLS"); + task::local_data::local_data_set(self.cond.key, self.handler); + inner() + } + } +} + +impl Condition { + + fn guard(&self, h: &self/fn(&T) ->U) -> Guard/&self { + unsafe { + let prev = task::local_data::local_data_get(self.key); + let g = Guard { cond: self, prev: prev }; + debug!("Guard: pushing handler to TLS"); + let p : *RustClosure = ::cast::transmute(&h); + let h = @Handler{handle: *p}; + task::local_data::local_data_set(self.key, h); + move g + } + } + + fn trap(&self, h: &self/fn(&T) ->U) -> Trap/&self { + unsafe { + let p : *RustClosure = ::cast::transmute(&h); + let h = @Handler{handle: *p}; + move Trap { cond: self, handler: h } + } + } + + fn protect(&self, inner: &self/fn()) -> ProtectBlock/&self { + unsafe { + // transmutation to avoid copying non-copyable, should + // be fixable by tracking closure pointees in regionck. + debug!("Condition.protect: setting up protected block"); + let p : *RustClosure = ::cast::transmute(&inner); + ProtectBlock { cond: self, + inner: *p } + } + } + + fn raise(t:&T) -> U { + unsafe { + match task::local_data::local_data_get(self.key) { + None => { + debug!("Condition.raise: found no handler"); + fail + } + + Some(handler) => { + debug!("Condition.raise: found handler"); + let f : &fn(&T) -> U = ::cast::transmute(handler.handle); + f(t) + } + } + } + } +} + + +#[cfg(test)] +fn sadness_key(_x: @Handler) { } + +#[cfg(test)] +fn trouble(i: int) { + // Condition should work as a const, just limitations in consts. 
+ let sadness_condition : Condition = + Condition { key: sadness_key }; + debug!("trouble: raising conition"); + let j = sadness_condition.raise(&i); + debug!("trouble: handler recovered with %d", j); +} + +#[test] +fn test1() { + + let sadness_condition : Condition = + Condition { key: sadness_key }; + + let mut i = 10; + + let b = do sadness_condition.protect { + debug!("test1: in protected block"); + trouble(1); + trouble(2); + trouble(3); + }; + + do b.handle |j| { + debug!("test1: in handler"); + i += *j; + i + }; + + assert i == 16; +} +#[cfg(test)] +fn nested_test_inner() { + let sadness_condition : Condition = + Condition { key: sadness_key }; + + let mut inner_trapped = false; + + let b = do sadness_condition.protect { + debug!("nested_test_inner: in protected block"); + trouble(1); + }; + + do b.handle |_j| { + debug!("nested_test_inner: in handler"); + inner_trapped = true; + 0 + }; + + assert inner_trapped; +} + +#[test] +fn nested_test_outer() { + + let sadness_condition : Condition = + Condition { key: sadness_key }; + + let mut outer_trapped = false; + + let b = do sadness_condition.protect { + debug!("nested_test_outer: in protected block"); + nested_test_inner(); + trouble(1); + }; + + do b.handle |_j| { + debug!("nested_test_outer: in handler"); + outer_trapped = true; + 0 + }; + + assert outer_trapped; +} + + +#[cfg(test)] +fn nested_guard_test_inner() { + let sadness_condition : Condition = + Condition { key: sadness_key }; + + let mut inner_trapped = false; + + let _g = do sadness_condition.guard |_j| { + debug!("nested_guard_test_inner: in handler"); + inner_trapped = true; + 0 + }; + + debug!("nested_guard_test_inner: in protected block"); + trouble(1); + + assert inner_trapped; +} + +#[test] +fn nested_guard_test_outer() { + + let sadness_condition : Condition = + Condition { key: sadness_key }; + + let mut outer_trapped = false; + + let _g = do sadness_condition.guard |_j| { + debug!("nested_guard_test_outer: in handler"); + outer_trapped = true; + 0 + }; + + debug!("nested_guard_test_outer: in protected block"); + nested_guard_test_inner(); + trouble(1); + + assert outer_trapped; +} + + + +#[cfg(test)] +fn nested_trap_test_inner() { + let sadness_condition : Condition = + Condition { key: sadness_key }; + + let mut inner_trapped = false; + + do sadness_condition.trap(|_j| { + debug!("nested_trap_test_inner: in handler"); + inner_trapped = true; + 0 + }).in { + debug!("nested_trap_test_inner: in protected block"); + trouble(1); + } + + assert inner_trapped; +} + +#[test] +fn nested_trap_test_outer() { + + let sadness_condition : Condition = + Condition { key: sadness_key }; + + let mut outer_trapped = false; + + do sadness_condition.trap(|_j| { + debug!("nested_trap_test_outer: in handler"); + outer_trapped = true; 0 + }).in { + debug!("nested_guard_test_outer: in protected block"); + nested_trap_test_inner(); + trouble(1); + } + + + assert outer_trapped; +} diff --git a/src/libcore/core.rc b/src/libcore/core.rc index 484c58750328d..9ebdad3521e53 100644 --- a/src/libcore/core.rc +++ b/src/libcore/core.rc @@ -200,6 +200,7 @@ pub mod flate; pub mod repr; pub mod cleanup; pub mod reflect; +pub mod condition; // Modules supporting compiler-generated code // Exported but not part of the public interface diff --git a/src/libcore/core.rs b/src/libcore/core.rs index 5ef11a4ad4648..35131e561d953 100644 --- a/src/libcore/core.rs +++ b/src/libcore/core.rs @@ -12,7 +12,7 @@ pub use WindowsPath = path::WindowsPath; pub use PosixPath = path::PosixPath; pub use 
tuple::{CopyableTuple, ImmutableTuple, ExtendedTupleOps}; -pub use str::{StrSlice, UniqueStr}; +pub use str::{StrSlice, Trimmable}; pub use vec::{ConstVector, CopyableVector, ImmutableVector}; pub use vec::{ImmutableEqVector, ImmutableCopyableVector}; pub use vec::{MutableVector, MutableCopyableVector}; diff --git a/src/libcore/dlist.rs b/src/libcore/dlist.rs index 3bcf486ef7e0d..35399878e2614 100644 --- a/src/libcore/dlist.rs +++ b/src/libcore/dlist.rs @@ -208,7 +208,7 @@ impl DList { fn push_head_n(data: T) -> DListNode { let mut nobe = self.new_link(move data); self.add_head(nobe); - option::get(&nobe) + option::get(nobe) } /// Add data to the tail of the list. O(1). fn push(data: T) { @@ -221,7 +221,7 @@ impl DList { fn push_n(data: T) -> DListNode { let mut nobe = self.new_link(move data); self.add_tail(nobe); - option::get(&nobe) + option::get(nobe) } /** * Insert data into the middle of the list, left of the given node. @@ -245,7 +245,7 @@ impl DList { fn insert_before_n(data: T, neighbour: DListNode) -> DListNode { let mut nobe = self.new_link(move data); self.insert_left(nobe, neighbour); - option::get(&nobe) + option::get(nobe) } /** * Insert data into the middle of the list, right of the given node. @@ -269,7 +269,7 @@ impl DList { fn insert_after_n(data: T, neighbour: DListNode) -> DListNode { let mut nobe = self.new_link(move data); self.insert_right(neighbour, nobe); - option::get(&nobe) + option::get(nobe) } /// Remove a node from the head of the list. O(1). @@ -385,17 +385,17 @@ impl DList { let mut link = self.peek_n(); let mut rabbit = link; while option::is_some(&link) { - let nobe = option::get(&link); + let nobe = option::get(link); assert nobe.linked; // check cycle if option::is_some(&rabbit) { - rabbit = option::get(&rabbit).next; + rabbit = option::get(rabbit).next; } if option::is_some(&rabbit) { - rabbit = option::get(&rabbit).next; + rabbit = option::get(rabbit).next; } if option::is_some(&rabbit) { - assert !box::ptr_eq(*option::get(&rabbit), *nobe); + assert !box::ptr_eq(*option::get(rabbit), *nobe); } // advance link = nobe.next_link(); @@ -406,17 +406,17 @@ impl DList { link = self.peek_tail_n(); rabbit = link; while option::is_some(&link) { - let nobe = option::get(&link); + let nobe = option::get(link); assert nobe.linked; // check cycle if option::is_some(&rabbit) { - rabbit = option::get(&rabbit).prev; + rabbit = option::get(rabbit).prev; } if option::is_some(&rabbit) { - rabbit = option::get(&rabbit).prev; + rabbit = option::get(rabbit).prev; } if option::is_some(&rabbit) { - assert !box::ptr_eq(*option::get(&rabbit), *nobe); + assert !box::ptr_eq(*option::get(rabbit), *nobe); } // advance link = nobe.prev_link(); diff --git a/src/libcore/dvec.rs b/src/libcore/dvec.rs index 1540eb30fe5a4..1b6a7522864ef 100644 --- a/src/libcore/dvec.rs +++ b/src/libcore/dvec.rs @@ -56,7 +56,7 @@ pub enum DVec { } /// Creates a new, empty dvec -pub fn DVec() -> DVec { +pub pure fn DVec() -> DVec { DVec_({mut data: ~[]}) } diff --git a/src/libcore/extfmt.rs b/src/libcore/extfmt.rs index 5acb45fdf1a55..d41393c23934c 100644 --- a/src/libcore/extfmt.rs +++ b/src/libcore/extfmt.rs @@ -329,11 +329,11 @@ pub mod rt { // For strings, precision is the maximum characters // displayed let mut unpadded = match cv.precision { - CountImplied => s.to_unique(), + CountImplied => s.to_owned(), CountIs(max) => if max as uint < str::char_len(s) { str::substr(s, 0u, max as uint) } else { - s.to_unique() + s.to_owned() } }; return unsafe { pad(cv, move unpadded, PadNozero) }; diff --git 
a/src/libcore/f32.rs b/src/libcore/f32.rs index ec0e66734fa39..ed6908d110d6d 100644 --- a/src/libcore/f32.rs +++ b/src/libcore/f32.rs @@ -4,7 +4,7 @@ //! Operations and constants for `f32` -pub use cmath::c_float::*; +pub use cmath::c_float_utils::*; pub use cmath::c_float_targ_consts::*; // These are not defined inside consts:: for consistency with diff --git a/src/libcore/f64.rs b/src/libcore/f64.rs index 731d369649b16..2d13dc86e2fa5 100644 --- a/src/libcore/f64.rs +++ b/src/libcore/f64.rs @@ -4,7 +4,7 @@ //! Operations and constants for `f64` -pub use cmath::c_double::*; +pub use cmath::c_double_utils::*; pub use cmath::c_double_targ_consts::*; // FIXME (#1433): obtain these in a different way @@ -59,7 +59,7 @@ pub pure fn ge(x: f64, y: f64) -> bool { return x >= y; } pub pure fn gt(x: f64, y: f64) -> bool { return x > y; } pub pure fn sqrt(x: f64) -> f64 { - cmath::c_double::sqrt(x as libc::c_double) as f64 + cmath::c_double_utils::sqrt(x as libc::c_double) as f64 } /// Returns true if `x` is a positive number, including +0.0f640 and +Infinity diff --git a/src/libcore/hash.rs b/src/libcore/hash.rs index 1b4996a683d10..e3bbfbc4129de 100644 --- a/src/libcore/hash.rs +++ b/src/libcore/hash.rs @@ -156,7 +156,7 @@ struct SipState { mut v1: u64, mut v2: u64, mut v3: u64, - tail: [mut u8]/8, // unprocessed bytes + tail: [mut u8 * 8], // unprocessed bytes mut ntail: uint, // how many bytes in tail are valid } @@ -359,72 +359,72 @@ impl &SipState : Streaming { #[test] pub fn test_siphash() { - let vecs : [[u8]/8]/64 = [ - [ 0x31, 0x0e, 0x0e, 0xdd, 0x47, 0xdb, 0x6f, 0x72, ]/_, - [ 0xfd, 0x67, 0xdc, 0x93, 0xc5, 0x39, 0xf8, 0x74, ]/_, - [ 0x5a, 0x4f, 0xa9, 0xd9, 0x09, 0x80, 0x6c, 0x0d, ]/_, - [ 0x2d, 0x7e, 0xfb, 0xd7, 0x96, 0x66, 0x67, 0x85, ]/_, - [ 0xb7, 0x87, 0x71, 0x27, 0xe0, 0x94, 0x27, 0xcf, ]/_, - [ 0x8d, 0xa6, 0x99, 0xcd, 0x64, 0x55, 0x76, 0x18, ]/_, - [ 0xce, 0xe3, 0xfe, 0x58, 0x6e, 0x46, 0xc9, 0xcb, ]/_, - [ 0x37, 0xd1, 0x01, 0x8b, 0xf5, 0x00, 0x02, 0xab, ]/_, - [ 0x62, 0x24, 0x93, 0x9a, 0x79, 0xf5, 0xf5, 0x93, ]/_, - [ 0xb0, 0xe4, 0xa9, 0x0b, 0xdf, 0x82, 0x00, 0x9e, ]/_, - [ 0xf3, 0xb9, 0xdd, 0x94, 0xc5, 0xbb, 0x5d, 0x7a, ]/_, - [ 0xa7, 0xad, 0x6b, 0x22, 0x46, 0x2f, 0xb3, 0xf4, ]/_, - [ 0xfb, 0xe5, 0x0e, 0x86, 0xbc, 0x8f, 0x1e, 0x75, ]/_, - [ 0x90, 0x3d, 0x84, 0xc0, 0x27, 0x56, 0xea, 0x14, ]/_, - [ 0xee, 0xf2, 0x7a, 0x8e, 0x90, 0xca, 0x23, 0xf7, ]/_, - [ 0xe5, 0x45, 0xbe, 0x49, 0x61, 0xca, 0x29, 0xa1, ]/_, - [ 0xdb, 0x9b, 0xc2, 0x57, 0x7f, 0xcc, 0x2a, 0x3f, ]/_, - [ 0x94, 0x47, 0xbe, 0x2c, 0xf5, 0xe9, 0x9a, 0x69, ]/_, - [ 0x9c, 0xd3, 0x8d, 0x96, 0xf0, 0xb3, 0xc1, 0x4b, ]/_, - [ 0xbd, 0x61, 0x79, 0xa7, 0x1d, 0xc9, 0x6d, 0xbb, ]/_, - [ 0x98, 0xee, 0xa2, 0x1a, 0xf2, 0x5c, 0xd6, 0xbe, ]/_, - [ 0xc7, 0x67, 0x3b, 0x2e, 0xb0, 0xcb, 0xf2, 0xd0, ]/_, - [ 0x88, 0x3e, 0xa3, 0xe3, 0x95, 0x67, 0x53, 0x93, ]/_, - [ 0xc8, 0xce, 0x5c, 0xcd, 0x8c, 0x03, 0x0c, 0xa8, ]/_, - [ 0x94, 0xaf, 0x49, 0xf6, 0xc6, 0x50, 0xad, 0xb8, ]/_, - [ 0xea, 0xb8, 0x85, 0x8a, 0xde, 0x92, 0xe1, 0xbc, ]/_, - [ 0xf3, 0x15, 0xbb, 0x5b, 0xb8, 0x35, 0xd8, 0x17, ]/_, - [ 0xad, 0xcf, 0x6b, 0x07, 0x63, 0x61, 0x2e, 0x2f, ]/_, - [ 0xa5, 0xc9, 0x1d, 0xa7, 0xac, 0xaa, 0x4d, 0xde, ]/_, - [ 0x71, 0x65, 0x95, 0x87, 0x66, 0x50, 0xa2, 0xa6, ]/_, - [ 0x28, 0xef, 0x49, 0x5c, 0x53, 0xa3, 0x87, 0xad, ]/_, - [ 0x42, 0xc3, 0x41, 0xd8, 0xfa, 0x92, 0xd8, 0x32, ]/_, - [ 0xce, 0x7c, 0xf2, 0x72, 0x2f, 0x51, 0x27, 0x71, ]/_, - [ 0xe3, 0x78, 0x59, 0xf9, 0x46, 0x23, 0xf3, 0xa7, ]/_, - [ 0x38, 0x12, 0x05, 0xbb, 0x1a, 0xb0, 0xe0, 0x12, ]/_, - [ 0xae, 0x97, 
0xa1, 0x0f, 0xd4, 0x34, 0xe0, 0x15, ]/_, - [ 0xb4, 0xa3, 0x15, 0x08, 0xbe, 0xff, 0x4d, 0x31, ]/_, - [ 0x81, 0x39, 0x62, 0x29, 0xf0, 0x90, 0x79, 0x02, ]/_, - [ 0x4d, 0x0c, 0xf4, 0x9e, 0xe5, 0xd4, 0xdc, 0xca, ]/_, - [ 0x5c, 0x73, 0x33, 0x6a, 0x76, 0xd8, 0xbf, 0x9a, ]/_, - [ 0xd0, 0xa7, 0x04, 0x53, 0x6b, 0xa9, 0x3e, 0x0e, ]/_, - [ 0x92, 0x59, 0x58, 0xfc, 0xd6, 0x42, 0x0c, 0xad, ]/_, - [ 0xa9, 0x15, 0xc2, 0x9b, 0xc8, 0x06, 0x73, 0x18, ]/_, - [ 0x95, 0x2b, 0x79, 0xf3, 0xbc, 0x0a, 0xa6, 0xd4, ]/_, - [ 0xf2, 0x1d, 0xf2, 0xe4, 0x1d, 0x45, 0x35, 0xf9, ]/_, - [ 0x87, 0x57, 0x75, 0x19, 0x04, 0x8f, 0x53, 0xa9, ]/_, - [ 0x10, 0xa5, 0x6c, 0xf5, 0xdf, 0xcd, 0x9a, 0xdb, ]/_, - [ 0xeb, 0x75, 0x09, 0x5c, 0xcd, 0x98, 0x6c, 0xd0, ]/_, - [ 0x51, 0xa9, 0xcb, 0x9e, 0xcb, 0xa3, 0x12, 0xe6, ]/_, - [ 0x96, 0xaf, 0xad, 0xfc, 0x2c, 0xe6, 0x66, 0xc7, ]/_, - [ 0x72, 0xfe, 0x52, 0x97, 0x5a, 0x43, 0x64, 0xee, ]/_, - [ 0x5a, 0x16, 0x45, 0xb2, 0x76, 0xd5, 0x92, 0xa1, ]/_, - [ 0xb2, 0x74, 0xcb, 0x8e, 0xbf, 0x87, 0x87, 0x0a, ]/_, - [ 0x6f, 0x9b, 0xb4, 0x20, 0x3d, 0xe7, 0xb3, 0x81, ]/_, - [ 0xea, 0xec, 0xb2, 0xa3, 0x0b, 0x22, 0xa8, 0x7f, ]/_, - [ 0x99, 0x24, 0xa4, 0x3c, 0xc1, 0x31, 0x57, 0x24, ]/_, - [ 0xbd, 0x83, 0x8d, 0x3a, 0xaf, 0xbf, 0x8d, 0xb7, ]/_, - [ 0x0b, 0x1a, 0x2a, 0x32, 0x65, 0xd5, 0x1a, 0xea, ]/_, - [ 0x13, 0x50, 0x79, 0xa3, 0x23, 0x1c, 0xe6, 0x60, ]/_, - [ 0x93, 0x2b, 0x28, 0x46, 0xe4, 0xd7, 0x06, 0x66, ]/_, - [ 0xe1, 0x91, 0x5f, 0x5c, 0xb1, 0xec, 0xa4, 0x6c, ]/_, - [ 0xf3, 0x25, 0x96, 0x5c, 0xa1, 0x6d, 0x62, 0x9f, ]/_, - [ 0x57, 0x5f, 0xf2, 0x8e, 0x60, 0x38, 0x1b, 0xe5, ]/_, - [ 0x72, 0x45, 0x06, 0xeb, 0x4c, 0x32, 0x8a, 0x95, ]/_ - ]/_; + let vecs : [[u8 * 8] * 64] = [ + [ 0x31, 0x0e, 0x0e, 0xdd, 0x47, 0xdb, 0x6f, 0x72, ], + [ 0xfd, 0x67, 0xdc, 0x93, 0xc5, 0x39, 0xf8, 0x74, ], + [ 0x5a, 0x4f, 0xa9, 0xd9, 0x09, 0x80, 0x6c, 0x0d, ], + [ 0x2d, 0x7e, 0xfb, 0xd7, 0x96, 0x66, 0x67, 0x85, ], + [ 0xb7, 0x87, 0x71, 0x27, 0xe0, 0x94, 0x27, 0xcf, ], + [ 0x8d, 0xa6, 0x99, 0xcd, 0x64, 0x55, 0x76, 0x18, ], + [ 0xce, 0xe3, 0xfe, 0x58, 0x6e, 0x46, 0xc9, 0xcb, ], + [ 0x37, 0xd1, 0x01, 0x8b, 0xf5, 0x00, 0x02, 0xab, ], + [ 0x62, 0x24, 0x93, 0x9a, 0x79, 0xf5, 0xf5, 0x93, ], + [ 0xb0, 0xe4, 0xa9, 0x0b, 0xdf, 0x82, 0x00, 0x9e, ], + [ 0xf3, 0xb9, 0xdd, 0x94, 0xc5, 0xbb, 0x5d, 0x7a, ], + [ 0xa7, 0xad, 0x6b, 0x22, 0x46, 0x2f, 0xb3, 0xf4, ], + [ 0xfb, 0xe5, 0x0e, 0x86, 0xbc, 0x8f, 0x1e, 0x75, ], + [ 0x90, 0x3d, 0x84, 0xc0, 0x27, 0x56, 0xea, 0x14, ], + [ 0xee, 0xf2, 0x7a, 0x8e, 0x90, 0xca, 0x23, 0xf7, ], + [ 0xe5, 0x45, 0xbe, 0x49, 0x61, 0xca, 0x29, 0xa1, ], + [ 0xdb, 0x9b, 0xc2, 0x57, 0x7f, 0xcc, 0x2a, 0x3f, ], + [ 0x94, 0x47, 0xbe, 0x2c, 0xf5, 0xe9, 0x9a, 0x69, ], + [ 0x9c, 0xd3, 0x8d, 0x96, 0xf0, 0xb3, 0xc1, 0x4b, ], + [ 0xbd, 0x61, 0x79, 0xa7, 0x1d, 0xc9, 0x6d, 0xbb, ], + [ 0x98, 0xee, 0xa2, 0x1a, 0xf2, 0x5c, 0xd6, 0xbe, ], + [ 0xc7, 0x67, 0x3b, 0x2e, 0xb0, 0xcb, 0xf2, 0xd0, ], + [ 0x88, 0x3e, 0xa3, 0xe3, 0x95, 0x67, 0x53, 0x93, ], + [ 0xc8, 0xce, 0x5c, 0xcd, 0x8c, 0x03, 0x0c, 0xa8, ], + [ 0x94, 0xaf, 0x49, 0xf6, 0xc6, 0x50, 0xad, 0xb8, ], + [ 0xea, 0xb8, 0x85, 0x8a, 0xde, 0x92, 0xe1, 0xbc, ], + [ 0xf3, 0x15, 0xbb, 0x5b, 0xb8, 0x35, 0xd8, 0x17, ], + [ 0xad, 0xcf, 0x6b, 0x07, 0x63, 0x61, 0x2e, 0x2f, ], + [ 0xa5, 0xc9, 0x1d, 0xa7, 0xac, 0xaa, 0x4d, 0xde, ], + [ 0x71, 0x65, 0x95, 0x87, 0x66, 0x50, 0xa2, 0xa6, ], + [ 0x28, 0xef, 0x49, 0x5c, 0x53, 0xa3, 0x87, 0xad, ], + [ 0x42, 0xc3, 0x41, 0xd8, 0xfa, 0x92, 0xd8, 0x32, ], + [ 0xce, 0x7c, 0xf2, 0x72, 0x2f, 0x51, 0x27, 0x71, ], + [ 0xe3, 0x78, 0x59, 0xf9, 0x46, 0x23, 0xf3, 0xa7, ], + [ 
0x38, 0x12, 0x05, 0xbb, 0x1a, 0xb0, 0xe0, 0x12, ], + [ 0xae, 0x97, 0xa1, 0x0f, 0xd4, 0x34, 0xe0, 0x15, ], + [ 0xb4, 0xa3, 0x15, 0x08, 0xbe, 0xff, 0x4d, 0x31, ], + [ 0x81, 0x39, 0x62, 0x29, 0xf0, 0x90, 0x79, 0x02, ], + [ 0x4d, 0x0c, 0xf4, 0x9e, 0xe5, 0xd4, 0xdc, 0xca, ], + [ 0x5c, 0x73, 0x33, 0x6a, 0x76, 0xd8, 0xbf, 0x9a, ], + [ 0xd0, 0xa7, 0x04, 0x53, 0x6b, 0xa9, 0x3e, 0x0e, ], + [ 0x92, 0x59, 0x58, 0xfc, 0xd6, 0x42, 0x0c, 0xad, ], + [ 0xa9, 0x15, 0xc2, 0x9b, 0xc8, 0x06, 0x73, 0x18, ], + [ 0x95, 0x2b, 0x79, 0xf3, 0xbc, 0x0a, 0xa6, 0xd4, ], + [ 0xf2, 0x1d, 0xf2, 0xe4, 0x1d, 0x45, 0x35, 0xf9, ], + [ 0x87, 0x57, 0x75, 0x19, 0x04, 0x8f, 0x53, 0xa9, ], + [ 0x10, 0xa5, 0x6c, 0xf5, 0xdf, 0xcd, 0x9a, 0xdb, ], + [ 0xeb, 0x75, 0x09, 0x5c, 0xcd, 0x98, 0x6c, 0xd0, ], + [ 0x51, 0xa9, 0xcb, 0x9e, 0xcb, 0xa3, 0x12, 0xe6, ], + [ 0x96, 0xaf, 0xad, 0xfc, 0x2c, 0xe6, 0x66, 0xc7, ], + [ 0x72, 0xfe, 0x52, 0x97, 0x5a, 0x43, 0x64, 0xee, ], + [ 0x5a, 0x16, 0x45, 0xb2, 0x76, 0xd5, 0x92, 0xa1, ], + [ 0xb2, 0x74, 0xcb, 0x8e, 0xbf, 0x87, 0x87, 0x0a, ], + [ 0x6f, 0x9b, 0xb4, 0x20, 0x3d, 0xe7, 0xb3, 0x81, ], + [ 0xea, 0xec, 0xb2, 0xa3, 0x0b, 0x22, 0xa8, 0x7f, ], + [ 0x99, 0x24, 0xa4, 0x3c, 0xc1, 0x31, 0x57, 0x24, ], + [ 0xbd, 0x83, 0x8d, 0x3a, 0xaf, 0xbf, 0x8d, 0xb7, ], + [ 0x0b, 0x1a, 0x2a, 0x32, 0x65, 0xd5, 0x1a, 0xea, ], + [ 0x13, 0x50, 0x79, 0xa3, 0x23, 0x1c, 0xe6, 0x60, ], + [ 0x93, 0x2b, 0x28, 0x46, 0xe4, 0xd7, 0x06, 0x66, ], + [ 0xe1, 0x91, 0x5f, 0x5c, 0xb1, 0xec, 0xa4, 0x6c, ], + [ 0xf3, 0x25, 0x96, 0x5c, 0xa1, 0x6d, 0x62, 0x9f, ], + [ 0x57, 0x5f, 0xf2, 0x8e, 0x60, 0x38, 0x1b, 0xe5, ], + [ 0x72, 0x45, 0x06, 0xeb, 0x4c, 0x32, 0x8a, 0x95, ] + ]; let k0 = 0x_07_06_05_04_03_02_01_00_u64; let k1 = 0x_0f_0e_0d_0c_0b_0a_09_08_u64; @@ -433,7 +433,7 @@ pub fn test_siphash() { let stream_inc = &State(k0,k1); let stream_full = &State(k0,k1); - fn to_hex_str(r: &[u8]/8) -> ~str { + fn to_hex_str(r: &[u8 * 8]) -> ~str { let mut s = ~""; for vec::each(*r) |b| { s += uint::to_str(*b as uint, 16u); diff --git a/src/libcore/io.rs b/src/libcore/io.rs index fd0fcbbe1c1f8..77f7b5023dfa9 100644 --- a/src/libcore/io.rs +++ b/src/libcore/io.rs @@ -37,7 +37,7 @@ pub trait Reader { // FIXME (#2004): Seekable really should be orthogonal. // FIXME (#2982): This should probably return an error. 
- fn read(buf: &[mut u8], len: uint) -> uint; + fn read(bytes: &[mut u8], len: uint) -> uint; fn read_byte() -> int; fn unread_byte(int); fn eof() -> bool; @@ -65,32 +65,32 @@ pub trait ReaderUtil { impl T : ReaderUtil { fn read_bytes(len: uint) -> ~[u8] { - let mut buf = vec::with_capacity(len); - unsafe { vec::raw::set_len(&mut buf, len); } + let mut bytes = vec::with_capacity(len); + unsafe { vec::raw::set_len(&mut bytes, len); } - let count = self.read(buf, len); + let count = self.read(bytes, len); - unsafe { vec::raw::set_len(&mut buf, count); } - move buf + unsafe { vec::raw::set_len(&mut bytes, count); } + move bytes } fn read_line() -> ~str { - let mut buf = ~[]; + let mut bytes = ~[]; loop { let ch = self.read_byte(); if ch == -1 || ch == 10 { break; } - buf.push(ch as u8); + bytes.push(ch as u8); } - str::from_bytes(buf) + str::from_bytes(bytes) } fn read_chars(n: uint) -> ~[char] { // returns the (consumed offset, n_req), appends characters to &chars - fn chars_from_bytes(buf: &~[u8], chars: &mut ~[char]) + fn chars_from_bytes(bytes: &~[u8], chars: &mut ~[char]) -> (uint, uint) { let mut i = 0; - let buf_len = buf.len(); - while i < buf_len { - let b0 = buf[i]; + let bytes_len = bytes.len(); + while i < bytes_len { + let b0 = bytes[i]; let w = str::utf8_char_width(b0); let end = i + w; i += 1; @@ -100,12 +100,12 @@ impl T : ReaderUtil { loop; } // can't satisfy this char with the existing data - if end > buf_len { - return (i - 1, end - buf_len); + if end > bytes_len { + return (i - 1, end - bytes_len); } let mut val = 0; while i < end { - let next = buf[i] as int; + let next = bytes[i] as int; i += 1; assert (next > -1); assert (next & 192 == 128); @@ -119,8 +119,8 @@ impl T : ReaderUtil { } return (i, 0); } - let mut buf: ~[u8] = ~[]; - let mut chars: ~[char] = ~[]; + let mut bytes = ~[]; + let mut chars = ~[]; // might need more bytes, but reading n will never over-read let mut nbread = n; while nbread > 0 { @@ -130,15 +130,15 @@ impl T : ReaderUtil { // we're split in a unicode char? break; } - buf.push_all(data); - let (offset, nbreq) = chars_from_bytes::(&buf, &mut chars); + bytes.push_all(data); + let (offset, nbreq) = chars_from_bytes::(&bytes, &mut chars); let ncreq = n - chars.len(); // again we either know we need a certain number of bytes // to complete a character, or we make sure we don't // over-read by reading 1-byte per char needed nbread = if ncreq > nbreq { ncreq } else { nbreq }; if nbread > 0 { - buf = vec::slice(buf, offset, buf.len()); + bytes = vec::slice(bytes, offset, bytes.len()); } } move chars @@ -154,12 +154,12 @@ impl T : ReaderUtil { } fn read_c_str() -> ~str { - let mut buf: ~[u8] = ~[]; + let mut bytes: ~[u8] = ~[]; loop { let ch = self.read_byte(); - if ch < 1 { break; } else { buf.push(ch as u8); } + if ch < 1 { break; } else { bytes.push(ch as u8); } } - str::from_bytes(buf) + str::from_bytes(bytes) } // FIXME deal with eof? 
// #2004 @@ -191,9 +191,9 @@ impl T : ReaderUtil { } fn read_whole_stream() -> ~[u8] { - let mut buf: ~[u8] = ~[]; - while !self.eof() { buf.push_all(self.read_bytes(2048u)); } - move buf + let mut bytes: ~[u8] = ~[]; + while !self.eof() { bytes.push_all(self.read_bytes(2048u)); } + move bytes } fn each_byte(it: fn(int) -> bool) { @@ -226,8 +226,8 @@ fn convert_whence(whence: SeekStyle) -> i32 { } impl *libc::FILE: Reader { - fn read(buf: &[mut u8], len: uint) -> uint { - do vec::as_mut_buf(buf) |buf_p, buf_len| { + fn read(bytes: &[mut u8], len: uint) -> uint { + do vec::as_mut_buf(bytes) |buf_p, buf_len| { assert buf_len <= len; let count = libc::fread(buf_p as *mut c_void, 1u as size_t, @@ -250,7 +250,9 @@ impl *libc::FILE: Reader { // duration of its lifetime. // FIXME there really should be a better way to do this // #2004 impl {base: T, cleanup: C}: Reader { - fn read(buf: &[mut u8], len: uint) -> uint { self.base.read(buf, len) } + fn read(bytes: &[mut u8], len: uint) -> uint { + self.base.read(bytes, len) + } fn read_byte() -> int { self.base.read_byte() } fn unread_byte(byte: int) { self.base.unread_byte(byte); } fn eof() -> bool { self.base.eof() } @@ -297,39 +299,41 @@ pub fn file_reader(path: &Path) -> Result { } -// Byte buffer readers - -pub type ByteBuf = {buf: &[const u8], mut pos: uint}; +// Byte readers +pub struct BytesReader { + bytes: &[u8], + mut pos: uint +} -impl ByteBuf: Reader { - fn read(buf: &[mut u8], len: uint) -> uint { - let count = uint::min(len, self.buf.len() - self.pos); +impl BytesReader: Reader { + fn read(bytes: &[mut u8], len: uint) -> uint { + let count = uint::min(len, self.bytes.len() - self.pos); - let view = vec::const_view(self.buf, self.pos, self.buf.len()); - vec::bytes::memcpy(buf, view, count); + let view = vec::view(self.bytes, self.pos, self.bytes.len()); + vec::bytes::memcpy(bytes, view, count); self.pos += count; count } fn read_byte() -> int { - if self.pos == self.buf.len() { return -1; } - let b = self.buf[self.pos]; + if self.pos == self.bytes.len() { return -1; } + let b = self.bytes[self.pos]; self.pos += 1u; return b as int; } // FIXME (#2738): implement this fn unread_byte(_byte: int) { error!("Unimplemented: unread_byte"); fail; } - fn eof() -> bool { self.pos == self.buf.len() } + fn eof() -> bool { self.pos == self.bytes.len() } fn seek(offset: int, whence: SeekStyle) { let pos = self.pos; - self.pos = seek_in_buf(offset, pos, self.buf.len(), whence); + self.pos = seek_in_buf(offset, pos, self.bytes.len(), whence); } fn tell() -> uint { self.pos } } -pub fn with_bytes_reader(bytes: &[u8], f: fn(Reader) -> t) -> t { - f({buf: bytes, mut pos: 0u} as Reader) +pub pure fn with_bytes_reader(bytes: &[u8], f: fn(Reader) -> t) -> t { + f(BytesReader { bytes: bytes, pos: 0u } as Reader) } pub fn with_str_reader(s: &str, f: fn(Reader) -> T) -> T { @@ -602,10 +606,10 @@ impl T : WriterUtil { self.write_str(&"\n"); } fn write_int(n: int) { - int::to_str_bytes(n, 10u, |buf| self.write(buf)) + int::to_str_bytes(n, 10u, |bytes| self.write(bytes)) } fn write_uint(n: uint) { - uint::to_str_bytes(false, n, 10u, |buf| self.write(buf)) + uint::to_str_bytes(false, n, 10u, |bytes| self.write(bytes)) } fn write_le_uint(n: uint) { u64_to_le_bytes(n as u64, uint::bytes, |v| self.write(v)) @@ -687,34 +691,34 @@ pub fn print(s: &str) { stdout().write_str(s); } pub fn println(s: &str) { stdout().write_line(s); } pub struct BytesWriter { - buf: DVec, + bytes: DVec, mut pos: uint, } impl BytesWriter: Writer { fn write(v: &[const u8]) { - do self.buf.swap 
|buf| { - let mut buf <- buf; + do self.bytes.swap |bytes| { + let mut bytes <- bytes; let v_len = v.len(); - let buf_len = buf.len(); + let bytes_len = bytes.len(); - let count = uint::max(buf_len, self.pos + v_len); - vec::reserve(&mut buf, count); - unsafe { vec::raw::set_len(&mut buf, count); } + let count = uint::max(bytes_len, self.pos + v_len); + vec::reserve(&mut bytes, count); + unsafe { vec::raw::set_len(&mut bytes, count); } { - let view = vec::mut_view(buf, self.pos, count); + let view = vec::mut_view(bytes, self.pos, count); vec::bytes::memcpy(view, v, v_len); } self.pos += v_len; - move buf + move bytes } } fn seek(offset: int, whence: SeekStyle) { let pos = self.pos; - let len = self.buf.len(); + let len = self.bytes.len(); self.pos = seek_in_buf(offset, pos, len, whence); } fn tell() -> uint { self.pos } @@ -730,21 +734,25 @@ impl @BytesWriter : Writer { fn get_type() -> WriterType { (*self).get_type() } } -pub fn BytesWriter() -> BytesWriter { - BytesWriter { buf: DVec(), mut pos: 0u } +pub pure fn BytesWriter() -> BytesWriter { + BytesWriter { bytes: DVec(), mut pos: 0u } } -pub fn with_bytes_writer(f: fn(Writer)) -> ~[u8] { +pub pure fn with_bytes_writer(f: fn(Writer)) -> ~[u8] { let wr = @BytesWriter(); f(wr as Writer); - wr.buf.check_out(|buf| move buf) + // FIXME (#3758): This should not be needed. + unsafe { wr.bytes.check_out(|bytes| move bytes) } } -pub fn with_str_writer(f: fn(Writer)) -> ~str { +pub pure fn with_str_writer(f: fn(Writer)) -> ~str { let mut v = with_bytes_writer(f); - // Make sure the vector has a trailing null and is proper utf8. - v.push(0); + // FIXME (#3758): This should not be needed. + unsafe { + // Make sure the vector has a trailing null and is proper utf8. + v.push(0); + } assert str::is_utf8(v); unsafe { move ::cast::transmute(move v) } @@ -975,15 +983,17 @@ mod tests { fn bytes_buffer_overwrite() { let wr = BytesWriter(); wr.write(~[0u8, 1u8, 2u8, 3u8]); - assert wr.buf.borrow(|buf| buf == ~[0u8, 1u8, 2u8, 3u8]); + assert wr.bytes.borrow(|bytes| bytes == ~[0u8, 1u8, 2u8, 3u8]); wr.seek(-2, SeekCur); wr.write(~[4u8, 5u8, 6u8, 7u8]); - assert wr.buf.borrow(|buf| buf == ~[0u8, 1u8, 4u8, 5u8, 6u8, 7u8]); + assert wr.bytes.borrow(|bytes| bytes == + ~[0u8, 1u8, 4u8, 5u8, 6u8, 7u8]); wr.seek(-2, SeekEnd); wr.write(~[8u8]); wr.seek(1, SeekSet); wr.write(~[9u8]); - assert wr.buf.borrow(|buf| buf == ~[0u8, 9u8, 4u8, 5u8, 8u8, 7u8]); + assert wr.bytes.borrow(|bytes| bytes == + ~[0u8, 9u8, 4u8, 5u8, 8u8, 7u8]); } } diff --git a/src/libcore/iter-trait/dlist.rs b/src/libcore/iter-trait/dlist.rs index 2a5bb59b0c1fc..1b5f11569c37d 100644 --- a/src/libcore/iter-trait/dlist.rs +++ b/src/libcore/iter-trait/dlist.rs @@ -11,7 +11,7 @@ pub type IMPL_T = dlist::DList; pub pure fn EACH(self: &IMPL_T, f: fn(v: &A) -> bool) { let mut link = self.peek_n(); while option::is_some(&link) { - let nobe = option::get(&link); + let nobe = option::get(link); assert nobe.linked; if !f(&nobe.data) { break; } // Check (weakly) that the user didn't do a remove. 
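
The `bytes_buffer_overwrite` test above pins down the seek-and-overwrite behaviour that `BytesWriter` keeps through the `buf` to `bytes` rename: writing inside the existing buffer overwrites in place, while writing past the end grows it (`uint::max(bytes_len, self.pos + v_len)` followed by `reserve`). A minimal sketch of the same behaviour, written in present-day Rust with `std::io::Cursor` purely for illustration (neither that type nor the syntax is part of this patch):

~~~~
use std::io::{Cursor, Seek, SeekFrom, Write};

fn main() -> std::io::Result<()> {
    // An in-memory writer with the same seek-and-overwrite behaviour
    // that the bytes_buffer_overwrite test exercises on BytesWriter.
    let mut wr = Cursor::new(Vec::new());

    wr.write_all(&[0u8, 1, 2, 3])?;   // buffer: [0, 1, 2, 3]
    wr.seek(SeekFrom::Current(-2))?;  // position 2
    wr.write_all(&[4u8, 5, 6, 7])?;   // buffer: [0, 1, 4, 5, 6, 7]
    wr.seek(SeekFrom::End(-2))?;      // position 4
    wr.write_all(&[8u8])?;            // buffer: [0, 1, 4, 5, 8, 7]
    wr.seek(SeekFrom::Start(1))?;     // position 1
    wr.write_all(&[9u8])?;            // buffer: [0, 9, 4, 5, 8, 7]

    assert_eq!(wr.into_inner(), vec![0u8, 9, 4, 5, 8, 7]);
    Ok(())
}
~~~~

As with `BytesWriter`, the backing vector only grows when a write runs past its current length; seeking merely moves the write position.
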
diff --git a/src/libcore/libc.rs b/src/libcore/libc.rs index dd8f76c89d5ff..7ed43f619e13e 100644 --- a/src/libcore/libc.rs +++ b/src/libcore/libc.rs @@ -87,7 +87,7 @@ pub use funcs::extra::*; pub use size_t; pub use c_float, c_double, c_void, FILE, fpos_t; -pub use DIR, dirent; +pub use DIR, dirent_t; pub use c_char, c_schar, c_uchar; pub use c_short, c_ushort, c_int, c_uint, c_long, c_ulong; pub use size_t, ptrdiff_t, clock_t, time_t; @@ -147,7 +147,7 @@ mod types { } pub mod posix88 { pub enum DIR {} - pub enum dirent {} + pub enum dirent_t {} } pub mod posix01 {} pub mod posix08 {} @@ -1019,7 +1019,7 @@ pub mod funcs { pub extern mod dirent { fn opendir(dirname: *c_char) -> *DIR; fn closedir(dirp: *DIR) -> c_int; - fn readdir(dirp: *DIR) -> *dirent; + fn readdir(dirp: *DIR) -> *dirent_t; fn rewinddir(dirp: *DIR); fn seekdir(dirp: *DIR, loc: c_long); fn telldir(dirp: *DIR) -> c_long; diff --git a/src/libcore/logging.rs b/src/libcore/logging.rs index d4f3c0ea272ed..958d1ac56ea78 100644 --- a/src/libcore/logging.rs +++ b/src/libcore/logging.rs @@ -32,7 +32,7 @@ pub fn console_off() { #[cfg(notest)] #[lang="log_type"] pub fn log_type(level: u32, object: &T) { - let bytes = do io::with_bytes_writer() |writer| { + let bytes = do io::with_bytes_writer |writer| { repr::write_repr(writer, object); }; unsafe { diff --git a/src/libcore/option.rs b/src/libcore/option.rs index f8bafe29fdde2..baabc35b428c2 100644 --- a/src/libcore/option.rs +++ b/src/libcore/option.rs @@ -42,7 +42,7 @@ pub enum Option { Some(T), } -pub pure fn get(opt: &Option) -> T { +pub pure fn get(opt: Option) -> T { /*! Gets the value out of an option @@ -58,7 +58,7 @@ pub pure fn get(opt: &Option) -> T { case explicitly. */ - match *opt { + match opt { Some(copy x) => return x, None => fail ~"option::get none" } @@ -85,7 +85,7 @@ pub pure fn get_ref(opt: &r/Option) -> &r/T { } } -pub pure fn expect(opt: &Option, reason: ~str) -> T { +pub pure fn expect(opt: Option, reason: ~str) -> T { /*! * Gets the value out of an option, printing a specified message on * failure @@ -94,7 +94,7 @@ pub pure fn expect(opt: &Option, reason: ~str) -> T { * * Fails if the value equals `none` */ - match *opt { Some(copy x) => x, None => fail reason } + match opt { Some(copy x) => x, None => fail reason } } pub pure fn map(opt: &Option, f: fn(x: &T) -> U) -> Option { @@ -167,10 +167,10 @@ pub pure fn is_some(opt: &Option) -> bool { !is_none(opt) } -pub pure fn get_default(opt: &Option, def: T) -> T { +pub pure fn get_default(opt: Option, def: T) -> T { //! Returns the contained value or a default - match *opt { Some(copy x) => x, None => def } + match opt { Some(copy x) => x, None => def } } pub pure fn map_default(opt: &Option, def: U, @@ -225,7 +225,7 @@ pub fn swap_unwrap(opt: &mut Option) -> T { pub pure fn unwrap_expect(opt: Option, reason: &str) -> T { //! As unwrap, but with a specified failure message. - if opt.is_none() { fail reason.to_unique(); } + if opt.is_none() { fail reason.to_owned(); } unwrap(move opt) } @@ -284,8 +284,8 @@ impl Option { Instead, prefer to use pattern matching and handle the `None` case explicitly. 
*/ - pure fn get() -> T { get(&self) } - pure fn get_default(def: T) -> T { get_default(&self, def) } + pure fn get() -> T { get(self) } + pure fn get_default(def: T) -> T { get_default(self, def) } /** * Gets the value out of an option, printing a specified message on * failure @@ -294,7 +294,7 @@ impl Option { * * Fails if the value equals `none` */ - pure fn expect(reason: ~str) -> T { expect(&self, move reason) } + pure fn expect(reason: ~str) -> T { expect(self, move reason) } /// Applies a function zero or more times until the result is none. pure fn while_some(blk: fn(v: T) -> Option) { while_some(self, blk) } } diff --git a/src/libcore/os.rs b/src/libcore/os.rs index 24e4d7eff41f2..a834bb84f8d87 100644 --- a/src/libcore/os.rs +++ b/src/libcore/os.rs @@ -473,7 +473,7 @@ pub fn tmpdir() -> Path { #[cfg(unix)] #[allow(non_implicitly_copyable_typarams)] fn lookup() -> Path { - option::get_default(&getenv_nonempty("TMPDIR"), + option::get_default(getenv_nonempty("TMPDIR"), Path("/tmp")) } @@ -481,7 +481,7 @@ pub fn tmpdir() -> Path { #[allow(non_implicitly_copyable_typarams)] fn lookup() -> Path { option::get_default( - &option::or(getenv_nonempty("TMP"), + option::or(getenv_nonempty("TMP"), option::or(getenv_nonempty("TEMP"), option::or(getenv_nonempty("USERPROFILE"), getenv_nonempty("WINDIR")))), diff --git a/src/libcore/pipes.rs b/src/libcore/pipes.rs index e77cb69b05658..3f8de19498f62 100644 --- a/src/libcore/pipes.rs +++ b/src/libcore/pipes.rs @@ -667,7 +667,7 @@ pub fn select2( -> Either<(Option, RecvPacketBuffered), (RecvPacketBuffered, Option)> { - let i = wait_many([a.header(), b.header()]/_); + let i = wait_many([a.header(), b.header()]); match i { 0 => Left((try_recv(move a), move b)), @@ -693,7 +693,7 @@ pub fn selecti(endpoints: &[T]) -> uint { /// Returns 0 or 1 depending on which endpoint is ready to receive pub fn select2i(a: &A, b: &B) -> Either<(), ()> { - match wait_many([a.header(), b.header()]/_) { + match wait_many([a.header(), b.header()]) { 0 => Left(()), 1 => Right(()), _ => fail ~"wait returned unexpected index" diff --git a/src/libcore/repr.rs b/src/libcore/repr.rs index ff29953f09a49..b246adcb1d7cd 100644 --- a/src/libcore/repr.rs +++ b/src/libcore/repr.rs @@ -559,7 +559,7 @@ impl ReprPrinter { unsafe { self.align(sys::min_align_of::()); let value_addr: &T = transmute(copy self.ptr); - (*value_addr).write_repr(self.writer); + value_addr.write_repr(self.writer); self.bump(sys::size_of::()); true } diff --git a/src/libcore/run.rs b/src/libcore/run.rs index 06b869306305b..cb6a7f737f989 100644 --- a/src/libcore/run.rs +++ b/src/libcore/run.rs @@ -248,12 +248,14 @@ pub fn start_program(prog: &str, args: &[~str]) -> Program { } fn read_all(rd: io::Reader) -> ~str { - let mut buf = ~""; - while !rd.eof() { - let bytes = rd.read_bytes(4096u); - buf += str::from_bytes(bytes); - } - move buf + let buf = io::with_bytes_writer(|wr| { + let mut bytes = [mut 0, ..4096]; + while !rd.eof() { + let nread = rd.read(bytes, bytes.len()); + wr.write(bytes.view(0, nread)); + } + }); + str::from_bytes(buf) } /** @@ -341,13 +343,15 @@ fn writeclose(fd: c_int, s: ~str) { fn readclose(fd: c_int) -> ~str { let file = os::fdopen(fd); let reader = io::FILE_reader(file, false); - let mut buf = ~""; - while !reader.eof() { - let bytes = reader.read_bytes(4096u); - buf += str::from_bytes(bytes); - } + let buf = io::with_bytes_writer(|writer| { + let mut bytes = [mut 0, ..4096]; + while !reader.eof() { + let nread = reader.read(bytes, bytes.len()); + writer.write(bytes.view(0, nread)); + } + 
}); os::fclose(file); - move buf + str::from_bytes(buf) } /// Waits for a process to exit and returns the exit code diff --git a/src/libcore/str.rs b/src/libcore/str.rs index 447cb59acf80e..0c722c437ee58 100644 --- a/src/libcore/str.rs +++ b/src/libcore/str.rs @@ -203,6 +203,13 @@ pub pure fn connect(v: &[~str], sep: &str) -> ~str { move s } +/// Given a string, make a new string with repeated copies of it +pub fn repeat(ss: &str, nn: uint) -> ~str { + let mut acc = ~""; + for nn.times { acc += ss; } + move acc +} + /* Section: Adding to and removing from a string */ @@ -573,6 +580,40 @@ pub pure fn words(s: &str) -> ~[~str] { split_nonempty(s, |c| char::is_whitespace(c)) } +/** Split a string into a vector of substrings, + * each of which is less than a limit + */ +pub fn split_within(ss: &str, lim: uint) -> ~[~str] { + let words = str::words(ss); + + // empty? + if words == ~[] { return ~[]; } + + let mut rows : ~[~str] = ~[]; + let mut row : ~str = ~""; + + for words.each |wptr| { + let word = copy *wptr; + + // if adding this word to the row would go over the limit, + // then start a new row + if row.len() + word.len() + 1 > lim { + rows.push(copy row); // save previous row + row = move word; // start a new one + } else { + if row.len() > 0 { row += ~" " } // separate words + row += word; // append to this row + } + } + + // save the last row + if row != ~"" { rows.push(move row); } + + move rows +} + + + /// Convert a string to lowercase. ASCII only pub pure fn to_lower(s: &str) -> ~str { map(s, @@ -1473,6 +1514,11 @@ pub pure fn from_utf16(v: &[u16]) -> ~str { move buf } +pub pure fn with_capacity(capacity: uint) -> ~str { + let mut buf = ~""; + unsafe { reserve(&mut buf, capacity); } + move buf +} /** * As char_len but for a slice of a string @@ -1906,7 +1952,7 @@ pub mod raw { } /// Converts a vector of bytes to a string. 
- pub pub unsafe fn from_bytes(v: &[const u8]) -> ~str { + pub unsafe fn from_bytes(v: &[const u8]) -> ~str { do vec::as_const_buf(v) |buf, len| { from_buf_len(buf, len) } @@ -2029,14 +2075,14 @@ pub mod raw { } -pub trait UniqueStr { +pub trait Trimmable { pure fn trim() -> self; pure fn trim_left() -> self; pure fn trim_right() -> self; } /// Extension methods for strings -impl ~str: UniqueStr { +impl ~str: Trimmable { /// Returns a string with leading and trailing whitespace removed #[inline] pure fn trim() -> ~str { trim(self) } @@ -2089,7 +2135,7 @@ pub trait StrSlice { pure fn trim() -> ~str; pure fn trim_left() -> ~str; pure fn trim_right() -> ~str; - pure fn to_unique() -> ~str; + pure fn to_owned() -> ~str; pure fn to_managed() -> @str; pure fn char_at(i: uint) -> char; } @@ -2212,7 +2258,7 @@ impl &str: StrSlice { pure fn trim_right() -> ~str { trim_right(self) } #[inline] - pure fn to_unique() -> ~str { self.slice(0, self.len()) } + pure fn to_owned() -> ~str { self.slice(0, self.len()) } #[inline] pure fn to_managed() -> @str { @@ -2474,6 +2520,19 @@ mod tests { assert ~[] == words(~""); } + #[test] + fn test_split_within() { + assert split_within(~"", 0) == ~[]; + assert split_within(~"", 15) == ~[]; + assert split_within(~"hello", 15) == ~[~"hello"]; + + let data = ~"\nMary had a little lamb\nLittle lamb\n"; + error!("~~~~ %?", split_within(data, 15)); + assert split_within(data, 15) == ~[~"Mary had a", + ~"little lamb", + ~"Little lamb"]; + } + #[test] fn test_find_str() { // byte positions @@ -2549,6 +2608,15 @@ mod tests { t(~[~"hi"], ~" ", ~"hi"); } + #[test] + fn test_repeat() { + assert repeat(~"x", 4) == ~"xxxx"; + assert repeat(~"hi", 4) == ~"hihihihi"; + assert repeat(~"ไท华", 3) == ~"ไท华ไท华ไท华"; + assert repeat(~"", 4) == ~""; + assert repeat(~"hi", 0) == ~""; + } + #[test] fn test_to_upper() { // libc::toupper, and hence str::to_upper diff --git a/src/libcore/uint-template.rs b/src/libcore/uint-template.rs index 00dd9be76db07..3199262ae163f 100644 --- a/src/libcore/uint-template.rs +++ b/src/libcore/uint-template.rs @@ -187,19 +187,7 @@ pub pure fn to_str_bytes(neg: bool, num: T, radix: uint, // Enough room to hold any number in any radix. // Worst case: 64-bit number, binary-radix, with // a leading negative sign = 65 bytes. - let buf : [mut u8]/65 = - [mut - 0u8,0u8,0u8,0u8,0u8, 0u8,0u8,0u8,0u8,0u8, - 0u8,0u8,0u8,0u8,0u8, 0u8,0u8,0u8,0u8,0u8, - - 0u8,0u8,0u8,0u8,0u8, 0u8,0u8,0u8,0u8,0u8, - 0u8,0u8,0u8,0u8,0u8, 0u8,0u8,0u8,0u8,0u8, - - 0u8,0u8,0u8,0u8,0u8, 0u8,0u8,0u8,0u8,0u8, - 0u8,0u8,0u8,0u8,0u8, 0u8,0u8,0u8,0u8,0u8, - - 0u8,0u8,0u8,0u8,0u8 - ]/65; + let buf : [mut u8 * 65] = [mut 0u8, ..65]; // FIXME (#2649): post-snapshot, you can do this without the raw // pointers and unsafe bits, and the codegen will prove it's all diff --git a/src/libstd/arena.rs b/src/libstd/arena.rs index 6a2ac88f71435..9f40794b28a79 100644 --- a/src/libstd/arena.rs +++ b/src/libstd/arena.rs @@ -244,7 +244,7 @@ fn test_arena_destructors() { do arena.alloc { @i }; // Allocate something with funny size and alignment, to keep // things interesting. - do arena.alloc { [0u8, 1u8, 2u8]/3 }; + do arena.alloc { [0u8, 1u8, 2u8] }; } } @@ -258,7 +258,7 @@ fn test_arena_destructors_fail() { do arena.alloc { @i }; // Allocate something with funny size and alignment, to keep // things interesting. 
- do arena.alloc { [0u8, 1u8, 2u8]/3 }; + do arena.alloc { [0u8, 1u8, 2u8] }; } // Now, fail while allocating do arena.alloc::<@int> { diff --git a/src/libstd/getopts.rs b/src/libstd/getopts.rs index 6da51571e34a2..8d77b88aba230 100644 --- a/src/libstd/getopts.rs +++ b/src/libstd/getopts.rs @@ -82,7 +82,7 @@ pub type Opt = {name: Name, hasarg: HasArg, occur: Occur}; fn mkname(nm: &str) -> Name { let unm = str::from_slice(nm); - return if str::len(nm) == 1u { + return if nm.len() == 1u { Short(str::char_at(unm, 0u)) } else { Long(unm) }; } @@ -114,6 +114,22 @@ impl Occur : Eq { pure fn ne(other: &Occur) -> bool { !self.eq(other) } } +impl HasArg : Eq { + pure fn eq(other: &HasArg) -> bool { + (self as uint) == ((*other) as uint) + } + pure fn ne(other: &HasArg) -> bool { !self.eq(other) } +} + +impl Opt : Eq { + pure fn eq(other: &Opt) -> bool { + self.name == (*other).name && + self.hasarg == (*other).hasarg && + self.occur == (*other).occur + } + pure fn ne(other: &Opt) -> bool { !self.eq(other) } +} + /// Create an option that is required and takes an argument pub fn reqopt(name: &str) -> Opt { return {name: mkname(name), hasarg: Yes, occur: Req}; @@ -150,8 +166,29 @@ enum Optval { Val(~str), Given, } */ pub type Matches = {opts: ~[Opt], vals: ~[~[Optval]], free: ~[~str]}; +impl Optval : Eq { + pure fn eq(other: &Optval) -> bool { + match self { + Val(ref s) => match *other { Val (ref os) => s == os, + Given => false }, + Given => match *other { Val(_) => false, + Given => true } + } + } + pure fn ne(other: &Optval) -> bool { !self.eq(other) } +} + +impl Matches : Eq { + pure fn eq(other: &Matches) -> bool { + self.opts == (*other).opts && + self.vals == (*other).vals && + self.free == (*other).free + } + pure fn ne(other: &Matches) -> bool { !self.eq(other) } +} + fn is_arg(arg: &str) -> bool { - return str::len(arg) > 1u && arg[0] == '-' as u8; + return arg.len() > 1u && arg[0] == '-' as u8; } fn name_str(nm: &Name) -> ~str { @@ -177,6 +214,35 @@ pub enum Fail_ { UnexpectedArgument(~str), } +impl Fail_ : Eq { + // this whole thing should be easy to infer... + pure fn eq(other: &Fail_) -> bool { + match self { + ArgumentMissing(ref s) => { + match *other { ArgumentMissing(ref so) => s == so, + _ => false } + } + UnrecognizedOption(ref s) => { + match *other { UnrecognizedOption(ref so) => s == so, + _ => false } + } + OptionMissing(ref s) => { + match *other { OptionMissing(ref so) => s == so, + _ => false } + } + OptionDuplicated(ref s) => { + match *other { OptionDuplicated(ref so) => s == so, + _ => false } + } + UnexpectedArgument(ref s) => { + match *other { UnexpectedArgument(ref so) => s == so, + _ => false } + } + } + } + pure fn ne(other: &Fail_) -> bool { !self.eq(other) } +} + /// Convert a `fail_` enum into an error string pub fn fail_str(f: Fail_) -> ~str { return match f { @@ -220,7 +286,7 @@ pub fn getopts(args: &[~str], opts: &[Opt]) -> Result unsafe { let mut i = 0u; while i < l { let cur = args[i]; - let curlen = str::len(cur); + let curlen = cur.len(); if !is_arg(cur) { free.push(cur); } else if cur == ~"--" { @@ -444,6 +510,194 @@ impl FailType : Eq { pure fn ne(other: &FailType) -> bool { !self.eq(other) } } +/** A module which provides a way to specify descriptions and + * groups of short and long option names, together. 
+ */ +pub mod groups { + + /** one group of options, e.g., both -h and --help, along with + * their shared description and properties + */ + pub type OptGroup = { + short_name: ~str, + long_name: ~str, + hint: ~str, + desc: ~str, + hasarg: HasArg, + occur: Occur + }; + + impl OptGroup : Eq { + pure fn eq(other: &OptGroup) -> bool { + self.short_name == (*other).short_name && + self.long_name == (*other).long_name && + self.hint == (*other).hint && + self.desc == (*other).desc && + self.hasarg == (*other).hasarg && + self.occur == (*other).occur + } + pure fn ne(other: &OptGroup) -> bool { !self.eq(other) } + } + + /// Create a long option that is required and takes an argument + pub fn reqopt(short_name: &str, long_name: &str, + desc: &str, hint: &str) -> OptGroup { + let len = short_name.len(); + assert len == 1 || len == 0; + return {short_name: str::from_slice(short_name), + long_name: str::from_slice(long_name), + hint: str::from_slice(hint), + desc: str::from_slice(desc), + hasarg: Yes, + occur: Req}; + } + + /// Create a long option that is optional and takes an argument + pub fn optopt(short_name: &str, long_name: &str, + desc: &str, hint: &str) -> OptGroup { + let len = short_name.len(); + assert len == 1 || len == 0; + return {short_name: str::from_slice(short_name), + long_name: str::from_slice(long_name), + hint: str::from_slice(hint), + desc: str::from_slice(desc), + hasarg: Yes, + occur: Optional}; + } + + /// Create a long option that is optional and does not take an argument + pub fn optflag(short_name: &str, long_name: &str, + desc: &str) -> OptGroup { + let len = short_name.len(); + assert len == 1 || len == 0; + return {short_name: str::from_slice(short_name), + long_name: str::from_slice(long_name), + hint: ~"", + desc: str::from_slice(desc), + hasarg: No, + occur: Optional}; + } + + /// Create a long option that is optional and takes an optional argument + pub fn optflagopt(short_name: &str, long_name: &str, + desc: &str, hint: &str) -> OptGroup { + let len = short_name.len(); + assert len == 1 || len == 0; + return {short_name: str::from_slice(short_name), + long_name: str::from_slice(long_name), + hint: str::from_slice(hint), + desc: str::from_slice(desc), + hasarg: Maybe, + occur: Optional}; + } + + /** + * Create a long option that is optional, takes an argument, and may occur + * multiple times + */ + pub fn optmulti(short_name: &str, long_name: &str, + desc: &str, hint: &str) -> OptGroup { + let len = short_name.len(); + assert len == 1 || len == 0; + return {short_name: str::from_slice(short_name), + long_name: str::from_slice(long_name), + hint: str::from_slice(hint), + desc: str::from_slice(desc), + hasarg: Yes, + occur: Multi}; + } + + // translate OptGroup into Opt + // (both short and long names correspond to different Opts) + pub fn long_to_short(lopt: &OptGroup) -> ~[Opt] { + match ((*lopt).short_name.len(), + (*lopt).long_name.len()) { + + (0,0) => fail ~"this long-format option was given no name", + + (0,_) => ~[{name: Long(((*lopt).long_name)), + hasarg: (*lopt).hasarg, + occur: (*lopt).occur}], + + (1,0) => ~[{name: Short(str::char_at((*lopt).short_name, 0)), + hasarg: (*lopt).hasarg, + occur: (*lopt).occur}], + + (1,_) => ~[{name: Short(str::char_at((*lopt).short_name, 0)), + hasarg: (*lopt).hasarg, + occur: (*lopt).occur}, + {name: Long(((*lopt).long_name)), + hasarg: (*lopt).hasarg, + occur: (*lopt).occur}], + + (_,_) => fail ~"something is wrong with the long-form opt" + } + } + + /* + * Parse command line args with the provided long format options 
+ */ + pub fn getopts(args: &[~str], opts: &[OptGroup]) -> Result { + ::getopts::getopts(args, vec::flat_map(opts, long_to_short)) + } + + /** + * Derive a usage message from a set of long options + */ + pub fn usage(brief: &str, opts: &[OptGroup]) -> ~str { + + let desc_sep = ~"\n" + str::repeat(~" ", 24); + + let rows = vec::map(opts, |optref| { + let short_name = (*optref).short_name; + let long_name = (*optref).long_name; + let hint = (*optref).hint; + let desc = (*optref).desc; + let hasarg = (*optref).hasarg; + + let mut row = str::repeat(~" ", 4); + + // short option + row += match short_name.len() { + 0 => ~"", + 1 => ~"-" + short_name + " ", + _ => fail ~"the short name should only be 1 char long", + }; + + // long option + row += match long_name.len() { + 0 => ~"", + _ => ~"--" + long_name + " ", + }; + + // arg + row += match hasarg { + No => ~"", + Yes => hint, + Maybe => ~"[" + hint + ~"]", + }; + + // here we just need to indent the start of the description + let rowlen = row.len(); + row += if rowlen < 24 { + str::repeat(~" ", 24 - rowlen) + } else { + desc_sep + }; + + // wrapped description + row += str::connect(str::split_within(desc, 54), desc_sep); + + row + }); + + return str::from_slice(brief) + + ~"\n\nOptions:\n" + + str::connect(rows, ~"\n") + + ~"\n\n"; + } +} // end groups module + #[cfg(test)] mod tests { #[legacy_exports]; @@ -943,6 +1197,158 @@ mod tests { assert opts_present(matches, ~[~"L"]); assert opts_str(matches, ~[~"L"]) == ~"foo"; } + + #[test] + fn test_groups_reqopt() { + let opt = groups::reqopt(~"b", ~"banana", ~"some bananas", ~"VAL"); + assert opt == { short_name: ~"b", + long_name: ~"banana", + hint: ~"VAL", + desc: ~"some bananas", + hasarg: Yes, + occur: Req } + } + + #[test] + fn test_groups_optopt() { + let opt = groups::optopt(~"a", ~"apple", ~"some apples", ~"VAL"); + assert opt == { short_name: ~"a", + long_name: ~"apple", + hint: ~"VAL", + desc: ~"some apples", + hasarg: Yes, + occur: Optional } + } + + #[test] + fn test_groups_optflag() { + let opt = groups::optflag(~"k", ~"kiwi", ~"some kiwis"); + assert opt == { short_name: ~"k", + long_name: ~"kiwi", + hint: ~"", + desc: ~"some kiwis", + hasarg: No, + occur: Optional } + } + + #[test] + fn test_groups_optflagopt() { + let opt = groups::optflagopt(~"p", ~"pineapple", + ~"some pineapples", ~"VAL"); + assert opt == { short_name: ~"p", + long_name: ~"pineapple", + hint: ~"VAL", + desc: ~"some pineapples", + hasarg: Maybe, + occur: Optional } + } + + #[test] + fn test_groups_optmulti() { + let opt = groups::optmulti(~"l", ~"lime", + ~"some limes", ~"VAL"); + assert opt == { short_name: ~"l", + long_name: ~"lime", + hint: ~"VAL", + desc: ~"some limes", + hasarg: Yes, + occur: Multi } + } + + #[test] + fn test_groups_long_to_short() { + let short = ~[reqopt(~"b"), reqopt(~"banana")]; + let verbose = groups::reqopt(~"b", ~"banana", + ~"some bananas", ~"VAL"); + + assert groups::long_to_short(&verbose) == short; + } + + #[test] + fn test_groups_getopts() { + let short = ~[ + reqopt(~"b"), reqopt(~"banana"), + optopt(~"a"), optopt(~"apple"), + optflag(~"k"), optflagopt(~"kiwi"), + optflagopt(~"p"), + optmulti(~"l") + ]; + + let verbose = ~[ + groups::reqopt(~"b", ~"banana", ~"Desc", ~"VAL"), + groups::optopt(~"a", ~"apple", ~"Desc", ~"VAL"), + groups::optflag(~"k", ~"kiwi", ~"Desc"), + groups::optflagopt(~"p", ~"", ~"Desc", ~"VAL"), + groups::optmulti(~"l", ~"", ~"Desc", ~"VAL"), + ]; + + let sample_args = ~[~"-k", ~"15", ~"--apple", ~"1", ~"k", + ~"-p", ~"16", ~"l", ~"35"]; + + // NOTE: we 
should sort before comparing + assert getopts(sample_args, short) + == groups::getopts(sample_args, verbose); + } + + #[test] + fn test_groups_usage() { + let optgroups = ~[ + groups::reqopt(~"b", ~"banana", ~"Desc", ~"VAL"), + groups::optopt(~"a", ~"012345678901234567890123456789", + ~"Desc", ~"VAL"), + groups::optflag(~"k", ~"kiwi", ~"Desc"), + groups::optflagopt(~"p", ~"", ~"Desc", ~"VAL"), + groups::optmulti(~"l", ~"", ~"Desc", ~"VAL"), + ]; + + let expected = +~"Usage: fruits + +Options: + -b --banana VAL Desc + -a --012345678901234567890123456789 VAL + Desc + -k --kiwi Desc + -p [VAL] Desc + -l VAL Desc + +"; + + let generated_usage = groups::usage(~"Usage: fruits", optgroups); + + debug!("expected: <<%s>>", expected); + debug!("generated: <<%s>>", generated_usage); + assert generated_usage == expected; + } + + #[test] + fn test_groups_usage_description_wrapping() { + // indentation should be 24 spaces + // lines wrap after 78: or rather descriptions wrap after 54 + + let optgroups = ~[ + groups::optflag(~"k", ~"kiwi", + ~"This is a long description which won't be wrapped..+.."), // 54 + groups::optflag(~"a", ~"apple", + ~"This is a long description which _will_ be wrapped..+.."), // 55 + ]; + + let expected = +~"Usage: fruits + +Options: + -k --kiwi This is a long description which won't be wrapped..+.. + -a --apple This is a long description which _will_ be + wrapped..+.. + +"; + + let usage = groups::usage(~"Usage: fruits", optgroups); + + debug!("expected: <<%s>>", expected); + debug!("generated: <<%s>>", usage); + assert usage == expected + } } // Local Variables: diff --git a/src/libstd/json.rs b/src/libstd/json.rs index 0d3391c1867c2..5f64389e58329 100644 --- a/src/libstd/json.rs +++ b/src/libstd/json.rs @@ -273,6 +273,7 @@ pub impl PrettySerializer: serialization::Serializer { } } +#[cfg(stage0)] pub impl Json: serialization::Serializable { fn serialize(&self, s: &S) { match *self { @@ -296,6 +297,33 @@ pub impl Json: serialization::Serializable { } } +#[cfg(stage1)] +#[cfg(stage2)] +pub impl< + S: serialization::Serializer +> Json: serialization::Serializable { + fn serialize(&self, s: &S) { + match *self { + Number(v) => v.serialize(s), + String(ref v) => v.serialize(s), + Boolean(v) => v.serialize(s), + List(v) => v.serialize(s), + Object(ref v) => { + do s.emit_rec || { + let mut idx = 0; + for v.each |key, value| { + do s.emit_field(*key, idx) { + value.serialize(s); + } + idx += 1; + } + } + }, + Null => s.emit_nil(), + } + } +} + /// Serializes a json value into a io::writer pub fn to_writer(wr: io::Writer, json: &Json) { json.serialize(&Serializer(wr)) @@ -869,7 +897,7 @@ pub impl Deserializer: serialization::Deserializer { // FIXME(#3148) This hint should not be necessary. 
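As a sketch of how the new `groups` API above is meant to be driven end to end (the program name, options, and arguments are invented for illustration; every call matches a signature introduced in this patch):

~~~~
// Each OptGroup carries short name, long name, description and hint,
// so one table drives both parsing and the generated help text.
let opts = ~[
    groups::reqopt(~"o", ~"output", ~"output file name", ~"FILE"),
    groups::optflag(~"h", ~"help", ~"print this help menu"),
];

let matches = result::unwrap(groups::getopts(~[~"-o", ~"out.txt"], opts));
assert opts_present(matches, ~[~"o"]);

// Derive the aligned, wrapped usage message from the same table.
log(debug, groups::usage(~"Usage: prog [options]", opts));
~~~~

Keeping a single `OptGroup` vector as the source of truth for both parsing and `usage` output is the point of the module, per its doc comment.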
let obj: &self/~Object = obj; - match obj.find_ref(&name.to_unique()) { + match obj.find_ref(&name.to_owned()) { None => fail fmt!("no such field: %s", name), Some(json) => { self.stack.push(json); diff --git a/src/libstd/map.rs b/src/libstd/map.rs index 9f78f98fa316e..e49f1abd02b4d 100644 --- a/src/libstd/map.rs +++ b/src/libstd/map.rs @@ -723,7 +723,7 @@ mod tests { let map = map::HashMap::<~str, ~str>(); assert (option::is_none(&map.find(key))); map.insert(key, ~"val"); - assert (option::get(&map.find(key)) == ~"val"); + assert (option::get(map.find(key)) == ~"val"); } #[test] diff --git a/src/libstd/net_ip.rs b/src/libstd/net_ip.rs index cf5323c498b73..5d78fb19bab39 100644 --- a/src/libstd/net_ip.rs +++ b/src/libstd/net_ip.rs @@ -10,8 +10,10 @@ use addrinfo = uv::ll::addrinfo; use uv_getaddrinfo_t = uv::ll::uv_getaddrinfo_t; use uv_ip4_addr = uv::ll::ip4_addr; use uv_ip4_name = uv::ll::ip4_name; +use uv_ip4_port = uv::ll::ip4_port; use uv_ip6_addr = uv::ll::ip6_addr; use uv_ip6_name = uv::ll::ip6_name; +use uv_ip6_port = uv::ll::ip6_port; use uv_getaddrinfo = uv::ll::getaddrinfo; use uv_freeaddrinfo = uv::ll::freeaddrinfo; use create_uv_getaddrinfo_t = uv::ll::getaddrinfo_t; @@ -33,11 +35,11 @@ type ParseAddrErr = { }; /** - * Convert a `ip_addr` to a str + * Convert a `IpAddr` to a str * * # Arguments * - * * ip - a `std::net::ip::ip_addr` + * * ip - a `std::net::ip::IpAddr` */ pub fn format_addr(ip: &IpAddr) -> ~str { match *ip { @@ -58,6 +60,23 @@ pub fn format_addr(ip: &IpAddr) -> ~str { } } +/** + * Get the associated port + * + * # Arguments + * * ip - a `std::net::ip::IpAddr` + */ +pub fn get_port(ip: &IpAddr) -> uint { + match *ip { + Ipv4(ref addr) => unsafe { + uv_ip4_port(addr) + }, + Ipv6(ref addr) => unsafe { + uv_ip6_port(addr) + } + } +} + /// Represents errors returned from `net::ip::get_addr()` enum IpGetAddrErr { GetAddrUnknownError diff --git a/src/libstd/net_tcp.rs b/src/libstd/net_tcp.rs index db5c1328e62b2..942d52a3ad6b7 100644 --- a/src/libstd/net_tcp.rs +++ b/src/libstd/net_tcp.rs @@ -134,6 +134,10 @@ pub fn connect(input_ip: ip::IpAddr, port: uint, stream_handle_ptr: stream_handle_ptr, connect_req: uv::ll::connect_t(), write_req: uv::ll::write_t(), + ipv6: match input_ip { + ip::Ipv4(_) => { false } + ip::Ipv6(_) => { true } + }, iotask: iotask }; let socket_data_ptr = ptr::addr_of(&(*socket_data)); @@ -475,6 +479,7 @@ pub fn accept(new_conn: TcpNewConnection) stream_handle_ptr : stream_handle_ptr, connect_req : uv::ll::connect_t(), write_req : uv::ll::write_t(), + ipv6: (*server_data_ptr).ipv6, iotask : iotask }; let client_socket_data_ptr = ptr::addr_of(&(*client_socket_data)); @@ -590,6 +595,10 @@ fn listen_common(host_ip: ip::IpAddr, port: uint, backlog: uint, kill_ch: kill_ch, on_connect_cb: move on_connect_cb, iotask: iotask, + ipv6: match host_ip { + ip::Ipv4(_) => { false } + ip::Ipv6(_) => { true } + }, mut active: true }; let server_data_ptr = ptr::addr_of(&server_data); @@ -746,6 +755,21 @@ impl TcpSocket { -> future::Future> { write_future(&self, raw_write_data) } + pub fn get_peer_addr() -> ip::IpAddr { + unsafe { + if self.socket_data.ipv6 { + let addr = uv::ll::ip6_addr("", 0); + uv::ll::tcp_getpeername6(self.socket_data.stream_handle_ptr, + ptr::addr_of(&addr)); + ip::Ipv6(move addr) + } else { + let addr = uv::ll::ip4_addr("", 0); + uv::ll::tcp_getpeername(self.socket_data.stream_handle_ptr, + ptr::addr_of(&addr)); + ip::Ipv4(move addr) + } + } + } } /// Implementation of `io::reader` trait for a buffered `net::tcp::tcp_socket` @@ -1003,6 
+1027,7 @@ type TcpListenFcData = { kill_ch: comm::Chan>, on_connect_cb: fn~(*uv::ll::uv_tcp_t), iotask: IoTask, + ipv6: bool, mut active: bool }; @@ -1201,6 +1226,7 @@ type TcpSocketData = { stream_handle_ptr: *uv::ll::uv_tcp_t, connect_req: uv::ll::uv_connect_t, write_req: uv::ll::uv_write_t, + ipv6: bool, iotask: IoTask }; @@ -1223,6 +1249,10 @@ mod test { impl_gl_tcp_ipv4_server_and_client(); } #[test] + fn test_gl_tcp_get_peer_addr() unsafe { + impl_gl_tcp_ipv4_get_peer_addr(); + } + #[test] fn test_gl_tcp_ipv4_client_error_connection_refused() unsafe { impl_gl_tcp_ipv4_client_error_connection_refused(); } @@ -1249,6 +1279,11 @@ mod test { } #[test] #[ignore(cfg(target_os = "linux"))] + fn test_gl_tcp_get_peer_addr() unsafe { + impl_gl_tcp_ipv4_get_peer_addr(); + } + #[test] + #[ignore(cfg(target_os = "linux"))] fn test_gl_tcp_ipv4_client_error_connection_refused() unsafe { impl_gl_tcp_ipv4_client_error_connection_refused(); } @@ -1316,6 +1351,53 @@ mod test { assert str::contains(actual_req, expected_req); assert str::contains(actual_resp, expected_resp); } + fn impl_gl_tcp_ipv4_get_peer_addr() { + let hl_loop = uv::global_loop::get(); + let server_ip = ~"127.0.0.1"; + let server_port = 8887u; + let expected_resp = ~"pong"; + + let server_result_po = core::comm::Port::<~str>(); + let server_result_ch = core::comm::Chan(&server_result_po); + + let cont_po = core::comm::Port::<()>(); + let cont_ch = core::comm::Chan(&cont_po); + // server + do task::spawn_sched(task::ManualThreads(1u)) { + let actual_req = do comm::listen |server_ch| { + run_tcp_test_server( + server_ip, + server_port, + expected_resp, + server_ch, + cont_ch, + hl_loop) + }; + server_result_ch.send(actual_req); + }; + core::comm::recv(cont_po); + // client + log(debug, ~"server started, firing up client.."); + do core::comm::listen |client_ch| { + let server_ip_addr = ip::v4::parse_addr(server_ip); + let iotask = uv::global_loop::get(); + let connect_result = connect(move server_ip_addr, server_port, + iotask); + + let sock = result::unwrap(move connect_result); + + // This is what we are actually testing! + assert net::ip::format_addr(&sock.get_peer_addr()) == + ~"127.0.0.1"; + assert net::ip::get_port(&sock.get_peer_addr()) == 8887; + + // Fulfill the protocol the test server expects + let resp_bytes = str::to_bytes(~"ping"); + tcp_write_single(&sock, resp_bytes); + let read_result = sock.read(0u); + client_ch.send(str::from_bytes(read_result.get())); + }; + } fn impl_gl_tcp_ipv4_client_error_connection_refused() { let hl_loop = uv::global_loop::get(); let server_ip = ~"127.0.0.1"; @@ -1511,8 +1593,11 @@ mod test { ~"SERVER/WORKER: send on cont ch"); cont_ch.send(()); let sock = result::unwrap(move accept_result); + let peer_addr = sock.get_peer_addr(); log(debug, ~"SERVER: successfully accepted"+ - ~"connection!"); + fmt!(" connection from %s:%u", + ip::format_addr(&peer_addr), + ip::get_port(&peer_addr))); let received_req_bytes = read(&sock, 0u); match move received_req_bytes { result::Ok(move data) => { diff --git a/src/libstd/net_url.rs b/src/libstd/net_url.rs index 109e71a3eaa70..8ea9513d15518 100644 --- a/src/libstd/net_url.rs +++ b/src/libstd/net_url.rs @@ -95,7 +95,7 @@ pub fn encode(s: &str) -> ~str { * This function is compliant with RFC 3986. 
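A minimal sketch of the new peer-address support, assuming `sock` is an already-connected `TcpSocket` (obtained through `connect` or `accept` as in the surrounding tests); it mirrors the logging the updated test server now performs:

~~~~
// get_peer_addr() consults the stored ipv6 flag to choose between the
// new tcp_getpeername/tcp_getpeername6 bindings, so the same call
// works for v4 and v6 sockets.
let peer = sock.get_peer_addr();
log(debug, fmt!("connected to %s:%u",
                ip::format_addr(&peer),
                ip::get_port(&peer)));
~~~~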
*/ -fn encode_component(s: &str) -> ~str { +pub fn encode_component(s: &str) -> ~str { encode_inner(s, false) } diff --git a/src/libstd/serialization.rs b/src/libstd/serialization.rs index b7cf09cc6aa50..9df2a326a8474 100644 --- a/src/libstd/serialization.rs +++ b/src/libstd/serialization.rs @@ -92,6 +92,8 @@ pub trait Deserializer { fn read_tup_elt(&self, idx: uint, f: fn() -> T) -> T; } +#[cfg(stage0)] +pub mod traits { pub trait Serializable { fn serialize(&self, s: &S); } @@ -561,3 +563,488 @@ pub impl D: DeserializerHelpers { } } } +} + +#[cfg(stage1)] +#[cfg(stage2)] +pub mod traits { +pub trait Serializable { + fn serialize(&self, s: &S); +} + +pub trait Deserializable { + static fn deserialize(&self, d: &D) -> self; +} + +pub impl uint: Serializable { + fn serialize(&self, s: &S) { s.emit_uint(*self) } +} + +pub impl uint: Deserializable { + static fn deserialize(&self, d: &D) -> uint { + d.read_uint() + } +} + +pub impl u8: Serializable { + fn serialize(&self, s: &S) { s.emit_u8(*self) } +} + +pub impl u8: Deserializable { + static fn deserialize(&self, d: &D) -> u8 { + d.read_u8() + } +} + +pub impl u16: Serializable { + fn serialize(&self, s: &S) { s.emit_u16(*self) } +} + +pub impl u16: Deserializable { + static fn deserialize(&self, d: &D) -> u16 { + d.read_u16() + } +} + +pub impl u32: Serializable { + fn serialize(&self, s: &S) { s.emit_u32(*self) } +} + +pub impl u32: Deserializable { + static fn deserialize(&self, d: &D) -> u32 { + d.read_u32() + } +} + +pub impl u64: Serializable { + fn serialize(&self, s: &S) { s.emit_u64(*self) } +} + +pub impl u64: Deserializable { + static fn deserialize(&self, d: &D) -> u64 { + d.read_u64() + } +} + +pub impl int: Serializable { + fn serialize(&self, s: &S) { s.emit_int(*self) } +} + +pub impl int: Deserializable { + static fn deserialize(&self, d: &D) -> int { + d.read_int() + } +} + +pub impl i8: Serializable { + fn serialize(&self, s: &S) { s.emit_i8(*self) } +} + +pub impl i8: Deserializable { + static fn deserialize(&self, d: &D) -> i8 { + d.read_i8() + } +} + +pub impl i16: Serializable { + fn serialize(&self, s: &S) { s.emit_i16(*self) } +} + +pub impl i16: Deserializable { + static fn deserialize(&self, d: &D) -> i16 { + d.read_i16() + } +} + +pub impl i32: Serializable { + fn serialize(&self, s: &S) { s.emit_i32(*self) } +} + +pub impl i32: Deserializable { + static fn deserialize(&self, d: &D) -> i32 { + d.read_i32() + } +} + +pub impl i64: Serializable { + fn serialize(&self, s: &S) { s.emit_i64(*self) } +} + +pub impl i64: Deserializable { + static fn deserialize(&self, d: &D) -> i64 { + d.read_i64() + } +} + +pub impl &str: Serializable { + fn serialize(&self, s: &S) { s.emit_borrowed_str(*self) } +} + +pub impl ~str: Serializable { + fn serialize(&self, s: &S) { s.emit_owned_str(*self) } +} + +pub impl ~str: Deserializable { + static fn deserialize(&self, d: &D) -> ~str { + d.read_owned_str() + } +} + +pub impl @str: Serializable { + fn serialize(&self, s: &S) { s.emit_managed_str(*self) } +} + +pub impl @str: Deserializable { + static fn deserialize(&self, d: &D) -> @str { + d.read_managed_str() + } +} + +pub impl float: Serializable { + fn serialize(&self, s: &S) { s.emit_float(*self) } +} + +pub impl float: Deserializable { + static fn deserialize(&self, d: &D) -> float { + d.read_float() + } +} + +pub impl f32: Serializable { + fn serialize(&self, s: &S) { s.emit_f32(*self) } +} + +pub impl f32: Deserializable { + static fn deserialize(&self, d: &D) -> f32 { + d.read_f32() } +} + +pub impl f64: Serializable { + fn 
serialize(&self, s: &S) { s.emit_f64(*self) } +} + +pub impl f64: Deserializable { + static fn deserialize(&self, d: &D) -> f64 { + d.read_f64() + } +} + +pub impl bool: Serializable { + fn serialize(&self, s: &S) { s.emit_bool(*self) } +} + +pub impl bool: Deserializable { + static fn deserialize(&self, d: &D) -> bool { + d.read_bool() + } +} + +pub impl (): Serializable { + fn serialize(&self, s: &S) { s.emit_nil() } +} + +pub impl (): Deserializable { + static fn deserialize(&self, d: &D) -> () { + d.read_nil() + } +} + +pub impl> &T: Serializable { + fn serialize(&self, s: &S) { + s.emit_borrowed(|| (**self).serialize(s)) + } +} + +pub impl> ~T: Serializable { + fn serialize(&self, s: &S) { + s.emit_owned(|| (**self).serialize(s)) + } +} + +pub impl> ~T: Deserializable { + static fn deserialize(&self, d: &D) -> ~T { + d.read_owned(|| ~deserialize(d)) + } +} + +pub impl> @T: Serializable { + fn serialize(&self, s: &S) { + s.emit_managed(|| (**self).serialize(s)) + } +} + +pub impl> @T: Deserializable { + static fn deserialize(&self, d: &D) -> @T { + d.read_managed(|| @deserialize(d)) + } +} + +pub impl> &[T]: Serializable { + fn serialize(&self, s: &S) { + do s.emit_borrowed_vec(self.len()) { + for self.eachi |i, e| { + s.emit_vec_elt(i, || e.serialize(s)) + } + } + } +} + +pub impl> ~[T]: Serializable { + fn serialize(&self, s: &S) { + do s.emit_owned_vec(self.len()) { + for self.eachi |i, e| { + s.emit_vec_elt(i, || e.serialize(s)) + } + } + } +} + +pub impl> ~[T]: Deserializable { + static fn deserialize(&self, d: &D) -> ~[T] { + do d.read_owned_vec |len| { + do vec::from_fn(len) |i| { + d.read_vec_elt(i, || deserialize(d)) + } + } + } +} + +pub impl> @[T]: Serializable { + fn serialize(&self, s: &S) { + do s.emit_managed_vec(self.len()) { + for self.eachi |i, e| { + s.emit_vec_elt(i, || e.serialize(s)) + } + } + } +} + +pub impl> @[T]: Deserializable { + static fn deserialize(&self, d: &D) -> @[T] { + do d.read_managed_vec |len| { + do at_vec::from_fn(len) |i| { + d.read_vec_elt(i, || deserialize(d)) + } + } + } +} + +pub impl> Option: Serializable { + fn serialize(&self, s: &S) { + do s.emit_enum(~"option") { + match *self { + None => do s.emit_enum_variant(~"none", 0u, 0u) { + }, + + Some(ref v) => do s.emit_enum_variant(~"some", 1u, 1u) { + s.emit_enum_variant_arg(0u, || v.serialize(s)) + } + } + } + } +} + +pub impl> Option: Deserializable { + static fn deserialize(&self, d: &D) -> Option { + do d.read_enum(~"option") { + do d.read_enum_variant |i| { + match i { + 0 => None, + 1 => Some(d.read_enum_variant_arg(0u, || deserialize(d))), + _ => fail(#fmt("Bad variant for option: %u", i)) + } + } + } + } +} + +pub impl< + S: Serializer, + T0: Serializable, + T1: Serializable +> (T0, T1): Serializable { + fn serialize(&self, s: &S) { + match *self { + (ref t0, ref t1) => { + do s.emit_tup(2) { + s.emit_tup_elt(0, || t0.serialize(s)); + s.emit_tup_elt(1, || t1.serialize(s)); + } + } + } + } +} + +pub impl< + D: Deserializer, + T0: Deserializable, + T1: Deserializable +> (T0, T1): Deserializable { + static fn deserialize(&self, d: &D) -> (T0, T1) { + do d.read_tup(2) { + ( + d.read_tup_elt(0, || deserialize(d)), + d.read_tup_elt(1, || deserialize(d)) + ) + } + } +} + +pub impl< + S: Serializer, + T0: Serializable, + T1: Serializable, + T2: Serializable +> (T0, T1, T2): Serializable { + fn serialize(&self, s: &S) { + match *self { + (ref t0, ref t1, ref t2) => { + do s.emit_tup(3) { + s.emit_tup_elt(0, || t0.serialize(s)); + s.emit_tup_elt(1, || t1.serialize(s)); + s.emit_tup_elt(2, || 
t2.serialize(s)); + } + } + } + } +} + +pub impl< + D: Deserializer, + T0: Deserializable, + T1: Deserializable, + T2: Deserializable +> (T0, T1, T2): Deserializable { + static fn deserialize(&self, d: &D) -> (T0, T1, T2) { + do d.read_tup(3) { + ( + d.read_tup_elt(0, || deserialize(d)), + d.read_tup_elt(1, || deserialize(d)), + d.read_tup_elt(2, || deserialize(d)) + ) + } + } +} + +pub impl< + S: Serializer, + T0: Serializable, + T1: Serializable, + T2: Serializable, + T3: Serializable +> (T0, T1, T2, T3): Serializable { + fn serialize(&self, s: &S) { + match *self { + (ref t0, ref t1, ref t2, ref t3) => { + do s.emit_tup(4) { + s.emit_tup_elt(0, || t0.serialize(s)); + s.emit_tup_elt(1, || t1.serialize(s)); + s.emit_tup_elt(2, || t2.serialize(s)); + s.emit_tup_elt(3, || t3.serialize(s)); + } + } + } + } +} + +pub impl< + D: Deserializer, + T0: Deserializable, + T1: Deserializable, + T2: Deserializable, + T3: Deserializable +> (T0, T1, T2, T3): Deserializable { + static fn deserialize(&self, d: &D) -> (T0, T1, T2, T3) { + do d.read_tup(4) { + ( + d.read_tup_elt(0, || deserialize(d)), + d.read_tup_elt(1, || deserialize(d)), + d.read_tup_elt(2, || deserialize(d)), + d.read_tup_elt(3, || deserialize(d)) + ) + } + } +} + +pub impl< + S: Serializer, + T0: Serializable, + T1: Serializable, + T2: Serializable, + T3: Serializable, + T4: Serializable +> (T0, T1, T2, T3, T4): Serializable { + fn serialize(&self, s: &S) { + match *self { + (ref t0, ref t1, ref t2, ref t3, ref t4) => { + do s.emit_tup(5) { + s.emit_tup_elt(0, || t0.serialize(s)); + s.emit_tup_elt(1, || t1.serialize(s)); + s.emit_tup_elt(2, || t2.serialize(s)); + s.emit_tup_elt(3, || t3.serialize(s)); + s.emit_tup_elt(4, || t4.serialize(s)); + } + } + } + } +} + +pub impl< + D: Deserializer, + T0: Deserializable, + T1: Deserializable, + T2: Deserializable, + T3: Deserializable, + T4: Deserializable +> (T0, T1, T2, T3, T4): Deserializable { + static fn deserialize(&self, d: &D) + -> (T0, T1, T2, T3, T4) { + do d.read_tup(5) { + ( + d.read_tup_elt(0, || deserialize(d)), + d.read_tup_elt(1, || deserialize(d)), + d.read_tup_elt(2, || deserialize(d)), + d.read_tup_elt(3, || deserialize(d)), + d.read_tup_elt(4, || deserialize(d)) + ) + } + } +} + +// ___________________________________________________________________________ +// Helper routines +// +// In some cases, these should eventually be coded as traits. 
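For orientation, a hand-written impl against the relocated stage1/stage2 traits takes the following shape, with the serializer type parameter spelled out; `Celsius` is a hypothetical type used purely for illustration, and `Serializer`/`Serializable` are assumed to be in scope (e.g. via the `pub use traits::*` re-export below):

~~~~
// Hypothetical user type.
struct Celsius { degrees: uint }

// Same shape as the uint/u8/... impls above: generic over any
// Serializer, delegating to the emit_* methods it provides.
pub impl<S: Serializer> Celsius: Serializable<S> {
    fn serialize(&self, s: &S) { s.emit_uint(self.degrees) }
}
~~~~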
+ +pub trait SerializerHelpers { + fn emit_from_vec(&self, v: ~[T], f: fn(v: &T)); +} + +pub impl S: SerializerHelpers { + fn emit_from_vec(&self, v: ~[T], f: fn(v: &T)) { + do self.emit_owned_vec(v.len()) { + for v.eachi |i, e| { + do self.emit_vec_elt(i) { + f(e) + } + } + } + } +} + +pub trait DeserializerHelpers { + fn read_to_vec(&self, f: fn() -> T) -> ~[T]; +} + +pub impl D: DeserializerHelpers { + fn read_to_vec(&self, f: fn() -> T) -> ~[T] { + do self.read_owned_vec |len| { + do vec::from_fn(len) |i| { + self.read_vec_elt(i, || f()) + } + } + } +} +} + +pub use traits::*; diff --git a/src/libstd/treemap.rs b/src/libstd/treemap.rs index 8ab0dc7f2e7da..e4b6c9b5b9a95 100644 --- a/src/libstd/treemap.rs +++ b/src/libstd/treemap.rs @@ -11,28 +11,28 @@ use core::cmp::{Eq, Ord}; use core::option::{Some, None}; use Option = core::Option; -pub type TreeMap = @mut TreeEdge; +pub type TreeMap = @mut TreeEdge; -type TreeEdge = Option<@TreeNode>; +type TreeEdge = Option<@TreeNode>; -enum TreeNode = { +struct TreeNode { key: K, mut value: V, mut left: TreeEdge, mut right: TreeEdge -}; +} /// Create a treemap -pub fn TreeMap() -> TreeMap { @mut None } +pub fn TreeMap() -> TreeMap { @mut None } /// Insert a value into the map pub fn insert(m: &mut TreeEdge, k: K, v: V) { match copy *m { None => { - *m = Some(@TreeNode({key: k, - mut value: v, - mut left: None, - mut right: None})); + *m = Some(@TreeNode {key: k, + mut value: v, + mut left: None, + mut right: None}); return; } Some(node) => { @@ -67,7 +67,8 @@ pub fn find(m: &const TreeEdge, k: K) } /// Visit all pairs in the map in order. -pub fn traverse(m: &const TreeEdge, f: fn((&K), (&V))) { +pub fn traverse(m: &const TreeEdge, + f: fn((&K), (&V))) { match copy *m { None => (), Some(node) => { @@ -79,6 +80,19 @@ pub fn traverse(m: &const TreeEdge, f: fn((&K), (&V))) { } } +/// Compare two treemaps and return true iff +/// they contain same keys and values +pub fn equals(t1: &const TreeEdge, + t2: &const TreeEdge) + -> bool { + let mut v1 = ~[]; + let mut v2 = ~[]; + traverse(t1, |k,v| { v1.push((copy *k, copy *v)) }); + traverse(t2, |k,v| { v2.push((copy *k, copy *v)) }); + return v1 == v2; +} + + #[cfg(test)] mod tests { #[legacy_exports]; @@ -127,6 +141,28 @@ mod tests { traverse(m, |x,y| t(n, *x, *y)); } + #[test] + fn equality() { + let m1 = TreeMap(); + insert(m1, 3, ()); + insert(m1, 0, ()); + insert(m1, 4, ()); + insert(m1, 2, ()); + insert(m1, 1, ()); + let m2 = TreeMap(); + insert(m2, 2, ()); + insert(m2, 1, ()); + insert(m2, 3, ()); + insert(m2, 0, ()); + insert(m2, 4, ()); + + assert equals(m1, m2); + + let m3 = TreeMap(); + assert !equals(m1,m3); + + } + #[test] fn u8_map() { let m = TreeMap(); diff --git a/src/libstd/uv_ll.rs b/src/libstd/uv_ll.rs index b4b04a1a714f3..8bf4e9ed3afc0 100644 --- a/src/libstd/uv_ll.rs +++ b/src/libstd/uv_ll.rs @@ -590,6 +590,8 @@ extern mod rustrt { -> libc::c_int; fn rust_uv_ip6_name(src: *sockaddr_in6, dst: *u8, size: libc::size_t) -> libc::c_int; + fn rust_uv_ip4_port(src: *sockaddr_in) -> libc::c_uint; + fn rust_uv_ip6_port(src: *sockaddr_in6) -> libc::c_uint; // FIXME ref #2064 fn rust_uv_tcp_connect(connect_ptr: *uv_connect_t, tcp_handle_ptr: *uv_tcp_t, @@ -606,6 +608,10 @@ extern mod rustrt { // FIXME ref #2064 fn rust_uv_tcp_bind6(tcp_server: *uv_tcp_t, ++addr: *sockaddr_in6) -> libc::c_int; + fn rust_uv_tcp_getpeername(tcp_handle_ptr: *uv_tcp_t, + ++name: *sockaddr_in) -> libc::c_int; + fn rust_uv_tcp_getpeername6(tcp_handle_ptr: *uv_tcp_t, + ++name: *sockaddr_in6) ->libc::c_int; fn 
rust_uv_listen(stream: *libc::c_void, backlog: libc::c_int, cb: *u8) -> libc::c_int; fn rust_uv_accept(server: *libc::c_void, client: *libc::c_void) @@ -736,6 +742,16 @@ pub unsafe fn tcp_bind6(tcp_server_ptr: *uv_tcp_t, addr_ptr); } +pub unsafe fn tcp_getpeername(tcp_handle_ptr: *uv_tcp_t, + name: *sockaddr_in) -> libc::c_int { + return rustrt::rust_uv_tcp_getpeername(tcp_handle_ptr, name); +} + +pub unsafe fn tcp_getpeername6(tcp_handle_ptr: *uv_tcp_t, + name: *sockaddr_in6) ->libc::c_int { + return rustrt::rust_uv_tcp_getpeername6(tcp_handle_ptr, name); +} + pub unsafe fn listen(stream: *T, backlog: libc::c_int, cb: *u8) -> libc::c_int { return rustrt::rust_uv_listen(stream as *libc::c_void, backlog, cb); @@ -857,6 +873,12 @@ pub unsafe fn ip6_name(src: &sockaddr_in6) -> ~str { } } } +pub unsafe fn ip4_port(src: &sockaddr_in) -> uint { + rustrt::rust_uv_ip4_port(to_unsafe_ptr(src)) as uint +} +pub unsafe fn ip6_port(src: &sockaddr_in6) -> uint { + rustrt::rust_uv_ip6_port(to_unsafe_ptr(src)) as uint +} pub unsafe fn timer_init(loop_ptr: *libc::c_void, timer_ptr: *uv_timer_t) -> libc::c_int { @@ -1462,7 +1484,7 @@ pub mod test { fn impl_uv_tcp_server_and_request() unsafe { let bind_ip = ~"0.0.0.0"; let request_ip = ~"127.0.0.1"; - let port = 8887; + let port = 8886; let kill_server_msg = ~"does a dog have buddha nature?"; let server_resp_msg = ~"mu!"; let client_port = core::comm::Port::<~str>(); diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 3740557b7f8fe..9c49c1638bff9 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -7,17 +7,6 @@ use std::serialization::{Serializable, use codemap::{span, filename}; use parse::token; -impl span: Serializable { - /* Note #1972 -- spans are serialized but not deserialized */ - fn serialize(&self, _s: &S) { } -} - -impl span: Deserializable { - static fn deserialize(_d: &D) -> span { - ast_util::dummy_sp() - } -} - #[auto_serialize] #[auto_deserialize] type spanned = {node: T, span: span}; @@ -34,6 +23,7 @@ macro_rules! interner_key ( // implemented. 
struct ident { repr: uint } +#[cfg(stage0)] impl ident: Serializable { fn serialize(&self, s: &S) { let intr = match unsafe { @@ -47,6 +37,7 @@ impl ident: Serializable { } } +#[cfg(stage0)] impl ident: Deserializable { static fn deserialize(d: &D) -> ident { let intr = match unsafe { @@ -60,6 +51,36 @@ impl ident: Deserializable { } } +#[cfg(stage1)] +#[cfg(stage2)] +impl ident: Serializable { + fn serialize(&self, s: &S) { + let intr = match unsafe { + task::local_data::local_data_get(interner_key!()) + } { + None => fail ~"serialization: TLS interner not set up", + Some(intr) => intr + }; + + s.emit_owned_str(*(*intr).get(*self)); + } +} + +#[cfg(stage1)] +#[cfg(stage2)] +impl ident: Deserializable { + static fn deserialize(d: &D) -> ident { + let intr = match unsafe { + task::local_data::local_data_get(interner_key!()) + } { + None => fail ~"deserialization: TLS interner not set up", + Some(intr) => intr + }; + + (*intr).intern(@d.read_owned_str()) + } +} + impl ident: cmp::Eq { pure fn eq(other: &ident) -> bool { self.repr == other.repr } pure fn ne(other: &ident) -> bool { !self.eq(other) } @@ -80,7 +101,7 @@ type path = {span: span, global: bool, idents: ~[ident], rp: Option<@region>, - types: ~[@ty]}; + types: ~[@Ty]}; type crate_num = int; @@ -107,7 +128,7 @@ enum ty_param_bound { bound_send, bound_const, bound_owned, - bound_trait(@ty), + bound_trait(@Ty), } #[auto_serialize] @@ -119,7 +140,7 @@ type ty_param = {ident: ident, id: node_id, bounds: @~[ty_param_bound]}; enum def { def_fn(def_id, purity), def_static_method(/* method */ def_id, - /* trait */ def_id, + /* trait */ Option, purity), def_self(node_id), def_mod(def_id), @@ -441,7 +462,7 @@ impl proto : cmp::Eq { #[auto_deserialize] enum vstore { // FIXME (#3469): Change uint to @expr (actually only constant exprs) - vstore_fixed(Option), // [1,2,3,4]/_ or 4 + vstore_fixed(Option), // [1,2,3,4] vstore_uniq, // ~[1,2,3,4] vstore_box, // @[1,2,3,4] vstore_slice(@region) // &[1,2,3,4](foo)? @@ -451,7 +472,7 @@ enum vstore { #[auto_deserialize] enum expr_vstore { // FIXME (#3469): Change uint to @expr (actually only constant exprs) - expr_vstore_fixed(Option), // [1,2,3,4]/_ or 4 + expr_vstore_fixed(Option), // [1,2,3,4] expr_vstore_uniq, // ~[1,2,3,4] expr_vstore_box, // @[1,2,3,4] expr_vstore_slice // &[1,2,3,4] @@ -653,7 +674,7 @@ type initializer = {op: init_op, expr: @expr}; // a refinement on pat. 
#[auto_serialize] #[auto_deserialize] -type local_ = {is_mutbl: bool, ty: @ty, pat: @pat, +type local_ = {is_mutbl: bool, ty: @Ty, pat: @pat, init: Option, id: node_id}; type local = spanned; @@ -700,10 +721,6 @@ type expr = {id: node_id, callee_id: node_id, node: expr_, span: span}; enum log_level { error, debug, other } // 0 = error, 1 = debug, 2 = other -#[auto_serialize] -#[auto_deserialize] -enum alt_mode { alt_check, alt_exhaustive, } - #[auto_serialize] #[auto_deserialize] enum expr_ { @@ -715,7 +732,7 @@ enum expr_ { expr_binary(binop, @expr, @expr), expr_unary(unop, @expr), expr_lit(@lit), - expr_cast(@expr, @ty), + expr_cast(@expr, @Ty), expr_if(@expr, blk, Option<@expr>), expr_while(@expr, blk), /* Conditionless loop (can be exited with break, cont, ret, or fail) @@ -739,7 +756,7 @@ enum expr_ { expr_assign(@expr, @expr), expr_swap(@expr, @expr), expr_assign_op(binop, @expr, @expr), - expr_field(@expr, ident, ~[@ty]), + expr_field(@expr, ident, ~[@Ty]), expr_index(@expr, @expr), expr_path(@path), expr_addr_of(mutability, @expr), @@ -794,10 +811,10 @@ type capture_clause = @~[capture_item]; #[auto_deserialize] #[doc="For macro invocations; parsing is delegated to the macro"] enum token_tree { - tt_tok(span, token::token), + tt_tok(span, token::Token), tt_delim(~[token_tree]), // These only make sense for right-hand-sides of MBE macros - tt_seq(span, ~[token_tree], Option, bool), + tt_seq(span, ~[token_tree], Option, bool), tt_nonterminal(span, ident) } @@ -859,10 +876,10 @@ type matcher = spanned; #[auto_deserialize] enum matcher_ { // match one token - match_tok(token::token), + match_tok(token::Token), // match repetitions of a sequence: body, separator, zero ok?, // lo, hi position-in-match-array used: - match_seq(~[matcher], Option, bool, uint, uint), + match_seq(~[matcher], Option, bool, uint, uint), // parse a Rust NT: name to bind, name of NT, position in match array: match_nonterminal(ident, ident, uint) } @@ -935,7 +952,7 @@ impl ast::lit_: cmp::Eq { // type structure in middle/ty.rs as well. #[auto_serialize] #[auto_deserialize] -type mt = {ty: @ty, mutbl: mutability}; +type mt = {ty: @Ty, mutbl: mutability}; #[auto_serialize] #[auto_deserialize] @@ -1038,7 +1055,7 @@ impl float_ty : cmp::Eq { #[auto_serialize] #[auto_deserialize] -type ty = {id: node_id, node: ty_, span: span}; +type Ty = {id: node_id, node: ty_, span: span}; // Not represented directly in the AST, referred to by name through a ty_path. #[auto_serialize] @@ -1114,9 +1131,9 @@ enum ty_ { ty_rptr(@region, mt), ty_rec(~[ty_field]), ty_fn(proto, purity, @~[ty_param_bound], fn_decl), - ty_tup(~[@ty]), + ty_tup(~[@Ty]), ty_path(@path, node_id), - ty_fixed_length(@ty, Option), + ty_fixed_length(@Ty, Option), ty_mac(mac), // ty_infer means the type should be inferred instead of it having been // specified. This should only appear at the "top level" of a type and not @@ -1126,16 +1143,16 @@ enum ty_ { // Equality and byte-iter (hashing) can be quite approximate for AST types. // since we only care about this for normalizing them to "real" types. 
-impl ty : cmp::Eq { - pure fn eq(other: &ty) -> bool { +impl Ty : cmp::Eq { + pure fn eq(other: &Ty) -> bool { ptr::addr_of(&self) == ptr::addr_of(&(*other)) } - pure fn ne(other: &ty) -> bool { + pure fn ne(other: &Ty) -> bool { ptr::addr_of(&self) != ptr::addr_of(&(*other)) } } -impl ty : to_bytes::IterBytes { +impl Ty : to_bytes::IterBytes { pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) { to_bytes::iter_bytes_2(&self.span.lo, &self.span.hi, lsb0, f); } @@ -1144,13 +1161,13 @@ impl ty : to_bytes::IterBytes { #[auto_serialize] #[auto_deserialize] -type arg = {mode: mode, ty: @ty, ident: ident, id: node_id}; +type arg = {mode: mode, ty: @Ty, ident: ident, id: node_id}; #[auto_serialize] #[auto_deserialize] type fn_decl = {inputs: ~[arg], - output: @ty, + output: @Ty, cf: ret_style}; #[auto_serialize] @@ -1313,7 +1330,7 @@ type foreign_mod = #[auto_serialize] #[auto_deserialize] -type variant_arg = {ty: @ty, id: node_id}; +type variant_arg = {ty: @Ty, id: node_id}; #[auto_serialize] #[auto_deserialize] @@ -1446,7 +1463,7 @@ impl visibility : cmp::Eq { type struct_field_ = { kind: struct_field_kind, id: node_id, - ty: @ty + ty: @Ty }; type struct_field = spanned; @@ -1482,17 +1499,17 @@ type item = {ident: ident, attrs: ~[attribute], #[auto_serialize] #[auto_deserialize] enum item_ { - item_const(@ty, @expr), + item_const(@Ty, @expr), item_fn(fn_decl, purity, ~[ty_param], blk), item_mod(_mod), item_foreign_mod(foreign_mod), - item_ty(@ty, ~[ty_param]), + item_ty(@Ty, ~[ty_param]), item_enum(enum_def, ~[ty_param]), item_class(@struct_def, ~[ty_param]), item_trait(~[ty_param], ~[@trait_ref], ~[trait_method]), item_impl(~[ty_param], Option<@trait_ref>, /* (optional) trait this impl implements */ - @ty, /* self */ + @Ty, /* self */ ~[@method]), item_mac(mac), } @@ -1552,7 +1569,7 @@ type foreign_item = #[auto_deserialize] enum foreign_item_ { foreign_item_fn(fn_decl, purity, ~[ty_param]), - foreign_item_const(@ty) + foreign_item_const(@Ty) } // The data we save and restore about an inlined item or method. 
This is not diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 4c18b6b8ecac9..6fd84c3317f72 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -471,7 +471,7 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { visit_expr_post: fn@(_e: @expr) { }, - visit_ty: fn@(t: @ty) { + visit_ty: fn@(t: @Ty) { match t.node { ty_path(_, id) => vfn(id), _ => { /* fall through */ } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index d08edd7af1d07..4bd1679600ffb 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -90,9 +90,7 @@ fn attr_meta(attr: ast::attribute) -> @ast::meta_item { @attr.node.value } // Get the meta_items from inside a vector of attributes fn attr_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] { - let mut mitems = ~[]; - for attrs.each |a| { mitems.push(attr_meta(*a)); } - return mitems; + do attrs.map |a| { attr_meta(*a) } } fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute { diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index e07985119ec49..69a80d0bac13a 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -1,4 +1,8 @@ use dvec::DVec; +use std::serialization::{Serializable, + Deserializable, + Serializer, + Deserializer}; export filename; export filemap; @@ -7,7 +11,7 @@ export file_substr; export fss_none; export fss_internal; export fss_external; -export codemap; +export CodeMap; export expn_info; export expn_info_; export expanded_from; @@ -55,11 +59,11 @@ type filemap = @{name: filename, substr: file_substr, src: @~str, start_pos: file_pos, mut lines: ~[file_pos]}; -type codemap = @{files: DVec}; +type CodeMap = @{files: DVec}; type loc = {file: filemap, line: uint, col: uint}; -fn new_codemap() -> codemap { @{files: DVec()} } +fn new_codemap() -> CodeMap { @{files: DVec()} } fn new_filemap_w_substr(+filename: filename, +substr: file_substr, src: @~str, @@ -77,7 +81,7 @@ fn new_filemap(+filename: filename, src: @~str, start_pos_ch, start_pos_byte); } -fn mk_substr_filename(cm: codemap, sp: span) -> ~str +fn mk_substr_filename(cm: CodeMap, sp: span) -> ~str { let pos = lookup_char_pos(cm, sp.lo); return fmt!("<%s:%u:%u>", pos.file.name, pos.line, pos.col); @@ -89,7 +93,7 @@ fn next_line(file: filemap, chpos: uint, byte_pos: uint) { type lookup_fn = pure fn(file_pos) -> uint; -fn lookup_line(map: codemap, pos: uint, lookup: lookup_fn) +fn lookup_line(map: CodeMap, pos: uint, lookup: lookup_fn) -> {fm: filemap, line: uint} { let len = map.files.len(); @@ -112,22 +116,22 @@ fn lookup_line(map: codemap, pos: uint, lookup: lookup_fn) return {fm: f, line: a}; } -fn lookup_pos(map: codemap, pos: uint, lookup: lookup_fn) -> loc { +fn lookup_pos(map: CodeMap, pos: uint, lookup: lookup_fn) -> loc { let {fm: f, line: a} = lookup_line(map, pos, lookup); return {file: f, line: a + 1u, col: pos - lookup(f.lines[a])}; } -fn lookup_char_pos(map: codemap, pos: uint) -> loc { +fn lookup_char_pos(map: CodeMap, pos: uint) -> loc { pure fn lookup(pos: file_pos) -> uint { return pos.ch; } return lookup_pos(map, pos, lookup); } -fn lookup_byte_pos(map: codemap, pos: uint) -> loc { +fn lookup_byte_pos(map: CodeMap, pos: uint) -> loc { pure fn lookup(pos: file_pos) -> uint { return pos.byte; } return lookup_pos(map, pos, lookup); } -fn lookup_char_pos_adj(map: codemap, pos: uint) +fn lookup_char_pos_adj(map: CodeMap, pos: uint) -> {filename: ~str, line: uint, col: uint, file: Option} { let loc = lookup_char_pos(map, pos); @@ -150,7 +154,7 @@ fn lookup_char_pos_adj(map: codemap, 
pos: uint) } } -fn adjust_span(map: codemap, sp: span) -> span { +fn adjust_span(map: CodeMap, sp: span) -> span { pure fn lookup(pos: file_pos) -> uint { return pos.ch; } let line = lookup_line(map, sp.lo, lookup); match (line.fm.substr) { @@ -178,14 +182,42 @@ impl span : cmp::Eq { pure fn ne(other: &span) -> bool { !self.eq(other) } } -fn span_to_str_no_adj(sp: span, cm: codemap) -> ~str { +#[cfg(stage0)] +impl span: Serializable { + /* Note #1972 -- spans are serialized but not deserialized */ + fn serialize(&self, _s: &S) { } +} + +#[cfg(stage0)] +impl span: Deserializable { + static fn deserialize(_d: &D) -> span { + ast_util::dummy_sp() + } +} + +#[cfg(stage1)] +#[cfg(stage2)] +impl span: Serializable { + /* Note #1972 -- spans are serialized but not deserialized */ + fn serialize(&self, _s: &S) { } +} + +#[cfg(stage1)] +#[cfg(stage2)] +impl span: Deserializable { + static fn deserialize(_d: &D) -> span { + ast_util::dummy_sp() + } +} + +fn span_to_str_no_adj(sp: span, cm: CodeMap) -> ~str { let lo = lookup_char_pos(cm, sp.lo); let hi = lookup_char_pos(cm, sp.hi); return fmt!("%s:%u:%u: %u:%u", lo.file.name, lo.line, lo.col, hi.line, hi.col) } -fn span_to_str(sp: span, cm: codemap) -> ~str { +fn span_to_str(sp: span, cm: CodeMap) -> ~str { let lo = lookup_char_pos_adj(cm, sp.lo); let hi = lookup_char_pos_adj(cm, sp.hi); return fmt!("%s:%u:%u: %u:%u", lo.filename, @@ -194,12 +226,12 @@ fn span_to_str(sp: span, cm: codemap) -> ~str { type file_lines = {file: filemap, lines: ~[uint]}; -fn span_to_filename(sp: span, cm: codemap::codemap) -> filename { +fn span_to_filename(sp: span, cm: codemap::CodeMap) -> filename { let lo = lookup_char_pos(cm, sp.lo); return /* FIXME (#2543) */ copy lo.file.name; } -fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines { +fn span_to_lines(sp: span, cm: codemap::CodeMap) -> @file_lines { let lo = lookup_char_pos(cm, sp.lo); let hi = lookup_char_pos(cm, sp.hi); let mut lines = ~[]; @@ -218,7 +250,7 @@ fn get_line(fm: filemap, line: int) -> ~str unsafe { str::slice(*fm.src, begin, end) } -fn lookup_byte_offset(cm: codemap::codemap, chpos: uint) +fn lookup_byte_offset(cm: codemap::CodeMap, chpos: uint) -> {fm: filemap, pos: uint} { pure fn lookup(pos: file_pos) -> uint { return pos.ch; } let {fm, line} = lookup_line(cm, chpos, lookup); @@ -228,20 +260,20 @@ fn lookup_byte_offset(cm: codemap::codemap, chpos: uint) {fm: fm, pos: line_offset + col_offset} } -fn span_to_snippet(sp: span, cm: codemap::codemap) -> ~str { +fn span_to_snippet(sp: span, cm: codemap::CodeMap) -> ~str { let begin = lookup_byte_offset(cm, sp.lo); let end = lookup_byte_offset(cm, sp.hi); assert begin.fm.start_pos == end.fm.start_pos; return str::slice(*begin.fm.src, begin.pos, end.pos); } -fn get_snippet(cm: codemap::codemap, fidx: uint, lo: uint, hi: uint) -> ~str +fn get_snippet(cm: codemap::CodeMap, fidx: uint, lo: uint, hi: uint) -> ~str { let fm = cm.files[fidx]; return str::slice(*fm.src, lo, hi) } -fn get_filemap(cm: codemap, filename: ~str) -> filemap { +fn get_filemap(cm: CodeMap, filename: ~str) -> filemap { for cm.files.each |fm| { if fm.name == filename { return *fm; } } //XXjdm the following triggers a mismatched type bug // (or expected function, found _|_) diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 2addb3d9e12a9..855b0ca3ef568 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -9,7 +9,7 @@ export codemap_span_handler, codemap_handler; export ice_msg; export expect; -type emitter = fn@(cmsp: 
Option<(codemap::codemap, span)>, +type emitter = fn@(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level); @@ -33,7 +33,7 @@ trait handler { fn note(msg: &str); fn bug(msg: &str) -> !; fn unimpl(msg: &str) -> !; - fn emit(cmsp: Option<(codemap::codemap, span)>, msg: &str, lvl: level); + fn emit(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level); } type handler_t = @{ @@ -43,7 +43,7 @@ type handler_t = @{ type codemap_t = @{ handler: handler, - cm: codemap::codemap + cm: codemap::CodeMap }; impl codemap_t: span_handler { @@ -107,7 +107,7 @@ impl handler_t: handler { self.fatal(ice_msg(msg)); } fn unimpl(msg: &str) -> ! { self.bug(~"unimplemented " + msg); } - fn emit(cmsp: Option<(codemap::codemap, span)>, msg: &str, lvl: level) { + fn emit(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level) { self.emit(cmsp, msg, lvl); } } @@ -116,7 +116,7 @@ fn ice_msg(msg: &str) -> ~str { fmt!("internal compiler error: %s", msg) } -fn mk_span_handler(handler: handler, cm: codemap::codemap) -> span_handler { +fn mk_span_handler(handler: handler, cm: codemap::CodeMap) -> span_handler { @{ handler: handler, cm: cm } as span_handler } @@ -125,7 +125,7 @@ fn mk_handler(emitter: Option) -> handler { let emit = match emitter { Some(e) => e, None => { - let f = fn@(cmsp: Option<(codemap::codemap, span)>, + let f = fn@(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, t: level) { emit(cmsp, msg, t); }; @@ -189,8 +189,7 @@ fn print_diagnostic(topic: ~str, lvl: level, msg: &str) { io::stderr().write_str(fmt!(" %s\n", msg)); } -fn emit(cmsp: Option<(codemap::codemap, span)>, - msg: &str, lvl: level) { +fn emit(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level) { match cmsp { Some((cm, sp)) => { let sp = codemap::adjust_span(cm,sp); @@ -206,7 +205,7 @@ fn emit(cmsp: Option<(codemap::codemap, span)>, } } -fn highlight_lines(cm: codemap::codemap, sp: span, +fn highlight_lines(cm: codemap::CodeMap, sp: span, lines: @codemap::file_lines) { let fm = lines.file; @@ -261,7 +260,7 @@ fn highlight_lines(cm: codemap::codemap, sp: span, } } -fn print_macro_backtrace(cm: codemap::codemap, sp: span) { +fn print_macro_backtrace(cm: codemap::CodeMap, sp: span) { do option::iter(&sp.expn_info) |ei| { let ss = option::map_default(&ei.callie.span, @~"", |span| @codemap::span_to_str(*span, cm)); diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs index b06536f4e026a..7e5e68ffff90e 100644 --- a/src/libsyntax/ext/auto_serialize.rs +++ b/src/libsyntax/ext/auto_serialize.rs @@ -13,16 +13,16 @@ For example, a type like: would generate two implementations like: - impl Node: Serializable { - fn serialize(s: &S) { + impl node_id: Serializable { + fn serialize(s: &S) { do s.emit_struct("Node") { s.emit_field("id", 0, || s.emit_uint(self)) } } } - impl node_id: Deserializable { - static fn deserialize(d: &D) -> Node { + impl node_id: Deserializable { + static fn deserialize(d: &D) -> Node { do d.read_struct("Node") { Node { id: d.read_field(~"x", 0, || deserialize(d)) @@ -40,7 +40,10 @@ references other non-built-in types. 
A type definition like: would yield functions like: - impl spanned: Serializable { + impl< + S: Serializer, + T: Serializable + > spanned: Serializable { fn serialize(s: &S) { do s.emit_rec { s.emit_field("node", 0, || self.node.serialize(s)); @@ -49,8 +52,11 @@ would yield functions like: } } - impl spanned: Deserializable { - static fn deserialize(d: &D) -> spanned { + impl< + D: Deserializer, + T: Deserializable + > spanned: Deserializable { + static fn deserialize(d: &D) -> spanned { do d.read_rec { { node: d.read_field(~"node", 0, || deserialize(d)), @@ -215,6 +221,25 @@ fn expand_auto_deserialize( } priv impl ext_ctxt { + fn bind_path( + span: span, + ident: ast::ident, + path: @ast::path, + bounds: @~[ast::ty_param_bound] + ) -> ast::ty_param { + let bound = ast::bound_trait(@{ + id: self.next_id(), + node: ast::ty_path(path, self.next_id()), + span: span, + }); + + { + ident: ident, + id: self.next_id(), + bounds: @vec::append(~[bound], *bounds) + } + } + fn expr(span: span, node: ast::expr_) -> @ast::expr { @{id: self.next_id(), callee_id: self.next_id(), node: node, span: span} @@ -225,12 +250,12 @@ priv impl ext_ctxt { } fn path_tps(span: span, strs: ~[ast::ident], - tps: ~[@ast::ty]) -> @ast::path { + tps: ~[@ast::Ty]) -> @ast::path { @{span: span, global: false, idents: strs, rp: None, types: tps} } fn ty_path(span: span, strs: ~[ast::ident], - tps: ~[@ast::ty]) -> @ast::ty { + tps: ~[@ast::Ty]) -> @ast::Ty { @{id: self.next_id(), node: ast::ty_path(self.path_tps(span, strs, tps), self.next_id()), span: span} @@ -332,24 +357,28 @@ fn mk_impl( cx: ext_ctxt, span: span, ident: ast::ident, + ty_param: ast::ty_param, path: @ast::path, tps: ~[ast::ty_param], - f: fn(@ast::ty) -> @ast::method + f: fn(@ast::Ty) -> @ast::method ) -> @ast::item { // All the type parameters need to bound to the trait. - let trait_tps = do tps.map |tp| { - let t_bound = ast::bound_trait(@{ - id: cx.next_id(), - node: ast::ty_path(path, cx.next_id()), - span: span, - }); + let mut trait_tps = vec::append( + ~[ty_param], + do tps.map |tp| { + let t_bound = ast::bound_trait(@{ + id: cx.next_id(), + node: ast::ty_path(path, cx.next_id()), + span: span, + }); - { - ident: tp.ident, - id: cx.next_id(), - bounds: @vec::append(~[t_bound], *tp.bounds) + { + ident: tp.ident, + id: cx.next_id(), + bounds: @vec::append(~[t_bound], *tp.bounds) + } } - }; + ); let opt_trait = Some(@{ path: path, @@ -382,20 +411,37 @@ fn mk_ser_impl( tps: ~[ast::ty_param], body: @ast::expr ) -> @ast::item { + // Make a path to the std::serialization::Serializable typaram. + let ty_param = cx.bind_path( + span, + cx.ident_of(~"__S"), + cx.path( + span, + ~[ + cx.ident_of(~"std"), + cx.ident_of(~"serialization"), + cx.ident_of(~"Serializer"), + ] + ), + @~[] + ); + // Make a path to the std::serialization::Serializable trait. - let path = cx.path( + let path = cx.path_tps( span, ~[ cx.ident_of(~"std"), cx.ident_of(~"serialization"), cx.ident_of(~"Serializable"), - ] + ], + ~[cx.ty_path(span, ~[cx.ident_of(~"__S")], ~[])] ); mk_impl( cx, span, ident, + ty_param, path, tps, |_ty| mk_ser_method(cx, span, cx.expr_blk(body)) @@ -409,20 +455,37 @@ fn mk_deser_impl( tps: ~[ast::ty_param], body: @ast::expr ) -> @ast::item { + // Make a path to the std::serialization::Deserializable typaram. 
+ let ty_param = cx.bind_path( + span, + cx.ident_of(~"__D"), + cx.path( + span, + ~[ + cx.ident_of(~"std"), + cx.ident_of(~"serialization"), + cx.ident_of(~"Deserializer"), + ] + ), + @~[] + ); + // Make a path to the std::serialization::Deserializable trait. - let path = cx.path( + let path = cx.path_tps( span, ~[ cx.ident_of(~"std"), cx.ident_of(~"serialization"), cx.ident_of(~"Deserializable"), - ] + ], + ~[cx.ty_path(span, ~[cx.ident_of(~"__D")], ~[])] ); mk_impl( cx, span, ident, + ty_param, path, tps, |ty| mk_deser_method(cx, span, ty, cx.expr_blk(body)) @@ -434,22 +497,6 @@ fn mk_ser_method( span: span, ser_body: ast::blk ) -> @ast::method { - let ser_bound = cx.ty_path( - span, - ~[ - cx.ident_of(~"std"), - cx.ident_of(~"serialization"), - cx.ident_of(~"Serializer"), - ], - ~[] - ); - - let ser_tps = ~[{ - ident: cx.ident_of(~"__S"), - id: cx.next_id(), - bounds: @~[ast::bound_trait(ser_bound)], - }]; - let ty_s = @{ id: cx.next_id(), node: ast::ty_rptr( @@ -487,7 +534,7 @@ fn mk_ser_method( @{ ident: cx.ident_of(~"serialize"), attrs: ~[], - tps: ser_tps, + tps: ~[], self_ty: { node: ast::sty_region(ast::m_imm), span: span }, purity: ast::impure_fn, decl: ser_decl, @@ -502,25 +549,9 @@ fn mk_ser_method( fn mk_deser_method( cx: ext_ctxt, span: span, - ty: @ast::ty, + ty: @ast::Ty, deser_body: ast::blk ) -> @ast::method { - let deser_bound = cx.ty_path( - span, - ~[ - cx.ident_of(~"std"), - cx.ident_of(~"serialization"), - cx.ident_of(~"Deserializer"), - ], - ~[] - ); - - let deser_tps = ~[{ - ident: cx.ident_of(~"__D"), - id: cx.next_id(), - bounds: @~[ast::bound_trait(deser_bound)], - }]; - let ty_d = @{ id: cx.next_id(), node: ast::ty_rptr( @@ -552,7 +583,7 @@ fn mk_deser_method( @{ ident: cx.ident_of(~"deserialize"), attrs: ~[], - tps: deser_tps, + tps: ~[], self_ty: { node: ast::sty_static, span: span }, purity: ast::impure_fn, decl: deser_decl, diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 5894758cd85ca..5b4cc23ce09fd 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -1,7 +1,7 @@ use std::map::HashMap; use parse::parser; use diagnostic::span_handler; -use codemap::{codemap, span, expn_info, expanded_from}; +use codemap::{CodeMap, span, expn_info, expanded_from}; // obsolete old-style #macro code: // @@ -124,7 +124,7 @@ fn syntax_expander_table() -> HashMap<~str, syntax_extension> { // when a macro expansion occurs, the resulting nodes have the backtrace() // -> expn_info of their expansion context stored into their span. 
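For context, user code opts into this derivation by tagging a type definition with the attribute pair, exactly as `ast.rs` does throughout this patch; the `point` record is a hypothetical example, and the expansion now produces a `Serializable` impl parameterized over `__S: Serializer` and a `Deserializable` impl parameterized over `__D: Deserializer`, matching the bounds `mk_ser_impl`/`mk_deser_impl` construct above:

~~~~
#[auto_serialize]
#[auto_deserialize]
type point = {x: uint, y: uint};
~~~~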
trait ext_ctxt { - fn codemap() -> codemap; + fn codemap() -> CodeMap; fn parse_sess() -> parse::parse_sess; fn cfg() -> ast::crate_cfg; fn print_backtrace(); @@ -156,7 +156,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess, mut mod_path: ~[ast::ident], mut trace_mac: bool}; impl ctxt_repr: ext_ctxt { - fn codemap() -> codemap { self.parse_sess.cm } + fn codemap() -> CodeMap { self.parse_sess.cm } fn parse_sess() -> parse::parse_sess { self.parse_sess } fn cfg() -> ast::crate_cfg { self.cfg } fn print_backtrace() { } diff --git a/src/libsyntax/ext/pipes.rs b/src/libsyntax/ext/pipes.rs index ad4984c55582d..4d04552bfa15a 100644 --- a/src/libsyntax/ext/pipes.rs +++ b/src/libsyntax/ext/pipes.rs @@ -37,7 +37,7 @@ use codemap::span; use ext::base::ext_ctxt; use ast::tt_delim; use parse::lexer::{new_tt_reader, reader}; -use parse::parser::{parser, SOURCE_FILE}; +use parse::parser::{Parser, SOURCE_FILE}; use parse::common::parser_common; use pipes::parse_proto::proto_parser; @@ -52,7 +52,7 @@ fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident, let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic, cx.parse_sess().interner, None, tt); let rdr = tt_rdr as reader; - let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE); + let rust_parser = Parser(sess, cfg, rdr.dup(), SOURCE_FILE); let proto = rust_parser.parse_proto(cx.str_of(id)); diff --git a/src/libsyntax/ext/pipes/ast_builder.rs b/src/libsyntax/ext/pipes/ast_builder.rs index 4da9992b0dd36..f10cbc2a5898a 100644 --- a/src/libsyntax/ext/pipes/ast_builder.rs +++ b/src/libsyntax/ext/pipes/ast_builder.rs @@ -28,17 +28,17 @@ fn empty_span() -> span { } trait append_types { - fn add_ty(ty: @ast::ty) -> @ast::path; - fn add_tys(+tys: ~[@ast::ty]) -> @ast::path; + fn add_ty(ty: @ast::Ty) -> @ast::path; + fn add_tys(+tys: ~[@ast::Ty]) -> @ast::path; } impl @ast::path: append_types { - fn add_ty(ty: @ast::ty) -> @ast::path { + fn add_ty(ty: @ast::Ty) -> @ast::path { @{types: vec::append_one(self.types, ty), .. *self} } - fn add_tys(+tys: ~[@ast::ty]) -> @ast::path { + fn add_tys(+tys: ~[@ast::Ty]) -> @ast::path { @{types: vec::append(self.types, tys), .. 
*self} } @@ -47,18 +47,18 @@ impl @ast::path: append_types { trait ext_ctxt_ast_builder { fn ty_param(id: ast::ident, +bounds: ~[ast::ty_param_bound]) -> ast::ty_param; - fn arg(name: ident, ty: @ast::ty) -> ast::arg; + fn arg(name: ident, ty: @ast::Ty) -> ast::arg; fn expr_block(e: @ast::expr) -> ast::blk; - fn fn_decl(+inputs: ~[ast::arg], output: @ast::ty) -> ast::fn_decl; + fn fn_decl(+inputs: ~[ast::arg], output: @ast::Ty) -> ast::fn_decl; fn item(name: ident, span: span, +node: ast::item_) -> @ast::item; fn item_fn_poly(name: ident, +inputs: ~[ast::arg], - output: @ast::ty, + output: @ast::Ty, +ty_params: ~[ast::ty_param], +body: ast::blk) -> @ast::item; fn item_fn(name: ident, +inputs: ~[ast::arg], - output: @ast::ty, + output: @ast::Ty, +body: ast::blk) -> @ast::item; fn item_enum_poly(name: ident, span: span, @@ -66,17 +66,17 @@ trait ext_ctxt_ast_builder { +ty_params: ~[ast::ty_param]) -> @ast::item; fn item_enum(name: ident, span: span, +enum_definition: ast::enum_def) -> @ast::item; - fn variant(name: ident, span: span, +tys: ~[@ast::ty]) -> ast::variant; + fn variant(name: ident, span: span, +tys: ~[@ast::Ty]) -> ast::variant; fn item_mod(name: ident, span: span, +items: ~[@ast::item]) -> @ast::item; - fn ty_path_ast_builder(path: @ast::path) -> @ast::ty; + fn ty_path_ast_builder(path: @ast::path) -> @ast::Ty; fn item_ty_poly(name: ident, span: span, - ty: @ast::ty, + ty: @ast::Ty, +params: ~[ast::ty_param]) -> @ast::item; - fn item_ty(name: ident, span: span, ty: @ast::ty) -> @ast::item; - fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::ty]; - fn ty_field_imm(name: ident, ty: @ast::ty) -> ast::ty_field; - fn ty_rec(+v: ~[ast::ty_field]) -> @ast::ty; + fn item_ty(name: ident, span: span, ty: @ast::Ty) -> @ast::item; + fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::Ty]; + fn ty_field_imm(name: ident, ty: @ast::Ty) -> ast::ty_field; + fn ty_rec(+v: ~[ast::ty_field]) -> @ast::Ty; fn field_imm(name: ident, e: @ast::expr) -> ast::field; fn rec(+v: ~[ast::field]) -> @ast::expr; fn block(+stmts: ~[@ast::stmt], e: @ast::expr) -> ast::blk; @@ -84,11 +84,11 @@ trait ext_ctxt_ast_builder { fn stmt_expr(e: @ast::expr) -> @ast::stmt; fn block_expr(b: ast::blk) -> @ast::expr; fn empty_span() -> span; - fn ty_option(ty: @ast::ty) -> @ast::ty; + fn ty_option(ty: @ast::Ty) -> @ast::Ty; } impl ext_ctxt: ext_ctxt_ast_builder { - fn ty_option(ty: @ast::ty) -> @ast::ty { + fn ty_option(ty: @ast::Ty) -> @ast::Ty { self.ty_path_ast_builder(path(~[self.ident_of(~"Option")], self.empty_span()) .add_ty(ty)) @@ -146,18 +146,18 @@ impl ext_ctxt: ext_ctxt_ast_builder { span: self.empty_span()} } - fn ty_field_imm(name: ident, ty: @ast::ty) -> ast::ty_field { + fn ty_field_imm(name: ident, ty: @ast::Ty) -> ast::ty_field { {node: {ident: name, mt: { ty: ty, mutbl: ast::m_imm } }, span: self.empty_span()} } - fn ty_rec(+fields: ~[ast::ty_field]) -> @ast::ty { + fn ty_rec(+fields: ~[ast::ty_field]) -> @ast::Ty { @{id: self.next_id(), node: ast::ty_rec(fields), span: self.empty_span()} } - fn ty_infer() -> @ast::ty { + fn ty_infer() -> @ast::Ty { @{id: self.next_id(), node: ast::ty_infer, span: self.empty_span()} @@ -169,7 +169,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { {ident: id, id: self.next_id(), bounds: @bounds} } - fn arg(name: ident, ty: @ast::ty) -> ast::arg { + fn arg(name: ident, ty: @ast::Ty) -> ast::arg { {mode: ast::infer(self.next_id()), ty: ty, ident: name, @@ -192,7 +192,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { } fn fn_decl(+inputs: ~[ast::arg], - output: @ast::ty) -> ast::fn_decl { 
+ output: @ast::Ty) -> ast::fn_decl { {inputs: inputs, output: output, cf: ast::return_val} @@ -224,7 +224,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { fn item_fn_poly(name: ident, +inputs: ~[ast::arg], - output: @ast::ty, + output: @ast::Ty, +ty_params: ~[ast::ty_param], +body: ast::blk) -> @ast::item { self.item(name, @@ -237,7 +237,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { fn item_fn(name: ident, +inputs: ~[ast::arg], - output: @ast::ty, + output: @ast::Ty, +body: ast::blk) -> @ast::item { self.item_fn_poly(name, inputs, output, ~[], body) } @@ -256,7 +256,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { fn variant(name: ident, span: span, - +tys: ~[@ast::ty]) -> ast::variant { + +tys: ~[@ast::Ty]) -> ast::variant { let args = tys.map(|ty| {ty: *ty, id: self.next_id()}); {node: {name: name, @@ -278,13 +278,13 @@ impl ext_ctxt: ext_ctxt_ast_builder { items: items})) } - fn ty_path_ast_builder(path: @ast::path) -> @ast::ty { + fn ty_path_ast_builder(path: @ast::path) -> @ast::Ty { @{id: self.next_id(), node: ast::ty_path(path, self.next_id()), span: path.span} } - fn ty_nil_ast_builder() -> @ast::ty { + fn ty_nil_ast_builder() -> @ast::Ty { @{id: self.next_id(), node: ast::ty_nil, span: self.empty_span()} @@ -292,16 +292,16 @@ impl ext_ctxt: ext_ctxt_ast_builder { fn item_ty_poly(name: ident, span: span, - ty: @ast::ty, + ty: @ast::Ty, +params: ~[ast::ty_param]) -> @ast::item { self.item(name, span, ast::item_ty(ty, params)) } - fn item_ty(name: ident, span: span, ty: @ast::ty) -> @ast::item { + fn item_ty(name: ident, span: span, ty: @ast::Ty) -> @ast::item { self.item_ty_poly(name, span, ty, ~[]) } - fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::ty] { + fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::Ty] { ty_params.map(|p| self.ty_path_ast_builder( path(~[p.ident], self.empty_span()))) } diff --git a/src/libsyntax/ext/pipes/check.rs b/src/libsyntax/ext/pipes/check.rs index 5fcc00ef01217..fcc0c84a4ff39 100644 --- a/src/libsyntax/ext/pipes/check.rs +++ b/src/libsyntax/ext/pipes/check.rs @@ -38,7 +38,7 @@ impl ext_ctxt: proto::visitor<(), (), ()> { } } - fn visit_message(name: ~str, _span: span, _tys: &[@ast::ty], + fn visit_message(name: ~str, _span: span, _tys: &[@ast::Ty], this: state, next: next_state) { match next { Some({state: next, tys: next_tys}) => { @@ -68,4 +68,4 @@ impl ext_ctxt: proto::visitor<(), (), ()> { None => () } } -} \ No newline at end of file +} diff --git a/src/libsyntax/ext/pipes/parse_proto.rs b/src/libsyntax/ext/pipes/parse_proto.rs index 5c15b616b4aef..8f2b92a720c07 100644 --- a/src/libsyntax/ext/pipes/parse_proto.rs +++ b/src/libsyntax/ext/pipes/parse_proto.rs @@ -10,7 +10,7 @@ trait proto_parser { fn parse_state(proto: protocol); } -impl parser: proto_parser { +impl parser::Parser: proto_parser { fn parse_proto(id: ~str) -> protocol { let proto = protocol(id, self.span); diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs index 874ea01e9b01d..7e1cbe9ad0dbf 100644 --- a/src/libsyntax/ext/pipes/pipec.rs +++ b/src/libsyntax/ext/pipes/pipec.rs @@ -181,7 +181,7 @@ impl message: gen_send { } } - fn to_ty(cx: ext_ctxt) -> @ast::ty { + fn to_ty(cx: ext_ctxt) -> @ast::Ty { cx.ty_path_ast_builder(path(~[cx.ident_of(self.name())], self.span()) .add_tys(cx.ty_vars(self.get_params()))) } @@ -360,7 +360,7 @@ impl protocol: gen_init { }} } - fn buffer_ty_path(cx: ext_ctxt) -> @ast::ty { + fn buffer_ty_path(cx: ext_ctxt) -> @ast::Ty { let mut params: ~[ast::ty_param] = ~[]; for (copy self.states).each |s| { for s.ty_params.each |tp| { @@ 
-444,13 +444,13 @@ impl ~[@ast::item]: to_source { } } -impl @ast::ty: to_source { +impl @ast::Ty: to_source { fn to_source(cx: ext_ctxt) -> ~str { ty_to_str(self, cx.parse_sess().interner) } } -impl ~[@ast::ty]: to_source { +impl ~[@ast::Ty]: to_source { fn to_source(cx: ext_ctxt) -> ~str { str::connect(self.map(|i| i.to_source(cx)), ~", ") } diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs index a501df4c32d2b..229e55fdfcc6a 100644 --- a/src/libsyntax/ext/pipes/proto.rs +++ b/src/libsyntax/ext/pipes/proto.rs @@ -35,11 +35,11 @@ impl direction { } } -type next_state = Option<{state: ~str, tys: ~[@ast::ty]}>; +type next_state = Option<{state: ~str, tys: ~[@ast::Ty]}>; enum message { // name, span, data, current state, next state - message(~str, span, ~[@ast::ty], state, next_state) + message(~str, span, ~[@ast::Ty], state, next_state) } impl message { @@ -78,7 +78,7 @@ enum state { impl state { fn add_message(name: ~str, span: span, - +data: ~[@ast::ty], next: next_state) { + +data: ~[@ast::Ty], next: next_state) { self.messages.push(message(name, span, data, self, next)); } @@ -92,7 +92,7 @@ impl state { } /// Returns the type that is used for the messages. - fn to_ty(cx: ext_ctxt) -> @ast::ty { + fn to_ty(cx: ext_ctxt) -> @ast::Ty { cx.ty_path_ast_builder (path(~[cx.ident_of(self.name)],self.span).add_tys( cx.ty_vars(self.ty_params))) @@ -200,7 +200,7 @@ impl protocol { trait visitor { fn visit_proto(proto: protocol, st: &[Tstate]) -> Tproto; fn visit_state(state: state, m: &[Tmessage]) -> Tstate; - fn visit_message(name: ~str, spane: span, tys: &[@ast::ty], + fn visit_message(name: ~str, spane: span, tys: &[@ast::Ty], this: state, next: next_state) -> Tmessage; } diff --git a/src/libsyntax/ext/qquote.rs b/src/libsyntax/ext/qquote.rs index ee9602598d1ed..a83789642ccc8 100644 --- a/src/libsyntax/ext/qquote.rs +++ b/src/libsyntax/ext/qquote.rs @@ -1,7 +1,7 @@ use ast::{crate, expr_, mac_invoc, mac_aq, mac_var}; use parse::parser; -use parse::parser::parse_from_source_str; +use parse::parser::{Parser, parse_from_source_str}; use dvec::DVec; use parse::token::ident_interner; @@ -24,7 +24,7 @@ struct gather_item { type aq_ctxt = @{lo: uint, gather: DVec}; enum fragment { from_expr(@ast::expr), - from_ty(@ast::ty) + from_ty(@ast::Ty) } fn ids_ext(cx: ext_ctxt, strs: ~[~str]) -> ~[ast::ident] { @@ -68,7 +68,7 @@ impl @ast::expr: qq_helper { } fn get_fold_fn() -> ~str {~"fold_expr"} } -impl @ast::ty: qq_helper { +impl @ast::Ty: qq_helper { fn span() -> span {self.span} fn visit(cx: aq_ctxt, v: vt) {visit_ty(self, cx, v);} fn extract_mac() -> Option { @@ -186,13 +186,13 @@ fn expand_ast(ecx: ext_ctxt, _sp: span, }; } -fn parse_crate(p: parser) -> @ast::crate { p.parse_crate_mod(~[]) } -fn parse_ty(p: parser) -> @ast::ty { p.parse_ty(false) } -fn parse_stmt(p: parser) -> @ast::stmt { p.parse_stmt(~[]) } -fn parse_expr(p: parser) -> @ast::expr { p.parse_expr() } -fn parse_pat(p: parser) -> @ast::pat { p.parse_pat(true) } +fn parse_crate(p: Parser) -> @ast::crate { p.parse_crate_mod(~[]) } +fn parse_ty(p: Parser) -> @ast::Ty { p.parse_ty(false) } +fn parse_stmt(p: Parser) -> @ast::stmt { p.parse_stmt(~[]) } +fn parse_expr(p: Parser) -> @ast::expr { p.parse_expr() } +fn parse_pat(p: Parser) -> @ast::pat { p.parse_pat(true) } -fn parse_item(p: parser) -> @ast::item { +fn parse_item(p: Parser) -> @ast::item { match p.parse_item(~[]) { Some(item) => item, None => fail ~"parse_item: parsing an item failed" @@ -200,7 +200,7 @@ fn parse_item(p: parser) -> @ast::item { } fn 
finish - (ecx: ext_ctxt, body: ast::mac_body_, f: fn (p: parser) -> T) + (ecx: ext_ctxt, body: ast::mac_body_, f: fn (p: Parser) -> T) -> @ast::expr { let cm = ecx.codemap(); @@ -309,7 +309,7 @@ fn fold_crate(f: ast_fold, &&n: @ast::crate) -> @ast::crate { @f.fold_crate(*n) } fn fold_expr(f: ast_fold, &&n: @ast::expr) -> @ast::expr {f.fold_expr(n)} -fn fold_ty(f: ast_fold, &&n: @ast::ty) -> @ast::ty {f.fold_ty(n)} +fn fold_ty(f: ast_fold, &&n: @ast::Ty) -> @ast::Ty {f.fold_ty(n)} fn fold_item(f: ast_fold, &&n: @ast::item) -> @ast::item { f.fold_item(n).get() //HACK: we know we don't drop items } diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs index e16e1c5534997..bec29c9a83540 100644 --- a/src/libsyntax/ext/simplext.rs +++ b/src/libsyntax/ext/simplext.rs @@ -6,7 +6,7 @@ use base::*; use fold::*; use ast_util::respan; -use ast::{ident, path, ty, blk_, expr, expr_path, +use ast::{ident, path, Ty, blk_, expr, expr_path, expr_vec, expr_mac, mac_invoc, node_id, expr_index}; export add_new_extension; @@ -29,7 +29,7 @@ enum matchable { match_expr(@expr), match_path(@path), match_ident(ast::spanned), - match_ty(@ty), + match_ty(@Ty), match_block(ast::blk), match_exact, /* don't bind anything, just verify the AST traversal */ } diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs index c2d4de1b423c4..0c7d408db7cc3 100644 --- a/src/libsyntax/ext/trace_macros.rs +++ b/src/libsyntax/ext/trace_macros.rs @@ -2,7 +2,7 @@ use codemap::span; use ext::base::ext_ctxt; use ast::tt_delim; use parse::lexer::{new_tt_reader, reader}; -use parse::parser::{parser, SOURCE_FILE}; +use parse::parser::{Parser, SOURCE_FILE}; use parse::common::parser_common; fn expand_trace_macros(cx: ext_ctxt, sp: span, @@ -13,7 +13,7 @@ fn expand_trace_macros(cx: ext_ctxt, sp: span, let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic, cx.parse_sess().interner, None, tt); let rdr = tt_rdr as reader; - let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE); + let rust_parser = Parser(sess, cfg, rdr.dup(), SOURCE_FILE); let arg = cx.str_of(rust_parser.parse_ident()); match arg { @@ -21,7 +21,7 @@ fn expand_trace_macros(cx: ext_ctxt, sp: span, ~"false" => cx.set_trace_macros(false), _ => cx.span_fatal(sp, ~"trace_macros! only accepts `true` or `false`") } - let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE); + let rust_parser = Parser(sess, cfg, rdr.dup(), SOURCE_FILE); let result = rust_parser.parse_expr(); base::mr_expr(result) } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 0b2070c8c86df..17122b85fb360 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -1,9 +1,9 @@ // Earley-like parser for macros. use parse::token; -use parse::token::{token, EOF, to_str, nonterminal}; +use parse::token::{Token, EOF, to_str, nonterminal}; use parse::lexer::*; //resolve bug? //import parse::lexer::{reader, tt_reader, tt_reader_as_reader}; -use parse::parser::{parser,SOURCE_FILE}; +use parse::parser::{Parser, SOURCE_FILE}; //import parse::common::parser_common; use parse::common::*; //resolve bug? use parse::parse_sess; @@ -97,7 +97,7 @@ fn is_some(&&mpu: matcher_pos_up) -> bool { type matcher_pos = ~{ elts: ~[ast::matcher], // maybe should be /&? Need to understand regions. 
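The quasiquote and macro plumbing above keeps one driver that constructs a `Parser` and hands it to whichever entry point the caller selects (`parse_crate`, `parse_ty`, `parse_expr`, and so on, all typed `fn(p: Parser) -> T`). A small sketch of that callback shape follows, with invented `Parser`, `Expr`, and `Item` types standing in for the real ones.

~~~~
// Sketch of the "hand the parser to a parse callback" pattern (invented
// types; not the patched API).
struct Parser {
    src: String,
}

#[derive(Debug)]
struct Expr(String);

#[derive(Debug)]
struct Item(String);

// One driver owns Parser construction; the caller picks what to parse.
fn with_parser<T>(source: &str, f: impl FnOnce(&mut Parser) -> T) -> T {
    let mut p = Parser { src: source.to_string() };
    f(&mut p)
}

// Stand-ins for entry points like `parse_expr` / `parse_item`.
fn parse_expr(p: &mut Parser) -> Expr {
    Expr(p.src.trim().to_string())
}

fn parse_item(p: &mut Parser) -> Item {
    Item(p.src.trim().to_string())
}

fn main() {
    let e = with_parser(" 1 + 2 ", parse_expr);
    let i = with_parser("fn f() {}", parse_item);
    println!("{:?} / {:?}", e, i);
}
~~~~

Passing the parse function as a value keeps `finish`-style helpers generic over what is being parsed without duplicating the setup and teardown around the parser.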
- sep: Option, + sep: Option, mut idx: uint, mut up: matcher_pos_up, // mutable for swapping only matches: ~[DVec<@named_match>], @@ -122,7 +122,7 @@ fn count_names(ms: &[matcher]) -> uint { } #[allow(non_implicitly_copyable_typarams)] -fn initial_matcher_pos(ms: ~[matcher], sep: Option, lo: uint) +fn initial_matcher_pos(ms: ~[matcher], sep: Option, lo: uint) -> matcher_pos { let mut match_idx_hi = 0u; for ms.each() |elt| { @@ -354,7 +354,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) } rdr.next_token(); } else /* bb_eis.len() == 1 */ { - let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE); + let rust_parser = Parser(sess, cfg, rdr.dup(), SOURCE_FILE); let ei = bb_eis.pop(); match ei.elts[ei.idx].node { @@ -381,7 +381,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) } } -fn parse_nt(p: parser, name: ~str) -> nonterminal { +fn parse_nt(p: Parser, name: ~str) -> nonterminal { match name { ~"item" => match p.parse_item(~[]) { Some(i) => token::nt_item(i), diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 52369ad7207f0..31bc375a76d56 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -4,7 +4,7 @@ use ast::{ident, matcher_, matcher, match_tok, match_nonterminal, match_seq, tt_delim}; use parse::lexer::{new_tt_reader, reader}; use parse::token::{FAT_ARROW, SEMI, LBRACE, RBRACE, nt_matchers, nt_tt}; -use parse::parser::{parser, SOURCE_FILE}; +use parse::parser::{Parser, SOURCE_FILE}; use macro_parser::{parse, parse_or_else, success, failure, named_match, matched_seq, matched_nonterminal, error}; use std::map::HashMap; @@ -86,7 +86,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, // rhs has holes ( `$id` and `$(...)` that need filled) let trncbr = new_tt_reader(s_d, itr, Some(named_matches), ~[rhs]); - let p = parser(cx.parse_sess(), cx.cfg(), + let p = Parser(cx.parse_sess(), cx.cfg(), trncbr as reader, SOURCE_FILE); let e = p.parse_expr(); return mr_expr(e); @@ -111,4 +111,4 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, name: *cx.parse_sess().interner.get(name), ext: expr_tt({expander: exp, span: Some(sp)}) }); -} \ No newline at end of file +} diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index a8a41cca6cbd7..238f9db6ac537 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -2,8 +2,7 @@ use diagnostic::span_handler; use ast::{token_tree, tt_delim, tt_tok, tt_seq, tt_nonterminal,ident}; use macro_parser::{named_match, matched_seq, matched_nonterminal}; use codemap::span; -use parse::token::{EOF, INTERPOLATED, IDENT, token, nt_ident, - ident_interner}; +use parse::token::{EOF, INTERPOLATED, IDENT, Token, nt_ident, ident_interner}; use std::map::HashMap; export tt_reader, new_tt_reader, dup_tt_reader, tt_next_token; @@ -19,7 +18,7 @@ type tt_frame = @{ readme: ~[ast::token_tree], mut idx: uint, dotdotdoted: bool, - sep: Option, + sep: Option, up: tt_frame_up, }; @@ -32,7 +31,7 @@ type tt_reader = @{ mut repeat_idx: ~[uint], mut repeat_len: ~[uint], /* cached: */ - mut cur_tok: token, + mut cur_tok: Token, mut cur_span: span }; @@ -134,7 +133,7 @@ fn lockstep_iter_size(t: token_tree, r: tt_reader) -> lis { } -fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} { +fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} { let ret_val = { tok: r.cur_tok, sp: r.cur_span }; while r.cur.idx >= r.cur.readme.len() { /* done with 
this set; pop or repeat? */ diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 68d9cd80430d3..564debefa2539 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -33,7 +33,7 @@ trait ast_fold { fn fold_pat(&&v: @pat) -> @pat; fn fold_decl(&&v: @decl) -> @decl; fn fold_expr(&&v: @expr) -> @expr; - fn fold_ty(&&v: @ty) -> @ty; + fn fold_ty(&&v: @Ty) -> @Ty; fn fold_mod(_mod) -> _mod; fn fold_foreign_mod(foreign_mod) -> foreign_mod; fn fold_variant(variant) -> variant; @@ -728,7 +728,7 @@ impl ast_fold_precursor: ast_fold { node: n, span: self.new_span(s)}; } - fn fold_ty(&&x: @ty) -> @ty { + fn fold_ty(&&x: @Ty) -> @Ty { let (n, s) = self.fold_ty(x.node, x.span, self as ast_fold); return @{id: self.new_id(x.id), node: n, span: self.new_span(s)}; } diff --git a/src/libsyntax/parse.rs b/src/libsyntax/parse.rs index 2c04b2a14190c..e38ee7ff03763 100644 --- a/src/libsyntax/parse.rs +++ b/src/libsyntax/parse.rs @@ -12,7 +12,7 @@ export parse_expr_from_source_str, parse_item_from_source_str; export parse_stmt_from_source_str; export parse_from_source_str; -use parser::parser; +use parser::Parser; use attr::parser_attr; use common::parser_common; use ast::node_id; @@ -22,7 +22,7 @@ use lexer::{reader, string_reader}; use parse::token::{ident_interner, mk_ident_interner}; type parse_sess = @{ - cm: codemap::codemap, + cm: codemap::CodeMap, mut next_id: node_id, span_diagnostic: span_handler, interner: @ident_interner, @@ -40,7 +40,7 @@ fn new_parse_sess(demitter: Option) -> parse_sess { mut chpos: 0u, mut byte_pos: 0u}; } -fn new_parse_sess_special_handler(sh: span_handler, cm: codemap::codemap) +fn new_parse_sess_special_handler(sh: span_handler, cm: codemap::CodeMap) -> parse_sess { return @{cm: cm, mut next_id: 1, @@ -142,7 +142,7 @@ fn parse_stmt_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg, return r; } -fn parse_from_source_str(f: fn (p: parser) -> T, +fn parse_from_source_str(f: fn (p: Parser) -> T, name: ~str, ss: codemap::file_substr, source: @~str, cfg: ast::crate_cfg, sess: parse_sess) @@ -170,19 +170,19 @@ fn next_node_id(sess: parse_sess) -> node_id { fn new_parser_etc_from_source_str(sess: parse_sess, cfg: ast::crate_cfg, +name: ~str, +ss: codemap::file_substr, - source: @~str) -> (parser, string_reader) { + source: @~str) -> (Parser, string_reader) { let ftype = parser::SOURCE_FILE; let filemap = codemap::new_filemap_w_substr (name, ss, source, sess.chpos, sess.byte_pos); sess.cm.files.push(filemap); let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap, sess.interner); - return (parser(sess, cfg, srdr as reader, ftype), srdr); + return (Parser(sess, cfg, srdr as reader, ftype), srdr); } fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg, +name: ~str, +ss: codemap::file_substr, - source: @~str) -> parser { + source: @~str) -> Parser { let (p, _) = new_parser_etc_from_source_str(sess, cfg, name, ss, source); move p } @@ -190,7 +190,7 @@ fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg, fn new_parser_etc_from_file(sess: parse_sess, cfg: ast::crate_cfg, path: &Path, ftype: parser::file_type) -> - (parser, string_reader) { + (Parser, string_reader) { let res = io::read_whole_file_str(path); match res { result::Ok(_) => { /* Continue. 
*/ } @@ -202,18 +202,18 @@ fn new_parser_etc_from_file(sess: parse_sess, cfg: ast::crate_cfg, sess.cm.files.push(filemap); let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap, sess.interner); - return (parser(sess, cfg, srdr as reader, ftype), srdr); + return (Parser(sess, cfg, srdr as reader, ftype), srdr); } fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, path: &Path, - ftype: parser::file_type) -> parser { + ftype: parser::file_type) -> Parser { let (p, _) = new_parser_etc_from_file(sess, cfg, path, ftype); move p } fn new_parser_from_tt(sess: parse_sess, cfg: ast::crate_cfg, - tt: ~[ast::token_tree]) -> parser { + tt: ~[ast::token_tree]) -> Parser { let trdr = lexer::new_tt_reader(sess.span_diagnostic, sess.interner, None, tt); - return parser(sess, cfg, trdr as reader, parser::SOURCE_FILE) + return Parser(sess, cfg, trdr as reader, parser::SOURCE_FILE) } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 9be4909814b13..42101a431d6c6 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -23,7 +23,7 @@ trait parser_attr { fn parse_optional_meta() -> ~[@ast::meta_item]; } -impl parser: parser_attr { +impl Parser: parser_attr { fn parse_outer_attrs_or_ext(first_item_attrs: ~[ast::attribute]) -> attr_or_ext diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index c8c30ee7fa9cb..50c22c08f4f88 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -1,63 +1,63 @@ use std::map::{HashMap}; use ast_util::spanned; -use parser::parser; +use parser::Parser; use lexer::reader; type seq_sep = { - sep: Option, + sep: Option, trailing_sep_allowed: bool }; -fn seq_sep_trailing_disallowed(t: token::token) -> seq_sep { +fn seq_sep_trailing_disallowed(t: token::Token) -> seq_sep { return {sep: option::Some(t), trailing_sep_allowed: false}; } -fn seq_sep_trailing_allowed(t: token::token) -> seq_sep { +fn seq_sep_trailing_allowed(t: token::Token) -> seq_sep { return {sep: option::Some(t), trailing_sep_allowed: true}; } fn seq_sep_none() -> seq_sep { return {sep: option::None, trailing_sep_allowed: false}; } -fn token_to_str(reader: reader, ++token: token::token) -> ~str { +fn token_to_str(reader: reader, ++token: token::Token) -> ~str { token::to_str(reader.interner(), token) } trait parser_common { - fn unexpected_last(t: token::token) -> !; + fn unexpected_last(t: token::Token) -> !; fn unexpected() -> !; - fn expect(t: token::token); + fn expect(t: token::Token); fn parse_ident() -> ast::ident; fn parse_path_list_ident() -> ast::path_list_ident; fn parse_value_ident() -> ast::ident; - fn eat(tok: token::token) -> bool; + fn eat(tok: token::Token) -> bool; // A sanity check that the word we are asking for is a known keyword fn require_keyword(word: ~str); - fn token_is_keyword(word: ~str, ++tok: token::token) -> bool; + fn token_is_keyword(word: ~str, ++tok: token::Token) -> bool; fn is_keyword(word: ~str) -> bool; - fn is_any_keyword(tok: token::token) -> bool; + fn is_any_keyword(tok: token::Token) -> bool; fn eat_keyword(word: ~str) -> bool; fn expect_keyword(word: ~str); fn expect_gt(); - fn parse_seq_to_before_gt(sep: Option, - f: fn(parser) -> T) -> ~[T]; - fn parse_seq_to_gt(sep: Option, - f: fn(parser) -> T) -> ~[T]; - fn parse_seq_lt_gt(sep: Option, - f: fn(parser) -> T) -> spanned<~[T]>; - fn parse_seq_to_end(ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> ~[T]; - fn parse_seq_to_before_end(ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> ~[T]; - fn 
parse_unspanned_seq(bra: token::token, - ket: token::token, + fn parse_seq_to_before_gt(sep: Option, + f: fn(Parser) -> T) -> ~[T]; + fn parse_seq_to_gt(sep: Option, + f: fn(Parser) -> T) -> ~[T]; + fn parse_seq_lt_gt(sep: Option, + f: fn(Parser) -> T) -> spanned<~[T]>; + fn parse_seq_to_end(ket: token::Token, sep: seq_sep, + f: fn(Parser) -> T) -> ~[T]; + fn parse_seq_to_before_end(ket: token::Token, sep: seq_sep, + f: fn(Parser) -> T) -> ~[T]; + fn parse_unspanned_seq(bra: token::Token, + ket: token::Token, sep: seq_sep, - f: fn(parser) -> T) -> ~[T]; - fn parse_seq(bra: token::token, ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> spanned<~[T]>; + f: fn(Parser) -> T) -> ~[T]; + fn parse_seq(bra: token::Token, ket: token::Token, sep: seq_sep, + f: fn(Parser) -> T) -> spanned<~[T]>; } -impl parser: parser_common { - fn unexpected_last(t: token::token) -> ! { +impl Parser: parser_common { + fn unexpected_last(t: token::Token) -> ! { self.span_fatal( copy self.last_span, ~"unexpected token: `" + token_to_str(self.reader, t) + ~"`"); @@ -68,7 +68,7 @@ impl parser: parser_common { + token_to_str(self.reader, self.token) + ~"`"); } - fn expect(t: token::token) { + fn expect(t: token::Token) { if self.token == t { self.bump(); } else { @@ -104,7 +104,7 @@ impl parser: parser_common { return self.parse_ident(); } - fn eat(tok: token::token) -> bool { + fn eat(tok: token::Token) -> bool { return if self.token == tok { self.bump(); true } else { false }; } @@ -117,14 +117,14 @@ impl parser: parser_common { } } - fn token_is_word(word: ~str, ++tok: token::token) -> bool { + fn token_is_word(word: ~str, ++tok: token::Token) -> bool { match tok { token::IDENT(sid, false) => { *self.id_to_str(sid) == word } _ => { false } } } - fn token_is_keyword(word: ~str, ++tok: token::token) -> bool { + fn token_is_keyword(word: ~str, ++tok: token::Token) -> bool { self.require_keyword(word); self.token_is_word(word, tok) } @@ -133,7 +133,7 @@ impl parser: parser_common { self.token_is_keyword(word, self.token) } - fn is_any_keyword(tok: token::token) -> bool { + fn is_any_keyword(tok: token::Token) -> bool { match tok { token::IDENT(sid, false) => { self.keywords.contains_key_ref(self.id_to_str(sid)) @@ -216,8 +216,8 @@ impl parser: parser_common { } } - fn parse_seq_to_before_gt(sep: Option, - f: fn(parser) -> T) -> ~[T] { + fn parse_seq_to_before_gt(sep: Option, + f: fn(Parser) -> T) -> ~[T] { let mut first = true; let mut v = ~[]; while self.token != token::GT @@ -235,16 +235,16 @@ impl parser: parser_common { return v; } - fn parse_seq_to_gt(sep: Option, - f: fn(parser) -> T) -> ~[T] { + fn parse_seq_to_gt(sep: Option, + f: fn(Parser) -> T) -> ~[T] { let v = self.parse_seq_to_before_gt(sep, f); self.expect_gt(); return v; } - fn parse_seq_lt_gt(sep: Option, - f: fn(parser) -> T) -> spanned<~[T]> { + fn parse_seq_lt_gt(sep: Option, + f: fn(Parser) -> T) -> spanned<~[T]> { let lo = self.span.lo; self.expect(token::LT); let result = self.parse_seq_to_before_gt::(sep, f); @@ -253,16 +253,16 @@ impl parser: parser_common { return spanned(lo, hi, result); } - fn parse_seq_to_end(ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> ~[T] { + fn parse_seq_to_end(ket: token::Token, sep: seq_sep, + f: fn(Parser) -> T) -> ~[T] { let val = self.parse_seq_to_before_end(ket, sep, f); self.bump(); return val; } - fn parse_seq_to_before_end(ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> ~[T] { + fn parse_seq_to_before_end(ket: token::Token, sep: seq_sep, + f: fn(Parser) -> T) -> ~[T] { let mut 
first: bool = true; let mut v: ~[T] = ~[]; while self.token != ket { @@ -279,10 +279,10 @@ impl parser: parser_common { return v; } - fn parse_unspanned_seq(bra: token::token, - ket: token::token, + fn parse_unspanned_seq(bra: token::Token, + ket: token::Token, sep: seq_sep, - f: fn(parser) -> T) -> ~[T] { + f: fn(Parser) -> T) -> ~[T] { self.expect(bra); let result = self.parse_seq_to_before_end::(ket, sep, f); self.bump(); @@ -291,8 +291,8 @@ impl parser: parser_common { // NB: Do not use this function unless you actually plan to place the // spanned list in the AST. - fn parse_seq(bra: token::token, ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> spanned<~[T]> { + fn parse_seq(bra: token::Token, ket: token::Token, sep: seq_sep, + f: fn(Parser) -> T) -> spanned<~[T]> { let lo = self.span.lo; self.expect(bra); let result = self.parse_seq_to_before_end::(ket, sep, f); diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs index c91060284910f..56c9d4de9f3cd 100644 --- a/src/libsyntax/parse/eval.rs +++ b/src/libsyntax/parse/eval.rs @@ -1,4 +1,4 @@ -use parser::{parser, SOURCE_FILE}; +use parser::{Parser, SOURCE_FILE}; use attr::parser_attr; export eval_crate_directives_to_mod; diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index 06fcc1cf9589f..8f57d733eb51f 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -10,11 +10,11 @@ export string_reader_as_reader, tt_reader_as_reader; trait reader { fn is_eof() -> bool; - fn next_token() -> {tok: token::token, sp: span}; + fn next_token() -> {tok: token::Token, sp: span}; fn fatal(~str) -> !; fn span_diag() -> span_handler; pure fn interner() -> @token::ident_interner; - fn peek() -> {tok: token::token, sp: span}; + fn peek() -> {tok: token::Token, sp: span}; fn dup() -> reader; } @@ -28,7 +28,7 @@ type string_reader = @{ filemap: codemap::filemap, interner: @token::ident_interner, /* cached: */ - mut peek_tok: token::token, + mut peek_tok: token::Token, mut peek_span: span }; @@ -69,7 +69,7 @@ fn dup_string_reader(&&r: string_reader) -> string_reader { impl string_reader: reader { fn is_eof() -> bool { is_eof(self) } - fn next_token() -> {tok: token::token, sp: span} { + fn next_token() -> {tok: token::Token, sp: span} { let ret_val = {tok: self.peek_tok, sp: self.peek_span}; string_advance_token(self); return ret_val; @@ -79,7 +79,7 @@ impl string_reader: reader { } fn span_diag() -> span_handler { self.span_diagnostic } pure fn interner() -> @token::ident_interner { self.interner } - fn peek() -> {tok: token::token, sp: span} { + fn peek() -> {tok: token::Token, sp: span} { {tok: self.peek_tok, sp: self.peek_span} } fn dup() -> reader { dup_string_reader(self) as reader } @@ -87,7 +87,7 @@ impl string_reader: reader { impl tt_reader: reader { fn is_eof() -> bool { self.cur_tok == token::EOF } - fn next_token() -> {tok: token::token, sp: span} { + fn next_token() -> {tok: token::Token, sp: span} { /* weird resolve bug: if the following `if`, or any of its statements are removed, we get resolution errors */ if false { @@ -101,7 +101,7 @@ impl tt_reader: reader { } fn span_diag() -> span_handler { self.sp_diag } pure fn interner() -> @token::ident_interner { self.interner } - fn peek() -> {tok: token::token, sp: span} { + fn peek() -> {tok: token::Token, sp: span} { { tok: self.cur_tok, sp: self.cur_span } } fn dup() -> reader { dup_tt_reader(self) as reader } @@ -196,14 +196,14 @@ fn is_bin_digit(c: char) -> bool { return c == '0' || c == '1'; } // might return a 
sugared-doc-attr fn consume_whitespace_and_comments(rdr: string_reader) - -> Option<{tok: token::token, sp: span}> { + -> Option<{tok: token::Token, sp: span}> { while is_whitespace(rdr.curr) { bump(rdr); } return consume_any_line_comment(rdr); } // might return a sugared-doc-attr fn consume_any_line_comment(rdr: string_reader) - -> Option<{tok: token::token, sp: span}> { + -> Option<{tok: token::Token, sp: span}> { if rdr.curr == '/' { match nextch(rdr) { '/' => { @@ -246,7 +246,7 @@ fn consume_any_line_comment(rdr: string_reader) // might return a sugared-doc-attr fn consume_block_comment(rdr: string_reader) - -> Option<{tok: token::token, sp: span}> { + -> Option<{tok: token::Token, sp: span}> { // block comments starting with "/**" or "/*!" are doc-comments if rdr.curr == '*' || rdr.curr == '!' { @@ -317,7 +317,7 @@ fn scan_digits(rdr: string_reader, radix: uint) -> ~str { }; } -fn scan_number(c: char, rdr: string_reader) -> token::token { +fn scan_number(c: char, rdr: string_reader) -> token::Token { let mut num_str, base = 10u, c = c, n = nextch(rdr); if c == '0' && n == 'x' { bump(rdr); @@ -435,7 +435,7 @@ fn scan_numeric_escape(rdr: string_reader, n_hex_digits: uint) -> char { return accum_int as char; } -fn next_token_inner(rdr: string_reader) -> token::token { +fn next_token_inner(rdr: string_reader) -> token::Token { let mut accum_str = ~""; let mut c = rdr.curr; if (c >= 'a' && c <= 'z') @@ -460,7 +460,7 @@ fn next_token_inner(rdr: string_reader) -> token::token { if is_dec_digit(c) { return scan_number(c, rdr); } - fn binop(rdr: string_reader, op: token::binop) -> token::token { + fn binop(rdr: string_reader, op: token::binop) -> token::Token { bump(rdr); if rdr.curr == '=' { bump(rdr); diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index 828d498ca3c23..1f607d849d965 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -10,7 +10,7 @@ removed. use codemap::span; use ast::{expr, expr_lit, lit_nil}; use ast_util::{respan}; -use token::token; +use token::Token; /// The specific types of unsupported syntax pub enum ObsoleteSyntax { @@ -23,7 +23,8 @@ pub enum ObsoleteSyntax { ObsoleteClassTraits, ObsoletePrivSection, ObsoleteModeInFnType, - ObsoleteByMutRefMode + ObsoleteByMutRefMode, + ObsoleteFixedLengthVec, } impl ObsoleteSyntax : cmp::Eq { @@ -47,7 +48,7 @@ pub trait ObsoleteReporter { fn obsolete_expr(sp: span, kind: ObsoleteSyntax) -> @expr; } -impl parser : ObsoleteReporter { +impl Parser : ObsoleteReporter { /// Reports an obsolete syntax non-fatal error. 
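The `obsolete.rs` hunk below registers a new `ObsoleteFixedLengthVec` variant together with its user-facing name and migration hint, and the parser reports it as a non-fatal diagnostic while still accepting the old `/N` suffix. Here is a simplified sketch of that reporting mechanism in modern Rust; the variant names and message text echo the patch, while `describe`, `report_obsolete`, and the span representation are invented for the example.

~~~~
// Sketch of the obsolete-syntax reporting mechanism (simplified; the span
// type and helper names are invented for the example).
#[derive(Clone, Copy, Debug)]
enum ObsoleteSyntax {
    FixedLengthVec,
    ByMutRefMode,
}

// Each retired construct maps to a short name plus a migration hint.
fn describe(kind: ObsoleteSyntax) -> (&'static str, &'static str) {
    match kind {
        ObsoleteSyntax::FixedLengthVec => (
            "fixed-length vector",
            "fixed-length types are now written `[T * N]`, and instances are type-inferred",
        ),
        ObsoleteSyntax::ByMutRefMode => (
            "by-mutable-reference mode",
            "declare an argument of type &mut T instead",
        ),
    }
}

// Stand-in for the parser's non-fatal, span-annotated error reporting.
fn report_obsolete(span: (usize, usize), kind: ObsoleteSyntax) {
    let (name, hint) = describe(kind);
    eprintln!("{}..{}: obsolete syntax: {} ({})", span.0, span.1, name, hint);
}

fn main() {
    report_obsolete((10, 14), ObsoleteSyntax::FixedLengthVec);
    report_obsolete((20, 23), ObsoleteSyntax::ByMutRefMode);
}
~~~~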
fn obsolete(sp: span, kind: ObsoleteSyntax) { let (kind_str, desc) = match kind { @@ -99,6 +100,11 @@ impl parser : ObsoleteReporter { "by-mutable-reference mode", "Declare an argument of type &mut T instead" ), + ObsoleteFixedLengthVec => ( + "fixed-length vector", + "Fixed-length types are now written `[T * N]`, and instances \ + are type-inferred" + ) }; self.report(sp, kind, kind_str, desc); @@ -121,7 +127,7 @@ impl parser : ObsoleteReporter { } } - fn token_is_obsolete_ident(ident: &str, token: token) -> bool { + fn token_is_obsolete_ident(ident: &str, token: Token) -> bool { match token { token::IDENT(copy sid, _) => { str::eq_slice(*self.id_to_str(sid), ident) @@ -183,5 +189,66 @@ impl parser : ObsoleteReporter { false } } + + fn try_parse_obsolete_fixed_vstore() -> Option> { + if self.token == token::BINOP(token::SLASH) { + self.bump(); + match copy self.token { + token::UNDERSCORE => { + self.obsolete(copy self.last_span, + ObsoleteFixedLengthVec); + self.bump(); Some(None) + } + token::LIT_INT_UNSUFFIXED(i) if i >= 0i64 => { + self.obsolete(copy self.last_span, + ObsoleteFixedLengthVec); + self.bump(); Some(Some(i as uint)) + } + _ => None + } + } else { + None + } + } + + fn try_convert_ty_to_obsolete_fixed_length_vstore(sp: span, t: ast::ty_) + -> ast::ty_ { + match self.try_parse_obsolete_fixed_vstore() { + // Consider a fixed length vstore suffix (/N or /_) + None => t, + Some(v) => { + ast::ty_fixed_length( + @{id: self.get_id(), node: t, span: sp}, v) + } + } + } + + fn try_convert_expr_to_obsolete_fixed_length_vstore( + lo: uint, hi: uint, ex: ast::expr_ + ) -> (uint, ast::expr_) { + + let mut hi = hi; + let mut ex = ex; + + // Vstore is legal following expr_lit(lit_str(...)) and expr_vec(...) + // only. + match ex { + ast::expr_lit(@{node: ast::lit_str(_), span: _}) | + ast::expr_vec(_, _) => { + match self.try_parse_obsolete_fixed_vstore() { + None => (), + Some(v) => { + hi = self.span.hi; + ex = ast::expr_vstore(self.mk_expr(lo, hi, ex), + ast::expr_vstore_fixed(v)); + } + } + } + _ => () + } + + return (hi, ex); + } + } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 973822ddff9b9..1ee683bdd0887 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -6,7 +6,7 @@ use std::map::HashMap; use token::{can_begin_expr, is_ident, is_ident_or_path, is_plain_ident, INTERPOLATED, special_idents}; use codemap::{span,fss_none}; -use util::interner::interner; +use util::interner::Interner; use ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec}; use lexer::reader; use prec::{as_prec, token_to_binop}; @@ -22,7 +22,7 @@ use obsolete::{ ObsoleteWith, ObsoleteClassMethod, ObsoleteClassTraits, ObsoleteModeInFnType, ObsoleteByMutRefMode }; -use ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute, +use ast::{_mod, add, arg, arm, attribute, bind_by_ref, bind_by_implicit_ref, bind_by_value, bind_by_move, bitand, bitor, bitxor, blk, blk_check_mode, bound_const, bound_copy, bound_send, bound_trait, bound_owned, box, by_copy, @@ -58,7 +58,7 @@ use ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute, stmt_semi, struct_def, struct_field, struct_variant_kind, subtract, sty_box, sty_by_ref, sty_region, sty_static, sty_uniq, sty_value, token_tree, trait_method, trait_ref, tt_delim, tt_seq, - tt_tok, tt_nonterminal, tuple_variant_kind, ty, ty_, ty_bot, + tt_tok, tt_nonterminal, tuple_variant_kind, Ty, ty_, ty_bot, ty_box, ty_field, ty_fn, ty_infer, ty_mac, ty_method, ty_nil, ty_param, ty_param_bound, 
ty_path, ty_ptr, ty_rec, ty_rptr, ty_tup, ty_u32, ty_uniq, ty_vec, ty_fixed_length, type_value_ns, @@ -71,7 +71,7 @@ use ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute, expr_vstore_uniq}; export file_type; -export parser; +export Parser; export CRATE_FILE; export SOURCE_FILE; @@ -124,12 +124,13 @@ type item_info = (ident, item_, Option<~[attribute]>); enum item_or_view_item { iovi_none, iovi_item(@item), + iovi_foreign_item(@foreign_item), iovi_view_item(@view_item) } enum view_item_parse_mode { VIEW_ITEMS_AND_ITEMS_ALLOWED, - VIEW_ITEMS_ALLOWED, + VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED, IMPORTS_AND_ITEMS_ALLOWED } @@ -190,14 +191,14 @@ pure fn maybe_append(+lhs: ~[attribute], rhs: Option<~[attribute]>) /* ident is handled by common.rs */ -fn parser(sess: parse_sess, cfg: ast::crate_cfg, - +rdr: reader, ftype: file_type) -> parser { +fn Parser(sess: parse_sess, cfg: ast::crate_cfg, + +rdr: reader, ftype: file_type) -> Parser { let tok0 = rdr.next_token(); let span0 = tok0.sp; let interner = rdr.interner(); - parser { + Parser { reader: move rdr, interner: move interner, sess: sess, @@ -206,12 +207,7 @@ fn parser(sess: parse_sess, cfg: ast::crate_cfg, token: tok0.tok, span: span0, last_span: span0, - buffer: [mut - {tok: tok0.tok, sp: span0}, - {tok: tok0.tok, sp: span0}, - {tok: tok0.tok, sp: span0}, - {tok: tok0.tok, sp: span0} - ]/4, + buffer: [mut {tok: tok0.tok, sp: span0}, ..4], buffer_start: 0, buffer_end: 0, restriction: UNRESTRICTED, @@ -223,14 +219,14 @@ fn parser(sess: parse_sess, cfg: ast::crate_cfg, } } -struct parser { +struct Parser { sess: parse_sess, cfg: crate_cfg, file_type: file_type, - mut token: token::token, + mut token: token::Token, mut span: span, mut last_span: span, - mut buffer: [mut {tok: token::token, sp: span}]/4, + mut buffer: [mut {tok: token::Token, sp: span} * 4], mut buffer_start: int, mut buffer_end: int, mut restriction: restriction, @@ -247,7 +243,7 @@ struct parser { drop {} /* do not copy the parser; its state is tied to outside state */ } -impl parser { +impl Parser { fn bump() { self.last_span = self.span; let next = if self.buffer_start == self.buffer_end { @@ -260,7 +256,7 @@ impl parser { self.token = next.tok; self.span = next.sp; } - fn swap(next: token::token, lo: uint, hi: uint) { + fn swap(next: token::Token, lo: uint, hi: uint) { self.token = next; self.span = mk_sp(lo, hi); } @@ -270,7 +266,7 @@ impl parser { } return (4 - self.buffer_start) + self.buffer_end; } - fn look_ahead(distance: uint) -> token::token { + fn look_ahead(distance: uint) -> token::Token { let dist = distance as int; while self.buffer_length() < dist { self.buffer[self.buffer_end] = self.reader.next_token(); @@ -411,7 +407,7 @@ impl parser { }); } - fn parse_ret_ty() -> (ret_style, @ty) { + fn parse_ret_ty() -> (ret_style, @Ty) { return if self.eat(token::RARROW) { let lo = self.span.lo; if self.eat(token::NOT) { @@ -472,7 +468,7 @@ impl parser { self.region_from_name(name) } - fn parse_ty(colons_before_params: bool) -> @ty { + fn parse_ty(colons_before_params: bool) -> @Ty { maybe_whole!(self, nt_ty); let lo = self.span.lo; @@ -557,14 +553,13 @@ impl parser { } else { self.fatal(~"expected type"); }; let sp = mk_sp(lo, self.last_span.hi); - return @{id: self.get_id(), - node: match self.maybe_parse_fixed_vstore() { - // Consider a fixed vstore suffix (/N or /_) - None => t, - Some(v) => { - ty_fixed_length(@{id: self.get_id(), node:t, span: sp}, v) - } }, + return { + let node = + self.try_convert_ty_to_obsolete_fixed_length_vstore(sp, t); + @{id: 
self.get_id(), + node: node, span: sp} + }; } fn parse_arg_mode() -> mode { @@ -609,10 +604,10 @@ impl parser { } } - fn parse_capture_item_or(parse_arg_fn: fn(parser) -> arg_or_capture_item) + fn parse_capture_item_or(parse_arg_fn: fn(Parser) -> arg_or_capture_item) -> arg_or_capture_item { - fn parse_capture_item(p:parser, is_move: bool) -> capture_item { + fn parse_capture_item(p:Parser, is_move: bool) -> capture_item { let sp = mk_sp(p.span.lo, p.span.hi); let ident = p.parse_ident(); @{id: p.get_id(), is_move: is_move, name: ident, span: sp} @@ -695,23 +690,6 @@ impl parser { } } - fn maybe_parse_fixed_vstore() -> Option> { - if self.token == token::BINOP(token::SLASH) { - self.bump(); - match copy self.token { - token::UNDERSCORE => { - self.bump(); Some(None) - } - token::LIT_INT_UNSUFFIXED(i) if i >= 0i64 => { - self.bump(); Some(Some(i as uint)) - } - _ => None - } - } else { - None - } - } - fn maybe_parse_fixed_vstore_with_star() -> Option> { if self.eat(token::BINOP(token::STAR)) { match copy self.token { @@ -728,7 +706,7 @@ impl parser { } } - fn lit_from_token(tok: token::token) -> lit_ { + fn lit_from_token(tok: token::Token) -> lit_ { match tok { token::LIT_INT(i, it) => lit_int(i, it), token::LIT_UINT(u, ut) => lit_uint(u, ut), @@ -760,8 +738,8 @@ impl parser { } fn parse_path_without_tps_( - parse_ident: fn(parser) -> ident, - parse_last_ident: fn(parser) -> ident) -> @path { + parse_ident: fn(Parser) -> ident, + parse_last_ident: fn(Parser) -> ident) -> @path { maybe_whole!(self, nt_path); let lo = self.span.lo; @@ -842,7 +820,7 @@ impl parser { } } - fn parse_field(sep: token::token) -> field { + fn parse_field(sep: token::Token) -> field { let lo = self.span.lo; let m = self.parse_mutability(); let i = self.parse_ident(); @@ -1087,20 +1065,8 @@ impl parser { ex = expr_lit(@lit); } - // Vstore is legal following expr_lit(lit_str(...)) and expr_vec(...) - // only. - match ex { - expr_lit(@{node: lit_str(_), span: _}) | - expr_vec(_, _) => match self.maybe_parse_fixed_vstore() { - None => (), - Some(v) => { - hi = self.span.hi; - ex = expr_vstore(self.mk_expr(lo, hi, ex), - expr_vstore_fixed(v)); - } - }, - _ => () - } + let (hi, ex) = + self.try_convert_expr_to_obsolete_fixed_length_vstore(lo, hi, ex); return self.mk_pexpr(lo, hi, ex); } @@ -1220,7 +1186,7 @@ impl parser { return e; } - fn parse_sep_and_zerok() -> (Option, bool) { + fn parse_sep_and_zerok() -> (Option, bool) { if self.token == token::BINOP(token::STAR) || self.token == token::BINOP(token::PLUS) { let zerok = self.token == token::BINOP(token::STAR); @@ -1243,7 +1209,7 @@ impl parser { fn parse_token_tree() -> token_tree { maybe_whole!(deref self, nt_tt); - fn parse_tt_tok(p: parser, delim_ok: bool) -> token_tree { + fn parse_tt_tok(p: Parser, delim_ok: bool) -> token_tree { match p.token { token::RPAREN | token::RBRACE | token::RBRACKET if !delim_ok => { @@ -1310,8 +1276,8 @@ impl parser { // This goofy function is necessary to correctly match parens in matchers. // Otherwise, `$( ( )` would be a valid matcher, and `$( () )` would be // invalid. It's similar to common::parse_seq. 
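The reworked `Parser` struct above keeps a four-slot token buffer (`buffer`, `buffer_start`, `buffer_end`) so that `look_ahead(distance)` can lazily pull tokens from the reader and peek a short distance ahead without consuming them. A simplified modern-Rust sketch of that ring-buffer lookahead follows; the `Token` enum and `Lookahead` wrapper are invented for the example, and the real parser stores a span next to each token.

~~~~
// Sketch of the parser's small fixed-size lookahead buffer (`Token` and
// `Lookahead` are invented for the example; the real buffer also stores a
// span with each token).
#[derive(Clone, Debug, PartialEq)]
enum Token {
    Ident(String),
    Plus,
    Eof,
}

struct Lookahead<I: Iterator<Item = Token>> {
    source: I,
    buffer: [Option<Token>; 4], // ring buffer, like `buffer: [.. * 4]`
    start: usize,               // like `buffer_start`
    end: usize,                 // like `buffer_end`
}

impl<I: Iterator<Item = Token>> Lookahead<I> {
    fn new(source: I) -> Self {
        Lookahead { source, buffer: [None, None, None, None], start: 0, end: 0 }
    }

    // Number of tokens currently buffered but not yet consumed.
    fn buffered(&self) -> usize {
        (self.end + 4 - self.start) % 4
    }

    // Peek `distance` tokens ahead (1 = the next token) without consuming.
    fn look_ahead(&mut self, distance: usize) -> Token {
        assert!(distance >= 1 && distance <= 3, "only a few tokens of lookahead");
        while self.buffered() < distance {
            let tok = self.source.next().unwrap_or(Token::Eof);
            self.buffer[self.end] = Some(tok);
            self.end = (self.end + 1) % 4;
        }
        self.buffer[(self.start + distance - 1) % 4].clone().unwrap()
    }
}

fn main() {
    let toks = vec![Token::Ident("a".into()), Token::Plus, Token::Ident("b".into())];
    let mut la = Lookahead::new(toks.into_iter());
    assert_eq!(la.look_ahead(2), Token::Plus);
    println!("next: {:?}", la.look_ahead(1));
}
~~~~

With four slots and `start`/`end` indices, at most three tokens can be distinguished from an empty buffer, which is enough for the short lookahead the grammar needs here.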
- fn parse_matcher_subseq(name_idx: @mut uint, bra: token::token, - ket: token::token) -> ~[matcher] { + fn parse_matcher_subseq(name_idx: @mut uint, bra: token::Token, + ket: token::Token) -> ~[matcher] { let mut ret_val = ~[]; let mut lparens = 0u; @@ -2158,7 +2124,7 @@ impl parser { fn parse_stmt(+first_item_attrs: ~[attribute]) -> @stmt { maybe_whole!(self, nt_stmt); - fn check_expected_item(p: parser, current_attrs: ~[attribute]) { + fn check_expected_item(p: Parser, current_attrs: ~[attribute]) { // If we have attributes then we should have an item if vec::is_not_empty(current_attrs) { p.fatal(~"expected item"); @@ -2184,7 +2150,7 @@ impl parser { let item_attrs = vec::append(first_item_attrs, item_attrs); - match self.parse_item_or_view_item(item_attrs, true) { + match self.parse_item_or_view_item(item_attrs, true, false) { iovi_item(i) => { let mut hi = i.span.hi; let decl = @spanned(lo, hi, decl_item(i)); @@ -2194,6 +2160,9 @@ impl parser { self.span_fatal(vi.span, ~"view items must be declared at \ the top of the block"); } + iovi_foreign_item(_) => { + self.fatal(~"foreign items are not allowed here"); + } iovi_none() => { /* fallthrough */ } } @@ -2221,7 +2190,7 @@ impl parser { maybe_whole!(pair_empty self, nt_block); - fn maybe_parse_inner_attrs_and_next(p: parser, parse_attrs: bool) -> + fn maybe_parse_inner_attrs_and_next(p: Parser, parse_attrs: bool) -> {inner: ~[attribute], next: ~[attribute]} { if parse_attrs { p.parse_inner_attrs_and_next() @@ -2259,7 +2228,7 @@ impl parser { let mut stmts = ~[]; let mut expr = None; - let {attrs_remaining, view_items, items: items} = + let {attrs_remaining, view_items, items: items, _} = self.parse_items_and_view_items(first_item_attrs, IMPORTS_AND_ITEMS_ALLOWED); @@ -2386,7 +2355,7 @@ impl parser { } else { ~[] } } - fn parse_fn_decl(parse_arg_fn: fn(parser) -> arg_or_capture_item) + fn parse_fn_decl(parse_arg_fn: fn(Parser) -> arg_or_capture_item) -> (fn_decl, capture_clause) { let args_or_capture_items: ~[arg_or_capture_item] = @@ -2420,11 +2389,11 @@ impl parser { } fn parse_fn_decl_with_self(parse_arg_fn: - fn(parser) -> arg_or_capture_item) + fn(Parser) -> arg_or_capture_item) -> (self_ty, fn_decl, capture_clause) { fn maybe_parse_self_ty(cnstr: fn(+v: mutability) -> ast::self_ty_, - p: parser) -> ast::self_ty_ { + p: Parser) -> ast::self_ty_ { // We need to make sure it isn't a mode or a type if p.token_is_keyword(~"self", p.look_ahead(1)) || ((p.token_is_keyword(~"const", p.look_ahead(1)) || @@ -2604,7 +2573,7 @@ impl parser { // Parses four variants (with the region/type params always optional): // impl ~[T] : to_str { ... 
} fn parse_item_impl() -> item_info { - fn wrap_path(p: parser, pt: @path) -> @ty { + fn wrap_path(p: Parser, pt: @path) -> @Ty { @{id: p.get_id(), node: ty_path(pt, p.get_id()), span: pt.span} } @@ -2664,7 +2633,7 @@ impl parser { ref_id: self.get_id(), impl_id: self.get_id()} } - fn parse_trait_ref_list(ket: token::token) -> ~[@trait_ref] { + fn parse_trait_ref_list(ket: token::Token) -> ~[@trait_ref] { self.parse_seq_to_before_end( ket, seq_sep_trailing_disallowed(token::COMMA), |p| p.parse_trait_ref()) @@ -2756,7 +2725,7 @@ impl parser { None) } - fn token_is_pound_or_doc_comment(++tok: token::token) -> bool { + fn token_is_pound_or_doc_comment(++tok: token::Token) -> bool { match tok { token::POUND | token::DOC_COMMENT(_) => true, _ => false @@ -2841,10 +2810,10 @@ impl parser { self.eat_keyword(~"static") } - fn parse_mod_items(term: token::token, + fn parse_mod_items(term: token::Token, +first_item_attrs: ~[attribute]) -> _mod { // Shouldn't be any view items since we've already parsed an item attr - let {attrs_remaining, view_items, items: starting_items} = + let {attrs_remaining, view_items, items: starting_items, _} = self.parse_items_and_view_items(first_item_attrs, VIEW_ITEMS_AND_ITEMS_ALLOWED); let mut items: ~[@item] = move starting_items; @@ -2858,7 +2827,7 @@ impl parser { } debug!("parse_mod_items: parse_item_or_view_item(attrs=%?)", attrs); - match self.parse_item_or_view_item(attrs, true) { + match self.parse_item_or_view_item(attrs, true, false) { iovi_item(item) => items.push(item), iovi_view_item(view_item) => { self.span_fatal(view_item.span, ~"view items must be \ @@ -2958,11 +2927,11 @@ impl parser { +first_item_attrs: ~[attribute]) -> foreign_mod { // Shouldn't be any view items since we've already parsed an item attr - let {attrs_remaining, view_items, items: _} = + let {attrs_remaining, view_items, items: _, foreign_items} = self.parse_items_and_view_items(first_item_attrs, - VIEW_ITEMS_ALLOWED); + VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED); - let mut items: ~[@foreign_item] = ~[]; + let mut items: ~[@foreign_item] = move foreign_items; let mut initial_attrs = attrs_remaining; while self.token != token::RBRACE { let attrs = vec::append(initial_attrs, @@ -2971,7 +2940,7 @@ impl parser { items.push(self.parse_foreign_item(attrs)); } return {sort: sort, view_items: view_items, - items: items}; + items: items}; } fn parse_item_foreign_mod(lo: uint, @@ -3222,15 +3191,18 @@ impl parser { } } - fn fn_expr_lookahead(tok: token::token) -> bool { + fn fn_expr_lookahead(tok: token::Token) -> bool { match tok { token::LPAREN | token::AT | token::TILDE | token::BINOP(_) => true, _ => false } } - fn parse_item_or_view_item(+attrs: ~[attribute], items_allowed: bool) + fn parse_item_or_view_item(+attrs: ~[attribute], items_allowed: bool, + foreign_items_allowed: bool) -> item_or_view_item { + assert items_allowed != foreign_items_allowed; + maybe_whole!(iovi self,nt_item); let lo = self.span.lo; @@ -3248,6 +3220,9 @@ impl parser { return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); + } else if foreign_items_allowed && self.is_keyword(~"const") { + let item = self.parse_item_foreign_const(visibility, attrs); + return iovi_foreign_item(item); } else if items_allowed && self.is_keyword(~"fn") && !self.fn_expr_lookahead(self.look_ahead(1u)) { @@ -3262,6 +3237,10 @@ impl parser { return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); + } else if foreign_items_allowed 
&& + (self.is_keyword(~"fn") || self.is_keyword(~"pure")) { + let item = self.parse_item_foreign_fn(visibility, attrs); + return iovi_foreign_item(item); } else if items_allowed && self.is_keyword(~"unsafe") && self.look_ahead(1u) != token::LBRACE { self.bump(); @@ -3348,16 +3327,24 @@ impl parser { return iovi_item(self.mk_item(lo, self.last_span.hi, id, item_, visibility, attrs)); } else { + if visibility != inherited { + let mut s = ~"unmatched visibility `"; + s += if visibility == public { ~"pub" } else { ~"priv" }; + s += ~"`"; + self.span_fatal(copy self.last_span, s); + } return iovi_none; }; } fn parse_item(+attrs: ~[attribute]) -> Option<@ast::item> { - match self.parse_item_or_view_item(attrs, true) { + match self.parse_item_or_view_item(attrs, true, false) { iovi_none => None, iovi_view_item(_) => self.fatal(~"view items are not allowed here"), + iovi_foreign_item(_) => + self.fatal(~"foreign items are not allowed here"), iovi_item(item) => Some(item) } @@ -3492,28 +3479,35 @@ impl parser { mode: view_item_parse_mode) -> {attrs_remaining: ~[attribute], view_items: ~[@view_item], - items: ~[@item]} { + items: ~[@item], + foreign_items: ~[@foreign_item]} { let mut attrs = vec::append(first_item_attrs, self.parse_outer_attributes()); - let items_allowed; - match mode { - VIEW_ITEMS_AND_ITEMS_ALLOWED | IMPORTS_AND_ITEMS_ALLOWED => - items_allowed = true, - VIEW_ITEMS_ALLOWED => - items_allowed = false - } + let items_allowed = match mode { + VIEW_ITEMS_AND_ITEMS_ALLOWED | IMPORTS_AND_ITEMS_ALLOWED => true, + VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED => false + }; + + let restricted_to_imports = match mode { + IMPORTS_AND_ITEMS_ALLOWED => true, + VIEW_ITEMS_AND_ITEMS_ALLOWED | + VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED => false + }; + + let foreign_items_allowed = match mode { + VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED => true, + VIEW_ITEMS_AND_ITEMS_ALLOWED | IMPORTS_AND_ITEMS_ALLOWED => false + }; - let (view_items, items) = (DVec(), DVec()); + let (view_items, items, foreign_items) = (DVec(), DVec(), DVec()); loop { - match self.parse_item_or_view_item(attrs, items_allowed) { + match self.parse_item_or_view_item(attrs, items_allowed, + foreign_items_allowed) { iovi_none => break, iovi_view_item(view_item) => { - match mode { - VIEW_ITEMS_AND_ITEMS_ALLOWED | - VIEW_ITEMS_ALLOWED => {} - IMPORTS_AND_ITEMS_ALLOWED => + if restricted_to_imports { match view_item.node { view_item_import(_) => {} view_item_export(_) | view_item_use(*) => @@ -3528,13 +3522,18 @@ impl parser { assert items_allowed; items.push(item) } + iovi_foreign_item(foreign_item) => { + assert foreign_items_allowed; + foreign_items.push(foreign_item); + } } attrs = self.parse_outer_attributes(); } {attrs_remaining: attrs, view_items: dvec::unwrap(move view_items), - items: dvec::unwrap(move items)} + items: dvec::unwrap(move items), + foreign_items: dvec::unwrap(move foreign_items)} } // Parses a source module as a crate @@ -3608,7 +3607,7 @@ impl parser { return self.fatal(~"expected crate directive"); } - fn parse_crate_directives(term: token::token, + fn parse_crate_directives(term: token::Token, first_outer_attr: ~[attribute]) -> ~[@crate_directive] { diff --git a/src/libsyntax/parse/prec.rs b/src/libsyntax/parse/prec.rs index 668301db6201e..3fd905cb8ecec 100644 --- a/src/libsyntax/parse/prec.rs +++ b/src/libsyntax/parse/prec.rs @@ -3,7 +3,7 @@ export unop_prec; export token_to_binop; use token::*; -use token::token; +use token::Token; use ast::*; /// Unary operators have higher precedence than binary @@ -19,7 +19,7 @@ const 
as_prec: uint = 11u; * Maps a token to a record specifying the corresponding binary * operator and its precedence */ -fn token_to_binop(tok: token) -> Option { +fn token_to_binop(tok: Token) -> Option { match tok { BINOP(STAR) => Some(mul), BINOP(SLASH) => Some(div), diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index a7d439b8ce6d0..5151fd1bac837 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -1,5 +1,5 @@ use util::interner; -use util::interner::interner; +use util::interner::Interner; use std::map::HashMap; #[auto_serialize] @@ -19,7 +19,7 @@ enum binop { #[auto_serialize] #[auto_deserialize] -enum token { +enum Token { /* Expression-operator symbols. */ EQ, LT, @@ -84,7 +84,7 @@ enum nonterminal { nt_stmt(@ast::stmt), nt_pat( @ast::pat), nt_expr(@ast::expr), - nt_ty( @ast::ty), + nt_ty( @ast::Ty), nt_ident(ast::ident, bool), nt_path(@ast::path), nt_tt( @ast::token_tree), //needs @ed to break a circularity @@ -106,7 +106,7 @@ fn binop_to_str(o: binop) -> ~str { } } -fn to_str(in: @ident_interner, t: token) -> ~str { +fn to_str(in: @ident_interner, t: Token) -> ~str { match t { EQ => ~"=", LT => ~"<", @@ -192,7 +192,7 @@ fn to_str(in: @ident_interner, t: token) -> ~str { } } -pure fn can_begin_expr(t: token) -> bool { +pure fn can_begin_expr(t: Token) -> bool { match t { LPAREN => true, LBRACE => true, @@ -223,7 +223,7 @@ pure fn can_begin_expr(t: token) -> bool { } /// what's the opposite delimiter? -fn flip_delimiter(t: token::token) -> token::token { +fn flip_delimiter(t: token::Token) -> token::Token { match t { token::LPAREN => token::RPAREN, token::LBRACE => token::RBRACE, @@ -237,7 +237,7 @@ fn flip_delimiter(t: token::token) -> token::token { -fn is_lit(t: token) -> bool { +fn is_lit(t: Token) -> bool { match t { LIT_INT(_, _) => true, LIT_UINT(_, _) => true, @@ -248,22 +248,22 @@ fn is_lit(t: token) -> bool { } } -pure fn is_ident(t: token) -> bool { +pure fn is_ident(t: Token) -> bool { match t { IDENT(_, _) => true, _ => false } } -pure fn is_ident_or_path(t: token) -> bool { +pure fn is_ident_or_path(t: Token) -> bool { match t { IDENT(_, _) | INTERPOLATED(nt_path(*)) => true, _ => false } } -pure fn is_plain_ident(t: token) -> bool { +pure fn is_plain_ident(t: Token) -> bool { match t { IDENT(_, false) => true, _ => false } } -pure fn is_bar(t: token) -> bool { +pure fn is_bar(t: Token) -> bool { match t { BINOP(OR) | OROR => true, _ => false } } @@ -314,7 +314,7 @@ mod special_idents { } struct ident_interner { - priv interner: util::interner::interner<@~str>, + priv interner: util::interner::Interner<@~str>, } impl ident_interner { @@ -457,8 +457,8 @@ impl binop : cmp::Eq { pure fn ne(other: &binop) -> bool { !self.eq(other) } } -impl token : cmp::Eq { - pure fn eq(other: &token) -> bool { +impl Token : cmp::Eq { + pure fn eq(other: &Token) -> bool { match self { EQ => { match (*other) { @@ -720,7 +720,7 @@ impl token : cmp::Eq { } } } - pure fn ne(other: &token) -> bool { !self.eq(other) } + pure fn ne(other: &Token) -> bool { !self.eq(other) } } // Local Variables: diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index b98014f421bb3..807a78b312264 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1,5 +1,5 @@ use parse::{comments, lexer, token}; -use codemap::codemap; +use codemap::CodeMap; use pp::{break_offset, word, printer, space, zerobreak, hardbreak, breaks}; use pp::{consistent, inconsistent, eof}; use ast::{required, provided}; @@ -24,7 +24,7 @@ fn 
no_ann() -> pp_ann { type ps = @{s: pp::printer, - cm: Option, + cm: Option, intr: @token::ident_interner, comments: Option<~[comments::cmnt]>, literals: Option<~[comments::lit]>, @@ -45,7 +45,7 @@ fn end(s: ps) { fn rust_printer(writer: io::Writer, intr: @ident_interner) -> ps { return @{s: pp::mk_printer(writer, default_columns), - cm: None::, + cm: None::, intr: intr, comments: None::<~[comments::cmnt]>, literals: None::<~[comments::lit]>, @@ -63,7 +63,7 @@ const default_columns: uint = 78u; // Requires you to pass an input filename and reader so that // it can scan the input text for comments and literals to // copy forward. -fn print_crate(cm: codemap, intr: @ident_interner, +fn print_crate(cm: CodeMap, intr: @ident_interner, span_diagnostic: diagnostic::span_handler, crate: @ast::crate, filename: ~str, in: io::Reader, out: io::Writer, ann: pp_ann, is_expanded: bool) { @@ -91,7 +91,7 @@ fn print_crate_(s: ps, &&crate: @ast::crate) { eof(s.s); } -fn ty_to_str(ty: @ast::ty, intr: @ident_interner) -> ~str { +fn ty_to_str(ty: @ast::Ty, intr: @ident_interner) -> ~str { to_str(ty, print_type, intr) } @@ -348,11 +348,11 @@ fn print_region(s: ps, region: @ast::region, sep: ~str) { word(s.s, sep); } -fn print_type(s: ps, &&ty: @ast::ty) { +fn print_type(s: ps, &&ty: @ast::Ty) { print_type_ex(s, ty, false); } -fn print_type_ex(s: ps, &&ty: @ast::ty, print_colons: bool) { +fn print_type_ex(s: ps, &&ty: @ast::Ty, print_colons: bool) { maybe_print_comment(s, ty.span.lo); ibox(s, 0u); match ty.node { @@ -399,9 +399,21 @@ fn print_type_ex(s: ps, &&ty: @ast::ty, print_colons: bool) { } ast::ty_path(path, _) => print_path(s, path, print_colons), ast::ty_fixed_length(t, v) => { - print_type(s, t); - word(s.s, ~"/"); + word(s.s, ~"["); + match t.node { + ast::ty_vec(mt) => { + match mt.mutbl { + ast::m_mutbl => word_space(s, ~"mut"), + ast::m_const => word_space(s, ~"const"), + ast::m_imm => () + } + print_type(s, mt.ty); + } + _ => fail ~"ty_fixed_length can only contain ty_vec as type" + } + word(s.s, ~" * "); print_vstore(s, ast::vstore_fixed(v)); + word(s.s, ~"]"); } ast::ty_mac(_) => { fail ~"print_type doesn't know how to print a ty_mac"; @@ -433,6 +445,7 @@ fn print_foreign_item(s: ps, item: @ast::foreign_item) { print_type(s, t); word(s.s, ~";"); end(s); // end the head-ibox + end(s); // end the outer cbox } } } @@ -443,7 +456,6 @@ fn print_item(s: ps, &&item: @ast::item) { print_outer_attributes(s, item.attrs); let ann_node = node_item(s, item); s.ann.pre(ann_node); - print_visibility(s, item.vis); match item.node { ast::item_const(ty, expr) => { head(s, visibility_qualified(item.vis, ~"const")); @@ -479,10 +491,10 @@ fn print_item(s: ps, &&item: @ast::item) { ast::named => { word_nbsp(s, ~"mod"); print_ident(s, item.ident); + nbsp(s); } ast::anonymous => {} } - nbsp(s); bopen(s); print_foreign_mod(s, nmod, item.attrs); bclose(s, item.span); @@ -490,7 +502,7 @@ fn print_item(s: ps, &&item: @ast::item) { ast::item_ty(ty, params) => { ibox(s, indent_unit); ibox(s, 0u); - word_nbsp(s, ~"type"); + word_nbsp(s, visibility_qualified(item.vis, ~"type")); print_ident(s, item.ident); print_type_params(s, params); end(s); // end the inner ibox @@ -502,15 +514,16 @@ fn print_item(s: ps, &&item: @ast::item) { end(s); // end the outer ibox } ast::item_enum(enum_definition, params) => { - print_enum_def(s, enum_definition, params, item.ident, item.span); + print_enum_def(s, enum_definition, params, item.ident, + item.span, item.vis); } ast::item_class(struct_def, tps) => { - head(s, ~"struct"); + head(s, 
visibility_qualified(item.vis, ~"struct")); print_struct(s, struct_def, tps, item.ident, item.span); } ast::item_impl(tps, opt_trait, ty, methods) => { - head(s, ~"impl"); + head(s, visibility_qualified(item.vis, ~"impl")); if tps.is_not_empty() { print_type_params(s, tps); space(s.s); @@ -533,7 +546,7 @@ fn print_item(s: ps, &&item: @ast::item) { bclose(s, item.span); } ast::item_trait(tps, traits, methods) => { - head(s, ~"trait"); + head(s, visibility_qualified(item.vis, ~"trait")); print_ident(s, item.ident); print_type_params(s, tps); if vec::len(traits) != 0u { @@ -549,6 +562,7 @@ fn print_item(s: ps, &&item: @ast::item) { bclose(s, item.span); } ast::item_mac({node: ast::mac_invoc_tt(pth, tts), _}) => { + print_visibility(s, item.vis); print_path(s, pth, false); word(s.s, ~"! "); print_ident(s, item.ident); @@ -569,7 +583,7 @@ fn print_item(s: ps, &&item: @ast::item) { fn print_enum_def(s: ps, enum_definition: ast::enum_def, params: ~[ast::ty_param], ident: ast::ident, - span: ast::span) { + span: ast::span, visibility: ast::visibility) { let mut newtype = vec::len(enum_definition.variants) == 1u && ident == enum_definition.variants[0].node.name; @@ -581,9 +595,9 @@ fn print_enum_def(s: ps, enum_definition: ast::enum_def, } if newtype { ibox(s, indent_unit); - word_space(s, ~"enum"); + word_space(s, visibility_qualified(visibility, ~"enum")); } else { - head(s, ~"enum"); + head(s, visibility_qualified(visibility, ~"enum")); } print_ident(s, ident); @@ -876,7 +890,7 @@ fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type, indented: uint, attrs: ~[ast::attribute], close_box: bool) { match blk.node.rules { - ast::unsafe_blk => word(s.s, ~"unsafe"), + ast::unsafe_blk => word_space(s, ~"unsafe"), ast::default_blk => () } maybe_print_comment(s, blk.span.lo); diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 5d991bb3551c1..f564589cbe0c0 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -12,14 +12,14 @@ type hash_interner = {map: HashMap, vect: DVec}; -fn mk() -> interner { +fn mk() -> Interner { let m = map::HashMap::(); let hi: hash_interner = {map: m, vect: DVec()}; - move ((move hi) as interner::) + move ((move hi) as Interner::) } -fn mk_prefill(init: ~[T]) -> interner { +fn mk_prefill(init: ~[T]) -> Interner { let rv = mk(); for init.each() |v| { rv.intern(*v); } return rv; @@ -27,14 +27,14 @@ fn mk_prefill(init: ~[T]) -> interner { /* when traits can extend traits, we should extend index to get [] */ -trait interner { +trait Interner { fn intern(T) -> uint; fn gensym(T) -> uint; pure fn get(uint) -> T; fn len() -> uint; } -impl hash_interner: interner { +impl hash_interner: Interner { fn intern(val: T) -> uint { match self.map.find(val) { Some(idx) => return idx, diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 50fbd21f7b8dc..32fcbdfc7589c 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -55,7 +55,7 @@ type visitor = visit_decl: fn@(@decl, E, vt), visit_expr: fn@(@expr, E, vt), visit_expr_post: fn@(@expr, E, vt), - visit_ty: fn@(@ty, E, vt), + visit_ty: fn@(@Ty, E, vt), visit_ty_params: fn@(~[ty_param], E, vt), visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id, E, vt), visit_ty_method: fn@(ty_method, E, vt), @@ -182,12 +182,14 @@ fn visit_enum_def(enum_definition: ast::enum_def, tps: ~[ast::ty_param], visit_enum_def(enum_definition, tps, e, v); } } + // Visit the disr expr if it exists + vr.node.disr_expr.iter(|ex| v.visit_expr(*ex, e, v)); } } -fn 
skip_ty(_t: @ty, _e: E, _v: vt) {} +fn skip_ty(_t: @Ty, _e: E, _v: vt) {} -fn visit_ty(t: @ty, e: E, v: vt) { +fn visit_ty(t: @Ty, e: E, v: vt) { match t.node { ty_box(mt) | ty_uniq(mt) | ty_vec(mt) | ty_ptr(mt) | ty_rptr(_, mt) => { @@ -488,7 +490,7 @@ type simple_visitor = visit_decl: fn@(@decl), visit_expr: fn@(@expr), visit_expr_post: fn@(@expr), - visit_ty: fn@(@ty), + visit_ty: fn@(@Ty), visit_ty_params: fn@(~[ty_param]), visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id), visit_ty_method: fn@(ty_method), @@ -497,7 +499,7 @@ type simple_visitor = visit_struct_field: fn@(@struct_field), visit_struct_method: fn@(@method)}; -fn simple_ignore_ty(_t: @ty) {} +fn simple_ignore_ty(_t: @Ty) {} fn default_simple_visitor() -> simple_visitor { return @{visit_mod: fn@(_m: _mod, _sp: span, _id: node_id) { }, @@ -575,7 +577,7 @@ fn mk_simple_visitor(v: simple_visitor) -> vt<()> { fn v_expr_post(f: fn@(@expr), ex: @expr, &&_e: (), _v: vt<()>) { f(ex); } - fn v_ty(f: fn@(@ty), ty: @ty, &&e: (), v: vt<()>) { + fn v_ty(f: fn@(@Ty), ty: @Ty, &&e: (), v: vt<()>) { f(ty); visit_ty(ty, e, v); } diff --git a/src/rt/rust_uv.cpp b/src/rt/rust_uv.cpp index 706e8ff43807f..c34fd166baec7 100644 --- a/src/rt/rust_uv.cpp +++ b/src/rt/rust_uv.cpp @@ -269,6 +269,20 @@ rust_uv_tcp_bind6 return uv_tcp_bind6(tcp_server, addr); } +extern "C" int +rust_uv_tcp_getpeername +(uv_tcp_t* handle, sockaddr_in* name) { + int namelen = sizeof(sockaddr_in); + return uv_tcp_getpeername(handle, (sockaddr*)name, &namelen); +} + +extern "C" int +rust_uv_tcp_getpeername6 +(uv_tcp_t* handle, sockaddr_in6* name) { + int namelen = sizeof(sockaddr_in6); + return uv_tcp_getpeername(handle, (sockaddr*)name, &namelen); +} + extern "C" int rust_uv_listen(uv_stream_t* stream, int backlog, uv_connection_cb cb) { @@ -480,6 +494,14 @@ rust_uv_ip6_name(struct sockaddr_in6* src, char* dst, size_t size) { int result = uv_ip6_name(src, dst, size); return result; } +extern "C" unsigned int +rust_uv_ip4_port(struct sockaddr_in* src) { + return ntohs(src->sin_port); +} +extern "C" unsigned int +rust_uv_ip6_port(struct sockaddr_in6* src) { + return ntohs(src->sin6_port); +} extern "C" uintptr_t* rust_uv_get_kernel_global_chan_ptr() { diff --git a/src/rt/rustrt.def.in b/src/rt/rustrt.def.in index 890aa352c927d..6a2bdd622cba3 100644 --- a/src/rt/rustrt.def.in +++ b/src/rt/rustrt.def.in @@ -204,3 +204,7 @@ tdefl_compress_mem_to_heap tinfl_decompress_mem_to_heap rust_annihilate_box rust_gc_metadata +rust_uv_ip4_port +rust_uv_ip6_port +rust_uv_tcp_getpeername +rust_uv_tcp_getpeername6 \ No newline at end of file diff --git a/src/rustc/back/link.rs b/src/rustc/back/link.rs index 67edf6a32ba5c..61fd68c193a6a 100644 --- a/src/rustc/back/link.rs +++ b/src/rustc/back/link.rs @@ -1,6 +1,6 @@ use libc::{c_int, c_uint, c_char}; use driver::session; -use session::session; +use session::Session; use lib::llvm::llvm; use syntax::attr; use middle::ty; @@ -33,14 +33,14 @@ impl output_type : cmp::Eq { pure fn ne(other: &output_type) -> bool { !self.eq(other) } } -fn llvm_err(sess: session, msg: ~str) -> ! unsafe { +fn llvm_err(sess: Session, msg: ~str) -> ! 
unsafe { let cstr = llvm::LLVMRustGetLastError(); if cstr == ptr::null() { sess.fatal(msg); } else { sess.fatal(msg + ~": " + str::raw::from_c_str(cstr)); } } -fn WriteOutputFile(sess:session, +fn WriteOutputFile(sess: Session, PM: lib::llvm::PassManagerRef, M: ModuleRef, Triple: *c_char, // FIXME: When #2334 is fixed, change @@ -69,7 +69,7 @@ mod jit { env: *(), } - fn exec(sess: session, + fn exec(sess: Session, pm: PassManagerRef, m: ModuleRef, opt: c_int, @@ -131,7 +131,7 @@ mod write { return false; } - fn run_passes(sess: session, llmod: ModuleRef, output: &Path) { + fn run_passes(sess: Session, llmod: ModuleRef, output: &Path) { let opts = sess.opts; if sess.time_llvm_passes() { llvm::LLVMRustEnableTimePasses(); } let mut pm = mk_pass_manager(); @@ -384,7 +384,7 @@ mod write { * */ -fn build_link_meta(sess: session, c: ast::crate, output: &Path, +fn build_link_meta(sess: Session, c: ast::crate, output: &Path, symbol_hasher: &hash::State) -> link_meta { type provided_metas = @@ -392,7 +392,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: &Path, vers: Option<~str>, cmh_items: ~[@ast::meta_item]}; - fn provided_link_metas(sess: session, c: ast::crate) -> + fn provided_link_metas(sess: Session, c: ast::crate) -> provided_metas { let mut name: Option<~str> = None; let mut vers: Option<~str> = None; @@ -454,13 +454,13 @@ fn build_link_meta(sess: session, c: ast::crate, output: &Path, return truncated_hash_result(symbol_hasher); } - fn warn_missing(sess: session, name: ~str, default: ~str) { + fn warn_missing(sess: Session, name: ~str, default: ~str) { if !sess.building_library { return; } sess.warn(fmt!("missing crate link meta `%s`, using `%s` as default", name, default)); } - fn crate_meta_name(sess: session, _crate: ast::crate, + fn crate_meta_name(sess: Session, _crate: ast::crate, output: &Path, metas: provided_metas) -> ~str { return match metas.name { Some(v) => v, @@ -477,7 +477,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: &Path, }; } - fn crate_meta_vers(sess: session, _crate: ast::crate, + fn crate_meta_vers(sess: Session, _crate: ast::crate, metas: provided_metas) -> ~str { return match metas.vers { Some(v) => v, @@ -569,7 +569,7 @@ fn sanitize(s: ~str) -> ~str { return result; } -fn mangle(sess: session, ss: path) -> ~str { +fn mangle(sess: Session, ss: path) -> ~str { // Follow C++ namespace-mangling style let mut n = ~"_ZN"; // Begin name-sequence. 
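[Editorial note, not part of the patch] The `mangle` hunks above follow the C++ (Itanium-style) convention the comment names: emit `_ZN`, then each path segment prefixed by its decimal length, then a terminator. A minimal sketch of that scheme, in present-day Rust with hypothetical names, purely for illustration (the patched function may differ in detail, e.g. in how it sanitizes segments):

~~~~
// Illustrative only: length-prefix each path segment, Itanium-style.
fn mangle_path(segments: &[&str]) -> String {
    let mut n = String::from("_ZN"); // begin name-sequence
    for s in segments {
        n.push_str(&s.len().to_string()); // decimal length prefix
        n.push_str(s);
    }
    n.push('E'); // close the name-sequence (assumed terminator)
    n
}

// e.g. mangle_path(&["core", "str", "len"]) == "_ZN4core3str3lenE"
~~~~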
@@ -584,7 +584,7 @@ fn mangle(sess: session, ss: path) -> ~str { n } -fn exported_name(sess: session, path: path, hash: ~str, vers: ~str) -> ~str { +fn exported_name(sess: Session, path: path, hash: ~str, vers: ~str) -> ~str { return mangle(sess, vec::append_one( vec::append_one(path, path_name(sess.ident_of(hash))), @@ -623,7 +623,7 @@ fn mangle_internal_name_by_seq(ccx: @crate_ctxt, flav: ~str) -> ~str { // If the user wants an exe generated we need to invoke // cc to link the object file with some libs -fn link_binary(sess: session, +fn link_binary(sess: Session, obj_filename: &Path, out_filename: &Path, lm: link_meta) { diff --git a/src/rustc/back/rpath.rs b/src/rustc/back/rpath.rs index 8aa7caefc7a5e..8038d7bb6ddf9 100644 --- a/src/rustc/back/rpath.rs +++ b/src/rustc/back/rpath.rs @@ -13,7 +13,7 @@ pure fn not_win32(os: session::os) -> bool { } } -fn get_rpath_flags(sess: session::session, out_filename: &Path) -> ~[~str] { +fn get_rpath_flags(sess: session::Session, out_filename: &Path) -> ~[~str] { let os = sess.targ_cfg.os; // No rpath on windows @@ -35,7 +35,7 @@ fn get_rpath_flags(sess: session::session, out_filename: &Path) -> ~[~str] { rpaths_to_flags(rpaths) } -fn get_sysroot_absolute_rt_lib(sess: session::session) -> Path { +fn get_sysroot_absolute_rt_lib(sess: session::Session) -> Path { let r = filesearch::relative_target_lib_path(sess.opts.target_triple); sess.filesearch.sysroot().push_rel(&r).push(os::dll_filename("rustrt")) } diff --git a/src/rustc/back/upcall.rs b/src/rustc/back/upcall.rs index a2c864f6f465f..45e7cd4e9d45a 100644 --- a/src/rustc/back/upcall.rs +++ b/src/rustc/back/upcall.rs @@ -27,8 +27,7 @@ fn declare_upcalls(targ_cfg: @session::config, fn decl(llmod: ModuleRef, prefix: ~str, name: ~str, tys: ~[TypeRef], rv: TypeRef) -> ValueRef { - let mut arg_tys: ~[TypeRef] = ~[]; - for tys.each |t| { arg_tys.push(*t); } + let arg_tys = tys.map(|t| *t); let fn_ty = T_fn(arg_tys, rv); return base::decl_cdecl_fn(llmod, prefix + name, fn_ty); } diff --git a/src/rustc/driver/driver.rs b/src/rustc/driver/driver.rs index e389f3a4bdf7d..934a02d6dd3e3 100644 --- a/src/rustc/driver/driver.rs +++ b/src/rustc/driver/driver.rs @@ -1,6 +1,6 @@ // -*- rust -*- use metadata::{creader, cstore, filesearch}; -use session::{session, session_, OptLevel, No, Less, Default, Aggressive}; +use session::{Session, Session_, OptLevel, No, Less, Default, Aggressive}; use syntax::parse; use syntax::{ast, codemap}; use syntax::attr; @@ -10,8 +10,10 @@ use util::ppaux; use back::link; use result::{Ok, Err}; use std::getopts; +use std::getopts::{opt_present}; +use std::getopts::groups; +use std::getopts::groups::{optopt, optmulti, optflag, optflagopt, getopts}; use io::WriterUtil; -use getopts::{optopt, optmulti, optflag, optflagopt, opt_present}; use back::{x86, x86_64}; use std::map::HashMap; use lib::llvm::llvm; @@ -32,7 +34,7 @@ fn source_name(input: input) -> ~str { } } -fn default_configuration(sess: session, argv0: ~str, input: input) -> +fn default_configuration(sess: Session, argv0: ~str, input: input) -> ast::crate_cfg { let libc = match sess.targ_cfg.os { session::os_win32 => ~"msvcrt.dll", @@ -70,7 +72,7 @@ fn append_configuration(cfg: ast::crate_cfg, name: ~str) -> ast::crate_cfg { } } -fn build_configuration(sess: session, argv0: ~str, input: input) -> +fn build_configuration(sess: Session, argv0: ~str, input: input) -> ast::crate_cfg { // Combine the configuration requested by the session (command line) with // some default and generated configuration items @@ -106,7 +108,7 @@ enum 
input { str_input(~str) } -fn parse_input(sess: session, cfg: ast::crate_cfg, input: input) +fn parse_input(sess: Session, cfg: ast::crate_cfg, input: input) -> @ast::crate { match input { file_input(file) => { @@ -145,7 +147,7 @@ impl compile_upto : cmp::Eq { pure fn ne(other: &compile_upto) -> bool { !self.eq(other) } } -fn compile_upto(sess: session, cfg: ast::crate_cfg, +fn compile_upto(sess: Session, cfg: ast::crate_cfg, input: input, upto: compile_upto, outputs: Option) -> {crate: @ast::crate, tcx: Option} { @@ -277,7 +279,7 @@ fn compile_upto(sess: session, cfg: ast::crate_cfg, return {crate: crate, tcx: Some(ty_cx)}; } -fn compile_input(sess: session, cfg: ast::crate_cfg, input: input, +fn compile_input(sess: Session, cfg: ast::crate_cfg, input: input, outdir: &Option, output: &Option) { let upto = if sess.opts.parse_only { cu_parse } @@ -287,7 +289,7 @@ fn compile_input(sess: session, cfg: ast::crate_cfg, input: input, compile_upto(sess, cfg, input, upto, Some(outputs)); } -fn pretty_print_input(sess: session, cfg: ast::crate_cfg, input: input, +fn pretty_print_input(sess: Session, cfg: ast::crate_cfg, input: input, ppm: pp_mode) { fn ann_paren_for_expr(node: pprust::ann_node) { match node { @@ -571,7 +573,7 @@ fn build_session_options(binary: ~str, } fn build_session(sopts: @session::options, - demitter: diagnostic::emitter) -> session { + demitter: diagnostic::emitter) -> Session { let codemap = codemap::new_codemap(); let diagnostic_handler = diagnostic::mk_handler(Some(demitter)); @@ -581,11 +583,10 @@ fn build_session(sopts: @session::options, } fn build_session_(sopts: @session::options, - cm: codemap::codemap, + cm: codemap::CodeMap, demitter: diagnostic::emitter, span_diagnostic_handler: diagnostic::span_handler) - -> session { - + -> Session { let target_cfg = build_target_config(sopts, demitter); let p_s = parse::new_parse_sess_special_handler(span_diagnostic_handler, cm); @@ -595,7 +596,7 @@ fn build_session_(sopts: @session::options, sopts.target_triple, sopts.addl_lib_search_paths); let lint_settings = lint::mk_lint_settings(); - session_(@{targ_cfg: target_cfg, + Session_(@{targ_cfg: target_cfg, opts: sopts, cstore: cstore, parse_sess: p_s, @@ -609,7 +610,7 @@ fn build_session_(sopts: @session::options, lint_settings: lint_settings}) } -fn parse_pretty(sess: session, &&name: ~str) -> pp_mode { +fn parse_pretty(sess: Session, &&name: ~str) -> pp_mode { match name { ~"normal" => ppm_normal, ~"expanded" => ppm_expanded, @@ -624,27 +625,69 @@ fn parse_pretty(sess: session, &&name: ~str) -> pp_mode { } } -fn opts() -> ~[getopts::Opt] { - return ~[optflag(~"h"), optflag(~"help"), - optflag(~"v"), optflag(~"version"), - optflag(~"emit-llvm"), optflagopt(~"pretty"), - optflag(~"ls"), optflag(~"parse-only"), optflag(~"no-trans"), - optflag(~"O"), optopt(~"opt-level"), optmulti(~"L"), optflag(~"S"), - optopt(~"o"), optopt(~"out-dir"), optflag(~"xg"), - optflag(~"c"), optflag(~"g"), optflag(~"save-temps"), - optopt(~"sysroot"), optopt(~"target"), - optflag(~"jit"), - - optmulti(~"W"), optmulti(~"warn"), - optmulti(~"A"), optmulti(~"allow"), - optmulti(~"D"), optmulti(~"deny"), - optmulti(~"F"), optmulti(~"forbid"), - - optmulti(~"Z"), - - optmulti(~"cfg"), optflag(~"test"), - optflag(~"lib"), optflag(~"bin"), - optflag(~"static"), optflag(~"gc")]; +// rustc command line options +fn optgroups() -> ~[getopts::groups::OptGroup] { + ~[ + optflag(~"", ~"bin", ~"Compile an executable crate (default)"), + optflag(~"c", ~"", ~"Compile and assemble, but do not link"), + optmulti(~"", 
~"cfg", ~"Configure the compilation + environment", ~"SPEC"), + optflag(~"", ~"emit-llvm", + ~"Produce an LLVM bitcode file"), + optflag(~"g", ~"", ~"Produce debug info (experimental)"), + optflag(~"", ~"gc", ~"Garbage collect shared data (experimental)"), + optflag(~"h", ~"help",~"Display this message"), + optmulti(~"L", ~"", ~"Add a directory to the library search path", + ~"PATH"), + optflag(~"", ~"lib", ~"Compile a library crate"), + optflag(~"", ~"ls", ~"List the symbols defined by a library crate"), + optflag(~"", ~"jit", ~"Execute using JIT (experimental)"), + optflag(~"", ~"no-trans", + ~"Run all passes except translation; no output"), + optflag(~"O", ~"", ~"Equivalent to --opt-level=2"), + optopt(~"o", ~"", ~"Write output to ", ~"FILENAME"), + optopt(~"", ~"opt-level", + ~"Optimize with possible levels 0-3", ~"LEVEL"), + optopt( ~"", ~"out-dir", + ~"Write output to compiler-chosen filename + in ", ~"DIR"), + optflag(~"", ~"parse-only", + ~"Parse only; do not compile, assemble, or link"), + optflagopt(~"", ~"pretty", + ~"Pretty-print the input instead of compiling; + valid types are: normal (un-annotated source), + expanded (crates expanded), + typed (crates expanded, with type annotations), + or identified (fully parenthesized, + AST nodes and blocks with IDs)", ~"TYPE"), + optflag(~"S", ~"", ~"Compile only; do not assemble or link"), + optflag(~"", ~"xg", ~"Extra debugging info (experimental)"), + optflag(~"", ~"save-temps", + ~"Write intermediate files (.bc, .opt.bc, .o) + in addition to normal output"), + optflag(~"", ~"static", + ~"Use or produce static libraries or binaries + (experimental)"), + optopt(~"", ~"sysroot", + ~"Override the system root", ~"PATH"), + optflag(~"", ~"test", ~"Build a test harness"), + optopt(~"", ~"target", + ~"Target triple cpu-manufacturer-kernel[-os] + to compile for (see + http://sources.redhat.com/autobook/autobook/autobook_17.html + for detail)", ~"TRIPLE"), + optmulti(~"W", ~"warn", + ~"Set lint warnings", ~"OPT"), + optmulti(~"A", ~"allow", + ~"Set lint allowed", ~"OPT"), + optmulti(~"D", ~"deny", + ~"Set lint denied", ~"OPT"), + optmulti(~"F", ~"forbid", + ~"Set lint forbidden", ~"OPT"), + optmulti(~"Z", ~"", ~"Set internal debugging options", "FLAG"), + optflag( ~"v", ~"version", + ~"Print version info and exit"), + ] } type output_filenames = @{out_filename:Path, obj_filename:Path}; @@ -652,7 +695,7 @@ type output_filenames = @{out_filename:Path, obj_filename:Path}; fn build_output_filenames(input: input, odir: &Option, ofile: &Option, - sess: session) + sess: Session) -> output_filenames { let obj_path; let out_path; @@ -728,7 +771,7 @@ fn early_error(emitter: diagnostic::emitter, msg: ~str) -> ! 
{ fail; } -fn list_metadata(sess: session, path: &Path, out: io::Writer) { +fn list_metadata(sess: Session, path: &Path, out: io::Writer) { metadata::loader::list_file_metadata( sess.parse_sess.interner, session::sess_os_to_meta_os(sess.targ_cfg.os), path, out); @@ -742,7 +785,7 @@ mod test { #[test] fn test_switch_implies_cfg_test() { let matches = - match getopts::getopts(~[~"--test"], opts()) { + match getopts(~[~"--test"], optgroups()) { Ok(m) => m, Err(f) => fail ~"test_switch_implies_cfg_test: " + getopts::fail_str(f) @@ -759,7 +802,7 @@ mod test { #[test] fn test_switch_implies_cfg_test_unless_cfg_test() { let matches = - match getopts::getopts(~[~"--test", ~"--cfg=test"], opts()) { + match getopts(~[~"--test", ~"--cfg=test"], optgroups()) { Ok(m) => m, Err(f) => { fail ~"test_switch_implies_cfg_test_unless_cfg_test: " + diff --git a/src/rustc/driver/rustc.rs b/src/rustc/driver/rustc.rs index 5833723ec101b..a6de92ceeb3e1 100644 --- a/src/rustc/driver/rustc.rs +++ b/src/rustc/driver/rustc.rs @@ -16,6 +16,7 @@ use io::ReaderUtil; use std::getopts; use std::map::HashMap; use getopts::{opt_present}; +use getopts::groups; use rustc::driver::driver::*; use syntax::codemap; use syntax::diagnostic; @@ -31,46 +32,11 @@ fn version(argv0: &str) { } fn usage(argv0: &str) { - io::println(fmt!("Usage: %s [options] \n", argv0) + - ~" -Options: - - --bin Compile an executable crate (default) - -c Compile and assemble, but do not link - --cfg Configure the compilation environment - --emit-llvm Produce an LLVM bitcode file - -g Produce debug info (experimental) - --gc Garbage collect shared data (experimental/temporary) - -h --help Display this message - -L Add a directory to the library search path - --lib Compile a library crate - --ls List the symbols defined by a compiled library crate - --jit Execute using JIT (experimental) - --no-trans Run all passes except translation; no output - -O Equivalent to --opt-level=2 - -o Write output to - --opt-level Optimize with possible levels 0-3 - --out-dir Write output to compiler-chosen filename in - --parse-only Parse only; do not compile, assemble, or link - --pretty [type] Pretty-print the input instead of compiling; - valid types are: normal (un-annotated source), - expanded (crates expanded), typed (crates expanded, - with type annotations), or identified (fully - parenthesized, AST nodes and blocks with IDs) - -S Compile only; do not assemble or link - --save-temps Write intermediate files (.bc, .opt.bc, .o) - in addition to normal output - --static Use or produce static libraries or binaries - (experimental) - --sysroot Override the system root - --test Build a test harness - --target Target cpu-manufacturer-kernel[-os] to compile for - (default: host triple) - (see http://sources.redhat.com/autobook/autobook/ - autobook_17.html for detail) - -W help Print 'lint' options and default settings - -Z help Print internal options for debugging rustc - -v --version Print version info and exit + let message = fmt!("Usage: %s [OPTIONS] INPUT", argv0); + io::println(groups::usage(message, optgroups()) + + ~"Additional help: + -W help Print 'lint' options and default settings + -Z help Print internal options for debugging rustc "); } @@ -127,7 +93,7 @@ fn run_compiler(args: &~[~str], demitter: diagnostic::emitter) { if args.is_empty() { usage(binary); return; } let matches = - match getopts::getopts(args, opts()) { + match getopts::groups::getopts(args, optgroups()) { Ok(m) => m, Err(f) => { early_error(demitter, getopts::fail_str(f)) @@ -235,7 +201,7 @@ fn 
monitor(+f: fn~(diagnostic::emitter)) { // The 'diagnostics emitter'. Every error, warning, etc. should // go through this function. - let demitter = fn@(cmsp: Option<(codemap::codemap, codemap::span)>, + let demitter = fn@(cmsp: Option<(codemap::CodeMap, codemap::span)>, msg: &str, lvl: diagnostic::level) { if lvl == diagnostic::fatal { comm::send(ch, fatal); @@ -267,7 +233,7 @@ fn monitor(+f: fn~(diagnostic::emitter)) { ~"try running with RUST_LOG=rustc=0,::rt::backtrace \ to get further details and report the results \ to github.com/mozilla/rust/issues" - ]/_.each |note| { + ].each |note| { diagnostic::emit(None, *note, diagnostic::note) } } diff --git a/src/rustc/driver/session.rs b/src/rustc/driver/session.rs index 550656c23df69..ed73bcb6d7259 100644 --- a/src/rustc/driver/session.rs +++ b/src/rustc/driver/session.rs @@ -127,24 +127,24 @@ type options = type crate_metadata = {name: ~str, data: ~[u8]}; -type session_ = {targ_cfg: @config, +type Session_ = {targ_cfg: @config, opts: @options, - cstore: metadata::cstore::cstore, + cstore: metadata::cstore::CStore, parse_sess: parse_sess, - codemap: codemap::codemap, + codemap: codemap::CodeMap, // For a library crate, this is always none mut main_fn: Option<(node_id, codemap::span)>, span_diagnostic: diagnostic::span_handler, - filesearch: filesearch::filesearch, + filesearch: filesearch::FileSearch, mut building_library: bool, working_dir: Path, lint_settings: lint::lint_settings}; -enum session { - session_(@session_) +enum Session { + Session_(@Session_) } -impl session { +impl Session { fn span_fatal(sp: span, msg: ~str) -> ! { self.span_diagnostic.span_fatal(sp, msg) } @@ -270,7 +270,7 @@ fn basic_options() -> @options { } // Seems out of place, but it uses session, so I'm putting it here -fn expect(sess: session, opt: Option, msg: fn() -> ~str) -> T { +fn expect(sess: Session, opt: Option, msg: fn() -> ~str) -> T { diagnostic::expect(sess.diagnostic(), opt, msg) } diff --git a/src/rustc/front/core_inject.rs b/src/rustc/front/core_inject.rs index f198a2ca79dc3..e9be56e7d4803 100644 --- a/src/rustc/front/core_inject.rs +++ b/src/rustc/front/core_inject.rs @@ -1,4 +1,4 @@ -use driver::session::session; +use driver::session::Session; use syntax::codemap; use syntax::ast; use syntax::ast_util::*; @@ -6,7 +6,7 @@ use syntax::attr; export maybe_inject_libcore_ref; -fn maybe_inject_libcore_ref(sess: session, +fn maybe_inject_libcore_ref(sess: Session, crate: @ast::crate) -> @ast::crate { if use_core(crate) { inject_libcore_ref(sess, crate) @@ -19,7 +19,7 @@ fn use_core(crate: @ast::crate) -> bool { !attr::attrs_contains_name(crate.node.attrs, ~"no_core") } -fn inject_libcore_ref(sess: session, +fn inject_libcore_ref(sess: Session, crate: @ast::crate) -> @ast::crate { fn spanned(x: T) -> @ast::spanned { diff --git a/src/rustc/front/intrinsic_inject.rs b/src/rustc/front/intrinsic_inject.rs index 8fd885e8f8b59..ac74bac3f2f16 100644 --- a/src/rustc/front/intrinsic_inject.rs +++ b/src/rustc/front/intrinsic_inject.rs @@ -1,10 +1,10 @@ -use driver::session::session; +use driver::session::Session; use syntax::parse; use syntax::ast; export inject_intrinsic; -fn inject_intrinsic(sess: session, +fn inject_intrinsic(sess: Session, crate: @ast::crate) -> @ast::crate { let intrinsic_module = @include_str!("intrinsic.rs"); diff --git a/src/rustc/front/test.rs b/src/rustc/front/test.rs index 1a6cc6dd895e0..f0c9de4f2a245 100644 --- a/src/rustc/front/test.rs +++ b/src/rustc/front/test.rs @@ -7,7 +7,7 @@ use syntax::fold; use syntax::print::pprust; use 
syntax::codemap::span; use driver::session; -use session::session; +use session::Session; use syntax::attr; use dvec::DVec; @@ -19,14 +19,14 @@ type test = {span: span, path: ~[ast::ident], ignore: bool, should_fail: bool}; type test_ctxt = - @{sess: session::session, + @{sess: session::Session, crate: @ast::crate, mut path: ~[ast::ident], testfns: DVec}; // Traverse the crate, collecting all the test functions, eliding any // existing main functions, and synthesizing a main test harness -fn modify_for_testing(sess: session::session, +fn modify_for_testing(sess: session::Session, crate: @ast::crate) -> @ast::crate { if sess.opts.test { @@ -36,7 +36,7 @@ fn modify_for_testing(sess: session::session, } } -fn generate_test_harness(sess: session::session, +fn generate_test_harness(sess: session::Session, crate: @ast::crate) -> @ast::crate { let cx: test_ctxt = @{sess: sess, @@ -261,13 +261,13 @@ fn mk_path(cx: test_ctxt, path: ~[ast::ident]) -> ~[ast::ident] { else { vec::append(~[cx.sess.ident_of(~"std")], path) } } -// The ast::ty of ~[std::test::test_desc] -fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty { +// The ast::Ty of ~[std::test::test_desc] +fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::Ty { let test_desc_ty_path = path_node(mk_path(cx, ~[cx.sess.ident_of(~"test"), cx.sess.ident_of(~"TestDesc")])); - let test_desc_ty: ast::ty = + let test_desc_ty: ast::Ty = {id: cx.sess.next_node_id(), node: ast::ty_path(test_desc_ty_path, cx.sess.next_node_id()), span: dummy_sp()}; diff --git a/src/rustc/metadata/common.rs b/src/rustc/metadata/common.rs index 1857abf2cf2f9..06f1dfdab1aa5 100644 --- a/src/rustc/metadata/common.rs +++ b/src/rustc/metadata/common.rs @@ -124,5 +124,9 @@ enum astencode_tag { // Reserves 0x50 -- 0x6f tag_table_legacy_boxed_trait = 0x63 } +const tag_item_trait_method_sort: uint = 0x70; + +const tag_item_impl_type_basename: uint = 0x71; + type link_meta = {name: ~str, vers: ~str, extras_hash: ~str}; diff --git a/src/rustc/metadata/creader.rs b/src/rustc/metadata/creader.rs index 3ed56a1953e68..3080426e5319c 100644 --- a/src/rustc/metadata/creader.rs +++ b/src/rustc/metadata/creader.rs @@ -7,7 +7,7 @@ use syntax::visit; use syntax::codemap::span; use std::map::HashMap; use syntax::print::pprust; -use filesearch::filesearch; +use filesearch::FileSearch; use common::*; use dvec::DVec; use syntax::parse::token::ident_interner; @@ -17,7 +17,7 @@ export read_crates; // Traverses an AST, reading all the information about use'd crates and extern // libraries necessary for later resolving, typechecking, linking, etc. 
fn read_crates(diag: span_handler, crate: ast::crate, - cstore: cstore::cstore, filesearch: filesearch, + cstore: cstore::CStore, filesearch: FileSearch, os: loader::os, static: bool, intr: @ident_interner) { let e = @{diag: diag, filesearch: filesearch, @@ -88,8 +88,8 @@ fn warn_if_multiple_versions(e: env, diag: span_handler, } type env = @{diag: span_handler, - filesearch: filesearch, - cstore: cstore::cstore, + filesearch: FileSearch, + cstore: cstore::CStore, os: loader::os, static: bool, crate_cache: DVec, diff --git a/src/rustc/metadata/csearch.rs b/src/rustc/metadata/csearch.rs index 5f5f938541f1d..141613c2240d8 100644 --- a/src/rustc/metadata/csearch.rs +++ b/src/rustc/metadata/csearch.rs @@ -23,7 +23,10 @@ export get_region_param; export get_enum_variants; export get_impls_for_mod; export get_trait_methods; +export get_provided_trait_methods; export get_method_names_if_trait; +export get_type_name_if_impl; +export get_static_methods_if_impl; export get_item_attrs; export each_path; export get_type; @@ -31,19 +34,32 @@ export get_impl_traits; export get_impl_method; export get_item_path; export maybe_get_item_ast, found_ast, found, found_parent, not_found; +export ProvidedTraitMethodInfo; +export StaticMethodInfo; -fn get_symbol(cstore: cstore::cstore, def: ast::def_id) -> ~str { +struct ProvidedTraitMethodInfo { + ty: ty::method, + def_id: ast::def_id +} + +struct StaticMethodInfo { + ident: ast::ident, + def_id: ast::def_id, + purity: ast::purity +} + +fn get_symbol(cstore: cstore::CStore, def: ast::def_id) -> ~str { let cdata = cstore::get_crate_data(cstore, def.crate).data; return decoder::get_symbol(cdata, def.node); } -fn get_type_param_count(cstore: cstore::cstore, def: ast::def_id) -> uint { +fn get_type_param_count(cstore: cstore::CStore, def: ast::def_id) -> uint { let cdata = cstore::get_crate_data(cstore, def.crate).data; return decoder::get_type_param_count(cdata, def.node); } /// Iterates over all the paths in the given crate. 
-fn each_path(cstore: cstore::cstore, cnum: ast::crate_num, +fn each_path(cstore: cstore::CStore, cnum: ast::crate_num, f: fn(decoder::path_entry) -> bool) { let crate_data = cstore::get_crate_data(cstore, cnum); decoder::each_path(cstore.intr, crate_data, f); @@ -84,7 +100,7 @@ fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id) return decoder::get_enum_variants(cstore.intr, cdata, def.node, tcx) } -fn get_impls_for_mod(cstore: cstore::cstore, def: ast::def_id, +fn get_impls_for_mod(cstore: cstore::CStore, def: ast::def_id, name: Option) -> @~[@decoder::_impl] { let cdata = cstore::get_crate_data(cstore, def.crate); @@ -99,14 +115,33 @@ fn get_trait_methods(tcx: ty::ctxt, def: ast::def_id) -> @~[ty::method] { decoder::get_trait_methods(cstore.intr, cdata, def.node, tcx) } -fn get_method_names_if_trait(cstore: cstore::cstore, def: ast::def_id) +fn get_provided_trait_methods(tcx: ty::ctxt, def: ast::def_id) -> + ~[ProvidedTraitMethodInfo] { + let cstore = tcx.cstore; + let cdata = cstore::get_crate_data(cstore, def.crate); + decoder::get_provided_trait_methods(cstore.intr, cdata, def.node, tcx) +} + +fn get_method_names_if_trait(cstore: cstore::CStore, def: ast::def_id) -> Option<@DVec<(ast::ident, ast::self_ty_)>> { let cdata = cstore::get_crate_data(cstore, def.crate); return decoder::get_method_names_if_trait(cstore.intr, cdata, def.node); } -fn get_item_attrs(cstore: cstore::cstore, +fn get_type_name_if_impl(cstore: cstore::CStore, def: ast::def_id) -> + Option { + let cdata = cstore::get_crate_data(cstore, def.crate); + decoder::get_type_name_if_impl(cstore.intr, cdata, def.node) +} + +fn get_static_methods_if_impl(cstore: cstore::CStore, def: ast::def_id) -> + Option<~[StaticMethodInfo]> { + let cdata = cstore::get_crate_data(cstore, def.crate); + decoder::get_static_methods_if_impl(cstore.intr, cdata, def.node) +} + +fn get_item_attrs(cstore: cstore::CStore, def_id: ast::def_id, f: fn(~[@ast::meta_item])) { @@ -126,7 +161,7 @@ fn get_type(tcx: ty::ctxt, def: ast::def_id) -> ty::ty_param_bounds_and_ty { decoder::get_type(cdata, def.node, tcx) } -fn get_region_param(cstore: metadata::cstore::cstore, +fn get_region_param(cstore: metadata::cstore::CStore, def: ast::def_id) -> Option { let cdata = cstore::get_crate_data(cstore, def.crate); return decoder::get_region_param(cdata, def.node); @@ -163,7 +198,7 @@ fn get_impl_traits(tcx: ty::ctxt, def: ast::def_id) -> ~[ty::t] { decoder::get_impl_traits(cdata, def.node, tcx) } -fn get_impl_method(cstore: cstore::cstore, +fn get_impl_method(cstore: cstore::CStore, def: ast::def_id, mname: ast::ident) -> ast::def_id { let cdata = cstore::get_crate_data(cstore, def.crate); @@ -174,7 +209,7 @@ fn get_impl_method(cstore: cstore::cstore, for their methods (so that get_trait_methods can be reused to get class methods), classes require a slightly different version of get_impl_method. Sigh. */ -fn get_class_method(cstore: cstore::cstore, +fn get_class_method(cstore: cstore::CStore, def: ast::def_id, mname: ast::ident) -> ast::def_id { let cdata = cstore::get_crate_data(cstore, def.crate); @@ -182,7 +217,7 @@ fn get_class_method(cstore: cstore::cstore, } /* If def names a class with a dtor, return it. Otherwise, return none. 
*/ -fn class_dtor(cstore: cstore::cstore, def: ast::def_id) +fn class_dtor(cstore: cstore::CStore, def: ast::def_id) -> Option { let cdata = cstore::get_crate_data(cstore, def.crate); decoder::class_dtor(cdata, def.node) diff --git a/src/rustc/metadata/cstore.rs b/src/rustc/metadata/cstore.rs index 483f7ea06a99a..4bbca3a06050c 100644 --- a/src/rustc/metadata/cstore.rs +++ b/src/rustc/metadata/cstore.rs @@ -6,7 +6,7 @@ use std::map::HashMap; use syntax::{ast, attr}; use syntax::parse::token::ident_interner; -export cstore; +export CStore; export cnum_map; export crate_metadata; export mk_cstore; @@ -49,7 +49,7 @@ type crate_metadata = @{name: ~str, // other modules to access the cstore's private data. This could also be // achieved with an obj, but at the expense of a vtable. Not sure if this is a // good pattern or not. -enum cstore { private(cstore_private), } +enum CStore { private(cstore_private), } type cstore_private = @{metas: map::HashMap, @@ -64,11 +64,11 @@ type cstore_private = type use_crate_map = map::HashMap; // Internal method to retrieve the data from the cstore -pure fn p(cstore: cstore) -> cstore_private { +pure fn p(cstore: CStore) -> cstore_private { match cstore { private(p) => p } } -fn mk_cstore(intr: @ident_interner) -> cstore { +fn mk_cstore(intr: @ident_interner) -> CStore { let meta_cache = map::HashMap(); let crate_map = map::HashMap(); let mod_path_map = HashMap(); @@ -81,21 +81,21 @@ fn mk_cstore(intr: @ident_interner) -> cstore { intr: intr}); } -fn get_crate_data(cstore: cstore, cnum: ast::crate_num) -> crate_metadata { +fn get_crate_data(cstore: CStore, cnum: ast::crate_num) -> crate_metadata { return p(cstore).metas.get(cnum); } -fn get_crate_hash(cstore: cstore, cnum: ast::crate_num) -> ~str { +fn get_crate_hash(cstore: CStore, cnum: ast::crate_num) -> ~str { let cdata = get_crate_data(cstore, cnum); return decoder::get_crate_hash(cdata.data); } -fn get_crate_vers(cstore: cstore, cnum: ast::crate_num) -> ~str { +fn get_crate_vers(cstore: CStore, cnum: ast::crate_num) -> ~str { let cdata = get_crate_data(cstore, cnum); return decoder::get_crate_vers(cdata.data); } -fn set_crate_data(cstore: cstore, cnum: ast::crate_num, +fn set_crate_data(cstore: CStore, cnum: ast::crate_num, data: crate_metadata) { p(cstore).metas.insert(cnum, data); for vec::each(decoder::get_crate_module_paths(cstore.intr, data)) |dp| { @@ -105,25 +105,25 @@ fn set_crate_data(cstore: cstore, cnum: ast::crate_num, } } -fn have_crate_data(cstore: cstore, cnum: ast::crate_num) -> bool { +fn have_crate_data(cstore: CStore, cnum: ast::crate_num) -> bool { return p(cstore).metas.contains_key(cnum); } -fn iter_crate_data(cstore: cstore, i: fn(ast::crate_num, crate_metadata)) { +fn iter_crate_data(cstore: CStore, i: fn(ast::crate_num, crate_metadata)) { for p(cstore).metas.each |k,v| { i(k, v);}; } -fn add_used_crate_file(cstore: cstore, lib: &Path) { +fn add_used_crate_file(cstore: CStore, lib: &Path) { if !vec::contains(p(cstore).used_crate_files, lib) { p(cstore).used_crate_files.push(copy *lib); } } -fn get_used_crate_files(cstore: cstore) -> ~[Path] { +fn get_used_crate_files(cstore: CStore) -> ~[Path] { return p(cstore).used_crate_files; } -fn add_used_library(cstore: cstore, lib: ~str) -> bool { +fn add_used_library(cstore: CStore, lib: ~str) -> bool { assert lib != ~""; if vec::contains(p(cstore).used_libraries, &lib) { return false; } @@ -131,31 +131,31 @@ fn add_used_library(cstore: cstore, lib: ~str) -> bool { return true; } -fn get_used_libraries(cstore: cstore) -> ~[~str] { +fn 
get_used_libraries(cstore: CStore) -> ~[~str] { return p(cstore).used_libraries; } -fn add_used_link_args(cstore: cstore, args: ~str) { +fn add_used_link_args(cstore: CStore, args: ~str) { p(cstore).used_link_args.push_all(str::split_char(args, ' ')); } -fn get_used_link_args(cstore: cstore) -> ~[~str] { +fn get_used_link_args(cstore: CStore) -> ~[~str] { return p(cstore).used_link_args; } -fn add_use_stmt_cnum(cstore: cstore, use_id: ast::node_id, +fn add_use_stmt_cnum(cstore: CStore, use_id: ast::node_id, cnum: ast::crate_num) { p(cstore).use_crate_map.insert(use_id, cnum); } -fn find_use_stmt_cnum(cstore: cstore, +fn find_use_stmt_cnum(cstore: CStore, use_id: ast::node_id) -> Option { p(cstore).use_crate_map.find(use_id) } // returns hashes of crates directly used by this crate. Hashes are // sorted by crate name. -fn get_dep_hashes(cstore: cstore) -> ~[~str] { +fn get_dep_hashes(cstore: CStore) -> ~[~str] { type crate_hash = {name: ~str, hash: ~str}; let mut result = ~[]; @@ -175,7 +175,7 @@ fn get_dep_hashes(cstore: cstore) -> ~[~str] { return vec::map(sorted, mapper); } -fn get_path(cstore: cstore, d: ast::def_id) -> ~[~str] { +fn get_path(cstore: CStore, d: ast::def_id) -> ~[~str] { option::map_default(&p(cstore).mod_path_map.find(d), ~[], |ds| str::split_str(**ds, ~"::")) } diff --git a/src/rustc/metadata/decoder.rs b/src/rustc/metadata/decoder.rs index e639449a8573f..4e30132b1a73f 100644 --- a/src/rustc/metadata/decoder.rs +++ b/src/rustc/metadata/decoder.rs @@ -19,6 +19,7 @@ use syntax::diagnostic::span_handler; use common::*; use syntax::parse::token::ident_interner; use hash::{Hash, HashUtil}; +use csearch::{ProvidedTraitMethodInfo, StaticMethodInfo}; export class_dtor; export get_class_fields; @@ -30,6 +31,7 @@ export get_type_param_count; export get_impl_traits; export get_class_method; export get_impl_method; +export get_static_methods_if_impl; export lookup_def; export resolve_path; export get_crate_attributes; @@ -40,7 +42,9 @@ export get_crate_hash; export get_crate_vers; export get_impls_for_mod; export get_trait_methods; +export get_provided_trait_methods; export get_method_names_if_trait; +export get_type_name_if_impl; export get_item_attrs; export get_crate_module_paths; export def_like; @@ -164,6 +168,13 @@ fn item_family(item: ebml::Doc) -> Family { } } +fn item_method_sort(item: ebml::Doc) -> char { + for ebml::tagged_docs(item, tag_item_trait_method_sort) |doc| { + return str::from_bytes(ebml::doc_data(doc))[0] as char; + } + return 'r'; +} + fn item_symbol(item: ebml::Doc) -> ~str { let sym = ebml::get_doc(item, tag_items_data_item_symbol); return str::from_bytes(ebml::doc_data(sym)); @@ -176,6 +187,12 @@ fn item_parent_item(d: ebml::Doc) -> Option { None } +fn translated_parent_item_opt(cnum: ast::crate_num, d: ebml::Doc) -> + Option { + let trait_did_opt = item_parent_item(d); + trait_did_opt.map(|trait_did| {crate: cnum, node: trait_did.node}) +} + fn item_reqd_and_translated_parent_item(cnum: ast::crate_num, d: ebml::Doc) -> ast::def_id { let trait_did = item_parent_item(d).expect(~"item without parent"); @@ -312,16 +329,16 @@ fn item_to_def_like(item: ebml::Doc, did: ast::def_id, cnum: ast::crate_num) PureFn => dl_def(ast::def_fn(did, ast::pure_fn)), ForeignFn => dl_def(ast::def_fn(did, ast::extern_fn)), UnsafeStaticMethod => { - let trait_did = item_reqd_and_translated_parent_item(cnum, item); - dl_def(ast::def_static_method(did, trait_did, ast::unsafe_fn)) + let trait_did_opt = translated_parent_item_opt(cnum, item); + dl_def(ast::def_static_method(did, 
trait_did_opt, ast::unsafe_fn)) } StaticMethod => { - let trait_did = item_reqd_and_translated_parent_item(cnum, item); - dl_def(ast::def_static_method(did, trait_did, ast::impure_fn)) + let trait_did_opt = translated_parent_item_opt(cnum, item); + dl_def(ast::def_static_method(did, trait_did_opt, ast::impure_fn)) } PureStaticMethod => { - let trait_did = item_reqd_and_translated_parent_item(cnum, item); - dl_def(ast::def_static_method(did, trait_did, ast::pure_fn)) + let trait_did_opt = translated_parent_item_opt(cnum, item); + dl_def(ast::def_static_method(did, trait_did_opt, ast::pure_fn)) } Type | ForeignType => dl_def(ast::def_ty(did)), Mod => dl_def(ast::def_mod(did)), @@ -589,13 +606,12 @@ fn get_enum_variants(intr: @ident_interner, cdata: cmd, id: ast::node_id, let ctor_ty = item_type({crate: cdata.cnum, node: id}, item, tcx, cdata); let name = item_name(intr, item); - let mut arg_tys: ~[ty::t] = ~[]; - match ty::get(ctor_ty).sty { - ty::ty_fn(f) => { - for f.sig.inputs.each |a| { arg_tys.push(a.ty); } - } - _ => { /* Nullary enum variant. */ } - } + let arg_tys = match ty::get(ctor_ty).sty { + ty::ty_fn(f) => f.sig.inputs.map(|a| a.ty), + + // Nullary enum variant. + _ => ~[], + }; match variant_disr_val(item) { Some(val) => { disr_val = val; } _ => { /* empty */ } @@ -701,6 +717,7 @@ fn get_trait_methods(intr: @ident_interner, cdata: cmd, id: ast::node_id, let bounds = item_ty_param_bounds(mth, tcx, cdata); let name = item_name(intr, mth); let ty = doc_type(mth, tcx, cdata); + let def_id = item_def_id(mth, cdata); let fty = match ty::get(ty).sty { ty::ty_fn(f) => f, _ => { @@ -708,14 +725,52 @@ fn get_trait_methods(intr: @ident_interner, cdata: cmd, id: ast::node_id, ~"get_trait_methods: id has non-function type"); } }; let self_ty = get_self_ty(mth); - result.push({ident: name, tps: bounds, fty: fty, - self_ty: self_ty, - vis: ast::public}); + result.push({ident: name, tps: bounds, fty: fty, self_ty: self_ty, + vis: ast::public, def_id: def_id}); } debug!("get_trait_methods: }"); @result } +fn get_provided_trait_methods(intr: @ident_interner, cdata: cmd, + id: ast::node_id, tcx: ty::ctxt) -> + ~[ProvidedTraitMethodInfo] { + let data = cdata.data; + let item = lookup_item(id, data); + let mut result = ~[]; + + for ebml::tagged_docs(item, tag_item_trait_method) |mth| { + if item_method_sort(mth) != 'p' { loop; } + + let did = item_def_id(mth, cdata); + + let bounds = item_ty_param_bounds(mth, tcx, cdata); + let name = item_name(intr, mth); + let ty = doc_type(mth, tcx, cdata); + + let fty; + match ty::get(ty).sty { + ty::ty_fn(f) => fty = f, + _ => { + tcx.diag.handler().bug(~"get_provided_trait_methods(): id \ + has non-function type"); + } + } + + let self_ty = get_self_ty(mth); + let ty_method = {ident: name, tps: bounds, fty: fty, self_ty: self_ty, + vis: ast::public, def_id: did}; + let provided_trait_method_info = ProvidedTraitMethodInfo { + ty: ty_method, + def_id: did + }; + + vec::push(&mut result, move provided_trait_method_info); + } + + return move result; +} + // If the item in question is a trait, returns its set of methods and // their self types. Otherwise, returns none. This overlaps in an // annoying way with get_trait_methods. 
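[Editorial note, not part of the patch] The decoder hunks above introduce a per-method "sort" tag: `item_method_sort` defaults to `'r'` (required) and `get_provided_trait_methods` keeps only methods tagged `'p'` (provided, i.e. carrying a default body). A small sketch of that branching logic, in present-day Rust with hypothetical names, just to make the convention explicit:

~~~~
// Illustrative only: interpret the 'r'/'p' method-sort byte written by the
// encoder side of this patch.
enum MethodSort {
    Required,
    Provided,
}

fn decode_method_sort(tag: char) -> MethodSort {
    match tag {
        'p' => MethodSort::Provided,
        // the decoder above falls back to 'r' when no sort tag is present
        _ => MethodSort::Required,
    }
}

// get_provided_trait_methods effectively filters for decode_method_sort(..) == Provided.
~~~~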
@@ -736,6 +791,67 @@ fn get_method_names_if_trait(intr: @ident_interner, cdata: cmd, return Some(resulting_methods); } +fn get_type_name_if_impl(intr: @ident_interner, + cdata: cmd, + node_id: ast::node_id) -> Option { + let item = lookup_item(node_id, cdata.data); + if item_family(item) != Impl { + return None; + } + + for ebml::tagged_docs(item, tag_item_impl_type_basename) |doc| { + return Some(intr.intern(@str::from_bytes(ebml::doc_data(doc)))); + } + + return None; +} + +fn get_static_methods_if_impl(intr: @ident_interner, + cdata: cmd, + node_id: ast::node_id) -> + Option<~[StaticMethodInfo]> { + let item = lookup_item(node_id, cdata.data); + if item_family(item) != Impl { + return None; + } + + // If this impl has a trait ref, don't consider it. + for ebml::tagged_docs(item, tag_impl_trait) |_doc| { + return None; + } + + let impl_method_ids = DVec(); + for ebml::tagged_docs(item, tag_item_impl_method) |impl_method_doc| { + impl_method_ids.push(parse_def_id(ebml::doc_data(impl_method_doc))); + } + + let static_impl_methods = DVec(); + for impl_method_ids.each |impl_method_id| { + let impl_method_doc = lookup_item(impl_method_id.node, cdata.data); + let family = item_family(impl_method_doc); + match family { + StaticMethod | UnsafeStaticMethod | PureStaticMethod => { + let purity; + match item_family(impl_method_doc) { + StaticMethod => purity = ast::impure_fn, + UnsafeStaticMethod => purity = ast::unsafe_fn, + PureStaticMethod => purity = ast::pure_fn, + _ => fail + } + + static_impl_methods.push(StaticMethodInfo { + ident: item_name(intr, impl_method_doc), + def_id: item_def_id(impl_method_doc, cdata), + purity: purity + }); + } + _ => {} + } + } + + return Some(dvec::unwrap(move static_impl_methods)); +} + fn get_item_attrs(cdata: cmd, node_id: ast::node_id, f: fn(~[@ast::meta_item])) { diff --git a/src/rustc/metadata/encoder.rs b/src/rustc/metadata/encoder.rs index 2fdd39a2ca483..a1d85a63ee148 100644 --- a/src/rustc/metadata/encoder.rs +++ b/src/rustc/metadata/encoder.rs @@ -52,7 +52,7 @@ type encode_parms = { item_symbols: HashMap, discrim_symbols: HashMap, link_meta: link_meta, - cstore: cstore::cstore, + cstore: cstore::CStore, encode_inlined_item: encode_inlined_item }; @@ -77,7 +77,7 @@ enum encode_ctxt = { item_symbols: HashMap, discrim_symbols: HashMap, link_meta: link_meta, - cstore: cstore::cstore, + cstore: cstore::CStore, encode_inlined_item: encode_inlined_item, type_abbrevs: abbrev_map }; @@ -90,6 +90,12 @@ fn encode_name(ecx: @encode_ctxt, ebml_w: ebml::Serializer, name: ident) { ebml_w.wr_tagged_str(tag_paths_data_name, ecx.tcx.sess.str_of(name)); } +fn encode_impl_type_basename(ecx: @encode_ctxt, ebml_w: ebml::Serializer, + name: ident) { + ebml_w.wr_tagged_str(tag_item_impl_type_basename, + ecx.tcx.sess.str_of(name)); +} + fn encode_def_id(ebml_w: ebml::Serializer, id: def_id) { ebml_w.wr_tagged_str(tag_def_id, def_to_str(id)); } @@ -388,6 +394,12 @@ fn encode_self_type(ebml_w: ebml::Serializer, self_type: ast::self_ty_) { ebml_w.end_tag(); } +fn encode_method_sort(ebml_w: ebml::Serializer, sort: char) { + ebml_w.start_tag(tag_item_trait_method_sort); + ebml_w.writer.write(&[ sort as u8 ]); + ebml_w.end_tag(); +} + /* Returns an index of items in this class */ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::Serializer, id: node_id, path: ast_map::path, @@ -478,7 +490,12 @@ fn encode_info_for_method(ecx: @encode_ctxt, ebml_w: ebml::Serializer, ecx.tcx.sess.str_of(m.ident), all_tps.len()); ebml_w.start_tag(tag_items_data_item); encode_def_id(ebml_w, 
local_def(m.id)); - encode_family(ebml_w, purity_fn_family(m.purity)); + match m.self_ty.node { + ast::sty_static => { + encode_family(ebml_w, purity_static_method_family(m.purity)); + } + _ => encode_family(ebml_w, purity_fn_family(m.purity)) + } encode_type_param_bounds(ebml_w, ecx, all_tps); encode_type(ecx, ebml_w, node_id_to_type(ecx.tcx, m.id)); encode_name(ecx, ebml_w, m.ident); @@ -695,7 +712,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Serializer, encode_index(ebml_w, bkts, write_int); ebml_w.end_tag(); } - item_impl(tps, opt_trait, _, methods) => { + item_impl(tps, opt_trait, ty, methods) => { add_to_index(); ebml_w.start_tag(tag_items_data_item); encode_def_id(ebml_w, local_def(item.id)); @@ -705,6 +722,13 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Serializer, encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id)); encode_name(ecx, ebml_w, item.ident); encode_attributes(ebml_w, item.attrs); + match ty.node { + ast::ty_path(path, _) if path.idents.len() == 1 => { + encode_impl_type_basename(ecx, ebml_w, + ast_util::path_to_ident(path)); + } + _ => {} + } for methods.each |m| { ebml_w.start_tag(tag_item_impl_method); ebml_w.writer.write(str::to_bytes(def_to_str(local_def(m.id)))); @@ -726,6 +750,8 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Serializer, } } item_trait(tps, traits, ms) => { + let provided_methods = dvec::DVec(); + add_to_index(); ebml_w.start_tag(tag_items_data_item); encode_def_id(ebml_w, local_def(item.id)); @@ -746,12 +772,21 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Serializer, encode_type(ecx, ebml_w, ty::mk_fn(tcx, mty.fty)); encode_family(ebml_w, purity_fn_family(mty.fty.meta.purity)); encode_self_type(ebml_w, mty.self_ty); + encode_method_sort(ebml_w, 'r'); ebml_w.end_tag(); } provided(m) => { - encode_info_for_method(ecx, ebml_w, path, - should_inline(m.attrs), item.id, - m, m.tps); + provided_methods.push(m); + + ebml_w.start_tag(tag_item_trait_method); + encode_def_id(ebml_w, local_def(m.id)); + encode_name(ecx, ebml_w, mty.ident); + encode_type_param_bounds(ebml_w, ecx, m.tps); + encode_type(ecx, ebml_w, ty::mk_fn(tcx, mty.fty)); + encode_family(ebml_w, purity_fn_family(mty.fty.meta.purity)); + encode_self_type(ebml_w, mty.self_ty); + encode_method_sort(ebml_w, 'p'); + ebml_w.end_tag(); } } i += 1u; @@ -785,7 +820,12 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Serializer, ebml_w.end_tag(); } - + // Finally, output all the provided methods as items. 
+ for provided_methods.each |m| { + index.push({val: m.id, pos: ebml_w.writer.tell()}); + encode_info_for_method(ecx, ebml_w, path, true, item.id, *m, + m.tps); + } } item_mac(*) => fail ~"item macros unimplemented" } @@ -1013,9 +1053,9 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> ~[attribute] { } fn encode_crate_deps(ecx: @encode_ctxt, ebml_w: ebml::Serializer, - cstore: cstore::cstore) { + cstore: cstore::CStore) { - fn get_ordered_deps(ecx: @encode_ctxt, cstore: cstore::cstore) + fn get_ordered_deps(ecx: @encode_ctxt, cstore: cstore::CStore) -> ~[decoder::crate_dep] { type hashkv = @{key: crate_num, val: cstore::crate_metadata}; @@ -1140,7 +1180,7 @@ fn encode_metadata(parms: encode_parms, crate: @crate) -> ~[u8] { if (parms.tcx.sess.meta_stats()) { - do wr.buf.borrow |v| { + do wr.bytes.borrow |v| { do v.each |e| { if *e == 0 { ecx.stats.zero_bytes += 1; @@ -1173,7 +1213,7 @@ fn encode_metadata(parms: encode_parms, crate: @crate) -> ~[u8] { (do str::as_bytes(&~"rust\x00\x00\x00\x01") |bytes| { vec::slice(*bytes, 0, 8) - }) + flate::deflate_bytes(wr.buf.check_out(|buf| buf)) + }) + flate::deflate_bytes(wr.bytes.check_out(|buf| buf)) } // Get the encoded string for a type diff --git a/src/rustc/metadata/filesearch.rs b/src/rustc/metadata/filesearch.rs index 63370b0932104..b2d20ce56e838 100644 --- a/src/rustc/metadata/filesearch.rs +++ b/src/rustc/metadata/filesearch.rs @@ -3,7 +3,7 @@ // probably just be folded into cstore. use result::Result; -export filesearch; +export FileSearch; export mk_filesearch; export pick; export pick_file; @@ -21,7 +21,7 @@ fn pick_file(file: Path, path: &Path) -> Option { else { option::None } } -trait filesearch { +trait FileSearch { fn sysroot() -> Path; fn lib_search_paths() -> ~[Path]; fn get_target_lib_path() -> Path; @@ -30,11 +30,11 @@ trait filesearch { fn mk_filesearch(maybe_sysroot: Option, target_triple: &str, - addl_lib_search_paths: ~[Path]) -> filesearch { + addl_lib_search_paths: ~[Path]) -> FileSearch { type filesearch_impl = {sysroot: Path, addl_lib_search_paths: ~[Path], target_triple: ~str}; - impl filesearch_impl: filesearch { + impl filesearch_impl: FileSearch { fn sysroot() -> Path { self.sysroot } fn lib_search_paths() -> ~[Path] { let mut paths = self.addl_lib_search_paths; @@ -64,10 +64,10 @@ fn mk_filesearch(maybe_sysroot: Option, debug!("using sysroot = %s", sysroot.to_str()); {sysroot: sysroot, addl_lib_search_paths: addl_lib_search_paths, - target_triple: str::from_slice(target_triple)} as filesearch + target_triple: str::from_slice(target_triple)} as FileSearch } -fn search(filesearch: filesearch, pick: pick) -> Option { +fn search(filesearch: FileSearch, pick: pick) -> Option { let mut rslt = None; for filesearch.lib_search_paths().each |lib_search_path| { debug!("searching %s", lib_search_path.to_str()); diff --git a/src/rustc/metadata/loader.rs b/src/rustc/metadata/loader.rs index 0a8354be71f76..61b8bcf9067b5 100644 --- a/src/rustc/metadata/loader.rs +++ b/src/rustc/metadata/loader.rs @@ -5,7 +5,7 @@ use syntax::{ast, attr}; use syntax::print::pprust; use syntax::codemap::span; use lib::llvm::{False, llvm, mk_object_file, mk_section_iter}; -use filesearch::filesearch; +use filesearch::FileSearch; use io::WriterUtil; use syntax::parse::token::ident_interner; @@ -28,7 +28,7 @@ enum os { type ctxt = { diag: span_handler, - filesearch: filesearch, + filesearch: FileSearch, span: span, ident: ast::ident, metas: ~[@ast::meta_item], @@ -66,7 +66,7 @@ fn libname(cx: ctxt) -> {prefix: ~str, suffix: ~str} { fn 
find_library_crate_aux(cx: ctxt, nn: {prefix: ~str, suffix: ~str}, - filesearch: filesearch::filesearch) -> + filesearch: filesearch::FileSearch) -> Option<{ident: ~str, data: @~[u8]}> { let crate_name = crate_name_from_metas(cx.metas); let prefix: ~str = nn.prefix + crate_name + ~"-"; diff --git a/src/rustc/metadata/tydecode.rs b/src/rustc/metadata/tydecode.rs index 1375ff2d0be08..14aef6db1adbd 100644 --- a/src/rustc/metadata/tydecode.rs +++ b/src/rustc/metadata/tydecode.rs @@ -162,7 +162,7 @@ fn parse_bound_region(st: @pstate) -> ty::bound_region { } } -fn parse_region(st: @pstate) -> ty::region { +fn parse_region(st: @pstate) -> ty::Region { match next(st) { 'b' => { ty::re_bound(parse_bound_region(st)) diff --git a/src/rustc/metadata/tyencode.rs b/src/rustc/metadata/tyencode.rs index 69689b16e1542..941dd35bdf0b4 100644 --- a/src/rustc/metadata/tyencode.rs +++ b/src/rustc/metadata/tyencode.rs @@ -125,7 +125,7 @@ fn enc_substs(w: io::Writer, cx: @ctxt, substs: ty::substs) { w.write_char(']'); } -fn enc_region(w: io::Writer, cx: @ctxt, r: ty::region) { +fn enc_region(w: io::Writer, cx: @ctxt, r: ty::Region) { match r { ty::re_bound(br) => { w.write_char('b'); diff --git a/src/rustc/middle/astencode.rs b/src/rustc/middle/astencode.rs index d1f766dd86724..d264188e65d34 100644 --- a/src/rustc/middle/astencode.rs +++ b/src/rustc/middle/astencode.rs @@ -19,7 +19,7 @@ use middle::{ty, typeck}; use middle::typeck::{method_origin, method_map_entry, vtable_res, vtable_origin}; -use driver::session::session; +use driver::session::Session; use middle::freevars::freevar_entry; use c = metadata::common; use e = metadata::encoder; @@ -136,7 +136,7 @@ fn decode_inlined_item(cdata: cstore::crate_metadata, // ______________________________________________________________________ // Enumerating the IDs which appear in an AST -fn reserve_id_range(sess: session, +fn reserve_id_range(sess: Session, from_id_range: ast_util::id_range) -> ast_util::id_range { // Handle the case of an empty range: if ast_util::empty(from_id_range) { return from_id_range; } @@ -326,8 +326,10 @@ impl ast::def: tr { fn tr(xcx: extended_decode_ctxt) -> ast::def { match self { ast::def_fn(did, p) => { ast::def_fn(did.tr(xcx), p) } - ast::def_static_method(did, did2, p) => { - ast::def_static_method(did.tr(xcx), did2.tr(xcx), p) + ast::def_static_method(did, did2_opt, p) => { + ast::def_static_method(did.tr(xcx), + did2_opt.map(|did2| did2.tr(xcx)), + p) } ast::def_self(nid) => { ast::def_self(xcx.tr_id(nid)) } ast::def_mod(did) => { ast::def_mod(did.tr(xcx)) } @@ -379,8 +381,8 @@ impl ty::AutoRef: tr { } } -impl ty::region: tr { - fn tr(xcx: extended_decode_ctxt) -> ty::region { +impl ty::Region: tr { + fn tr(xcx: extended_decode_ctxt) -> ty::Region { match self { ty::re_bound(br) => ty::re_bound(br.tr(xcx)), ty::re_free(id, br) => ty::re_free(xcx.tr_id(id), br.tr(xcx)), diff --git a/src/rustc/middle/borrowck.rs b/src/rustc/middle/borrowck.rs index e2f7ba20642aa..db0e092ed83e0 100644 --- a/src/rustc/middle/borrowck.rs +++ b/src/rustc/middle/borrowck.rs @@ -229,7 +229,6 @@ use result::{Result, Ok, Err}; use syntax::print::pprust; use util::common::indenter; use ty::to_str; -use driver::session::session; use dvec::DVec; use mem_categorization::*; @@ -319,8 +318,8 @@ enum bckerr_code { err_mut_variant, err_root_not_permitted, err_mutbl(ast::mutability), - err_out_of_root_scope(ty::region, ty::region), // superscope, subscope - err_out_of_scope(ty::region, ty::region) // superscope, subscope + err_out_of_root_scope(ty::Region, 
ty::Region), // superscope, subscope + err_out_of_scope(ty::Region, ty::Region) // superscope, subscope } impl bckerr_code : cmp::Eq { @@ -383,7 +382,7 @@ impl bckerr : cmp::Eq { type bckres = Result; /// a complete record of a loan that was granted -type loan = {lp: @loan_path, cmt: cmt, mutbl: ast::mutability}; +struct Loan {lp: @loan_path, cmt: cmt, mutbl: ast::mutability} /// maps computed by `gather_loans` that are then used by `check_loans` /// @@ -392,7 +391,7 @@ type loan = {lp: @loan_path, cmt: cmt, mutbl: ast::mutability}; /// - `pure_map`: map from block/expr that must be pure to the error message /// that should be reported if they are not pure type req_maps = { - req_loan_map: HashMap>>, + req_loan_map: HashMap>, pure_map: HashMap }; @@ -436,7 +435,7 @@ fn root_map() -> root_map { // Misc impl borrowck_ctxt { - fn is_subregion_of(r_sub: ty::region, r_sup: ty::region) -> bool { + fn is_subregion_of(r_sub: ty::Region, r_sup: ty::Region) -> bool { region::is_subregion_of(self.tcx.region_map, r_sub, r_sup) } @@ -582,6 +581,11 @@ impl borrowck_ctxt { method_map: self.method_map}; mc.mut_to_str(mutbl) } + + fn loan_to_repr(loan: &Loan) -> ~str { + fmt!("Loan(lp=%?, cmt=%s, mutbl=%?)", + loan.lp, self.cmt_to_repr(loan.cmt), loan.mutbl) + } } // The inherent mutability of a component is its default mutability diff --git a/src/rustc/middle/borrowck/check_loans.rs b/src/rustc/middle/borrowck/check_loans.rs index 6a9195b45096b..7f95d44fd3b85 100644 --- a/src/rustc/middle/borrowck/check_loans.rs +++ b/src/rustc/middle/borrowck/check_loans.rs @@ -131,18 +131,15 @@ impl check_loan_ctxt { } } - fn walk_loans(scope_id: ast::node_id, - f: fn(v: &loan) -> bool) { + fn walk_loans(scope_id: ast::node_id, f: fn(v: &Loan) -> bool) { let mut scope_id = scope_id; let region_map = self.tcx().region_map; let req_loan_map = self.req_maps.req_loan_map; loop { - for req_loan_map.find(scope_id).each |loanss| { - for loanss.each |loans| { - for loans.each |loan| { - if !f(loan) { return; } - } + for req_loan_map.find(scope_id).each |loans| { + for loans.each |loan| { + if !f(loan) { return; } } } @@ -155,7 +152,7 @@ impl check_loan_ctxt { fn walk_loans_of(scope_id: ast::node_id, lp: @loan_path, - f: fn(v: &loan) -> bool) { + f: fn(v: &Loan) -> bool) { for self.walk_loans(scope_id) |loan| { if loan.lp == lp { if !f(loan) { return; } @@ -256,36 +253,58 @@ impl check_loan_ctxt { } fn check_for_conflicting_loans(scope_id: ast::node_id) { - let new_loanss = match self.req_maps.req_loan_map.find(scope_id) { + debug!("check_for_conflicting_loans(scope_id=%?)", scope_id); + + let new_loans = match self.req_maps.req_loan_map.find(scope_id) { None => return, - Some(loanss) => loanss + Some(loans) => loans }; + debug!("new_loans has length %?", new_loans.len()); + let par_scope_id = self.tcx().region_map.get(scope_id); for self.walk_loans(par_scope_id) |old_loan| { - for new_loanss.each |new_loans| { - for new_loans.each |new_loan| { - if old_loan.lp != new_loan.lp { loop; } - match (old_loan.mutbl, new_loan.mutbl) { - (m_const, _) | (_, m_const) | - (m_mutbl, m_mutbl) | (m_imm, m_imm) => { - /*ok*/ - } - - (m_mutbl, m_imm) | (m_imm, m_mutbl) => { - self.bccx.span_err( - new_loan.cmt.span, - fmt!("loan of %s as %s \ - conflicts with prior loan", - self.bccx.cmt_to_str(new_loan.cmt), - self.bccx.mut_to_str(new_loan.mutbl))); - self.bccx.span_note( - old_loan.cmt.span, - fmt!("prior loan as %s granted here", - self.bccx.mut_to_str(old_loan.mutbl))); - } - } - } + debug!("old_loan=%?", self.bccx.loan_to_repr(old_loan)); 
+ + for new_loans.each |new_loan| { + self.report_error_if_loans_conflict(old_loan, new_loan); + } + } + + let len = new_loans.len(); + for uint::range(0, len) |i| { + let loan_i = new_loans[i]; + for uint::range(i+1, len) |j| { + let loan_j = new_loans[j]; + self.report_error_if_loans_conflict(&loan_i, &loan_j); + } + } + } + + fn report_error_if_loans_conflict(&self, + old_loan: &Loan, + new_loan: &Loan) { + if old_loan.lp != new_loan.lp { + return; + } + + match (old_loan.mutbl, new_loan.mutbl) { + (m_const, _) | (_, m_const) | + (m_mutbl, m_mutbl) | (m_imm, m_imm) => { + /*ok*/ + } + + (m_mutbl, m_imm) | (m_imm, m_mutbl) => { + self.bccx.span_err( + new_loan.cmt.span, + fmt!("loan of %s as %s \ + conflicts with prior loan", + self.bccx.cmt_to_str(new_loan.cmt), + self.bccx.mut_to_str(new_loan.mutbl))); + self.bccx.span_note( + old_loan.cmt.span, + fmt!("prior loan as %s granted here", + self.bccx.mut_to_str(old_loan.mutbl))); } } } diff --git a/src/rustc/middle/borrowck/gather_loans.rs b/src/rustc/middle/borrowck/gather_loans.rs index a2c8f18507138..e0eb5519d4da3 100644 --- a/src/rustc/middle/borrowck/gather_loans.rs +++ b/src/rustc/middle/borrowck/gather_loans.rs @@ -213,9 +213,10 @@ fn req_loans_in_expr(ex: @ast::expr, } impl gather_loan_ctxt { - fn tcx() -> ty::ctxt { self.bccx.tcx } + fn tcx(&self) -> ty::ctxt { self.bccx.tcx } - fn guarantee_adjustments(expr: @ast::expr, + fn guarantee_adjustments(&self, + expr: @ast::expr, adjustment: &ty::AutoAdjustment) { debug!("guarantee_adjustments(expr=%s, adjustment=%?)", expr_repr(self.tcx(), expr), adjustment); @@ -256,9 +257,10 @@ impl gather_loan_ctxt { // out loans, which will be added to the `req_loan_map`. This can // also entail "rooting" GC'd pointers, which means ensuring // dynamically that they are not freed. - fn guarantee_valid(cmt: cmt, + fn guarantee_valid(&self, + cmt: cmt, req_mutbl: ast::mutability, - scope_r: ty::region) { + scope_r: ty::Region) { self.bccx.guaranteed_paths += 1; @@ -280,35 +282,12 @@ impl gather_loan_ctxt { // it within that scope, the loan will be detected and an // error will be reported. Some(_) => { - match self.bccx.loan(cmt, scope_r, req_mutbl) { - Err(e) => { self.bccx.report(e); } - Ok(loans) if loans.len() == 0 => {} - Ok(loans) => { - match scope_r { - ty::re_scope(scope_id) => { - self.add_loans(scope_id, loans); - - if req_mutbl == m_imm && cmt.mutbl != m_imm { - self.bccx.loaned_paths_imm += 1; - - if self.tcx().sess.borrowck_note_loan() { - self.bccx.span_note( - cmt.span, - fmt!("immutable loan required")); - } - } else { - self.bccx.loaned_paths_same += 1; - } + match self.bccx.loan(cmt, scope_r, req_mutbl) { + Err(e) => { self.bccx.report(e); } + Ok(move loans) => { + self.add_loans(cmt, req_mutbl, scope_r, move loans); } - _ => { - self.bccx.tcx.sess.span_bug( - cmt.span, - fmt!("loans required but scope is scope_region is %s", - region_to_str(self.tcx(), scope_r))); - } - } } - } } // The path is not loanable: in that case, we must try and @@ -385,7 +364,8 @@ impl gather_loan_ctxt { // has type `@mut{f:int}`, this check might fail because `&x.f` // reqires an immutable pointer, but `f` lives in (aliased) // mutable memory. 
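The refactored check_for_conflicting_loans compares every newly issued loan against the loans already outstanding in the enclosing scope, and then checks the new loans pairwise among themselves; two loans conflict only when they name the same loan path and one is mutable while the other is immutable (const loans and same-mutability loans never conflict). A small sketch of that rule, written in modern Rust purely for illustration:

~~~~
// Sketch: the loan-compatibility rule behind report_error_if_loans_conflict.
#[derive(Clone, Copy, PartialEq)]
enum Mutability { Const, Imm, Mutbl }

#[derive(Clone)]
struct Loan { path: String, mutbl: Mutability }

// Two loans conflict only if they cover the same path and exactly one of
// them is a mutable loan while the other is immutable.
fn loans_conflict(old: &Loan, new: &Loan) -> bool {
    old.path == new.path
        && matches!(
            (old.mutbl, new.mutbl),
            (Mutability::Mutbl, Mutability::Imm) | (Mutability::Imm, Mutability::Mutbl)
        )
}

// New loans are checked against the outstanding ones and pairwise among
// themselves, mirroring the two loops in the patch.
fn check_for_conflicts(outstanding: &[Loan], new_loans: &[Loan]) -> Vec<(Loan, Loan)> {
    let mut errors = Vec::new();
    for old in outstanding {
        for new in new_loans {
            if loans_conflict(old, new) {
                errors.push((old.clone(), new.clone()));
            }
        }
    }
    for i in 0..new_loans.len() {
        for j in i + 1..new_loans.len() {
            if loans_conflict(&new_loans[i], &new_loans[j]) {
                errors.push((new_loans[i].clone(), new_loans[j].clone()));
            }
        }
    }
    errors
}

fn main() {
    let a = Loan { path: "x.f".into(), mutbl: Mutability::Mutbl };
    let b = Loan { path: "x.f".into(), mutbl: Mutability::Imm };
    assert_eq!(check_for_conflicts(&[a], &[b]).len(), 1);
}
~~~~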
- fn check_mutbl(req_mutbl: ast::mutability, + fn check_mutbl(&self, + req_mutbl: ast::mutability, cmt: cmt) -> bckres { debug!("check_mutbl(req_mutbl=%?, cmt.mutbl=%?)", req_mutbl, cmt.mutbl); @@ -407,21 +387,58 @@ impl gather_loan_ctxt { } } - fn add_loans(scope_id: ast::node_id, loans: @DVec) { + fn add_loans(&self, + cmt: cmt, + req_mutbl: ast::mutability, + scope_r: ty::Region, + +loans: ~[Loan]) { + if loans.len() == 0 { + return; + } + + let scope_id = match scope_r { + ty::re_scope(scope_id) => scope_id, + _ => { + self.bccx.tcx.sess.span_bug( + cmt.span, + fmt!("loans required but scope is scope_region is %s", + region_to_str(self.tcx(), scope_r))); + } + }; + + self.add_loans_to_scope_id(scope_id, move loans); + + if req_mutbl == m_imm && cmt.mutbl != m_imm { + self.bccx.loaned_paths_imm += 1; + + if self.tcx().sess.borrowck_note_loan() { + self.bccx.span_note( + cmt.span, + fmt!("immutable loan required")); + } + } else { + self.bccx.loaned_paths_same += 1; + } + } + + fn add_loans_to_scope_id(&self, scope_id: ast::node_id, +loans: ~[Loan]) { debug!("adding %u loans to scope_id %?", loans.len(), scope_id); match self.req_maps.req_loan_map.find(scope_id) { - Some(l) => { - l.push(loans); + Some(req_loans) => { + req_loans.push_all(loans); } None => { - self.req_maps.req_loan_map.insert( - scope_id, @dvec::from_vec(~[loans])); + let dvec = @dvec::from_vec(move loans); + self.req_maps.req_loan_map.insert(scope_id, dvec); } } } - fn gather_pat(discr_cmt: cmt, root_pat: @ast::pat, - arm_id: ast::node_id, alt_id: ast::node_id) { + fn gather_pat(&self, + discr_cmt: cmt, + root_pat: @ast::pat, + arm_id: ast::node_id, + alt_id: ast::node_id) { do self.bccx.cat_pattern(discr_cmt, root_pat) |cmt, pat| { match pat.node { ast::pat_ident(bm, _, _) if !self.pat_is_variant(pat) => { @@ -475,7 +492,7 @@ impl gather_loan_ctxt { } } - fn pat_is_variant(pat: @ast::pat) -> bool { + fn pat_is_variant(&self, pat: @ast::pat) -> bool { pat_util::pat_is_variant(self.bccx.tcx.def_map, pat) } } diff --git a/src/rustc/middle/borrowck/loan.rs b/src/rustc/middle/borrowck/loan.rs index 8d9d7a5796a9a..7f4f857dae83f 100644 --- a/src/rustc/middle/borrowck/loan.rs +++ b/src/rustc/middle/borrowck/loan.rs @@ -7,37 +7,39 @@ use result::{Result, Ok, Err}; impl borrowck_ctxt { fn loan(cmt: cmt, - scope_region: ty::region, - mutbl: ast::mutability) -> bckres<@DVec> { - let lc = loan_ctxt_(@{bccx: self, - scope_region: scope_region, - loans: @DVec()}); + scope_region: ty::Region, + mutbl: ast::mutability) -> bckres<~[Loan]> { + let lc = LoanContext { + bccx: self, + scope_region: scope_region, + loans: ~[] + }; match lc.loan(cmt, mutbl) { - Ok(()) => {Ok(lc.loans)} - Err(e) => {Err(e)} + Err(e) => Err(e), + Ok(()) => { + let LoanContext {loans, _} = move lc; + Ok(loans) + } } } } -type loan_ctxt_ = { +struct LoanContext { bccx: borrowck_ctxt, // the region scope for which we must preserve the memory - scope_region: ty::region, + scope_region: ty::Region, // accumulated list of loans that will be required - loans: @DVec -}; - -enum loan_ctxt { - loan_ctxt_(@loan_ctxt_) + mut loans: ~[Loan] } -impl loan_ctxt { - fn tcx() -> ty::ctxt { self.bccx.tcx } +impl LoanContext { + fn tcx(&self) -> ty::ctxt { self.bccx.tcx } - fn issue_loan(cmt: cmt, - scope_ub: ty::region, + fn issue_loan(&self, + cmt: cmt, + scope_ub: ty::Region, req_mutbl: ast::mutability) -> bckres<()> { if self.bccx.is_subregion_of(self.scope_region, scope_ub) { match req_mutbl { @@ -57,12 +59,13 @@ impl loan_ctxt { } } - (*self.loans).push({ + 
self.loans.push(Loan { // Note: cmt.lp must be Some(_) because otherwise this // loan process does not apply at all. lp: cmt.lp.get(), cmt: cmt, - mutbl: req_mutbl}); + mutbl: req_mutbl + }); return Ok(()); } else { // The loan being requested lives longer than the data @@ -73,7 +76,7 @@ impl loan_ctxt { } } - fn loan(cmt: cmt, req_mutbl: ast::mutability) -> bckres<()> { + fn loan(&self, cmt: cmt, req_mutbl: ast::mutability) -> bckres<()> { debug!("loan(%s, %s)", self.bccx.cmt_to_repr(cmt), self.bccx.mut_to_str(req_mutbl)); @@ -144,7 +147,8 @@ impl loan_ctxt { // A "stable component" is one where assigning the base of the // component cannot cause the component itself to change types. // Example: record fields. - fn loan_stable_comp(cmt: cmt, + fn loan_stable_comp(&self, + cmt: cmt, cmt_base: cmt, req_mutbl: ast::mutability) -> bckres<()> { let base_mutbl = match req_mutbl { @@ -162,7 +166,8 @@ impl loan_ctxt { // An "unstable deref" means a deref of a ptr/comp where, if the // base of the deref is assigned to, pointers into the result of the // deref would be invalidated. Examples: interior of variants, uniques. - fn loan_unstable_deref(cmt: cmt, + fn loan_unstable_deref(&self, + cmt: cmt, cmt_base: cmt, req_mutbl: ast::mutability) -> bckres<()> { // Variant components: the base must be immutable, because diff --git a/src/rustc/middle/borrowck/preserve.rs b/src/rustc/middle/borrowck/preserve.rs index 7e1d47eed6944..556ea7867cfca 100644 --- a/src/rustc/middle/borrowck/preserve.rs +++ b/src/rustc/middle/borrowck/preserve.rs @@ -23,7 +23,7 @@ impl preserve_condition { impl borrowck_ctxt { fn preserve(cmt: cmt, - scope_region: ty::region, + scope_region: ty::Region, item_ub: ast::node_id, root_ub: ast::node_id) -> bckres { @@ -41,7 +41,7 @@ enum preserve_ctxt = { bccx: borrowck_ctxt, // the region scope for which we must preserve the memory - scope_region: ty::region, + scope_region: ty::Region, // the scope for the body of the enclosing fn/method item item_ub: ast::node_id, @@ -277,7 +277,7 @@ priv impl &preserve_ctxt { /// Checks that the scope for which the value must be preserved /// is a subscope of `scope_ub`; if so, success. 
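For the preserve.rs change that follows, compare_scope succeeds only when the region for which the value must be preserved is a subregion of scope_ub, and region.rs answers that by walking the region map of enclosing scopes. A toy version of that containment test, in modern Rust; the HashMap-based region map here is an assumption made for illustration.

~~~~
use std::collections::HashMap;

// Sketch: a scope is contained in another if walking the chain of
// enclosing scopes from the inner one eventually reaches the outer one.
type NodeId = u32;

fn scope_contains(region_map: &HashMap<NodeId, NodeId>,
                  superscope: NodeId,
                  mut subscope: NodeId) -> bool {
    loop {
        if subscope == superscope {
            return true;
        }
        match region_map.get(&subscope) {
            Some(&parent) => subscope = parent,
            None => return false, // reached the crate root without a match
        }
    }
}

fn main() {
    // block 3 is nested in block 2, which is nested in fn body 1
    let region_map: HashMap<NodeId, NodeId> = [(3, 2), (2, 1)].into_iter().collect();
    assert!(scope_contains(&region_map, 1, 3));  // 3 is inside 1
    assert!(!scope_contains(&region_map, 3, 1)); // but not the other way round
}
~~~~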
fn compare_scope(cmt: cmt, - scope_ub: ty::region) -> bckres { + scope_ub: ty::Region) -> bckres { if self.bccx.is_subregion_of(self.scope_region, scope_ub) { Ok(pc_ok) } else { diff --git a/src/rustc/middle/capture.rs b/src/rustc/middle/capture.rs index 618d43e121a30..563ea8f84be76 100644 --- a/src/rustc/middle/capture.rs +++ b/src/rustc/middle/capture.rs @@ -1,5 +1,4 @@ use syntax::{ast, ast_util}; -use driver::session::session; use syntax::codemap::span; use std::map; use std::map::HashMap; diff --git a/src/rustc/middle/check_alt.rs b/src/rustc/middle/check_alt.rs index aab470f6907da..fc040ecc4cd3a 100644 --- a/src/rustc/middle/check_alt.rs +++ b/src/rustc/middle/check_alt.rs @@ -7,7 +7,6 @@ use syntax::print::pprust::pat_to_str; use util::ppaux::ty_to_str; use pat_util::*; use syntax::visit; -use driver::session::session; use middle::ty; use middle::ty::*; use std::map::HashMap; diff --git a/src/rustc/middle/check_const.rs b/src/rustc/middle/check_const.rs index bd3abe2013495..bdc042fb764e9 100644 --- a/src/rustc/middle/check_const.rs +++ b/src/rustc/middle/check_const.rs @@ -1,10 +1,10 @@ use syntax::ast::*; use syntax::{visit, ast_util, ast_map}; -use driver::session::session; +use driver::session::Session; use std::map::HashMap; use dvec::DVec; -fn check_crate(sess: session, crate: @crate, ast_map: ast_map::map, +fn check_crate(sess: Session, crate: @crate, ast_map: ast_map::map, def_map: resolve::DefMap, method_map: typeck::method_map, tcx: ty::ctxt) { visit::visit_crate(*crate, false, visit::mk_vt(@{ @@ -17,7 +17,7 @@ fn check_crate(sess: session, crate: @crate, ast_map: ast_map::map, sess.abort_if_errors(); } -fn check_item(sess: session, ast_map: ast_map::map, +fn check_item(sess: Session, ast_map: ast_map::map, def_map: resolve::DefMap, it: @item, &&_is_const: bool, v: visit::vt) { match it.node { @@ -55,7 +55,7 @@ fn check_pat(p: @pat, &&_is_const: bool, v: visit::vt) { } } -fn check_expr(sess: session, def_map: resolve::DefMap, +fn check_expr(sess: Session, def_map: resolve::DefMap, method_map: typeck::method_map, tcx: ty::ctxt, e: @expr, &&is_const: bool, v: visit::vt) { if is_const { @@ -142,12 +142,12 @@ fn check_expr(sess: session, def_map: resolve::DefMap, // Make sure a const item doesn't recursively refer to itself // FIXME: Should use the dependency graph when it's available (#1356) -fn check_item_recursion(sess: session, ast_map: ast_map::map, +fn check_item_recursion(sess: Session, ast_map: ast_map::map, def_map: resolve::DefMap, it: @item) { type env = { root_it: @item, - sess: session, + sess: Session, ast_map: ast_map::map, def_map: resolve::DefMap, idstack: @DVec, diff --git a/src/rustc/middle/check_loop.rs b/src/rustc/middle/check_loop.rs index 3cd26f3039b4c..3fa7f34fb33e2 100644 --- a/src/rustc/middle/check_loop.rs +++ b/src/rustc/middle/check_loop.rs @@ -1,6 +1,5 @@ use syntax::ast::*; use syntax::visit; -use driver::session::session; type ctx = {in_loop: bool, can_ret: bool}; diff --git a/src/rustc/middle/const_eval.rs b/src/rustc/middle/const_eval.rs index 51382b8108cf9..d0296ebd2e544 100644 --- a/src/rustc/middle/const_eval.rs +++ b/src/rustc/middle/const_eval.rs @@ -1,4 +1,4 @@ -use syntax::{ast,ast_util,visit}; +use syntax::{ast,ast_map,ast_util,visit}; use ast::*; // @@ -19,7 +19,7 @@ use ast::*; // target uses". 
This _includes_ integer-constants, plus the following // constructors: // -// fixed-size vectors and strings: []/_ and ""/_ +// fixed-size vectors and strings: [] and ""/_ // vector and string slices: &[] and &"" // tuples: (,) // records: {...} @@ -135,28 +135,7 @@ fn classify(e: @expr, // FIXME: (#3728) we can probably do something CCI-ish // surrounding nonlocal constants. But we don't yet. ast::expr_path(_) => { - match def_map.find(e.id) { - Some(ast::def_const(def_id)) => { - if ast_util::is_local(def_id) { - let ty = ty::expr_ty(tcx, e); - if ty::type_is_integral(ty) { - integral_const - } else { - general_const - } - } else { - non_const - } - } - Some(_) => { - non_const - } - None => { - tcx.sess.span_bug(e.span, - ~"unknown path when \ - classifying constants"); - } - } + lookup_constness(tcx, e) } _ => non_const @@ -167,6 +146,40 @@ fn classify(e: @expr, } } +fn lookup_const(tcx: ty::ctxt, e: @expr) -> Option<@expr> { + match tcx.def_map.find(e.id) { + Some(ast::def_const(def_id)) => { + if ast_util::is_local(def_id) { + match tcx.items.find(def_id.node) { + None => None, + Some(ast_map::node_item(it, _)) => match it.node { + item_const(_, const_expr) => Some(const_expr), + _ => None + }, + Some(_) => None + } + } + else { None } + } + Some(_) => None, + None => None + } +} + +fn lookup_constness(tcx: ty::ctxt, e: @expr) -> constness { + match lookup_const(tcx, e) { + Some(rhs) => { + let ty = ty::expr_ty(tcx, rhs); + if ty::type_is_integral(ty) { + integral_const + } else { + general_const + } + } + None => non_const + } +} + fn process_crate(crate: @ast::crate, def_map: resolve::DefMap, tcx: ty::ctxt) { @@ -204,58 +217,67 @@ impl const_val : cmp::Eq { pure fn ne(other: &const_val) -> bool { !self.eq(other) } } -// FIXME: issue #1417 fn eval_const_expr(tcx: middle::ty::ctxt, e: @expr) -> const_val { + match eval_const_expr_partial(tcx, e) { + Ok(r) => r, + Err(s) => fail s + } +} + +fn eval_const_expr_partial(tcx: middle::ty::ctxt, e: @expr) + -> Result { use middle::ty; - fn fromb(b: bool) -> const_val { const_int(b as i64) } + fn fromb(b: bool) -> Result { Ok(const_int(b as i64)) } match e.node { expr_unary(neg, inner) => { - match eval_const_expr(tcx, inner) { - const_float(f) => const_float(-f), - const_int(i) => const_int(-i), - const_uint(i) => const_uint(-i), - const_str(_) => fail ~"Negate on string", - const_bool(_) => fail ~"Negate on boolean" + match eval_const_expr_partial(tcx, inner) { + Ok(const_float(f)) => Ok(const_float(-f)), + Ok(const_int(i)) => Ok(const_int(-i)), + Ok(const_uint(i)) => Ok(const_uint(-i)), + Ok(const_str(_)) => Err(~"Negate on string"), + Ok(const_bool(_)) => Err(~"Negate on boolean"), + err => err } } expr_unary(not, inner) => { - match eval_const_expr(tcx, inner) { - const_int(i) => const_int(!i), - const_uint(i) => const_uint(!i), - const_bool(b) => const_bool(!b), - _ => fail ~"Not on float or string" + match eval_const_expr_partial(tcx, inner) { + Ok(const_int(i)) => Ok(const_int(!i)), + Ok(const_uint(i)) => Ok(const_uint(!i)), + Ok(const_bool(b)) => Ok(const_bool(!b)), + _ => Err(~"Not on float or string") } } expr_binary(op, a, b) => { - match (eval_const_expr(tcx, a), eval_const_expr(tcx, b)) { - (const_float(a), const_float(b)) => { + match (eval_const_expr_partial(tcx, a), + eval_const_expr_partial(tcx, b)) { + (Ok(const_float(a)), Ok(const_float(b))) => { match op { - add => const_float(a + b), - subtract => const_float(a - b), - mul => const_float(a * b), - div => const_float(a / b), - rem => const_float(a % b), + add => 
Ok(const_float(a + b)), + subtract => Ok(const_float(a - b)), + mul => Ok(const_float(a * b)), + div => Ok(const_float(a / b)), + rem => Ok(const_float(a % b)), eq => fromb(a == b), lt => fromb(a < b), le => fromb(a <= b), ne => fromb(a != b), ge => fromb(a >= b), gt => fromb(a > b), - _ => fail ~"Can't do this op on floats" + _ => Err(~"Can't do this op on floats") } } - (const_int(a), const_int(b)) => { + (Ok(const_int(a)), Ok(const_int(b))) => { match op { - add => const_int(a + b), - subtract => const_int(a - b), - mul => const_int(a * b), - div => const_int(a / b), - rem => const_int(a % b), - and | bitand => const_int(a & b), - or | bitor => const_int(a | b), - bitxor => const_int(a ^ b), - shl => const_int(a << b), - shr => const_int(a >> b), + add => Ok(const_int(a + b)), + subtract => Ok(const_int(a - b)), + mul => Ok(const_int(a * b)), + div => Ok(const_int(a / b)), + rem => Ok(const_int(a % b)), + and | bitand => Ok(const_int(a & b)), + or | bitor => Ok(const_int(a | b)), + bitxor => Ok(const_int(a ^ b)), + shl => Ok(const_int(a << b)), + shr => Ok(const_int(a >> b)), eq => fromb(a == b), lt => fromb(a < b), le => fromb(a <= b), @@ -264,18 +286,18 @@ fn eval_const_expr(tcx: middle::ty::ctxt, e: @expr) -> const_val { gt => fromb(a > b) } } - (const_uint(a), const_uint(b)) => { + (Ok(const_uint(a)), Ok(const_uint(b))) => { match op { - add => const_uint(a + b), - subtract => const_uint(a - b), - mul => const_uint(a * b), - div => const_uint(a / b), - rem => const_uint(a % b), - and | bitand => const_uint(a & b), - or | bitor => const_uint(a | b), - bitxor => const_uint(a ^ b), - shl => const_uint(a << b), - shr => const_uint(a >> b), + add => Ok(const_uint(a + b)), + subtract => Ok(const_uint(a - b)), + mul => Ok(const_uint(a * b)), + div => Ok(const_uint(a / b)), + rem => Ok(const_uint(a % b)), + and | bitand => Ok(const_uint(a & b)), + or | bitor => Ok(const_uint(a | b)), + bitxor => Ok(const_uint(a ^ b)), + shl => Ok(const_uint(a << b)), + shr => Ok(const_uint(a >> b)), eq => fromb(a == b), lt => fromb(a < b), le => fromb(a <= b), @@ -285,22 +307,22 @@ fn eval_const_expr(tcx: middle::ty::ctxt, e: @expr) -> const_val { } } // shifts can have any integral type as their rhs - (const_int(a), const_uint(b)) => { + (Ok(const_int(a)), Ok(const_uint(b))) => { match op { - shl => const_int(a << b), - shr => const_int(a >> b), - _ => fail ~"Can't do this op on an int and uint" + shl => Ok(const_int(a << b)), + shr => Ok(const_int(a >> b)), + _ => Err(~"Can't do this op on an int and uint") } } - (const_uint(a), const_int(b)) => { + (Ok(const_uint(a)), Ok(const_int(b))) => { match op { - shl => const_uint(a << b), - shr => const_uint(a >> b), - _ => fail ~"Can't do this op on a uint and int" + shl => Ok(const_uint(a << b)), + shr => Ok(const_uint(a >> b)), + _ => Err(~"Can't do this op on a uint and int") } } - (const_bool(a), const_bool(b)) => { - const_bool(match op { + (Ok(const_bool(a)), Ok(const_bool(b))) => { + Ok(const_bool(match op { and => a && b, or => a || b, bitxor => a ^ b, @@ -308,47 +330,53 @@ fn eval_const_expr(tcx: middle::ty::ctxt, e: @expr) -> const_val { bitor => a | b, eq => a == b, ne => a != b, - _ => fail ~"Can't do this op on bools" - }) + _ => return Err(~"Can't do this op on bools") + })) } - _ => fail ~"Bad operands for binary" + _ => Err(~"Bad operands for binary") } } expr_cast(base, _) => { let ety = ty::expr_ty(tcx, e); - let base = eval_const_expr(tcx, base); + let base = eval_const_expr_partial(tcx, base); match ty::get(ety).sty { ty::ty_float(_) => { 
match base { - const_uint(u) => const_float(u as f64), - const_int(i) => const_float(i as f64), - const_float(_) => base, - _ => fail ~"Can't cast float to str" + Ok(const_uint(u)) => Ok(const_float(u as f64)), + Ok(const_int(i)) => Ok(const_float(i as f64)), + Ok(const_float(_)) => base, + _ => Err(~"Can't cast float to str") } } ty::ty_uint(_) => { match base { - const_uint(_) => base, - const_int(i) => const_uint(i as u64), - const_float(f) => const_uint(f as u64), - _ => fail ~"Can't cast str to uint" + Ok(const_uint(_)) => base, + Ok(const_int(i)) => Ok(const_uint(i as u64)), + Ok(const_float(f)) => Ok(const_uint(f as u64)), + _ => Err(~"Can't cast str to uint") } } ty::ty_int(_) | ty::ty_bool => { match base { - const_uint(u) => const_int(u as i64), - const_int(_) => base, - const_float(f) => const_int(f as i64), - _ => fail ~"Can't cast str to int" + Ok(const_uint(u)) => Ok(const_int(u as i64)), + Ok(const_int(_)) => base, + Ok(const_float(f)) => Ok(const_int(f as i64)), + _ => Err(~"Can't cast str to int") } } - _ => fail ~"Can't cast this type" + _ => Err(~"Can't cast this type") } } - expr_lit(lit) => lit_to_const(lit), + expr_path(_) => { + match lookup_const(tcx, e) { + Some(actual_e) => eval_const_expr_partial(tcx, actual_e), + None => Err(~"Non-constant path in constant expr") + } + } + expr_lit(lit) => Ok(lit_to_const(lit)), // If we have a vstore, just keep going; it has to be a string - expr_vstore(e, _) => eval_const_expr(tcx, e), - _ => fail ~"Unsupported constant expr" + expr_vstore(e, _) => eval_const_expr_partial(tcx, e), + _ => Err(~"Unsupported constant expr") } } diff --git a/src/rustc/middle/kind.rs b/src/rustc/middle/kind.rs index 36a05d6650615..e4dc9e8330e88 100644 --- a/src/rustc/middle/kind.rs +++ b/src/rustc/middle/kind.rs @@ -1,8 +1,7 @@ use syntax::{visit, ast_util}; use syntax::ast::*; use syntax::codemap::span; -use ty::{kind, kind_copyable, kind_noncopyable, kind_const}; -use driver::session::session; +use middle::ty::{Kind, kind_copyable, kind_noncopyable, kind_const}; use std::map::HashMap; use util::ppaux::{ty_to_str, tys_to_str}; use syntax::print::pprust::expr_to_str; @@ -40,7 +39,7 @@ use lint::{non_implicitly_copyable_typarams,implicit_copies}; const try_adding: &str = "Try adding a move"; -fn kind_to_str(k: kind) -> ~str { +fn kind_to_str(k: Kind) -> ~str { let mut kinds = ~[]; if ty::kind_lteq(kind_const(), k) { @@ -387,7 +386,7 @@ fn check_stmt(stmt: @stmt, cx: ctx, v: visit::vt) { visit::visit_stmt(stmt, cx, v); } -fn check_ty(aty: @ty, cx: ctx, v: visit::vt) { +fn check_ty(aty: @Ty, cx: ctx, v: visit::vt) { match aty.node { ty_path(_, id) => { do option::iter(&cx.tcx.node_type_substs.find(id)) |ts| { diff --git a/src/rustc/middle/lang_items.rs b/src/rustc/middle/lang_items.rs index 7cb2c9eb9cf19..383fe2db3231c 100644 --- a/src/rustc/middle/lang_items.rs +++ b/src/rustc/middle/lang_items.rs @@ -9,7 +9,7 @@ // // * Functions called by the compiler itself. 
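The const_eval rewrite above threads errors through a Result instead of calling fail directly: eval_const_expr_partial returns Result<const_val, ~str>, constant paths are resolved via lookup_const and evaluated recursively, and eval_const_expr simply unwraps the result. A miniature constant folder in the same spirit, written in modern Rust syntax with an illustrative expression type of its own:

~~~~
// Sketch: Result-based constant folding for a tiny expression language.
#[derive(Clone, Debug, PartialEq)]
enum ConstVal { Int(i64), Bool(bool) }

enum Expr {
    LitInt(i64),
    Neg(Box<Expr>),
    Add(Box<Expr>, Box<Expr>),
    Lt(Box<Expr>, Box<Expr>),
}

// Every arm returns Ok(..) or a descriptive Err(..); nothing panics here,
// mirroring eval_const_expr_partial in the patch.
fn eval_partial(e: &Expr) -> Result<ConstVal, String> {
    use ConstVal::*;
    match e {
        Expr::LitInt(i) => Ok(Int(*i)),
        Expr::Neg(inner) => match eval_partial(inner)? {
            Int(i) => Ok(Int(-i)),
            Bool(_) => Err("negate on boolean".to_string()),
        },
        Expr::Add(a, b) => match (eval_partial(a)?, eval_partial(b)?) {
            (Int(a), Int(b)) => Ok(Int(a + b)),
            _ => Err("bad operands for binary".to_string()),
        },
        Expr::Lt(a, b) => match (eval_partial(a)?, eval_partial(b)?) {
            (Int(a), Int(b)) => Ok(Bool(a < b)),
            _ => Err("bad operands for binary".to_string()),
        },
    }
}

// The panicking front end just unwraps, like eval_const_expr in the patch.
fn eval(e: &Expr) -> ConstVal {
    eval_partial(e).unwrap_or_else(|msg| panic!("{}", msg))
}

fn main() {
    let one_plus_one = Expr::Add(Box::new(Expr::LitInt(1)), Box::new(Expr::LitInt(1)));
    let e = Expr::Lt(Box::new(Expr::LitInt(1)), Box::new(one_plus_one));
    assert_eq!(eval(&e), ConstVal::Bool(true));
}
~~~~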
-use driver::session::session; +use driver::session::Session; use metadata::csearch::{each_path, get_item_attrs}; use metadata::cstore::{iter_crate_data}; use metadata::decoder::{dl_def, dl_field, dl_impl}; @@ -50,7 +50,7 @@ struct LanguageItems { mut log_type_fn: Option } -mod LanguageItems { +mod language_items { #[legacy_exports]; fn make() -> LanguageItems { LanguageItems { @@ -83,7 +83,7 @@ mod LanguageItems { } } -fn LanguageItemCollector(crate: @crate, session: session, +fn LanguageItemCollector(crate: @crate, session: Session, items: &r/LanguageItems) -> LanguageItemCollector/&r { @@ -127,7 +127,7 @@ struct LanguageItemCollector { items: &LanguageItems, crate: @crate, - session: session, + session: Session, item_refs: HashMap<~str,&mut Option>, } @@ -239,8 +239,8 @@ impl LanguageItemCollector { } } -fn collect_language_items(crate: @crate, session: session) -> LanguageItems { - let items = LanguageItems::make(); +fn collect_language_items(crate: @crate, session: Session) -> LanguageItems { + let items = language_items::make(); let collector = LanguageItemCollector(crate, session, &items); collector.collect(); copy items diff --git a/src/rustc/middle/lint.rs b/src/rustc/middle/lint.rs index 0f31f2056a14a..0768a09252247 100644 --- a/src/rustc/middle/lint.rs +++ b/src/rustc/middle/lint.rs @@ -1,5 +1,5 @@ use driver::session; -use driver::session::session; +use driver::session::Session; use middle::ty; use syntax::{ast, ast_util, visit}; use syntax::attr; @@ -244,7 +244,7 @@ fn clone_lint_modes(modes: lint_modes) -> lint_modes { type ctxt_ = {dict: lint_dict, curr: lint_modes, is_default: bool, - sess: session}; + sess: Session}; enum ctxt { ctxt_(ctxt_) @@ -355,7 +355,7 @@ fn build_settings_item(i: @ast::item, &&cx: ctxt, v: visit::vt) { } } -fn build_settings_crate(sess: session::session, crate: @ast::crate) { +fn build_settings_crate(sess: session::Session, crate: @ast::crate) { let cx = ctxt_({dict: get_lint_dict(), curr: std::smallintmap::mk(), diff --git a/src/rustc/middle/liveness.rs b/src/rustc/middle/liveness.rs index a0a422bc027b7..89d5c842a9f62 100644 --- a/src/rustc/middle/liveness.rs +++ b/src/rustc/middle/liveness.rs @@ -99,7 +99,6 @@ use syntax::print::pprust::{expr_to_str}; use visit::vt; use syntax::codemap::span; use syntax::ast::*; -use driver::session::session; use io::WriterUtil; use capture::{cap_move, cap_drop, cap_copy, cap_ref}; diff --git a/src/rustc/middle/mem_categorization.rs b/src/rustc/middle/mem_categorization.rs index dc5874ea2cfae..a61cb28c16b1a 100644 --- a/src/rustc/middle/mem_categorization.rs +++ b/src/rustc/middle/mem_categorization.rs @@ -122,7 +122,7 @@ impl categorization : cmp::Eq { enum ptr_kind { uniq_ptr, gc_ptr, - region_ptr(ty::region), + region_ptr(ty::Region), unsafe_ptr } @@ -993,7 +993,7 @@ impl &mem_categorization_ctxt { } } - fn region_to_str(r: ty::region) -> ~str { + fn region_to_str(r: ty::Region) -> ~str { region_to_str(self.tcx, r) } } diff --git a/src/rustc/middle/region.rs b/src/rustc/middle/region.rs index eb0bf8796f075..95280032ae0ab 100644 --- a/src/rustc/middle/region.rs +++ b/src/rustc/middle/region.rs @@ -7,7 +7,7 @@ region parameterized. 
*/ -use driver::session::session; +use driver::session::Session; use middle::ty; use syntax::{ast, visit}; use syntax::codemap::span; @@ -41,7 +41,7 @@ Encodes the bounding lifetime for a given AST node: type region_map = HashMap; struct ctxt { - sess: session, + sess: Session, def_map: resolve::DefMap, // Generated maps: @@ -108,8 +108,8 @@ fn scope_contains(region_map: region_map, superscope: ast::node_id, /// intended to run *after inference* and sadly the logic is somewhat /// duplicated with the code in infer.rs. fn is_subregion_of(region_map: region_map, - sub_region: ty::region, - super_region: ty::region) -> bool { + sub_region: ty::Region, + super_region: ty::Region) -> bool { sub_region == super_region || match (sub_region, super_region) { (_, ty::re_static) => { @@ -328,7 +328,7 @@ fn resolve_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk, visit::visit_fn(fk, decl, body, sp, id, fn_cx, visitor); } -fn resolve_crate(sess: session, def_map: resolve::DefMap, +fn resolve_crate(sess: Session, def_map: resolve::DefMap, crate: @ast::crate) -> region_map { let cx: ctxt = ctxt {sess: sess, def_map: def_map, @@ -382,7 +382,7 @@ impl region_dep : cmp::Eq { } type determine_rp_ctxt_ = { - sess: session, + sess: Session, ast_map: ast_map::map, def_map: resolve::DefMap, region_paramd_items: region_paramd_items, @@ -599,7 +599,7 @@ fn determine_rp_in_ty_method(ty_m: ast::ty_method, } } -fn determine_rp_in_ty(ty: @ast::ty, +fn determine_rp_in_ty(ty: @ast::Ty, &&cx: determine_rp_ctxt, visitor: visit::vt) { @@ -640,8 +640,8 @@ fn determine_rp_in_ty(ty: @ast::ty, // that as a direct dependency. match ty.node { ast::ty_path(path, id) => { - match cx.def_map.get(id) { - ast::def_ty(did) | ast::def_class(did) => { + match cx.def_map.find(id) { + Some(ast::def_ty(did)) | Some(ast::def_class(did)) => { if did.crate == ast::local_crate { if cx.opt_region_is_relevant(path.rp) { cx.add_dep(did.node); @@ -755,7 +755,7 @@ fn determine_rp_in_struct_field(cm: @ast::struct_field, } } -fn determine_rp_in_crate(sess: session, +fn determine_rp_in_crate(sess: Session, ast_map: ast_map::map, def_map: resolve::DefMap, crate: @ast::crate) -> region_paramd_items { diff --git a/src/rustc/middle/resolve.rs b/src/rustc/middle/resolve.rs index 045905bbe07b5..33b56d7fabe0d 100644 --- a/src/rustc/middle/resolve.rs +++ b/src/rustc/middle/resolve.rs @@ -1,5 +1,6 @@ -use driver::session::session; +use driver::session::Session; use metadata::csearch::{each_path, get_method_names_if_trait}; +use metadata::csearch::{get_static_methods_if_impl, get_type_name_if_impl}; use metadata::cstore::find_use_stmt_cnum; use metadata::decoder::{def_like, dl_def, dl_field, dl_impl}; use middle::lang_items::LanguageItems; @@ -9,9 +10,8 @@ use syntax::ast::{_mod, add, arm}; use syntax::ast::{bind_by_ref, bind_by_implicit_ref, bind_by_value}; use syntax::ast::{bitand, bitor, bitxor}; use syntax::ast::{blk, bound_const, bound_copy, bound_owned, bound_send}; -use syntax::ast::{bound_trait, binding_mode, - capture_clause, class_ctor, class_dtor}; -use syntax::ast::{crate, crate_num, decl_item}; +use syntax::ast::{bound_trait, binding_mode, capture_clause, class_ctor}; +use syntax::ast::{class_dtor, crate, crate_num, decl_item}; use syntax::ast::{def, def_arg, def_binding, def_class, def_const, def_fn}; use syntax::ast::{def_foreign_mod, def_id, def_label, def_local, def_mod}; use syntax::ast::{def_prim_ty, def_region, def_self, def_ty, def_ty_param}; @@ -35,7 +35,7 @@ use syntax::ast::{pat_box, pat_lit, pat_range, pat_rec, pat_struct}; 
use syntax::ast::{pat_tup, pat_uniq, pat_wild, private, provided, public}; use syntax::ast::{required, rem, self_ty_, shl, shr, stmt_decl}; use syntax::ast::{struct_field, struct_variant_kind, sty_static, subtract}; -use syntax::ast::{trait_ref, tuple_variant_kind, ty, ty_bool, ty_char}; +use syntax::ast::{trait_ref, tuple_variant_kind, Ty, ty_bool, ty_char}; use syntax::ast::{ty_f, ty_f32, ty_f64, ty_float, ty_i, ty_i16, ty_i32}; use syntax::ast::{ty_i64, ty_i8, ty_int, ty_param, ty_path, ty_str, ty_u}; use syntax::ast::{ty_u16, ty_u32, ty_u64, ty_u8, ty_uint, type_value_ns}; @@ -115,7 +115,6 @@ impl PatternBindingMode : cmp::Eq { enum Namespace { - ModuleNS, TypeNS, ValueNS } @@ -166,19 +165,8 @@ enum CaptureClause { type ResolveVisitor = vt<()>; -enum ModuleDef { - NoModuleDef, // Does not define a module. - ModuleDef(Privacy, @Module), // Defines a module. -} - -impl ModuleDef { - pure fn is_none() -> bool { - match self { NoModuleDef => true, _ => false } - } -} - enum ImportDirectiveNS { - ModuleNSOnly, + TypeNSOnly, AnyNS } @@ -257,7 +245,10 @@ enum RibKind { MethodRibKind(node_id, MethodSort), // We passed through a function *item* scope. Disallow upvars. - OpaqueFunctionRibKind + OpaqueFunctionRibKind, + + // We're in a constant item. Can't refer to dynamic stuff. + ConstantItemRibKind } // Methods can be required or provided. Required methods only occur in traits. @@ -303,6 +294,35 @@ enum EnumVariantOrConstResolution { EnumVariantOrConstNotFound } +// Specifies how duplicates should be handled when adding a child item if +// another item exists with the same name in some namespace. +enum DuplicateCheckingMode { + ForbidDuplicateModules, + ForbidDuplicateTypes, + ForbidDuplicateValues, + ForbidDuplicateTypesAndValues, + OverwriteDuplicates +} + +impl DuplicateCheckingMode : cmp::Eq { + pure fn eq(other: &DuplicateCheckingMode) -> bool { + (self as uint) == (*other as uint) + } + pure fn ne(other: &DuplicateCheckingMode) -> bool { !self.eq(other) } +} + +// Returns the namespace associated with the given duplicate checking mode, +// or fails for OverwriteDuplicates. This is used for error messages. +fn namespace_for_duplicate_checking_mode(mode: DuplicateCheckingMode) -> + Namespace { + match mode { + ForbidDuplicateModules | ForbidDuplicateTypes | + ForbidDuplicateTypesAndValues => TypeNS, + ForbidDuplicateValues => ValueNS, + OverwriteDuplicates => fail ~"OverwriteDuplicates has no namespace" + } +} + /// One local scope. 
struct Rib { bindings: HashMap, @@ -360,7 +380,6 @@ struct ImportResolution { mut outstanding_references: uint, - mut module_target: Option, mut value_target: Option, mut type_target: Option, @@ -372,7 +391,6 @@ fn ImportResolution(privacy: Privacy, span: span) -> ImportResolution { privacy: privacy, span: span, outstanding_references: 0u, - module_target: None, value_target: None, type_target: None, used: false @@ -382,7 +400,6 @@ fn ImportResolution(privacy: Privacy, span: span) -> ImportResolution { impl ImportResolution { fn target_for_namespace(namespace: Namespace) -> Option { match namespace { - ModuleNS => return copy self.module_target, TypeNS => return copy self.type_target, ValueNS => return copy self.value_target } @@ -479,7 +496,7 @@ pure fn is_none(x: Option) -> bool { } } -fn unused_import_lint_level(session: session) -> level { +fn unused_import_lint_level(session: Session) -> level { for session.opts.lint_opts.each |lint_option_pair| { let (lint_type, lint_level) = *lint_option_pair; if lint_type == unused_imports { @@ -501,8 +518,15 @@ impl Privacy : cmp::Eq { pure fn ne(other: &Privacy) -> bool { !self.eq(other) } } -// Records a possibly-private definition. -struct Definition { +// Records a possibly-private type definition. +struct TypeNsDef { + mut privacy: Privacy, + mut module_def: Option<@Module>, + mut type_def: Option +} + +// Records a possibly-private value definition. +struct ValueNsDef { privacy: Privacy, def: def, } @@ -510,13 +534,11 @@ struct Definition { // Records the definitions (at most one for each namespace) that a name is // bound to. struct NameBindings { - mut module_def: ModuleDef, //< Meaning in module namespace. - mut type_def: Option, //< Meaning in type namespace. - mut value_def: Option, //< Meaning in value namespace. + mut type_def: Option, //< Meaning in type namespace. + mut value_def: Option, //< Meaning in value namespace. // For error reporting - // XXX: Merge me into Definition. - mut module_span: Option, + // FIXME (#3783): Merge me into TypeNsDef and ValueNsDef. mut type_span: Option, mut value_span: Option, } @@ -529,30 +551,60 @@ impl NameBindings { def_id: Option, legacy_exports: bool, sp: span) { - if self.module_def.is_none() { - let module_ = @Module(parent_link, def_id, legacy_exports); - self.module_def = ModuleDef(privacy, module_); - self.module_span = Some(sp); + // Merges the module with the existing type def or creates a new one. + let module_ = @Module(parent_link, def_id, legacy_exports); + match self.type_def { + None => { + self.type_def = Some(TypeNsDef { + privacy: privacy, + module_def: Some(module_), + type_def: None + }); + } + Some(copy type_def) => { + self.type_def = Some(TypeNsDef { + privacy: privacy, + module_def: Some(module_), + .. type_def + }); + } } + self.type_span = Some(sp); } /// Records a type definition. fn define_type(privacy: Privacy, def: def, sp: span) { - self.type_def = Some(Definition { privacy: privacy, def: def }); + // Merges the type with the existing type def or creates a new one. + match self.type_def { + None => { + self.type_def = Some(TypeNsDef { + privacy: privacy, + module_def: None, + type_def: Some(def) + }); + } + Some(copy type_def) => { + self.type_def = Some(TypeNsDef { + privacy: privacy, + type_def: Some(def), + .. type_def + }); + } + } self.type_span = Some(sp); } /// Records a value definition. 
fn define_value(privacy: Privacy, def: def, sp: span) { - self.value_def = Some(Definition { privacy: privacy, def: def }); + self.value_def = Some(ValueNsDef { privacy: privacy, def: def }); self.value_span = Some(sp); } /// Returns the module node if applicable. fn get_module_if_available() -> Option<@Module> { - match self.module_def { - NoModuleDef => return None, - ModuleDef(_privacy, module_) => return Some(module_) + match self.type_def { + Some(type_def) => type_def.module_def, + None => None } } @@ -561,70 +613,88 @@ impl NameBindings { * definition. */ fn get_module() -> @Module { - match self.module_def { - NoModuleDef => { - fail - ~"get_module called on a node with no module definition!"; - } - ModuleDef(_, module_) => { - return module_; + match self.get_module_if_available() { + None => { + fail ~"get_module called on a node with no module \ + definition!" } + Some(module_def) => module_def } } fn defined_in_namespace(namespace: Namespace) -> bool { match namespace { - ModuleNS => { - match self.module_def { - NoModuleDef => false, - _ => true - } - } TypeNS => return self.type_def.is_some(), ValueNS => return self.value_def.is_some() } } - fn def_for_namespace(namespace: Namespace) -> Option { + fn def_for_namespace(namespace: Namespace) -> Option { match namespace { - TypeNS => return self.type_def, - ValueNS => return self.value_def, - ModuleNS => match self.module_def { - NoModuleDef => return None, - ModuleDef(privacy, module_) => - match module_.def_id { - None => return None, - Some(def_id) => { - return Some(Definition { - privacy: privacy, - def: def_mod(def_id) - }); + TypeNS => { + match self.type_def { + None => None, + Some(type_def) => { + // FIXME (#3784): This is reallllly questionable. + // Perhaps the right thing to do is to merge def_mod + // and def_ty. 
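With ModuleNS gone, a single type-namespace entry can now carry both a module and a type: define_module and define_type each merge into any existing TypeNsDef rather than overwriting it, and def_for_namespace prefers the explicit type definition, falling back to a def_mod built from the module's def-id. A condensed sketch of that merge in modern Rust; the types and names are illustrative only.

~~~~
// Sketch: one type-namespace slot that can hold a module, a type, or both.
#[derive(Clone, Debug, PartialEq)]
enum Def { Mod(u32), Ty(u32) } // def-ids are plain numbers here

#[derive(Default, Clone)]
struct TypeNsDef {
    module_def: Option<u32>, // def-id of a module bound to this name
    type_def: Option<Def>,   // a type bound to this name
}

#[derive(Default)]
struct NameBindings { type_def: Option<TypeNsDef> }

impl NameBindings {
    // Merging rather than replacing lets a module and a type share a name
    // within the single type namespace.
    fn define_module(&mut self, def_id: u32) {
        let mut entry = self.type_def.take().unwrap_or_default();
        entry.module_def = Some(def_id);
        self.type_def = Some(entry);
    }
    fn define_type(&mut self, def: Def) {
        let mut entry = self.type_def.take().unwrap_or_default();
        entry.type_def = Some(def);
        self.type_def = Some(entry);
    }
    // Prefer the explicit type def; otherwise surface the module as def_mod.
    fn def_for_type_namespace(&self) -> Option<Def> {
        let entry = self.type_def.as_ref()?;
        entry.type_def.clone().or(entry.module_def.map(Def::Mod))
    }
}

fn main() {
    let mut b = NameBindings::default();
    b.define_module(7);
    assert_eq!(b.def_for_type_namespace(), Some(Def::Mod(7)));
    b.define_type(Def::Ty(9));
    assert_eq!(b.def_for_type_namespace(), Some(Def::Ty(9)));
}
~~~~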
+ match type_def.type_def { + Some(type_def) => Some(type_def), + None => { + match type_def.module_def { + Some(module_def) => { + module_def.def_id.map(|def_id| + def_mod(*def_id)) + } + None => None + } + } + } } } - } + } + ValueNS => { + match self.value_def { + None => None, + Some(value_def) => Some(value_def.def) + } + } + } + } + + fn privacy_for_namespace(namespace: Namespace) -> Option { + match namespace { + TypeNS => { + match self.type_def { + None => None, + Some(type_def) => Some(type_def.privacy) + } + } + ValueNS => { + match self.value_def { + None => None, + Some(value_def) => Some(value_def.privacy) + } + } } } fn span_for_namespace(namespace: Namespace) -> Option { - match self.def_for_namespace(namespace) { - Some(_) => { + if self.defined_in_namespace(namespace) { match namespace { - TypeNS => self.type_span, - ValueNS => self.value_span, - ModuleNS => self.module_span + TypeNS => self.type_span, + ValueNS => self.value_span, } - } - None => None + } else { + None } } } fn NameBindings() -> NameBindings { NameBindings { - module_def: NoModuleDef, type_def: None, value_def: None, - module_span: None, type_span: None, value_span: None } @@ -672,9 +742,8 @@ fn PrimitiveTypeTable(intr: @ident_interner) -> PrimitiveTypeTable { fn namespace_to_str(ns: Namespace) -> ~str { match ns { - TypeNS => ~"type", - ValueNS => ~"value", - ModuleNS => ~"module" + TypeNS => ~"type", + ValueNS => ~"value", } } @@ -690,9 +759,8 @@ fn has_legacy_export_attr(attrs: &[syntax::ast::attribute]) -> bool { return false; } -fn Resolver(session: session, lang_items: LanguageItems, +fn Resolver(session: Session, lang_items: LanguageItems, crate: @crate) -> Resolver { - let graph_root = @NameBindings(); (*graph_root).define_module(Public, @@ -732,7 +800,7 @@ fn Resolver(session: session, lang_items: LanguageItems, primitive_type_table: @PrimitiveTypeTable(session. parse_sess.interner), - namespaces: ~[ ModuleNS, TypeNS, ValueNS ], + namespaces: ~[ TypeNS, ValueNS ], def_map: HashMap(), export_map2: HashMap(), @@ -746,7 +814,7 @@ fn Resolver(session: session, lang_items: LanguageItems, /// The main resolver class. struct Resolver { - session: session, + session: Session, lang_items: LanguageItems, crate: @crate, @@ -886,9 +954,7 @@ impl Resolver { */ fn add_child(name: ident, reduced_graph_parent: ReducedGraphParent, - // Pass in the namespaces for the child item so that we can - // check for duplicate items in the same namespace - ns: ~[Namespace], + duplicate_checking_mode: DuplicateCheckingMode, // For printing errors sp: span) -> (@NameBindings, ReducedGraphParent) { @@ -908,29 +974,67 @@ impl Resolver { let new_parent = ModuleReducedGraphParent(module_); match module_.children.find(name) { None => { - let child = @NameBindings(); - module_.children.insert(name, child); - return (child, new_parent); + let child = @NameBindings(); + module_.children.insert(name, child); + return (child, new_parent); } Some(child) => { - // We don't want to complain if the multiple definitions - // are in different namespaces. - match ns.find(|n| child.defined_in_namespace(n)) { - Some(ns) => { - self.session.span_err(sp, - fmt!("Duplicate definition of %s %s", - namespace_to_str(ns), - self.session.str_of(name))); - do child.span_for_namespace(ns).iter() |sp| { - self.session.span_note(*sp, - fmt!("First definition of %s %s here:", - namespace_to_str(ns), - self.session.str_of(name))); - } + // Enforce the duplicate checking mode. 
If we're requesting + // duplicate module checking, check that there isn't a module + // in the module with the same name. If we're requesting + // duplicate type checking, check that there isn't a type in + // the module with the same name. If we're requesting + // duplicate value checking, check that there isn't a value in + // the module with the same name. If we're requesting + // duplicate type checking and duplicate value checking, check + // that there isn't a duplicate type and a duplicate value + // with the same name. If no duplicate checking was requested + // at all, do nothing. + + let mut is_duplicate = false; + match duplicate_checking_mode { + ForbidDuplicateModules => { + is_duplicate = + child.get_module_if_available().is_some(); + } + ForbidDuplicateTypes => { + match child.def_for_namespace(TypeNS) { + Some(def_mod(_)) | None => {} + Some(_) => is_duplicate = true + } + } + ForbidDuplicateValues => { + is_duplicate = child.defined_in_namespace(ValueNS); + } + ForbidDuplicateTypesAndValues => { + match child.def_for_namespace(TypeNS) { + Some(def_mod(_)) | None => {} + Some(_) => is_duplicate = true + }; + if child.defined_in_namespace(ValueNS) { + is_duplicate = true; + } + } + OverwriteDuplicates => {} + } + if duplicate_checking_mode != OverwriteDuplicates && + is_duplicate { + // Return an error here by looking up the namespace that + // had the duplicate. + let ns = namespace_for_duplicate_checking_mode( + duplicate_checking_mode); + self.session.span_err(sp, + fmt!("duplicate definition of %s %s", + namespace_to_str(ns), + self.session.str_of(name))); + do child.span_for_namespace(ns).iter() |sp| { + self.session.span_note(*sp, + fmt!("first definition of %s %s here:", + namespace_to_str(ns), + self.session.str_of(name))); + } } - _ => {} - } - return (child, new_parent); + return (child, new_parent); } } } @@ -979,7 +1083,6 @@ impl Resolver { fn build_reduced_graph_for_item(item: @item, parent: ReducedGraphParent, &&visitor: vt) { - let ident = item.ident; let sp = item.span; let legacy = match parent { @@ -989,53 +1092,60 @@ impl Resolver { match item.node { item_mod(module_) => { - let legacy = has_legacy_export_attr(item.attrs); - let (name_bindings, new_parent) = self.add_child(ident, parent, - ~[ModuleNS], sp); + let legacy = has_legacy_export_attr(item.attrs); + let (name_bindings, new_parent) = + self.add_child(ident, parent, ForbidDuplicateModules, sp); let parent_link = self.get_parent_link(new_parent, ident); let def_id = { crate: 0, node: item.id }; - (*name_bindings).define_module(privacy, parent_link, - Some(def_id), legacy, sp); + (*name_bindings).define_module(privacy, parent_link, + Some(def_id), legacy, sp); let new_parent = ModuleReducedGraphParent((*name_bindings).get_module()); visit_mod(module_, sp, item.id, new_parent, visitor); } + item_foreign_mod(fm) => { - let legacy = has_legacy_export_attr(item.attrs); - let new_parent = match fm.sort { - named => { - let (name_bindings, new_parent) = self.add_child(ident, - parent, ~[ModuleNS], sp); + let legacy = has_legacy_export_attr(item.attrs); + let new_parent = match fm.sort { + named => { + let (name_bindings, new_parent) = + self.add_child(ident, parent, + ForbidDuplicateModules, sp); - let parent_link = self.get_parent_link(new_parent, ident); - let def_id = { crate: 0, node: item.id }; - (*name_bindings).define_module(privacy, parent_link, - Some(def_id), legacy, sp); + let parent_link = self.get_parent_link(new_parent, + ident); + let def_id = { crate: 0, node: item.id }; + 
(*name_bindings).define_module(privacy, + parent_link, + Some(def_id), + legacy, + sp); + + ModuleReducedGraphParent(name_bindings.get_module()) + } - ModuleReducedGraphParent((*name_bindings).get_module()) - } - // For anon foreign mods, the contents just go in the - // current scope - anonymous => parent - }; + // For anon foreign mods, the contents just go in the + // current scope + anonymous => parent + }; - visit_item(item, new_parent, visitor); + visit_item(item, new_parent, visitor); } // These items live in the value namespace. item_const(*) => { - let (name_bindings, _) = self.add_child(ident, parent, - ~[ValueNS], sp); + let (name_bindings, _) = + self.add_child(ident, parent, ForbidDuplicateValues, sp); (*name_bindings).define_value (privacy, def_const(local_def(item.id)), sp); } item_fn(_, purity, _, _) => { - let (name_bindings, new_parent) = self.add_child(ident, parent, - ~[ValueNS], sp); + let (name_bindings, new_parent) = + self.add_child(ident, parent, ForbidDuplicateValues, sp); let def = def_fn(local_def(item.id), purity); (*name_bindings).define_value(privacy, def, sp); @@ -1044,17 +1154,16 @@ impl Resolver { // These items live in the type namespace. item_ty(*) => { - let (name_bindings, _) = self.add_child(ident, parent, - ~[TypeNS], sp); + let (name_bindings, _) = + self.add_child(ident, parent, ForbidDuplicateTypes, sp); (*name_bindings).define_type (privacy, def_ty(local_def(item.id)), sp); } item_enum(enum_definition, _) => { - - let (name_bindings, new_parent) = self.add_child(ident, parent, - ~[TypeNS], sp); + let (name_bindings, new_parent) = + self.add_child(ident, parent, ForbidDuplicateTypes, sp); (*name_bindings).define_type (privacy, def_ty(local_def(item.id)), sp); @@ -1071,7 +1180,7 @@ impl Resolver { // These items live in both the type and value namespaces. item_class(*) => { let (name_bindings, new_parent) = - self.add_child(ident, parent, ~[TypeNS], sp); + self.add_child(ident, parent, ForbidDuplicateTypes, sp); (*name_bindings).define_type (privacy, def_ty(local_def(item.id)), sp); @@ -1082,13 +1191,75 @@ impl Resolver { visit_item(item, new_parent, visitor); } - item_impl(*) => { + item_impl(_, trait_ref_opt, ty, methods) => { + // If this implements an anonymous trait and it has static + // methods, then add all the static methods within to a new + // module, if the type was defined within this module. + // + // FIXME (#3785): This is quite unsatisfactory. Perhaps we + // should modify anonymous traits to only be implementable in + // the same module that declared the type. + + // Bail out early if there are no static methods. + let mut has_static_methods = false; + for methods.each |method| { + match method.self_ty.node { + sty_static => has_static_methods = true, + _ => {} + } + } + + // If there are static methods, then create the module + // and add them. + match (trait_ref_opt, ty) { + (None, @{ id: _, node: ty_path(path, _), span: _ }) if + has_static_methods && path.idents.len() == 1 => { + // Create the module. + let name = path_to_ident(path); + let (name_bindings, new_parent) = + self.add_child(name, + parent, + ForbidDuplicateModules, + sp); + + let parent_link = self.get_parent_link(new_parent, + ident); + let def_id = local_def(item.id); + name_bindings.define_module(privacy, parent_link, + Some(def_id), false, sp); + + let new_parent = ModuleReducedGraphParent( + name_bindings.get_module()); + + // For each static method... 
+ for methods.each |method| { + match method.self_ty.node { + sty_static => { + // Add the static method to the module. + let ident = method.ident; + let (method_name_bindings, _) = + self.add_child(ident, + new_parent, + ForbidDuplicateValues, + method.span); + let def = def_fn(local_def(method.id), + method.purity); + method_name_bindings.define_value( + Public, def, method.span); + } + _ => {} + } + } + } + _ => {} + } + visit_item(item, parent, visitor); } item_trait(_, _, methods) => { - let (name_bindings, new_parent) = self.add_child(ident, parent, - ~[TypeNS], sp); + let (name_bindings, new_parent) = + self.add_child(ident, parent, ForbidDuplicateTypes, sp); // Add the names of all the methods to the trait info. let method_names = @HashMap(); @@ -1102,10 +1273,10 @@ impl Resolver { sty_static => { // which parent to use?? let (method_name_bindings, _) = - self.add_child(ident, new_parent, ~[ValueNS], - ty_m.span); + self.add_child(ident, new_parent, + ForbidDuplicateValues, ty_m.span); let def = def_static_method(local_def(ty_m.id), - local_def(item.id), + Some(local_def(item.id)), ty_m.purity); (*method_name_bindings).define_value (Public, def, ty_m.span); @@ -1141,7 +1312,7 @@ impl Resolver { &&visitor: vt) { let ident = variant.node.name; - let (child, _) = self.add_child(ident, parent, ~[ValueNS], + let (child, _) = self.add_child(ident, parent, ForbidDuplicateValues, variant.span); let privacy; @@ -1223,7 +1394,7 @@ impl Resolver { match view_path.node { view_path_simple(binding, full_path, ns, _) => { let ns = match ns { - module_ns => ModuleNSOnly, + module_ns => TypeNSOnly, type_value_ns => AnyNS }; @@ -1323,8 +1494,7 @@ impl Resolver { match find_use_stmt_cnum(self.session.cstore, node_id) { Some(crate_id) => { let (child_name_bindings, new_parent) = - // should this be in ModuleNS? 
--tjc - self.add_child(name, parent, ~[ModuleNS], + self.add_child(name, parent, ForbidDuplicateTypes, view_item.span); let def_id = { crate: crate_id, node: 0 }; @@ -1355,7 +1525,8 @@ impl Resolver { let name = foreign_item.ident; let (name_bindings, new_parent) = - self.add_child(name, parent, ~[ValueNS], foreign_item.span); + self.add_child(name, parent, ForbidDuplicateValues, + foreign_item.span); match foreign_item.node { foreign_item_fn(_, purity, type_parameters) => { @@ -1407,8 +1578,14 @@ impl Resolver { ident: ident, new_parent: ReducedGraphParent) { match def { def_mod(def_id) | def_foreign_mod(def_id) => { - match copy child_name_bindings.module_def { - NoModuleDef => { + match copy child_name_bindings.type_def { + Some(TypeNsDef { module_def: Some(copy module_def), _ }) => { + debug!("(building reduced graph for external crate) \ + already created module"); + module_def.def_id = Some(def_id); + modules.insert(def_id, module_def); + } + Some(_) | None => { debug!("(building reduced graph for \ external crate) building module \ %s", final_ident); @@ -1438,10 +1615,8 @@ impl Resolver { fail ~"can't happen"; } ModuleParentLink(parent_module, ident) => { - let name_bindings = parent_module.children.get(ident); - - resolution.module_target = + resolution.type_target = Some(Target(parent_module, name_bindings)); } } @@ -1453,13 +1628,6 @@ impl Resolver { } } } - ModuleDef(_priv, module_) => { - debug!("(building reduced graph for \ - external crate) already created \ - module"); - module_.def_id = Some(def_id); - modules.insert(def_id, module_); - } } } def_fn(*) | def_static_method(*) | def_const(*) | @@ -1475,8 +1643,7 @@ impl Resolver { // If this is a trait, add all the method names // to the trait info. - match get_method_names_if_trait(self.session.cstore, - def_id) { + match get_method_names_if_trait(self.session.cstore, def_id) { None => { // Nothing to do. } @@ -1546,12 +1713,12 @@ impl Resolver { let (child_name_bindings, new_parent) = self.add_child(ident, ModuleReducedGraphParent(current_module), - // May want a better span - ~[], dummy_sp()); + OverwriteDuplicates, + dummy_sp()); // Define or reuse the module node. - match child_name_bindings.module_def { - NoModuleDef => { + match child_name_bindings.type_def { + None => { debug!("(building reduced graph for external crate) \ autovivifying %s", *ident_str); let parent_link = self.get_parent_link(new_parent, @@ -1561,32 +1728,111 @@ impl Resolver { None, false, dummy_sp()); } - ModuleDef(*) => { /* Fall through. */ } + Some(_) => { /* Fall through. */ } } current_module = (*child_name_bindings).get_module(); } - // Add the new child item. - let (child_name_bindings, new_parent) = - self.add_child(final_ident, - ModuleReducedGraphParent(current_module), - ~[], dummy_sp()); - match path_entry.def_like { dl_def(def) => { + // Add the new child item. + let (child_name_bindings, new_parent) = + self.add_child(final_ident, + ModuleReducedGraphParent( + current_module), + OverwriteDuplicates, + dummy_sp()); + self.handle_external_def(def, modules, child_name_bindings, self.session.str_of(final_ident), final_ident, new_parent); } - dl_impl(_) => { - // Because of the infelicitous way the metadata is - // written, we can't process this impl now. We'll get it - // later. - + dl_impl(def) => { + // We only process static methods of impls here. 
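Both hunks, the local item_impl case earlier and the external-crate case here, follow the same scheme: when an inherent impl has static methods, the resolver synthesizes a module named after the self type and registers each static method as a value inside it, so that the method can be named through the type (a Type::method path). A schematic sketch of that mapping in modern Rust; the table-based mini-resolver below is purely illustrative.

~~~~
use std::collections::HashMap;

// Sketch: static methods of `impl Foo { ... }` become value bindings inside
// a synthesized module named `Foo`.
#[derive(Default)]
struct Module { values: HashMap<String, u32> } // method name -> fn def-id

#[derive(Default)]
struct Resolver { modules: HashMap<String, Module> }

impl Resolver {
    fn register_impl_static_methods(&mut self, self_ty: &str, methods: &[(&str, u32)]) {
        // Create (or reuse) the module named after the self type...
        let module = self.modules.entry(self_ty.to_string()).or_default();
        // ...and add each static method as a value child of that module.
        for (name, def_id) in methods {
            module.values.insert((*name).to_string(), *def_id);
        }
    }
    // Resolving `Foo::new` walks into the synthesized module's value namespace.
    fn resolve_path(&self, ty: &str, method: &str) -> Option<u32> {
        self.modules.get(ty)?.values.get(method).copied()
    }
}

fn main() {
    let mut r = Resolver::default();
    r.register_impl_static_methods("Foo", &[("new", 42)]);
    assert_eq!(r.resolve_path("Foo", "new"), Some(42));
}
~~~~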
debug!("(building reduced graph for external crate) \ - ignoring impl %s", final_ident_str); + processing impl %s", final_ident_str); + + match get_type_name_if_impl(self.session.cstore, def) { + None => {} + Some(final_ident) => { + let static_methods_opt = + get_static_methods_if_impl( + self.session.cstore, def); + match static_methods_opt { + Some(static_methods) if + static_methods.len() >= 1 => { + debug!("(building reduced graph for \ + external crate) processing \ + static methods for type name %s", + self.session.str_of(final_ident)); + + let (child_name_bindings, new_parent) = + self.add_child(final_ident, + ModuleReducedGraphParent( + current_module), + OverwriteDuplicates, + dummy_sp()); + + // Process the static methods. First, + // create the module. + let type_module; + match copy child_name_bindings.type_def { + Some(TypeNsDef { + module_def: Some(copy module_def), + _ + }) => { + // We already have a module. This + // is OK. + type_module = module_def; + } + Some(_) | None => { + let parent_link = + self.get_parent_link( + new_parent, final_ident); + child_name_bindings.define_module( + Public, + parent_link, + Some(def), + false, + dummy_sp()); + type_module = + child_name_bindings. + get_module(); + } + } + + // Add each static method to the module. + let new_parent = ModuleReducedGraphParent( + type_module); + for static_methods.each + |static_method_info| { + let ident = static_method_info.ident; + debug!("(building reduced graph for \ + external crate) creating \ + static method '%s'", + self.session.str_of(ident)); + + let (method_name_bindings, _) = + self.add_child( + ident, + new_parent, + OverwriteDuplicates, + dummy_sp()); + let def = def_fn( + static_method_info.def_id, + static_method_info.purity); + method_name_bindings.define_value( + Public, def, dummy_sp()); + } + } + + // Otherwise, do nothing. + Some(_) | None => {} + } + } + } } dl_field => { debug!("(building reduced graph for external crate) \ @@ -1602,7 +1848,6 @@ impl Resolver { module_path: @DVec, subclass: @ImportDirectiveSubclass, span: span) { - let directive = @ImportDirective(privacy, module_path, subclass, span); module_.imports.push(directive); @@ -1804,7 +2049,7 @@ impl Resolver { target, source); } - SingleImport(target, source, ModuleNSOnly) => { + SingleImport(target, source, TypeNSOnly) => { resolution_result = self.resolve_single_module_import (module_, containing_module, target, @@ -1873,12 +2118,11 @@ impl Resolver { return Failed; } - // We need to resolve all four namespaces for this to succeed. + // We need to resolve both namespaces for this to succeed. // // XXX: See if there's some way of handling namespaces in a more - // generic way. We have four of them; it seems worth doing... + // generic way. We have two of them; it seems worth doing... - let mut module_result = UnknownResult; let mut value_result = UnknownResult; let mut type_result = UnknownResult; @@ -1888,10 +2132,6 @@ impl Resolver { // Continue. } Some(child_name_bindings) => { - if (*child_name_bindings).defined_in_namespace(ModuleNS) { - module_result = BoundResult(containing_module, - child_name_bindings); - } if (*child_name_bindings).defined_in_namespace(ValueNS) { value_result = BoundResult(containing_module, child_name_bindings); @@ -1903,11 +2143,10 @@ impl Resolver { } } - // Unless we managed to find a result in all four namespaces - // (exceedingly unlikely), search imports as well. 
- - match (module_result, value_result, type_result) { - (BoundResult(*), BoundResult(*), BoundResult(*)) => { + // Unless we managed to find a result in both namespaces (unlikely), + // search imports as well. + match (value_result, type_result) { + (BoundResult(*), BoundResult(*)) => { // Continue. } _ => { @@ -1931,9 +2170,6 @@ impl Resolver { // therefore accurately report that the names are // unbound. - if module_result.is_unknown() { - module_result = UnboundResult; - } if value_result.is_unknown() { value_result = UnboundResult; } @@ -1970,11 +2206,6 @@ impl Resolver { // The name is an import which has been fully // resolved. We can, therefore, just follow it. - - if module_result.is_unknown() { - module_result = get_binding(import_resolution, - ModuleNS); - } if value_result.is_unknown() { value_result = get_binding(import_resolution, ValueNS); @@ -1998,20 +2229,6 @@ impl Resolver { assert module_.import_resolutions.contains_key(target); let import_resolution = module_.import_resolutions.get(target); - match module_result { - BoundResult(target_module, name_bindings) => { - debug!("(resolving single import) found module binding"); - import_resolution.module_target = - Some(Target(target_module, name_bindings)); - } - UnboundResult => { - debug!("(resolving single import) didn't find module \ - binding"); - } - UnknownResult => { - fail ~"module result should be known at this point"; - } - } match value_result { BoundResult(target_module, name_bindings) => { import_resolution.value_target = @@ -2034,12 +2251,10 @@ impl Resolver { } let i = import_resolution; - match (i.module_target, i.value_target, i.type_target) { - /* - If this name wasn't found in any of the four namespaces, it's - definitely unresolved - */ - (None, None, None) => { return Failed; } + match (i.value_target, i.type_target) { + // If this name wasn't found in either namespace, it's definitely + // unresolved. + (None, None) => { return Failed; } _ => {} } @@ -2078,7 +2293,7 @@ impl Resolver { // Continue. } Some(child_name_bindings) => { - if (*child_name_bindings).defined_in_namespace(ModuleNS) { + if (*child_name_bindings).defined_in_namespace(TypeNS) { module_result = BoundResult(containing_module, child_name_bindings); } @@ -2122,8 +2337,8 @@ impl Resolver { // resolved. We can, therefore, just follow it. if module_result.is_unknown() { - match (*import_resolution). - target_for_namespace(ModuleNS) { + match (*import_resolution).target_for_namespace( + TypeNS) { None => { module_result = UnboundResult; } @@ -2153,7 +2368,7 @@ impl Resolver { match module_result { BoundResult(target_module, name_bindings) => { debug!("(resolving single import) found module binding"); - import_resolution.module_target = + import_resolution.type_target = Some(Target(target_module, name_bindings)); } UnboundResult => { @@ -2166,8 +2381,8 @@ impl Resolver { } let i = import_resolution; - if i.module_target.is_none() { - // If this name wasn't found in the module namespace, it's + if i.type_target.is_none() { + // If this name wasn't found in the type namespace, it's // definitely unresolved. return Failed; } @@ -2219,7 +2434,7 @@ impl Resolver { debug!("(resolving glob import) writing module resolution \ %? into `%s`", - is_none(target_import_resolution.module_target), + is_none(target_import_resolution.type_target), self.module_to_str(module_)); // Here we merge two import resolutions. 
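The hunks above fold the old module namespace into the type namespace, so an import resolution now tracks only a value target and a type target, and a name counts as unresolved only when both are empty. A minimal standalone sketch of that per-namespace merge as performed for glob imports, written in present-day Rust with hypothetical names (`ImportResolution`, `Target`, `merge_from`) rather than the compiler's actual data structures, might look like this:

~~~~
// Illustrative sketch only; not rustc's actual resolver types.
#[derive(Clone, Debug)]
struct Target(String); // stand-in for a (module, name-bindings) pair

#[derive(Clone, Debug, Default)]
struct ImportResolution {
    value_target: Option<Target>,
    type_target: Option<Target>, // modules are now reached through this one
}

impl ImportResolution {
    // Merge another resolution into this one, namespace by namespace,
    // keeping whatever binding we already have when the source has none.
    fn merge_from(&mut self, other: &ImportResolution) {
        if let Some(t) = &other.value_target {
            self.value_target = Some(t.clone());
        }
        if let Some(t) = &other.type_target {
            self.type_target = Some(t.clone());
        }
    }

    // An import is unresolved only if *both* namespaces came up empty.
    fn is_unresolved(&self) -> bool {
        self.value_target.is_none() && self.type_target.is_none()
    }
}

fn main() {
    let mut dest = ImportResolution::default();
    let src = ImportResolution {
        value_target: Some(Target("fn foo".to_string())),
        type_target: Some(Target("mod foo".to_string())),
    };
    dest.merge_from(&src);
    assert!(!dest.is_unresolved());
    println!("{:?}", dest);
}
~~~~

The sketch only shows the shape of the change: with two namespaces instead of three, the module-specific target and its merge arm disappear, which is exactly what the surrounding hunks delete.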
@@ -2229,8 +2444,6 @@ impl Resolver { let new_import_resolution = @ImportResolution(privacy, target_import_resolution.span); - new_import_resolution.module_target = - copy target_import_resolution.module_target; new_import_resolution.value_target = copy target_import_resolution.value_target; new_import_resolution.type_target = @@ -2243,15 +2456,6 @@ impl Resolver { // Merge the two import resolutions at a finer-grained // level. - match copy target_import_resolution.module_target { - None => { - // Continue. - } - Some(module_target) => { - dest_import_resolution.module_target = - Some(copy module_target); - } - } match copy target_import_resolution.value_target { None => { // Continue. @@ -2304,11 +2508,6 @@ impl Resolver { self.module_to_str(module_)); // Merge the child item into the import resolution. - if (*name_bindings).defined_in_namespace(ModuleNS) { - debug!("(resolving glob import) ... for module target"); - dest_import_resolution.module_target = - Some(Target(containing_module, name_bindings)); - } if (*name_bindings).defined_in_namespace(ValueNS) { debug!("(resolving glob import) ... for value target"); dest_import_resolution.value_target = @@ -2331,7 +2530,6 @@ impl Resolver { xray: XrayFlag, span: span) -> ResolveResult<@Module> { - let mut search_module = module_; let mut index = index; let module_path_len = (*module_path).len(); @@ -2342,9 +2540,8 @@ impl Resolver { while index < module_path_len { let name = (*module_path).get_elt(index); - match self.resolve_name_in_module(search_module, name, ModuleNS, - xray) { - + match self.resolve_name_in_module(search_module, name, TypeNS, + xray) { Failed => { self.session.span_err(span, ~"unresolved name"); return Failed; @@ -2356,18 +2553,34 @@ impl Resolver { return Indeterminate; } Success(target) => { - match target.bindings.module_def { - NoModuleDef => { - // Not a module. + // Check to see whether there are type bindings, and, if + // so, whether there is a module within. + match target.bindings.type_def { + Some(copy type_def) => { + match type_def.module_def { + None => { + // Not a module. + self.session.span_err(span, + fmt!("not a \ + module: %s", + self.session. + str_of( + name))); + return Failed; + } + Some(copy module_def) => { + search_module = module_def; + } + } + } + None => { + // There are no type bindings at all. self.session.span_err(span, fmt!("not a module: %s", - self.session. - str_of(name))); + self.session.str_of( + name))); return Failed; } - ModuleDef(_, copy module_) => { - search_module = module_; - } } } } @@ -2440,7 +2653,6 @@ impl Resolver { match module_.children.find(name) { Some(name_bindings) if (*name_bindings).defined_in_namespace(namespace) => { - return Success(Target(module_, name_bindings)); } Some(_) | None => { /* Not found; continue. */ } @@ -2512,18 +2724,27 @@ impl Resolver { fn resolve_module_in_lexical_scope(module_: @Module, name: ident) -> ResolveResult<@Module> { - - match self.resolve_item_in_lexical_scope(module_, name, ModuleNS) { + match self.resolve_item_in_lexical_scope(module_, name, TypeNS) { Success(target) => { - match target.bindings.module_def { - NoModuleDef => { + match target.bindings.type_def { + Some(type_def) => { + match type_def.module_def { + None => { + error!("!!! (resolving module in lexical \ + scope) module wasn't actually a \ + module!"); + return Failed; + } + Some(module_def) => { + return Success(module_def); + } + } + } + None => { error!("!!! 
(resolving module in lexical scope) module wasn't actually a module!"); return Failed; } - ModuleDef(_, module_) => { - return Success(module_); - } } } Indeterminate => { @@ -2658,8 +2879,7 @@ impl Resolver { debug!("(resolving one-level naming result) searching for module"); match self.resolve_item_in_lexical_scope(module_, source_name, - ModuleNS) { - + TypeNS) { Failed => { debug!("(resolving one-level renaming import) didn't find \ module result"); @@ -2679,7 +2899,7 @@ impl Resolver { let mut value_result; let mut type_result; - if allowable_namespaces == ModuleNSOnly { + if allowable_namespaces == TypeNSOnly { value_result = None; type_result = None; } else { @@ -2769,7 +2989,6 @@ impl Resolver { self.session.str_of(target_name), self.module_to_str(module_)); - import_resolution.module_target = module_result; import_resolution.value_target = value_result; import_resolution.type_target = type_result; @@ -2882,18 +3101,19 @@ impl Resolver { ident: ident, namebindings: @NameBindings, reexport: bool) { - for [ModuleNS, TypeNS, ValueNS].each |ns| { - match namebindings.def_for_namespace(*ns) { - Some(d) if d.privacy == Public => { + for [ TypeNS, ValueNS ].each |ns| { + match (namebindings.def_for_namespace(*ns), + namebindings.privacy_for_namespace(*ns)) { + (Some(d), Some(Public)) => { debug!("(computing exports) YES: %s '%s' \ => %?", if reexport { ~"reexport" } else { ~"export"}, self.session.str_of(ident), - def_id_of_def(d.def)); + def_id_of_def(d)); exports2.push(Export2 { reexport: reexport, name: self.session.str_of(ident), - def_id: def_id_of_def(d.def) + def_id: def_id_of_def(d) }); } _ => () @@ -2911,12 +3131,13 @@ impl Resolver { } for module_.import_resolutions.each_ref |ident, importresolution| { - for [ModuleNS, TypeNS, ValueNS].each |ns| { + for [ TypeNS, ValueNS ].each |ns| { match importresolution.target_for_namespace(*ns) { Some(target) => { debug!("(computing exports) maybe reexport '%s'", self.session.str_of(*ident)); - self.add_exports_of_namebindings(exports2, *ident, + self.add_exports_of_namebindings(exports2, + *ident, target.bindings, true) } @@ -3114,9 +3335,16 @@ impl Resolver { return None; } + ConstantItemRibKind => { + // Still doesn't deal with upvars + self.session.span_err(span, + ~"attempt to use a non-constant \ + value in a constant"); + + } } - rib_index += 1u; + rib_index += 1; } return Some(dl_def(def)); @@ -3130,8 +3358,8 @@ impl Resolver { // XXX: Try caching? let mut i = (*ribs).len(); - while i != 0u { - i -= 1u; + while i != 0 { + i -= 1; let rib = (*ribs).get_elt(i); match rib.bindings.find(name) { Some(def_like) => { @@ -3179,7 +3407,33 @@ impl Resolver { } match item.node { - item_enum(_, type_parameters) | + + // enum item: resolve all the variants' discrs, + // then resolve the ty params + item_enum(enum_def, type_parameters) => { + + for enum_def.variants.each() |variant| { + do variant.node.disr_expr.iter() |dis_expr| { + // resolve the discriminator expr + // as a constant + self.with_constant_rib(|| { + self.resolve_expr(*dis_expr, visitor); + }); + } + } + + // n.b. the discr expr gets visted twice. + // but maybe it's okay since the first time will signal an + // error if there is one? 
-- tjc + do self.with_type_parameter_rib + (HasTypeParameters(&type_parameters, item.id, 0, + NormalRibKind)) + || { + + visit_item(item, (), visitor); + } + } + item_ty(_, type_parameters) => { do self.with_type_parameter_rib (HasTypeParameters(&type_parameters, item.id, 0u, @@ -3344,7 +3598,9 @@ impl Resolver { } item_const(*) => { - visit_item(item, (), visitor); + self.with_constant_rib(|| { + visit_item(item, (), visitor); + }); } item_mac(*) => { @@ -3401,6 +3657,12 @@ impl Resolver { f(); (*self.label_ribs).pop(); } + fn with_constant_rib(f: fn()) { + (*self.value_ribs).push(@Rib(ConstantItemRibKind)); + f(); + (*self.value_ribs).pop(); + } + fn resolve_function(rib_kind: RibKind, optional_declaration: Option<@fn_decl>, @@ -3409,7 +3671,6 @@ impl Resolver { self_binding: SelfBinding, capture_clause: CaptureClause, visitor: ResolveVisitor) { - // Check each element of the capture clause. match capture_clause { NoCaptureClause => { @@ -3501,7 +3762,6 @@ impl Resolver { fn resolve_type_parameters(type_parameters: ~[ty_param], visitor: ResolveVisitor) { - for type_parameters.each |type_parameter| { for type_parameter.bounds.each |bound| { match *bound { @@ -3523,13 +3783,12 @@ impl Resolver { methods: ~[@method], optional_destructor: Option, visitor: ResolveVisitor) { - // If applicable, create a rib for the type parameters. let outer_type_parameter_count = (*type_parameters).len(); let borrowed_type_parameters: &~[ty_param] = &*type_parameters; do self.with_type_parameter_rib(HasTypeParameters - (borrowed_type_parameters, id, 0u, - NormalRibKind)) { + (borrowed_type_parameters, id, 0, + OpaqueFunctionRibKind)) { // Resolve the type parameters. self.resolve_type_parameters(*type_parameters, visitor); @@ -3622,26 +3881,24 @@ impl Resolver { span: span, type_parameters: ~[ty_param], opt_trait_reference: Option<@trait_ref>, - self_type: @ty, + self_type: @Ty, methods: ~[@method], visitor: ResolveVisitor) { - // If applicable, create a rib for the type parameters. let outer_type_parameter_count = type_parameters.len(); let borrowed_type_parameters: &~[ty_param] = &type_parameters; do self.with_type_parameter_rib(HasTypeParameters (borrowed_type_parameters, id, 0u, NormalRibKind)) { - // Resolve the type parameters. self.resolve_type_parameters(type_parameters, visitor); // Resolve the trait reference, if necessary. let original_trait_refs = self.current_trait_refs; match opt_trait_reference { - Some(trait_reference) => { - let new_trait_refs = @DVec(); - match self.resolve_path( + Some(trait_reference) => { + let new_trait_refs = @DVec(); + match self.resolve_path( trait_reference.path, TypeNS, true, visitor) { None => { self.session.span_err(span, @@ -3655,10 +3912,10 @@ impl Resolver { (*new_trait_refs).push(def_id_of_def(def)); } } - // Record the current set of trait references. - self.current_trait_refs = Some(new_trait_refs); - } - None => () + // Record the current set of trait references. + self.current_trait_refs = Some(new_trait_refs); + } + None => () } // Resolve the self type. @@ -3820,7 +4077,7 @@ impl Resolver { debug!("(resolving block) leaving block"); } - fn resolve_type(ty: @ty, visitor: ResolveVisitor) { + fn resolve_type(ty: @Ty, visitor: ResolveVisitor) { match ty.node { // Like path expressions, the interpretation of path types depends // on whether the path has multiple elements in it or not. @@ -3828,42 +4085,45 @@ impl Resolver { ty_path(path, path_id) => { // This is a path in the type namespace. Walk through scopes // scopes looking for it. 
+ let mut result_def = None; - let mut result_def; - match self.resolve_path(path, TypeNS, true, visitor) { - Some(def) => { - debug!("(resolving type) resolved `%s` to type", - self.session.str_of(path.idents.last())); - result_def = Some(def); - } - None => { - result_def = None; + // First, check to see whether the name is a primitive type. + if path.idents.len() == 1u { + let name = path.idents.last(); + + match self.primitive_type_table + .primitive_types + .find(name) { + + Some(primitive_type) => { + result_def = + Some(def_prim_ty(primitive_type)); + } + None => { + // Continue. + } } } match result_def { - Some(_) => { - // Continue. - } None => { - // Check to see whether the name is a primitive type. - if path.idents.len() == 1u { - let name = path.idents.last(); - - match self.primitive_type_table - .primitive_types - .find(name) { - - Some(primitive_type) => { - result_def = - Some(def_prim_ty(primitive_type)); - } - None => { - // Continue. - } + match self.resolve_path(path, TypeNS, true, visitor) { + Some(def) => { + debug!("(resolving type) resolved `%s` to \ + type %?", + self.session.str_of( + path.idents.last()), + def); + result_def = Some(def); + } + None => { + result_def = None; } } } + Some(_) => { + // Continue. + } } match copy result_def { @@ -4127,7 +4387,7 @@ impl Resolver { namespace); } - if path.idents.len() > 1u { + if path.idents.len() > 1 { return self.resolve_module_relative_path(path, self.xray_context, namespace); @@ -4179,12 +4439,17 @@ impl Resolver { // First, search children. match containing_module.children.find(name) { Some(child_name_bindings) => { - match (*child_name_bindings).def_for_namespace(namespace) { - Some(def) if def.privacy == Public || xray == Xray => { + match (child_name_bindings.def_for_namespace(namespace), + child_name_bindings.privacy_for_namespace(namespace)) { + (Some(def), Some(Public)) => { + // Found it. Stop the search here. + return ChildNameDefinition(def); + } + (Some(def), _) if xray == Xray => { // Found it. Stop the search here. - return ChildNameDefinition(def.def); + return ChildNameDefinition(def); } - Some(_) | None => { + (Some(_), _) | (None, _) => { // Continue. } } @@ -4200,14 +4465,15 @@ impl Resolver { xray == Xray => { match (*import_resolution).target_for_namespace(namespace) { Some(target) => { - match (*target.bindings) - .def_for_namespace(namespace) { - Some(def) if def.privacy == Public => { + match (target.bindings.def_for_namespace(namespace), + target.bindings.privacy_for_namespace( + namespace)) { + (Some(def), Some(Public)) => { // Found it. import_resolution.used = true; - return ImportNameDefinition(def.def); + return ImportNameDefinition(def); } - Some(_) | None => { + (Some(_), _) | (None, _) => { // This can happen with external impls, due to // the imperfect way we read the metadata. @@ -4347,9 +4613,6 @@ impl Resolver { search_result = self.search_ribs(self.type_ribs, ident, span, AllowCapturingSelf); } - ModuleNS => { - fail ~"module namespaces do not have local ribs"; - } } match copy search_result { @@ -4369,23 +4632,22 @@ impl Resolver { fn resolve_item_by_identifier_in_lexical_scope(ident: ident, namespace: Namespace) -> Option { - // Check the items. 
match self.resolve_item_in_lexical_scope(self.current_module, ident, namespace) { - Success(target) => { match (*target.bindings).def_for_namespace(namespace) { None => { - fail ~"resolved name in a namespace to a set of name \ - bindings with no def for that namespace?!"; + // This can happen if we were looking for a type and + // found a module instead. Modules don't have defs. + return None; } Some(def) => { debug!("(resolving item path in lexical scope) \ resolved `%s` to item", self.session.str_of(ident)); - return Some(def.def); + return Some(def); } } } @@ -4636,6 +4898,9 @@ impl Resolver { } fn search_for_traits_containing_method(name: ident) -> @DVec { + debug!("(searching for traits containing method) looking for '%s'", + self.session.str_of(name)); + let found_traits = @DVec(); let mut search_module = self.current_module; loop { @@ -4643,8 +4908,8 @@ impl Resolver { match copy self.current_trait_refs { Some(trait_def_ids) => { for trait_def_ids.each |trait_def_id| { - self.add_trait_info_if_containing_method - (found_traits, *trait_def_id, name); + self.add_trait_info_if_containing_method( + found_traits, *trait_def_id, name); } } None => { @@ -4656,10 +4921,10 @@ impl Resolver { for search_module.children.each |_name, child_name_bindings| { match child_name_bindings.def_for_namespace(TypeNS) { Some(def) => { - match def.def { + match def { def_ty(trait_def_id) => { - self.add_trait_info_if_containing_method - (found_traits, trait_def_id, name); + self.add_trait_info_if_containing_method( + found_traits, trait_def_id, name); } _ => { // Continue. @@ -4683,11 +4948,11 @@ impl Resolver { Some(target) => { match target.bindings.def_for_namespace(TypeNS) { Some(def) => { - match def.def { + match def { def_ty(trait_def_id) => { self. - add_trait_info_if_containing_method - (found_traits, trait_def_id, name); + add_trait_info_if_containing_method( + found_traits, trait_def_id, name); } _ => { // Continue. @@ -4722,6 +4987,12 @@ impl Resolver { trait_def_id: def_id, name: ident) { + debug!("(adding trait info if containing method) trying trait %d:%d \ + for method '%s'", + trait_def_id.crate, + trait_def_id.node, + self.session.str_of(name)); + match self.trait_info.find(trait_def_id) { Some(trait_info) if trait_info.contains_key(name) => { debug!("(adding trait info if containing method) found trait \ @@ -4884,15 +5155,6 @@ impl Resolver { debug!("Import resolutions:"); for module_.import_resolutions.each |name, import_resolution| { - let mut module_repr; - match (*import_resolution).target_for_namespace(ModuleNS) { - None => { module_repr = ~""; } - Some(_) => { - module_repr = ~" module:?"; - // XXX - } - } - let mut value_repr; match (*import_resolution).target_for_namespace(ValueNS) { None => { value_repr = ~""; } @@ -4911,15 +5173,14 @@ impl Resolver { } } - debug!("* %s:%s%s%s", - self.session.str_of(name), - module_repr, value_repr, type_repr); + debug!("* %s:%s%s", self.session.str_of(name), + value_repr, type_repr); } } } /// Entry point to crate resolution. 
-fn resolve_crate(session: session, lang_items: LanguageItems, crate: @crate) +fn resolve_crate(session: Session, lang_items: LanguageItems, crate: @crate) -> { def_map: DefMap, exp_map2: ExportMap2, trait_map: TraitMap } { diff --git a/src/rustc/middle/trans/alt.rs b/src/rustc/middle/trans/alt.rs index 50ea80a134cb1..6de9ad977c9fb 100644 --- a/src/rustc/middle/trans/alt.rs +++ b/src/rustc/middle/trans/alt.rs @@ -99,7 +99,6 @@ * */ -use driver::session::session; use lib::llvm::llvm; use lib::llvm::{ValueRef, BasicBlockRef}; use pat_util::*; @@ -354,7 +353,7 @@ fn enter_opt(bcx: block, m: &[@Match/&r], opt: &Opt, col: uint, match p.node { ast::pat_enum(_, subpats) => { if opt_eq(tcx, &variant_opt(tcx, p.id), opt) { - Some(option::get_default(&subpats, + Some(option::get_default(subpats, vec::from_elem(variant_size, dummy))) } else { diff --git a/src/rustc/middle/trans/base.rs b/src/rustc/middle/trans/base.rs index 06d5b2f239e3c..d0fe4dec687a7 100644 --- a/src/rustc/middle/trans/base.rs +++ b/src/rustc/middle/trans/base.rs @@ -17,7 +17,7 @@ use libc::{c_uint, c_ulonglong}; use std::{map, time, list}; use std::map::HashMap; use driver::session; -use session::session; +use session::Session; use syntax::attr; use back::{link, abi, upcall}; use syntax::{ast, ast_util, codemap, ast_map}; @@ -206,7 +206,7 @@ fn GEP_enum(bcx: block, llblobptr: ValueRef, enum_id: ast::def_id, assert ix < variant.args.len(); let arg_lltys = vec::map(variant.args, |aty| { - type_of(ccx, ty::subst_tps(ccx.tcx, ty_substs, *aty)) + type_of(ccx, ty::subst_tps(ccx.tcx, ty_substs, None, *aty)) }); let typed_blobptr = PointerCast(bcx, llblobptr, T_ptr(T_struct(arg_lltys))); @@ -267,7 +267,7 @@ fn malloc_raw_dyn(bcx: block, t: ty::t, heap: heap, fn non_gc_box_cast(bcx: block, val: ValueRef) -> ValueRef { debug!("non_gc_box_cast"); add_comment(bcx, ~"non_gc_box_cast"); - assert(llvm::LLVMGetPointerAddressSpace(val_ty(val)) == gc_box_addrspace); + assert(llvm::LLVMGetPointerAddressSpace(val_ty(val)) == gc_box_addrspace || bcx.unreachable); let non_gc_t = T_ptr(llvm::LLVMGetElementType(val_ty(val))); PointerCast(bcx, val, non_gc_t) } @@ -385,16 +385,16 @@ fn get_res_dtor(ccx: @crate_ctxt, did: ast::def_id, let _icx = ccx.insn_ctxt("trans_res_dtor"); if (substs.is_not_empty()) { let did = if did.crate != ast::local_crate { - inline::maybe_instantiate_inline(ccx, did) + inline::maybe_instantiate_inline(ccx, did, true) } else { did }; assert did.crate == ast::local_crate; - monomorphize::monomorphic_fn(ccx, did, substs, None, None).val + monomorphize::monomorphic_fn(ccx, did, substs, None, None, None).val } else if did.crate == ast::local_crate { get_item_val(ccx, did.node) } else { let tcx = ccx.tcx; let name = csearch::get_symbol(ccx.sess.cstore, did); - let class_ty = ty::subst_tps(tcx, substs, + let class_ty = ty::subst_tps(tcx, substs, None, ty::lookup_item_type(tcx, parent_id).ty); let llty = type_of_dtor(ccx, class_ty); get_extern_fn(ccx.externs, ccx.llmod, name, lib::llvm::CCallConv, @@ -529,7 +529,8 @@ fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t, let v_id = variant.id; for vec::each(fn_ty.sig.inputs) |a| { let llfldp_a = GEP_enum(cx, a_tup, tid, v_id, tps, j); - let ty_subst = ty::subst_tps(ccx.tcx, tps, a.ty); + // XXX: Is "None" right here? + let ty_subst = ty::subst_tps(ccx.tcx, tps, None, a.ty); cx = f(cx, llfldp_a, ty_subst); j += 1u; } @@ -1392,8 +1393,11 @@ fn mk_standard_basic_blocks(llfn: ValueRef) -> // - create_llargs_for_fn_args. 
// - new_fn_ctxt // - trans_args -fn new_fn_ctxt_w_id(ccx: @crate_ctxt, path: path, - llfndecl: ValueRef, id: ast::node_id, +fn new_fn_ctxt_w_id(ccx: @crate_ctxt, + path: path, + llfndecl: ValueRef, + id: ast::node_id, + impl_id: Option, param_substs: Option, sp: Option) -> fn_ctxt { let llbbs = mk_standard_basic_blocks(llfndecl); @@ -1410,6 +1414,7 @@ fn new_fn_ctxt_w_id(ccx: @crate_ctxt, path: path, lllocals: HashMap(), llupvars: HashMap(), id: id, + impl_id: impl_id, param_substs: param_substs, span: sp, path: path, @@ -1418,7 +1423,7 @@ fn new_fn_ctxt_w_id(ccx: @crate_ctxt, path: path, fn new_fn_ctxt(ccx: @crate_ctxt, path: path, llfndecl: ValueRef, sp: Option) -> fn_ctxt { - return new_fn_ctxt_w_id(ccx, path, llfndecl, -1, None, sp); + return new_fn_ctxt_w_id(ccx, path, llfndecl, -1, None, None, sp); } // NB: must keep 4 fns in sync: @@ -1561,6 +1566,7 @@ fn trans_closure(ccx: @crate_ctxt, path: path, decl: ast::fn_decl, ty_self: self_arg, param_substs: Option, id: ast::node_id, + impl_id: Option, maybe_load_env: fn(fn_ctxt), finish: fn(block)) { ccx.stats.n_closures += 1; @@ -1568,7 +1574,7 @@ fn trans_closure(ccx: @crate_ctxt, path: path, decl: ast::fn_decl, set_uwtable(llfndecl); // Set up arguments to the function. - let fcx = new_fn_ctxt_w_id(ccx, path, llfndecl, id, param_substs, + let fcx = new_fn_ctxt_w_id(ccx, path, llfndecl, id, impl_id, param_substs, Some(body.span)); let raw_llargs = create_llargs_for_fn_args(fcx, ty_self, decl.inputs); @@ -1620,14 +1626,15 @@ fn trans_fn(ccx: @crate_ctxt, llfndecl: ValueRef, ty_self: self_arg, param_substs: Option, - id: ast::node_id) { + id: ast::node_id, + impl_id: Option) { let do_time = ccx.sess.trans_stats(); let start = if do_time { time::get_time() } else { {sec: 0i64, nsec: 0i32} }; let _icx = ccx.insn_ctxt("trans_fn"); ccx.stats.n_fns += 1; trans_closure(ccx, path, decl, body, llfndecl, ty_self, - param_substs, id, + param_substs, id, impl_id, |fcx| { if ccx.sess.opts.extra_debuginfo { debuginfo::create_function(fcx); @@ -1654,7 +1661,7 @@ fn trans_enum_variant(ccx: @crate_ctxt, ty: varg.ty, ident: special_idents::arg, id: varg.id}); - let fcx = new_fn_ctxt_w_id(ccx, ~[], llfndecl, variant.node.id, + let fcx = new_fn_ctxt_w_id(ccx, ~[], llfndecl, variant.node.id, None, param_substs, None); let raw_llargs = create_llargs_for_fn_args(fcx, no_self, fn_args); let ty_param_substs = match param_substs { @@ -1704,7 +1711,7 @@ fn trans_class_dtor(ccx: @crate_ctxt, path: path, let mut class_ty = ty::lookup_item_type(tcx, parent_id).ty; /* Substitute in the class type if necessary */ do option::iter(&psubsts) |ss| { - class_ty = ty::subst_tps(tcx, ss.tys, class_ty); + class_ty = ty::subst_tps(tcx, ss.tys, ss.self_ty, class_ty); } /* The dtor takes a (null) output pointer, and a self argument, @@ -1724,7 +1731,7 @@ fn trans_class_dtor(ccx: @crate_ctxt, path: path, } /* Translate the dtor body */ trans_fn(ccx, path, ast_util::dtor_dec(), - body, lldecl, impl_self(class_ty), psubsts, dtor_id); + body, lldecl, impl_self(class_ty), psubsts, dtor_id, None); lldecl } @@ -1777,7 +1784,7 @@ fn trans_item(ccx: @crate_ctxt, item: ast::item) { let llfndecl = get_item_val(ccx, item.id); trans_fn(ccx, vec::append(*path, ~[path_name(item.ident)]), - decl, body, llfndecl, no_self, None, item.id); + decl, body, llfndecl, no_self, None, item.id, None); } else { for vec::each(body.node.stmts) |stmt| { match stmt.node { @@ -1789,48 +1796,8 @@ fn trans_item(ccx: @crate_ctxt, item: ast::item) { } } } - ast::item_impl(tps, trait_refs, self_ast_ty, ms) => { - 
meth::trans_impl(ccx, *path, item.ident, ms, tps, None); - - // Translate any methods that have provided implementations. - for trait_refs.each |trait_ref_ptr| { - let trait_def = ccx.tcx.def_map.get(trait_ref_ptr.ref_id); - - // XXX: Cross-crate default methods. - let trait_id = def_id_of_def(trait_def); - if trait_id.crate != ast::local_crate { - loop; - } - - // Get the self type. - let self_ty; - match ccx.tcx.ast_ty_to_ty_cache.get(self_ast_ty) { - ty::atttce_resolved(self_type) => self_ty = self_type, - ty::atttce_unresolved => { - ccx.tcx.sess.impossible_case(item.span, - ~"didn't cache self ast ty"); - } - } - - match ccx.tcx.items.get(trait_id.node) { - ast_map::node_item(trait_item, _) => { - match trait_item.node { - ast::item_trait(tps, _, trait_methods) => { - trans_trait(ccx, tps, trait_methods, path, - item.ident, self_ty); - } - _ => { - ccx.tcx.sess.impossible_case(item.span, - ~"trait item not a \ - trait"); - } - } - } - _ => { - ccx.tcx.sess.impossible_case(item.span, ~"no trait item"); - } - } - } + ast::item_impl(tps, _, _, ms) => { + meth::trans_impl(ccx, *path, item.ident, ms, tps, None, item.id); } ast::item_mod(m) => { trans_mod(ccx, m); @@ -1871,16 +1838,7 @@ fn trans_struct_def(ccx: @crate_ctxt, struct_def: @ast::struct_def, // If there are ty params, the ctor will get monomorphized // Translate methods - meth::trans_impl(ccx, *path, ident, struct_def.methods, tps, None); -} - -fn trans_trait(ccx: @crate_ctxt, tps: ~[ast::ty_param], - trait_methods: ~[ast::trait_method], - path: @ast_map::path, ident: ast::ident, - self_ty: ty::t) { - // Translate any methods that have provided implementations - let (_, provided_methods) = ast_util::split_trait_methods(trait_methods); - meth::trans_impl(ccx, *path, ident, provided_methods, tps, Some(self_ty)); + meth::trans_impl(ccx, *path, ident, struct_def.methods, tps, None, id); } // Translate a module. Doing this amounts to translating the items in the @@ -2035,7 +1993,7 @@ fn get_dtor_symbol(ccx: @crate_ctxt, path: path, id: ast::node_id, // this to item_symbols match substs { Some(ss) => { - let mono_ty = ty::subst_tps(ccx.tcx, ss.tys, t); + let mono_ty = ty::subst_tps(ccx.tcx, ss.tys, ss.self_ty, t); mangle_exported_name( ccx, vec::append(path, @@ -2419,7 +2377,7 @@ fn create_module_map(ccx: @crate_ctxt) -> ValueRef { } -fn decl_crate_map(sess: session::session, mapmeta: link_meta, +fn decl_crate_map(sess: session::Session, mapmeta: link_meta, llmod: ModuleRef) -> ValueRef { let targ_cfg = sess.targ_cfg; let int_type = T_int(targ_cfg); @@ -2524,7 +2482,7 @@ fn write_abi_version(ccx: @crate_ctxt) { false); } -fn trans_crate(sess: session::session, +fn trans_crate(sess: session::Session, crate: @ast::crate, tcx: ty::ctxt, output: &Path, diff --git a/src/rustc/middle/trans/build.rs b/src/rustc/middle/trans/build.rs index 69de8a2cca3e2..ea992600ae19b 100644 --- a/src/rustc/middle/trans/build.rs +++ b/src/rustc/middle/trans/build.rs @@ -6,7 +6,6 @@ use codemap::span; use lib::llvm::{ValueRef, TypeRef, BasicBlockRef, BuilderRef, ModuleRef}; use lib::llvm::{Opcode, IntPredicate, RealPredicate, True, False, CallConv, TypeKind, AtomicBinOp, AtomicOrdering}; -use driver::session::session; use common::*; fn B(cx: block) -> BuilderRef { @@ -434,8 +433,7 @@ fn GEP(cx: block, Pointer: ValueRef, Indices: ~[ValueRef]) -> ValueRef { // // XXX: Use a small-vector optimization to avoid allocations here. 
fn GEPi(cx: block, base: ValueRef, ixs: &[uint]) -> ValueRef { - let mut v: ~[ValueRef] = ~[]; - for vec::each(ixs) |i| { v.push(C_i32(*i as i32)); } + let v = do vec::map(ixs) |i| { C_i32(*i as i32) }; count_insn(cx, "gepi"); return InBoundsGEP(cx, base, v); } diff --git a/src/rustc/middle/trans/callee.rs b/src/rustc/middle/trans/callee.rs index aa998fb7b92d1..175381a7bd1ee 100644 --- a/src/rustc/middle/trans/callee.rs +++ b/src/rustc/middle/trans/callee.rs @@ -75,10 +75,10 @@ fn trans(bcx: block, expr: @ast::expr) -> Callee { fn trans_def(bcx: block, def: ast::def, ref_expr: @ast::expr) -> Callee { match def { - ast::def_fn(did, _) => { + ast::def_fn(did, _) | ast::def_static_method(did, None, _) => { fn_callee(bcx, trans_fn_ref(bcx, did, ref_expr.id)) } - ast::def_static_method(impl_did, trait_did, _) => { + ast::def_static_method(impl_did, Some(trait_did), _) => { fn_callee(bcx, meth::trans_static_method_callee(bcx, impl_did, trait_did, ref_expr.id)) @@ -184,21 +184,31 @@ fn trans_fn_ref_with_vtables( // Polytype of the function item (may have type params) let fn_tpt = ty::lookup_item_type(tcx, def_id); + // Modify the def_id if this is a default method; we want to be + // monomorphizing the trait's code. + let (def_id, opt_impl_did) = + match tcx.provided_method_sources.find(def_id) { + None => (def_id, None), + Some(source) => (source.method_id, Some(source.impl_id)) + }; + // Check whether this fn has an inlined copy and, if so, redirect // def_id to the local id of the inlined copy. let def_id = { if def_id.crate != ast::local_crate { - inline::maybe_instantiate_inline(ccx, def_id) + let may_translate = opt_impl_did.is_none(); + inline::maybe_instantiate_inline(ccx, def_id, may_translate) } else { def_id } }; - // We must monomorphise if the fn has type parameters or is a rust - // intrinsic. In particular, if we see an intrinsic that is - // inlined from a different crate, we want to reemit the intrinsic - // instead of trying to call it in the other crate. - let must_monomorphise = type_params.len() > 0 || { + // We must monomorphise if the fn has type parameters, is a rust + // intrinsic, or is a default method. In particular, if we see an + // intrinsic that is inlined from a different crate, we want to reemit the + // intrinsic instead of trying to call it in the other crate. 
+ let must_monomorphise = type_params.len() > 0 || + opt_impl_did.is_some() || { if def_id.crate == ast::local_crate { let map_node = session::expect( ccx.sess, @@ -222,7 +232,7 @@ fn trans_fn_ref_with_vtables( let mut {val, must_cast} = monomorphize::monomorphic_fn(ccx, def_id, type_params, - vtables, Some(ref_id)); + vtables, opt_impl_did, Some(ref_id)); if must_cast && ref_id != 0 { // Monotype of the REFERENCE to the function (type params // are subst'd) @@ -317,7 +327,9 @@ fn trans_rtcall_or_lang_call_with_type_params(bcx: block, match callee.data { Fn(fn_data) => { let substituted = ty::subst_tps(callee.bcx.tcx(), - type_params, fty); + type_params, + None, + fty); let mut llfnty = type_of::type_of(callee.bcx.ccx(), substituted); llfnty = T_ptr(struct_elt(llfnty, 0)); diff --git a/src/rustc/middle/trans/closure.rs b/src/rustc/middle/trans/closure.rs index 655efe75bf65c..3997076f98713 100644 --- a/src/rustc/middle/trans/closure.rs +++ b/src/rustc/middle/trans/closure.rs @@ -372,7 +372,7 @@ fn trans_expr_fn(bcx: block, let {llbox, cdata_ty, bcx} = build_closure(bcx, cap_vars, ck, ret_handle); trans_closure(ccx, sub_path, decl, body, llfn, no_self, - bcx.fcx.param_substs, id, |fcx| { + bcx.fcx.param_substs, id, None, |fcx| { load_environment(fcx, cdata_ty, cap_vars, ret_handle.is_some(), ck); }, |bcx| { @@ -395,7 +395,7 @@ fn trans_expr_fn(bcx: block, } ty::proto_bare => { trans_closure(ccx, sub_path, decl, body, llfn, no_self, None, - id, |_fcx| { }, |_bcx| { }); + id, None, |_fcx| { }, |_bcx| { }); rslt(bcx, C_null(T_opaque_box_ptr(ccx))) } ty::proto_vstore(ty::vstore_fixed(_)) => { diff --git a/src/rustc/middle/trans/common.rs b/src/rustc/middle/trans/common.rs index 61141f18336fa..554a533e110df 100644 --- a/src/rustc/middle/trans/common.rs +++ b/src/rustc/middle/trans/common.rs @@ -8,7 +8,7 @@ use vec::raw::to_ptr; use std::map::{HashMap,Set}; use syntax::{ast, ast_map}; use driver::session; -use session::session; +use session::Session; use middle::ty; use back::{link, abi, upcall}; use syntax::codemap::span; @@ -110,7 +110,7 @@ fn BuilderRef_res(B: BuilderRef) -> BuilderRef_res { // Crate context. Every crate we compile has one of these. type crate_ctxt = { - sess: session::session, + sess: session::Session, llmod: ModuleRef, td: target_data, tn: type_names, @@ -181,9 +181,12 @@ struct ValSelfData { enum local_val { local_mem(ValueRef), local_imm(ValueRef), } +// Here `self_ty` is the real type of the self parameter to this method. It +// will only be set in the case of default methods. type param_substs = {tys: ~[ty::t], vtables: Option, - bounds: @~[ty::param_bounds]}; + bounds: @~[ty::param_bounds], + self_ty: Option}; fn param_substs_to_str(tcx: ty::ctxt, substs: ¶m_substs) -> ~str { fmt!("param_substs {tys:%?, vtables:%?, bounds:%?}", @@ -220,6 +223,10 @@ type fn_ctxt = @{ mut llreturn: BasicBlockRef, // The 'self' value currently in use in this function, if there // is one. + // + // NB: This is the type of the self *variable*, not the self *type*. The + // self type is set only for default methods, while the self variable is + // set for all methods. mut llself: Option, // The a value alloca'd for calls to upcalls.rust_personality. Used when // outputting the resume instruction. @@ -240,6 +247,9 @@ type fn_ctxt = @{ // a user-defined function. id: ast::node_id, + // The def_id of the impl we're inside, or None if we aren't inside one. + impl_id: Option, + // If this function is being monomorphized, this contains the type // substitutions used. 
param_substs: Option, @@ -595,7 +605,7 @@ fn block_parent(cx: block) -> block { impl block { pure fn ccx() -> @crate_ctxt { self.fcx.ccx } pure fn tcx() -> ty::ctxt { self.fcx.ccx.tcx } - pure fn sess() -> session { self.fcx.ccx.sess } + pure fn sess() -> Session { self.fcx.ccx.sess } fn node_id_to_str(id: ast::node_id) -> ~str { ast_map::node_id_to_str(self.tcx().items, id, self.sess().intr()) @@ -1110,7 +1120,11 @@ enum mono_param_id { datum::DatumMode), } -type mono_id_ = {def: ast::def_id, params: ~[mono_param_id]}; +type mono_id_ = { + def: ast::def_id, + params: ~[mono_param_id], + impl_did_opt: Option +}; type mono_id = @mono_id_; @@ -1177,7 +1191,7 @@ fn align_to(cx: block, off: ValueRef, align: ValueRef) -> ValueRef { return build::And(cx, bumped, build::Not(cx, mask)); } -fn path_str(sess: session::session, p: path) -> ~str { +fn path_str(sess: session::Session, p: path) -> ~str { let mut r = ~"", first = true; for vec::each(p) |e| { match *e { @@ -1193,7 +1207,9 @@ fn path_str(sess: session::session, p: path) -> ~str { fn monomorphize_type(bcx: block, t: ty::t) -> ty::t { match bcx.fcx.param_substs { - Some(substs) => ty::subst_tps(bcx.tcx(), substs.tys, t), + Some(substs) => { + ty::subst_tps(bcx.tcx(), substs.tys, substs.self_ty, t) + } _ => { assert !ty::type_has_params(t); t } } } @@ -1213,7 +1229,9 @@ fn node_id_type_params(bcx: block, id: ast::node_id) -> ~[ty::t] { let params = ty::node_id_to_type_params(tcx, id); match bcx.fcx.param_substs { Some(substs) => { - vec::map(params, |t| ty::subst_tps(tcx, substs.tys, *t)) + do vec::map(params) |t| { + ty::subst_tps(tcx, substs.tys, substs.self_ty, *t) + } } _ => params } @@ -1241,7 +1259,9 @@ fn resolve_vtable_in_fn_ctxt(fcx: fn_ctxt, vt: typeck::vtable_origin) typeck::vtable_static(trait_id, tys, sub) => { let tys = match fcx.param_substs { Some(substs) => { - vec::map(tys, |t| ty::subst_tps(tcx, substs.tys, *t)) + do vec::map(tys) |t| { + ty::subst_tps(tcx, substs.tys, substs.self_ty, *t) + } } _ => tys }; @@ -1290,13 +1310,13 @@ fn dummy_substs(tps: ~[ty::t]) -> ty::substs { tps: tps} } -fn struct_field(index: uint) -> [uint]/3 { +fn struct_field(index: uint) -> [uint * 3] { //! The GEPi sequence to access a field of a record/struct. [0, 0, index] } -fn struct_dtor() -> [uint]/2 { +fn struct_dtor() -> [uint * 2] { //! The GEPi sequence to access the dtor of a struct. 
[0, 1] diff --git a/src/rustc/middle/trans/debuginfo.rs b/src/rustc/middle/trans/debuginfo.rs index 068ec49d6c7ec..2db0dd59cf918 100644 --- a/src/rustc/middle/trans/debuginfo.rs +++ b/src/rustc/middle/trans/debuginfo.rs @@ -9,7 +9,7 @@ use middle::ty; use syntax::{ast, codemap, ast_util, ast_map}; use syntax::parse::token::ident_interner; use codemap::span; -use ast::ty; +use ast::Ty; use pat_util::*; use util::ppaux::ty_to_str; use driver::session::session; @@ -229,7 +229,7 @@ fn create_file(cx: @crate_ctxt, full_path: ~str) -> @metadata { return mdval; } -fn line_from_span(cm: codemap::codemap, sp: span) -> uint { +fn line_from_span(cm: codemap::CodeMap, sp: span) -> uint { codemap::lookup_char_pos(cm, sp.lo).line } @@ -469,7 +469,7 @@ fn create_composite_type(type_tag: int, name: ~str, file: ValueRef, line: int, } fn create_vec(cx: @crate_ctxt, vec_t: ty::t, elem_t: ty::t, - vec_ty_span: codemap::span, elem_ty: @ast::ty) + vec_ty_span: codemap::span, elem_ty: @ast::Ty) -> @metadata { let fname = filename_from_span(cx, vec_ty_span); let file_node = create_file(cx, fname); @@ -492,7 +492,7 @@ fn create_vec(cx: @crate_ctxt, vec_t: ty::t, elem_t: ty::t, return @{node: llnode, data: {hash: ty::type_id(vec_t)}}; } -fn create_ty(_cx: @crate_ctxt, _t: ty::t, _ty: @ast::ty) +fn create_ty(_cx: @crate_ctxt, _t: ty::t, _ty: @ast::Ty) -> @metadata { /*let cache = get_cache(cx); match cached_metadata::<@metadata>( diff --git a/src/rustc/middle/trans/expr.rs b/src/rustc/middle/trans/expr.rs index 333d76a91ee68..30bea1376c000 100644 --- a/src/rustc/middle/trans/expr.rs +++ b/src/rustc/middle/trans/expr.rs @@ -637,11 +637,11 @@ fn trans_def_dps_unadjusted(bcx: block, ref_expr: @ast::expr, }; match def { - ast::def_fn(did, _) => { + ast::def_fn(did, _) | ast::def_static_method(did, None, _) => { let fn_data = callee::trans_fn_ref(bcx, did, ref_expr.id); return fn_data_to_datum(bcx, did, fn_data, lldest); } - ast::def_static_method(impl_did, trait_did, _) => { + ast::def_static_method(impl_did, Some(trait_did), _) => { let fn_data = meth::trans_static_method_callee(bcx, impl_did, trait_did, ref_expr.id); @@ -793,7 +793,9 @@ fn trans_local_var(bcx: block, def: ast::def) -> Datum { // This cast should not be necessary. We should cast self *once*, // but right now this conflicts with default methods. 
- let llselfty = T_ptr(type_of::type_of(bcx.ccx(), self_info.t)); + let real_self_ty = monomorphize_type(bcx, self_info.t); + let llselfty = T_ptr(type_of::type_of(bcx.ccx(), real_self_ty)); + let casted_val = PointerCast(bcx, self_info.v, llselfty); Datum { val: casted_val, diff --git a/src/rustc/middle/trans/foreign.rs b/src/rustc/middle/trans/foreign.rs index dbf5ef810462f..d307fcb5dca0f 100644 --- a/src/rustc/middle/trans/foreign.rs +++ b/src/rustc/middle/trans/foreign.rs @@ -1,7 +1,7 @@ // The classification code for the x86_64 ABI is taken from the clay language // https://github.com/jckarter/clay/blob/master/compiler/src/externals.cpp -use driver::session::{session, arch_x86_64}; +use driver::session::arch_x86_64; use syntax::codemap::span; use libc::c_uint; use syntax::{attr, ast_map}; @@ -112,9 +112,10 @@ fn classify_ty(ty: TypeRef) -> ~[x86_64_reg_class] { Float => 4, Double => 8, Struct => { - do vec::foldl(0, struct_tys(ty)) |s, t| { - s + ty_size(*t) - } + let size = do vec::foldl(0, struct_tys(ty)) |s, t| { + align(s, *t) + ty_size(*t) + }; + align(size, ty) } Array => { let len = llvm::LLVMGetArrayLength(ty) as uint; @@ -794,7 +795,7 @@ fn trans_intrinsic(ccx: @crate_ctxt, decl: ValueRef, item: @ast::foreign_item, { debug!("trans_intrinsic(item.ident=%s)", ccx.sess.str_of(item.ident)); - let fcx = new_fn_ctxt_w_id(ccx, path, decl, item.id, + let fcx = new_fn_ctxt_w_id(ccx, path, decl, item.id, None, Some(substs), Some(item.span)); let mut bcx = top_scope_block(fcx, None), lltop = bcx.llbb; match ccx.sess.str_of(item.ident) { @@ -1025,7 +1026,7 @@ fn trans_foreign_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl, ))); let llty = type_of_fn_from_ty(ccx, t); let llfndecl = decl_internal_cdecl_fn(ccx.llmod, ps, llty); - trans_fn(ccx, path, decl, body, llfndecl, no_self, None, id); + trans_fn(ccx, path, decl, body, llfndecl, no_self, None, id, None); return llfndecl; } diff --git a/src/rustc/middle/trans/inline.rs b/src/rustc/middle/trans/inline.rs index ce9088d4b55c3..d3cc23094ee87 100644 --- a/src/rustc/middle/trans/inline.rs +++ b/src/rustc/middle/trans/inline.rs @@ -5,9 +5,12 @@ use syntax::ast_map::{path, path_mod, path_name}; use base::{trans_item, get_item_val, self_arg, trans_fn, impl_self, get_insn_ctxt}; -fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) - -> ast::def_id -{ +// `translate` will be true if this function is allowed to translate the +// item and false otherwise. Currently, this parameter is set to false when +// translating default methods. 
+fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id, + translate: bool) + -> ast::def_id { let _icx = ccx.insn_ctxt("maybe_instantiate_inline"); match ccx.external.find(fn_id) { Some(Some(node_id)) => { @@ -31,7 +34,7 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) csearch::found(ast::ii_item(item)) => { ccx.external.insert(fn_id, Some(item.id)); ccx.stats.n_inlines += 1; - trans_item(ccx, *item); + if translate { trans_item(ccx, *item); } local_def(item.id) } csearch::found(ast::ii_foreign(item)) => { @@ -53,7 +56,7 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) _ => ccx.sess.bug(~"maybe_instantiate_inline: item has a \ non-enum parent") } - trans_item(ccx, *item); + if translate { trans_item(ccx, *item); } local_def(my_id) } csearch::found_parent(_, _) => { @@ -65,13 +68,14 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) ccx.external.insert(fn_id, Some(mth.id)); let {bounds: impl_bnds, region_param: _, ty: impl_ty} = ty::lookup_item_type(ccx.tcx, impl_did); - if (*impl_bnds).len() + mth.tps.len() == 0u { + if translate && (*impl_bnds).len() + mth.tps.len() == 0u { let llfn = get_item_val(ccx, mth.id); let path = vec::append( ty::item_path(ccx.tcx, impl_did), ~[path_name(mth.ident)]); trans_fn(ccx, path, mth.decl, mth.body, - llfn, impl_self(impl_ty), None, mth.id); + llfn, impl_self(impl_ty), None, mth.id, + Some(impl_did)); } local_def(mth.id) } @@ -83,3 +87,4 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) } } } + diff --git a/src/rustc/middle/trans/meth.rs b/src/rustc/middle/trans/meth.rs index 96cf7fabd1f76..fadde08b9f89a 100644 --- a/src/rustc/middle/trans/meth.rs +++ b/src/rustc/middle/trans/meth.rs @@ -28,7 +28,7 @@ see `trans::base::lval_static_fn()` or `trans::base::monomorphic_fn()`. */ fn trans_impl(ccx: @crate_ctxt, path: path, name: ast::ident, methods: ~[@ast::method], tps: ~[ast::ty_param], - self_ty: Option) { + self_ty: Option, id: ast::node_id) { let _icx = ccx.insn_ctxt("impl::trans_impl"); if tps.len() > 0u { return; } let sub_path = vec::append_one(path, path_name(name)); @@ -36,7 +36,22 @@ fn trans_impl(ccx: @crate_ctxt, path: path, name: ast::ident, if method.tps.len() == 0u { let llfn = get_item_val(ccx, method.id); let path = vec::append_one(sub_path, path_name(method.ident)); - trans_method(ccx, path, *method, None, self_ty, llfn); + + let param_substs_opt; + match self_ty { + None => param_substs_opt = None, + Some(self_ty) => { + param_substs_opt = Some({ + tys: ~[], + vtables: None, + bounds: @~[], + self_ty: Some(self_ty) + }); + } + } + + trans_method(ccx, path, *method, param_substs_opt, self_ty, llfn, + ast_util::local_def(id)); } } } @@ -54,13 +69,15 @@ Translates a (possibly monomorphized) method body. will be none if this is not a default method and must always be present if this is a default method. 
- `llfn`: the LLVM ValueRef for the method +- `impl_id`: the node ID of the impl this method is inside */ fn trans_method(ccx: @crate_ctxt, path: path, method: &ast::method, param_substs: Option, base_self_ty: Option, - llfn: ValueRef) { + llfn: ValueRef, + impl_id: ast::def_id) { // figure out how self is being passed let self_arg = match method.self_ty.node { @@ -76,8 +93,10 @@ fn trans_method(ccx: @crate_ctxt, Some(provided_self_ty) => self_ty = provided_self_ty } let self_ty = match param_substs { - None => self_ty, - Some({tys: ref tys, _}) => ty::subst_tps(ccx.tcx, *tys, self_ty) + None => self_ty, + Some({tys: ref tys, _}) => { + ty::subst_tps(ccx.tcx, *tys, None, self_ty) + } }; match method.self_ty.node { ast::sty_value => { @@ -98,15 +117,20 @@ fn trans_method(ccx: @crate_ctxt, llfn, self_arg, param_substs, - method.id); + method.id, + Some(impl_id)); } -fn trans_self_arg(bcx: block, base: @ast::expr, +fn trans_self_arg(bcx: block, + base: @ast::expr, mentry: typeck::method_map_entry) -> Result { let _icx = bcx.insn_ctxt("impl::trans_self_arg"); let mut temp_cleanups = ~[]; + + // Compute the mode and type of self. let self_arg = {mode: mentry.self_arg.mode, ty: monomorphize_type(bcx, mentry.self_arg.ty)}; + let result = trans_arg_expr(bcx, self_arg, base, &mut temp_cleanups, None, DontAutorefArg); @@ -120,11 +144,31 @@ fn trans_self_arg(bcx: block, base: @ast::expr, } fn trans_method_callee(bcx: block, callee_id: ast::node_id, - self: @ast::expr, mentry: typeck::method_map_entry) - -> Callee -{ + self: @ast::expr, mentry: typeck::method_map_entry) -> + Callee { let _icx = bcx.insn_ctxt("impl::trans_method_callee"); - match mentry.origin { + + // Replace method_self with method_static here. + let mut origin = mentry.origin; + match origin { + typeck::method_self(copy trait_id, copy method_index) => { + // Get the ID of the impl we're inside. + let impl_def_id = bcx.fcx.impl_id.get(); + + io::println(fmt!("impl_def_id is %?", impl_def_id)); + + // Get the ID of the method we're calling. 
+ let method_name = + ty::trait_methods(bcx.tcx(), trait_id)[method_index].ident; + let method_id = method_with_name(bcx.ccx(), impl_def_id, + method_name); + origin = typeck::method_static(method_id); + } + typeck::method_static(*) | typeck::method_param(*) | + typeck::method_trait(*) => {} + } + + match origin { typeck::method_static(did) => { let callee_fn = callee::trans_fn_ref(bcx, did, callee_id); let Result {bcx, val} = trans_self_arg(bcx, self, mentry); @@ -155,7 +199,7 @@ fn trans_method_callee(bcx: block, callee_id: ast::node_id, trans_trait_callee(bcx, callee_id, off, self, vstore) } typeck::method_self(*) => { - bcx.tcx().sess.span_bug(self.span, ~"self method call"); + fail ~"method_self should have been handled above" } } } @@ -255,7 +299,7 @@ fn trans_static_method_callee(bcx: block, fn method_from_methods(ms: ~[@ast::method], name: ast::ident) -> ast::def_id { - local_def(option::get(&vec::find(ms, |m| m.ident == name)).id) + local_def(option::get(vec::find(ms, |m| m.ident == name)).id) } fn method_with_name(ccx: @crate_ctxt, impl_id: ast::def_id, @@ -519,13 +563,21 @@ fn vtable_id(ccx: @crate_ctxt, origin: typeck::vtable_origin) -> mono_id { match origin { typeck::vtable_static(impl_id, substs, sub_vtables) => { monomorphize::make_mono_id( - ccx, impl_id, substs, - if (*sub_vtables).len() == 0u { None } - else { Some(sub_vtables) }, None) + ccx, + impl_id, + substs, + if (*sub_vtables).len() == 0u { + None + } else { + Some(sub_vtables) + }, + None, + None) } typeck::vtable_trait(trait_id, substs) => { @{def: trait_id, - params: vec::map(substs, |t| mono_precise(*t, None))} + params: vec::map(substs, |t| mono_precise(*t, None)), + impl_did_opt: None} } // can't this be checked at the callee? _ => fail ~"vtable_id" @@ -571,7 +623,7 @@ fn make_impl_vtable(ccx: @crate_ctxt, impl_id: ast::def_id, substs: ~[ty::t], let has_tps = (*ty::lookup_item_type(ccx.tcx, impl_id).bounds).len() > 0u; make_vtable(ccx, vec::map(*ty::trait_methods(tcx, trt_id), |im| { - let fty = ty::subst_tps(tcx, substs, ty::mk_fn(tcx, im.fty)); + let fty = ty::subst_tps(tcx, substs, None, ty::mk_fn(tcx, im.fty)); if (*im.tps).len() > 0u || ty::type_has_self(fty) { C_null(T_ptr(T_nil())) } else { @@ -580,10 +632,11 @@ fn make_impl_vtable(ccx: @crate_ctxt, impl_id: ast::def_id, substs: ~[ty::t], // If the method is in another crate, need to make an inlined // copy first if m_id.crate != ast::local_crate { - m_id = inline::maybe_instantiate_inline(ccx, m_id); + // XXX: Set impl ID here? 
+ m_id = inline::maybe_instantiate_inline(ccx, m_id, true); } monomorphize::monomorphic_fn(ccx, m_id, substs, - Some(vtables), None).val + Some(vtables), None, None).val } else if m_id.crate == ast::local_crate { get_item_val(ccx, m_id.node) } else { diff --git a/src/rustc/middle/trans/monomorphize.rs b/src/rustc/middle/trans/monomorphize.rs index 914b733df198d..aadd6fc7957ab 100644 --- a/src/rustc/middle/trans/monomorphize.rs +++ b/src/rustc/middle/trans/monomorphize.rs @@ -16,9 +16,9 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id, real_substs: ~[ty::t], vtables: Option, - ref_id: Option) - -> {val: ValueRef, must_cast: bool} -{ + impl_did_opt: Option, + ref_id: Option) -> + {val: ValueRef, must_cast: bool} { let _icx = ccx.insn_ctxt("monomorphic_fn"); let mut must_cast = false; let substs = vec::map(real_substs, |t| { @@ -31,7 +31,8 @@ fn monomorphic_fn(ccx: @crate_ctxt, for real_substs.each() |s| { assert !ty::type_has_params(*s); } for substs.each() |s| { assert !ty::type_has_params(*s); } let param_uses = type_use::type_uses_for(ccx, fn_id, substs.len()); - let hash_id = make_mono_id(ccx, fn_id, substs, vtables, Some(param_uses)); + let hash_id = make_mono_id(ccx, fn_id, substs, vtables, impl_did_opt, + Some(param_uses)); if vec::any(hash_id.params, |p| match *p { mono_precise(_, _) => false, _ => true }) { must_cast = true; @@ -73,8 +74,11 @@ fn monomorphic_fn(ccx: @crate_ctxt, } ast_map::node_dtor(_, dtor, _, pt) => (pt, special_idents::dtor, dtor.span), - ast_map::node_trait_method(*) => { - ccx.tcx.sess.bug(~"Can't monomorphize a trait method") + ast_map::node_trait_method(@ast::provided(m), _, pt) => { + (pt, m.ident, m.span) + } + ast_map::node_trait_method(@ast::required(_), _, _) => { + ccx.tcx.sess.bug(~"Can't monomorphize a required trait method") } ast_map::node_expr(*) => { ccx.tcx.sess.bug(~"Can't monomorphize an expr") @@ -93,12 +97,23 @@ fn monomorphic_fn(ccx: @crate_ctxt, ccx.tcx.sess.bug(~"Can't monomorphize a local") } }; - let mono_ty = ty::subst_tps(ccx.tcx, substs, llitem_ty); + + // Look up the impl type if we're translating a default method. + // XXX: Generics. + let impl_ty_opt; + match impl_did_opt { + None => impl_ty_opt = None, + Some(impl_did) => { + impl_ty_opt = Some(ty::lookup_item_type(ccx.tcx, impl_did).ty); + } + } + + let mono_ty = ty::subst_tps(ccx.tcx, substs, impl_ty_opt, llitem_ty); let llfty = type_of_fn_from_ty(ccx, mono_ty); ccx.stats.n_monos += 1; - let depth = option::get_default(&ccx.monomorphizing.find(fn_id), 0u); + let depth = option::get_default(ccx.monomorphizing.find(fn_id), 0u); // Random cut-off -- code that needs to instantiate the same function // recursively more than ten times can probably safely be assumed to be // causing an infinite expansion. 
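The comment above describes the guard against runaway instantiation: each item carries a depth counter, and recursing into the same item more than a fixed number of times (ten here) is treated as infinite expansion. A rough sketch of such a depth-limited guard, in present-day Rust with hypothetical names (`Monomorphizer`, `MAX_MONO_DEPTH`, `instantiate`) rather than the compiler's real interfaces, might look like this:

~~~~
// Illustrative sketch only; not rustc's actual monomorphization code.
use std::collections::HashMap;

const MAX_MONO_DEPTH: usize = 10;

#[derive(Default)]
struct Monomorphizer {
    // Keyed by the item's id in the real compiler; a string here for brevity.
    depth: HashMap<&'static str, usize>,
}

impl Monomorphizer {
    fn instantiate(&mut self, item: &'static str, self_referential: bool)
                   -> Result<(), String> {
        // Bump this item's instantiation depth and read the new value.
        let depth = {
            let d = self.depth.entry(item).or_insert(0);
            *d += 1;
            *d
        };
        let result = if depth > MAX_MONO_DEPTH {
            Err(format!("overly deep expansion of `{}`", item))
        } else if self_referential {
            // A self-referential instantiation would recurse here.
            self.instantiate(item, self_referential)
        } else {
            Ok(()) // translate the item's body at this point
        };
        // Unwind the counter on the way out, success or failure.
        *self.depth.get_mut(item).unwrap() -= 1;
        result
    }
}

fn main() {
    let mut m = Monomorphizer::default();
    assert!(m.instantiate("plain_item", false).is_ok());
    assert!(m.instantiate("looping_item", true).is_err());
}
~~~~

The cut-off is deliberately arbitrary, as the comment says: it trades a small risk of rejecting very deep but finite instantiation chains for a clear error instead of unbounded code growth.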
@@ -118,12 +133,18 @@ fn monomorphic_fn(ccx: @crate_ctxt, lldecl }; - let psubsts = Some({tys: substs, vtables: vtables, bounds: tpt.bounds}); + let psubsts = Some({ + tys: substs, + vtables: vtables, + bounds: tpt.bounds, + self_ty: impl_ty_opt + }); + let lldecl = match map_node { ast_map::node_item(i@@{node: ast::item_fn(decl, _, _, body), _}, _) => { let d = mk_lldecl(); set_inline_hint_if_appr(i.attrs, d); - trans_fn(ccx, pt, decl, body, d, no_self, psubsts, fn_id.node); + trans_fn(ccx, pt, decl, body, d, no_self, psubsts, fn_id.node, None); d } ast_map::node_item(*) => { @@ -137,7 +158,7 @@ fn monomorphic_fn(ccx: @crate_ctxt, } ast_map::node_variant(v, enum_item, _) => { let tvs = ty::enum_variants(ccx.tcx, local_def(enum_item.id)); - let this_tv = option::get(&vec::find(*tvs, |tv| { + let this_tv = option::get(vec::find(*tvs, |tv| { tv.id.node == fn_id.node})); let d = mk_lldecl(); set_inline_hint(d); @@ -154,11 +175,19 @@ fn monomorphic_fn(ccx: @crate_ctxt, } d } - ast_map::node_method(mth, _, _) => { + ast_map::node_method(mth, supplied_impl_did, _) => { // XXX: What should the self type be here? let d = mk_lldecl(); set_inline_hint_if_appr(mth.attrs, d); - meth::trans_method(ccx, pt, mth, psubsts, None, d); + + // Override the impl def ID if necessary. + let impl_did; + match impl_did_opt { + None => impl_did = supplied_impl_did, + Some(override_impl_did) => impl_did = override_impl_did + } + + meth::trans_method(ccx, pt, mth, psubsts, None, d, impl_did); d } ast_map::node_dtor(_, dtor, _, pt) => { @@ -171,6 +200,15 @@ fn monomorphic_fn(ccx: @crate_ctxt, trans_class_dtor(ccx, *pt, dtor.node.body, dtor.node.id, psubsts, Some(hash_id), parent_id) } + ast_map::node_trait_method(@ast::provided(mth), _, pt) => { + let d = mk_lldecl(); + set_inline_hint_if_appr(mth.attrs, d); + io::println(fmt!("monomorphic_fn impl_did_opt is %?", impl_did_opt)); + meth::trans_method(ccx, *pt, mth, psubsts, None, d, + impl_did_opt.get()); + d + } + // Ugh -- but this ensures any new variants won't be forgotten ast_map::node_expr(*) | ast_map::node_stmt(*) | @@ -226,6 +264,7 @@ fn normalize_for_monomorphization(tcx: ty::ctxt, ty: ty::t) -> Option { fn make_mono_id(ccx: @crate_ctxt, item: ast::def_id, substs: ~[ty::t], vtables: Option, + impl_did_opt: Option, param_uses: Option<~[type_use::type_uses]>) -> mono_id { let precise_param_ids = match vtables { Some(vts) => { @@ -295,5 +334,5 @@ fn make_mono_id(ccx: @crate_ctxt, item: ast::def_id, substs: ~[ty::t], }) } }; - @{def: item, params: param_ids} + @{def: item, params: param_ids, impl_did_opt: impl_did_opt} } diff --git a/src/rustc/middle/trans/reachable.rs b/src/rustc/middle/trans/reachable.rs index bfb8de76a6c58..a99ef96b2544a 100644 --- a/src/rustc/middle/trans/reachable.rs +++ b/src/rustc/middle/trans/reachable.rs @@ -128,7 +128,7 @@ fn mk_ty_visitor() -> visit::vt { visit::mk_vt(@{visit_ty: traverse_ty, ..*visit::default_visitor()}) } -fn traverse_ty(ty: @ty, cx: ctx, v: visit::vt) { +fn traverse_ty(ty: @Ty, cx: ctx, v: visit::vt) { if cx.rmap.contains_key(ty.id) { return; } cx.rmap.insert(ty.id, ()); diff --git a/src/rustc/middle/trans/reflect.rs b/src/rustc/middle/trans/reflect.rs index c105caecaebbe..c52b653d536c1 100644 --- a/src/rustc/middle/trans/reflect.rs +++ b/src/rustc/middle/trans/reflect.rs @@ -1,5 +1,4 @@ use std::map::HashMap; -use driver::session::session; use lib::llvm::{TypeRef, ValueRef}; use syntax::ast; use back::abi; @@ -58,7 +57,7 @@ impl reflector { fn visit(ty_name: ~str, args: ~[ValueRef]) { let tcx = self.bcx.tcx(); - 
let mth_idx = option::get(&ty::method_idx( + let mth_idx = option::get(ty::method_idx( tcx.sess.ident_of(~"visit_" + ty_name), *self.visitor_methods)); let mth_ty = ty::mk_fn(tcx, self.visitor_methods[mth_idx].fty); diff --git a/src/rustc/middle/trans/tvec.rs b/src/rustc/middle/trans/tvec.rs index b78314a67478c..bfc172db3148a 100644 --- a/src/rustc/middle/trans/tvec.rs +++ b/src/rustc/middle/trans/tvec.rs @@ -1,5 +1,4 @@ use syntax::ast; -use driver::session::session; use lib::llvm::{ValueRef, TypeRef}; use back::abi; use syntax::codemap::span; @@ -141,7 +140,7 @@ fn trans_fixed_vstore(bcx: block, { //! // - // [...]/_ allocates a fixed-size array and moves it around "by value". + // [...] allocates a fixed-size array and moves it around "by value". // In this case, it means that the caller has already given us a location // to store the array of the suitable size, so all we have to do is // generate the content. diff --git a/src/rustc/middle/trans/type_use.rs b/src/rustc/middle/trans/type_use.rs index 1f9ad20dd03e7..8b2efacd4d16a 100644 --- a/src/rustc/middle/trans/type_use.rs +++ b/src/rustc/middle/trans/type_use.rs @@ -20,7 +20,6 @@ use std::map::HashMap; use std::list; use std::list::{List, Cons, Nil}; -use driver::session::session; use metadata::csearch; use syntax::ast::*, syntax::ast_util, syntax::visit; use syntax::ast_map; @@ -40,8 +39,13 @@ fn type_uses_for(ccx: @crate_ctxt, fn_id: def_id, n_tps: uint) Some(uses) => return uses, None => () } - let fn_id_loc = if fn_id.crate == local_crate { fn_id } - else { inline::maybe_instantiate_inline(ccx, fn_id) }; + + let fn_id_loc = if fn_id.crate == local_crate { + fn_id + } else { + inline::maybe_instantiate_inline(ccx, fn_id, true) + }; + // Conservatively assume full use for recursive loops ccx.type_use_cache.insert(fn_id, vec::from_elem(n_tps, 3u)); diff --git a/src/rustc/middle/ty.rs b/src/rustc/middle/ty.rs index cc132a431a330..90527e88bc850 100644 --- a/src/rustc/middle/ty.rs +++ b/src/rustc/middle/ty.rs @@ -5,7 +5,7 @@ use std::{map, smallintmap}; use result::Result; use std::map::HashMap; use driver::session; -use session::session; +use session::Session; use syntax::{ast, ast_map}; use syntax::ast_util; use syntax::ast_util::{is_local, local_def}; @@ -19,6 +19,7 @@ use syntax::ast::*; use syntax::print::pprust::*; use util::ppaux::{ty_to_str, proto_ty_to_str, tys_to_str}; +export ProvidedMethodSource; export TyVid, IntVid, FnVid, RegionVid, vid; export br_hashmap; export is_instantiable; @@ -102,7 +103,7 @@ export ty_infer, mk_infer, type_is_ty_var, mk_var, mk_int_var; export InferTy, TyVar, IntVar; export ty_self, mk_self, type_has_self; export ty_class; -export region, bound_region, encl_region; +export Region, bound_region, encl_region; export re_bound, re_free, re_scope, re_static, re_var; export br_self, br_anon, br_named, br_cap_avoid; export get, type_has_params, type_needs_infer, type_has_regions; @@ -113,7 +114,7 @@ export ty_var_id; export ty_to_def_id; export ty_fn_args; export ty_region; -export kind, kind_implicitly_copyable, kind_send_copy, kind_copyable; +export Kind, kind_implicitly_copyable, kind_send_copy, kind_copyable; export kind_noncopyable, kind_const; export kind_can_be_copied, kind_can_be_sent, kind_can_be_implicitly_copied; export kind_is_safe_for_default_mode; @@ -207,7 +208,8 @@ type method = {ident: ast::ident, tps: @~[param_bounds], fty: FnTy, self_ty: ast::self_ty_, - vis: ast::visibility}; + vis: ast::visibility, + def_id: ast::def_id}; type mt = {ty: t, mutbl: ast::mutability}; @@ -217,7 +219,7 
@@ enum vstore { vstore_fixed(uint), vstore_uniq, vstore_box, - vstore_slice(region) + vstore_slice(Region) } type field_ty = { @@ -300,7 +302,7 @@ type AutoAdjustment = { #[auto_deserialize] type AutoRef = { kind: AutoRefKind, - region: region, + region: Region, mutbl: ast::mutability }; @@ -314,14 +316,19 @@ enum AutoRefKind { AutoPtr } +struct ProvidedMethodSource { + method_id: ast::def_id, + impl_id: ast::def_id +} + type ctxt = @{diag: syntax::diagnostic::span_handler, interner: HashMap, mut next_id: uint, vecs_implicitly_copyable: bool, legacy_modes: bool, - cstore: metadata::cstore::cstore, - sess: session::session, + cstore: metadata::cstore::CStore, + sess: session::Session, def_map: resolve::DefMap, region_map: middle::region::region_map, @@ -347,8 +354,8 @@ type ctxt = short_names_cache: HashMap, needs_drop_cache: HashMap, needs_unwind_cleanup_cache: HashMap, - kind_cache: HashMap, - ast_ty_to_ty_cache: HashMap<@ast::ty, ast_ty_to_ty_cache_entry>, + kind_cache: HashMap, + ast_ty_to_ty_cache: HashMap<@ast::Ty, ast_ty_to_ty_cache_entry>, enum_var_cache: HashMap, trait_method_cache: HashMap, ty_param_bounds: HashMap, @@ -356,7 +363,8 @@ type ctxt = adjustments: HashMap, normalized_cache: HashMap, lang_items: middle::lang_items::LanguageItems, - legacy_boxed_traits: HashMap}; + legacy_boxed_traits: HashMap, + provided_method_sources: HashMap}; enum tbox_flag { has_params = 1, @@ -511,7 +519,7 @@ impl param_ty : to_bytes::IterBytes { /// Representation of regions: #[auto_serialize] #[auto_deserialize] -enum region { +enum Region { /// Bound regions are found (primarily) in function types. They indicate /// region parameters that have yet to be replaced with actual regions /// (analogous to type parameters, except that due to the monomorphic @@ -562,7 +570,7 @@ enum bound_region { br_cap_avoid(ast::node_id, @bound_region), } -type opt_region = Option; +type opt_region = Option; /** * The type substs represents the kinds of things that can be substituted to @@ -602,7 +610,7 @@ enum sty { ty_uniq(mt), ty_evec(mt, vstore), ty_ptr(mt), - ty_rptr(region, mt), + ty_rptr(Region, mt), ty_rec(~[field]), ty_fn(FnTy), ty_trait(def_id, substs, vstore), @@ -648,9 +656,9 @@ enum type_err { terr_record_fields(expected_found), terr_arg_count, terr_mode_mismatch(expected_found), - terr_regions_does_not_outlive(region, region), - terr_regions_not_same(region, region), - terr_regions_no_overlap(region, region), + terr_regions_does_not_outlive(Region, Region), + terr_regions_not_same(Region, Region), + terr_regions_no_overlap(Region, Region), terr_vstores_differ(terr_vstore_kind, expected_found), terr_in_field(@type_err, ast::ident), terr_sorts(expected_found), @@ -775,7 +783,7 @@ impl FnVid : to_bytes::IterBytes { } } -fn param_bounds_to_kind(bounds: param_bounds) -> kind { +fn param_bounds_to_kind(bounds: param_bounds) -> Kind { let mut kind = kind_noncopyable(); for vec::each(*bounds) |bound| { match *bound { @@ -826,7 +834,7 @@ fn new_ty_hash() -> map::HashMap { map::HashMap() } -fn mk_ctxt(s: session::session, +fn mk_ctxt(s: session::Session, dm: resolve::DefMap, amap: ast_map::map, freevars: freevars::freevar_map, @@ -879,7 +887,8 @@ fn mk_ctxt(s: session::session, adjustments: HashMap(), normalized_cache: new_ty_hash(), lang_items: move lang_items, - legacy_boxed_traits: HashMap()} + legacy_boxed_traits: HashMap(), + provided_method_sources: HashMap()} } @@ -895,7 +904,7 @@ fn mk_t_with_id(cx: ctxt, +st: sty, o_def_id: Option) -> t { _ => () } let mut flags = 0u; - fn rflags(r: region) -> uint { 
+ fn rflags(r: Region) -> uint { (has_regions as uint) | { match r { ty::re_var(_) => needs_infer as uint, @@ -1009,12 +1018,12 @@ fn mk_imm_uniq(cx: ctxt, ty: t) -> t { mk_uniq(cx, {ty: ty, fn mk_ptr(cx: ctxt, tm: mt) -> t { mk_t(cx, ty_ptr(tm)) } -fn mk_rptr(cx: ctxt, r: region, tm: mt) -> t { mk_t(cx, ty_rptr(r, tm)) } +fn mk_rptr(cx: ctxt, r: Region, tm: mt) -> t { mk_t(cx, ty_rptr(r, tm)) } -fn mk_mut_rptr(cx: ctxt, r: region, ty: t) -> t { +fn mk_mut_rptr(cx: ctxt, r: Region, ty: t) -> t { mk_rptr(cx, r, {ty: ty, mutbl: ast::m_mutbl}) } -fn mk_imm_rptr(cx: ctxt, r: region, ty: t) -> t { +fn mk_imm_rptr(cx: ctxt, r: Region, ty: t) -> t { mk_rptr(cx, r, {ty: ty, mutbl: ast::m_imm}) } @@ -1139,7 +1148,7 @@ fn default_arg_mode_for_ty(tcx: ctxt, ty: ty::t) -> ast::rmode { // Returns the narrowest lifetime enclosing the evaluation of the expression // with id `id`. -fn encl_region(cx: ctxt, id: ast::node_id) -> ty::region { +fn encl_region(cx: ctxt, id: ast::node_id) -> ty::Region { match cx.region_map.find(id) { Some(encl_scope) => ty::re_scope(encl_scope), None => ty::re_static @@ -1256,7 +1265,7 @@ fn fold_ty(cx: ctxt, t0: t, fldop: fn(t) -> t) -> t { fn walk_regions_and_ty( cx: ctxt, ty: t, - walkr: fn(r: region), + walkr: fn(r: Region), walkt: fn(t: t) -> bool) { if (walkt(ty)) { @@ -1271,13 +1280,13 @@ fn walk_regions_and_ty( fn fold_regions_and_ty( cx: ctxt, ty: t, - fldr: fn(r: region) -> region, + fldr: fn(r: Region) -> Region, fldfnt: fn(t: t) -> t, fldt: fn(t: t) -> t) -> t { fn fold_substs( substs: &substs, - fldr: fn(r: region) -> region, + fldr: fn(r: Region) -> Region, fldt: fn(t: t) -> t) -> substs { {self_r: substs.self_r.map(|r| fldr(*r)), @@ -1342,10 +1351,10 @@ fn fold_regions_and_ty( fn fold_regions( cx: ctxt, ty: t, - fldr: fn(r: region, in_fn: bool) -> region) -> t { + fldr: fn(r: Region, in_fn: bool) -> Region) -> t { fn do_fold(cx: ctxt, ty: t, in_fn: bool, - fldr: fn(region, bool) -> region) -> t { + fldr: fn(Region, bool) -> Region) -> t { if !type_has_regions(ty) { return ty; } fold_regions_and_ty( cx, ty, @@ -1356,9 +1365,9 @@ fn fold_regions( do_fold(cx, ty, false, fldr) } -fn fold_region(cx: ctxt, t0: t, fldop: fn(region, bool) -> region) -> t { +fn fold_region(cx: ctxt, t0: t, fldop: fn(Region, bool) -> Region) -> t { fn do_fold(cx: ctxt, t0: t, under_r: bool, - fldop: fn(region, bool) -> region) -> t { + fldop: fn(Region, bool) -> Region) -> t { let tb = get(t0); if !tbox_has_flag(tb, has_regions) { return t0; } match tb.sty { @@ -1392,13 +1401,23 @@ fn fold_region(cx: ctxt, t0: t, fldop: fn(region, bool) -> region) -> t { } // Substitute *only* type parameters. Used in trans where regions are erased. 
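
The `subst_tps` change just below threads an optional self type through the substitution so that `ty_self` can be rewritten when a default method is translated against a concrete impl. A toy model of that idea, using a made-up four-variant `Ty` enum rather than rustc's real type representation:

~~~~
// Minimal model: a type is a concrete int, the n-th type parameter,
// the `Self` type, or a box of another type.
#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Int,
    Param(usize),
    SelfTy,
    Box(Box<Ty>),
}

// Substitute type parameters by index; if a self type is supplied, `SelfTy`
// is rewritten to it as well (mirroring the new `self_ty_opt` argument).
fn subst_tps(tps: &[Ty], self_ty: Option<&Ty>, ty: &Ty) -> Ty {
    match ty {
        Ty::Param(i) => tps[*i].clone(),
        Ty::SelfTy => match self_ty {
            Some(s) => subst_tps(tps, self_ty, s),
            None => panic!("ty_self unexpected here"),
        },
        Ty::Box(inner) => Ty::Box(Box::new(subst_tps(tps, self_ty, inner))),
        Ty::Int => Ty::Int,
    }
}

fn main() {
    // With tps = [Int] and Self = Param(0), a boxed Self becomes a boxed int.
    let out = subst_tps(
        &[Ty::Int],
        Some(&Ty::Param(0)),
        &Ty::Box(Box::new(Ty::SelfTy)),
    );
    assert_eq!(out, Ty::Box(Box::new(Ty::Int)));
}
~~~~
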
-fn subst_tps(cx: ctxt, tps: &[t], typ: t) -> t { - if tps.len() == 0u { return typ; } +fn subst_tps(cx: ctxt, tps: &[t], self_ty_opt: Option, typ: t) -> t { + if tps.len() == 0u && self_ty_opt.is_none() { return typ; } let tb = ty::get(typ); - if !tbox_has_flag(tb, has_params) { return typ; } + if self_ty_opt.is_none() && !tbox_has_flag(tb, has_params) { return typ; } match tb.sty { - ty_param(p) => tps[p.idx], - ref sty => fold_sty_to_ty(cx, sty, |t| subst_tps(cx, tps, t)) + ty_param(p) => tps[p.idx], + ty_self => { + match self_ty_opt { + None => cx.sess.bug(~"ty_self unexpected here"), + Some(self_ty) => { + subst_tps(cx, tps, self_ty_opt, self_ty) + } + } + } + ref sty => { + fold_sty_to_ty(cx, sty, |t| subst_tps(cx, tps, self_ty_opt, t)) + } } } @@ -1454,7 +1473,10 @@ fn subst(cx: ctxt, fold_regions_and_ty( cx, typ, |r| match r { - re_bound(br_self) => substs.self_r.get(), + re_bound(br_self) => substs.self_r.expect( + #fmt("ty::subst: \ + Reference to self region when given substs with no \ + self region, ty = %s", ty_to_str(cx, typ))), _ => r }, |t| do_subst(cx, substs, t), @@ -1758,7 +1780,7 @@ fn type_needs_unwind_cleanup_(cx: ctxt, ty: t, return needs_unwind_cleanup; } -enum kind { kind_(u32) } +enum Kind { kind_(u32) } /// can be copied (implicitly or explicitly) const KIND_MASK_COPY : u32 = 0b000000000000000000000000001_u32; @@ -1778,92 +1800,92 @@ const KIND_MASK_IMPLICIT : u32 = 0b000000000000000000000010000_u32; /// safe for default mode (subset of KIND_MASK_IMPLICIT) const KIND_MASK_DEFAULT_MODE : u32 = 0b000000000000000000000100000_u32; -fn kind_noncopyable() -> kind { +fn kind_noncopyable() -> Kind { kind_(0u32) } -fn kind_copyable() -> kind { +fn kind_copyable() -> Kind { kind_(KIND_MASK_COPY) } -fn kind_implicitly_copyable() -> kind { +fn kind_implicitly_copyable() -> Kind { kind_(KIND_MASK_IMPLICIT | KIND_MASK_COPY) } -fn kind_safe_for_default_mode() -> kind { +fn kind_safe_for_default_mode() -> Kind { // similar to implicit copy, but always includes vectors and strings kind_(KIND_MASK_DEFAULT_MODE | KIND_MASK_IMPLICIT | KIND_MASK_COPY) } -fn kind_implicitly_sendable() -> kind { +fn kind_implicitly_sendable() -> Kind { kind_(KIND_MASK_IMPLICIT | KIND_MASK_COPY | KIND_MASK_SEND) } -fn kind_safe_for_default_mode_send() -> kind { +fn kind_safe_for_default_mode_send() -> Kind { // similar to implicit copy, but always includes vectors and strings kind_(KIND_MASK_DEFAULT_MODE | KIND_MASK_IMPLICIT | KIND_MASK_COPY | KIND_MASK_SEND) } -fn kind_send_copy() -> kind { +fn kind_send_copy() -> Kind { kind_(KIND_MASK_COPY | KIND_MASK_SEND) } -fn kind_send_only() -> kind { +fn kind_send_only() -> Kind { kind_(KIND_MASK_SEND) } -fn kind_const() -> kind { +fn kind_const() -> Kind { kind_(KIND_MASK_CONST) } -fn kind_owned() -> kind { +fn kind_owned() -> Kind { kind_(KIND_MASK_OWNED) } -fn kind_top() -> kind { +fn kind_top() -> Kind { kind_(0xffffffffu32) } -fn remove_const(k: kind) -> kind { +fn remove_const(k: Kind) -> Kind { k - kind_const() } -fn remove_implicit(k: kind) -> kind { +fn remove_implicit(k: Kind) -> Kind { k - kind_(KIND_MASK_IMPLICIT | KIND_MASK_DEFAULT_MODE) } -fn remove_send(k: kind) -> kind { +fn remove_send(k: Kind) -> Kind { k - kind_(KIND_MASK_SEND) } -fn remove_owned_send(k: kind) -> kind { +fn remove_owned_send(k: Kind) -> Kind { k - kind_(KIND_MASK_OWNED) - kind_(KIND_MASK_SEND) } -fn remove_copyable(k: kind) -> kind { +fn remove_copyable(k: Kind) -> Kind { k - kind_(KIND_MASK_COPY | KIND_MASK_DEFAULT_MODE) } -impl kind : ops::BitAnd { - pure fn bitand(other: 
&kind) -> kind { +impl Kind : ops::BitAnd { + pure fn bitand(other: &Kind) -> Kind { unsafe { lower_kind(self, (*other)) } } } -impl kind : ops::BitOr { - pure fn bitor(other: &kind) -> kind { +impl Kind : ops::BitOr { + pure fn bitor(other: &Kind) -> Kind { unsafe { raise_kind(self, (*other)) } } } -impl kind : ops::Sub { - pure fn sub(other: &kind) -> kind { +impl Kind : ops::Sub { + pure fn sub(other: &Kind) -> Kind { unsafe { kind_(*self & !*(*other)) } @@ -1873,27 +1895,27 @@ impl kind : ops::Sub { // Using these query functions is preferable to direct comparison or matching // against the kind constants, as we may modify the kind hierarchy in the // future. -pure fn kind_can_be_implicitly_copied(k: kind) -> bool { +pure fn kind_can_be_implicitly_copied(k: Kind) -> bool { *k & KIND_MASK_IMPLICIT == KIND_MASK_IMPLICIT } -pure fn kind_is_safe_for_default_mode(k: kind) -> bool { +pure fn kind_is_safe_for_default_mode(k: Kind) -> bool { *k & KIND_MASK_DEFAULT_MODE == KIND_MASK_DEFAULT_MODE } -pure fn kind_can_be_copied(k: kind) -> bool { +pure fn kind_can_be_copied(k: Kind) -> bool { *k & KIND_MASK_COPY == KIND_MASK_COPY } -pure fn kind_can_be_sent(k: kind) -> bool { +pure fn kind_can_be_sent(k: Kind) -> bool { *k & KIND_MASK_SEND == KIND_MASK_SEND } -pure fn kind_is_owned(k: kind) -> bool { +pure fn kind_is_owned(k: Kind) -> bool { *k & KIND_MASK_OWNED == KIND_MASK_OWNED } -fn meta_kind(p: FnMeta) -> kind { +fn meta_kind(p: FnMeta) -> Kind { match p.proto { // XXX consider the kind bounds! proto_vstore(vstore_slice(_)) => kind_noncopyable() | kind_(KIND_MASK_DEFAULT_MODE), @@ -1908,15 +1930,15 @@ fn meta_kind(p: FnMeta) -> kind { } } -fn kind_lteq(a: kind, b: kind) -> bool { +fn kind_lteq(a: Kind, b: Kind) -> bool { *a & *b == *a } -fn lower_kind(a: kind, b: kind) -> kind { +fn lower_kind(a: Kind, b: Kind) -> Kind { kind_(*a & *b) } -fn raise_kind(a: kind, b: kind) -> kind { +fn raise_kind(a: Kind, b: Kind) -> Kind { kind_(*a | *b) } @@ -1941,7 +1963,7 @@ fn test_kinds() { // with the given mutability can have. // This is used to prevent objects containing mutable state from being // implicitly copied and to compute whether things have const kind. 
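
The `kind` to `Kind` rename above leaves the representation untouched: a kind is a bag of capability bits, and the `BitAnd`/`BitOr`/`Sub` impls are plain mask arithmetic. A self-contained sketch with only three of the masks and simplified helper names, not the compiler's full set:

~~~~
const KIND_MASK_COPY: u32 = 0b001;
const KIND_MASK_SEND: u32 = 0b010;
const KIND_MASK_CONST: u32 = 0b100;

#[derive(Clone, Copy, Debug, PartialEq)]
struct Kind(u32);

// "Lowering" two kinds keeps only the capabilities both have; "raising"
// unions them; `kind_lteq` asks whether every capability of `a` is in `b`.
fn lower_kind(a: Kind, b: Kind) -> Kind { Kind(a.0 & b.0) }
fn raise_kind(a: Kind, b: Kind) -> Kind { Kind(a.0 | b.0) }
fn kind_lteq(a: Kind, b: Kind) -> bool { a.0 & b.0 == a.0 }

fn main() {
    let send_copy = Kind(KIND_MASK_COPY | KIND_MASK_SEND);
    let copy_only = Kind(KIND_MASK_COPY);
    assert!(kind_lteq(copy_only, send_copy));
    assert_eq!(lower_kind(send_copy, copy_only), copy_only);
    assert_eq!(raise_kind(copy_only, Kind(KIND_MASK_CONST)), Kind(0b101));
}
~~~~
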
-fn mutability_kind(m: mutability) -> kind { +fn mutability_kind(m: mutability) -> Kind { match (m) { m_mutbl => remove_const(remove_implicit(kind_top())), m_const => remove_implicit(kind_top()), @@ -1949,11 +1971,11 @@ fn mutability_kind(m: mutability) -> kind { } } -fn mutable_type_kind(cx: ctxt, ty: mt) -> kind { +fn mutable_type_kind(cx: ctxt, ty: mt) -> Kind { lower_kind(mutability_kind(ty.mutbl), type_kind(cx, ty.ty)) } -fn type_kind(cx: ctxt, ty: t) -> kind { +fn type_kind(cx: ctxt, ty: t) -> Kind { match cx.kind_cache.find(ty) { Some(result) => return result, None => {/* fall through */ } @@ -2531,7 +2553,7 @@ impl bound_region : to_bytes::IterBytes { } } -impl region : to_bytes::IterBytes { +impl Region : to_bytes::IterBytes { pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) { match self { re_bound(ref br) => @@ -2744,7 +2766,7 @@ fn is_fn_ty(fty: t) -> bool { } } -fn ty_region(ty: t) -> region { +fn ty_region(ty: t) -> Region { match get(ty).sty { ty_rptr(r, _) => r, s => fail fmt!("ty_region() invoked on non-rptr: %?", s) @@ -3328,20 +3350,18 @@ fn store_trait_methods(cx: ctxt, id: ast::node_id, ms: @~[method]) { cx.trait_method_cache.insert(ast_util::local_def(id), ms); } -fn provided_trait_methods(cx: ctxt, id: ast::def_id) -> ~[@ast::method] { +fn provided_trait_methods(cx: ctxt, id: ast::def_id) -> ~[ast::ident] { if is_local(id) { match cx.items.find(id.node) { Some(ast_map::node_item(@{node: item_trait(_, _, ms),_}, _)) => match ast_util::split_trait_methods(ms) { - (_, p) => p + (_, p) => p.map(|method| method.ident) }, _ => cx.sess.bug(fmt!("provided_trait_methods: %? is not a trait", id)) } - } - else { - // FIXME #2794: default methods for traits don't work cross-crate - ~[] + } else { + csearch::get_provided_trait_methods(cx, id).map(|info| info.ty.ident) } } @@ -3599,10 +3619,12 @@ fn enum_variant_with_id(cx: ctxt, enum_id: ast::def_id, // the type cache. Returns the type parameters and type. fn lookup_item_type(cx: ctxt, did: ast::def_id) -> ty_param_bounds_and_ty { match cx.tcache.find(did) { - Some(tpt) => return tpt, - None => { + Some(tpt) => { // The item is in this crate. The caller should have added it to the // type cache already + return tpt; + } + None => { assert did.crate != ast::local_crate; let tyt = csearch::get_type(cx, did); cx.tcache.insert(did, tyt); @@ -3891,9 +3913,11 @@ fn normalize_ty(cx: ctxt, t: t) -> t { ty_enum(did, r) => match r.self_r { Some(_) => - // This enum has a self region. Get rid of it + // Use re_static since trans doesn't care about regions mk_enum(cx, did, - {self_r: None, self_ty: None, tps: r.tps}), + {self_r: Some(ty::re_static), + self_ty: None, + tps: r.tps}), None => t }, @@ -3902,7 +3926,8 @@ fn normalize_ty(cx: ctxt, t: t) -> t { match r.self_r { Some(_) => // Ditto. 
- mk_class(cx, did, {self_r: None, self_ty: None, tps: r.tps}), + mk_class(cx, did, {self_r: Some(ty::re_static), self_ty: None, + tps: r.tps}), None => t }, @@ -4065,8 +4090,8 @@ impl RegionVid : cmp::Eq { pure fn ne(other: &RegionVid) -> bool { *self != *(*other) } } -impl region : cmp::Eq { - pure fn eq(other: &region) -> bool { + pure fn eq(other: &Region) -> bool { match self { re_bound(e0a) => { match (*other) { @@ -4100,7 +4125,7 @@ impl region : cmp::Eq { } } } - pure fn ne(other: &region) -> bool { !self.eq(other) } + pure fn ne(other: &Region) -> bool { !self.eq(other) } } impl bound_region : cmp::Eq { @@ -4348,9 +4373,9 @@ impl param_bound : cmp::Eq { pure fn ne(other: &param_bound) -> bool { !self.eq(other) } } -impl kind : cmp::Eq { - pure fn eq(other: &kind) -> bool { *self == *(*other) } - pure fn ne(other: &kind) -> bool { *self != *(*other) } +impl Kind : cmp::Eq { - pure fn eq(other: &Kind) -> bool { *self == *(*other) } + pure fn eq(other: &Kind) -> bool { *self == *(*other) } + pure fn ne(other: &Kind) -> bool { *self != *(*other) } } diff --git a/src/rustc/middle/typeck.rs b/src/rustc/middle/typeck.rs index 7cb04bc0ea3c1..8d10343d78ee0 100644 --- a/src/rustc/middle/typeck.rs +++ b/src/rustc/middle/typeck.rs @@ -46,7 +46,6 @@ use syntax::ast_map::node_id_to_str; use syntax::ast_util::{local_def, respan, split_trait_methods}; use syntax::visit; use metadata::csearch; -use driver::session::session; use util::common::may_break; use syntax::codemap::span; use pat_util::{pat_is_variant, pat_id_map, PatIdMap}; @@ -62,6 +61,7 @@ use util::ppaux::{ty_to_str, tys_to_str, region_to_str, use util::common::{indent, indenter}; use std::list; use list::{List, Nil, Cons}; +use dvec::DVec; export check_crate; export infer; @@ -174,12 +174,6 @@ impl vtable_origin { type vtable_map = HashMap; -// Stores information about provided methods, aka "default methods" in traits. // Maps from a trait's def_id to a MethodInfo about // that method in that trait.
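
The map being removed here recorded provided methods per trait; later hunks replace it with per-impl bookkeeping in coherence. The surface feature all of this supports is the familiar one, shown below in modern Rust syntax rather than the dialect of the patch: an impl may omit any method for which the trait supplies a default body.

~~~~
trait Greet {
    fn name(&self) -> String;

    // A provided ("default") method: impls inherit this body unless they
    // override it with their own.
    fn greet(&self) -> String {
        format!("hello, {}", self.name())
    }
}

struct World;

impl Greet for World {
    fn name(&self) -> String {
        "world".to_string()
    }
    // `greet` is deliberately not written here; the trait's default is used.
}

fn main() {
    assert_eq!(World.greet(), "hello, world");
}
~~~~
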
-type provided_methods_map = HashMap; - type ty_param_substs_and_ty = {substs: ty::substs, ty: ty::t}; type crate_ctxt_ = {// A mapping from method call sites to traits that have @@ -188,7 +182,6 @@ type crate_ctxt_ = {// A mapping from method call sites to traits that have method_map: method_map, vtable_map: vtable_map, coherence_info: @coherence::CoherenceInfo, - provided_methods_map: provided_methods_map, tcx: ty::ctxt}; enum crate_ctxt { @@ -340,7 +333,6 @@ fn check_crate(tcx: ty::ctxt, method_map: std::map::HashMap(), vtable_map: std::map::HashMap(), coherence_info: @coherence::CoherenceInfo(), - provided_methods_map: std::map::HashMap(), tcx: tcx}); collect::collect_item_types(ccx, crate); coherence::check_coherence(ccx, crate); diff --git a/src/rustc/middle/typeck/astconv.rs b/src/rustc/middle/typeck/astconv.rs index 389c1adb016bc..da7a71b3efbf7 100644 --- a/src/rustc/middle/typeck/astconv.rs +++ b/src/rustc/middle/typeck/astconv.rs @@ -58,7 +58,7 @@ trait ast_conv { fn get_region_reporting_err(tcx: ty::ctxt, span: span, - res: Result) -> ty::region { + res: Result) -> ty::Region { match res { result::Ok(r) => r, @@ -70,7 +70,7 @@ fn get_region_reporting_err(tcx: ty::ctxt, } fn ast_region_to_region( - self: AC, rscope: RS, span: span, a_r: @ast::region) -> ty::region { + self: AC, rscope: RS, span: span, a_r: @ast::region) -> ty::Region { let res = match a_r.node { ast::re_static => Ok(ty::re_static), @@ -148,14 +148,14 @@ fn ast_path_to_ty( return {substs: substs, ty: ty}; } -const NO_REGIONS: uint = 1u; -const NO_TPS: uint = 2u; +const NO_REGIONS: uint = 1; +const NO_TPS: uint = 2; // Parses the programmer's textual representation of a type into our // internal notion of a type. `getter` is a function that returns the type // corresponding to a definition ID: fn ast_ty_to_ty( - self: AC, rscope: RS, &&ast_ty: @ast::ty) -> ty::t { + self: AC, rscope: RS, &&ast_ty: @ast::Ty) -> ty::t { fn ast_mt_to_mt( self: AC, rscope: RS, mt: ast::mt) -> ty::mt { diff --git a/src/rustc/middle/typeck/check.rs b/src/rustc/middle/typeck/check.rs index 2bf124526ac8e..5f9e584fa7b99 100644 --- a/src/rustc/middle/typeck/check.rs +++ b/src/rustc/middle/typeck/check.rs @@ -166,20 +166,20 @@ fn blank_fn_ctxt(ccx: @crate_ctxt, rty: ty::t, } // a list of mapping from in-scope-region-names ("isr") to the -// corresponding ty::region -type isr_alist = @List<(ty::bound_region, ty::region)>; +// corresponding ty::Region +type isr_alist = @List<(ty::bound_region, ty::Region)>; trait get_and_find_region { - fn get(br: ty::bound_region) -> ty::region; - fn find(br: ty::bound_region) -> Option; + fn get(br: ty::bound_region) -> ty::Region; + fn find(br: ty::bound_region) -> Option; } impl isr_alist: get_and_find_region { - fn get(br: ty::bound_region) -> ty::region { + fn get(br: ty::bound_region) -> ty::Region { self.find(br).get() } - fn find(br: ty::bound_region) -> Option { + fn find(br: ty::bound_region) -> Option { for list::each(self) |isr| { let (isr_br, isr_r) = *isr; if isr_br == br { return Some(isr_r); } @@ -563,7 +563,7 @@ impl @fn_ctxt: ast_conv { impl @fn_ctxt { fn search_in_scope_regions(br: ty::bound_region) - -> Result + -> Result { match self.in_scope_regions.find(br) { Some(r) => result::Ok(r), @@ -581,13 +581,13 @@ impl @fn_ctxt { } impl @fn_ctxt: region_scope { - fn anon_region(span: span) -> Result { + fn anon_region(span: span) -> Result { result::Ok(self.infcx().next_region_var_nb(span)) } - fn self_region(_span: span) -> Result { + fn self_region(_span: span) -> Result { 
self.search_in_scope_regions(ty::br_self) } - fn named_region(_span: span, id: ast::ident) -> Result { + fn named_region(_span: span, id: ast::ident) -> Result { self.search_in_scope_regions(ty::br_named(id)) } } @@ -600,7 +600,7 @@ impl @fn_ctxt { pprust::expr_to_str(expr, self.tcx().sess.intr())) } - fn block_region() -> ty::region { + fn block_region() -> ty::Region { ty::re_scope(self.region_lb) } @@ -645,7 +645,7 @@ impl @fn_ctxt { self.write_ty(node_id, ty::mk_bot(self.tcx())); } - fn to_ty(ast_t: @ast::ty) -> ty::t { + fn to_ty(ast_t: @ast::Ty) -> ty::t { ast_ty_to_ty(self, self, ast_t) } @@ -736,7 +736,7 @@ impl @fn_ctxt { } fn mk_subr(a_is_expected: bool, span: span, - sub: ty::region, sup: ty::region) -> Result<(), ty::type_err> { + sub: ty::Region, sup: ty::Region) -> Result<(), ty::type_err> { infer::mk_subr(self.infcx(), a_is_expected, span, sub, sup) } @@ -760,8 +760,8 @@ impl @fn_ctxt { fn region_var_if_parameterized(rp: Option, span: span, - lower_bound: ty::region) - -> Option + lower_bound: ty::Region) + -> Option { rp.map( |_rp| self.infcx().next_region_var_with_lb(span, lower_bound)) @@ -1019,7 +1019,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, // functions. This is so that we have more information about the types // of arguments when we typecheck the functions. This isn't really the // right way to do this. - for [false, true]/_.each |check_blocks| { + for [false, true].each |check_blocks| { let check_blocks = *check_blocks; debug!("check_blocks=%b", check_blocks); @@ -1359,7 +1359,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, // Check field access expressions fn check_field(fcx: @fn_ctxt, expr: @ast::expr, is_callee: bool, - base: @ast::expr, field: ast::ident, tys: ~[@ast::ty]) + base: @ast::expr, field: ast::ident, tys: ~[@ast::Ty]) -> bool { let tcx = fcx.ccx.tcx; @@ -2219,9 +2219,14 @@ fn check_block(fcx0: @fn_ctxt, blk: ast::blk) -> bool { fn check_const(ccx: @crate_ctxt, _sp: span, e: @ast::expr, id: ast::node_id) { let rty = ty::node_id_to_type(ccx.tcx, id); let fcx = blank_fn_ctxt(ccx, rty, e.id); + let declty = fcx.ccx.tcx.tcache.get(local_def(id)).ty; + check_const_with_ty(fcx, _sp, e, declty); +} + +fn check_const_with_ty(fcx: @fn_ctxt, _sp: span, e: @ast::expr, + declty: ty::t) { check_expr(fcx, e, None); let cty = fcx.expr_ty(e); - let declty = fcx.ccx.tcx.tcache.get(local_def(id)).ty; demand::suptype(fcx, e.span, declty, cty); regionck::regionck_expr(fcx, e); writeback::resolve_type_vars_in_expr(fcx, e); @@ -2259,27 +2264,31 @@ fn check_enum_variants(ccx: @crate_ctxt, variants: &mut ~[ty::variant_info]) { let rty = ty::node_id_to_type(ccx.tcx, id); for vs.each |v| { - match v.node.disr_expr { - Some(e) => { - let fcx = blank_fn_ctxt(ccx, rty, e.id); - check_expr(fcx, e, None); - let cty = fcx.expr_ty(e); + do v.node.disr_expr.iter |e_ref| { + let e = *e_ref; + debug!("disr expr, checking %s", + expr_to_str(e, ccx.tcx.sess.intr())); let declty = ty::mk_int(ccx.tcx); - demand::suptype(fcx, e.span, declty, cty); + let fcx = blank_fn_ctxt(ccx, rty, e.id); + check_const_with_ty(fcx, e.span, e, declty); // check_expr (from check_const pass) doesn't guarantee // that the expression is in an form that eval_const_expr can // handle, so we may still get an internal compiler error - match const_eval::eval_const_expr(ccx.tcx, e) { - const_eval::const_int(val) => { + + match const_eval::eval_const_expr_partial(ccx.tcx, e) { + Ok(const_eval::const_int(val)) => { *disr_val = val as int; } - _ => { + Ok(_) => { ccx.tcx.sess.span_err(e.span, ~"expected signed integer 
\ constant"); } + Err(err) => { + ccx.tcx.sess.span_err(e.span, + #fmt("expected constant: %s", err)); + + } } - } - _ => () } if vec::contains(*disr_vals, &*disr_val) { ccx.tcx.sess.span_err(v.span, @@ -2434,7 +2443,7 @@ fn instantiate_path(fcx: @fn_ctxt, tpt: ty_param_bounds_and_ty, span: span, node_id: ast::node_id, - region_lb: ty::region) { + region_lb: ty::Region) { let ty_param_count = vec::len(*tpt.bounds); let ty_substs_len = vec::len(pth.types); diff --git a/src/rustc/middle/typeck/check/alt.rs b/src/rustc/middle/typeck/check/alt.rs index 24bcc2281fb9a..caace6051982e 100644 --- a/src/rustc/middle/typeck/check/alt.rs +++ b/src/rustc/middle/typeck/check/alt.rs @@ -112,8 +112,8 @@ fn check_legality_of_move_bindings(fcx: @fn_ctxt, type pat_ctxt = { fcx: @fn_ctxt, map: PatIdMap, - alt_region: ty::region, // Region for the alt as a whole - block_region: ty::region, // Region for the block of the arm + alt_region: ty::Region, // Region for the alt as a whole + block_region: ty::Region, // Region for the block of the arm }; fn check_pat_variant(pcx: pat_ctxt, pat: @ast::pat, path: @ast::path, diff --git a/src/rustc/middle/typeck/check/method.rs b/src/rustc/middle/typeck/check/method.rs index 0d71d61bdaadb..eaf1a45afa9c0 100644 --- a/src/rustc/middle/typeck/check/method.rs +++ b/src/rustc/middle/typeck/check/method.rs @@ -69,7 +69,7 @@ obtained the type `Foo`, we would never match this method. */ -use coherence::get_base_type_def_id; +use coherence::{ProvidedMethodInfo, get_base_type_def_id}; use middle::resolve::{Impl, MethodInfo}; use middle::ty::*; use syntax::ast::{def_id, sty_by_ref, sty_value, sty_region, sty_box, @@ -146,7 +146,7 @@ impl LookupContext { // Prepare the list of candidates self.push_inherent_candidates(self_ty); - self.push_extension_candidates(); + self.push_extension_candidates(self_ty); let enum_dids = DVec(); let mut self_ty = self_ty; @@ -251,7 +251,7 @@ impl LookupContext { } } - fn push_extension_candidates(&self) { + fn push_extension_candidates(&self, self_ty: ty::t) { // If the method being called is associated with a trait, then // find all the impls of that trait. Each of those are // candidates. @@ -259,6 +259,8 @@ impl LookupContext { for opt_applicable_traits.each |applicable_traits| { for applicable_traits.each |trait_did| { let coherence_info = self.fcx.ccx.coherence_info; + + // Look for explicit implementations. let opt_impl_infos = coherence_info.extension_methods.find(*trait_did); for opt_impl_infos.each |impl_infos| { @@ -267,12 +269,21 @@ impl LookupContext { &self.extension_candidates, *impl_info); } } + + // Look for default methods. 
+ match coherence_info.provided_methods.find(*trait_did) { + Some(methods) => { + self.push_candidates_from_provided_methods( + &self.extension_candidates, self_ty, *trait_did, + methods); + } + None => {} + } } } } - fn push_inherent_candidates_from_param(&self, param_ty: param_ty) - { + fn push_inherent_candidates_from_param(&self, param_ty: param_ty) { debug!("push_inherent_candidates_from_param(param_ty=%?)", param_ty); let _indenter = indenter(); @@ -348,8 +359,7 @@ impl LookupContext { self_ty: ty::t, did: def_id, substs: &ty::substs, - vstore: ty::vstore) - { + vstore: ty::vstore) { debug!("push_inherent_candidates_from_trait(did=%s, substs=%s)", self.did_to_str(did), substs_to_str(self.tcx(), substs)); @@ -423,8 +433,7 @@ impl LookupContext { }); } - fn push_inherent_impl_candidates_for_type(did: def_id) - { + fn push_inherent_impl_candidates_for_type(did: def_id) { let opt_impl_infos = self.fcx.ccx.coherence_info.inherent_methods.find(did); for opt_impl_infos.each |impl_infos| { @@ -436,8 +445,7 @@ impl LookupContext { } fn push_candidates_from_impl(&self, candidates: &DVec, - impl_info: &resolve::Impl) - { + impl_info: &resolve::Impl) { if !self.impl_dups.insert(impl_info.did, ()) { return; // already visited } @@ -471,12 +479,47 @@ impl LookupContext { }); } + fn push_candidates_from_provided_methods( + &self, + candidates: &DVec, + self_ty: ty::t, + trait_def_id: def_id, + methods: @DVec<@ProvidedMethodInfo>) { + debug!("(pushing candidates from provided methods) considering trait \ + id %d:%d", + trait_def_id.crate, + trait_def_id.node); + + for methods.each |provided_method_info| { + if provided_method_info.method_info.ident != self.m_name { loop; } + + debug!("(pushing candidates from provided methods) adding \ + candidate"); + + // XXX: Needs to support generics. + let dummy_substs = { self_r: None, self_ty: None, tps: ~[] }; + let (impl_ty, impl_substs) = + self.create_rcvr_ty_and_substs_for_method( + provided_method_info.method_info.self_type, + self_ty, + dummy_substs); + + candidates.push(Candidate { + rcvr_ty: impl_ty, + rcvr_substs: move impl_substs, + num_method_tps: provided_method_info.method_info.n_tps, + self_mode: get_mode_from_self_type( + provided_method_info.method_info.self_type), + origin: method_static(provided_method_info.method_info.did) + }); + } + } + fn create_rcvr_ty_and_substs_for_method(&self, self_decl: ast::self_ty_, self_ty: ty::t, +self_substs: ty::substs) - -> (ty::t, ty::substs) - { + -> (ty::t, ty::substs) { // If the self type includes a region (like &self), we need to // ensure that the receiver substitutions have a self region. // If the receiver type does not itself contain borrowed @@ -611,7 +654,7 @@ impl LookupContext { kind: AutoRefKind, autoderefs: uint, mutbls: &[ast::mutability], - mk_autoref_ty: &fn(ast::mutability, ty::region) -> ty::t) + mk_autoref_ty: &fn(ast::mutability, ty::Region) -> ty::t) -> Option { // This is hokey. 
We should have mutability inference as a @@ -693,8 +736,7 @@ impl LookupContext { fn confirm_candidate(&self, self_ty: ty::t, candidate: &Candidate) - -> method_map_entry - { + -> method_map_entry { let tcx = self.tcx(); let fty = self.fn_ty_from_origin(&candidate.origin); @@ -888,7 +930,7 @@ impl LookupContext { } fn transform_self_type_for_method(tcx: ty::ctxt, - self_region: Option, + self_region: Option, impl_ty: ty::t, self_type: ast::self_ty_) -> ty::t diff --git a/src/rustc/middle/typeck/check/regionck.rs b/src/rustc/middle/typeck/check/regionck.rs index 0b258da5672db..932cdd994da7d 100644 --- a/src/rustc/middle/typeck/check/regionck.rs +++ b/src/rustc/middle/typeck/check/regionck.rs @@ -32,7 +32,7 @@ use middle::ty::{vstore_uniq}; enum rcx { rcx_({fcx: @fn_ctxt, mut errors_reported: uint}) } type rvt = visit::vt<@rcx>; -fn encl_region_of_def(fcx: @fn_ctxt, def: ast::def) -> ty::region { +fn encl_region_of_def(fcx: @fn_ctxt, def: ast::def) -> ty::Region { let tcx = fcx.tcx(); match def { def_local(node_id, _) | def_arg(node_id, _) | def_self(node_id) | @@ -335,7 +335,7 @@ fn constrain_auto_ref( fn constrain_free_variables( rcx: @rcx, - region: ty::region, + region: ty::Region, expr: @ast::expr) { /*! @@ -373,7 +373,7 @@ fn constrain_free_variables( fn constrain_regions_in_type_of_node( rcx: @rcx, id: ast::node_id, - encl_region: ty::region, + encl_region: ty::Region, span: span) -> bool { let tcx = rcx.fcx.tcx(); @@ -395,7 +395,7 @@ fn constrain_regions_in_type_of_node( fn constrain_regions_in_type( rcx: @rcx, - encl_region: ty::region, + encl_region: ty::Region, span: span, ty: ty::t) -> bool { @@ -417,9 +417,9 @@ fn constrain_regions_in_type( return (e == rcx.errors_reported); fn constrain_region(rcx: @rcx, - encl_region: ty::region, + encl_region: ty::Region, span: span, - region: ty::region) { + region: ty::Region) { let tcx = rcx.fcx.ccx.tcx; debug!("constrain_region(encl_region=%?, region=%?)", diff --git a/src/rustc/middle/typeck/check/regionmanip.rs b/src/rustc/middle/typeck/check/regionmanip.rs index 4afb3ad78a635..806b234540cdc 100644 --- a/src/rustc/middle/typeck/check/regionmanip.rs +++ b/src/rustc/middle/typeck/check/regionmanip.rs @@ -10,7 +10,7 @@ fn replace_bound_regions_in_fn_ty( isr: isr_alist, self_info: Option, fn_ty: &ty::FnTy, - mapf: fn(ty::bound_region) -> ty::region) -> + mapf: fn(ty::bound_region) -> ty::Region) -> {isr: isr_alist, self_info: Option, fn_ty: ty::FnTy} { // Take self_info apart; the self_ty part is the only one we want @@ -83,7 +83,7 @@ fn replace_bound_regions_in_fn_ty( tcx: ty::ctxt, isr: isr_alist, tys: ~[ty::t], - to_r: fn(ty::bound_region) -> ty::region) -> isr_alist { + to_r: fn(ty::bound_region) -> ty::Region) -> isr_alist { // Takes `isr` (described above), `to_r` (described above), // and `r`, a region. If `r` is anything other than a bound @@ -93,8 +93,8 @@ fn replace_bound_regions_in_fn_ty( // updated isr_alist that now contains a mapping from `r` to // the result of calling `to_r` on it. 
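
The `isr` list described in the comment above is an association list from bound regions to the concrete regions chosen for them, extended only when a bound region has not been seen before. A simplified stand-in with strings for region names, and without the bound/free distinction that the real `append_isr` checks first:

~~~~
// An association list from bound-region names to the concrete regions
// substituted for them.
type IsrAlist = Vec<(String, String)>;

fn find(isr: &IsrAlist, br: &str) -> Option<String> {
    isr.iter().find(|(k, _)| k == br).map(|(_, r)| r.clone())
}

// If `br` already appears in `isr`, return the list unchanged; otherwise
// extend it with a mapping from `br` to whatever region `to_r` chooses.
fn append_isr(mut isr: IsrAlist, to_r: impl Fn(&str) -> String, br: &str) -> IsrAlist {
    if find(&isr, br).is_none() {
        let r = to_r(br);
        isr.push((br.to_string(), r));
    }
    isr
}

fn main() {
    let isr = append_isr(Vec::new(), |_| "'static".to_string(), "self");
    assert_eq!(find(&isr, "self"), Some("'static".to_string()));
}
~~~~
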
fn append_isr(isr: isr_alist, - to_r: fn(ty::bound_region) -> ty::region, - r: ty::region) -> isr_alist { + to_r: fn(ty::bound_region) -> ty::Region, + r: ty::Region) -> isr_alist { match r { ty::re_free(_, _) | ty::re_static | ty::re_scope(_) | ty::re_var(_) => { diff --git a/src/rustc/middle/typeck/check/vtable.rs b/src/rustc/middle/typeck/check/vtable.rs index 00fb134f2be55..345b8246b4278 100644 --- a/src/rustc/middle/typeck/check/vtable.rs +++ b/src/rustc/middle/typeck/check/vtable.rs @@ -444,7 +444,7 @@ fn connect_trait_tps(fcx: @fn_ctxt, expr: @ast::expr, impl_tys: ~[ty::t], // XXX: This should work for multiple traits. let ity = ty::impl_traits(tcx, impl_did, vstore)[0]; - let trait_ty = ty::subst_tps(tcx, impl_tys, ity); + let trait_ty = ty::subst_tps(tcx, impl_tys, None, ity); debug!("(connect trait tps) trait type is %?, impl did is %?", ty::get(trait_ty).sty, impl_did); match ty::get(trait_ty).sty { diff --git a/src/rustc/middle/typeck/coherence.rs b/src/rustc/middle/typeck/coherence.rs index 89cd696eb6fbd..189e7377d9c30 100644 --- a/src/rustc/middle/typeck/coherence.rs +++ b/src/rustc/middle/typeck/coherence.rs @@ -4,12 +4,13 @@ // has at most one implementation for each type. Then we build a mapping from // each trait in the system to its implementations. -use metadata::csearch::{each_path, get_impl_traits, get_impls_for_mod}; -use metadata::cstore::{cstore, iter_crate_data}; +use metadata::csearch::{ProvidedTraitMethodInfo, each_path, get_impl_traits}; +use metadata::csearch::{get_impls_for_mod}; +use metadata::cstore::{CStore, iter_crate_data}; use metadata::decoder::{dl_def, dl_field, dl_impl}; use middle::resolve::{Impl, MethodInfo}; -use middle::ty::{get, lookup_item_type, subst, t, ty_box}; -use middle::ty::{ty_uniq, ty_ptr, ty_rptr, ty_enum}; +use middle::ty::{ProvidedMethodSource, get, lookup_item_type, subst, t}; +use middle::ty::{ty_box, ty_uniq, ty_ptr, ty_rptr, ty_enum}; use middle::ty::{ty_class, ty_nil, ty_bot, ty_bool, ty_int, ty_uint}; use middle::ty::{ty_float, ty_estr, ty_evec, ty_rec}; use middle::ty::{ty_fn, ty_trait, ty_tup, ty_infer}; @@ -17,7 +18,7 @@ use middle::ty::{ty_param, ty_self, ty_type, ty_opaque_box}; use middle::ty::{ty_opaque_closure_ptr, ty_unboxed_vec, type_is_ty_var}; use middle::typeck::infer::{infer_ctxt, can_mk_subty}; use middle::typeck::infer::{new_infer_ctxt, resolve_ivar, resolve_type}; -use syntax::ast::{crate, def_id, def_mod}; +use syntax::ast::{crate, def_id, def_mod, def_ty}; use syntax::ast::{item, item_class, item_const, item_enum, item_fn}; use syntax::ast::{item_foreign_mod, item_impl, item_mac, item_mod}; use syntax::ast::{item_trait, item_ty, local_crate, method, node_id}; @@ -118,6 +119,21 @@ fn method_to_MethodInfo(ast_method: @method) -> @MethodInfo { } } +// Stores the method info and definition ID of the associated trait method for +// each instantiation of each provided method. +struct ProvidedMethodInfo { + method_info: @MethodInfo, + trait_method_def_id: def_id +} + +// Stores information about provided methods (a.k.a. default methods) in +// implementations. +// +// This is a map from ID of each implementation to the method info and trait +// method ID of each of the default methods belonging to the trait that that +// implementation implements. +type ProvidedMethodsMap = HashMap>; + struct CoherenceInfo { // Contains implementations of methods that are inherent to a type. // Methods in these implementations don't need to be exported. 
@@ -128,14 +144,20 @@ struct CoherenceInfo { extension_methods: HashMap>, // A mapping from a supertrait to its subtraits. - supertrait_to_subtraits: HashMap> + supertrait_to_subtraits: HashMap>, + + // A mapping from an implementation ID to the method info and trait method + // ID of the provided (a.k.a. default) methods in the traits that that + // implementation implements. + provided_methods: ProvidedMethodsMap, } fn CoherenceInfo() -> CoherenceInfo { CoherenceInfo { inherent_methods: HashMap(), extension_methods: HashMap(), - supertrait_to_subtraits: HashMap() + supertrait_to_subtraits: HashMap(), + provided_methods: HashMap(), } } @@ -165,68 +187,6 @@ struct CoherenceChecker { } impl CoherenceChecker { - // Create a mapping containing a MethodInfo for every provided - // method in every trait. - fn build_provided_methods_map(crate: @crate) { - let sess = self.crate_context.tcx.sess; - - let pmm = self.crate_context.provided_methods_map; - - visit_crate(*crate, (), mk_simple_visitor(@{ - visit_item: |item| { - match item.node { - item_trait(_, _, trait_methods) => { - for trait_methods.each |trait_method| { - debug!("(building provided methods map) checking \ - trait `%s` with id %d", - sess.str_of(item.ident), item.id); - - match *trait_method { - required(_) => { /* fall through */} - provided(m) => { - // For every provided method in the - // trait, store a MethodInfo. - let mi = method_to_MethodInfo(m); - - match pmm.find(item.id) { - Some(mis) => { - // If the trait already has an - // entry in the - // provided_methods_map, we just - // need to add this method to - // that entry. - debug!("(building provided \ - methods map) adding \ - method `%s` to entry for \ - existing trait", - sess.str_of(mi.ident)); - let mut method_infos = mis; - method_infos.push(mi); - pmm.insert(item.id, method_infos); - } - None => { - // If the trait doesn't have an - // entry yet, create one. - debug!("(building provided \ - methods map) creating new \ - entry for method `%s`", - sess.str_of(mi.ident)); - pmm.insert(item.id, ~[mi]); - } - } - } - } - } - } - _ => { - // Nothing to do. - } - }; - }, - .. *default_simple_visitor() - })); - } - fn check_coherence(crate: @crate) { // Check implementations and traits. This populates the tables // containing the inherent methods and extension methods. It also @@ -307,6 +267,7 @@ impl CoherenceChecker { self.crate_context.tcx.sess.parse_sess.interner), self.crate_context.tcx.sess.str_of(item.ident)); + self.instantiate_default_methods(item.id, trait_did); let implementation = self.create_impl_from_item(item); self.add_trait_method(trait_did, implementation); } @@ -321,6 +282,7 @@ impl CoherenceChecker { // Nothing to do. } Some(base_type_def_id) => { + // XXX: Gather up default methods? let implementation = self.create_impl_from_item(item); self.add_inherent_method(base_type_def_id, implementation); @@ -330,6 +292,68 @@ impl CoherenceChecker { } } + // Creates default method IDs and performs type substitutions for an impl + // and trait pair. Then, for each provided method in the trait, inserts a + // `ProvidedMethodInfo` instance into the `provided_method_sources` map. + fn instantiate_default_methods(impl_id: ast::node_id, + trait_did: ast::def_id) { + for self.each_provided_trait_method(trait_did) |trait_method| { + // Synthesize an ID. + let tcx = self.crate_context.tcx; + let new_id = syntax::parse::next_node_id(tcx.sess.parse_sess); + let new_did = local_def(new_id); + + // XXX: Perform substitutions. 
+ let new_polytype = ty::lookup_item_type(tcx, trait_method.def_id); + tcx.tcache.insert(new_did, new_polytype); + + // Pair the new synthesized ID up with the + // ID of the method. + let source = ProvidedMethodSource { + method_id: trait_method.def_id, + impl_id: local_def(impl_id) + }; + + self.crate_context.tcx.provided_method_sources.insert(new_did, + source); + + let provided_method_info = + @ProvidedMethodInfo { + method_info: @{ + did: new_did, + n_tps: trait_method.tps.len(), + ident: trait_method.ident, + self_type: trait_method.self_ty + }, + trait_method_def_id: trait_method.def_id + }; + + let pmm = self.crate_context.coherence_info.provided_methods; + match pmm.find(local_def(impl_id)) { + Some(mis) => { + // If the trait already has an entry in the + // provided_methods_map, we just need to add this + // method to that entry. + debug!("(checking implementation) adding method `%s` \ + to entry for existing trait", + self.crate_context.tcx.sess.str_of( + provided_method_info.method_info.ident)); + mis.push(provided_method_info); + } + None => { + // If the trait doesn't have an entry yet, create one. + debug!("(checking implementation) creating new entry \ + for method `%s`", + self.crate_context.tcx.sess.str_of( + provided_method_info.method_info.ident)); + let method_infos = @DVec(); + method_infos.push(provided_method_info); + pmm.insert(local_def(impl_id), method_infos); + } + } + } + } + fn register_inherited_trait(item: @item, supertraits: ~[@trait_ref]) { // XXX: This is wrong. We need to support substitutions; e.g. // trait Foo : Bar. @@ -354,8 +378,7 @@ impl CoherenceChecker { fn add_inherent_method(base_def_id: def_id, implementation: @Impl) { let implementation_list; match self.crate_context.coherence_info.inherent_methods - .find(base_def_id) { - + .find(base_def_id) { None => { implementation_list = @DVec(); self.crate_context.coherence_info.inherent_methods @@ -372,8 +395,7 @@ impl CoherenceChecker { fn add_trait_method(trait_id: def_id, implementation: @Impl) { let implementation_list; match self.crate_context.coherence_info.extension_methods - .find(trait_id) { - + .find(trait_id) { None => { implementation_list = @DVec(); self.crate_context.coherence_info.extension_methods @@ -413,6 +435,26 @@ impl CoherenceChecker { } } + fn each_provided_trait_method( + trait_did: ast::def_id, + f: &fn(x: &ty::method) -> bool) { + // Make a list of all the names of the provided methods. + // XXX: This is horrible. + let provided_method_idents = HashMap(); + let tcx = self.crate_context.tcx; + for ty::provided_trait_methods(tcx, trait_did).each |ident| { + provided_method_idents.insert(*ident, ()); + } + + for ty::trait_methods(tcx, trait_did).each |method| { + if provided_method_idents.contains_key(method.ident) { + if !f(method) { + break; + } + } + } + } + fn polytypes_unify(polytype_a: ty_param_bounds_and_ty, polytype_b: ty_param_bounds_and_ty) -> bool { @@ -449,7 +491,6 @@ impl CoherenceChecker { fn get_self_type_for_implementation(implementation: @Impl) -> ty_param_bounds_and_ty { - return self.crate_context.tcx.tcache.get(implementation.did); } @@ -552,33 +593,15 @@ impl CoherenceChecker { // Converts an implementation in the AST to an Impl structure. 
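
The rewritten `add_provided_methods` just below no longer filters against the impl's own methods; it simply appends every provided-method record gathered for that impl to the impl's method list. As a plain sketch of that merge, with invented `MethodInfo`/`ProvidedMethodInfo` stand-ins rather than the compiler's types:

~~~~
#[derive(Clone, Debug)]
struct MethodInfo {
    ident: String,
}

#[derive(Clone, Debug)]
struct ProvidedMethodInfo {
    method_info: MethodInfo,
}

// Append the instantiated default methods to the methods the impl wrote out
// explicitly; no per-name filtering happens at this point.
fn add_provided_methods(all_methods: &mut Vec<MethodInfo>, provided: &[ProvidedMethodInfo]) {
    for p in provided {
        all_methods.push(p.method_info.clone());
    }
}

fn main() {
    let mut methods = vec![MethodInfo { ident: "name".to_string() }];
    let provided = [ProvidedMethodInfo {
        method_info: MethodInfo { ident: "greet".to_string() },
    }];
    add_provided_methods(&mut methods, &provided);
    assert_eq!(methods.len(), 2);
    println!("{:?}", methods);
}
~~~~
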
fn create_impl_from_item(item: @item) -> @Impl { - - fn add_provided_methods(inherent_methods: ~[@MethodInfo], - all_provided_methods: ~[@MethodInfo], - sess: driver::session::session) - -> ~[@MethodInfo] { - - let mut methods = inherent_methods; - - // If there's no inherent method with the same name as a - // provided method, add that provided method to `methods`. + fn add_provided_methods(all_methods: &mut ~[@MethodInfo], + all_provided_methods: ~[@ProvidedMethodInfo], + sess: driver::session::Session) { for all_provided_methods.each |provided_method| { - let mut method_inherent_to_impl = false; - for inherent_methods.each |inherent_method| { - if provided_method.ident == inherent_method.ident { - method_inherent_to_impl = true; - } - } - - if !method_inherent_to_impl { - debug!( - "(creating impl) adding provided method `%s` to impl", - sess.str_of(provided_method.ident)); - methods.push(*provided_method); - } + debug!( + "(creating impl) adding provided method `%s` to impl", + sess.str_of(provided_method.method_info.ident)); + vec::push(all_methods, provided_method.method_info); } - - return methods; } match item.node { @@ -598,24 +621,22 @@ impl CoherenceChecker { let trait_did = self.trait_ref_to_trait_def_id(*trait_ref); - match self.crate_context.provided_methods_map - .find(trait_did.node) { + match self.crate_context + .coherence_info + .provided_methods + .find(local_def(item.id)) { None => { debug!("(creating impl) trait with node_id `%d` \ has no provided methods", trait_did.node); /* fall through */ } - Some(all_provided) - => { + Some(all_provided) => { debug!("(creating impl) trait with node_id `%d` \ has provided methods", trait_did.node); - // Selectively add only those provided - // methods that aren't inherent to the - // trait. - - // XXX: could probably be doing this with filter. - methods = add_provided_methods( - methods, all_provided, + // Add all provided methods. + add_provided_methods( + &mut methods, + all_provided.get(), self.crate_context.tcx.sess); } } @@ -673,7 +694,7 @@ impl CoherenceChecker { // External crate handling fn add_impls_for_module(impls_seen: HashMap, - crate_store: cstore, + crate_store: CStore, module_def_id: def_id) { let implementations = get_impls_for_mod(crate_store, @@ -758,6 +779,41 @@ impl CoherenceChecker { } } + fn add_default_methods_for_external_trait(trait_def_id: ast::def_id) { + let tcx = self.crate_context.tcx; + let pmm = self.crate_context.coherence_info.provided_methods; + + if pmm.contains_key(trait_def_id) { return; } + + debug!("(adding default methods for trait) processing trait"); + + for csearch::get_provided_trait_methods(tcx, + trait_def_id).each |info| { + debug!("(adding default methods for trait) found default method"); + + // Create a new def ID for this provided method. + let parse_sess = &self.crate_context.tcx.sess.parse_sess; + let new_did = local_def(syntax::parse::next_node_id(*parse_sess)); + + let provided_method_info = + @ProvidedMethodInfo { + method_info: @{ + did: new_did, + n_tps: info.ty.tps.len(), + ident: info.ty.ident, + self_type: info.ty.self_ty + }, + trait_method_def_id: info.def_id + }; + + let method_infos = @DVec(); + method_infos.push(provided_method_info); + pmm.insert(trait_def_id, method_infos); + } + } + + // Adds implementations and traits from external crates to the coherence + // info. 
fn add_external_crates() { let impls_seen = HashMap(); @@ -768,20 +824,28 @@ impl CoherenceChecker { { crate: crate_number, node: 0 }); for each_path(crate_store, crate_number) |path_entry| { - let module_def_id; match path_entry.def_like { dl_def(def_mod(def_id)) => { - module_def_id = def_id; + self.add_impls_for_module(impls_seen, + crate_store, + def_id); + } + dl_def(def_ty(def_id)) => { + let tcx = self.crate_context.tcx; + let polytype = csearch::get_type(tcx, def_id); + match ty::get(polytype.ty).sty { + ty::ty_trait(*) => { + self.add_default_methods_for_external_trait( + def_id); + } + _ => {} + } } dl_def(_) | dl_impl(_) | dl_field => { // Skip this. loop; } } - - self.add_impls_for_module(impls_seen, - crate_store, - module_def_id); } } } @@ -789,7 +853,6 @@ impl CoherenceChecker { fn check_coherence(crate_context: @crate_ctxt, crate: @crate) { let coherence_checker = @CoherenceChecker(crate_context); - (*coherence_checker).build_provided_methods_map(crate); (*coherence_checker).check_coherence(crate); } diff --git a/src/rustc/middle/typeck/collect.rs b/src/rustc/middle/typeck/collect.rs index a38e69effdffc..a5390d8f293af 100644 --- a/src/rustc/middle/typeck/collect.rs +++ b/src/rustc/middle/typeck/collect.rs @@ -76,7 +76,7 @@ fn collect_item_types(ccx: @crate_ctxt, crate: @ast::crate) { impl @crate_ctxt { fn to_ty( - rs: RS, ast_ty: @ast::ty) -> ty::t { + rs: RS, ast_ty: @ast::Ty) -> ty::t { ast_ty_to_ty(self, rs, ast_ty) } @@ -212,9 +212,15 @@ fn ensure_trait_methods(ccx: @crate_ctxt, id: ast::node_id, trait_ty: ty::t) { match tcx.items.get(id) { ast_map::node_item(@{node: ast::item_trait(params, _, ms), _}, _) => { store_methods::(ccx, id, ms, |m| { + let def_id; + match *m { + ast::required(ty_method) => def_id = local_def(ty_method.id), + ast::provided(method) => def_id = local_def(method.id) + } + let trait_bounds = ty_param_bounds(ccx, params); let ty_m = trait_method_to_ty_method(*m); - let method_ty = ty_of_ty_method(ccx, ty_m, region_paramd); + let method_ty = ty_of_ty_method(ccx, ty_m, region_paramd, def_id); if ty_m.self_ty.node == ast::sty_static { make_static_method_ty(ccx, ty_m, region_paramd, method_ty, trait_ty, trait_bounds); @@ -339,7 +345,7 @@ fn compare_impl_method(tcx: ty::ctxt, sp: span, // Replaces bound references to the self region with `with_r`. fn replace_bound_self(tcx: ty::ctxt, ty: ty::t, - with_r: ty::region) -> ty::t { + with_r: ty::Region) -> ty::t { do ty::fold_regions(tcx, ty) |r, _in_fn| { if r == ty::re_bound(ty::br_self) {with_r} else {r} } @@ -373,7 +379,7 @@ fn check_methods_against_trait(ccx: @crate_ctxt, let provided_methods = ty::provided_trait_methods(tcx, did); match vec::find(provided_methods, |provided_method| - provided_method.ident == trait_m.ident) { + *provided_method == trait_m.ident) { Some(_) => { // If there's a provided method with the name we // want, then we're fine; nothing else to do. 
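
With `provided_trait_methods` now returning just the provided methods' names, the conformance check above reduces to: every trait method must either be implemented by the impl or appear among the provided names. Roughly, with strings standing in for interned identifiers and a deliberately simplified control flow:

~~~~
// For each method the trait declares, require either an implementation in
// the impl or a provided default in the trait; otherwise record an error.
fn check_methods_against_trait(
    trait_methods: &[&str],
    provided: &[&str],
    impl_methods: &[&str],
) -> Vec<String> {
    let mut errors = Vec::new();
    for m in trait_methods {
        if impl_methods.contains(m) {
            // Implemented directly: nothing else to do.
        } else if provided.contains(m) {
            // Missing from the impl, but the trait provides a default body.
        } else {
            errors.push(format!("missing method `{m}`"));
        }
    }
    errors
}

fn main() {
    let errors = check_methods_against_trait(&["name", "greet"], &["greet"], &["name"]);
    assert!(errors.is_empty());
}
~~~~
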
@@ -546,19 +552,22 @@ fn ty_of_method(ccx: @crate_ctxt, m.purity, @~[], m.decl, None, m.span), self_ty: m.self_ty.node, - vis: m.vis} + vis: m.vis, + def_id: local_def(m.id)} } fn ty_of_ty_method(self: @crate_ctxt, m: ast::ty_method, - rp: Option) -> ty::method { + rp: Option, + id: ast::def_id) -> ty::method { {ident: m.ident, tps: ty_param_bounds(self, m.tps), fty: ty_of_fn_decl(self, type_rscope(rp), ast::proto_bare, m.purity, @~[], m.decl, None, m.span), // assume public, because this is only invoked on trait methods self_ty: m.self_ty.node, - vis: ast::public} + vis: ast::public, + def_id: id} } /* diff --git a/src/rustc/middle/typeck/infer.rs b/src/rustc/middle/typeck/infer.rs index 96849bf918d4f..e0465b22c931f 100644 --- a/src/rustc/middle/typeck/infer.rs +++ b/src/rustc/middle/typeck/infer.rs @@ -258,7 +258,6 @@ use util::ppaux::{ty_to_str, mt_to_str}; use result::{Result, Ok, Err, map_vec, map_vec2, iter_vec2}; use ty::{mk_fn, type_is_bot}; use check::regionmanip::{replace_bound_regions_in_fn_ty}; -use driver::session::session; use util::common::{indent, indenter}; use ast::{unsafe_fn, impure_fn, pure_fn, extern_fn}; use ast::{m_const, m_imm, m_mutbl}; @@ -275,7 +274,7 @@ use unify::{vals_and_bindings, root}; use integral::{int_ty_set, int_ty_set_all}; use combine::{combine_fields, eq_tys}; use assignment::Assign; -use to_str::to_str; +use to_str::ToStr; use sub::Sub; use lub::Lub; @@ -385,7 +384,7 @@ fn can_mk_subty(cx: infer_ctxt, a: ty::t, b: ty::t) -> ures { } fn mk_subr(cx: infer_ctxt, a_is_expected: bool, span: span, - a: ty::region, b: ty::region) -> ures { + a: ty::Region, b: ty::Region) -> ures { debug!("mk_subr(%s <: %s)", a.to_str(cx), b.to_str(cx)); do indent { do cx.commit { @@ -431,8 +430,8 @@ fn resolve_type(cx: infer_ctxt, a: ty::t, modes: uint) resolver(cx, modes).resolve_type_chk(a) } -fn resolve_region(cx: infer_ctxt, r: ty::region, modes: uint) - -> fres { +fn resolve_region(cx: infer_ctxt, r: ty::Region, modes: uint) + -> fres { resolver(cx, modes).resolve_region_chk(r) } @@ -628,12 +627,12 @@ impl infer_ctxt { ty::mk_int_var(self.tcx, self.next_int_var_id()) } - fn next_region_var_nb(span: span) -> ty::region { + fn next_region_var_nb(span: span) -> ty::Region { ty::re_var(self.region_vars.new_region_var(span)) } fn next_region_var_with_lb(span: span, - lb_region: ty::region) -> ty::region { + lb_region: ty::Region) -> ty::Region { let region_var = self.next_region_var_nb(span); // add lb_region as a lower bound on the newly built variable @@ -644,7 +643,7 @@ impl infer_ctxt { return region_var; } - fn next_region_var(span: span, scope_id: ast::node_id) -> ty::region { + fn next_region_var(span: span, scope_id: ast::node_id) -> ty::Region { self.next_region_var_with_lb(span, ty::re_scope(scope_id)) } diff --git a/src/rustc/middle/typeck/infer/assignment.rs b/src/rustc/middle/typeck/infer/assignment.rs index 53731551df57c..a5af58904dd77 100644 --- a/src/rustc/middle/typeck/infer/assignment.rs +++ b/src/rustc/middle/typeck/infer/assignment.rs @@ -48,7 +48,7 @@ // A. But this upper-bound might be stricter than what is truly // needed. 
-use to_str::to_str; +use to_str::ToStr; use combine::combine_fields; fn to_ares(+c: cres) -> ares { @@ -190,7 +190,7 @@ priv impl Assign { a: ty::t, nr_b: ty::t, m: ast::mutability, - r_b: ty::region) -> ares { + r_b: ty::Region) -> ares { debug!("try_assign(a=%s, nr_b=%s, m=%?, r_b=%s)", a.to_str(self.infcx), diff --git a/src/rustc/middle/typeck/infer/combine.rs b/src/rustc/middle/typeck/infer/combine.rs index bdda45c1168ad..342a2ce2b76cd 100644 --- a/src/rustc/middle/typeck/infer/combine.rs +++ b/src/rustc/middle/typeck/infer/combine.rs @@ -44,7 +44,7 @@ // terms of error reporting, although we do not do that properly right // now. -use to_str::to_str; +use to_str::ToStr; use ty::{FnTyBase, FnMeta, FnSig}; trait combine { @@ -72,8 +72,8 @@ trait combine { fn protos(p1: ty::fn_proto, p2: ty::fn_proto) -> cres; fn ret_styles(r1: ret_style, r2: ret_style) -> cres; fn purities(a: purity, b: purity) -> cres; - fn contraregions(a: ty::region, b: ty::region) -> cres; - fn regions(a: ty::region, b: ty::region) -> cres; + fn contraregions(a: ty::Region, b: ty::Region) -> cres; + fn regions(a: ty::Region, b: ty::Region) -> cres; fn vstores(vk: ty::terr_vstore_kind, a: ty::vstore, b: ty::vstore) -> cres; } @@ -103,7 +103,7 @@ fn eq_tys(self: &C, a: ty::t, b: ty::t) -> ures { } } -fn eq_regions(self: &C, a: ty::region, b: ty::region) -> ures { +fn eq_regions(self: &C, a: ty::Region, b: ty::Region) -> ures { debug!("eq_regions(%s, %s)", a.to_str(self.infcx()), b.to_str(self.infcx())); @@ -127,8 +127,8 @@ fn eq_regions(self: &C, a: ty::region, b: ty::region) -> ures { fn eq_opt_regions( self: &C, - a: Option, - b: Option) -> cres> { + a: Option, + b: Option) -> cres> { match (a, b) { (None, None) => { @@ -160,9 +160,9 @@ fn super_substs( fn relate_region_param( self: &C, did: ast::def_id, - a: Option, - b: Option) - -> cres> + a: Option, + b: Option) + -> cres> { let polyty = ty::lookup_item_type(self.infcx().tcx, did); match (polyty.region_param, a, b) { diff --git a/src/rustc/middle/typeck/infer/glb.rs b/src/rustc/middle/typeck/infer/glb.rs index a8676a63b8895..77e753fa2204f 100644 --- a/src/rustc/middle/typeck/infer/glb.rs +++ b/src/rustc/middle/typeck/infer/glb.rs @@ -1,6 +1,6 @@ use combine::*; use lattice::*; -use to_str::to_str; +use to_str::ToStr; enum Glb = combine_fields; // "greatest lower bound" (common subtype) @@ -109,7 +109,7 @@ impl Glb: combine { } } - fn regions(a: ty::region, b: ty::region) -> cres { + fn regions(a: ty::Region, b: ty::Region) -> cres { debug!("%s.regions(%?, %?)", self.tag(), a.to_str(self.infcx), @@ -120,7 +120,7 @@ impl Glb: combine { } } - fn contraregions(a: ty::region, b: ty::region) -> cres { + fn contraregions(a: ty::Region, b: ty::Region) -> cres { Lub(*self).regions(a, b) } diff --git a/src/rustc/middle/typeck/infer/integral.rs b/src/rustc/middle/typeck/infer/integral.rs index 168709596dc43..1b23cb52b20c8 100644 --- a/src/rustc/middle/typeck/infer/integral.rs +++ b/src/rustc/middle/typeck/infer/integral.rs @@ -4,7 +4,7 @@ Code related to integral type inference. 
*/ -use to_str::to_str; +use to_str::ToStr; // Bitvector to represent sets of integral types enum int_ty_set = uint; diff --git a/src/rustc/middle/typeck/infer/lattice.rs b/src/rustc/middle/typeck/infer/lattice.rs index 04133cab9d76c..699613e8ae655 100644 --- a/src/rustc/middle/typeck/infer/lattice.rs +++ b/src/rustc/middle/typeck/infer/lattice.rs @@ -1,6 +1,6 @@ use combine::*; use unify::*; -use to_str::to_str; +use to_str::ToStr; // ______________________________________________________________________ // Lattice operations on variables diff --git a/src/rustc/middle/typeck/infer/lub.rs b/src/rustc/middle/typeck/infer/lub.rs index 093da5caec872..dcff863a126f0 100644 --- a/src/rustc/middle/typeck/infer/lub.rs +++ b/src/rustc/middle/typeck/infer/lub.rs @@ -1,6 +1,6 @@ use combine::*; use lattice::*; -use to_str::to_str; +use to_str::ToStr; enum Lub = combine_fields; // "subtype", "subregion" etc @@ -88,11 +88,11 @@ impl Lub: combine { } } - fn contraregions(a: ty::region, b: ty::region) -> cres { + fn contraregions(a: ty::Region, b: ty::Region) -> cres { return Glb(*self).regions(a, b); } - fn regions(a: ty::region, b: ty::region) -> cres { + fn regions(a: ty::Region, b: ty::Region) -> cres { debug!("%s.regions(%?, %?)", self.tag(), a.to_str(self.infcx), diff --git a/src/rustc/middle/typeck/infer/region_var_bindings.rs b/src/rustc/middle/typeck/infer/region_var_bindings.rs index 8bbdab74d230e..86a872341f561 100644 --- a/src/rustc/middle/typeck/infer/region_var_bindings.rs +++ b/src/rustc/middle/typeck/infer/region_var_bindings.rs @@ -312,10 +312,10 @@ use std::map::HashMap; use std::cell::{Cell, empty_cell}; use std::list::{List, Nil, Cons}; -use ty::{region, RegionVid}; use region::is_subregion_of; +use ty::{Region, RegionVid}; use syntax::codemap; -use to_str::to_str; +use to_str::ToStr; use util::ppaux::note_and_explain_region; export RegionVarBindings; @@ -325,8 +325,8 @@ export glb_regions; enum Constraint { ConstrainVarSubVar(RegionVid, RegionVid), - ConstrainRegSubVar(region, RegionVid), - ConstrainVarSubReg(RegionVid, region) + ConstrainRegSubVar(Region, RegionVid), + ConstrainVarSubReg(RegionVid, Region) } impl Constraint : cmp::Eq { @@ -365,8 +365,8 @@ impl Constraint : to_bytes::IterBytes { } struct TwoRegions { - a: region, - b: region, + a: Region, + b: Region, } impl TwoRegions : cmp::Eq { @@ -394,7 +394,7 @@ type CombineMap = HashMap; struct RegionVarBindings { tcx: ty::ctxt, var_spans: DVec, - values: Cell<~[ty::region]>, + values: Cell<~[ty::Region]>, constraints: HashMap, lubs: CombineMap, glbs: CombineMap, @@ -501,7 +501,7 @@ impl RegionVarBindings { } } - fn make_subregion(span: span, sub: region, sup: region) -> cres<()> { + fn make_subregion(span: span, sub: Region, sup: Region) -> cres<()> { // cannot add constraints once regions are resolved assert self.values.is_empty(); @@ -529,7 +529,7 @@ impl RegionVarBindings { } } - fn lub_regions(span: span, a: region, b: region) -> cres { + fn lub_regions(span: span, a: Region, b: Region) -> cres { // cannot add constraints once regions are resolved assert self.values.is_empty(); @@ -551,7 +551,7 @@ impl RegionVarBindings { } } - fn glb_regions(span: span, a: region, b: region) -> cres { + fn glb_regions(span: span, a: Region, b: Region) -> cres { // cannot add constraints once regions are resolved assert self.values.is_empty(); @@ -574,7 +574,7 @@ impl RegionVarBindings { } } - fn resolve_var(rid: RegionVid) -> ty::region { + fn resolve_var(rid: RegionVid) -> ty::Region { debug!("RegionVarBindings: resolve_var(%?=%u)", rid, 
*rid); if self.values.is_empty() { self.tcx.sess.span_bug( @@ -586,9 +586,9 @@ impl RegionVarBindings { self.values.with_ref(|values| values[*rid]) } - fn combine_vars(combines: CombineMap, a: region, b: region, span: span, - relate: fn(old_r: region, new_r: region) -> cres<()>) - -> cres { + fn combine_vars(combines: CombineMap, a: Region, b: Region, span: span, + relate: fn(old_r: Region, new_r: Region) -> cres<()>) + -> cres { let vars = TwoRegions { a: a, b: b }; match combines.find(vars) { @@ -623,11 +623,11 @@ impl RegionVarBindings { } priv impl RegionVarBindings { - fn is_subregion_of(sub: region, sup: region) -> bool { + fn is_subregion_of(sub: Region, sup: Region) -> bool { is_subregion_of(self.tcx.region_map, sub, sup) } - fn lub_concrete_regions(+a: region, +b: region) -> region { + fn lub_concrete_regions(+a: Region, +b: Region) -> Region { match (a, b) { (ty::re_static, _) | (_, ty::re_static) => { ty::re_static // nothing lives longer than static @@ -682,7 +682,7 @@ priv impl RegionVarBindings { } } - fn glb_concrete_regions(+a: region, +b: region) -> cres { + fn glb_concrete_regions(+a: Region, +b: Region) -> cres { match (a, b) { (ty::re_static, r) | (r, ty::re_static) => { // static lives longer than everything else @@ -771,7 +771,7 @@ impl Classification : cmp::Eq { pure fn ne(other: &Classification) -> bool { !self.eq(other) } } -enum GraphNodeValue { NoValue, Value(region), ErrorValue } +enum GraphNodeValue { NoValue, Value(Region), ErrorValue } struct GraphNode { span: span, @@ -792,7 +792,7 @@ struct Graph { } struct SpannedRegion { - region: region, + region: Region, span: span, } @@ -803,7 +803,7 @@ fn TwoRegionsMap() -> TwoRegionsMap { } impl RegionVarBindings { - fn infer_variable_values() -> ~[region] { + fn infer_variable_values() -> ~[Region] { let graph = self.construct_graph(); self.expansion(&graph); self.contraction(&graph); @@ -895,7 +895,7 @@ impl RegionVarBindings { } } - fn expand_node(a_region: region, + fn expand_node(a_region: Region, b_vid: RegionVid, b_node: &GraphNode) -> bool { debug!("expand_node(%?, %? == %?)", @@ -955,7 +955,7 @@ impl RegionVarBindings { fn contract_node(a_vid: RegionVid, a_node: &GraphNode, - b_region: region) -> bool { + b_region: Region) -> bool { debug!("contract_node(%? == %?/%?, %?)", a_vid, a_node.value, a_node.classification, b_region); @@ -985,8 +985,8 @@ impl RegionVarBindings { fn check_node(self: &RegionVarBindings, a_vid: RegionVid, a_node: &GraphNode, - a_region: region, - b_region: region) -> bool { + a_region: Region, + b_region: Region) -> bool { if !self.is_subregion_of(a_region, b_region) { debug!("Setting %? to ErrorValue: %? 
not subregion of %?", a_vid, a_region, b_region); @@ -998,8 +998,8 @@ impl RegionVarBindings { fn adjust_node(self: &RegionVarBindings, a_vid: RegionVid, a_node: &GraphNode, - a_region: region, - b_region: region) -> bool { + a_region: Region, + b_region: Region) -> bool { match self.glb_concrete_regions(a_region, b_region) { Ok(glb) => { if glb == a_region { @@ -1040,7 +1040,7 @@ impl RegionVarBindings { debug!("---- %s Complete after %u iteration(s)", tag, iteration); } - fn extract_regions_and_report_errors(graph: &Graph) -> ~[region] { + fn extract_regions_and_report_errors(graph: &Graph) -> ~[Region] { let dup_map = TwoRegionsMap(); graph.nodes.mapi(|idx, node| { match node.value { @@ -1073,8 +1073,8 @@ impl RegionVarBindings { // Used to suppress reporting the same basic error over and over fn is_reported(dup_map: TwoRegionsMap, - r_a: region, - r_b: region) -> bool { + r_a: Region, + r_b: Region) -> bool { let key = TwoRegions { a: r_a, b: r_b }; !dup_map.insert(key, ()) } diff --git a/src/rustc/middle/typeck/infer/resolve.rs b/src/rustc/middle/typeck/infer/resolve.rs index 2a851a5f7bb26..5a55fbf9a5dfc 100644 --- a/src/rustc/middle/typeck/infer/resolve.rs +++ b/src/rustc/middle/typeck/infer/resolve.rs @@ -35,7 +35,7 @@ // probably better off writing `resolve_all - resolve_ivar`. use integral::*; -use to_str::to_str; +use to_str::ToStr; const resolve_nested_tvar: uint = 0b00000001; const resolve_rvar: uint = 0b00000010; @@ -98,7 +98,7 @@ impl resolve_state { } } - fn resolve_region_chk(orig: ty::region) -> fres { + fn resolve_region_chk(orig: ty::Region) -> fres { self.err = None; let resolved = indent(|| self.resolve_region(orig) ); match self.err { @@ -145,7 +145,7 @@ impl resolve_state { } } - fn resolve_region(orig: ty::region) -> ty::region { + fn resolve_region(orig: ty::Region) -> ty::Region { debug!("Resolve_region(%s)", orig.to_str(self.infcx)); match orig { ty::re_var(rid) => self.resolve_region_var(rid), @@ -153,14 +153,14 @@ impl resolve_state { } } - fn resolve_region_var(rid: RegionVid) -> ty::region { + fn resolve_region_var(rid: RegionVid) -> ty::Region { if !self.should(resolve_rvar) { return ty::re_var(rid) } self.infcx.region_vars.resolve_var(rid) } - fn assert_not_rvar(rid: RegionVid, r: ty::region) { + fn assert_not_rvar(rid: RegionVid, r: ty::Region) { match r { ty::re_var(rid2) => { self.err = Some(region_var_bound_by_region_var(rid, rid2)); diff --git a/src/rustc/middle/typeck/infer/sub.rs b/src/rustc/middle/typeck/infer/sub.rs index e6bcdf3e71ff1..0aba993512bb4 100644 --- a/src/rustc/middle/typeck/infer/sub.rs +++ b/src/rustc/middle/typeck/infer/sub.rs @@ -1,6 +1,6 @@ use combine::*; use unify::*; -use to_str::to_str; +use to_str::ToStr; enum Sub = combine_fields; // "subtype", "subregion" etc @@ -20,14 +20,14 @@ impl Sub: combine { Sub(opp).tys(b, a) } - fn contraregions(a: ty::region, b: ty::region) -> cres { + fn contraregions(a: ty::Region, b: ty::Region) -> cres { let opp = combine_fields { a_is_expected: !self.a_is_expected,.. 
*self }; Sub(opp).regions(b, a) } - fn regions(a: ty::region, b: ty::region) -> cres { + fn regions(a: ty::Region, b: ty::Region) -> cres { debug!("%s.regions(%s, %s)", self.tag(), a.to_str(self.infcx), diff --git a/src/rustc/middle/typeck/infer/to_str.rs b/src/rustc/middle/typeck/infer/to_str.rs index 7acfdcac424de..c98a217a7464e 100644 --- a/src/rustc/middle/typeck/infer/to_str.rs +++ b/src/rustc/middle/typeck/infer/to_str.rs @@ -1,29 +1,29 @@ use integral::{int_ty_set}; use unify::{var_value, redirect, root}; -trait to_str { +trait ToStr { fn to_str(cx: infer_ctxt) -> ~str; } -impl ty::t: to_str { +impl ty::t: ToStr { fn to_str(cx: infer_ctxt) -> ~str { ty_to_str(cx.tcx, self) } } -impl ty::mt: to_str { +impl ty::mt: ToStr { fn to_str(cx: infer_ctxt) -> ~str { mt_to_str(cx.tcx, self) } } -impl ty::region: to_str { +impl ty::Region: ToStr { fn to_str(cx: infer_ctxt) -> ~str { util::ppaux::region_to_str(cx.tcx, self) } } -impl bound: to_str { +impl bound: ToStr { fn to_str(cx: infer_ctxt) -> ~str { match self { Some(v) => v.to_str(cx), @@ -32,7 +32,7 @@ impl bound: to_str { } } -impl bounds: to_str { +impl bounds: ToStr { fn to_str(cx: infer_ctxt) -> ~str { fmt!("{%s <: %s}", self.lb.to_str(cx), @@ -40,7 +40,7 @@ impl bounds: to_str { } } -impl int_ty_set: to_str { +impl int_ty_set: ToStr { fn to_str(_cx: infer_ctxt) -> ~str { match self { int_ty_set(v) => uint::to_str(v, 10u) @@ -48,7 +48,7 @@ impl int_ty_set: to_str { } } -impl var_value: to_str { +impl var_value: ToStr { fn to_str(cx: infer_ctxt) -> ~str { match self { redirect(vid) => fmt!("redirect(%s)", vid.to_str()), diff --git a/src/rustc/middle/typeck/infer/unify.rs b/src/rustc/middle/typeck/infer/unify.rs index 7ccbaa40ada19..f865705563c60 100644 --- a/src/rustc/middle/typeck/infer/unify.rs +++ b/src/rustc/middle/typeck/infer/unify.rs @@ -1,6 +1,6 @@ use combine::combine; use integral::*; -use to_str::to_str; +use to_str::ToStr; use std::smallintmap::SmallIntMap; enum var_value { @@ -46,7 +46,7 @@ impl infer_ctxt { } } - fn set( + fn set( vb: &vals_and_bindings, vid: V, +new_v: var_value) { diff --git a/src/rustc/middle/typeck/rscope.rs b/src/rustc/middle/typeck/rscope.rs index 9b9695088f379..d379607d6a81f 100644 --- a/src/rustc/middle/typeck/rscope.rs +++ b/src/rustc/middle/typeck/rscope.rs @@ -2,21 +2,21 @@ use result::Result; use syntax::parse::token::special_idents; trait region_scope { - fn anon_region(span: span) -> Result; - fn self_region(span: span) -> Result; - fn named_region(span: span, id: ast::ident) -> Result; + fn anon_region(span: span) -> Result; + fn self_region(span: span) -> Result; + fn named_region(span: span, id: ast::ident) -> Result; } enum empty_rscope { empty_rscope } impl empty_rscope: region_scope { - fn anon_region(_span: span) -> Result { + fn anon_region(_span: span) -> Result { result::Ok(ty::re_static) } - fn self_region(_span: span) -> Result { + fn self_region(_span: span) -> Result { result::Err(~"only the static region is allowed here") } fn named_region(_span: span, _id: ast::ident) - -> Result + -> Result { result::Err(~"only the static region is allowed here") } @@ -24,17 +24,17 @@ impl empty_rscope: region_scope { enum type_rscope = Option; impl type_rscope: region_scope { - fn anon_region(_span: span) -> Result { + fn anon_region(_span: span) -> Result { match *self { Some(_) => result::Ok(ty::re_bound(ty::br_self)), None => result::Err(~"to use region types here, the containing \ type must be declared with a region bound") } } - fn self_region(span: span) -> Result { + fn 
self_region(span: span) -> Result { self.anon_region(span) } - fn named_region(span: span, id: ast::ident) -> Result { + fn named_region(span: span, id: ast::ident) -> Result { do empty_rscope.named_region(span, id).chain_err |_e| { result::Err(~"named regions other than `self` are not \ allowed as part of a type declaration") @@ -42,26 +42,26 @@ impl type_rscope: region_scope { } } -fn bound_self_region(rp: Option) -> Option { +fn bound_self_region(rp: Option) -> Option { match rp { Some(_) => Some(ty::re_bound(ty::br_self)), None => None } } -enum anon_rscope = {anon: ty::region, base: region_scope}; -fn in_anon_rscope(self: RS, r: ty::region) +enum anon_rscope = {anon: ty::Region, base: region_scope}; +fn in_anon_rscope(self: RS, r: ty::Region) -> @anon_rscope { @anon_rscope({anon: r, base: self as region_scope}) } impl @anon_rscope: region_scope { - fn anon_region(_span: span) -> Result { + fn anon_region(_span: span) -> Result { result::Ok(self.anon) } - fn self_region(span: span) -> Result { + fn self_region(span: span) -> Result { self.base.self_region(span) } - fn named_region(span: span, id: ast::ident) -> Result { + fn named_region(span: span, id: ast::ident) -> Result { self.base.named_region(span, id) } } @@ -76,15 +76,15 @@ fn in_binding_rscope(self: RS) @binding_rscope { base: base, anon_bindings: 0 } } impl @binding_rscope: region_scope { - fn anon_region(_span: span) -> Result { + fn anon_region(_span: span) -> Result { let idx = self.anon_bindings; self.anon_bindings += 1; result::Ok(ty::re_bound(ty::br_anon(idx))) } - fn self_region(span: span) -> Result { + fn self_region(span: span) -> Result { self.base.self_region(span) } - fn named_region(span: span, id: ast::ident) -> Result { + fn named_region(span: span, id: ast::ident) -> Result { do self.base.named_region(span, id).chain_err |_e| { result::Ok(ty::re_bound(ty::br_named(id))) } diff --git a/src/rustc/util/common.rs b/src/rustc/util/common.rs index e314a12a6765b..123905adba11b 100644 --- a/src/rustc/util/common.rs +++ b/src/rustc/util/common.rs @@ -1,6 +1,5 @@ use std::map::HashMap; use syntax::ast; -use ast::{ty, pat}; use syntax::codemap::{span}; use syntax::visit; use syntax::print; @@ -35,9 +34,7 @@ type flag = HashMap<~str, ()>; fn field_expr(f: ast::field) -> @ast::expr { return f.node.expr; } fn field_exprs(fields: ~[ast::field]) -> ~[@ast::expr] { - let mut es = ~[]; - for fields.each |f| { es.push(f.node.expr); } - return es; + fields.map(|f| f.node.expr) } // Takes a predicate p, returns true iff p is true for any subexpressions diff --git a/src/rustc/util/ppaux.rs b/src/rustc/util/ppaux.rs index 3f8ca0f6e6a64..8207082cf20e6 100644 --- a/src/rustc/util/ppaux.rs +++ b/src/rustc/util/ppaux.rs @@ -6,7 +6,7 @@ use middle::ty::{bound_copy, bound_const, bound_owned, bound_send, use middle::ty::{bound_region, br_anon, br_named, br_self, br_cap_avoid}; use middle::ty::{ck_block, ck_box, ck_uniq, ctxt, field, method}; use middle::ty::{mt, t, param_bound}; -use middle::ty::{re_bound, re_free, re_scope, re_var, re_static, region}; +use middle::ty::{re_bound, re_free, re_scope, re_var, re_static, Region}; use middle::ty::{ty_bool, ty_bot, ty_box, ty_class, ty_enum}; use middle::ty::{ty_estr, ty_evec, ty_float, ty_fn, ty_trait, ty_int}; use middle::ty::{ty_nil, ty_opaque_box, ty_opaque_closure_ptr, ty_param}; @@ -21,11 +21,10 @@ use syntax::print::pprust::{path_to_str, proto_to_str, mode_to_str, purity_to_str}; use syntax::{ast, ast_util}; use syntax::ast_map; -use driver::session::session; fn 
note_and_explain_region(cx: ctxt, prefix: ~str, - region: ty::region, + region: ty::Region, suffix: ~str) { match explain_region_and_span(cx, region) { (str, Some(span)) => { @@ -42,13 +41,13 @@ fn note_and_explain_region(cx: ctxt, /// Returns a string like "the block at 27:31" that attempts to explain a /// lifetime in a way it might plausibly be understood. -fn explain_region(cx: ctxt, region: ty::region) -> ~str { +fn explain_region(cx: ctxt, region: ty::Region) -> ~str { let (res, _) = explain_region_and_span(cx, region); return res; } -fn explain_region_and_span(cx: ctxt, region: ty::region) +fn explain_region_and_span(cx: ctxt, region: ty::Region) -> (~str, Option) { return match region { @@ -172,7 +171,7 @@ fn re_scope_id_to_str(cx: ctxt, node_id: ast::node_id) -> ~str { // In general, if you are giving a region error message, // you should use `explain_region()` or, better yet, // `note_and_explain_region()` -fn region_to_str(cx: ctxt, region: region) -> ~str { +fn region_to_str(cx: ctxt, region: Region) -> ~str { if cx.sess.verbose() { return fmt!("&%?", region); } @@ -283,8 +282,7 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str { _ => { } } s += ~"("; - let mut strs = ~[]; - for inputs.each |a| { strs.push(fn_input_to_str(cx, *a)); } + let strs = inputs.map(|a| fn_input_to_str(cx, *a)); s += str::connect(strs, ~", "); s += ~")"; if ty::get(output).sty != ty_nil { @@ -339,13 +337,11 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str { ty_unboxed_vec(tm) => { ~"unboxed_vec<" + mt_to_str(cx, tm) + ~">" } ty_type => ~"type", ty_rec(elems) => { - let mut strs: ~[~str] = ~[]; - for elems.each |fld| { strs.push(field_to_str(cx, *fld)); } + let strs = elems.map(|fld| field_to_str(cx, *fld)); ~"{" + str::connect(strs, ~",") + ~"}" } ty_tup(elems) => { - let mut strs = ~[]; - for elems.each |elem| { strs.push(ty_to_str(cx, *elem)); } + let strs = elems.map(|elem| ty_to_str(cx, *elem)); ~"(" + str::connect(strs, ~",") + ~")" } ty_fn(ref f) => { @@ -381,7 +377,7 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str { fn parameterized(cx: ctxt, base: ~str, - self_r: Option, + self_r: Option, tps: ~[ty::t]) -> ~str { let r_str = match self_r { diff --git a/src/rustdoc/astsrv.rs b/src/rustdoc/astsrv.rs index 27d4d51a010f3..7b2c6fe5f0cbc 100644 --- a/src/rustdoc/astsrv.rs +++ b/src/rustdoc/astsrv.rs @@ -10,7 +10,7 @@ non-sendableness. 
use std::map::HashMap; use rustc::driver::session; use session::{basic_options, options}; -use session::session; +use session::Session; use rustc::driver::driver; use syntax::diagnostic; use syntax::diagnostic::handler; @@ -35,7 +35,7 @@ type Ctxt = { type SrvOwner = fn(srv: Srv) -> T; type CtxtHandler = fn~(ctxt: Ctxt) -> T; -type Parser = fn~(session, ~str) -> @ast::crate; +type Parser = fn~(Session, ~str) -> @ast::crate; enum Msg { HandleRequest(fn~(Ctxt)), @@ -101,7 +101,7 @@ fn exec( comm::recv(po) } -fn build_ctxt(sess: session, +fn build_ctxt(sess: Session, ast: @ast::crate) -> Ctxt { use rustc::front::config; @@ -118,7 +118,7 @@ fn build_ctxt(sess: session, } } -fn build_session() -> session { +fn build_session() -> Session { let sopts: @options = basic_options(); let codemap = codemap::new_codemap(); let error_handlers = build_error_handlers(codemap); @@ -137,7 +137,7 @@ type ErrorHandlers = { // Build a custom error handler that will allow us to ignore non-fatal // errors fn build_error_handlers( - codemap: codemap::codemap + codemap: codemap::CodeMap ) -> ErrorHandlers { type DiagnosticHandler = { @@ -156,13 +156,13 @@ fn build_error_handlers( fn note(msg: &str) { self.inner.note(msg) } fn bug(msg: &str) -> ! { self.inner.bug(msg) } fn unimpl(msg: &str) -> ! { self.inner.unimpl(msg) } - fn emit(cmsp: Option<(codemap::codemap, codemap::span)>, + fn emit(cmsp: Option<(codemap::CodeMap, codemap::span)>, msg: &str, lvl: diagnostic::level) { self.inner.emit(cmsp, msg, lvl) } } - let emitter = fn@(cmsp: Option<(codemap::codemap, codemap::span)>, + let emitter = fn@(cmsp: Option<(codemap::CodeMap, codemap::span)>, msg: &str, lvl: diagnostic::level) { diagnostic::emit(cmsp, msg, lvl); }; diff --git a/src/rustdoc/attr_pass.rs b/src/rustdoc/attr_pass.rs index cd310791b4dfd..0748f603580b5 100644 --- a/src/rustdoc/attr_pass.rs +++ b/src/rustdoc/attr_pass.rs @@ -52,7 +52,7 @@ fn fold_crate( { topmod: doc::ModDoc_({ item: { - name: option::get_default(&attrs.name, doc.topmod.name()), + name: option::get_default(attrs.name, doc.topmod.name()), .. doc.topmod.item }, .. 
*doc.topmod @@ -151,7 +151,7 @@ fn fold_enum( node: ast::item_enum(enum_definition, _), _ }, _) => { let ast_variant = option::get( - &vec::find(enum_definition.variants, |v| { + vec::find(enum_definition.variants, |v| { to_str(v.node.name) == variant.name })); diff --git a/src/rustdoc/doc.rs b/src/rustdoc/doc.rs index 0764d9e243268..2d5bf5fc3c693 100644 --- a/src/rustdoc/doc.rs +++ b/src/rustdoc/doc.rs @@ -377,7 +377,7 @@ impl IndexEntry : cmp::Eq { impl Doc { fn CrateDoc() -> CrateDoc { - option::get(&vec::foldl(None, self.pages, |_m, page| { + option::get(vec::foldl(None, self.pages, |_m, page| { match *page { doc::CratePage(doc) => Some(doc), _ => None diff --git a/src/rustdoc/markdown_writer.rs b/src/rustdoc/markdown_writer.rs index ea559ae2ee6bc..1ed007def6334 100644 --- a/src/rustdoc/markdown_writer.rs +++ b/src/rustdoc/markdown_writer.rs @@ -136,13 +136,15 @@ fn readclose(fd: libc::c_int) -> ~str { // Copied from run::program_output let file = os::fdopen(fd); let reader = io::FILE_reader(file, false); - let mut buf = ~""; - while !reader.eof() { - let bytes = reader.read_bytes(4096u); - buf += str::from_bytes(bytes); - } + let buf = io::with_bytes_writer(|writer| { + let mut bytes = [mut 0, ..4096]; + while !reader.eof() { + let nread = reader.read(bytes, bytes.len()); + writer.write(bytes.view(0, nread)); + } + }); os::fclose(file); - return buf; + str::from_bytes(buf) } fn generic_writer(+process: fn~(markdown: ~str)) -> Writer { diff --git a/src/rustdoc/parse.rs b/src/rustdoc/parse.rs index 59d64f18d59af..7fc17dfe83861 100644 --- a/src/rustdoc/parse.rs +++ b/src/rustdoc/parse.rs @@ -20,16 +20,16 @@ fn from_str(source: ~str) -> @ast::crate { ~"-", @source, ~[], parse::new_parse_sess(None)) } -fn from_file_sess(sess: session::session, file: &Path) -> @ast::crate { +fn from_file_sess(sess: session::Session, file: &Path) -> @ast::crate { parse::parse_crate_from_file( file, cfg(sess, file_input(*file)), sess.parse_sess) } -fn from_str_sess(sess: session::session, source: ~str) -> @ast::crate { +fn from_str_sess(sess: session::Session, source: ~str) -> @ast::crate { parse::parse_crate_from_source_str( ~"-", @source, cfg(sess, str_input(source)), sess.parse_sess) } -fn cfg(sess: session::session, input: driver::input) -> ast::crate_cfg { +fn cfg(sess: session::Session, input: driver::input) -> ast::crate_cfg { driver::default_configuration(sess, ~"rustdoc", input) } diff --git a/src/rustllvm/RustWrapper.cpp b/src/rustllvm/RustWrapper.cpp index fc2049507eed9..498a4e137f0e3 100644 --- a/src/rustllvm/RustWrapper.cpp +++ b/src/rustllvm/RustWrapper.cpp @@ -493,5 +493,7 @@ extern "C" LLVMValueRef LLVMBuildAtomicRMW(LLVMBuilderRef B, } extern "C" void LLVMSetDebug(int Enabled) { +#ifndef NDEBUG DebugFlag = Enabled; +#endif } diff --git a/src/test/auxiliary/anon_trait_static_method_lib.rs b/src/test/auxiliary/anon_trait_static_method_lib.rs new file mode 100644 index 0000000000000..ec9398dea15ec --- /dev/null +++ b/src/test/auxiliary/anon_trait_static_method_lib.rs @@ -0,0 +1,10 @@ +pub struct Foo { + x: int +} + +pub impl Foo { + static fn new() -> Foo { + Foo { x: 3 } + } +} + diff --git a/src/test/auxiliary/issue_3136_a.rc b/src/test/auxiliary/issue_3136_a.rc new file mode 100644 index 0000000000000..532c669bd1d20 --- /dev/null +++ b/src/test/auxiliary/issue_3136_a.rc @@ -0,0 +1,3 @@ +#[crate_type = "lib"]; + +pub mod issue_3136_a; diff --git a/src/test/auxiliary/issue_3136_a.rs b/src/test/auxiliary/issue_3136_a.rs new file mode 100644 index 0000000000000..b3af688032326 --- /dev/null +++ 
b/src/test/auxiliary/issue_3136_a.rs
@@ -0,0 +1,15 @@
+trait x {
+    fn use_x();
+}
+enum y = ();
+impl y:x {
+    fn use_x() {
+        struct foo { //~ ERROR quux
+            i: ()
+        }
+        fn new_foo(i: ()) -> foo {
+            foo { i: i }
+        }
+    }
+}
+
diff --git a/src/test/compile-fail/borrowck-loan-local-as-both-mut-and-imm.rs b/src/test/compile-fail/borrowck-loan-local-as-both-mut-and-imm.rs
new file mode 100644
index 0000000000000..54048ed2fd8cb
--- /dev/null
+++ b/src/test/compile-fail/borrowck-loan-local-as-both-mut-and-imm.rs
@@ -0,0 +1,25 @@
+use core::either::{Either, Left, Right};
+
+ fn f(x: &mut Either, y: &Either) -> int {
+    match *y {
+      Left(ref z) => {
+        *x = Right(1.0);
+        *z
+      }
+      _ => fail
+    }
+ }
+
+ fn g() {
+    let mut x: Either = Left(3);
+    io::println(f(&mut x, &x).to_str()); //~ ERROR conflicts with prior loan
+ }
+
+ fn h() {
+    let mut x: Either = Left(3);
+    let y: &Either = &x;
+    let z: &mut Either = &mut x; //~ ERROR conflicts with prior loan
+    *z = *y;
+ }
+
+ fn main() {}
diff --git a/src/test/compile-fail/duplicate-visibility.rs b/src/test/compile-fail/duplicate-visibility.rs
new file mode 100644
index 0000000000000..32997fcce31a6
--- /dev/null
+++ b/src/test/compile-fail/duplicate-visibility.rs
@@ -0,0 +1,4 @@
+// error-pattern:unmatched visibility `pub`
+extern {
+    pub pub fn foo();
+}
diff --git a/src/test/compile-fail/estr-subtyping.rs b/src/test/compile-fail/estr-subtyping.rs
index 7e3e29f2b8f53..626e7eff3f7cb 100644
--- a/src/test/compile-fail/estr-subtyping.rs
+++ b/src/test/compile-fail/estr-subtyping.rs
@@ -1,29 +1,23 @@
 fn wants_box(x: @str) { }
 fn wants_uniq(x: ~str) { }
-fn wants_three(x: str/3) { }
+fn wants_slice(x: &str) { }
 
 fn has_box(x: @str) {
    wants_box(x);
    wants_uniq(x); //~ ERROR str storage differs: expected ~ but found @
-   wants_three(x); //~ ERROR str storage differs: expected 3 but found @
+   wants_slice(x);
 }
 
 fn has_uniq(x: ~str) {
    wants_box(x); //~ ERROR str storage differs: expected @ but found ~
    wants_uniq(x);
-   wants_three(x); //~ ERROR str storage differs: expected 3 but found ~
+   wants_slice(x);
 }
 
-fn has_three(x: str/3) {
-   wants_box(x); //~ ERROR str storage differs: expected @ but found 3
-   wants_uniq(x); //~ ERROR str storage differs: expected ~ but found 3
-   wants_three(x);
-}
-
-fn has_four(x: str/4) {
-   wants_box(x); //~ ERROR str storage differs: expected @ but found 4
-   wants_uniq(x); //~ ERROR str storage differs: expected ~ but found 4
-   wants_three(x); //~ ERROR str storage differs: expected 3 but found 4
+fn has_slice(x: &str) {
+   wants_box(x); //~ ERROR str storage differs: expected @ but found &
+   wants_uniq(x); //~ ERROR str storage differs: expected ~ but found &
+   wants_slice(x);
 }
 
 fn main() {
diff --git a/src/test/compile-fail/evec-subtyping.rs b/src/test/compile-fail/evec-subtyping.rs
index 2eabfa0c48c61..e5e9aa0c2579f 100644
--- a/src/test/compile-fail/evec-subtyping.rs
+++ b/src/test/compile-fail/evec-subtyping.rs
@@ -1,6 +1,6 @@
 fn wants_box(x: @[uint]) { }
 fn wants_uniq(x: ~[uint]) { }
-fn wants_three(x: [uint]/3) { }
+fn wants_three(x: [uint * 3]) { }
 
 fn has_box(x: @[uint]) {
    wants_box(x);
@@ -14,13 +14,13 @@ fn has_uniq(x: ~[uint]) {
    wants_three(x); //~ ERROR [] storage differs: expected 3 but found ~
 }
 
-fn has_three(x: [uint]/3) {
+fn has_three(x: [uint * 3]) {
    wants_box(x); //~ ERROR [] storage differs: expected @ but found 3
    wants_uniq(x); //~ ERROR [] storage differs: expected ~ but found 3
    wants_three(x);
 }
 
-fn has_four(x: [uint]/4) {
+fn has_four(x: [uint * 4]) {
    wants_box(x); //~ ERROR [] storage differs: expected @ but found 4
    wants_uniq(x); //~ ERROR [] storage differs: expected ~ but found 4
    wants_three(x); //~ ERROR [] storage differs: expected 3 but found 4
diff --git a/src/test/compile-fail/issue-2074.rs b/src/test/compile-fail/issue-2074.rs
new file mode 100644
index 0000000000000..d911b7db3dd4d
--- /dev/null
+++ b/src/test/compile-fail/issue-2074.rs
@@ -0,0 +1,12 @@
+// xfail-test
+fn main() {
+    let one = fn@() -> uint {
+        enum r { a };
+        return a as uint;
+    };
+    let two = fn@() -> uint {
+        enum r { a };
+        return a as uint;
+    };
+    one(); two();
+}
diff --git a/src/test/compile-fail/issue-2766-a.rs b/src/test/compile-fail/issue-2766-a.rs
index 75e524e27ac48..590ea5e0b7ccf 100644
--- a/src/test/compile-fail/issue-2766-a.rs
+++ b/src/test/compile-fail/issue-2766-a.rs
@@ -1,13 +1,13 @@
 mod stream {
     #[legacy_exports];
-    enum stream { send(T, server::stream), }
+    enum Stream { send(T, server::Stream), }
     mod server {
         #[legacy_exports];
-        impl stream {
-            fn recv() -> extern fn(+v: stream) -> stream::stream {
+        impl Stream {
+            fn recv() -> extern fn(+v: Stream) -> stream::Stream {
                 // resolve really should report just one error here.
                 // Change the test case when it changes.
-                fn recv(+pipe: stream) -> stream::stream { //~ ERROR attempt to use a type argument out of scope
+                fn recv(+pipe: Stream) -> stream::Stream { //~ ERROR attempt to use a type argument out of scope
                     //~^ ERROR use of undeclared type name
                     //~^^ ERROR attempt to use a type argument out of scope
                     //~^^^ ERROR use of undeclared type name
@@ -16,7 +16,7 @@ mod stream {
             recv
         }
     }
-    type stream = pipes::RecvPacket>;
+    type Stream = pipes::RecvPacket>;
 }
 }
diff --git a/src/test/compile-fail/issue-2823.rs b/src/test/compile-fail/issue-2823.rs
new file mode 100644
index 0000000000000..fd2c17584581c
--- /dev/null
+++ b/src/test/compile-fail/issue-2823.rs
@@ -0,0 +1,12 @@
+struct C {
+    x: int,
+    drop {
+        #error("dropping: %?", self.x);
+    }
+}
+
+fn main() {
+    let c = C{ x: 2};
+    let d = copy c; //~ ERROR copying a noncopyable value
+    #error("%?", d.x);
+}
\ No newline at end of file
diff --git a/src/test/compile-fail/issue-3099-a.rs b/src/test/compile-fail/issue-3099-a.rs
index 2721186585bce..fee676ed5e282 100644
--- a/src/test/compile-fail/issue-3099-a.rs
+++ b/src/test/compile-fail/issue-3099-a.rs
@@ -1,5 +1,5 @@
 enum a { b, c }
-enum a { d, e } //~ ERROR Duplicate definition of type a
+enum a { d, e } //~ ERROR duplicate definition of type a
 
 fn main() {}
 
diff --git a/src/test/compile-fail/issue-3099-b.rs b/src/test/compile-fail/issue-3099-b.rs
index 997bb9e25571d..dc393eb752070 100644
--- a/src/test/compile-fail/issue-3099-b.rs
+++ b/src/test/compile-fail/issue-3099-b.rs
@@ -2,6 +2,6 @@ mod a {}
 
 #[legacy_exports]
-mod a {} //~ ERROR Duplicate definition of module a
+mod a {} //~ ERROR duplicate definition of type a
 
 fn main() {}
diff --git a/src/test/compile-fail/issue-3099.rs b/src/test/compile-fail/issue-3099.rs
index d5c016c1ea87b..79212686130a2 100644
--- a/src/test/compile-fail/issue-3099.rs
+++ b/src/test/compile-fail/issue-3099.rs
@@ -2,7 +2,7 @@ fn a(x: ~str) -> ~str {
     fmt!("First function with %s", x)
 }
 
-fn a(x: ~str, y: ~str) -> ~str { //~ ERROR Duplicate definition of value a
+fn a(x: ~str, y: ~str) -> ~str { //~ ERROR duplicate definition of value a
     fmt!("Second function with %s and %s", x, y)
 }
 
diff --git a/src/test/compile-fail/issue-3214.rs b/src/test/compile-fail/issue-3214.rs
new file mode 100644
index 0000000000000..7008f8c4ce293
--- /dev/null
+++ b/src/test/compile-fail/issue-3214.rs
@@ -0,0 +1,8 @@
+fn foo() {
+    struct foo {
+        mut x: T, //~ ERROR attempt to use a type argument out of scope
+        //~^ ERROR use of undeclared type name
+        drop { }
+    }
+}
+fn main() { }
diff --git a/src/test/compile-fail/issue-3521-2.rs b/src/test/compile-fail/issue-3521-2.rs
new file mode 100644
index 0000000000000..5af0417af047f
--- /dev/null
+++ b/src/test/compile-fail/issue-3521-2.rs
@@ -0,0 +1,7 @@
+fn main() {
+    let foo = 100;
+
+    const y: int = foo + 1; //~ ERROR: attempt to use a non-constant value in a constant
+
+    log(error, y);
+}
diff --git a/src/test/compile-fail/issue-3521.rs b/src/test/compile-fail/issue-3521.rs
new file mode 100644
index 0000000000000..9ad483367fe91
--- /dev/null
+++ b/src/test/compile-fail/issue-3521.rs
@@ -0,0 +1,9 @@
+fn main() {
+    let foo = 100;
+
+    enum Stuff {
+        Bar = foo //~ ERROR attempt to use a non-constant value in a constant
+    }
+
+    log(error, Bar);
+}
diff --git a/src/test/compile-fail/issue-3668-2.rs b/src/test/compile-fail/issue-3668-2.rs
new file mode 100644
index 0000000000000..89c9e2b2e91e1
--- /dev/null
+++ b/src/test/compile-fail/issue-3668-2.rs
@@ -0,0 +1,5 @@
+fn f(x:int) {
+    const child: int = x + 1; //~ ERROR attempt to use a non-constant value in a constant
+}
+
+fn main() {}
diff --git a/src/test/run-pass/issue-3668.rs b/src/test/compile-fail/issue-3668.rs
similarity index 63%
rename from src/test/run-pass/issue-3668.rs
rename to src/test/compile-fail/issue-3668.rs
index 8b3005a3589dc..6cbd64c6aa9bd 100644
--- a/src/test/run-pass/issue-3668.rs
+++ b/src/test/compile-fail/issue-3668.rs
@@ -1,4 +1,3 @@
-// xfail-test
 struct P { child: Option<@mut P> }
 trait PTrait {
    fn getChildOption() -> Option<@P>;
@@ -6,7 +5,7 @@ trait PTrait {
 impl P: PTrait {
    fn getChildOption() -> Option<@P> {
-       const childVal: @P = self.child.get();
+       const childVal: @P = self.child.get(); //~ ERROR attempt to use a non-constant value in a constant
       fail;
    }
 }
 
diff --git a/src/test/compile-fail/obsolete-syntax.rs b/src/test/compile-fail/obsolete-syntax.rs
index 9f57e08cdb2f8..c8a8bd859615f 100644
--- a/src/test/compile-fail/obsolete-syntax.rs
+++ b/src/test/compile-fail/obsolete-syntax.rs
@@ -56,4 +56,15 @@ fn obsolete_with() {
     //~^ ERROR obsolete syntax: with
 }
 
+fn obsolete_fixed_length_vec() {
+    let foo: [int]/1;
+    //~^ ERROR obsolete syntax: fixed-length vector
+    foo = [1]/_;
+    //~^ ERROR obsolete syntax: fixed-length vector
+    let foo: [int]/1;
+    //~^ ERROR obsolete syntax: fixed-length vector
+    foo = [1]/1;
+    //~^ ERROR obsolete syntax: fixed-length vector
+}
+
 fn main() { }
diff --git a/src/test/pretty/blank-lines.rs b/src/test/pretty/blank-lines.rs
index 3ef46a721aeb8..d677577e44240 100644
--- a/src/test/pretty/blank-lines.rs
+++ b/src/test/pretty/blank-lines.rs
@@ -1,5 +1,5 @@
 // pp-exact
-fn f() -> [int]/3 {
+fn f() -> [int * 3] {
     let picard = 0;
 
     let data = 1;
@@ -7,7 +7,7 @@ fn f() -> [int]/3 {
 
     let worf = 2;
 
 
-    let enterprise = [picard, data, worf]/_;
+    let enterprise = [picard, data, worf];
 
 
diff --git a/src/test/run-fail/doublefail.rs b/src/test/run-fail/doublefail.rs
new file mode 100644
index 0000000000000..37beb7b895ed0
--- /dev/null
+++ b/src/test/run-fail/doublefail.rs
@@ -0,0 +1,5 @@
+//error-pattern:One
+fn main() {
+    fail ~"One";
+    fail ~"Two";
+}
\ No newline at end of file
diff --git a/src/test/run-pass/anon-trait-static-method.rs b/src/test/run-pass/anon-trait-static-method.rs
new file mode 100644
index 0000000000000..6c4e9abc5ff7d
--- /dev/null
+++ b/src/test/run-pass/anon-trait-static-method.rs
@@ -0,0 +1,15 @@
+struct Foo {
+    x: int
+}
+
+impl Foo {
+    static fn new() -> Foo {
+        Foo { x: 3 }
+    }
+} + +fn main() { + let x = Foo::new(); + io::println(x.x.to_str()); +} + diff --git a/src/test/run-pass/anon_trait_static_method_exe.rs b/src/test/run-pass/anon_trait_static_method_exe.rs new file mode 100644 index 0000000000000..052f95ed3b544 --- /dev/null +++ b/src/test/run-pass/anon_trait_static_method_exe.rs @@ -0,0 +1,13 @@ +// xfail-fast - check-fast doesn't understand aux-build +// aux-build:anon_trait_static_method_lib.rs + +extern mod anon_trait_static_method_lib; +use anon_trait_static_method_lib::Foo; + +fn main() { + let x = Foo::new(); + io::println(x.x.to_str()); +} + + + diff --git a/src/test/run-pass/assignability-trait.rs b/src/test/run-pass/assignability-trait.rs index 4652212ce5f4c..16e7473fb783b 100644 --- a/src/test/run-pass/assignability-trait.rs +++ b/src/test/run-pass/assignability-trait.rs @@ -39,7 +39,7 @@ fn main() { assert length::(x) == vec::len(x); // Now try it with a type that *needs* to be borrowed - let z = [0,1,2,3]/_; + let z = [0,1,2,3]; // Call a method for z.iterate() |y| { assert z[*y] == *y; } // Call a parameterized function diff --git a/src/test/run-pass/auto_serialize.rs b/src/test/run-pass/auto_serialize.rs index 6c85f59b74ed2..b63d1dcab7ec8 100644 --- a/src/test/run-pass/auto_serialize.rs +++ b/src/test/run-pass/auto_serialize.rs @@ -9,18 +9,22 @@ use io::Writer; use std::serialization::{Serializable, Deserializable, deserialize}; use std::prettyprint; -fn test_ser_and_deser( - a1: &A, - +expected: ~str +fn test_prettyprint>( + a: &A, + expected: &~str ) { - // check the pretty printer: let s = do io::with_str_writer |w| { - a1.serialize(&prettyprint::Serializer(w)) + a.serialize(&prettyprint::Serializer(w)) }; debug!("s == %?", s); - assert s == expected; + assert s == *expected; +} - // check the EBML serializer: +fn test_ebml + Deserializable +>(a1: &A) { let bytes = do io::with_bytes_writer |wr| { let ebml_w = &ebml::Serializer(wr); a1.serialize(ebml_w) @@ -140,24 +144,40 @@ enum Quark { enum CLike { A, B, C } fn main() { - test_ser_and_deser(&Plus(@Minus(@Val(3u), @Val(10u)), - @Plus(@Val(22u), @Val(5u))), - ~"Plus(@Minus(@Val(3u), @Val(10u)), \ - @Plus(@Val(22u), @Val(5u)))"); - - test_ser_and_deser(&{lo: 0u, hi: 5u, node: 22u}, - ~"{lo: 0u, hi: 5u, node: 22u}"); - - test_ser_and_deser(&AnEnum({v: ~[1u, 2u, 3u]}), - ~"AnEnum({v: ~[1u, 2u, 3u]})"); - - test_ser_and_deser(&Point {x: 3u, y: 5u}, ~"Point {x: 3u, y: 5u}"); - - test_ser_and_deser(&@[1u, 2u, 3u], ~"@[1u, 2u, 3u]"); - - test_ser_and_deser(&Top(22u), ~"Top(22u)"); - test_ser_and_deser(&Bottom(222u), ~"Bottom(222u)"); - - test_ser_and_deser(&A, ~"A"); - test_ser_and_deser(&B, ~"B"); + let a = &Plus(@Minus(@Val(3u), @Val(10u)), @Plus(@Val(22u), @Val(5u))); + test_prettyprint(a, &~"Plus(@Minus(@Val(3u), @Val(10u)), \ + @Plus(@Val(22u), @Val(5u)))"); + test_ebml(a); + + let a = &{lo: 0u, hi: 5u, node: 22u}; + test_prettyprint(a, &~"{lo: 0u, hi: 5u, node: 22u}"); + test_ebml(a); + + let a = &AnEnum({v: ~[1u, 2u, 3u]}); + test_prettyprint(a, &~"AnEnum({v: ~[1u, 2u, 3u]})"); + test_ebml(a); + + let a = &Point {x: 3u, y: 5u}; + test_prettyprint(a, &~"Point {x: 3u, y: 5u}"); + test_ebml(a); + + let a = &@[1u, 2u, 3u]; + test_prettyprint(a, &~"@[1u, 2u, 3u]"); + test_ebml(a); + + let a = &Top(22u); + test_prettyprint(a, &~"Top(22u)"); + test_ebml(a); + + let a = &Bottom(222u); + test_prettyprint(a, &~"Bottom(222u)"); + test_ebml(a); + + let a = &A; + test_prettyprint(a, &~"A"); + test_ebml(a); + + let a = &B; + test_prettyprint(a, &~"B"); + test_ebml(a); } diff --git 
a/src/test/run-pass/borrowck-fixed-length-vecs.rs b/src/test/run-pass/borrowck-fixed-length-vecs.rs index 9d79e28b219d0..22862d7e58f1c 100644 --- a/src/test/run-pass/borrowck-fixed-length-vecs.rs +++ b/src/test/run-pass/borrowck-fixed-length-vecs.rs @@ -1,5 +1,5 @@ fn main() { - let x = [22]/1; + let x = [22]; let y = &x[0]; assert *y == 22; -} \ No newline at end of file +} diff --git a/src/test/run-pass/const-fields-and-indexing.rs b/src/test/run-pass/const-fields-and-indexing.rs index 387ca032f6a74..a5bcfc93c79b4 100644 --- a/src/test/run-pass/const-fields-and-indexing.rs +++ b/src/test/run-pass/const-fields-and-indexing.rs @@ -1,4 +1,4 @@ -const x : [int]/4 = [1,2,3,4]; +const x : [int * 4] = [1,2,3,4]; const p : int = x[2]; const y : &[int] = &[1,2,3,4]; const q : int = y[2]; @@ -17,4 +17,4 @@ fn main() { assert p == 3; assert q == 3; assert t == 20; -} \ No newline at end of file +} diff --git a/src/test/run-pass/const-vecs-and-slices.rs b/src/test/run-pass/const-vecs-and-slices.rs index 9cfee2d5dff91..36327172fabb8 100644 --- a/src/test/run-pass/const-vecs-and-slices.rs +++ b/src/test/run-pass/const-vecs-and-slices.rs @@ -1,4 +1,4 @@ -const x : [int]/4 = [1,2,3,4]; +const x : [int * 4] = [1,2,3,4]; const y : &[int] = &[1,2,3,4]; fn main() { @@ -7,4 +7,4 @@ fn main() { assert x[1] == 2; assert x[3] == 4; assert x[3] == y[3]; -} \ No newline at end of file +} diff --git a/src/test/run-pass/default-method-simple.rs b/src/test/run-pass/default-method-simple.rs new file mode 100644 index 0000000000000..6a05d9589130e --- /dev/null +++ b/src/test/run-pass/default-method-simple.rs @@ -0,0 +1,23 @@ +trait Foo { + fn f() { + io::println("Hello!"); + self.g(); + } + fn g(); +} + +struct A { + x: int +} + +impl A : Foo { + fn g() { + io::println("Goodbye!"); + } +} + +fn main() { + let a = A { x: 1 }; + a.f(); +} + diff --git a/src/test/run-pass/evec-internal-boxes.rs b/src/test/run-pass/evec-internal-boxes.rs index ce0fe172cf806..5ad12510b38fa 100644 --- a/src/test/run-pass/evec-internal-boxes.rs +++ b/src/test/run-pass/evec-internal-boxes.rs @@ -1,7 +1,7 @@ fn main() { - let x : [@int]/5 = [@1,@2,@3,@4,@5]/5; - let _y : [@int]/5 = [@1,@2,@3,@4,@5]/_; - let mut z = [@1,@2,@3,@4,@5]/_; + let x : [@int * 5] = [@1,@2,@3,@4,@5]; + let _y : [@int * 5] = [@1,@2,@3,@4,@5]; + let mut z = [@1,@2,@3,@4,@5]; z = x; assert *z[0] == 1; assert *z[4] == 5; diff --git a/src/test/run-pass/evec-internal.rs b/src/test/run-pass/evec-internal.rs index 789660c11f0f9..7ee72272e0ba5 100644 --- a/src/test/run-pass/evec-internal.rs +++ b/src/test/run-pass/evec-internal.rs @@ -4,16 +4,16 @@ // Doesn't work; needs a design decision. 
fn main() { - let x : [int]/5 = [1,2,3,4,5]/5; - let _y : [int]/5 = [1,2,3,4,5]/_; - let mut z = [1,2,3,4,5]/_; + let x : [int * 5] = [1,2,3,4,5]; + let _y : [int * 5] = [1,2,3,4,5]; + let mut z = [1,2,3,4,5]; z = x; assert z[0] == 1; assert z[4] == 5; - let a : [int]/5 = [1,1,1,1,1]/_; - let b : [int]/5 = [2,2,2,2,2]/_; - let c : [int]/5 = [2,2,2,2,3]/_; + let a : [int * 5] = [1,1,1,1,1]; + let b : [int * 5] = [2,2,2,2,2]; + let c : [int * 5] = [2,2,2,2,3]; log(debug, a); diff --git a/src/test/run-pass/fixed_length_copy.rs b/src/test/run-pass/fixed_length_copy.rs index 5bf6eb9a44d48..ca42fc35f2ba0 100644 --- a/src/test/run-pass/fixed_length_copy.rs +++ b/src/test/run-pass/fixed_length_copy.rs @@ -3,7 +3,7 @@ // are implicitly copyable #[deny(implicit_copies)] fn main() { - let arr = [1,2,3]/3; + let arr = [1,2,3]; let arr2 = arr; assert(arr[1] == 2); assert(arr2[2] == 3); diff --git a/src/test/run-pass/fixed_length_vec_glue.rs b/src/test/run-pass/fixed_length_vec_glue.rs index 8be0b1b0c432c..d9488c5abd88c 100644 --- a/src/test/run-pass/fixed_length_vec_glue.rs +++ b/src/test/run-pass/fixed_length_vec_glue.rs @@ -1,5 +1,5 @@ fn main() { - let arr = [1,2,3]/3; + let arr = [1,2,3]; let struc = {a: 13u8, b: arr, c: 42}; let s = sys::log_str(&struc); assert(s == ~"{ a: 13, b: [ 1, 2, 3 ], c: 42 }"); diff --git a/src/test/run-pass/issue-2284.rs b/src/test/run-pass/issue-2284.rs new file mode 100644 index 0000000000000..3689c42253b90 --- /dev/null +++ b/src/test/run-pass/issue-2284.rs @@ -0,0 +1,11 @@ +// xfail-test +trait Send { + fn f(); +} + +fn f(t: T) { + t.f(); +} + +fn main() { +} \ No newline at end of file diff --git a/src/test/run-pass/issue-2428.rs b/src/test/run-pass/issue-2428.rs new file mode 100644 index 0000000000000..76fc674e73448 --- /dev/null +++ b/src/test/run-pass/issue-2428.rs @@ -0,0 +1,10 @@ +fn main() { + let foo = 100; + const quux: int = 5; + + enum Stuff { + Bar = quux + } + + assert (Bar as int == quux); +} diff --git a/src/test/run-pass/issue-2895.rs b/src/test/run-pass/issue-2895.rs new file mode 100644 index 0000000000000..1765e8638a637 --- /dev/null +++ b/src/test/run-pass/issue-2895.rs @@ -0,0 +1,23 @@ +use sys::size_of; +extern mod std; + +struct Cat { + x: int +} + +struct Kitty { + x: int, + drop {} +} + +#[cfg(target_arch = "x86_64")] +fn main() { + assert (size_of::() == 8 as uint); + assert (size_of::() == 16 as uint); +} + +#[cfg(target_arch = "x86")] +fn main() { + assert (size_of::() == 4 as uint); + assert (size_of::() == 8 as uint); +} diff --git a/src/test/run-pass/issue-2930.rs b/src/test/run-pass/issue-2930.rs index c480d382adc03..bccaeeaf18ddc 100644 --- a/src/test/run-pass/issue-2930.rs +++ b/src/test/run-pass/issue-2930.rs @@ -1,6 +1,6 @@ proto! 
stream ( - stream:send { - send(T) -> stream + Stream:send { + send(T) -> Stream } ) diff --git a/src/test/run-pass/issue-3447.rs b/src/test/run-pass/issue-3447.rs new file mode 100644 index 0000000000000..7302163a312c4 --- /dev/null +++ b/src/test/run-pass/issue-3447.rs @@ -0,0 +1,24 @@ +struct list { + element: &self/T, + mut next: Option<@list> +} + +impl list{ + fn addEnd(&self, element: &self/T) { + let newList = list { + element: element, + next: option::None + }; + + self.next = Some(@(move newList)); + } +} + +fn main() { + let s = @"str"; + let ls = list { + element: &s, + next: option::None + }; + io::println(*ls.element); +} diff --git a/src/test/run-pass/issue-3656.rs b/src/test/run-pass/issue-3656.rs new file mode 100644 index 0000000000000..330ec59a0a8f2 --- /dev/null +++ b/src/test/run-pass/issue-3656.rs @@ -0,0 +1,20 @@ +// Issue #3656 +// Incorrect struct size computation in the FFI, because of not taking +// the alignment of elements into account. + +use libc::*; + +struct KEYGEN { + hash_algorithm: [c_uint * 2], + count: uint32_t, + salt: *c_void, + salt_size: uint32_t, +} + +extern { + // Bogus signature, just need to test if it compiles. + pub fn malloc(++data: KEYGEN); +} + +fn main() { +} diff --git a/src/test/run-pass/issue-3688-2.rs b/src/test/run-pass/issue-3688-2.rs deleted file mode 100644 index 8a5b0e26829cf..0000000000000 --- a/src/test/run-pass/issue-3688-2.rs +++ /dev/null @@ -1,6 +0,0 @@ -// xfail-test -fn f(x:int) { - const child: int = x + 1; -} - -fn main() {} diff --git a/src/test/run-pass/issue_3136_b.rs b/src/test/run-pass/issue_3136_b.rs new file mode 100644 index 0000000000000..ef8e1af56d70b --- /dev/null +++ b/src/test/run-pass/issue_3136_b.rs @@ -0,0 +1,6 @@ +// xfail-fast - check-fast doesn't understand aux-build +// aux-build:issue_3136_a.rc + +extern mod issue_3136_a; +fn main() {} + diff --git a/src/test/run-pass/iter-all.rs b/src/test/run-pass/iter-all.rs index 75334db86cc68..b5c10fd2c8121 100644 --- a/src/test/run-pass/iter-all.rs +++ b/src/test/run-pass/iter-all.rs @@ -1,9 +1,9 @@ fn is_even(x: &uint) -> bool { (*x % 2) == 0 } fn main() { - assert ![1u, 2u]/_.all(is_even); - assert [2u, 4u]/_.all(is_even); - assert []/_.all(is_even); + assert ![1u, 2u].all(is_even); + assert [2u, 4u].all(is_even); + assert [].all(is_even); assert !Some(1u).all(is_even); assert Some(2u).all(is_even); diff --git a/src/test/run-pass/iter-any.rs b/src/test/run-pass/iter-any.rs index 22057b74a4140..f964eda54a5ac 100644 --- a/src/test/run-pass/iter-any.rs +++ b/src/test/run-pass/iter-any.rs @@ -1,9 +1,9 @@ fn is_even(x: &uint) -> bool { (*x % 2) == 0 } fn main() { - assert ![1u, 3u]/_.any(is_even); - assert [1u, 2u]/_.any(is_even); - assert ![]/_.any(is_even); + assert ![1u, 3u].any(is_even); + assert [1u, 2u].any(is_even); + assert ![].any(is_even); assert !Some(1).any(is_even); assert Some(2).any(is_even); diff --git a/src/test/run-pass/iter-contains.rs b/src/test/run-pass/iter-contains.rs index 6036b5b2d24b7..31fa52175aad3 100644 --- a/src/test/run-pass/iter-contains.rs +++ b/src/test/run-pass/iter-contains.rs @@ -1,9 +1,9 @@ fn main() { - assert []/_.contains(&22u) == false; - assert [1u, 3u]/_.contains(&22u) == false; - assert [22u, 1u, 3u]/_.contains(&22u) == true; - assert [1u, 22u, 3u]/_.contains(&22u) == true; - assert [1u, 3u, 22u]/_.contains(&22u) == true; + assert [].contains(&22u) == false; + assert [1u, 3u].contains(&22u) == false; + assert [22u, 1u, 3u].contains(&22u) == true; + assert [1u, 22u, 3u].contains(&22u) == true; + assert [1u, 3u, 
22u].contains(&22u) == true; assert None.contains(&22u) == false; assert Some(1u).contains(&22u) == false; assert Some(22u).contains(&22u) == true; diff --git a/src/test/run-pass/iter-count.rs b/src/test/run-pass/iter-count.rs index 0b6f94367be31..26aa26b4c8db7 100644 --- a/src/test/run-pass/iter-count.rs +++ b/src/test/run-pass/iter-count.rs @@ -1,8 +1,8 @@ fn main() { - assert []/_.count(&22u) == 0u; - assert [1u, 3u]/_.count(&22u) == 0u; - assert [22u, 1u, 3u]/_.count(&22u) == 1u; - assert [22u, 1u, 22u]/_.count(&22u) == 2u; + assert [].count(&22u) == 0u; + assert [1u, 3u].count(&22u) == 0u; + assert [22u, 1u, 3u].count(&22u) == 1u; + assert [22u, 1u, 22u].count(&22u) == 2u; assert None.count(&22u) == 0u; assert Some(1u).count(&22u) == 0u; assert Some(22u).count(&22u) == 1u; diff --git a/src/test/run-pass/iter-eachi.rs b/src/test/run-pass/iter-eachi.rs index 9ae3cc43913b4..4c85e6ca3af43 100644 --- a/src/test/run-pass/iter-eachi.rs +++ b/src/test/run-pass/iter-eachi.rs @@ -1,6 +1,6 @@ fn main() { let mut c = 0u; - for [1u, 2u, 3u, 4u, 5u]/_.eachi |i, v| { + for [1u, 2u, 3u, 4u, 5u].eachi |i, v| { assert (i + 1u) == *v; c += 1u; } diff --git a/src/test/run-pass/iter-filter-to-vec.rs b/src/test/run-pass/iter-filter-to-vec.rs index f96b18f140a41..000e7c491be13 100644 --- a/src/test/run-pass/iter-filter-to-vec.rs +++ b/src/test/run-pass/iter-filter-to-vec.rs @@ -1,8 +1,8 @@ fn is_even(+x: uint) -> bool { (x % 2) == 0 } fn main() { - assert [1, 3]/_.filter_to_vec(is_even) == ~[]; - assert [1, 2, 3]/_.filter_to_vec(is_even) == ~[2]; + assert [1, 3].filter_to_vec(is_even) == ~[]; + assert [1, 2, 3].filter_to_vec(is_even) == ~[2]; assert None.filter_to_vec(is_even) == ~[]; assert Some(1).filter_to_vec(is_even) == ~[]; assert Some(2).filter_to_vec(is_even) == ~[2]; diff --git a/src/test/run-pass/iter-foldl.rs b/src/test/run-pass/iter-foldl.rs index bbc1673f6864d..65b517acf1c1a 100644 --- a/src/test/run-pass/iter-foldl.rs +++ b/src/test/run-pass/iter-foldl.rs @@ -1,8 +1,8 @@ fn add(x: &float, y: &uint) -> float { *x + ((*y) as float) } fn main() { - assert [1u, 3u]/_.foldl(20f, add) == 24f; - assert []/_.foldl(20f, add) == 20f; + assert [1u, 3u].foldl(20f, add) == 24f; + assert [].foldl(20f, add) == 20f; assert None.foldl(20f, add) == 20f; assert Some(1u).foldl(20f, add) == 21f; assert Some(2u).foldl(20f, add) == 22f; diff --git a/src/test/run-pass/iter-map-to-vec.rs b/src/test/run-pass/iter-map-to-vec.rs index 2f5359f197f15..208fc5d127edf 100644 --- a/src/test/run-pass/iter-map-to-vec.rs +++ b/src/test/run-pass/iter-map-to-vec.rs @@ -1,8 +1,8 @@ fn inc(+x: uint) -> uint { x + 1 } fn main() { - assert [1, 3]/_.map_to_vec(inc) == ~[2, 4]; - assert [1, 2, 3]/_.map_to_vec(inc) == ~[2, 3, 4]; + assert [1, 3].map_to_vec(inc) == ~[2, 4]; + assert [1, 2, 3].map_to_vec(inc) == ~[2, 3, 4]; assert None.map_to_vec(inc) == ~[]; assert Some(1).map_to_vec(inc) == ~[2]; assert Some(2).map_to_vec(inc) == ~[3]; diff --git a/src/test/run-pass/iter-min-max.rs b/src/test/run-pass/iter-min-max.rs index 23bd7fb05af4e..60001c8f7e522 100644 --- a/src/test/run-pass/iter-min-max.rs +++ b/src/test/run-pass/iter-min-max.rs @@ -1,11 +1,11 @@ fn is_even(&&x: uint) -> bool { (x % 2u) == 0u } fn main() { - assert [1u, 3u]/_.min() == 1u; - assert [3u, 1u]/_.min() == 1u; + assert [1u, 3u].min() == 1u; + assert [3u, 1u].min() == 1u; assert Some(1u).min() == 1u; - assert [1u, 3u]/_.max() == 3u; - assert [3u, 1u]/_.max() == 3u; + assert [1u, 3u].max() == 3u; + assert [3u, 1u].max() == 3u; assert Some(3u).max() == 3u; } diff --git 
a/src/test/run-pass/iter-to-vec.rs b/src/test/run-pass/iter-to-vec.rs index c2f1330b72126..358f484464d5d 100644 --- a/src/test/run-pass/iter-to-vec.rs +++ b/src/test/run-pass/iter-to-vec.rs @@ -1,5 +1,5 @@ fn main() { - assert [1u, 3u]/_.to_vec() == ~[1u, 3u]; + assert [1u, 3u].to_vec() == ~[1u, 3u]; let e: ~[uint] = ~[]; assert e.to_vec() == ~[]; assert None::.to_vec() == ~[]; diff --git a/src/test/run-pass/pipe-presentation-examples.rs b/src/test/run-pass/pipe-presentation-examples.rs index 7ce8e6ea73d0d..853aba53eda47 100644 --- a/src/test/run-pass/pipe-presentation-examples.rs +++ b/src/test/run-pass/pipe-presentation-examples.rs @@ -56,7 +56,7 @@ macro_rules! select ( -> $next:ident $e:expr),+ } )+ } => { - let index = pipes::selecti([$(($port).header()),+]/_); + let index = pipes::selecti([$(($port).header()),+]); select_if!(index, 0, $( $port => [ $($message$(($($x),+))dont_type_this* -> $next $e),+ ], )+) diff --git a/src/test/run-pass/pipe-select.rs b/src/test/run-pass/pipe-select.rs index 627cdbee9cabd..23588de2ecafc 100644 --- a/src/test/run-pass/pipe-select.rs +++ b/src/test/run-pass/pipe-select.rs @@ -14,8 +14,8 @@ proto! oneshot ( ) proto! stream ( - stream:send { - send(T) -> stream + Stream:send { + send(T) -> Stream } ) diff --git a/src/test/run-pass/regions-borrow-evec-fixed.rs b/src/test/run-pass/regions-borrow-evec-fixed.rs index 5a0875109f6cd..2526540bde1bb 100644 --- a/src/test/run-pass/regions-borrow-evec-fixed.rs +++ b/src/test/run-pass/regions-borrow-evec-fixed.rs @@ -5,6 +5,6 @@ fn foo(x: &[int]) -> int { } fn main() { - let p = [1,2,3,4,5]/_; + let p = [1,2,3,4,5]; assert foo(p) == 1; } diff --git a/src/test/run-pass/select-macro.rs b/src/test/run-pass/select-macro.rs index d6ce85ac3446d..271ed99057fc2 100644 --- a/src/test/run-pass/select-macro.rs +++ b/src/test/run-pass/select-macro.rs @@ -52,7 +52,7 @@ macro_rules! select ( -> $next:ident $e:expr),+ } )+ } => { - let index = pipes::selecti([$(($port).header()),+]/_); + let index = pipes::selecti([$(($port).header()),+]); select_if!(index, 0 $(, $port => [ $(type_this $message$(($(x $x),+))dont_type_this* -> $next => { move $e }),+ ])+) diff --git a/src/test/run-pass/test-ignore-cfg.rs b/src/test/run-pass/test-ignore-cfg.rs index 0482fc278c81a..0baabecb0ab91 100644 --- a/src/test/run-pass/test-ignore-cfg.rs +++ b/src/test/run-pass/test-ignore-cfg.rs @@ -20,10 +20,10 @@ fn checktests() { let tests = __test::tests(); let shouldignore = option::get( - &vec::find(tests, |t| t.name == ~"shouldignore" )); + vec::find(tests, |t| t.name == ~"shouldignore" )); assert shouldignore.ignore == true; let shouldnotignore = option::get( - &vec::find(tests, |t| t.name == ~"shouldnotignore" )); + vec::find(tests, |t| t.name == ~"shouldnotignore" )); assert shouldnotignore.ignore == false; } \ No newline at end of file