diff --git a/src/comp/driver/rustc.rs b/src/comp/driver/rustc.rs
index f1bd93a4a5d38..15ea7f188c27c 100644
--- a/src/comp/driver/rustc.rs
+++ b/src/comp/driver/rustc.rs
@@ -80,10 +80,10 @@ fn parse_input(session::session sess, &ast::crate_cfg cfg, str input) ->
    @ast::crate {
     ret if (str::ends_with(input, ".rc")) {
             parser::parse_crate_from_crate_file
-                (input, cfg, sess.get_codemap())
+                (input, cfg, sess.get_parse_sess())
         } else if (str::ends_with(input, ".rs")) {
             parser::parse_crate_from_source_file
-                (input, cfg, sess.get_codemap())
+                (input, cfg, sess.get_parse_sess())
         } else { sess.fatal("unknown input file type: " + input); fail };
 }
@@ -110,6 +110,9 @@ fn compile_input(session::session sess, ast::crate_cfg cfg, str input,
         crate = time(time_passes, "building test harness",
                      bind front::test::modify_for_testing(sess, crate));
     }
+    crate = time(time_passes, "expansion",
+                 bind syntax::ext::expand::expand_crate(sess, crate));
+
     auto ast_map = time(time_passes, "ast indexing",
                         bind middle::ast_map::map_crate(*crate));
     auto d =
@@ -351,8 +354,9 @@ fn build_session_options(str binary, getopts::match match, str binary_dir) ->
 fn build_session(@session::options sopts) -> session::session {
     auto target_cfg = build_target_config();
     auto crate_cache = std::map::new_int_hash[session::crate_metadata]();
-    ret session::session(target_cfg, sopts, crate_cache, [],
-                         [], [], codemap::new_codemap(), 0u);
+    ret session::session(target_cfg, sopts, crate_cache, [], [], [],
+                         @rec(cm=codemap::new_codemap(), mutable next_id=0),
+                         0u);
 }

 fn parse_pretty(session::session sess, &str name) -> pp_mode {
diff --git a/src/comp/driver/session.rs b/src/comp/driver/session.rs
index 7dce5b9b068a6..5028887333e58 100644
--- a/src/comp/driver/session.rs
+++ b/src/comp/driver/session.rs
@@ -10,6 +10,7 @@ import std::option;
 import std::option::some;
 import std::option::none;
 import std::str;
+import syntax::parse::parser::parse_sess;

 tag os { os_win32; os_macos; os_linux; }
@@ -48,25 +49,25 @@ obj session(@config targ_cfg,
             mutable vec[str] used_crate_files,
             mutable vec[str] used_libraries,
             mutable vec[str] used_link_args,
-            codemap::codemap cm,
+            parse_sess parse_sess,
             mutable uint err_count) {
     fn get_targ_cfg() -> @config { ret targ_cfg; }
     fn get_opts() -> @options { ret opts; }
     fn span_fatal(span sp, str msg) -> ! {
         // FIXME: Use constants, but rustboot doesn't know how to export them.
-        codemap::emit_error(some(sp), msg, cm);
+        codemap::emit_error(some(sp), msg, parse_sess.cm);
         fail;
     }
     fn fatal(str msg) -> ! {
-        codemap::emit_error(none, msg, cm);
+        codemap::emit_error(none, msg, parse_sess.cm);
         fail;
     }
     fn span_err(span sp, str msg) {
-        codemap::emit_error(some(sp), msg, cm);
+        codemap::emit_error(some(sp), msg, parse_sess.cm);
         err_count += 1u;
     }
     fn err(str msg) {
-        codemap::emit_error(none, msg, cm);
+        codemap::emit_error(none, msg, parse_sess.cm);
         err_count += 1u;
     }
     fn abort_if_errors() {
@@ -76,17 +77,17 @@ obj session(@config targ_cfg,
     }
     fn span_warn(span sp, str msg) {
         // FIXME: Use constants, but rustboot doesn't know how to export them.
-        codemap::emit_warning(some(sp), msg, cm);
+        codemap::emit_warning(some(sp), msg, parse_sess.cm);
     }
     fn warn(str msg) {
-        codemap::emit_warning(none, msg, cm);
+        codemap::emit_warning(none, msg, parse_sess.cm);
     }
     fn span_note(span sp, str msg) {
         // FIXME: Use constants, but rustboot doesn't know how to export them.
-        codemap::emit_note(some(sp), msg, cm);
+        codemap::emit_note(some(sp), msg, parse_sess.cm);
     }
     fn note(str msg) {
-        codemap::emit_note(none, msg, cm);
+        codemap::emit_note(none, msg, parse_sess.cm);
     }
     fn span_bug(span sp, str msg) -> ! {
         self.span_fatal(sp, #fmt("internal compiler error %s", msg));
@@ -139,9 +140,13 @@ obj session(@config targ_cfg,
     fn get_used_crate_files() -> vec[str] { ret used_crate_files; }
-    fn get_codemap() -> codemap::codemap { ret cm; }
+    fn get_codemap() -> codemap::codemap { ret parse_sess.cm; }
     fn lookup_pos(uint pos) -> codemap::loc {
-        ret codemap::lookup_pos(cm, pos);
+        ret codemap::lookup_pos(parse_sess.cm, pos);
+    }
+    fn get_parse_sess() -> parse_sess { ret parse_sess; }
+    fn next_node_id() -> ast::node_id {
+        ret syntax::parse::parser::next_node_id(parse_sess);
     }
     fn span_str(span sp) -> str {
         ret codemap::span_to_str(sp, self.get_codemap());
diff --git a/src/comp/middle/trans.rs b/src/comp/middle/trans.rs
index 93a8d775e1d11..136438fef0328 100644
--- a/src/comp/middle/trans.rs
+++ b/src/comp/middle/trans.rs
@@ -6246,8 +6246,8 @@ fn trans_expr_out(&@block_ctxt cx, &@ast::expr e, out_method output) ->
         case (ast::expr_rec(?args, ?base)) {
             ret trans_rec(cx, args, base, e.id);
         }
-        case (ast::expr_ext(_, _, _, ?expanded)) {
-            ret trans_expr(cx, expanded);
+        case (ast::expr_ext(_, _, _)) {
+            ret cx.fcx.lcx.ccx.sess.bug("unexpanded macro");
         }
         case (ast::expr_fail(?expr)) {
             ret trans_fail_expr(cx, some(e.span), expr);
diff --git a/src/comp/middle/tstate/pre_post_conditions.rs b/src/comp/middle/tstate/pre_post_conditions.rs
index f58fdb0f6cf9d..b6eb6156eee7b 100644
--- a/src/comp/middle/tstate/pre_post_conditions.rs
+++ b/src/comp/middle/tstate/pre_post_conditions.rs
@@ -567,9 +567,8 @@ fn find_pre_post_expr(&fn_ctxt fcx, @expr e) {
         case (expr_break) { clear_pp(expr_pp(fcx.ccx, e)); }
         case (expr_cont) { clear_pp(expr_pp(fcx.ccx, e)); }
         case (expr_port(_)) { clear_pp(expr_pp(fcx.ccx, e)); }
-        case (expr_ext(_, _, _, ?expanded)) {
-            find_pre_post_expr(fcx, expanded);
-            copy_pre_post(fcx.ccx, e.id, expanded);
+        case (expr_ext(_, _, _)) {
+            fcx.ccx.tcx.sess.bug("unexpanded macro");
         }
         case (expr_anon_obj(?anon_obj, _)) {
             alt (anon_obj.with_obj) {
diff --git a/src/comp/middle/tstate/states.rs b/src/comp/middle/tstate/states.rs
index 229bd17654439..71b20900adadc 100644
--- a/src/comp/middle/tstate/states.rs
+++ b/src/comp/middle/tstate/states.rs
@@ -323,8 +323,8 @@ fn find_pre_post_state_expr(&fn_ctxt fcx, &prestate pres, @expr e) -> bool {
         case (expr_chan(?ex)) {
             ret find_pre_post_state_sub(fcx, pres, ex, e.id, none);
         }
-        case (expr_ext(_, _, _, ?expanded)) {
-            ret find_pre_post_state_sub(fcx, pres, expanded, e.id, none);
+        case (expr_ext(_, _, _)) {
+            fcx.ccx.tcx.sess.bug("unexpanded macro");
         }
         case (expr_put(?maybe_e)) {
             alt (maybe_e) {
diff --git a/src/comp/middle/typeck.rs b/src/comp/middle/typeck.rs
index 5d06e9e7d04a7..a8ea354afcccd 100644
--- a/src/comp/middle/typeck.rs
+++ b/src/comp/middle/typeck.rs
@@ -1700,10 +1700,8 @@ fn check_expr(&@fn_ctxt fcx, &@ast::expr expr) {
             }
             write::ty_only_fixup(fcx, id, tpt._1);
         }
-        case (ast::expr_ext(?p, ?args, ?body, ?expanded)) {
-            check_expr(fcx, expanded);
-            auto t = expr_ty(fcx.ccx.tcx, expanded);
-            write::ty_only_fixup(fcx, id, t);
+        case (ast::expr_ext(_,_,_)) {
+            fcx.ccx.tcx.sess.bug("unexpanded macro");
         }
         case (ast::expr_fail(?expr_opt)) {
             alt (expr_opt) {
diff --git a/src/comp/rustc.rc b/src/comp/rustc.rc
index 69336c5cb614a..5bc491d975c94 100644
--- a/src/comp/rustc.rc
+++ b/src/comp/rustc.rc
@@ -53,6 +53,7 @@ mod syntax {
         mod fmt;
         mod env;
         mod simplext;
+        mod expand;
     }
     mod print {
         mod pprust;
     }
diff --git a/src/comp/syntax/ast.rs b/src/comp/syntax/ast.rs
index 4a4b3db4785dd..5d1737a1c0ce4 100644
--- a/src/comp/syntax/ast.rs
+++ b/src/comp/syntax/ast.rs
@@ -277,7 +277,7 @@ tag expr_ {
     expr_field(@expr, ident);
     expr_index(@expr, @expr);
     expr_path(path);
-    expr_ext(path, vec[@expr], option::t[str], @expr);
+    expr_ext(path, vec[@expr], option::t[str]);
     expr_fail(option::t[@expr]);
     expr_break;
     expr_cont;
@@ -297,6 +297,9 @@ tag expr_ {
     expr_port(option::t[@ty]);
     expr_chan(@expr);
     expr_anon_obj(anon_obj, vec[ty_param]);
+    /* for the macro system */
+    expr_embeded_type(@ty);
+    expr_embeded_block(block);
 }

 type lit = spanned[lit_];
diff --git a/src/comp/syntax/ext/base.rs b/src/comp/syntax/ext/base.rs
index a0ba306edfa52..949c0d9f40d72 100644
--- a/src/comp/syntax/ext/base.rs
+++ b/src/comp/syntax/ext/base.rs
@@ -1,7 +1,7 @@
 import std::vec;
 import std::option;
 import std::map::hashmap;
-import parse::parser::parse_sess;
+import driver::session::session;
 import codemap::span;
 import std::map::new_str_hash;
 import codemap;
@@ -39,18 +39,21 @@ type ext_ctxt =
         span_msg_fn span_unimpl,
         next_id_fn next_id);

-fn mk_ctxt(&parse_sess sess) -> ext_ctxt {
-    fn ext_span_fatal_(&codemap::codemap cm, span sp, str msg) -> ! {
-        codemap::emit_error(option::some(sp), msg, cm);
+fn mk_ctxt(&session sess) -> ext_ctxt {
+    fn ext_span_fatal_(&session sess, span sp, str msg) -> ! {
+        sess.span_err(sp, msg);
         fail;
     }
-    auto ext_span_fatal = bind ext_span_fatal_(sess.cm, _, _);
-    fn ext_span_unimpl_(&codemap::codemap cm, span sp, str msg) -> ! {
-        codemap::emit_error(option::some(sp), "unimplemented " + msg, cm);
+    auto ext_span_fatal = bind ext_span_fatal_(sess, _, _);
+    fn ext_span_unimpl_(&session sess, span sp, str msg) -> ! {
+        sess.span_err(sp, "unimplemented " + msg);
         fail;
     }
-    auto ext_span_unimpl = bind ext_span_unimpl_(sess.cm, _, _);
-    auto ext_next_id = bind parse::parser::next_node_id(sess);
+    auto ext_span_unimpl = bind ext_span_unimpl_(sess, _, _);
+    fn ext_next_id_(&session sess) -> ast::node_id {
+        ret sess.next_node_id(); // temporary, until bind works better
+    }
+    auto ext_next_id = bind ext_next_id_(sess);
     ret rec(span_fatal=ext_span_fatal,
             span_unimpl=ext_span_unimpl,
             next_id=ext_next_id);
diff --git a/src/comp/syntax/ext/expand.rs b/src/comp/syntax/ext/expand.rs
new file mode 100644
index 0000000000000..517868e07089d
--- /dev/null
+++ b/src/comp/syntax/ext/expand.rs
@@ -0,0 +1,65 @@
+
+import codemap::emit_error;
+import driver::session;
+import syntax::ast::crate;
+import syntax::ast::expr_;
+import syntax::ast::expr_ext;
+import syntax::fold::*;
+
+import std::option::none;
+import std::option::some;
+
+import std::map::hashmap;
+import std::vec;
+
+fn expand_expr(&hashmap[str, base::syntax_extension] exts,
+               &session::session sess, &expr_ e, ast_fold fld,
+               &fn(&ast::expr_, ast_fold) -> expr_ orig) -> expr_ {
+    ret alt(e) {
+        case (expr_ext(?pth, ?args, ?body)) {
+            assert (vec::len(pth.node.idents) > 0u);
+            auto extname = pth.node.idents.(0);
+            auto ext_cx = base::mk_ctxt(sess);
+            alt (exts.find(extname)) {
+                case (none) {
+                    emit_error(some(pth.span), "unknown syntax expander: '"
+                               + extname + "'", sess.get_codemap());
+                    fail
+                }
+                case (some(base::normal(?ext))) {
+                    //keep going, outside-in
+                    fld.fold_expr(ext(ext_cx, pth.span, args, body)).node
+                }
+                case (some(base::macro_defining(?ext))) {
+                    auto named_extension = ext(ext_cx, pth.span, args, body);
+                    exts.insert(named_extension._0, named_extension._1);
+                    ast::expr_tup(vec::empty[ast::elt]())
+                }
+            }
+
+        }
+        case (_) { orig(e, fld) }
+    };
+}
+
+fn expand_crate(&session::session sess, &@crate c) -> @crate {
+    auto exts = ext::base::syntax_expander_table();
+    auto afp = default_ast_fold();
+    auto f_pre =
+        rec(fold_expr = bind expand_expr(exts, sess, _, _, afp.fold_expr)
+            with *afp);
+    auto f = make_fold(f_pre);
+    auto res = @f.fold_crate(*c);
+    dummy_out(f); //temporary: kill circular reference
+    ret res;
+
+}
+
+// Local Variables:
+// mode: rust
+// fill-column: 78;
+// indent-tabs-mode: nil
+// c-basic-offset: 4
+// buffer-file-coding-system: utf-8-unix
+// compile-command: "make -k -C $RBUILD 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
+// End:
diff --git a/src/comp/syntax/ext/simplext.rs b/src/comp/syntax/ext/simplext.rs
index e214524f0b799..74e855af5e134 100644
--- a/src/comp/syntax/ext/simplext.rs
+++ b/src/comp/syntax/ext/simplext.rs
@@ -1,6 +1,8 @@
 use std;

 import codemap::span;
+import ast::respan;
+
 import std::vec;
 import std::option;
 import vec::map;
@@ -16,8 +18,11 @@ import base::expr_to_ident;
 import fold::*;

 import ast::ident;
+import ast::path;
 import ast::path_;
 import ast::expr_path;
+import ast::expr_vec;
+import ast::expr_ext;

 export add_new_extension;
@@ -32,102 +37,229 @@ fn position[T](&T x, &vec[T] v) -> option::t[uint] {
     ret none[uint];
 }

+fn lookup(&vec[invk_binding] ibs, ident i) -> option::t[invk_binding] {
+    for (invk_binding ib in ibs) {
+        alt (ib) {
+            case (ident_binding(?p_id, _)) { if (i == p_id) { ret some(ib); }}
+            case (path_binding(?p_id, _)) { if (i == p_id) { ret some(ib); }}
+            case (expr_binding(?p_id, _)) { if (i == p_id) { ret some(ib); }}
+        }
+    }
+    ret none;
+}
+
 // substitute, in a position that's required to be an ident
-fn subst_ident(&ext_ctxt cx, &vec[@ast::expr] args,
-               @vec[ident] param_names, &ident i, ast_fold fld) -> ident {
-    alt (position(i, *param_names)) {
-        case (some[uint](?idx)) {
-            ret expr_to_ident(cx, args.(idx),
-                              "This argument is expanded as an "
-                              + "identifier; it must be one.");
+fn subst_ident(&ext_ctxt cx, &vec[invk_binding] ibs, &ident i, ast_fold fld)
+    -> ident {
+    ret alt (lookup(ibs, i)) {
+        case (some(ident_binding(_, ?a_id))) { a_id.node }
+        case (some(path_binding(_, ?pth))) {
+            cx.span_fatal(pth.span, "This argument is expanded as an "
+                          + "identifier; it must be one.")
         }
-        case (none[uint]) {
-            ret i;
+        case (some(expr_binding(_, ?expr))) {
+            cx.span_fatal(expr.span, "This argument is expanded as an "
+                          + "identifier; it must be one.")
         }
+        case (none) { i }
     }
 }

-fn subst_path(&ext_ctxt cx, &vec[@ast::expr] args,
-              @vec[ident] param_names, &path_ p, ast_fold fld) -> path_ {
+fn subst_path(&ext_ctxt cx, &vec[invk_binding] ibs, &path_ p, ast_fold fld)
+    -> path_ {
     // Don't substitute into qualified names.
     if (len(p.types) > 0u || len(p.idents) != 1u) { ret p; }
-    alt (position(p.idents.(0), *param_names)) {
-        case (some[uint](?idx)) {
-            alt (args.(idx).node) {
-                case (expr_path(?new_path)) {
-                    ret new_path.node;
-                }
-                case (_) {
-                    cx.span_fatal(args.(idx).span,
-                                  "This argument is expanded as a path; "
-                                  + "it must be one.");
-                }
-            }
+    ret alt (lookup(ibs, p.idents.(0))) {
+        case (some(ident_binding(_, ?id))) { rec(idents=[id.node], types=[]) }
+        case (some(path_binding(_, ?a_pth))) { a_pth.node }
+        case (some(expr_binding(_, ?expr))) {
+            cx.span_fatal(expr.span, "This argument is expanded as a "
+                          + "path; it must be one.")
         }
-        case (none[uint]) { ret p; }
+        case (none) { p }
     }
 }

-fn subst_expr(&ext_ctxt cx, &vec[@ast::expr] args, @vec[ident] param_names,
-              &ast::expr_ e, ast_fold fld,
-              fn(&ast::expr_, ast_fold) -> ast::expr_ orig) -> ast::expr_ {
+fn subst_expr(&ext_ctxt cx, &vec[invk_binding] ibs, &ast::expr_ e,
+              ast_fold fld, fn(&ast::expr_, ast_fold) -> ast::expr_ orig)
+    -> ast::expr_ {
     ret alt(e) {
         case (expr_path(?p)){
             // Don't substitute into qualified names.
             if (len(p.node.types) > 0u || len(p.node.idents) != 1u) { e }
-            alt (position(p.node.idents.(0), *param_names)) {
-                case (some[uint](?idx)) {
-                    args.(idx).node
+            alt (lookup(ibs, p.node.idents.(0))) {
+                case (some(ident_binding(_, ?id))) {
+                    expr_path(respan(id.span, rec(idents=[id.node],types=[])))
                 }
-                case (none[uint]) { e }
+                case (some(path_binding(_, ?a_pth))) { expr_path(*a_pth) }
+                case (some(expr_binding(_, ?a_exp))) { a_exp.node }
+                case (none) { orig(e,fld) }
             }
         }
         case (_) { orig(e,fld) }
     }
 }

+type pat_ext = rec(vec[@ast::expr] invk, @ast::expr body);
+
+// maybe box?
+tag invk_binding {
+    expr_binding(ident, @ast::expr);
+    path_binding(ident, @ast::path);
+    ident_binding(ident, ast::spanned[ident]);
+}
+
+fn path_to_ident(&path pth) -> option::t[ident] {
+    if (vec::len(pth.node.idents) == 1u
+        && vec::len(pth.node.types) == 0u) {
+        ret some(pth.node.idents.(0u));
+    }
+    ret none;
+}

 fn add_new_extension(&ext_ctxt cx, span sp, &vec[@ast::expr] args,
                      option::t[str] body) -> tup(str, syntax_extension) {
-    if (len(args) < 2u) {
-        cx.span_fatal(sp, "malformed extension description");
+    let option::t[str] macro_name = none;
+    let vec[pat_ext] pat_exts = [];
+    for (@ast::expr arg in args) {
+        alt(arg.node) {
+            case(expr_vec(?elts, ?mut, ?seq_kind)) {
+
+                if (len(elts) != 2u) {
+                    cx.span_fatal((*arg).span,
+                                  "extension clause must consist of ["
+                                  + "macro invocation, expansion body]");
+                }
+                alt(elts.(0u).node) {
+                    case(expr_ext(?pth, ?invk_args, ?body)) {
+                        let str clause_name = alt(path_to_ident(pth)) {
+                            case (some(?id)) { id }
+                            case (none) {
+                                cx.span_fatal
+                                    (elts.(0u).span,
+                                     "macro name must not be a path")
+                            }
+                        };
+                        if (macro_name == none) {
+                            macro_name = some(clause_name);
+                        } else if (macro_name != some(clause_name)) {
+                            cx.span_fatal(elts.(0u).span, "macros must have"
+                                          + " only one name");
+                        }
+                        pat_exts += [rec(invk=invk_args, body=elts.(1u))];
+                    }
+                    case(_) {
+                        cx.span_fatal(elts.(0u).span, "extension clause must"
+                                      + " start with a macro invocation.");
+                    }
+                }
+            }
+            case(_) {
+                cx.span_fatal((*arg).span, "extension must be [clause, "
+                              + " ...]");
+            }
+        }
     }
+    auto ext = bind generic_extension(_,_,_,_,@pat_exts);
+
+    ret tup(alt (macro_name) {
+                case (some(?id)) { id }
+                case (none) {
+                    cx.span_fatal(sp, "macro definition must have "
+                                  + "at least one clause")
+                }
+            },
+            normal(ext));
+
+
     fn generic_extension(&ext_ctxt cx, span sp, &vec[@ast::expr] args,
-                         option::t[str] body, @vec[ident] param_names,
-                         @ast::expr dest_form) -> @ast::expr {
-        if (len(args) != len(*param_names)) {
-            cx.span_fatal(sp, #fmt("extension expects %u arguments, got %u",
-                                   len(*param_names), len(args)));
+                         option::t[str] body, @vec[pat_ext] clauses)
+        -> @ast::expr {
+
+        /* returns a list of bindings, or none if the match fails. */
+        fn match_invk(@ast::expr pattern, @ast::expr argument)
+            -> option::t[vec[invk_binding]] {
+            auto pat = pattern.node;
+            auto arg = argument.node;
+            ret alt (pat) {
+                case (expr_vec(?p_elts, _, _)) {
+                    alt (arg) {
+                        case (expr_vec(?a_elts, _, _)) {
+                            if (vec::len(p_elts) != vec::len(a_elts)) {
+                                none[vec[invk_binding]]
+                            }
+                            let uint i = 0u;
+                            let vec[invk_binding] res = [];
+                            while (i < vec::len(p_elts)) {
+                                alt (match_invk(p_elts.(i), a_elts.(i))) {
+                                    case (some(?v)) { res += v; }
+                                    case (none) { ret none; }
+                                }
+                                i += 1u;
+                            }
+                            some(res)
+                        }
+                        case (_) { none }
+                    }
+                }
+                case (expr_path(?p_pth)) {
+                    alt (path_to_ident(p_pth)) {
+                        case (some(?p_id)) {
+                            /* let's bind! */
+                            alt (arg) {
+                                case (expr_path(?a_pth)) {
+                                    alt (path_to_ident(a_pth)) {
+                                        case (some(?a_id)) {
+                                            some([ident_binding
+                                                  (p_id, respan(argument.span,
+                                                                a_id))])
+                                        }
+                                        case (none) {
+                                            some([path_binding(p_id, @a_pth)])
+                                        }
+                                    }
+                                }
+                                case (_) {
+                                    some([expr_binding(p_id, argument)])
+                                }
+                            }
+                        }
+                        // FIXME this still compares on internal spans
+                        case (_) { if(pat == arg) { some([]) } else { none } }
+                    }
+                }
+                // FIXME this still compares on internal spans
+                case (_) { if (pat == arg) { some([]) } else { none } }
+            }
+        }
-        auto afp = default_ast_fold();
-        auto f_pre =
-            rec(fold_ident = bind subst_ident(cx, args, param_names, _, _),
-                fold_path = bind subst_path(cx, args, param_names, _, _),
-                fold_expr = bind subst_expr(cx, args, param_names, _, _,
-                                            afp.fold_expr)
+        for (pat_ext pe in *clauses) {
+            if (vec::len(args) != vec::len(pe.invk)) { cont; }
+            let uint i = 0u;
+            let vec[invk_binding] bindings = [];
+            while (i < vec::len(args)) {
+                alt (match_invk(pe.invk.(i), args.(i))) {
+                    case (some(?v)) { bindings += v; }
+                    case (none) { cont }
+                }
+                i += 1u;
+            }
+            auto afp = default_ast_fold();
+            auto f_pre =
+                rec(fold_ident = bind subst_ident(cx, bindings, _, _),
+                    fold_path = bind subst_path(cx, bindings, _, _),
+                    fold_expr = bind subst_expr(cx, bindings, _, _,
+                                                afp.fold_expr)
                 with *afp);
-        auto f = make_fold(f_pre);
-        auto result = f.fold_expr(dest_form);
-        dummy_out(f); //temporary: kill circular reference
-        ret result;
-
-    }
-
-    let vec[ident] param_names = vec::empty[ident]();
-    let uint idx = 1u;
-    while(1u+idx < len(args)) {
-        param_names +=
-            [expr_to_ident(cx, args.(idx),
-                           "this parameter name must be an identifier.")];
-        idx += 1u;
+            auto f = make_fold(f_pre);
+            auto result = f.fold_expr(pe.body);
+            dummy_out(f); //temporary: kill circular reference
+            ret result;
+        }
+        cx.span_fatal(sp, "no clauses match macro invocation");
     }
-
-    ret tup(expr_to_str(cx, args.(0), "first arg must be a literal string."),
-            normal(bind generic_extension(_,_,_,_,@param_names,
-                                          args.(len(args)-1u))));
 }
diff --git a/src/comp/syntax/fold.rs b/src/comp/syntax/fold.rs
index c53665cf71c57..99bdf8caac72a 100644
--- a/src/comp/syntax/fold.rs
+++ b/src/comp/syntax/fold.rs
@@ -402,11 +402,10 @@ fn noop_fold_expr(&expr_ e, ast_fold fld) -> expr_ {
         case (expr_path(?pth)) {
             expr_path(fld.fold_path(pth))
         }
-        case (expr_ext(?pth, ?args, ?body, ?expanded)) {
-            expr_ext(fld.fold_path(pth), map(fld.fold_expr, args),
-                     body, fld.fold_expr(expanded))
+        case (expr_ext(?pth, ?args, ?body)) {
+            expr_ext(fld.fold_path(pth), map(fld.fold_expr, args), body)
         }
-        case (expr_fail(_)) { e }
+        case (expr_fail(?e)) { expr_fail(option::map(fld.fold_expr, e)) }
         case (expr_break()) { e }
         case (expr_cont()) { e }
         case (expr_ret(?e)) {
@@ -433,6 +432,12 @@ fn noop_fold_expr(&expr_ e, ast_fold fld) -> expr_ {
         case (expr_anon_obj(?ao, ?typms)) {
             expr_anon_obj(fold_anon_obj(ao), typms)
         }
+        case (expr_embeded_type(?ty)) {
+            expr_embeded_type(fld.fold_ty(ty))
+        }
+        case (expr_embeded_block(?blk)) {
+            expr_embeded_block(fld.fold_block(blk))
+        }
     }
 }
@@ -681,22 +686,6 @@ fn make_fold(&ast_fold_precursor afp) -> ast_fold {
             fold_path = bind f_path(afp,result,_),
             fold_local = bind f_local(afp,result,_));
     ret result;
-    /*
-    ret rec(fold_crate = noop_fold_crate,
-            fold_crate_directive = noop_fold_crate_drective,
-            fold_view_item = noop_fold_view_item,
-            fold_native_item = noop_fold_native_item,
-            fold_item = noop_fold_item,
-            fold_method = noop_fold_method,
-            fold_block = noop_fold_block,
-            fold_stmt = noop_fold_stmt,
-            fold_arm = noop_fold_arm,
-            fold_pat = noop_fold_pat,
-            fold_decl = noop_fold_decl,
-            fold_expr = noop_fold_expr,
-            fold_ty = noop_fold_ty,
-            fold_constr = noop_fold_constr,
-            fold_fn = noop_fold_fn);*/
 }
diff --git a/src/comp/syntax/parse/lexer.rs b/src/comp/syntax/parse/lexer.rs
index ffc83bcbbea4a..6256e8fe09142 100644
--- a/src/comp/syntax/parse/lexer.rs
+++ b/src/comp/syntax/parse/lexer.rs
@@ -361,12 +361,8 @@ fn next_token(&reader rdr) -> token::token {
         } else { ret token::BINOP(op); }
     }
     alt (c) {
-        case (
-             // One-byte tokens.
-             '?') {
-            rdr.bump();
-            ret token::QUES;
-        }
+        // One-byte tokens.
+        case ('?') { rdr.bump(); ret token::QUES; }
         case (';') { rdr.bump(); ret token::SEMI; }
         case (',') { rdr.bump(); ret token::COMMA; }
         case ('.') { rdr.bump(); ret token::DOT; }
@@ -377,7 +373,18 @@ fn next_token(&reader rdr) -> token::token {
         case ('[') { rdr.bump(); ret token::LBRACKET; }
         case (']') { rdr.bump(); ret token::RBRACKET; }
         case ('@') { rdr.bump(); ret token::AT; }
-        case ('#') { rdr.bump(); ret token::POUND; }
+        case ('#') {
+            rdr.bump();
+            if (rdr.curr() == '<') {
+                rdr.bump();
+                ret token::POUND_LT;
+            }
+            if (rdr.curr() == '{') {
+                rdr.bump();
+                ret token::POUND_LBRACE;
+            }
+            ret token::POUND;
+        }
         case ('~') { rdr.bump(); ret token::TILDE; }
         case (':') {
             rdr.bump();
@@ -386,9 +393,8 @@ fn next_token(&reader rdr) -> token::token {
                 ret token::MOD_SEP;
             } else { ret token::COLON; }
         }
-        case (
-             // Multi-byte tokens.
-             '=') {
+        // Multi-byte tokens.
+        case ('=') {
             rdr.bump();
             if (rdr.curr() == '=') {
                 rdr.bump();
diff --git a/src/comp/syntax/parse/parser.rs b/src/comp/syntax/parse/parser.rs
index 209ddcc7ff61b..d931065b7d231 100644
--- a/src/comp/syntax/parse/parser.rs
+++ b/src/comp/syntax/parse/parser.rs
@@ -49,7 +49,6 @@ type parser =
         fn get_reader() -> lexer::reader ;
         fn get_filemap() -> codemap::filemap ;
         fn get_bad_expr_words() -> hashmap[str, ()] ;
-        fn get_syntax_expanders() -> hashmap[str, ex::syntax_extension] ;
         fn get_chpos() -> uint ;
         fn get_id() -> ast::node_id ;
         fn get_sess() -> parse_sess;
@@ -67,8 +66,7 @@ fn new_parser(parse_sess sess, ast::crate_cfg cfg,
               mutable restriction restr,
               lexer::reader rdr,
               vec[op_spec] precs,
-              hashmap[str, ()] bad_words,
-              hashmap[str, ex::syntax_extension] syntax_expanders) {
+              hashmap[str, ()] bad_words) {
     fn peek() -> token::token { ret tok; }
     fn bump() {
         // log rdr.get_filename()
@@ -101,9 +99,6 @@ fn new_parser(parse_sess sess, ast::crate_cfg cfg,
     fn get_reader() -> lexer::reader { ret rdr; }
     fn get_filemap() -> codemap::filemap { ret rdr.get_filemap(); }
     fn get_bad_expr_words() -> hashmap[str, ()] { ret bad_words; }
-    fn get_syntax_expanders() -> hashmap[str, ex::syntax_extension] {
-        ret syntax_expanders;
-    }
     fn get_chpos() -> uint { ret rdr.get_chpos(); }
     fn get_id() -> ast::node_id { ret next_node_id(sess); }
     fn get_sess() -> parse_sess { ret sess; }
@@ -122,8 +117,7 @@ fn new_parser(parse_sess sess, ast::crate_cfg cfg,
     auto npos = rdr.get_chpos();
     ret stdio_parser(sess, cfg, ftype, lexer::next_token(rdr),
                      npos, npos, npos, UNRESTRICTED, rdr,
-                     prec_table(), bad_expr_word_table(),
-                     ex::syntax_expander_table());
+                     prec_table(), bad_expr_word_table());
 }

 // These are the words that shouldn't be allowed as value identifiers,
@@ -746,6 +740,13 @@ fn parse_bottom_expr(&parser p) -> @ast::expr {
             parse_seq_to_end(token::RBRACKET, some(token::COMMA),
                             parse_expr, p);
         ex = ast::expr_vec(es, mut, ast::sk_rc);
+    } else if (p.peek() == token::POUND_LT) {
+        p.bump();
+        ex = ast::expr_embeded_type(parse_ty(p));
+        expect(p, token::GT);
+    } else if (p.peek() == token::POUND_LBRACE) {
+        p.bump();
+        ex = ast::expr_embeded_block(parse_block_tail(p));
     } else if (p.peek() == token::TILDE) {
         p.bump();
         alt (p.peek()) {
@@ -960,38 +961,7 @@ fn parse_syntax_ext_naked(&parser p, uint lo) -> @ast::expr {
     auto es = parse_seq(token::LPAREN, token::RPAREN, some(token::COMMA),
                         parse_expr, p);
     auto hi = es.span.hi;
-    auto ext_span = rec(lo=lo, hi=hi);
-    auto ex = expand_syntax_ext(p, ext_span, pth, es.node, none);
-    ret mk_expr(p, lo, hi, ex);
-}
-
-/*
- * FIXME: This is a crude approximation of the syntax-extension system,
- * for purposes of prototyping and/or hard-wiring any extensions we
- * wish to use while bootstrapping. The eventual aim is to permit
- * loading rust crates to process extensions.
- */
-fn expand_syntax_ext(&parser p, span sp, &ast::path path,
-                     vec[@ast::expr] args, option::t[str] body) ->
-   ast::expr_ {
-    assert (vec::len(path.node.idents) > 0u);
-    auto extname = path.node.idents.(0);
-    alt (p.get_syntax_expanders().find(extname)) {
-        case (none) { p.fatal("unknown syntax expander: '" + extname + "'"); }
-        case (some(ex::normal(?ext))) {
-            auto ext_cx = ex::mk_ctxt(p.get_sess());
-            ret ast::expr_ext(path, args, body, ext(ext_cx, sp, args, body));
-        }
-        // because we have expansion inside parsing, new macros are only
-        // visible further down the file
-        case (some(ex::macro_defining(?ext))) {
-            auto ext_cx = ex::mk_ctxt(p.get_sess());
-            auto name_and_extension = ext(ext_cx, sp, args, body);
-            p.get_syntax_expanders().insert(name_and_extension._0,
-                                            name_and_extension._1);
-            ret ast::expr_tup(vec::empty[ast::elt]());
-        }
-    }
+    ret mk_expr(p, lo, hi, ast::expr_ext(pth, es.node, none));
 }

 fn parse_self_method(&parser p) -> @ast::expr {
@@ -1579,7 +1549,7 @@ fn stmt_ends_with_semi(&ast::stmt stmt) -> bool {
         case (ast::expr_field(_, _)) { true }
         case (ast::expr_index(_, _)) { true }
         case (ast::expr_path(_)) { true }
-        case (ast::expr_ext(_, _, _, _)) { true }
+        case (ast::expr_ext(_, _, _)) { true }
         case (ast::expr_fail(_)) { true }
         case (ast::expr_break) { true }
         case (ast::expr_cont) { true }
@@ -1603,10 +1573,15 @@ fn stmt_ends_with_semi(&ast::stmt stmt) -> bool {
 }

 fn parse_block(&parser p) -> ast::block {
+    expect(p, token::LBRACE);
+    be parse_block_tail(p);
+}
+
+// some blocks start with "#{"...
+fn parse_block_tail(&parser p) -> ast::block {
     auto lo = p.get_lo_pos();
     let vec[@ast::stmt] stmts = [];
     let option::t[@ast::expr] expr = none;
-    expect(p, token::LBRACE);
     while (p.peek() != token::RBRACE) {
         alt (p.peek()) {
             case (token::SEMI) {
@@ -2094,8 +2069,10 @@ fn parse_outer_attrs_or_ext(&parser p) -> attr_or_ext {
         if (p.peek() == token::LBRACKET) {
             auto first_attr = parse_attribute_naked(p, ast::attr_outer, lo);
             ret some(left([first_attr] + parse_outer_attributes(p)));
-        } else {
+        } else if (! (p.peek() == token::LT || p.peek() == token::LBRACKET)) {
             ret some(right(parse_syntax_ext_naked(p, lo)));
+        } else {
+            ret none;
+        }
     } else { ret none; }
 }
@@ -2320,8 +2297,7 @@ fn parse_native_view(&parser p) -> vec[@ast::view_item] {
 }

 fn parse_crate_from_source_file(&str input, &ast::crate_cfg cfg,
-                                &codemap::codemap cm) -> @ast::crate {
-    auto sess = @rec(cm=cm, mutable next_id=0);
+                                parse_sess sess) -> @ast::crate {
     auto p = new_parser(sess, cfg, input, 0u);
     auto lo = p.get_lo_pos();
     auto crate_attrs = parse_inner_attrs_and_next(p);
@@ -2431,8 +2407,7 @@ fn parse_crate_directives(&parser p, token::token term,
 }

 fn parse_crate_from_crate_file(&str input, &ast::crate_cfg cfg,
-                               &codemap::codemap cm) -> @ast::crate {
-    auto sess = @rec(cm=cm, mutable next_id=0);
+                               parse_sess sess) -> @ast::crate {
     auto p = new_parser(sess, cfg, input, 0u);
     auto lo = p.get_lo_pos();
     auto prefix = std::fs::dirname(p.get_filemap().name);
diff --git a/src/comp/syntax/parse/token.rs b/src/comp/syntax/parse/token.rs
index 2ce5d9e6cb4f9..6ba8c6ce525a3 100644
--- a/src/comp/syntax/parse/token.rs
+++ b/src/comp/syntax/parse/token.rs
@@ -60,6 +60,8 @@ tag token {
     LBRACE;
     RBRACE;
     POUND;
+    POUND_LBRACE;
+    POUND_LT;

     /* Literals */
     LIT_INT(int);
@@ -110,11 +112,8 @@ fn to_str(lexer::reader r, token t) -> str {
         case (ANDAND) { ret "&&"; }
         case (BINOP(?op)) { ret binop_to_str(op); }
         case (BINOPEQ(?op)) { ret binop_to_str(op) + "="; }
-        case ( /* Structural symbols */
-             AT) {
-            ret "@";
-        }
+        case (AT) { ret "@"; }
         case (DOT) { ret "."; }
         case (COMMA) { ret ","; }
         case (SEMI) { ret ";"; }
@@ -133,11 +132,10 @@ fn to_str(lexer::reader r, token t) -> str {
         case (LBRACE) { ret "{"; }
         case (RBRACE) { ret "}"; }
         case (POUND) { ret "#"; }
-        case (
+        case (POUND_LBRACE) { ret "#{"; }
+        case (POUND_LT) { ret "#<"; }
              /* Literals */
-             LIT_INT(?i)) {
-            ret int::to_str(i, 10u);
-        }
+        case (LIT_INT(?i)) { ret int::to_str(i, 10u); }
         case (LIT_UINT(?u)) { ret uint::to_str(u, 10u); }
         case (LIT_MACH_INT(?tm, ?i)) {
             ret int::to_str(i, 10u) + "_" +
@@ -147,25 +145,19 @@ fn to_str(lexer::reader r, token t) -> str {
                 ty_mach_to_str(tm);
         }
         case (LIT_FLOAT(?s)) { ret interner::get[str](*r.get_interner(), s); }
-        case (LIT_STR(?s)) {
-            // FIXME: escape.
-
+        case (LIT_STR(?s)) { // FIXME: escape.
             ret "\"" + interner::get[str](*r.get_interner(), s) + "\"";
         }
         case (LIT_CHAR(?c)) {
             // FIXME: escape.
-
             auto tmp = "'";
             str::push_char(tmp, c);
             str::push_byte(tmp, '\'' as u8);
             ret tmp;
         }
         case (LIT_BOOL(?b)) { if (b) { ret "true"; } else { ret "false"; } }
-        case ( /* Name components */
-             IDENT(?s, _)) {
-            ret interner::get[str](*r.get_interner(), s);
-        }
+        case (IDENT(?s, _)) { ret interner::get[str](*r.get_interner(), s); }
         case (IDX(?i)) { ret "_" + int::to_str(i, 10u); }
         case (UNDERSCORE) { ret "_"; }
         case (BRACEQUOTE(_)) { ret ""; }
diff --git a/src/comp/syntax/print/pprust.rs b/src/comp/syntax/print/pprust.rs
index c3557a9573b07..fb32ea4d2d577 100644
--- a/src/comp/syntax/print/pprust.rs
+++ b/src/comp/syntax/print/pprust.rs
@@ -924,7 +924,7 @@ fn print_expr(&ps s, &@ast::expr expr) {
             print_expr(s, expr);
             pclose(s);
         }
-        case (ast::expr_ext(?path, ?args, ?body, _)) {
+        case (ast::expr_ext(?path, ?args, ?body)) {
             word(s.s, "#");
             print_path(s, path);
             if (vec::len(args) > 0u) {
diff --git a/src/comp/syntax/visit.rs b/src/comp/syntax/visit.rs
index 96dccf28b129a..0be69a91abd84 100644
--- a/src/comp/syntax/visit.rs
+++ b/src/comp/syntax/visit.rs
@@ -355,8 +355,10 @@ fn visit_expr[E](&@expr ex, &E e, &vt[E] v) {
         case (expr_path(?p)) {
             for (@ty tp in p.node.types) { vt(v).visit_ty(tp, e, v); }
         }
-        case (expr_ext(_, _, _, ?expansion)) {
-            vt(v).visit_expr(expansion, e, v);
+        case (expr_ext(_, ?args, _)) {
+            for(@ast::expr arg in args) {
+                vt(v).visit_expr(arg, e, v);
+            }
         }
         case (expr_fail(?eo)) {
             visit_expr_opt(eo, e, v);
@@ -390,6 +392,12 @@ fn visit_expr[E](&@expr ex, &E e, &vt[E] v) {
                          m.node.id, e, v);
             }
         }
+        case (expr_embeded_type(?ty)) {
+            vt(v).visit_ty(ty, e, v);
+        }
+        case (expr_embeded_block(?blk)) {
+            vt(v).visit_block(blk, e, v);
+        }
     }
 }
diff --git a/src/comp/syntax/walk.rs b/src/comp/syntax/walk.rs
index 2b74f81f9b319..c3515700e79ec 100644
--- a/src/comp/syntax/walk.rs
+++ b/src/comp/syntax/walk.rs
@@ -364,10 +364,10 @@ fn walk_expr(&ast_visitor v, @ast::expr e) {
         case (ast::expr_path(?p)) {
             for (@ast::ty tp in p.node.types) { walk_ty(v, tp); }
         }
-        case (ast::expr_ext(_, ?args, ?body, ?expansion)) {
-            // Only walk expansion, not args/body.
-
-            walk_expr(v, expansion);
+        case (ast::expr_ext(_, ?args, _)) {
+            for (@ast::expr e in args) {
+                walk_expr(v, e);
+            }
         }
         case (ast::expr_fail(?eo)) { walk_expr_opt(v, eo); }
         case (ast::expr_break) { }
@@ -407,6 +407,12 @@ fn walk_expr(&ast_visitor v, @ast::expr e) {
                 v.visit_method_post(m);
             }
         }
+        case (ast::expr_embeded_type(?ty)) {
+            walk_ty(v, ty);
+        }
+        case (ast::expr_embeded_block(?blk)) {
+            walk_block(v, blk);
+        }
     }
     v.visit_expr_post(e);
 }
diff --git a/src/test/compile-fail/macro-2.rs b/src/test/compile-fail/macro-2.rs
index c2a706228c931..a48413af65697 100644
--- a/src/test/compile-fail/macro-2.rs
+++ b/src/test/compile-fail/macro-2.rs
@@ -1,6 +1,6 @@
 //error-pattern:expanded as an identifier

 fn main() {
-    #macro("mylambda", x, body, {fn f(int x) -> int {ret body}; f});
+    #macro([#mylambda(x, body), {fn f(int x) -> int {ret body}; f}]);

     assert(#mylambda(y*1, y*2)(8) == 16);
 }
\ No newline at end of file
diff --git a/src/test/compile-fail/macro.rs b/src/test/compile-fail/macro.rs
index b69ad18cda00a..4ce021a9d746a 100644
--- a/src/test/compile-fail/macro.rs
+++ b/src/test/compile-fail/macro.rs
@@ -1,7 +1,7 @@
-//error-pattern:expects 0 arguments, got 16
+//error-pattern:no clauses match

 fn main() {
-    #macro("trivial", 1*2*4*2*1);
+    #macro([#trivial(), 1*2*4*2*1]);

     assert(#trivial(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16) == 16);
 }
diff --git a/src/test/run-pass/macro-2.rs b/src/test/run-pass/macro-2.rs
index a75d67074552e..7d07b19a08519 100644
--- a/src/test/run-pass/macro-2.rs
+++ b/src/test/run-pass/macro-2.rs
@@ -1,5 +1,5 @@
 fn main() {
-    #macro("mylambda", x, body, {fn f(int x) -> int {ret body}; f});
+    #macro([#mylambda(x,body), {fn f(int x) -> int { ret body }; f}]);

     assert(#mylambda(y,y*2)(8) == 16);
 }
\ No newline at end of file
diff --git a/src/test/run-pass/macro-3.rs b/src/test/run-pass/macro-3.rs
index aa9169ed126ba..6b48a9a1bcb98 100644
--- a/src/test/run-pass/macro-3.rs
+++ b/src/test/run-pass/macro-3.rs
@@ -1,5 +1,5 @@
 fn main() {
-    #macro("trivial", 1*2*4*2*1);
+    #macro([#trivial(), 1*2*4*2*1]);

     assert(#trivial() == 16);
 }
diff --git a/src/test/run-pass/macro.rs b/src/test/run-pass/macro.rs
index 77ff122e7c6b2..3618228b1914f 100644
--- a/src/test/run-pass/macro.rs
+++ b/src/test/run-pass/macro.rs
@@ -1,4 +1,4 @@
 fn main() {
-    #macro("m1", a, a*4);
+    #macro([#m1(a), a*4]);
     assert (#m1(2) == 8);
 }
\ No newline at end of file