diff --git a/src/libfuzzer/fuzzer.rs b/src/libfuzzer/fuzzer.rs index a4968382cf478..86e45179cb033 100644 --- a/src/libfuzzer/fuzzer.rs +++ b/src/libfuzzer/fuzzer.rs @@ -225,7 +225,7 @@ fn as_str(f: fn@(+x: io::Writer)) -> ~str { io::with_str_writer(f) } -fn check_variants_of_ast(crate: ast::crate, codemap: codemap::CodeMap, +fn check_variants_of_ast(crate: ast::crate, codemap: @codemap::CodeMap, filename: &Path, cx: context) { let stolen = steal(crate, cx.mode); let extra_exprs = vec::filter(common_exprs(), @@ -239,7 +239,7 @@ fn check_variants_of_ast(crate: ast::crate, codemap: codemap::CodeMap, fn check_variants_T( crate: ast::crate, - codemap: codemap::CodeMap, + codemap: @codemap::CodeMap, filename: &Path, thing_label: ~str, things: ~[T], diff --git a/src/librustc/driver/driver.rs b/src/librustc/driver/driver.rs index 934a02d6dd3e3..4879ae30699a1 100644 --- a/src/librustc/driver/driver.rs +++ b/src/librustc/driver/driver.rs @@ -354,7 +354,7 @@ fn pretty_print_input(sess: Session, cfg: ast::crate_cfg, input: input, ppm_expanded | ppm_normal => pprust::no_ann() }; let is_expanded = upto != cu_parse; - let src = codemap::get_filemap(sess.codemap, source_name(input)).src; + let src = sess.codemap.get_filemap(source_name(input)).src; do io::with_str_reader(*src) |rdr| { pprust::print_crate(sess.codemap, sess.parse_sess.interner, sess.span_diagnostic, crate, @@ -574,7 +574,7 @@ fn build_session_options(binary: ~str, fn build_session(sopts: @session::options, demitter: diagnostic::emitter) -> Session { - let codemap = codemap::new_codemap(); + let codemap = @codemap::CodeMap::new(); let diagnostic_handler = diagnostic::mk_handler(Some(demitter)); let span_diagnostic_handler = @@ -583,7 +583,7 @@ fn build_session(sopts: @session::options, } fn build_session_(sopts: @session::options, - cm: codemap::CodeMap, + cm: @codemap::CodeMap, demitter: diagnostic::emitter, span_diagnostic_handler: diagnostic::span_handler) -> Session { diff --git a/src/librustc/driver/session.rs b/src/librustc/driver/session.rs index ed73bcb6d7259..d2a277e82fbd5 100644 --- a/src/librustc/driver/session.rs +++ b/src/librustc/driver/session.rs @@ -131,7 +131,7 @@ type Session_ = {targ_cfg: @config, opts: @options, cstore: metadata::cstore::CStore, parse_sess: parse_sess, - codemap: codemap::CodeMap, + codemap: @codemap::CodeMap, // For a library crate, this is always none mut main_fn: Option<(node_id, codemap::span)>, span_diagnostic: diagnostic::span_handler, diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index 07fc637a04933..5aed12dba9594 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -557,7 +557,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Serializer, let add_to_index = |copy ebml_w| add_to_index_(item, ebml_w, index); debug!("encoding info for item at %s", - syntax::codemap::span_to_str(item.span, ecx.tcx.sess.codemap)); + ecx.tcx.sess.codemap.span_to_str(item.span)); match item.node { item_const(_, _) => { diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index ec7c2ffc6f1da..ed9549cedd7e6 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -97,7 +97,7 @@ use std::map::HashMap; use syntax::{visit, ast_util}; use syntax::print::pprust::{expr_to_str, block_to_str}; use visit::vt; -use syntax::codemap::{span, span_to_str}; +use syntax::codemap::span; use syntax::ast::*; use io::WriterUtil; use capture::{cap_move, cap_drop, cap_copy, cap_ref}; @@ -170,9 +170,9 @@ impl 
LiveNodeKind : cmp::Eq { fn live_node_kind_to_str(lnk: LiveNodeKind, cx: ty::ctxt) -> ~str { let cm = cx.sess.codemap; match lnk { - FreeVarNode(s) => fmt!("Free var node [%s]", span_to_str(s, cm)), - ExprNode(s) => fmt!("Expr node [%s]", span_to_str(s, cm)), - VarDefNode(s) => fmt!("Var def node [%s]", span_to_str(s, cm)), + FreeVarNode(s) => fmt!("Free var node [%s]", cm.span_to_str(s)), + ExprNode(s) => fmt!("Expr node [%s]", cm.span_to_str(s)), + VarDefNode(s) => fmt!("Var def node [%s]", cm.span_to_str(s)), ExitNode => ~"Exit node" } } diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index f509c15ab5f8d..4941885cafb54 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -919,7 +919,7 @@ fn trans_trace(bcx: block, sp_opt: Option, trace_str: ~str) { let {V_filename, V_line} = match sp_opt { Some(sp) => { let sess = bcx.sess(); - let loc = codemap::lookup_char_pos(sess.parse_sess.cm, sp.lo); + let loc = sess.parse_sess.cm.lookup_char_pos(sp.lo); {V_filename: C_cstr(bcx.ccx(), loc.file.name), V_line: loc.line as int} } diff --git a/src/librustc/middle/trans/build.rs b/src/librustc/middle/trans/build.rs index f7690b7bc9300..f980990517495 100644 --- a/src/librustc/middle/trans/build.rs +++ b/src/librustc/middle/trans/build.rs @@ -645,7 +645,7 @@ fn _UndefReturn(cx: block, Fn: ValueRef) -> ValueRef { fn add_span_comment(bcx: block, sp: span, text: ~str) { let ccx = bcx.ccx(); if !ccx.sess.no_asm_comments() { - let s = text + ~" (" + codemap::span_to_str(sp, ccx.sess.codemap) + let s = text + ~" (" + ccx.sess.codemap.span_to_str(sp) + ~")"; log(debug, s); add_comment(bcx, s); diff --git a/src/librustc/middle/trans/controlflow.rs b/src/librustc/middle/trans/controlflow.rs index 59a733433bf22..0c09b02bb07b1 100644 --- a/src/librustc/middle/trans/controlflow.rs +++ b/src/librustc/middle/trans/controlflow.rs @@ -339,7 +339,7 @@ fn trans_fail_value(bcx: block, sp_opt: Option, V_fail_str: ValueRef) let {V_filename, V_line} = match sp_opt { Some(sp) => { let sess = bcx.sess(); - let loc = codemap::lookup_char_pos(sess.parse_sess.cm, sp.lo); + let loc = sess.parse_sess.cm.lookup_char_pos(sp.lo); {V_filename: C_cstr(bcx.ccx(), loc.file.name), V_line: loc.line as int} } @@ -361,7 +361,7 @@ fn trans_fail_bounds_check(bcx: block, sp: span, let _icx = bcx.insn_ctxt("trans_fail_bounds_check"); let ccx = bcx.ccx(); - let loc = codemap::lookup_char_pos(bcx.sess().parse_sess.cm, sp.lo); + let loc = bcx.sess().parse_sess.cm.lookup_char_pos(sp.lo); let line = C_int(ccx, loc.line as int); let filename_cstr = C_cstr(bcx.ccx(), loc.file.name); let filename = PointerCast(bcx, filename_cstr, T_ptr(T_i8())); diff --git a/src/librustc/middle/trans/debuginfo.rs b/src/librustc/middle/trans/debuginfo.rs index fd18aaaf58ec6..d4d1c8d3b2ecd 100644 --- a/src/librustc/middle/trans/debuginfo.rs +++ b/src/librustc/middle/trans/debuginfo.rs @@ -8,7 +8,7 @@ use trans::build::B; use middle::ty; use syntax::{ast, codemap, ast_util, ast_map}; use syntax::parse::token::ident_interner; -use codemap::span; +use codemap::{span, CharPos}; use ast::Ty; use pat_util::*; use util::ppaux::ty_to_str; @@ -112,7 +112,7 @@ type compile_unit_md = {name: ~str}; type subprogram_md = {id: ast::node_id}; type local_var_md = {id: ast::node_id}; type tydesc_md = {hash: uint}; -type block_md = {start: codemap::loc, end: codemap::loc}; +type block_md = {start: codemap::Loc, end: codemap::Loc}; type argument_md = {id: ast::node_id}; type retval_md = {id: ast::node_id}; @@ -229,8 +229,8 
@@ fn create_file(cx: @crate_ctxt, full_path: ~str) -> @metadata { return mdval; } -fn line_from_span(cm: codemap::CodeMap, sp: span) -> uint { - codemap::lookup_char_pos(cm, sp.lo).line +fn line_from_span(cm: @codemap::CodeMap, sp: span) -> uint { + cm.lookup_char_pos(sp.lo).line } fn create_block(cx: block) -> @metadata { @@ -244,9 +244,9 @@ fn create_block(cx: block) -> @metadata { } let sp = cx.node_info.get().span; - let start = codemap::lookup_char_pos(cx.sess().codemap, sp.lo); + let start = cx.sess().codemap.lookup_char_pos(sp.lo); let fname = start.file.name; - let end = codemap::lookup_char_pos(cx.sess().codemap, sp.hi); + let end = cx.sess().codemap.lookup_char_pos(sp.hi); let tg = LexicalBlockTag; /*alt cached_metadata::<@metadata>( cache, tg, @@ -266,8 +266,8 @@ fn create_block(cx: block) -> @metadata { }; let lldata = ~[lltag(tg), parent, - lli32(start.line as int), - lli32(start.col as int), + lli32(start.line.to_int()), + lli32(start.col.to_int()), file_node.node, lli32(unique_id) ]; @@ -597,7 +597,7 @@ fn create_ty(_cx: @crate_ctxt, _t: ty::t, _ty: @ast::Ty) } fn filename_from_span(cx: @crate_ctxt, sp: codemap::span) -> ~str { - codemap::lookup_char_pos(cx.sess.codemap, sp.lo).file.name + cx.sess.codemap.lookup_char_pos(sp.lo).file.name } fn create_var(type_tag: int, context: ValueRef, name: ~str, file: ValueRef, @@ -629,8 +629,7 @@ fn create_local_var(bcx: block, local: @ast::local) // FIXME this should be handled (#2533) _ => fail ~"no single variable name for local" }; - let loc = codemap::lookup_char_pos(cx.sess.codemap, - local.span.lo); + let loc = cx.sess.codemap.lookup_char_pos(local.span.lo); let ty = node_id_type(bcx, local.node.id); let tymd = create_ty(cx, ty, local.node.ty); let filemd = create_file(cx, loc.file.name); @@ -674,8 +673,7 @@ fn create_arg(bcx: block, arg: ast::arg, sp: span) option::None => () } - let loc = codemap::lookup_char_pos(cx.sess.codemap, - sp.lo); + let loc = cx.sess.codemap.lookup_char_pos(sp.lo); let ty = node_id_type(bcx, arg.id); let tymd = create_ty(cx, ty, arg.ty); let filemd = create_file(cx, loc.file.name); @@ -714,9 +712,9 @@ fn update_source_pos(cx: block, s: span) { } let cm = cx.sess().codemap; let blockmd = create_block(cx); - let loc = codemap::lookup_char_pos(cm, s.lo); - let scopedata = ~[lli32(loc.line as int), - lli32(loc.col as int), + let loc = cm.lookup_char_pos(s.lo); + let scopedata = ~[lli32(loc.line.to_int()), + lli32(loc.col.to_int()), blockmd.node, llnull()]; let dbgscope = llmdnode(scopedata); @@ -731,7 +729,7 @@ fn create_function(fcx: fn_ctxt) -> @metadata { log(debug, fcx.id); let sp = fcx.span.get(); - log(debug, codemap::span_to_str(sp, cx.sess.codemap)); + log(debug, cx.sess.codemap.span_to_str(sp)); let (ident, ret_ty, id) = match cx.tcx.items.get(fcx.id) { ast_map::node_item(item, _) => { @@ -773,8 +771,7 @@ fn create_function(fcx: fn_ctxt) -> @metadata { option::None => () } - let loc = codemap::lookup_char_pos(cx.sess.codemap, - sp.lo); + let loc = cx.sess.codemap.lookup_char_pos(sp.lo); let file_node = create_file(cx, loc.file.name).node; let ty_node = if cx.sess.opts.extra_debuginfo { match ret_ty.node { diff --git a/src/librustc/middle/typeck/infer/region_inference.rs b/src/librustc/middle/typeck/infer/region_inference.rs index bdc764a8d0b09..652d99779d312 100644 --- a/src/librustc/middle/typeck/infer/region_inference.rs +++ b/src/librustc/middle/typeck/infer/region_inference.rs @@ -507,7 +507,7 @@ impl RegionVarBindings { self.undo_log.push(AddVar(vid)); } debug!("created new region variable %? 
with span %?", - vid, codemap::span_to_str(span, self.tcx.sess.codemap)); + vid, self.tcx.sess.codemap.span_to_str(span)); return vid; } diff --git a/src/librustc/rustc.rs b/src/librustc/rustc.rs index 8b832b9ab9f87..789c7ca7073a8 100644 --- a/src/librustc/rustc.rs +++ b/src/librustc/rustc.rs @@ -193,7 +193,7 @@ fn monitor(+f: fn~(diagnostic::emitter)) { // The 'diagnostics emitter'. Every error, warning, etc. should // go through this function. - let demitter = fn@(cmsp: Option<(codemap::CodeMap, codemap::span)>, + let demitter = fn@(cmsp: Option<(@codemap::CodeMap, codemap::span)>, msg: &str, lvl: diagnostic::level) { if lvl == diagnostic::fatal { comm::send(ch, fatal); diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index a9b67a7ff29c9..198b26c4ecc69 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -105,8 +105,9 @@ fn explain_region_and_span(cx: ctxt, region: ty::Region) fn explain_span(cx: ctxt, heading: ~str, span: span) -> (~str, Option) { - let lo = codemap::lookup_char_pos_adj(cx.sess.codemap, span.lo); - (fmt!("the %s at %u:%u", heading, lo.line, lo.col), Some(span)) + let lo = cx.sess.codemap.lookup_char_pos_adj(span.lo); + (fmt!("the %s at %u:%u", heading, + lo.line, lo.col.to_uint()), Some(span)) } } @@ -131,17 +132,17 @@ fn re_scope_id_to_str(cx: ctxt, node_id: ast::node_id) -> ~str { match cx.items.find(node_id) { Some(ast_map::node_block(blk)) => { fmt!("", - codemap::span_to_str(blk.span, cx.sess.codemap)) + cx.sess.codemap.span_to_str(blk.span)) } Some(ast_map::node_expr(expr)) => { match expr.node { ast::expr_call(*) => { fmt!("", - codemap::span_to_str(expr.span, cx.sess.codemap)) + cx.sess.codemap.span_to_str(expr.span)) } ast::expr_match(*) => { fmt!("", - codemap::span_to_str(expr.span, cx.sess.codemap)) + cx.sess.codemap.span_to_str(expr.span)) } ast::expr_assign_op(*) | ast::expr_field(*) | @@ -149,11 +150,11 @@ fn re_scope_id_to_str(cx: ctxt, node_id: ast::node_id) -> ~str { ast::expr_binary(*) | ast::expr_index(*) => { fmt!("", - codemap::span_to_str(expr.span, cx.sess.codemap)) + cx.sess.codemap.span_to_str(expr.span)) } _ => { fmt!("", - codemap::span_to_str(expr.span, cx.sess.codemap)) + cx.sess.codemap.span_to_str(expr.span)) } } } diff --git a/src/librustdoc/astsrv.rs b/src/librustdoc/astsrv.rs index 7b2c6fe5f0cbc..cb97d38b20854 100644 --- a/src/librustdoc/astsrv.rs +++ b/src/librustdoc/astsrv.rs @@ -120,7 +120,7 @@ fn build_ctxt(sess: Session, fn build_session() -> Session { let sopts: @options = basic_options(); - let codemap = codemap::new_codemap(); + let codemap = @codemap::CodeMap::new(); let error_handlers = build_error_handlers(codemap); let {emitter, span_handler} = error_handlers; @@ -137,7 +137,7 @@ type ErrorHandlers = { // Build a custom error handler that will allow us to ignore non-fatal // errors fn build_error_handlers( - codemap: codemap::CodeMap + codemap: @codemap::CodeMap ) -> ErrorHandlers { type DiagnosticHandler = { @@ -156,13 +156,13 @@ fn build_error_handlers( fn note(msg: &str) { self.inner.note(msg) } fn bug(msg: &str) -> ! { self.inner.bug(msg) } fn unimpl(msg: &str) -> ! 
{ self.inner.unimpl(msg) } - fn emit(cmsp: Option<(codemap::CodeMap, codemap::span)>, + fn emit(cmsp: Option<(@codemap::CodeMap, codemap::span)>, msg: &str, lvl: diagnostic::level) { self.inner.emit(cmsp, msg, lvl) } } - let emitter = fn@(cmsp: Option<(codemap::CodeMap, codemap::span)>, + let emitter = fn@(cmsp: Option<(@codemap::CodeMap, codemap::span)>, msg: &str, lvl: diagnostic::level) { diagnostic::emit(cmsp, msg, lvl); }; diff --git a/src/librustdoc/attr_parser.rs b/src/librustdoc/attr_parser.rs index 2b16112fe16c7..4e8b11d2ca6fa 100644 --- a/src/librustdoc/attr_parser.rs +++ b/src/librustdoc/attr_parser.rs @@ -30,7 +30,7 @@ mod test { let parse_sess = syntax::parse::new_parse_sess(None); let parser = parse::new_parser_from_source_str( - parse_sess, ~[], ~"-", codemap::fss_none, @source); + parse_sess, ~[], ~"-", codemap::FssNone, @source); parser.parse_outer_attributes() } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 7452e41fac337..4455db0882695 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -4,7 +4,7 @@ use std::serialization::{Serializable, Deserializable, Serializer, Deserializer}; -use codemap::{span, filename}; +use codemap::{span, FileName}; use parse::token; #[auto_serialize] diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index d3b879da7dd95..73a1c4b7530a0 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -1,7 +1,7 @@ -use codemap::span; +use codemap::{span, BytePos}; use ast::*; -pure fn spanned(lo: uint, hi: uint, +t: T) -> spanned { +pure fn spanned(+lo: BytePos, +hi: BytePos, +t: T) -> spanned { respan(mk_sp(lo, hi), move t) } @@ -14,12 +14,12 @@ pure fn dummy_spanned(+t: T) -> spanned { } /* assuming that we're not in macro expansion */ -pure fn mk_sp(lo: uint, hi: uint) -> span { - {lo: lo, hi: hi, expn_info: None} +pure fn mk_sp(+lo: BytePos, +hi: BytePos) -> span { + span {lo: lo, hi: hi, expn_info: None} } // make this a const, once the compiler supports it -pure fn dummy_sp() -> span { return mk_sp(0u, 0u); } +pure fn dummy_sp() -> span { return mk_sp(BytePos(0), BytePos(0)); } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 8c19814350ca7..da80e26b1afe9 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -6,6 +6,7 @@ use either::Either; use diagnostic::span_handler; use ast_util::{spanned, dummy_spanned}; use parse::comments::{doc_comment_style, strip_doc_comment_decoration}; +use codemap::BytePos; // Constructors export mk_name_value_item_str; @@ -74,7 +75,8 @@ fn mk_attr(item: @ast::meta_item) -> ast::attribute { is_sugared_doc: false}); } -fn mk_sugared_doc_attr(text: ~str, lo: uint, hi: uint) -> ast::attribute { +fn mk_sugared_doc_attr(text: ~str, + +lo: BytePos, +hi: BytePos) -> ast::attribute { let lit = spanned(lo, hi, ast::lit_str(@text)); let attr = { style: doc_comment_style(text), diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 4278e1f199a62..d291d9545eb6a 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -1,179 +1,144 @@ +/*! + +The CodeMap tracks all the source code used within a single crate, mapping +from integer byte positions to the original source code location. Each bit of +source parsed during crate parsing (typically files, in-memory strings, or +various bits of macro expansion) cover a continuous range of bytes in the +CodeMap and are represented by FileMaps. Byte positions are stored in `spans` +and used pervasively in the compiler. 
They are absolute positions within the +CodeMap, which upon request can be converted to line and column information, +source code snippets, etc. + +*/ + use dvec::DVec; use std::serialization::{Serializable, Deserializable, Serializer, Deserializer}; -export filename; -export filemap; -export span; -export file_substr; -export fss_none; -export fss_internal; -export fss_external; -export CodeMap; -export expn_info; -export expn_info_; -export expanded_from; -export new_filemap; -export new_filemap_w_substr; -export mk_substr_filename; -export lookup_char_pos; -export lookup_char_pos_adj; -export adjust_span; -export span_to_str; -export span_to_filename; -export span_to_lines; -export file_lines; -export get_line; -export next_line; -export span_to_snippet; -export loc; -export get_filemap; -export new_codemap; - -type filename = ~str; - -type file_pos = {ch: uint, byte: uint}; - -impl file_pos : cmp::Eq { - pure fn eq(other: &file_pos) -> bool { - self.ch == (*other).ch && self.byte == (*other).byte - } - pure fn ne(other: &file_pos) -> bool { !self.eq(other) } +trait Pos { + static pure fn from_uint(n: uint) -> self; + pure fn to_uint(&self) -> uint; } -/* A codemap is a thing that maps uints to file/line/column positions - * in a crate. This to make it possible to represent the positions - * with single-word things, rather than passing records all over the - * compiler. - */ - -enum file_substr { - fss_none, - fss_internal(span), - fss_external({filename: ~str, line: uint, col: uint}) -} +/// A byte offset +pub enum BytePos = uint; +/// A character offset. Because of multibyte utf8 characters, a byte offset +/// is not equivalent to a character offset. The CodeMap will convert BytePos +/// values to CharPos values as necessary. +pub enum CharPos = uint; -type filemap = - @{name: filename, substr: file_substr, src: @~str, - start_pos: file_pos, mut lines: ~[file_pos]}; +// XXX: Lots of boilerplate in these impls, but so far my attempts to fix +// have been unsuccessful -type CodeMap = @{files: DVec}; - -type loc = {file: filemap, line: uint, col: uint}; - -fn new_codemap() -> CodeMap { @{files: DVec()} } - -fn new_filemap_w_substr(+filename: filename, +substr: file_substr, - src: @~str, - start_pos_ch: uint, start_pos_byte: uint) - -> filemap { - return @{name: filename, substr: substr, src: src, - start_pos: {ch: start_pos_ch, byte: start_pos_byte}, - mut lines: ~[{ch: start_pos_ch, byte: start_pos_byte}]}; +impl BytePos: Pos { + static pure fn from_uint(n: uint) -> BytePos { BytePos(n) } + pure fn to_uint(&self) -> uint { **self } } -fn new_filemap(+filename: filename, src: @~str, - start_pos_ch: uint, start_pos_byte: uint) - -> filemap { - return new_filemap_w_substr(filename, fss_none, src, - start_pos_ch, start_pos_byte); +impl BytePos: cmp::Eq { + pure fn eq(other: &BytePos) -> bool { + *self == **other + } + pure fn ne(other: &BytePos) -> bool { !self.eq(other) } } -fn mk_substr_filename(cm: CodeMap, sp: span) -> ~str -{ - let pos = lookup_char_pos(cm, sp.lo); - return fmt!("<%s:%u:%u>", pos.file.name, pos.line, pos.col); +impl BytePos: cmp::Ord { + pure fn lt(other: &BytePos) -> bool { *self < **other } + pure fn le(other: &BytePos) -> bool { *self <= **other } + pure fn ge(other: &BytePos) -> bool { *self >= **other } + pure fn gt(other: &BytePos) -> bool { *self > **other } } -fn next_line(file: filemap, chpos: uint, byte_pos: uint) { - file.lines.push({ch: chpos, byte: byte_pos + file.start_pos.byte}); +impl BytePos: Num { + pure fn add(other: &BytePos) -> BytePos { + 
BytePos(*self + **other) + } + pure fn sub(other: &BytePos) -> BytePos { + BytePos(*self - **other) + } + pure fn mul(other: &BytePos) -> BytePos { + BytePos(*self * (**other)) + } + pure fn div(other: &BytePos) -> BytePos { + BytePos(*self / **other) + } + pure fn modulo(other: &BytePos) -> BytePos { + BytePos(*self % **other) + } + pure fn neg() -> BytePos { + BytePos(-*self) + } + pure fn to_int() -> int { *self as int } + static pure fn from_int(+n: int) -> BytePos { BytePos(n as uint) } } -type lookup_fn = pure fn(file_pos) -> uint; - -fn lookup_line(map: CodeMap, pos: uint, lookup: lookup_fn) - -> {fm: filemap, line: uint} -{ - let len = map.files.len(); - let mut a = 0u; - let mut b = len; - while b - a > 1u { - let m = (a + b) / 2u; - if lookup(map.files[m].start_pos) > pos { b = m; } else { a = m; } - } - if (a >= len) { - fail fmt!("position %u does not resolve to a source location", pos) - } - let f = map.files[a]; - a = 0u; - b = vec::len(f.lines); - while b - a > 1u { - let m = (a + b) / 2u; - if lookup(f.lines[m]) > pos { b = m; } else { a = m; } - } - return {fm: f, line: a}; +impl BytePos: to_bytes::IterBytes { + pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) { + (*self).iter_bytes(lsb0, f) + } } -fn lookup_pos(map: CodeMap, pos: uint, lookup: lookup_fn) -> loc { - let {fm: f, line: a} = lookup_line(map, pos, lookup); - return {file: f, line: a + 1u, col: pos - lookup(f.lines[a])}; +impl CharPos: Pos { + static pure fn from_uint(n: uint) -> CharPos { CharPos(n) } + pure fn to_uint(&self) -> uint { **self } } -fn lookup_char_pos(map: CodeMap, pos: uint) -> loc { - pure fn lookup(pos: file_pos) -> uint { return pos.ch; } - return lookup_pos(map, pos, lookup); +impl CharPos: cmp::Eq { + pure fn eq(other: &CharPos) -> bool { + *self == **other + } + pure fn ne(other: &CharPos) -> bool { !self.eq(other) } } -fn lookup_byte_pos(map: CodeMap, pos: uint) -> loc { - pure fn lookup(pos: file_pos) -> uint { return pos.byte; } - return lookup_pos(map, pos, lookup); +impl CharPos: cmp::Ord { + pure fn lt(other: &CharPos) -> bool { *self < **other } + pure fn le(other: &CharPos) -> bool { *self <= **other } + pure fn ge(other: &CharPos) -> bool { *self >= **other } + pure fn gt(other: &CharPos) -> bool { *self > **other } } -fn lookup_char_pos_adj(map: CodeMap, pos: uint) - -> {filename: ~str, line: uint, col: uint, file: Option} -{ - let loc = lookup_char_pos(map, pos); - match (loc.file.substr) { - fss_none => { - {filename: /* FIXME (#2543) */ copy loc.file.name, - line: loc.line, - col: loc.col, - file: Some(loc.file)} - } - fss_internal(sp) => { - lookup_char_pos_adj(map, sp.lo + (pos - loc.file.start_pos.ch)) - } - fss_external(eloc) => { - {filename: /* FIXME (#2543) */ copy eloc.filename, - line: eloc.line + loc.line - 1u, - col: if loc.line == 1u {eloc.col + loc.col} else {loc.col}, - file: None} - } +impl CharPos: Num { + pure fn add(other: &CharPos) -> CharPos { + CharPos(*self + **other) } + pure fn sub(other: &CharPos) -> CharPos { + CharPos(*self - **other) + } + pure fn mul(other: &CharPos) -> CharPos { + CharPos(*self * (**other)) + } + pure fn div(other: &CharPos) -> CharPos { + CharPos(*self / **other) + } + pure fn modulo(other: &CharPos) -> CharPos { + CharPos(*self % **other) + } + pure fn neg() -> CharPos { + CharPos(-*self) + } + pure fn to_int() -> int { *self as int } + static pure fn from_int(+n: int) -> CharPos { CharPos(n as uint) } } -fn adjust_span(map: CodeMap, sp: span) -> span { - pure fn lookup(pos: file_pos) -> uint { return pos.ch; } - let line = 
lookup_line(map, sp.lo, lookup); - match (line.fm.substr) { - fss_none => sp, - fss_internal(s) => { - adjust_span(map, {lo: s.lo + (sp.lo - line.fm.start_pos.ch), - hi: s.lo + (sp.hi - line.fm.start_pos.ch), - expn_info: sp.expn_info})} - fss_external(_) => sp +impl CharPos: to_bytes::IterBytes { + pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) { + (*self).iter_bytes(lsb0, f) } } -enum expn_info_ { - expanded_from({call_site: span, - callie: {name: ~str, span: Option}}) +/** +Spans represent a region of code, used for error reporting. Positions in spans +are *absolute* positions from the beginning of the codemap, not positions +relative to FileMaps. Methods on the CodeMap can be used to relate spans back +to the original source. +*/ +pub struct span { + lo: BytePos, + hi: BytePos, + expn_info: Option<@ExpnInfo> } -type expn_info = Option<@expn_info_>; - -type span = {lo: uint, hi: uint, expn_info: expn_info}; impl span : cmp::Eq { pure fn eq(other: &span) -> bool { @@ -193,74 +158,308 @@ impl span: Deserializable { } } -fn span_to_str_no_adj(sp: span, cm: CodeMap) -> ~str { - let lo = lookup_char_pos(cm, sp.lo); - let hi = lookup_char_pos(cm, sp.hi); - return fmt!("%s:%u:%u: %u:%u", lo.file.name, - lo.line, lo.col, hi.line, hi.col) +/// A source code location used for error reporting +pub struct Loc { + /// Information about the original source + file: @FileMap, + /// The (1-based) line number + line: uint, + /// The (0-based) column offset + col: CharPos } -fn span_to_str(sp: span, cm: CodeMap) -> ~str { - let lo = lookup_char_pos_adj(cm, sp.lo); - let hi = lookup_char_pos_adj(cm, sp.hi); - return fmt!("%s:%u:%u: %u:%u", lo.filename, - lo.line, lo.col, hi.line, hi.col) +/// Extra information for tracking macro expansion of spans +pub enum ExpnInfo { + ExpandedFrom({call_site: span, + callie: {name: ~str, span: Option}}) } -type file_lines = {file: filemap, lines: ~[uint]}; +pub type FileName = ~str; -fn span_to_filename(sp: span, cm: codemap::CodeMap) -> filename { - let lo = lookup_char_pos(cm, sp.lo); - return /* FIXME (#2543) */ copy lo.file.name; +pub struct FileLines { + file: @FileMap, + lines: ~[uint] } -fn span_to_lines(sp: span, cm: codemap::CodeMap) -> @file_lines { - let lo = lookup_char_pos(cm, sp.lo); - let hi = lookup_char_pos(cm, sp.hi); - let mut lines = ~[]; - for uint::range(lo.line - 1u, hi.line as uint) |i| { - lines.push(i); - }; - return @{file: lo.file, lines: lines}; +pub enum FileSubstr { + pub FssNone, + pub FssInternal(span), + pub FssExternal({filename: ~str, line: uint, col: CharPos}) } -fn get_line(fm: filemap, line: int) -> ~str unsafe { - let begin: uint = fm.lines[line].byte - fm.start_pos.byte; - let end = match str::find_char_from(*fm.src, '\n', begin) { - Some(e) => e, - None => str::len(*fm.src) - }; - str::slice(*fm.src, begin, end) +/// Identifies an offset of a multi-byte character in a FileMap +pub struct MultiByteChar { + /// The absolute offset of the character in the CodeMap + pos: BytePos, + /// The number of bytes, >=2 + bytes: uint, } -fn lookup_byte_offset(cm: codemap::CodeMap, chpos: uint) - -> {fm: filemap, pos: uint} { - pure fn lookup(pos: file_pos) -> uint { return pos.ch; } - let {fm, line} = lookup_line(cm, chpos, lookup); - let line_offset = fm.lines[line].byte - fm.start_pos.byte; - let col = chpos - fm.lines[line].ch; - let col_offset = str::count_bytes(*fm.src, line_offset, col); - {fm: fm, pos: line_offset + col_offset} +/// A single source in the CodeMap +pub struct FileMap { + /// The name of the file that the source came 
from, source that doesn't + /// originate from files has names between angle brackets by convention, + /// e.g. `` + name: FileName, + /// Extra information used by qquote + substr: FileSubstr, + /// The complete source code + src: @~str, + /// The start position of this source in the CodeMap + start_pos: BytePos, + /// Locations of lines beginnings in the source code + mut lines: ~[BytePos], + /// Locations of multi-byte characters in the source code + multibyte_chars: DVec } -fn span_to_snippet(sp: span, cm: codemap::CodeMap) -> ~str { - let begin = lookup_byte_offset(cm, sp.lo); - let end = lookup_byte_offset(cm, sp.hi); - assert begin.fm.start_pos == end.fm.start_pos; - return str::slice(*begin.fm.src, begin.pos, end.pos); +pub impl FileMap { + fn next_line(&self, +pos: BytePos) { + self.lines.push(pos); + } + + pub fn get_line(&self, line: int) -> ~str unsafe { + let begin: BytePos = self.lines[line] - self.start_pos; + let begin = begin.to_uint(); + let end = match str::find_char_from(*self.src, '\n', begin) { + Some(e) => e, + None => str::len(*self.src) + }; + str::slice(*self.src, begin, end) + } + + pub fn record_multibyte_char(&self, pos: BytePos, bytes: uint) { + assert bytes >=2 && bytes <= 4; + let mbc = MultiByteChar { + pos: pos, + bytes: bytes, + }; + self.multibyte_chars.push(mbc); + } } -fn get_snippet(cm: codemap::CodeMap, fidx: uint, lo: uint, hi: uint) -> ~str -{ - let fm = cm.files[fidx]; - return str::slice(*fm.src, lo, hi) +pub struct CodeMap { + files: DVec<@FileMap> } -fn get_filemap(cm: CodeMap, filename: ~str) -> filemap { - for cm.files.each |fm| { if fm.name == filename { return *fm; } } - //XXjdm the following triggers a mismatched type bug - // (or expected function, found _|_) - fail; // ("asking for " + filename + " which we don't know about"); +pub impl CodeMap { + static pub fn new() -> CodeMap { + CodeMap { + files: DVec() + } + } + + /// Add a new FileMap to the CodeMap and return it + fn new_filemap(+filename: FileName, src: @~str) -> @FileMap { + return self.new_filemap_w_substr(filename, FssNone, src); + } + + fn new_filemap_w_substr(+filename: FileName, +substr: FileSubstr, + src: @~str) -> @FileMap { + let start_pos = if self.files.len() == 0 { + 0 + } else { + let last_start = self.files.last().start_pos.to_uint(); + let last_len = self.files.last().src.len(); + last_start + last_len + }; + + let filemap = @FileMap { + name: filename, substr: substr, src: src, + start_pos: BytePos(start_pos), + mut lines: ~[], + multibyte_chars: DVec() + }; + + self.files.push(filemap); + + return filemap; + } + + pub fn mk_substr_filename(&self, sp: span) -> ~str { + let pos = self.lookup_char_pos(sp.lo); + return fmt!("<%s:%u:%u>", pos.file.name, + pos.line, pos.col.to_uint()); + } + + /// Lookup source information about a BytePos + pub fn lookup_char_pos(&self, +pos: BytePos) -> Loc { + return self.lookup_pos(pos); + } + + pub fn lookup_char_pos_adj(&self, +pos: BytePos) + -> {filename: ~str, line: uint, col: CharPos, file: Option<@FileMap>} + { + let loc = self.lookup_char_pos(pos); + match (loc.file.substr) { + FssNone => { + {filename: /* FIXME (#2543) */ copy loc.file.name, + line: loc.line, + col: loc.col, + file: Some(loc.file)} + } + FssInternal(sp) => { + self.lookup_char_pos_adj( + sp.lo + (pos - loc.file.start_pos)) + } + FssExternal(eloc) => { + {filename: /* FIXME (#2543) */ copy eloc.filename, + line: eloc.line + loc.line - 1u, + col: if loc.line == 1u {eloc.col + loc.col} else {loc.col}, + file: None} + } + } + } + + pub fn adjust_span(&self, 
sp: span) -> span { + let line = self.lookup_line(sp.lo); + match (line.fm.substr) { + FssNone => sp, + FssInternal(s) => { + self.adjust_span(span { + lo: s.lo + (sp.lo - line.fm.start_pos), + hi: s.lo + (sp.hi - line.fm.start_pos), + expn_info: sp.expn_info + }) + } + FssExternal(_) => sp + } + } + + pub fn span_to_str(&self, sp: span) -> ~str { + let lo = self.lookup_char_pos_adj(sp.lo); + let hi = self.lookup_char_pos_adj(sp.hi); + return fmt!("%s:%u:%u: %u:%u", lo.filename, + lo.line, lo.col.to_uint(), hi.line, hi.col.to_uint()) + } + + pub fn span_to_filename(&self, sp: span) -> FileName { + let lo = self.lookup_char_pos(sp.lo); + return /* FIXME (#2543) */ copy lo.file.name; + } + + pub fn span_to_lines(&self, sp: span) -> @FileLines { + let lo = self.lookup_char_pos(sp.lo); + let hi = self.lookup_char_pos(sp.hi); + let mut lines = ~[]; + for uint::range(lo.line - 1u, hi.line as uint) |i| { + lines.push(i); + }; + return @FileLines {file: lo.file, lines: lines}; + } + + pub fn span_to_snippet(&self, sp: span) -> ~str { + let begin = self.lookup_byte_offset(sp.lo); + let end = self.lookup_byte_offset(sp.hi); + assert begin.fm.start_pos == end.fm.start_pos; + return str::slice(*begin.fm.src, + begin.pos.to_uint(), end.pos.to_uint()); + } + + pub fn get_filemap(&self, filename: ~str) -> @FileMap { + for self.files.each |fm| { if fm.name == filename { return *fm; } } + //XXjdm the following triggers a mismatched type bug + // (or expected function, found _|_) + fail; // ("asking for " + filename + " which we don't know about"); + } + +} + +priv impl CodeMap { + + fn lookup_filemap_idx(&self, +pos: BytePos) -> uint { + let len = self.files.len(); + let mut a = 0u; + let mut b = len; + while b - a > 1u { + let m = (a + b) / 2u; + if self.files[m].start_pos > pos { + b = m; + } else { + a = m; + } + } + if (a >= len) { + fail fmt!("position %u does not resolve to a source location", + pos.to_uint()) + } + + return a; + } + + fn lookup_line(&self, +pos: BytePos) + -> {fm: @FileMap, line: uint} + { + let idx = self.lookup_filemap_idx(pos); + let f = self.files[idx]; + let mut a = 0u; + let mut b = vec::len(f.lines); + while b - a > 1u { + let m = (a + b) / 2u; + if f.lines[m] > pos { b = m; } else { a = m; } + } + return {fm: f, line: a}; + } + + fn lookup_pos(&self, +pos: BytePos) -> Loc { + let {fm: f, line: a} = self.lookup_line(pos); + let line = a + 1u; // Line numbers start at 1 + let chpos = self.bytepos_to_local_charpos(pos); + let linebpos = f.lines[a]; + let linechpos = self.bytepos_to_local_charpos(linebpos); + debug!("codemap: byte pos %? is on the line at byte pos %?", + pos, linebpos); + debug!("codemap: char pos %? is on the line at char pos %?", + chpos, linechpos); + debug!("codemap: byte is on line: %?", line); + assert chpos >= linechpos; + return Loc { + file: f, + line: line, + col: chpos - linechpos + }; + } + + fn span_to_str_no_adj(&self, sp: span) -> ~str { + let lo = self.lookup_char_pos(sp.lo); + let hi = self.lookup_char_pos(sp.hi); + return fmt!("%s:%u:%u: %u:%u", lo.file.name, + lo.line, lo.col.to_uint(), hi.line, hi.col.to_uint()) + } + + fn lookup_byte_offset(&self, +bpos: BytePos) + -> {fm: @FileMap, pos: BytePos} { + let idx = self.lookup_filemap_idx(bpos); + let fm = self.files[idx]; + let offset = bpos - fm.start_pos; + return {fm: fm, pos: offset}; + } + + // Converts an absolute BytePos to a CharPos relative to the file it is + // located in + fn bytepos_to_local_charpos(&self, +bpos: BytePos) -> CharPos { + debug!("codemap: converting %? 
to char pos", bpos); + let idx = self.lookup_filemap_idx(bpos); + let map = self.files[idx]; + + // The number of extra bytes due to multibyte chars in the FileMap + let mut total_extra_bytes = 0; + + for map.multibyte_chars.each |mbc| { + debug!("codemap: %?-byte char at %?", mbc.bytes, mbc.pos); + if mbc.pos < bpos { + total_extra_bytes += mbc.bytes; + // We should never see a byte position in the middle of a + // character + assert bpos == mbc.pos + || bpos.to_uint() >= mbc.pos.to_uint() + mbc.bytes; + } else { + break; + } + } + + CharPos(bpos.to_uint() - total_extra_bytes) + } } // diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 855b0ca3ef568..007100856ebc4 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -9,7 +9,7 @@ export codemap_span_handler, codemap_handler; export ice_msg; export expect; -type emitter = fn@(cmsp: Option<(codemap::CodeMap, span)>, +type emitter = fn@(cmsp: Option<(@codemap::CodeMap, span)>, msg: &str, lvl: level); @@ -33,7 +33,7 @@ trait handler { fn note(msg: &str); fn bug(msg: &str) -> !; fn unimpl(msg: &str) -> !; - fn emit(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level); + fn emit(cmsp: Option<(@codemap::CodeMap, span)>, msg: &str, lvl: level); } type handler_t = @{ @@ -43,7 +43,7 @@ type handler_t = @{ type codemap_t = @{ handler: handler, - cm: codemap::CodeMap + cm: @codemap::CodeMap }; impl codemap_t: span_handler { @@ -107,7 +107,7 @@ impl handler_t: handler { self.fatal(ice_msg(msg)); } fn unimpl(msg: &str) -> ! { self.bug(~"unimplemented " + msg); } - fn emit(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level) { + fn emit(cmsp: Option<(@codemap::CodeMap, span)>, msg: &str, lvl: level) { self.emit(cmsp, msg, lvl); } } @@ -116,7 +116,7 @@ fn ice_msg(msg: &str) -> ~str { fmt!("internal compiler error: %s", msg) } -fn mk_span_handler(handler: handler, cm: codemap::CodeMap) -> span_handler { +fn mk_span_handler(handler: handler, cm: @codemap::CodeMap) -> span_handler { @{ handler: handler, cm: cm } as span_handler } @@ -125,7 +125,7 @@ fn mk_handler(emitter: Option) -> handler { let emit = match emitter { Some(e) => e, None => { - let f = fn@(cmsp: Option<(codemap::CodeMap, span)>, + let f = fn@(cmsp: Option<(@codemap::CodeMap, span)>, msg: &str, t: level) { emit(cmsp, msg, t); }; @@ -189,12 +189,12 @@ fn print_diagnostic(topic: ~str, lvl: level, msg: &str) { io::stderr().write_str(fmt!(" %s\n", msg)); } -fn emit(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level) { +fn emit(cmsp: Option<(@codemap::CodeMap, span)>, msg: &str, lvl: level) { match cmsp { Some((cm, sp)) => { - let sp = codemap::adjust_span(cm,sp); - let ss = codemap::span_to_str(sp, cm); - let lines = codemap::span_to_lines(sp, cm); + let sp = cm.adjust_span(sp); + let ss = cm.span_to_str(sp); + let lines = cm.span_to_lines(sp); print_diagnostic(ss, lvl, msg); highlight_lines(cm, sp, lines); print_macro_backtrace(cm, sp); @@ -205,8 +205,8 @@ fn emit(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level) { } } -fn highlight_lines(cm: codemap::CodeMap, sp: span, - lines: @codemap::file_lines) { +fn highlight_lines(cm: @codemap::CodeMap, sp: span, + lines: @codemap::FileLines) { let fm = lines.file; @@ -221,7 +221,7 @@ fn highlight_lines(cm: codemap::CodeMap, sp: span, // Print the offending lines for display_lines.each |line| { io::stderr().write_str(fmt!("%s:%u ", fm.name, *line + 1u)); - let s = codemap::get_line(fm, *line as int) + ~"\n"; + let s = fm.get_line(*line as int) + ~"\n"; 
io::stderr().write_str(s); } if elided { @@ -237,7 +237,7 @@ fn highlight_lines(cm: codemap::CodeMap, sp: span, // If there's one line at fault we can easily point to the problem if vec::len(lines.lines) == 1u { - let lo = codemap::lookup_char_pos(cm, sp.lo); + let lo = cm.lookup_char_pos(sp.lo); let mut digits = 0u; let mut num = (lines.lines[0] + 1u) / 10u; @@ -245,28 +245,28 @@ fn highlight_lines(cm: codemap::CodeMap, sp: span, while num > 0u { num /= 10u; digits += 1u; } // indent past |name:## | and the 0-offset column location - let mut left = str::len(fm.name) + digits + lo.col + 3u; + let mut left = str::len(fm.name) + digits + lo.col.to_uint() + 3u; let mut s = ~""; while left > 0u { str::push_char(&mut s, ' '); left -= 1u; } s += ~"^"; - let hi = codemap::lookup_char_pos(cm, sp.hi); + let hi = cm.lookup_char_pos(sp.hi); if hi.col != lo.col { // the ^ already takes up one space - let mut width = hi.col - lo.col - 1u; + let mut width = hi.col.to_uint() - lo.col.to_uint() - 1u; while width > 0u { str::push_char(&mut s, '~'); width -= 1u; } } io::stderr().write_str(s + ~"\n"); } } -fn print_macro_backtrace(cm: codemap::CodeMap, sp: span) { +fn print_macro_backtrace(cm: @codemap::CodeMap, sp: span) { do option::iter(&sp.expn_info) |ei| { let ss = option::map_default(&ei.callie.span, @~"", - |span| @codemap::span_to_str(*span, cm)); + |span| @cm.span_to_str(*span)); print_diagnostic(*ss, note, fmt!("in expansion of %s!", ei.callie.name)); - let ss = codemap::span_to_str(ei.call_site, cm); + let ss = cm.span_to_str(ei.call_site); print_diagnostic(ss, note, ~"expansion site"); print_macro_backtrace(cm, ei.call_site); } diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index ddf58ce0fef1c..66ef3bec2a747 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -1,7 +1,8 @@ use std::map::HashMap; use parse::parser; use diagnostic::span_handler; -use codemap::{CodeMap, span, expn_info, expanded_from}; +use codemap::{CodeMap, span, ExpnInfo, ExpandedFrom}; +use ast_util::dummy_sp; // obsolete old-style #macro code: // @@ -124,15 +125,15 @@ fn syntax_expander_table() -> HashMap<~str, syntax_extension> { // when a macro expansion occurs, the resulting nodes have the backtrace() // -> expn_info of their expansion context stored into their span. 
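// ---------------------------------------------------------------------------
// Editor's note (not part of the patch): the single-line case of
// highlight_lines() above indents past the "filename:line " prefix and the
// span's start column, prints one '^', then pads with '~' up to the end
// column. A compilable modern-Rust sketch of that layout; the function and
// parameter names are invented here, and the prefix arithmetic is simplified
// (format!() instead of counting digits by hand).

fn underline(file_name: &str, line_no: usize, lo_col: usize, hi_col: usize) -> String {
    // The diagnostic printer emits "name:line " before the source line, so
    // the marker line must skip that prefix plus the start column.
    let prefix = format!("{}:{} ", file_name, line_no);
    let mut s = " ".repeat(prefix.len() + lo_col);
    s.push('^');
    // The '^' already covers one column; '~' marks the rest of the span.
    if hi_col > lo_col + 1 {
        s.push_str(&"~".repeat(hi_col - lo_col - 1));
    }
    s
}

fn main() {
    // A span covering the call `bar` (columns 8..11) on the printed line:
    println!("foo.rs:7 let x = bar(baz);");
    println!("{}", underline("foo.rs", 7, 8, 11));
}
// ---------------------------------------------------------------------------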
trait ext_ctxt { - fn codemap() -> CodeMap; + fn codemap() -> @CodeMap; fn parse_sess() -> parse::parse_sess; fn cfg() -> ast::crate_cfg; fn print_backtrace(); - fn backtrace() -> expn_info; + fn backtrace() -> Option<@ExpnInfo>; fn mod_push(mod_name: ast::ident); fn mod_pop(); fn mod_path() -> ~[ast::ident]; - fn bt_push(ei: codemap::expn_info_); + fn bt_push(ei: codemap::ExpnInfo); fn bt_pop(); fn span_fatal(sp: span, msg: &str) -> !; fn span_err(sp: span, msg: &str); @@ -152,32 +153,34 @@ fn mk_ctxt(parse_sess: parse::parse_sess, cfg: ast::crate_cfg) -> ext_ctxt { type ctxt_repr = {parse_sess: parse::parse_sess, cfg: ast::crate_cfg, - mut backtrace: expn_info, + mut backtrace: Option<@ExpnInfo>, mut mod_path: ~[ast::ident], mut trace_mac: bool}; impl ctxt_repr: ext_ctxt { - fn codemap() -> CodeMap { self.parse_sess.cm } + fn codemap() -> @CodeMap { self.parse_sess.cm } fn parse_sess() -> parse::parse_sess { self.parse_sess } fn cfg() -> ast::crate_cfg { self.cfg } fn print_backtrace() { } - fn backtrace() -> expn_info { self.backtrace } + fn backtrace() -> Option<@ExpnInfo> { self.backtrace } fn mod_push(i: ast::ident) { self.mod_path.push(i); } fn mod_pop() { self.mod_path.pop(); } fn mod_path() -> ~[ast::ident] { return self.mod_path; } - fn bt_push(ei: codemap::expn_info_) { + fn bt_push(ei: codemap::ExpnInfo) { match ei { - expanded_from({call_site: cs, callie: callie}) => { + ExpandedFrom({call_site: cs, callie: callie}) => { self.backtrace = - Some(@expanded_from({ - call_site: {lo: cs.lo, hi: cs.hi, - expn_info: self.backtrace}, + Some(@ExpandedFrom({ + call_site: span {lo: cs.lo, hi: cs.hi, + expn_info: self.backtrace}, callie: callie})); } } } fn bt_pop() { match self.backtrace { - Some(@expanded_from({call_site: {expn_info: prev, _}, _})) => { + Some(@ExpandedFrom({ + call_site: span {expn_info: prev, _}, _ + })) => { self.backtrace = prev } _ => self.bug(~"tried to pop without a push") @@ -311,7 +314,7 @@ fn tt_args_to_original_flavor(cx: ext_ctxt, sp: span, arg: ~[ast::token_tree]) // these spans won't matter, anyways fn ms(m: matcher_) -> matcher { - {node: m, span: {lo: 0u, hi: 0u, expn_info: None}} + {node: m, span: dummy_sp()} } let arg_nm = cx.parse_sess().interner.gensym(@~"arg"); diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 22e2cfcde6b51..69d067f1ddb0a 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -8,7 +8,7 @@ use ext::qquote::{qq_helper}; use parse::{parser, parse_expr_from_source_str, new_parser_from_tt}; -use codemap::{span, expanded_from}; +use codemap::{span, ExpandedFrom}; fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, e: expr_, s: span, fld: ast_fold, @@ -41,7 +41,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, Some(normal({expander: exp, span: exp_sp})) => { let expanded = exp(cx, mac.span, args, body); - cx.bt_push(expanded_from({call_site: s, + cx.bt_push(ExpandedFrom({call_site: s, callie: {name: *extname, span: exp_sp}})); //keep going, outside-in let fully_expanded = fld.fold_expr(expanded).node; @@ -86,7 +86,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, *extname)) }; - cx.bt_push(expanded_from({call_site: s, + cx.bt_push(ExpandedFrom({call_site: s, callie: {name: *extname, span: exp_sp}})); //keep going, outside-in let fully_expanded = fld.fold_expr(expanded).node; @@ -100,7 +100,7 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, tts); let expanded = exp(cx, mac.span, arg, None); - 
cx.bt_push(expanded_from({call_site: s, + cx.bt_push(ExpandedFrom({call_site: s, callie: {name: *extname, span: exp_sp}})); //keep going, outside-in let fully_expanded = fld.fold_expr(expanded).node; @@ -206,7 +206,7 @@ fn expand_item_mac(exts: HashMap<~str, syntax_extension>, } Some(item_tt(expand)) => { let expanded = expand.expander(cx, it.span, it.ident, tts); - cx.bt_push(expanded_from({call_site: it.span, + cx.bt_push(ExpandedFrom({call_site: it.span, callie: {name: *extname, span: expand.span}})); let maybe_it = match expanded { @@ -232,7 +232,7 @@ fn expand_item_mac(exts: HashMap<~str, syntax_extension>, fn new_span(cx: ext_ctxt, sp: span) -> span { /* this discards information in the case of macro-defining macros */ - return {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()}; + return span {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()}; } // FIXME (#2247): this is a terrible kludge to inject some macros into diff --git a/src/libsyntax/ext/pipes/ast_builder.rs b/src/libsyntax/ext/pipes/ast_builder.rs index f03adb90f0bcf..652ad5533c4c7 100644 --- a/src/libsyntax/ext/pipes/ast_builder.rs +++ b/src/libsyntax/ext/pipes/ast_builder.rs @@ -4,7 +4,7 @@ // something smarter. use ast::{ident, node_id}; -use ast_util::{ident_to_path, respan}; +use ast_util::{ident_to_path, respan, dummy_sp}; use codemap::span; use ext::base::mk_ctxt; @@ -23,10 +23,6 @@ fn path(ids: ~[ident], span: span) -> @ast::path { types: ~[]} } -fn empty_span() -> span { - {lo: 0, hi: 0, expn_info: None} -} - trait append_types { fn add_ty(ty: @ast::Ty) -> @ast::path; fn add_tys(+tys: ~[@ast::Ty]) -> @ast::path; @@ -83,26 +79,21 @@ trait ext_ctxt_ast_builder { fn stmt_let(ident: ident, e: @ast::expr) -> @ast::stmt; fn stmt_expr(e: @ast::expr) -> @ast::stmt; fn block_expr(b: ast::blk) -> @ast::expr; - fn empty_span() -> span; fn ty_option(ty: @ast::Ty) -> @ast::Ty; } impl ext_ctxt: ext_ctxt_ast_builder { fn ty_option(ty: @ast::Ty) -> @ast::Ty { self.ty_path_ast_builder(path(~[self.ident_of(~"Option")], - self.empty_span()) + dummy_sp()) .add_ty(ty)) } - fn empty_span() -> span { - {lo: 0, hi: 0, expn_info: self.backtrace()} - } - fn block_expr(b: ast::blk) -> @ast::expr { @{id: self.next_id(), callee_id: self.next_id(), node: ast::expr_block(b), - span: self.empty_span()} + span: dummy_sp()} } fn move_expr(e: @ast::expr) -> @ast::expr { @@ -114,7 +105,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { fn stmt_expr(e: @ast::expr) -> @ast::stmt { @{node: ast::stmt_expr(e, self.next_id()), - span: self.empty_span()} + span: dummy_sp()} } fn stmt_let(ident: ident, e: @ast::expr) -> @ast::stmt { @@ -130,43 +121,43 @@ impl ext_ctxt: ext_ctxt_ast_builder { pat: @{id: self.next_id(), node: ast::pat_ident(ast::bind_by_implicit_ref, path(~[ident], - self.empty_span()), + dummy_sp()), None), - span: self.empty_span()}, + span: dummy_sp()}, init: Some(self.move_expr(e)), id: self.next_id()}, - span: self.empty_span()}]), - span: self.empty_span()}, self.next_id()), - span: self.empty_span()} + span: dummy_sp()}]), + span: dummy_sp()}, self.next_id()), + span: dummy_sp()} } fn field_imm(name: ident, e: @ast::expr) -> ast::field { {node: {mutbl: ast::m_imm, ident: name, expr: e}, - span: self.empty_span()} + span: dummy_sp()} } fn rec(+fields: ~[ast::field]) -> @ast::expr { @{id: self.next_id(), callee_id: self.next_id(), node: ast::expr_rec(fields, None), - span: self.empty_span()} + span: dummy_sp()} } fn ty_field_imm(name: ident, ty: @ast::Ty) -> ast::ty_field { {node: {ident: name, mt: { ty: ty, mutbl: ast::m_imm } }, - span: 
self.empty_span()} + span: dummy_sp()} } fn ty_rec(+fields: ~[ast::ty_field]) -> @ast::Ty { @{id: self.next_id(), node: ast::ty_rec(fields), - span: self.empty_span()} + span: dummy_sp()} } fn ty_infer() -> @ast::Ty { @{id: self.next_id(), node: ast::ty_infer, - span: self.empty_span()} + span: dummy_sp()} } fn ty_param(id: ast::ident, +bounds: ~[ast::ty_param_bound]) @@ -181,9 +172,9 @@ impl ext_ctxt: ext_ctxt_ast_builder { pat: @{id: self.next_id(), node: ast::pat_ident( ast::bind_by_value, - ast_util::ident_to_path(self.empty_span(), name), + ast_util::ident_to_path(dummy_sp(), name), None), - span: self.empty_span()}, + span: dummy_sp()}, id: self.next_id()} } @@ -195,7 +186,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { rules: ast::default_blk}; {node: blk, - span: self.empty_span()} + span: dummy_sp()} } fn expr_block(e: @ast::expr) -> ast::blk { @@ -215,11 +206,11 @@ impl ext_ctxt: ext_ctxt_ast_builder { // XXX: Would be nice if our generated code didn't violate // Rust coding conventions - let non_camel_case_attribute = respan(self.empty_span(), { + let non_camel_case_attribute = respan(dummy_sp(), { style: ast::attr_outer, - value: respan(self.empty_span(), + value: respan(dummy_sp(), ast::meta_list(~"allow", ~[ - @respan(self.empty_span(), + @respan(dummy_sp(), ast::meta_word(~"non_camel_case_types")) ])), is_sugared_doc: false @@ -239,7 +230,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { +ty_params: ~[ast::ty_param], +body: ast::blk) -> @ast::item { self.item(name, - self.empty_span(), + dummy_sp(), ast::item_fn(self.fn_decl(inputs, output), ast::impure_fn, ty_params, @@ -298,7 +289,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { fn ty_nil_ast_builder() -> @ast::Ty { @{id: self.next_id(), node: ast::ty_nil, - span: self.empty_span()} + span: dummy_sp()} } fn item_ty_poly(name: ident, @@ -314,6 +305,6 @@ impl ext_ctxt: ext_ctxt_ast_builder { fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::Ty] { ty_params.map(|p| self.ty_path_ast_builder( - path(~[p.ident], self.empty_span()))) + path(~[p.ident], dummy_sp()))) } } diff --git a/src/libsyntax/ext/pipes/check.rs b/src/libsyntax/ext/pipes/check.rs index fcc0c84a4ff39..a90b679f6974c 100644 --- a/src/libsyntax/ext/pipes/check.rs +++ b/src/libsyntax/ext/pipes/check.rs @@ -22,7 +22,6 @@ that. use ext::base::ext_ctxt; use proto::{state, protocol, next_state}; -use ast_builder::empty_span; impl ext_ctxt: proto::visitor<(), (), ()> { fn visit_proto(_proto: protocol, diff --git a/src/libsyntax/ext/pipes/liveness.rs b/src/libsyntax/ext/pipes/liveness.rs index a9bfd87ab0eb3..e86b3f0ea59e4 100644 --- a/src/libsyntax/ext/pipes/liveness.rs +++ b/src/libsyntax/ext/pipes/liveness.rs @@ -29,8 +29,6 @@ updating the states using rule (2) until there are no changes. 
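// ---------------------------------------------------------------------------
// Editor's note (not part of the patch): the pipes AST builder above drops
// its local empty_span() helper in favour of ast_util::dummy_sp(), i.e. a
// shared zero-width placeholder span for compiler-generated nodes. A minimal
// modern-Rust analogue of that placeholder (type and function names invented
// for the sketch):

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct BytePos(u32);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Span {
    lo: BytePos,
    hi: BytePos,
}

/// Placeholder span for synthesized code that has no real source location.
fn dummy_sp() -> Span {
    Span { lo: BytePos(0), hi: BytePos(0) }
}

fn main() {
    let sp = dummy_sp();
    assert_eq!(sp, Span { lo: BytePos(0), hi: BytePos(0) });
}
// ---------------------------------------------------------------------------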
use std::bitv::{Bitv}; -use ast_builder::empty_span; - fn analyze(proto: protocol, _cx: ext_ctxt) { debug!("initializing colive analysis"); let num_states = proto.num_states(); diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs index 7e1cbe9ad0dbf..d03a0fde66c97 100644 --- a/src/libsyntax/ext/pipes/pipec.rs +++ b/src/libsyntax/ext/pipes/pipec.rs @@ -5,6 +5,7 @@ use to_str::ToStr; use dvec::DVec; use ast::ident; +use ast_util::dummy_sp; use util::interner; use print::pprust; use pprust::{item_to_str, ty_to_str}; @@ -12,7 +13,7 @@ use ext::base::{mk_ctxt, ext_ctxt}; use parse::*; use proto::*; -use ast_builder::{append_types, path, empty_span}; +use ast_builder::{append_types, path}; // Transitional reexports so qquote can find the paths it is looking for mod syntax { @@ -256,11 +257,11 @@ impl state: to_type_decls { cx.ty_path_ast_builder( path(~[cx.ident_of(~"pipes"), cx.ident_of(dir.to_str() + ~"Packet")], - empty_span()) + dummy_sp()) .add_ty(cx.ty_path_ast_builder( path(~[cx.ident_of(self.proto.name), self.data_name()], - empty_span()) + dummy_sp()) .add_tys(cx.ty_vars(self.ty_params))))), self.ty_params)); } @@ -273,11 +274,11 @@ impl state: to_type_decls { path(~[cx.ident_of(~"pipes"), cx.ident_of(dir.to_str() + ~"PacketBuffered")], - empty_span()) + dummy_sp()) .add_tys(~[cx.ty_path_ast_builder( path(~[cx.ident_of(self.proto.name), self.data_name()], - empty_span()) + dummy_sp()) .add_tys(cx.ty_vars(self.ty_params))), self.proto.buffer_ty_path(cx)])), self.ty_params)); @@ -394,7 +395,7 @@ impl protocol: gen_init { cx.item_ty_poly( cx.ident_of(~"__Buffer"), - cx.empty_span(), + dummy_sp(), cx.ty_rec(fields), params) } diff --git a/src/libsyntax/ext/qquote.rs b/src/libsyntax/ext/qquote.rs index af7ffaa73f5bf..888932e58e713 100644 --- a/src/libsyntax/ext/qquote.rs +++ b/src/libsyntax/ext/qquote.rs @@ -4,6 +4,7 @@ use parse::parser; use parse::parser::{Parser, parse_from_source_str}; use dvec::DVec; use parse::token::ident_interner; +use codemap::{CharPos, BytePos}; use fold::*; use visit::*; @@ -15,13 +16,13 @@ use io::*; use codemap::span; struct gather_item { - lo: uint, - hi: uint, + lo: BytePos, + hi: BytePos, e: @ast::expr, constr: ~str } -type aq_ctxt = @{lo: uint, gather: DVec}; +type aq_ctxt = @{lo: BytePos, gather: DVec}; enum fragment { from_expr(@ast::expr), from_ty(@ast::Ty) @@ -114,7 +115,7 @@ impl @ast::pat: qq_helper { fn get_fold_fn() -> ~str {~"fold_pat"} } -fn gather_anti_quotes(lo: uint, node: N) -> aq_ctxt +fn gather_anti_quotes(lo: BytePos, node: N) -> aq_ctxt { let v = @{visit_expr: |node, &&cx, v| visit_aq(node, ~"from_expr", cx, v), visit_ty: |node, &&cx, v| visit_aq(node, ~"from_ty", cx, v), @@ -204,13 +205,13 @@ fn finish -> @ast::expr { let cm = ecx.codemap(); - let str = @codemap::span_to_snippet(body.span, cm); + let str = @cm.span_to_snippet(body.span); debug!("qquote--str==%?", str); - let fname = codemap::mk_substr_filename(cm, body.span); + let fname = cm.mk_substr_filename(body.span); let node = parse_from_source_str - (f, fname, codemap::fss_internal(body.span), str, + (f, fname, codemap::FssInternal(body.span), str, ecx.cfg(), ecx.parse_sess()); - let loc = codemap::lookup_char_pos(cm, body.span.lo); + let loc = cm.lookup_char_pos(body.span.lo); let sp = node.span(); let qcx = gather_anti_quotes(sp.lo, node); @@ -226,7 +227,8 @@ fn finish let mut str2 = ~""; enum state {active, skip(uint), blank}; let mut state = active; - let mut i = 0u, j = 0u; + let mut i = BytePos(0u); + let mut j = 0u; let g_len = cx.gather.len(); 
for str::chars_each(*str) |ch| { if (j < g_len && i == cx.gather[j].lo) { @@ -242,7 +244,7 @@ fn finish blank if is_space(ch) => str::push_char(&mut str2, ch), blank => str::push_char(&mut str2, ' ') } - i += 1u; + i += BytePos(1u); if (j < g_len && i == cx.gather[j].hi) { assert ch == ')'; state = active; @@ -270,7 +272,7 @@ fn finish ~"qquote", ~"mk_file_substr"]), ~[mk_uniq_str(cx,sp, loc.file.name), mk_uint(cx,sp, loc.line), - mk_uint(cx,sp, loc.col)]), + mk_uint(cx,sp, loc.col.to_uint())]), mk_unary(cx,sp, ast::box(ast::m_imm), mk_uniq_str(cx,sp, str2)), cfg_call(), @@ -345,8 +347,8 @@ fn replace_ty(repls: ~[fragment], } fn mk_file_substr(fname: ~str, line: uint, col: uint) -> - codemap::file_substr { - codemap::fss_external({filename: fname, line: line, col: col}) + codemap::FileSubstr { + codemap::FssExternal({filename: fname, line: line, col: CharPos(col)}) } // Local Variables: diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs index bec29c9a83540..df7674264ca11 100644 --- a/src/libsyntax/ext/simplext.rs +++ b/src/libsyntax/ext/simplext.rs @@ -177,7 +177,7 @@ fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr { fn new_id(_old: node_id, cx: ext_ctxt) -> node_id { return cx.next_id(); } fn new_span(cx: ext_ctxt, sp: span) -> span { /* this discards information in the case of macro-defining macros */ - return {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()}; + return span {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()}; } let afp = default_ast_fold(); let f_pre = diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 9b41d90e6d045..93cfaadcbd378 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -1,5 +1,5 @@ use base::*; -use codemap::span; +use codemap::{span, Loc, FileMap}; use print::pprust; use build::{mk_base_vec_e,mk_uint,mk_u8,mk_uniq_str}; @@ -16,7 +16,7 @@ export expand_include_bin; fn expand_line(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body) -> @ast::expr { get_mac_args(cx, sp, arg, 0u, option::Some(0u), ~"line"); - let loc = codemap::lookup_char_pos(cx.codemap(), sp.lo); + let loc = cx.codemap().lookup_char_pos(sp.lo); return mk_uint(cx, sp, loc.line); } @@ -24,8 +24,8 @@ fn expand_line(cx: ext_ctxt, sp: span, arg: ast::mac_arg, fn expand_col(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body) -> @ast::expr { get_mac_args(cx, sp, arg, 0u, option::Some(0u), ~"col"); - let loc = codemap::lookup_char_pos(cx.codemap(), sp.lo); - return mk_uint(cx, sp, loc.col); + let loc = cx.codemap().lookup_char_pos(sp.lo); + return mk_uint(cx, sp, loc.col.to_uint()); } /* file!(): expands to the current filename */ @@ -34,8 +34,8 @@ fn expand_col(cx: ext_ctxt, sp: span, arg: ast::mac_arg, fn expand_file(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body) -> @ast::expr { get_mac_args(cx, sp, arg, 0u, option::Some(0u), ~"file"); - let { file: @{ name: filename, _ }, _ } = - codemap::lookup_char_pos(cx.codemap(), sp.lo); + let Loc { file: @FileMap { name: filename, _ }, _ } = + cx.codemap().lookup_char_pos(sp.lo); return mk_uniq_str(cx, sp, filename); } @@ -58,10 +58,12 @@ fn expand_include(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body) -> @ast::expr { let args = get_mac_args(cx, sp, arg, 1u, option::Some(1u), ~"include"); let file = expr_to_str(cx, args[0], ~"#include_str requires a string"); - let p = parse::new_parser_from_file(cx.parse_sess(), cx.cfg(), - &res_rel_file(cx, sp, &Path(file)), - parse::parser::SOURCE_FILE); 
-    return p.parse_expr();
+    let p = parse::new_parser_from_file(
+        cx.parse_sess(), cx.cfg(),
+        &res_rel_file(cx, sp, &Path(file)),
+        parse::parser::SOURCE_FILE);
+    let e = p.parse_expr();
+    return e;
 }
 
 fn expand_include_str(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
@@ -103,7 +105,7 @@ fn expand_include_bin(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
 fn res_rel_file(cx: ext_ctxt, sp: codemap::span, arg: &Path) -> Path {
     // NB: relative paths are resolved relative to the compilation unit
     if !arg.is_absolute {
-        let cu = Path(codemap::span_to_filename(sp, cx.codemap()));
+        let cu = Path(cx.codemap().span_to_filename(sp));
         cu.dir_path().push_many(arg.components)
     } else {
         copy *arg
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 76814a688f5b7..6779ed263d5a8 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -11,6 +11,7 @@ use dvec::DVec;
 use ast::{matcher, match_tok, match_seq, match_nonterminal, ident};
 use ast_util::mk_sp;
 use std::map::HashMap;
+use codemap::BytePos;
 
 /* This is an Earley-like parser, without support for in-grammar nonterminals,
 only by calling out to the main rust parser for named nonterminals (which it
@@ -102,7 +103,7 @@ type matcher_pos = ~{
     mut up: matcher_pos_up, // mutable for swapping only
     matches: ~[DVec<@named_match>],
     match_lo: uint, match_hi: uint,
-    sp_lo: uint,
+    sp_lo: BytePos,
 };
 
 fn copy_up(&& mpu: matcher_pos_up) -> matcher_pos {
@@ -122,7 +123,7 @@ fn count_names(ms: &[matcher]) -> uint {
 }
 
 #[allow(non_implicitly_copyable_typarams)]
-fn initial_matcher_pos(ms: ~[matcher], sep: Option<Token>, lo: uint)
+fn initial_matcher_pos(ms: ~[matcher], sep: Option<Token>, lo: BytePos)
     -> matcher_pos {
     let mut match_idx_hi = 0u;
     for ms.each() |elt| {
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 31bc375a76d56..8bfd1c0a18d39 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -9,12 +9,13 @@ use macro_parser::{parse, parse_or_else, success, failure, named_match,
                    matched_seq, matched_nonterminal, error};
 use std::map::HashMap;
 use parse::token::special_idents;
+use ast_util::dummy_sp;
 
 fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
                      arg: ~[ast::token_tree]) -> base::mac_result {
     // these spans won't matter, anyways
     fn ms(m: matcher_) -> matcher {
-        {node: m, span: {lo: 0u, hi: 0u, expn_info: None}}
+        {node: m, span: dummy_sp()}
     }
 
     let lhs_nm = cx.parse_sess().interner.gensym(@~"lhs");
@@ -65,7 +66,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
     }
 
     // Which arm's failure should we report? (the one furthest along)
-    let mut best_fail_spot = {lo: 0u, hi: 0u, expn_info: None};
+    let mut best_fail_spot = dummy_sp();
     let mut best_fail_msg = ~"internal error: ran no matchers";
 
     let s_d = cx.parse_sess().span_diagnostic;
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 238f9db6ac537..78f0e4fc8f8bf 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -53,7 +53,7 @@ fn new_tt_reader(sp_diag: span_handler, itr: @ident_interner,
          mut repeat_len: ~[],
          /* dummy values, never read: */
          mut cur_tok: EOF,
-         mut cur_span: ast_util::mk_sp(0u,0u)
+         mut cur_span: ast_util::dummy_sp()
         };
     tt_next_token(r); /* get cur_tok and cur_span set up */
     return r;
diff --git a/src/libsyntax/parse.rs b/src/libsyntax/parse.rs
index e38ee7ff03763..593ff6d034ee0 100644
--- a/src/libsyntax/parse.rs
+++ b/src/libsyntax/parse.rs
@@ -20,33 +20,31 @@ use util::interner;
 use diagnostic::{span_handler, mk_span_handler, mk_handler, emitter};
 use lexer::{reader, string_reader};
 use parse::token::{ident_interner, mk_ident_interner};
+use codemap::{CodeMap, FileMap, CharPos, BytePos};
 
 type parse_sess = @{
-    cm: codemap::CodeMap,
+    cm: @codemap::CodeMap,
     mut next_id: node_id,
     span_diagnostic: span_handler,
     interner: @ident_interner,
-    // these two must be kept up to date
-    mut chpos: uint,
-    mut byte_pos: uint
 };
 
 fn new_parse_sess(demitter: Option<emitter>) -> parse_sess {
-    let cm = codemap::new_codemap();
+    let cm = @CodeMap::new();
     return @{cm: cm,
              mut next_id: 1,
              span_diagnostic: mk_span_handler(mk_handler(demitter), cm),
             interner: mk_ident_interner(),
-             mut chpos: 0u, mut byte_pos: 0u};
+             };
 }
 
-fn new_parse_sess_special_handler(sh: span_handler, cm: codemap::CodeMap)
+fn new_parse_sess_special_handler(sh: span_handler, cm: @codemap::CodeMap)
     -> parse_sess {
     return @{cm: cm,
             mut next_id: 1,
             span_diagnostic: sh,
             interner: mk_ident_interner(),
-             mut chpos: 0u, mut byte_pos: 0u};
+             };
 }
 
 fn parse_crate_from_file(input: &Path, cfg: ast::crate_cfg,
@@ -63,15 +61,13 @@ fn parse_crate_from_file(input: &Path, cfg: ast::crate_cfg,
 
 fn parse_crate_from_crate_file(input: &Path, cfg: ast::crate_cfg,
                                sess: parse_sess) -> @ast::crate {
-    let (p, rdr) = new_parser_etc_from_file(sess, cfg, input,
-                                            parser::CRATE_FILE);
+    let p = new_parser_from_file(sess, cfg, input,
+                                 parser::CRATE_FILE);
     let lo = p.span.lo;
     let prefix = input.dir_path();
     let leading_attrs = p.parse_inner_attrs_and_next();
     let { inner: crate_attrs, next: first_cdir_attr } = leading_attrs;
     let cdirs = p.parse_crate_directives(token::EOF, first_cdir_attr);
-    sess.chpos = rdr.chpos;
-    sess.byte_pos = sess.byte_pos + rdr.pos;
     let cx = @{sess: sess, cfg: /* FIXME (#2543) */ copy p.cfg};
     let companionmod = input.filestem().map(|s| Path(*s));
     let (m, attrs) = eval::eval_crate_directives_to_mod(
@@ -88,75 +84,63 @@ fn parse_crate_from_crate_file(input: &Path, cfg: ast::crate_cfg,
 
 fn parse_crate_from_source_file(input: &Path, cfg: ast::crate_cfg,
                                 sess: parse_sess) -> @ast::crate {
-    let (p, rdr) = new_parser_etc_from_file(sess, cfg, input,
-                                            parser::SOURCE_FILE);
+    let p = new_parser_from_file(sess, cfg, input,
+                                 parser::SOURCE_FILE);
     let r = p.parse_crate_mod(cfg);
-    sess.chpos = rdr.chpos;
-    sess.byte_pos = sess.byte_pos + rdr.pos;
     return r;
 }
 
 fn parse_crate_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
                                sess: parse_sess) -> @ast::crate {
-    let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name,
-                                                  codemap::fss_none, source);
+    let p = new_parser_from_source_str(sess, cfg, name,
+                                       codemap::FssNone, source);
     let r = p.parse_crate_mod(cfg);
     p.abort_if_errors();
-    sess.chpos = rdr.chpos;
-    sess.byte_pos = sess.byte_pos + rdr.pos;
     return r;
 }
 
 fn parse_expr_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
                               sess: parse_sess) -> @ast::expr {
-    let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name,
-                                                  codemap::fss_none, source);
+    let p = new_parser_from_source_str(sess, cfg, name,
+                                       codemap::FssNone, source);
     let r = p.parse_expr();
     p.abort_if_errors();
-    sess.chpos = rdr.chpos;
-    sess.byte_pos = sess.byte_pos + rdr.pos;
     return r;
 }
 
 fn parse_item_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
                               +attrs: ~[ast::attribute],
                               sess: parse_sess) -> Option<@ast::item> {
-    let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name,
-                                                  codemap::fss_none, source);
+    let p = new_parser_from_source_str(sess, cfg, name,
+                                       codemap::FssNone, source);
     let r = p.parse_item(attrs);
     p.abort_if_errors();
-    sess.chpos = rdr.chpos;
-    sess.byte_pos = sess.byte_pos + rdr.pos;
     return r;
 }
 
 fn parse_stmt_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
                               +attrs: ~[ast::attribute],
                               sess: parse_sess) -> @ast::stmt {
-    let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name,
-                                                  codemap::fss_none, source);
+    let p = new_parser_from_source_str(sess, cfg, name,
+                                       codemap::FssNone, source);
     let r = p.parse_stmt(attrs);
     p.abort_if_errors();
-    sess.chpos = rdr.chpos;
-    sess.byte_pos = sess.byte_pos + rdr.pos;
     return r;
 }
 
 fn parse_from_source_str<T>(f: fn (p: Parser) -> T,
-                            name: ~str, ss: codemap::file_substr,
+                            name: ~str, ss: codemap::FileSubstr,
                             source: @~str, cfg: ast::crate_cfg,
                             sess: parse_sess) -> T {
-    let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name, ss,
-                                                  source);
+    let p = new_parser_from_source_str(sess, cfg, name, ss,
+                                       source);
     let r = f(p);
     if !p.reader.is_eof() {
         p.reader.fatal(~"expected end-of-string");
     }
     p.abort_if_errors();
-    sess.chpos = rdr.chpos;
-    sess.byte_pos = sess.byte_pos + rdr.pos;
     move r
 }
 
@@ -168,47 +152,28 @@ fn next_node_id(sess: parse_sess) -> node_id {
     return rv;
 }
 
-fn new_parser_etc_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
-                                  +name: ~str, +ss: codemap::file_substr,
-                                  source: @~str) -> (Parser, string_reader) {
+fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
+                              +name: ~str, +ss: codemap::FileSubstr,
+                              source: @~str) -> Parser {
     let ftype = parser::SOURCE_FILE;
-    let filemap = codemap::new_filemap_w_substr
-        (name, ss, source, sess.chpos, sess.byte_pos);
-    sess.cm.files.push(filemap);
+    let filemap = sess.cm.new_filemap_w_substr(name, ss, source);
     let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap,
                                         sess.interner);
-    return (Parser(sess, cfg, srdr as reader, ftype), srdr);
+    return Parser(sess, cfg, srdr as reader, ftype);
 }
 
-fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
-                              +name: ~str, +ss: codemap::file_substr,
-                              source: @~str) -> Parser {
-    let (p, _) = new_parser_etc_from_source_str(sess, cfg, name, ss, source);
-    move p
-}
-
-
-fn new_parser_etc_from_file(sess: parse_sess, cfg: ast::crate_cfg,
-                            path: &Path, ftype: parser::file_type) ->
-    (Parser, string_reader) {
+fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
+                        path: &Path, ftype: parser::file_type) -> Parser {
     let res = io::read_whole_file_str(path);
     match res {
       result::Ok(_) => { /* Continue. */ }
       result::Err(e) => sess.span_diagnostic.handler().fatal(e)
     }
     let src = @result::unwrap(res);
-    let filemap = codemap::new_filemap(path.to_str(), src,
-                                       sess.chpos, sess.byte_pos);
-    sess.cm.files.push(filemap);
+    let filemap = sess.cm.new_filemap(path.to_str(), src);
     let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap,
                                         sess.interner);
-    return (Parser(sess, cfg, srdr as reader, ftype), srdr);
-}
-
-fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, path: &Path,
-                        ftype: parser::file_type) -> Parser {
-    let (p, _) = new_parser_etc_from_file(sess, cfg, path, ftype);
-    move p
+    return Parser(sess, cfg, srdr as reader, ftype);
 }
 
 fn new_parser_from_tt(sess: parse_sess, cfg: ast::crate_cfg,
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 42101a431d6c6..f0cb1d4ba3e1c 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -14,7 +14,7 @@ trait parser_attr {
         -> attr_or_ext;
     fn parse_outer_attributes() -> ~[ast::attribute];
     fn parse_attribute(style: ast::attr_style) -> ast::attribute;
-    fn parse_attribute_naked(style: ast::attr_style, lo: uint) ->
+    fn parse_attribute_naked(style: ast::attr_style, lo: BytePos) ->
         ast::attribute;
     fn parse_inner_attrs_and_next() ->
         {inner: ~[ast::attribute], next: ~[ast::attribute]};
@@ -85,7 +85,7 @@ impl Parser: parser_attr {
         return self.parse_attribute_naked(style, lo);
     }
 
-    fn parse_attribute_naked(style: ast::attr_style, lo: uint) ->
+    fn parse_attribute_naked(style: ast::attr_style, lo: BytePos) ->
         ast::attribute {
         self.expect(token::LBRACKET);
         let meta_item = self.parse_meta_item();
diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs
index 4f265e1919c2e..2a8bbe3b6d862 100644
--- a/src/libsyntax/parse/comments.rs
+++ b/src/libsyntax/parse/comments.rs
@@ -3,6 +3,7 @@ use io::ReaderUtil;
 use util::interner;
 use lexer::{string_reader, bump, is_eof, nextch, is_whitespace, get_str_from,
             reader};
+use codemap::{FileMap, CharPos};
 
 export cmnt;
 export lit;
@@ -27,7 +28,7 @@ impl cmnt_style : cmp::Eq {
     }
 }
 
-type cmnt = {style: cmnt_style, lines: ~[~str], pos: uint};
+type cmnt = {style: cmnt_style, lines: ~[~str], pos: BytePos};
 
 fn is_doc_comment(s: ~str) -> bool {
     s.starts_with(~"///") ||
@@ -130,13 +131,13 @@ fn consume_non_eol_whitespace(rdr: string_reader) {
 fn push_blank_line_comment(rdr: string_reader, comments: &mut ~[cmnt]) {
     debug!(">>> blank-line comment");
     let v: ~[~str] = ~[];
-    comments.push({style: blank_line, lines: v, pos: rdr.chpos});
+    comments.push({style: blank_line, lines: v, pos: rdr.last_pos});
 }
 
 fn consume_whitespace_counting_blank_lines(rdr: string_reader,
                                            comments: &mut ~[cmnt]) {
     while is_whitespace(rdr.curr) && !is_eof(rdr) {
-        if rdr.col == 0u && rdr.curr == '\n' {
+        if rdr.col == CharPos(0u) && rdr.curr == '\n' {
             push_blank_line_comment(rdr, comments);
         }
         bump(rdr);
@@ -147,7 +148,7 @@ fn consume_whitespace_counting_blank_lines(rdr: string_reader,
 fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool,
                         comments: &mut ~[cmnt]) {
     debug!(">>> shebang comment");
-    let p = rdr.chpos;
+    let p = rdr.last_pos;
     debug!("<<< shebang comment");
     comments.push({
         style: if code_to_the_left { trailing } else { isolated },
@@ -159,7 +160,7 @@ fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool,
 fn read_line_comments(rdr: string_reader, code_to_the_left: bool,
                       comments: &mut ~[cmnt]) {
     debug!(">>> line comments");
-    let p = rdr.chpos;
+    let p = rdr.last_pos;
     let mut lines: ~[~str] = ~[];
     while rdr.curr == '/' && nextch(rdr) == '/' {
         let line = read_one_line_comment(rdr);
@@ -180,6 +181,8 @@ fn read_line_comments(rdr: string_reader, code_to_the_left: bool,
     }
 }
 
+// FIXME #3961: This is not the right way to convert string byte
+// offsets to characters.
 fn all_whitespace(s: ~str, begin: uint, end: uint) -> bool {
     let mut i: uint = begin;
     while i != end {
@@ -189,9 +192,11 @@ fn all_whitespace(s: ~str, begin: uint, end: uint) -> bool {
 }
 
 fn trim_whitespace_prefix_and_push_line(lines: &mut ~[~str],
-                                        s: ~str, col: uint) {
+                                        s: ~str, col: CharPos) {
     let mut s1;
     let len = str::len(s);
+    // FIXME #3961: Doing bytewise comparison and slicing with CharPos
+    let col = col.to_uint();
     if all_whitespace(s, 0u, uint::min(len, col)) {
         if col < len {
             s1 = str::slice(s, col, len);
@@ -204,9 +209,9 @@ fn trim_whitespace_prefix_and_push_line(lines: &mut ~[~str],
 
 fn read_block_comment(rdr: string_reader, code_to_the_left: bool,
                       comments: &mut ~[cmnt]) {
     debug!(">>> block comment");
-    let p = rdr.chpos;
+    let p = rdr.last_pos;
     let mut lines: ~[~str] = ~[];
-    let mut col: uint = rdr.col;
+    let mut col: CharPos = rdr.col;
     bump(rdr);
     bump(rdr);
@@ -279,7 +284,7 @@ fn consume_comment(rdr: string_reader, code_to_the_left: bool,
     debug!("<<< consume comment");
 }
 
-type lit = {lit: ~str, pos: uint};
+type lit = {lit: ~str, pos: BytePos};
 
 fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
                                 path: ~str,
@@ -287,8 +292,10 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
                                 {cmnts: ~[cmnt], lits: ~[lit]} {
     let src = @str::from_bytes(srdr.read_whole_stream());
     let itr = parse::token::mk_fake_ident_interner();
-    let rdr = lexer::new_low_level_string_reader
-        (span_diagnostic, codemap::new_filemap(path, src, 0u, 0u), itr);
+    let cm = CodeMap::new();
+    let filemap = cm.new_filemap(path, src);
+    let rdr = lexer::new_low_level_string_reader(
+        span_diagnostic, filemap, itr);
 
     let mut comments: ~[cmnt] = ~[];
     let mut literals: ~[lit] = ~[];
diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs
index 50c22c08f4f88..1811951fc0e9a 100644
--- a/src/libsyntax/parse/common.rs
+++ b/src/libsyntax/parse/common.rs
@@ -205,7 +205,7 @@ impl Parser: parser_common {
         if self.token == token::GT {
             self.bump();
         } else if self.token == token::BINOP(token::SHR) {
-            self.swap(token::GT, self.span.lo + 1u, self.span.hi);
+            self.swap(token::GT, self.span.lo + BytePos(1u), self.span.hi);
         } else {
             let mut s: ~str = ~"expected `";
             s += token_to_str(self.reader, token::GT);
diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs
index 56c9d4de9f3cd..f08f195446442 100644
--- a/src/libsyntax/parse/eval.rs
+++ b/src/libsyntax/parse/eval.rs
@@ -62,12 +62,10 @@ fn parse_companion_mod(cx: ctx, prefix: &Path, suffix: &Option<Path>)
     let modpath = &companion_file(prefix, suffix);
     if file_exists(modpath) {
         debug!("found companion mod");
-        let (p0, r0) = new_parser_etc_from_file(cx.sess, cx.cfg,
-                                                modpath, SOURCE_FILE);
+        let p0 = new_parser_from_file(cx.sess, cx.cfg,
+                                      modpath, SOURCE_FILE);
         let inner_attrs = p0.parse_inner_attrs_and_next();
         let m0 = p0.parse_mod_items(token::EOF, inner_attrs.next);
-        cx.sess.chpos = r0.chpos;
-        cx.sess.byte_pos = cx.sess.byte_pos + r0.pos;
         return (m0.view_items, m0.items, inner_attrs.inner);
     } else {
         return (~[], ~[], ~[]);
@@ -93,9 +91,9 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: &Path,
         } else {
             prefix.push_many(file_path.components)
         };
-        let (p0, r0) =
-            new_parser_etc_from_file(cx.sess, cx.cfg,
-                                     &full_path, SOURCE_FILE);
+        let p0 =
+            new_parser_from_file(cx.sess, cx.cfg,
+                                 &full_path, SOURCE_FILE);
         let inner_attrs = p0.parse_inner_attrs_and_next();
         let mod_attrs = vec::append(attrs, inner_attrs.inner);
         let first_item_outer_attrs = inner_attrs.next;
@@ -104,9 +102,6 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: &Path,
         let i = p0.mk_item(cdir.span.lo, cdir.span.hi, /* FIXME (#2543) */
                            copy id,
                            ast::item_mod(m0), vis, mod_attrs);
-        // Thread defids, chpos and byte_pos through the parsers
-        cx.sess.chpos = r0.chpos;
-        cx.sess.byte_pos = cx.sess.byte_pos + r0.pos;
         items.push(i);
       }
       ast::cdir_dir_mod(vis, id, cdirs, attrs) => {
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 8f57d733eb51f..6f1f644ab27b1 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -1,5 +1,5 @@
 use diagnostic::span_handler;
-use codemap::span;
+use codemap::{span, CodeMap, CharPos, BytePos};
 use ext::tt::transcribe::{tt_reader, new_tt_reader, dup_tt_reader,
                           tt_next_token};
 
@@ -21,11 +21,15 @@ trait reader {
 type string_reader = @{
     span_diagnostic: span_handler,
     src: @~str,
-    mut col: uint,
-    mut pos: uint,
+    // The absolute offset within the codemap of the next character to read
+    mut pos: BytePos,
+    // The absolute offset within the codemap of the last character read(curr)
+    mut last_pos: BytePos,
+    // The column of the next character to read
+    mut col: CharPos,
+    // The last character to be read
     mut curr: char,
-    mut chpos: uint,
-    filemap: codemap::filemap,
+    filemap: @codemap::FileMap,
     interner: @token::ident_interner,
     /* cached: */
     mut peek_tok: token::Token,
@@ -33,7 +37,7 @@ type string_reader = @{
 };
 
 fn new_string_reader(span_diagnostic: span_handler,
-                     filemap: codemap::filemap,
+                     filemap: @codemap::FileMap,
                      itr: @token::ident_interner) -> string_reader {
     let r = new_low_level_string_reader(span_diagnostic, filemap, itr);
     string_advance_token(r); /* fill in peek_* */
@@ -42,27 +46,29 @@ fn new_string_reader(span_diagnostic: span_handler,
 /* For comments.rs, which hackily pokes into 'pos' and 'curr' */
 fn new_low_level_string_reader(span_diagnostic: span_handler,
-                               filemap: codemap::filemap,
+                               filemap: @codemap::FileMap,
                                itr: @token::ident_interner) -> string_reader {
+    // Force the initial reader bump to start on a fresh line
+    let initial_char = '\n';
     let r = @{span_diagnostic: span_diagnostic, src: filemap.src,
-              mut col: 0u, mut pos: 0u, mut curr: -1 as char,
-              mut chpos: filemap.start_pos.ch,
+              mut pos: filemap.start_pos,
+              mut last_pos: filemap.start_pos,
+              mut col: CharPos(0),
+              mut curr: initial_char,
              filemap: filemap, interner: itr,
              /* dummy values; not read */
              mut peek_tok: token::EOF,
-              mut peek_span: ast_util::mk_sp(0u,0u)};
-    if r.pos < (*filemap.src).len() {
-        let next = str::char_range_at(*r.src, r.pos);
-        r.pos = next.next;
-        r.curr = next.ch;
-    }
+              mut peek_span: ast_util::dummy_sp()};
+    bump(r);
     return r;
 }
 
 fn dup_string_reader(&&r: string_reader) -> string_reader {
     @{span_diagnostic: r.span_diagnostic, src: r.src,
-      mut col: r.col, mut pos: r.pos, mut curr: r.curr, mut chpos: r.chpos,
+      mut pos: r.pos,
+      mut last_pos: r.last_pos,
+      mut col: r.col, mut curr: r.curr,
      filemap: r.filemap, interner: r.interner,
      mut peek_tok: r.peek_tok, mut peek_span: r.peek_span}
 }
@@ -117,34 +123,48 @@ fn string_advance_token(&&r: string_reader) {
     if is_eof(r) {
         r.peek_tok = token::EOF;
     } else {
-        let start_chpos = r.chpos;
+        let start_bytepos = r.last_pos;
         r.peek_tok = next_token_inner(r);
-        r.peek_span = ast_util::mk_sp(start_chpos, r.chpos);
+        r.peek_span = ast_util::mk_sp(start_bytepos, r.last_pos);
     };
 }
 
-fn get_str_from(rdr: string_reader, start: uint) -> ~str unsafe {
+fn byte_offset(rdr: string_reader) -> BytePos {
+    (rdr.pos - rdr.filemap.start_pos)
+}
+
+fn get_str_from(rdr: string_reader, start: BytePos) -> ~str unsafe {
     // I'm pretty skeptical about this subtraction. What if there's a
     // multi-byte character before the mark?
-    return str::slice(*rdr.src, start - 1u, rdr.pos - 1u);
+    return str::slice(*rdr.src, start.to_uint() - 1u,
+                      byte_offset(rdr).to_uint() - 1u);
 }
 
 fn bump(rdr: string_reader) {
-    if rdr.pos < (*rdr.src).len() {
-        rdr.col += 1u;
-        rdr.chpos += 1u;
-        if rdr.curr == '\n' {
-            codemap::next_line(rdr.filemap, rdr.chpos, rdr.pos);
-            rdr.col = 0u;
-        }
-        let next = str::char_range_at(*rdr.src, rdr.pos);
-        rdr.pos = next.next;
+    rdr.last_pos = rdr.pos;
+    let current_byte_offset = byte_offset(rdr).to_uint();;
+    if current_byte_offset < (*rdr.src).len() {
+        let last_char = rdr.curr;
+        let next = str::char_range_at(*rdr.src, current_byte_offset);
+        let byte_offset_diff = next.next - current_byte_offset;
+        rdr.pos = rdr.pos + BytePos(byte_offset_diff);
         rdr.curr = next.ch;
+        rdr.col += CharPos(1u);
+        if last_char == '\n' {
+            rdr.filemap.next_line(rdr.last_pos);
+            rdr.col = CharPos(0u);
+        }
+
+        if byte_offset_diff > 1 {
+            rdr.filemap.record_multibyte_char(
+                BytePos(current_byte_offset), byte_offset_diff);
+        }
     } else {
+        // XXX: What does this accomplish?
         if (rdr.curr != -1 as char) {
-            rdr.col += 1u;
-            rdr.chpos += 1u;
+            rdr.pos = rdr.pos + BytePos(1u);
+            rdr.col += CharPos(1u);
             rdr.curr = -1 as char;
         }
     }
@@ -153,8 +173,9 @@ fn is_eof(rdr: string_reader) -> bool { rdr.curr == -1 as char }
 
 fn nextch(rdr: string_reader) -> char {
-    if rdr.pos < (*rdr.src).len() {
-        return str::char_at(*rdr.src, rdr.pos);
+    let offset = byte_offset(rdr).to_uint();
+    if offset < (*rdr.src).len() {
+        return str::char_at(*rdr.src, offset);
     } else { return -1 as char; }
 }
 
@@ -211,7 +232,7 @@ fn consume_any_line_comment(rdr: string_reader)
         bump(rdr);
         // line comments starting with "///" or "//!" are doc-comments
         if rdr.curr == '/' || rdr.curr == '!' {
-            let start_chpos = rdr.chpos - 2u;
+            let start_bpos = rdr.pos - BytePos(2u);
             let mut acc = ~"//";
             while rdr.curr != '\n' && !is_eof(rdr) {
                 str::push_char(&mut acc, rdr.curr);
@@ -219,7 +240,7 @@ fn consume_any_line_comment(rdr: string_reader)
             }
             return Some({
                 tok: token::DOC_COMMENT(rdr.interner.intern(@acc)),
-                sp: ast_util::mk_sp(start_chpos, rdr.chpos)
+                sp: ast_util::mk_sp(start_bpos, rdr.pos)
             });
         } else {
             while rdr.curr != '\n' && !is_eof(rdr) { bump(rdr); }
@@ -232,10 +253,10 @@ fn consume_any_line_comment(rdr: string_reader)
         }
     } else if rdr.curr == '#' {
         if nextch(rdr) == '!' {
-            let cmap = codemap::new_codemap();
+            let cmap = @CodeMap::new();
             (*cmap).files.push(rdr.filemap);
-            let loc = codemap::lookup_char_pos_adj(cmap, rdr.chpos);
-            if loc.line == 1u && loc.col == 0u {
+            let loc = cmap.lookup_char_pos_adj(rdr.last_pos);
+            if loc.line == 1u && loc.col == CharPos(0u) {
                while rdr.curr != '\n' && !is_eof(rdr) { bump(rdr); }
                return consume_whitespace_and_comments(rdr);
             }
@@ -250,7 +271,7 @@ fn consume_block_comment(rdr: string_reader)
     // block comments starting with "/**" or "/*!" are doc-comments
     if rdr.curr == '*' || rdr.curr == '!' {
-        let start_chpos = rdr.chpos - 2u;
+        let start_bpos = rdr.pos - BytePos(2u);
         let mut acc = ~"/*";
         while !(rdr.curr == '*' && nextch(rdr) == '/') && !is_eof(rdr) {
             str::push_char(&mut acc, rdr.curr);
@@ -264,7 +285,7 @@ fn consume_block_comment(rdr: string_reader)
         bump(rdr);
         return Some({
             tok: token::DOC_COMMENT(rdr.interner.intern(@acc)),
-            sp: ast_util::mk_sp(start_chpos, rdr.chpos)
+            sp: ast_util::mk_sp(start_bpos, rdr.pos)
         });
     }
     } else {
@@ -584,7 +605,7 @@ fn next_token_inner(rdr: string_reader) -> token::Token {
         return token::LIT_INT(c2 as i64, ast::ty_char);
       }
       '"' => {
-        let n = rdr.chpos;
+        let n = byte_offset(rdr);
         bump(rdr);
         while rdr.curr != '"' {
             if is_eof(rdr) {
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 2b42dcc0ed06a..74d06789ad899 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -5,7 +5,7 @@ use either::{Either, Left, Right};
 use std::map::HashMap;
 use token::{can_begin_expr, is_ident, is_ident_or_path, is_plain_ident,
             INTERPOLATED, special_idents};
-use codemap::{span,fss_none};
+use codemap::{span,FssNone, BytePos};
 use util::interner::Interner;
 use ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec};
 use lexer::reader;
@@ -244,7 +244,7 @@ impl Parser {
         self.token = next.tok;
         self.span = next.sp;
     }
-    fn swap(next: token::Token, lo: uint, hi: uint) {
+    fn swap(next: token::Token, +lo: BytePos, +hi: BytePos) {
         self.token = next;
         self.span = mk_sp(lo, hi);
     }
@@ -904,12 +904,12 @@ impl Parser {
         return spanned(lo, e.span.hi, {mutbl: m, ident: i, expr: e});
     }
 
-    fn mk_expr(lo: uint, hi: uint, +node: expr_) -> @expr {
+    fn mk_expr(+lo: BytePos, +hi: BytePos, +node: expr_) -> @expr {
         return @{id: self.get_id(), callee_id: self.get_id(),
                  node: node, span: mk_sp(lo, hi)};
     }
 
-    fn mk_mac_expr(lo: uint, hi: uint, m: mac_) -> @expr {
+    fn mk_mac_expr(+lo: BytePos, +hi: BytePos, m: mac_) -> @expr {
         return @{id: self.get_id(),
                  callee_id: self.get_id(),
                  node: expr_mac({node: m, span: mk_sp(lo, hi)}),
@@ -1134,7 +1134,7 @@ impl Parser {
         return self.mk_expr(lo, hi, ex);
     }
 
-    fn parse_block_expr(lo: uint, blk_mode: blk_check_mode) -> @expr {
+    fn parse_block_expr(lo: BytePos, blk_mode: blk_check_mode) -> @expr {
         self.expect(token::LBRACE);
         let blk = self.parse_block_tail(lo, blk_mode);
         return self.mk_expr(blk.span.lo, blk.span.hi, expr_block(blk));
@@ -1146,7 +1146,7 @@ impl Parser {
         return self.parse_syntax_ext_naked(lo);
     }
 
-    fn parse_syntax_ext_naked(lo: uint) -> @expr {
+    fn parse_syntax_ext_naked(lo: BytePos) -> @expr {
         match self.token {
           token::IDENT(_, _) => (),
          _ => self.fatal(~"expected a syntax expander name")
@@ -2279,11 +2279,11 @@ impl Parser {
     // I guess that also means "already parsed the 'impure'" if
     // necessary, and this should take a qualifier.
     // some blocks start with "#{"...
-    fn parse_block_tail(lo: uint, s: blk_check_mode) -> blk {
+    fn parse_block_tail(lo: BytePos, s: blk_check_mode) -> blk {
         self.parse_block_tail_(lo, s, ~[])
     }
 
-    fn parse_block_tail_(lo: uint, s: blk_check_mode,
+    fn parse_block_tail_(lo: BytePos, s: blk_check_mode,
                          +first_item_attrs: ~[attribute]) -> blk {
         let mut stmts = ~[];
         let mut expr = None;
@@ -2581,7 +2581,7 @@ impl Parser {
         return {ident: id, tps: ty_params};
     }
 
-    fn mk_item(lo: uint, hi: uint, +ident: ident,
+    fn mk_item(+lo: BytePos, +hi: BytePos, +ident: ident,
                +node: item_, vis: visibility,
                +attrs: ~[attribute]) -> @item {
         return @{ident: ident,
@@ -3037,7 +3037,7 @@ impl Parser {
                 items: items};
     }
 
-    fn parse_item_foreign_mod(lo: uint,
+    fn parse_item_foreign_mod(lo: BytePos,
                               visibility: visibility,
                               attrs: ~[attribute],
                               items_allowed: bool)
@@ -3092,7 +3092,7 @@ impl Parser {
         });
     }
 
-    fn parse_type_decl() -> {lo: uint, ident: ident} {
+    fn parse_type_decl() -> {lo: BytePos, ident: ident} {
         let lo = self.last_span.lo;
         let id = self.parse_ident();
         return {lo: lo, ident: id};
@@ -3415,9 +3415,8 @@ impl Parser {
                                   |p| p.parse_token_tree());
             let m = ast::mac_invoc_tt(pth, tts);
             let m: ast::mac = {node: m,
-                               span: {lo: self.span.lo,
-                                      hi: self.span.hi,
-                                      expn_info: None}};
+                               span: mk_sp(self.span.lo,
+                                           self.span.hi)};
             let item_ = item_mac(m);
             return iovi_item(self.mk_item(lo, self.last_span.hi, id, item_,
                                           visibility, attrs));
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 0418f6776de6b..949d2defa9a2a 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -1,5 +1,5 @@
 use parse::{comments, lexer, token};
-use codemap::CodeMap;
+use codemap::{CodeMap, BytePos};
 use pp::{break_offset, word, printer, space, zerobreak, hardbreak, breaks};
 use pp::{consistent, inconsistent, eof};
 use ast::{required, provided};
@@ -25,7 +25,7 @@ fn no_ann() -> pp_ann {
 
 type ps =
     @{s: pp::printer,
-      cm: Option<CodeMap>,
+      cm: Option<@CodeMap>,
       intr: @token::ident_interner,
       comments: Option<~[comments::cmnt]>,
       literals: Option<~[comments::lit]>,
@@ -46,7 +46,7 @@ fn end(s: ps) {
 
 fn rust_printer(writer: io::Writer, intr: @ident_interner) -> ps {
     return @{s: pp::mk_printer(writer, default_columns),
-             cm: None::<CodeMap>,
+             cm: None::<@CodeMap>,
             intr: intr,
             comments: None::<~[comments::cmnt]>,
             literals: None::<~[comments::lit]>,
@@ -64,7 +64,7 @@ const default_columns: uint = 78u;
 // Requires you to pass an input filename and reader so that
 // it can scan the input text for comments and literals to
 // copy forward.
-fn print_crate(cm: CodeMap, intr: @ident_interner,
+fn print_crate(cm: @CodeMap, intr: @ident_interner,
               span_diagnostic: diagnostic::span_handler,
               crate: @ast::crate, filename: ~str, in: io::Reader,
               out: io::Writer, ann: pp_ann, is_expanded: bool) {
@@ -631,7 +631,7 @@ fn print_variants(s: ps, variants: ~[ast::variant], span: ast::span) {
         print_variant(s, *v);
         word(s.s, ~",");
         end(s);
-        maybe_print_trailing_comment(s, v.span, None::<uint>);
+        maybe_print_trailing_comment(s, v.span, None);
     }
     bclose(s, span);
 }
@@ -886,7 +886,7 @@ fn print_stmt(s: ps, st: ast::stmt) {
       }
     }
     if parse::classify::stmt_ends_with_semi(st) { word(s.s, ~";"); }
-    maybe_print_trailing_comment(s, st.span, None::<uint>);
+    maybe_print_trailing_comment(s, st.span, None);
 }
 
 fn print_block(s: ps, blk: ast::blk) {
@@ -1898,15 +1898,15 @@ fn print_ty_fn(s: ps,
 }
 
 fn maybe_print_trailing_comment(s: ps, span: codemap::span,
-                                next_pos: Option<uint>) {
+                                next_pos: Option<BytePos>) {
     let mut cm;
     match s.cm { Some(ccm) => cm = ccm, _ => return }
     match next_comment(s) {
       Some(cmnt) => {
        if cmnt.style != comments::trailing { return; }
-        let span_line = codemap::lookup_char_pos(cm, span.hi);
-        let comment_line = codemap::lookup_char_pos(cm, cmnt.pos);
-        let mut next = cmnt.pos + 1u;
+        let span_line = cm.lookup_char_pos(span.hi);
+        let comment_line = cm.lookup_char_pos(cmnt.pos);
+        let mut next = cmnt.pos + BytePos(1u);
         match next_pos { None => (), Some(p) => next = p }
         if span.hi < cmnt.pos && cmnt.pos < next &&
            span_line.line == comment_line.line {
@@ -1981,7 +1981,7 @@ fn lit_to_str(l: @ast::lit) -> ~str {
     return to_str(l, print_literal, parse::token::mk_fake_ident_interner());
 }
 
-fn next_lit(s: ps, pos: uint) -> Option<comments::lit> {
+fn next_lit(s: ps, pos: BytePos) -> Option<comments::lit> {
     match s.literals {
       Some(lits) => {
         while s.cur_lit < vec::len(lits) {
@@ -1996,7 +1996,7 @@ fn next_lit(s: ps, pos: uint) -> Option<comments::lit> {
     }
 }
 
-fn maybe_print_comment(s: ps, pos: uint) {
+fn maybe_print_comment(s: ps, pos: BytePos) {
     loop {
         match next_comment(s) {
           Some(cmnt) => {
diff --git a/src/libsyntax/syntax.rc b/src/libsyntax/syntax.rc
index 7c73deed1a91f..9195f1fd8f7d9 100644
--- a/src/libsyntax/syntax.rc
+++ b/src/libsyntax/syntax.rc
@@ -25,7 +25,6 @@ use core::*;
 mod attr;
 #[legacy_exports]
 mod diagnostic;
-#[legacy_exports]
 mod codemap;
 #[legacy_exports]
 mod ast;