fix line numbers in error messages. (start with 1.)

This commit is contained in:
Havoc Pennington 2011-11-16 21:04:59 -05:00
parent e1628ee6de
commit e782f6c1ad
4 changed files with 48 additions and 10 deletions

View File

@@ -42,7 +42,7 @@ final class Parser {
     ParseContext(ConfigSyntax flavor, ConfigOrigin origin,
             Iterator<Token> tokens, ConfigIncluder includer,
             ConfigIncludeContext includeContext) {
-        lineNumber = 0;
+        lineNumber = 1;
         buffer = new Stack<Token>();
         this.tokens = tokens;
         this.flavor = flavor;
@@ -79,7 +79,9 @@ final class Parser {
     private Token nextTokenIgnoringNewline() {
         Token t = nextToken();
         while (Tokens.isNewline(t)) {
-            lineNumber = Tokens.getLineNumber(t);
+            // line number tokens have the line that was _ended_ by the
+            // newline, so we have to add one.
+            lineNumber = Tokens.getLineNumber(t) + 1;
             t = nextToken();
         }
         return t;

View File

@@ -95,7 +95,7 @@ final class Tokenizer {
         this.input = input;
         this.allowComments = allowComments;
         this.buffer = new LinkedList<Integer>();
-        lineNumber = 0;
+        lineNumber = 1;
         tokens = new LinkedList<Token>();
         tokens.add(Tokens.START);
         whitespaceSaver = new WhitespaceSaver();

View File

@@ -276,4 +276,40 @@ class ConfParserTest extends TestUtils {
        assertEquals(42, obj.getInt("/a/b/c"))
        assertEquals(32, obj.getInt("x/y/z"))
    }
// Parses `text` expecting a ConfigException, then asserts that the
// exception message mentions the 1-based line number `num` (as "N:").
// NOTE(review): contains(num + ":") can false-match a larger number
// (e.g. "1:" inside "21:"); acceptable for the small line counts used here.
// Uses explicit `: Unit =` instead of deprecated procedure syntax.
private def lineNumberTest(num: Int, text: String): Unit = {
    val e = intercept[ConfigException] {
        parseObject(text)
    }
    if (!e.getMessage.contains(num + ":"))
        throw new Exception("error message did not contain line '" + num + "' '" + text.replace("\n", "\\n") + "'", e)
}
@Test
def lineNumbersInErrors(): Unit = {
    // Table of (expected 1-based line number, config text that fails there).
    val cases = Seq(
        // error is at the last char
        1 -> "}",
        2 -> "\n}",
        3 -> "\n\n}",
        // error is before a final newline
        1 -> "}\n",
        2 -> "\n}\n",
        3 -> "\n\n}\n",
        // with unquoted string
        1 -> "foo",
        2 -> "\nfoo",
        3 -> "\n\nfoo",
        // with quoted string
        1 -> "\"foo\"",
        2 -> "\n\"foo\"",
        3 -> "\n\n\"foo\"",
        // newline in middle of number uses the line the number was on
        1 -> "1e\n",
        2 -> "\n1e\n",
        3 -> "\n\n1e\n")
    for ((expectedLine, text) <- cases)
        lineNumberTest(expectedLine, text)
}
}

View File

@@ -20,7 +20,7 @@ class TokenizerTest extends TestUtils {
     @Test
     def tokenizeNewlines() {
-        assertEquals(List(Tokens.START, Tokens.newLine(0), Tokens.newLine(1), Tokens.END),
+        assertEquals(List(Tokens.START, Tokens.newLine(1), Tokens.newLine(2), Tokens.END),
             tokenizeAsList("\n\n"))
     }
@@ -33,7 +33,7 @@ class TokenizerTest extends TestUtils {
             Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, tokenString("foo"),
             tokenTrue, tokenDouble(3.14), tokenFalse,
             tokenLong(42), tokenNull, tokenSubstitution(tokenUnquoted("a.b")),
-            tokenKeySubstitution("c.d"), Tokens.newLine(0), Tokens.END)
+            tokenKeySubstitution("c.d"), Tokens.newLine(1), Tokens.END)
         assertEquals(expected, tokenizeAsList(""",:=}{]["foo"true3.14false42null${a.b}${"c.d"}""" + "\n"))
     }
@@ -44,7 +44,7 @@ class TokenizerTest extends TestUtils {
             tokenUnquoted(" "), tokenLong(42), tokenUnquoted(" "), tokenTrue, tokenUnquoted(" "),
             tokenDouble(3.14), tokenUnquoted(" "), tokenFalse, tokenUnquoted(" "), tokenNull,
             tokenUnquoted(" "), tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "), tokenKeySubstitution("c.d"),
-            Tokens.newLine(0), Tokens.END)
+            Tokens.newLine(1), Tokens.END)
         assertEquals(expected, tokenizeAsList(""" , : = } { ] [ "foo" 42 true 3.14 false null ${a.b} ${"c.d"} """ + "\n "))
     }
@@ -56,7 +56,7 @@ class TokenizerTest extends TestUtils {
             tokenDouble(3.14), tokenUnquoted(" "), tokenFalse, tokenUnquoted(" "), tokenNull,
             tokenUnquoted(" "), tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "),
             tokenKeySubstitution("c.d"),
-            Tokens.newLine(0), Tokens.END)
+            Tokens.newLine(1), Tokens.END)
         assertEquals(expected, tokenizeAsList(""" , : = } { ] [ "foo" 42 true 3.14 false null ${a.b} ${"c.d"} """ + "\n "))
     }
@@ -106,14 +106,14 @@ class TokenizerTest extends TestUtils {
     @Test
     def tokenizeUnquotedTextTrimsSpaces() {
-        val expected = List(Tokens.START, tokenUnquoted("foo"), Tokens.newLine(0), Tokens.END)
+        val expected = List(Tokens.START, tokenUnquoted("foo"), Tokens.newLine(1), Tokens.END)
         assertEquals(expected, tokenizeAsList(" foo \n"))
     }
     @Test
     def tokenizeUnquotedTextKeepsInternalSpaces() {
         val expected = List(Tokens.START, tokenUnquoted("foo"), tokenUnquoted(" "), tokenUnquoted("bar"),
-            tokenUnquoted(" "), tokenUnquoted("baz"), Tokens.newLine(0), Tokens.END)
+            tokenUnquoted(" "), tokenUnquoted("baz"), Tokens.newLine(1), Tokens.END)
         assertEquals(expected, tokenizeAsList(" foo bar baz \n"))
     }
@@ -121,7 +121,7 @@ class TokenizerTest extends TestUtils {
     def tokenizeMixedUnquotedQuoted() {
         val expected = List(Tokens.START, tokenUnquoted("foo"),
             tokenString("bar"), tokenUnquoted("baz"),
-            Tokens.newLine(0), Tokens.END)
+            Tokens.newLine(1), Tokens.END)
         assertEquals(expected, tokenizeAsList(" foo\"bar\"baz \n"))
     }