From e782f6c1add44a8020ce9790553f696a7565c10e Mon Sep 17 00:00:00 2001
From: Havoc Pennington
Date: Wed, 16 Nov 2011 21:04:59 -0500
Subject: [PATCH] fix line numbers in error messages. (start with 1.)

---
 .../java/com/typesafe/config/impl/Parser.java |  6 ++--
 .../com/typesafe/config/impl/Tokenizer.java   |  2 +-
 .../typesafe/config/impl/ConfParserTest.scala | 36 +++++++++++++++++++
 .../typesafe/config/impl/TokenizerTest.scala  | 14 ++++----
 4 files changed, 48 insertions(+), 10 deletions(-)

diff --git a/src/main/java/com/typesafe/config/impl/Parser.java b/src/main/java/com/typesafe/config/impl/Parser.java
index 7b904871..a93e7002 100644
--- a/src/main/java/com/typesafe/config/impl/Parser.java
+++ b/src/main/java/com/typesafe/config/impl/Parser.java
@@ -42,7 +42,7 @@ final class Parser {
 
         ParseContext(ConfigSyntax flavor, ConfigOrigin origin, Iterator tokens,
                 ConfigIncluder includer, ConfigIncludeContext includeContext) {
-            lineNumber = 0;
+            lineNumber = 1;
             buffer = new Stack();
             this.tokens = tokens;
             this.flavor = flavor;
@@ -79,7 +79,9 @@ final class Parser {
         private Token nextTokenIgnoringNewline() {
             Token t = nextToken();
             while (Tokens.isNewline(t)) {
-                lineNumber = Tokens.getLineNumber(t);
+                // line number tokens have the line that was _ended_ by the
+                // newline, so we have to add one.
+                lineNumber = Tokens.getLineNumber(t) + 1;
                 t = nextToken();
             }
             return t;
diff --git a/src/main/java/com/typesafe/config/impl/Tokenizer.java b/src/main/java/com/typesafe/config/impl/Tokenizer.java
index 330e2793..aa038916 100644
--- a/src/main/java/com/typesafe/config/impl/Tokenizer.java
+++ b/src/main/java/com/typesafe/config/impl/Tokenizer.java
@@ -95,7 +95,7 @@ final class Tokenizer {
             this.input = input;
             this.allowComments = allowComments;
             this.buffer = new LinkedList();
-            lineNumber = 0;
+            lineNumber = 1;
             tokens = new LinkedList();
             tokens.add(Tokens.START);
             whitespaceSaver = new WhitespaceSaver();
diff --git a/src/test/scala/com/typesafe/config/impl/ConfParserTest.scala b/src/test/scala/com/typesafe/config/impl/ConfParserTest.scala
index f1b6631a..a4dac2bc 100644
--- a/src/test/scala/com/typesafe/config/impl/ConfParserTest.scala
+++ b/src/test/scala/com/typesafe/config/impl/ConfParserTest.scala
@@ -276,4 +276,40 @@ class ConfParserTest extends TestUtils {
         assertEquals(42, obj.getInt("/a/b/c"))
         assertEquals(32, obj.getInt("x/y/z"))
     }
+
+    private def lineNumberTest(num: Int, text: String) {
+        val e = intercept[ConfigException] {
+            parseObject(text)
+        }
+        if (!e.getMessage.contains(num + ":"))
+            throw new Exception("error message did not contain line '" + num + "' '" + text.replace("\n", "\\n") + "'", e)
+    }
+
+    @Test
+    def lineNumbersInErrors() {
+        // error is at the last char
+        lineNumberTest(1, "}")
+        lineNumberTest(2, "\n}")
+        lineNumberTest(3, "\n\n}")
+
+        // error is before a final newline
+        lineNumberTest(1, "}\n")
+        lineNumberTest(2, "\n}\n")
+        lineNumberTest(3, "\n\n}\n")
+
+        // with unquoted string
+        lineNumberTest(1, "foo")
+        lineNumberTest(2, "\nfoo")
+        lineNumberTest(3, "\n\nfoo")
+
+        // with quoted string
+        lineNumberTest(1, "\"foo\"")
+        lineNumberTest(2, "\n\"foo\"")
+        lineNumberTest(3, "\n\n\"foo\"")
+
+        // newline in middle of number uses the line the number was on
+        lineNumberTest(1, "1e\n")
+        lineNumberTest(2, "\n1e\n")
+        lineNumberTest(3, "\n\n1e\n")
+    }
 }
diff --git a/src/test/scala/com/typesafe/config/impl/TokenizerTest.scala b/src/test/scala/com/typesafe/config/impl/TokenizerTest.scala
index 1d39aaaa..b51dcd63 100644
--- a/src/test/scala/com/typesafe/config/impl/TokenizerTest.scala
+++ b/src/test/scala/com/typesafe/config/impl/TokenizerTest.scala
@@ -20,7 +20,7 @@ class TokenizerTest extends TestUtils {
 
     @Test
     def tokenizeNewlines() {
-        assertEquals(List(Tokens.START, Tokens.newLine(0), Tokens.newLine(1), Tokens.END),
+        assertEquals(List(Tokens.START, Tokens.newLine(1), Tokens.newLine(2), Tokens.END),
            tokenizeAsList("\n\n"))
     }
 
@@ -33,7 +33,7 @@ class TokenizerTest extends TestUtils {
             Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, tokenString("foo"),
             tokenTrue, tokenDouble(3.14), tokenFalse, tokenLong(42), tokenNull,
             tokenSubstitution(tokenUnquoted("a.b")),
-            tokenKeySubstitution("c.d"), Tokens.newLine(0), Tokens.END)
+            tokenKeySubstitution("c.d"), Tokens.newLine(1), Tokens.END)
         assertEquals(expected, tokenizeAsList(""",:=}{]["foo"true3.14false42null${a.b}${"c.d"}""" + "\n"))
     }
 
@@ -44,7 +44,7 @@ class TokenizerTest extends TestUtils {
             tokenUnquoted(" "), tokenLong(42), tokenUnquoted(" "), tokenTrue, tokenUnquoted(" "),
             tokenDouble(3.14), tokenUnquoted(" "), tokenFalse, tokenUnquoted(" "), tokenNull,
             tokenUnquoted(" "), tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "), tokenKeySubstitution("c.d"),
-            Tokens.newLine(0), Tokens.END)
+            Tokens.newLine(1), Tokens.END)
         assertEquals(expected, tokenizeAsList(""" , : = } { ] [ "foo" 42 true 3.14 false null ${a.b} ${"c.d"} """ + "\n "))
     }
 
@@ -56,7 +56,7 @@ class TokenizerTest extends TestUtils {
             tokenDouble(3.14), tokenUnquoted(" "), tokenFalse, tokenUnquoted(" "),
             tokenNull, tokenUnquoted(" "), tokenSubstitution(tokenUnquoted("a.b")),
             tokenUnquoted(" "), tokenKeySubstitution("c.d"),
-            Tokens.newLine(0), Tokens.END)
+            Tokens.newLine(1), Tokens.END)
         assertEquals(expected, tokenizeAsList(""" , : = } { ] [ "foo" 42 true 3.14 false null ${a.b} ${"c.d"} """ + "\n "))
     }
 
@@ -106,14 +106,14 @@ class TokenizerTest extends TestUtils {
 
     @Test
     def tokenizeUnquotedTextTrimsSpaces() {
-        val expected = List(Tokens.START, tokenUnquoted("foo"), Tokens.newLine(0), Tokens.END)
+        val expected = List(Tokens.START, tokenUnquoted("foo"), Tokens.newLine(1), Tokens.END)
         assertEquals(expected, tokenizeAsList(" foo \n"))
     }
 
     @Test
     def tokenizeUnquotedTextKeepsInternalSpaces() {
         val expected = List(Tokens.START, tokenUnquoted("foo"), tokenUnquoted(" "), tokenUnquoted("bar"),
-            tokenUnquoted(" "), tokenUnquoted("baz"), Tokens.newLine(0), Tokens.END)
+            tokenUnquoted(" "), tokenUnquoted("baz"), Tokens.newLine(1), Tokens.END)
         assertEquals(expected, tokenizeAsList(" foo bar baz \n"))
     }
 
@@ -121,7 +121,7 @@ class TokenizerTest extends TestUtils {
     def tokenizeMixedUnquotedQuoted() {
         val expected = List(Tokens.START, tokenUnquoted("foo"),
             tokenString("bar"), tokenUnquoted("baz"),
-            Tokens.newLine(0), Tokens.END)
+            Tokens.newLine(1), Tokens.END)
         assertEquals(expected, tokenizeAsList(" foo\"bar\"baz \n"))
     }
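
The patch hinges on one counting convention: lines are numbered starting at 1, and a newline token records the number of the line it just ended, so the parser adds one when it consumes that token. The sketch below is not part of the patch and does not call the library's API; it is a standalone, hypothetical Java illustration of that rule (the "lineNumber = 1" initialization plus the "+ 1" applied in nextTokenIgnoringNewline()) showing why an error that appears after two newlines is now reported on line 3.

public class LineNumberSketch {
    public static void main(String[] args) {
        String text = "a: 1\nb: 2\noops";    // hypothetical input; the bad token sits on line 3

        int lineNumber = 1;                   // counting starts at 1, as the patch changes
        for (int i = 0; i < text.length(); i++) {
            if (text.charAt(i) == '\n') {
                int endedLine = lineNumber;   // what a newline token would carry
                lineNumber = endedLine + 1;   // the "+ 1" the parser applies on consuming it
            }
        }
        // With the old 0-based counting this would have printed 2; it now prints 3.
        System.out.println("error reported at line " + lineNumber);
    }
}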