From 69e6164b304feac56bf7bc0fb8ed3cec9c453eb8 Mon Sep 17 00:00:00 2001
From: Havoc Pennington
Date: Fri, 6 Apr 2012 10:58:00 -0400
Subject: [PATCH] Support += convenience syntax

a += b transforms to a = ${?a} [b]
---
 HOCON.md                                      | 19 +++++
 README.md                                     |  8 ++
 .../java/com/typesafe/config/impl/Parser.java | 26 ++++++-
 .../com/typesafe/config/impl/TokenType.java   |  3 +-
 .../com/typesafe/config/impl/Tokenizer.java   | 18 ++++-
 .../java/com/typesafe/config/impl/Tokens.java |  1 +
 .../config/impl/ConcatenationTest.scala       | 75 +++++++++++++++++++
 .../com/typesafe/config/impl/TestUtils.scala  | 10 ++-
 .../typesafe/config/impl/TokenizerTest.scala  | 17 +++--
 9 files changed, 164 insertions(+), 13 deletions(-)

diff --git a/HOCON.md b/HOCON.md
index 35173ab7..4c4f6239 100644
--- a/HOCON.md
+++ b/HOCON.md
@@ -633,6 +633,25 @@ resolving an optional substitution (i.e. the `${?foo}` syntax).
 If `${?foo}` refers to itself then it's as if it referred to a
 nonexistent value.
 
+#### The `+=` field separator
+
+Fields may have `+=` as a separator rather than `:` or `=`. A
+field with `+=` transforms into a self-referential array
+concatenation, like this:
+
+    a += b
+
+becomes:
+
+    a = ${?a} [b]
+
+`+=` appends an element to a previous array. If the previous value
+was not an array, an error will result just as it would in the
+long form `a = ${?a} [b]`. Note that the previous value is
+optional (`${?a}` not `${a}`), which allows `a += b` to be the
+first mention of `a` in the file (it is not necessary to have `a =
+[]` first).
+
 #### Examples of Self-Referential Substitutions
 
 In isolation (with no merges involved), a self-referential field
diff --git a/README.md b/README.md
index 17670f40..21949e64 100644
--- a/README.md
+++ b/README.md
@@ -209,6 +209,7 @@ tree that you could have written (less conveniently) in JSON.
    you could use `${user.home}`.
  - substitutions normally cause an error if unresolved, but
    there is a syntax `${?a.b}` to permit them to be missing.
+ - `+=` syntax to append elements to arrays, `path += "/bin"`
 
 ### Examples of HOCON
 
@@ -358,6 +359,13 @@ Arrays can be concatenated as well:
     path : [ "/bin" ]
     path : ${path} [ "/usr/bin" ]
 
+There is a shorthand for appending to arrays:
+
+    // equivalent to: path = ${?path} [ "/usr/bin" ]
+    path += "/usr/bin"
+
+To prepend or insert into an array, there is no shorthand.
+
 When objects are "concatenated," they are merged, so object
 concatenation is just a shorthand for defining the same object twice.
 The long way (mentioned earlier) is:
diff --git a/config/src/main/java/com/typesafe/config/impl/Parser.java b/config/src/main/java/com/typesafe/config/impl/Parser.java
index f56938b5..fd7ff232 100644
--- a/config/src/main/java/com/typesafe/config/impl/Parser.java
+++ b/config/src/main/java/com/typesafe/config/impl/Parser.java
@@ -321,7 +321,6 @@ final class Parser {
             return new ConfigException.Parse(lineOrigin(), message, cause);
         }
 
-
         private String previousFieldName(Path lastPath) {
             if (lastPath != null) {
                 return lastPath.render();
@@ -331,6 +330,18 @@ final class Parser {
                 return pathStack.peek().render();
         }
 
+        private Path fullCurrentPath() {
+            Path full = null;
+            // pathStack has top of stack at front
+            for (Path p : pathStack) {
+                if (full == null)
+                    full = p;
+                else
+                    full = full.prepend(p);
+            }
+            return full;
+        }
+
         private String previousFieldName() {
             return previousFieldName(null);
         }
@@ -517,7 +528,7 @@ final class Parser {
            if (flavor == ConfigSyntax.JSON) {
                return t == Tokens.COLON;
            } else {
-                return t == Tokens.COLON || t == Tokens.EQUALS;
+                return t == Tokens.COLON || t == Tokens.EQUALS || t == Tokens.PLUS_EQUALS;
            }
        }
 
@@ -579,6 +590,17 @@ final class Parser {
 
                 newValue = parseValue(valueToken.prepend(keyToken.comments));
 
+                if (afterKey.token == Tokens.PLUS_EQUALS) {
+                    List<AbstractConfigValue> concat = new ArrayList<AbstractConfigValue>(2);
+                    AbstractConfigValue previousRef = new ConfigReference(newValue.origin(),
+                            new SubstitutionExpression(fullCurrentPath(), true /* optional */));
+                    AbstractConfigValue list = new SimpleConfigList(newValue.origin(),
+                            Collections.singletonList(newValue));
+                    concat.add(previousRef);
+                    concat.add(list);
+                    newValue = ConfigConcatenation.concatenate(concat);
+                }
+
                 lastPath = pathStack.pop();
                 if (insideEquals) {
                     equalsCount -= 1;
diff --git a/config/src/main/java/com/typesafe/config/impl/TokenType.java b/config/src/main/java/com/typesafe/config/impl/TokenType.java
index 7853c094..7202a9b3 100644
--- a/config/src/main/java/com/typesafe/config/impl/TokenType.java
+++ b/config/src/main/java/com/typesafe/config/impl/TokenType.java
@@ -18,5 +18,6 @@ enum TokenType {
     UNQUOTED_TEXT,
     SUBSTITUTION,
     PROBLEM,
-    COMMENT;
+    COMMENT,
+    PLUS_EQUALS;
 }
diff --git a/config/src/main/java/com/typesafe/config/impl/Tokenizer.java b/config/src/main/java/com/typesafe/config/impl/Tokenizer.java
index 2fcee8e6..4d2f397f 100644
--- a/config/src/main/java/com/typesafe/config/impl/Tokenizer.java
+++ b/config/src/main/java/com/typesafe/config/impl/Tokenizer.java
@@ -136,8 +136,9 @@ final class Tokenizer {
 
 
         // this should ONLY be called from nextCharSkippingComments
-        // or when inside a quoted string, everything else should
-        // use nextCharSkippingComments().
+        // or when inside a quoted string, or when parsing a sequence
+        // like ${ or +=, everything else should use
+        // nextCharSkippingComments().
         private int nextCharRaw() {
             if (buffer.isEmpty()) {
                 try {
@@ -438,6 +439,16 @@
             return Tokens.newString(lineOrigin, sb.toString());
         }
 
+        private Token pullPlusEquals() throws ProblemException {
+            // the initial '+' has already been consumed
+            int c = nextCharRaw();
+            if (c != '=') {
+                throw problem(asString(c), "'+' not followed by =, '" + asString(c)
+                        + "' not allowed after '+'", true /* suggestQuotes */);
+            }
+            return Tokens.PLUS_EQUALS;
+        }
+
         private Token pullSubstitution() throws ProblemException {
             // the initial '$' has already been consumed
             ConfigOrigin origin = lineOrigin;
@@ -525,6 +536,9 @@
             case ']':
                 t = Tokens.CLOSE_SQUARE;
                 break;
+            case '+':
+                t = pullPlusEquals();
+                break;
             }
 
             if (t == null) {
diff --git a/config/src/main/java/com/typesafe/config/impl/Tokens.java b/config/src/main/java/com/typesafe/config/impl/Tokens.java
index 83bec62a..2778bbf4 100644
--- a/config/src/main/java/com/typesafe/config/impl/Tokens.java
+++ b/config/src/main/java/com/typesafe/config/impl/Tokens.java
@@ -357,6 +357,7 @@ final class Tokens {
     final static Token CLOSE_CURLY = Token.newWithoutOrigin(TokenType.CLOSE_CURLY, "'}'");
     final static Token OPEN_SQUARE = Token.newWithoutOrigin(TokenType.OPEN_SQUARE, "'['");
     final static Token CLOSE_SQUARE = Token.newWithoutOrigin(TokenType.CLOSE_SQUARE, "']'");
+    final static Token PLUS_EQUALS = Token.newWithoutOrigin(TokenType.PLUS_EQUALS, "'+='");
 
     static Token newLine(ConfigOrigin origin) {
         return new Line(origin);
diff --git a/config/src/test/scala/com/typesafe/config/impl/ConcatenationTest.scala b/config/src/test/scala/com/typesafe/config/impl/ConcatenationTest.scala
index 0665f294..1e2bd03e 100644
--- a/config/src/test/scala/com/typesafe/config/impl/ConcatenationTest.scala
+++ b/config/src/test/scala/com/typesafe/config/impl/ConcatenationTest.scala
@@ -245,4 +245,79 @@ class ConcatenationTest extends TestUtils {
         }
         assertTrue("wrong exception: " + e.getMessage, e.getMessage.contains("expecting a close") && e.getMessage.contains("'['"))
     }
+
+    @Test
+    def emptyArrayPlusEquals() {
+        val conf = parseConfig(""" a = [], a += 2 """).resolve()
+        assertEquals(Seq(2), conf.getIntList("a").asScala.toList)
+    }
+
+    @Test
+    def missingArrayPlusEquals() {
+        val conf = parseConfig(""" a += 2 """).resolve()
+        assertEquals(Seq(2), conf.getIntList("a").asScala.toList)
+    }
+
+    @Test
+    def shortArrayPlusEquals() {
+        val conf = parseConfig(""" a = [1], a += 2 """).resolve()
+        assertEquals(Seq(1, 2), conf.getIntList("a").asScala.toList)
+    }
+
+    @Test
+    def numberPlusEquals() {
+        val e = intercept[ConfigException.WrongType] {
+            val conf = parseConfig(""" a = 10, a += 2 """).resolve()
+        }
+        assertTrue("wrong exception: " + e.getMessage,
+            e.getMessage.contains("Cannot concatenate") &&
+                e.getMessage.contains("10") &&
+                e.getMessage.contains("[2]"))
+    }
+
+    @Test
+    def stringPlusEquals() {
+        val e = intercept[ConfigException.WrongType] {
+            parseConfig(""" a = abc, a += 2 """).resolve()
+        }
+        assertTrue("wrong exception: " + e.getMessage,
+            e.getMessage.contains("Cannot concatenate") &&
+                e.getMessage.contains("abc") &&
+                e.getMessage.contains("[2]"))
+    }
+
+    @Test
+    def objectPlusEquals() {
+        val e = intercept[ConfigException.WrongType] {
+            parseConfig(""" a = { x : y }, a += 2 """).resolve()
+        }
+        assertTrue("wrong exception: " + e.getMessage,
+            e.getMessage.contains("Cannot concatenate") &&
+                e.getMessage.contains("\"x\" : \"y\"") &&
+                e.getMessage.contains("[2]"))
+    }
+
+    @Test
+    def plusEqualsNestedPath() {
+        val conf = parseConfig(""" a.b.c = [1], a.b.c += 2 """).resolve()
+        assertEquals(Seq(1, 2), conf.getIntList("a.b.c").asScala.toList)
+    }
+
+    @Test
+    def plusEqualsNestedObjects() {
+        val conf = parseConfig(""" a : { b : { c : [1] } }, a : { b : { c += 2 } }""").resolve()
+        assertEquals(Seq(1, 2), conf.getIntList("a.b.c").asScala.toList)
+    }
+
+    @Test
+    def plusEqualsSingleNestedObject() {
+        val conf = parseConfig(""" a : { b : { c : [1], c += 2 } }""").resolve()
+        assertEquals(Seq(1, 2), conf.getIntList("a.b.c").asScala.toList)
+    }
+
+    @Test
+    def substitutionPlusEqualsSubstitution() {
+        val conf = parseConfig(""" a = ${x}, a += ${y}, x = [1], y = 2 """).resolve()
+        assertEquals(Seq(1, 2), conf.getIntList("a").asScala.toList)
+    }
 }
diff --git a/config/src/test/scala/com/typesafe/config/impl/TestUtils.scala b/config/src/test/scala/com/typesafe/config/impl/TestUtils.scala
index 8ad84f23..86e85ca3 100644
--- a/config/src/test/scala/com/typesafe/config/impl/TestUtils.scala
+++ b/config/src/test/scala/com/typesafe/config/impl/TestUtils.scala
@@ -279,6 +279,10 @@ abstract trait TestUtils {
         "[ * ]",
         "[ & ]",
         "[ \\ ]",
+        "+=",
+        "[ += ]",
+        "+= 10",
+        "10 +=",
         ParseTest(true, "[ \"foo\nbar\" ]"), // unescaped newline in quoted string, lift doesn't care
         "[ # comment ]",
         "${ #comment }",
@@ -399,7 +403,11 @@ abstract trait TestUtils {
         ParseTest(false, true, "[${\"foo.bar\" }]"), // substitution with trailing spaces and quoted
         """[ ${"foo""bar"} ]""", // multiple strings in substitution
         """[ ${foo "bar" baz} ]""", // multiple strings and whitespace in substitution
-        "[${true}]") // substitution with unquoted true token
+        "[${true}]", // substitution with unquoted true token
+        "a = [], a += b", // += operator with previous init
+        "{ a = [], a += 10 }", // += in braces object with previous init
+        "a += b", // += operator without previous init
+        "{ a += 10 }") // += in braces object without previous init
 
     protected val invalidJson = validConfInvalidJson ++ invalidJsonInvalidConf;
 
diff --git a/config/src/test/scala/com/typesafe/config/impl/TokenizerTest.scala b/config/src/test/scala/com/typesafe/config/impl/TokenizerTest.scala
index d679e9c7..ba108efa 100644
--- a/config/src/test/scala/com/typesafe/config/impl/TokenizerTest.scala
+++ b/config/src/test/scala/com/typesafe/config/impl/TokenizerTest.scala
@@ -33,38 +33,38 @@ class TokenizerTest extends TestUtils {
         // but spec is unclear to me when spaces are required, and banning them
         // is actually extra work).
         val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
-            Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, tokenString("foo"),
+            Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, Tokens.PLUS_EQUALS, tokenString("foo"),
             tokenTrue, tokenDouble(3.14), tokenFalse, tokenLong(42), tokenNull,
             tokenSubstitution(tokenUnquoted("a.b")),
             tokenOptionalSubstitution(tokenUnquoted("x.y")),
             tokenKeySubstitution("c.d"), tokenLine(1), Tokens.END)
-        assertEquals(expected, tokenizeAsList(""",:=}{]["foo"true3.14false42null${a.b}${?x.y}${"c.d"}""" + "\n"))
+        assertEquals(expected, tokenizeAsList(""",:=}{][+="foo"true3.14false42null${a.b}${?x.y}${"c.d"}""" + "\n"))
     }
 
     @Test
     def tokenizeAllTypesWithSingleSpaces() {
         val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
-            Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, tokenString("foo"),
+            Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, Tokens.PLUS_EQUALS, tokenString("foo"),
             tokenUnquoted(" "), tokenLong(42), tokenUnquoted(" "), tokenTrue,
             tokenUnquoted(" "), tokenDouble(3.14), tokenUnquoted(" "), tokenFalse,
             tokenUnquoted(" "), tokenNull, tokenUnquoted(" "),
             tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "),
             tokenOptionalSubstitution(tokenUnquoted("x.y")), tokenUnquoted(" "),
             tokenKeySubstitution("c.d"), tokenLine(1), Tokens.END)
-        assertEquals(expected, tokenizeAsList(""" , : = } { ] [ "foo" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} """ + "\n "))
+        assertEquals(expected, tokenizeAsList(""" , : = } { ] [ += "foo" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} """ + "\n "))
     }
 
     @Test
     def tokenizeAllTypesWithMultipleSpaces() {
         val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
-            Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, tokenString("foo"),
+            Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, Tokens.PLUS_EQUALS, tokenString("foo"),
             tokenUnquoted(" "), tokenLong(42), tokenUnquoted(" "), tokenTrue,
             tokenUnquoted(" "), tokenDouble(3.14), tokenUnquoted(" "), tokenFalse,
             tokenUnquoted(" "), tokenNull, tokenUnquoted(" "),
             tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "),
             tokenOptionalSubstitution(tokenUnquoted("x.y")), tokenUnquoted(" "),
             tokenKeySubstitution("c.d"), tokenLine(1), Tokens.END)
-        assertEquals(expected, tokenizeAsList(""" , : = } { ] [ "foo" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} """ + "\n "))
+        assertEquals(expected, tokenizeAsList(""" , : = } { ] [ += "foo" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} """ + "\n "))
     }
 
     @Test
@@ -228,7 +228,10 @@ class TokenizerTest extends TestUtils {
             assertEquals(Tokens.END, tokenized(2))
             val problem = tokenized(1)
            assertTrue("reserved char is a problem", Tokens.isProblem(problem))
-            assertEquals("'" + invalid + "'", problem.toString())
+            if (invalid == '+')
+                assertEquals("'end of file'", problem.toString())
+            else
+                assertEquals("'" + invalid + "'", problem.toString())
         }
     }
 }
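For reference, here is a small usage sketch, not part of the patch itself, showing how the `+=` separator behaves through the library's public API once the change above is applied. The object name `PlusEqualsExample` and the `path` key are made up for illustration; `ConfigFactory.parseString`, `Config.resolve` and `Config.getStringList` are the existing public API. Because `a += b` is parsed as `a = ${?a} [b]`, the config must be resolved before the list is read, and the shorthand and long forms should produce the same value:

import com.typesafe.config.ConfigFactory
import scala.collection.JavaConverters._

// Illustrative sketch only; not part of the commit.
object PlusEqualsExample extends App {
    // "path += x" is shorthand for path = ${?path} [ x ], so resolve()
    // must run before the value is read.
    val shorthand = ConfigFactory.parseString(
        """
        path = [ "/bin" ]
        path += "/usr/bin"
        """).resolve()

    // The equivalent long form spelled out by hand.
    val longhand = ConfigFactory.parseString(
        """
        path = [ "/bin" ]
        path = ${?path} [ "/usr/bin" ]
        """).resolve()

    println(shorthand.getStringList("path").asScala) // Buffer(/bin, /usr/bin)
    assert(shorthand.getStringList("path") == longhand.getStringList("path"))
}

Because the self-reference in the expansion is optional (`${?path}` rather than `${path}`), the same shorthand also works when `path` has not been defined earlier in the file.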