Support += convenience syntax

a += b

transforms to

    a = ${?a} [b]
This commit is contained in:
Havoc Pennington 2012-04-06 10:58:00 -04:00
parent 27096e262f
commit 69e6164b30
9 changed files with 164 additions and 13 deletions

View File

@ -633,6 +633,25 @@ resolving an optional substitution (i.e. the `${?foo}` syntax).
If `${?foo}` refers to itself then it's as if it referred to a
nonexistent value.
#### The `+=` field separator
Fields may have `+=` as a separator rather than `:` or `=`. A
field with `+=` transforms into a self-referential array
concatenation, like this:
a += b
becomes:
a = ${?a} [b]
`+=` appends an element to a previous array. If the previous value
was not an array, an error will result just as it would in the
long form `a = ${?a} [b]`. Note that the previous value is
optional (`${?a}` not `${a}`), which allows `a += b` to be the
first mention of `a` in the file (it is not necessary to have `a =
[]` first).
#### Examples of Self-Referential Substitutions
In isolation (with no merges involved), a self-referential field

View File

@ -209,6 +209,7 @@ tree that you could have written (less conveniently) in JSON.
you could use `${user.home}`. you could use `${user.home}`.
- substitutions normally cause an error if unresolved, but - substitutions normally cause an error if unresolved, but
there is a syntax `${?a.b}` to permit them to be missing. there is a syntax `${?a.b}` to permit them to be missing.
- `+=` syntax to append elements to arrays, `path += "/bin"`
### Examples of HOCON ### Examples of HOCON
@ -358,6 +359,13 @@ Arrays can be concatenated as well:
path : [ "/bin" ] path : [ "/bin" ]
path : ${path} [ "/usr/bin" ] path : ${path} [ "/usr/bin" ]
There is a shorthand for appending to arrays:
// equivalent to: path = ${?path} [ "/usr/bin" ]
path += "/usr/bin"
To prepend or insert into an array, there is no shorthand.
When objects are "concatenated," they are merged, so object When objects are "concatenated," they are merged, so object
concatenation is just a shorthand for defining the same object concatenation is just a shorthand for defining the same object
twice. The long way (mentioned earlier) is: twice. The long way (mentioned earlier) is:

View File

@ -321,7 +321,6 @@ final class Parser {
return new ConfigException.Parse(lineOrigin(), message, cause); return new ConfigException.Parse(lineOrigin(), message, cause);
} }
private String previousFieldName(Path lastPath) { private String previousFieldName(Path lastPath) {
if (lastPath != null) { if (lastPath != null) {
return lastPath.render(); return lastPath.render();
@ -331,6 +330,18 @@ final class Parser {
return pathStack.peek().render(); return pathStack.peek().render();
} }
private Path fullCurrentPath() {
    // pathStack iterates with the top of the stack first (the
    // innermost path segment), so start from it and prepend each
    // enclosing path to build the absolute path of the current field.
    // Returns null when the stack is empty.
    Path combined = null;
    for (Path enclosing : pathStack) {
        combined = (combined == null) ? enclosing : combined.prepend(enclosing);
    }
    return combined;
}
private String previousFieldName() { private String previousFieldName() {
return previousFieldName(null); return previousFieldName(null);
} }
@ -517,7 +528,7 @@ final class Parser {
if (flavor == ConfigSyntax.JSON) { if (flavor == ConfigSyntax.JSON) {
return t == Tokens.COLON; return t == Tokens.COLON;
} else { } else {
return t == Tokens.COLON || t == Tokens.EQUALS; return t == Tokens.COLON || t == Tokens.EQUALS || t == Tokens.PLUS_EQUALS;
} }
} }
@ -579,6 +590,17 @@ final class Parser {
newValue = parseValue(valueToken.prepend(keyToken.comments)); newValue = parseValue(valueToken.prepend(keyToken.comments));
if (afterKey.token == Tokens.PLUS_EQUALS) {
List<AbstractConfigValue> concat = new ArrayList<AbstractConfigValue>(2);
AbstractConfigValue previousRef = new ConfigReference(newValue.origin(),
new SubstitutionExpression(fullCurrentPath(), true /* optional */));
AbstractConfigValue list = new SimpleConfigList(newValue.origin(),
Collections.singletonList(newValue));
concat.add(previousRef);
concat.add(list);
newValue = ConfigConcatenation.concatenate(concat);
}
lastPath = pathStack.pop(); lastPath = pathStack.pop();
if (insideEquals) { if (insideEquals) {
equalsCount -= 1; equalsCount -= 1;

View File

@ -18,5 +18,6 @@ enum TokenType {
UNQUOTED_TEXT, UNQUOTED_TEXT,
SUBSTITUTION, SUBSTITUTION,
PROBLEM, PROBLEM,
COMMENT; COMMENT,
PLUS_EQUALS;
} }

View File

@ -136,8 +136,9 @@ final class Tokenizer {
// this should ONLY be called from nextCharSkippingComments // this should ONLY be called from nextCharSkippingComments
// or when inside a quoted string, everything else should // or when inside a quoted string, or when parsing a sequence
// use nextCharSkippingComments(). // like ${ or +=, everything else should use
// nextCharSkippingComments().
private int nextCharRaw() { private int nextCharRaw() {
if (buffer.isEmpty()) { if (buffer.isEmpty()) {
try { try {
@ -438,6 +439,16 @@ final class Tokenizer {
return Tokens.newString(lineOrigin, sb.toString()); return Tokens.newString(lineOrigin, sb.toString());
} }
private Token pullPlusEquals() throws ProblemException {
    // the initial '+' has already been consumed; the only legal
    // continuation is '=', which forms the "+=" separator token
    int next = nextCharRaw();
    if (next == '=')
        return Tokens.PLUS_EQUALS;
    throw problem(asString(next), "'+' not followed by =, '" + asString(next)
            + "' not allowed after '+'", true /* suggestQuotes */);
}
private Token pullSubstitution() throws ProblemException { private Token pullSubstitution() throws ProblemException {
// the initial '$' has already been consumed // the initial '$' has already been consumed
ConfigOrigin origin = lineOrigin; ConfigOrigin origin = lineOrigin;
@ -525,6 +536,9 @@ final class Tokenizer {
case ']': case ']':
t = Tokens.CLOSE_SQUARE; t = Tokens.CLOSE_SQUARE;
break; break;
case '+':
t = pullPlusEquals();
break;
} }
if (t == null) { if (t == null) {

View File

@ -357,6 +357,7 @@ final class Tokens {
final static Token CLOSE_CURLY = Token.newWithoutOrigin(TokenType.CLOSE_CURLY, "'}'"); final static Token CLOSE_CURLY = Token.newWithoutOrigin(TokenType.CLOSE_CURLY, "'}'");
final static Token OPEN_SQUARE = Token.newWithoutOrigin(TokenType.OPEN_SQUARE, "'['"); final static Token OPEN_SQUARE = Token.newWithoutOrigin(TokenType.OPEN_SQUARE, "'['");
final static Token CLOSE_SQUARE = Token.newWithoutOrigin(TokenType.CLOSE_SQUARE, "']'"); final static Token CLOSE_SQUARE = Token.newWithoutOrigin(TokenType.CLOSE_SQUARE, "']'");
final static Token PLUS_EQUALS = Token.newWithoutOrigin(TokenType.PLUS_EQUALS, "'+='");
static Token newLine(ConfigOrigin origin) { static Token newLine(ConfigOrigin origin) {
return new Line(origin); return new Line(origin);

View File

@ -245,4 +245,79 @@ class ConcatenationTest extends TestUtils {
} }
assertTrue("wrong exception: " + e.getMessage, e.getMessage.contains("expecting a close") && e.getMessage.contains("'['")) assertTrue("wrong exception: " + e.getMessage, e.getMessage.contains("expecting a close") && e.getMessage.contains("'['"))
} }
@Test
def emptyArrayPlusEquals() {
    // appending with += to an explicitly-empty array yields a one-element list
    val resolved = parseConfig(""" a = [], a += 2 """).resolve()
    assertEquals(List(2), resolved.getIntList("a").asScala.toList)
}
@Test
def missingArrayPlusEquals() {
    // += works even when `a` was never defined, since it expands to ${?a} [2]
    val resolved = parseConfig(""" a += 2 """).resolve()
    assertEquals(List(2), resolved.getIntList("a").asScala.toList)
}
@Test
def shortArrayPlusEquals() {
    // += appends an element to an existing one-element array
    val resolved = parseConfig(""" a = [1], a += 2 """).resolve()
    assertEquals(List(1, 2), resolved.getIntList("a").asScala.toList)
}
@Test
def numberPlusEquals() {
    // += onto a non-array (a number) must fail with WrongType, just as
    // the long form `a = ${?a} [2]` would.
    // Note: the intercepted block's result was previously bound to an
    // unused `val conf`; dropped for consistency with stringPlusEquals
    // and objectPlusEquals.
    val e = intercept[ConfigException.WrongType] {
        parseConfig(""" a = 10, a += 2 """).resolve()
    }
    assertTrue("wrong exception: " + e.getMessage,
        e.getMessage.contains("Cannot concatenate") &&
            e.getMessage.contains("10") &&
            e.getMessage.contains("[2]"))
}
@Test
def stringPlusEquals() {
    // += onto a string value is a type error, as in the long form
    val ex = intercept[ConfigException.WrongType] {
        parseConfig(""" a = abc, a += 2 """).resolve()
    }
    assertTrue("wrong exception: " + ex.getMessage,
        ex.getMessage.contains("Cannot concatenate") &&
            ex.getMessage.contains("abc") &&
            ex.getMessage.contains("[2]"))
}
@Test
def objectPlusEquals() {
    // += onto an object value is a type error, as in the long form
    val ex = intercept[ConfigException.WrongType] {
        parseConfig(""" a = { x : y }, a += 2 """).resolve()
    }
    assertTrue("wrong exception: " + ex.getMessage,
        ex.getMessage.contains("Cannot concatenate") &&
            ex.getMessage.contains("\"x\" : \"y\"") &&
            ex.getMessage.contains("[2]"))
}
@Test
def plusEqualsNestedPath() {
    // += resolves against the full dotted path of the field
    val resolved = parseConfig(""" a.b.c = [1], a.b.c += 2 """).resolve()
    assertEquals(List(1, 2), resolved.getIntList("a.b.c").asScala.toList)
}
@Test
def plusEqualsNestedObjects() {
    // += inside a nested object must expand to the absolute path of c
    val resolved = parseConfig(""" a : { b : { c : [1] } }, a : { b : { c += 2 } }""").resolve()
    assertEquals(List(1, 2), resolved.getIntList("a.b.c").asScala.toList)
}
@Test
def plusEqualsSingleNestedObject() {
    // both the initial value and the += live inside the same nested object
    val resolved = parseConfig(""" a : { b : { c : [1], c += 2 } }""").resolve()
    assertEquals(List(1, 2), resolved.getIntList("a.b.c").asScala.toList)
}
@Test
def substitutionPlusEqualsSubstitution() {
    // both the original value and the appended element may themselves
    // be substitutions resolved later
    val resolved = parseConfig(""" a = ${x}, a += ${y}, x = [1], y = 2 """).resolve()
    assertEquals(List(1, 2), resolved.getIntList("a").asScala.toList)
}
} }

View File

@ -279,6 +279,10 @@ abstract trait TestUtils {
"[ * ]", "[ * ]",
"[ & ]", "[ & ]",
"[ \\ ]", "[ \\ ]",
"+=",
"[ += ]",
"+= 10",
"10 +=",
ParseTest(true, "[ \"foo\nbar\" ]"), // unescaped newline in quoted string, lift doesn't care ParseTest(true, "[ \"foo\nbar\" ]"), // unescaped newline in quoted string, lift doesn't care
"[ # comment ]", "[ # comment ]",
"${ #comment }", "${ #comment }",
@ -399,7 +403,11 @@ abstract trait TestUtils {
ParseTest(false, true, "[${\"foo.bar\" }]"), // substitution with trailing spaces and quoted ParseTest(false, true, "[${\"foo.bar\" }]"), // substitution with trailing spaces and quoted
"""[ ${"foo""bar"} ]""", // multiple strings in substitution """[ ${"foo""bar"} ]""", // multiple strings in substitution
"""[ ${foo "bar" baz} ]""", // multiple strings and whitespace in substitution """[ ${foo "bar" baz} ]""", // multiple strings and whitespace in substitution
"[${true}]") // substitution with unquoted true token "[${true}]", // substitution with unquoted true token
"a = [], a += b", // += operator with previous init
"{ a = [], a += 10 }", // += in braces object with previous init
"a += b", // += operator without previous init
"{ a += 10 }") // += in braces object without previous init
protected val invalidJson = validConfInvalidJson ++ invalidJsonInvalidConf; protected val invalidJson = validConfInvalidJson ++ invalidJsonInvalidConf;

View File

@ -33,38 +33,38 @@ class TokenizerTest extends TestUtils {
// but spec is unclear to me when spaces are required, and banning them // but spec is unclear to me when spaces are required, and banning them
// is actually extra work). // is actually extra work).
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY, val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, tokenString("foo"), Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, Tokens.PLUS_EQUALS, tokenString("foo"),
tokenTrue, tokenDouble(3.14), tokenFalse, tokenTrue, tokenDouble(3.14), tokenFalse,
tokenLong(42), tokenNull, tokenSubstitution(tokenUnquoted("a.b")), tokenLong(42), tokenNull, tokenSubstitution(tokenUnquoted("a.b")),
tokenOptionalSubstitution(tokenUnquoted("x.y")), tokenOptionalSubstitution(tokenUnquoted("x.y")),
tokenKeySubstitution("c.d"), tokenLine(1), Tokens.END) tokenKeySubstitution("c.d"), tokenLine(1), Tokens.END)
assertEquals(expected, tokenizeAsList(""",:=}{]["foo"true3.14false42null${a.b}${?x.y}${"c.d"}""" + "\n")) assertEquals(expected, tokenizeAsList(""",:=}{][+="foo"true3.14false42null${a.b}${?x.y}${"c.d"}""" + "\n"))
} }
@Test @Test
def tokenizeAllTypesWithSingleSpaces() { def tokenizeAllTypesWithSingleSpaces() {
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY, val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, tokenString("foo"), Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, Tokens.PLUS_EQUALS, tokenString("foo"),
tokenUnquoted(" "), tokenLong(42), tokenUnquoted(" "), tokenTrue, tokenUnquoted(" "), tokenUnquoted(" "), tokenLong(42), tokenUnquoted(" "), tokenTrue, tokenUnquoted(" "),
tokenDouble(3.14), tokenUnquoted(" "), tokenFalse, tokenUnquoted(" "), tokenNull, tokenDouble(3.14), tokenUnquoted(" "), tokenFalse, tokenUnquoted(" "), tokenNull,
tokenUnquoted(" "), tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "), tokenUnquoted(" "), tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "),
tokenOptionalSubstitution(tokenUnquoted("x.y")), tokenUnquoted(" "), tokenOptionalSubstitution(tokenUnquoted("x.y")), tokenUnquoted(" "),
tokenKeySubstitution("c.d"), tokenKeySubstitution("c.d"),
tokenLine(1), Tokens.END) tokenLine(1), Tokens.END)
assertEquals(expected, tokenizeAsList(""" , : = } { ] [ "foo" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} """ + "\n ")) assertEquals(expected, tokenizeAsList(""" , : = } { ] [ += "foo" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} """ + "\n "))
} }
@Test @Test
def tokenizeAllTypesWithMultipleSpaces() { def tokenizeAllTypesWithMultipleSpaces() {
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY, val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, tokenString("foo"), Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, Tokens.PLUS_EQUALS, tokenString("foo"),
tokenUnquoted(" "), tokenLong(42), tokenUnquoted(" "), tokenTrue, tokenUnquoted(" "), tokenUnquoted(" "), tokenLong(42), tokenUnquoted(" "), tokenTrue, tokenUnquoted(" "),
tokenDouble(3.14), tokenUnquoted(" "), tokenFalse, tokenUnquoted(" "), tokenNull, tokenDouble(3.14), tokenUnquoted(" "), tokenFalse, tokenUnquoted(" "), tokenNull,
tokenUnquoted(" "), tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "), tokenUnquoted(" "), tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "),
tokenOptionalSubstitution(tokenUnquoted("x.y")), tokenUnquoted(" "), tokenOptionalSubstitution(tokenUnquoted("x.y")), tokenUnquoted(" "),
tokenKeySubstitution("c.d"), tokenKeySubstitution("c.d"),
tokenLine(1), Tokens.END) tokenLine(1), Tokens.END)
assertEquals(expected, tokenizeAsList(""" , : = } { ] [ "foo" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} """ + "\n ")) assertEquals(expected, tokenizeAsList(""" , : = } { ] [ += "foo" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} """ + "\n "))
} }
@Test @Test
@ -228,7 +228,10 @@ class TokenizerTest extends TestUtils {
assertEquals(Tokens.END, tokenized(2)) assertEquals(Tokens.END, tokenized(2))
val problem = tokenized(1) val problem = tokenized(1)
assertTrue("reserved char is a problem", Tokens.isProblem(problem)) assertTrue("reserved char is a problem", Tokens.isProblem(problem))
assertEquals("'" + invalid + "'", problem.toString()) if (invalid == '+')
assertEquals("'end of file'", problem.toString())
else
assertEquals("'" + invalid + "'", problem.toString())
} }
} }
} }