Support += convenience syntax

a += b

transforms to

    a = ${?a} [b]
This commit is contained in:
Havoc Pennington 2012-04-06 10:58:00 -04:00
parent 27096e262f
commit 69e6164b30
9 changed files with 164 additions and 13 deletions

View File

@ -633,6 +633,25 @@ resolving an optional substitution (i.e. the `${?foo}` syntax).
If `${?foo}` refers to itself then it's as if it referred to a
nonexistent value.
#### The `+=` field separator
Fields may have `+=` as a separator rather than `:` or `=`. A
field with `+=` transforms into a self-referential array
concatenation, like this:
a += b
becomes:
a = ${?a} [b]
`+=` appends an element to a previous array. If the previous value
was not an array, an error will result just as it would in the
long form `a = ${?a} [b]`. Note that the previous value is
optional (`${?a}` not `${a}`), which allows `a += b` to be the
first mention of `a` in the file (it is not necessary to have `a =
[]` first).
#### Examples of Self-Referential Substitutions
In isolation (with no merges involved), a self-referential field

View File

@ -209,6 +209,7 @@ tree that you could have written (less conveniently) in JSON.
you could use `${user.home}`.
- substitutions normally cause an error if unresolved, but
there is a syntax `${?a.b}` to permit them to be missing.
- `+=` syntax to append elements to arrays, `path += "/bin"`
### Examples of HOCON
@ -358,6 +359,13 @@ Arrays can be concatenated as well:
path : [ "/bin" ]
path : ${path} [ "/usr/bin" ]
There is a shorthand for appending to arrays:
// equivalent to: path = ${?path} [ "/usr/bin" ]
path += "/usr/bin"
To prepend or insert into an array, there is no shorthand.
When objects are "concatenated," they are merged, so object
concatenation is just a shorthand for defining the same object
twice. The long way (mentioned earlier) is:

View File

@ -321,7 +321,6 @@ final class Parser {
return new ConfigException.Parse(lineOrigin(), message, cause);
}
private String previousFieldName(Path lastPath) {
if (lastPath != null) {
return lastPath.render();
@ -331,6 +330,18 @@ final class Parser {
return pathStack.peek().render();
}
private Path fullCurrentPath() {
    // Collapse the stacked path segments into one absolute path.
    // The stack keeps the top (innermost) segment at the front, so
    // each subsequent element is prepended as a parent of what we
    // have accumulated so far. Returns null when the stack is empty.
    Path full = null;
    for (Path segment : pathStack) {
        full = (full == null) ? segment : full.prepend(segment);
    }
    return full;
}
// Convenience overload: report the field name at the current parse
// position using only the path stack (no explicitly remembered
// last-popped path).
private String previousFieldName() {
return previousFieldName(null);
}
@ -517,7 +528,7 @@ final class Parser {
if (flavor == ConfigSyntax.JSON) {
return t == Tokens.COLON;
} else {
return t == Tokens.COLON || t == Tokens.EQUALS;
return t == Tokens.COLON || t == Tokens.EQUALS || t == Tokens.PLUS_EQUALS;
}
}
@ -579,6 +590,17 @@ final class Parser {
newValue = parseValue(valueToken.prepend(keyToken.comments));
if (afterKey.token == Tokens.PLUS_EQUALS) {
List<AbstractConfigValue> concat = new ArrayList<AbstractConfigValue>(2);
AbstractConfigValue previousRef = new ConfigReference(newValue.origin(),
new SubstitutionExpression(fullCurrentPath(), true /* optional */));
AbstractConfigValue list = new SimpleConfigList(newValue.origin(),
Collections.singletonList(newValue));
concat.add(previousRef);
concat.add(list);
newValue = ConfigConcatenation.concatenate(concat);
}
lastPath = pathStack.pop();
if (insideEquals) {
equalsCount -= 1;

View File

@ -18,5 +18,6 @@ enum TokenType {
UNQUOTED_TEXT,
SUBSTITUTION,
PROBLEM,
COMMENT;
COMMENT,
PLUS_EQUALS;
}

View File

@ -136,8 +136,9 @@ final class Tokenizer {
// this should ONLY be called from nextCharSkippingComments
// or when inside a quoted string, everything else should
// use nextCharSkippingComments().
// or when inside a quoted string, or when parsing a sequence
// like ${ or +=, everything else should use
// nextCharSkippingComments().
private int nextCharRaw() {
if (buffer.isEmpty()) {
try {
@ -438,6 +439,16 @@ final class Tokenizer {
return Tokens.newString(lineOrigin, sb.toString());
}
private Token pullPlusEquals() throws ProblemException {
    // the initial '+' has already been consumed; the only legal
    // continuation is '=', forming the "+=" separator token
    int c = nextCharRaw();
    if (c == '=')
        return Tokens.PLUS_EQUALS;
    // anything else after '+' is a tokenizer-level error
    throw problem(asString(c), "'+' not followed by =, '" + asString(c)
            + "' not allowed after '+'", true /* suggestQuotes */);
}
private Token pullSubstitution() throws ProblemException {
// the initial '$' has already been consumed
ConfigOrigin origin = lineOrigin;
@ -525,6 +536,9 @@ final class Tokenizer {
case ']':
t = Tokens.CLOSE_SQUARE;
break;
case '+':
t = pullPlusEquals();
break;
}
if (t == null) {

View File

@ -357,6 +357,7 @@ final class Tokens {
final static Token CLOSE_CURLY = Token.newWithoutOrigin(TokenType.CLOSE_CURLY, "'}'");
final static Token OPEN_SQUARE = Token.newWithoutOrigin(TokenType.OPEN_SQUARE, "'['");
final static Token CLOSE_SQUARE = Token.newWithoutOrigin(TokenType.CLOSE_SQUARE, "']'");
final static Token PLUS_EQUALS = Token.newWithoutOrigin(TokenType.PLUS_EQUALS, "'+='");
static Token newLine(ConfigOrigin origin) {
return new Line(origin);

View File

@ -245,4 +245,79 @@ class ConcatenationTest extends TestUtils {
}
assertTrue("wrong exception: " + e.getMessage, e.getMessage.contains("expecting a close") && e.getMessage.contains("'['"))
}
@Test
def emptyArrayPlusEquals() {
    // += appended to an explicitly-empty array yields a one-element array
    val resolved = parseConfig(""" a = [], a += 2 """).resolve()
    assertEquals(List(2), resolved.getIntList("a").asScala.toList)
}
@Test
def missingArrayPlusEquals() {
    // += works even when the field was never initialized, because the
    // expansion uses an optional substitution (${?a})
    val resolved = parseConfig(""" a += 2 """).resolve()
    assertEquals(List(2), resolved.getIntList("a").asScala.toList)
}
@Test
def shortArrayPlusEquals() {
    // += appends after the existing elements of a nonempty array
    val resolved = parseConfig(""" a = [1], a += 2 """).resolve()
    assertEquals(List(1, 2), resolved.getIntList("a").asScala.toList)
}
@Test
def numberPlusEquals() {
    // += on a non-array value (a number) must fail with WrongType at
    // resolve time, exactly as the long form `a = ${?a} [2]` would.
    // (The intercepted result is not bound to a local, matching the
    // sibling tests stringPlusEquals/objectPlusEquals.)
    val e = intercept[ConfigException.WrongType] {
        parseConfig(""" a = 10, a += 2 """).resolve()
    }
    assertTrue("wrong exception: " + e.getMessage,
        e.getMessage.contains("Cannot concatenate") &&
        e.getMessage.contains("10") &&
        e.getMessage.contains("[2]"))
}
@Test
def stringPlusEquals() {
    // += on a string value is a type error at resolve time
    val ex = intercept[ConfigException.WrongType] {
        parseConfig(""" a = abc, a += 2 """).resolve()
    }
    val message = ex.getMessage
    assertTrue("wrong exception: " + message,
        message.contains("Cannot concatenate") &&
        message.contains("abc") &&
        message.contains("[2]"))
}
@Test
def objectPlusEquals() {
    // += on an object value is a type error at resolve time
    val ex = intercept[ConfigException.WrongType] {
        parseConfig(""" a = { x : y }, a += 2 """).resolve()
    }
    val message = ex.getMessage
    assertTrue("wrong exception: " + message,
        message.contains("Cannot concatenate") &&
        message.contains("\"x\" : \"y\"") &&
        message.contains("[2]"))
}
@Test
def plusEqualsNestedPath() {
    // += works on a dotted path key, expanding to ${?a.b.c}
    val resolved = parseConfig(""" a.b.c = [1], a.b.c += 2 """).resolve()
    assertEquals(List(1, 2), resolved.getIntList("a.b.c").asScala.toList)
}
@Test
def plusEqualsNestedObjects() {
    // += inside a nested object must expand against the full path of
    // the field (a.b.c), not just the local key (c)
    val resolved = parseConfig(""" a : { b : { c : [1] } }, a : { b : { c += 2 } }""").resolve()
    assertEquals(List(1, 2), resolved.getIntList("a.b.c").asScala.toList)
}
@Test
def plusEqualsSingleNestedObject() {
    // += sees a prior definition of the same field within one object
    val resolved = parseConfig(""" a : { b : { c : [1], c += 2 } }""").resolve()
    assertEquals(List(1, 2), resolved.getIntList("a.b.c").asScala.toList)
}
@Test
def substitutionPlusEqualsSubstitution() {
    // both the prior value and the appended element may themselves be
    // substitutions; all resolve together
    val resolved = parseConfig(""" a = ${x}, a += ${y}, x = [1], y = 2 """).resolve()
    assertEquals(List(1, 2), resolved.getIntList("a").asScala.toList)
}
}

View File

@ -279,6 +279,10 @@ abstract trait TestUtils {
"[ * ]",
"[ & ]",
"[ \\ ]",
"+=",
"[ += ]",
"+= 10",
"10 +=",
ParseTest(true, "[ \"foo\nbar\" ]"), // unescaped newline in quoted string, lift doesn't care
"[ # comment ]",
"${ #comment }",
@ -399,7 +403,11 @@ abstract trait TestUtils {
ParseTest(false, true, "[${\"foo.bar\" }]"), // substitution with trailing spaces and quoted
"""[ ${"foo""bar"} ]""", // multiple strings in substitution
"""[ ${foo "bar" baz} ]""", // multiple strings and whitespace in substitution
"[${true}]") // substitution with unquoted true token
"[${true}]", // substitution with unquoted true token
"a = [], a += b", // += operator with previous init
"{ a = [], a += 10 }", // += in braces object with previous init
"a += b", // += operator without previous init
"{ a += 10 }") // += in braces object without previous init
protected val invalidJson = validConfInvalidJson ++ invalidJsonInvalidConf;

View File

@ -33,38 +33,38 @@ class TokenizerTest extends TestUtils {
// but spec is unclear to me when spaces are required, and banning them
// is actually extra work).
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, tokenString("foo"),
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, Tokens.PLUS_EQUALS, tokenString("foo"),
tokenTrue, tokenDouble(3.14), tokenFalse,
tokenLong(42), tokenNull, tokenSubstitution(tokenUnquoted("a.b")),
tokenOptionalSubstitution(tokenUnquoted("x.y")),
tokenKeySubstitution("c.d"), tokenLine(1), Tokens.END)
assertEquals(expected, tokenizeAsList(""",:=}{]["foo"true3.14false42null${a.b}${?x.y}${"c.d"}""" + "\n"))
assertEquals(expected, tokenizeAsList(""",:=}{][+="foo"true3.14false42null${a.b}${?x.y}${"c.d"}""" + "\n"))
}
@Test
def tokenizeAllTypesWithSingleSpaces() {
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, tokenString("foo"),
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, Tokens.PLUS_EQUALS, tokenString("foo"),
tokenUnquoted(" "), tokenLong(42), tokenUnquoted(" "), tokenTrue, tokenUnquoted(" "),
tokenDouble(3.14), tokenUnquoted(" "), tokenFalse, tokenUnquoted(" "), tokenNull,
tokenUnquoted(" "), tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "),
tokenOptionalSubstitution(tokenUnquoted("x.y")), tokenUnquoted(" "),
tokenKeySubstitution("c.d"),
tokenLine(1), Tokens.END)
assertEquals(expected, tokenizeAsList(""" , : = } { ] [ "foo" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} """ + "\n "))
assertEquals(expected, tokenizeAsList(""" , : = } { ] [ += "foo" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} """ + "\n "))
}
@Test
def tokenizeAllTypesWithMultipleSpaces() {
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, tokenString("foo"),
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, Tokens.PLUS_EQUALS, tokenString("foo"),
tokenUnquoted(" "), tokenLong(42), tokenUnquoted(" "), tokenTrue, tokenUnquoted(" "),
tokenDouble(3.14), tokenUnquoted(" "), tokenFalse, tokenUnquoted(" "), tokenNull,
tokenUnquoted(" "), tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "),
tokenOptionalSubstitution(tokenUnquoted("x.y")), tokenUnquoted(" "),
tokenKeySubstitution("c.d"),
tokenLine(1), Tokens.END)
assertEquals(expected, tokenizeAsList(""" , : = } { ] [ "foo" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} """ + "\n "))
assertEquals(expected, tokenizeAsList(""" , : = } { ] [ += "foo" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} """ + "\n "))
}
@Test
@ -228,7 +228,10 @@ class TokenizerTest extends TestUtils {
assertEquals(Tokens.END, tokenized(2))
val problem = tokenized(1)
assertTrue("reserved char is a problem", Tokens.isProblem(problem))
assertEquals("'" + invalid + "'", problem.toString())
if (invalid == '+')
assertEquals("'end of file'", problem.toString())
else
assertEquals("'" + invalid + "'", problem.toString())
}
}
}