allow '=' in addition to ':' to separate key from value

This commit is contained in:
Havoc Pennington 2011-11-12 14:28:01 -05:00
parent 508c5910ad
commit 55595e4c39
8 changed files with 64 additions and 10 deletions

View File

@@ -371,6 +371,14 @@ final class Parser {
}
}
// Returns true if token t may separate a key from its value.
// ':' is always accepted; '=' is additionally accepted in the
// non-JSON (config) syntax flavor.
private boolean isKeyValueSeparatorToken(Token t) {
    if (t == Tokens.COLON)
        return true;
    return flavor != SyntaxFlavor.JSON && t == Tokens.EQUALS;
}
private AbstractConfigObject parseObject() {
// invoked just after the OPEN_CURLY
Map<String, AbstractConfigValue> values = new HashMap<String, AbstractConfigValue>();
@@ -390,8 +398,8 @@ final class Parser {
} else {
Path path = parseKey(t);
Token afterKey = nextTokenIgnoringNewline();
if (afterKey != Tokens.COLON) {
throw parseError("Key not followed by a colon, followed by token "
if (!isKeyValueSeparatorToken(afterKey)) {
throw parseError("Key may not be followed by token: "
+ afterKey);
}

View File

@@ -1,5 +1,5 @@
package com.typesafe.config.impl;
enum TokenType {
START, END, COMMA, COLON, OPEN_CURLY, CLOSE_CURLY, OPEN_SQUARE, CLOSE_SQUARE, VALUE, NEWLINE, UNQUOTED_TEXT, SUBSTITUTION;
START, END, COMMA, EQUALS, COLON, OPEN_CURLY, CLOSE_CURLY, OPEN_SQUARE, CLOSE_SQUARE, VALUE, NEWLINE, UNQUOTED_TEXT, SUBSTITUTION;
}

View File

@@ -420,6 +420,9 @@ final class Tokenizer {
case ',':
t = Tokens.COMMA;
break;
case '=':
t = Tokens.EQUALS;
break;
case '{':
t = Tokens.OPEN_CURLY;
break;

View File

@@ -237,6 +237,7 @@ final class Tokens {
final static Token START = new Token(TokenType.START);
final static Token END = new Token(TokenType.END);
final static Token COMMA = new Token(TokenType.COMMA);
final static Token EQUALS = new Token(TokenType.EQUALS);
final static Token COLON = new Token(TokenType.COLON);
final static Token OPEN_CURLY = new Token(TokenType.OPEN_CURLY);
final static Token CLOSE_CURLY = new Token(TokenType.CLOSE_CURLY);

View File

@@ -0,0 +1,41 @@
{
"ints" = {
"fortyTwo" = 42,
"fortyTwoAgain" = 42
},
"floats" = {
"fortyTwoPointOne" = 42.1,
"fortyTwoPointOneAgain" = 42.1
},
"strings" = {
"abcd" = "abcd",
"abcdAgain" = "abcd",
"a" = "a",
"b" = "b",
"c" = "c",
"d" = "d",
"concatenated" = "null bar 42 baz true 3.14 hi"
},
"arrays" = {
"empty" = [],
"1" = [ 1 ],
"12" = [1, 2],
"123" = [1, 2, 3],
"ofString" = [ "a", "b", "c" ]
},
"booleans" = {
"true" = true,
"trueAgain" = true,
"false" = false,
"falseAgain" = false
},
"nulls" = {
"null" = null,
"nullAgain" = null
}
}

View File

@@ -87,6 +87,6 @@ class EquivalentsTest extends TestUtils {
// it breaks every time you add a file, so you have to update it.
assertEquals(2, dirCount)
// this is the number of files not named original.*
assertEquals(7, fileCount)
assertEquals(8, fileCount)
}
}

View File

@@ -171,6 +171,7 @@ abstract trait TestUtils {
"{}")
private val validConfInvalidJson = List[ParseTest](
"""{ "foo" = 42 }""", // equals rather than colon
"""{ "foo" : bar }""", // no quotes on value
"""{ "foo" : null bar 42 baz true 3.14 "hi" }""", // bunch of values to concat into a string
"{ foo : \"bar\" }", // no quotes on key

View File

@@ -24,35 +24,35 @@ class TokenizerTest extends TestUtils {
// all token types with no spaces (not sure JSON spec wants this to work,
// but spec is unclear to me when spaces are required, and banning them
// is actually extra work).
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.CLOSE_CURLY,
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, tokenString("foo"),
tokenTrue, tokenDouble(3.14), tokenFalse,
tokenLong(42), tokenNull, tokenSubstitution(tokenUnquoted("a.b")),
tokenKeySubstitution("c.d"), Tokens.newLine(0), Tokens.END)
assertEquals(expected, tokenizeAsList(""",:}{]["foo"true3.14false42null${a.b}${"c.d"}""" + "\n"))
assertEquals(expected, tokenizeAsList(""",:=}{]["foo"true3.14false42null${a.b}${"c.d"}""" + "\n"))
}
@Test
def tokenizeAllTypesWithSingleSpaces() {
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.CLOSE_CURLY,
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, tokenString("foo"),
tokenUnquoted(" "), tokenLong(42), tokenUnquoted(" "), tokenTrue, tokenUnquoted(" "),
tokenDouble(3.14), tokenUnquoted(" "), tokenFalse, tokenUnquoted(" "), tokenNull,
tokenUnquoted(" "), tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "), tokenKeySubstitution("c.d"),
Tokens.newLine(0), Tokens.END)
assertEquals(expected, tokenizeAsList(""" , : } { ] [ "foo" 42 true 3.14 false null ${a.b} ${"c.d"} """ + "\n "))
assertEquals(expected, tokenizeAsList(""" , : = } { ] [ "foo" 42 true 3.14 false null ${a.b} ${"c.d"} """ + "\n "))
}
@Test
def tokenizeAllTypesWithMultipleSpaces() {
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.CLOSE_CURLY,
val expected = List(Tokens.START, Tokens.COMMA, Tokens.COLON, Tokens.EQUALS, Tokens.CLOSE_CURLY,
Tokens.OPEN_CURLY, Tokens.CLOSE_SQUARE, Tokens.OPEN_SQUARE, tokenString("foo"),
tokenUnquoted(" "), tokenLong(42), tokenUnquoted(" "), tokenTrue, tokenUnquoted(" "),
tokenDouble(3.14), tokenUnquoted(" "), tokenFalse, tokenUnquoted(" "), tokenNull,
tokenUnquoted(" "), tokenSubstitution(tokenUnquoted("a.b")), tokenUnquoted(" "),
tokenKeySubstitution("c.d"),
Tokens.newLine(0), Tokens.END)
assertEquals(expected, tokenizeAsList(""" , : } { ] [ "foo" 42 true 3.14 false null ${a.b} ${"c.d"} """ + "\n "))
assertEquals(expected, tokenizeAsList(""" , : = } { ] [ "foo" 42 true 3.14 false null ${a.b} ${"c.d"} """ + "\n "))
}
@Test