Support # and // comments

parent 796937f3ac
commit 508c5910ad
@@ -42,7 +42,7 @@ final class Parser {
     static AbstractConfigValue parse(SyntaxFlavor flavor, ConfigOrigin origin,
             Reader input, IncludeHandler includer) {
-        Iterator<Token> tokens = Tokenizer.tokenize(origin, input);
+        Iterator<Token> tokens = Tokenizer.tokenize(origin, input, flavor);
         return parse(flavor, origin, tokens, includer);
     }

@@ -654,7 +654,8 @@ final class Parser {
         StringReader reader = new StringReader(path);

         try {
-            Iterator<Token> tokens = Tokenizer.tokenize(apiOrigin, reader);
+            Iterator<Token> tokens = Tokenizer.tokenize(apiOrigin, reader,
+                    SyntaxFlavor.CONF);
             tokens.next(); // drop START
             return parsePathExpression(tokens, apiOrigin);
         } finally {
@@ -16,8 +16,8 @@ final class Tokenizer {
      * Tokenizes a Reader. Does not close the reader; you have to arrange to do
      * that after you're done with the returned iterator.
      */
-    static Iterator<Token> tokenize(ConfigOrigin origin, Reader input) {
-        return new TokenIterator(origin, input);
+    static Iterator<Token> tokenize(ConfigOrigin origin, Reader input, SyntaxFlavor flavor) {
+        return new TokenIterator(origin, input, flavor != SyntaxFlavor.JSON);
     }

     private static class TokenIterator implements Iterator<Token> {
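
The extra parameter collapses to a single boolean at the TokenIterator boundary: comments are enabled for every flavor except strict JSON, so CONF (and any future non-JSON flavor) gets them automatically. A minimal standalone sketch of that mapping, using an illustrative enum rather than the library's own classes:

    // Illustrative only -- mirrors the flavor check above, outside the library.
    enum Flavor { JSON, CONF }

    final class FlavorDemo {
        static boolean allowComments(Flavor flavor) {
            // Strict JSON stays comment-free; everything else accepts # and //.
            return flavor != Flavor.JSON;
        }

        public static void main(String[] args) {
            System.out.println(allowComments(Flavor.JSON)); // false
            System.out.println(allowComments(Flavor.CONF)); // true
        }
    }
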
@@ -87,10 +87,12 @@ final class Tokenizer {
         private int lineNumber;
         final private Queue<Token> tokens;
         final private WhitespaceSaver whitespaceSaver;
+        final private boolean allowComments;

-        TokenIterator(ConfigOrigin origin, Reader input) {
+        TokenIterator(ConfigOrigin origin, Reader input, boolean allowComments) {
             this.origin = origin;
             this.input = input;
+            this.allowComments = allowComments;
             oneCharBuffer = -1;
             lineNumber = 0;
             tokens = new LinkedList<Token>();
@@ -132,6 +134,15 @@ final class Tokenizer {
             return c == ' ' || (c != '\n' && Character.isWhitespace(c));
         }

+        private int slurpComment() {
+            for (;;) {
+                int c = nextChar();
+                if (c == -1 || c == '\n') {
+                    return c;
+                }
+            }
+        }
+
         // get next char, skipping non-newline whitespace
         private int nextCharAfterWhitespace(WhitespaceSaver saver) {
             for (;;) {
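
The new slurpComment discards characters up to a newline or end of input, but returns the terminating character instead of swallowing it, so the caller can hand the '\n' back to the normal token path and newline handling and line numbers stay intact. A standalone sketch of the same idea (not the library's code), reading from a plain java.io.Reader:

    import java.io.IOException;
    import java.io.Reader;
    import java.io.StringReader;

    final class SlurpDemo {
        // Discard the rest of a line comment; return the character that ended it
        // ('\n' or -1) so the caller still sees the line boundary.
        static int slurpComment(Reader in) throws IOException {
            for (;;) {
                int c = in.read();
                if (c == -1 || c == '\n') {
                    return c;
                }
            }
        }

        public static void main(String[] args) throws IOException {
            Reader in = new StringReader("# a comment\nfoo = 1");
            in.read();                                // consume the '#'
            int terminator = slurpComment(in);
            System.out.println(terminator == '\n');   // true: the newline is preserved for the caller
            System.out.println((char) in.read());     // 'f' -- the next line is untouched
        }
    }
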
@@ -139,11 +150,27 @@ final class Tokenizer {

                 if (c == -1) {
                     return -1;
-                } else if (isWhitespaceNotNewline(c)) {
-                    saver.add(c);
-                    continue;
                 } else {
-                    return c;
+                    if (isWhitespaceNotNewline(c)) {
+                        saver.add(c);
+                        continue;
+                    } else if (allowComments) {
+                        if (c == '#') {
+                            return slurpComment();
+                        } else if (c == '/') {
+                            int maybeSecondSlash = nextChar();
+                            if (maybeSecondSlash == '/') {
+                                return slurpComment();
+                            } else {
+                                putBack(maybeSecondSlash);
+                                return c;
+                            }
+                        } else {
+                            return c;
+                        }
+                    } else {
+                        return c;
+                    }
                 }
             }
         }
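
In the comment branch a '#' always starts a comment, while a '/' only does so if the very next character is another '/'; otherwise the lookahead character is pushed back and the lone '/' flows through as ordinary input. A self-contained sketch of that one-character-lookahead technique (illustrative, not the library's code), with java.io.PushbackReader standing in for the tokenizer's private putBack; note it does not special-case quoted strings:

    import java.io.IOException;
    import java.io.PushbackReader;
    import java.io.StringReader;

    final class CommentLookaheadDemo {
        // Strip '#' and '//' line comments, keeping newlines and all other characters.
        static String stripLineComments(String s) throws IOException {
            PushbackReader in = new PushbackReader(new StringReader(s), 1);
            StringBuilder out = new StringBuilder();
            int c;
            while ((c = in.read()) != -1) {
                boolean comment = false;
                if (c == '#') {
                    comment = true;
                } else if (c == '/') {
                    int second = in.read();
                    if (second == '/') {
                        comment = true;
                    } else if (second != -1) {
                        in.unread(second);     // not a comment: put the lookahead back
                    }
                }
                if (comment) {
                    while (c != -1 && c != '\n') {
                        c = in.read();         // discard to end of line
                    }
                    if (c == '\n') {
                        out.append('\n');      // keep the line boundary
                    }
                } else {
                    out.append((char) c);
                }
            }
            return out.toString();
        }

        public static void main(String[] args) throws IOException {
            System.out.print(stripLineComments("a = 1 // trailing\nb = 2 # also\nurl = 10/3\n"));
            // prints:
            // a = 1
            // b = 2
            // url = 10/3   (a single '/' is not a comment)
        }
    }
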
src/test/resources/equiv01/comments.conf (new file, 45 lines)
@@ -0,0 +1,45 @@
+{ // These are some integers
+    "ints" : { # we can comment them with # too
+        "fortyTwo" : 42, ## double-# !
+        "fortyTwoAgain" : 42 # { }
+    },
+    ######## COMMENT COMMENT COMMENT
+    /// I COMMENT YOU
+    "floats" : {
+        "fortyTwoPointOne" : 42.1,
+        "fortyTwoPointOneAgain" : 42.1
+        // I CAN HAS COMMENT
+    },
+
+    "strings" : {
+        "abcd" : // I got yr comment right here
+            "abcd",
+        "abcdAgain" : "abcd",
+        "a" : "a",
+        "b" : "b",
+        "c" : "c",
+        "d" : "d",
+        "concatenated" # more to say?
+            : "null bar 42 baz true 3.14 hi"
+    },
+
+    "arrays" : {
+        "empty" : [],
+        "1" : [ 1 ],
+        "12" : [1, 2],
+        "123" : [1, 2, 3],
+        "ofString" : [ "a", "b", "c" ]
+    },
+
+    "booleans" : {
+        "true" : true,
+        "trueAgain" : true,
+        "false" : false,
+        "falseAgain" : false
+    },
+
+    "nulls" : {
+        "null" : null,
+        "nullAgain" : null
+    }
+}
@@ -87,6 +87,6 @@ class EquivalentsTest extends TestUtils {
         // it breaks every time you add a file, so you have to update it.
         assertEquals(2, dirCount)
         // this is the number of files not named original.*
-        assertEquals(6, fileCount)
+        assertEquals(7, fileCount)
     }
 }
@@ -138,6 +138,8 @@ abstract trait TestUtils {
         "[ / ]",
         "[ # ]",
         "[ \\ ]",
+        "[ # comment ]",
+        "${ #comment }",
         "{ include \"bar\" : 10 }", // include with a value after it
         "{ include foo }", // include with unquoted string
         "{ include : { \"a\" : 1 } }", // include used as unquoted key
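
These two additions to the invalid list stay invalid even with comments enabled: everything from the '#' to the end of the line is thrown away, so the closing ']' and '}' are swallowed by the comment and the brackets never balance. A tiny illustrative sketch of what the parser effectively gets to see:

    final class WhyStillInvalidDemo {
        // What is left of a single line once a '#' comment has been discarded.
        static String visibleToParser(String line) {
            int hash = line.indexOf('#');
            return hash < 0 ? line : line.substring(0, hash);
        }

        public static void main(String[] args) {
            System.out.println(visibleToParser("[ # comment ]")); // "[ "  -- the array is never closed
            System.out.println(visibleToParser("${ #comment }")); // "${ " -- the substitution is never closed
        }
    }
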
@@ -162,6 +164,8 @@ abstract trait TestUtils {
         """[[[[[[]]]]]]""",
         """[[1], [1,2], [1,2,3], []]""", // nested multiple-valued array
         """{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":42}}}}}}}}""",
+        "[ \"#comment\" ]", // quoted # comment
+        "[ \"//comment\" ]", // quoted // comment
         // this long one is mostly to test rendering
         """{ "foo" : { "bar" : "baz", "woo" : "w00t" }, "baz" : { "bar" : "baz", "woo" : [1,2,3,4], "w00t" : true, "a" : false, "b" : 3.14, "c" : null } }""",
         "{}")
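
The quoted forms are added to the valid-JSON list to pin down that '#' and '//' inside a quoted string stay literal: the comment check above lives on the whitespace-skipping path between tokens, and characters being consumed for a string token never go through it. A standalone, quote-aware sketch of that distinction (illustrative only; no escape handling):

    final class QuotedCommentDemo {
        // Strip '#' line comments, except when the '#' sits inside a double-quoted string.
        static String stripUnquotedComments(String s) {
            StringBuilder out = new StringBuilder();
            boolean inString = false;
            for (int i = 0; i < s.length(); i++) {
                char c = s.charAt(i);
                if (c == '"') {
                    inString = !inString;              // toggle on quotes (escapes ignored in this sketch)
                    out.append(c);
                } else if (!inString && c == '#') {
                    while (i < s.length() && s.charAt(i) != '\n') {
                        i++;                           // discard to end of line
                    }
                    if (i < s.length()) {
                        out.append('\n');              // keep the line boundary
                    }
                } else {
                    out.append(c);
                }
            }
            return out.toString();
        }

        public static void main(String[] args) {
            System.out.println(stripUnquotedComments("[ \"#comment\" ]"));  // unchanged: '#' is string content
            System.out.println(stripUnquotedComments("[ 1 ] # trailing"));  // "[ 1 ] "
        }
    }
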
@@ -197,6 +201,28 @@ abstract trait TestUtils {
         "[ abc xyz ${foo.bar} qrs tuv ]", // value concatenation
         "[ 1, 2, 3, blah ]",
         "[ ${\"foo.bar\"} ]",
+        "{} # comment",
+        "{} // comment",
+        """{ "foo" #comment
+: 10 }""",
+        """{ "foo" // comment
+: 10 }""",
+        """{ "foo" : #comment
+10 }""",
+        """{ "foo" : // comment
+10 }""",
+        """{ "foo" : 10 #comment
+}""",
+        """{ "foo" : 10 // comment
+}""",
+        """[ 10, # comment
+11]""",
+        """[ 10, // comment
+11]""",
+        """[ 10 # comment
+, 11]""",
+        """[ 10 // comment
+, 11]""",
         ParseTest(false, true, "[${ foo.bar}]"), // substitution with leading spaces
         ParseTest(false, true, "[${foo.bar }]"), // substitution with trailing spaces
         ParseTest(false, true, "[${ \"foo.bar\"}]"), // substitution with leading spaces and quoted
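
All of these placements work for the same reason: the comment check lives in the routine that skips inter-token whitespace, so a comment is legal anywhere whitespace is, whether after a key, around the ':', after a value, or around a comma. A small illustrative sketch of what one of the new inputs reduces to once its comment is dropped (again ignoring quoted strings, which contain no '#' or '//' here):

    final class CommentPlacementDemo {
        // Drop '#' and '//' line comments from each line (quoted strings not special-cased).
        static String withoutLineComments(String s) {
            StringBuilder out = new StringBuilder();
            for (String line : s.split("\n", -1)) {
                int cut = line.length();
                int hash = line.indexOf('#');
                int slashes = line.indexOf("//");
                if (hash >= 0) cut = Math.min(cut, hash);
                if (slashes >= 0) cut = Math.min(cut, slashes);
                out.append(line, 0, cut).append('\n');
            }
            return out.toString();
        }

        public static void main(String[] args) {
            // One of the newly-valid inputs: the comment sits between the key and the ':'.
            System.out.print(withoutLineComments("{ \"foo\" #comment\n: 10 }"));
            // prints:
            // { "foo"
            // : 10 }
            // ...which is still one well-formed key/value pair.
        }
    }
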
@@ -294,7 +320,7 @@ abstract trait TestUtils {
     def tokenKeySubstitution(s: String) = tokenSubstitution(tokenString(s))

     def tokenize(origin: ConfigOrigin, input: Reader): java.util.Iterator[Token] = {
-        Tokenizer.tokenize(origin, input)
+        Tokenizer.tokenize(origin, input, SyntaxFlavor.CONF)
     }

     def tokenize(input: Reader): java.util.Iterator[Token] = {