From 3ad6e71f53a87215fb5286bcf87de15524699561 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Sun, 12 Nov 2017 17:26:09 -0800 Subject: Move tests into their own crate --- .travis.yml | 1 + test-suite/Cargo.toml | 14 + test-suite/tests/README.md | 1 + test-suite/tests/backcompat.rs | 19 + test-suite/tests/datetime.rs | 58 +++ test-suite/tests/display-tricky.rs | 49 ++ test-suite/tests/display.rs | 103 ++++ test-suite/tests/formatting.rs | 54 ++ test-suite/tests/invalid-encoder-misc.rs | 14 + .../array-mixed-types-ints-and-floats.json | 15 + test-suite/tests/invalid-misc.rs | 17 + test-suite/tests/invalid.rs | 98 ++++ .../invalid/array-mixed-types-arrays-and-ints.toml | 1 + .../invalid/array-mixed-types-ints-and-floats.toml | 1 + .../array-mixed-types-strings-and-ints.toml | 1 + .../tests/invalid/datetime-malformed-no-leads.toml | 1 + .../tests/invalid/datetime-malformed-no-secs.toml | 1 + .../tests/invalid/datetime-malformed-no-t.toml | 1 + .../invalid/datetime-malformed-with-milli.toml | 1 + test-suite/tests/invalid/duplicate-key-table.toml | 5 + test-suite/tests/invalid/duplicate-keys.toml | 2 + test-suite/tests/invalid/duplicate-tables.toml | 2 + test-suite/tests/invalid/empty-implicit-table.toml | 1 + test-suite/tests/invalid/empty-table.toml | 1 + .../tests/invalid/float-no-leading-zero.toml | 2 + .../tests/invalid/float-no-trailing-digits.toml | 2 + test-suite/tests/invalid/key-after-array.toml | 1 + test-suite/tests/invalid/key-after-table.toml | 1 + test-suite/tests/invalid/key-empty.toml | 1 + test-suite/tests/invalid/key-hash.toml | 1 + test-suite/tests/invalid/key-newline.toml | 2 + test-suite/tests/invalid/key-open-bracket.toml | 1 + .../tests/invalid/key-single-open-bracket.toml | 1 + test-suite/tests/invalid/key-space.toml | 1 + test-suite/tests/invalid/key-start-bracket.toml | 3 + test-suite/tests/invalid/key-two-equals.toml | 1 + .../tests/invalid/string-bad-byte-escape.toml | 1 + test-suite/tests/invalid/string-bad-escape.toml | 1 + test-suite/tests/invalid/string-byte-escapes.toml | 1 + test-suite/tests/invalid/string-no-close.toml | 1 + test-suite/tests/invalid/table-array-implicit.toml | 14 + .../invalid/table-array-malformed-bracket.toml | 2 + .../tests/invalid/table-array-malformed-empty.toml | 2 + test-suite/tests/invalid/table-empty.toml | 1 + .../tests/invalid/table-nested-brackets-close.toml | 2 + .../tests/invalid/table-nested-brackets-open.toml | 2 + test-suite/tests/invalid/table-whitespace.toml | 1 + test-suite/tests/invalid/table-with-pound.toml | 2 + .../tests/invalid/text-after-array-entries.toml | 4 + test-suite/tests/invalid/text-after-integer.toml | 1 + test-suite/tests/invalid/text-after-string.toml | 1 + test-suite/tests/invalid/text-after-table.toml | 1 + .../tests/invalid/text-before-array-separator.toml | 4 + test-suite/tests/invalid/text-in-array.toml | 5 + test-suite/tests/parser.rs | 495 ++++++++++++++++++ test-suite/tests/pretty.rs | 308 +++++++++++ test-suite/tests/serde.rs | 578 +++++++++++++++++++++ test-suite/tests/tables-last.rs | 30 ++ test-suite/tests/valid.rs | 249 +++++++++ test-suite/tests/valid/array-empty.json | 11 + test-suite/tests/valid/array-empty.toml | 1 + test-suite/tests/valid/array-nospaces.json | 10 + test-suite/tests/valid/array-nospaces.toml | 1 + test-suite/tests/valid/arrays-hetergeneous.json | 19 + test-suite/tests/valid/arrays-hetergeneous.toml | 1 + test-suite/tests/valid/arrays-nested.json | 13 + test-suite/tests/valid/arrays-nested.toml | 1 + test-suite/tests/valid/arrays.json | 34 ++ 
test-suite/tests/valid/arrays.toml | 8 + test-suite/tests/valid/bool.json | 4 + test-suite/tests/valid/bool.toml | 2 + test-suite/tests/valid/comments-everywhere.json | 12 + test-suite/tests/valid/comments-everywhere.toml | 24 + test-suite/tests/valid/datetime-truncate.json | 6 + test-suite/tests/valid/datetime-truncate.toml | 1 + test-suite/tests/valid/datetime.json | 3 + test-suite/tests/valid/datetime.toml | 1 + test-suite/tests/valid/empty.json | 1 + test-suite/tests/valid/empty.toml | 0 test-suite/tests/valid/example-bom.toml | 5 + test-suite/tests/valid/example-v0.3.0.json | 1 + test-suite/tests/valid/example-v0.3.0.toml | 182 +++++++ test-suite/tests/valid/example-v0.4.0.json | 1 + test-suite/tests/valid/example-v0.4.0.toml | 235 +++++++++ test-suite/tests/valid/example.json | 14 + test-suite/tests/valid/example.toml | 5 + test-suite/tests/valid/example2.json | 1 + test-suite/tests/valid/example2.toml | 47 ++ test-suite/tests/valid/float.json | 4 + test-suite/tests/valid/float.toml | 2 + test-suite/tests/valid/hard_example.json | 1 + test-suite/tests/valid/hard_example.toml | 33 ++ .../tests/valid/implicit-and-explicit-after.json | 10 + .../tests/valid/implicit-and-explicit-after.toml | 5 + .../tests/valid/implicit-and-explicit-before.json | 10 + .../tests/valid/implicit-and-explicit-before.toml | 5 + test-suite/tests/valid/implicit-groups.json | 9 + test-suite/tests/valid/implicit-groups.toml | 2 + test-suite/tests/valid/integer.json | 4 + test-suite/tests/valid/integer.toml | 2 + test-suite/tests/valid/key-equals-nospace.json | 3 + test-suite/tests/valid/key-equals-nospace.toml | 1 + test-suite/tests/valid/key-quote-newline.json | 3 + test-suite/tests/valid/key-quote-newline.toml | 1 + test-suite/tests/valid/key-space.json | 3 + test-suite/tests/valid/key-space.toml | 1 + test-suite/tests/valid/key-special-chars.json | 5 + test-suite/tests/valid/key-special-chars.toml | 1 + test-suite/tests/valid/key-with-pound.json | 3 + test-suite/tests/valid/key-with-pound.toml | 1 + test-suite/tests/valid/long-float.json | 4 + test-suite/tests/valid/long-float.toml | 2 + test-suite/tests/valid/long-integer.json | 4 + test-suite/tests/valid/long-integer.toml | 2 + test-suite/tests/valid/multiline-string.json | 30 ++ test-suite/tests/valid/multiline-string.toml | 23 + test-suite/tests/valid/raw-multiline-string.json | 14 + test-suite/tests/valid/raw-multiline-string.toml | 9 + test-suite/tests/valid/raw-string.json | 30 ++ test-suite/tests/valid/raw-string.toml | 7 + test-suite/tests/valid/string-empty.json | 6 + test-suite/tests/valid/string-empty.toml | 1 + test-suite/tests/valid/string-escapes.json | 50 ++ test-suite/tests/valid/string-escapes.toml | 12 + test-suite/tests/valid/string-simple.json | 6 + test-suite/tests/valid/string-simple.toml | 1 + test-suite/tests/valid/string-with-pound.json | 7 + test-suite/tests/valid/string-with-pound.toml | 2 + test-suite/tests/valid/table-array-implicit.json | 7 + test-suite/tests/valid/table-array-implicit.toml | 2 + test-suite/tests/valid/table-array-many.json | 16 + test-suite/tests/valid/table-array-many.toml | 11 + .../tests/valid/table-array-nest-no-keys.json | 14 + .../tests/valid/table-array-nest-no-keys.toml | 6 + test-suite/tests/valid/table-array-nest.json | 18 + test-suite/tests/valid/table-array-nest.toml | 17 + test-suite/tests/valid/table-array-one.json | 8 + test-suite/tests/valid/table-array-one.toml | 3 + test-suite/tests/valid/table-empty.json | 3 + test-suite/tests/valid/table-empty.toml | 1 + 
test-suite/tests/valid/table-multi-empty.json | 5 + test-suite/tests/valid/table-multi-empty.toml | 5 + test-suite/tests/valid/table-sub-empty.json | 3 + test-suite/tests/valid/table-sub-empty.toml | 2 + test-suite/tests/valid/table-whitespace.json | 3 + test-suite/tests/valid/table-whitespace.toml | 1 + test-suite/tests/valid/table-with-pound.json | 5 + test-suite/tests/valid/table-with-pound.toml | 2 + test-suite/tests/valid/unicode-escape.json | 5 + test-suite/tests/valid/unicode-escape.toml | 3 + test-suite/tests/valid/unicode-literal.json | 3 + test-suite/tests/valid/unicode-literal.toml | 1 + tests/README.md | 1 - tests/backcompat.rs | 19 - tests/datetime.rs | 58 --- tests/display-tricky.rs | 49 -- tests/display.rs | 103 ---- tests/formatting.rs | 54 -- tests/invalid-encoder-misc.rs | 14 - .../array-mixed-types-ints-and-floats.json | 15 - tests/invalid-misc.rs | 17 - tests/invalid.rs | 98 ---- .../invalid/array-mixed-types-arrays-and-ints.toml | 1 - .../invalid/array-mixed-types-ints-and-floats.toml | 1 - .../array-mixed-types-strings-and-ints.toml | 1 - tests/invalid/datetime-malformed-no-leads.toml | 1 - tests/invalid/datetime-malformed-no-secs.toml | 1 - tests/invalid/datetime-malformed-no-t.toml | 1 - tests/invalid/datetime-malformed-with-milli.toml | 1 - tests/invalid/duplicate-key-table.toml | 5 - tests/invalid/duplicate-keys.toml | 2 - tests/invalid/duplicate-tables.toml | 2 - tests/invalid/empty-implicit-table.toml | 1 - tests/invalid/empty-table.toml | 1 - tests/invalid/float-no-leading-zero.toml | 2 - tests/invalid/float-no-trailing-digits.toml | 2 - tests/invalid/key-after-array.toml | 1 - tests/invalid/key-after-table.toml | 1 - tests/invalid/key-empty.toml | 1 - tests/invalid/key-hash.toml | 1 - tests/invalid/key-newline.toml | 2 - tests/invalid/key-open-bracket.toml | 1 - tests/invalid/key-single-open-bracket.toml | 1 - tests/invalid/key-space.toml | 1 - tests/invalid/key-start-bracket.toml | 3 - tests/invalid/key-two-equals.toml | 1 - tests/invalid/string-bad-byte-escape.toml | 1 - tests/invalid/string-bad-escape.toml | 1 - tests/invalid/string-byte-escapes.toml | 1 - tests/invalid/string-no-close.toml | 1 - tests/invalid/table-array-implicit.toml | 14 - tests/invalid/table-array-malformed-bracket.toml | 2 - tests/invalid/table-array-malformed-empty.toml | 2 - tests/invalid/table-empty.toml | 1 - tests/invalid/table-nested-brackets-close.toml | 2 - tests/invalid/table-nested-brackets-open.toml | 2 - tests/invalid/table-whitespace.toml | 1 - tests/invalid/table-with-pound.toml | 2 - tests/invalid/text-after-array-entries.toml | 4 - tests/invalid/text-after-integer.toml | 1 - tests/invalid/text-after-string.toml | 1 - tests/invalid/text-after-table.toml | 1 - tests/invalid/text-before-array-separator.toml | 4 - tests/invalid/text-in-array.toml | 5 - tests/parser.rs | 495 ------------------ tests/pretty.rs | 308 ----------- tests/serde.rs | 578 --------------------- tests/tables-last.rs | 30 -- tests/valid.rs | 249 --------- tests/valid/array-empty.json | 11 - tests/valid/array-empty.toml | 1 - tests/valid/array-nospaces.json | 10 - tests/valid/array-nospaces.toml | 1 - tests/valid/arrays-hetergeneous.json | 19 - tests/valid/arrays-hetergeneous.toml | 1 - tests/valid/arrays-nested.json | 13 - tests/valid/arrays-nested.toml | 1 - tests/valid/arrays.json | 34 -- tests/valid/arrays.toml | 8 - tests/valid/bool.json | 4 - tests/valid/bool.toml | 2 - tests/valid/comments-everywhere.json | 12 - tests/valid/comments-everywhere.toml | 24 - tests/valid/datetime-truncate.json | 6 - 
tests/valid/datetime-truncate.toml | 1 - tests/valid/datetime.json | 3 - tests/valid/datetime.toml | 1 - tests/valid/empty.json | 1 - tests/valid/empty.toml | 0 tests/valid/example-bom.toml | 5 - tests/valid/example-v0.3.0.json | 1 - tests/valid/example-v0.3.0.toml | 182 ------- tests/valid/example-v0.4.0.json | 1 - tests/valid/example-v0.4.0.toml | 235 --------- tests/valid/example.json | 14 - tests/valid/example.toml | 5 - tests/valid/example2.json | 1 - tests/valid/example2.toml | 47 -- tests/valid/float.json | 4 - tests/valid/float.toml | 2 - tests/valid/hard_example.json | 1 - tests/valid/hard_example.toml | 33 -- tests/valid/implicit-and-explicit-after.json | 10 - tests/valid/implicit-and-explicit-after.toml | 5 - tests/valid/implicit-and-explicit-before.json | 10 - tests/valid/implicit-and-explicit-before.toml | 5 - tests/valid/implicit-groups.json | 9 - tests/valid/implicit-groups.toml | 2 - tests/valid/integer.json | 4 - tests/valid/integer.toml | 2 - tests/valid/key-equals-nospace.json | 3 - tests/valid/key-equals-nospace.toml | 1 - tests/valid/key-quote-newline.json | 3 - tests/valid/key-quote-newline.toml | 1 - tests/valid/key-space.json | 3 - tests/valid/key-space.toml | 1 - tests/valid/key-special-chars.json | 5 - tests/valid/key-special-chars.toml | 1 - tests/valid/key-with-pound.json | 3 - tests/valid/key-with-pound.toml | 1 - tests/valid/long-float.json | 4 - tests/valid/long-float.toml | 2 - tests/valid/long-integer.json | 4 - tests/valid/long-integer.toml | 2 - tests/valid/multiline-string.json | 30 -- tests/valid/multiline-string.toml | 23 - tests/valid/raw-multiline-string.json | 14 - tests/valid/raw-multiline-string.toml | 9 - tests/valid/raw-string.json | 30 -- tests/valid/raw-string.toml | 7 - tests/valid/string-empty.json | 6 - tests/valid/string-empty.toml | 1 - tests/valid/string-escapes.json | 50 -- tests/valid/string-escapes.toml | 12 - tests/valid/string-simple.json | 6 - tests/valid/string-simple.toml | 1 - tests/valid/string-with-pound.json | 7 - tests/valid/string-with-pound.toml | 2 - tests/valid/table-array-implicit.json | 7 - tests/valid/table-array-implicit.toml | 2 - tests/valid/table-array-many.json | 16 - tests/valid/table-array-many.toml | 11 - tests/valid/table-array-nest-no-keys.json | 14 - tests/valid/table-array-nest-no-keys.toml | 6 - tests/valid/table-array-nest.json | 18 - tests/valid/table-array-nest.toml | 17 - tests/valid/table-array-one.json | 8 - tests/valid/table-array-one.toml | 3 - tests/valid/table-empty.json | 3 - tests/valid/table-empty.toml | 1 - tests/valid/table-multi-empty.json | 5 - tests/valid/table-multi-empty.toml | 5 - tests/valid/table-sub-empty.json | 3 - tests/valid/table-sub-empty.toml | 2 - tests/valid/table-whitespace.json | 3 - tests/valid/table-whitespace.toml | 1 - tests/valid/table-with-pound.json | 5 - tests/valid/table-with-pound.toml | 2 - tests/valid/unicode-escape.json | 5 - tests/valid/unicode-escape.toml | 3 - tests/valid/unicode-literal.json | 3 - tests/valid/unicode-literal.toml | 1 - 302 files changed, 3291 insertions(+), 3276 deletions(-) create mode 100644 test-suite/Cargo.toml create mode 100644 test-suite/tests/README.md create mode 100644 test-suite/tests/backcompat.rs create mode 100644 test-suite/tests/datetime.rs create mode 100644 test-suite/tests/display-tricky.rs create mode 100644 test-suite/tests/display.rs create mode 100644 test-suite/tests/formatting.rs create mode 100644 test-suite/tests/invalid-encoder-misc.rs create mode 100644 
test-suite/tests/invalid-encoder/array-mixed-types-ints-and-floats.json create mode 100644 test-suite/tests/invalid-misc.rs create mode 100644 test-suite/tests/invalid.rs create mode 100644 test-suite/tests/invalid/array-mixed-types-arrays-and-ints.toml create mode 100644 test-suite/tests/invalid/array-mixed-types-ints-and-floats.toml create mode 100644 test-suite/tests/invalid/array-mixed-types-strings-and-ints.toml create mode 100644 test-suite/tests/invalid/datetime-malformed-no-leads.toml create mode 100644 test-suite/tests/invalid/datetime-malformed-no-secs.toml create mode 100644 test-suite/tests/invalid/datetime-malformed-no-t.toml create mode 100644 test-suite/tests/invalid/datetime-malformed-with-milli.toml create mode 100644 test-suite/tests/invalid/duplicate-key-table.toml create mode 100644 test-suite/tests/invalid/duplicate-keys.toml create mode 100644 test-suite/tests/invalid/duplicate-tables.toml create mode 100644 test-suite/tests/invalid/empty-implicit-table.toml create mode 100644 test-suite/tests/invalid/empty-table.toml create mode 100644 test-suite/tests/invalid/float-no-leading-zero.toml create mode 100644 test-suite/tests/invalid/float-no-trailing-digits.toml create mode 100644 test-suite/tests/invalid/key-after-array.toml create mode 100644 test-suite/tests/invalid/key-after-table.toml create mode 100644 test-suite/tests/invalid/key-empty.toml create mode 100644 test-suite/tests/invalid/key-hash.toml create mode 100644 test-suite/tests/invalid/key-newline.toml create mode 100644 test-suite/tests/invalid/key-open-bracket.toml create mode 100644 test-suite/tests/invalid/key-single-open-bracket.toml create mode 100644 test-suite/tests/invalid/key-space.toml create mode 100644 test-suite/tests/invalid/key-start-bracket.toml create mode 100644 test-suite/tests/invalid/key-two-equals.toml create mode 100644 test-suite/tests/invalid/string-bad-byte-escape.toml create mode 100644 test-suite/tests/invalid/string-bad-escape.toml create mode 100644 test-suite/tests/invalid/string-byte-escapes.toml create mode 100644 test-suite/tests/invalid/string-no-close.toml create mode 100644 test-suite/tests/invalid/table-array-implicit.toml create mode 100644 test-suite/tests/invalid/table-array-malformed-bracket.toml create mode 100644 test-suite/tests/invalid/table-array-malformed-empty.toml create mode 100644 test-suite/tests/invalid/table-empty.toml create mode 100644 test-suite/tests/invalid/table-nested-brackets-close.toml create mode 100644 test-suite/tests/invalid/table-nested-brackets-open.toml create mode 100644 test-suite/tests/invalid/table-whitespace.toml create mode 100644 test-suite/tests/invalid/table-with-pound.toml create mode 100644 test-suite/tests/invalid/text-after-array-entries.toml create mode 100644 test-suite/tests/invalid/text-after-integer.toml create mode 100644 test-suite/tests/invalid/text-after-string.toml create mode 100644 test-suite/tests/invalid/text-after-table.toml create mode 100644 test-suite/tests/invalid/text-before-array-separator.toml create mode 100644 test-suite/tests/invalid/text-in-array.toml create mode 100644 test-suite/tests/parser.rs create mode 100644 test-suite/tests/pretty.rs create mode 100644 test-suite/tests/serde.rs create mode 100644 test-suite/tests/tables-last.rs create mode 100644 test-suite/tests/valid.rs create mode 100644 test-suite/tests/valid/array-empty.json create mode 100644 test-suite/tests/valid/array-empty.toml create mode 100644 test-suite/tests/valid/array-nospaces.json create mode 100644 
test-suite/tests/valid/array-nospaces.toml create mode 100644 test-suite/tests/valid/arrays-hetergeneous.json create mode 100644 test-suite/tests/valid/arrays-hetergeneous.toml create mode 100644 test-suite/tests/valid/arrays-nested.json create mode 100644 test-suite/tests/valid/arrays-nested.toml create mode 100644 test-suite/tests/valid/arrays.json create mode 100644 test-suite/tests/valid/arrays.toml create mode 100644 test-suite/tests/valid/bool.json create mode 100644 test-suite/tests/valid/bool.toml create mode 100644 test-suite/tests/valid/comments-everywhere.json create mode 100644 test-suite/tests/valid/comments-everywhere.toml create mode 100644 test-suite/tests/valid/datetime-truncate.json create mode 100644 test-suite/tests/valid/datetime-truncate.toml create mode 100644 test-suite/tests/valid/datetime.json create mode 100644 test-suite/tests/valid/datetime.toml create mode 100644 test-suite/tests/valid/empty.json create mode 100644 test-suite/tests/valid/empty.toml create mode 100644 test-suite/tests/valid/example-bom.toml create mode 100644 test-suite/tests/valid/example-v0.3.0.json create mode 100644 test-suite/tests/valid/example-v0.3.0.toml create mode 100644 test-suite/tests/valid/example-v0.4.0.json create mode 100644 test-suite/tests/valid/example-v0.4.0.toml create mode 100644 test-suite/tests/valid/example.json create mode 100644 test-suite/tests/valid/example.toml create mode 100644 test-suite/tests/valid/example2.json create mode 100644 test-suite/tests/valid/example2.toml create mode 100644 test-suite/tests/valid/float.json create mode 100644 test-suite/tests/valid/float.toml create mode 100644 test-suite/tests/valid/hard_example.json create mode 100644 test-suite/tests/valid/hard_example.toml create mode 100644 test-suite/tests/valid/implicit-and-explicit-after.json create mode 100644 test-suite/tests/valid/implicit-and-explicit-after.toml create mode 100644 test-suite/tests/valid/implicit-and-explicit-before.json create mode 100644 test-suite/tests/valid/implicit-and-explicit-before.toml create mode 100644 test-suite/tests/valid/implicit-groups.json create mode 100644 test-suite/tests/valid/implicit-groups.toml create mode 100644 test-suite/tests/valid/integer.json create mode 100644 test-suite/tests/valid/integer.toml create mode 100644 test-suite/tests/valid/key-equals-nospace.json create mode 100644 test-suite/tests/valid/key-equals-nospace.toml create mode 100644 test-suite/tests/valid/key-quote-newline.json create mode 100644 test-suite/tests/valid/key-quote-newline.toml create mode 100644 test-suite/tests/valid/key-space.json create mode 100644 test-suite/tests/valid/key-space.toml create mode 100644 test-suite/tests/valid/key-special-chars.json create mode 100644 test-suite/tests/valid/key-special-chars.toml create mode 100644 test-suite/tests/valid/key-with-pound.json create mode 100644 test-suite/tests/valid/key-with-pound.toml create mode 100644 test-suite/tests/valid/long-float.json create mode 100644 test-suite/tests/valid/long-float.toml create mode 100644 test-suite/tests/valid/long-integer.json create mode 100644 test-suite/tests/valid/long-integer.toml create mode 100644 test-suite/tests/valid/multiline-string.json create mode 100644 test-suite/tests/valid/multiline-string.toml create mode 100644 test-suite/tests/valid/raw-multiline-string.json create mode 100644 test-suite/tests/valid/raw-multiline-string.toml create mode 100644 test-suite/tests/valid/raw-string.json create mode 100644 test-suite/tests/valid/raw-string.toml create mode 100644 
test-suite/tests/valid/string-empty.json create mode 100644 test-suite/tests/valid/string-empty.toml create mode 100644 test-suite/tests/valid/string-escapes.json create mode 100644 test-suite/tests/valid/string-escapes.toml create mode 100644 test-suite/tests/valid/string-simple.json create mode 100644 test-suite/tests/valid/string-simple.toml create mode 100644 test-suite/tests/valid/string-with-pound.json create mode 100644 test-suite/tests/valid/string-with-pound.toml create mode 100644 test-suite/tests/valid/table-array-implicit.json create mode 100644 test-suite/tests/valid/table-array-implicit.toml create mode 100644 test-suite/tests/valid/table-array-many.json create mode 100644 test-suite/tests/valid/table-array-many.toml create mode 100644 test-suite/tests/valid/table-array-nest-no-keys.json create mode 100644 test-suite/tests/valid/table-array-nest-no-keys.toml create mode 100644 test-suite/tests/valid/table-array-nest.json create mode 100644 test-suite/tests/valid/table-array-nest.toml create mode 100644 test-suite/tests/valid/table-array-one.json create mode 100644 test-suite/tests/valid/table-array-one.toml create mode 100644 test-suite/tests/valid/table-empty.json create mode 100644 test-suite/tests/valid/table-empty.toml create mode 100644 test-suite/tests/valid/table-multi-empty.json create mode 100644 test-suite/tests/valid/table-multi-empty.toml create mode 100644 test-suite/tests/valid/table-sub-empty.json create mode 100644 test-suite/tests/valid/table-sub-empty.toml create mode 100644 test-suite/tests/valid/table-whitespace.json create mode 100644 test-suite/tests/valid/table-whitespace.toml create mode 100644 test-suite/tests/valid/table-with-pound.json create mode 100644 test-suite/tests/valid/table-with-pound.toml create mode 100644 test-suite/tests/valid/unicode-escape.json create mode 100644 test-suite/tests/valid/unicode-escape.toml create mode 100644 test-suite/tests/valid/unicode-literal.json create mode 100644 test-suite/tests/valid/unicode-literal.toml delete mode 100644 tests/README.md delete mode 100644 tests/backcompat.rs delete mode 100644 tests/datetime.rs delete mode 100644 tests/display-tricky.rs delete mode 100644 tests/display.rs delete mode 100644 tests/formatting.rs delete mode 100644 tests/invalid-encoder-misc.rs delete mode 100644 tests/invalid-encoder/array-mixed-types-ints-and-floats.json delete mode 100644 tests/invalid-misc.rs delete mode 100644 tests/invalid.rs delete mode 100644 tests/invalid/array-mixed-types-arrays-and-ints.toml delete mode 100644 tests/invalid/array-mixed-types-ints-and-floats.toml delete mode 100644 tests/invalid/array-mixed-types-strings-and-ints.toml delete mode 100644 tests/invalid/datetime-malformed-no-leads.toml delete mode 100644 tests/invalid/datetime-malformed-no-secs.toml delete mode 100644 tests/invalid/datetime-malformed-no-t.toml delete mode 100644 tests/invalid/datetime-malformed-with-milli.toml delete mode 100644 tests/invalid/duplicate-key-table.toml delete mode 100644 tests/invalid/duplicate-keys.toml delete mode 100644 tests/invalid/duplicate-tables.toml delete mode 100644 tests/invalid/empty-implicit-table.toml delete mode 100644 tests/invalid/empty-table.toml delete mode 100644 tests/invalid/float-no-leading-zero.toml delete mode 100644 tests/invalid/float-no-trailing-digits.toml delete mode 100644 tests/invalid/key-after-array.toml delete mode 100644 tests/invalid/key-after-table.toml delete mode 100644 tests/invalid/key-empty.toml delete mode 100644 tests/invalid/key-hash.toml delete mode 100644 
tests/invalid/key-newline.toml delete mode 100644 tests/invalid/key-open-bracket.toml delete mode 100644 tests/invalid/key-single-open-bracket.toml delete mode 100644 tests/invalid/key-space.toml delete mode 100644 tests/invalid/key-start-bracket.toml delete mode 100644 tests/invalid/key-two-equals.toml delete mode 100644 tests/invalid/string-bad-byte-escape.toml delete mode 100644 tests/invalid/string-bad-escape.toml delete mode 100644 tests/invalid/string-byte-escapes.toml delete mode 100644 tests/invalid/string-no-close.toml delete mode 100644 tests/invalid/table-array-implicit.toml delete mode 100644 tests/invalid/table-array-malformed-bracket.toml delete mode 100644 tests/invalid/table-array-malformed-empty.toml delete mode 100644 tests/invalid/table-empty.toml delete mode 100644 tests/invalid/table-nested-brackets-close.toml delete mode 100644 tests/invalid/table-nested-brackets-open.toml delete mode 100644 tests/invalid/table-whitespace.toml delete mode 100644 tests/invalid/table-with-pound.toml delete mode 100644 tests/invalid/text-after-array-entries.toml delete mode 100644 tests/invalid/text-after-integer.toml delete mode 100644 tests/invalid/text-after-string.toml delete mode 100644 tests/invalid/text-after-table.toml delete mode 100644 tests/invalid/text-before-array-separator.toml delete mode 100644 tests/invalid/text-in-array.toml delete mode 100644 tests/parser.rs delete mode 100644 tests/pretty.rs delete mode 100644 tests/serde.rs delete mode 100644 tests/tables-last.rs delete mode 100644 tests/valid.rs delete mode 100644 tests/valid/array-empty.json delete mode 100644 tests/valid/array-empty.toml delete mode 100644 tests/valid/array-nospaces.json delete mode 100644 tests/valid/array-nospaces.toml delete mode 100644 tests/valid/arrays-hetergeneous.json delete mode 100644 tests/valid/arrays-hetergeneous.toml delete mode 100644 tests/valid/arrays-nested.json delete mode 100644 tests/valid/arrays-nested.toml delete mode 100644 tests/valid/arrays.json delete mode 100644 tests/valid/arrays.toml delete mode 100644 tests/valid/bool.json delete mode 100644 tests/valid/bool.toml delete mode 100644 tests/valid/comments-everywhere.json delete mode 100644 tests/valid/comments-everywhere.toml delete mode 100644 tests/valid/datetime-truncate.json delete mode 100644 tests/valid/datetime-truncate.toml delete mode 100644 tests/valid/datetime.json delete mode 100644 tests/valid/datetime.toml delete mode 100644 tests/valid/empty.json delete mode 100644 tests/valid/empty.toml delete mode 100644 tests/valid/example-bom.toml delete mode 100644 tests/valid/example-v0.3.0.json delete mode 100644 tests/valid/example-v0.3.0.toml delete mode 100644 tests/valid/example-v0.4.0.json delete mode 100644 tests/valid/example-v0.4.0.toml delete mode 100644 tests/valid/example.json delete mode 100644 tests/valid/example.toml delete mode 100644 tests/valid/example2.json delete mode 100644 tests/valid/example2.toml delete mode 100644 tests/valid/float.json delete mode 100644 tests/valid/float.toml delete mode 100644 tests/valid/hard_example.json delete mode 100644 tests/valid/hard_example.toml delete mode 100644 tests/valid/implicit-and-explicit-after.json delete mode 100644 tests/valid/implicit-and-explicit-after.toml delete mode 100644 tests/valid/implicit-and-explicit-before.json delete mode 100644 tests/valid/implicit-and-explicit-before.toml delete mode 100644 tests/valid/implicit-groups.json delete mode 100644 tests/valid/implicit-groups.toml delete mode 100644 tests/valid/integer.json delete mode 100644 
tests/valid/integer.toml delete mode 100644 tests/valid/key-equals-nospace.json delete mode 100644 tests/valid/key-equals-nospace.toml delete mode 100644 tests/valid/key-quote-newline.json delete mode 100644 tests/valid/key-quote-newline.toml delete mode 100644 tests/valid/key-space.json delete mode 100644 tests/valid/key-space.toml delete mode 100644 tests/valid/key-special-chars.json delete mode 100644 tests/valid/key-special-chars.toml delete mode 100644 tests/valid/key-with-pound.json delete mode 100644 tests/valid/key-with-pound.toml delete mode 100644 tests/valid/long-float.json delete mode 100644 tests/valid/long-float.toml delete mode 100644 tests/valid/long-integer.json delete mode 100644 tests/valid/long-integer.toml delete mode 100644 tests/valid/multiline-string.json delete mode 100644 tests/valid/multiline-string.toml delete mode 100644 tests/valid/raw-multiline-string.json delete mode 100644 tests/valid/raw-multiline-string.toml delete mode 100644 tests/valid/raw-string.json delete mode 100644 tests/valid/raw-string.toml delete mode 100644 tests/valid/string-empty.json delete mode 100644 tests/valid/string-empty.toml delete mode 100644 tests/valid/string-escapes.json delete mode 100644 tests/valid/string-escapes.toml delete mode 100644 tests/valid/string-simple.json delete mode 100644 tests/valid/string-simple.toml delete mode 100644 tests/valid/string-with-pound.json delete mode 100644 tests/valid/string-with-pound.toml delete mode 100644 tests/valid/table-array-implicit.json delete mode 100644 tests/valid/table-array-implicit.toml delete mode 100644 tests/valid/table-array-many.json delete mode 100644 tests/valid/table-array-many.toml delete mode 100644 tests/valid/table-array-nest-no-keys.json delete mode 100644 tests/valid/table-array-nest-no-keys.toml delete mode 100644 tests/valid/table-array-nest.json delete mode 100644 tests/valid/table-array-nest.toml delete mode 100644 tests/valid/table-array-one.json delete mode 100644 tests/valid/table-array-one.toml delete mode 100644 tests/valid/table-empty.json delete mode 100644 tests/valid/table-empty.toml delete mode 100644 tests/valid/table-multi-empty.json delete mode 100644 tests/valid/table-multi-empty.toml delete mode 100644 tests/valid/table-sub-empty.json delete mode 100644 tests/valid/table-sub-empty.toml delete mode 100644 tests/valid/table-whitespace.json delete mode 100644 tests/valid/table-whitespace.toml delete mode 100644 tests/valid/table-with-pound.json delete mode 100644 tests/valid/table-with-pound.toml delete mode 100644 tests/valid/unicode-escape.json delete mode 100644 tests/valid/unicode-escape.toml delete mode 100644 tests/valid/unicode-literal.json delete mode 100644 tests/valid/unicode-literal.toml diff --git a/.travis.yml b/.travis.yml index e4c5407..0715ef1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,6 +9,7 @@ before_script: - pip install 'travis-cargo<0.2' --user && export PATH=$HOME/.local/bin:$PATH script: - cargo test + - cargo test --manifest-path test-suite/Cargo.toml - rustdoc --test README.md -L target - test "$TRAVIS_RUST_VERSION" != "1.15.0" && cargo doc --no-deps || echo "skipping cargo doc" after_success: diff --git a/test-suite/Cargo.toml b/test-suite/Cargo.toml new file mode 100644 index 0000000..7c91787 --- /dev/null +++ b/test-suite/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "toml_test_suite" +version = "0.0.0" +authors = ["Alex Crichton "] +publish = false + +[build-dependencies] +rustc_version = "0.2" + +[dev-dependencies] +toml = { path = ".." 
} +serde = "1.0" +serde_derive = "1.0" +serde_json = "1.0" diff --git a/test-suite/tests/README.md b/test-suite/tests/README.md new file mode 100644 index 0000000..ebbc01c --- /dev/null +++ b/test-suite/tests/README.md @@ -0,0 +1 @@ +Tests are from https://github.com/BurntSushi/toml-test diff --git a/test-suite/tests/backcompat.rs b/test-suite/tests/backcompat.rs new file mode 100644 index 0000000..1b3f599 --- /dev/null +++ b/test-suite/tests/backcompat.rs @@ -0,0 +1,19 @@ +extern crate toml; +extern crate serde; + +use serde::de::Deserialize; + +#[test] +fn main() { + let s = " + [a] foo = 1 + [[b]] foo = 1 + "; + assert!(s.parse::().is_err()); + + let mut d = toml::de::Deserializer::new(s); + d.set_require_newline_after_table(false); + let value = toml::Value::deserialize(&mut d).unwrap(); + assert_eq!(value["a"]["foo"].as_integer(), Some(1)); + assert_eq!(value["b"][0]["foo"].as_integer(), Some(1)); +} diff --git a/test-suite/tests/datetime.rs b/test-suite/tests/datetime.rs new file mode 100644 index 0000000..948e863 --- /dev/null +++ b/test-suite/tests/datetime.rs @@ -0,0 +1,58 @@ +extern crate toml; + +use std::str::FromStr; + +use toml::Value; + +#[test] +fn times() { + fn good(s: &str) { + let to_parse = format!("foo = {}", s); + let value = Value::from_str(&to_parse).unwrap(); + assert_eq!(value["foo"].as_datetime().unwrap().to_string(), s); + } + + good("1997-09-09T09:09:09Z"); + good("1997-09-09T09:09:09+09:09"); + good("1997-09-09T09:09:09-09:09"); + good("1997-09-09T09:09:09"); + good("1997-09-09"); + good("09:09:09"); + good("1997-09-09T09:09:09.09Z"); + good("1997-09-09T09:09:09.09+09:09"); + good("1997-09-09T09:09:09.09-09:09"); + good("1997-09-09T09:09:09.09"); + good("09:09:09.09"); +} + +#[test] +fn bad_times() { + fn bad(s: &str) { + let to_parse = format!("foo = {}", s); + assert!(Value::from_str(&to_parse).is_err()); + } + + bad("199-09-09"); + bad("199709-09"); + bad("1997-9-09"); + bad("1997-09-9"); + bad("1997-09-0909:09:09"); + bad("1997-09-09T09:09:09."); + bad("T"); + bad("T."); + bad("TZ"); + bad("1997-09-09T09:09:09.09+"); + bad("1997-09-09T09:09:09.09+09"); + bad("1997-09-09T09:09:09.09+09:9"); + bad("1997-09-09T09:09:09.09+0909"); + bad("1997-09-09T09:09:09.09-"); + bad("1997-09-09T09:09:09.09-09"); + bad("1997-09-09T09:09:09.09-09:9"); + bad("1997-09-09T09:09:09.09-0909"); + + bad("1997-00-09T09:09:09.09Z"); + bad("1997-09-00T09:09:09.09Z"); + bad("1997-09-09T30:09:09.09Z"); + bad("1997-09-09T12:69:09.09Z"); + bad("1997-09-09T12:09:69.09Z"); +} diff --git a/test-suite/tests/display-tricky.rs b/test-suite/tests/display-tricky.rs new file mode 100644 index 0000000..069e0f9 --- /dev/null +++ b/test-suite/tests/display-tricky.rs @@ -0,0 +1,49 @@ +extern crate toml; +#[macro_use] extern crate serde_derive; + +#[derive(Debug, Serialize, Deserialize)] +pub struct Recipe { + pub name: String, + pub description: Option, + #[serde(default)] + pub modules: Vec, + #[serde(default)] + pub packages: Vec +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct Modules { + pub name: String, + pub version: Option +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct Packages { + pub name: String, + pub version: Option +} + +#[test] +fn both_ends() { + let recipe_works = toml::from_str::(r#" + name = "testing" + description = "example" + modules = [] + + [[packages]] + name = "base" + "#).unwrap(); + toml::to_string(&recipe_works).unwrap(); + + let recipe_fails = toml::from_str::(r#" + name = "testing" + description = "example" + packages = [] + + [[modules]] + name = 
"base" + "#).unwrap(); + + let recipe_toml = toml::Value::try_from(recipe_fails).unwrap(); + recipe_toml.to_string(); +} diff --git a/test-suite/tests/display.rs b/test-suite/tests/display.rs new file mode 100644 index 0000000..ca4fdd8 --- /dev/null +++ b/test-suite/tests/display.rs @@ -0,0 +1,103 @@ +extern crate toml; + +use std::collections::BTreeMap; + +use toml::Value::{String, Integer, Float, Boolean, Array, Table}; + +macro_rules! map( ($($k:expr => $v:expr),*) => ({ + let mut _m = BTreeMap::new(); + $(_m.insert($k.to_string(), $v);)* + _m +}) ); + +#[test] +fn simple_show() { + assert_eq!(String("foo".to_string()).to_string(), + "\"foo\""); + assert_eq!(Integer(10).to_string(), + "10"); + assert_eq!(Float(10.0).to_string(), + "10.0"); + assert_eq!(Float(2.4).to_string(), + "2.4"); + assert_eq!(Boolean(true).to_string(), + "true"); + assert_eq!(Array(vec![]).to_string(), + "[]"); + assert_eq!(Array(vec![Integer(1), Integer(2)]).to_string(), + "[1, 2]"); +} + +#[test] +fn table() { + assert_eq!(Table(map! { }).to_string(), + ""); + assert_eq!(Table(map! { + "test" => Integer(2), + "test2" => Integer(3) }).to_string(), + "test = 2\ntest2 = 3\n"); + assert_eq!(Table(map! { + "test" => Integer(2), + "test2" => Table(map! { + "test" => String("wut".to_string()) + }) + }).to_string(), + "test = 2\n\ + \n\ + [test2]\n\ + test = \"wut\"\n"); + assert_eq!(Table(map! { + "test" => Integer(2), + "test2" => Table(map! { + "test" => String("wut".to_string()) + }) + }).to_string(), + "test = 2\n\ + \n\ + [test2]\n\ + test = \"wut\"\n"); + assert_eq!(Table(map! { + "test" => Integer(2), + "test2" => Array(vec![Table(map! { + "test" => String("wut".to_string()) + })]) + }).to_string(), + "test = 2\n\ + \n\ + [[test2]]\n\ + test = \"wut\"\n"); + assert_eq!(Table(map! { + "foo.bar" => Integer(2), + "foo\"bar" => Integer(2) + }).to_string(), + "\"foo\\\"bar\" = 2\n\ + \"foo.bar\" = 2\n"); + assert_eq!(Table(map! { + "test" => Integer(2), + "test2" => Array(vec![Table(map! { + "test" => Array(vec![Integer(2)]) + })]) + }).to_string(), + "test = 2\n\ + \n\ + [[test2]]\n\ + test = [2]\n"); + let table = Table(map! { + "test" => Integer(2), + "test2" => Array(vec![Table(map! { + "test" => Array(vec![Array(vec![Integer(2), Integer(3)]), + Array(vec![String("foo".to_string()), String("bar".to_string())])]) + })]) + }); + assert_eq!(table.to_string(), + "test = 2\n\ + \n\ + [[test2]]\n\ + test = [[2, 3], [\"foo\", \"bar\"]]\n"); + assert_eq!(Table(map! 
{ + "test" => Array(vec![Integer(2)]), + "test2" => Integer(2) + }).to_string(), + "test = [2]\n\ + test2 = 2\n"); +} diff --git a/test-suite/tests/formatting.rs b/test-suite/tests/formatting.rs new file mode 100644 index 0000000..4ba1418 --- /dev/null +++ b/test-suite/tests/formatting.rs @@ -0,0 +1,54 @@ +#[macro_use] +extern crate serde_derive; +extern crate toml; + +use toml::to_string; + +#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] +struct User { + pub name: String, + pub surname: String, +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] +struct Users { + pub user: Vec, +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] +struct TwoUsers { + pub user0: User, + pub user1: User, +} + +#[test] +fn no_unnecessary_newlines_array() { + assert!(!to_string(&Users { + user: vec![ + User { + name: "John".to_string(), + surname: "Doe".to_string(), + }, + User { + name: "Jane".to_string(), + surname: "Dough".to_string(), + }, + ], + }).unwrap() + .starts_with("\n")); +} + +#[test] +fn no_unnecessary_newlines_table() { + assert!(!to_string(&TwoUsers { + user0: User { + name: "John".to_string(), + surname: "Doe".to_string(), + }, + user1: User { + name: "Jane".to_string(), + surname: "Dough".to_string(), + }, + }).unwrap() + .starts_with("\n")); +} diff --git a/test-suite/tests/invalid-encoder-misc.rs b/test-suite/tests/invalid-encoder-misc.rs new file mode 100644 index 0000000..272f58f --- /dev/null +++ b/test-suite/tests/invalid-encoder-misc.rs @@ -0,0 +1,14 @@ +extern crate toml; + +use std::f64; + +#[test] +fn test_invalid_float_encode() { + fn bad(value: toml::Value) { + assert!(toml::to_string(&value).is_err()); + } + + bad(toml::Value::Float(f64::INFINITY)); + bad(toml::Value::Float(f64::NEG_INFINITY)); + bad(toml::Value::Float(f64::NAN)); +} diff --git a/test-suite/tests/invalid-encoder/array-mixed-types-ints-and-floats.json b/test-suite/tests/invalid-encoder/array-mixed-types-ints-and-floats.json new file mode 100644 index 0000000..2d42ead --- /dev/null +++ b/test-suite/tests/invalid-encoder/array-mixed-types-ints-and-floats.json @@ -0,0 +1,15 @@ +{ + "ints-and-floats": { + "type": "array", + "value": [ + { + "type": "integer", + "value": "1" + }, + { + "type": "float", + "value": "1.1" + } + ] + } +} diff --git a/test-suite/tests/invalid-misc.rs b/test-suite/tests/invalid-misc.rs new file mode 100644 index 0000000..bb70b97 --- /dev/null +++ b/test-suite/tests/invalid-misc.rs @@ -0,0 +1,17 @@ +extern crate toml; + +#[test] +fn bad() { + fn bad(s: &str) { + assert!(s.parse::().is_err()); + } + + bad("a = 01"); + bad("a = 1__1"); + bad("a = 1_"); + bad("''"); + bad("a = nan"); + bad("a = -inf"); + bad("a = inf"); + bad("a = 9e99999"); +} diff --git a/test-suite/tests/invalid.rs b/test-suite/tests/invalid.rs new file mode 100644 index 0000000..4679684 --- /dev/null +++ b/test-suite/tests/invalid.rs @@ -0,0 +1,98 @@ +extern crate toml; + +fn run(toml: &str) { + println!("test if invalid:\n{}", toml); + if let Ok(e) = toml.parse::() { + panic!("parsed to: {:#?}", e); + } +} + +macro_rules! 
test( ($name:ident, $toml:expr) => ( + #[test] + fn $name() { run($toml); } +) ); + +test!(array_mixed_types_arrays_and_ints, + include_str!("invalid/array-mixed-types-arrays-and-ints.toml")); +test!(array_mixed_types_ints_and_floats, + include_str!("invalid/array-mixed-types-ints-and-floats.toml")); +test!(array_mixed_types_strings_and_ints, + include_str!("invalid/array-mixed-types-strings-and-ints.toml")); +test!(datetime_malformed_no_leads, + include_str!("invalid/datetime-malformed-no-leads.toml")); +test!(datetime_malformed_no_secs, + include_str!("invalid/datetime-malformed-no-secs.toml")); +test!(datetime_malformed_no_t, + include_str!("invalid/datetime-malformed-no-t.toml")); +test!(datetime_malformed_with_milli, + include_str!("invalid/datetime-malformed-with-milli.toml")); +test!(duplicate_keys, + include_str!("invalid/duplicate-keys.toml")); +test!(duplicate_key_table, + include_str!("invalid/duplicate-key-table.toml")); +test!(duplicate_tables, + include_str!("invalid/duplicate-tables.toml")); +test!(empty_implicit_table, + include_str!("invalid/empty-implicit-table.toml")); +test!(empty_table, + include_str!("invalid/empty-table.toml")); +test!(float_no_leading_zero, + include_str!("invalid/float-no-leading-zero.toml")); +test!(float_no_trailing_digits, + include_str!("invalid/float-no-trailing-digits.toml")); +test!(key_after_array, + include_str!("invalid/key-after-array.toml")); +test!(key_after_table, + include_str!("invalid/key-after-table.toml")); +test!(key_empty, + include_str!("invalid/key-empty.toml")); +test!(key_hash, + include_str!("invalid/key-hash.toml")); +test!(key_newline, + include_str!("invalid/key-newline.toml")); +test!(key_open_bracket, + include_str!("invalid/key-open-bracket.toml")); +test!(key_single_open_bracket, + include_str!("invalid/key-single-open-bracket.toml")); +test!(key_space, + include_str!("invalid/key-space.toml")); +test!(key_start_bracket, + include_str!("invalid/key-start-bracket.toml")); +test!(key_two_equals, + include_str!("invalid/key-two-equals.toml")); +test!(string_bad_byte_escape, + include_str!("invalid/string-bad-byte-escape.toml")); +test!(string_bad_escape, + include_str!("invalid/string-bad-escape.toml")); +test!(string_byte_escapes, + include_str!("invalid/string-byte-escapes.toml")); +test!(string_no_close, + include_str!("invalid/string-no-close.toml")); +test!(table_array_implicit, + include_str!("invalid/table-array-implicit.toml")); +test!(table_array_malformed_bracket, + include_str!("invalid/table-array-malformed-bracket.toml")); +test!(table_array_malformed_empty, + include_str!("invalid/table-array-malformed-empty.toml")); +test!(table_empty, + include_str!("invalid/table-empty.toml")); +test!(table_nested_brackets_close, + include_str!("invalid/table-nested-brackets-close.toml")); +test!(table_nested_brackets_open, + include_str!("invalid/table-nested-brackets-open.toml")); +test!(table_whitespace, + include_str!("invalid/table-whitespace.toml")); +test!(table_with_pound, + include_str!("invalid/table-with-pound.toml")); +test!(text_after_array_entries, + include_str!("invalid/text-after-array-entries.toml")); +test!(text_after_integer, + include_str!("invalid/text-after-integer.toml")); +test!(text_after_string, + include_str!("invalid/text-after-string.toml")); +test!(text_after_table, + include_str!("invalid/text-after-table.toml")); +test!(text_before_array_separator, + include_str!("invalid/text-before-array-separator.toml")); +test!(text_in_array, + include_str!("invalid/text-in-array.toml")); diff --git 
a/test-suite/tests/invalid/array-mixed-types-arrays-and-ints.toml b/test-suite/tests/invalid/array-mixed-types-arrays-and-ints.toml new file mode 100644 index 0000000..051ec73 --- /dev/null +++ b/test-suite/tests/invalid/array-mixed-types-arrays-and-ints.toml @@ -0,0 +1 @@ +arrays-and-ints = [1, ["Arrays are not integers."]] diff --git a/test-suite/tests/invalid/array-mixed-types-ints-and-floats.toml b/test-suite/tests/invalid/array-mixed-types-ints-and-floats.toml new file mode 100644 index 0000000..a5aa9b7 --- /dev/null +++ b/test-suite/tests/invalid/array-mixed-types-ints-and-floats.toml @@ -0,0 +1 @@ +ints-and-floats = [1, 1.1] diff --git a/test-suite/tests/invalid/array-mixed-types-strings-and-ints.toml b/test-suite/tests/invalid/array-mixed-types-strings-and-ints.toml new file mode 100644 index 0000000..f348308 --- /dev/null +++ b/test-suite/tests/invalid/array-mixed-types-strings-and-ints.toml @@ -0,0 +1 @@ +strings-and-ints = ["hi", 42] diff --git a/test-suite/tests/invalid/datetime-malformed-no-leads.toml b/test-suite/tests/invalid/datetime-malformed-no-leads.toml new file mode 100644 index 0000000..123f173 --- /dev/null +++ b/test-suite/tests/invalid/datetime-malformed-no-leads.toml @@ -0,0 +1 @@ +no-leads = 1987-7-05T17:45:00Z diff --git a/test-suite/tests/invalid/datetime-malformed-no-secs.toml b/test-suite/tests/invalid/datetime-malformed-no-secs.toml new file mode 100644 index 0000000..ba93900 --- /dev/null +++ b/test-suite/tests/invalid/datetime-malformed-no-secs.toml @@ -0,0 +1 @@ +no-secs = 1987-07-05T17:45Z diff --git a/test-suite/tests/invalid/datetime-malformed-no-t.toml b/test-suite/tests/invalid/datetime-malformed-no-t.toml new file mode 100644 index 0000000..617e3c5 --- /dev/null +++ b/test-suite/tests/invalid/datetime-malformed-no-t.toml @@ -0,0 +1 @@ +no-t = 1987-07-0517:45:00Z diff --git a/test-suite/tests/invalid/datetime-malformed-with-milli.toml b/test-suite/tests/invalid/datetime-malformed-with-milli.toml new file mode 100644 index 0000000..eef792f --- /dev/null +++ b/test-suite/tests/invalid/datetime-malformed-with-milli.toml @@ -0,0 +1 @@ +with-milli = 1987-07-5T17:45:00.12Z diff --git a/test-suite/tests/invalid/duplicate-key-table.toml b/test-suite/tests/invalid/duplicate-key-table.toml new file mode 100644 index 0000000..cedf05f --- /dev/null +++ b/test-suite/tests/invalid/duplicate-key-table.toml @@ -0,0 +1,5 @@ +[fruit] +type = "apple" + +[fruit.type] +apple = "yes" diff --git a/test-suite/tests/invalid/duplicate-keys.toml b/test-suite/tests/invalid/duplicate-keys.toml new file mode 100644 index 0000000..9b5aee0 --- /dev/null +++ b/test-suite/tests/invalid/duplicate-keys.toml @@ -0,0 +1,2 @@ +dupe = false +dupe = true diff --git a/test-suite/tests/invalid/duplicate-tables.toml b/test-suite/tests/invalid/duplicate-tables.toml new file mode 100644 index 0000000..8ddf49b --- /dev/null +++ b/test-suite/tests/invalid/duplicate-tables.toml @@ -0,0 +1,2 @@ +[a] +[a] diff --git a/test-suite/tests/invalid/empty-implicit-table.toml b/test-suite/tests/invalid/empty-implicit-table.toml new file mode 100644 index 0000000..0cc36d0 --- /dev/null +++ b/test-suite/tests/invalid/empty-implicit-table.toml @@ -0,0 +1 @@ +[naughty..naughty] diff --git a/test-suite/tests/invalid/empty-table.toml b/test-suite/tests/invalid/empty-table.toml new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/test-suite/tests/invalid/empty-table.toml @@ -0,0 +1 @@ +[] diff --git a/test-suite/tests/invalid/float-no-leading-zero.toml 
b/test-suite/tests/invalid/float-no-leading-zero.toml new file mode 100644 index 0000000..cab76bf --- /dev/null +++ b/test-suite/tests/invalid/float-no-leading-zero.toml @@ -0,0 +1,2 @@ +answer = .12345 +neganswer = -.12345 diff --git a/test-suite/tests/invalid/float-no-trailing-digits.toml b/test-suite/tests/invalid/float-no-trailing-digits.toml new file mode 100644 index 0000000..cbff2d0 --- /dev/null +++ b/test-suite/tests/invalid/float-no-trailing-digits.toml @@ -0,0 +1,2 @@ +answer = 1. +neganswer = -1. diff --git a/test-suite/tests/invalid/key-after-array.toml b/test-suite/tests/invalid/key-after-array.toml new file mode 100644 index 0000000..5c1a1b0 --- /dev/null +++ b/test-suite/tests/invalid/key-after-array.toml @@ -0,0 +1 @@ +[[agencies]] owner = "S Cjelli" diff --git a/test-suite/tests/invalid/key-after-table.toml b/test-suite/tests/invalid/key-after-table.toml new file mode 100644 index 0000000..4bc8213 --- /dev/null +++ b/test-suite/tests/invalid/key-after-table.toml @@ -0,0 +1 @@ +[history] guard = "sleeping" diff --git a/test-suite/tests/invalid/key-empty.toml b/test-suite/tests/invalid/key-empty.toml new file mode 100644 index 0000000..09f998f --- /dev/null +++ b/test-suite/tests/invalid/key-empty.toml @@ -0,0 +1 @@ + = 1 diff --git a/test-suite/tests/invalid/key-hash.toml b/test-suite/tests/invalid/key-hash.toml new file mode 100644 index 0000000..e321b1f --- /dev/null +++ b/test-suite/tests/invalid/key-hash.toml @@ -0,0 +1 @@ +a# = 1 diff --git a/test-suite/tests/invalid/key-newline.toml b/test-suite/tests/invalid/key-newline.toml new file mode 100644 index 0000000..707aad5 --- /dev/null +++ b/test-suite/tests/invalid/key-newline.toml @@ -0,0 +1,2 @@ +a += 1 diff --git a/test-suite/tests/invalid/key-open-bracket.toml b/test-suite/tests/invalid/key-open-bracket.toml new file mode 100644 index 0000000..f0aeb16 --- /dev/null +++ b/test-suite/tests/invalid/key-open-bracket.toml @@ -0,0 +1 @@ +[abc = 1 diff --git a/test-suite/tests/invalid/key-single-open-bracket.toml b/test-suite/tests/invalid/key-single-open-bracket.toml new file mode 100644 index 0000000..8e2f0be --- /dev/null +++ b/test-suite/tests/invalid/key-single-open-bracket.toml @@ -0,0 +1 @@ +[ \ No newline at end of file diff --git a/test-suite/tests/invalid/key-space.toml b/test-suite/tests/invalid/key-space.toml new file mode 100644 index 0000000..201806d --- /dev/null +++ b/test-suite/tests/invalid/key-space.toml @@ -0,0 +1 @@ +a b = 1 \ No newline at end of file diff --git a/test-suite/tests/invalid/key-start-bracket.toml b/test-suite/tests/invalid/key-start-bracket.toml new file mode 100644 index 0000000..e0597ae --- /dev/null +++ b/test-suite/tests/invalid/key-start-bracket.toml @@ -0,0 +1,3 @@ +[a] +[xyz = 5 +[b] diff --git a/test-suite/tests/invalid/key-two-equals.toml b/test-suite/tests/invalid/key-two-equals.toml new file mode 100644 index 0000000..25a0378 --- /dev/null +++ b/test-suite/tests/invalid/key-two-equals.toml @@ -0,0 +1 @@ +key= = 1 diff --git a/test-suite/tests/invalid/string-bad-byte-escape.toml b/test-suite/tests/invalid/string-bad-byte-escape.toml new file mode 100644 index 0000000..4c7be59 --- /dev/null +++ b/test-suite/tests/invalid/string-bad-byte-escape.toml @@ -0,0 +1 @@ +naughty = "\xAg" diff --git a/test-suite/tests/invalid/string-bad-escape.toml b/test-suite/tests/invalid/string-bad-escape.toml new file mode 100644 index 0000000..60acb0c --- /dev/null +++ b/test-suite/tests/invalid/string-bad-escape.toml @@ -0,0 +1 @@ +invalid-escape = "This string has a bad \a escape character." 
diff --git a/test-suite/tests/invalid/string-byte-escapes.toml b/test-suite/tests/invalid/string-byte-escapes.toml new file mode 100644 index 0000000..e94452a --- /dev/null +++ b/test-suite/tests/invalid/string-byte-escapes.toml @@ -0,0 +1 @@ +answer = "\x33" diff --git a/test-suite/tests/invalid/string-no-close.toml b/test-suite/tests/invalid/string-no-close.toml new file mode 100644 index 0000000..0c292fc --- /dev/null +++ b/test-suite/tests/invalid/string-no-close.toml @@ -0,0 +1 @@ +no-ending-quote = "One time, at band camp diff --git a/test-suite/tests/invalid/table-array-implicit.toml b/test-suite/tests/invalid/table-array-implicit.toml new file mode 100644 index 0000000..05f2507 --- /dev/null +++ b/test-suite/tests/invalid/table-array-implicit.toml @@ -0,0 +1,14 @@ +# This test is a bit tricky. It should fail because the first use of +# `[[albums.songs]]` without first declaring `albums` implies that `albums` +# must be a table. The alternative would be quite weird. Namely, it wouldn't +# comply with the TOML spec: "Each double-bracketed sub-table will belong to +# the most *recently* defined table element *above* it." +# +# This is in contrast to the *valid* test, table-array-implicit where +# `[[albums.songs]]` works by itself, so long as `[[albums]]` isn't declared +# later. (Although, `[albums]` could be.) +[[albums.songs]] +name = "Glory Days" + +[[albums]] +name = "Born in the USA" diff --git a/test-suite/tests/invalid/table-array-malformed-bracket.toml b/test-suite/tests/invalid/table-array-malformed-bracket.toml new file mode 100644 index 0000000..39c73b0 --- /dev/null +++ b/test-suite/tests/invalid/table-array-malformed-bracket.toml @@ -0,0 +1,2 @@ +[[albums] +name = "Born to Run" diff --git a/test-suite/tests/invalid/table-array-malformed-empty.toml b/test-suite/tests/invalid/table-array-malformed-empty.toml new file mode 100644 index 0000000..a470ca3 --- /dev/null +++ b/test-suite/tests/invalid/table-array-malformed-empty.toml @@ -0,0 +1,2 @@ +[[]] +name = "Born to Run" diff --git a/test-suite/tests/invalid/table-empty.toml b/test-suite/tests/invalid/table-empty.toml new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/test-suite/tests/invalid/table-empty.toml @@ -0,0 +1 @@ +[] diff --git a/test-suite/tests/invalid/table-nested-brackets-close.toml b/test-suite/tests/invalid/table-nested-brackets-close.toml new file mode 100644 index 0000000..c8b5a67 --- /dev/null +++ b/test-suite/tests/invalid/table-nested-brackets-close.toml @@ -0,0 +1,2 @@ +[a]b] +zyx = 42 diff --git a/test-suite/tests/invalid/table-nested-brackets-open.toml b/test-suite/tests/invalid/table-nested-brackets-open.toml new file mode 100644 index 0000000..246d7e9 --- /dev/null +++ b/test-suite/tests/invalid/table-nested-brackets-open.toml @@ -0,0 +1,2 @@ +[a[b] +zyx = 42 diff --git a/test-suite/tests/invalid/table-whitespace.toml b/test-suite/tests/invalid/table-whitespace.toml new file mode 100644 index 0000000..79bbcb1 --- /dev/null +++ b/test-suite/tests/invalid/table-whitespace.toml @@ -0,0 +1 @@ +[invalid key] \ No newline at end of file diff --git a/test-suite/tests/invalid/table-with-pound.toml b/test-suite/tests/invalid/table-with-pound.toml new file mode 100644 index 0000000..0d8edb5 --- /dev/null +++ b/test-suite/tests/invalid/table-with-pound.toml @@ -0,0 +1,2 @@ +[key#group] +answer = 42 \ No newline at end of file diff --git a/test-suite/tests/invalid/text-after-array-entries.toml b/test-suite/tests/invalid/text-after-array-entries.toml new file mode 100644 index 0000000..1a72890 --- 
/dev/null +++ b/test-suite/tests/invalid/text-after-array-entries.toml @@ -0,0 +1,4 @@ +array = [ + "Is there life after an array separator?", No + "Entry" +] diff --git a/test-suite/tests/invalid/text-after-integer.toml b/test-suite/tests/invalid/text-after-integer.toml new file mode 100644 index 0000000..42de7af --- /dev/null +++ b/test-suite/tests/invalid/text-after-integer.toml @@ -0,0 +1 @@ +answer = 42 the ultimate answer? diff --git a/test-suite/tests/invalid/text-after-string.toml b/test-suite/tests/invalid/text-after-string.toml new file mode 100644 index 0000000..c92a6f1 --- /dev/null +++ b/test-suite/tests/invalid/text-after-string.toml @@ -0,0 +1 @@ +string = "Is there life after strings?" No. diff --git a/test-suite/tests/invalid/text-after-table.toml b/test-suite/tests/invalid/text-after-table.toml new file mode 100644 index 0000000..87da9db --- /dev/null +++ b/test-suite/tests/invalid/text-after-table.toml @@ -0,0 +1 @@ +[error] this shouldn't be here diff --git a/test-suite/tests/invalid/text-before-array-separator.toml b/test-suite/tests/invalid/text-before-array-separator.toml new file mode 100644 index 0000000..9b06a39 --- /dev/null +++ b/test-suite/tests/invalid/text-before-array-separator.toml @@ -0,0 +1,4 @@ +array = [ + "Is there life before an array separator?" No, + "Entry" +] diff --git a/test-suite/tests/invalid/text-in-array.toml b/test-suite/tests/invalid/text-in-array.toml new file mode 100644 index 0000000..a6a6c42 --- /dev/null +++ b/test-suite/tests/invalid/text-in-array.toml @@ -0,0 +1,5 @@ +array = [ + "Entry 1", + I don't belong, + "Entry 2", +] diff --git a/test-suite/tests/parser.rs b/test-suite/tests/parser.rs new file mode 100644 index 0000000..2282416 --- /dev/null +++ b/test-suite/tests/parser.rs @@ -0,0 +1,495 @@ +extern crate toml; + +use toml::Value; + +macro_rules! bad { + ($s:expr, $msg:expr) => ({ + match $s.parse::() { + Ok(s) => panic!("successfully parsed as {}", s), + Err(e) => { + let e = e.to_string(); + assert!(e.contains($msg), "error: {}", e); + } + } + }) +} + +#[test] +fn crlf() { + "\ +[project]\r\n\ +\r\n\ +name = \"splay\"\r\n\ +version = \"0.1.0\"\r\n\ +authors = [\"alex@crichton.co\"]\r\n\ +\r\n\ +[[lib]]\r\n\ +\r\n\ +path = \"lib.rs\"\r\n\ +name = \"splay\"\r\n\ +description = \"\"\"\ +A Rust implementation of a TAR file reader and writer. This library does not\r\n\ +currently handle compression, but it is abstract over all I/O readers and\r\n\ +writers. Additionally, great lengths are taken to ensure that the entire\r\n\ +contents are never required to be entirely resident in memory all at once.\r\n\ +\"\"\"\ +".parse::().unwrap(); +} + +#[test] +fn fun_with_strings() { + let table = r#" +bar = "\U00000000" +key1 = "One\nTwo" +key2 = """One\nTwo""" +key3 = """ +One +Two""" + +key4 = "The quick brown fox jumps over the lazy dog." +key5 = """ +The quick brown \ + + +fox jumps over \ +the lazy dog.""" +key6 = """\ + The quick brown \ + fox jumps over \ + the lazy dog.\ + """ +# What you see is what you get. +winpath = 'C:\Users\nodejs\templates' +winpath2 = '\\ServerX\admin$\system32\' +quoted = 'Tom "Dubs" Preston-Werner' +regex = '<\i\c*\s*>' + +regex2 = '''I [dw]on't need \d{2} apples''' +lines = ''' +The first newline is +trimmed in raw strings. +All other whitespace +is preserved. 
+''' +"#.parse::<Value>().unwrap(); + assert_eq!(table["bar"].as_str(), Some("\0")); + assert_eq!(table["key1"].as_str(), Some("One\nTwo")); + assert_eq!(table["key2"].as_str(), Some("One\nTwo")); + assert_eq!(table["key3"].as_str(), Some("One\nTwo")); + + let msg = "The quick brown fox jumps over the lazy dog."; + assert_eq!(table["key4"].as_str(), Some(msg)); + assert_eq!(table["key5"].as_str(), Some(msg)); + assert_eq!(table["key6"].as_str(), Some(msg)); + + assert_eq!(table["winpath"].as_str(), Some(r"C:\Users\nodejs\templates")); + assert_eq!(table["winpath2"].as_str(), Some(r"\\ServerX\admin$\system32\")); + assert_eq!(table["quoted"].as_str(), Some(r#"Tom "Dubs" Preston-Werner"#)); + assert_eq!(table["regex"].as_str(), Some(r"<\i\c*\s*>")); + assert_eq!(table["regex2"].as_str(), Some(r"I [dw]on't need \d{2} apples")); + assert_eq!(table["lines"].as_str(), + Some("The first newline is\n\ + trimmed in raw strings.\n\ + All other whitespace\n\ + is preserved.\n")); +} + +#[test] +fn tables_in_arrays() { + let table = r#" +[[foo]] +#… +[foo.bar] +#… + +[[foo]] # ... +#… +[foo.bar] +#... +"#.parse::<Value>().unwrap(); + table["foo"][0]["bar"].as_table().unwrap(); + table["foo"][1]["bar"].as_table().unwrap(); +} + +#[test] +fn empty_table() { + let table = r#" +[foo]"#.parse::<Value>().unwrap(); + table["foo"].as_table().unwrap(); +} + +#[test] +fn fruit() { + let table = r#" +[[fruit]] +name = "apple" + +[fruit.physical] +color = "red" +shape = "round" + +[[fruit.variety]] +name = "red delicious" + +[[fruit.variety]] +name = "granny smith" + +[[fruit]] +name = "banana" + +[[fruit.variety]] +name = "plantain" +"#.parse::<Value>().unwrap(); + assert_eq!(table["fruit"][0]["name"].as_str(), Some("apple")); + assert_eq!(table["fruit"][0]["physical"]["color"].as_str(), Some("red")); + assert_eq!(table["fruit"][0]["physical"]["shape"].as_str(), Some("round")); + assert_eq!(table["fruit"][0]["variety"][0]["name"].as_str(), Some("red delicious")); + assert_eq!(table["fruit"][0]["variety"][1]["name"].as_str(), Some("granny smith")); + assert_eq!(table["fruit"][1]["name"].as_str(), Some("banana")); + assert_eq!(table["fruit"][1]["variety"][0]["name"].as_str(), Some("plantain")); +} + +#[test] +fn stray_cr() { + "\r".parse::<Value>().unwrap_err(); + "a = [ \r ]".parse::<Value>().unwrap_err(); + "a = \"\"\"\r\"\"\"".parse::<Value>().unwrap_err(); + "a = \"\"\"\\ \r \"\"\"".parse::<Value>().unwrap_err(); + "a = '''\r'''".parse::<Value>().unwrap_err(); + "a = '\r'".parse::<Value>().unwrap_err(); + "a = \"\r\"".parse::<Value>().unwrap_err(); +} + +#[test] +fn blank_literal_string() { + let table = "foo = ''".parse::<Value>().unwrap(); + assert_eq!(table["foo"].as_str(), Some("")); +} + +#[test] +fn many_blank() { + let table = "foo = \"\"\"\n\n\n\"\"\"".parse::<Value>().unwrap(); + assert_eq!(table["foo"].as_str(), Some("\n\n")); +} + +#[test] +fn literal_eats_crlf() { + let table = " + foo = \"\"\"\\\r\n\"\"\" + bar = \"\"\"\\\r\n \r\n \r\n a\"\"\" + ".parse::<Value>().unwrap(); + assert_eq!(table["foo"].as_str(), Some("")); + assert_eq!(table["bar"].as_str(), Some("a")); +} + +#[test] +fn string_no_newline() { + "a = \"\n\"".parse::<Value>().unwrap_err(); + "a = '\n'".parse::<Value>().unwrap_err(); +} + +#[test] +fn bad_leading_zeros() { + "a = 00".parse::<Value>().unwrap_err(); + "a = -00".parse::<Value>().unwrap_err(); + "a = +00".parse::<Value>().unwrap_err(); + "a = 00.0".parse::<Value>().unwrap_err(); + "a = -00.0".parse::<Value>().unwrap_err(); + "a = +00.0".parse::<Value>().unwrap_err(); + "a = 9223372036854775808".parse::<Value>().unwrap_err(); + "a = -9223372036854775809".parse::<Value>().unwrap_err(); +} + +#[test] +fn bad_floats() { + "a =
0.".parse::<Value>().unwrap_err(); + "a = 0.e".parse::<Value>().unwrap_err(); + "a = 0.E".parse::<Value>().unwrap_err(); + "a = 0.0E".parse::<Value>().unwrap_err(); + "a = 0.0e".parse::<Value>().unwrap_err(); + "a = 0.0e-".parse::<Value>().unwrap_err(); + "a = 0.0e+".parse::<Value>().unwrap_err(); + "a = 0.0e+00".parse::<Value>().unwrap_err(); +} + +#[test] +fn floats() { + macro_rules! t { + ($actual:expr, $expected:expr) => ({ + let f = format!("foo = {}", $actual); + println!("{}", f); + let a = f.parse::<Value>().unwrap(); + assert_eq!(a["foo"].as_float().unwrap(), $expected); + }) + } + + t!("1.0", 1.0); + t!("1.0e0", 1.0); + t!("1.0e+0", 1.0); + t!("1.0e-0", 1.0); + t!("1.001e-0", 1.001); + t!("2e10", 2e10); + t!("2e+10", 2e10); + t!("2e-10", 2e-10); + t!("2_0.0", 20.0); + t!("2_0.0_0e1_0", 20.0e10); + t!("2_0.1_0e1_0", 20.1e10); +} + +#[test] +fn bare_key_names() { + let a = " + foo = 3 + foo_3 = 3 + foo_-2--3--r23f--4-f2-4 = 3 + _ = 3 + - = 3 + 8 = 8 + \"a\" = 3 + \"!\" = 3 + \"a^b\" = 3 + \"\\\"\" = 3 + \"character encoding\" = \"value\" + 'ʎǝʞ' = \"value\" + ".parse::<Value>().unwrap(); + &a["foo"]; + &a["-"]; + &a["_"]; + &a["8"]; + &a["foo_3"]; + &a["foo_-2--3--r23f--4-f2-4"]; + &a["a"]; + &a["!"]; + &a["\""]; + &a["character encoding"]; + &a["ʎǝʞ"]; +} + +#[test] +fn bad_keys() { + "key\n=3".parse::<Value>().unwrap_err(); + "key=\n3".parse::<Value>().unwrap_err(); + "key|=3".parse::<Value>().unwrap_err(); + "\"\"=3".parse::<Value>().unwrap_err(); + "=3".parse::<Value>().unwrap_err(); + "\"\"|=3".parse::<Value>().unwrap_err(); + "\"\n\"|=3".parse::<Value>().unwrap_err(); + "\"\r\"|=3".parse::<Value>().unwrap_err(); +} + +#[test] +fn bad_table_names() { + "[]".parse::<Value>().unwrap_err(); + "[.]".parse::<Value>().unwrap_err(); + "[\"\".\"\"]".parse::<Value>().unwrap_err(); + "[a.]".parse::<Value>().unwrap_err(); + "[\"\"]".parse::<Value>().unwrap_err(); + "[!]".parse::<Value>().unwrap_err(); + "[\"\n\"]".parse::<Value>().unwrap_err(); + "[a.b]\n[a.\"b\"]".parse::<Value>().unwrap_err(); + "[']".parse::<Value>().unwrap_err(); + "[''']".parse::<Value>().unwrap_err(); + "['''''']".parse::<Value>().unwrap_err(); + "['\n']".parse::<Value>().unwrap_err(); + "['\r\n']".parse::<Value>().unwrap_err(); +} + +#[test] +fn table_names() { + let a = " + [a.\"b\"] + [\"f f\"] + [\"f.f\"] + [\"\\\"\"] + ['a.a'] + ['\"\"'] + ".parse::<Value>().unwrap(); + println!("{:?}", a); + &a["a"]["b"]; + &a["f f"]; + &a["f.f"]; + &a["\""]; + &a["\"\""]; +} + +#[test] +fn invalid_bare_numeral() { + "4".parse::<Value>().unwrap_err(); +} + +#[test] +fn inline_tables() { + "a = {}".parse::<Value>().unwrap(); + "a = {b=1}".parse::<Value>().unwrap(); + "a = { b = 1 }".parse::<Value>().unwrap(); + "a = {a=1,b=2}".parse::<Value>().unwrap(); + "a = {a=1,b=2,c={}}".parse::<Value>().unwrap(); + "a = {a=1,}".parse::<Value>().unwrap_err(); + "a = {,}".parse::<Value>().unwrap_err(); + "a = {a=1,a=1}".parse::<Value>().unwrap_err(); + "a = {\n}".parse::<Value>().unwrap_err(); + "a = {".parse::<Value>().unwrap_err(); + "a = {a=[\n]}".parse::<Value>().unwrap(); + "a = {\"a\"=[\n]}".parse::<Value>().unwrap(); + "a = [\n{},\n{},\n]".parse::<Value>().unwrap(); +} + +#[test] +fn number_underscores() { + macro_rules!
t { + ($actual:expr, $expected:expr) => ({ + let f = format!("foo = {}", $actual); + let table = f.parse::<Value>().unwrap(); + assert_eq!(table["foo"].as_integer().unwrap(), $expected); + }) + } + + t!("1_0", 10); + t!("1_0_0", 100); + t!("1_000", 1000); + t!("+1_000", 1000); + t!("-1_000", -1000); +} + +#[test] +fn bad_underscores() { + bad!("foo = 0_", "invalid number"); + bad!("foo = 0__0", "invalid number"); + bad!("foo = __0", "invalid number"); + bad!("foo = 1_0_", "invalid number"); +} + +#[test] +fn bad_unicode_codepoint() { + bad!("foo = \"\\uD800\"", "invalid escape value"); +} + +#[test] +fn bad_strings() { + bad!("foo = \"\\uxx\"", "invalid hex escape"); + bad!("foo = \"\\u\"", "invalid hex escape"); + bad!("foo = \"\\", "unterminated"); + bad!("foo = '", "unterminated"); +} + +#[test] +fn empty_string() { + assert_eq!("foo = \"\"".parse::<Value>() + .unwrap()["foo"] + .as_str() + .unwrap(), + ""); +} + +#[test] +fn booleans() { + let table = "foo = true".parse::<Value>().unwrap(); + assert_eq!(table["foo"].as_bool(), Some(true)); + + let table = "foo = false".parse::<Value>().unwrap(); + assert_eq!(table["foo"].as_bool(), Some(false)); + + assert!("foo = true2".parse::<Value>().is_err()); + assert!("foo = false2".parse::<Value>().is_err()); + assert!("foo = t1".parse::<Value>().is_err()); + assert!("foo = f2".parse::<Value>().is_err()); +} + +#[test] +fn bad_nesting() { + bad!(" + a = [2] + [[a]] + b = 5 + ", "duplicate key: `a`"); + bad!(" + a = 1 + [a.b] + ", "duplicate key: `a`"); + bad!(" + a = [] + [a.b] + ", "duplicate key: `a`"); + bad!(" + a = [] + [[a.b]] + ", "duplicate key: `a`"); + bad!(" + [a] + b = { c = 2, d = {} } + [a.b] + c = 2 + ", "duplicate key: `b`"); +} + +#[test] +fn bad_table_redefine() { + bad!(" + [a] + foo=\"bar\" + [a.b] + foo=\"bar\" + [a] + ", "redefinition of table `a`"); + bad!(" + [a] + foo=\"bar\" + b = { foo = \"bar\" } + [a] + ", "redefinition of table `a`"); + bad!(" + [a] + b = {} + [a.b] + ", "duplicate key: `b`"); + + bad!(" + [a] + b = {} + [a] + ", "redefinition of table `a`"); +} + +#[test] +fn datetimes() { + macro_rules!
t { + ($actual:expr) => ({ + let f = format!("foo = {}", $actual); + let toml = f.parse::<Value>().expect(&format!("failed: {}", f)); + assert_eq!(toml["foo"].as_datetime().unwrap().to_string(), $actual); + }) + } + + t!("2016-09-09T09:09:09Z"); + t!("2016-09-09T09:09:09.1Z"); + t!("2016-09-09T09:09:09.2+10:00"); + t!("2016-09-09T09:09:09.123456789-02:00"); + bad!("foo = 2016-09-09T09:09:09.Z", "failed to parse date"); + bad!("foo = 2016-9-09T09:09:09Z", "failed to parse date"); + bad!("foo = 2016-09-09T09:09:09+2:00", "failed to parse date"); + bad!("foo = 2016-09-09T09:09:09-2:00", "failed to parse date"); + bad!("foo = 2016-09-09T09:09:09Z-2:00", "failed to parse date"); +} + +#[test] +fn require_newline_after_value() { + bad!("0=0r=false", "invalid number at line 1"); + bad!(r#" +0=""o=""m=""r=""00="0"q="""0"""e="""0""" +"#, "expected newline"); + bad!(r#" +[[0000l0]] +0="0"[[0000l0]] +0="0"[[0000l0]] +0="0"l="0" +"#, "expected newline"); + bad!(r#" +0=[0]00=[0,0,0]t=["0","0","0"]s=[1000-00-00T00:00:00Z,2000-00-00T00:00:00Z] +"#, "expected newline"); + bad!(r#" +0=0r0=0r=false +"#, "invalid number at line 2"); + bad!(r#" +0=0r0=0r=falsefal=false +"#, "invalid number at line 2"); +} diff --git a/test-suite/tests/pretty.rs b/test-suite/tests/pretty.rs new file mode 100644 index 0000000..19ed22d --- /dev/null +++ b/test-suite/tests/pretty.rs @@ -0,0 +1,308 @@ +extern crate toml; +extern crate serde; + +use serde::ser::Serialize; + +const NO_PRETTY: &'static str = "\ +[example] +array = [\"item 1\", \"item 2\"] +empty = [] +oneline = \"this has no newlines.\" +text = \"\\nthis is the first line\\nthis is the second line\\n\" +"; + +#[test] +fn no_pretty() { + let toml = NO_PRETTY; + let value: toml::Value = toml::from_str(toml).unwrap(); + let mut result = String::with_capacity(128); + value.serialize(&mut toml::Serializer::new(&mut result)).unwrap(); + println!("EXPECTED:\n{}", toml); + println!("\nRESULT:\n{}", result); + assert_eq!(toml, &result); +} + +#[test] +fn disable_pretty() { + let toml = NO_PRETTY; + let value: toml::Value = toml::from_str(toml).unwrap(); + let mut result = String::with_capacity(128); + { + let mut serializer = toml::Serializer::pretty(&mut result); + serializer.pretty_string(false); + serializer.pretty_array(false); + value.serialize(&mut serializer).unwrap(); + } + println!("EXPECTED:\n{}", toml); + println!("\nRESULT:\n{}", result); + assert_eq!(toml, &result); +} + +const PRETTY_STD: &'static str = "\ +[example] +array = [ + 'item 1', + 'item 2', +] +empty = [] +one = ['one'] +oneline = 'this has no newlines.' +text = ''' +this is the first line +this is the second line +''' +"; + +#[test] +fn pretty_std() { + let toml = PRETTY_STD; + let value: toml::Value = toml::from_str(toml).unwrap(); + let mut result = String::with_capacity(128); + value.serialize(&mut toml::Serializer::pretty(&mut result)).unwrap(); + println!("EXPECTED:\n{}", toml); + println!("\nRESULT:\n{}", result); + assert_eq!(toml, &result); +} + + +const PRETTY_INDENT_2: &'static str = "\ +[example] +array = [ + 'item 1', + 'item 2', +] +empty = [] +one = ['one'] +oneline = 'this has no newlines.'
+text = ''' +this is the first line +this is the second line +''' +three = [ + 'one', + 'two', + 'three', +] +"; + +#[test] +fn pretty_indent_2() { + let toml = PRETTY_INDENT_2; + let value: toml::Value = toml::from_str(toml).unwrap(); + let mut result = String::with_capacity(128); + { + let mut serializer = toml::Serializer::pretty(&mut result); + serializer.pretty_array_indent(2); + value.serialize(&mut serializer).unwrap(); + } + println!(">> Result:\n{}", result); + assert_eq!(toml, &result); +} + +const PRETTY_INDENT_2_OTHER: &'static str = "\ +[example] +array = [ + \"item 1\", + \"item 2\", +] +empty = [] +oneline = \"this has no newlines.\" +text = \"\\nthis is the first line\\nthis is the second line\\n\" +"; + + +#[test] +/// Test pretty indent when gotten the other way +fn pretty_indent_2_other() { + let toml = PRETTY_INDENT_2_OTHER; + let value: toml::Value = toml::from_str(toml).unwrap(); + let mut result = String::with_capacity(128); + { + let mut serializer = toml::Serializer::new(&mut result); + serializer.pretty_array_indent(2); + value.serialize(&mut serializer).unwrap(); + } + assert_eq!(toml, &result); +} + + +const PRETTY_ARRAY_NO_COMMA: &'static str = "\ +[example] +array = [ + \"item 1\", + \"item 2\" +] +empty = [] +oneline = \"this has no newlines.\" +text = \"\\nthis is the first line\\nthis is the second line\\n\" +"; +#[test] +/// Test pretty indent when gotten the other way +fn pretty_indent_array_no_comma() { + let toml = PRETTY_ARRAY_NO_COMMA; + let value: toml::Value = toml::from_str(toml).unwrap(); + let mut result = String::with_capacity(128); + { + let mut serializer = toml::Serializer::new(&mut result); + serializer.pretty_array_trailing_comma(false); + value.serialize(&mut serializer).unwrap(); + } + assert_eq!(toml, &result); +} + + +const PRETTY_NO_STRING: &'static str = "\ +[example] +array = [ + \"item 1\", + \"item 2\", +] +empty = [] +oneline = \"this has no newlines.\" +text = \"\\nthis is the first line\\nthis is the second line\\n\" +"; +#[test] +/// Test pretty indent when gotten the other way +fn pretty_no_string() { + let toml = PRETTY_NO_STRING; + let value: toml::Value = toml::from_str(toml).unwrap(); + let mut result = String::with_capacity(128); + { + let mut serializer = toml::Serializer::pretty(&mut result); + serializer.pretty_string(false); + value.serialize(&mut serializer).unwrap(); + } + assert_eq!(toml, &result); +} + +const PRETTY_TRICKY: &'static str = r##"[example] +f = "\f" +glass = ''' +Nothing too unusual, except that I can eat glass in: +- Greek: Μπορώ να φάω σπασμένα γυαλιά χωρίς να πάθω τίποτα. +- Polish: Mogę jeść szkło, i mi nie szkodzi. +- Hindi: मैं काँच खा सकता हूँ, मुझे उस से कोई पीडा नहीं होती. +- Japanese: 私はガラスを食べられます。それは私を傷つけません。 +''' +r = "\r" +r_newline = """ +\r +""" +single = '''this is a single line but has '' cuz it's tricky''' +single_tricky = "single line with ''' in it" +tabs = ''' +this is pretty standard + except for some tabs right here +''' +text = """ +this is the first line. 
+This has a ''' in it and \"\"\" cuz it's tricky yo +Also ' and \" because why not +this is the fourth line +""" +"##; + +#[test] +fn pretty_tricky() { + let toml = PRETTY_TRICKY; + let value: toml::Value = toml::from_str(toml).unwrap(); + let mut result = String::with_capacity(128); + value.serialize(&mut toml::Serializer::pretty(&mut result)).unwrap(); + println!("EXPECTED:\n{}", toml); + println!("\nRESULT:\n{}", result); + assert_eq!(toml, &result); +} + +const PRETTY_TABLE_ARRAY: &'static str = r##"[[array]] +key = 'foo' + +[[array]] +key = 'bar' + +[abc] +doc = 'this is a table' + +[example] +single = 'this is a single line string' +"##; + +#[test] +fn pretty_table_array() { + let toml = PRETTY_TABLE_ARRAY; + let value: toml::Value = toml::from_str(toml).unwrap(); + let mut result = String::with_capacity(128); + value.serialize(&mut toml::Serializer::pretty(&mut result)).unwrap(); + println!("EXPECTED:\n{}", toml); + println!("\nRESULT:\n{}", result); + assert_eq!(toml, &result); +} + +const TABLE_ARRAY: &'static str = r##"[[array]] +key = "foo" + +[[array]] +key = "bar" + +[abc] +doc = "this is a table" + +[example] +single = "this is a single line string" +"##; + +#[test] +fn table_array() { + let toml = TABLE_ARRAY; + let value: toml::Value = toml::from_str(toml).unwrap(); + let mut result = String::with_capacity(128); + value.serialize(&mut toml::Serializer::new(&mut result)).unwrap(); + println!("EXPECTED:\n{}", toml); + println!("\nRESULT:\n{}", result); + assert_eq!(toml, &result); +} + +const PRETTY_TRICKY_NON_LITERAL: &'static str = r##"[example] +f = "\f" +glass = """ +Nothing too unusual, except that I can eat glass in: +- Greek: Μπορώ να φάω σπασμένα γυαλιά χωρίς να πάθω τίποτα. +- Polish: Mogę jeść szkło, i mi nie szkodzi. +- Hindi: मैं काँच खा सकता हूँ, मुझे उस से कोई पीडा नहीं होती. +- Japanese: 私はガラスを食べられます。それは私を傷つけません。 +""" +plain = """ +This has a couple of lines +Because it likes to. +""" +r = "\r" +r_newline = """ +\r +""" +single = "this is a single line but has '' cuz it's tricky" +single_tricky = "single line with ''' in it" +tabs = """ +this is pretty standard +\texcept for some \ttabs right here +""" +text = """ +this is the first line. +This has a ''' in it and \"\"\" cuz it's tricky yo +Also ' and \" because why not +this is the fourth line +""" +"##; + +#[test] +fn pretty_tricky_non_literal() { + let toml = PRETTY_TRICKY_NON_LITERAL; + let value: toml::Value = toml::from_str(toml).unwrap(); + let mut result = String::with_capacity(128); + { + let mut serializer = toml::Serializer::pretty(&mut result); + serializer.pretty_string_literal(false); + value.serialize(&mut serializer).unwrap(); + } + println!("EXPECTED:\n{}", toml); + println!("\nRESULT:\n{}", result); + assert_eq!(toml, &result); +} diff --git a/test-suite/tests/serde.rs b/test-suite/tests/serde.rs new file mode 100644 index 0000000..57fa5db --- /dev/null +++ b/test-suite/tests/serde.rs @@ -0,0 +1,578 @@ +extern crate serde; +extern crate toml; +#[macro_use] +extern crate serde_derive; + +use std::collections::{BTreeMap, HashSet}; +use serde::{Deserialize, Deserializer}; + +use toml::Value; +use toml::Value::{Table, Integer, Array, Float}; + +macro_rules! t { + ($e:expr) => (match $e { + Ok(t) => t, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + }) +} + +macro_rules! 
equivalent { + ($literal:expr, $toml:expr,) => ({ + let toml = $toml; + let literal = $literal; + + // In/out of Value is equivalent + println!("try_from"); + assert_eq!(t!(Value::try_from(literal.clone())), toml); + println!("try_into"); + assert_eq!(literal, t!(toml.clone().try_into())); + + // Through a string equivalent + println!("to_string(literal)"); + assert_eq!(t!(toml::to_string(&literal)), toml.to_string()); + println!("to_string(toml)"); + assert_eq!(t!(toml::to_string(&toml)), toml.to_string()); + println!("literal, from_str(toml)"); + assert_eq!(literal, t!(toml::from_str(&toml.to_string()))); + println!("toml, from_str(toml)"); + assert_eq!(toml, t!(toml::from_str(&toml.to_string()))); + }) +} + +macro_rules! error { + ($ty:ty, $toml:expr, $error:expr) => ({ + println!("attempting parsing"); + match toml::from_str::<$ty>(&$toml.to_string()) { + Ok(_) => panic!("successful"), + Err(e) => { + assert!(e.to_string().contains($error), + "bad error: {}", e); + } + } + + println!("attempting toml decoding"); + match $toml.try_into::<$ty>() { + Ok(_) => panic!("successful"), + Err(e) => { + assert!(e.to_string().contains($error), + "bad error: {}", e); + } + } + }) +} + +macro_rules! map( ($($k:ident: $v:expr),*) => ({ + let mut _m = BTreeMap::new(); + $(_m.insert(stringify!($k).to_string(), $v);)* + _m +}) ); + +#[test] +fn smoke() { + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Foo { a: isize } + + equivalent!( + Foo { a: 2 }, + Table(map! { a: Integer(2) }), + ); +} + +#[test] +fn smoke_hyphen() { + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Foo { + a_b: isize, + } + + equivalent! { + Foo { a_b: 2 }, + Table(map! { a_b: Integer(2) }), + } + + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Foo2 { + #[serde(rename = "a-b")] + a_b: isize, + } + + let mut m = BTreeMap::new(); + m.insert("a-b".to_string(), Integer(2)); + equivalent! { + Foo2 { a_b: 2 }, + Table(m), + } +} + +#[test] +fn nested() { + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Foo { a: isize, b: Bar } + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Bar { a: String } + + equivalent! { + Foo { a: 2, b: Bar { a: "test".to_string() } }, + Table(map! { + a: Integer(2), + b: Table(map! { + a: Value::String("test".to_string()) + }) + }), + } +} + +#[test] +fn application_decode_error() { + #[derive(PartialEq, Debug)] + struct Range10(usize); + impl<'de> Deserialize<'de> for Range10 { + fn deserialize>(d: D) -> Result { + let x: usize = try!(Deserialize::deserialize(d)); + if x > 10 { + Err(serde::de::Error::custom("more than 10")) + } else { + Ok(Range10(x)) + } + } + } + let d_good = Integer(5); + let d_bad1 = Value::String("not an isize".to_string()); + let d_bad2 = Integer(11); + + assert_eq!(Range10(5), d_good.try_into().unwrap()); + + let err1: Result = d_bad1.try_into(); + assert!(err1.is_err()); + let err2: Result = d_bad2.try_into(); + assert!(err2.is_err()); +} + +#[test] +fn array() { + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Foo { a: Vec } + + equivalent! { + Foo { a: vec![1, 2, 3, 4] }, + Table(map! { + a: Array(vec![ + Integer(1), + Integer(2), + Integer(3), + Integer(4) + ]) + }), + }; +} + +#[test] +fn inner_structs_with_options() { + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Foo { + a: Option>, + b: Bar, + } + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Bar { + a: String, + b: f64, + } + + equivalent! 
{ + Foo { + a: Some(Box::new(Foo { + a: None, + b: Bar { a: "foo".to_string(), b: 4.5 }, + })), + b: Bar { a: "bar".to_string(), b: 1.0 }, + }, + Table(map! { + a: Table(map! { + b: Table(map! { + a: Value::String("foo".to_string()), + b: Float(4.5) + }) + }), + b: Table(map! { + a: Value::String("bar".to_string()), + b: Float(1.0) + }) + }), + } +} + +#[test] +fn hashmap() { + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Foo { + set: HashSet<char>, + map: BTreeMap<String, isize>, + } + + equivalent! { + Foo { + map: { + let mut m = BTreeMap::new(); + m.insert("foo".to_string(), 10); + m.insert("bar".to_string(), 4); + m + }, + set: { + let mut s = HashSet::new(); + s.insert('a'); + s + }, + }, + Table(map! { + map: Table(map! { + foo: Integer(10), + bar: Integer(4) + }), + set: Array(vec![Value::String("a".to_string())]) + }), + } +} + +#[test] +fn table_array() { + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Foo { a: Vec<Bar>, } + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Bar { a: isize } + + equivalent! { + Foo { a: vec![Bar { a: 1 }, Bar { a: 2 }] }, + Table(map! { + a: Array(vec![ + Table(map!{ a: Integer(1) }), + Table(map!{ a: Integer(2) }), + ]) + }), + } +} + +#[test] +fn type_errors() { + #[derive(Deserialize)] + #[allow(dead_code)] + struct Foo { bar: isize } + + error! { + Foo, + Table(map! { + bar: Value::String("a".to_string()) + }), + "invalid type: string \"a\", expected isize for key `bar`" + } + + #[derive(Deserialize)] + #[allow(dead_code)] + struct Bar { foo: Foo } + + error! { + Bar, + Table(map! { + foo: Table(map! { + bar: Value::String("a".to_string()) + }) + }), + "invalid type: string \"a\", expected isize for key `foo.bar`" + } +} + +#[test] +fn missing_errors() { + #[derive(Serialize, Deserialize, PartialEq, Debug)] + struct Foo { bar: isize } + + error! { + Foo, + Table(map! { }), + "missing field `bar`" + } +} + +#[test] +fn parse_enum() { + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Foo { a: E } + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + #[serde(untagged)] + enum E { + Bar(isize), + Baz(String), + Last(Foo2), + } + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Foo2 { + test: String, + } + + equivalent! { + Foo { a: E::Bar(10) }, + Table(map! { a: Integer(10) }), + } + + equivalent! { + Foo { a: E::Baz("foo".to_string()) }, + Table(map! { a: Value::String("foo".to_string()) }), + } + + equivalent! { + Foo { a: E::Last(Foo2 { test: "test".to_string() }) }, + Table(map! { a: Table(map! { test: Value::String("test".to_string()) }) }), + } +} + +#[test] +fn parse_enum_string() { + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Foo { a: Sort } + + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + #[serde(rename_all = "lowercase")] + enum Sort { + Asc, + Desc, + } + + equivalent! { + Foo { a: Sort::Desc }, + Table(map! { a: Value::String("desc".to_string()) }), + } + +} + +// #[test] +// fn unused_fields() { +// #[derive(Serialize, Deserialize, PartialEq, Debug)] +// struct Foo { a: isize } +// +// let v = Foo { a: 2 }; +// let mut d = Decoder::new(Table(map! { +// a, Integer(2), +// b, Integer(5) +// })); +// assert_eq!(v, t!(Deserialize::deserialize(&mut d))); +// +// assert_eq!(d.toml, Some(Table(map!
{ +// b, Integer(5) +// }))); +// } +// +// #[test] +// fn unused_fields2() { +// #[derive(Serialize, Deserialize, PartialEq, Debug)] +// struct Foo { a: Bar } +// #[derive(Serialize, Deserialize, PartialEq, Debug)] +// struct Bar { a: isize } +// +// let v = Foo { a: Bar { a: 2 } }; +// let mut d = Decoder::new(Table(map! { +// a, Table(map! { +// a, Integer(2), +// b, Integer(5) +// }) +// })); +// assert_eq!(v, t!(Deserialize::deserialize(&mut d))); +// +// assert_eq!(d.toml, Some(Table(map! { +// a, Table(map! { +// b, Integer(5) +// }) +// }))); +// } +// +// #[test] +// fn unused_fields3() { +// #[derive(Serialize, Deserialize, PartialEq, Debug)] +// struct Foo { a: Bar } +// #[derive(Serialize, Deserialize, PartialEq, Debug)] +// struct Bar { a: isize } +// +// let v = Foo { a: Bar { a: 2 } }; +// let mut d = Decoder::new(Table(map! { +// a, Table(map! { +// a, Integer(2) +// }) +// })); +// assert_eq!(v, t!(Deserialize::deserialize(&mut d))); +// +// assert_eq!(d.toml, None); +// } +// +// #[test] +// fn unused_fields4() { +// #[derive(Serialize, Deserialize, PartialEq, Debug)] +// struct Foo { a: BTreeMap<String, String> } +// +// let v = Foo { a: map! { a, "foo".to_string() } }; +// let mut d = Decoder::new(Table(map! { +// a, Table(map! { +// a, Value::String("foo".to_string()) +// }) +// })); +// assert_eq!(v, t!(Deserialize::deserialize(&mut d))); +// +// assert_eq!(d.toml, None); +// } +// +// #[test] +// fn unused_fields5() { +// #[derive(Serialize, Deserialize, PartialEq, Debug)] +// struct Foo { a: Vec<String> } +// +// let v = Foo { a: vec!["a".to_string()] }; +// let mut d = Decoder::new(Table(map! { +// a, Array(vec![Value::String("a".to_string())]) +// })); +// assert_eq!(v, t!(Deserialize::deserialize(&mut d))); +// +// assert_eq!(d.toml, None); +// } +// +// #[test] +// fn unused_fields6() { +// #[derive(Serialize, Deserialize, PartialEq, Debug)] +// struct Foo { a: Option<Vec<String>> } +// +// let v = Foo { a: Some(vec![]) }; +// let mut d = Decoder::new(Table(map! { +// a, Array(vec![]) +// })); +// assert_eq!(v, t!(Deserialize::deserialize(&mut d))); +// +// assert_eq!(d.toml, None); +// } +// +// #[test] +// fn unused_fields7() { +// #[derive(Serialize, Deserialize, PartialEq, Debug)] +// struct Foo { a: Vec<Bar> } +// #[derive(Serialize, Deserialize, PartialEq, Debug)] +// struct Bar { a: isize } +// +// let v = Foo { a: vec![Bar { a: 1 }] }; +// let mut d = Decoder::new(Table(map! { +// a, Array(vec![Table(map! { +// a, Integer(1), +// b, Integer(2) +// })]) +// })); +// assert_eq!(v, t!(Deserialize::deserialize(&mut d))); +// +// assert_eq!(d.toml, Some(Table(map! { +// a, Array(vec![Table(map! { +// b, Integer(2) +// })]) +// }))); +// } + +#[test] +fn empty_arrays() { + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Foo { a: Vec<Bar> } + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Bar; + + equivalent! { + Foo { a: vec![] }, + Table(map! {a: Array(Vec::new())}), + } +} + +#[test] +fn empty_arrays2() { + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Foo { a: Option<Vec<Bar>> } + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Bar; + + equivalent! { + Foo { a: None }, + Table(map! {}), + } + + equivalent!{ + Foo { a: Some(vec![]) }, + Table(map! { a: Array(vec![]) }), + } +} + +#[test] +fn extra_keys() { + #[derive(Serialize, Deserialize)] + struct Foo { a: isize } + + let toml = Table(map!
{ a: Integer(2), b: Integer(2) }); + assert!(toml.clone().try_into::<Foo>().is_ok()); + assert!(toml::from_str::<Foo>(&toml.to_string()).is_ok()); +} + +#[test] +fn newtypes() { + #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] + struct A { + b: B + } + + #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] + struct B(u32); + + equivalent! { + A { b: B(2) }, + Table(map! { b: Integer(2) }), + } +} + +#[test] +fn newtypes2() { + #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] + struct A { + b: B + } + + #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] + struct B(Option<C>); + + #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] + struct C { + x: u32, + y: u32, + z: u32 + } + + equivalent! { + A { b: B(Some(C { x: 0, y: 1, z: 2 })) }, + Table(map! { + b: Table(map! { + x: Integer(0), + y: Integer(1), + z: Integer(2) + }) + }), + } +} + +#[derive(Debug, Default, PartialEq, Serialize, Deserialize)] +struct CanBeEmpty { + a: Option<String>, + b: Option<String>, +} + +#[test] +fn table_structs_empty() { + let text = "[bar]\n\n[baz]\n\n[bazv]\na = \"foo\"\n\n[foo]\n"; + let value: BTreeMap<String, CanBeEmpty> = toml::from_str(text).unwrap(); + let mut expected: BTreeMap<String, CanBeEmpty> = BTreeMap::new(); + expected.insert("bar".to_string(), CanBeEmpty::default()); + expected.insert("baz".to_string(), CanBeEmpty::default()); + expected.insert( + "bazv".to_string(), + CanBeEmpty {a: Some("foo".to_string()), b: None}, + ); + expected.insert("foo".to_string(), CanBeEmpty::default()); + assert_eq!(value, expected); + assert_eq!(toml::to_string(&value).unwrap(), text); +} diff --git a/test-suite/tests/tables-last.rs b/test-suite/tests/tables-last.rs new file mode 100644 index 0000000..d05c8f0 --- /dev/null +++ b/test-suite/tests/tables-last.rs @@ -0,0 +1,30 @@ +#[macro_use] +extern crate serde_derive; +extern crate toml; + +use std::collections::HashMap; + +#[derive(Serialize)] +struct A { + #[serde(serialize_with = "toml::ser::tables_last")] + vals: HashMap<&'static str, Value>, +} + +#[derive(Serialize)] +#[serde(untagged)] +enum Value { + Map(HashMap<&'static str, &'static str>), + Int(i32), +} + +#[test] +fn always_works() { + let mut a = A { vals: HashMap::new() }; + a.vals.insert("foo", Value::Int(0)); + + let mut sub = HashMap::new(); + sub.insert("foo", "bar"); + a.vals.insert("bar", Value::Map(sub)); + + toml::to_string(&a).unwrap(); +} diff --git a/test-suite/tests/valid.rs b/test-suite/tests/valid.rs new file mode 100644 index 0000000..b186800 --- /dev/null +++ b/test-suite/tests/valid.rs @@ -0,0 +1,249 @@ +extern crate toml; +extern crate serde; +extern crate serde_json; + +use toml::{Value as Toml, to_string_pretty}; +use serde::ser::Serialize; +use serde_json::Value as Json; + +fn to_json(toml: toml::Value) -> Json { + fn doit(s: &str, json: Json) -> Json { + let mut map = serde_json::Map::new(); + map.insert("type".to_string(), Json::String(s.to_string())); + map.insert("value".to_string(), json); + Json::Object(map) + } + + match toml { + Toml::String(s) => doit("string", Json::String(s)), + Toml::Integer(i) => doit("integer", Json::String(i.to_string())), + Toml::Float(f) => doit("float", Json::String({ + let s = format!("{:.15}", f); + let s = format!("{}", s.trim_right_matches('0')); + if s.ends_with('.') {format!("{}0", s)} else {s} + })), + Toml::Boolean(b) => doit("bool", Json::String(format!("{}", b))), + Toml::Datetime(s) => doit("datetime", Json::String(s.to_string())), + Toml::Array(arr) => { + let is_table = match arr.first() { + Some(&Toml::Table(..)) => true, + _ => false, + }; + let json =
Json::Array(arr.into_iter().map(to_json).collect()); + if is_table {json} else {doit("array", json)} + } + Toml::Table(table) => { + let mut map = serde_json::Map::new(); + for (k, v) in table { + map.insert(k, to_json(v)); + } + Json::Object(map) + } + } +} + +fn run_pretty(toml: Toml) { + // Assert toml == json + println!("### pretty round trip parse."); + + // standard pretty + let toml_raw = to_string_pretty(&toml).expect("to string"); + let toml2 = toml_raw.parse().expect("from string"); + assert_eq!(toml, toml2); + + // pretty with indent 2 + let mut result = String::with_capacity(128); + { + let mut serializer = toml::Serializer::pretty(&mut result); + serializer.pretty_array_indent(2); + toml.serialize(&mut serializer).expect("to string"); + } + assert_eq!(toml, result.parse().expect("from str")); + result.clear(); + { + let mut serializer = toml::Serializer::new(&mut result); + serializer.pretty_array_trailing_comma(false); + toml.serialize(&mut serializer).expect("to string"); + } + assert_eq!(toml, result.parse().expect("from str")); + result.clear(); + { + let mut serializer = toml::Serializer::pretty(&mut result); + serializer.pretty_string(false); + toml.serialize(&mut serializer).expect("to string"); + assert_eq!(toml, toml2); + } + assert_eq!(toml, result.parse().expect("from str")); + result.clear(); + { + let mut serializer = toml::Serializer::pretty(&mut result); + serializer.pretty_array(false); + toml.serialize(&mut serializer).expect("to string"); + assert_eq!(toml, toml2); + } + assert_eq!(toml, result.parse().expect("from str")); +} + +fn run(toml_raw: &str, json_raw: &str) { + println!("parsing:\n{}", toml_raw); + let toml: Toml = toml_raw.parse().unwrap(); + let json: Json = json_raw.parse().unwrap(); + + // Assert toml == json + let toml_json = to_json(toml.clone()); + assert!(json == toml_json, + "expected\n{}\ngot\n{}\n", + serde_json::to_string_pretty(&json).unwrap(), + serde_json::to_string_pretty(&toml_json).unwrap()); + + // Assert round trip + println!("round trip parse: {}", toml); + let toml2 = toml.to_string().parse().unwrap(); + assert_eq!(toml, toml2); + run_pretty(toml); +} + +macro_rules! 
test( ($name:ident, $toml:expr, $json:expr) => ( + #[test] + fn $name() { run($toml, $json); } +) ); + +test!(array_empty, + include_str!("valid/array-empty.toml"), + include_str!("valid/array-empty.json")); +test!(array_nospaces, + include_str!("valid/array-nospaces.toml"), + include_str!("valid/array-nospaces.json")); +test!(arrays_hetergeneous, + include_str!("valid/arrays-hetergeneous.toml"), + include_str!("valid/arrays-hetergeneous.json")); +test!(arrays, + include_str!("valid/arrays.toml"), + include_str!("valid/arrays.json")); +test!(arrays_nested, + include_str!("valid/arrays-nested.toml"), + include_str!("valid/arrays-nested.json")); +test!(empty, + include_str!("valid/empty.toml"), + include_str!("valid/empty.json")); +test!(bool, + include_str!("valid/bool.toml"), + include_str!("valid/bool.json")); +test!(datetime, + include_str!("valid/datetime.toml"), + include_str!("valid/datetime.json")); +test!(example, + include_str!("valid/example.toml"), + include_str!("valid/example.json")); +test!(float, + include_str!("valid/float.toml"), + include_str!("valid/float.json")); +test!(implicit_and_explicit_after, + include_str!("valid/implicit-and-explicit-after.toml"), + include_str!("valid/implicit-and-explicit-after.json")); +test!(implicit_and_explicit_before, + include_str!("valid/implicit-and-explicit-before.toml"), + include_str!("valid/implicit-and-explicit-before.json")); +test!(implicit_groups, + include_str!("valid/implicit-groups.toml"), + include_str!("valid/implicit-groups.json")); +test!(integer, + include_str!("valid/integer.toml"), + include_str!("valid/integer.json")); +test!(key_equals_nospace, + include_str!("valid/key-equals-nospace.toml"), + include_str!("valid/key-equals-nospace.json")); +test!(key_space, + include_str!("valid/key-space.toml"), + include_str!("valid/key-space.json")); +test!(key_special_chars, + include_str!("valid/key-special-chars.toml"), + include_str!("valid/key-special-chars.json")); +test!(key_with_pound, + include_str!("valid/key-with-pound.toml"), + include_str!("valid/key-with-pound.json")); +test!(long_float, + include_str!("valid/long-float.toml"), + include_str!("valid/long-float.json")); +test!(long_integer, + include_str!("valid/long-integer.toml"), + include_str!("valid/long-integer.json")); +test!(multiline_string, + include_str!("valid/multiline-string.toml"), + include_str!("valid/multiline-string.json")); +test!(raw_multiline_string, + include_str!("valid/raw-multiline-string.toml"), + include_str!("valid/raw-multiline-string.json")); +test!(raw_string, + include_str!("valid/raw-string.toml"), + include_str!("valid/raw-string.json")); +test!(string_empty, + include_str!("valid/string-empty.toml"), + include_str!("valid/string-empty.json")); +test!(string_escapes, + include_str!("valid/string-escapes.toml"), + include_str!("valid/string-escapes.json")); +test!(string_simple, + include_str!("valid/string-simple.toml"), + include_str!("valid/string-simple.json")); +test!(string_with_pound, + include_str!("valid/string-with-pound.toml"), + include_str!("valid/string-with-pound.json")); +test!(table_array_implicit, + include_str!("valid/table-array-implicit.toml"), + include_str!("valid/table-array-implicit.json")); +test!(table_array_many, + include_str!("valid/table-array-many.toml"), + include_str!("valid/table-array-many.json")); +test!(table_array_nest, + include_str!("valid/table-array-nest.toml"), + include_str!("valid/table-array-nest.json")); +test!(table_array_one, + include_str!("valid/table-array-one.toml"), + 
include_str!("valid/table-array-one.json")); +test!(table_empty, + include_str!("valid/table-empty.toml"), + include_str!("valid/table-empty.json")); +test!(table_sub_empty, + include_str!("valid/table-sub-empty.toml"), + include_str!("valid/table-sub-empty.json")); +test!(table_multi_empty, + include_str!("valid/table-multi-empty.toml"), + include_str!("valid/table-multi-empty.json")); +test!(table_whitespace, + include_str!("valid/table-whitespace.toml"), + include_str!("valid/table-whitespace.json")); +test!(table_with_pound, + include_str!("valid/table-with-pound.toml"), + include_str!("valid/table-with-pound.json")); +test!(unicode_escape, + include_str!("valid/unicode-escape.toml"), + include_str!("valid/unicode-escape.json")); +test!(unicode_literal, + include_str!("valid/unicode-literal.toml"), + include_str!("valid/unicode-literal.json")); +test!(hard_example, + include_str!("valid/hard_example.toml"), + include_str!("valid/hard_example.json")); +test!(example2, + include_str!("valid/example2.toml"), + include_str!("valid/example2.json")); +test!(example3, + include_str!("valid/example-v0.3.0.toml"), + include_str!("valid/example-v0.3.0.json")); +test!(example4, + include_str!("valid/example-v0.4.0.toml"), + include_str!("valid/example-v0.4.0.json")); +test!(example_bom, + include_str!("valid/example-bom.toml"), + include_str!("valid/example.json")); + +test!(datetime_truncate, + include_str!("valid/datetime-truncate.toml"), + include_str!("valid/datetime-truncate.json")); +test!(key_quote_newline, + include_str!("valid/key-quote-newline.toml"), + include_str!("valid/key-quote-newline.json")); +test!(table_array_nest_no_keys, + include_str!("valid/table-array-nest-no-keys.toml"), + include_str!("valid/table-array-nest-no-keys.json")); diff --git a/test-suite/tests/valid/array-empty.json b/test-suite/tests/valid/array-empty.json new file mode 100644 index 0000000..2fbf256 --- /dev/null +++ b/test-suite/tests/valid/array-empty.json @@ -0,0 +1,11 @@ +{ + "thevoid": { "type": "array", "value": [ + {"type": "array", "value": [ + {"type": "array", "value": [ + {"type": "array", "value": [ + {"type": "array", "value": []} + ]} + ]} + ]} + ]} +} diff --git a/test-suite/tests/valid/array-empty.toml b/test-suite/tests/valid/array-empty.toml new file mode 100644 index 0000000..fa58dc6 --- /dev/null +++ b/test-suite/tests/valid/array-empty.toml @@ -0,0 +1 @@ +thevoid = [[[[[]]]]] diff --git a/test-suite/tests/valid/array-nospaces.json b/test-suite/tests/valid/array-nospaces.json new file mode 100644 index 0000000..1833d61 --- /dev/null +++ b/test-suite/tests/valid/array-nospaces.json @@ -0,0 +1,10 @@ +{ + "ints": { + "type": "array", + "value": [ + {"type": "integer", "value": "1"}, + {"type": "integer", "value": "2"}, + {"type": "integer", "value": "3"} + ] + } +} diff --git a/test-suite/tests/valid/array-nospaces.toml b/test-suite/tests/valid/array-nospaces.toml new file mode 100644 index 0000000..6618936 --- /dev/null +++ b/test-suite/tests/valid/array-nospaces.toml @@ -0,0 +1 @@ +ints = [1,2,3] diff --git a/test-suite/tests/valid/arrays-hetergeneous.json b/test-suite/tests/valid/arrays-hetergeneous.json new file mode 100644 index 0000000..478fa5c --- /dev/null +++ b/test-suite/tests/valid/arrays-hetergeneous.json @@ -0,0 +1,19 @@ +{ + "mixed": { + "type": "array", + "value": [ + {"type": "array", "value": [ + {"type": "integer", "value": "1"}, + {"type": "integer", "value": "2"} + ]}, + {"type": "array", "value": [ + {"type": "string", "value": "a"}, + {"type": "string", "value": "b"} + 
]}, + {"type": "array", "value": [ + {"type": "float", "value": "1.1"}, + {"type": "float", "value": "2.1"} + ]} + ] + } +} diff --git a/test-suite/tests/valid/arrays-hetergeneous.toml b/test-suite/tests/valid/arrays-hetergeneous.toml new file mode 100644 index 0000000..a246fcf --- /dev/null +++ b/test-suite/tests/valid/arrays-hetergeneous.toml @@ -0,0 +1 @@ +mixed = [[1, 2], ["a", "b"], [1.1, 2.1]] diff --git a/test-suite/tests/valid/arrays-nested.json b/test-suite/tests/valid/arrays-nested.json new file mode 100644 index 0000000..d21920c --- /dev/null +++ b/test-suite/tests/valid/arrays-nested.json @@ -0,0 +1,13 @@ +{ + "nest": { + "type": "array", + "value": [ + {"type": "array", "value": [ + {"type": "string", "value": "a"} + ]}, + {"type": "array", "value": [ + {"type": "string", "value": "b"} + ]} + ] + } +} diff --git a/test-suite/tests/valid/arrays-nested.toml b/test-suite/tests/valid/arrays-nested.toml new file mode 100644 index 0000000..ce33022 --- /dev/null +++ b/test-suite/tests/valid/arrays-nested.toml @@ -0,0 +1 @@ +nest = [["a"], ["b"]] diff --git a/test-suite/tests/valid/arrays.json b/test-suite/tests/valid/arrays.json new file mode 100644 index 0000000..58aedbc --- /dev/null +++ b/test-suite/tests/valid/arrays.json @@ -0,0 +1,34 @@ +{ + "ints": { + "type": "array", + "value": [ + {"type": "integer", "value": "1"}, + {"type": "integer", "value": "2"}, + {"type": "integer", "value": "3"} + ] + }, + "floats": { + "type": "array", + "value": [ + {"type": "float", "value": "1.1"}, + {"type": "float", "value": "2.1"}, + {"type": "float", "value": "3.1"} + ] + }, + "strings": { + "type": "array", + "value": [ + {"type": "string", "value": "a"}, + {"type": "string", "value": "b"}, + {"type": "string", "value": "c"} + ] + }, + "dates": { + "type": "array", + "value": [ + {"type": "datetime", "value": "1987-07-05T17:45:00Z"}, + {"type": "datetime", "value": "1979-05-27T07:32:00Z"}, + {"type": "datetime", "value": "2006-06-01T11:00:00Z"} + ] + } +} diff --git a/test-suite/tests/valid/arrays.toml b/test-suite/tests/valid/arrays.toml new file mode 100644 index 0000000..c435f57 --- /dev/null +++ b/test-suite/tests/valid/arrays.toml @@ -0,0 +1,8 @@ +ints = [1, 2, 3] +floats = [1.1, 2.1, 3.1] +strings = ["a", "b", "c"] +dates = [ + 1987-07-05T17:45:00Z, + 1979-05-27T07:32:00Z, + 2006-06-01T11:00:00Z, +] diff --git a/test-suite/tests/valid/bool.json b/test-suite/tests/valid/bool.json new file mode 100644 index 0000000..ae368e9 --- /dev/null +++ b/test-suite/tests/valid/bool.json @@ -0,0 +1,4 @@ +{ + "f": {"type": "bool", "value": "false"}, + "t": {"type": "bool", "value": "true"} +} diff --git a/test-suite/tests/valid/bool.toml b/test-suite/tests/valid/bool.toml new file mode 100644 index 0000000..a8a829b --- /dev/null +++ b/test-suite/tests/valid/bool.toml @@ -0,0 +1,2 @@ +t = true +f = false diff --git a/test-suite/tests/valid/comments-everywhere.json b/test-suite/tests/valid/comments-everywhere.json new file mode 100644 index 0000000..e69a2e9 --- /dev/null +++ b/test-suite/tests/valid/comments-everywhere.json @@ -0,0 +1,12 @@ +{ + "group": { + "answer": {"type": "integer", "value": "42"}, + "more": { + "type": "array", + "value": [ + {"type": "integer", "value": "42"}, + {"type": "integer", "value": "42"} + ] + } + } +} diff --git a/test-suite/tests/valid/comments-everywhere.toml b/test-suite/tests/valid/comments-everywhere.toml new file mode 100644 index 0000000..3dca74c --- /dev/null +++ b/test-suite/tests/valid/comments-everywhere.toml @@ -0,0 +1,24 @@ +# Top comment. 
+ # Top comment. +# Top comment. + +# [no-extraneous-groups-please] + +[group] # Comment +answer = 42 # Comment +# no-extraneous-keys-please = 999 +# Inbetween comment. +more = [ # Comment + # What about multiple # comments? + # Can you handle it? + # + # Evil. +# Evil. + 42, 42, # Comments within arrays are fun. + # What about multiple # comments? + # Can you handle it? + # + # Evil. +# Evil. +# ] Did I fool you? +] # Hopefully not. diff --git a/test-suite/tests/valid/datetime-truncate.json b/test-suite/tests/valid/datetime-truncate.json new file mode 100644 index 0000000..8c512e1 --- /dev/null +++ b/test-suite/tests/valid/datetime-truncate.json @@ -0,0 +1,6 @@ +{ + "bestdayever": { + "type": "datetime", + "value": "1987-07-05T17:45:00.123456789Z" + } +} diff --git a/test-suite/tests/valid/datetime-truncate.toml b/test-suite/tests/valid/datetime-truncate.toml new file mode 100644 index 0000000..05de841 --- /dev/null +++ b/test-suite/tests/valid/datetime-truncate.toml @@ -0,0 +1 @@ +bestdayever = 1987-07-05T17:45:00.123456789012345Z diff --git a/test-suite/tests/valid/datetime.json b/test-suite/tests/valid/datetime.json new file mode 100644 index 0000000..2ca93ce --- /dev/null +++ b/test-suite/tests/valid/datetime.json @@ -0,0 +1,3 @@ +{ + "bestdayever": {"type": "datetime", "value": "1987-07-05T17:45:00Z"} +} diff --git a/test-suite/tests/valid/datetime.toml b/test-suite/tests/valid/datetime.toml new file mode 100644 index 0000000..2e99340 --- /dev/null +++ b/test-suite/tests/valid/datetime.toml @@ -0,0 +1 @@ +bestdayever = 1987-07-05T17:45:00Z diff --git a/test-suite/tests/valid/empty.json b/test-suite/tests/valid/empty.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/test-suite/tests/valid/empty.json @@ -0,0 +1 @@ +{} diff --git a/test-suite/tests/valid/empty.toml b/test-suite/tests/valid/empty.toml new file mode 100644 index 0000000..e69de29 diff --git a/test-suite/tests/valid/example-bom.toml b/test-suite/tests/valid/example-bom.toml new file mode 100644 index 0000000..fb5ac81 --- /dev/null +++ b/test-suite/tests/valid/example-bom.toml @@ -0,0 +1,5 @@ +best-day-ever = 1987-07-05T17:45:00Z + +[numtheory] +boring = false +perfection = [6, 28, 496] diff --git a/test-suite/tests/valid/example-v0.3.0.json b/test-suite/tests/valid/example-v0.3.0.json new file mode 100644 index 0000000..1d9dcb5 --- /dev/null +++ b/test-suite/tests/valid/example-v0.3.0.json @@ -0,0 +1 @@ 
+{"Array":{"key1":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key2":{"type":"array","value":[{"type":"string","value":"red"},{"type":"string","value":"yellow"},{"type":"string","value":"green"}]},"key3":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"integer","value":"3"},{"type":"integer","value":"4"},{"type":"integer","value":"5"}]}]},"key4":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"string","value":"a"},{"type":"string","value":"b"},{"type":"string","value":"c"}]}]},"key5":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key6":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}},"Booleans":{"False":{"type":"bool","value":"false"},"True":{"type":"bool","value":"true"}},"Datetime":{"key1":{"type":"datetime","value":"1979-05-27T07:32:00Z"}},"Float":{"both":{},"exponent":{},"fractional":{"key1":{"type":"float","value":"1.0"},"key2":{"type":"float","value":"3.1415"},"key3":{"type":"float","value":"-0.01"}}},"Integer":{"key1":{"type":"integer","value":"99"},"key2":{"type":"integer","value":"42"},"key3":{"type":"integer","value":"0"},"key4":{"type":"integer","value":"-17"}},"String":{"Literal":{"Multiline":{"lines":{"type":"string","value":"The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n"},"regex2":{"type":"string","value":"I [dw]on't need \\d{2} apples"}},"quoted":{"type":"string","value":"Tom \"Dubs\" Preston-Werner"},"regex":{"type":"string","value":"\u003c\\i\\c*\\s*\u003e"},"winpath":{"type":"string","value":"C:\\Users\\nodejs\\templates"},"winpath2":{"type":"string","value":"\\\\ServerX\\admin$\\system32\\"}},"Multiline":{"key1":{"type":"string","value":"One\nTwo"},"key2":{"type":"string","value":"One\nTwo"},"key3":{"type":"string","value":"One\nTwo"}},"Multilined":{"Singleline":{"key1":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key2":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key3":{"type":"string","value":"The quick brown fox jumps over the lazy dog."}}},"basic":{"type":"string","value":"I'm a string. \"You can quote me\". Name\u0009José\nLocation\u0009SF."}},"Table":{"key":{"type":"string","value":"value"}},"dog":{"tater":{"type":{"type":"string","value":"pug"}}},"fruit":[{"name":{"type":"string","value":"apple"},"physical":{"color":{"type":"string","value":"red"},"shape":{"type":"string","value":"round"}},"variety":[{"name":{"type":"string","value":"red delicious"}},{"name":{"type":"string","value":"granny smith"}}]},{"name":{"type":"string","value":"banana"},"variety":[{"name":{"type":"string","value":"plantain"}}]}],"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"x":{"y":{"z":{"w":{}}}}} diff --git a/test-suite/tests/valid/example-v0.3.0.toml b/test-suite/tests/valid/example-v0.3.0.toml new file mode 100644 index 0000000..76aacc3 --- /dev/null +++ b/test-suite/tests/valid/example-v0.3.0.toml @@ -0,0 +1,182 @@ +# Comment +# I am a comment. Hear me roar. Roar. 
+ +# Table +# Tables (also known as hash tables or dictionaries) are collections of key/value pairs. +# They appear in square brackets on a line by themselves. + +[Table] + +key = "value" # Yeah, you can do this. + +# Nested tables are denoted by table names with dots in them. Name your tables whatever crap you please, just don't use #, ., [ or ]. + +[dog.tater] +type = "pug" + +# You don't need to specify all the super-tables if you don't want to. TOML knows how to do it for you. + +# [x] you +# [x.y] don't +# [x.y.z] need these +[x.y.z.w] # for this to work + +# String +# There are four ways to express strings: basic, multi-line basic, literal, and multi-line literal. +# All strings must contain only valid UTF-8 characters. + +[String] +basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF." + +[String.Multiline] + +# The following strings are byte-for-byte equivalent: +key1 = "One\nTwo" +key2 = """One\nTwo""" +key3 = """ +One +Two""" + +[String.Multilined.Singleline] + +# The following strings are byte-for-byte equivalent: +key1 = "The quick brown fox jumps over the lazy dog." + +key2 = """ +The quick brown \ + + + fox jumps over \ + the lazy dog.""" + +key3 = """\ + The quick brown \ + fox jumps over \ + the lazy dog.\ + """ + +[String.Literal] + +# What you see is what you get. +winpath = 'C:\Users\nodejs\templates' +winpath2 = '\\ServerX\admin$\system32\' +quoted = 'Tom "Dubs" Preston-Werner' +regex = '<\i\c*\s*>' + + +[String.Literal.Multiline] + +regex2 = '''I [dw]on't need \d{2} apples''' +lines = ''' +The first newline is +trimmed in raw strings. + All other whitespace + is preserved. +''' + +# Integer +# Integers are whole numbers. Positive numbers may be prefixed with a plus sign. +# Negative numbers are prefixed with a minus sign. + +[Integer] +key1 = +99 +key2 = 42 +key3 = 0 +key4 = -17 + +# Float +# A float consists of an integer part (which may be prefixed with a plus or minus sign) +# followed by a fractional part and/or an exponent part. + +[Float.fractional] + +# fractional +key1 = +1.0 +key2 = 3.1415 +key3 = -0.01 + +[Float.exponent] + +# exponent +#key1 = 5e+22 +#key2 = 1e6 +#key3 = -2E-2 + +[Float.both] + +# both +#key = 6.626e-34 + +# Boolean +# Booleans are just the tokens you're used to. Always lowercase. + +[Booleans] +True = true +False = false + +# Datetime +# Datetimes are RFC 3339 dates. + +[Datetime] +key1 = 1979-05-27T07:32:00Z +#key2 = 1979-05-27T00:32:00-07:00 +#key3 = 1979-05-27T00:32:00.999999-07:00 + +# Array +# Arrays are square brackets with other primitives inside. Whitespace is ignored. Elements are separated by commas. Data types may not be mixed. + +[Array] +key1 = [ 1, 2, 3 ] +key2 = [ "red", "yellow", "green" ] +key3 = [ [ 1, 2 ], [3, 4, 5] ] +key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok + +#Arrays can also be multiline. So in addition to ignoring whitespace, arrays also ignore newlines between the brackets. +# Terminating commas are ok before the closing bracket. + +key5 = [ + 1, 2, 3 +] +key6 = [ + 1, + 2, # this is ok +] + +# Array of Tables +# These can be expressed by using a table name in double brackets. +# Each table with the same double bracketed name will be an element in the array. +# The tables are inserted in the order encountered. + +[[products]] +name = "Hammer" +sku = 738594937 + +[[products]] + +[[products]] +name = "Nail" +sku = 284758393 +color = "gray" + + +# You can create nested arrays of tables as well. 
+ +[[fruit]] + name = "apple" + + [fruit.physical] + color = "red" + shape = "round" + + [[fruit.variety]] + name = "red delicious" + + [[fruit.variety]] + name = "granny smith" + +[[fruit]] + name = "banana" + + [[fruit.variety]] + name = "plantain" + diff --git a/test-suite/tests/valid/example-v0.4.0.json b/test-suite/tests/valid/example-v0.4.0.json new file mode 100644 index 0000000..d5cac34 --- /dev/null +++ b/test-suite/tests/valid/example-v0.4.0.json @@ -0,0 +1 @@ +{"array":{"key1":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key2":{"type":"array","value":[{"type":"string","value":"red"},{"type":"string","value":"yellow"},{"type":"string","value":"green"}]},"key3":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"integer","value":"3"},{"type":"integer","value":"4"},{"type":"integer","value":"5"}]}]},"key4":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"string","value":"a"},{"type":"string","value":"b"},{"type":"string","value":"c"}]}]},"key5":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key6":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}},"boolean":{"False":{"type":"bool","value":"false"},"True":{"type":"bool","value":"true"}},"datetime":{},"float":{"both":{},"exponent":{},"fractional":{"key1":{"type":"float","value":"1.0"},"key2":{"type":"float","value":"3.1415"},"key3":{"type":"float","value":"-0.01"}},"underscores":{}},"fruit":[{"name":{"type":"string","value":"apple"},"physical":{"color":{"type":"string","value":"red"},"shape":{"type":"string","value":"round"}},"variety":[{"name":{"type":"string","value":"red delicious"}},{"name":{"type":"string","value":"granny smith"}}]},{"name":{"type":"string","value":"banana"},"variety":[{"name":{"type":"string","value":"plantain"}}]}],"integer":{"key1":{"type":"integer","value":"99"},"key2":{"type":"integer","value":"42"},"key3":{"type":"integer","value":"0"},"key4":{"type":"integer","value":"-17"},"underscores":{"key1":{"type":"integer","value":"1000"},"key2":{"type":"integer","value":"5349221"},"key3":{"type":"integer","value":"12345"}}},"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"string":{"basic":{"basic":{"type":"string","value":"I'm a string. \"You can quote me\". 
Name\u0009José\nLocation\u0009SF."}},"literal":{"multiline":{"lines":{"type":"string","value":"The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n"},"regex2":{"type":"string","value":"I [dw]on't need \\d{2} apples"}},"quoted":{"type":"string","value":"Tom \"Dubs\" Preston-Werner"},"regex":{"type":"string","value":"\u003c\\i\\c*\\s*\u003e"},"winpath":{"type":"string","value":"C:\\Users\\nodejs\\templates"},"winpath2":{"type":"string","value":"\\\\ServerX\\admin$\\system32\\"}},"multiline":{"continued":{"key1":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key2":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key3":{"type":"string","value":"The quick brown fox jumps over the lazy dog."}},"key1":{"type":"string","value":"One\nTwo"},"key2":{"type":"string","value":"One\nTwo"},"key3":{"type":"string","value":"One\nTwo"}}},"table":{"inline":{"name":{"first":{"type":"string","value":"Tom"},"last":{"type":"string","value":"Preston-Werner"}},"point":{"x":{"type":"integer","value":"1"},"y":{"type":"integer","value":"2"}}},"key":{"type":"string","value":"value"},"subtable":{"key":{"type":"string","value":"another value"}}},"x":{"y":{"z":{"w":{}}}}} diff --git a/test-suite/tests/valid/example-v0.4.0.toml b/test-suite/tests/valid/example-v0.4.0.toml new file mode 100644 index 0000000..ffbcce0 --- /dev/null +++ b/test-suite/tests/valid/example-v0.4.0.toml @@ -0,0 +1,235 @@ +################################################################################ +## Comment + +# Speak your mind with the hash symbol. They go from the symbol to the end of +# the line. + + +################################################################################ +## Table + +# Tables (also known as hash tables or dictionaries) are collections of +# key/value pairs. They appear in square brackets on a line by themselves. + +[table] + +key = "value" # Yeah, you can do this. + +# Nested tables are denoted by table names with dots in them. Name your tables +# whatever crap you please, just don't use #, ., [ or ]. + +[table.subtable] + +key = "another value" + +# You don't need to specify all the super-tables if you don't want to. TOML +# knows how to do it for you. + +# [x] you +# [x.y] don't +# [x.y.z] need these +[x.y.z.w] # for this to work + + +################################################################################ +## Inline Table + +# Inline tables provide a more compact syntax for expressing tables. They are +# especially useful for grouped data that can otherwise quickly become verbose. +# Inline tables are enclosed in curly braces `{` and `}`. No newlines are +# allowed between the curly braces unless they are valid within a value. + +[table.inline] + +name = { first = "Tom", last = "Preston-Werner" } +point = { x = 1, y = 2 } + + +################################################################################ +## String + +# There are four ways to express strings: basic, multi-line basic, literal, and +# multi-line literal. All strings must contain only valid UTF-8 characters. + +[string.basic] + +basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF." + +[string.multiline] + +# The following strings are byte-for-byte equivalent: +key1 = "One\nTwo" +key2 = """One\nTwo""" +key3 = """ +One +Two""" + +[string.multiline.continued] + +# The following strings are byte-for-byte equivalent: +key1 = "The quick brown fox jumps over the lazy dog." 
+ +key2 = """ +The quick brown \ + + + fox jumps over \ + the lazy dog.""" + +key3 = """\ + The quick brown \ + fox jumps over \ + the lazy dog.\ + """ + +[string.literal] + +# What you see is what you get. +winpath = 'C:\Users\nodejs\templates' +winpath2 = '\\ServerX\admin$\system32\' +quoted = 'Tom "Dubs" Preston-Werner' +regex = '<\i\c*\s*>' + + +[string.literal.multiline] + +regex2 = '''I [dw]on't need \d{2} apples''' +lines = ''' +The first newline is +trimmed in raw strings. + All other whitespace + is preserved. +''' + + +################################################################################ +## Integer + +# Integers are whole numbers. Positive numbers may be prefixed with a plus sign. +# Negative numbers are prefixed with a minus sign. + +[integer] + +key1 = +99 +key2 = 42 +key3 = 0 +key4 = -17 + +[integer.underscores] + +# For large numbers, you may use underscores to enhance readability. Each +# underscore must be surrounded by at least one digit. +key1 = 1_000 +key2 = 5_349_221 +key3 = 1_2_3_4_5 # valid but inadvisable + + +################################################################################ +## Float + +# A float consists of an integer part (which may be prefixed with a plus or +# minus sign) followed by a fractional part and/or an exponent part. + +[float.fractional] + +key1 = +1.0 +key2 = 3.1415 +key3 = -0.01 + +[float.exponent] + +[float.both] + +[float.underscores] + + +################################################################################ +## Boolean + +# Booleans are just the tokens you're used to. Always lowercase. + +[boolean] + +True = true +False = false + + +################################################################################ +## Datetime + +# Datetimes are RFC 3339 dates. + +[datetime] + +#key1 = 1979-05-27T07:32:00Z +#key2 = 1979-05-27T00:32:00-07:00 +#key3 = 1979-05-27T00:32:00.999999-07:00 + + +################################################################################ +## Array + +# Arrays are square brackets with other primitives inside. Whitespace is +# ignored. Elements are separated by commas. Data types may not be mixed. + +[array] + +key1 = [ 1, 2, 3 ] +key2 = [ "red", "yellow", "green" ] +key3 = [ [ 1, 2 ], [3, 4, 5] ] +key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok + +# Arrays can also be multiline. So in addition to ignoring whitespace, arrays +# also ignore newlines between the brackets. Terminating commas are ok before +# the closing bracket. + +key5 = [ + 1, 2, 3 +] +key6 = [ + 1, + 2, # this is ok +] + + +################################################################################ +## Array of Tables + +# These can be expressed by using a table name in double brackets. Each table +# with the same double bracketed name will be an element in the array. The +# tables are inserted in the order encountered. + +[[products]] + +name = "Hammer" +sku = 738594937 + +[[products]] + +[[products]] + +name = "Nail" +sku = 284758393 +color = "gray" + + +# You can create nested arrays of tables as well. 
+ +[[fruit]] + name = "apple" + + [fruit.physical] + color = "red" + shape = "round" + + [[fruit.variety]] + name = "red delicious" + + [[fruit.variety]] + name = "granny smith" + +[[fruit]] + name = "banana" + + [[fruit.variety]] + name = "plantain" diff --git a/test-suite/tests/valid/example.json b/test-suite/tests/valid/example.json new file mode 100644 index 0000000..48aa907 --- /dev/null +++ b/test-suite/tests/valid/example.json @@ -0,0 +1,14 @@ +{ + "best-day-ever": {"type": "datetime", "value": "1987-07-05T17:45:00Z"}, + "numtheory": { + "boring": {"type": "bool", "value": "false"}, + "perfection": { + "type": "array", + "value": [ + {"type": "integer", "value": "6"}, + {"type": "integer", "value": "28"}, + {"type": "integer", "value": "496"} + ] + } + } +} diff --git a/test-suite/tests/valid/example.toml b/test-suite/tests/valid/example.toml new file mode 100644 index 0000000..8cb02e0 --- /dev/null +++ b/test-suite/tests/valid/example.toml @@ -0,0 +1,5 @@ +best-day-ever = 1987-07-05T17:45:00Z + +[numtheory] +boring = false +perfection = [6, 28, 496] diff --git a/test-suite/tests/valid/example2.json b/test-suite/tests/valid/example2.json new file mode 100644 index 0000000..3249a97 --- /dev/null +++ b/test-suite/tests/valid/example2.json @@ -0,0 +1 @@ +{"clients":{"data":{"type":"array","value":[{"type":"array","value":[{"type":"string","value":"gamma"},{"type":"string","value":"delta"}]},{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}]},"hosts":{"type":"array","value":[{"type":"string","value":"alpha"},{"type":"string","value":"omega"}]}},"database":{"connection_max":{"type":"integer","value":"5000"},"enabled":{"type":"bool","value":"true"},"ports":{"type":"array","value":[{"type":"integer","value":"8001"},{"type":"integer","value":"8001"},{"type":"integer","value":"8002"}]},"server":{"type":"string","value":"192.168.1.1"}},"owner":{"bio":{"type":"string","value":"GitHub Cofounder \u0026 CEO\nLikes tater tots and beer."},"dob":{"type":"datetime","value":"1979-05-27T07:32:00Z"},"name":{"type":"string","value":"Tom Preston-Werner"},"organization":{"type":"string","value":"GitHub"}},"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"servers":{"alpha":{"dc":{"type":"string","value":"eqdc10"},"ip":{"type":"string","value":"10.0.0.1"}},"beta":{"country":{"type":"string","value":"中国"},"dc":{"type":"string","value":"eqdc10"},"ip":{"type":"string","value":"10.0.0.2"}}},"title":{"type":"string","value":"TOML Example"}} diff --git a/test-suite/tests/valid/example2.toml b/test-suite/tests/valid/example2.toml new file mode 100644 index 0000000..bc12c99 --- /dev/null +++ b/test-suite/tests/valid/example2.toml @@ -0,0 +1,47 @@ +# This is a TOML document. Boom. + +title = "TOML Example" + +[owner] +name = "Tom Preston-Werner" +organization = "GitHub" +bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." +dob = 1979-05-27T07:32:00Z # First class dates? Why not? + +[database] +server = "192.168.1.1" +ports = [ 8001, 8001, 8002 ] +connection_max = 5000 +enabled = true + +[servers] + + # You can indent as you please. Tabs or spaces. TOML don't care. 
+ [servers.alpha] + ip = "10.0.0.1" + dc = "eqdc10" + + [servers.beta] + ip = "10.0.0.2" + dc = "eqdc10" + country = "中国" # This should be parsed as UTF-8 + +[clients] +data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it + +# Line breaks are OK when inside arrays +hosts = [ + "alpha", + "omega" +] + +# Products + + [[products]] + name = "Hammer" + sku = 738594937 + + [[products]] + name = "Nail" + sku = 284758393 + color = "gray" diff --git a/test-suite/tests/valid/float.json b/test-suite/tests/valid/float.json new file mode 100644 index 0000000..b8a2e97 --- /dev/null +++ b/test-suite/tests/valid/float.json @@ -0,0 +1,4 @@ +{ + "pi": {"type": "float", "value": "3.14"}, + "negpi": {"type": "float", "value": "-3.14"} +} diff --git a/test-suite/tests/valid/float.toml b/test-suite/tests/valid/float.toml new file mode 100644 index 0000000..7c528d2 --- /dev/null +++ b/test-suite/tests/valid/float.toml @@ -0,0 +1,2 @@ +pi = 3.14 +negpi = -3.14 diff --git a/test-suite/tests/valid/hard_example.json b/test-suite/tests/valid/hard_example.json new file mode 100644 index 0000000..9762e58 --- /dev/null +++ b/test-suite/tests/valid/hard_example.json @@ -0,0 +1 @@ +{"the":{"hard":{"another_test_string":{"type":"string","value":" Same thing, but with a string #"},"bit#":{"multi_line_array":{"type":"array","value":[{"type":"string","value":"]"}]},"what?":{"type":"string","value":"You don't think some user won't do that?"}},"harder_test_string":{"type":"string","value":" And when \"'s are in the string, along with # \""},"test_array":{"type":"array","value":[{"type":"string","value":"] "},{"type":"string","value":" # "}]},"test_array2":{"type":"array","value":[{"type":"string","value":"Test #11 ]proved that"},{"type":"string","value":"Experiment #9 was a success"}]}},"test_string":{"type":"string","value":"You'll hate me after this - #"}}} diff --git a/test-suite/tests/valid/hard_example.toml b/test-suite/tests/valid/hard_example.toml new file mode 100644 index 0000000..38856c8 --- /dev/null +++ b/test-suite/tests/valid/hard_example.toml @@ -0,0 +1,33 @@ +# Test file for TOML +# Only this one tries to emulate a TOML file written by a user of the kind of parser writers probably hate +# This part you'll really hate + +[the] +test_string = "You'll hate me after this - #" # " Annoying, isn't it? + + [the.hard] + test_array = [ "] ", " # "] # ] There you go, parse this! + test_array2 = [ "Test #11 ]proved that", "Experiment #9 was a success" ] + # You didn't think it'd as easy as chucking out the last #, did you? + another_test_string = " Same thing, but with a string #" + harder_test_string = " And when \"'s are in the string, along with # \"" # "and comments are there too" + # Things will get harder + + [the.hard."bit#"] + "what?" = "You don't think some user won't do that?" + multi_line_array = [ + "]", + # ] Oh yes I did + ] + +# Each of the following keygroups/key value pairs should produce an error. 
Uncomment to them to test + +#[error] if you didn't catch this, your parser is broken +#string = "Anything other than tabs, spaces and newline after a keygroup or key value pair has ended should produce an error unless it is a comment" like this +#array = [ +# "This might most likely happen in multiline arrays", +# Like here, +# "or here, +# and here" +# ] End of array comment, forgot the # +#number = 3.14 pi <--again forgot the # diff --git a/test-suite/tests/valid/implicit-and-explicit-after.json b/test-suite/tests/valid/implicit-and-explicit-after.json new file mode 100644 index 0000000..374bd09 --- /dev/null +++ b/test-suite/tests/valid/implicit-and-explicit-after.json @@ -0,0 +1,10 @@ +{ + "a": { + "better": {"type": "integer", "value": "43"}, + "b": { + "c": { + "answer": {"type": "integer", "value": "42"} + } + } + } +} diff --git a/test-suite/tests/valid/implicit-and-explicit-after.toml b/test-suite/tests/valid/implicit-and-explicit-after.toml new file mode 100644 index 0000000..c0e8865 --- /dev/null +++ b/test-suite/tests/valid/implicit-and-explicit-after.toml @@ -0,0 +1,5 @@ +[a.b.c] +answer = 42 + +[a] +better = 43 diff --git a/test-suite/tests/valid/implicit-and-explicit-before.json b/test-suite/tests/valid/implicit-and-explicit-before.json new file mode 100644 index 0000000..374bd09 --- /dev/null +++ b/test-suite/tests/valid/implicit-and-explicit-before.json @@ -0,0 +1,10 @@ +{ + "a": { + "better": {"type": "integer", "value": "43"}, + "b": { + "c": { + "answer": {"type": "integer", "value": "42"} + } + } + } +} diff --git a/test-suite/tests/valid/implicit-and-explicit-before.toml b/test-suite/tests/valid/implicit-and-explicit-before.toml new file mode 100644 index 0000000..eee68ff --- /dev/null +++ b/test-suite/tests/valid/implicit-and-explicit-before.toml @@ -0,0 +1,5 @@ +[a] +better = 43 + +[a.b.c] +answer = 42 diff --git a/test-suite/tests/valid/implicit-groups.json b/test-suite/tests/valid/implicit-groups.json new file mode 100644 index 0000000..fbae7fc --- /dev/null +++ b/test-suite/tests/valid/implicit-groups.json @@ -0,0 +1,9 @@ +{ + "a": { + "b": { + "c": { + "answer": {"type": "integer", "value": "42"} + } + } + } +} diff --git a/test-suite/tests/valid/implicit-groups.toml b/test-suite/tests/valid/implicit-groups.toml new file mode 100644 index 0000000..b6333e4 --- /dev/null +++ b/test-suite/tests/valid/implicit-groups.toml @@ -0,0 +1,2 @@ +[a.b.c] +answer = 42 diff --git a/test-suite/tests/valid/integer.json b/test-suite/tests/valid/integer.json new file mode 100644 index 0000000..61985a1 --- /dev/null +++ b/test-suite/tests/valid/integer.json @@ -0,0 +1,4 @@ +{ + "answer": {"type": "integer", "value": "42"}, + "neganswer": {"type": "integer", "value": "-42"} +} diff --git a/test-suite/tests/valid/integer.toml b/test-suite/tests/valid/integer.toml new file mode 100644 index 0000000..c4f6297 --- /dev/null +++ b/test-suite/tests/valid/integer.toml @@ -0,0 +1,2 @@ +answer = 42 +neganswer = -42 diff --git a/test-suite/tests/valid/key-equals-nospace.json b/test-suite/tests/valid/key-equals-nospace.json new file mode 100644 index 0000000..1f8709a --- /dev/null +++ b/test-suite/tests/valid/key-equals-nospace.json @@ -0,0 +1,3 @@ +{ + "answer": {"type": "integer", "value": "42"} +} diff --git a/test-suite/tests/valid/key-equals-nospace.toml b/test-suite/tests/valid/key-equals-nospace.toml new file mode 100644 index 0000000..560901c --- /dev/null +++ b/test-suite/tests/valid/key-equals-nospace.toml @@ -0,0 +1 @@ +answer=42 diff --git 
a/test-suite/tests/valid/key-quote-newline.json b/test-suite/tests/valid/key-quote-newline.json new file mode 100644 index 0000000..12473e4 --- /dev/null +++ b/test-suite/tests/valid/key-quote-newline.json @@ -0,0 +1,3 @@ +{ + "\n": {"type": "integer", "value": "1"} +} diff --git a/test-suite/tests/valid/key-quote-newline.toml b/test-suite/tests/valid/key-quote-newline.toml new file mode 100644 index 0000000..a2639bf --- /dev/null +++ b/test-suite/tests/valid/key-quote-newline.toml @@ -0,0 +1 @@ +"\n" = 1 diff --git a/test-suite/tests/valid/key-space.json b/test-suite/tests/valid/key-space.json new file mode 100644 index 0000000..9d1f769 --- /dev/null +++ b/test-suite/tests/valid/key-space.json @@ -0,0 +1,3 @@ +{ + "a b": {"type": "integer", "value": "1"} +} diff --git a/test-suite/tests/valid/key-space.toml b/test-suite/tests/valid/key-space.toml new file mode 100644 index 0000000..f4f36c4 --- /dev/null +++ b/test-suite/tests/valid/key-space.toml @@ -0,0 +1 @@ +"a b" = 1 diff --git a/test-suite/tests/valid/key-special-chars.json b/test-suite/tests/valid/key-special-chars.json new file mode 100644 index 0000000..6550ebd --- /dev/null +++ b/test-suite/tests/valid/key-special-chars.json @@ -0,0 +1,5 @@ +{ + "~!@#$^&*()_+-`1234567890[]\\|/?><.,;:'": { + "type": "integer", "value": "1" + } +} diff --git a/test-suite/tests/valid/key-special-chars.toml b/test-suite/tests/valid/key-special-chars.toml new file mode 100644 index 0000000..dc43625 --- /dev/null +++ b/test-suite/tests/valid/key-special-chars.toml @@ -0,0 +1 @@ +"~!@#$^&*()_+-`1234567890[]\\|/?><.,;:'" = 1 diff --git a/test-suite/tests/valid/key-with-pound.json b/test-suite/tests/valid/key-with-pound.json new file mode 100644 index 0000000..ee39e1d --- /dev/null +++ b/test-suite/tests/valid/key-with-pound.json @@ -0,0 +1,3 @@ +{ + "key#name": {"type": "integer", "value": "5"} +} diff --git a/test-suite/tests/valid/key-with-pound.toml b/test-suite/tests/valid/key-with-pound.toml new file mode 100644 index 0000000..65b766f --- /dev/null +++ b/test-suite/tests/valid/key-with-pound.toml @@ -0,0 +1 @@ +"key#name" = 5 diff --git a/test-suite/tests/valid/long-float.json b/test-suite/tests/valid/long-float.json new file mode 100644 index 0000000..8ceed47 --- /dev/null +++ b/test-suite/tests/valid/long-float.json @@ -0,0 +1,4 @@ +{ + "longpi": {"type": "float", "value": "3.141592653589793"}, + "neglongpi": {"type": "float", "value": "-3.141592653589793"} +} diff --git a/test-suite/tests/valid/long-float.toml b/test-suite/tests/valid/long-float.toml new file mode 100644 index 0000000..9558ae4 --- /dev/null +++ b/test-suite/tests/valid/long-float.toml @@ -0,0 +1,2 @@ +longpi = 3.141592653589793 +neglongpi = -3.141592653589793 diff --git a/test-suite/tests/valid/long-integer.json b/test-suite/tests/valid/long-integer.json new file mode 100644 index 0000000..16c331e --- /dev/null +++ b/test-suite/tests/valid/long-integer.json @@ -0,0 +1,4 @@ +{ + "answer": {"type": "integer", "value": "9223372036854775807"}, + "neganswer": {"type": "integer", "value": "-9223372036854775808"} +} diff --git a/test-suite/tests/valid/long-integer.toml b/test-suite/tests/valid/long-integer.toml new file mode 100644 index 0000000..424a13a --- /dev/null +++ b/test-suite/tests/valid/long-integer.toml @@ -0,0 +1,2 @@ +answer = 9223372036854775807 +neganswer = -9223372036854775808 diff --git a/test-suite/tests/valid/multiline-string.json b/test-suite/tests/valid/multiline-string.json new file mode 100644 index 0000000..075bf50 --- /dev/null +++ 
b/test-suite/tests/valid/multiline-string.json @@ -0,0 +1,30 @@ +{ + "multiline_empty_one": { + "type": "string", + "value": "" + }, + "multiline_empty_two": { + "type": "string", + "value": "" + }, + "multiline_empty_three": { + "type": "string", + "value": "" + }, + "multiline_empty_four": { + "type": "string", + "value": "" + }, + "equivalent_one": { + "type": "string", + "value": "The quick brown fox jumps over the lazy dog." + }, + "equivalent_two": { + "type": "string", + "value": "The quick brown fox jumps over the lazy dog." + }, + "equivalent_three": { + "type": "string", + "value": "The quick brown fox jumps over the lazy dog." + } +} diff --git a/test-suite/tests/valid/multiline-string.toml b/test-suite/tests/valid/multiline-string.toml new file mode 100644 index 0000000..15b1143 --- /dev/null +++ b/test-suite/tests/valid/multiline-string.toml @@ -0,0 +1,23 @@ +multiline_empty_one = """""" +multiline_empty_two = """ +""" +multiline_empty_three = """\ + """ +multiline_empty_four = """\ + \ + \ + """ + +equivalent_one = "The quick brown fox jumps over the lazy dog." +equivalent_two = """ +The quick brown \ + + + fox jumps over \ + the lazy dog.""" + +equivalent_three = """\ + The quick brown \ + fox jumps over \ + the lazy dog.\ + """ diff --git a/test-suite/tests/valid/raw-multiline-string.json b/test-suite/tests/valid/raw-multiline-string.json new file mode 100644 index 0000000..b43cce5 --- /dev/null +++ b/test-suite/tests/valid/raw-multiline-string.json @@ -0,0 +1,14 @@ +{ + "oneline": { + "type": "string", + "value": "This string has a ' quote character." + }, + "firstnl": { + "type": "string", + "value": "This string has a ' quote character." + }, + "multiline": { + "type": "string", + "value": "This string\nhas ' a quote character\nand more than\none newline\nin it." + } +} diff --git a/test-suite/tests/valid/raw-multiline-string.toml b/test-suite/tests/valid/raw-multiline-string.toml new file mode 100644 index 0000000..8094c03 --- /dev/null +++ b/test-suite/tests/valid/raw-multiline-string.toml @@ -0,0 +1,9 @@ +oneline = '''This string has a ' quote character.''' +firstnl = ''' +This string has a ' quote character.''' +multiline = ''' +This string +has ' a quote character +and more than +one newline +in it.''' diff --git a/test-suite/tests/valid/raw-string.json b/test-suite/tests/valid/raw-string.json new file mode 100644 index 0000000..693ab9b --- /dev/null +++ b/test-suite/tests/valid/raw-string.json @@ -0,0 +1,30 @@ +{ + "backspace": { + "type": "string", + "value": "This string has a \\b backspace character." + }, + "tab": { + "type": "string", + "value": "This string has a \\t tab character." + }, + "newline": { + "type": "string", + "value": "This string has a \\n new line character." + }, + "formfeed": { + "type": "string", + "value": "This string has a \\f form feed character." + }, + "carriage": { + "type": "string", + "value": "This string has a \\r carriage return character." + }, + "slash": { + "type": "string", + "value": "This string has a \\/ slash character." + }, + "backslash": { + "type": "string", + "value": "This string has a \\\\ backslash character." + } +} diff --git a/test-suite/tests/valid/raw-string.toml b/test-suite/tests/valid/raw-string.toml new file mode 100644 index 0000000..92acd25 --- /dev/null +++ b/test-suite/tests/valid/raw-string.toml @@ -0,0 +1,7 @@ +backspace = 'This string has a \b backspace character.' +tab = 'This string has a \t tab character.' +newline = 'This string has a \n new line character.' 
+formfeed = 'This string has a \f form feed character.' +carriage = 'This string has a \r carriage return character.' +slash = 'This string has a \/ slash character.' +backslash = 'This string has a \\ backslash character.' diff --git a/test-suite/tests/valid/string-empty.json b/test-suite/tests/valid/string-empty.json new file mode 100644 index 0000000..6c26d69 --- /dev/null +++ b/test-suite/tests/valid/string-empty.json @@ -0,0 +1,6 @@ +{ + "answer": { + "type": "string", + "value": "" + } +} diff --git a/test-suite/tests/valid/string-empty.toml b/test-suite/tests/valid/string-empty.toml new file mode 100644 index 0000000..e37e681 --- /dev/null +++ b/test-suite/tests/valid/string-empty.toml @@ -0,0 +1 @@ +answer = "" diff --git a/test-suite/tests/valid/string-escapes.json b/test-suite/tests/valid/string-escapes.json new file mode 100644 index 0000000..62dac51 --- /dev/null +++ b/test-suite/tests/valid/string-escapes.json @@ -0,0 +1,50 @@ +{ + "backspace": { + "type": "string", + "value": "This string has a \u0008 backspace character." + }, + "tab": { + "type": "string", + "value": "This string has a \u0009 tab character." + }, + "newline": { + "type": "string", + "value": "This string has a \u000A new line character." + }, + "formfeed": { + "type": "string", + "value": "This string has a \u000C form feed character." + }, + "carriage": { + "type": "string", + "value": "This string has a \u000D carriage return character." + }, + "quote": { + "type": "string", + "value": "This string has a \u0022 quote character." + }, + "slash": { + "type": "string", + "value": "This string has a \u002F slash character." + }, + "backslash": { + "type": "string", + "value": "This string has a \u005C backslash character." + }, + "notunicode1": { + "type": "string", + "value": "This string does not have a unicode \\u escape." + }, + "notunicode2": { + "type": "string", + "value": "This string does not have a unicode \u005Cu escape." + }, + "notunicode3": { + "type": "string", + "value": "This string does not have a unicode \\u0075 escape." + }, + "notunicode4": { + "type": "string", + "value": "This string does not have a unicode \\\u0075 escape." + } +} diff --git a/test-suite/tests/valid/string-escapes.toml b/test-suite/tests/valid/string-escapes.toml new file mode 100644 index 0000000..c5d4954 --- /dev/null +++ b/test-suite/tests/valid/string-escapes.toml @@ -0,0 +1,12 @@ +backspace = "This string has a \b backspace character." +tab = "This string has a \t tab character." +newline = "This string has a \n new line character." +formfeed = "This string has a \f form feed character." +carriage = "This string has a \r carriage return character." +quote = "This string has a \" quote character." +slash = "This string has a / slash character." +backslash = "This string has a \\ backslash character." +notunicode1 = "This string does not have a unicode \\u escape." +notunicode2 = "This string does not have a unicode \u005Cu escape." +notunicode3 = "This string does not have a unicode \\u0075 escape." +notunicode4 = "This string does not have a unicode \\\u0075 escape." diff --git a/test-suite/tests/valid/string-simple.json b/test-suite/tests/valid/string-simple.json new file mode 100644 index 0000000..2e05f99 --- /dev/null +++ b/test-suite/tests/valid/string-simple.json @@ -0,0 +1,6 @@ +{ + "answer": { + "type": "string", + "value": "You are not drinking enough whisky." 
+ } +} diff --git a/test-suite/tests/valid/string-simple.toml b/test-suite/tests/valid/string-simple.toml new file mode 100644 index 0000000..e17ade6 --- /dev/null +++ b/test-suite/tests/valid/string-simple.toml @@ -0,0 +1 @@ +answer = "You are not drinking enough whisky." diff --git a/test-suite/tests/valid/string-with-pound.json b/test-suite/tests/valid/string-with-pound.json new file mode 100644 index 0000000..33cdc9c --- /dev/null +++ b/test-suite/tests/valid/string-with-pound.json @@ -0,0 +1,7 @@ +{ + "pound": {"type": "string", "value": "We see no # comments here."}, + "poundcomment": { + "type": "string", + "value": "But there are # some comments here." + } +} diff --git a/test-suite/tests/valid/string-with-pound.toml b/test-suite/tests/valid/string-with-pound.toml new file mode 100644 index 0000000..5fd8746 --- /dev/null +++ b/test-suite/tests/valid/string-with-pound.toml @@ -0,0 +1,2 @@ +pound = "We see no # comments here." +poundcomment = "But there are # some comments here." # Did I # mess you up? diff --git a/test-suite/tests/valid/table-array-implicit.json b/test-suite/tests/valid/table-array-implicit.json new file mode 100644 index 0000000..32e4640 --- /dev/null +++ b/test-suite/tests/valid/table-array-implicit.json @@ -0,0 +1,7 @@ +{ + "albums": { + "songs": [ + {"name": {"type": "string", "value": "Glory Days"}} + ] + } +} diff --git a/test-suite/tests/valid/table-array-implicit.toml b/test-suite/tests/valid/table-array-implicit.toml new file mode 100644 index 0000000..3157ac9 --- /dev/null +++ b/test-suite/tests/valid/table-array-implicit.toml @@ -0,0 +1,2 @@ +[[albums.songs]] +name = "Glory Days" diff --git a/test-suite/tests/valid/table-array-many.json b/test-suite/tests/valid/table-array-many.json new file mode 100644 index 0000000..84df2da --- /dev/null +++ b/test-suite/tests/valid/table-array-many.json @@ -0,0 +1,16 @@ +{ + "people": [ + { + "first_name": {"type": "string", "value": "Bruce"}, + "last_name": {"type": "string", "value": "Springsteen"} + }, + { + "first_name": {"type": "string", "value": "Eric"}, + "last_name": {"type": "string", "value": "Clapton"} + }, + { + "first_name": {"type": "string", "value": "Bob"}, + "last_name": {"type": "string", "value": "Seger"} + } + ] +} diff --git a/test-suite/tests/valid/table-array-many.toml b/test-suite/tests/valid/table-array-many.toml new file mode 100644 index 0000000..46062be --- /dev/null +++ b/test-suite/tests/valid/table-array-many.toml @@ -0,0 +1,11 @@ +[[people]] +first_name = "Bruce" +last_name = "Springsteen" + +[[people]] +first_name = "Eric" +last_name = "Clapton" + +[[people]] +first_name = "Bob" +last_name = "Seger" diff --git a/test-suite/tests/valid/table-array-nest-no-keys.json b/test-suite/tests/valid/table-array-nest-no-keys.json new file mode 100644 index 0000000..7537b1a --- /dev/null +++ b/test-suite/tests/valid/table-array-nest-no-keys.json @@ -0,0 +1,14 @@ +{ + "albums": [ + { + "songs": [{}, {}] + } + ], + "artists": [ + { + "home": { + "address": {} + } + } + ] +} diff --git a/test-suite/tests/valid/table-array-nest-no-keys.toml b/test-suite/tests/valid/table-array-nest-no-keys.toml new file mode 100644 index 0000000..ad6eb10 --- /dev/null +++ b/test-suite/tests/valid/table-array-nest-no-keys.toml @@ -0,0 +1,6 @@ +[[ albums ]] + [[ albums.songs ]] + [[ albums.songs ]] + +[[ artists ]] + [ artists.home.address ] diff --git a/test-suite/tests/valid/table-array-nest.json b/test-suite/tests/valid/table-array-nest.json new file mode 100644 index 0000000..c117afa --- /dev/null +++ 
b/test-suite/tests/valid/table-array-nest.json @@ -0,0 +1,18 @@ +{ + "albums": [ + { + "name": {"type": "string", "value": "Born to Run"}, + "songs": [ + {"name": {"type": "string", "value": "Jungleland"}}, + {"name": {"type": "string", "value": "Meeting Across the River"}} + ] + }, + { + "name": {"type": "string", "value": "Born in the USA"}, + "songs": [ + {"name": {"type": "string", "value": "Glory Days"}}, + {"name": {"type": "string", "value": "Dancing in the Dark"}} + ] + } + ] +} diff --git a/test-suite/tests/valid/table-array-nest.toml b/test-suite/tests/valid/table-array-nest.toml new file mode 100644 index 0000000..d659a3d --- /dev/null +++ b/test-suite/tests/valid/table-array-nest.toml @@ -0,0 +1,17 @@ +[[albums]] +name = "Born to Run" + + [[albums.songs]] + name = "Jungleland" + + [[albums.songs]] + name = "Meeting Across the River" + +[[albums]] +name = "Born in the USA" + + [[albums.songs]] + name = "Glory Days" + + [[albums.songs]] + name = "Dancing in the Dark" diff --git a/test-suite/tests/valid/table-array-one.json b/test-suite/tests/valid/table-array-one.json new file mode 100644 index 0000000..d75faae --- /dev/null +++ b/test-suite/tests/valid/table-array-one.json @@ -0,0 +1,8 @@ +{ + "people": [ + { + "first_name": {"type": "string", "value": "Bruce"}, + "last_name": {"type": "string", "value": "Springsteen"} + } + ] +} diff --git a/test-suite/tests/valid/table-array-one.toml b/test-suite/tests/valid/table-array-one.toml new file mode 100644 index 0000000..cd7e1b6 --- /dev/null +++ b/test-suite/tests/valid/table-array-one.toml @@ -0,0 +1,3 @@ +[[people]] +first_name = "Bruce" +last_name = "Springsteen" diff --git a/test-suite/tests/valid/table-empty.json b/test-suite/tests/valid/table-empty.json new file mode 100644 index 0000000..6f3873a --- /dev/null +++ b/test-suite/tests/valid/table-empty.json @@ -0,0 +1,3 @@ +{ + "a": {} +} diff --git a/test-suite/tests/valid/table-empty.toml b/test-suite/tests/valid/table-empty.toml new file mode 100644 index 0000000..8bb6a0a --- /dev/null +++ b/test-suite/tests/valid/table-empty.toml @@ -0,0 +1 @@ +[a] diff --git a/test-suite/tests/valid/table-multi-empty.json b/test-suite/tests/valid/table-multi-empty.json new file mode 100644 index 0000000..a6e17c9 --- /dev/null +++ b/test-suite/tests/valid/table-multi-empty.json @@ -0,0 +1,5 @@ +{ + "a": { "b": {} }, + "b": {}, + "c": { "a": {} } +} diff --git a/test-suite/tests/valid/table-multi-empty.toml b/test-suite/tests/valid/table-multi-empty.toml new file mode 100644 index 0000000..2266ed2 --- /dev/null +++ b/test-suite/tests/valid/table-multi-empty.toml @@ -0,0 +1,5 @@ +[a] +[a.b] +[b] +[c] +[c.a] diff --git a/test-suite/tests/valid/table-sub-empty.json b/test-suite/tests/valid/table-sub-empty.json new file mode 100644 index 0000000..9787770 --- /dev/null +++ b/test-suite/tests/valid/table-sub-empty.json @@ -0,0 +1,3 @@ +{ + "a": { "b": {} } +} diff --git a/test-suite/tests/valid/table-sub-empty.toml b/test-suite/tests/valid/table-sub-empty.toml new file mode 100644 index 0000000..70b7fe1 --- /dev/null +++ b/test-suite/tests/valid/table-sub-empty.toml @@ -0,0 +1,2 @@ +[a] +[a.b] diff --git a/test-suite/tests/valid/table-whitespace.json b/test-suite/tests/valid/table-whitespace.json new file mode 100644 index 0000000..3a73ec8 --- /dev/null +++ b/test-suite/tests/valid/table-whitespace.json @@ -0,0 +1,3 @@ +{ + "valid key": {} +} diff --git a/test-suite/tests/valid/table-whitespace.toml b/test-suite/tests/valid/table-whitespace.toml new file mode 100644 index 0000000..daf881d --- 
/dev/null +++ b/test-suite/tests/valid/table-whitespace.toml @@ -0,0 +1 @@ +["valid key"] diff --git a/test-suite/tests/valid/table-with-pound.json b/test-suite/tests/valid/table-with-pound.json new file mode 100644 index 0000000..5e594e4 --- /dev/null +++ b/test-suite/tests/valid/table-with-pound.json @@ -0,0 +1,5 @@ +{ + "key#group": { + "answer": {"type": "integer", "value": "42"} + } +} diff --git a/test-suite/tests/valid/table-with-pound.toml b/test-suite/tests/valid/table-with-pound.toml new file mode 100644 index 0000000..33f2c4f --- /dev/null +++ b/test-suite/tests/valid/table-with-pound.toml @@ -0,0 +1,2 @@ +["key#group"] +answer = 42 diff --git a/test-suite/tests/valid/unicode-escape.json b/test-suite/tests/valid/unicode-escape.json new file mode 100644 index 0000000..32948c6 --- /dev/null +++ b/test-suite/tests/valid/unicode-escape.json @@ -0,0 +1,5 @@ +{ + "answer1": {"type": "string", "value": "\u000B"}, + "answer4": {"type": "string", "value": "\u03B4α"}, + "answer8": {"type": "string", "value": "\u03B4β"} +} diff --git a/test-suite/tests/valid/unicode-escape.toml b/test-suite/tests/valid/unicode-escape.toml new file mode 100644 index 0000000..c0d5a25 --- /dev/null +++ b/test-suite/tests/valid/unicode-escape.toml @@ -0,0 +1,3 @@ +answer1 = "\u000B" +answer4 = "\u03B4α" +answer8 = "\U000003B4β" diff --git a/test-suite/tests/valid/unicode-literal.json b/test-suite/tests/valid/unicode-literal.json new file mode 100644 index 0000000..00aa2f8 --- /dev/null +++ b/test-suite/tests/valid/unicode-literal.json @@ -0,0 +1,3 @@ +{ + "answer": {"type": "string", "value": "δ"} +} diff --git a/test-suite/tests/valid/unicode-literal.toml b/test-suite/tests/valid/unicode-literal.toml new file mode 100644 index 0000000..c65723c --- /dev/null +++ b/test-suite/tests/valid/unicode-literal.toml @@ -0,0 +1 @@ +answer = "δ" diff --git a/tests/README.md b/tests/README.md deleted file mode 100644 index ebbc01c..0000000 --- a/tests/README.md +++ /dev/null @@ -1 +0,0 @@ -Tests are from https://github.com/BurntSushi/toml-test diff --git a/tests/backcompat.rs b/tests/backcompat.rs deleted file mode 100644 index 1b3f599..0000000 --- a/tests/backcompat.rs +++ /dev/null @@ -1,19 +0,0 @@ -extern crate toml; -extern crate serde; - -use serde::de::Deserialize; - -#[test] -fn main() { - let s = " - [a] foo = 1 - [[b]] foo = 1 - "; - assert!(s.parse::<toml::Value>().is_err()); - - let mut d = toml::de::Deserializer::new(s); - d.set_require_newline_after_table(false); - let value = toml::Value::deserialize(&mut d).unwrap(); - assert_eq!(value["a"]["foo"].as_integer(), Some(1)); - assert_eq!(value["b"][0]["foo"].as_integer(), Some(1)); -} diff --git a/tests/datetime.rs b/tests/datetime.rs deleted file mode 100644 index 948e863..0000000 --- a/tests/datetime.rs +++ /dev/null @@ -1,58 +0,0 @@ -extern crate toml; - -use std::str::FromStr; - -use toml::Value; - -#[test] -fn times() { - fn good(s: &str) { - let to_parse = format!("foo = {}", s); - let value = Value::from_str(&to_parse).unwrap(); - assert_eq!(value["foo"].as_datetime().unwrap().to_string(), s); - } - - good("1997-09-09T09:09:09Z"); - good("1997-09-09T09:09:09+09:09"); - good("1997-09-09T09:09:09-09:09"); - good("1997-09-09T09:09:09"); - good("1997-09-09"); - good("09:09:09"); - good("1997-09-09T09:09:09.09Z"); - good("1997-09-09T09:09:09.09+09:09"); - good("1997-09-09T09:09:09.09-09:09"); - good("1997-09-09T09:09:09.09"); - good("09:09:09.09"); -} - -#[test] -fn bad_times() { - fn bad(s: &str) { - let to_parse = format!("foo = {}", s); - 
assert!(Value::from_str(&to_parse).is_err()); - } - - bad("199-09-09"); - bad("199709-09"); - bad("1997-9-09"); - bad("1997-09-9"); - bad("1997-09-0909:09:09"); - bad("1997-09-09T09:09:09."); - bad("T"); - bad("T."); - bad("TZ"); - bad("1997-09-09T09:09:09.09+"); - bad("1997-09-09T09:09:09.09+09"); - bad("1997-09-09T09:09:09.09+09:9"); - bad("1997-09-09T09:09:09.09+0909"); - bad("1997-09-09T09:09:09.09-"); - bad("1997-09-09T09:09:09.09-09"); - bad("1997-09-09T09:09:09.09-09:9"); - bad("1997-09-09T09:09:09.09-0909"); - - bad("1997-00-09T09:09:09.09Z"); - bad("1997-09-00T09:09:09.09Z"); - bad("1997-09-09T30:09:09.09Z"); - bad("1997-09-09T12:69:09.09Z"); - bad("1997-09-09T12:09:69.09Z"); -} diff --git a/tests/display-tricky.rs b/tests/display-tricky.rs deleted file mode 100644 index 069e0f9..0000000 --- a/tests/display-tricky.rs +++ /dev/null @@ -1,49 +0,0 @@ -extern crate toml; -#[macro_use] extern crate serde_derive; - -#[derive(Debug, Serialize, Deserialize)] -pub struct Recipe { - pub name: String, - pub description: Option<String>, - #[serde(default)] - pub modules: Vec<Modules>, - #[serde(default)] - pub packages: Vec<Packages> -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct Modules { - pub name: String, - pub version: Option<String> -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct Packages { - pub name: String, - pub version: Option<String> -} - -#[test] -fn both_ends() { - let recipe_works = toml::from_str::<Recipe>(r#" - name = "testing" - description = "example" - modules = [] - - [[packages]] - name = "base" - "#).unwrap(); - toml::to_string(&recipe_works).unwrap(); - - let recipe_fails = toml::from_str::<Recipe>(r#" - name = "testing" - description = "example" - packages = [] - - [[modules]] - name = "base" - "#).unwrap(); - - let recipe_toml = toml::Value::try_from(recipe_fails).unwrap(); - recipe_toml.to_string(); -} diff --git a/tests/display.rs b/tests/display.rs deleted file mode 100644 index ca4fdd8..0000000 --- a/tests/display.rs +++ /dev/null @@ -1,103 +0,0 @@ -extern crate toml; - -use std::collections::BTreeMap; - -use toml::Value::{String, Integer, Float, Boolean, Array, Table}; - -macro_rules! map( ($($k:expr => $v:expr),*) => ({ - let mut _m = BTreeMap::new(); - $(_m.insert($k.to_string(), $v);)* - _m -}) ); - -#[test] -fn simple_show() { - assert_eq!(String("foo".to_string()).to_string(), - "\"foo\""); - assert_eq!(Integer(10).to_string(), - "10"); - assert_eq!(Float(10.0).to_string(), - "10.0"); - assert_eq!(Float(2.4).to_string(), - "2.4"); - assert_eq!(Boolean(true).to_string(), - "true"); - assert_eq!(Array(vec![]).to_string(), - "[]"); - assert_eq!(Array(vec![Integer(1), Integer(2)]).to_string(), - "[1, 2]"); -} - -#[test] -fn table() { - assert_eq!(Table(map! { }).to_string(), - ""); - assert_eq!(Table(map! { - "test" => Integer(2), - "test2" => Integer(3) }).to_string(), - "test = 2\ntest2 = 3\n"); - assert_eq!(Table(map! { - "test" => Integer(2), - "test2" => Table(map! { - "test" => String("wut".to_string()) - }) - }).to_string(), - "test = 2\n\ - \n\ - [test2]\n\ - test = \"wut\"\n"); - assert_eq!(Table(map! { - "test" => Integer(2), - "test2" => Table(map! { - "test" => String("wut".to_string()) - }) - }).to_string(), - "test = 2\n\ - \n\ - [test2]\n\ - test = \"wut\"\n"); - assert_eq!(Table(map! { - "test" => Integer(2), - "test2" => Array(vec![Table(map! { - "test" => String("wut".to_string()) - })]) - }).to_string(), - "test = 2\n\ - \n\ - [[test2]]\n\ - test = \"wut\"\n"); - assert_eq!(Table(map! 
{ - "foo.bar" => Integer(2), - "foo\"bar" => Integer(2) - }).to_string(), - "\"foo\\\"bar\" = 2\n\ - \"foo.bar\" = 2\n"); - assert_eq!(Table(map! { - "test" => Integer(2), - "test2" => Array(vec![Table(map! { - "test" => Array(vec![Integer(2)]) - })]) - }).to_string(), - "test = 2\n\ - \n\ - [[test2]]\n\ - test = [2]\n"); - let table = Table(map! { - "test" => Integer(2), - "test2" => Array(vec![Table(map! { - "test" => Array(vec![Array(vec![Integer(2), Integer(3)]), - Array(vec![String("foo".to_string()), String("bar".to_string())])]) - })]) - }); - assert_eq!(table.to_string(), - "test = 2\n\ - \n\ - [[test2]]\n\ - test = [[2, 3], [\"foo\", \"bar\"]]\n"); - assert_eq!(Table(map! { - "test" => Array(vec![Integer(2)]), - "test2" => Integer(2) - }).to_string(), - "test = [2]\n\ - test2 = 2\n"); -} diff --git a/tests/formatting.rs b/tests/formatting.rs deleted file mode 100644 index 4ba1418..0000000 --- a/tests/formatting.rs +++ /dev/null @@ -1,54 +0,0 @@ -#[macro_use] -extern crate serde_derive; -extern crate toml; - -use toml::to_string; - -#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] -struct User { - pub name: String, - pub surname: String, -} - -#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] -struct Users { - pub user: Vec<User>, -} - -#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] -struct TwoUsers { - pub user0: User, - pub user1: User, -} - -#[test] -fn no_unnecessary_newlines_array() { - assert!(!to_string(&Users { - user: vec![ - User { - name: "John".to_string(), - surname: "Doe".to_string(), - }, - User { - name: "Jane".to_string(), - surname: "Dough".to_string(), - }, - ], - }).unwrap() - .starts_with("\n")); -} - -#[test] -fn no_unnecessary_newlines_table() { - assert!(!to_string(&TwoUsers { - user0: User { - name: "John".to_string(), - surname: "Doe".to_string(), - }, - user1: User { - name: "Jane".to_string(), - surname: "Dough".to_string(), - }, - }).unwrap() - .starts_with("\n")); -} diff --git a/tests/invalid-encoder-misc.rs b/tests/invalid-encoder-misc.rs deleted file mode 100644 index 272f58f..0000000 --- a/tests/invalid-encoder-misc.rs +++ /dev/null @@ -1,14 +0,0 @@ -extern crate toml; - -use std::f64; - -#[test] -fn test_invalid_float_encode() { - fn bad(value: toml::Value) { - assert!(toml::to_string(&value).is_err()); - } - - bad(toml::Value::Float(f64::INFINITY)); - bad(toml::Value::Float(f64::NEG_INFINITY)); - bad(toml::Value::Float(f64::NAN)); -} diff --git a/tests/invalid-encoder/array-mixed-types-ints-and-floats.json b/tests/invalid-encoder/array-mixed-types-ints-and-floats.json deleted file mode 100644 index 2d42ead..0000000 --- a/tests/invalid-encoder/array-mixed-types-ints-and-floats.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "ints-and-floats": { - "type": "array", - "value": [ - { - "type": "integer", - "value": "1" - }, - { - "type": "float", - "value": "1.1" - } - ] - } -} diff --git a/tests/invalid-misc.rs b/tests/invalid-misc.rs deleted file mode 100644 index bb70b97..0000000 --- a/tests/invalid-misc.rs +++ /dev/null @@ -1,17 +0,0 @@ -extern crate toml; - -#[test] -fn bad() { - fn bad(s: &str) { - assert!(s.parse::<toml::Value>().is_err()); - } - - bad("a = 01"); - bad("a = 1__1"); - bad("a = 1_"); - bad("''"); - bad("a = nan"); - bad("a = -inf"); - bad("a = inf"); - bad("a = 9e99999"); -} diff --git a/tests/invalid.rs b/tests/invalid.rs deleted file mode 100644 index 4679684..0000000 --- a/tests/invalid.rs +++ /dev/null @@ -1,98 +0,0 @@ -extern crate toml; - -fn run(toml: &str) { - println!("test if 
invalid:\n{}", toml); - if let Ok(e) = toml.parse::<toml::Value>() { - panic!("parsed to: {:#?}", e); - } -} - -macro_rules! test( ($name:ident, $toml:expr) => ( - #[test] - fn $name() { run($toml); } -) ); - -test!(array_mixed_types_arrays_and_ints, - include_str!("invalid/array-mixed-types-arrays-and-ints.toml")); -test!(array_mixed_types_ints_and_floats, - include_str!("invalid/array-mixed-types-ints-and-floats.toml")); -test!(array_mixed_types_strings_and_ints, - include_str!("invalid/array-mixed-types-strings-and-ints.toml")); -test!(datetime_malformed_no_leads, - include_str!("invalid/datetime-malformed-no-leads.toml")); -test!(datetime_malformed_no_secs, - include_str!("invalid/datetime-malformed-no-secs.toml")); -test!(datetime_malformed_no_t, - include_str!("invalid/datetime-malformed-no-t.toml")); -test!(datetime_malformed_with_milli, - include_str!("invalid/datetime-malformed-with-milli.toml")); -test!(duplicate_keys, - include_str!("invalid/duplicate-keys.toml")); -test!(duplicate_key_table, - include_str!("invalid/duplicate-key-table.toml")); -test!(duplicate_tables, - include_str!("invalid/duplicate-tables.toml")); -test!(empty_implicit_table, - include_str!("invalid/empty-implicit-table.toml")); -test!(empty_table, - include_str!("invalid/empty-table.toml")); -test!(float_no_leading_zero, - include_str!("invalid/float-no-leading-zero.toml")); -test!(float_no_trailing_digits, - include_str!("invalid/float-no-trailing-digits.toml")); -test!(key_after_array, - include_str!("invalid/key-after-array.toml")); -test!(key_after_table, - include_str!("invalid/key-after-table.toml")); -test!(key_empty, - include_str!("invalid/key-empty.toml")); -test!(key_hash, - include_str!("invalid/key-hash.toml")); -test!(key_newline, - include_str!("invalid/key-newline.toml")); -test!(key_open_bracket, - include_str!("invalid/key-open-bracket.toml")); -test!(key_single_open_bracket, - include_str!("invalid/key-single-open-bracket.toml")); -test!(key_space, - include_str!("invalid/key-space.toml")); -test!(key_start_bracket, - include_str!("invalid/key-start-bracket.toml")); -test!(key_two_equals, - include_str!("invalid/key-two-equals.toml")); -test!(string_bad_byte_escape, - include_str!("invalid/string-bad-byte-escape.toml")); -test!(string_bad_escape, - include_str!("invalid/string-bad-escape.toml")); -test!(string_byte_escapes, - include_str!("invalid/string-byte-escapes.toml")); -test!(string_no_close, - include_str!("invalid/string-no-close.toml")); -test!(table_array_implicit, - include_str!("invalid/table-array-implicit.toml")); -test!(table_array_malformed_bracket, - include_str!("invalid/table-array-malformed-bracket.toml")); -test!(table_array_malformed_empty, - include_str!("invalid/table-array-malformed-empty.toml")); -test!(table_empty, - include_str!("invalid/table-empty.toml")); -test!(table_nested_brackets_close, - include_str!("invalid/table-nested-brackets-close.toml")); -test!(table_nested_brackets_open, - include_str!("invalid/table-nested-brackets-open.toml")); -test!(table_whitespace, - include_str!("invalid/table-whitespace.toml")); -test!(table_with_pound, - include_str!("invalid/table-with-pound.toml")); -test!(text_after_array_entries, - include_str!("invalid/text-after-array-entries.toml")); -test!(text_after_integer, - include_str!("invalid/text-after-integer.toml")); -test!(text_after_string, - include_str!("invalid/text-after-string.toml")); -test!(text_after_table, - include_str!("invalid/text-after-table.toml")); -test!(text_before_array_separator, - 
include_str!("invalid/text-before-array-separator.toml")); -test!(text_in_array, - include_str!("invalid/text-in-array.toml")); diff --git a/tests/invalid/array-mixed-types-arrays-and-ints.toml b/tests/invalid/array-mixed-types-arrays-and-ints.toml deleted file mode 100644 index 051ec73..0000000 --- a/tests/invalid/array-mixed-types-arrays-and-ints.toml +++ /dev/null @@ -1 +0,0 @@ -arrays-and-ints = [1, ["Arrays are not integers."]] diff --git a/tests/invalid/array-mixed-types-ints-and-floats.toml b/tests/invalid/array-mixed-types-ints-and-floats.toml deleted file mode 100644 index a5aa9b7..0000000 --- a/tests/invalid/array-mixed-types-ints-and-floats.toml +++ /dev/null @@ -1 +0,0 @@ -ints-and-floats = [1, 1.1] diff --git a/tests/invalid/array-mixed-types-strings-and-ints.toml b/tests/invalid/array-mixed-types-strings-and-ints.toml deleted file mode 100644 index f348308..0000000 --- a/tests/invalid/array-mixed-types-strings-and-ints.toml +++ /dev/null @@ -1 +0,0 @@ -strings-and-ints = ["hi", 42] diff --git a/tests/invalid/datetime-malformed-no-leads.toml b/tests/invalid/datetime-malformed-no-leads.toml deleted file mode 100644 index 123f173..0000000 --- a/tests/invalid/datetime-malformed-no-leads.toml +++ /dev/null @@ -1 +0,0 @@ -no-leads = 1987-7-05T17:45:00Z diff --git a/tests/invalid/datetime-malformed-no-secs.toml b/tests/invalid/datetime-malformed-no-secs.toml deleted file mode 100644 index ba93900..0000000 --- a/tests/invalid/datetime-malformed-no-secs.toml +++ /dev/null @@ -1 +0,0 @@ -no-secs = 1987-07-05T17:45Z diff --git a/tests/invalid/datetime-malformed-no-t.toml b/tests/invalid/datetime-malformed-no-t.toml deleted file mode 100644 index 617e3c5..0000000 --- a/tests/invalid/datetime-malformed-no-t.toml +++ /dev/null @@ -1 +0,0 @@ -no-t = 1987-07-0517:45:00Z diff --git a/tests/invalid/datetime-malformed-with-milli.toml b/tests/invalid/datetime-malformed-with-milli.toml deleted file mode 100644 index eef792f..0000000 --- a/tests/invalid/datetime-malformed-with-milli.toml +++ /dev/null @@ -1 +0,0 @@ -with-milli = 1987-07-5T17:45:00.12Z diff --git a/tests/invalid/duplicate-key-table.toml b/tests/invalid/duplicate-key-table.toml deleted file mode 100644 index cedf05f..0000000 --- a/tests/invalid/duplicate-key-table.toml +++ /dev/null @@ -1,5 +0,0 @@ -[fruit] -type = "apple" - -[fruit.type] -apple = "yes" diff --git a/tests/invalid/duplicate-keys.toml b/tests/invalid/duplicate-keys.toml deleted file mode 100644 index 9b5aee0..0000000 --- a/tests/invalid/duplicate-keys.toml +++ /dev/null @@ -1,2 +0,0 @@ -dupe = false -dupe = true diff --git a/tests/invalid/duplicate-tables.toml b/tests/invalid/duplicate-tables.toml deleted file mode 100644 index 8ddf49b..0000000 --- a/tests/invalid/duplicate-tables.toml +++ /dev/null @@ -1,2 +0,0 @@ -[a] -[a] diff --git a/tests/invalid/empty-implicit-table.toml b/tests/invalid/empty-implicit-table.toml deleted file mode 100644 index 0cc36d0..0000000 --- a/tests/invalid/empty-implicit-table.toml +++ /dev/null @@ -1 +0,0 @@ -[naughty..naughty] diff --git a/tests/invalid/empty-table.toml b/tests/invalid/empty-table.toml deleted file mode 100644 index fe51488..0000000 --- a/tests/invalid/empty-table.toml +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tests/invalid/float-no-leading-zero.toml b/tests/invalid/float-no-leading-zero.toml deleted file mode 100644 index cab76bf..0000000 --- a/tests/invalid/float-no-leading-zero.toml +++ /dev/null @@ -1,2 +0,0 @@ -answer = .12345 -neganswer = -.12345 diff --git a/tests/invalid/float-no-trailing-digits.toml 
b/tests/invalid/float-no-trailing-digits.toml deleted file mode 100644 index cbff2d0..0000000 --- a/tests/invalid/float-no-trailing-digits.toml +++ /dev/null @@ -1,2 +0,0 @@ -answer = 1. -neganswer = -1. diff --git a/tests/invalid/key-after-array.toml b/tests/invalid/key-after-array.toml deleted file mode 100644 index 5c1a1b0..0000000 --- a/tests/invalid/key-after-array.toml +++ /dev/null @@ -1 +0,0 @@ -[[agencies]] owner = "S Cjelli" diff --git a/tests/invalid/key-after-table.toml b/tests/invalid/key-after-table.toml deleted file mode 100644 index 4bc8213..0000000 --- a/tests/invalid/key-after-table.toml +++ /dev/null @@ -1 +0,0 @@ -[history] guard = "sleeping" diff --git a/tests/invalid/key-empty.toml b/tests/invalid/key-empty.toml deleted file mode 100644 index 09f998f..0000000 --- a/tests/invalid/key-empty.toml +++ /dev/null @@ -1 +0,0 @@ - = 1 diff --git a/tests/invalid/key-hash.toml b/tests/invalid/key-hash.toml deleted file mode 100644 index e321b1f..0000000 --- a/tests/invalid/key-hash.toml +++ /dev/null @@ -1 +0,0 @@ -a# = 1 diff --git a/tests/invalid/key-newline.toml b/tests/invalid/key-newline.toml deleted file mode 100644 index 707aad5..0000000 --- a/tests/invalid/key-newline.toml +++ /dev/null @@ -1,2 +0,0 @@ -a -= 1 diff --git a/tests/invalid/key-open-bracket.toml b/tests/invalid/key-open-bracket.toml deleted file mode 100644 index f0aeb16..0000000 --- a/tests/invalid/key-open-bracket.toml +++ /dev/null @@ -1 +0,0 @@ -[abc = 1 diff --git a/tests/invalid/key-single-open-bracket.toml b/tests/invalid/key-single-open-bracket.toml deleted file mode 100644 index 8e2f0be..0000000 --- a/tests/invalid/key-single-open-bracket.toml +++ /dev/null @@ -1 +0,0 @@ -[ \ No newline at end of file diff --git a/tests/invalid/key-space.toml b/tests/invalid/key-space.toml deleted file mode 100644 index 201806d..0000000 --- a/tests/invalid/key-space.toml +++ /dev/null @@ -1 +0,0 @@ -a b = 1 \ No newline at end of file diff --git a/tests/invalid/key-start-bracket.toml b/tests/invalid/key-start-bracket.toml deleted file mode 100644 index e0597ae..0000000 --- a/tests/invalid/key-start-bracket.toml +++ /dev/null @@ -1,3 +0,0 @@ -[a] -[xyz = 5 -[b] diff --git a/tests/invalid/key-two-equals.toml b/tests/invalid/key-two-equals.toml deleted file mode 100644 index 25a0378..0000000 --- a/tests/invalid/key-two-equals.toml +++ /dev/null @@ -1 +0,0 @@ -key= = 1 diff --git a/tests/invalid/string-bad-byte-escape.toml b/tests/invalid/string-bad-byte-escape.toml deleted file mode 100644 index 4c7be59..0000000 --- a/tests/invalid/string-bad-byte-escape.toml +++ /dev/null @@ -1 +0,0 @@ -naughty = "\xAg" diff --git a/tests/invalid/string-bad-escape.toml b/tests/invalid/string-bad-escape.toml deleted file mode 100644 index 60acb0c..0000000 --- a/tests/invalid/string-bad-escape.toml +++ /dev/null @@ -1 +0,0 @@ -invalid-escape = "This string has a bad \a escape character." 
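Each fixture under tests/invalid/ (and its moved copy under test-suite/tests/invalid/) is expected to fail to parse as a toml::Value; the deleted tests/invalid.rs shown above wires every fixture into the test! macro, which hands it to the run helper. Below is a minimal sketch of that flow with the fixture text inlined instead of pulled in through include_str!; the names assert_invalid and duplicate_keys_rejected are illustrative and do not appear in the patch.

extern crate toml;

// Sketch of the invalid-fixture check, mirroring the `run` helper from
// tests/invalid.rs. The inline body below is the content of
// tests/invalid/duplicate-keys.toml.
fn assert_invalid(src: &str) {
    // Parsing must fail; a successful parse means the fixture was wrongly accepted.
    assert!(src.parse::<toml::Value>().is_err(), "unexpectedly parsed:\n{}", src);
}

#[test]
fn duplicate_keys_rejected() {
    assert_invalid("dupe = false\ndupe = true\n");
}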
diff --git a/tests/invalid/string-byte-escapes.toml b/tests/invalid/string-byte-escapes.toml deleted file mode 100644 index e94452a..0000000 --- a/tests/invalid/string-byte-escapes.toml +++ /dev/null @@ -1 +0,0 @@ -answer = "\x33" diff --git a/tests/invalid/string-no-close.toml b/tests/invalid/string-no-close.toml deleted file mode 100644 index 0c292fc..0000000 --- a/tests/invalid/string-no-close.toml +++ /dev/null @@ -1 +0,0 @@ -no-ending-quote = "One time, at band camp diff --git a/tests/invalid/table-array-implicit.toml b/tests/invalid/table-array-implicit.toml deleted file mode 100644 index 05f2507..0000000 --- a/tests/invalid/table-array-implicit.toml +++ /dev/null @@ -1,14 +0,0 @@ -# This test is a bit tricky. It should fail because the first use of -# `[[albums.songs]]` without first declaring `albums` implies that `albums` -# must be a table. The alternative would be quite weird. Namely, it wouldn't -# comply with the TOML spec: "Each double-bracketed sub-table will belong to -# the most *recently* defined table element *above* it." -# -# This is in contrast to the *valid* test, table-array-implicit where -# `[[albums.songs]]` works by itself, so long as `[[albums]]` isn't declared -# later. (Although, `[albums]` could be.) -[[albums.songs]] -name = "Glory Days" - -[[albums]] -name = "Born in the USA" diff --git a/tests/invalid/table-array-malformed-bracket.toml b/tests/invalid/table-array-malformed-bracket.toml deleted file mode 100644 index 39c73b0..0000000 --- a/tests/invalid/table-array-malformed-bracket.toml +++ /dev/null @@ -1,2 +0,0 @@ -[[albums] -name = "Born to Run" diff --git a/tests/invalid/table-array-malformed-empty.toml b/tests/invalid/table-array-malformed-empty.toml deleted file mode 100644 index a470ca3..0000000 --- a/tests/invalid/table-array-malformed-empty.toml +++ /dev/null @@ -1,2 +0,0 @@ -[[]] -name = "Born to Run" diff --git a/tests/invalid/table-empty.toml b/tests/invalid/table-empty.toml deleted file mode 100644 index fe51488..0000000 --- a/tests/invalid/table-empty.toml +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tests/invalid/table-nested-brackets-close.toml b/tests/invalid/table-nested-brackets-close.toml deleted file mode 100644 index c8b5a67..0000000 --- a/tests/invalid/table-nested-brackets-close.toml +++ /dev/null @@ -1,2 +0,0 @@ -[a]b] -zyx = 42 diff --git a/tests/invalid/table-nested-brackets-open.toml b/tests/invalid/table-nested-brackets-open.toml deleted file mode 100644 index 246d7e9..0000000 --- a/tests/invalid/table-nested-brackets-open.toml +++ /dev/null @@ -1,2 +0,0 @@ -[a[b] -zyx = 42 diff --git a/tests/invalid/table-whitespace.toml b/tests/invalid/table-whitespace.toml deleted file mode 100644 index 79bbcb1..0000000 --- a/tests/invalid/table-whitespace.toml +++ /dev/null @@ -1 +0,0 @@ -[invalid key] \ No newline at end of file diff --git a/tests/invalid/table-with-pound.toml b/tests/invalid/table-with-pound.toml deleted file mode 100644 index 0d8edb5..0000000 --- a/tests/invalid/table-with-pound.toml +++ /dev/null @@ -1,2 +0,0 @@ -[key#group] -answer = 42 \ No newline at end of file diff --git a/tests/invalid/text-after-array-entries.toml b/tests/invalid/text-after-array-entries.toml deleted file mode 100644 index 1a72890..0000000 --- a/tests/invalid/text-after-array-entries.toml +++ /dev/null @@ -1,4 +0,0 @@ -array = [ - "Is there life after an array separator?", No - "Entry" -] diff --git a/tests/invalid/text-after-integer.toml b/tests/invalid/text-after-integer.toml deleted file mode 100644 index 42de7af..0000000 --- 
a/tests/invalid/text-after-integer.toml +++ /dev/null @@ -1 +0,0 @@ -answer = 42 the ultimate answer? diff --git a/tests/invalid/text-after-string.toml b/tests/invalid/text-after-string.toml deleted file mode 100644 index c92a6f1..0000000 --- a/tests/invalid/text-after-string.toml +++ /dev/null @@ -1 +0,0 @@ -string = "Is there life after strings?" No. diff --git a/tests/invalid/text-after-table.toml b/tests/invalid/text-after-table.toml deleted file mode 100644 index 87da9db..0000000 --- a/tests/invalid/text-after-table.toml +++ /dev/null @@ -1 +0,0 @@ -[error] this shouldn't be here diff --git a/tests/invalid/text-before-array-separator.toml b/tests/invalid/text-before-array-separator.toml deleted file mode 100644 index 9b06a39..0000000 --- a/tests/invalid/text-before-array-separator.toml +++ /dev/null @@ -1,4 +0,0 @@ -array = [ - "Is there life before an array separator?" No, - "Entry" -] diff --git a/tests/invalid/text-in-array.toml b/tests/invalid/text-in-array.toml deleted file mode 100644 index a6a6c42..0000000 --- a/tests/invalid/text-in-array.toml +++ /dev/null @@ -1,5 +0,0 @@ -array = [ - "Entry 1", - I don't belong, - "Entry 2", -] diff --git a/tests/parser.rs b/tests/parser.rs deleted file mode 100644 index 2282416..0000000 --- a/tests/parser.rs +++ /dev/null @@ -1,495 +0,0 @@ -extern crate toml; - -use toml::Value; - -macro_rules! bad { - ($s:expr, $msg:expr) => ({ - match $s.parse::() { - Ok(s) => panic!("successfully parsed as {}", s), - Err(e) => { - let e = e.to_string(); - assert!(e.contains($msg), "error: {}", e); - } - } - }) -} - -#[test] -fn crlf() { - "\ -[project]\r\n\ -\r\n\ -name = \"splay\"\r\n\ -version = \"0.1.0\"\r\n\ -authors = [\"alex@crichton.co\"]\r\n\ -\r\n\ -[[lib]]\r\n\ -\r\n\ -path = \"lib.rs\"\r\n\ -name = \"splay\"\r\n\ -description = \"\"\"\ -A Rust implementation of a TAR file reader and writer. This library does not\r\n\ -currently handle compression, but it is abstract over all I/O readers and\r\n\ -writers. Additionally, great lengths are taken to ensure that the entire\r\n\ -contents are never required to be entirely resident in memory all at once.\r\n\ -\"\"\"\ -".parse::().unwrap(); -} - -#[test] -fn fun_with_strings() { - let table = r#" -bar = "\U00000000" -key1 = "One\nTwo" -key2 = """One\nTwo""" -key3 = """ -One -Two""" - -key4 = "The quick brown fox jumps over the lazy dog." -key5 = """ -The quick brown \ - - -fox jumps over \ -the lazy dog.""" -key6 = """\ - The quick brown \ - fox jumps over \ - the lazy dog.\ - """ -# What you see is what you get. -winpath = 'C:\Users\nodejs\templates' -winpath2 = '\\ServerX\admin$\system32\' -quoted = 'Tom "Dubs" Preston-Werner' -regex = '<\i\c*\s*>' - -regex2 = '''I [dw]on't need \d{2} apples''' -lines = ''' -The first newline is -trimmed in raw strings. -All other whitespace -is preserved. 
-''' -"#.parse::<Value>().unwrap(); - assert_eq!(table["bar"].as_str(), Some("\0")); - assert_eq!(table["key1"].as_str(), Some("One\nTwo")); - assert_eq!(table["key2"].as_str(), Some("One\nTwo")); - assert_eq!(table["key3"].as_str(), Some("One\nTwo")); - - let msg = "The quick brown fox jumps over the lazy dog."; - assert_eq!(table["key4"].as_str(), Some(msg)); - assert_eq!(table["key5"].as_str(), Some(msg)); - assert_eq!(table["key6"].as_str(), Some(msg)); - - assert_eq!(table["winpath"].as_str(), Some(r"C:\Users\nodejs\templates")); - assert_eq!(table["winpath2"].as_str(), Some(r"\\ServerX\admin$\system32\")); - assert_eq!(table["quoted"].as_str(), Some(r#"Tom "Dubs" Preston-Werner"#)); - assert_eq!(table["regex"].as_str(), Some(r"<\i\c*\s*>")); - assert_eq!(table["regex2"].as_str(), Some(r"I [dw]on't need \d{2} apples")); - assert_eq!(table["lines"].as_str(), - Some("The first newline is\n\ - trimmed in raw strings.\n\ - All other whitespace\n\ - is preserved.\n")); -} - -#[test] -fn tables_in_arrays() { - let table = r#" -[[foo]] -#… -[foo.bar] -#… - -[[foo]] # ... -#… -[foo.bar] -#... -"#.parse::<Value>().unwrap(); - table["foo"][0]["bar"].as_table().unwrap(); - table["foo"][1]["bar"].as_table().unwrap(); -} - -#[test] -fn empty_table() { - let table = r#" -[foo]"#.parse::<Value>().unwrap(); - table["foo"].as_table().unwrap(); -} - -#[test] -fn fruit() { - let table = r#" -[[fruit]] -name = "apple" - -[fruit.physical] -color = "red" -shape = "round" - -[[fruit.variety]] -name = "red delicious" - -[[fruit.variety]] -name = "granny smith" - -[[fruit]] -name = "banana" - -[[fruit.variety]] -name = "plantain" -"#.parse::<Value>().unwrap(); - assert_eq!(table["fruit"][0]["name"].as_str(), Some("apple")); - assert_eq!(table["fruit"][0]["physical"]["color"].as_str(), Some("red")); - assert_eq!(table["fruit"][0]["physical"]["shape"].as_str(), Some("round")); - assert_eq!(table["fruit"][0]["variety"][0]["name"].as_str(), Some("red delicious")); - assert_eq!(table["fruit"][0]["variety"][1]["name"].as_str(), Some("granny smith")); - assert_eq!(table["fruit"][1]["name"].as_str(), Some("banana")); - assert_eq!(table["fruit"][1]["variety"][0]["name"].as_str(), Some("plantain")); -} - -#[test] -fn stray_cr() { - "\r".parse::<Value>().unwrap_err(); - "a = [ \r ]".parse::<Value>().unwrap_err(); - "a = \"\"\"\r\"\"\"".parse::<Value>().unwrap_err(); - "a = \"\"\"\\ \r \"\"\"".parse::<Value>().unwrap_err(); - "a = '''\r'''".parse::<Value>().unwrap_err(); - "a = '\r'".parse::<Value>().unwrap_err(); - "a = \"\r\"".parse::<Value>().unwrap_err(); -} - -#[test] -fn blank_literal_string() { - let table = "foo = ''".parse::<Value>().unwrap(); - assert_eq!(table["foo"].as_str(), Some("")); -} - -#[test] -fn many_blank() { - let table = "foo = \"\"\"\n\n\n\"\"\"".parse::<Value>().unwrap(); - assert_eq!(table["foo"].as_str(), Some("\n\n")); -} - -#[test] -fn literal_eats_crlf() { - let table = " - foo = \"\"\"\\\r\n\"\"\" - bar = \"\"\"\\\r\n \r\n \r\n a\"\"\" - ".parse::<Value>().unwrap(); - assert_eq!(table["foo"].as_str(), Some("")); - assert_eq!(table["bar"].as_str(), Some("a")); -} - -#[test] -fn string_no_newline() { - "a = \"\n\"".parse::<Value>().unwrap_err(); - "a = '\n'".parse::<Value>().unwrap_err(); -} - -#[test] -fn bad_leading_zeros() { - "a = 00".parse::<Value>().unwrap_err(); - "a = -00".parse::<Value>().unwrap_err(); - "a = +00".parse::<Value>().unwrap_err(); - "a = 00.0".parse::<Value>().unwrap_err(); - "a = -00.0".parse::<Value>().unwrap_err(); - "a = +00.0".parse::<Value>().unwrap_err(); - "a = 9223372036854775808".parse::<Value>().unwrap_err(); - "a = -9223372036854775809".parse::<Value>().unwrap_err(); -} - -#[test] -fn bad_floats() { - "a = 
0.".parse::<Value>().unwrap_err(); - "a = 0.e".parse::<Value>().unwrap_err(); - "a = 0.E".parse::<Value>().unwrap_err(); - "a = 0.0E".parse::<Value>().unwrap_err(); - "a = 0.0e".parse::<Value>().unwrap_err(); - "a = 0.0e-".parse::<Value>().unwrap_err(); - "a = 0.0e+".parse::<Value>().unwrap_err(); - "a = 0.0e+00".parse::<Value>().unwrap_err(); -} - -#[test] -fn floats() { - macro_rules! t { - ($actual:expr, $expected:expr) => ({ - let f = format!("foo = {}", $actual); - println!("{}", f); - let a = f.parse::<Value>().unwrap(); - assert_eq!(a["foo"].as_float().unwrap(), $expected); - }) - } - - t!("1.0", 1.0); - t!("1.0e0", 1.0); - t!("1.0e+0", 1.0); - t!("1.0e-0", 1.0); - t!("1.001e-0", 1.001); - t!("2e10", 2e10); - t!("2e+10", 2e10); - t!("2e-10", 2e-10); - t!("2_0.0", 20.0); - t!("2_0.0_0e1_0", 20.0e10); - t!("2_0.1_0e1_0", 20.1e10); -} - -#[test] -fn bare_key_names() { - let a = " - foo = 3 - foo_3 = 3 - foo_-2--3--r23f--4-f2-4 = 3 - _ = 3 - - = 3 - 8 = 8 - \"a\" = 3 - \"!\" = 3 - \"a^b\" = 3 - \"\\\"\" = 3 - \"character encoding\" = \"value\" - 'ʎǝʞ' = \"value\" - ".parse::<Value>().unwrap(); - &a["foo"]; - &a["-"]; - &a["_"]; - &a["8"]; - &a["foo_3"]; - &a["foo_-2--3--r23f--4-f2-4"]; - &a["a"]; - &a["!"]; - &a["\""]; - &a["character encoding"]; - &a["ʎǝʞ"]; -} - -#[test] -fn bad_keys() { - "key\n=3".parse::<Value>().unwrap_err(); - "key=\n3".parse::<Value>().unwrap_err(); - "key|=3".parse::<Value>().unwrap_err(); - "\"\"=3".parse::<Value>().unwrap_err(); - "=3".parse::<Value>().unwrap_err(); - "\"\"|=3".parse::<Value>().unwrap_err(); - "\"\n\"|=3".parse::<Value>().unwrap_err(); - "\"\r\"|=3".parse::<Value>().unwrap_err(); -} - -#[test] -fn bad_table_names() { - "[]".parse::<Value>().unwrap_err(); - "[.]".parse::<Value>().unwrap_err(); - "[\"\".\"\"]".parse::<Value>().unwrap_err(); - "[a.]".parse::<Value>().unwrap_err(); - "[\"\"]".parse::<Value>().unwrap_err(); - "[!]".parse::<Value>().unwrap_err(); - "[\"\n\"]".parse::<Value>().unwrap_err(); - "[a.b]\n[a.\"b\"]".parse::<Value>().unwrap_err(); - "[']".parse::<Value>().unwrap_err(); - "[''']".parse::<Value>().unwrap_err(); - "['''''']".parse::<Value>().unwrap_err(); - "['\n']".parse::<Value>().unwrap_err(); - "['\r\n']".parse::<Value>().unwrap_err(); -} - -#[test] -fn table_names() { - let a = " - [a.\"b\"] - [\"f f\"] - [\"f.f\"] - [\"\\\"\"] - ['a.a'] - ['\"\"'] - ".parse::<Value>().unwrap(); - println!("{:?}", a); - &a["a"]["b"]; - &a["f f"]; - &a["f.f"]; - &a["\""]; - &a["\"\""]; -} - -#[test] -fn invalid_bare_numeral() { - "4".parse::<Value>().unwrap_err(); -} - -#[test] -fn inline_tables() { - "a = {}".parse::<Value>().unwrap(); - "a = {b=1}".parse::<Value>().unwrap(); - "a = { b = 1 }".parse::<Value>().unwrap(); - "a = {a=1,b=2}".parse::<Value>().unwrap(); - "a = {a=1,b=2,c={}}".parse::<Value>().unwrap(); - "a = {a=1,}".parse::<Value>().unwrap_err(); - "a = {,}".parse::<Value>().unwrap_err(); - "a = {a=1,a=1}".parse::<Value>().unwrap_err(); - "a = {\n}".parse::<Value>().unwrap_err(); - "a = {".parse::<Value>().unwrap_err(); - "a = {a=[\n]}".parse::<Value>().unwrap(); - "a = {\"a\"=[\n]}".parse::<Value>().unwrap(); - "a = [\n{},\n{},\n]".parse::<Value>().unwrap(); -} - -#[test] -fn number_underscores() { - macro_rules! 
t { - ($actual:expr, $expected:expr) => ({ - let f = format!("foo = {}", $actual); - let table = f.parse::<Value>().unwrap(); - assert_eq!(table["foo"].as_integer().unwrap(), $expected); - }) - } - - t!("1_0", 10); - t!("1_0_0", 100); - t!("1_000", 1000); - t!("+1_000", 1000); - t!("-1_000", -1000); -} - -#[test] -fn bad_underscores() { - bad!("foo = 0_", "invalid number"); - bad!("foo = 0__0", "invalid number"); - bad!("foo = __0", "invalid number"); - bad!("foo = 1_0_", "invalid number"); -} - -#[test] -fn bad_unicode_codepoint() { - bad!("foo = \"\\uD800\"", "invalid escape value"); -} - -#[test] -fn bad_strings() { - bad!("foo = \"\\uxx\"", "invalid hex escape"); - bad!("foo = \"\\u\"", "invalid hex escape"); - bad!("foo = \"\\", "unterminated"); - bad!("foo = '", "unterminated"); -} - -#[test] -fn empty_string() { - assert_eq!("foo = \"\"".parse::<Value>() - .unwrap()["foo"] - .as_str() - .unwrap(), - ""); -} - -#[test] -fn booleans() { - let table = "foo = true".parse::<Value>().unwrap(); - assert_eq!(table["foo"].as_bool(), Some(true)); - - let table = "foo = false".parse::<Value>().unwrap(); - assert_eq!(table["foo"].as_bool(), Some(false)); - - assert!("foo = true2".parse::<Value>().is_err()); - assert!("foo = false2".parse::<Value>().is_err()); - assert!("foo = t1".parse::<Value>().is_err()); - assert!("foo = f2".parse::<Value>().is_err()); -} - -#[test] -fn bad_nesting() { - bad!(" - a = [2] - [[a]] - b = 5 - ", "duplicate key: `a`"); - bad!(" - a = 1 - [a.b] - ", "duplicate key: `a`"); - bad!(" - a = [] - [a.b] - ", "duplicate key: `a`"); - bad!(" - a = [] - [[a.b]] - ", "duplicate key: `a`"); - bad!(" - [a] - b = { c = 2, d = {} } - [a.b] - c = 2 - ", "duplicate key: `b`"); -} - -#[test] -fn bad_table_redefine() { - bad!(" - [a] - foo=\"bar\" - [a.b] - foo=\"bar\" - [a] - ", "redefinition of table `a`"); - bad!(" - [a] - foo=\"bar\" - b = { foo = \"bar\" } - [a] - ", "redefinition of table `a`"); - bad!(" - [a] - b = {} - [a.b] - ", "duplicate key: `b`"); - - bad!(" - [a] - b = {} - [a] - ", "redefinition of table `a`"); -} - -#[test] -fn datetimes() { - macro_rules! 
t { - ($actual:expr) => ({ - let f = format!("foo = {}", $actual); - let toml = f.parse::<Value>().expect(&format!("failed: {}", f)); - assert_eq!(toml["foo"].as_datetime().unwrap().to_string(), $actual); - }) - } - - t!("2016-09-09T09:09:09Z"); - t!("2016-09-09T09:09:09.1Z"); - t!("2016-09-09T09:09:09.2+10:00"); - t!("2016-09-09T09:09:09.123456789-02:00"); - bad!("foo = 2016-09-09T09:09:09.Z", "failed to parse date"); - bad!("foo = 2016-9-09T09:09:09Z", "failed to parse date"); - bad!("foo = 2016-09-09T09:09:09+2:00", "failed to parse date"); - bad!("foo = 2016-09-09T09:09:09-2:00", "failed to parse date"); - bad!("foo = 2016-09-09T09:09:09Z-2:00", "failed to parse date"); -} - -#[test] -fn require_newline_after_value() { - bad!("0=0r=false", "invalid number at line 1"); - bad!(r#" -0=""o=""m=""r=""00="0"q="""0"""e="""0""" -"#, "expected newline"); - bad!(r#" -[[0000l0]] -0="0"[[0000l0]] -0="0"[[0000l0]] -0="0"l="0" -"#, "expected newline"); - bad!(r#" -0=[0]00=[0,0,0]t=["0","0","0"]s=[1000-00-00T00:00:00Z,2000-00-00T00:00:00Z] -"#, "expected newline"); - bad!(r#" -0=0r0=0r=false -"#, "invalid number at line 2"); - bad!(r#" -0=0r0=0r=falsefal=false -"#, "invalid number at line 2"); -} diff --git a/tests/pretty.rs b/tests/pretty.rs deleted file mode 100644 index 19ed22d..0000000 --- a/tests/pretty.rs +++ /dev/null @@ -1,308 +0,0 @@ -extern crate toml; -extern crate serde; - -use serde::ser::Serialize; - -const NO_PRETTY: &'static str = "\ -[example] -array = [\"item 1\", \"item 2\"] -empty = [] -oneline = \"this has no newlines.\" -text = \"\\nthis is the first line\\nthis is the second line\\n\" -"; - -#[test] -fn no_pretty() { - let toml = NO_PRETTY; - let value: toml::Value = toml::from_str(toml).unwrap(); - let mut result = String::with_capacity(128); - value.serialize(&mut toml::Serializer::new(&mut result)).unwrap(); - println!("EXPECTED:\n{}", toml); - println!("\nRESULT:\n{}", result); - assert_eq!(toml, &result); -} - -#[test] -fn disable_pretty() { - let toml = NO_PRETTY; - let value: toml::Value = toml::from_str(toml).unwrap(); - let mut result = String::with_capacity(128); - { - let mut serializer = toml::Serializer::pretty(&mut result); - serializer.pretty_string(false); - serializer.pretty_array(false); - value.serialize(&mut serializer).unwrap(); - } - println!("EXPECTED:\n{}", toml); - println!("\nRESULT:\n{}", result); - assert_eq!(toml, &result); -} - -const PRETTY_STD: &'static str = "\ -[example] -array = [ - 'item 1', - 'item 2', -] -empty = [] -one = ['one'] -oneline = 'this has no newlines.' -text = ''' -this is the first line -this is the second line -''' -"; - -#[test] -fn pretty_std() { - let toml = PRETTY_STD; - let value: toml::Value = toml::from_str(toml).unwrap(); - let mut result = String::with_capacity(128); - value.serialize(&mut toml::Serializer::pretty(&mut result)).unwrap(); - println!("EXPECTED:\n{}", toml); - println!("\nRESULT:\n{}", result); - assert_eq!(toml, &result); -} - - -const PRETTY_INDENT_2: &'static str = "\ -[example] -array = [ - 'item 1', - 'item 2', -] -empty = [] -one = ['one'] -oneline = 'this has no newlines.' 
-text = ''' -this is the first line -this is the second line -''' -three = [ - 'one', - 'two', - 'three', -] -"; - -#[test] -fn pretty_indent_2() { - let toml = PRETTY_INDENT_2; - let value: toml::Value = toml::from_str(toml).unwrap(); - let mut result = String::with_capacity(128); - { - let mut serializer = toml::Serializer::pretty(&mut result); - serializer.pretty_array_indent(2); - value.serialize(&mut serializer).unwrap(); - } - println!(">> Result:\n{}", result); - assert_eq!(toml, &result); -} - -const PRETTY_INDENT_2_OTHER: &'static str = "\ -[example] -array = [ - \"item 1\", - \"item 2\", -] -empty = [] -oneline = \"this has no newlines.\" -text = \"\\nthis is the first line\\nthis is the second line\\n\" -"; - - -#[test] -/// Test pretty indent when gotten the other way -fn pretty_indent_2_other() { - let toml = PRETTY_INDENT_2_OTHER; - let value: toml::Value = toml::from_str(toml).unwrap(); - let mut result = String::with_capacity(128); - { - let mut serializer = toml::Serializer::new(&mut result); - serializer.pretty_array_indent(2); - value.serialize(&mut serializer).unwrap(); - } - assert_eq!(toml, &result); -} - - -const PRETTY_ARRAY_NO_COMMA: &'static str = "\ -[example] -array = [ - \"item 1\", - \"item 2\" -] -empty = [] -oneline = \"this has no newlines.\" -text = \"\\nthis is the first line\\nthis is the second line\\n\" -"; -#[test] -/// Test pretty indent when gotten the other way -fn pretty_indent_array_no_comma() { - let toml = PRETTY_ARRAY_NO_COMMA; - let value: toml::Value = toml::from_str(toml).unwrap(); - let mut result = String::with_capacity(128); - { - let mut serializer = toml::Serializer::new(&mut result); - serializer.pretty_array_trailing_comma(false); - value.serialize(&mut serializer).unwrap(); - } - assert_eq!(toml, &result); -} - - -const PRETTY_NO_STRING: &'static str = "\ -[example] -array = [ - \"item 1\", - \"item 2\", -] -empty = [] -oneline = \"this has no newlines.\" -text = \"\\nthis is the first line\\nthis is the second line\\n\" -"; -#[test] -/// Test pretty indent when gotten the other way -fn pretty_no_string() { - let toml = PRETTY_NO_STRING; - let value: toml::Value = toml::from_str(toml).unwrap(); - let mut result = String::with_capacity(128); - { - let mut serializer = toml::Serializer::pretty(&mut result); - serializer.pretty_string(false); - value.serialize(&mut serializer).unwrap(); - } - assert_eq!(toml, &result); -} - -const PRETTY_TRICKY: &'static str = r##"[example] -f = "\f" -glass = ''' -Nothing too unusual, except that I can eat glass in: -- Greek: Μπορώ να φάω σπασμένα γυαλιά χωρίς να πάθω τίποτα. -- Polish: Mogę jeść szkło, i mi nie szkodzi. -- Hindi: मैं काँच खा सकता हूँ, मुझे उस से कोई पीडा नहीं होती. -- Japanese: 私はガラスを食べられます。それは私を傷つけません。 -''' -r = "\r" -r_newline = """ -\r -""" -single = '''this is a single line but has '' cuz it's tricky''' -single_tricky = "single line with ''' in it" -tabs = ''' -this is pretty standard - except for some tabs right here -''' -text = """ -this is the first line. 
-This has a ''' in it and \"\"\" cuz it's tricky yo -Also ' and \" because why not -this is the fourth line -""" -"##; - -#[test] -fn pretty_tricky() { - let toml = PRETTY_TRICKY; - let value: toml::Value = toml::from_str(toml).unwrap(); - let mut result = String::with_capacity(128); - value.serialize(&mut toml::Serializer::pretty(&mut result)).unwrap(); - println!("EXPECTED:\n{}", toml); - println!("\nRESULT:\n{}", result); - assert_eq!(toml, &result); -} - -const PRETTY_TABLE_ARRAY: &'static str = r##"[[array]] -key = 'foo' - -[[array]] -key = 'bar' - -[abc] -doc = 'this is a table' - -[example] -single = 'this is a single line string' -"##; - -#[test] -fn pretty_table_array() { - let toml = PRETTY_TABLE_ARRAY; - let value: toml::Value = toml::from_str(toml).unwrap(); - let mut result = String::with_capacity(128); - value.serialize(&mut toml::Serializer::pretty(&mut result)).unwrap(); - println!("EXPECTED:\n{}", toml); - println!("\nRESULT:\n{}", result); - assert_eq!(toml, &result); -} - -const TABLE_ARRAY: &'static str = r##"[[array]] -key = "foo" - -[[array]] -key = "bar" - -[abc] -doc = "this is a table" - -[example] -single = "this is a single line string" -"##; - -#[test] -fn table_array() { - let toml = TABLE_ARRAY; - let value: toml::Value = toml::from_str(toml).unwrap(); - let mut result = String::with_capacity(128); - value.serialize(&mut toml::Serializer::new(&mut result)).unwrap(); - println!("EXPECTED:\n{}", toml); - println!("\nRESULT:\n{}", result); - assert_eq!(toml, &result); -} - -const PRETTY_TRICKY_NON_LITERAL: &'static str = r##"[example] -f = "\f" -glass = """ -Nothing too unusual, except that I can eat glass in: -- Greek: Μπορώ να φάω σπασμένα γυαλιά χωρίς να πάθω τίποτα. -- Polish: Mogę jeść szkło, i mi nie szkodzi. -- Hindi: मैं काँच खा सकता हूँ, मुझे उस से कोई पीडा नहीं होती. -- Japanese: 私はガラスを食べられます。それは私を傷つけません。 -""" -plain = """ -This has a couple of lines -Because it likes to. -""" -r = "\r" -r_newline = """ -\r -""" -single = "this is a single line but has '' cuz it's tricky" -single_tricky = "single line with ''' in it" -tabs = """ -this is pretty standard -\texcept for some \ttabs right here -""" -text = """ -this is the first line. -This has a ''' in it and \"\"\" cuz it's tricky yo -Also ' and \" because why not -this is the fourth line -""" -"##; - -#[test] -fn pretty_tricky_non_literal() { - let toml = PRETTY_TRICKY_NON_LITERAL; - let value: toml::Value = toml::from_str(toml).unwrap(); - let mut result = String::with_capacity(128); - { - let mut serializer = toml::Serializer::pretty(&mut result); - serializer.pretty_string_literal(false); - value.serialize(&mut serializer).unwrap(); - } - println!("EXPECTED:\n{}", toml); - println!("\nRESULT:\n{}", result); - assert_eq!(toml, &result); -} diff --git a/tests/serde.rs b/tests/serde.rs deleted file mode 100644 index 57fa5db..0000000 --- a/tests/serde.rs +++ /dev/null @@ -1,578 +0,0 @@ -extern crate serde; -extern crate toml; -#[macro_use] -extern crate serde_derive; - -use std::collections::{BTreeMap, HashSet}; -use serde::{Deserialize, Deserializer}; - -use toml::Value; -use toml::Value::{Table, Integer, Array, Float}; - -macro_rules! t { - ($e:expr) => (match $e { - Ok(t) => t, - Err(e) => panic!("{} failed with {}", stringify!($e), e), - }) -} - -macro_rules! 
equivalent { - ($literal:expr, $toml:expr,) => ({ - let toml = $toml; - let literal = $literal; - - // In/out of Value is equivalent - println!("try_from"); - assert_eq!(t!(Value::try_from(literal.clone())), toml); - println!("try_into"); - assert_eq!(literal, t!(toml.clone().try_into())); - - // Through a string equivalent - println!("to_string(literal)"); - assert_eq!(t!(toml::to_string(&literal)), toml.to_string()); - println!("to_string(toml)"); - assert_eq!(t!(toml::to_string(&toml)), toml.to_string()); - println!("literal, from_str(toml)"); - assert_eq!(literal, t!(toml::from_str(&toml.to_string()))); - println!("toml, from_str(toml)"); - assert_eq!(toml, t!(toml::from_str(&toml.to_string()))); - }) -} - -macro_rules! error { - ($ty:ty, $toml:expr, $error:expr) => ({ - println!("attempting parsing"); - match toml::from_str::<$ty>(&$toml.to_string()) { - Ok(_) => panic!("successful"), - Err(e) => { - assert!(e.to_string().contains($error), - "bad error: {}", e); - } - } - - println!("attempting toml decoding"); - match $toml.try_into::<$ty>() { - Ok(_) => panic!("successful"), - Err(e) => { - assert!(e.to_string().contains($error), - "bad error: {}", e); - } - } - }) -} - -macro_rules! map( ($($k:ident: $v:expr),*) => ({ - let mut _m = BTreeMap::new(); - $(_m.insert(stringify!($k).to_string(), $v);)* - _m -}) ); - -#[test] -fn smoke() { - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Foo { a: isize } - - equivalent!( - Foo { a: 2 }, - Table(map! { a: Integer(2) }), - ); -} - -#[test] -fn smoke_hyphen() { - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Foo { - a_b: isize, - } - - equivalent! { - Foo { a_b: 2 }, - Table(map! { a_b: Integer(2) }), - } - - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Foo2 { - #[serde(rename = "a-b")] - a_b: isize, - } - - let mut m = BTreeMap::new(); - m.insert("a-b".to_string(), Integer(2)); - equivalent! { - Foo2 { a_b: 2 }, - Table(m), - } -} - -#[test] -fn nested() { - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Foo { a: isize, b: Bar } - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Bar { a: String } - - equivalent! { - Foo { a: 2, b: Bar { a: "test".to_string() } }, - Table(map! { - a: Integer(2), - b: Table(map! { - a: Value::String("test".to_string()) - }) - }), - } -} - -#[test] -fn application_decode_error() { - #[derive(PartialEq, Debug)] - struct Range10(usize); - impl<'de> Deserialize<'de> for Range10 { - fn deserialize>(d: D) -> Result { - let x: usize = try!(Deserialize::deserialize(d)); - if x > 10 { - Err(serde::de::Error::custom("more than 10")) - } else { - Ok(Range10(x)) - } - } - } - let d_good = Integer(5); - let d_bad1 = Value::String("not an isize".to_string()); - let d_bad2 = Integer(11); - - assert_eq!(Range10(5), d_good.try_into().unwrap()); - - let err1: Result = d_bad1.try_into(); - assert!(err1.is_err()); - let err2: Result = d_bad2.try_into(); - assert!(err2.is_err()); -} - -#[test] -fn array() { - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Foo { a: Vec } - - equivalent! { - Foo { a: vec![1, 2, 3, 4] }, - Table(map! { - a: Array(vec![ - Integer(1), - Integer(2), - Integer(3), - Integer(4) - ]) - }), - }; -} - -#[test] -fn inner_structs_with_options() { - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Foo { - a: Option>, - b: Bar, - } - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Bar { - a: String, - b: f64, - } - - equivalent! 
{ - Foo { - a: Some(Box::new(Foo { - a: None, - b: Bar { a: "foo".to_string(), b: 4.5 }, - })), - b: Bar { a: "bar".to_string(), b: 1.0 }, - }, - Table(map! { - a: Table(map! { - b: Table(map! { - a: Value::String("foo".to_string()), - b: Float(4.5) - }) - }), - b: Table(map! { - a: Value::String("bar".to_string()), - b: Float(1.0) - }) - }), - } -} - -#[test] -fn hashmap() { - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Foo { - set: HashSet<char>, - map: BTreeMap<String, isize>, - } - - equivalent! { - Foo { - map: { - let mut m = BTreeMap::new(); - m.insert("foo".to_string(), 10); - m.insert("bar".to_string(), 4); - m - }, - set: { - let mut s = HashSet::new(); - s.insert('a'); - s - }, - }, - Table(map! { - map: Table(map! { - foo: Integer(10), - bar: Integer(4) - }), - set: Array(vec![Value::String("a".to_string())]) - }), - } -} - -#[test] -fn table_array() { - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Foo { a: Vec<Bar>, } - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Bar { a: isize } - - equivalent! { - Foo { a: vec![Bar { a: 1 }, Bar { a: 2 }] }, - Table(map! { - a: Array(vec![ - Table(map!{ a: Integer(1) }), - Table(map!{ a: Integer(2) }), - ]) - }), - } -} - -#[test] -fn type_errors() { - #[derive(Deserialize)] - #[allow(dead_code)] - struct Foo { bar: isize } - - error! { - Foo, - Table(map! { - bar: Value::String("a".to_string()) - }), - "invalid type: string \"a\", expected isize for key `bar`" - } - - #[derive(Deserialize)] - #[allow(dead_code)] - struct Bar { foo: Foo } - - error! { - Bar, - Table(map! { - foo: Table(map! { - bar: Value::String("a".to_string()) - }) - }), - "invalid type: string \"a\", expected isize for key `foo.bar`" - } -} - -#[test] -fn missing_errors() { - #[derive(Serialize, Deserialize, PartialEq, Debug)] - struct Foo { bar: isize } - - error! { - Foo, - Table(map! { }), - "missing field `bar`" - } -} - -#[test] -fn parse_enum() { - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Foo { a: E } - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - #[serde(untagged)] - enum E { - Bar(isize), - Baz(String), - Last(Foo2), - } - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Foo2 { - test: String, - } - - equivalent! { - Foo { a: E::Bar(10) }, - Table(map! { a: Integer(10) }), - } - - equivalent! { - Foo { a: E::Baz("foo".to_string()) }, - Table(map! { a: Value::String("foo".to_string()) }), - } - - equivalent! { - Foo { a: E::Last(Foo2 { test: "test".to_string() }) }, - Table(map! { a: Table(map! { test: Value::String("test".to_string()) }) }), - } -} - -#[test] -fn parse_enum_string() { - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Foo { a: Sort } - - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - #[serde(rename_all = "lowercase")] - enum Sort { - Asc, - Desc, - } - - equivalent! { - Foo { a: Sort::Desc }, - Table(map! { a: Value::String("desc".to_string()) }), - } - -} - -// #[test] -// fn unused_fields() { -// #[derive(Serialize, Deserialize, PartialEq, Debug)] -// struct Foo { a: isize } -// -// let v = Foo { a: 2 }; -// let mut d = Decoder::new(Table(map! { -// a, Integer(2), -// b, Integer(5) -// })); -// assert_eq!(v, t!(Deserialize::deserialize(&mut d))); -// -// assert_eq!(d.toml, Some(Table(map! 
{ -// b, Integer(5) -// }))); -// } -// -// #[test] -// fn unused_fields2() { -// #[derive(Serialize, Deserialize, PartialEq, Debug)] -// struct Foo { a: Bar } -// #[derive(Serialize, Deserialize, PartialEq, Debug)] -// struct Bar { a: isize } -// -// let v = Foo { a: Bar { a: 2 } }; -// let mut d = Decoder::new(Table(map! { -// a, Table(map! { -// a, Integer(2), -// b, Integer(5) -// }) -// })); -// assert_eq!(v, t!(Deserialize::deserialize(&mut d))); -// -// assert_eq!(d.toml, Some(Table(map! { -// a, Table(map! { -// b, Integer(5) -// }) -// }))); -// } -// -// #[test] -// fn unused_fields3() { -// #[derive(Serialize, Deserialize, PartialEq, Debug)] -// struct Foo { a: Bar } -// #[derive(Serialize, Deserialize, PartialEq, Debug)] -// struct Bar { a: isize } -// -// let v = Foo { a: Bar { a: 2 } }; -// let mut d = Decoder::new(Table(map! { -// a, Table(map! { -// a, Integer(2) -// }) -// })); -// assert_eq!(v, t!(Deserialize::deserialize(&mut d))); -// -// assert_eq!(d.toml, None); -// } -// -// #[test] -// fn unused_fields4() { -// #[derive(Serialize, Deserialize, PartialEq, Debug)] -// struct Foo { a: BTreeMap<String, String> } -// -// let v = Foo { a: map! { a, "foo".to_string() } }; -// let mut d = Decoder::new(Table(map! { -// a, Table(map! { -// a, Value::String("foo".to_string()) -// }) -// })); -// assert_eq!(v, t!(Deserialize::deserialize(&mut d))); -// -// assert_eq!(d.toml, None); -// } -// -// #[test] -// fn unused_fields5() { -// #[derive(Serialize, Deserialize, PartialEq, Debug)] -// struct Foo { a: Vec<String> } -// -// let v = Foo { a: vec!["a".to_string()] }; -// let mut d = Decoder::new(Table(map! { -// a, Array(vec![Value::String("a".to_string())]) -// })); -// assert_eq!(v, t!(Deserialize::deserialize(&mut d))); -// -// assert_eq!(d.toml, None); -// } -// -// #[test] -// fn unused_fields6() { -// #[derive(Serialize, Deserialize, PartialEq, Debug)] -// struct Foo { a: Option<Vec<String>> } -// -// let v = Foo { a: Some(vec![]) }; -// let mut d = Decoder::new(Table(map! { -// a, Array(vec![]) -// })); -// assert_eq!(v, t!(Deserialize::deserialize(&mut d))); -// -// assert_eq!(d.toml, None); -// } -// -// #[test] -// fn unused_fields7() { -// #[derive(Serialize, Deserialize, PartialEq, Debug)] -// struct Foo { a: Vec<Bar> } -// #[derive(Serialize, Deserialize, PartialEq, Debug)] -// struct Bar { a: isize } -// -// let v = Foo { a: vec![Bar { a: 1 }] }; -// let mut d = Decoder::new(Table(map! { -// a, Array(vec![Table(map! { -// a, Integer(1), -// b, Integer(2) -// })]) -// })); -// assert_eq!(v, t!(Deserialize::deserialize(&mut d))); -// -// assert_eq!(d.toml, Some(Table(map! { -// a, Array(vec![Table(map! { -// b, Integer(2) -// })]) -// }))); -// } - -#[test] -fn empty_arrays() { - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Foo { a: Vec<Bar> } - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Bar; - - equivalent! { - Foo { a: vec![] }, - Table(map! {a: Array(Vec::new())}), - } -} - -#[test] -fn empty_arrays2() { - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Foo { a: Option<Vec<Bar>> } - #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] - struct Bar; - - equivalent! { - Foo { a: None }, - Table(map! {}), - } - - equivalent!{ - Foo { a: Some(vec![]) }, - Table(map! { a: Array(vec![]) }), - } -} - -#[test] -fn extra_keys() { - #[derive(Serialize, Deserialize)] - struct Foo { a: isize } - - let toml = Table(map! 
{ a: Integer(2), b: Integer(2) }); - assert!(toml.clone().try_into::<Foo>().is_ok()); - assert!(toml::from_str::<Foo>(&toml.to_string()).is_ok()); -} - -#[test] -fn newtypes() { - #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] - struct A { - b: B - } - - #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] - struct B(u32); - - equivalent! { - A { b: B(2) }, - Table(map! { b: Integer(2) }), - } -} - -#[test] -fn newtypes2() { - #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] - struct A { - b: B - } - - #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] - struct B(Option<C>); - - #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] - struct C { - x: u32, - y: u32, - z: u32 - } - - equivalent! { - A { b: B(Some(C { x: 0, y: 1, z: 2 })) }, - Table(map! { - b: Table(map! { - x: Integer(0), - y: Integer(1), - z: Integer(2) - }) - }), - } -} - -#[derive(Debug, Default, PartialEq, Serialize, Deserialize)] -struct CanBeEmpty { - a: Option<String>, - b: Option<String>, -} - -#[test] -fn table_structs_empty() { - let text = "[bar]\n\n[baz]\n\n[bazv]\na = \"foo\"\n\n[foo]\n"; - let value: BTreeMap<String, CanBeEmpty> = toml::from_str(text).unwrap(); - let mut expected: BTreeMap<String, CanBeEmpty> = BTreeMap::new(); - expected.insert("bar".to_string(), CanBeEmpty::default()); - expected.insert("baz".to_string(), CanBeEmpty::default()); - expected.insert( - "bazv".to_string(), - CanBeEmpty {a: Some("foo".to_string()), b: None}, - ); - expected.insert("foo".to_string(), CanBeEmpty::default()); - assert_eq!(value, expected); - assert_eq!(toml::to_string(&value).unwrap(), text); -} diff --git a/tests/tables-last.rs b/tests/tables-last.rs deleted file mode 100644 index d05c8f0..0000000 --- a/tests/tables-last.rs +++ /dev/null @@ -1,30 +0,0 @@ -#[macro_use] -extern crate serde_derive; -extern crate toml; - -use std::collections::HashMap; - -#[derive(Serialize)] -struct A { - #[serde(serialize_with = "toml::ser::tables_last")] - vals: HashMap<&'static str, Value>, -} - -#[derive(Serialize)] -#[serde(untagged)] -enum Value { - Map(HashMap<&'static str, &'static str>), - Int(i32), -} - -#[test] -fn always_works() { - let mut a = A { vals: HashMap::new() }; - a.vals.insert("foo", Value::Int(0)); - - let mut sub = HashMap::new(); - sub.insert("foo", "bar"); - a.vals.insert("bar", Value::Map(sub)); - - toml::to_string(&a).unwrap(); -} diff --git a/tests/valid.rs b/tests/valid.rs deleted file mode 100644 index b186800..0000000 --- a/tests/valid.rs +++ /dev/null @@ -1,249 +0,0 @@ -extern crate toml; -extern crate serde; -extern crate serde_json; - -use toml::{Value as Toml, to_string_pretty}; -use serde::ser::Serialize; -use serde_json::Value as Json; - -fn to_json(toml: toml::Value) -> Json { - fn doit(s: &str, json: Json) -> Json { - let mut map = serde_json::Map::new(); - map.insert("type".to_string(), Json::String(s.to_string())); - map.insert("value".to_string(), json); - Json::Object(map) - } - - match toml { - Toml::String(s) => doit("string", Json::String(s)), - Toml::Integer(i) => doit("integer", Json::String(i.to_string())), - Toml::Float(f) => doit("float", Json::String({ - let s = format!("{:.15}", f); - let s = format!("{}", s.trim_right_matches('0')); - if s.ends_with('.') {format!("{}0", s)} else {s} - })), - Toml::Boolean(b) => doit("bool", Json::String(format!("{}", b))), - Toml::Datetime(s) => doit("datetime", Json::String(s.to_string())), - Toml::Array(arr) => { - let is_table = match arr.first() { - Some(&Toml::Table(..)) => true, - _ => false, - }; - let json = Json::Array(arr.into_iter().map(to_json).collect()); - if 
is_table {json} else {doit("array", json)} - } - Toml::Table(table) => { - let mut map = serde_json::Map::new(); - for (k, v) in table { - map.insert(k, to_json(v)); - } - Json::Object(map) - } - } -} - -fn run_pretty(toml: Toml) { - // Assert toml == json - println!("### pretty round trip parse."); - - // standard pretty - let toml_raw = to_string_pretty(&toml).expect("to string"); - let toml2 = toml_raw.parse().expect("from string"); - assert_eq!(toml, toml2); - - // pretty with indent 2 - let mut result = String::with_capacity(128); - { - let mut serializer = toml::Serializer::pretty(&mut result); - serializer.pretty_array_indent(2); - toml.serialize(&mut serializer).expect("to string"); - } - assert_eq!(toml, result.parse().expect("from str")); - result.clear(); - { - let mut serializer = toml::Serializer::new(&mut result); - serializer.pretty_array_trailing_comma(false); - toml.serialize(&mut serializer).expect("to string"); - } - assert_eq!(toml, result.parse().expect("from str")); - result.clear(); - { - let mut serializer = toml::Serializer::pretty(&mut result); - serializer.pretty_string(false); - toml.serialize(&mut serializer).expect("to string"); - assert_eq!(toml, toml2); - } - assert_eq!(toml, result.parse().expect("from str")); - result.clear(); - { - let mut serializer = toml::Serializer::pretty(&mut result); - serializer.pretty_array(false); - toml.serialize(&mut serializer).expect("to string"); - assert_eq!(toml, toml2); - } - assert_eq!(toml, result.parse().expect("from str")); -} - -fn run(toml_raw: &str, json_raw: &str) { - println!("parsing:\n{}", toml_raw); - let toml: Toml = toml_raw.parse().unwrap(); - let json: Json = json_raw.parse().unwrap(); - - // Assert toml == json - let toml_json = to_json(toml.clone()); - assert!(json == toml_json, - "expected\n{}\ngot\n{}\n", - serde_json::to_string_pretty(&json).unwrap(), - serde_json::to_string_pretty(&toml_json).unwrap()); - - // Assert round trip - println!("round trip parse: {}", toml); - let toml2 = toml.to_string().parse().unwrap(); - assert_eq!(toml, toml2); - run_pretty(toml); -} - -macro_rules! 
test( ($name:ident, $toml:expr, $json:expr) => ( - #[test] - fn $name() { run($toml, $json); } -) ); - -test!(array_empty, - include_str!("valid/array-empty.toml"), - include_str!("valid/array-empty.json")); -test!(array_nospaces, - include_str!("valid/array-nospaces.toml"), - include_str!("valid/array-nospaces.json")); -test!(arrays_hetergeneous, - include_str!("valid/arrays-hetergeneous.toml"), - include_str!("valid/arrays-hetergeneous.json")); -test!(arrays, - include_str!("valid/arrays.toml"), - include_str!("valid/arrays.json")); -test!(arrays_nested, - include_str!("valid/arrays-nested.toml"), - include_str!("valid/arrays-nested.json")); -test!(empty, - include_str!("valid/empty.toml"), - include_str!("valid/empty.json")); -test!(bool, - include_str!("valid/bool.toml"), - include_str!("valid/bool.json")); -test!(datetime, - include_str!("valid/datetime.toml"), - include_str!("valid/datetime.json")); -test!(example, - include_str!("valid/example.toml"), - include_str!("valid/example.json")); -test!(float, - include_str!("valid/float.toml"), - include_str!("valid/float.json")); -test!(implicit_and_explicit_after, - include_str!("valid/implicit-and-explicit-after.toml"), - include_str!("valid/implicit-and-explicit-after.json")); -test!(implicit_and_explicit_before, - include_str!("valid/implicit-and-explicit-before.toml"), - include_str!("valid/implicit-and-explicit-before.json")); -test!(implicit_groups, - include_str!("valid/implicit-groups.toml"), - include_str!("valid/implicit-groups.json")); -test!(integer, - include_str!("valid/integer.toml"), - include_str!("valid/integer.json")); -test!(key_equals_nospace, - include_str!("valid/key-equals-nospace.toml"), - include_str!("valid/key-equals-nospace.json")); -test!(key_space, - include_str!("valid/key-space.toml"), - include_str!("valid/key-space.json")); -test!(key_special_chars, - include_str!("valid/key-special-chars.toml"), - include_str!("valid/key-special-chars.json")); -test!(key_with_pound, - include_str!("valid/key-with-pound.toml"), - include_str!("valid/key-with-pound.json")); -test!(long_float, - include_str!("valid/long-float.toml"), - include_str!("valid/long-float.json")); -test!(long_integer, - include_str!("valid/long-integer.toml"), - include_str!("valid/long-integer.json")); -test!(multiline_string, - include_str!("valid/multiline-string.toml"), - include_str!("valid/multiline-string.json")); -test!(raw_multiline_string, - include_str!("valid/raw-multiline-string.toml"), - include_str!("valid/raw-multiline-string.json")); -test!(raw_string, - include_str!("valid/raw-string.toml"), - include_str!("valid/raw-string.json")); -test!(string_empty, - include_str!("valid/string-empty.toml"), - include_str!("valid/string-empty.json")); -test!(string_escapes, - include_str!("valid/string-escapes.toml"), - include_str!("valid/string-escapes.json")); -test!(string_simple, - include_str!("valid/string-simple.toml"), - include_str!("valid/string-simple.json")); -test!(string_with_pound, - include_str!("valid/string-with-pound.toml"), - include_str!("valid/string-with-pound.json")); -test!(table_array_implicit, - include_str!("valid/table-array-implicit.toml"), - include_str!("valid/table-array-implicit.json")); -test!(table_array_many, - include_str!("valid/table-array-many.toml"), - include_str!("valid/table-array-many.json")); -test!(table_array_nest, - include_str!("valid/table-array-nest.toml"), - include_str!("valid/table-array-nest.json")); -test!(table_array_one, - include_str!("valid/table-array-one.toml"), - 
include_str!("valid/table-array-one.json")); -test!(table_empty, - include_str!("valid/table-empty.toml"), - include_str!("valid/table-empty.json")); -test!(table_sub_empty, - include_str!("valid/table-sub-empty.toml"), - include_str!("valid/table-sub-empty.json")); -test!(table_multi_empty, - include_str!("valid/table-multi-empty.toml"), - include_str!("valid/table-multi-empty.json")); -test!(table_whitespace, - include_str!("valid/table-whitespace.toml"), - include_str!("valid/table-whitespace.json")); -test!(table_with_pound, - include_str!("valid/table-with-pound.toml"), - include_str!("valid/table-with-pound.json")); -test!(unicode_escape, - include_str!("valid/unicode-escape.toml"), - include_str!("valid/unicode-escape.json")); -test!(unicode_literal, - include_str!("valid/unicode-literal.toml"), - include_str!("valid/unicode-literal.json")); -test!(hard_example, - include_str!("valid/hard_example.toml"), - include_str!("valid/hard_example.json")); -test!(example2, - include_str!("valid/example2.toml"), - include_str!("valid/example2.json")); -test!(example3, - include_str!("valid/example-v0.3.0.toml"), - include_str!("valid/example-v0.3.0.json")); -test!(example4, - include_str!("valid/example-v0.4.0.toml"), - include_str!("valid/example-v0.4.0.json")); -test!(example_bom, - include_str!("valid/example-bom.toml"), - include_str!("valid/example.json")); - -test!(datetime_truncate, - include_str!("valid/datetime-truncate.toml"), - include_str!("valid/datetime-truncate.json")); -test!(key_quote_newline, - include_str!("valid/key-quote-newline.toml"), - include_str!("valid/key-quote-newline.json")); -test!(table_array_nest_no_keys, - include_str!("valid/table-array-nest-no-keys.toml"), - include_str!("valid/table-array-nest-no-keys.json")); diff --git a/tests/valid/array-empty.json b/tests/valid/array-empty.json deleted file mode 100644 index 2fbf256..0000000 --- a/tests/valid/array-empty.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "thevoid": { "type": "array", "value": [ - {"type": "array", "value": [ - {"type": "array", "value": [ - {"type": "array", "value": [ - {"type": "array", "value": []} - ]} - ]} - ]} - ]} -} diff --git a/tests/valid/array-empty.toml b/tests/valid/array-empty.toml deleted file mode 100644 index fa58dc6..0000000 --- a/tests/valid/array-empty.toml +++ /dev/null @@ -1 +0,0 @@ -thevoid = [[[[[]]]]] diff --git a/tests/valid/array-nospaces.json b/tests/valid/array-nospaces.json deleted file mode 100644 index 1833d61..0000000 --- a/tests/valid/array-nospaces.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "ints": { - "type": "array", - "value": [ - {"type": "integer", "value": "1"}, - {"type": "integer", "value": "2"}, - {"type": "integer", "value": "3"} - ] - } -} diff --git a/tests/valid/array-nospaces.toml b/tests/valid/array-nospaces.toml deleted file mode 100644 index 6618936..0000000 --- a/tests/valid/array-nospaces.toml +++ /dev/null @@ -1 +0,0 @@ -ints = [1,2,3] diff --git a/tests/valid/arrays-hetergeneous.json b/tests/valid/arrays-hetergeneous.json deleted file mode 100644 index 478fa5c..0000000 --- a/tests/valid/arrays-hetergeneous.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "mixed": { - "type": "array", - "value": [ - {"type": "array", "value": [ - {"type": "integer", "value": "1"}, - {"type": "integer", "value": "2"} - ]}, - {"type": "array", "value": [ - {"type": "string", "value": "a"}, - {"type": "string", "value": "b"} - ]}, - {"type": "array", "value": [ - {"type": "float", "value": "1.1"}, - {"type": "float", "value": "2.1"} - ]} - ] - } -} diff --git 
a/tests/valid/arrays-hetergeneous.toml b/tests/valid/arrays-hetergeneous.toml deleted file mode 100644 index a246fcf..0000000 --- a/tests/valid/arrays-hetergeneous.toml +++ /dev/null @@ -1 +0,0 @@ -mixed = [[1, 2], ["a", "b"], [1.1, 2.1]] diff --git a/tests/valid/arrays-nested.json b/tests/valid/arrays-nested.json deleted file mode 100644 index d21920c..0000000 --- a/tests/valid/arrays-nested.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "nest": { - "type": "array", - "value": [ - {"type": "array", "value": [ - {"type": "string", "value": "a"} - ]}, - {"type": "array", "value": [ - {"type": "string", "value": "b"} - ]} - ] - } -} diff --git a/tests/valid/arrays-nested.toml b/tests/valid/arrays-nested.toml deleted file mode 100644 index ce33022..0000000 --- a/tests/valid/arrays-nested.toml +++ /dev/null @@ -1 +0,0 @@ -nest = [["a"], ["b"]] diff --git a/tests/valid/arrays.json b/tests/valid/arrays.json deleted file mode 100644 index 58aedbc..0000000 --- a/tests/valid/arrays.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "ints": { - "type": "array", - "value": [ - {"type": "integer", "value": "1"}, - {"type": "integer", "value": "2"}, - {"type": "integer", "value": "3"} - ] - }, - "floats": { - "type": "array", - "value": [ - {"type": "float", "value": "1.1"}, - {"type": "float", "value": "2.1"}, - {"type": "float", "value": "3.1"} - ] - }, - "strings": { - "type": "array", - "value": [ - {"type": "string", "value": "a"}, - {"type": "string", "value": "b"}, - {"type": "string", "value": "c"} - ] - }, - "dates": { - "type": "array", - "value": [ - {"type": "datetime", "value": "1987-07-05T17:45:00Z"}, - {"type": "datetime", "value": "1979-05-27T07:32:00Z"}, - {"type": "datetime", "value": "2006-06-01T11:00:00Z"} - ] - } -} diff --git a/tests/valid/arrays.toml b/tests/valid/arrays.toml deleted file mode 100644 index c435f57..0000000 --- a/tests/valid/arrays.toml +++ /dev/null @@ -1,8 +0,0 @@ -ints = [1, 2, 3] -floats = [1.1, 2.1, 3.1] -strings = ["a", "b", "c"] -dates = [ - 1987-07-05T17:45:00Z, - 1979-05-27T07:32:00Z, - 2006-06-01T11:00:00Z, -] diff --git a/tests/valid/bool.json b/tests/valid/bool.json deleted file mode 100644 index ae368e9..0000000 --- a/tests/valid/bool.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "f": {"type": "bool", "value": "false"}, - "t": {"type": "bool", "value": "true"} -} diff --git a/tests/valid/bool.toml b/tests/valid/bool.toml deleted file mode 100644 index a8a829b..0000000 --- a/tests/valid/bool.toml +++ /dev/null @@ -1,2 +0,0 @@ -t = true -f = false diff --git a/tests/valid/comments-everywhere.json b/tests/valid/comments-everywhere.json deleted file mode 100644 index e69a2e9..0000000 --- a/tests/valid/comments-everywhere.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "group": { - "answer": {"type": "integer", "value": "42"}, - "more": { - "type": "array", - "value": [ - {"type": "integer", "value": "42"}, - {"type": "integer", "value": "42"} - ] - } - } -} diff --git a/tests/valid/comments-everywhere.toml b/tests/valid/comments-everywhere.toml deleted file mode 100644 index 3dca74c..0000000 --- a/tests/valid/comments-everywhere.toml +++ /dev/null @@ -1,24 +0,0 @@ -# Top comment. - # Top comment. -# Top comment. - -# [no-extraneous-groups-please] - -[group] # Comment -answer = 42 # Comment -# no-extraneous-keys-please = 999 -# Inbetween comment. -more = [ # Comment - # What about multiple # comments? - # Can you handle it? - # - # Evil. -# Evil. - 42, 42, # Comments within arrays are fun. - # What about multiple # comments? - # Can you handle it? - # - # Evil. -# Evil. 
-# ] Did I fool you? -] # Hopefully not. diff --git a/tests/valid/datetime-truncate.json b/tests/valid/datetime-truncate.json deleted file mode 100644 index 8c512e1..0000000 --- a/tests/valid/datetime-truncate.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "bestdayever": { - "type": "datetime", - "value": "1987-07-05T17:45:00.123456789Z" - } -} diff --git a/tests/valid/datetime-truncate.toml b/tests/valid/datetime-truncate.toml deleted file mode 100644 index 05de841..0000000 --- a/tests/valid/datetime-truncate.toml +++ /dev/null @@ -1 +0,0 @@ -bestdayever = 1987-07-05T17:45:00.123456789012345Z diff --git a/tests/valid/datetime.json b/tests/valid/datetime.json deleted file mode 100644 index 2ca93ce..0000000 --- a/tests/valid/datetime.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "bestdayever": {"type": "datetime", "value": "1987-07-05T17:45:00Z"} -} diff --git a/tests/valid/datetime.toml b/tests/valid/datetime.toml deleted file mode 100644 index 2e99340..0000000 --- a/tests/valid/datetime.toml +++ /dev/null @@ -1 +0,0 @@ -bestdayever = 1987-07-05T17:45:00Z diff --git a/tests/valid/empty.json b/tests/valid/empty.json deleted file mode 100644 index 0967ef4..0000000 --- a/tests/valid/empty.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/valid/empty.toml b/tests/valid/empty.toml deleted file mode 100644 index e69de29..0000000 diff --git a/tests/valid/example-bom.toml b/tests/valid/example-bom.toml deleted file mode 100644 index fb5ac81..0000000 --- a/tests/valid/example-bom.toml +++ /dev/null @@ -1,5 +0,0 @@ -best-day-ever = 1987-07-05T17:45:00Z - -[numtheory] -boring = false -perfection = [6, 28, 496] diff --git a/tests/valid/example-v0.3.0.json b/tests/valid/example-v0.3.0.json deleted file mode 100644 index 1d9dcb5..0000000 --- a/tests/valid/example-v0.3.0.json +++ /dev/null @@ -1 +0,0 @@ -{"Array":{"key1":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key2":{"type":"array","value":[{"type":"string","value":"red"},{"type":"string","value":"yellow"},{"type":"string","value":"green"}]},"key3":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"integer","value":"3"},{"type":"integer","value":"4"},{"type":"integer","value":"5"}]}]},"key4":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"string","value":"a"},{"type":"string","value":"b"},{"type":"string","value":"c"}]}]},"key5":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key6":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}},"Booleans":{"False":{"type":"bool","value":"false"},"True":{"type":"bool","value":"true"}},"Datetime":{"key1":{"type":"datetime","value":"1979-05-27T07:32:00Z"}},"Float":{"both":{},"exponent":{},"fractional":{"key1":{"type":"float","value":"1.0"},"key2":{"type":"float","value":"3.1415"},"key3":{"type":"float","value":"-0.01"}}},"Integer":{"key1":{"type":"integer","value":"99"},"key2":{"type":"integer","value":"42"},"key3":{"type":"integer","value":"0"},"key4":{"type":"integer","value":"-17"}},"String":{"Literal":{"Multiline":{"lines":{"type":"string","value":"The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n"},"regex2":{"type":"string","value":"I [dw]on't need \\d{2} apples"}},"quoted":{"type":"string","value":"Tom 
\"Dubs\" Preston-Werner"},"regex":{"type":"string","value":"\u003c\\i\\c*\\s*\u003e"},"winpath":{"type":"string","value":"C:\\Users\\nodejs\\templates"},"winpath2":{"type":"string","value":"\\\\ServerX\\admin$\\system32\\"}},"Multiline":{"key1":{"type":"string","value":"One\nTwo"},"key2":{"type":"string","value":"One\nTwo"},"key3":{"type":"string","value":"One\nTwo"}},"Multilined":{"Singleline":{"key1":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key2":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key3":{"type":"string","value":"The quick brown fox jumps over the lazy dog."}}},"basic":{"type":"string","value":"I'm a string. \"You can quote me\". Name\u0009José\nLocation\u0009SF."}},"Table":{"key":{"type":"string","value":"value"}},"dog":{"tater":{"type":{"type":"string","value":"pug"}}},"fruit":[{"name":{"type":"string","value":"apple"},"physical":{"color":{"type":"string","value":"red"},"shape":{"type":"string","value":"round"}},"variety":[{"name":{"type":"string","value":"red delicious"}},{"name":{"type":"string","value":"granny smith"}}]},{"name":{"type":"string","value":"banana"},"variety":[{"name":{"type":"string","value":"plantain"}}]}],"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"x":{"y":{"z":{"w":{}}}}} diff --git a/tests/valid/example-v0.3.0.toml b/tests/valid/example-v0.3.0.toml deleted file mode 100644 index 76aacc3..0000000 --- a/tests/valid/example-v0.3.0.toml +++ /dev/null @@ -1,182 +0,0 @@ -# Comment -# I am a comment. Hear me roar. Roar. - -# Table -# Tables (also known as hash tables or dictionaries) are collections of key/value pairs. -# They appear in square brackets on a line by themselves. - -[Table] - -key = "value" # Yeah, you can do this. - -# Nested tables are denoted by table names with dots in them. Name your tables whatever crap you please, just don't use #, ., [ or ]. - -[dog.tater] -type = "pug" - -# You don't need to specify all the super-tables if you don't want to. TOML knows how to do it for you. - -# [x] you -# [x.y] don't -# [x.y.z] need these -[x.y.z.w] # for this to work - -# String -# There are four ways to express strings: basic, multi-line basic, literal, and multi-line literal. -# All strings must contain only valid UTF-8 characters. - -[String] -basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF." - -[String.Multiline] - -# The following strings are byte-for-byte equivalent: -key1 = "One\nTwo" -key2 = """One\nTwo""" -key3 = """ -One -Two""" - -[String.Multilined.Singleline] - -# The following strings are byte-for-byte equivalent: -key1 = "The quick brown fox jumps over the lazy dog." - -key2 = """ -The quick brown \ - - - fox jumps over \ - the lazy dog.""" - -key3 = """\ - The quick brown \ - fox jumps over \ - the lazy dog.\ - """ - -[String.Literal] - -# What you see is what you get. -winpath = 'C:\Users\nodejs\templates' -winpath2 = '\\ServerX\admin$\system32\' -quoted = 'Tom "Dubs" Preston-Werner' -regex = '<\i\c*\s*>' - - -[String.Literal.Multiline] - -regex2 = '''I [dw]on't need \d{2} apples''' -lines = ''' -The first newline is -trimmed in raw strings. - All other whitespace - is preserved. -''' - -# Integer -# Integers are whole numbers. Positive numbers may be prefixed with a plus sign. -# Negative numbers are prefixed with a minus sign. 
- -[Integer] -key1 = +99 -key2 = 42 -key3 = 0 -key4 = -17 - -# Float -# A float consists of an integer part (which may be prefixed with a plus or minus sign) -# followed by a fractional part and/or an exponent part. - -[Float.fractional] - -# fractional -key1 = +1.0 -key2 = 3.1415 -key3 = -0.01 - -[Float.exponent] - -# exponent -#key1 = 5e+22 -#key2 = 1e6 -#key3 = -2E-2 - -[Float.both] - -# both -#key = 6.626e-34 - -# Boolean -# Booleans are just the tokens you're used to. Always lowercase. - -[Booleans] -True = true -False = false - -# Datetime -# Datetimes are RFC 3339 dates. - -[Datetime] -key1 = 1979-05-27T07:32:00Z -#key2 = 1979-05-27T00:32:00-07:00 -#key3 = 1979-05-27T00:32:00.999999-07:00 - -# Array -# Arrays are square brackets with other primitives inside. Whitespace is ignored. Elements are separated by commas. Data types may not be mixed. - -[Array] -key1 = [ 1, 2, 3 ] -key2 = [ "red", "yellow", "green" ] -key3 = [ [ 1, 2 ], [3, 4, 5] ] -key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok - -#Arrays can also be multiline. So in addition to ignoring whitespace, arrays also ignore newlines between the brackets. -# Terminating commas are ok before the closing bracket. - -key5 = [ - 1, 2, 3 -] -key6 = [ - 1, - 2, # this is ok -] - -# Array of Tables -# These can be expressed by using a table name in double brackets. -# Each table with the same double bracketed name will be an element in the array. -# The tables are inserted in the order encountered. - -[[products]] -name = "Hammer" -sku = 738594937 - -[[products]] - -[[products]] -name = "Nail" -sku = 284758393 -color = "gray" - - -# You can create nested arrays of tables as well. - -[[fruit]] - name = "apple" - - [fruit.physical] - color = "red" - shape = "round" - - [[fruit.variety]] - name = "red delicious" - - [[fruit.variety]] - name = "granny smith" - -[[fruit]] - name = "banana" - - [[fruit.variety]] - name = "plantain" - diff --git a/tests/valid/example-v0.4.0.json b/tests/valid/example-v0.4.0.json deleted file mode 100644 index d5cac34..0000000 --- a/tests/valid/example-v0.4.0.json +++ /dev/null @@ -1 +0,0 @@ 
-{"array":{"key1":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key2":{"type":"array","value":[{"type":"string","value":"red"},{"type":"string","value":"yellow"},{"type":"string","value":"green"}]},"key3":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"integer","value":"3"},{"type":"integer","value":"4"},{"type":"integer","value":"5"}]}]},"key4":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"string","value":"a"},{"type":"string","value":"b"},{"type":"string","value":"c"}]}]},"key5":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key6":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}},"boolean":{"False":{"type":"bool","value":"false"},"True":{"type":"bool","value":"true"}},"datetime":{},"float":{"both":{},"exponent":{},"fractional":{"key1":{"type":"float","value":"1.0"},"key2":{"type":"float","value":"3.1415"},"key3":{"type":"float","value":"-0.01"}},"underscores":{}},"fruit":[{"name":{"type":"string","value":"apple"},"physical":{"color":{"type":"string","value":"red"},"shape":{"type":"string","value":"round"}},"variety":[{"name":{"type":"string","value":"red delicious"}},{"name":{"type":"string","value":"granny smith"}}]},{"name":{"type":"string","value":"banana"},"variety":[{"name":{"type":"string","value":"plantain"}}]}],"integer":{"key1":{"type":"integer","value":"99"},"key2":{"type":"integer","value":"42"},"key3":{"type":"integer","value":"0"},"key4":{"type":"integer","value":"-17"},"underscores":{"key1":{"type":"integer","value":"1000"},"key2":{"type":"integer","value":"5349221"},"key3":{"type":"integer","value":"12345"}}},"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"string":{"basic":{"basic":{"type":"string","value":"I'm a string. \"You can quote me\". 
Name\u0009José\nLocation\u0009SF."}},"literal":{"multiline":{"lines":{"type":"string","value":"The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n"},"regex2":{"type":"string","value":"I [dw]on't need \\d{2} apples"}},"quoted":{"type":"string","value":"Tom \"Dubs\" Preston-Werner"},"regex":{"type":"string","value":"\u003c\\i\\c*\\s*\u003e"},"winpath":{"type":"string","value":"C:\\Users\\nodejs\\templates"},"winpath2":{"type":"string","value":"\\\\ServerX\\admin$\\system32\\"}},"multiline":{"continued":{"key1":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key2":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key3":{"type":"string","value":"The quick brown fox jumps over the lazy dog."}},"key1":{"type":"string","value":"One\nTwo"},"key2":{"type":"string","value":"One\nTwo"},"key3":{"type":"string","value":"One\nTwo"}}},"table":{"inline":{"name":{"first":{"type":"string","value":"Tom"},"last":{"type":"string","value":"Preston-Werner"}},"point":{"x":{"type":"integer","value":"1"},"y":{"type":"integer","value":"2"}}},"key":{"type":"string","value":"value"},"subtable":{"key":{"type":"string","value":"another value"}}},"x":{"y":{"z":{"w":{}}}}} diff --git a/tests/valid/example-v0.4.0.toml b/tests/valid/example-v0.4.0.toml deleted file mode 100644 index ffbcce0..0000000 --- a/tests/valid/example-v0.4.0.toml +++ /dev/null @@ -1,235 +0,0 @@ -################################################################################ -## Comment - -# Speak your mind with the hash symbol. They go from the symbol to the end of -# the line. - - -################################################################################ -## Table - -# Tables (also known as hash tables or dictionaries) are collections of -# key/value pairs. They appear in square brackets on a line by themselves. - -[table] - -key = "value" # Yeah, you can do this. - -# Nested tables are denoted by table names with dots in them. Name your tables -# whatever crap you please, just don't use #, ., [ or ]. - -[table.subtable] - -key = "another value" - -# You don't need to specify all the super-tables if you don't want to. TOML -# knows how to do it for you. - -# [x] you -# [x.y] don't -# [x.y.z] need these -[x.y.z.w] # for this to work - - -################################################################################ -## Inline Table - -# Inline tables provide a more compact syntax for expressing tables. They are -# especially useful for grouped data that can otherwise quickly become verbose. -# Inline tables are enclosed in curly braces `{` and `}`. No newlines are -# allowed between the curly braces unless they are valid within a value. - -[table.inline] - -name = { first = "Tom", last = "Preston-Werner" } -point = { x = 1, y = 2 } - - -################################################################################ -## String - -# There are four ways to express strings: basic, multi-line basic, literal, and -# multi-line literal. All strings must contain only valid UTF-8 characters. - -[string.basic] - -basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF." - -[string.multiline] - -# The following strings are byte-for-byte equivalent: -key1 = "One\nTwo" -key2 = """One\nTwo""" -key3 = """ -One -Two""" - -[string.multiline.continued] - -# The following strings are byte-for-byte equivalent: -key1 = "The quick brown fox jumps over the lazy dog." 
- -key2 = """ -The quick brown \ - - - fox jumps over \ - the lazy dog.""" - -key3 = """\ - The quick brown \ - fox jumps over \ - the lazy dog.\ - """ - -[string.literal] - -# What you see is what you get. -winpath = 'C:\Users\nodejs\templates' -winpath2 = '\\ServerX\admin$\system32\' -quoted = 'Tom "Dubs" Preston-Werner' -regex = '<\i\c*\s*>' - - -[string.literal.multiline] - -regex2 = '''I [dw]on't need \d{2} apples''' -lines = ''' -The first newline is -trimmed in raw strings. - All other whitespace - is preserved. -''' - - -################################################################################ -## Integer - -# Integers are whole numbers. Positive numbers may be prefixed with a plus sign. -# Negative numbers are prefixed with a minus sign. - -[integer] - -key1 = +99 -key2 = 42 -key3 = 0 -key4 = -17 - -[integer.underscores] - -# For large numbers, you may use underscores to enhance readability. Each -# underscore must be surrounded by at least one digit. -key1 = 1_000 -key2 = 5_349_221 -key3 = 1_2_3_4_5 # valid but inadvisable - - -################################################################################ -## Float - -# A float consists of an integer part (which may be prefixed with a plus or -# minus sign) followed by a fractional part and/or an exponent part. - -[float.fractional] - -key1 = +1.0 -key2 = 3.1415 -key3 = -0.01 - -[float.exponent] - -[float.both] - -[float.underscores] - - -################################################################################ -## Boolean - -# Booleans are just the tokens you're used to. Always lowercase. - -[boolean] - -True = true -False = false - - -################################################################################ -## Datetime - -# Datetimes are RFC 3339 dates. - -[datetime] - -#key1 = 1979-05-27T07:32:00Z -#key2 = 1979-05-27T00:32:00-07:00 -#key3 = 1979-05-27T00:32:00.999999-07:00 - - -################################################################################ -## Array - -# Arrays are square brackets with other primitives inside. Whitespace is -# ignored. Elements are separated by commas. Data types may not be mixed. - -[array] - -key1 = [ 1, 2, 3 ] -key2 = [ "red", "yellow", "green" ] -key3 = [ [ 1, 2 ], [3, 4, 5] ] -key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok - -# Arrays can also be multiline. So in addition to ignoring whitespace, arrays -# also ignore newlines between the brackets. Terminating commas are ok before -# the closing bracket. - -key5 = [ - 1, 2, 3 -] -key6 = [ - 1, - 2, # this is ok -] - - -################################################################################ -## Array of Tables - -# These can be expressed by using a table name in double brackets. Each table -# with the same double bracketed name will be an element in the array. The -# tables are inserted in the order encountered. - -[[products]] - -name = "Hammer" -sku = 738594937 - -[[products]] - -[[products]] - -name = "Nail" -sku = 284758393 -color = "gray" - - -# You can create nested arrays of tables as well. 
- -[[fruit]] - name = "apple" - - [fruit.physical] - color = "red" - shape = "round" - - [[fruit.variety]] - name = "red delicious" - - [[fruit.variety]] - name = "granny smith" - -[[fruit]] - name = "banana" - - [[fruit.variety]] - name = "plantain" diff --git a/tests/valid/example.json b/tests/valid/example.json deleted file mode 100644 index 48aa907..0000000 --- a/tests/valid/example.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "best-day-ever": {"type": "datetime", "value": "1987-07-05T17:45:00Z"}, - "numtheory": { - "boring": {"type": "bool", "value": "false"}, - "perfection": { - "type": "array", - "value": [ - {"type": "integer", "value": "6"}, - {"type": "integer", "value": "28"}, - {"type": "integer", "value": "496"} - ] - } - } -} diff --git a/tests/valid/example.toml b/tests/valid/example.toml deleted file mode 100644 index 8cb02e0..0000000 --- a/tests/valid/example.toml +++ /dev/null @@ -1,5 +0,0 @@ -best-day-ever = 1987-07-05T17:45:00Z - -[numtheory] -boring = false -perfection = [6, 28, 496] diff --git a/tests/valid/example2.json b/tests/valid/example2.json deleted file mode 100644 index 3249a97..0000000 --- a/tests/valid/example2.json +++ /dev/null @@ -1 +0,0 @@ -{"clients":{"data":{"type":"array","value":[{"type":"array","value":[{"type":"string","value":"gamma"},{"type":"string","value":"delta"}]},{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}]},"hosts":{"type":"array","value":[{"type":"string","value":"alpha"},{"type":"string","value":"omega"}]}},"database":{"connection_max":{"type":"integer","value":"5000"},"enabled":{"type":"bool","value":"true"},"ports":{"type":"array","value":[{"type":"integer","value":"8001"},{"type":"integer","value":"8001"},{"type":"integer","value":"8002"}]},"server":{"type":"string","value":"192.168.1.1"}},"owner":{"bio":{"type":"string","value":"GitHub Cofounder \u0026 CEO\nLikes tater tots and beer."},"dob":{"type":"datetime","value":"1979-05-27T07:32:00Z"},"name":{"type":"string","value":"Tom Preston-Werner"},"organization":{"type":"string","value":"GitHub"}},"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"servers":{"alpha":{"dc":{"type":"string","value":"eqdc10"},"ip":{"type":"string","value":"10.0.0.1"}},"beta":{"country":{"type":"string","value":"中国"},"dc":{"type":"string","value":"eqdc10"},"ip":{"type":"string","value":"10.0.0.2"}}},"title":{"type":"string","value":"TOML Example"}} diff --git a/tests/valid/example2.toml b/tests/valid/example2.toml deleted file mode 100644 index bc12c99..0000000 --- a/tests/valid/example2.toml +++ /dev/null @@ -1,47 +0,0 @@ -# This is a TOML document. Boom. - -title = "TOML Example" - -[owner] -name = "Tom Preston-Werner" -organization = "GitHub" -bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." -dob = 1979-05-27T07:32:00Z # First class dates? Why not? - -[database] -server = "192.168.1.1" -ports = [ 8001, 8001, 8002 ] -connection_max = 5000 -enabled = true - -[servers] - - # You can indent as you please. Tabs or spaces. TOML don't care. 
- [servers.alpha] - ip = "10.0.0.1" - dc = "eqdc10" - - [servers.beta] - ip = "10.0.0.2" - dc = "eqdc10" - country = "中国" # This should be parsed as UTF-8 - -[clients] -data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it - -# Line breaks are OK when inside arrays -hosts = [ - "alpha", - "omega" -] - -# Products - - [[products]] - name = "Hammer" - sku = 738594937 - - [[products]] - name = "Nail" - sku = 284758393 - color = "gray" diff --git a/tests/valid/float.json b/tests/valid/float.json deleted file mode 100644 index b8a2e97..0000000 --- a/tests/valid/float.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "pi": {"type": "float", "value": "3.14"}, - "negpi": {"type": "float", "value": "-3.14"} -} diff --git a/tests/valid/float.toml b/tests/valid/float.toml deleted file mode 100644 index 7c528d2..0000000 --- a/tests/valid/float.toml +++ /dev/null @@ -1,2 +0,0 @@ -pi = 3.14 -negpi = -3.14 diff --git a/tests/valid/hard_example.json b/tests/valid/hard_example.json deleted file mode 100644 index 9762e58..0000000 --- a/tests/valid/hard_example.json +++ /dev/null @@ -1 +0,0 @@ -{"the":{"hard":{"another_test_string":{"type":"string","value":" Same thing, but with a string #"},"bit#":{"multi_line_array":{"type":"array","value":[{"type":"string","value":"]"}]},"what?":{"type":"string","value":"You don't think some user won't do that?"}},"harder_test_string":{"type":"string","value":" And when \"'s are in the string, along with # \""},"test_array":{"type":"array","value":[{"type":"string","value":"] "},{"type":"string","value":" # "}]},"test_array2":{"type":"array","value":[{"type":"string","value":"Test #11 ]proved that"},{"type":"string","value":"Experiment #9 was a success"}]}},"test_string":{"type":"string","value":"You'll hate me after this - #"}}} diff --git a/tests/valid/hard_example.toml b/tests/valid/hard_example.toml deleted file mode 100644 index 38856c8..0000000 --- a/tests/valid/hard_example.toml +++ /dev/null @@ -1,33 +0,0 @@ -# Test file for TOML -# Only this one tries to emulate a TOML file written by a user of the kind of parser writers probably hate -# This part you'll really hate - -[the] -test_string = "You'll hate me after this - #" # " Annoying, isn't it? - - [the.hard] - test_array = [ "] ", " # "] # ] There you go, parse this! - test_array2 = [ "Test #11 ]proved that", "Experiment #9 was a success" ] - # You didn't think it'd as easy as chucking out the last #, did you? - another_test_string = " Same thing, but with a string #" - harder_test_string = " And when \"'s are in the string, along with # \"" # "and comments are there too" - # Things will get harder - - [the.hard."bit#"] - "what?" = "You don't think some user won't do that?" - multi_line_array = [ - "]", - # ] Oh yes I did - ] - -# Each of the following keygroups/key value pairs should produce an error. 
Uncomment to them to test - -#[error] if you didn't catch this, your parser is broken -#string = "Anything other than tabs, spaces and newline after a keygroup or key value pair has ended should produce an error unless it is a comment" like this -#array = [ -# "This might most likely happen in multiline arrays", -# Like here, -# "or here, -# and here" -# ] End of array comment, forgot the # -#number = 3.14 pi <--again forgot the # diff --git a/tests/valid/implicit-and-explicit-after.json b/tests/valid/implicit-and-explicit-after.json deleted file mode 100644 index 374bd09..0000000 --- a/tests/valid/implicit-and-explicit-after.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "a": { - "better": {"type": "integer", "value": "43"}, - "b": { - "c": { - "answer": {"type": "integer", "value": "42"} - } - } - } -} diff --git a/tests/valid/implicit-and-explicit-after.toml b/tests/valid/implicit-and-explicit-after.toml deleted file mode 100644 index c0e8865..0000000 --- a/tests/valid/implicit-and-explicit-after.toml +++ /dev/null @@ -1,5 +0,0 @@ -[a.b.c] -answer = 42 - -[a] -better = 43 diff --git a/tests/valid/implicit-and-explicit-before.json b/tests/valid/implicit-and-explicit-before.json deleted file mode 100644 index 374bd09..0000000 --- a/tests/valid/implicit-and-explicit-before.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "a": { - "better": {"type": "integer", "value": "43"}, - "b": { - "c": { - "answer": {"type": "integer", "value": "42"} - } - } - } -} diff --git a/tests/valid/implicit-and-explicit-before.toml b/tests/valid/implicit-and-explicit-before.toml deleted file mode 100644 index eee68ff..0000000 --- a/tests/valid/implicit-and-explicit-before.toml +++ /dev/null @@ -1,5 +0,0 @@ -[a] -better = 43 - -[a.b.c] -answer = 42 diff --git a/tests/valid/implicit-groups.json b/tests/valid/implicit-groups.json deleted file mode 100644 index fbae7fc..0000000 --- a/tests/valid/implicit-groups.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "a": { - "b": { - "c": { - "answer": {"type": "integer", "value": "42"} - } - } - } -} diff --git a/tests/valid/implicit-groups.toml b/tests/valid/implicit-groups.toml deleted file mode 100644 index b6333e4..0000000 --- a/tests/valid/implicit-groups.toml +++ /dev/null @@ -1,2 +0,0 @@ -[a.b.c] -answer = 42 diff --git a/tests/valid/integer.json b/tests/valid/integer.json deleted file mode 100644 index 61985a1..0000000 --- a/tests/valid/integer.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "answer": {"type": "integer", "value": "42"}, - "neganswer": {"type": "integer", "value": "-42"} -} diff --git a/tests/valid/integer.toml b/tests/valid/integer.toml deleted file mode 100644 index c4f6297..0000000 --- a/tests/valid/integer.toml +++ /dev/null @@ -1,2 +0,0 @@ -answer = 42 -neganswer = -42 diff --git a/tests/valid/key-equals-nospace.json b/tests/valid/key-equals-nospace.json deleted file mode 100644 index 1f8709a..0000000 --- a/tests/valid/key-equals-nospace.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "answer": {"type": "integer", "value": "42"} -} diff --git a/tests/valid/key-equals-nospace.toml b/tests/valid/key-equals-nospace.toml deleted file mode 100644 index 560901c..0000000 --- a/tests/valid/key-equals-nospace.toml +++ /dev/null @@ -1 +0,0 @@ -answer=42 diff --git a/tests/valid/key-quote-newline.json b/tests/valid/key-quote-newline.json deleted file mode 100644 index 12473e4..0000000 --- a/tests/valid/key-quote-newline.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "\n": {"type": "integer", "value": "1"} -} diff --git a/tests/valid/key-quote-newline.toml b/tests/valid/key-quote-newline.toml 
deleted file mode 100644 index a2639bf..0000000 --- a/tests/valid/key-quote-newline.toml +++ /dev/null @@ -1 +0,0 @@ -"\n" = 1 diff --git a/tests/valid/key-space.json b/tests/valid/key-space.json deleted file mode 100644 index 9d1f769..0000000 --- a/tests/valid/key-space.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "a b": {"type": "integer", "value": "1"} -} diff --git a/tests/valid/key-space.toml b/tests/valid/key-space.toml deleted file mode 100644 index f4f36c4..0000000 --- a/tests/valid/key-space.toml +++ /dev/null @@ -1 +0,0 @@ -"a b" = 1 diff --git a/tests/valid/key-special-chars.json b/tests/valid/key-special-chars.json deleted file mode 100644 index 6550ebd..0000000 --- a/tests/valid/key-special-chars.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "~!@#$^&*()_+-`1234567890[]\\|/?><.,;:'": { - "type": "integer", "value": "1" - } -} diff --git a/tests/valid/key-special-chars.toml b/tests/valid/key-special-chars.toml deleted file mode 100644 index dc43625..0000000 --- a/tests/valid/key-special-chars.toml +++ /dev/null @@ -1 +0,0 @@ -"~!@#$^&*()_+-`1234567890[]\\|/?><.,;:'" = 1 diff --git a/tests/valid/key-with-pound.json b/tests/valid/key-with-pound.json deleted file mode 100644 index ee39e1d..0000000 --- a/tests/valid/key-with-pound.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "key#name": {"type": "integer", "value": "5"} -} diff --git a/tests/valid/key-with-pound.toml b/tests/valid/key-with-pound.toml deleted file mode 100644 index 65b766f..0000000 --- a/tests/valid/key-with-pound.toml +++ /dev/null @@ -1 +0,0 @@ -"key#name" = 5 diff --git a/tests/valid/long-float.json b/tests/valid/long-float.json deleted file mode 100644 index 8ceed47..0000000 --- a/tests/valid/long-float.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "longpi": {"type": "float", "value": "3.141592653589793"}, - "neglongpi": {"type": "float", "value": "-3.141592653589793"} -} diff --git a/tests/valid/long-float.toml b/tests/valid/long-float.toml deleted file mode 100644 index 9558ae4..0000000 --- a/tests/valid/long-float.toml +++ /dev/null @@ -1,2 +0,0 @@ -longpi = 3.141592653589793 -neglongpi = -3.141592653589793 diff --git a/tests/valid/long-integer.json b/tests/valid/long-integer.json deleted file mode 100644 index 16c331e..0000000 --- a/tests/valid/long-integer.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "answer": {"type": "integer", "value": "9223372036854775807"}, - "neganswer": {"type": "integer", "value": "-9223372036854775808"} -} diff --git a/tests/valid/long-integer.toml b/tests/valid/long-integer.toml deleted file mode 100644 index 424a13a..0000000 --- a/tests/valid/long-integer.toml +++ /dev/null @@ -1,2 +0,0 @@ -answer = 9223372036854775807 -neganswer = -9223372036854775808 diff --git a/tests/valid/multiline-string.json b/tests/valid/multiline-string.json deleted file mode 100644 index 075bf50..0000000 --- a/tests/valid/multiline-string.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "multiline_empty_one": { - "type": "string", - "value": "" - }, - "multiline_empty_two": { - "type": "string", - "value": "" - }, - "multiline_empty_three": { - "type": "string", - "value": "" - }, - "multiline_empty_four": { - "type": "string", - "value": "" - }, - "equivalent_one": { - "type": "string", - "value": "The quick brown fox jumps over the lazy dog." - }, - "equivalent_two": { - "type": "string", - "value": "The quick brown fox jumps over the lazy dog." - }, - "equivalent_three": { - "type": "string", - "value": "The quick brown fox jumps over the lazy dog." 
- } -} diff --git a/tests/valid/multiline-string.toml b/tests/valid/multiline-string.toml deleted file mode 100644 index 15b1143..0000000 --- a/tests/valid/multiline-string.toml +++ /dev/null @@ -1,23 +0,0 @@ -multiline_empty_one = """""" -multiline_empty_two = """ -""" -multiline_empty_three = """\ - """ -multiline_empty_four = """\ - \ - \ - """ - -equivalent_one = "The quick brown fox jumps over the lazy dog." -equivalent_two = """ -The quick brown \ - - - fox jumps over \ - the lazy dog.""" - -equivalent_three = """\ - The quick brown \ - fox jumps over \ - the lazy dog.\ - """ diff --git a/tests/valid/raw-multiline-string.json b/tests/valid/raw-multiline-string.json deleted file mode 100644 index b43cce5..0000000 --- a/tests/valid/raw-multiline-string.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "oneline": { - "type": "string", - "value": "This string has a ' quote character." - }, - "firstnl": { - "type": "string", - "value": "This string has a ' quote character." - }, - "multiline": { - "type": "string", - "value": "This string\nhas ' a quote character\nand more than\none newline\nin it." - } -} diff --git a/tests/valid/raw-multiline-string.toml b/tests/valid/raw-multiline-string.toml deleted file mode 100644 index 8094c03..0000000 --- a/tests/valid/raw-multiline-string.toml +++ /dev/null @@ -1,9 +0,0 @@ -oneline = '''This string has a ' quote character.''' -firstnl = ''' -This string has a ' quote character.''' -multiline = ''' -This string -has ' a quote character -and more than -one newline -in it.''' diff --git a/tests/valid/raw-string.json b/tests/valid/raw-string.json deleted file mode 100644 index 693ab9b..0000000 --- a/tests/valid/raw-string.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "backspace": { - "type": "string", - "value": "This string has a \\b backspace character." - }, - "tab": { - "type": "string", - "value": "This string has a \\t tab character." - }, - "newline": { - "type": "string", - "value": "This string has a \\n new line character." - }, - "formfeed": { - "type": "string", - "value": "This string has a \\f form feed character." - }, - "carriage": { - "type": "string", - "value": "This string has a \\r carriage return character." - }, - "slash": { - "type": "string", - "value": "This string has a \\/ slash character." - }, - "backslash": { - "type": "string", - "value": "This string has a \\\\ backslash character." - } -} diff --git a/tests/valid/raw-string.toml b/tests/valid/raw-string.toml deleted file mode 100644 index 92acd25..0000000 --- a/tests/valid/raw-string.toml +++ /dev/null @@ -1,7 +0,0 @@ -backspace = 'This string has a \b backspace character.' -tab = 'This string has a \t tab character.' -newline = 'This string has a \n new line character.' -formfeed = 'This string has a \f form feed character.' -carriage = 'This string has a \r carriage return character.' -slash = 'This string has a \/ slash character.' -backslash = 'This string has a \\ backslash character.' 
diff --git a/tests/valid/string-empty.json b/tests/valid/string-empty.json deleted file mode 100644 index 6c26d69..0000000 --- a/tests/valid/string-empty.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "answer": { - "type": "string", - "value": "" - } -} diff --git a/tests/valid/string-empty.toml b/tests/valid/string-empty.toml deleted file mode 100644 index e37e681..0000000 --- a/tests/valid/string-empty.toml +++ /dev/null @@ -1 +0,0 @@ -answer = "" diff --git a/tests/valid/string-escapes.json b/tests/valid/string-escapes.json deleted file mode 100644 index 62dac51..0000000 --- a/tests/valid/string-escapes.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "backspace": { - "type": "string", - "value": "This string has a \u0008 backspace character." - }, - "tab": { - "type": "string", - "value": "This string has a \u0009 tab character." - }, - "newline": { - "type": "string", - "value": "This string has a \u000A new line character." - }, - "formfeed": { - "type": "string", - "value": "This string has a \u000C form feed character." - }, - "carriage": { - "type": "string", - "value": "This string has a \u000D carriage return character." - }, - "quote": { - "type": "string", - "value": "This string has a \u0022 quote character." - }, - "slash": { - "type": "string", - "value": "This string has a \u002F slash character." - }, - "backslash": { - "type": "string", - "value": "This string has a \u005C backslash character." - }, - "notunicode1": { - "type": "string", - "value": "This string does not have a unicode \\u escape." - }, - "notunicode2": { - "type": "string", - "value": "This string does not have a unicode \u005Cu escape." - }, - "notunicode3": { - "type": "string", - "value": "This string does not have a unicode \\u0075 escape." - }, - "notunicode4": { - "type": "string", - "value": "This string does not have a unicode \\\u0075 escape." - } -} diff --git a/tests/valid/string-escapes.toml b/tests/valid/string-escapes.toml deleted file mode 100644 index c5d4954..0000000 --- a/tests/valid/string-escapes.toml +++ /dev/null @@ -1,12 +0,0 @@ -backspace = "This string has a \b backspace character." -tab = "This string has a \t tab character." -newline = "This string has a \n new line character." -formfeed = "This string has a \f form feed character." -carriage = "This string has a \r carriage return character." -quote = "This string has a \" quote character." -slash = "This string has a / slash character." -backslash = "This string has a \\ backslash character." -notunicode1 = "This string does not have a unicode \\u escape." -notunicode2 = "This string does not have a unicode \u005Cu escape." -notunicode3 = "This string does not have a unicode \\u0075 escape." -notunicode4 = "This string does not have a unicode \\\u0075 escape." diff --git a/tests/valid/string-simple.json b/tests/valid/string-simple.json deleted file mode 100644 index 2e05f99..0000000 --- a/tests/valid/string-simple.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "answer": { - "type": "string", - "value": "You are not drinking enough whisky." - } -} diff --git a/tests/valid/string-simple.toml b/tests/valid/string-simple.toml deleted file mode 100644 index e17ade6..0000000 --- a/tests/valid/string-simple.toml +++ /dev/null @@ -1 +0,0 @@ -answer = "You are not drinking enough whisky." 
diff --git a/tests/valid/string-with-pound.json b/tests/valid/string-with-pound.json deleted file mode 100644 index 33cdc9c..0000000 --- a/tests/valid/string-with-pound.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "pound": {"type": "string", "value": "We see no # comments here."}, - "poundcomment": { - "type": "string", - "value": "But there are # some comments here." - } -} diff --git a/tests/valid/string-with-pound.toml b/tests/valid/string-with-pound.toml deleted file mode 100644 index 5fd8746..0000000 --- a/tests/valid/string-with-pound.toml +++ /dev/null @@ -1,2 +0,0 @@ -pound = "We see no # comments here." -poundcomment = "But there are # some comments here." # Did I # mess you up? diff --git a/tests/valid/table-array-implicit.json b/tests/valid/table-array-implicit.json deleted file mode 100644 index 32e4640..0000000 --- a/tests/valid/table-array-implicit.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "albums": { - "songs": [ - {"name": {"type": "string", "value": "Glory Days"}} - ] - } -} diff --git a/tests/valid/table-array-implicit.toml b/tests/valid/table-array-implicit.toml deleted file mode 100644 index 3157ac9..0000000 --- a/tests/valid/table-array-implicit.toml +++ /dev/null @@ -1,2 +0,0 @@ -[[albums.songs]] -name = "Glory Days" diff --git a/tests/valid/table-array-many.json b/tests/valid/table-array-many.json deleted file mode 100644 index 84df2da..0000000 --- a/tests/valid/table-array-many.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "people": [ - { - "first_name": {"type": "string", "value": "Bruce"}, - "last_name": {"type": "string", "value": "Springsteen"} - }, - { - "first_name": {"type": "string", "value": "Eric"}, - "last_name": {"type": "string", "value": "Clapton"} - }, - { - "first_name": {"type": "string", "value": "Bob"}, - "last_name": {"type": "string", "value": "Seger"} - } - ] -} diff --git a/tests/valid/table-array-many.toml b/tests/valid/table-array-many.toml deleted file mode 100644 index 46062be..0000000 --- a/tests/valid/table-array-many.toml +++ /dev/null @@ -1,11 +0,0 @@ -[[people]] -first_name = "Bruce" -last_name = "Springsteen" - -[[people]] -first_name = "Eric" -last_name = "Clapton" - -[[people]] -first_name = "Bob" -last_name = "Seger" diff --git a/tests/valid/table-array-nest-no-keys.json b/tests/valid/table-array-nest-no-keys.json deleted file mode 100644 index 7537b1a..0000000 --- a/tests/valid/table-array-nest-no-keys.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "albums": [ - { - "songs": [{}, {}] - } - ], - "artists": [ - { - "home": { - "address": {} - } - } - ] -} diff --git a/tests/valid/table-array-nest-no-keys.toml b/tests/valid/table-array-nest-no-keys.toml deleted file mode 100644 index ad6eb10..0000000 --- a/tests/valid/table-array-nest-no-keys.toml +++ /dev/null @@ -1,6 +0,0 @@ -[[ albums ]] - [[ albums.songs ]] - [[ albums.songs ]] - -[[ artists ]] - [ artists.home.address ] diff --git a/tests/valid/table-array-nest.json b/tests/valid/table-array-nest.json deleted file mode 100644 index c117afa..0000000 --- a/tests/valid/table-array-nest.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "albums": [ - { - "name": {"type": "string", "value": "Born to Run"}, - "songs": [ - {"name": {"type": "string", "value": "Jungleland"}}, - {"name": {"type": "string", "value": "Meeting Across the River"}} - ] - }, - { - "name": {"type": "string", "value": "Born in the USA"}, - "songs": [ - {"name": {"type": "string", "value": "Glory Days"}}, - {"name": {"type": "string", "value": "Dancing in the Dark"}} - ] - } - ] -} diff --git a/tests/valid/table-array-nest.toml 
b/tests/valid/table-array-nest.toml deleted file mode 100644 index d659a3d..0000000 --- a/tests/valid/table-array-nest.toml +++ /dev/null @@ -1,17 +0,0 @@ -[[albums]] -name = "Born to Run" - - [[albums.songs]] - name = "Jungleland" - - [[albums.songs]] - name = "Meeting Across the River" - -[[albums]] -name = "Born in the USA" - - [[albums.songs]] - name = "Glory Days" - - [[albums.songs]] - name = "Dancing in the Dark" diff --git a/tests/valid/table-array-one.json b/tests/valid/table-array-one.json deleted file mode 100644 index d75faae..0000000 --- a/tests/valid/table-array-one.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "people": [ - { - "first_name": {"type": "string", "value": "Bruce"}, - "last_name": {"type": "string", "value": "Springsteen"} - } - ] -} diff --git a/tests/valid/table-array-one.toml b/tests/valid/table-array-one.toml deleted file mode 100644 index cd7e1b6..0000000 --- a/tests/valid/table-array-one.toml +++ /dev/null @@ -1,3 +0,0 @@ -[[people]] -first_name = "Bruce" -last_name = "Springsteen" diff --git a/tests/valid/table-empty.json b/tests/valid/table-empty.json deleted file mode 100644 index 6f3873a..0000000 --- a/tests/valid/table-empty.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "a": {} -} diff --git a/tests/valid/table-empty.toml b/tests/valid/table-empty.toml deleted file mode 100644 index 8bb6a0a..0000000 --- a/tests/valid/table-empty.toml +++ /dev/null @@ -1 +0,0 @@ -[a] diff --git a/tests/valid/table-multi-empty.json b/tests/valid/table-multi-empty.json deleted file mode 100644 index a6e17c9..0000000 --- a/tests/valid/table-multi-empty.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "a": { "b": {} }, - "b": {}, - "c": { "a": {} } -} diff --git a/tests/valid/table-multi-empty.toml b/tests/valid/table-multi-empty.toml deleted file mode 100644 index 2266ed2..0000000 --- a/tests/valid/table-multi-empty.toml +++ /dev/null @@ -1,5 +0,0 @@ -[a] -[a.b] -[b] -[c] -[c.a] diff --git a/tests/valid/table-sub-empty.json b/tests/valid/table-sub-empty.json deleted file mode 100644 index 9787770..0000000 --- a/tests/valid/table-sub-empty.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "a": { "b": {} } -} diff --git a/tests/valid/table-sub-empty.toml b/tests/valid/table-sub-empty.toml deleted file mode 100644 index 70b7fe1..0000000 --- a/tests/valid/table-sub-empty.toml +++ /dev/null @@ -1,2 +0,0 @@ -[a] -[a.b] diff --git a/tests/valid/table-whitespace.json b/tests/valid/table-whitespace.json deleted file mode 100644 index 3a73ec8..0000000 --- a/tests/valid/table-whitespace.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "valid key": {} -} diff --git a/tests/valid/table-whitespace.toml b/tests/valid/table-whitespace.toml deleted file mode 100644 index daf881d..0000000 --- a/tests/valid/table-whitespace.toml +++ /dev/null @@ -1 +0,0 @@ -["valid key"] diff --git a/tests/valid/table-with-pound.json b/tests/valid/table-with-pound.json deleted file mode 100644 index 5e594e4..0000000 --- a/tests/valid/table-with-pound.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "key#group": { - "answer": {"type": "integer", "value": "42"} - } -} diff --git a/tests/valid/table-with-pound.toml b/tests/valid/table-with-pound.toml deleted file mode 100644 index 33f2c4f..0000000 --- a/tests/valid/table-with-pound.toml +++ /dev/null @@ -1,2 +0,0 @@ -["key#group"] -answer = 42 diff --git a/tests/valid/unicode-escape.json b/tests/valid/unicode-escape.json deleted file mode 100644 index 32948c6..0000000 --- a/tests/valid/unicode-escape.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "answer1": {"type": "string", "value": "\u000B"}, - "answer4": 
{"type": "string", "value": "\u03B4α"}, - "answer8": {"type": "string", "value": "\u03B4β"} -} diff --git a/tests/valid/unicode-escape.toml b/tests/valid/unicode-escape.toml deleted file mode 100644 index c0d5a25..0000000 --- a/tests/valid/unicode-escape.toml +++ /dev/null @@ -1,3 +0,0 @@ -answer1 = "\u000B" -answer4 = "\u03B4α" -answer8 = "\U000003B4β" diff --git a/tests/valid/unicode-literal.json b/tests/valid/unicode-literal.json deleted file mode 100644 index 00aa2f8..0000000 --- a/tests/valid/unicode-literal.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "answer": {"type": "string", "value": "δ"} -} diff --git a/tests/valid/unicode-literal.toml b/tests/valid/unicode-literal.toml deleted file mode 100644 index c65723c..0000000 --- a/tests/valid/unicode-literal.toml +++ /dev/null @@ -1 +0,0 @@ -answer = "δ" -- cgit v1.2.3 From d53db5c5335fb73389e5a78c190bf2a1903f3b6a Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Sun, 12 Nov 2017 15:17:52 -0800 Subject: A toml macro --- src/lib.rs | 3 + src/macros.rs | 373 +++++++++++++++++++++++++++++++++++++++++++++ test-suite/Cargo.toml | 1 + test-suite/build.rs | 8 + test-suite/tests/macros.rs | 286 ++++++++++++++++++++++++++++++++++ 5 files changed, 671 insertions(+) create mode 100644 src/macros.rs create mode 100644 test-suite/build.rs create mode 100644 test-suite/tests/macros.rs diff --git a/src/lib.rs b/src/lib.rs index c4a7e9d..1c4842b 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -166,3 +166,6 @@ pub mod de; #[doc(no_inline)] pub use de::{from_slice, from_str, Deserializer}; mod tokens; + +#[doc(hidden)] +pub mod macros; diff --git a/src/macros.rs b/src/macros.rs new file mode 100644 index 0000000..7a2bcd3 --- /dev/null +++ b/src/macros.rs @@ -0,0 +1,373 @@ +pub use serde::de::{Deserialize, IntoDeserializer}; + +use value::{Value, Table, Array}; + +/// Construct a [`toml::Value`] from TOML syntax. +/// +/// [`toml::Value`]: value/enum.Value.html +/// +/// ```rust +/// #[macro_use] +/// extern crate toml; +/// +/// fn main() { +/// let cargo_toml = toml! { +/// [package] +/// name = "toml" +/// version = "0.4.5" +/// authors = ["Alex Crichton "] +/// +/// [badges] +/// travis-ci = { repository = "alexcrichton/toml-rs" } +/// +/// [dependencies] +/// serde = "1.0" +/// +/// [dev-dependencies] +/// serde_derive = "1.0" +/// serde_json = "1.0" +/// }; +/// +/// println!("{:#?}", cargo_toml); +/// } +/// ``` +#[macro_export] +macro_rules! toml { + ($($toml:tt)+) => {{ + let table = $crate::value::Table::new(); + let mut root = $crate::Value::Table(table); + toml_internal!(@toplevel root [] $($toml)+); + root + }}; +} + +// TT-muncher to parse TOML syntax into a toml::Value. +// +// @toplevel -- Parse tokens outside of an inline table or inline array. In +// this state, `[table headers]` and `[[array headers]]` are +// allowed and `key = value` pairs are not separated by commas. +// +// @topleveldatetime -- Helper to parse a Datetime from string and insert it +// into a table, continuing in the @toplevel state. +// +// @path -- Turn a path segment into a string. Segments that look like idents +// are stringified, while quoted segments like `"cfg(windows)"` +// are not. +// +// @value -- Parse the value part of a `key = value` pair, which may be a +// primitive or inline table or inline array. +// +// @table -- Parse the contents of an inline table, returning them as a +// toml::Value::Table. +// +// @tabledatetime -- Helper to parse a Datetime from string and insert it +// into a table, continuing in the @table state. 
+// +// @array -- Parse the contents of an inline array, returning them as a +// toml::Value::Array. +// +// @arraydatetime -- Helper to parse a Datetime from string and push it into +// an array, continuing in the @array state. +// +// @trailingcomma -- Helper to append a comma to a sequence of tokens if the +// sequence is non-empty and does not already end in a trailing +// comma. +// +#[macro_export] +#[doc(hidden)] +macro_rules! toml_internal { + // Base case, no elements remaining. + (@toplevel $root:ident [$($path:tt)*]) => {}; + + // Parse negative number `key = -value`. + (@toplevel $root:ident [$($path:tt)*] $($k:tt)-+ = - $v:tt $($rest:tt)*) => { + toml_internal!(@toplevel $root [$($path)*] $($k)-+ = (-$v) $($rest)*); + }; + + // Parse offset datetime `key = 1979-05-27T00:32:00.999999-07:00`. + (@toplevel $root:ident [$($path:tt)*] $($k:tt)-+ = $yr:tt - $mo:tt - $dhr:tt : $min:tt : $sec:tt . $frac:tt - $tzh:tt : $tzm:tt $($rest:tt)*) => { + toml_internal!(@topleveldatetime $root [$($path)*] $($k)-+ = ($yr - $mo - $dhr : $min : $sec . $frac - $tzh : $tzm) $($rest)*); + }; + + // Parse offset datetime `key = 1979-05-27T00:32:00-07:00`. + (@toplevel $root:ident [$($path:tt)*] $($k:tt)-+ = $yr:tt - $mo:tt - $dhr:tt : $min:tt : $sec:tt - $tzh:tt : $tzm:tt $($rest:tt)*) => { + toml_internal!(@topleveldatetime $root [$($path)*] $($k)-+ = ($yr - $mo - $dhr : $min : $sec - $tzh : $tzm) $($rest)*); + }; + + // Parse local datetime `key = 1979-05-27T00:32:00.999999`. + (@toplevel $root:ident [$($path:tt)*] $($k:tt)-+ = $yr:tt - $mo:tt - $dhr:tt : $min:tt : $sec:tt . $frac:tt $($rest:tt)*) => { + toml_internal!(@topleveldatetime $root [$($path)*] $($k)-+ = ($yr - $mo - $dhr : $min : $sec . $frac) $($rest)*); + }; + + // Parse offset datetime `key = 1979-05-27T07:32:00Z` and local datetime `key = 1979-05-27T07:32:00`. + (@toplevel $root:ident [$($path:tt)*] $($k:tt)-+ = $yr:tt - $mo:tt - $dhr:tt : $min:tt : $sec:tt $($rest:tt)*) => { + toml_internal!(@topleveldatetime $root [$($path)*] $($k)-+ = ($yr - $mo - $dhr : $min : $sec) $($rest)*); + }; + + // Parse local date `key = 1979-05-27`. + (@toplevel $root:ident [$($path:tt)*] $($k:tt)-+ = $yr:tt - $mo:tt - $day:tt $($rest:tt)*) => { + toml_internal!(@topleveldatetime $root [$($path)*] $($k)-+ = ($yr - $mo - $day) $($rest)*); + }; + + // Parse local time `key = 00:32:00.999999`. + (@toplevel $root:ident [$($path:tt)*] $($k:tt)-+ = $hr:tt : $min:tt : $sec:tt . $frac:tt $($rest:tt)*) => { + toml_internal!(@topleveldatetime $root [$($path)*] $($k)-+ = ($hr : $min : $sec . $frac) $($rest)*); + }; + + // Parse local time `key = 07:32:00`. + (@toplevel $root:ident [$($path:tt)*] $($k:tt)-+ = $hr:tt : $min:tt : $sec:tt $($rest:tt)*) => { + toml_internal!(@topleveldatetime $root [$($path)*] $($k)-+ = ($hr : $min : $sec) $($rest)*); + }; + + // Parse any other `key = value` including string, inline array, inline + // table, number, and boolean. + (@toplevel $root:ident [$($path:tt)*] $($k:tt)-+ = $v:tt $($rest:tt)*) => { + $crate::macros::insert_toml( + &mut $root, + &[$($path)* &concat!($("-", toml_internal!(@path $k),)+)[1..]], + toml_internal!(@value $v)); + toml_internal!(@toplevel $root [$($path)*] $($rest)*); + }; + + // Parse array header `[[bin]]`. 
+ (@toplevel $root:ident $oldpath:tt [[$($($path:tt)-+).+]] $($rest:tt)*) => { + $crate::macros::push_toml( + &mut $root, + &[$(&concat!($("-", toml_internal!(@path $path),)+)[1..],)+]); + toml_internal!(@toplevel $root [$(&concat!($("-", toml_internal!(@path $path),)+)[1..],)+] $($rest)*); + }; + + // Parse table header `[patch.crates-io]`. + (@toplevel $root:ident $oldpath:tt [$($($path:tt)-+).+] $($rest:tt)*) => { + $crate::macros::insert_toml( + &mut $root, + &[$(&concat!($("-", toml_internal!(@path $path),)+)[1..],)+], + $crate::Value::Table($crate::value::Table::new())); + toml_internal!(@toplevel $root [$(&concat!($("-", toml_internal!(@path $path),)+)[1..],)+] $($rest)*); + }; + + // Parse datetime from string and insert into table. + (@topleveldatetime $root:ident [$($path:tt)*] $($k:tt)-+ = ($($datetime:tt)+) $($rest:tt)*) => { + $crate::macros::insert_toml( + &mut $root, + &[$($path)* &concat!($("-", toml_internal!(@path $k),)+)[1..]], + $crate::Value::Datetime(concat!($(stringify!($datetime)),+).parse().unwrap())); + toml_internal!(@toplevel $root [$($path)*] $($rest)*); + }; + + // Turn a path segment into a string. + (@path $ident:ident) => { + stringify!($ident) + }; + + // For a path segment that is not an ident, expect that it is already a + // quoted string, like in `[target."cfg(windows)".dependencies]`. + (@path $quoted:tt) => { + $quoted + }; + + // Construct a Value from an inline table. + (@value { $($inline:tt)* }) => {{ + let mut table = $crate::value::Table::new(); + toml_internal!(@trailingcomma (@table table) $($inline)*); + $crate::Value::Table(table) + }}; + + // Construct a Value from an inline array. + (@value [ $($inline:tt)* ]) => {{ + let mut array = $crate::value::Array::new(); + toml_internal!(@trailingcomma (@array array) $($inline)*); + $crate::Value::Array(array) + }}; + + // Construct a Value from any other type, probably string or boolean or number. + (@value $v:tt) => {{ + // TODO: Implement this with something like serde_json::to_value instead. + let de = $crate::macros::IntoDeserializer::<$crate::de::Error>::into_deserializer($v); + <$crate::Value as $crate::macros::Deserialize>::deserialize(de).unwrap() + }}; + + // Base case of inline table. + (@table $root:ident) => {}; + + // Parse negative number `key = -value`. + (@table $root:ident $($k:tt)-+ = - $v:tt , $($rest:tt)*) => { + toml_internal!(@table $root $($k)-+ = (-$v) , $($rest)*); + }; + + // Parse offset datetime `key = 1979-05-27T00:32:00.999999-07:00`. + (@table $root:ident $($k:tt)-+ = $yr:tt - $mo:tt - $dhr:tt : $min:tt : $sec:tt . $frac:tt - $tzh:tt : $tzm:tt , $($rest:tt)*) => { + toml_internal!(@tabledatetime $root $($k)-+ = ($yr - $mo - $dhr : $min : $sec . $frac - $tzh : $tzm) $($rest)*); + }; + + // Parse offset datetime `key = 1979-05-27T00:32:00-07:00`. + (@table $root:ident $($k:tt)-+ = $yr:tt - $mo:tt - $dhr:tt : $min:tt : $sec:tt - $tzh:tt : $tzm:tt , $($rest:tt)*) => { + toml_internal!(@tabledatetime $root $($k)-+ = ($yr - $mo - $dhr : $min : $sec - $tzh : $tzm) $($rest)*); + }; + + // Parse local datetime `key = 1979-05-27T00:32:00.999999`. + (@table $root:ident $($k:tt)-+ = $yr:tt - $mo:tt - $dhr:tt : $min:tt : $sec:tt . $frac:tt , $($rest:tt)*) => { + toml_internal!(@tabledatetime $root $($k)-+ = ($yr - $mo - $dhr : $min : $sec . $frac) $($rest)*); + }; + + // Parse offset datetime `key = 1979-05-27T07:32:00Z` and local datetime `key = 1979-05-27T07:32:00`. 
+ (@table $root:ident $($k:tt)-+ = $yr:tt - $mo:tt - $dhr:tt : $min:tt : $sec:tt , $($rest:tt)*) => { + toml_internal!(@tabledatetime $root $($k)-+ = ($yr - $mo - $dhr : $min : $sec) $($rest)*); + }; + + // Parse local date `key = 1979-05-27`. + (@table $root:ident $($k:tt)-+ = $yr:tt - $mo:tt - $day:tt , $($rest:tt)*) => { + toml_internal!(@tabledatetime $root $($k)-+ = ($yr - $mo - $day) $($rest)*); + }; + + // Parse local time `key = 00:32:00.999999`. + (@table $root:ident $($k:tt)-+ = $hr:tt : $min:tt : $sec:tt . $frac:tt , $($rest:tt)*) => { + toml_internal!(@tabledatetime $root $($k)-+ = ($hr : $min : $sec . $frac) $($rest)*); + }; + + // Parse local time `key = 07:32:00`. + (@table $root:ident $($k:tt)-+ = $hr:tt : $min:tt : $sec:tt , $($rest:tt)*) => { + toml_internal!(@tabledatetime $root $($k)-+ = ($hr : $min : $sec) $($rest)*); + }; + + // Parse any other type, probably string or boolean or number. + (@table $root:ident $($k:tt)-+ = $v:tt , $($rest:tt)*) => { + $root.insert( + concat!($("-", toml_internal!(@path $k),)+)[1..].to_owned(), + toml_internal!(@value $v)); + toml_internal!(@table $root $($rest)*); + }; + + // Parse a Datetime from string and continue in @table state. + (@tabledatetime $root:ident $($k:tt)-+ = ($($datetime:tt)*) $($rest:tt)*) => { + $root.insert( + concat!($("-", toml_internal!(@path $k),)+)[1..].to_owned(), + $crate::Value::Datetime(concat!($(stringify!($datetime)),+).parse().unwrap())); + toml_internal!(@table $root $($rest)*); + }; + + // Base case of inline array. + (@array $root:ident) => {}; + + // Parse negative number `-value`. + (@array $root:ident - $v:tt , $($rest:tt)*) => { + toml_internal!(@array $root (-$v) , $($rest)*); + }; + + // Parse offset datetime `1979-05-27T00:32:00.999999-07:00`. + (@array $root:ident $yr:tt - $mo:tt - $dhr:tt : $min:tt : $sec:tt . $frac:tt - $tzh:tt : $tzm:tt , $($rest:tt)*) => { + toml_internal!(@arraydatetime $root ($yr - $mo - $dhr : $min : $sec . $frac - $tzh : $tzm) $($rest)*); + }; + + // Parse offset datetime `1979-05-27T00:32:00-07:00`. + (@array $root:ident $yr:tt - $mo:tt - $dhr:tt : $min:tt : $sec:tt - $tzh:tt : $tzm:tt , $($rest:tt)*) => { + toml_internal!(@arraydatetime $root ($yr - $mo - $dhr : $min : $sec - $tzh : $tzm) $($rest)*); + }; + + // Parse local datetime `1979-05-27T00:32:00.999999`. + (@array $root:ident $yr:tt - $mo:tt - $dhr:tt : $min:tt : $sec:tt . $frac:tt , $($rest:tt)*) => { + toml_internal!(@arraydatetime $root ($yr - $mo - $dhr : $min : $sec . $frac) $($rest)*); + }; + + // Parse offset datetime `1979-05-27T07:32:00Z` and local datetime `1979-05-27T07:32:00`. + (@array $root:ident $yr:tt - $mo:tt - $dhr:tt : $min:tt : $sec:tt , $($rest:tt)*) => { + toml_internal!(@arraydatetime $root ($yr - $mo - $dhr : $min : $sec) $($rest)*); + }; + + // Parse local date `1979-05-27`. + (@array $root:ident $yr:tt - $mo:tt - $day:tt , $($rest:tt)*) => { + toml_internal!(@arraydatetime $root ($yr - $mo - $day) $($rest)*); + }; + + // Parse local time `00:32:00.999999`. + (@array $root:ident $hr:tt : $min:tt : $sec:tt . $frac:tt , $($rest:tt)*) => { + toml_internal!(@arraydatetime $root ($hr : $min : $sec . $frac) $($rest)*); + }; + + // Parse local time `07:32:00`. + (@array $root:ident $hr:tt : $min:tt : $sec:tt , $($rest:tt)*) => { + toml_internal!(@arraydatetime $root ($hr : $min : $sec) $($rest)*); + }; + + // Parse any other type, probably string or boolean or number. 
+ (@array $root:ident $v:tt , $($rest:tt)*) => { + $root.push(toml_internal!(@value $v)); + toml_internal!(@array $root $($rest)*); + }; + + // Parse a Datetime from string and continue in @array state. + (@arraydatetime $root:ident ($($datetime:tt)*) $($rest:tt)*) => { + $root.push($crate::Value::Datetime(concat!($(stringify!($datetime)),+).parse().unwrap())); + toml_internal!(@array $root $($rest)*); + }; + + // No trailing comma required if the tokens are empty. + (@trailingcomma ($($args:tt)*)) => { + toml_internal!($($args)*); + }; + + // Tokens end with a trailing comma, do not append another one. + (@trailingcomma ($($args:tt)*) ,) => { + toml_internal!($($args)* ,); + }; + + // Tokens end with something other than comma, append a trailing comma. + (@trailingcomma ($($args:tt)*) $last:tt) => { + toml_internal!($($args)* $last ,); + }; + + // Not yet at the last token. + (@trailingcomma ($($args:tt)*) $first:tt $($rest:tt)+) => { + toml_internal!(@trailingcomma ($($args)* $first) $($rest)+); + }; +} + +// Called when parsing a `key = value` pair. +// Inserts an entry into the table at the given path. +pub fn insert_toml(root: &mut Value, path: &[&str], value: Value) { + *traverse(root, path) = value; +} + +// Called when parsing an `[[array header]]`. +// Pushes an empty table onto the array at the given path. +pub fn push_toml(root: &mut Value, path: &[&str]) { + let target = traverse(root, path); + if !target.is_array() { + *target = Value::Array(Array::new()); + } + target.as_array_mut().unwrap().push(Value::Table(Table::new())); +} + +fn traverse<'a>(root: &'a mut Value, path: &[&str]) -> &'a mut Value { + let mut cur = root; + for &key in path { + // Lexical lifetimes :D + let cur1 = cur; + let cur2; + + // From the TOML spec: + // + // > Each double-bracketed sub-table will belong to the most recently + // > defined table element above it. + if cur1.is_array() { + cur2 = cur1.as_array_mut().unwrap().last_mut().unwrap(); + } else { + cur2 = cur1; + }; + + // We are about to index into this value, so it better be a table. + if !cur2.is_table() { + *cur2 = Value::Table(Table::new()); + } + + if !cur2.as_table().unwrap().contains_key(key) { + // Insert an empty table for the next loop iteration to point to. + let empty = Value::Table(Table::new()); + cur2.as_table_mut().unwrap().insert(key.to_owned(), empty); + } + + // Step into the current table. + cur = cur2.as_table_mut().unwrap().get_mut(key).unwrap(); + } + cur +} diff --git a/test-suite/Cargo.toml b/test-suite/Cargo.toml index 7c91787..10ffbcb 100644 --- a/test-suite/Cargo.toml +++ b/test-suite/Cargo.toml @@ -2,6 +2,7 @@ name = "toml_test_suite" version = "0.0.0" authors = ["Alex Crichton "] +build = "build.rs" publish = false [build-dependencies] diff --git a/test-suite/build.rs b/test-suite/build.rs new file mode 100644 index 0000000..ca63946 --- /dev/null +++ b/test-suite/build.rs @@ -0,0 +1,8 @@ +extern crate rustc_version; +use rustc_version::{version, Version}; + +fn main() { + if version().unwrap() >= Version::parse("1.20.0").unwrap() { + println!(r#"cargo:rustc-cfg=feature="test-quoted-keys-in-macro""#); + } +} diff --git a/test-suite/tests/macros.rs b/test-suite/tests/macros.rs new file mode 100644 index 0000000..439420d --- /dev/null +++ b/test-suite/tests/macros.rs @@ -0,0 +1,286 @@ +#![recursion_limit = "128"] + +#[macro_use] +extern crate toml; + +macro_rules! 
table { + ($($key:expr => $value:expr,)*) => {{ + let mut table = toml::value::Table::new(); + $( + table.insert($key.to_string(), $value.into()); + )* + toml::Value::Table(table) + }}; +} + +macro_rules! array { + ($($element:expr,)*) => {{ + let mut array = toml::value::Array::new(); + $( + array.push($element.into()); + )* + toml::Value::Array(array) + }}; +} + +macro_rules! datetime { + ($s:tt) => { + $s.parse::<toml::value::Datetime>().unwrap() + }; +} + +#[test] +fn test_cargo_toml() { + // Simple sanity check of: + // + // - Ordinary tables + // - Inline tables + // - Inline arrays + // - String values + // - Table keys containing hyphen + // - Table headers containing hyphen + let actual = toml! { + [package] + name = "toml" + version = "0.4.5" + authors = ["Alex Crichton "] + + [badges] + travis-ci = { repository = "alexcrichton/toml-rs" } + + [dependencies] + serde = "1.0" + + [dev-dependencies] + serde_derive = "1.0" + serde_json = "1.0" + }; + + let expected = table! { + "package" => table! { + "name" => "toml".to_owned(), + "version" => "0.4.5".to_owned(), + "authors" => array! { + "Alex Crichton ".to_owned(), + }, + }, + "badges" => table! { + "travis-ci" => table! { + "repository" => "alexcrichton/toml-rs".to_owned(), + }, + }, + "dependencies" => table! { + "serde" => "1.0".to_owned(), + }, + "dev-dependencies" => table! { + "serde_derive" => "1.0".to_owned(), + "serde_json" => "1.0".to_owned(), + }, + }; + + assert_eq!(actual, expected); +} + +#[test] +fn test_array() { + // Copied from the TOML spec. + let actual = toml! { + [[fruit]] + name = "apple" + + [fruit.physical] + color = "red" + shape = "round" + + [[fruit.variety]] + name = "red delicious" + + [[fruit.variety]] + name = "granny smith" + + [[fruit]] + name = "banana" + + [[fruit.variety]] + name = "plantain" + }; + + let expected = table! { + "fruit" => array! { + table! { + "name" => "apple", + "physical" => table! { + "color" => "red", + "shape" => "round", + }, + "variety" => array! { + table! { + "name" => "red delicious", + }, + table! { + "name" => "granny smith", + }, + }, + }, + table! { + "name" => "banana", + "variety" => array! { + table! { + "name" => "plantain", + }, + }, + }, + }, + }; + + assert_eq!(actual, expected); +} + +#[test] +fn test_number() { + let actual = toml! { + positive = 1 + negative = -1 + table = { positive = 1, negative = -1 } + array = [ 1, -1 ] + }; + + let expected = table! { + "positive" => 1, + "negative" => -1, + "table" => table! { + "positive" => 1, + "negative" => -1, + }, + "array" => array! { + 1, + -1, + }, + }; + + assert_eq!(actual, expected); +} + +#[test] +fn test_datetime() { + let actual = toml! { + // Copied from the TOML spec. + odt1 = 1979-05-27T07:32:00Z + odt2 = 1979-05-27T00:32:00-07:00 + odt3 = 1979-05-27T00:32:00.999999-07:00 + ldt1 = 1979-05-27T07:32:00 + ldt2 = 1979-05-27T00:32:00.999999 + ld1 = 1979-05-27 + lt1 = 07:32:00 + lt2 = 00:32:00.999999 + + table = { + odt1 = 1979-05-27T07:32:00Z, + odt2 = 1979-05-27T00:32:00-07:00, + odt3 = 1979-05-27T00:32:00.999999-07:00, + ldt1 = 1979-05-27T07:32:00, + ldt2 = 1979-05-27T00:32:00.999999, + ld1 = 1979-05-27, + lt1 = 07:32:00, + lt2 = 00:32:00.999999, + } + + array = [ + 1979-05-27T07:32:00Z, + 1979-05-27T00:32:00-07:00, + 1979-05-27T00:32:00.999999-07:00, + 1979-05-27T07:32:00, + 1979-05-27T00:32:00.999999, + 1979-05-27, + 07:32:00, + 00:32:00.999999, + ] + }; + + let expected = table!
{ + "odt1" => datetime!("1979-05-27T07:32:00Z"), + "odt2" => datetime!("1979-05-27T00:32:00-07:00"), + "odt3" => datetime!("1979-05-27T00:32:00.999999-07:00"), + "ldt1" => datetime!("1979-05-27T07:32:00"), + "ldt2" => datetime!("1979-05-27T00:32:00.999999"), + "ld1" => datetime!("1979-05-27"), + "lt1" => datetime!("07:32:00"), + "lt2" => datetime!("00:32:00.999999"), + + "table" => table! { + "odt1" => datetime!("1979-05-27T07:32:00Z"), + "odt2" => datetime!("1979-05-27T00:32:00-07:00"), + "odt3" => datetime!("1979-05-27T00:32:00.999999-07:00"), + "ldt1" => datetime!("1979-05-27T07:32:00"), + "ldt2" => datetime!("1979-05-27T00:32:00.999999"), + "ld1" => datetime!("1979-05-27"), + "lt1" => datetime!("07:32:00"), + "lt2" => datetime!("00:32:00.999999"), + }, + + "array" => array! { + datetime!("1979-05-27T07:32:00Z"), + datetime!("1979-05-27T00:32:00-07:00"), + datetime!("1979-05-27T00:32:00.999999-07:00"), + datetime!("1979-05-27T07:32:00"), + datetime!("1979-05-27T00:32:00.999999"), + datetime!("1979-05-27"), + datetime!("07:32:00"), + datetime!("00:32:00.999999"), + }, + }; + + assert_eq!(actual, expected); +} + +// This test requires rustc >= 1.20. +#[test] +#[cfg(feature = "test-quoted-keys-in-macro")] +fn test_quoted_key() { + let actual = toml! { + "quoted" = true + table = { "quoted" = true } + + [target."cfg(windows)".dependencies] + winapi = "0.2.8" + }; + + let expected = table! { + "quoted" => true, + "table" => table! { + "quoted" => true, + }, + "target" => table! { + "cfg(windows)" => table! { + "dependencies" => table! { + "winapi" => "0.2.8", + }, + }, + }, + }; + + assert_eq!(actual, expected); +} + +#[test] +fn test_empty() { + let actual = toml! { + empty_inline_table = {} + empty_inline_array = [] + + [empty_table] + + [[empty_array]] + }; + + let expected = table! { + "empty_inline_table" => table! {}, + "empty_inline_array" => array! {}, + "empty_table" => table! {}, + "empty_array" => array! { + table! {}, + }, + }; + + assert_eq!(actual, expected); +} -- cgit v1.2.3
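For readers evaluating the new macro, a minimal sketch of how a downstream crate might combine `toml!` with the crate's existing `Value` accessors and serializer. The crate layout, the `demo` manifest contents, and the `main` function below are assumptions made for illustration only; they are not part of this patch.

```rust
// Illustrative usage only; assumes a 2015-edition crate depending on `toml`.
#[macro_use]
extern crate toml;

fn main() {
    // The macro builds an ordinary `toml::Value` at compile time from TOML syntax.
    let manifest = toml! {
        [package]
        name = "demo"
        version = "0.1.0"
    };

    // The existing `Value` accessors apply to the result.
    let name = manifest
        .as_table()
        .and_then(|root| root.get("package"))
        .and_then(|pkg| pkg.as_table())
        .and_then(|pkg| pkg.get("name"))
        .and_then(|name| name.as_str());
    assert_eq!(name, Some("demo"));

    // The value round-trips through the existing serializer as usual.
    let rendered = toml::to_string(&manifest).unwrap();
    assert!(rendered.contains("name = \"demo\""));
}
```

Because the invocation expands to a plain `toml::Value`, whatever already works on a parsed document (table lookups, re-serialization, serde-based conversion) should apply unchanged to values built this way.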