author    David Tolnay <dtolnay@gmail.com>  2017-11-12 17:26:09 -0800
committer David Tolnay <dtolnay@gmail.com>  2017-11-12 19:33:11 -0800
commit    3ad6e71f53a87215fb5286bcf87de15524699561 (patch)
tree      f9ac5154af7984a7c65b903869487c11baee3596 /test-suite/tests
parent    3b77b5fb6520c7bf13be6308978efa8a185d0c12 (diff)
download  milf-rs-3ad6e71f53a87215fb5286bcf87de15524699561.tar.gz
          milf-rs-3ad6e71f53a87215fb5286bcf87de15524699561.zip
Move tests into their own crate
Diffstat (limited to 'test-suite/tests')
-rw-r--r-- test-suite/tests/README.md | 1
-rw-r--r-- test-suite/tests/backcompat.rs | 19
-rw-r--r-- test-suite/tests/datetime.rs | 58
-rw-r--r-- test-suite/tests/display-tricky.rs | 49
-rw-r--r-- test-suite/tests/display.rs | 103
-rw-r--r-- test-suite/tests/formatting.rs | 54
-rw-r--r-- test-suite/tests/invalid-encoder-misc.rs | 14
-rw-r--r-- test-suite/tests/invalid-encoder/array-mixed-types-ints-and-floats.json | 15
-rw-r--r-- test-suite/tests/invalid-misc.rs | 17
-rw-r--r-- test-suite/tests/invalid.rs | 98
-rw-r--r-- test-suite/tests/invalid/array-mixed-types-arrays-and-ints.toml | 1
-rw-r--r-- test-suite/tests/invalid/array-mixed-types-ints-and-floats.toml | 1
-rw-r--r-- test-suite/tests/invalid/array-mixed-types-strings-and-ints.toml | 1
-rw-r--r-- test-suite/tests/invalid/datetime-malformed-no-leads.toml | 1
-rw-r--r-- test-suite/tests/invalid/datetime-malformed-no-secs.toml | 1
-rw-r--r-- test-suite/tests/invalid/datetime-malformed-no-t.toml | 1
-rw-r--r-- test-suite/tests/invalid/datetime-malformed-with-milli.toml | 1
-rw-r--r-- test-suite/tests/invalid/duplicate-key-table.toml | 5
-rw-r--r-- test-suite/tests/invalid/duplicate-keys.toml | 2
-rw-r--r-- test-suite/tests/invalid/duplicate-tables.toml | 2
-rw-r--r-- test-suite/tests/invalid/empty-implicit-table.toml | 1
-rw-r--r-- test-suite/tests/invalid/empty-table.toml | 1
-rw-r--r-- test-suite/tests/invalid/float-no-leading-zero.toml | 2
-rw-r--r-- test-suite/tests/invalid/float-no-trailing-digits.toml | 2
-rw-r--r-- test-suite/tests/invalid/key-after-array.toml | 1
-rw-r--r-- test-suite/tests/invalid/key-after-table.toml | 1
-rw-r--r-- test-suite/tests/invalid/key-empty.toml | 1
-rw-r--r-- test-suite/tests/invalid/key-hash.toml | 1
-rw-r--r-- test-suite/tests/invalid/key-newline.toml | 2
-rw-r--r-- test-suite/tests/invalid/key-open-bracket.toml | 1
-rw-r--r-- test-suite/tests/invalid/key-single-open-bracket.toml | 1
-rw-r--r-- test-suite/tests/invalid/key-space.toml | 1
-rw-r--r-- test-suite/tests/invalid/key-start-bracket.toml | 3
-rw-r--r-- test-suite/tests/invalid/key-two-equals.toml | 1
-rw-r--r-- test-suite/tests/invalid/string-bad-byte-escape.toml | 1
-rw-r--r-- test-suite/tests/invalid/string-bad-escape.toml | 1
-rw-r--r-- test-suite/tests/invalid/string-byte-escapes.toml | 1
-rw-r--r-- test-suite/tests/invalid/string-no-close.toml | 1
-rw-r--r-- test-suite/tests/invalid/table-array-implicit.toml | 14
-rw-r--r-- test-suite/tests/invalid/table-array-malformed-bracket.toml | 2
-rw-r--r-- test-suite/tests/invalid/table-array-malformed-empty.toml | 2
-rw-r--r-- test-suite/tests/invalid/table-empty.toml | 1
-rw-r--r-- test-suite/tests/invalid/table-nested-brackets-close.toml | 2
-rw-r--r-- test-suite/tests/invalid/table-nested-brackets-open.toml | 2
-rw-r--r-- test-suite/tests/invalid/table-whitespace.toml | 1
-rw-r--r-- test-suite/tests/invalid/table-with-pound.toml | 2
-rw-r--r-- test-suite/tests/invalid/text-after-array-entries.toml | 4
-rw-r--r-- test-suite/tests/invalid/text-after-integer.toml | 1
-rw-r--r-- test-suite/tests/invalid/text-after-string.toml | 1
-rw-r--r-- test-suite/tests/invalid/text-after-table.toml | 1
-rw-r--r-- test-suite/tests/invalid/text-before-array-separator.toml | 4
-rw-r--r-- test-suite/tests/invalid/text-in-array.toml | 5
-rw-r--r-- test-suite/tests/parser.rs | 495
-rw-r--r-- test-suite/tests/pretty.rs | 308
-rw-r--r-- test-suite/tests/serde.rs | 578
-rw-r--r-- test-suite/tests/tables-last.rs | 30
-rw-r--r-- test-suite/tests/valid.rs | 249
-rw-r--r-- test-suite/tests/valid/array-empty.json | 11
-rw-r--r-- test-suite/tests/valid/array-empty.toml | 1
-rw-r--r-- test-suite/tests/valid/array-nospaces.json | 10
-rw-r--r-- test-suite/tests/valid/array-nospaces.toml | 1
-rw-r--r-- test-suite/tests/valid/arrays-hetergeneous.json | 19
-rw-r--r-- test-suite/tests/valid/arrays-hetergeneous.toml | 1
-rw-r--r-- test-suite/tests/valid/arrays-nested.json | 13
-rw-r--r-- test-suite/tests/valid/arrays-nested.toml | 1
-rw-r--r-- test-suite/tests/valid/arrays.json | 34
-rw-r--r-- test-suite/tests/valid/arrays.toml | 8
-rw-r--r-- test-suite/tests/valid/bool.json | 4
-rw-r--r-- test-suite/tests/valid/bool.toml | 2
-rw-r--r-- test-suite/tests/valid/comments-everywhere.json | 12
-rw-r--r-- test-suite/tests/valid/comments-everywhere.toml | 24
-rw-r--r-- test-suite/tests/valid/datetime-truncate.json | 6
-rw-r--r-- test-suite/tests/valid/datetime-truncate.toml | 1
-rw-r--r-- test-suite/tests/valid/datetime.json | 3
-rw-r--r-- test-suite/tests/valid/datetime.toml | 1
-rw-r--r-- test-suite/tests/valid/empty.json | 1
-rw-r--r-- test-suite/tests/valid/empty.toml | 0
-rw-r--r-- test-suite/tests/valid/example-bom.toml | 5
-rw-r--r-- test-suite/tests/valid/example-v0.3.0.json | 1
-rw-r--r-- test-suite/tests/valid/example-v0.3.0.toml | 182
-rw-r--r-- test-suite/tests/valid/example-v0.4.0.json | 1
-rw-r--r-- test-suite/tests/valid/example-v0.4.0.toml | 235
-rw-r--r-- test-suite/tests/valid/example.json | 14
-rw-r--r-- test-suite/tests/valid/example.toml | 5
-rw-r--r-- test-suite/tests/valid/example2.json | 1
-rw-r--r-- test-suite/tests/valid/example2.toml | 47
-rw-r--r-- test-suite/tests/valid/float.json | 4
-rw-r--r-- test-suite/tests/valid/float.toml | 2
-rw-r--r-- test-suite/tests/valid/hard_example.json | 1
-rw-r--r-- test-suite/tests/valid/hard_example.toml | 33
-rw-r--r-- test-suite/tests/valid/implicit-and-explicit-after.json | 10
-rw-r--r-- test-suite/tests/valid/implicit-and-explicit-after.toml | 5
-rw-r--r-- test-suite/tests/valid/implicit-and-explicit-before.json | 10
-rw-r--r-- test-suite/tests/valid/implicit-and-explicit-before.toml | 5
-rw-r--r-- test-suite/tests/valid/implicit-groups.json | 9
-rw-r--r-- test-suite/tests/valid/implicit-groups.toml | 2
-rw-r--r-- test-suite/tests/valid/integer.json | 4
-rw-r--r-- test-suite/tests/valid/integer.toml | 2
-rw-r--r-- test-suite/tests/valid/key-equals-nospace.json | 3
-rw-r--r-- test-suite/tests/valid/key-equals-nospace.toml | 1
-rw-r--r-- test-suite/tests/valid/key-quote-newline.json | 3
-rw-r--r-- test-suite/tests/valid/key-quote-newline.toml | 1
-rw-r--r-- test-suite/tests/valid/key-space.json | 3
-rw-r--r-- test-suite/tests/valid/key-space.toml | 1
-rw-r--r-- test-suite/tests/valid/key-special-chars.json | 5
-rw-r--r-- test-suite/tests/valid/key-special-chars.toml | 1
-rw-r--r-- test-suite/tests/valid/key-with-pound.json | 3
-rw-r--r-- test-suite/tests/valid/key-with-pound.toml | 1
-rw-r--r-- test-suite/tests/valid/long-float.json | 4
-rw-r--r-- test-suite/tests/valid/long-float.toml | 2
-rw-r--r-- test-suite/tests/valid/long-integer.json | 4
-rw-r--r-- test-suite/tests/valid/long-integer.toml | 2
-rw-r--r-- test-suite/tests/valid/multiline-string.json | 30
-rw-r--r-- test-suite/tests/valid/multiline-string.toml | 23
-rw-r--r-- test-suite/tests/valid/raw-multiline-string.json | 14
-rw-r--r-- test-suite/tests/valid/raw-multiline-string.toml | 9
-rw-r--r-- test-suite/tests/valid/raw-string.json | 30
-rw-r--r-- test-suite/tests/valid/raw-string.toml | 7
-rw-r--r-- test-suite/tests/valid/string-empty.json | 6
-rw-r--r-- test-suite/tests/valid/string-empty.toml | 1
-rw-r--r-- test-suite/tests/valid/string-escapes.json | 50
-rw-r--r-- test-suite/tests/valid/string-escapes.toml | 12
-rw-r--r-- test-suite/tests/valid/string-simple.json | 6
-rw-r--r-- test-suite/tests/valid/string-simple.toml | 1
-rw-r--r-- test-suite/tests/valid/string-with-pound.json | 7
-rw-r--r-- test-suite/tests/valid/string-with-pound.toml | 2
-rw-r--r-- test-suite/tests/valid/table-array-implicit.json | 7
-rw-r--r-- test-suite/tests/valid/table-array-implicit.toml | 2
-rw-r--r-- test-suite/tests/valid/table-array-many.json | 16
-rw-r--r-- test-suite/tests/valid/table-array-many.toml | 11
-rw-r--r-- test-suite/tests/valid/table-array-nest-no-keys.json | 14
-rw-r--r-- test-suite/tests/valid/table-array-nest-no-keys.toml | 6
-rw-r--r-- test-suite/tests/valid/table-array-nest.json | 18
-rw-r--r-- test-suite/tests/valid/table-array-nest.toml | 17
-rw-r--r-- test-suite/tests/valid/table-array-one.json | 8
-rw-r--r-- test-suite/tests/valid/table-array-one.toml | 3
-rw-r--r-- test-suite/tests/valid/table-empty.json | 3
-rw-r--r-- test-suite/tests/valid/table-empty.toml | 1
-rw-r--r-- test-suite/tests/valid/table-multi-empty.json | 5
-rw-r--r-- test-suite/tests/valid/table-multi-empty.toml | 5
-rw-r--r-- test-suite/tests/valid/table-sub-empty.json | 3
-rw-r--r-- test-suite/tests/valid/table-sub-empty.toml | 2
-rw-r--r-- test-suite/tests/valid/table-whitespace.json | 3
-rw-r--r-- test-suite/tests/valid/table-whitespace.toml | 1
-rw-r--r-- test-suite/tests/valid/table-with-pound.json | 5
-rw-r--r-- test-suite/tests/valid/table-with-pound.toml | 2
-rw-r--r-- test-suite/tests/valid/unicode-escape.json | 5
-rw-r--r-- test-suite/tests/valid/unicode-escape.toml | 3
-rw-r--r-- test-suite/tests/valid/unicode-literal.json | 3
-rw-r--r-- test-suite/tests/valid/unicode-literal.toml | 1
150 files changed, 3276 insertions, 0 deletions
diff --git a/test-suite/tests/README.md b/test-suite/tests/README.md
new file mode 100644
index 0000000..ebbc01c
--- /dev/null
+++ b/test-suite/tests/README.md
@@ -0,0 +1 @@
+Tests are from https://github.com/BurntSushi/toml-test
diff --git a/test-suite/tests/backcompat.rs b/test-suite/tests/backcompat.rs
new file mode 100644
index 0000000..1b3f599
--- /dev/null
+++ b/test-suite/tests/backcompat.rs
@@ -0,0 +1,19 @@
+extern crate toml;
+extern crate serde;
+
+use serde::de::Deserialize;
+
+#[test]
+fn main() {
+ let s = "
+ [a] foo = 1
+ [[b]] foo = 1
+ ";
+ assert!(s.parse::<toml::Value>().is_err());
+
+ let mut d = toml::de::Deserializer::new(s);
+ d.set_require_newline_after_table(false);
+ let value = toml::Value::deserialize(&mut d).unwrap();
+ assert_eq!(value["a"]["foo"].as_integer(), Some(1));
+ assert_eq!(value["b"][0]["foo"].as_integer(), Some(1));
+}
diff --git a/test-suite/tests/datetime.rs b/test-suite/tests/datetime.rs
new file mode 100644
index 0000000..948e863
--- /dev/null
+++ b/test-suite/tests/datetime.rs
@@ -0,0 +1,58 @@
+extern crate toml;
+
+use std::str::FromStr;
+
+use toml::Value;
+
+#[test]
+fn times() {
+ fn good(s: &str) {
+ let to_parse = format!("foo = {}", s);
+ let value = Value::from_str(&to_parse).unwrap();
+ assert_eq!(value["foo"].as_datetime().unwrap().to_string(), s);
+ }
+
+ good("1997-09-09T09:09:09Z");
+ good("1997-09-09T09:09:09+09:09");
+ good("1997-09-09T09:09:09-09:09");
+ good("1997-09-09T09:09:09");
+ good("1997-09-09");
+ good("09:09:09");
+ good("1997-09-09T09:09:09.09Z");
+ good("1997-09-09T09:09:09.09+09:09");
+ good("1997-09-09T09:09:09.09-09:09");
+ good("1997-09-09T09:09:09.09");
+ good("09:09:09.09");
+}
+
+#[test]
+fn bad_times() {
+ fn bad(s: &str) {
+ let to_parse = format!("foo = {}", s);
+ assert!(Value::from_str(&to_parse).is_err());
+ }
+
+ bad("199-09-09");
+ bad("199709-09");
+ bad("1997-9-09");
+ bad("1997-09-9");
+ bad("1997-09-0909:09:09");
+ bad("1997-09-09T09:09:09.");
+ bad("T");
+ bad("T.");
+ bad("TZ");
+ bad("1997-09-09T09:09:09.09+");
+ bad("1997-09-09T09:09:09.09+09");
+ bad("1997-09-09T09:09:09.09+09:9");
+ bad("1997-09-09T09:09:09.09+0909");
+ bad("1997-09-09T09:09:09.09-");
+ bad("1997-09-09T09:09:09.09-09");
+ bad("1997-09-09T09:09:09.09-09:9");
+ bad("1997-09-09T09:09:09.09-0909");
+
+ bad("1997-00-09T09:09:09.09Z");
+ bad("1997-09-00T09:09:09.09Z");
+ bad("1997-09-09T30:09:09.09Z");
+ bad("1997-09-09T12:69:09.09Z");
+ bad("1997-09-09T12:09:69.09Z");
+}
diff --git a/test-suite/tests/display-tricky.rs b/test-suite/tests/display-tricky.rs
new file mode 100644
index 0000000..069e0f9
--- /dev/null
+++ b/test-suite/tests/display-tricky.rs
@@ -0,0 +1,49 @@
+extern crate toml;
+#[macro_use] extern crate serde_derive;
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct Recipe {
+ pub name: String,
+ pub description: Option<String>,
+ #[serde(default)]
+ pub modules: Vec<Modules>,
+ #[serde(default)]
+ pub packages: Vec<Packages>
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct Modules {
+ pub name: String,
+ pub version: Option<String>
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct Packages {
+ pub name: String,
+ pub version: Option<String>
+}
+
+#[test]
+fn both_ends() {
+ let recipe_works = toml::from_str::<Recipe>(r#"
+ name = "testing"
+ description = "example"
+ modules = []
+
+ [[packages]]
+ name = "base"
+ "#).unwrap();
+ toml::to_string(&recipe_works).unwrap();
+
+ let recipe_fails = toml::from_str::<Recipe>(r#"
+ name = "testing"
+ description = "example"
+ packages = []
+
+ [[modules]]
+ name = "base"
+ "#).unwrap();
+
+ let recipe_toml = toml::Value::try_from(recipe_fails).unwrap();
+ recipe_toml.to_string();
+}
diff --git a/test-suite/tests/display.rs b/test-suite/tests/display.rs
new file mode 100644
index 0000000..ca4fdd8
--- /dev/null
+++ b/test-suite/tests/display.rs
@@ -0,0 +1,103 @@
+extern crate toml;
+
+use std::collections::BTreeMap;
+
+use toml::Value::{String, Integer, Float, Boolean, Array, Table};
+
+macro_rules! map( ($($k:expr => $v:expr),*) => ({
+ let mut _m = BTreeMap::new();
+ $(_m.insert($k.to_string(), $v);)*
+ _m
+}) );
+
+#[test]
+fn simple_show() {
+ assert_eq!(String("foo".to_string()).to_string(),
+ "\"foo\"");
+ assert_eq!(Integer(10).to_string(),
+ "10");
+ assert_eq!(Float(10.0).to_string(),
+ "10.0");
+ assert_eq!(Float(2.4).to_string(),
+ "2.4");
+ assert_eq!(Boolean(true).to_string(),
+ "true");
+ assert_eq!(Array(vec![]).to_string(),
+ "[]");
+ assert_eq!(Array(vec![Integer(1), Integer(2)]).to_string(),
+ "[1, 2]");
+}
+
+#[test]
+fn table() {
+ assert_eq!(Table(map! { }).to_string(),
+ "");
+ assert_eq!(Table(map! {
+ "test" => Integer(2),
+ "test2" => Integer(3) }).to_string(),
+ "test = 2\ntest2 = 3\n");
+ assert_eq!(Table(map! {
+ "test" => Integer(2),
+ "test2" => Table(map! {
+ "test" => String("wut".to_string())
+ })
+ }).to_string(),
+ "test = 2\n\
+ \n\
+ [test2]\n\
+ test = \"wut\"\n");
+ assert_eq!(Table(map! {
+ "test" => Integer(2),
+ "test2" => Table(map! {
+ "test" => String("wut".to_string())
+ })
+ }).to_string(),
+ "test = 2\n\
+ \n\
+ [test2]\n\
+ test = \"wut\"\n");
+ assert_eq!(Table(map! {
+ "test" => Integer(2),
+ "test2" => Array(vec![Table(map! {
+ "test" => String("wut".to_string())
+ })])
+ }).to_string(),
+ "test = 2\n\
+ \n\
+ [[test2]]\n\
+ test = \"wut\"\n");
+ assert_eq!(Table(map! {
+ "foo.bar" => Integer(2),
+ "foo\"bar" => Integer(2)
+ }).to_string(),
+ "\"foo\\\"bar\" = 2\n\
+ \"foo.bar\" = 2\n");
+ assert_eq!(Table(map! {
+ "test" => Integer(2),
+ "test2" => Array(vec![Table(map! {
+ "test" => Array(vec![Integer(2)])
+ })])
+ }).to_string(),
+ "test = 2\n\
+ \n\
+ [[test2]]\n\
+ test = [2]\n");
+ let table = Table(map! {
+ "test" => Integer(2),
+ "test2" => Array(vec![Table(map! {
+ "test" => Array(vec![Array(vec![Integer(2), Integer(3)]),
+ Array(vec![String("foo".to_string()), String("bar".to_string())])])
+ })])
+ });
+ assert_eq!(table.to_string(),
+ "test = 2\n\
+ \n\
+ [[test2]]\n\
+ test = [[2, 3], [\"foo\", \"bar\"]]\n");
+ assert_eq!(Table(map! {
+ "test" => Array(vec![Integer(2)]),
+ "test2" => Integer(2)
+ }).to_string(),
+ "test = [2]\n\
+ test2 = 2\n");
+}
diff --git a/test-suite/tests/formatting.rs b/test-suite/tests/formatting.rs
new file mode 100644
index 0000000..4ba1418
--- /dev/null
+++ b/test-suite/tests/formatting.rs
@@ -0,0 +1,54 @@
+#[macro_use]
+extern crate serde_derive;
+extern crate toml;
+
+use toml::to_string;
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
+struct User {
+ pub name: String,
+ pub surname: String,
+}
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
+struct Users {
+ pub user: Vec<User>,
+}
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
+struct TwoUsers {
+ pub user0: User,
+ pub user1: User,
+}
+
+#[test]
+fn no_unnecessary_newlines_array() {
+ assert!(!to_string(&Users {
+ user: vec![
+ User {
+ name: "John".to_string(),
+ surname: "Doe".to_string(),
+ },
+ User {
+ name: "Jane".to_string(),
+ surname: "Dough".to_string(),
+ },
+ ],
+ }).unwrap()
+ .starts_with("\n"));
+}
+
+#[test]
+fn no_unnecessary_newlines_table() {
+ assert!(!to_string(&TwoUsers {
+ user0: User {
+ name: "John".to_string(),
+ surname: "Doe".to_string(),
+ },
+ user1: User {
+ name: "Jane".to_string(),
+ surname: "Dough".to_string(),
+ },
+ }).unwrap()
+ .starts_with("\n"));
+}
diff --git a/test-suite/tests/invalid-encoder-misc.rs b/test-suite/tests/invalid-encoder-misc.rs
new file mode 100644
index 0000000..272f58f
--- /dev/null
+++ b/test-suite/tests/invalid-encoder-misc.rs
@@ -0,0 +1,14 @@
+extern crate toml;
+
+use std::f64;
+
+#[test]
+fn test_invalid_float_encode() {
+ fn bad(value: toml::Value) {
+ assert!(toml::to_string(&value).is_err());
+ }
+
+ bad(toml::Value::Float(f64::INFINITY));
+ bad(toml::Value::Float(f64::NEG_INFINITY));
+ bad(toml::Value::Float(f64::NAN));
+}
diff --git a/test-suite/tests/invalid-encoder/array-mixed-types-ints-and-floats.json b/test-suite/tests/invalid-encoder/array-mixed-types-ints-and-floats.json
new file mode 100644
index 0000000..2d42ead
--- /dev/null
+++ b/test-suite/tests/invalid-encoder/array-mixed-types-ints-and-floats.json
@@ -0,0 +1,15 @@
+{
+ "ints-and-floats": {
+ "type": "array",
+ "value": [
+ {
+ "type": "integer",
+ "value": "1"
+ },
+ {
+ "type": "float",
+ "value": "1.1"
+ }
+ ]
+ }
+}
diff --git a/test-suite/tests/invalid-misc.rs b/test-suite/tests/invalid-misc.rs
new file mode 100644
index 0000000..bb70b97
--- /dev/null
+++ b/test-suite/tests/invalid-misc.rs
@@ -0,0 +1,17 @@
+extern crate toml;
+
+#[test]
+fn bad() {
+ fn bad(s: &str) {
+ assert!(s.parse::<toml::Value>().is_err());
+ }
+
+ bad("a = 01");
+ bad("a = 1__1");
+ bad("a = 1_");
+ bad("''");
+ bad("a = nan");
+ bad("a = -inf");
+ bad("a = inf");
+ bad("a = 9e99999");
+}
diff --git a/test-suite/tests/invalid.rs b/test-suite/tests/invalid.rs
new file mode 100644
index 0000000..4679684
--- /dev/null
+++ b/test-suite/tests/invalid.rs
@@ -0,0 +1,98 @@
+extern crate toml;
+
+fn run(toml: &str) {
+ println!("test if invalid:\n{}", toml);
+ if let Ok(e) = toml.parse::<toml::Value>() {
+ panic!("parsed to: {:#?}", e);
+ }
+}
+
+macro_rules! test( ($name:ident, $toml:expr) => (
+ #[test]
+ fn $name() { run($toml); }
+) );
+
+test!(array_mixed_types_arrays_and_ints,
+ include_str!("invalid/array-mixed-types-arrays-and-ints.toml"));
+test!(array_mixed_types_ints_and_floats,
+ include_str!("invalid/array-mixed-types-ints-and-floats.toml"));
+test!(array_mixed_types_strings_and_ints,
+ include_str!("invalid/array-mixed-types-strings-and-ints.toml"));
+test!(datetime_malformed_no_leads,
+ include_str!("invalid/datetime-malformed-no-leads.toml"));
+test!(datetime_malformed_no_secs,
+ include_str!("invalid/datetime-malformed-no-secs.toml"));
+test!(datetime_malformed_no_t,
+ include_str!("invalid/datetime-malformed-no-t.toml"));
+test!(datetime_malformed_with_milli,
+ include_str!("invalid/datetime-malformed-with-milli.toml"));
+test!(duplicate_keys,
+ include_str!("invalid/duplicate-keys.toml"));
+test!(duplicate_key_table,
+ include_str!("invalid/duplicate-key-table.toml"));
+test!(duplicate_tables,
+ include_str!("invalid/duplicate-tables.toml"));
+test!(empty_implicit_table,
+ include_str!("invalid/empty-implicit-table.toml"));
+test!(empty_table,
+ include_str!("invalid/empty-table.toml"));
+test!(float_no_leading_zero,
+ include_str!("invalid/float-no-leading-zero.toml"));
+test!(float_no_trailing_digits,
+ include_str!("invalid/float-no-trailing-digits.toml"));
+test!(key_after_array,
+ include_str!("invalid/key-after-array.toml"));
+test!(key_after_table,
+ include_str!("invalid/key-after-table.toml"));
+test!(key_empty,
+ include_str!("invalid/key-empty.toml"));
+test!(key_hash,
+ include_str!("invalid/key-hash.toml"));
+test!(key_newline,
+ include_str!("invalid/key-newline.toml"));
+test!(key_open_bracket,
+ include_str!("invalid/key-open-bracket.toml"));
+test!(key_single_open_bracket,
+ include_str!("invalid/key-single-open-bracket.toml"));
+test!(key_space,
+ include_str!("invalid/key-space.toml"));
+test!(key_start_bracket,
+ include_str!("invalid/key-start-bracket.toml"));
+test!(key_two_equals,
+ include_str!("invalid/key-two-equals.toml"));
+test!(string_bad_byte_escape,
+ include_str!("invalid/string-bad-byte-escape.toml"));
+test!(string_bad_escape,
+ include_str!("invalid/string-bad-escape.toml"));
+test!(string_byte_escapes,
+ include_str!("invalid/string-byte-escapes.toml"));
+test!(string_no_close,
+ include_str!("invalid/string-no-close.toml"));
+test!(table_array_implicit,
+ include_str!("invalid/table-array-implicit.toml"));
+test!(table_array_malformed_bracket,
+ include_str!("invalid/table-array-malformed-bracket.toml"));
+test!(table_array_malformed_empty,
+ include_str!("invalid/table-array-malformed-empty.toml"));
+test!(table_empty,
+ include_str!("invalid/table-empty.toml"));
+test!(table_nested_brackets_close,
+ include_str!("invalid/table-nested-brackets-close.toml"));
+test!(table_nested_brackets_open,
+ include_str!("invalid/table-nested-brackets-open.toml"));
+test!(table_whitespace,
+ include_str!("invalid/table-whitespace.toml"));
+test!(table_with_pound,
+ include_str!("invalid/table-with-pound.toml"));
+test!(text_after_array_entries,
+ include_str!("invalid/text-after-array-entries.toml"));
+test!(text_after_integer,
+ include_str!("invalid/text-after-integer.toml"));
+test!(text_after_string,
+ include_str!("invalid/text-after-string.toml"));
+test!(text_after_table,
+ include_str!("invalid/text-after-table.toml"));
+test!(text_before_array_separator,
+ include_str!("invalid/text-before-array-separator.toml"));
+test!(text_in_array,
+ include_str!("invalid/text-in-array.toml"));
diff --git a/test-suite/tests/invalid/array-mixed-types-arrays-and-ints.toml b/test-suite/tests/invalid/array-mixed-types-arrays-and-ints.toml
new file mode 100644
index 0000000..051ec73
--- /dev/null
+++ b/test-suite/tests/invalid/array-mixed-types-arrays-and-ints.toml
@@ -0,0 +1 @@
+arrays-and-ints = [1, ["Arrays are not integers."]]
diff --git a/test-suite/tests/invalid/array-mixed-types-ints-and-floats.toml b/test-suite/tests/invalid/array-mixed-types-ints-and-floats.toml
new file mode 100644
index 0000000..a5aa9b7
--- /dev/null
+++ b/test-suite/tests/invalid/array-mixed-types-ints-and-floats.toml
@@ -0,0 +1 @@
+ints-and-floats = [1, 1.1]
diff --git a/test-suite/tests/invalid/array-mixed-types-strings-and-ints.toml b/test-suite/tests/invalid/array-mixed-types-strings-and-ints.toml
new file mode 100644
index 0000000..f348308
--- /dev/null
+++ b/test-suite/tests/invalid/array-mixed-types-strings-and-ints.toml
@@ -0,0 +1 @@
+strings-and-ints = ["hi", 42]
diff --git a/test-suite/tests/invalid/datetime-malformed-no-leads.toml b/test-suite/tests/invalid/datetime-malformed-no-leads.toml
new file mode 100644
index 0000000..123f173
--- /dev/null
+++ b/test-suite/tests/invalid/datetime-malformed-no-leads.toml
@@ -0,0 +1 @@
+no-leads = 1987-7-05T17:45:00Z
diff --git a/test-suite/tests/invalid/datetime-malformed-no-secs.toml b/test-suite/tests/invalid/datetime-malformed-no-secs.toml
new file mode 100644
index 0000000..ba93900
--- /dev/null
+++ b/test-suite/tests/invalid/datetime-malformed-no-secs.toml
@@ -0,0 +1 @@
+no-secs = 1987-07-05T17:45Z
diff --git a/test-suite/tests/invalid/datetime-malformed-no-t.toml b/test-suite/tests/invalid/datetime-malformed-no-t.toml
new file mode 100644
index 0000000..617e3c5
--- /dev/null
+++ b/test-suite/tests/invalid/datetime-malformed-no-t.toml
@@ -0,0 +1 @@
+no-t = 1987-07-0517:45:00Z
diff --git a/test-suite/tests/invalid/datetime-malformed-with-milli.toml b/test-suite/tests/invalid/datetime-malformed-with-milli.toml
new file mode 100644
index 0000000..eef792f
--- /dev/null
+++ b/test-suite/tests/invalid/datetime-malformed-with-milli.toml
@@ -0,0 +1 @@
+with-milli = 1987-07-5T17:45:00.12Z
diff --git a/test-suite/tests/invalid/duplicate-key-table.toml b/test-suite/tests/invalid/duplicate-key-table.toml
new file mode 100644
index 0000000..cedf05f
--- /dev/null
+++ b/test-suite/tests/invalid/duplicate-key-table.toml
@@ -0,0 +1,5 @@
+[fruit]
+type = "apple"
+
+[fruit.type]
+apple = "yes"
diff --git a/test-suite/tests/invalid/duplicate-keys.toml b/test-suite/tests/invalid/duplicate-keys.toml
new file mode 100644
index 0000000..9b5aee0
--- /dev/null
+++ b/test-suite/tests/invalid/duplicate-keys.toml
@@ -0,0 +1,2 @@
+dupe = false
+dupe = true
diff --git a/test-suite/tests/invalid/duplicate-tables.toml b/test-suite/tests/invalid/duplicate-tables.toml
new file mode 100644
index 0000000..8ddf49b
--- /dev/null
+++ b/test-suite/tests/invalid/duplicate-tables.toml
@@ -0,0 +1,2 @@
+[a]
+[a]
diff --git a/test-suite/tests/invalid/empty-implicit-table.toml b/test-suite/tests/invalid/empty-implicit-table.toml
new file mode 100644
index 0000000..0cc36d0
--- /dev/null
+++ b/test-suite/tests/invalid/empty-implicit-table.toml
@@ -0,0 +1 @@
+[naughty..naughty]
diff --git a/test-suite/tests/invalid/empty-table.toml b/test-suite/tests/invalid/empty-table.toml
new file mode 100644
index 0000000..fe51488
--- /dev/null
+++ b/test-suite/tests/invalid/empty-table.toml
@@ -0,0 +1 @@
+[]
diff --git a/test-suite/tests/invalid/float-no-leading-zero.toml b/test-suite/tests/invalid/float-no-leading-zero.toml
new file mode 100644
index 0000000..cab76bf
--- /dev/null
+++ b/test-suite/tests/invalid/float-no-leading-zero.toml
@@ -0,0 +1,2 @@
+answer = .12345
+neganswer = -.12345
diff --git a/test-suite/tests/invalid/float-no-trailing-digits.toml b/test-suite/tests/invalid/float-no-trailing-digits.toml
new file mode 100644
index 0000000..cbff2d0
--- /dev/null
+++ b/test-suite/tests/invalid/float-no-trailing-digits.toml
@@ -0,0 +1,2 @@
+answer = 1.
+neganswer = -1.
diff --git a/test-suite/tests/invalid/key-after-array.toml b/test-suite/tests/invalid/key-after-array.toml
new file mode 100644
index 0000000..5c1a1b0
--- /dev/null
+++ b/test-suite/tests/invalid/key-after-array.toml
@@ -0,0 +1 @@
+[[agencies]] owner = "S Cjelli"
diff --git a/test-suite/tests/invalid/key-after-table.toml b/test-suite/tests/invalid/key-after-table.toml
new file mode 100644
index 0000000..4bc8213
--- /dev/null
+++ b/test-suite/tests/invalid/key-after-table.toml
@@ -0,0 +1 @@
+[history] guard = "sleeping"
diff --git a/test-suite/tests/invalid/key-empty.toml b/test-suite/tests/invalid/key-empty.toml
new file mode 100644
index 0000000..09f998f
--- /dev/null
+++ b/test-suite/tests/invalid/key-empty.toml
@@ -0,0 +1 @@
+ = 1
diff --git a/test-suite/tests/invalid/key-hash.toml b/test-suite/tests/invalid/key-hash.toml
new file mode 100644
index 0000000..e321b1f
--- /dev/null
+++ b/test-suite/tests/invalid/key-hash.toml
@@ -0,0 +1 @@
+a# = 1
diff --git a/test-suite/tests/invalid/key-newline.toml b/test-suite/tests/invalid/key-newline.toml
new file mode 100644
index 0000000..707aad5
--- /dev/null
+++ b/test-suite/tests/invalid/key-newline.toml
@@ -0,0 +1,2 @@
+a
+= 1
diff --git a/test-suite/tests/invalid/key-open-bracket.toml b/test-suite/tests/invalid/key-open-bracket.toml
new file mode 100644
index 0000000..f0aeb16
--- /dev/null
+++ b/test-suite/tests/invalid/key-open-bracket.toml
@@ -0,0 +1 @@
+[abc = 1
diff --git a/test-suite/tests/invalid/key-single-open-bracket.toml b/test-suite/tests/invalid/key-single-open-bracket.toml
new file mode 100644
index 0000000..8e2f0be
--- /dev/null
+++ b/test-suite/tests/invalid/key-single-open-bracket.toml
@@ -0,0 +1 @@
+[
\ No newline at end of file
diff --git a/test-suite/tests/invalid/key-space.toml b/test-suite/tests/invalid/key-space.toml
new file mode 100644
index 0000000..201806d
--- /dev/null
+++ b/test-suite/tests/invalid/key-space.toml
@@ -0,0 +1 @@
+a b = 1
\ No newline at end of file
diff --git a/test-suite/tests/invalid/key-start-bracket.toml b/test-suite/tests/invalid/key-start-bracket.toml
new file mode 100644
index 0000000..e0597ae
--- /dev/null
+++ b/test-suite/tests/invalid/key-start-bracket.toml
@@ -0,0 +1,3 @@
+[a]
+[xyz = 5
+[b]
diff --git a/test-suite/tests/invalid/key-two-equals.toml b/test-suite/tests/invalid/key-two-equals.toml
new file mode 100644
index 0000000..25a0378
--- /dev/null
+++ b/test-suite/tests/invalid/key-two-equals.toml
@@ -0,0 +1 @@
+key= = 1
diff --git a/test-suite/tests/invalid/string-bad-byte-escape.toml b/test-suite/tests/invalid/string-bad-byte-escape.toml
new file mode 100644
index 0000000..4c7be59
--- /dev/null
+++ b/test-suite/tests/invalid/string-bad-byte-escape.toml
@@ -0,0 +1 @@
+naughty = "\xAg"
diff --git a/test-suite/tests/invalid/string-bad-escape.toml b/test-suite/tests/invalid/string-bad-escape.toml
new file mode 100644
index 0000000..60acb0c
--- /dev/null
+++ b/test-suite/tests/invalid/string-bad-escape.toml
@@ -0,0 +1 @@
+invalid-escape = "This string has a bad \a escape character."
diff --git a/test-suite/tests/invalid/string-byte-escapes.toml b/test-suite/tests/invalid/string-byte-escapes.toml
new file mode 100644
index 0000000..e94452a
--- /dev/null
+++ b/test-suite/tests/invalid/string-byte-escapes.toml
@@ -0,0 +1 @@
+answer = "\x33"
diff --git a/test-suite/tests/invalid/string-no-close.toml b/test-suite/tests/invalid/string-no-close.toml
new file mode 100644
index 0000000..0c292fc
--- /dev/null
+++ b/test-suite/tests/invalid/string-no-close.toml
@@ -0,0 +1 @@
+no-ending-quote = "One time, at band camp
diff --git a/test-suite/tests/invalid/table-array-implicit.toml b/test-suite/tests/invalid/table-array-implicit.toml
new file mode 100644
index 0000000..05f2507
--- /dev/null
+++ b/test-suite/tests/invalid/table-array-implicit.toml
@@ -0,0 +1,14 @@
+# This test is a bit tricky. It should fail because the first use of
+# `[[albums.songs]]` without first declaring `albums` implies that `albums`
+# must be a table. The alternative would be quite weird. Namely, it wouldn't
+# comply with the TOML spec: "Each double-bracketed sub-table will belong to
+# the most *recently* defined table element *above* it."
+#
+# This is in contrast to the *valid* test, table-array-implicit where
+# `[[albums.songs]]` works by itself, so long as `[[albums]]` isn't declared
+# later. (Although, `[albums]` could be.)
+[[albums.songs]]
+name = "Glory Days"
+
+[[albums]]
+name = "Born in the USA"
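The ordering rule spelled out in the comment above can be exercised directly against the parser. The following standalone sketch is not part of this commit; it assumes only the same `toml::Value` parsing API that invalid.rs and valid.rs already use, and contrasts the rejected ordering with the accepted one:

// Sketch (not part of this commit): checks the ordering rule described in
// invalid/table-array-implicit.toml using the crate's public parsing API.
extern crate toml;

#[test]
fn table_array_implicit_ordering_sketch() {
    // `[[albums.songs]]` appears before any `[[albums]]`, so `albums` is
    // implicitly a plain table and the later `[[albums]]` must be rejected.
    let out_of_order = "
        [[albums.songs]]
        name = \"Glory Days\"

        [[albums]]
        name = \"Born in the USA\"
    ";
    assert!(out_of_order.parse::<toml::Value>().is_err());

    // Declaring `[[albums]]` first lets `[[albums.songs]]` attach to the most
    // recently defined `albums` element, so this document parses.
    let in_order = "
        [[albums]]
        name = \"Born in the USA\"

        [[albums.songs]]
        name = \"Glory Days\"
    ";
    assert!(in_order.parse::<toml::Value>().is_ok());
}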
diff --git a/test-suite/tests/invalid/table-array-malformed-bracket.toml b/test-suite/tests/invalid/table-array-malformed-bracket.toml
new file mode 100644
index 0000000..39c73b0
--- /dev/null
+++ b/test-suite/tests/invalid/table-array-malformed-bracket.toml
@@ -0,0 +1,2 @@
+[[albums]
+name = "Born to Run"
diff --git a/test-suite/tests/invalid/table-array-malformed-empty.toml b/test-suite/tests/invalid/table-array-malformed-empty.toml
new file mode 100644
index 0000000..a470ca3
--- /dev/null
+++ b/test-suite/tests/invalid/table-array-malformed-empty.toml
@@ -0,0 +1,2 @@
+[[]]
+name = "Born to Run"
diff --git a/test-suite/tests/invalid/table-empty.toml b/test-suite/tests/invalid/table-empty.toml
new file mode 100644
index 0000000..fe51488
--- /dev/null
+++ b/test-suite/tests/invalid/table-empty.toml
@@ -0,0 +1 @@
+[]
diff --git a/test-suite/tests/invalid/table-nested-brackets-close.toml b/test-suite/tests/invalid/table-nested-brackets-close.toml
new file mode 100644
index 0000000..c8b5a67
--- /dev/null
+++ b/test-suite/tests/invalid/table-nested-brackets-close.toml
@@ -0,0 +1,2 @@
+[a]b]
+zyx = 42
diff --git a/test-suite/tests/invalid/table-nested-brackets-open.toml b/test-suite/tests/invalid/table-nested-brackets-open.toml
new file mode 100644
index 0000000..246d7e9
--- /dev/null
+++ b/test-suite/tests/invalid/table-nested-brackets-open.toml
@@ -0,0 +1,2 @@
+[a[b]
+zyx = 42
diff --git a/test-suite/tests/invalid/table-whitespace.toml b/test-suite/tests/invalid/table-whitespace.toml
new file mode 100644
index 0000000..79bbcb1
--- /dev/null
+++ b/test-suite/tests/invalid/table-whitespace.toml
@@ -0,0 +1 @@
+[invalid key]
\ No newline at end of file
diff --git a/test-suite/tests/invalid/table-with-pound.toml b/test-suite/tests/invalid/table-with-pound.toml
new file mode 100644
index 0000000..0d8edb5
--- /dev/null
+++ b/test-suite/tests/invalid/table-with-pound.toml
@@ -0,0 +1,2 @@
+[key#group]
+answer = 42
\ No newline at end of file
diff --git a/test-suite/tests/invalid/text-after-array-entries.toml b/test-suite/tests/invalid/text-after-array-entries.toml
new file mode 100644
index 0000000..1a72890
--- /dev/null
+++ b/test-suite/tests/invalid/text-after-array-entries.toml
@@ -0,0 +1,4 @@
+array = [
+ "Is there life after an array separator?", No
+ "Entry"
+]
diff --git a/test-suite/tests/invalid/text-after-integer.toml b/test-suite/tests/invalid/text-after-integer.toml
new file mode 100644
index 0000000..42de7af
--- /dev/null
+++ b/test-suite/tests/invalid/text-after-integer.toml
@@ -0,0 +1 @@
+answer = 42 the ultimate answer?
diff --git a/test-suite/tests/invalid/text-after-string.toml b/test-suite/tests/invalid/text-after-string.toml
new file mode 100644
index 0000000..c92a6f1
--- /dev/null
+++ b/test-suite/tests/invalid/text-after-string.toml
@@ -0,0 +1 @@
+string = "Is there life after strings?" No.
diff --git a/test-suite/tests/invalid/text-after-table.toml b/test-suite/tests/invalid/text-after-table.toml
new file mode 100644
index 0000000..87da9db
--- /dev/null
+++ b/test-suite/tests/invalid/text-after-table.toml
@@ -0,0 +1 @@
+[error] this shouldn't be here
diff --git a/test-suite/tests/invalid/text-before-array-separator.toml b/test-suite/tests/invalid/text-before-array-separator.toml
new file mode 100644
index 0000000..9b06a39
--- /dev/null
+++ b/test-suite/tests/invalid/text-before-array-separator.toml
@@ -0,0 +1,4 @@
+array = [
+ "Is there life before an array separator?" No,
+ "Entry"
+]
diff --git a/test-suite/tests/invalid/text-in-array.toml b/test-suite/tests/invalid/text-in-array.toml
new file mode 100644
index 0000000..a6a6c42
--- /dev/null
+++ b/test-suite/tests/invalid/text-in-array.toml
@@ -0,0 +1,5 @@
+array = [
+ "Entry 1",
+ I don't belong,
+ "Entry 2",
+]
diff --git a/test-suite/tests/parser.rs b/test-suite/tests/parser.rs
new file mode 100644
index 0000000..2282416
--- /dev/null
+++ b/test-suite/tests/parser.rs
@@ -0,0 +1,495 @@
+extern crate toml;
+
+use toml::Value;
+
+macro_rules! bad {
+ ($s:expr, $msg:expr) => ({
+ match $s.parse::<Value>() {
+ Ok(s) => panic!("successfully parsed as {}", s),
+ Err(e) => {
+ let e = e.to_string();
+ assert!(e.contains($msg), "error: {}", e);
+ }
+ }
+ })
+}
+
+#[test]
+fn crlf() {
+ "\
+[project]\r\n\
+\r\n\
+name = \"splay\"\r\n\
+version = \"0.1.0\"\r\n\
+authors = [\"alex@crichton.co\"]\r\n\
+\r\n\
+[[lib]]\r\n\
+\r\n\
+path = \"lib.rs\"\r\n\
+name = \"splay\"\r\n\
+description = \"\"\"\
+A Rust implementation of a TAR file reader and writer. This library does not\r\n\
+currently handle compression, but it is abstract over all I/O readers and\r\n\
+writers. Additionally, great lengths are taken to ensure that the entire\r\n\
+contents are never required to be entirely resident in memory all at once.\r\n\
+\"\"\"\
+".parse::<Value>().unwrap();
+}
+
+#[test]
+fn fun_with_strings() {
+ let table = r#"
+bar = "\U00000000"
+key1 = "One\nTwo"
+key2 = """One\nTwo"""
+key3 = """
+One
+Two"""
+
+key4 = "The quick brown fox jumps over the lazy dog."
+key5 = """
+The quick brown \
+
+
+fox jumps over \
+the lazy dog."""
+key6 = """\
+ The quick brown \
+ fox jumps over \
+ the lazy dog.\
+ """
+# What you see is what you get.
+winpath = 'C:\Users\nodejs\templates'
+winpath2 = '\\ServerX\admin$\system32\'
+quoted = 'Tom "Dubs" Preston-Werner'
+regex = '<\i\c*\s*>'
+
+regex2 = '''I [dw]on't need \d{2} apples'''
+lines = '''
+The first newline is
+trimmed in raw strings.
+All other whitespace
+is preserved.
+'''
+"#.parse::<Value>().unwrap();
+ assert_eq!(table["bar"].as_str(), Some("\0"));
+ assert_eq!(table["key1"].as_str(), Some("One\nTwo"));
+ assert_eq!(table["key2"].as_str(), Some("One\nTwo"));
+ assert_eq!(table["key3"].as_str(), Some("One\nTwo"));
+
+ let msg = "The quick brown fox jumps over the lazy dog.";
+ assert_eq!(table["key4"].as_str(), Some(msg));
+ assert_eq!(table["key5"].as_str(), Some(msg));
+ assert_eq!(table["key6"].as_str(), Some(msg));
+
+ assert_eq!(table["winpath"].as_str(), Some(r"C:\Users\nodejs\templates"));
+ assert_eq!(table["winpath2"].as_str(), Some(r"\\ServerX\admin$\system32\"));
+ assert_eq!(table["quoted"].as_str(), Some(r#"Tom "Dubs" Preston-Werner"#));
+ assert_eq!(table["regex"].as_str(), Some(r"<\i\c*\s*>"));
+ assert_eq!(table["regex2"].as_str(), Some(r"I [dw]on't need \d{2} apples"));
+ assert_eq!(table["lines"].as_str(),
+ Some("The first newline is\n\
+ trimmed in raw strings.\n\
+ All other whitespace\n\
+ is preserved.\n"));
+}
+
+#[test]
+fn tables_in_arrays() {
+ let table = r#"
+[[foo]]
+#…
+[foo.bar]
+#…
+
+[[foo]] # ...
+#…
+[foo.bar]
+#...
+"#.parse::<Value>().unwrap();
+ table["foo"][0]["bar"].as_table().unwrap();
+ table["foo"][1]["bar"].as_table().unwrap();
+}
+
+#[test]
+fn empty_table() {
+ let table = r#"
+[foo]"#.parse::<Value>().unwrap();
+ table["foo"].as_table().unwrap();
+}
+
+#[test]
+fn fruit() {
+ let table = r#"
+[[fruit]]
+name = "apple"
+
+[fruit.physical]
+color = "red"
+shape = "round"
+
+[[fruit.variety]]
+name = "red delicious"
+
+[[fruit.variety]]
+name = "granny smith"
+
+[[fruit]]
+name = "banana"
+
+[[fruit.variety]]
+name = "plantain"
+"#.parse::<Value>().unwrap();
+ assert_eq!(table["fruit"][0]["name"].as_str(), Some("apple"));
+ assert_eq!(table["fruit"][0]["physical"]["color"].as_str(), Some("red"));
+ assert_eq!(table["fruit"][0]["physical"]["shape"].as_str(), Some("round"));
+ assert_eq!(table["fruit"][0]["variety"][0]["name"].as_str(), Some("red delicious"));
+ assert_eq!(table["fruit"][0]["variety"][1]["name"].as_str(), Some("granny smith"));
+ assert_eq!(table["fruit"][1]["name"].as_str(), Some("banana"));
+ assert_eq!(table["fruit"][1]["variety"][0]["name"].as_str(), Some("plantain"));
+}
+
+#[test]
+fn stray_cr() {
+ "\r".parse::<Value>().unwrap_err();
+ "a = [ \r ]".parse::<Value>().unwrap_err();
+ "a = \"\"\"\r\"\"\"".parse::<Value>().unwrap_err();
+ "a = \"\"\"\\ \r \"\"\"".parse::<Value>().unwrap_err();
+ "a = '''\r'''".parse::<Value>().unwrap_err();
+ "a = '\r'".parse::<Value>().unwrap_err();
+ "a = \"\r\"".parse::<Value>().unwrap_err();
+}
+
+#[test]
+fn blank_literal_string() {
+ let table = "foo = ''".parse::<Value>().unwrap();
+ assert_eq!(table["foo"].as_str(), Some(""));
+}
+
+#[test]
+fn many_blank() {
+ let table = "foo = \"\"\"\n\n\n\"\"\"".parse::<Value>().unwrap();
+ assert_eq!(table["foo"].as_str(), Some("\n\n"));
+}
+
+#[test]
+fn literal_eats_crlf() {
+ let table = "
+ foo = \"\"\"\\\r\n\"\"\"
+ bar = \"\"\"\\\r\n \r\n \r\n a\"\"\"
+ ".parse::<Value>().unwrap();
+ assert_eq!(table["foo"].as_str(), Some(""));
+ assert_eq!(table["bar"].as_str(), Some("a"));
+}
+
+#[test]
+fn string_no_newline() {
+ "a = \"\n\"".parse::<Value>().unwrap_err();
+ "a = '\n'".parse::<Value>().unwrap_err();
+}
+
+#[test]
+fn bad_leading_zeros() {
+ "a = 00".parse::<Value>().unwrap_err();
+ "a = -00".parse::<Value>().unwrap_err();
+ "a = +00".parse::<Value>().unwrap_err();
+ "a = 00.0".parse::<Value>().unwrap_err();
+ "a = -00.0".parse::<Value>().unwrap_err();
+ "a = +00.0".parse::<Value>().unwrap_err();
+ "a = 9223372036854775808".parse::<Value>().unwrap_err();
+ "a = -9223372036854775809".parse::<Value>().unwrap_err();
+}
+
+#[test]
+fn bad_floats() {
+ "a = 0.".parse::<Value>().unwrap_err();
+ "a = 0.e".parse::<Value>().unwrap_err();
+ "a = 0.E".parse::<Value>().unwrap_err();
+ "a = 0.0E".parse::<Value>().unwrap_err();
+ "a = 0.0e".parse::<Value>().unwrap_err();
+ "a = 0.0e-".parse::<Value>().unwrap_err();
+ "a = 0.0e+".parse::<Value>().unwrap_err();
+ "a = 0.0e+00".parse::<Value>().unwrap_err();
+}
+
+#[test]
+fn floats() {
+ macro_rules! t {
+ ($actual:expr, $expected:expr) => ({
+ let f = format!("foo = {}", $actual);
+ println!("{}", f);
+ let a = f.parse::<Value>().unwrap();
+ assert_eq!(a["foo"].as_float().unwrap(), $expected);
+ })
+ }
+
+ t!("1.0", 1.0);
+ t!("1.0e0", 1.0);
+ t!("1.0e+0", 1.0);
+ t!("1.0e-0", 1.0);
+ t!("1.001e-0", 1.001);
+ t!("2e10", 2e10);
+ t!("2e+10", 2e10);
+ t!("2e-10", 2e-10);
+ t!("2_0.0", 20.0);
+ t!("2_0.0_0e1_0", 20.0e10);
+ t!("2_0.1_0e1_0", 20.1e10);
+}
+
+#[test]
+fn bare_key_names() {
+ let a = "
+ foo = 3
+ foo_3 = 3
+ foo_-2--3--r23f--4-f2-4 = 3
+ _ = 3
+ - = 3
+ 8 = 8
+ \"a\" = 3
+ \"!\" = 3
+ \"a^b\" = 3
+ \"\\\"\" = 3
+ \"character encoding\" = \"value\"
+ 'ʎǝʞ' = \"value\"
+ ".parse::<Value>().unwrap();
+ &a["foo"];
+ &a["-"];
+ &a["_"];
+ &a["8"];
+ &a["foo_3"];
+ &a["foo_-2--3--r23f--4-f2-4"];
+ &a["a"];
+ &a["!"];
+ &a["\""];
+ &a["character encoding"];
+ &a["ʎǝʞ"];
+}
+
+#[test]
+fn bad_keys() {
+ "key\n=3".parse::<Value>().unwrap_err();
+ "key=\n3".parse::<Value>().unwrap_err();
+ "key|=3".parse::<Value>().unwrap_err();
+ "\"\"=3".parse::<Value>().unwrap_err();
+ "=3".parse::<Value>().unwrap_err();
+ "\"\"|=3".parse::<Value>().unwrap_err();
+ "\"\n\"|=3".parse::<Value>().unwrap_err();
+ "\"\r\"|=3".parse::<Value>().unwrap_err();
+}
+
+#[test]
+fn bad_table_names() {
+ "[]".parse::<Value>().unwrap_err();
+ "[.]".parse::<Value>().unwrap_err();
+ "[\"\".\"\"]".parse::<Value>().unwrap_err();
+ "[a.]".parse::<Value>().unwrap_err();
+ "[\"\"]".parse::<Value>().unwrap_err();
+ "[!]".parse::<Value>().unwrap_err();
+ "[\"\n\"]".parse::<Value>().unwrap_err();
+ "[a.b]\n[a.\"b\"]".parse::<Value>().unwrap_err();
+ "[']".parse::<Value>().unwrap_err();
+ "[''']".parse::<Value>().unwrap_err();
+ "['''''']".parse::<Value>().unwrap_err();
+ "['\n']".parse::<Value>().unwrap_err();
+ "['\r\n']".parse::<Value>().unwrap_err();
+}
+
+#[test]
+fn table_names() {
+ let a = "
+ [a.\"b\"]
+ [\"f f\"]
+ [\"f.f\"]
+ [\"\\\"\"]
+ ['a.a']
+ ['\"\"']
+ ".parse::<Value>().unwrap();
+ println!("{:?}", a);
+ &a["a"]["b"];
+ &a["f f"];
+ &a["f.f"];
+ &a["\""];
+ &a["\"\""];
+}
+
+#[test]
+fn invalid_bare_numeral() {
+ "4".parse::<Value>().unwrap_err();
+}
+
+#[test]
+fn inline_tables() {
+ "a = {}".parse::<Value>().unwrap();
+ "a = {b=1}".parse::<Value>().unwrap();
+ "a = { b = 1 }".parse::<Value>().unwrap();
+ "a = {a=1,b=2}".parse::<Value>().unwrap();
+ "a = {a=1,b=2,c={}}".parse::<Value>().unwrap();
+ "a = {a=1,}".parse::<Value>().unwrap_err();
+ "a = {,}".parse::<Value>().unwrap_err();
+ "a = {a=1,a=1}".parse::<Value>().unwrap_err();
+ "a = {\n}".parse::<Value>().unwrap_err();
+ "a = {".parse::<Value>().unwrap_err();
+ "a = {a=[\n]}".parse::<Value>().unwrap();
+ "a = {\"a\"=[\n]}".parse::<Value>().unwrap();
+ "a = [\n{},\n{},\n]".parse::<Value>().unwrap();
+}
+
+#[test]
+fn number_underscores() {
+ macro_rules! t {
+ ($actual:expr, $expected:expr) => ({
+ let f = format!("foo = {}", $actual);
+ let table = f.parse::<Value>().unwrap();
+ assert_eq!(table["foo"].as_integer().unwrap(), $expected);
+ })
+ }
+
+ t!("1_0", 10);
+ t!("1_0_0", 100);
+ t!("1_000", 1000);
+ t!("+1_000", 1000);
+ t!("-1_000", -1000);
+}
+
+#[test]
+fn bad_underscores() {
+ bad!("foo = 0_", "invalid number");
+ bad!("foo = 0__0", "invalid number");
+ bad!("foo = __0", "invalid number");
+ bad!("foo = 1_0_", "invalid number");
+}
+
+#[test]
+fn bad_unicode_codepoint() {
+ bad!("foo = \"\\uD800\"", "invalid escape value");
+}
+
+#[test]
+fn bad_strings() {
+ bad!("foo = \"\\uxx\"", "invalid hex escape");
+ bad!("foo = \"\\u\"", "invalid hex escape");
+ bad!("foo = \"\\", "unterminated");
+ bad!("foo = '", "unterminated");
+}
+
+#[test]
+fn empty_string() {
+ assert_eq!("foo = \"\"".parse::<Value>()
+ .unwrap()["foo"]
+ .as_str()
+ .unwrap(),
+ "");
+}
+
+#[test]
+fn booleans() {
+ let table = "foo = true".parse::<Value>().unwrap();
+ assert_eq!(table["foo"].as_bool(), Some(true));
+
+ let table = "foo = false".parse::<Value>().unwrap();
+ assert_eq!(table["foo"].as_bool(), Some(false));
+
+ assert!("foo = true2".parse::<Value>().is_err());
+ assert!("foo = false2".parse::<Value>().is_err());
+ assert!("foo = t1".parse::<Value>().is_err());
+ assert!("foo = f2".parse::<Value>().is_err());
+}
+
+#[test]
+fn bad_nesting() {
+ bad!("
+ a = [2]
+ [[a]]
+ b = 5
+ ", "duplicate key: `a`");
+ bad!("
+ a = 1
+ [a.b]
+ ", "duplicate key: `a`");
+ bad!("
+ a = []
+ [a.b]
+ ", "duplicate key: `a`");
+ bad!("
+ a = []
+ [[a.b]]
+ ", "duplicate key: `a`");
+ bad!("
+ [a]
+ b = { c = 2, d = {} }
+ [a.b]
+ c = 2
+ ", "duplicate key: `b`");
+}
+
+#[test]
+fn bad_table_redefine() {
+ bad!("
+ [a]
+ foo=\"bar\"
+ [a.b]
+ foo=\"bar\"
+ [a]
+ ", "redefinition of table `a`");
+ bad!("
+ [a]
+ foo=\"bar\"
+ b = { foo = \"bar\" }
+ [a]
+ ", "redefinition of table `a`");
+ bad!("
+ [a]
+ b = {}
+ [a.b]
+ ", "duplicate key: `b`");
+
+ bad!("
+ [a]
+ b = {}
+ [a]
+ ", "redefinition of table `a`");
+}
+
+#[test]
+fn datetimes() {
+ macro_rules! t {
+ ($actual:expr) => ({
+ let f = format!("foo = {}", $actual);
+ let toml = f.parse::<Value>().expect(&format!("failed: {}", f));
+ assert_eq!(toml["foo"].as_datetime().unwrap().to_string(), $actual);
+ })
+ }
+
+ t!("2016-09-09T09:09:09Z");
+ t!("2016-09-09T09:09:09.1Z");
+ t!("2016-09-09T09:09:09.2+10:00");
+ t!("2016-09-09T09:09:09.123456789-02:00");
+ bad!("foo = 2016-09-09T09:09:09.Z", "failed to parse date");
+ bad!("foo = 2016-9-09T09:09:09Z", "failed to parse date");
+ bad!("foo = 2016-09-09T09:09:09+2:00", "failed to parse date");
+ bad!("foo = 2016-09-09T09:09:09-2:00", "failed to parse date");
+ bad!("foo = 2016-09-09T09:09:09Z-2:00", "failed to parse date");
+}
+
+#[test]
+fn require_newline_after_value() {
+ bad!("0=0r=false", "invalid number at line 1");
+ bad!(r#"
+0=""o=""m=""r=""00="0"q="""0"""e="""0"""
+"#, "expected newline");
+ bad!(r#"
+[[0000l0]]
+0="0"[[0000l0]]
+0="0"[[0000l0]]
+0="0"l="0"
+"#, "expected newline");
+ bad!(r#"
+0=[0]00=[0,0,0]t=["0","0","0"]s=[1000-00-00T00:00:00Z,2000-00-00T00:00:00Z]
+"#, "expected newline");
+ bad!(r#"
+0=0r0=0r=false
+"#, "invalid number at line 2");
+ bad!(r#"
+0=0r0=0r=falsefal=false
+"#, "invalid number at line 2");
+}
diff --git a/test-suite/tests/pretty.rs b/test-suite/tests/pretty.rs
new file mode 100644
index 0000000..19ed22d
--- /dev/null
+++ b/test-suite/tests/pretty.rs
@@ -0,0 +1,308 @@
+extern crate toml;
+extern crate serde;
+
+use serde::ser::Serialize;
+
+const NO_PRETTY: &'static str = "\
+[example]
+array = [\"item 1\", \"item 2\"]
+empty = []
+oneline = \"this has no newlines.\"
+text = \"\\nthis is the first line\\nthis is the second line\\n\"
+";
+
+#[test]
+fn no_pretty() {
+ let toml = NO_PRETTY;
+ let value: toml::Value = toml::from_str(toml).unwrap();
+ let mut result = String::with_capacity(128);
+ value.serialize(&mut toml::Serializer::new(&mut result)).unwrap();
+ println!("EXPECTED:\n{}", toml);
+ println!("\nRESULT:\n{}", result);
+ assert_eq!(toml, &result);
+}
+
+#[test]
+fn disable_pretty() {
+ let toml = NO_PRETTY;
+ let value: toml::Value = toml::from_str(toml).unwrap();
+ let mut result = String::with_capacity(128);
+ {
+ let mut serializer = toml::Serializer::pretty(&mut result);
+ serializer.pretty_string(false);
+ serializer.pretty_array(false);
+ value.serialize(&mut serializer).unwrap();
+ }
+ println!("EXPECTED:\n{}", toml);
+ println!("\nRESULT:\n{}", result);
+ assert_eq!(toml, &result);
+}
+
+const PRETTY_STD: &'static str = "\
+[example]
+array = [
+ 'item 1',
+ 'item 2',
+]
+empty = []
+one = ['one']
+oneline = 'this has no newlines.'
+text = '''
+this is the first line
+this is the second line
+'''
+";
+
+#[test]
+fn pretty_std() {
+ let toml = PRETTY_STD;
+ let value: toml::Value = toml::from_str(toml).unwrap();
+ let mut result = String::with_capacity(128);
+ value.serialize(&mut toml::Serializer::pretty(&mut result)).unwrap();
+ println!("EXPECTED:\n{}", toml);
+ println!("\nRESULT:\n{}", result);
+ assert_eq!(toml, &result);
+}
+
+
+const PRETTY_INDENT_2: &'static str = "\
+[example]
+array = [
+ 'item 1',
+ 'item 2',
+]
+empty = []
+one = ['one']
+oneline = 'this has no newlines.'
+text = '''
+this is the first line
+this is the second line
+'''
+three = [
+ 'one',
+ 'two',
+ 'three',
+]
+";
+
+#[test]
+fn pretty_indent_2() {
+ let toml = PRETTY_INDENT_2;
+ let value: toml::Value = toml::from_str(toml).unwrap();
+ let mut result = String::with_capacity(128);
+ {
+ let mut serializer = toml::Serializer::pretty(&mut result);
+ serializer.pretty_array_indent(2);
+ value.serialize(&mut serializer).unwrap();
+ }
+ println!(">> Result:\n{}", result);
+ assert_eq!(toml, &result);
+}
+
+const PRETTY_INDENT_2_OTHER: &'static str = "\
+[example]
+array = [
+ \"item 1\",
+ \"item 2\",
+]
+empty = []
+oneline = \"this has no newlines.\"
+text = \"\\nthis is the first line\\nthis is the second line\\n\"
+";
+
+
+#[test]
+/// Test pretty array indent when configured on a non-pretty serializer
+fn pretty_indent_2_other() {
+ let toml = PRETTY_INDENT_2_OTHER;
+ let value: toml::Value = toml::from_str(toml).unwrap();
+ let mut result = String::with_capacity(128);
+ {
+ let mut serializer = toml::Serializer::new(&mut result);
+ serializer.pretty_array_indent(2);
+ value.serialize(&mut serializer).unwrap();
+ }
+ assert_eq!(toml, &result);
+}
+
+
+const PRETTY_ARRAY_NO_COMMA: &'static str = "\
+[example]
+array = [
+ \"item 1\",
+ \"item 2\"
+]
+empty = []
+oneline = \"this has no newlines.\"
+text = \"\\nthis is the first line\\nthis is the second line\\n\"
+";
+#[test]
+/// Test pretty arrays with the trailing comma disabled
+fn pretty_indent_array_no_comma() {
+ let toml = PRETTY_ARRAY_NO_COMMA;
+ let value: toml::Value = toml::from_str(toml).unwrap();
+ let mut result = String::with_capacity(128);
+ {
+ let mut serializer = toml::Serializer::new(&mut result);
+ serializer.pretty_array_trailing_comma(false);
+ value.serialize(&mut serializer).unwrap();
+ }
+ assert_eq!(toml, &result);
+}
+
+
+const PRETTY_NO_STRING: &'static str = "\
+[example]
+array = [
+ \"item 1\",
+ \"item 2\",
+]
+empty = []
+oneline = \"this has no newlines.\"
+text = \"\\nthis is the first line\\nthis is the second line\\n\"
+";
+#[test]
+/// Test the pretty serializer with pretty strings disabled
+fn pretty_no_string() {
+ let toml = PRETTY_NO_STRING;
+ let value: toml::Value = toml::from_str(toml).unwrap();
+ let mut result = String::with_capacity(128);
+ {
+ let mut serializer = toml::Serializer::pretty(&mut result);
+ serializer.pretty_string(false);
+ value.serialize(&mut serializer).unwrap();
+ }
+ assert_eq!(toml, &result);
+}
+
+const PRETTY_TRICKY: &'static str = r##"[example]
+f = "\f"
+glass = '''
+Nothing too unusual, except that I can eat glass in:
+- Greek: Μπορώ να φάω σπασμένα γυαλιά χωρίς να πάθω τίποτα.
+- Polish: Mogę jeść szkło, i mi nie szkodzi.
+- Hindi: मैं काँच खा सकता हूँ, मुझे उस से कोई पीडा नहीं होती.
+- Japanese: 私はガラスを食べられます。それは私を傷つけません。
+'''
+r = "\r"
+r_newline = """
+\r
+"""
+single = '''this is a single line but has '' cuz it's tricky'''
+single_tricky = "single line with ''' in it"
+tabs = '''
+this is pretty standard
+ except for some tabs right here
+'''
+text = """
+this is the first line.
+This has a ''' in it and \"\"\" cuz it's tricky yo
+Also ' and \" because why not
+this is the fourth line
+"""
+"##;
+
+#[test]
+fn pretty_tricky() {
+ let toml = PRETTY_TRICKY;
+ let value: toml::Value = toml::from_str(toml).unwrap();
+ let mut result = String::with_capacity(128);
+ value.serialize(&mut toml::Serializer::pretty(&mut result)).unwrap();
+ println!("EXPECTED:\n{}", toml);
+ println!("\nRESULT:\n{}", result);
+ assert_eq!(toml, &result);
+}
+
+const PRETTY_TABLE_ARRAY: &'static str = r##"[[array]]
+key = 'foo'
+
+[[array]]
+key = 'bar'
+
+[abc]
+doc = 'this is a table'
+
+[example]
+single = 'this is a single line string'
+"##;
+
+#[test]
+fn pretty_table_array() {
+ let toml = PRETTY_TABLE_ARRAY;
+ let value: toml::Value = toml::from_str(toml).unwrap();
+ let mut result = String::with_capacity(128);
+ value.serialize(&mut toml::Serializer::pretty(&mut result)).unwrap();
+ println!("EXPECTED:\n{}", toml);
+ println!("\nRESULT:\n{}", result);
+ assert_eq!(toml, &result);
+}
+
+const TABLE_ARRAY: &'static str = r##"[[array]]
+key = "foo"
+
+[[array]]
+key = "bar"
+
+[abc]
+doc = "this is a table"
+
+[example]
+single = "this is a single line string"
+"##;
+
+#[test]
+fn table_array() {
+ let toml = TABLE_ARRAY;
+ let value: toml::Value = toml::from_str(toml).unwrap();
+ let mut result = String::with_capacity(128);
+ value.serialize(&mut toml::Serializer::new(&mut result)).unwrap();
+ println!("EXPECTED:\n{}", toml);
+ println!("\nRESULT:\n{}", result);
+ assert_eq!(toml, &result);
+}
+
+const PRETTY_TRICKY_NON_LITERAL: &'static str = r##"[example]
+f = "\f"
+glass = """
+Nothing too unusual, except that I can eat glass in:
+- Greek: Μπορώ να φάω σπασμένα γυαλιά χωρίς να πάθω τίποτα.
+- Polish: Mogę jeść szkło, i mi nie szkodzi.
+- Hindi: मैं काँच खा सकता हूँ, मुझे उस से कोई पीडा नहीं होती.
+- Japanese: 私はガラスを食べられます。それは私を傷つけません。
+"""
+plain = """
+This has a couple of lines
+Because it likes to.
+"""
+r = "\r"
+r_newline = """
+\r
+"""
+single = "this is a single line but has '' cuz it's tricky"
+single_tricky = "single line with ''' in it"
+tabs = """
+this is pretty standard
+\texcept for some \ttabs right here
+"""
+text = """
+this is the first line.
+This has a ''' in it and \"\"\" cuz it's tricky yo
+Also ' and \" because why not
+this is the fourth line
+"""
+"##;
+
+#[test]
+fn pretty_tricky_non_literal() {
+ let toml = PRETTY_TRICKY_NON_LITERAL;
+ let value: toml::Value = toml::from_str(toml).unwrap();
+ let mut result = String::with_capacity(128);
+ {
+ let mut serializer = toml::Serializer::pretty(&mut result);
+ serializer.pretty_string_literal(false);
+ value.serialize(&mut serializer).unwrap();
+ }
+ println!("EXPECTED:\n{}", toml);
+ println!("\nRESULT:\n{}", result);
+ assert_eq!(toml, &result);
+}
diff --git a/test-suite/tests/serde.rs b/test-suite/tests/serde.rs
new file mode 100644
index 0000000..57fa5db
--- /dev/null
+++ b/test-suite/tests/serde.rs
@@ -0,0 +1,578 @@
+extern crate serde;
+extern crate toml;
+#[macro_use]
+extern crate serde_derive;
+
+use std::collections::{BTreeMap, HashSet};
+use serde::{Deserialize, Deserializer};
+
+use toml::Value;
+use toml::Value::{Table, Integer, Array, Float};
+
+macro_rules! t {
+ ($e:expr) => (match $e {
+ Ok(t) => t,
+ Err(e) => panic!("{} failed with {}", stringify!($e), e),
+ })
+}
+
+macro_rules! equivalent {
+ ($literal:expr, $toml:expr,) => ({
+ let toml = $toml;
+ let literal = $literal;
+
+ // In/out of Value is equivalent
+ println!("try_from");
+ assert_eq!(t!(Value::try_from(literal.clone())), toml);
+ println!("try_into");
+ assert_eq!(literal, t!(toml.clone().try_into()));
+
+ // Through a string equivalent
+ println!("to_string(literal)");
+ assert_eq!(t!(toml::to_string(&literal)), toml.to_string());
+ println!("to_string(toml)");
+ assert_eq!(t!(toml::to_string(&toml)), toml.to_string());
+ println!("literal, from_str(toml)");
+ assert_eq!(literal, t!(toml::from_str(&toml.to_string())));
+ println!("toml, from_str(toml)");
+ assert_eq!(toml, t!(toml::from_str(&toml.to_string())));
+ })
+}
+
+macro_rules! error {
+ ($ty:ty, $toml:expr, $error:expr) => ({
+ println!("attempting parsing");
+ match toml::from_str::<$ty>(&$toml.to_string()) {
+ Ok(_) => panic!("successful"),
+ Err(e) => {
+ assert!(e.to_string().contains($error),
+ "bad error: {}", e);
+ }
+ }
+
+ println!("attempting toml decoding");
+ match $toml.try_into::<$ty>() {
+ Ok(_) => panic!("successful"),
+ Err(e) => {
+ assert!(e.to_string().contains($error),
+ "bad error: {}", e);
+ }
+ }
+ })
+}
+
+macro_rules! map( ($($k:ident: $v:expr),*) => ({
+ let mut _m = BTreeMap::new();
+ $(_m.insert(stringify!($k).to_string(), $v);)*
+ _m
+}) );
+
+#[test]
+fn smoke() {
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Foo { a: isize }
+
+ equivalent!(
+ Foo { a: 2 },
+ Table(map! { a: Integer(2) }),
+ );
+}
+
+#[test]
+fn smoke_hyphen() {
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Foo {
+ a_b: isize,
+ }
+
+ equivalent! {
+ Foo { a_b: 2 },
+ Table(map! { a_b: Integer(2) }),
+ }
+
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Foo2 {
+ #[serde(rename = "a-b")]
+ a_b: isize,
+ }
+
+ let mut m = BTreeMap::new();
+ m.insert("a-b".to_string(), Integer(2));
+ equivalent! {
+ Foo2 { a_b: 2 },
+ Table(m),
+ }
+}
+
+#[test]
+fn nested() {
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Foo { a: isize, b: Bar }
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Bar { a: String }
+
+ equivalent! {
+ Foo { a: 2, b: Bar { a: "test".to_string() } },
+ Table(map! {
+ a: Integer(2),
+ b: Table(map! {
+ a: Value::String("test".to_string())
+ })
+ }),
+ }
+}
+
+#[test]
+fn application_decode_error() {
+ #[derive(PartialEq, Debug)]
+ struct Range10(usize);
+ impl<'de> Deserialize<'de> for Range10 {
+ fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Range10, D::Error> {
+ let x: usize = try!(Deserialize::deserialize(d));
+ if x > 10 {
+ Err(serde::de::Error::custom("more than 10"))
+ } else {
+ Ok(Range10(x))
+ }
+ }
+ }
+ let d_good = Integer(5);
+ let d_bad1 = Value::String("not an isize".to_string());
+ let d_bad2 = Integer(11);
+
+ assert_eq!(Range10(5), d_good.try_into().unwrap());
+
+ let err1: Result<Range10, _> = d_bad1.try_into();
+ assert!(err1.is_err());
+ let err2: Result<Range10, _> = d_bad2.try_into();
+ assert!(err2.is_err());
+}
+
+#[test]
+fn array() {
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Foo { a: Vec<isize> }
+
+ equivalent! {
+ Foo { a: vec![1, 2, 3, 4] },
+ Table(map! {
+ a: Array(vec![
+ Integer(1),
+ Integer(2),
+ Integer(3),
+ Integer(4)
+ ])
+ }),
+ };
+}
+
+#[test]
+fn inner_structs_with_options() {
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Foo {
+ a: Option<Box<Foo>>,
+ b: Bar,
+ }
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Bar {
+ a: String,
+ b: f64,
+ }
+
+ equivalent! {
+ Foo {
+ a: Some(Box::new(Foo {
+ a: None,
+ b: Bar { a: "foo".to_string(), b: 4.5 },
+ })),
+ b: Bar { a: "bar".to_string(), b: 1.0 },
+ },
+ Table(map! {
+ a: Table(map! {
+ b: Table(map! {
+ a: Value::String("foo".to_string()),
+ b: Float(4.5)
+ })
+ }),
+ b: Table(map! {
+ a: Value::String("bar".to_string()),
+ b: Float(1.0)
+ })
+ }),
+ }
+}
+
+#[test]
+fn hashmap() {
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Foo {
+ set: HashSet<char>,
+ map: BTreeMap<String, isize>,
+ }
+
+ equivalent! {
+ Foo {
+ map: {
+ let mut m = BTreeMap::new();
+ m.insert("foo".to_string(), 10);
+ m.insert("bar".to_string(), 4);
+ m
+ },
+ set: {
+ let mut s = HashSet::new();
+ s.insert('a');
+ s
+ },
+ },
+ Table(map! {
+ map: Table(map! {
+ foo: Integer(10),
+ bar: Integer(4)
+ }),
+ set: Array(vec![Value::String("a".to_string())])
+ }),
+ }
+}
+
+#[test]
+fn table_array() {
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Foo { a: Vec<Bar>, }
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Bar { a: isize }
+
+ equivalent! {
+ Foo { a: vec![Bar { a: 1 }, Bar { a: 2 }] },
+ Table(map! {
+ a: Array(vec![
+ Table(map!{ a: Integer(1) }),
+ Table(map!{ a: Integer(2) }),
+ ])
+ }),
+ }
+}
+
+#[test]
+fn type_errors() {
+ #[derive(Deserialize)]
+ #[allow(dead_code)]
+ struct Foo { bar: isize }
+
+ error! {
+ Foo,
+ Table(map! {
+ bar: Value::String("a".to_string())
+ }),
+ "invalid type: string \"a\", expected isize for key `bar`"
+ }
+
+ #[derive(Deserialize)]
+ #[allow(dead_code)]
+ struct Bar { foo: Foo }
+
+ error! {
+ Bar,
+ Table(map! {
+ foo: Table(map! {
+ bar: Value::String("a".to_string())
+ })
+ }),
+ "invalid type: string \"a\", expected isize for key `foo.bar`"
+ }
+}
+
+#[test]
+fn missing_errors() {
+ #[derive(Serialize, Deserialize, PartialEq, Debug)]
+ struct Foo { bar: isize }
+
+ error! {
+ Foo,
+ Table(map! { }),
+ "missing field `bar`"
+ }
+}
+
+#[test]
+fn parse_enum() {
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Foo { a: E }
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ #[serde(untagged)]
+ enum E {
+ Bar(isize),
+ Baz(String),
+ Last(Foo2),
+ }
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Foo2 {
+ test: String,
+ }
+
+ equivalent! {
+ Foo { a: E::Bar(10) },
+ Table(map! { a: Integer(10) }),
+ }
+
+ equivalent! {
+ Foo { a: E::Baz("foo".to_string()) },
+ Table(map! { a: Value::String("foo".to_string()) }),
+ }
+
+ equivalent! {
+ Foo { a: E::Last(Foo2 { test: "test".to_string() }) },
+ Table(map! { a: Table(map! { test: Value::String("test".to_string()) }) }),
+ }
+}
+
+#[test]
+fn parse_enum_string() {
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Foo { a: Sort }
+
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ #[serde(rename_all = "lowercase")]
+ enum Sort {
+ Asc,
+ Desc,
+ }
+
+ equivalent! {
+ Foo { a: Sort::Desc },
+ Table(map! { a: Value::String("desc".to_string()) }),
+ }
+
+}
+
+// #[test]
+// fn unused_fields() {
+// #[derive(Serialize, Deserialize, PartialEq, Debug)]
+// struct Foo { a: isize }
+//
+// let v = Foo { a: 2 };
+// let mut d = Decoder::new(Table(map! {
+// a, Integer(2),
+// b, Integer(5)
+// }));
+// assert_eq!(v, t!(Deserialize::deserialize(&mut d)));
+//
+// assert_eq!(d.toml, Some(Table(map! {
+// b, Integer(5)
+// })));
+// }
+//
+// #[test]
+// fn unused_fields2() {
+// #[derive(Serialize, Deserialize, PartialEq, Debug)]
+// struct Foo { a: Bar }
+// #[derive(Serialize, Deserialize, PartialEq, Debug)]
+// struct Bar { a: isize }
+//
+// let v = Foo { a: Bar { a: 2 } };
+// let mut d = Decoder::new(Table(map! {
+// a, Table(map! {
+// a, Integer(2),
+// b, Integer(5)
+// })
+// }));
+// assert_eq!(v, t!(Deserialize::deserialize(&mut d)));
+//
+// assert_eq!(d.toml, Some(Table(map! {
+// a, Table(map! {
+// b, Integer(5)
+// })
+// })));
+// }
+//
+// #[test]
+// fn unused_fields3() {
+// #[derive(Serialize, Deserialize, PartialEq, Debug)]
+// struct Foo { a: Bar }
+// #[derive(Serialize, Deserialize, PartialEq, Debug)]
+// struct Bar { a: isize }
+//
+// let v = Foo { a: Bar { a: 2 } };
+// let mut d = Decoder::new(Table(map! {
+// a, Table(map! {
+// a, Integer(2)
+// })
+// }));
+// assert_eq!(v, t!(Deserialize::deserialize(&mut d)));
+//
+// assert_eq!(d.toml, None);
+// }
+//
+// #[test]
+// fn unused_fields4() {
+// #[derive(Serialize, Deserialize, PartialEq, Debug)]
+// struct Foo { a: BTreeMap<String, String> }
+//
+// let v = Foo { a: map! { a, "foo".to_string() } };
+// let mut d = Decoder::new(Table(map! {
+// a, Table(map! {
+// a, Value::String("foo".to_string())
+// })
+// }));
+// assert_eq!(v, t!(Deserialize::deserialize(&mut d)));
+//
+// assert_eq!(d.toml, None);
+// }
+//
+// #[test]
+// fn unused_fields5() {
+// #[derive(Serialize, Deserialize, PartialEq, Debug)]
+// struct Foo { a: Vec<String> }
+//
+// let v = Foo { a: vec!["a".to_string()] };
+// let mut d = Decoder::new(Table(map! {
+// a, Array(vec![Value::String("a".to_string())])
+// }));
+// assert_eq!(v, t!(Deserialize::deserialize(&mut d)));
+//
+// assert_eq!(d.toml, None);
+// }
+//
+// #[test]
+// fn unused_fields6() {
+// #[derive(Serialize, Deserialize, PartialEq, Debug)]
+// struct Foo { a: Option<Vec<String>> }
+//
+// let v = Foo { a: Some(vec![]) };
+// let mut d = Decoder::new(Table(map! {
+// a, Array(vec![])
+// }));
+// assert_eq!(v, t!(Deserialize::deserialize(&mut d)));
+//
+// assert_eq!(d.toml, None);
+// }
+//
+// #[test]
+// fn unused_fields7() {
+// #[derive(Serialize, Deserialize, PartialEq, Debug)]
+// struct Foo { a: Vec<Bar> }
+// #[derive(Serialize, Deserialize, PartialEq, Debug)]
+// struct Bar { a: isize }
+//
+// let v = Foo { a: vec![Bar { a: 1 }] };
+// let mut d = Decoder::new(Table(map! {
+// a, Array(vec![Table(map! {
+// a, Integer(1),
+// b, Integer(2)
+// })])
+// }));
+// assert_eq!(v, t!(Deserialize::deserialize(&mut d)));
+//
+// assert_eq!(d.toml, Some(Table(map! {
+// a, Array(vec![Table(map! {
+// b, Integer(2)
+// })])
+// })));
+// }
+
+#[test]
+fn empty_arrays() {
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Foo { a: Vec<Bar> }
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Bar;
+
+ equivalent! {
+ Foo { a: vec![] },
+ Table(map! {a: Array(Vec::new())}),
+ }
+}
+
+#[test]
+fn empty_arrays2() {
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Foo { a: Option<Vec<Bar>> }
+ #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
+ struct Bar;
+
+ equivalent! {
+ Foo { a: None },
+ Table(map! {}),
+ }
+
+ equivalent!{
+ Foo { a: Some(vec![]) },
+ Table(map! { a: Array(vec![]) }),
+ }
+}
+
+#[test]
+fn extra_keys() {
+ #[derive(Serialize, Deserialize)]
+ struct Foo { a: isize }
+
+ let toml = Table(map! { a: Integer(2), b: Integer(2) });
+ assert!(toml.clone().try_into::<Foo>().is_ok());
+ assert!(toml::from_str::<Foo>(&toml.to_string()).is_ok());
+}
+
+#[test]
+fn newtypes() {
+ #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
+ struct A {
+ b: B
+ }
+
+ #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
+ struct B(u32);
+
+ equivalent! {
+ A { b: B(2) },
+ Table(map! { b: Integer(2) }),
+ }
+}
+
+#[test]
+fn newtypes2() {
+ #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
+ struct A {
+ b: B
+ }
+
+ #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
+ struct B(Option<C>);
+
+ #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
+ struct C {
+ x: u32,
+ y: u32,
+ z: u32
+ }
+
+ equivalent! {
+ A { b: B(Some(C { x: 0, y: 1, z: 2 })) },
+ Table(map! {
+ b: Table(map! {
+ x: Integer(0),
+ y: Integer(1),
+ z: Integer(2)
+ })
+ }),
+ }
+}
+
+#[derive(Debug, Default, PartialEq, Serialize, Deserialize)]
+struct CanBeEmpty {
+ a: Option<String>,
+ b: Option<String>,
+}
+
+#[test]
+fn table_structs_empty() {
+ let text = "[bar]\n\n[baz]\n\n[bazv]\na = \"foo\"\n\n[foo]\n";
+ let value: BTreeMap<String, CanBeEmpty> = toml::from_str(text).unwrap();
+ let mut expected: BTreeMap<String, CanBeEmpty> = BTreeMap::new();
+ expected.insert("bar".to_string(), CanBeEmpty::default());
+ expected.insert("baz".to_string(), CanBeEmpty::default());
+ expected.insert(
+ "bazv".to_string(),
+ CanBeEmpty {a: Some("foo".to_string()), b: None},
+ );
+ expected.insert("foo".to_string(), CanBeEmpty::default());
+ assert_eq!(value, expected);
+ assert_eq!(toml::to_string(&value).unwrap(), text);
+}
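The round-trip property these serde tests assert can be exercised the same way from a downstream crate. A minimal sketch, assuming toml/serde_derive dependencies; the Config type and its fields are illustrative and not part of the test suite:

    #[macro_use]
    extern crate serde_derive;
    extern crate toml;

    #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
    struct Config {
        name: String,
        port: i64,
    }

    fn main() {
        let cfg = Config { name: "demo".to_string(), port: 8080 };

        // Struct -> TOML text -> struct should be lossless.
        let text = toml::to_string(&cfg).unwrap();
        let back: Config = toml::from_str(&text).unwrap();
        assert_eq!(cfg, back);

        // The same value also round-trips through toml::Value, mirroring the
        // try_from/try_into checks in the equivalent! macro above.
        let value = toml::Value::try_from(cfg.clone()).unwrap();
        let back: Config = value.try_into().unwrap();
        assert_eq!(cfg, back);
    }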
diff --git a/test-suite/tests/tables-last.rs b/test-suite/tests/tables-last.rs
new file mode 100644
index 0000000..d05c8f0
--- /dev/null
+++ b/test-suite/tests/tables-last.rs
@@ -0,0 +1,30 @@
+#[macro_use]
+extern crate serde_derive;
+extern crate toml;
+
+use std::collections::HashMap;
+
+#[derive(Serialize)]
+struct A {
+ #[serde(serialize_with = "toml::ser::tables_last")]
+ vals: HashMap<&'static str, Value>,
+}
+
+#[derive(Serialize)]
+#[serde(untagged)]
+enum Value {
+ Map(HashMap<&'static str, &'static str>),
+ Int(i32),
+}
+
+#[test]
+fn always_works() {
+ let mut a = A { vals: HashMap::new() };
+ a.vals.insert("foo", Value::Int(0));
+
+ let mut sub = HashMap::new();
+ sub.insert("foo", "bar");
+ a.vals.insert("bar", Value::Map(sub));
+
+ toml::to_string(&a).unwrap();
+}
diff --git a/test-suite/tests/valid.rs b/test-suite/tests/valid.rs
new file mode 100644
index 0000000..b186800
--- /dev/null
+++ b/test-suite/tests/valid.rs
@@ -0,0 +1,249 @@
+extern crate toml;
+extern crate serde;
+extern crate serde_json;
+
+use toml::{Value as Toml, to_string_pretty};
+use serde::ser::Serialize;
+use serde_json::Value as Json;
+
+fn to_json(toml: toml::Value) -> Json {
+ fn doit(s: &str, json: Json) -> Json {
+ let mut map = serde_json::Map::new();
+ map.insert("type".to_string(), Json::String(s.to_string()));
+ map.insert("value".to_string(), json);
+ Json::Object(map)
+ }
+
+ match toml {
+ Toml::String(s) => doit("string", Json::String(s)),
+ Toml::Integer(i) => doit("integer", Json::String(i.to_string())),
+ Toml::Float(f) => doit("float", Json::String({
+ let s = format!("{:.15}", f);
+ let s = format!("{}", s.trim_right_matches('0'));
+ if s.ends_with('.') {format!("{}0", s)} else {s}
+ })),
+ Toml::Boolean(b) => doit("bool", Json::String(format!("{}", b))),
+ Toml::Datetime(s) => doit("datetime", Json::String(s.to_string())),
+ Toml::Array(arr) => {
+ let is_table = match arr.first() {
+ Some(&Toml::Table(..)) => true,
+ _ => false,
+ };
+ let json = Json::Array(arr.into_iter().map(to_json).collect());
+ if is_table {json} else {doit("array", json)}
+ }
+ Toml::Table(table) => {
+ let mut map = serde_json::Map::new();
+ for (k, v) in table {
+ map.insert(k, to_json(v));
+ }
+ Json::Object(map)
+ }
+ }
+}
+
+fn run_pretty(toml: Toml) {
+ // Assert that every pretty-printed form parses back to the same value.
+ println!("### pretty round trip parse.");
+
+ // standard pretty
+ let toml_raw = to_string_pretty(&toml).expect("to string");
+ let toml2 = toml_raw.parse().expect("from string");
+ assert_eq!(toml, toml2);
+
+ // pretty with indent 2
+ let mut result = String::with_capacity(128);
+ {
+ let mut serializer = toml::Serializer::pretty(&mut result);
+ serializer.pretty_array_indent(2);
+ toml.serialize(&mut serializer).expect("to string");
+ }
+ assert_eq!(toml, result.parse().expect("from str"));
+ result.clear();
+ {
+ let mut serializer = toml::Serializer::new(&mut result);
+ serializer.pretty_array_trailing_comma(false);
+ toml.serialize(&mut serializer).expect("to string");
+ }
+ assert_eq!(toml, result.parse().expect("from str"));
+ result.clear();
+ {
+ let mut serializer = toml::Serializer::pretty(&mut result);
+ serializer.pretty_string(false);
+ toml.serialize(&mut serializer).expect("to string");
+ assert_eq!(toml, toml2);
+ }
+ assert_eq!(toml, result.parse().expect("from str"));
+ result.clear();
+ {
+ let mut serializer = toml::Serializer::pretty(&mut result);
+ serializer.pretty_array(false);
+ toml.serialize(&mut serializer).expect("to string");
+ assert_eq!(toml, toml2);
+ }
+ assert_eq!(toml, result.parse().expect("from str"));
+}
+
+fn run(toml_raw: &str, json_raw: &str) {
+ println!("parsing:\n{}", toml_raw);
+ let toml: Toml = toml_raw.parse().unwrap();
+ let json: Json = json_raw.parse().unwrap();
+
+ // Assert toml == json
+ let toml_json = to_json(toml.clone());
+ assert!(json == toml_json,
+ "expected\n{}\ngot\n{}\n",
+ serde_json::to_string_pretty(&json).unwrap(),
+ serde_json::to_string_pretty(&toml_json).unwrap());
+
+ // Assert round trip
+ println!("round trip parse: {}", toml);
+ let toml2 = toml.to_string().parse().unwrap();
+ assert_eq!(toml, toml2);
+ run_pretty(toml);
+}
+
+macro_rules! test( ($name:ident, $toml:expr, $json:expr) => (
+ #[test]
+ fn $name() { run($toml, $json); }
+) );
+
+test!(array_empty,
+ include_str!("valid/array-empty.toml"),
+ include_str!("valid/array-empty.json"));
+test!(array_nospaces,
+ include_str!("valid/array-nospaces.toml"),
+ include_str!("valid/array-nospaces.json"));
+test!(arrays_hetergeneous,
+ include_str!("valid/arrays-hetergeneous.toml"),
+ include_str!("valid/arrays-hetergeneous.json"));
+test!(arrays,
+ include_str!("valid/arrays.toml"),
+ include_str!("valid/arrays.json"));
+test!(arrays_nested,
+ include_str!("valid/arrays-nested.toml"),
+ include_str!("valid/arrays-nested.json"));
+test!(empty,
+ include_str!("valid/empty.toml"),
+ include_str!("valid/empty.json"));
+test!(bool,
+ include_str!("valid/bool.toml"),
+ include_str!("valid/bool.json"));
+test!(datetime,
+ include_str!("valid/datetime.toml"),
+ include_str!("valid/datetime.json"));
+test!(example,
+ include_str!("valid/example.toml"),
+ include_str!("valid/example.json"));
+test!(float,
+ include_str!("valid/float.toml"),
+ include_str!("valid/float.json"));
+test!(implicit_and_explicit_after,
+ include_str!("valid/implicit-and-explicit-after.toml"),
+ include_str!("valid/implicit-and-explicit-after.json"));
+test!(implicit_and_explicit_before,
+ include_str!("valid/implicit-and-explicit-before.toml"),
+ include_str!("valid/implicit-and-explicit-before.json"));
+test!(implicit_groups,
+ include_str!("valid/implicit-groups.toml"),
+ include_str!("valid/implicit-groups.json"));
+test!(integer,
+ include_str!("valid/integer.toml"),
+ include_str!("valid/integer.json"));
+test!(key_equals_nospace,
+ include_str!("valid/key-equals-nospace.toml"),
+ include_str!("valid/key-equals-nospace.json"));
+test!(key_space,
+ include_str!("valid/key-space.toml"),
+ include_str!("valid/key-space.json"));
+test!(key_special_chars,
+ include_str!("valid/key-special-chars.toml"),
+ include_str!("valid/key-special-chars.json"));
+test!(key_with_pound,
+ include_str!("valid/key-with-pound.toml"),
+ include_str!("valid/key-with-pound.json"));
+test!(long_float,
+ include_str!("valid/long-float.toml"),
+ include_str!("valid/long-float.json"));
+test!(long_integer,
+ include_str!("valid/long-integer.toml"),
+ include_str!("valid/long-integer.json"));
+test!(multiline_string,
+ include_str!("valid/multiline-string.toml"),
+ include_str!("valid/multiline-string.json"));
+test!(raw_multiline_string,
+ include_str!("valid/raw-multiline-string.toml"),
+ include_str!("valid/raw-multiline-string.json"));
+test!(raw_string,
+ include_str!("valid/raw-string.toml"),
+ include_str!("valid/raw-string.json"));
+test!(string_empty,
+ include_str!("valid/string-empty.toml"),
+ include_str!("valid/string-empty.json"));
+test!(string_escapes,
+ include_str!("valid/string-escapes.toml"),
+ include_str!("valid/string-escapes.json"));
+test!(string_simple,
+ include_str!("valid/string-simple.toml"),
+ include_str!("valid/string-simple.json"));
+test!(string_with_pound,
+ include_str!("valid/string-with-pound.toml"),
+ include_str!("valid/string-with-pound.json"));
+test!(table_array_implicit,
+ include_str!("valid/table-array-implicit.toml"),
+ include_str!("valid/table-array-implicit.json"));
+test!(table_array_many,
+ include_str!("valid/table-array-many.toml"),
+ include_str!("valid/table-array-many.json"));
+test!(table_array_nest,
+ include_str!("valid/table-array-nest.toml"),
+ include_str!("valid/table-array-nest.json"));
+test!(table_array_one,
+ include_str!("valid/table-array-one.toml"),
+ include_str!("valid/table-array-one.json"));
+test!(table_empty,
+ include_str!("valid/table-empty.toml"),
+ include_str!("valid/table-empty.json"));
+test!(table_sub_empty,
+ include_str!("valid/table-sub-empty.toml"),
+ include_str!("valid/table-sub-empty.json"));
+test!(table_multi_empty,
+ include_str!("valid/table-multi-empty.toml"),
+ include_str!("valid/table-multi-empty.json"));
+test!(table_whitespace,
+ include_str!("valid/table-whitespace.toml"),
+ include_str!("valid/table-whitespace.json"));
+test!(table_with_pound,
+ include_str!("valid/table-with-pound.toml"),
+ include_str!("valid/table-with-pound.json"));
+test!(unicode_escape,
+ include_str!("valid/unicode-escape.toml"),
+ include_str!("valid/unicode-escape.json"));
+test!(unicode_literal,
+ include_str!("valid/unicode-literal.toml"),
+ include_str!("valid/unicode-literal.json"));
+test!(hard_example,
+ include_str!("valid/hard_example.toml"),
+ include_str!("valid/hard_example.json"));
+test!(example2,
+ include_str!("valid/example2.toml"),
+ include_str!("valid/example2.json"));
+test!(example3,
+ include_str!("valid/example-v0.3.0.toml"),
+ include_str!("valid/example-v0.3.0.json"));
+test!(example4,
+ include_str!("valid/example-v0.4.0.toml"),
+ include_str!("valid/example-v0.4.0.json"));
+test!(example_bom,
+ include_str!("valid/example-bom.toml"),
+ include_str!("valid/example.json"));
+
+test!(datetime_truncate,
+ include_str!("valid/datetime-truncate.toml"),
+ include_str!("valid/datetime-truncate.json"));
+test!(key_quote_newline,
+ include_str!("valid/key-quote-newline.toml"),
+ include_str!("valid/key-quote-newline.json"));
+test!(table_array_nest_no_keys,
+ include_str!("valid/table-array-nest-no-keys.toml"),
+ include_str!("valid/table-array-nest-no-keys.json"));
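The serializer settings exercised by run_pretty above can also be applied directly. A minimal sketch reusing only the knobs that appear in this file; the helper name is illustrative:

    extern crate serde;
    extern crate toml;

    use serde::ser::Serialize;

    // Pretty-print a value with two-space array indentation and verify the
    // output reparses to the same value.
    fn pretty_two_space(value: &toml::Value) -> String {
        let mut out = String::new();
        {
            let mut serializer = toml::Serializer::pretty(&mut out);
            serializer.pretty_array_indent(2);
            value.serialize(&mut serializer).expect("serialization failed");
        }
        let reparsed: toml::Value = out.parse().expect("reparse failed");
        assert_eq!(*value, reparsed);
        out
    }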
diff --git a/test-suite/tests/valid/array-empty.json b/test-suite/tests/valid/array-empty.json
new file mode 100644
index 0000000..2fbf256
--- /dev/null
+++ b/test-suite/tests/valid/array-empty.json
@@ -0,0 +1,11 @@
+{
+ "thevoid": { "type": "array", "value": [
+ {"type": "array", "value": [
+ {"type": "array", "value": [
+ {"type": "array", "value": [
+ {"type": "array", "value": []}
+ ]}
+ ]}
+ ]}
+ ]}
+}
diff --git a/test-suite/tests/valid/array-empty.toml b/test-suite/tests/valid/array-empty.toml
new file mode 100644
index 0000000..fa58dc6
--- /dev/null
+++ b/test-suite/tests/valid/array-empty.toml
@@ -0,0 +1 @@
+thevoid = [[[[[]]]]]
diff --git a/test-suite/tests/valid/array-nospaces.json b/test-suite/tests/valid/array-nospaces.json
new file mode 100644
index 0000000..1833d61
--- /dev/null
+++ b/test-suite/tests/valid/array-nospaces.json
@@ -0,0 +1,10 @@
+{
+ "ints": {
+ "type": "array",
+ "value": [
+ {"type": "integer", "value": "1"},
+ {"type": "integer", "value": "2"},
+ {"type": "integer", "value": "3"}
+ ]
+ }
+}
diff --git a/test-suite/tests/valid/array-nospaces.toml b/test-suite/tests/valid/array-nospaces.toml
new file mode 100644
index 0000000..6618936
--- /dev/null
+++ b/test-suite/tests/valid/array-nospaces.toml
@@ -0,0 +1 @@
+ints = [1,2,3]
diff --git a/test-suite/tests/valid/arrays-hetergeneous.json b/test-suite/tests/valid/arrays-hetergeneous.json
new file mode 100644
index 0000000..478fa5c
--- /dev/null
+++ b/test-suite/tests/valid/arrays-hetergeneous.json
@@ -0,0 +1,19 @@
+{
+ "mixed": {
+ "type": "array",
+ "value": [
+ {"type": "array", "value": [
+ {"type": "integer", "value": "1"},
+ {"type": "integer", "value": "2"}
+ ]},
+ {"type": "array", "value": [
+ {"type": "string", "value": "a"},
+ {"type": "string", "value": "b"}
+ ]},
+ {"type": "array", "value": [
+ {"type": "float", "value": "1.1"},
+ {"type": "float", "value": "2.1"}
+ ]}
+ ]
+ }
+}
diff --git a/test-suite/tests/valid/arrays-hetergeneous.toml b/test-suite/tests/valid/arrays-hetergeneous.toml
new file mode 100644
index 0000000..a246fcf
--- /dev/null
+++ b/test-suite/tests/valid/arrays-hetergeneous.toml
@@ -0,0 +1 @@
+mixed = [[1, 2], ["a", "b"], [1.1, 2.1]]
diff --git a/test-suite/tests/valid/arrays-nested.json b/test-suite/tests/valid/arrays-nested.json
new file mode 100644
index 0000000..d21920c
--- /dev/null
+++ b/test-suite/tests/valid/arrays-nested.json
@@ -0,0 +1,13 @@
+{
+ "nest": {
+ "type": "array",
+ "value": [
+ {"type": "array", "value": [
+ {"type": "string", "value": "a"}
+ ]},
+ {"type": "array", "value": [
+ {"type": "string", "value": "b"}
+ ]}
+ ]
+ }
+}
diff --git a/test-suite/tests/valid/arrays-nested.toml b/test-suite/tests/valid/arrays-nested.toml
new file mode 100644
index 0000000..ce33022
--- /dev/null
+++ b/test-suite/tests/valid/arrays-nested.toml
@@ -0,0 +1 @@
+nest = [["a"], ["b"]]
diff --git a/test-suite/tests/valid/arrays.json b/test-suite/tests/valid/arrays.json
new file mode 100644
index 0000000..58aedbc
--- /dev/null
+++ b/test-suite/tests/valid/arrays.json
@@ -0,0 +1,34 @@
+{
+ "ints": {
+ "type": "array",
+ "value": [
+ {"type": "integer", "value": "1"},
+ {"type": "integer", "value": "2"},
+ {"type": "integer", "value": "3"}
+ ]
+ },
+ "floats": {
+ "type": "array",
+ "value": [
+ {"type": "float", "value": "1.1"},
+ {"type": "float", "value": "2.1"},
+ {"type": "float", "value": "3.1"}
+ ]
+ },
+ "strings": {
+ "type": "array",
+ "value": [
+ {"type": "string", "value": "a"},
+ {"type": "string", "value": "b"},
+ {"type": "string", "value": "c"}
+ ]
+ },
+ "dates": {
+ "type": "array",
+ "value": [
+ {"type": "datetime", "value": "1987-07-05T17:45:00Z"},
+ {"type": "datetime", "value": "1979-05-27T07:32:00Z"},
+ {"type": "datetime", "value": "2006-06-01T11:00:00Z"}
+ ]
+ }
+}
diff --git a/test-suite/tests/valid/arrays.toml b/test-suite/tests/valid/arrays.toml
new file mode 100644
index 0000000..c435f57
--- /dev/null
+++ b/test-suite/tests/valid/arrays.toml
@@ -0,0 +1,8 @@
+ints = [1, 2, 3]
+floats = [1.1, 2.1, 3.1]
+strings = ["a", "b", "c"]
+dates = [
+ 1987-07-05T17:45:00Z,
+ 1979-05-27T07:32:00Z,
+ 2006-06-01T11:00:00Z,
+]
diff --git a/test-suite/tests/valid/bool.json b/test-suite/tests/valid/bool.json
new file mode 100644
index 0000000..ae368e9
--- /dev/null
+++ b/test-suite/tests/valid/bool.json
@@ -0,0 +1,4 @@
+{
+ "f": {"type": "bool", "value": "false"},
+ "t": {"type": "bool", "value": "true"}
+}
diff --git a/test-suite/tests/valid/bool.toml b/test-suite/tests/valid/bool.toml
new file mode 100644
index 0000000..a8a829b
--- /dev/null
+++ b/test-suite/tests/valid/bool.toml
@@ -0,0 +1,2 @@
+t = true
+f = false
diff --git a/test-suite/tests/valid/comments-everywhere.json b/test-suite/tests/valid/comments-everywhere.json
new file mode 100644
index 0000000..e69a2e9
--- /dev/null
+++ b/test-suite/tests/valid/comments-everywhere.json
@@ -0,0 +1,12 @@
+{
+ "group": {
+ "answer": {"type": "integer", "value": "42"},
+ "more": {
+ "type": "array",
+ "value": [
+ {"type": "integer", "value": "42"},
+ {"type": "integer", "value": "42"}
+ ]
+ }
+ }
+}
diff --git a/test-suite/tests/valid/comments-everywhere.toml b/test-suite/tests/valid/comments-everywhere.toml
new file mode 100644
index 0000000..3dca74c
--- /dev/null
+++ b/test-suite/tests/valid/comments-everywhere.toml
@@ -0,0 +1,24 @@
+# Top comment.
+ # Top comment.
+# Top comment.
+
+# [no-extraneous-groups-please]
+
+[group] # Comment
+answer = 42 # Comment
+# no-extraneous-keys-please = 999
+# In-between comment.
+more = [ # Comment
+ # What about multiple # comments?
+ # Can you handle it?
+ #
+ # Evil.
+# Evil.
+ 42, 42, # Comments within arrays are fun.
+ # What about multiple # comments?
+ # Can you handle it?
+ #
+ # Evil.
+# Evil.
+# ] Did I fool you?
+] # Hopefully not.
diff --git a/test-suite/tests/valid/datetime-truncate.json b/test-suite/tests/valid/datetime-truncate.json
new file mode 100644
index 0000000..8c512e1
--- /dev/null
+++ b/test-suite/tests/valid/datetime-truncate.json
@@ -0,0 +1,6 @@
+{
+ "bestdayever": {
+ "type": "datetime",
+ "value": "1987-07-05T17:45:00.123456789Z"
+ }
+}
diff --git a/test-suite/tests/valid/datetime-truncate.toml b/test-suite/tests/valid/datetime-truncate.toml
new file mode 100644
index 0000000..05de841
--- /dev/null
+++ b/test-suite/tests/valid/datetime-truncate.toml
@@ -0,0 +1 @@
+bestdayever = 1987-07-05T17:45:00.123456789012345Z
diff --git a/test-suite/tests/valid/datetime.json b/test-suite/tests/valid/datetime.json
new file mode 100644
index 0000000..2ca93ce
--- /dev/null
+++ b/test-suite/tests/valid/datetime.json
@@ -0,0 +1,3 @@
+{
+ "bestdayever": {"type": "datetime", "value": "1987-07-05T17:45:00Z"}
+}
diff --git a/test-suite/tests/valid/datetime.toml b/test-suite/tests/valid/datetime.toml
new file mode 100644
index 0000000..2e99340
--- /dev/null
+++ b/test-suite/tests/valid/datetime.toml
@@ -0,0 +1 @@
+bestdayever = 1987-07-05T17:45:00Z
diff --git a/test-suite/tests/valid/empty.json b/test-suite/tests/valid/empty.json
new file mode 100644
index 0000000..0967ef4
--- /dev/null
+++ b/test-suite/tests/valid/empty.json
@@ -0,0 +1 @@
+{}
diff --git a/test-suite/tests/valid/empty.toml b/test-suite/tests/valid/empty.toml
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test-suite/tests/valid/empty.toml
diff --git a/test-suite/tests/valid/example-bom.toml b/test-suite/tests/valid/example-bom.toml
new file mode 100644
index 0000000..fb5ac81
--- /dev/null
+++ b/test-suite/tests/valid/example-bom.toml
@@ -0,0 +1,5 @@
+best-day-ever = 1987-07-05T17:45:00Z
+
+[numtheory]
+boring = false
+perfection = [6, 28, 496]
diff --git a/test-suite/tests/valid/example-v0.3.0.json b/test-suite/tests/valid/example-v0.3.0.json
new file mode 100644
index 0000000..1d9dcb5
--- /dev/null
+++ b/test-suite/tests/valid/example-v0.3.0.json
@@ -0,0 +1 @@
+{"Array":{"key1":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key2":{"type":"array","value":[{"type":"string","value":"red"},{"type":"string","value":"yellow"},{"type":"string","value":"green"}]},"key3":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"integer","value":"3"},{"type":"integer","value":"4"},{"type":"integer","value":"5"}]}]},"key4":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"string","value":"a"},{"type":"string","value":"b"},{"type":"string","value":"c"}]}]},"key5":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key6":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}},"Booleans":{"False":{"type":"bool","value":"false"},"True":{"type":"bool","value":"true"}},"Datetime":{"key1":{"type":"datetime","value":"1979-05-27T07:32:00Z"}},"Float":{"both":{},"exponent":{},"fractional":{"key1":{"type":"float","value":"1.0"},"key2":{"type":"float","value":"3.1415"},"key3":{"type":"float","value":"-0.01"}}},"Integer":{"key1":{"type":"integer","value":"99"},"key2":{"type":"integer","value":"42"},"key3":{"type":"integer","value":"0"},"key4":{"type":"integer","value":"-17"}},"String":{"Literal":{"Multiline":{"lines":{"type":"string","value":"The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n"},"regex2":{"type":"string","value":"I [dw]on't need \\d{2} apples"}},"quoted":{"type":"string","value":"Tom \"Dubs\" Preston-Werner"},"regex":{"type":"string","value":"\u003c\\i\\c*\\s*\u003e"},"winpath":{"type":"string","value":"C:\\Users\\nodejs\\templates"},"winpath2":{"type":"string","value":"\\\\ServerX\\admin$\\system32\\"}},"Multiline":{"key1":{"type":"string","value":"One\nTwo"},"key2":{"type":"string","value":"One\nTwo"},"key3":{"type":"string","value":"One\nTwo"}},"Multilined":{"Singleline":{"key1":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key2":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key3":{"type":"string","value":"The quick brown fox jumps over the lazy dog."}}},"basic":{"type":"string","value":"I'm a string. \"You can quote me\". Name\u0009José\nLocation\u0009SF."}},"Table":{"key":{"type":"string","value":"value"}},"dog":{"tater":{"type":{"type":"string","value":"pug"}}},"fruit":[{"name":{"type":"string","value":"apple"},"physical":{"color":{"type":"string","value":"red"},"shape":{"type":"string","value":"round"}},"variety":[{"name":{"type":"string","value":"red delicious"}},{"name":{"type":"string","value":"granny smith"}}]},{"name":{"type":"string","value":"banana"},"variety":[{"name":{"type":"string","value":"plantain"}}]}],"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"x":{"y":{"z":{"w":{}}}}}
diff --git a/test-suite/tests/valid/example-v0.3.0.toml b/test-suite/tests/valid/example-v0.3.0.toml
new file mode 100644
index 0000000..76aacc3
--- /dev/null
+++ b/test-suite/tests/valid/example-v0.3.0.toml
@@ -0,0 +1,182 @@
+# Comment
+# I am a comment. Hear me roar. Roar.
+
+# Table
+# Tables (also known as hash tables or dictionaries) are collections of key/value pairs.
+# They appear in square brackets on a line by themselves.
+
+[Table]
+
+key = "value" # Yeah, you can do this.
+
+# Nested tables are denoted by table names with dots in them. Name your tables whatever crap you please, just don't use #, ., [ or ].
+
+[dog.tater]
+type = "pug"
+
+# You don't need to specify all the super-tables if you don't want to. TOML knows how to do it for you.
+
+# [x] you
+# [x.y] don't
+# [x.y.z] need these
+[x.y.z.w] # for this to work
+
+# String
+# There are four ways to express strings: basic, multi-line basic, literal, and multi-line literal.
+# All strings must contain only valid UTF-8 characters.
+
+[String]
+basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF."
+
+[String.Multiline]
+
+# The following strings are byte-for-byte equivalent:
+key1 = "One\nTwo"
+key2 = """One\nTwo"""
+key3 = """
+One
+Two"""
+
+[String.Multilined.Singleline]
+
+# The following strings are byte-for-byte equivalent:
+key1 = "The quick brown fox jumps over the lazy dog."
+
+key2 = """
+The quick brown \
+
+
+ fox jumps over \
+ the lazy dog."""
+
+key3 = """\
+ The quick brown \
+ fox jumps over \
+ the lazy dog.\
+ """
+
+[String.Literal]
+
+# What you see is what you get.
+winpath = 'C:\Users\nodejs\templates'
+winpath2 = '\\ServerX\admin$\system32\'
+quoted = 'Tom "Dubs" Preston-Werner'
+regex = '<\i\c*\s*>'
+
+
+[String.Literal.Multiline]
+
+regex2 = '''I [dw]on't need \d{2} apples'''
+lines = '''
+The first newline is
+trimmed in raw strings.
+ All other whitespace
+ is preserved.
+'''
+
+# Integer
+# Integers are whole numbers. Positive numbers may be prefixed with a plus sign.
+# Negative numbers are prefixed with a minus sign.
+
+[Integer]
+key1 = +99
+key2 = 42
+key3 = 0
+key4 = -17
+
+# Float
+# A float consists of an integer part (which may be prefixed with a plus or minus sign)
+# followed by a fractional part and/or an exponent part.
+
+[Float.fractional]
+
+# fractional
+key1 = +1.0
+key2 = 3.1415
+key3 = -0.01
+
+[Float.exponent]
+
+# exponent
+#key1 = 5e+22
+#key2 = 1e6
+#key3 = -2E-2
+
+[Float.both]
+
+# both
+#key = 6.626e-34
+
+# Boolean
+# Booleans are just the tokens you're used to. Always lowercase.
+
+[Booleans]
+True = true
+False = false
+
+# Datetime
+# Datetimes are RFC 3339 dates.
+
+[Datetime]
+key1 = 1979-05-27T07:32:00Z
+#key2 = 1979-05-27T00:32:00-07:00
+#key3 = 1979-05-27T00:32:00.999999-07:00
+
+# Array
+# Arrays are square brackets with other primitives inside. Whitespace is ignored. Elements are separated by commas. Data types may not be mixed.
+
+[Array]
+key1 = [ 1, 2, 3 ]
+key2 = [ "red", "yellow", "green" ]
+key3 = [ [ 1, 2 ], [3, 4, 5] ]
+key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok
+
+# Arrays can also be multiline. So in addition to ignoring whitespace, arrays also ignore newlines between the brackets.
+# Terminating commas are ok before the closing bracket.
+
+key5 = [
+ 1, 2, 3
+]
+key6 = [
+ 1,
+ 2, # this is ok
+]
+
+# Array of Tables
+# These can be expressed by using a table name in double brackets.
+# Each table with the same double bracketed name will be an element in the array.
+# The tables are inserted in the order encountered.
+
+[[products]]
+name = "Hammer"
+sku = 738594937
+
+[[products]]
+
+[[products]]
+name = "Nail"
+sku = 284758393
+color = "gray"
+
+
+# You can create nested arrays of tables as well.
+
+[[fruit]]
+ name = "apple"
+
+ [fruit.physical]
+ color = "red"
+ shape = "round"
+
+ [[fruit.variety]]
+ name = "red delicious"
+
+ [[fruit.variety]]
+ name = "granny smith"
+
+[[fruit]]
+ name = "banana"
+
+ [[fruit.variety]]
+ name = "plantain"
+
diff --git a/test-suite/tests/valid/example-v0.4.0.json b/test-suite/tests/valid/example-v0.4.0.json
new file mode 100644
index 0000000..d5cac34
--- /dev/null
+++ b/test-suite/tests/valid/example-v0.4.0.json
@@ -0,0 +1 @@
+{"array":{"key1":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key2":{"type":"array","value":[{"type":"string","value":"red"},{"type":"string","value":"yellow"},{"type":"string","value":"green"}]},"key3":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"integer","value":"3"},{"type":"integer","value":"4"},{"type":"integer","value":"5"}]}]},"key4":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"string","value":"a"},{"type":"string","value":"b"},{"type":"string","value":"c"}]}]},"key5":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key6":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}},"boolean":{"False":{"type":"bool","value":"false"},"True":{"type":"bool","value":"true"}},"datetime":{},"float":{"both":{},"exponent":{},"fractional":{"key1":{"type":"float","value":"1.0"},"key2":{"type":"float","value":"3.1415"},"key3":{"type":"float","value":"-0.01"}},"underscores":{}},"fruit":[{"name":{"type":"string","value":"apple"},"physical":{"color":{"type":"string","value":"red"},"shape":{"type":"string","value":"round"}},"variety":[{"name":{"type":"string","value":"red delicious"}},{"name":{"type":"string","value":"granny smith"}}]},{"name":{"type":"string","value":"banana"},"variety":[{"name":{"type":"string","value":"plantain"}}]}],"integer":{"key1":{"type":"integer","value":"99"},"key2":{"type":"integer","value":"42"},"key3":{"type":"integer","value":"0"},"key4":{"type":"integer","value":"-17"},"underscores":{"key1":{"type":"integer","value":"1000"},"key2":{"type":"integer","value":"5349221"},"key3":{"type":"integer","value":"12345"}}},"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"string":{"basic":{"basic":{"type":"string","value":"I'm a string. \"You can quote me\". Name\u0009José\nLocation\u0009SF."}},"literal":{"multiline":{"lines":{"type":"string","value":"The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n"},"regex2":{"type":"string","value":"I [dw]on't need \\d{2} apples"}},"quoted":{"type":"string","value":"Tom \"Dubs\" Preston-Werner"},"regex":{"type":"string","value":"\u003c\\i\\c*\\s*\u003e"},"winpath":{"type":"string","value":"C:\\Users\\nodejs\\templates"},"winpath2":{"type":"string","value":"\\\\ServerX\\admin$\\system32\\"}},"multiline":{"continued":{"key1":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key2":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key3":{"type":"string","value":"The quick brown fox jumps over the lazy dog."}},"key1":{"type":"string","value":"One\nTwo"},"key2":{"type":"string","value":"One\nTwo"},"key3":{"type":"string","value":"One\nTwo"}}},"table":{"inline":{"name":{"first":{"type":"string","value":"Tom"},"last":{"type":"string","value":"Preston-Werner"}},"point":{"x":{"type":"integer","value":"1"},"y":{"type":"integer","value":"2"}}},"key":{"type":"string","value":"value"},"subtable":{"key":{"type":"string","value":"another value"}}},"x":{"y":{"z":{"w":{}}}}}
diff --git a/test-suite/tests/valid/example-v0.4.0.toml b/test-suite/tests/valid/example-v0.4.0.toml
new file mode 100644
index 0000000..ffbcce0
--- /dev/null
+++ b/test-suite/tests/valid/example-v0.4.0.toml
@@ -0,0 +1,235 @@
+################################################################################
+## Comment
+
+# Speak your mind with the hash symbol. They go from the symbol to the end of
+# the line.
+
+
+################################################################################
+## Table
+
+# Tables (also known as hash tables or dictionaries) are collections of
+# key/value pairs. They appear in square brackets on a line by themselves.
+
+[table]
+
+key = "value" # Yeah, you can do this.
+
+# Nested tables are denoted by table names with dots in them. Name your tables
+# whatever crap you please, just don't use #, ., [ or ].
+
+[table.subtable]
+
+key = "another value"
+
+# You don't need to specify all the super-tables if you don't want to. TOML
+# knows how to do it for you.
+
+# [x] you
+# [x.y] don't
+# [x.y.z] need these
+[x.y.z.w] # for this to work
+
+
+################################################################################
+## Inline Table
+
+# Inline tables provide a more compact syntax for expressing tables. They are
+# especially useful for grouped data that can otherwise quickly become verbose.
+# Inline tables are enclosed in curly braces `{` and `}`. No newlines are
+# allowed between the curly braces unless they are valid within a value.
+
+[table.inline]
+
+name = { first = "Tom", last = "Preston-Werner" }
+point = { x = 1, y = 2 }
+
+
+################################################################################
+## String
+
+# There are four ways to express strings: basic, multi-line basic, literal, and
+# multi-line literal. All strings must contain only valid UTF-8 characters.
+
+[string.basic]
+
+basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF."
+
+[string.multiline]
+
+# The following strings are byte-for-byte equivalent:
+key1 = "One\nTwo"
+key2 = """One\nTwo"""
+key3 = """
+One
+Two"""
+
+[string.multiline.continued]
+
+# The following strings are byte-for-byte equivalent:
+key1 = "The quick brown fox jumps over the lazy dog."
+
+key2 = """
+The quick brown \
+
+
+ fox jumps over \
+ the lazy dog."""
+
+key3 = """\
+ The quick brown \
+ fox jumps over \
+ the lazy dog.\
+ """
+
+[string.literal]
+
+# What you see is what you get.
+winpath = 'C:\Users\nodejs\templates'
+winpath2 = '\\ServerX\admin$\system32\'
+quoted = 'Tom "Dubs" Preston-Werner'
+regex = '<\i\c*\s*>'
+
+
+[string.literal.multiline]
+
+regex2 = '''I [dw]on't need \d{2} apples'''
+lines = '''
+The first newline is
+trimmed in raw strings.
+ All other whitespace
+ is preserved.
+'''
+
+
+################################################################################
+## Integer
+
+# Integers are whole numbers. Positive numbers may be prefixed with a plus sign.
+# Negative numbers are prefixed with a minus sign.
+
+[integer]
+
+key1 = +99
+key2 = 42
+key3 = 0
+key4 = -17
+
+[integer.underscores]
+
+# For large numbers, you may use underscores to enhance readability. Each
+# underscore must be surrounded by at least one digit.
+key1 = 1_000
+key2 = 5_349_221
+key3 = 1_2_3_4_5 # valid but inadvisable
+
+
+################################################################################
+## Float
+
+# A float consists of an integer part (which may be prefixed with a plus or
+# minus sign) followed by a fractional part and/or an exponent part.
+
+[float.fractional]
+
+key1 = +1.0
+key2 = 3.1415
+key3 = -0.01
+
+[float.exponent]
+
+[float.both]
+
+[float.underscores]
+
+
+################################################################################
+## Boolean
+
+# Booleans are just the tokens you're used to. Always lowercase.
+
+[boolean]
+
+True = true
+False = false
+
+
+################################################################################
+## Datetime
+
+# Datetimes are RFC 3339 dates.
+
+[datetime]
+
+#key1 = 1979-05-27T07:32:00Z
+#key2 = 1979-05-27T00:32:00-07:00
+#key3 = 1979-05-27T00:32:00.999999-07:00
+
+
+################################################################################
+## Array
+
+# Arrays are square brackets with other primitives inside. Whitespace is
+# ignored. Elements are separated by commas. Data types may not be mixed.
+
+[array]
+
+key1 = [ 1, 2, 3 ]
+key2 = [ "red", "yellow", "green" ]
+key3 = [ [ 1, 2 ], [3, 4, 5] ]
+key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok
+
+# Arrays can also be multiline. So in addition to ignoring whitespace, arrays
+# also ignore newlines between the brackets. Terminating commas are ok before
+# the closing bracket.
+
+key5 = [
+ 1, 2, 3
+]
+key6 = [
+ 1,
+ 2, # this is ok
+]
+
+
+################################################################################
+## Array of Tables
+
+# These can be expressed by using a table name in double brackets. Each table
+# with the same double bracketed name will be an element in the array. The
+# tables are inserted in the order encountered.
+
+[[products]]
+
+name = "Hammer"
+sku = 738594937
+
+[[products]]
+
+[[products]]
+
+name = "Nail"
+sku = 284758393
+color = "gray"
+
+
+# You can create nested arrays of tables as well.
+
+[[fruit]]
+ name = "apple"
+
+ [fruit.physical]
+ color = "red"
+ shape = "round"
+
+ [[fruit.variety]]
+ name = "red delicious"
+
+ [[fruit.variety]]
+ name = "granny smith"
+
+[[fruit]]
+ name = "banana"
+
+ [[fruit.variety]]
+ name = "plantain"
diff --git a/test-suite/tests/valid/example.json b/test-suite/tests/valid/example.json
new file mode 100644
index 0000000..48aa907
--- /dev/null
+++ b/test-suite/tests/valid/example.json
@@ -0,0 +1,14 @@
+{
+ "best-day-ever": {"type": "datetime", "value": "1987-07-05T17:45:00Z"},
+ "numtheory": {
+ "boring": {"type": "bool", "value": "false"},
+ "perfection": {
+ "type": "array",
+ "value": [
+ {"type": "integer", "value": "6"},
+ {"type": "integer", "value": "28"},
+ {"type": "integer", "value": "496"}
+ ]
+ }
+ }
+}
diff --git a/test-suite/tests/valid/example.toml b/test-suite/tests/valid/example.toml
new file mode 100644
index 0000000..8cb02e0
--- /dev/null
+++ b/test-suite/tests/valid/example.toml
@@ -0,0 +1,5 @@
+best-day-ever = 1987-07-05T17:45:00Z
+
+[numtheory]
+boring = false
+perfection = [6, 28, 496]
diff --git a/test-suite/tests/valid/example2.json b/test-suite/tests/valid/example2.json
new file mode 100644
index 0000000..3249a97
--- /dev/null
+++ b/test-suite/tests/valid/example2.json
@@ -0,0 +1 @@
+{"clients":{"data":{"type":"array","value":[{"type":"array","value":[{"type":"string","value":"gamma"},{"type":"string","value":"delta"}]},{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}]},"hosts":{"type":"array","value":[{"type":"string","value":"alpha"},{"type":"string","value":"omega"}]}},"database":{"connection_max":{"type":"integer","value":"5000"},"enabled":{"type":"bool","value":"true"},"ports":{"type":"array","value":[{"type":"integer","value":"8001"},{"type":"integer","value":"8001"},{"type":"integer","value":"8002"}]},"server":{"type":"string","value":"192.168.1.1"}},"owner":{"bio":{"type":"string","value":"GitHub Cofounder \u0026 CEO\nLikes tater tots and beer."},"dob":{"type":"datetime","value":"1979-05-27T07:32:00Z"},"name":{"type":"string","value":"Tom Preston-Werner"},"organization":{"type":"string","value":"GitHub"}},"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"servers":{"alpha":{"dc":{"type":"string","value":"eqdc10"},"ip":{"type":"string","value":"10.0.0.1"}},"beta":{"country":{"type":"string","value":"中国"},"dc":{"type":"string","value":"eqdc10"},"ip":{"type":"string","value":"10.0.0.2"}}},"title":{"type":"string","value":"TOML Example"}}
diff --git a/test-suite/tests/valid/example2.toml b/test-suite/tests/valid/example2.toml
new file mode 100644
index 0000000..bc12c99
--- /dev/null
+++ b/test-suite/tests/valid/example2.toml
@@ -0,0 +1,47 @@
+# This is a TOML document. Boom.
+
+title = "TOML Example"
+
+[owner]
+name = "Tom Preston-Werner"
+organization = "GitHub"
+bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
+dob = 1979-05-27T07:32:00Z # First class dates? Why not?
+
+[database]
+server = "192.168.1.1"
+ports = [ 8001, 8001, 8002 ]
+connection_max = 5000
+enabled = true
+
+[servers]
+
+ # You can indent as you please. Tabs or spaces. TOML don't care.
+ [servers.alpha]
+ ip = "10.0.0.1"
+ dc = "eqdc10"
+
+ [servers.beta]
+ ip = "10.0.0.2"
+ dc = "eqdc10"
+ country = "中国" # This should be parsed as UTF-8
+
+[clients]
+data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
+
+# Line breaks are OK when inside arrays
+hosts = [
+ "alpha",
+ "omega"
+]
+
+# Products
+
+ [[products]]
+ name = "Hammer"
+ sku = 738594937
+
+ [[products]]
+ name = "Nail"
+ sku = 284758393
+ color = "gray"
diff --git a/test-suite/tests/valid/float.json b/test-suite/tests/valid/float.json
new file mode 100644
index 0000000..b8a2e97
--- /dev/null
+++ b/test-suite/tests/valid/float.json
@@ -0,0 +1,4 @@
+{
+ "pi": {"type": "float", "value": "3.14"},
+ "negpi": {"type": "float", "value": "-3.14"}
+}
diff --git a/test-suite/tests/valid/float.toml b/test-suite/tests/valid/float.toml
new file mode 100644
index 0000000..7c528d2
--- /dev/null
+++ b/test-suite/tests/valid/float.toml
@@ -0,0 +1,2 @@
+pi = 3.14
+negpi = -3.14
diff --git a/test-suite/tests/valid/hard_example.json b/test-suite/tests/valid/hard_example.json
new file mode 100644
index 0000000..9762e58
--- /dev/null
+++ b/test-suite/tests/valid/hard_example.json
@@ -0,0 +1 @@
+{"the":{"hard":{"another_test_string":{"type":"string","value":" Same thing, but with a string #"},"bit#":{"multi_line_array":{"type":"array","value":[{"type":"string","value":"]"}]},"what?":{"type":"string","value":"You don't think some user won't do that?"}},"harder_test_string":{"type":"string","value":" And when \"'s are in the string, along with # \""},"test_array":{"type":"array","value":[{"type":"string","value":"] "},{"type":"string","value":" # "}]},"test_array2":{"type":"array","value":[{"type":"string","value":"Test #11 ]proved that"},{"type":"string","value":"Experiment #9 was a success"}]}},"test_string":{"type":"string","value":"You'll hate me after this - #"}}}
diff --git a/test-suite/tests/valid/hard_example.toml b/test-suite/tests/valid/hard_example.toml
new file mode 100644
index 0000000..38856c8
--- /dev/null
+++ b/test-suite/tests/valid/hard_example.toml
@@ -0,0 +1,33 @@
+# Test file for TOML
+# Only this one tries to emulate a TOML file written by a user of the kind of parser writers probably hate
+# This part you'll really hate
+
+[the]
+test_string = "You'll hate me after this - #" # " Annoying, isn't it?
+
+ [the.hard]
+ test_array = [ "] ", " # "] # ] There you go, parse this!
+ test_array2 = [ "Test #11 ]proved that", "Experiment #9 was a success" ]
+ # You didn't think it'd be as easy as chucking out the last #, did you?
+ another_test_string = " Same thing, but with a string #"
+ harder_test_string = " And when \"'s are in the string, along with # \"" # "and comments are there too"
+ # Things will get harder
+
+ [the.hard."bit#"]
+ "what?" = "You don't think some user won't do that?"
+ multi_line_array = [
+ "]",
+ # ] Oh yes I did
+ ]
+
+# Each of the following keygroups/key value pairs should produce an error. Uncomment them to test
+
+#[error] if you didn't catch this, your parser is broken
+#string = "Anything other than tabs, spaces and newline after a keygroup or key value pair has ended should produce an error unless it is a comment" like this
+#array = [
+# "This might most likely happen in multiline arrays",
+# Like here,
+# "or here,
+# and here"
+# ] End of array comment, forgot the #
+#number = 3.14 pi <--again forgot the #
diff --git a/test-suite/tests/valid/implicit-and-explicit-after.json b/test-suite/tests/valid/implicit-and-explicit-after.json
new file mode 100644
index 0000000..374bd09
--- /dev/null
+++ b/test-suite/tests/valid/implicit-and-explicit-after.json
@@ -0,0 +1,10 @@
+{
+ "a": {
+ "better": {"type": "integer", "value": "43"},
+ "b": {
+ "c": {
+ "answer": {"type": "integer", "value": "42"}
+ }
+ }
+ }
+}
diff --git a/test-suite/tests/valid/implicit-and-explicit-after.toml b/test-suite/tests/valid/implicit-and-explicit-after.toml
new file mode 100644
index 0000000..c0e8865
--- /dev/null
+++ b/test-suite/tests/valid/implicit-and-explicit-after.toml
@@ -0,0 +1,5 @@
+[a.b.c]
+answer = 42
+
+[a]
+better = 43
diff --git a/test-suite/tests/valid/implicit-and-explicit-before.json b/test-suite/tests/valid/implicit-and-explicit-before.json
new file mode 100644
index 0000000..374bd09
--- /dev/null
+++ b/test-suite/tests/valid/implicit-and-explicit-before.json
@@ -0,0 +1,10 @@
+{
+ "a": {
+ "better": {"type": "integer", "value": "43"},
+ "b": {
+ "c": {
+ "answer": {"type": "integer", "value": "42"}
+ }
+ }
+ }
+}
diff --git a/test-suite/tests/valid/implicit-and-explicit-before.toml b/test-suite/tests/valid/implicit-and-explicit-before.toml
new file mode 100644
index 0000000..eee68ff
--- /dev/null
+++ b/test-suite/tests/valid/implicit-and-explicit-before.toml
@@ -0,0 +1,5 @@
+[a]
+better = 43
+
+[a.b.c]
+answer = 42
diff --git a/test-suite/tests/valid/implicit-groups.json b/test-suite/tests/valid/implicit-groups.json
new file mode 100644
index 0000000..fbae7fc
--- /dev/null
+++ b/test-suite/tests/valid/implicit-groups.json
@@ -0,0 +1,9 @@
+{
+ "a": {
+ "b": {
+ "c": {
+ "answer": {"type": "integer", "value": "42"}
+ }
+ }
+ }
+}
diff --git a/test-suite/tests/valid/implicit-groups.toml b/test-suite/tests/valid/implicit-groups.toml
new file mode 100644
index 0000000..b6333e4
--- /dev/null
+++ b/test-suite/tests/valid/implicit-groups.toml
@@ -0,0 +1,2 @@
+[a.b.c]
+answer = 42
diff --git a/test-suite/tests/valid/integer.json b/test-suite/tests/valid/integer.json
new file mode 100644
index 0000000..61985a1
--- /dev/null
+++ b/test-suite/tests/valid/integer.json
@@ -0,0 +1,4 @@
+{
+ "answer": {"type": "integer", "value": "42"},
+ "neganswer": {"type": "integer", "value": "-42"}
+}
diff --git a/test-suite/tests/valid/integer.toml b/test-suite/tests/valid/integer.toml
new file mode 100644
index 0000000..c4f6297
--- /dev/null
+++ b/test-suite/tests/valid/integer.toml
@@ -0,0 +1,2 @@
+answer = 42
+neganswer = -42
diff --git a/test-suite/tests/valid/key-equals-nospace.json b/test-suite/tests/valid/key-equals-nospace.json
new file mode 100644
index 0000000..1f8709a
--- /dev/null
+++ b/test-suite/tests/valid/key-equals-nospace.json
@@ -0,0 +1,3 @@
+{
+ "answer": {"type": "integer", "value": "42"}
+}
diff --git a/test-suite/tests/valid/key-equals-nospace.toml b/test-suite/tests/valid/key-equals-nospace.toml
new file mode 100644
index 0000000..560901c
--- /dev/null
+++ b/test-suite/tests/valid/key-equals-nospace.toml
@@ -0,0 +1 @@
+answer=42
diff --git a/test-suite/tests/valid/key-quote-newline.json b/test-suite/tests/valid/key-quote-newline.json
new file mode 100644
index 0000000..12473e4
--- /dev/null
+++ b/test-suite/tests/valid/key-quote-newline.json
@@ -0,0 +1,3 @@
+{
+ "\n": {"type": "integer", "value": "1"}
+}
diff --git a/test-suite/tests/valid/key-quote-newline.toml b/test-suite/tests/valid/key-quote-newline.toml
new file mode 100644
index 0000000..a2639bf
--- /dev/null
+++ b/test-suite/tests/valid/key-quote-newline.toml
@@ -0,0 +1 @@
+"\n" = 1
diff --git a/test-suite/tests/valid/key-space.json b/test-suite/tests/valid/key-space.json
new file mode 100644
index 0000000..9d1f769
--- /dev/null
+++ b/test-suite/tests/valid/key-space.json
@@ -0,0 +1,3 @@
+{
+ "a b": {"type": "integer", "value": "1"}
+}
diff --git a/test-suite/tests/valid/key-space.toml b/test-suite/tests/valid/key-space.toml
new file mode 100644
index 0000000..f4f36c4
--- /dev/null
+++ b/test-suite/tests/valid/key-space.toml
@@ -0,0 +1 @@
+"a b" = 1
diff --git a/test-suite/tests/valid/key-special-chars.json b/test-suite/tests/valid/key-special-chars.json
new file mode 100644
index 0000000..6550ebd
--- /dev/null
+++ b/test-suite/tests/valid/key-special-chars.json
@@ -0,0 +1,5 @@
+{
+ "~!@#$^&*()_+-`1234567890[]\\|/?><.,;:'": {
+ "type": "integer", "value": "1"
+ }
+}
diff --git a/test-suite/tests/valid/key-special-chars.toml b/test-suite/tests/valid/key-special-chars.toml
new file mode 100644
index 0000000..dc43625
--- /dev/null
+++ b/test-suite/tests/valid/key-special-chars.toml
@@ -0,0 +1 @@
+"~!@#$^&*()_+-`1234567890[]\\|/?><.,;:'" = 1
diff --git a/test-suite/tests/valid/key-with-pound.json b/test-suite/tests/valid/key-with-pound.json
new file mode 100644
index 0000000..ee39e1d
--- /dev/null
+++ b/test-suite/tests/valid/key-with-pound.json
@@ -0,0 +1,3 @@
+{
+ "key#name": {"type": "integer", "value": "5"}
+}
diff --git a/test-suite/tests/valid/key-with-pound.toml b/test-suite/tests/valid/key-with-pound.toml
new file mode 100644
index 0000000..65b766f
--- /dev/null
+++ b/test-suite/tests/valid/key-with-pound.toml
@@ -0,0 +1 @@
+"key#name" = 5
diff --git a/test-suite/tests/valid/long-float.json b/test-suite/tests/valid/long-float.json
new file mode 100644
index 0000000..8ceed47
--- /dev/null
+++ b/test-suite/tests/valid/long-float.json
@@ -0,0 +1,4 @@
+{
+ "longpi": {"type": "float", "value": "3.141592653589793"},
+ "neglongpi": {"type": "float", "value": "-3.141592653589793"}
+}
diff --git a/test-suite/tests/valid/long-float.toml b/test-suite/tests/valid/long-float.toml
new file mode 100644
index 0000000..9558ae4
--- /dev/null
+++ b/test-suite/tests/valid/long-float.toml
@@ -0,0 +1,2 @@
+longpi = 3.141592653589793
+neglongpi = -3.141592653589793
diff --git a/test-suite/tests/valid/long-integer.json b/test-suite/tests/valid/long-integer.json
new file mode 100644
index 0000000..16c331e
--- /dev/null
+++ b/test-suite/tests/valid/long-integer.json
@@ -0,0 +1,4 @@
+{
+ "answer": {"type": "integer", "value": "9223372036854775807"},
+ "neganswer": {"type": "integer", "value": "-9223372036854775808"}
+}
diff --git a/test-suite/tests/valid/long-integer.toml b/test-suite/tests/valid/long-integer.toml
new file mode 100644
index 0000000..424a13a
--- /dev/null
+++ b/test-suite/tests/valid/long-integer.toml
@@ -0,0 +1,2 @@
+answer = 9223372036854775807
+neganswer = -9223372036854775808
diff --git a/test-suite/tests/valid/multiline-string.json b/test-suite/tests/valid/multiline-string.json
new file mode 100644
index 0000000..075bf50
--- /dev/null
+++ b/test-suite/tests/valid/multiline-string.json
@@ -0,0 +1,30 @@
+{
+ "multiline_empty_one": {
+ "type": "string",
+ "value": ""
+ },
+ "multiline_empty_two": {
+ "type": "string",
+ "value": ""
+ },
+ "multiline_empty_three": {
+ "type": "string",
+ "value": ""
+ },
+ "multiline_empty_four": {
+ "type": "string",
+ "value": ""
+ },
+ "equivalent_one": {
+ "type": "string",
+ "value": "The quick brown fox jumps over the lazy dog."
+ },
+ "equivalent_two": {
+ "type": "string",
+ "value": "The quick brown fox jumps over the lazy dog."
+ },
+ "equivalent_three": {
+ "type": "string",
+ "value": "The quick brown fox jumps over the lazy dog."
+ }
+}
diff --git a/test-suite/tests/valid/multiline-string.toml b/test-suite/tests/valid/multiline-string.toml
new file mode 100644
index 0000000..15b1143
--- /dev/null
+++ b/test-suite/tests/valid/multiline-string.toml
@@ -0,0 +1,23 @@
+multiline_empty_one = """"""
+multiline_empty_two = """
+"""
+multiline_empty_three = """\
+ """
+multiline_empty_four = """\
+ \
+ \
+ """
+
+equivalent_one = "The quick brown fox jumps over the lazy dog."
+equivalent_two = """
+The quick brown \
+
+
+ fox jumps over \
+ the lazy dog."""
+
+equivalent_three = """\
+ The quick brown \
+ fox jumps over \
+ the lazy dog.\
+ """
diff --git a/test-suite/tests/valid/raw-multiline-string.json b/test-suite/tests/valid/raw-multiline-string.json
new file mode 100644
index 0000000..b43cce5
--- /dev/null
+++ b/test-suite/tests/valid/raw-multiline-string.json
@@ -0,0 +1,14 @@
+{
+ "oneline": {
+ "type": "string",
+ "value": "This string has a ' quote character."
+ },
+ "firstnl": {
+ "type": "string",
+ "value": "This string has a ' quote character."
+ },
+ "multiline": {
+ "type": "string",
+ "value": "This string\nhas ' a quote character\nand more than\none newline\nin it."
+ }
+}
diff --git a/test-suite/tests/valid/raw-multiline-string.toml b/test-suite/tests/valid/raw-multiline-string.toml
new file mode 100644
index 0000000..8094c03
--- /dev/null
+++ b/test-suite/tests/valid/raw-multiline-string.toml
@@ -0,0 +1,9 @@
+oneline = '''This string has a ' quote character.'''
+firstnl = '''
+This string has a ' quote character.'''
+multiline = '''
+This string
+has ' a quote character
+and more than
+one newline
+in it.'''
diff --git a/test-suite/tests/valid/raw-string.json b/test-suite/tests/valid/raw-string.json
new file mode 100644
index 0000000..693ab9b
--- /dev/null
+++ b/test-suite/tests/valid/raw-string.json
@@ -0,0 +1,30 @@
+{
+ "backspace": {
+ "type": "string",
+ "value": "This string has a \\b backspace character."
+ },
+ "tab": {
+ "type": "string",
+ "value": "This string has a \\t tab character."
+ },
+ "newline": {
+ "type": "string",
+ "value": "This string has a \\n new line character."
+ },
+ "formfeed": {
+ "type": "string",
+ "value": "This string has a \\f form feed character."
+ },
+ "carriage": {
+ "type": "string",
+ "value": "This string has a \\r carriage return character."
+ },
+ "slash": {
+ "type": "string",
+ "value": "This string has a \\/ slash character."
+ },
+ "backslash": {
+ "type": "string",
+ "value": "This string has a \\\\ backslash character."
+ }
+}
diff --git a/test-suite/tests/valid/raw-string.toml b/test-suite/tests/valid/raw-string.toml
new file mode 100644
index 0000000..92acd25
--- /dev/null
+++ b/test-suite/tests/valid/raw-string.toml
@@ -0,0 +1,7 @@
+backspace = 'This string has a \b backspace character.'
+tab = 'This string has a \t tab character.'
+newline = 'This string has a \n new line character.'
+formfeed = 'This string has a \f form feed character.'
+carriage = 'This string has a \r carriage return character.'
+slash = 'This string has a \/ slash character.'
+backslash = 'This string has a \\ backslash character.'
diff --git a/test-suite/tests/valid/string-empty.json b/test-suite/tests/valid/string-empty.json
new file mode 100644
index 0000000..6c26d69
--- /dev/null
+++ b/test-suite/tests/valid/string-empty.json
@@ -0,0 +1,6 @@
+{
+ "answer": {
+ "type": "string",
+ "value": ""
+ }
+}
diff --git a/test-suite/tests/valid/string-empty.toml b/test-suite/tests/valid/string-empty.toml
new file mode 100644
index 0000000..e37e681
--- /dev/null
+++ b/test-suite/tests/valid/string-empty.toml
@@ -0,0 +1 @@
+answer = ""
diff --git a/test-suite/tests/valid/string-escapes.json b/test-suite/tests/valid/string-escapes.json
new file mode 100644
index 0000000..62dac51
--- /dev/null
+++ b/test-suite/tests/valid/string-escapes.json
@@ -0,0 +1,50 @@
+{
+ "backspace": {
+ "type": "string",
+ "value": "This string has a \u0008 backspace character."
+ },
+ "tab": {
+ "type": "string",
+ "value": "This string has a \u0009 tab character."
+ },
+ "newline": {
+ "type": "string",
+ "value": "This string has a \u000A new line character."
+ },
+ "formfeed": {
+ "type": "string",
+ "value": "This string has a \u000C form feed character."
+ },
+ "carriage": {
+ "type": "string",
+ "value": "This string has a \u000D carriage return character."
+ },
+ "quote": {
+ "type": "string",
+ "value": "This string has a \u0022 quote character."
+ },
+ "slash": {
+ "type": "string",
+ "value": "This string has a \u002F slash character."
+ },
+ "backslash": {
+ "type": "string",
+ "value": "This string has a \u005C backslash character."
+ },
+ "notunicode1": {
+ "type": "string",
+ "value": "This string does not have a unicode \\u escape."
+ },
+ "notunicode2": {
+ "type": "string",
+ "value": "This string does not have a unicode \u005Cu escape."
+ },
+ "notunicode3": {
+ "type": "string",
+ "value": "This string does not have a unicode \\u0075 escape."
+ },
+ "notunicode4": {
+ "type": "string",
+ "value": "This string does not have a unicode \\\u0075 escape."
+ }
+}
diff --git a/test-suite/tests/valid/string-escapes.toml b/test-suite/tests/valid/string-escapes.toml
new file mode 100644
index 0000000..c5d4954
--- /dev/null
+++ b/test-suite/tests/valid/string-escapes.toml
@@ -0,0 +1,12 @@
+backspace = "This string has a \b backspace character."
+tab = "This string has a \t tab character."
+newline = "This string has a \n new line character."
+formfeed = "This string has a \f form feed character."
+carriage = "This string has a \r carriage return character."
+quote = "This string has a \" quote character."
+slash = "This string has a / slash character."
+backslash = "This string has a \\ backslash character."
+notunicode1 = "This string does not have a unicode \\u escape."
+notunicode2 = "This string does not have a unicode \u005Cu escape."
+notunicode3 = "This string does not have a unicode \\u0075 escape."
+notunicode4 = "This string does not have a unicode \\\u0075 escape."
diff --git a/test-suite/tests/valid/string-simple.json b/test-suite/tests/valid/string-simple.json
new file mode 100644
index 0000000..2e05f99
--- /dev/null
+++ b/test-suite/tests/valid/string-simple.json
@@ -0,0 +1,6 @@
+{
+ "answer": {
+ "type": "string",
+ "value": "You are not drinking enough whisky."
+ }
+}
diff --git a/test-suite/tests/valid/string-simple.toml b/test-suite/tests/valid/string-simple.toml
new file mode 100644
index 0000000..e17ade6
--- /dev/null
+++ b/test-suite/tests/valid/string-simple.toml
@@ -0,0 +1 @@
+answer = "You are not drinking enough whisky."
diff --git a/test-suite/tests/valid/string-with-pound.json b/test-suite/tests/valid/string-with-pound.json
new file mode 100644
index 0000000..33cdc9c
--- /dev/null
+++ b/test-suite/tests/valid/string-with-pound.json
@@ -0,0 +1,7 @@
+{
+ "pound": {"type": "string", "value": "We see no # comments here."},
+ "poundcomment": {
+ "type": "string",
+ "value": "But there are # some comments here."
+ }
+}
diff --git a/test-suite/tests/valid/string-with-pound.toml b/test-suite/tests/valid/string-with-pound.toml
new file mode 100644
index 0000000..5fd8746
--- /dev/null
+++ b/test-suite/tests/valid/string-with-pound.toml
@@ -0,0 +1,2 @@
+pound = "We see no # comments here."
+poundcomment = "But there are # some comments here." # Did I # mess you up?
diff --git a/test-suite/tests/valid/table-array-implicit.json b/test-suite/tests/valid/table-array-implicit.json
new file mode 100644
index 0000000..32e4640
--- /dev/null
+++ b/test-suite/tests/valid/table-array-implicit.json
@@ -0,0 +1,7 @@
+{
+ "albums": {
+ "songs": [
+ {"name": {"type": "string", "value": "Glory Days"}}
+ ]
+ }
+}
diff --git a/test-suite/tests/valid/table-array-implicit.toml b/test-suite/tests/valid/table-array-implicit.toml
new file mode 100644
index 0000000..3157ac9
--- /dev/null
+++ b/test-suite/tests/valid/table-array-implicit.toml
@@ -0,0 +1,2 @@
+[[albums.songs]]
+name = "Glory Days"
diff --git a/test-suite/tests/valid/table-array-many.json b/test-suite/tests/valid/table-array-many.json
new file mode 100644
index 0000000..84df2da
--- /dev/null
+++ b/test-suite/tests/valid/table-array-many.json
@@ -0,0 +1,16 @@
+{
+ "people": [
+ {
+ "first_name": {"type": "string", "value": "Bruce"},
+ "last_name": {"type": "string", "value": "Springsteen"}
+ },
+ {
+ "first_name": {"type": "string", "value": "Eric"},
+ "last_name": {"type": "string", "value": "Clapton"}
+ },
+ {
+ "first_name": {"type": "string", "value": "Bob"},
+ "last_name": {"type": "string", "value": "Seger"}
+ }
+ ]
+}
diff --git a/test-suite/tests/valid/table-array-many.toml b/test-suite/tests/valid/table-array-many.toml
new file mode 100644
index 0000000..46062be
--- /dev/null
+++ b/test-suite/tests/valid/table-array-many.toml
@@ -0,0 +1,11 @@
+[[people]]
+first_name = "Bruce"
+last_name = "Springsteen"
+
+[[people]]
+first_name = "Eric"
+last_name = "Clapton"
+
+[[people]]
+first_name = "Bob"
+last_name = "Seger"
diff --git a/test-suite/tests/valid/table-array-nest-no-keys.json b/test-suite/tests/valid/table-array-nest-no-keys.json
new file mode 100644
index 0000000..7537b1a
--- /dev/null
+++ b/test-suite/tests/valid/table-array-nest-no-keys.json
@@ -0,0 +1,14 @@
+{
+ "albums": [
+ {
+ "songs": [{}, {}]
+ }
+ ],
+ "artists": [
+ {
+ "home": {
+ "address": {}
+ }
+ }
+ ]
+}
diff --git a/test-suite/tests/valid/table-array-nest-no-keys.toml b/test-suite/tests/valid/table-array-nest-no-keys.toml
new file mode 100644
index 0000000..ad6eb10
--- /dev/null
+++ b/test-suite/tests/valid/table-array-nest-no-keys.toml
@@ -0,0 +1,6 @@
+[[ albums ]]
+ [[ albums.songs ]]
+ [[ albums.songs ]]
+
+[[ artists ]]
+ [ artists.home.address ]
diff --git a/test-suite/tests/valid/table-array-nest.json b/test-suite/tests/valid/table-array-nest.json
new file mode 100644
index 0000000..c117afa
--- /dev/null
+++ b/test-suite/tests/valid/table-array-nest.json
@@ -0,0 +1,18 @@
+{
+ "albums": [
+ {
+ "name": {"type": "string", "value": "Born to Run"},
+ "songs": [
+ {"name": {"type": "string", "value": "Jungleland"}},
+ {"name": {"type": "string", "value": "Meeting Across the River"}}
+ ]
+ },
+ {
+ "name": {"type": "string", "value": "Born in the USA"},
+ "songs": [
+ {"name": {"type": "string", "value": "Glory Days"}},
+ {"name": {"type": "string", "value": "Dancing in the Dark"}}
+ ]
+ }
+ ]
+}
diff --git a/test-suite/tests/valid/table-array-nest.toml b/test-suite/tests/valid/table-array-nest.toml
new file mode 100644
index 0000000..d659a3d
--- /dev/null
+++ b/test-suite/tests/valid/table-array-nest.toml
@@ -0,0 +1,17 @@
+[[albums]]
+name = "Born to Run"
+
+ [[albums.songs]]
+ name = "Jungleland"
+
+ [[albums.songs]]
+ name = "Meeting Across the River"
+
+[[albums]]
+name = "Born in the USA"
+
+ [[albums.songs]]
+ name = "Glory Days"
+
+ [[albums.songs]]
+ name = "Dancing in the Dark"
diff --git a/test-suite/tests/valid/table-array-one.json b/test-suite/tests/valid/table-array-one.json
new file mode 100644
index 0000000..d75faae
--- /dev/null
+++ b/test-suite/tests/valid/table-array-one.json
@@ -0,0 +1,8 @@
+{
+ "people": [
+ {
+ "first_name": {"type": "string", "value": "Bruce"},
+ "last_name": {"type": "string", "value": "Springsteen"}
+ }
+ ]
+}
diff --git a/test-suite/tests/valid/table-array-one.toml b/test-suite/tests/valid/table-array-one.toml
new file mode 100644
index 0000000..cd7e1b6
--- /dev/null
+++ b/test-suite/tests/valid/table-array-one.toml
@@ -0,0 +1,3 @@
+[[people]]
+first_name = "Bruce"
+last_name = "Springsteen"
diff --git a/test-suite/tests/valid/table-empty.json b/test-suite/tests/valid/table-empty.json
new file mode 100644
index 0000000..6f3873a
--- /dev/null
+++ b/test-suite/tests/valid/table-empty.json
@@ -0,0 +1,3 @@
+{
+ "a": {}
+}
diff --git a/test-suite/tests/valid/table-empty.toml b/test-suite/tests/valid/table-empty.toml
new file mode 100644
index 0000000..8bb6a0a
--- /dev/null
+++ b/test-suite/tests/valid/table-empty.toml
@@ -0,0 +1 @@
+[a]
diff --git a/test-suite/tests/valid/table-multi-empty.json b/test-suite/tests/valid/table-multi-empty.json
new file mode 100644
index 0000000..a6e17c9
--- /dev/null
+++ b/test-suite/tests/valid/table-multi-empty.json
@@ -0,0 +1,5 @@
+{
+ "a": { "b": {} },
+ "b": {},
+ "c": { "a": {} }
+}
diff --git a/test-suite/tests/valid/table-multi-empty.toml b/test-suite/tests/valid/table-multi-empty.toml
new file mode 100644
index 0000000..2266ed2
--- /dev/null
+++ b/test-suite/tests/valid/table-multi-empty.toml
@@ -0,0 +1,5 @@
+[a]
+[a.b]
+[b]
+[c]
+[c.a]
diff --git a/test-suite/tests/valid/table-sub-empty.json b/test-suite/tests/valid/table-sub-empty.json
new file mode 100644
index 0000000..9787770
--- /dev/null
+++ b/test-suite/tests/valid/table-sub-empty.json
@@ -0,0 +1,3 @@
+{
+ "a": { "b": {} }
+}
diff --git a/test-suite/tests/valid/table-sub-empty.toml b/test-suite/tests/valid/table-sub-empty.toml
new file mode 100644
index 0000000..70b7fe1
--- /dev/null
+++ b/test-suite/tests/valid/table-sub-empty.toml
@@ -0,0 +1,2 @@
+[a]
+[a.b]
diff --git a/test-suite/tests/valid/table-whitespace.json b/test-suite/tests/valid/table-whitespace.json
new file mode 100644
index 0000000..3a73ec8
--- /dev/null
+++ b/test-suite/tests/valid/table-whitespace.json
@@ -0,0 +1,3 @@
+{
+ "valid key": {}
+}
diff --git a/test-suite/tests/valid/table-whitespace.toml b/test-suite/tests/valid/table-whitespace.toml
new file mode 100644
index 0000000..daf881d
--- /dev/null
+++ b/test-suite/tests/valid/table-whitespace.toml
@@ -0,0 +1 @@
+["valid key"]
diff --git a/test-suite/tests/valid/table-with-pound.json b/test-suite/tests/valid/table-with-pound.json
new file mode 100644
index 0000000..5e594e4
--- /dev/null
+++ b/test-suite/tests/valid/table-with-pound.json
@@ -0,0 +1,5 @@
+{
+ "key#group": {
+ "answer": {"type": "integer", "value": "42"}
+ }
+}
diff --git a/test-suite/tests/valid/table-with-pound.toml b/test-suite/tests/valid/table-with-pound.toml
new file mode 100644
index 0000000..33f2c4f
--- /dev/null
+++ b/test-suite/tests/valid/table-with-pound.toml
@@ -0,0 +1,2 @@
+["key#group"]
+answer = 42
diff --git a/test-suite/tests/valid/unicode-escape.json b/test-suite/tests/valid/unicode-escape.json
new file mode 100644
index 0000000..32948c6
--- /dev/null
+++ b/test-suite/tests/valid/unicode-escape.json
@@ -0,0 +1,5 @@
+{
+ "answer1": {"type": "string", "value": "\u000B"},
+ "answer4": {"type": "string", "value": "\u03B4α"},
+ "answer8": {"type": "string", "value": "\u03B4β"}
+}
diff --git a/test-suite/tests/valid/unicode-escape.toml b/test-suite/tests/valid/unicode-escape.toml
new file mode 100644
index 0000000..c0d5a25
--- /dev/null
+++ b/test-suite/tests/valid/unicode-escape.toml
@@ -0,0 +1,3 @@
+answer1 = "\u000B"
+answer4 = "\u03B4α"
+answer8 = "\U000003B4β"
diff --git a/test-suite/tests/valid/unicode-literal.json b/test-suite/tests/valid/unicode-literal.json
new file mode 100644
index 0000000..00aa2f8
--- /dev/null
+++ b/test-suite/tests/valid/unicode-literal.json
@@ -0,0 +1,3 @@
+{
+ "answer": {"type": "string", "value": "δ"}
+}
diff --git a/test-suite/tests/valid/unicode-literal.toml b/test-suite/tests/valid/unicode-literal.toml
new file mode 100644
index 0000000..c65723c
--- /dev/null
+++ b/test-suite/tests/valid/unicode-literal.toml
@@ -0,0 +1 @@
+answer = "δ"