about summary refs log tree commit diff
path: root/test-suite/tests
diff options
context:
space:
mode:
author	est31 <est31@users.noreply.github.com>	2019-10-29 15:10:15 +0100
committer	Alex Crichton <alex@alexcrichton.com>	2019-10-29 09:10:15 -0500
commit	4673cfd79788c74c5bd6bcabe564c2dfcbd7c553 (patch)
tree	ee7f716b45f136725eabba6b5cd9f445ea957e7a /test-suite/tests
parent	4b05a9d34938184d5a045cc33417f57bdf2e14a8 (diff)
download	milf-rs-4673cfd79788c74c5bd6bcabe564c2dfcbd7c553.tar.gz
	milf-rs-4673cfd79788c74c5bd6bcabe564c2dfcbd7c553.zip
Replace the test added by #349 with a bench (#351)
CI environments can be noisy and while the test worked great locally on my machine, it didn't on the CI environment. This replaces the test with a (manually tracked) benchmark. As per https://github.com/alexcrichton/toml-rs/pull/349#issuecomment-546998173
Diffstat (limited to 'test-suite/tests')
-rw-r--r--test-suite/tests/linear.rs37
1 file changed, 0 insertions(+), 37 deletions(-)
diff --git a/test-suite/tests/linear.rs b/test-suite/tests/linear.rs
deleted file mode 100644
index dab51f9..0000000
--- a/test-suite/tests/linear.rs
+++ /dev/null
@@ -1,37 +0,0 @@
-use std::time::{Duration, Instant};
-use toml::Value;
-
-const TOLERANCE: f64 = 2.0;
-
-fn measure_time(entries: usize, f: impl Fn(usize) -> String) -> Duration {
- let start = Instant::now();
- let mut s = String::new();
- for i in 0..entries {
- s += &f(i);
- s += "entry = 42\n"
- }
- s.parse::<Value>().unwrap();
- Instant::now() - start
-}
-
-#[test]
-fn linear_increase_map() {
- let time_1 = measure_time(100, |i| format!("[header_no_{}]\n", i));
- let time_4 = measure_time(400, |i| format!("[header_no_{}]\n", i));
- dbg!(time_1, time_4);
- // Now ensure that the deserialization time has increased linearly
- // (within a tolerance interval) instead of, say, quadratically
- assert!(time_4 > time_1.mul_f64(4.0 - TOLERANCE));
- assert!(time_4 < time_1.mul_f64(4.0 + TOLERANCE));
-}
-
-#[test]
-fn linear_increase_array() {
- let time_1 = measure_time(100, |i| format!("[[header_no_{}]]\n", i));
- let time_4 = measure_time(400, |i| format!("[[header_no_{}]]\n", i));
- dbg!(time_1, time_4);
- // Now ensure that the deserialization time has increased linearly
- // (within a tolerance interval) instead of, say, quadratically
- assert!(time_4 > time_1.mul_f64(4.0 - TOLERANCE));
- assert!(time_4 < time_1.mul_f64(4.0 + TOLERANCE));
-}