diff --git a/Cargo.toml b/Cargo.toml
index 7f87ff0d..c1482b4a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -189,6 +189,9 @@ version = "0.41"
 [target.'cfg(unix)'.dependencies]
 nix = { version = "0.30", features = ["resource"] }
 
+[dev-dependencies]
+tempfile = "3.2"
+
 [features]
 backend_rocksdb = ["rocksdb"]
 backend_sqlite = ["sqlite"]
diff --git a/src/database/abstraction.rs b/src/database/abstraction.rs
index a1913b23..fd0d6fa0 100644
--- a/src/database/abstraction.rs
+++ b/src/database/abstraction.rs
@@ -12,6 +12,9 @@ pub mod rocksdb;
 #[cfg(any(feature = "sqlite", feature = "rocksdb"))]
 pub mod watchers;
 
+#[cfg(test)]
+mod tests;
+
 pub trait KeyValueDatabaseEngine: Send + Sync {
     fn open(config: &Config) -> Result<Self>
     where
diff --git a/src/database/abstraction/tests.rs b/src/database/abstraction/tests.rs
new file mode 100644
index 00000000..b9a565ea
--- /dev/null
+++ b/src/database/abstraction/tests.rs
@@ -0,0 +1,397 @@
+use crate::database::{
+    abstraction::{KeyValueDatabaseEngine, KvTree},
+    Config,
+};
+use std::sync::Arc;
+use tempfile::{Builder, TempDir};
+
+// Minimal parsed Config pointing at a throwaway database directory.
+fn empty_config(database_path: &str) -> Config {
+    use figment::providers::{Format, Toml};
+    Toml::from_str(&format!(
+        r#"
+server_name = "test"
+database_path = "{}"
+"#,
+        database_path
+    ))
+    .unwrap()
+}
+
+/// Make sure to keep the reference of the tree returned values for
+/// the length of the test, to avoid early cleanups that may create test issues
+fn open_tree<T>(test_name: &str) -> (Arc<dyn KvTree>, impl KeyValueDatabaseEngine, TempDir)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let db_folder = Builder::new().prefix(test_name).tempdir().unwrap();
+    let config = empty_config(db_folder.path().to_str().unwrap());
+    let instance = Arc::<T>::open(&config).unwrap();
+    let tree = instance.open_tree("test").unwrap();
+    (tree, instance, db_folder)
+}
+
+// Round-trip a single key/value pair through insert + get.
+fn insert_get<T>(name: &str)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let (tree, _inst, _temp_dir) = open_tree::<T>(name);
+    let key = "key".as_bytes();
+    let value = "value".as_bytes();
+    tree.insert(key, value).unwrap();
+    let read = tree.get(key).unwrap();
+    assert_eq!(read, Some(value.to_owned()));
+}
+
+// Inserting the same key twice must replace the stored value.
+fn insert_get_replace<T>(name: &str)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let (tree, _inst, _temp_dir) = open_tree::<T>(name);
+    let key = "key".as_bytes();
+    let value = "value".as_bytes();
+    tree.insert(key, value).unwrap();
+    let read = tree.get(key).unwrap();
+    assert_eq!(read, Some(value.to_owned()));
+
+    let value1 = "value1".as_bytes();
+    tree.insert(key, value1).unwrap();
+    let read = tree.get(key).unwrap();
+    assert_eq!(read, Some(value1.to_owned()));
+}
+
+// Removal deletes the key; removing an absent key must not error.
+fn insert_get_remove<T>(name: &str)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let (tree, _inst, _temp_dir) = open_tree::<T>(name);
+    let key = "key".as_bytes();
+    let value = "value".as_bytes();
+    tree.insert(key, value).unwrap();
+    let read = tree.get(key).unwrap();
+    assert_eq!(read, Some(value.to_owned()));
+    tree.remove(key).unwrap();
+    let read = tree.get(key).unwrap();
+    assert_eq!(read, None);
+    // Remove of not existing key should run seamless
+    tree.remove(key).unwrap();
+}
+
+// insert_batch must persist every pair in the batch.
+fn batch_insert_get<T>(name: &str)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let (tree, _inst, _temp_dir) = open_tree::<T>(name);
+    let key = "key".as_bytes();
+    let value = "value".as_bytes();
+    let key1 = "key1".as_bytes();
+    let value1 = "value1".as_bytes();
+    let key2 = "key2".as_bytes();
+    let value2 = "value2".as_bytes();
+    tree.insert_batch(
+        &mut vec![
+            (key.to_owned(), value.to_owned()),
+            (key1.to_owned(), value1.to_owned()),
+            (key2.to_owned(), value2.to_owned()),
+        ]
+        .into_iter(),
+    )
+    .unwrap();
+    let read = tree.get(key).unwrap();
+    assert_eq!(read, Some(value.to_owned()));
+    let read = tree.get(key1).unwrap();
+    assert_eq!(read, Some(value1.to_owned()));
+    let read = tree.get(key2).unwrap();
+    assert_eq!(read, Some(value2.to_owned()));
+}
+
+// Full iteration yields all pairs in ascending key order.
+fn insert_iter<T>(name: &str)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let (tree, _inst, _temp_dir) = open_tree::<T>(name);
+    let key = "key".as_bytes();
+    let value = "value".as_bytes();
+    tree.insert(key, value).unwrap();
+    let key1 = "key1".as_bytes();
+    let value1 = "value1".as_bytes();
+    tree.insert(key1, value1).unwrap();
+    let key2 = "key2".as_bytes();
+    let value2 = "value2".as_bytes();
+    tree.insert(key2, value2).unwrap();
+    let mut iter = tree.iter();
+    assert_eq!(iter.next(), Some((key.to_owned(), value.to_owned())));
+    assert_eq!(iter.next(), Some((key1.to_owned(), value1.to_owned())));
+    assert_eq!(iter.next(), Some((key2.to_owned(), value2.to_owned())));
+    assert_eq!(iter.next(), None);
+}
+
+// iter_from starts at the given key; the `backwards` flag reverses direction.
+fn insert_iter_from<T>(name: &str)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let (tree, _inst, _temp_dir) = open_tree::<T>(name);
+    let key = "key".as_bytes();
+    let value = "value".as_bytes();
+    tree.insert(key, value).unwrap();
+    let key1 = "key1".as_bytes();
+    let value1 = "value1".as_bytes();
+    tree.insert(key1, value1).unwrap();
+    let key2 = "key2".as_bytes();
+    let value2 = "value2".as_bytes();
+    tree.insert(key2, value2).unwrap();
+    let mut iter = tree.iter_from(key1, false);
+    assert_eq!(iter.next(), Some((key1.to_owned(), value1.to_owned())));
+    assert_eq!(iter.next(), Some((key2.to_owned(), value2.to_owned())));
+    assert_eq!(iter.next(), None);
+    let mut iter = tree.iter_from(key1, true);
+    assert_eq!(iter.next(), Some((key1.to_owned(), value1.to_owned())));
+    assert_eq!(iter.next(), Some((key.to_owned(), value.to_owned())));
+    assert_eq!(iter.next(), None);
+}
+
+// scan_prefix yields only keys sharing the prefix ("key1" and "key11",
+// but not the shorter "key" or the sibling "key2").
+fn insert_iter_prefix<T>(name: &str)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let (tree, _inst, _temp_dir) = open_tree::<T>(name);
+    let key = "key".as_bytes();
+    let value = "value".as_bytes();
+    tree.insert(key, value).unwrap();
+    let key1 = "key1".as_bytes();
+    let value1 = "value1".as_bytes();
+    tree.insert(key1, value1).unwrap();
+    let key11 = "key11".as_bytes();
+    let value11 = "value11".as_bytes();
+    tree.insert(key11, value11).unwrap();
+    let key2 = "key2".as_bytes();
+    let value2 = "value2".as_bytes();
+    tree.insert(key2, value2).unwrap();
+    let mut iter = tree.scan_prefix(key1.to_owned());
+    assert_eq!(iter.next(), Some((key1.to_owned(), value1.to_owned())));
+    assert_eq!(iter.next(), Some((key11.to_owned(), value11.to_owned())));
+    assert_eq!(iter.next(), None);
+}
+
+// clear() must remove every entry from the tree.
+fn insert_clear<T>(name: &str)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let (tree, _inst, _temp_dir) = open_tree::<T>(name);
+    let key = "key".as_bytes();
+    let value = "value".as_bytes();
+    tree.insert(key, value).unwrap();
+    let key1 = "key1".as_bytes();
+    let value1 = "value1".as_bytes();
+    tree.insert(key1, value1).unwrap();
+    let key2 = "key2".as_bytes();
+    let value2 = "value2".as_bytes();
+    tree.insert(key2, value2).unwrap();
+    assert_eq!(tree.iter().count(), 3);
+    tree.clear().unwrap();
+    assert_eq!(tree.iter().count(), 0);
+}
+
+// increment starts a missing counter at 1 and adds 1 on each call.
+fn increment<T>(name: &str)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let (tree, _inst, _temp_dir) = open_tree::<T>(name);
+    let key = "key".as_bytes();
+    tree.increment(key).unwrap();
+    let read = tree.get(key).unwrap();
+    assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 1);
+    tree.increment(key).unwrap();
+    let read = tree.get(key).unwrap();
+    assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 2);
+}
+
+// increment_batch applies the increment to every key in the batch.
+fn increment_batch<T>(name: &str)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let (tree, _inst, _temp_dir) = open_tree::<T>(name);
+    let key = "key".as_bytes();
+    let key1 = "key1".as_bytes();
+    tree.increment_batch(&mut vec![key.to_owned(), key1.to_owned()].into_iter())
+        .unwrap();
+    let read = tree.get(key).unwrap();
+    assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 1);
+    let read = tree.get(key1).unwrap();
+    assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 1);
+    tree.increment_batch(&mut vec![key.to_owned(), key1.to_owned()].into_iter())
+        .unwrap();
+    let read = tree.get(key).unwrap();
+    assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 2);
+    let read = tree.get(key1).unwrap();
+    assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 2);
+}
+
+#[cfg(feature = "sqlite")]
+mod sqlite {
+
+    use super::*;
+    use crate::database::abstraction::sqlite::Engine;
+
+    #[test]
+    fn sqlite_insert_get() {
+        insert_get::<Engine>("sqlite_insert_get")
+    }
+
+    #[test]
+    fn sqlite_insert_replace_get() {
+        insert_get_replace::<Engine>("sqlite_insert_get_replace")
+    }
+
+    #[test]
+    fn sqlite_insert_get_remove() {
+        insert_get_remove::<Engine>("sqlite_insert_get_remove")
+    }
+
+    #[test]
+    fn sqlite_batch_insert_get() {
+        batch_insert_get::<Engine>("sqlite_batch_insert_get")
+    }
+
+    #[test]
+    fn sqlite_insert_iter() {
+        insert_iter::<Engine>("sqlite_insert_iter")
+    }
+
+    #[test]
+    fn sqlite_insert_iter_from() {
+        insert_iter_from::<Engine>("sqlite_insert_iter_from")
+    }
+
+    #[test]
+    fn sqlite_insert_iter_prefix() {
+        insert_iter_prefix::<Engine>("sqlite_insert_iter_prefix")
+    }
+
+    #[test]
+    fn sqlite_insert_clear() {
+        insert_clear::<Engine>("sqlite_insert_clear")
+    }
+
+    #[test]
+    fn sqlite_increment() {
+        increment::<Engine>("sqlite_increment")
+    }
+
+    #[test]
+    fn sqlite_increment_batch() {
+        increment_batch::<Engine>("sqlite_increment_batch")
+    }
+}
+
+#[cfg(feature = "rocksdb")]
+mod rocksdb {
+
+    use super::*;
+    use crate::database::abstraction::rocksdb::Engine;
+
+    #[test]
+    fn rocksdb_insert_get() {
+        insert_get::<Engine>("rocksdb_insert_get")
+    }
+
+    #[test]
+    fn rocksdb_insert_replace_get() {
+        insert_get_replace::<Engine>("rocksdb_insert_get_replace")
+    }
+
+    #[test]
+    fn rocksdb_insert_get_remove() {
+        insert_get_remove::<Engine>("rocksdb_insert_get_remove")
+    }
+
+    #[test]
+    fn rocksdb_batch_insert_get() {
+        batch_insert_get::<Engine>("rocksdb_batch_insert_get")
+    }
+
+    #[test]
+    fn rocksdb_insert_iter() {
+        insert_iter::<Engine>("rocksdb_insert_iter")
+    }
+
+    #[test]
+    fn rocksdb_insert_iter_from() {
+        insert_iter_from::<Engine>("rocksdb_insert_iter_from")
+    }
+
+    #[test]
+    fn rocksdb_insert_iter_prefix() {
+        insert_iter_prefix::<Engine>("rocksdb_insert_iter_prefix")
+    }
+
+    #[test]
+    fn rocksdb_insert_clear() {
+        insert_clear::<Engine>("rocksdb_insert_clear")
+    }
+
+    #[test]
+    fn rocksdb_increment() {
+        increment::<Engine>("rocksdb_increment")
+    }
+
+    #[test]
+    fn rocksdb_increment_batch() {
+        increment_batch::<Engine>("rocksdb_increment_batch")
+    }
+}
+
+#[cfg(feature = "persy")]
+mod persy {
+
+    use super::*;
+    use crate::database::abstraction::persy::Engine;
+
+    #[test]
+    fn persy_insert_get() {
+        insert_get::<Engine>("persy_insert_get")
+    }
+
+    #[test]
+    fn persy_insert_replace_get() {
+        insert_get_replace::<Engine>("persy_insert_get_replace")
+    }
+
+    #[test]
+    fn persy_insert_get_remove() {
+        insert_get_remove::<Engine>("persy_insert_get_remove")
+    }
+
+    #[test]
+    fn persy_batch_insert_get() {
+        batch_insert_get::<Engine>("persy_batch_insert_get")
+    }
+
+    #[test]
+    fn persy_insert_iter() {
+        insert_iter::<Engine>("persy_insert_iter")
+    }
+
+    #[test]
+    fn persy_insert_iter_from() {
+        insert_iter_from::<Engine>("persy_insert_iter_from")
+    }
+
+    #[test]
+    fn persy_insert_iter_prefix() {
+        insert_iter_prefix::<Engine>("persy_insert_iter_prefix")
+    }
+
+    #[test]
+    fn persy_insert_clear() {
+        insert_clear::<Engine>("persy_insert_clear")
+    }
+
+    #[test]
+    fn persy_increment() {
+        increment::<Engine>("persy_increment")
+    }
+
+    #[test]
+    fn persy_increment_batch() {
+        increment_batch::<Engine>("persy_increment_batch")
+    }
+}