Mirror of https://gitlab.com/famedly/conduit.git (synced 2025-08-01 17:38:36 +00:00)
test: add abstract test implementations and specific implementation for sqlite
parent a9ff97e527
commit e770de3d3d

3 changed files with 233 additions and 0 deletions
Cargo.toml

@@ -175,6 +175,9 @@ version = "0.25"
 [target.'cfg(unix)'.dependencies]
 nix = { version = "0.28", features = ["resource"] }
 
+[dev-dependencies]
+tempfile = "3.2"
+
 [features]
 default = ["backend_rocksdb", "backend_sqlite", "conduit_bin", "systemd"]
 #backend_sled = ["sled"]
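tempfile comes in as a dev-dependency because every sqlite test below opens its database inside a fresh temporary directory that is deleted when the TempDir handle is dropped. A minimal standalone sketch of that behaviour (not part of the diff; the prefix string is arbitrary):

    fn tempdir_is_removed_on_drop() -> std::io::Result<()> {
        use tempfile::Builder;
        let dir = Builder::new().prefix("conduit-test").tempdir()?; // directory exists on disk here
        let path = dir.path().to_path_buf();
        assert!(path.exists());
        drop(dir); // TempDir deletes the directory when it is dropped
        assert!(!path.exists());
        Ok(())
    }

This is why the test helper below returns the TempDir to its caller instead of letting it drop early.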
src/database/abstraction.rs

@@ -26,6 +26,9 @@ pub mod persy;
 ))]
 pub mod watchers;
 
+#[cfg(test)]
+mod tests;
+
 pub trait KeyValueDatabaseEngine: Send + Sync {
     fn open(config: &Config) -> Result<Self>
     where
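The new module follows a simple split: the test bodies in tests.rs are written against &dyn Tree, so they are backend-agnostic, and each backend adds a small cfg-gated module of #[test] wrappers. A hedged sketch of a generic helper that could trim that per-backend boilerplate (run_with_engine is hypothetical and not part of this commit; it assumes the open_tree method used by the sqlite wiring below):

    fn run_with_engine<E: KeyValueDatabaseEngine>(test_name: &str, body: impl FnOnce(&dyn Tree)) {
        // Hypothetical helper: open an engine of type E in a temp dir and hand one tree to the test body.
        let db_folder = tempfile::Builder::new().prefix(test_name).tempdir().unwrap();
        let config = empty_config(db_folder.path().to_str().unwrap());
        let engine = E::open(&config).unwrap();
        let tree = engine.open_tree("test").unwrap();
        body(&*tree);
        // engine and db_folder are dropped only after the test body has run
    }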
src/database/abstraction/tests.rs (new file, 227 lines)

@@ -0,0 +1,227 @@
use crate::database::{abstraction::Tree, Config};

fn insert_get(tree: &dyn Tree) {
    let key = "key".as_bytes();
    let value = "value".as_bytes();
    tree.insert(key, value).unwrap();
    let read = tree.get(key).unwrap();
    assert_eq!(read, Some(value.to_owned()));
}

fn insert_get_remove(tree: &dyn Tree) {
    let key = "key".as_bytes();
    let value = "value".as_bytes();
    tree.insert(key, value).unwrap();
    let read = tree.get(key).unwrap();
    assert_eq!(read, Some(value.to_owned()));
    tree.remove(key).unwrap();
    let read = tree.get(key).unwrap();
    assert_eq!(read, None);
}

fn batch_insert_get(tree: &dyn Tree) {
    let key = "key".as_bytes();
    let value = "value".as_bytes();
    let key1 = "key1".as_bytes();
    let value1 = "value1".as_bytes();
    let key2 = "key2".as_bytes();
    let value2 = "value2".as_bytes();
    tree.insert_batch(
        &mut vec![
            (key.to_owned(), value.to_owned()),
            (key1.to_owned(), value1.to_owned()),
            (key2.to_owned(), value2.to_owned()),
        ]
        .into_iter(),
    )
    .unwrap();
    let read = tree.get(key).unwrap();
    assert_eq!(read, Some(value.to_owned()));
    let read = tree.get(key1).unwrap();
    assert_eq!(read, Some(value1.to_owned()));
    let read = tree.get(key2).unwrap();
    assert_eq!(read, Some(value2.to_owned()));
}

fn insert_iter(tree: &dyn Tree) {
    let key = "key".as_bytes();
    let value = "value".as_bytes();
    tree.insert(key, value).unwrap();
    let key1 = "key1".as_bytes();
    let value1 = "value1".as_bytes();
    tree.insert(key1, value1).unwrap();
    let key2 = "key2".as_bytes();
    let value2 = "value2".as_bytes();
    tree.insert(key2, value2).unwrap();
    let mut iter = tree.iter();
    assert_eq!(iter.next(), Some((key.to_owned(), value.to_owned())));
    assert_eq!(iter.next(), Some((key1.to_owned(), value1.to_owned())));
    assert_eq!(iter.next(), Some((key2.to_owned(), value2.to_owned())));
    assert_eq!(iter.next(), None);
}

fn insert_iter_from(tree: &dyn Tree) {
    let key = "key".as_bytes();
    let value = "value".as_bytes();
    tree.insert(key, value).unwrap();
    let key1 = "key1".as_bytes();
    let value1 = "value1".as_bytes();
    tree.insert(key1, value1).unwrap();
    let key2 = "key2".as_bytes();
    let value2 = "value2".as_bytes();
    tree.insert(key2, value2).unwrap();
    let mut iter = tree.iter_from(key1, false);
    assert_eq!(iter.next(), Some((key1.to_owned(), value1.to_owned())));
    assert_eq!(iter.next(), Some((key2.to_owned(), value2.to_owned())));
    assert_eq!(iter.next(), None);
    let mut iter = tree.iter_from(key1, true);
    assert_eq!(iter.next(), Some((key1.to_owned(), value1.to_owned())));
    assert_eq!(iter.next(), Some((key.to_owned(), value.to_owned())));
    assert_eq!(iter.next(), None);
}

fn insert_iter_prefix(tree: &dyn Tree) {
    let key = "key".as_bytes();
    let value = "value".as_bytes();
    tree.insert(key, value).unwrap();
    let key1 = "key1".as_bytes();
    let value1 = "value1".as_bytes();
    tree.insert(key1, value1).unwrap();
    let key11 = "key11".as_bytes();
    let value11 = "value11".as_bytes();
    tree.insert(key11, value11).unwrap();
    let key2 = "key2".as_bytes();
    let value2 = "value2".as_bytes();
    tree.insert(key2, value2).unwrap();
    let mut iter = tree.scan_prefix(key1.to_owned());
    assert_eq!(iter.next(), Some((key1.to_owned(), value1.to_owned())));
    assert_eq!(iter.next(), Some((key11.to_owned(), value11.to_owned())));
    assert_eq!(iter.next(), None);
}

fn insert_clear(tree: &dyn Tree) {
    let key = "key".as_bytes();
    let value = "value".as_bytes();
    tree.insert(key, value).unwrap();
    let key1 = "key1".as_bytes();
    let value1 = "value1".as_bytes();
    tree.insert(key1, value1).unwrap();
    let key2 = "key2".as_bytes();
    let value2 = "value2".as_bytes();
    tree.insert(key2, value2).unwrap();
    assert_eq!(tree.iter().count(), 3);
    tree.clear().unwrap();
    assert_eq!(tree.iter().count(), 0);
}

fn increment(tree: &dyn Tree) {
    let key = "key".as_bytes();
    tree.increment(key).unwrap();
    let read = tree.get(key).unwrap();
    assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 1);
    tree.increment(key).unwrap();
    let read = tree.get(key).unwrap();
    assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 2);
}

fn increment_batch(tree: &dyn Tree) {
    let key = "key".as_bytes();
    let key1 = "key1".as_bytes();
    tree.increment_batch(&mut vec![key.to_owned(), key1.to_owned()].into_iter())
        .unwrap();
    let read = tree.get(key).unwrap();
    assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 1);
    let read = tree.get(key1).unwrap();
    assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 1);
    tree.increment_batch(&mut vec![key.to_owned(), key1.to_owned()].into_iter())
        .unwrap();
    let read = tree.get(key).unwrap();
    assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 2);
    let read = tree.get(key1).unwrap();
    assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 2);
}

fn empty_config(database_path: &str) -> Config {
    use rocket::figment::providers::{Format, Toml};
    Toml::from_str(&format!(
        r#"
        server_name = "test"
        database_path = "{}"
        "#,
        database_path
    ))
    .unwrap()
}

#[cfg(feature = "sqlite")]
mod sqlite {

    use super::*;
    use crate::database::abstraction::{KeyValueDatabaseEngine, Tree};
    use std::sync::Arc;
    use tempfile::{Builder, TempDir};

    /// Keep the returned tree, engine and TempDir alive for the whole test;
    /// dropping them early cleans up the database directory and can break the test.
    fn open_tree(test_name: &str) -> (Arc<dyn Tree>, impl KeyValueDatabaseEngine, TempDir) {
        let db_folder = Builder::new().prefix(test_name).tempdir().unwrap();
        let config = empty_config(db_folder.path().to_str().unwrap());
        let instance = Arc::<crate::database::abstraction::sqlite::Engine>::open(&config).unwrap();
        let tree = instance.open_tree("test").unwrap();
        (tree, instance, db_folder)
    }

    #[test]
    fn sqlite_insert_get() {
        let (tree, _inst, _temp_dir) = open_tree("insert_get");
        insert_get(&*tree)
    }

    #[test]
    fn sqlite_insert_get_remove() {
        let (tree, _inst, _temp_dir) = open_tree("insert_get_remove");
        insert_get_remove(&*tree)
    }

    #[test]
    fn sqlite_batch_insert_get() {
        let (tree, _inst, _temp_dir) = open_tree("batch_insert_get");
        batch_insert_get(&*tree)
    }

    #[test]
    fn sqlite_insert_iter() {
        let (tree, _inst, _temp_dir) = open_tree("insert_iter");
        insert_iter(&*tree)
    }

    #[test]
    fn sqlite_insert_iter_from() {
        let (tree, _inst, _temp_dir) = open_tree("insert_iter_from");
        insert_iter_from(&*tree)
    }

    #[test]
    fn sqlite_insert_iter_prefix() {
        let (tree, _inst, _temp_dir) = open_tree("insert_iter_prefix");
        insert_iter_prefix(&*tree)
    }

    #[test]
    fn sqlite_insert_clear() {
        let (tree, _inst, _temp_dir) = open_tree("insert_clear");
        insert_clear(&*tree)
    }

    #[test]
    fn sqlite_increment() {
        let (tree, _inst, _temp_dir) = open_tree("increment");
        increment(&*tree)
    }

    #[test]
    fn sqlite_increment_batch() {
        let (tree, _inst, _temp_dir) = open_tree("increment_batch");
        increment_batch(&*tree)
    }
}
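The sqlite module is the only concrete wiring in this commit; other backends could reuse the same abstract functions through an analogous cfg-gated module. A hedged sketch for RocksDB (the feature name, module path and Engine type are assumptions, and only one wrapper is shown):

    #[cfg(feature = "rocksdb")]
    mod rocksdb {
        use super::*;
        use crate::database::abstraction::{KeyValueDatabaseEngine, Tree};
        use std::sync::Arc;
        use tempfile::{Builder, TempDir};

        fn open_tree(test_name: &str) -> (Arc<dyn Tree>, impl KeyValueDatabaseEngine, TempDir) {
            let db_folder = Builder::new().prefix(test_name).tempdir().unwrap();
            let config = empty_config(db_folder.path().to_str().unwrap());
            // Assumed engine path; mirrors the sqlite wiring above.
            let instance =
                Arc::<crate::database::abstraction::rocksdb::Engine>::open(&config).unwrap();
            let tree = instance.open_tree("test").unwrap();
            (tree, instance, db_folder)
        }

        #[test]
        fn rocksdb_insert_get() {
            let (tree, _inst, _temp_dir) = open_tree("insert_get");
            insert_get(&*tree)
        }

        // The remaining wrappers would mirror the sqlite module above.
    }

Running cargo test with the corresponding backend feature enabled would then exercise both modules.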