From 0f1c3222ed027f67f448ee57865a695911136aa1 Mon Sep 17 00:00:00 2001
From: Tglman
Date: Sun, 27 Aug 2023 18:56:09 +0100
Subject: [PATCH] test: add test for exporting and importing data from tree
 store

---
 src/database/abstraction.rs         |   6 +-
 src/database/abstraction/heed.rs    |   4 +-
 src/database/abstraction/persy.rs   |   4 +-
 src/database/abstraction/rocksdb.rs |  16 ++--
 src/database/abstraction/sqlite.rs  |   2 +-
 src/database/abstraction/tests.rs   | 121 +++++++++++++++++++++++++++-
 6 files changed, 132 insertions(+), 21 deletions(-)

diff --git a/src/database/abstraction.rs b/src/database/abstraction.rs
index ffcf476d..5832c944 100644
--- a/src/database/abstraction.rs
+++ b/src/database/abstraction.rs
@@ -33,7 +33,7 @@ pub trait KeyValueDatabaseEngine: Send + Sync {
     fn open(config: &Config) -> Result<Self>
     where
         Self: Sized;
-    fn open_tree(&self, name: &'static str) -> Result<Arc<dyn KvTree>>;
+    fn open_tree(&self, name: &str) -> Result<Arc<dyn KvTree>>;
     fn flush(&self) -> Result<()>;
     fn cleanup(&self) -> Result<()> {
         Ok(())
@@ -43,9 +43,7 @@
     }
     fn clear_caches(&self) {}

-    fn export(&self, _exporter: &mut Box<dyn KvExport>) -> Result<()> {
-        unimplemented!()
-    }
+    fn export(&self, exporter: &mut dyn KvExport) -> Result<()>;
 }

 pub trait KvExport {
diff --git a/src/database/abstraction/heed.rs b/src/database/abstraction/heed.rs
index ff843d97..f215f6d5 100644
--- a/src/database/abstraction/heed.rs
+++ b/src/database/abstraction/heed.rs
@@ -49,7 +49,7 @@
         }))
     }

-    fn open_tree(&self, name: &'static str) -> Result<Arc<dyn KvTree>> {
+    fn open_tree(&self, name: &str) -> Result<Arc<dyn KvTree>> {
         // Creates the db if it doesn't exist already
         Ok(Arc::new(EngineTree {
             engine: self.clone(),
@@ -67,7 +67,7 @@
         Ok(())
     }

-    fn export(&self, exporter: &mut Box<dyn KvExport>) -> Result<()> {
+    fn export(&self, exporter: &mut dyn KvExport) -> Result<()> {
         // Heed do not support snapshots
         let trees: Vec<String> = unimplemented!("heed has no way lo list trees");
         for tree_name in &trees {
diff --git a/src/database/abstraction/persy.rs b/src/database/abstraction/persy.rs
index 99cbc061..7730fb07 100644
--- a/src/database/abstraction/persy.rs
+++ b/src/database/abstraction/persy.rs
@@ -27,7 +27,7 @@
         Ok(Arc::new(Engine { persy }))
     }

-    fn open_tree(&self, name: &'static str) -> Result<Arc<dyn KvTree>> {
+    fn open_tree(&self, name: &str) -> Result<Arc<dyn KvTree>> {
         // Create if it doesn't exist
         if !self.persy.exists_index(name)? {
             let mut tx = self.persy.begin()?;
@@ -46,7 +46,7 @@
         Ok(())
     }

-    fn export(&self, exporter: &mut Box<dyn KvExport>) -> Result<()> {
+    fn export(&self, exporter: &mut dyn KvExport) -> Result<()> {
         let snapshot = self.persy.snapshot()?;
         let indexes = snapshot.list_indexes()?;
         for (index, _) in indexes {
diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs
index 84ccf109..e441875e 100644
--- a/src/database/abstraction/rocksdb.rs
+++ b/src/database/abstraction/rocksdb.rs
@@ -14,9 +14,9 @@ pub struct Engine {
     database_path: String,
 }

-pub struct RocksDbEngineTree<'a> {
+pub struct RocksDbEngineTree {
     db: Arc<Engine>,
-    name: &'a str,
+    name: String,
     watchers: Watchers,
     write_lock: RwLock<()>,
 }
@@ -90,7 +90,7 @@
         }))
     }

-    fn open_tree(&self, name: &'static str) -> Result<Arc<dyn KvTree>> {
+    fn open_tree(&self, name: &str) -> Result<Arc<dyn KvTree>> {
         if !self.old_cfs.contains(&name.to_owned()) {
             // Create if it didn't exist
             let _ = self
@@ -99,7 +99,7 @@
         }

         Ok(Arc::new(RocksDbEngineTree {
-            name,
+            name: name.to_owned(),
             db: Arc::clone(self),
             watchers: Watchers::default(),
             write_lock: RwLock::new(()),
@@ -129,7 +129,7 @@
         ))
     }

-    fn export(&self, exporter: &mut Box<dyn KvExport>) -> Result<()> {
+    fn export(&self, exporter: &mut dyn KvExport) -> Result<()> {
         let snapshot = self.rocks.snapshot();
         let column_familes = rocksdb::DBWithThreadMode::<rocksdb::MultiThreaded>::list_cf(
             &rocksdb::Options::default(),
@@ -154,13 +154,13 @@
     fn clear_caches(&self) {}
 }

-impl RocksDbEngineTree<'_> {
+impl RocksDbEngineTree {
     fn cf(&self) -> Arc<rocksdb::BoundColumnFamily<'_>> {
-        self.db.rocks.cf_handle(self.name).unwrap()
+        self.db.rocks.cf_handle(&self.name).unwrap()
     }
 }

-impl KvTree for RocksDbEngineTree<'_> {
+impl KvTree for RocksDbEngineTree {
     fn get(&self, key: &[u8]) -> Result<Option<Vec<u8>>> {
         let readoptions = rocksdb::ReadOptions::default();

diff --git a/src/database/abstraction/sqlite.rs b/src/database/abstraction/sqlite.rs
index 35d07dfe..1d3e6cd0 100644
--- a/src/database/abstraction/sqlite.rs
+++ b/src/database/abstraction/sqlite.rs
@@ -128,7 +128,7 @@
         self.flush_wal()
     }

-    fn export(&self, exporter: &mut Box<dyn KvExport>) -> Result<()> {
+    fn export(&self, exporter: &mut dyn KvExport) -> Result<()> {
         // TODO: rusqlite do not support snapshot yet, change this when they are supported
         let tables: Vec<String> = {
             let guard = self.read_lock();
diff --git a/src/database/abstraction/tests.rs b/src/database/abstraction/tests.rs
index b9a565ea..c901a44b 100644
--- a/src/database/abstraction/tests.rs
+++ b/src/database/abstraction/tests.rs
@@ -1,5 +1,5 @@
 use crate::database::{
-    abstraction::{KeyValueDatabaseEngine, KvTree},
+    abstraction::{KeyValueDatabaseEngine, KvExport, KvTree},
     Config,
 };
 use std::sync::Arc;
@@ -17,15 +17,22 @@
 database_path = "{}"
 .unwrap()
 }
-/// Make sure to keep the reference of the tree returned values for
-/// the length of the test, to avoid early cleanups that may create test issues
-fn open_tree<T>(test_name: &str) -> (Arc<dyn KvTree>, impl KeyValueDatabaseEngine, TempDir)
+fn open_instance<T>(test_name: &str) -> (Arc<T>, TempDir)
 where
     Arc<T>: KeyValueDatabaseEngine,
 {
     let db_folder = Builder::new().prefix(test_name).tempdir().unwrap();
     let config = empty_config(db_folder.path().to_str().unwrap());
     let instance = Arc::<T>::open(&config).unwrap();
+    (instance, db_folder)
+}
+/// Make sure to keep the reference of the tree returned values for
+/// the length of the test, to avoid early cleanups that may create test issues
+fn open_tree<T>(test_name: &str) -> (Arc<dyn KvTree>, impl KeyValueDatabaseEngine, TempDir)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let (instance, db_folder) = open_instance(test_name);
     let tree = instance.open_tree("test").unwrap();
     (tree, instance, db_folder)
 }
@@ -226,6 +233,98 @@
     assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 2);
 }

+#[derive(Default)]
+struct TestBackup {
+    data: Vec<(String, Vec<u8>, Vec<u8>)>,
+    current_tree: String,
+}
+impl TestBackup {
+    fn import<T>(&self, store: &Arc<T>) -> crate::Result<()>
+    where
+        Arc<T>: KeyValueDatabaseEngine,
+    {
+        for (tree, k, v) in &self.data {
+            let data = store.open_tree(&tree)?;
+            data.insert(&k, &v)?;
+        }
+        Ok(())
+    }
+}
+impl KvExport for TestBackup {
+    fn start_tree(&mut self, name: &str) -> crate::Result<()> {
+        self.current_tree = name.to_owned();
+        Ok(())
+    }
+
+    fn key_value(&mut self, key: &[u8], value: &[u8]) -> crate::Result<()> {
+        self.data
+            .push((self.current_tree.clone(), key.to_owned(), value.to_owned()));
+        Ok(())
+    }
+
+    fn end_tree(&mut self, _name: &str) -> crate::Result<()> {
+        Ok(())
+    }
+}
+
+fn insert_data<T>(instance: &Arc<T>, data: &str)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let tree = instance.open_tree(data).unwrap();
+    let key = format!("{}", data);
+    let value = "value".as_bytes();
+    tree.insert(key.as_bytes(), value).unwrap();
+    let key1 = format!("{}1", data);
+    let value1 = "value1".as_bytes();
+    tree.insert(key1.as_bytes(), value1).unwrap();
+    let key2 = format!("{}2", data);
+    let value2 = "value2".as_bytes();
+    tree.insert(key2.as_bytes(), value2).unwrap();
+}
+
+fn check_data<T>(instance: &Arc<T>, data: &str)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let tree = instance.open_tree(data).unwrap();
+    let key = format!("{}", data);
+    let value = "value".as_bytes();
+    let key1 = format!("{}1", data);
+    let value1 = "value1".as_bytes();
+    let key2 = format!("{}2", data);
+    let value2 = "value2".as_bytes();
+    let mut iter = tree.iter();
+    assert_eq!(
+        iter.next(),
+        Some((key.as_bytes().to_owned(), value.to_owned()))
+    );
+    assert_eq!(
+        iter.next(),
+        Some((key1.as_bytes().to_owned(), value1.to_owned()))
+    );
+    assert_eq!(
+        iter.next(),
+        Some((key2.as_bytes().to_owned(), value2.to_owned()))
+    );
+    assert_eq!(iter.next(), None);
+}
+
+fn test_export_import<T>(test_name: &str)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let (instance, _db_folder) = open_instance(test_name);
+    insert_data(&instance, "one");
+    insert_data(&instance, "two");
+    let mut bk = TestBackup::default();
+    instance.export(&mut bk).unwrap();
+    let (instance_r, _db_folder) = open_instance(&format!("{}_restore", test_name));
+    bk.import(&instance_r).unwrap();
+    check_data(&instance_r, "one");
+    check_data(&instance_r, "two");
+}
+
 #[cfg(feature = "sqlite")]
 mod sqlite {

@@ -281,6 +380,10 @@
     fn sqlite_increment_batch() {
         increment_batch::<Engine>("sqlite_increment_batch")
     }
+    #[test]
+    fn sqlite_export_import() {
+        test_export_import::<Engine>("sqlite_export_import")
+    }
 }
 #[cfg(feature = "rocksdb")]
 mod rocksdb {
@@ -338,6 +441,11 @@
     fn rocksdb_increment_batch() {
         increment_batch::<Engine>("rocksdb_increment_batch")
     }
+
+    #[test]
+    fn rocksdb_export_import() {
+        test_export_import::<Engine>("rocksdb_export_import")
+    }
 }
 #[cfg(feature = "persy")]
 mod persy {
@@ -394,4 +502,9 @@
     fn persy_increment_batch() {
         increment_batch::<Engine>("persy_increment_batch")
     }
+
+    #[test]
+    fn persy_export_import() {
+        test_export_import::<Engine>("persy_export_import")
+    }
 }
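Reviewer note, not part of the patch: the new KvExport callbacks (start_tree, key_value, end_tree) plus export(&mut dyn KvExport) and the relaxed open_tree(&self, name: &str) are also enough to stream one store directly into another, instead of buffering everything in memory the way the test's TestBackup does before importing in a second pass. Below is a minimal sketch of such a single-pass copy; CopyInto and copy_database are illustrative names invented here, and it assumes (as the engine implementations in this patch do) that start_tree is always called before the key/value pairs of that tree.

use std::sync::Arc;

use crate::database::abstraction::{KeyValueDatabaseEngine, KvExport, KvTree};
use crate::Result;

/// Hypothetical adapter: forwards every exported (tree, key, value) straight
/// into another engine instead of collecting it like TestBackup.
struct CopyInto<'a, T> {
    target: &'a Arc<T>,
    current: Option<Arc<dyn KvTree>>,
}

impl<'a, T> KvExport for CopyInto<'a, T>
where
    Arc<T>: KeyValueDatabaseEngine,
{
    fn start_tree(&mut self, name: &str) -> Result<()> {
        // open_tree now takes &str, so the exported tree name can be forwarded
        // without needing a 'static string.
        self.current = Some(self.target.open_tree(name)?);
        Ok(())
    }

    fn key_value(&mut self, key: &[u8], value: &[u8]) -> Result<()> {
        self.current
            .as_ref()
            .expect("start_tree is called before key_value")
            .insert(key, value)
    }

    fn end_tree(&mut self, _name: &str) -> Result<()> {
        self.current = None;
        Ok(())
    }
}

/// Hypothetical helper: copy every tree of `source` into `target`.
fn copy_database<S, T>(source: &Arc<S>, target: &Arc<T>) -> Result<()>
where
    Arc<S>: KeyValueDatabaseEngine,
    Arc<T>: KeyValueDatabaseEngine,
{
    let mut copier = CopyInto {
        target,
        current: None,
    };
    source.export(&mut copier)
}

Compared to TestBackup this keeps memory usage flat, since the target tree is opened as soon as the exporter announces it; the trade-off is that the target engine must be writable while the source snapshot is being walked.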