mirror of https://gitlab.com/famedly/conduit.git
synced 2025-06-27 16:35:59 +00:00

commit dd75a7ba84 (parent 0f1c3222ed)

feat: add implementation to export and import low level data in json

3 changed files with 112 additions and 1 deletion
src/database/abstraction.rs

@@ -26,6 +26,8 @@ pub mod persy;
 ))]
 pub mod watchers;
 
+pub mod json_stream_export;
+
 #[cfg(test)]
 mod tests;
src/database/abstraction/json_stream_export.rs (new file, 77 lines)

@@ -0,0 +1,77 @@
use crate::{
    database::abstraction::{KeyValueDatabaseEngine, KvExport},
    Result,
};
use base64::{engine::general_purpose::STANDARD, Engine};
use serde::{Deserialize, Serialize};
use std::io::{BufRead, BufReader, Read, Write};

/// Export and import of all key-value trees as a stream of JSON lines.
pub trait KeyValueJsonExporter {
    fn export_json_stream(&self, output: &mut dyn Write) -> Result<()>;
    fn import_json_stream(&self, input: &mut dyn Read) -> Result<()>;
}

/// One exported record: a single key-value pair, with key and value
/// base64-encoded so arbitrary bytes survive the JSON text encoding.
#[derive(Serialize, Deserialize)]
struct Entry {
    tree: String,
    key: String,
    value: String,
}

struct JsonExporter<'a> {
    current_name: String,
    write: &'a mut dyn Write,
}

impl<'a> KvExport for JsonExporter<'a> {
    fn start_tree(&mut self, name: &str) -> Result<()> {
        self.current_name = name.to_owned();
        Ok(())
    }

    fn key_value(&mut self, key: &[u8], value: &[u8]) -> Result<()> {
        let entry = Entry {
            tree: self.current_name.clone(),
            key: STANDARD.encode(key),
            value: STANDARD.encode(value),
        };
        // Serializing a struct of plain Strings cannot fail.
        writeln!(self.write, "{}", serde_json::to_string(&entry).unwrap())?;
        Ok(())
    }

    fn end_tree(&mut self, _name: &str) -> Result<()> {
        Ok(())
    }
}

impl<T: KeyValueDatabaseEngine> KeyValueJsonExporter for T {
    fn export_json_stream(&self, output: &mut dyn Write) -> Result<()> {
        self.export(&mut JsonExporter {
            current_name: Default::default(),
            write: output,
        })?;
        Ok(())
    }

    fn import_json_stream(&self, input: &mut dyn Read) -> Result<()> {
        let bf = BufReader::new(input);
        // Cache the last opened tree to avoid reopening it for every entry.
        let mut cur_tree = None;
        for line in bf.lines() {
            if let Ok(entry) = serde_json::from_str::<Entry>(&line?) {
                if let (Ok(key), Ok(value)) =
                    (STANDARD.decode(&entry.key), STANDARD.decode(&entry.value))
                {
                    let (tree, tree_name) = match cur_tree {
                        Some((tree, tree_name)) if tree_name == entry.tree => (tree, tree_name),
                        _ => {
                            let tree = self.open_tree(&entry.tree)?;
                            (tree, entry.tree.clone())
                        }
                    };
                    tree.insert(&key, &value)?;
                    cur_tree = Some((tree, tree_name));
                }
            }
        }
        Ok(())
    }
}
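
Each record in the exported stream is one JSON object per line, with the key and value base64-encoded (standard alphabet) so that arbitrary bytes survive the text encoding. For illustration only (the tree name is made up), a pair with key "foo" and value "bar" in a tree called "example" would serialize as:

    {"tree":"example","key":"Zm9v","value":"YmFy"}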
src/database/abstraction/tests.rs

@@ -1,5 +1,7 @@
 use crate::database::{
-    abstraction::{KeyValueDatabaseEngine, KvExport, KvTree},
+    abstraction::{
+        json_stream_export::KeyValueJsonExporter, KeyValueDatabaseEngine, KvExport, KvTree,
+    },
     Config,
 };
 use std::sync::Arc;

@@ -325,6 +327,23 @@ where
     check_data(&instance_r, "two");
 }
 
+fn test_export_import_json<T>(test_name: &str)
+where
+    Arc<T>: KeyValueDatabaseEngine,
+{
+    let (instance, _db_folder) = open_instance(test_name);
+    insert_data(&instance, "one");
+    insert_data(&instance, "two");
+    let mut buffer = Vec::new();
+    instance.export_json_stream(&mut buffer).unwrap();
+    let (instance_r, _db_folder) = open_instance(&format!("{}_restore", test_name));
+    instance_r
+        .import_json_stream(&mut std::io::Cursor::new(buffer))
+        .unwrap();
+    check_data(&instance_r, "one");
+    check_data(&instance_r, "two");
+}
+
 #[cfg(feature = "sqlite")]
 mod sqlite {
@@ -384,6 +403,10 @@ mod sqlite {
     fn sqlite_export_import() {
         test_export_import::<Engine>("sqlite_export_import")
     }
+    #[test]
+    fn sqlite_export_import_json() {
+        test_export_import_json::<Engine>("sqlite_export_import_json")
+    }
 }
 
 #[cfg(feature = "rocksdb")]
@@ -446,6 +469,10 @@ mod rocksdb {
     fn rocksdb_export_import() {
         test_export_import::<Engine>("rocksdb_export_import")
     }
+    #[test]
+    fn rocksdb_export_import_json() {
+        test_export_import_json::<Engine>("rocksdb_export_import_json")
+    }
 }
 
 #[cfg(feature = "persy")]
 mod persy {
@@ -507,4 +534,9 @@ mod persy {
     fn persy_export_import() {
         test_export_import::<Engine>("persy_export_import")
     }
+
+    #[test]
+    fn persy_export_import_json() {
+        test_export_import_json::<Engine>("persy_export_import_json")
+    }
 }
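
Because the new trait has a blanket implementation for every KeyValueDatabaseEngine, the same JSON stream can in principle be exported from one backend and imported into another (e.g. sqlite to rocksdb). A minimal sketch of how a caller might use it, not part of this commit (the file name and the unwrap-based error handling are illustrative only):

    use std::fs::File;
    use std::io::{BufReader, BufWriter};

    // Dump every tree of `engine` to a JSON Lines file; dropping the
    // BufWriter at the end of the function flushes it.
    fn backup(engine: &impl KeyValueJsonExporter) {
        let mut out = BufWriter::new(File::create("conduit-backup.jsonl").unwrap());
        engine.export_json_stream(&mut out).unwrap();
    }

    // Restore a previously exported stream into `engine`.
    fn restore(engine: &impl KeyValueJsonExporter) {
        let mut input = BufReader::new(File::open("conduit-backup.jsonl").unwrap());
        engine.import_json_stream(&mut input).unwrap();
    }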