use std::{
	collections::HashSet,
	fmt::Write,
	mem::size_of,
	sync::{Arc, Mutex},
};

use conduit::{checked, err, expected, utils, utils::math::usize_from_f64, Result};
use database::Map;
use lru_cache::LruCache;
use ruma::{EventId, RoomId};

use crate::{
	rooms,
	rooms::short::{ShortStateHash, ShortStateKey},
	Dep,
};

pub struct Service {
	pub stateinfo_cache: Mutex<StateInfoLruCache>,
	db: Data,
	services: Services,
}

struct Services {
	short: Dep<rooms::short::Service>,
	state: Dep<rooms::state::Service>,
}

struct Data {
	shortstatehash_statediff: Arc<Map>,
}

#[derive(Clone)]
struct StateDiff {
	parent: Option<u64>,
	added: Arc<CompressedState>,
	removed: Arc<CompressedState>,
}

#[derive(Clone, Default)]
pub struct ShortStateInfo {
	pub shortstatehash: ShortStateHash,
	pub full_state: Arc<CompressedState>,
	pub added: Arc<CompressedState>,
	pub removed: Arc<CompressedState>,
}

#[derive(Clone, Default)]
pub struct HashSetCompressStateEvent {
	pub shortstatehash: ShortStateHash,
	pub added: Arc<CompressedState>,
	pub removed: Arc<CompressedState>,
}

pub(crate) type CompressedState = HashSet<CompressedStateEvent>;
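/// Layout: the shortstatekey followed by the shorteventid, each as a
/// big-endian u64 (16 bytes total); see `compress_state_event`.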
pub(crate) type CompressedStateEvent = [u8; 2 * size_of::<u64>()];
type StateInfoLruCache = LruCache<ShortStateHash, ShortStateInfoVec>;
type ShortStateInfoVec = Vec<ShortStateInfo>;
type ParentStatesVec = Vec<ShortStateInfo>;

impl crate::Service for Service {
	fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
		let config = &args.server.config;
		let cache_capacity = f64::from(config.stateinfo_cache_capacity) * config.cache_capacity_modifier;
		Ok(Arc::new(Self {
			stateinfo_cache: LruCache::new(usize_from_f64(cache_capacity)?).into(),
			db: Data {
				shortstatehash_statediff: args.db["shortstatehash_statediff"].clone(),
			},
			services: Services {
				short: args.depend::<rooms::short::Service>("rooms::short"),
				state: args.depend::<rooms::state::Service>("rooms::state"),
			},
		}))
	}

	fn memory_usage(&self, out: &mut dyn Write) -> Result<()> {
		let stateinfo_cache = self.stateinfo_cache.lock().expect("locked").len();
		writeln!(out, "stateinfo_cache: {stateinfo_cache}")?;

		Ok(())
	}

	fn clear_cache(&self) { self.stateinfo_cache.lock().expect("locked").clear(); }

	fn name(&self) -> &str { crate::service::make_name(std::module_path!()) }
}

impl Service {
	/// Returns a stack with info on shortstatehash, full state, added diff and
	/// removed diff for the selected shortstatehash and each parent layer.
	pub async fn load_shortstatehash_info(&self, shortstatehash: ShortStateHash) -> Result<ShortStateInfoVec> {
		if let Some(r) = self
			.stateinfo_cache
			.lock()
			.expect("locked")
			.get_mut(&shortstatehash)
		{
			return Ok(r.clone());
		}

		let StateDiff {
			parent,
			added,
			removed,
		} = self.get_statediff(shortstatehash).await?;

		let response = if let Some(parent) = parent {
			let mut response = Box::pin(self.load_shortstatehash_info(parent)).await?;
			let mut state = (*response.last().expect("at least one response").full_state).clone();
			state.extend(added.iter().copied());
			let removed = (*removed).clone();
			for r in &removed {
				state.remove(r);
			}

			response.push(ShortStateInfo {
				shortstatehash,
				full_state: Arc::new(state),
				added,
				removed: Arc::new(removed),
			});

			response
		} else {
			vec![ShortStateInfo {
				shortstatehash,
				full_state: added.clone(),
				added,
				removed,
			}]
		};

		self.stateinfo_cache
			.lock()
			.expect("locked")
			.insert(shortstatehash, response.clone());

		Ok(response)
	}

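	/// Packs the given shortstatekey and the event's shorteventid into a
	/// single `CompressedStateEvent` (two big-endian u64s).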
	pub async fn compress_state_event(&self, shortstatekey: ShortStateKey, event_id: &EventId) -> CompressedStateEvent {
		let mut v = shortstatekey.to_be_bytes().to_vec();
		v.extend_from_slice(
			&self
				.services
				.short
				.get_or_create_shorteventid(event_id)
				.await
				.to_be_bytes(),
		);

		v.try_into().expect("we checked the size above")
	}

	/// Returns shortstatekey, event id
	#[inline]
	pub async fn parse_compressed_state_event(
		&self, compressed_event: CompressedStateEvent,
	) -> Result<(ShortStateKey, Arc<EventId>)> {
		use utils::u64_from_u8;

		let shortstatekey = u64_from_u8(&compressed_event[0..size_of::<u64>()]);
		let event_id = self
			.services
			.short
			.get_eventid_from_short(u64_from_u8(&compressed_event[size_of::<u64>()..]))
			.await?;

		Ok((shortstatekey, event_id))
	}

	/// Creates a new shortstatehash that often is just a diff to an already
	/// existing shortstatehash and therefore very efficient.
	///
	/// There are multiple layers of diffs. The bottom layer 0 always contains
	/// the full state. Layer 1 contains diffs to states of layer 0, layer 2
	/// diffs to layer 1 and so on. If layer n > 0 grows too big, it will be
	/// combined with layer n-1 to create a new diff on layer n-1 that's
	/// based on layer n-2. If that layer is also too big, it will recursively
	/// fix the layers above it too.
	///
	/// * `shortstatehash` - Shortstatehash of this state
	/// * `statediffnew` - Added to base. Each entry is shortstatekey+shorteventid
	/// * `statediffremoved` - Removed from base. Each entry is
	///   shortstatekey+shorteventid
	/// * `diff_to_sibling` - Approximately how much the diff grows each time
	///   for this layer
	/// * `parent_states` - A stack with info on shortstatehash, full state,
	///   added diff and removed diff for each parent layer
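	///
	/// For example, the first state of a room is stored in full with
	/// `parent: None`; later states usually store only the entries added and
	/// removed relative to the previous layer, and a diff is folded back into
	/// its parent once it grows large relative to that parent or once more
	/// than three parent layers exist.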
	#[tracing::instrument(skip_all, level = "debug")]
	pub fn save_state_from_diff(
		&self, shortstatehash: ShortStateHash, statediffnew: Arc<HashSet<CompressedStateEvent>>,
		statediffremoved: Arc<HashSet<CompressedStateEvent>>, diff_to_sibling: usize,
		mut parent_states: ParentStatesVec,
	) -> Result {
		let statediffnew_len = statediffnew.len();
		let statediffremoved_len = statediffremoved.len();
		let diffsum = checked!(statediffnew_len + statediffremoved_len)?;

		if parent_states.len() > 3 {
			// Number of layers
			// Too many layers, we have to go deeper
			let parent = parent_states.pop().expect("parent must have a state");

			let mut parent_new = (*parent.added).clone();
			let mut parent_removed = (*parent.removed).clone();

			for removed in statediffremoved.iter() {
				if !parent_new.remove(removed) {
					// It was not added in the parent and we removed it
					parent_removed.insert(*removed);
				}
				// Else it was added in the parent and we removed it again. We
				// can forget this change
			}

			for new in statediffnew.iter() {
				if !parent_removed.remove(new) {
					// It was not touched in the parent and we added it
					parent_new.insert(*new);
				}
				// Else it was removed in the parent and we added it again. We
				// can forget this change
			}

			self.save_state_from_diff(
				shortstatehash,
				Arc::new(parent_new),
				Arc::new(parent_removed),
				diffsum,
				parent_states,
			)?;

			return Ok(());
		}

		if parent_states.is_empty() {
			// There is no parent layer, create a new state
			self.save_statediff(
				shortstatehash,
				&StateDiff {
					parent: None,
					added: statediffnew,
					removed: statediffremoved,
				},
			);

			return Ok(());
		};

		// Else we have two options.
		// 1. We add the current diff on top of the parent layer.
		// 2. We replace a layer above

		let parent = parent_states.pop().expect("parent must have a state");
		let parent_added_len = parent.added.len();
		let parent_removed_len = parent.removed.len();
		let parent_diff = checked!(parent_added_len + parent_removed_len)?;

		if checked!(diffsum * diffsum)? >= checked!(2 * diff_to_sibling * parent_diff)? {
			// Diff too big, we replace above layer(s)
			let mut parent_new = (*parent.added).clone();
			let mut parent_removed = (*parent.removed).clone();

			for removed in statediffremoved.iter() {
				if !parent_new.remove(removed) {
					// It was not added in the parent and we removed it
					parent_removed.insert(*removed);
				}
				// Else it was added in the parent and we removed it again. We
				// can forget this change
			}

			for new in statediffnew.iter() {
				if !parent_removed.remove(new) {
					// It was not touched in the parent and we added it
					parent_new.insert(*new);
				}
				// Else it was removed in the parent and we added it again. We
				// can forget this change
			}

			self.save_state_from_diff(
				shortstatehash,
				Arc::new(parent_new),
				Arc::new(parent_removed),
				diffsum,
				parent_states,
			)?;
		} else {
			// Diff small enough, we add diff as layer on top of parent
			self.save_statediff(
				shortstatehash,
				&StateDiff {
					parent: Some(parent.shortstatehash),
					added: statediffnew,
					removed: statediffremoved,
				},
			);
		}

		Ok(())
	}

	/// Returns the new shortstatehash, and the state diff from the previous
	/// room state
	pub async fn save_state(
		&self, room_id: &RoomId, new_state_ids_compressed: Arc<HashSet<CompressedStateEvent>>,
	) -> Result<HashSetCompressStateEvent> {
		let previous_shortstatehash = self
			.services
			.state
			.get_room_shortstatehash(room_id)
			.await
			.ok();

		let state_hash = utils::calculate_hash(
			&new_state_ids_compressed
				.iter()
				.map(|bytes| &bytes[..])
				.collect::<Vec<_>>(),
		);

		let (new_shortstatehash, already_existed) = self
			.services
			.short
			.get_or_create_shortstatehash(&state_hash)
			.await;

		if Some(new_shortstatehash) == previous_shortstatehash {
			return Ok(HashSetCompressStateEvent {
				shortstatehash: new_shortstatehash,
				..Default::default()
			});
		}

		let states_parents = if let Some(p) = previous_shortstatehash {
			self.load_shortstatehash_info(p).await.unwrap_or_default()
		} else {
			ShortStateInfoVec::new()
		};

		let (statediffnew, statediffremoved) = if let Some(parent_stateinfo) = states_parents.last() {
			let statediffnew: HashSet<_> = new_state_ids_compressed
				.difference(&parent_stateinfo.full_state)
				.copied()
				.collect();

			let statediffremoved: HashSet<_> = parent_stateinfo
				.full_state
				.difference(&new_state_ids_compressed)
				.copied()
				.collect();

			(Arc::new(statediffnew), Arc::new(statediffremoved))
		} else {
			(new_state_ids_compressed, Arc::new(HashSet::new()))
		};

		if !already_existed {
			self.save_state_from_diff(
				new_shortstatehash,
				statediffnew.clone(),
				statediffremoved.clone(),
				2, // every state change is 2 event changes on average
				states_parents,
			)?;
		};

		Ok(HashSetCompressStateEvent {
			shortstatehash: new_shortstatehash,
			added: statediffnew,
			removed: statediffremoved,
		})
	}

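	/// Reads and decodes the `StateDiff` stored for `shortstatehash`: the
	/// value starts with the parent shortstatehash as a big-endian u64 (0 for
	/// no parent), followed by the added compressed events and, after a zero
	/// u64 separator, the removed compressed events.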
	async fn get_statediff(&self, shortstatehash: ShortStateHash) -> Result<StateDiff> {
		const BUFSIZE: usize = size_of::<ShortStateHash>();
		const STRIDE: usize = size_of::<ShortStateHash>();

		let value = self
			.db
			.shortstatehash_statediff
			.aqry::<BUFSIZE, _>(&shortstatehash)
			.await
			.map_err(|e| err!(Database("Failed to find StateDiff from short {shortstatehash:?}: {e}")))?;

		let parent = utils::u64_from_bytes(&value[0..size_of::<u64>()])
			.ok()
			.take_if(|parent| *parent != 0);

		let mut add_mode = true;
		let mut added = HashSet::new();
		let mut removed = HashSet::new();

		let mut i = STRIDE;
		while let Some(v) = value.get(i..expected!(i + 2 * STRIDE)) {
			if add_mode && v.starts_with(&0_u64.to_be_bytes()) {
				add_mode = false;
				i = expected!(i + STRIDE);
				continue;
			}
			if add_mode {
				added.insert(v.try_into()?);
			} else {
				removed.insert(v.try_into()?);
			}
			i = expected!(i + 2 * STRIDE);
		}

		Ok(StateDiff {
			parent,
			added: Arc::new(added),
			removed: Arc::new(removed),
		})
	}

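	/// Serializes `diff` into the value format described on `get_statediff`:
	/// parent (or 0), the added entries, a zero u64 separator, then the
	/// removed entries.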
	fn save_statediff(&self, shortstatehash: ShortStateHash, diff: &StateDiff) {
		let mut value = diff.parent.unwrap_or(0).to_be_bytes().to_vec();
		for new in diff.added.iter() {
			value.extend_from_slice(&new[..]);
		}

		if !diff.removed.is_empty() {
			value.extend_from_slice(&0_u64.to_be_bytes());
			for removed in diff.removed.iter() {
				value.extend_from_slice(&removed[..]);
			}
		}

		self.db
			.shortstatehash_statediff
			.insert(&shortstatehash.to_be_bytes(), &value);
	}
}