2022-06-20 11:31:27 +02:00
|
|
|
pub trait Data {
|
|
|
|
fn get_room_shortstatehash(&self, room_id: &RoomId) -> Result<Option<u64>>;
|
|
|
|
}
|
2020-08-06 08:29:59 -04:00
|
|
|
|
2020-10-27 19:10:09 -04:00
|
|
|
/// Returns the last state hash key added to the db for the given room.
|
2021-02-28 12:41:03 +01:00
|
|
|
#[tracing::instrument(skip(self))]
|
2021-03-17 22:30:25 +01:00
|
|
|
pub fn current_shortstatehash(&self, room_id: &RoomId) -> Result<Option<u64>> {
|
|
|
|
self.roomid_shortstatehash
|
|
|
|
.get(room_id.as_bytes())?
|
|
|
|
.map_or(Ok(None), |bytes| {
|
|
|
|
Ok(Some(utils::u64_from_bytes(&bytes).map_err(|_| {
|
|
|
|
Error::bad_database("Invalid shortstatehash in roomid_shortstatehash")
|
|
|
|
})?))
|
|
|
|
})
|
2020-08-06 08:29:59 -04:00
|
|
|
}
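// Illustrative note (not original code): values in `roomid_shortstatehash` are u64
// counters stored as big-endian bytes, which is what the decoding above relies on.
// A minimal round trip, assuming the same `utils` helpers:
//
//     let shortstatehash: u64 = 42;
//     let stored = shortstatehash.to_be_bytes();             // written via insert()
//     let decoded = utils::u64_from_bytes(&stored).unwrap(); // read back via get()
//     assert_eq!(decoded, shortstatehash);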
|
|
|
|
|
2022-06-20 11:31:27 +02:00
|
|
|
pub struct Service<D: Data> {
|
|
|
|
db: D,
|
|
|
|
}
|
2021-11-01 01:58:26 +00:00
|
|
|
|
2022-06-20 11:31:27 +02:00
|
|
|
impl<D: Data> Service<D> {
|
2022-06-20 12:08:58 +02:00
|
|
|
/// Set the room to the given statehash and update caches.
|
2021-08-24 19:10:31 +02:00
|
|
|
#[tracing::instrument(skip(self, statediffnew, statediffremoved, db))]
|
2020-09-13 22:24:36 +02:00
|
|
|
pub fn force_state(
|
|
|
|
&self,
|
|
|
|
room_id: &RoomId,
|
2022-06-20 12:08:58 +02:00
|
|
|
shortstatehash: u64,
|
|
|
|
statediffnew: HashSet<CompressedStateEvent>,
|
|
|
|
statediffremoved: HashSet<CompressedStateEvent>,
|
2021-04-16 18:18:29 +02:00
|
|
|
db: &Database,
|
2020-09-13 22:24:36 +02:00
|
|
|
) -> Result<()> {
|
2021-04-16 18:18:29 +02:00
|
|
|
|
2021-08-24 19:10:31 +02:00
|
|
|
for event_id in statediffnew.into_iter().filter_map(|new| {
|
|
|
|
self.parse_compressed_state_event(new)
|
|
|
|
.ok()
|
|
|
|
.map(|(_, id)| id)
|
|
|
|
}) {
|
2021-10-13 10:16:45 +02:00
|
|
|
let pdu = match self.get_pdu_json(&event_id)? {
|
|
|
|
Some(pdu) => pdu,
|
|
|
|
None => continue,
|
|
|
|
};
|
|
|
|
|
|
|
|
if pdu.get("type").and_then(|val| val.as_str()) != Some("m.room.member") {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
2021-10-13 11:51:30 +02:00
|
|
|
let pdu: PduEvent = match serde_json::from_str(
|
2021-10-13 10:16:45 +02:00
|
|
|
&serde_json::to_string(&pdu).expect("CanonicalJsonObj can be serialized to JSON"),
|
|
|
|
) {
|
|
|
|
Ok(pdu) => pdu,
|
|
|
|
Err(_) => continue,
|
|
|
|
};
|
|
|
|
|
|
|
|
#[derive(Deserialize)]
|
|
|
|
struct ExtractMembership {
|
|
|
|
membership: MembershipState,
|
2021-04-16 18:18:29 +02:00
|
|
|
}
|
2021-10-13 10:16:45 +02:00
|
|
|
|
|
|
|
let membership = match serde_json::from_str::<ExtractMembership>(pdu.content.get()) {
|
|
|
|
Ok(e) => e.membership,
|
|
|
|
Err(_) => continue,
|
|
|
|
};
|
|
|
|
|
|
|
|
let state_key = match pdu.state_key {
|
|
|
|
Some(k) => k,
|
|
|
|
None => continue,
|
|
|
|
};
|
|
|
|
|
2021-11-27 00:30:28 +01:00
|
|
|
let user_id = match UserId::parse(state_key) {
|
2021-10-13 10:16:45 +02:00
|
|
|
Ok(id) => id,
|
|
|
|
Err(_) => continue,
|
|
|
|
};
|
|
|
|
|
|
|
|
self.update_membership(room_id, &user_id, membership, &pdu.sender, None, db, false)?;
|
2020-09-13 22:24:36 +02:00
|
|
|
}
|
|
|
|
|
2021-09-13 19:45:56 +02:00
|
|
|
self.update_joined_count(room_id, db)?;
|
2021-08-17 00:22:52 +02:00
|
|
|
|
2021-03-17 22:30:25 +01:00
|
|
|
self.roomid_shortstatehash
|
2021-08-12 23:04:00 +02:00
|
|
|
.insert(room_id.as_bytes(), &shortstatehash.to_be_bytes())?;
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
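// Illustrative sketch (not original code): a caller that already holds the full new
// compressed state and the currently stored one could derive the two diff sets that
// `force_state` expects with plain set differences. The bindings `new_state`,
// `old_state`, `shortstatehash` and `db` are assumptions made for this example.
//
//     let statediffnew: HashSet<CompressedStateEvent> =
//         new_state.difference(&old_state).copied().collect();
//     let statediffremoved: HashSet<CompressedStateEvent> =
//         old_state.difference(&new_state).copied().collect();
//     self.force_state(room_id, shortstatehash, statediffnew, statediffremoved, db)?;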
|
|
|
|
|
2020-05-03 17:25:31 +02:00
|
|
|
/// Returns the leaf pdus of a room.
|
2021-07-29 08:36:01 +02:00
|
|
|
#[tracing::instrument(skip(self))]
|
2021-11-27 17:44:52 +01:00
|
|
|
pub fn get_pdu_leaves(&self, room_id: &RoomId) -> Result<HashSet<Arc<EventId>>> {
|
2020-08-26 11:15:52 -04:00
|
|
|
let mut prefix = room_id.as_bytes().to_vec();
|
2020-05-03 17:25:31 +02:00
|
|
|
prefix.push(0xff);
|
|
|
|
|
2021-03-25 23:55:40 +01:00
|
|
|
self.roomid_pduleaves
|
2021-06-12 18:40:33 +02:00
|
|
|
.scan_prefix(prefix)
|
|
|
|
.map(|(_, bytes)| {
|
2021-11-27 17:44:52 +01:00
|
|
|
EventId::parse_arc(utils::string_from_bytes(&bytes).map_err(|_| {
|
2021-06-17 20:34:14 +02:00
|
|
|
Error::bad_database("EventID in roomid_pduleaves is invalid unicode.")
|
|
|
|
})?)
|
|
|
|
.map_err(|_| Error::bad_database("EventId in roomid_pduleaves is invalid."))
|
2020-06-09 15:13:17 +02:00
|
|
|
})
|
2021-03-25 23:55:40 +01:00
|
|
|
.collect()
|
2020-05-03 17:25:31 +02:00
|
|
|
}
|
|
|
|
|
2021-01-18 19:41:38 -05:00
|
|
|
/// Replace the leaves of a room.
|
|
|
|
///
|
2021-02-03 20:00:01 -05:00
|
|
|
/// The provided `event_ids` become the new leaves, this allows a room to have multiple
|
2021-01-18 19:41:38 -05:00
|
|
|
/// `prev_events`.
|
2021-07-29 08:36:01 +02:00
|
|
|
#[tracing::instrument(skip(self))]
|
2021-11-27 16:35:59 +01:00
|
|
|
pub fn replace_pdu_leaves<'a>(
|
|
|
|
&self,
|
|
|
|
room_id: &RoomId,
|
|
|
|
event_ids: impl IntoIterator<Item = &'a EventId> + Debug,
|
|
|
|
) -> Result<()> {
|
2021-01-18 19:08:59 -05:00
|
|
|
let mut prefix = room_id.as_bytes().to_vec();
|
|
|
|
prefix.push(0xff);
|
|
|
|
|
2021-06-08 18:10:00 +02:00
|
|
|
for (key, _) in self.roomid_pduleaves.scan_prefix(prefix.clone()) {
|
|
|
|
self.roomid_pduleaves.remove(&key)?;
|
2021-01-18 19:08:59 -05:00
|
|
|
}
|
|
|
|
|
2021-04-05 21:46:10 +02:00
|
|
|
for event_id in event_ids {
|
2021-01-18 19:08:59 -05:00
|
|
|
let mut key = prefix.to_owned();
|
|
|
|
key.extend_from_slice(event_id.as_bytes());
|
|
|
|
self.roomid_pduleaves.insert(&key, event_id.as_bytes())?;
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
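// Illustrative sketch (not original code): after appending a PDU it usually becomes
// the only leaf, so the next event references it as its single `prev_event`;
// `new_pdu` is an assumed `PduEvent` binding here.
//
//     self.replace_pdu_leaves(&new_pdu.room_id, [&*new_pdu.event_id])?;
//
// Passing several event ids instead keeps multiple leaves alive, which is what gives
// the next PDU more than one `prev_event`.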
|
|
|
|
|
2022-06-19 22:56:14 +02:00
|
|
|
/// Generates a new StateHash and associates it with the incoming event.
|
2021-03-25 23:55:40 +01:00
|
|
|
///
|
|
|
|
/// This adds all current state events (not including the incoming event)
|
|
|
|
/// to `stateid_pduid` and adds the incoming event to `eventid_statehash`.
|
2021-08-24 19:10:31 +02:00
|
|
|
#[tracing::instrument(skip(self, state_ids_compressed, globals))]
|
2021-03-25 23:55:40 +01:00
|
|
|
pub fn set_event_state(
|
|
|
|
&self,
|
|
|
|
event_id: &EventId,
|
2021-08-12 23:04:00 +02:00
|
|
|
room_id: &RoomId,
|
2021-08-24 19:10:31 +02:00
|
|
|
state_ids_compressed: HashSet<CompressedStateEvent>,
|
2021-03-25 23:55:40 +01:00
|
|
|
globals: &super::globals::Globals,
|
|
|
|
) -> Result<()> {
|
2021-09-13 19:45:56 +02:00
|
|
|
let shorteventid = self.get_or_create_shorteventid(event_id, globals)?;
|
2021-03-25 23:55:40 +01:00
|
|
|
|
2021-09-13 19:45:56 +02:00
|
|
|
let previous_shortstatehash = self.current_shortstatehash(room_id)?;
|
2021-08-12 23:04:00 +02:00
|
|
|
|
2021-03-25 23:55:40 +01:00
|
|
|
let state_hash = self.calculate_hash(
|
2021-08-24 19:10:31 +02:00
|
|
|
&state_ids_compressed
|
|
|
|
.iter()
|
|
|
|
.map(|s| &s[..])
|
2021-03-25 23:55:40 +01:00
|
|
|
.collect::<Vec<_>>(),
|
|
|
|
);
|
|
|
|
|
2021-08-11 19:15:38 +02:00
|
|
|
let (shortstatehash, already_existed) =
|
|
|
|
self.get_or_create_shortstatehash(&state_hash, globals)?;
|
2021-03-25 23:55:40 +01:00
|
|
|
|
2021-08-11 19:15:38 +02:00
|
|
|
if !already_existed {
|
2021-08-12 23:04:00 +02:00
|
|
|
let states_parents = previous_shortstatehash
|
|
|
|
.map_or_else(|| Ok(Vec::new()), |p| self.load_shortstatehash_info(p))?;
|
|
|
|
|
|
|
|
let (statediffnew, statediffremoved) =
|
|
|
|
if let Some(parent_stateinfo) = states_parents.last() {
|
2021-10-13 11:51:30 +02:00
|
|
|
let statediffnew: HashSet<_> = state_ids_compressed
|
2021-08-12 23:04:00 +02:00
|
|
|
.difference(&parent_stateinfo.1)
|
2021-10-13 10:24:39 +02:00
|
|
|
.copied()
|
2021-10-13 11:51:30 +02:00
|
|
|
.collect();
|
2021-08-12 23:04:00 +02:00
|
|
|
|
2021-10-13 11:51:30 +02:00
|
|
|
let statediffremoved: HashSet<_> = parent_stateinfo
|
2021-08-12 23:04:00 +02:00
|
|
|
.1
|
|
|
|
.difference(&state_ids_compressed)
|
2021-10-13 10:24:39 +02:00
|
|
|
.copied()
|
2021-10-13 11:51:30 +02:00
|
|
|
.collect();
|
2021-08-12 23:04:00 +02:00
|
|
|
|
|
|
|
(statediffnew, statediffremoved)
|
|
|
|
} else {
|
|
|
|
(state_ids_compressed, HashSet::new())
|
|
|
|
};
|
|
|
|
self.save_state_from_diff(
|
|
|
|
shortstatehash,
|
2021-08-31 21:20:03 +02:00
|
|
|
statediffnew,
|
|
|
|
statediffremoved,
|
2021-08-12 23:04:00 +02:00
|
|
|
1_000_000, // high number because no state will be based on this one
|
|
|
|
states_parents,
|
|
|
|
)?;
|
2021-08-11 19:15:38 +02:00
|
|
|
}
|
2021-03-25 23:55:40 +01:00
|
|
|
|
|
|
|
self.shorteventid_shortstatehash
|
2021-08-11 19:15:38 +02:00
|
|
|
.insert(&shorteventid.to_be_bytes(), &shortstatehash.to_be_bytes())?;
|
2021-03-25 23:55:40 +01:00
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
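// Illustrative sketch (not original code): this is typically used for events whose
// state is already known out of band (e.g. state resolved while joining over
// federation) before the event itself reaches the timeline. `resolved_state` and
// `db.globals` are assumptions made for this example.
//
//     let state_ids_compressed: HashSet<CompressedStateEvent> = resolved_state;
//     self.set_event_state(&event_id, &room_id, state_ids_compressed, &db.globals)?;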
|
|
|
|
|
2020-08-19 17:27:24 -04:00
|
|
|
/// Generates a new StateHash and associates it with the incoming event.
|
|
|
|
///
|
|
|
|
/// This adds all current state events (not including the incoming event)
|
2021-03-17 22:30:25 +01:00
|
|
|
/// to `stateid_pduid` and adds the incoming event to `eventid_statehash`.
|
2021-07-29 08:36:01 +02:00
|
|
|
#[tracing::instrument(skip(self, new_pdu, globals))]
|
2020-12-19 16:00:11 +01:00
|
|
|
pub fn append_to_state(
|
|
|
|
&self,
|
|
|
|
new_pdu: &PduEvent,
|
|
|
|
globals: &super::globals::Globals,
|
2021-03-17 22:30:25 +01:00
|
|
|
) -> Result<u64> {
|
2021-08-11 19:15:38 +02:00
|
|
|
let shorteventid = self.get_or_create_shorteventid(&new_pdu.event_id, globals)?;
|
|
|
|
|
2021-08-12 23:04:00 +02:00
|
|
|
let previous_shortstatehash = self.current_shortstatehash(&new_pdu.room_id)?;
|
2021-08-11 19:15:38 +02:00
|
|
|
|
2021-08-12 23:04:00 +02:00
|
|
|
if let Some(p) = previous_shortstatehash {
|
|
|
|
self.shorteventid_shortstatehash
|
|
|
|
.insert(&shorteventid.to_be_bytes(), &p.to_be_bytes())?;
|
|
|
|
}
|
2021-03-17 22:30:25 +01:00
|
|
|
|
2020-09-12 21:30:07 +02:00
|
|
|
if let Some(state_key) = &new_pdu.state_key {
|
2021-08-12 23:04:00 +02:00
|
|
|
let states_parents = previous_shortstatehash
|
|
|
|
.map_or_else(|| Ok(Vec::new()), |p| self.load_shortstatehash_info(p))?;
|
2020-12-19 16:00:11 +01:00
|
|
|
|
2022-04-06 21:31:29 +02:00
|
|
|
let shortstatekey = self.get_or_create_shortstatekey(
|
|
|
|
&new_pdu.kind.to_string().into(),
|
|
|
|
state_key,
|
|
|
|
globals,
|
|
|
|
)?;
|
2020-09-12 21:30:07 +02:00
|
|
|
|
2021-08-16 23:24:52 +02:00
|
|
|
let new = self.compress_state_event(shortstatekey, &new_pdu.event_id, globals)?;
|
|
|
|
|
2021-08-12 23:04:00 +02:00
|
|
|
let replaces = states_parents
|
|
|
|
.last()
|
|
|
|
.map(|info| {
|
|
|
|
info.1
|
|
|
|
.iter()
|
|
|
|
.find(|bytes| bytes.starts_with(&shortstatekey.to_be_bytes()))
|
|
|
|
})
|
|
|
|
.unwrap_or_default();
|
2020-08-19 17:27:24 -04:00
|
|
|
|
2021-08-16 23:24:52 +02:00
|
|
|
if Some(&new) == replaces {
|
|
|
|
return Ok(previous_shortstatehash.expect("must exist"));
|
|
|
|
}
|
|
|
|
|
2021-08-12 23:04:00 +02:00
|
|
|
// TODO: statehash with deterministic inputs
|
|
|
|
let shortstatehash = globals.next_count()?;
|
2020-08-19 17:27:24 -04:00
|
|
|
|
2021-08-12 23:04:00 +02:00
|
|
|
let mut statediffnew = HashSet::new();
|
|
|
|
statediffnew.insert(new);
|
|
|
|
|
|
|
|
let mut statediffremoved = HashSet::new();
|
|
|
|
if let Some(replaces) = replaces {
|
2021-09-13 19:45:56 +02:00
|
|
|
statediffremoved.insert(*replaces);
|
2021-08-12 23:04:00 +02:00
|
|
|
}
|
2021-08-03 16:14:07 +02:00
|
|
|
|
2021-08-12 23:04:00 +02:00
|
|
|
self.save_state_from_diff(
|
|
|
|
shortstatehash,
|
|
|
|
statediffnew,
|
|
|
|
statediffremoved,
|
|
|
|
2,
|
|
|
|
states_parents,
|
|
|
|
)?;
|
2020-08-19 17:27:24 -04:00
|
|
|
|
2021-03-17 22:30:25 +01:00
|
|
|
Ok(shortstatehash)
|
2020-09-12 21:30:07 +02:00
|
|
|
} else {
|
2021-08-12 23:04:00 +02:00
|
|
|
Ok(previous_shortstatehash.expect("first event in room must be a state event"))
|
2020-09-12 21:30:07 +02:00
|
|
|
}
|
2020-08-19 17:27:24 -04:00
|
|
|
}
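// Illustrative note (not original code): the code above treats a
// `CompressedStateEvent` as the big-endian shortstatekey followed by the
// shorteventid, which is why `replaces` can be found by prefix-matching on
// `shortstatekey.to_be_bytes()` alone. Assumed round trip:
//
//     let new = self.compress_state_event(shortstatekey, &new_pdu.event_id, globals)?;
//     assert!(new.starts_with(&shortstatekey.to_be_bytes()));
//     let (parsed_key, parsed_id) = self.parse_compressed_state_event(new)?;
//     // parsed_key == shortstatekey, parsed_id == new_pdu.event_id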
|
|
|
|
|
2021-07-29 08:36:01 +02:00
|
|
|
#[tracing::instrument(skip(self, invite_event))]
|
2021-04-25 14:10:07 +02:00
|
|
|
pub fn calculate_invite_state(
|
|
|
|
&self,
|
|
|
|
invite_event: &PduEvent,
|
|
|
|
) -> Result<Vec<Raw<AnyStrippedStateEvent>>> {
|
|
|
|
let mut state = Vec::new();
|
|
|
|
// Add recommended events
|
|
|
|
if let Some(e) =
|
2022-04-06 21:31:29 +02:00
|
|
|
self.room_state_get(&invite_event.room_id, &StateEventType::RoomCreate, "")?
|
2021-04-25 14:10:07 +02:00
|
|
|
{
|
|
|
|
state.push(e.to_stripped_state_event());
|
|
|
|
}
|
|
|
|
if let Some(e) =
|
2022-04-06 21:31:29 +02:00
|
|
|
self.room_state_get(&invite_event.room_id, &StateEventType::RoomJoinRules, "")?
|
2021-04-25 14:10:07 +02:00
|
|
|
{
|
|
|
|
state.push(e.to_stripped_state_event());
|
|
|
|
}
|
2022-04-06 21:31:29 +02:00
|
|
|
if let Some(e) = self.room_state_get(
|
|
|
|
&invite_event.room_id,
|
|
|
|
&StateEventType::RoomCanonicalAlias,
|
|
|
|
"",
|
|
|
|
)? {
|
|
|
|
state.push(e.to_stripped_state_event());
|
|
|
|
}
|
|
|
|
if let Some(e) =
|
|
|
|
self.room_state_get(&invite_event.room_id, &StateEventType::RoomAvatar, "")?
|
|
|
|
{
|
2021-04-25 14:10:07 +02:00
|
|
|
state.push(e.to_stripped_state_event());
|
|
|
|
}
|
2022-04-06 21:31:29 +02:00
|
|
|
if let Some(e) =
|
|
|
|
self.room_state_get(&invite_event.room_id, &StateEventType::RoomName, "")?
|
|
|
|
{
|
2021-04-25 14:10:07 +02:00
|
|
|
state.push(e.to_stripped_state_event());
|
|
|
|
}
|
|
|
|
if let Some(e) = self.room_state_get(
|
|
|
|
&invite_event.room_id,
|
2022-04-06 21:31:29 +02:00
|
|
|
&StateEventType::RoomMember,
|
2021-04-25 14:10:07 +02:00
|
|
|
invite_event.sender.as_str(),
|
|
|
|
)? {
|
|
|
|
state.push(e.to_stripped_state_event());
|
|
|
|
}
|
|
|
|
|
|
|
|
state.push(invite_event.to_stripped_state_event());
|
|
|
|
Ok(state)
|
|
|
|
}
|
|
|
|
|
2021-07-29 08:36:01 +02:00
|
|
|
#[tracing::instrument(skip(self))]
|
2021-03-17 22:30:25 +01:00
|
|
|
pub fn set_room_state(&self, room_id: &RoomId, shortstatehash: u64) -> Result<()> {
|
|
|
|
self.roomid_shortstatehash
|
|
|
|
.insert(room_id.as_bytes(), &shortstatehash.to_be_bytes())?;
|
2020-12-31 14:52:08 +01:00
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
2022-06-20 11:31:27 +02:00
|
|
|
}
|
2020-05-03 17:25:31 +02:00
|
|
|
|
2021-07-29 08:36:01 +02:00
|
|
|
#[tracing::instrument(skip(self, globals))]
|
2020-05-25 23:24:13 +02:00
|
|
|
pub fn set_alias(
|
|
|
|
&self,
|
|
|
|
alias: &RoomAliasId,
|
|
|
|
room_id: Option<&RoomId>,
|
|
|
|
globals: &super::globals::Globals,
|
|
|
|
) -> Result<()> {
|
|
|
|
if let Some(room_id) = room_id {
|
2020-06-03 20:55:11 +02:00
|
|
|
// New alias
|
2020-05-25 23:24:13 +02:00
|
|
|
self.alias_roomid
|
2021-09-13 19:45:56 +02:00
|
|
|
.insert(alias.alias().as_bytes(), room_id.as_bytes())?;
|
2020-08-26 11:15:52 -04:00
|
|
|
let mut aliasid = room_id.as_bytes().to_vec();
|
2021-02-11 13:16:14 +01:00
|
|
|
aliasid.push(0xff);
|
2020-05-25 23:24:13 +02:00
|
|
|
aliasid.extend_from_slice(&globals.next_count()?.to_be_bytes());
|
2021-06-08 18:10:00 +02:00
|
|
|
self.aliasid_alias.insert(&aliasid, &*alias.as_bytes())?;
|
2020-05-25 23:24:13 +02:00
|
|
|
} else {
|
2020-06-03 20:55:11 +02:00
|
|
|
// room_id=None means remove alias
|
2021-09-13 19:45:56 +02:00
|
|
|
if let Some(room_id) = self.alias_roomid.get(alias.alias().as_bytes())? {
|
2021-06-08 18:10:00 +02:00
|
|
|
let mut prefix = room_id.to_vec();
|
|
|
|
prefix.push(0xff);
|
|
|
|
|
|
|
|
for (key, _) in self.aliasid_alias.scan_prefix(prefix) {
|
|
|
|
self.aliasid_alias.remove(&key)?;
|
|
|
|
}
|
2021-09-13 19:45:56 +02:00
|
|
|
self.alias_roomid.remove(alias.alias().as_bytes())?;
|
2021-06-08 18:10:00 +02:00
|
|
|
} else {
|
|
|
|
return Err(Error::BadRequest(
|
2020-06-09 15:13:17 +02:00
|
|
|
ErrorKind::NotFound,
|
|
|
|
"Alias does not exist.",
|
2021-06-08 18:10:00 +02:00
|
|
|
));
|
2020-05-25 23:24:13 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2021-07-29 08:36:01 +02:00
|
|
|
#[tracing::instrument(skip(self))]
|
2021-11-26 20:36:40 +01:00
|
|
|
pub fn id_from_alias(&self, alias: &RoomAliasId) -> Result<Option<Box<RoomId>>> {
|
2020-05-25 23:24:13 +02:00
|
|
|
self.alias_roomid
|
2021-06-08 18:10:00 +02:00
|
|
|
.get(alias.alias().as_bytes())?
|
2021-10-13 11:51:30 +02:00
|
|
|
.map(|bytes| {
|
2021-11-27 00:30:28 +01:00
|
|
|
RoomId::parse(utils::string_from_bytes(&bytes).map_err(|_| {
|
2021-10-13 11:51:30 +02:00
|
|
|
Error::bad_database("Room ID in alias_roomid is invalid unicode.")
|
|
|
|
})?)
|
|
|
|
.map_err(|_| Error::bad_database("Room ID in alias_roomid is invalid."))
|
2020-05-25 23:24:13 +02:00
|
|
|
})
|
2021-10-13 11:51:30 +02:00
|
|
|
.transpose()
|
2020-05-25 23:24:13 +02:00
|
|
|
}
|
|
|
|
|
2021-07-29 08:36:01 +02:00
|
|
|
#[tracing::instrument(skip(self))]
|
2021-06-08 18:10:00 +02:00
|
|
|
pub fn room_aliases<'a>(
|
|
|
|
&'a self,
|
|
|
|
room_id: &RoomId,
|
2021-11-26 20:36:40 +01:00
|
|
|
) -> impl Iterator<Item = Result<Box<RoomAliasId>>> + 'a {
|
2020-08-26 11:15:52 -04:00
|
|
|
let mut prefix = room_id.as_bytes().to_vec();
|
2020-05-25 23:24:13 +02:00
|
|
|
prefix.push(0xff);
|
|
|
|
|
2021-06-08 18:10:00 +02:00
|
|
|
self.aliasid_alias.scan_prefix(prefix).map(|(_, bytes)| {
|
2021-06-17 20:34:14 +02:00
|
|
|
utils::string_from_bytes(&bytes)
|
2021-06-08 18:10:00 +02:00
|
|
|
.map_err(|_| Error::bad_database("Invalid alias bytes in aliasid_alias."))?
|
|
|
|
.try_into()
|
2021-06-17 20:34:14 +02:00
|
|
|
.map_err(|_| Error::bad_database("Invalid alias in aliasid_alias."))
|
2021-06-08 18:10:00 +02:00
|
|
|
})
|
2020-05-25 23:24:13 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2021-07-29 08:36:01 +02:00
|
|
|
#[tracing::instrument(skip(self))]
|
2020-05-25 23:24:13 +02:00
|
|
|
pub fn set_public(&self, room_id: &RoomId, public: bool) -> Result<()> {
|
|
|
|
if public {
|
2020-08-26 11:15:52 -04:00
|
|
|
self.publicroomids.insert(room_id.as_bytes(), &[])?;
|
2020-05-25 23:24:13 +02:00
|
|
|
} else {
|
2020-08-26 11:15:52 -04:00
|
|
|
self.publicroomids.remove(room_id.as_bytes())?;
|
2020-05-24 08:30:57 +02:00
|
|
|
}
|
|
|
|
|
2020-05-25 23:24:13 +02:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2021-07-29 08:36:01 +02:00
|
|
|
#[tracing::instrument(skip(self))]
|
2020-05-25 23:24:13 +02:00
|
|
|
pub fn is_public_room(&self, room_id: &RoomId) -> Result<bool> {
|
2021-06-08 18:10:00 +02:00
|
|
|
Ok(self.publicroomids.get(room_id.as_bytes())?.is_some())
|
2020-05-25 23:24:13 +02:00
|
|
|
}
|
|
|
|
|
2021-07-29 08:36:01 +02:00
|
|
|
#[tracing::instrument(skip(self))]
|
2021-11-26 20:36:40 +01:00
|
|
|
pub fn public_rooms(&self) -> impl Iterator<Item = Result<Box<RoomId>>> + '_ {
|
2021-06-08 18:10:00 +02:00
|
|
|
self.publicroomids.iter().map(|(bytes, _)| {
|
2021-11-27 00:30:28 +01:00
|
|
|
RoomId::parse(
|
2021-06-17 20:34:14 +02:00
|
|
|
utils::string_from_bytes(&bytes).map_err(|_| {
|
2020-06-11 10:03:08 +02:00
|
|
|
Error::bad_database("Room ID in publicroomids is invalid unicode.")
|
2021-06-17 20:34:14 +02:00
|
|
|
})?,
|
2020-06-11 10:03:08 +02:00
|
|
|
)
|
2021-06-17 20:34:14 +02:00
|
|
|
.map_err(|_| Error::bad_database("Room ID in publicroomids is invalid."))
|
2020-06-09 15:13:17 +02:00
|
|
|
})
|
2020-05-24 08:30:57 +02:00
|
|
|
}
|
|
|
|
|
2021-06-08 18:10:00 +02:00
|
|
|
use crate::{database::abstraction::Tree, utils, Error, Result};
|
2020-06-05 18:19:26 +02:00
|
|
|
use ruma::{
|
2020-07-28 15:58:50 +02:00
|
|
|
events::{
|
|
|
|
presence::{PresenceEvent, PresenceEventContent},
|
2022-02-12 01:58:36 +01:00
|
|
|
receipt::ReceiptEvent,
|
|
|
|
SyncEphemeralRoomEvent,
|
2020-07-28 15:58:50 +02:00
|
|
|
},
|
|
|
|
presence::PresenceState,
|
2020-12-04 18:16:17 -05:00
|
|
|
serde::Raw,
|
2021-05-12 20:04:28 +02:00
|
|
|
signatures::CanonicalJsonObject,
|
2021-03-04 12:29:08 +01:00
|
|
|
RoomId, UInt, UserId,
|
2020-06-05 18:19:26 +02:00
|
|
|
};
|
2020-07-28 15:58:50 +02:00
|
|
|
use std::{
|
2021-04-22 11:27:01 +02:00
|
|
|
collections::{HashMap, HashSet},
|
2020-10-06 20:43:35 +02:00
|
|
|
mem,
|
2021-06-08 18:10:00 +02:00
|
|
|
sync::Arc,
|
2020-07-28 15:58:50 +02:00
|
|
|
};
|
2020-05-03 17:25:31 +02:00
|
|
|
|
|
|
|
pub struct RoomEdus {
|
2021-06-08 18:10:00 +02:00
|
|
|
pub(in super::super) readreceiptid_readreceipt: Arc<dyn Tree>, // ReadReceiptId = RoomId + Count + UserId
|
|
|
|
pub(in super::super) roomuserid_privateread: Arc<dyn Tree>, // RoomUserId = Room + User, PrivateRead = Count
|
|
|
|
pub(in super::super) roomuserid_lastprivatereadupdate: Arc<dyn Tree>, // LastPrivateReadUpdate = Count
|
|
|
|
pub(in super::super) typingid_userid: Arc<dyn Tree>, // TypingId = RoomId + TimeoutTime + Count
|
|
|
|
pub(in super::super) roomid_lasttypingupdate: Arc<dyn Tree>, // LastRoomTypingUpdate = Count
|
|
|
|
pub(in super::super) presenceid_presence: Arc<dyn Tree>, // PresenceId = RoomId + Count + UserId
|
|
|
|
pub(in super::super) userid_lastpresenceupdate: Arc<dyn Tree>, // LastPresenceUpdate = Count
|
2020-05-03 17:25:31 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
impl RoomEdus {
|
|
|
|
/// Adds an event which will be saved until a new event replaces it (e.g. read receipt).
|
2020-08-23 17:29:39 +02:00
|
|
|
pub fn readreceipt_update(
|
2020-05-03 17:25:31 +02:00
|
|
|
&self,
|
|
|
|
user_id: &UserId,
|
|
|
|
room_id: &RoomId,
|
2022-02-12 01:58:36 +01:00
|
|
|
event: ReceiptEvent,
|
2020-05-03 17:25:31 +02:00
|
|
|
globals: &super::super::globals::Globals,
|
|
|
|
) -> Result<()> {
|
2021-04-05 21:44:21 +02:00
|
|
|
let mut prefix = room_id.as_bytes().to_vec();
|
2020-05-03 17:25:31 +02:00
|
|
|
prefix.push(0xff);
|
|
|
|
|
2021-06-08 18:10:00 +02:00
|
|
|
let mut last_possible_key = prefix.clone();
|
|
|
|
last_possible_key.extend_from_slice(&u64::MAX.to_be_bytes());
|
|
|
|
|
2020-05-03 17:25:31 +02:00
|
|
|
// Remove old entry
|
2021-06-08 18:10:00 +02:00
|
|
|
if let Some((old, _)) = self
|
2020-08-23 17:29:39 +02:00
|
|
|
.readreceiptid_readreceipt
|
2021-06-08 18:10:00 +02:00
|
|
|
.iter_from(&last_possible_key, true)
|
|
|
|
.take_while(|(key, _)| key.starts_with(&prefix))
|
|
|
|
.find(|(key, _)| {
|
2020-06-09 15:13:17 +02:00
|
|
|
key.rsplit(|&b| b == 0xff)
|
|
|
|
.next()
|
|
|
|
.expect("rsplit always returns an element")
|
2021-04-05 21:44:21 +02:00
|
|
|
== user_id.as_bytes()
|
2020-05-03 17:25:31 +02:00
|
|
|
})
|
|
|
|
{
|
|
|
|
// This is the old room_latest
|
2021-06-08 18:10:00 +02:00
|
|
|
self.readreceiptid_readreceipt.remove(&old)?;
|
2020-05-03 17:25:31 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
let mut room_latest_id = prefix;
|
|
|
|
room_latest_id.extend_from_slice(&globals.next_count()?.to_be_bytes());
|
|
|
|
room_latest_id.push(0xff);
|
2021-09-13 19:45:56 +02:00
|
|
|
room_latest_id.extend_from_slice(user_id.as_bytes());
|
2020-05-03 17:25:31 +02:00
|
|
|
|
2020-08-23 17:29:39 +02:00
|
|
|
self.readreceiptid_readreceipt.insert(
|
2021-06-08 18:10:00 +02:00
|
|
|
&room_latest_id,
|
|
|
|
&serde_json::to_vec(&event).expect("EduEvent::to_string always works"),
|
2020-06-09 15:13:17 +02:00
|
|
|
)?;
|
2020-05-03 17:25:31 +02:00
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
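// Illustrative note (not original code): the key written above follows the layout
// documented on `readreceiptid_readreceipt`:
//
//     RoomId ++ 0xff ++ Count (8 bytes, big-endian) ++ 0xff ++ UserId
//
// `readreceipts_since` below slices the count and the user id back out of the key at
// exactly those offsets, so the two sides have to stay in sync.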
|
|
|
|
|
|
|
|
/// Returns an iterator over the most recent read receipts in a room that happened after the count `since`.
|
2021-02-28 12:41:03 +01:00
|
|
|
#[tracing::instrument(skip(self))]
|
2021-06-08 18:10:00 +02:00
|
|
|
pub fn readreceipts_since<'a>(
|
|
|
|
&'a self,
|
2020-05-03 17:25:31 +02:00
|
|
|
room_id: &RoomId,
|
|
|
|
since: u64,
|
2021-11-26 20:36:40 +01:00
|
|
|
) -> impl Iterator<
|
|
|
|
Item = Result<(
|
|
|
|
Box<UserId>,
|
|
|
|
u64,
|
|
|
|
Raw<ruma::events::AnySyncEphemeralRoomEvent>,
|
|
|
|
)>,
|
|
|
|
> + 'a {
|
2021-04-05 21:44:21 +02:00
|
|
|
let mut prefix = room_id.as_bytes().to_vec();
|
2020-05-03 17:25:31 +02:00
|
|
|
prefix.push(0xff);
|
2021-05-17 10:25:27 +02:00
|
|
|
let prefix2 = prefix.clone();
|
2020-05-03 17:25:31 +02:00
|
|
|
|
|
|
|
let mut first_possible_edu = prefix.clone();
|
2020-06-04 22:36:48 +02:00
|
|
|
first_possible_edu.extend_from_slice(&(since + 1).to_be_bytes()); // +1 so we don't send the event at since
|
2020-05-03 17:25:31 +02:00
|
|
|
|
2021-06-08 18:10:00 +02:00
|
|
|
self.readreceiptid_readreceipt
|
|
|
|
.iter_from(&first_possible_edu, false)
|
2021-05-17 10:25:27 +02:00
|
|
|
.take_while(move |(k, _)| k.starts_with(&prefix2))
|
|
|
|
.map(move |(k, v)| {
|
|
|
|
let count =
|
|
|
|
utils::u64_from_bytes(&k[prefix.len()..prefix.len() + mem::size_of::<u64>()])
|
|
|
|
.map_err(|_| Error::bad_database("Invalid readreceiptid count in db."))?;
|
2021-11-27 00:30:28 +01:00
|
|
|
let user_id = UserId::parse(
|
2021-05-17 10:25:27 +02:00
|
|
|
utils::string_from_bytes(&k[prefix.len() + mem::size_of::<u64>() + 1..])
|
|
|
|
.map_err(|_| {
|
|
|
|
Error::bad_database("Invalid readreceiptid userid bytes in db.")
|
|
|
|
})?,
|
|
|
|
)
|
|
|
|
.map_err(|_| Error::bad_database("Invalid readreceiptid userid in db."))?;
|
|
|
|
|
2021-05-12 20:04:28 +02:00
|
|
|
let mut json = serde_json::from_slice::<CanonicalJsonObject>(&v).map_err(|_| {
|
|
|
|
Error::bad_database("Read receipt in roomlatestid_roomlatest is invalid json.")
|
|
|
|
})?;
|
|
|
|
json.remove("room_id");
|
2021-05-17 10:25:27 +02:00
|
|
|
|
|
|
|
Ok((
|
|
|
|
user_id,
|
|
|
|
count,
|
|
|
|
Raw::from_json(
|
|
|
|
serde_json::value::to_raw_value(&json).expect("json is valid raw value"),
|
|
|
|
),
|
2021-05-12 20:04:28 +02:00
|
|
|
))
|
2021-06-08 18:10:00 +02:00
|
|
|
})
|
2020-05-03 17:25:31 +02:00
|
|
|
}
|
|
|
|
|
2020-08-23 17:29:39 +02:00
|
|
|
/// Sets a private read marker at `count`.
|
2021-07-29 08:36:01 +02:00
|
|
|
#[tracing::instrument(skip(self, globals))]
|
2020-08-25 13:24:38 +02:00
|
|
|
pub fn private_read_set(
|
|
|
|
&self,
|
|
|
|
room_id: &RoomId,
|
|
|
|
user_id: &UserId,
|
|
|
|
count: u64,
|
|
|
|
globals: &super::super::globals::Globals,
|
|
|
|
) -> Result<()> {
|
2021-04-05 21:44:21 +02:00
|
|
|
let mut key = room_id.as_bytes().to_vec();
|
2020-08-23 17:29:39 +02:00
|
|
|
key.push(0xff);
|
2021-09-13 19:45:56 +02:00
|
|
|
key.extend_from_slice(user_id.as_bytes());
|
2020-08-23 17:29:39 +02:00
|
|
|
|
|
|
|
self.roomuserid_privateread
|
|
|
|
.insert(&key, &count.to_be_bytes())?;
|
|
|
|
|
|
|
|
self.roomuserid_lastprivatereadupdate
|
|
|
|
.insert(&key, &globals.next_count()?.to_be_bytes())?;
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns the private read marker.
|
2021-02-28 12:41:03 +01:00
|
|
|
#[tracing::instrument(skip(self))]
|
2020-08-23 17:29:39 +02:00
|
|
|
pub fn private_read_get(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<u64>> {
|
2021-04-05 21:44:21 +02:00
|
|
|
let mut key = room_id.as_bytes().to_vec();
|
2020-08-23 17:29:39 +02:00
|
|
|
key.push(0xff);
|
2021-09-13 19:45:56 +02:00
|
|
|
key.extend_from_slice(user_id.as_bytes());
|
2020-08-23 17:29:39 +02:00
|
|
|
|
2021-06-08 18:10:00 +02:00
|
|
|
self.roomuserid_privateread
|
|
|
|
.get(&key)?
|
|
|
|
.map_or(Ok(None), |v| {
|
|
|
|
Ok(Some(utils::u64_from_bytes(&v).map_err(|_| {
|
|
|
|
Error::bad_database("Invalid private read marker bytes")
|
|
|
|
})?))
|
|
|
|
})
|
2020-08-23 17:29:39 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns the count of the last private read update in this room.
|
|
|
|
pub fn last_privateread_update(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64> {
|
2021-04-05 21:44:21 +02:00
|
|
|
let mut key = room_id.as_bytes().to_vec();
|
2020-08-23 17:29:39 +02:00
|
|
|
key.push(0xff);
|
2021-09-13 19:45:56 +02:00
|
|
|
key.extend_from_slice(user_id.as_bytes());
|
2020-08-23 17:29:39 +02:00
|
|
|
|
|
|
|
Ok(self
|
|
|
|
.roomuserid_lastprivatereadupdate
|
|
|
|
.get(&key)?
|
2021-10-13 11:51:30 +02:00
|
|
|
.map(|bytes| {
|
|
|
|
utils::u64_from_bytes(&bytes).map_err(|_| {
|
2020-08-23 17:29:39 +02:00
|
|
|
Error::bad_database("Count in roomuserid_lastprivatereadupdate is invalid.")
|
2021-10-13 11:51:30 +02:00
|
|
|
})
|
|
|
|
})
|
|
|
|
.transpose()?
|
2020-08-23 17:29:39 +02:00
|
|
|
.unwrap_or(0))
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Sets a user as typing until the timeout timestamp is reached or typing_remove is
|
2020-06-04 11:17:36 +02:00
|
|
|
/// called.
|
2020-08-23 17:29:39 +02:00
|
|
|
pub fn typing_add(
|
2020-05-03 17:25:31 +02:00
|
|
|
&self,
|
2020-06-04 11:17:36 +02:00
|
|
|
user_id: &UserId,
|
2020-05-03 17:25:31 +02:00
|
|
|
room_id: &RoomId,
|
|
|
|
timeout: u64,
|
|
|
|
globals: &super::super::globals::Globals,
|
|
|
|
) -> Result<()> {
|
2021-04-05 21:44:21 +02:00
|
|
|
let mut prefix = room_id.as_bytes().to_vec();
|
2020-05-03 17:25:31 +02:00
|
|
|
prefix.push(0xff);
|
|
|
|
|
2020-06-04 11:17:36 +02:00
|
|
|
let count = globals.next_count()?.to_be_bytes();
|
|
|
|
|
2020-08-23 17:29:39 +02:00
|
|
|
let mut room_typing_id = prefix;
|
|
|
|
room_typing_id.extend_from_slice(&timeout.to_be_bytes());
|
|
|
|
room_typing_id.push(0xff);
|
|
|
|
room_typing_id.extend_from_slice(&count);
|
2020-06-04 11:17:36 +02:00
|
|
|
|
2020-08-23 17:29:39 +02:00
|
|
|
self.typingid_userid
|
2021-04-05 21:44:21 +02:00
|
|
|
.insert(&room_typing_id, &*user_id.as_bytes())?;
|
2020-06-04 11:17:36 +02:00
|
|
|
|
2020-08-23 17:29:39 +02:00
|
|
|
self.roomid_lasttypingupdate
|
2021-09-13 19:45:56 +02:00
|
|
|
.insert(room_id.as_bytes(), &count)?;
|
2020-06-04 11:17:36 +02:00
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
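// Illustrative note (not original code): because the key built above is
//
//     RoomId ++ 0xff ++ timeout (8 bytes, big-endian) ++ 0xff ++ count
//
// entries for a room sort by their timeout timestamp, so `typings_maintain` below
// can walk them in order and stop at the first entry that has not expired yet.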
|
|
|
|
|
|
|
|
/// Removes a user from typing before the timeout is reached.
|
2020-08-23 17:29:39 +02:00
|
|
|
pub fn typing_remove(
|
2020-06-04 11:17:36 +02:00
|
|
|
&self,
|
|
|
|
user_id: &UserId,
|
|
|
|
room_id: &RoomId,
|
|
|
|
globals: &super::super::globals::Globals,
|
|
|
|
) -> Result<()> {
|
2021-04-05 21:44:21 +02:00
|
|
|
let mut prefix = room_id.as_bytes().to_vec();
|
2020-06-04 11:17:36 +02:00
|
|
|
prefix.push(0xff);
|
|
|
|
|
|
|
|
let user_id = user_id.to_string();
|
|
|
|
|
|
|
|
let mut found_outdated = false;
|
|
|
|
|
2020-08-23 17:29:39 +02:00
|
|
|
// Maybe there are multiple ones from calling typing_add multiple times
|
2020-05-03 17:25:31 +02:00
|
|
|
for outdated_edu in self
|
2020-08-23 17:29:39 +02:00
|
|
|
.typingid_userid
|
2021-06-08 18:10:00 +02:00
|
|
|
.scan_prefix(prefix)
|
|
|
|
.filter(|(_, v)| &**v == user_id.as_bytes())
|
2020-05-03 17:25:31 +02:00
|
|
|
{
|
2021-06-08 18:10:00 +02:00
|
|
|
self.typingid_userid.remove(&outdated_edu.0)?;
|
2020-06-04 11:17:36 +02:00
|
|
|
found_outdated = true;
|
2020-05-03 17:25:31 +02:00
|
|
|
}
|
|
|
|
|
2020-06-04 11:17:36 +02:00
|
|
|
if found_outdated {
|
2021-04-05 21:44:21 +02:00
|
|
|
self.roomid_lasttypingupdate
|
2021-09-13 19:45:56 +02:00
|
|
|
.insert(room_id.as_bytes(), &globals.next_count()?.to_be_bytes())?;
|
2020-06-04 11:17:36 +02:00
|
|
|
}
|
2020-05-03 17:25:31 +02:00
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2020-06-04 11:17:36 +02:00
|
|
|
/// Makes sure that typing events with old timestamps get removed.
|
2020-08-23 17:29:39 +02:00
|
|
|
fn typings_maintain(
|
2020-06-04 11:17:36 +02:00
|
|
|
&self,
|
|
|
|
room_id: &RoomId,
|
|
|
|
globals: &super::super::globals::Globals,
|
|
|
|
) -> Result<()> {
|
2021-04-05 21:44:21 +02:00
|
|
|
let mut prefix = room_id.as_bytes().to_vec();
|
2020-05-03 17:25:31 +02:00
|
|
|
prefix.push(0xff);
|
|
|
|
|
2020-06-04 11:17:36 +02:00
|
|
|
let current_timestamp = utils::millis_since_unix_epoch();
|
2020-05-03 17:25:31 +02:00
|
|
|
|
2020-06-04 11:17:36 +02:00
|
|
|
let mut found_outdated = false;
|
|
|
|
|
|
|
|
// Find all outdated edus before inserting a new one
|
2020-05-03 17:25:31 +02:00
|
|
|
for outdated_edu in self
|
2020-08-23 17:29:39 +02:00
|
|
|
.typingid_userid
|
2021-06-08 18:10:00 +02:00
|
|
|
.scan_prefix(prefix)
|
|
|
|
.map(|(key, _)| {
|
2020-06-09 15:13:17 +02:00
|
|
|
Ok::<_, Error>((
|
|
|
|
key.clone(),
|
2020-10-06 20:43:35 +02:00
|
|
|
utils::u64_from_bytes(
|
|
|
|
&key.splitn(2, |&b| b == 0xff).nth(1).ok_or_else(|| {
|
|
|
|
Error::bad_database("RoomTyping has invalid timestamp or delimiters.")
|
|
|
|
})?[0..mem::size_of::<u64>()],
|
|
|
|
)
|
2020-08-23 17:29:39 +02:00
|
|
|
.map_err(|_| Error::bad_database("RoomTyping has invalid timestamp bytes."))?,
|
2020-06-09 15:13:17 +02:00
|
|
|
))
|
2020-06-04 11:17:36 +02:00
|
|
|
})
|
2020-06-09 15:13:17 +02:00
|
|
|
.filter_map(|r| r.ok())
|
|
|
|
.take_while(|&(_, timestamp)| timestamp < current_timestamp)
|
2020-05-03 17:25:31 +02:00
|
|
|
{
|
2020-06-04 11:17:36 +02:00
|
|
|
// This is an outdated edu (time > timestamp)
|
2021-06-08 18:10:00 +02:00
|
|
|
self.typingid_userid.remove(&outdated_edu.0)?;
|
2020-06-04 11:17:36 +02:00
|
|
|
found_outdated = true;
|
|
|
|
}
|
|
|
|
|
|
|
|
if found_outdated {
|
2021-04-05 21:44:21 +02:00
|
|
|
self.roomid_lasttypingupdate
|
2021-09-13 19:45:56 +02:00
|
|
|
.insert(room_id.as_bytes(), &globals.next_count()?.to_be_bytes())?;
|
2020-05-03 17:25:31 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2020-08-23 17:29:39 +02:00
|
|
|
/// Returns the count of the last typing update in this room.
|
2021-02-28 12:41:03 +01:00
|
|
|
#[tracing::instrument(skip(self, globals))]
|
2020-08-23 17:29:39 +02:00
|
|
|
pub fn last_typing_update(
|
2020-05-03 17:25:31 +02:00
|
|
|
&self,
|
|
|
|
room_id: &RoomId,
|
2020-06-04 11:17:36 +02:00
|
|
|
globals: &super::super::globals::Globals,
|
|
|
|
) -> Result<u64> {
|
2020-08-23 17:29:39 +02:00
|
|
|
self.typings_maintain(room_id, globals)?;
|
2020-06-04 11:17:36 +02:00
|
|
|
|
|
|
|
Ok(self
|
2020-08-23 17:29:39 +02:00
|
|
|
.roomid_lasttypingupdate
|
2021-09-13 19:45:56 +02:00
|
|
|
.get(room_id.as_bytes())?
|
2021-10-13 11:51:30 +02:00
|
|
|
.map(|bytes| {
|
|
|
|
utils::u64_from_bytes(&bytes).map_err(|_| {
|
2020-06-11 10:03:08 +02:00
|
|
|
Error::bad_database("Count in roomid_lastroomactiveupdate is invalid.")
|
2021-10-13 11:51:30 +02:00
|
|
|
})
|
|
|
|
})
|
|
|
|
.transpose()?
|
2020-06-04 11:17:36 +02:00
|
|
|
.unwrap_or(0))
|
|
|
|
}
|
|
|
|
|
2020-08-23 17:29:39 +02:00
|
|
|
pub fn typings_all(
|
2020-07-21 14:04:39 -04:00
|
|
|
&self,
|
|
|
|
room_id: &RoomId,
|
|
|
|
) -> Result<SyncEphemeralRoomEvent<ruma::events::typing::TypingEventContent>> {
|
2021-04-05 21:44:21 +02:00
|
|
|
let mut prefix = room_id.as_bytes().to_vec();
|
2020-05-03 17:25:31 +02:00
|
|
|
prefix.push(0xff);
|
|
|
|
|
2021-04-22 11:27:01 +02:00
|
|
|
let mut user_ids = HashSet::new();
|
2020-05-03 17:25:31 +02:00
|
|
|
|
2021-11-27 00:30:28 +01:00
|
|
|
for (_, user_id) in self.typingid_userid.scan_prefix(prefix) {
|
|
|
|
let user_id = UserId::parse(utils::string_from_bytes(&user_id).map_err(|_| {
|
|
|
|
Error::bad_database("User ID in typingid_userid is invalid unicode.")
|
|
|
|
})?)
|
|
|
|
.map_err(|_| Error::bad_database("User ID in typingid_userid is invalid."))?;
|
|
|
|
|
|
|
|
user_ids.insert(user_id);
|
2020-06-04 11:17:36 +02:00
|
|
|
}
|
|
|
|
|
2020-07-21 14:04:39 -04:00
|
|
|
Ok(SyncEphemeralRoomEvent {
|
2021-04-22 11:27:01 +02:00
|
|
|
content: ruma::events::typing::TypingEventContent {
|
|
|
|
user_ids: user_ids.into_iter().collect(),
|
|
|
|
},
|
2020-06-04 11:17:36 +02:00
|
|
|
})
|
2020-05-03 17:25:31 +02:00
|
|
|
}
|
|
|
|
|
2020-07-28 15:58:50 +02:00
|
|
|
/// Adds a presence event which will be saved until a new event replaces it.
|
|
|
|
///
|
|
|
|
/// Note: This method takes a RoomId because presence updates are always bound to rooms to
|
|
|
|
/// make sure users outside these rooms can't see them.
|
|
|
|
pub fn update_presence(
|
|
|
|
&self,
|
|
|
|
user_id: &UserId,
|
|
|
|
room_id: &RoomId,
|
2021-10-13 10:24:39 +02:00
|
|
|
presence: PresenceEvent,
|
2020-07-28 15:58:50 +02:00
|
|
|
globals: &super::super::globals::Globals,
|
|
|
|
) -> Result<()> {
|
|
|
|
// TODO: Remove old entry? Or maybe just wipe completely from time to time?
|
|
|
|
|
|
|
|
let count = globals.next_count()?.to_be_bytes();
|
|
|
|
|
2021-04-05 21:44:21 +02:00
|
|
|
let mut presence_id = room_id.as_bytes().to_vec();
|
2020-07-28 15:58:50 +02:00
|
|
|
presence_id.push(0xff);
|
|
|
|
presence_id.extend_from_slice(&count);
|
|
|
|
presence_id.push(0xff);
|
2021-09-13 19:45:56 +02:00
|
|
|
presence_id.extend_from_slice(presence.sender.as_bytes());
|
2020-07-28 15:58:50 +02:00
|
|
|
|
|
|
|
self.presenceid_presence.insert(
|
2021-06-08 18:10:00 +02:00
|
|
|
&presence_id,
|
|
|
|
&serde_json::to_vec(&presence).expect("PresenceEvent can be serialized"),
|
2020-07-28 15:58:50 +02:00
|
|
|
)?;
|
|
|
|
|
|
|
|
self.userid_lastpresenceupdate.insert(
|
2021-06-08 18:10:00 +02:00
|
|
|
user_id.as_bytes(),
|
2020-07-28 15:58:50 +02:00
|
|
|
&utils::millis_since_unix_epoch().to_be_bytes(),
|
|
|
|
)?;
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Resets the presence timeout, so the user will stay in their current presence state.
|
2021-02-28 12:41:03 +01:00
|
|
|
#[tracing::instrument(skip(self))]
|
2020-07-28 15:58:50 +02:00
|
|
|
pub fn ping_presence(&self, user_id: &UserId) -> Result<()> {
|
|
|
|
self.userid_lastpresenceupdate.insert(
|
2021-09-13 19:45:56 +02:00
|
|
|
user_id.as_bytes(),
|
2020-07-28 15:58:50 +02:00
|
|
|
&utils::millis_since_unix_epoch().to_be_bytes(),
|
|
|
|
)?;
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns the timestamp of the last presence update of this user in millis since the unix epoch.
|
|
|
|
pub fn last_presence_update(&self, user_id: &UserId) -> Result<Option<u64>> {
|
|
|
|
self.userid_lastpresenceupdate
|
2021-09-13 19:45:56 +02:00
|
|
|
.get(user_id.as_bytes())?
|
2020-07-28 15:58:50 +02:00
|
|
|
.map(|bytes| {
|
|
|
|
utils::u64_from_bytes(&bytes).map_err(|_| {
|
|
|
|
Error::bad_database("Invalid timestamp in userid_lastpresenceupdate.")
|
|
|
|
})
|
|
|
|
})
|
|
|
|
.transpose()
|
|
|
|
}
|
|
|
|
|
2021-05-14 11:03:18 +02:00
|
|
|
pub fn get_last_presence_event(
|
|
|
|
&self,
|
|
|
|
user_id: &UserId,
|
|
|
|
room_id: &RoomId,
|
|
|
|
) -> Result<Option<PresenceEvent>> {
|
|
|
|
let last_update = match self.last_presence_update(user_id)? {
|
|
|
|
Some(last) => last,
|
|
|
|
None => return Ok(None),
|
|
|
|
};
|
|
|
|
|
|
|
|
let mut presence_id = room_id.as_bytes().to_vec();
|
|
|
|
presence_id.push(0xff);
|
|
|
|
presence_id.extend_from_slice(&last_update.to_be_bytes());
|
|
|
|
presence_id.push(0xff);
|
2021-09-13 19:45:56 +02:00
|
|
|
presence_id.extend_from_slice(user_id.as_bytes());
|
2021-05-14 11:03:18 +02:00
|
|
|
|
|
|
|
self.presenceid_presence
|
2021-06-08 18:10:00 +02:00
|
|
|
.get(&presence_id)?
|
2021-05-14 11:03:18 +02:00
|
|
|
.map(|value| {
|
2021-10-13 11:51:30 +02:00
|
|
|
let mut presence: PresenceEvent = serde_json::from_slice(&value)
|
2021-05-14 11:03:18 +02:00
|
|
|
.map_err(|_| Error::bad_database("Invalid presence event in db."))?;
|
|
|
|
let current_timestamp: UInt = utils::millis_since_unix_epoch()
|
|
|
|
.try_into()
|
|
|
|
.expect("time is valid");
|
|
|
|
|
|
|
|
if presence.content.presence == PresenceState::Online {
|
|
|
|
// Don't set last_active_ago when the user is online
|
|
|
|
presence.content.last_active_ago = None;
|
|
|
|
} else {
|
|
|
|
// Convert from timestamp to duration
|
|
|
|
presence.content.last_active_ago = presence
|
|
|
|
.content
|
|
|
|
.last_active_ago
|
|
|
|
.map(|timestamp| current_timestamp - timestamp);
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(presence)
|
|
|
|
})
|
|
|
|
.transpose()
|
|
|
|
}
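// Illustrative note (not original code): `last_active_ago` is stored as an absolute
// timestamp and converted to a duration on read, as done above. With assumed
// numbers:
//
//     stored : last_active_ago = Some(1_000_000)  // ms since epoch at last activity
//     now    : current_timestamp = 1_000_500
//     served : last_active_ago = Some(1_000_500 - 1_000_000) = Some(500)  // ms ago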
|
|
|
|
|
2020-07-28 15:58:50 +02:00
|
|
|
/// Sets all users to offline who have been quiet for too long.
|
2021-09-01 15:21:02 +02:00
|
|
|
fn _presence_maintain(
|
2020-07-28 15:58:50 +02:00
|
|
|
&self,
|
|
|
|
rooms: &super::Rooms,
|
|
|
|
globals: &super::super::globals::Globals,
|
|
|
|
) -> Result<()> {
|
|
|
|
let current_timestamp = utils::millis_since_unix_epoch();
|
|
|
|
|
|
|
|
for (user_id_bytes, last_timestamp) in self
|
|
|
|
.userid_lastpresenceupdate
|
|
|
|
.iter()
|
|
|
|
.filter_map(|(k, bytes)| {
|
|
|
|
Some((
|
|
|
|
k,
|
|
|
|
utils::u64_from_bytes(&bytes)
|
|
|
|
.map_err(|_| {
|
|
|
|
Error::bad_database("Invalid timestamp in userid_lastpresenceupdate.")
|
|
|
|
})
|
|
|
|
.ok()?,
|
|
|
|
))
|
|
|
|
})
|
2021-04-11 21:35:17 +02:00
|
|
|
.take_while(|(_, timestamp)| current_timestamp.saturating_sub(*timestamp) > 5 * 60_000)
|
2020-07-29 17:37:26 +02:00
|
|
|
// 5 Minutes
|
2020-07-28 15:58:50 +02:00
|
|
|
{
|
|
|
|
// Send new presence events to set the user offline
|
|
|
|
let count = globals.next_count()?.to_be_bytes();
|
2021-11-26 20:36:40 +01:00
|
|
|
let user_id: Box<_> = utils::string_from_bytes(&user_id_bytes)
|
2020-07-28 15:58:50 +02:00
|
|
|
.map_err(|_| {
|
|
|
|
Error::bad_database("Invalid UserId bytes in userid_lastpresenceupdate.")
|
|
|
|
})?
|
|
|
|
.try_into()
|
|
|
|
.map_err(|_| Error::bad_database("Invalid UserId in userid_lastpresenceupdate."))?;
|
|
|
|
for room_id in rooms.rooms_joined(&user_id).filter_map(|r| r.ok()) {
|
2021-04-05 21:44:21 +02:00
|
|
|
let mut presence_id = room_id.as_bytes().to_vec();
|
2020-07-28 15:58:50 +02:00
|
|
|
presence_id.push(0xff);
|
|
|
|
presence_id.extend_from_slice(&count);
|
|
|
|
presence_id.push(0xff);
|
|
|
|
presence_id.extend_from_slice(&user_id_bytes);
|
|
|
|
|
|
|
|
self.presenceid_presence.insert(
|
2021-06-08 18:10:00 +02:00
|
|
|
&presence_id,
|
|
|
|
&serde_json::to_vec(&PresenceEvent {
|
2020-07-28 15:58:50 +02:00
|
|
|
content: PresenceEventContent {
|
|
|
|
avatar_url: None,
|
|
|
|
currently_active: None,
|
|
|
|
displayname: None,
|
|
|
|
last_active_ago: Some(
|
|
|
|
last_timestamp.try_into().expect("time is valid"),
|
|
|
|
),
|
|
|
|
presence: PresenceState::Offline,
|
|
|
|
status_msg: None,
|
|
|
|
},
|
2021-11-26 20:36:40 +01:00
|
|
|
sender: user_id.to_owned(),
|
2020-07-28 15:58:50 +02:00
|
|
|
})
|
|
|
|
.expect("PresenceEvent can be serialized"),
|
|
|
|
)?;
|
|
|
|
}
|
2021-01-10 17:12:16 +01:00
|
|
|
|
|
|
|
self.userid_lastpresenceupdate.insert(
|
2021-09-13 19:45:56 +02:00
|
|
|
user_id.as_bytes(),
|
2021-01-10 17:12:16 +01:00
|
|
|
&utils::millis_since_unix_epoch().to_be_bytes(),
|
|
|
|
)?;
|
2020-07-28 15:58:50 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns the most recent presence updates that happened after the count `since`.
|
2021-09-01 15:21:02 +02:00
|
|
|
#[tracing::instrument(skip(self, since, _rooms, _globals))]
|
2020-07-28 15:58:50 +02:00
|
|
|
pub fn presence_since(
|
|
|
|
&self,
|
|
|
|
room_id: &RoomId,
|
|
|
|
since: u64,
|
2021-09-01 15:21:02 +02:00
|
|
|
_rooms: &super::Rooms,
|
|
|
|
_globals: &super::super::globals::Globals,
|
2021-11-26 20:36:40 +01:00
|
|
|
) -> Result<HashMap<Box<UserId>, PresenceEvent>> {
|
2021-08-03 16:14:07 +02:00
|
|
|
//self.presence_maintain(rooms, globals)?;
|
2020-07-28 15:58:50 +02:00
|
|
|
|
2021-04-05 21:44:21 +02:00
|
|
|
let mut prefix = room_id.as_bytes().to_vec();
|
2020-07-28 15:58:50 +02:00
|
|
|
prefix.push(0xff);
|
|
|
|
|
|
|
|
let mut first_possible_edu = prefix.clone();
|
|
|
|
first_possible_edu.extend_from_slice(&(since + 1).to_be_bytes()); // +1 so we don't send the event at since
|
|
|
|
let mut hashmap = HashMap::new();
|
|
|
|
|
|
|
|
for (key, value) in self
|
|
|
|
.presenceid_presence
|
2021-06-08 18:10:00 +02:00
|
|
|
.iter_from(&*first_possible_edu, false)
|
2020-07-28 15:58:50 +02:00
|
|
|
.take_while(|(key, _)| key.starts_with(&prefix))
|
|
|
|
{
|
2021-11-27 00:30:28 +01:00
|
|
|
let user_id = UserId::parse(
|
2020-07-28 15:58:50 +02:00
|
|
|
utils::string_from_bytes(
|
|
|
|
key.rsplit(|&b| b == 0xff)
|
|
|
|
.next()
|
|
|
|
.expect("rsplit always returns an element"),
|
|
|
|
)
|
|
|
|
.map_err(|_| Error::bad_database("Invalid UserId bytes in presenceid_presence."))?,
|
|
|
|
)
|
|
|
|
.map_err(|_| Error::bad_database("Invalid UserId in presenceid_presence."))?;
|
|
|
|
|
2021-10-13 11:51:30 +02:00
|
|
|
let mut presence: PresenceEvent = serde_json::from_slice(&value)
|
2020-07-28 15:58:50 +02:00
|
|
|
.map_err(|_| Error::bad_database("Invalid presence event in db."))?;
|
|
|
|
|
|
|
|
let current_timestamp: UInt = utils::millis_since_unix_epoch()
|
|
|
|
.try_into()
|
|
|
|
.expect("time is valid");
|
|
|
|
|
|
|
|
if presence.content.presence == PresenceState::Online {
|
|
|
|
// Don't set last_active_ago when the user is online
|
|
|
|
presence.content.last_active_ago = None;
|
|
|
|
} else {
|
|
|
|
// Convert from timestamp to duration
|
|
|
|
presence.content.last_active_ago = presence
|
|
|
|
.content
|
|
|
|
.last_active_ago
|
|
|
|
.map(|timestamp| current_timestamp - timestamp);
|
|
|
|
}
|
|
|
|
|
|
|
|
hashmap.insert(user_id, presence);
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(hashmap)
|
|
|
|
}
|
2020-05-03 17:25:31 +02:00
|
|
|
}
|
2022-01-04 14:30:13 +01:00
|
|
|
|
|
|
|
#[tracing::instrument(skip(self))]
|
|
|
|
pub fn lazy_load_was_sent_before(
|
|
|
|
&self,
|
|
|
|
user_id: &UserId,
|
|
|
|
device_id: &DeviceId,
|
|
|
|
room_id: &RoomId,
|
|
|
|
ll_user: &UserId,
|
|
|
|
) -> Result<bool> {
|
|
|
|
let mut key = user_id.as_bytes().to_vec();
|
|
|
|
key.push(0xff);
|
2022-01-20 00:10:39 +01:00
|
|
|
key.extend_from_slice(device_id.as_bytes());
|
2022-01-04 14:30:13 +01:00
|
|
|
key.push(0xff);
|
2022-01-20 00:10:39 +01:00
|
|
|
key.extend_from_slice(room_id.as_bytes());
|
2022-01-04 14:30:13 +01:00
|
|
|
key.push(0xff);
|
2022-01-20 00:10:39 +01:00
|
|
|
key.extend_from_slice(ll_user.as_bytes());
|
2022-01-04 14:30:13 +01:00
|
|
|
Ok(self.lazyloadedids.get(&key)?.is_some())
|
|
|
|
}
|
|
|
|
|
|
|
|
#[tracing::instrument(skip(self))]
|
|
|
|
pub fn lazy_load_mark_sent(
|
|
|
|
&self,
|
|
|
|
user_id: &UserId,
|
|
|
|
device_id: &DeviceId,
|
|
|
|
room_id: &RoomId,
|
2022-01-06 00:15:34 +01:00
|
|
|
lazy_load: HashSet<Box<UserId>>,
|
2022-01-04 14:30:13 +01:00
|
|
|
count: u64,
|
|
|
|
) {
|
|
|
|
self.lazy_load_waiting.lock().unwrap().insert(
|
|
|
|
(
|
|
|
|
user_id.to_owned(),
|
|
|
|
device_id.to_owned(),
|
|
|
|
room_id.to_owned(),
|
|
|
|
count,
|
|
|
|
),
|
|
|
|
lazy_load,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
#[tracing::instrument(skip(self))]
|
|
|
|
pub fn lazy_load_confirm_delivery(
|
|
|
|
&self,
|
|
|
|
user_id: &UserId,
|
|
|
|
device_id: &DeviceId,
|
|
|
|
room_id: &RoomId,
|
|
|
|
since: u64,
|
|
|
|
) -> Result<()> {
|
|
|
|
if let Some(user_ids) = self.lazy_load_waiting.lock().unwrap().remove(&(
|
|
|
|
user_id.to_owned(),
|
|
|
|
device_id.to_owned(),
|
|
|
|
room_id.to_owned(),
|
|
|
|
since,
|
|
|
|
)) {
|
|
|
|
let mut prefix = user_id.as_bytes().to_vec();
|
|
|
|
prefix.push(0xff);
|
2022-01-20 00:10:39 +01:00
|
|
|
prefix.extend_from_slice(device_id.as_bytes());
|
2022-01-04 14:30:13 +01:00
|
|
|
prefix.push(0xff);
|
2022-01-20 00:10:39 +01:00
|
|
|
prefix.extend_from_slice(room_id.as_bytes());
|
2022-01-04 14:30:13 +01:00
|
|
|
prefix.push(0xff);
|
|
|
|
|
|
|
|
for ll_id in user_ids {
|
|
|
|
let mut key = prefix.clone();
|
2022-01-20 00:10:39 +01:00
|
|
|
key.extend_from_slice(ll_id.as_bytes());
|
2022-01-04 14:30:13 +01:00
|
|
|
self.lazyloadedids.insert(&key, &[])?;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
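// Illustrative note (not original code): lazy-loading state is keyed per
// (user, device, room, lazily loaded user). `lazy_load_was_sent_before` reads exactly
// the key that `lazy_load_confirm_delivery` writes:
//
//     let mut key = user_id.as_bytes().to_vec();
//     key.push(0xff);
//     key.extend_from_slice(device_id.as_bytes());
//     key.push(0xff);
//     key.extend_from_slice(room_id.as_bytes());
//     key.push(0xff);
//     key.extend_from_slice(ll_user.as_bytes());
//     let already_sent = self.lazyloadedids.get(&key)?.is_some();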
|
|
|
|
|
|
|
|
#[tracing::instrument(skip(self))]
|
|
|
|
pub fn lazy_load_reset(
|
|
|
|
&self,
|
2022-01-20 00:10:39 +01:00
|
|
|
user_id: &UserId,
|
|
|
|
device_id: &DeviceId,
|
|
|
|
room_id: &RoomId,
|
2022-01-04 14:30:13 +01:00
|
|
|
) -> Result<()> {
|
|
|
|
let mut prefix = user_id.as_bytes().to_vec();
|
|
|
|
prefix.push(0xff);
|
2022-01-20 00:10:39 +01:00
|
|
|
prefix.extend_from_slice(device_id.as_bytes());
|
2022-01-04 14:30:13 +01:00
|
|
|
prefix.push(0xff);
|
2022-01-20 00:10:39 +01:00
|
|
|
prefix.extend_from_slice(room_id.as_bytes());
|
2022-01-04 14:30:13 +01:00
|
|
|
prefix.push(0xff);
|
|
|
|
|
|
|
|
for (key, _) in self.lazyloadedids.scan_prefix(prefix) {
|
|
|
|
self.lazyloadedids.remove(&key)?;
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
2021-11-01 08:57:27 +00:00
|
|
|
|
2020-05-03 17:25:31 +02:00
|
|
|
/// Checks if a room exists.
|
2021-08-19 11:01:18 +02:00
|
|
|
#[tracing::instrument(skip(self))]
|
2020-05-03 17:25:31 +02:00
|
|
|
pub fn exists(&self, room_id: &RoomId) -> Result<bool> {
|
2021-08-14 08:26:45 +02:00
|
|
|
let prefix = match self.get_shortroomid(room_id)? {
|
|
|
|
Some(b) => b.to_be_bytes().to_vec(),
|
|
|
|
None => return Ok(false),
|
|
|
|
};
|
2020-05-03 17:25:31 +02:00
|
|
|
|
2020-05-08 21:13:52 +02:00
|
|
|
// Look for PDUs in that room.
|
2020-05-03 17:25:31 +02:00
|
|
|
Ok(self
|
|
|
|
.pduid_pdu
|
2021-06-08 18:10:00 +02:00
|
|
|
.iter_from(&prefix, false)
|
|
|
|
.next()
|
2020-05-03 17:25:31 +02:00
|
|
|
.filter(|(k, _)| k.starts_with(&prefix))
|
|
|
|
.is_some())
|
|
|
|
}
|
|
|
|
|
2021-08-14 08:26:45 +02:00
|
|
|
pub fn get_shortroomid(&self, room_id: &RoomId) -> Result<Option<u64>> {
|
2021-08-14 19:47:49 +02:00
|
|
|
self.roomid_shortroomid
|
2021-09-13 19:45:56 +02:00
|
|
|
.get(room_id.as_bytes())?
|
2021-08-14 19:47:49 +02:00
|
|
|
.map(|bytes| {
|
|
|
|
utils::u64_from_bytes(&bytes)
|
|
|
|
.map_err(|_| Error::bad_database("Invalid shortroomid in db."))
|
|
|
|
})
|
|
|
|
.transpose()
|
2021-08-12 23:04:00 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn get_or_create_shortroomid(
|
|
|
|
&self,
|
|
|
|
room_id: &RoomId,
|
|
|
|
globals: &super::globals::Globals,
|
|
|
|
) -> Result<u64> {
|
2021-09-13 19:45:56 +02:00
|
|
|
Ok(match self.roomid_shortroomid.get(room_id.as_bytes())? {
|
2021-08-12 23:04:00 +02:00
|
|
|
Some(short) => utils::u64_from_bytes(&short)
|
|
|
|
.map_err(|_| Error::bad_database("Invalid shortroomid in db."))?,
|
|
|
|
None => {
|
|
|
|
let short = globals.next_count()?;
|
|
|
|
self.roomid_shortroomid
|
2021-09-13 19:45:56 +02:00
|
|
|
.insert(room_id.as_bytes(), &short.to_be_bytes())?;
|
2021-08-12 23:04:00 +02:00
|
|
|
short
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
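// Illustrative sketch (not original code): the short id is an interning pattern;
// callers use it to build compact, fixed-width key prefixes, e.g. the pdu id layout
// assumed here (shortroomid followed by a counter), which is also what `exists`
// above scans for:
//
//     let shortroomid = self.get_or_create_shortroomid(room_id, globals)?;
//     let mut pdu_id = shortroomid.to_be_bytes().to_vec();
//     pdu_id.extend_from_slice(&count.to_be_bytes());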
|
|
|
|
|
2022-06-20 11:31:27 +02:00
|
|
|
/// Returns the json of the pdu from the outlier tree.
|
|
|
|
pub fn get_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> {
|
|
|
|
self.eventid_outlierpdu
|
|
|
|
.get(event_id.as_bytes())?
|
|
|
|
.map_or(Ok(None), |pdu| {
|
|
|
|
serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db."))
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-01-14 21:32:22 -05:00
|
|
|
/// Returns the pdu from the outlier tree.
|
|
|
|
pub fn get_pdu_outlier(&self, event_id: &EventId) -> Result<Option<PduEvent>> {
|
2021-03-13 16:30:12 +01:00
|
|
|
self.eventid_outlierpdu
|
2021-02-01 12:44:30 -05:00
|
|
|
.get(event_id.as_bytes())?
|
|
|
|
.map_or(Ok(None), |pdu| {
|
2021-01-29 21:45:33 -05:00
|
|
|
serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db."))
|
2021-01-14 21:32:22 -05:00
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-02-03 20:00:01 -05:00
|
|
|
/// Append the PDU as an outlier.
|
2021-07-29 08:36:01 +02:00
|
|
|
#[tracing::instrument(skip(self, pdu))]
|
2021-04-16 18:18:29 +02:00
|
|
|
pub fn add_pdu_outlier(&self, event_id: &EventId, pdu: &CanonicalJsonObject) -> Result<()> {
|
2021-03-13 16:30:12 +01:00
|
|
|
self.eventid_outlierpdu.insert(
|
2021-09-13 19:45:56 +02:00
|
|
|
event_id.as_bytes(),
|
2021-06-08 18:10:00 +02:00
|
|
|
&serde_json::to_vec(&pdu).expect("CanonicalJsonObject is valid"),
|
2021-08-28 11:39:33 +02:00
|
|
|
)
|
|
|
|
}
|
2021-01-29 21:45:33 -05:00
|
|
|
|
2020-05-03 17:25:31 +02:00
|
|
|
|
2021-08-24 19:10:31 +02:00
|
|
|
#[tracing::instrument(skip(self, room_id, event_ids))]
|
2021-11-27 17:44:52 +01:00
|
|
|
pub fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()> {
|
2021-08-24 19:10:31 +02:00
|
|
|
for prev in event_ids {
|
|
|
|
let mut key = room_id.as_bytes().to_vec();
|
|
|
|
key.extend_from_slice(prev.as_bytes());
|
|
|
|
self.referencedevents.insert(&key, &[])?;
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2021-07-29 08:36:01 +02:00
|
|
|
#[tracing::instrument(skip(self))]
|
2021-07-20 12:41:35 +02:00
|
|
|
pub fn is_event_referenced(&self, room_id: &RoomId, event_id: &EventId) -> Result<bool> {
|
|
|
|
let mut key = room_id.as_bytes().to_vec();
|
|
|
|
key.extend_from_slice(event_id.as_bytes());
|
2021-07-29 08:36:01 +02:00
|
|
|
Ok(self.referencedevents.get(&key)?.is_some())
|
2021-02-03 20:00:01 -05:00
|
|
|
}
|
|
|
|
|
2021-08-28 11:39:33 +02:00
|
|
|
#[tracing::instrument(skip(self))]
|
|
|
|
pub fn mark_event_soft_failed(&self, event_id: &EventId) -> Result<()> {
|
2021-09-13 19:45:56 +02:00
|
|
|
self.softfailedeventids.insert(event_id.as_bytes(), &[])
|
2021-08-28 11:39:33 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
#[tracing::instrument(skip(self))]
|
|
|
|
pub fn is_event_soft_failed(&self, event_id: &EventId) -> Result<bool> {
|
|
|
|
self.softfailedeventids
|
2021-09-13 19:45:56 +02:00
|
|
|
.get(event_id.as_bytes())
|
2021-08-28 11:39:33 +02:00
|
|
|
.map(|o| o.is_some())
|
2021-01-14 21:32:22 -05:00
|
|
|
}
|
|
|
|
|