mod edus;

pub use edus::RoomEdus;

use crate::{pdu::PduBuilder, utils, Database, Error, PduEvent, Result};
use log::error;
use regex::Regex;
use ring::digest;
use ruma::{
    api::client::error::ErrorKind,
    events::{
        ignored_user_list,
        room::{
            member, message,
            power_levels::{self, PowerLevelsEventContent},
        },
        EventType,
    },
    serde::{to_canonical_value, CanonicalJsonObject, CanonicalJsonValue, Raw},
    EventId, RoomAliasId, RoomId, RoomVersionId, ServerName, UserId,
};
use sled::IVec;
use state_res::{event_auth, Event, StateMap};

use std::{
    collections::{BTreeMap, HashMap},
    convert::{TryFrom, TryInto},
    mem,
    sync::Arc,
};

use super::admin::AdminCommand;

/// The unique identifier of each state group.
///
/// This is created when a state group is added to the database by
/// hashing the entire state.
pub type StateHashId = IVec;

#[derive(Clone)]
pub struct Rooms {
    pub edus: edus::RoomEdus,
    pub(super) pduid_pdu: sled::Tree, // PduId = RoomId + Count
    pub(super) eventid_pduid: sled::Tree,
    pub(super) roomid_pduleaves: sled::Tree,
    pub(super) alias_roomid: sled::Tree,
    pub(super) aliasid_alias: sled::Tree, // AliasId = RoomId + Count
    pub(super) publicroomids: sled::Tree,

    pub(super) tokenids: sled::Tree, // TokenId = RoomId + Token + PduId

    /// Participating servers in a room.
    pub(super) roomserverids: sled::Tree, // RoomServerId = RoomId + ServerName
    pub(super) userroomid_joined: sled::Tree,
    pub(super) roomuserid_joined: sled::Tree,
    pub(super) roomuseroncejoinedids: sled::Tree,
    pub(super) userroomid_invited: sled::Tree,
    pub(super) roomuserid_invited: sled::Tree,
    pub(super) userroomid_left: sled::Tree,

    /// Remember the current state hash of a room.
    pub(super) roomid_statehash: sled::Tree,
    /// Remember the state hash at events in the past.
    pub(super) pduid_statehash: sled::Tree,
    /// The state for a given state hash.
    pub(super) statekey_short: sled::Tree, // StateKey = EventType + StateKey, Short = Count
    pub(super) stateid_pduid: sled::Tree, // StateId = StateHash + Short, PduId = Count (without roomid)

    /// RoomId + EventId -> outlier PDU.
    /// Any pdu that has passed the steps 1-8 in the incoming event /federation/send/txn.
    pub(super) eventid_outlierpdu: sled::Tree,
}
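
// How keys in these trees are laid out, as constructed throughout this module:
//
//   pduid_pdu:        RoomId ++ 0xff ++ Count (big-endian u64)    -> PDU JSON
//   eventid_pduid:    EventId                                     -> PduId
//   roomid_pduleaves: RoomId ++ 0xff ++ EventId                   -> EventId
//   tokenids:         RoomId ++ 0xff ++ Token ++ 0xff ++ PduId    -> (empty)
//   stateid_pduid:    StateHash ++ 0xff ++ Short (u64)            -> PduId without the room id prefix
//
// 0xff is used as the separator everywhere; it never occurs in UTF-8 encoded ids, so
// prefix scans cannot run past the entries of one room, hash or token.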

impl Rooms {
    /// Builds a StateMap by iterating over all keys that start with `state_hash`;
    /// this gives the full state for the given state hash.
    ///
    /// TODO: Should this check for outliers? It does now.
    pub fn state_full(
        &self,
        room_id: &RoomId,
        state_hash: &StateHashId,
    ) -> Result<BTreeMap<(EventType, String), PduEvent>> {
        self.stateid_pduid
            .scan_prefix(&state_hash)
            .values()
            .map(|pduid_short| {
                let mut pduid = room_id.as_bytes().to_vec();
                pduid.push(0xff);
                pduid.extend_from_slice(&pduid_short?);
                match self.pduid_pdu.get(&pduid)? {
                    Some(b) => serde_json::from_slice::<PduEvent>(&b)
                        .map_err(|_| Error::bad_database("Invalid PDU in db.")),
                    None => self
                        .eventid_outlierpdu
                        .get(pduid)?
                        .map(|b| {
                            serde_json::from_slice::<PduEvent>(&b)
                                .map_err(|_| Error::bad_database("Invalid PDU in db."))
                        })
                        .ok_or_else(|| {
                            Error::bad_database("Event is not in pdu tree or outliers.")
                        })?,
                }
            })
            .filter_map(|r| r.ok())
            .map(|pdu| {
                Ok((
                    (
                        pdu.kind.clone(),
                        pdu.state_key
                            .as_ref()
                            .ok_or_else(|| Error::bad_database("State event has no state key."))?
                            .clone(),
                    ),
                    pdu,
                ))
            })
            .collect()
    }
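
    // Values in `stateid_pduid` are pdu-id *suffixes* (the count without the room id), so
    // `state_full` re-prefixes each one with `room_id ++ 0xff` before looking the PDU up in
    // `pduid_pdu`, falling back to the outlier tree if it is not there.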

    /// Returns a single PDU from `room_id` with key (`event_type`, `state_key`).
    ///
    /// TODO: Should this check for outliers? It does now.
    pub fn state_get(
        &self,
        room_id: &RoomId,
        state_hash: &StateHashId,
        event_type: &EventType,
        state_key: &str,
    ) -> Result<Option<(IVec, PduEvent)>> {
        let mut key = event_type.to_string().as_bytes().to_vec();
        key.push(0xff);
        key.extend_from_slice(&state_key.as_bytes());

        let short = self.statekey_short.get(&key)?;

        if let Some(short) = short {
            let mut stateid = state_hash.to_vec();
            stateid.push(0xff);
            stateid.extend_from_slice(&short);

            self.stateid_pduid
                .get(&stateid)?
                .map_or(Ok(None), |pdu_id_short| {
                    let mut pdu_id = room_id.as_bytes().to_vec();
                    pdu_id.push(0xff);
                    pdu_id.extend_from_slice(&pdu_id_short);

                    Ok::<_, Error>(Some((
                        pdu_id.clone().into(),
                        match self.pduid_pdu.get(&pdu_id)? {
                            Some(b) => serde_json::from_slice::<PduEvent>(&b)
                                .map_err(|_| Error::bad_database("Invalid PDU in db."))?,
                            None => self
                                .eventid_outlierpdu
                                .get(pdu_id)?
                                .map(|b| {
                                    serde_json::from_slice::<PduEvent>(&b)
                                        .map_err(|_| Error::bad_database("Invalid PDU in db."))
                                })
                                .ok_or_else(|| {
                                    Error::bad_database("Event is not in pdu tree or outliers.")
                                })??,
                        },
                    )))
                })
        } else {
            Ok(None)
        }
    }
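
    // Lookup here happens in two steps: `statekey_short` maps `event_type ++ 0xff ++ state_key`
    // to a small u64 "short" id, and `stateid_pduid` is then keyed by `state_hash ++ 0xff ++ short`.
    // A missing entry at either step simply means the state contains no such event.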

    /// Returns the state hash for this pdu.
    pub fn pdu_state_hash(&self, pdu_id: &[u8]) -> Result<Option<StateHashId>> {
        Ok(self.pduid_statehash.get(pdu_id)?)
    }

    /// Returns the last state hash key added to the db for the given room.
    pub fn current_state_hash(&self, room_id: &RoomId) -> Result<Option<StateHashId>> {
        Ok(self.roomid_statehash.get(room_id.as_bytes())?)
    }

    /// This fetches auth events from the current state.
    pub fn get_auth_events(
        &self,
        room_id: &RoomId,
        kind: &EventType,
        sender: &UserId,
        state_key: Option<&str>,
        content: serde_json::Value,
    ) -> Result<StateMap<PduEvent>> {
        let auth_events = state_res::auth_types_for_event(
            kind,
            sender,
            state_key.map(|s| s.to_string()),
            content,
        );

        let mut events = StateMap::new();
        for (event_type, state_key) in auth_events {
            if let Some((_, pdu)) = self.room_state_get(
                room_id,
                &event_type,
                &state_key
                    .as_deref()
                    .expect("found a non state event in auth events"),
            )? {
                events.insert((event_type, state_key), pdu);
            }
        }
        Ok(events)
    }

    /// Generate a new StateHash.
    ///
    /// A unique hash made from hashing all PDU ids of the state joined with 0xff.
    fn calculate_hash(&self, pdu_id_bytes: &[&[u8]]) -> Result<StateHashId> {
        // We only hash the pdu ids, not the whole pdus
        let bytes = pdu_id_bytes.join(&0xff);
        let hash = digest::digest(&digest::SHA256, &bytes);
        Ok(hash.as_ref().into())
    }
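
    // Note: the pdu ids hashed here are gathered from `HashMap`s elsewhere in this module,
    // and `HashMap` iteration order is not deterministic, so two identical states are not
    // guaranteed to produce the same `StateHashId`. That is acceptable as long as the hash
    // is only used as an opaque snapshot identifier, which is how this module uses it.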

    /// Checks if a room exists.
    pub fn exists(&self, room_id: &RoomId) -> Result<bool> {
        let mut prefix = room_id.as_bytes().to_vec();
        prefix.push(0xff);

        // Look for PDUs in that room.
        Ok(self
            .pduid_pdu
            .get_gt(&prefix)?
            .filter(|(k, _)| k.starts_with(&prefix))
            .is_some())
    }

    /// Force the creation of a new StateHash and insert it into the db.
    ///
    /// Whatever `state` is supplied to `force_state` __is__ the current room state snapshot.
    pub fn force_state(
        &self,
        room_id: &RoomId,
        state: HashMap<(EventType, String), Vec<u8>>,
        globals: &super::globals::Globals,
    ) -> Result<()> {
        let state_hash =
            self.calculate_hash(&state.values().map(|pdu_id| &**pdu_id).collect::<Vec<_>>())?;
        let mut prefix = state_hash.to_vec();
        prefix.push(0xff);

        for ((event_type, state_key), pdu_id) in state {
            let mut statekey = event_type.as_ref().as_bytes().to_vec();
            statekey.push(0xff);
            statekey.extend_from_slice(&state_key.as_bytes());

            let short = match self.statekey_short.get(&statekey)? {
                Some(short) => utils::u64_from_bytes(&short)
                    .map_err(|_| Error::bad_database("Invalid short bytes in statekey_short."))?,
                None => {
                    let short = globals.next_count()?;
                    self.statekey_short
                        .insert(&statekey, &short.to_be_bytes())?;
                    short
                }
            };

            // Because of outliers this could also be an eventID but that
            // is handled by `state_full`
            let pdu_id_short = pdu_id
                .splitn(2, |&b| b == 0xff)
                .nth(1)
                .ok_or_else(|| Error::bad_database("Invalid pduid in state."))?;

            let mut state_id = prefix.clone();
            state_id.extend_from_slice(&short.to_be_bytes());
            self.stateid_pduid.insert(state_id, pdu_id_short)?;
        }

        self.roomid_statehash
            .insert(room_id.as_bytes(), &*state_hash)?;

        Ok(())
    }

    /// Returns the full room state.
    pub fn room_state_full(
        &self,
        room_id: &RoomId,
    ) -> Result<BTreeMap<(EventType, String), PduEvent>> {
        if let Some(current_state_hash) = self.current_state_hash(room_id)? {
            self.state_full(&room_id, &current_state_hash)
        } else {
            Ok(BTreeMap::new())
        }
    }

    /// Returns a single PDU from `room_id` with key (`event_type`, `state_key`).
    pub fn room_state_get(
        &self,
        room_id: &RoomId,
        event_type: &EventType,
        state_key: &str,
    ) -> Result<Option<(IVec, PduEvent)>> {
        if let Some(current_state_hash) = self.current_state_hash(room_id)? {
            self.state_get(&room_id, &current_state_hash, event_type, state_key)
        } else {
            Ok(None)
        }
    }

    /// Returns the `count` of this pdu's id.
    pub fn pdu_count(&self, pdu_id: &[u8]) -> Result<u64> {
        Ok(
            utils::u64_from_bytes(&pdu_id[pdu_id.len() - mem::size_of::<u64>()..pdu_id.len()])
                .map_err(|_| Error::bad_database("PDU has invalid count bytes."))?,
        )
    }
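
    // A PduId is `room_id ++ 0xff ++ count.to_be_bytes()`, so the count always occupies the
    // last eight bytes, and the big-endian encoding makes ids sort chronologically per room.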

    /// Returns the `count` of the pdu with the given event id.
    pub fn get_pdu_count(&self, event_id: &EventId) -> Result<Option<u64>> {
        self.eventid_pduid
            .get(event_id.as_bytes())?
            .map_or(Ok(None), |pdu_id| self.pdu_count(&pdu_id).map(Some))
    }

    /// Returns the json of a pdu.
    pub fn get_pdu_json(&self, event_id: &EventId) -> Result<Option<serde_json::Value>> {
        self.eventid_pduid
            .get(event_id.as_bytes())?
            .map_or(Ok(None), |pdu_id| {
                Ok(Some(
                    serde_json::from_slice(&match self.pduid_pdu.get(&pdu_id)? {
                        Some(b) => b,
                        None => self
                            .eventid_outlierpdu
                            .get(event_id.as_bytes())?
                            .ok_or_else(|| {
                                Error::bad_database("Event is not in pdu tree or outliers.")
                            })?,
                    })
                    .map_err(|_| Error::bad_database("Invalid PDU in db."))?,
                ))
            })
    }

    /// Returns the pdu's id.
    pub fn get_pdu_id(&self, event_id: &EventId) -> Result<Option<IVec>> {
        self.eventid_pduid
            .get(event_id.to_string().as_bytes())?
            .map_or(Ok(None), |pdu_id| Ok(Some(pdu_id)))
    }

    /// Returns the pdu.
    ///
    /// Checks the `eventid_outlierpdu` Tree if not found in the timeline.
    pub fn get_pdu(&self, event_id: &EventId) -> Result<Option<PduEvent>> {
        self.eventid_pduid
            .get(event_id.as_bytes())?
            .map_or(Ok(None), |pdu_id| {
                Ok(Some(
                    serde_json::from_slice(&match self.pduid_pdu.get(&pdu_id)? {
                        Some(b) => b,
                        None => self
                            .eventid_outlierpdu
                            .get(event_id.as_bytes())?
                            .ok_or_else(|| {
                                Error::bad_database("Event is not in pdu tree or outliers.")
                            })?,
                    })
                    .map_err(|_| Error::bad_database("Invalid PDU in db."))?,
                ))
            })
    }

    /// Returns the pdu for the given pdu id.
    pub fn get_pdu_from_id(&self, pdu_id: &IVec) -> Result<Option<PduEvent>> {
        self.pduid_pdu.get(pdu_id)?.map_or(Ok(None), |pdu| {
            Ok(Some(
                serde_json::from_slice(&pdu)
                    .map_err(|_| Error::bad_database("Invalid PDU in db."))?,
            ))
        })
    }

    /// Returns the pdu as a `BTreeMap<String, CanonicalJsonValue>`.
    pub fn get_pdu_json_from_id(&self, pdu_id: &[u8]) -> Result<Option<CanonicalJsonObject>> {
        self.pduid_pdu.get(pdu_id)?.map_or(Ok(None), |pdu| {
            Ok(Some(
                serde_json::from_slice(&pdu)
                    .map_err(|_| Error::bad_database("Invalid PDU in db."))?,
            ))
        })
    }

    /// Removes a pdu and creates a new one with the same id.
    fn replace_pdu(&self, pdu_id: &IVec, pdu: &PduEvent) -> Result<()> {
        if self.pduid_pdu.get(&pdu_id)?.is_some() {
            self.pduid_pdu.insert(
                &pdu_id,
                &*serde_json::to_string(pdu).expect("PduEvent::to_string always works"),
            )?;
            Ok(())
        } else {
            Err(Error::BadRequest(
                ErrorKind::NotFound,
                "PDU does not exist.",
            ))
        }
    }

    /// Returns the leaf pdus of a room.
    pub fn get_pdu_leaves(&self, room_id: &RoomId) -> Result<Vec<EventId>> {
        let mut prefix = room_id.as_bytes().to_vec();
        prefix.push(0xff);

        let mut events = Vec::new();

        for event in self
            .roomid_pduleaves
            .scan_prefix(prefix)
            .values()
            .map(|bytes| {
                Ok::<_, Error>(
                    EventId::try_from(utils::string_from_bytes(&bytes?).map_err(|_| {
                        Error::bad_database("EventID in roomid_pduleaves is invalid unicode.")
                    })?)
                    .map_err(|_| Error::bad_database("EventId in roomid_pduleaves is invalid."))?,
                )
            })
        {
            events.push(event?);
        }

        Ok(events)
    }

    /// Replace the leaves of a room.
    ///
    /// The provided `event_ids` become the new leaves; this enables an event to have
    /// multiple `prev_events`.
    pub fn replace_pdu_leaves(&self, room_id: &RoomId, event_ids: &[EventId]) -> Result<()> {
        let mut prefix = room_id.as_bytes().to_vec();
        prefix.push(0xff);

        for key in self.roomid_pduleaves.scan_prefix(&prefix).keys() {
            self.roomid_pduleaves.remove(key?)?;
        }

        for event_id in event_ids.iter() {
            let mut key = prefix.to_owned();
            key.extend_from_slice(event_id.as_bytes());
            self.roomid_pduleaves.insert(&key, event_id.as_bytes())?;
        }

        Ok(())
    }

    /// Returns the pdu from the outlier tree.
    pub fn get_pdu_outlier(&self, event_id: &EventId) -> Result<Option<PduEvent>> {
        self.eventid_outlierpdu
            .get(event_id.as_bytes())?
            .map_or(Ok(None), |pdu| {
                serde_json::from_slice(&pdu).map_err(|_| Error::bad_database("Invalid PDU in db."))
            })
    }

    /// Returns true if the event_id was previously inserted.
    pub fn append_pdu_outlier(&self, pdu: &PduEvent) -> Result<bool> {
        log::info!(
            "Outlier pdus in db: {:#?}",
            self.eventid_outlierpdu
                .iter()
                .map(|pair| {
                    let (_k, v) = pair.unwrap();
                    serde_json::from_slice::<PduBuilder>(&v).unwrap()
                })
                .collect::<Vec<_>>()
        );

        let mut key = pdu.room_id().as_bytes().to_vec();
        key.push(0xff);
        key.extend_from_slice(pdu.event_id().as_bytes());

        let res = self
            .eventid_outlierpdu
            .insert(
                &key,
                &*serde_json::to_string(&pdu).expect("PduEvent is always a valid String"),
            )
            .map(|op| op.is_some())?;
        Ok(res)
    }
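
    // Outliers are keyed by `room_id ++ 0xff ++ event_id`. `sled::Tree::insert` returns the
    // previous value stored under the key, so `op.is_some()` is true exactly when this event
    // had already been written to the outlier tree before.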

    /// Creates a new persisted data unit and adds it to a room.
    ///
    /// By this point the incoming event should be fully authenticated, no auth happens
    /// in `append_pdu`.
    pub fn append_pdu(
        &self,
        pdu: &PduEvent,
        mut pdu_json: CanonicalJsonObject,
        count: u64,
        pdu_id: IVec,
        leaves: &[EventId],
        db: &Database,
    ) -> Result<()> {
        // Make unsigned fields correct. This is not properly documented in the spec, but state
        // events need to have previous content in the unsigned field, so clients can easily
        // interpret things like membership changes.
        if let Some(state_key) = &pdu.state_key {
            if let CanonicalJsonValue::Object(unsigned) = pdu_json
                .entry("unsigned".to_owned())
                .or_insert_with(|| CanonicalJsonValue::Object(Default::default()))
            {
                if let Some(prev_state_hash) = self.pdu_state_hash(&pdu_id).unwrap() {
                    if let Some(prev_state) = self
                        .state_get(&pdu.room_id, &prev_state_hash, &pdu.kind, &state_key)
                        .unwrap()
                    {
                        unsigned.insert(
                            "prev_content".to_owned(),
                            CanonicalJsonValue::Object(
                                utils::to_canonical_object(prev_state.1.content)
                                    .expect("event is valid, we just created it"),
                            ),
                        );
                    }
                }
            } else {
                error!("Invalid unsigned type in pdu.");
            }
        }

        // We no longer keep this pdu as an outlier
        let mut key = pdu.room_id().as_bytes().to_vec();
        key.push(0xff);
        key.extend_from_slice(pdu.event_id().as_bytes());
        self.eventid_outlierpdu.remove(key)?;

        self.replace_pdu_leaves(&pdu.room_id, leaves)?;

        // Mark as read first so the sending client doesn't get a notification even if appending
        // fails
        self.edus
            .private_read_set(&pdu.room_id, &pdu.sender, count, &db.globals)?;

        self.pduid_pdu.insert(
            &pdu_id,
            &*serde_json::to_string(&pdu_json)
                .expect("CanonicalJsonObject is always a valid String"),
        )?;

        self.eventid_pduid
            .insert(pdu.event_id.as_bytes(), &*pdu_id)?;

        match pdu.kind {
            EventType::RoomRedaction => {
                if let Some(redact_id) = &pdu.redacts {
                    self.redact_pdu(&redact_id, &pdu)?;
                }
            }
            EventType::RoomMember => {
                if let Some(state_key) = &pdu.state_key {
                    // The state_key was previously validated, so this cannot fail.
                    let target_user_id = UserId::try_from(state_key.clone())
                        .expect("This state_key was previously validated");
                    // Update our membership info. We do this here in case a user is invited
                    // and immediately leaves: we need the DB to record the invite event for auth.
                    self.update_membership(
                        &pdu.room_id,
                        &target_user_id,
                        serde_json::from_value::<member::MemberEventContent>(pdu.content.clone())
                            .map_err(|_| {
                                Error::BadRequest(
                                    ErrorKind::InvalidParam,
                                    "Invalid member event content.",
                                )
                            })?,
                        &pdu.sender,
                        &db.account_data,
                        &db.globals,
                    )?;
                }
            }
            EventType::RoomMessage => {
                if let Some(body) = pdu.content.get("body").and_then(|b| b.as_str()) {
                    for word in body
                        .split_terminator(|c: char| !c.is_alphanumeric())
                        .map(str::to_lowercase)
                    {
                        let mut key = pdu.room_id.to_string().as_bytes().to_vec();
                        key.push(0xff);
                        key.extend_from_slice(word.as_bytes());
                        key.push(0xff);
                        key.extend_from_slice(&pdu_id);
                        self.tokenids.insert(key, &[])?;
                    }

                    if body.starts_with(&format!("@conduit:{}: ", db.globals.server_name()))
                        && self
                            .id_from_alias(
                                &format!("#admins:{}", db.globals.server_name())
                                    .try_into()
                                    .expect("#admins:server_name is a valid room alias"),
                            )?
                            .as_ref()
                            == Some(&pdu.room_id)
                    {
                        let mut lines = body.lines();
                        let command_line = lines.next().expect("each string has at least one line");
                        let body = lines.collect::<Vec<_>>();

                        let mut parts = command_line.split_whitespace().skip(1);
                        if let Some(command) = parts.next() {
                            let args = parts.collect::<Vec<_>>();

                            match command {
                                "register_appservice" => {
                                    if body.len() > 2
                                        && body[0].trim() == "```"
                                        && body.last().unwrap().trim() == "```"
                                    {
                                        let appservice_config = body[1..body.len() - 1].join("\n");
                                        let parsed_config = serde_yaml::from_str::<serde_yaml::Value>(
                                            &appservice_config,
                                        );
                                        match parsed_config {
                                            Ok(yaml) => {
                                                db.admin
                                                    .send(AdminCommand::RegisterAppservice(yaml));
                                            }
                                            Err(e) => {
                                                db.admin.send(AdminCommand::SendMessage(
                                                    message::MessageEventContent::text_plain(
                                                        format!(
                                                            "Could not parse appservice config: {}",
                                                            e
                                                        ),
                                                    ),
                                                ));
                                            }
                                        }
                                    } else {
                                        db.admin.send(AdminCommand::SendMessage(
                                            message::MessageEventContent::text_plain(
                                                "Expected code block in command body.",
                                            ),
                                        ));
                                    }
                                }
                                "list_appservices" => {
                                    db.admin.send(AdminCommand::ListAppservices);
                                }
                                _ => {
                                    db.admin.send(AdminCommand::SendMessage(
                                        message::MessageEventContent::text_plain(format!(
                                            "Command: {}, Args: {:?}",
                                            command, args
                                        )),
                                    ));
                                }
                            }
                        }
                    }
                }
            }
            _ => {}
        }

        Ok(())
    }
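
    // The admin-command branch above only runs for messages in the #admins room that are
    // addressed to the server user. Based on the parsing above, a message shaped like the
    // following would register an appservice (the id/url values are purely illustrative):
    //
    //   @conduit:example.com: register_appservice
    //   ```
    //   id: my_bridge
    //   url: http://localhost:9999
    //   ```
    //
    // The first line is split on whitespace and the word after the mention is the command;
    // the remaining lines form the command body.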

    /// Generates a new StateHash and associates it with the incoming event.
    ///
    /// This inserts the incoming event into `pduid_statehash`, pointing at the room's
    /// state *before* the event, and stores the state *including* the event under a
    /// new state hash in `stateid_pduid`. The incoming event is the `pdu_id` passed
    /// to this method.
    pub fn append_to_state(
        &self,
        new_pdu_id: &[u8],
        new_pdu: &PduEvent,
        globals: &super::globals::Globals,
    ) -> Result<StateHashId> {
        let old_state =
            if let Some(old_state_hash) = self.roomid_statehash.get(new_pdu.room_id.as_bytes())? {
                // Store state for event. The state does not include the event itself.
                // Instead it's the state before the pdu, so the room's old state.
                self.pduid_statehash.insert(new_pdu_id, &old_state_hash)?;
                if new_pdu.state_key.is_none() {
                    return Ok(old_state_hash);
                }

                let mut prefix = old_state_hash.to_vec();
                prefix.push(0xff);
                self.stateid_pduid
                    .scan_prefix(&prefix)
                    .filter_map(|pdu| pdu.map_err(|e| error!("{}", e)).ok())
                    // Chop the old state_hash out leaving behind the short key (u64)
                    .map(|(k, v)| (k.subslice(prefix.len(), k.len() - prefix.len()), v))
                    .collect::<HashMap<IVec, IVec>>()
            } else {
                HashMap::new()
            };

        if let Some(state_key) = &new_pdu.state_key {
            let mut new_state = old_state;
            let mut pdu_key = new_pdu.kind.as_ref().as_bytes().to_vec();
            pdu_key.push(0xff);
            pdu_key.extend_from_slice(state_key.as_bytes());

            let short = match self.statekey_short.get(&pdu_key)? {
                Some(short) => utils::u64_from_bytes(&short)
                    .map_err(|_| Error::bad_database("Invalid short bytes in statekey_short."))?,
                None => {
                    let short = globals.next_count()?;
                    self.statekey_short.insert(&pdu_key, &short.to_be_bytes())?;
                    short
                }
            };

            let new_pdu_id_short = new_pdu_id
                .splitn(2, |&b| b == 0xff)
                .nth(1)
                .ok_or_else(|| Error::bad_database("Invalid pduid in state."))?;

            new_state.insert((&short.to_be_bytes()).into(), new_pdu_id_short.into());

            let new_state_hash =
                self.calculate_hash(&new_state.values().map(|b| &**b).collect::<Vec<_>>())?;

            let mut key = new_state_hash.to_vec();
            key.push(0xff);

            for (short, short_pdu_id) in new_state {
                let mut state_id = key.clone();
                state_id.extend_from_slice(&short);
                self.stateid_pduid.insert(&state_id, &short_pdu_id)?;
            }

            Ok(new_state_hash)
        } else {
            Err(Error::bad_database(
                "Tried to insert non-state event into room without a state.",
            ))
        }
    }
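
    // `old_state` / `new_state` above are keyed by the short state-key id (u64 big-endian
    // bytes) and store pdu-id suffixes, mirroring the on-disk layout of `stateid_pduid`
    // (`state_hash ++ 0xff ++ short` -> pdu-id suffix without the room id).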

    pub fn set_room_state(&self, room_id: &RoomId, state_hash: &StateHashId) -> Result<()> {
        self.roomid_statehash
            .insert(room_id.as_bytes(), state_hash)?;

        Ok(())
    }

    /// Creates a new persisted data unit and adds it to a room.
    pub fn build_and_append_pdu(
        &self,
        pdu_builder: PduBuilder,
        sender: &UserId,
        room_id: &RoomId,
        db: &Database,
    ) -> Result<EventId> {
        let PduBuilder {
            event_type,
            content,
            unsigned,
            state_key,
            redacts,
        } = pdu_builder;
        // TODO: Make sure this isn't called twice in parallel
        let prev_events = self.get_pdu_leaves(&room_id)?;

        let auth_events = self.get_auth_events(
            &room_id,
            &event_type,
            &sender,
            state_key.as_deref(),
            content.clone(),
        )?;

        // Is the event authorized?
        if let Some(state_key) = &state_key {
            let power_levels = self
                .room_state_get(&room_id, &EventType::RoomPowerLevels, "")?
                .map_or_else(
                    || {
                        Ok::<_, Error>(power_levels::PowerLevelsEventContent {
                            ban: 50.into(),
                            events: BTreeMap::new(),
                            events_default: 0.into(),
                            invite: 50.into(),
                            kick: 50.into(),
                            redact: 50.into(),
                            state_default: 0.into(),
                            users: BTreeMap::new(),
                            users_default: 0.into(),
                            notifications:
                                ruma::events::room::power_levels::NotificationPowerLevels {
                                    room: 50.into(),
                                },
                        })
                    },
                    |(_, power_levels)| {
                        Ok(serde_json::from_value::<Raw<PowerLevelsEventContent>>(
                            power_levels.content,
                        )
                        .expect("Raw::from_value always works.")
                        .deserialize()
                        .map_err(|_| Error::bad_database("Invalid PowerLevels event in db."))?)
                    },
                )?;
            let sender_membership = self
                .room_state_get(&room_id, &EventType::RoomMember, &sender.to_string())?
                .map_or(
                    Ok::<_, Error>(member::MembershipState::Leave),
                    |(_, pdu)| {
                        Ok(
                            serde_json::from_value::<Raw<member::MemberEventContent>>(pdu.content)
                                .expect("Raw::from_value always works.")
                                .deserialize()
                                .map_err(|_| Error::bad_database("Invalid Member event in db."))?
                                .membership,
                        )
                    },
                )?;

            let sender_power = power_levels.users.get(&sender).map_or_else(
                || {
                    if sender_membership != member::MembershipState::Join {
                        None
                    } else {
                        Some(&power_levels.users_default)
                    }
                },
                // If it's okay, wrap with Some(_)
                Some,
            );

            // Is the event allowed?
            #[allow(clippy::blocks_in_if_conditions)]
            if !match event_type {
                EventType::RoomEncryption => {
                    // Only allow encryption events if it's allowed in the config
                    db.globals.allow_encryption()
                }
                EventType::RoomMember => {
                    let prev_event = self
                        .get_pdu(prev_events.get(0).ok_or(Error::BadRequest(
                            ErrorKind::Unknown,
                            "Membership can't be the first event",
                        ))?)?
                        .map(Arc::new);
                    event_auth::valid_membership_change(
                        Some(state_key.as_str()),
                        &sender,
                        content.clone(),
                        prev_event,
                        None, // TODO: third party invite
                        &auth_events
                            .iter()
                            .map(|((ty, key), pdu)| {
                                Ok(((ty.clone(), key.clone()), Arc::new(pdu.clone())))
                            })
                            .collect::<Result<StateMap<_>>>()?,
                    )
                    .map_err(|e| {
                        log::error!("{}", e);
                        Error::Conflict("Found incoming PDU with invalid data.")
                    })?
                }
                EventType::RoomCreate => prev_events.is_empty(),
                // Don't allow any of the following events if the sender is not joined.
                _ if sender_membership != member::MembershipState::Join => false,
                _ => {
                    // TODO
                    sender_power.unwrap_or(&power_levels.users_default)
                        >= &power_levels.state_default
                }
            } {
                error!("Unauthorized {}", event_type);
                // Not authorized
                return Err(Error::BadRequest(
                    ErrorKind::Forbidden,
                    "Event is not authorized",
                ));
            }
        } else if !self.is_joined(&sender, &room_id)? {
            // TODO: auth rules apply to all events, not only those with a state key
            error!("Unauthorized {}", event_type);
            return Err(Error::BadRequest(
                ErrorKind::Forbidden,
                "Event is not authorized",
            ));
        }

        // Our depth is the maximum depth of prev_events + 1
        let depth = prev_events
            .iter()
            .filter_map(|event_id| Some(self.get_pdu_json(event_id).ok()??.get("depth")?.as_u64()?))
            .max()
            .unwrap_or(0_u64)
            + 1;

        let mut unsigned = unsigned.unwrap_or_default();
        if let Some(state_key) = &state_key {
            if let Some((_, prev_pdu)) = self.room_state_get(&room_id, &event_type, &state_key)? {
                unsigned.insert("prev_content".to_owned(), prev_pdu.content);
                unsigned.insert(
                    "prev_sender".to_owned(),
                    serde_json::to_value(prev_pdu.sender).expect("UserId::to_value always works"),
                );
            }
        }

        let mut pdu = PduEvent {
            event_id: ruma::event_id!("$thiswillbefilledinlater"),
            room_id: room_id.clone(),
            sender: sender.clone(),
            origin_server_ts: utils::millis_since_unix_epoch()
                .try_into()
                .expect("time is valid"),
            kind: event_type,
            content,
            state_key,
            prev_events,
            depth: depth
                .try_into()
                .map_err(|_| Error::bad_database("Depth is invalid"))?,
            auth_events: auth_events
                .into_iter()
                .map(|(_, pdu)| pdu.event_id)
                .collect(),
            redacts,
            unsigned,
            hashes: ruma::events::pdu::EventHash {
                sha256: "aaa".to_owned(),
            },
            signatures: BTreeMap::new(),
        };

        // Hash and sign
        let mut pdu_json =
            utils::to_canonical_object(&pdu).expect("event is valid, we just created it");

        pdu_json.remove("event_id");

        // Add origin because synapse likes that (and it's required in the spec)
        pdu_json.insert(
            "origin".to_owned(),
            to_canonical_value(db.globals.server_name())
                .expect("server name is a valid CanonicalJsonValue"),
        );

        ruma::signatures::hash_and_sign_event(
            db.globals.server_name().as_str(),
            db.globals.keypair(),
            &mut pdu_json,
            &RoomVersionId::Version6,
        )
        .expect("event is valid, we just created it");

        // Generate event id
        pdu.event_id = EventId::try_from(&*format!(
            "${}",
            ruma::signatures::reference_hash(&pdu_json, &RoomVersionId::Version6)
                .expect("ruma can calculate reference hashes")
        ))
        .expect("ruma's reference hashes are valid event ids");

        pdu_json.insert(
            "event_id".to_owned(),
            to_canonical_value(&pdu.event_id).expect("EventId is a valid CanonicalJsonValue"),
        );

        // Increment the last index and use that
        // This is also the next_batch/since value
        let count = db.globals.next_count()?;
        let mut pdu_id = room_id.as_bytes().to_vec();
        pdu_id.push(0xff);
        pdu_id.extend_from_slice(&count.to_be_bytes());

        // We append to state before appending the pdu, so we don't have a moment in time with the
        // pdu without its state. This is okay because append_pdu can't fail.
        let statehashid = self.append_to_state(&pdu_id, &pdu, &db.globals)?;

        self.append_pdu(
            &pdu,
            pdu_json,
            count,
            pdu_id.clone().into(),
            // Since this PDU references all pdu_leaves we can update the leaves
            // of the room
            &[pdu.event_id.clone()],
            db,
        )?;

        // We set the room state after inserting the pdu, so that we never have a moment in time
        // where events in the current room state do not exist
        self.set_room_state(&room_id, &statehashid)?;

        for server in self
            .room_servers(room_id)
            .filter_map(|r| r.ok())
            .filter(|server| &**server != db.globals.server_name())
        {
            db.sending.send_pdu(&server, &pdu_id)?;
        }

        for appservice in db.appservice.iter_all().filter_map(|r| r.ok()) {
            if let Some(namespaces) = appservice.1.get("namespaces") {
                let users = namespaces
                    .get("users")
                    .and_then(|users| users.as_sequence())
                    .map_or_else(Vec::new, |users| {
                        users
                            .iter()
                            .map(|users| {
                                users
                                    .get("regex")
                                    .and_then(|regex| regex.as_str())
                                    .and_then(|regex| Regex::new(regex).ok())
                            })
                            .filter_map(|o| o)
                            .collect::<Vec<_>>()
                    });
                let aliases = namespaces
                    .get("aliases")
                    .and_then(|users| users.get("regex"))
                    .and_then(|regex| regex.as_str())
                    .and_then(|regex| Regex::new(regex).ok());
                let rooms = namespaces
                    .get("rooms")
                    .and_then(|rooms| rooms.as_sequence());

                let room_aliases = self.room_aliases(&room_id);

                let bridge_user_id = appservice
                    .1
                    .get("sender_localpart")
                    .and_then(|string| string.as_str())
                    .and_then(|string| {
                        UserId::parse_with_server_name(string, db.globals.server_name()).ok()
                    });

                let user_is_joined =
                    |bridge_user_id| self.is_joined(&bridge_user_id, room_id).unwrap_or(false);
                let matching_users = |users: &Regex| {
                    users.is_match(pdu.sender.as_str())
                        || pdu.kind == EventType::RoomMember
                            && pdu
                                .state_key
                                .as_ref()
                                .map_or(false, |state_key| users.is_match(&state_key))
                };
                let matching_aliases = |aliases: Regex| {
                    room_aliases
                        .filter_map(|r| r.ok())
                        .any(|room_alias| aliases.is_match(room_alias.as_str()))
                };

                if bridge_user_id.map_or(false, user_is_joined)
                    || users.iter().any(matching_users)
                    || aliases.map_or(false, matching_aliases)
                    || rooms.map_or(false, |rooms| rooms.contains(&room_id.as_str().into()))
                {
                    db.sending.send_pdu_appservice(&appservice.0, &pdu_id)?;
                }
            }
        }

        Ok(pdu.event_id)
    }
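
    // Appservice routing above: a PDU is forwarded to an appservice when the bridge's sender
    // user is joined to the room, or when any of the registration's `users`, `aliases` or
    // `rooms` namespace patterns match the sender (or member state key), one of the room's
    // aliases, or the room id respectively.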

    /// Returns an iterator over all PDUs in a room.
    pub fn all_pdus(
        &self,
        user_id: &UserId,
        room_id: &RoomId,
    ) -> Result<impl Iterator<Item = Result<(IVec, PduEvent)>>> {
        self.pdus_since(user_id, room_id, 0)
    }

    /// Returns a double-ended iterator over all events in a room that happened after the event
    /// with id `since` in chronological order.
    pub fn pdus_since(
        &self,
        user_id: &UserId,
        room_id: &RoomId,
        since: u64,
    ) -> Result<impl DoubleEndedIterator<Item = Result<(IVec, PduEvent)>>> {
        let mut prefix = room_id.to_string().as_bytes().to_vec();
        prefix.push(0xff);

        // Skip the first pdu if it's exactly at since, because we sent that last time
        let mut first_pdu_id = prefix.clone();
        first_pdu_id.extend_from_slice(&(since + 1).to_be_bytes());

        let mut last_pdu_id = prefix;
        last_pdu_id.extend_from_slice(&u64::MAX.to_be_bytes());

        let user_id = user_id.clone();
        Ok(self
            .pduid_pdu
            .range(first_pdu_id..last_pdu_id)
            .filter_map(|r| r.ok())
            .map(move |(pdu_id, v)| {
                let mut pdu = serde_json::from_slice::<PduEvent>(&v)
                    .map_err(|_| Error::bad_database("PDU in db is invalid."))?;
                if pdu.sender != user_id {
                    pdu.unsigned.remove("transaction_id");
                }
                Ok((pdu_id, pdu))
            }))
    }
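
    // Because pdu ids end in a big-endian count, a plain key range over `pduid_pdu` yields a
    // room's events in insertion (chronological) order; starting at `since + 1` skips the
    // event the client already received as its previous batch boundary.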

    /// Returns an iterator over all events and their tokens in a room that happened before the
    /// event with id `until` in reverse-chronological order.
    pub fn pdus_until(
        &self,
        user_id: &UserId,
        room_id: &RoomId,
        until: u64,
    ) -> impl Iterator<Item = Result<(IVec, PduEvent)>> {
        // Create the first part of the full pdu id
        let mut prefix = room_id.to_string().as_bytes().to_vec();
        prefix.push(0xff);

        let mut current = prefix.clone();
        current.extend_from_slice(&until.to_be_bytes());

        let current: &[u8] = &current;

        let user_id = user_id.clone();
        self.pduid_pdu
            .range(..current)
            .rev()
            .filter_map(|r| r.ok())
            .take_while(move |(k, _)| k.starts_with(&prefix))
            .map(move |(pdu_id, v)| {
                let mut pdu = serde_json::from_slice::<PduEvent>(&v)
                    .map_err(|_| Error::bad_database("PDU in db is invalid."))?;
                if pdu.sender != user_id {
                    pdu.unsigned.remove("transaction_id");
                }
                Ok((pdu_id, pdu))
            })
    }

    /// Returns an iterator over all events and their token in a room that happened after the event
    /// with id `from` in chronological order.
    pub fn pdus_after(
        &self,
        user_id: &UserId,
        room_id: &RoomId,
        from: u64,
    ) -> impl Iterator<Item = Result<(IVec, PduEvent)>> {
        // Create the first part of the full pdu id
        let mut prefix = room_id.to_string().as_bytes().to_vec();
        prefix.push(0xff);

        let mut current = prefix.clone();
        current.extend_from_slice(&(from + 1).to_be_bytes()); // +1 so we don't send the base event

        let current: &[u8] = &current;

        let user_id = user_id.clone();
        self.pduid_pdu
            .range(current..)
            .filter_map(|r| r.ok())
            .take_while(move |(k, _)| k.starts_with(&prefix))
            .map(move |(pdu_id, v)| {
                let mut pdu = serde_json::from_slice::<PduEvent>(&v)
                    .map_err(|_| Error::bad_database("PDU in db is invalid."))?;
                if pdu.sender != user_id {
                    pdu.unsigned.remove("transaction_id");
                }
                Ok((pdu_id, pdu))
            })
    }

    /// Replace a PDU with the redacted form.
    pub fn redact_pdu(&self, event_id: &EventId, reason: &PduEvent) -> Result<()> {
        if let Some(pdu_id) = self.get_pdu_id(event_id)? {
            let mut pdu = self
                .get_pdu_from_id(&pdu_id)?
                .ok_or_else(|| Error::bad_database("PDU ID points to invalid PDU."))?;
            pdu.redact(&reason)?;
            self.replace_pdu(&pdu_id, &pdu)?;
            Ok(())
        } else {
            Err(Error::BadRequest(
                ErrorKind::NotFound,
                "Event ID does not exist.",
            ))
        }
    }

    /// Update current membership data.
    fn update_membership(
        &self,
        room_id: &RoomId,
        user_id: &UserId,
        member_content: member::MemberEventContent,
        sender: &UserId,
        account_data: &super::account_data::AccountData,
        globals: &super::globals::Globals,
    ) -> Result<()> {
        let membership = member_content.membership;

        let mut roomserver_id = room_id.as_bytes().to_vec();
        roomserver_id.push(0xff);
        roomserver_id.extend_from_slice(user_id.server_name().as_bytes());

        let mut userroom_id = user_id.as_bytes().to_vec();
        userroom_id.push(0xff);
        userroom_id.extend_from_slice(room_id.as_bytes());

        let mut roomuser_id = room_id.as_bytes().to_vec();
        roomuser_id.push(0xff);
        roomuser_id.extend_from_slice(user_id.as_bytes());

        match &membership {
            member::MembershipState::Join => {
                // Check if the user never joined this room
                if !self.once_joined(&user_id, &room_id)? {
                    // Add the user ID to the join list then
                    self.roomuseroncejoinedids.insert(&userroom_id, &[])?;

                    // Check if the room has a predecessor
                    if let Some(predecessor) = self
                        .room_state_get(&room_id, &EventType::RoomCreate, "")?
                        .and_then(|(_, create)| {
                            serde_json::from_value::<
                                Raw<ruma::events::room::create::CreateEventContent>,
                            >(create.content)
                            .expect("Raw::from_value always works")
                            .deserialize()
                            .ok()
                        })
                        .and_then(|content| content.predecessor)
                    {
                        // Copy user settings from predecessor to the current room:
                        // - Push rules
                        //
                        // TODO: finish this once push rules are implemented.
                        //
                        // let mut push_rules_event_content = account_data
                        //     .get::<ruma::events::push_rules::PushRulesEvent>(
                        //         None,
                        //         user_id,
                        //         EventType::PushRules,
                        //     )?;
                        //
                        // NOTE: find where `predecessor.room_id` match
                        //       and update to `room_id`.
                        //
                        // account_data
                        //     .update(
                        //         None,
                        //         user_id,
                        //         EventType::PushRules,
                        //         &push_rules_event_content,
                        //         globals,
                        //     )
                        //     .ok();

                        // Copy old tags to new room
                        if let Some(tag_event) = account_data.get::<ruma::events::tag::TagEvent>(
                            Some(&predecessor.room_id),
                            user_id,
                            EventType::Tag,
                        )? {
                            account_data
                                .update(Some(room_id), user_id, EventType::Tag, &tag_event, globals)
                                .ok();
                        };

                        // Copy direct chat flag
                        if let Some(mut direct_event) = account_data
                            .get::<ruma::events::direct::DirectEvent>(
                                None,
                                user_id,
                                EventType::Direct,
                            )?
                        {
                            let mut room_ids_updated = false;

                            for room_ids in direct_event.content.0.values_mut() {
                                if room_ids.iter().any(|r| r == &predecessor.room_id) {
                                    room_ids.push(room_id.clone());
                                    room_ids_updated = true;
                                }
                            }

                            if room_ids_updated {
                                account_data.update(
                                    None,
                                    user_id,
                                    EventType::Direct,
                                    &direct_event,
                                    globals,
                                )?;
                            }
                        };
                    }
                }

                self.roomserverids.insert(&roomserver_id, &[])?;
                self.userroomid_joined.insert(&userroom_id, &[])?;
                self.roomuserid_joined.insert(&roomuser_id, &[])?;
                self.userroomid_invited.remove(&userroom_id)?;
                self.roomuserid_invited.remove(&roomuser_id)?;
                self.userroomid_left.remove(&userroom_id)?;
            }
            member::MembershipState::Invite => {
                // We want to know if the sender is ignored by the receiver
                let is_ignored = account_data
                    .get::<ignored_user_list::IgnoredUserListEvent>(
                        None,     // Ignored users are in global account data
                        &user_id, // Receiver
                        EventType::IgnoredUserList,
                    )?
                    .map_or(false, |ignored| {
                        ignored.content.ignored_users.contains(&sender)
                    });

                if is_ignored {
                    return Ok(());
                }

                self.roomserverids.insert(&roomserver_id, &[])?;
                self.userroomid_invited.insert(&userroom_id, &[])?;
                self.roomuserid_invited.insert(&roomuser_id, &[])?;
                self.userroomid_joined.remove(&userroom_id)?;
                self.roomuserid_joined.remove(&roomuser_id)?;
                self.userroomid_left.remove(&userroom_id)?;
            }
            member::MembershipState::Leave | member::MembershipState::Ban => {
                if self
                    .room_members(room_id)
                    .chain(self.room_members_invited(room_id))
                    .filter_map(|r| r.ok())
                    .all(|u| u.server_name() != user_id.server_name())
                {
                    self.roomserverids.remove(&roomserver_id)?;
                }
                self.userroomid_left.insert(&userroom_id, &[])?;
                self.userroomid_joined.remove(&userroom_id)?;
                self.roomuserid_joined.remove(&roomuser_id)?;
                self.userroomid_invited.remove(&userroom_id)?;
                self.roomuserid_invited.remove(&roomuser_id)?;
            }
            _ => {}
        }

        Ok(())
    }
|
|
|
|
|
|
|
|
    /// Makes a user forget a room.
    pub fn forget(&self, room_id: &RoomId, user_id: &UserId) -> Result<()> {
        let mut userroom_id = user_id.as_bytes().to_vec();
        userroom_id.push(0xff);
        userroom_id.extend_from_slice(room_id.as_bytes());

        self.userroomid_left.remove(userroom_id)?;

        Ok(())
    }
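    /// Points an alias at `room_id`, or removes the alias when `room_id` is `None`.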
    pub fn set_alias(
        &self,
        alias: &RoomAliasId,
        room_id: Option<&RoomId>,
        globals: &super::globals::Globals,
    ) -> Result<()> {
        if let Some(room_id) = room_id {
            // New alias
            self.alias_roomid
                .insert(alias.alias(), room_id.as_bytes())?;

            let mut aliasid = room_id.as_bytes().to_vec();
            aliasid.extend_from_slice(&globals.next_count()?.to_be_bytes());
            self.aliasid_alias.insert(aliasid, &*alias.alias())?;
        } else {
            // room_id=None means remove alias
            let room_id = self
                .alias_roomid
                .remove(alias.alias())?
                .ok_or(Error::BadRequest(
                    ErrorKind::NotFound,
                    "Alias does not exist.",
                ))?;

            for key in self.aliasid_alias.scan_prefix(room_id).keys() {
                self.aliasid_alias.remove(key?)?;
            }
        }

        Ok(())
    }
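    /// Looks up the room id an alias points to.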
    pub fn id_from_alias(&self, alias: &RoomAliasId) -> Result<Option<RoomId>> {
        self.alias_roomid
            .get(alias.alias())?
            .map_or(Ok(None), |bytes| {
                Ok(Some(
                    RoomId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
                        Error::bad_database("Room ID in alias_roomid is invalid unicode.")
                    })?)
                    .map_err(|_| Error::bad_database("Room ID in alias_roomid is invalid."))?,
                ))
            })
    }
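    /// Returns an iterator over all aliases of a room.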
    pub fn room_aliases(&self, room_id: &RoomId) -> impl Iterator<Item = Result<RoomAliasId>> {
        let mut prefix = room_id.as_bytes().to_vec();
        prefix.push(0xff);

        self.aliasid_alias
            .scan_prefix(prefix)
            .values()
            .map(|bytes| {
                Ok(serde_json::from_slice(&bytes?)
                    .map_err(|_| Error::bad_database("Alias in aliasid_alias is invalid."))?)
            })
    }
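    /// Adds the room to the public room directory, or removes it when `public` is false.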
    pub fn set_public(&self, room_id: &RoomId, public: bool) -> Result<()> {
        if public {
            self.publicroomids.insert(room_id.as_bytes(), &[])?;
        } else {
            self.publicroomids.remove(room_id.as_bytes())?;
        }

        Ok(())
    }
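    /// Returns true if the room is listed in the public room directory.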
    pub fn is_public_room(&self, room_id: &RoomId) -> Result<bool> {
        Ok(self.publicroomids.contains_key(room_id.as_bytes())?)
    }
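    /// Returns an iterator over all rooms in the public room directory.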
    pub fn public_rooms(&self) -> impl Iterator<Item = Result<RoomId>> {
        self.publicroomids.iter().keys().map(|bytes| {
            Ok(
                RoomId::try_from(utils::string_from_bytes(&bytes?).map_err(|_| {
                    Error::bad_database("Room ID in publicroomids is invalid unicode.")
                })?)
                .map_err(|_| Error::bad_database("Room ID in publicroomids is invalid."))?,
            )
        })
    }
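    /// Performs a full-text search over a room using the `tokenids` index.
    ///
    /// The search string is split into lowercase words at non-alphanumeric characters,
    /// each word is looked up under its `RoomId + 0xff + word` prefix, and only PDU ids
    /// that appear for every word are returned (newest first), together with the list
    /// of words that were searched for.
    ///
    /// A minimal usage sketch; `db`, `room_id` and the follow-up lookup are hypothetical
    /// and only illustrate how the return value can be consumed:
    ///
    /// ```ignore
    /// let (pdu_ids, words) = db.rooms.search_pdus(&room_id, "hello world")?;
    /// for pdu_id in pdu_ids.take(10) {
    ///     // resolve each returned PDU id to a full PDU via the pduid_pdu tree
    /// }
    /// ```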
    pub fn search_pdus<'a>(
        &'a self,
        room_id: &RoomId,
        search_string: &str,
    ) -> Result<(impl Iterator<Item = IVec> + 'a, Vec<String>)> {
        let mut prefix = room_id.as_bytes().to_vec();
        prefix.push(0xff);

        let words = search_string
            .split_terminator(|c: char| !c.is_alphanumeric())
            .map(str::to_lowercase)
            .collect::<Vec<_>>();

        let iterators = words.clone().into_iter().map(move |word| {
            let mut prefix2 = prefix.clone();
            prefix2.extend_from_slice(word.as_bytes());
            prefix2.push(0xff);
            self.tokenids
                .scan_prefix(&prefix2)
                .keys()
                .rev() // Newest pdus first
                .filter_map(|r| r.ok())
                .map(|key| {
                    let pduid_index = key
                        .iter()
                        .enumerate()
                        .filter(|(_, &b)| b == 0xff)
                        .nth(1)
                        .ok_or_else(|| Error::bad_database("Invalid tokenid in db."))?
                        .0
                        + 1; // +1 because the pdu id starts AFTER the separator

                    let pdu_id = key.subslice(pduid_index, key.len() - pduid_index);

                    Ok::<_, Error>(pdu_id)
                })
                .filter_map(|r| r.ok())
        });

        Ok((
            utils::common_elements(iterators, |a, b| {
                // We compare b with a because we reversed the iterator earlier
                b.cmp(a)
            })
            .unwrap(),
            words,
        ))
    }
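    /// Returns an iterator over all rooms that every user in `users` has joined,
    /// i.e. the intersection of their `userroomid_joined` entries.
    ///
    /// The `.expect("users is not empty")` below assumes the caller passes at least
    /// one user.
    ///
    /// A minimal usage sketch (`db` and the user ids are hypothetical):
    ///
    /// ```ignore
    /// for room_id in db.rooms.get_shared_rooms(vec![alice, bob]) {
    ///     println!("shared room: {}", room_id?);
    /// }
    /// ```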
    pub fn get_shared_rooms<'a>(
        &'a self,
        users: Vec<UserId>,
    ) -> impl Iterator<Item = Result<RoomId>> + 'a {
        let iterators = users.into_iter().map(move |user_id| {
            let mut prefix = user_id.as_bytes().to_vec();
            prefix.push(0xff);

            self.userroomid_joined
                .scan_prefix(&prefix)
                .keys()
                .filter_map(|r| r.ok())
                .map(|key| {
                    let roomid_index = key
                        .iter()
                        .enumerate()
                        .find(|(_, &b)| b == 0xff)
                        .ok_or_else(|| Error::bad_database("Invalid userroomid_joined in db."))?
                        .0
                        + 1; // +1 because the room id starts AFTER the separator

                    let room_id = key.subslice(roomid_index, key.len() - roomid_index);

                    Ok::<_, Error>(room_id)
                })
                .filter_map(|r| r.ok())
        });

        // We use the default compare function because keys are sorted correctly (not reversed)
        utils::common_elements(iterators, Ord::cmp)
            .expect("users is not empty")
            .map(|bytes| {
                RoomId::try_from(utils::string_from_bytes(&*bytes).map_err(|_| {
                    Error::bad_database("Invalid RoomId bytes in userroomid_joined")
                })?)
                .map_err(|_| Error::bad_database("Invalid RoomId in userroomid_joined."))
            })
    }
    /// Returns an iterator over all servers participating in a room.
    pub fn room_servers(&self, room_id: &RoomId) -> impl Iterator<Item = Result<Box<ServerName>>> {
        let mut prefix = room_id.as_bytes().to_vec();
        prefix.push(0xff);

        self.roomserverids.scan_prefix(prefix).keys().map(|key| {
            Ok(Box::<ServerName>::try_from(
                utils::string_from_bytes(
                    &key?
                        .rsplit(|&b| b == 0xff)
                        .next()
                        .expect("rsplit always returns an element"),
                )
                .map_err(|_| {
                    Error::bad_database("Server name in roomserverids is invalid unicode.")
                })?,
            )
            .map_err(|_| Error::bad_database("Server name in roomserverids is invalid."))?)
        })
    }
    /// Returns an iterator over all joined members of a room.
    pub fn room_members(&self, room_id: &RoomId) -> impl Iterator<Item = Result<UserId>> {
        let mut prefix = room_id.as_bytes().to_vec();
        prefix.push(0xff);

        self.roomuserid_joined
            .scan_prefix(prefix)
            .keys()
            .map(|key| {
                Ok(UserId::try_from(
                    utils::string_from_bytes(
                        &key?
                            .rsplit(|&b| b == 0xff)
                            .next()
                            .expect("rsplit always returns an element"),
                    )
                    .map_err(|_| {
                        Error::bad_database("User ID in roomuserid_joined is invalid unicode.")
                    })?,
                )
                .map_err(|_| Error::bad_database("User ID in roomuserid_joined is invalid."))?)
            })
    }

    /// Returns an iterator over all User IDs who ever joined a room.
    pub fn room_useroncejoined(&self, room_id: &RoomId) -> impl Iterator<Item = Result<UserId>> {
        let mut prefix = room_id.as_bytes().to_vec();
        prefix.push(0xff);

        self.roomuseroncejoinedids
            .scan_prefix(prefix)
            .keys()
            .map(|key| {
                Ok(UserId::try_from(
                    utils::string_from_bytes(
                        &key?
                            .rsplit(|&b| b == 0xff)
                            .next()
                            .expect("rsplit always returns an element"),
                    )
                    .map_err(|_| {
                        Error::bad_database("User ID in room_useroncejoined is invalid unicode.")
                    })?,
                )
                .map_err(|_| Error::bad_database("User ID in room_useroncejoined is invalid."))?)
            })
    }

    /// Returns an iterator over all invited members of a room.
    pub fn room_members_invited(&self, room_id: &RoomId) -> impl Iterator<Item = Result<UserId>> {
        let mut prefix = room_id.as_bytes().to_vec();
        prefix.push(0xff);

        self.roomuserid_invited
            .scan_prefix(prefix)
            .keys()
            .map(|key| {
                Ok(UserId::try_from(
                    utils::string_from_bytes(
                        &key?
                            .rsplit(|&b| b == 0xff)
                            .next()
                            .expect("rsplit always returns an element"),
                    )
                    .map_err(|_| {
                        Error::bad_database("User ID in roomuserid_invited is invalid unicode.")
                    })?,
                )
                .map_err(|_| Error::bad_database("User ID in roomuserid_invited is invalid."))?)
            })
    }

    /// Returns an iterator over all rooms this user joined.
    pub fn rooms_joined(&self, user_id: &UserId) -> impl Iterator<Item = Result<RoomId>> {
        self.userroomid_joined
            .scan_prefix(user_id.as_bytes())
            .keys()
            .map(|key| {
                Ok(RoomId::try_from(
                    utils::string_from_bytes(
                        &key?
                            .rsplit(|&b| b == 0xff)
                            .next()
                            .expect("rsplit always returns an element"),
                    )
                    .map_err(|_| {
                        Error::bad_database("Room ID in userroomid_joined is invalid unicode.")
                    })?,
                )
                .map_err(|_| Error::bad_database("Room ID in userroomid_joined is invalid."))?)
            })
    }

    /// Returns an iterator over all rooms a user was invited to.
    pub fn rooms_invited(&self, user_id: &UserId) -> impl Iterator<Item = Result<RoomId>> {
        let mut prefix = user_id.as_bytes().to_vec();
        prefix.push(0xff);

        self.userroomid_invited
            .scan_prefix(prefix)
            .keys()
            .map(|key| {
                Ok(RoomId::try_from(
                    utils::string_from_bytes(
                        &key?
                            .rsplit(|&b| b == 0xff)
                            .next()
                            .expect("rsplit always returns an element"),
                    )
                    .map_err(|_| {
                        Error::bad_database("Room ID in userroomid_invited is invalid unicode.")
                    })?,
                )
                .map_err(|_| Error::bad_database("Room ID in userroomid_invited is invalid."))?)
            })
    }
    /// Returns an iterator over all rooms a user left.
    pub fn rooms_left(&self, user_id: &UserId) -> impl Iterator<Item = Result<RoomId>> {
        let mut prefix = user_id.as_bytes().to_vec();
        prefix.push(0xff);

        self.userroomid_left.scan_prefix(prefix).keys().map(|key| {
            Ok(RoomId::try_from(
                utils::string_from_bytes(
                    &key?
                        .rsplit(|&b| b == 0xff)
                        .next()
                        .expect("rsplit always returns an element"),
                )
                .map_err(|_| {
                    Error::bad_database("Room ID in userroomid_left is invalid unicode.")
                })?,
            )
            .map_err(|_| Error::bad_database("Room ID in userroomid_left is invalid."))?)
        })
    }
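    /// Returns true if the user has ever joined the room.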
    pub fn once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
        let mut userroom_id = user_id.as_bytes().to_vec();
        userroom_id.push(0xff);
        userroom_id.extend_from_slice(room_id.as_bytes());

        Ok(self.roomuseroncejoinedids.get(userroom_id)?.is_some())
    }
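    /// Returns true if the user is currently joined to the room.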
    pub fn is_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
        let mut userroom_id = user_id.as_bytes().to_vec();
        userroom_id.push(0xff);
        userroom_id.extend_from_slice(room_id.as_bytes());

        Ok(self.userroomid_joined.get(userroom_id)?.is_some())
    }
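    /// Returns true if the user has a pending invite to the room.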
    pub fn is_invited(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
        let mut userroom_id = user_id.as_bytes().to_vec();
        userroom_id.push(0xff);
        userroom_id.extend_from_slice(room_id.as_bytes());

        Ok(self.userroomid_invited.get(userroom_id)?.is_some())
    }
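    /// Returns true if the user has left (or been banned from) the room and has not
    /// since rejoined or been re-invited.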
    pub fn is_left(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
        let mut userroom_id = user_id.as_bytes().to_vec();
        userroom_id.push(0xff);
        userroom_id.extend_from_slice(room_id.as_bytes());

        Ok(self.userroomid_left.get(userroom_id)?.is_some())
    }
}