feat: cross-signing

Branch: next
timokoesters 2020-06-16 12:11:38 +02:00
parent 7031240af3
commit a49a572d76
9 changed files with 827 additions and 113 deletions

Cargo.lock (generated): 38 lines changed

@@ -299,9 +299,9 @@ dependencies = [
[[package]]
name = "dtoa"
-version = "0.4.5"
+version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4358a9e11b9a09cf52383b451b49a169e8d797b68aa02301ff586d70d9661ea3"
+checksum = "134951f4028bdadb9b84baf4232681efbf277da25144b9b0ad65df75946c422b"
[[package]]
name = "encoding_rs"
@@ -632,9 +632,9 @@ dependencies = [
[[package]]
name = "itoa"
-version = "0.4.5"
+version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b8b7a7c0c47db5545ed3fef7468ee7bb5b74691498139e4b3f6a20685dc6dd8e"
+checksum = "dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6"
[[package]]
name = "jpeg-decoder"
@@ -1261,7 +1261,7 @@ dependencies = [
[[package]]
name = "ruma"
version = "0.1.0"
-source = "git+https://github.com/ruma/ruma?rev=baa87104569b45dc07a9a7a16d3c7592ab8f4d6b#baa87104569b45dc07a9a7a16d3c7592ab8f4d6b"
+source = "git+https://github.com/timokoesters/ruma#2e75b221b27698dea528d92b87e29f0e2968c495"
dependencies = [
"ruma-api",
"ruma-client-api",
@@ -1275,7 +1275,7 @@ dependencies = [
[[package]]
name = "ruma-api"
version = "0.16.1"
-source = "git+https://github.com/ruma/ruma?rev=baa87104569b45dc07a9a7a16d3c7592ab8f4d6b#baa87104569b45dc07a9a7a16d3c7592ab8f4d6b"
+source = "git+https://github.com/timokoesters/ruma#2e75b221b27698dea528d92b87e29f0e2968c495"
dependencies = [
"http",
"percent-encoding 2.1.0",
@@ -1290,7 +1290,7 @@ dependencies = [
[[package]]
name = "ruma-api-macros"
version = "0.16.1"
-source = "git+https://github.com/ruma/ruma?rev=baa87104569b45dc07a9a7a16d3c7592ab8f4d6b#baa87104569b45dc07a9a7a16d3c7592ab8f4d6b"
+source = "git+https://github.com/timokoesters/ruma#2e75b221b27698dea528d92b87e29f0e2968c495"
dependencies = [
"proc-macro2 1.0.18",
"quote 1.0.7",
@@ -1300,7 +1300,7 @@ dependencies = [
[[package]]
name = "ruma-client-api"
version = "0.9.0"
-source = "git+https://github.com/ruma/ruma?rev=baa87104569b45dc07a9a7a16d3c7592ab8f4d6b#baa87104569b45dc07a9a7a16d3c7592ab8f4d6b"
+source = "git+https://github.com/timokoesters/ruma#2e75b221b27698dea528d92b87e29f0e2968c495"
dependencies = [
"http",
"js_int",
@@ -1317,7 +1317,7 @@ dependencies = [
[[package]]
name = "ruma-common"
version = "0.1.3"
-source = "git+https://github.com/ruma/ruma?rev=baa87104569b45dc07a9a7a16d3c7592ab8f4d6b#baa87104569b45dc07a9a7a16d3c7592ab8f4d6b"
+source = "git+https://github.com/timokoesters/ruma#2e75b221b27698dea528d92b87e29f0e2968c495"
dependencies = [
"matches",
"ruma-serde",
@@ -1354,7 +1354,7 @@ dependencies = [
[[package]]
name = "ruma-federation-api"
version = "0.0.2"
-source = "git+https://github.com/ruma/ruma?rev=baa87104569b45dc07a9a7a16d3c7592ab8f4d6b#baa87104569b45dc07a9a7a16d3c7592ab8f4d6b"
+source = "git+https://github.com/timokoesters/ruma#2e75b221b27698dea528d92b87e29f0e2968c495"
dependencies = [
"js_int",
"matches",
@@ -1369,7 +1369,7 @@ dependencies = [
[[package]]
name = "ruma-identifiers"
version = "0.16.2"
-source = "git+https://github.com/ruma/ruma?rev=baa87104569b45dc07a9a7a16d3c7592ab8f4d6b#baa87104569b45dc07a9a7a16d3c7592ab8f4d6b"
+source = "git+https://github.com/timokoesters/ruma#2e75b221b27698dea528d92b87e29f0e2968c495"
dependencies = [
"rand",
"serde",
@@ -1379,7 +1379,7 @@ dependencies = [
[[package]]
name = "ruma-serde"
version = "0.2.2"
-source = "git+https://github.com/ruma/ruma?rev=baa87104569b45dc07a9a7a16d3c7592ab8f4d6b#baa87104569b45dc07a9a7a16d3c7592ab8f4d6b"
+source = "git+https://github.com/timokoesters/ruma#2e75b221b27698dea528d92b87e29f0e2968c495"
dependencies = [
"dtoa",
"itoa",
@@ -1392,7 +1392,7 @@ dependencies = [
[[package]]
name = "ruma-signatures"
version = "0.6.0-dev.1"
-source = "git+https://github.com/ruma/ruma?rev=baa87104569b45dc07a9a7a16d3c7592ab8f4d6b#baa87104569b45dc07a9a7a16d3c7592ab8f4d6b"
+source = "git+https://github.com/timokoesters/ruma#2e75b221b27698dea528d92b87e29f0e2968c495"
dependencies = [
"base64 0.12.2",
"ring",
@@ -1684,6 +1684,12 @@ dependencies = [
"winapi 0.3.8",
]
[[package]]
name = "tinyvec"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53953d2d3a5ad81d9f844a32f14ebb121f50b650cd59d0ee2a07cf13c617efed"
[[package]]
name = "tokio"
version = "0.2.21"
@@ -1795,11 +1801,11 @@ dependencies = [
[[package]]
name = "unicode-normalization"
-version = "0.1.12"
+version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5479532badd04e128284890390c1e876ef7a993d0570b3597ae43dfa1d59afa4"
+checksum = "6fb19cf769fa8c6a80a162df694621ebeb4dafb606470b2b2fce0be40a98a977"
dependencies = [
-"smallvec",
+"tinyvec",
]
[[package]]

Cargo.toml

@@ -29,15 +29,16 @@ thiserror = "1.0.19"
image = { version = "0.23.4", default-features = false, features = ["jpeg", "png", "gif"] }
[dependencies.ruma]
-git = "https://github.com/ruma/ruma"
-rev = "baa87104569b45dc07a9a7a16d3c7592ab8f4d6b"
+git = "https://github.com/timokoesters/ruma"
+#rev = "baa87104569b45dc07a9a7a16d3c7592ab8f4d6b"
#path = "../ruma/ruma"
features = ["rand", "client-api", "federation-api"]
# These are required only until ruma-events and ruma-federation-api are merged into ruma/ruma
[patch.crates-io]
-ruma-common = { git = "https://github.com/ruma/ruma", rev = "baa87104569b45dc07a9a7a16d3c7592ab8f4d6b" }
-ruma-serde = { git = "https://github.com/ruma/ruma", rev = "baa87104569b45dc07a9a7a16d3c7592ab8f4d6b" }
-ruma-identifiers = { git = "https://github.com/ruma/ruma", rev = "baa87104569b45dc07a9a7a16d3c7592ab8f4d6b" }
+ruma-common = { git = "https://github.com/timokoesters/ruma" }
+ruma-serde = { git = "https://github.com/timokoesters/ruma" }
+ruma-identifiers = { git = "https://github.com/timokoesters/ruma" }
#ruma-common = { path = "../ruma/ruma-common" }
#ruma-serde = { path = "../ruma/ruma-serde" }
#ruma-identifiers = { path = "../ruma/ruma-identifiers" }


@@ -5,6 +5,7 @@ use std::{
};
use crate::{utils, ConduitResult, Database, Error, Ruma};
use keys::{upload_signatures, upload_signing_keys};
use log::warn;
use rocket::{delete, get, options, post, put, State};
use ruma::{
@@ -13,6 +14,10 @@ use ruma::{
r0::{
account::{get_username_availability, register},
alias::{create_alias, delete_alias, get_alias},
backup::{
add_backup_keys, create_backup, get_backup, get_backup_keys, get_latest_backup,
update_backup,
},
capabilities::get_capabilities,
config::{get_global_account_data, set_global_account_data},
context::get_context,
@@ -33,7 +38,7 @@ use ruma::{
profile::{
get_avatar_url, get_display_name, get_profile, set_avatar_url, set_display_name,
},
-push::{get_pushrules_all, set_pushrule, set_pushrule_enabled},
+push::{get_pushers, get_pushrules_all, set_pushrule, set_pushrule_enabled},
read_marker::set_read_marker,
redact::redact_event,
room::{self, create_room},
@@ -71,9 +76,13 @@ const SESSION_ID_LENGTH: usize = 256;
#[get("/_matrix/client/versions")]
pub fn get_supported_versions_route() -> ConduitResult<get_supported_versions::Response> {
let mut unstable_features = BTreeMap::new();
unstable_features.insert("org.matrix.e2e_cross_signing".to_owned(), true);
Ok(get_supported_versions::Response {
versions: vec!["r0.5.0".to_owned(), "r0.6.0".to_owned()],
-unstable_features: BTreeMap::new(),
+unstable_features,
}
.into())
}
@@ -349,11 +358,11 @@ pub fn get_pushrules_all_route(
#[put(
"/_matrix/client/r0/pushrules/<_scope>/<_kind>/<_rule_id>",
-data = "<body>"
+//data = "<body>"
)]
pub fn set_pushrule_route(
-db: State<'_, Database>,
-body: Ruma<set_pushrule::Request>,
+//db: State<'_, Database>,
+//body: Ruma<set_pushrule::Request>,
_scope: String,
_kind: String,
_rule_id: String,
@@ -694,8 +703,13 @@ pub fn upload_keys_route(
}
if let Some(device_keys) = &body.device_keys {
-db.users
-.add_device_keys(user_id, device_id, device_keys, &db.globals)?;
+// This check is needed to assure that signatures are kept
+if db.users.get_device_keys(user_id, device_id)?.is_none() {
+db.users
+.add_device_keys(user_id, device_id, device_keys, &db.globals)?;
+} else {
+println!("Key from {} was skipped: {:?}", user_id, device_keys);
+}
}
Ok(upload_keys::Response {
@@ -709,33 +723,38 @@ pub fn get_keys_route(
db: State<'_, Database>,
body: Ruma<get_keys::Request>,
) -> ConduitResult<get_keys::Response> {
let sender_id = body.user_id.as_ref().expect("user is authenticated");
let mut master_keys = BTreeMap::new();
let mut self_signing_keys = BTreeMap::new();
let mut user_signing_keys = BTreeMap::new();
let mut device_keys = BTreeMap::new();
for (user_id, device_ids) in &body.device_keys {
if device_ids.is_empty() {
let mut container = BTreeMap::new();
-for result in db.users.all_device_keys(&user_id.clone()) {
-let (device_id, mut keys) = result?;
+for device_id in db.users.all_device_ids(user_id) {
+let device_id = device_id?;
+if let Some(mut keys) = db.users.get_device_keys(user_id, &device_id)? {
let metadata = db
.users
.get_device_metadata(user_id, &device_id)?
.ok_or_else(|| {
Error::bad_database("all_device_keys contained nonexistent device.")
})?;
keys.unsigned = Some(keys::UnsignedDeviceInfo {
device_display_name: metadata.display_name,
});
-container.insert(device_id, keys);
+container.insert(device_id.to_owned(), keys);
+}
}
device_keys.insert(user_id.clone(), container);
} else {
for device_id in device_ids {
let mut container = BTreeMap::new();
-for keys in db.users.get_device_keys(&user_id.clone(), &device_id) {
-let mut keys = keys?;
+if let Some(mut keys) = db.users.get_device_keys(&user_id.clone(), &device_id)? {
let metadata = db.users.get_device_metadata(user_id, &device_id)?.ok_or(
Error::BadRequest(
ErrorKind::InvalidParam,
@@ -752,11 +771,26 @@ pub fn get_keys_route(
device_keys.insert(user_id.clone(), container);
}
}
if let Some(master_key) = db.users.get_master_key(user_id, sender_id)? {
master_keys.insert(user_id.clone(), master_key);
}
if let Some(self_signing_key) = db.users.get_self_signing_key(user_id, sender_id)? {
self_signing_keys.insert(user_id.clone(), self_signing_key);
}
if user_id == sender_id {
if let Some(user_signing_key) = db.users.get_user_signing_key(sender_id)? {
user_signing_keys.insert(user_id.clone(), user_signing_key);
}
}
}
Ok(get_keys::Response {
-failures: BTreeMap::new(),
+master_keys,
self_signing_keys,
user_signing_keys,
device_keys,
failures: BTreeMap::new(),
}
.into())
}
@@ -789,6 +823,125 @@ pub fn claim_keys_route(
.into())
}
#[post("/_matrix/client/unstable/room_keys/version", data = "<body>")]
pub fn create_backup_route(
db: State<'_, Database>,
body: Ruma<create_backup::Request>,
) -> ConduitResult<create_backup::Response> {
let user_id = body.user_id.as_ref().expect("user is authenticated");
let version = db
.key_backups
.create_backup(&user_id, &body.algorithm, &db.globals)?;
Ok(create_backup::Response { version }.into())
}
#[put(
"/_matrix/client/unstable/room_keys/version/<_version>",
data = "<body>"
)]
pub fn update_backup_route(
db: State<'_, Database>,
body: Ruma<update_backup::Request>,
_version: String,
) -> ConduitResult<update_backup::Response> {
let user_id = body.user_id.as_ref().expect("user is authenticated");
db.key_backups
.update_backup(&user_id, &body.version, &body.algorithm, &db.globals)?;
Ok(update_backup::Response.into())
}
#[get("/_matrix/client/unstable/room_keys/version", data = "<body>")]
pub fn get_latest_backup_route(
db: State<'_, Database>,
body: Ruma<get_latest_backup::Request>,
) -> ConduitResult<get_latest_backup::Response> {
let user_id = body.user_id.as_ref().expect("user is authenticated");
let (version, algorithm) =
db.key_backups
.get_latest_backup(&user_id)?
.ok_or(Error::BadRequest(
ErrorKind::NotFound,
"Key backup does not exist.",
))?;
Ok(get_latest_backup::Response {
algorithm,
count: (db.key_backups.count_keys(user_id, &version)? as u32).into(),
etag: db.key_backups.get_etag(user_id, &version)?,
version,
}
.into())
}
#[get(
"/_matrix/client/unstable/room_keys/version/<_version>",
data = "<body>"
)]
pub fn get_backup_route(
db: State<'_, Database>,
body: Ruma<get_backup::Request>,
_version: String,
) -> ConduitResult<get_backup::Response> {
let user_id = body.user_id.as_ref().expect("user is authenticated");
let algorithm =
db.key_backups
.get_backup(&user_id, &body.version)?
.ok_or(Error::BadRequest(
ErrorKind::NotFound,
"Key backup does not exist.",
))?;
Ok(get_backup::Response {
algorithm,
count: (db.key_backups.count_keys(user_id, &body.version)? as u32).into(),
etag: db.key_backups.get_etag(user_id, &body.version)?,
version: body.version.clone(),
}
.into())
}
#[put("/_matrix/client/unstable/room_keys/keys", data = "<body>")]
pub fn add_backup_keys_route(
db: State<'_, Database>,
body: Ruma<add_backup_keys::Request>,
) -> ConduitResult<add_backup_keys::Response> {
let user_id = body.user_id.as_ref().expect("user is authenticated");
for (room_id, room) in &body.rooms {
for (session_id, key_data) in &room.sessions {
db.key_backups.add_key(
&user_id,
&body.version,
&room_id,
&session_id,
&key_data,
&db.globals,
)?
}
}
Ok(add_backup_keys::Response {
count: (db.key_backups.count_keys(user_id, &body.version)? as u32).into(),
etag: db.key_backups.get_etag(user_id, &body.version)?,
}
.into())
}
#[get("/_matrix/client/unstable/room_keys/keys", data = "<body>")]
pub fn get_backup_keys_route(
db: State<'_, Database>,
body: Ruma<get_backup_keys::Request>,
) -> ConduitResult<get_backup_keys::Response> {
let user_id = body.user_id.as_ref().expect("user is authenticated");
let rooms = db.key_backups.get_all(&user_id, &body.version)?;
Ok(get_backup_keys::Response { rooms }.into())
}
#[post("/_matrix/client/r0/rooms/<_room_id>/read_markers", data = "<body>")] #[post("/_matrix/client/r0/rooms/<_room_id>/read_markers", data = "<body>")]
pub fn set_read_marker_route( pub fn set_read_marker_route(
db: State<'_, Database>, db: State<'_, Database>,
@@ -2040,7 +2193,7 @@ pub fn sync_route(
let mut pdus = db
.rooms
-.pdus_since(&room_id, since)?
+.pdus_since(&user_id, &room_id, since)?
.filter_map(|r| r.ok()) // Filter out buggy events
.collect::<Vec<_>>();
@@ -2083,7 +2236,7 @@ pub fn sync_route(
for hero in db
.rooms
-.all_pdus(&room_id)?
+.all_pdus(&user_id, &room_id)?
.filter_map(|pdu| pdu.ok()) // Ignore all broken pdus
.filter(|pdu| pdu.kind == EventType::RoomMember)
.map(|pdu| {
@@ -2157,7 +2310,7 @@ pub fn sync_route(
if let Some(last_read) = db.rooms.edus.room_read_get(&room_id, &user_id)? {
Some(
(db.rooms
-.pdus_since(&room_id, last_read)?
+.pdus_since(&user_id, &room_id, last_read)?
.filter_map(|pdu| pdu.ok()) // Filter out buggy events
.filter(|pdu| {
matches!(
@@ -2271,7 +2424,7 @@ pub fn sync_route(
let mut left_rooms = BTreeMap::new();
for room_id in db.rooms.rooms_left(&user_id) {
let room_id = room_id?;
-let pdus = db.rooms.pdus_since(&room_id, since)?;
+let pdus = db.rooms.pdus_since(&user_id, &room_id, since)?;
let room_events = pdus
.filter_map(|pdu| pdu.ok()) // Filter out buggy events
.map(|pdu| pdu.to_room_event())
@@ -2375,7 +2528,7 @@ pub fn sync_route(
device_lists: sync_events::DeviceLists {
changed: if since != 0 {
db.users
-.device_keys_changed(since)
+.keys_changed(since)
.filter_map(|u| u.ok())
.collect() // Filter out buggy events
} else {
@@ -2426,7 +2579,7 @@ pub fn get_context_route(
let events_before = db
.rooms
-.pdus_until(&body.room_id, base_token)
+.pdus_until(&user_id, &body.room_id, base_token)
.take(
u32::try_from(body.limit).map_err(|_| {
Error::BadRequest(ErrorKind::InvalidParam, "Limit value is invalid.")
@@ -2452,7 +2605,7 @@ pub fn get_context_route(
let events_after = db
.rooms
-.pdus_after(&body.room_id, base_token)
+.pdus_after(&user_id, &body.room_id, base_token)
.take(
u32::try_from(body.limit).map_err(|_| {
Error::BadRequest(ErrorKind::InvalidParam, "Limit value is invalid.")
@@ -2516,7 +2669,7 @@ pub fn get_message_events_route(
get_message_events::Direction::Forward => {
let events_after = db
.rooms
-.pdus_after(&body.room_id, from)
+.pdus_after(&user_id, &body.room_id, from)
// Use limit or else 10
.take(body.limit.map_or(Ok::<_, Error>(10_usize), |l| {
Ok(u32::try_from(l).map_err(|_| {
@@ -2551,7 +2704,7 @@ pub fn get_message_events_route(
get_message_events::Direction::Backward => {
let events_before = db
.rooms
-.pdus_until(&body.room_id, from)
+.pdus_until(&user_id, &body.room_id, from)
// Use limit or else 10
.take(body.limit.map_or(Ok::<_, Error>(10_usize), |l| {
Ok(u32::try_from(l).map_err(|_| {
@@ -2871,9 +3024,126 @@ pub fn delete_devices_route(
Ok(delete_devices::Response.into())
}
#[post("/_matrix/client/unstable/keys/device_signing/upload", data = "<body>")]
pub fn upload_signing_keys_route(
db: State<'_, Database>,
body: Ruma<upload_signing_keys::Request>,
) -> ConduitResult<upload_signing_keys::Response> {
let user_id = body.user_id.as_ref().expect("user is authenticated");
let device_id = body.device_id.as_ref().expect("user is authenticated");
// UIAA
let mut uiaainfo = UiaaInfo {
flows: vec![AuthFlow {
stages: vec!["m.login.password".to_owned()],
}],
completed: Vec::new(),
params: Default::default(),
session: None,
auth_error: None,
};
if let Some(auth) = &body.auth {
let (worked, uiaainfo) = db.uiaa.try_auth(
&user_id,
&device_id,
auth,
&uiaainfo,
&db.users,
&db.globals,
)?;
if !worked {
return Err(Error::Uiaa(uiaainfo));
}
// Success!
} else {
uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH));
db.uiaa.create(&user_id, &device_id, &uiaainfo)?;
return Err(Error::Uiaa(uiaainfo));
}
if let Some(master_key) = &body.master_key {
db.users.add_cross_signing_keys(
user_id,
&master_key,
&body.self_signing_key,
&body.user_signing_key,
&db.globals,
)?;
}
Ok(upload_signing_keys::Response.into())
}
#[post("/_matrix/client/unstable/keys/signatures/upload", data = "<body>")]
pub fn upload_signatures_route(
db: State<'_, Database>,
body: Ruma<upload_signatures::Request>,
) -> ConduitResult<upload_signatures::Response> {
let sender_id = body.user_id.as_ref().expect("user is authenticated");
for (user_id, signed_keys) in &body.signed_keys {
for (key_id, signed_key) in signed_keys {
for signature in signed_key
.get("signatures")
.ok_or(Error::BadRequest(
ErrorKind::InvalidParam,
"Missing signatures field.",
))?
.get(sender_id.to_string())
.ok_or(Error::BadRequest(
ErrorKind::InvalidParam,
"Invalid user in signatures field.",
))?
.as_object()
.ok_or(Error::BadRequest(
ErrorKind::InvalidParam,
"Invalid signature.",
))?
.clone()
.into_iter()
{
// Signature validation?
let signature = (
signature.0,
signature
.1
.as_str()
.ok_or(Error::BadRequest(
ErrorKind::InvalidParam,
"Invalid signature value.",
))?
.to_owned(),
);
db.users
.sign_key(&user_id, &key_id, signature, &sender_id, &db.globals)?;
}
}
}
Ok(upload_signatures::Response.into())
}
#[get("/_matrix/client/r0/pushers")]
pub fn pushers_route() -> ConduitResult<get_pushers::Response> {
Ok(get_pushers::Response {
pushers: Vec::new(),
}
.into())
}
#[post("/_matrix/client/r0/pushers/set")]
pub fn set_pushers_route() -> ConduitResult<get_pushers::Response> {
Ok(get_pushers::Response {
pushers: Vec::new(),
}
.into())
}
#[options("/<_segments..>")]
pub fn options_route(
_segments: rocket::http::uri::Segments<'_>,
) -> ConduitResult<send_event_to_device::Response> {
Ok(send_event_to_device::Response.into())
}


@@ -1,6 +1,7 @@
pub(self) mod account_data;
pub(self) mod global_edus;
pub(self) mod globals;
pub(self) mod key_backups;
pub(self) mod media;
pub(self) mod rooms;
pub(self) mod uiaa;
@@ -21,6 +22,7 @@ pub struct Database {
pub account_data: account_data::AccountData,
pub global_edus: global_edus::GlobalEdus,
pub media: media::Media,
pub key_backups: key_backups::KeyBackups,
pub _db: sled::Db,
}
@@ -73,8 +75,11 @@ impl Database {
userdeviceid_metadata: db.open_tree("userdeviceid_metadata")?,
token_userdeviceid: db.open_tree("token_userdeviceid")?,
onetimekeyid_onetimekeys: db.open_tree("onetimekeyid_onetimekeys")?,
-userdeviceid_devicekeys: db.open_tree("userdeviceid_devicekeys")?,
-devicekeychangeid_userid: db.open_tree("devicekeychangeid_userid")?,
+keychangeid_userid: db.open_tree("devicekeychangeid_userid")?,
+keyid_key: db.open_tree("keyid_key")?,
userid_masterkeyid: db.open_tree("userid_masterkeyid")?,
userid_selfsigningkeyid: db.open_tree("userid_selfsigningkeyid")?,
userid_usersigningkeyid: db.open_tree("userid_usersigningkeyid")?,
todeviceid_events: db.open_tree("todeviceid_events")?,
},
uiaa: uiaa::Uiaa {
@@ -111,6 +116,11 @@ impl Database {
media: media::Media {
mediaid_file: db.open_tree("mediaid_file")?,
},
key_backups: key_backups::KeyBackups {
backupid_algorithm: db.open_tree("backupid_algorithm")?,
backupid_etag: db.open_tree("backupid_etag")?,
backupkeyid_backup: db.open_tree("backupkeyid_backupmetadata")?,
},
_db: db,
})
}

src/database/key_backups.rs (new file, 207 lines)

@@ -0,0 +1,207 @@
use crate::{utils, Error, Result};
use ruma::{
api::client::{
error::ErrorKind,
r0::backup::{get_backup_keys::Sessions, BackupAlgorithm, KeyData},
},
identifiers::{RoomId, UserId},
};
use std::{collections::BTreeMap, convert::TryFrom};
pub struct KeyBackups {
pub(super) backupid_algorithm: sled::Tree, // BackupId = UserId + Version(Count)
pub(super) backupid_etag: sled::Tree, // BackupId = UserId + Version(Count)
pub(super) backupkeyid_backup: sled::Tree, // BackupKeyId = UserId + Version + RoomId + SessionId
}
impl KeyBackups {
pub fn create_backup(
&self,
user_id: &UserId,
backup_metadata: &BackupAlgorithm,
globals: &super::globals::Globals,
) -> Result<String> {
let version = globals.next_count()?.to_string();
let mut key = user_id.to_string().as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(&version.as_bytes());
self.backupid_algorithm.insert(
&key,
&*serde_json::to_string(backup_metadata)
.expect("BackupAlgorithm::to_string always works"),
)?;
self.backupid_etag
.insert(&key, &globals.next_count()?.to_be_bytes())?;
Ok(version)
}
pub fn update_backup(
&self,
user_id: &UserId,
version: &str,
backup_metadata: &BackupAlgorithm,
globals: &super::globals::Globals,
) -> Result<String> {
let mut key = user_id.to_string().as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(&version.as_bytes());
if self.backupid_algorithm.get(&key)?.is_none() {
return Err(Error::BadRequest(
ErrorKind::NotFound,
"Tried to update nonexistent backup.",
));
}
self.backupid_algorithm.insert(
&key,
&*serde_json::to_string(backup_metadata)
.expect("BackupAlgorithm::to_string always works"),
)?;
self.backupid_etag
.insert(&key, &globals.next_count()?.to_be_bytes())?;
Ok(version.to_string())
}
pub fn get_latest_backup(&self, user_id: &UserId) -> Result<Option<(String, BackupAlgorithm)>> {
let mut prefix = user_id.to_string().as_bytes().to_vec();
prefix.push(0xff);
self.backupid_algorithm
.scan_prefix(&prefix)
.last()
.map_or(Ok(None), |r| {
let (key, value) = r?;
let version = utils::string_from_bytes(
key.rsplit(|&b| b == 0xff)
.next()
.expect("rsplit always returns an element"),
)
.map_err(|_| Error::bad_database("backupid_algorithm key is invalid."))?;
Ok(Some((
version,
serde_json::from_slice(&value).map_err(|_| {
Error::bad_database("Algorithm in backupid_algorithm is invalid.")
})?,
)))
})
}
pub fn get_backup(&self, user_id: &UserId, version: &str) -> Result<Option<BackupAlgorithm>> {
let mut key = user_id.to_string().as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
self.backupid_algorithm.get(key)?.map_or(Ok(None), |bytes| {
Ok(serde_json::from_slice(&bytes)
.map_err(|_| Error::bad_database("Algorithm in backupid_algorithm is invalid."))?)
})
}
pub fn add_key(
&self,
user_id: &UserId,
version: &str,
room_id: &RoomId,
session_id: &str,
key_data: &KeyData,
globals: &super::globals::Globals,
) -> Result<()> {
let mut key = user_id.to_string().as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
if self.backupid_algorithm.get(&key)?.is_none() {
return Err(Error::BadRequest(
ErrorKind::NotFound,
"Tried to update nonexistent backup.",
));
}
self.backupid_etag
.insert(&key, &globals.next_count()?.to_be_bytes())?;
key.push(0xff);
key.extend_from_slice(room_id.to_string().as_bytes());
key.push(0xff);
key.extend_from_slice(session_id.as_bytes());
self.backupkeyid_backup.insert(
&key,
&*serde_json::to_string(&key_data).expect("KeyData::to_string always works"),
)?;
Ok(())
}
pub fn count_keys(&self, user_id: &UserId, version: &str) -> Result<usize> {
let mut prefix = user_id.to_string().as_bytes().to_vec();
prefix.push(0xff);
prefix.extend_from_slice(version.as_bytes());
Ok(self.backupkeyid_backup.scan_prefix(&prefix).count())
}
pub fn get_etag(&self, user_id: &UserId, version: &str) -> Result<String> {
let mut key = user_id.to_string().as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(&version.as_bytes());
Ok(utils::u64_from_bytes(
&self
.backupid_etag
.get(&key)?
.ok_or_else(|| Error::bad_database("Backup has no etag."))?,
)
.map_err(|_| Error::bad_database("etag in backupid_etag invalid."))?
.to_string())
}
pub fn get_all(&self, user_id: &UserId, version: &str) -> Result<BTreeMap<RoomId, Sessions>> {
let mut prefix = user_id.to_string().as_bytes().to_vec();
prefix.push(0xff);
prefix.extend_from_slice(version.as_bytes());
let mut rooms = BTreeMap::<RoomId, Sessions>::new();
for result in self.backupkeyid_backup.scan_prefix(&prefix).map(|r| {
let (key, value) = r?;
let mut parts = key.rsplit(|&b| b == 0xff);
let session_id = utils::string_from_bytes(
&parts
.next()
.ok_or_else(|| Error::bad_database("backupkeyid_backup key is invalid."))?,
)
.map_err(|_| Error::bad_database("backupkeyid_backup session_id is invalid."))?;
let room_id = RoomId::try_from(
utils::string_from_bytes(
&parts
.next()
.ok_or_else(|| Error::bad_database("backupkeyid_backup key is invalid."))?,
)
.map_err(|_| Error::bad_database("backupkeyid_backup room_id is invalid."))?,
)
.map_err(|_| Error::bad_database("backupkeyid_backup room_id is invalid room id."))?;
let key_data = serde_json::from_slice(&value)
.map_err(|_| Error::bad_database("KeyData in backupkeyid_backup is invalid."))?;
Ok::<_, Error>((room_id, session_id, key_data))
}) {
let (room_id, session_id, key_data) = result?;
rooms
.entry(room_id)
.or_insert_with(|| Sessions {
sessions: BTreeMap::new(),
})
.sessions
.insert(session_id, key_data);
}
Ok(rooms)
}
}
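All three trees above compose their sled keys by concatenating identifiers with a 0xff separator byte: backupid_algorithm and backupid_etag are keyed by user ID plus backup version, while backupkeyid_backup additionally appends the room ID and session ID. A standalone sketch of that layout follows (not part of the commit; the helper names are invented for illustration):

// Hypothetical helpers mirroring the key layout used by the KeyBackups trees above.
fn backup_id(user_id: &str, version: &str) -> Vec<u8> {
    // backupid_algorithm / backupid_etag key: UserId + 0xff + Version
    let mut key = user_id.as_bytes().to_vec();
    key.push(0xff);
    key.extend_from_slice(version.as_bytes());
    key
}

fn backup_key_id(user_id: &str, version: &str, room_id: &str, session_id: &str) -> Vec<u8> {
    // backupkeyid_backup key: UserId + 0xff + Version + 0xff + RoomId + 0xff + SessionId
    let mut key = backup_id(user_id, version);
    key.push(0xff);
    key.extend_from_slice(room_id.as_bytes());
    key.push(0xff);
    key.extend_from_slice(session_id.as_bytes());
    key
}

fn main() {
    let key = backup_key_id("@alice:example.com", "1", "!room:example.com", "sessionid");
    // Three 0xff separators split the four components.
    assert_eq!(key.iter().filter(|&&b| b == 0xff).count(), 3);
}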


@@ -602,13 +602,18 @@ impl Rooms {
}
/// Returns an iterator over all PDUs in a room.
-pub fn all_pdus(&self, room_id: &RoomId) -> Result<impl Iterator<Item = Result<PduEvent>>> {
-self.pdus_since(room_id, 0)
+pub fn all_pdus(
+&self,
+user_id: &UserId,
+room_id: &RoomId,
+) -> Result<impl Iterator<Item = Result<PduEvent>>> {
+self.pdus_since(user_id, room_id, 0)
}
/// Returns an iterator over all events in a room that happened after the event with id `since`.
pub fn pdus_since(
&self,
user_id: &UserId,
room_id: &RoomId,
since: u64,
) -> Result<impl Iterator<Item = Result<PduEvent>>> {
@@ -617,12 +622,13 @@ impl Rooms {
pdu_id.push(0xff);
pdu_id.extend_from_slice(&(since).to_be_bytes());
-self.pdus_since_pduid(room_id, &pdu_id)
+self.pdus_since_pduid(user_id, room_id, &pdu_id)
}
/// Returns an iterator over all events in a room that happened after the event with id `since`.
pub fn pdus_since_pduid(
&self,
user_id: &UserId,
room_id: &RoomId,
pdu_id: &[u8],
) -> Result<impl Iterator<Item = Result<PduEvent>>> {
@@ -630,6 +636,7 @@ impl Rooms {
let mut prefix = room_id.to_string().as_bytes().to_vec();
prefix.push(0xff);
let user_id = user_id.clone();
Ok(self
.pduid_pdu
.range(pdu_id..)
@@ -641,9 +648,13 @@ impl Rooms {
})
.filter_map(|r| r.ok())
.take_while(move |(k, _)| k.starts_with(&prefix))
-.map(|(_, v)| {
-Ok(serde_json::from_slice(&v)
-.map_err(|_| Error::bad_database("PDU in db is invalid."))?)
+.map(move |(_, v)| {
+let mut pdu = serde_json::from_slice::<PduEvent>(&v)
+.map_err(|_| Error::bad_database("PDU in db is invalid."))?;
if pdu.sender != user_id {
pdu.unsigned.remove("transaction_id");
}
Ok(pdu)
}))
}
@@ -651,6 +662,7 @@ impl Rooms {
/// `until` in reverse-chronological order.
pub fn pdus_until(
&self,
user_id: &UserId,
room_id: &RoomId,
until: u64,
) -> impl Iterator<Item = Result<PduEvent>> {
@@ -663,14 +675,19 @@ impl Rooms {
let current: &[u8] = &current;
let user_id = user_id.clone();
self.pduid_pdu
.range(..current)
.rev()
.filter_map(|r| r.ok())
.take_while(move |(k, _)| k.starts_with(&prefix))
-.map(|(_, v)| {
-Ok(serde_json::from_slice(&v)
-.map_err(|_| Error::bad_database("PDU in db is invalid."))?)
+.map(move |(_, v)| {
+let mut pdu = serde_json::from_slice::<PduEvent>(&v)
+.map_err(|_| Error::bad_database("PDU in db is invalid."))?;
if pdu.sender != user_id {
pdu.unsigned.remove("transaction_id");
}
Ok(pdu)
})
}
@@ -678,6 +695,7 @@ impl Rooms {
/// `from` in chronological order.
pub fn pdus_after(
&self,
user_id: &UserId,
room_id: &RoomId,
from: u64,
) -> impl Iterator<Item = Result<PduEvent>> {
@@ -690,13 +708,18 @@ impl Rooms {
let current: &[u8] = &current;
let user_id = user_id.clone();
self.pduid_pdu
.range(current..)
.filter_map(|r| r.ok())
.take_while(move |(k, _)| k.starts_with(&prefix))
-.map(|(_, v)| {
-Ok(serde_json::from_slice(&v)
-.map_err(|_| Error::bad_database("PDU in db is invalid."))?)
+.map(move |(_, v)| {
+let mut pdu = serde_json::from_slice::<PduEvent>(&v)
+.map_err(|_| Error::bad_database("PDU in db is invalid."))?;
if pdu.sender != user_id {
pdu.unsigned.remove("transaction_id");
}
Ok(pdu)
})
}


@@ -1,9 +1,12 @@
use crate::{utils, Error, Result};
use js_int::UInt;
use ruma::{
-api::client::r0::{
-device::Device,
-keys::{AlgorithmAndDeviceId, DeviceKeys, KeyAlgorithm, OneTimeKey},
+api::client::{
+error::ErrorKind,
+r0::{
device::Device,
keys::{AlgorithmAndDeviceId, CrossSigningKey, DeviceKeys, KeyAlgorithm, OneTimeKey},
},
},
events::{to_device::AnyToDeviceEvent, EventJson, EventType},
identifiers::UserId,
@@ -19,8 +22,11 @@ pub struct Users {
pub(super) token_userdeviceid: sled::Tree,
pub(super) onetimekeyid_onetimekeys: sled::Tree, // OneTimeKeyId = UserId + AlgorithmAndDeviceId
-pub(super) userdeviceid_devicekeys: sled::Tree,
-pub(super) devicekeychangeid_userid: sled::Tree, // DeviceKeyChangeId = Count
+pub(super) keychangeid_userid: sled::Tree, // KeyChangeId = Count
+pub(super) keyid_key: sled::Tree, // KeyId = UserId + KeyId (depends on key type)
pub(super) userid_masterkeyid: sled::Tree,
pub(super) userid_selfsigningkeyid: sled::Tree,
pub(super) userid_usersigningkeyid: sled::Tree,
pub(super) todeviceid_events: sled::Tree, // ToDeviceId = UserId + DeviceId + Count
}
@@ -171,9 +177,6 @@ impl Users {
userdeviceid.push(0xff);
userdeviceid.extend_from_slice(device_id.as_bytes());
-// Remove device keys
-self.userdeviceid_devicekeys.remove(&userdeviceid)?;
// Remove tokens
if let Some(old_token) = self.userdeviceid_token.remove(&userdeviceid)? {
self.token_userdeviceid.remove(&old_token)?;
@@ -350,38 +353,168 @@ impl Users {
userdeviceid.push(0xff);
userdeviceid.extend_from_slice(device_id.as_bytes());
-self.userdeviceid_devicekeys.insert(
+self.keyid_key.insert(
&userdeviceid,
&*serde_json::to_string(&device_keys).expect("DeviceKeys::to_string always works"),
)?;
-self.devicekeychangeid_userid
+self.keychangeid_userid
.insert(globals.next_count()?.to_be_bytes(), &*user_id.to_string())?;
Ok(())
}
-pub fn get_device_keys(
+pub fn add_cross_signing_keys(
&self,
user_id: &UserId,
-device_id: &str,
-) -> impl Iterator<Item = Result<DeviceKeys>> {
-let mut key = user_id.to_string().as_bytes().to_vec();
-key.push(0xff);
-key.extend_from_slice(device_id.as_bytes());
-self.userdeviceid_devicekeys
-.scan_prefix(key)
-.values()
-.map(|bytes| {
-Ok(serde_json::from_slice(&bytes?)
-.map_err(|_| Error::bad_database("DeviceKeys in db are invalid."))?)
-})
+master_key: &CrossSigningKey,
+self_signing_key: &Option<CrossSigningKey>,
+user_signing_key: &Option<CrossSigningKey>,
+globals: &super::globals::Globals,
+) -> Result<()> {
+// TODO: Check signatures
+let mut prefix = user_id.to_string().as_bytes().to_vec();
+prefix.push(0xff);
+// Master key
+let mut master_key_ids = master_key.keys.values();
+let master_key_id = master_key_ids.next().ok_or(Error::BadRequest(
+ErrorKind::InvalidParam,
"Master key contained no key.",
))?;
if master_key_ids.next().is_some() {
return Err(Error::BadRequest(
ErrorKind::InvalidParam,
"Master key contained more than one key.",
));
}
let mut master_key_key = prefix.clone();
master_key_key.extend_from_slice(master_key_id.as_bytes());
self.keyid_key.insert(
&master_key_key,
&*serde_json::to_string(&master_key).expect("CrossSigningKey::to_string always works"),
)?;
self.userid_masterkeyid
.insert(&*user_id.to_string(), master_key_key)?;
// Self-signing key
if let Some(self_signing_key) = self_signing_key {
let mut self_signing_key_ids = self_signing_key.keys.values();
let self_signing_key_id = self_signing_key_ids.next().ok_or(Error::BadRequest(
ErrorKind::InvalidParam,
"Self signing key contained no key.",
))?;
if self_signing_key_ids.next().is_some() {
return Err(Error::BadRequest(
ErrorKind::InvalidParam,
"Self signing key contained more than one key.",
));
}
let mut self_signing_key_key = prefix.clone();
self_signing_key_key.extend_from_slice(self_signing_key_id.as_bytes());
self.keyid_key.insert(
&self_signing_key_key,
&*serde_json::to_string(&self_signing_key)
.expect("CrossSigningKey::to_string always works"),
)?;
self.userid_selfsigningkeyid
.insert(&*user_id.to_string(), self_signing_key_key)?;
}
// User-signing key
if let Some(user_signing_key) = user_signing_key {
let mut user_signing_key_ids = user_signing_key.keys.values();
let user_signing_key_id = user_signing_key_ids.next().ok_or(Error::BadRequest(
ErrorKind::InvalidParam,
"User signing key contained no key.",
))?;
if user_signing_key_ids.next().is_some() {
return Err(Error::BadRequest(
ErrorKind::InvalidParam,
"User signing key contained more than one key.",
));
}
let mut user_signing_key_key = prefix.clone();
user_signing_key_key.extend_from_slice(user_signing_key_id.as_bytes());
self.keyid_key.insert(
&user_signing_key_key,
&*serde_json::to_string(&user_signing_key)
.expect("CrossSigningKey::to_string always works"),
)?;
self.userid_usersigningkeyid
.insert(&*user_id.to_string(), user_signing_key_key)?;
}
self.keychangeid_userid
.insert(globals.next_count()?.to_be_bytes(), &*user_id.to_string())?;
Ok(())
}
-pub fn device_keys_changed(&self, since: u64) -> impl Iterator<Item = Result<UserId>> {
-self.devicekeychangeid_userid
-.range(since.to_be_bytes()..)
+pub fn sign_key(
+&self,
+target_id: &UserId,
key_id: &str,
signature: (String, String),
sender_id: &UserId,
globals: &super::globals::Globals,
) -> Result<()> {
println!(
"Adding signatures on {}'s {} by {}: {}->{}",
target_id, key_id, sender_id, signature.0, signature.1
);
let mut key = target_id.to_string().as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(key_id.to_string().as_bytes());
let mut cross_signing_key =
serde_json::from_slice::<serde_json::Value>(&self.keyid_key.get(&key)?.ok_or(
Error::BadRequest(ErrorKind::InvalidParam, "Tried to sign nonexistent key."),
)?)
.map_err(|_| Error::bad_database("key in keyid_key is invalid."))?;
let signatures = cross_signing_key
.get_mut("signatures")
.ok_or_else(|| Error::bad_database("key in keyid_key has no signatures field."))?
.as_object_mut()
.ok_or_else(|| Error::bad_database("key in keyid_key has invalid signatures field."))?
.entry(sender_id.clone())
.or_insert_with(|| serde_json::Map::new().into());
signatures
.as_object_mut()
.ok_or_else(|| Error::bad_database("signatures in keyid_key for a user is invalid."))?
.insert(signature.0, signature.1.into());
self.keyid_key.insert(
&key,
&*serde_json::to_string(&cross_signing_key)
.expect("CrossSigningKey::to_string always works"),
)?;
self.keychangeid_userid
.insert(globals.next_count()?.to_be_bytes(), &*target_id.to_string())?;
Ok(())
}
pub fn keys_changed(&self, since: u64) -> impl Iterator<Item = Result<UserId>> {
self.keychangeid_userid
.range((since + 1).to_be_bytes()..)
.values()
.map(|bytes| {
Ok(
@@ -397,29 +530,85 @@ impl Users {
})
}
-pub fn all_device_keys(
-&self,
-user_id: &UserId,
-) -> impl Iterator<Item = Result<(String, DeviceKeys)>> {
+pub fn get_device_keys(&self, user_id: &UserId, device_id: &str) -> Result<Option<DeviceKeys>> {
let mut key = user_id.to_string().as_bytes().to_vec();
key.push(0xff);
+key.extend_from_slice(device_id.as_bytes());
-self.userdeviceid_devicekeys.scan_prefix(key).map(|r| {
-let (key, value) = r?;
-let userdeviceid = utils::string_from_bytes(
-key.rsplit(|&b| b == 0xff)
-.next()
-.ok_or_else(|| Error::bad_database("UserDeviceID in db is invalid."))?,
-)
-.map_err(|_| Error::bad_database("UserDeviceId in db is invalid."))?;
-Ok((
-userdeviceid,
-serde_json::from_slice(&*value)
-.map_err(|_| Error::bad_database("DeviceKeys in db are invalid."))?,
-))
+self.keyid_key.get(key)?.map_or(Ok(None), |bytes| {
+Ok(Some(serde_json::from_slice(&bytes).map_err(|_| {
+Error::bad_database("DeviceKeys in db are invalid.")
+})?))
})
}
pub fn get_master_key(
&self,
user_id: &UserId,
sender_id: &UserId,
) -> Result<Option<CrossSigningKey>> {
// TODO: hide some signatures
self.userid_masterkeyid
.get(user_id.to_string())?
.map_or(Ok(None), |key| {
self.keyid_key.get(key)?.map_or(Ok(None), |bytes| {
let mut cross_signing_key = serde_json::from_slice::<CrossSigningKey>(&bytes)
.map_err(|_| {
Error::bad_database("CrossSigningKey in db is invalid.")
})?;
// A user is not allowed to see signatures from users other than himself and
// the target user
cross_signing_key.signatures = cross_signing_key
.signatures
.into_iter()
.filter(|(user, _)| user == user_id || user == sender_id)
.collect();
Ok(Some(cross_signing_key))
})
})
}
pub fn get_self_signing_key(
&self,
user_id: &UserId,
sender_id: &UserId,
) -> Result<Option<CrossSigningKey>> {
self.userid_selfsigningkeyid
.get(user_id.to_string())?
.map_or(Ok(None), |key| {
self.keyid_key.get(key)?.map_or(Ok(None), |bytes| {
let mut cross_signing_key = serde_json::from_slice::<CrossSigningKey>(&bytes)
.map_err(|_| {
Error::bad_database("CrossSigningKey in db is invalid.")
})?;
// A user is not allowed to see signatures from users other than himself and
// the target user
cross_signing_key.signatures = cross_signing_key
.signatures
.into_iter()
.filter(|(user, _)| user == user_id || user == sender_id)
.collect();
Ok(Some(cross_signing_key))
})
})
}
pub fn get_user_signing_key(&self, user_id: &UserId) -> Result<Option<CrossSigningKey>> {
self.userid_usersigningkeyid
.get(user_id.to_string())?
.map_or(Ok(None), |key| {
self.keyid_key.get(key)?.map_or(Ok(None), |bytes| {
Ok(Some(serde_json::from_slice(&bytes).map_err(|_| {
Error::bad_database("CrossSigningKey in db is invalid.")
})?))
})
})
}
pub fn add_to_device_event(
&self,
sender: &UserId,
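In get_master_key and get_self_signing_key above, signatures made by anyone other than the key's owner or the requesting user are stripped before the key is returned. A minimal standalone sketch of that filtering step, using a plain BTreeMap in place of the CrossSigningKey signature map (illustrative only, not the commit's code):

use std::collections::BTreeMap;

// Keep only signatures made by the key owner or by the requesting user,
// mirroring the filter in get_master_key/get_self_signing_key.
fn visible_signatures(
    signatures: BTreeMap<String, BTreeMap<String, String>>,
    owner: &str,
    sender: &str,
) -> BTreeMap<String, BTreeMap<String, String>> {
    signatures
        .into_iter()
        .filter(|(signer, _)| signer == owner || signer == sender)
        .collect()
}

fn main() {
    let mut sigs = BTreeMap::new();
    sigs.insert("@alice:example.com".to_owned(), BTreeMap::new());
    sigs.insert("@bob:example.com".to_owned(), BTreeMap::new());
    sigs.insert("@carol:example.com".to_owned(), BTreeMap::new());
    // Alice owns the key, Bob is asking for it: Carol's signature is hidden.
    let filtered = visible_signatures(sigs, "@alice:example.com", "@bob:example.com");
    assert_eq!(filtered.len(), 2);
}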


@@ -46,6 +46,12 @@ fn setup_rocket() -> rocket::Rocket {
client_server::upload_keys_route,
client_server::get_keys_route,
client_server::claim_keys_route,
client_server::create_backup_route,
client_server::update_backup_route,
client_server::get_latest_backup_route,
client_server::get_backup_route,
client_server::add_backup_keys_route,
client_server::get_backup_keys_route,
client_server::set_read_marker_route,
client_server::create_typing_event_route,
client_server::create_room_route,
@@ -90,6 +96,10 @@ fn setup_rocket() -> rocket::Rocket {
client_server::delete_device_route,
client_server::delete_devices_route,
client_server::options_route,
client_server::upload_signing_keys_route,
client_server::upload_signatures_route,
client_server::pushers_route,
client_server::set_pushers_route,
//server_server::well_known_server,
//server_server::get_server_version,
//server_server::get_server_keys,


@@ -1,7 +1,5 @@
use ruma::{
-events::push_rules::{
-ConditionalPushRule, PatternedPushRule, PushCondition, PushRule, Ruleset,
-},
+events::push_rules::{ConditionalPushRule, PatternedPushRule, PushCondition, Ruleset},
identifiers::UserId,
push::{Action, Tweak},
};