chore: update dependencies

Branch: next
Author: Timo Kösters, 2021-04-23 18:54:17 +02:00
Parent: bb234ca002
Commit: 23f81bfaf7
Signature: no known key found for this signature in database (GPG Key ID: 24DA7517711A2BA4)
4 changed files with 215 additions and 2988 deletions

Cargo.lock (generated, 2752 changed lines): diff suppressed because it is too large.

Cargo.toml

@@ -14,7 +14,7 @@ edition = "2018"
 [dependencies]
 # Used to handle requests
 # TODO: This can become optional as soon as proper configs are supported
-rocket = { git = "https://github.com/SergioBenitez/Rocket.git", rev = "93e62c86eddf7cc9a7fc40b044182f83f0d7d92a", features = ["tls"] } # Used to handle requests
+rocket = { git = "https://github.com/SergioBenitez/Rocket.git", rev = "e1307ddf48dac14e6a37e526098732327bcb86f0", features = ["tls"] } # Used to handle requests
 #rocket = { git = "https://github.com/timokoesters/Rocket.git", branch = "empty_parameters", default-features = false, features = ["tls"] }
 # Used for matrix spec type definitions and helpers

src/main.rs

@@ -21,7 +21,6 @@ pub use ruma_wrapper::{ConduitResult, Ruma, RumaResponse};
 use rocket::{
     catch, catchers,
-    fairing::AdHoc,
     figment::{
         providers::{Env, Format, Toml},
         Figment,
@@ -31,9 +30,9 @@ use rocket::{
 use tracing::span;
 use tracing_subscriber::{prelude::*, Registry};
 
-fn setup_rocket() -> (rocket::Rocket, Config) {
+async fn setup_rocket() -> (rocket::Rocket<rocket::Build>, Config) {
     // Force log level off, so we can use our own logger
-    std::env::set_var("CONDUIT_LOG_LEVEL", "off");
+    //std::env::set_var("CONDUIT_LOG_LEVEL", "off");
 
     let config =
         Figment::from(rocket::Config::release_default())
@@ -48,9 +47,15 @@ fn setup_rocket() -> (rocket::Rocket, Config) {
     let parsed_config = config
         .extract::<Config>()
         .expect("It looks like your config is invalid. Please take a look at the error");
-    let parsed_config2 = parsed_config.clone();
+
+    let data = Database::load_or_create(parsed_config.clone())
+        .await
+        .expect("config is valid");
+
+    data.sending.start_handler(&data);
 
     let rocket = rocket::custom(config)
+        .manage(data)
         .mount(
             "/",
             routes![
@@ -176,29 +181,23 @@ fn setup_rocket() -> (rocket::Rocket, Config) {
             server_server::get_profile_information_route,
         ],
     )
-    .register(catchers![
-        not_found_catcher,
-        forbidden_catcher,
-        unknown_token_catcher,
-        missing_token_catcher,
-        bad_json_catcher
-    ])
-    .attach(AdHoc::on_attach("Config", |rocket| async {
-        let data = Database::load_or_create(parsed_config2)
-            .await
-            .expect("config is valid");
-
-        data.sending.start_handler(&data);
-
-        Ok(rocket.manage(data))
-    }));
+    .register(
+        "/",
+        catchers![
+            not_found_catcher,
+            forbidden_catcher,
+            unknown_token_catcher,
+            missing_token_catcher,
+            bad_json_catcher
+        ],
+    );
 
     (rocket, parsed_config)
 }
 
 #[rocket::main]
 async fn main() {
-    let (rocket, config) = setup_rocket();
+    let (rocket, config) = setup_rocket().await;
 
     if config.allow_jaeger {
         let (tracer, _uninstall) = opentelemetry_jaeger::new_pipeline()
@@ -213,11 +212,11 @@ async fn main() {
         rocket.launch().await.unwrap();
     } else {
-        std::env::set_var("CONDUIT_LOG", config.log);
-        pretty_env_logger::init_custom_env("CONDUIT_LOG");
+        //std::env::set_var("CONDUIT_LOG", config.log);
+        //pretty_env_logger::init_custom_env("CONDUIT_LOG");
 
-        let root = span!(tracing::Level::INFO, "app_start", work_units = 2);
-        let _enter = root.enter();
+        //let root = span!(tracing::Level::INFO, "app_start", work_units = 2);
+        //let _enter = root.enter();
 
         rocket.launch().await.unwrap();
     }
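
Note: the main.rs change follows the newer Rocket API. Setup becomes async, the Database is built up front and attached with .manage() instead of inside an AdHoc::on_attach fairing (so the Ok(rocket.manage(data)) plumbing disappears), and catchers are registered under a base path. A minimal sketch of that startup shape against the released Rocket 0.5 API; AppState, index, and not_found are hypothetical stand-ins, not Conduit's actual state or routes:

// Minimal sketch of the startup pattern adopted above, written against the
// released Rocket 0.5 API. `AppState`, `index`, and `not_found` are hypothetical
// stand-ins, not Conduit's actual state or routes.
use rocket::{catch, catchers, get, routes, Build, Rocket, State};

struct AppState {
    // e.g. a database handle that request guards can later borrow via &State<AppState>
    greeting: String,
}

#[get("/")]
fn index(state: &State<AppState>) -> String {
    state.greeting.clone()
}

#[catch(404)]
fn not_found() -> &'static str {
    "not found"
}

async fn setup() -> Rocket<Build> {
    // State is created (possibly with async work such as opening a database)
    // before the Rocket instance exists, then attached with `.manage()`.
    let state = AppState { greeting: "hello".to_owned() };

    rocket::build()
        .manage(state)
        .mount("/", routes![index])
        // Catchers now take a base path, mirroring `.register("/", catchers![...])` above.
        .register("/", catchers![not_found])
}

#[rocket::main]
async fn main() {
    setup().await.launch().await.unwrap();
}

Building the state before the Rocket instance keeps the handle available to request guards without the attach-fairing indirection the old code needed.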

src/ruma_wrapper.rs

@@ -11,10 +11,7 @@ use {
     crate::{server_server, utils},
     log::{debug, warn},
     rocket::{
-        data::{
-            ByteUnit, Data, FromDataFuture, FromTransformedData, Transform, TransformFuture,
-            Transformed,
-        },
+        data::{self, ByteUnit, Data, FromData},
         http::Status,
         outcome::Outcome::*,
         response::{self, Responder},
@@ -42,106 +39,92 @@ pub struct Ruma<T: Outgoing> {
 }
 
 #[cfg(feature = "conduit_bin")]
-impl<'a, T: Outgoing> FromTransformedData<'a> for Ruma<T>
+#[rocket::async_trait]
+impl<'a, T: Outgoing> FromData<'a> for Ruma<T>
 where
     T::Incoming: IncomingRequest,
 {
     type Error = ();
-    type Owned = Data;
-    type Borrowed = Self::Owned;
 
-    fn transform<'r>(
-        _req: &'r Request<'_>,
-        data: Data,
-    ) -> TransformFuture<'r, Self::Owned, Self::Error> {
-        Box::pin(async move { Transform::Owned(Success(data)) })
-    }
-
-    fn from_data(
-        request: &'a Request<'_>,
-        outcome: Transformed<'a, Self>,
-    ) -> FromDataFuture<'a, Self, Self::Error> {
+    async fn from_data(request: &'a Request<'_>, data: Data) -> data::Outcome<Self, Self::Error> {
         let metadata = T::Incoming::METADATA;
 
-        Box::pin(async move {
-            let data = rocket::try_outcome!(outcome.owned());
         let db = request
             .guard::<State<'_, crate::Database>>()
             .await
             .expect("database was loaded");
 
         // Get token from header or query value
         let token = request
             .headers()
             .get_one("Authorization")
             .map(|s| s[7..].to_owned()) // Split off "Bearer "
-            .or_else(|| request.get_query_value("access_token").and_then(|r| r.ok()));
+            .or_else(|| request.query_value("access_token").and_then(|r| r.ok()));
 
         let limit = db.globals.max_request_size();
         let mut handle = data.open(ByteUnit::Byte(limit.into()));
         let mut body = Vec::new();
         handle.read_to_end(&mut body).await.unwrap();
 
-        let (sender_user, sender_device, from_appservice) = if let Some((_id, registration)) =
-            db.appservice
+        let (sender_user, sender_device, from_appservice) = if let Some((_id, registration)) = db
+            .appservice
             .iter_all()
             .filter_map(|r| r.ok())
             .find(|(_id, registration)| {
                 registration
                     .get("as_token")
                     .and_then(|as_token| as_token.as_str())
                     .map_or(false, |as_token| token.as_deref() == Some(as_token))
             }) {
             match metadata.authentication {
                 AuthScheme::AccessToken | AuthScheme::QueryOnlyAccessToken => {
-                    let user_id = request.get_query_value::<String>("user_id").map_or_else(
+                    let user_id = request.query_value::<String>("user_id").map_or_else(
                         || {
                             UserId::parse_with_server_name(
                                 registration
                                     .get("sender_localpart")
                                     .unwrap()
                                     .as_str()
                                     .unwrap(),
                                 db.globals.server_name(),
                             )
                             .unwrap()
                         },
                         |string| {
                             UserId::try_from(string.expect("parsing to string always works"))
                                 .unwrap()
                         },
                     );
 
                     if !db.users.exists(&user_id).unwrap() {
                         // Forbidden
                         return Failure((Status::raw(580), ()));
                     }
 
                     // TODO: Check if appservice is allowed to be that user
                     (Some(user_id), None, true)
                 }
                 AuthScheme::ServerSignatures => (None, None, true),
                 AuthScheme::None => (None, None, true),
             }
         } else {
             match metadata.authentication {
                 AuthScheme::AccessToken | AuthScheme::QueryOnlyAccessToken => {
                     if let Some(token) = token {
                         match db.users.find_from_token(&token).unwrap() {
                             // Unknown Token
                             None => return Failure((Status::raw(581), ())),
                             Some((user_id, device_id)) => {
                                 (Some(user_id), Some(device_id.into()), false)
                             }
                         }
                     } else {
                         // Missing Token
                         return Failure((Status::raw(582), ()));
                     }
                 }
                 AuthScheme::ServerSignatures => {
                     // Get origin from header
                     let x_matrix = match request
                         .headers()
                         .get_one("Authorization")
                         .map(|s| {
@@ -158,153 +141,150 @@ where
                         }
                     };
 
                     let origin_str = match x_matrix.get(&Some("origin")) {
                         Some(Some(o)) => *o,
                         _ => {
                             warn!("Invalid X-Matrix header origin field: {:?}", x_matrix);
                             // Forbidden
                             return Failure((Status::raw(580), ()));
                         }
                     };
 
                     let origin = match Box::<ServerName>::try_from(origin_str) {
                         Ok(s) => s,
                         _ => {
                             warn!(
                                 "Invalid server name in X-Matrix header origin field: {:?}",
                                 x_matrix
                             );
                             // Forbidden
                             return Failure((Status::raw(580), ()));
                         }
                     };
 
                     let key = match x_matrix.get(&Some("key")) {
                         Some(Some(k)) => *k,
                         _ => {
                             warn!("Invalid X-Matrix header key field: {:?}", x_matrix);
                             // Forbidden
                             return Failure((Status::raw(580), ()));
                         }
                     };
 
                     let sig = match x_matrix.get(&Some("sig")) {
                         Some(Some(s)) => *s,
                         _ => {
                             warn!("Invalid X-Matrix header sig field: {:?}", x_matrix);
                             // Forbidden
                             return Failure((Status::raw(580), ()));
                         }
                     };
 
                     let json_body = serde_json::from_slice::<CanonicalJsonValue>(&body);
                     let mut request_map = BTreeMap::<String, CanonicalJsonValue>::new();
                     if let Ok(json_body) = json_body {
                         request_map.insert("content".to_owned(), json_body);
                     };
 
                     request_map.insert(
                         "method".to_owned(),
                         CanonicalJsonValue::String(request.method().to_string()),
                     );
                     request_map.insert(
                         "uri".to_owned(),
                         CanonicalJsonValue::String(request.uri().to_string()),
                     );
+
+                    println!("{}: {:?}", origin, request.uri().to_string());
+
                     request_map.insert(
                         "origin".to_owned(),
                         CanonicalJsonValue::String(origin.as_str().to_owned()),
                     );
                     request_map.insert(
                         "destination".to_owned(),
-                        CanonicalJsonValue::String(
-                            db.globals.server_name().as_str().to_owned(),
-                        ),
+                        CanonicalJsonValue::String(db.globals.server_name().as_str().to_owned()),
                     );
 
                     let mut origin_signatures = BTreeMap::new();
                     origin_signatures
                         .insert(key.to_owned(), CanonicalJsonValue::String(sig.to_owned()));
 
                     let mut signatures = BTreeMap::new();
                     signatures.insert(
                         origin.as_str().to_owned(),
                         CanonicalJsonValue::Object(origin_signatures),
                     );
 
                     request_map.insert(
                         "signatures".to_owned(),
                         CanonicalJsonValue::Object(signatures),
                     );
 
                     let keys = match server_server::fetch_signing_keys(
                         &db,
                         &origin,
                         vec![&key.to_owned()],
                     )
                     .await
                     {
                         Ok(b) => b,
                         Err(e) => {
                             warn!("Failed to fetch signing keys: {}", e);
                             // Forbidden
                             return Failure((Status::raw(580), ()));
                         }
                     };
 
                     let mut pub_key_map = BTreeMap::new();
                     pub_key_map.insert(origin.as_str().to_owned(), keys);
 
                     match ruma::signatures::verify_json(&pub_key_map, &request_map) {
                         Ok(()) => (None, None, false),
                         Err(e) => {
-                            warn!(
-                                "Failed to verify json request: {}: {:?} {:?}",
-                                e, pub_key_map, request_map
-                            );
+                            warn!("Failed to verify json request from {}: {}", origin, e,);
                             // Forbidden
                             return Failure((Status::raw(580), ()));
                         }
                     }
                 }
                 AuthScheme::None => (None, None, false),
             }
         };
 
         let mut http_request = http::Request::builder()
             .uri(request.uri().to_string())
             .method(&*request.method().to_string());
         for header in request.headers().iter() {
             http_request = http_request.header(header.name.as_str(), &*header.value);
         }
 
         let http_request = http_request.body(&*body).unwrap();
         debug!("{:?}", http_request);
         match <T::Incoming as IncomingRequest>::try_from_http_request(http_request) {
             Ok(t) => Success(Ruma {
                 body: t,
                 sender_user,
                 sender_device,
                 // TODO: Can we avoid parsing it again? (We only need this for append_pdu)
                 json_body: utils::string_from_bytes(&body)
                     .ok()
                     .and_then(|s| serde_json::value::RawValue::from_string(s).ok()),
                 from_appservice,
             }),
             Err(e) => {
                 warn!("{:?}", e);
                 Failure((Status::raw(583), ()))
             }
         }
-        })
     }
 }
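
Note: the ruma_wrapper.rs change replaces the FromTransformedData implementation with Rocket's async FromData trait, so the separate transform step and the Box::pin-wrapped future go away and the body is read directly inside a single async fn. A minimal sketch of a body guard using the FromData shape as it appears in this commit's pinned Rocket revision (the released Rocket 0.5 API differs slightly, e.g. Data<'r>); RawBody, the 1 MiB limit, and the AsyncReadExt import path are assumptions for illustration, not Conduit code:

// Minimal sketch of a data guard under the `FromData` shape shown in the diff above
// (signature copied from this commit's pinned Rocket revision; released Rocket 0.5
// uses `Data<'r>`). `RawBody` and the 1 MiB limit are illustrative, not Conduit code.
use rocket::{
    data::{self, ByteUnit, Data, FromData},
    http::Status,
    outcome::Outcome::*,
    Request,
};
use tokio::io::AsyncReadExt; // assumed import for `read_to_end` on the opened data stream

pub struct RawBody(pub Vec<u8>);

#[rocket::async_trait]
impl<'a> FromData<'a> for RawBody {
    type Error = ();

    async fn from_data(_request: &'a Request<'_>, data: Data) -> data::Outcome<Self, Self::Error> {
        // Open the body with an explicit size limit and read it fully, as the
        // Ruma wrapper does before handing the bytes to ruma's request parser.
        let mut handle = data.open(ByteUnit::Byte(1024 * 1024));
        let mut body = Vec::new();
        if handle.read_to_end(&mut body).await.is_err() {
            return Failure((Status::raw(583), ()));
        }

        Success(RawBody(body))
    }
}

Because the trait method is now an async fn (via #[rocket::async_trait]), the database guard, token extraction, and signature verification in the wrapper above can all await directly, which is why the old Box::pin wrapper and the transform/Transformed plumbing disappear from the diff.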