diff --git a/Cargo.lock b/Cargo.lock
index dc215c3..646cdcc 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1634,7 +1634,7 @@ dependencies = [
[[package]]
name = "ruma"
version = "0.0.1"
-source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#ca07bb61d88fd665464dab9707de6d47048fc225"
+source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#63341000fbabce9b230b6665ce65c617944408fa"
dependencies = [
"ruma-api",
"ruma-appservice-api",
@@ -1650,7 +1650,7 @@ dependencies = [
[[package]]
name = "ruma-api"
version = "0.17.0-alpha.1"
-source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#ca07bb61d88fd665464dab9707de6d47048fc225"
+source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#63341000fbabce9b230b6665ce65c617944408fa"
dependencies = [
"http",
"percent-encoding",
@@ -1665,7 +1665,7 @@ dependencies = [
[[package]]
name = "ruma-api-macros"
version = "0.17.0-alpha.1"
-source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#ca07bb61d88fd665464dab9707de6d47048fc225"
+source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#63341000fbabce9b230b6665ce65c617944408fa"
dependencies = [
"proc-macro-crate",
"proc-macro2",
@@ -1676,7 +1676,7 @@ dependencies = [
[[package]]
name = "ruma-appservice-api"
version = "0.2.0-alpha.1"
-source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#ca07bb61d88fd665464dab9707de6d47048fc225"
+source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#63341000fbabce9b230b6665ce65c617944408fa"
dependencies = [
"ruma-api",
"ruma-common",
@@ -1689,7 +1689,7 @@ dependencies = [
[[package]]
name = "ruma-client-api"
version = "0.10.0-alpha.1"
-source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#ca07bb61d88fd665464dab9707de6d47048fc225"
+source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#63341000fbabce9b230b6665ce65c617944408fa"
dependencies = [
"assign",
"http",
@@ -1708,7 +1708,7 @@ dependencies = [
[[package]]
name = "ruma-common"
version = "0.2.0"
-source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#ca07bb61d88fd665464dab9707de6d47048fc225"
+source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#63341000fbabce9b230b6665ce65c617944408fa"
dependencies = [
"js_int",
"ruma-api",
@@ -1722,7 +1722,7 @@ dependencies = [
[[package]]
name = "ruma-events"
version = "0.22.0-alpha.1"
-source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#ca07bb61d88fd665464dab9707de6d47048fc225"
+source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#63341000fbabce9b230b6665ce65c617944408fa"
dependencies = [
"js_int",
"ruma-common",
@@ -1737,7 +1737,7 @@ dependencies = [
[[package]]
name = "ruma-events-macros"
version = "0.22.0-alpha.1"
-source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#ca07bb61d88fd665464dab9707de6d47048fc225"
+source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#63341000fbabce9b230b6665ce65c617944408fa"
dependencies = [
"proc-macro-crate",
"proc-macro2",
@@ -1748,7 +1748,7 @@ dependencies = [
[[package]]
name = "ruma-federation-api"
version = "0.0.3"
-source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#ca07bb61d88fd665464dab9707de6d47048fc225"
+source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#63341000fbabce9b230b6665ce65c617944408fa"
dependencies = [
"js_int",
"ruma-api",
@@ -1763,7 +1763,7 @@ dependencies = [
[[package]]
name = "ruma-identifiers"
version = "0.17.4"
-source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#ca07bb61d88fd665464dab9707de6d47048fc225"
+source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#63341000fbabce9b230b6665ce65c617944408fa"
dependencies = [
"rand",
"ruma-identifiers-macros",
@@ -1775,7 +1775,7 @@ dependencies = [
[[package]]
name = "ruma-identifiers-macros"
version = "0.17.4"
-source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#ca07bb61d88fd665464dab9707de6d47048fc225"
+source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#63341000fbabce9b230b6665ce65c617944408fa"
dependencies = [
"proc-macro2",
"quote",
@@ -1786,7 +1786,7 @@ dependencies = [
[[package]]
name = "ruma-identifiers-validation"
version = "0.1.1"
-source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#ca07bb61d88fd665464dab9707de6d47048fc225"
+source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#63341000fbabce9b230b6665ce65c617944408fa"
dependencies = [
"serde",
"strum",
@@ -1795,7 +1795,7 @@ dependencies = [
[[package]]
name = "ruma-serde"
version = "0.2.3"
-source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#ca07bb61d88fd665464dab9707de6d47048fc225"
+source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#63341000fbabce9b230b6665ce65c617944408fa"
dependencies = [
"form_urlencoded",
"itoa",
@@ -1807,7 +1807,7 @@ dependencies = [
[[package]]
name = "ruma-signatures"
version = "0.6.0-dev.1"
-source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#ca07bb61d88fd665464dab9707de6d47048fc225"
+source = "git+https://github.com/timokoesters/ruma?branch=timo-fed-fixes#63341000fbabce9b230b6665ce65c617944408fa"
dependencies = [
"base64",
"ring",
diff --git a/Dockerfile b/Dockerfile
index fa4b16d..ff84ac6 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -53,10 +53,10 @@ LABEL org.opencontainers.image.created=${CREATED} \
org.opencontainers.image.url="https://conduit.rs/" \
org.opencontainers.image.revision=${GIT_REF} \
org.opencontainers.image.source="https://git.koesters.xyz/timo/conduit.git" \
- org.opencontainers.image.documentation.="" \
org.opencontainers.image.licenses="AGPL-3.0-only" \
+ org.opencontainers.image.documentation="" \
org.opencontainers.image.ref.name="" \
- org.label-schema.docker.build="docker build . -t conduit_homeserver:latest --build-arg CREATED=$(date -u +'%Y-%m-%dT%H:%M:%SZ') --build-arg VERSION=$(grep -m1 -o '[0-9].[0-9].[0-9]' Cargo.toml)" \
+ org.label-schema.docker.build="docker build . -t matrixconduit/matrix-conduit:latest --build-arg CREATED=$(date -u +'%Y-%m-%dT%H:%M:%SZ') --build-arg VERSION=$(grep -m1 -o '[0-9].[0-9].[0-9]' Cargo.toml)" \
maintainer="Weasy666"
# Standard port on which Rocket launches
@@ -81,11 +81,15 @@ RUN chown -cR www-data:www-data /srv/conduit
# Install packages needed to run Conduit
RUN apk add --no-cache \
ca-certificates \
+ curl \
libgcc
# Create a volume for the database, to persist its contents
VOLUME ["/srv/conduit/.local/share/conduit"]
+# Test if Conduit is still alive, uses the same endpoint as Element
+HEALTHCHECK --start-period=2s CMD curl --fail -s http://localhost:8000/_matrix/client/versions || curl -k --fail -s https://localhost:8000/_matrix/client/versions || exit 1
+
# Set user to www-data
USER www-data
# Set container home directory
diff --git a/README.md b/README.md
index ad13089..44ab0d6 100644
--- a/README.md
+++ b/README.md
@@ -25,7 +25,14 @@ Clone the repo, build it with `cargo build --release` and call the binary
##### Using Docker
-Build the docker image and run it with docker or docker-compose. [Read more](docker/README.md)
+Pull and run the docker image with
+
+``` bash
+docker pull matrixconduit/matrix-conduit:latest
docker run -d -p 8448:8000 -v db:/srv/conduit/.local/share/conduit matrixconduit/matrix-conduit:latest
+```
+
+Or build and run it with docker or docker-compose. [Read more](docker/README.md)
#### What is it built on?
diff --git a/docker-compose.yml b/docker-compose.yml
index afd3699..f06eaca 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -3,18 +3,19 @@ version: '3'
services:
homeserver:
- ### If you already built the Conduit image with 'docker build', then you can uncomment the
- ### 'image' line and comment out the 'build' option.
- # image: conduit_homeserver:latest
- ### If you want meaningful labels in you built Conduit image, you should run docker-compose like this:
+ ### If you already built the Conduit image with 'docker build' or want to use the Docker Hub image,
+ ### then you are ready to go.
+ image: matrixconduit/matrix-conduit:latest
+ ### If you want to build a fresh image from the sources, then comment the image line and uncomment the
+ ### build lines. If you want meaningful labels in your built Conduit image, you should run docker-compose like this:
### CREATED=$(date -u +'%Y-%m-%dT%H:%M:%SZ') VERSION=$(grep -m1 -o '[0-9].[0-9].[0-9]' Cargo.toml) docker-compose up -d
- build:
- context: .
- args:
- CREATED:
- VERSION:
- LOCAL: "false"
- GIT_REF: HEAD
+ # build:
+ # context: .
+ # args:
+ # CREATED:
+ # VERSION:
+ # LOCAL: 'false'
+ # GIT_REF: HEAD
restart: unless-stopped
ports:
- 8448:8000
diff --git a/docker/README.md b/docker/README.md
index 5a6ecde..c569c5f 100644
--- a/docker/README.md
+++ b/docker/README.md
@@ -28,10 +28,10 @@ ARG GIT_REF=HEAD
To build the image you can use the following command
``` bash
-docker build . -t conduit_homeserver:latest --build-arg CREATED=$(date -u +'%Y-%m-%dT%H:%M:%SZ') --build-arg VERSION=$(grep -m1 -o '[0-9].[0-9].[0-9]' Cargo.toml)
+docker build . -t matrixconduit/matrix-conduit:latest --build-arg CREATED=$(date -u +'%Y-%m-%dT%H:%M:%SZ') --build-arg VERSION=$(grep -m1 -o '[0-9].[0-9].[0-9]' Cargo.toml)
```
-which also will tag the resulting image as `conduit_homeserver:latest`.
+which will also tag the resulting image as `matrixconduit/matrix-conduit:latest`.
**Note:** it omits the two optional `build-arg`s.
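If you also want to set them, an invocation with all four build arguments could look like this (an illustrative sketch; `LOCAL` and `GIT_REF` take the same values as in the `docker-compose.yml` build section):
``` bash
docker build . -t matrixconduit/matrix-conduit:latest \
    --build-arg CREATED=$(date -u +'%Y-%m-%dT%H:%M:%SZ') \
    --build-arg VERSION=$(grep -m1 -o '[0-9].[0-9].[0-9]' Cargo.toml) \
    --build-arg LOCAL=false \
    --build-arg GIT_REF=HEAD
```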
@@ -40,7 +40,7 @@ which also will tag the resulting image as `conduit_homeserver:latest`.
After building the image you can simply run it with
``` bash
-docker run conduit_homeserver:latest -p 8448:8000 -v db:/srv/conduit/.local/share/conduit -e ROCKET_SERVER_NAME="localhost:8000"
+docker run -d -p 8448:8000 -v db:/srv/conduit/.local/share/conduit -e ROCKET_SERVER_NAME="localhost:8000" matrixconduit/matrix-conduit:latest
```
The `-d` flag runs the container in detached mode. You can pass in more environment variables with `-e`; for an overview of possible values, take a look at the `docker-compose.yml` file.
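For example, a run that overrides the server name and the log level could look like this (illustrative values; `ROCKET_SERVER_NAME` also appears in `docker-compose.yml`, and `ROCKET_LOG` is a standard Rocket setting):
``` bash
docker run -d -p 8448:8000 \
    -v db:/srv/conduit/.local/share/conduit \
    -e ROCKET_SERVER_NAME="my.matrix.host" \
    -e ROCKET_LOG="normal" \
    matrixconduit/matrix-conduit:latest
```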
@@ -49,7 +49,7 @@ If you just want to test Conduit for a short time, you can use the `--rm` flag,
## Docker-compose
-If the docker command is not for you or your setup, you can also use one of the provided `docker-compose` files. Depending on your proxy setup, use the `docker-compose.traefik.yml` including `docker-compose.override.traefik.yml` or the normal `docker-compose.yml` for every other reverse proxy.
+If the docker command is not for you or your setup, you can also use one of the provided `docker-compose` files. Depending on your proxy setup, use the [`docker-compose.traefik.yml`](docker-compose.traefik.yml) including [`docker-compose.override.traefik.yml`](docker-compose.override.traefik.yml) or the normal [`docker-compose.yml`](../docker-compose.yml) for every other reverse proxy.
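With the Traefik setup that usually means passing both files explicitly, for example (a sketch, run from the `docker/` directory):
``` bash
docker-compose -f docker-compose.traefik.yml -f docker-compose.override.traefik.yml up -d
```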
### Build
diff --git a/docker/docker-compose.traefik.yml b/docker/docker-compose.traefik.yml
index ad1dad8..111eaa5 100644
--- a/docker/docker-compose.traefik.yml
+++ b/docker/docker-compose.traefik.yml
@@ -3,18 +3,19 @@ version: '3'
services:
homeserver:
- ### If you already built the Conduit image with 'docker build', then you can uncomment the
- ### 'image' line and comment out the 'build' option.
- # image: conduit_homeserver:latest
- ### If you want meaningful labels in you built Conduit image, you should run docker-compose like this:
+ ### If you already built the Conduit image with 'docker build' or want to use the Docker Hub image,
+ ### then you are ready to go.
+ image: matrixconduit/matrix-conduit:latest
+ ### If you want to build a fresh image from the sources, then comment the image line and uncomment the
+ ### build lines. If you want meaningful labels in your built Conduit image, you should run docker-compose like this:
### CREATED=$(date -u +'%Y-%m-%dT%H:%M:%SZ') VERSION=$(grep -m1 -o '[0-9].[0-9].[0-9]' Cargo.toml) docker-compose up -d
- build:
- context: .
- args:
- CREATED:
- VERSION:
- LOCAL: false
- GIT_REF: HEAD
+ # build:
+ # context: .
+ # args:
+ # CREATED:
+ # VERSION:
+ # LOCAL: 'false'
+ # GIT_REF: HEAD
restart: unless-stopped
volumes:
- db:/srv/conduit/.local/share/conduit
diff --git a/src/client_server/backup.rs b/src/client_server/backup.rs
index 8966c01..5d9a925 100644
--- a/src/client_server/backup.rs
+++ b/src/client_server/backup.rs
@@ -3,13 +3,15 @@ use crate::{ConduitResult, Database, Error, Ruma};
use ruma::api::client::{
error::ErrorKind,
r0::backup::{
- add_backup_keys, create_backup, get_backup, get_backup_keys, get_latest_backup,
- update_backup,
+ add_backup_key_session, add_backup_key_sessions, add_backup_keys, create_backup,
+ delete_backup, delete_backup_key_session, delete_backup_key_sessions, delete_backup_keys,
+ get_backup, get_backup_key_session, get_backup_key_sessions, get_backup_keys,
+ get_latest_backup, update_backup,
},
};
#[cfg(feature = "conduit_bin")]
-use rocket::{get, post, put};
+use rocket::{delete, get, post, put};
#[cfg_attr(
feature = "conduit_bin",
@@ -95,7 +97,22 @@ pub fn get_backup_route(
.into())
}
-/// Add the received backup_keys to the database.
+#[cfg_attr(
+ feature = "conduit_bin",
+    delete("/_matrix/client/unstable/room_keys/version/<_>", data = "<body>")
+)]
+pub fn delete_backup_route(
+ db: State<'_, Database>,
+    body: Ruma<delete_backup::Request>,
+) -> ConduitResult<delete_backup::Response> {
+ let sender_id = body.sender_id.as_ref().expect("user is authenticated");
+
+ db.key_backups.delete_backup(&sender_id, &body.version)?;
+
+ Ok(delete_backup::Response.into())
+}
+
+/// Add the received backup keys to the database.
#[cfg_attr(
feature = "conduit_bin",
put("/_matrix/client/unstable/room_keys/keys", data = "")
@@ -126,6 +143,62 @@ pub fn add_backup_keys_route(
.into())
}
+/// Add the received backup keys to the database.
+#[cfg_attr(
+ feature = "conduit_bin",
+    put("/_matrix/client/unstable/room_keys/keys/<_>", data = "<body>")
+)]
+pub fn add_backup_key_sessions_route(
+ db: State<'_, Database>,
+    body: Ruma<add_backup_key_sessions::Request>,
+) -> ConduitResult<add_backup_key_sessions::Response> {
+ let sender_id = body.sender_id.as_ref().expect("user is authenticated");
+
+ for (session_id, key_data) in &body.sessions {
+ db.key_backups.add_key(
+ &sender_id,
+ &body.version,
+ &body.room_id,
+ &session_id,
+ &key_data,
+ &db.globals,
+ )?
+ }
+
+ Ok(add_backup_key_sessions::Response {
+ count: (db.key_backups.count_keys(sender_id, &body.version)? as u32).into(),
+ etag: db.key_backups.get_etag(sender_id, &body.version)?,
+ }
+ .into())
+}
+
+/// Add the received backup key to the database.
+#[cfg_attr(
+ feature = "conduit_bin",
+    put("/_matrix/client/unstable/room_keys/keys/<_>/<_>", data = "<body>")
+)]
+pub fn add_backup_key_session_route(
+ db: State<'_, Database>,
+    body: Ruma<add_backup_key_session::Request>,
+) -> ConduitResult<add_backup_key_session::Response> {
+ let sender_id = body.sender_id.as_ref().expect("user is authenticated");
+
+ db.key_backups.add_key(
+ &sender_id,
+ &body.version,
+ &body.room_id,
+ &body.session_id,
+ &body.session_data,
+ &db.globals,
+ )?;
+
+ Ok(add_backup_key_session::Response {
+ count: (db.key_backups.count_keys(sender_id, &body.version)? as u32).into(),
+ etag: db.key_backups.get_etag(sender_id, &body.version)?,
+ }
+ .into())
+}
+
#[cfg_attr(
feature = "conduit_bin",
get("/_matrix/client/unstable/room_keys/keys", data = "")
@@ -140,3 +213,96 @@ pub fn get_backup_keys_route(
Ok(get_backup_keys::Response { rooms }.into())
}
+
+#[cfg_attr(
+ feature = "conduit_bin",
+    get("/_matrix/client/unstable/room_keys/keys/<_>", data = "<body>")
+)]
+pub fn get_backup_key_sessions_route(
+ db: State<'_, Database>,
+    body: Ruma<get_backup_key_sessions::Request>,
+) -> ConduitResult<get_backup_key_sessions::Response> {
+ let sender_id = body.sender_id.as_ref().expect("user is authenticated");
+
+ let sessions = db
+ .key_backups
+ .get_room(&sender_id, &body.version, &body.room_id);
+
+ Ok(get_backup_key_sessions::Response { sessions }.into())
+}
+
+#[cfg_attr(
+ feature = "conduit_bin",
+    get("/_matrix/client/unstable/room_keys/keys/<_>/<_>", data = "<body>")
+)]
+pub fn get_backup_key_session_route(
+ db: State<'_, Database>,
+    body: Ruma<get_backup_key_session::Request>,
+) -> ConduitResult<get_backup_key_session::Response> {
+ let sender_id = body.sender_id.as_ref().expect("user is authenticated");
+
+ let key_data =
+ db.key_backups
+ .get_session(&sender_id, &body.version, &body.room_id, &body.session_id)?;
+
+ Ok(get_backup_key_session::Response { key_data }.into())
+}
+
+#[cfg_attr(
+ feature = "conduit_bin",
+    delete("/_matrix/client/unstable/room_keys/keys", data = "<body>")
+)]
+pub fn delete_backup_keys_route(
+ db: State<'_, Database>,
+    body: Ruma<delete_backup_keys::Request>,
+) -> ConduitResult<delete_backup_keys::Response> {
+ let sender_id = body.sender_id.as_ref().expect("user is authenticated");
+
+ db.key_backups.delete_all_keys(&sender_id, &body.version)?;
+
+ Ok(delete_backup_keys::Response {
+ count: (db.key_backups.count_keys(sender_id, &body.version)? as u32).into(),
+ etag: db.key_backups.get_etag(sender_id, &body.version)?,
+ }
+ .into())
+}
+
+#[cfg_attr(
+ feature = "conduit_bin",
+    delete("/_matrix/client/unstable/room_keys/keys/<_>", data = "<body>")
+)]
+pub fn delete_backup_key_sessions_route(
+ db: State<'_, Database>,
+    body: Ruma<delete_backup_key_sessions::Request>,
+) -> ConduitResult<delete_backup_key_sessions::Response> {
+ let sender_id = body.sender_id.as_ref().expect("user is authenticated");
+
+ db.key_backups
+ .delete_room_keys(&sender_id, &body.version, &body.room_id)?;
+
+ Ok(delete_backup_key_sessions::Response {
+ count: (db.key_backups.count_keys(sender_id, &body.version)? as u32).into(),
+ etag: db.key_backups.get_etag(sender_id, &body.version)?,
+ }
+ .into())
+}
+
+#[cfg_attr(
+ feature = "conduit_bin",
+    delete("/_matrix/client/unstable/room_keys/keys/<_>/<_>", data = "<body>")
+)]
+pub fn delete_backup_key_session_route(
+ db: State<'_, Database>,
+    body: Ruma<delete_backup_key_session::Request>,
+) -> ConduitResult<delete_backup_key_session::Response> {
+ let sender_id = body.sender_id.as_ref().expect("user is authenticated");
+
+ db.key_backups
+ .delete_room_key(&sender_id, &body.version, &body.room_id, &body.session_id)?;
+
+ Ok(delete_backup_key_session::Response {
+ count: (db.key_backups.count_keys(sender_id, &body.version)? as u32).into(),
+ etag: db.key_backups.get_etag(sender_id, &body.version)?,
+ }
+ .into())
+}
diff --git a/src/client_server/message.rs b/src/client_server/message.rs
index 025331e..8a09aba 100644
--- a/src/client_server/message.rs
+++ b/src/client_server/message.rs
@@ -1,13 +1,14 @@
use super::State;
-use crate::{pdu::PduBuilder, ConduitResult, Database, Error, Ruma};
+use crate::{pdu::PduBuilder, utils, ConduitResult, Database, Error, Ruma};
use ruma::{
api::client::{
error::ErrorKind,
r0::message::{get_message_events, send_message_event},
},
events::EventContent,
+ EventId,
};
-use std::convert::TryInto;
+use std::convert::{TryFrom, TryInto};
#[cfg(feature = "conduit_bin")]
use rocket::{get, put};
@@ -21,6 +22,29 @@ pub fn send_message_event_route(
body: Ruma<send_message_event::Request<'_>>,
) -> ConduitResult<send_message_event::Response> {
let sender_id = body.sender_id.as_ref().expect("user is authenticated");
+ let device_id = body.device_id.as_ref().expect("user is authenticated");
+
+ // Check if this is a new transaction id
+ if let Some(response) = db
+ .transaction_ids
+ .existing_txnid(sender_id, device_id, &body.txn_id)?
+ {
+ // The client might have sent a txnid of the /sendToDevice endpoint
+ // This txnid has no response associated with it
+ if response.is_empty() {
+ return Err(Error::BadRequest(
+ ErrorKind::InvalidParam,
+ "Tried to use txn id already used for an incompatible endpoint.",
+ ));
+ }
+
+ let event_id = EventId::try_from(
+ utils::string_from_bytes(&response)
+ .map_err(|_| Error::bad_database("Invalid txnid bytes in database."))?,
+ )
+ .map_err(|_| Error::bad_database("Invalid event id in txnid data."))?;
+ return Ok(send_message_event::Response { event_id }.into());
+ }
let mut unsigned = serde_json::Map::new();
unsigned.insert("transaction_id".to_owned(), body.txn_id.clone().into());
@@ -45,6 +69,9 @@ pub fn send_message_event_route(
&db.account_data,
)?;
+ db.transaction_ids
+ .add_txnid(sender_id, device_id, &body.txn_id, event_id.as_bytes())?;
+
Ok(send_message_event::Response::new(event_id).into())
}
diff --git a/src/client_server/profile.rs b/src/client_server/profile.rs
index 2a2e05e..c1c0253 100644
--- a/src/client_server/profile.rs
+++ b/src/client_server/profile.rs
@@ -217,11 +217,8 @@ pub fn get_profile_route(
db: State<'_, Database>,
body: Ruma<get_profile::Request<'_>>,
) -> ConduitResult<get_profile::Response> {
- let avatar_url = db.users.avatar_url(&body.user_id)?;
- let displayname = db.users.displayname(&body.user_id)?;
-
- if avatar_url.is_none() && displayname.is_none() {
- // Return 404 if we don't have a profile for this id
+ if !db.users.exists(&body.user_id)? {
+ // Return 404 if this user doesn't exist
return Err(Error::BadRequest(
ErrorKind::NotFound,
"Profile was not found.",
@@ -229,8 +226,8 @@ pub fn get_profile_route(
}
Ok(get_profile::Response {
- avatar_url,
- displayname,
+ avatar_url: db.users.avatar_url(&body.user_id)?,
+ displayname: db.users.displayname(&body.user_id)?,
}
.into())
}
diff --git a/src/client_server/read_marker.rs b/src/client_server/read_marker.rs
index 023eece..34d1ccc 100644
--- a/src/client_server/read_marker.rs
+++ b/src/client_server/read_marker.rs
@@ -34,13 +34,14 @@ pub fn set_read_marker_route(
)?;
if let Some(event) = &body.read_receipt {
- db.rooms.edus.room_read_set(
+ db.rooms.edus.private_read_set(
&body.room_id,
&sender_id,
db.rooms.get_pdu_count(event)?.ok_or(Error::BadRequest(
ErrorKind::InvalidParam,
"Event does not exist.",
))?,
+ &db.globals,
)?;
let mut user_receipts = BTreeMap::new();
@@ -58,7 +59,7 @@ pub fn set_read_marker_route(
},
);
- db.rooms.edus.roomlatest_update(
+ db.rooms.edus.readreceipt_update(
&sender_id,
&body.room_id,
AnyEvent::Ephemeral(AnyEphemeralRoomEvent::Receipt(
diff --git a/src/client_server/room.rs b/src/client_server/room.rs
index 9a83f81..a5280cf 100644
--- a/src/client_server/room.rs
+++ b/src/client_server/room.rs
@@ -3,15 +3,15 @@ use crate::{pdu::PduBuilder, ConduitResult, Database, Error, Ruma};
use ruma::{
api::client::{
error::ErrorKind,
- r0::room::{self, create_room, get_room_event},
+ r0::room::{self, create_room, get_room_event, upgrade_room},
},
events::{
room::{guest_access, history_visibility, join_rules, member, name, topic},
EventType,
},
- RoomAliasId, RoomId, RoomVersionId,
+ Raw, RoomAliasId, RoomId, RoomVersionId,
};
-use std::{collections::BTreeMap, convert::TryFrom};
+use std::{cmp::max, collections::BTreeMap, convert::TryFrom};
#[cfg(feature = "conduit_bin")]
use rocket::{get, post};
@@ -332,3 +332,196 @@ pub fn get_room_event_route(
}
.into())
}
+
+#[cfg_attr(
+ feature = "conduit_bin",
+    post("/_matrix/client/r0/rooms/<_room_id>/upgrade", data = "<body>")
+)]
+pub fn upgrade_room_route(
+ db: State<'_, Database>,
+    body: Ruma<upgrade_room::Request<'_>>,
+    _room_id: String,
+) -> ConduitResult<upgrade_room::Response> {
+ let sender_id = body.sender_id.as_ref().expect("user is authenticated");
+
+ // Validate the room version requested
+ let new_version =
+ RoomVersionId::try_from(body.new_version.clone()).expect("invalid room version id");
+
+ if !matches!(
+ new_version,
+ RoomVersionId::Version5 | RoomVersionId::Version6
+ ) {
+ return Err(Error::BadRequest(
+ ErrorKind::UnsupportedRoomVersion,
+ "This server does not support that room version.",
+ ));
+ }
+
+ // Create a replacement room
+ let replacement_room = RoomId::new(db.globals.server_name());
+
+ // Send a m.room.tombstone event to the old room to indicate that it is not intended to be used any further
+ // Fail if the sender does not have the required permissions
+ let tombstone_event_id = db.rooms.build_and_append_pdu(
+ PduBuilder {
+ event_type: EventType::RoomTombstone,
+ content: serde_json::to_value(ruma::events::room::tombstone::TombstoneEventContent {
+ body: "This room has been replaced".to_string(),
+ replacement_room: replacement_room.clone(),
+ })
+ .expect("event is valid, we just created it"),
+ unsigned: None,
+ state_key: Some("".to_owned()),
+ redacts: None,
+ },
+ sender_id,
+ &body.room_id,
+ &db.globals,
+ &db.account_data,
+ )?;
+
+ // Get the old room federations status
+    let federate = serde_json::from_value::<Raw<ruma::events::room::create::CreateEventContent>>(
+ db.rooms
+ .room_state_get(&body.room_id, &EventType::RoomCreate, "")?
+ .ok_or_else(|| Error::bad_database("Found room without m.room.create event."))?
+ .content,
+ )
+ .expect("Raw::from_value always works")
+ .deserialize()
+ .map_err(|_| Error::bad_database("Invalid room event in database."))?
+ .federate;
+
+ // Use the m.room.tombstone event as the predecessor
+ let predecessor = Some(ruma::events::room::create::PreviousRoom::new(
+ body.room_id.clone(),
+ tombstone_event_id,
+ ));
+
+ // Send a m.room.create event containing a predecessor field and the applicable room_version
+ let mut create_event_content =
+ ruma::events::room::create::CreateEventContent::new(sender_id.clone());
+ create_event_content.federate = federate;
+ create_event_content.room_version = new_version;
+ create_event_content.predecessor = predecessor;
+
+ db.rooms.build_and_append_pdu(
+ PduBuilder {
+ event_type: EventType::RoomCreate,
+ content: serde_json::to_value(create_event_content)
+ .expect("event is valid, we just created it"),
+ unsigned: None,
+ state_key: Some("".to_owned()),
+ redacts: None,
+ },
+ sender_id,
+ &replacement_room,
+ &db.globals,
+ &db.account_data,
+ )?;
+
+ // Join the new room
+ db.rooms.build_and_append_pdu(
+ PduBuilder {
+ event_type: EventType::RoomMember,
+ content: serde_json::to_value(member::MemberEventContent {
+ membership: member::MembershipState::Join,
+ displayname: db.users.displayname(&sender_id)?,
+ avatar_url: db.users.avatar_url(&sender_id)?,
+ is_direct: None,
+ third_party_invite: None,
+ })
+ .expect("event is valid, we just created it"),
+ unsigned: None,
+ state_key: Some(sender_id.to_string()),
+ redacts: None,
+ },
+ sender_id,
+ &replacement_room,
+ &db.globals,
+ &db.account_data,
+ )?;
+
+ // Recommended transferable state events list from the specs
+ let transferable_state_events = vec![
+ EventType::RoomServerAcl,
+ EventType::RoomEncryption,
+ EventType::RoomName,
+ EventType::RoomAvatar,
+ EventType::RoomTopic,
+ EventType::RoomGuestAccess,
+ EventType::RoomHistoryVisibility,
+ EventType::RoomJoinRules,
+ EventType::RoomPowerLevels,
+ ];
+
+ // Replicate transferable state events to the new room
+ for event_type in transferable_state_events {
+ let event_content = match db.rooms.room_state_get(&body.room_id, &event_type, "")? {
+ Some(v) => v.content.clone(),
+ None => continue, // Skipping missing events.
+ };
+
+ db.rooms.build_and_append_pdu(
+ PduBuilder {
+ event_type,
+ content: event_content,
+ unsigned: None,
+ state_key: Some("".to_owned()),
+ redacts: None,
+ },
+ sender_id,
+ &replacement_room,
+ &db.globals,
+ &db.account_data,
+ )?;
+ }
+
+ // Moves any local aliases to the new room
+ for alias in db.rooms.room_aliases(&body.room_id).filter_map(|r| r.ok()) {
+ db.rooms
+ .set_alias(&alias, Some(&replacement_room), &db.globals)?;
+ }
+
+ // Get the old room power levels
+ let mut power_levels_event_content =
+        serde_json::from_value::<Raw<ruma::events::room::power_levels::PowerLevelsEventContent>>(
+ db.rooms
+ .room_state_get(&body.room_id, &EventType::RoomPowerLevels, "")?
+            .ok_or_else(|| Error::bad_database("Found room without m.room.power_levels event."))?
+ .content,
+ )
+ .expect("database contains invalid PDU")
+ .deserialize()
+ .map_err(|_| Error::bad_database("Invalid room event in database."))?;
+
+ // Setting events_default and invite to the greater of 50 and users_default + 1
+ let new_level = max(
+ 50.into(),
+ power_levels_event_content.users_default + 1.into(),
+ );
+ power_levels_event_content.events_default = new_level;
+ power_levels_event_content.invite = new_level;
+
+ // Modify the power levels in the old room to prevent sending of events and inviting new users
+ db.rooms
+ .build_and_append_pdu(
+ PduBuilder {
+ event_type: EventType::RoomPowerLevels,
+ content: serde_json::to_value(power_levels_event_content)
+ .expect("event is valid, we just created it"),
+ unsigned: None,
+ state_key: Some("".to_owned()),
+ redacts: None,
+ },
+ sender_id,
+ &body.room_id,
+ &db.globals,
+ &db.account_data,
+ )
+ .ok();
+
+ // Return the replacement room id
+ Ok(upgrade_room::Response { replacement_room }.into())
+}
diff --git a/src/client_server/sync.rs b/src/client_server/sync.rs
index 167ee75..eeeec00 100644
--- a/src/client_server/sync.rs
+++ b/src/client_server/sync.rs
@@ -81,7 +81,12 @@ pub async fn sync_events_route(
.rev()
.collect::<Vec<_>>();
- let send_notification_counts = !timeline_pdus.is_empty();
+ let send_notification_counts = !timeline_pdus.is_empty()
+ || db
+ .rooms
+ .edus
+ .last_privateread_update(&sender_id, &room_id)?
+ > since;
// The /sync response doesn't always return all messages, so we say the output is
// limited unless there are events in non_timeline_pdus
@@ -234,7 +239,7 @@ pub async fn sync_events_route(
};
let notification_count = if send_notification_counts {
- if let Some(last_read) = db.rooms.edus.room_read_get(&room_id, &sender_id)? {
+ if let Some(last_read) = db.rooms.edus.private_read_get(&room_id, &sender_id)? {
Some(
(db.rooms
.pdus_since(&sender_id, &room_id, last_read)?
@@ -272,20 +277,15 @@ pub async fn sync_events_route(
let mut edus = db
.rooms
.edus
- .roomlatests_since(&room_id, since)?
+ .readreceipts_since(&room_id, since)?
.filter_map(|r| r.ok()) // Filter out buggy events
.collect::<Vec<_>>();
- if db
- .rooms
- .edus
- .last_roomactive_update(&room_id, &db.globals)?
- > since
- {
+ if db.rooms.edus.last_typing_update(&room_id, &db.globals)? > since {
edus.push(
serde_json::from_str(
&serde_json::to_string(&AnySyncEphemeralRoomEvent::Typing(
- db.rooms.edus.roomactives_all(&room_id)?,
+ db.rooms.edus.typings_all(&room_id)?,
))
.expect("event is valid, we just created it"),
)
diff --git a/src/client_server/to_device.rs b/src/client_server/to_device.rs
index fe74101..e736388 100644
--- a/src/client_server/to_device.rs
+++ b/src/client_server/to_device.rs
@@ -17,6 +17,16 @@ pub fn send_event_to_device_route(
body: Ruma<send_event_to_device::Request<'_>>,
) -> ConduitResult<send_event_to_device::Response> {
let sender_id = body.sender_id.as_ref().expect("user is authenticated");
+ let device_id = body.device_id.as_ref().expect("user is authenticated");
+
+ // Check if this is a new transaction id
+ if db
+ .transaction_ids
+ .existing_txnid(sender_id, device_id, &body.txn_id)?
+ .is_some()
+ {
+ return Ok(send_event_to_device::Response.into());
+ }
for (target_user_id, map) in &body.messages {
for (target_device_id_maybe, event) in map {
@@ -52,5 +62,9 @@ pub fn send_event_to_device_route(
}
}
+ // Save transaction id with empty data
+ db.transaction_ids
+ .add_txnid(sender_id, device_id, &body.txn_id, &[])?;
+
Ok(send_event_to_device::Response.into())
}
diff --git a/src/client_server/typing.rs b/src/client_server/typing.rs
index b15121c..b019769 100644
--- a/src/client_server/typing.rs
+++ b/src/client_server/typing.rs
@@ -17,7 +17,7 @@ pub fn create_typing_event_route(
let sender_id = body.sender_id.as_ref().expect("user is authenticated");
if let Typing::Yes(duration) = body.state {
- db.rooms.edus.roomactive_add(
+ db.rooms.edus.typing_add(
&sender_id,
&body.room_id,
duration.as_millis() as u64 + utils::millis_since_unix_epoch(),
@@ -26,7 +26,7 @@ pub fn create_typing_event_route(
} else {
db.rooms
.edus
- .roomactive_remove(&sender_id, &body.room_id, &db.globals)?;
+ .typing_remove(&sender_id, &body.room_id, &db.globals)?;
}
Ok(create_typing_event::Response.into())
diff --git a/src/database.rs b/src/database.rs
index 0d18020..83f30c9 100644
--- a/src/database.rs
+++ b/src/database.rs
@@ -3,6 +3,7 @@ pub mod globals;
pub mod key_backups;
pub mod media;
pub mod rooms;
+pub mod transaction_ids;
pub mod uiaa;
pub mod users;
@@ -23,6 +24,7 @@ pub struct Database {
pub account_data: account_data::AccountData,
pub media: media::Media,
pub key_backups: key_backups::KeyBackups,
+ pub transaction_ids: transaction_ids::TransactionIds,
pub _db: sled::Db,
}
@@ -88,10 +90,12 @@ impl Database {
},
rooms: rooms::Rooms {
edus: rooms::RoomEdus {
- roomuserid_lastread: db.open_tree("roomuserid_lastread")?, // "Private" read receipt
- roomlatestid_roomlatest: db.open_tree("roomlatestid_roomlatest")?, // Read receipts
- roomactiveid_userid: db.open_tree("roomactiveid_userid")?, // Typing notifs
- roomid_lastroomactiveupdate: db.open_tree("roomid_lastroomactiveupdate")?,
+ readreceiptid_readreceipt: db.open_tree("readreceiptid_readreceipt")?,
+ roomuserid_privateread: db.open_tree("roomuserid_privateread")?, // "Private" read receipt
+ roomuserid_lastprivatereadupdate: db
+ .open_tree("roomid_lastprivatereadupdate")?,
+ typingid_userid: db.open_tree("typingid_userid")?,
+ roomid_lasttypingupdate: db.open_tree("roomid_lasttypingupdate")?,
presenceid_presence: db.open_tree("presenceid_presence")?,
userid_lastpresenceupdate: db.open_tree("userid_lastpresenceupdate")?,
},
@@ -107,6 +111,7 @@ impl Database {
userroomid_joined: db.open_tree("userroomid_joined")?,
roomuserid_joined: db.open_tree("roomuserid_joined")?,
+ roomuseroncejoinedids: db.open_tree("roomuseroncejoinedids")?,
userroomid_invited: db.open_tree("userroomid_invited")?,
roomuserid_invited: db.open_tree("roomuserid_invited")?,
userroomid_left: db.open_tree("userroomid_left")?,
@@ -126,6 +131,9 @@ impl Database {
backupid_etag: db.open_tree("backupid_etag")?,
backupkeyid_backup: db.open_tree("backupkeyid_backupmetadata")?,
},
+ transaction_ids: transaction_ids::TransactionIds {
+ userdevicetxnid_response: db.open_tree("userdevicetxnid_response")?,
+ },
_db: db,
})
}
@@ -166,14 +174,14 @@ impl Database {
futures.push(
self.rooms
.edus
- .roomid_lastroomactiveupdate
+ .roomid_lasttypingupdate
.watch_prefix(&roomid_bytes),
);
futures.push(
self.rooms
.edus
- .roomlatestid_roomlatest
+ .readreceiptid_readreceipt
.watch_prefix(&roomid_prefix),
);
diff --git a/src/database/key_backups.rs b/src/database/key_backups.rs
index 5b37f1b..1ce7595 100644
--- a/src/database/key_backups.rs
+++ b/src/database/key_backups.rs
@@ -37,6 +37,28 @@ impl KeyBackups {
Ok(version)
}
+ pub fn delete_backup(&self, user_id: &UserId, version: &str) -> Result<()> {
+ let mut key = user_id.to_string().as_bytes().to_vec();
+ key.push(0xff);
+ key.extend_from_slice(&version.as_bytes());
+
+ self.backupid_algorithm.remove(&key)?;
+ self.backupid_etag.remove(&key)?;
+
+ key.push(0xff);
+
+ for outdated_key in self
+ .backupkeyid_backup
+ .scan_prefix(&key)
+ .keys()
+ .filter_map(|r| r.ok())
+ {
+ self.backupkeyid_backup.remove(outdated_key)?;
+ }
+
+ Ok(())
+ }
+
pub fn update_backup(
&self,
user_id: &UserId,
@@ -163,6 +185,7 @@ impl KeyBackups {
let mut prefix = user_id.to_string().as_bytes().to_vec();
prefix.push(0xff);
prefix.extend_from_slice(version.as_bytes());
+ prefix.push(0xff);
let mut rooms = BTreeMap::<RoomId, RoomKeyBackup>::new();
@@ -204,4 +227,135 @@ impl KeyBackups {
Ok(rooms)
}
+
+ pub fn get_room(
+ &self,
+ user_id: &UserId,
+ version: &str,
+ room_id: &RoomId,
+    ) -> BTreeMap<String, KeyData> {
+ let mut prefix = user_id.to_string().as_bytes().to_vec();
+ prefix.push(0xff);
+ prefix.extend_from_slice(version.as_bytes());
+ prefix.push(0xff);
+ prefix.extend_from_slice(room_id.as_bytes());
+ prefix.push(0xff);
+
+ self.backupkeyid_backup
+ .scan_prefix(&prefix)
+ .map(|r| {
+ let (key, value) = r?;
+ let mut parts = key.rsplit(|&b| b == 0xff);
+
+ let session_id =
+ utils::string_from_bytes(&parts.next().ok_or_else(|| {
+ Error::bad_database("backupkeyid_backup key is invalid.")
+ })?)
+ .map_err(|_| {
+ Error::bad_database("backupkeyid_backup session_id is invalid.")
+ })?;
+
+ let key_data = serde_json::from_slice(&value).map_err(|_| {
+ Error::bad_database("KeyData in backupkeyid_backup is invalid.")
+ })?;
+
+ Ok::<_, Error>((session_id, key_data))
+ })
+ .filter_map(|r| r.ok())
+ .collect()
+ }
+
+ pub fn get_session(
+ &self,
+ user_id: &UserId,
+ version: &str,
+ room_id: &RoomId,
+ session_id: &str,
+ ) -> Result