crypto: Correctly store the ed25519 key map for inbound group sessions.
parent aff1e1d0a8, commit 3e9b0a8e7f
2 changed files with 107 additions and 47 deletions
@@ -49,9 +49,10 @@ pub struct InboundGroupSession {
     inner: Arc<Mutex<OlmInboundGroupSession>>,
     session_id: Arc<String>,
     pub(crate) sender_key: Arc<String>,
-    pub(crate) signing_key: Arc<String>,
+    pub(crate) signing_key: Arc<BTreeMap<DeviceKeyAlgorithm, String>>,
     pub(crate) room_id: Arc<RoomId>,
     forwarding_chains: Arc<Mutex<Option<Vec<String>>>>,
+    imported: Arc<bool>,
 }

 impl InboundGroupSession {
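The struct change above replaces the single ed25519 signing key string with a map keyed by algorithm, so call sites now look the key up instead of reading a plain String field. Below is a minimal, self-contained sketch of that lookup; the DeviceKeyAlgorithm enum here is only a stand-in for the real identifier type and the helper name is made up:

    use std::collections::BTreeMap;

    // Stand-in for the real DeviceKeyAlgorithm identifier type used in the diff.
    #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
    enum DeviceKeyAlgorithm {
        Ed25519,
        Curve25519,
    }

    // Hypothetical helper showing how a call site reads the claimed ed25519 key
    // out of the new map instead of a bare String field.
    fn claimed_ed25519_key(
        signing_key: &BTreeMap<DeviceKeyAlgorithm, String>,
    ) -> Option<&str> {
        signing_key
            .get(&DeviceKeyAlgorithm::Ed25519)
            .map(String::as_str)
    }

    fn main() {
        let mut keys = BTreeMap::new();
        keys.insert(DeviceKeyAlgorithm::Ed25519, "base64+ed25519+key".to_owned());
        assert_eq!(claimed_ed25519_key(&keys), Some("base64+ed25519+key"));
        let _ = DeviceKeyAlgorithm::Curve25519;
    }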
@@ -80,13 +81,17 @@ impl InboundGroupSession {
         let session = OlmInboundGroupSession::new(&session_key.0)?;
         let session_id = session.session_id();

+        let mut keys: BTreeMap<DeviceKeyAlgorithm, String> = BTreeMap::new();
+        keys.insert(DeviceKeyAlgorithm::Ed25519, signing_key.to_owned());
+
         Ok(InboundGroupSession {
             inner: Arc::new(Mutex::new(session)),
             session_id: Arc::new(session_id),
             sender_key: Arc::new(sender_key.to_owned()),
-            signing_key: Arc::new(signing_key.to_owned()),
+            signing_key: Arc::new(keys),
             room_id: Arc::new(room_id.clone()),
             forwarding_chains: Arc::new(Mutex::new(None)),
+            imported: Arc::new(false),
         })
     }

@@ -102,9 +107,10 @@ impl InboundGroupSession {
         PickledInboundGroupSession {
             pickle: InboundGroupSessionPickle::from(pickle),
             sender_key: self.sender_key.to_string(),
-            signing_key: self.signing_key.to_string(),
+            signing_key: (&*self.signing_key).clone(),
             room_id: (&*self.room_id).clone(),
             forwarding_chains: self.forwarding_chains.lock().await.clone(),
+            imported: *self.imported,
         }
     }

@@ -120,10 +126,6 @@ impl InboundGroupSession {
         let session_key =
             ExportedGroupSessionKey(self.inner.lock().await.export(message_index).ok()?);

-        let mut sender_claimed_keys: BTreeMap<DeviceKeyAlgorithm, String> = BTreeMap::new();
-
-        sender_claimed_keys.insert(DeviceKeyAlgorithm::Ed25519, (&*self.signing_key).to_owned());
-
         Some(ExportedRoomKey {
             algorithm: EventEncryptionAlgorithm::MegolmV1AesSha2,
             room_id: (&*self.room_id).clone(),
@@ -136,7 +138,7 @@ impl InboundGroupSession {
                 .as_ref()
                 .cloned()
                 .unwrap_or_default(),
-            sender_claimed_keys,
+            sender_claimed_keys: (&*self.signing_key).clone(),
             session_key,
         })
     }
@@ -166,6 +168,7 @@ impl InboundGroupSession {
             signing_key: Arc::new(pickle.signing_key),
             room_id: Arc::new(pickle.room_id),
             forwarding_chains: Arc::new(Mutex::new(pickle.forwarding_chains)),
+            imported: Arc::new(pickle.imported),
         })
     }

@@ -265,12 +268,15 @@ pub struct PickledInboundGroupSession {
     /// The public curve25519 key of the account that sent us the session.
     pub sender_key: String,
     /// The public ed25519 key of the account that sent us the session.
-    pub signing_key: String,
+    pub signing_key: BTreeMap<DeviceKeyAlgorithm, String>,
     /// The id of the room that the session is used in.
     pub room_id: RoomId,
     /// The list of claimed ed25519 keys that forwarded us this key. Will be None if
     /// we directly received this session.
     pub forwarding_chains: Option<Vec<String>>,
+    /// Flag remembering if the session was directly sent to us by the sender
+    /// or if it was imported.
+    pub imported: bool,
 }

 /// The typed representation of a base64 encoded string of the GroupSession pickle.
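In the pickled form the signing key is now a whole map rather than a single string. Assuming the pickle is serialized with serde, which this diff does not show, the field would appear as a small object keyed by algorithm name. A self-contained sketch with plain string keys standing in for DeviceKeyAlgorithm:

    use std::collections::BTreeMap;

    fn main() -> Result<(), serde_json::Error> {
        // Plain strings stand in for the real DeviceKeyAlgorithm keys.
        let mut signing_key: BTreeMap<String, String> = BTreeMap::new();
        signing_key.insert("ed25519".to_owned(), "base64+ed25519+key".to_owned());

        // The map serializes as an object keyed by algorithm,
        // e.g. {"ed25519":"base64+ed25519+key"}, instead of a bare string.
        println!("{}", serde_json::to_string(&signing_key)?);
        Ok(())
    }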
@@ -231,14 +231,16 @@ impl SqliteStore {
             .execute(
                 r#"
             CREATE TABLE IF NOT EXISTS inbound_group_sessions (
-                "session_id" TEXT NOT NULL PRIMARY KEY,
+                "id" INTEGER NOT NULL PRIMARY KEY,
+                "session_id" TEXT NOT NULL,
                 "account_id" INTEGER NOT NULL,
                 "sender_key" TEXT NOT NULL,
-                "signing_key" TEXT NOT NULL,
                 "room_id" TEXT NOT NULL,
                 "pickle" BLOB NOT NULL,
+                "imported" INTEGER NOT NULL,
                 FOREIGN KEY ("account_id") REFERENCES "accounts" ("id")
                     ON DELETE CASCADE
+                UNIQUE(account_id,session_id,sender_key)
             );

             CREATE INDEX IF NOT EXISTS "olm_groups_sessions_account_id" ON "inbound_group_sessions" ("account_id");
@@ -246,6 +248,24 @@ impl SqliteStore {
             )
             .await?;

+        connection
+            .execute(
+                r#"
+            CREATE TABLE IF NOT EXISTS group_session_claimed_keys (
+                "id" INTEGER NOT NULL PRIMARY KEY,
+                "session_id" INTEGER NOT NULL,
+                "algorithm" TEXT NOT NULL,
+                "key" TEXT NOT NULL,
+                FOREIGN KEY ("session_id") REFERENCES "inbound_group_sessions" ("id")
+                    ON DELETE CASCADE
+                UNIQUE(session_id, algorithm)
+            );
+
+            CREATE INDEX IF NOT EXISTS "group_session_claimed_keys_session_id" ON "inbound_group_sessions" ("session_id");
+            "#,
+            )
+            .await?;
+
         connection
             .execute(
                 r#"
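The claimed signing keys now live in their own child table, one row per algorithm/key pair, keyed by the numeric row id of the parent session. The following self-contained sketch illustrates that layout; it uses the synchronous rusqlite crate instead of the async sqlx calls in the store, and trims the parent table to the columns that matter here, so both schema and values are illustrative only:

    use rusqlite::{params, Connection};

    fn main() -> rusqlite::Result<()> {
        let conn = Connection::open_in_memory()?;

        // Simplified versions of the two tables created in the migration above.
        conn.execute_batch(
            "CREATE TABLE inbound_group_sessions (
                 id INTEGER NOT NULL PRIMARY KEY,
                 session_id TEXT NOT NULL,
                 sender_key TEXT NOT NULL
             );
             CREATE TABLE group_session_claimed_keys (
                 id INTEGER NOT NULL PRIMARY KEY,
                 session_id INTEGER NOT NULL,
                 algorithm TEXT NOT NULL,
                 key TEXT NOT NULL,
                 FOREIGN KEY (session_id) REFERENCES inbound_group_sessions (id)
                     ON DELETE CASCADE,
                 UNIQUE(session_id, algorithm)
             );",
        )?;

        conn.execute(
            "INSERT INTO inbound_group_sessions (session_id, sender_key) VALUES (?1, ?2)",
            params!["megolm-session-id", "curve25519-sender-key"],
        )?;
        let session_row_id = conn.last_insert_rowid();

        // One row per claimed algorithm/key pair, pointing back at the session row.
        conn.execute(
            "INSERT INTO group_session_claimed_keys (session_id, algorithm, key)
             VALUES (?1, ?2, ?3)",
            params![session_row_id, "ed25519", "ed25519-signing-key"],
        )?;

        let count: i64 = conn.query_row(
            "SELECT COUNT(*) FROM group_session_claimed_keys WHERE session_id = ?1",
            params![session_row_id],
            |row| row.get(0),
        )?;
        assert_eq!(count, 1);

        Ok(())
    }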
@@ -475,45 +495,55 @@ impl SqliteStore {
         let account_id = self.account_id().ok_or(CryptoStoreError::AccountUnset)?;
         let mut connection = self.connection.lock().await;

-        let mut rows: Vec<(String, String, String, String)> = query_as(
-            "SELECT pickle, sender_key, signing_key, room_id
+        let mut rows: Vec<(i64, String, String, String, bool)> = query_as(
+            "SELECT id, pickle, sender_key, room_id, imported
              FROM inbound_group_sessions WHERE account_id = ?",
         )
         .bind(account_id)
         .fetch_all(&mut *connection)
         .await?;

-        let mut group_sessions = rows
-            .drain(..)
-            .map(|row| {
-                let pickle = row.0;
-                let sender_key = row.1;
-                let signing_key = row.2;
-                let room_id = row.3;
-
-                let pickle = PickledInboundGroupSession {
-                    pickle: InboundGroupSessionPickle::from(pickle),
-                    sender_key,
-                    signing_key,
-                    room_id: RoomId::try_from(room_id)?,
-                    // Fixme we need to store/restore these once we get support
-                    // for key requesting/forwarding.
-                    forwarding_chains: None,
-                };
-
-                Ok(InboundGroupSession::from_pickle(
-                    pickle,
-                    self.get_pickle_mode(),
-                )?)
-            })
-            .collect::<Result<Vec<InboundGroupSession>>>()?;
-
-        group_sessions
-            .drain(..)
-            .map(|s| {
-                self.inbound_group_sessions.add(s);
-            })
-            .for_each(drop);
+        for row in rows.drain(..) {
+            let session_row_id = row.0;
+            let pickle = row.1;
+            let sender_key = row.2;
+            let room_id = row.3;
+            let imported = row.4;
+
+            let key_rows: Vec<(String, String)> = query_as(
+                "SELECT algorithm, key FROM group_session_claimed_keys WHERE session_id = ?",
+            )
+            .bind(session_row_id)
+            .fetch_all(&mut *connection)
+            .await?;
+
+            let claimed_keys: BTreeMap<DeviceKeyAlgorithm, String> = key_rows
+                .into_iter()
+                .filter_map(|row| {
+                    let algorithm = row.0.parse::<DeviceKeyAlgorithm>().ok()?;
+                    let key = row.1;
+
+                    Some((algorithm, key))
+                })
+                .collect();
+
+            let pickle = PickledInboundGroupSession {
+                pickle: InboundGroupSessionPickle::from(pickle),
+                sender_key,
+                signing_key: claimed_keys,
+                room_id: RoomId::try_from(room_id)?,
+                // Fixme we need to store/restore these once we get support
+                // for key requesting/forwarding.
+                forwarding_chains: None,
+                imported,
+            };
+
+            self.inbound_group_sessions
+                .add(InboundGroupSession::from_pickle(
+                    pickle,
+                    self.get_pickle_mode(),
+                )?);
+        }

         Ok(())
     }
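In the load path above, the per-session claimed keys are fetched from group_session_claimed_keys and folded into a BTreeMap with filter_map, so a row whose algorithm string cannot be parsed is skipped rather than failing the whole load. A minimal, self-contained sketch of that pattern, with plain strings and a made-up parser standing in for the real DeviceKeyAlgorithm type:

    use std::collections::BTreeMap;

    // Stand-in parser: accepts only the algorithms this sketch knows about,
    // mimicking how an unknown algorithm string would fail to parse.
    fn parse_algorithm(value: &str) -> Option<String> {
        match value {
            "ed25519" | "curve25519" => Some(value.to_owned()),
            _ => None,
        }
    }

    fn main() {
        let key_rows: Vec<(String, String)> = vec![
            ("ed25519".to_owned(), "base64+ed25519+key".to_owned()),
            ("not-a-real-algorithm".to_owned(), "ignored".to_owned()),
        ];

        let claimed_keys: BTreeMap<String, String> = key_rows
            .into_iter()
            .filter_map(|(algorithm, key)| Some((parse_algorithm(&algorithm)?, key)))
            .collect();

        // The malformed row was dropped instead of aborting the load.
        assert_eq!(claimed_keys.len(), 1);
        assert!(claimed_keys.contains_key("ed25519"));
    }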
@@ -1146,23 +1176,47 @@ impl CryptoStore for SqliteStore {
         // the key import feature.

         query(
-            "INSERT INTO inbound_group_sessions (
-                session_id, account_id, sender_key, signing_key,
-                room_id, pickle
+            "REPLACE INTO inbound_group_sessions (
+                session_id, account_id, sender_key,
+                room_id, pickle, imported
             ) VALUES (?1, ?2, ?3, ?4, ?5, ?6)
-             ON CONFLICT(session_id) DO UPDATE SET
-                pickle = excluded.pickle
             ",
         )
         .bind(session_id)
         .bind(account_id)
-        .bind(pickle.sender_key)
-        .bind(pickle.signing_key)
+        .bind(&pickle.sender_key)
         .bind(pickle.room_id.as_str())
         .bind(pickle.pickle.as_str())
+        .bind(pickle.imported)
         .execute(&mut *connection)
         .await?;

+        let row: (i64,) = query_as(
+            "SELECT id FROM inbound_group_sessions
+             WHERE account_id = ? and session_id = ? and sender_key = ?",
+        )
+        .bind(account_id)
+        .bind(session_id)
+        .bind(pickle.sender_key)
+        .fetch_one(&mut *connection)
+        .await?;
+
+        let session_row_id = row.0;
+
+        for (key_id, key) in pickle.signing_key {
+            query(
+                "INSERT OR IGNORE INTO group_session_claimed_keys (
+                    session_id, algorithm, key
+                ) VALUES (?1, ?2, ?3)
+                ",
+            )
+            .bind(session_row_id)
+            .bind(serde_json::to_string(&key_id)?)
+            .bind(key)
+            .execute(&mut *connection)
+            .await?;
+        }
+
         Ok(self.inbound_group_sessions.add(session))
     }
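On the save side, the session row is written first and its numeric id is looked up so the claimed keys can be attached to it. The claimed-key rows themselves use INSERT OR IGNORE together with the UNIQUE(session_id, algorithm) constraint, so writing the same algorithm twice for one session row is a no-op. A self-contained sketch of just that behaviour, again using rusqlite purely to keep the example short:

    use rusqlite::{params, Connection};

    fn main() -> rusqlite::Result<()> {
        let conn = Connection::open_in_memory()?;
        conn.execute_batch(
            "CREATE TABLE group_session_claimed_keys (
                 id INTEGER NOT NULL PRIMARY KEY,
                 session_id INTEGER NOT NULL,
                 algorithm TEXT NOT NULL,
                 key TEXT NOT NULL,
                 UNIQUE(session_id, algorithm)
             );",
        )?;

        for _ in 0..2 {
            // The second insert trips the UNIQUE constraint and is silently ignored.
            conn.execute(
                "INSERT OR IGNORE INTO group_session_claimed_keys (session_id, algorithm, key)
                 VALUES (?1, ?2, ?3)",
                params![1_i64, "ed25519", "base64+ed25519+key"],
            )?;
        }

        let count: i64 = conn.query_row(
            "SELECT COUNT(*) FROM group_session_claimed_keys",
            params![],
            |row| row.get(0),
        )?;
        assert_eq!(count, 1);
        Ok(())
    }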