Merge branch 'statediffs' into 'master'

Statediffs

See merge request famedly/conduit!145

commit 33481ec062

17 changed files with 1850 additions and 922 deletions

114  Cargo.lock (generated)
							|  | @ -248,7 +248,7 @@ dependencies = [ | ||||||
|  "jsonwebtoken", |  "jsonwebtoken", | ||||||
|  "lru-cache", |  "lru-cache", | ||||||
|  "num_cpus", |  "num_cpus", | ||||||
|  "opentelemetry", |  "opentelemetry 0.16.0", | ||||||
|  "opentelemetry-jaeger", |  "opentelemetry-jaeger", | ||||||
|  "parking_lot", |  "parking_lot", | ||||||
|  "pretty_env_logger", |  "pretty_env_logger", | ||||||
|  | @ -1466,16 +1466,46 @@ dependencies = [ | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "opentelemetry-jaeger" | name = "opentelemetry" | ||||||
| version = "0.14.0" | version = "0.16.0" | ||||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
| checksum = "09a9fc8192722e7daa0c56e59e2336b797122fb8598383dcb11c8852733b435c" | checksum = "e1cf9b1c4e9a6c4de793c632496fa490bdc0e1eea73f0c91394f7b6990935d22" | ||||||
|  | dependencies = [ | ||||||
|  |  "async-trait", | ||||||
|  |  "crossbeam-channel", | ||||||
|  |  "futures", | ||||||
|  |  "js-sys", | ||||||
|  |  "lazy_static", | ||||||
|  |  "percent-encoding", | ||||||
|  |  "pin-project", | ||||||
|  |  "rand 0.8.4", | ||||||
|  |  "thiserror", | ||||||
|  |  "tokio", | ||||||
|  |  "tokio-stream", | ||||||
|  | ] | ||||||
|  | 
 | ||||||
|  | [[package]] | ||||||
|  | name = "opentelemetry-jaeger" | ||||||
|  | version = "0.15.0" | ||||||
|  | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
|  | checksum = "db22f492873ea037bc267b35a0e8e4fb846340058cb7c864efe3d0bf23684593" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "async-trait", |  "async-trait", | ||||||
|  "lazy_static", |  "lazy_static", | ||||||
|  "opentelemetry", |  "opentelemetry 0.16.0", | ||||||
|  |  "opentelemetry-semantic-conventions", | ||||||
|  "thiserror", |  "thiserror", | ||||||
|  "thrift", |  "thrift", | ||||||
|  |  "tokio", | ||||||
|  | ] | ||||||
|  | 
 | ||||||
|  | [[package]] | ||||||
|  | name = "opentelemetry-semantic-conventions" | ||||||
|  | version = "0.8.0" | ||||||
|  | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
|  | checksum = "ffeac823339e8b0f27b961f4385057bf9f97f2863bc745bd015fd6091f2270e9" | ||||||
|  | dependencies = [ | ||||||
|  |  "opentelemetry 0.16.0", | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
|  | @ -2014,8 +2044,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma" | name = "ruma" | ||||||
| version = "0.2.0" | version = "0.3.0" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "assign", |  "assign", | ||||||
|  "js_int", |  "js_int", | ||||||
|  | @ -2035,8 +2065,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-api" | name = "ruma-api" | ||||||
| version = "0.17.1" | version = "0.18.3" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "bytes", |  "bytes", | ||||||
|  "http", |  "http", | ||||||
|  | @ -2051,8 +2081,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-api-macros" | name = "ruma-api-macros" | ||||||
| version = "0.17.1" | version = "0.18.3" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "proc-macro-crate", |  "proc-macro-crate", | ||||||
|  "proc-macro2", |  "proc-macro2", | ||||||
|  | @ -2062,8 +2092,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-appservice-api" | name = "ruma-appservice-api" | ||||||
| version = "0.3.0" | version = "0.4.0" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "ruma-api", |  "ruma-api", | ||||||
|  "ruma-common", |  "ruma-common", | ||||||
|  | @ -2076,8 +2106,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-client-api" | name = "ruma-client-api" | ||||||
| version = "0.11.0" | version = "0.12.2" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "assign", |  "assign", | ||||||
|  "bytes", |  "bytes", | ||||||
|  | @ -2096,8 +2126,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-common" | name = "ruma-common" | ||||||
| version = "0.5.4" | version = "0.6.0" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "indexmap", |  "indexmap", | ||||||
|  "js_int", |  "js_int", | ||||||
|  | @ -2111,8 +2141,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-events" | name = "ruma-events" | ||||||
| version = "0.23.2" | version = "0.24.4" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "indoc", |  "indoc", | ||||||
|  "js_int", |  "js_int", | ||||||
|  | @ -2127,8 +2157,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-events-macros" | name = "ruma-events-macros" | ||||||
| version = "0.23.2" | version = "0.24.4" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "proc-macro-crate", |  "proc-macro-crate", | ||||||
|  "proc-macro2", |  "proc-macro2", | ||||||
|  | @ -2138,8 +2168,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-federation-api" | name = "ruma-federation-api" | ||||||
| version = "0.2.0" | version = "0.3.0" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "js_int", |  "js_int", | ||||||
|  "ruma-api", |  "ruma-api", | ||||||
|  | @ -2153,8 +2183,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-identifiers" | name = "ruma-identifiers" | ||||||
| version = "0.19.4" | version = "0.20.0" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "paste", |  "paste", | ||||||
|  "rand 0.8.4", |  "rand 0.8.4", | ||||||
|  | @ -2167,8 +2197,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-identifiers-macros" | name = "ruma-identifiers-macros" | ||||||
| version = "0.19.4" | version = "0.20.0" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "quote", |  "quote", | ||||||
|  "ruma-identifiers-validation", |  "ruma-identifiers-validation", | ||||||
|  | @ -2177,13 +2207,13 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-identifiers-validation" | name = "ruma-identifiers-validation" | ||||||
| version = "0.4.0" | version = "0.5.0" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-identity-service-api" | name = "ruma-identity-service-api" | ||||||
| version = "0.2.0" | version = "0.3.0" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "js_int", |  "js_int", | ||||||
|  "ruma-api", |  "ruma-api", | ||||||
|  | @ -2195,8 +2225,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-push-gateway-api" | name = "ruma-push-gateway-api" | ||||||
| version = "0.2.0" | version = "0.3.0" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "js_int", |  "js_int", | ||||||
|  "ruma-api", |  "ruma-api", | ||||||
|  | @ -2210,8 +2240,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-serde" | name = "ruma-serde" | ||||||
| version = "0.4.1" | version = "0.5.0" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "bytes", |  "bytes", | ||||||
|  "form_urlencoded", |  "form_urlencoded", | ||||||
|  | @ -2224,8 +2254,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-serde-macros" | name = "ruma-serde-macros" | ||||||
| version = "0.4.1" | version = "0.5.0" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "proc-macro-crate", |  "proc-macro-crate", | ||||||
|  "proc-macro2", |  "proc-macro2", | ||||||
|  | @ -2235,8 +2265,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-signatures" | name = "ruma-signatures" | ||||||
| version = "0.8.0" | version = "0.9.0" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "base64 0.13.0", |  "base64 0.13.0", | ||||||
|  "ed25519-dalek", |  "ed25519-dalek", | ||||||
|  | @ -2252,8 +2282,8 @@ dependencies = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "ruma-state-res" | name = "ruma-state-res" | ||||||
| version = "0.2.0" | version = "0.3.0" | ||||||
| source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" | source = "git+https://github.com/ruma/ruma?rev=f5ab038e22421ed338396ece977b6b2844772ced#f5ab038e22421ed338396ece977b6b2844772ced" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "itertools 0.10.1", |  "itertools 0.10.1", | ||||||
|  "js_int", |  "js_int", | ||||||
|  | @ -3022,7 +3052,7 @@ version = "0.14.0" | ||||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | source = "registry+https://github.com/rust-lang/crates.io-index" | ||||||
| checksum = "c47440f2979c4cd3138922840eec122e3c0ba2148bc290f756bd7fd60fc97fff" | checksum = "c47440f2979c4cd3138922840eec122e3c0ba2148bc290f756bd7fd60fc97fff" | ||||||
| dependencies = [ | dependencies = [ | ||||||
|  "opentelemetry", |  "opentelemetry 0.15.0", | ||||||
|  "tracing", |  "tracing", | ||||||
|  "tracing-core", |  "tracing-core", | ||||||
|  "tracing-log", |  "tracing-log", | ||||||
|  |  | ||||||
|  | @ -18,8 +18,8 @@ edition = "2018" | ||||||
| rocket = { version = "0.5.0-rc.1", features = ["tls"] } # Used to handle requests | rocket = { version = "0.5.0-rc.1", features = ["tls"] } # Used to handle requests | ||||||
| 
 | 
 | ||||||
| # Used for matrix spec type definitions and helpers | # Used for matrix spec type definitions and helpers | ||||||
| #ruma = { git = "https://github.com/ruma/ruma", rev = "eb19b0e08a901b87d11b3be0890ec788cc760492", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } | ruma = { git = "https://github.com/ruma/ruma", rev = "f5ab038e22421ed338396ece977b6b2844772ced", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } | ||||||
| ruma = { git = "https://github.com/timokoesters/ruma", rev = "a2d93500e1dbc87e7032a3c74f3b2479a7f84e93", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } | #ruma = { git = "https://github.com/timokoesters/ruma", rev = "995ccea20f5f6d4a8fb22041749ed4de22fa1b6a", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } | ||||||
| #ruma = { path = "../ruma/crates/ruma", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } | #ruma = { path = "../ruma/crates/ruma", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } | ||||||
| 
 | 
 | ||||||
| # Used for long polling and federation sender, should be the same as rocket::tokio | # Used for long polling and federation sender, should be the same as rocket::tokio | ||||||
|  | @ -66,11 +66,11 @@ regex = "1.5.4" | ||||||
| jsonwebtoken = "7.2.0" | jsonwebtoken = "7.2.0" | ||||||
| # Performance measurements | # Performance measurements | ||||||
| tracing = { version = "0.1.26", features = ["release_max_level_warn"] } | tracing = { version = "0.1.26", features = ["release_max_level_warn"] } | ||||||
| opentelemetry = "0.15.0" |  | ||||||
| tracing-subscriber = "0.2.19" | tracing-subscriber = "0.2.19" | ||||||
| tracing-opentelemetry = "0.14.0" | tracing-opentelemetry = "0.14.0" | ||||||
| tracing-flame = "0.1.0" | tracing-flame = "0.1.0" | ||||||
| opentelemetry-jaeger = "0.14.0" | opentelemetry = { version = "0.16.0", features = ["rt-tokio"] } | ||||||
|  | opentelemetry-jaeger = { version = "0.15.0", features = ["rt-tokio"] } | ||||||
| pretty_env_logger = "0.4.0" | pretty_env_logger = "0.4.0" | ||||||
| lru-cache = "0.1.2" | lru-cache = "0.1.2" | ||||||
| rusqlite = { version = "0.25.3", optional = true, features = ["bundled"] } | rusqlite = { version = "0.25.3", optional = true, features = ["bundled"] } | ||||||
|  |  | ||||||
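
The dependency changes just above switch opentelemetry to 0.16 and opentelemetry-jaeger to 0.15, both with the "rt-tokio" feature. As a hedged, illustrative sketch (not code from this merge request), a Jaeger pipeline built against those crate versions is typically installed on the Tokio runtime roughly like this; the service name and the exact error type are assumptions:

```rust
// Illustrative only: assumes opentelemetry 0.16 / opentelemetry-jaeger 0.15 with the
// "rt-tokio" feature, as declared in the manifest hunk above, plus tokio with "macros".
use opentelemetry::trace::TraceError;

#[tokio::main]
async fn main() -> Result<(), TraceError> {
    // Batch-export spans on the shared Tokio runtime instead of a dedicated thread.
    let _tracer = opentelemetry_jaeger::new_pipeline()
        .with_service_name("conduit") // service name is an assumption
        .install_batch(opentelemetry::runtime::Tokio)?;

    // ... application code that emits spans would run here ...

    // Flush any spans still buffered by the batch exporter before exiting.
    opentelemetry::global::shutdown_tracer_provider();
    Ok(())
}
```
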
|  | @ -249,6 +249,8 @@ pub async fn register_route( | ||||||
| 
 | 
 | ||||||
|         let room_id = RoomId::new(db.globals.server_name()); |         let room_id = RoomId::new(db.globals.server_name()); | ||||||
| 
 | 
 | ||||||
|  |         db.rooms.get_or_create_shortroomid(&room_id, &db.globals)?; | ||||||
|  | 
 | ||||||
|         let mutex_state = Arc::clone( |         let mutex_state = Arc::clone( | ||||||
|             db.globals |             db.globals | ||||||
|                 .roomid_mutex_state |                 .roomid_mutex_state | ||||||
|  | @ -290,6 +292,7 @@ pub async fn register_route( | ||||||
|                     is_direct: None, |                     is_direct: None, | ||||||
|                     third_party_invite: None, |                     third_party_invite: None, | ||||||
|                     blurhash: None, |                     blurhash: None, | ||||||
|  |                     reason: None, | ||||||
|                 }) |                 }) | ||||||
|                 .expect("event is valid, we just created it"), |                 .expect("event is valid, we just created it"), | ||||||
|                 unsigned: None, |                 unsigned: None, | ||||||
|  | @ -455,6 +458,7 @@ pub async fn register_route( | ||||||
|                     is_direct: None, |                     is_direct: None, | ||||||
|                     third_party_invite: None, |                     third_party_invite: None, | ||||||
|                     blurhash: None, |                     blurhash: None, | ||||||
|  |                     reason: None, | ||||||
|                 }) |                 }) | ||||||
|                 .expect("event is valid, we just created it"), |                 .expect("event is valid, we just created it"), | ||||||
|                 unsigned: None, |                 unsigned: None, | ||||||
|  | @ -476,6 +480,7 @@ pub async fn register_route( | ||||||
|                     is_direct: None, |                     is_direct: None, | ||||||
|                     third_party_invite: None, |                     third_party_invite: None, | ||||||
|                     blurhash: None, |                     blurhash: None, | ||||||
|  |                     reason: None, | ||||||
|                 }) |                 }) | ||||||
|                 .expect("event is valid, we just created it"), |                 .expect("event is valid, we just created it"), | ||||||
|                 unsigned: None, |                 unsigned: None, | ||||||
|  | @ -681,6 +686,7 @@ pub async fn deactivate_route( | ||||||
|             is_direct: None, |             is_direct: None, | ||||||
|             third_party_invite: None, |             third_party_invite: None, | ||||||
|             blurhash: None, |             blurhash: None, | ||||||
|  |             reason: None, | ||||||
|         }; |         }; | ||||||
| 
 | 
 | ||||||
|         let mutex_state = Arc::clone( |         let mutex_state = Arc::clone( | ||||||
|  | @ -731,7 +737,7 @@ pub async fn deactivate_route( | ||||||
| pub async fn third_party_route( | pub async fn third_party_route( | ||||||
|     body: Ruma<get_contacts::Request>, |     body: Ruma<get_contacts::Request>, | ||||||
| ) -> ConduitResult<get_contacts::Response> { | ) -> ConduitResult<get_contacts::Response> { | ||||||
|     let sender_user = body.sender_user.as_ref().expect("user is authenticated"); |     let _sender_user = body.sender_user.as_ref().expect("user is authenticated"); | ||||||
| 
 | 
 | ||||||
|     Ok(get_contacts::Response::new(Vec::new()).into()) |     Ok(get_contacts::Response::new(Vec::new()).into()) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | @ -44,7 +44,7 @@ pub async fn get_context_route( | ||||||
| 
 | 
 | ||||||
|     let events_before = db |     let events_before = db | ||||||
|         .rooms |         .rooms | ||||||
|         .pdus_until(&sender_user, &body.room_id, base_token) |         .pdus_until(&sender_user, &body.room_id, base_token)? | ||||||
|         .take( |         .take( | ||||||
|             u32::try_from(body.limit).map_err(|_| { |             u32::try_from(body.limit).map_err(|_| { | ||||||
|                 Error::BadRequest(ErrorKind::InvalidParam, "Limit value is invalid.") |                 Error::BadRequest(ErrorKind::InvalidParam, "Limit value is invalid.") | ||||||
|  | @ -66,7 +66,7 @@ pub async fn get_context_route( | ||||||
| 
 | 
 | ||||||
|     let events_after = db |     let events_after = db | ||||||
|         .rooms |         .rooms | ||||||
|         .pdus_after(&sender_user, &body.room_id, base_token) |         .pdus_after(&sender_user, &body.room_id, base_token)? | ||||||
|         .take( |         .take( | ||||||
|             u32::try_from(body.limit).map_err(|_| { |             u32::try_from(body.limit).map_err(|_| { | ||||||
|                 Error::BadRequest(ErrorKind::InvalidParam, "Limit value is invalid.") |                 Error::BadRequest(ErrorKind::InvalidParam, "Limit value is invalid.") | ||||||
|  |  | ||||||
|  | @ -262,6 +262,7 @@ pub async fn ban_user_route( | ||||||
|                 is_direct: None, |                 is_direct: None, | ||||||
|                 third_party_invite: None, |                 third_party_invite: None, | ||||||
|                 blurhash: db.users.blurhash(&body.user_id)?, |                 blurhash: db.users.blurhash(&body.user_id)?, | ||||||
|  |                 reason: None, | ||||||
|             }), |             }), | ||||||
|             |event| { |             |event| { | ||||||
|                 let mut event = serde_json::from_value::<Raw<member::MemberEventContent>>( |                 let mut event = serde_json::from_value::<Raw<member::MemberEventContent>>( | ||||||
|  | @ -563,6 +564,7 @@ async fn join_room_by_id_helper( | ||||||
|                 is_direct: None, |                 is_direct: None, | ||||||
|                 third_party_invite: None, |                 third_party_invite: None, | ||||||
|                 blurhash: db.users.blurhash(&sender_user)?, |                 blurhash: db.users.blurhash(&sender_user)?, | ||||||
|  |                 reason: None, | ||||||
|             }) |             }) | ||||||
|             .expect("event is valid, we just created it"), |             .expect("event is valid, we just created it"), | ||||||
|         ); |         ); | ||||||
|  | @ -609,6 +611,8 @@ async fn join_room_by_id_helper( | ||||||
|             ) |             ) | ||||||
|             .await?; |             .await?; | ||||||
| 
 | 
 | ||||||
|  |         db.rooms.get_or_create_shortroomid(&room_id, &db.globals)?; | ||||||
|  | 
 | ||||||
|         let pdu = PduEvent::from_id_val(&event_id, join_event.clone()) |         let pdu = PduEvent::from_id_val(&event_id, join_event.clone()) | ||||||
|             .map_err(|_| Error::BadServerResponse("Invalid join event PDU."))?; |             .map_err(|_| Error::BadServerResponse("Invalid join event PDU."))?; | ||||||
| 
 | 
 | ||||||
|  | @ -693,6 +697,7 @@ async fn join_room_by_id_helper( | ||||||
|             is_direct: None, |             is_direct: None, | ||||||
|             third_party_invite: None, |             third_party_invite: None, | ||||||
|             blurhash: db.users.blurhash(&sender_user)?, |             blurhash: db.users.blurhash(&sender_user)?, | ||||||
|  |             reason: None, | ||||||
|         }; |         }; | ||||||
| 
 | 
 | ||||||
|         db.rooms.build_and_append_pdu( |         db.rooms.build_and_append_pdu( | ||||||
|  | @ -844,6 +849,7 @@ pub async fn invite_helper<'a>( | ||||||
|                 membership: MembershipState::Invite, |                 membership: MembershipState::Invite, | ||||||
|                 third_party_invite: None, |                 third_party_invite: None, | ||||||
|                 blurhash: None, |                 blurhash: None, | ||||||
|  |                 reason: None, | ||||||
|             }) |             }) | ||||||
|             .expect("member event is valid value"); |             .expect("member event is valid value"); | ||||||
| 
 | 
 | ||||||
|  | @ -1038,6 +1044,7 @@ pub async fn invite_helper<'a>( | ||||||
|                 is_direct: Some(is_direct), |                 is_direct: Some(is_direct), | ||||||
|                 third_party_invite: None, |                 third_party_invite: None, | ||||||
|                 blurhash: db.users.blurhash(&user_id)?, |                 blurhash: db.users.blurhash(&user_id)?, | ||||||
|  |                 reason: None, | ||||||
|             }) |             }) | ||||||
|             .expect("event is valid, we just created it"), |             .expect("event is valid, we just created it"), | ||||||
|             unsigned: None, |             unsigned: None, | ||||||
|  |  | ||||||
|  | @ -128,7 +128,7 @@ pub async fn get_message_events_route( | ||||||
|         get_message_events::Direction::Forward => { |         get_message_events::Direction::Forward => { | ||||||
|             let events_after = db |             let events_after = db | ||||||
|                 .rooms |                 .rooms | ||||||
|                 .pdus_after(&sender_user, &body.room_id, from) |                 .pdus_after(&sender_user, &body.room_id, from)? | ||||||
|                 .take(limit) |                 .take(limit) | ||||||
|                 .filter_map(|r| r.ok()) // Filter out buggy events
 |                 .filter_map(|r| r.ok()) // Filter out buggy events
 | ||||||
|                 .filter_map(|(pdu_id, pdu)| { |                 .filter_map(|(pdu_id, pdu)| { | ||||||
|  | @ -158,7 +158,7 @@ pub async fn get_message_events_route( | ||||||
|         get_message_events::Direction::Backward => { |         get_message_events::Direction::Backward => { | ||||||
|             let events_before = db |             let events_before = db | ||||||
|                 .rooms |                 .rooms | ||||||
|                 .pdus_until(&sender_user, &body.room_id, from) |                 .pdus_until(&sender_user, &body.room_id, from)? | ||||||
|                 .take(limit) |                 .take(limit) | ||||||
|                 .filter_map(|r| r.ok()) // Filter out buggy events
 |                 .filter_map(|r| r.ok()) // Filter out buggy events
 | ||||||
|                 .filter_map(|(pdu_id, pdu)| { |                 .filter_map(|(pdu_id, pdu)| { | ||||||
|  |  | ||||||
|  | @ -33,6 +33,8 @@ pub async fn create_room_route( | ||||||
| 
 | 
 | ||||||
|     let room_id = RoomId::new(db.globals.server_name()); |     let room_id = RoomId::new(db.globals.server_name()); | ||||||
| 
 | 
 | ||||||
|  |     db.rooms.get_or_create_shortroomid(&room_id, &db.globals)?; | ||||||
|  | 
 | ||||||
|     let mutex_state = Arc::clone( |     let mutex_state = Arc::clone( | ||||||
|         db.globals |         db.globals | ||||||
|             .roomid_mutex_state |             .roomid_mutex_state | ||||||
|  | @ -105,6 +107,7 @@ pub async fn create_room_route( | ||||||
|                 is_direct: Some(body.is_direct), |                 is_direct: Some(body.is_direct), | ||||||
|                 third_party_invite: None, |                 third_party_invite: None, | ||||||
|                 blurhash: db.users.blurhash(&sender_user)?, |                 blurhash: db.users.blurhash(&sender_user)?, | ||||||
|  |                 reason: None, | ||||||
|             }) |             }) | ||||||
|             .expect("event is valid, we just created it"), |             .expect("event is valid, we just created it"), | ||||||
|             unsigned: None, |             unsigned: None, | ||||||
|  | @ -173,7 +176,6 @@ pub async fn create_room_route( | ||||||
|     )?; |     )?; | ||||||
| 
 | 
 | ||||||
|     // 4. Canonical room alias
 |     // 4. Canonical room alias
 | ||||||
| 
 |  | ||||||
|     if let Some(room_alias_id) = &alias { |     if let Some(room_alias_id) = &alias { | ||||||
|         db.rooms.build_and_append_pdu( |         db.rooms.build_and_append_pdu( | ||||||
|             PduBuilder { |             PduBuilder { | ||||||
|  | @ -193,7 +195,7 @@ pub async fn create_room_route( | ||||||
|             &room_id, |             &room_id, | ||||||
|             &db, |             &db, | ||||||
|             &state_lock, |             &state_lock, | ||||||
|         ); |         )?; | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     // 5. Events set by preset
 |     // 5. Events set by preset
 | ||||||
|  | @ -516,6 +518,7 @@ pub async fn upgrade_room_route( | ||||||
|                 is_direct: None, |                 is_direct: None, | ||||||
|                 third_party_invite: None, |                 third_party_invite: None, | ||||||
|                 blurhash: db.users.blurhash(&sender_user)?, |                 blurhash: db.users.blurhash(&sender_user)?, | ||||||
|  |                 reason: None, | ||||||
|             }) |             }) | ||||||
|             .expect("event is valid, we just created it"), |             .expect("event is valid, we just created it"), | ||||||
|             unsigned: None, |             unsigned: None, | ||||||
|  |  | ||||||
|  | @ -3,7 +3,10 @@ use crate::{database::DatabaseGuard, utils, ConduitResult, Error, Ruma}; | ||||||
| use ruma::{ | use ruma::{ | ||||||
|     api::client::{ |     api::client::{ | ||||||
|         error::ErrorKind, |         error::ErrorKind, | ||||||
|         r0::session::{get_login_types, login, logout, logout_all}, |         r0::{ | ||||||
|  |             session::{get_login_types, login, logout, logout_all}, | ||||||
|  |             uiaa::IncomingUserIdentifier, | ||||||
|  |         }, | ||||||
|     }, |     }, | ||||||
|     UserId, |     UserId, | ||||||
| }; | }; | ||||||
|  | @ -60,7 +63,7 @@ pub async fn login_route( | ||||||
|             identifier, |             identifier, | ||||||
|             password, |             password, | ||||||
|         } => { |         } => { | ||||||
|             let username = if let login::IncomingUserIdentifier::MatrixId(matrix_id) = identifier { |             let username = if let IncomingUserIdentifier::MatrixId(matrix_id) = identifier { | ||||||
|                 matrix_id |                 matrix_id | ||||||
|             } else { |             } else { | ||||||
|                 return Err(Error::BadRequest(ErrorKind::Forbidden, "Bad login type.")); |                 return Err(Error::BadRequest(ErrorKind::Forbidden, "Bad login type.")); | ||||||
|  |  | ||||||
|  | @ -205,7 +205,7 @@ async fn sync_helper( | ||||||
| 
 | 
 | ||||||
|         let mut non_timeline_pdus = db |         let mut non_timeline_pdus = db | ||||||
|             .rooms |             .rooms | ||||||
|             .pdus_until(&sender_user, &room_id, u64::MAX) |             .pdus_until(&sender_user, &room_id, u64::MAX)? | ||||||
|             .filter_map(|r| { |             .filter_map(|r| { | ||||||
|                 // Filter out buggy events
 |                 // Filter out buggy events
 | ||||||
|                 if r.is_err() { |                 if r.is_err() { | ||||||
|  | @ -248,13 +248,13 @@ async fn sync_helper( | ||||||
| 
 | 
 | ||||||
|         let first_pdu_before_since = db |         let first_pdu_before_since = db | ||||||
|             .rooms |             .rooms | ||||||
|             .pdus_until(&sender_user, &room_id, since) |             .pdus_until(&sender_user, &room_id, since)? | ||||||
|             .next() |             .next() | ||||||
|             .transpose()?; |             .transpose()?; | ||||||
| 
 | 
 | ||||||
|         let pdus_after_since = db |         let pdus_after_since = db | ||||||
|             .rooms |             .rooms | ||||||
|             .pdus_after(&sender_user, &room_id, since) |             .pdus_after(&sender_user, &room_id, since)? | ||||||
|             .next() |             .next() | ||||||
|             .is_some(); |             .is_some(); | ||||||
| 
 | 
 | ||||||
|  | @ -286,7 +286,7 @@ async fn sync_helper( | ||||||
| 
 | 
 | ||||||
|                 for hero in db |                 for hero in db | ||||||
|                     .rooms |                     .rooms | ||||||
|                     .all_pdus(&sender_user, &room_id) |                     .all_pdus(&sender_user, &room_id)? | ||||||
|                     .filter_map(|pdu| pdu.ok()) // Ignore all broken pdus
 |                     .filter_map(|pdu| pdu.ok()) // Ignore all broken pdus
 | ||||||
|                     .filter(|(_, pdu)| pdu.kind == EventType::RoomMember) |                     .filter(|(_, pdu)| pdu.kind == EventType::RoomMember) | ||||||
|                     .map(|(_, pdu)| { |                     .map(|(_, pdu)| { | ||||||
|  | @ -328,11 +328,11 @@ async fn sync_helper( | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
| 
 | 
 | ||||||
|             ( |             Ok::<_, Error>(( | ||||||
|                 Some(joined_member_count), |                 Some(joined_member_count), | ||||||
|                 Some(invited_member_count), |                 Some(invited_member_count), | ||||||
|                 heroes, |                 heroes, | ||||||
|             ) |             )) | ||||||
|         }; |         }; | ||||||
| 
 | 
 | ||||||
|         let ( |         let ( | ||||||
|  | @ -343,7 +343,7 @@ async fn sync_helper( | ||||||
|             state_events, |             state_events, | ||||||
|         ) = if since_shortstatehash.is_none() { |         ) = if since_shortstatehash.is_none() { | ||||||
|             // Probably since = 0, we will do an initial sync
 |             // Probably since = 0, we will do an initial sync
 | ||||||
|             let (joined_member_count, invited_member_count, heroes) = calculate_counts(); |             let (joined_member_count, invited_member_count, heroes) = calculate_counts()?; | ||||||
| 
 | 
 | ||||||
|             let current_state_ids = db.rooms.state_full_ids(current_shortstatehash)?; |             let current_state_ids = db.rooms.state_full_ids(current_shortstatehash)?; | ||||||
|             let state_events = current_state_ids |             let state_events = current_state_ids | ||||||
|  | @ -510,7 +510,7 @@ async fn sync_helper( | ||||||
|             } |             } | ||||||
| 
 | 
 | ||||||
|             let (joined_member_count, invited_member_count, heroes) = if send_member_count { |             let (joined_member_count, invited_member_count, heroes) = if send_member_count { | ||||||
|                 calculate_counts() |                 calculate_counts()? | ||||||
|             } else { |             } else { | ||||||
|                 (None, None, Vec::new()) |                 (None, None, Vec::new()) | ||||||
|             }; |             }; | ||||||

279  src/database.rs
							|  | @ -24,13 +24,14 @@ use rocket::{ | ||||||
|     request::{FromRequest, Request}, |     request::{FromRequest, Request}, | ||||||
|     Shutdown, State, |     Shutdown, State, | ||||||
| }; | }; | ||||||
| use ruma::{DeviceId, RoomId, ServerName, UserId}; | use ruma::{DeviceId, EventId, RoomId, ServerName, UserId}; | ||||||
| use serde::{de::IgnoredAny, Deserialize}; | use serde::{de::IgnoredAny, Deserialize}; | ||||||
| use std::{ | use std::{ | ||||||
|     collections::{BTreeMap, HashMap}, |     collections::{BTreeMap, HashMap, HashSet}, | ||||||
|     convert::TryFrom, |     convert::{TryFrom, TryInto}, | ||||||
|     fs::{self, remove_dir_all}, |     fs::{self, remove_dir_all}, | ||||||
|     io::Write, |     io::Write, | ||||||
|  |     mem::size_of, | ||||||
|     ops::Deref, |     ops::Deref, | ||||||
|     path::Path, |     path::Path, | ||||||
|     sync::{Arc, Mutex, RwLock}, |     sync::{Arc, Mutex, RwLock}, | ||||||
|  | @ -107,7 +108,7 @@ fn default_db_cache_capacity_mb() -> f64 { | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| fn default_sqlite_wal_clean_second_interval() -> u32 { | fn default_sqlite_wal_clean_second_interval() -> u32 { | ||||||
|     15 * 60 // every 15 minutes
 |     1 * 60 // every minute
 | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| fn default_max_request_size() -> u32 { | fn default_max_request_size() -> u32 { | ||||||
|  | @ -261,7 +262,11 @@ impl Database { | ||||||
|                 userroomid_highlightcount: builder.open_tree("userroomid_highlightcount")?, |                 userroomid_highlightcount: builder.open_tree("userroomid_highlightcount")?, | ||||||
| 
 | 
 | ||||||
|                 statekey_shortstatekey: builder.open_tree("statekey_shortstatekey")?, |                 statekey_shortstatekey: builder.open_tree("statekey_shortstatekey")?, | ||||||
|                 stateid_shorteventid: builder.open_tree("stateid_shorteventid")?, | 
 | ||||||
|  |                 shortroomid_roomid: builder.open_tree("shortroomid_roomid")?, | ||||||
|  |                 roomid_shortroomid: builder.open_tree("roomid_shortroomid")?, | ||||||
|  | 
 | ||||||
|  |                 shortstatehash_statediff: builder.open_tree("shortstatehash_statediff")?, | ||||||
|                 eventid_shorteventid: builder.open_tree("eventid_shorteventid")?, |                 eventid_shorteventid: builder.open_tree("eventid_shorteventid")?, | ||||||
|                 shorteventid_eventid: builder.open_tree("shorteventid_eventid")?, |                 shorteventid_eventid: builder.open_tree("shorteventid_eventid")?, | ||||||
|                 shorteventid_shortstatehash: builder.open_tree("shorteventid_shortstatehash")?, |                 shorteventid_shortstatehash: builder.open_tree("shorteventid_shortstatehash")?, | ||||||
|  | @ -270,8 +275,12 @@ impl Database { | ||||||
| 
 | 
 | ||||||
|                 eventid_outlierpdu: builder.open_tree("eventid_outlierpdu")?, |                 eventid_outlierpdu: builder.open_tree("eventid_outlierpdu")?, | ||||||
|                 referencedevents: builder.open_tree("referencedevents")?, |                 referencedevents: builder.open_tree("referencedevents")?, | ||||||
|                 pdu_cache: Mutex::new(LruCache::new(1_000_000)), |                 pdu_cache: Mutex::new(LruCache::new(100_000)), | ||||||
|                 auth_chain_cache: Mutex::new(LruCache::new(1_000_000)), |                 auth_chain_cache: Mutex::new(LruCache::new(100_000)), | ||||||
|  |                 shorteventid_cache: Mutex::new(LruCache::new(1_000_000)), | ||||||
|  |                 eventidshort_cache: Mutex::new(LruCache::new(1_000_000)), | ||||||
|  |                 statekeyshort_cache: Mutex::new(LruCache::new(1_000_000)), | ||||||
|  |                 stateinfo_cache: Mutex::new(LruCache::new(50)), | ||||||
|             }, |             }, | ||||||
|             account_data: account_data::AccountData { |             account_data: account_data::AccountData { | ||||||
|                 roomuserdataid_accountdata: builder.open_tree("roomuserdataid_accountdata")?, |                 roomuserdataid_accountdata: builder.open_tree("roomuserdataid_accountdata")?, | ||||||
|  | @ -424,7 +433,6 @@ impl Database { | ||||||
|             } |             } | ||||||
| 
 | 
 | ||||||
|             if db.globals.database_version()? < 6 { |             if db.globals.database_version()? < 6 { | ||||||
|                 // TODO update to 6
 |  | ||||||
|                 // Set room member count
 |                 // Set room member count
 | ||||||
|                 for (roomid, _) in db.rooms.roomid_shortstatehash.iter() { |                 for (roomid, _) in db.rooms.roomid_shortstatehash.iter() { | ||||||
|                     let room_id = |                     let room_id = | ||||||
|  | @ -437,6 +445,261 @@ impl Database { | ||||||
| 
 | 
 | ||||||
|                 println!("Migration: 5 -> 6 finished"); |                 println!("Migration: 5 -> 6 finished"); | ||||||
|             } |             } | ||||||
|  | 
 | ||||||
|  |             if db.globals.database_version()? < 7 { | ||||||
|  |                 // Upgrade state store
 | ||||||
|  |                 let mut last_roomstates: HashMap<RoomId, u64> = HashMap::new(); | ||||||
|  |                 let mut current_sstatehash: Option<u64> = None; | ||||||
|  |                 let mut current_room = None; | ||||||
|  |                 let mut current_state = HashSet::new(); | ||||||
|  |                 let mut counter = 0; | ||||||
|  | 
 | ||||||
|  |                 let mut handle_state = | ||||||
|  |                     |current_sstatehash: u64, | ||||||
|  |                      current_room: &RoomId, | ||||||
|  |                      current_state: HashSet<_>, | ||||||
|  |                      last_roomstates: &mut HashMap<_, _>| { | ||||||
|  |                         counter += 1; | ||||||
|  |                         println!("counter: {}", counter); | ||||||
|  |                         let last_roomsstatehash = last_roomstates.get(current_room); | ||||||
|  | 
 | ||||||
|  |                         let states_parents = last_roomsstatehash.map_or_else( | ||||||
|  |                             || Ok(Vec::new()), | ||||||
|  |                             |&last_roomsstatehash| { | ||||||
|  |                                 db.rooms.load_shortstatehash_info(dbg!(last_roomsstatehash)) | ||||||
|  |                             }, | ||||||
|  |                         )?; | ||||||
|  | 
 | ||||||
|  |                         let (statediffnew, statediffremoved) = | ||||||
|  |                             if let Some(parent_stateinfo) = states_parents.last() { | ||||||
|  |                                 let statediffnew = current_state | ||||||
|  |                                     .difference(&parent_stateinfo.1) | ||||||
|  |                                     .cloned() | ||||||
|  |                                     .collect::<HashSet<_>>(); | ||||||
|  | 
 | ||||||
|  |                                 let statediffremoved = parent_stateinfo | ||||||
|  |                                     .1 | ||||||
|  |                                     .difference(&current_state) | ||||||
|  |                                     .cloned() | ||||||
|  |                                     .collect::<HashSet<_>>(); | ||||||
|  | 
 | ||||||
|  |                                 (statediffnew, statediffremoved) | ||||||
|  |                             } else { | ||||||
|  |                                 (current_state, HashSet::new()) | ||||||
|  |                             }; | ||||||
|  | 
 | ||||||
|  |                         db.rooms.save_state_from_diff( | ||||||
|  |                             dbg!(current_sstatehash), | ||||||
|  |                             statediffnew, | ||||||
|  |                             statediffremoved, | ||||||
|  |                             2, // every state change is 2 event changes on average
 | ||||||
|  |                             states_parents, | ||||||
|  |                         )?; | ||||||
|  | 
 | ||||||
|  |                         /* | ||||||
|  |                         let mut tmp = db.rooms.load_shortstatehash_info(&current_sstatehash, &db)?; | ||||||
|  |                         let state = tmp.pop().unwrap(); | ||||||
|  |                         println!( | ||||||
|  |                             "{}\t{}{:?}: {:?} + {:?} - {:?}", | ||||||
|  |                             current_room, | ||||||
|  |                             "  ".repeat(tmp.len()), | ||||||
|  |                             utils::u64_from_bytes(&current_sstatehash).unwrap(), | ||||||
|  |                             tmp.last().map(|b| utils::u64_from_bytes(&b.0).unwrap()), | ||||||
|  |                             state | ||||||
|  |                                 .2 | ||||||
|  |                                 .iter() | ||||||
|  |                                 .map(|b| utils::u64_from_bytes(&b[size_of::<u64>()..]).unwrap()) | ||||||
|  |                                 .collect::<Vec<_>>(), | ||||||
|  |                             state | ||||||
|  |                                 .3 | ||||||
|  |                                 .iter() | ||||||
|  |                                 .map(|b| utils::u64_from_bytes(&b[size_of::<u64>()..]).unwrap()) | ||||||
|  |                                 .collect::<Vec<_>>() | ||||||
|  |                         ); | ||||||
|  |                         */ | ||||||
|  | 
 | ||||||
|  |                         Ok::<_, Error>(()) | ||||||
|  |                     }; | ||||||
|  | 
 | ||||||
|  |                 for (k, seventid) in db._db.open_tree("stateid_shorteventid")?.iter() { | ||||||
|  |                     let sstatehash = utils::u64_from_bytes(&k[0..size_of::<u64>()]) | ||||||
|  |                         .expect("number of bytes is correct"); | ||||||
|  |                     let sstatekey = k[size_of::<u64>()..].to_vec(); | ||||||
|  |                     if Some(sstatehash) != current_sstatehash { | ||||||
|  |                         if let Some(current_sstatehash) = current_sstatehash { | ||||||
|  |                             handle_state( | ||||||
|  |                                 current_sstatehash, | ||||||
|  |                                 current_room.as_ref().unwrap(), | ||||||
|  |                                 current_state, | ||||||
|  |                                 &mut last_roomstates, | ||||||
|  |                             )?; | ||||||
|  |                             last_roomstates | ||||||
|  |                                 .insert(current_room.clone().unwrap(), current_sstatehash); | ||||||
|  |                         } | ||||||
|  |                         current_state = HashSet::new(); | ||||||
|  |                         current_sstatehash = Some(sstatehash); | ||||||
|  | 
 | ||||||
|  |                         let event_id = db | ||||||
|  |                             .rooms | ||||||
|  |                             .shorteventid_eventid | ||||||
|  |                             .get(&seventid) | ||||||
|  |                             .unwrap() | ||||||
|  |                             .unwrap(); | ||||||
|  |                         let event_id = | ||||||
|  |                             EventId::try_from(utils::string_from_bytes(&event_id).unwrap()) | ||||||
|  |                                 .unwrap(); | ||||||
|  |                         let pdu = db.rooms.get_pdu(&event_id).unwrap().unwrap(); | ||||||
|  | 
 | ||||||
|  |                         if Some(&pdu.room_id) != current_room.as_ref() { | ||||||
|  |                             current_room = Some(pdu.room_id.clone()); | ||||||
|  |                         } | ||||||
|  |                     } | ||||||
|  | 
 | ||||||
|  |                     let mut val = sstatekey; | ||||||
|  |                     val.extend_from_slice(&seventid); | ||||||
|  |                     current_state.insert(val.try_into().expect("size is correct")); | ||||||
|  |                 } | ||||||
|  | 
 | ||||||
|  |                 if let Some(current_sstatehash) = current_sstatehash { | ||||||
|  |                     handle_state( | ||||||
|  |                         current_sstatehash, | ||||||
|  |                         current_room.as_ref().unwrap(), | ||||||
|  |                         current_state, | ||||||
|  |                         &mut last_roomstates, | ||||||
|  |                     )?; | ||||||
|  |                 } | ||||||
|  | 
 | ||||||
|  |                 db.globals.bump_database_version(7)?; | ||||||
|  | 
 | ||||||
|  |                 println!("Migration: 6 -> 7 finished"); | ||||||
|  |             } | ||||||
|  | 
 | ||||||
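
The 6 -> 7 migration above rebuilds the state store: each state snapshot is saved as a diff (entries added and entries removed) against a parent layer, which is what the `handle_state` closure computes. A minimal sketch of that diff computation, using an illustrative `u64` entry type rather than Conduit's actual compressed state events:

```rust
use std::collections::HashSet;

/// Compute the (added, removed) sets of a snapshot relative to its parent layer.
/// With no parent, the whole snapshot counts as "added", mirroring the closure above.
fn diff_against_parent(
    current: &HashSet<u64>,
    parent: Option<&HashSet<u64>>,
) -> (HashSet<u64>, HashSet<u64>) {
    match parent {
        Some(p) => (
            current.difference(p).copied().collect(), // entries new in this snapshot
            p.difference(current).copied().collect(), // entries dropped since the parent
        ),
        None => (current.clone(), HashSet::new()),
    }
}

fn main() {
    let parent = HashSet::from([1u64, 2, 3]);
    let current = HashSet::from([2u64, 3, 4]);
    let (added, removed) = diff_against_parent(&current, Some(&parent));
    assert!(added.contains(&4) && removed.contains(&1));
}
```
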
|  |             if db.globals.database_version()? < 8 { | ||||||
|  |                 // Generate short room ids for all rooms
 | ||||||
|  |                 for (room_id, _) in db.rooms.roomid_shortstatehash.iter() { | ||||||
|  |                     let shortroomid = db.globals.next_count()?.to_be_bytes(); | ||||||
|  |                     db.rooms.roomid_shortroomid.insert(&room_id, &shortroomid)?; | ||||||
|  |                     db.rooms.shortroomid_roomid.insert(&shortroomid, &room_id)?; | ||||||
|  |                     println!("Migration: 8"); | ||||||
|  |                 } | ||||||
|  |                 // Update pduids db layout
 | ||||||
|  |                 let mut batch = db.rooms.pduid_pdu.iter().filter_map(|(key, v)| { | ||||||
|  |                     if !key.starts_with(b"!") { | ||||||
|  |                         return None; | ||||||
|  |                     } | ||||||
|  |                     let mut parts = key.splitn(2, |&b| b == 0xff); | ||||||
|  |                     let room_id = parts.next().unwrap(); | ||||||
|  |                     let count = parts.next().unwrap(); | ||||||
|  | 
 | ||||||
|  |                     let short_room_id = db | ||||||
|  |                         .rooms | ||||||
|  |                         .roomid_shortroomid | ||||||
|  |                         .get(&room_id) | ||||||
|  |                         .unwrap() | ||||||
|  |                         .expect("shortroomid should exist"); | ||||||
|  | 
 | ||||||
|  |                     let mut new_key = short_room_id; | ||||||
|  |                     new_key.extend_from_slice(count); | ||||||
|  | 
 | ||||||
|  |                     Some((new_key, v)) | ||||||
|  |                 }); | ||||||
|  | 
 | ||||||
|  |                 db.rooms.pduid_pdu.insert_batch(&mut batch)?; | ||||||
|  | 
 | ||||||
|  |                 let mut batch2 = db.rooms.eventid_pduid.iter().filter_map(|(k, value)| { | ||||||
|  |                     if !value.starts_with(b"!") { | ||||||
|  |                         return None; | ||||||
|  |                     } | ||||||
|  |                     let mut parts = value.splitn(2, |&b| b == 0xff); | ||||||
|  |                     let room_id = parts.next().unwrap(); | ||||||
|  |                     let count = parts.next().unwrap(); | ||||||
|  | 
 | ||||||
|  |                     let short_room_id = db | ||||||
|  |                         .rooms | ||||||
|  |                         .roomid_shortroomid | ||||||
|  |                         .get(&room_id) | ||||||
|  |                         .unwrap() | ||||||
|  |                         .expect("shortroomid should exist"); | ||||||
|  | 
 | ||||||
|  |                     let mut new_value = short_room_id; | ||||||
|  |                     new_value.extend_from_slice(count); | ||||||
|  | 
 | ||||||
|  |                     Some((k, new_value)) | ||||||
|  |                 }); | ||||||
|  | 
 | ||||||
|  |                 db.rooms.eventid_pduid.insert_batch(&mut batch2)?; | ||||||
|  | 
 | ||||||
|  |                 db.globals.bump_database_version(8)?; | ||||||
|  | 
 | ||||||
|  |                 println!("Migration: 7 -> 8 finished"); | ||||||
|  |             } | ||||||
|  | 
 | ||||||
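
The 7 -> 8 migration above replaces the room-id prefix of `pduid_pdu` and `eventid_pduid` keys with the new fixed-width short room id. A minimal sketch of the new key layout; the helper name is hypothetical:

```rust
/// Old layout:  <room_id bytes> 0xff <count bytes>
/// New layout:  <shortroomid as 8-byte big-endian> <count bytes>
/// The shortroomid comes from `globals.next_count()` in the migration above.
fn new_pdu_key(shortroomid: u64, count: &[u8]) -> Vec<u8> {
    let mut key = shortroomid.to_be_bytes().to_vec(); // fixed 8-byte prefix
    key.extend_from_slice(count);                      // original count suffix kept as-is
    key
}

fn main() {
    let key = new_pdu_key(42, &7u64.to_be_bytes());
    assert_eq!(key.len(), 16); // 8-byte room prefix + 8-byte count
}
```
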
|  |             if db.globals.database_version()? < 9 { | ||||||
|  |                 // Update tokenids db layout
 | ||||||
|  |                 let batch = db | ||||||
|  |                     .rooms | ||||||
|  |                     .tokenids | ||||||
|  |                     .iter() | ||||||
|  |                     .filter_map(|(key, _)| { | ||||||
|  |                         if !key.starts_with(b"!") { | ||||||
|  |                             return None; | ||||||
|  |                         } | ||||||
|  |                         let mut parts = key.splitn(4, |&b| b == 0xff); | ||||||
|  |                         let room_id = parts.next().unwrap(); | ||||||
|  |                         let word = parts.next().unwrap(); | ||||||
|  |                         let _pdu_id_room = parts.next().unwrap(); | ||||||
|  |                         let pdu_id_count = parts.next().unwrap(); | ||||||
|  | 
 | ||||||
|  |                         let short_room_id = db | ||||||
|  |                             .rooms | ||||||
|  |                             .roomid_shortroomid | ||||||
|  |                             .get(&room_id) | ||||||
|  |                             .unwrap() | ||||||
|  |                             .expect("shortroomid should exist"); | ||||||
|  |                         let mut new_key = short_room_id; | ||||||
|  |                         new_key.extend_from_slice(word); | ||||||
|  |                         new_key.push(0xff); | ||||||
|  |                         new_key.extend_from_slice(pdu_id_count); | ||||||
|  |                         println!("old {:?}", key); | ||||||
|  |                         println!("new {:?}", new_key); | ||||||
|  |                         Some((new_key, Vec::new())) | ||||||
|  |                     }) | ||||||
|  |                     .collect::<Vec<_>>(); | ||||||
|  | 
 | ||||||
|  |                 let mut iter = batch.into_iter().peekable(); | ||||||
|  | 
 | ||||||
|  |                 while iter.peek().is_some() { | ||||||
|  |                     db.rooms | ||||||
|  |                         .tokenids | ||||||
|  |                         .insert_batch(&mut iter.by_ref().take(1000))?; | ||||||
|  |                     println!("smaller batch done"); | ||||||
|  |                 } | ||||||
|  | 
 | ||||||
|  |                 println!("Deleting starts"); | ||||||
|  | 
 | ||||||
|  |                 let batch2 = db | ||||||
|  |                     .rooms | ||||||
|  |                     .tokenids | ||||||
|  |                     .iter() | ||||||
|  |                     .filter_map(|(key, _)| { | ||||||
|  |                         if key.starts_with(b"!") { | ||||||
|  |                             println!("del {:?}", key); | ||||||
|  |                             Some(key) | ||||||
|  |                         } else { | ||||||
|  |                             None | ||||||
|  |                         } | ||||||
|  |                     }) | ||||||
|  |                     .collect::<Vec<_>>(); | ||||||
|  | 
 | ||||||
|  |                 for key in batch2 { | ||||||
|  |                     println!("del"); | ||||||
|  |                     db.rooms.tokenids.remove(&key)?; | ||||||
|  |                 } | ||||||
|  | 
 | ||||||
|  |                 db.globals.bump_database_version(9)?; | ||||||
|  | 
 | ||||||
|  |                 println!("Migration: 8 -> 9 finished"); | ||||||
|  |             } | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         let guard = db.read().await; |         let guard = db.read().await; | ||||||
|  |  | ||||||
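
The 8 -> 9 migration above applies the same idea to the full-text search index: `tokenids` keys drop the room id and the pdu-id room part in favour of the short room id prefix, and the rewritten keys are inserted in chunks of 1000 to keep each batch small. An illustrative sketch of the new key shape; the helper name is hypothetical:

```rust
/// Old layout:  <room_id> 0xff <word> 0xff <pdu id room part> 0xff <pdu id count part>
/// New layout:  <shortroomid, 8-byte big-endian> <word> 0xff <pdu id count part>
fn new_token_key(shortroomid: u64, word: &[u8], pdu_count: &[u8]) -> Vec<u8> {
    let mut key = shortroomid.to_be_bytes().to_vec();
    key.extend_from_slice(word);
    key.push(0xff);
    key.extend_from_slice(pdu_count);
    key
}
```
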
|  | @ -35,6 +35,7 @@ pub trait Tree: Send + Sync { | ||||||
|     ) -> Box<dyn Iterator<Item = (Vec<u8>, Vec<u8>)> + 'a>; |     ) -> Box<dyn Iterator<Item = (Vec<u8>, Vec<u8>)> + 'a>; | ||||||
| 
 | 
 | ||||||
|     fn increment(&self, key: &[u8]) -> Result<Vec<u8>>; |     fn increment(&self, key: &[u8]) -> Result<Vec<u8>>; | ||||||
|  |     fn increment_batch<'a>(&self, iter: &mut dyn Iterator<Item = Vec<u8>>) -> Result<()>; | ||||||
| 
 | 
 | ||||||
|     fn scan_prefix<'a>( |     fn scan_prefix<'a>( | ||||||
|         &'a self, |         &'a self, | ||||||
|  |  | ||||||
|  | @ -9,15 +9,13 @@ use std::{ | ||||||
|     path::{Path, PathBuf}, |     path::{Path, PathBuf}, | ||||||
|     pin::Pin, |     pin::Pin, | ||||||
|     sync::Arc, |     sync::Arc, | ||||||
|     time::{Duration, Instant}, |  | ||||||
| }; | }; | ||||||
| use tokio::sync::oneshot::Sender; | use tokio::sync::oneshot::Sender; | ||||||
| use tracing::{debug, warn}; | use tracing::debug; | ||||||
| 
 |  | ||||||
| pub const MILLI: Duration = Duration::from_millis(1); |  | ||||||
| 
 | 
 | ||||||
| thread_local! { | thread_local! { | ||||||
|     static READ_CONNECTION: RefCell<Option<&'static Connection>> = RefCell::new(None); |     static READ_CONNECTION: RefCell<Option<&'static Connection>> = RefCell::new(None); | ||||||
|  |     static READ_CONNECTION_ITERATOR: RefCell<Option<&'static Connection>> = RefCell::new(None); | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| struct PreparedStatementIterator<'a> { | struct PreparedStatementIterator<'a> { | ||||||
|  | @ -51,11 +49,11 @@ impl Engine { | ||||||
|     fn prepare_conn(path: &Path, cache_size_kb: u32) -> Result<Connection> { |     fn prepare_conn(path: &Path, cache_size_kb: u32) -> Result<Connection> { | ||||||
|         let conn = Connection::open(&path)?; |         let conn = Connection::open(&path)?; | ||||||
| 
 | 
 | ||||||
|         conn.pragma_update(Some(Main), "page_size", &32768)?; |         conn.pragma_update(Some(Main), "page_size", &2048)?; | ||||||
|         conn.pragma_update(Some(Main), "journal_mode", &"WAL")?; |         conn.pragma_update(Some(Main), "journal_mode", &"WAL")?; | ||||||
|         conn.pragma_update(Some(Main), "synchronous", &"NORMAL")?; |         conn.pragma_update(Some(Main), "synchronous", &"NORMAL")?; | ||||||
|         conn.pragma_update(Some(Main), "cache_size", &(-i64::from(cache_size_kb)))?; |         conn.pragma_update(Some(Main), "cache_size", &(-i64::from(cache_size_kb)))?; | ||||||
|         conn.pragma_update(Some(Main), "wal_autocheckpoint", &0)?; |         conn.pragma_update(Some(Main), "wal_autocheckpoint", &2000)?; | ||||||
| 
 | 
 | ||||||
|         Ok(conn) |         Ok(conn) | ||||||
|     } |     } | ||||||
|  | @ -79,9 +77,25 @@ impl Engine { | ||||||
|         }) |         }) | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|  |     fn read_lock_iterator(&self) -> &'static Connection { | ||||||
|  |         READ_CONNECTION_ITERATOR.with(|cell| { | ||||||
|  |             let connection = &mut cell.borrow_mut(); | ||||||
|  | 
 | ||||||
|  |             if (*connection).is_none() { | ||||||
|  |                 let c = Box::leak(Box::new( | ||||||
|  |                     Self::prepare_conn(&self.path, self.cache_size_per_thread).unwrap(), | ||||||
|  |                 )); | ||||||
|  |                 **connection = Some(c); | ||||||
|  |             } | ||||||
|  | 
 | ||||||
|  |             connection.unwrap() | ||||||
|  |         }) | ||||||
|  |     } | ||||||
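The new `read_lock_iterator` mirrors the existing `read_lock`: each thread lazily opens its own SQLite connection, leaks it to obtain a `'static` borrow, and caches it in a thread-local so long-lived iterators never share a connection across threads. A rough standalone sketch of that caching idiom; `open_handle` and the `String` payload are stand-ins, not Conduit types:

    use std::cell::RefCell;

    // Stand-in for an expensive per-thread resource such as a database connection.
    fn open_handle() -> String {
        "connection".to_owned()
    }

    thread_local! {
        // One cached handle per thread; None until first use.
        static HANDLE: RefCell<Option<&'static String>> = RefCell::new(None);
    }

    fn with_thread_handle() -> &'static String {
        HANDLE.with(|cell| {
            let mut slot = cell.borrow_mut();
            if slot.is_none() {
                // Box::leak turns the freshly opened handle into a &'static
                // reference this thread can reuse for its whole lifetime.
                *slot = Some(Box::leak(Box::new(open_handle())));
            }
            slot.unwrap()
        })
    }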
|  | 
 | ||||||
|     pub fn flush_wal(self: &Arc<Self>) -> Result<()> { |     pub fn flush_wal(self: &Arc<Self>) -> Result<()> { | ||||||
|         self.write_lock() |         // We use autocheckpoints
 | ||||||
|             .pragma_update(Some(Main), "wal_checkpoint", &"TRUNCATE")?; |         //self.write_lock()
 | ||||||
|  |         //.pragma_update(Some(Main), "wal_checkpoint", &"TRUNCATE")?;
 | ||||||
|         Ok(()) |         Ok(()) | ||||||
|     } |     } | ||||||
| } | } | ||||||
|  | @ -153,6 +167,34 @@ impl SqliteTable { | ||||||
|         )?; |         )?; | ||||||
|         Ok(()) |         Ok(()) | ||||||
|     } |     } | ||||||
|  | 
 | ||||||
|  |     pub fn iter_with_guard<'a>( | ||||||
|  |         &'a self, | ||||||
|  |         guard: &'a Connection, | ||||||
|  |     ) -> Box<dyn Iterator<Item = TupleOfBytes> + 'a> { | ||||||
|  |         let statement = Box::leak(Box::new( | ||||||
|  |             guard | ||||||
|  |                 .prepare(&format!( | ||||||
|  |                     "SELECT key, value FROM {} ORDER BY key ASC", | ||||||
|  |                     &self.name | ||||||
|  |                 )) | ||||||
|  |                 .unwrap(), | ||||||
|  |         )); | ||||||
|  | 
 | ||||||
|  |         let statement_ref = NonAliasingBox(statement); | ||||||
|  | 
 | ||||||
|  |         let iterator = Box::new( | ||||||
|  |             statement | ||||||
|  |                 .query_map([], |row| Ok((row.get_unwrap(0), row.get_unwrap(1)))) | ||||||
|  |                 .unwrap() | ||||||
|  |                 .map(|r| r.unwrap()), | ||||||
|  |         ); | ||||||
|  | 
 | ||||||
|  |         Box::new(PreparedStatementIterator { | ||||||
|  |             iterator, | ||||||
|  |             statement_ref, | ||||||
|  |         }) | ||||||
|  |     } | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| impl Tree for SqliteTable { | impl Tree for SqliteTable { | ||||||
|  | @ -164,16 +206,7 @@ impl Tree for SqliteTable { | ||||||
|     #[tracing::instrument(skip(self, key, value))] |     #[tracing::instrument(skip(self, key, value))] | ||||||
|     fn insert(&self, key: &[u8], value: &[u8]) -> Result<()> { |     fn insert(&self, key: &[u8], value: &[u8]) -> Result<()> { | ||||||
|         let guard = self.engine.write_lock(); |         let guard = self.engine.write_lock(); | ||||||
| 
 |  | ||||||
|         let start = Instant::now(); |  | ||||||
| 
 |  | ||||||
|         self.insert_with_guard(&guard, key, value)?; |         self.insert_with_guard(&guard, key, value)?; | ||||||
| 
 |  | ||||||
|         let elapsed = start.elapsed(); |  | ||||||
|         if elapsed > MILLI { |  | ||||||
|             warn!("insert took {:?} : {}", elapsed, &self.name); |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
|         drop(guard); |         drop(guard); | ||||||
| 
 | 
 | ||||||
|         let watchers = self.watchers.read(); |         let watchers = self.watchers.read(); | ||||||
|  | @ -216,53 +249,41 @@ impl Tree for SqliteTable { | ||||||
|         Ok(()) |         Ok(()) | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|  |     #[tracing::instrument(skip(self, iter))] | ||||||
|  |     fn increment_batch<'a>(&self, iter: &mut dyn Iterator<Item = Vec<u8>>) -> Result<()> { | ||||||
|  |         let guard = self.engine.write_lock(); | ||||||
|  | 
 | ||||||
|  |         guard.execute("BEGIN", [])?; | ||||||
|  |         for key in iter { | ||||||
|  |             let old = self.get_with_guard(&guard, &key)?; | ||||||
|  |             let new = crate::utils::increment(old.as_deref()) | ||||||
|  |                 .expect("utils::increment always returns Some"); | ||||||
|  |             self.insert_with_guard(&guard, &key, &new)?; | ||||||
|  |         } | ||||||
|  |         guard.execute("COMMIT", [])?; | ||||||
|  | 
 | ||||||
|  |         drop(guard); | ||||||
|  | 
 | ||||||
|  |         Ok(()) | ||||||
|  |     } | ||||||
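The new `increment_batch` wraps many read-modify-write increments in a single `BEGIN`/`COMMIT`, so the whole batch hits the WAL once instead of once per key. A rough equivalent using rusqlite's `Transaction` API; the `kv` table and the big-endian 8-byte counter encoding are assumptions for this sketch, and Conduit's real `utils::increment` may encode counters differently:

    use rusqlite::{params, Connection, Result};

    // Assumed counter encoding: an 8-byte big-endian u64, starting at 1.
    fn increment(old: Option<&[u8]>) -> Vec<u8> {
        let next = old
            .and_then(|bytes| <[u8; 8]>::try_from(bytes).ok())
            .map(u64::from_be_bytes)
            .map_or(1, |n| n + 1);
        next.to_be_bytes().to_vec()
    }

    // Apply the increment to every key inside one transaction, mirroring the
    // BEGIN/COMMIT wrapping in increment_batch above. Table name is a placeholder.
    fn increment_all(conn: &mut Connection, keys: &[Vec<u8>]) -> Result<()> {
        let tx = conn.transaction()?;
        for key in keys {
            // Missing rows (and, in this sketch, read errors) fall back to None.
            let old: Option<Vec<u8>> = tx
                .query_row("SELECT value FROM kv WHERE key = ?", params![key], |row| {
                    row.get(0)
                })
                .ok();
            let new = increment(old.as_deref());
            tx.execute(
                "INSERT OR REPLACE INTO kv (key, value) VALUES (?, ?)",
                params![key, new],
            )?;
        }
        tx.commit()
    }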
|  | 
 | ||||||
|     #[tracing::instrument(skip(self, key))] |     #[tracing::instrument(skip(self, key))] | ||||||
|     fn remove(&self, key: &[u8]) -> Result<()> { |     fn remove(&self, key: &[u8]) -> Result<()> { | ||||||
|         let guard = self.engine.write_lock(); |         let guard = self.engine.write_lock(); | ||||||
| 
 | 
 | ||||||
|         let start = Instant::now(); |  | ||||||
| 
 |  | ||||||
|         guard.execute( |         guard.execute( | ||||||
|             format!("DELETE FROM {} WHERE key = ?", self.name).as_str(), |             format!("DELETE FROM {} WHERE key = ?", self.name).as_str(), | ||||||
|             [key], |             [key], | ||||||
|         )?; |         )?; | ||||||
| 
 | 
 | ||||||
|         let elapsed = start.elapsed(); |  | ||||||
| 
 |  | ||||||
|         if elapsed > MILLI { |  | ||||||
|             debug!("remove:    took {:012?} : {}", elapsed, &self.name); |  | ||||||
|         } |  | ||||||
|         // debug!("remove key: {:?}", &key);
 |  | ||||||
| 
 |  | ||||||
|         Ok(()) |         Ok(()) | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     #[tracing::instrument(skip(self))] |     #[tracing::instrument(skip(self))] | ||||||
|     fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = TupleOfBytes> + 'a> { |     fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = TupleOfBytes> + 'a> { | ||||||
|         let guard = self.engine.read_lock(); |         let guard = self.engine.read_lock_iterator(); | ||||||
| 
 | 
 | ||||||
|         let statement = Box::leak(Box::new( |         self.iter_with_guard(&guard) | ||||||
|             guard |  | ||||||
|                 .prepare(&format!( |  | ||||||
|                     "SELECT key, value FROM {} ORDER BY key ASC", |  | ||||||
|                     &self.name |  | ||||||
|                 )) |  | ||||||
|                 .unwrap(), |  | ||||||
|         )); |  | ||||||
| 
 |  | ||||||
|         let statement_ref = NonAliasingBox(statement); |  | ||||||
| 
 |  | ||||||
|         let iterator = Box::new( |  | ||||||
|             statement |  | ||||||
|                 .query_map([], |row| Ok((row.get_unwrap(0), row.get_unwrap(1)))) |  | ||||||
|                 .unwrap() |  | ||||||
|                 .map(|r| r.unwrap()), |  | ||||||
|         ); |  | ||||||
| 
 |  | ||||||
|         Box::new(PreparedStatementIterator { |  | ||||||
|             iterator, |  | ||||||
|             statement_ref, |  | ||||||
|         }) |  | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     #[tracing::instrument(skip(self, from, backwards))] |     #[tracing::instrument(skip(self, from, backwards))] | ||||||
|  | @ -271,7 +292,7 @@ impl Tree for SqliteTable { | ||||||
|         from: &[u8], |         from: &[u8], | ||||||
|         backwards: bool, |         backwards: bool, | ||||||
|     ) -> Box<dyn Iterator<Item = TupleOfBytes> + 'a> { |     ) -> Box<dyn Iterator<Item = TupleOfBytes> + 'a> { | ||||||
|         let guard = self.engine.read_lock(); |         let guard = self.engine.read_lock_iterator(); | ||||||
|         let from = from.to_vec(); // TODO change interface?
 |         let from = from.to_vec(); // TODO change interface?
 | ||||||
| 
 | 
 | ||||||
|         if backwards { |         if backwards { | ||||||
|  | @ -326,8 +347,6 @@ impl Tree for SqliteTable { | ||||||
|     fn increment(&self, key: &[u8]) -> Result<Vec<u8>> { |     fn increment(&self, key: &[u8]) -> Result<Vec<u8>> { | ||||||
|         let guard = self.engine.write_lock(); |         let guard = self.engine.write_lock(); | ||||||
| 
 | 
 | ||||||
|         let start = Instant::now(); |  | ||||||
| 
 |  | ||||||
|         let old = self.get_with_guard(&guard, key)?; |         let old = self.get_with_guard(&guard, key)?; | ||||||
| 
 | 
 | ||||||
|         let new = |         let new = | ||||||
|  | @ -335,26 +354,11 @@ impl Tree for SqliteTable { | ||||||
| 
 | 
 | ||||||
|         self.insert_with_guard(&guard, key, &new)?; |         self.insert_with_guard(&guard, key, &new)?; | ||||||
| 
 | 
 | ||||||
|         let elapsed = start.elapsed(); |  | ||||||
| 
 |  | ||||||
|         if elapsed > MILLI { |  | ||||||
|             debug!("increment: took {:012?} : {}", elapsed, &self.name); |  | ||||||
|         } |  | ||||||
|         // debug!("increment key: {:?}", &key);
 |  | ||||||
| 
 |  | ||||||
|         Ok(new) |         Ok(new) | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     #[tracing::instrument(skip(self, prefix))] |     #[tracing::instrument(skip(self, prefix))] | ||||||
|     fn scan_prefix<'a>(&'a self, prefix: Vec<u8>) -> Box<dyn Iterator<Item = TupleOfBytes> + 'a> { |     fn scan_prefix<'a>(&'a self, prefix: Vec<u8>) -> Box<dyn Iterator<Item = TupleOfBytes> + 'a> { | ||||||
|         // let name = self.name.clone();
 |  | ||||||
|         // self.iter_from_thread(
 |  | ||||||
|         //     format!(
 |  | ||||||
|         //         "SELECT key, value FROM {} WHERE key BETWEEN ?1 AND ?1 || X'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF' ORDER BY key ASC",
 |  | ||||||
|         //         name
 |  | ||||||
|         //     )
 |  | ||||||
|         //     [prefix]
 |  | ||||||
|         // )
 |  | ||||||
|         Box::new( |         Box::new( | ||||||
|             self.iter_from(&prefix, false) |             self.iter_from(&prefix, false) | ||||||
|                 .take_while(move |(key, _)| key.starts_with(&prefix)), |                 .take_while(move |(key, _)| key.starts_with(&prefix)), | ||||||
|  |  | ||||||
										
											
File diff suppressed because it is too large
							|  | @ -4,11 +4,14 @@ use crate::{client_server::SESSION_ID_LENGTH, utils, Error, Result}; | ||||||
| use ruma::{ | use ruma::{ | ||||||
|     api::client::{ |     api::client::{ | ||||||
|         error::ErrorKind, |         error::ErrorKind, | ||||||
|         r0::uiaa::{IncomingAuthData, UiaaInfo}, |         r0::uiaa::{ | ||||||
|  |             IncomingAuthData, IncomingPassword, IncomingUserIdentifier::MatrixId, UiaaInfo, | ||||||
|  |         }, | ||||||
|     }, |     }, | ||||||
|     signatures::CanonicalJsonValue, |     signatures::CanonicalJsonValue, | ||||||
|     DeviceId, UserId, |     DeviceId, UserId, | ||||||
| }; | }; | ||||||
|  | use tracing::error; | ||||||
| 
 | 
 | ||||||
| use super::abstraction::Tree; | use super::abstraction::Tree; | ||||||
| 
 | 
 | ||||||
|  | @ -49,14 +52,8 @@ impl Uiaa { | ||||||
|         users: &super::users::Users, |         users: &super::users::Users, | ||||||
|         globals: &super::globals::Globals, |         globals: &super::globals::Globals, | ||||||
|     ) -> Result<(bool, UiaaInfo)> { |     ) -> Result<(bool, UiaaInfo)> { | ||||||
|         if let IncomingAuthData::DirectRequest { |         let mut uiaainfo = auth | ||||||
|             kind, |             .session() | ||||||
|             session, |  | ||||||
|             auth_parameters, |  | ||||||
|         } = &auth |  | ||||||
|         { |  | ||||||
|             let mut uiaainfo = session |  | ||||||
|                 .as_ref() |  | ||||||
|             .map(|session| self.get_uiaa_session(&user_id, &device_id, session)) |             .map(|session| self.get_uiaa_session(&user_id, &device_id, session)) | ||||||
|             .unwrap_or_else(|| Ok(uiaainfo.clone()))?; |             .unwrap_or_else(|| Ok(uiaainfo.clone()))?; | ||||||
| 
 | 
 | ||||||
|  | @ -64,55 +61,29 @@ impl Uiaa { | ||||||
|             uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH)); |             uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH)); | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|  |         match auth { | ||||||
|             // Find out what the user completed
 |             // Find out what the user completed
 | ||||||
|             match &**kind { |             IncomingAuthData::Password(IncomingPassword { | ||||||
|                 "m.login.password" => { |                 identifier, | ||||||
|                     let identifier = auth_parameters.get("identifier").ok_or(Error::BadRequest( |                 password, | ||||||
|                         ErrorKind::MissingParam, |                 .. | ||||||
|                         "m.login.password needs identifier.", |             }) => { | ||||||
|                     ))?; |                 let username = match identifier { | ||||||
| 
 |                     MatrixId(username) => username, | ||||||
|                     let identifier_type = identifier.get("type").ok_or(Error::BadRequest( |                     _ => { | ||||||
|                         ErrorKind::MissingParam, |  | ||||||
|                         "Identifier needs a type.", |  | ||||||
|                     ))?; |  | ||||||
| 
 |  | ||||||
|                     if identifier_type != "m.id.user" { |  | ||||||
|                         return Err(Error::BadRequest( |                         return Err(Error::BadRequest( | ||||||
|                             ErrorKind::Unrecognized, |                             ErrorKind::Unrecognized, | ||||||
|                             "Identifier type not recognized.", |                             "Identifier type not recognized.", | ||||||
|                         )); |                         )) | ||||||
|                     } |                     } | ||||||
|  |                 }; | ||||||
| 
 | 
 | ||||||
|                     let username = identifier |                 let user_id = | ||||||
|                         .get("user") |                     UserId::parse_with_server_name(username.clone(), globals.server_name()) | ||||||
|                         .ok_or(Error::BadRequest( |  | ||||||
|                             ErrorKind::MissingParam, |  | ||||||
|                             "Identifier needs user field.", |  | ||||||
|                         ))? |  | ||||||
|                         .as_str() |  | ||||||
|                         .ok_or(Error::BadRequest( |  | ||||||
|                             ErrorKind::BadJson, |  | ||||||
|                             "User is not a string.", |  | ||||||
|                         ))?; |  | ||||||
| 
 |  | ||||||
|                     let user_id = UserId::parse_with_server_name(username, globals.server_name()) |  | ||||||
|                         .map_err(|_| { |                         .map_err(|_| { | ||||||
|                             Error::BadRequest(ErrorKind::InvalidParam, "User ID is invalid.") |                             Error::BadRequest(ErrorKind::InvalidParam, "User ID is invalid.") | ||||||
|                         })?; |                         })?; | ||||||
| 
 | 
 | ||||||
|                     let password = auth_parameters |  | ||||||
|                         .get("password") |  | ||||||
|                         .ok_or(Error::BadRequest( |  | ||||||
|                             ErrorKind::MissingParam, |  | ||||||
|                             "Password is missing.", |  | ||||||
|                         ))? |  | ||||||
|                         .as_str() |  | ||||||
|                         .ok_or(Error::BadRequest( |  | ||||||
|                             ErrorKind::BadJson, |  | ||||||
|                             "Password is not a string.", |  | ||||||
|                         ))?; |  | ||||||
| 
 |  | ||||||
|                 // Check if password is correct
 |                 // Check if password is correct
 | ||||||
|                 if let Some(hash) = users.password_hash(&user_id)? { |                 if let Some(hash) = users.password_hash(&user_id)? { | ||||||
|                     let hash_matches = |                     let hash_matches = | ||||||
|  | @ -130,10 +101,10 @@ impl Uiaa { | ||||||
|                 // Password was correct! Let's add it to `completed`
 |                 // Password was correct! Let's add it to `completed`
 | ||||||
|                 uiaainfo.completed.push("m.login.password".to_owned()); |                 uiaainfo.completed.push("m.login.password".to_owned()); | ||||||
|             } |             } | ||||||
|                 "m.login.dummy" => { |             IncomingAuthData::Dummy(_) => { | ||||||
|                 uiaainfo.completed.push("m.login.dummy".to_owned()); |                 uiaainfo.completed.push("m.login.dummy".to_owned()); | ||||||
|             } |             } | ||||||
|                 k => panic!("type not supported: {}", k), |             k => error!("type not supported: {:?}", k), | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         // Check if a flow now succeeds
 |         // Check if a flow now succeeds
 | ||||||
|  | @ -166,9 +137,6 @@ impl Uiaa { | ||||||
|             None, |             None, | ||||||
|         )?; |         )?; | ||||||
|         Ok((true, uiaainfo)) |         Ok((true, uiaainfo)) | ||||||
|         } else { |  | ||||||
|             panic!("FallbackAcknowledgement is not supported yet"); |  | ||||||
|         } |  | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     fn set_uiaa_request( |     fn set_uiaa_request( | ||||||
|  |  | ||||||
							
								
								
									
11 src/main.rs
							|  | @ -17,7 +17,7 @@ use std::sync::Arc; | ||||||
| use database::Config; | use database::Config; | ||||||
| pub use database::Database; | pub use database::Database; | ||||||
| pub use error::{Error, Result}; | pub use error::{Error, Result}; | ||||||
| use opentelemetry::trace::Tracer; | use opentelemetry::trace::{FutureExt, Tracer}; | ||||||
| pub use pdu::PduEvent; | pub use pdu::PduEvent; | ||||||
| pub use rocket::State; | pub use rocket::State; | ||||||
| use ruma::api::client::error::ErrorKind; | use ruma::api::client::error::ErrorKind; | ||||||
|  | @ -220,14 +220,17 @@ async fn main() { | ||||||
|     }; |     }; | ||||||
| 
 | 
 | ||||||
|     if config.allow_jaeger { |     if config.allow_jaeger { | ||||||
|  |         opentelemetry::global::set_text_map_propagator(opentelemetry_jaeger::Propagator::new()); | ||||||
|         let tracer = opentelemetry_jaeger::new_pipeline() |         let tracer = opentelemetry_jaeger::new_pipeline() | ||||||
|             .with_service_name("conduit") |             .install_batch(opentelemetry::runtime::Tokio) | ||||||
|             .install_simple() |  | ||||||
|             .unwrap(); |             .unwrap(); | ||||||
| 
 | 
 | ||||||
|         let span = tracer.start("conduit"); |         let span = tracer.start("conduit"); | ||||||
|         start.await; |         start.with_current_context().await; | ||||||
|         drop(span); |         drop(span); | ||||||
|  | 
 | ||||||
|  |         println!("exporting"); | ||||||
|  |         opentelemetry::global::shutdown_tracer_provider(); | ||||||
|     } else { |     } else { | ||||||
|         std::env::set_var("RUST_LOG", &config.log); |         std::env::set_var("RUST_LOG", &config.log); | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
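For context on the main.rs hunk above: the tracing setup moves from the simple exporter to the Tokio batch exporter, registers the Jaeger propagator, and flushes the provider explicitly before exit. A compressed sketch of that pattern against opentelemetry 0.16 / opentelemetry-jaeger 0.15 (the versions pinned in this Cargo.lock); the service name and the traced future are placeholders:

    use opentelemetry::trace::{FutureExt, Tracer};

    #[tokio::main]
    async fn main() {
        // Register the Jaeger propagator for trace-context injection/extraction.
        opentelemetry::global::set_text_map_propagator(opentelemetry_jaeger::Propagator::new());

        // The batch exporter runs on the Tokio runtime instead of blocking per span.
        let tracer = opentelemetry_jaeger::new_pipeline()
            .with_service_name("example-service")
            .install_batch(opentelemetry::runtime::Tokio)
            .unwrap();

        let span = tracer.start("root");
        async { /* application future goes here */ }
            .with_current_context()
            .await;
        drop(span);

        // Flush any spans still buffered by the batch exporter.
        opentelemetry::global::shutdown_tracer_provider();
    }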
|  | @ -12,7 +12,7 @@ use ruma::{ | ||||||
| use serde::{Deserialize, Serialize}; | use serde::{Deserialize, Serialize}; | ||||||
| use serde_json::json; | use serde_json::json; | ||||||
| use std::{cmp::Ordering, collections::BTreeMap, convert::TryFrom}; | use std::{cmp::Ordering, collections::BTreeMap, convert::TryFrom}; | ||||||
| use tracing::error; | use tracing::warn; | ||||||
| 
 | 
 | ||||||
| #[derive(Clone, Deserialize, Serialize, Debug)] | #[derive(Clone, Deserialize, Serialize, Debug)] | ||||||
| pub struct PduEvent { | pub struct PduEvent { | ||||||
|  | @ -322,7 +322,7 @@ pub(crate) fn gen_event_id_canonical_json( | ||||||
|     pdu: &Raw<ruma::events::pdu::Pdu>, |     pdu: &Raw<ruma::events::pdu::Pdu>, | ||||||
| ) -> crate::Result<(EventId, CanonicalJsonObject)> { | ) -> crate::Result<(EventId, CanonicalJsonObject)> { | ||||||
|     let value = serde_json::from_str(pdu.json().get()).map_err(|e| { |     let value = serde_json::from_str(pdu.json().get()).map_err(|e| { | ||||||
|         error!("{:?}: {:?}", pdu, e); |         warn!("Error parsing incoming event {:?}: {:?}", pdu, e); | ||||||
|         Error::BadServerResponse("Invalid PDU in server response") |         Error::BadServerResponse("Invalid PDU in server response") | ||||||
|     })?; |     })?; | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -111,7 +111,7 @@ impl FedDest { | ||||||
|     } |     } | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| #[tracing::instrument(skip(globals))] | #[tracing::instrument(skip(globals, request))] | ||||||
| pub async fn send_request<T: OutgoingRequest>( | pub async fn send_request<T: OutgoingRequest>( | ||||||
|     globals: &crate::database::globals::Globals, |     globals: &crate::database::globals::Globals, | ||||||
|     destination: &ServerName, |     destination: &ServerName, | ||||||
|  | @ -254,7 +254,7 @@ where | ||||||
|             }); // TODO: handle timeout
 |             }); // TODO: handle timeout
 | ||||||
| 
 | 
 | ||||||
|             if status != 200 { |             if status != 200 { | ||||||
|                 info!( |                 warn!( | ||||||
|                     "{} {}: {}", |                     "{} {}: {}", | ||||||
|                     url, |                     url, | ||||||
|                     status, |                     status, | ||||||
|  | @ -272,14 +272,20 @@ where | ||||||
|             if status == 200 { |             if status == 200 { | ||||||
|                 let response = T::IncomingResponse::try_from_http_response(http_response); |                 let response = T::IncomingResponse::try_from_http_response(http_response); | ||||||
|                 response.map_err(|e| { |                 response.map_err(|e| { | ||||||
|                     warn!("Invalid 200 response from {}: {}", &destination, e); |                     warn!( | ||||||
|  |                         "Invalid 200 response from {} on: {} {}", | ||||||
|  |                         &destination, url, e | ||||||
|  |                     ); | ||||||
|                     Error::BadServerResponse("Server returned bad 200 response.") |                     Error::BadServerResponse("Server returned bad 200 response.") | ||||||
|                 }) |                 }) | ||||||
|             } else { |             } else { | ||||||
|                 Err(Error::FederationError( |                 Err(Error::FederationError( | ||||||
|                     destination.to_owned(), |                     destination.to_owned(), | ||||||
|                     RumaError::try_from_http_response(http_response).map_err(|e| { |                     RumaError::try_from_http_response(http_response).map_err(|e| { | ||||||
|                         warn!("Server returned bad error response: {}", e); |                         warn!( | ||||||
|  |                             "Invalid {} response from {} on: {} {}", | ||||||
|  |                             status, &destination, url, e | ||||||
|  |                         ); | ||||||
|                         Error::BadServerResponse("Server returned bad error response.") |                         Error::BadServerResponse("Server returned bad error response.") | ||||||
|                     })?, |                     })?, | ||||||
|                 )) |                 )) | ||||||
|  | @ -495,7 +501,7 @@ pub fn get_server_keys_route(db: DatabaseGuard) -> Json<String> { | ||||||
|     ) |     ) | ||||||
|     .unwrap(); |     .unwrap(); | ||||||
| 
 | 
 | ||||||
|     Json(ruma::serde::to_canonical_json_string(&response).expect("JSON is canonical")) |     Json(serde_json::to_string(&response).expect("JSON is canonical")) | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| #[cfg_attr(feature = "conduit_bin", get("/_matrix/key/v2/server/<_>"))] | #[cfg_attr(feature = "conduit_bin", get("/_matrix/key/v2/server/<_>"))] | ||||||
|  | @ -668,7 +674,7 @@ pub async fn send_transaction_message_route( | ||||||
| 
 | 
 | ||||||
|         let elapsed = start_time.elapsed(); |         let elapsed = start_time.elapsed(); | ||||||
|         warn!( |         warn!( | ||||||
|             "Handling event {} took {}m{}s", |             "Handling transaction of event {} took {}m{}s", | ||||||
|             event_id, |             event_id, | ||||||
|             elapsed.as_secs() / 60, |             elapsed.as_secs() / 60, | ||||||
|             elapsed.as_secs() % 60 |             elapsed.as_secs() % 60 | ||||||
|  | @ -721,7 +727,8 @@ pub async fn send_transaction_message_route( | ||||||
|                                 &db.globals, |                                 &db.globals, | ||||||
|                             )?; |                             )?; | ||||||
|                         } else { |                         } else { | ||||||
|                             warn!("No known event ids in read receipt: {:?}", user_updates); |                             // TODO fetch missing events
 | ||||||
|  |                             debug!("No known event ids in read receipt: {:?}", user_updates); | ||||||
|                         } |                         } | ||||||
|                     } |                     } | ||||||
|                 } |                 } | ||||||
|  | @ -839,7 +846,7 @@ type AsyncRecursiveType<'a, T> = Pin<Box<dyn Future<Output = T> + 'a + Send>>; | ||||||
| /// 14. Use state resolution to find new room state
 | /// 14. Use state resolution to find new room state
 | ||||||
| // We use some AsyncRecursiveType hacks here so we can call this async funtion recursively
 | // We use some AsyncRecursiveType hacks here so we can call this async funtion recursively
 | ||||||
| #[tracing::instrument(skip(value, is_timeline_event, db, pub_key_map))] | #[tracing::instrument(skip(value, is_timeline_event, db, pub_key_map))] | ||||||
| pub fn handle_incoming_pdu<'a>( | pub async fn handle_incoming_pdu<'a>( | ||||||
|     origin: &'a ServerName, |     origin: &'a ServerName, | ||||||
|     event_id: &'a EventId, |     event_id: &'a EventId, | ||||||
|     room_id: &'a RoomId, |     room_id: &'a RoomId, | ||||||
|  | @ -847,9 +854,7 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|     is_timeline_event: bool, |     is_timeline_event: bool, | ||||||
|     db: &'a Database, |     db: &'a Database, | ||||||
|     pub_key_map: &'a RwLock<BTreeMap<String, BTreeMap<String, String>>>, |     pub_key_map: &'a RwLock<BTreeMap<String, BTreeMap<String, String>>>, | ||||||
| ) -> AsyncRecursiveType<'a, StdResult<Option<Vec<u8>>, String>> { | ) -> StdResult<Option<Vec<u8>>, String> { | ||||||
|     Box::pin(async move { |  | ||||||
|         // TODO: For RoomVersion6 we must check that Raw<..> is canonical do we anywhere?: https://matrix.org/docs/spec/rooms/v6#canonical-json
 |  | ||||||
|     match db.rooms.exists(&room_id) { |     match db.rooms.exists(&room_id) { | ||||||
|         Ok(true) => {} |         Ok(true) => {} | ||||||
|         _ => { |         _ => { | ||||||
|  | @ -862,6 +867,162 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|         return Ok(Some(pdu_id.to_vec())); |         return Ok(Some(pdu_id.to_vec())); | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|  |     let create_event = db | ||||||
|  |         .rooms | ||||||
|  |         .room_state_get(&room_id, &EventType::RoomCreate, "") | ||||||
|  |         .map_err(|_| "Failed to ask database for event.".to_owned())? | ||||||
|  |         .ok_or_else(|| "Failed to find create event in db.".to_owned())?; | ||||||
|  | 
 | ||||||
|  |     let (incoming_pdu, val) = handle_outlier_pdu( | ||||||
|  |         origin, | ||||||
|  |         &create_event, | ||||||
|  |         event_id, | ||||||
|  |         room_id, | ||||||
|  |         value, | ||||||
|  |         db, | ||||||
|  |         pub_key_map, | ||||||
|  |     ) | ||||||
|  |     .await?; | ||||||
|  | 
 | ||||||
|  |     // 8. if not timeline event: stop
 | ||||||
|  |     if !is_timeline_event { | ||||||
|  |         return Ok(None); | ||||||
|  |     } | ||||||
|  | 
 | ||||||
|  |     // 9. Fetch any missing prev events doing all checks listed here starting at 1. These are timeline events
 | ||||||
|  |     let mut graph = HashMap::new(); | ||||||
|  |     let mut eventid_info = HashMap::new(); | ||||||
|  |     let mut todo_outlier_stack = incoming_pdu.prev_events.clone(); | ||||||
|  | 
 | ||||||
|  |     let mut amount = 0; | ||||||
|  | 
 | ||||||
|  |     while let Some(prev_event_id) = todo_outlier_stack.pop() { | ||||||
|  |         if let Some((pdu, json_opt)) = fetch_and_handle_outliers( | ||||||
|  |             db, | ||||||
|  |             origin, | ||||||
|  |             &[prev_event_id.clone()], | ||||||
|  |             &create_event, | ||||||
|  |             &room_id, | ||||||
|  |             pub_key_map, | ||||||
|  |         ) | ||||||
|  |         .await | ||||||
|  |         .pop() | ||||||
|  |         { | ||||||
|  |             if amount > 100 { | ||||||
|  |                 // Max limit reached
 | ||||||
|  |                 warn!("Max prev event limit reached!"); | ||||||
|  |                 graph.insert(prev_event_id.clone(), HashSet::new()); | ||||||
|  |                 continue; | ||||||
|  |             } | ||||||
|  | 
 | ||||||
|  |             if let Some(json) = | ||||||
|  |                 json_opt.or_else(|| db.rooms.get_outlier_pdu_json(&prev_event_id).ok().flatten()) | ||||||
|  |             { | ||||||
|  |                 if pdu.origin_server_ts | ||||||
|  |                     > db.rooms | ||||||
|  |                         .first_pdu_in_room(&room_id) | ||||||
|  |                         .map_err(|_| "Error loading first room event.".to_owned())? | ||||||
|  |                         .expect("Room exists") | ||||||
|  |                         .origin_server_ts | ||||||
|  |                 { | ||||||
|  |                     amount += 1; | ||||||
|  |                     for prev_prev in &pdu.prev_events { | ||||||
|  |                         if !graph.contains_key(prev_prev) { | ||||||
|  |                             todo_outlier_stack.push(dbg!(prev_prev.clone())); | ||||||
|  |                         } | ||||||
|  |                     } | ||||||
|  | 
 | ||||||
|  |                     graph.insert( | ||||||
|  |                         prev_event_id.clone(), | ||||||
|  |                         pdu.prev_events.iter().cloned().collect(), | ||||||
|  |                     ); | ||||||
|  |                     eventid_info.insert(prev_event_id.clone(), (pdu, json)); | ||||||
|  |                 } else { | ||||||
|  |                     // Time based check failed
 | ||||||
|  |                     graph.insert(prev_event_id.clone(), HashSet::new()); | ||||||
|  |                     eventid_info.insert(prev_event_id.clone(), (pdu, json)); | ||||||
|  |                 } | ||||||
|  |             } else { | ||||||
|  |                 // Get json failed
 | ||||||
|  |                 graph.insert(prev_event_id.clone(), HashSet::new()); | ||||||
|  |             } | ||||||
|  |         } else { | ||||||
|  |             // Fetch and handle failed
 | ||||||
|  |             graph.insert(prev_event_id.clone(), HashSet::new()); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  | 
 | ||||||
|  |     let sorted = | ||||||
|  |         state_res::StateResolution::lexicographical_topological_sort(dbg!(&graph), |event_id| { | ||||||
|  |             // This return value is the key used for sorting events,
 | ||||||
|  |             // events are then sorted by power level, time,
 | ||||||
|  |             // and lexically by event_id.
 | ||||||
|  |             println!("{}", event_id); | ||||||
|  |             Ok(( | ||||||
|  |                 0, | ||||||
|  |                 MilliSecondsSinceUnixEpoch( | ||||||
|  |                     eventid_info | ||||||
|  |                         .get(event_id) | ||||||
|  |                         .map_or_else(|| uint!(0), |info| info.0.origin_server_ts.clone()), | ||||||
|  |                 ), | ||||||
|  |                 ruma::event_id!("$notimportant"), | ||||||
|  |             )) | ||||||
|  |         }) | ||||||
|  |         .map_err(|_| "Error sorting prev events".to_owned())?; | ||||||
|  | 
 | ||||||
|  |     for prev_id in dbg!(sorted) { | ||||||
|  |         if let Some((pdu, json)) = eventid_info.remove(&prev_id) { | ||||||
|  |             let start_time = Instant::now(); | ||||||
|  |             let event_id = pdu.event_id.clone(); | ||||||
|  |             if let Err(e) = upgrade_outlier_to_timeline_pdu( | ||||||
|  |                 pdu, | ||||||
|  |                 json, | ||||||
|  |                 &create_event, | ||||||
|  |                 origin, | ||||||
|  |                 db, | ||||||
|  |                 room_id, | ||||||
|  |                 pub_key_map, | ||||||
|  |             ) | ||||||
|  |             .await | ||||||
|  |             { | ||||||
|  |                 warn!("Prev event {} failed: {}", event_id, e); | ||||||
|  |             } | ||||||
|  |             let elapsed = start_time.elapsed(); | ||||||
|  |             warn!( | ||||||
|  |                 "Handling prev event {} took {}m{}s", | ||||||
|  |                 event_id, | ||||||
|  |                 elapsed.as_secs() / 60, | ||||||
|  |                 elapsed.as_secs() % 60 | ||||||
|  |             ); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  | 
 | ||||||
|  |     upgrade_outlier_to_timeline_pdu( | ||||||
|  |         incoming_pdu, | ||||||
|  |         val, | ||||||
|  |         &create_event, | ||||||
|  |         origin, | ||||||
|  |         db, | ||||||
|  |         room_id, | ||||||
|  |         pub_key_map, | ||||||
|  |     ) | ||||||
|  |     .await | ||||||
|  | } | ||||||
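One detail in the prev-event handling above: the closure passed to `lexicographical_topological_sort` returns a `(power_level, timestamp, event_id)` tuple, and tuples in Rust compare field by field, so with a constant power level the comparison falls through to the timestamp and only then to the event id. A tiny self-contained illustration with made-up values:

    fn main() {
        // (power_level, origin_server_ts, event_id) — tuples compare lexicographically.
        let mut keys = vec![
            (0, 170_000_000_u64, "$later"),
            (0, 160_000_000_u64, "$earlier"),
            (0, 170_000_000_u64, "$earlier_id_same_ts"),
        ];
        keys.sort();
        assert_eq!(keys[0].2, "$earlier");
        assert_eq!(keys[1].2, "$earlier_id_same_ts");
    }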
|  | 
 | ||||||
|  | #[tracing::instrument(skip(origin, create_event, event_id, room_id, value, db, pub_key_map))] | ||||||
|  | fn handle_outlier_pdu<'a>( | ||||||
|  |     origin: &'a ServerName, | ||||||
|  |     create_event: &'a PduEvent, | ||||||
|  |     event_id: &'a EventId, | ||||||
|  |     room_id: &'a RoomId, | ||||||
|  |     value: BTreeMap<String, CanonicalJsonValue>, | ||||||
|  |     db: &'a Database, | ||||||
|  |     pub_key_map: &'a RwLock<BTreeMap<String, BTreeMap<String, String>>>, | ||||||
|  | ) -> AsyncRecursiveType<'a, StdResult<(Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>), String>> | ||||||
|  | { | ||||||
|  |     Box::pin(async move { | ||||||
|  |         // TODO: For RoomVersion6 we must check that Raw<..> is canonical do we anywhere?: https://matrix.org/docs/spec/rooms/v6#canonical-json
 | ||||||
|  | 
 | ||||||
|         // We go through all the signatures we see on the value and fetch the corresponding signing
 |         // We go through all the signatures we see on the value and fetch the corresponding signing
 | ||||||
|         // keys
 |         // keys
 | ||||||
|         fetch_required_signing_keys(&value, &pub_key_map, db) |         fetch_required_signing_keys(&value, &pub_key_map, db) | ||||||
|  | @ -870,11 +1031,6 @@ pub fn handle_incoming_pdu<'a>( | ||||||
| 
 | 
 | ||||||
|         // 2. Check signatures, otherwise drop
 |         // 2. Check signatures, otherwise drop
 | ||||||
|         // 3. check content hash, redact if doesn't match
 |         // 3. check content hash, redact if doesn't match
 | ||||||
|         let create_event = db |  | ||||||
|             .rooms |  | ||||||
|             .room_state_get(&room_id, &EventType::RoomCreate, "") |  | ||||||
|             .map_err(|_| "Failed to ask database for event.".to_owned())? |  | ||||||
|             .ok_or_else(|| "Failed to find create event in db.".to_owned())?; |  | ||||||
| 
 | 
 | ||||||
|         let create_event_content = |         let create_event_content = | ||||||
|             serde_json::from_value::<Raw<CreateEventContent>>(create_event.content.clone()) |             serde_json::from_value::<Raw<CreateEventContent>>(create_event.content.clone()) | ||||||
|  | @ -921,13 +1077,13 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|         // 5. Reject "due to auth events" if can't get all the auth events or some of the auth events are also rejected "due to auth events"
 |         // 5. Reject "due to auth events" if can't get all the auth events or some of the auth events are also rejected "due to auth events"
 | ||||||
|         // EDIT: Step 5 is not applied anymore because it failed too often
 |         // EDIT: Step 5 is not applied anymore because it failed too often
 | ||||||
|         debug!("Fetching auth events for {}", incoming_pdu.event_id); |         debug!("Fetching auth events for {}", incoming_pdu.event_id); | ||||||
|         fetch_and_handle_events( |         fetch_and_handle_outliers( | ||||||
|             db, |             db, | ||||||
|             origin, |             origin, | ||||||
|             &incoming_pdu.auth_events, |             &incoming_pdu.auth_events, | ||||||
|  |             &create_event, | ||||||
|             &room_id, |             &room_id, | ||||||
|             pub_key_map, |             pub_key_map, | ||||||
|             false, |  | ||||||
|         ) |         ) | ||||||
|         .await; |         .await; | ||||||
| 
 | 
 | ||||||
|  | @ -1010,31 +1166,23 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|             .map_err(|_| "Failed to add pdu as outlier.".to_owned())?; |             .map_err(|_| "Failed to add pdu as outlier.".to_owned())?; | ||||||
|         debug!("Added pdu as outlier."); |         debug!("Added pdu as outlier."); | ||||||
| 
 | 
 | ||||||
|         // 8. if not timeline event: stop
 |         Ok((incoming_pdu, val)) | ||||||
|         if !is_timeline_event |     }) | ||||||
|             || incoming_pdu.origin_server_ts | } | ||||||
|                 < db.rooms | 
 | ||||||
|                     .first_pdu_in_room(&room_id) | #[tracing::instrument(skip(incoming_pdu, val, create_event, origin, db, room_id, pub_key_map))] | ||||||
|                     .map_err(|_| "Error loading first room event.".to_owned())? | async fn upgrade_outlier_to_timeline_pdu( | ||||||
|                     .expect("Room exists") |     incoming_pdu: Arc<PduEvent>, | ||||||
|                     .origin_server_ts |     val: BTreeMap<String, CanonicalJsonValue>, | ||||||
|         { |     create_event: &PduEvent, | ||||||
|             return Ok(None); |     origin: &ServerName, | ||||||
|  |     db: &Database, | ||||||
|  |     room_id: &RoomId, | ||||||
|  |     pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, String>>>, | ||||||
|  | ) -> StdResult<Option<Vec<u8>>, String> { | ||||||
|  |     if let Ok(Some(pduid)) = db.rooms.get_pdu_id(&incoming_pdu.event_id) { | ||||||
|  |         return Ok(Some(pduid)); | ||||||
|     } |     } | ||||||
| 
 |  | ||||||
|         // Load missing prev events first
 |  | ||||||
|         fetch_and_handle_events( |  | ||||||
|             db, |  | ||||||
|             origin, |  | ||||||
|             &incoming_pdu.prev_events, |  | ||||||
|             &room_id, |  | ||||||
|             pub_key_map, |  | ||||||
|             true, |  | ||||||
|         ) |  | ||||||
|         .await; |  | ||||||
| 
 |  | ||||||
|         // TODO: 9. fetch any missing prev events doing all checks listed here starting at 1. These are timeline events
 |  | ||||||
| 
 |  | ||||||
|     // 10. Fetch missing state and auth chain events by calling /state_ids at backwards extremities
 |     // 10. Fetch missing state and auth chain events by calling /state_ids at backwards extremities
 | ||||||
|     //     doing all the checks in this list starting at 1. These are not timeline events.
 |     //     doing all the checks in this list starting at 1. These are not timeline events.
 | ||||||
| 
 | 
 | ||||||
|  | @ -1046,24 +1194,27 @@ pub fn handle_incoming_pdu<'a>( | ||||||
| 
 | 
 | ||||||
|     if incoming_pdu.prev_events.len() == 1 { |     if incoming_pdu.prev_events.len() == 1 { | ||||||
|         let prev_event = &incoming_pdu.prev_events[0]; |         let prev_event = &incoming_pdu.prev_events[0]; | ||||||
|             let state = db |         let prev_event_sstatehash = db | ||||||
|             .rooms |             .rooms | ||||||
|             .pdu_shortstatehash(prev_event) |             .pdu_shortstatehash(prev_event) | ||||||
|                 .map_err(|_| "Failed talking to db".to_owned())? |             .map_err(|_| "Failed talking to db".to_owned())?; | ||||||
|                 .map(|shortstatehash| db.rooms.state_full_ids(shortstatehash).ok()) | 
 | ||||||
|                 .flatten(); |         let state = | ||||||
|             if let Some(state) = state { |             prev_event_sstatehash.map(|shortstatehash| db.rooms.state_full_ids(shortstatehash)); | ||||||
|                 let mut state = fetch_and_handle_events( | 
 | ||||||
|  |         if let Some(Ok(state)) = state { | ||||||
|  |             warn!("Using cached state"); | ||||||
|  |             let mut state = fetch_and_handle_outliers( | ||||||
|                 db, |                 db, | ||||||
|                 origin, |                 origin, | ||||||
|                 &state.into_iter().collect::<Vec<_>>(), |                 &state.into_iter().collect::<Vec<_>>(), | ||||||
|  |                 &create_event, | ||||||
|                 &room_id, |                 &room_id, | ||||||
|                 pub_key_map, |                 pub_key_map, | ||||||
|                     false, |  | ||||||
|             ) |             ) | ||||||
|             .await |             .await | ||||||
|             .into_iter() |             .into_iter() | ||||||
|                 .map(|pdu| { |             .map(|(pdu, _)| { | ||||||
|                 ( |                 ( | ||||||
|                     ( |                     ( | ||||||
|                         pdu.kind.clone(), |                         pdu.kind.clone(), | ||||||
|  | @ -1076,7 +1227,8 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|             }) |             }) | ||||||
|             .collect::<HashMap<_, _>>(); |             .collect::<HashMap<_, _>>(); | ||||||
| 
 | 
 | ||||||
|                 let prev_pdu = db.rooms.get_pdu(prev_event).ok().flatten().ok_or_else(|| { |             let prev_pdu = | ||||||
|  |                 db.rooms.get_pdu(prev_event).ok().flatten().ok_or_else(|| { | ||||||
|                     "Could not find prev event, but we know the state.".to_owned() |                     "Could not find prev event, but we know the state.".to_owned() | ||||||
|                 })?; |                 })?; | ||||||
| 
 | 
 | ||||||
|  | @ -1090,6 +1242,7 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     if state_at_incoming_event.is_none() { |     if state_at_incoming_event.is_none() { | ||||||
|  |         warn!("Calling /state_ids"); | ||||||
|         // Call /state_ids to find out what the state at this pdu is. We trust the server's
 |         // Call /state_ids to find out what the state at this pdu is. We trust the server's
 | ||||||
|         // response to some extend, but we still do a lot of checks on the events
 |         // response to some extend, but we still do a lot of checks on the events
 | ||||||
|         match db |         match db | ||||||
|  | @ -1106,27 +1259,31 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|         { |         { | ||||||
|             Ok(res) => { |             Ok(res) => { | ||||||
|                 debug!("Fetching state events at event."); |                 debug!("Fetching state events at event."); | ||||||
|                     let state_vec = fetch_and_handle_events( |                 let state_vec = fetch_and_handle_outliers( | ||||||
|                     &db, |                     &db, | ||||||
|                     origin, |                     origin, | ||||||
|                     &res.pdu_ids, |                     &res.pdu_ids, | ||||||
|  |                     &create_event, | ||||||
|                     &room_id, |                     &room_id, | ||||||
|                     pub_key_map, |                     pub_key_map, | ||||||
|                         false, |  | ||||||
|                 ) |                 ) | ||||||
|                 .await; |                 .await; | ||||||
| 
 | 
 | ||||||
|                 let mut state = HashMap::new(); |                 let mut state = HashMap::new(); | ||||||
|                     for pdu in state_vec { |                 for (pdu, _) in state_vec { | ||||||
|                         match state.entry((pdu.kind.clone(), pdu.state_key.clone().ok_or_else(|| "Found non-state pdu in state events.".to_owned())?)) { |                     match state.entry(( | ||||||
|  |                         pdu.kind.clone(), | ||||||
|  |                         pdu.state_key | ||||||
|  |                             .clone() | ||||||
|  |                             .ok_or_else(|| "Found non-state pdu in state events.".to_owned())?, | ||||||
|  |                     )) { | ||||||
|                         Entry::Vacant(v) => { |                         Entry::Vacant(v) => { | ||||||
|                             v.insert(pdu); |                             v.insert(pdu); | ||||||
|                         } |                         } | ||||||
|                             Entry::Occupied(_) => { |                         Entry::Occupied(_) => return Err( | ||||||
|                                 return Err( |                             "State event's type and state_key combination exists multiple times." | ||||||
|                                     "State event's type and state_key combination exists multiple times.".to_owned(), |                                 .to_owned(), | ||||||
|                                 ) |                         ), | ||||||
|                             } |  | ||||||
|                     } |                     } | ||||||
|                 } |                 } | ||||||
| 
 | 
 | ||||||
|  | @ -1140,13 +1297,13 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|                 } |                 } | ||||||
| 
 | 
 | ||||||
|                 debug!("Fetching auth chain events at event."); |                 debug!("Fetching auth chain events at event."); | ||||||
|                     fetch_and_handle_events( |                 fetch_and_handle_outliers( | ||||||
|                     &db, |                     &db, | ||||||
|                     origin, |                     origin, | ||||||
|                     &res.auth_chain_ids, |                     &res.auth_chain_ids, | ||||||
|  |                     &create_event, | ||||||
|                     &room_id, |                     &room_id, | ||||||
|                     pub_key_map, |                     pub_key_map, | ||||||
|                         false, |  | ||||||
|                 ) |                 ) | ||||||
|                 .await; |                 .await; | ||||||
| 
 | 
 | ||||||
|  | @ -1162,6 +1319,27 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|         state_at_incoming_event.expect("we always set this to some above"); |         state_at_incoming_event.expect("we always set this to some above"); | ||||||
| 
 | 
 | ||||||
|     // 11. Check the auth of the event passes based on the state of the event
 |     // 11. Check the auth of the event passes based on the state of the event
 | ||||||
|  |     let create_event_content = | ||||||
|  |         serde_json::from_value::<Raw<CreateEventContent>>(create_event.content.clone()) | ||||||
|  |             .expect("Raw::from_value always works.") | ||||||
|  |             .deserialize() | ||||||
|  |             .map_err(|_| "Invalid PowerLevels event in db.".to_owned())?; | ||||||
|  | 
 | ||||||
|  |     let room_version_id = &create_event_content.room_version; | ||||||
|  |     let room_version = RoomVersion::new(room_version_id).expect("room version is supported"); | ||||||
|  | 
 | ||||||
|  |     // If the previous event was the create event special rules apply
 | ||||||
|  |     let previous_create = if incoming_pdu.auth_events.len() == 1 | ||||||
|  |         && incoming_pdu.prev_events == incoming_pdu.auth_events | ||||||
|  |     { | ||||||
|  |         db.rooms | ||||||
|  |             .get_pdu(&incoming_pdu.auth_events[0]) | ||||||
|  |             .map_err(|e| e.to_string())? | ||||||
|  |             .filter(|maybe_create| **maybe_create == *create_event) | ||||||
|  |     } else { | ||||||
|  |         None | ||||||
|  |     }; | ||||||
|  | 
 | ||||||
|     if !state_res::event_auth::auth_check( |     if !state_res::event_auth::auth_check( | ||||||
|         &room_version, |         &room_version, | ||||||
|         &incoming_pdu, |         &incoming_pdu, | ||||||
|  | @ -1204,6 +1382,18 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|     // Only keep those extremities were not referenced yet
 |     // Only keep those extremities were not referenced yet
 | ||||||
|     extremities.retain(|id| !matches!(db.rooms.is_event_referenced(&room_id, id), Ok(true))); |     extremities.retain(|id| !matches!(db.rooms.is_event_referenced(&room_id, id), Ok(true))); | ||||||
| 
 | 
 | ||||||
|  |     let current_statehash = db | ||||||
|  |         .rooms | ||||||
|  |         .current_shortstatehash(&room_id) | ||||||
|  |         .map_err(|_| "Failed to load current state hash.".to_owned())? | ||||||
|  |         .expect("every room has state"); | ||||||
|  | 
 | ||||||
|  |     let current_state = db | ||||||
|  |         .rooms | ||||||
|  |         .state_full(current_statehash) | ||||||
|  |         .map_err(|_| "Failed to load room state.")?; | ||||||
|  | 
 | ||||||
|  |     if incoming_pdu.state_key.is_some() { | ||||||
|         let mut extremity_statehashes = Vec::new(); |         let mut extremity_statehashes = Vec::new(); | ||||||
| 
 | 
 | ||||||
|         for id in &extremities { |         for id in &extremities { | ||||||
|  | @ -1239,16 +1429,6 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|         //     don't just trust a set of state we got from a remote).
 |         //     don't just trust a set of state we got from a remote).
 | ||||||
| 
 | 
 | ||||||
|         // We do this by adding the current state to the list of fork states
 |         // We do this by adding the current state to the list of fork states
 | ||||||
|         let current_statehash = db |  | ||||||
|             .rooms |  | ||||||
|             .current_shortstatehash(&room_id) |  | ||||||
|             .map_err(|_| "Failed to load current state hash.".to_owned())? |  | ||||||
|             .expect("every room has state"); |  | ||||||
| 
 |  | ||||||
|         let current_state = db |  | ||||||
|             .rooms |  | ||||||
|             .state_full(current_statehash) |  | ||||||
|             .map_err(|_| "Failed to load room state.")?; |  | ||||||
| 
 | 
 | ||||||
|         extremity_statehashes.push((current_statehash.clone(), None)); |         extremity_statehashes.push((current_statehash.clone(), None)); | ||||||
| 
 | 
 | ||||||
|  | @ -1271,7 +1451,6 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         // We also add state after incoming event to the fork states
 |         // We also add state after incoming event to the fork states
 | ||||||
|         extremities.insert(incoming_pdu.event_id.clone()); |  | ||||||
|         let mut state_after = state_at_incoming_event.clone(); |         let mut state_after = state_at_incoming_event.clone(); | ||||||
|         if let Some(state_key) = &incoming_pdu.state_key { |         if let Some(state_key) = &incoming_pdu.state_key { | ||||||
|             state_after.insert( |             state_after.insert( | ||||||
|  | @ -1309,7 +1488,8 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|             for state in fork_states { |             for state in fork_states { | ||||||
|                 auth_chain_sets.push( |                 auth_chain_sets.push( | ||||||
|                     get_auth_chain(state.iter().map(|(_, id)| id.clone()).collect(), db) |                     get_auth_chain(state.iter().map(|(_, id)| id.clone()).collect(), db) | ||||||
|                         .map_err(|_| "Failed to load auth chain.".to_owned())?, |                         .map_err(|_| "Failed to load auth chain.".to_owned())? | ||||||
|  |                         .collect(), | ||||||
|                 ); |                 ); | ||||||
|             } |             } | ||||||
| 
 | 
 | ||||||
|  | @ -1335,6 +1515,17 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|             state |             state | ||||||
|         }; |         }; | ||||||
| 
 | 
 | ||||||
|  |         // Set the new room state to the resolved state
 | ||||||
|  |         if update_state { | ||||||
|  |             db.rooms | ||||||
|  |                 .force_state(&room_id, new_room_state, &db) | ||||||
|  |                 .map_err(|_| "Failed to set new room state.".to_owned())?; | ||||||
|  |         } | ||||||
|  |         debug!("Updated resolved state"); | ||||||
|  |     } | ||||||
|  | 
 | ||||||
|  |     extremities.insert(incoming_pdu.event_id.clone()); | ||||||
|  | 
 | ||||||
|     debug!("starting soft fail auth check"); |     debug!("starting soft fail auth check"); | ||||||
|     // 13. Check if the event passes auth based on the "current state" of the room, if not "soft fail" it
 |     // 13. Check if the event passes auth based on the "current state" of the room, if not "soft fail" it
 | ||||||
|     let soft_fail = !state_res::event_auth::auth_check( |     let soft_fail = !state_res::event_auth::auth_check( | ||||||
|  | @ -1367,14 +1558,6 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|         warn!("Event was soft failed: {:?}", incoming_pdu); |         warn!("Event was soft failed: {:?}", incoming_pdu); | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|         // Set the new room state to the resolved state
 |  | ||||||
|         if update_state { |  | ||||||
|             db.rooms |  | ||||||
|                 .force_state(&room_id, new_room_state, &db) |  | ||||||
|                 .map_err(|_| "Failed to set new room state.".to_owned())?; |  | ||||||
|         } |  | ||||||
|         debug!("Updated resolved state"); |  | ||||||
| 
 |  | ||||||
|     if soft_fail { |     if soft_fail { | ||||||
|         // Soft fail, we leave the event as an outlier but don't add it to the timeline
 |         // Soft fail, we leave the event as an outlier but don't add it to the timeline
 | ||||||
|         return Err("Event has been soft failed".into()); |         return Err("Event has been soft failed".into()); | ||||||
|  | @ -1383,25 +1566,26 @@ pub fn handle_incoming_pdu<'a>( | ||||||
|     // Event has passed all auth/stateres checks
 |     // Event has passed all auth/stateres checks
 | ||||||
|     drop(state_lock); |     drop(state_lock); | ||||||
|     Ok(pdu_id) |     Ok(pdu_id) | ||||||
|     }) |  | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| /// Find the event and auth it. Once the event is validated (steps 1 - 8)
 | /// Find the event and auth it. Once the event is validated (steps 1 - 8)
 | ||||||
| /// it is appended to the outliers Tree.
 | /// it is appended to the outliers Tree.
 | ||||||
| ///
 | ///
 | ||||||
|  | /// Returns the PDU and, if we fetched it over federation, the raw JSON.
 | ||||||
|  | ///
 | ||||||
| /// a. Look in the main timeline (pduid_pdu tree)
 | /// a. Look in the main timeline (pduid_pdu tree)
 | ||||||
| /// b. Look at outlier pdu tree
 | /// b. Look at outlier pdu tree
 | ||||||
| /// c. Ask origin server over federation
 | /// c. Ask origin server over federation
 | ||||||
| /// d. TODO: Ask other servers over federation?
 | /// d. TODO: Ask other servers over federation?
 | ||||||
| //#[tracing::instrument(skip(db, key_map, auth_cache))]
 | #[tracing::instrument(skip(db, origin, events, create_event, room_id, pub_key_map))] | ||||||
| pub(crate) fn fetch_and_handle_events<'a>( | pub(crate) fn fetch_and_handle_outliers<'a>( | ||||||
|     db: &'a Database, |     db: &'a Database, | ||||||
|     origin: &'a ServerName, |     origin: &'a ServerName, | ||||||
|     events: &'a [EventId], |     events: &'a [EventId], | ||||||
|  |     create_event: &'a PduEvent, | ||||||
|     room_id: &'a RoomId, |     room_id: &'a RoomId, | ||||||
|     pub_key_map: &'a RwLock<BTreeMap<String, BTreeMap<String, String>>>, |     pub_key_map: &'a RwLock<BTreeMap<String, BTreeMap<String, String>>>, | ||||||
|     are_timeline_events: bool, | ) -> AsyncRecursiveType<'a, Vec<(Arc<PduEvent>, Option<BTreeMap<String, CanonicalJsonValue>>)>> { | ||||||
| ) -> AsyncRecursiveType<'a, Vec<Arc<PduEvent>>> { |  | ||||||
|     Box::pin(async move { |     Box::pin(async move { | ||||||
|         let back_off = |id| match db.globals.bad_event_ratelimiter.write().unwrap().entry(id) { |         let back_off = |id| match db.globals.bad_event_ratelimiter.write().unwrap().entry(id) { | ||||||
|             Entry::Vacant(e) => { |             Entry::Vacant(e) => { | ||||||
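A sketch of how a caller consumes the renamed function's new return type; the variable names here are illustrative, since the call sites are not part of this hunk:

    for (pdu, maybe_json) in
        fetch_and_handle_outliers(db, origin, &event_ids, &create_event, &room_id, &pub_key_map)
            .await
    {
        // maybe_json is Some(..) only when the event had to be fetched from the origin
        // server; events we already had locally come back with None.
        let _ = (pdu, maybe_json);
    }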
|  | @ -1412,35 +1596,32 @@ pub(crate) fn fetch_and_handle_events<'a>( | ||||||
| 
 | 
 | ||||||
|         let mut pdus = vec![]; |         let mut pdus = vec![]; | ||||||
|         for id in events { |         for id in events { | ||||||
|  |             info!("loading {}", id); | ||||||
|             if let Some((time, tries)) = db.globals.bad_event_ratelimiter.read().unwrap().get(&id) { |             if let Some((time, tries)) = db.globals.bad_event_ratelimiter.read().unwrap().get(&id) { | ||||||
|                 // Exponential backoff
 |                 // Exponential backoff
 | ||||||
|                 let mut min_elapsed_duration = Duration::from_secs(30) * (*tries) * (*tries); |                 let mut min_elapsed_duration = Duration::from_secs(5 * 60) * (*tries) * (*tries); | ||||||
|                 if min_elapsed_duration > Duration::from_secs(60 * 60 * 24) { |                 if min_elapsed_duration > Duration::from_secs(60 * 60 * 24) { | ||||||
|                     min_elapsed_duration = Duration::from_secs(60 * 60 * 24); |                     min_elapsed_duration = Duration::from_secs(60 * 60 * 24); | ||||||
|                 } |                 } | ||||||
| 
 | 
 | ||||||
|                 if time.elapsed() < min_elapsed_duration { |                 if time.elapsed() < min_elapsed_duration { | ||||||
|                     debug!("Backing off from {}", id); |                     info!("Backing off from {}", id); | ||||||
|                     continue; |                     continue; | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
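    // Worked example of the new backoff curve above: 5 min * tries², capped at one day.
    //   tries = 1 ->  5 min, tries = 2 -> 20 min, tries = 3 -> 45 min,
    //   tries >= 17 -> 24 h cap (300 s * 17² = 86 700 s > 86 400 s).
    // The same computation as a standalone sketch:
    let backoff = |tries: u32| {
        (Duration::from_secs(5 * 60) * tries * tries).min(Duration::from_secs(60 * 60 * 24))
    };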
| 
 | 
 | ||||||
|             // a. Look in the main timeline (pduid_pdu tree)
 |             // a. Look in the main timeline (pduid_pdu tree)
 | ||||||
|             // b. Look at outlier pdu tree
 |             // b. Look at outlier pdu tree
 | ||||||
|             // (get_pdu checks both)
 |             // (get_pdu_json checks both)
 | ||||||
|             let local_pdu = if are_timeline_events { |             let local_pdu = db.rooms.get_pdu(&id); | ||||||
|                 db.rooms.get_non_outlier_pdu(&id).map(|o| o.map(Arc::new)) |  | ||||||
|             } else { |  | ||||||
|                 db.rooms.get_pdu(&id) |  | ||||||
|             }; |  | ||||||
|             let pdu = match local_pdu { |             let pdu = match local_pdu { | ||||||
|                 Ok(Some(pdu)) => { |                 Ok(Some(pdu)) => { | ||||||
|                     trace!("Found {} in db", id); |                     trace!("Found {} in db", id); | ||||||
|                     pdu |                     (pdu, None) | ||||||
|                 } |                 } | ||||||
|                 Ok(None) => { |                 Ok(None) => { | ||||||
|                     // c. Ask origin server over federation
 |                     // c. Ask origin server over federation
 | ||||||
|                     debug!("Fetching {} over federation.", id); |                     info!("Fetching {} over federation.", id); | ||||||
|                     match db |                     match db | ||||||
|                         .sending |                         .sending | ||||||
|                         .send_federation_request( |                         .send_federation_request( | ||||||
|  | @ -1451,41 +1632,29 @@ pub(crate) fn fetch_and_handle_events<'a>( | ||||||
|                         .await |                         .await | ||||||
|                     { |                     { | ||||||
|                         Ok(res) => { |                         Ok(res) => { | ||||||
|                             debug!("Got {} over federation", id); |                             info!("Got {} over federation", id); | ||||||
|                             let (event_id, mut value) = |                             let (event_id, value) = | ||||||
|                                 match crate::pdu::gen_event_id_canonical_json(&res.pdu) { |                                 match crate::pdu::gen_event_id_canonical_json(&res.pdu) { | ||||||
|                                     Ok(t) => t, |                                     Ok(t) => t, | ||||||
|                                     Err(_) => continue, |                                     Err(_) => { | ||||||
|  |                                         back_off(id.clone()); | ||||||
|  |                                         continue; | ||||||
|  |                                     } | ||||||
|                                 }; |                                 }; | ||||||
| 
 | 
 | ||||||
|                             // This will also fetch the auth chain
 |                             // This will also fetch the auth chain
 | ||||||
|                             match handle_incoming_pdu( |                             match handle_outlier_pdu( | ||||||
|                                 origin, |                                 origin, | ||||||
|  |                                 create_event, | ||||||
|                                 &event_id, |                                 &event_id, | ||||||
|                                 &room_id, |                                 &room_id, | ||||||
|                                 value.clone(), |                                 value.clone(), | ||||||
|                                 are_timeline_events, |  | ||||||
|                                 db, |                                 db, | ||||||
|                                 pub_key_map, |                                 pub_key_map, | ||||||
|                             ) |                             ) | ||||||
|                             .await |                             .await | ||||||
|                             { |                             { | ||||||
|                                 Ok(_) => { |                                 Ok((pdu, json)) => (pdu, Some(json)), | ||||||
|                                     value.insert( |  | ||||||
|                                         "event_id".to_owned(), |  | ||||||
|                                         CanonicalJsonValue::String(event_id.into()), |  | ||||||
|                                     ); |  | ||||||
| 
 |  | ||||||
|                                     Arc::new( |  | ||||||
|                                         serde_json::from_value( |  | ||||||
|                                             serde_json::to_value(value) |  | ||||||
|                                                 .expect("canonicaljsonobject is valid value"), |  | ||||||
|                                         ) |  | ||||||
|                                         .expect( |  | ||||||
|                                             "This is possible because handle_incoming_pdu worked", |  | ||||||
|                                         ), |  | ||||||
|                                     ) |  | ||||||
|                                 } |  | ||||||
|                                 Err(e) => { |                                 Err(e) => { | ||||||
|                                     warn!("Authentication of event {} failed: {:?}", id, e); |                                     warn!("Authentication of event {} failed: {:?}", id, e); | ||||||
|                                     back_off(id.clone()); |                                     back_off(id.clone()); | ||||||
|  | @ -1501,7 +1670,7 @@ pub(crate) fn fetch_and_handle_events<'a>( | ||||||
|                     } |                     } | ||||||
|                 } |                 } | ||||||
|                 Err(e) => { |                 Err(e) => { | ||||||
|                     debug!("Error loading {}: {}", id, e); |                     warn!("Error loading {}: {}", id, e); | ||||||
|                     continue; |                     continue; | ||||||
|                 } |                 } | ||||||
|             }; |             }; | ||||||
|  | @ -1513,7 +1682,7 @@ pub(crate) fn fetch_and_handle_events<'a>( | ||||||
| 
 | 
 | ||||||
| /// Search the DB for the signing keys of the given server, if we don't have them
 | /// Search the DB for the signing keys of the given server, if we don't have them
 | ||||||
| /// fetch them from the server and save to our DB.
 | /// fetch them from the server and save to our DB.
 | ||||||
| #[tracing::instrument(skip(db))] | #[tracing::instrument(skip(db, origin, signature_ids))] | ||||||
| pub(crate) async fn fetch_signing_keys( | pub(crate) async fn fetch_signing_keys( | ||||||
|     db: &Database, |     db: &Database, | ||||||
|     origin: &ServerName, |     origin: &ServerName, | ||||||
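Several functions in this commit also gain explicit skip lists on #[tracing::instrument]; skipped arguments stay out of the span, while the remaining ones are recorded via their Debug impls. A self-contained illustration with a hypothetical function (not from the commit):

    use tracing::instrument;

    // Only event_id is recorded on the span; payload is skipped.
    #[instrument(skip(payload))]
    fn process_event(event_id: &str, payload: &[u8]) {
        tracing::debug!("processing {} bytes for {}", payload.len(), event_id);
    }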
|  | @ -1684,7 +1853,7 @@ fn append_incoming_pdu( | ||||||
|     // We append to state before appending the pdu, so we don't have a moment in time with the
 |     // We append to state before appending the pdu, so we don't have a moment in time with the
 | ||||||
|     // pdu without its state. This is okay because append_pdu can't fail.
 |     // pdu without its state. This is okay because append_pdu can't fail.
 | ||||||
|     db.rooms |     db.rooms | ||||||
|         .set_event_state(&pdu.event_id, state, &db.globals)?; |         .set_event_state(&pdu.event_id, &pdu.room_id, state, &db.globals)?; | ||||||
| 
 | 
 | ||||||
|     let pdu_id = db.rooms.append_pdu( |     let pdu_id = db.rooms.append_pdu( | ||||||
|         pdu, |         pdu, | ||||||
|  | @ -1754,51 +1923,72 @@ fn append_incoming_pdu( | ||||||
|     Ok(pdu_id) |     Ok(pdu_id) | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| fn get_auth_chain(starting_events: Vec<EventId>, db: &Database) -> Result<HashSet<EventId>> { | #[tracing::instrument(skip(starting_events, db))] | ||||||
|  | fn get_auth_chain( | ||||||
|  |     starting_events: Vec<EventId>, | ||||||
|  |     db: &Database, | ||||||
|  | ) -> Result<impl Iterator<Item = EventId> + '_> { | ||||||
|     let mut full_auth_chain = HashSet::new(); |     let mut full_auth_chain = HashSet::new(); | ||||||
| 
 | 
 | ||||||
|  |     let starting_events = starting_events | ||||||
|  |         .iter() | ||||||
|  |         .map(|id| { | ||||||
|  |             db.rooms | ||||||
|  |                 .get_or_create_shorteventid(id, &db.globals) | ||||||
|  |                 .map(|s| (s, id)) | ||||||
|  |         }) | ||||||
|  |         .collect::<Result<Vec<_>>>()?; | ||||||
|  | 
 | ||||||
|     let mut cache = db.rooms.auth_chain_cache(); |     let mut cache = db.rooms.auth_chain_cache(); | ||||||
| 
 | 
 | ||||||
|     for event_id in &starting_events { |     for (sevent_id, event_id) in starting_events { | ||||||
|         if let Some(cached) = cache.get_mut(&[event_id.clone()][..]) { |         if let Some(cached) = cache.get_mut(&sevent_id) { | ||||||
|             full_auth_chain.extend(cached.iter().cloned()); |             full_auth_chain.extend(cached.iter().cloned()); | ||||||
|         } else { |         } else { | ||||||
|             drop(cache); |             drop(cache); | ||||||
|             let mut auth_chain = HashSet::new(); |             let auth_chain = get_auth_chain_inner(&event_id, db)?; | ||||||
|             get_auth_chain_recursive(&event_id, &mut auth_chain, db)?; |  | ||||||
|             cache = db.rooms.auth_chain_cache(); |             cache = db.rooms.auth_chain_cache(); | ||||||
|             cache.insert(vec![event_id.clone()], auth_chain.clone()); |             cache.insert(sevent_id, auth_chain.clone()); | ||||||
|             full_auth_chain.extend(auth_chain); |             full_auth_chain.extend(auth_chain); | ||||||
|         }; |         }; | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     Ok(full_auth_chain) |     drop(cache); | ||||||
|  | 
 | ||||||
|  |     Ok(full_auth_chain | ||||||
|  |         .into_iter() | ||||||
|  |         .filter_map(move |sid| db.rooms.get_eventid_from_short(sid).ok())) | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| fn get_auth_chain_recursive( | #[tracing::instrument(skip(event_id, db))] | ||||||
|     event_id: &EventId, | fn get_auth_chain_inner(event_id: &EventId, db: &Database) -> Result<HashSet<u64>> { | ||||||
|     found: &mut HashSet<EventId>, |     let mut todo = vec![event_id.clone()]; | ||||||
|     db: &Database, |     let mut found = HashSet::new(); | ||||||
| ) -> Result<()> { | 
 | ||||||
|     let r = db.rooms.get_pdu(&event_id); |     while let Some(event_id) = todo.pop() { | ||||||
|     match r { |         match db.rooms.get_pdu(&event_id) { | ||||||
|             Ok(Some(pdu)) => { |             Ok(Some(pdu)) => { | ||||||
|                 for auth_event in &pdu.auth_events { |                 for auth_event in &pdu.auth_events { | ||||||
|                 if !found.contains(auth_event) { |                     let sauthevent = db | ||||||
|                     found.insert(auth_event.clone()); |                         .rooms | ||||||
|                     get_auth_chain_recursive(&auth_event, found, db)?; |                         .get_or_create_shorteventid(auth_event, &db.globals)?; | ||||||
|  | 
 | ||||||
|  |                     if !found.contains(&sauthevent) { | ||||||
|  |                         found.insert(sauthevent); | ||||||
|  |                         todo.push(auth_event.clone()); | ||||||
|                     } |                     } | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
|             Ok(None) => { |             Ok(None) => { | ||||||
|             warn!("Could not find pdu mentioned in auth events."); |                 warn!("Could not find pdu mentioned in auth events: {}", event_id); | ||||||
|             } |             } | ||||||
|             Err(e) => { |             Err(e) => { | ||||||
|             warn!("Could not load event in auth chain: {}", e); |                 warn!("Could not load event in auth chain: {} {}", event_id, e); | ||||||
|  |             } | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     Ok(()) |     Ok(found) | ||||||
| } | } | ||||||
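    // Design note on the rewrite above: the chain is walked with an explicit todo stack
    // and deduplicated on u64 short ids, so long auth chains neither recurse deeply nor
    // bloat the cache with full EventIds. Round trip assumed by this code (treating
    // get_or_create_shorteventid and get_eventid_from_short as inverses):
    let short: u64 = db
        .rooms
        .get_or_create_shorteventid(&event_id, &db.globals)?;
    assert_eq!(db.rooms.get_eventid_from_short(short)?, event_id);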
| 
 | 
 | ||||||
| #[cfg_attr(
 | #[cfg_attr(
 | ||||||
|  | @ -1892,7 +2082,6 @@ pub fn get_event_authorization_route( | ||||||
| 
 | 
 | ||||||
|     Ok(get_event_authorization::v1::Response { |     Ok(get_event_authorization::v1::Response { | ||||||
|         auth_chain: auth_chain_ids |         auth_chain: auth_chain_ids | ||||||
|             .into_iter() |  | ||||||
|             .filter_map(|id| Some(db.rooms.get_pdu_json(&id).ok()??)) |             .filter_map(|id| Some(db.rooms.get_pdu_json(&id).ok()??)) | ||||||
|             .map(|event| PduEvent::convert_to_outgoing_federation_event(event)) |             .map(|event| PduEvent::convert_to_outgoing_federation_event(event)) | ||||||
|             .collect(), |             .collect(), | ||||||
|  | @ -1936,7 +2125,6 @@ pub fn get_room_state_route( | ||||||
| 
 | 
 | ||||||
|     Ok(get_room_state::v1::Response { |     Ok(get_room_state::v1::Response { | ||||||
|         auth_chain: auth_chain_ids |         auth_chain: auth_chain_ids | ||||||
|             .into_iter() |  | ||||||
|             .map(|id| { |             .map(|id| { | ||||||
|                 Ok::<_, Error>(PduEvent::convert_to_outgoing_federation_event( |                 Ok::<_, Error>(PduEvent::convert_to_outgoing_federation_event( | ||||||
|                     db.rooms.get_pdu_json(&id)?.unwrap(), |                     db.rooms.get_pdu_json(&id)?.unwrap(), | ||||||
|  | @ -1979,7 +2167,7 @@ pub fn get_room_state_ids_route( | ||||||
|     let auth_chain_ids = get_auth_chain(vec![body.event_id.clone()], &db)?; |     let auth_chain_ids = get_auth_chain(vec![body.event_id.clone()], &db)?; | ||||||
| 
 | 
 | ||||||
|     Ok(get_room_state_ids::v1::Response { |     Ok(get_room_state_ids::v1::Response { | ||||||
|         auth_chain_ids: auth_chain_ids.into_iter().collect(), |         auth_chain_ids: auth_chain_ids.collect(), | ||||||
|         pdu_ids, |         pdu_ids, | ||||||
|     } |     } | ||||||
|     .into()) |     .into()) | ||||||
|  | @ -2056,6 +2244,7 @@ pub fn create_join_event_template_route( | ||||||
|         is_direct: None, |         is_direct: None, | ||||||
|         membership: MembershipState::Join, |         membership: MembershipState::Join, | ||||||
|         third_party_invite: None, |         third_party_invite: None, | ||||||
|  |         reason: None, | ||||||
|     }) |     }) | ||||||
|     .expect("member event is valid value"); |     .expect("member event is valid value"); | ||||||
| 
 | 
 | ||||||
|  | @ -2248,7 +2437,6 @@ pub async fn create_join_event_route( | ||||||
|     Ok(create_join_event::v2::Response { |     Ok(create_join_event::v2::Response { | ||||||
|         room_state: RoomState { |         room_state: RoomState { | ||||||
|             auth_chain: auth_chain_ids |             auth_chain: auth_chain_ids | ||||||
|                 .iter() |  | ||||||
|                 .filter_map(|id| db.rooms.get_pdu_json(&id).ok().flatten()) |                 .filter_map(|id| db.rooms.get_pdu_json(&id).ok().flatten()) | ||||||
|                 .map(PduEvent::convert_to_outgoing_federation_event) |                 .map(PduEvent::convert_to_outgoing_federation_event) | ||||||
|                 .collect(), |                 .collect(), | ||||||
|  | @ -2359,6 +2547,7 @@ pub async fn create_invite_route( | ||||||
|             &sender, |             &sender, | ||||||
|             Some(invite_state), |             Some(invite_state), | ||||||
|             &db, |             &db, | ||||||
|  |             true, | ||||||
|         )?; |         )?; | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|  | @ -2532,6 +2721,7 @@ pub async fn claim_keys_route( | ||||||
|     .into()) |     .into()) | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | #[tracing::instrument(skip(event, pub_key_map, db))] | ||||||
| pub async fn fetch_required_signing_keys( | pub async fn fetch_required_signing_keys( | ||||||
|     event: &BTreeMap<String, CanonicalJsonValue>, |     event: &BTreeMap<String, CanonicalJsonValue>, | ||||||
|     pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, String>>>, |     pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, String>>>, | ||||||
|  |  | ||||||