From 0c2a3f09fe7d680f42e28f21bd98f586b7e06fd1 Mon Sep 17 00:00:00 2001 From: mdecimus Date: Thu, 11 Jul 2024 18:44:51 +0200 Subject: [PATCH] Improved error handling (part 1) --- Cargo.lock | 22 +- Cargo.toml | 1 + crates/common/Cargo.toml | 1 + crates/common/src/addresses.rs | 20 +- crates/common/src/config/jmap/settings.rs | 2 +- crates/common/src/config/server/listener.rs | 2 +- crates/common/src/config/server/tls.rs | 9 +- crates/common/src/config/smtp/auth.rs | 2 +- crates/common/src/config/smtp/queue.rs | 2 +- crates/common/src/config/smtp/report.rs | 4 +- crates/common/src/config/smtp/resolver.rs | 2 +- crates/common/src/config/smtp/session.rs | 2 +- crates/common/src/config/smtp/throttle.rs | 2 +- crates/common/src/enterprise/undelete.rs | 10 +- crates/common/src/expr/functions/asynch.rs | 2 +- crates/common/src/lib.rs | 21 +- crates/common/src/listener/acme/cache.rs | 61 +-- crates/common/src/listener/acme/directory.rs | 156 +++---- crates/common/src/listener/acme/jose.rs | 37 +- crates/common/src/listener/acme/mod.rs | 20 +- crates/common/src/listener/acme/order.rs | 140 ++---- crates/common/src/listener/blocked.rs | 2 +- crates/common/src/manager/backup.rs | 31 +- crates/common/src/manager/config.rs | 42 +- crates/common/src/manager/reload.rs | 8 +- crates/common/src/manager/webadmin.rs | 30 +- crates/common/src/scripts/plugins/lookup.rs | 2 +- crates/directory/Cargo.toml | 2 +- crates/directory/src/backend/imap/lookup.rs | 55 ++- .../directory/src/backend/internal/lookup.rs | 28 +- .../directory/src/backend/internal/manage.rs | 404 ++++++++++-------- crates/directory/src/backend/internal/mod.rs | 43 +- crates/directory/src/backend/ldap/lookup.rs | 109 +++-- crates/directory/src/backend/memory/lookup.rs | 12 +- crates/directory/src/backend/smtp/lookup.rs | 76 +++- crates/directory/src/backend/sql/lookup.rs | 77 ++-- crates/directory/src/core/config.rs | 3 +- crates/directory/src/core/dispatch.rs | 24 +- crates/directory/src/core/secret.rs | 129 +++--- 
crates/directory/src/lib.rs | 224 +++------- crates/imap/Cargo.toml | 1 + crates/imap/src/op/copy_move.rs | 2 +- crates/jmap-proto/Cargo.toml | 1 + crates/jmap-proto/src/error/method.rs | 218 ++++++---- crates/jmap-proto/src/method/get.rs | 11 +- crates/jmap-proto/src/method/set.rs | 11 +- crates/jmap-proto/src/object/mod.rs | 18 +- crates/jmap-proto/src/request/mod.rs | 3 +- crates/jmap-proto/src/request/parser.rs | 2 +- crates/jmap-proto/src/response/mod.rs | 18 +- crates/jmap-proto/src/response/references.rs | 66 +-- crates/jmap/Cargo.toml | 1 + crates/jmap/src/api/http.rs | 2 +- crates/jmap/src/api/management/dkim.rs | 5 +- crates/jmap/src/api/management/domain.rs | 2 +- crates/jmap/src/api/management/enterprise.rs | 8 +- crates/jmap/src/api/management/principal.rs | 91 ++-- crates/jmap/src/api/request.rs | 8 +- crates/jmap/src/auth/acl.rs | 29 +- crates/jmap/src/auth/mod.rs | 15 +- crates/jmap/src/blob/copy.rs | 7 +- crates/jmap/src/blob/download.rs | 53 +-- crates/jmap/src/blob/get.rs | 4 +- crates/jmap/src/blob/upload.rs | 34 +- crates/jmap/src/changes/get.rs | 18 +- crates/jmap/src/changes/query.rs | 8 +- crates/jmap/src/changes/state.rs | 24 +- crates/jmap/src/changes/write.rs | 38 +- crates/jmap/src/email/cache.rs | 20 +- crates/jmap/src/email/copy.rs | 36 +- crates/jmap/src/email/crypto.rs | 25 +- crates/jmap/src/email/delete.rs | 62 +-- crates/jmap/src/email/get.rs | 15 +- crates/jmap/src/email/import.rs | 47 +- crates/jmap/src/email/ingest.rs | 153 ++----- crates/jmap/src/email/parse.rs | 7 +- crates/jmap/src/email/query.rs | 17 +- crates/jmap/src/email/set.rs | 32 +- crates/jmap/src/email/snippet.rs | 4 +- crates/jmap/src/identity/get.rs | 27 +- crates/jmap/src/identity/set.rs | 4 +- crates/jmap/src/lib.rs | 234 +++------- crates/jmap/src/mailbox/get.rs | 23 +- crates/jmap/src/mailbox/query.rs | 6 +- crates/jmap/src/mailbox/set.rs | 76 +--- crates/jmap/src/principal/get.rs | 6 +- crates/jmap/src/principal/query.rs | 10 +- crates/jmap/src/push/get.rs | 
35 +- crates/jmap/src/push/set.rs | 4 +- crates/jmap/src/quota/get.rs | 3 +- crates/jmap/src/quota/query.rs | 3 +- crates/jmap/src/quota/set.rs | 2 +- crates/jmap/src/services/index.rs | 6 +- crates/jmap/src/services/ingest.rs | 29 +- crates/jmap/src/sieve/get.rs | 65 +-- crates/jmap/src/sieve/ingest.rs | 25 +- crates/jmap/src/sieve/query.rs | 6 +- crates/jmap/src/sieve/set.rs | 66 +-- crates/jmap/src/sieve/validate.rs | 7 +- crates/jmap/src/submission/get.rs | 3 +- crates/jmap/src/submission/query.rs | 6 +- crates/jmap/src/submission/set.rs | 9 +- crates/jmap/src/thread/get.rs | 14 +- crates/jmap/src/vacation/get.rs | 8 +- crates/jmap/src/vacation/set.rs | 32 +- crates/main/Cargo.toml | 1 + crates/pop3/Cargo.toml | 1 + crates/pop3/src/mailbox.rs | 2 +- crates/smtp/Cargo.toml | 1 + crates/smtp/src/inbound/vrfy.rs | 25 +- crates/smtp/src/queue/spool.rs | 2 +- crates/smtp/src/reporting/dmarc.rs | 2 +- crates/smtp/src/reporting/scheduler.rs | 2 +- crates/smtp/src/reporting/tls.rs | 2 +- crates/store/Cargo.toml | 2 +- crates/store/src/backend/elastic/index.rs | 117 +++-- crates/store/src/backend/elastic/mod.rs | 39 +- crates/store/src/backend/elastic/query.rs | 48 ++- crates/store/src/backend/foundationdb/blob.rs | 16 +- crates/store/src/backend/foundationdb/mod.rs | 11 +- crates/store/src/backend/foundationdb/read.rs | 44 +- .../store/src/backend/foundationdb/write.rs | 43 +- crates/store/src/backend/fs/mod.rs | 6 +- crates/store/src/backend/mod.rs | 10 +- crates/store/src/backend/mysql/blob.rs | 33 +- crates/store/src/backend/mysql/lookup.rs | 20 +- crates/store/src/backend/mysql/main.rs | 18 +- crates/store/src/backend/mysql/mod.rs | 15 +- crates/store/src/backend/mysql/read.rs | 61 +-- crates/store/src/backend/mysql/write.rs | 38 +- crates/store/src/backend/postgres/blob.rs | 33 +- crates/store/src/backend/postgres/lookup.rs | 23 +- crates/store/src/backend/postgres/main.rs | 25 +- crates/store/src/backend/postgres/mod.rs | 22 +- 
crates/store/src/backend/postgres/read.rs | 61 +-- crates/store/src/backend/postgres/write.rs | 43 +- crates/store/src/backend/redis/lookup.rs | 130 ++++-- crates/store/src/backend/redis/mod.rs | 27 +- crates/store/src/backend/redis/pool.rs | 30 +- crates/store/src/backend/rocksdb/blob.rs | 14 +- crates/store/src/backend/rocksdb/main.rs | 9 +- crates/store/src/backend/rocksdb/mod.rs | 11 +- crates/store/src/backend/rocksdb/read.rs | 22 +- crates/store/src/backend/rocksdb/write.rs | 35 +- crates/store/src/backend/s3/mod.rs | 81 ++-- crates/store/src/backend/sqlite/blob.rs | 30 +- crates/store/src/backend/sqlite/lookup.rs | 20 +- crates/store/src/backend/sqlite/main.rs | 33 +- crates/store/src/backend/sqlite/mod.rs | 25 +- crates/store/src/backend/sqlite/pool.rs | 3 +- crates/store/src/backend/sqlite/read.rs | 110 +++-- crates/store/src/backend/sqlite/write.rs | 123 ++++-- crates/store/src/config.rs | 15 +- crates/store/src/dispatch/blob.rs | 33 +- crates/store/src/dispatch/fts.rs | 13 +- crates/store/src/dispatch/lookup.rs | 121 ++++-- crates/store/src/dispatch/store.rs | 103 +++-- crates/store/src/fts/index.rs | 10 +- crates/store/src/fts/query.rs | 4 +- crates/store/src/lib.rs | 42 +- crates/store/src/query/acl.rs | 29 +- crates/store/src/query/filter.rs | 42 +- crates/store/src/query/log.rs | 18 +- crates/store/src/query/sort.rs | 24 +- crates/store/src/write/assert.rs | 2 +- crates/store/src/write/blob.rs | 71 +-- crates/store/src/write/key.rs | 40 +- crates/store/src/write/log.rs | 2 +- crates/store/src/write/mod.rs | 83 ++-- crates/store/src/write/purge.rs | 43 +- crates/trc/Cargo.toml | 16 + crates/trc/src/conv.rs | 214 ++++++++++ crates/trc/src/imple.rs | 211 +++++++++ crates/trc/src/lib.rs | 144 +++++++ crates/trc/src/macros.rs | 46 ++ crates/utils/Cargo.toml | 1 + crates/utils/src/config/mod.rs | 6 +- crates/utils/src/config/utils.rs | 9 + tests/Cargo.toml | 2 +- 179 files changed, 3409 insertions(+), 3048 deletions(-) create mode 100644 
crates/trc/Cargo.toml create mode 100644 crates/trc/src/conv.rs create mode 100644 crates/trc/src/imple.rs create mode 100644 crates/trc/src/lib.rs create mode 100644 crates/trc/src/macros.rs diff --git a/Cargo.lock b/Cargo.lock index 329f4bce..a790b622 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1068,6 +1068,7 @@ dependencies = [ "tracing-journald", "tracing-opentelemetry", "tracing-subscriber", + "trc", "unicode-security", "utils", "whatlang", @@ -1655,7 +1656,7 @@ dependencies = [ "tokio", "tokio-rustls 0.26.0", "totp-rs", - "tracing", + "trc", "utils", ] @@ -2959,6 +2960,7 @@ dependencies = [ "tokio", "tokio-rustls 0.26.0", "tracing", + "trc", "utils", ] @@ -3191,6 +3193,7 @@ dependencies = [ "tokio", "tokio-tungstenite 0.23.1", "tracing", + "trc", "tungstenite 0.23.0", "utils", "x509-parser 0.16.0", @@ -3230,6 +3233,7 @@ dependencies = [ "store", "tokio", "tracing", + "trc", "utils", ] @@ -3591,6 +3595,7 @@ dependencies = [ "store", "tokio", "tracing", + "trc", "utils", ] @@ -4468,6 +4473,7 @@ dependencies = [ "tokio", "tokio-rustls 0.26.0", "tracing", + "trc", "utils", ] @@ -6038,6 +6044,7 @@ dependencies = [ "tokio", "tokio-rustls 0.26.0", "tracing", + "trc", "utils", "webpki-roots 0.26.3", "x509-parser 0.16.0", @@ -6186,7 +6193,7 @@ dependencies = [ "tokio", "tokio-postgres", "tokio-rustls 0.26.0", - "tracing", + "trc", "utils", "xxhash-rust", ] @@ -6859,6 +6866,16 @@ dependencies = [ "tracing-log", ] +[[package]] +name = "trc" +version = "0.8.5" +dependencies = [ + "base64 0.22.1", + "bincode", + "reqwest 0.12.5", + "serde_json", +] + [[package]] name = "trim-in-place" version = "0.1.7" @@ -7117,6 +7134,7 @@ dependencies = [ "tokio-rustls 0.26.0", "tracing", "tracing-journald", + "trc", "webpki-roots 0.26.3", "x509-parser 0.16.0", ] diff --git a/Cargo.toml b/Cargo.toml index e1208b2d..6a429e14 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,6 +14,7 @@ members = [ "crates/directory", "crates/utils", "crates/common", + "crates/trc", "crates/cli", "tests", ] 
diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 8c05a597..e40e06fe 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -8,6 +8,7 @@ resolver = "2" utils = { path = "../utils" } nlp = { path = "../nlp" } store = { path = "../store" } +trc = { path = "../trc" } directory = { path = "../directory" } jmap_proto = { path = "../jmap-proto" } sieve-rs = { version = "0.5" } diff --git a/crates/common/src/addresses.rs b/crates/common/src/addresses.rs index 24636d29..3e8ed7be 100644 --- a/crates/common/src/addresses.rs +++ b/crates/common/src/addresses.rs @@ -18,11 +18,7 @@ use crate::{ }; impl Core { - pub async fn email_to_ids( - &self, - directory: &Directory, - email: &str, - ) -> directory::Result> { + pub async fn email_to_ids(&self, directory: &Directory, email: &str) -> trc::Result> { let mut address = self .smtp .session @@ -53,7 +49,7 @@ impl Core { Ok(vec![]) } - pub async fn rcpt(&self, directory: &Directory, email: &str) -> directory::Result { + pub async fn rcpt(&self, directory: &Directory, email: &str) -> trc::Result { // Expand subaddress let mut address = self .smtp @@ -83,11 +79,7 @@ impl Core { Ok(false) } - pub async fn vrfy( - &self, - directory: &Directory, - address: &str, - ) -> directory::Result> { + pub async fn vrfy(&self, directory: &Directory, address: &str) -> trc::Result> { directory .vrfy( self.smtp @@ -101,11 +93,7 @@ impl Core { .await } - pub async fn expn( - &self, - directory: &Directory, - address: &str, - ) -> directory::Result> { + pub async fn expn(&self, directory: &Directory, address: &str) -> trc::Result> { directory .expn( self.smtp diff --git a/crates/common/src/config/jmap/settings.rs b/crates/common/src/config/jmap/settings.rs index 79dc965e..97899ebf 100644 --- a/crates/common/src/config/jmap/settings.rs +++ b/crates/common/src/config/jmap/settings.rs @@ -442,7 +442,7 @@ impl JmapConfig { } impl ParseValue for SpecialUse { - fn parse_value(value: &str) -> utils::config::Result { + fn 
parse_value(value: &str) -> Result { match value { "inbox" => Ok(SpecialUse::Inbox), "trash" => Ok(SpecialUse::Trash), diff --git a/crates/common/src/config/server/listener.rs b/crates/common/src/config/server/listener.rs index b9445cbc..0abe2148 100644 --- a/crates/common/src/config/server/listener.rs +++ b/crates/common/src/config/server/listener.rs @@ -304,7 +304,7 @@ impl Servers { } impl ParseValue for ServerProtocol { - fn parse_value(value: &str) -> utils::config::Result { + fn parse_value(value: &str) -> Result { if value.eq_ignore_ascii_case("smtp") { Ok(Self::Smtp) } else if value.eq_ignore_ascii_case("lmtp") { diff --git a/crates/common/src/config/server/tls.rs b/crates/common/src/config/server/tls.rs index 62c12b03..a98d8583 100644 --- a/crates/common/src/config/server/tls.rs +++ b/crates/common/src/config/server/tls.rs @@ -157,7 +157,7 @@ impl TlsManager { acme_providers.insert(acme_id.to_string(), acme_provider); } Err(err) => { - config.new_build_error(format!("acme.{acme_id}"), err); + config.new_build_error(format!("acme.{acme_id}"), err.to_string()); } } } @@ -359,10 +359,7 @@ pub(crate) fn parse_certificates( } } -pub(crate) fn build_certified_key( - cert: Vec, - pk: Vec, -) -> utils::config::Result { +pub(crate) fn build_certified_key(cert: Vec, pk: Vec) -> Result { let cert = certs(&mut Cursor::new(cert)) .collect::, _>>() .map_err(|err| format!("Failed to read certificates: {err}"))?; @@ -391,7 +388,7 @@ pub(crate) fn build_certified_key( pub(crate) fn build_self_signed_cert( domains: impl Into>, -) -> utils::config::Result { +) -> Result { let cert = generate_simple_self_signed(domains) .map_err(|err| format!("Failed to generate self-signed certificate: {err}",))?; build_certified_key( diff --git a/crates/common/src/config/smtp/auth.rs b/crates/common/src/config/smtp/auth.rs index 0dfdd4f0..9bf809d5 100644 --- a/crates/common/src/config/smtp/auth.rs +++ b/crates/common/src/config/smtp/auth.rs @@ -456,7 +456,7 @@ impl ConstantValue for 
VerifyStrategy { } impl ParseValue for DkimCanonicalization { - fn parse_value(value: &str) -> utils::config::Result { + fn parse_value(value: &str) -> Result { if let Some((headers, body)) = value.split_once('/') { Ok(DkimCanonicalization { headers: Canonicalization::parse_value(headers.trim())?, diff --git a/crates/common/src/config/smtp/queue.rs b/crates/common/src/config/smtp/queue.rs index a017b527..4ef9d729 100644 --- a/crates/common/src/config/smtp/queue.rs +++ b/crates/common/src/config/smtp/queue.rs @@ -534,7 +534,7 @@ fn parse_queue_quota_item(config: &mut Config, prefix: impl AsKey) -> Option utils::config::Result { + fn parse_value(value: &str) -> Result { match value { "optional" => Ok(RequireOptional::Optional), "require" | "required" => Ok(RequireOptional::Require), diff --git a/crates/common/src/config/smtp/report.rs b/crates/common/src/config/smtp/report.rs index b8e47c0f..4f81c3c2 100644 --- a/crates/common/src/config/smtp/report.rs +++ b/crates/common/src/config/smtp/report.rs @@ -214,7 +214,7 @@ impl Default for ReportConfig { } impl ParseValue for AggregateFrequency { - fn parse_value(value: &str) -> utils::config::Result { + fn parse_value(value: &str) -> Result { match value { "daily" | "day" => Ok(AggregateFrequency::Daily), "hourly" | "hour" => Ok(AggregateFrequency::Hourly), @@ -267,7 +267,7 @@ impl ConstantValue for AggregateFrequency { } impl ParseValue for AddressMatch { - fn parse_value(value: &str) -> utils::config::Result { + fn parse_value(value: &str) -> Result { if let Some(value) = value.strip_prefix('*').map(|v| v.trim()) { if !value.is_empty() { return Ok(AddressMatch::EndsWith(value.to_lowercase())); diff --git a/crates/common/src/config/smtp/resolver.rs b/crates/common/src/config/smtp/resolver.rs index cfac607c..85cc4d75 100644 --- a/crates/common/src/config/smtp/resolver.rs +++ b/crates/common/src/config/smtp/resolver.rs @@ -325,7 +325,7 @@ impl Core { } impl ParseValue for Mode { - fn parse_value(value: &str) -> 
utils::config::Result { + fn parse_value(value: &str) -> Result { match value { "enforce" => Ok(Self::Enforce), "testing" | "test" => Ok(Self::Testing), diff --git a/crates/common/src/config/smtp/session.rs b/crates/common/src/config/smtp/session.rs index 06606b66..d5960ff3 100644 --- a/crates/common/src/config/smtp/session.rs +++ b/crates/common/src/config/smtp/session.rs @@ -875,7 +875,7 @@ impl Default for SessionConfig { pub struct Mechanism(u64); impl ParseValue for Mechanism { - fn parse_value(value: &str) -> utils::config::Result { + fn parse_value(value: &str) -> Result { Ok(Mechanism(match value.to_ascii_uppercase().as_str() { "LOGIN" => AUTH_LOGIN, "PLAIN" => AUTH_PLAIN, diff --git a/crates/common/src/config/smtp/throttle.rs b/crates/common/src/config/smtp/throttle.rs index 4c16bba4..869a959c 100644 --- a/crates/common/src/config/smtp/throttle.rs +++ b/crates/common/src/config/smtp/throttle.rs @@ -104,7 +104,7 @@ fn parse_throttle_item( } } -pub(crate) fn parse_throttle_key(value: &str) -> utils::config::Result { +pub(crate) fn parse_throttle_key(value: &str) -> Result { match value { "rcpt" => Ok(THROTTLE_RCPT), "rcpt_domain" => Ok(THROTTLE_RCPT_DOMAIN), diff --git a/crates/common/src/enterprise/undelete.rs b/crates/common/src/enterprise/undelete.rs index cf5ec4cb..f99abd8a 100644 --- a/crates/common/src/enterprise/undelete.rs +++ b/crates/common/src/enterprise/undelete.rs @@ -16,6 +16,7 @@ use store::{ }, IterateParams, ValueKey, U32_LEN, U64_LEN, }; +use trc::AddContext; use utils::{BlobHash, BLOB_HASH_LEN}; use crate::Core; @@ -59,7 +60,7 @@ impl Core { pub async fn list_deleted( &self, account_id: u32, - ) -> store::Result>> { + ) -> trc::Result>> { let from_key = ValueKey { account_id, collection: 0, @@ -92,9 +93,7 @@ impl Core { results.push(DeletedBlob { hash: BlobHash::try_from_hash_slice( key.get(U32_LEN..U32_LEN + BLOB_HASH_LEN).ok_or_else(|| { - store::Error::InternalError(format!( - "Invalid key {key:?} in blob hash tables" - )) + 
trc::Error::corrupted_key(key, value.into(), trc::location!()) })?, ) .unwrap(), @@ -107,7 +106,8 @@ impl Core { Ok(true) }, ) - .await?; + .await + .caused_by(trc::location!())?; Ok(results) } diff --git a/crates/common/src/expr/functions/asynch.rs b/crates/common/src/expr/functions/asynch.rs index f9017a9c..182e5584 100644 --- a/crates/common/src/expr/functions/asynch.rs +++ b/crates/common/src/expr/functions/asynch.rs @@ -342,7 +342,7 @@ impl From for VariableWrapper { } impl Deserialize for VariableWrapper { - fn deserialize(bytes: &[u8]) -> store::Result { + fn deserialize(bytes: &[u8]) -> trc::Result { String::deserialize(bytes).map(|v| VariableWrapper(Variable::String(v.into()))) } } diff --git a/crates/common/src/lib.rs b/crates/common/src/lib.rs index 28ae5f18..a3d23656 100644 --- a/crates/common/src/lib.rs +++ b/crates/common/src/lib.rs @@ -20,9 +20,7 @@ use config::{ storage::Storage, tracers::{OtelTracer, Tracer, Tracers}, }; -use directory::{ - core::secret::verify_secret_hash, Directory, DirectoryError, Principal, QueryBy, Type, -}; +use directory::{core::secret::verify_secret_hash, Directory, Principal, QueryBy, Type}; use expr::if_block::IfBlock; use listener::{ blocked::{AllowedIps, BlockedIps}, @@ -92,7 +90,7 @@ pub enum AuthFailureReason { InvalidCredentials, MissingTotp, Banned, - InternalError(DirectoryError), + InternalError(trc::Error), } #[derive(Debug)] @@ -241,7 +239,7 @@ impl Core { remote_ip: IpAddr, protocol: ServerProtocol, return_member_of: bool, - ) -> directory::Result>> { + ) -> trc::Result>> { // First try to authenticate the user against the default directory let result = match directory .query(QueryBy::Credentials(credentials), return_member_of) @@ -266,10 +264,13 @@ impl Core { return Ok(AuthResult::Success(principal)); } Ok(None) => Ok(()), - Err(DirectoryError::MissingTotpCode) => { - return Ok(AuthResult::Failure(AuthFailureReason::MissingTotp)) + Err(err) => { + if err.matches(trc::Cause::MissingParameter) { + return 
Ok(AuthResult::Failure(AuthFailureReason::MissingTotp)); + } else { + Err(err) + } } - Err(err) => Err(err), }; // Then check if the credentials match the fallback admin or master user @@ -281,7 +282,7 @@ impl Core { (Some((fallback_admin, fallback_pass)), _, Credentials::Plain { username, secret }) if username == fallback_admin => { - if verify_secret_hash(fallback_pass, secret).await { + if verify_secret_hash(fallback_pass, secret).await? { // Send webhook event if self.has_webhook_subscribers(WebhookType::AuthSuccess) { ipc.send_webhook( @@ -304,7 +305,7 @@ impl Core { (_, Some((master_user, master_pass)), Credentials::Plain { username, secret }) if username.ends_with(master_user) => { - if verify_secret_hash(master_pass, secret).await { + if verify_secret_hash(master_pass, secret).await? { let username = username.strip_suffix(master_user).unwrap(); let username = username.strip_suffix('%').unwrap_or(username); return Ok( diff --git a/crates/common/src/listener/acme/cache.rs b/crates/common/src/listener/acme/cache.rs index 43a6d7a7..95b9d046 100644 --- a/crates/common/src/listener/acme/cache.rs +++ b/crates/common/src/listener/acme/cache.rs @@ -4,49 +4,50 @@ * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL */ -use std::io::ErrorKind; - use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine}; +use trc::AddContext; use utils::config::ConfigKey; use crate::Core; -use super::{AcmeError, AcmeProvider}; +use super::AcmeProvider; impl Core { - pub(crate) async fn load_cert( - &self, - provider: &AcmeProvider, - ) -> Result>, AcmeError> { + pub(crate) async fn load_cert(&self, provider: &AcmeProvider) -> trc::Result>> { self.read_if_exists(provider, "cert", provider.domains.as_slice()) .await - .map_err(AcmeError::CertCacheLoad) + .add_context(|err| { + err.caused_by(trc::location!()) + .details("Failed to load certificates") + }) } - pub(crate) async fn store_cert( - &self, - provider: &AcmeProvider, - cert: &[u8], - ) -> Result<(), AcmeError> { + 
pub(crate) async fn store_cert(&self, provider: &AcmeProvider, cert: &[u8]) -> trc::Result<()> { self.write(provider, "cert", provider.domains.as_slice(), cert) .await - .map_err(AcmeError::CertCacheStore) + .add_context(|err| { + err.caused_by(trc::location!()) + .details("Failed to store certificate") + }) } pub(crate) async fn load_account( &self, provider: &AcmeProvider, - ) -> Result>, AcmeError> { + ) -> trc::Result>> { self.read_if_exists(provider, "account-key", provider.contact.as_slice()) .await - .map_err(AcmeError::AccountCacheLoad) + .add_context(|err| { + err.caused_by(trc::location!()) + .details("Failed to load account") + }) } pub(crate) async fn store_account( &self, provider: &AcmeProvider, account: &[u8], - ) -> Result<(), AcmeError> { + ) -> trc::Result<()> { self.write( provider, "account-key", @@ -54,7 +55,10 @@ impl Core { account, ) .await - .map_err(AcmeError::AccountCacheStore) + .add_context(|err| { + err.caused_by(trc::location!()) + .details("Failed to store account") + }) } async fn read_if_exists( @@ -62,19 +66,19 @@ impl Core { provider: &AcmeProvider, class: &str, items: &[String], - ) -> Result>, std::io::Error> { - match self + ) -> trc::Result>> { + if let Some(content) = self .storage .config .get(self.build_key(provider, class, items)) - .await + .await? 
{ - Ok(Some(content)) => match URL_SAFE_NO_PAD.decode(content.as_bytes()) { - Ok(contents) => Ok(Some(contents)), - Err(err) => Err(std::io::Error::new(ErrorKind::Other, err)), - }, - Ok(None) => Ok(None), - Err(err) => Err(std::io::Error::new(ErrorKind::Other, err)), + URL_SAFE_NO_PAD + .decode(content.as_bytes()) + .map_err(Into::into) + .map(Some) + } else { + Ok(None) } } @@ -84,7 +88,7 @@ impl Core { class: &str, items: &[String], contents: impl AsRef<[u8]>, - ) -> Result<(), std::io::Error> { + ) -> trc::Result<()> { self.storage .config .set([ConfigKey { @@ -92,7 +96,6 @@ impl Core { value: URL_SAFE_NO_PAD.encode(contents.as_ref()), }]) .await - .map_err(|err| std::io::Error::new(ErrorKind::Other, err)) } fn build_key(&self, provider: &AcmeProvider, class: &str, _: &[String]) -> String { diff --git a/crates/common/src/listener/acme/directory.rs b/crates/common/src/listener/acme/directory.rs index 2a25e61e..ed5b9357 100644 --- a/crates/common/src/listener/acme/directory.rs +++ b/crates/common/src/listener/acme/directory.rs @@ -5,18 +5,18 @@ use std::time::Duration; use base64::engine::general_purpose::URL_SAFE_NO_PAD; use base64::Engine; use rcgen::{Certificate, CustomExtension, PKCS_ECDSA_P256_SHA256}; -use reqwest::header::{ToStrError, CONTENT_TYPE}; -use reqwest::{Method, Response, StatusCode}; -use ring::error::{KeyRejected, Unspecified}; +use reqwest::header::CONTENT_TYPE; +use reqwest::{Method, Response}; use ring::rand::SystemRandom; use ring::signature::{EcdsaKeyPair, EcdsaSigningAlgorithm, ECDSA_P256_SHA256_FIXED_SIGNING}; use serde::Deserialize; use serde_json::json; use store::write::Bincode; use store::Serialize; +use trc::conv::AssertSuccess; use super::jose::{ - key_authorization, key_authorization_sha256, key_authorization_sha256_base64, sign, JoseError, + key_authorization, key_authorization_sha256, key_authorization_sha256_base64, sign, }; pub const LETS_ENCRYPT_STAGING_DIRECTORY: &str = @@ -42,7 +42,7 @@ impl Account { .to_vec() } - pub 
async fn create<'a, S, I>(directory: Directory, contact: I) -> Result + pub async fn create<'a, S, I>(directory: Directory, contact: I) -> trc::Result where S: AsRef + 'a, I: IntoIterator, @@ -54,12 +54,13 @@ impl Account { directory: Directory, contact: I, key_pair: &[u8], - ) -> Result + ) -> trc::Result where S: AsRef + 'a, I: IntoIterator, { - let key_pair = EcdsaKeyPair::from_pkcs8(ALG, key_pair, &SystemRandom::new())?; + let key_pair = EcdsaKeyPair::from_pkcs8(ALG, key_pair, &SystemRandom::new()) + .map_err(|err| trc::Cause::Crypto.reason(err).caused_by(trc::location!()))?; let contact: Vec<&'a str> = contact.into_iter().map(AsRef::::as_ref).collect(); let payload = json!({ "termsOfServiceAgreed": true, @@ -86,7 +87,7 @@ impl Account { &self, url: impl AsRef, payload: &str, - ) -> Result<(Option, String), DirectoryError> { + ) -> trc::Result<(Option, String)> { let body = sign( &self.key_pair, Some(&self.kid), @@ -100,68 +101,66 @@ impl Account { Ok((location, body)) } - pub async fn new_order(&self, domains: Vec) -> Result<(String, Order), DirectoryError> { + pub async fn new_order(&self, domains: Vec) -> trc::Result<(String, Order)> { let domains: Vec = domains.into_iter().map(Identifier::Dns).collect(); let payload = format!("{{\"identifiers\":{}}}", serde_json::to_string(&domains)?); let response = self.request(&self.directory.new_order, &payload).await?; - let url = response - .0 - .ok_or(DirectoryError::MissingHeader("Location"))?; + let url = response.0.ok_or( + trc::Cause::Acme + .caused_by(trc::location!()) + .details("Missing header") + .ctx(trc::Key::Id, "Location"), + )?; let order = serde_json::from_str(&response.1)?; Ok((url, order)) } - pub async fn auth(&self, url: impl AsRef) -> Result { + pub async fn auth(&self, url: impl AsRef) -> trc::Result { let response = self.request(url, "").await?; serde_json::from_str(&response.1).map_err(Into::into) } - pub async fn challenge(&self, url: impl AsRef) -> Result<(), DirectoryError> { + pub async fn 
challenge(&self, url: impl AsRef) -> trc::Result<()> { self.request(&url, "{}").await.map(|_| ()) } - pub async fn order(&self, url: impl AsRef) -> Result { + pub async fn order(&self, url: impl AsRef) -> trc::Result { let response = self.request(&url, "").await?; serde_json::from_str(&response.1).map_err(Into::into) } - pub async fn finalize( - &self, - url: impl AsRef, - csr: Vec, - ) -> Result { + pub async fn finalize(&self, url: impl AsRef, csr: Vec) -> trc::Result { let payload = format!("{{\"csr\":\"{}\"}}", URL_SAFE_NO_PAD.encode(csr)); let response = self.request(&url, &payload).await?; serde_json::from_str(&response.1).map_err(Into::into) } - pub async fn certificate(&self, url: impl AsRef) -> Result { + pub async fn certificate(&self, url: impl AsRef) -> trc::Result { Ok(self.request(&url, "").await?.1) } - pub fn http_proof(&self, challenge: &Challenge) -> Result, DirectoryError> { + pub fn http_proof(&self, challenge: &Challenge) -> trc::Result> { key_authorization(&self.key_pair, &challenge.token) .map(|key| key.into_bytes()) .map_err(Into::into) } - pub fn dns_proof(&self, challenge: &Challenge) -> Result { + pub fn dns_proof(&self, challenge: &Challenge) -> trc::Result { key_authorization_sha256_base64(&self.key_pair, &challenge.token).map_err(Into::into) } - pub fn tls_alpn_key( - &self, - challenge: &Challenge, - domain: String, - ) -> Result, DirectoryError> { + pub fn tls_alpn_key(&self, challenge: &Challenge, domain: String) -> trc::Result> { let mut params = rcgen::CertificateParams::new(vec![domain]); let key_auth = key_authorization_sha256(&self.key_pair, &challenge.token)?; params.alg = &PKCS_ECDSA_P256_SHA256; params.custom_extensions = vec![CustomExtension::new_acme_identifier(key_auth.as_ref())]; - let cert = Certificate::from_params(params)?; + let cert = Certificate::from_params(params) + .map_err(|err| trc::Cause::Crypto.caused_by(trc::location!()).reason(err))?; Ok(Bincode::new(SerializedCert { - certificate: cert.serialize_der()?, + 
certificate: cert + .serialize_der() + .map_err(|err| trc::Cause::Crypto.caused_by(trc::location!()).reason(err))?, private_key: cert.serialize_private_key_der(), }) .serialize()) @@ -183,12 +182,12 @@ pub struct Directory { } impl Directory { - pub async fn discover(url: impl AsRef) -> Result { + pub async fn discover(url: impl AsRef) -> trc::Result { Ok(serde_json::from_str( &https(url, Method::GET, None).await?.text().await?, )?) } - pub async fn nonce(&self) -> Result { + pub async fn nonce(&self) -> trc::Result { get_header( &https(&self.new_nonce.as_str(), Method::HEAD, None).await?, "replay-nonce", @@ -269,27 +268,12 @@ pub struct Problem { pub detail: Option, } -#[derive(Debug)] -pub enum DirectoryError { - Io(std::io::Error), - Rcgen(rcgen::Error), - Jose(JoseError), - Json(serde_json::Error), - HttpRequest(reqwest::Error), - HttpRequestCode { code: StatusCode, reason: String }, - HttpResponseNonStringHeader(ToStrError), - KeyRejected(KeyRejected), - Crypto(Unspecified), - MissingHeader(&'static str), - NoChallenge(ChallengeType), -} - #[allow(unused_mut)] async fn https( url: impl AsRef, method: Method, body: Option, -) -> Result { +) -> trc::Result { let url = url.as_ref(); let mut builder = reqwest::Client::builder() .timeout(Duration::from_secs(30)) @@ -310,68 +294,38 @@ async fn https( .body(body); } - let response = request.send().await?; - if response.status().is_success() { - Ok(response) - } else { - Err(DirectoryError::HttpRequestCode { - code: response.status(), - reason: response.text().await?, - }) - } + request.send().await?.assert_success().await } -fn get_header(response: &Response, header: &'static str) -> Result { +fn get_header(response: &Response, header: &'static str) -> trc::Result { match response.headers().get_all(header).iter().last() { Some(value) => Ok(value.to_str()?.to_string()), - None => Err(DirectoryError::MissingHeader(header)), + None => Err(trc::Cause::Acme + .caused_by(trc::location!()) + .details("Missing header") + 
.ctx(trc::Key::Id, header)), } } -impl From for DirectoryError { - fn from(err: std::io::Error) -> Self { - Self::Io(err) +impl ChallengeType { + pub fn as_str(&self) -> &'static str { + match self { + Self::Http01 => "http-01", + Self::Dns01 => "dns-01", + Self::TlsAlpn01 => "tls-alpn-01", + } } } -impl From for DirectoryError { - fn from(err: rcgen::Error) -> Self { - Self::Rcgen(err) - } -} - -impl From for DirectoryError { - fn from(err: JoseError) -> Self { - Self::Jose(err) - } -} - -impl From for DirectoryError { - fn from(err: serde_json::Error) -> Self { - Self::Json(err) - } -} - -impl From for DirectoryError { - fn from(err: reqwest::Error) -> Self { - Self::HttpRequest(err) - } -} - -impl From for DirectoryError { - fn from(err: KeyRejected) -> Self { - Self::KeyRejected(err) - } -} - -impl From for DirectoryError { - fn from(err: Unspecified) -> Self { - Self::Crypto(err) - } -} - -impl From for DirectoryError { - fn from(err: ToStrError) -> Self { - Self::HttpResponseNonStringHeader(err) +impl AuthStatus { + pub fn as_str(&self) -> &'static str { + match self { + Self::Pending => "pending", + Self::Valid => "valid", + Self::Invalid => "invalid", + Self::Revoked => "revoked", + Self::Expired => "expired", + Self::Deactivated => "deactivated", + } } } diff --git a/crates/common/src/listener/acme/jose.rs b/crates/common/src/listener/acme/jose.rs index 7d5834af..805218d7 100644 --- a/crates/common/src/listener/acme/jose.rs +++ b/crates/common/src/listener/acme/jose.rs @@ -13,7 +13,7 @@ pub(crate) fn sign( nonce: String, url: &str, payload: &str, -) -> Result { +) -> trc::Result { let jwk = match kid { None => Some(Jwk::new(key)), Some(_) => None, @@ -21,7 +21,9 @@ pub(crate) fn sign( let protected = Protected::base64(jwk, kid, nonce, url)?; let payload = URL_SAFE_NO_PAD.encode(payload); let combined = format!("{}.{}", &protected, &payload); - let signature = key.sign(&SystemRandom::new(), combined.as_bytes())?; + let signature = key + 
.sign(&SystemRandom::new(), combined.as_bytes()) + .map_err(|err| trc::Cause::Crypto.caused_by(trc::location!()).reason(err))?; let signature = URL_SAFE_NO_PAD.encode(signature.as_ref()); let body = Body { protected, @@ -31,7 +33,7 @@ pub(crate) fn sign( Ok(serde_json::to_string(&body)?) } -pub(crate) fn key_authorization(key: &EcdsaKeyPair, token: &str) -> Result { +pub(crate) fn key_authorization(key: &EcdsaKeyPair, token: &str) -> trc::Result { Ok(format!( "{}.{}", token, @@ -39,17 +41,14 @@ pub(crate) fn key_authorization(key: &EcdsaKeyPair, token: &str) -> Result Result { +pub(crate) fn key_authorization_sha256(key: &EcdsaKeyPair, token: &str) -> trc::Result { key_authorization(key, token).map(|s| digest(&SHA256, s.as_bytes())) } pub(crate) fn key_authorization_sha256_base64( key: &EcdsaKeyPair, token: &str, -) -> Result { +) -> trc::Result { key_authorization_sha256(key, token).map(|s| URL_SAFE_NO_PAD.encode(s.as_ref())) } @@ -77,7 +76,7 @@ impl<'a> Protected<'a> { kid: Option<&'a str>, nonce: String, url: &'a str, - ) -> Result { + ) -> trc::Result { let protected = Self { alg: "ES256", jwk, @@ -113,7 +112,7 @@ impl Jwk { y: URL_SAFE_NO_PAD.encode(y), } } - pub(crate) fn thumb_sha256_base64(&self) -> Result { + pub(crate) fn thumb_sha256_base64(&self) -> trc::Result { let jwk_thumb = JwkThumb { crv: self.crv, kty: self.kty, @@ -133,21 +132,3 @@ struct JwkThumb<'a> { x: &'a str, y: &'a str, } - -#[derive(Debug)] -pub enum JoseError { - Json(serde_json::Error), - Crypto(ring::error::Unspecified), -} - -impl From for JoseError { - fn from(err: serde_json::Error) -> Self { - Self::Json(err) - } -} - -impl From for JoseError { - fn from(err: ring::error::Unspecified) -> Self { - Self::Crypto(err) - } -} diff --git a/crates/common/src/listener/acme/mod.rs b/crates/common/src/listener/acme/mod.rs index 569cca89..f429984d 100644 --- a/crates/common/src/listener/acme/mod.rs +++ b/crates/common/src/listener/acme/mod.rs @@ -18,10 +18,7 @@ use 
rustls::sign::CertifiedKey; use crate::Core; -use self::{ - directory::{Account, ChallengeType}, - order::{CertParseError, OrderError}, -}; +use self::directory::{Account, ChallengeType}; pub struct AcmeProvider { pub id: String, @@ -51,17 +48,6 @@ pub struct StaticResolver { pub key: Option>, } -#[derive(Debug)] -pub enum AcmeError { - CertCacheLoad(std::io::Error), - AccountCacheLoad(std::io::Error), - CertCacheStore(std::io::Error), - AccountCacheStore(std::io::Error), - CachedCertParse(CertParseError), - Order(OrderError), - NewCertParse(CertParseError), -} - impl AcmeProvider { pub fn new( id: String, @@ -71,7 +57,7 @@ impl AcmeProvider { challenge: ChallengeSettings, renew_before: Duration, default: bool, - ) -> utils::config::Result { + ) -> trc::Result { Ok(AcmeProvider { id, directory_url, @@ -95,7 +81,7 @@ impl AcmeProvider { } impl Core { - pub async fn init_acme(&self, provider: &AcmeProvider) -> Result { + pub async fn init_acme(&self, provider: &AcmeProvider) -> trc::Result { // Load account key from cache or generate a new one if let Some(account_key) = self.load_account(provider).await? 
{ provider.account_key.store(Arc::new(account_key)); diff --git a/crates/common/src/listener/acme/order.rs b/crates/common/src/listener/acme/order.rs index c4b178d5..152e71a0 100644 --- a/crates/common/src/listener/acme/order.rs +++ b/crates/common/src/listener/acme/order.rs @@ -7,7 +7,6 @@ use rcgen::{CertificateParams, DistinguishedName, PKCS_ECDSA_P256_SHA256}; use rustls::crypto::ring::sign::any_ecdsa_type; use rustls::sign::CertifiedKey; use rustls_pki_types::{CertificateDer, PrivateKeyDer, PrivatePkcs8KeyDer}; -use std::fmt::Debug; use std::sync::Arc; use std::time::{Duration, Instant}; use utils::suffixlist::DomainPart; @@ -17,29 +16,8 @@ use crate::listener::acme::directory::Identifier; use crate::listener::acme::ChallengeSettings; use crate::Core; -use super::directory::{Account, Auth, AuthStatus, Directory, DirectoryError, Order, OrderStatus}; -use super::jose::JoseError; -use super::{AcmeError, AcmeProvider}; - -#[derive(Debug)] -pub enum OrderError { - Acme(DirectoryError), - Rcgen(rcgen::Error), - BadOrder(Order), - BadAuth(Auth), - TooManyAttemptsAuth(String), - ProcessingTimeout(Order), - Store(store::Error), - Dns(dns_update::Error), -} - -#[derive(Debug)] -pub enum CertParseError { - X509(x509_parser::nom::Err), - Pem(pem::PemError), - TooFewPem(usize), - InvalidPrivateKey, -} +use super::directory::{Account, AuthStatus, Directory, OrderStatus}; +use super::AcmeProvider; impl Core { pub(crate) async fn process_cert( @@ -47,16 +25,8 @@ impl Core { provider: &AcmeProvider, pem: Vec, cached: bool, - ) -> Result { - let (cert, validity) = match (parse_cert(&pem), cached) { - (Ok(r), _) => r, - (Err(err), cached) => { - return match cached { - true => Err(AcmeError::CachedCertParse(err)), - false => Err(AcmeError::NewCertParse(err)), - } - } - }; + ) -> trc::Result { + let (cert, validity) = parse_cert(&pem)?; self.set_cert(provider, Arc::new(cert)); @@ -82,7 +52,7 @@ impl Core { Ok(renew_at) } - pub async fn renew(&self, provider: &AcmeProvider) -> 
Result { + pub async fn renew(&self, provider: &AcmeProvider) -> trc::Result { let mut backoff = 0; loop { match self.order(provider).await { @@ -99,12 +69,12 @@ impl Core { backoff = (backoff + 1).min(16); tokio::time::sleep(Duration::from_secs(1 << backoff)).await; } - Err(err) => return Err(AcmeError::Order(err)), + Err(err) => return Err(err.details("Failed to renew certificate")), } } } - async fn order(&self, provider: &AcmeProvider) -> Result, OrderError> { + async fn order(&self, provider: &AcmeProvider) -> trc::Result> { let directory = Directory::discover(&provider.directory_url).await?; let account = Account::create_with_keypair( directory, @@ -116,7 +86,8 @@ impl Core { let mut params = CertificateParams::new(provider.domains.clone()); params.distinguished_name = DistinguishedName::new(); params.alg = &PKCS_ECDSA_P256_SHA256; - let cert = rcgen::Certificate::from_params(params)?; + let cert = rcgen::Certificate::from_params(params) + .map_err(|err| trc::Cause::Crypto.caused_by(trc::location!()).reason(err))?; let (order_url, mut order) = account.new_order(provider.domains.clone()).await?; loop { @@ -151,7 +122,9 @@ impl Core { } } if order.status == OrderStatus::Processing { - return Err(OrderError::ProcessingTimeout(order)); + return Err(trc::Cause::Timeout + .caused_by(trc::location!()) + .details("Order processing timed out")); } } OrderStatus::Ready => { @@ -162,7 +135,9 @@ impl Core { "Sending CSR" ); - let csr = cert.serialize_request_der()?; + let csr = cert.serialize_request_der().map_err(|err| { + trc::Cause::Crypto.caused_by(trc::location!()).reason(err) + })?; order = account.finalize(order.finalize, csr).await? 
} OrderStatus::Valid { certificate } => { @@ -190,7 +165,7 @@ impl Core { "Invalid order" ); - return Err(OrderError::BadOrder(order)); + return Err(trc::Cause::Invalid.into_err().details("Invalid ACME order")); } } } @@ -201,7 +176,7 @@ impl Core { provider: &AcmeProvider, account: &Account, url: &String, - ) -> Result<(), OrderError> { + ) -> trc::Result<()> { let auth = account.auth(url).await?; let (domain, challenge_url) = match auth.status { AuthStatus::Pending => { @@ -218,7 +193,11 @@ impl Core { .challenges .iter() .find(|c| c.typ == challenge_type) - .ok_or(DirectoryError::NoChallenge(challenge_type))?; + .ok_or( + trc::Cause::MissingParameter + .into_err() + .ctx(trc::Key::Id, challenge_type.as_str()), + )?; match &provider.challenge { ChallengeSettings::TlsAlpn01 => { @@ -290,7 +269,7 @@ impl Core { error = ?err, "Failed to create DNS record.", ); - return Err(OrderError::Dns(err)); + return Err(trc::Cause::Dns.caused_by(trc::location!()).reason(err)); } tracing::info!( @@ -362,7 +341,11 @@ impl Core { (domain, challenge.url.clone()) } AuthStatus::Valid => return Ok(()), - _ => return Err(OrderError::BadAuth(auth)), + _ => { + return Err(trc::Cause::Authentication + .into_err() + .ctx(trc::Key::Status, auth.status.as_str())) + } }; for i in 0u64..5 { @@ -389,23 +372,34 @@ impl Core { return Ok(()); } - _ => return Err(OrderError::BadAuth(auth)), + _ => { + return Err(trc::Cause::Authentication + .into_err() + .ctx(trc::Key::Status, auth.status.as_str())) + } } } - Err(OrderError::TooManyAttemptsAuth(domain)) + Err(trc::Cause::Authentication + .into_err() + .details("Too many attempts") + .ctx(trc::Key::Id, domain)) } } -fn parse_cert(pem: &[u8]) -> Result<(CertifiedKey, [DateTime; 2]), CertParseError> { - let mut pems = pem::parse_many(pem)?; +fn parse_cert(pem: &[u8]) -> trc::Result<(CertifiedKey, [DateTime; 2])> { + let mut pems = pem::parse_many(pem) + .map_err(|err| trc::Cause::Crypto.reason(err).caused_by(trc::location!()))?; if pems.len() < 2 { - 
return Err(CertParseError::TooFewPem(pems.len())); + return Err(trc::Cause::Crypto + .caused_by(trc::location!()) + .ctx(trc::Key::Size, pems.len()) + .details("Too few PEMs")); } let pk = match any_ecdsa_type(&PrivateKeyDer::Pkcs8(PrivatePkcs8KeyDer::from( pems.remove(0).contents(), ))) { Ok(pk) => pk, - Err(_) => return Err(CertParseError::InvalidPrivateKey), + Err(err) => return Err(trc::Cause::Crypto.reason(err).caused_by(trc::location!())), }; let cert_chain: Vec = pems .into_iter() @@ -420,50 +414,8 @@ fn parse_cert(pem: &[u8]) -> Result<(CertifiedKey, [DateTime; 2]), CertPars .unwrap_or_default() }) } - Err(err) => return Err(CertParseError::X509(err)), + Err(err) => return Err(trc::Cause::Crypto.reason(err).caused_by(trc::location!())), }; let cert = CertifiedKey::new(cert_chain, pk); Ok((cert, validity)) } - -impl From for OrderError { - fn from(err: DirectoryError) -> Self { - Self::Acme(err) - } -} - -impl From for OrderError { - fn from(err: rcgen::Error) -> Self { - Self::Rcgen(err) - } -} - -impl From> for CertParseError { - fn from(err: x509_parser::nom::Err) -> Self { - Self::X509(err) - } -} - -impl From for CertParseError { - fn from(err: pem::PemError) -> Self { - Self::Pem(err) - } -} - -impl From for OrderError { - fn from(err: JoseError) -> Self { - Self::Acme(DirectoryError::Jose(err)) - } -} - -impl From for AcmeError { - fn from(err: JoseError) -> Self { - Self::Order(OrderError::from(err)) - } -} - -impl From for OrderError { - fn from(value: store::Error) -> Self { - Self::Store(value) - } -} diff --git a/crates/common/src/listener/blocked.rs b/crates/common/src/listener/blocked.rs index e5efaea5..d7fef628 100644 --- a/crates/common/src/listener/blocked.rs +++ b/crates/common/src/listener/blocked.rs @@ -108,7 +108,7 @@ impl AllowedIps { } impl Core { - pub async fn is_fail2banned(&self, ip: IpAddr, login: String) -> store::Result { + pub async fn is_fail2banned(&self, ip: IpAddr, login: String) -> trc::Result { if let Some(rate) = 
&self.network.blocked_ips.limiter_rate { let is_allowed = self.is_ip_allowed(&ip) || (self diff --git a/crates/common/src/manager/backup.rs b/crates/common/src/manager/backup.rs index f48ec89e..bcfc61aa 100644 --- a/crates/common/src/manager/backup.rs +++ b/crates/common/src/manager/backup.rs @@ -900,11 +900,12 @@ impl Core { ); (hash, len as u8) } - invalid => { - return Err(format!( - "Invalid text bitmap key length {invalid}" - ) - .into()) + _ => { + return Err(trc::Error::corrupted_key( + key, + None, + trc::location!(), + )); } }; @@ -1108,34 +1109,34 @@ fn spawn_writer(path: PathBuf) -> (std::thread::JoinHandle<()>, SyncSender) } pub(super) trait DeserializeBytes { - fn range(&self, range: Range) -> store::Result<&[u8]>; - fn deserialize_u8(&self, offset: usize) -> store::Result; - fn deserialize_leb128(&self) -> store::Result; + fn range(&self, range: Range) -> trc::Result<&[u8]>; + fn deserialize_u8(&self, offset: usize) -> trc::Result; + fn deserialize_leb128(&self) -> trc::Result; } impl DeserializeBytes for &[u8] { - fn range(&self, range: Range) -> store::Result<&[u8]> { + fn range(&self, range: Range) -> trc::Result<&[u8]> { self.get(range.start..std::cmp::min(range.end, self.len())) - .ok_or_else(|| store::Error::InternalError("Failed to read range".to_string())) + .ok_or_else(|| trc::Cause::DataCorruption.caused_by(trc::location!())) } - fn deserialize_u8(&self, offset: usize) -> store::Result { + fn deserialize_u8(&self, offset: usize) -> trc::Result { self.get(offset) .copied() - .ok_or_else(|| store::Error::InternalError("Failed to read u8".to_string())) + .ok_or_else(|| trc::Cause::DataCorruption.caused_by(trc::location!())) } - fn deserialize_leb128(&self) -> store::Result { + fn deserialize_leb128(&self) -> trc::Result { self.read_leb128::() .map(|(v, _)| v) - .ok_or_else(|| store::Error::InternalError("Failed to read leb128".to_string())) + .ok_or_else(|| trc::Cause::DataCorruption.caused_by(trc::location!())) } } struct RawBytes(Vec); impl 
Deserialize for RawBytes { - fn deserialize(bytes: &[u8]) -> store::Result { + fn deserialize(bytes: &[u8]) -> trc::Result { Ok(Self(bytes.to_vec())) } } diff --git a/crates/common/src/manager/config.rs b/crates/common/src/manager/config.rs index 81dbc1fb..7a9460e5 100644 --- a/crates/common/src/manager/config.rs +++ b/crates/common/src/manager/config.rs @@ -54,7 +54,7 @@ pub(crate) struct ExternalConfig { } impl ConfigManager { - pub async fn build_config(&self, prefix: &str) -> store::Result { + pub async fn build_config(&self, prefix: &str) -> trc::Result { let mut config = Config { keys: self.cfg_local.load().as_ref().clone(), ..Default::default() @@ -65,11 +65,7 @@ impl ConfigManager { .map(|_| config) } - pub(crate) async fn extend_config( - &self, - config: &mut Config, - prefix: &str, - ) -> store::Result<()> { + pub(crate) async fn extend_config(&self, config: &mut Config, prefix: &str) -> trc::Result<()> { for (key, value) in self.db_list(prefix, false).await? { config.keys.entry(key).or_insert(value); } @@ -77,7 +73,7 @@ impl ConfigManager { Ok(()) } - pub async fn get(&self, key: impl AsRef) -> store::Result> { + pub async fn get(&self, key: impl AsRef) -> trc::Result> { let key = key.as_ref(); match self.cfg_local.load().get(key) { Some(value) => Ok(Some(value.to_string())), @@ -95,7 +91,7 @@ impl ConfigManager { &self, prefix: &str, strip_prefix: bool, - ) -> store::Result> { + ) -> trc::Result> { let mut results = self.db_list(prefix, strip_prefix).await?; for (key, value) in self.cfg_local.load().iter() { if prefix.is_empty() || (!strip_prefix && key.starts_with(prefix)) { @@ -112,7 +108,7 @@ impl ConfigManager { &self, prefix: &str, suffix: &str, - ) -> store::Result>> { + ) -> trc::Result>> { let mut grouped = AHashMap::new(); let mut list = self.list(prefix, true).await?; @@ -138,7 +134,7 @@ impl ConfigManager { &self, prefix: &str, strip_prefix: bool, - ) -> store::Result> { + ) -> trc::Result> { let key = prefix.as_bytes(); let from_key = 
ValueKey::from(ValueClass::Config(key.to_vec())); let to_key = ValueKey::from(ValueClass::Config( @@ -154,7 +150,7 @@ impl ConfigManager { IterateParams::new(from_key, to_key).ascending(), |key, value| { let mut key = std::str::from_utf8(key).map_err(|_| { - store::Error::InternalError("Failed to deserialize config key".to_string()) + trc::Error::corrupted_key(key, value.into(), trc::location!()) })?; if !patterns.is_local_key(key) { @@ -173,7 +169,7 @@ impl ConfigManager { Ok(results) } - pub async fn set(&self, keys: I) -> store::Result<()> + pub async fn set(&self, keys: I) -> trc::Result<()> where I: IntoIterator, T: Into, @@ -220,7 +216,7 @@ impl ConfigManager { Ok(()) } - pub async fn clear(&self, key: impl AsRef) -> store::Result<()> { + pub async fn clear(&self, key: impl AsRef) -> trc::Result<()> { let key = key.as_ref(); if self.cfg_local_patterns.is_local_key(key) { @@ -237,7 +233,7 @@ impl ConfigManager { } } - pub async fn clear_prefix(&self, key: impl AsRef) -> store::Result<()> { + pub async fn clear_prefix(&self, key: impl AsRef) -> trc::Result<()> { let key = key.as_ref(); // Delete local keys @@ -263,7 +259,7 @@ impl ConfigManager { .await } - async fn update_local(&self, map: BTreeMap) -> store::Result<()> { + async fn update_local(&self, map: BTreeMap) -> trc::Result<()> { let mut cfg_text = String::with_capacity(1024); for (key, value) in &map { cfg_text.push_str(key); @@ -319,17 +315,21 @@ impl ConfigManager { tokio::fs::write(&self.cfg_local_path, cfg_text) .await .map_err(|err| { - store::Error::InternalError(format!( - "Failed to write local configuration file: {err}" - )) + trc::Cause::Configuration + .caused_by(trc::Error::from(err)) + .ctx(trc::Key::Path, self.cfg_local_path.display().to_string()) }) } - pub async fn update_config_resource(&self, resource_id: &str) -> store::Result> { + pub async fn update_config_resource(&self, resource_id: &str) -> trc::Result> { let external = self .fetch_config_resource(resource_id) .await - 
.map_err(store::Error::InternalError)?; + .map_err(|reason| { + trc::Cause::Fetch + .caused_by(trc::location!()) + .ctx(trc::Key::Reason, reason) + })?; if self .get(&external.id) @@ -396,7 +396,7 @@ impl ConfigManager { } } - pub async fn get_services(&self) -> store::Result> { + pub async fn get_services(&self) -> trc::Result> { let mut result = Vec::new(); for listener in self diff --git a/crates/common/src/manager/reload.rs b/crates/common/src/manager/reload.rs index ec97ff2d..b78d11ed 100644 --- a/crates/common/src/manager/reload.rs +++ b/crates/common/src/manager/reload.rs @@ -26,7 +26,7 @@ pub struct ReloadResult { } impl Core { - pub async fn reload_blocked_ips(&self) -> store::Result { + pub async fn reload_blocked_ips(&self) -> trc::Result { let mut ip_addresses = AHashSet::new(); let mut config = self.storage.config.build_config(BLOCKED_IP_KEY).await?; @@ -51,7 +51,7 @@ impl Core { Ok(config.into()) } - pub async fn reload_certificates(&self) -> store::Result { + pub async fn reload_certificates(&self) -> trc::Result { let mut config = self.storage.config.build_config("certificate").await?; let mut certificates = self.tls.certificates.load().as_ref().clone(); @@ -62,7 +62,7 @@ impl Core { Ok(config.into()) } - pub async fn reload_lookups(&self) -> store::Result { + pub async fn reload_lookups(&self) -> trc::Result { let mut config = self.storage.config.build_config("certificate").await?; let mut stores = Stores::default(); stores.parse_memory_stores(&mut config); @@ -78,7 +78,7 @@ impl Core { }) } - pub async fn reload(&self) -> store::Result { + pub async fn reload(&self) -> trc::Result { let mut config = self.storage.config.build_config("").await?; // Parse tracers diff --git a/crates/common/src/manager/webadmin.rs b/crates/common/src/manager/webadmin.rs index 9a92f11f..89324b52 100644 --- a/crates/common/src/manager/webadmin.rs +++ b/crates/common/src/manager/webadmin.rs @@ -50,7 +50,7 @@ impl WebAdminManager { } } - pub async fn unpack(&self, 
blob_store: &BlobStore) -> store::Result<()> { + pub async fn unpack(&self, blob_store: &BlobStore) -> trc::Result<()> { // Delete any existing bundles self.bundle_path.clean().await?; @@ -58,17 +58,26 @@ impl WebAdminManager { let bundle = blob_store .get_blob(WEBADMIN_KEY, 0..usize::MAX) .await? - .ok_or_else(|| store::Error::InternalError("WebAdmin bundle not found".to_string()))?; + .ok_or_else(|| { + trc::Cause::NotFound + .caused_by(trc::location!()) + .details("Webadmin bundle not found") + })?; // Uncompress - let mut bundle = zip::ZipArchive::new(Cursor::new(bundle)) - .map_err(|err| store::Error::InternalError(format!("Unzip error: {err}")))?; + let mut bundle = zip::ZipArchive::new(Cursor::new(bundle)).map_err(|err| { + trc::Cause::Decompress + .caused_by(trc::location!()) + .reason(err) + })?; let mut routes = AHashMap::new(); for i in 0..bundle.len() { let (file_name, contents) = { - let mut file = bundle - .by_index(i) - .map_err(|err| store::Error::InternalError(format!("Unzip error: {err}")))?; + let mut file = bundle.by_index(i).map_err(|err| { + trc::Cause::Decompress + .caused_by(trc::location!()) + .reason(err) + })?; if file.is_dir() { continue; } @@ -113,14 +122,17 @@ impl WebAdminManager { Ok(()) } - pub async fn update_and_unpack(&self, core: &Core) -> store::Result<()> { + pub async fn update_and_unpack(&self, core: &Core) -> trc::Result<()> { let bytes = core .storage .config .fetch_resource("webadmin") .await .map_err(|err| { - store::Error::InternalError(format!("Failed to download webadmin: {err}")) + trc::Cause::Fetch + .caused_by(trc::location!()) + .reason(err) + .details("Failed to download webadmin") })?; core.storage.blob.put_blob(WEBADMIN_KEY, &bytes).await?; self.unpack(&core.storage.blob).await diff --git a/crates/common/src/scripts/plugins/lookup.rs b/crates/common/src/scripts/plugins/lookup.rs index 009b6883..bb9957d8 100644 --- a/crates/common/src/scripts/plugins/lookup.rs +++ b/crates/common/src/scripts/plugins/lookup.rs @@ 
-400,7 +400,7 @@ pub async fn exec_local_domain(ctx: PluginContext<'_>) -> Variable { pub struct VariableWrapper(Variable); impl Deserialize for VariableWrapper { - fn deserialize(bytes: &[u8]) -> store::Result { + fn deserialize(bytes: &[u8]) -> trc::Result { Ok(VariableWrapper( bincode::deserialize::(bytes).unwrap_or_else(|_| { Variable::String(String::from_utf8_lossy(bytes).into_owned().into()) diff --git a/crates/directory/Cargo.toml b/crates/directory/Cargo.toml index 607703a8..62a46556 100644 --- a/crates/directory/Cargo.toml +++ b/crates/directory/Cargo.toml @@ -7,6 +7,7 @@ resolver = "2" [dependencies] utils = { path = "../utils" } store = { path = "../store" } +trc = { path = "../trc" } jmap_proto = { path = "../jmap-proto" } smtp-proto = { version = "0.1" } mail-parser = { version = "0.9", features = ["full_encoding", "serde_support", "ludicrous_mode"] } @@ -21,7 +22,6 @@ deadpool = { version = "0.10", features = ["managed", "rt_tokio_1"] } async-trait = "0.1.68" parking_lot = "0.12" ahash = { version = "0.8" } -tracing = "0.1" lru-cache = "0.1.2" pwhash = "1" password-hash = "0.5.0" diff --git a/crates/directory/src/backend/imap/lookup.rs b/crates/directory/src/backend/imap/lookup.rs index e900df2d..e5a427b4 100644 --- a/crates/directory/src/backend/imap/lookup.rs +++ b/crates/directory/src/backend/imap/lookup.rs @@ -7,14 +7,18 @@ use mail_send::Credentials; use smtp_proto::{AUTH_CRAM_MD5, AUTH_LOGIN, AUTH_OAUTHBEARER, AUTH_PLAIN, AUTH_XOAUTH2}; -use crate::{DirectoryError, Principal, QueryBy}; +use crate::{IntoError, Principal, QueryBy}; use super::{ImapDirectory, ImapError}; impl ImapDirectory { - pub async fn query(&self, query: QueryBy<'_>) -> crate::Result>> { + pub async fn query(&self, query: QueryBy<'_>) -> trc::Result>> { if let QueryBy::Credentials(credentials) = query { - let mut client = self.pool.get().await?; + let mut client = self + .pool + .get() + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))?; let mechanism = 
match credentials { Credentials::Plain { .. } if (client.mechanisms & (AUTH_PLAIN | AUTH_LOGIN | AUTH_CRAM_MD5)) != 0 => @@ -34,13 +38,12 @@ impl ImapDirectory { AUTH_XOAUTH2 } _ => { - tracing::warn!( - context = "remote", - event = "error", - protocol = "imap", - "IMAP server does not offer any supported auth mechanisms.", - ); - return Ok(None); + trc::bail!(trc::Cause::Unsupported + .ctx( + trc::Key::Reason, + "IMAP server does not offer any supported auth mechanisms." + ) + .protocol(trc::Protocol::Imap)); } }; @@ -51,31 +54,41 @@ impl ImapDirectory { } Err(err) => match &err { ImapError::AuthenticationFailed => Ok(None), - _ => Err(err.into()), + _ => Err(err.into_error()), }, } } else { - Err(DirectoryError::unsupported("imap", "query")) + Err(trc::Cause::Unsupported + .caused_by(trc::location!()) + .protocol(trc::Protocol::Imap)) } } - pub async fn email_to_ids(&self, _address: &str) -> crate::Result> { - Err(DirectoryError::unsupported("imap", "email_to_ids")) + pub async fn email_to_ids(&self, _address: &str) -> trc::Result> { + Err(trc::Cause::Unsupported + .caused_by(trc::location!()) + .protocol(trc::Protocol::Imap)) } - pub async fn rcpt(&self, _address: &str) -> crate::Result { - Err(DirectoryError::unsupported("imap", "rcpt")) + pub async fn rcpt(&self, _address: &str) -> trc::Result { + Err(trc::Cause::Unsupported + .caused_by(trc::location!()) + .protocol(trc::Protocol::Imap)) } - pub async fn vrfy(&self, _address: &str) -> crate::Result> { - Err(DirectoryError::unsupported("imap", "vrfy")) + pub async fn vrfy(&self, _address: &str) -> trc::Result> { + Err(trc::Cause::Unsupported + .caused_by(trc::location!()) + .protocol(trc::Protocol::Imap)) } - pub async fn expn(&self, _address: &str) -> crate::Result> { - Err(DirectoryError::unsupported("imap", "expn")) + pub async fn expn(&self, _address: &str) -> trc::Result> { + Err(trc::Cause::Unsupported + .caused_by(trc::location!()) + .protocol(trc::Protocol::Imap)) } - pub async fn 
is_local_domain(&self, domain: &str) -> crate::Result { + pub async fn is_local_domain(&self, domain: &str) -> trc::Result { Ok(self.domains.contains(domain)) } } diff --git a/crates/directory/src/backend/internal/lookup.rs b/crates/directory/src/backend/internal/lookup.rs index daa02fd2..cbe1d353 100644 --- a/crates/directory/src/backend/internal/lookup.rs +++ b/crates/directory/src/backend/internal/lookup.rs @@ -20,13 +20,13 @@ pub trait DirectoryStore: Sync + Send { &self, by: QueryBy<'_>, return_member_of: bool, - ) -> crate::Result>>; - async fn email_to_ids(&self, email: &str) -> crate::Result>; + ) -> trc::Result>>; + async fn email_to_ids(&self, email: &str) -> trc::Result>; - async fn is_local_domain(&self, domain: &str) -> crate::Result; - async fn rcpt(&self, address: &str) -> crate::Result; - async fn vrfy(&self, address: &str) -> crate::Result>; - async fn expn(&self, address: &str) -> crate::Result>; + async fn is_local_domain(&self, domain: &str) -> trc::Result; + async fn rcpt(&self, address: &str) -> trc::Result; + async fn vrfy(&self, address: &str) -> trc::Result>; + async fn expn(&self, address: &str) -> trc::Result>; } impl DirectoryStore for Store { @@ -34,7 +34,7 @@ impl DirectoryStore for Store { &self, by: QueryBy<'_>, return_member_of: bool, - ) -> crate::Result>> { + ) -> trc::Result>> { let (account_id, secret) = match by { QueryBy::Name(name) => (self.get_account_id(name).await?, None), QueryBy::Id(account_id) => (account_id.into(), None), @@ -79,7 +79,7 @@ impl DirectoryStore for Store { } } - async fn email_to_ids(&self, email: &str) -> crate::Result> { + async fn email_to_ids(&self, email: &str) -> trc::Result> { if let Some(ptype) = self .get_value::(ValueKey::from(ValueClass::Directory( DirectoryClass::EmailToId(email.as_bytes().to_vec()), @@ -89,32 +89,30 @@ impl DirectoryStore for Store { if ptype.typ != Type::List { Ok(vec![ptype.account_id]) } else { - self.get_members(ptype.account_id).await.map_err(Into::into) + 
self.get_members(ptype.account_id).await } } else { Ok(Vec::new()) } } - async fn is_local_domain(&self, domain: &str) -> crate::Result { + async fn is_local_domain(&self, domain: &str) -> trc::Result { self.get_value::<()>(ValueKey::from(ValueClass::Directory( DirectoryClass::Domain(domain.as_bytes().to_vec()), ))) .await .map(|ids| ids.is_some()) - .map_err(Into::into) } - async fn rcpt(&self, address: &str) -> crate::Result { + async fn rcpt(&self, address: &str) -> trc::Result { self.get_value::<()>(ValueKey::from(ValueClass::Directory( DirectoryClass::EmailToId(address.as_bytes().to_vec()), ))) .await .map(|ids| ids.is_some()) - .map_err(Into::into) } - async fn vrfy(&self, address: &str) -> crate::Result> { + async fn vrfy(&self, address: &str) -> trc::Result> { let mut results = Vec::new(); let address = address.split('@').next().unwrap_or(address); if address.len() > 3 { @@ -141,7 +139,7 @@ impl DirectoryStore for Store { Ok(results) } - async fn expn(&self, address: &str) -> crate::Result> { + async fn expn(&self, address: &str) -> trc::Result> { let mut results = Vec::new(); for account_id in self.email_to_ids(address).await? 
{ if let Some(email) = self diff --git a/crates/directory/src/backend/internal/manage.rs b/crates/directory/src/backend/internal/manage.rs index 10598ef6..7518b296 100644 --- a/crates/directory/src/backend/internal/manage.rs +++ b/crates/directory/src/backend/internal/manage.rs @@ -12,8 +12,9 @@ use store::{ }, Deserialize, IterateParams, Serialize, Store, ValueKey, U32_LEN, }; +use trc::AddContext; -use crate::{DirectoryError, ManagementError, Principal, QueryBy, Type}; +use crate::{Principal, QueryBy, Type}; use super::{ lookup::DirectoryStore, PrincipalAction, PrincipalField, PrincipalIdType, PrincipalUpdate, @@ -22,83 +23,74 @@ use super::{ #[allow(async_fn_in_trait)] pub trait ManageDirectory: Sized { - async fn get_account_id(&self, name: &str) -> crate::Result>; - async fn get_or_create_account_id(&self, name: &str) -> crate::Result; - async fn get_account_name(&self, account_id: u32) -> crate::Result>; - async fn get_member_of(&self, account_id: u32) -> crate::Result>; - async fn get_members(&self, account_id: u32) -> crate::Result>; + async fn get_account_id(&self, name: &str) -> trc::Result>; + async fn get_or_create_account_id(&self, name: &str) -> trc::Result; + async fn get_account_name(&self, account_id: u32) -> trc::Result>; + async fn get_member_of(&self, account_id: u32) -> trc::Result>; + async fn get_members(&self, account_id: u32) -> trc::Result>; async fn create_account( &self, principal: Principal, members: Vec, - ) -> crate::Result; + ) -> trc::Result; async fn update_account( &self, by: QueryBy<'_>, changes: Vec, - ) -> crate::Result<()>; - async fn delete_account(&self, by: QueryBy<'_>) -> crate::Result<()>; + ) -> trc::Result<()>; + async fn delete_account(&self, by: QueryBy<'_>) -> trc::Result<()>; async fn list_accounts( &self, filter: Option<&str>, typ: Option, - ) -> crate::Result>; - async fn map_group_ids(&self, principal: Principal) -> crate::Result>; + ) -> trc::Result>; + async fn map_group_ids(&self, principal: Principal) -> 
trc::Result>; async fn map_principal( &self, principal: Principal, create_if_missing: bool, - ) -> crate::Result>; + ) -> trc::Result>; async fn map_group_names( &self, members: Vec, create_if_missing: bool, - ) -> crate::Result>; - async fn create_domain(&self, domain: &str) -> crate::Result<()>; - async fn delete_domain(&self, domain: &str) -> crate::Result<()>; - async fn list_domains(&self, filter: Option<&str>) -> crate::Result>; + ) -> trc::Result>; + async fn create_domain(&self, domain: &str) -> trc::Result<()>; + async fn delete_domain(&self, domain: &str) -> trc::Result<()>; + async fn list_domains(&self, filter: Option<&str>) -> trc::Result>; } impl ManageDirectory for Store { - async fn get_account_name(&self, account_id: u32) -> crate::Result> { + async fn get_account_name(&self, account_id: u32) -> trc::Result> { self.get_value::>(ValueKey::from(ValueClass::Directory( DirectoryClass::Principal(account_id), ))) .await - .map_err(Into::into) - .map(|v| { - if let Some(v) = v { - Some(v.name) - } else { - tracing::debug!( - context = "directory", - event = "not_found", - account = account_id, - "Principal not found for account id" - ); - - None - } - }) + .map(|v| if let Some(v) = v { Some(v.name) } else { None }) + .caused_by(trc::location!()) } - async fn get_account_id(&self, name: &str) -> crate::Result> { + async fn get_account_id(&self, name: &str) -> trc::Result> { self.get_value::(ValueKey::from(ValueClass::Directory( DirectoryClass::NameToId(name.as_bytes().to_vec()), ))) .await .map(|v| v.map(|v| v.account_id)) - .map_err(Into::into) + .caused_by(trc::location!()) } // Used by all directories except internal - async fn get_or_create_account_id(&self, name: &str) -> crate::Result { + async fn get_or_create_account_id(&self, name: &str) -> trc::Result { let mut try_count = 0; let name = name.to_lowercase(); loop { // Try to obtain ID - if let Some(account_id) = self.get_account_id(&name).await? 
{ + if let Some(account_id) = self + .get_account_id(&name) + .await + .caused_by(trc::location!())? + { return Ok(account_id); } @@ -129,16 +121,13 @@ impl ManageDirectory for Store { Ok(account_id) => { return Ok(account_id); } - Err(store::Error::AssertValueFailed) if try_count < 3 => { - try_count += 1; - continue; - } Err(err) => { - tracing::error!(event = "error", - context = "store", - error = ?err, - "Failed to generate account id"); - return Err(err.into()); + if err.matches(trc::Cause::AssertValue) && try_count < 3 { + try_count += 1; + continue; + } else { + return Err(err.caused_by(trc::location!())); + } } } } @@ -148,41 +137,46 @@ impl ManageDirectory for Store { &self, principal: Principal, members: Vec, - ) -> crate::Result { + ) -> trc::Result { // Make sure the principal has a name if principal.name.is_empty() { - return Err(DirectoryError::Management(ManagementError::MissingField( - PrincipalField::Name, - ))); + return Err(not_found(PrincipalField::Name)); } // Map group names - let mut principal = self.map_principal(principal, false).await?; - let members = self.map_group_names(members, false).await?; + let mut principal = self + .map_principal(principal, false) + .await + .caused_by(trc::location!())?; + let members = self + .map_group_names(members, false) + .await + .caused_by(trc::location!())?; // Make sure new name is not taken principal.name = principal.name.to_lowercase(); - if self.get_account_id(&principal.name).await?.is_some() { - return Err(DirectoryError::Management(ManagementError::AlreadyExists { - field: PrincipalField::Name, - value: principal.name, - })); + if self + .get_account_id(&principal.name) + .await + .caused_by(trc::location!())? + .is_some() + { + return Err(err_exists(PrincipalField::Name, principal.name)); } // Make sure the e-mail is not taken and validate domain for email in principal.emails.iter_mut() { *email = email.to_lowercase(); - if self.rcpt(email).await? 
{ - return Err(DirectoryError::Management(ManagementError::AlreadyExists { - field: PrincipalField::Emails, - value: email.to_string(), - })); + if self.rcpt(email).await.caused_by(trc::location!())? { + return Err(err_exists(PrincipalField::Emails, email.to_string())); } if let Some(domain) = email.split('@').nth(1) { - if !self.is_local_domain(domain).await? { - return Err(DirectoryError::Management(ManagementError::NotFound( - domain.to_string(), - ))); + if !self + .is_local_domain(domain) + .await + .caused_by(trc::location!())? + { + return Err(not_found(domain.to_string())); } } } @@ -254,14 +248,15 @@ impl ManageDirectory for Store { self.write(batch.build()) .await .and_then(|r| r.last_document_id()) - .map_err(Into::into) } - async fn delete_account(&self, by: QueryBy<'_>) -> crate::Result<()> { + async fn delete_account(&self, by: QueryBy<'_>) -> trc::Result<()> { let account_id = match by { - QueryBy::Name(name) => self.get_account_id(name).await?.ok_or_else(|| { - DirectoryError::Management(ManagementError::NotFound(name.to_string())) - })?, + QueryBy::Name(name) => self + .get_account_id(name) + .await + .caused_by(trc::location!())? + .ok_or_else(|| not_found(name.to_string()))?, QueryBy::Id(account_id) => account_id, QueryBy::Credentials(_) => unreachable!(), }; @@ -270,19 +265,24 @@ impl ManageDirectory for Store { .get_value::>(ValueKey::from(ValueClass::Directory( DirectoryClass::Principal(account_id), ))) - .await? - .ok_or_else(|| { - DirectoryError::Management(ManagementError::NotFound(account_id.to_string())) - })?; + .await + .caused_by(trc::location!())? 
+ .ok_or_else(|| not_found(account_id.to_string()))?; // Unlink all account's blobs - self.blob_hash_unlink_account(account_id).await?; + self.blob_hash_unlink_account(account_id) + .await + .caused_by(trc::location!())?; // Revoke ACLs - self.acl_revoke_all(account_id).await?; + self.acl_revoke_all(account_id) + .await + .caused_by(trc::location!())?; // Delete account data - self.purge_account(account_id).await?; + self.purge_account(account_id) + .await + .caused_by(trc::location!())?; // Delete account let mut batch = BatchBuilder::new(); @@ -298,7 +298,11 @@ impl ManageDirectory for Store { batch.clear(DirectoryClass::EmailToId(email.into_bytes())); } - for member_id in self.get_member_of(account_id).await? { + for member_id in self + .get_member_of(account_id) + .await + .caused_by(trc::location!())? + { batch.clear(DirectoryClass::MemberOf { principal_id: MaybeDynamicId::Static(account_id), member_of: MaybeDynamicId::Static(member_id), @@ -309,7 +313,11 @@ impl ManageDirectory for Store { }); } - for member_id in self.get_members(account_id).await? { + for member_id in self + .get_members(account_id) + .await + .caused_by(trc::location!())? + { batch.clear(DirectoryClass::MemberOf { principal_id: MaybeDynamicId::Static(member_id), member_of: MaybeDynamicId::Static(account_id), @@ -320,7 +328,9 @@ impl ManageDirectory for Store { }); } - self.write(batch.build()).await?; + self.write(batch.build()) + .await + .caused_by(trc::location!())?; Ok(()) } @@ -329,11 +339,13 @@ impl ManageDirectory for Store { &self, by: QueryBy<'_>, changes: Vec, - ) -> crate::Result<()> { + ) -> trc::Result<()> { let account_id = match by { - QueryBy::Name(name) => self.get_account_id(name).await?.ok_or_else(|| { - DirectoryError::Management(ManagementError::NotFound(name.to_string())) - })?, + QueryBy::Name(name) => self + .get_account_id(name) + .await + .caused_by(trc::location!())? 
+ .ok_or_else(|| not_found(name.to_string()))?, QueryBy::Id(account_id) => account_id, QueryBy::Credentials(_) => unreachable!(), }; @@ -343,14 +355,19 @@ impl ManageDirectory for Store { .get_value::>>(ValueKey::from(ValueClass::Directory( DirectoryClass::Principal(account_id), ))) - .await? - .ok_or_else(|| { - DirectoryError::Management(ManagementError::NotFound(account_id.to_string())) - })?; + .await + .caused_by(trc::location!())? + .ok_or_else(|| not_found(account_id.to_string()))?; // Obtain members and memberOf - let mut member_of = self.get_member_of(account_id).await?; - let mut members = self.get_members(account_id).await?; + let mut member_of = self + .get_member_of(account_id) + .await + .caused_by(trc::location!())?; + let mut members = self + .get_members(account_id) + .await + .caused_by(trc::location!())?; // Apply changes let mut batch = BatchBuilder::new(); @@ -375,13 +392,13 @@ impl ManageDirectory for Store { // Make sure new name is not taken let new_name = new_name.to_lowercase(); if principal.inner.name != new_name { - if self.get_account_id(&new_name).await?.is_some() { - return Err(DirectoryError::Management( - ManagementError::AlreadyExists { - field: PrincipalField::Name, - value: new_name, - }, - )); + if self + .get_account_id(&new_name) + .await + .caused_by(trc::location!())? + .is_some() + { + return Err(err_exists(PrincipalField::Name, new_name)); } batch.clear(ValueClass::Directory(DirectoryClass::NameToId( @@ -405,7 +422,7 @@ impl ManageDirectory for Store { continue; } } - return Err(DirectoryError::Unsupported); + return Err(trc::Cause::Unsupported.caused_by(trc::location!())); } ( PrincipalAction::Set, @@ -472,19 +489,16 @@ impl ManageDirectory for Store { .collect::>(); for email in &emails { if !principal.inner.emails.contains(email) { - if self.rcpt(email).await? 
{ - return Err(DirectoryError::Management( - ManagementError::AlreadyExists { - field: PrincipalField::Emails, - value: email.to_string(), - }, - )); + if self.rcpt(email).await.caused_by(trc::location!())? { + return Err(err_exists(PrincipalField::Emails, email.to_string())); } if let Some(domain) = email.split('@').nth(1) { - if !self.is_local_domain(domain).await? { - return Err(DirectoryError::Management( - ManagementError::NotFound(domain.to_string()), - )); + if !self + .is_local_domain(domain) + .await + .caused_by(trc::location!())? + { + return Err(not_found(domain.to_string())); } } batch.set( @@ -513,19 +527,16 @@ impl ManageDirectory for Store { ) => { let email = email.to_lowercase(); if !principal.inner.emails.contains(&email) { - if self.rcpt(&email).await? { - return Err(DirectoryError::Management( - ManagementError::AlreadyExists { - field: PrincipalField::Emails, - value: email, - }, - )); + if self.rcpt(&email).await.caused_by(trc::location!())? { + return Err(err_exists(PrincipalField::Emails, email)); } if let Some(domain) = email.split('@').nth(1) { - if !self.is_local_domain(domain).await? { - return Err(DirectoryError::Management(ManagementError::NotFound( - domain.to_string(), - ))); + if !self + .is_local_domain(domain) + .await + .caused_by(trc::location!())? + { + return Err(not_found(domain.to_string())); } } batch.set( @@ -559,9 +570,11 @@ impl ManageDirectory for Store { ) => { let mut new_member_of = Vec::new(); for member in members { - let member_id = self.get_account_id(&member).await?.ok_or_else(|| { - DirectoryError::Management(ManagementError::NotFound(member)) - })?; + let member_id = self + .get_account_id(&member) + .await + .caused_by(trc::location!())? 
+ .ok_or_else(|| not_found(member))?; if !member_of.contains(&member_id) { batch.set( ValueClass::Directory(DirectoryClass::MemberOf { @@ -602,9 +615,11 @@ impl ManageDirectory for Store { PrincipalField::MemberOf, PrincipalValue::String(member), ) => { - let member_id = self.get_account_id(&member).await?.ok_or_else(|| { - DirectoryError::Management(ManagementError::NotFound(member)) - })?; + let member_id = self + .get_account_id(&member) + .await + .caused_by(trc::location!())? + .ok_or_else(|| not_found(member))?; if !member_of.contains(&member_id) { batch.set( ValueClass::Directory(DirectoryClass::MemberOf { @@ -628,7 +643,11 @@ impl ManageDirectory for Store { PrincipalField::MemberOf, PrincipalValue::String(member), ) => { - if let Some(member_id) = self.get_account_id(&member).await? { + if let Some(member_id) = self + .get_account_id(&member) + .await + .caused_by(trc::location!())? + { if let Some(pos) = member_of.iter().position(|v| *v == member_id) { batch.clear(ValueClass::Directory(DirectoryClass::MemberOf { principal_id: MaybeDynamicId::Static(account_id), @@ -650,9 +669,11 @@ impl ManageDirectory for Store { ) => { let mut new_members = Vec::new(); for member in members_ { - let member_id = self.get_account_id(&member).await?.ok_or_else(|| { - DirectoryError::Management(ManagementError::NotFound(member)) - })?; + let member_id = self + .get_account_id(&member) + .await + .caused_by(trc::location!())? + .ok_or_else(|| not_found(member))?; if !members.contains(&member_id) { batch.set( ValueClass::Directory(DirectoryClass::MemberOf { @@ -693,9 +714,11 @@ impl ManageDirectory for Store { PrincipalField::Members, PrincipalValue::String(member), ) => { - let member_id = self.get_account_id(&member).await?.ok_or_else(|| { - DirectoryError::Management(ManagementError::NotFound(member)) - })?; + let member_id = self + .get_account_id(&member) + .await + .caused_by(trc::location!())? 
+ .ok_or_else(|| not_found(member))?; if !members.contains(&member_id) { batch.set( ValueClass::Directory(DirectoryClass::MemberOf { @@ -719,7 +742,11 @@ impl ManageDirectory for Store { PrincipalField::Members, PrincipalValue::String(member), ) => { - if let Some(member_id) = self.get_account_id(&member).await? { + if let Some(member_id) = self + .get_account_id(&member) + .await + .caused_by(trc::location!())? + { if let Some(pos) = members.iter().position(|v| *v == member_id) { batch.clear(ValueClass::Directory(DirectoryClass::MemberOf { principal_id: MaybeDynamicId::Static(member_id), @@ -735,7 +762,7 @@ impl ManageDirectory for Store { } _ => { - return Err(DirectoryError::Unsupported); + return Err(trc::Cause::Unsupported.caused_by(trc::location!())); } } } @@ -749,45 +776,37 @@ impl ManageDirectory for Store { ); } - self.write(batch.build()).await?; + self.write(batch.build()) + .await + .caused_by(trc::location!())?; Ok(()) } - async fn create_domain(&self, domain: &str) -> crate::Result<()> { + async fn create_domain(&self, domain: &str) -> trc::Result<()> { if !domain.contains('.') { - return Err(DirectoryError::Management(ManagementError::MissingField( - PrincipalField::Name, - ))); + return Err(err_missing(PrincipalField::Name)); } let mut batch = BatchBuilder::new(); batch.set( ValueClass::Directory(DirectoryClass::Domain(domain.to_lowercase().into_bytes())), vec![], ); - self.write(batch.build()) - .await - .map_err(Into::into) - .map(|_| ()) + self.write(batch.build()).await.map(|_| ()) } - async fn delete_domain(&self, domain: &str) -> crate::Result<()> { + async fn delete_domain(&self, domain: &str) -> trc::Result<()> { if !domain.contains('.') { - return Err(DirectoryError::Management(ManagementError::MissingField( - PrincipalField::Name, - ))); + return Err(err_missing(PrincipalField::Name)); } let mut batch = BatchBuilder::new(); batch.clear(ValueClass::Directory(DirectoryClass::Domain( domain.to_lowercase().into_bytes(), ))); - 
self.write(batch.build()) - .await - .map_err(Into::into) - .map(|_| ()) + self.write(batch.build()).await.map(|_| ()) } - async fn map_group_ids(&self, principal: Principal) -> crate::Result> { + async fn map_group_ids(&self, principal: Principal) -> trc::Result> { let mut mapped = Principal { id: principal.id, typ: principal.typ, @@ -800,7 +819,11 @@ impl ManageDirectory for Store { }; for account_id in principal.member_of { - if let Some(name) = self.get_account_name(account_id).await? { + if let Some(name) = self + .get_account_name(account_id) + .await + .caused_by(trc::location!())? + { mapped.member_of.push(name); } } @@ -812,7 +835,7 @@ impl ManageDirectory for Store { &self, principal: Principal, create_if_missing: bool, - ) -> crate::Result> { + ) -> trc::Result> { Ok(Principal { id: principal.id, typ: principal.typ, @@ -822,7 +845,8 @@ impl ManageDirectory for Store { emails: principal.emails, member_of: self .map_group_names(principal.member_of, create_if_missing) - .await?, + .await + .caused_by(trc::location!())?, description: principal.description, }) } @@ -831,16 +855,19 @@ impl ManageDirectory for Store { &self, members: Vec, create_if_missing: bool, - ) -> crate::Result> { + ) -> trc::Result> { let mut member_ids = Vec::with_capacity(members.len()); for member in members { let account_id = if create_if_missing { - self.get_or_create_account_id(&member).await? + self.get_or_create_account_id(&member) + .await + .caused_by(trc::location!())? } else { self.get_account_id(&member) - .await? - .ok_or_else(|| DirectoryError::Management(ManagementError::NotFound(member)))? + .await + .caused_by(trc::location!())? + .ok_or_else(|| not_found(member))? 
}; member_ids.push(account_id); } @@ -852,7 +879,7 @@ impl ManageDirectory for Store { &self, filter: Option<&str>, typ: Option, - ) -> crate::Result> { + ) -> trc::Result> { let from_key = ValueKey::from(ValueClass::Directory(DirectoryClass::NameToId(vec![]))); let to_key = ValueKey::from(ValueClass::Directory(DirectoryClass::NameToId(vec![ u8::MAX; @@ -863,7 +890,7 @@ impl ManageDirectory for Store { self.iterate( IterateParams::new(from_key, to_key).ascending(), |key, value| { - let pt = PrincipalIdType::deserialize(value)?; + let pt = PrincipalIdType::deserialize(value).caused_by(trc::location!())?; if typ.map_or(true, |t| pt.typ == t) { results.push(( @@ -875,7 +902,8 @@ impl ManageDirectory for Store { Ok(true) }, ) - .await?; + .await + .caused_by(trc::location!())?; if let Some(filter) = filter { let mut filtered = Vec::new(); @@ -889,12 +917,9 @@ impl ManageDirectory for Store { .get_value::>(ValueKey::from(ValueClass::Directory( DirectoryClass::Principal(account_id), ))) - .await? - .ok_or_else(|| { - DirectoryError::Management(ManagementError::NotFound( - account_id.to_string(), - )) - })?; + .await + .caused_by(trc::location!())? 
+ .ok_or_else(|| not_found(account_id.to_string()))?; if filters.iter().all(|f| { principal.name.to_lowercase().contains(f) || principal @@ -916,7 +941,7 @@ impl ManageDirectory for Store { } } - async fn list_domains(&self, filter: Option<&str>) -> crate::Result> { + async fn list_domains(&self, filter: Option<&str>) -> trc::Result> { let from_key = ValueKey::from(ValueClass::Directory(DirectoryClass::Domain(vec![]))); let to_key = ValueKey::from(ValueClass::Directory(DirectoryClass::Domain(vec![ u8::MAX; @@ -934,12 +959,13 @@ impl ManageDirectory for Store { Ok(true) }, ) - .await?; + .await + .caused_by(trc::location!())?; Ok(results) } - async fn get_member_of(&self, account_id: u32) -> crate::Result> { + async fn get_member_of(&self, account_id: u32) -> trc::Result> { let from_key = ValueKey::from(ValueClass::Directory(DirectoryClass::MemberOf { principal_id: account_id, member_of: 0, @@ -956,11 +982,12 @@ impl ManageDirectory for Store { Ok(true) }, ) - .await?; + .await + .caused_by(trc::location!())?; Ok(results) } - async fn get_members(&self, account_id: u32) -> crate::Result> { + async fn get_members(&self, account_id: u32) -> trc::Result> { let from_key = ValueKey::from(ValueClass::Directory(DirectoryClass::Members { principal_id: account_id, has_member: 0, @@ -977,15 +1004,16 @@ impl ManageDirectory for Store { Ok(true) }, ) - .await?; + .await + .caused_by(trc::location!())?; Ok(results) } } impl SerializeWithId for Principal { - fn serialize_with_id(&self, ids: &AssignedIds) -> store::Result> { + fn serialize_with_id(&self, ids: &AssignedIds) -> trc::Result> { let mut principal = self.clone(); - principal.id = ids.last_document_id()?; + principal.id = ids.last_document_id().caused_by(trc::location!())?; Ok(principal.serialize()) } } @@ -1000,7 +1028,7 @@ impl From> for MaybeDynamicValue { struct DynamicPrincipalIdType(Type); impl SerializeWithId for DynamicPrincipalIdType { - fn serialize_with_id(&self, ids: &AssignedIds) -> store::Result> { + fn 
serialize_with_id(&self, ids: &AssignedIds) -> trc::Result> { ids.last_document_id() .map(|account_id| PrincipalIdType::new(account_id, self.0).serialize()) } @@ -1026,3 +1054,23 @@ impl From> for Principal { } } } + +fn err_missing(field: impl Into) -> trc::Error { + trc::Cause::MissingParameter.ctx(trc::Key::Key, field) +} + +fn err_exists(field: impl Into, value: impl Into) -> trc::Error { + trc::Cause::AlreadyExists + .ctx(trc::Key::Key, field) + .ctx(trc::Key::Value, value) +} + +fn not_found(value: impl Into) -> trc::Error { + trc::Cause::NotFound.ctx(trc::Key::Key, value) +} + +impl From for trc::Value { + fn from(value: PrincipalField) -> Self { + trc::Value::Static(value.as_str()) + } +} diff --git a/crates/directory/src/backend/internal/mod.rs b/crates/directory/src/backend/internal/mod.rs index 656f7243..4dba13a5 100644 --- a/crates/directory/src/backend/internal/mod.rs +++ b/crates/directory/src/backend/internal/mod.rs @@ -56,9 +56,12 @@ impl Serialize for &Principal { } impl Deserialize for Principal { - fn deserialize(bytes: &[u8]) -> store::Result { - deserialize(bytes) - .ok_or_else(|| store::Error::InternalError("Failed to deserialize principal".into())) + fn deserialize(bytes: &[u8]) -> trc::Result { + deserialize(bytes).ok_or_else(|| { + trc::Cause::DataCorruption + .caused_by(trc::location!()) + .ctx(trc::Key::Value, bytes) + }) } } @@ -72,14 +75,18 @@ impl Serialize for PrincipalIdType { } impl Deserialize for PrincipalIdType { - fn deserialize(bytes: &[u8]) -> store::Result { - let mut bytes = bytes.iter(); + fn deserialize(bytes_: &[u8]) -> trc::Result { + let mut bytes = bytes_.iter(); Ok(PrincipalIdType { account_id: bytes.next_leb128().ok_or_else(|| { - store::Error::InternalError("Failed to deserialize principal account id".into()) + trc::Cause::DataCorruption + .caused_by(trc::location!()) + .ctx(trc::Key::Value, bytes_) })?, typ: Type::from_u8(*bytes.next().ok_or_else(|| { - store::Error::InternalError("Failed to deserialize principal 
id type".into()) + trc::Cause::DataCorruption + .caused_by(trc::location!()) + .ctx(trc::Key::Value, bytes_) })?), }) } @@ -189,15 +196,21 @@ impl PrincipalUpdate { impl Display for PrincipalField { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.as_str().fmt(f) + } +} + +impl PrincipalField { + pub fn as_str(&self) -> &'static str { match self { - PrincipalField::Name => write!(f, "name"), - PrincipalField::Type => write!(f, "type"), - PrincipalField::Quota => write!(f, "quota"), - PrincipalField::Description => write!(f, "description"), - PrincipalField::Secrets => write!(f, "secrets"), - PrincipalField::Emails => write!(f, "emails"), - PrincipalField::MemberOf => write!(f, "memberOf"), - PrincipalField::Members => write!(f, "members"), + PrincipalField::Name => "name", + PrincipalField::Type => "type", + PrincipalField::Quota => "quota", + PrincipalField::Description => "description", + PrincipalField::Secrets => "secrets", + PrincipalField::Emails => "emails", + PrincipalField::MemberOf => "memberOf", + PrincipalField::Members => "members", } } } diff --git a/crates/directory/src/backend/ldap/lookup.rs b/crates/directory/src/backend/ldap/lookup.rs index 339f6e75..d837cfda 100644 --- a/crates/directory/src/backend/ldap/lookup.rs +++ b/crates/directory/src/backend/ldap/lookup.rs @@ -4,10 +4,10 @@ * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL */ -use ldap3::{Ldap, LdapConnAsync, LdapError, Scope, SearchEntry}; +use ldap3::{Ldap, LdapConnAsync, Scope, SearchEntry}; use mail_send::Credentials; -use crate::{backend::internal::manage::ManageDirectory, DirectoryError, Principal, QueryBy, Type}; +use crate::{backend::internal::manage::ManageDirectory, IntoError, Principal, QueryBy, Type}; use super::{LdapDirectory, LdapMappings}; @@ -16,8 +16,8 @@ impl LdapDirectory { &self, by: QueryBy<'_>, return_member_of: bool, - ) -> crate::Result>> { - let mut conn = self.pool.get().await?; + ) -> trc::Result>> { + let mut conn = 
self.pool.get().await.map_err(|err| err.into_error())?; let mut account_id = None; let account_name; @@ -64,19 +64,26 @@ impl LdapDirectory { self.pool.manager().settings.clone(), &self.pool.manager().address, ) - .await?; + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))?; ldap3::drive!(conn); - ldap.simple_bind(&auth_bind.build(username), secret).await?; + ldap.simple_bind(&auth_bind.build(username), secret) + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))?; match self .find_principal(&mut ldap, &self.mappings.filter_name.build(username)) .await { Ok(Some(principal)) => principal, - Err(DirectoryError::Ldap(LdapError::LdapResult { result })) - if [49, 50].contains(&result.rc) => + Err(err) + if err.matches(trc::Cause::Ldap) + && err + .value(trc::Key::Code) + .and_then(|v| v.to_uint()) + .map_or(false, |rc| [49, 50].contains(&rc)) => { return Ok(None); } @@ -90,13 +97,6 @@ impl LdapDirectory { if principal.verify_secret(secret).await? { principal } else { - tracing::debug!( - context = "directory", - event = "invalid_password", - protocol = "ldap", - account = username, - "Invalid password for account" - ); return Ok(None); } } else { @@ -128,8 +128,10 @@ impl LdapDirectory { "objectClass=*", &self.mappings.attr_name, ) - .await? - .success()?; + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? + .success() + .map_err(|err| err.into_error().caused_by(trc::location!()))?; for entry in rs { 'outer: for (attr, value) in SearchEntry::construct(entry).attrs { if self.mappings.attr_name.contains(&attr) { @@ -156,20 +158,23 @@ impl LdapDirectory { } } - pub async fn email_to_ids(&self, address: &str) -> crate::Result> { + pub async fn email_to_ids(&self, address: &str) -> trc::Result> { let rs = self .pool .get() - .await? + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? 
.search( &self.mappings.base_dn, Scope::Subtree, &self.mappings.filter_email.build(address.as_ref()), &self.mappings.attr_name, ) - .await? + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? .success() - .map(|(rs, _res)| rs)?; + .map(|(rs, _res)| rs) + .map_err(|err| err.into_error().caused_by(trc::location!()))?; let mut ids = Vec::with_capacity(rs.len()); for entry in rs { @@ -187,38 +192,46 @@ impl LdapDirectory { Ok(ids) } - pub async fn rcpt(&self, address: &str) -> crate::Result { + pub async fn rcpt(&self, address: &str) -> trc::Result { self.pool .get() - .await? + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? .streaming_search( &self.mappings.base_dn, Scope::Subtree, &self.mappings.filter_email.build(address.as_ref()), &self.mappings.attr_email_address, ) - .await? + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? .next() .await .map(|entry| entry.is_some()) - .map_err(|e| e.into()) + .map_err(|err| err.into_error().caused_by(trc::location!())) } - pub async fn vrfy(&self, address: &str) -> crate::Result> { + pub async fn vrfy(&self, address: &str) -> trc::Result> { let mut stream = self .pool .get() - .await? + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? .streaming_search( &self.mappings.base_dn, Scope::Subtree, &self.mappings.filter_verify.build(address), &self.mappings.attr_email_address, ) - .await?; + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))?; let mut emails = Vec::new(); - while let Some(entry) = stream.next().await? { + while let Some(entry) = stream + .next() + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? 
+ { let entry = SearchEntry::construct(entry); for attr in &self.mappings.attr_email_address { if let Some(values) = entry.attrs.get(attr) { @@ -234,21 +247,27 @@ impl LdapDirectory { Ok(emails) } - pub async fn expn(&self, address: &str) -> crate::Result> { + pub async fn expn(&self, address: &str) -> trc::Result> { let mut stream = self .pool .get() - .await? + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? .streaming_search( &self.mappings.base_dn, Scope::Subtree, &self.mappings.filter_expand.build(address), &self.mappings.attr_email_address, ) - .await?; + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))?; let mut emails = Vec::new(); - while let Some(entry) = stream.next().await? { + while let Some(entry) = stream + .next() + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? + { let entry = SearchEntry::construct(entry); for attr in &self.mappings.attr_email_address { if let Some(values) = entry.attrs.get(attr) { @@ -264,21 +283,23 @@ impl LdapDirectory { Ok(emails) } - pub async fn is_local_domain(&self, domain: &str) -> crate::Result { + pub async fn is_local_domain(&self, domain: &str) -> trc::Result { self.pool .get() - .await? + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? .streaming_search( &self.mappings.base_dn, Scope::Subtree, &self.mappings.filter_domains.build(domain), Vec::::new(), ) - .await? + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? .next() .await .map(|entry| entry.is_some()) - .map_err(|e| e.into()) + .map_err(|err| err.into_error().caused_by(trc::location!())) } } @@ -287,14 +308,15 @@ impl LdapDirectory { &self, conn: &mut Ldap, filter: &str, - ) -> crate::Result>> { + ) -> trc::Result>> { conn.search( &self.mappings.base_dn, Scope::Subtree, filter, &self.mappings.attrs_principal, ) - .await? + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? 
.success() .map(|(rs, _)| { rs.into_iter().next().map(|entry| { @@ -302,7 +324,7 @@ impl LdapDirectory { .entry_to_principal(SearchEntry::construct(entry)) }) }) - .map_err(Into::into) + .map_err(|err| err.into_error().caused_by(trc::location!())) } } @@ -310,12 +332,7 @@ impl LdapMappings { fn entry_to_principal(&self, entry: SearchEntry) -> Principal { let mut principal = Principal::default(); - tracing::debug!( - context = "ldap", - event = "fetch_principal", - entry = ?entry, - "LDAP entry" - ); + trc::trace!(LdapQuery, Value = format!("{entry:?}")); for (attr, value) in entry.attrs { if self.attr_name.contains(&attr) { diff --git a/crates/directory/src/backend/memory/lookup.rs b/crates/directory/src/backend/memory/lookup.rs index b4cc5450..979f70b1 100644 --- a/crates/directory/src/backend/memory/lookup.rs +++ b/crates/directory/src/backend/memory/lookup.rs @@ -11,7 +11,7 @@ use crate::{Principal, QueryBy}; use super::{EmailType, MemoryDirectory}; impl MemoryDirectory { - pub async fn query(&self, by: QueryBy<'_>) -> crate::Result>> { + pub async fn query(&self, by: QueryBy<'_>) -> trc::Result>> { match by { QueryBy::Name(name) => { for principal in &self.principals { @@ -48,7 +48,7 @@ impl MemoryDirectory { Ok(None) } - pub async fn email_to_ids(&self, address: &str) -> crate::Result> { + pub async fn email_to_ids(&self, address: &str) -> trc::Result> { Ok(self .emails_to_ids .get(address) @@ -65,11 +65,11 @@ impl MemoryDirectory { .unwrap_or_default()) } - pub async fn rcpt(&self, address: &str) -> crate::Result { + pub async fn rcpt(&self, address: &str) -> trc::Result { Ok(self.emails_to_ids.contains_key(address)) } - pub async fn vrfy(&self, address: &str) -> crate::Result> { + pub async fn vrfy(&self, address: &str) -> trc::Result> { let mut result = Vec::new(); for (key, value) in &self.emails_to_ids { if key.contains(address) && value.iter().any(|t| matches!(t, EmailType::Primary(_))) { @@ -79,7 +79,7 @@ impl MemoryDirectory { Ok(result) } - pub async 
fn expn(&self, address: &str) -> crate::Result> { + pub async fn expn(&self, address: &str) -> trc::Result> { let mut result = Vec::new(); for (key, value) in &self.emails_to_ids { if key == address { @@ -100,7 +100,7 @@ impl MemoryDirectory { Ok(result) } - pub async fn is_local_domain(&self, domain: &str) -> crate::Result { + pub async fn is_local_domain(&self, domain: &str) -> trc::Result { Ok(self.domains.contains(domain)) } } diff --git a/crates/directory/src/backend/smtp/lookup.rs b/crates/directory/src/backend/smtp/lookup.rs index e1a15663..298c4db2 100644 --- a/crates/directory/src/backend/smtp/lookup.rs +++ b/crates/directory/src/backend/smtp/lookup.rs @@ -7,36 +7,52 @@ use mail_send::{smtp::AssertReply, Credentials}; use smtp_proto::Severity; -use crate::{DirectoryError, Principal, QueryBy}; +use crate::{IntoError, Principal, QueryBy}; use super::{SmtpClient, SmtpDirectory}; impl SmtpDirectory { - pub async fn query(&self, query: QueryBy<'_>) -> crate::Result>> { + pub async fn query(&self, query: QueryBy<'_>) -> trc::Result>> { if let QueryBy::Credentials(credentials) = query { - self.pool.get().await?.authenticate(credentials).await + self.pool + .get() + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? 
+ .authenticate(credentials) + .await } else { - Err(DirectoryError::unsupported("smtp", "query")) + Err(trc::Cause::Unsupported + .caused_by(trc::location!()) + .protocol(trc::Protocol::Smtp)) } } - pub async fn email_to_ids(&self, _address: &str) -> crate::Result> { - Err(DirectoryError::unsupported("smtp", "email_to_ids")) + pub async fn email_to_ids(&self, _address: &str) -> trc::Result> { + Err(trc::Cause::Unsupported + .caused_by(trc::location!()) + .protocol(trc::Protocol::Smtp)) } - pub async fn rcpt(&self, address: &str) -> crate::Result { - let mut conn = self.pool.get().await?; + pub async fn rcpt(&self, address: &str) -> trc::Result { + let mut conn = self + .pool + .get() + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))?; if !conn.sent_mail_from { conn.client .cmd(b"MAIL FROM:<>\r\n") - .await? - .assert_positive_completion()?; + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? + .assert_positive_completion() + .map_err(|err| err.into_error().caused_by(trc::location!()))?; conn.sent_mail_from = true; } let reply = conn .client .cmd(format!("RCPT TO:<{address}>\r\n").as_bytes()) - .await?; + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))?; match reply.severity() { Severity::PositiveCompletion => { conn.num_rcpts += 1; @@ -48,27 +64,32 @@ impl SmtpDirectory { Ok(true) } Severity::PermanentNegativeCompletion => Ok(false), - _ => Err(mail_send::Error::UnexpectedReply(reply).into()), + _ => Err(trc::Cause::Unexpected + .ctx(trc::Key::Protocol, trc::Protocol::Smtp) + .ctx(trc::Key::Code, reply.code()) + .ctx(trc::Key::Details, reply.message)), } } - pub async fn vrfy(&self, address: &str) -> crate::Result> { + pub async fn vrfy(&self, address: &str) -> trc::Result> { self.pool .get() - .await? + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? 
.expand(&format!("VRFY {address}\r\n")) .await } - pub async fn expn(&self, address: &str) -> crate::Result> { + pub async fn expn(&self, address: &str) -> trc::Result> { self.pool .get() - .await? + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))? .expand(&format!("EXPN {address}\r\n")) .await } - pub async fn is_local_domain(&self, domain: &str) -> crate::Result { + pub async fn is_local_domain(&self, domain: &str) -> trc::Result { Ok(self.domains.contains(domain)) } } @@ -77,7 +98,7 @@ impl SmtpClient { async fn authenticate( &mut self, credentials: &Credentials, - ) -> crate::Result>> { + ) -> trc::Result>> { match self .client .authenticate(credentials, &self.capabilities) @@ -89,21 +110,30 @@ impl SmtpClient { self.num_auth_failures += 1; Ok(None) } - _ => Err(err.into()), + _ => Err(err.into_error()), }, } } - async fn expand(&mut self, command: &str) -> crate::Result> { - let reply = self.client.cmd(command.as_bytes()).await?; + async fn expand(&mut self, command: &str) -> trc::Result> { + let reply = self + .client + .cmd(command.as_bytes()) + .await + .map_err(|err| err.into_error().caused_by(trc::location!()))?; match reply.code() { 250 | 251 => Ok(reply .message() .split('\n') .map(|p| p.to_string()) .collect::>()), - 550 | 551 | 553 | 500 | 502 => Err(DirectoryError::Unsupported), - _ => Err(mail_send::Error::UnexpectedReply(reply).into()), + code @ (550 | 551 | 553 | 500 | 502) => Err(trc::Cause::Unsupported + .ctx(trc::Key::Protocol, trc::Protocol::Smtp) + .ctx(trc::Key::Code, code)), + code => Err(trc::Cause::Unexpected + .ctx(trc::Key::Protocol, trc::Protocol::Smtp) + .ctx(trc::Key::Code, code) + .ctx(trc::Key::Details, reply.message)), } } } diff --git a/crates/directory/src/backend/sql/lookup.rs b/crates/directory/src/backend/sql/lookup.rs index e49c5dab..f2e15061 100644 --- a/crates/directory/src/backend/sql/lookup.rs +++ b/crates/directory/src/backend/sql/lookup.rs @@ -6,6 +6,7 @@ use mail_send::Credentials; use 
store::{NamedRows, Rows, Value}; +use trc::AddContext; use crate::{backend::internal::manage::ManageDirectory, Principal, QueryBy, Type}; @@ -16,7 +17,7 @@ impl SqlDirectory { &self, by: QueryBy<'_>, return_member_of: bool, - ) -> crate::Result>> { + ) -> trc::Result>> { let mut account_id = None; let account_name; let mut secret = None; @@ -27,10 +28,16 @@ impl SqlDirectory { self.store .query::(&self.mappings.query_name, vec![username.into()]) - .await? + .await + .caused_by(trc::location!())? } QueryBy::Id(uid) => { - if let Some(username) = self.data_store.get_account_name(uid).await? { + if let Some(username) = self + .data_store + .get_account_name(uid) + .await + .caused_by(trc::location!())? + { account_name = username; } else { return Ok(None); @@ -42,7 +49,8 @@ impl SqlDirectory { &self.mappings.query_name, vec![account_name.clone().into()], ) - .await? + .await + .caused_by(trc::location!())? } QueryBy::Credentials(credentials) => { let (username, secret_) = match credentials { @@ -55,7 +63,8 @@ impl SqlDirectory { self.store .query::(&self.mappings.query_name, vec![username.into()]) - .await? + .await + .caused_by(trc::location!())? } }; @@ -64,18 +73,18 @@ impl SqlDirectory { } // Map row to principal - let mut principal = self.mappings.row_to_principal(result)?; + let mut principal = self + .mappings + .row_to_principal(result) + .caused_by(trc::location!())?; // Validate password if let Some(secret) = secret { - if !principal.verify_secret(secret).await? { - tracing::debug!( - context = "directory", - event = "invalid_password", - protocol = "sql", - account = account_name, - "Invalid password for account" - ); + if !principal + .verify_secret(secret) + .await + .caused_by(trc::location!())? 
+ { return Ok(None); } } @@ -87,7 +96,8 @@ impl SqlDirectory { principal.id = self .data_store .get_or_create_account_id(&account_name) - .await?; + .await + .caused_by(trc::location!())?; } principal.name = account_name; @@ -99,13 +109,17 @@ impl SqlDirectory { &self.mappings.query_members, vec![principal.name.clone().into()], ) - .await? + .await + .caused_by(trc::location!())? .rows { if let Some(Value::Text(account_id)) = row.values.first() { - principal - .member_of - .push(self.data_store.get_or_create_account_id(account_id).await?); + principal.member_of.push( + self.data_store + .get_or_create_account_id(account_id) + .await + .caused_by(trc::location!())?, + ); } } } @@ -118,31 +132,38 @@ impl SqlDirectory { &self.mappings.query_emails, vec![principal.name.clone().into()], ) - .await? + .await + .caused_by(trc::location!())? .into(); } Ok(Some(principal)) } - pub async fn email_to_ids(&self, address: &str) -> crate::Result> { + pub async fn email_to_ids(&self, address: &str) -> trc::Result> { let names = self .store .query::(&self.mappings.query_recipients, vec![address.into()]) - .await?; + .await + .caused_by(trc::location!())?; let mut ids = Vec::with_capacity(names.rows.len()); for row in names.rows { if let Some(Value::Text(name)) = row.values.first() { - ids.push(self.data_store.get_or_create_account_id(name).await?); + ids.push( + self.data_store + .get_or_create_account_id(name) + .await + .caused_by(trc::location!())?, + ); } } Ok(ids) } - pub async fn rcpt(&self, address: &str) -> crate::Result { + pub async fn rcpt(&self, address: &str) -> trc::Result { self.store .query::( &self.mappings.query_recipients, @@ -152,7 +173,7 @@ impl SqlDirectory { .map_err(Into::into) } - pub async fn vrfy(&self, address: &str) -> crate::Result> { + pub async fn vrfy(&self, address: &str) -> trc::Result> { self.store .query::( &self.mappings.query_verify, @@ -163,7 +184,7 @@ impl SqlDirectory { .map_err(Into::into) } - pub async fn expn(&self, address: &str) -> 
crate::Result> { + pub async fn expn(&self, address: &str) -> trc::Result> { self.store .query::( &self.mappings.query_expand, @@ -174,7 +195,7 @@ impl SqlDirectory { .map_err(Into::into) } - pub async fn is_local_domain(&self, domain: &str) -> crate::Result { + pub async fn is_local_domain(&self, domain: &str) -> trc::Result { self.store .query::(&self.mappings.query_domains, vec![domain.into()]) .await @@ -183,7 +204,7 @@ impl SqlDirectory { } impl SqlMappings { - pub fn row_to_principal(&self, rows: NamedRows) -> crate::Result> { + pub fn row_to_principal(&self, rows: NamedRows) -> trc::Result> { let mut principal = Principal::default(); if let Some(row) = rows.rows.into_iter().next() { diff --git a/crates/directory/src/core/config.rs b/crates/directory/src/core/config.rs index a73698cd..fb0dcee1 100644 --- a/crates/directory/src/core/config.rs +++ b/crates/directory/src/core/config.rs @@ -41,7 +41,6 @@ impl Directories { .property_or_default::(("directory", id, "disable"), "false") .unwrap_or(false) { - tracing::debug!("Skipping disabled directory {id:?}."); continue; } } @@ -104,7 +103,7 @@ pub(crate) fn build_pool( config: &mut Config, prefix: &str, manager: M, -) -> utils::config::Result> { +) -> Result, String> { Pool::builder(manager) .runtime(Runtime::Tokio1) .max_size( diff --git a/crates/directory/src/core/dispatch.rs b/crates/directory/src/core/dispatch.rs index cb8d65ce..dfc6d799 100644 --- a/crates/directory/src/core/dispatch.rs +++ b/crates/directory/src/core/dispatch.rs @@ -4,6 +4,8 @@ * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL */ +use trc::AddContext; + use crate::{ backend::internal::lookup::DirectoryStore, Directory, DirectoryInner, Principal, QueryBy, }; @@ -13,7 +15,7 @@ impl Directory { &self, by: QueryBy<'_>, return_member_of: bool, - ) -> crate::Result>> { + ) -> trc::Result>> { match &self.store { DirectoryInner::Internal(store) => store.query(by, return_member_of).await, DirectoryInner::Ldap(store) => store.query(by, 
return_member_of).await, @@ -22,9 +24,10 @@ impl Directory { DirectoryInner::Smtp(store) => store.query(by).await, DirectoryInner::Memory(store) => store.query(by).await, } + .caused_by( trc::location!()) } - pub async fn email_to_ids(&self, email: &str) -> crate::Result> { + pub async fn email_to_ids(&self, email: &str) -> trc::Result> { match &self.store { DirectoryInner::Internal(store) => store.email_to_ids(email).await, DirectoryInner::Ldap(store) => store.email_to_ids(email).await, @@ -33,9 +36,10 @@ impl Directory { DirectoryInner::Smtp(store) => store.email_to_ids(email).await, DirectoryInner::Memory(store) => store.email_to_ids(email).await, } + .caused_by( trc::location!()) } - pub async fn is_local_domain(&self, domain: &str) -> crate::Result { + pub async fn is_local_domain(&self, domain: &str) -> trc::Result { // Check cache if let Some(cache) = &self.cache { if let Some(result) = cache.get_domain(domain) { @@ -50,7 +54,8 @@ impl Directory { DirectoryInner::Imap(store) => store.is_local_domain(domain).await, DirectoryInner::Smtp(store) => store.is_local_domain(domain).await, DirectoryInner::Memory(store) => store.is_local_domain(domain).await, - }?; + } + .caused_by( trc::location!())?; // Update cache if let Some(cache) = &self.cache { @@ -60,7 +65,7 @@ impl Directory { Ok(result) } - pub async fn rcpt(&self, email: &str) -> crate::Result { + pub async fn rcpt(&self, email: &str) -> trc::Result { // Check cache if let Some(cache) = &self.cache { if let Some(result) = cache.get_rcpt(email) { @@ -75,7 +80,8 @@ impl Directory { DirectoryInner::Imap(store) => store.rcpt(email).await, DirectoryInner::Smtp(store) => store.rcpt(email).await, DirectoryInner::Memory(store) => store.rcpt(email).await, - }?; + } + .caused_by( trc::location!())?; // Update cache if let Some(cache) = &self.cache { @@ -85,7 +91,7 @@ impl Directory { Ok(result) } - pub async fn vrfy(&self, address: &str) -> crate::Result> { + pub async fn vrfy(&self, address: &str) -> trc::Result> { 
match &self.store { DirectoryInner::Internal(store) => store.vrfy(address).await, DirectoryInner::Ldap(store) => store.vrfy(address).await, @@ -94,9 +100,10 @@ impl Directory { DirectoryInner::Smtp(store) => store.vrfy(address).await, DirectoryInner::Memory(store) => store.vrfy(address).await, } + .caused_by( trc::location!()) } - pub async fn expn(&self, address: &str) -> crate::Result> { + pub async fn expn(&self, address: &str) -> trc::Result> { match &self.store { DirectoryInner::Internal(store) => store.expn(address).await, DirectoryInner::Ldap(store) => store.expn(address).await, @@ -105,5 +112,6 @@ impl Directory { DirectoryInner::Smtp(store) => store.expn(address).await, DirectoryInner::Memory(store) => store.expn(address).await, } + .caused_by( trc::location!()) } } diff --git a/crates/directory/src/core/secret.rs b/crates/directory/src/core/secret.rs index dd12d76a..144c8d1f 100644 --- a/crates/directory/src/core/secret.rs +++ b/crates/directory/src/core/secret.rs @@ -19,11 +19,10 @@ use tokio::sync::oneshot; use totp_rs::TOTP; use crate::backend::internal::SpecialSecrets; -use crate::DirectoryError; use crate::Principal; impl Principal { - pub async fn verify_secret(&self, mut code: &str) -> crate::Result { + pub async fn verify_secret(&self, mut code: &str) -> trc::Result { let mut totp_token = None; let mut is_totp_token_missing = false; let mut is_totp_required = false; @@ -59,7 +58,7 @@ impl Principal { // Token needs to validate with at least one of the TOTP secrets is_totp_verified = TOTP::from_url(secret) - .map_err(DirectoryError::InvalidTotpUrl)? + .map_err(|err| trc::Cause::Invalid.reason(err).details(secret.to_string()))? 
.check_current(totp_token) .unwrap_or(false); } @@ -67,9 +66,9 @@ impl Principal { if let Some((_, app_secret)) = secret.strip_prefix("$app$").and_then(|s| s.split_once('$')) { - is_app_authenticated = verify_secret_hash(app_secret, code).await; + is_app_authenticated = verify_secret_hash(app_secret, code).await?; } else { - is_authenticated = verify_secret_hash(secret, code).await; + is_authenticated = verify_secret_hash(secret, code).await?; } } } @@ -83,7 +82,7 @@ impl Principal { // Only let the client know if the TOTP code is missing // if the password is correct - Err(DirectoryError::MissingTotpCode) + Err(trc::Cause::MissingParameter.into_err()) } else { // Return the TOTP verification status @@ -97,7 +96,7 @@ impl Principal { if is_totp_verified { // TOTP URL appeared after password hash in secrets list for secret in &self.secrets { - if secret.is_password() && verify_secret_hash(secret, code).await { + if secret.is_password() && verify_secret_hash(secret, code).await? { return Ok(true); } } @@ -108,7 +107,7 @@ impl Principal { } } -async fn verify_hash_prefix(hashed_secret: &str, secret: &str) -> bool { +async fn verify_hash_prefix(hashed_secret: &str, secret: &str) -> trc::Result { if hashed_secret.starts_with("$argon2") || hashed_secret.starts_with("$pbkdf2") || hashed_secret.starts_with("$scrypt") @@ -119,63 +118,49 @@ async fn verify_hash_prefix(hashed_secret: &str, secret: &str) -> bool { tokio::task::spawn_blocking(move || match PasswordHash::new(&hashed_secret) { Ok(hash) => { - tx.send( - hash.verify_password(&[&Argon2::default(), &Pbkdf2, &Scrypt], &secret) - .is_ok(), - ) - .ok(); + tx.send(Ok(hash + .verify_password(&[&Argon2::default(), &Pbkdf2, &Scrypt], &secret) + .is_ok())) + .ok(); } - Err(_) => { - tracing::warn!( - context = "directory", - event = "error", - hash = hashed_secret, - "Invalid password hash" - ); - tx.send(false).ok(); + Err(err) => { + tx.send(Err(trc::Cause::Invalid.reason(err).details(hashed_secret))) + .ok(); } }); match 
rx.await { Ok(result) => result, - Err(_) => { - tracing::warn!(context = "directory", event = "error", "Thread join error"); - false - } + Err(err) => Err(trc::Cause::Thread.reason(err)), } } else if hashed_secret.starts_with("$2") { // Blowfish crypt - bcrypt::verify(secret, hashed_secret) + Ok(bcrypt::verify(secret, hashed_secret)) } else if hashed_secret.starts_with("$6$") { // SHA-512 crypt - sha512_crypt::verify(secret, hashed_secret) + Ok(sha512_crypt::verify(secret, hashed_secret)) } else if hashed_secret.starts_with("$5$") { // SHA-256 crypt - sha256_crypt::verify(secret, hashed_secret) + Ok(sha256_crypt::verify(secret, hashed_secret)) } else if hashed_secret.starts_with("$sha1") { // SHA-1 crypt - sha1_crypt::verify(secret, hashed_secret) + Ok(sha1_crypt::verify(secret, hashed_secret)) } else if hashed_secret.starts_with("$1") { // MD5 based hash - md5_crypt::verify(secret, hashed_secret) + Ok(md5_crypt::verify(secret, hashed_secret)) } else { - // Unknown hash - tracing::warn!( - context = "directory", - event = "error", - hash = hashed_secret, - "Invalid password hash" - ); - false + Err(trc::Cause::Invalid + .into_err() + .details(hashed_secret.to_string())) } } -pub async fn verify_secret_hash(hashed_secret: &str, secret: &str) -> bool { +pub async fn verify_secret_hash(hashed_secret: &str, secret: &str) -> trc::Result { if hashed_secret.starts_with('$') { verify_hash_prefix(hashed_secret, secret).await } else if hashed_secret.starts_with('_') { // Enhanced DES-based hash - bsdi_crypt::verify(secret, hashed_secret) + Ok(bsdi_crypt::verify(secret, hashed_secret)) } else if let Some(hashed_secret) = hashed_secret.strip_prefix('{') { if let Some((algo, hashed_secret)) = hashed_secret.split_once('}') { match algo { @@ -186,9 +171,13 @@ pub async fn verify_secret_hash(hashed_secret: &str, secret: &str) -> bool { // SHA-1 let mut hasher = Sha1::new(); hasher.update(secret.as_bytes()); - 
String::from_utf8(base64_encode(&hasher.finalize()[..]).unwrap_or_default()) + Ok( + String::from_utf8( + base64_encode(&hasher.finalize()[..]).unwrap_or_default(), + ) .unwrap() - == hashed_secret + == hashed_secret, + ) } "SSHA" => { // Salted SHA-1 @@ -198,15 +187,19 @@ pub async fn verify_secret_hash(hashed_secret: &str, secret: &str) -> bool { let mut hasher = Sha1::new(); hasher.update(secret.as_bytes()); hasher.update(salt); - &hasher.finalize()[..] == hash + Ok(&hasher.finalize()[..] == hash) } "SHA256" => { // Verify hash let mut hasher = Sha256::new(); hasher.update(secret.as_bytes()); - String::from_utf8(base64_encode(&hasher.finalize()[..]).unwrap_or_default()) + Ok( + String::from_utf8( + base64_encode(&hasher.finalize()[..]).unwrap_or_default(), + ) .unwrap() - == hashed_secret + == hashed_secret, + ) } "SSHA256" => { // Salted SHA-256 @@ -216,15 +209,19 @@ pub async fn verify_secret_hash(hashed_secret: &str, secret: &str) -> bool { let mut hasher = Sha256::new(); hasher.update(secret.as_bytes()); hasher.update(salt); - &hasher.finalize()[..] == hash + Ok(&hasher.finalize()[..] == hash) } "SHA512" => { // SHA-512 let mut hasher = Sha512::new(); hasher.update(secret.as_bytes()); - String::from_utf8(base64_encode(&hasher.finalize()[..]).unwrap_or_default()) + Ok( + String::from_utf8( + base64_encode(&hasher.finalize()[..]).unwrap_or_default(), + ) .unwrap() - == hashed_secret + == hashed_secret, + ) } "SSHA512" => { // Salted SHA-512 @@ -234,43 +231,35 @@ pub async fn verify_secret_hash(hashed_secret: &str, secret: &str) -> bool { let mut hasher = Sha512::new(); hasher.update(secret.as_bytes()); hasher.update(salt); - &hasher.finalize()[..] == hash + Ok(&hasher.finalize()[..] 
== hash) } "MD5" => { // MD5 let digest = md5::compute(secret.as_bytes()); - String::from_utf8(base64_encode(&digest[..]).unwrap_or_default()).unwrap() - == hashed_secret + Ok( + String::from_utf8(base64_encode(&digest[..]).unwrap_or_default()).unwrap() + == hashed_secret, + ) } "CRYPT" | "crypt" => { if hashed_secret.starts_with('$') { verify_hash_prefix(hashed_secret, secret).await } else { // Unix crypt - unix_crypt::verify(secret, hashed_secret) + Ok(unix_crypt::verify(secret, hashed_secret)) } } - "PLAIN" | "plain" | "CLEAR" | "clear" => hashed_secret == secret, - _ => { - tracing::warn!( - context = "directory", - event = "error", - algorithm = algo, - "Unsupported password hash algorithm" - ); - false - } + "PLAIN" | "plain" | "CLEAR" | "clear" => Ok(hashed_secret == secret), + _ => Err(trc::Cause::Invalid + .ctx(trc::Key::Reason, "Unsupported algorithm") + .details(hashed_secret.to_string())), } } else { - tracing::warn!( - context = "directory", - event = "error", - hash = hashed_secret, - "Invalid password hash" - ); - false + Err(trc::Cause::Invalid + .into_err() + .details(hashed_secret.to_string())) } } else { - hashed_secret == secret + Ok(hashed_secret == secret) } } diff --git a/crates/directory/src/lib.rs b/crates/directory/src/lib.rs index a11f7978..9697cd63 100644 --- a/crates/directory/src/lib.rs +++ b/crates/directory/src/lib.rs @@ -5,15 +5,11 @@ */ use core::cache::CachedDirectory; -use std::{ - fmt::{Debug, Display}, - sync::Arc, -}; +use std::{fmt::Debug, sync::Arc}; use ahash::AHashMap; use backend::{ imap::{ImapDirectory, ImapError}, - internal::PrincipalField, ldap::LdapDirectory, memory::MemoryDirectory, smtp::SmtpDirectory, @@ -23,7 +19,6 @@ use deadpool::managed::PoolError; use ldap3::LdapError; use mail_send::Credentials; use store::Store; -use totp_rs::TotpUrlError; pub mod backend; pub mod core; @@ -72,30 +67,6 @@ pub enum Type { Other = 6, } -#[derive(Debug)] -pub enum DirectoryError { - Ldap(LdapError), - Store(store::Error), - 
Imap(ImapError), - Smtp(mail_send::Error), - Pool(String), - Management(ManagementError), - TimedOut, - Unsupported, - InvalidTotpUrl(TotpUrlError), - MissingTotpCode, -} - -#[derive(Debug, PartialEq, Eq)] -pub enum ManagementError { - MissingField(PrincipalField), - AlreadyExists { - field: PrincipalField, - value: String, - }, - NotFound(String), -} - pub enum DirectoryInner { Internal(Store), Ldap(LdapDirectory), @@ -158,38 +129,6 @@ pub struct Directories { pub directories: AHashMap>, } -pub type Result = std::result::Result; - -impl From> for DirectoryError { - fn from(error: PoolError) -> Self { - match error { - PoolError::Backend(error) => error.into(), - PoolError::Timeout(_) => DirectoryError::timeout("ldap"), - error => DirectoryError::Pool(error.to_string()), - } - } -} - -impl From> for DirectoryError { - fn from(error: PoolError) -> Self { - match error { - PoolError::Backend(error) => error.into(), - PoolError::Timeout(_) => DirectoryError::timeout("imap"), - error => DirectoryError::Pool(error.to_string()), - } - } -} - -impl From> for DirectoryError { - fn from(error: PoolError) -> Self { - match error { - PoolError::Backend(error) => error.into(), - PoolError::Timeout(_) => DirectoryError::timeout("smtp"), - error => DirectoryError::Pool(error.to_string()), - } - } -} - impl Principal { pub fn fallback_admin(fallback_pass: impl Into) -> Self { Principal { @@ -211,109 +150,70 @@ impl Principal { } } -impl From for DirectoryError { - fn from(error: LdapError) -> Self { - tracing::warn!( - context = "directory", - event = "error", - protocol = "ldap", - reason = %error, - "LDAP directory error" - ); - - DirectoryError::Ldap(error) - } +trait IntoError { + fn into_error(self) -> trc::Error; } -impl From for DirectoryError { - fn from(error: store::Error) -> Self { - tracing::warn!( - context = "directory", - event = "error", - protocol = "store", - reason = %error, - "Directory error" - ); - - DirectoryError::Store(error) - } -} - -impl From for 
DirectoryError { - fn from(error: ImapError) -> Self { - tracing::warn!( - context = "directory", - event = "error", - protocol = "imap", - reason = %error, - "IMAP directory error" - ); - - DirectoryError::Imap(error) - } -} - -impl From for DirectoryError { - fn from(error: mail_send::Error) -> Self { - tracing::warn!( - context = "directory", - event = "error", - protocol = "smtp", - reason = %error, - "SMTP directory error" - ); - - DirectoryError::Smtp(error) - } -} - -impl DirectoryError { - pub fn unsupported(protocol: &str, method: &str) -> Self { - tracing::warn!( - context = "directory", - event = "error", - protocol = protocol, - method = method, - "Method not supported by directory" - ); - DirectoryError::Unsupported - } - - pub fn timeout(protocol: &str) -> Self { - tracing::warn!( - context = "directory", - event = "error", - protocol = protocol, - "Directory timed out" - ); - DirectoryError::TimedOut - } -} - -impl PartialEq for DirectoryError { - fn eq(&self, other: &Self) -> bool { - match (self, other) { - (Self::Store(l0), Self::Store(r0)) => l0 == r0, - (Self::Pool(l0), Self::Pool(r0)) => l0 == r0, - (Self::Management(l0), Self::Management(r0)) => l0 == r0, - _ => false, - } - } -} - -impl Display for DirectoryError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +impl IntoError for PoolError { + fn into_error(self) -> trc::Error { match self { - Self::Ldap(error) => write!(f, "LDAP error: {}", error), - Self::Store(error) => write!(f, "Store error: {}", error), - Self::Imap(error) => write!(f, "IMAP error: {}", error), - Self::Smtp(error) => write!(f, "SMTP error: {}", error), - Self::Pool(error) => write!(f, "Pool error: {}", error), - Self::Management(error) => write!(f, "Management error: {:?}", error), - Self::TimedOut => write!(f, "Directory timed out"), - Self::Unsupported => write!(f, "Method not supported by directory"), - Self::InvalidTotpUrl(error) => write!(f, "Invalid TOTP URL: {}", error), - 
Self::MissingTotpCode => write!(f, "Missing TOTP code"), + PoolError::Backend(error) => error.into_error(), + PoolError::Timeout(_) => { + trc::Cause::Timeout.ctx(trc::Key::Protocol, trc::Protocol::Ldap) + } + err => trc::Cause::Pool + .ctx(trc::Key::Protocol, trc::Protocol::Ldap) + .reason(err), + } + } +} + +impl IntoError for PoolError { + fn into_error(self) -> trc::Error { + match self { + PoolError::Backend(error) => error.into_error(), + PoolError::Timeout(_) => { + trc::Cause::Timeout.ctx(trc::Key::Protocol, trc::Protocol::Imap) + } + err => trc::Cause::Pool + .ctx(trc::Key::Protocol, trc::Protocol::Imap) + .reason(err), + } + } +} + +impl IntoError for PoolError { + fn into_error(self) -> trc::Error { + match self { + PoolError::Backend(error) => error.into_error(), + PoolError::Timeout(_) => { + trc::Cause::Timeout.ctx(trc::Key::Protocol, trc::Protocol::Smtp) + } + err => trc::Cause::Pool + .ctx(trc::Key::Protocol, trc::Protocol::Smtp) + .reason(err), + } + } +} + +impl IntoError for ImapError { + fn into_error(self) -> trc::Error { + trc::Cause::Imap.reason(self) + } +} + +impl IntoError for mail_send::Error { + fn into_error(self) -> trc::Error { + trc::Cause::Smtp.reason(self) + } +} + +impl IntoError for LdapError { + fn into_error(self) -> trc::Error { + if let LdapError::LdapResult { result } = &self { + trc::Cause::Ldap.ctx(trc::Key::Code, result.rc).reason(self) + } else { + trc::Cause::Ldap.reason(self) } } } diff --git a/crates/imap/Cargo.toml b/crates/imap/Cargo.toml index fb351909..69d123f2 100644 --- a/crates/imap/Cargo.toml +++ b/crates/imap/Cargo.toml @@ -9,6 +9,7 @@ imap_proto = { path = "../imap-proto" } jmap = { path = "../jmap" } jmap_proto = { path = "../jmap-proto" } directory = { path = "../directory" } +trc = { path = "../trc" } store = { path = "../store" } common = { path = "../common" } nlp = { path = "../nlp" } diff --git a/crates/imap/src/op/copy_move.rs b/crates/imap/src/op/copy_move.rs index 6cffde99..cda7c333 100644 --- 
a/crates/imap/src/op/copy_move.rs +++ b/crates/imap/src/op/copy_move.rs @@ -422,7 +422,7 @@ impl SessionData { &self, account_id: u32, id: u32, - ) -> Result, u32)>, MethodError> { + ) -> trc::Result, u32)>> { // Obtain mailbox tags if let (Some(mailboxes), Some(thread_id)) = ( self.jmap diff --git a/crates/jmap-proto/Cargo.toml b/crates/jmap-proto/Cargo.toml index 95ab91f8..6b0383e0 100644 --- a/crates/jmap-proto/Cargo.toml +++ b/crates/jmap-proto/Cargo.toml @@ -7,6 +7,7 @@ resolver = "2" [dependencies] store = { path = "../store" } utils = { path = "../utils" } +trc = { path = "../trc" } mail-parser = { version = "0.9", features = ["full_encoding", "serde_support", "ludicrous_mode"] } fast-float = "0.2.0" serde = { version = "1.0", features = ["derive"]} diff --git a/crates/jmap-proto/src/error/method.rs b/crates/jmap-proto/src/error/method.rs index 14785b11..93879b30 100644 --- a/crates/jmap-proto/src/error/method.rs +++ b/crates/jmap-proto/src/error/method.rs @@ -31,6 +31,123 @@ pub enum MethodError { UnknownDataType, } +#[derive(Debug)] +pub struct MethodErrorWrapper(trc::Error); + +impl From for trc::Error { + fn from(value: MethodError) -> Self { + let (typ, description): (&'static str, trc::Value) = match value { + MethodError::InvalidArguments(description) => ("invalidArguments", description.into()), + MethodError::RequestTooLarge => ( + "requestTooLarge", + concat!( + "The number of ids requested by the client exceeds the maximum number ", + "the server is willing to process in a single method call." + ) + .into(), + ), + MethodError::StateMismatch => ( + "stateMismatch", + concat!( + "An \"ifInState\" argument was supplied, but ", + "it does not match the current state." + ) + .into(), + ), + MethodError::AnchorNotFound => ( + "anchorNotFound", + concat!( + "An anchor argument was supplied, but it ", + "cannot be found in the results of the query." 
+ ) + .into(), + ), + MethodError::UnsupportedFilter(description) => { + ("unsupportedFilter", description.into()) + } + MethodError::UnsupportedSort(description) => ("unsupportedSort", description.into()), + MethodError::ServerFail(_) => ("serverFail", { + concat!( + "An unexpected error occurred while processing ", + "this call, please contact the system administrator." + ) + .into() + }), + MethodError::NotFound => ("serverPartialFail", { + concat!( + "One or more items are no longer available on the ", + "server, please try again." + ) + .into() + }), + MethodError::UnknownMethod(description) => ("unknownMethod", description.into()), + MethodError::ServerUnavailable => ( + "serverUnavailable", + concat!( + "This server is temporarily unavailable. ", + "Attempting this same operation later may succeed." + ) + .into(), + ), + MethodError::ServerPartialFail => ( + "serverPartialFail", + concat!( + "Some, but not all, expected changes described by the method ", + "occurred. Please resynchronize to determine server state." + ) + .into(), + ), + MethodError::InvalidResultReference(description) => { + ("invalidResultReference", description.into()) + } + MethodError::Forbidden(description) => ("forbidden", description.into()), + MethodError::AccountNotFound => ( + "accountNotFound", + "The accountId does not correspond to a valid account".into(), + ), + MethodError::AccountNotSupportedByMethod => ( + "accountNotSupportedByMethod", + concat!( + "The accountId given corresponds to a valid account, ", + "but the account does not support this method or data type." + ) + .into(), + ), + MethodError::AccountReadOnly => ( + "accountReadOnly", + "This method modifies state, but the account is read-only.".into(), + ), + MethodError::UnknownDataType => ( + "unknownDataType", + concat!( + "The server does not recognise this data type, ", + "or the capability to enable it is not present ", + "in the current Request Object." 
+ ) + .into(), + ), + MethodError::CannotCalculateChanges => ( + "cannotCalculateChanges", + concat!( + "The server cannot calculate the changes ", + "between the old and new states." + ) + .into(), + ), + }; + + trc::Cause::Jmap + .ctx(trc::Key::Type, typ) + .ctx(trc::Key::Details, description) + } +} + +impl From for MethodErrorWrapper { + fn from(value: trc::Error) -> Self { + MethodErrorWrapper(value) + } +} + impl Display for MethodError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { @@ -60,103 +177,32 @@ impl Display for MethodError { } } -impl Serialize for MethodError { +impl Serialize for MethodErrorWrapper { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, { let mut map = serializer.serialize_map(2.into())?; - let (error_type, description) = match self { - MethodError::InvalidArguments(description) => { - ("invalidArguments", description.as_str()) - } - MethodError::RequestTooLarge => ( - "requestTooLarge", - concat!( - "The number of ids requested by the client exceeds the maximum number ", - "the server is willing to process in a single method call." - ), - ), - MethodError::StateMismatch => ( - "stateMismatch", - concat!( - "An \"ifInState\" argument was supplied, but ", - "it does not match the current state." - ), - ), - MethodError::AnchorNotFound => ( - "anchorNotFound", - concat!( - "An anchor argument was supplied, but it ", - "cannot be found in the results of the query." - ), - ), - MethodError::UnsupportedFilter(description) => { - ("unsupportedFilter", description.as_str()) - } - MethodError::UnsupportedSort(description) => ("unsupportedSort", description.as_str()), - MethodError::ServerFail(_) => ("serverFail", { - concat!( - "An unexpected error occurred while processing ", - "this call, please contact the system administrator." - ) - }), - MethodError::NotFound => ("serverPartialFail", { - concat!( - "One or more items are no longer available on the ", - "server, please try again." 
- ) - }), - MethodError::UnknownMethod(description) => ("unknownMethod", description.as_str()), - MethodError::ServerUnavailable => ( + let (error_type, description) = if self.0.matches(trc::Cause::Jmap) { + ( + self.0 + .value(trc::Key::Type) + .and_then(|v| v.as_str()) + .unwrap(), + self.0 + .value(trc::Key::Details) + .and_then(|v| v.as_str()) + .unwrap(), + ) + } else { + ( "serverUnavailable", concat!( "This server is temporarily unavailable. ", "Attempting this same operation later may succeed." ), - ), - MethodError::ServerPartialFail => ( - "serverPartialFail", - concat!( - "Some, but not all, expected changes described by the method ", - "occurred. Please resynchronize to determine server state." - ), - ), - MethodError::InvalidResultReference(description) => { - ("invalidResultReference", description.as_str()) - } - MethodError::Forbidden(description) => ("forbidden", description.as_str()), - MethodError::AccountNotFound => ( - "accountNotFound", - "The accountId does not correspond to a valid account", - ), - MethodError::AccountNotSupportedByMethod => ( - "accountNotSupportedByMethod", - concat!( - "The accountId given corresponds to a valid account, ", - "but the account does not support this method or data type." - ), - ), - MethodError::AccountReadOnly => ( - "accountReadOnly", - "This method modifies state, but the account is read-only.", - ), - MethodError::UnknownDataType => ( - "unknownDataType", - concat!( - "The server does not recognise this data type, ", - "or the capability to enable it is not present ", - "in the current Request Object." - ), - ), - MethodError::CannotCalculateChanges => ( - "cannotCalculateChanges", - concat!( - "The server cannot calculate the changes ", - "between the old and new states." 
- ), - ), + ) }; map.serialize_entry("type", error_type)?; diff --git a/crates/jmap-proto/src/method/get.rs b/crates/jmap-proto/src/method/get.rs index fa23f829..62e49b90 100644 --- a/crates/jmap-proto/src/method/get.rs +++ b/crates/jmap-proto/src/method/get.rs @@ -171,10 +171,7 @@ impl GetRequest { } } - pub fn unwrap_ids( - &mut self, - max_objects_in_get: usize, - ) -> Result>, MethodError> { + pub fn unwrap_ids(&mut self, max_objects_in_get: usize) -> trc::Result>> { if let Some(ids) = self.ids.take() { let ids = ids.unwrap(); if ids.len() <= max_objects_in_get { @@ -184,7 +181,7 @@ impl GetRequest { .collect::>(), )) } else { - Err(MethodError::RequestTooLarge) + Err(MethodError::RequestTooLarge.into()) } } else { Ok(None) @@ -194,7 +191,7 @@ impl GetRequest { pub fn unwrap_blob_ids( &mut self, max_objects_in_get: usize, - ) -> Result>, MethodError> { + ) -> trc::Result>> { if let Some(ids) = self.ids.take() { let ids = ids.unwrap(); if ids.len() <= max_objects_in_get { @@ -204,7 +201,7 @@ impl GetRequest { .collect::>(), )) } else { - Err(MethodError::RequestTooLarge) + Err(MethodError::RequestTooLarge.into()) } } else { Ok(None) diff --git a/crates/jmap-proto/src/method/set.rs b/crates/jmap-proto/src/method/set.rs index 39f14e83..28421cad 100644 --- a/crates/jmap-proto/src/method/set.rs +++ b/crates/jmap-proto/src/method/set.rs @@ -400,7 +400,7 @@ impl RequestPropertyParser for RequestArguments { } impl SetRequest { - pub fn validate(&self, max_objects_in_set: usize) -> Result<(), MethodError> { + pub fn validate(&self, max_objects_in_set: usize) -> trc::Result<()> { if self.create.as_ref().map_or(0, |objs| objs.len()) + self.update.as_ref().map_or(0, |objs| objs.len()) + self.destroy.as_ref().map_or(0, |objs| { @@ -412,7 +412,7 @@ impl SetRequest { }) > max_objects_in_set { - Err(MethodError::RequestTooLarge) + Err(MethodError::RequestTooLarge.into()) } else { Ok(()) } @@ -460,10 +460,7 @@ impl SetRequest { } impl SetResponse { - pub fn from_request( - 
request: &SetRequest, - max_objects: usize, - ) -> Result { + pub fn from_request(request: &SetRequest, max_objects: usize) -> trc::Result { let n_create = request.create.as_ref().map_or(0, |objs| objs.len()); let n_update = request.update.as_ref().map_or(0, |objs| objs.len()); let n_destroy = request.destroy.as_ref().map_or(0, |objs| { @@ -491,7 +488,7 @@ impl SetResponse { state_change: None, }) } else { - Err(MethodError::RequestTooLarge) + Err(MethodError::RequestTooLarge.into()) } } diff --git a/crates/jmap-proto/src/object/mod.rs b/crates/jmap-proto/src/object/mod.rs index 4f333366..13c70b67 100644 --- a/crates/jmap-proto/src/object/mod.rs +++ b/crates/jmap-proto/src/object/mod.rs @@ -122,9 +122,12 @@ impl Serialize for Value { } impl Deserialize for Value { - fn deserialize(bytes: &[u8]) -> store::Result { - Self::deserialize_from(&mut bytes.iter()) - .ok_or_else(|| store::Error::InternalError("Failed to deserialize value.".to_string())) + fn deserialize(bytes: &[u8]) -> trc::Result { + Self::deserialize_from(&mut bytes.iter()).ok_or_else(|| { + trc::Cause::DataCorruption + .caused_by(trc::location!()) + .ctx(trc::Key::Value, bytes) + }) } } @@ -143,9 +146,12 @@ impl Serialize for &Object { } impl Deserialize for Object { - fn deserialize(bytes: &[u8]) -> store::Result { - Object::deserialize_from(&mut bytes.iter()) - .ok_or_else(|| store::Error::InternalError("Failed to deserialize object.".to_string())) + fn deserialize(bytes: &[u8]) -> trc::Result { + Object::deserialize_from(&mut bytes.iter()).ok_or_else(|| { + trc::Cause::DataCorruption + .caused_by(trc::location!()) + .ctx(trc::Key::Value, bytes) + }) } } diff --git a/crates/jmap-proto/src/request/mod.rs b/crates/jmap-proto/src/request/mod.rs index 0afa8451..79468a9d 100644 --- a/crates/jmap-proto/src/request/mod.rs +++ b/crates/jmap-proto/src/request/mod.rs @@ -17,7 +17,6 @@ use std::{ }; use crate::{ - error::method::MethodError, method::{ changes::ChangesRequest, copy::{self, CopyBlobRequest, 
CopyRequest}, @@ -74,7 +73,7 @@ pub enum RequestMethod { LookupBlob(BlobLookupRequest), UploadBlob(BlobUploadRequest), Echo(Echo), - Error(MethodError), + Error(trc::Error), } impl JsonObjectParser for RequestProperty { diff --git a/crates/jmap-proto/src/request/parser.rs b/crates/jmap-proto/src/request/parser.rs index 3f62d523..74306337 100644 --- a/crates/jmap-proto/src/request/parser.rs +++ b/crates/jmap-proto/src/request/parser.rs @@ -178,7 +178,7 @@ impl Request { Ok(method) => method, Err(Error::Method(err)) => { parser.skip_token(start_depth_array, start_depth_dict)?; - RequestMethod::Error(err) + RequestMethod::Error(err.into()) } Err(err) => { return Err(err.into()); diff --git a/crates/jmap-proto/src/response/mod.rs b/crates/jmap-proto/src/response/mod.rs index 7d5b78b3..e6df3b30 100644 --- a/crates/jmap-proto/src/response/mod.rs +++ b/crates/jmap-proto/src/response/mod.rs @@ -10,7 +10,7 @@ pub mod serialize; use std::collections::HashMap; use crate::{ - error::method::MethodError, + error::method::MethodErrorWrapper, method::{ changes::ChangesResponse, copy::{CopyBlobResponse, CopyResponse}, @@ -48,7 +48,7 @@ pub enum ResponseMethod { LookupBlob(BlobLookupResponse), UploadBlob(BlobUploadResponse), Echo(Echo), - Error(MethodError), + Error(MethodErrorWrapper), } #[derive(Debug, serde::Serialize)] @@ -87,10 +87,10 @@ impl Response { }); } - pub fn push_error(&mut self, id: String, err: MethodError) { + pub fn push_error(&mut self, id: String, err: impl Into) { self.method_responses.push(Call { id, - method: ResponseMethod::Error(err), + method: ResponseMethod::Error(err.into()), name: MethodName::error(), }); } @@ -100,9 +100,9 @@ impl Response { } } -impl From for ResponseMethod { - fn from(error: MethodError) -> Self { - ResponseMethod::Error(error) +impl From for ResponseMethod { + fn from(error: trc::Error) -> Self { + ResponseMethod::Error(error.into()) } } @@ -190,8 +190,8 @@ impl From for ResponseMethod { } } -impl> From> for ResponseMethod { - fn 
from(result: Result) -> Self { +impl> From> for ResponseMethod { + fn from(result: trc::Result) -> Self { match result { Ok(value) => value.into(), Err(error) => error.into(), diff --git a/crates/jmap-proto/src/response/references.rs b/crates/jmap-proto/src/response/references.rs index 2c7aa2bd..1222ecb8 100644 --- a/crates/jmap-proto/src/response/references.rs +++ b/crates/jmap-proto/src/response/references.rs @@ -33,7 +33,7 @@ enum EvalResult { } impl Response { - pub fn resolve_references(&self, request: &mut RequestMethod) -> Result<(), MethodError> { + pub fn resolve_references(&self, request: &mut RequestMethod) -> trc::Result<()> { match request { RequestMethod::Get(request) => { // Resolve id references @@ -52,7 +52,8 @@ impl Response { } else { return Err(MethodError::InvalidResultReference(format!( "Id reference {reference:?} does not exist." - ))); + )) + .into()); } } } @@ -151,7 +152,8 @@ impl Response { Some(_) => { return Err(MethodError::InvalidResultReference(format!( "Id reference {parent_id:?} points to invalid type." - ))); + )) + .into()); } None => { graph @@ -246,13 +248,14 @@ impl Response { EvalResult::Failed } - fn eval_id_reference(&self, ir: &str) -> Result { + fn eval_id_reference(&self, ir: &str) -> trc::Result { if let Some(AnyId::Id(id)) = self.created_ids.get(ir) { Ok(*id) } else { - Err(MethodError::InvalidResultReference(format!( - "Id reference {ir:?} not found." - ))) + Err( + MethodError::InvalidResultReference(format!("Id reference {ir:?} not found.")) + .into(), + ) } } @@ -260,7 +263,7 @@ impl Response { &self, obj: &mut Object, mut graph: Option<(&str, &mut HashMap>)>, - ) -> Result<(), MethodError> { + ) -> trc::Result<()> { for set_value in obj.properties.values_mut() { match set_value { SetValue::IdReference(MaybeReference::Reference(parent_id)) => { @@ -274,7 +277,8 @@ impl Response { } else { return Err(MethodError::InvalidResultReference(format!( "Id reference {parent_id:?} not found." 
- ))); + )) + .into()); } } SetValue::IdReferences(id_refs) => { @@ -290,7 +294,8 @@ impl Response { } else { return Err(MethodError::InvalidResultReference(format!( "Id reference {parent_id:?} not found." - ))); + )) + .into()); } } } @@ -310,14 +315,15 @@ impl Response { fn topological_sort( create: &mut VecMap, graph: HashMap>, -) -> Result, MethodError> { +) -> trc::Result> { // Make sure all references exist for (from_id, to_ids) in graph.iter() { for to_id in to_ids { if !create.contains_key(to_id) { return Err(MethodError::InvalidResultReference(format!( "Invalid reference to non-existing object {to_id:?} from {from_id:?}" - ))); + )) + .into()); } } } @@ -334,7 +340,8 @@ fn topological_sort( if it_stack.len() > 1000 { return Err(MethodError::InvalidArguments( "Cyclical references are not allowed.".to_string(), - )); + ) + .into()); } it = to_ids.iter(); continue; @@ -437,7 +444,7 @@ impl EvalObjectReferences for CopyResponse { } impl EvalResult { - pub fn unwrap_ids(self, rr: &ResultReference) -> Result, MethodError> { + pub fn unwrap_ids(self, rr: &ResultReference) -> trc::Result> { if let EvalResult::Values(values) = self { let mut ids = Vec::with_capacity(values.len()); for value in values { @@ -450,7 +457,8 @@ impl EvalResult { _ => { return Err(MethodError::InvalidResultReference(format!( "Failed to evaluate {rr} result reference." - ))); + )) + .into()); } } } @@ -458,7 +466,8 @@ impl EvalResult { _ => { return Err(MethodError::InvalidResultReference(format!( "Failed to evaluate {rr} result reference." - ))) + )) + .into()) } } } @@ -466,14 +475,15 @@ impl EvalResult { } else { Err(MethodError::InvalidResultReference(format!( "Failed to evaluate {rr} result reference." 
- ))) + )) + .into()) } } pub fn unwrap_any_ids( self, rr: &ResultReference, - ) -> Result>, MethodError> { + ) -> trc::Result>> { if let EvalResult::Values(values) = self { let mut ids = Vec::with_capacity(values.len()); for value in values { @@ -490,7 +500,8 @@ impl EvalResult { _ => { return Err(MethodError::InvalidResultReference(format!( "Failed to evaluate {rr} result reference." - ))); + )) + .into()); } } } @@ -498,7 +509,8 @@ impl EvalResult { _ => { return Err(MethodError::InvalidResultReference(format!( "Failed to evaluate {rr} result reference." - ))) + )) + .into()) } } } @@ -506,17 +518,19 @@ impl EvalResult { } else { Err(MethodError::InvalidResultReference(format!( "Failed to evaluate {rr} result reference." - ))) + )) + .into()) } } - pub fn unwrap_properties(self, rr: &ResultReference) -> Result, MethodError> { + pub fn unwrap_properties(self, rr: &ResultReference) -> trc::Result> { if let EvalResult::Properties(properties) = self { Ok(properties) } else { Err(MethodError::InvalidResultReference(format!( "Failed to evaluate {rr} result reference." 
- ))) + )) + .into()) } } } @@ -526,7 +540,6 @@ mod tests { use std::collections::HashMap; use crate::{ - error::method::MethodError, request::{Request, RequestMethod}, response::Response, types::{ @@ -685,7 +698,10 @@ mod tests { ), Err(err) => { assert_eq!(test_num, 3); - assert!(matches!(err, MethodError::InvalidArguments(_))); + assert!(matches!( + err.value(trc::Key::Type).and_then(|v| v.as_str()).unwrap(), + "invalidArguments" + )); continue; } } diff --git a/crates/jmap/Cargo.toml b/crates/jmap/Cargo.toml index 24fe11d8..0b8644e8 100644 --- a/crates/jmap/Cargo.toml +++ b/crates/jmap/Cargo.toml @@ -12,6 +12,7 @@ smtp = { path = "../smtp" } utils = { path = "../utils" } common = { path = "../common" } directory = { path = "../directory" } +trc = { path = "../trc" } smtp-proto = { version = "0.1" } mail-parser = { version = "0.9", features = ["full_encoding", "serde_support", "ludicrous_mode"] } mail-builder = { version = "0.3", features = ["ludicrous_mode"] } diff --git a/crates/jmap/src/api/http.rs b/crates/jmap/src/api/http.rs index a9c1cf80..9c1df2b5 100644 --- a/crates/jmap/src/api/http.rs +++ b/crates/jmap/src/api/http.rs @@ -529,7 +529,7 @@ impl ToHttpResponse for JsonResponse { } } -impl ToHttpResponse for store::Error { +impl ToHttpResponse for trc::Error { fn into_http_response(self) -> HttpResponse { tracing::error!(context = "store", error = %self, "Database error"); diff --git a/crates/jmap/src/api/management/dkim.rs b/crates/jmap/src/api/management/dkim.rs index 6dd7e5ba..cda3af68 100644 --- a/crates/jmap/src/api/management/dkim.rs +++ b/crates/jmap/src/api/management/dkim.rs @@ -66,6 +66,7 @@ impl JMAP { return RequestError::not_found().into_http_response(); } }; + let todo = "bubble up error and log them"; let (pk, algo) = match ( self.core @@ -163,7 +164,7 @@ impl JMAP { id: impl AsRef, domain: impl Into, selector: impl Into, - ) -> store::Result<()> { + ) -> trc::Result<()> { let id = id.as_ref(); let (algorithm, pk_type) = match algo { 
Algorithm::Rsa => ("rsa-sha256", "RSA PRIVATE KEY"), @@ -176,7 +177,7 @@ impl JMAP { Algorithm::Rsa => DkimKeyPair::generate_rsa(2048), Algorithm::Ed25519 => DkimKeyPair::generate_ed25519(), } - .map_err(|err| store::Error::InternalError(err.to_string()))? + .map_err(|err| trc::Cause::Crypto.reason(err).caused_by(trc::location!()))? .private_key(), ) .unwrap_or_default() diff --git a/crates/jmap/src/api/management/domain.rs b/crates/jmap/src/api/management/domain.rs index ff021536..4ff4d247 100644 --- a/crates/jmap/src/api/management/domain.rs +++ b/crates/jmap/src/api/management/domain.rs @@ -123,7 +123,7 @@ impl JMAP { } } - async fn build_dns_records(&self, domain_name: &str) -> store::Result> { + async fn build_dns_records(&self, domain_name: &str) -> trc::Result> { // Obtain server name let server_name = self .core diff --git a/crates/jmap/src/api/management/enterprise.rs b/crates/jmap/src/api/management/enterprise.rs index bb6038e7..1931c70b 100644 --- a/crates/jmap/src/api/management/enterprise.rs +++ b/crates/jmap/src/api/management/enterprise.rs @@ -24,7 +24,7 @@ use crate::{ api::{http::ToHttpResponse, HttpRequest, HttpResponse, JsonResponse}, email::ingest::{IngestEmail, IngestSource}, mailbox::INBOX_ID, - IngestError, JMAP, + JMAP, }; #[derive(serde::Deserialize)] @@ -185,8 +185,10 @@ impl JMAP { batch.clear(ValueClass::Blob(BlobOp::Reserve { hash: request.hash, until: cancel_deletion as u64 })); } }, - Err(IngestError::Permanent { reason, .. 
}) => { - results.push(UndeleteResponse::Error { reason }); + Err(mut err) if err.matches(trc::Cause::Ingest) => { + results.push(UndeleteResponse::Error { reason: err.take_value(trc::Key::Reason) + .and_then(|v| v.into_string()) + .unwrap().into_owned() }); } Err(_) => { return RequestError::internal_server_error().into_http_response(); diff --git a/crates/jmap/src/api/management/principal.rs b/crates/jmap/src/api/management/principal.rs index 8168e42a..6da26b8e 100644 --- a/crates/jmap/src/api/management/principal.rs +++ b/crates/jmap/src/api/management/principal.rs @@ -11,7 +11,7 @@ use directory::{ lookup::DirectoryStore, manage::ManageDirectory, PrincipalAction, PrincipalField, PrincipalUpdate, PrincipalValue, SpecialSecrets, }, - DirectoryError, DirectoryInner, ManagementError, Principal, QueryBy, Type, + DirectoryInner, Principal, QueryBy, Type, }; use hyper::{header, Method, StatusCode}; @@ -116,7 +116,7 @@ impl JMAP { "data": account_id, })) .into_http_response(), - Err(err) => err.into_http_response(), + Err(err) => into_directory_response(err), } } Err(err) => err.into_http_response(), @@ -150,7 +150,7 @@ impl JMAP { })) .into_http_response() } - Err(err) => err.into_http_response(), + Err(err) => into_directory_response(err), } } (Some(name), method) => { @@ -167,7 +167,7 @@ impl JMAP { .into_http_response(); } Err(err) => { - return err.into_http_response(); + return into_directory_response(err); } }; @@ -227,7 +227,7 @@ impl JMAP { })) .into_http_response() } - Err(err) => err.into_http_response(), + Err(err) => into_directory_response(err), } } Method::DELETE => { @@ -253,7 +253,7 @@ impl JMAP { })) .into_http_response() } - Err(err) => err.into_http_response(), + Err(err) => into_directory_response(err), } } Method::PATCH => { @@ -296,7 +296,7 @@ impl JMAP { })) .into_http_response() } - Err(err) => err.into_http_response(), + Err(err) => into_directory_response(err), } } Err(err) => err.into_http_response(), @@ -339,7 +339,7 @@ impl JMAP { Ok(None) 
=> { return RequestError::not_found().into_http_response(); } - Err(err) => return err.into_http_response(), + Err(err) => return into_directory_response(err), } } @@ -477,7 +477,7 @@ impl JMAP { })) .into_http_response() } - Err(err) => err.into_http_response(), + Err(err) => into_directory_response(err), } } @@ -515,40 +515,47 @@ impl From> for PrincipalResponse { } } -impl ToHttpResponse for DirectoryError { - fn into_http_response(self) -> HttpResponse { - match self { - DirectoryError::Management(err) => { - let response = match err { - ManagementError::MissingField(field) => ManagementApiError::FieldMissing { - field: field.to_string().into(), - }, - ManagementError::AlreadyExists { field, value } => { - ManagementApiError::FieldAlreadyExists { - field: field.to_string().into(), - value: value.into(), - } - } - ManagementError::NotFound(details) => ManagementApiError::NotFound { - item: details.into(), - }, - }; - JsonResponse::new(response).into_http_response() - } - DirectoryError::Unsupported => JsonResponse::new(ManagementApiError::Unsupported { +fn into_directory_response(mut error: trc::Error) -> HttpResponse { + let response = match error.as_ref() { + trc::Cause::MissingParameter => ManagementApiError::FieldMissing { + field: error + .take_value(trc::Key::Key) + .and_then(|v| v.into_string()) + .unwrap_or_default(), + }, + trc::Cause::AlreadyExists => ManagementApiError::FieldAlreadyExists { + field: error + .take_value(trc::Key::Key) + .and_then(|v| v.into_string()) + .unwrap_or_default(), + value: error + .take_value(trc::Key::Value) + .and_then(|v| v.into_string()) + .unwrap_or_default(), + }, + trc::Cause::NotFound => ManagementApiError::NotFound { + item: error + .take_value(trc::Key::Key) + .and_then(|v| v.into_string()) + .unwrap_or_default(), + }, + trc::Cause::Unsupported => { + return JsonResponse::new(ManagementApiError::Unsupported { details: "Requested action is unsupported".into(), }) - .into_http_response(), - err => { - tracing::warn!( 
- context = "directory", - event = "error", - reason = ?err, - "Directory error" - ); - - RequestError::internal_server_error().into_http_response() - } + .into_http_response(); } - } + _ => { + tracing::warn!( + context = "directory", + event = "error", + reason = ?error, + "Directory error" + ); + + return RequestError::internal_server_error().into_http_response(); + } + }; + + JsonResponse::new(response).into_http_response() } diff --git a/crates/jmap/src/api/request.rs b/crates/jmap/src/api/request.rs index 35c243b5..32a79704 100644 --- a/crates/jmap/src/api/request.rs +++ b/crates/jmap/src/api/request.rs @@ -113,7 +113,7 @@ impl JMAP { access_token: &AccessToken, next_call: &mut Option>, instance: &Arc, - ) -> Result { + ) -> trc::Result { Ok(match method { RequestMethod::Get(mut req) => match req.take_arguments() { get::RequestArguments::Email(arguments) => { @@ -162,7 +162,8 @@ impl JMAP { } else { return Err(MethodError::Forbidden( "Principal lookups are disabled".to_string(), - )); + ) + .into()); } } get::RequestArguments::Quota => { @@ -209,7 +210,8 @@ impl JMAP { } else { return Err(MethodError::Forbidden( "Principal lookups are disabled".to_string(), - )); + ) + .into()); } } query::RequestArguments::Quota => { diff --git a/crates/jmap/src/auth/acl.rs b/crates/jmap/src/auth/acl.rs index d462abd6..af0df6c0 100644 --- a/crates/jmap/src/auth/acl.rs +++ b/crates/jmap/src/auth/acl.rs @@ -6,7 +6,7 @@ use directory::QueryBy; use jmap_proto::{ - error::{method::MethodError, set::SetError}, + error::set::SetError, object::Object, types::{ acl::Acl, @@ -21,6 +21,7 @@ use store::{ write::{assert::HashedValue, ValueClass}, ValueKey, }; +use trc::AddContext; use utils::map::bitmap::{Bitmap, BitmapItem}; use crate::JMAP; @@ -96,7 +97,7 @@ impl JMAP { to_account_id: u32, to_collection: Collection, check_acls: impl Into>, - ) -> Result { + ) -> trc::Result { let check_acls = check_acls.into(); let mut document_ids = RoaringBitmap::new(); let to_collection = 
u8::from(to_collection); @@ -114,14 +115,7 @@ impl JMAP { to_collection, }) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "shared_documents", - error = ?err, - "Failed to iterate ACLs."); - MethodError::ServerPartialFail - })? + .caused_by(trc::location!())? { let mut acls = Bitmap::::from(acl_item.permissions); @@ -140,7 +134,7 @@ impl JMAP { access_token: &AccessToken, to_account_id: u32, check_acls: impl Into>, - ) -> Result { + ) -> trc::Result { let check_acls = check_acls.into(); let shared_mailboxes = self .shared_documents(access_token, to_account_id, Collection::Mailbox, check_acls) @@ -172,7 +166,7 @@ impl JMAP { account_id: u32, collection: Collection, check_acls: impl Into>, - ) -> Result { + ) -> trc::Result { let check_acls = check_acls.into(); let mut document_ids = self .get_document_ids(account_id, collection) @@ -191,7 +185,7 @@ impl JMAP { access_token: &AccessToken, account_id: u32, check_acls: impl Into>, - ) -> Result { + ) -> trc::Result { let check_acls = check_acls.into(); let mut document_ids = self .get_document_ids(account_id, Collection::Email) @@ -212,7 +206,7 @@ impl JMAP { to_collection: impl Into, to_document_id: u32, check_acls: impl Into>, - ) -> Result { + ) -> trc::Result { let to_collection = to_collection.into(); let check_acls = check_acls.into(); for &grant_account_id in [access_token.primary_id] @@ -241,12 +235,7 @@ impl JMAP { } Ok(None) => (), Err(err) => { - tracing::error!( - event = "error", - context = "has_access_to_document", - error = ?err, - "Failed to verify ACL."); - return Err(MethodError::ServerPartialFail); + return Err(err.caused_by(trc::location!())); } } } diff --git a/crates/jmap/src/auth/mod.rs b/crates/jmap/src/auth/mod.rs index 8c2f2c7a..0358a4b5 100644 --- a/crates/jmap/src/auth/mod.rs +++ b/crates/jmap/src/auth/mod.rs @@ -114,25 +114,26 @@ impl AccessToken { &self, to_account_id: Id, to_collection: Collection, - ) -> Result<&Self, MethodError> { + ) -> trc::Result<&Self> 
{ if self.has_access(to_account_id.document_id(), to_collection) { Ok(self) } else { Err(MethodError::Forbidden(format!( "You do not have access to account {}", to_account_id - ))) + )) + .into()) } } - pub fn assert_is_member(&self, account_id: Id) -> Result<&Self, MethodError> { + pub fn assert_is_member(&self, account_id: Id) -> trc::Result<&Self> { if self.is_member(account_id.document_id()) { Ok(self) } else { - Err(MethodError::Forbidden(format!( - "You are not an owner of account {}", - account_id - ))) + Err( + MethodError::Forbidden(format!("You are not an owner of account {}", account_id)) + .into(), + ) } } } diff --git a/crates/jmap/src/blob/copy.rs b/crates/jmap/src/blob/copy.rs index 1f2173f9..160eb28f 100644 --- a/crates/jmap/src/blob/copy.rs +++ b/crates/jmap/src/blob/copy.rs @@ -5,10 +5,7 @@ */ use jmap_proto::{ - error::{ - method::MethodError, - set::{SetError, SetErrorType}, - }, + error::set::{SetError, SetErrorType}, method::copy::{CopyBlobRequest, CopyBlobResponse}, types::blob::BlobId, }; @@ -26,7 +23,7 @@ impl JMAP { &self, request: CopyBlobRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { let mut response = CopyBlobResponse { from_account_id: request.from_account_id, account_id: request.account_id, diff --git a/crates/jmap/src/blob/download.rs b/crates/jmap/src/blob/download.rs index 3953cb17..ca2f8093 100644 --- a/crates/jmap/src/blob/download.rs +++ b/crates/jmap/src/blob/download.rs @@ -6,19 +6,17 @@ use std::ops::Range; -use jmap_proto::{ - error::method::MethodError, - types::{ - acl::Acl, - blob::{BlobId, BlobSection}, - collection::Collection, - }, +use jmap_proto::types::{ + acl::Acl, + blob::{BlobId, BlobSection}, + collection::Collection, }; use mail_parser::{ decoders::{base64::base64_decode, quoted_printable::quoted_printable_decode}, Encoding, }; use store::BlobClass; +use trc::AddContext; use utils::BlobHash; use crate::{auth::AccessToken, JMAP}; @@ -29,20 +27,14 @@ impl JMAP { &self, blob_id: &BlobId, 
access_token: &AccessToken, - ) -> Result>, MethodError> { + ) -> trc::Result>> { if !self .core .storage .data .blob_has_access(&blob_id.hash, &blob_id.class) .await - .map_err(|err| { - tracing::error!(event = "error", - context = "blob_download", - error = ?err, - "Failed to validate blob access"); - MethodError::ServerPartialFail - })? + .caused_by(trc::location!())? { return Ok(None); } @@ -95,7 +87,7 @@ impl JMAP { &self, hash: &BlobHash, section: &BlobSection, - ) -> Result>, MethodError> { + ) -> trc::Result>> { Ok(self .get_blob( hash, @@ -113,38 +105,27 @@ impl JMAP { &self, hash: &BlobHash, range: Range, - ) -> Result>, MethodError> { - match self.core.storage.blob.get_blob(hash.as_ref(), range).await { - Ok(blob) => Ok(blob), - Err(err) => { - tracing::error!(event = "error", - context = "blob_store", - blob_id = ?hash, - error = ?err, - "Failed to retrieve blob"); - Err(MethodError::ServerPartialFail) - } - } + ) -> trc::Result>> { + self.core + .storage + .blob + .get_blob(hash.as_ref(), range) + .await + .caused_by(trc::location!()) } pub async fn has_access_blob( &self, blob_id: &BlobId, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { Ok(self .core .storage .data .blob_has_access(&blob_id.hash, &blob_id.class) .await - .map_err(|err| { - tracing::error!(event = "error", - context = "has_access_blob", - error = ?err, - "Failed to validate blob access"); - MethodError::ServerPartialFail - })? + .caused_by(trc::location!())? && match &blob_id.class { BlobClass::Linked { account_id, diff --git a/crates/jmap/src/blob/get.rs b/crates/jmap/src/blob/get.rs index b7f2790c..1dc976e5 100644 --- a/crates/jmap/src/blob/get.rs +++ b/crates/jmap/src/blob/get.rs @@ -33,7 +33,7 @@ impl JMAP { &self, mut request: GetRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { let ids = request .unwrap_blob_ids(self.core.jmap.get_max_objects)? 
.unwrap_or_default(); @@ -151,7 +151,7 @@ impl JMAP { pub async fn blob_lookup( &self, request: BlobLookupRequest, - ) -> Result { + ) -> trc::Result { let mut include_email = false; let mut include_mailbox = false; let mut include_thread = false; diff --git a/crates/jmap/src/blob/upload.rs b/crates/jmap/src/blob/upload.rs index 28ac8e4f..adcd46bd 100644 --- a/crates/jmap/src/blob/upload.rs +++ b/crates/jmap/src/blob/upload.rs @@ -18,6 +18,7 @@ use store::{ write::{now, BatchBuilder, BlobOp}, BlobClass, Serialize, }; +use trc::AddContext; use utils::BlobHash; use crate::{auth::AccessToken, JMAP}; @@ -33,7 +34,7 @@ impl JMAP { &self, request: BlobUploadRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { let mut response = BlobUploadResponse { account_id: request.account_id, created: Default::default(), @@ -42,7 +43,7 @@ impl JMAP { let account_id = request.account_id.document_id(); if request.create.len() > self.core.jmap.set_max_objects { - return Err(MethodError::RequestTooLarge); + return Err(MethodError::RequestTooLarge.into()); } 'outer: for (create_id, upload_object) in request.create { @@ -142,14 +143,7 @@ impl JMAP { .data .blob_quota(account_id) .await - .map_err(|err| { - tracing::error!(event = "error", - context = "blob_store", - account_id = account_id, - error = ?err, - "Failed to obtain blob quota"); - MethodError::ServerPartialFail - })?; + .caused_by(trc::location!())?; if ((self.core.jmap.upload_tmp_quota_size > 0 && used.bytes + data.len() > self.core.jmap.upload_tmp_quota_size) @@ -253,7 +247,7 @@ impl JMAP { account_id: u32, data: &[u8], set_quota: bool, - ) -> Result { + ) -> trc::Result { // First reserve the hash let hash = BlobHash::from(data); let mut batch = BatchBuilder::new(); @@ -274,14 +268,7 @@ impl JMAP { .data .blob_exists(&hash) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "put_blob", - error = ?err, - "Failed to verify blob hash existence."); - MethodError::ServerPartialFail - 
})? + .caused_by(trc::location!())? { // Upload blob to store self.core @@ -289,14 +276,7 @@ impl JMAP { .blob .put_blob(hash.as_ref(), data) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "put_blob", - error = ?err, - "Failed to store blob."); - MethodError::ServerPartialFail - })?; + .caused_by(trc::location!())?; // Commit blob let mut batch = BatchBuilder::new(); diff --git a/crates/jmap/src/changes/get.rs b/crates/jmap/src/changes/get.rs index c7728cf2..d2a97928 100644 --- a/crates/jmap/src/changes/get.rs +++ b/crates/jmap/src/changes/get.rs @@ -10,6 +10,7 @@ use jmap_proto::{ types::{collection::Collection, property::Property, state::State}, }; use store::query::log::{Change, Changes, Query}; +use trc::AddContext; use crate::{auth::AccessToken, JMAP}; @@ -18,7 +19,7 @@ impl JMAP { &self, request: ChangesRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { // Map collection and validate ACLs let collection = match request.arguments { RequestArguments::Email => { @@ -48,7 +49,7 @@ impl JMAP { RequestArguments::Quota => { access_token.assert_is_member(request.account_id)?; - return Err(MethodError::CannotCalculateChanges); + return Err(MethodError::CannotCalculateChanges.into()); } }; @@ -164,21 +165,12 @@ impl JMAP { account_id: u32, collection: Collection, query: Query, - ) -> Result { + ) -> trc::Result { self.core .storage .data .changes(account_id, collection, query) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "changes", - account_id = account_id, - collection = ?collection, - error = ?err, - "Failed to query changes."); - MethodError::ServerPartialFail - }) + .caused_by(trc::location!()) } } diff --git a/crates/jmap/src/changes/query.rs b/crates/jmap/src/changes/query.rs index 1cdac45d..60314afd 100644 --- a/crates/jmap/src/changes/query.rs +++ b/crates/jmap/src/changes/query.rs @@ -20,7 +20,7 @@ impl JMAP { &self, request: QueryChangesRequest, access_token: &AccessToken, - ) 
-> Result { + ) -> trc::Result { // Query changes let changes = self .changes( @@ -35,7 +35,11 @@ impl JMAP { changes::RequestArguments::EmailSubmission } query::RequestArguments::Quota => changes::RequestArguments::Quota, - _ => return Err(MethodError::UnknownMethod("Unknown method".to_string())), + _ => { + return Err( + MethodError::UnknownMethod("Unknown method".to_string()).into() + ) + } }, }, access_token, diff --git a/crates/jmap/src/changes/state.rs b/crates/jmap/src/changes/state.rs index a6ac7a63..313b4ab4 100644 --- a/crates/jmap/src/changes/state.rs +++ b/crates/jmap/src/changes/state.rs @@ -8,6 +8,7 @@ use jmap_proto::{ error::method::MethodError, types::{collection::Collection, state::State}, }; +use trc::AddContext; use crate::JMAP; @@ -16,26 +17,15 @@ impl JMAP { &self, account_id: u32, collection: impl Into, - ) -> Result { + ) -> trc::Result { let collection = collection.into(); - match self - .core + self.core .storage .data .get_last_change_id(account_id, collection) .await - { - Ok(id) => Ok(id.into()), - Err(err) => { - tracing::error!(event = "error", - context = "store", - account_id = account_id, - collection = ?Collection::from(collection), - error = ?err, - "Failed to obtain state"); - Err(MethodError::ServerPartialFail) - } - } + .caused_by(trc::location!()) + .map(State::from) } pub async fn assert_state( @@ -43,11 +33,11 @@ impl JMAP { account_id: u32, collection: Collection, if_in_state: &Option, - ) -> Result { + ) -> trc::Result { let old_state: State = self.get_state(account_id, collection).await?; if let Some(if_in_state) = if_in_state { if &old_state != if_in_state { - return Err(MethodError::StateMismatch); + return Err(MethodError::StateMismatch.into()); } } diff --git a/crates/jmap/src/changes/write.rs b/crates/jmap/src/changes/write.rs index db5e7822..36ebd9ef 100644 --- a/crates/jmap/src/changes/write.rs +++ b/crates/jmap/src/changes/write.rs @@ -6,33 +6,31 @@ use std::time::Duration; -use 
jmap_proto::{error::method::MethodError, types::collection::Collection}; +use jmap_proto::types::collection::Collection; use store::{ write::{log::ChangeLogBuilder, BatchBuilder}, LogKey, }; +use trc::AddContext; use crate::JMAP; impl JMAP { - pub async fn begin_changes(&self, account_id: u32) -> Result { + pub async fn begin_changes(&self, account_id: u32) -> trc::Result { self.assign_change_id(account_id) .await .map(ChangeLogBuilder::with_change_id) } - pub async fn assign_change_id(&self, _: u32) -> Result { + pub async fn assign_change_id(&self, _: u32) -> trc::Result { self.generate_snowflake_id() } - pub fn generate_snowflake_id(&self) -> Result { + pub fn generate_snowflake_id(&self) -> trc::Result { self.inner.snowflake_id.generate().ok_or_else(|| { - tracing::error!( - event = "error", - context = "change_log", - "Failed to generate snowflake id." - ); - MethodError::ServerPartialFail + trc::Cause::Unexpected + .caused_by(trc::location!()) + .ctx(trc::Key::Reason, "Failed to generate snowflake id.") }) } @@ -40,7 +38,7 @@ impl JMAP { &self, account_id: u32, mut changes: ChangeLogBuilder, - ) -> Result { + ) -> trc::Result { if changes.change_id == u64::MAX || changes.change_id == 0 { changes.change_id = self.assign_change_id(account_id).await?; } @@ -53,21 +51,15 @@ impl JMAP { .data .write(builder.build()) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "change_log", - error = ?err, - "Failed to write changes."); - MethodError::ServerPartialFail - })?; - - Ok(state) + .caused_by(trc::location!()) + .map(|_| state) } - pub async fn delete_changes(&self, account_id: u32, before: Duration) -> store::Result<()> { + pub async fn delete_changes(&self, account_id: u32, before: Duration) -> trc::Result<()> { let reference_cid = self.inner.snowflake_id.past_id(before).ok_or_else(|| { - store::Error::InternalError("Failed to generate reference change id.".to_string()) + trc::Cause::Unexpected + .caused_by(trc::location!()) + 
.ctx(trc::Key::Reason, "Failed to generate reference change id.") })?; for collection in [ diff --git a/crates/jmap/src/email/cache.rs b/crates/jmap/src/email/cache.rs index 8de9b27e..2d73c6a8 100644 --- a/crates/jmap/src/email/cache.rs +++ b/crates/jmap/src/email/cache.rs @@ -6,11 +6,8 @@ use std::{collections::HashMap, sync::Arc}; -use futures_util::TryFutureExt; -use jmap_proto::{ - error::method::MethodError, - types::{collection::Collection, property::Property}, -}; +use jmap_proto::types::{collection::Collection, property::Property}; +use trc::AddContext; use utils::lru_cache::LruCached; use crate::JMAP; @@ -26,22 +23,15 @@ impl JMAP { &self, account_id: u32, message_ids: impl Iterator, - ) -> Result, MethodError> { + ) -> trc::Result> { // Obtain current state let modseq = self .core .storage .data .get_last_change_id(account_id, Collection::Thread) - .map_err(|err| { - tracing::error!(event = "error", - context = "store", - account_id = account_id, - error = ?err, - "Failed to retrieve threads last change id"); - MethodError::ServerPartialFail - }) - .await?; + .await + .caused_by(trc::location!())?; // Lock the cache let thread_cache = if let Some(thread_cache) = diff --git a/crates/jmap/src/email/copy.rs b/crates/jmap/src/email/copy.rs index 3bdea146..a590320b 100644 --- a/crates/jmap/src/email/copy.rs +++ b/crates/jmap/src/email/copy.rs @@ -38,6 +38,7 @@ use store::{ }, BlobClass, Serialize, }; +use trc::AddContext; use utils::map::vec_map::VecMap; use crate::{auth::AccessToken, mailbox::UidMailbox, services::housekeeper::Event, JMAP}; @@ -54,14 +55,15 @@ impl JMAP { request: CopyRequest, access_token: &AccessToken, next_call: &mut Option>, - ) -> Result { + ) -> trc::Result { let account_id = request.account_id.document_id(); let from_account_id = request.from_account_id.document_id(); if account_id == from_account_id { return Err(MethodError::InvalidArguments( "From accountId is equal to fromAccountId".to_string(), - )); + ) + .into()); } let old_state 
= self .assert_state(account_id, Collection::Email, &request.if_in_state) @@ -277,7 +279,7 @@ impl JMAP { mailboxes: Vec, keywords: Vec, received_at: Option, - ) -> Result, MethodError> { + ) -> trc::Result> { // Obtain metadata let mut metadata = if let Some(metadata) = self .get_property::>( @@ -341,7 +343,7 @@ impl JMAP { let thread_id = if !references.is_empty() { self.find_or_merge_thread(account_id, subject, &references) .await - .map_err(|_| MethodError::ServerPartialFail)? + .caused_by(trc::location!())? } else { None }; @@ -360,14 +362,7 @@ impl JMAP { let uid = self .assign_imap_uid(account_id, *mailbox_id) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "email_copy", - error = ?err, - "Failed to assign IMAP UID."); - MethodError::ServerPartialFail - })?; + .caused_by(trc::location!())?; mailbox_ids.push(UidMailbox::new(*mailbox_id, uid)); email.imap_uids.push(uid); } @@ -416,23 +411,12 @@ impl JMAP { .data .write(batch.build()) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "email_copy", - error = ?err, - "Failed to write message to database."); - MethodError::ServerPartialFail - })?; + .caused_by(trc::location!())?; let thread_id = match thread_id { Some(thread_id) => thread_id, - None => ids - .first_document_id() - .map_err(|_| MethodError::ServerPartialFail)?, + None => ids.first_document_id().caused_by(trc::location!())?, }; - let document_id = ids - .last_document_id() - .map_err(|_| MethodError::ServerPartialFail)?; + let document_id = ids.last_document_id().caused_by(trc::location!())?; // Request FTS index let _ = self.inner.housekeeper_tx.send(Event::IndexStart).await; diff --git a/crates/jmap/src/email/crypto.rs b/crates/jmap/src/email/crypto.rs index 2c6f6aaa..0119ad80 100644 --- a/crates/jmap/src/email/crypto.rs +++ b/crates/jmap/src/email/crypto.rs @@ -605,24 +605,17 @@ impl Serialize for &EncryptionParams { } impl Deserialize for EncryptionParams { - fn deserialize(bytes: &[u8]) -> 
store::Result { - let version = *bytes.first().ok_or_else(|| { - store::Error::InternalError( - "Failed to read version while deserializing encryption params".to_string(), - ) - })?; + fn deserialize(bytes: &[u8]) -> trc::Result { + let version = *bytes + .first() + .ok_or_else(|| trc::Cause::DataCorruption.caused_by(trc::location!()))?; match version { - 1 if bytes.len() > 1 => bincode::deserialize(&bytes[1..]).map_err(|err| { - store::Error::InternalError(format!( - "Failed to deserialize encryption params: {}", - err - )) - }), + 1 if bytes.len() > 1 => bincode::deserialize(&bytes[1..]) + .map_err(|err| trc::Error::from(err).caused_by(trc::location!())), - _ => Err(store::Error::InternalError(format!( - "Unknown encryption params version: {}", - version - ))), + _ => Err(trc::Cause::Deserialize + .caused_by(trc::location!()) + .ctx(trc::Key::Value, version as u64)), } } } diff --git a/crates/jmap/src/email/delete.rs b/crates/jmap/src/email/delete.rs index 9f39ee74..7edebd9d 100644 --- a/crates/jmap/src/email/delete.rs +++ b/crates/jmap/src/email/delete.rs @@ -6,12 +6,9 @@ use std::time::Duration; -use jmap_proto::{ - error::method::MethodError, - types::{ - collection::Collection, id::Id, keyword::Keyword, property::Property, state::StateChange, - type_state::DataType, - }, +use jmap_proto::types::{ + collection::Collection, id::Id, keyword::Keyword, property::Property, state::StateChange, + type_state::DataType, }; use store::{ ahash::AHashMap, @@ -22,6 +19,7 @@ use store::{ }, BitmapKey, IterateParams, ValueKey, U32_LEN, }; +use trc::AddContext; use utils::codec::leb128::Leb128Reader; use crate::{ @@ -37,7 +35,7 @@ impl JMAP { &self, account_id: u32, mut document_ids: RoaringBitmap, - ) -> Result<(ChangeLogBuilder, RoaringBitmap), MethodError> { + ) -> trc::Result<(ChangeLogBuilder, RoaringBitmap)> { // Create batch let mut changes = ChangeLogBuilder::with_change_id(0); let mut delete_properties = AHashMap::new(); @@ -107,9 +105,7 @@ impl JMAP { let 
(thread_id, _) = key .get(U32_LEN + 2..) .and_then(|bytes| bytes.read_leb128::()) - .ok_or_else(|| { - store::Error::InternalError("Failed to read threadId.".to_string()) - })?; + .ok_or_else(|| trc::Error::corrupted_key(key, None, trc::location!()))?; if let Some(thread_count) = thread_ids.get_mut(&thread_id) { *thread_count -= 1; } @@ -118,15 +114,7 @@ impl JMAP { }, ) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "email_delete", - error = ?err, - "Failed to iterate threadIds." - ); - MethodError::ServerPartialFail - })?; + .caused_by(trc::location!())?; // Tombstone message and untag it from the mailboxes let mut batch = BatchBuilder::new(); @@ -189,14 +177,7 @@ impl JMAP { .data .write(batch.build()) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "email_delete", - error = ?err, - "Failed to commit batch."); - MethodError::ServerPartialFail - })?; + .caused_by(trc::location!())?; batch = BatchBuilder::new(); batch @@ -221,14 +202,7 @@ impl JMAP { .data .write(batch.build()) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "email_delete", - error = ?err, - "Failed to commit batch."); - MethodError::ServerPartialFail - })?; + .caused_by(trc::location!())?; } Ok((changes, document_ids)) @@ -339,11 +313,7 @@ impl JMAP { } } - pub async fn emails_auto_expunge( - &self, - account_id: u32, - period: Duration, - ) -> Result<(), MethodError> { + pub async fn emails_auto_expunge(&self, account_id: u32, period: Duration) -> trc::Result<()> { let deletion_candidates = self .get_tag( account_id, @@ -367,13 +337,9 @@ impl JMAP { return Ok(()); } let reference_cid = self.inner.snowflake_id.past_id(period).ok_or_else(|| { - tracing::error!( - event = "error", - context = "email_auto_expunge", - account_id = account_id, - "Failed to generate reference cid." 
- ); - MethodError::ServerPartialFail + trc::Cause::Unexpected + .caused_by(trc::location!()) + .ctx(trc::Key::Reason, "Failed to generate reference cid.") })?; // Find messages to destroy @@ -422,7 +388,7 @@ impl JMAP { Ok(()) } - pub async fn emails_purge_tombstoned(&self, account_id: u32) -> store::Result<()> { + pub async fn emails_purge_tombstoned(&self, account_id: u32) -> trc::Result<()> { // Obtain tombstoned messages let tombstoned_ids = self .core diff --git a/crates/jmap/src/email/get.rs b/crates/jmap/src/email/get.rs index f492ade2..13c48d49 100644 --- a/crates/jmap/src/email/get.rs +++ b/crates/jmap/src/email/get.rs @@ -21,6 +21,7 @@ use jmap_proto::{ }; use mail_parser::HeaderName; use store::{write::Bincode, BlobClass}; +use trc::AddContext; use crate::{auth::AccessToken, email::headers::HeaderToValue, mailbox::UidMailbox, JMAP}; @@ -35,7 +36,7 @@ impl JMAP { &self, mut request: GetRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { let ids = request.unwrap_ids(self.core.jmap.get_max_objects)?; let properties = request.unwrap_properties(&[ Property::Id, @@ -95,14 +96,7 @@ impl JMAP { .collect::>(); self.get_cached_thread_ids(account_id, document_ids.iter().copied()) .await - .map_err(|err| { - tracing::error!(event = "error", - context = "store", - account_id = account_id, - error = ?err, - "Failed to retrieve thread Ids"); - MethodError::ServerPartialFail - })? + .caused_by(trc::location!())? 
.into_iter() .filter_map(|(document_id, thread_id)| { Id::from_parts(thread_id, document_id).into() @@ -408,7 +402,8 @@ impl JMAP { _ => { return Err(MethodError::InvalidArguments(format!( "Invalid property {property:?}" - ))); + )) + .into()); } } } diff --git a/crates/jmap/src/email/import.rs b/crates/jmap/src/email/import.rs index 767ae3aa..c154181c 100644 --- a/crates/jmap/src/email/import.rs +++ b/crates/jmap/src/email/import.rs @@ -5,10 +5,7 @@ */ use jmap_proto::{ - error::{ - method::MethodError, - set::{SetError, SetErrorType}, - }, + error::set::{SetError, SetErrorType}, method::import::{ImportEmailRequest, ImportEmailResponse}, types::{ acl::Acl, @@ -22,7 +19,7 @@ use jmap_proto::{ use mail_parser::MessageParser; use utils::map::vec_map::VecMap; -use crate::{auth::AccessToken, IngestError, JMAP}; +use crate::{auth::AccessToken, JMAP}; use super::ingest::{IngestEmail, IngestSource}; @@ -31,7 +28,7 @@ impl JMAP { &self, request: ImportEmailRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { // Validate state let account_id = request.account_id.document_id(); let old_state: State = self @@ -131,22 +128,28 @@ impl JMAP { Ok(email) => { response.created.append(id, email.into()); } - Err(IngestError::Permanent { reason, .. 
}) => { - response.not_created.append( - id, - SetError::new(SetErrorType::InvalidEmail).with_description(reason), - ); - } - Err(IngestError::OverQuota) => { - response.not_created.append( - id, - SetError::new(SetErrorType::OverQuota) - .with_description("You have exceeded your disk quota."), - ); - } - Err(IngestError::Temporary) => { - return Err(MethodError::ServerPartialFail); - } + Err(mut err) => match err.as_ref() { + trc::Cause::OverQuota => { + response.not_created.append( + id, + SetError::new(SetErrorType::OverQuota) + .with_description("You have exceeded your disk quota."), + ); + } + trc::Cause::Ingest => { + response.not_created.append( + id, + SetError::new(SetErrorType::InvalidEmail).with_description( + err.take_value(trc::Key::Reason) + .and_then(|v| v.into_string()) + .unwrap(), + ), + ); + } + _ => { + return Err(err); + } + }, } } diff --git a/crates/jmap/src/email/ingest.rs b/crates/jmap/src/email/ingest.rs index 5c2b2c21..2e08ba5c 100644 --- a/crates/jmap/src/email/ingest.rs +++ b/crates/jmap/src/email/ingest.rs @@ -29,13 +29,14 @@ use store::{ }, BitmapKey, BlobClass, Serialize, }; +use trc::AddContext; use utils::map::vec_map::VecMap; use crate::{ email::index::{IndexMessage, VisitValues, MAX_ID_LENGTH}, mailbox::{UidMailbox, INBOX_ID, JUNK_ID}, services::housekeeper::Event, - IngestError, JMAP, + JMAP, }; use super::{ @@ -75,25 +76,23 @@ const MAX_RETRIES: u32 = 10; impl JMAP { #[allow(clippy::blocks_in_conditions)] - pub async fn email_ingest( - &self, - mut params: IngestEmail<'_>, - ) -> Result { + pub async fn email_ingest(&self, mut params: IngestEmail<'_>) -> trc::Result { // Check quota let mut raw_message_len = params.raw_message.len() as i64; if !self .has_available_quota(params.account_id, params.account_quota, raw_message_len) .await - .map_err(|_| IngestError::Temporary)? + .caused_by(trc::location!())? 
{ - return Err(IngestError::OverQuota); + return Err(trc::Cause::OverQuota.into_err()); } // Parse message let mut raw_message = Cow::from(params.raw_message); - let mut message = params.message.ok_or_else(|| IngestError::Permanent { - code: [5, 5, 0], - reason: "Failed to parse e-mail message.".to_string(), + let mut message = params.message.ok_or_else(|| { + trc::Cause::Ingest + .ctx(trc::Key::Code, 550) + .ctx(trc::Key::Reason, "Failed to parse e-mail message.") })?; // Check for Spam headers @@ -162,25 +161,10 @@ impl JMAP { vec![Filter::eq(Property::MessageId, message_id)], ) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "find_duplicates", - error = ?err, - "Duplicate message search failed."); - IngestError::Temporary - })? + .caused_by(trc::location!())? .results .is_empty() { - tracing::debug!( - context = "email_ingest", - event = "skip", - account_id = ?params.account_id, - from = ?message.from(), - message_id = message_id, - "Duplicate message skipped."); - return Ok(IngestedEmail { id: Id::default(), change_id: u64::MAX, @@ -208,7 +192,7 @@ impl JMAP { Property::Parameters, ) .await - .map_err(|_| IngestError::Temporary)? + .caused_by(trc::location!())? 
{ match message.encrypt(&encrypt_params).await { Ok(new_raw_message) => { @@ -216,9 +200,11 @@ impl JMAP { raw_message_len = raw_message.len() as i64; message = MessageParser::default() .parse(raw_message.as_ref()) - .ok_or_else(|| IngestError::Permanent { - code: [5, 5, 0], - reason: "Failed to parse encrypted e-mail message.".to_string(), + .ok_or_else(|| { + trc::Cause::Ingest.ctx(trc::Key::Code, 550).ctx( + trc::Key::Reason, + "Failed to parse encrypted e-mail message.", + ) })?; // Remove contents from parsed message @@ -238,12 +224,7 @@ impl JMAP { } } Err(EncryptMessageError::Error(err)) => { - tracing::error!( - event = "error", - context = "email_ingest", - error = ?err, - "Failed to encrypt message."); - return Err(IngestError::Temporary); + trc::bail!(trc::Cause::Crypto.caused_by(trc::location!()).reason(err)); } _ => unreachable!(), } @@ -254,27 +235,13 @@ impl JMAP { let change_id = self .assign_change_id(params.account_id) .await - .map_err(|_| { - tracing::error!( - event = "error", - context = "email_ingest", - "Failed to assign changeId." 
- ); - IngestError::Temporary - })?; + .caused_by(trc::location!())?; // Store blob let blob_id = self .put_blob(params.account_id, raw_message.as_ref(), false) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "email_ingest", - error = ?err, - "Failed to write blob."); - IngestError::Temporary - })?; + .caused_by(trc::location!())?; // Assign IMAP UIDs let mut mailbox_ids = Vec::with_capacity(params.mailbox_ids.len()); @@ -283,14 +250,7 @@ impl JMAP { let uid = self .assign_imap_uid(params.account_id, *mailbox_id) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "email_ingest", - error = ?err, - "Failed to assign IMAP UID."); - IngestError::Temporary - })?; + .caused_by(trc::location!())?; mailbox_ids.push(UidMailbox::new(*mailbox_id, uid)); imap_uids.push(uid); } @@ -329,9 +289,7 @@ impl JMAP { .tag(Property::ThreadId, TagValue::Id(maybe_thread_id), 0) .set( ValueClass::FtsQueue(FtsQueueClass { - seq: self - .generate_snowflake_id() - .map_err(|_| IngestError::Temporary)?, + seq: self.generate_snowflake_id().caused_by(trc::location!())?, hash: blob_id.hash.clone(), }), 0u64.serialize(), @@ -344,21 +302,12 @@ impl JMAP { .data .write(batch.build()) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "email_ingest", - error = ?err, - "Failed to write message to database."); - IngestError::Temporary - })?; + .caused_by(trc::location!())?; let thread_id = match thread_id { Some(thread_id) => thread_id, - None => ids - .first_document_id() - .map_err(|_| IngestError::Temporary)?, + None => ids.first_document_id().caused_by(trc::location!())?, }; - let document_id = ids.last_document_id().map_err(|_| IngestError::Temporary)?; + let document_id = ids.last_document_id().caused_by(trc::location!())?; let id = Id::from_parts(thread_id, document_id); // Request FTS index @@ -422,7 +371,7 @@ impl JMAP { account_id: u32, thread_name: &str, references: &[&str], - ) -> Result, IngestError> { + ) -> 
trc::Result> { let mut try_count = 0; loop { @@ -447,14 +396,7 @@ impl JMAP { .data .filter(account_id, Collection::Email, filters) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "find_or_merge_thread", - error = ?err, - "Thread search failed."); - IngestError::Temporary - })? + .caused_by(trc::location!())? .results; if results.is_empty() { @@ -465,14 +407,7 @@ impl JMAP { let thread_ids = self .get_cached_thread_ids(account_id, results.iter()) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "find_or_merge_thread", - error = ?err, - "Failed to obtain threadIds."); - IngestError::Temporary - })?; + .caused_by(trc::location!())?; if thread_ids.len() == 1 { return Ok(thread_ids @@ -502,14 +437,10 @@ impl JMAP { // Delete all but the most common threadId let mut batch = BatchBuilder::new(); - let change_id = self.assign_change_id(account_id).await.map_err(|_| { - tracing::error!( - event = "error", - context = "find_or_merge_thread", - "Failed to assign changeId for thread merge." - ); - IngestError::Temporary - })?; + let change_id = self + .assign_change_id(account_id) + .await + .caused_by(trc::location!())?; let mut changes = ChangeLogBuilder::with_change_id(change_id); batch .with_account_id(account_id) @@ -543,14 +474,7 @@ impl JMAP { document_id: 0, }) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "find_or_merge_thread", - error = ?err, - "Failed to obtain threadId bitmap."); - IngestError::Temporary - })? + .caused_by(trc::location!())? 
.unwrap_or_default() { batch @@ -570,24 +494,19 @@ impl JMAP { match self.core.storage.data.write(batch.build()).await { Ok(_) => return Ok(Some(thread_id)), - Err(store::Error::AssertValueFailed) if try_count < MAX_RETRIES => { + Err(err) if err.matches(trc::Cause::AssertValue) && try_count < MAX_RETRIES => { let backoff = rand::thread_rng().gen_range(50..=300); tokio::time::sleep(Duration::from_millis(backoff)).await; try_count += 1; } Err(err) => { - tracing::error!( - event = "error", - context = "find_or_merge_thread", - error = ?err, - "Failed to write thread merge batch."); - return Err(IngestError::Temporary); + return Err(err.caused_by(trc::location!())); } } } } - pub async fn assign_imap_uid(&self, account_id: u32, mailbox_id: u32) -> store::Result { + pub async fn assign_imap_uid(&self, account_id: u32, mailbox_id: u32) -> trc::Result { // Increment UID next let mut batch = BatchBuilder::new(); batch @@ -613,7 +532,7 @@ impl LogEmailInsert { } impl SerializeWithId for LogEmailInsert { - fn serialize_with_id(&self, ids: &AssignedIds) -> store::Result> { + fn serialize_with_id(&self, ids: &AssignedIds) -> trc::Result> { let thread_id = match self.0 { Some(thread_id) => thread_id, None => ids.first_document_id()?, diff --git a/crates/jmap/src/email/parse.rs b/crates/jmap/src/email/parse.rs index ad55ec57..63319c8c 100644 --- a/crates/jmap/src/email/parse.rs +++ b/crates/jmap/src/email/parse.rs @@ -28,9 +28,9 @@ impl JMAP { &self, request: ParseEmailRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { if request.blob_ids.len() > self.core.jmap.mail_parse_max_items { - return Err(MethodError::RequestTooLarge); + return Err(MethodError::RequestTooLarge.into()); } let properties = request.properties.unwrap_or_else(|| { vec![ @@ -237,7 +237,8 @@ impl JMAP { _ => { return Err(MethodError::InvalidArguments(format!( "Invalid property {property:?}" - ))); + )) + .into()); } } } diff --git a/crates/jmap/src/email/query.rs 
b/crates/jmap/src/email/query.rs index efacc8a1..2b93b28b 100644 --- a/crates/jmap/src/email/query.rs +++ b/crates/jmap/src/email/query.rs @@ -27,7 +27,7 @@ impl JMAP { &self, mut request: QueryRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { let account_id = request.account_id.document_id(); let mut filters = Vec::with_capacity(request.filter.len()); @@ -118,7 +118,8 @@ impl JMAP { Some(HeaderName::Other(header_name)) => { return Err(MethodError::InvalidArguments(format!( "Querying header '{header_name}' is not supported.", - ))); + )) + .into()); } Some(header_name) => { if let Some(header_value) = header.next() { @@ -153,7 +154,9 @@ impl JMAP { Filter::And | Filter::Or | Filter::Not | Filter::Close => { fts_filters.push(cond.into()); } - other => return Err(MethodError::UnsupportedFilter(other.to_string())), + other => { + return Err(MethodError::UnsupportedFilter(other.to_string()).into()) + } } } filters.push(query::Filter::is_in_set( @@ -248,7 +251,9 @@ impl JMAP { filters.push(cond.into()); } - other => return Err(MethodError::UnsupportedFilter(other.to_string())), + other => { + return Err(MethodError::UnsupportedFilter(other.to_string()).into()) + } } } } @@ -324,7 +329,7 @@ impl JMAP { query::Comparator::field(Property::Cc, comparator.is_ascending) } - other => return Err(MethodError::UnsupportedSort(other.to_string())), + other => return Err(MethodError::UnsupportedSort(other.to_string()).into()), }); } @@ -353,7 +358,7 @@ impl JMAP { account_id: u32, keyword: Keyword, match_all: bool, - ) -> Result { + ) -> trc::Result { let keyword_doc_ids = self .get_tag(account_id, Collection::Email, Property::Keywords, keyword) .await? 
diff --git a/crates/jmap/src/email/set.rs b/crates/jmap/src/email/set.rs index 26e0f4ea..9990ec41 100644 --- a/crates/jmap/src/email/set.rs +++ b/crates/jmap/src/email/set.rs @@ -7,10 +7,7 @@ use std::{borrow::Cow, collections::HashMap, slice::IterMut}; use jmap_proto::{ - error::{ - method::MethodError, - set::{SetError, SetErrorType}, - }, + error::set::{SetError, SetErrorType}, method::set::{RequestArguments, SetRequest, SetResponse}, response::references::EvalObjectReferences, types::{ @@ -41,8 +38,9 @@ use store::{ }, Serialize, }; +use trc::AddContext; -use crate::{auth::AccessToken, mailbox::UidMailbox, IngestError, JMAP}; +use crate::{auth::AccessToken, mailbox::UidMailbox, JMAP}; use super::{ headers::{BuildHeader, ValueToHeader}, @@ -54,7 +52,7 @@ impl JMAP { &self, mut request: SetRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { // Prepare response let account_id = request.account_id.document_id(); let mut response = self @@ -728,14 +726,14 @@ impl JMAP { Ok(message) => { response.created.insert(id, message.into()); } - Err(IngestError::OverQuota) => { + Err(err) if err.matches(trc::Cause::OverQuota) => { response.not_created.append( id, SetError::new(SetErrorType::OverQuota) .with_description("You have exceeded your disk quota."), ); } - Err(_) => return Err(MethodError::ServerPartialFail), + Err(err) => return Err(err), } } @@ -944,14 +942,7 @@ impl JMAP { uid_mailbox.uid = self .assign_imap_uid(account_id, uid_mailbox.mailbox_id) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "email_copy", - error = ?err, - "Failed to assign IMAP UID."); - MethodError::ServerPartialFail - })?; + .caused_by(trc::location!())?; } } @@ -971,7 +962,7 @@ impl JMAP { // Add to updated list response.updated.append(id, None); } - Err(store::Error::AssertValueFailed) => { + Err(err) if err.matches(trc::Cause::AssertValue) => { response.not_updated.append( id, SetError::forbidden().with_description( @@ -980,12 +971,7 @@ impl 
JMAP { ); } Err(err) => { - tracing::error!( - event = "error", - context = "email_set", - error = ?err, - "Failed to write message changes to database."); - return Err(MethodError::ServerPartialFail); + return Err(err.caused_by(trc::location!())); } } } diff --git a/crates/jmap/src/email/snippet.rs b/crates/jmap/src/email/snippet.rs index 38544e67..61516e6e 100644 --- a/crates/jmap/src/email/snippet.rs +++ b/crates/jmap/src/email/snippet.rs @@ -25,7 +25,7 @@ impl JMAP { &self, request: GetSearchSnippetRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { let mut filter_stack = vec![]; let mut include_term = true; let mut terms = vec![]; @@ -83,7 +83,7 @@ impl JMAP { }; if email_ids.len() > self.core.jmap.snippet_max_results { - return Err(MethodError::RequestTooLarge); + return Err(MethodError::RequestTooLarge.into()); } for email_id in email_ids { diff --git a/crates/jmap/src/identity/get.rs b/crates/jmap/src/identity/get.rs index c8ad3c23..360219f1 100644 --- a/crates/jmap/src/identity/get.rs +++ b/crates/jmap/src/identity/get.rs @@ -6,7 +6,6 @@ use directory::QueryBy; use jmap_proto::{ - error::method::MethodError, method::get::{GetRequest, GetResponse, RequestArguments}, object::Object, types::{collection::Collection, property::Property, value::Value}, @@ -15,6 +14,7 @@ use store::{ roaring::RoaringBitmap, write::{BatchBuilder, F_VALUE}, }; +use trc::AddContext; use crate::JMAP; @@ -24,7 +24,7 @@ impl JMAP { pub async fn identity_get( &self, mut request: GetRequest, - ) -> Result { + ) -> trc::Result { let ids = request.unwrap_ids(self.core.jmap.get_max_objects)?; let properties = request.unwrap_properties(&[ Property::Id, @@ -107,10 +107,7 @@ impl JMAP { Ok(response) } - pub async fn identity_get_or_create( - &self, - account_id: u32, - ) -> Result { + pub async fn identity_get_or_create(&self, account_id: u32) -> trc::Result { let mut identity_ids = self .get_document_ids(account_id, Collection::Identity) .await? 
@@ -126,14 +123,7 @@ impl JMAP { .directory .query(QueryBy::Id(account_id), false) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "identity_get_or_create", - error = ?err, - "Failed to query directory."); - MethodError::ServerPartialFail - })? + .caused_by(trc::location!())? .unwrap_or_default(); if principal.emails.is_empty() { return Ok(identity_ids); @@ -178,14 +168,7 @@ impl JMAP { .data .write(batch.build()) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "identity_get_or_create", - error = ?err, - "Failed to create identities."); - MethodError::ServerPartialFail - })?; + .caused_by(trc::location!())?; Ok(identity_ids) } diff --git a/crates/jmap/src/identity/set.rs b/crates/jmap/src/identity/set.rs index a21dbd01..e840f90f 100644 --- a/crates/jmap/src/identity/set.rs +++ b/crates/jmap/src/identity/set.rs @@ -6,7 +6,7 @@ use directory::QueryBy; use jmap_proto::{ - error::{method::MethodError, set::SetError}, + error::set::SetError, method::set::{RequestArguments, SetRequest, SetResponse}, object::Object, response::references::EvalObjectReferences, @@ -24,7 +24,7 @@ impl JMAP { pub async fn identity_set( &self, mut request: SetRequest, - ) -> Result { + ) -> trc::Result { let account_id = request.account_id.document_id(); let mut identity_ids = self .get_document_ids(account_id, Collection::Identity) diff --git a/crates/jmap/src/lib.rs b/crates/jmap/src/lib.rs index 4f308b3c..87296122 100644 --- a/crates/jmap/src/lib.rs +++ b/crates/jmap/src/lib.rs @@ -47,6 +47,7 @@ use store::{ BitmapKey, Deserialize, IterateParams, ValueKey, U32_LEN, }; use tokio::sync::mpsc; +use trc::AddContext; use utils::{ config::Config, lru_cache::{LruCache, LruCached}, @@ -103,13 +104,6 @@ pub struct Inner { pub cache_threads: LruCache>, } -#[derive(Debug)] -pub enum IngestError { - Temporary, - OverQuota, - Permanent { code: [u8; 3], reason: String }, -} - impl JMAP { pub async fn init( config: &mut Config, @@ -176,13 +170,13 @@ 
impl JMAP { collection: Collection, document_id: u32, property: impl AsRef, - ) -> Result, MethodError> + ) -> trc::Result> where U: Deserialize + 'static, { let property = property.as_ref(); - match self - .core + + self.core .storage .data .get_value::(ValueKey { @@ -192,20 +186,13 @@ impl JMAP { class: ValueClass::Property(property.into()), }) .await - { - Ok(value) => Ok(value), - Err(err) => { - tracing::error!(event = "error", - context = "store", - account_id = account_id, - collection = ?collection, - document_id = document_id, - property = ?property, - error = ?err, - "Failed to retrieve property"); - Err(MethodError::ServerPartialFail) - } - } + .add_context(|err| { + err.caused_by(trc::location!()) + .account_id(account_id) + .collection(collection) + .document_id(document_id) + .property(property) + }) } pub async fn get_properties( @@ -214,7 +201,7 @@ impl JMAP { collection: Collection, iterate: &I, property: P, - ) -> Result, MethodError> + ) -> trc::Result> where I: DocumentSet + Send + Sync, P: AsRef, @@ -254,43 +241,30 @@ impl JMAP { }, ) .await - .map_err(|err| { - tracing::error!(event = "error", - context = "store", - account_id = account_id, - collection = ?collection, - property = ?property, - error = ?err, - "Failed to retrieve properties"); - MethodError::ServerPartialFail - })?; - - Ok(results) + .add_context(|err| { + err.caused_by(trc::location!()) + .account_id(account_id) + .collection(collection) + .property(property) + }) + .map(|_| results) } pub async fn get_document_ids( &self, account_id: u32, collection: Collection, - ) -> Result, MethodError> { - match self - .core + ) -> trc::Result> { + self.core .storage .data .get_bitmap(BitmapKey::document_ids(account_id, collection)) .await - { - Ok(value) => Ok(value), - Err(err) => { - tracing::error!(event = "error", - context = "store", - account_id = account_id, - collection = ?collection, - error = ?err, - "Failed to retrieve document ids bitmap"); - 
Err(MethodError::ServerPartialFail) - } - } + .add_context(|err| { + err.caused_by(trc::location!()) + .account_id(account_id) + .collection(collection) + }) } pub async fn get_tag( @@ -299,10 +273,9 @@ impl JMAP { collection: Collection, property: impl AsRef, value: impl Into>, - ) -> Result, MethodError> { + ) -> trc::Result> { let property = property.as_ref(); - match self - .core + self.core .storage .data .get_bitmap(BitmapKey { @@ -315,26 +288,19 @@ impl JMAP { document_id: 0, }) .await - { - Ok(value) => Ok(value), - Err(err) => { - tracing::error!(event = "error", - context = "store", - account_id = account_id, - collection = ?collection, - property = ?property, - error = ?err, - "Failed to retrieve tag bitmap"); - Err(MethodError::ServerPartialFail) - } - } + .add_context(|err| { + err.caused_by(trc::location!()) + .account_id(account_id) + .collection(collection) + .property(property) + }) } pub async fn prepare_set_response( &self, request: &SetRequest, collection: Collection, - ) -> Result { + ) -> trc::Result { Ok( SetResponse::from_request(request, self.core.jmap.set_max_objects)?.with_state( self.assert_state( @@ -347,11 +313,7 @@ impl JMAP { ) } - pub async fn get_quota( - &self, - access_token: &AccessToken, - account_id: u32, - ) -> Result { + pub async fn get_quota(&self, access_token: &AccessToken, account_id: u32) -> trc::Result { Ok(if access_token.primary_id == account_id { access_token.quota as i64 } else { @@ -360,35 +322,19 @@ impl JMAP { .directory .query(QueryBy::Id(account_id), false) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "get_quota", - account_id = account_id, - error = ?err, - "Failed to obtain disk quota for account."); - MethodError::ServerPartialFail - })? + .add_context(|err| err.caused_by(trc::location!()).account_id(account_id))? 
.map(|p| p.quota as i64) .unwrap_or_default() }) } - pub async fn get_used_quota(&self, account_id: u32) -> Result { + pub async fn get_used_quota(&self, account_id: u32) -> trc::Result { self.core .storage .data .get_counter(DirectoryClass::UsedQuota(account_id)) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "get_used_quota", - account_id = account_id, - error = ?err, - "Failed to obtain used disk quota for account."); - MethodError::ServerPartialFail - }) + .add_context(|err| err.caused_by(trc::location!()).account_id(account_id)) } pub async fn has_available_quota( @@ -396,7 +342,7 @@ impl JMAP { account_id: u32, account_quota: i64, item_size: i64, - ) -> Result { + ) -> trc::Result { if account_quota == 0 { return Ok(true); } @@ -433,21 +379,16 @@ impl JMAP { account_id: u32, collection: Collection, filters: Vec, - ) -> Result { + ) -> trc::Result { self.core .storage .data .filter(account_id, collection, filters) .await - .map_err(|err| { - tracing::error!(event = "error", - context = "filter", - account_id = account_id, - collection = ?collection, - error = ?err, - "Failed to execute filter."); - - MethodError::ServerPartialFail + .add_context(|err| { + err.caused_by(trc::location!()) + .account_id(account_id) + .collection(collection) }) } @@ -456,21 +397,16 @@ impl JMAP { account_id: u32, collection: Collection, filters: Vec>, - ) -> Result { + ) -> trc::Result { self.core .storage .fts .query(account_id, collection, filters) .await - .map_err(|err| { - tracing::error!(event = "error", - context = "fts-filter", - account_id = account_id, - collection = ?collection, - error = ?err, - "Failed to execute filter."); - - MethodError::ServerPartialFail + .add_context(|err| { + err.caused_by(trc::location!()) + .account_id(account_id) + .collection(collection) }) } @@ -478,7 +414,7 @@ impl JMAP { &self, result_set: &ResultSet, request: &QueryRequest, - ) -> Result<(QueryResponse, Option), MethodError> { + ) -> 
trc::Result<(QueryResponse, Option)> { let total = result_set.results.len() as usize; let (limit_total, limit) = if let Some(limit) = request.limit { if limit > 0 { @@ -529,75 +465,39 @@ impl JMAP { comparators: Vec, paginate: Pagination, mut response: QueryResponse, - ) -> Result { + ) -> trc::Result { // Sort results let collection = result_set.collection; let account_id = result_set.account_id; response.update_results( - match self - .core + self.core .storage .data .sort(result_set, comparators, paginate) .await - { - Ok(result) => result, - Err(err) => { - tracing::error!(event = "error", - context = "store", - account_id = account_id, - collection = ?collection, - error = ?err, - "Sort failed"); - return Err(MethodError::ServerPartialFail); - } - }, + .add_context(|err| { + err.caused_by(trc::location!()) + .account_id(account_id) + .collection(collection) + })?, )?; Ok(response) } - pub async fn write_batch(&self, batch: BatchBuilder) -> Result { + pub async fn write_batch(&self, batch: BatchBuilder) -> trc::Result { self.core .storage .data .write(batch.build()) .await - .map_err(|err| { - match err { - store::Error::InternalError(err) => { - tracing::error!( - event = "error", - context = "write_batch", - error = ?err, - "Failed to write batch."); - MethodError::ServerPartialFail - } - store::Error::AssertValueFailed => { - // This should not occur, as we are not using assertions. - tracing::debug!( - event = "assert_failed", - context = "write_batch", - "Failed to assert value." - ); - MethodError::ServerUnavailable - } - } - }) + .caused_by(trc::location!()) } - pub async fn write_batch_expect_id(&self, batch: BatchBuilder) -> Result { - self.write_batch(batch).await.and_then(|ids| { - ids.last_document_id().map_err(|err| { - tracing::error!( - event = "error", - context = "write_batch_expect_id", - error = ?err, - "Failed to obtain last document id." 
- ); - MethodError::ServerPartialFail - }) - }) + pub async fn write_batch_expect_id(&self, batch: BatchBuilder) -> trc::Result { + self.write_batch(batch) + .await + .and_then(|ids| ids.last_document_id().caused_by(trc::location!())) } } @@ -625,11 +525,11 @@ impl From for JMAP { } trait UpdateResults: Sized { - fn update_results(&mut self, sorted_results: SortedResultSet) -> Result<(), MethodError>; + fn update_results(&mut self, sorted_results: SortedResultSet) -> trc::Result<()>; } impl UpdateResults for QueryResponse { - fn update_results(&mut self, sorted_results: SortedResultSet) -> Result<(), MethodError> { + fn update_results(&mut self, sorted_results: SortedResultSet) -> trc::Result<()> { // Prepare response if sorted_results.found_anchor { self.position = sorted_results.position; @@ -640,7 +540,7 @@ impl UpdateResults for QueryResponse { .collect::>(); Ok(()) } else { - Err(MethodError::AnchorNotFound) + Err(MethodError::AnchorNotFound.into()) } } } diff --git a/crates/jmap/src/mailbox/get.rs b/crates/jmap/src/mailbox/get.rs index 56a5fcc6..189fc74b 100644 --- a/crates/jmap/src/mailbox/get.rs +++ b/crates/jmap/src/mailbox/get.rs @@ -5,12 +5,12 @@ */ use jmap_proto::{ - error::method::MethodError, method::get::{GetRequest, GetResponse, RequestArguments}, object::Object, types::{acl::Acl, collection::Collection, keyword::Keyword, property::Property, value::Value}, }; use store::{ahash::AHashSet, query::Filter, roaring::RoaringBitmap}; +use trc::AddContext; use crate::{ auth::{acl::EffectiveAcl, AccessToken}, @@ -22,7 +22,7 @@ impl JMAP { &self, mut request: GetRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { let ids = request.unwrap_ids(self.core.jmap.get_max_objects)?; let properties = request.unwrap_properties(&[ Property::Id, @@ -241,19 +241,12 @@ impl JMAP { &self, account_id: u32, document_ids: Option, - ) -> Result { + ) -> trc::Result { if let Some(document_ids) = document_ids { let mut thread_ids = AHashSet::default(); 
self.get_cached_thread_ids(account_id, document_ids.into_iter()) .await - .map_err(|err| { - tracing::error!(event = "error", - context = "store", - account_id = account_id, - error = ?err, - "Failed to retrieve thread Ids"); - MethodError::ServerPartialFail - })? + .caused_by(trc::location!())? .into_iter() .for_each(|(_, thread_id)| { thread_ids.insert(thread_id); @@ -269,7 +262,7 @@ impl JMAP { account_id: u32, document_id: u32, message_ids: &Option, - ) -> Result, MethodError> { + ) -> trc::Result> { if let (Some(message_ids), Some(mailbox_message_ids)) = ( message_ids, self.get_tag( @@ -309,7 +302,7 @@ impl JMAP { account_id: u32, path: &'x str, exact_match: bool, - ) -> Result>, MethodError> { + ) -> trc::Result>> { let path = path .split('/') .filter_map(|p| { @@ -376,7 +369,7 @@ impl JMAP { &self, account_id: u32, path: &str, - ) -> Result, MethodError> { + ) -> trc::Result> { Ok(self .mailbox_expand_path(account_id, path, true) .await? @@ -399,7 +392,7 @@ impl JMAP { &self, account_id: u32, role: &str, - ) -> Result, MethodError> { + ) -> trc::Result> { self.filter( account_id, Collection::Mailbox, diff --git a/crates/jmap/src/mailbox/query.rs b/crates/jmap/src/mailbox/query.rs index eecdd9ca..86d6ad65 100644 --- a/crates/jmap/src/mailbox/query.rs +++ b/crates/jmap/src/mailbox/query.rs @@ -23,7 +23,7 @@ impl JMAP { &self, mut request: QueryRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { let account_id = request.account_id.document_id(); let sort_as_tree = request.arguments.sort_as_tree.unwrap_or(false); let filter_as_tree = request.arguments.filter_as_tree.unwrap_or(false); @@ -80,7 +80,7 @@ impl JMAP { filters.push(cond.into()); } - other => return Err(MethodError::UnsupportedFilter(other.to_string())), + other => return Err(MethodError::UnsupportedFilter(other.to_string()).into()), } } @@ -182,7 +182,7 @@ impl JMAP { query::Comparator::field(Property::ParentId, comparator.is_ascending) } - other => return 
Err(MethodError::UnsupportedSort(other.to_string())), + other => return Err(MethodError::UnsupportedSort(other.to_string()).into()), }); } diff --git a/crates/jmap/src/mailbox/set.rs b/crates/jmap/src/mailbox/set.rs index 1ced1c69..d95187a7 100644 --- a/crates/jmap/src/mailbox/set.rs +++ b/crates/jmap/src/mailbox/set.rs @@ -6,10 +6,7 @@ use common::config::jmap::settings::SpecialUse; use jmap_proto::{ - error::{ - method::MethodError, - set::{SetError, SetErrorType}, - }, + error::set::{SetError, SetErrorType}, method::set::{SetRequest, SetResponse}, object::{ index::{IndexAs, IndexProperty, ObjectIndexBuilder}, @@ -36,6 +33,7 @@ use store::{ BatchBuilder, F_BITMAP, F_CLEAR, F_VALUE, }, }; +use trc::AddContext; use crate::{ auth::{acl::EffectiveAcl, AccessToken}, @@ -79,7 +77,7 @@ impl JMAP { &self, mut request: SetRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { // Prepare response let account_id = request.account_id.document_id(); let on_destroy_remove_emails = request.arguments.on_destroy_remove_emails.unwrap_or(false); @@ -130,7 +128,7 @@ impl JMAP { ctx.mailbox_ids.insert(document_id); ctx.response.created(id, document_id); } - Err(store::Error::AssertValueFailed) => { + Err(err) if err.matches(trc::Cause::AssertValue) => { ctx.response.not_created.append( id, SetError::forbidden().with_description( @@ -140,13 +138,7 @@ impl JMAP { continue 'create; } Err(err) => { - tracing::error!( - event = "error", - context = "mailbox_set", - account_id = account_id, - error = ?err, - "Failed to update mailbox(es)."); - return Err(MethodError::ServerPartialFail); + return Err(err.caused_by(trc::location!())); } } } @@ -229,20 +221,14 @@ impl JMAP { Ok(_) => { changes.log_update(Collection::Mailbox, document_id); } - Err(store::Error::AssertValueFailed) => { + Err(err) if err.matches(trc::Cause::AssertValue) => { ctx.response.not_updated.append(id, SetError::forbidden().with_description( "Another process modified this mailbox, please try again.", 
)); continue 'update; } Err(err) => { - tracing::error!( - event = "error", - context = "mailbox_set", - account_id = account_id, - error = ?err, - "Failed to update mailbox(es)."); - return Err(MethodError::ServerPartialFail); + return Err(err.caused_by(trc::location!())); } } } @@ -306,7 +292,7 @@ impl JMAP { changes: &mut ChangeLogBuilder, access_token: &AccessToken, remove_emails: bool, - ) -> Result, MethodError> { + ) -> trc::Result> { // Internal folders cannot be deleted #[cfg(feature = "test_mode")] if [INBOX_ID, TRASH_ID].contains(&document_id) && !access_token.is_super_user() { @@ -405,7 +391,7 @@ impl JMAP { Collection::Email, Id::from_parts(thread_id, message_id), ), - Err(store::Error::AssertValueFailed) => { + Err(err) if err.matches(trc::Cause::AssertValue) => { return Ok(Err(SetError::forbidden().with_description( concat!( "Another process modified a message in this mailbox ", @@ -414,15 +400,7 @@ impl JMAP { ))); } Err(err) => { - tracing::error!( - event = "error", - context = "mailbox_set", - account_id = account_id, - mailbox_id = document_id, - message_id = message_id, - error = ?err, - "Failed to update message while deleting mailbox."); - return Err(MethodError::ServerPartialFail); + return Err(err.caused_by(trc::location!())); } } } else { @@ -491,21 +469,12 @@ impl JMAP { changes.log_delete(Collection::Mailbox, document_id); Ok(Ok(did_remove_emails)) } - Err(store::Error::AssertValueFailed) => Ok(Err(SetError::forbidden() + Err(err) if err.matches(trc::Cause::AssertValue) => Ok(Err(SetError::forbidden() .with_description(concat!( "Another process modified this mailbox ", "while deleting it, please try again." 
)))), - Err(err) => { - tracing::error!( - event = "error", - context = "mailbox_set", - account_id = account_id, - document_id = document_id, - error = ?err, - "Failed to delete mailbox."); - Err(MethodError::ServerPartialFail) - } + Err(err) => Err(err.caused_by(trc::location!())), } } else { Ok(Err(SetError::not_found())) @@ -518,7 +487,7 @@ impl JMAP { changes_: Object, update: Option<(u32, HashedValue>)>, ctx: &SetContext<'_>, - ) -> Result, MethodError> { + ) -> trc::Result> { // Parse properties let mut changes = Object::with_capacity(changes_.properties.len()); for (property, value) in changes_.properties { @@ -799,10 +768,7 @@ impl JMAP { .validate()) } - pub async fn mailbox_get_or_create( - &self, - account_id: u32, - ) -> Result { + pub async fn mailbox_get_or_create(&self, account_id: u32) -> trc::Result { let mut mailbox_ids = self .get_document_ids(account_id, Collection::Mailbox) .await? @@ -865,23 +831,15 @@ impl JMAP { .data .write(batch.build()) .await - .map_err(|err| { - tracing::error!( - event = "error", - context = "mailbox_get_or_create", - error = ?err, - "Failed to create mailboxes."); - MethodError::ServerPartialFail - })?; - - Ok(mailbox_ids) + .caused_by(trc::location!()) + .map(|_| mailbox_ids) } pub async fn mailbox_create_path( &self, account_id: u32, path: &str, - ) -> Result)>, MethodError> { + ) -> trc::Result)>> { let expanded_path = if let Some(expand_path) = self.mailbox_expand_path(account_id, path, false).await? 
{ expand_path diff --git a/crates/jmap/src/principal/get.rs b/crates/jmap/src/principal/get.rs index c766026f..b2ab9104 100644 --- a/crates/jmap/src/principal/get.rs +++ b/crates/jmap/src/principal/get.rs @@ -6,7 +6,6 @@ use directory::QueryBy; use jmap_proto::{ - error::method::MethodError, method::get::{GetRequest, GetResponse, RequestArguments}, object::Object, types::{collection::Collection, property::Property, state::State, value::Value}, @@ -18,7 +17,7 @@ impl JMAP { pub async fn principal_get( &self, mut request: GetRequest, - ) -> Result { + ) -> trc::Result { let ids = request.unwrap_ids(self.core.jmap.get_max_objects)?; let properties = request.unwrap_properties(&[ Property::Id, @@ -56,8 +55,7 @@ impl JMAP { .storage .directory .query(QueryBy::Id(id.document_id()), false) - .await - .map_err(|_| MethodError::ServerPartialFail)? + .await? { principal } else { diff --git a/crates/jmap/src/principal/query.rs b/crates/jmap/src/principal/query.rs index 62a63387..9e9674b3 100644 --- a/crates/jmap/src/principal/query.rs +++ b/crates/jmap/src/principal/query.rs @@ -18,7 +18,7 @@ impl JMAP { pub async fn principal_query( &self, mut request: QueryRequest, - ) -> Result { + ) -> trc::Result { let account_id = request.account_id.document_id(); let mut result_set = ResultSet { account_id, @@ -35,8 +35,7 @@ impl JMAP { .storage .directory .query(QueryBy::Name(name.as_str()), false) - .await - .map_err(|_| MethodError::ServerPartialFail)? + .await? { if is_set || result_set.results.contains(principal.id) { result_set.results = @@ -54,8 +53,7 @@ impl JMAP { for id in self .core .email_to_ids(&self.core.storage.directory, &email) - .await - .map_err(|_| MethodError::ServerPartialFail)? + .await? 
{ ids.insert(id); } @@ -67,7 +65,7 @@ impl JMAP { } } Filter::Type(_) => {} - other => return Err(MethodError::UnsupportedFilter(other.to_string())), + other => return Err(MethodError::UnsupportedFilter(other.to_string()).into()), } } diff --git a/crates/jmap/src/push/get.rs b/crates/jmap/src/push/get.rs index 08418af5..a5f9268a 100644 --- a/crates/jmap/src/push/get.rs +++ b/crates/jmap/src/push/get.rs @@ -26,7 +26,7 @@ impl JMAP { &self, mut request: GetRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { let ids = request.unwrap_ids(self.core.jmap.get_max_objects)?; let properties = request.unwrap_properties(&[ Property::Id, @@ -86,7 +86,8 @@ impl JMAP { Property::Url | Property::Keys | Property::Value => { return Err(MethodError::Forbidden( "The 'url' and 'keys' properties are not readable".to_string(), - )); + ) + .into()); } property => { result.append(property.clone(), push.remove(property)); @@ -99,7 +100,7 @@ impl JMAP { Ok(response) } - pub async fn fetch_push_subscriptions(&self, account_id: u32) -> store::Result { + pub async fn fetch_push_subscriptions(&self, account_id: u32) -> trc::Result { let mut subscriptions = Vec::new(); let document_ids = self .core @@ -127,10 +128,9 @@ impl JMAP { }) .await? .ok_or_else(|| { - store::Error::InternalError(format!( - "Could not find push subscription {}", - document_id - )) + trc::Cause::NotFound + .caused_by(trc::location!()) + .document_id(document_id) })?; let expires = subscription @@ -138,10 +138,9 @@ impl JMAP { .get(&Property::Expires) .and_then(|p| p.as_date()) .ok_or_else(|| { - store::Error::InternalError(format!( - "Missing expires property for push subscription {}", - document_id - )) + trc::Cause::Unexpected + .caused_by(trc::location!()) + .document_id(document_id) })? 
.timestamp() as u64; if expires > current_time { @@ -175,20 +174,18 @@ impl JMAP { .remove(&Property::Value) .and_then(|p| p.try_unwrap_string()) .ok_or_else(|| { - store::Error::InternalError(format!( - "Missing verificationCode property for push subscription {}", - document_id - )) + trc::Cause::Unexpected + .caused_by(trc::location!()) + .document_id(document_id) })?; let url = subscription .properties .remove(&Property::Url) .and_then(|p| p.try_unwrap_string()) .ok_or_else(|| { - store::Error::InternalError(format!( - "Missing Url property for push subscription {}", - document_id - )) + trc::Cause::Unexpected + .caused_by(trc::location!()) + .document_id(document_id) })?; if subscription diff --git a/crates/jmap/src/push/set.rs b/crates/jmap/src/push/set.rs index 9402e08f..e6f44b67 100644 --- a/crates/jmap/src/push/set.rs +++ b/crates/jmap/src/push/set.rs @@ -6,7 +6,7 @@ use base64::{engine::general_purpose, Engine}; use jmap_proto::{ - error::{method::MethodError, set::SetError}, + error::set::SetError, method::set::{RequestArguments, SetRequest, SetResponse}, object::Object, response::references::EvalObjectReferences, @@ -33,7 +33,7 @@ impl JMAP { &self, mut request: SetRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { let account_id = access_token.primary_id(); let mut push_ids = self .get_document_ids(account_id, Collection::PushSubscription) diff --git a/crates/jmap/src/quota/get.rs b/crates/jmap/src/quota/get.rs index 3c97f8d0..7449e0c6 100644 --- a/crates/jmap/src/quota/get.rs +++ b/crates/jmap/src/quota/get.rs @@ -5,7 +5,6 @@ */ use jmap_proto::{ - error::method::MethodError, method::get::{GetRequest, GetResponse, RequestArguments}, object::Object, types::{id::Id, property::Property, state::State, type_state::DataType, value::Value}, @@ -18,7 +17,7 @@ impl JMAP { &self, mut request: GetRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { let ids = request.unwrap_ids(self.core.jmap.get_max_objects)?; let 
properties = request.unwrap_properties(&[ Property::Id, diff --git a/crates/jmap/src/quota/query.rs b/crates/jmap/src/quota/query.rs index a869d2aa..fe3abbb6 100644 --- a/crates/jmap/src/quota/query.rs +++ b/crates/jmap/src/quota/query.rs @@ -5,7 +5,6 @@ */ use jmap_proto::{ - error::method::MethodError, method::query::{QueryRequest, QueryResponse, RequestArguments}, types::{id::Id, state::State}, }; @@ -17,7 +16,7 @@ impl JMAP { &self, request: QueryRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { Ok(QueryResponse { account_id: request.account_id, query_state: State::Initial, diff --git a/crates/jmap/src/quota/set.rs b/crates/jmap/src/quota/set.rs index a88349cd..6aec092a 100644 --- a/crates/jmap/src/quota/set.rs +++ b/crates/jmap/src/quota/set.rs @@ -16,6 +16,6 @@ impl JMAP { &self, account_id: u32, quota: &AccessToken, - ) -> Result { + ) -> trc::Result { } } diff --git a/crates/jmap/src/services/index.rs b/crates/jmap/src/services/index.rs index 9a6aa184..22f9de05 100644 --- a/crates/jmap/src/services/index.rs +++ b/crates/jmap/src/services/index.rs @@ -222,7 +222,7 @@ impl JMAP { .set(event.value_class(), (now() + INDEX_LOCK_EXPIRY).serialize()); match self.core.storage.data.write(batch.build()).await { Ok(_) => true, - Err(store::Error::AssertValueFailed) => { + Err(err) if err.matches(trc::Cause::AssertValue) => { tracing::trace!( context = "queue", event = "locked", @@ -253,7 +253,7 @@ impl IndexEmail { }) } - fn deserialize(key: &[u8], value: &[u8]) -> store::Result { + fn deserialize(key: &[u8], value: &[u8]) -> trc::Result { Ok(IndexEmail { seq: key.deserialize_be_u64(0)?, account_id: key.deserialize_be_u32(U64_LEN)?, @@ -265,7 +265,7 @@ impl IndexEmail { ..U64_LEN + U32_LEN + U32_LEN + BLOB_HASH_LEN + 1, ) .and_then(|bytes| BlobHash::try_from_hash_slice(bytes).ok()) - .ok_or_else(|| store::Error::InternalError("Invalid blob hash".to_string()))?, + .ok_or_else(|| trc::Error::corrupted_key(key, value.into(), trc::location!()))?, 
}) } } diff --git a/crates/jmap/src/services/ingest.rs b/crates/jmap/src/services/ingest.rs index 767f686a..49021621 100644 --- a/crates/jmap/src/services/ingest.rs +++ b/crates/jmap/src/services/ingest.rs @@ -13,11 +13,13 @@ use store::ahash::AHashMap; use crate::{ email::ingest::{IngestEmail, IngestSource}, mailbox::INBOX_ID, - IngestError, JMAP, + JMAP, }; impl JMAP { pub async fn deliver_message(&self, message: IngestMessage) -> Vec { + let todo = "trace all errors"; + // Read message let raw_message = match self .core @@ -137,21 +139,28 @@ impl JMAP { .await; } } - Err(err) => match err { - IngestError::OverQuota => { + Err(mut err) => match err.as_ref() { + trc::Cause::OverQuota => { *status = DeliveryResult::TemporaryFailure { reason: "Mailbox over quota.".into(), } } - IngestError::Temporary => { - *status = DeliveryResult::TemporaryFailure { - reason: "Transient server failure.".into(), + trc::Cause::Ingest => { + *status = DeliveryResult::PermanentFailure { + code: err + .value(trc::Key::Reason) + .and_then(|v| v.to_uint()) + .map(|n| [(n / 100) as u8, ((n % 100) / 10) as u8, (n % 10) as u8]) + .unwrap(), + reason: err + .take_value(trc::Key::Reason) + .and_then(|v| v.into_string()) + .unwrap(), } } - IngestError::Permanent { code, reason } => { - *status = DeliveryResult::PermanentFailure { - code, - reason: reason.into(), + _ => { + *status = DeliveryResult::TemporaryFailure { + reason: "Transient server failure.".into(), } } }, diff --git a/crates/jmap/src/sieve/get.rs b/crates/jmap/src/sieve/get.rs index bf82dca4..4e4c6d51 100644 --- a/crates/jmap/src/sieve/get.rs +++ b/crates/jmap/src/sieve/get.rs @@ -7,7 +7,6 @@ use std::sync::Arc; use jmap_proto::{ - error::method::MethodError, method::get::{GetRequest, GetResponse, RequestArguments}, object::Object, types::{collection::Collection, property::Property, value::Value}, @@ -27,7 +26,7 @@ impl JMAP { pub async fn sieve_script_get( &self, mut request: GetRequest, - ) -> Result { + ) -> trc::Result { let 
ids = request.unwrap_ids(self.core.jmap.get_max_objects)?; let properties = request.unwrap_properties(&[Property::Id, Property::Name, Property::BlobId]); @@ -115,7 +114,7 @@ impl JMAP { pub async fn sieve_script_get_active( &self, account_id: u32, - ) -> Result, MethodError> { + ) -> trc::Result> { // Find the currently active script if let Some(document_id) = self .filter( @@ -157,7 +156,7 @@ impl JMAP { &self, account_id: u32, name: &str, - ) -> Result, MethodError> { + ) -> trc::Result> { // Find the script by name if let Some(document_id) = self .filter( @@ -182,7 +181,7 @@ impl JMAP { &self, account_id: u32, document_id: u32, - ) -> Result<(Sieve, Object), MethodError> { + ) -> trc::Result<(Sieve, Object)> { // Obtain script object let script_object = self .get_property::>>( @@ -193,15 +192,9 @@ impl JMAP { ) .await? .ok_or_else(|| { - tracing::warn!( - context = "sieve_script_compile", - event = "error", - account_id = account_id, - document_id = document_id, - "Failed to obtain sieve script object" - ); - - MethodError::ServerPartialFail + trc::Cause::NotFound + .caused_by(trc::location!()) + .document_id(document_id) })?; // Obtain the sieve script length @@ -212,22 +205,20 @@ impl JMAP { .and_then(|v| v.as_blob_id()) .and_then(|v| (v.section.as_ref()?.size, v).into()) .ok_or_else(|| { - tracing::warn!( - context = "sieve_script_compile", - event = "error", - account_id = account_id, - document_id = document_id, - "Failed to obtain sieve script blobId" - ); - - MethodError::ServerPartialFail + trc::Cause::NotFound + .caused_by(trc::location!()) + .document_id(document_id) })?; // Obtain the sieve script blob let script_bytes = self .get_blob(&blob_id.hash, 0..usize::MAX) .await? 
- .ok_or(MethodError::ServerPartialFail)?; + .ok_or_else(|| { + trc::Cause::NotFound + .caused_by(trc::location!()) + .document_id(document_id) + })?; // Obtain the precompiled script if let Some(sieve) = script_bytes @@ -239,15 +230,9 @@ impl JMAP { // Deserialization failed, probably because the script compiler version changed match self.core.sieve.untrusted_compiler.compile( script_bytes.get(0..script_offset).ok_or_else(|| { - tracing::warn!( - context = "sieve_script_compile", - event = "error", - account_id = account_id, - document_id = document_id, - "Invalid sieve script offset" - ); - - MethodError::ServerPartialFail + trc::Cause::NotFound + .caused_by(trc::location!()) + .document_id(document_id) })?, ) { Ok(sieve) => { @@ -289,16 +274,10 @@ impl JMAP { Ok((sieve.inner, new_script_object)) } - Err(error) => { - tracing::warn!( - context = "sieve_script_compile", - event = "error", - account_id = account_id, - document_id = document_id, - reason = %error, - "Failed to compile sieve script"); - Err(MethodError::ServerPartialFail) - } + Err(error) => Err(trc::Cause::Unexpected + .caused_by(trc::location!()) + .reason(error) + .details("Failed to compile Sieve script")), } } } diff --git a/crates/jmap/src/sieve/ingest.rs b/crates/jmap/src/sieve/ingest.rs index 10c8ca15..9587d521 100644 --- a/crates/jmap/src/sieve/ingest.rs +++ b/crates/jmap/src/sieve/ingest.rs @@ -16,12 +16,13 @@ use store::{ ahash::AHashSet, write::{now, BatchBuilder, Bincode, F_VALUE}, }; +use trc::AddContext; use crate::{ email::ingest::{IngestEmail, IngestSource, IngestedEmail}, mailbox::{INBOX_ID, TRASH_ID}, sieve::SeenIdHash, - IngestError, JMAP, + JMAP, }; use super::ActiveScript; @@ -41,22 +42,21 @@ impl JMAP { envelope_to: &str, account_id: u32, mut active_script: ActiveScript, - ) -> Result { + ) -> trc::Result { // Parse message let message = if let Some(message) = MessageParser::new().parse(raw_message) { message } else { - return Err(IngestError::Permanent { - code: [5, 5, 0], - 
reason: "Failed to parse message.".to_string(), - }); + return Err(trc::Cause::Ingest + .ctx(trc::Key::Code, 550) + .ctx(trc::Key::Reason, "Failed to parse e-mail message.")); }; // Obtain mailboxIds let mailbox_ids = self .mailbox_get_or_create(account_id) .await - .map_err(|_| IngestError::Temporary)?; + .caused_by(trc::location!())?; // Create Sieve instance let mut instance = self.core.sieve.untrusted_runtime.filter_parsed(message); @@ -74,8 +74,8 @@ impl JMAP { (p.quota as i64, p.emails.into_iter().next()) } Ok(None) => (0, None), - Err(_) => { - return Err(IngestError::Temporary); + Err(err) => { + return Err(err.caused_by(trc::location!())); } }; @@ -462,10 +462,9 @@ impl JMAP { } if let Some(reject_reason) = reject_reason { - Err(IngestError::Permanent { - code: [5, 7, 1], - reason: reject_reason, - }) + Err(trc::Cause::Ingest + .ctx(trc::Key::Code, 571) + .ctx(trc::Key::Reason, reject_reason)) } else if has_delivered || last_temp_error.is_none() { Ok(ingested_message) } else { diff --git a/crates/jmap/src/sieve/query.rs b/crates/jmap/src/sieve/query.rs index 1ceb1c1a..9a32c4b1 100644 --- a/crates/jmap/src/sieve/query.rs +++ b/crates/jmap/src/sieve/query.rs @@ -19,7 +19,7 @@ impl JMAP { pub async fn sieve_script_query( &self, mut request: QueryRequest, - ) -> Result { + ) -> trc::Result { let account_id = request.account_id.document_id(); let mut filters = Vec::with_capacity(request.filter.len()); @@ -32,7 +32,7 @@ impl JMAP { Filter::And | Filter::Or | Filter::Not | Filter::Close => { filters.push(cond.into()); } - other => return Err(MethodError::UnsupportedFilter(other.to_string())), + other => return Err(MethodError::UnsupportedFilter(other.to_string()).into()), } } @@ -57,7 +57,7 @@ impl JMAP { SortProperty::IsActive => { query::Comparator::field(Property::IsActive, comparator.is_ascending) } - other => return Err(MethodError::UnsupportedSort(other.to_string())), + other => return Err(MethodError::UnsupportedSort(other.to_string()).into()), }); } diff 
--git a/crates/jmap/src/sieve/set.rs b/crates/jmap/src/sieve/set.rs index 3a68a7b2..3512b950 100644 --- a/crates/jmap/src/sieve/set.rs +++ b/crates/jmap/src/sieve/set.rs @@ -5,10 +5,7 @@ */ use jmap_proto::{ - error::{ - method::MethodError, - set::{SetError, SetErrorType}, - }, + error::set::{SetError, SetErrorType}, method::set::{SetRequest, SetResponse}, object::{ index::{IndexAs, IndexProperty, ObjectIndexBuilder}, @@ -61,7 +58,7 @@ impl JMAP { &self, mut request: SetRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { let account_id = request.account_id.document_id(); let mut sieve_ids = self .get_document_ids(account_id, Collection::SieveScript) @@ -162,14 +159,9 @@ impl JMAP { .inner .blob_id() .ok_or_else(|| { - tracing::warn!( - event = "error", - context = "sieve_set", - account_id = account_id, - document_id = document_id, - "Sieve does not contain a blobId." - ); - MethodError::ServerPartialFail + trc::Cause::NotFound + .caused_by(trc::location!()) + .document_id(document_id) })? .clone(); @@ -233,20 +225,14 @@ impl JMAP { changes.log_update(Collection::SieveScript, document_id); match self.core.storage.data.write(batch.build()).await { Ok(_) => (), - Err(store::Error::AssertValueFailed) => { + Err(err) if err.matches(trc::Cause::AssertValue) => { ctx.response.not_updated.append(id, SetError::forbidden().with_description( "Another process modified this sieve, please try again.", )); continue 'update; } Err(err) => { - tracing::error!( - event = "error", - context = "sieve_set", - account_id = account_id, - error = ?err, - "Failed to update sieve script(s)."); - return Err(MethodError::ServerPartialFail); + return Err(err.caused_by(trc::location!())); } } } @@ -339,7 +325,7 @@ impl JMAP { account_id: u32, document_id: u32, fail_if_active: bool, - ) -> Result { + ) -> trc::Result { // Fetch record let obj = self .get_property::>>( @@ -350,14 +336,9 @@ impl JMAP { ) .await? 
.ok_or_else(|| { - tracing::warn!( - event = "error", - context = "sieve_script_delete", - account_id = account_id, - document_id = document_id, - "Sieve script not found." - ); - MethodError::ServerPartialFail + trc::Cause::NotFound + .caused_by(trc::location!()) + .document_id(document_id) })?; // Make sure the script is not active @@ -373,14 +354,9 @@ impl JMAP { // Delete record let mut batch = BatchBuilder::new(); let blob_id = obj.inner.blob_id().ok_or_else(|| { - tracing::warn!( - event = "error", - context = "sieve_script_delete", - account_id = account_id, - document_id = document_id, - "Sieve does not contain a blobId." - ); - MethodError::ServerPartialFail + trc::Cause::NotFound + .caused_by(trc::location!()) + .document_id(document_id) })?; batch .with_account_id(account_id) @@ -405,7 +381,7 @@ impl JMAP { changes_: Object, update: Option<(u32, HashedValue>)>, ctx: &SetContext<'_>, - ) -> Result>), SetError>, MethodError> { + ) -> trc::Result>), SetError>> { // Vacation script cannot be modified if matches!(update.as_ref().and_then(|(_, obj)| obj.inner.properties.get(&Property::Name)), Some(Value::Text ( value )) if value.eq_ignore_ascii_case("vacation")) { @@ -562,7 +538,7 @@ impl JMAP { &self, account_id: u32, mut activate_id: Option, - ) -> Result, MethodError> { + ) -> trc::Result> { let mut changed_ids = Vec::new(); // Find the currently active script let mut active_ids = self @@ -640,17 +616,11 @@ impl JMAP { if !changed_ids.is_empty() { match self.core.storage.data.write(batch.build()).await { Ok(_) => (), - Err(store::Error::AssertValueFailed) => { + Err(err) if err.matches(trc::Cause::AssertValue) => { return Ok(vec![]); } Err(err) => { - tracing::error!( - event = "error", - context = "sieve_activate_script", - account_id = account_id, - error = ?err, - "Failed to activate sieve script(s)."); - return Err(MethodError::ServerPartialFail); + return Err(err.caused_by(trc::location!())); } } } diff --git a/crates/jmap/src/sieve/validate.rs 
b/crates/jmap/src/sieve/validate.rs index 10046600..4fd9fce8 100644 --- a/crates/jmap/src/sieve/validate.rs +++ b/crates/jmap/src/sieve/validate.rs @@ -5,10 +5,7 @@ */ use jmap_proto::{ - error::{ - method::MethodError, - set::{SetError, SetErrorType}, - }, + error::set::{SetError, SetErrorType}, method::validate::{ValidateSieveScriptRequest, ValidateSieveScriptResponse}, }; @@ -19,7 +16,7 @@ impl JMAP { &self, request: ValidateSieveScriptRequest, access_token: &AccessToken, - ) -> Result { + ) -> trc::Result { Ok(ValidateSieveScriptResponse { account_id: request.account_id, error: match self diff --git a/crates/jmap/src/submission/get.rs b/crates/jmap/src/submission/get.rs index 4c6db4a3..f08cb166 100644 --- a/crates/jmap/src/submission/get.rs +++ b/crates/jmap/src/submission/get.rs @@ -5,7 +5,6 @@ */ use jmap_proto::{ - error::method::MethodError, method::get::{GetRequest, GetResponse, RequestArguments}, object::Object, types::{collection::Collection, property::Property, value::Value}, @@ -18,7 +17,7 @@ impl JMAP { pub async fn email_submission_get( &self, mut request: GetRequest, - ) -> Result { + ) -> trc::Result { let ids = request.unwrap_ids(self.core.jmap.get_max_objects)?; let properties = request.unwrap_properties(&[ Property::Id, diff --git a/crates/jmap/src/submission/query.rs b/crates/jmap/src/submission/query.rs index 7f30b4c4..7f8aad44 100644 --- a/crates/jmap/src/submission/query.rs +++ b/crates/jmap/src/submission/query.rs @@ -19,7 +19,7 @@ impl JMAP { pub async fn email_submission_query( &self, mut request: QueryRequest, - ) -> Result { + ) -> trc::Result { let account_id = request.account_id.document_id(); let mut filters = Vec::with_capacity(request.filter.len()); @@ -60,7 +60,7 @@ impl JMAP { Filter::And | Filter::Or | Filter::Not | Filter::Close => { filters.push(cond.into()); } - other => return Err(MethodError::UnsupportedFilter(other.to_string())), + other => return Err(MethodError::UnsupportedFilter(other.to_string()).into()), } } @@ -88,7 
+88,7 @@ impl JMAP { SortProperty::SentAt => { query::Comparator::field(Property::SendAt, comparator.is_ascending) } - other => return Err(MethodError::UnsupportedSort(other.to_string())), + other => return Err(MethodError::UnsupportedSort(other.to_string()).into()), }); } diff --git a/crates/jmap/src/submission/set.rs b/crates/jmap/src/submission/set.rs index 2fc91130..54e64a16 100644 --- a/crates/jmap/src/submission/set.rs +++ b/crates/jmap/src/submission/set.rs @@ -8,10 +8,7 @@ use std::{collections::HashMap, sync::Arc}; use common::listener::{stream::NullIo, ServerInstance}; use jmap_proto::{ - error::{ - method::MethodError, - set::{SetError, SetErrorType}, - }, + error::set::{SetError, SetErrorType}, method::set::{self, SetRequest, SetResponse}, object::{ email_submission::SetArguments, @@ -56,7 +53,7 @@ impl JMAP { mut request: SetRequest, instance: &Arc, next_call: &mut Option>, - ) -> Result { + ) -> trc::Result { let account_id = request.account_id.document_id(); let mut response = SetResponse::from_request(&request, self.core.jmap.set_max_objects)?; let will_destroy = request.unwrap_destroy(); @@ -295,7 +292,7 @@ impl JMAP { response: &SetResponse, instance: &Arc, object: Object, - ) -> Result, SetError>, MethodError> { + ) -> trc::Result, SetError>> { let mut submission = Object::with_capacity(object.properties.len()); let mut email_id = u32::MAX; let mut identity_id = u32::MAX; diff --git a/crates/jmap/src/thread/get.rs b/crates/jmap/src/thread/get.rs index da06d2d9..39403806 100644 --- a/crates/jmap/src/thread/get.rs +++ b/crates/jmap/src/thread/get.rs @@ -5,12 +5,12 @@ */ use jmap_proto::{ - error::method::MethodError, method::get::{GetRequest, GetResponse, RequestArguments}, object::Object, types::{collection::Collection, id::Id, property::Property}, }; use store::query::{sort::Pagination, Comparator, ResultSet}; +use trc::AddContext; use crate::JMAP; @@ -18,7 +18,7 @@ impl JMAP { pub async fn thread_get( &self, mut request: GetRequest, - ) -> 
Result { + ) -> trc::Result { let account_id = request.account_id.document_id(); let ids = if let Some(ids) = request.unwrap_ids(self.core.jmap.get_max_objects)? { ids @@ -60,15 +60,7 @@ impl JMAP { Pagination::new(document_ids.len() as usize, 0, None, 0), ) .await - .map_err(|err| { - tracing::error!(event = "error", - context = "store", - account_id = account_id, - collection = "email", - error = ?err, - "Thread emailIds sort failed"); - MethodError::ServerPartialFail - })? + .caused_by(trc::location!())? .ids .into_iter() .map(|id| Id::from_parts(thread_id, id as u32)) diff --git a/crates/jmap/src/vacation/get.rs b/crates/jmap/src/vacation/get.rs index 1dd3a6d1..087d5561 100644 --- a/crates/jmap/src/vacation/get.rs +++ b/crates/jmap/src/vacation/get.rs @@ -5,7 +5,6 @@ */ use jmap_proto::{ - error::method::MethodError, method::get::{GetRequest, GetResponse, RequestArguments}, object::Object, request::reference::MaybeReference, @@ -19,7 +18,7 @@ impl JMAP { pub async fn vacation_response_get( &self, mut request: GetRequest, - ) -> Result { + ) -> trc::Result { let account_id = request.account_id.document_id(); let properties = request.unwrap_properties(&[ Property::Id, @@ -101,10 +100,7 @@ impl JMAP { Ok(response) } - pub async fn get_vacation_sieve_script_id( - &self, - account_id: u32, - ) -> Result, MethodError> { + pub async fn get_vacation_sieve_script_id(&self, account_id: u32) -> trc::Result> { self.filter( account_id, Collection::SieveScript, diff --git a/crates/jmap/src/vacation/set.rs b/crates/jmap/src/vacation/set.rs index bc825813..749e5ac5 100644 --- a/crates/jmap/src/vacation/set.rs +++ b/crates/jmap/src/vacation/set.rs @@ -39,7 +39,7 @@ impl JMAP { pub async fn vacation_response_set( &self, mut request: SetRequest, - ) -> Result { + ) -> trc::Result { let account_id = request.account_id.document_id(); let mut response = self .prepare_set_response(&request, Collection::SieveScript) @@ -53,7 +53,8 @@ impl JMAP { (Some(create), Some(update)) if 
!create.is_empty() && !update.is_empty() => { return Err(MethodError::InvalidArguments( "Creating and updating on the same request is not allowed.".into(), - )); + ) + .into()); } (Some(create), _) if !create.is_empty() => { for (id, obj) in create { @@ -205,7 +206,7 @@ impl JMAP { == Some(&Value::Bool(true)); value }) - .ok_or(MethodError::ServerPartialFail)? + .ok_or_else(|| trc::Cause::NotFound.caused_by(trc::location!()))? .into() } else { None @@ -249,14 +250,9 @@ impl JMAP { if let Some(current) = obj.current() { let current_blob_id = current.inner.blob_id().ok_or_else(|| { - tracing::warn!( - event = "error", - context = "vacation_response_set", - account_id = account_id, - document_id = document_id, - "Sieve object does not contain a blobId." - ); - MethodError::ServerPartialFail + trc::Cause::NotFound + .caused_by(trc::location!()) + .document_id(document_id.unwrap_or(u32::MAX)) })?; // Unlink previous blob @@ -286,9 +282,7 @@ impl JMAP { response.new_state = Some(change_id.into()); match document_id { Some(document_id) => document_id, - None => ids - .last_document_id() - .map_err(|_| MethodError::ServerPartialFail)?, + None => ids.last_document_id()?, } } else { document_id.unwrap_or(u32::MAX) @@ -335,7 +329,7 @@ impl JMAP { Ok(response) } - fn build_script(&self, obj: &mut ObjectIndexBuilder) -> Result, MethodError> { + fn build_script(&self, obj: &mut ObjectIndexBuilder) -> trc::Result> { // Build Sieve script let mut script = Vec::with_capacity(1024); script.extend_from_slice(b"require [\"vacation\", \"relational\", \"date\"];\r\n\r\n"); @@ -435,10 +429,10 @@ impl JMAP { Ok(script) } - Err(err) => { - tracing::error!("Vacation Sieve Script failed to compile: {}", err); - Err(MethodError::ServerPartialFail) - } + Err(err) => Err(trc::Cause::Unexpected + .caused_by(trc::location!()) + .reason(err) + .details("Vacation Sieve Script failed to compile.")), } } } diff --git a/crates/main/Cargo.toml b/crates/main/Cargo.toml index ba4f83ba..965e4153 100644 
--- a/crates/main/Cargo.toml +++ b/crates/main/Cargo.toml @@ -25,6 +25,7 @@ pop3 = { path = "../pop3" } managesieve = { path = "../managesieve" } common = { path = "../common" } directory = { path = "../directory" } +trc = { path = "../trc" } utils = { path = "../utils" } tokio = { version = "1.23", features = ["full"] } tracing = "0.1" diff --git a/crates/pop3/Cargo.toml b/crates/pop3/Cargo.toml index 54b4aafb..6a573d0e 100644 --- a/crates/pop3/Cargo.toml +++ b/crates/pop3/Cargo.toml @@ -10,6 +10,7 @@ common = { path = "../common" } jmap = { path = "../jmap" } imap = { path = "../imap" } utils = { path = "../utils" } +trc = { path = "../trc" } jmap_proto = { path = "../jmap-proto" } mail-parser = { version = "0.9", features = ["full_encoding", "ludicrous_mode"] } mail-send = { version = "0.4", default-features = false, features = ["cram-md5", "ring", "tls12"] } diff --git a/crates/pop3/src/mailbox.rs b/crates/pop3/src/mailbox.rs index 90b08103..157164f3 100644 --- a/crates/pop3/src/mailbox.rs +++ b/crates/pop3/src/mailbox.rs @@ -36,7 +36,7 @@ pub struct Message { } impl Session { - pub async fn fetch_mailbox(&self, account_id: u32) -> Result { + pub async fn fetch_mailbox(&self, account_id: u32) -> trc::Result { // Obtain message ids let message_ids = self .jmap diff --git a/crates/smtp/Cargo.toml b/crates/smtp/Cargo.toml index d4627a8f..077fffeb 100644 --- a/crates/smtp/Cargo.toml +++ b/crates/smtp/Cargo.toml @@ -17,6 +17,7 @@ utils = { path = "../utils" } nlp = { path = "../nlp" } directory = { path = "../directory" } common = { path = "../common" } +trc = { path = "../trc" } mail-auth = { version = "0.4" } mail-send = { version = "0.4", default-features = false, features = ["cram-md5", "ring", "tls12"] } mail-parser = { version = "0.9", features = ["full_encoding", "ludicrous_mode"] } diff --git a/crates/smtp/src/inbound/vrfy.rs b/crates/smtp/src/inbound/vrfy.rs index 2b383313..dfc722b7 100644 --- a/crates/smtp/src/inbound/vrfy.rs +++ 
b/crates/smtp/src/inbound/vrfy.rs @@ -5,7 +5,6 @@ */ use common::listener::SessionStream; -use directory::DirectoryError; use crate::core::Session; use std::fmt::Write; @@ -44,7 +43,7 @@ impl Session { self.write(result.as_bytes()).await } - Ok(_) | Err(DirectoryError::Unsupported) => { + Ok(_) => { tracing::debug!(parent: &self.span, context = "vrfy", event = "not-found", @@ -52,14 +51,18 @@ impl Session { self.write(b"550 5.1.2 Address not found.\r\n").await } - Err(_) => { + Err(err) => { tracing::debug!(parent: &self.span, context = "vrfy", event = "temp-fail", address = &address); - self.write(b"252 2.4.3 Unable to verify address at this time.\r\n") - .await + if !err.matches(trc::Cause::Unsupported) { + self.write(b"252 2.4.3 Unable to verify address at this time.\r\n") + .await + } else { + self.write(b"550 5.1.2 Address not found.\r\n").await + } } } } @@ -105,7 +108,7 @@ impl Session { address = &address); self.write(result.as_bytes()).await } - Ok(_) | Err(DirectoryError::Unsupported) => { + Ok(_) => { tracing::debug!(parent: &self.span, context = "expn", event = "not-found", @@ -113,14 +116,18 @@ impl Session { self.write(b"550 5.1.2 Mailing list not found.\r\n").await } - Err(_) => { + Err(err) => { tracing::debug!(parent: &self.span, context = "expn", event = "temp-fail", address = &address); - self.write(b"252 2.4.3 Unable to expand mailing list at this time.\r\n") - .await + if !err.matches(trc::Cause::Unsupported) { + self.write(b"252 2.4.3 Unable to expand mailing list at this time.\r\n") + .await + } else { + self.write(b"550 5.1.2 Mailing list not found.\r\n").await + } } } } diff --git a/crates/smtp/src/queue/spool.rs b/crates/smtp/src/queue/spool.rs index c09747e2..a6758ff1 100644 --- a/crates/smtp/src/queue/spool.rs +++ b/crates/smtp/src/queue/spool.rs @@ -127,7 +127,7 @@ impl SMTP { ); match self.core.storage.data.write(batch.build()).await { Ok(_) => Some(event), - Err(store::Error::AssertValueFailed) => { + Err(err) if 
err.matches(trc::Cause::AssertValue) => { tracing::debug!( context = "queue", event = "locked", diff --git a/crates/smtp/src/reporting/dmarc.rs b/crates/smtp/src/reporting/dmarc.rs index d947d53e..09cd1e9d 100644 --- a/crates/smtp/src/reporting/dmarc.rs +++ b/crates/smtp/src/reporting/dmarc.rs @@ -415,7 +415,7 @@ impl SMTP { event: &ReportEvent, rua: &mut Vec, mut serialized_size: Option<&mut serde_json::Serializer>, - ) -> store::Result> { + ) -> trc::Result> { // Deserialize report let dmarc = match self .core diff --git a/crates/smtp/src/reporting/scheduler.rs b/crates/smtp/src/reporting/scheduler.rs index 61235e8f..3b198928 100644 --- a/crates/smtp/src/reporting/scheduler.rs +++ b/crates/smtp/src/reporting/scheduler.rs @@ -173,7 +173,7 @@ impl SMTP { ); match self.core.storage.data.write(batch.build()).await { Ok(_) => true, - Err(store::Error::AssertValueFailed) => { + Err(err) if err.matches(trc::Cause::AssertValue) => { tracing::debug!( context = "queue", event = "locked", diff --git a/crates/smtp/src/reporting/tls.rs b/crates/smtp/src/reporting/tls.rs index ea5723ac..248373d4 100644 --- a/crates/smtp/src/reporting/tls.rs +++ b/crates/smtp/src/reporting/tls.rs @@ -238,7 +238,7 @@ impl SMTP { events: &[ReportEvent], rua: &mut Vec, mut serialized_size: Option<&mut serde_json::Serializer>, - ) -> store::Result> { + ) -> trc::Result> { let (domain_name, event_from, event_to, policy) = events .first() .map(|e| (e.domain.as_str(), e.seq_id, e.due, e.policy_hash)) diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index aca78644..4032ef3b 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -7,6 +7,7 @@ resolver = "2" [dependencies] utils = { path = "../utils" } nlp = { path = "../nlp" } +trc = { path = "../trc" } rocksdb = { version = "0.22", optional = true, features = ["multi-threaded-cf"] } foundationdb = { version = "0.9.0", features = ["embedded-fdb-include", "fdb-7_1"], optional = true } rusqlite = { version = "0.31.0", features 
= ["bundled"], optional = true } @@ -26,7 +27,6 @@ parking_lot = "0.12.1" lru-cache = { version = "0.1.2", optional = true } num_cpus = { version = "1.15.0", optional = true } blake3 = "1.3.3" -tracing = "0.1" lz4_flex = { version = "0.11", default-features = false } deadpool-postgres = { version = "0.14", optional = true } tokio-postgres = { version = "0.7.10", optional = true } diff --git a/crates/store/src/backend/elastic/index.rs b/crates/store/src/backend/elastic/index.rs index e5fb2afb..538af1fb 100644 --- a/crates/store/src/backend/elastic/index.rs +++ b/crates/store/src/backend/elastic/index.rs @@ -16,7 +16,7 @@ use crate::{ fts::{index::FtsDocument, Field}, }; -use super::ElasticSearchStore; +use super::{assert_success, ElasticSearchStore}; #[derive(Serialize, Deserialize, Default)] struct Document<'x> { @@ -38,23 +38,16 @@ impl ElasticSearchStore { pub async fn fts_index + Display + Clone + std::fmt::Debug>( &self, document: FtsDocument<'_, T>, - ) -> crate::Result<()> { - self.index - .index(IndexParts::Index(INDEX_NAMES[document.collection as usize])) - .body(Document::from(document)) - .send() - .await - .map_err(Into::into) - .and_then(|response| { - if response.status_code().is_success() { - Ok(()) - } else { - Err(crate::Error::InternalError(format!( - "Failed to index document: {:?}", - response - ))) - } - }) + ) -> trc::Result<()> { + assert_success( + self.index + .index(IndexParts::Index(INDEX_NAMES[document.collection as usize])) + .body(Document::from(document)) + .send() + .await, + ) + .await + .map(|_| ()) } pub async fn fts_remove( @@ -62,63 +55,49 @@ impl ElasticSearchStore { account_id: u32, collection: u8, document_ids: &impl DocumentSet, - ) -> crate::Result<()> { + ) -> trc::Result<()> { let document_ids = document_ids.iterate().collect::>(); - self.index - .delete_by_query(DeleteByQueryParts::Index(&[ - INDEX_NAMES[collection as usize] - ])) - .body(json!({ - "query": { - "bool": { - "must": [ - { "match": { "account_id": account_id 
} }, - { "terms": { "document_id": document_ids } } - ] + assert_success( + self.index + .delete_by_query(DeleteByQueryParts::Index(&[ + INDEX_NAMES[collection as usize] + ])) + .body(json!({ + "query": { + "bool": { + "must": [ + { "match": { "account_id": account_id } }, + { "terms": { "document_id": document_ids } } + ] + } } - } - })) - .send() - .await - .map_err(Into::into) - .and_then(|response| { - if response.status_code().is_success() { - Ok(()) - } else { - Err(crate::Error::InternalError(format!( - "Failed to remove document: {:?}", - response - ))) - } - }) + })) + .send() + .await, + ) + .await + .map(|_| ()) } - pub async fn fts_remove_all(&self, account_id: u32) -> crate::Result<()> { - self.index - .delete_by_query(DeleteByQueryParts::Index(INDEX_NAMES)) - .body(json!({ - "query": { - "bool": { - "must": [ - { "match": { "account_id": account_id } }, - ] + pub async fn fts_remove_all(&self, account_id: u32) -> trc::Result<()> { + assert_success( + self.index + .delete_by_query(DeleteByQueryParts::Index(INDEX_NAMES)) + .body(json!({ + "query": { + "bool": { + "must": [ + { "match": { "account_id": account_id } }, + ] + } } - } - })) - .send() - .await - .map_err(Into::into) - .and_then(|response| { - if response.status_code().is_success() { - Ok(()) - } else { - Err(crate::Error::InternalError(format!( - "Failed to remove document: {:?}", - response - ))) - } - }) + })) + .send() + .await, + ) + .await + .map(|_| ()) } } diff --git a/crates/store/src/backend/elastic/mod.rs b/crates/store/src/backend/elastic/mod.rs index fb00057c..99cb998d 100644 --- a/crates/store/src/backend/elastic/mod.rs +++ b/crates/store/src/backend/elastic/mod.rs @@ -8,7 +8,8 @@ use elasticsearch::{ auth::Credentials, cert::CertificateValidation, http::{ - transport::{BuildError, SingleNodeConnectionPool, Transport, TransportBuilder}, + response::Response, + transport::{SingleNodeConnectionPool, Transport, TransportBuilder}, StatusCode, Url, }, indices::{IndicesCreateParts, 
IndicesExistsParts}, @@ -103,13 +104,14 @@ impl ElasticSearchStore { Some(es) } - async fn create_index(&self, shards: usize, replicas: usize) -> crate::Result<()> { + async fn create_index(&self, shards: usize, replicas: usize) -> trc::Result<()> { let exists = self .index .indices() .exists(IndicesExistsParts::Index(&[INDEX_NAMES[0]])) .send() - .await?; + .await + .map_err(|err| trc::Cause::ElasticSearch.reason(err))?; if exists.status_code() == StatusCode::NOT_FOUND { let response = self @@ -165,28 +167,27 @@ impl ElasticSearchStore { } })) .send() - .await?; + .await; - if !response.status_code().is_success() { - return Err(crate::Error::InternalError(format!( - "Error while creating ElasticSearch index: {:?}", - response - ))); - } + assert_success(response).await?; } Ok(()) } } -impl From for crate::Error { - fn from(value: Error) -> Self { - crate::Error::InternalError(format!("ElasticSearch error: {}", value)) - } -} - -impl From for crate::Error { - fn from(value: BuildError) -> Self { - crate::Error::InternalError(format!("ElasticSearch build error: {}", value)) +pub(crate) async fn assert_success(response: Result) -> trc::Result { + match response { + Ok(response) => { + let status = response.status_code(); + if status.is_success() { + Ok(response) + } else { + Err(trc::Cause::ElasticSearch + .reason(response.text().await.unwrap_or_default()) + .ctx(trc::Key::Code, status.as_u16())) + } + } + Err(err) => Err(trc::Cause::ElasticSearch.reason(err)), } } diff --git a/crates/store/src/backend/elastic/query.rs b/crates/store/src/backend/elastic/query.rs index 39814c4a..b2448f7d 100644 --- a/crates/store/src/backend/elastic/query.rs +++ b/crates/store/src/backend/elastic/query.rs @@ -12,7 +12,7 @@ use serde_json::{json, Value}; use crate::fts::{Field, FtsFilter}; -use super::{ElasticSearchStore, INDEX_NAMES}; +use super::{assert_success, ElasticSearchStore, INDEX_NAMES}; impl ElasticSearchStore { pub async fn fts_query + Display + Clone + std::fmt::Debug>( @@ 
-20,7 +20,7 @@ impl ElasticSearchStore { account_id: u32, collection: impl Into, filters: Vec>, - ) -> crate::Result { + ) -> trc::Result { let mut stack: Vec<(FtsFilter, Vec)> = vec![]; let mut conditions = vec![json!({ "match": { "account_id": account_id } })]; let mut logical_op = FtsFilter::And; @@ -85,32 +85,36 @@ impl ElasticSearchStore { } // TODO implement pagination - let response = self - .index - .search(SearchParts::Index(&[ - INDEX_NAMES[collection.into() as usize] - ])) - .body(json!({ - "query": { - "bool": { - "must": conditions, - } - }, - "size": 10000, - "_source": ["document_id"] - })) - .send() - .await? - .error_for_status_code()?; + let response = assert_success( + self.index + .search(SearchParts::Index(&[ + INDEX_NAMES[collection.into() as usize] + ])) + .body(json!({ + "query": { + "bool": { + "must": conditions, + } + }, + "size": 10000, + "_source": ["document_id"] + })) + .send() + .await, + ) + .await?; - let json: Value = response.json().await?; + let json: Value = response + .json() + .await + .map_err(|err| trc::Cause::ElasticSearch.reason(err))?; let mut results = RoaringBitmap::new(); for hit in json["hits"]["hits"].as_array().ok_or_else(|| { - crate::Error::InternalError("Invalid response from ElasticSearch".to_string()) + trc::Cause::ElasticSearch.reason("Invalid response from ElasticSearch") })? { results.insert(hit["_source"]["document_id"].as_u64().ok_or_else(|| { - crate::Error::InternalError("Invalid response from ElasticSearch".to_string()) + trc::Cause::ElasticSearch.reason("Invalid response from ElasticSearch") })? 
as u32); } diff --git a/crates/store/src/backend/foundationdb/blob.rs b/crates/store/src/backend/foundationdb/blob.rs index 79542f0d..ab4659f0 100644 --- a/crates/store/src/backend/foundationdb/blob.rs +++ b/crates/store/src/backend/foundationdb/blob.rs @@ -10,7 +10,7 @@ use foundationdb::{options::StreamingMode, KeySelector, RangeOption}; use futures::TryStreamExt; use utils::BLOB_HASH_LEN; -use crate::{write::key::KeySerializer, SUBSPACE_BLOBS}; +use crate::{backend::foundationdb::into_error, write::key::KeySerializer, SUBSPACE_BLOBS}; use super::{FdbStore, MAX_VALUE_SIZE}; @@ -19,7 +19,7 @@ impl FdbStore { &self, key: &[u8], range: Range, - ) -> crate::Result>> { + ) -> trc::Result>> { let block_start = range.start / MAX_VALUE_SIZE; let bytes_start = range.start % MAX_VALUE_SIZE; let block_end = (range.end / MAX_VALUE_SIZE) + 1; @@ -49,7 +49,7 @@ impl FdbStore { let mut blob_data: Option> = None; let blob_range = range.end - range.start; - 'outer: while let Some(value) = values.try_next().await? { + 'outer: while let Some(value) = values.try_next().await.map_err(into_error)? 
{ let key = value.key(); if key.len() == key_len { let value = value.value(); @@ -92,7 +92,7 @@ impl FdbStore { Ok(blob_data) } - pub(crate) async fn put_blob(&self, key: &[u8], data: &[u8]) -> crate::Result<()> { + pub(crate) async fn put_blob(&self, key: &[u8], data: &[u8]) -> trc::Result<()> { const N_CHUNKS: usize = (1 << 5) - 1; let last_chunk = std::cmp::max( (data.len() / MAX_VALUE_SIZE) @@ -103,7 +103,7 @@ impl FdbStore { }, 1, ) - 1; - let mut trx = self.db.create_trx()?; + let mut trx = self.db.create_trx().map_err(into_error)?; for (chunk_pos, chunk_bytes) in data.chunks(MAX_VALUE_SIZE).enumerate() { trx.set( @@ -117,7 +117,7 @@ impl FdbStore { if chunk_pos == last_chunk || (chunk_pos > 0 && chunk_pos % N_CHUNKS == 0) { self.commit(trx, false).await?; if chunk_pos < last_chunk { - trx = self.db.create_trx()?; + trx = self.db.create_trx().map_err(into_error)?; } else { break; } @@ -127,12 +127,12 @@ impl FdbStore { Ok(()) } - pub(crate) async fn delete_blob(&self, key: &[u8]) -> crate::Result { + pub(crate) async fn delete_blob(&self, key: &[u8]) -> trc::Result { if key.len() < BLOB_HASH_LEN { return Ok(false); } - let trx = self.db.create_trx()?; + let trx = self.db.create_trx().map_err(into_error)?; trx.clear_range( &KeySerializer::new(key.len() + 3) .write(SUBSPACE_BLOBS) diff --git a/crates/store/src/backend/foundationdb/mod.rs b/crates/store/src/backend/foundationdb/mod.rs index bd5c82f9..3ea02014 100644 --- a/crates/store/src/backend/foundationdb/mod.rs +++ b/crates/store/src/backend/foundationdb/mod.rs @@ -8,8 +8,6 @@ use std::time::{Duration, Instant}; use foundationdb::{api::NetworkAutoStop, Database, FdbError, Transaction}; -use crate::Error; - pub mod blob; pub mod main; pub mod read; @@ -77,8 +75,9 @@ impl TimedTransaction { } } -impl From for Error { - fn from(error: FdbError) -> Self { - Self::InternalError(format!("FoundationDB error: {}", error.message())) - } +#[inline(always)] +fn into_error(error: FdbError) -> trc::Error { + 
trc::Cause::FoundationDB + .reason(error.message()) + .ctx(trc::Key::Code, error.code()) } diff --git a/crates/store/src/backend/foundationdb/read.rs b/crates/store/src/backend/foundationdb/read.rs index 38f9338c..c4de2520 100644 --- a/crates/store/src/backend/foundationdb/read.rs +++ b/crates/store/src/backend/foundationdb/read.rs @@ -21,7 +21,7 @@ use crate::{ BitmapKey, Deserialize, IterateParams, Key, ValueKey, U32_LEN, WITH_SUBSPACE, }; -use super::{FdbStore, ReadVersion, TimedTransaction, MAX_VALUE_SIZE}; +use super::{into_error, FdbStore, ReadVersion, TimedTransaction, MAX_VALUE_SIZE}; #[allow(dead_code)] pub(crate) enum ChunkedValue { @@ -31,7 +31,7 @@ pub(crate) enum ChunkedValue { } impl FdbStore { - pub(crate) async fn get_value(&self, key: impl Key) -> crate::Result> + pub(crate) async fn get_value(&self, key: impl Key) -> trc::Result> where U: Deserialize, { @@ -48,7 +48,7 @@ impl FdbStore { pub(crate) async fn get_bitmap( &self, mut key: BitmapKey>, - ) -> crate::Result> { + ) -> trc::Result> { let mut bm = RoaringBitmap::new(); let begin = key.serialize(WITH_SUBSPACE); key.document_id = u32::MAX; @@ -66,7 +66,7 @@ impl FdbStore { true, ); - while let Some(value) = values.try_next().await? { + while let Some(value) = values.try_next().await.map_err(into_error)? { let key = value.key(); if key.len() == key_len { bm.insert(key.deserialize_be_u32(key.len() - U32_LEN)?); @@ -79,8 +79,8 @@ impl FdbStore { pub(crate) async fn iterate( &self, params: IterateParams, - mut cb: impl for<'x> FnMut(&'x [u8], &'x [u8]) -> crate::Result + Sync + Send, - ) -> crate::Result<()> { + mut cb: impl for<'x> FnMut(&'x [u8], &'x [u8]) -> trc::Result + Sync + Send, + ) -> trc::Result<()> { let mut begin = params.begin.serialize(WITH_SUBSPACE); let end = params.end.serialize(WITH_SUBSPACE); @@ -103,7 +103,7 @@ impl FdbStore { true, ); - while let Some(values) = values.try_next().await? { + while let Some(values) = values.try_next().await.map_err(into_error)? 
{ let mut last_key = &[] as &[u8]; for value in values.iter() { @@ -140,7 +140,7 @@ impl FdbStore { true, ); - if let Some(value) = values.try_next().await? { + if let Some(value) = values.try_next().await.map_err(into_error)? { cb(value.key().get(1..).unwrap_or_default(), value.value())?; } } @@ -151,24 +151,30 @@ impl FdbStore { pub(crate) async fn get_counter( &self, key: impl Into>> + Sync + Send, - ) -> crate::Result { + ) -> trc::Result { let key = key.into().serialize(WITH_SUBSPACE); - if let Some(bytes) = self.read_trx().await?.get(&key, true).await? { - deserialize_i64_le(&bytes) + if let Some(bytes) = self + .read_trx() + .await? + .get(&key, true) + .await + .map_err(into_error)? + { + deserialize_i64_le(&key, &bytes) } else { Ok(0) } } - pub(crate) async fn read_trx(&self) -> crate::Result { + pub(crate) async fn read_trx(&self) -> trc::Result { let (is_expired, mut read_version) = { let version = self.version.lock(); (version.is_expired(), version.version) }; - let trx = self.db.create_trx()?; + let trx = self.db.create_trx().map_err(into_error)?; if is_expired { - read_version = trx.get_read_version().await?; + read_version = trx.get_read_version().await.map_err(into_error)?; *self.version.lock() = ReadVersion::new(read_version); } else { trx.set_read_version(read_version); @@ -177,10 +183,10 @@ impl FdbStore { Ok(trx) } - pub(crate) async fn timed_read_trx(&self) -> crate::Result { + pub(crate) async fn timed_read_trx(&self) -> trc::Result { self.db .create_trx() - .map_err(Into::into) + .map_err(into_error) .map(TimedTransaction::new) } } @@ -189,8 +195,8 @@ pub(crate) async fn read_chunked_value( key: &[u8], trx: &Transaction, snapshot: bool, -) -> crate::Result { - if let Some(bytes) = trx.get(key, snapshot).await? { +) -> trc::Result { + if let Some(bytes) = trx.get(key, snapshot).await.map_err(into_error)? 
{ if bytes.len() < MAX_VALUE_SIZE { Ok(ChunkedValue::Single(bytes)) } else { @@ -201,7 +207,7 @@ pub(crate) async fn read_chunked_value( .write(0u8) .finalize(); - while let Some(bytes) = trx.get(&key, snapshot).await? { + while let Some(bytes) = trx.get(&key, snapshot).await.map_err(into_error)? { value.extend_from_slice(&bytes); *key.last_mut().unwrap() += 1; } diff --git a/crates/store/src/backend/foundationdb/write.rs b/crates/store/src/backend/foundationdb/write.rs index 35118e55..4a498995 100644 --- a/crates/store/src/backend/foundationdb/write.rs +++ b/crates/store/src/backend/foundationdb/write.rs @@ -28,12 +28,13 @@ use crate::{ }; use super::{ + into_error, read::{read_chunked_value, ChunkedValue}, FdbStore, ReadVersion, MAX_VALUE_SIZE, }; impl FdbStore { - pub(crate) async fn write(&self, batch: Batch) -> crate::Result { + pub(crate) async fn write(&self, batch: Batch) -> trc::Result { let start = Instant::now(); let mut retry_count = 0; @@ -44,7 +45,7 @@ impl FdbStore { let mut change_id = u64::MAX; let mut result = AssignedIds::default(); - let trx = self.db.create_trx()?; + let trx = self.db.create_trx().map_err(into_error)?; for op in &batch.ops { match op { @@ -93,8 +94,9 @@ impl FdbStore { *key.last_mut().unwrap() += 1; } else { trx.cancel(); - return Err(crate::Error::InternalError( - "Value too large".into(), + return Err(trc::Cause::FoundationDB.ctx( + trc::Key::Reason, + "Value is too large", )); } } @@ -109,8 +111,10 @@ impl FdbStore { trx.atomic_op(&key, &by.to_le_bytes()[..], MutationType::Add); } ValueOp::AddAndGet(by) => { - let num = if let Some(bytes) = trx.get(&key, false).await? { - deserialize_i64_le(&bytes)? + *by + let num = if let Some(bytes) = + trx.get(&key, false).await.map_err(into_error)? + { + deserialize_i64_le(&key, &bytes)? + *by } else { *by }; @@ -180,7 +184,7 @@ impl FdbStore { true, ); let mut found_ids = RoaringBitmap::new(); - while let Some(value) = values.try_next().await? 
{ + while let Some(value) = values.try_next().await.map_err(into_error)? { let key = value.key(); if key.len() == key_len { found_ids.insert(key.deserialize_be_u32(key_len - U32_LEN)?); @@ -212,7 +216,8 @@ impl FdbStore { (&result).into(), ), options::ConflictRangeType::Read, - )?; + ) + .map_err(into_error)?; } trx.set(&key, &[]); @@ -252,7 +257,7 @@ impl FdbStore { if !matches { trx.cancel(); - return Err(crate::Error::AssertValueFailed); + return Err(trc::Cause::AssertValue.into()); } } } @@ -274,10 +279,10 @@ impl FdbStore { } } - pub(crate) async fn commit(&self, trx: Transaction, will_retry: bool) -> crate::Result { + pub(crate) async fn commit(&self, trx: Transaction, will_retry: bool) -> trc::Result { match trx.commit().await { Ok(result) => { - let commit_version = result.committed_version()?; + let commit_version = result.committed_version().map_err(into_error)?; let mut version = self.version.lock(); if commit_version > version.version { *version = ReadVersion::new(commit_version); @@ -286,20 +291,20 @@ impl FdbStore { } Err(err) => { if will_retry { - err.on_error().await?; + err.on_error().await.map_err(into_error)?; Ok(false) } else { - Err(FdbError::from(err).into()) + Err(into_error(FdbError::from(err))) } } } } - pub(crate) async fn purge_store(&self) -> crate::Result<()> { + pub(crate) async fn purge_store(&self) -> trc::Result<()> { // Obtain all zero counters let mut delete_keys = Vec::new(); for subspace in [SUBSPACE_COUNTER, SUBSPACE_QUOTA] { - let trx = self.db.create_trx()?; + let trx = self.db.create_trx().map_err(into_error)?; let from_key = [subspace, 0u8]; let to_key = [subspace, u8::MAX, u8::MAX, u8::MAX, u8::MAX, u8::MAX]; @@ -314,7 +319,7 @@ impl FdbStore { true, ); - while let Some(value) = values.try_next().await? { + while let Some(value) = values.try_next().await.map_err(into_error)? 
{ if value.value().iter().all(|byte| *byte == 0) { delete_keys.push(value.key().to_vec()); } @@ -330,7 +335,7 @@ impl FdbStore { for chunk in delete_keys.chunks(1024) { let mut retry_count = 0; loop { - let trx = self.db.create_trx()?; + let trx = self.db.create_trx().map_err(into_error)?; for key in chunk { trx.atomic_op(key, &integer, MutationType::CompareAndClear); } @@ -346,11 +351,11 @@ impl FdbStore { Ok(()) } - pub(crate) async fn delete_range(&self, from: impl Key, to: impl Key) -> crate::Result<()> { + pub(crate) async fn delete_range(&self, from: impl Key, to: impl Key) -> trc::Result<()> { let from = from.serialize(WITH_SUBSPACE); let to = to.serialize(WITH_SUBSPACE); - let trx = self.db.create_trx()?; + let trx = self.db.create_trx().map_err(into_error)?; trx.clear_range(&from, &to); self.commit(trx, false).await.map(|_| ()) } diff --git a/crates/store/src/backend/fs/mod.rs b/crates/store/src/backend/fs/mod.rs index 8f069e29..f2695f2e 100644 --- a/crates/store/src/backend/fs/mod.rs +++ b/crates/store/src/backend/fs/mod.rs @@ -51,7 +51,7 @@ impl FsStore { &self, key: &[u8], range: Range, - ) -> crate::Result>> { + ) -> trc::Result>> { let blob_path = self.build_path(key); let blob_size = match fs::metadata(&blob_path).await { Ok(m) => m.len() as usize, @@ -79,7 +79,7 @@ impl FsStore { })) } - pub(crate) async fn put_blob(&self, key: &[u8], data: &[u8]) -> crate::Result<()> { + pub(crate) async fn put_blob(&self, key: &[u8], data: &[u8]) -> trc::Result<()> { let blob_path = self.build_path(key); if fs::metadata(&blob_path) @@ -95,7 +95,7 @@ impl FsStore { Ok(()) } - pub(crate) async fn delete_blob(&self, key: &[u8]) -> crate::Result { + pub(crate) async fn delete_blob(&self, key: &[u8]) -> trc::Result { let blob_path = self.build_path(key); if fs::metadata(&blob_path).await.is_ok() { fs::remove_file(&blob_path).await?; diff --git a/crates/store/src/backend/mod.rs b/crates/store/src/backend/mod.rs index 3af7828d..7ba2aaa1 100644 --- 
a/crates/store/src/backend/mod.rs +++ b/crates/store/src/backend/mod.rs @@ -26,15 +26,9 @@ pub mod sqlite; pub const MAX_TOKEN_LENGTH: usize = (u8::MAX >> 1) as usize; pub const MAX_TOKEN_MASK: usize = MAX_TOKEN_LENGTH - 1; -impl From for crate::Error { - fn from(err: std::io::Error) -> Self { - Self::InternalError(format!("IO error: {}", err)) - } -} - #[allow(dead_code)] -fn deserialize_i64_le(bytes: &[u8]) -> crate::Result { +fn deserialize_i64_le(key: &[u8], bytes: &[u8]) -> trc::Result { Ok(i64::from_le_bytes(bytes[..].try_into().map_err(|_| { - crate::Error::InternalError("Failed to deserialize i64 value.".to_string()) + trc::Error::corrupted_key(key, bytes.into(), trc::location!()) })?)) } diff --git a/crates/store/src/backend/mysql/blob.rs b/crates/store/src/backend/mysql/blob.rs index ac725f0a..39bb8bb1 100644 --- a/crates/store/src/backend/mysql/blob.rs +++ b/crates/store/src/backend/mysql/blob.rs @@ -8,16 +8,19 @@ use std::ops::Range; use mysql_async::prelude::Queryable; -use super::MysqlStore; +use super::{into_error, MysqlStore}; impl MysqlStore { pub(crate) async fn get_blob( &self, key: &[u8], range: Range, - ) -> crate::Result>> { - let mut conn = self.conn_pool.get_conn().await?; - let s = conn.prep("SELECT v FROM t WHERE k = ?").await?; + ) -> trc::Result>> { + let mut conn = self.conn_pool.get_conn().await.map_err(into_error)?; + let s = conn + .prep("SELECT v FROM t WHERE k = ?") + .await + .map_err(into_error)?; conn.exec_first::, _, _>(&s, (key,)) .await .map(|bytes| { @@ -32,26 +35,30 @@ impl MysqlStore { }) } }) - .map_err(Into::into) + .map_err(into_error) } - pub(crate) async fn put_blob(&self, key: &[u8], data: &[u8]) -> crate::Result<()> { - let mut conn = self.conn_pool.get_conn().await?; + pub(crate) async fn put_blob(&self, key: &[u8], data: &[u8]) -> trc::Result<()> { + let mut conn = self.conn_pool.get_conn().await.map_err(into_error)?; let s = conn .prep("INSERT INTO t (k, v) VALUES (?, ?) 
ON DUPLICATE KEY UPDATE v = VALUES(v)") - .await?; + .await + .map_err(into_error)?; conn.exec_drop(&s, (key, data)) .await - .map_err(|e| crate::Error::InternalError(format!("Failed to insert blob: {}", e))) + .map_err(into_error) .map(|_| ()) } - pub(crate) async fn delete_blob(&self, key: &[u8]) -> crate::Result { - let mut conn = self.conn_pool.get_conn().await?; - let s = conn.prep("DELETE FROM t WHERE k = ?").await?; + pub(crate) async fn delete_blob(&self, key: &[u8]) -> trc::Result { + let mut conn = self.conn_pool.get_conn().await.map_err(into_error)?; + let s = conn + .prep("DELETE FROM t WHERE k = ?") + .await + .map_err(into_error)?; conn.exec_iter(&s, (key,)) .await - .map_err(|e| crate::Error::InternalError(format!("Failed to delete blob: {}", e))) + .map_err(into_error) .map(|hits| hits.affected_rows() > 0) } } diff --git a/crates/store/src/backend/mysql/lookup.rs b/crates/store/src/backend/mysql/lookup.rs index af080901..8dd8d3dd 100644 --- a/crates/store/src/backend/mysql/lookup.rs +++ b/crates/store/src/backend/mysql/lookup.rs @@ -8,35 +8,35 @@ use mysql_async::{prelude::Queryable, Params, Row}; use crate::{IntoRows, QueryResult, QueryType, Value}; -use super::MysqlStore; +use super::{into_error, MysqlStore}; impl MysqlStore { pub(crate) async fn query( &self, query: &str, - params: Vec>, - ) -> crate::Result { - let mut conn = self.conn_pool.get_conn().await?; - let s = conn.prep(query).await?; - let params = Params::Positional(params.into_iter().map(Into::into).collect()); + params: &[Value<'_>], + ) -> trc::Result { + let mut conn = self.conn_pool.get_conn().await.map_err(into_error)?; + let s = conn.prep(query).await.map_err(into_error)?; + let params = Params::Positional(params.iter().map(Into::into).collect()); match T::query_type() { QueryType::Execute => conn.exec_drop(s, params).await.map_or_else( - |e| Err(e.into()), + |e| Err(into_error(e)), |_| Ok(T::from_exec(conn.affected_rows() as usize)), ), QueryType::Exists => conn 
.exec_first::(s, params) .await - .map_or_else(|e| Err(e.into()), |r| Ok(T::from_exists(r.is_some()))), + .map_or_else(|e| Err(into_error(e)), |r| Ok(T::from_exists(r.is_some()))), QueryType::QueryOne => conn .exec_first::(s, params) .await - .map_or_else(|e| Err(e.into()), |r| Ok(T::from_query_one(r))), + .map_or_else(|e| Err(into_error(e)), |r| Ok(T::from_query_one(r))), QueryType::QueryAll => conn .exec::(s, params) .await - .map_or_else(|e| Err(e.into()), |r| Ok(T::from_query_all(r))), + .map_or_else(|e| Err(into_error(e)), |r| Ok(T::from_query_all(r))), } } } diff --git a/crates/store/src/backend/mysql/main.rs b/crates/store/src/backend/mysql/main.rs index 078e9feb..e673280b 100644 --- a/crates/store/src/backend/mysql/main.rs +++ b/crates/store/src/backend/mysql/main.rs @@ -11,7 +11,7 @@ use utils::config::{utils::AsKey, Config}; use crate::*; -use super::MysqlStore; +use super::{into_error, MysqlStore}; impl MysqlStore { pub async fn open(config: &mut Config, prefix: impl AsKey) -> Option { @@ -81,8 +81,8 @@ impl MysqlStore { Some(db) } - pub(super) async fn create_tables(&self) -> crate::Result<()> { - let mut conn = self.conn_pool.get_conn().await?; + pub(super) async fn create_tables(&self) -> trc::Result<()> { + let mut conn = self.conn_pool.get_conn().await.map_err(into_error)?; for table in [ SUBSPACE_ACL, @@ -108,7 +108,8 @@ impl MysqlStore { PRIMARY KEY (k(255)) ) ENGINE=InnoDB" )) - .await?; + .await + .map_err(into_error)?; } conn.query_drop(&format!( @@ -119,7 +120,8 @@ impl MysqlStore { ) ENGINE=InnoDB", char::from(SUBSPACE_BLOBS), )) - .await?; + .await + .map_err(into_error)?; for table in [ SUBSPACE_INDEXES, @@ -134,7 +136,8 @@ impl MysqlStore { PRIMARY KEY (k(400)) ) ENGINE=InnoDB" )) - .await?; + .await + .map_err(into_error)?; } for table in [SUBSPACE_COUNTER, SUBSPACE_QUOTA] { @@ -146,7 +149,8 @@ impl MysqlStore { ) ENGINE=InnoDB", char::from(table) )) - .await?; + .await + .map_err(into_error)?; } Ok(()) diff --git 
a/crates/store/src/backend/mysql/mod.rs b/crates/store/src/backend/mysql/mod.rs index 408a3c30..2a3d2d90 100644 --- a/crates/store/src/backend/mysql/mod.rs +++ b/crates/store/src/backend/mysql/mod.rs @@ -4,6 +4,8 @@ * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL */ +use std::fmt::Display; + use mysql_async::Pool; pub mod blob; @@ -16,14 +18,7 @@ pub struct MysqlStore { pub(crate) conn_pool: Pool, } -impl From for crate::Error { - fn from(err: mysql_async::Error) -> Self { - Self::InternalError(format!("mySQL error: {}", err)) - } -} - -impl From for crate::Error { - fn from(err: mysql_async::FromValueError) -> Self { - Self::InternalError(format!("mySQL value conversion error: {}", err)) - } +#[inline(always)] +fn into_error(err: impl Display) -> trc::Error { + trc::Cause::MySQL.reason(err) } diff --git a/crates/store/src/backend/mysql/read.rs b/crates/store/src/backend/mysql/read.rs index d5f3b647..24a929f8 100644 --- a/crates/store/src/backend/mysql/read.rs +++ b/crates/store/src/backend/mysql/read.rs @@ -13,24 +13,25 @@ use crate::{ BitmapKey, Deserialize, IterateParams, Key, ValueKey, U32_LEN, }; -use super::MysqlStore; +use super::{into_error, MysqlStore}; impl MysqlStore { - pub(crate) async fn get_value(&self, key: impl Key) -> crate::Result> + pub(crate) async fn get_value(&self, key: impl Key) -> trc::Result> where U: Deserialize + 'static, { - let mut conn = self.conn_pool.get_conn().await?; + let mut conn = self.conn_pool.get_conn().await.map_err(into_error)?; let s = conn .prep(&format!( "SELECT v FROM {} WHERE k = ?", char::from(key.subspace()) )) - .await?; + .await + .map_err(into_error)?; let key = key.serialize(0); conn.exec_first::, _, _>(&s, (key,)) .await - .map_err(Into::into) + .map_err(into_error) .and_then(|r| { if let Some(r) = r { Ok(Some(U::deserialize(&r)?)) @@ -43,21 +44,25 @@ impl MysqlStore { pub(crate) async fn get_bitmap( &self, mut key: BitmapKey>, - ) -> crate::Result> { + ) -> trc::Result> { let begin = 
key.serialize(0); key.document_id = u32::MAX; let key_len = begin.len(); let end = key.serialize(0); - let mut conn = self.conn_pool.get_conn().await?; + let mut conn = self.conn_pool.get_conn().await.map_err(into_error)?; let table = char::from(key.subspace()); let mut bm = RoaringBitmap::new(); let s = conn .prep(&format!("SELECT k FROM {table} WHERE k >= ? AND k <= ?")) - .await?; - let mut rows = conn.exec_stream::, _, _>(&s, (begin, end)).await?; + .await + .map_err(into_error)?; + let mut rows = conn + .exec_stream::, _, _>(&s, (begin, end)) + .await + .map_err(into_error)?; - while let Some(key) = rows.try_next().await? { + while let Some(key) = rows.try_next().await.map_err(into_error)? { if key.len() == key_len { bm.insert(key.as_slice().deserialize_be_u32(key.len() - U32_LEN)?); } @@ -68,9 +73,9 @@ impl MysqlStore { pub(crate) async fn iterate( &self, params: IterateParams, - mut cb: impl for<'x> FnMut(&'x [u8], &'x [u8]) -> crate::Result + Sync + Send, - ) -> crate::Result<()> { - let mut conn = self.conn_pool.get_conn().await?; + mut cb: impl for<'x> FnMut(&'x [u8], &'x [u8]) -> trc::Result + Sync + Send, + ) -> trc::Result<()> { + let mut conn = self.conn_pool.get_conn().await.map_err(into_error)?; let table = char::from(params.begin.subspace()); let begin = params.begin.serialize(0); let end = params.end.serialize(0); @@ -95,27 +100,34 @@ impl MysqlStore { format!("SELECT {keys} FROM {table} WHERE k >= ? AND k <= ? ORDER BY k DESC") } }) - .await?; - let mut rows = conn.exec_stream::(&s, (begin, end)).await?; + .await + .map_err(into_error)?; + let mut rows = conn + .exec_stream::(&s, (begin, end)) + .await + .map_err(into_error)?; if params.values { - while let Some(mut row) = rows.try_next().await? { + while let Some(mut row) = rows.try_next().await.map_err(into_error)? 
{ let value = row .take_opt::, _>(1) - .unwrap_or_else(|| Ok(vec![]))?; + .unwrap_or_else(|| Ok(vec![])) + .map_err(into_error)?; let key = row .take_opt::, _>(0) - .unwrap_or_else(|| Ok(vec![]))?; + .unwrap_or_else(|| Ok(vec![])) + .map_err(into_error)?; if !cb(&key, &value)? { break; } } } else { - while let Some(mut row) = rows.try_next().await? { + while let Some(mut row) = rows.try_next().await.map_err(into_error)? { if !cb( &row.take_opt::, _>(0) - .unwrap_or_else(|| Ok(vec![]))?, + .unwrap_or_else(|| Ok(vec![])) + .map_err(into_error)?, b"", )? { break; @@ -129,18 +141,19 @@ impl MysqlStore { pub(crate) async fn get_counter( &self, key: impl Into>> + Sync + Send, - ) -> crate::Result { + ) -> trc::Result { let key = key.into(); let table = char::from(key.subspace()); let key = key.serialize(0); - let mut conn = self.conn_pool.get_conn().await?; + let mut conn = self.conn_pool.get_conn().await.map_err(into_error)?; let s = conn .prep(&format!("SELECT v FROM {table} WHERE k = ?")) - .await?; + .await + .map_err(into_error)?; match conn.exec_first::(&s, (key,)).await { Ok(Some(num)) => Ok(num), Ok(None) => Ok(0), - Err(e) => Err(e.into()), + Err(e) => Err(into_error(e)), } } } diff --git a/crates/store/src/backend/mysql/write.rs b/crates/store/src/backend/mysql/write.rs index 3be983bb..5c7e7c08 100644 --- a/crates/store/src/backend/mysql/write.rs +++ b/crates/store/src/backend/mysql/write.rs @@ -20,20 +20,20 @@ use crate::{ BitmapKey, IndexKey, Key, LogKey, SUBSPACE_COUNTER, SUBSPACE_QUOTA, U32_LEN, }; -use super::MysqlStore; +use super::{into_error, MysqlStore}; #[derive(Debug)] enum CommitError { Mysql(mysql_async::Error), - Internal(crate::Error), + Internal(trc::Error), Retry, } impl MysqlStore { - pub(crate) async fn write(&self, batch: Batch) -> crate::Result { + pub(crate) async fn write(&self, batch: Batch) -> trc::Result { let start = Instant::now(); let mut retry_count = 0; - let mut conn = self.conn_pool.get_conn().await?; + let mut conn = 
self.conn_pool.get_conn().await.map_err(into_error)?; loop { match self.write_trx(&mut conn, &batch).await { @@ -46,11 +46,11 @@ impl MysqlStore { && start.elapsed() < MAX_COMMIT_TIME => {} Err(CommitError::Retry) => { if retry_count > MAX_COMMIT_ATTEMPTS || start.elapsed() > MAX_COMMIT_TIME { - return Err(crate::Error::AssertValueFailed); + return Err(trc::Cause::AssertValue.into()); } } Err(CommitError::Mysql(err)) => { - return Err(err.into()); + return Err(into_error(err)); } Err(CommitError::Internal(err)) => { return Err(err); @@ -135,7 +135,7 @@ impl MysqlStore { Ok(_) => { if exists.is_some() && trx.affected_rows() == 0 { trx.rollback().await?; - return Err(crate::Error::AssertValueFailed.into()); + return Err(trc::Cause::AssertValue.into_err().into()); } } Err(err) => { @@ -308,7 +308,7 @@ impl MysqlStore { .unwrap_or_else(|| (false, assert_value.is_none())); if !matches { trx.rollback().await?; - return Err(crate::Error::AssertValueFailed.into()); + return Err(trc::Cause::AssertValue.into_err().into()); } asserted_values.insert(key, exists); } @@ -318,35 +318,37 @@ impl MysqlStore { trx.commit().await.map(|_| result).map_err(Into::into) } - pub(crate) async fn purge_store(&self) -> crate::Result<()> { - let mut conn = self.conn_pool.get_conn().await?; + pub(crate) async fn purge_store(&self) -> trc::Result<()> { + let mut conn = self.conn_pool.get_conn().await.map_err(into_error)?; for subspace in [SUBSPACE_QUOTA, SUBSPACE_COUNTER] { let s = conn .prep(&format!("DELETE FROM {} WHERE v = 0", char::from(subspace),)) - .await?; - conn.exec_drop(&s, ()).await?; + .await + .map_err(into_error)?; + conn.exec_drop(&s, ()).await.map_err(into_error)?; } Ok(()) } - pub(crate) async fn delete_range(&self, from: impl Key, to: impl Key) -> crate::Result<()> { - let mut conn = self.conn_pool.get_conn().await?; + pub(crate) async fn delete_range(&self, from: impl Key, to: impl Key) -> trc::Result<()> { + let mut conn = 
self.conn_pool.get_conn().await.map_err(into_error)?; let s = conn .prep(&format!( "DELETE FROM {} WHERE k >= ? AND k < ?", char::from(from.subspace()), )) - .await?; + .await + .map_err(into_error)?; conn.exec_drop(&s, (&from.serialize(0), &to.serialize(0))) .await - .map_err(Into::into) + .map_err(into_error) } } -impl From for CommitError { - fn from(err: crate::Error) -> Self { +impl From for CommitError { + fn from(err: trc::Error) -> Self { CommitError::Internal(err) } } diff --git a/crates/store/src/backend/postgres/blob.rs b/crates/store/src/backend/postgres/blob.rs index 474d6ddc..4a91e383 100644 --- a/crates/store/src/backend/postgres/blob.rs +++ b/crates/store/src/backend/postgres/blob.rs @@ -6,16 +6,19 @@ use std::ops::Range; -use super::PostgresStore; +use super::{into_error, PostgresStore}; impl PostgresStore { pub(crate) async fn get_blob( &self, key: &[u8], range: Range, - ) -> crate::Result>> { - let conn = self.conn_pool.get().await?; - let s = conn.prepare_cached("SELECT v FROM t WHERE k = $1").await?; + ) -> trc::Result>> { + let conn = self.conn_pool.get().await.map_err(into_error)?; + let s = conn + .prepare_cached("SELECT v FROM t WHERE k = $1") + .await + .map_err(into_error)?; conn.query_opt(&s, &[&key]) .await .and_then(|row| { @@ -33,28 +36,32 @@ impl PostgresStore { Ok(None) } }) - .map_err(Into::into) + .map_err(into_error) } - pub(crate) async fn put_blob(&self, key: &[u8], data: &[u8]) -> crate::Result<()> { - let conn = self.conn_pool.get().await?; + pub(crate) async fn put_blob(&self, key: &[u8], data: &[u8]) -> trc::Result<()> { + let conn = self.conn_pool.get().await.map_err(into_error)?; let s = conn .prepare_cached( "INSERT INTO t (k, v) VALUES ($1, $2) ON CONFLICT (k) DO UPDATE SET v = EXCLUDED.v", ) - .await?; + .await + .map_err(into_error)?; conn.execute(&s, &[&key, &data]) .await - .map_err(|e| crate::Error::InternalError(format!("Failed to insert blob: {}", e))) + .map_err(into_error) .map(|_| ()) } - pub(crate) async fn 
delete_blob(&self, key: &[u8]) -> crate::Result { - let conn = self.conn_pool.get().await?; - let s = conn.prepare_cached("DELETE FROM t WHERE k = $1").await?; + pub(crate) async fn delete_blob(&self, key: &[u8]) -> trc::Result { + let conn = self.conn_pool.get().await.map_err(into_error)?; + let s = conn + .prepare_cached("DELETE FROM t WHERE k = $1") + .await + .map_err(into_error)?; conn.execute(&s, &[&key]) .await - .map_err(|e| crate::Error::InternalError(format!("Failed to delete blob: {}", e))) + .map_err(into_error) .map(|hits| hits > 0) } } diff --git a/crates/store/src/backend/postgres/lookup.rs b/crates/store/src/backend/postgres/lookup.rs index a59770dd..4b01e955 100644 --- a/crates/store/src/backend/postgres/lookup.rs +++ b/crates/store/src/backend/postgres/lookup.rs @@ -12,16 +12,16 @@ use tokio_postgres::types::{FromSql, ToSql, Type}; use crate::IntoRows; -use super::PostgresStore; +use super::{into_error, PostgresStore}; impl PostgresStore { pub(crate) async fn query( &self, query: &str, - params_: Vec>, - ) -> crate::Result { - let conn = self.conn_pool.get().await?; - let s = conn.prepare_cached(query).await?; + params_: &[crate::Value<'_>], + ) -> trc::Result { + let conn = self.conn_pool.get().await.map_err(into_error)?; + let s = conn.prepare_cached(query).await.map_err(into_error)?; let params = params_ .iter() .map(|v| v as &(dyn tokio_postgres::types::ToSql + Sync)) @@ -31,22 +31,25 @@ impl PostgresStore { QueryType::Execute => conn .execute(&s, params.as_slice()) .await - .map_or_else(|e| Err(e.into()), |r| Ok(T::from_exec(r as usize))), + .map_or_else(|e| Err(into_error(e)), |r| Ok(T::from_exec(r as usize))), QueryType::Exists => { - let rows = conn.query_raw(&s, params.into_iter()).await?; + let rows = conn + .query_raw(&s, params.into_iter()) + .await + .map_err(into_error)?; pin_mut!(rows); rows.try_next() .await - .map_or_else(|e| Err(e.into()), |r| Ok(T::from_exists(r.is_some()))) + .map_or_else(|e| Err(into_error(e)), |r| 
Ok(T::from_exists(r.is_some()))) } QueryType::QueryOne => conn .query_opt(&s, params.as_slice()) .await - .map_or_else(|e| Err(e.into()), |r| Ok(T::from_query_one(r))), + .map_or_else(|e| Err(into_error(e)), |r| Ok(T::from_query_one(r))), QueryType::QueryAll => conn .query(&s, params.as_slice()) .await - .map_or_else(|e| Err(e.into()), |r| Ok(T::from_query_all(r))), + .map_or_else(|e| Err(into_error(e)), |r| Ok(T::from_query_all(r))), } } } diff --git a/crates/store/src/backend/postgres/main.rs b/crates/store/src/backend/postgres/main.rs index 11afef0e..0277a4b7 100644 --- a/crates/store/src/backend/postgres/main.rs +++ b/crates/store/src/backend/postgres/main.rs @@ -8,11 +8,9 @@ use std::time::Duration; use crate::{backend::postgres::tls::MakeRustlsConnect, *}; -use super::PostgresStore; +use super::{into_error, PostgresStore}; -use deadpool_postgres::{ - Config, CreatePoolError, ManagerConfig, PoolConfig, RecyclingMethod, Runtime, -}; +use deadpool_postgres::{Config, ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; use tokio_postgres::NoTls; use utils::{config::utils::AsKey, rustls_client_config}; @@ -69,8 +67,8 @@ impl PostgresStore { Some(db) } - pub(super) async fn create_tables(&self) -> crate::Result<()> { - let conn = self.conn_pool.get().await?; + pub(super) async fn create_tables(&self) -> trc::Result<()> { + let conn = self.conn_pool.get().await.map_err(into_error)?; for table in [ SUBSPACE_ACL, @@ -99,7 +97,8 @@ impl PostgresStore { ), &[], ) - .await?; + .await + .map_err(into_error)?; } for table in [ @@ -117,7 +116,8 @@ impl PostgresStore { ), &[], ) - .await?; + .await + .map_err(into_error)?; } for table in [SUBSPACE_COUNTER, SUBSPACE_QUOTA] { @@ -131,15 +131,10 @@ impl PostgresStore { ), &[], ) - .await?; + .await + .map_err(into_error)?; } Ok(()) } } - -impl From for crate::Error { - fn from(err: CreatePoolError) -> Self { - crate::Error::InternalError(format!("Failed to create connection pool: {}", err)) - } -} diff --git 
a/crates/store/src/backend/postgres/mod.rs b/crates/store/src/backend/postgres/mod.rs index f812b77d..81031e5d 100644 --- a/crates/store/src/backend/postgres/mod.rs +++ b/crates/store/src/backend/postgres/mod.rs @@ -4,7 +4,9 @@ * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL */ -use deadpool_postgres::{Pool, PoolError}; +use std::fmt::Display; + +use deadpool_postgres::Pool; pub mod blob; pub mod lookup; @@ -17,21 +19,7 @@ pub struct PostgresStore { pub(crate) conn_pool: Pool, } -impl From for crate::Error { - fn from(err: PoolError) -> Self { - Self::InternalError(format!("Connection pool error: {}", err)) - } -} - -impl From for crate::Error { - fn from(err: tokio_postgres::Error) -> Self { - Self::InternalError(format!("PostgreSQL error: {}", err)) - } -} - #[inline(always)] -pub fn deserialize_bitmap(bytes: &[u8]) -> crate::Result { - roaring::RoaringBitmap::deserialize_unchecked_from(bytes).map_err(|err| { - crate::Error::InternalError(format!("Failed to deserialize bitmap: {}", err)) - }) +fn into_error(err: impl Display) -> trc::Error { + trc::Cause::PostgreSQL.reason(err) } diff --git a/crates/store/src/backend/postgres/read.rs b/crates/store/src/backend/postgres/read.rs index 6324d71e..b070bec1 100644 --- a/crates/store/src/backend/postgres/read.rs +++ b/crates/store/src/backend/postgres/read.rs @@ -12,24 +12,25 @@ use crate::{ BitmapKey, Deserialize, IterateParams, Key, ValueKey, U32_LEN, }; -use super::PostgresStore; +use super::{into_error, PostgresStore}; impl PostgresStore { - pub(crate) async fn get_value(&self, key: impl Key) -> crate::Result> + pub(crate) async fn get_value(&self, key: impl Key) -> trc::Result> where U: Deserialize + 'static, { - let conn = self.conn_pool.get().await?; + let conn = self.conn_pool.get().await.map_err(into_error)?; let s = conn .prepare_cached(&format!( "SELECT v FROM {} WHERE k = $1", char::from(key.subspace()) )) - .await?; + .await + .map_err(into_error)?; let key = key.serialize(0); conn.query_opt(&s, 
&[&key]) .await - .map_err(Into::into) + .map_err(into_error) .and_then(|r| { if let Some(r) = r { Ok(Some(U::deserialize(r.get(0))?)) @@ -42,24 +43,28 @@ impl PostgresStore { pub(crate) async fn get_bitmap( &self, mut key: BitmapKey>, - ) -> crate::Result> { + ) -> trc::Result> { let begin = key.serialize(0); key.document_id = u32::MAX; let key_len = begin.len(); let end = key.serialize(0); - let conn = self.conn_pool.get().await?; + let conn = self.conn_pool.get().await.map_err(into_error)?; let table = char::from(key.subspace()); let mut bm = RoaringBitmap::new(); let s = conn .prepare_cached(&format!("SELECT k FROM {table} WHERE k >= $1 AND k <= $2")) - .await?; - let rows = conn.query_raw(&s, &[&begin, &end]).await?; + .await + .map_err(into_error)?; + let rows = conn + .query_raw(&s, &[&begin, &end]) + .await + .map_err(into_error)?; pin_mut!(rows); - while let Some(row) = rows.try_next().await? { - let key: &[u8] = row.try_get(0)?; + while let Some(row) = rows.try_next().await.map_err(into_error)? 
{ + let key: &[u8] = row.try_get(0).map_err(into_error)?; if key.len() == key_len { bm.insert(key.deserialize_be_u32(key.len() - U32_LEN)?); } @@ -70,9 +75,9 @@ impl PostgresStore { pub(crate) async fn iterate( &self, params: IterateParams, - mut cb: impl for<'x> FnMut(&'x [u8], &'x [u8]) -> crate::Result + Sync + Send, - ) -> crate::Result<()> { - let conn = self.conn_pool.get().await?; + mut cb: impl for<'x> FnMut(&'x [u8], &'x [u8]) -> trc::Result + Sync + Send, + ) -> trc::Result<()> { + let conn = self.conn_pool.get().await.map_err(into_error)?; let table = char::from(params.begin.subspace()); let begin = params.begin.serialize(0); let end = params.end.serialize(0); @@ -97,23 +102,26 @@ impl PostgresStore { format!("SELECT {keys} FROM {table} WHERE k >= $1 AND k <= $2 ORDER BY k DESC") } }) - .await?; - let rows = conn.query_raw(&s, &[&begin, &end]).await?; + .await.map_err(into_error)?; + let rows = conn + .query_raw(&s, &[&begin, &end]) + .await + .map_err(into_error)?; pin_mut!(rows); if params.values { - while let Some(row) = rows.try_next().await? { - let key = row.try_get::<_, &[u8]>(0)?; - let value = row.try_get::<_, &[u8]>(1)?; + while let Some(row) = rows.try_next().await.map_err(into_error)? { + let key = row.try_get::<_, &[u8]>(0).map_err(into_error)?; + let value = row.try_get::<_, &[u8]>(1).map_err(into_error)?; if !cb(key, value)? { break; } } } else { - while let Some(row) = rows.try_next().await? { - if !cb(row.try_get::<_, &[u8]>(0)?, b"")? { + while let Some(row) = rows.try_next().await.map_err(into_error)? { + if !cb(row.try_get::<_, &[u8]>(0).map_err(into_error)?, b"")? 
{ break; } } @@ -125,19 +133,20 @@ impl PostgresStore { pub(crate) async fn get_counter( &self, key: impl Into>> + Sync + Send, - ) -> crate::Result { + ) -> trc::Result { let key = key.into(); let table = char::from(key.subspace()); let key = key.serialize(0); - let conn = self.conn_pool.get().await?; + let conn = self.conn_pool.get().await.map_err(into_error)?; let s = conn .prepare_cached(&format!("SELECT v FROM {table} WHERE k = $1")) - .await?; + .await + .map_err(into_error)?; match conn.query_opt(&s, &[&key]).await { - Ok(Some(row)) => row.try_get(0).map_err(Into::into), + Ok(Some(row)) => row.try_get(0).map_err(into_error), Ok(None) => Ok(0), - Err(e) => Err(e.into()), + Err(e) => Err(into_error(e)), } } } diff --git a/crates/store/src/backend/postgres/write.rs b/crates/store/src/backend/postgres/write.rs index 2c365a34..04b19d32 100644 --- a/crates/store/src/backend/postgres/write.rs +++ b/crates/store/src/backend/postgres/write.rs @@ -21,18 +21,18 @@ use crate::{ BitmapKey, IndexKey, Key, LogKey, SUBSPACE_COUNTER, SUBSPACE_QUOTA, U32_LEN, }; -use super::PostgresStore; +use super::{into_error, PostgresStore}; #[derive(Debug)] enum CommitError { Postgres(tokio_postgres::Error), - Internal(crate::Error), + Internal(trc::Error), Retry, } impl PostgresStore { - pub(crate) async fn write(&self, batch: Batch) -> crate::Result { - let mut conn = self.conn_pool.get().await?; + pub(crate) async fn write(&self, batch: Batch) -> trc::Result { + let mut conn = self.conn_pool.get().await.map_err(into_error)?; let start = Instant::now(); let mut retry_count = 0; @@ -50,16 +50,16 @@ impl PostgresStore { ) if retry_count < MAX_COMMIT_ATTEMPTS && start.elapsed() < MAX_COMMIT_TIME => {} Some(&SqlState::UNIQUE_VIOLATION) => { - return Err(crate::Error::AssertValueFailed); + return Err(trc::Cause::AssertValue.into()); } - _ => return Err(err.into()), + _ => return Err(into_error(err)), }, CommitError::Internal(err) => return Err(err), CommitError::Retry => { if retry_count > 
MAX_COMMIT_ATTEMPTS || start.elapsed() > MAX_COMMIT_TIME { - return Err(crate::Error::AssertValueFailed); + return Err(trc::Cause::AssertValue.into()); } } } @@ -148,7 +148,7 @@ impl PostgresStore { .await? == 0 { - return Err(crate::Error::AssertValueFailed.into()); + return Err(trc::Cause::AssertValue.into_err().into()); } } ValueOp::AtomicAdd(by) => { @@ -322,7 +322,7 @@ impl PostgresStore { }) .unwrap_or_else(|| (false, assert_value.is_none())); if !matches { - return Err(crate::Error::AssertValueFailed.into()); + return Err(trc::Cause::AssertValue.into_err().into()); } asserted_values.insert(key, exists); } @@ -332,37 +332,42 @@ impl PostgresStore { trx.commit().await.map(|_| result).map_err(Into::into) } - pub(crate) async fn purge_store(&self) -> crate::Result<()> { - let conn = self.conn_pool.get().await?; + pub(crate) async fn purge_store(&self) -> trc::Result<()> { + let conn = self.conn_pool.get().await.map_err(into_error)?; for subspace in [SUBSPACE_QUOTA, SUBSPACE_COUNTER] { let s = conn .prepare_cached(&format!("DELETE FROM {} WHERE v = 0", char::from(subspace),)) - .await?; - conn.execute(&s, &[]).await.map(|_| ())? + .await + .map_err(into_error)?; + conn.execute(&s, &[]) + .await + .map(|_| ()) + .map_err(into_error)? 
} Ok(()) } - pub(crate) async fn delete_range(&self, from: impl Key, to: impl Key) -> crate::Result<()> { - let conn = self.conn_pool.get().await?; + pub(crate) async fn delete_range(&self, from: impl Key, to: impl Key) -> trc::Result<()> { + let conn = self.conn_pool.get().await.map_err(into_error)?; let s = conn .prepare_cached(&format!( "DELETE FROM {} WHERE k >= $1 AND k < $2", char::from(from.subspace()), )) - .await?; + .await + .map_err(into_error)?; conn.execute(&s, &[&from.serialize(0), &to.serialize(0)]) .await .map(|_| ()) - .map_err(Into::into) + .map_err(into_error) } } -impl From for CommitError { - fn from(err: crate::Error) -> Self { +impl From for CommitError { + fn from(err: trc::Error) -> Self { CommitError::Internal(err) } } diff --git a/crates/store/src/backend/redis/lookup.rs b/crates/store/src/backend/redis/lookup.rs index bf9a68d7..b0730f1b 100644 --- a/crates/store/src/backend/redis/lookup.rs +++ b/crates/store/src/backend/redis/lookup.rs @@ -8,7 +8,7 @@ use redis::AsyncCommands; use crate::Deserialize; -use super::{RedisPool, RedisStore}; +use super::{into_error, RedisPool, RedisStore}; impl RedisStore { pub async fn key_set( @@ -16,15 +16,25 @@ impl RedisStore { key: Vec, value: Vec, expires: Option, - ) -> crate::Result<()> { + ) -> trc::Result<()> { match &self.pool { RedisPool::Single(pool) => { - self.key_set_(pool.get().await?.as_mut(), key, value, expires) - .await + self.key_set_( + pool.get().await.map_err(into_error)?.as_mut(), + key, + value, + expires, + ) + .await } RedisPool::Cluster(pool) => { - self.key_set_(pool.get().await?.as_mut(), key, value, expires) - .await + self.key_set_( + pool.get().await.map_err(into_error)?.as_mut(), + key, + value, + expires, + ) + .await } } } @@ -34,47 +44,81 @@ impl RedisStore { key: Vec, value: i64, expires: Option, - ) -> crate::Result { + ) -> trc::Result { match &self.pool { RedisPool::Single(pool) => { - self.key_incr_(pool.get().await?.as_mut(), key, value, expires) - .await + 
self.key_incr_( + pool.get().await.map_err(into_error)?.as_mut(), + key, + value, + expires, + ) + .await } RedisPool::Cluster(pool) => { - self.key_incr_(pool.get().await?.as_mut(), key, value, expires) - .await + self.key_incr_( + pool.get().await.map_err(into_error)?.as_mut(), + key, + value, + expires, + ) + .await } } } - pub async fn key_delete(&self, key: Vec) -> crate::Result<()> { + pub async fn key_delete(&self, key: Vec) -> trc::Result<()> { match &self.pool { - RedisPool::Single(pool) => self.key_delete_(pool.get().await?.as_mut(), key).await, - RedisPool::Cluster(pool) => self.key_delete_(pool.get().await?.as_mut(), key).await, + RedisPool::Single(pool) => { + self.key_delete_(pool.get().await.map_err(into_error)?.as_mut(), key) + .await + } + RedisPool::Cluster(pool) => { + self.key_delete_(pool.get().await.map_err(into_error)?.as_mut(), key) + .await + } } } pub async fn key_get( &self, key: Vec, - ) -> crate::Result> { + ) -> trc::Result> { match &self.pool { - RedisPool::Single(pool) => self.key_get_(pool.get().await?.as_mut(), key).await, - RedisPool::Cluster(pool) => self.key_get_(pool.get().await?.as_mut(), key).await, + RedisPool::Single(pool) => { + self.key_get_(pool.get().await.map_err(into_error)?.as_mut(), key) + .await + } + RedisPool::Cluster(pool) => { + self.key_get_(pool.get().await.map_err(into_error)?.as_mut(), key) + .await + } } } - pub async fn counter_get(&self, key: Vec) -> crate::Result { + pub async fn counter_get(&self, key: Vec) -> trc::Result { match &self.pool { - RedisPool::Single(pool) => self.counter_get_(pool.get().await?.as_mut(), key).await, - RedisPool::Cluster(pool) => self.counter_get_(pool.get().await?.as_mut(), key).await, + RedisPool::Single(pool) => { + self.counter_get_(pool.get().await.map_err(into_error)?.as_mut(), key) + .await + } + RedisPool::Cluster(pool) => { + self.counter_get_(pool.get().await.map_err(into_error)?.as_mut(), key) + .await + } } } - pub async fn key_exists(&self, key: Vec) -> 
crate::Result { + pub async fn key_exists(&self, key: Vec) -> trc::Result { match &self.pool { - RedisPool::Single(pool) => self.key_exists_(pool.get().await?.as_mut(), key).await, - RedisPool::Cluster(pool) => self.key_exists_(pool.get().await?.as_mut(), key).await, + RedisPool::Single(pool) => { + self.key_exists_(pool.get().await.map_err(into_error)?.as_mut(), key) + .await + } + RedisPool::Cluster(pool) => { + self.key_exists_(pool.get().await.map_err(into_error)?.as_mut(), key) + .await + } } } @@ -82,31 +126,27 @@ impl RedisStore { &self, conn: &mut impl AsyncCommands, key: Vec, - ) -> crate::Result> { - if let Some(value) = conn.get::<_, Option>>(key).await? { + ) -> trc::Result> { + if let Some(value) = conn + .get::<_, Option>>(key) + .await + .map_err(into_error)? + { T::deserialize(&value).map(Some) } else { Ok(None) } } - async fn counter_get_( - &self, - conn: &mut impl AsyncCommands, - key: Vec, - ) -> crate::Result { + async fn counter_get_(&self, conn: &mut impl AsyncCommands, key: Vec) -> trc::Result { conn.get::<_, Option>(key) .await .map(|x| x.unwrap_or(0)) - .map_err(Into::into) + .map_err(into_error) } - async fn key_exists_( - &self, - conn: &mut impl AsyncCommands, - key: Vec, - ) -> crate::Result { - conn.exists(key).await.map_err(Into::into) + async fn key_exists_(&self, conn: &mut impl AsyncCommands, key: Vec) -> trc::Result { + conn.exists(key).await.map_err(into_error) } async fn key_set_( @@ -115,11 +155,11 @@ impl RedisStore { key: Vec, value: Vec, expires: Option, - ) -> crate::Result<()> { + ) -> trc::Result<()> { if let Some(expires) = expires { - conn.set_ex(key, value, expires).await.map_err(Into::into) + conn.set_ex(key, value, expires).await.map_err(into_error) } else { - conn.set(key, value).await.map_err(Into::into) + conn.set(key, value).await.map_err(into_error) } } @@ -129,7 +169,7 @@ impl RedisStore { key: Vec, value: i64, expires: Option, - ) -> crate::Result { + ) -> trc::Result { if let Some(expires) = expires { 
redis::pipe() .atomic() @@ -138,14 +178,14 @@ impl RedisStore { .ignore() .query_async::<_, Vec>(conn) .await - .map_err(Into::into) + .map_err(into_error) .map(|v| v.first().copied().unwrap_or(0)) } else { - conn.incr(&key, value).await.map_err(Into::into) + conn.incr(&key, value).await.map_err(into_error) } } - async fn key_delete_(&self, conn: &mut impl AsyncCommands, key: Vec) -> crate::Result<()> { - conn.del(key).await.map_err(Into::into) + async fn key_delete_(&self, conn: &mut impl AsyncCommands, key: Vec) -> trc::Result<()> { + conn.del(key).await.map_err(into_error) } } diff --git a/crates/store/src/backend/redis/mod.rs b/crates/store/src/backend/redis/mod.rs index 34356676..479b2c5e 100644 --- a/crates/store/src/backend/redis/mod.rs +++ b/crates/store/src/backend/redis/mod.rs @@ -4,15 +4,15 @@ * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL */ -use std::time::Duration; +use std::{fmt::Display, time::Duration}; use deadpool::{ - managed::{Manager, Pool, PoolError}, + managed::{Manager, Pool}, Runtime, }; use redis::{ cluster::{ClusterClient, ClusterClientBuilder}, - Client, RedisError, + Client, }; use utils::config::{utils::AsKey, Config}; @@ -149,7 +149,7 @@ fn build_pool( config: &mut Config, prefix: &str, manager: M, -) -> utils::config::Result> { +) -> Result, String> { Pool::builder(manager) .runtime(Runtime::Tokio1) .max_size( @@ -182,20 +182,7 @@ fn build_pool( }) } -impl From> for crate::Error { - fn from(value: PoolError) -> Self { - crate::Error::InternalError(format!("Redis pool error: {}", value)) - } -} - -impl From> for crate::Error { - fn from(value: PoolError) -> Self { - crate::Error::InternalError(format!("Connection pool {}", value)) - } -} - -impl From for crate::Error { - fn from(value: RedisError) -> Self { - crate::Error::InternalError(format!("Redis error: {}", value)) - } +#[inline(always)] +fn into_error(err: impl Display) -> trc::Error { + trc::Cause::Redis.reason(err) } diff --git 
a/crates/store/src/backend/redis/pool.rs b/crates/store/src/backend/redis/pool.rs index 3c8a227e..b8f01863 100644 --- a/crates/store/src/backend/redis/pool.rs +++ b/crates/store/src/backend/redis/pool.rs @@ -10,20 +10,18 @@ use redis::{ cluster_async::ClusterConnection, }; -use super::{RedisClusterConnectionManager, RedisConnectionManager}; +use super::{into_error, RedisClusterConnectionManager, RedisConnectionManager}; impl managed::Manager for RedisConnectionManager { type Type = MultiplexedConnection; - type Error = crate::Error; + type Error = trc::Error; - async fn create(&self) -> Result { + async fn create(&self) -> Result { match tokio::time::timeout(self.timeout, self.client.get_multiplexed_tokio_connection()) .await { - Ok(conn) => conn.map_err(Into::into), - Err(_) => Err(crate::Error::InternalError( - "Redis connection timeout".into(), - )), + Ok(conn) => conn.map_err(into_error), + Err(_) => Err(trc::Cause::Timeout.into()), } } @@ -31,24 +29,22 @@ impl managed::Manager for RedisConnectionManager { &self, conn: &mut MultiplexedConnection, _: &managed::Metrics, - ) -> managed::RecycleResult { + ) -> managed::RecycleResult { conn.req_packed_command(&redis::cmd("PING")) .await .map(|_| ()) - .map_err(|err| managed::RecycleError::Backend(err.into())) + .map_err(|err| managed::RecycleError::Backend(into_error(err))) } } impl managed::Manager for RedisClusterConnectionManager { type Type = ClusterConnection; - type Error = crate::Error; + type Error = trc::Error; - async fn create(&self) -> Result { + async fn create(&self) -> Result { match tokio::time::timeout(self.timeout, self.client.get_async_connection()).await { - Ok(conn) => conn.map_err(Into::into), - Err(_) => Err(crate::Error::InternalError( - "Redis connection timeout".into(), - )), + Ok(conn) => conn.map_err(into_error), + Err(_) => Err(trc::Cause::Timeout.into()), } } @@ -56,10 +52,10 @@ impl managed::Manager for RedisClusterConnectionManager { &self, conn: &mut ClusterConnection, _: 
&managed::Metrics, - ) -> managed::RecycleResult { + ) -> managed::RecycleResult { conn.req_packed_command(&redis::cmd("PING")) .await .map(|_| ()) - .map_err(|err| managed::RecycleError::Backend(err.into())) + .map_err(|err| managed::RecycleError::Backend(into_error(err))) } } diff --git a/crates/store/src/backend/rocksdb/blob.rs b/crates/store/src/backend/rocksdb/blob.rs index 0ac348a8..b3b38842 100644 --- a/crates/store/src/backend/rocksdb/blob.rs +++ b/crates/store/src/backend/rocksdb/blob.rs @@ -6,14 +6,14 @@ use std::ops::Range; -use super::{RocksDbStore, CF_BLOBS}; +use super::{into_error, RocksDbStore, CF_BLOBS}; impl RocksDbStore { pub(crate) async fn get_blob( &self, key: &[u8], range: Range, - ) -> crate::Result>> { + ) -> trc::Result>> { let db = self.db.clone(); self.spawn_worker(move || { db.get_pinned_cf(&db.cf_handle(CF_BLOBS).unwrap(), key) @@ -29,25 +29,25 @@ impl RocksDbStore { } }) }) - .map_err(|e| crate::Error::InternalError(format!("Failed to fetch blob: {}", e))) + .map_err(into_error) }) .await } - pub(crate) async fn put_blob(&self, key: &[u8], data: &[u8]) -> crate::Result<()> { + pub(crate) async fn put_blob(&self, key: &[u8], data: &[u8]) -> trc::Result<()> { let db = self.db.clone(); self.spawn_worker(move || { db.put_cf(&db.cf_handle(CF_BLOBS).unwrap(), key, data) - .map_err(|e| crate::Error::InternalError(format!("Failed to insert blob: {}", e))) + .map_err(into_error) }) .await } - pub(crate) async fn delete_blob(&self, key: &[u8]) -> crate::Result { + pub(crate) async fn delete_blob(&self, key: &[u8]) -> trc::Result { let db = self.db.clone(); self.spawn_worker(move || { db.delete_cf(&db.cf_handle(CF_BLOBS).unwrap(), key) - .map_err(|e| crate::Error::InternalError(format!("Failed to delete blob: {}", e))) + .map_err(into_error) .map(|_| true) }) .await diff --git a/crates/store/src/backend/rocksdb/main.rs b/crates/store/src/backend/rocksdb/main.rs index 88848a08..0135ab88 100644 --- a/crates/store/src/backend/rocksdb/main.rs +++ 
b/crates/store/src/backend/rocksdb/main.rs @@ -139,9 +139,9 @@ impl RocksDbStore { }) } - pub async fn spawn_worker(&self, mut f: U) -> crate::Result + pub async fn spawn_worker(&self, mut f: U) -> trc::Result where - U: FnMut() -> crate::Result + Send, + U: FnMut() -> trc::Result + Send, V: Sync + Send + 'static, { let (tx, rx) = oneshot::channel(); @@ -154,10 +154,7 @@ impl RocksDbStore { match rx.await { Ok(result) => result, - Err(err) => Err(crate::Error::InternalError(format!( - "Worker thread failed: {}", - err - ))), + Err(err) => Err(trc::Cause::Thread.reason(err)), } } } diff --git a/crates/store/src/backend/rocksdb/mod.rs b/crates/store/src/backend/rocksdb/mod.rs index 17e93ecb..33d273a9 100644 --- a/crates/store/src/backend/rocksdb/mod.rs +++ b/crates/store/src/backend/rocksdb/mod.rs @@ -19,12 +19,6 @@ static CF_LOGS: &str = unsafe { std::str::from_utf8_unchecked(&[SUBSPACE_LOGS]) static CF_INDEXES: &str = unsafe { std::str::from_utf8_unchecked(&[SUBSPACE_INDEXES]) }; static CF_BLOBS: &str = unsafe { std::str::from_utf8_unchecked(&[SUBSPACE_BLOBS]) }; -impl From for crate::Error { - fn from(value: rocksdb::Error) -> Self { - Self::InternalError(format!("RocksDB error: {}", value)) - } -} - pub(crate) trait CfHandle { fn subspace_handle(&self, subspace: u8) -> Arc>; } @@ -41,3 +35,8 @@ pub struct RocksDbStore { db: Arc>, worker_pool: rayon::ThreadPool, } + +#[inline(always)] +fn into_error(err: rocksdb::Error) -> trc::Error { + trc::Cause::RocksDB.reason(err) +} diff --git a/crates/store/src/backend/rocksdb/read.rs b/crates/store/src/backend/rocksdb/read.rs index cf377b8f..287de2c7 100644 --- a/crates/store/src/backend/rocksdb/read.rs +++ b/crates/store/src/backend/rocksdb/read.rs @@ -7,7 +7,7 @@ use roaring::RoaringBitmap; use rocksdb::{Direction, IteratorMode}; -use super::RocksDbStore; +use super::{into_error, RocksDbStore}; use crate::{ backend::rocksdb::CfHandle, @@ -16,7 +16,7 @@ use crate::{ }; impl RocksDbStore { - pub(crate) async fn 
get_value(&self, key: impl Key) -> crate::Result> + pub(crate) async fn get_value(&self, key: impl Key) -> trc::Result> where U: Deserialize + 'static, { @@ -27,7 +27,7 @@ impl RocksDbStore { .unwrap(), &key.serialize(0), ) - .map_err(Into::into) + .map_err(into_error) .and_then(|value| { if let Some(value) = value { U::deserialize(&value).map(Some) @@ -42,7 +42,7 @@ impl RocksDbStore { pub(crate) async fn get_bitmap( &self, mut key: BitmapKey>, - ) -> crate::Result> { + ) -> trc::Result> { let db = self.db.clone(); self.spawn_worker(move || { let mut bm = RoaringBitmap::new(); @@ -55,7 +55,7 @@ impl RocksDbStore { &db.subspace_handle(subspace), IteratorMode::From(&begin, Direction::Forward), ) { - let (key, _) = row?; + let (key, _) = row.map_err(into_error)?; let key = key.as_ref(); if key.len() == key_len && key >= begin.as_slice() && key <= end.as_slice() { bm.insert(key.deserialize_be_u32(key.len() - U32_LEN)?); @@ -72,8 +72,8 @@ impl RocksDbStore { pub(crate) async fn iterate( &self, params: IterateParams, - mut cb: impl for<'x> FnMut(&'x [u8], &'x [u8]) -> crate::Result + Sync + Send, - ) -> crate::Result<()> { + mut cb: impl for<'x> FnMut(&'x [u8], &'x [u8]) -> trc::Result + Sync + Send, + ) -> trc::Result<()> { let db = self.db.clone(); self.spawn_worker(move || { @@ -87,7 +87,7 @@ impl RocksDbStore { }; for row in db.iterator_cf(&cf, it_mode) { - let (key, value) = row?; + let (key, value) = row.map_err(into_error)?; if key.as_ref() < begin.as_slice() || key.as_ref() > end.as_slice() || !cb(&key, &value)? 
@@ -105,7 +105,7 @@ impl RocksDbStore { pub(crate) async fn get_counter( &self, key: impl Into>> + Sync + Send, - ) -> crate::Result { + ) -> trc::Result { let key = key.into(); let db = self.db.clone(); self.spawn_worker(move || { @@ -113,11 +113,11 @@ impl RocksDbStore { let key = key.serialize(0); db.get_pinned_cf(&cf, &key) - .map_err(Into::into) + .map_err(into_error) .and_then(|bytes| { Ok(if let Some(bytes) = bytes { i64::from_le_bytes(bytes[..].try_into().map_err(|_| { - crate::Error::InternalError("Invalid counter value.".to_string()) + trc::Error::corrupted_key(&key, (&bytes[..]).into(), trc::location!()) })?) } else { 0 diff --git a/crates/store/src/backend/rocksdb/write.rs b/crates/store/src/backend/rocksdb/write.rs index 5fbaf13d..8639e2b7 100644 --- a/crates/store/src/backend/rocksdb/write.rs +++ b/crates/store/src/backend/rocksdb/write.rs @@ -17,7 +17,7 @@ use rocksdb::{ OptimisticTransactionOptions, WriteOptions, }; -use super::{CfHandle, RocksDbStore, CF_INDEXES, CF_LOGS}; +use super::{into_error, CfHandle, RocksDbStore, CF_INDEXES, CF_LOGS}; use crate::{ backend::deserialize_i64_le, write::{ @@ -28,7 +28,7 @@ use crate::{ }; impl RocksDbStore { - pub(crate) async fn write(&self, batch: Batch) -> crate::Result { + pub(crate) async fn write(&self, batch: Batch) -> trc::Result { let db = self.db.clone(); self.spawn_worker(move || { @@ -59,7 +59,7 @@ impl RocksDbStore { sleep(Duration::from_millis(backoff)); retry_count += 1; } - _ => return Err(err.into()), + _ => return Err(into_error(err)), }, } } @@ -67,7 +67,7 @@ impl RocksDbStore { .await } - pub(crate) async fn delete_range(&self, from: impl Key, to: impl Key) -> crate::Result<()> { + pub(crate) async fn delete_range(&self, from: impl Key, to: impl Key) -> trc::Result<()> { let db = self.db.clone(); self.spawn_worker(move || { let cf = db @@ -81,7 +81,7 @@ impl RocksDbStore { let it_mode = IteratorMode::From(&from, Direction::Forward); for row in db.iterator_cf(&cf, it_mode) { - let (key, _) = 
row?; + let (key, _) = row.map_err(into_error)?; if key.as_ref() < from.as_slice() || key.as_ref() >= to.as_slice() { break; @@ -90,7 +90,7 @@ impl RocksDbStore { } for k in delete_keys { - db.delete_cf(&cf, &k)?; + db.delete_cf(&cf, &k).map_err(into_error)?; } Ok(()) @@ -98,7 +98,7 @@ impl RocksDbStore { .await } - pub(crate) async fn purge_store(&self) -> crate::Result<()> { + pub(crate) async fn purge_store(&self) -> trc::Result<()> { let db = self.db.clone(); self.spawn_worker(move || { for subspace in [SUBSPACE_QUOTA, SUBSPACE_COUNTER] { @@ -109,7 +109,7 @@ impl RocksDbStore { let mut delete_keys = Vec::new(); for row in db.iterator_cf(&cf, IteratorMode::Start) { - let (key, value) = row?; + let (key, value) = row.map_err(into_error)?; if i64::deserialize(&value)? == 0 { delete_keys.push(key); @@ -120,14 +120,15 @@ impl RocksDbStore { for key in delete_keys { let txn = db.transaction_opt(&WriteOptions::default(), &txn_opts); if txn - .get_pinned_for_update_cf(&cf, &key, true)? + .get_pinned_for_update_cf(&cf, &key, true) + .map_err(into_error)? 
.map(|value| i64::deserialize(&value).map(|v| v == 0).unwrap_or(false)) .unwrap_or(false) { - txn.delete_cf(&cf, key)?; - txn.commit()?; + txn.delete_cf(&cf, key).map_err(into_error)?; + txn.commit().map_err(into_error)?; } else { - txn.rollback()?; + txn.rollback().map_err(into_error)?; } } } @@ -147,7 +148,7 @@ struct RocksDBTransaction<'x> { } enum CommitError { - Internal(crate::Error), + Internal(trc::Error), RocksDB(rocksdb::Error), } @@ -203,7 +204,7 @@ impl<'x> RocksDBTransaction<'x> { .map_err(CommitError::from) .and_then(|bytes| { if let Some(bytes) = bytes { - deserialize_i64_le(&bytes) + deserialize_i64_le(&key, &bytes) .map(|v| v + *by) .map_err(CommitError::from) } else { @@ -307,7 +308,7 @@ impl<'x> RocksDBTransaction<'x> { if !matches { txn.rollback()?; - return Err(CommitError::Internal(crate::Error::AssertValueFailed)); + return Err(CommitError::Internal(trc::Cause::AssertValue.into())); } } } @@ -323,8 +324,8 @@ impl From for CommitError { } } -impl From for CommitError { - fn from(err: crate::Error) -> Self { +impl From for CommitError { + fn from(err: trc::Error) -> Self { CommitError::Internal(err) } } diff --git a/crates/store/src/backend/s3/mod.rs b/crates/store/src/backend/s3/mod.rs index cbcdc9dd..70b314e1 100644 --- a/crates/store/src/backend/s3/mod.rs +++ b/crates/store/src/backend/s3/mod.rs @@ -4,13 +4,9 @@ * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL */ -use std::{io::Write, ops::Range, time::Duration}; +use std::{fmt::Display, io::Write, ops::Range, time::Duration}; -use s3::{ - creds::{error::CredentialsError, Credentials}, - error::S3Error, - Bucket, Region, -}; +use s3::{creds::Credentials, Bucket, Region}; use utils::{ codec::base32_custom::Base32Writer, config::{utils::AsKey, Config}, @@ -76,7 +72,7 @@ impl S3Store { &self, key: &[u8], range: Range, - ) -> crate::Result>> { + ) -> trc::Result>> { let path = self.build_key(key); let response = if range.start != 0 || range.end != usize::MAX { self.bucket @@ -88,39 
+84,47 @@ impl S3Store { .await } else { self.bucket.get_object(path).await - }; - match response { - Ok(response) if (200..300).contains(&response.status_code()) => { - Ok(Some(response.to_vec())) - } - Ok(response) if response.status_code() == 404 => Ok(None), - Ok(response) => Err(crate::Error::InternalError(format!( - "S3 error code {}: {}", - response.status_code(), - String::from_utf8_lossy(response.as_slice()) - ))), - Err(err) => Err(err.into()), + } + .map_err(into_error)?; + + match response.status_code() { + 200..=299 => Ok(Some(response.to_vec())), + 404 => Ok(None), + code => Err(trc::Cause::S3 + .reason(String::from_utf8_lossy(response.as_slice())) + .ctx(trc::Key::Code, code)), } } - pub(crate) async fn put_blob(&self, key: &[u8], data: &[u8]) -> crate::Result<()> { - match self.bucket.put_object(self.build_key(key), data).await { - Ok(response) if (200..300).contains(&response.status_code()) => Ok(()), - Ok(response) => Err(crate::Error::InternalError(format!( - "S3 error code {}: {}", - response.status_code(), - String::from_utf8_lossy(response.as_slice()) - ))), - Err(e) => Err(e.into()), + pub(crate) async fn put_blob(&self, key: &[u8], data: &[u8]) -> trc::Result<()> { + let response = self + .bucket + .put_object(self.build_key(key), data) + .await + .map_err(into_error)?; + + match response.status_code() { + 200..=299 => Ok(()), + code => Err(trc::Cause::S3 + .reason(String::from_utf8_lossy(response.as_slice())) + .ctx(trc::Key::Code, code)), } } - pub(crate) async fn delete_blob(&self, key: &[u8]) -> crate::Result { - self.bucket + pub(crate) async fn delete_blob(&self, key: &[u8]) -> trc::Result { + let response = self + .bucket .delete_object(self.build_key(key)) .await - .map(|response| (200..300).contains(&response.status_code())) - .map_err(|e| e.into()) + .map_err(into_error)?; + + match response.status_code() { + 200..=299 => Ok(true), + 404 => Ok(false), + code => Err(trc::Cause::S3 + 
.reason(String::from_utf8_lossy(response.as_slice())) + .ctx(trc::Key::Code, code)), + } } fn build_key(&self, key: &[u8]) -> String { @@ -136,14 +140,7 @@ impl S3Store { } } -impl From for crate::Error { - fn from(err: S3Error) -> Self { - Self::InternalError(format!("S3 error: {}", err)) - } -} - -impl From for crate::Error { - fn from(err: CredentialsError) -> Self { - Self::InternalError(format!("S3 Credentials error: {}", err)) - } +#[inline(always)] +fn into_error(err: impl Display) -> trc::Error { + trc::Cause::S3.reason(err) } diff --git a/crates/store/src/backend/sqlite/blob.rs b/crates/store/src/backend/sqlite/blob.rs index 3f2ee84c..0fc1f8a1 100644 --- a/crates/store/src/backend/sqlite/blob.rs +++ b/crates/store/src/backend/sqlite/blob.rs @@ -8,17 +8,19 @@ use std::ops::Range; use rusqlite::OptionalExtension; -use super::SqliteStore; +use super::{into_error, SqliteStore}; impl SqliteStore { pub(crate) async fn get_blob( &self, key: &[u8], range: Range, - ) -> crate::Result>> { - let conn = self.conn_pool.get()?; + ) -> trc::Result>> { + let conn = self.conn_pool.get().map_err(into_error)?; self.spawn_worker(move || { - let mut result = conn.prepare_cached("SELECT v FROM t WHERE k = ?")?; + let mut result = conn + .prepare_cached("SELECT v FROM t WHERE k = ?") + .map_err(into_error)?; result .query_row([&key], |row| { Ok({ @@ -34,28 +36,30 @@ impl SqliteStore { }) }) .optional() - .map_err(Into::into) + .map_err(into_error) }) .await } - pub(crate) async fn put_blob(&self, key: &[u8], data: &[u8]) -> crate::Result<()> { - let conn = self.conn_pool.get()?; + pub(crate) async fn put_blob(&self, key: &[u8], data: &[u8]) -> trc::Result<()> { + let conn = self.conn_pool.get().map_err(into_error)?; self.spawn_worker(move || { - conn.prepare_cached("INSERT OR REPLACE INTO t (k, v) VALUES (?, ?)")? + conn.prepare_cached("INSERT OR REPLACE INTO t (k, v) VALUES (?, ?)") + .map_err(into_error)? 
.execute([key, data]) - .map_err(|e| crate::Error::InternalError(format!("Failed to insert blob: {}", e))) + .map_err(into_error) .map(|_| ()) }) .await } - pub(crate) async fn delete_blob(&self, key: &[u8]) -> crate::Result { - let conn = self.conn_pool.get()?; + pub(crate) async fn delete_blob(&self, key: &[u8]) -> trc::Result { + let conn = self.conn_pool.get().map_err(into_error)?; self.spawn_worker(move || { - conn.prepare_cached("DELETE FROM t WHERE k = ?")? + conn.prepare_cached("DELETE FROM t WHERE k = ?") + .map_err(into_error)? .execute([key]) - .map_err(|e| crate::Error::InternalError(format!("Failed to delete blob: {}", e))) + .map_err(into_error) .map(|_| true) }) .await diff --git a/crates/store/src/backend/sqlite/lookup.rs b/crates/store/src/backend/sqlite/lookup.rs index dc4490d7..6b83aaf7 100644 --- a/crates/store/src/backend/sqlite/lookup.rs +++ b/crates/store/src/backend/sqlite/lookup.rs @@ -8,17 +8,17 @@ use rusqlite::{types::FromSql, Row, Rows, ToSql}; use crate::{IntoRows, QueryResult, QueryType, Value}; -use super::SqliteStore; +use super::{into_error, SqliteStore}; impl SqliteStore { pub(crate) async fn query( &self, query: &str, - params_: Vec>, - ) -> crate::Result { - let conn = self.conn_pool.get()?; + params_: &[Value<'_>], + ) -> trc::Result { + let conn = self.conn_pool.get().map_err(into_error)?; self.spawn_worker(move || { - let mut s = conn.prepare_cached(query)?; + let mut s = conn.prepare_cached(query).map_err(into_error)?; let params = params_ .iter() .map(|v| v as &(dyn rusqlite::types::ToSql)) @@ -27,16 +27,18 @@ impl SqliteStore { match T::query_type() { QueryType::Execute => s .execute(params.as_slice()) - .map_or_else(|e| Err(e.into()), |r| Ok(T::from_exec(r))), + .map_or_else(|e| Err(into_error(e)), |r| Ok(T::from_exec(r))), QueryType::Exists => s .exists(params.as_slice()) .map(T::from_exists) - .map_err(Into::into), + .map_err(into_error), QueryType::QueryOne => s .query(params.as_slice()) .and_then(|mut rows| 
Ok(T::from_query_one(rows.next()?))) - .map_err(Into::into), - QueryType::QueryAll => Ok(T::from_query_all(s.query(params.as_slice())?)), + .map_err(into_error), + QueryType::QueryAll => Ok(T::from_query_all( + s.query(params.as_slice()).map_err(into_error)?, + )), } }) .await diff --git a/crates/store/src/backend/sqlite/main.rs b/crates/store/src/backend/sqlite/main.rs index 7fefaaed..55fc9b42 100644 --- a/crates/store/src/backend/sqlite/main.rs +++ b/crates/store/src/backend/sqlite/main.rs @@ -10,7 +10,7 @@ use utils::config::{utils::AsKey, Config}; use crate::*; -use super::{pool::SqliteConnectionManager, SqliteStore}; +use super::{into_error, pool::SqliteConnectionManager, SqliteStore}; impl SqliteStore { pub fn open(config: &mut Config, prefix: impl AsKey) -> Option { @@ -66,24 +66,27 @@ impl SqliteStore { } #[cfg(feature = "test_mode")] - pub fn open_memory() -> crate::Result { + pub fn open_memory() -> trc::Result { + use super::into_error; + let db = Self { conn_pool: Pool::builder() .max_size(1) - .build(SqliteConnectionManager::memory())?, + .build(SqliteConnectionManager::memory()) + .map_err(into_error)?, worker_pool: rayon::ThreadPoolBuilder::new() .num_threads(num_cpus::get()) .build() .map_err(|err| { - crate::Error::InternalError(format!("Failed to build worker pool: {}", err)) + into_error(err).ctx(trc::Key::Reason, "Failed to build worker pool") })?, }; db.create_tables()?; Ok(db) } - pub(super) fn create_tables(&self) -> crate::Result<()> { - let conn = self.conn_pool.get()?; + pub(super) fn create_tables(&self) -> trc::Result<()> { + let conn = self.conn_pool.get().map_err(into_error)?; for table in [ SUBSPACE_ACL, @@ -111,7 +114,8 @@ impl SqliteStore { )" ), [], - )?; + ) + .map_err(into_error)?; } for table in [ @@ -128,7 +132,8 @@ impl SqliteStore { )" ), [], - )?; + ) + .map_err(into_error)?; } for table in [SUBSPACE_COUNTER, SUBSPACE_QUOTA] { @@ -141,15 +146,16 @@ impl SqliteStore { char::from(table) ), [], - )?; + ) + 
.map_err(into_error)?; } Ok(()) } - pub async fn spawn_worker(&self, mut f: U) -> crate::Result + pub async fn spawn_worker(&self, mut f: U) -> trc::Result where - U: FnMut() -> crate::Result + Send, + U: FnMut() -> trc::Result + Send, V: Sync + Send + 'static, { let (tx, rx) = oneshot::channel(); @@ -162,10 +168,7 @@ impl SqliteStore { match rx.await { Ok(result) => result, - Err(err) => Err(crate::Error::InternalError(format!( - "Worker thread failed: {}", - err - ))), + Err(err) => Err(trc::Cause::Thread.reason(err)), } } } diff --git a/crates/store/src/backend/sqlite/mod.rs b/crates/store/src/backend/sqlite/mod.rs index ad3a06b3..6cd07502 100644 --- a/crates/store/src/backend/sqlite/mod.rs +++ b/crates/store/src/backend/sqlite/mod.rs @@ -4,6 +4,8 @@ * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL */ +use std::fmt::Display; + use r2d2::Pool; use self::pool::SqliteConnectionManager; @@ -15,25 +17,12 @@ pub mod pool; pub mod read; pub mod write; -impl From for crate::Error { - fn from(err: r2d2::Error) -> Self { - Self::InternalError(format!("Connection pool error: {}", err)) - } -} - -impl From for crate::Error { - fn from(err: rusqlite::Error) -> Self { - Self::InternalError(format!("SQLite error: {}", err)) - } -} - -impl From for crate::Error { - fn from(err: rusqlite::types::FromSqlError) -> Self { - Self::InternalError(format!("SQLite error: {}", err)) - } -} - pub struct SqliteStore { pub(crate) conn_pool: Pool, pub(crate) worker_pool: rayon::ThreadPool, } + +#[inline(always)] +fn into_error(err: impl Display) -> trc::Error { + trc::Cause::SQLite.reason(err) +} diff --git a/crates/store/src/backend/sqlite/pool.rs b/crates/store/src/backend/sqlite/pool.rs index 963c3237..6471f7d8 100644 --- a/crates/store/src/backend/sqlite/pool.rs +++ b/crates/store/src/backend/sqlite/pool.rs @@ -85,8 +85,7 @@ impl SqliteConnectionManager { } } -fn sleeper(attempts: i32) -> bool { - tracing::debug!("SQLITE_BUSY, retrying after 200ms (attempt {})", attempts); +fn 
sleeper(_: i32) -> bool { std::thread::sleep(std::time::Duration::from_millis(200)); true } diff --git a/crates/store/src/backend/sqlite/read.rs b/crates/store/src/backend/sqlite/read.rs index 104ed76d..1540324a 100644 --- a/crates/store/src/backend/sqlite/read.rs +++ b/crates/store/src/backend/sqlite/read.rs @@ -12,19 +12,21 @@ use crate::{ BitmapKey, Deserialize, IterateParams, Key, ValueKey, U32_LEN, }; -use super::SqliteStore; +use super::{into_error, SqliteStore}; impl SqliteStore { - pub(crate) async fn get_value(&self, key: impl Key) -> crate::Result> + pub(crate) async fn get_value(&self, key: impl Key) -> trc::Result> where U: Deserialize + 'static, { - let conn = self.conn_pool.get()?; + let conn = self.conn_pool.get().map_err(into_error)?; self.spawn_worker(move || { - let mut result = conn.prepare_cached(&format!( - "SELECT v FROM {} WHERE k = ?", - char::from(key.subspace()) - ))?; + let mut result = conn + .prepare_cached(&format!( + "SELECT v FROM {} WHERE k = ?", + char::from(key.subspace()) + )) + .map_err(into_error)?; let key = key.serialize(0); result .query_row([&key], |row| { @@ -32,7 +34,7 @@ impl SqliteStore { .map_err(|err| rusqlite::Error::ToSqlConversionFailure(err.into())) }) .optional() - .map_err(Into::into) + .map_err(into_error) }) .await } @@ -40,22 +42,27 @@ impl SqliteStore { pub(crate) async fn get_bitmap( &self, mut key: BitmapKey>, - ) -> crate::Result> { + ) -> trc::Result> { let begin = key.serialize(0); key.document_id = u32::MAX; let key_len = begin.len(); let end = key.serialize(0); - let conn = self.conn_pool.get()?; + let conn = self.conn_pool.get().map_err(into_error)?; let table = char::from(key.subspace()); self.spawn_worker(move || { let mut bm = RoaringBitmap::new(); - let mut query = - conn.prepare_cached(&format!("SELECT k FROM {table} WHERE k >= ? AND k <= ?"))?; - let mut rows = query.query([&begin, &end])?; + let mut query = conn + .prepare_cached(&format!("SELECT k FROM {table} WHERE k >= ? 
AND k <= ?")) + .map_err(into_error)?; + let mut rows = query.query([&begin, &end]).map_err(into_error)?; - while let Some(row) = rows.next()? { - let key = row.get_ref(0)?.as_bytes()?; + while let Some(row) = rows.next().map_err(into_error)? { + let key = row + .get_ref(0) + .map_err(into_error)? + .as_bytes() + .map_err(into_error)?; if key.len() == key_len { bm.insert(key.deserialize_be_u32(key.len() - U32_LEN)?); } @@ -68,9 +75,9 @@ impl SqliteStore { pub(crate) async fn iterate( &self, params: IterateParams, - mut cb: impl for<'x> FnMut(&'x [u8], &'x [u8]) -> crate::Result + Sync + Send, - ) -> crate::Result<()> { - let conn = self.conn_pool.get()?; + mut cb: impl for<'x> FnMut(&'x [u8], &'x [u8]) -> trc::Result + Sync + Send, + ) -> trc::Result<()> { + let conn = self.conn_pool.get().map_err(into_error)?; self.spawn_worker(move || { let table = char::from(params.begin.subspace()); @@ -78,38 +85,56 @@ impl SqliteStore { let end = params.end.serialize(0); let keys = if params.values { "k, v" } else { "k" }; - let mut query = conn.prepare_cached(&match (params.first, params.ascending) { - (true, true) => { - format!( + let mut query = conn + .prepare_cached(&match (params.first, params.ascending) { + (true, true) => { + format!( "SELECT {keys} FROM {table} WHERE k >= ? AND k <= ? ORDER BY k ASC LIMIT 1" ) - } - (true, false) => { - format!( + } + (true, false) => { + format!( "SELECT {keys} FROM {table} WHERE k >= ? AND k <= ? ORDER BY k DESC LIMIT 1" ) - } - (false, true) => { - format!("SELECT {keys} FROM {table} WHERE k >= ? AND k <= ? ORDER BY k ASC") - } - (false, false) => { - format!("SELECT {keys} FROM {table} WHERE k >= ? AND k <= ? ORDER BY k DESC") - } - })?; - let mut rows = query.query([&begin, &end])?; + } + (false, true) => { + format!("SELECT {keys} FROM {table} WHERE k >= ? AND k <= ? ORDER BY k ASC") + } + (false, false) => { + format!( + "SELECT {keys} FROM {table} WHERE k >= ? AND k <= ? 
ORDER BY k DESC" + ) + } + }) + .map_err(into_error)?; + let mut rows = query.query([&begin, &end]).map_err(into_error)?; if params.values { - while let Some(row) = rows.next()? { - let key = row.get_ref(0)?.as_bytes()?; - let value = row.get_ref(1)?.as_bytes()?; + while let Some(row) = rows.next().map_err(into_error)? { + let key = row + .get_ref(0) + .map_err(into_error)? + .as_bytes() + .map_err(into_error)?; + let value = row + .get_ref(1) + .map_err(into_error)? + .as_bytes() + .map_err(into_error)?; if !cb(key, value)? { break; } } } else { - while let Some(row) = rows.next()? { - if !cb(row.get_ref(0)?.as_bytes()?, b"")? { + while let Some(row) = rows.next().map_err(into_error)? { + if !cb( + row.get_ref(0) + .map_err(into_error)? + .as_bytes() + .map_err(into_error)?, + b"", + )? { break; } } @@ -123,19 +148,20 @@ impl SqliteStore { pub(crate) async fn get_counter( &self, key: impl Into>> + Sync + Send, - ) -> crate::Result { + ) -> trc::Result { let key = key.into(); let table = char::from(key.subspace()); let key = key.serialize(0); - let conn = self.conn_pool.get()?; + let conn = self.conn_pool.get().map_err(into_error)?; self.spawn_worker(move || { match conn - .prepare_cached(&format!("SELECT v FROM {table} WHERE k = ?"))? + .prepare_cached(&format!("SELECT v FROM {table} WHERE k = ?")) + .map_err(into_error)? 
.query_row([&key], |row| row.get::<_, i64>(0)) { Ok(value) => Ok(value), Err(rusqlite::Error::QueryReturnedNoRows) => Ok(0), - Err(e) => Err(e.into()), + Err(e) => Err(into_error(e)), } }) .await diff --git a/crates/store/src/backend/sqlite/write.rs b/crates/store/src/backend/sqlite/write.rs index 6e2586c1..c5050462 100644 --- a/crates/store/src/backend/sqlite/write.rs +++ b/crates/store/src/backend/sqlite/write.rs @@ -15,17 +15,19 @@ use crate::{ BitmapKey, IndexKey, Key, LogKey, SUBSPACE_COUNTER, SUBSPACE_QUOTA, U32_LEN, }; -use super::SqliteStore; +use super::{into_error, SqliteStore}; impl SqliteStore { - pub(crate) async fn write(&self, batch: Batch) -> crate::Result { - let mut conn = self.conn_pool.get()?; + pub(crate) async fn write(&self, batch: Batch) -> trc::Result { + let mut conn = self.conn_pool.get().map_err(into_error)?; self.spawn_worker(move || { let mut account_id = u32::MAX; let mut collection = u8::MAX; let mut document_id = u32::MAX; let mut change_id = u64::MAX; - let trx = conn.transaction_with_behavior(TransactionBehavior::Immediate)?; + let trx = conn + .transaction_with_behavior(TransactionBehavior::Immediate) + .map_err(into_error)?; let mut result = AssignedIds::default(); for op in &batch.ops { @@ -65,8 +67,10 @@ impl SqliteStore { trx.prepare_cached(&format!( "INSERT OR REPLACE INTO {} (k, v) VALUES (?, ?)", table - ))? - .execute([&key, value.resolve(&result)?.as_ref()])?; + )) + .map_err(into_error)? + .execute([&key, value.resolve(&result)?.as_ref()]) + .map_err(into_error)?; } ValueOp::AtomicAdd(by) => { if *by >= 0 { @@ -76,13 +80,17 @@ impl SqliteStore { "ON CONFLICT(k) DO UPDATE SET v = v + excluded.v" ), table - ))? - .execute(params![&key, *by])?; + )) + .map_err(into_error)? + .execute(params![&key, *by]) + .map_err(into_error)?; } else { trx.prepare_cached(&format!( "UPDATE {table} SET v = v + ? WHERE k = ?" - ))? - .execute(params![*by, &key])?; + )) + .map_err(into_error)? 
+ .execute(params![*by, &key]) + .map_err(into_error)?; } } ValueOp::AddAndGet(by) => { @@ -94,13 +102,17 @@ impl SqliteStore { "excluded.v RETURNING v" ), table - ))? - .query_row(params![&key, &by], |row| row.get::<_, i64>(0))?, + )) + .map_err(into_error)? + .query_row(params![&key, &by], |row| row.get::<_, i64>(0)) + .map_err(into_error)?, ); } ValueOp::Clear => { - trx.prepare_cached(&format!("DELETE FROM {} WHERE k = ?", table))? - .execute([&key])?; + trx.prepare_cached(&format!("DELETE FROM {} WHERE k = ?", table)) + .map_err(into_error)? + .execute([&key]) + .map_err(into_error)?; } } } @@ -115,11 +127,15 @@ impl SqliteStore { .serialize(0); if *set { - trx.prepare_cached("INSERT OR IGNORE INTO i (k) VALUES (?)")? - .execute([&key])?; + trx.prepare_cached("INSERT OR IGNORE INTO i (k) VALUES (?)") + .map_err(into_error)? + .execute([&key]) + .map_err(into_error)?; } else { - trx.prepare_cached("DELETE FROM i WHERE k = ?")? - .execute([&key])?; + trx.prepare_cached("DELETE FROM i WHERE k = ?") + .map_err(into_error)? + .execute([&key]) + .map_err(into_error)?; } } Operation::Bitmap { class, set } => { @@ -142,12 +158,17 @@ impl SqliteStore { .serialize(0); let key_len = begin.len(); - let mut query = - trx.prepare_cached("SELECT k FROM b WHERE k >= ? AND k <= ?")?; - let mut rows = query.query([&begin, &end])?; + let mut query = trx + .prepare_cached("SELECT k FROM b WHERE k >= ? AND k <= ?") + .map_err(into_error)?; + let mut rows = query.query([&begin, &end]).map_err(into_error)?; let mut found_ids = RoaringBitmap::new(); - while let Some(row) = rows.next()? { - let key = row.get_ref(0)?.as_bytes()?; + while let Some(row) = rows.next().map_err(into_error)? { + let key = row + .get_ref(0) + .map_err(into_error)? 
+ .as_bytes() + .map_err(into_error)?; if key.len() == key_len { found_ids.insert(key.deserialize_be_u32(key.len() - U32_LEN)?); } @@ -167,18 +188,24 @@ impl SqliteStore { if *set { if is_document_id { - trx.prepare_cached("INSERT INTO b (k) VALUES (?)")? - .execute(params![&key])?; + trx.prepare_cached("INSERT INTO b (k) VALUES (?)") + .map_err(into_error)? + .execute(params![&key]) + .map_err(into_error)?; } else { trx.prepare_cached(&format!( "INSERT OR IGNORE INTO {} (k) VALUES (?)", table - ))? - .execute(params![&key])?; + )) + .map_err(into_error)? + .execute(params![&key]) + .map_err(into_error)?; } } else { - trx.prepare_cached(&format!("DELETE FROM {} WHERE k = ?", table))? - .execute(params![&key])?; + trx.prepare_cached(&format!("DELETE FROM {} WHERE k = ?", table)) + .map_err(into_error)? + .execute(params![&key]) + .map_err(into_error)?; }; } Operation::Log { set } => { @@ -189,8 +216,10 @@ impl SqliteStore { } .serialize(0); - trx.prepare_cached("INSERT OR REPLACE INTO l (k, v) VALUES (?, ?)")? - .execute([&key, set.resolve(&result)?.as_ref()])?; + trx.prepare_cached("INSERT OR REPLACE INTO l (k, v) VALUES (?, ?)") + .map_err(into_error)? + .execute([&key, set.resolve(&result).map_err(into_error)?.as_ref()]) + .map_err(into_error)?; } Operation::AssertValue { class, @@ -206,31 +235,35 @@ impl SqliteStore { let table = char::from(class.subspace(collection)); let matches = trx - .prepare_cached(&format!("SELECT v FROM {} WHERE k = ?", table))? + .prepare_cached(&format!("SELECT v FROM {} WHERE k = ?", table)) + .map_err(into_error)? .query_row([&key], |row| { Ok(assert_value.matches(row.get_ref(0)?.as_bytes()?)) }) - .optional()? + .optional() + .map_err(into_error)? 
.unwrap_or_else(|| assert_value.is_none()); if !matches { - trx.rollback()?; - return Err(crate::Error::AssertValueFailed); + trx.rollback().map_err(into_error)?; + return Err(trc::Cause::AssertValue.into()); } } } } - trx.commit().map(|_| result).map_err(Into::into) + trx.commit().map(|_| result).map_err(into_error) }) .await } - pub(crate) async fn purge_store(&self) -> crate::Result<()> { - let conn = self.conn_pool.get()?; + pub(crate) async fn purge_store(&self) -> trc::Result<()> { + let conn = self.conn_pool.get().map_err(into_error)?; self.spawn_worker(move || { for subspace in [SUBSPACE_QUOTA, SUBSPACE_COUNTER] { - conn.prepare_cached(&format!("DELETE FROM {} WHERE v = 0", char::from(subspace),))? - .execute([])?; + conn.prepare_cached(&format!("DELETE FROM {} WHERE v = 0", char::from(subspace),)) + .map_err(into_error)? + .execute([]) + .map_err(into_error)?; } Ok(()) @@ -238,14 +271,16 @@ impl SqliteStore { .await } - pub(crate) async fn delete_range(&self, from: impl Key, to: impl Key) -> crate::Result<()> { - let conn = self.conn_pool.get()?; + pub(crate) async fn delete_range(&self, from: impl Key, to: impl Key) -> trc::Result<()> { + let conn = self.conn_pool.get().map_err(into_error)?; self.spawn_worker(move || { conn.prepare_cached(&format!( "DELETE FROM {} WHERE k >= ? AND k < ?", char::from(from.subspace()), - ))? - .execute([from.serialize(0), to.serialize(0)])?; + )) + .map_err(into_error)? 
+ .execute([from.serialize(0), to.serialize(0)]) + .map_err(into_error)?; Ok(()) }) diff --git a/crates/store/src/config.rs b/crates/store/src/config.rs index d8c40153..4e04c32f 100644 --- a/crates/store/src/config.rs +++ b/crates/store/src/config.rs @@ -67,7 +67,6 @@ impl Stores { .property_or_default::(("store", id, "disable"), "false") .unwrap_or(false) { - tracing::debug!("Skipping disabled store {id:?}."); continue; } } @@ -205,7 +204,10 @@ impl Stores { } } unknown => { - tracing::debug!("Unknown directory type: {unknown:?}"); + config.new_parse_warning( + ("store", id, "type"), + format!("Unknown directory type: {unknown:?}"), + ); } } } @@ -305,12 +307,3 @@ impl Stores { } } } - -impl From for String { - fn from(err: crate::Error) -> Self { - match err { - crate::Error::InternalError(err) => err, - crate::Error::AssertValueFailed => unimplemented!(), - } - } -} diff --git a/crates/store/src/dispatch/blob.rs b/crates/store/src/dispatch/blob.rs index 4ea58d65..f774b855 100644 --- a/crates/store/src/dispatch/blob.rs +++ b/crates/store/src/dispatch/blob.rs @@ -6,16 +6,13 @@ use std::{borrow::Cow, ops::Range}; +use trc::AddContext; use utils::config::utils::ParseValue; use crate::{BlobBackend, BlobStore, CompressionAlgo, Store}; impl BlobStore { - pub async fn get_blob( - &self, - key: &[u8], - range: Range, - ) -> crate::Result>> { + pub async fn get_blob(&self, key: &[u8], range: Range) -> trc::Result>> { let read_range = match self.compression { CompressionAlgo::None => range.clone(), CompressionAlgo::Lz4 => 0..usize::MAX, @@ -33,7 +30,7 @@ impl BlobStore { Store::MySQL(store) => store.get_blob(key, read_range).await, #[cfg(feature = "rocks")] Store::RocksDb(store) => store.get_blob(key, read_range).await, - Store::None => Err(crate::Error::InternalError("No store configured".into())), + Store::None => Err(trc::Cause::NotConfigured.into()), }, BlobBackend::Fs(store) => store.get_blob(key, read_range).await, #[cfg(feature = "s3")] @@ -41,7 +38,7 @@ impl 
BlobStore { }; let decompressed = match self.compression { - CompressionAlgo::Lz4 => match result? { + CompressionAlgo::Lz4 => match result.caused_by(trc::location!())? { Some(data) if data.last().copied().unwrap_or_default() == CompressionAlgo::Lz4.marker() => @@ -50,14 +47,14 @@ impl BlobStore { data.get(..data.len() - 1).unwrap_or_default(), ) .map_err(|err| { - crate::Error::InternalError(format!( - "Failed to decompress LZ4 data: {}", - err - )) + trc::Cause::Decompress + .reason(err) + .ctx(trc::Key::Key, key) + .ctx(trc::Key::CausedBy, trc::location!()) })? } Some(data) => { - tracing::debug!("Warning: Missing LZ4 marker for key: {key:?}"); + trc::error!(BlobMissingMarker, Details = key); data } None => return Ok(None), @@ -77,7 +74,7 @@ impl BlobStore { } } - pub async fn put_blob(&self, key: &[u8], data: &[u8]) -> crate::Result<()> { + pub async fn put_blob(&self, key: &[u8], data: &[u8]) -> trc::Result<()> { let data: Cow<[u8]> = match self.compression { CompressionAlgo::None => data.into(), CompressionAlgo::Lz4 => { @@ -99,15 +96,16 @@ impl BlobStore { Store::MySQL(store) => store.put_blob(key, data.as_ref()).await, #[cfg(feature = "rocks")] Store::RocksDb(store) => store.put_blob(key, data.as_ref()).await, - Store::None => Err(crate::Error::InternalError("No store configured".into())), + Store::None => Err(trc::Cause::NotConfigured.into()), }, BlobBackend::Fs(store) => store.put_blob(key, data.as_ref()).await, #[cfg(feature = "s3")] BlobBackend::S3(store) => store.put_blob(key, data.as_ref()).await, } + .caused_by(trc::location!()) } - pub async fn delete_blob(&self, key: &[u8]) -> crate::Result { + pub async fn delete_blob(&self, key: &[u8]) -> trc::Result { match &self.backend { BlobBackend::Store(store) => match store { #[cfg(feature = "sqlite")] @@ -120,12 +118,13 @@ impl BlobStore { Store::MySQL(store) => store.delete_blob(key).await, #[cfg(feature = "rocks")] Store::RocksDb(store) => store.delete_blob(key).await, - Store::None => 
Err(crate::Error::InternalError("No store configured".into())), + Store::None => Err(trc::Cause::NotConfigured.into()), }, BlobBackend::Fs(store) => store.delete_blob(key).await, #[cfg(feature = "s3")] BlobBackend::S3(store) => store.delete_blob(key).await, } + .caused_by(trc::location!()) } pub fn with_compression(self, compression: CompressionAlgo) -> Self { @@ -149,7 +148,7 @@ impl CompressionAlgo { } impl ParseValue for CompressionAlgo { - fn parse_value(value: &str) -> utils::config::Result { + fn parse_value(value: &str) -> Result { match value { "lz4" => Ok(CompressionAlgo::Lz4), //"zstd" => Ok(CompressionAlgo::Zstd), diff --git a/crates/store/src/dispatch/fts.rs b/crates/store/src/dispatch/fts.rs index 4eb010c2..57c47fcf 100644 --- a/crates/store/src/dispatch/fts.rs +++ b/crates/store/src/dispatch/fts.rs @@ -7,6 +7,7 @@ use std::fmt::Display; use roaring::RoaringBitmap; +use trc::AddContext; use crate::{ fts::{index::FtsDocument, FtsFilter}, @@ -19,12 +20,13 @@ impl FtsStore { pub async fn index + Display + Clone + std::fmt::Debug>( &self, document: FtsDocument<'_, T>, - ) -> crate::Result<()> { + ) -> trc::Result<()> { match self { FtsStore::Store(store) => store.fts_index(document).await, #[cfg(feature = "elastic")] FtsStore::ElasticSearch(store) => store.fts_index(document).await, } + .caused_by( trc::location!()) } pub async fn query + Display + Clone + std::fmt::Debug>( @@ -32,7 +34,7 @@ impl FtsStore { account_id: u32, collection: impl Into, filters: Vec>, - ) -> crate::Result { + ) -> trc::Result { match self { FtsStore::Store(store) => store.fts_query(account_id, collection, filters).await, #[cfg(feature = "elastic")] @@ -40,6 +42,7 @@ impl FtsStore { store.fts_query(account_id, collection, filters).await } } + .caused_by( trc::location!()) } pub async fn remove( @@ -47,7 +50,7 @@ impl FtsStore { account_id: u32, collection: u8, document_ids: &impl DocumentSet, - ) -> crate::Result<()> { + ) -> trc::Result<()> { match self { FtsStore::Store(store) 
=> store.fts_remove(account_id, collection, document_ids).await, #[cfg(feature = "elastic")] @@ -55,13 +58,15 @@ impl FtsStore { store.fts_remove(account_id, collection, document_ids).await } } + .caused_by( trc::location!()) } - pub async fn remove_all(&self, account_id: u32) -> crate::Result<()> { + pub async fn remove_all(&self, account_id: u32) -> trc::Result<()> { match self { FtsStore::Store(store) => store.fts_remove_all(account_id).await, #[cfg(feature = "elastic")] FtsStore::ElasticSearch(store) => store.fts_remove_all(account_id).await, } + .caused_by( trc::location!()) } } diff --git a/crates/store/src/dispatch/lookup.rs b/crates/store/src/dispatch/lookup.rs index 93467b61..8ab3aaed 100644 --- a/crates/store/src/dispatch/lookup.rs +++ b/crates/store/src/dispatch/lookup.rs @@ -4,6 +4,7 @@ * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL */ +use trc::AddContext; use utils::config::Rate; use crate::{write::LookupClass, Row}; @@ -23,22 +24,25 @@ impl LookupStore { &self, query: &str, params: Vec>, - ) -> crate::Result { + ) -> trc::Result { let result = match self { #[cfg(feature = "sqlite")] - LookupStore::Store(Store::SQLite(store)) => store.query(query, params).await, + LookupStore::Store(Store::SQLite(store)) => store.query(query, ¶ms).await, #[cfg(feature = "postgres")] - LookupStore::Store(Store::PostgreSQL(store)) => store.query(query, params).await, + LookupStore::Store(Store::PostgreSQL(store)) => store.query(query, ¶ms).await, #[cfg(feature = "mysql")] - LookupStore::Store(Store::MySQL(store)) => store.query(query, params).await, - _ => Err(crate::Error::InternalError( - "Store does not support queries".into(), - )), + LookupStore::Store(Store::MySQL(store)) => store.query(query, ¶ms).await, + _ => Err(trc::Cause::Unsupported.into_err()), }; - tracing::trace!( context = "store", event = "query", query = query, result = ?result); + trc::trace!( + SqlQuery, + Query = query.to_string(), + Parameters = params.as_slice(), + Result = &result, + 
); - result + result.caused_by( trc::location!()) } pub async fn key_set( @@ -46,7 +50,7 @@ impl LookupStore { key: Vec, value: Vec, expires: Option, - ) -> crate::Result<()> { + ) -> trc::Result<()> { match self { LookupStore::Store(store) => { let mut batch = BatchBuilder::new(); @@ -72,10 +76,9 @@ impl LookupStore { ) .await .map(|_| ()), - LookupStore::Memory(_) => Err(crate::Error::InternalError( - "This store does not support key_set".into(), - )), + LookupStore::Memory(_) => Err(trc::Cause::Unsupported.into_err()), } + .caused_by( trc::location!()) } pub async fn counter_incr( @@ -84,7 +87,7 @@ impl LookupStore { value: i64, expires: Option, return_value: bool, - ) -> crate::Result { + ) -> trc::Result { match self { LookupStore::Store(store) => { let mut batch = BatchBuilder::new(); @@ -121,13 +124,14 @@ impl LookupStore { } #[cfg(feature = "redis")] LookupStore::Redis(store) => store.key_incr(key, value, expires).await, - LookupStore::Query(_) | LookupStore::Memory(_) => Err(crate::Error::InternalError( - "This store does not support counter_incr".into(), - )), + LookupStore::Query(_) | LookupStore::Memory(_) => { + Err(trc::Cause::Unsupported.into_err()) + } } + .caused_by( trc::location!()) } - pub async fn key_delete(&self, key: Vec) -> crate::Result<()> { + pub async fn key_delete(&self, key: Vec) -> trc::Result<()> { match self { LookupStore::Store(store) => { let mut batch = BatchBuilder::new(); @@ -139,13 +143,14 @@ impl LookupStore { } #[cfg(feature = "redis")] LookupStore::Redis(store) => store.key_delete(key).await, - LookupStore::Query(_) | LookupStore::Memory(_) => Err(crate::Error::InternalError( - "This store does not support key_set".into(), - )), + LookupStore::Query(_) | LookupStore::Memory(_) => { + Err(trc::Cause::Unsupported.into_err()) + } } + .caused_by( trc::location!()) } - pub async fn counter_delete(&self, key: Vec) -> crate::Result<()> { + pub async fn counter_delete(&self, key: Vec) -> trc::Result<()> { match self { 
LookupStore::Store(store) => { let mut batch = BatchBuilder::new(); @@ -157,16 +162,17 @@ impl LookupStore { } #[cfg(feature = "redis")] LookupStore::Redis(store) => store.key_delete(key).await, - LookupStore::Query(_) | LookupStore::Memory(_) => Err(crate::Error::InternalError( - "This store does not support key_set".into(), - )), + LookupStore::Query(_) | LookupStore::Memory(_) => { + Err(trc::Cause::Unsupported.into_err()) + } } + .caused_by( trc::location!()) } pub async fn key_get> + std::fmt::Debug + 'static>( &self, key: Vec, - ) -> crate::Result> { + ) -> trc::Result> { match self { LookupStore::Store(store) => store .get_value::>(ValueKey::from(ValueClass::Lookup(LookupClass::Key( @@ -191,9 +197,10 @@ impl LookupStore { .get(std::str::from_utf8(&key).unwrap_or_default()) .map(|value| T::from(value.clone()))), } + .caused_by( trc::location!()) } - pub async fn counter_get(&self, key: Vec) -> crate::Result { + pub async fn counter_get(&self, key: Vec) -> trc::Result { match self { LookupStore::Store(store) => { store @@ -204,13 +211,14 @@ impl LookupStore { } #[cfg(feature = "redis")] LookupStore::Redis(store) => store.counter_get(key).await, - LookupStore::Query(_) | LookupStore::Memory(_) => Err(crate::Error::InternalError( - "This store does not support counter_get".into(), - )), + LookupStore::Query(_) | LookupStore::Memory(_) => { + Err(trc::Cause::Unsupported.into_err()) + } } + .caused_by( trc::location!()) } - pub async fn key_exists(&self, key: Vec) -> crate::Result { + pub async fn key_exists(&self, key: Vec) -> trc::Result { match self { LookupStore::Store(store) => store .get_value::>(ValueKey::from(ValueClass::Lookup(LookupClass::Key( @@ -232,6 +240,7 @@ impl LookupStore { .get(std::str::from_utf8(&key).unwrap_or_default()) .is_some()), } + .caused_by( trc::location!()) } pub async fn is_rate_allowed( @@ -239,7 +248,7 @@ impl LookupStore { key: &[u8], rate: &Rate, soft_check: bool, - ) -> crate::Result> { + ) -> trc::Result> { let now = now(); 
let range_start = now / rate.period.as_secs(); let range_end = (range_start * rate.period.as_secs()) + rate.period.as_secs(); @@ -251,9 +260,13 @@ impl LookupStore { let requests = if !soft_check { self.counter_incr(bucket, 1, expires_in.into(), true) - .await? + .await + .caused_by( trc::location!())? } else { - self.counter_get(bucket).await? + 1 + self.counter_get(bucket) + .await + .caused_by( trc::location!())? + + 1 }; if requests <= rate.requests as i64 { @@ -263,7 +276,7 @@ impl LookupStore { } } - pub async fn purge_lookup_store(&self) -> crate::Result<()> { + pub async fn purge_lookup_store(&self) -> trc::Result<()> { match self { LookupStore::Store(store) => { // Delete expired keys and counters @@ -276,9 +289,13 @@ impl LookupStore { let mut expired_counters = Vec::new(); store .iterate(IterateParams::new(from_key, to_key), |key, value| { - let expiry = value.deserialize_be_u64(0)?; + let expiry = value.deserialize_be_u64(0).caused_by( trc::location!())?; if expiry == 0 { - if value.deserialize_be_u64(U64_LEN)? <= current_time { + if value + .deserialize_be_u64(U64_LEN) + .caused_by( trc::location!())? 
+ <= current_time + { expired_counters.push(key.to_vec()); } } else if expiry <= current_time { @@ -286,7 +303,8 @@ impl LookupStore { } Ok(true) }) - .await?; + .await + .caused_by( trc::location!())?; if !expired_keys.is_empty() { let mut batch = BatchBuilder::new(); @@ -296,12 +314,18 @@ impl LookupStore { op: ValueOp::Clear, }); if batch.ops.len() >= 1000 { - store.write(batch.build()).await?; + store + .write(batch.build()) + .await + .caused_by( trc::location!())?; batch = BatchBuilder::new(); } } if !batch.ops.is_empty() { - store.write(batch.build()).await?; + store + .write(batch.build()) + .await + .caused_by( trc::location!())?; } } @@ -317,12 +341,18 @@ impl LookupStore { op: ValueOp::Clear, }); if batch.ops.len() >= 1000 { - store.write(batch.build()).await?; + store + .write(batch.build()) + .await + .caused_by( trc::location!())?; batch = BatchBuilder::new(); } } if !batch.ops.is_empty() { - store.write(batch.build()).await?; + store + .write(batch.build()) + .await + .caused_by( trc::location!())?; } } } @@ -348,10 +378,13 @@ enum LookupValue { } impl Deserialize for LookupValue { - fn deserialize(bytes: &[u8]) -> crate::Result { + fn deserialize(bytes: &[u8]) -> trc::Result { bytes.deserialize_be_u64(0).and_then(|expires| { Ok(if expires > now() { - LookupValue::Value(T::deserialize(bytes.get(U64_LEN..).unwrap_or_default())?) 
+ LookupValue::Value( + T::deserialize(bytes.get(U64_LEN..).unwrap_or_default()) + .caused_by( trc::location!())?, + ) } else { LookupValue::None }) diff --git a/crates/store/src/dispatch/store.rs b/crates/store/src/dispatch/store.rs index ee5f5623..8981a5da 100644 --- a/crates/store/src/dispatch/store.rs +++ b/crates/store/src/dispatch/store.rs @@ -7,6 +7,7 @@ use std::ops::{BitAndAssign, Range}; use roaring::RoaringBitmap; +use trc::AddContext; use crate::{ write::{ @@ -27,7 +28,7 @@ pub static ref BITMAPS: std::sync::Arc(&self, key: impl Key) -> crate::Result> + pub async fn get_value(&self, key: impl Key) -> trc::Result> where U: Deserialize + 'static, { @@ -42,14 +43,15 @@ impl Store { Self::MySQL(store) => store.get_value(key).await, #[cfg(feature = "rocks")] Self::RocksDb(store) => store.get_value(key).await, - Self::None => Err(crate::Error::InternalError("No store configured".into())), + Self::None => Err(trc::Cause::NotConfigured.into()), } + .caused_by( trc::location!()) } pub async fn get_bitmap( &self, key: BitmapKey>, - ) -> crate::Result> { + ) -> trc::Result> { match self { #[cfg(feature = "sqlite")] Self::SQLite(store) => store.get_bitmap(key).await, @@ -61,17 +63,18 @@ impl Store { Self::MySQL(store) => store.get_bitmap(key).await, #[cfg(feature = "rocks")] Self::RocksDb(store) => store.get_bitmap(key).await, - Self::None => Err(crate::Error::InternalError("No store configured".into())), + Self::None => Err(trc::Cause::NotConfigured.into()), } + .caused_by( trc::location!()) } pub async fn get_bitmaps_intersection( &self, keys: Vec>>, - ) -> crate::Result> { + ) -> trc::Result> { let mut result: Option = None; for key in keys { - if let Some(bitmap) = self.get_bitmap(key).await? { + if let Some(bitmap) = self.get_bitmap(key).await.caused_by( trc::location!())? 
{ if let Some(result) = &mut result { result.bitand_assign(&bitmap); if result.is_empty() { @@ -90,8 +93,8 @@ impl Store { pub async fn iterate( &self, params: IterateParams, - cb: impl for<'x> FnMut(&'x [u8], &'x [u8]) -> crate::Result + Sync + Send, - ) -> crate::Result<()> { + cb: impl for<'x> FnMut(&'x [u8], &'x [u8]) -> trc::Result + Sync + Send, + ) -> trc::Result<()> { match self { #[cfg(feature = "sqlite")] Self::SQLite(store) => store.iterate(params, cb).await, @@ -103,14 +106,15 @@ impl Store { Self::MySQL(store) => store.iterate(params, cb).await, #[cfg(feature = "rocks")] Self::RocksDb(store) => store.iterate(params, cb).await, - Self::None => Err(crate::Error::InternalError("No store configured".into())), + Self::None => Err(trc::Cause::NotConfigured.into()), } + .caused_by( trc::location!()) } pub async fn get_counter( &self, key: impl Into>> + Sync + Send, - ) -> crate::Result { + ) -> trc::Result { match self { #[cfg(feature = "sqlite")] Self::SQLite(store) => store.get_counter(key).await, @@ -122,11 +126,12 @@ impl Store { Self::MySQL(store) => store.get_counter(key).await, #[cfg(feature = "rocks")] Self::RocksDb(store) => store.get_counter(key).await, - Self::None => Err(crate::Error::InternalError("No store configured".into())), + Self::None => Err(trc::Cause::NotConfigured.into()), } + .caused_by( trc::location!()) } - pub async fn write(&self, batch: Batch) -> crate::Result { + pub async fn write(&self, batch: Batch) -> trc::Result { #[cfg(feature = "test_mode")] if std::env::var("PARANOID_WRITE").map_or(false, |v| v == "1") { let mut account_id = u32::MAX; @@ -184,8 +189,9 @@ impl Store { Self::MySQL(store) => store.write(batch).await, #[cfg(feature = "rocks")] Self::RocksDb(store) => store.write(batch).await, - Self::None => Err(crate::Error::InternalError("No store configured".into())), - }?; + Self::None => Err(trc::Cause::NotConfigured.into()), + } + .caused_by( trc::location!())?; for (key, class, document_id, set) in bitmaps { let mut 
bitmaps = BITMAPS.lock(); @@ -225,11 +231,11 @@ impl Store { Self::MySQL(store) => store.write(batch).await, #[cfg(feature = "rocks")] Self::RocksDb(store) => store.write(batch).await, - Self::None => Err(crate::Error::InternalError("No store configured".into())), + Self::None => Err(trc::Cause::NotConfigured.into()), } } - pub async fn purge_store(&self) -> crate::Result<()> { + pub async fn purge_store(&self) -> trc::Result<()> { // Delete expired reports let now = now(); self.delete_range( @@ -239,7 +245,8 @@ impl Store { expires: now, })), ) - .await?; + .await + .caused_by( trc::location!())?; self.delete_range( ValueKey::from(ValueClass::Report(ReportClass::Tls { id: 0, expires: 0 })), ValueKey::from(ValueClass::Report(ReportClass::Tls { @@ -247,7 +254,8 @@ impl Store { expires: now, })), ) - .await?; + .await + .caused_by( trc::location!())?; self.delete_range( ValueKey::from(ValueClass::Report(ReportClass::Arf { id: 0, expires: 0 })), ValueKey::from(ValueClass::Report(ReportClass::Arf { @@ -255,7 +263,8 @@ impl Store { expires: now, })), ) - .await?; + .await + .caused_by( trc::location!())?; match self { #[cfg(feature = "sqlite")] @@ -268,11 +277,12 @@ impl Store { Self::MySQL(store) => store.purge_store().await, #[cfg(feature = "rocks")] Self::RocksDb(store) => store.purge_store().await, - Self::None => Err(crate::Error::InternalError("No store configured".into())), + Self::None => Err(trc::Cause::NotConfigured.into()), } + .caused_by( trc::location!()) } - pub async fn delete_range(&self, from: impl Key, to: impl Key) -> crate::Result<()> { + pub async fn delete_range(&self, from: impl Key, to: impl Key) -> trc::Result<()> { match self { #[cfg(feature = "sqlite")] Self::SQLite(store) => store.delete_range(from, to).await, @@ -284,8 +294,9 @@ impl Store { Self::MySQL(store) => store.delete_range(from, to).await, #[cfg(feature = "rocks")] Self::RocksDb(store) => store.delete_range(from, to).await, - Self::None => Err(crate::Error::InternalError("No store 
configured".into())), + Self::None => Err(trc::Cause::NotConfigured.into()), } + .caused_by( trc::location!()) } pub async fn delete_documents( @@ -295,7 +306,7 @@ impl Store { collection: u8, collection_offset: Option, document_ids: &impl DocumentSet, - ) -> crate::Result<()> { + ) -> trc::Result<()> { // Serialize keys let (from_key, to_key) = if collection_offset.is_some() { ( @@ -340,14 +351,17 @@ impl Store { Ok(true) }, ) - .await?; + .await + .caused_by( trc::location!())?; // Remove keys let mut batch = BatchBuilder::new(); for key in delete_keys { if batch.ops.len() >= 1000 { - self.write(std::mem::take(&mut batch).build()).await?; + self.write(std::mem::take(&mut batch).build()) + .await + .caused_by( trc::location!())?; } batch.ops.push(Operation::Value { class: ValueClass::Any(AnyClass { subspace, key }), @@ -356,13 +370,15 @@ impl Store { } if !batch.is_empty() { - self.write(batch.build()).await?; + self.write(batch.build()) + .await + .caused_by( trc::location!())?; } Ok(()) } - pub async fn purge_account(&self, account_id: u32) -> crate::Result<()> { + pub async fn purge_account(&self, account_id: u32) -> trc::Result<()> { for subspace in [ SUBSPACE_BITMAP_ID, SUBSPACE_BITMAP_TAG, @@ -380,7 +396,8 @@ impl Store { key: KeySerializer::new(U32_LEN).write(account_id + 1).finalize(), }, ) - .await?; + .await + .caused_by( trc::location!())?; } for (from_class, to_class) in [ @@ -411,17 +428,14 @@ impl Store { class: to_class, }, ) - .await?; + .await + .caused_by( trc::location!())?; } Ok(()) } - pub async fn get_blob( - &self, - key: &[u8], - range: Range, - ) -> crate::Result>> { + pub async fn get_blob(&self, key: &[u8], range: Range) -> trc::Result>> { match self { #[cfg(feature = "sqlite")] Self::SQLite(store) => store.get_blob(key, range).await, @@ -433,11 +447,12 @@ impl Store { Self::MySQL(store) => store.get_blob(key, range).await, #[cfg(feature = "rocks")] Self::RocksDb(store) => store.get_blob(key, range).await, - Self::None => 
Err(crate::Error::InternalError("No store configured".into())), + Self::None => Err(trc::Cause::NotConfigured.into()), } + .caused_by( trc::location!()) } - pub async fn put_blob(&self, key: &[u8], data: &[u8]) -> crate::Result<()> { + pub async fn put_blob(&self, key: &[u8], data: &[u8]) -> trc::Result<()> { match self { #[cfg(feature = "sqlite")] Self::SQLite(store) => store.put_blob(key, data).await, @@ -449,11 +464,12 @@ impl Store { Self::MySQL(store) => store.put_blob(key, data).await, #[cfg(feature = "rocks")] Self::RocksDb(store) => store.put_blob(key, data).await, - Self::None => Err(crate::Error::InternalError("No store configured".into())), + Self::None => Err(trc::Cause::NotConfigured.into()), } + .caused_by( trc::location!()) } - pub async fn delete_blob(&self, key: &[u8]) -> crate::Result { + pub async fn delete_blob(&self, key: &[u8]) -> trc::Result { match self { #[cfg(feature = "sqlite")] Self::SQLite(store) => store.delete_blob(key).await, @@ -465,8 +481,9 @@ impl Store { Self::MySQL(store) => store.delete_blob(key).await, #[cfg(feature = "rocks")] Self::RocksDb(store) => store.delete_blob(key).await, - Self::None => Err(crate::Error::InternalError("No store configured".into())), + Self::None => Err(trc::Cause::NotConfigured.into()), } + .caused_by( trc::location!()) } #[cfg(feature = "test_mode")] @@ -551,7 +568,7 @@ impl Store { self.iterate( IterateParams::new(from_key, to_key).ascending().no_values(), |key, _| { - let account_id = key.deserialize_be_u32(0)?; + let account_id = key.deserialize_be_u32(0).caused_by( trc::location!())?; if account_id != last_account_id { last_account_id = account_id; batch.with_account_id(account_id); @@ -563,7 +580,9 @@ impl Store { key.get(U32_LEN..U32_LEN + BLOB_HASH_LEN).unwrap(), ) .unwrap(), - until: key.deserialize_be_u64(key.len() - U64_LEN)?, + until: key + .deserialize_be_u64(key.len() - U64_LEN) + .caused_by( trc::location!())?, }), op: ValueOp::Clear, }); @@ -588,7 +607,7 @@ impl Store { let mut 
expired_counters = Vec::new(); self.iterate(IterateParams::new(from_key, to_key), |key, value| { - let expiry = value.deserialize_be_u64(0)?; + let expiry = value.deserialize_be_u64(0).caused_by( trc::location!())?; if expiry == 0 { expired_counters.push(key.to_vec()); } else if expiry != u64::MAX { diff --git a/crates/store/src/fts/index.rs b/crates/store/src/fts/index.rs index 8e0edc20..4b0c6c63 100644 --- a/crates/store/src/fts/index.rs +++ b/crates/store/src/fts/index.rs @@ -118,7 +118,7 @@ impl Store { pub async fn fts_index + Display + Clone + std::fmt::Debug>( &self, document: FtsDocument<'_, T>, - ) -> crate::Result<()> { + ) -> trc::Result<()> { let mut detect = LanguageDetector::new(); let mut tokens: AHashMap = AHashMap::new(); let mut parts = Vec::new(); @@ -230,7 +230,7 @@ impl Store { account_id: u32, collection: u8, document_ids: &impl DocumentSet, - ) -> crate::Result<()> { + ) -> trc::Result<()> { // Find keys to delete let mut delete_keys: AHashMap>> = AHashMap::new(); self.iterate( @@ -269,7 +269,9 @@ impl Store { (hash, len as u8) } invalid => { - return Err(format!("Invalid text bitmap key length {invalid}").into()) + return Err(trc::Error::corrupted_key(key, None, trc::location!()) + .ctx(trc::Key::Reason, "Invalid bitmap key length") + .ctx(trc::Key::Size, invalid)); } }; @@ -316,7 +318,7 @@ impl Store { Ok(()) } - pub async fn fts_remove_all(&self, _: u32) -> crate::Result<()> { + pub async fn fts_remove_all(&self, _: u32) -> trc::Result<()> { // No-op // Term indexes are stored in the same key range as the document diff --git a/crates/store/src/fts/query.rs b/crates/store/src/fts/query.rs index 7b24c02a..c95f4ea3 100644 --- a/crates/store/src/fts/query.rs +++ b/crates/store/src/fts/query.rs @@ -53,7 +53,7 @@ impl Store { account_id: u32, collection: impl Into, filters: Vec>, - ) -> crate::Result { + ) -> trc::Result { let collection = collection.into(); // Tokenize text @@ -271,7 +271,7 @@ impl Store { token_count: &AHashMap, token_cache: 
&mut AHashMap>>>, is_intersect: bool, - ) -> crate::Result> { + ) -> trc::Result> { let mut result_bm = RoaringBitmap::new(); let mut position_candidates = AHashMap::new(); let num_tokens = tokens.len(); diff --git a/crates/store/src/lib.rs b/crates/store/src/lib.rs index ea93ed9e..89a444f3 100644 --- a/crates/store/src/lib.rs +++ b/crates/store/src/lib.rs @@ -4,7 +4,7 @@ * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL */ -use std::{borrow::Cow, fmt::Display, sync::Arc}; +use std::{borrow::Cow, sync::Arc}; pub mod backend; pub mod config; @@ -47,7 +47,7 @@ use backend::elastic::ElasticSearchStore; use backend::redis::RedisStore; pub trait Deserialize: Sized + Sync + Send { - fn deserialize(bytes: &[u8]) -> crate::Result; + fn deserialize(bytes: &[u8]) -> trc::Result; } pub trait Serialize { @@ -104,8 +104,6 @@ pub struct LogKey { pub const U64_LEN: usize = std::mem::size_of::(); pub const U32_LEN: usize = std::mem::size_of::(); -pub type Result = std::result::Result; - #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum BlobClass { Reserved { @@ -128,29 +126,6 @@ impl Default for BlobClass { } } -#[derive(Debug, PartialEq, Eq)] -pub enum Error { - InternalError(String), - AssertValueFailed, -} - -impl std::error::Error for Error {} - -impl Display for Error { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Error::InternalError(msg) => write!(f, "Internal Error: {}", msg), - Error::AssertValueFailed => write!(f, "Transaction failed: Hash mismatch"), - } - } -} - -impl From for Error { - fn from(msg: String) -> Self { - Error::InternalError(msg) - } -} - pub const SUBSPACE_ACL: u8 = b'a'; pub const SUBSPACE_BITMAP_ID: u8 = b'b'; pub const SUBSPACE_BITMAP_TAG: u8 = b'c'; @@ -711,3 +686,16 @@ impl std::fmt::Debug for Store { } } } + +impl From> for trc::Value { + fn from(value: Value) -> Self { + match value { + Value::Integer(v) => trc::Value::Int(v), + Value::Bool(v) => trc::Value::Bool(v), + Value::Float(v) => 
trc::Value::Float(v), + Value::Text(v) => trc::Value::String(v.into_owned()), + Value::Blob(v) => trc::Value::Bytes(v.into_owned()), + Value::Null => trc::Value::None, + } + } +} diff --git a/crates/store/src/query/acl.rs b/crates/store/src/query/acl.rs index ca94a584..bd6f0d16 100644 --- a/crates/store/src/query/acl.rs +++ b/crates/store/src/query/acl.rs @@ -4,9 +4,11 @@ * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL */ +use trc::AddContext; + use crate::{ write::{key::DeserializeBigEndian, BatchBuilder, Operation, ValueClass, ValueOp}, - Deserialize, Error, IterateParams, Store, ValueKey, U32_LEN, + Deserialize, IterateParams, Store, ValueKey, U32_LEN, }; pub enum AclQuery { @@ -29,7 +31,7 @@ pub struct AclItem { } impl Store { - pub async fn acl_query(&self, query: AclQuery) -> crate::Result> { + pub async fn acl_query(&self, query: AclQuery) -> trc::Result> { let mut results = Vec::new(); let (from_key, to_key) = match query { AclQuery::SharedWith { @@ -72,12 +74,12 @@ impl Store { Ok(true) }, ) - .await?; - - Ok(results) + .await + .caused_by( trc::location!()) + .map(|_| results) } - pub async fn acl_revoke_all(&self, account_id: u32) -> crate::Result<()> { + pub async fn acl_revoke_all(&self, account_id: u32) -> trc::Result<()> { let from_key = ValueKey { account_id: 0, collection: 0, @@ -105,7 +107,8 @@ impl Store { Ok(true) }, ) - .await?; + .await + .caused_by( trc::location!())?; // Remove permissions let mut batch = BatchBuilder::new(); @@ -113,7 +116,9 @@ impl Store { let mut last_collection = u8::MAX; for (class, acl_item) in delete_keys.into_iter() { if batch.ops.len() >= 1000 { - self.write(batch.build()).await?; + self.write(batch.build()) + .await + .caused_by( trc::location!())?; batch = BatchBuilder::new(); batch.with_account_id(account_id); last_collection = u8::MAX; @@ -129,7 +134,9 @@ impl Store { }) } if !batch.is_empty() { - self.write(batch.build()).await?; + self.write(batch.build()) + .await + .caused_by( trc::location!())?; 
} Ok(()) @@ -137,12 +144,12 @@ impl Store { } impl Deserialize for AclItem { - fn deserialize(bytes: &[u8]) -> crate::Result { + fn deserialize(bytes: &[u8]) -> trc::Result { Ok(AclItem { to_account_id: bytes.deserialize_be_u32(U32_LEN)?, to_collection: *bytes .get(U32_LEN * 2) - .ok_or_else(|| Error::InternalError(format!("Corrupted acl key {bytes:?}")))?, + .ok_or_else(|| trc::Cause::DataCorruption.caused_by(trc::location!()))?, to_document_id: bytes.deserialize_be_u32((U32_LEN * 2) + 1)?, permissions: 0, }) diff --git a/crates/store/src/query/filter.rs b/crates/store/src/query/filter.rs index 9a30e00b..022b8c94 100644 --- a/crates/store/src/query/filter.rs +++ b/crates/store/src/query/filter.rs @@ -9,6 +9,7 @@ use std::ops::{BitAndAssign, BitOrAssign, BitXorAssign}; use ahash::HashSet; use nlp::tokenizers::word::WordTokenizer; use roaring::RoaringBitmap; +use trc::AddContext; use crate::{ backend::MAX_TOKEN_LENGTH, write::key::DeserializeBigEndian, BitmapKey, IndexKey, @@ -28,7 +29,7 @@ impl Store { account_id: u32, collection: impl Into + Sync + Send, filters: Vec, - ) -> crate::Result { + ) -> trc::Result { let collection = collection.into(); if filters.is_empty() { return Ok(ResultSet { @@ -36,7 +37,8 @@ impl Store { collection, results: self .get_bitmap(BitmapKey::document_ids(account_id, collection)) - .await? + .await + .caused_by(trc::location!())? .unwrap_or_else(RoaringBitmap::new), }); } @@ -50,10 +52,10 @@ impl Store { while let Some(filter) = filters.next() { let mut result = match filter { - Filter::MatchValue { field, op, value } => { - self.range_to_bitmap(account_id, collection, field, &value, op) - .await? - } + Filter::MatchValue { field, op, value } => self + .range_to_bitmap(account_id, collection, field, &value, op) + .await + .caused_by(trc::location!())?, Filter::HasText { field, text, @@ -70,21 +72,23 @@ impl Store { }) .collect(), ) - .await? + .await + .caused_by(trc::location!())? 
} else { self.get_bitmap(BitmapKey::text_token(account_id, collection, field, text)) - .await? + .await + .caused_by(trc::location!())? } } - Filter::InBitmap(class) => { - self.get_bitmap(BitmapKey { + Filter::InBitmap(class) => self + .get_bitmap(BitmapKey { account_id, collection, class, document_id: 0, }) - .await? - } + .await + .caused_by(trc::location!())?, Filter::DocumentSet(set) => Some(set), op @ (Filter::And | Filter::Or | Filter::Not) => { stack.push(state); @@ -106,7 +110,8 @@ impl Store { if matches!(state.op, Filter::Not) && !not_fetch { not_mask = self .get_bitmap(BitmapKey::document_ids(account_id, collection)) - .await? + .await + .caused_by(trc::location!())? .unwrap_or_else(RoaringBitmap::new); not_fetch = true; } @@ -171,7 +176,7 @@ impl Store { field: u8, match_value: &[u8], op: Operator, - ) -> crate::Result> { + ) -> trc::Result> { let (begin, end) = match op { Operator::LowerThan => ( IndexKey { @@ -271,9 +276,9 @@ impl Store { } let id_pos = key.len() - U32_LEN; - let value = key.get(IndexKeyPrefix::len()..id_pos).ok_or_else(|| { - crate::Error::InternalError("Invalid key found in index".to_string()) - })?; + let value = key + .get(IndexKeyPrefix::len()..id_pos) + .ok_or_else(|| trc::Error::corrupted_key(key, None, trc::location!()))?; let matches = match op { Operator::LowerThan => value < match_value, @@ -290,7 +295,8 @@ impl Store { Ok(true) }, ) - .await?; + .await + .caused_by(trc::location!())?; if !bm.is_empty() { Ok(Some(bm)) diff --git a/crates/store/src/query/log.rs b/crates/store/src/query/log.rs index 5439aaa9..e1419e99 100644 --- a/crates/store/src/query/log.rs +++ b/crates/store/src/query/log.rs @@ -4,9 +4,10 @@ * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL */ +use trc::AddContext; use utils::codec::leb128::Leb128Iterator; -use crate::{write::key::DeserializeBigEndian, Error, IterateParams, LogKey, Store, U64_LEN}; +use crate::{write::key::DeserializeBigEndian, IterateParams, LogKey, Store, U64_LEN}; 
#[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum Change { @@ -47,7 +48,7 @@ impl Store { account_id: u32, collection: impl Into + Sync + Send, query: Query, - ) -> crate::Result { + ) -> trc::Result { let collection = collection.into(); let (is_inclusive, from_change_id, to_change_id) = match query { Query::All => (true, 0, u64::MAX), @@ -80,16 +81,14 @@ impl Store { } changelog.to_change_id = change_id; changelog.deserialize(value).ok_or_else(|| { - Error::InternalError(format!( - "Failed to deserialize changelog for [{}/{:?}]: [{:?}]", - account_id, collection, query - )) + trc::Error::corrupted_key(key, value.into(), trc::location!()) })?; } Ok(true) }, ) - .await?; + .await + .caused_by(trc::location!())?; if changelog.changes.is_empty() { changelog.from_change_id = from_change_id; @@ -107,7 +106,7 @@ impl Store { &self, account_id: u32, collection: impl Into + Sync + Send, - ) -> crate::Result> { + ) -> trc::Result> { let collection = collection.into(); let from_key = LogKey { @@ -133,7 +132,8 @@ impl Store { Ok(false) }, ) - .await?; + .await + .caused_by(trc::location!())?; Ok(last_change_id) } diff --git a/crates/store/src/query/sort.rs b/crates/store/src/query/sort.rs index 7432609d..95c092a5 100644 --- a/crates/store/src/query/sort.rs +++ b/crates/store/src/query/sort.rs @@ -7,6 +7,7 @@ use std::cmp::Ordering; use ahash::{AHashMap, AHashSet}; +use trc::AddContext; use crate::{ write::{key::DeserializeBigEndian, ValueClass}, @@ -34,7 +35,7 @@ impl Store { result_set: ResultSet, mut comparators: Vec, mut paginate: Pagination, - ) -> crate::Result { + ) -> trc::Result { paginate.limit = match (result_set.results.len(), paginate.limit) { (0, _) => { return Ok(SortedResultSet { @@ -73,7 +74,8 @@ impl Store { Ok(!results.remove(document_id) || paginate.add(0, document_id)) }, ) - .await?; + .await + .caused_by(trc::location!())?; // Add remaining items not present in the index if !results.is_empty() && !paginate.is_full() { @@ -109,7 +111,8 @@ impl Store { 
for id in sorted_results.ids.iter_mut() { if let Some(prefix_id) = self .get_value::(prefix_key.clone().with_document_id(*id as u32)) - .await? + .await + .caused_by(trc::location!())? { *id |= (prefix_id as u64) << 32; } @@ -150,11 +153,7 @@ impl Store { Ok(if results.remove(document_id) { let data = key.get(IndexKeyPrefix::len()..id_pos).ok_or_else( - || { - crate::Error::InternalError( - "Invalid key found in index".to_string(), - ) - }, + || trc::Error::corrupted_key(key, None, trc::location!()), )?; debug_assert!(!data.is_empty()); @@ -173,7 +172,8 @@ impl Store { }) }, ) - .await?; + .await + .caused_by(trc::location!())?; // Add remaining items not present in the index if !results.is_empty() { @@ -216,7 +216,8 @@ impl Store { let prefix_id = if let Some(prefix_key) = &paginate.prefix_key { if let Some(prefix_id) = self .get_value(prefix_key.clone().with_document_id(document_id)) - .await? + .await + .caused_by(trc::location!())? { if paginate.prefix_unique && !seen_prefixes.insert(prefix_id) { continue; @@ -244,7 +245,8 @@ impl Store { let prefix_id = if let Some(prefix_key) = &paginate.prefix_key { if let Some(prefix_id) = self .get_value(prefix_key.clone().with_document_id(document_id)) - .await? + .await + .caused_by(trc::location!())? 
{ if paginate.prefix_unique && !seen_prefixes.insert(prefix_id) { continue; diff --git a/crates/store/src/write/assert.rs b/crates/store/src/write/assert.rs index d48848cd..7f19cfac 100644 --- a/crates/store/src/write/assert.rs +++ b/crates/store/src/write/assert.rs @@ -84,7 +84,7 @@ impl AssertValue { } impl Deserialize for HashedValue { - fn deserialize(bytes: &[u8]) -> crate::Result { + fn deserialize(bytes: &[u8]) -> trc::Result { Ok(HashedValue { hash: xxhash_rust::xxh3::xxh3_64(bytes), inner: T::deserialize(bytes)?, diff --git a/crates/store/src/write/blob.rs b/crates/store/src/write/blob.rs index 9a75ab2f..4c2a6276 100644 --- a/crates/store/src/write/blob.rs +++ b/crates/store/src/write/blob.rs @@ -5,6 +5,7 @@ */ use ahash::AHashSet; +use trc::AddContext; use utils::{BlobHash, BLOB_HASH_LEN}; use crate::{ @@ -21,10 +22,7 @@ pub struct BlobQuota { } impl Store { - pub async fn blob_exists( - &self, - hash: impl AsRef + Sync + Send, - ) -> crate::Result { + pub async fn blob_exists(&self, hash: impl AsRef + Sync + Send) -> trc::Result { self.get_value::<()>(ValueKey { account_id: 0, collection: 0, @@ -35,9 +33,10 @@ impl Store { }) .await .map(|v| v.is_some()) + .caused_by(trc::location!()) } - pub async fn blob_quota(&self, account_id: u32) -> crate::Result { + pub async fn blob_quota(&self, account_id: u32) -> trc::Result { let from_key = ValueKey { account_id, collection: 0, @@ -74,7 +73,8 @@ impl Store { Ok(true) }, ) - .await?; + .await + .caused_by(trc::location!())?; Ok(quota) } @@ -83,7 +83,7 @@ impl Store { &self, hash: impl AsRef + Sync + Send, class: impl AsRef + Sync + Send, - ) -> crate::Result { + ) -> trc::Result { let key = match class.as_ref() { BlobClass::Reserved { account_id, @@ -115,7 +115,7 @@ impl Store { self.get_value::<()>(key).await.map(|v| v.is_some()) } - pub async fn purge_blobs(&self, blob_store: BlobStore) -> crate::Result<()> { + pub async fn purge_blobs(&self, blob_store: BlobStore) -> trc::Result<()> { // Remove expired 
temporary blobs let from_key = ValueKey { account_id: 0, @@ -142,11 +142,8 @@ impl Store { IterateParams::new(from_key, to_key).ascending().no_values(), |key, _| { let hash = BlobHash::try_from_hash_slice( - key.get(U32_LEN..U32_LEN + BLOB_HASH_LEN).ok_or_else(|| { - crate::Error::InternalError(format!( - "Invalid key {key:?} in blob hash tables" - )) - })?, + key.get(U32_LEN..U32_LEN + BLOB_HASH_LEN) + .ok_or_else(|| trc::Error::corrupted_key(key, None, trc::location!()))?, ) .unwrap(); let until = key.deserialize_be_u64(key.len() - U64_LEN)?; @@ -158,7 +155,8 @@ impl Store { Ok(true) }, ) - .await?; + .await + .caused_by(trc::location!())?; // Validate linked blobs let from_key = ValueKey { @@ -181,13 +179,11 @@ impl Store { self.iterate( IterateParams::new(from_key, to_key).ascending().no_values(), |key, _| { - let hash = - BlobHash::try_from_hash_slice(key.get(0..BLOB_HASH_LEN).ok_or_else(|| { - crate::Error::InternalError(format!( - "Invalid key {key:?} in blob hash tables" - )) - })?) - .unwrap(); + let hash = BlobHash::try_from_hash_slice( + key.get(0..BLOB_HASH_LEN) + .ok_or_else(|| trc::Error::corrupted_key(key, None, trc::location!()))?, + ) + .unwrap(); let document_id = key.deserialize_be_u32(key.len() - U32_LEN)?; if document_id != u32::MAX { @@ -202,12 +198,16 @@ impl Store { Ok(true) }, ) - .await?; + .await + .caused_by(trc::location!())?; // Delete expired or unlinked blobs for (_, op) in &delete_keys { if let BlobOp::Commit { hash } = op { - blob_store.delete_blob(hash.as_ref()).await?; + blob_store + .delete_blob(hash.as_ref()) + .await + .caused_by(trc::location!())?; } } @@ -217,7 +217,9 @@ impl Store { for (account_id, op) in delete_keys.into_iter() { if batch.ops.len() >= 1000 { last_account_id = u32::MAX; - self.write(batch.build()).await?; + self.write(batch.build()) + .await + .caused_by(trc::location!())?; batch = BatchBuilder::new(); } if matches!(op, BlobOp::Reserve { .. 
}) && account_id != last_account_id { @@ -230,13 +232,15 @@ impl Store { }) } if !batch.is_empty() { - self.write(batch.build()).await?; + self.write(batch.build()) + .await + .caused_by(trc::location!())?; } Ok(()) } - pub async fn blob_hash_unlink_account(&self, account_id: u32) -> crate::Result<()> { + pub async fn blob_hash_unlink_account(&self, account_id: u32) -> trc::Result<()> { // Validate linked blobs let from_key = ValueKey { account_id: 0, @@ -267,9 +271,7 @@ impl Store { BlobOp::Link { hash: BlobHash::try_from_hash_slice( key.get(0..BLOB_HASH_LEN).ok_or_else(|| { - crate::Error::InternalError(format!( - "Invalid key {key:?} in blob hash tables" - )) + trc::Error::corrupted_key(key, None, trc::location!()) })?, ) .unwrap(), @@ -280,7 +282,8 @@ impl Store { Ok(true) }, ) - .await?; + .await + .caused_by(trc::location!())?; // Unlink blobs let mut batch = BatchBuilder::new(); @@ -288,7 +291,9 @@ impl Store { let mut last_collection = u8::MAX; for (collection, document_id, op) in delete_keys.into_iter() { if batch.ops.len() >= 1000 { - self.write(batch.build()).await?; + self.write(batch.build()) + .await + .caused_by(trc::location!())?; batch = BatchBuilder::new(); batch.with_account_id(account_id); last_collection = u8::MAX; @@ -304,7 +309,9 @@ impl Store { }); } if !batch.is_empty() { - self.write(batch.build()).await?; + self.write(batch.build()) + .await + .caused_by(trc::location!())?; } Ok(()) diff --git a/crates/store/src/write/key.rs b/crates/store/src/write/key.rs index 1165f90b..22ff00e0 100644 --- a/crates/store/src/write/key.rs +++ b/crates/store/src/write/key.rs @@ -30,8 +30,8 @@ pub trait KeySerialize { } pub trait DeserializeBigEndian { - fn deserialize_be_u32(&self, index: usize) -> crate::Result; - fn deserialize_be_u64(&self, index: usize) -> crate::Result; + fn deserialize_be_u32(&self, index: usize) -> trc::Result; + fn deserialize_be_u64(&self, index: usize) -> trc::Result; } impl KeySerializer { @@ -99,35 +99,35 @@ impl KeySerialize 
for u64 { } impl DeserializeBigEndian for &[u8] { - fn deserialize_be_u32(&self, index: usize) -> crate::Result { + fn deserialize_be_u32(&self, index: usize) -> trc::Result { self.get(index..index + U32_LEN) .ok_or_else(|| { - crate::Error::InternalError( - "Index out of range while deserializing u32.".to_string(), - ) + trc::Cause::DataCorruption + .caused_by(trc::location!()) + .ctx(trc::Key::Value, *self) }) .and_then(|bytes| { bytes.try_into().map_err(|_| { - crate::Error::InternalError( - "Index out of range while deserializing u32.".to_string(), - ) + trc::Cause::DataCorruption + .caused_by(trc::location!()) + .ctx(trc::Key::Value, *self) }) }) .map(u32::from_be_bytes) } - fn deserialize_be_u64(&self, index: usize) -> crate::Result { + fn deserialize_be_u64(&self, index: usize) -> trc::Result { self.get(index..index + U64_LEN) .ok_or_else(|| { - crate::Error::InternalError( - "Index out of range while deserializing u64.".to_string(), - ) + trc::Cause::DataCorruption + .caused_by(trc::location!()) + .ctx(trc::Key::Value, *self) }) .and_then(|bytes| { bytes.try_into().map_err(|_| { - crate::Error::InternalError( - "Index out of range while deserializing u64.".to_string(), - ) + trc::Cause::DataCorruption + .caused_by(trc::location!()) + .ctx(trc::Key::Value, *self) }) }) .map(u64::from_be_bytes) @@ -634,7 +634,7 @@ impl From for ValueClass { } impl Deserialize for ReportEvent { - fn deserialize(key: &[u8]) -> crate::Result { + fn deserialize(key: &[u8]) -> trc::Result { Ok(ReportEvent { due: key.deserialize_be_u64(1)?, policy_hash: key.deserialize_be_u64(key.len() - (U64_LEN * 2 + 1))?, @@ -644,9 +644,9 @@ impl Deserialize for ReportEvent { .and_then(|domain| std::str::from_utf8(domain).ok()) .map(|s| s.to_string()) .ok_or_else(|| { - crate::Error::InternalError(format!( - "Failed to deserialize report domain: {key:?}" - )) + trc::Cause::DataCorruption + .caused_by(trc::location!()) + .ctx(trc::Key::Key, key) })?, }) } diff --git 
a/crates/store/src/write/log.rs b/crates/store/src/write/log.rs index 5440c6c3..86e42de8 100644 --- a/crates/store/src/write/log.rs +++ b/crates/store/src/write/log.rs @@ -222,7 +222,7 @@ impl From for MaybeDynamicValue { pub struct LogInsert(); impl SerializeWithId for LogInsert { - fn serialize_with_id(&self, ids: &super::AssignedIds) -> crate::Result> { + fn serialize_with_id(&self, ids: &super::AssignedIds) -> trc::Result> { ids.last_document_id() .map(|id| Changes::insert([id]).serialize()) } diff --git a/crates/store/src/write/mod.rs b/crates/store/src/write/mod.rs index cb901b69..b014637a 100644 --- a/crates/store/src/write/mod.rs +++ b/crates/store/src/write/mod.rs @@ -34,7 +34,7 @@ pub mod log; pub mod purge; pub trait SerializeWithId: Send + Sync { - fn serialize_with_id(&self, ids: &AssignedIds) -> crate::Result>; + fn serialize_with_id(&self, ids: &AssignedIds) -> trc::Result>; } pub trait ResolveId { @@ -338,31 +338,31 @@ impl Serialize for Vec { } impl Deserialize for String { - fn deserialize(bytes: &[u8]) -> crate::Result { + fn deserialize(bytes: &[u8]) -> trc::Result { Ok(String::from_utf8_lossy(bytes).into_owned()) } } impl Deserialize for u64 { - fn deserialize(bytes: &[u8]) -> crate::Result { + fn deserialize(bytes: &[u8]) -> trc::Result { Ok(u64::from_be_bytes(bytes.try_into().map_err(|_| { - crate::Error::InternalError("Failed to deserialize u64".to_string()) + trc::Cause::DataCorruption.caused_by(trc::location!()) })?)) } } impl Deserialize for i64 { - fn deserialize(bytes: &[u8]) -> crate::Result { + fn deserialize(bytes: &[u8]) -> trc::Result { Ok(i64::from_be_bytes(bytes.try_into().map_err(|_| { - crate::Error::InternalError("Failed to deserialize i64".to_string()) + trc::Cause::DataCorruption.caused_by(trc::location!()) })?)) } } impl Deserialize for u32 { - fn deserialize(bytes: &[u8]) -> crate::Result { + fn deserialize(bytes: &[u8]) -> trc::Result { Ok(u32::from_be_bytes(bytes.try_into().map_err(|_| { - 
crate::Error::InternalError("Failed to deserialize u32".to_string()) + trc::Cause::DataCorruption.caused_by(trc::location!()) })?)) } } @@ -452,16 +452,17 @@ impl DeserializeFrom for Vec { } impl Deserialize for Vec { - fn deserialize(bytes: &[u8]) -> crate::Result { + fn deserialize(bytes: &[u8]) -> trc::Result { let mut bytes = bytes.iter(); let len: usize = bytes .next_leb128() - .ok_or_else(|| crate::Error::InternalError("Failed to deserialize Vec".to_string()))?; + .ok_or_else(|| trc::Cause::DataCorruption.caused_by(trc::location!()))?; let mut list = Vec::with_capacity(len); for _ in 0..len { - list.push(T::deserialize_from(&mut bytes).ok_or_else(|| { - crate::Error::InternalError("Failed to deserialize Vec".to_string()) - })?); + list.push( + T::deserialize_from(&mut bytes) + .ok_or_else(|| trc::Cause::DataCorruption.caused_by(trc::location!()))?, + ); } Ok(list) } @@ -576,7 +577,7 @@ impl ToBitmaps for () { } impl Deserialize for () { - fn deserialize(_bytes: &[u8]) -> crate::Result { + fn deserialize(_bytes: &[u8]) -> trc::Result { Ok(()) } } @@ -682,17 +683,18 @@ impl Serialize for Bincode impl Deserialize for Bincode { - fn deserialize(bytes: &[u8]) -> crate::Result { + fn deserialize(bytes: &[u8]) -> trc::Result { lz4_flex::decompress_size_prepended(bytes) .map_err(|err| { - crate::Error::InternalError(format!("Bincode decompression failed: {err:?}")) + trc::Cause::Decompress + .caused_by(trc::location!()) + .reason(err) }) .and_then(|result| { bincode::deserialize(&result).map_err(|err| { - crate::Error::InternalError(format!( - "Bincode deserialization failed (len {}): {err:?}", - result.len() - )) + trc::Cause::DataCorruption + .caused_by(trc::location!()) + .reason(err) }) }) .map(|inner| Self { inner }) @@ -720,29 +722,32 @@ impl AssignedIds { self.counter_ids.push(id); } - pub fn get_document_id(&self, idx: usize) -> crate::Result { - self.document_ids - .get(idx) - .copied() - .ok_or_else(|| crate::Error::InternalError("No document ids were 
created".to_string())) + pub fn get_document_id(&self, idx: usize) -> trc::Result { + self.document_ids.get(idx).copied().ok_or_else(|| { + trc::Cause::Unexpected + .caused_by(trc::location!()) + .ctx(trc::Key::Reason, "No document ids were created") + }) } - pub fn first_document_id(&self) -> crate::Result { + pub fn first_document_id(&self) -> trc::Result { self.get_document_id(0) } - pub fn last_document_id(&self) -> crate::Result { - self.document_ids - .last() - .copied() - .ok_or_else(|| crate::Error::InternalError("No document ids were created".to_string())) + pub fn last_document_id(&self) -> trc::Result { + self.document_ids.last().copied().ok_or_else(|| { + trc::Cause::Unexpected + .caused_by(trc::location!()) + .ctx(trc::Key::Reason, "No document ids were created") + }) } - pub fn last_counter_id(&self) -> crate::Result { - self.counter_ids - .last() - .copied() - .ok_or_else(|| crate::Error::InternalError("No counter ids were created".to_string())) + pub fn last_counter_id(&self) -> trc::Result { + self.counter_ids.last().copied().ok_or_else(|| { + trc::Cause::Unexpected + .caused_by(trc::location!()) + .ctx(trc::Key::Reason, "No counter ids were created") + }) } } @@ -765,7 +770,7 @@ impl From> for MaybeDynamicValue { } impl MaybeDynamicValue { - pub fn resolve(&self, ids: &AssignedIds) -> crate::Result> { + pub fn resolve(&self, ids: &AssignedIds) -> trc::Result> { match self { MaybeDynamicValue::Static(value) => Ok(Cow::Borrowed(value.as_slice())), MaybeDynamicValue::Dynamic(value) => value.serialize_with_id(ids).map(Cow::Owned), @@ -774,7 +779,7 @@ impl MaybeDynamicValue { } impl MaybeDynamicId { - pub fn resolve(&self, ids: &AssignedIds) -> crate::Result { + pub fn resolve(&self, ids: &AssignedIds) -> trc::Result { match self { MaybeDynamicId::Static(id) => Ok(*id), MaybeDynamicId::Dynamic(idx) => ids.get_document_id(*idx), @@ -842,7 +847,7 @@ impl From for MaybeDynamicValue { } impl SerializeWithId for DynamicDocumentId { - fn 
serialize_with_id(&self, ids: &AssignedIds) -> crate::Result> { + fn serialize_with_id(&self, ids: &AssignedIds) -> trc::Result> { ids.get_document_id(self.0).map(|id| id.serialize()) } } diff --git a/crates/store/src/write/purge.rs b/crates/store/src/write/purge.rs index e313acc8..76a1a192 100644 --- a/crates/store/src/write/purge.rs +++ b/crates/store/src/write/purge.rs @@ -27,25 +27,32 @@ pub struct PurgeSchedule { impl PurgeSchedule { pub fn spawn(self, mut shutdown_rx: watch::Receiver) { - tracing::debug!( - "Purge {} task started for store {:?}.", - self.store, - self.store_id + trc::trace!( + PurgeTaskStarted, + Type = self.store.as_str(), + Id = self.store_id.to_string() ); + tokio::spawn(async move { loop { if tokio::time::timeout(self.cron.time_to_next(), shutdown_rx.changed()) .await .is_ok() { - tracing::debug!( - "Purge {} task exiting for store {:?}.", - self.store, - self.store_id + trc::trace!( + PurgeTaskFinished, + Type = self.store.as_str(), + Id = self.store_id.to_string() ); return; } + trc::trace!( + PurgeTaskRunning, + Type = self.store.as_str(), + Id = self.store_id.to_string() + ); + let result = match &self.store { PurgeStore::Data(store) => store.purge_store().await, PurgeStore::Blobs { store, blob_store } => { @@ -55,11 +62,11 @@ impl PurgeSchedule { }; if let Err(err) = result { - tracing::warn!( - "Purge {} task failed for store {:?}: {:?}", - self.store, - self.store_id, - err + trc::error!( + Purge, + Type = self.store.as_str(), + Id = self.store_id.to_string(), + CausedBy = err ); } } @@ -67,6 +74,16 @@ impl PurgeSchedule { } } +impl PurgeStore { + pub fn as_str(&self) -> &'static str { + match self { + PurgeStore::Data(_) => "data", + PurgeStore::Blobs { .. 
} => "blobs", + PurgeStore::Lookup(_) => "lookup", + } + } +} + impl Display for PurgeStore { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { diff --git a/crates/trc/Cargo.toml b/crates/trc/Cargo.toml new file mode 100644 index 00000000..cb49c806 --- /dev/null +++ b/crates/trc/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "trc" +version = "0.8.5" +edition = "2021" +resolver = "2" + +[dependencies] +base64 = "0.22.1" +serde_json = "1.0.120" +reqwest = { version = "0.12", default-features = false, features = ["rustls-tls-webpki-roots", "http2"]} +bincode = "1.3.3" + +[features] +test_mode = [] + +[dev-dependencies] diff --git a/crates/trc/src/conv.rs b/crates/trc/src/conv.rs new file mode 100644 index 00000000..bda0c37c --- /dev/null +++ b/crates/trc/src/conv.rs @@ -0,0 +1,214 @@ +/* + * SPDX-FileCopyrightText: 2020 Stalwart Labs Ltd + * + * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL + */ + +use std::fmt::Debug; + +use crate::*; + +impl AsRef for Context { + fn as_ref(&self) -> &T { + &self.inner + } +} + +impl From<&'static str> for Value { + fn from(value: &'static str) -> Self { + Self::Static(value) + } +} + +impl From for Value { + fn from(value: String) -> Self { + Self::String(value) + } +} + +impl From for Value { + fn from(value: u64) -> Self { + Self::UInt(value) + } +} + +impl From for Value { + fn from(value: f64) -> Self { + Self::Float(value) + } +} + +impl From for Value { + fn from(value: u16) -> Self { + Self::UInt(value.into()) + } +} + +impl From for Value { + fn from(value: i32) -> Self { + Self::Int(value.into()) + } +} + +impl From for Value { + fn from(value: u32) -> Self { + Self::UInt(value.into()) + } +} + +impl From for Value { + fn from(value: usize) -> Self { + Self::UInt(value as u64) + } +} + +impl From for Value { + fn from(value: bool) -> Self { + Self::Bool(value) + } +} + +impl From for Value { + fn from(value: IpAddr) -> Self { + match value { + IpAddr::V4(ip) => Value::Ipv4(ip), + 
IpAddr::V6(ip) => Value::Ipv6(Box::new(ip)), + } + } +} + +impl From for Value { + fn from(value: Error) -> Self { + Self::Error(Box::new(value)) + } +} + +impl From for Value { + fn from(value: ErrorKind) -> Self { + Self::ErrorKind(value) + } +} + +impl From for Error { + fn from(value: Cause) -> Self { + Error::new(value) + } +} + +impl From for Value { + fn from(value: Protocol) -> Self { + Self::Protocol(value) + } +} + +impl From> for Value { + fn from(value: Vec) -> Self { + Self::Bytes(value) + } +} + +impl From<&[u8]> for Value { + fn from(value: &[u8]) -> Self { + Self::Bytes(value.to_vec()) + } +} + +impl From<&crate::Result> for Value +where + T: Debug, +{ + fn from(value: &crate::Result) -> Self { + match value { + Ok(value) => format!("{:?}", value).into(), + Err(err) => err.clone().into(), + } + } +} + +impl From> for Value +where + T: Into, +{ + fn from(value: Vec) -> Self { + Self::Array(value.into_iter().map(Into::into).collect()) + } +} + +impl From<&[T]> for Value +where + T: Into + Clone, +{ + fn from(value: &[T]) -> Self { + Self::Array(value.iter().map(|v| v.clone().into()).collect()) + } +} + +impl From for Error { + fn from(err: std::io::Error) -> Self { + Cause::Io + .ctx(Key::Reason, err.kind()) + .ctx(Key::Details, err.to_string()) + } +} + +impl From for Error { + fn from(err: serde_json::Error) -> Self { + Cause::Deserialize + .reason(err) + .details("JSON deserialization failed") + } +} + +impl From for Error { + fn from(err: base64::DecodeError) -> Self { + Cause::DataCorruption + .reason(err) + .details("Base64 decoding failed") + } +} + +impl From for Error { + fn from(err: reqwest::Error) -> Self { + Cause::Http + .into_err() + .ctx_opt(Key::Url, err.url().map(|url| url.as_ref().to_string())) + .ctx_opt(Key::Code, err.status().map(|status| status.as_u16())) + .reason(err) + } +} + +impl From for Error { + fn from(value: bincode::Error) -> Self { + Cause::Deserialize + .reason(value) + .details("Bincode deserialization failed") + } 
+} + +impl From for Error { + fn from(value: reqwest::header::ToStrError) -> Self { + Cause::Http + .reason(value) + .details("Failed to convert header to string") + } +} + +pub trait AssertSuccess +where + Self: Sized, +{ + fn assert_success(self) -> impl std::future::Future> + Send; +} + +impl AssertSuccess for reqwest::Response { + async fn assert_success(self) -> crate::Result { + let status = self.status(); + if status.is_success() { + Ok(self) + } else { + Err(Cause::Http + .ctx(Key::Code, status.as_u16()) + .ctx_opt(Key::Reason, self.text().await.ok())) + } + } +} diff --git a/crates/trc/src/imple.rs b/crates/trc/src/imple.rs new file mode 100644 index 00000000..91ba3871 --- /dev/null +++ b/crates/trc/src/imple.rs @@ -0,0 +1,211 @@ +/* + * SPDX-FileCopyrightText: 2020 Stalwart Labs Ltd + * + * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL + */ + +use std::{borrow::Cow, fmt::Display}; + +use crate::*; + +impl Context +where + [(Key, Value); N]: Default, + T: Eq, +{ + pub fn new(inner: T) -> Self { + Self { + inner, + keys: Default::default(), + keys_size: 0, + } + } + + #[inline(always)] + pub fn ctx(mut self, key: Key, value: impl Into) -> Self { + if self.keys_size < N { + self.keys[self.keys_size] = (key, value.into()); + self.keys_size += 1; + } else { + #[cfg(debug_assertions)] + panic!( + "Context is full while inserting {:?}: {:?}", + key, + value.into() + ); + } + self + } + + pub fn ctx_opt(self, key: Key, value: Option>) -> Self { + match value { + Some(value) => self.ctx(key, value), + None => self, + } + } + + #[inline(always)] + pub fn matches(&self, inner: T) -> bool { + self.inner == inner + } + + pub fn value(&self, key: Key) -> Option<&Value> { + self.keys.iter().take(self.keys_size).find_map( + |(k, v)| { + if *k == key { + Some(v) + } else { + None + } + }, + ) + } + + pub fn take_value(&mut self, key: Key) -> Option { + self.keys + .iter_mut() + .take(self.keys_size) + .find_map(|(k, v)| { + if *k == key { + 
Some(std::mem::take(v)) + } else { + None + } + }) + } + + #[inline(always)] + pub fn caused_by(self, error: impl Into) -> Self { + self.ctx(Key::CausedBy, error) + } + + #[inline(always)] + pub fn details(self, error: impl Into) -> Self { + self.ctx(Key::Details, error) + } + + #[inline(always)] + pub fn reason(self, error: impl Display) -> Self { + self.ctx(Key::Reason, error.to_string()) + } + + #[inline(always)] + pub fn protocol(self, protocol: Protocol) -> Self { + self.ctx(Key::Protocol, protocol) + } + + #[inline(always)] + pub fn document_id(self, id: u32) -> Self { + self.ctx(Key::DocumentId, id) + } + + #[inline(always)] + pub fn account_id(self, id: u32) -> Self { + self.ctx(Key::AccountId, id) + } + + #[inline(always)] + pub fn collection(self, id: impl Into) -> Self { + self.ctx(Key::Code, id.into() as u64) + } + + #[inline(always)] + pub fn property(self, id: impl Into) -> Self { + self.ctx(Key::Property, id.into() as u64) + } + + pub fn corrupted_key(key: &[u8], value: Option<&[u8]>, caused_by: &'static str) -> Error { + Cause::DataCorruption + .ctx(Key::Key, key) + .ctx_opt(Key::Value, value) + .ctx(Key::CausedBy, caused_by) + } +} + +impl Cause { + #[inline(always)] + pub fn ctx(self, key: Key, value: impl Into) -> Error { + Error::new(self).ctx(key, value) + } + + #[inline(always)] + pub fn caused_by(self, error: impl Into) -> Error { + Error::new(self).caused_by(error) + } + + #[inline(always)] + pub fn reason(self, error: impl Display) -> Error { + Error::new(self).reason(error) + } + + #[inline(always)] + pub fn into_err(self) -> Error { + Error::new(self) + } +} + +impl Error { + #[inline(always)] + pub fn wrap(self, cause: Cause) -> Self { + Error::new(cause).caused_by(self) + } +} + +impl Value { + pub fn to_uint(&self) -> Option { + match self { + Self::UInt(value) => Some(*value), + Self::Int(value) => Some(*value as u64), + _ => None, + } + } + + pub fn as_str(&self) -> Option<&str> { + match self { + Self::String(value) => 
Some(value.as_str()), + Self::Static(value) => Some(value), + _ => None, + } + } + + pub fn into_string(self) -> Option> { + match self { + Self::String(value) => Some(Cow::Owned(value)), + Self::Static(value) => Some(Cow::Borrowed(value)), + _ => None, + } + } +} + +impl AddContext for Result { + #[inline(always)] + fn caused_by(self, location: &'static str) -> Result { + match self { + Ok(value) => Ok(value), + Err(err) => Err(err.ctx(Key::CausedBy, location)), + } + } + + #[inline(always)] + fn add_context(self, f: F) -> Result + where + F: FnOnce(Error) -> Error, + { + match self { + Ok(value) => Ok(value), + Err(err) => Err(f(err)), + } + } +} + +impl Display for Context { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.inner)?; + for (key, value) in self.keys.iter().take(self.keys_size) { + write!(f, "\n {:?} = {:?}", key, value)?; + } + Ok(()) + } +} + +impl std::error::Error for Error {} diff --git a/crates/trc/src/lib.rs b/crates/trc/src/lib.rs new file mode 100644 index 00000000..222051c8 --- /dev/null +++ b/crates/trc/src/lib.rs @@ -0,0 +1,144 @@ +/* + * SPDX-FileCopyrightText: 2020 Stalwart Labs Ltd + * + * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL + */ + +pub mod conv; +pub mod imple; +pub mod macros; + +use std::{ + io::ErrorKind, + net::{IpAddr, Ipv4Addr, Ipv6Addr}, +}; + +pub type Result = std::result::Result; +pub type Error = Context; +pub type Trace = Context; + +const ERROR_CONTEXT_SIZE: usize = 5; +const TRACE_CONTEXT_SIZE: usize = 10; + +#[derive(Debug, Default, Clone)] +pub enum Value { + Static(&'static str), + String(String), + UInt(u64), + Int(i64), + Float(f64), + Bytes(Vec), + Bool(bool), + Ipv4(Ipv4Addr), + Ipv6(Box), + Protocol(Protocol), + Error(Box), + ErrorKind(ErrorKind), + Array(Vec), + #[default] + None, +} + +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] +pub enum Key { + RemoteIp, + #[default] + CausedBy, + Reason, + Details, + Query, + Result, + Parameters, 
+ Type, + Id, + Code, + Key, + Value, + Size, + Status, + Protocol, + Property, + Path, + Url, + DocumentId, + Collection, + AccountId, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Event { + NewConnection, + Error(Cause), + SqlQuery, + LdapQuery, + PurgeTaskStarted, + PurgeTaskRunning, + PurgeTaskFinished, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Cause { + FoundationDB, + MySQL, + PostgreSQL, + RocksDB, + SQLite, + ElasticSearch, + Redis, + S3, + Io, + Imap, + Smtp, + Ldap, + BlobMissingMarker, + Unknown, + Purge, + AssertValue, + Timeout, + Thread, + Pool, + DataCorruption, + Decompress, + Deserialize, + NotConfigured, + Unsupported, + Unexpected, + MissingParameter, + Invalid, + AlreadyExists, + NotFound, + Configuration, + Fetch, + Acme, + Http, + Crypto, + Dns, + Authentication, + Jmap, + OverQuota, + Ingest, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Protocol { + Jmap, + Imap, + Smtp, + ManageSieve, + Ldap, + Sql, +} + +#[derive(Debug, Clone)] +pub struct Context { + inner: T, + keys: [(Key, Value); N], + keys_size: usize, +} + +pub trait AddContext { + fn caused_by(self, location: &'static str) -> Result; + fn add_context(self, f: F) -> Result + where + F: FnOnce(Error) -> Error; +} diff --git a/crates/trc/src/macros.rs b/crates/trc/src/macros.rs new file mode 100644 index 00000000..3b668548 --- /dev/null +++ b/crates/trc/src/macros.rs @@ -0,0 +1,46 @@ +/* + * SPDX-FileCopyrightText: 2020 Stalwart Labs Ltd + * + * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL + */ + +#[macro_export] +macro_rules! trace { + ($event:ident $(, $key:ident = $value:expr)* $(,)?) => { + { + let event = $crate::Trace::new($crate::Event::$event) + $( + .ctx($crate::Key::$key, $crate::Value::from($value)) + )* ; + + eprintln!("{}", event); + } + }; + } + +#[macro_export] +macro_rules! error { + ($cause:ident $(, $key:ident = $value:expr)* $(,)?) 
=> {{ + let event = $crate::Trace::new($crate::Event::Error($crate::Cause::$cause)) + .ctx($crate::Key::CausedBy, $crate::location!()) + $( + .ctx($crate::Key::$key, $crate::Value::from($value)) + )* ; + + eprintln!("{}", event); + }}; +} + +#[macro_export] +macro_rules! location { + () => {{ + concat!(file!(), ":", line!(), " (", module_path!(), ")") + }}; +} + +#[macro_export] +macro_rules! bail { + ($err:expr $(,)?) => { + return Err($err); + }; +} diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index 9a7bf19a..4cf0e1d6 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -5,6 +5,7 @@ edition = "2021" resolver = "2" [dependencies] +trc = { path = "../trc" } rustls = { version = "0.23.5", default-features = false, features = ["std", "ring", "tls12"] } rustls-pemfile = "2.0" rustls-pki-types = { version = "1" } diff --git a/crates/utils/src/config/mod.rs b/crates/utils/src/config/mod.rs index 5a215527..356cdbd7 100644 --- a/crates/utils/src/config/mod.rs +++ b/crates/utils/src/config/mod.rs @@ -32,6 +32,7 @@ pub enum ConfigWarning { AppliedDefault { default: String }, Unread { value: String }, Build { error: String }, + Parse { error: String }, } #[derive(Debug, Clone, PartialEq, Eq, Serialize)] @@ -54,7 +55,7 @@ pub struct Rate { pub period: Duration, } -pub type Result = std::result::Result; +pub(crate) type Result = std::result::Result; impl Config { pub async fn resolve_macros(&mut self, classes: &[&str]) { @@ -206,6 +207,9 @@ impl Config { ConfigWarning::Unread { value } => { format!("WARNING: Unused setting {key:?} with value {value:?}") } + ConfigWarning::Parse { error } => { + format!("WARNING: Failed to parse {key:?}: {error}") + } ConfigWarning::Build { error } => format!("WARNING for {key:?}: {error}"), }; if !use_stderr { diff --git a/crates/utils/src/config/utils.rs b/crates/utils/src/config/utils.rs index 125dae58..35d9aa2d 100644 --- a/crates/utils/src/config/utils.rs +++ b/crates/utils/src/config/utils.rs @@ -294,6 
+294,15 @@ impl Config { ); } + pub fn new_parse_warning(&mut self, key: impl AsKey, details: impl Into) { + self.warnings.insert( + key.as_key(), + ConfigWarning::Parse { + error: details.into(), + }, + ); + } + pub fn new_build_warning(&mut self, key: impl AsKey, details: impl Into) { self.warnings.insert( key.as_key(), diff --git a/tests/Cargo.toml b/tests/Cargo.toml index e15a3502..47e52e76 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" resolver = "2" [features] -default = ["sqlite", "postgres", "mysql", "rocks", "elastic", "s3", "redis"] +default = ["sqlite", "postgres", "mysql", "rocks", "elastic", "s3", "redis", "foundationdb"] #default = ["sqlite", "postgres", "mysql", "rocks", "elastic", "s3", "redis", "foundationdb"] sqlite = ["store/sqlite"] foundationdb = ["store/foundation"]