mirror of
https://github.com/stalwartlabs/mail-server.git
synced 2024-09-20 07:16:18 +08:00
Settings hot reloading - Part 1
This commit is contained in:
parent
7e1a95c1ee
commit
333a0d5a1b
325
Cargo.lock
generated
325
Cargo.lock
generated
|
@ -483,6 +483,12 @@ version = "0.21.7"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
|
||||
|
||||
[[package]]
|
||||
name = "base64"
|
||||
version = "0.22.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51"
|
||||
|
||||
[[package]]
|
||||
name = "base64ct"
|
||||
version = "1.6.0"
|
||||
|
@ -511,29 +517,6 @@ dependencies = [
|
|||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bindgen"
|
||||
version = "0.65.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cfdf7b466f9a4903edc73f95d6d2bcd5baf8ae620638762244d3f60143643cc5"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
"cexpr",
|
||||
"clang-sys",
|
||||
"lazy_static",
|
||||
"lazycell",
|
||||
"log",
|
||||
"peeking_take_while",
|
||||
"prettyplease",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"regex",
|
||||
"rustc-hash",
|
||||
"shlex",
|
||||
"syn 2.0.52",
|
||||
"which",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bindgen"
|
||||
version = "0.69.4"
|
||||
|
@ -546,12 +529,15 @@ dependencies = [
|
|||
"itertools 0.12.1",
|
||||
"lazy_static",
|
||||
"lazycell",
|
||||
"log",
|
||||
"prettyplease",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"regex",
|
||||
"rustc-hash",
|
||||
"shlex",
|
||||
"syn 2.0.52",
|
||||
"which",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -690,7 +676,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "7aadb5b6ccbd078890f6d7003694e33816e6b784358f18e15e7e6d9f065a57cd"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"proc-macro-crate 3.1.0",
|
||||
"proc-macro-crate",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.52",
|
||||
|
@ -997,6 +983,42 @@ dependencies = [
|
|||
"tokio-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "common"
|
||||
version = "0.6.0"
|
||||
dependencies = [
|
||||
"ahash 0.8.11",
|
||||
"arc-swap",
|
||||
"base64 0.22.0",
|
||||
"chrono",
|
||||
"directory",
|
||||
"futures",
|
||||
"mail-auth",
|
||||
"mail-send",
|
||||
"nlp",
|
||||
"parking_lot",
|
||||
"pem",
|
||||
"privdrop",
|
||||
"proxy-header",
|
||||
"rcgen",
|
||||
"regex",
|
||||
"reqwest 0.12.0",
|
||||
"ring 0.17.8",
|
||||
"rustls 0.22.2",
|
||||
"rustls-pemfile 2.1.1",
|
||||
"rustls-pki-types",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sieve-rs",
|
||||
"store",
|
||||
"tokio",
|
||||
"tokio-rustls 0.25.0",
|
||||
"tracing",
|
||||
"tracing-journald",
|
||||
"utils",
|
||||
"x509-parser 0.16.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "console"
|
||||
version = "0.15.8"
|
||||
|
@ -1676,7 +1698,7 @@ dependencies = [
|
|||
"dyn-clone",
|
||||
"lazy_static",
|
||||
"percent-encoding",
|
||||
"reqwest",
|
||||
"reqwest 0.11.26",
|
||||
"rustc_version 0.2.3",
|
||||
"serde",
|
||||
"serde_json",
|
||||
|
@ -1910,9 +1932,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "foundationdb"
|
||||
version = "0.8.0"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8696fd1be198f101eb58aeecf0f504fc02b28c7afcc008b4e4a998a91b305108"
|
||||
checksum = "020bf4ae7238dbdb1ff01e9f981db028515cf66883c461e29faedfea130b2728"
|
||||
dependencies = [
|
||||
"async-recursion",
|
||||
"async-trait",
|
||||
|
@ -1931,18 +1953,18 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "foundationdb-gen"
|
||||
version = "0.8.0"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "62239700f01b041b6372aaeb847c52f960e1a69fd2b1025dc995ea3dd90e3308"
|
||||
checksum = "36878d54a76a48e794d0fe89be2096ab5968b071e7ec25f7becfe7846f55fa77"
|
||||
dependencies = [
|
||||
"xml-rs",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "foundationdb-macros"
|
||||
version = "0.2.0"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "83c8d52fe8b46ab822b4decdcc0d6d85aeedfc98f0d52ba2bd4aec4a97807516"
|
||||
checksum = "f8db6653cbc621a3810d95d55bd342be3e71181d6df21a4eb29ef986202d3f9c"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -1952,11 +1974,12 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "foundationdb-sys"
|
||||
version = "0.8.0"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "98e49545f5393d276b7b888c77e3f9519fd33727435f8244344be72c3284256f"
|
||||
checksum = "ace2f49db8614b7d7e3b656a12e0059b5fbd0a4da3410b1797374bec3db269fa"
|
||||
dependencies = [
|
||||
"bindgen 0.65.1",
|
||||
"bindgen",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -2465,7 +2488,7 @@ dependencies = [
|
|||
"httpdate",
|
||||
"itoa",
|
||||
"pin-project-lite",
|
||||
"socket2 0.5.6",
|
||||
"socket2",
|
||||
"tokio",
|
||||
"tower-service",
|
||||
"tracing",
|
||||
|
@ -2490,6 +2513,7 @@ dependencies = [
|
|||
"pin-project-lite",
|
||||
"smallvec",
|
||||
"tokio",
|
||||
"want",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -2506,6 +2530,23 @@ dependencies = [
|
|||
"tokio-rustls 0.24.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hyper-rustls"
|
||||
version = "0.26.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c"
|
||||
dependencies = [
|
||||
"futures-util",
|
||||
"http 1.1.0",
|
||||
"hyper 1.2.0",
|
||||
"hyper-util",
|
||||
"rustls 0.22.2",
|
||||
"rustls-pki-types",
|
||||
"tokio",
|
||||
"tokio-rustls 0.25.0",
|
||||
"tower-service",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hyper-timeout"
|
||||
version = "0.4.1"
|
||||
|
@ -2525,13 +2566,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"futures-channel",
|
||||
"futures-util",
|
||||
"http 1.1.0",
|
||||
"http-body 1.0.0",
|
||||
"hyper 1.2.0",
|
||||
"pin-project-lite",
|
||||
"socket2 0.5.6",
|
||||
"socket2",
|
||||
"tokio",
|
||||
"tower",
|
||||
"tower-service",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -2702,7 +2747,7 @@ version = "0.3.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f"
|
||||
dependencies = [
|
||||
"socket2 0.5.6",
|
||||
"socket2",
|
||||
"widestring",
|
||||
"windows-sys 0.48.0",
|
||||
"winreg",
|
||||
|
@ -2828,7 +2873,7 @@ dependencies = [
|
|||
"rasn",
|
||||
"rasn-cms",
|
||||
"rasn-pkix",
|
||||
"reqwest",
|
||||
"reqwest 0.12.0",
|
||||
"rsa",
|
||||
"sequoia-openpgp",
|
||||
"serde",
|
||||
|
@ -2859,7 +2904,7 @@ dependencies = [
|
|||
"futures-util",
|
||||
"maybe-async",
|
||||
"parking_lot",
|
||||
"reqwest",
|
||||
"reqwest 0.11.26",
|
||||
"rustls 0.22.2",
|
||||
"rustls-pki-types",
|
||||
"serde",
|
||||
|
@ -3002,7 +3047,7 @@ dependencies = [
|
|||
"percent-encoding",
|
||||
"ring 0.16.20",
|
||||
"rustls 0.21.10",
|
||||
"rustls-native-certs",
|
||||
"rustls-native-certs 0.6.3",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"tokio-rustls 0.24.1",
|
||||
|
@ -3051,7 +3096,7 @@ version = "0.16.0+8.10.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ce3d60bc059831dc1c83903fb45c103f75db65c5a7bf22272764d9cc683e348c"
|
||||
dependencies = [
|
||||
"bindgen 0.69.4",
|
||||
"bindgen",
|
||||
"bzip2-sys",
|
||||
"cc",
|
||||
"glob",
|
||||
|
@ -3375,14 +3420,14 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "mysql-common-derive"
|
||||
version = "0.30.2"
|
||||
version = "0.31.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "56b0d8a0db9bf6d2213e11f2c701cb91387b0614361625ab7b9743b41aa4938f"
|
||||
checksum = "c60492b5eb751e55b42d716b6b26dceb66767996cd7a5560a842fbf613ca2e92"
|
||||
dependencies = [
|
||||
"darling 0.20.8",
|
||||
"heck",
|
||||
"num-bigint",
|
||||
"proc-macro-crate 1.3.1",
|
||||
"proc-macro-crate",
|
||||
"proc-macro-error",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -3393,9 +3438,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "mysql_async"
|
||||
version = "0.33.0"
|
||||
version = "0.34.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6750b17ce50f8f112ef1a8394121090d47c596b56a6a17569ca680a9626e2ef2"
|
||||
checksum = "fbfe87d7e35cb72363326216cc1712b865d8d4f70abf3b2d2e6b251fb6b2f427"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"crossbeam",
|
||||
|
@ -3413,30 +3458,30 @@ dependencies = [
|
|||
"percent-encoding",
|
||||
"pin-project",
|
||||
"rand",
|
||||
"rustls 0.21.10",
|
||||
"rustls-pemfile 1.0.4",
|
||||
"rustls 0.22.2",
|
||||
"rustls-pemfile 2.1.1",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"socket2 0.5.6",
|
||||
"socket2",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"tokio-rustls 0.24.1",
|
||||
"tokio-rustls 0.25.0",
|
||||
"tokio-util",
|
||||
"twox-hash",
|
||||
"url",
|
||||
"webpki",
|
||||
"webpki-roots 0.25.4",
|
||||
"webpki-roots 0.26.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mysql_common"
|
||||
version = "0.31.0"
|
||||
version = "0.32.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06f19e4cfa0ab5a76b627cec2d81331c49b034988eaf302c3bafeada684eadef"
|
||||
checksum = "8a60cb978c0a1d654edcc1460f8d6092dacf21346ed6017d81fb76a23ef5a8de"
|
||||
dependencies = [
|
||||
"base64 0.21.7",
|
||||
"bigdecimal",
|
||||
"bindgen 0.69.4",
|
||||
"bindgen",
|
||||
"bitflags 2.4.2",
|
||||
"bitvec",
|
||||
"btoi",
|
||||
|
@ -3464,7 +3509,7 @@ dependencies = [
|
|||
"thiserror",
|
||||
"time",
|
||||
"uuid",
|
||||
"zstd 0.12.4",
|
||||
"zstd 0.13.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -3734,7 +3779,7 @@ dependencies = [
|
|||
"bytes",
|
||||
"http 0.2.12",
|
||||
"opentelemetry",
|
||||
"reqwest",
|
||||
"reqwest 0.11.26",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -3752,7 +3797,7 @@ dependencies = [
|
|||
"opentelemetry-semantic-conventions",
|
||||
"opentelemetry_sdk",
|
||||
"prost",
|
||||
"reqwest",
|
||||
"reqwest 0.11.26",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"tonic",
|
||||
|
@ -3913,12 +3958,6 @@ dependencies = [
|
|||
"sha2 0.10.8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "peeking_take_while"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
|
||||
|
||||
[[package]]
|
||||
name = "pem"
|
||||
version = "3.0.3"
|
||||
|
@ -4188,23 +4227,13 @@ dependencies = [
|
|||
"nix",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-crate"
|
||||
version = "1.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"toml_edit 0.19.15",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-crate"
|
||||
version = "3.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284"
|
||||
dependencies = [
|
||||
"toml_edit 0.21.1",
|
||||
"toml_edit",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4479,9 +4508,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "redis"
|
||||
version = "0.24.0"
|
||||
version = "0.25.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c580d9cbbe1d1b479e8d67cf9daf6a62c957e6846048408b80b43ac3f6af84cd"
|
||||
checksum = "71d64e978fd98a0e6b105d066ba4889a7301fca65aeac850a877d8797343feeb"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"bytes",
|
||||
|
@ -4494,18 +4523,18 @@ dependencies = [
|
|||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"rand",
|
||||
"rustls 0.21.10",
|
||||
"rustls-native-certs",
|
||||
"rustls-pemfile 1.0.4",
|
||||
"rustls-webpki 0.101.7",
|
||||
"rustls 0.22.2",
|
||||
"rustls-native-certs 0.7.0",
|
||||
"rustls-pemfile 2.1.1",
|
||||
"rustls-pki-types",
|
||||
"ryu",
|
||||
"sha1_smol",
|
||||
"socket2 0.4.10",
|
||||
"socket2",
|
||||
"tokio",
|
||||
"tokio-rustls 0.24.1",
|
||||
"tokio-rustls 0.25.0",
|
||||
"tokio-util",
|
||||
"url",
|
||||
"webpki-roots 0.23.1",
|
||||
"webpki-roots 0.26.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4597,12 +4626,11 @@ dependencies = [
|
|||
"http 0.2.12",
|
||||
"http-body 0.4.6",
|
||||
"hyper 0.14.28",
|
||||
"hyper-rustls",
|
||||
"hyper-rustls 0.24.2",
|
||||
"ipnet",
|
||||
"js-sys",
|
||||
"log",
|
||||
"mime",
|
||||
"mime_guess",
|
||||
"once_cell",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
|
@ -4626,6 +4654,49 @@ dependencies = [
|
|||
"winreg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "reqwest"
|
||||
version = "0.12.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "58b48d98d932f4ee75e541614d32a7f44c889b72bd9c2e04d95edd135989df88"
|
||||
dependencies = [
|
||||
"base64 0.21.7",
|
||||
"bytes",
|
||||
"futures-channel",
|
||||
"futures-core",
|
||||
"futures-util",
|
||||
"http 1.1.0",
|
||||
"http-body 1.0.0",
|
||||
"http-body-util",
|
||||
"hyper 1.2.0",
|
||||
"hyper-rustls 0.26.0",
|
||||
"hyper-util",
|
||||
"ipnet",
|
||||
"js-sys",
|
||||
"log",
|
||||
"mime",
|
||||
"mime_guess",
|
||||
"once_cell",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"rustls 0.22.2",
|
||||
"rustls-pemfile 1.0.4",
|
||||
"rustls-pki-types",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_urlencoded",
|
||||
"sync_wrapper",
|
||||
"tokio",
|
||||
"tokio-rustls 0.25.0",
|
||||
"tower-service",
|
||||
"url",
|
||||
"wasm-bindgen",
|
||||
"wasm-bindgen-futures",
|
||||
"web-sys",
|
||||
"webpki-roots 0.26.1",
|
||||
"winreg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "resolv-conf"
|
||||
version = "0.7.0"
|
||||
|
@ -4820,7 +4891,7 @@ dependencies = [
|
|||
"md5",
|
||||
"percent-encoding",
|
||||
"quick-xml 0.26.0",
|
||||
"reqwest",
|
||||
"reqwest 0.11.26",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"sha2 0.10.8",
|
||||
|
@ -4959,6 +5030,19 @@ dependencies = [
|
|||
"security-framework",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustls-native-certs"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792"
|
||||
dependencies = [
|
||||
"openssl-probe",
|
||||
"rustls-pemfile 2.1.1",
|
||||
"rustls-pki-types",
|
||||
"schannel",
|
||||
"security-framework",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustls-pemfile"
|
||||
version = "1.0.4"
|
||||
|
@ -4984,16 +5068,6 @@ version = "1.3.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5ede67b28608b4c60685c7d54122d4400d90f62b40caee7700e700380a390fa8"
|
||||
|
||||
[[package]]
|
||||
name = "rustls-webpki"
|
||||
version = "0.100.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5f6a5fc258f1c1276dfe3016516945546e2d5383911efc0fc4f1cdc5df3a4ae3"
|
||||
dependencies = [
|
||||
"ring 0.16.20",
|
||||
"untrusted 0.7.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustls-webpki"
|
||||
version = "0.101.7"
|
||||
|
@ -5497,7 +5571,7 @@ dependencies = [
|
|||
"rand",
|
||||
"rayon",
|
||||
"regex",
|
||||
"reqwest",
|
||||
"reqwest 0.12.0",
|
||||
"rustls 0.22.2",
|
||||
"rustls-pemfile 2.1.1",
|
||||
"rustls-pki-types",
|
||||
|
@ -5550,16 +5624,6 @@ dependencies = [
|
|||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "socket2"
|
||||
version = "0.4.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "socket2"
|
||||
version = "0.5.6"
|
||||
|
@ -5610,7 +5674,7 @@ dependencies = [
|
|||
"prettytable-rs",
|
||||
"pwhash",
|
||||
"rand",
|
||||
"reqwest",
|
||||
"reqwest 0.12.0",
|
||||
"rpassword",
|
||||
"serde",
|
||||
"serde_json",
|
||||
|
@ -5631,7 +5695,7 @@ dependencies = [
|
|||
"pwhash",
|
||||
"rand",
|
||||
"rcgen",
|
||||
"reqwest",
|
||||
"reqwest 0.12.0",
|
||||
"rpassword",
|
||||
"tar",
|
||||
"zip-extract",
|
||||
|
@ -5645,7 +5709,7 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
|
|||
|
||||
[[package]]
|
||||
name = "store"
|
||||
version = "0.1.0"
|
||||
version = "0.6.0"
|
||||
dependencies = [
|
||||
"ahash 0.8.11",
|
||||
"arc-swap",
|
||||
|
@ -5672,7 +5736,6 @@ dependencies = [
|
|||
"rayon",
|
||||
"redis",
|
||||
"regex",
|
||||
"reqwest",
|
||||
"ring 0.17.8",
|
||||
"roaring",
|
||||
"rocksdb",
|
||||
|
@ -5906,7 +5969,7 @@ dependencies = [
|
|||
"nlp",
|
||||
"num_cpus",
|
||||
"rayon",
|
||||
"reqwest",
|
||||
"reqwest 0.12.0",
|
||||
"rustls 0.22.2",
|
||||
"rustls-pemfile 2.1.1",
|
||||
"rustls-pki-types",
|
||||
|
@ -6033,7 +6096,7 @@ dependencies = [
|
|||
"parking_lot",
|
||||
"pin-project-lite",
|
||||
"signal-hook-registry",
|
||||
"socket2 0.5.6",
|
||||
"socket2",
|
||||
"tokio-macros",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
@ -6079,7 +6142,7 @@ dependencies = [
|
|||
"postgres-protocol",
|
||||
"postgres-types",
|
||||
"rand",
|
||||
"socket2 0.5.6",
|
||||
"socket2",
|
||||
"tokio",
|
||||
"tokio-util",
|
||||
"whoami",
|
||||
|
@ -6153,17 +6216,6 @@ version = "0.6.5"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1"
|
||||
|
||||
[[package]]
|
||||
name = "toml_edit"
|
||||
version = "0.19.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
|
||||
dependencies = [
|
||||
"indexmap 2.2.5",
|
||||
"toml_datetime",
|
||||
"winnow",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml_edit"
|
||||
version = "0.21.1"
|
||||
|
@ -6241,6 +6293,7 @@ version = "0.1.40"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
|
||||
dependencies = [
|
||||
"log",
|
||||
"pin-project-lite",
|
||||
"tracing-attributes",
|
||||
"tracing-core",
|
||||
|
@ -6547,7 +6600,7 @@ dependencies = [
|
|||
"rand",
|
||||
"rcgen",
|
||||
"regex",
|
||||
"reqwest",
|
||||
"reqwest 0.12.0",
|
||||
"ring 0.17.8",
|
||||
"rustls 0.22.2",
|
||||
"rustls-pemfile 2.1.1",
|
||||
|
@ -6748,15 +6801,6 @@ dependencies = [
|
|||
"webpki",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "webpki-roots"
|
||||
version = "0.23.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b03058f88386e5ff5310d9111d53f48b17d732b401aeb83a8d5190f2ac459338"
|
||||
dependencies = [
|
||||
"rustls-webpki 0.100.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "webpki-roots"
|
||||
version = "0.25.4"
|
||||
|
@ -7170,11 +7214,11 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "zstd"
|
||||
version = "0.12.4"
|
||||
version = "0.13.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1a27595e173641171fc74a1232b7b1c7a7cb6e18222c11e9dfb9888fa424c53c"
|
||||
checksum = "bffb3309596d527cfcba7dfc6ed6052f1d39dfbd7c867aa2e865e4a449c10110"
|
||||
dependencies = [
|
||||
"zstd-safe 6.0.6",
|
||||
"zstd-safe 7.0.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -7189,11 +7233,10 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "zstd-safe"
|
||||
version = "6.0.6"
|
||||
version = "7.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ee98ffd0b48ee95e6c5168188e44a54550b1564d9d530ee21d5f0eaed1069581"
|
||||
checksum = "43747c7422e2924c11144d5229878b98180ef8b06cca4ab5af37afc8a8d8ea3e"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"zstd-sys",
|
||||
]
|
||||
|
||||
|
|
|
@ -12,6 +12,7 @@ members = [
|
|||
"crates/store",
|
||||
"crates/directory",
|
||||
"crates/utils",
|
||||
"crates/common",
|
||||
"crates/cli",
|
||||
"crates/install",
|
||||
"tests",
|
||||
|
|
|
@ -13,7 +13,7 @@ resolver = "2"
|
|||
[dependencies]
|
||||
jmap-client = { version = "0.3", features = ["async"] }
|
||||
mail-parser = { version = "0.9", features = ["full_encoding", "serde_support", "ludicrous_mode"] }
|
||||
reqwest = { version = "0.11", default-features = false, features = ["rustls-tls-webpki-roots"]}
|
||||
reqwest = { version = "0.12", default-features = false, features = ["rustls-tls-webpki-roots"]}
|
||||
tokio = { version = "1.23", features = ["full"] }
|
||||
num_cpus = "1.13.1"
|
||||
clap = { version = "4.1.6", features = ["derive"] }
|
||||
|
|
39
crates/common/Cargo.toml
Normal file
39
crates/common/Cargo.toml
Normal file
|
@ -0,0 +1,39 @@
|
|||
[package]
|
||||
name = "common"
|
||||
version = "0.6.0"
|
||||
edition = "2021"
|
||||
resolver = "2"
|
||||
|
||||
[dependencies]
|
||||
utils = { path = "../utils" }
|
||||
nlp = { path = "../nlp" }
|
||||
store = { path = "../store" }
|
||||
directory = { path = "../directory" }
|
||||
sieve-rs = { version = "0.4" }
|
||||
mail-auth = { version = "0.3" }
|
||||
mail-send = { version = "0.4", default-features = false, features = ["cram-md5"] }
|
||||
ahash = { version = "0.8.0", features = ["serde"] }
|
||||
parking_lot = "0.12.1"
|
||||
regex = "1.7.0"
|
||||
tracing = "0.1"
|
||||
proxy-header = { version = "0.1.0", features = ["tokio"] }
|
||||
arc-swap = "1.6.0"
|
||||
rustls = { version = "0.22", default-features = false, features = ["tls12"]}
|
||||
rustls-pemfile = "2.0"
|
||||
rustls-pki-types = { version = "1" }
|
||||
ring = { version = "0.17" }
|
||||
tokio = { version = "1.23", features = ["net", "macros"] }
|
||||
tokio-rustls = { version = "0.25.0"}
|
||||
futures = "0.3"
|
||||
rcgen = "0.12"
|
||||
reqwest = { version = "0.12", default-features = false, features = ["rustls-tls-webpki-roots"]}
|
||||
serde = { version = "1.0", features = ["derive"]}
|
||||
serde_json = "1.0"
|
||||
base64 = "0.22"
|
||||
x509-parser = "0.16.0"
|
||||
pem = "3.0"
|
||||
chrono = "0.4"
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
privdrop = "0.5.3"
|
||||
tracing-journald = "0.3"
|
201
crates/common/src/addresses.rs
Normal file
201
crates/common/src/addresses.rs
Normal file
|
@ -0,0 +1,201 @@
|
|||
use std::borrow::Cow;
|
||||
|
||||
use directory::Directory;
|
||||
use utils::config::{utils::AsKey, Config};
|
||||
|
||||
use crate::{
|
||||
config::smtp::session::AddressMapping,
|
||||
expr::{functions::ResolveVariable, if_block::IfBlock, tokenizer::TokenMap, Variable},
|
||||
Core,
|
||||
};
|
||||
|
||||
impl Core {
|
||||
pub async fn email_to_ids(
|
||||
&self,
|
||||
directory: &Directory,
|
||||
email: &str,
|
||||
) -> directory::Result<Vec<u32>> {
|
||||
let todo = "update functions using this method.";
|
||||
let mut address = self
|
||||
.smtp
|
||||
.session
|
||||
.rcpt
|
||||
.subaddressing
|
||||
.to_subaddress(self, email)
|
||||
.await;
|
||||
|
||||
for _ in 0..2 {
|
||||
let result = directory.email_to_ids(address.as_ref()).await?;
|
||||
|
||||
if !result.is_empty() {
|
||||
return Ok(result);
|
||||
} else if let Some(catch_all) = self
|
||||
.smtp
|
||||
.session
|
||||
.rcpt
|
||||
.catch_all
|
||||
.to_catch_all(self, email)
|
||||
.await
|
||||
{
|
||||
address = catch_all;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(vec![])
|
||||
}
|
||||
|
||||
pub async fn rcpt(&self, directory: &Directory, email: &str) -> directory::Result<bool> {
|
||||
// Expand subaddress
|
||||
let mut address = self
|
||||
.smtp
|
||||
.session
|
||||
.rcpt
|
||||
.subaddressing
|
||||
.to_subaddress(self, email)
|
||||
.await;
|
||||
|
||||
for _ in 0..2 {
|
||||
if directory.rcpt(address.as_ref()).await? {
|
||||
return Ok(true);
|
||||
} else if let Some(catch_all) = self
|
||||
.smtp
|
||||
.session
|
||||
.rcpt
|
||||
.catch_all
|
||||
.to_catch_all(self, email)
|
||||
.await
|
||||
{
|
||||
address = catch_all;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
pub async fn vrfy(
|
||||
&self,
|
||||
directory: &Directory,
|
||||
address: &str,
|
||||
) -> directory::Result<Vec<String>> {
|
||||
directory
|
||||
.vrfy(
|
||||
self.smtp
|
||||
.session
|
||||
.rcpt
|
||||
.subaddressing
|
||||
.to_subaddress(self, address)
|
||||
.await
|
||||
.as_ref(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn expn(
|
||||
&self,
|
||||
directory: &Directory,
|
||||
address: &str,
|
||||
) -> directory::Result<Vec<String>> {
|
||||
directory
|
||||
.expn(
|
||||
self.smtp
|
||||
.session
|
||||
.rcpt
|
||||
.subaddressing
|
||||
.to_subaddress(self, address)
|
||||
.await
|
||||
.as_ref(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
impl AddressMapping {
|
||||
pub fn try_parse(config: &mut Config, key: impl AsKey) -> Self {
|
||||
let key = key.as_key();
|
||||
if let Some(value) = config.value(key.as_str()) {
|
||||
match value {
|
||||
"true" => AddressMapping::Enable,
|
||||
"false" => AddressMapping::Disable,
|
||||
_ => {
|
||||
config.new_parse_error(
|
||||
key,
|
||||
format!("Invalid value for address mapping {value:?}",),
|
||||
);
|
||||
AddressMapping::Disable
|
||||
}
|
||||
}
|
||||
} else if let Some(if_block) = IfBlock::try_parse(
|
||||
config,
|
||||
key,
|
||||
&TokenMap::default().with_variables([("address", 1), ("email", 1), ("rcpt", 1)]),
|
||||
) {
|
||||
AddressMapping::Custom(if_block)
|
||||
} else {
|
||||
AddressMapping::Disable
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct Address<'x>(&'x str);
|
||||
|
||||
impl<'x> ResolveVariable<'x> for Address<'x> {
|
||||
fn resolve_variable(&self, _: u32) -> crate::expr::Variable<'x> {
|
||||
Variable::from(self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl AddressMapping {
|
||||
async fn to_subaddress<'x, 'y: 'x>(&'x self, core: &Core, address: &'y str) -> Cow<'x, str> {
|
||||
match self {
|
||||
AddressMapping::Enable => {
|
||||
if let Some((local_part, domain_part)) = address.rsplit_once('@') {
|
||||
if let Some((local_part, _)) = local_part.split_once('+') {
|
||||
return format!("{}@{}", local_part, domain_part).into();
|
||||
}
|
||||
}
|
||||
}
|
||||
AddressMapping::Custom(if_block) => {
|
||||
if let Ok(result) = String::try_from(
|
||||
if_block
|
||||
.eval(&Address(address), core, "session.rcpt.sub-addressing")
|
||||
.await,
|
||||
) {
|
||||
return result.into();
|
||||
}
|
||||
}
|
||||
AddressMapping::Disable => (),
|
||||
}
|
||||
|
||||
address.into()
|
||||
}
|
||||
|
||||
async fn to_catch_all<'x, 'y: 'x>(
|
||||
&'x self,
|
||||
core: &Core,
|
||||
address: &'y str,
|
||||
) -> Option<Cow<'x, str>> {
|
||||
match self {
|
||||
AddressMapping::Enable => address
|
||||
.rsplit_once('@')
|
||||
.map(|(_, domain_part)| format!("@{}", domain_part))
|
||||
.map(Cow::Owned),
|
||||
|
||||
AddressMapping::Custom(if_block) => {
|
||||
if let Ok(result) = String::try_from(
|
||||
if_block
|
||||
.eval(&Address(address), core, "session.rcpt.catch-all")
|
||||
.await,
|
||||
) {
|
||||
Some(result.into())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
AddressMapping::Disable => None,
|
||||
}
|
||||
}
|
||||
}
|
4
crates/common/src/config/mod.rs
Normal file
4
crates/common/src/config/mod.rs
Normal file
|
@ -0,0 +1,4 @@
|
|||
pub mod scripts;
|
||||
pub mod server;
|
||||
pub mod smtp;
|
||||
pub mod storage;
|
44
crates/common/src/config/scripts.rs
Normal file
44
crates/common/src/config/scripts.rs
Normal file
|
@ -0,0 +1,44 @@
|
|||
use std::{collections::HashSet, sync::Arc, time::Instant};
|
||||
|
||||
use ahash::AHashMap;
|
||||
use nlp::bayes::cache::BayesTokenCache;
|
||||
use parking_lot::RwLock;
|
||||
use sieve::{Compiler, Runtime, Sieve};
|
||||
use utils::suffixlist::PublicSuffix;
|
||||
|
||||
use super::smtp::auth::DkimSigner;
|
||||
|
||||
pub struct SieveCore {
|
||||
pub untrusted_compiler: Compiler,
|
||||
pub untrusted_runtime: Runtime<()>,
|
||||
pub trusted_runtime: Runtime<SieveContext>,
|
||||
pub from_addr: String,
|
||||
pub from_name: String,
|
||||
pub return_path: String,
|
||||
pub sign: Vec<Arc<DkimSigner>>,
|
||||
pub scripts: AHashMap<String, Arc<Sieve>>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct SieveContext {
|
||||
pub psl: PublicSuffix,
|
||||
pub bayes_cache: BayesTokenCache,
|
||||
pub remote_lists: RemoteLists,
|
||||
}
|
||||
|
||||
pub struct RemoteLists {
|
||||
pub lists: RwLock<AHashMap<String, RemoteList>>,
|
||||
}
|
||||
|
||||
pub struct RemoteList {
|
||||
pub entries: HashSet<String>,
|
||||
pub expires: Instant,
|
||||
}
|
||||
|
||||
impl Default for RemoteLists {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
lists: RwLock::new(AHashMap::new()),
|
||||
}
|
||||
}
|
||||
}
|
369
crates/common/src/config/server/listener.rs
Normal file
369
crates/common/src/config/server/listener.rs
Normal file
|
@ -0,0 +1,369 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::{net::SocketAddr, sync::Arc};
|
||||
|
||||
use rustls::{
|
||||
crypto::ring::{default_provider, ALL_CIPHER_SUITES},
|
||||
server::ResolvesServerCert,
|
||||
ServerConfig, SupportedCipherSuite, ALL_VERSIONS,
|
||||
};
|
||||
use tokio::net::TcpSocket;
|
||||
use tokio_rustls::TlsAcceptor;
|
||||
use utils::config::{
|
||||
utils::{AsKey, ParseValue},
|
||||
Config,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
listener::{acme::directory::ACME_TLS_ALPN_NAME, tls::CertificateResolver, TcpAcceptor},
|
||||
ConfigBuilder,
|
||||
};
|
||||
|
||||
use super::{
|
||||
tls::{TLS12_VERSION, TLS13_VERSION},
|
||||
Listener, Server, ServerProtocol,
|
||||
};
|
||||
|
||||
impl ConfigBuilder {
    /// Parses all TLS certificates, ACME managers and `server.listener.<id>`
    /// definitions from `config`, populating `self.servers`.
    ///
    /// Certificates and ACME managers are parsed first because listeners
    /// reference them by id via `tls.certificate` / `tls.acme`.
    pub fn parse_servers(&mut self, config: &mut Config) {
        // Parse certificates and ACME managers
        self.parse_certificates(config);
        self.parse_acmes(config);

        // Parse servers. Ids are collected into owned strings first so that
        // `config` can be mutably re-borrowed by parse_server inside the loop.
        let ids = config
            .sub_keys("server.listener", ".protocol")
            .map(|s| s.to_string())
            .collect::<Vec<_>>();
        for id in ids {
            self.parse_server(config, id);
        }
    }

    /// Parses a single `server.listener.<id>` definition and appends the
    /// resulting `Server` to `self.servers`.
    ///
    /// On any unrecoverable problem (bad bind address, unknown certificate or
    /// ACME id, TLS builder failure) an error is recorded on `config` and the
    /// listener is skipped entirely (early return).
    fn parse_server(&mut self, config: &mut Config, id_: String) {
        // Parse protocol; a listener without a protocol is silently skipped
        // (property_require_ records the error on `config`).
        let id = id_.as_str();
        let protocol =
            if let Some(protocol) = config.property_require_(("server.listener", id, "protocol")) {
                protocol
            } else {
                return;
            };

        // Build one Listener per `bind` entry.
        let mut listeners = Vec::new();
        for (_, addr) in config.properties_::<SocketAddr>(("server.listener", id, "bind")) {
            // Parse bind address and build socket
            let socket = match if addr.is_ipv4() {
                TcpSocket::new_v4()
            } else {
                TcpSocket::new_v6()
            } {
                Ok(socket) => socket,
                Err(err) => {
                    config.new_build_error(
                        ("server.listener", id, "bind"),
                        format!("Failed to create socket: {err}"),
                    );
                    return;
                }
            };

            // Set socket options; listener-specific values override the
            // global `server.socket.*` fallbacks.
            for option in [
                "reuse-addr",
                "reuse-port",
                "send-buffer-size",
                "recv-buffer-size",
                "tos",
            ] {
                if let Some(value) = config.value_or_else(
                    ("server.listener", id, "socket", option),
                    ("server.socket", option),
                ) {
                    let value = value.to_string();
                    let key = ("server.listener", id, "socket", option);
                    let result = match option {
                        "reuse-addr" => socket
                            .set_reuseaddr(config.try_parse_value(key, &value).unwrap_or(true)),
                        // SO_REUSEPORT is unavailable on Windows/MSVC targets;
                        // there the option falls through to the `_` arm.
                        #[cfg(not(target_env = "msvc"))]
                        "reuse-port" => socket
                            .set_reuseport(config.try_parse_value(key, &value).unwrap_or(false)),
                        "send-buffer-size" => {
                            if let Some(value) = config.try_parse_value(key, &value) {
                                socket.set_send_buffer_size(value)
                            } else {
                                continue;
                            }
                        }
                        "recv-buffer-size" => {
                            if let Some(value) = config.try_parse_value(key, &value) {
                                socket.set_recv_buffer_size(value)
                            } else {
                                continue;
                            }
                        }
                        "tos" => {
                            if let Some(value) = config.try_parse_value(key, &value) {
                                socket.set_tos(value)
                            } else {
                                continue;
                            }
                        }
                        _ => continue,
                    };

                    if let Err(err) = result {
                        config.new_build_error(key, format!("Failed to set socket option: {err}"));
                    }
                }
            }

            listeners.push(Listener {
                socket,
                addr,
                ttl: config
                    .property_or_else_(("server.listener", id, "socket.ttl"), "server.socket.ttl"),
                backlog: config.property_or_else_(
                    ("server.listener", id, "socket.backlog"),
                    "server.socket.backlog",
                ),
                linger: config.property_or_else_(
                    ("server.listener", id, "socket.linger"),
                    "server.socket.linger",
                ),
                nodelay: config
                    .property_or_else_(
                        ("server.listener", id, "socket.nodelay"),
                        "server.socket.nodelay",
                    )
                    .unwrap_or(true),
            });
        }

        if listeners.is_empty() {
            config.new_build_error(
                ("server.listener", id),
                "No 'bind' directive found for listener",
            );
            return;
        }

        // Build TLS config (only when `tls.enable` resolves to true).
        let (acceptor, tls_implicit) = if config
            .property_or_else_(("server.listener", id, "tls.enable"), "server.tls.enable")
            .unwrap_or(false)
        {
            // Parse protocol versions; both flags start enabled and
            // `tls.disable-protocols` entries switch them off.
            let mut tls_v2 = true;
            let mut tls_v3 = true;
            let mut proto_err = None;
            for (_, protocol) in config.values_or_else(
                ("server.listener", id, "tls.disable-protocols"),
                "server.tls.disable-protocols",
            ) {
                match protocol {
                    "TLSv1.2" | "0x0303" => tls_v2 = false,
                    "TLSv1.3" | "0x0304" => tls_v3 = false,
                    protocol => {
                        // Only the last unknown entry is reported (overwrites).
                        proto_err = format!("Unsupported TLS protocol {protocol:?}").into();
                    }
                }
            }

            if let Some(proto_err) = proto_err {
                config.new_parse_error(("server.listener", id, "tls.disable-protocols"), proto_err);
            }

            // Parse cipher suites to exclude from the provider.
            let mut disabled_ciphers: Vec<SupportedCipherSuite> = Vec::new();
            let cipher_keys = if config.has_prefix(("server.listener", id, "tls.disable-ciphers")) {
                ("server.listener", id, "tls.disable-ciphers").as_key()
            } else {
                "server.tls.disable-ciphers".as_key()
            };
            for (_, protocol) in config.properties_::<SupportedCipherSuite>(cipher_keys) {
                disabled_ciphers.push(protocol);
            }

            // Build certificate resolver: either an ACME manager (by id) or a
            // static certificate (by id); neither present is a config error.
            let mut acme_acceptor = None;
            let resolver: Arc<dyn ResolvesServerCert> = if let Some(acme_id) =
                config.value_or_else(("server.listener", id, "tls.acme"), "server.tls.acme")
            {
                let acme = if let Some(acme) = self.acme_managers.get(acme_id) {
                    acme
                } else {
                    config.new_parse_error(
                        ("server.listener", id, "tls.acme"),
                        format!("Undefined ACME manager id {acme_id:?}"),
                    );
                    return;
                };

                // Check if this port is used to receive ACME challenges
                let port_key = ("acme", acme_id, "port").as_key();
                let acme_port = config
                    .property_or_default_::<u16>(port_key, "443")
                    .unwrap_or(443);
                if listeners.iter().any(|l| l.addr.port() == acme_port) {
                    acme_acceptor = Some(acme.clone());
                }

                acme.clone()
            } else if let Some(cert) = config
                .value_or_else(
                    ("server.listener", id, "tls.certificate"),
                    "server.tls.certificate",
                )
                .and_then(|cert_id| self.certificates.get(cert_id))
                .cloned()
            {
                Arc::new(CertificateResolver {
                    sni: self.certificates_sni.clone(),
                    cert,
                })
            } else {
                config.new_parse_error(
                    ("server.listener", id, "tls.certificate"),
                    "Undefined certificate id",
                );
                return;
            };

            // Build crypto provider, filtering out disabled cipher suites.
            let mut provider = default_provider();
            if !disabled_ciphers.is_empty() {
                provider.cipher_suites = ALL_CIPHER_SUITES
                    .iter()
                    .filter(|suite| !disabled_ciphers.contains(suite))
                    .copied()
                    .collect();
            }

            // Build server config. Note: disabling BOTH versions is treated
            // the same as disabling neither (ALL_VERSIONS).
            let mut server_config = match ServerConfig::builder_with_provider(provider.into())
                .with_protocol_versions(if tls_v3 == tls_v2 {
                    ALL_VERSIONS
                } else if tls_v3 {
                    TLS13_VERSION
                } else {
                    TLS12_VERSION
                }) {
                Ok(server_config) => server_config
                    .with_no_client_auth()
                    .with_cert_resolver(resolver.clone()),
                Err(err) => {
                    config.new_build_error(
                        ("server.listener", id, "tls"),
                        format!("Failed to build TLS server config: {err}"),
                    );
                    return;
                }
            };

            server_config.ignore_client_order = config
                .property_or_else_(
                    ("server.listener", id, "tls.ignore-client-order"),
                    "server.tls.ignore-client-order",
                )
                .unwrap_or(true);

            // Build acceptor. When this port also serves ACME TLS-ALPN-01
            // challenges, a secondary config advertising the ACME ALPN id is
            // used for challenge connections.
            let acceptor = if let Some(manager) = acme_acceptor {
                let mut challenge = ServerConfig::builder()
                    .with_no_client_auth()
                    .with_cert_resolver(resolver);
                challenge.alpn_protocols.push(ACME_TLS_ALPN_NAME.to_vec());
                TcpAcceptor::Acme {
                    challenge: Arc::new(challenge),
                    default: Arc::new(server_config),
                    manager,
                }
            } else {
                TcpAcceptor::Tls(TlsAcceptor::from(Arc::new(server_config)))
            };

            (
                acceptor,
                config
                    .property_or_else_(
                        ("server.listener", id, "tls.implicit"),
                        "server.tls.implicit",
                    )
                    .unwrap_or(true),
            )
        } else {
            (TcpAcceptor::Plain, false)
        };

        // Parse proxy networks (PROXY protocol trusted sources).
        let mut proxy_networks = Vec::new();
        let proxy_keys = if config.has_prefix(("server.listener", id, "proxy.trusted-networks")) {
            ("server.listener", id, "proxy.trusted-networks").as_key()
        } else {
            "server.proxy.trusted-networks".as_key()
        };
        for (_, network) in config.properties_(proxy_keys) {
            proxy_networks.push(network);
        }

        self.servers.push(Server {
            max_connections: config
                .property_or_else_(
                    ("server.listener", id, "max-connections"),
                    "server.max-connections",
                )
                .unwrap_or(8192),
            id: id_,
            protocol,
            listeners,
            acceptor,
            tls_implicit,
            proxy_networks,
        });
    }
}
|
||||
|
||||
impl ParseValue for ServerProtocol {
|
||||
fn parse_value(key: impl AsKey, value: &str) -> utils::config::Result<Self> {
|
||||
if value.eq_ignore_ascii_case("smtp") {
|
||||
Ok(Self::Smtp)
|
||||
} else if value.eq_ignore_ascii_case("lmtp") {
|
||||
Ok(Self::Lmtp)
|
||||
} else if value.eq_ignore_ascii_case("imap") {
|
||||
Ok(Self::Imap)
|
||||
} else if value.eq_ignore_ascii_case("http") | value.eq_ignore_ascii_case("https") {
|
||||
Ok(Self::Http)
|
||||
} else if value.eq_ignore_ascii_case("managesieve") {
|
||||
Ok(Self::ManageSieve)
|
||||
} else {
|
||||
Err(format!(
|
||||
"Invalid server protocol type {:?} for property {:?}.",
|
||||
value,
|
||||
key.as_key()
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
54
crates/common/src/config/server/mod.rs
Normal file
54
crates/common/src/config/server/mod.rs
Normal file
|
@ -0,0 +1,54 @@
|
|||
use std::{fmt::Display, net::SocketAddr, time::Duration};
|
||||
|
||||
use tokio::net::TcpSocket;
|
||||
use utils::config::ipmask::IpAddrMask;
|
||||
|
||||
use crate::listener::TcpAcceptor;
|
||||
|
||||
pub mod listener;
|
||||
pub mod tls;
|
||||
|
||||
/// A fully-parsed listening server instance (one `server.listener.<id>`
/// section), ready to be bound and served.
#[derive(Debug, Default)]
pub struct Server {
    /// Listener id (the `<id>` segment of `server.listener.<id>`).
    pub id: String,
    /// Application protocol served on this listener.
    pub protocol: ServerProtocol,
    /// Bound sockets, one per `bind` directive.
    pub listeners: Vec<Listener>,
    /// Source networks trusted to send PROXY-protocol headers.
    pub proxy_networks: Vec<IpAddrMask>,
    /// Connection acceptor: plain TCP, TLS, or ACME challenge-aware TLS.
    pub acceptor: TcpAcceptor,
    /// When true, TLS is negotiated immediately on connect (implicit TLS)
    /// rather than via STARTTLS-style upgrade.
    pub tls_implicit: bool,
    /// Maximum number of concurrent connections for this server.
    pub max_connections: u64,
}
|
||||
|
||||
/// A single bound TCP socket belonging to a [`Server`], together with the
/// per-socket options parsed from `server.listener.<id>.socket.*`.
#[derive(Debug)]
pub struct Listener {
    /// The (not yet listening) socket with options already applied.
    pub socket: TcpSocket,
    /// Address this socket is bound to.
    pub addr: SocketAddr,
    /// Listen backlog; `None` means use the system default.
    pub backlog: Option<u32>,

    // TCP options
    /// IP TTL; `None` means use the system default.
    pub ttl: Option<u32>,
    /// SO_LINGER duration; `None` disables linger configuration.
    pub linger: Option<Duration>,
    /// TCP_NODELAY flag (defaults to true during parsing).
    pub nodelay: bool,
}
|
||||
|
||||
/// Application-level protocols a listener can serve.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Default)]
pub enum ServerProtocol {
    /// SMTP message submission/relay (the default).
    #[default]
    Smtp,
    /// LMTP local delivery.
    Lmtp,
    /// IMAP mailbox access.
    Imap,
    /// HTTP(S); both "http" and "https" config values map here.
    Http,
    /// ManageSieve script management.
    ManageSieve,
}
|
||||
|
||||
impl Display for ServerProtocol {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
ServerProtocol::Smtp => write!(f, "smtp"),
|
||||
ServerProtocol::Lmtp => write!(f, "lmtp"),
|
||||
ServerProtocol::Imap => write!(f, "imap"),
|
||||
ServerProtocol::Http => write!(f, "http"),
|
||||
ServerProtocol::ManageSieve => write!(f, "managesieve"),
|
||||
}
|
||||
}
|
||||
}
|
233
crates/common/src/config/server/tls.rs
Normal file
233
crates/common/src/config/server/tls.rs
Normal file
|
@ -0,0 +1,233 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::{io::Cursor, sync::Arc, time::Duration};
|
||||
|
||||
use arc_swap::ArcSwap;
|
||||
use rcgen::generate_simple_self_signed;
|
||||
use rustls::{
|
||||
client::verify_server_name,
|
||||
crypto::ring::sign::any_supported_type,
|
||||
server::ParsedCertificate,
|
||||
sign::CertifiedKey,
|
||||
version::{TLS12, TLS13},
|
||||
Error, SupportedProtocolVersion,
|
||||
};
|
||||
use rustls_pemfile::{certs, read_one, Item};
|
||||
use rustls_pki_types::{DnsName, PrivateKeyDer, ServerName};
|
||||
use utils::config::Config;
|
||||
|
||||
use crate::{
|
||||
listener::{
|
||||
acme::{directory::LETS_ENCRYPT_PRODUCTION_DIRECTORY, AcmeManager},
|
||||
tls::Certificate,
|
||||
},
|
||||
ConfigBuilder,
|
||||
};
|
||||
|
||||
// Protocol-version slices passed to rustls' `with_protocol_versions` when
// only one of TLS 1.2 / TLS 1.3 remains enabled after config parsing.
pub static TLS13_VERSION: &[&SupportedProtocolVersion] = &[&TLS13];
pub static TLS12_VERSION: &[&SupportedProtocolVersion] = &[&TLS12];
|
||||
|
||||
impl ConfigBuilder {
    /// Parses every `certificate.<id>` section: builds a rustls certified key
    /// from the PEM cert/private-key pair, validates the optional
    /// `sni-subjects` names against the certificate, and registers the result
    /// in `self.certificates` (by id) and `self.certificates_sni` (by name).
    pub fn parse_certificates(&mut self, config: &mut Config) {
        let cert_ids = config
            .sub_keys("certificate", ".cert")
            .map(|s| s.to_string())
            .collect::<Vec<_>>();
        for cert_id in cert_ids {
            let cert_id = cert_id.as_str();
            let key_cert = ("certificate", cert_id, "cert");
            let key_pk = ("certificate", cert_id, "private-key");

            // Both values are required; value_require_ records the error on
            // `config` when missing and the pair below simply skips the entry.
            let cert = config
                .value_require_(key_cert)
                .map(|s| s.as_bytes().to_vec());
            let pk = config.value_require_(key_pk).map(|s| s.as_bytes().to_vec());

            if let (Some(cert), Some(pk)) = (cert, pk) {
                match build_certified_key(cert, pk) {
                    Ok(cert) => {
                        // Parse alternative names; each must actually be
                        // covered by the certificate (verify_server_name).
                        let subjects = config
                            .values(("certificate", cert_id, "sni-subjects"))
                            .map(|(_, v)| v.to_string())
                            .collect::<Vec<_>>();
                        let mut sni_names = Vec::new();
                        for subject in subjects {
                            match DnsName::try_from(subject)
                                .map_err(|_| Error::General("Bad DNS name".into()))
                                .map(|name| ServerName::DnsName(name.to_lowercase_owned()))
                                .and_then(|name| {
                                    cert.end_entity_cert()
                                        .and_then(ParsedCertificate::try_from)
                                        .and_then(|cert| verify_server_name(&cert, &name))
                                        .map(|_| name)
                                }) {
                                Ok(ServerName::DnsName(server_name)) => {
                                    sni_names.push(server_name.as_ref().to_string());
                                }
                                Ok(_) => {}
                                Err(err) => {
                                    config.new_parse_error(
                                        ("certificate", cert_id, "sni-subjects"),
                                        err.to_string(),
                                    );
                                }
                            }
                        }

                        // ArcSwap allows the certificate to be hot-swapped
                        // later without rebuilding the resolver.
                        let cert = Arc::new(Certificate {
                            cert: ArcSwap::from(Arc::new(cert)),
                            cert_id: cert_id.to_string(),
                        });

                        for sni_name in sni_names {
                            self.certificates_sni.insert(sni_name, cert.clone());
                        }

                        self.certificates.insert(cert_id.to_string(), cert);
                    }
                    Err(err) => config.new_build_error(format!("certificate.{cert_id}"), err),
                }
            }
        }
    }

    /// Parses every `acme.<id>` section and creates the corresponding
    /// `AcmeManager`, registered in `self.acme_managers`.
    ///
    /// The set of domains each manager covers is derived from the listeners
    /// that reference it via `tls.acme`; managers with no covered domains are
    /// skipped without error.
    pub fn parse_acmes(&mut self, config: &mut Config) {
        let acme_ids = config
            .sub_keys("acme", ".cache")
            .map(|s| s.to_string())
            .collect::<Vec<_>>();
        for acme_id in acme_ids {
            // Directory URL defaults to Let's Encrypt production.
            let directory = config
                .value(("acme", acme_id.as_str(), "directory"))
                .unwrap_or(LETS_ENCRYPT_PRODUCTION_DIRECTORY)
                .trim()
                .to_string();
            let contact = config
                .values(("acme", acme_id.as_str(), "contact"))
                .filter_map(|(_, v)| {
                    let v = v.trim().to_string();
                    if !v.is_empty() {
                        Some(v)
                    } else {
                        None
                    }
                })
                .collect::<Vec<_>>();
            // Renew 30 days before expiry unless configured otherwise.
            let renew_before: Duration = config
                .property_or_default_(("acme", acme_id.as_str(), "renew-before"), "30d")
                .unwrap_or_else(|| Duration::from_secs(30 * 24 * 60 * 60));

            if directory.is_empty() {
                config.new_parse_error(format!("acme.{acme_id}.directory"), "Missing property");
                continue;
            }

            if contact.is_empty() {
                config.new_parse_error(format!("acme.{acme_id}.contact"), "Missing property");
                continue;
            }

            // Find which domains are covered by this ACME manager: the
            // hostname of every listener whose `tls.acme` points at it.
            let mut domains = Vec::new();
            for id in config.sub_keys("server.listener", ".protocol") {
                match (
                    config.value_or_else(("server.listener", id, "tls.acme"), "server.tls.acme"),
                    config.value_or_else(("server.listener", id, "hostname"), "server.hostname"),
                ) {
                    (Some(listener_acme), Some(hostname)) if listener_acme == acme_id => {
                        let hostname = hostname.trim().to_lowercase();

                        if !domains.contains(&hostname) {
                            domains.push(hostname);
                        }
                    }
                    _ => (),
                }
            }

            if !domains.is_empty() {
                match AcmeManager::new(
                    acme_id.to_string(),
                    directory,
                    domains,
                    contact,
                    renew_before,
                    self.core.storage.data.clone(),
                ) {
                    Ok(acme_manager) => {
                        self.acme_managers
                            .insert(acme_id.to_string(), Arc::new(acme_manager));
                    }
                    Err(err) => {
                        config.new_build_error(format!("acme.{acme_id}"), err);
                    }
                }
            }
        }
    }
}
|
||||
|
||||
pub(crate) fn build_certified_key(
|
||||
cert: Vec<u8>,
|
||||
pk: Vec<u8>,
|
||||
) -> utils::config::Result<CertifiedKey> {
|
||||
let cert = certs(&mut Cursor::new(cert))
|
||||
.collect::<Result<Vec<_>, _>>()
|
||||
.map_err(|err| format!("Failed to read certificates: {err}"))?;
|
||||
if cert.is_empty() {
|
||||
return Err("No certificates found.".to_string());
|
||||
}
|
||||
let pk = match read_one(&mut Cursor::new(pk))
|
||||
.map_err(|err| format!("Failed to read private keys.: {err}",))?
|
||||
.into_iter()
|
||||
.next()
|
||||
{
|
||||
Some(Item::Pkcs8Key(key)) => PrivateKeyDer::Pkcs8(key),
|
||||
Some(Item::Pkcs1Key(key)) => PrivateKeyDer::Pkcs1(key),
|
||||
Some(Item::Sec1Key(key)) => PrivateKeyDer::Sec1(key),
|
||||
Some(_) => return Err("Unsupported private keys found.".to_string()),
|
||||
None => return Err("No private keys found.".to_string()),
|
||||
};
|
||||
|
||||
Ok(CertifiedKey {
|
||||
cert,
|
||||
key: any_supported_type(&pk)
|
||||
.map_err(|err| format!("Failed to sign certificate: {err}",))?,
|
||||
ocsp: None,
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn build_self_signed_cert(domains: &[String]) -> utils::config::Result<CertifiedKey> {
|
||||
let cert = generate_simple_self_signed(domains).map_err(|err| {
|
||||
format!(
|
||||
"Failed to generate self-signed certificate for {domains:?}: {err}",
|
||||
domains = domains
|
||||
)
|
||||
})?;
|
||||
build_certified_key(
|
||||
cert.serialize_pem().unwrap().into_bytes(),
|
||||
cert.serialize_private_key_pem().into_bytes(),
|
||||
)
|
||||
}
|
131
crates/common/src/config/smtp/auth.rs
Normal file
131
crates/common/src/config/smtp/auth.rs
Normal file
|
@ -0,0 +1,131 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use ahash::AHashMap;
|
||||
use mail_auth::{
|
||||
common::crypto::{Ed25519Key, RsaKey, Sha256},
|
||||
dkim::Done,
|
||||
};
|
||||
use utils::config::utils::{AsKey, ParseValue};
|
||||
|
||||
use crate::expr::{self, if_block::IfBlock, Constant, ConstantValue};
|
||||
|
||||
/// Mail-authentication settings: verification/signing policies per mechanism
/// plus the named DKIM signers and ARC sealers they reference.
pub struct MailAuthConfig {
    pub dkim: DkimAuthConfig,
    pub arc: ArcAuthConfig,
    pub spf: SpfAuthConfig,
    pub dmarc: DmarcAuthConfig,
    pub iprev: IpRevAuthConfig,

    /// DKIM signers by id, referenced from signing if-blocks.
    pub signers: AHashMap<String, Arc<DkimSigner>>,
    /// ARC sealers by id, referenced from sealing if-blocks.
    pub sealers: AHashMap<String, Arc<ArcSealer>>,
}
|
||||
|
||||
/// DKIM policy: when to verify incoming signatures and when to sign.
pub struct DkimAuthConfig {
    pub verify: IfBlock,
    pub sign: IfBlock,
}

/// ARC policy: when to verify incoming chains and when to seal.
pub struct ArcAuthConfig {
    pub verify: IfBlock,
    pub seal: IfBlock,
}

/// SPF policy for the EHLO identity and the MAIL FROM identity.
pub struct SpfAuthConfig {
    pub verify_ehlo: IfBlock,
    pub verify_mail_from: IfBlock,
}
/// DMARC verification policy.
pub struct DmarcAuthConfig {
    pub verify: IfBlock,
}

/// Reverse-IP (iprev) verification policy.
pub struct IpRevAuthConfig {
    pub verify: IfBlock,
}
|
||||
|
||||
/// How strictly an authentication mechanism is enforced.
#[derive(Debug, Clone, Copy, Default)]
pub enum VerifyStrategy {
    /// Verify when possible, tolerate failures (the default).
    #[default]
    Relaxed,
    /// Verify and enforce the result.
    Strict,
    /// Skip verification entirely.
    Disable,
}
|
||||
|
||||
/// A ready-to-use DKIM signer, keyed by algorithm.
pub enum DkimSigner {
    RsaSha256(mail_auth::dkim::DkimSigner<RsaKey<Sha256>, Done>),
    Ed25519Sha256(mail_auth::dkim::DkimSigner<Ed25519Key, Done>),
}

/// A ready-to-use ARC sealer, keyed by algorithm.
pub enum ArcSealer {
    RsaSha256(mail_auth::arc::ArcSealer<RsaKey<Sha256>, Done>),
    Ed25519Sha256(mail_auth::arc::ArcSealer<Ed25519Key, Done>),
}
|
||||
|
||||
impl Default for MailAuthConfig {
    /// Default policy: relaxed verification for every mechanism, no signing,
    /// sealing, signers or sealers configured.
    fn default() -> Self {
        Self {
            dkim: DkimAuthConfig {
                verify: IfBlock::new(VerifyStrategy::Relaxed),
                sign: Default::default(),
            },
            arc: ArcAuthConfig {
                verify: IfBlock::new(VerifyStrategy::Relaxed),
                seal: Default::default(),
            },
            spf: SpfAuthConfig {
                verify_ehlo: IfBlock::new(VerifyStrategy::Relaxed),
                verify_mail_from: IfBlock::new(VerifyStrategy::Relaxed),
            },
            dmarc: DmarcAuthConfig {
                verify: IfBlock::new(VerifyStrategy::Relaxed),
            },
            iprev: IpRevAuthConfig {
                verify: IfBlock::new(VerifyStrategy::Relaxed),
            },
            signers: Default::default(),
            sealers: Default::default(),
        }
    }
}
|
||||
|
||||
impl<'x> TryFrom<expr::Variable<'x>> for VerifyStrategy {
|
||||
type Error = ();
|
||||
|
||||
fn try_from(value: expr::Variable<'x>) -> Result<Self, Self::Error> {
|
||||
match value {
|
||||
expr::Variable::Integer(c) => match c {
|
||||
2 => Ok(VerifyStrategy::Relaxed),
|
||||
3 => Ok(VerifyStrategy::Strict),
|
||||
4 => Ok(VerifyStrategy::Disable),
|
||||
_ => Err(()),
|
||||
},
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<VerifyStrategy> for Constant {
|
||||
fn from(value: VerifyStrategy) -> Self {
|
||||
Constant::Integer(match value {
|
||||
VerifyStrategy::Relaxed => 2,
|
||||
VerifyStrategy::Strict => 3,
|
||||
VerifyStrategy::Disable => 4,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ParseValue for VerifyStrategy {
|
||||
fn parse_value(key: impl AsKey, value: &str) -> Result<Self, String> {
|
||||
match value {
|
||||
"relaxed" => Ok(VerifyStrategy::Relaxed),
|
||||
"strict" => Ok(VerifyStrategy::Strict),
|
||||
"disable" | "disabled" | "never" | "none" => Ok(VerifyStrategy::Disable),
|
||||
_ => Err(format!(
|
||||
"Invalid value {:?} for key {:?}.",
|
||||
value,
|
||||
key.as_key()
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Marker: allows VerifyStrategy to be used as a constant in expressions.
impl ConstantValue for VerifyStrategy {}
|
41
crates/common/src/config/smtp/mod.rs
Normal file
41
crates/common/src/config/smtp/mod.rs
Normal file
|
@ -0,0 +1,41 @@
|
|||
use utils::{config::Rate, expr::Expression};
|
||||
|
||||
pub mod auth;
|
||||
pub mod queue;
|
||||
pub mod report;
|
||||
pub mod resolver;
|
||||
pub mod session;
|
||||
|
||||
use self::{
|
||||
auth::MailAuthConfig, queue::QueueConfig, report::ReportConfig, resolver::Resolvers,
|
||||
session::SessionConfig,
|
||||
};
|
||||
|
||||
/// Top-level SMTP configuration, grouping the per-area settings structs.
#[derive(Default)]
pub struct SmtpConfig {
    /// Inbound session (connect/EHLO/MAIL/RCPT/DATA) settings.
    pub session: SessionConfig,
    /// Outbound queue settings.
    pub queue: QueueConfig,
    /// DNS resolvers.
    pub resolvers: Resolvers,
    /// DKIM/ARC/SPF/DMARC/iprev settings.
    pub mail_auth: MailAuthConfig,
    /// Report generation and analysis settings.
    pub report: ReportConfig,
}
|
||||
|
||||
/// A throttle rule: applies when `expr` matches, grouped by the key fields
/// selected in `keys` (bitmask of the THROTTLE_* constants below).
#[derive(Debug, Default)]
#[cfg_attr(feature = "test_mode", derive(PartialEq, Eq))]
pub struct Throttle {
    /// Match expression deciding whether this throttle applies.
    pub expr: Expression,
    /// Bitmask of THROTTLE_* flags selecting the grouping keys.
    pub keys: u16,
    /// Maximum concurrent operations per key, if limited.
    pub concurrency: Option<u64>,
    /// Maximum rate per key, if limited.
    pub rate: Option<Rate>,
}
|
||||
|
||||
// Bit flags for `Throttle::keys` / `QueueQuota::keys`: each selects one
// attribute to group the throttle/quota counters by.
pub const THROTTLE_RCPT: u16 = 1 << 0;
pub const THROTTLE_RCPT_DOMAIN: u16 = 1 << 1;
pub const THROTTLE_SENDER: u16 = 1 << 2;
pub const THROTTLE_SENDER_DOMAIN: u16 = 1 << 3;
pub const THROTTLE_AUTH_AS: u16 = 1 << 4;
pub const THROTTLE_LISTENER: u16 = 1 << 5;
pub const THROTTLE_MX: u16 = 1 << 6;
pub const THROTTLE_REMOTE_IP: u16 = 1 << 7;
pub const THROTTLE_LOCAL_IP: u16 = 1 << 8;
pub const THROTTLE_HELO_DOMAIN: u16 = 1 << 9;
|
228
crates/common/src/config/smtp/queue.rs
Normal file
228
crates/common/src/config/smtp/queue.rs
Normal file
|
@ -0,0 +1,228 @@
|
|||
use std::time::Duration;
|
||||
|
||||
use ahash::AHashMap;
|
||||
use mail_auth::IpLookupStrategy;
|
||||
use mail_send::Credentials;
|
||||
use utils::config::{
|
||||
utils::{AsKey, ParseValue},
|
||||
ServerProtocol,
|
||||
};
|
||||
|
||||
use crate::expr::{if_block::IfBlock, Constant, ConstantValue, Expression, Variable};
|
||||
|
||||
use super::Throttle;
|
||||
|
||||
/// Outbound mail-queue configuration. Most knobs are `IfBlock`s, i.e.
/// expressions evaluated per message/recipient at queue time.
pub struct QueueConfig {
    // Schedule
    /// Retry interval after a temporary failure.
    pub retry: IfBlock,
    /// Interval for sending delayed-delivery notifications.
    pub notify: IfBlock,
    /// How long a message may stay queued before expiring.
    pub expire: IfBlock,

    // Outbound
    /// EHLO hostname used on outbound connections.
    pub hostname: IfBlock,
    /// Optional relay/next-hop selection.
    pub next_hop: IfBlock,
    /// Maximum MX hosts to try per domain.
    pub max_mx: IfBlock,
    /// Maximum addresses to try per multi-homed MX host.
    pub max_multihomed: IfBlock,
    /// IPv4/IPv6 resolution strategy.
    pub ip_strategy: IfBlock,
    pub source_ip: QueueOutboundSourceIp,
    pub tls: QueueOutboundTls,
    pub dsn: Dsn,

    // Timeouts
    pub timeout: QueueOutboundTimeout,

    // Throttle and Quotas
    pub throttle: QueueThrottle,
    pub quota: QueueQuotas,

    // Relay hosts
    /// Named relay hosts referenced by `next_hop`.
    pub relay_hosts: AHashMap<String, RelayHost>,
}
|
||||
|
||||
/// Source IP selection for outbound connections, per address family.
pub struct QueueOutboundSourceIp {
    pub ipv4: IfBlock,
    pub ipv6: IfBlock,
}

/// Delivery status notification (bounce/delay report) settings.
pub struct Dsn {
    /// Sender display name used on DSNs.
    pub name: IfBlock,
    /// Sender address used on DSNs.
    pub address: IfBlock,
    /// DKIM signing policy for DSNs.
    pub sign: IfBlock,
}

/// Outbound TLS policy.
pub struct QueueOutboundTls {
    /// DANE enforcement (require/optional/disable).
    pub dane: IfBlock,
    /// MTA-STS enforcement (require/optional/disable).
    pub mta_sts: IfBlock,
    /// STARTTLS usage (require/optional/disable).
    pub start: IfBlock,
    /// Whether invalid remote certificates are tolerated.
    pub invalid_certs: IfBlock,
}

/// Per-phase timeouts for outbound SMTP sessions.
pub struct QueueOutboundTimeout {
    pub connect: IfBlock,
    pub greeting: IfBlock,
    pub tls: IfBlock,
    pub ehlo: IfBlock,
    pub mail: IfBlock,
    pub rcpt: IfBlock,
    pub data: IfBlock,
    /// Timeout for fetching MTA-STS policies.
    pub mta_sts: IfBlock,
}
|
||||
|
||||
/// Queue throttles, grouped by the entity they limit.
#[derive(Debug)]
pub struct QueueThrottle {
    pub sender: Vec<Throttle>,
    pub rcpt: Vec<Throttle>,
    pub host: Vec<Throttle>,
}

/// Queue quotas, grouped by the entity they limit.
pub struct QueueQuotas {
    pub sender: Vec<QueueQuota>,
    pub rcpt: Vec<QueueQuota>,
    pub rcpt_domain: Vec<QueueQuota>,
}

/// A quota rule: applies when `expr` matches, counted per the key fields
/// selected in `keys` (THROTTLE_* bitmask).
pub struct QueueQuota {
    pub expr: Expression,
    pub keys: u16,
    /// Maximum total queued bytes, if limited.
    pub size: Option<usize>,
    /// Maximum queued message count, if limited.
    pub messages: Option<usize>,
}

/// A configured relay (smart host) destination.
pub struct RelayHost {
    pub address: String,
    pub port: u16,
    /// Protocol spoken to the relay (e.g. SMTP or LMTP).
    pub protocol: ServerProtocol,
    /// Optional credentials for authenticating to the relay.
    pub auth: Option<Credentials<String>>,
    /// Whether TLS is negotiated immediately on connect.
    pub tls_implicit: bool,
    /// Whether invalid relay certificates are tolerated.
    pub tls_allow_invalid_certs: bool,
}
|
||||
|
||||
/// Tri-state enforcement level for optional security features
/// (DANE, MTA-STS, STARTTLS).
#[derive(Debug, Clone, Copy, Default)]
pub enum RequireOptional {
    /// Use when available, don't require (the default).
    #[default]
    Optional,
    /// Fail delivery when the feature cannot be used.
    Require,
    /// Never use the feature.
    Disable,
}
|
||||
|
||||
impl Default for QueueConfig {
    /// Conservative defaults: 5-minute retries, daily delay notifications,
    /// 5-day expiry, optional (opportunistic) TLS, and no throttles, quotas
    /// or relay hosts.
    fn default() -> Self {
        Self {
            retry: IfBlock::new(Duration::from_secs(5 * 60)),
            notify: IfBlock::new(Duration::from_secs(86400)),
            expire: IfBlock::new(Duration::from_secs(5 * 86400)),
            hostname: IfBlock::new("localhost".to_string()),
            next_hop: Default::default(),
            max_mx: IfBlock::new(5),
            max_multihomed: IfBlock::new(2),
            ip_strategy: IfBlock::new(IpLookupStrategy::Ipv4thenIpv6),
            source_ip: QueueOutboundSourceIp {
                ipv4: Default::default(),
                ipv6: Default::default(),
            },
            tls: QueueOutboundTls {
                dane: IfBlock::new(RequireOptional::Optional),
                mta_sts: IfBlock::new(RequireOptional::Optional),
                start: IfBlock::new(RequireOptional::Optional),
                invalid_certs: IfBlock::new(false),
            },
            dsn: Dsn {
                name: IfBlock::new("Mail Delivery Subsystem".to_string()),
                address: IfBlock::new("MAILER-DAEMON@localhost".to_string()),
                sign: Default::default(),
            },
            timeout: QueueOutboundTimeout {
                connect: IfBlock::new(Duration::from_secs(5 * 60)),
                greeting: IfBlock::new(Duration::from_secs(5 * 60)),
                tls: IfBlock::new(Duration::from_secs(3 * 60)),
                ehlo: IfBlock::new(Duration::from_secs(5 * 60)),
                mail: IfBlock::new(Duration::from_secs(5 * 60)),
                rcpt: IfBlock::new(Duration::from_secs(5 * 60)),
                data: IfBlock::new(Duration::from_secs(10 * 60)),
                mta_sts: IfBlock::new(Duration::from_secs(10 * 60)),
            },
            throttle: QueueThrottle {
                sender: Default::default(),
                rcpt: Default::default(),
                host: Default::default(),
            },
            quota: QueueQuotas {
                sender: Default::default(),
                rcpt: Default::default(),
                rcpt_domain: Default::default(),
            },
            relay_hosts: Default::default(),
        }
    }
}
|
||||
|
||||
impl ParseValue for RequireOptional {
|
||||
fn parse_value(key: impl AsKey, value: &str) -> utils::config::Result<Self> {
|
||||
match value {
|
||||
"optional" => Ok(RequireOptional::Optional),
|
||||
"require" | "required" => Ok(RequireOptional::Require),
|
||||
"disable" | "disabled" | "none" | "false" => Ok(RequireOptional::Disable),
|
||||
_ => Err(format!(
|
||||
"Invalid TLS option value {:?} for key {:?}.",
|
||||
value,
|
||||
key.as_key()
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'x> TryFrom<Variable<'x>> for RequireOptional {
|
||||
type Error = ();
|
||||
|
||||
fn try_from(value: Variable<'x>) -> Result<Self, Self::Error> {
|
||||
match value {
|
||||
Variable::Integer(2) => Ok(RequireOptional::Optional),
|
||||
Variable::Integer(1) => Ok(RequireOptional::Require),
|
||||
Variable::Integer(0) => Ok(RequireOptional::Disable),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<RequireOptional> for Constant {
|
||||
fn from(value: RequireOptional) -> Self {
|
||||
Constant::Integer(match value {
|
||||
RequireOptional::Optional => 2,
|
||||
RequireOptional::Require => 1,
|
||||
RequireOptional::Disable => 0,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Marker: allows RequireOptional to be used as a constant in expressions.
impl ConstantValue for RequireOptional {}
|
||||
|
||||
impl<'x> TryFrom<Variable<'x>> for IpLookupStrategy {
    type Error = ();

    /// Converts an expression result into an `IpLookupStrategy`: integer
    /// codes 2-5 (as produced by the `From` impl below), or a string parsed
    /// with the regular config parser. Anything else is an error.
    fn try_from(value: Variable<'x>) -> Result<Self, Self::Error> {
        match value {
            Variable::Integer(value) => match value {
                2 => Ok(IpLookupStrategy::Ipv4Only),
                3 => Ok(IpLookupStrategy::Ipv6Only),
                4 => Ok(IpLookupStrategy::Ipv6thenIpv4),
                5 => Ok(IpLookupStrategy::Ipv4thenIpv6),
                _ => Err(()),
            },
            // The key is only used for error reporting, which is discarded
            // here, so an empty key is fine.
            Variable::String(value) => IpLookupStrategy::parse_value("", &value).map_err(|_| ()),
            _ => Err(()),
        }
    }
}
|
||||
|
||||
impl From<IpLookupStrategy> for Constant {
|
||||
fn from(value: IpLookupStrategy) -> Self {
|
||||
Constant::Integer(match value {
|
||||
IpLookupStrategy::Ipv4Only => 2,
|
||||
IpLookupStrategy::Ipv6Only => 3,
|
||||
IpLookupStrategy::Ipv6thenIpv4 => 4,
|
||||
IpLookupStrategy::Ipv4thenIpv6 => 5,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Marker: allows IpLookupStrategy to be used as a constant in expressions.
impl ConstantValue for IpLookupStrategy {}
|
147
crates/common/src/config/smtp/report.rs
Normal file
147
crates/common/src/config/smtp/report.rs
Normal file
|
@ -0,0 +1,147 @@
|
|||
use std::time::Duration;
|
||||
|
||||
use utils::{
|
||||
config::utils::{AsKey, ParseValue},
|
||||
snowflake::SnowflakeIdGenerator,
|
||||
};
|
||||
|
||||
use crate::expr::{if_block::IfBlock, Constant, ConstantValue, Variable};
|
||||
|
||||
/// Configuration for generating DKIM/SPF/DMARC/TLS reports and for handling
/// reports received from remote hosts.
pub struct ReportConfig {
    // Expression yielding the submitter host name used in outgoing reports.
    pub submitter: IfBlock,
    // Settings for incoming report analysis.
    pub analysis: ReportAnalysis,

    // Per-protocol failure report settings.
    pub dkim: Report,
    pub spf: Report,
    pub dmarc: Report,
    // Aggregate report settings (DMARC aggregate and TLS-RPT).
    pub dmarc_aggregate: AggregateReport,
    pub tls: AggregateReport,
}

/// How incoming reports are recognized and processed.
pub struct ReportAnalysis {
    // Address patterns that select which recipients are treated as
    // report addresses.
    pub addresses: Vec<AddressMatch>,
    // Whether matched messages are also forwarded on.
    pub forward: bool,
    // Retention period for stored reports; `None` disables storage.
    pub store: Option<Duration>,
    // Generator for unique report identifiers.
    pub report_id: SnowflakeIdGenerator,
}

/// Pattern for matching an e-mail address by prefix, suffix, or exact value.
pub enum AddressMatch {
    StartsWith(String),
    EndsWith(String),
    Equals(String),
}

/// Settings for a periodic aggregate report (DMARC aggregate / TLS-RPT).
/// All fields are expressions evaluated per report.
pub struct AggregateReport {
    // Sender display name.
    pub name: IfBlock,
    // Sender address.
    pub address: IfBlock,
    pub org_name: IfBlock,
    pub contact_info: IfBlock,
    // Sending frequency; expected to yield an `AggregateFrequency`.
    pub send: IfBlock,
    // DKIM signing configuration for the outgoing report.
    pub sign: IfBlock,
    // Maximum report size in bytes.
    pub max_size: IfBlock,
}

/// Settings for a per-message failure report (DKIM/SPF/DMARC).
pub struct Report {
    // Sender display name.
    pub name: IfBlock,
    // Sender address.
    pub address: IfBlock,
    // Subject line of the outgoing report.
    pub subject: IfBlock,
    // DKIM signing configuration.
    pub sign: IfBlock,
    // Whether/when the report is sent.
    pub send: IfBlock,
}

/// How often aggregate reports are sent; `Never` disables them.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub enum AggregateFrequency {
    Hourly,
    Daily,
    Weekly,
    #[default]
    Never,
}
|
||||
|
||||
impl Default for ReportConfig {
    /// Defaults: reports are submitted as "localhost", incoming report
    /// analysis forwards matched messages but stores nothing, and all
    /// per-protocol settings use their own `Default` implementations.
    fn default() -> Self {
        Self {
            submitter: IfBlock::new("localhost".to_string()),
            analysis: ReportAnalysis {
                addresses: Default::default(),
                forward: true,
                store: None,
                report_id: SnowflakeIdGenerator::new(),
            },
            dkim: Default::default(),
            spf: Default::default(),
            dmarc: Default::default(),
            dmarc_aggregate: Default::default(),
            tls: Default::default(),
        }
    }
}
|
||||
|
||||
impl Default for Report {
    /// Defaults for failure reports: sent from
    /// "Mail Delivery Subsystem" <MAILER-DAEMON@localhost> with subject
    /// "Report"; signing and sending policies are left empty.
    fn default() -> Self {
        Self {
            name: IfBlock::new("Mail Delivery Subsystem".to_string()),
            address: IfBlock::new("MAILER-DAEMON@localhost".to_string()),
            subject: IfBlock::new("Report".to_string()),
            sign: Default::default(),
            send: Default::default(),
        }
    }
}
|
||||
|
||||
impl Default for AggregateReport {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
name: IfBlock::new("Reporting Subsystem".to_string()),
|
||||
address: IfBlock::new("no-replyN@localhost".to_string()),
|
||||
org_name: Default::default(),
|
||||
contact_info: Default::default(),
|
||||
send: IfBlock::new(AggregateFrequency::Never),
|
||||
sign: Default::default(),
|
||||
max_size: IfBlock::new(25 * 1024 * 1024),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ParseValue for AggregateFrequency {
|
||||
fn parse_value(key: impl AsKey, value: &str) -> utils::config::Result<Self> {
|
||||
match value {
|
||||
"daily" | "day" => Ok(AggregateFrequency::Daily),
|
||||
"hourly" | "hour" => Ok(AggregateFrequency::Hourly),
|
||||
"weekly" | "week" => Ok(AggregateFrequency::Weekly),
|
||||
"never" | "disable" | "false" => Ok(AggregateFrequency::Never),
|
||||
_ => Err(format!(
|
||||
"Invalid aggregate frequency value {:?} for key {:?}.",
|
||||
value,
|
||||
key.as_key()
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<AggregateFrequency> for Constant {
|
||||
fn from(value: AggregateFrequency) -> Self {
|
||||
match value {
|
||||
AggregateFrequency::Never => 0.into(),
|
||||
AggregateFrequency::Hourly => 2.into(),
|
||||
AggregateFrequency::Daily => 3.into(),
|
||||
AggregateFrequency::Weekly => 4.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'x> TryFrom<Variable<'x>> for AggregateFrequency {
|
||||
type Error = ();
|
||||
|
||||
fn try_from(value: Variable<'x>) -> Result<Self, Self::Error> {
|
||||
match value {
|
||||
Variable::Integer(0) => Ok(AggregateFrequency::Never),
|
||||
Variable::Integer(2) => Ok(AggregateFrequency::Hourly),
|
||||
Variable::Integer(3) => Ok(AggregateFrequency::Daily),
|
||||
Variable::Integer(4) => Ok(AggregateFrequency::Weekly),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ConstantValue for AggregateFrequency {}
|
87
crates/common/src/config/smtp/resolver.rs
Normal file
87
crates/common/src/config/smtp/resolver.rs
Normal file
|
@ -0,0 +1,87 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use mail_auth::{
|
||||
common::lru::{DnsCache, LruCache},
|
||||
hickory_resolver::{
|
||||
config::{ResolverConfig, ResolverOpts},
|
||||
system_conf::read_system_conf,
|
||||
AsyncResolver, TokioAsyncResolver,
|
||||
},
|
||||
Resolver,
|
||||
};
|
||||
|
||||
/// DNS resolution facilities shared by the server: a plain resolver, a
/// DNSSEC-validating resolver, and caches for DANE/MTA-STS lookups.
pub struct Resolvers {
    pub dns: Resolver,
    pub dnssec: DnssecResolver,
    pub cache: DnsRecordCache,
}

/// Wrapper around a Tokio resolver configured with DNSSEC validation
/// enabled (see `Default for Resolvers`).
pub struct DnssecResolver {
    pub resolver: TokioAsyncResolver,
}

/// LRU caches for DNS records fetched on demand, keyed by host name.
pub struct DnsRecordCache {
    // Cached TLSA record sets (DANE).
    pub tlsa: LruCache<String, Arc<Tlsa>>,
    // Cached MTA-STS policies.
    pub mta_sts: LruCache<String, Arc<Policy>>,
}

/// A single parsed TLSA record.
#[derive(Debug, Hash, PartialEq, Eq)]
pub struct TlsaEntry {
    // Certificate usage: end-entity vs. trust anchor.
    pub is_end_entity: bool,
    // Matching type: SHA-256 (vs. SHA-512).
    pub is_sha256: bool,
    // Selector: SubjectPublicKeyInfo (vs. full certificate).
    pub is_spki: bool,
    // The association data (hash or raw bytes) to match against.
    pub data: Vec<u8>,
}

/// The full TLSA record set for a host, with summary flags indicating which
/// certificate usages are present.
#[derive(Debug, Hash, PartialEq, Eq)]
pub struct Tlsa {
    pub entries: Vec<TlsaEntry>,
    pub has_end_entities: bool,
    pub has_intermediates: bool,
}

/// MTA-STS policy mode.
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum Mode {
    Enforce,
    Testing,
    None,
}

/// MX host pattern from an MTA-STS policy: exact name or prefix wildcard.
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum MxPattern {
    Equals(String),
    StartsWith(String),
}

/// A parsed MTA-STS policy.
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct Policy {
    pub id: String,
    pub mode: Mode,
    pub mx: Vec<MxPattern>,
    // Cache lifetime in seconds.
    pub max_age: u64,
}
|
||||
|
||||
impl Default for Resolvers {
    /// Builds resolvers from the system DNS configuration, falling back to
    /// Cloudflare's public resolvers when it cannot be read. A second
    /// resolver with DNSSEC validation enabled is built from the same
    /// configuration, and the TLSA / MTA-STS caches are pre-sized to 1024
    /// entries each.
    fn default() -> Self {
        let (config, opts) = match read_system_conf() {
            Ok(conf) => conf,
            Err(_) => (ResolverConfig::cloudflare(), ResolverOpts::default()),
        };

        // Same transport configuration, but with DNSSEC validation on.
        let config_dnssec = config.clone();
        let mut opts_dnssec = opts.clone();
        opts_dnssec.validate = true;

        Self {
            dns: Resolver::with_capacities(config, opts, 1024, 1024, 1024, 1024, 1024)
                .expect("Failed to build DNS resolver"),
            dnssec: DnssecResolver {
                resolver: AsyncResolver::tokio(config_dnssec, opts_dnssec),
            },
            cache: DnsRecordCache {
                tlsa: LruCache::with_capacity(1024),
                mta_sts: LruCache::with_capacity(1024),
            },
        }
    }
}
|
211
crates/common/src/config/smtp/session.rs
Normal file
211
crates/common/src/config/smtp/session.rs
Normal file
|
@ -0,0 +1,211 @@
|
|||
use std::{net::SocketAddr, time::Duration};
|
||||
|
||||
use crate::expr::if_block::IfBlock;
|
||||
|
||||
use super::Throttle;
|
||||
|
||||
/// Per-stage configuration of an SMTP session. Most fields are `IfBlock`
/// expressions evaluated against the session context.
pub struct SessionConfig {
    // Idle timeout for the session.
    pub timeout: IfBlock,
    // Maximum total session duration.
    pub duration: IfBlock,
    // Maximum bytes transferred per session.
    pub transfer_limit: IfBlock,
    pub throttle: SessionThrottle,

    // Per-command settings, in protocol order.
    pub connect: Connect,
    pub ehlo: Ehlo,
    pub auth: Auth,
    pub mail: Mail,
    pub rcpt: Rcpt,
    pub data: Data,
    pub extensions: Extensions,
}

/// Throttle rules applied at connection, MAIL FROM and RCPT TO time.
pub struct SessionThrottle {
    pub connect: Vec<Throttle>,
    pub mail_from: Vec<Throttle>,
    pub rcpt_to: Vec<Throttle>,
}

/// Settings applied when a client connects.
pub struct Connect {
    // Sieve script to run on connect.
    pub script: IfBlock,
}

/// Settings for the EHLO/HELO stage.
pub struct Ehlo {
    pub script: IfBlock,
    // Whether EHLO is required before other commands.
    pub require: IfBlock,
    // Whether to reject EHLO host names that are not fully qualified.
    pub reject_non_fqdn: IfBlock,
}

/// Which SMTP extensions are advertised/enabled.
pub struct Extensions {
    pub pipelining: IfBlock,
    pub chunking: IfBlock,
    pub requiretls: IfBlock,
    pub dsn: IfBlock,
    pub vrfy: IfBlock,
    pub expn: IfBlock,
    pub no_soliciting: IfBlock,
    pub future_release: IfBlock,
    pub deliver_by: IfBlock,
    pub mt_priority: IfBlock,
}

/// Settings for the AUTH stage.
pub struct Auth {
    // Directory used to authenticate credentials.
    pub directory: IfBlock,
    // Allowed SASL mechanisms.
    pub mechanisms: IfBlock,
    // Whether authentication is mandatory.
    pub require: IfBlock,
    // Whether plain-text mechanisms are allowed (e.g. over clear channels).
    pub allow_plain_text: IfBlock,
    // Whether the authenticated identity must match the envelope sender.
    pub must_match_sender: IfBlock,
    // Failed-attempt limit and delay applied after failures.
    pub errors_max: IfBlock,
    pub errors_wait: IfBlock,
}

/// Settings for the MAIL FROM stage.
pub struct Mail {
    pub script: IfBlock,
    // Sender address rewriting rule.
    pub rewrite: IfBlock,
}

/// Settings for the RCPT TO stage.
pub struct Rcpt {
    pub script: IfBlock,
    // Whether relaying to remote domains is permitted.
    pub relay: IfBlock,
    // Directory used to validate local recipients.
    pub directory: IfBlock,
    // Recipient address rewriting rule.
    pub rewrite: IfBlock,

    // Errors
    pub errors_max: IfBlock,
    pub errors_wait: IfBlock,

    // Limits
    pub max_recipients: IfBlock,

    // Catch-all and subaddressing
    pub catch_all: AddressMapping,
    pub subaddressing: AddressMapping,
}

/// How a recipient address is mapped for catch-all / subaddressing:
/// built-in behavior, a custom expression, or disabled.
#[derive(Debug, Default)]
pub enum AddressMapping {
    Enable,
    Custom(IfBlock),
    #[default]
    Disable,
}

/// Settings for the DATA stage.
pub struct Data {
    pub script: IfBlock,
    // External commands the message is piped through.
    pub pipe_commands: Vec<Pipe>,
    // Milter filters applied to the message.
    pub milters: Vec<Milter>,

    // Limits
    pub max_messages: IfBlock,
    pub max_message_size: IfBlock,
    pub max_received_headers: IfBlock,

    // Headers
    pub add_received: IfBlock,
    pub add_received_spf: IfBlock,
    pub add_return_path: IfBlock,
    pub add_auth_results: IfBlock,
    pub add_message_id: IfBlock,
    pub add_date: IfBlock,
}

/// An external command messages are piped through during DATA.
pub struct Pipe {
    pub command: IfBlock,
    pub arguments: IfBlock,
    pub timeout: IfBlock,
}

/// Connection settings for a milter filter.
pub struct Milter {
    pub enable: IfBlock,
    pub addrs: Vec<SocketAddr>,
    pub hostname: String,
    pub port: u16,
    pub timeout_connect: Duration,
    pub timeout_command: Duration,
    pub timeout_data: Duration,
    pub tls: bool,
    pub tls_allow_invalid_certs: bool,
    // Whether milter failures cause a temporary rejection of the message.
    pub tempfail_on_error: bool,
    pub max_frame_len: usize,
    pub protocol_version: MilterVersion,
    // Optional overrides for the milter action/protocol negotiation flags.
    pub flags_actions: Option<u32>,
    pub flags_protocol: Option<u32>,
}

/// Supported milter protocol versions.
#[derive(Clone, Copy)]
pub enum MilterVersion {
    V2,
    V6,
}
|
||||
|
||||
impl Default for SessionConfig {
    /// Conservative out-of-the-box session settings: 15 min idle timeout,
    /// 5 min command duration, 250 MB transfer limit, relaying disabled,
    /// authentication optional, and standard trace headers added.
    fn default() -> Self {
        Self {
            timeout: IfBlock::new(Duration::from_secs(15 * 60)),
            duration: IfBlock::new(Duration::from_secs(5 * 60)),
            transfer_limit: IfBlock::new(250 * 1024 * 1024),
            throttle: SessionThrottle {
                connect: Default::default(),
                mail_from: Default::default(),
                rcpt_to: Default::default(),
            },
            connect: Connect {
                script: Default::default(),
            },
            ehlo: Ehlo {
                script: Default::default(),
                require: IfBlock::new(true),
                reject_non_fqdn: IfBlock::new(true),
            },
            auth: Auth {
                directory: Default::default(),
                mechanisms: Default::default(),
                require: IfBlock::new(false),
                allow_plain_text: IfBlock::new(false),
                must_match_sender: IfBlock::new(true),
                errors_max: IfBlock::new(3),
                errors_wait: IfBlock::new(Duration::from_secs(30)),
            },
            mail: Mail {
                script: Default::default(),
                rewrite: Default::default(),
            },
            rcpt: Rcpt {
                script: Default::default(),
                relay: IfBlock::new(false),
                directory: Default::default(),
                rewrite: Default::default(),
                errors_max: IfBlock::new(10),
                errors_wait: IfBlock::new(Duration::from_secs(30)),
                max_recipients: IfBlock::new(100),
                catch_all: AddressMapping::Disable,
                subaddressing: AddressMapping::Disable,
            },
            data: Data {
                script: Default::default(),
                pipe_commands: Default::default(),
                milters: Default::default(),
                max_messages: IfBlock::new(10),
                max_message_size: IfBlock::new(25 * 1024 * 1024),
                max_received_headers: IfBlock::new(50),
                add_received: IfBlock::new(true),
                add_received_spf: IfBlock::new(true),
                add_return_path: IfBlock::new(true),
                add_auth_results: IfBlock::new(true),
                add_message_id: IfBlock::new(true),
                add_date: IfBlock::new(true),
            },
            extensions: Extensions {
                pipelining: IfBlock::new(true),
                chunking: IfBlock::new(true),
                requiretls: IfBlock::new(true),
                dsn: IfBlock::new(false),
                vrfy: IfBlock::new(false),
                expn: IfBlock::new(false),
                no_soliciting: IfBlock::new(false),
                future_release: Default::default(),
                deliver_by: Default::default(),
                mt_priority: Default::default(),
            },
        }
    }
}
|
15
crates/common/src/config/storage.rs
Normal file
15
crates/common/src/config/storage.rs
Normal file
|
@ -0,0 +1,15 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use ahash::AHashMap;
|
||||
use directory::Directory;
|
||||
use store::{BlobStore, FtsStore, LookupStore, Store};
|
||||
|
||||
/// The storage backends used by the server: primary data store, blob store,
/// full-text index, lookup stores and directories, plus named registries of
/// additional lookup stores and directories.
pub struct Storage {
    pub data: Store,
    pub blob: BlobStore,
    pub fts: FtsStore,
    // Default lookup store.
    pub lookup: LookupStore,
    // Additional lookup stores, by configured id.
    pub lookups: AHashMap<String, LookupStore>,
    // Default directory.
    pub directory: Arc<Directory>,
    // Additional directories, by configured id.
    pub directories: AHashMap<String, Arc<Directory>>,
}
|
646
crates/common/src/expr/eval.rs
Normal file
646
crates/common/src/expr/eval.rs
Normal file
|
@ -0,0 +1,646 @@
|
|||
/*
|
||||
* Copyright (c) 2020-2023, Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::{borrow::Cow, cmp::Ordering, fmt::Display};
|
||||
|
||||
use crate::Core;
|
||||
|
||||
use super::{
|
||||
functions::{ResolveVariable, FUNCTIONS},
|
||||
if_block::IfBlock,
|
||||
BinaryOperator, Constant, Expression, ExpressionItem, UnaryOperator, Variable,
|
||||
};
|
||||
|
||||
impl Core {
    /// Evaluates an `IfBlock` against `resolver` and converts the resulting
    /// `Variable` into `R`.
    ///
    /// Returns `None` when the block is empty or when the conversion into
    /// `R` fails; callers therefore cannot distinguish "unset" from
    /// "wrong type" here.
    pub async fn eval_if<R: for<'x> TryFrom<Variable<'x>>, V: for<'x> ResolveVariable<'x>>(
        &self,
        if_block: &IfBlock,
        resolver: &V,
    ) -> Option<R> {
        if if_block.is_empty() {
            return None;
        }

        let result = if_block.eval(resolver, self, &if_block.key).await;

        tracing::trace!(context = "eval_if",
            property = if_block.key,
            result = ?result,
        );

        match result.try_into() {
            Ok(value) => Some(value),
            Err(_) => None,
        }
    }

    /// Evaluates a bare `Expression` against `resolver` and converts the
    /// result into `R`. `expr_id` is used only for tracing and for functions
    /// that report the property being evaluated.
    ///
    /// Returns `None` when the expression is empty or the conversion fails.
    pub async fn eval_expr<R: for<'x> TryFrom<Variable<'x>>, V: for<'x> ResolveVariable<'x>>(
        &self,
        expr: &Expression,
        resolver: &V,
        expr_id: &str,
    ) -> Option<R> {
        if expr.is_empty() {
            return None;
        }

        // Captures start empty here; eval_expr is for standalone expressions
        // that are not part of an if/then chain.
        let result = expr.eval(resolver, self, expr_id, &mut Vec::new()).await;

        tracing::trace!(context = "eval_expr",
            property = expr_id,
            result = ?result,
        );

        match result.try_into() {
            Ok(value) => Some(value),
            Err(_) => None,
        }
    }
}
|
||||
|
||||
impl IfBlock {
    /// Evaluates each `if`/`then` pair in order and returns the value of the
    /// first `then` whose condition evaluated truthy; falls back to
    /// `default` when none match.
    ///
    /// The `captures` buffer is shared across all condition/value
    /// evaluations, so regex captures produced by a condition are visible
    /// to its `then` expression (and, if nothing matched, to `default`).
    pub async fn eval<'x, V>(&'x self, resolver: &V, core: &Core, property: &str) -> Variable<'x>
    where
        V: ResolveVariable<'x>,
    {
        let mut captures = Vec::new();

        for if_then in &self.if_then {
            if if_then
                .expr
                .eval(resolver, core, property, &mut captures)
                .await
                .to_bool()
            {
                return if_then
                    .then
                    .eval(resolver, core, property, &mut captures)
                    .await;
            }
        }

        self.default
            .eval(resolver, core, property, &mut captures)
            .await
    }
}
|
||||
|
||||
impl Expression {
    /// Evaluates the expression as a postfix (RPN) program over a value
    /// stack. Underflow is tolerated: a pop on an empty stack yields the
    /// default `Variable`. Regex matches overwrite `captures`, which is how
    /// capture groups flow to later `ExpressionItem::Capture` references.
    async fn eval<'x, 'y, V>(
        &'x self,
        resolver: &V,
        core: &Core,
        property: &str,
        captures: &'y mut Vec<String>,
    ) -> Variable<'x>
    where
        V: ResolveVariable<'x>,
    {
        let mut stack = Vec::new();
        let mut exprs = self.items.iter();

        while let Some(expr) = exprs.next() {
            match expr {
                // Push a variable resolved from the session/context.
                ExpressionItem::Variable(v) => {
                    stack.push(resolver.resolve_variable(*v));
                }
                // Push a literal from the compiled expression.
                ExpressionItem::Constant(val) => {
                    stack.push(Variable::from(val));
                }
                // Push a previously captured regex group (owned copy);
                // out-of-range indices yield the empty string.
                ExpressionItem::Capture(v) => {
                    stack.push(Variable::String(Cow::Owned(
                        captures
                            .get(*v as usize)
                            .map(|v| v.as_str())
                            .unwrap_or_default()
                            .to_string(),
                    )));
                }
                ExpressionItem::UnaryOperator(op) => {
                    let value = stack.pop().unwrap_or_default();
                    stack.push(match op {
                        UnaryOperator::Not => value.op_not(),
                        UnaryOperator::Minus => value.op_minus(),
                    });
                }
                ExpressionItem::BinaryOperator(op) => {
                    // Operands were pushed left-first, so the right operand
                    // is on top of the stack.
                    let right = stack.pop().unwrap_or_default();
                    let left = stack.pop().unwrap_or_default();
                    stack.push(match op {
                        BinaryOperator::Add => left.op_add(right),
                        BinaryOperator::Subtract => left.op_subtract(right),
                        BinaryOperator::Multiply => left.op_multiply(right),
                        BinaryOperator::Divide => left.op_divide(right),
                        BinaryOperator::And => left.op_and(right),
                        BinaryOperator::Or => left.op_or(right),
                        BinaryOperator::Xor => left.op_xor(right),
                        BinaryOperator::Eq => left.op_eq(right),
                        BinaryOperator::Ne => left.op_ne(right),
                        BinaryOperator::Lt => left.op_lt(right),
                        BinaryOperator::Le => left.op_le(right),
                        BinaryOperator::Gt => left.op_gt(right),
                        BinaryOperator::Ge => left.op_ge(right),
                    });
                }
                // Call a built-in function (ids below FUNCTIONS.len()) or
                // delegate to an async core-provided function.
                ExpressionItem::Function { id, num_args } => {
                    let num_args = *num_args as usize;

                    // Arguments are popped in reverse push order and placed
                    // back into positional order.
                    let mut arguments = Variable::array(num_args);
                    for arg_num in 0..num_args {
                        arguments[num_args - arg_num - 1] = stack.pop().unwrap_or_default();
                    }

                    let result = if let Some((_, fnc, _)) = FUNCTIONS.get(*id as usize) {
                        (fnc)(arguments)
                    } else {
                        core.eval_fnc(*id - FUNCTIONS.len() as u32, arguments, property)
                            .await
                    };

                    stack.push(result);
                }
                // Conditional skip used to short-circuit and/or chains:
                // when the top of stack's truthiness equals `val`, skip the
                // next `pos` items.
                ExpressionItem::JmpIf { val, pos } => {
                    if stack.last().map_or(false, |v| v.to_bool()) == *val {
                        for _ in 0..*pos {
                            exprs.next();
                        }
                    }
                }
                // array[index]; out-of-range indices yield the default value.
                ExpressionItem::ArrayAccess => {
                    let index = stack
                        .pop()
                        .unwrap_or_default()
                        .to_usize()
                        .unwrap_or_default();
                    let array = stack.pop().unwrap_or_default().into_array();
                    stack.push(array.into_iter().nth(index).unwrap_or_default());
                }
                // Build an array literal from the top `num_items` stack
                // entries, restoring their push order.
                ExpressionItem::ArrayBuild(num_items) => {
                    let num_items = *num_items as usize;
                    let mut items = Variable::array(num_items);
                    for arg_num in 0..num_items {
                        items[num_items - arg_num - 1] = stack.pop().unwrap_or_default();
                    }
                    stack.push(Variable::Array(items));
                }
                // Regex match: replaces `captures` with this match's groups
                // and pushes 1 on match, 0 otherwise.
                ExpressionItem::Regex(regex) => {
                    captures.clear();
                    let value = stack.pop().unwrap_or_default().into_string();

                    if let Some(captures_) = regex.captures(value.as_ref()) {
                        for capture in captures_.iter() {
                            captures.push(capture.map_or("", |m| m.as_str()).to_string());
                        }
                    }

                    stack.push(Variable::Integer(!captures.is_empty() as i64));
                }
            }
        }

        // The final value is whatever remains on top of the stack.
        stack.pop().unwrap_or_default()
    }

    /// Returns `true` when the expression contains no items.
    pub fn is_empty(&self) -> bool {
        self.items.is_empty()
    }

    /// Read-only access to the compiled expression items.
    pub fn items(&self) -> &[ExpressionItem] {
        &self.items
    }
}
|
||||
|
||||
impl<'x> Variable<'x> {
    /// Addition / concatenation.
    ///
    /// Numbers add (integer addition saturates; int+float promotes to
    /// float). Arrays concatenate, with a scalar operand treated as a
    /// one-element array. If either side is a non-empty string, both sides
    /// are formatted and concatenated; an empty string acts as the identity
    /// and returns the other operand unchanged.
    pub fn op_add(self, other: Variable<'x>) -> Variable<'x> {
        match (self, other) {
            (Variable::Integer(a), Variable::Integer(b)) => Variable::Integer(a.saturating_add(b)),
            (Variable::Float(a), Variable::Float(b)) => Variable::Float(a + b),
            (Variable::Integer(i), Variable::Float(f))
            | (Variable::Float(f), Variable::Integer(i)) => Variable::Float(i as f64 + f),
            (Variable::Array(a), Variable::Array(b)) => {
                Variable::Array(a.into_iter().chain(b).collect::<Vec<_>>())
            }
            (Variable::Array(a), b) => {
                Variable::Array(a.into_iter().chain([b]).collect::<Vec<_>>())
            }
            (a, Variable::Array(b)) => {
                Variable::Array([a].into_iter().chain(b).collect::<Vec<_>>())
            }
            (Variable::String(a), b) => {
                if !a.is_empty() {
                    Variable::String(format!("{}{}", a, b).into())
                } else {
                    b
                }
            }
            (a, Variable::String(b)) => {
                if !b.is_empty() {
                    Variable::String(format!("{}{}", a, b).into())
                } else {
                    a
                }
            }
        }
    }

    /// Subtraction.
    ///
    /// Numbers subtract (integer subtraction saturates). When either
    /// operand is an array, all elements equal to the other operand are
    /// removed — note the swapped pattern means `scalar - array` behaves
    /// the same as `array - scalar`. Other operands are coerced via
    /// `parse_number` first.
    pub fn op_subtract(self, other: Variable<'x>) -> Variable<'x> {
        match (self, other) {
            (Variable::Integer(a), Variable::Integer(b)) => Variable::Integer(a.saturating_sub(b)),
            (Variable::Float(a), Variable::Float(b)) => Variable::Float(a - b),
            (Variable::Integer(a), Variable::Float(b)) => Variable::Float(a as f64 - b),
            (Variable::Float(a), Variable::Integer(b)) => Variable::Float(a - b as f64),
            (Variable::Array(a), b) | (b, Variable::Array(a)) => {
                Variable::Array(a.into_iter().filter(|v| v != &b).collect::<Vec<_>>())
            }
            (a, b) => a.parse_number().op_subtract(b.parse_number()),
        }
    }

    /// Multiplication; integer multiplication saturates, int*float promotes
    /// to float, non-numeric operands are coerced via `parse_number`.
    pub fn op_multiply(self, other: Variable<'x>) -> Variable<'x> {
        match (self, other) {
            (Variable::Integer(a), Variable::Integer(b)) => Variable::Integer(a.saturating_mul(b)),
            (Variable::Float(a), Variable::Float(b)) => Variable::Float(a * b),
            (Variable::Integer(i), Variable::Float(f))
            | (Variable::Float(f), Variable::Integer(i)) => Variable::Float(i as f64 * f),
            (a, b) => a.parse_number().op_multiply(b.parse_number()),
        }
    }

    /// Division; always produces a float, and division by zero yields 0.0
    /// instead of panicking or producing infinity.
    pub fn op_divide(self, other: Variable<'x>) -> Variable<'x> {
        match (self, other) {
            (Variable::Integer(a), Variable::Integer(b)) => {
                Variable::Float(if b != 0 { a as f64 / b as f64 } else { 0.0 })
            }
            (Variable::Float(a), Variable::Float(b)) => {
                Variable::Float(if b != 0.0 { a / b } else { 0.0 })
            }
            (Variable::Integer(a), Variable::Float(b)) => {
                Variable::Float(if b != 0.0 { a as f64 / b } else { 0.0 })
            }
            (Variable::Float(a), Variable::Integer(b)) => {
                Variable::Float(if b != 0 { a / b as f64 } else { 0.0 })
            }
            (a, b) => a.parse_number().op_divide(b.parse_number()),
        }
    }

    // Logical operators: operate on truthiness (`to_bool`) and return 0/1.

    pub fn op_and(self, other: Variable) -> Variable {
        Variable::Integer(i64::from(self.to_bool() & other.to_bool()))
    }

    pub fn op_or(self, other: Variable) -> Variable {
        Variable::Integer(i64::from(self.to_bool() | other.to_bool()))
    }

    pub fn op_xor(self, other: Variable) -> Variable {
        Variable::Integer(i64::from(self.to_bool() ^ other.to_bool()))
    }

    // Comparison operators: delegate to the PartialEq/PartialOrd impls
    // below (which coerce strings to numbers when compared with numbers)
    // and return 0/1.

    pub fn op_eq(self, other: Variable) -> Variable {
        Variable::Integer(i64::from(self == other))
    }

    pub fn op_ne(self, other: Variable) -> Variable {
        Variable::Integer(i64::from(self != other))
    }

    pub fn op_lt(self, other: Variable) -> Variable {
        Variable::Integer(i64::from(self < other))
    }

    pub fn op_le(self, other: Variable) -> Variable {
        Variable::Integer(i64::from(self <= other))
    }

    pub fn op_gt(self, other: Variable) -> Variable {
        Variable::Integer(i64::from(self > other))
    }

    pub fn op_ge(self, other: Variable) -> Variable {
        Variable::Integer(i64::from(self >= other))
    }

    /// Logical negation: 1 when falsy, 0 when truthy.
    pub fn op_not(self) -> Variable<'static> {
        Variable::Integer(i64::from(!self.to_bool()))
    }

    /// Arithmetic negation; non-numeric values are coerced via
    /// `parse_number` first.
    pub fn op_minus(self) -> Variable<'static> {
        match self {
            Variable::Integer(n) => Variable::Integer(-n),
            Variable::Float(n) => Variable::Float(-n),
            _ => self.parse_number().op_minus(),
        }
    }

    /// Coerces the value to a numeric variable. Strings parse as integer
    /// first, then float, defaulting to 0 when unparsable or empty.
    ///
    /// NOTE(review): arrays map to `l.is_empty() as i64` — i.e. empty
    /// array -> 1, non-empty -> 0 — which is the inverse of `to_bool`'s
    /// truthiness for arrays. Confirm whether `!l.is_empty()` was intended.
    pub fn parse_number(&self) -> Variable<'static> {
        match self {
            Variable::String(s) if !s.is_empty() => {
                if let Ok(n) = s.parse::<i64>() {
                    Variable::Integer(n)
                } else if let Ok(n) = s.parse::<f64>() {
                    Variable::Float(n)
                } else {
                    Variable::Integer(0)
                }
            }
            Variable::Integer(n) => Variable::Integer(*n),
            Variable::Float(n) => Variable::Float(*n),
            Variable::Array(l) => Variable::Integer(l.is_empty() as i64),
            _ => Variable::Integer(0),
        }
    }

    /// Builds a vector of `num_items` zero integers, used as positional
    /// argument/array storage by the expression evaluator.
    #[inline(always)]
    fn array(num_items: usize) -> Vec<Variable<'static>> {
        let mut items = Vec::with_capacity(num_items);
        for _ in 0..num_items {
            items.push(Variable::Integer(0));
        }
        items
    }

    /// Returns a borrowing view of this variable (strings are borrowed;
    /// arrays are rebuilt element-wise with borrowed contents).
    pub fn to_ref<'y: 'x>(&'y self) -> Variable<'x> {
        match self {
            Variable::String(s) => Variable::String(Cow::Borrowed(s.as_ref())),
            Variable::Integer(n) => Variable::Integer(*n),
            Variable::Float(n) => Variable::Float(*n),
            Variable::Array(l) => Variable::Array(l.iter().map(|v| v.to_ref()).collect::<Vec<_>>()),
        }
    }

    /// Truthiness: non-zero numbers, non-empty strings, non-empty arrays.
    pub fn to_bool(&self) -> bool {
        match self {
            Variable::Float(f) => *f != 0.0,
            Variable::Integer(n) => *n != 0,
            Variable::String(s) => !s.is_empty(),
            Variable::Array(a) => !a.is_empty(),
        }
    }

    /// Borrowing string form. One nesting level of array items is joined
    /// with "\r\n"; nested arrays are silently skipped.
    ///
    /// NOTE(review): `Display` (below) joins array items with "\n" instead
    /// of "\r\n" — confirm whether the difference is intentional.
    pub fn to_string(&self) -> Cow<'_, str> {
        match self {
            Variable::String(s) => Cow::Borrowed(s.as_ref()),
            Variable::Integer(n) => Cow::Owned(n.to_string()),
            Variable::Float(n) => Cow::Owned(n.to_string()),
            Variable::Array(l) => {
                let mut result = String::with_capacity(self.len() * 10);
                for item in l {
                    if !result.is_empty() {
                        result.push_str("\r\n");
                    }
                    match item {
                        Variable::String(v) => result.push_str(v),
                        Variable::Integer(v) => result.push_str(&v.to_string()),
                        Variable::Float(v) => result.push_str(&v.to_string()),
                        Variable::Array(_) => {}
                    }
                }
                Cow::Owned(result)
            }
        }
    }

    /// Consuming string form; same formatting rules as `to_string`.
    pub fn into_string(self) -> Cow<'x, str> {
        match self {
            Variable::String(s) => s,
            Variable::Integer(n) => Cow::Owned(n.to_string()),
            Variable::Float(n) => Cow::Owned(n.to_string()),
            Variable::Array(l) => {
                let mut result = String::with_capacity(l.len() * 10);
                for item in l {
                    if !result.is_empty() {
                        result.push_str("\r\n");
                    }
                    match item {
                        Variable::String(v) => result.push_str(v.as_ref()),
                        Variable::Integer(v) => result.push_str(&v.to_string()),
                        Variable::Float(v) => result.push_str(&v.to_string()),
                        Variable::Array(_) => {}
                    }
                }
                Cow::Owned(result)
            }
        }
    }

    /// Integer view: floats truncate, non-empty strings are parsed, and
    /// everything else (arrays, empty strings) yields `None`.
    pub fn to_integer(&self) -> Option<i64> {
        match self {
            Variable::Integer(n) => Some(*n),
            Variable::Float(n) => Some(*n as i64),
            Variable::String(s) if !s.is_empty() => s.parse::<i64>().ok(),
            _ => None,
        }
    }

    /// Like `to_integer` but as `usize`.
    ///
    /// NOTE(review): negative integers/floats are converted with `as usize`
    /// and will wrap — confirm callers never pass negative values here.
    pub fn to_usize(&self) -> Option<usize> {
        match self {
            Variable::Integer(n) => Some(*n as usize),
            Variable::Float(n) => Some(*n as usize),
            Variable::String(s) if !s.is_empty() => s.parse::<usize>().ok(),
            _ => None,
        }
    }

    /// Approximate rendered length, used for buffer pre-sizing: string
    /// byte length, a fixed 2 for numbers, and for arrays the sum of item
    /// lengths plus 2 per item (separator allowance).
    pub fn len(&self) -> usize {
        match self {
            Variable::String(s) => s.len(),
            Variable::Integer(_) | Variable::Float(_) => 2,
            Variable::Array(l) => l.iter().map(|v| v.len() + 2).sum(),
        }
    }

    /// Only strings can be "empty"; numbers and arrays always report
    /// non-empty here (note: this is what makes `into_array`/`to_array`
    /// wrap any non-string scalar, including 0, in a one-element array).
    pub fn is_empty(&self) -> bool {
        match self {
            Variable::String(s) => s.is_empty(),
            _ => false,
        }
    }

    /// Borrowed array contents, or `None` for non-array variables.
    pub fn as_array(&self) -> Option<&[Variable]> {
        match self {
            Variable::Array(l) => Some(l),
            _ => None,
        }
    }

    /// Consumes the value into a vector: arrays yield their elements,
    /// non-empty scalars become a one-element vector, and the empty string
    /// becomes an empty vector.
    pub fn into_array(self) -> Vec<Variable<'x>> {
        match self {
            Variable::Array(l) => l,
            v if !v.is_empty() => vec![v],
            _ => vec![],
        }
    }

    /// Borrowing counterpart of `into_array`.
    pub fn to_array(&self) -> Vec<Variable<'_>> {
        match self {
            Variable::Array(l) => l.iter().map(|v| v.to_ref()).collect::<Vec<_>>(),
            v if !v.is_empty() => vec![v.to_ref()],
            _ => vec![],
        }
    }

    /// Detaches the value from any borrowed data, producing a `'static`
    /// variable (strings are copied; arrays are converted element-wise).
    pub fn into_owned(self) -> Variable<'static> {
        match self {
            Variable::String(s) => Variable::String(Cow::Owned(s.into_owned())),
            Variable::Integer(n) => Variable::Integer(n),
            Variable::Float(n) => Variable::Float(n),
            Variable::Array(l) => Variable::Array(l.into_iter().map(|v| v.into_owned()).collect()),
        }
    }
}
|
||||
|
||||
impl PartialEq for Variable<'_> {
    /// Equality with numeric coercion: integers and floats compare by value
    /// (int promoted to f64), and a string compared with a number is first
    /// coerced via `parse_number`. Arrays compare element-wise; all other
    /// cross-type combinations are unequal.
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Self::Integer(a), Self::Integer(b)) => a == b,
            (Self::Float(a), Self::Float(b)) => a == b,
            (Self::Integer(a), Self::Float(b)) | (Self::Float(b), Self::Integer(a)) => {
                *a as f64 == *b
            }
            (Self::String(a), Self::String(b)) => a == b,
            (Self::String(_), Self::Integer(_) | Self::Float(_)) => &self.parse_number() == other,
            (Self::Integer(_) | Self::Float(_), Self::String(_)) => self == &other.parse_number(),
            (Self::Array(a), Self::Array(b)) => a == b,
            _ => false,
        }
    }
}

// NOTE(review): Eq is asserted despite Float containing f64 (NaN != NaN
// under PartialEq); confirm NaN never reaches comparisons that rely on Eq.
impl Eq for Variable<'_> {}
|
||||
|
||||
#[allow(clippy::non_canonical_partial_ord_impl)]
impl PartialOrd for Variable<'_> {
    /// Ordering with the same numeric coercion rules as `PartialEq`.
    /// When types still differ after coercion, arrays/strings order above
    /// numbers (catch-all arms at the bottom).
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        match (self, other) {
            (Self::Integer(a), Self::Integer(b)) => a.partial_cmp(b),
            (Self::Float(a), Self::Float(b)) => a.partial_cmp(b),
            (Self::Integer(a), Self::Float(b)) => (*a as f64).partial_cmp(b),
            (Self::Float(a), Self::Integer(b)) => a.partial_cmp(&(*b as f64)),
            (Self::String(a), Self::String(b)) => a.partial_cmp(b),
            (Self::String(_), Self::Integer(_) | Self::Float(_)) => {
                self.parse_number().partial_cmp(other)
            }
            (Self::Integer(_) | Self::Float(_), Self::String(_)) => {
                self.partial_cmp(&other.parse_number())
            }
            (Self::Array(a), Self::Array(b)) => a.partial_cmp(b),
            (Self::Array(_) | Self::String(_), _) => Ordering::Greater.into(),
            (_, Self::Array(_)) => Ordering::Less.into(),
        }
    }
}

impl Ord for Variable<'_> {
    /// Total order built on `partial_cmp`; incomparable pairs (NaN floats)
    /// arbitrarily order as Greater.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.partial_cmp(other).unwrap_or(Ordering::Greater)
    }
}
|
||||
|
||||
impl Display for Variable<'_> {
    /// Formats scalars via their own `Display` impls; array items are
    /// written recursively, separated by "\n" (unlike `to_string`, which
    /// joins with "\r\n" and skips nested arrays).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Variable::String(v) => v.fmt(f),
            Variable::Integer(v) => v.fmt(f),
            Variable::Float(v) => v.fmt(f),
            Variable::Array(v) => {
                for (i, v) in v.iter().enumerate() {
                    if i > 0 {
                        f.write_str("\n")?;
                    }
                    v.fmt(f)?;
                }
                Ok(())
            }
        }
    }
}
|
||||
|
||||
/// Truthiness conversion for numeric primitives: non-zero is `true`.
trait IntoBool {
    fn into_bool(self) -> bool;
}
|
||||
|
||||
impl IntoBool for f64 {
    #[inline(always)]
    fn into_bool(self) -> bool {
        // Note: NaN != 0.0, so NaN is considered truthy here.
        self != 0.0
    }
}
|
||||
|
||||
impl IntoBool for i64 {
    #[inline(always)]
    fn into_bool(self) -> bool {
        // Any non-zero integer is truthy.
        self != 0
    }
}
|
||||
|
||||
impl<'x> From<&'x Constant> for Variable<'x> {
    /// Borrows a compiled expression constant as a runtime variable;
    /// strings are wrapped as borrowed `Cow`s (no allocation).
    fn from(value: &'x Constant) -> Self {
        match value {
            Constant::Integer(i) => Variable::Integer(*i),
            Constant::Float(f) => Variable::Float(*f),
            Constant::String(s) => Variable::String(s.as_str().into()),
        }
    }
}
|
||||
|
||||
impl<'x> TryFrom<Variable<'x>> for String {
|
||||
type Error = ();
|
||||
|
||||
fn try_from(value: Variable<'x>) -> Result<Self, Self::Error> {
|
||||
if let Variable::String(s) = value {
|
||||
Ok(s.into_owned())
|
||||
} else {
|
||||
Err(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'x> From<Variable<'x>> for bool {
    /// Delegates to `Variable::to_bool` (truthiness rules).
    fn from(val: Variable<'x>) -> Self {
        val.to_bool()
    }
}
|
||||
|
||||
impl<'x> TryFrom<Variable<'x>> for i64 {
    type Error = ();

    /// Fails with `()` when the variable has no integer representation.
    fn try_from(value: Variable<'x>) -> Result<Self, Self::Error> {
        value.to_integer().ok_or(())
    }
}
|
||||
|
||||
impl<'x> TryFrom<Variable<'x>> for u64 {
|
||||
type Error = ();
|
||||
|
||||
fn try_from(value: Variable<'x>) -> Result<Self, Self::Error> {
|
||||
value.to_integer().map(|v| v as u64).ok_or(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'x> TryFrom<Variable<'x>> for usize {
    type Error = ();

    /// Fails with `()` when the variable has no usable `usize` representation.
    fn try_from(value: Variable<'x>) -> Result<Self, Self::Error> {
        value.to_usize().ok_or(())
    }
}
|
82
crates/common/src/expr/functions/array.rs
Normal file
82
crates/common/src/expr/functions/array.rs
Normal file
|
@ -0,0 +1,82 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use crate::expr::Variable;
|
||||
|
||||
pub(crate) fn fn_count(v: Vec<Variable>) -> Variable {
|
||||
match &v[0] {
|
||||
Variable::Array(a) => a.len(),
|
||||
v => {
|
||||
if !v.is_empty() {
|
||||
1
|
||||
} else {
|
||||
0
|
||||
}
|
||||
}
|
||||
}
|
||||
.into()
|
||||
}
|
||||
|
||||
/// Sorts the array in the first argument; the second argument selects
/// ascending (`true`) or descending (`false`) order.
///
/// BUG FIX: the original inverted the flag — when `is_asc` was true it
/// sorted descending (`sort_unstable_by(|a, b| b.cmp(a))`) and vice versa.
pub(crate) fn fn_sort(mut v: Vec<Variable>) -> Variable {
    let is_asc = v[1].to_bool();
    let mut arr = v.remove(0).into_array();
    if is_asc {
        arr.sort_unstable();
    } else {
        arr.sort_unstable_by(|a, b| b.cmp(a));
    }
    arr.into()
}
|
||||
|
||||
/// Removes duplicate items, keeping the first occurrence of each value and
/// preserving order. Quadratic scan because `Variable` is not hashable.
pub(crate) fn fn_dedup(mut v: Vec<Variable>) -> Variable {
    v.remove(0)
        .into_array()
        .into_iter()
        .fold(Vec::new(), |mut seen, item| {
            if !seen.contains(&item) {
                seen.push(item);
            }
            seen
        })
        .into()
}
|
||||
|
||||
pub(crate) fn fn_is_intersect(v: Vec<Variable>) -> Variable {
|
||||
match (&v[0], &v[1]) {
|
||||
(Variable::Array(a), Variable::Array(b)) => a.iter().any(|x| b.contains(x)),
|
||||
(Variable::Array(a), item) | (item, Variable::Array(a)) => a.contains(item),
|
||||
_ => false,
|
||||
}
|
||||
.into()
|
||||
}
|
||||
|
||||
pub(crate) fn fn_winnow(mut v: Vec<Variable>) -> Variable {
|
||||
match v.remove(0) {
|
||||
Variable::Array(a) => a
|
||||
.into_iter()
|
||||
.filter(|i| !i.is_empty())
|
||||
.collect::<Vec<_>>()
|
||||
.into(),
|
||||
v => v,
|
||||
}
|
||||
}
|
394
crates/common/src/expr/functions/asynch.rs
Normal file
394
crates/common/src/expr/functions/asynch.rs
Normal file
|
@ -0,0 +1,394 @@
|
|||
use std::{cmp::Ordering, net::IpAddr, vec::IntoIter};
|
||||
|
||||
use mail_auth::IpLookupStrategy;
|
||||
use store::{Deserialize, Rows, Value};
|
||||
|
||||
use crate::Core;
|
||||
|
||||
use super::*;
|
||||
|
||||
impl Core {
    /// Evaluates an asynchronous expression function identified by an `F_*`
    /// id against already-evaluated `params`. `property` is only used as log
    /// context. I/O errors are logged and degrade to `false` or the default
    /// (empty) variable rather than failing the expression.
    pub(crate) async fn eval_fnc<'x>(
        &self,
        fnc_id: u32,
        params: Vec<Variable<'x>>,
        property: &str,
    ) -> Variable<'x> {
        let mut params = FncParams::new(params);

        match fnc_id {
            F_IS_LOCAL_DOMAIN => {
                let directory = params.next_as_string();
                let domain = params.next_as_string();

                self.get_directory_or_default(directory.as_ref())
                    .is_local_domain(domain.as_ref())
                    .await
                    .unwrap_or_else(|err| {
                        tracing::warn!(
                            context = "eval_if",
                            event = "error",
                            property = property,
                            error = ?err,
                            "Failed to check if domain is local."
                        );

                        false
                    })
                    .into()
            }
            F_IS_LOCAL_ADDRESS => {
                let directory = params.next_as_string();
                let address = params.next_as_string();

                self.get_directory_or_default(directory.as_ref())
                    .rcpt(address.as_ref())
                    .await
                    .unwrap_or_else(|err| {
                        tracing::warn!(
                            context = "eval_if",
                            event = "error",
                            property = property,
                            error = ?err,
                            "Failed to check if address is local."
                        );

                        false
                    })
                    .into()
            }
            F_KEY_GET => {
                let store = params.next_as_string();
                let key = params.next_as_string();

                // VariableWrapper deserializes the stored bytes back into a
                // Variable; a missing key yields the default (empty) value.
                self.get_lookup_store(store.as_ref())
                    .key_get::<VariableWrapper>(key.into_owned().into_bytes())
                    .await
                    .map(|value| value.map(|v| v.into_inner()).unwrap_or_default())
                    .unwrap_or_else(|err| {
                        tracing::warn!(
                            context = "eval_if",
                            event = "error",
                            property = property,
                            error = ?err,
                            "Failed to get key."
                        );

                        Variable::default()
                    })
            }
            F_KEY_EXISTS => {
                let store = params.next_as_string();
                let key = params.next_as_string();

                self.get_lookup_store(store.as_ref())
                    .key_exists(key.into_owned().into_bytes())
                    .await
                    .unwrap_or_else(|err| {
                        tracing::warn!(
                            context = "eval_if",
                            event = "error",
                            property = property,
                            error = ?err,
                            "Failed to get key."
                        );

                        false
                    })
                    .into()
            }
            F_KEY_SET => {
                let store = params.next_as_string();
                let key = params.next_as_string();
                let value = params.next_as_string();

                // Returns true on success, false (after logging) on failure.
                self.get_lookup_store(store.as_ref())
                    .key_set(
                        key.into_owned().into_bytes(),
                        value.into_owned().into_bytes(),
                        None,
                    )
                    .await
                    .map(|_| true)
                    .unwrap_or_else(|err| {
                        tracing::warn!(
                            context = "eval_if",
                            event = "error",
                            property = property,
                            error = ?err,
                            "Failed to set key."
                        );

                        false
                    })
                    .into()
            }
            F_COUNTER_INCR => {
                let store = params.next_as_string();
                let key = params.next_as_string();
                let value = params.next_as_integer();

                // Returns the post-increment counter value.
                self.get_lookup_store(store.as_ref())
                    .counter_incr(key.into_owned().into_bytes(), value, None, true)
                    .await
                    .map(Variable::Integer)
                    .unwrap_or_else(|err| {
                        tracing::warn!(
                            context = "eval_if",
                            event = "error",
                            property = property,
                            error = ?err,
                            "Failed to increment counter."
                        );

                        Variable::default()
                    })
            }
            F_COUNTER_GET => {
                let store = params.next_as_string();
                let key = params.next_as_string();

                self.get_lookup_store(store.as_ref())
                    .counter_get(key.into_owned().into_bytes())
                    .await
                    .map(Variable::Integer)
                    .unwrap_or_else(|err| {
                        tracing::warn!(
                            context = "eval_if",
                            event = "error",
                            property = property,
                            error = ?err,
                            // NOTE(review): message says "increment" but this
                            // arm performs a read — likely copy-paste; confirm.
                            "Failed to increment counter."
                        );

                        Variable::default()
                    })
            }
            F_DNS_QUERY => self.dns_query(params).await,
            F_SQL_QUERY => self.sql_query(params).await,
            // Unknown function ids resolve to the default (empty) value.
            _ => Variable::default(),
        }
    }

    /// Executes a SQL statement against a lookup store.
    ///
    /// Arguments: store id, query string, then either a single value or an
    /// array of bind parameters. `SELECT` queries return a scalar (1x1
    /// result), a row (1xN), or an array of rows (Nx M); any other statement
    /// returns whether it succeeded as a boolean.
    async fn sql_query<'x>(&self, mut arguments: FncParams<'x>) -> Variable<'x> {
        let store = self.get_lookup_store(arguments.next_as_string().as_ref());
        let query = arguments.next_as_string();

        if query.is_empty() {
            tracing::warn!(
                context = "eval:sql_query",
                event = "invalid",
                reason = "Empty query string",
            );
            return Variable::default();
        }

        // Obtain arguments
        let arguments = match arguments.next() {
            Variable::Array(l) => l.into_iter().map(to_store_value).collect(),
            v => vec![to_store_value(v)],
        };

        // Run query; only statements starting with "SELECT" (case-insensitive)
        // are treated as returning rows.
        if query
            .as_bytes()
            .get(..6)
            .map_or(false, |q| q.eq_ignore_ascii_case(b"SELECT"))
        {
            if let Ok(mut rows) = store.query::<Rows>(&query, arguments).await {
                match rows.rows.len().cmp(&1) {
                    Ordering::Equal => {
                        // Single row: collapse a single non-NULL column to a
                        // scalar, otherwise return the row as an array.
                        let mut row = rows.rows.pop().unwrap().values;
                        match row.len().cmp(&1) {
                            Ordering::Equal if !matches!(row.first(), Some(Value::Null)) => {
                                row.pop().map(into_variable).unwrap()
                            }
                            Ordering::Less => Variable::default(),
                            _ => Variable::Array(
                                row.into_iter().map(into_variable).collect::<Vec<_>>(),
                            ),
                        }
                    }
                    Ordering::Less => Variable::default(),
                    // Multiple rows: array of per-row arrays.
                    Ordering::Greater => rows
                        .rows
                        .into_iter()
                        .map(|r| {
                            Variable::Array(
                                r.values.into_iter().map(into_variable).collect::<Vec<_>>(),
                            )
                        })
                        .collect::<Vec<_>>()
                        .into(),
                }
            } else {
                false.into()
            }
        } else {
            // Non-SELECT: report success/failure only.
            store.query::<usize>(&query, arguments).await.is_ok().into()
        }
    }

    /// Resolves a DNS record for `entry`. The record type selects the lookup:
    /// "ip" (v4 then v6), "mx" (exchange host names, trailing dot stripped),
    /// "txt" (raw record as UTF-8 text), "ptr", "ipv4", "ipv6". Lookup
    /// failures and unknown record types yield the default (empty) value.
    async fn dns_query<'x>(&self, mut arguments: FncParams<'x>) -> Variable<'x> {
        let entry = arguments.next_as_string();
        let record_type = arguments.next_as_string();

        if record_type.eq_ignore_ascii_case("ip") {
            match self
                .smtp
                .resolvers
                .dns
                .ip_lookup(entry.as_ref(), IpLookupStrategy::Ipv4thenIpv6, 10)
                .await
            {
                Ok(result) => result
                    .iter()
                    .map(|ip| Variable::from(ip.to_string()))
                    .collect::<Vec<_>>()
                    .into(),
                Err(_) => Variable::default(),
            }
        } else if record_type.eq_ignore_ascii_case("mx") {
            match self.smtp.resolvers.dns.mx_lookup(entry.as_ref()).await {
                Ok(result) => result
                    .iter()
                    .flat_map(|mx| {
                        mx.exchanges.iter().map(|host| {
                            Variable::String(
                                host.strip_suffix('.')
                                    .unwrap_or(host.as_str())
                                    .to_string()
                                    .into(),
                            )
                        })
                    })
                    .collect::<Vec<_>>()
                    .into(),
                Err(_) => Variable::default(),
            }
        } else if record_type.eq_ignore_ascii_case("txt") {
            match self.smtp.resolvers.dns.txt_raw_lookup(entry.as_ref()).await {
                Ok(result) => Variable::from(String::from_utf8(result).unwrap_or_default()),
                Err(_) => Variable::default(),
            }
        } else if record_type.eq_ignore_ascii_case("ptr") {
            // PTR requires the entry to parse as an IP address.
            if let Ok(addr) = entry.parse::<IpAddr>() {
                match self.smtp.resolvers.dns.ptr_lookup(addr).await {
                    Ok(result) => result
                        .iter()
                        .map(|host| Variable::from(host.to_string()))
                        .collect::<Vec<_>>()
                        .into(),
                    Err(_) => Variable::default(),
                }
            } else {
                Variable::default()
            }
        } else if record_type.eq_ignore_ascii_case("ipv4") {
            match self.smtp.resolvers.dns.ipv4_lookup(entry.as_ref()).await {
                Ok(result) => result
                    .iter()
                    .map(|ip| Variable::from(ip.to_string()))
                    .collect::<Vec<_>>()
                    .into(),
                Err(_) => Variable::default(),
            }
        } else if record_type.eq_ignore_ascii_case("ipv6") {
            match self.smtp.resolvers.dns.ipv6_lookup(entry.as_ref()).await {
                Ok(result) => result
                    .iter()
                    .map(|ip| Variable::from(ip.to_string()))
                    .collect::<Vec<_>>()
                    .into(),
                Err(_) => Variable::default(),
            }
        } else {
            Variable::default()
        }
    }
}
|
||||
|
||||
/// Consuming iterator over a function call's already-evaluated arguments.
struct FncParams<'x> {
    params: IntoIter<Variable<'x>>,
}
|
||||
|
||||
impl<'x> FncParams<'x> {
    pub fn new(params: Vec<Variable<'x>>) -> Self {
        Self {
            params: params.into_iter(),
        }
    }

    /// Takes the next argument as a string.
    /// Panics if exhausted — the expression compiler enforces arity, so a
    /// missing argument is a bug, not user input.
    pub fn next_as_string(&mut self) -> Cow<'x, str> {
        self.params.next().unwrap().into_string()
    }

    /// Takes the next argument as an integer, defaulting to 0 when the
    /// value has no integer representation.
    pub fn next_as_integer(&mut self) -> i64 {
        self.params.next().unwrap().to_integer().unwrap_or_default()
    }

    /// Takes the next raw argument.
    pub fn next(&mut self) -> Variable<'x> {
        self.params.next().unwrap()
    }
}
|
||||
|
||||
/// Newtype allowing a `Variable` to be deserialized from the lookup store.
#[derive(Debug)]
struct VariableWrapper(Variable<'static>);
|
||||
|
||||
impl From<i64> for VariableWrapper {
    /// Wraps an integer (e.g. a counter value) as a variable.
    fn from(value: i64) -> Self {
        VariableWrapper(Variable::Integer(value))
    }
}
|
||||
|
||||
impl Deserialize for VariableWrapper {
    /// Stored bytes are always decoded as a string variable.
    fn deserialize(bytes: &[u8]) -> store::Result<Self> {
        String::deserialize(bytes).map(|v| VariableWrapper(Variable::String(v.into())))
    }
}
|
||||
|
||||
impl From<store::Value<'static>> for VariableWrapper {
    /// Maps a database value into a variable: booleans become 0/1 integers,
    /// blobs are decoded lossily as UTF-8, and NULL becomes an empty string.
    fn from(value: store::Value<'static>) -> Self {
        VariableWrapper(match value {
            Value::Integer(v) => Variable::Integer(v),
            Value::Bool(v) => Variable::Integer(v as i64),
            Value::Float(v) => Variable::Float(v),
            Value::Text(v) => Variable::String(v),
            Value::Blob(v) => Variable::String(match v {
                // Borrowed blobs can stay borrowed when valid UTF-8.
                std::borrow::Cow::Borrowed(v) => String::from_utf8_lossy(v),
                std::borrow::Cow::Owned(v) => String::from_utf8_lossy(&v).into_owned().into(),
            }),
            Value::Null => Variable::String("".into()),
        })
    }
}
|
||||
|
||||
impl VariableWrapper {
    /// Unwraps the inner variable.
    pub fn into_inner(self) -> Variable<'static> {
        self.0
    }
}
|
||||
|
||||
fn to_store_value(value: Variable) -> Value {
|
||||
match value {
|
||||
Variable::String(v) => Value::Text(v),
|
||||
Variable::Integer(v) => Value::Integer(v),
|
||||
Variable::Float(v) => Value::Float(v),
|
||||
v => Value::Text(v.to_string().into_owned().into()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Maps a database value back into an expression variable; blobs are decoded
/// as UTF-8 with a lossy fallback on invalid bytes, and NULL becomes the
/// default (empty) variable.
fn into_variable(value: Value) -> Variable {
    match value {
        Value::Integer(v) => Variable::Integer(v),
        Value::Bool(v) => Variable::Integer(i64::from(v)),
        Value::Float(v) => Variable::Float(v),
        Value::Text(v) => Variable::String(v),
        Value::Blob(v) => Variable::String(
            String::from_utf8(v.into_owned())
                // Invalid UTF-8 degrades to a lossy conversion instead of failing.
                .unwrap_or_else(|err| String::from_utf8_lossy(err.as_bytes()).into_owned())
                .into(),
        ),
        Value::Null => Variable::default(),
    }
}
|
121
crates/common/src/expr/functions/email.rs
Normal file
121
crates/common/src/expr/functions/email.rs
Normal file
|
@ -0,0 +1,121 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::borrow::Cow;
|
||||
|
||||
use crate::expr::Variable;
|
||||
|
||||
/// Lightweight email-address validator implemented as a single-pass byte
/// state machine: requires exactly one unquoted `@`, a non-empty local part,
/// at least one dot in the domain, and a non-empty final domain label.
/// Quoted local parts (`"..."@domain`) admit additional characters.
pub(crate) fn fn_is_email(v: Vec<Variable>) -> Variable {
    let mut last_ch = 0; // previous byte, for dot/backslash adjacency rules
    let mut in_quote = false; // inside a quoted local part
    let mut at_count = 0; // unquoted '@' seen so far
    let mut dot_count = 0; // dots seen in the domain part
    let mut lp_len = 0; // length of the local part (set when '@' is hit)
    let mut value = 0; // length of the current segment/label

    for ch in v[0].to_string().bytes() {
        match ch {
            // atext and anything >= 0x7f is accepted verbatim.
            b'0'..=b'9'
            | b'a'..=b'z'
            | b'A'..=b'Z'
            | b'!'
            | b'#'
            | b'$'
            | b'%'
            | b'&'
            | b'\''
            | b'*'
            | b'+'
            | b'-'
            | b'/'
            | b'='
            | b'?'
            | b'^'
            | b'_'
            | b'`'
            | b'{'
            | b'|'
            | b'}'
            | b'~'
            | 0x7f..=u8::MAX => {
                value += 1;
            }
            b'.' if !in_quote => {
                // Dots may not be leading, doubled, or follow '@'.
                if last_ch != b'.' && last_ch != b'@' && value != 0 {
                    value += 1;
                    if at_count == 1 {
                        dot_count += 1;
                    }
                } else {
                    return false.into();
                }
            }
            b'@' if !in_quote => {
                // Switch from local part to domain; remember local-part length.
                at_count += 1;
                lp_len = value;
                value = 0;
            }
            // These are only valid inside a quoted local part.
            b'>' | b':' | b',' | b' ' if in_quote => {
                value += 1;
            }
            // Unescaped quote toggles quoted mode.
            b'\"' if !in_quote || last_ch != b'\\' => {
                in_quote = !in_quote;
            }
            // Escape character inside quotes is consumed silently.
            b'\\' if in_quote && last_ch != b'\\' => (),
            _ => {
                if !in_quote {
                    return false.into();
                }
            }
        }

        last_ch = ch;
    }

    (at_count == 1 && dot_count > 0 && lp_len > 0 && value > 0).into()
}
|
||||
|
||||
/// Extracts the "local" or "domain" part of an email address (split on the
/// last '@', trimmed); any other part name or a missing '@' yields the
/// default (empty) value. Borrowed strings stay zero-copy.
pub(crate) fn fn_email_part(v: Vec<Variable>) -> Variable {
    let mut v = v.into_iter();
    let value = v.next().unwrap();
    let part = v.next().unwrap().into_string();

    value.transform(|s| match s {
        Cow::Borrowed(s) => s
            .rsplit_once('@')
            .map(|(u, d)| match part.as_ref() {
                "local" => Variable::from(u.trim()),
                "domain" => Variable::from(d.trim()),
                _ => Variable::default(),
            })
            .unwrap_or_default(),
        Cow::Owned(s) => s
            .rsplit_once('@')
            .map(|(u, d)| match part.as_ref() {
                "local" => Variable::from(u.trim().to_string()),
                "domain" => Variable::from(d.trim().to_string()),
                _ => Variable::default(),
            })
            .unwrap_or_default(),
    })
}
|
67
crates/common/src/expr/functions/misc.rs
Normal file
67
crates/common/src/expr/functions/misc.rs
Normal file
|
@ -0,0 +1,67 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::net::IpAddr;
|
||||
|
||||
use mail_auth::common::resolver::ToReverseName;
|
||||
|
||||
use crate::expr::Variable;
|
||||
|
||||
pub(crate) fn fn_is_empty(v: Vec<Variable>) -> Variable {
|
||||
match &v[0] {
|
||||
Variable::String(s) => s.is_empty(),
|
||||
Variable::Integer(_) | Variable::Float(_) => false,
|
||||
Variable::Array(a) => a.is_empty(),
|
||||
}
|
||||
.into()
|
||||
}
|
||||
|
||||
/// Returns whether the value is numeric (integer or float variant).
pub(crate) fn fn_is_number(v: Vec<Variable>) -> Variable {
    matches!(&v[0], Variable::Integer(_) | Variable::Float(_)).into()
}
|
||||
|
||||
/// Returns whether the value parses as an IPv4 or IPv6 address.
pub(crate) fn fn_is_ip_addr(v: Vec<Variable>) -> Variable {
    v[0].to_string().parse::<std::net::IpAddr>().is_ok().into()
}
|
||||
|
||||
pub(crate) fn fn_is_ipv4_addr(v: Vec<Variable>) -> Variable {
|
||||
v[0].to_string()
|
||||
.parse::<std::net::IpAddr>()
|
||||
.map_or(false, |ip| matches!(ip, IpAddr::V4(_)))
|
||||
.into()
|
||||
}
|
||||
|
||||
pub(crate) fn fn_is_ipv6_addr(v: Vec<Variable>) -> Variable {
|
||||
v[0].to_string()
|
||||
.parse::<std::net::IpAddr>()
|
||||
.map_or(false, |ip| matches!(ip, IpAddr::V6(_)))
|
||||
.into()
|
||||
}
|
||||
|
||||
/// Returns the reverse-DNS name for an IP address (e.g. `4.3.2.1.in-addr.arpa`
/// via mail-auth's `ToReverseName`); unparseable input yields an empty string.
pub(crate) fn fn_ip_reverse_name(v: Vec<Variable>) -> Variable {
    v[0].to_string()
        .parse::<std::net::IpAddr>()
        .map(|ip| ip.to_reverse_name())
        .unwrap_or_default()
        .into()
}
|
119
crates/common/src/expr/functions/mod.rs
Normal file
119
crates/common/src/expr/functions/mod.rs
Normal file
|
@ -0,0 +1,119 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::borrow::Cow;
|
||||
|
||||
use super::Variable;
|
||||
|
||||
pub mod array;
|
||||
pub mod asynch;
|
||||
pub mod email;
|
||||
pub mod misc;
|
||||
pub mod text;
|
||||
|
||||
/// Supplies values for numeric variable ids referenced by compiled expressions.
pub trait ResolveVariable<'x> {
    fn resolve_variable(&self, variable: u32) -> Variable<'x>;
}
|
||||
|
||||
impl<'x> Variable<'x> {
    /// Applies `f` to the string form of the variable. Arrays are mapped
    /// element-wise; non-string elements (numbers, nested arrays) are first
    /// converted with `into_string`.
    fn transform(self, f: impl Fn(Cow<'x, str>) -> Variable<'x>) -> Variable<'x> {
        match self {
            Variable::String(s) => f(s),
            Variable::Array(list) => Variable::Array(
                list.into_iter()
                    .map(|v| match v {
                        Variable::String(s) => f(s),
                        v => f(v.into_string()),
                    })
                    .collect::<Vec<_>>(),
            ),
            v => f(v.into_string()),
        }
    }
}
|
||||
|
||||
/// Synchronous expression functions: (name, handler, number of arguments).
/// Looked up by name at expression compile time.
#[allow(clippy::type_complexity)]
pub(crate) const FUNCTIONS: &[(&str, fn(Vec<Variable>) -> Variable, u32)] = &[
    ("count", array::fn_count, 1),
    ("sort", array::fn_sort, 2),
    ("dedup", array::fn_dedup, 1),
    ("winnow", array::fn_winnow, 1),
    ("is_intersect", array::fn_is_intersect, 2),
    ("is_email", email::fn_is_email, 1),
    ("email_part", email::fn_email_part, 2),
    ("is_empty", misc::fn_is_empty, 1),
    ("is_number", misc::fn_is_number, 1),
    ("is_ip_addr", misc::fn_is_ip_addr, 1),
    ("is_ipv4_addr", misc::fn_is_ipv4_addr, 1),
    ("is_ipv6_addr", misc::fn_is_ipv6_addr, 1),
    ("ip_reverse_name", misc::fn_ip_reverse_name, 1),
    ("trim", text::fn_trim, 1),
    ("trim_end", text::fn_trim_end, 1),
    ("trim_start", text::fn_trim_start, 1),
    ("len", text::fn_len, 1),
    ("to_lowercase", text::fn_to_lowercase, 1),
    ("to_uppercase", text::fn_to_uppercase, 1),
    ("is_uppercase", text::fn_is_uppercase, 1),
    ("is_lowercase", text::fn_is_lowercase, 1),
    ("has_digits", text::fn_has_digits, 1),
    ("count_spaces", text::fn_count_spaces, 1),
    ("count_uppercase", text::fn_count_uppercase, 1),
    ("count_lowercase", text::fn_count_lowercase, 1),
    ("count_chars", text::fn_count_chars, 1),
    ("contains", text::fn_contains, 2),
    ("contains_ignore_case", text::fn_contains_ignore_case, 2),
    ("eq_ignore_case", text::fn_eq_ignore_case, 2),
    ("starts_with", text::fn_starts_with, 2),
    ("ends_with", text::fn_ends_with, 2),
    ("lines", text::fn_lines, 1),
    ("substring", text::fn_substring, 3),
    ("strip_prefix", text::fn_strip_prefix, 2),
    ("strip_suffix", text::fn_strip_suffix, 2),
    ("split", text::fn_split, 2),
    ("rsplit", text::fn_rsplit, 2),
    ("split_once", text::fn_split_once, 2),
    ("rsplit_once", text::fn_rsplit_once, 2),
    ("split_words", text::fn_split_words, 1),
];
|
||||
|
||||
// Identifiers for asynchronous (I/O-bound) functions, dispatched by
// `Core::eval_fnc`. Must stay in sync with `ASYNC_FUNCTIONS` below.
pub const F_IS_LOCAL_DOMAIN: u32 = 0;
pub const F_IS_LOCAL_ADDRESS: u32 = 1;
pub const F_KEY_GET: u32 = 2;
pub const F_KEY_EXISTS: u32 = 3;
pub const F_KEY_SET: u32 = 4;
pub const F_COUNTER_INCR: u32 = 5;
pub const F_COUNTER_GET: u32 = 6;
pub const F_SQL_QUERY: u32 = 7;
pub const F_DNS_QUERY: u32 = 8;
|
||||
|
||||
/// Asynchronous expression functions: (name, `F_*` id, number of arguments).
pub const ASYNC_FUNCTIONS: &[(&str, u32, u32)] = &[
    ("is_local_domain", F_IS_LOCAL_DOMAIN, 2),
    ("is_local_address", F_IS_LOCAL_ADDRESS, 2),
    ("key_get", F_KEY_GET, 2),
    ("key_exists", F_KEY_EXISTS, 2),
    ("key_set", F_KEY_SET, 3),
    ("counter_incr", F_COUNTER_INCR, 3),
    ("counter_get", F_COUNTER_GET, 2),
    ("dns_query", F_DNS_QUERY, 2),
    ("sql_query", F_SQL_QUERY, 3),
];
|
301
crates/common/src/expr/functions/text.rs
Normal file
301
crates/common/src/expr/functions/text.rs
Normal file
|
@ -0,0 +1,301 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::borrow::Cow;
|
||||
|
||||
use crate::expr::Variable;
|
||||
|
||||
/// Trims surrounding whitespace; a borrowed input stays a zero-copy slice.
pub(crate) fn fn_trim(mut v: Vec<Variable>) -> Variable {
    v.remove(0).transform(|s| match s {
        Cow::Borrowed(s) => Variable::from(s.trim()),
        Cow::Owned(s) => Variable::from(s.trim().to_string()),
    })
}
|
||||
|
||||
/// Trims trailing whitespace; a borrowed input stays a zero-copy slice.
pub(crate) fn fn_trim_end(mut v: Vec<Variable>) -> Variable {
    v.remove(0).transform(|s| match s {
        Cow::Borrowed(s) => Variable::from(s.trim_end()),
        Cow::Owned(s) => Variable::from(s.trim_end().to_string()),
    })
}
|
||||
|
||||
/// Trims leading whitespace; a borrowed input stays a zero-copy slice.
pub(crate) fn fn_trim_start(mut v: Vec<Variable>) -> Variable {
    v.remove(0).transform(|s| match s {
        Cow::Borrowed(s) => Variable::from(s.trim_start()),
        Cow::Owned(s) => Variable::from(s.trim_start().to_string()),
    })
}
|
||||
|
||||
pub(crate) fn fn_len(v: Vec<Variable>) -> Variable {
|
||||
match &v[0] {
|
||||
Variable::String(s) => s.len(),
|
||||
Variable::Array(a) => a.len(),
|
||||
v => v.to_string().len(),
|
||||
}
|
||||
.into()
|
||||
}
|
||||
|
||||
/// Unicode-aware lowercase conversion (applied element-wise to arrays).
pub(crate) fn fn_to_lowercase(mut v: Vec<Variable>) -> Variable {
    v.remove(0).transform(|s| Variable::from(s.to_lowercase()))
}
|
||||
|
||||
/// Unicode-aware uppercase conversion (applied element-wise to arrays).
pub(crate) fn fn_to_uppercase(mut v: Vec<Variable>) -> Variable {
    v.remove(0).transform(|s| Variable::from(s.to_uppercase()))
}
|
||||
|
||||
/// True when every alphabetic character is uppercase; non-alphabetic
/// characters are ignored (so an empty or all-digit string is "uppercase").
pub(crate) fn fn_is_uppercase(mut v: Vec<Variable>) -> Variable {
    v.remove(0).transform(|s| {
        s.chars()
            .all(|c| !c.is_alphabetic() || c.is_uppercase())
            .into()
    })
}
|
||||
|
||||
/// True when every alphabetic character is lowercase; non-alphabetic
/// characters are ignored.
pub(crate) fn fn_is_lowercase(mut v: Vec<Variable>) -> Variable {
    v.remove(0).transform(|s| {
        s.chars()
            .all(|c| !c.is_alphabetic() || c.is_lowercase())
            .into()
    })
}
|
||||
|
||||
/// True when the string contains at least one ASCII digit.
pub(crate) fn fn_has_digits(mut v: Vec<Variable>) -> Variable {
    v.remove(0)
        .transform(|s| s.chars().any(|c| c.is_ascii_digit()).into())
}
|
||||
|
||||
/// Splits on whitespace and keeps only purely alphanumeric words,
/// returning them as an array.
pub(crate) fn fn_split_words(v: Vec<Variable>) -> Variable {
    v[0].to_string()
        .split_whitespace()
        .filter(|word| word.chars().all(|c| c.is_alphanumeric()))
        .map(|word| Variable::from(word.to_string()))
        .collect::<Vec<_>>()
        .into()
}
|
||||
|
||||
/// Counts Unicode whitespace characters in the string form of the value.
pub(crate) fn fn_count_spaces(v: Vec<Variable>) -> Variable {
    v[0].to_string()
        .as_ref()
        .chars()
        .filter(|c| c.is_whitespace())
        .count()
        .into()
}
|
||||
|
||||
/// Counts uppercase alphabetic characters.
pub(crate) fn fn_count_uppercase(v: Vec<Variable>) -> Variable {
    v[0].to_string()
        .as_ref()
        .chars()
        .filter(|c| c.is_alphabetic() && c.is_uppercase())
        .count()
        .into()
}
|
||||
|
||||
/// Counts lowercase alphabetic characters.
pub(crate) fn fn_count_lowercase(v: Vec<Variable>) -> Variable {
    v[0].to_string()
        .as_ref()
        .chars()
        .filter(|c| c.is_alphabetic() && c.is_lowercase())
        .count()
        .into()
}
|
||||
|
||||
/// Counts Unicode scalar values (not bytes) in the string form of the value.
pub(crate) fn fn_count_chars(v: Vec<Variable>) -> Variable {
    v[0].to_string().as_ref().chars().count().into()
}
|
||||
|
||||
/// ASCII case-insensitive equality of the two values' string forms.
pub(crate) fn fn_eq_ignore_case(v: Vec<Variable>) -> Variable {
    v[0].to_string()
        .eq_ignore_ascii_case(v[1].to_string().as_ref())
        .into()
}
|
||||
|
||||
/// Case-sensitive containment: substring test for strings, item membership
/// for arrays, and substring of the string rendering for other values.
pub(crate) fn fn_contains(v: Vec<Variable>) -> Variable {
    match &v[0] {
        Variable::String(s) => s.contains(v[1].to_string().as_ref()),
        Variable::Array(arr) => arr.contains(&v[1]),
        val => val.to_string().contains(v[1].to_string().as_ref()),
    }
    .into()
}
|
||||
|
||||
pub(crate) fn fn_contains_ignore_case(v: Vec<Variable>) -> Variable {
|
||||
let needle = v[1].to_string();
|
||||
match &v[0] {
|
||||
Variable::String(s) => s.to_lowercase().contains(&needle.to_lowercase()),
|
||||
Variable::Array(arr) => arr.iter().any(|v| match v {
|
||||
Variable::String(s) => s.eq_ignore_ascii_case(needle.as_ref()),
|
||||
_ => false,
|
||||
}),
|
||||
val => val.to_string().contains(needle.as_ref()),
|
||||
}
|
||||
.into()
|
||||
}
|
||||
|
||||
/// Returns true when the first argument's string form starts with the
/// second argument's string form (case-sensitive).
pub(crate) fn fn_starts_with(v: Vec<Variable>) -> Variable {
    let haystack = v[0].to_string();
    let prefix = v[1].to_string();
    haystack.starts_with(prefix.as_ref()).into()
}
|
||||
|
||||
/// Returns true when the first argument's string form ends with the
/// second argument's string form (case-sensitive).
pub(crate) fn fn_ends_with(v: Vec<Variable>) -> Variable {
    let haystack = v[0].to_string();
    let suffix = v[1].to_string();
    haystack.ends_with(suffix.as_ref()).into()
}
|
||||
|
||||
pub(crate) fn fn_lines(mut v: Vec<Variable>) -> Variable {
|
||||
match v.remove(0) {
|
||||
Variable::String(s) => s
|
||||
.lines()
|
||||
.map(|s| Variable::from(s.to_string()))
|
||||
.collect::<Vec<_>>()
|
||||
.into(),
|
||||
val => val,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a substring of the first argument, measured in characters:
/// skips `v[1]` characters, then takes `v[2]` characters. Non-numeric
/// offsets default to zero.
pub(crate) fn fn_substring(v: Vec<Variable>) -> Variable {
    let text = v[0].to_string();
    let skip = v[1].to_usize().unwrap_or_default();
    let take = v[2].to_usize().unwrap_or_default();
    let slice: String = text.chars().skip(skip).take(take).collect();
    slice.into()
}
|
||||
|
||||
pub(crate) fn fn_strip_prefix(v: Vec<Variable>) -> Variable {
|
||||
let mut v = v.into_iter();
|
||||
let value = v.next().unwrap();
|
||||
let prefix = v.next().unwrap().into_string();
|
||||
|
||||
value.transform(|s| match s {
|
||||
Cow::Borrowed(s) => s
|
||||
.strip_prefix(prefix.as_ref())
|
||||
.map(Variable::from)
|
||||
.unwrap_or_default(),
|
||||
Cow::Owned(s) => s
|
||||
.strip_prefix(prefix.as_ref())
|
||||
.map(|s| Variable::from(s.to_string()))
|
||||
.unwrap_or_default(),
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn fn_strip_suffix(v: Vec<Variable>) -> Variable {
|
||||
let mut v = v.into_iter();
|
||||
let value = v.next().unwrap();
|
||||
let suffix = v.next().unwrap().into_string();
|
||||
|
||||
value.transform(|s| match s {
|
||||
Cow::Borrowed(s) => s
|
||||
.strip_suffix(suffix.as_ref())
|
||||
.map(Variable::from)
|
||||
.unwrap_or_default(),
|
||||
Cow::Owned(s) => s
|
||||
.strip_suffix(suffix.as_ref())
|
||||
.map(|s| Variable::from(s.to_string()))
|
||||
.unwrap_or_default(),
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn fn_split(v: Vec<Variable>) -> Variable {
|
||||
let mut v = v.into_iter();
|
||||
let value = v.next().unwrap().into_string();
|
||||
let arg = v.next().unwrap().into_string();
|
||||
|
||||
match value {
|
||||
Cow::Borrowed(s) => s
|
||||
.split(arg.as_ref())
|
||||
.map(Variable::from)
|
||||
.collect::<Vec<_>>()
|
||||
.into(),
|
||||
Cow::Owned(s) => s
|
||||
.split(arg.as_ref())
|
||||
.map(|s| Variable::from(s.to_string()))
|
||||
.collect::<Vec<_>>()
|
||||
.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn fn_rsplit(v: Vec<Variable>) -> Variable {
|
||||
let mut v = v.into_iter();
|
||||
let value = v.next().unwrap().into_string();
|
||||
let arg = v.next().unwrap().into_string();
|
||||
|
||||
match value {
|
||||
Cow::Borrowed(s) => s
|
||||
.rsplit(arg.as_ref())
|
||||
.map(Variable::from)
|
||||
.collect::<Vec<_>>()
|
||||
.into(),
|
||||
Cow::Owned(s) => s
|
||||
.rsplit(arg.as_ref())
|
||||
.map(|s| Variable::from(s.to_string()))
|
||||
.collect::<Vec<_>>()
|
||||
.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn fn_split_once(v: Vec<Variable>) -> Variable {
|
||||
let mut v = v.into_iter();
|
||||
let value = v.next().unwrap().into_string();
|
||||
let arg = v.next().unwrap().into_string();
|
||||
|
||||
match value {
|
||||
Cow::Borrowed(s) => s
|
||||
.split_once(arg.as_ref())
|
||||
.map(|(a, b)| Variable::Array(vec![Variable::from(a), Variable::from(b)]))
|
||||
.unwrap_or_default(),
|
||||
Cow::Owned(s) => s
|
||||
.split_once(arg.as_ref())
|
||||
.map(|(a, b)| {
|
||||
Variable::Array(vec![
|
||||
Variable::from(a.to_string()),
|
||||
Variable::from(b.to_string()),
|
||||
])
|
||||
})
|
||||
.unwrap_or_default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn fn_rsplit_once(v: Vec<Variable>) -> Variable {
|
||||
let mut v = v.into_iter();
|
||||
let value = v.next().unwrap().into_string();
|
||||
let arg = v.next().unwrap().into_string();
|
||||
|
||||
match value {
|
||||
Cow::Borrowed(s) => s
|
||||
.rsplit_once(arg.as_ref())
|
||||
.map(|(a, b)| Variable::Array(vec![Variable::from(a), Variable::from(b)]))
|
||||
.unwrap_or_default(),
|
||||
Cow::Owned(s) => s
|
||||
.rsplit_once(arg.as_ref())
|
||||
.map(|(a, b)| {
|
||||
Variable::Array(vec![
|
||||
Variable::from(a.to_string()),
|
||||
Variable::from(b.to_string()),
|
||||
])
|
||||
})
|
||||
.unwrap_or_default(),
|
||||
}
|
||||
}
|
197
crates/common/src/expr/if_block.rs
Normal file
197
crates/common/src/expr/if_block.rs
Normal file
|
@ -0,0 +1,197 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use utils::config::{utils::AsKey, Config};
|
||||
|
||||
use crate::expr::{Constant, Expression};
|
||||
|
||||
use super::{
|
||||
parser::ExpressionParser,
|
||||
tokenizer::{TokenMap, Tokenizer},
|
||||
};
|
||||
|
||||
/// A single `if`/`then` arm of an [`IfBlock`]: when `expr` evaluates to
/// true, the block yields the result of evaluating `then`.
#[derive(Debug, Clone, Default)]
#[cfg_attr(feature = "test_mode", derive(PartialEq, Eq))]
pub struct IfThen {
    // Condition expression.
    pub expr: Expression,
    // Expression evaluated when `expr` matches.
    pub then: Expression,
}
|
||||
|
||||
/// A conditional configuration value: an ordered list of `if`/`then` arms
/// plus a `default` (else) expression, parsed from config keys under `key`.
#[derive(Debug, Clone, Default)]
#[cfg_attr(feature = "test_mode", derive(PartialEq, Eq))]
pub struct IfBlock {
    // The configuration key prefix this block was parsed from.
    pub key: String,
    // Condition arms, evaluated in order; first match wins.
    pub if_then: Vec<IfThen>,
    // Fallback expression when no arm matches (the `else` branch).
    pub default: Expression,
}
|
||||
|
||||
impl IfBlock {
    /// Creates an unconditional block that always evaluates to `value`.
    pub fn new<T: Into<Constant>>(value: T) -> Self {
        Self {
            key: String::new(),
            if_then: Vec::new(),
            default: Expression::from(value),
        }
    }

    /// Returns true when the block has neither condition arms nor a
    /// default expression.
    pub fn is_empty(&self) -> bool {
        self.default.is_empty() && self.if_then.is_empty()
    }
}
|
||||
|
||||
impl Expression {
|
||||
pub fn try_parse(config: &mut Config, key: &str, token_map: &TokenMap) -> Option<Expression> {
|
||||
if let Some(expr) = config.value_or_warn(key) {
|
||||
match ExpressionParser::new(Tokenizer::new(expr, token_map)).parse() {
|
||||
Ok(expr) => Some(expr),
|
||||
Err(err) => {
|
||||
config.new_parse_error(key, err);
|
||||
None
|
||||
}
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl IfBlock {
    /// Parses an `IfBlock` from the configuration keys under `prefix`.
    ///
    /// Two layouts are accepted:
    /// * a single value stored directly at `prefix`, which becomes the
    ///   unconditional default expression; or
    /// * an indexed list of `<prefix>.<n>.if` / `.then` / `.else` entries,
    ///   which become the condition arms plus the required final `else`.
    ///
    /// On malformed input (missing `then`, `else` before `if`, multiple
    /// `else`, unknown property) an error is recorded on `config` and
    /// `None` is returned.
    pub fn try_parse(
        config: &mut Config,
        prefix: impl AsKey,
        token_map: &TokenMap,
    ) -> Option<IfBlock> {
        let key = prefix.as_key();

        // Parse conditions
        let mut if_block = IfBlock {
            key,
            ..Default::default()
        };

        // Try first with a single value
        if config.contains_key(if_block.key.as_str()) {
            if_block.default = Expression::try_parse(config, &if_block.key, token_map)?;
            return Some(if_block);
        }

        // Collect prefixes
        let prefix = prefix.as_prefix();
        let keys = config
            .keys
            .keys()
            .filter(|k| k.starts_with(&prefix))
            .cloned()
            .collect::<Vec<_>>();
        // Walk state:
        //   found_if       - at least one 'if' arm has been parsed
        //   found_else     - array position of the 'else' entry ("" if none yet)
        //   found_then     - the current 'if' arm already has its 'then'
        //   last_array_pos - array position of the arm currently being built
        let mut found_if = false;
        let mut found_else = "";
        let mut found_then = false;
        let mut last_array_pos = "";

        for item in &keys {
            let suffix_ = item.strip_prefix(&prefix).unwrap();

            if let Some((array_pos, suffix)) = suffix_.split_once('.') {
                // Only the first path segment after the index is significant.
                let if_key = suffix.split_once('.').map(|(v, _)| v).unwrap_or(suffix);
                if if_key == "if" {
                    if array_pos != last_array_pos {
                        // A new arm starts; the previous one must be complete.
                        if !last_array_pos.is_empty() && !found_then {
                            config.new_parse_error(
                                if_block.key,
                                format!(
                                    "Missing 'then' in 'if' condition {}.",
                                    last_array_pos.parse().unwrap_or(0) + 1,
                                ),
                            );
                            return None;
                        }

                        if_block.if_then.push(IfThen {
                            expr: Expression::try_parse(config, item, token_map)?,
                            then: Expression::default(),
                        });

                        found_then = false;
                        last_array_pos = array_pos;
                    }

                    found_if = true;
                } else if if_key == "else" {
                    if found_else.is_empty() {
                        if found_if {
                            if_block.default = Expression::try_parse(config, item, token_map)?;
                            found_else = array_pos;
                        } else {
                            config.new_parse_error(if_block.key, "Found 'else' before 'if'");
                            return None;
                        }
                    } else if array_pos != found_else {
                        config.new_parse_error(if_block.key, "Multiple 'else' found");
                        return None;
                    }
                } else if if_key == "then" {
                    if found_else.is_empty() {
                        if array_pos == last_array_pos {
                            if !found_then {
                                if_block.if_then.last_mut().unwrap().then =
                                    Expression::try_parse(config, item, token_map)?;
                                found_then = true;
                            }
                        } else {
                            config.new_parse_error(if_block.key, "Found 'then' without 'if'");
                            return None;
                        }
                    } else {
                        config.new_parse_error(if_block.key, "Found 'then' in 'else' block");
                        return None;
                    }
                }
            } else {
                config.new_parse_error(
                    if_block.key,
                    format!("Invalid property {item:?} found in 'if' block."),
                );
                return None;
            }
        }

        // Final validation: the block must have at least one complete arm
        // and an 'else' branch.
        if !found_if {
            config.new_missing_property(if_block.key);
            None
        } else if !found_then {
            config.new_parse_error(
                if_block.key,
                format!(
                    "Missing 'then' in 'if' condition {}",
                    last_array_pos.parse().unwrap_or(0) + 1,
                ),
            );
            None
        } else if found_else.is_empty() {
            config.new_parse_error(if_block.key, "Missing 'else'");
            None
        } else {
            Some(if_block)
        }
    }
}
|
315
crates/common/src/expr/mod.rs
Normal file
315
crates/common/src/expr/mod.rs
Normal file
|
@ -0,0 +1,315 @@
|
|||
/*
|
||||
* Copyright (c) 2020-2023, Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::{borrow::Cow, time::Duration};
|
||||
|
||||
use regex::Regex;
|
||||
use utils::config::utils::ParseValue;
|
||||
|
||||
pub mod eval;
|
||||
pub mod functions;
|
||||
pub mod if_block;
|
||||
pub mod parser;
|
||||
pub mod tokenizer;
|
||||
|
||||
/// A compiled expression: a flat instruction list in postfix order,
/// produced by [`parser::ExpressionParser`].
#[derive(Debug, PartialEq, Eq, Clone, Default)]
pub struct Expression {
    pub items: Vec<ExpressionItem>,
}
|
||||
|
||||
/// A single instruction of a compiled [`Expression`].
#[derive(Debug, Clone)]
pub enum ExpressionItem {
    // Named variable reference, by numeric id.
    Variable(u32),
    // Capture-group reference, by index.
    Capture(u32),
    // Literal constant.
    Constant(Constant),
    BinaryOperator(BinaryOperator),
    UnaryOperator(UnaryOperator),
    // Pre-compiled regular expression (from a `matches` call).
    Regex(Regex),
    // Short-circuit jump for `&&`/`||`: `pos` is the relative distance
    // forward, filled in by the parser's `update_jmp_pos`.
    JmpIf { val: bool, pos: u32 },
    // Function call taking `num_args` values.
    Function { id: u32, num_args: u32 },
    // Array subscript (`value[index]`).
    ArrayAccess,
    // Array literal built from the top `n` values.
    ArrayBuild(u32),
}
|
||||
|
||||
/// A runtime value produced while evaluating an expression. Strings use
/// `Cow` so they can borrow from their source where possible.
#[derive(Debug)]
pub enum Variable<'x> {
    String(Cow<'x, str>),
    Integer(i64),
    Float(f64),
    Array(Vec<Variable<'x>>),
}

impl Default for Variable<'_> {
    // Integer zero — matches `false` under the `From<bool>` conversion.
    fn default() -> Self {
        Variable::Integer(0)
    }
}
|
||||
|
||||
/// A literal constant embedded in an expression.
#[derive(Debug, PartialEq, Clone)]
pub enum Constant {
    Integer(i64),
    Float(f64),
    String(String),
}

// NOTE(review): manual `Eq` because `f64` is only `PartialEq`; this treats
// float constants as totally comparable — assumes NaN never occurs here,
// confirm with the tokenizer's number parsing.
impl Eq for Constant {}

impl From<String> for Constant {
    fn from(value: String) -> Self {
        Constant::String(value)
    }
}

// Booleans are represented as the integers 0 and 1.
impl From<bool> for Constant {
    fn from(value: bool) -> Self {
        Constant::Integer(value as i64)
    }
}

impl From<i64> for Constant {
    fn from(value: i64) -> Self {
        Constant::Integer(value)
    }
}

impl From<i32> for Constant {
    fn from(value: i32) -> Self {
        Constant::Integer(value as i64)
    }
}

impl From<i16> for Constant {
    fn from(value: i16) -> Self {
        Constant::Integer(value as i64)
    }
}

impl From<f64> for Constant {
    fn from(value: f64) -> Self {
        Constant::Float(value)
    }
}

// `usize` may exceed `i64` on 64-bit targets in theory; in practice these
// values are small counts/sizes.
impl From<usize> for Constant {
    fn from(value: usize) -> Self {
        Constant::Integer(value as i64)
    }
}
|
||||
|
||||
/// Binary operators supported by the expression language.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum BinaryOperator {
    // Arithmetic
    Add,
    Subtract,
    Multiply,
    Divide,

    // Logical; `And`/`Or` short-circuit via `ExpressionItem::JmpIf`.
    And,
    Or,
    Xor,

    // Comparisons
    Eq,
    Ne,
    Lt,
    Le,
    Gt,
    Ge,
}

/// Unary operators: logical negation (`!`) and arithmetic minus (`-`).
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum UnaryOperator {
    Not,
    Minus,
}
|
||||
|
||||
/// A lexical token produced by the expression tokenizer.
#[derive(Debug, Clone)]
pub enum Token {
    // Named variable reference, by numeric id.
    Variable(u32),
    // Capture-group reference, by index.
    Capture(u32),
    // Function reference with its expected argument count.
    Function {
        name: Cow<'static, str>,
        id: u32,
        num_args: u32,
    },
    Constant(Constant),
    // Pre-compiled regex literal (from a `matches` call).
    Regex(Regex),
    BinaryOperator(BinaryOperator),
    UnaryOperator(UnaryOperator),
    OpenParen,
    CloseParen,
    OpenBracket,
    CloseBracket,
    Comma,
}
|
||||
|
||||
// Conversions from primitive values into `Variable`. Integral types widen
// to `Integer(i64)`, floats map to `Float`, and string types choose the
// borrowed or owned `Cow` form accordingly.

impl From<usize> for Variable<'_> {
    fn from(value: usize) -> Self {
        Variable::Integer(value as i64)
    }
}

impl From<i64> for Variable<'_> {
    fn from(value: i64) -> Self {
        Variable::Integer(value)
    }
}

impl From<i32> for Variable<'_> {
    fn from(value: i32) -> Self {
        Variable::Integer(value as i64)
    }
}

impl From<i16> for Variable<'_> {
    fn from(value: i16) -> Self {
        Variable::Integer(value as i64)
    }
}

impl From<f64> for Variable<'_> {
    fn from(value: f64) -> Self {
        Variable::Float(value)
    }
}

// Borrowed strings keep their lifetime (no allocation).
impl<'x> From<&'x str> for Variable<'x> {
    fn from(value: &'x str) -> Self {
        Variable::String(Cow::Borrowed(value))
    }
}

impl From<String> for Variable<'_> {
    fn from(value: String) -> Self {
        Variable::String(Cow::Owned(value))
    }
}

impl<'x> From<Vec<Variable<'x>>> for Variable<'x> {
    fn from(value: Vec<Variable<'x>>) -> Self {
        Variable::Array(value)
    }
}

// Booleans are represented as the integers 0 and 1.
impl From<bool> for Variable<'_> {
    fn from(value: bool) -> Self {
        Variable::Integer(value as i64)
    }
}
|
||||
|
||||
// Any constant-convertible value lifts to a one-instruction constant
// expression.
impl<T: Into<Constant>> From<T> for Expression {
    fn from(value: T) -> Self {
        Expression {
            items: vec![ExpressionItem::Constant(value.into())],
        }
    }
}
|
||||
|
||||
// Manual `PartialEq`: `Regex` provides no equality, so any two `Regex`
// items compare equal; every other variant compares structurally.
impl PartialEq for ExpressionItem {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Self::Variable(l0), Self::Variable(r0)) => l0 == r0,
            (Self::Constant(l0), Self::Constant(r0)) => l0 == r0,
            (Self::BinaryOperator(l0), Self::BinaryOperator(r0)) => l0 == r0,
            (Self::UnaryOperator(l0), Self::UnaryOperator(r0)) => l0 == r0,
            // Regexes are assumed equal regardless of pattern.
            (Self::Regex(_), Self::Regex(_)) => true,
            (
                Self::JmpIf {
                    val: l_val,
                    pos: l_pos,
                },
                Self::JmpIf {
                    val: r_val,
                    pos: r_pos,
                },
            ) => l_val == r_val && l_pos == r_pos,
            (
                Self::Function {
                    id: l_id,
                    num_args: l_num_args,
                },
                Self::Function {
                    id: r_id,
                    num_args: r_num_args,
                },
            ) => l_id == r_id && l_num_args == r_num_args,
            (Self::ArrayBuild(l0), Self::ArrayBuild(r0)) => l0 == r0,
            // Remaining fieldless variants match when discriminants agree.
            _ => core::mem::discriminant(self) == core::mem::discriminant(other),
        }
    }
}

impl Eq for ExpressionItem {}
|
||||
|
||||
// Manual `PartialEq` for the same reason as `ExpressionItem`: `Regex` has
// no equality, so regex tokens always compare equal.
impl PartialEq for Token {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Self::Variable(l0), Self::Variable(r0)) => l0 == r0,
            (
                Self::Function {
                    name: l_name,
                    id: l_id,
                    num_args: l_num_args,
                },
                Self::Function {
                    name: r_name,
                    id: r_id,
                    num_args: r_num_args,
                },
            ) => l_name == r_name && l_id == r_id && l_num_args == r_num_args,
            (Self::Constant(l0), Self::Constant(r0)) => l0 == r0,
            // Regexes are assumed equal regardless of pattern.
            (Self::Regex(_), Self::Regex(_)) => true,
            (Self::BinaryOperator(l0), Self::BinaryOperator(r0)) => l0 == r0,
            (Self::UnaryOperator(l0), Self::UnaryOperator(r0)) => l0 == r0,
            // Remaining fieldless variants match when discriminants agree.
            _ => core::mem::discriminant(self) == core::mem::discriminant(other),
        }
    }
}

impl Eq for Token {}
|
||||
|
||||
/// Marker trait for types that can round-trip between parsed config
/// values, expression [`Variable`]s and [`Constant`]s.
pub trait ConstantValue:
    ParseValue + for<'x> TryFrom<Variable<'x>> + Into<Constant> + Sized
{
}

impl ConstantValue for Duration {}

impl<'x> TryFrom<Variable<'x>> for Duration {
    type Error = ();

    /// Positive numbers are interpreted as milliseconds; non-empty strings
    /// are parsed with `Duration::parse_value`. Everything else fails.
    fn try_from(value: Variable<'x>) -> Result<Self, Self::Error> {
        match value {
            Variable::Integer(value) if value > 0 => Ok(Duration::from_millis(value as u64)),
            Variable::Float(value) if value > 0.0 => Ok(Duration::from_millis(value as u64)),
            Variable::String(value) if !value.is_empty() => {
                Duration::parse_value("", &value).map_err(|_| ())
            }
            _ => Err(()),
        }
    }
}

// Durations are stored as integer milliseconds.
impl From<Duration> for Constant {
    fn from(value: Duration) -> Self {
        Constant::Integer(value.as_millis() as i64)
    }
}
|
287
crates/common/src/expr/parser.rs
Normal file
287
crates/common/src/expr/parser.rs
Normal file
|
@ -0,0 +1,287 @@
|
|||
/*
|
||||
* Copyright (c) 2020-2023, Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use super::{tokenizer::Tokenizer, BinaryOperator, Expression, ExpressionItem, Token};
|
||||
|
||||
/// Shunting-yard expression parser: consumes tokens from `tokenizer` and
/// emits a postfix instruction stream into `output`.
pub struct ExpressionParser<'x> {
    pub(crate) tokenizer: Tokenizer<'x>,
    // Postfix output being assembled.
    pub(crate) output: Vec<ExpressionItem>,
    // Pending operators, each with the optional index of the `JmpIf`
    // instruction emitted for `&&`/`||` short-circuiting.
    operator_stack: Vec<(Token, Option<usize>)>,
    // Argument counters, one per open function call (top = innermost).
    arg_count: Vec<i32>,
}

// Sentinel function ids used for array subscripting and array literals.
pub(crate) const ID_ARRAY_ACCESS: u32 = u32::MAX;
pub(crate) const ID_ARRAY_BUILD: u32 = u32::MAX - 1;
|
||||
|
||||
impl<'x> ExpressionParser<'x> {
    /// Creates a parser over `tokenizer` with empty state.
    pub fn new(tokenizer: Tokenizer<'x>) -> Self {
        Self {
            tokenizer,
            output: Vec::new(),
            operator_stack: Vec::new(),
            arg_count: Vec::new(),
        }
    }

    /// Runs the shunting-yard algorithm over the token stream, returning the
    /// compiled postfix [`Expression`] or an error message.
    pub fn parse(mut self) -> Result<Expression, String> {
        // Tracks whether the previous token produced a value, which
        // disambiguates `[` as array access vs. array literal.
        let mut last_is_var_or_fnc = false;

        while let Some(token) = self.tokenizer.next()? {
            let mut is_var_or_fnc = false;
            match token {
                // Operands go straight to the output.
                Token::Variable(v) => {
                    self.inc_arg_count();
                    is_var_or_fnc = true;
                    self.output.push(ExpressionItem::Variable(v))
                }
                Token::Constant(c) => {
                    self.inc_arg_count();
                    self.output.push(ExpressionItem::Constant(c))
                }
                Token::Capture(c) => {
                    self.inc_arg_count();
                    self.output.push(ExpressionItem::Capture(c))
                }
                Token::UnaryOperator(uop) => {
                    self.operator_stack.push((Token::UnaryOperator(uop), None))
                }
                Token::OpenParen => self.operator_stack.push((token, None)),
                Token::CloseParen | Token::CloseBracket => {
                    let expect_token = if matches!(token, Token::CloseParen) {
                        Token::OpenParen
                    } else {
                        Token::OpenBracket
                    };
                    // Pop operators until the matching opener is found.
                    loop {
                        match self.operator_stack.pop() {
                            Some((t, _)) if t == expect_token => {
                                break;
                            }
                            Some((Token::BinaryOperator(bop), jmp_pos)) => {
                                self.update_jmp_pos(jmp_pos);
                                self.output.push(ExpressionItem::BinaryOperator(bop))
                            }
                            Some((Token::UnaryOperator(uop), _)) => {
                                self.output.push(ExpressionItem::UnaryOperator(uop))
                            }
                            _ => return Err("Mismatched parentheses".to_string()),
                        }
                    }

                    // If the opener belonged to a function call (or regex
                    // `matches`), validate the arity and emit the call.
                    match self.operator_stack.last() {
                        Some((Token::Function { id, num_args, name }, _)) => {
                            let got_args = self.arg_count.pop().unwrap();
                            if got_args != *num_args as i32 {
                                return Err(if *id != u32::MAX {
                                    format!(
                                        "Expression function {:?} expected {} arguments, got {}",
                                        name, num_args, got_args
                                    )
                                } else {
                                    "Missing array index".to_string()
                                });
                            }

                            let expr = match *id {
                                ID_ARRAY_ACCESS => ExpressionItem::ArrayAccess,
                                ID_ARRAY_BUILD => ExpressionItem::ArrayBuild(*num_args),
                                id => ExpressionItem::Function {
                                    id,
                                    num_args: *num_args,
                                },
                            };

                            self.operator_stack.pop();
                            self.output.push(expr);
                        }
                        Some((Token::Regex(regex), _)) => {
                            if self.arg_count.pop().unwrap() != 1 {
                                return Err("Expression function \"matches\" expected 2 arguments"
                                    .to_string());
                            }
                            self.output.push(ExpressionItem::Regex(regex.clone()));
                            self.operator_stack.pop();
                        }
                        _ => {}
                    }

                    is_var_or_fnc = true;
                }
                Token::BinaryOperator(bop) => {
                    // A binary operator joins two operands into one value.
                    self.dec_arg_count();
                    // Pop operators of greater-or-equal precedence.
                    while let Some((top_token, prev_jmp_pos)) = self.operator_stack.last() {
                        match top_token {
                            Token::BinaryOperator(top_bop) => {
                                if bop.precedence() <= top_bop.precedence() {
                                    let top_bop = *top_bop;
                                    let jmp_pos = *prev_jmp_pos;
                                    self.update_jmp_pos(jmp_pos);
                                    self.operator_stack.pop();
                                    self.output.push(ExpressionItem::BinaryOperator(top_bop));
                                } else {
                                    break;
                                }
                            }
                            Token::UnaryOperator(top_uop) => {
                                let top_uop = *top_uop;
                                self.operator_stack.pop();
                                self.output.push(ExpressionItem::UnaryOperator(top_uop));
                            }
                            _ => break,
                        }
                    }

                    // Add jump instruction for short-circuiting
                    let jmp_pos = match bop {
                        BinaryOperator::And => {
                            self.output
                                .push(ExpressionItem::JmpIf { val: false, pos: 0 });
                            Some(self.output.len() - 1)
                        }
                        BinaryOperator::Or => {
                            self.output
                                .push(ExpressionItem::JmpIf { val: true, pos: 0 });
                            Some(self.output.len() - 1)
                        }
                        _ => None,
                    };

                    self.operator_stack
                        .push((Token::BinaryOperator(bop), jmp_pos));
                }
                Token::Function { id, name, num_args } => {
                    self.inc_arg_count();
                    // Open a fresh argument counter for the call.
                    self.arg_count.push(0);
                    self.operator_stack
                        .push((Token::Function { id, name, num_args }, None))
                }
                Token::Regex(regex) => {
                    self.inc_arg_count();
                    self.arg_count.push(0);
                    self.operator_stack.push((Token::Regex(regex), None))
                }
                Token::OpenBracket => {
                    // Array functions
                    // After a value, `[` means subscripting (2 args: value +
                    // index, value already seen); otherwise an array literal.
                    let (id, num_args, arg_count) = if last_is_var_or_fnc {
                        (ID_ARRAY_ACCESS, 2, 1)
                    } else {
                        self.inc_arg_count();
                        (ID_ARRAY_BUILD, 0, 0)
                    };
                    self.arg_count.push(arg_count);
                    self.operator_stack.push((
                        Token::Function {
                            id,
                            name: "array".into(),
                            num_args,
                        },
                        None,
                    ));
                    self.operator_stack.push((token, None));
                }
                Token::Comma => {
                    // Flush pending operators up to the enclosing paren.
                    while let Some((token, jmp_pos)) = self.operator_stack.last() {
                        match token {
                            Token::OpenParen => break,
                            Token::BinaryOperator(bop) => {
                                let bop = *bop;
                                let jmp_pos = *jmp_pos;
                                self.update_jmp_pos(jmp_pos);
                                self.output.push(ExpressionItem::BinaryOperator(bop));
                                self.operator_stack.pop();
                            }
                            Token::UnaryOperator(uop) => {
                                self.output.push(ExpressionItem::UnaryOperator(*uop));
                                self.operator_stack.pop();
                            }
                            _ => break,
                        }
                    }
                }
            }
            last_is_var_or_fnc = is_var_or_fnc;
        }

        // Drain any remaining operators into the output.
        while let Some((token, jmp_pos)) = self.operator_stack.pop() {
            match token {
                Token::BinaryOperator(bop) => {
                    self.update_jmp_pos(jmp_pos);
                    self.output.push(ExpressionItem::BinaryOperator(bop))
                }
                Token::UnaryOperator(uop) => self.output.push(ExpressionItem::UnaryOperator(uop)),
                _ => return Err("Invalid token on the operator stack".to_string()),
            }
        }

        // NOTE(review): the loop above drains the stack, so the `else`
        // branch below appears unreachable — confirm intent.
        if self.operator_stack.is_empty() {
            Ok(Expression { items: self.output })
        } else {
            Err("Invalid expression".to_string())
        }
    }

    /// Counts one more argument for the innermost open call; an enclosing
    /// array literal additionally grows its expected element count.
    fn inc_arg_count(&mut self) {
        if let Some(x) = self.arg_count.last_mut() {
            *x = x.saturating_add(1);
            let op_pos = self.operator_stack.len().saturating_sub(2);
            match self.operator_stack.get_mut(op_pos) {
                Some((Token::Function { num_args, id, .. }, _)) if *id == ID_ARRAY_BUILD => {
                    *num_args += 1;
                }
                _ => {}
            }
        }
    }

    /// A binary operator merges two values into one, reducing the count.
    fn dec_arg_count(&mut self) {
        if let Some(x) = self.arg_count.last_mut() {
            *x = x.saturating_sub(1);
        }
    }

    /// Back-patches a previously emitted `JmpIf` with the relative distance
    /// from its position to the current end of the output.
    fn update_jmp_pos(&mut self, jmp_pos: Option<usize>) {
        if let Some(jmp_pos) = jmp_pos {
            let cur_pos = self.output.len();
            if let ExpressionItem::JmpIf { pos, .. } = &mut self.output[jmp_pos] {
                *pos = (cur_pos - jmp_pos) as u32;
            } else {
                #[cfg(test)]
                panic!("Invalid jump position");
            }
        }
    }
}
|
||||
|
||||
impl BinaryOperator {
    /// Operator precedence for the shunting-yard parser; larger numbers
    /// bind tighter (multiplicative > additive > relational > equality >
    /// xor > and > or).
    fn precedence(&self) -> i32 {
        match self {
            BinaryOperator::Multiply | BinaryOperator::Divide => 7,
            BinaryOperator::Add | BinaryOperator::Subtract => 6,
            BinaryOperator::Gt | BinaryOperator::Ge | BinaryOperator::Lt | BinaryOperator::Le => 5,
            BinaryOperator::Eq | BinaryOperator::Ne => 4,
            BinaryOperator::Xor => 3,
            BinaryOperator::And => 2,
            BinaryOperator::Or => 1,
        }
    }
}
|
373
crates/common/src/expr/tokenizer.rs
Normal file
373
crates/common/src/expr/tokenizer.rs
Normal file
|
@ -0,0 +1,373 @@
|
|||
/*
|
||||
* Copyright (c) 2020-2023, Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::{borrow::Cow, iter::Peekable, slice::Iter, time::Duration};
|
||||
|
||||
use ahash::AHashMap;
|
||||
use regex::Regex;
|
||||
use utils::config::utils::ParseValue;
|
||||
|
||||
use super::{
|
||||
functions::{ASYNC_FUNCTIONS, FUNCTIONS},
|
||||
BinaryOperator, Constant, Token, UnaryOperator,
|
||||
};
|
||||
|
||||
/// Streaming tokenizer over the raw bytes of an expression string.
pub struct Tokenizer<'x> {
    // Byte iterator over the expression text.
    pub(crate) iter: Peekable<Iter<'x, u8>>,
    // Identifier lookup table; presumably consulted when `buf` is turned
    // into a token (see `parse_buf`) — confirm.
    token_map: &'x TokenMap,
    // Bytes of the identifier/number currently being accumulated.
    buf: Vec<u8>,
    // Current parenthesis nesting depth.
    depth: u32,
    // Tokens already produced and queued for the next `next()` call.
    next_token: Vec<Token>,
    // Classification flags for the contents of `buf`.
    has_number: bool,
    has_dot: bool,
    has_alpha: bool,
    // True at the start of an operand position; used to treat `-` as a
    // unary minus instead of subtraction.
    is_start: bool,
    // Set once the closing '}' delimiter has been consumed.
    is_eof: bool,
}
|
||||
|
||||
/// Maps static identifier names to pre-built tokens (variables, functions)
/// available to a particular expression context.
#[derive(Debug, Default)]
pub struct TokenMap {
    tokens: AHashMap<&'static str, Token>,
}
|
||||
|
||||
impl<'x> Tokenizer<'x> {
|
||||
#[allow(clippy::should_implement_trait)]
|
||||
pub fn new(expr: &'x str, token_map: &'x TokenMap) -> Self {
|
||||
Self {
|
||||
iter: expr.as_bytes().iter().peekable(),
|
||||
buf: Vec::new(),
|
||||
depth: 0,
|
||||
next_token: Vec::with_capacity(2),
|
||||
has_number: false,
|
||||
has_dot: false,
|
||||
has_alpha: false,
|
||||
is_start: true,
|
||||
is_eof: false,
|
||||
token_map,
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::should_implement_trait)]
|
||||
pub fn next(&mut self) -> Result<Option<Token>, String> {
|
||||
if let Some(token) = self.next_token.pop() {
|
||||
return Ok(Some(token));
|
||||
} else if self.is_eof {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
while let Some(&ch) = self.iter.next() {
|
||||
match ch {
|
||||
b'A'..=b'Z' | b'a'..=b'z' | b'_' | b'$' => {
|
||||
self.buf.push(ch);
|
||||
self.has_alpha = true;
|
||||
}
|
||||
b'0'..=b'9' => {
|
||||
self.buf.push(ch);
|
||||
self.has_number = true;
|
||||
}
|
||||
b'.' => {
|
||||
self.buf.push(ch);
|
||||
self.has_dot = true;
|
||||
}
|
||||
b'}' => {
|
||||
self.is_eof = true;
|
||||
break;
|
||||
}
|
||||
b'-' if self.buf.last().map_or(false, |c| *c == b'[') => {
|
||||
self.buf.push(ch);
|
||||
}
|
||||
b':' if self.buf.contains(&b'.') => {
|
||||
self.buf.push(ch);
|
||||
}
|
||||
b']' if self.buf.contains(&b'[') => {
|
||||
self.buf.push(b']');
|
||||
}
|
||||
b'*' if self.buf.last().map_or(false, |&c| c == b'[' || c == b'.') => {
|
||||
self.buf.push(ch);
|
||||
}
|
||||
_ => {
|
||||
let (prev_token, ch) = if ch == b'(' && self.buf.eq(b"matches") {
|
||||
// Parse regular expressions
|
||||
let stop_ch = self.find_char(&[b'\"', b'\''])?;
|
||||
let regex_str = self.parse_string(stop_ch)?;
|
||||
let regex = Regex::new(®ex_str).map_err(|e| {
|
||||
format!("Invalid regular expression {:?}: {}", regex_str, e)
|
||||
})?;
|
||||
self.has_alpha = false;
|
||||
self.buf.clear();
|
||||
self.find_char(&[b','])?;
|
||||
(Token::Regex(regex).into(), b'(')
|
||||
} else if !self.buf.is_empty() {
|
||||
self.is_start = false;
|
||||
(self.parse_buf()?.into(), ch)
|
||||
} else {
|
||||
(None, ch)
|
||||
};
|
||||
let token = match ch {
|
||||
b'&' => {
|
||||
if matches!(self.iter.peek(), Some(b'&')) {
|
||||
self.iter.next();
|
||||
}
|
||||
Token::BinaryOperator(BinaryOperator::And)
|
||||
}
|
||||
b'|' => {
|
||||
if matches!(self.iter.peek(), Some(b'|')) {
|
||||
self.iter.next();
|
||||
}
|
||||
Token::BinaryOperator(BinaryOperator::Or)
|
||||
}
|
||||
b'!' => {
|
||||
if matches!(self.iter.peek(), Some(b'=')) {
|
||||
self.iter.next();
|
||||
Token::BinaryOperator(BinaryOperator::Ne)
|
||||
} else {
|
||||
Token::UnaryOperator(UnaryOperator::Not)
|
||||
}
|
||||
}
|
||||
b'^' => Token::BinaryOperator(BinaryOperator::Xor),
|
||||
b'(' => {
|
||||
self.depth += 1;
|
||||
Token::OpenParen
|
||||
}
|
||||
b')' => {
|
||||
if self.depth == 0 {
|
||||
return Err("Unmatched close parenthesis".to_string());
|
||||
}
|
||||
self.depth -= 1;
|
||||
Token::CloseParen
|
||||
}
|
||||
b'+' => Token::BinaryOperator(BinaryOperator::Add),
|
||||
b'*' => Token::BinaryOperator(BinaryOperator::Multiply),
|
||||
b'/' => Token::BinaryOperator(BinaryOperator::Divide),
|
||||
b'-' => {
|
||||
if self.is_start {
|
||||
Token::UnaryOperator(UnaryOperator::Minus)
|
||||
} else {
|
||||
Token::BinaryOperator(BinaryOperator::Subtract)
|
||||
}
|
||||
}
|
||||
b'=' => match self.iter.next() {
|
||||
Some(b'=') => Token::BinaryOperator(BinaryOperator::Eq),
|
||||
Some(b'>') => Token::BinaryOperator(BinaryOperator::Ge),
|
||||
Some(b'<') => Token::BinaryOperator(BinaryOperator::Le),
|
||||
_ => Token::BinaryOperator(BinaryOperator::Eq),
|
||||
},
|
||||
b'>' => match self.iter.peek() {
|
||||
Some(b'=') => {
|
||||
self.iter.next();
|
||||
Token::BinaryOperator(BinaryOperator::Ge)
|
||||
}
|
||||
_ => Token::BinaryOperator(BinaryOperator::Gt),
|
||||
},
|
||||
b'<' => match self.iter.peek() {
|
||||
Some(b'=') => {
|
||||
self.iter.next();
|
||||
Token::BinaryOperator(BinaryOperator::Le)
|
||||
}
|
||||
_ => Token::BinaryOperator(BinaryOperator::Lt),
|
||||
},
|
||||
b',' => Token::Comma,
|
||||
b'[' => Token::OpenBracket,
|
||||
b']' => Token::CloseBracket,
|
||||
b' ' | b'\r' | b'\n' => {
|
||||
if prev_token.is_some() {
|
||||
return Ok(prev_token);
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
b'\"' | b'\'' => Token::Constant(Constant::String(self.parse_string(ch)?)),
|
||||
_ => {
|
||||
return Err(format!("Invalid character {:?}", char::from(ch),));
|
||||
}
|
||||
};
|
||||
self.is_start = matches!(
|
||||
token,
|
||||
Token::OpenParen | Token::Comma | Token::BinaryOperator(_)
|
||||
);
|
||||
|
||||
return if prev_token.is_some() {
|
||||
self.next_token.push(token);
|
||||
Ok(prev_token)
|
||||
} else {
|
||||
Ok(Some(token))
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if self.depth > 0 {
|
||||
Err("Unmatched open parenthesis".to_string())
|
||||
} else if !self.buf.is_empty() {
|
||||
self.parse_buf().map(Some)
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
fn find_char(&mut self, chars: &[u8]) -> Result<u8, String> {
|
||||
for &ch in self.iter.by_ref() {
|
||||
if !ch.is_ascii_whitespace() {
|
||||
return if chars.contains(&ch) {
|
||||
Ok(ch)
|
||||
} else {
|
||||
Err(format!(
|
||||
"Expected {:?}, found invalid character {:?}",
|
||||
char::from(chars[0]),
|
||||
char::from(ch),
|
||||
))
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
Err("Unexpected end of expression".to_string())
|
||||
}
|
||||
|
||||
fn parse_string(&mut self, stop_ch: u8) -> Result<String, String> {
|
||||
let mut buf = Vec::with_capacity(16);
|
||||
let mut last_ch = 0;
|
||||
let mut found_end = false;
|
||||
|
||||
for &ch in self.iter.by_ref() {
|
||||
if last_ch != b'\\' {
|
||||
if ch != stop_ch {
|
||||
buf.push(ch);
|
||||
} else {
|
||||
found_end = true;
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
match ch {
|
||||
b'n' => {
|
||||
buf.push(b'\n');
|
||||
}
|
||||
b'r' => {
|
||||
buf.push(b'\r');
|
||||
}
|
||||
b't' => {
|
||||
buf.push(b'\t');
|
||||
}
|
||||
_ => {
|
||||
buf.push(ch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
last_ch = ch;
|
||||
}
|
||||
|
||||
if found_end {
|
||||
String::from_utf8(buf).map_err(|_| "Invalid UTF-8".to_string())
|
||||
} else {
|
||||
Err("Unterminated string".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_buf(&mut self) -> Result<Token, String> {
|
||||
let buf = String::from_utf8(std::mem::take(&mut self.buf)).unwrap_or_default();
|
||||
if self.has_number && !self.has_alpha {
|
||||
self.has_number = false;
|
||||
if self.has_dot {
|
||||
self.has_dot = false;
|
||||
|
||||
buf.parse::<f64>()
|
||||
.map(|f| Token::Constant(Constant::Float(f)))
|
||||
.map_err(|_| format!("Invalid float value {}", buf,))
|
||||
} else {
|
||||
buf.parse::<i64>()
|
||||
.map(|i| Token::Constant(Constant::Integer(i)))
|
||||
.map_err(|_| format!("Invalid integer value {}", buf,))
|
||||
}
|
||||
} else {
|
||||
let has_dot = self.has_dot;
|
||||
let has_number = self.has_number;
|
||||
|
||||
self.has_alpha = false;
|
||||
self.has_number = false;
|
||||
self.has_dot = false;
|
||||
|
||||
if !has_number && !has_dot && [4, 5].contains(&buf.len()) {
|
||||
if buf == "true" {
|
||||
return Ok(Token::Constant(Constant::Integer(1)));
|
||||
} else if buf == "false" {
|
||||
return Ok(Token::Constant(Constant::Integer(0)));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(regex_capture) = buf.strip_prefix('$').and_then(|v| v.parse::<u32>().ok()) {
|
||||
Ok(Token::Capture(regex_capture))
|
||||
} else if let Some((idx, (name, _, num_args))) = FUNCTIONS
|
||||
.iter()
|
||||
.enumerate()
|
||||
.find(|(_, (name, _, _))| name == &buf)
|
||||
{
|
||||
Ok(Token::Function {
|
||||
name: Cow::Borrowed(*name),
|
||||
id: idx as u32,
|
||||
num_args: *num_args,
|
||||
})
|
||||
} else if let Some((name, idx, num_args)) =
|
||||
ASYNC_FUNCTIONS.iter().find(|(name, _, _)| name == &buf)
|
||||
{
|
||||
Ok(Token::Function {
|
||||
name: Cow::Borrowed(*name),
|
||||
id: *idx + FUNCTIONS.len() as u32,
|
||||
num_args: *num_args,
|
||||
})
|
||||
} else if let Some(token) = self.token_map.tokens.get(buf.as_str()) {
|
||||
Ok(token.clone())
|
||||
} else if let Ok(duration) = Duration::parse_value("", &buf) {
|
||||
Ok(Token::Constant(Constant::Integer(
|
||||
duration.as_millis() as i64
|
||||
)))
|
||||
} else {
|
||||
Err(format!("Invalid variable or constant {buf:?}"))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TokenMap {
|
||||
pub fn with_variables<I>(mut self, vars: I) -> Self
|
||||
where
|
||||
I: IntoIterator<Item = (&'static str, u32)>,
|
||||
{
|
||||
for (name, idx) in vars {
|
||||
self.tokens.insert(name, Token::Variable(idx));
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_constants<I, T>(mut self, consts: I) -> Self
|
||||
where
|
||||
I: IntoIterator<Item = (&'static str, T)>,
|
||||
T: Into<Constant>,
|
||||
{
|
||||
for (name, constant) in consts {
|
||||
self.tokens.insert(name, Token::Constant(constant.into()));
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
}
|
174
crates/common/src/lib.rs
Normal file
174
crates/common/src/lib.rs
Normal file
|
@ -0,0 +1,174 @@
|
|||
use std::{net::IpAddr, sync::Arc};
|
||||
|
||||
use ahash::AHashMap;
|
||||
use config::{
|
||||
scripts::SieveCore,
|
||||
server::Server,
|
||||
smtp::{
|
||||
auth::{ArcSealer, DkimSigner},
|
||||
queue::RelayHost,
|
||||
SmtpConfig,
|
||||
},
|
||||
storage::Storage,
|
||||
};
|
||||
use directory::{Directory, Principal, QueryBy};
|
||||
use listener::{acme::AcmeManager, blocked::BlockedIps, tls::Certificate};
|
||||
use mail_send::Credentials;
|
||||
use sieve::Sieve;
|
||||
use store::LookupStore;
|
||||
|
||||
pub mod addresses;
|
||||
pub mod config;
|
||||
pub mod expr;
|
||||
pub mod listener;
|
||||
|
||||
/// Central server state shared across protocols: storage backends, Sieve
/// scripting, SMTP configuration and the blocked-IP list.
pub struct Core {
    pub storage: Storage,
    pub sieve: SieveCore,
    pub smtp: SmtpConfig,
    pub blocked_ips: BlockedIps,
}

/// Intermediate result of parsing the configuration: listeners, TLS
/// certificates (by id and by SNI name), ACME managers and the core state.
pub struct ConfigBuilder {
    pub servers: Vec<Server>,
    pub certificates: AHashMap<String, Arc<Certificate>>,
    // Certificates indexed by SNI hostname.
    pub certificates_sni: AHashMap<String, Arc<Certificate>>,
    pub acme_managers: AHashMap<String, Arc<AcmeManager>>,
    pub core: Core,
}

/// Outcome of an authentication attempt.
pub enum AuthResult<T> {
    /// Credentials accepted; carries the authenticated principal.
    Success(T),
    /// Credentials rejected.
    Failure,
    /// Credentials rejected and the client IP was banned (fail2ban).
    Banned,
}
|
||||
|
||||
impl Core {
    /// Looks up a directory by name; `None` when it is not configured.
    pub fn get_directory(&self, name: &str) -> Option<&Arc<Directory>> {
        self.storage.directories.get(name)
    }

    /// Looks up a directory by name, falling back to the default directory
    /// (logging at debug level) when the name is unknown.
    pub fn get_directory_or_default(&self, name: &str) -> &Arc<Directory> {
        self.storage.directories.get(name).unwrap_or_else(|| {
            tracing::debug!(
                context = "get_directory",
                event = "error",
                directory = name,
                "Directory not found, using default."
            );

            &self.storage.directory
        })
    }

    /// Looks up a lookup store by name, falling back to the default store
    /// (logging at debug level) when the name is unknown.
    pub fn get_lookup_store(&self, name: &str) -> &LookupStore {
        self.storage.lookups.get(name).unwrap_or_else(|| {
            tracing::debug!(
                context = "get_lookup_store",
                event = "error",
                directory = name,
                "Store not found, using default."
            );

            &self.storage.lookup
        })
    }

    /// Returns the ARC sealer registered under `name`, logging a warning
    /// and returning `None` when it does not exist.
    pub fn get_arc_sealer(&self, name: &str) -> Option<&ArcSealer> {
        self.smtp
            .mail_auth
            .sealers
            .get(name)
            .map(|s| s.as_ref())
            .or_else(|| {
                tracing::warn!(
                    context = "get_arc_sealer",
                    event = "error",
                    name = name,
                    "Arc sealer not found."
                );

                None
            })
    }

    /// Returns the DKIM signer registered under `name`, logging a warning
    /// and returning `None` when it does not exist.
    pub fn get_dkim_signer(&self, name: &str) -> Option<&DkimSigner> {
        self.smtp
            .mail_auth
            .signers
            .get(name)
            .map(|s| s.as_ref())
            .or_else(|| {
                tracing::warn!(
                    context = "get_dkim_signer",
                    event = "error",
                    name = name,
                    "DKIM signer not found."
                );

                None
            })
    }

    /// Returns the compiled Sieve script registered under `name`, logging a
    /// warning and returning `None` when it does not exist.
    pub fn get_sieve_script(&self, name: &str) -> Option<&Arc<Sieve>> {
        self.sieve.scripts.get(name).or_else(|| {
            tracing::warn!(
                context = "get_sieve_script",
                event = "error",
                name = name,
                "Sieve script not found."
            );

            None
        })
    }

    /// Returns the SMTP relay host registered under `name`, logging a
    /// warning and returning `None` when it does not exist.
    pub fn get_relay_host(&self, name: &str) -> Option<&RelayHost> {
        self.smtp.queue.relay_hosts.get(name).or_else(|| {
            tracing::warn!(
                context = "get_relay_host",
                event = "error",
                name = name,
                "Remote host not found."
            );

            None
        })
    }

    /// Authenticates `credentials` against `directory`.
    ///
    /// On failure, when fail2ban is enabled, records the failed attempt for
    /// `remote_ip` and returns `Banned` once the configured threshold is
    /// exceeded. `return_member_of` controls whether group membership is
    /// resolved for the returned principal.
    pub async fn authenticate(
        &self,
        directory: &Directory,
        credentials: &Credentials<String>,
        remote_ip: IpAddr,
        return_member_of: bool,
    ) -> directory::Result<AuthResult<Principal<u32>>> {
        if let Some(principal) = directory
            .query(QueryBy::Credentials(credentials), return_member_of)
            .await?
        {
            Ok(AuthResult::Success(principal))
        } else if self.has_fail2ban() {
            // Extract the login name from whichever credential form was used.
            // NOTE(review): for OAuthBearer the raw token is used as the
            // login identifier — confirm this is intended.
            let login = match credentials {
                Credentials::Plain { username, .. }
                | Credentials::XOauth2 { username, .. }
                | Credentials::OAuthBearer { token: username } => username,
            };
            if self.is_fail2banned(remote_ip, login.to_string()).await? {
                tracing::info!(
                    context = "directory",
                    event = "fail2ban",
                    remote_ip = ?remote_ip,
                    login = ?login,
                    "IP address blocked after too many failed login attempts",
                );

                Ok(AuthResult::Banned)
            } else {
                Ok(AuthResult::Failure)
            }
        } else {
            Ok(AuthResult::Failure)
        }
    }
}
|
102
crates/common/src/listener/acme/cache.rs
Normal file
102
crates/common/src/listener/acme/cache.rs
Normal file
|
@ -0,0 +1,102 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::io::ErrorKind;
|
||||
|
||||
use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
|
||||
use ring::digest::{Context, SHA512};
|
||||
use utils::config::ConfigKey;
|
||||
|
||||
use super::{AcmeError, AcmeManager};
|
||||
|
||||
impl AcmeManager {
    // NOTE(review): the cache class labels below look swapped — certificate
    // data uses class "private-key" and account data uses class "cert".
    // Each load/store pair is internally consistent, so behavior is correct,
    // but the labels are misleading. Confirm before renaming: changing a
    // class string invalidates existing cache entries.

    /// Loads the cached certificate for this manager's domains, if present.
    pub(crate) async fn load_cert(&self) -> Result<Option<Vec<u8>>, AcmeError> {
        self.read_if_exists("private-key", self.domains.as_slice())
            .await
            .map_err(AcmeError::CertCacheLoad)
    }

    /// Stores the certificate for this manager's domains in the cache.
    pub(crate) async fn store_cert(&self, cert: &[u8]) -> Result<(), AcmeError> {
        self.write("private-key", self.domains.as_slice(), cert)
            .await
            .map_err(AcmeError::CertCacheStore)
    }

    /// Loads the cached ACME account keyed by contact addresses, if present.
    pub(crate) async fn load_account(&self) -> Result<Option<Vec<u8>>, AcmeError> {
        self.read_if_exists("cert", self.contact.as_slice())
            .await
            .map_err(AcmeError::AccountCacheLoad)
    }

    /// Stores the ACME account keyed by contact addresses in the cache.
    pub(crate) async fn store_account(&self, account: &[u8]) -> Result<(), AcmeError> {
        self.write("cert", self.contact.as_slice(), account)
            .await
            .map_err(AcmeError::AccountCacheStore)
    }

    /// Reads a cache entry from the config store and base64url-decodes it.
    /// Returns `Ok(None)` when the key does not exist.
    async fn read_if_exists(
        &self,
        class: &str,
        items: &[String],
    ) -> Result<Option<Vec<u8>>, std::io::Error> {
        match self.store.config_get(self.build_key(class, items)).await {
            Ok(Some(content)) => match URL_SAFE_NO_PAD.decode(content.as_bytes()) {
                Ok(contents) => Ok(Some(contents)),
                Err(err) => Err(std::io::Error::new(ErrorKind::Other, err)),
            },
            Ok(None) => Ok(None),
            Err(err) => Err(std::io::Error::new(ErrorKind::Other, err)),
        }
    }

    /// Base64url-encodes `contents` and writes it to the config store.
    async fn write(
        &self,
        class: &str,
        items: &[String],
        contents: impl AsRef<[u8]>,
    ) -> Result<(), std::io::Error> {
        self.store
            .config_set([ConfigKey {
                key: self.build_key(class, items),
                value: URL_SAFE_NO_PAD.encode(contents.as_ref()),
            }])
            .await
            .map_err(|err| std::io::Error::new(ErrorKind::Other, err))
    }

    /// Builds a deterministic cache key from the items (domains or contacts)
    /// and the directory URL, hashed with SHA-512 so the key stays bounded.
    fn build_key(&self, class: &str, items: &[String]) -> String {
        let mut ctx = Context::new(&SHA512);
        for el in items {
            ctx.update(el.as_ref());
            // NUL separator prevents ambiguity between concatenated items.
            ctx.update(&[0])
        }
        ctx.update(self.directory_url.as_bytes());

        format!(
            "certificate.acme-{}-{}.{}",
            self.id,
            URL_SAFE_NO_PAD.encode(ctx.finish()),
            class
        )
    }
}
|
365
crates/common/src/listener/acme/directory.rs
Normal file
365
crates/common/src/listener/acme/directory.rs
Normal file
|
@ -0,0 +1,365 @@
|
|||
// Adapted from rustls-acme (https://github.com/FlorianUekermann/rustls-acme), licensed under MIT/Apache-2.0.
|
||||
|
||||
use std::time::Duration;
|
||||
|
||||
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
|
||||
use base64::Engine;
|
||||
use rcgen::{Certificate, CustomExtension, PKCS_ECDSA_P256_SHA256};
|
||||
use reqwest::header::{ToStrError, CONTENT_TYPE};
|
||||
use reqwest::{Method, Response, StatusCode};
|
||||
use ring::error::{KeyRejected, Unspecified};
|
||||
use ring::rand::SystemRandom;
|
||||
use ring::signature::{EcdsaKeyPair, EcdsaSigningAlgorithm, ECDSA_P256_SHA256_FIXED_SIGNING};
|
||||
use rustls::crypto::ring::sign::any_ecdsa_type;
|
||||
use rustls::sign::CertifiedKey;
|
||||
use rustls_pki_types::{CertificateDer, PrivateKeyDer, PrivatePkcs8KeyDer};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
|
||||
use super::jose::{key_authorization_sha256, sign, JoseError};
|
||||
|
||||
pub const LETS_ENCRYPT_STAGING_DIRECTORY: &str =
|
||||
"https://acme-staging-v02.api.letsencrypt.org/directory";
|
||||
pub const LETS_ENCRYPT_PRODUCTION_DIRECTORY: &str =
|
||||
"https://acme-v02.api.letsencrypt.org/directory";
|
||||
pub const ACME_TLS_ALPN_NAME: &[u8] = b"acme-tls/1";
|
||||
|
||||
/// An ACME account: a signing key pair, the directory it was registered
/// with, and the key identifier (kid) returned by the server.
#[derive(Debug)]
pub struct Account {
    pub key_pair: EcdsaKeyPair,
    pub directory: Directory,
    pub kid: String,
}

// ACME requires ES256 (ECDSA P-256 with SHA-256) account keys.
static ALG: &EcdsaSigningAlgorithm = &ECDSA_P256_SHA256_FIXED_SIGNING;

impl Account {
    /// Generates a fresh ECDSA P-256 key pair in PKCS#8 form.
    // The unwrap only fails on RNG failure, which is treated as fatal.
    pub fn generate_key_pair() -> Vec<u8> {
        EcdsaKeyPair::generate_pkcs8(ALG, &SystemRandom::new())
            .unwrap()
            .as_ref()
            .to_vec()
    }

    /// Registers a new account with `directory` using a freshly generated key.
    pub async fn create<'a, S, I>(directory: Directory, contact: I) -> Result<Self, DirectoryError>
    where
        S: AsRef<str> + 'a,
        I: IntoIterator<Item = &'a S>,
    {
        Self::create_with_keypair(directory, contact, &Self::generate_key_pair()).await
    }

    /// Registers (or re-attaches to) an account using an existing PKCS#8
    /// key pair. The returned `kid` comes from the server's Location header.
    pub async fn create_with_keypair<'a, S, I>(
        directory: Directory,
        contact: I,
        key_pair: &[u8],
    ) -> Result<Self, DirectoryError>
    where
        S: AsRef<str> + 'a,
        I: IntoIterator<Item = &'a S>,
    {
        let key_pair = EcdsaKeyPair::from_pkcs8(ALG, key_pair, &SystemRandom::new())?;
        let contact: Vec<&'a str> = contact.into_iter().map(AsRef::<str>::as_ref).collect();
        let payload = json!({
            "termsOfServiceAgreed": true,
            "contact": contact,
        })
        .to_string();
        // First request is signed with the JWK itself (no kid yet).
        let body = sign(
            &key_pair,
            None,
            directory.nonce().await?,
            &directory.new_account,
            &payload,
        )?;
        let response = https(&directory.new_account, Method::POST, Some(body)).await?;
        let kid = get_header(&response, "Location")?;
        Ok(Account {
            key_pair,
            kid,
            directory,
        })
    }

    /// Sends a signed POST to `url` and returns the optional Location
    /// header plus the response body.
    async fn request(
        &self,
        url: impl AsRef<str>,
        payload: &str,
    ) -> Result<(Option<String>, String), DirectoryError> {
        let body = sign(
            &self.key_pair,
            Some(&self.kid),
            self.directory.nonce().await?,
            url.as_ref(),
            payload,
        )?;
        let response = https(url.as_ref(), Method::POST, Some(body)).await?;
        let location = get_header(&response, "Location").ok();
        let body = response.text().await?;
        Ok((location, body))
    }

    /// Creates a new certificate order for `domains`; returns the order URL
    /// and the parsed order object.
    pub async fn new_order(&self, domains: Vec<String>) -> Result<(String, Order), DirectoryError> {
        let domains: Vec<Identifier> = domains.into_iter().map(Identifier::Dns).collect();
        let payload = format!("{{\"identifiers\":{}}}", serde_json::to_string(&domains)?);
        let response = self.request(&self.directory.new_order, &payload).await?;
        let url = response
            .0
            .ok_or(DirectoryError::MissingHeader("Location"))?;
        let order = serde_json::from_str(&response.1)?;
        Ok((url, order))
    }

    /// Fetches an authorization object from `url`.
    pub async fn auth(&self, url: impl AsRef<str>) -> Result<Auth, DirectoryError> {
        let response = self.request(url, "").await?;
        serde_json::from_str(&response.1).map_err(Into::into)
    }

    /// Tells the server the challenge at `url` is ready to be validated.
    pub async fn challenge(&self, url: impl AsRef<str>) -> Result<(), DirectoryError> {
        self.request(&url, "{}").await.map(|_| ())
    }

    /// Fetches the current state of the order at `url`.
    pub async fn order(&self, url: impl AsRef<str>) -> Result<Order, DirectoryError> {
        let response = self.request(&url, "").await?;
        serde_json::from_str(&response.1).map_err(Into::into)
    }

    /// Submits the CSR (base64url-encoded DER) to finalize the order.
    pub async fn finalize(
        &self,
        url: impl AsRef<str>,
        csr: Vec<u8>,
    ) -> Result<Order, DirectoryError> {
        let payload = format!("{{\"csr\":\"{}\"}}", URL_SAFE_NO_PAD.encode(csr));
        let response = self.request(&url, &payload).await?;
        serde_json::from_str(&response.1).map_err(Into::into)
    }

    /// Downloads the issued certificate (PEM) from `url`.
    pub async fn certificate(&self, url: impl AsRef<str>) -> Result<String, DirectoryError> {
        Ok(self.request(&url, "").await?.1)
    }

    /// Builds the self-signed certificate used to answer a TLS-ALPN-01
    /// challenge for `domain`: embeds the SHA-256 key authorization as the
    /// ACME identifier extension. Returns the matching challenge object and
    /// the certified key to present during the validation handshake.
    pub fn tls_alpn_01<'a>(
        &self,
        challenges: &'a [Challenge],
        domain: String,
    ) -> Result<(&'a Challenge, CertifiedKey), DirectoryError> {
        let challenge = challenges
            .iter()
            .find(|c| c.typ == ChallengeType::TlsAlpn01);
        let challenge = match challenge {
            Some(challenge) => challenge,
            None => return Err(DirectoryError::NoTlsAlpn01Challenge),
        };
        let mut params = rcgen::CertificateParams::new(vec![domain]);
        let key_auth = key_authorization_sha256(&self.key_pair, &challenge.token)?;
        params.alg = &PKCS_ECDSA_P256_SHA256;
        params.custom_extensions = vec![CustomExtension::new_acme_identifier(key_auth.as_ref())];
        let cert = Certificate::from_params(params)?;
        // SAFETY of unwrap: the key was just generated as ECDSA P-256 PKCS#8,
        // which any_ecdsa_type accepts.
        let pk = any_ecdsa_type(&PrivateKeyDer::Pkcs8(PrivatePkcs8KeyDer::from(
            cert.serialize_private_key_der(),
        )))
        .unwrap();
        let certified_key =
            CertifiedKey::new(vec![CertificateDer::from(cert.serialize_der()?)], pk);
        Ok((challenge, certified_key))
    }
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Directory {
|
||||
pub new_nonce: String,
|
||||
pub new_account: String,
|
||||
pub new_order: String,
|
||||
}
|
||||
|
||||
impl Directory {
|
||||
pub async fn discover(url: impl AsRef<str>) -> Result<Self, DirectoryError> {
|
||||
Ok(serde_json::from_str(
|
||||
&https(url, Method::GET, None).await?.text().await?,
|
||||
)?)
|
||||
}
|
||||
pub async fn nonce(&self) -> Result<String, DirectoryError> {
|
||||
get_header(
|
||||
&https(&self.new_nonce.as_str(), Method::HEAD, None).await?,
|
||||
"replay-nonce",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Challenge validation methods defined by ACME (RFC 8555 / RFC 8737).
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub enum ChallengeType {
    #[serde(rename = "http-01")]
    Http01,
    #[serde(rename = "dns-01")]
    Dns01,
    #[serde(rename = "tls-alpn-01")]
    TlsAlpn01,
}

/// A certificate order as returned by the ACME server.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Order {
    // Status is flattened so the "status" tag (and, for Valid, the
    // certificate URL) deserializes directly from the order object.
    #[serde(flatten)]
    pub status: OrderStatus,
    pub authorizations: Vec<String>,
    pub finalize: String,
    pub error: Option<Problem>,
}

/// Order lifecycle states; `Valid` carries the certificate download URL.
#[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
#[serde(tag = "status", rename_all = "camelCase")]
pub enum OrderStatus {
    Pending,
    Ready,
    Valid { certificate: String },
    Invalid,
    Processing,
}

/// An authorization object: the identifier being proven and its challenges.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Auth {
    pub status: AuthStatus,
    pub identifier: Identifier,
    pub challenges: Vec<Challenge>,
}

/// Authorization lifecycle states.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum AuthStatus {
    Pending,
    Valid,
    Invalid,
    Revoked,
    Expired,
    Deactivated,
}

/// An identifier being certified; currently only DNS names.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(tag = "type", content = "value", rename_all = "camelCase")]
pub enum Identifier {
    Dns(String),
}

/// A single challenge within an authorization.
#[derive(Debug, Deserialize)]
pub struct Challenge {
    #[serde(rename = "type")]
    pub typ: ChallengeType,
    pub url: String,
    pub token: String,
    pub error: Option<Problem>,
}

/// An ACME problem document describing an error.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Problem {
    #[serde(rename = "type")]
    pub typ: Option<String>,
    pub detail: Option<String>,
}

/// Errors that can occur while talking to an ACME directory.
#[derive(Debug)]
pub enum DirectoryError {
    Io(std::io::Error),
    Rcgen(rcgen::Error),
    Jose(JoseError),
    Json(serde_json::Error),
    HttpRequest(reqwest::Error),
    /// Non-success HTTP status, with the response body as the reason.
    HttpRequestCode { code: StatusCode, reason: String },
    HttpResponseNonStringHeader(ToStrError),
    KeyRejected(KeyRejected),
    Crypto(Unspecified),
    /// A required response header (e.g. Location, replay-nonce) was absent.
    MissingHeader(&'static str),
    /// The authorization offered no tls-alpn-01 challenge.
    NoTlsAlpn01Challenge,
}
|
||||
|
||||
/// Performs an HTTPS request with a 30-second timeout, returning the
/// response on 2xx or `DirectoryError::HttpRequestCode` otherwise.
/// If `body` is given it is sent as `application/jose+json`.
// `unused_mut` is allowed because `builder` is only mutated in debug builds.
#[allow(unused_mut)]
async fn https(
    url: impl AsRef<str>,
    method: Method,
    body: Option<String>,
) -> Result<Response, DirectoryError> {
    let url = url.as_ref();
    let mut builder = reqwest::Client::builder().timeout(Duration::from_secs(30));

    // Debug builds accept self-signed certificates for local ACME test
    // servers only; release builds always verify.
    #[cfg(debug_assertions)]
    {
        builder = builder.danger_accept_invalid_certs(
            url.starts_with("https://localhost") || url.starts_with("https://127.0.0.1"),
        );
    }

    let mut request = builder.build()?.request(method, url);

    if let Some(body) = body {
        request = request
            .header(CONTENT_TYPE, "application/jose+json")
            .body(body);
    }

    let response = request.send().await?;
    if response.status().is_success() {
        Ok(response)
    } else {
        Err(DirectoryError::HttpRequestCode {
            code: response.status(),
            reason: response.text().await?,
        })
    }
}
|
||||
|
||||
fn get_header(response: &Response, header: &'static str) -> Result<String, DirectoryError> {
|
||||
match response.headers().get_all(header).iter().last() {
|
||||
Some(value) => Ok(value.to_str()?.to_string()),
|
||||
None => Err(DirectoryError::MissingHeader(header)),
|
||||
}
|
||||
}
|
||||
|
||||
// Conversions into DirectoryError so `?` works with each underlying
// error type used by the ACME client.

impl From<std::io::Error> for DirectoryError {
    fn from(err: std::io::Error) -> Self {
        Self::Io(err)
    }
}

impl From<rcgen::Error> for DirectoryError {
    fn from(err: rcgen::Error) -> Self {
        Self::Rcgen(err)
    }
}

impl From<JoseError> for DirectoryError {
    fn from(err: JoseError) -> Self {
        Self::Jose(err)
    }
}

impl From<serde_json::Error> for DirectoryError {
    fn from(err: serde_json::Error) -> Self {
        Self::Json(err)
    }
}

impl From<reqwest::Error> for DirectoryError {
    fn from(err: reqwest::Error) -> Self {
        Self::HttpRequest(err)
    }
}

impl From<KeyRejected> for DirectoryError {
    fn from(err: KeyRejected) -> Self {
        Self::KeyRejected(err)
    }
}

impl From<Unspecified> for DirectoryError {
    fn from(err: Unspecified) -> Self {
        Self::Crypto(err)
    }
}

impl From<ToStrError> for DirectoryError {
    fn from(err: ToStrError) -> Self {
        Self::HttpResponseNonStringHeader(err)
    }
}
|
140
crates/common/src/listener/acme/jose.rs
Normal file
140
crates/common/src/listener/acme/jose.rs
Normal file
|
@ -0,0 +1,140 @@
|
|||
// Adapted from rustls-acme (https://github.com/FlorianUekermann/rustls-acme), licensed under MIT/Apache-2.0.
|
||||
|
||||
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
|
||||
use base64::Engine;
|
||||
use ring::digest::{digest, Digest, SHA256};
|
||||
use ring::rand::SystemRandom;
|
||||
use ring::signature::{EcdsaKeyPair, KeyPair};
|
||||
use serde::Serialize;
|
||||
|
||||
/// Builds a JWS (flattened JSON serialization) for an ACME request:
/// base64url-encoded protected header and payload, signed with ES256.
/// When `kid` is `None` (new-account requests) the public JWK is embedded
/// in the protected header instead of a key id.
pub(crate) fn sign(
    key: &EcdsaKeyPair,
    kid: Option<&str>,
    nonce: String,
    url: &str,
    payload: &str,
) -> Result<String, JoseError> {
    // RFC 8555: either "jwk" or "kid" must be present, never both.
    let jwk = match kid {
        None => Some(Jwk::new(key)),
        Some(_) => None,
    };
    let protected = Protected::base64(jwk, kid, nonce, url)?;
    let payload = URL_SAFE_NO_PAD.encode(payload);
    // The signing input is "<protected>.<payload>".
    let combined = format!("{}.{}", &protected, &payload);
    let signature = key.sign(&SystemRandom::new(), combined.as_bytes())?;
    let signature = URL_SAFE_NO_PAD.encode(signature.as_ref());
    let body = Body {
        protected,
        payload,
        signature,
    };
    Ok(serde_json::to_string(&body)?)
}

/// Computes the SHA-256 key authorization digest for a challenge token:
/// SHA256("<token>.<JWK thumbprint>"), as used by tls-alpn-01.
pub(crate) fn key_authorization_sha256(
    key: &EcdsaKeyPair,
    token: &str,
) -> Result<Digest, JoseError> {
    let jwk = Jwk::new(key);
    let key_authorization = format!("{}.{}", token, jwk.thumb_sha256_base64()?);
    Ok(digest(&SHA256, key_authorization.as_bytes()))
}
|
||||
|
||||
/// JWS flattened JSON serialization: protected header, payload and
/// signature, all base64url-encoded.
#[derive(Serialize)]
struct Body {
    protected: String,
    payload: String,
    signature: String,
}

/// JWS protected header. Exactly one of `jwk` or `kid` is serialized;
/// the other is skipped.
#[derive(Serialize)]
struct Protected<'a> {
    alg: &'static str,
    #[serde(skip_serializing_if = "Option::is_none")]
    jwk: Option<Jwk>,
    #[serde(skip_serializing_if = "Option::is_none")]
    kid: Option<&'a str>,
    nonce: String,
    url: &'a str,
}

impl<'a> Protected<'a> {
    /// Serializes the protected header as base64url-encoded JSON,
    /// always using the ES256 algorithm.
    fn base64(
        jwk: Option<Jwk>,
        kid: Option<&'a str>,
        nonce: String,
        url: &'a str,
    ) -> Result<String, JoseError> {
        let protected = Self {
            alg: "ES256",
            jwk,
            kid,
            nonce,
            url,
        };
        let protected = serde_json::to_vec(&protected)?;
        Ok(URL_SAFE_NO_PAD.encode(protected))
    }
}

/// Public JSON Web Key for a P-256 ECDSA key (RFC 7517).
#[derive(Serialize)]
struct Jwk {
    alg: &'static str,
    crv: &'static str,
    kty: &'static str,
    #[serde(rename = "use")]
    u: &'static str,
    // Base64url-encoded affine coordinates of the public point.
    x: String,
    y: String,
}

impl Jwk {
    /// Builds the JWK from the key's uncompressed public point:
    /// byte 0 is the 0x04 marker, then 32 bytes x, then 32 bytes y.
    pub(crate) fn new(key: &EcdsaKeyPair) -> Self {
        let (x, y) = key.public_key().as_ref()[1..].split_at(32);
        Self {
            alg: "ES256",
            crv: "P-256",
            kty: "EC",
            u: "sig",
            x: URL_SAFE_NO_PAD.encode(x),
            y: URL_SAFE_NO_PAD.encode(y),
        }
    }
    /// Computes the RFC 7638 JWK thumbprint: SHA-256 of the canonical
    /// member subset (crv, kty, x, y), base64url-encoded.
    pub(crate) fn thumb_sha256_base64(&self) -> Result<String, JoseError> {
        let jwk_thumb = JwkThumb {
            crv: self.crv,
            kty: self.kty,
            x: &self.x,
            y: &self.y,
        };
        let json = serde_json::to_vec(&jwk_thumb)?;
        let hash = digest(&SHA256, &json);
        Ok(URL_SAFE_NO_PAD.encode(hash))
    }
}

/// Canonical member subset used for the JWK thumbprint (RFC 7638 requires
/// these fields only, in lexicographic order — serde emits them as declared).
#[derive(Serialize)]
struct JwkThumb<'a> {
    crv: &'a str,
    kty: &'a str,
    x: &'a str,
    y: &'a str,
}

/// Errors raised while building or signing JOSE structures.
#[derive(Debug)]
pub enum JoseError {
    Json(serde_json::Error),
    Crypto(ring::error::Unspecified),
}

impl From<serde_json::Error> for JoseError {
    fn from(err: serde_json::Error) -> Self {
        Self::Json(err)
    }
}

impl From<ring::error::Unspecified> for JoseError {
    fn from(err: ring::error::Unspecified) -> Self {
        Self::Crypto(err)
    }
}
|
208
crates/common/src/listener/acme/mod.rs
Normal file
208
crates/common/src/listener/acme/mod.rs
Normal file
|
@ -0,0 +1,208 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
pub mod cache;
|
||||
pub mod directory;
|
||||
pub mod jose;
|
||||
pub mod order;
|
||||
pub mod resolver;
|
||||
|
||||
use std::{
|
||||
fmt::Debug,
|
||||
sync::{
|
||||
atomic::{AtomicBool, Ordering},
|
||||
Arc,
|
||||
},
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use ahash::AHashMap;
|
||||
use arc_swap::ArcSwap;
|
||||
use parking_lot::Mutex;
|
||||
use rustls::sign::CertifiedKey;
|
||||
use store::Store;
|
||||
use tokio::sync::watch;
|
||||
|
||||
use crate::config::server::tls::build_self_signed_cert;
|
||||
|
||||
use self::{
|
||||
directory::Account,
|
||||
order::{CertParseError, OrderError},
|
||||
};
|
||||
|
||||
/// Manages the ACME (RFC 8555) certificate lifecycle for a set of domains:
/// account key handling, order placement, TLS-ALPN-01 challenges and renewal.
pub struct AcmeManager {
    // Identifier of this ACME provider instance.
    id: String,
    // URL of the ACME directory endpoint (e.g. Let's Encrypt).
    pub(crate) directory_url: String,
    // Domain names the certificate must cover.
    pub(crate) domains: Vec<String>,
    // Account contact addresses, normalized to "mailto:" URIs in `new`.
    contact: Vec<String>,
    // How long before expiry a renewal should be attempted.
    renew_before: chrono::Duration,
    // Backing store used to cache the account key and certificates.
    store: Store,
    // Serialized ACME account key; hot-swappable without blocking readers.
    account_key: ArcSwap<Vec<u8>>,
    // Per-domain TLS-ALPN-01 challenge certificates, keyed by SNI name.
    auth_keys: Mutex<AHashMap<String, Arc<CertifiedKey>>>,
    // True while an order is running; routes ALPN challenge handshakes.
    order_in_progress: AtomicBool,
    // Currently served certificate (self-signed until ACME succeeds).
    cert: ArcSwap<CertifiedKey>,
}
|
||||
|
||||
/// Top-level errors produced by the ACME certificate manager.
#[derive(Debug)]
pub enum AcmeError {
    /// Failed to load a cached certificate.
    CertCacheLoad(std::io::Error),
    /// Failed to load the cached account key.
    AccountCacheLoad(std::io::Error),
    /// Failed to store a certificate in the cache.
    CertCacheStore(std::io::Error),
    /// Failed to store the account key in the cache.
    AccountCacheStore(std::io::Error),
    /// A cached certificate could not be parsed.
    CachedCertParse(CertParseError),
    /// The ACME order workflow failed.
    Order(OrderError),
    /// A freshly issued certificate could not be parsed.
    NewCertParse(CertParseError),
}
|
||||
|
||||
impl AcmeManager {
|
||||
pub fn new(
|
||||
id: String,
|
||||
directory_url: String,
|
||||
domains: Vec<String>,
|
||||
contact: Vec<String>,
|
||||
renew_before: Duration,
|
||||
store: Store,
|
||||
) -> utils::config::Result<Self> {
|
||||
Ok(AcmeManager {
|
||||
id,
|
||||
directory_url,
|
||||
contact: contact
|
||||
.into_iter()
|
||||
.map(|c| {
|
||||
if !c.starts_with("mailto:") {
|
||||
format!("mailto:{}", c)
|
||||
} else {
|
||||
c
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
renew_before: chrono::Duration::from_std(renew_before).unwrap(),
|
||||
store,
|
||||
account_key: ArcSwap::from_pointee(Vec::new()),
|
||||
auth_keys: Mutex::new(AHashMap::new()),
|
||||
order_in_progress: false.into(),
|
||||
cert: ArcSwap::from_pointee(build_self_signed_cert(&domains)?),
|
||||
domains,
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn init(&self) -> Result<Duration, AcmeError> {
|
||||
// Load account key from cache or generate a new one
|
||||
if let Some(account_key) = self.load_account().await? {
|
||||
self.account_key.store(Arc::new(account_key));
|
||||
} else {
|
||||
let account_key = Account::generate_key_pair();
|
||||
self.store_account(&account_key).await?;
|
||||
self.account_key.store(Arc::new(account_key));
|
||||
}
|
||||
|
||||
// Load certificate from cache or request a new one
|
||||
Ok(if let Some(pem) = self.load_cert().await? {
|
||||
self.process_cert(pem, true).await?
|
||||
} else {
|
||||
Duration::from_millis(1000)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn has_order_in_progress(&self) -> bool {
|
||||
self.order_in_progress.load(Ordering::Relaxed)
|
||||
}
|
||||
}
|
||||
|
||||
/// Spawns the background renewal task for an ACME manager.
pub trait SpawnAcme {
    // Consumes the handle and runs the renewal loop until `shutdown_rx` fires.
    fn spawn(self, shutdown_rx: watch::Receiver<bool>);
}
|
||||
|
||||
impl SpawnAcme for Arc<AcmeManager> {
    /// Runs the certificate renewal loop on the Tokio runtime until the
    /// shutdown channel signals, renewing whenever the current certificate
    /// approaches its renewal date.
    fn spawn(self, mut shutdown_rx: watch::Receiver<bool>) {
        tokio::spawn(async move {
            let acme = self;
            // Initial load: the delay until renewal of the cached
            // certificate, or ~1s when none is cached. A failure here is
            // fatal for this task.
            let mut renew_at = match acme.init().await {
                Ok(renew_at) => renew_at,
                Err(err) => {
                    tracing::error!(
                        context = "acme",
                        event = "error",
                        error = ?err,
                        "Failed to initialize ACME certificate manager.");

                    return;
                }
            };

            loop {
                tokio::select! {
                    // Renewal timer elapsed: order new certificates.
                    _ = tokio::time::sleep(renew_at) => {
                        tracing::info!(
                            context = "acme",
                            event = "order",
                            domains = ?acme.domains,
                            "Ordering certificates.");

                        match acme.renew().await {
                            Ok(renew_at_) => {
                                renew_at = renew_at_;
                                tracing::info!(
                                    context = "acme",
                                    event = "success",
                                    domains = ?acme.domains,
                                    next_renewal = ?renew_at,
                                    "Certificates renewed.");
                            },
                            Err(err) => {
                                tracing::error!(
                                    context = "acme",
                                    event = "error",
                                    error = ?err,
                                    "Failed to renew certificates.");

                                // Retry in one hour after a failed renewal.
                                renew_at = Duration::from_secs(3600);
                            },
                        }

                    },
                    // Server shutting down: end the task.
                    _ = shutdown_rx.changed() => {
                        tracing::debug!(
                            context = "acme",
                            event = "shutdown",
                            domains = ?acme.domains,
                            "ACME certificate manager shutting down.");

                        break;
                    }
                };
            }
        });
    }
}
|
||||
|
||||
impl Debug for AcmeManager {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("AcmeManager")
|
||||
.field("directory_url", &self.directory_url)
|
||||
.field("domains", &self.domains)
|
||||
.field("contact", &self.contact)
|
||||
.field("account_key", &self.account_key)
|
||||
.finish()
|
||||
}
|
||||
}
|
300
crates/common/src/listener/acme/order.rs
Normal file
300
crates/common/src/listener/acme/order.rs
Normal file
|
@ -0,0 +1,300 @@
|
|||
// Adapted from rustls-acme (https://github.com/FlorianUekermann/rustls-acme), licensed under MIT/Apache-2.0.
|
||||
|
||||
use chrono::{DateTime, TimeZone, Utc};
|
||||
use futures::future::try_join_all;
|
||||
use rcgen::{CertificateParams, DistinguishedName, PKCS_ECDSA_P256_SHA256};
|
||||
use rustls::crypto::ring::sign::any_ecdsa_type;
|
||||
use rustls::sign::CertifiedKey;
|
||||
use rustls_pki_types::{CertificateDer, PrivateKeyDer, PrivatePkcs8KeyDer};
|
||||
use std::fmt::Debug;
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use x509_parser::parse_x509_certificate;
|
||||
|
||||
use crate::listener::acme::directory::Identifier;
|
||||
|
||||
use super::directory::{Account, Auth, AuthStatus, Directory, DirectoryError, Order, OrderStatus};
|
||||
use super::jose::JoseError;
|
||||
use super::{AcmeError, AcmeManager};
|
||||
|
||||
/// Errors produced while driving an ACME order to completion.
#[derive(Debug)]
pub enum OrderError {
    /// A directory/account API call failed.
    Acme(DirectoryError),
    /// Generating the certificate key pair or CSR with rcgen failed.
    Rcgen(rcgen::Error),
    /// The order reached the `invalid` state.
    BadOrder(Order),
    /// An authorization reached an unexpected state.
    BadAuth(Auth),
    /// An authorization stayed pending after all retry attempts (holds the
    /// domain name).
    TooManyAttemptsAuth(String),
    /// The order remained in `processing` past the polling deadline.
    ProcessingTimeout(Order),
}
|
||||
|
||||
/// Errors produced while parsing a cached or freshly issued certificate PEM.
#[derive(Debug)]
pub enum CertParseError {
    /// The leaf X.509 certificate could not be parsed.
    X509(x509_parser::nom::Err<x509_parser::error::X509Error>),
    /// The PEM envelope could not be parsed.
    Pem(pem::PemError),
    /// Fewer than two PEM blocks found (a private key plus at least one
    /// certificate is required); holds the count seen.
    TooFewPem(usize),
    /// The first PEM block is not a usable PKCS#8 ECDSA private key.
    InvalidPrivateKey,
}
|
||||
|
||||
impl AcmeManager {
|
||||
pub(crate) async fn process_cert(
|
||||
&self,
|
||||
pem: Vec<u8>,
|
||||
cached: bool,
|
||||
) -> Result<Duration, AcmeError> {
|
||||
let (cert, validity) = match (parse_cert(&pem), cached) {
|
||||
(Ok(r), _) => r,
|
||||
(Err(err), cached) => {
|
||||
return match cached {
|
||||
true => Err(AcmeError::CachedCertParse(err)),
|
||||
false => Err(AcmeError::NewCertParse(err)),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
self.set_cert(Arc::new(cert));
|
||||
|
||||
let renew_at = (validity[1] - self.renew_before - Utc::now())
|
||||
.max(chrono::Duration::zero())
|
||||
.to_std()
|
||||
.unwrap_or_default();
|
||||
let renewal_date = validity[1] - self.renew_before;
|
||||
|
||||
tracing::info!(
|
||||
context = "acme",
|
||||
event = "process-cert",
|
||||
valid_not_before = %validity[0],
|
||||
valid_not_after = %validity[1],
|
||||
renewal_date = ?renewal_date,
|
||||
domains = ?self.domains,
|
||||
"Loaded certificate for domains {:?}", self.domains);
|
||||
|
||||
if !cached {
|
||||
self.store_cert(&pem).await?;
|
||||
}
|
||||
|
||||
Ok(renew_at)
|
||||
}
|
||||
|
||||
pub async fn renew(&self) -> Result<Duration, AcmeError> {
|
||||
let mut backoff = 0;
|
||||
self.order_in_progress.store(true, Ordering::Relaxed);
|
||||
loop {
|
||||
match self.order().await {
|
||||
Ok(pem) => return self.process_cert(pem, false).await,
|
||||
Err(err) if backoff < 16 => {
|
||||
tracing::debug!(
|
||||
context = "acme",
|
||||
event = "renew-backoff",
|
||||
domains = ?self.domains,
|
||||
attempt = backoff,
|
||||
reason = ?err,
|
||||
"Failed to renew certificate, backing off for {} seconds",
|
||||
1 << backoff);
|
||||
backoff = (backoff + 1).min(16);
|
||||
tokio::time::sleep(Duration::from_secs(1 << backoff)).await;
|
||||
}
|
||||
Err(err) => return Err(AcmeError::Order(err)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn order(&self) -> Result<Vec<u8>, OrderError> {
|
||||
let directory = Directory::discover(&self.directory_url).await?;
|
||||
let account = Account::create_with_keypair(
|
||||
directory,
|
||||
&self.contact,
|
||||
self.account_key.load().as_slice(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut params = CertificateParams::new(self.domains.clone());
|
||||
params.distinguished_name = DistinguishedName::new();
|
||||
params.alg = &PKCS_ECDSA_P256_SHA256;
|
||||
let cert = rcgen::Certificate::from_params(params)?;
|
||||
|
||||
let (order_url, mut order) = account.new_order(self.domains.clone()).await?;
|
||||
loop {
|
||||
match order.status {
|
||||
OrderStatus::Pending => {
|
||||
let auth_futures = order
|
||||
.authorizations
|
||||
.iter()
|
||||
.map(|url| self.authorize(&account, url));
|
||||
try_join_all(auth_futures).await?;
|
||||
tracing::info!(
|
||||
context = "acme",
|
||||
event = "auth-complete",
|
||||
domains = ?self.domains.as_slice(),
|
||||
"Completed all authorizations"
|
||||
);
|
||||
order = account.order(&order_url).await?;
|
||||
}
|
||||
OrderStatus::Processing => {
|
||||
for i in 0u64..10 {
|
||||
tracing::info!(
|
||||
context = "acme",
|
||||
event = "processing",
|
||||
domains = ?self.domains.as_slice(),
|
||||
attempt = i,
|
||||
"Processing order"
|
||||
);
|
||||
tokio::time::sleep(Duration::from_secs(1u64 << i)).await;
|
||||
order = account.order(&order_url).await?;
|
||||
if order.status != OrderStatus::Processing {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if order.status == OrderStatus::Processing {
|
||||
return Err(OrderError::ProcessingTimeout(order));
|
||||
}
|
||||
}
|
||||
OrderStatus::Ready => {
|
||||
tracing::info!(
|
||||
context = "acme",
|
||||
event = "csr-send",
|
||||
domains = ?self.domains.as_slice(),
|
||||
"Sending CSR"
|
||||
);
|
||||
|
||||
let csr = cert.serialize_request_der()?;
|
||||
order = account.finalize(order.finalize, csr).await?
|
||||
}
|
||||
OrderStatus::Valid { certificate } => {
|
||||
tracing::info!(
|
||||
context = "acme",
|
||||
event = "download",
|
||||
domains = ?self.domains.as_slice(),
|
||||
"Downloading certificate"
|
||||
);
|
||||
|
||||
let pem = [
|
||||
&cert.serialize_private_key_pem(),
|
||||
"\n",
|
||||
&account.certificate(certificate).await?,
|
||||
]
|
||||
.concat();
|
||||
return Ok(pem.into_bytes());
|
||||
}
|
||||
OrderStatus::Invalid => {
|
||||
tracing::warn!(
|
||||
context = "acme",
|
||||
event = "error",
|
||||
reason = "invalid-order",
|
||||
domains = ?self.domains.as_slice(),
|
||||
"Invalid order"
|
||||
);
|
||||
|
||||
return Err(OrderError::BadOrder(order));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn authorize(&self, account: &Account, url: &String) -> Result<(), OrderError> {
|
||||
let auth = account.auth(url).await?;
|
||||
let (domain, challenge_url) = match auth.status {
|
||||
AuthStatus::Pending => {
|
||||
let Identifier::Dns(domain) = auth.identifier;
|
||||
tracing::info!(
|
||||
context = "acme",
|
||||
event = "challenge",
|
||||
domain = domain,
|
||||
"Requesting challenge for domain {domain}"
|
||||
);
|
||||
let (challenge, auth_key) =
|
||||
account.tls_alpn_01(&auth.challenges, domain.clone())?;
|
||||
self.set_auth_key(domain.clone(), Arc::new(auth_key));
|
||||
account.challenge(&challenge.url).await?;
|
||||
(domain, challenge.url.clone())
|
||||
}
|
||||
AuthStatus::Valid => return Ok(()),
|
||||
_ => return Err(OrderError::BadAuth(auth)),
|
||||
};
|
||||
for i in 0u64..5 {
|
||||
tokio::time::sleep(Duration::from_secs(1u64 << i)).await;
|
||||
let auth = account.auth(url).await?;
|
||||
match auth.status {
|
||||
AuthStatus::Pending => {
|
||||
tracing::info!(
|
||||
context = "acme",
|
||||
event = "auth-pending",
|
||||
domain = domain,
|
||||
attempt = i,
|
||||
"Authorization for domain {domain} is still pending",
|
||||
);
|
||||
account.challenge(&challenge_url).await?
|
||||
}
|
||||
AuthStatus::Valid => return Ok(()),
|
||||
_ => return Err(OrderError::BadAuth(auth)),
|
||||
}
|
||||
}
|
||||
Err(OrderError::TooManyAttemptsAuth(domain))
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a PEM bundle (private key first, then the certificate chain) into
/// a rustls `CertifiedKey` plus the leaf's `[not_before, not_after]` window.
fn parse_cert(pem: &[u8]) -> Result<(CertifiedKey, [DateTime<Utc>; 2]), CertParseError> {
    let mut pems = pem::parse_many(pem)?;
    // Need at least a key and one certificate.
    if pems.len() < 2 {
        return Err(CertParseError::TooFewPem(pems.len()));
    }
    // First block is the PKCS#8 ECDSA private key.
    let pk = match any_ecdsa_type(&PrivateKeyDer::Pkcs8(PrivatePkcs8KeyDer::from(
        pems.remove(0).contents(),
    ))) {
        Ok(pk) => pk,
        Err(_) => return Err(CertParseError::InvalidPrivateKey),
    };
    // Remaining blocks form the certificate chain, leaf first.
    let cert_chain: Vec<CertificateDer> = pems
        .into_iter()
        .map(|p| CertificateDer::from(p.into_contents()))
        .collect();
    // Extract the validity window from the leaf certificate.
    let validity = match parse_x509_certificate(&cert_chain[0]) {
        Ok((_, cert)) => {
            let validity = cert.validity();
            [validity.not_before, validity.not_after].map(|t| {
                // Convert ASN.1 time to chrono UTC (second precision).
                Utc.timestamp_opt(t.timestamp(), 0)
                    .earliest()
                    .unwrap_or_default()
            })
        }
        Err(err) => return Err(CertParseError::X509(err)),
    };
    let cert = CertifiedKey::new(cert_chain, pk);
    Ok((cert, validity))
}
|
||||
|
||||
// Error conversions enabling `?` across the order/parse/JOSE layers.

impl From<DirectoryError> for OrderError {
    fn from(err: DirectoryError) -> Self {
        Self::Acme(err)
    }
}

impl From<rcgen::Error> for OrderError {
    fn from(err: rcgen::Error) -> Self {
        Self::Rcgen(err)
    }
}

impl From<x509_parser::nom::Err<x509_parser::error::X509Error>> for CertParseError {
    fn from(err: x509_parser::nom::Err<x509_parser::error::X509Error>) -> Self {
        Self::X509(err)
    }
}

impl From<pem::PemError> for CertParseError {
    fn from(err: pem::PemError) -> Self {
        Self::Pem(err)
    }
}

// JOSE failures surface as directory errors wrapped in an order error.
impl From<JoseError> for OrderError {
    fn from(err: JoseError) -> Self {
        Self::Acme(DirectoryError::Jose(err))
    }
}

impl From<JoseError> for AcmeError {
    fn from(err: JoseError) -> Self {
        Self::Order(OrderError::from(err))
    }
}
|
81
crates/common/src/listener/acme/resolver.rs
Normal file
81
crates/common/src/listener/acme/resolver.rs
Normal file
|
@ -0,0 +1,81 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::sync::{atomic::Ordering, Arc};
|
||||
|
||||
use rustls::{
|
||||
server::{ClientHello, ResolvesServerCert},
|
||||
sign::CertifiedKey,
|
||||
};
|
||||
|
||||
use super::{directory::ACME_TLS_ALPN_NAME, AcmeManager};
|
||||
|
||||
impl AcmeManager {
    /// Installs a new active certificate and ends the challenge phase:
    /// clears the in-progress flag and drops all challenge certificates.
    pub(crate) fn set_cert(&self, cert: Arc<CertifiedKey>) {
        self.cert.store(cert);
        self.order_in_progress.store(false, Ordering::Relaxed);
        self.auth_keys.lock().clear();
    }
    /// Registers the TLS-ALPN-01 challenge certificate for `domain`,
    /// served when the CA validates via SNI.
    pub(crate) fn set_auth_key(&self, domain: String, cert: Arc<CertifiedKey>) {
        self.auth_keys.lock().insert(domain, cert);
    }
}
|
||||
|
||||
impl ResolvesServerCert for AcmeManager {
    /// Serves per-domain TLS-ALPN-01 challenge certificates while an order
    /// is in progress; otherwise serves the regular active certificate.
    fn resolve(&self, client_hello: ClientHello) -> Option<Arc<CertifiedKey>> {
        if self.has_order_in_progress() && client_hello.is_tls_alpn_challenge() {
            // Challenge handshake: pick the challenge cert by SNI name.
            match client_hello.server_name() {
                None => {
                    // The CA always sends SNI; reject challenge handshakes
                    // without it.
                    tracing::debug!(
                        context = "acme",
                        event = "error",
                        reason = "missing-sni",
                        "client did not supply SNI"
                    );
                    None
                }
                Some(domain) => {
                    tracing::trace!(
                        context = "acme",
                        event = "auth-key",
                        domain = %domain,
                        "Found client supplied SNI");

                    self.auth_keys.lock().get(domain).cloned()
                }
            }
        } else {
            // Regular handshake: serve the current certificate.
            self.cert.load().clone().into()
        }
    }
}
|
||||
|
||||
/// Detects TLS-ALPN-01 challenge handshakes (RFC 8737).
pub trait IsTlsAlpnChallenge {
    fn is_tls_alpn_challenge(&self) -> bool;
}

impl IsTlsAlpnChallenge for ClientHello<'_> {
    fn is_tls_alpn_challenge(&self) -> bool {
        // True only when "acme-tls/1" is the sole advertised ALPN protocol.
        self.alpn().into_iter().flatten().eq([ACME_TLS_ALPN_NAME])
    }
}
|
148
crates/common/src/listener/blocked.rs
Normal file
148
crates/common/src/listener/blocked.rs
Normal file
|
@ -0,0 +1,148 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of the Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::{fmt::Debug, net::IpAddr};
|
||||
|
||||
use ahash::AHashSet;
|
||||
use parking_lot::RwLock;
|
||||
use utils::config::{
|
||||
ipmask::{IpAddrMask, IpAddrOrMask},
|
||||
utils::ParseValue,
|
||||
Config, ConfigKey, Rate,
|
||||
};
|
||||
|
||||
use crate::Core;
|
||||
|
||||
/// In-memory view of the configured blocked-IP list plus the optional
/// fail2ban rate limit.
pub struct BlockedIps {
    // Exact addresses; behind a RwLock so bans can be added at runtime.
    ip_addresses: RwLock<AHashSet<IpAddr>>,
    // CIDR network masks; read-only after parsing.
    ip_networks: Vec<IpAddrMask>,
    // Cached `!ip_networks.is_empty()` so the hot path can skip the scan.
    has_networks: bool,
    // Fail2ban rate ("authentication.fail2ban"); None disables auto-banning.
    limiter_rate: Option<Rate>,
}

// Configuration key (and dotted prefix) under which blocked IPs are stored.
pub const BLOCKED_IP_KEY: &str = "server.blocked-ip";
pub const BLOCKED_IP_PREFIX: &str = "server.blocked-ip.";
|
||||
|
||||
impl BlockedIps {
|
||||
pub fn parse(config: &mut Config) -> Self {
|
||||
let mut ip_addresses = AHashSet::new();
|
||||
let mut ip_networks = Vec::new();
|
||||
|
||||
let ips = config
|
||||
.set_values(BLOCKED_IP_KEY)
|
||||
.map(|value| IpAddrOrMask::parse_value(BLOCKED_IP_KEY, value))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for ip in ips {
|
||||
match ip {
|
||||
Ok(IpAddrOrMask::Ip(ip)) => {
|
||||
ip_addresses.insert(ip);
|
||||
}
|
||||
Ok(IpAddrOrMask::Mask(ip)) => {
|
||||
ip_networks.push(ip);
|
||||
}
|
||||
Err(err) => {
|
||||
config.new_parse_error(BLOCKED_IP_KEY, err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
BlockedIps {
|
||||
ip_addresses: RwLock::new(ip_addresses),
|
||||
has_networks: !ip_networks.is_empty(),
|
||||
ip_networks,
|
||||
limiter_rate: config.property_::<Rate>("authentication.fail2ban"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Core {
    /// Records a failed authentication attempt for `ip`/`login` and returns
    /// whether the IP has just been banned.
    ///
    /// Returns `Ok(false)` when fail2ban is disabled or the attempt is still
    /// within the configured rate.
    pub async fn is_fail2banned(&self, ip: IpAddr, login: String) -> store::Result<bool> {
        if let Some(rate) = &self.blocked_ips.limiter_rate {
            // Rate-limit the source IP and the login name separately (keys
            // namespaced with "b:"); the ban triggers when either limit is
            // exceeded. NOTE(review): `is_rate_allowed(..) == None` is
            // treated here as "within the limit" — confirm against the
            // lookup store implementation.
            let is_allowed = self
                .storage
                .lookup
                .is_rate_allowed(format!("b:{}", ip).as_bytes(), rate, false)
                .await?
                .is_none()
                && self
                    .storage
                    .lookup
                    .is_rate_allowed(format!("b:{}", login).as_bytes(), rate, false)
                    .await?
                    .is_none();
            if !is_allowed {
                // Add IP to blocked list
                self.blocked_ips.ip_addresses.write().insert(ip);

                // Write blocked IP to config
                self.storage
                    .data
                    .config_set([ConfigKey {
                        key: format!("{}.{}", BLOCKED_IP_KEY, ip),
                        value: String::new(),
                    }])
                    .await?;

                return Ok(true);
            }
        }

        Ok(false)
    }

    /// Whether fail2ban auto-banning is enabled in the configuration.
    pub fn has_fail2ban(&self) -> bool {
        self.blocked_ips.limiter_rate.is_some()
    }

    /// Whether `ip` is blocked, either as an exact address or by any
    /// configured network mask.
    pub fn is_ip_blocked(&self, ip: &IpAddr) -> bool {
        self.blocked_ips.ip_addresses.read().contains(ip)
            || (self.blocked_ips.has_networks
                && self
                    .blocked_ips
                    .ip_networks
                    .iter()
                    .any(|network| network.matches(ip)))
    }
}
|
||||
|
||||
impl Default for BlockedIps {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
ip_addresses: RwLock::new(AHashSet::new()),
|
||||
ip_networks: Default::default(),
|
||||
has_networks: Default::default(),
|
||||
limiter_rate: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for BlockedIps {
    // Manual impl; `has_networks` is derived data and intentionally omitted.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("BlockedIps")
            .field("ip_addresses", &self.ip_addresses)
            .field("ip_networks", &self.ip_networks)
            .field("limiter_rate", &self.limiter_rate)
            .finish()
    }
}
|
138
crates/common/src/listener/limiter.rs
Normal file
138
crates/common/src/listener/limiter.rs
Normal file
|
@ -0,0 +1,138 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of the Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::{
|
||||
sync::{
|
||||
atomic::{AtomicU64, Ordering},
|
||||
Arc,
|
||||
},
|
||||
time::SystemTime,
|
||||
};
|
||||
|
||||
use utils::config::Rate;
|
||||
|
||||
/// Fixed-window rate limiter.
///
/// State is two relaxed atomics, so counts may be slightly approximate
/// under heavy contention (no lock is taken on the hot path).
#[derive(Debug)]
pub struct RateLimiter {
    // Unix timestamp (seconds) at which the current window resets.
    next_refill: AtomicU64,
    // Number of requests consumed in the current window.
    used_tokens: AtomicU64,
}
|
||||
|
||||
/// Caps the number of simultaneously in-flight requests.
///
/// Cloning shares the counter, so clones enforce the same limit.
#[derive(Debug, Clone)]
pub struct ConcurrencyLimiter {
    // Maximum number of concurrent requests admitted.
    pub max_concurrent: u64,
    // Live request count, shared with the InFlight guards.
    pub concurrent: Arc<AtomicU64>,
}
|
||||
|
||||
/// RAII guard for one in-flight request; dropping it releases the slot
/// claimed from a ConcurrencyLimiter.
#[derive(Default)]
pub struct InFlight {
    // Shared counter owned by the issuing limiter.
    concurrent: Arc<AtomicU64>,
}

impl Drop for InFlight {
    fn drop(&mut self) {
        // Release this request's concurrency slot.
        // NOTE(review): a `Default`-constructed InFlight decrements a counter
        // that was never incremented; harmless only because its Arc is
        // unshared — confirm no caller relies on Default.
        self.concurrent.fetch_sub(1, Ordering::Relaxed);
    }
}
|
||||
|
||||
impl RateLimiter {
|
||||
pub fn new(rate: &Rate) -> Self {
|
||||
RateLimiter {
|
||||
next_refill: (now() + rate.period.as_secs()).into(),
|
||||
used_tokens: 0.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_allowed(&self, rate: &Rate) -> bool {
|
||||
// Check rate limit
|
||||
if self.used_tokens.fetch_add(1, Ordering::Relaxed) < rate.requests {
|
||||
true
|
||||
} else {
|
||||
let now = now();
|
||||
if self.next_refill.load(Ordering::Relaxed) <= now {
|
||||
self.next_refill
|
||||
.store(now + rate.period.as_secs(), Ordering::Relaxed);
|
||||
self.used_tokens.store(1, Ordering::Relaxed);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_allowed_soft(&self, rate: &Rate) -> bool {
|
||||
self.used_tokens.load(Ordering::Relaxed) < rate.requests
|
||||
|| self.next_refill.load(Ordering::Relaxed) <= now()
|
||||
}
|
||||
|
||||
pub fn secs_to_refill(&self) -> u64 {
|
||||
self.next_refill
|
||||
.load(Ordering::Relaxed)
|
||||
.saturating_sub(now())
|
||||
}
|
||||
|
||||
pub fn is_active(&self) -> bool {
|
||||
self.next_refill.load(Ordering::Relaxed) > now()
|
||||
}
|
||||
}
|
||||
|
||||
impl ConcurrencyLimiter {
|
||||
pub fn new(max_concurrent: u64) -> Self {
|
||||
ConcurrencyLimiter {
|
||||
max_concurrent,
|
||||
concurrent: Arc::new(0.into()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_allowed(&self) -> Option<InFlight> {
|
||||
if self.concurrent.load(Ordering::Relaxed) < self.max_concurrent {
|
||||
// Return in-flight request
|
||||
self.concurrent.fetch_add(1, Ordering::Relaxed);
|
||||
Some(InFlight {
|
||||
concurrent: self.concurrent.clone(),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn check_is_allowed(&self) -> bool {
|
||||
self.concurrent.load(Ordering::Relaxed) < self.max_concurrent
|
||||
}
|
||||
|
||||
pub fn is_active(&self) -> bool {
|
||||
self.concurrent.load(Ordering::Relaxed) > 0
|
||||
}
|
||||
}
|
||||
|
||||
impl InFlight {
    /// Current number of in-flight requests on the owning limiter.
    pub fn num_concurrent(&self) -> u64 {
        self.concurrent.load(Ordering::Relaxed)
    }
}
|
||||
|
||||
/// Current Unix time in whole seconds; zero if the clock reads before the
/// epoch.
fn now() -> u64 {
    SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .map(|d| d.as_secs())
        .unwrap_or_default()
}
|
374
crates/common/src/listener/listen.rs
Normal file
374
crates/common/src/listener/listen.rs
Normal file
|
@ -0,0 +1,374 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of the Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::{
|
||||
net::{IpAddr, SocketAddr},
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use arc_swap::ArcSwap;
|
||||
use proxy_header::io::ProxiedStream;
|
||||
use rustls::crypto::ring::cipher_suite::TLS13_AES_128_GCM_SHA256;
|
||||
use tokio::{
|
||||
net::{TcpListener, TcpStream},
|
||||
sync::watch,
|
||||
};
|
||||
use tokio_rustls::server::TlsStream;
|
||||
use tracing::Span;
|
||||
use utils::{config::Config, failed, UnwrapFailure};
|
||||
|
||||
use crate::{
|
||||
config::server::{Listener, Server},
|
||||
ConfigBuilder, Core,
|
||||
};
|
||||
|
||||
use super::{
|
||||
acme::SpawnAcme, limiter::ConcurrencyLimiter, ServerInstance, SessionData, SessionManager,
|
||||
SessionStream, TcpAcceptorResult,
|
||||
};
|
||||
|
||||
impl Server {
    /// Spawns one accept loop per configured listener address.
    ///
    /// Each accepted connection is checked against the blocked-IP list and
    /// the concurrency limit, optionally unwrapped from a PROXY protocol
    /// header when it arrives from a trusted proxy network, and handed to
    /// `manager` as a session. Loops end when `shutdown_rx` signals.
    pub fn spawn(
        self,
        manager: impl SessionManager,
        core: Arc<ArcSwap<Core>>,
        shutdown_rx: watch::Receiver<bool>,
    ) {
        // Prepare instance
        let instance = Arc::new(ServerInstance {
            id: self.id,
            protocol: self.protocol,
            acceptor: self.acceptor,
            proxy_networks: self.proxy_networks,
            limiter: ConcurrencyLimiter::new(self.max_connections),
            shutdown_rx,
        });
        let is_tls = self.tls_implicit;
        let has_proxies = !instance.proxy_networks.is_empty();

        // Spawn listeners
        for listener in self.listeners {
            tracing::info!(
                id = instance.id,
                protocol = ?instance.protocol,
                bind.ip = listener.addr.ip().to_string(),
                bind.port = listener.addr.port(),
                tls = is_tls,
                "Starting listener"
            );
            let local_ip = listener.addr.ip();

            // Obtain TCP options
            let opts = SocketOpts {
                nodelay: listener.nodelay,
                ttl: listener.ttl,
                linger: listener.linger,
            };

            // Bind socket
            let listener = listener.listen();

            // Spawn listener
            let mut shutdown_rx = instance.shutdown_rx.clone();
            let manager = manager.clone();
            let instance = instance.clone();
            let core = core.clone();
            tokio::spawn(async move {
                loop {
                    tokio::select! {
                        stream = listener.accept() => {
                            match stream {
                                Ok((stream, remote_addr)) => {
                                    // Snapshot the current Core (hot-reloadable config).
                                    let core = core.as_ref().load();

                                    if has_proxies && instance.proxy_networks.iter().any(|network| network.matches(&remote_addr.ip())) {
                                        // Connection arrives via a trusted proxy:
                                        // read the PROXY protocol header first to
                                        // recover the real client address.
                                        let instance = instance.clone();
                                        let manager = manager.clone();

                                        // Set socket options
                                        opts.apply(&stream);

                                        tokio::spawn(async move {
                                            match ProxiedStream::create_from_tokio(stream, Default::default()).await {
                                                Ok(stream) =>{
                                                    let remote_addr = stream.proxy_header()
                                                            .proxied_address()
                                                            .map(|addr| addr.source)
                                                            .unwrap_or(remote_addr);
                                                    if let Some(session) = instance.build_session(stream, local_ip, remote_addr, &core) {
                                                        // Spawn session
                                                        manager.spawn(session, is_tls);
                                                    }
                                                }
                                                Err(err) => {
                                                    tracing::trace!(context = "io",
                                                                    event = "error",
                                                                    instance = instance.id,
                                                                    protocol = ?instance.protocol,
                                                                    reason = %err,
                                                                    "Failed to accept proxied TCP connection");
                                                }
                                            }
                                        });
                                    } else if let Some(session) = instance.build_session(stream, local_ip, remote_addr, &core) {
                                        // Set socket options
                                        opts.apply(&session.stream);

                                        // Spawn session
                                        manager.spawn(session, is_tls);
                                    }
                                }
                                Err(err) => {
                                    tracing::trace!(context = "io",
                                                    event = "error",
                                                    instance = instance.id,
                                                    protocol = ?instance.protocol,
                                                    "Failed to accept TCP connection: {}", err);
                                }
                            }
                        },
                        _ = shutdown_rx.changed() => {
                            tracing::debug!(
                                event = "shutdown",
                                instance = instance.id,
                                protocol = ?instance.protocol,
                                "Listener shutting down.");
                            manager.shutdown().await;
                            break;
                        }
                    };
                }
            });
        }
    }
}
|
||||
|
||||
/// Builds a [`SessionData`] for a newly accepted connection, or rejects it.
trait BuildSession {
    /// Wraps an accepted stream in session data.
    ///
    /// Returns `None` when the remote IP is blocked or the instance's
    /// concurrency limit has been reached; the caller then drops the stream.
    fn build_session<T: SessionStream>(
        &self,
        stream: T,
        local_ip: IpAddr,
        remote_addr: SocketAddr,
        core: &Core,
    ) -> Option<SessionData<T>>;
}
|
||||
|
||||
impl BuildSession for Arc<ServerInstance> {
|
||||
fn build_session<T: SessionStream>(
|
||||
&self,
|
||||
stream: T,
|
||||
local_ip: IpAddr,
|
||||
remote_addr: SocketAddr,
|
||||
core: &Core,
|
||||
) -> Option<SessionData<T>> {
|
||||
// Convert mapped IPv6 addresses to IPv4
|
||||
let remote_ip = match remote_addr.ip() {
|
||||
IpAddr::V6(ip) => ip
|
||||
.to_ipv4_mapped()
|
||||
.map(IpAddr::V4)
|
||||
.unwrap_or(IpAddr::V6(ip)),
|
||||
remote_ip => remote_ip,
|
||||
};
|
||||
let remote_port = remote_addr.port();
|
||||
|
||||
// Check if blocked
|
||||
if core.is_ip_blocked(&remote_ip) {
|
||||
tracing::debug!(
|
||||
context = "listener",
|
||||
event = "blocked",
|
||||
instance = self.id,
|
||||
protocol = ?self.protocol,
|
||||
remote.ip = remote_ip.to_string(),
|
||||
remote.port = remote_port,
|
||||
"Dropping connection from blocked IP."
|
||||
);
|
||||
None
|
||||
} else if let Some(in_flight) = self.limiter.is_allowed() {
|
||||
// Enforce concurrency
|
||||
SessionData {
|
||||
stream,
|
||||
in_flight,
|
||||
span: tracing::info_span!(
|
||||
"session",
|
||||
instance = self.id,
|
||||
protocol = ?self.protocol,
|
||||
remote.ip = remote_ip.to_string(),
|
||||
remote.port = remote_port,
|
||||
),
|
||||
local_ip,
|
||||
remote_ip,
|
||||
remote_port,
|
||||
instance: self.clone(),
|
||||
}
|
||||
.into()
|
||||
} else {
|
||||
tracing::info!(
|
||||
context = "throttle",
|
||||
event = "too-many-requests",
|
||||
instance = self.id,
|
||||
protocol = ?self.protocol,
|
||||
remote.ip = remote_ip.to_string(),
|
||||
remote.port = remote_port,
|
||||
max_concurrent = self.limiter.max_concurrent,
|
||||
"Too many concurrent connections."
|
||||
);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// TCP socket options applied to each accepted connection.
pub struct SocketOpts {
    /// Whether to disable Nagle's algorithm (TCP_NODELAY).
    pub nodelay: bool,
    /// IP TTL to set on the socket, when configured.
    pub ttl: Option<u32>,
    /// SO_LINGER duration to set, when configured.
    pub linger: Option<Duration>,
}
|
||||
|
||||
impl SocketOpts {
|
||||
pub fn apply(&self, stream: &TcpStream) {
|
||||
// Set TCP options
|
||||
if let Err(err) = stream.set_nodelay(self.nodelay) {
|
||||
tracing::warn!(
|
||||
context = "tcp",
|
||||
event = "error",
|
||||
"Failed to set no-delay: {}",
|
||||
err
|
||||
);
|
||||
}
|
||||
if let Some(ttl) = self.ttl {
|
||||
if let Err(err) = stream.set_ttl(ttl) {
|
||||
tracing::warn!(
|
||||
context = "tcp",
|
||||
event = "error",
|
||||
"Failed to set TTL: {}",
|
||||
err
|
||||
);
|
||||
}
|
||||
}
|
||||
if self.linger.is_some() {
|
||||
if let Err(err) = stream.set_linger(self.linger) {
|
||||
tracing::warn!(
|
||||
context = "tcp",
|
||||
event = "error",
|
||||
"Failed to set linger: {}",
|
||||
err
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ConfigBuilder {
    /// Binds every configured listener socket, then drops root privileges
    /// when `server.run-as.user` is set (non-MSVC targets only).
    ///
    /// Binding happens first so privileged ports (< 1024) can still be
    /// claimed before the process gives up root.
    pub fn bind(&self, config: &Config) {
        // Bind as root
        for server in &self.servers {
            for listener in &server.listeners {
                listener
                    .socket
                    .bind(listener.addr)
                    .failed(&format!("Failed to bind to {}", listener.addr));
            }
        }

        // Drop privileges
        #[cfg(not(target_env = "msvc"))]
        {
            if let Some(run_as_user) = config.value("server.run-as.user") {
                let mut pd = privdrop::PrivDrop::default().user(run_as_user);
                if let Some(run_as_group) = config.value("server.run-as.group") {
                    pd = pd.group(run_as_group);
                }
                pd.apply().failed("Failed to drop privileges");
            }
        }
    }

    /// Spawns every configured server and ACME manager, returning the
    /// watch-channel pair used to signal shutdown to all of them.
    pub fn spawn(
        self,
        spawn: impl Fn(Server, watch::Receiver<bool>),
    ) -> (watch::Sender<bool>, watch::Receiver<bool>) {
        // Spawn listeners; each receives its own clone of the shutdown receiver.
        let (shutdown_tx, shutdown_rx) = watch::channel(false);
        for server in self.servers {
            spawn(server, shutdown_rx.clone());
        }

        // Spawn ACME managers
        for (_, acme_manager) in self.acme_managers {
            acme_manager.spawn(shutdown_rx.clone());
        }

        (shutdown_tx, shutdown_rx)
    }
}
|
||||
|
||||
impl Listener {
    /// Starts listening on the bound socket, defaulting the accept backlog
    /// to 1024 when none was configured; delegates errors to `failed`,
    /// which reports the address that could not be listened on.
    pub fn listen(self) -> TcpListener {
        self.socket
            .listen(self.backlog.unwrap_or(1024))
            .unwrap_or_else(|err| failed(&format!("Failed to listen on {}: {}", self.addr, err)))
    }
}
|
||||
|
||||
impl ServerInstance {
    /// Performs the server-side TLS handshake on `stream`.
    ///
    /// On success the encrypted stream is returned and the negotiated
    /// parameters are logged under the session `span`; on failure the
    /// error is logged and `Err(())` is returned so the caller can
    /// drop the connection.
    pub async fn tls_accept<T: SessionStream>(
        &self,
        stream: T,
        span: &Span,
    ) -> Result<TlsStream<T>, ()> {
        match self.acceptor.accept(stream).await {
            // The acceptor produced a TLS handshake future; drive it to completion.
            TcpAcceptorResult::Tls(accept) => match accept.await {
                Ok(stream) => {
                    // The unwrap_or fallbacks only cover the theoretical case where
                    // negotiation data is unavailable after a successful handshake.
                    tracing::info!(
                        parent: span,
                        context = "tls",
                        event = "handshake",
                        version = ?stream.get_ref().1.protocol_version().unwrap_or(rustls::ProtocolVersion::TLSv1_3),
                        cipher = ?stream.get_ref().1.negotiated_cipher_suite().unwrap_or(TLS13_AES_128_GCM_SHA256),
                    );
                    Ok(stream)
                }
                Err(err) => {
                    tracing::debug!(
                        parent: span,
                        context = "tls",
                        event = "error",
                        "Failed to accept TLS connection: {}",
                        err
                    );
                    Err(())
                }
            },
            // This instance has no usable TLS configuration for the stream.
            TcpAcceptorResult::Plain(_) | TcpAcceptorResult::Close => {
                tracing::debug!(
                    parent: span,
                    context = "tls",
                    event = "error",
                    "Failed to accept TLS connection: {}",
                    "TLS is not configured for this server."
                );
                Err(())
            }
        }
    }
}
|
164
crates/common/src/listener/mod.rs
Normal file
164
crates/common/src/listener/mod.rs
Normal file
|
@ -0,0 +1,164 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of the Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::{borrow::Cow, net::IpAddr, sync::Arc};
|
||||
|
||||
use rustls::ServerConfig;
|
||||
use std::fmt::Debug;
|
||||
use tokio::{
|
||||
io::{AsyncRead, AsyncWrite},
|
||||
sync::watch,
|
||||
};
|
||||
use tokio_rustls::{Accept, TlsAcceptor};
|
||||
use utils::config::ipmask::IpAddrMask;
|
||||
|
||||
use crate::config::server::ServerProtocol;
|
||||
|
||||
use self::{
|
||||
acme::AcmeManager,
|
||||
limiter::{ConcurrencyLimiter, InFlight},
|
||||
};
|
||||
|
||||
pub mod acme;
|
||||
pub mod blocked;
|
||||
pub mod limiter;
|
||||
pub mod listen;
|
||||
pub mod stream;
|
||||
pub mod tls;
|
||||
|
||||
/// A running listener instance and the shared state its sessions need.
pub struct ServerInstance {
    /// Listener id used in log events.
    pub id: String,
    /// Application protocol served by this listener.
    pub protocol: ServerProtocol,
    /// TLS acceptor (or plain passthrough) for incoming streams.
    pub acceptor: TcpAcceptor,
    /// Per-instance concurrency limiter.
    pub limiter: ConcurrencyLimiter,
    /// Networks whose connections are expected to carry a PROXY protocol header.
    pub proxy_networks: Vec<IpAddrMask>,
    /// Receives `true` when the instance should shut down.
    pub shutdown_rx: watch::Receiver<bool>,
}

/// How incoming TCP streams are (optionally) upgraded to TLS.
#[derive(Default)]
pub enum TcpAcceptor {
    /// Plain rustls acceptor with a fixed server configuration.
    Tls(TlsAcceptor),
    /// ACME-aware acceptor: serves `challenge` to TLS-ALPN-01 validation
    /// requests and `default` to regular handshakes.
    Acme {
        challenge: Arc<ServerConfig>,
        default: Arc<ServerConfig>,
        manager: Arc<AcmeManager>,
    },
    /// No TLS; streams are passed through untouched.
    #[default]
    Plain,
}

/// Outcome of [`TcpAcceptor::accept`].
#[allow(clippy::large_enum_variant)]
pub enum TcpAcceptorResult<IO>
where
    IO: AsyncRead + AsyncWrite + Unpin,
{
    /// TLS handshake in progress; await it to obtain the encrypted stream.
    Tls(Accept<IO>),
    /// Stream handed back unencrypted.
    Plain(IO),
    /// Connection must be closed (e.g. after answering an ACME challenge).
    Close,
}

/// Everything a protocol handler needs to serve one connection.
pub struct SessionData<T: SessionStream> {
    pub stream: T,
    pub local_ip: IpAddr,
    pub remote_ip: IpAddr,
    pub remote_port: u16,
    /// Tracing span carrying the session's identifying fields.
    pub span: tracing::Span,
    /// Concurrency-limiter token; presumably frees the slot when
    /// dropped — confirm against `ConcurrencyLimiter`.
    pub in_flight: InFlight,
    pub instance: Arc<ServerInstance>,
}

/// A bidirectional stream a session can run on, with TLS introspection.
pub trait SessionStream: AsyncRead + AsyncWrite + Unpin + 'static + Sync + Send {
    /// Whether the transport is already encrypted.
    fn is_tls(&self) -> bool;
    /// Negotiated TLS protocol-version and cipher names
    /// (empty strings for plaintext transports).
    fn tls_version_and_cipher(&self) -> (Cow<'static, str>, Cow<'static, str>);
}
|
||||
|
||||
/// Implemented by protocol front-ends to run accepted sessions.
pub trait SessionManager: Sync + Send + 'static + Clone {
    /// Spawns a task that performs the TLS handshake when `is_tls` is set
    /// and then hands the session to [`SessionManager::handle`].
    fn spawn<T: SessionStream>(&self, mut session: SessionData<T>, is_tls: bool) {
        let manager = self.clone();

        tokio::spawn(async move {
            if is_tls {
                match session.instance.acceptor.accept(session.stream).await {
                    TcpAcceptorResult::Tls(accept) => match accept.await {
                        Ok(stream) => {
                            // Rebuild the session around the TLS stream; the stream
                            // type changes (T -> TlsStream<T>), so the struct must be
                            // re-created field by field rather than via struct update.
                            let session = SessionData {
                                stream,
                                local_ip: session.local_ip,
                                remote_ip: session.remote_ip,
                                remote_port: session.remote_port,
                                span: session.span,
                                in_flight: session.in_flight,
                                instance: session.instance,
                            };
                            manager.handle(session).await;
                        }
                        Err(err) => {
                            // Handshake failed: log and drop the connection.
                            tracing::debug!(
                                context = "tls",
                                event = "error",
                                instance = session.instance.id,
                                protocol = ?session.instance.protocol,
                                remote.ip = session.remote_ip.to_string(),
                                "Failed to accept TLS connection: {}",
                                err
                            );
                        }
                    },
                    // Acceptor is not configured for TLS; continue in plaintext.
                    TcpAcceptorResult::Plain(stream) => {
                        session.stream = stream;
                        manager.handle(session).await;
                    }
                    // e.g. an ACME TLS-ALPN-01 challenge was answered; drop the stream.
                    TcpAcceptorResult::Close => (),
                }
            } else {
                manager.handle(session).await;
            }
        });
    }

    /// Runs the protocol for a single established session.
    fn handle<T: SessionStream>(
        self,
        session: SessionData<T>,
    ) -> impl std::future::Future<Output = ()> + Send;

    /// Shuts down the manager's active sessions.
    fn shutdown(&self) -> impl std::future::Future<Output = ()> + Send;
}
|
||||
|
||||
impl Debug for TcpAcceptor {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Tls(_) => f.debug_tuple("Tls").finish(),
|
||||
Self::Acme {
|
||||
challenge,
|
||||
default,
|
||||
manager,
|
||||
} => f
|
||||
.debug_struct("Acme")
|
||||
.field("challenge", challenge)
|
||||
.field("default", default)
|
||||
.field("manager", manager)
|
||||
.finish(),
|
||||
Self::Plain => write!(f, "Plain"),
|
||||
}
|
||||
}
|
||||
}
|
160
crates/common/src/listener/stream.rs
Normal file
160
crates/common/src/listener/stream.rs
Normal file
|
@ -0,0 +1,160 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of the Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::borrow::Cow;
|
||||
|
||||
use proxy_header::io::ProxiedStream;
|
||||
use tokio::{
|
||||
io::{AsyncRead, AsyncWrite},
|
||||
net::TcpStream,
|
||||
};
|
||||
use tokio_rustls::server::TlsStream;
|
||||
|
||||
use super::SessionStream;
|
||||
|
||||
impl SessionStream for TcpStream {
    /// A raw TCP stream is never encrypted.
    fn is_tls(&self) -> bool {
        false
    }

    /// No TLS parameters exist for a plaintext stream.
    fn tls_version_and_cipher(&self) -> (Cow<'static, str>, Cow<'static, str>) {
        (Cow::Borrowed(""), Cow::Borrowed(""))
    }
}
|
||||
|
||||
impl<T: SessionStream> SessionStream for TlsStream<T> {
|
||||
fn is_tls(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn tls_version_and_cipher(&self) -> (Cow<'static, str>, Cow<'static, str>) {
|
||||
let (_, conn) = self.get_ref();
|
||||
|
||||
(
|
||||
match conn
|
||||
.protocol_version()
|
||||
.unwrap_or(rustls::ProtocolVersion::Unknown(0))
|
||||
{
|
||||
rustls::ProtocolVersion::SSLv2 => "SSLv2",
|
||||
rustls::ProtocolVersion::SSLv3 => "SSLv3",
|
||||
rustls::ProtocolVersion::TLSv1_0 => "TLSv1.0",
|
||||
rustls::ProtocolVersion::TLSv1_1 => "TLSv1.1",
|
||||
rustls::ProtocolVersion::TLSv1_2 => "TLSv1.2",
|
||||
rustls::ProtocolVersion::TLSv1_3 => "TLSv1.3",
|
||||
rustls::ProtocolVersion::DTLSv1_0 => "DTLSv1.0",
|
||||
rustls::ProtocolVersion::DTLSv1_2 => "DTLSv1.2",
|
||||
rustls::ProtocolVersion::DTLSv1_3 => "DTLSv1.3",
|
||||
_ => "unknown",
|
||||
}
|
||||
.into(),
|
||||
match conn.negotiated_cipher_suite() {
|
||||
Some(rustls::SupportedCipherSuite::Tls13(cs)) => {
|
||||
cs.common.suite.as_str().unwrap_or("unknown")
|
||||
}
|
||||
Some(rustls::SupportedCipherSuite::Tls12(cs)) => {
|
||||
cs.common.suite.as_str().unwrap_or("unknown")
|
||||
}
|
||||
None => "unknown",
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl SessionStream for ProxiedStream<TcpStream> {
|
||||
fn is_tls(&self) -> bool {
|
||||
self.proxy_header()
|
||||
.ssl()
|
||||
.map_or(false, |ssl| ssl.client_ssl())
|
||||
}
|
||||
|
||||
fn tls_version_and_cipher(&self) -> (Cow<'static, str>, Cow<'static, str>) {
|
||||
self.proxy_header()
|
||||
.ssl()
|
||||
.map(|ssl| {
|
||||
(
|
||||
ssl.version().unwrap_or("unknown").to_string().into(),
|
||||
ssl.cipher().unwrap_or("unknown").to_string().into(),
|
||||
)
|
||||
})
|
||||
.unwrap_or((Cow::Borrowed("unknown"), Cow::Borrowed("unknown")))
|
||||
}
|
||||
}
|
||||
|
||||
/// A sink-only stream used where a [`SessionStream`] is required but no
/// real peer exists: writes are captured in `tx_buf`, reads are forbidden.
#[derive(Default)]
pub struct NullIo {
    /// Everything "sent" over this stream accumulates here.
    pub tx_buf: Vec<u8>,
}

impl AsyncWrite for NullIo {
    /// Appends the buffer to `tx_buf` and reports it fully written.
    fn poll_write(
        mut self: std::pin::Pin<&mut Self>,
        _cx: &mut std::task::Context<'_>,
        buf: &[u8],
    ) -> std::task::Poll<Result<usize, std::io::Error>> {
        self.tx_buf.extend_from_slice(buf);
        std::task::Poll::Ready(Ok(buf.len()))
    }

    /// Nothing is buffered outside `tx_buf`, so flushing is a no-op.
    fn poll_flush(
        self: std::pin::Pin<&mut Self>,
        _cx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Result<(), std::io::Error>> {
        std::task::Poll::Ready(Ok(()))
    }

    /// There is no underlying resource to shut down.
    fn poll_shutdown(
        self: std::pin::Pin<&mut Self>,
        _cx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Result<(), std::io::Error>> {
        std::task::Poll::Ready(Ok(()))
    }
}

impl AsyncRead for NullIo {
    /// Reading from a `NullIo` is a programming error by construction.
    fn poll_read(
        self: std::pin::Pin<&mut Self>,
        _cx: &mut std::task::Context<'_>,
        _buf: &mut tokio::io::ReadBuf<'_>,
    ) -> std::task::Poll<std::io::Result<()>> {
        unreachable!()
    }
}

impl SessionStream for NullIo {
    // NOTE(review): reported as TLS, presumably so callers never attempt
    // a handshake upgrade on this fake stream — confirm against callers.
    fn is_tls(&self) -> bool {
        true
    }

    /// No real TLS parameters exist; empty strings are returned.
    fn tls_version_and_cipher(
        &self,
    ) -> (
        std::borrow::Cow<'static, str>,
        std::borrow::Cow<'static, str>,
    ) {
        (
            std::borrow::Cow::Borrowed(""),
            std::borrow::Cow::Borrowed(""),
        )
    }
}
|
202
crates/common/src/listener/tls.rs
Normal file
202
crates/common/src/listener/tls.rs
Normal file
|
@ -0,0 +1,202 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::{
|
||||
fmt::{self, Formatter},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use ahash::AHashMap;
|
||||
use arc_swap::ArcSwap;
|
||||
use rustls::{
|
||||
client::verify_server_name,
|
||||
server::{ClientHello, ParsedCertificate, ResolvesServerCert},
|
||||
sign::CertifiedKey,
|
||||
version::{TLS12, TLS13},
|
||||
Error, SupportedProtocolVersion,
|
||||
};
|
||||
use rustls_pki_types::{DnsName, ServerName};
|
||||
use store::Store;
|
||||
use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
|
||||
use tokio_rustls::{Accept, LazyConfigAcceptor, TlsAcceptor};
|
||||
|
||||
use crate::config::server::tls::build_certified_key;
|
||||
|
||||
use super::{acme::resolver::IsTlsAlpnChallenge, SessionStream, TcpAcceptor, TcpAcceptorResult};
|
||||
|
||||
// Protocol-version singletons used when building rustls server configurations.
pub static TLS13_VERSION: &[&SupportedProtocolVersion] = &[&TLS13];
pub static TLS12_VERSION: &[&SupportedProtocolVersion] = &[&TLS12];

/// Resolves the certificate to present during a TLS handshake:
/// by SNI name when one matches, else the default certificate.
pub struct CertificateResolver {
    /// Lowercased SNI hostname -> certificate.
    pub sni: AHashMap<String, Arc<Certificate>>,
    /// Certificate used when no SNI match is found.
    pub cert: Arc<Certificate>,
}

/// A hot-swappable certificate: [`Certificate::reload`] replaces the key
/// material in place without restarting listeners.
pub struct Certificate {
    pub cert: ArcSwap<CertifiedKey>,
    /// Configuration key id (`certificate.<id>.*`) this certificate is loaded from.
    pub cert_id: String,
}
|
||||
|
||||
impl CertificateResolver {
    /// Registers `ck` for SNI hostname `name`.
    ///
    /// Validates that `name` is a well-formed DNS name and that the
    /// certificate's end-entity actually covers it before inserting the
    /// lowercased name into the SNI map.
    pub fn add(&mut self, name: &str, ck: Arc<Certificate>) -> Result<(), Error> {
        let server_name = {
            let checked_name = DnsName::try_from(name)
                .map_err(|_| Error::General("Bad DNS name".into()))
                .map(|name| name.to_lowercase_owned())?;
            ServerName::DnsName(checked_name)
        };

        // Reject certificates that do not cover the requested hostname.
        ck.cert
            .load()
            .end_entity_cert()
            .and_then(ParsedCertificate::try_from)
            .and_then(|cert| verify_server_name(&cert, &server_name))?;

        if let ServerName::DnsName(name) = server_name {
            self.sni.insert(name.as_ref().to_string(), ck);
        }
        Ok(())
    }
}
|
||||
|
||||
impl ResolvesServerCert for CertificateResolver {
|
||||
fn resolve(&self, hello: ClientHello<'_>) -> Option<Arc<CertifiedKey>> {
|
||||
if !self.sni.is_empty() {
|
||||
if let Some(cert) = hello.server_name().and_then(|name| self.sni.get(name)) {
|
||||
return cert.cert.load().clone().into();
|
||||
}
|
||||
}
|
||||
self.cert.cert.load().clone().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl TcpAcceptor {
    /// Accepts an incoming stream, upgrading it to TLS when configured.
    ///
    /// For the `Acme` variant with an order in progress, the ClientHello is
    /// inspected first: TLS-ALPN-01 validation requests are answered with
    /// the challenge configuration and then closed, while normal handshakes
    /// proceed with the default configuration.
    pub async fn accept<IO>(&self, stream: IO) -> TcpAcceptorResult<IO>
    where
        IO: SessionStream,
    {
        match self {
            TcpAcceptor::Tls(acceptor) => TcpAcceptorResult::Tls(acceptor.accept(stream)),
            TcpAcceptor::Acme {
                challenge,
                default,
                manager,
            } => {
                if manager.has_order_in_progress() {
                    // Defer config selection until the ClientHello is available.
                    match LazyConfigAcceptor::new(Default::default(), stream).await {
                        Ok(start_handshake) => {
                            if start_handshake.client_hello().is_tls_alpn_challenge() {
                                // Complete the handshake with the challenge config so
                                // the ACME server can validate ownership, then close.
                                match start_handshake.into_stream(challenge.clone()).await {
                                    Ok(mut tls) => {
                                        tracing::debug!(
                                            context = "acme",
                                            event = "validation",
                                            "Received TLS-ALPN-01 validation request."
                                        );
                                        let _ = tls.shutdown().await;
                                    }
                                    Err(err) => {
                                        tracing::info!(
                                            context = "acme",
                                            event = "error",
                                            error = ?err,
                                            "TLS-ALPN-01 validation request failed."
                                        );
                                    }
                                }
                            } else {
                                // Regular client: continue with the default TLS config.
                                return TcpAcceptorResult::Tls(
                                    start_handshake.into_stream(default.clone()),
                                );
                            }
                        }
                        Err(err) => {
                            tracing::debug!(
                                context = "listener",
                                event = "error",
                                error = ?err,
                                "TLS handshake failed."
                            );
                        }
                    }

                    // Challenge answered or handshake failed: close the connection.
                    TcpAcceptorResult::Close
                } else {
                    TcpAcceptorResult::Tls(TlsAcceptor::from(default.clone()).accept(stream))
                }
            }
            TcpAcceptor::Plain => TcpAcceptorResult::Plain(stream),
        }
    }

    /// Whether this acceptor performs TLS at all.
    pub fn is_tls(&self) -> bool {
        matches!(self, TcpAcceptor::Tls(_) | TcpAcceptor::Acme { .. })
    }
}
|
||||
|
||||
impl<IO> TcpAcceptorResult<IO>
|
||||
where
|
||||
IO: AsyncRead + AsyncWrite + Unpin,
|
||||
{
|
||||
pub fn unwrap_tls(self) -> Accept<IO> {
|
||||
match self {
|
||||
TcpAcceptorResult::Tls(accept) => accept,
|
||||
_ => panic!("unwrap_tls called on non-TLS acceptor"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Certificate {
|
||||
pub async fn reload(&self, store: &Store) -> utils::config::Result<()> {
|
||||
match (
|
||||
store
|
||||
.config_get(format!("certificate.{}.cert", self.cert_id))
|
||||
.await,
|
||||
store
|
||||
.config_get(format!("certificate.{}.private-key", self.cert_id))
|
||||
.await,
|
||||
) {
|
||||
(Ok(Some(cert)), Ok(Some(pk))) => {
|
||||
match build_certified_key(cert.into_bytes(), pk.into_bytes()) {
|
||||
Ok(cert) => {
|
||||
self.cert.store(Arc::new(cert));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
(Ok(None), _) | (_, Ok(None)) => Err("Certificate or private key not found".into()),
|
||||
(Err(err), _) | (_, Err(err)) => Err(err.to_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for CertificateResolver {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("CertificateResolver")
|
||||
.field("sni", &self.sni.keys())
|
||||
.field("id", &self.cert.cert_id)
|
||||
.finish()
|
||||
}
|
||||
}
|
|
@ -21,8 +21,9 @@
|
|||
* for more details.
|
||||
*/
|
||||
|
||||
use std::time::Duration;
|
||||
|
||||
use mail_send::smtp::tls::build_tls_connector;
|
||||
use store::Store;
|
||||
use utils::config::{utils::AsKey, Config};
|
||||
|
||||
use crate::core::config::build_pool;
|
||||
|
@ -30,35 +31,44 @@ use crate::core::config::build_pool;
|
|||
use super::{ImapConnectionManager, ImapDirectory};
|
||||
|
||||
impl ImapDirectory {
|
||||
pub fn from_config(
|
||||
config: &Config,
|
||||
prefix: impl AsKey,
|
||||
data_store: Store,
|
||||
) -> utils::config::Result<Self> {
|
||||
pub fn from_config(config: &mut Config, prefix: impl AsKey) -> Option<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
let address = config.value_require((&prefix, "host"))?;
|
||||
let tls_implicit: bool = config.property_or_static((&prefix, "tls.enable"), "false")?;
|
||||
let address = config.value_require_((&prefix, "host"))?.to_string();
|
||||
let tls_implicit: bool = config
|
||||
.property_or_default_((&prefix, "tls.enable"), "false")
|
||||
.unwrap_or_default();
|
||||
let port: u16 = config
|
||||
.property_or_static((&prefix, "port"), if tls_implicit { "993" } else { "143" })?;
|
||||
.property_or_default_((&prefix, "port"), if tls_implicit { "993" } else { "143" })
|
||||
.unwrap_or(if tls_implicit { 993 } else { 143 });
|
||||
|
||||
let manager = ImapConnectionManager {
|
||||
addr: format!("{address}:{port}"),
|
||||
timeout: config.property_or_static((&prefix, "timeout"), "30s")?,
|
||||
timeout: config
|
||||
.property_or_default_((&prefix, "timeout"), "30s")
|
||||
.unwrap_or_else(|| Duration::from_secs(30)),
|
||||
tls_connector: build_tls_connector(
|
||||
config.property_or_static((&prefix, "tls.allow-invalid-certs"), "false")?,
|
||||
config
|
||||
.property_or_default_((&prefix, "tls.allow-invalid-certs"), "false")
|
||||
.unwrap_or_default(),
|
||||
),
|
||||
tls_hostname: address.to_string(),
|
||||
tls_implicit,
|
||||
mechanisms: 0.into(),
|
||||
};
|
||||
|
||||
Ok(ImapDirectory {
|
||||
pool: build_pool(config, &prefix, manager)?,
|
||||
Some(ImapDirectory {
|
||||
pool: build_pool(config, &prefix, manager)
|
||||
.map_err(|e| {
|
||||
config.new_parse_error(
|
||||
prefix.as_str(),
|
||||
format!("Failed to build IMAP pool: {e:?}"),
|
||||
)
|
||||
})
|
||||
.ok()?,
|
||||
domains: config
|
||||
.values((&prefix, "lookup.domains"))
|
||||
.map(|(_, v)| v.to_lowercase())
|
||||
.collect(),
|
||||
data_store,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -31,14 +31,12 @@ use std::{fmt::Display, sync::atomic::AtomicU64, time::Duration};
|
|||
|
||||
use ahash::AHashSet;
|
||||
use deadpool::managed::Pool;
|
||||
use store::Store;
|
||||
use tokio::io::{AsyncRead, AsyncWrite};
|
||||
use tokio_rustls::TlsConnector;
|
||||
|
||||
pub struct ImapDirectory {
|
||||
pool: Pool<ImapConnectionManager>,
|
||||
domains: AHashSet<String>,
|
||||
pub(crate) data_store: Store,
|
||||
}
|
||||
|
||||
pub struct ImapConnectionManager {
|
||||
|
|
|
@ -21,6 +21,8 @@
|
|||
* for more details.
|
||||
*/
|
||||
|
||||
use std::time::Duration;
|
||||
|
||||
use ldap3::LdapConnSettings;
|
||||
use store::Store;
|
||||
use utils::config::{utils::AsKey, Config};
|
||||
|
@ -30,16 +32,12 @@ use crate::core::config::build_pool;
|
|||
use super::{Bind, LdapConnectionManager, LdapDirectory, LdapFilter, LdapMappings};
|
||||
|
||||
impl LdapDirectory {
|
||||
pub fn from_config(
|
||||
config: &Config,
|
||||
prefix: impl AsKey,
|
||||
data_store: Store,
|
||||
) -> utils::config::Result<Self> {
|
||||
pub fn from_config(config: &mut Config, prefix: impl AsKey, data_store: Store) -> Option<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
let bind_dn = if let Some(dn) = config.value((&prefix, "bind.dn")) {
|
||||
Bind::new(
|
||||
dn.to_string(),
|
||||
config.value_require((&prefix, "bind.secret"))?.to_string(),
|
||||
config.value_require_((&prefix, "bind.secret"))?.to_string(),
|
||||
)
|
||||
.into()
|
||||
} else {
|
||||
|
@ -47,23 +45,33 @@ impl LdapDirectory {
|
|||
};
|
||||
|
||||
let manager = LdapConnectionManager::new(
|
||||
config.value_require((&prefix, "url"))?.to_string(),
|
||||
config.value_require_((&prefix, "url"))?.to_string(),
|
||||
LdapConnSettings::new()
|
||||
.set_conn_timeout(config.property_or_static((&prefix, "timeout"), "30s")?)
|
||||
.set_starttls(config.property_or_static((&prefix, "tls.enable"), "false")?)
|
||||
.set_conn_timeout(
|
||||
config
|
||||
.property_or_default_((&prefix, "timeout"), "30s")
|
||||
.unwrap_or_else(|| Duration::from_secs(30)),
|
||||
)
|
||||
.set_starttls(
|
||||
config
|
||||
.property_or_default_((&prefix, "tls.enable"), "false")
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
.set_no_tls_verify(
|
||||
config.property_or_static((&prefix, "tls.allow-invalid-certs"), "false")?,
|
||||
config
|
||||
.property_or_default_((&prefix, "tls.allow-invalid-certs"), "false")
|
||||
.unwrap_or_default(),
|
||||
),
|
||||
bind_dn,
|
||||
);
|
||||
|
||||
let mut mappings = LdapMappings {
|
||||
base_dn: config.value_require((&prefix, "base-dn"))?.to_string(),
|
||||
filter_name: LdapFilter::from_config(config, (&prefix, "filter.name"))?,
|
||||
filter_email: LdapFilter::from_config(config, (&prefix, "filter.email"))?,
|
||||
filter_verify: LdapFilter::from_config(config, (&prefix, "filter.verify"))?,
|
||||
filter_expand: LdapFilter::from_config(config, (&prefix, "filter.expand"))?,
|
||||
filter_domains: LdapFilter::from_config(config, (&prefix, "filter.domains"))?,
|
||||
base_dn: config.value_require_((&prefix, "base-dn"))?.to_string(),
|
||||
filter_name: LdapFilter::from_config(config, (&prefix, "filter.name")),
|
||||
filter_email: LdapFilter::from_config(config, (&prefix, "filter.email")),
|
||||
filter_verify: LdapFilter::from_config(config, (&prefix, "filter.verify")),
|
||||
filter_expand: LdapFilter::from_config(config, (&prefix, "filter.expand")),
|
||||
filter_domains: LdapFilter::from_config(config, (&prefix, "filter.domains")),
|
||||
attr_name: config
|
||||
.values((&prefix, "attributes.name"))
|
||||
.map(|(_, v)| v.to_string())
|
||||
|
@ -112,16 +120,22 @@ impl LdapDirectory {
|
|||
mappings.attrs_principal.extend(attr.iter().cloned());
|
||||
}
|
||||
|
||||
let auth_bind =
|
||||
if config.property_or_static::<bool>((&prefix, "bind.auth.enable"), "false")? {
|
||||
LdapFilter::from_config(config, (&prefix, "bind.auth.dn"))?.into()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let auth_bind = if config
|
||||
.property_or_default_::<bool>((&prefix, "bind.auth.enable"), "false")
|
||||
.unwrap_or_default()
|
||||
{
|
||||
LdapFilter::from_config(config, (&prefix, "bind.auth.dn")).into()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(LdapDirectory {
|
||||
Some(LdapDirectory {
|
||||
mappings,
|
||||
pool: build_pool(config, &prefix, manager)?,
|
||||
pool: build_pool(config, &prefix, manager)
|
||||
.map_err(|e| {
|
||||
config.new_parse_error(prefix, format!("Failed to build LDAP pool: {e:?}"))
|
||||
})
|
||||
.ok()?,
|
||||
auth_bind,
|
||||
data_store,
|
||||
})
|
||||
|
@ -129,22 +143,21 @@ impl LdapDirectory {
|
|||
}
|
||||
|
||||
impl LdapFilter {
|
||||
fn from_config(config: &Config, key: impl AsKey) -> utils::config::Result<Self> {
|
||||
fn from_config(config: &mut Config, key: impl AsKey) -> Self {
|
||||
if let Some(value) = config.value(key.clone()) {
|
||||
let filter = LdapFilter {
|
||||
filter: value.split('?').map(|s| s.to_string()).collect(),
|
||||
};
|
||||
if filter.filter.len() >= 2 {
|
||||
Ok(filter)
|
||||
return filter;
|
||||
} else {
|
||||
Err(format!(
|
||||
"Missing '?' parameter placeholder in filter {:?} with value {:?}",
|
||||
key.as_key(),
|
||||
value
|
||||
))
|
||||
config.new_parse_error(
|
||||
key,
|
||||
format!("Missing '?' parameter placeholder in value {:?}", value),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
Ok(Self::default())
|
||||
}
|
||||
|
||||
Self::default()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,10 +30,10 @@ use super::{EmailType, MemoryDirectory};
|
|||
|
||||
impl MemoryDirectory {
|
||||
pub async fn from_config(
|
||||
config: &Config,
|
||||
config: &mut Config,
|
||||
prefix: impl AsKey,
|
||||
data_store: Store,
|
||||
) -> utils::config::Result<Self> {
|
||||
) -> Option<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
let mut directory = MemoryDirectory {
|
||||
data_store,
|
||||
|
@ -42,9 +42,14 @@ impl MemoryDirectory {
|
|||
domains: Default::default(),
|
||||
};
|
||||
|
||||
for lookup_id in config.sub_keys((prefix.as_str(), "principals"), ".name") {
|
||||
for lookup_id in config
|
||||
.sub_keys((prefix.as_str(), "principals"), ".name")
|
||||
.map(|s| s.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
{
|
||||
let lookup_id = lookup_id.as_str();
|
||||
let name = config
|
||||
.value_require((prefix.as_str(), "principals", lookup_id, "name"))?
|
||||
.value_require_((prefix.as_str(), "principals", lookup_id, "name"))?
|
||||
.to_string();
|
||||
let typ = match config.value((prefix.as_str(), "principals", lookup_id, "class")) {
|
||||
Some("individual") => Type::Individual,
|
||||
|
@ -59,27 +64,38 @@ impl MemoryDirectory {
|
|||
.get_or_create_account_id(&name)
|
||||
.await
|
||||
.map_err(|err| {
|
||||
format!(
|
||||
"Failed to obtain id for principal {} ({}): {:?}",
|
||||
name, lookup_id, err
|
||||
config.new_build_error(
|
||||
prefix.as_str(),
|
||||
format!(
|
||||
"Failed to obtain id for principal {} ({}): {:?}",
|
||||
name, lookup_id, err
|
||||
),
|
||||
)
|
||||
})?;
|
||||
})
|
||||
.ok()?;
|
||||
|
||||
// Obtain group ids
|
||||
let mut member_of = Vec::new();
|
||||
for (_, group) in config.values((prefix.as_str(), "principals", lookup_id, "member-of"))
|
||||
for group in config
|
||||
.values((prefix.as_str(), "principals", lookup_id, "member-of"))
|
||||
.map(|(_, s)| s.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
{
|
||||
member_of.push(
|
||||
directory
|
||||
.data_store
|
||||
.get_or_create_account_id(group)
|
||||
.get_or_create_account_id(&group)
|
||||
.await
|
||||
.map_err(|err| {
|
||||
format!(
|
||||
"Failed to obtain id for principal {} ({}): {:?}",
|
||||
name, lookup_id, err
|
||||
config.new_build_error(
|
||||
prefix.as_str(),
|
||||
format!(
|
||||
"Failed to obtain id for principal {} ({}): {:?}",
|
||||
name, lookup_id, err
|
||||
),
|
||||
)
|
||||
})?,
|
||||
})
|
||||
.ok()?,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -131,7 +147,7 @@ impl MemoryDirectory {
|
|||
.value((prefix.as_str(), "principals", lookup_id, "description"))
|
||||
.map(|v| v.to_string()),
|
||||
quota: config
|
||||
.property((prefix.as_str(), "principals", lookup_id, "quota"))?
|
||||
.property_((prefix.as_str(), "principals", lookup_id, "quota"))
|
||||
.unwrap_or(0),
|
||||
member_of,
|
||||
id,
|
||||
|
@ -139,6 +155,6 @@ impl MemoryDirectory {
|
|||
});
|
||||
}
|
||||
|
||||
Ok(directory)
|
||||
Some(directory)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,8 +21,9 @@
|
|||
* for more details.
|
||||
*/
|
||||
|
||||
use std::time::Duration;
|
||||
|
||||
use mail_send::{smtp::tls::build_tls_connector, SmtpClientBuilder};
|
||||
use store::Store;
|
||||
use utils::config::{utils::AsKey, Config};
|
||||
|
||||
use crate::core::config::build_pool;
|
||||
|
@ -30,24 +31,26 @@ use crate::core::config::build_pool;
|
|||
use super::{SmtpConnectionManager, SmtpDirectory};
|
||||
|
||||
impl SmtpDirectory {
|
||||
pub fn from_config(
|
||||
config: &Config,
|
||||
prefix: impl AsKey,
|
||||
is_lmtp: bool,
|
||||
data_store: Store,
|
||||
) -> utils::config::Result<Self> {
|
||||
pub fn from_config(config: &mut Config, prefix: impl AsKey, is_lmtp: bool) -> Option<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
let address = config.value_require((&prefix, "host"))?;
|
||||
let tls_implicit: bool = config.property_or_static((&prefix, "tls.enable"), "false")?;
|
||||
let address = config.value_require_((&prefix, "host"))?.to_string();
|
||||
let tls_implicit: bool = config
|
||||
.property_or_default_((&prefix, "tls.enable"), "false")
|
||||
.unwrap_or_default();
|
||||
let port: u16 = config
|
||||
.property_or_static((&prefix, "port"), if tls_implicit { "465" } else { "25" })?;
|
||||
.property_or_default_((&prefix, "port"), if tls_implicit { "465" } else { "25" })
|
||||
.unwrap_or(if tls_implicit { 465 } else { 25 });
|
||||
|
||||
let manager = SmtpConnectionManager {
|
||||
builder: SmtpClientBuilder {
|
||||
addr: format!("{address}:{port}"),
|
||||
timeout: config.property_or_static((&prefix, "timeout"), "30s")?,
|
||||
timeout: config
|
||||
.property_or_default_((&prefix, "timeout"), "30s")
|
||||
.unwrap_or_else(|| Duration::from_secs(30)),
|
||||
tls_connector: build_tls_connector(
|
||||
config.property_or_static((&prefix, "tls.allow-invalid-certs"), "false")?,
|
||||
config
|
||||
.property_or_default_((&prefix, "tls.allow-invalid-certs"), "false")
|
||||
.unwrap_or_default(),
|
||||
),
|
||||
tls_hostname: address.to_string(),
|
||||
tls_implicit,
|
||||
|
@ -59,17 +62,27 @@ impl SmtpDirectory {
|
|||
.to_string(),
|
||||
say_ehlo: false,
|
||||
},
|
||||
max_rcpt: config.property_or_static((&prefix, "limits.rcpt"), "10")?,
|
||||
max_auth_errors: config.property_or_static((&prefix, "limits.auth-errors"), "3")?,
|
||||
max_rcpt: config
|
||||
.property_or_default_((&prefix, "limits.rcpt"), "10")
|
||||
.unwrap_or(10),
|
||||
max_auth_errors: config
|
||||
.property_or_default_((&prefix, "limits.auth-errors"), "3")
|
||||
.unwrap_or(10),
|
||||
};
|
||||
|
||||
Ok(SmtpDirectory {
|
||||
pool: build_pool(config, &prefix, manager)?,
|
||||
Some(SmtpDirectory {
|
||||
pool: build_pool(config, &prefix, manager)
|
||||
.map_err(|e| {
|
||||
config.new_parse_error(
|
||||
prefix.as_str(),
|
||||
format!("Failed to build SMTP pool: {e:?}"),
|
||||
)
|
||||
})
|
||||
.ok()?,
|
||||
domains: config
|
||||
.values((&prefix, "lookup.domains"))
|
||||
.map(|(_, v)| v.to_lowercase())
|
||||
.collect(),
|
||||
data_store,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -29,14 +29,12 @@ use ahash::AHashSet;
|
|||
use deadpool::managed::Pool;
|
||||
use mail_send::SmtpClientBuilder;
|
||||
use smtp_proto::EhloResponse;
|
||||
use store::Store;
|
||||
use tokio::net::TcpStream;
|
||||
use tokio_rustls::client::TlsStream;
|
||||
|
||||
pub struct SmtpDirectory {
|
||||
pool: Pool<SmtpConnectionManager>,
|
||||
domains: AHashSet<String>,
|
||||
pub(crate) data_store: Store,
|
||||
}
|
||||
|
||||
pub struct SmtpConnectionManager {
|
||||
|
|
|
@ -28,20 +28,20 @@ use super::{SqlDirectory, SqlMappings};
|
|||
|
||||
impl SqlDirectory {
|
||||
pub fn from_config(
|
||||
config: &Config,
|
||||
config: &mut Config,
|
||||
prefix: impl AsKey,
|
||||
stores: &Stores,
|
||||
data_store: Store,
|
||||
) -> utils::config::Result<Self> {
|
||||
) -> Option<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
let store_id = config.value_require((&prefix, "store"))?;
|
||||
let store = stores
|
||||
.lookup_stores
|
||||
.get(store_id)
|
||||
.ok_or_else(|| {
|
||||
format!("Directory {prefix:?} references a non-existent store {store_id:?}")
|
||||
})?
|
||||
.clone();
|
||||
let store_id = config.value_require_((&prefix, "store"))?.to_string();
|
||||
let store = if let Some(store) = stores.lookup_stores.get(&store_id) {
|
||||
store.clone()
|
||||
} else {
|
||||
let err = format!("Directory references a non-existent store {store_id:?}");
|
||||
config.new_build_error((&prefix, "store"), err);
|
||||
return None;
|
||||
};
|
||||
|
||||
let mut mappings = SqlMappings {
|
||||
column_description: config
|
||||
|
@ -73,12 +73,12 @@ impl SqlDirectory {
|
|||
("domains", &mut mappings.query_domains),
|
||||
] {
|
||||
*query = config
|
||||
.value(("store", store_id, "query", query_id))
|
||||
.value(("store", store_id.as_str(), "query", query_id))
|
||||
.unwrap_or_default()
|
||||
.to_string();
|
||||
}
|
||||
|
||||
Ok(SqlDirectory {
|
||||
Some(SqlDirectory {
|
||||
store,
|
||||
mappings,
|
||||
data_store,
|
||||
|
|
|
@ -45,34 +45,28 @@ pub struct LookupCache<T: Hash + Eq> {
|
|||
}
|
||||
|
||||
impl CachedDirectory {
|
||||
pub fn try_from_config(
|
||||
config: &Config,
|
||||
prefix: impl AsKey,
|
||||
) -> utils::config::Result<Option<Self>> {
|
||||
pub fn try_from_config(config: &mut Config, prefix: impl AsKey) -> Option<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
if let Some(cached_entries) = config.property((&prefix, "cache.entries"))? {
|
||||
let cache_ttl_positive = config
|
||||
.property((&prefix, "cache.ttl.positive"))?
|
||||
.unwrap_or(Duration::from_secs(86400));
|
||||
let cache_ttl_negative = config
|
||||
.property((&prefix, "cache.ttl.positive"))?
|
||||
.unwrap_or_else(|| Duration::from_secs(3600));
|
||||
let cached_entries = config.property_((&prefix, "cache.entries"))?;
|
||||
let cache_ttl_positive = config
|
||||
.property_((&prefix, "cache.ttl.positive"))
|
||||
.unwrap_or(Duration::from_secs(86400));
|
||||
let cache_ttl_negative = config
|
||||
.property_((&prefix, "cache.ttl.positive"))
|
||||
.unwrap_or_else(|| Duration::from_secs(3600));
|
||||
|
||||
Ok(Some(CachedDirectory {
|
||||
cached_domains: Mutex::new(LookupCache::new(
|
||||
cached_entries,
|
||||
cache_ttl_positive,
|
||||
cache_ttl_negative,
|
||||
)),
|
||||
cached_rcpts: Mutex::new(LookupCache::new(
|
||||
cached_entries,
|
||||
cache_ttl_positive,
|
||||
cache_ttl_negative,
|
||||
)),
|
||||
}))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
Some(CachedDirectory {
|
||||
cached_domains: Mutex::new(LookupCache::new(
|
||||
cached_entries,
|
||||
cache_ttl_positive,
|
||||
cache_ttl_negative,
|
||||
)),
|
||||
cached_rcpts: Mutex::new(LookupCache::new(
|
||||
cached_entries,
|
||||
cache_ttl_positive,
|
||||
cache_ttl_negative,
|
||||
)),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_rcpt(&self, address: &str) -> Option<bool> {
|
||||
|
|
|
@ -26,13 +26,10 @@ use deadpool::{
|
|||
Runtime,
|
||||
};
|
||||
use std::{sync::Arc, time::Duration};
|
||||
use store::{dispatch::blocked::BlockedIps, Store, Stores};
|
||||
use utils::{
|
||||
config::{
|
||||
utils::{AsKey, ParseValue},
|
||||
Config,
|
||||
},
|
||||
expr::Token,
|
||||
use store::{Store, Stores};
|
||||
use utils::config::{
|
||||
utils::{AsKey, ParseValue},
|
||||
Config,
|
||||
};
|
||||
|
||||
use ahash::AHashMap;
|
||||
|
@ -42,15 +39,99 @@ use crate::{
|
|||
imap::ImapDirectory, internal::manage::ManageDirectory, ldap::LdapDirectory,
|
||||
memory::MemoryDirectory, smtp::SmtpDirectory, sql::SqlDirectory,
|
||||
},
|
||||
AddressMapping, Directories, Directory, DirectoryInner,
|
||||
Directories, Directory, DirectoryInner,
|
||||
};
|
||||
|
||||
use super::cache::CachedDirectory;
|
||||
|
||||
impl Directories {
|
||||
pub async fn parse(config: &mut Config, stores: &Stores, data_store: Store) -> Self {
|
||||
let mut directories = AHashMap::new();
|
||||
|
||||
for id in config
|
||||
.sub_keys("directory", ".type")
|
||||
.map(|s| s.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
{
|
||||
// Parse directory
|
||||
let id = id.as_str();
|
||||
#[cfg(feature = "test_mode")]
|
||||
{
|
||||
if config
|
||||
.property_or_default_::<bool>(("directory", id, "disable"), "false")
|
||||
.unwrap_or(false)
|
||||
{
|
||||
tracing::debug!("Skipping disabled directory {id:?}.");
|
||||
continue;
|
||||
}
|
||||
}
|
||||
let protocol = config.value_require_(("directory", id, "type")).unwrap();
|
||||
let prefix = ("directory", id);
|
||||
let store = match protocol {
|
||||
"internal" => Some(DirectoryInner::Internal(
|
||||
if let Some(store_id) = config.value_require_(("directory", id, "store")) {
|
||||
if let Some(data) = stores.stores.get(store_id) {
|
||||
match data.clone().init().await {
|
||||
Ok(data) => data,
|
||||
Err(err) => {
|
||||
let err =
|
||||
format!("Failed to initialize store {store_id:?}: {err:?}");
|
||||
config.new_parse_error(("directory", id, "store"), err);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
config.new_parse_error(
|
||||
("directory", id, "store"),
|
||||
"Store does not exist",
|
||||
);
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
continue;
|
||||
},
|
||||
)),
|
||||
"ldap" => LdapDirectory::from_config(config, prefix, data_store.clone())
|
||||
.map(DirectoryInner::Ldap),
|
||||
"sql" => SqlDirectory::from_config(config, prefix, stores, data_store.clone())
|
||||
.map(DirectoryInner::Sql),
|
||||
"imap" => ImapDirectory::from_config(config, prefix).map(DirectoryInner::Imap),
|
||||
"smtp" => {
|
||||
SmtpDirectory::from_config(config, prefix, false).map(DirectoryInner::Smtp)
|
||||
}
|
||||
"lmtp" => {
|
||||
SmtpDirectory::from_config(config, prefix, true).map(DirectoryInner::Smtp)
|
||||
}
|
||||
"memory" => MemoryDirectory::from_config(config, prefix, data_store.clone())
|
||||
.await
|
||||
.map(DirectoryInner::Memory),
|
||||
unknown => {
|
||||
let err = format!("Unknown directory type: {unknown:?}");
|
||||
config.new_parse_error(("directory", id, "type"), err);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
// Build directory
|
||||
if let Some(store) = store {
|
||||
let directory = Arc::new(Directory {
|
||||
store,
|
||||
cache: CachedDirectory::try_from_config(config, ("directory", id)),
|
||||
});
|
||||
|
||||
// Add directory
|
||||
directories.insert(id.to_string(), directory);
|
||||
}
|
||||
}
|
||||
|
||||
Directories { directories }
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(async_fn_in_trait)]
|
||||
pub trait ConfigDirectory {
|
||||
async fn parse_directory(
|
||||
&self,
|
||||
&mut self,
|
||||
stores: &Stores,
|
||||
data_store: Store,
|
||||
) -> utils::config::Result<Directories>;
|
||||
|
@ -58,20 +139,22 @@ pub trait ConfigDirectory {
|
|||
|
||||
impl ConfigDirectory for Config {
|
||||
async fn parse_directory(
|
||||
&self,
|
||||
&mut self,
|
||||
stores: &Stores,
|
||||
data_store: Store,
|
||||
) -> utils::config::Result<Directories> {
|
||||
let mut config = Directories {
|
||||
directories: AHashMap::new(),
|
||||
};
|
||||
let blocked_ips = Arc::new(BlockedIps::new(
|
||||
stores.get_lookup_store(self, "storage.lookup")?,
|
||||
));
|
||||
|
||||
for id in self.sub_keys("directory", ".type") {
|
||||
for id in self
|
||||
.sub_keys("directory", ".type")
|
||||
.map(|s| s.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
{
|
||||
// Parse directory
|
||||
if self.property_or_static::<bool>(("directory", id, "disable"), "false")? {
|
||||
let id = id.as_str();
|
||||
if self.property_or_default::<bool>(("directory", id, "disable"), "false")? {
|
||||
tracing::debug!("Skipping disabled directory {id:?}.");
|
||||
continue;
|
||||
}
|
||||
|
@ -101,36 +184,23 @@ impl ConfigDirectory for Config {
|
|||
)
|
||||
})?,
|
||||
),
|
||||
"ldap" => DirectoryInner::Ldap(LdapDirectory::from_config(
|
||||
self,
|
||||
prefix,
|
||||
data_store.clone(),
|
||||
)?),
|
||||
"sql" => DirectoryInner::Sql(SqlDirectory::from_config(
|
||||
self,
|
||||
prefix,
|
||||
stores,
|
||||
data_store.clone(),
|
||||
)?),
|
||||
"imap" => DirectoryInner::Imap(ImapDirectory::from_config(
|
||||
self,
|
||||
prefix,
|
||||
data_store.clone(),
|
||||
)?),
|
||||
"smtp" => DirectoryInner::Smtp(SmtpDirectory::from_config(
|
||||
self,
|
||||
prefix,
|
||||
false,
|
||||
data_store.clone(),
|
||||
)?),
|
||||
"lmtp" => DirectoryInner::Smtp(SmtpDirectory::from_config(
|
||||
self,
|
||||
prefix,
|
||||
true,
|
||||
data_store.clone(),
|
||||
)?),
|
||||
"ldap" => DirectoryInner::Ldap(
|
||||
LdapDirectory::from_config(self, prefix, data_store.clone()).unwrap(),
|
||||
),
|
||||
"sql" => DirectoryInner::Sql(
|
||||
SqlDirectory::from_config(self, prefix, stores, data_store.clone()).unwrap(),
|
||||
),
|
||||
"imap" => DirectoryInner::Imap(ImapDirectory::from_config(self, prefix).unwrap()),
|
||||
"smtp" => {
|
||||
DirectoryInner::Smtp(SmtpDirectory::from_config(self, prefix, false).unwrap())
|
||||
}
|
||||
"lmtp" => {
|
||||
DirectoryInner::Smtp(SmtpDirectory::from_config(self, prefix, true).unwrap())
|
||||
}
|
||||
"memory" => DirectoryInner::Memory(
|
||||
MemoryDirectory::from_config(self, prefix, data_store.clone()).await?,
|
||||
MemoryDirectory::from_config(self, prefix, data_store.clone())
|
||||
.await
|
||||
.unwrap(),
|
||||
),
|
||||
unknown => {
|
||||
return Err(format!("Unknown directory type: {unknown:?}"));
|
||||
|
@ -140,16 +210,7 @@ impl ConfigDirectory for Config {
|
|||
// Build directory
|
||||
let directory = Arc::new(Directory {
|
||||
store,
|
||||
catch_all: AddressMapping::from_config(
|
||||
self,
|
||||
("directory", id, "options.catch-all"),
|
||||
)?,
|
||||
subaddressing: AddressMapping::from_config(
|
||||
self,
|
||||
("directory", id, "options.subaddressing"),
|
||||
)?,
|
||||
cache: CachedDirectory::try_from_config(self, ("directory", id))?,
|
||||
blocked_ips: blocked_ips.clone(),
|
||||
cache: CachedDirectory::try_from_config(self, ("directory", id)),
|
||||
});
|
||||
|
||||
// Add directory
|
||||
|
@ -160,46 +221,26 @@ impl ConfigDirectory for Config {
|
|||
}
|
||||
}
|
||||
|
||||
impl AddressMapping {
|
||||
pub fn from_config(config: &Config, key: impl AsKey) -> utils::config::Result<Self> {
|
||||
let key = key.as_key();
|
||||
if let Some(value) = config.value(key.as_str()) {
|
||||
match value {
|
||||
"true" => Ok(AddressMapping::Enable),
|
||||
"false" => Ok(AddressMapping::Disable),
|
||||
_ => Err(format!(
|
||||
"Invalid value for address mapping {key:?}: {value:?}",
|
||||
)),
|
||||
}
|
||||
} else if let Some(if_block) = config.parse_if_block(key, |name| {
|
||||
if ["address", "email"].contains(&name) {
|
||||
Ok(Token::Variable(1))
|
||||
} else {
|
||||
Err(format!("Invalid variable name {name:?}.",))
|
||||
}
|
||||
})? {
|
||||
Ok(AddressMapping::Custom(if_block))
|
||||
} else {
|
||||
Ok(AddressMapping::Disable)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn build_pool<M: Manager>(
|
||||
config: &Config,
|
||||
config: &mut Config,
|
||||
prefix: &str,
|
||||
manager: M,
|
||||
) -> utils::config::Result<Pool<M>> {
|
||||
Pool::builder(manager)
|
||||
.runtime(Runtime::Tokio1)
|
||||
.max_size(config.property_or_static((prefix, "pool.max-connections"), "10")?)
|
||||
.max_size(
|
||||
config
|
||||
.property_or_default_((prefix, "pool.max-connections"), "10")
|
||||
.unwrap_or(10),
|
||||
)
|
||||
.create_timeout(
|
||||
config
|
||||
.property_or_static::<Duration>((prefix, "pool.timeout.create"), "30s")?
|
||||
.property_or_default_::<Duration>((prefix, "pool.timeout.create"), "30s")
|
||||
.unwrap_or_else(|| Duration::from_secs(30))
|
||||
.into(),
|
||||
)
|
||||
.wait_timeout(config.property_or_static((prefix, "pool.timeout.wait"), "30s")?)
|
||||
.recycle_timeout(config.property_or_static((prefix, "pool.timeout.recycle"), "30s")?)
|
||||
.wait_timeout(config.property_or_default_((prefix, "pool.timeout.wait"), "30s"))
|
||||
.recycle_timeout(config.property_or_default_((prefix, "pool.timeout.recycle"), "30s"))
|
||||
.build()
|
||||
.map_err(|err| {
|
||||
format!(
|
||||
|
|
|
@ -21,59 +21,11 @@
|
|||
* for more details.
|
||||
*/
|
||||
|
||||
use std::net::IpAddr;
|
||||
|
||||
use mail_send::Credentials;
|
||||
use store::Store;
|
||||
|
||||
use crate::{
|
||||
backend::internal::lookup::DirectoryStore, AuthResult, Directory, DirectoryInner, Principal,
|
||||
QueryBy,
|
||||
backend::internal::lookup::DirectoryStore, Directory, DirectoryInner, Principal, QueryBy,
|
||||
};
|
||||
|
||||
impl Directory {
|
||||
pub async fn authenticate(
|
||||
&self,
|
||||
credentials: &Credentials<String>,
|
||||
remote_ip: IpAddr,
|
||||
return_member_of: bool,
|
||||
) -> crate::Result<AuthResult<Principal<u32>>> {
|
||||
if let Some(principal) = self
|
||||
.query(QueryBy::Credentials(credentials), return_member_of)
|
||||
.await?
|
||||
{
|
||||
Ok(AuthResult::Success(principal))
|
||||
} else if self.blocked_ips.has_fail2ban() {
|
||||
let login = match credentials {
|
||||
Credentials::Plain { username, .. }
|
||||
| Credentials::XOauth2 { username, .. }
|
||||
| Credentials::OAuthBearer { token: username } => username,
|
||||
};
|
||||
if let Some(banned) = self
|
||||
.blocked_ips
|
||||
.is_fail2banned(remote_ip, login.to_string())
|
||||
.await
|
||||
{
|
||||
tracing::info!(
|
||||
context = "directory",
|
||||
event = "fail2ban",
|
||||
remote_ip = ?remote_ip,
|
||||
login = ?login,
|
||||
"IP address blocked after too many failed login attempts",
|
||||
);
|
||||
|
||||
// Write blocked address to config
|
||||
self.store().config_set(vec![banned].into_iter()).await?;
|
||||
|
||||
Ok(AuthResult::Banned)
|
||||
} else {
|
||||
Ok(AuthResult::Failure)
|
||||
}
|
||||
} else {
|
||||
Ok(AuthResult::Failure)
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn query(
|
||||
&self,
|
||||
by: QueryBy<'_>,
|
||||
|
@ -90,27 +42,14 @@ impl Directory {
|
|||
}
|
||||
|
||||
pub async fn email_to_ids(&self, email: &str) -> crate::Result<Vec<u32>> {
|
||||
let mut address = self.subaddressing.to_subaddress(email).await;
|
||||
for _ in 0..2 {
|
||||
let result = match &self.store {
|
||||
DirectoryInner::Internal(store) => store.email_to_ids(address.as_ref()).await,
|
||||
DirectoryInner::Ldap(store) => store.email_to_ids(address.as_ref()).await,
|
||||
DirectoryInner::Sql(store) => store.email_to_ids(address.as_ref()).await,
|
||||
DirectoryInner::Imap(store) => store.email_to_ids(address.as_ref()).await,
|
||||
DirectoryInner::Smtp(store) => store.email_to_ids(address.as_ref()).await,
|
||||
DirectoryInner::Memory(store) => store.email_to_ids(address.as_ref()).await,
|
||||
}?;
|
||||
|
||||
if !result.is_empty() {
|
||||
return Ok(result);
|
||||
} else if let Some(catch_all) = self.catch_all.to_catch_all(email).await {
|
||||
address = catch_all;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
match &self.store {
|
||||
DirectoryInner::Internal(store) => store.email_to_ids(email).await,
|
||||
DirectoryInner::Ldap(store) => store.email_to_ids(email).await,
|
||||
DirectoryInner::Sql(store) => store.email_to_ids(email).await,
|
||||
DirectoryInner::Imap(store) => store.email_to_ids(email).await,
|
||||
DirectoryInner::Smtp(store) => store.email_to_ids(email).await,
|
||||
DirectoryInner::Memory(store) => store.email_to_ids(email).await,
|
||||
}
|
||||
|
||||
Ok(vec![])
|
||||
}
|
||||
|
||||
pub async fn is_local_domain(&self, domain: &str) -> crate::Result<bool> {
|
||||
|
@ -139,85 +78,51 @@ impl Directory {
|
|||
}
|
||||
|
||||
pub async fn rcpt(&self, email: &str) -> crate::Result<bool> {
|
||||
// Expand subaddress
|
||||
let mut address = self.subaddressing.to_subaddress(email).await;
|
||||
|
||||
// Check cache
|
||||
if let Some(cache) = &self.cache {
|
||||
if let Some(result) = cache.get_rcpt(address.as_ref()) {
|
||||
if let Some(result) = cache.get_rcpt(email) {
|
||||
return Ok(result);
|
||||
}
|
||||
}
|
||||
|
||||
for _ in 0..2 {
|
||||
let result = match &self.store {
|
||||
DirectoryInner::Internal(store) => store.rcpt(address.as_ref()).await,
|
||||
DirectoryInner::Ldap(store) => store.rcpt(address.as_ref()).await,
|
||||
DirectoryInner::Sql(store) => store.rcpt(address.as_ref()).await,
|
||||
DirectoryInner::Imap(store) => store.rcpt(address.as_ref()).await,
|
||||
DirectoryInner::Smtp(store) => store.rcpt(address.as_ref()).await,
|
||||
DirectoryInner::Memory(store) => store.rcpt(address.as_ref()).await,
|
||||
}?;
|
||||
let result = match &self.store {
|
||||
DirectoryInner::Internal(store) => store.rcpt(email).await,
|
||||
DirectoryInner::Ldap(store) => store.rcpt(email).await,
|
||||
DirectoryInner::Sql(store) => store.rcpt(email).await,
|
||||
DirectoryInner::Imap(store) => store.rcpt(email).await,
|
||||
DirectoryInner::Smtp(store) => store.rcpt(email).await,
|
||||
DirectoryInner::Memory(store) => store.rcpt(email).await,
|
||||
}?;
|
||||
|
||||
if result {
|
||||
// Update cache
|
||||
if let Some(cache) = &self.cache {
|
||||
cache.set_rcpt(address.as_ref(), true);
|
||||
}
|
||||
return Ok(true);
|
||||
} else if let Some(catch_all) = self.catch_all.to_catch_all(email).await {
|
||||
// Check cache
|
||||
if let Some(cache) = &self.cache {
|
||||
if let Some(result) = cache.get_rcpt(catch_all.as_ref()) {
|
||||
return Ok(result);
|
||||
}
|
||||
}
|
||||
address = catch_all;
|
||||
} else {
|
||||
break;
|
||||
if result {
|
||||
// Update cache
|
||||
if let Some(cache) = &self.cache {
|
||||
cache.set_rcpt(email, true);
|
||||
}
|
||||
}
|
||||
|
||||
// Update cache
|
||||
if let Some(cache) = &self.cache {
|
||||
cache.set_rcpt(address.as_ref(), false);
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
pub async fn vrfy(&self, address: &str) -> crate::Result<Vec<String>> {
|
||||
let address = self.subaddressing.to_subaddress(address).await;
|
||||
match &self.store {
|
||||
DirectoryInner::Internal(store) => store.vrfy(address.as_ref()).await,
|
||||
DirectoryInner::Ldap(store) => store.vrfy(address.as_ref()).await,
|
||||
DirectoryInner::Sql(store) => store.vrfy(address.as_ref()).await,
|
||||
DirectoryInner::Imap(store) => store.vrfy(address.as_ref()).await,
|
||||
DirectoryInner::Smtp(store) => store.vrfy(address.as_ref()).await,
|
||||
DirectoryInner::Memory(store) => store.vrfy(address.as_ref()).await,
|
||||
DirectoryInner::Internal(store) => store.vrfy(address).await,
|
||||
DirectoryInner::Ldap(store) => store.vrfy(address).await,
|
||||
DirectoryInner::Sql(store) => store.vrfy(address).await,
|
||||
DirectoryInner::Imap(store) => store.vrfy(address).await,
|
||||
DirectoryInner::Smtp(store) => store.vrfy(address).await,
|
||||
DirectoryInner::Memory(store) => store.vrfy(address).await,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn expn(&self, address: &str) -> crate::Result<Vec<String>> {
|
||||
let address = self.subaddressing.to_subaddress(address).await;
|
||||
match &self.store {
|
||||
DirectoryInner::Internal(store) => store.expn(address.as_ref()).await,
|
||||
DirectoryInner::Ldap(store) => store.expn(address.as_ref()).await,
|
||||
DirectoryInner::Sql(store) => store.expn(address.as_ref()).await,
|
||||
DirectoryInner::Imap(store) => store.expn(address.as_ref()).await,
|
||||
DirectoryInner::Smtp(store) => store.expn(address.as_ref()).await,
|
||||
DirectoryInner::Memory(store) => store.expn(address.as_ref()).await,
|
||||
}
|
||||
}
|
||||
|
||||
fn store(&self) -> &Store {
|
||||
match &self.store {
|
||||
DirectoryInner::Internal(store) => store,
|
||||
DirectoryInner::Ldap(store) => &store.data_store,
|
||||
DirectoryInner::Sql(store) => &store.data_store,
|
||||
DirectoryInner::Imap(store) => &store.data_store,
|
||||
DirectoryInner::Smtp(store) => &store.data_store,
|
||||
DirectoryInner::Memory(store) => &store.data_store,
|
||||
DirectoryInner::Internal(store) => store.expn(address).await,
|
||||
DirectoryInner::Ldap(store) => store.expn(address).await,
|
||||
DirectoryInner::Sql(store) => store.expn(address).await,
|
||||
DirectoryInner::Imap(store) => store.expn(address).await,
|
||||
DirectoryInner::Smtp(store) => store.expn(address).await,
|
||||
DirectoryInner::Memory(store) => store.expn(address).await,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -22,7 +22,7 @@
|
|||
*/
|
||||
|
||||
use core::cache::CachedDirectory;
|
||||
use std::{borrow::Cow, fmt::Debug, sync::Arc};
|
||||
use std::{fmt::Debug, sync::Arc};
|
||||
|
||||
use ahash::AHashMap;
|
||||
use backend::{
|
||||
|
@ -36,18 +36,14 @@ use backend::{
|
|||
use deadpool::managed::PoolError;
|
||||
use ldap3::LdapError;
|
||||
use mail_send::Credentials;
|
||||
use store::{dispatch::blocked::BlockedIps, Store};
|
||||
use utils::{config::if_block::IfBlock, expr::Variable};
|
||||
use store::Store;
|
||||
|
||||
pub mod backend;
|
||||
pub mod core;
|
||||
|
||||
pub struct Directory {
|
||||
pub store: DirectoryInner,
|
||||
pub catch_all: AddressMapping,
|
||||
pub subaddressing: AddressMapping,
|
||||
pub cache: Option<CachedDirectory>,
|
||||
pub blocked_ips: Arc<BlockedIps>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
|
||||
|
@ -126,12 +122,6 @@ pub enum QueryBy<'x> {
|
|||
Credentials(&'x Credentials<String>),
|
||||
}
|
||||
|
||||
pub enum AuthResult<T> {
|
||||
Success(T),
|
||||
Failure,
|
||||
Banned,
|
||||
}
|
||||
|
||||
impl<T: serde::Serialize + serde::de::DeserializeOwned> Principal<T> {
|
||||
pub fn name(&self) -> &str {
|
||||
&self.name
|
||||
|
@ -165,14 +155,6 @@ impl Type {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub enum AddressMapping {
|
||||
Enable,
|
||||
Custom(IfBlock),
|
||||
#[default]
|
||||
Disable,
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Debug)]
|
||||
pub struct Directories {
|
||||
pub directories: AHashMap<String, Arc<Directory>>,
|
||||
|
@ -289,72 +271,6 @@ impl DirectoryError {
|
|||
}
|
||||
}
|
||||
|
||||
impl AddressMapping {
|
||||
pub async fn to_subaddress<'x, 'y: 'x>(&'x self, address: &'y str) -> Cow<'x, str> {
|
||||
match self {
|
||||
AddressMapping::Enable => {
|
||||
if let Some((local_part, domain_part)) = address.rsplit_once('@') {
|
||||
if let Some((local_part, _)) = local_part.split_once('+') {
|
||||
return format!("{}@{}", local_part, domain_part).into();
|
||||
}
|
||||
}
|
||||
}
|
||||
AddressMapping::Custom(if_block) => {
|
||||
if let Ok(result) = String::try_from(
|
||||
if_block
|
||||
.eval(
|
||||
|name| {
|
||||
if name == 1 {
|
||||
Variable::from(address)
|
||||
} else {
|
||||
Variable::default()
|
||||
}
|
||||
},
|
||||
|_, _| async { Variable::default() },
|
||||
)
|
||||
.await,
|
||||
) {
|
||||
return result.into();
|
||||
}
|
||||
}
|
||||
AddressMapping::Disable => (),
|
||||
}
|
||||
|
||||
address.into()
|
||||
}
|
||||
|
||||
pub async fn to_catch_all<'x, 'y: 'x>(&'x self, address: &'y str) -> Option<Cow<'x, str>> {
|
||||
match self {
|
||||
AddressMapping::Enable => address
|
||||
.rsplit_once('@')
|
||||
.map(|(_, domain_part)| format!("@{}", domain_part))
|
||||
.map(Cow::Owned),
|
||||
|
||||
AddressMapping::Custom(if_block) => {
|
||||
if let Ok(result) = String::try_from(
|
||||
if_block
|
||||
.eval(
|
||||
|name| {
|
||||
if name == 1 {
|
||||
Variable::from(address)
|
||||
} else {
|
||||
Variable::default()
|
||||
}
|
||||
},
|
||||
|_, _| async { Variable::default() },
|
||||
)
|
||||
.await,
|
||||
) {
|
||||
Some(result.into())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
AddressMapping::Disable => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for DirectoryError {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
match (self, other) {
|
||||
|
|
|
@ -50,15 +50,15 @@ impl IMAP {
|
|||
let capacity = config.property("cache.capacity")?.unwrap_or(100);
|
||||
|
||||
Ok(Arc::new(IMAP {
|
||||
max_request_size: config.property_or_static("imap.request.max-size", "52428800")?,
|
||||
max_auth_failures: config.property_or_static("imap.auth.max-failures", "3")?,
|
||||
max_request_size: config.property_or_default("imap.request.max-size", "52428800")?,
|
||||
max_auth_failures: config.property_or_default("imap.auth.max-failures", "3")?,
|
||||
name_shared: config
|
||||
.value("imap.folders.name.shared")
|
||||
.unwrap_or("Shared Folders")
|
||||
.to_string(),
|
||||
timeout_auth: config.property_or_static("imap.timeout.authenticated", "30m")?,
|
||||
timeout_unauth: config.property_or_static("imap.timeout.anonymous", "1m")?,
|
||||
timeout_idle: config.property_or_static("imap.timeout.idle", "30m")?,
|
||||
timeout_auth: config.property_or_default("imap.timeout.authenticated", "30m")?,
|
||||
timeout_unauth: config.property_or_default("imap.timeout.anonymous", "1m")?,
|
||||
timeout_idle: config.property_or_default("imap.timeout.idle", "30m")?,
|
||||
greeting_plain: StatusResponse::ok(SERVER_GREETING)
|
||||
.with_code(ResponseCode::Capability {
|
||||
capabilities: Capability::all_capabilities(false, false),
|
||||
|
@ -74,9 +74,9 @@ impl IMAP {
|
|||
RandomState::default(),
|
||||
shard_amount,
|
||||
),
|
||||
rate_requests: config.property_or_static("imap.rate-limit.requests", "2000/1m")?,
|
||||
rate_requests: config.property_or_default("imap.rate-limit.requests", "2000/1m")?,
|
||||
rate_concurrent: config.property("imap.rate-limit.concurrent")?.unwrap_or(4),
|
||||
allow_plain_auth: config.property_or_static("imap.auth.allow-plain-text", "false")?,
|
||||
allow_plain_auth: config.property_or_default("imap.auth.allow-plain-text", "false")?,
|
||||
cache_account: LruCache::with_capacity(
|
||||
config.property("cache.account.size")?.unwrap_or(2048),
|
||||
),
|
||||
|
|
|
@ -11,7 +11,7 @@ readme = "README.md"
|
|||
resolver = "2"
|
||||
|
||||
[dependencies]
|
||||
reqwest = { version = "0.11", default-features = false, features = ["rustls-tls-webpki-roots", "blocking"] }
|
||||
reqwest = { version = "0.12", default-features = false, features = ["rustls-tls-webpki-roots", "blocking"] }
|
||||
rpassword = "7.0"
|
||||
indicatif = "0.17.0"
|
||||
dialoguer = "0.11"
|
||||
|
|
|
@ -36,7 +36,7 @@ p256 = { version = "0.13", features = ["ecdh"] }
|
|||
hkdf = "0.12.3"
|
||||
sha1 = "0.10"
|
||||
sha2 = "0.10"
|
||||
reqwest = { version = "0.11", default-features = false, features = ["rustls-tls-webpki-roots"]}
|
||||
reqwest = { version = "0.12", default-features = false, features = ["rustls-tls-webpki-roots"]}
|
||||
tokio-tungstenite = "0.21"
|
||||
tungstenite = "0.21"
|
||||
chrono = "0.4"
|
||||
|
|
|
@ -74,7 +74,7 @@ impl crate::Config {
|
|||
.property("jmap.protocol.upload.quota.files")?
|
||||
.unwrap_or(1000),
|
||||
upload_tmp_ttl: settings
|
||||
.property_or_static::<Duration>("jmap.protocol.upload.ttl", "1h")?
|
||||
.property_or_default::<Duration>("jmap.protocol.upload.ttl", "1h")?
|
||||
.as_secs(),
|
||||
mailbox_max_depth: settings.property("jmap.mailbox.max-depth")?.unwrap_or(10),
|
||||
mailbox_name_max_len: settings
|
||||
|
@ -100,15 +100,16 @@ impl crate::Config {
|
|||
.property("cache.session.ttl")?
|
||||
.unwrap_or(Duration::from_secs(3600)),
|
||||
rate_authenticated: settings
|
||||
.property_or_static("jmap.rate-limit.account", "1000/1m")?,
|
||||
.property_or_default("jmap.rate-limit.account", "1000/1m")?,
|
||||
rate_authenticate_req: settings
|
||||
.property_or_static("authentication.rate-limit", "10/1m")?,
|
||||
rate_anonymous: settings.property_or_static("jmap.rate-limit.anonymous", "100/1m")?,
|
||||
.property_or_default("authentication.rate-limit", "10/1m")?,
|
||||
rate_anonymous: settings.property_or_default("jmap.rate-limit.anonymous", "100/1m")?,
|
||||
rate_use_forwarded: settings
|
||||
.property("jmap.rate-limit.use-forwarded")?
|
||||
.unwrap_or(false),
|
||||
oauth_key: settings
|
||||
.text_file_contents("oauth.key")?
|
||||
.value("oauth.key")
|
||||
.map(|s| s.to_string())
|
||||
.unwrap_or_else(|| {
|
||||
thread_rng()
|
||||
.sample_iter(Alphanumeric)
|
||||
|
@ -117,32 +118,34 @@ impl crate::Config {
|
|||
.collect::<String>()
|
||||
}),
|
||||
oauth_expiry_user_code: settings
|
||||
.property_or_static::<Duration>("oauth.expiry.user-code", "30m")?
|
||||
.property_or_default::<Duration>("oauth.expiry.user-code", "30m")?
|
||||
.as_secs(),
|
||||
oauth_expiry_auth_code: settings
|
||||
.property_or_static::<Duration>("oauth.expiry.auth-code", "10m")?
|
||||
.property_or_default::<Duration>("oauth.expiry.auth-code", "10m")?
|
||||
.as_secs(),
|
||||
oauth_expiry_token: settings
|
||||
.property_or_static::<Duration>("oauth.expiry.token", "1h")?
|
||||
.property_or_default::<Duration>("oauth.expiry.token", "1h")?
|
||||
.as_secs(),
|
||||
oauth_expiry_refresh_token: settings
|
||||
.property_or_static::<Duration>("oauth.expiry.refresh-token", "30d")?
|
||||
.property_or_default::<Duration>("oauth.expiry.refresh-token", "30d")?
|
||||
.as_secs(),
|
||||
oauth_expiry_refresh_token_renew: settings
|
||||
.property_or_static::<Duration>("oauth.expiry.refresh-token-renew", "4d")?
|
||||
.property_or_default::<Duration>("oauth.expiry.refresh-token-renew", "4d")?
|
||||
.as_secs(),
|
||||
oauth_max_auth_attempts: settings.property_or_static("oauth.auth.max-attempts", "3")?,
|
||||
oauth_max_auth_attempts: settings
|
||||
.property_or_default("oauth.auth.max-attempts", "3")?,
|
||||
event_source_throttle: settings
|
||||
.property_or_static("jmap.event-source.throttle", "1s")?,
|
||||
web_socket_throttle: settings.property_or_static("jmap.web-socket.throttle", "1s")?,
|
||||
web_socket_timeout: settings.property_or_static("jmap.web-socket.timeout", "10m")?,
|
||||
web_socket_heartbeat: settings.property_or_static("jmap.web-socket.heartbeat", "1m")?,
|
||||
push_max_total: settings.property_or_static("jmap.push.max-total", "100")?,
|
||||
.property_or_default("jmap.event-source.throttle", "1s")?,
|
||||
web_socket_throttle: settings.property_or_default("jmap.web-socket.throttle", "1s")?,
|
||||
web_socket_timeout: settings.property_or_default("jmap.web-socket.timeout", "10m")?,
|
||||
web_socket_heartbeat: settings
|
||||
.property_or_default("jmap.web-socket.heartbeat", "1m")?,
|
||||
push_max_total: settings.property_or_default("jmap.push.max-total", "100")?,
|
||||
principal_allow_lookups: settings
|
||||
.property("jmap.principal.allow-lookups")?
|
||||
.unwrap_or(true),
|
||||
encrypt: settings.property_or_static("storage.encryption.enable", "true")?,
|
||||
encrypt_append: settings.property_or_static("storage.encryption.append", "false")?,
|
||||
encrypt: settings.property_or_default("storage.encryption.enable", "true")?,
|
||||
encrypt_append: settings.property_or_default("storage.encryption.append", "false")?,
|
||||
spam_header: settings.value("spam.header.is-spam").and_then(|v| {
|
||||
v.split_once(':').map(|(k, v)| {
|
||||
(
|
||||
|
|
|
@ -309,7 +309,7 @@ impl SessionManager for JmapSessionManager {
|
|||
}
|
||||
|
||||
fn is_ip_blocked(&self, addr: &IpAddr) -> bool {
|
||||
self.inner.directory.blocked_ips.is_blocked(addr)
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -23,7 +23,7 @@
|
|||
|
||||
use std::{net::IpAddr, sync::Arc, time::Instant};
|
||||
|
||||
use directory::{AuthResult, QueryBy};
|
||||
use directory::QueryBy;
|
||||
use hyper::header;
|
||||
use jmap_proto::error::request::RequestError;
|
||||
use mail_parser::decoders::base64::base64_decode;
|
||||
|
|
|
@ -27,7 +27,6 @@ use std::{
|
|||
time::{Duration, Instant},
|
||||
};
|
||||
|
||||
use directory::AuthResult;
|
||||
use hyper::StatusCode;
|
||||
use store::rand::{
|
||||
distributions::{Alphanumeric, Standard},
|
||||
|
|
|
@ -28,7 +28,6 @@ use std::{
|
|||
time::{Duration, Instant},
|
||||
};
|
||||
|
||||
use directory::AuthResult;
|
||||
use http_body_util::{BodyExt, Full};
|
||||
use hyper::{body::Bytes, header, StatusCode};
|
||||
use mail_builder::encoders::base64::base64_encode;
|
||||
|
|
|
@ -29,7 +29,6 @@ use crate::{
|
|||
JMAP,
|
||||
};
|
||||
use aes::cipher::{block_padding::Pkcs7, BlockEncryptMut, KeyIvInit};
|
||||
use directory::AuthResult;
|
||||
use jmap_proto::types::{collection::Collection, property::Property};
|
||||
use mail_builder::{encoders::base64::base64_encode_mime, mime::make_boundary};
|
||||
use mail_parser::{decoders::base64::base64_decode, Message, MessageParser, MimeHeaders};
|
||||
|
|
|
@ -42,22 +42,22 @@ pub fn spawn_push_manager(settings: &Config) -> mpsc::Sender<Event> {
|
|||
let push_tx = push_tx_.clone();
|
||||
|
||||
let push_attempt_interval: Duration = settings
|
||||
.property_or_static("jmap.push.attempts.interval", "1m")
|
||||
.property_or_default("jmap.push.attempts.interval", "1m")
|
||||
.failed("Invalid configuration");
|
||||
let push_attempts_max: u32 = settings
|
||||
.property_or_static("jmap.push.attempts.max", "3")
|
||||
.property_or_default("jmap.push.attempts.max", "3")
|
||||
.failed("Invalid configuration");
|
||||
let push_retry_interval: Duration = settings
|
||||
.property_or_static("jmap.push.retry.interval", "1s")
|
||||
.property_or_default("jmap.push.retry.interval", "1s")
|
||||
.failed("Invalid configuration");
|
||||
let push_timeout: Duration = settings
|
||||
.property_or_static("jmap.push.timeout.request", "10s")
|
||||
.property_or_default("jmap.push.timeout.request", "10s")
|
||||
.failed("Invalid configuration");
|
||||
let push_verify_timeout: Duration = settings
|
||||
.property_or_static("jmap.push.timeout.verify", "1m")
|
||||
.property_or_default("jmap.push.timeout.verify", "1m")
|
||||
.failed("Invalid configuration");
|
||||
let push_throttle: Duration = settings
|
||||
.property_or_static("jmap.push.throttle", "1s")
|
||||
.property_or_default("jmap.push.throttle", "1s")
|
||||
.failed("Invalid configuration");
|
||||
|
||||
tokio::spawn(async move {
|
||||
|
|
|
@ -53,7 +53,7 @@ pub fn spawn_housekeeper(
|
|||
mut rx: mpsc::Receiver<Event>,
|
||||
) {
|
||||
let purge_cache = settings
|
||||
.property_or_static::<SimpleCron>("jmap.session.purge.frequency", "15 * *")
|
||||
.property_or_default::<SimpleCron>("jmap.session.purge.frequency", "15 * *")
|
||||
.failed("Initialize housekeeper");
|
||||
|
||||
let certificates = std::mem::take(&mut servers.certificates);
|
||||
|
@ -109,7 +109,8 @@ pub fn spawn_housekeeper(
|
|||
// Future releases will support reloading the configuration
|
||||
// for now, we just reload the blocked IP addresses
|
||||
let core = core.clone();
|
||||
tokio::spawn(async move {
|
||||
let todo = "fix";
|
||||
/*tokio::spawn(async move {
|
||||
match core.store.config_list(BLOCKED_IP_PREFIX, true).await {
|
||||
Ok(settings) => {
|
||||
if let Err(err) = core
|
||||
|
@ -134,7 +135,7 @@ pub fn spawn_housekeeper(
|
|||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
});*/
|
||||
}
|
||||
Event::IndexStart => {
|
||||
if !index_busy {
|
||||
|
|
|
@ -45,7 +45,7 @@ blake3 = "1.3"
|
|||
lru-cache = "0.1.2"
|
||||
rand = "0.8.5"
|
||||
x509-parser = "0.16.0"
|
||||
reqwest = { version = "0.11", default-features = false, features = ["rustls-tls-webpki-roots", "blocking"] }
|
||||
reqwest = { version = "0.12", default-features = false, features = ["rustls-tls-webpki-roots", "blocking"] }
|
||||
serde = { version = "1.0", features = ["derive", "rc"] }
|
||||
serde_json = "1.0"
|
||||
num_cpus = "1.15.0"
|
||||
|
|
|
@ -113,14 +113,9 @@ impl ConfigAuth for Config {
|
|||
let (signer, sealer) =
|
||||
match self.property_require::<Algorithm>(("signature", id, "algorithm"))? {
|
||||
Algorithm::RsaSha256 => {
|
||||
let pk = String::from_utf8(self.file_contents((
|
||||
"signature",
|
||||
id,
|
||||
"private-key",
|
||||
))?)
|
||||
.unwrap_or_default();
|
||||
let key = RsaKey::<Sha256>::from_rsa_pem(&pk)
|
||||
.or_else(|_| RsaKey::<Sha256>::from_pkcs8_pem(&pk))
|
||||
let pk = self.value_require(("signature", id, "private-key"))?;
|
||||
let key = RsaKey::<Sha256>::from_rsa_pem(pk)
|
||||
.or_else(|_| RsaKey::<Sha256>::from_pkcs8_pem(pk))
|
||||
.map_err(|err| {
|
||||
format!(
|
||||
"Failed to build RSA key for {}: {}",
|
||||
|
@ -128,8 +123,8 @@ impl ConfigAuth for Config {
|
|||
err
|
||||
)
|
||||
})?;
|
||||
let key_clone = RsaKey::<Sha256>::from_rsa_pem(&pk)
|
||||
.or_else(|_| RsaKey::<Sha256>::from_pkcs8_pem(&pk))
|
||||
let key_clone = RsaKey::<Sha256>::from_rsa_pem(pk)
|
||||
.or_else(|_| RsaKey::<Sha256>::from_pkcs8_pem(pk))
|
||||
.map_err(|err| {
|
||||
format!(
|
||||
"Failed to build RSA key for {}: {}",
|
||||
|
@ -148,7 +143,7 @@ impl ConfigAuth for Config {
|
|||
(("signature", id, "public-key"), &mut public_key),
|
||||
(("signature", id, "private-key"), &mut private_key),
|
||||
] {
|
||||
let mut contents = self.file_contents(key)?.into_iter();
|
||||
let mut contents = self.value_require(key)?.as_bytes().iter().copied();
|
||||
let mut base64 = vec![];
|
||||
|
||||
'outer: while let Some(ch) = contents.next() {
|
||||
|
|
|
@ -42,7 +42,7 @@ use mail_send::Credentials;
|
|||
use sieve::Sieve;
|
||||
use store::Stores;
|
||||
use utils::{
|
||||
config::{if_block::IfBlock, utils::ConstantValue, Rate, Server, ServerProtocol},
|
||||
config::{if_block::IfBlock, utils::ConstantValue, Rate, ServerProtocol},
|
||||
expr::{Expression, Token},
|
||||
snowflake::SnowflakeIdGenerator,
|
||||
};
|
||||
|
@ -397,8 +397,7 @@ pub enum VerifyStrategy {
|
|||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct ConfigContext<'x> {
|
||||
pub servers: &'x [Server],
|
||||
pub struct ConfigContext {
|
||||
pub directory: Directories,
|
||||
pub stores: Stores,
|
||||
pub scripts: AHashMap<String, Arc<Sieve>>,
|
||||
|
@ -406,10 +405,9 @@ pub struct ConfigContext<'x> {
|
|||
pub sealers: AHashMap<String, Arc<ArcSealer>>,
|
||||
}
|
||||
|
||||
impl<'x> ConfigContext<'x> {
|
||||
pub fn new(servers: &'x [Server]) -> Self {
|
||||
impl ConfigContext {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
servers,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -77,9 +77,9 @@ impl ConfigSieve for Config {
|
|||
let sieve_ctx = SieveContext {
|
||||
psl: self.parse_public_suffix()?,
|
||||
bayes_cache: BayesTokenCache::new(
|
||||
self.property_or_static("cache.bayes.capacity", "8192")?,
|
||||
self.property_or_static("cache.bayes.ttl.positive", "1h")?,
|
||||
self.property_or_static("cache.bayes.ttl.negative", "1h")?,
|
||||
self.property_or_default("cache.bayes.capacity", "8192")?,
|
||||
self.property_or_default("cache.bayes.ttl.positive", "1h")?,
|
||||
self.property_or_default("cache.bayes.ttl.negative", "1h")?,
|
||||
),
|
||||
remote_lists: Default::default(),
|
||||
};
|
||||
|
@ -95,7 +95,7 @@ impl ConfigSieve for Config {
|
|||
.with_max_header_size(10240)
|
||||
.with_max_includes(10)
|
||||
.with_no_capability_check(
|
||||
self.property_or_static("sieve.trusted.no-capability-check", "false")?,
|
||||
self.property_or_default("sieve.trusted.no-capability-check", "false")?,
|
||||
)
|
||||
.register_functions(&mut fnc_map);
|
||||
|
||||
|
@ -115,7 +115,7 @@ impl ConfigSieve for Config {
|
|||
.with_capability(Capability::Expressions)
|
||||
.with_capability(Capability::While)
|
||||
.with_max_variable_size(
|
||||
self.property_or_static("sieve.trusted.limits.variable-size", "52428800")?,
|
||||
self.property_or_default("sieve.trusted.limits.variable-size", "52428800")?,
|
||||
)
|
||||
.with_max_header_size(10240)
|
||||
.with_valid_notification_uri("mailto")
|
||||
|
@ -152,19 +152,19 @@ impl ConfigSieve for Config {
|
|||
let key = ("sieve.trusted.scripts", id);
|
||||
|
||||
let script = if !self.contains_key(key) {
|
||||
let mut script = Vec::new();
|
||||
let mut script = String::new();
|
||||
for sub_key in self.sub_keys(key, "") {
|
||||
script.extend(self.file_contents(("sieve.trusted.scripts", id, sub_key))?);
|
||||
script.push_str(self.value_require(("sieve.trusted.scripts", id, sub_key))?);
|
||||
}
|
||||
script
|
||||
} else {
|
||||
self.file_contents(key)?
|
||||
self.value_require(key)?.to_string()
|
||||
};
|
||||
|
||||
ctx.scripts.insert(
|
||||
id.to_string(),
|
||||
compiler
|
||||
.compile(&script)
|
||||
.compile(script.as_bytes())
|
||||
.map_err(|err| format!("Failed to compile Sieve script {id:?}: {err}"))?
|
||||
.into(),
|
||||
);
|
||||
|
|
|
@ -488,25 +488,25 @@ impl ConfigSession for Config {
|
|||
hostname,
|
||||
port,
|
||||
timeout_connect: self
|
||||
.property_or_static(("session.data.milter", id, "timeout.connect"), "30s")?,
|
||||
.property_or_default(("session.data.milter", id, "timeout.connect"), "30s")?,
|
||||
timeout_command: self
|
||||
.property_or_static(("session.data.milter", id, "timeout.command"), "30s")?,
|
||||
.property_or_default(("session.data.milter", id, "timeout.command"), "30s")?,
|
||||
timeout_data: self
|
||||
.property_or_static(("session.data.milter", id, "timeout.data"), "60s")?,
|
||||
tls: self.property_or_static(("session.data.milter", id, "tls"), "false")?,
|
||||
tls_allow_invalid_certs: self.property_or_static(
|
||||
.property_or_default(("session.data.milter", id, "timeout.data"), "60s")?,
|
||||
tls: self.property_or_default(("session.data.milter", id, "tls"), "false")?,
|
||||
tls_allow_invalid_certs: self.property_or_default(
|
||||
("session.data.milter", id, "allow-invalid-certs"),
|
||||
"false",
|
||||
)?,
|
||||
tempfail_on_error: self.property_or_static(
|
||||
tempfail_on_error: self.property_or_default(
|
||||
("session.data.milter", id, "options.tempfail-on-error"),
|
||||
"true",
|
||||
)?,
|
||||
max_frame_len: self.property_or_static(
|
||||
max_frame_len: self.property_or_default(
|
||||
("session.data.milter", id, "options.max-response-size"),
|
||||
"52428800",
|
||||
)?,
|
||||
protocol_version: match self.property_or_static::<u32>(
|
||||
protocol_version: match self.property_or_default::<u32>(
|
||||
("session.data.milter", id, "options.version"),
|
||||
"6",
|
||||
)? {
|
||||
|
|
|
@ -62,24 +62,23 @@ impl ConfigShared for Config {
|
|||
.clone(),
|
||||
default_data_store: ctx.stores.get_store(self, "storage.data")?,
|
||||
default_lookup_store: self
|
||||
.value_or_default("storage.lookup", "storage.data")
|
||||
.value_or_else("storage.lookup", "storage.data")
|
||||
.and_then(|id| ctx.stores.lookup_stores.get(id))
|
||||
.ok_or_else(|| {
|
||||
format!(
|
||||
"Lookup store {:?} not found for key \"storage.lookup\".",
|
||||
self.value_or_default("storage.lookup", "storage.data")
|
||||
self.value_or_else("storage.lookup", "storage.data")
|
||||
.unwrap()
|
||||
)
|
||||
})?
|
||||
.clone(),
|
||||
default_blob_store: self
|
||||
.value_or_default("storage.blob", "storage.data")
|
||||
.value_or_else("storage.blob", "storage.data")
|
||||
.and_then(|id| ctx.stores.blob_stores.get(id))
|
||||
.ok_or_else(|| {
|
||||
format!(
|
||||
"Lookup store {:?} not found for key \"storage.blob\".",
|
||||
self.value_or_default("storage.blob", "storage.data")
|
||||
.unwrap()
|
||||
self.value_or_else("storage.blob", "storage.data").unwrap()
|
||||
)
|
||||
})?
|
||||
.clone(),
|
||||
|
|
|
@ -23,7 +23,7 @@
|
|||
|
||||
use std::{net::IpAddr, str::FromStr, sync::Arc};
|
||||
|
||||
use directory::{AuthResult, Type};
|
||||
use directory::Type;
|
||||
use http_body_util::{combinators::BoxBody, BodyExt, Empty, Full};
|
||||
use hyper::{
|
||||
body::{self, Bytes},
|
||||
|
@ -161,11 +161,7 @@ impl SessionManager for SmtpAdminSessionManager {
|
|||
}
|
||||
|
||||
fn is_ip_blocked(&self, addr: &IpAddr) -> bool {
|
||||
self.inner
|
||||
.shared
|
||||
.default_directory
|
||||
.blocked_ips
|
||||
.is_blocked(addr)
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -267,7 +263,8 @@ impl SMTP {
|
|||
})
|
||||
})
|
||||
{
|
||||
match self
|
||||
let todo = "fix";
|
||||
/*match self
|
||||
.shared
|
||||
.default_directory
|
||||
.authenticate(&Credentials::Plain { username, secret }, remote_addr, false)
|
||||
|
@ -297,7 +294,7 @@ impl SMTP {
|
|||
"Temporary authentication failure."
|
||||
);
|
||||
}
|
||||
}
|
||||
}*/
|
||||
} else {
|
||||
tracing::debug!(
|
||||
context = "management",
|
||||
|
|
|
@ -21,7 +21,6 @@
|
|||
* for more details.
|
||||
*/
|
||||
|
||||
use directory::AuthResult;
|
||||
use mail_parser::decoders::base64::base64_decode;
|
||||
use mail_send::Credentials;
|
||||
use smtp_proto::{IntoString, AUTH_LOGIN, AUTH_OAUTHBEARER, AUTH_PLAIN, AUTH_XOAUTH2};
|
||||
|
@ -181,8 +180,8 @@ impl<T: AsyncWrite + AsyncRead + Unpin> Session<T> {
|
|||
| Credentials::XOauth2 { username, .. }
|
||||
| Credentials::OAuthBearer { token: username } => username.to_string(),
|
||||
};
|
||||
|
||||
match lookup
|
||||
let todo = "fix";
|
||||
/*match lookup
|
||||
.authenticate(&credentials, self.data.remote_ip, false)
|
||||
.await
|
||||
{
|
||||
|
@ -228,7 +227,7 @@ impl<T: AsyncWrite + AsyncRead + Unpin> Session<T> {
|
|||
return Err(());
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}*/
|
||||
} else {
|
||||
tracing::warn!(
|
||||
parent: &self.span,
|
||||
|
|
|
@ -78,11 +78,7 @@ impl SessionManager for SmtpSessionManager {
|
|||
}
|
||||
|
||||
fn is_ip_blocked(&self, addr: &IpAddr) -> bool {
|
||||
self.inner
|
||||
.shared
|
||||
.default_directory
|
||||
.blocked_ips
|
||||
.is_blocked(addr)
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -66,7 +66,7 @@ impl SMTP {
|
|||
#[cfg(feature = "local_delivery")] delivery_tx: mpsc::Sender<utils::ipc::DeliveryEvent>,
|
||||
) -> Result<Arc<Self>, String> {
|
||||
// Read configuration parameters
|
||||
let mut config_ctx = ConfigContext::new(&servers.inner);
|
||||
let mut config_ctx = ConfigContext::new();
|
||||
config_ctx.directory = directory.clone();
|
||||
config_ctx.stores = stores.clone();
|
||||
|
||||
|
|
|
@ -32,7 +32,6 @@ use smtp_proto::{
|
|||
MAIL_BY_TRACE, MAIL_RET_FULL, MAIL_RET_HDRS, RCPT_NOTIFY_DELAY, RCPT_NOTIFY_FAILURE,
|
||||
RCPT_NOTIFY_NEVER, RCPT_NOTIFY_SUCCESS,
|
||||
};
|
||||
use store::{backend::memory::MemoryStore, LookupStore};
|
||||
use tokio::runtime::Handle;
|
||||
|
||||
use crate::{core::SMTP, queue::DomainPart};
|
||||
|
@ -165,22 +164,12 @@ impl SMTP {
|
|||
}
|
||||
}
|
||||
Recipient::List(list) => {
|
||||
if let Some(list) = self.shared.lookup_stores.get(&list) {
|
||||
if let LookupStore::Memory(list) = list {
|
||||
if let MemoryStore::List(list) = list.as_ref() {
|
||||
for rcpt in &list.set {
|
||||
handle.block_on(message.add_recipient(rcpt, self));
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
tracing::warn!(
|
||||
parent: &span,
|
||||
context = "sieve",
|
||||
event = "send-failed",
|
||||
reason = format!("Lookup {list:?} not found.")
|
||||
);
|
||||
}
|
||||
tracing::warn!(
|
||||
parent: &span,
|
||||
context = "sieve",
|
||||
event = "send-failed",
|
||||
reason = format!("Lookup {list:?} not supported.")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "store"
|
||||
version = "0.1.0"
|
||||
version = "0.6.0"
|
||||
edition = "2021"
|
||||
resolver = "2"
|
||||
|
||||
|
@ -8,7 +8,7 @@ resolver = "2"
|
|||
utils = { path = "../utils" }
|
||||
nlp = { path = "../nlp" }
|
||||
rocksdb = { version = "0.22", optional = true, features = ["multi-threaded-cf"] }
|
||||
foundationdb = { version = "0.8.0", features = ["embedded-fdb-include"], optional = true }
|
||||
foundationdb = { version = "0.9.0", features = ["embedded-fdb-include", "fdb-7_3"], optional = true }
|
||||
rusqlite = { version = "0.31.0", features = ["bundled"], optional = true }
|
||||
rust-s3 = { version = "0.33.0", default-features = false, features = ["tokio-rustls-tls", "no-verify-ssl"], optional = true }
|
||||
tokio = { version = "1.23", features = ["sync", "fs", "io-util"] }
|
||||
|
@ -35,14 +35,13 @@ rustls = { version = "0.22.0", optional = true }
|
|||
rustls-pki-types = { version = "1", optional = true }
|
||||
ring = { version = "0.17", optional = true }
|
||||
bytes = { version = "1.0", optional = true }
|
||||
mysql_async = { version = "0.33", default-features = false, features = ["default-rustls"], optional = true }
|
||||
mysql_async = { version = "0.34", default-features = false, features = ["default-rustls"], optional = true }
|
||||
elasticsearch = { version = "8.5.0-alpha.1", default-features = false, features = ["rustls-tls"], optional = true }
|
||||
serde_json = {version = "1.0.64", optional = true }
|
||||
regex = "1.7.0"
|
||||
reqwest = { version = "0.11", default-features = false, features = ["rustls-tls-webpki-roots", "blocking"] }
|
||||
flate2 = "1.0"
|
||||
async-trait = "0.1.68"
|
||||
redis = { version = "0.24.0", features = [ "tokio-comp", "tokio-rustls-comp", "tls-rustls-insecure", "tls-rustls-webpki-roots", "cluster-async"], optional = true }
|
||||
redis = { version = "0.25.2", features = [ "tokio-comp", "tokio-rustls-comp", "tls-rustls-insecure", "tls-rustls-webpki-roots", "cluster-async"], optional = true }
|
||||
deadpool = { version = "0.10.0", features = ["managed"], optional = true }
|
||||
bincode = "1.3.3"
|
||||
arc-swap = "1.6.0"
|
||||
|
|
|
@ -44,61 +44,78 @@ pub struct ElasticSearchStore {
|
|||
pub(crate) static INDEX_NAMES: &[&str] = &["stalwart_email"];
|
||||
|
||||
impl ElasticSearchStore {
|
||||
pub async fn open(config: &Config, prefix: impl AsKey) -> crate::Result<Self> {
|
||||
pub async fn open(config: &mut Config, prefix: impl AsKey) -> Option<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
let credentials = if let Some(user) = config.value((&prefix, "user")) {
|
||||
let password = config.value_require((&prefix, "password"))?;
|
||||
Some(Credentials::Basic(user.to_string(), password.to_string()))
|
||||
let user = user.to_string();
|
||||
let password = config
|
||||
.value_require_((&prefix, "password"))
|
||||
.unwrap_or_default();
|
||||
Some(Credentials::Basic(user, password.to_string()))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let es = if let Some(url) = config.value((&prefix, "url")) {
|
||||
let url = Url::parse(url).map_err(|e| {
|
||||
crate::Error::InternalError(format!(
|
||||
"Invalid URL {}: {}",
|
||||
(&prefix, "url").as_key(),
|
||||
e
|
||||
))
|
||||
})?;
|
||||
let url = Url::parse(url)
|
||||
.map_err(|e| config.new_parse_error((&prefix, "url"), format!("Invalid URL: {e}",)))
|
||||
.ok()?;
|
||||
let conn_pool = SingleNodeConnectionPool::new(url);
|
||||
let mut builder = TransportBuilder::new(conn_pool);
|
||||
if let Some(credentials) = credentials {
|
||||
builder = builder.auth(credentials);
|
||||
}
|
||||
if config.property_or_static::<bool>((&prefix, "tls.allow-invalid-certs"), "false")? {
|
||||
if config
|
||||
.property_or_default_::<bool>((&prefix, "tls.allow-invalid-certs"), "false")
|
||||
.unwrap_or(false)
|
||||
{
|
||||
builder = builder.cert_validation(CertificateValidation::None);
|
||||
}
|
||||
|
||||
Self {
|
||||
index: Elasticsearch::new(builder.build()?),
|
||||
}
|
||||
} else if let Some(cloud_id) = config.value((&prefix, "cloud-id")) {
|
||||
Self {
|
||||
index: Elasticsearch::new(Transport::cloud(
|
||||
cloud_id,
|
||||
credentials.ok_or_else(|| {
|
||||
crate::Error::InternalError(format!(
|
||||
"Missing user and/or password for ElasticSearch store {}",
|
||||
prefix
|
||||
))
|
||||
})?,
|
||||
)?),
|
||||
index: Elasticsearch::new(
|
||||
builder
|
||||
.build()
|
||||
.map_err(|err| config.new_build_error(prefix.as_str(), err.to_string()))
|
||||
.ok()?,
|
||||
),
|
||||
}
|
||||
} else {
|
||||
return Err(crate::Error::InternalError(format!(
|
||||
"Missing url or cloud_id for ElasticSearch store {}",
|
||||
prefix
|
||||
)));
|
||||
let credentials = credentials.unwrap_or_else(|| {
|
||||
config.new_build_error((&prefix, "user"), "Missing property");
|
||||
Credentials::Basic("".to_string(), "".to_string())
|
||||
});
|
||||
|
||||
if let Some(cloud_id) = config.value((&prefix, "cloud-id")) {
|
||||
Self {
|
||||
index: Elasticsearch::new(
|
||||
Transport::cloud(cloud_id, credentials)
|
||||
.map_err(|err| config.new_build_error(prefix.as_str(), err.to_string()))
|
||||
.ok()?,
|
||||
),
|
||||
}
|
||||
} else {
|
||||
config.new_parse_error(
|
||||
prefix.as_str(),
|
||||
"Missing url or cloud_id for ElasticSearch store",
|
||||
);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
es.create_index(
|
||||
config.property_or_static((&prefix, "index.shards"), "3")?,
|
||||
config.property_or_static((&prefix, "index.replicas"), "0")?,
|
||||
config
|
||||
.property_or_default_((&prefix, "index.shards"), "3")
|
||||
.unwrap_or(3),
|
||||
config
|
||||
.property_or_default_((&prefix, "index.replicas"), "0")
|
||||
.unwrap_or(0),
|
||||
)
|
||||
.await?;
|
||||
.await
|
||||
.map_err(|err| config.new_build_error(prefix.as_str(), err.to_string()))
|
||||
.ok()?;
|
||||
|
||||
Ok(es)
|
||||
Some(es)
|
||||
}
|
||||
|
||||
async fn create_index(&self, shards: usize, replicas: usize) -> crate::Result<()> {
|
||||
|
|
|
@ -29,31 +29,70 @@ use utils::config::{utils::AsKey, Config};
|
|||
use super::FdbStore;
|
||||
|
||||
impl FdbStore {
|
||||
pub async fn open(config: &Config, prefix: impl AsKey) -> crate::Result<Self> {
|
||||
pub async fn open(config: &mut Config, prefix: impl AsKey) -> Option<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
let guard = unsafe { foundationdb::boot() };
|
||||
|
||||
let db = Database::new(config.value((&prefix, "cluster-file")))?;
|
||||
if let Some(value) = config.property::<Duration>((&prefix, "transaction.timeout"))? {
|
||||
db.set_option(DatabaseOption::TransactionTimeout(value.as_millis() as i32))?;
|
||||
let db = Database::new(config.value((&prefix, "cluster-file")))
|
||||
.map_err(|err| {
|
||||
config.new_build_error(prefix.as_str(), format!("Failed to open database: {err:?}"))
|
||||
})
|
||||
.ok()?;
|
||||
|
||||
if let Some(value) = config.property_::<Duration>((&prefix, "transaction.timeout")) {
|
||||
db.set_option(DatabaseOption::TransactionTimeout(value.as_millis() as i32))
|
||||
.map_err(|err| {
|
||||
config.new_build_error(
|
||||
(&prefix, "transaction.timeout"),
|
||||
format!("Failed to set option: {err:?}"),
|
||||
)
|
||||
})
|
||||
.ok()?;
|
||||
}
|
||||
if let Some(value) = config.property((&prefix, "transaction.retry-limit"))? {
|
||||
db.set_option(DatabaseOption::TransactionRetryLimit(value))?;
|
||||
if let Some(value) = config.property_((&prefix, "transaction.retry-limit")) {
|
||||
db.set_option(DatabaseOption::TransactionRetryLimit(value))
|
||||
.map_err(|err| {
|
||||
config.new_build_error(
|
||||
(&prefix, "transaction.retry-limit"),
|
||||
format!("Failed to set option: {err:?}"),
|
||||
)
|
||||
})
|
||||
.ok()?;
|
||||
}
|
||||
if let Some(value) =
|
||||
config.property::<Duration>((&prefix, "transaction.max-retry-delay"))?
|
||||
if let Some(value) = config.property_::<Duration>((&prefix, "transaction.max-retry-delay"))
|
||||
{
|
||||
db.set_option(DatabaseOption::TransactionMaxRetryDelay(
|
||||
value.as_millis() as i32
|
||||
))?;
|
||||
))
|
||||
.map_err(|err| {
|
||||
config.new_build_error(
|
||||
(&prefix, "transaction.max-retry-delay"),
|
||||
format!("Failed to set option: {err:?}"),
|
||||
)
|
||||
})
|
||||
.ok()?;
|
||||
}
|
||||
if let Some(value) = config.property((&prefix, "ids.machine"))? {
|
||||
db.set_option(DatabaseOption::MachineId(value))?;
|
||||
if let Some(value) = config.property_((&prefix, "ids.machine")) {
|
||||
db.set_option(DatabaseOption::MachineId(value))
|
||||
.map_err(|err| {
|
||||
config.new_build_error(
|
||||
(&prefix, "ids.machine"),
|
||||
format!("Failed to set option: {err:?}"),
|
||||
)
|
||||
})
|
||||
.ok()?;
|
||||
}
|
||||
if let Some(value) = config.property((&prefix, "ids.datacenter"))? {
|
||||
db.set_option(DatabaseOption::DatacenterId(value))?;
|
||||
if let Some(value) = config.property_((&prefix, "ids.datacenter")) {
|
||||
db.set_option(DatabaseOption::DatacenterId(value))
|
||||
.map_err(|err| {
|
||||
config.new_build_error(
|
||||
(&prefix, "ids.datacenter"),
|
||||
format!("Failed to set option: {err:?}"),
|
||||
)
|
||||
})
|
||||
.ok()?;
|
||||
}
|
||||
|
||||
Ok(Self { guard, db })
|
||||
Some(Self { guard, db })
|
||||
}
|
||||
}
|
||||
|
|
|
@ -38,21 +38,29 @@ pub struct FsStore {
|
|||
}
|
||||
|
||||
impl FsStore {
|
||||
pub async fn open(config: &Config, prefix: impl AsKey) -> crate::Result<Self> {
|
||||
pub async fn open(config: &mut Config, prefix: impl AsKey) -> Option<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
let path = PathBuf::from(config.value_require((&prefix, "path"))?);
|
||||
let path = PathBuf::from(config.value_require_((&prefix, "path"))?);
|
||||
if !path.exists() {
|
||||
fs::create_dir_all(&path).await.map_err(|e| {
|
||||
crate::Error::InternalError(format!(
|
||||
"Failed to create blob store path {:?}: {}",
|
||||
path, e
|
||||
))
|
||||
})?;
|
||||
fs::create_dir_all(&path)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
config.new_build_error(
|
||||
(&prefix, "path"),
|
||||
format!("Failed to create directory: {e}"),
|
||||
)
|
||||
})
|
||||
.ok()?;
|
||||
}
|
||||
|
||||
Ok(FsStore {
|
||||
Some(FsStore {
|
||||
path,
|
||||
hash_levels: std::cmp::min(config.property_or_static((&prefix, "depth"), "2")?, 5),
|
||||
hash_levels: std::cmp::min(
|
||||
config
|
||||
.property_or_default_((&prefix, "depth"), "2")
|
||||
.unwrap_or(2),
|
||||
5,
|
||||
),
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
@ -1,127 +0,0 @@
|
|||
/*
|
||||
* Copyright (c) 2020-2023, Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of the Stalwart Sieve Interpreter.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct GlobPattern {
|
||||
pattern: Vec<PatternChar>,
|
||||
to_lower: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum PatternChar {
|
||||
WildcardMany { num: usize, match_pos: usize },
|
||||
WildcardSingle { match_pos: usize },
|
||||
Char { char: char, match_pos: usize },
|
||||
}
|
||||
|
||||
impl GlobPattern {
|
||||
pub fn compile(pattern: &str, to_lower: bool) -> Self {
|
||||
let mut chars = Vec::new();
|
||||
let mut is_escaped = false;
|
||||
let mut str = pattern.chars().peekable();
|
||||
|
||||
while let Some(char) = str.next() {
|
||||
match char {
|
||||
'*' if !is_escaped => {
|
||||
let mut num = 1;
|
||||
while let Some('*') = str.peek() {
|
||||
num += 1;
|
||||
str.next();
|
||||
}
|
||||
chars.push(PatternChar::WildcardMany { num, match_pos: 0 });
|
||||
}
|
||||
'?' if !is_escaped => {
|
||||
chars.push(PatternChar::WildcardSingle { match_pos: 0 });
|
||||
}
|
||||
'\\' if !is_escaped => {
|
||||
is_escaped = true;
|
||||
continue;
|
||||
}
|
||||
_ => {
|
||||
if is_escaped {
|
||||
is_escaped = false;
|
||||
}
|
||||
if to_lower && char.is_uppercase() {
|
||||
for char in char.to_lowercase() {
|
||||
chars.push(PatternChar::Char { char, match_pos: 0 });
|
||||
}
|
||||
} else {
|
||||
chars.push(PatternChar::Char { char, match_pos: 0 });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
GlobPattern {
|
||||
pattern: chars,
|
||||
to_lower,
|
||||
}
|
||||
}
|
||||
|
||||
// Credits: Algorithm ported from https://research.swtch.com/glob
|
||||
pub fn matches(&self, value: &str) -> bool {
|
||||
let value = if self.to_lower {
|
||||
value.to_lowercase().chars().collect::<Vec<_>>()
|
||||
} else {
|
||||
value.chars().collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
let mut px = 0;
|
||||
let mut nx = 0;
|
||||
let mut next_px = 0;
|
||||
let mut next_nx = 0;
|
||||
|
||||
while px < self.pattern.len() || nx < value.len() {
|
||||
match self.pattern.get(px) {
|
||||
Some(PatternChar::Char { char, .. }) => {
|
||||
if matches!(value.get(nx), Some(nc) if nc == char ) {
|
||||
px += 1;
|
||||
nx += 1;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
Some(PatternChar::WildcardSingle { .. }) => {
|
||||
if nx < value.len() {
|
||||
px += 1;
|
||||
nx += 1;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
Some(PatternChar::WildcardMany { .. }) => {
|
||||
next_px = px;
|
||||
next_nx = nx + 1;
|
||||
px += 1;
|
||||
continue;
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
if 0 < next_nx && next_nx <= value.len() {
|
||||
px = next_px;
|
||||
nx = next_nx;
|
||||
continue;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
true
|
||||
}
|
||||
}
|
|
@ -1,66 +0,0 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of the Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use crate::{IntoRows, Row};
|
||||
|
||||
use super::{LookupList, MatchType};
|
||||
|
||||
impl IntoRows for Option<Row> {
|
||||
fn into_row(self) -> Option<Row> {
|
||||
self
|
||||
}
|
||||
|
||||
fn into_rows(self) -> crate::Rows {
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
fn into_named_rows(self) -> crate::NamedRows {
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
|
||||
impl LookupList {
|
||||
pub fn contains(&self, value: &str) -> bool {
|
||||
if self.set.contains(value) {
|
||||
true
|
||||
} else {
|
||||
for match_type in &self.matches {
|
||||
let result = match match_type {
|
||||
MatchType::StartsWith(s) => value.starts_with(s),
|
||||
MatchType::EndsWith(s) => value.ends_with(s),
|
||||
MatchType::Glob(g) => g.matches(value),
|
||||
MatchType::Regex(r) => r.is_match(value),
|
||||
};
|
||||
if result {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extend(&mut self, other: Self) {
|
||||
self.set.extend(other.set);
|
||||
self.matches.extend(other.matches);
|
||||
}
|
||||
}
|
|
@ -1,298 +0,0 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of the Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
use std::{
|
||||
fs::File,
|
||||
io::{BufRead, BufReader},
|
||||
};
|
||||
|
||||
use utils::config::{
|
||||
utils::{AsKey, ParseValue},
|
||||
Config,
|
||||
};
|
||||
|
||||
use crate::Value;
|
||||
|
||||
use super::{glob::GlobPattern, LookupList, LookupMap, MatchType, MemoryStore};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum LookupType {
|
||||
List,
|
||||
Glob,
|
||||
Regex,
|
||||
Map,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct LookupFormat {
|
||||
pub lookup_type: LookupType,
|
||||
pub comment: Option<String>,
|
||||
pub separator: Option<String>,
|
||||
}
|
||||
|
||||
impl MemoryStore {
|
||||
pub async fn open(config: &Config, prefix: impl AsKey) -> crate::Result<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
|
||||
let lookup_type = config.property_require::<LookupType>((&prefix, "format"))?;
|
||||
let format = LookupFormat {
|
||||
lookup_type,
|
||||
comment: config.value((&prefix, "comment")).map(|s| s.to_string()),
|
||||
separator: config.value((&prefix, "separator")).map(|s| s.to_string()),
|
||||
};
|
||||
|
||||
Ok(match lookup_type {
|
||||
LookupType::Map => {
|
||||
MemoryStore::Map(parse_lookup_list(config, (&prefix, "values"), format)?)
|
||||
}
|
||||
_ => MemoryStore::List(parse_lookup_list(config, (&prefix, "values"), format)?),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_lookup_list<K: AsKey, T: InsertLine>(
|
||||
config: &Config,
|
||||
key: K,
|
||||
format: LookupFormat,
|
||||
) -> utils::config::Result<T> {
|
||||
let mut list = T::default();
|
||||
let mut last_failed = false;
|
||||
for (_, mut value) in config.values(key.clone()) {
|
||||
if let Some(new_value) = value.strip_prefix("fallback+") {
|
||||
if last_failed {
|
||||
value = new_value;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
last_failed = false;
|
||||
|
||||
if value.starts_with("https://") || value.starts_with("http://") {
|
||||
match tokio::task::block_in_place(|| {
|
||||
reqwest::blocking::get(value).and_then(|r| {
|
||||
if r.status().is_success() {
|
||||
r.bytes().map(Ok)
|
||||
} else {
|
||||
Ok(Err(r))
|
||||
}
|
||||
})
|
||||
}) {
|
||||
Ok(Ok(bytes)) => {
|
||||
match list.insert_lines(&*bytes, &format, value.ends_with(".gz")) {
|
||||
Ok(_) => continue,
|
||||
Err(err) => {
|
||||
tracing::warn!(
|
||||
"Failed to read list {key:?} from {value:?}: {err}",
|
||||
key = key.as_key(),
|
||||
value = value,
|
||||
err = err
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Err(response)) => {
|
||||
tracing::warn!(
|
||||
"Failed to fetch list {key:?} from {value:?}: Status {status}",
|
||||
key = key.as_key(),
|
||||
value = value,
|
||||
status = response.status()
|
||||
);
|
||||
}
|
||||
Err(err) => {
|
||||
tracing::warn!(
|
||||
"Failed to fetch list {key:?} from {value:?}: {err}",
|
||||
key = key.as_key(),
|
||||
value = value,
|
||||
err = err
|
||||
);
|
||||
}
|
||||
}
|
||||
last_failed = true;
|
||||
} else if let Some(path) = value.strip_prefix("file://") {
|
||||
list.insert_lines(
|
||||
File::open(path).map_err(|err| {
|
||||
format!(
|
||||
"Failed to read file {path:?} for list {}: {err}",
|
||||
key.as_key()
|
||||
)
|
||||
})?,
|
||||
&format,
|
||||
value.ends_with(".gz"),
|
||||
)
|
||||
.map_err(|err| {
|
||||
format!(
|
||||
"Failed to read file {path:?} for list {}: {err}",
|
||||
key.as_key()
|
||||
)
|
||||
})?;
|
||||
} else {
|
||||
list.insert(value.to_string(), &format);
|
||||
}
|
||||
}
|
||||
Ok(list)
|
||||
}
|
||||
|
||||
pub trait InsertLine: Default {
|
||||
fn insert(&mut self, entry: String, format: &LookupFormat);
|
||||
fn insert_lines<R: Sized + std::io::Read>(
|
||||
&mut self,
|
||||
reader: R,
|
||||
format: &LookupFormat,
|
||||
decompress: bool,
|
||||
) -> Result<(), std::io::Error> {
|
||||
let reader: Box<dyn std::io::Read> = if decompress {
|
||||
Box::new(flate2::read::GzDecoder::new(reader))
|
||||
} else {
|
||||
Box::new(reader)
|
||||
};
|
||||
|
||||
for line in BufReader::new(reader).lines() {
|
||||
let line_ = line?;
|
||||
let line = line_.trim();
|
||||
if !line.is_empty()
|
||||
&& format
|
||||
.comment
|
||||
.as_ref()
|
||||
.map_or(true, |c| !line.starts_with(c))
|
||||
{
|
||||
self.insert(line.to_string(), format);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl InsertLine for LookupList {
|
||||
fn insert(&mut self, entry: String, format: &LookupFormat) {
|
||||
match format.lookup_type {
|
||||
LookupType::List => {
|
||||
self.set.insert(entry);
|
||||
}
|
||||
LookupType::Glob => {
|
||||
let n_wildcards = entry
|
||||
.as_bytes()
|
||||
.iter()
|
||||
.filter(|&&ch| ch == b'*' || ch == b'?')
|
||||
.count();
|
||||
if n_wildcards > 0 {
|
||||
if n_wildcards == 1 {
|
||||
if let Some(s) = entry.strip_prefix('*') {
|
||||
if !s.is_empty() {
|
||||
self.matches.push(MatchType::EndsWith(s.to_string()));
|
||||
}
|
||||
return;
|
||||
} else if let Some(s) = entry.strip_suffix('*') {
|
||||
if !s.is_empty() {
|
||||
self.matches.push(MatchType::StartsWith(s.to_string()));
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
self.matches
|
||||
.push(MatchType::Glob(GlobPattern::compile(&entry, false)));
|
||||
} else {
|
||||
self.set.insert(entry);
|
||||
}
|
||||
}
|
||||
LookupType::Regex => match regex::Regex::new(&entry) {
|
||||
Ok(regex) => {
|
||||
self.matches.push(MatchType::Regex(regex));
|
||||
}
|
||||
Err(err) => {
|
||||
tracing::warn!("Invalid regular expression {:?}: {}", entry, err);
|
||||
}
|
||||
},
|
||||
LookupType::Map => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl InsertLine for LookupMap {
|
||||
fn insert(&mut self, entry: String, format: &LookupFormat) {
|
||||
let (key, value) = entry
|
||||
.split_once(format.separator.as_deref().unwrap_or(" "))
|
||||
.unwrap_or((entry.as_str(), ""));
|
||||
let key = key.trim();
|
||||
if key.is_empty() {
|
||||
return;
|
||||
} else if value.is_empty() {
|
||||
self.insert(key.to_string(), Value::Null);
|
||||
return;
|
||||
}
|
||||
let mut has_digit = false;
|
||||
let mut has_dots = false;
|
||||
let mut has_other = false;
|
||||
|
||||
for (pos, ch) in value.bytes().enumerate() {
|
||||
if ch.is_ascii_digit() {
|
||||
has_digit = true;
|
||||
} else if ch == b'.' {
|
||||
has_dots = true;
|
||||
} else if pos > 0 || ch != b'-' {
|
||||
has_other = true;
|
||||
}
|
||||
}
|
||||
|
||||
let value = if has_other || !has_digit {
|
||||
Value::Text(value.to_string().into())
|
||||
} else if has_dots {
|
||||
value
|
||||
.parse()
|
||||
.map(Value::Float)
|
||||
.unwrap_or_else(|_| Value::Text(value.to_string().into()))
|
||||
} else {
|
||||
value
|
||||
.parse()
|
||||
.map(Value::Integer)
|
||||
.unwrap_or_else(|_| Value::Text(value.to_string().into()))
|
||||
};
|
||||
|
||||
self.insert(key.to_string(), value);
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for LookupFormat {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
lookup_type: LookupType::Glob,
|
||||
comment: Default::default(),
|
||||
separator: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ParseValue for LookupType {
|
||||
fn parse_value(key: impl AsKey, value: &str) -> utils::config::Result<Self> {
|
||||
match value {
|
||||
"list" => Ok(LookupType::List),
|
||||
"glob" => Ok(LookupType::Glob),
|
||||
"regex" => Ok(LookupType::Regex),
|
||||
"map" => Ok(LookupType::Map),
|
||||
_ => Err(format!(
|
||||
"Invalid value for lookup type {key:?}: {value:?}",
|
||||
key = key.as_key(),
|
||||
value = value
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,52 +0,0 @@
|
|||
/*
|
||||
* Copyright (c) 2023 Stalwart Labs Ltd.
|
||||
*
|
||||
* This file is part of the Stalwart Mail Server.
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU Affero General Public License as
|
||||
* published by the Free Software Foundation, either version 3 of
|
||||
* the License, or (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU Affero General Public License for more details.
|
||||
* in the LICENSE file at the top-level directory of this distribution.
|
||||
* You should have received a copy of the GNU Affero General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*
|
||||
* You can be released from the requirements of the AGPLv3 license by
|
||||
* purchasing a commercial license. Please contact licensing@stalw.art
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
pub mod glob;
|
||||
pub mod lookup;
|
||||
pub mod main;
|
||||
|
||||
use ahash::{AHashMap, AHashSet};
|
||||
|
||||
use crate::Value;
|
||||
|
||||
use self::glob::GlobPattern;
|
||||
|
||||
pub enum MemoryStore {
|
||||
List(LookupList),
|
||||
Map(LookupMap),
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct LookupList {
|
||||
pub set: AHashSet<String>,
|
||||
pub matches: Vec<MatchType>,
|
||||
}
|
||||
|
||||
pub type LookupMap = AHashMap<String, Value<'static>>;
|
||||
|
||||
pub enum MatchType {
|
||||
StartsWith(String),
|
||||
EndsWith(String),
|
||||
Glob(GlobPattern),
|
||||
Regex(regex::Regex),
|
||||
}
|
|
@ -26,7 +26,6 @@ pub mod elastic;
|
|||
#[cfg(feature = "foundation")]
|
||||
pub mod foundationdb;
|
||||
pub mod fs;
|
||||
pub mod memory;
|
||||
#[cfg(feature = "mysql")]
|
||||
pub mod mysql;
|
||||
#[cfg(feature = "postgres")]
|
||||
|
|
|
@ -24,7 +24,7 @@
|
|||
use std::time::Duration;
|
||||
|
||||
use mysql_async::{prelude::Queryable, OptsBuilder, Pool, PoolConstraints, PoolOpts, SslOpts};
|
||||
use utils::config::utils::AsKey;
|
||||
use utils::config::{utils::AsKey, Config};
|
||||
|
||||
use crate::{
|
||||
SUBSPACE_BITMAPS, SUBSPACE_BLOBS, SUBSPACE_COUNTERS, SUBSPACE_INDEXES, SUBSPACE_LOGS,
|
||||
|
@ -34,29 +34,32 @@ use crate::{
|
|||
use super::MysqlStore;
|
||||
|
||||
impl MysqlStore {
|
||||
pub async fn open(config: &utils::config::Config, prefix: impl AsKey) -> crate::Result<Self> {
|
||||
pub async fn open(config: &mut Config, prefix: impl AsKey) -> Option<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
let mut opts = OptsBuilder::default()
|
||||
.ip_or_hostname(config.value_require((&prefix, "host"))?.to_string())
|
||||
.ip_or_hostname(config.value_require_((&prefix, "host"))?.to_string())
|
||||
.user(config.value((&prefix, "user")).map(|s| s.to_string()))
|
||||
.pass(config.value((&prefix, "password")).map(|s| s.to_string()))
|
||||
.db_name(
|
||||
config
|
||||
.value_require((&prefix, "database"))?
|
||||
.value_require_((&prefix, "database"))?
|
||||
.to_string()
|
||||
.into(),
|
||||
)
|
||||
.max_allowed_packet(config.property((&prefix, "max-allowed-packet"))?)
|
||||
.max_allowed_packet(config.property_((&prefix, "max-allowed-packet")))
|
||||
.wait_timeout(
|
||||
config
|
||||
.property::<Duration>((&prefix, "timeout"))?
|
||||
.property_::<Duration>((&prefix, "timeout"))
|
||||
.map(|t| t.as_secs() as usize),
|
||||
);
|
||||
if let Some(port) = config.property((&prefix, "port"))? {
|
||||
if let Some(port) = config.property_((&prefix, "port")) {
|
||||
opts = opts.tcp_port(port);
|
||||
}
|
||||
|
||||
if config.property_or_static::<bool>((&prefix, "tls.allow-invalid-certs"), "false")? {
|
||||
if config
|
||||
.property_or_default_::<bool>((&prefix, "tls.allow-invalid-certs"), "false")
|
||||
.unwrap_or_default()
|
||||
{
|
||||
opts = opts.ssl_opts(Some(
|
||||
SslOpts::default().with_danger_accept_invalid_certs(true),
|
||||
));
|
||||
|
@ -65,10 +68,10 @@ impl MysqlStore {
|
|||
// Configure connection pool
|
||||
let mut pool_min = PoolConstraints::default().min();
|
||||
let mut pool_max = PoolConstraints::default().max();
|
||||
if let Some(n_size) = config.property::<usize>((&prefix, "pool.min-connections"))? {
|
||||
if let Some(n_size) = config.property_::<usize>((&prefix, "pool.min-connections")) {
|
||||
pool_min = n_size;
|
||||
}
|
||||
if let Some(n_size) = config.property::<usize>((&prefix, "pool.max-connections"))? {
|
||||
if let Some(n_size) = config.property_::<usize>((&prefix, "pool.max-connections")) {
|
||||
pool_max = n_size;
|
||||
}
|
||||
opts = opts.pool_opts(
|
||||
|
@ -79,9 +82,11 @@ impl MysqlStore {
|
|||
conn_pool: Pool::new(opts),
|
||||
};
|
||||
|
||||
db.create_tables().await?;
|
||||
if let Err(err) = db.create_tables().await {
|
||||
config.new_build_error(prefix.as_str(), format!("Failed to create tables: {err}"));
|
||||
}
|
||||
|
||||
Ok(db)
|
||||
Some(db)
|
||||
}
|
||||
|
||||
pub(super) async fn create_tables(&self) -> crate::Result<()> {
|
||||
|
|
|
@ -35,40 +35,54 @@ use tokio_postgres::NoTls;
|
|||
use utils::{config::utils::AsKey, rustls_client_config};
|
||||
|
||||
impl PostgresStore {
|
||||
pub async fn open(config: &utils::config::Config, prefix: impl AsKey) -> crate::Result<Self> {
|
||||
pub async fn open(config: &mut utils::config::Config, prefix: impl AsKey) -> Option<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
let mut cfg = Config::new();
|
||||
cfg.dbname = config
|
||||
.value_require((&prefix, "database"))?
|
||||
.value_require_((&prefix, "database"))?
|
||||
.to_string()
|
||||
.into();
|
||||
cfg.host = config.value((&prefix, "host")).map(|s| s.to_string());
|
||||
cfg.user = config.value((&prefix, "user")).map(|s| s.to_string());
|
||||
cfg.password = config.value((&prefix, "password")).map(|s| s.to_string());
|
||||
cfg.port = config.property((&prefix, "port"))?;
|
||||
cfg.connect_timeout = config.property((&prefix, "timeout"))?;
|
||||
cfg.port = config.property_((&prefix, "port"));
|
||||
cfg.connect_timeout = config.property_((&prefix, "timeout"));
|
||||
cfg.manager = Some(ManagerConfig {
|
||||
recycling_method: RecyclingMethod::Fast,
|
||||
});
|
||||
if let Some(max_conn) = config.property::<usize>((&prefix, "pool.max-connections"))? {
|
||||
if let Some(max_conn) = config.property_::<usize>((&prefix, "pool.max-connections")) {
|
||||
cfg.pool = PoolConfig::new(max_conn).into();
|
||||
}
|
||||
let db = Self {
|
||||
conn_pool: if config.property_or_static::<bool>((&prefix, "tls.enable"), "false")? {
|
||||
conn_pool: if config
|
||||
.property_or_default_::<bool>((&prefix, "tls.enable"), "false")
|
||||
.unwrap_or_default()
|
||||
{
|
||||
cfg.create_pool(
|
||||
Some(Runtime::Tokio1),
|
||||
MakeRustlsConnect::new(rustls_client_config(
|
||||
config.property_or_static((&prefix, "tls.allow-invalid-certs"), "false")?,
|
||||
config
|
||||
.property_or_default_((&prefix, "tls.allow-invalid-certs"), "false")
|
||||
.unwrap_or_default(),
|
||||
)),
|
||||
)?
|
||||
)
|
||||
} else {
|
||||
cfg.create_pool(Some(Runtime::Tokio1), NoTls)?
|
||||
},
|
||||
cfg.create_pool(Some(Runtime::Tokio1), NoTls)
|
||||
}
|
||||
.map_err(|e| {
|
||||
config.new_build_error(
|
||||
prefix.as_str(),
|
||||
format!("Failed to create connection pool: {e}"),
|
||||
)
|
||||
})
|
||||
.ok()?,
|
||||
};
|
||||
|
||||
db.create_tables().await?;
|
||||
if let Err(err) = db.create_tables().await {
|
||||
config.new_build_error(prefix.as_str(), format!("Failed to create tables: {err}"));
|
||||
}
|
||||
|
||||
Ok(db)
|
||||
Some(db)
|
||||
}
|
||||
|
||||
pub(super) async fn create_tables(&self) -> crate::Result<()> {
|
||||
|
|
|
@ -56,84 +56,124 @@ enum RedisPool {
|
|||
}
|
||||
|
||||
impl RedisStore {
|
||||
pub async fn open(config: &Config, prefix: impl AsKey) -> crate::Result<Self> {
|
||||
pub async fn open(config: &mut Config, prefix: impl AsKey) -> Option<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
let urls = config
|
||||
.values((&prefix, "urls"))
|
||||
.map(|(_, v)| v.to_string())
|
||||
.collect::<Vec<_>>();
|
||||
if urls.is_empty() {
|
||||
return Err(crate::Error::InternalError(format!(
|
||||
"No Redis URLs specified for {prefix:?}"
|
||||
)));
|
||||
config.new_build_error((&prefix, "urls"), "No Redis URLs specified");
|
||||
return None;
|
||||
}
|
||||
|
||||
Ok(match config.value_require((&prefix, "redis-type"))? {
|
||||
"single" => Self {
|
||||
pool: RedisPool::Single(build_pool(
|
||||
config,
|
||||
&prefix,
|
||||
RedisConnectionManager {
|
||||
client: Client::open(urls.into_iter().next().unwrap())?,
|
||||
timeout: config.property_or_static((&prefix, "timeout"), "10s")?,
|
||||
},
|
||||
)?),
|
||||
},
|
||||
Some(match config.value_require_((&prefix, "redis-type"))? {
|
||||
"single" => {
|
||||
let client = Client::open(urls.into_iter().next().unwrap())
|
||||
.map_err(|err| {
|
||||
config.new_build_error(
|
||||
prefix.as_str(),
|
||||
format!("Failed to open Redis client: {err:?}"),
|
||||
)
|
||||
})
|
||||
.ok()?;
|
||||
let timeout = config
|
||||
.property_or_default_((&prefix, "timeout"), "10s")
|
||||
.unwrap_or_else(|| Duration::from_secs(10));
|
||||
|
||||
Self {
|
||||
pool: RedisPool::Single(
|
||||
build_pool(config, &prefix, RedisConnectionManager { client, timeout })
|
||||
.map_err(|err| {
|
||||
config.new_build_error(
|
||||
prefix.as_str(),
|
||||
format!("Failed to build Redis pool: {err:?}"),
|
||||
)
|
||||
})
|
||||
.ok()?,
|
||||
),
|
||||
}
|
||||
}
|
||||
"cluster" => {
|
||||
let mut builder = ClusterClientBuilder::new(urls.into_iter());
|
||||
if let Some(value) = config.property((&prefix, "user"))? {
|
||||
if let Some(value) = config.property_((&prefix, "user")) {
|
||||
builder = builder.username(value);
|
||||
}
|
||||
if let Some(value) = config.property((&prefix, "password"))? {
|
||||
if let Some(value) = config.property_((&prefix, "password")) {
|
||||
builder = builder.password(value);
|
||||
}
|
||||
if let Some(value) = config.property((&prefix, "retry.total"))? {
|
||||
if let Some(value) = config.property_((&prefix, "retry.total")) {
|
||||
builder = builder.retries(value);
|
||||
}
|
||||
if let Some(value) = config.property::<Duration>((&prefix, "retry.max-wait"))? {
|
||||
if let Some(value) = config.property_::<Duration>((&prefix, "retry.max-wait")) {
|
||||
builder = builder.max_retry_wait(value.as_millis() as u64);
|
||||
}
|
||||
if let Some(value) = config.property::<Duration>((&prefix, "retry.min-wait"))? {
|
||||
if let Some(value) = config.property_::<Duration>((&prefix, "retry.min-wait")) {
|
||||
builder = builder.min_retry_wait(value.as_millis() as u64);
|
||||
}
|
||||
if let Some(true) = config.property::<bool>((&prefix, "read-from-replicas"))? {
|
||||
if let Some(true) = config.property_::<bool>((&prefix, "read-from-replicas")) {
|
||||
builder = builder.read_from_replicas();
|
||||
}
|
||||
|
||||
let client = builder
|
||||
.build()
|
||||
.map_err(|err| {
|
||||
config.new_build_error(
|
||||
prefix.as_str(),
|
||||
format!("Failed to open Redis client: {err:?}"),
|
||||
)
|
||||
})
|
||||
.ok()?;
|
||||
let timeout = config
|
||||
.property_or_default_((&prefix, "timeout"), "10s")
|
||||
.unwrap_or_else(|| Duration::from_secs(10));
|
||||
|
||||
Self {
|
||||
pool: RedisPool::Cluster(build_pool(
|
||||
config,
|
||||
&prefix,
|
||||
RedisClusterConnectionManager {
|
||||
client: builder.build()?,
|
||||
timeout: config.property_or_static((&prefix, "timeout"), "10s")?,
|
||||
},
|
||||
)?),
|
||||
pool: RedisPool::Cluster(
|
||||
build_pool(
|
||||
config,
|
||||
&prefix,
|
||||
RedisClusterConnectionManager { client, timeout },
|
||||
)
|
||||
.map_err(|err| {
|
||||
config.new_build_error(
|
||||
prefix.as_str(),
|
||||
format!("Failed to build Redis pool: {err:?}"),
|
||||
)
|
||||
})
|
||||
.ok()?,
|
||||
),
|
||||
}
|
||||
}
|
||||
invalid => {
|
||||
return Err(crate::Error::InternalError(format!(
|
||||
"Invalid Redis type {invalid:?} for {prefix:?}"
|
||||
)));
|
||||
let err = format!("Invalid Redis type {invalid:?}");
|
||||
config.new_parse_error((&prefix, "redis-type"), err);
|
||||
return None;
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn build_pool<M: Manager>(
|
||||
config: &Config,
|
||||
config: &mut Config,
|
||||
prefix: &str,
|
||||
manager: M,
|
||||
) -> utils::config::Result<Pool<M>> {
|
||||
Pool::builder(manager)
|
||||
.runtime(Runtime::Tokio1)
|
||||
.max_size(config.property_or_static((prefix, "pool.max-connections"), "10")?)
|
||||
.max_size(
|
||||
config
|
||||
.property_or_default_((prefix, "pool.max-connections"), "10")
|
||||
.unwrap_or(10),
|
||||
)
|
||||
.create_timeout(
|
||||
config
|
||||
.property_or_static::<Duration>((prefix, "pool.create-timeout"), "30s")?
|
||||
.property_or_default_::<Duration>((prefix, "pool.create-timeout"), "30s")
|
||||
.unwrap_or_else(|| Duration::from_secs(30))
|
||||
.into(),
|
||||
)
|
||||
.wait_timeout(config.property_or_static((prefix, "pool.wait-timeout"), "30s")?)
|
||||
.recycle_timeout(config.property_or_static((prefix, "pool.recycle-timeout"), "30s")?)
|
||||
.wait_timeout(config.property_or_default_((prefix, "pool.wait-timeout"), "30s"))
|
||||
.recycle_timeout(config.property_or_default_((prefix, "pool.recycle-timeout"), "30s"))
|
||||
.build()
|
||||
.map_err(|err| {
|
||||
format!(
|
||||
|
|
|
@ -24,7 +24,7 @@
|
|||
use async_trait::async_trait;
|
||||
use deadpool::managed;
|
||||
use redis::{
|
||||
aio::{Connection, ConnectionLike},
|
||||
aio::{ConnectionLike, MultiplexedConnection},
|
||||
cluster_async::ClusterConnection,
|
||||
};
|
||||
|
||||
|
@ -32,11 +32,13 @@ use super::{RedisClusterConnectionManager, RedisConnectionManager};
|
|||
|
||||
#[async_trait]
|
||||
impl managed::Manager for RedisConnectionManager {
|
||||
type Type = Connection;
|
||||
type Type = MultiplexedConnection;
|
||||
type Error = crate::Error;
|
||||
|
||||
async fn create(&self) -> Result<Connection, crate::Error> {
|
||||
match tokio::time::timeout(self.timeout, self.client.get_tokio_connection()).await {
|
||||
async fn create(&self) -> Result<MultiplexedConnection, crate::Error> {
|
||||
match tokio::time::timeout(self.timeout, self.client.get_multiplexed_tokio_connection())
|
||||
.await
|
||||
{
|
||||
Ok(conn) => conn.map_err(Into::into),
|
||||
Err(_) => Err(crate::Error::InternalError(
|
||||
"Redis connection timeout".into(),
|
||||
|
@ -46,7 +48,7 @@ impl managed::Manager for RedisConnectionManager {
|
|||
|
||||
async fn recycle(
|
||||
&self,
|
||||
conn: &mut Connection,
|
||||
conn: &mut MultiplexedConnection,
|
||||
_: &managed::Metrics,
|
||||
) -> managed::RecycleResult<crate::Error> {
|
||||
conn.req_packed_command(&redis::cmd("PING"))
|
||||
|
|
|
@ -30,31 +30,29 @@ use rocksdb::{
|
|||
};
|
||||
|
||||
use tokio::sync::oneshot;
|
||||
use utils::{
|
||||
config::{utils::AsKey, Config},
|
||||
UnwrapFailure,
|
||||
};
|
||||
use utils::config::{utils::AsKey, Config};
|
||||
|
||||
use crate::{Deserialize, Error};
|
||||
use crate::Deserialize;
|
||||
|
||||
use super::{RocksDbStore, CF_BITMAPS, CF_BLOBS, CF_COUNTERS, CF_INDEXES, CF_LOGS, CF_VALUES};
|
||||
|
||||
impl RocksDbStore {
|
||||
pub async fn open(config: &Config, prefix: impl AsKey) -> crate::Result<Self> {
|
||||
pub async fn open(config: &mut Config, prefix: impl AsKey) -> Option<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
// Create the database directory if it doesn't exist
|
||||
let idx_path: PathBuf = PathBuf::from(
|
||||
config
|
||||
.value_require((&prefix, "path"))
|
||||
.failed("Invalid configuration file"),
|
||||
);
|
||||
std::fs::create_dir_all(&idx_path).map_err(|err| {
|
||||
Error::InternalError(format!(
|
||||
"Failed to create index directory {}: {:?}",
|
||||
idx_path.display(),
|
||||
err
|
||||
))
|
||||
})?;
|
||||
let idx_path: PathBuf = PathBuf::from(config.value_require_((&prefix, "path"))?);
|
||||
std::fs::create_dir_all(&idx_path)
|
||||
.map_err(|err| {
|
||||
config.new_build_error(
|
||||
(&prefix, "path"),
|
||||
format!(
|
||||
"Failed to create database directory {}: {:?}",
|
||||
idx_path.display(),
|
||||
err
|
||||
),
|
||||
)
|
||||
})
|
||||
.ok()?;
|
||||
|
||||
let mut cfs = Vec::new();
|
||||
|
||||
|
@ -73,7 +71,11 @@ impl RocksDbStore {
|
|||
// Blobs
|
||||
let mut cf_opts = Options::default();
|
||||
cf_opts.set_enable_blob_files(true);
|
||||
cf_opts.set_min_blob_size(config.property_or_static((&prefix, "min-blob-size"), "16834")?);
|
||||
cf_opts.set_min_blob_size(
|
||||
config
|
||||
.property_or_default_((&prefix, "min-blob-size"), "16834")
|
||||
.unwrap_or(16834),
|
||||
);
|
||||
cfs.push(ColumnFamilyDescriptor::new(CF_BLOBS, cf_opts));
|
||||
|
||||
// Other cfs
|
||||
|
@ -92,24 +94,36 @@ impl RocksDbStore {
|
|||
//db_opts.set_keep_log_file_num(100);
|
||||
//db_opts.set_max_successive_merges(100);
|
||||
db_opts.set_write_buffer_size(
|
||||
config.property_or_static((&prefix, "write-buffer-size"), "134217728")?,
|
||||
config
|
||||
.property_or_default_((&prefix, "write-buffer-size"), "134217728")
|
||||
.unwrap_or(134217728),
|
||||
);
|
||||
|
||||
Ok(RocksDbStore {
|
||||
Some(RocksDbStore {
|
||||
db: OptimisticTransactionDB::open_cf_descriptors(&db_opts, idx_path, cfs)
|
||||
.map_err(|e| Error::InternalError(e.into_string()))?
|
||||
.map_err(|err| {
|
||||
config.new_build_error(
|
||||
prefix.as_str(),
|
||||
format!("Failed to open database: {:?}", err),
|
||||
)
|
||||
})
|
||||
.ok()?
|
||||
.into(),
|
||||
worker_pool: rayon::ThreadPoolBuilder::new()
|
||||
.num_threads(
|
||||
config
|
||||
.property::<usize>((&prefix, "pool.workers"))?
|
||||
.property_::<usize>((&prefix, "pool.workers"))
|
||||
.filter(|v| *v > 0)
|
||||
.unwrap_or_else(|| num_cpus::get() * 4),
|
||||
)
|
||||
.build()
|
||||
.map_err(|err| {
|
||||
crate::Error::InternalError(format!("Failed to build worker pool: {}", err))
|
||||
})?,
|
||||
config.new_build_error(
|
||||
(&prefix, "pool.workers"),
|
||||
format!("Failed to build worker pool: {:?}", err),
|
||||
)
|
||||
})
|
||||
.ok()?,
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
@ -39,10 +39,10 @@ pub struct S3Store {
|
|||
}
|
||||
|
||||
impl S3Store {
|
||||
pub async fn open(config: &Config, prefix: impl AsKey) -> crate::Result<Self> {
|
||||
pub async fn open(config: &mut Config, prefix: impl AsKey) -> Option<Self> {
|
||||
// Obtain region and endpoint from config
|
||||
let prefix = prefix.as_key();
|
||||
let region = config.value_require((&prefix, "region"))?;
|
||||
let region = config.value_require_((&prefix, "region"))?.to_string();
|
||||
let region = if let Some(endpoint) = config.value((&prefix, "endpoint")) {
|
||||
Region::Custom {
|
||||
region: region.to_string(),
|
||||
|
@ -57,15 +57,28 @@ impl S3Store {
|
|||
config.value((&prefix, "security-token")),
|
||||
config.value((&prefix, "session-token")),
|
||||
config.value((&prefix, "profile")),
|
||||
)?;
|
||||
let timeout = config.property_or_static::<Duration>((&prefix, "timeout"), "30s")?;
|
||||
)
|
||||
.map_err(|err| {
|
||||
config.new_build_error(
|
||||
prefix.as_str(),
|
||||
format!("Failed to create credentials: {err:?}"),
|
||||
)
|
||||
})
|
||||
.ok()?;
|
||||
let timeout = config
|
||||
.property_or_default_::<Duration>((&prefix, "timeout"), "30s")
|
||||
.unwrap_or_else(|| Duration::from_secs(30));
|
||||
|
||||
Ok(S3Store {
|
||||
Some(S3Store {
|
||||
bucket: Bucket::new(
|
||||
config.value_require((&prefix, "bucket"))?,
|
||||
config.value_require_((&prefix, "bucket"))?,
|
||||
region,
|
||||
credentials,
|
||||
)?
|
||||
)
|
||||
.map_err(|err| {
|
||||
config.new_build_error(prefix.as_str(), format!("Failed to create bucket: {err:?}"))
|
||||
})
|
||||
.ok()?
|
||||
.with_path_style()
|
||||
.with_request_timeout(timeout),
|
||||
prefix: config.value((&prefix, "key-prefix")).map(|s| s.to_string()),
|
||||
|
|
|
@ -23,10 +23,7 @@
|
|||
|
||||
use r2d2::Pool;
|
||||
use tokio::sync::oneshot;
|
||||
use utils::{
|
||||
config::{utils::AsKey, Config},
|
||||
UnwrapFailure,
|
||||
};
|
||||
use utils::config::{utils::AsKey, Config};
|
||||
|
||||
use crate::{
|
||||
SUBSPACE_BITMAPS, SUBSPACE_BLOBS, SUBSPACE_COUNTERS, SUBSPACE_INDEXES, SUBSPACE_LOGS,
|
||||
|
@ -36,44 +33,55 @@ use crate::{
|
|||
use super::{pool::SqliteConnectionManager, SqliteStore};
|
||||
|
||||
impl SqliteStore {
|
||||
pub fn open(config: &Config, prefix: impl AsKey) -> crate::Result<Self> {
|
||||
pub fn open(config: &mut Config, prefix: impl AsKey) -> Option<Self> {
|
||||
let prefix = prefix.as_key();
|
||||
let db = Self {
|
||||
conn_pool: Pool::builder()
|
||||
.max_size(
|
||||
config
|
||||
.property((&prefix, "pool.max-connections"))?
|
||||
.property_((&prefix, "pool.max-connections"))
|
||||
.unwrap_or_else(|| (num_cpus::get() * 4) as u32),
|
||||
)
|
||||
.build(
|
||||
SqliteConnectionManager::file(
|
||||
config
|
||||
.value_require((&prefix, "path"))
|
||||
.failed("Invalid configuration file"),
|
||||
SqliteConnectionManager::file(config.value_require_((&prefix, "path"))?)
|
||||
.with_init(|c| {
|
||||
c.execute_batch(concat!(
|
||||
"PRAGMA journal_mode = WAL; ",
|
||||
"PRAGMA synchronous = NORMAL; ",
|
||||
"PRAGMA temp_store = memory;",
|
||||
"PRAGMA busy_timeout = 30000;"
|
||||
))
|
||||
}),
|
||||
)
|
||||
.map_err(|err| {
|
||||
config.new_build_error(
|
||||
prefix.as_str(),
|
||||
format!("Failed to build connection pool: {err}"),
|
||||
)
|
||||
.with_init(|c| {
|
||||
c.execute_batch(concat!(
|
||||
"PRAGMA journal_mode = WAL; ",
|
||||
"PRAGMA synchronous = NORMAL; ",
|
||||
"PRAGMA temp_store = memory;",
|
||||
"PRAGMA busy_timeout = 30000;"
|
||||
))
|
||||
}),
|
||||
)?,
|
||||
})
|
||||
.ok()?,
|
||||
worker_pool: rayon::ThreadPoolBuilder::new()
|
||||
.num_threads(
|
||||
config
|
||||
.property::<usize>((&prefix, "pool.workers"))?
|
||||
.property_::<usize>((&prefix, "pool.workers"))
|
||||
.filter(|v| *v > 0)
|
||||
.unwrap_or_else(num_cpus::get),
|
||||
)
|
||||
.build()
|
||||
.map_err(|err| {
|
||||
crate::Error::InternalError(format!("Failed to build worker pool: {}", err))
|
||||
})?,
|
||||
config.new_build_error(
|
||||
prefix.as_str(),
|
||||
format!("Failed to build worker pool: {err}"),
|
||||
)
|
||||
})
|
||||
.ok()?,
|
||||
};
|
||||
db.create_tables()?;
|
||||
Ok(db)
|
||||
|
||||
if let Err(err) = db.create_tables() {
|
||||
config.new_build_error(prefix.as_str(), format!("Failed to create tables: {err}"));
|
||||
}
|
||||
|
||||
Some(db)
|
||||
}
|
||||
|
||||
#[cfg(feature = "test_mode")]
|
||||
|
|
|
@ -26,9 +26,9 @@ use std::sync::Arc;
|
|||
use utils::config::{cron::SimpleCron, Config};
|
||||
|
||||
use crate::{
|
||||
backend::{fs::FsStore, memory::MemoryStore},
|
||||
backend::fs::FsStore,
|
||||
write::purge::{PurgeSchedule, PurgeStore},
|
||||
BlobStore, CompressionAlgo, LookupStore, QueryStore, Store, Stores,
|
||||
BlobStore, CompressionAlgo, FtsStore, LookupStore, QueryStore, Store, Stores,
|
||||
};
|
||||
|
||||
#[cfg(feature = "s3")]
|
||||
|
@ -55,9 +55,243 @@ use crate::backend::elastic::ElasticSearchStore;
|
|||
#[cfg(feature = "redis")]
|
||||
use crate::backend::redis::RedisStore;
|
||||
|
||||
impl Stores {
|
||||
pub async fn parse(config: &mut Config) -> Self {
|
||||
let mut stores = Stores::default();
|
||||
let ids = config
|
||||
.sub_keys("store", ".type")
|
||||
.map(|id| id.to_string())
|
||||
.collect::<Vec<_>>();
|
||||
for id in ids {
|
||||
let id = id.as_str();
|
||||
// Parse store
|
||||
#[cfg(feature = "test_mode")]
|
||||
{
|
||||
if config
|
||||
.property_or_default_::<bool>(("store", id, "disable"), "false")
|
||||
.unwrap_or(false)
|
||||
{
|
||||
tracing::debug!("Skipping disabled store {id:?}.");
|
||||
continue;
|
||||
}
|
||||
}
|
||||
let protocol = if let Some(protocol) = config.value_require_(("store", id, "type")) {
|
||||
protocol.to_ascii_lowercase()
|
||||
} else {
|
||||
continue;
|
||||
};
|
||||
let prefix = ("store", id);
|
||||
let store_id = id.to_string();
|
||||
let compression_algo = config
|
||||
.property_or_default_::<CompressionAlgo>(("store", id, "compression"), "lz4")
|
||||
.unwrap_or(CompressionAlgo::Lz4);
|
||||
|
||||
let lookup_store: Store = match protocol.as_str() {
|
||||
#[cfg(feature = "rocks")]
|
||||
"rocksdb" => {
|
||||
if let Some(db) = RocksDbStore::open(config, prefix).await.map(Store::from) {
|
||||
stores.stores.insert(store_id.clone(), db.clone());
|
||||
stores
|
||||
.fts_stores
|
||||
.insert(store_id.clone(), db.clone().into());
|
||||
stores.blob_stores.insert(
|
||||
store_id.clone(),
|
||||
BlobStore::from(db.clone()).with_compression(compression_algo),
|
||||
);
|
||||
stores.lookup_stores.insert(store_id, db.into());
|
||||
}
|
||||
continue;
|
||||
}
|
||||
#[cfg(feature = "foundation")]
|
||||
"foundationdb" => {
|
||||
if let Some(db) = FdbStore::open(config, prefix).await.map(Store::from) {
|
||||
stores.stores.insert(store_id.clone(), db.clone());
|
||||
stores
|
||||
.fts_stores
|
||||
.insert(store_id.clone(), db.clone().into());
|
||||
stores.blob_stores.insert(
|
||||
store_id.clone(),
|
||||
BlobStore::from(db.clone()).with_compression(compression_algo),
|
||||
);
|
||||
stores.lookup_stores.insert(store_id, db.into());
|
||||
}
|
||||
continue;
|
||||
}
|
||||
#[cfg(feature = "postgres")]
|
||||
"postgresql" => {
|
||||
if let Some(db) = PostgresStore::open(config, prefix).await.map(Store::from) {
|
||||
stores.stores.insert(store_id.clone(), db.clone());
|
||||
stores
|
||||
.fts_stores
|
||||
.insert(store_id.clone(), db.clone().into());
|
||||
stores.blob_stores.insert(
|
||||
store_id.clone(),
|
||||
BlobStore::from(db.clone()).with_compression(compression_algo),
|
||||
);
|
||||
db
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "mysql")]
|
||||
"mysql" => {
|
||||
if let Some(db) = MysqlStore::open(config, prefix).await.map(Store::from) {
|
||||
stores.stores.insert(store_id.clone(), db.clone());
|
||||
stores
|
||||
.fts_stores
|
||||
.insert(store_id.clone(), db.clone().into());
|
||||
stores.blob_stores.insert(
|
||||
store_id.clone(),
|
||||
BlobStore::from(db.clone()).with_compression(compression_algo),
|
||||
);
|
||||
db
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "sqlite")]
|
||||
"sqlite" => {
|
||||
if let Some(db) = SqliteStore::open(config, prefix).map(Store::from) {
|
||||
stores.stores.insert(store_id.clone(), db.clone());
|
||||
stores
|
||||
.fts_stores
|
||||
.insert(store_id.clone(), db.clone().into());
|
||||
stores.blob_stores.insert(
|
||||
store_id.clone(),
|
||||
BlobStore::from(db.clone()).with_compression(compression_algo),
|
||||
);
|
||||
db
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
"fs" => {
|
||||
if let Some(db) = FsStore::open(config, prefix).await.map(BlobStore::from) {
|
||||
stores
|
||||
.blob_stores
|
||||
.insert(store_id, db.with_compression(compression_algo));
|
||||
}
|
||||
continue;
|
||||
}
|
||||
#[cfg(feature = "s3")]
|
||||
"s3" => {
|
||||
if let Some(db) = S3Store::open(config, prefix).await.map(BlobStore::from) {
|
||||
stores
|
||||
.blob_stores
|
||||
.insert(store_id, db.with_compression(compression_algo));
|
||||
}
|
||||
continue;
|
||||
}
|
||||
#[cfg(feature = "elastic")]
|
||||
"elasticsearch" => {
|
||||
if let Some(db) = ElasticSearchStore::open(config, prefix)
|
||||
.await
|
||||
.map(FtsStore::from)
|
||||
{
|
||||
stores.fts_stores.insert(store_id, db);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
#[cfg(feature = "redis")]
|
||||
"redis" => {
|
||||
if let Some(db) = RedisStore::open(config, prefix)
|
||||
.await
|
||||
.map(LookupStore::from)
|
||||
{
|
||||
stores.lookup_stores.insert(store_id, db);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
unknown => {
|
||||
tracing::debug!("Unknown directory type: {unknown:?}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
// Add queries as lookup stores
|
||||
let lookup_store: LookupStore = lookup_store.into();
|
||||
for lookup_id in config.sub_keys(("store", id, "query"), "") {
|
||||
if let Some(query) = config.value(("store", id, "query", lookup_id)) {
|
||||
stores.lookup_stores.insert(
|
||||
format!("{store_id}/{lookup_id}"),
|
||||
LookupStore::Query(Arc::new(QueryStore {
|
||||
store: lookup_store.clone(),
|
||||
query: query.to_string(),
|
||||
})),
|
||||
);
|
||||
}
|
||||
}
|
||||
stores.lookup_stores.insert(store_id, lookup_store.clone());
|
||||
|
||||
// Run init queries on database
|
||||
for query in config
|
||||
.values(("store", id, "init.execute"))
|
||||
.map(|(_, s)| s.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
{
|
||||
if let Err(err) = lookup_store.query::<usize>(&query, Vec::new()).await {
|
||||
config.new_build_error(
|
||||
("store", id),
|
||||
format!("Failed to initialize store: {err}"),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Parse purge schedules
|
||||
if let Some(store) = config
|
||||
.value("storage.data")
|
||||
.and_then(|store_id| stores.stores.get(store_id))
|
||||
{
|
||||
let store_id = config.value("storage.data").unwrap().to_string();
|
||||
if let Some(cron) =
|
||||
config.property_::<SimpleCron>(("store", store_id.as_str(), "purge.frequency"))
|
||||
{
|
||||
stores.purge_schedules.push(PurgeSchedule {
|
||||
cron,
|
||||
store_id,
|
||||
store: PurgeStore::Data(store.clone()),
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(blob_store) = config
|
||||
.value("storage.blob")
|
||||
.and_then(|blob_store_id| stores.blob_stores.get(blob_store_id))
|
||||
{
|
||||
let store_id = config.value("storage.blob").unwrap().to_string();
|
||||
if let Some(cron) =
|
||||
config.property_::<SimpleCron>(("store", store_id.as_str(), "purge.frequency"))
|
||||
{
|
||||
stores.purge_schedules.push(PurgeSchedule {
|
||||
cron,
|
||||
store_id,
|
||||
store: PurgeStore::Blobs {
|
||||
store: store.clone(),
|
||||
blob_store: blob_store.clone(),
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
for (store_id, store) in &stores.lookup_stores {
|
||||
if let Some(cron) =
|
||||
config.property_::<SimpleCron>(("store", store_id.as_str(), "purge.frequency"))
|
||||
{
|
||||
stores.purge_schedules.push(PurgeSchedule {
|
||||
cron,
|
||||
store_id: store_id.clone(),
|
||||
store: PurgeStore::Lookup(store.clone()),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
stores
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(async_fn_in_trait)]
|
||||
pub trait ConfigStore {
|
||||
async fn parse_stores(&self) -> utils::config::Result<Stores>;
|
||||
async fn parse_stores(&mut self) -> utils::config::Result<Stores>;
|
||||
async fn parse_purge_schedules(
|
||||
&self,
|
||||
stores: &Stores,
|
||||
|
@ -69,12 +303,17 @@ pub trait ConfigStore {
|
|||
impl ConfigStore for Config {
|
||||
#[allow(unused_variables)]
|
||||
#[allow(unreachable_code)]
|
||||
async fn parse_stores(&self) -> utils::config::Result<Stores> {
|
||||
async fn parse_stores(&mut self) -> utils::config::Result<Stores> {
|
||||
let mut config = Stores::default();
|
||||
|
||||
for id in self.sub_keys("store", ".type") {
|
||||
let ids = self
|
||||
.sub_keys("store", ".type")
|
||||
.map(|id| id.to_string())
|
||||
.collect::<Vec<_>>();
|
||||
for id in ids {
|
||||
let id = id.as_str();
|
||||
// Parse store
|
||||
if self.property_or_static::<bool>(("store", id, "disable"), "false")? {
|
||||
if self.property_or_default::<bool>(("store", id, "disable"), "false")? {
|
||||
tracing::debug!("Skipping disabled store {id:?}.");
|
||||
continue;
|
||||
}
|
||||
|
@ -84,12 +323,12 @@ impl ConfigStore for Config {
|
|||
let prefix = ("store", id);
|
||||
let store_id = id.to_string();
|
||||
let compression_algo =
|
||||
self.property_or_static::<CompressionAlgo>(("store", id, "compression"), "lz4")?;
|
||||
self.property_or_default::<CompressionAlgo>(("store", id, "compression"), "lz4")?;
|
||||
|
||||
let lookup_store: Store = match protocol.as_str() {
|
||||
#[cfg(feature = "rocks")]
|
||||
"rocksdb" => {
|
||||
let db: Store = RocksDbStore::open(self, prefix).await?.into();
|
||||
let db: Store = RocksDbStore::open(self, prefix).await.unwrap().into();
|
||||
config.stores.insert(store_id.clone(), db.clone());
|
||||
config
|
||||
.fts_stores
|
||||
|
@ -103,7 +342,7 @@ impl ConfigStore for Config {
|
|||
}
|
||||
#[cfg(feature = "foundation")]
|
||||
"foundationdb" => {
|
||||
let db: Store = FdbStore::open(self, prefix).await?.into();
|
||||
let db: Store = FdbStore::open(self, prefix).await.unwrap().into();
|
||||
config.stores.insert(store_id.clone(), db.clone());
|
||||
config
|
||||
.fts_stores
|
||||
|
@ -117,7 +356,7 @@ impl ConfigStore for Config {
|
|||
}
|
||||
#[cfg(feature = "postgres")]
|
||||
"postgresql" => {
|
||||
let db: Store = PostgresStore::open(self, prefix).await?.into();
|
||||
let db: Store = PostgresStore::open(self, prefix).await.unwrap().into();
|
||||
config.stores.insert(store_id.clone(), db.clone());
|
||||
config
|
||||
.fts_stores
|
||||
|
@ -130,7 +369,7 @@ impl ConfigStore for Config {
|
|||
}
|
||||
#[cfg(feature = "mysql")]
|
||||
"mysql" => {
|
||||
let db: Store = MysqlStore::open(self, prefix).await?.into();
|
||||
let db: Store = MysqlStore::open(self, prefix).await.unwrap().into();
|
||||
config.stores.insert(store_id.clone(), db.clone());
|
||||
config
|
||||
.fts_stores
|
||||
|
@ -143,7 +382,7 @@ impl ConfigStore for Config {
|
|||
}
|
||||
#[cfg(feature = "sqlite")]
|
||||
"sqlite" => {
|
||||
let db: Store = SqliteStore::open(self, prefix)?.into();
|
||||
let db: Store = SqliteStore::open(self, prefix).unwrap().into();
|
||||
config.stores.insert(store_id.clone(), db.clone());
|
||||
config
|
||||
.fts_stores
|
||||
|
@ -157,7 +396,7 @@ impl ConfigStore for Config {
|
|||
"fs" => {
|
||||
config.blob_stores.insert(
|
||||
store_id,
|
||||
BlobStore::from(FsStore::open(self, prefix).await?)
|
||||
BlobStore::from(FsStore::open(self, prefix).await.unwrap())
|
||||
.with_compression(compression_algo),
|
||||
);
|
||||
continue;
|
||||
|
@ -166,7 +405,7 @@ impl ConfigStore for Config {
|
|||
"s3" => {
|
||||
config.blob_stores.insert(
|
||||
store_id,
|
||||
BlobStore::from(S3Store::open(self, prefix).await?)
|
||||
BlobStore::from(S3Store::open(self, prefix).await.unwrap())
|
||||
.with_compression(compression_algo),
|
||||
);
|
||||
continue;
|
||||
|
@ -175,24 +414,18 @@ impl ConfigStore for Config {
|
|||
"elasticsearch" => {
|
||||
config.fts_stores.insert(
|
||||
store_id,
|
||||
ElasticSearchStore::open(self, prefix).await?.into(),
|
||||
ElasticSearchStore::open(self, prefix).await.unwrap().into(),
|
||||
);
|
||||
continue;
|
||||
}
|
||||
#[cfg(feature = "redis")]
|
||||
"redis" => {
|
||||
config
|
||||
.lookup_stores
|
||||
.insert(store_id, RedisStore::open(self, prefix).await?.into());
|
||||
config.lookup_stores.insert(
|
||||
store_id,
|
||||
RedisStore::open(self, prefix).await.unwrap().into(),
|
||||
);
|
||||
continue;
|
||||
}
|
||||
"memory" => {
|
||||
config
|
||||
.lookup_stores
|
||||
.insert(store_id, MemoryStore::open(self, prefix).await?.into());
|
||||
continue;
|
||||
}
|
||||
|
||||
unknown => {
|
||||
tracing::debug!("Unknown directory type: {unknown:?}");
|
||||
continue;
|
||||
|
@ -230,6 +463,7 @@ impl ConfigStore for Config {
|
|||
blob_store_id: Option<&str>,
|
||||
) -> utils::config::Result<Vec<PurgeSchedule>> {
|
||||
let mut schedules = Vec::new();
|
||||
let remove = "true";
|
||||
|
||||
if let Some(store) = store_id.and_then(|store_id| stores.stores.get(store_id)) {
|
||||
let store_id = store_id.unwrap();
|
||||
|
|
|
@ -69,7 +69,7 @@ impl Store {
|
|||
Ok(results)
|
||||
}
|
||||
|
||||
pub async fn config_set(&self, keys: impl Iterator<Item = ConfigKey>) -> crate::Result<()> {
|
||||
pub async fn config_set(&self, keys: impl IntoIterator<Item = ConfigKey>) -> crate::Result<()> {
|
||||
let mut batch = BatchBuilder::new();
|
||||
for key in keys {
|
||||
batch.set(ValueClass::Config(key.key.into_bytes()), key.value);
|
||||
|
|
|
@ -23,7 +23,7 @@
|
|||
|
||||
use utils::{config::Rate, expr};
|
||||
|
||||
use crate::{backend::memory::MemoryStore, write::LookupClass, Row};
|
||||
use crate::{write::LookupClass, Row};
|
||||
#[allow(unused_imports)]
|
||||
use crate::{
|
||||
write::{
|
||||
|
@ -88,9 +88,6 @@ impl LookupStore {
|
|||
)
|
||||
.await
|
||||
.map(|_| ()),
|
||||
LookupStore::Memory(_) => Err(crate::Error::InternalError(
|
||||
"This store does not support key_set".into(),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -129,7 +126,7 @@ impl LookupStore {
|
|||
}
|
||||
#[cfg(feature = "redis")]
|
||||
LookupStore::Redis(store) => store.key_incr(key, value, expires).await,
|
||||
LookupStore::Query(_) | LookupStore::Memory(_) => Err(crate::Error::InternalError(
|
||||
LookupStore::Query(_) => Err(crate::Error::InternalError(
|
||||
"This store does not support counter_incr".into(),
|
||||
)),
|
||||
}
|
||||
|
@ -147,7 +144,7 @@ impl LookupStore {
|
|||
}
|
||||
#[cfg(feature = "redis")]
|
||||
LookupStore::Redis(store) => store.key_delete(key).await,
|
||||
LookupStore::Query(_) | LookupStore::Memory(_) => Err(crate::Error::InternalError(
|
||||
LookupStore::Query(_) => Err(crate::Error::InternalError(
|
||||
"This store does not support key_set".into(),
|
||||
)),
|
||||
}
|
||||
|
@ -165,7 +162,7 @@ impl LookupStore {
|
|||
}
|
||||
#[cfg(feature = "redis")]
|
||||
LookupStore::Redis(store) => store.key_delete(key).await,
|
||||
LookupStore::Query(_) | LookupStore::Memory(_) => Err(crate::Error::InternalError(
|
||||
LookupStore::Query(_) => Err(crate::Error::InternalError(
|
||||
"This store does not support key_set".into(),
|
||||
)),
|
||||
}
|
||||
|
@ -184,19 +181,6 @@ impl LookupStore {
|
|||
.map(|value| value.and_then(|v| v.into())),
|
||||
#[cfg(feature = "redis")]
|
||||
LookupStore::Redis(store) => store.key_get(key).await,
|
||||
LookupStore::Memory(store) => {
|
||||
let key = String::from_utf8(key).unwrap_or_default();
|
||||
match store.as_ref() {
|
||||
MemoryStore::List(list) => Ok(if list.contains(&key) {
|
||||
Some(T::from(Value::Bool(true)))
|
||||
} else {
|
||||
None
|
||||
}),
|
||||
MemoryStore::Map(map) => {
|
||||
Ok(map.get(&key).map(|value| T::from(value.to_owned())))
|
||||
}
|
||||
}
|
||||
}
|
||||
LookupStore::Query(lookup) => lookup
|
||||
.store
|
||||
.query::<Option<Row>>(
|
||||
|
@ -222,7 +206,7 @@ impl LookupStore {
|
|||
}
|
||||
#[cfg(feature = "redis")]
|
||||
LookupStore::Redis(store) => store.counter_get(key).await,
|
||||
LookupStore::Query(_) | LookupStore::Memory(_) => Err(crate::Error::InternalError(
|
||||
LookupStore::Query(_) => Err(crate::Error::InternalError(
|
||||
"This store does not support counter_get".into(),
|
||||
)),
|
||||
}
|
||||
|
@ -238,13 +222,6 @@ impl LookupStore {
|
|||
.map(|value| matches!(value, Some(LookupValue::Value(())))),
|
||||
#[cfg(feature = "redis")]
|
||||
LookupStore::Redis(store) => store.key_exists(key).await,
|
||||
LookupStore::Memory(store) => {
|
||||
let key = String::from_utf8(key).unwrap_or_default();
|
||||
match store.as_ref() {
|
||||
MemoryStore::List(list) => Ok(list.contains(&key)),
|
||||
MemoryStore::Map(map) => Ok(map.contains_key(&key)),
|
||||
}
|
||||
}
|
||||
LookupStore::Query(lookup) => lookup
|
||||
.store
|
||||
.query::<Option<Row>>(
|
||||
|
@ -361,7 +338,7 @@ impl LookupStore {
|
|||
}
|
||||
#[cfg(feature = "redis")]
|
||||
LookupStore::Redis(_) => {}
|
||||
LookupStore::Memory(_) | LookupStore::Query(_) => {}
|
||||
LookupStore::Query(_) => {}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
|
|
@ -32,12 +32,12 @@ pub mod write;
|
|||
|
||||
pub use ahash;
|
||||
use ahash::AHashMap;
|
||||
use backend::{fs::FsStore, memory::MemoryStore};
|
||||
use backend::fs::FsStore;
|
||||
pub use blake3;
|
||||
pub use parking_lot;
|
||||
pub use rand;
|
||||
pub use roaring;
|
||||
use write::{BitmapClass, ValueClass};
|
||||
use write::{purge::PurgeSchedule, BitmapClass, ValueClass};
|
||||
|
||||
#[cfg(feature = "s3")]
|
||||
use backend::s3::S3Store;
|
||||
|
@ -190,6 +190,7 @@ pub struct Stores {
|
|||
pub blob_stores: AHashMap<String, BlobStore>,
|
||||
pub fts_stores: AHashMap<String, FtsStore>,
|
||||
pub lookup_stores: AHashMap<String, LookupStore>,
|
||||
pub purge_schedules: Vec<PurgeSchedule>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -237,7 +238,6 @@ pub enum FtsStore {
|
|||
pub enum LookupStore {
|
||||
Store(Store),
|
||||
Query(Arc<QueryStore>),
|
||||
Memory(Arc<MemoryStore>),
|
||||
#[cfg(feature = "redis")]
|
||||
Redis(Arc<RedisStore>),
|
||||
}
|
||||
|
@ -336,12 +336,6 @@ impl From<Store> for LookupStore {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<MemoryStore> for LookupStore {
|
||||
fn from(store: MemoryStore) -> Self {
|
||||
Self::Memory(Arc::new(store))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum Value<'x> {
|
||||
Integer(i64),
|
||||
|
|
|
@ -28,12 +28,14 @@ use utils::config::cron::SimpleCron;
|
|||
|
||||
use crate::{BlobStore, LookupStore, Store};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum PurgeStore {
|
||||
Data(Store),
|
||||
Blobs { store: Store, blob_store: BlobStore },
|
||||
Lookup(LookupStore),
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct PurgeSchedule {
|
||||
pub cron: SimpleCron,
|
||||
pub store_id: String,
|
||||
|
|
|
@ -31,7 +31,7 @@ ring = { version = "0.17" }
|
|||
base64 = "0.21"
|
||||
serde_json = "1.0"
|
||||
rcgen = "0.12"
|
||||
reqwest = { version = "0.11", default-features = false, features = ["rustls-tls-webpki-roots"]}
|
||||
reqwest = { version = "0.12", default-features = false, features = ["rustls-tls-webpki-roots"]}
|
||||
x509-parser = "0.16.0"
|
||||
pem = "3.0"
|
||||
parking_lot = "0.12"
|
||||
|
|
|
@ -23,7 +23,7 @@
|
|||
|
||||
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
|
||||
|
||||
use super::utils::{AsKey, ParseValue};
|
||||
use super::utils::{AsKey, ParseKey, ParseValue};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum IpAddrMask {
|
||||
|
@ -31,6 +31,12 @@ pub enum IpAddrMask {
|
|||
V6 { addr: Ipv6Addr, mask: u128 },
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum IpAddrOrMask {
|
||||
Ip(IpAddr),
|
||||
Mask(IpAddrMask),
|
||||
}
|
||||
|
||||
impl IpAddrMask {
|
||||
pub fn matches(&self, remote: &IpAddr) -> bool {
|
||||
match self {
|
||||
|
@ -130,6 +136,16 @@ impl ParseValue for IpAddrMask {
|
|||
}
|
||||
}
|
||||
|
||||
impl ParseValue for IpAddrOrMask {
|
||||
fn parse_value(key: impl AsKey, ip: &str) -> super::Result<Self> {
|
||||
if ip.contains('/') {
|
||||
ip.parse_key(key).map(IpAddrOrMask::Mask)
|
||||
} else {
|
||||
ip.parse_key(key).map(IpAddrOrMask::Ip)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
|
|
@ -135,7 +135,7 @@ impl Config {
|
|||
"recv-buffer-size",
|
||||
"tos",
|
||||
] {
|
||||
if let Some(value) = self.value_or_default(
|
||||
if let Some(value) = self.value_or_else(
|
||||
("server.listener", id, "socket", option),
|
||||
("server.socket", option),
|
||||
) {
|
||||
|
@ -158,20 +158,18 @@ impl Config {
|
|||
listeners.push(Listener {
|
||||
socket,
|
||||
addr,
|
||||
ttl: self.property_or_default(
|
||||
("server.listener", id, "socket.ttl"),
|
||||
"server.socket.ttl",
|
||||
)?,
|
||||
backlog: self.property_or_default(
|
||||
ttl: self
|
||||
.property_or_else(("server.listener", id, "socket.ttl"), "server.socket.ttl")?,
|
||||
backlog: self.property_or_else(
|
||||
("server.listener", id, "socket.backlog"),
|
||||
"server.socket.backlog",
|
||||
)?,
|
||||
linger: self.property_or_default(
|
||||
linger: self.property_or_else(
|
||||
("server.listener", id, "socket.linger"),
|
||||
"server.socket.linger",
|
||||
)?,
|
||||
nodelay: self
|
||||
.property_or_default(
|
||||
.property_or_else(
|
||||
("server.listener", id, "socket.nodelay"),
|
||||
"server.socket.nodelay",
|
||||
)?
|
||||
|
@ -185,13 +183,13 @@ impl Config {
|
|||
|
||||
// Build TLS config
|
||||
let (acceptor, tls_implicit) = if self
|
||||
.property_or_default(("server.listener", id, "tls.enable"), "server.tls.enable")?
|
||||
.property_or_else(("server.listener", id, "tls.enable"), "server.tls.enable")?
|
||||
.unwrap_or(false)
|
||||
{
|
||||
// Parse protocol versions
|
||||
let mut tls_v2 = false;
|
||||
let mut tls_v3 = false;
|
||||
for (key, protocol) in self.values_or_default(
|
||||
for (key, protocol) in self.values_or_else(
|
||||
("server.listener", id, "tls.protocols"),
|
||||
"server.tls.protocols",
|
||||
) {
|
||||
|
@ -209,7 +207,7 @@ impl Config {
|
|||
// Parse cipher suites
|
||||
let mut ciphers: Vec<SupportedCipherSuite> = Vec::new();
|
||||
for (key, protocol) in
|
||||
self.values_or_default(("server.listener", id, "tls.ciphers"), "server.tls.ciphers")
|
||||
self.values_or_else(("server.listener", id, "tls.ciphers"), "server.tls.ciphers")
|
||||
{
|
||||
ciphers.push(protocol.parse_key(key)?);
|
||||
}
|
||||
|
@ -217,14 +215,15 @@ impl Config {
|
|||
// Build resolver
|
||||
let mut acme_acceptor = None;
|
||||
let resolver: Arc<dyn ResolvesServerCert> = if let Some(acme_id) =
|
||||
self.value_or_default(("server.listener", id, "tls.acme"), "server.tls.acme")
|
||||
self.value_or_else(("server.listener", id, "tls.acme"), "server.tls.acme")
|
||||
{
|
||||
let acme = acmes.get(acme_id).ok_or_else(|| {
|
||||
format!("Undefined ACME id {acme_id:?} for listener {id:?}.",)
|
||||
})?;
|
||||
|
||||
// Check if this port is used to receive ACME challenges
|
||||
let acme_port = self.property_or_static::<u16>(("acme", acme_id, "port"), "443")?;
|
||||
let acme_port =
|
||||
self.property_or_default::<u16>(("acme", acme_id, "port"), "443")?;
|
||||
if listeners.iter().any(|l| l.addr.port() == acme_port) {
|
||||
acme_acceptor = Some(acme.clone());
|
||||
}
|
||||
|
@ -232,7 +231,7 @@ impl Config {
|
|||
acme.clone()
|
||||
} else {
|
||||
let cert_id = self
|
||||
.value_or_default(
|
||||
.value_or_else(
|
||||
("server.listener", id, "tls.certificate"),
|
||||
"server.tls.certificate",
|
||||
)
|
||||
|
@ -249,7 +248,7 @@ impl Config {
|
|||
|
||||
// Add SNI certificates
|
||||
for (key, value) in
|
||||
self.values_or_default(("server.listener", id, "tls.sni"), "server.tls.sni")
|
||||
self.values_or_else(("server.listener", id, "tls.sni"), "server.tls.sni")
|
||||
{
|
||||
if let Some(prefix) = key.strip_suffix(".subject") {
|
||||
resolver
|
||||
|
@ -294,7 +293,7 @@ impl Config {
|
|||
.with_no_client_auth()
|
||||
.with_cert_resolver(resolver.clone());
|
||||
config.ignore_client_order = self
|
||||
.property_or_default(
|
||||
.property_or_else(
|
||||
("server.listener", id, "tls.ignore-client-order"),
|
||||
"server.tls.ignore-client-order",
|
||||
)?
|
||||
|
@ -317,7 +316,7 @@ impl Config {
|
|||
|
||||
(
|
||||
acceptor,
|
||||
self.property_or_default(
|
||||
self.property_or_else(
|
||||
("server.listener", id, "tls.implicit"),
|
||||
"server.tls.implicit",
|
||||
)?
|
||||
|
@ -331,7 +330,7 @@ impl Config {
|
|||
|
||||
// Parse proxy networks
|
||||
let mut proxy_networks = Vec::new();
|
||||
for network in self.set_values_or_default(
|
||||
for network in self.set_values_or_else(
|
||||
("server.listener", id, "proxy.trusted-networks"),
|
||||
"server.proxy.trusted-networks",
|
||||
) {
|
||||
|
@ -342,12 +341,12 @@ impl Config {
|
|||
id: id.to_string(),
|
||||
internal_id: 0,
|
||||
hostname: self
|
||||
.value_or_default(("server.listener", id, "hostname"), "server.hostname")
|
||||
.value_or_else(("server.listener", id, "hostname"), "server.hostname")
|
||||
.ok_or("Hostname directive not found.")?
|
||||
.to_string(),
|
||||
data: match protocol {
|
||||
ServerProtocol::Smtp | ServerProtocol::Lmtp => self
|
||||
.value_or_default(("server.listener", id, "greeting"), "server.greeting")
|
||||
.value_or_else(("server.listener", id, "greeting"), "server.greeting")
|
||||
.unwrap_or(concat!(
|
||||
"Stalwart SMTP v",
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
|
@ -356,16 +355,16 @@ impl Config {
|
|||
.to_string(),
|
||||
|
||||
ServerProtocol::Jmap => self
|
||||
.value_or_default(("server.listener", id, "url"), "server.url")
|
||||
.value_or_else(("server.listener", id, "url"), "server.url")
|
||||
.failed(&format!("No 'url' directive found for listener {id:?}"))
|
||||
.to_string(),
|
||||
ServerProtocol::Imap | ServerProtocol::Http | ServerProtocol::ManageSieve => self
|
||||
.value_or_default(("server.listener", id, "url"), "server.url")
|
||||
.value_or_else(("server.listener", id, "url"), "server.url")
|
||||
.unwrap_or_default()
|
||||
.to_string(),
|
||||
},
|
||||
max_connections: self
|
||||
.property_or_default(
|
||||
.property_or_else(
|
||||
("server.listener", id, "max-connections"),
|
||||
"server.max-connections",
|
||||
)?
|
||||
|
|
|
@ -31,7 +31,7 @@ pub mod utils;
|
|||
|
||||
use std::{collections::BTreeMap, fmt::Display, net::SocketAddr, sync::Arc, time::Duration};
|
||||
|
||||
use ahash::{AHashMap, AHashSet};
|
||||
use ahash::AHashMap;
|
||||
use tokio::net::TcpSocket;
|
||||
|
||||
use crate::{
|
||||
|
@ -46,6 +46,15 @@ use self::ipmask::IpAddrMask;
|
|||
#[derive(Debug, Default, Clone, PartialEq, Eq)]
|
||||
pub struct Config {
|
||||
pub keys: BTreeMap<String, String>,
|
||||
pub missing: AHashMap<String, Option<String>>,
|
||||
pub errors: AHashMap<String, ConfigError>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum ConfigError {
|
||||
Parse(String),
|
||||
Build(String),
|
||||
Macro(String),
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Eq)]
|
||||
|
@ -153,79 +162,108 @@ impl Config {
|
|||
)
|
||||
.failed("Invalid configuration file");
|
||||
|
||||
// Extract macros and includes
|
||||
let mut keys = BTreeMap::new();
|
||||
let mut includes = AHashSet::new();
|
||||
let mut macros = AHashMap::new();
|
||||
config
|
||||
}
|
||||
|
||||
for (key, value) in config.keys {
|
||||
if let Some(macro_name) = key.strip_prefix("macros.") {
|
||||
macros.insert(macro_name.to_ascii_lowercase(), value);
|
||||
} else if key.starts_with("include.files.") {
|
||||
includes.insert(value);
|
||||
} else {
|
||||
keys.insert(key, value);
|
||||
pub async fn resolve_macros(&mut self) {
|
||||
let mut replacements = AHashMap::new();
|
||||
'outer: for (key, value) in &self.keys {
|
||||
if value.contains("%{") && value.contains("}%") {
|
||||
let mut result = String::with_capacity(value.len());
|
||||
let mut snippet: &str = value.as_str();
|
||||
|
||||
loop {
|
||||
if let Some((suffix, macro_name)) = snippet.split_once("%{") {
|
||||
if !suffix.is_empty() {
|
||||
result.push_str(suffix);
|
||||
}
|
||||
if let Some((class, location, rest)) =
|
||||
macro_name.split_once("}%").and_then(|(name, rest)| {
|
||||
name.split_once(':')
|
||||
.map(|(class, location)| (class, location, rest))
|
||||
})
|
||||
{
|
||||
match class {
|
||||
"cfg" => {
|
||||
if let Some(value) = replacements
|
||||
.get(location)
|
||||
.or_else(|| self.keys.get(location))
|
||||
{
|
||||
result.push_str(value);
|
||||
} else {
|
||||
self.errors.insert(
|
||||
key.clone(),
|
||||
ConfigError::Macro(format!("Unknown key {location:?}")),
|
||||
);
|
||||
}
|
||||
}
|
||||
"env" => match std::env::var(location) {
|
||||
Ok(value) => {
|
||||
result.push_str(&value);
|
||||
}
|
||||
Err(_) => {
|
||||
self.errors.insert(
|
||||
key.clone(),
|
||||
ConfigError::Macro(format!(
|
||||
"Failed to obtain environment variable {location:?}"
|
||||
)),
|
||||
);
|
||||
}
|
||||
},
|
||||
"file" => {
|
||||
let file_name = location.strip_prefix("//").unwrap_or(location);
|
||||
match tokio::fs::read(file_name).await {
|
||||
Ok(value) => match String::from_utf8(value) {
|
||||
Ok(value) => {
|
||||
result.push_str(&value);
|
||||
}
|
||||
Err(err) => {
|
||||
self.errors.insert(
|
||||
key.clone(),
|
||||
ConfigError::Macro(format!(
|
||||
"Failed to read file {file_name:?}: {err}"
|
||||
)),
|
||||
);
|
||||
continue 'outer;
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
self.errors.insert(
|
||||
key.clone(),
|
||||
ConfigError::Macro(format!(
|
||||
"Failed to read file {file_name:?}: {err}"
|
||||
)),
|
||||
);
|
||||
continue 'outer;
|
||||
}
|
||||
}
|
||||
}
|
||||
"http" | "https" => {}
|
||||
_ => {
|
||||
continue 'outer;
|
||||
}
|
||||
};
|
||||
|
||||
snippet = rest;
|
||||
}
|
||||
} else {
|
||||
result.push_str(snippet);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
replacements.insert(key.clone(), result);
|
||||
}
|
||||
}
|
||||
|
||||
// Include files
|
||||
config.keys = keys;
|
||||
for mut include in includes {
|
||||
include.replace_macros("include.files", ¯os);
|
||||
config
|
||||
.parse(&std::fs::read_to_string(&include).failed(&format!(
|
||||
"Could not read included configuration file {include:?}"
|
||||
)))
|
||||
.failed(&format!("Invalid included configuration file {include:?}"));
|
||||
if !replacements.is_empty() {
|
||||
for (key, value) in replacements {
|
||||
self.keys.insert(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
// Replace macros
|
||||
for (key, value) in &mut config.keys {
|
||||
value.replace_macros(key, ¯os);
|
||||
}
|
||||
|
||||
config
|
||||
}
|
||||
|
||||
pub fn update(&mut self, settings: Vec<(String, String)>) {
|
||||
self.keys.extend(settings);
|
||||
}
|
||||
}
|
||||
|
||||
trait ReplaceMacros: Sized {
|
||||
fn replace_macros(&mut self, key: &str, macros: &AHashMap<String, String>);
|
||||
}
|
||||
|
||||
impl ReplaceMacros for String {
|
||||
fn replace_macros(&mut self, key: &str, macros: &AHashMap<String, String>) {
|
||||
if self.contains("%{") {
|
||||
let mut result = String::with_capacity(self.len());
|
||||
let mut value = self.as_str();
|
||||
|
||||
loop {
|
||||
if let Some((suffix, macro_name)) = value.split_once("%{") {
|
||||
if !suffix.is_empty() {
|
||||
result.push_str(suffix);
|
||||
}
|
||||
if let Some((macro_name, rest)) = macro_name.split_once("}%") {
|
||||
if let Some(macro_value) = macros.get(¯o_name.to_ascii_lowercase()) {
|
||||
result.push_str(macro_value);
|
||||
value = rest;
|
||||
} else {
|
||||
failed(&format!("Unknown macro {macro_name:?} for key {key:?}"));
|
||||
}
|
||||
} else {
|
||||
failed(&format!(
|
||||
"Unterminated macro name {value:?} for key {key:?}"
|
||||
));
|
||||
}
|
||||
} else {
|
||||
result.push_str(value);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
*self = result;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -33,7 +33,6 @@ use std::fmt::Write;
|
|||
const MAX_NEST_LEVEL: usize = 10;
|
||||
|
||||
// Simple TOML parser for Stalwart Mail Server configuration files.
|
||||
|
||||
impl Config {
|
||||
pub fn new(toml: &str) -> Result<Self> {
|
||||
let mut config = Config::default();
|
||||
|
@ -399,24 +398,6 @@ impl<'x, 'y> TomlParser<'x, 'y> {
|
|||
}
|
||||
self.insert_key(key, value)?;
|
||||
}
|
||||
'!' => {
|
||||
let mut value = String::with_capacity(4);
|
||||
while let Some(ch) = self.iter.peek() {
|
||||
if ch.is_alphanumeric() || ['_', '-'].contains(ch) {
|
||||
value.push(self.next_char(true, false)?);
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
let value = match std::env::var(value.as_str()) {
|
||||
Ok(value) => value,
|
||||
Err(_) => {
|
||||
tracing::warn!("Failed to get environment variable {value:?}");
|
||||
String::new()
|
||||
}
|
||||
};
|
||||
self.insert_key(key, value)?;
|
||||
}
|
||||
ch => {
|
||||
return if stop_chars.contains(&ch) {
|
||||
Ok(ch)
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue