Include integrity hash in serialized bytes

This commit is contained in:
mdecimus 2025-03-11 15:10:40 +01:00
parent 624c44e3d9
commit d3b284e28d
82 changed files with 584 additions and 355 deletions

11
Cargo.lock generated
View file

@ -2745,11 +2745,21 @@ dependencies = [
"jmap_proto",
"percent-encoding",
"rkyv 0.8.10",
"store",
"tokio",
"trc",
"utils",
]
[[package]]
name = "gxhash"
version = "3.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a197c9b654827513cf53842c5c6d3da2b4b35a785f8e0eff78bdf8e445aba1bb"
dependencies = [
"rustversion",
]
[[package]]
name = "h2"
version = "0.3.26"
@ -6985,6 +6995,7 @@ dependencies = [
"flate2",
"foundationdb",
"futures",
"gxhash",
"lru-cache",
"lz4_flex",
"memchr",

View file

@ -11,8 +11,8 @@ use reqwest::{Method, Response};
use ring::rand::SystemRandom;
use ring::signature::{ECDSA_P256_SHA256_FIXED_SIGNING, EcdsaKeyPair, EcdsaSigningAlgorithm};
use serde::Deserialize;
use store::Serialize;
use store::write::Archiver;
use store::{Serialize, SerializedVersion};
use trc::AddContext;
use trc::event::conv::AssertSuccess;
@ -210,6 +210,12 @@ pub struct SerializedCert {
pub private_key: Vec<u8>,
}
// Version tag embedded in the serialized bytes of `SerializedCert`; 0 is the initial schema.
impl SerializedVersion for SerializedCert {
fn serialize_version() -> u8 {
0
}
}
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Directory {

View file

@ -13,7 +13,10 @@ use rustls::{
sign::CertifiedKey,
};
use rustls_pki_types::{CertificateDer, PrivateKeyDer, PrivatePkcs8KeyDer};
use store::{dispatch::lookup::KeyValue, write::Archive};
use store::{
dispatch::lookup::KeyValue,
write::{AlignedBytes, Archive},
};
use trc::AcmeEvent;
use crate::{KV_ACME, Server};
@ -48,7 +51,7 @@ impl Server {
pub(crate) async fn build_acme_certificate(&self, domain: &str) -> Option<Arc<CertifiedKey>> {
match self
.in_memory_store()
.key_get::<Archive>(KeyValue::<()>::build_key(KV_ACME, domain))
.key_get::<Archive<AlignedBytes>>(KeyValue::<()>::build_key(KV_ACME, domain))
.await
{
Ok(Some(cert_)) => match cert_.unarchive::<SerializedCert>() {

View file

@ -7,8 +7,9 @@
use ahash::AHashMap;
use jmap_proto::types::{collection::Collection, property::Property};
use store::{
Deserialize, IndexKey, IterateParams, SerializeInfallible, U32_LEN, ValueKey,
write::{Archive, ValueClass, key::DeserializeBigEndian},
Deserialize, IndexKey, IterateParams, SerializeInfallible, SerializedVersion, U32_LEN,
ValueKey,
write::{AlignedBytes, Archive, ValueClass, key::DeserializeBigEndian},
};
use trc::AddContext;
use utils::topological::{TopologicalSort, TopologicalSortIterator};
@ -48,7 +49,7 @@ impl Server {
collection: Collection,
) -> trc::Result<ExpandedFolders>
where
T: rkyv::Archive,
T: rkyv::Archive + SerializedVersion,
T::Archived: FolderHierarchy
+ for<'a> rkyv::bytecheck::CheckBytes<
rkyv::api::high::HighValidator<'a, rkyv::rancor::Error>,
@ -79,7 +80,7 @@ impl Server {
),
|key, value| {
let document_id = key.deserialize_be_u32(key.len() - U32_LEN)?;
let archive = <Archive as Deserialize>::deserialize(value)?;
let archive = <Archive<AlignedBytes> as Deserialize>::deserialize(value)?;
let folder = archive.unarchive::<T>()?;
let parent_id = folder.parent_id();

View file

@ -7,10 +7,10 @@
use jmap_proto::types::{property::Property, value::AclGrant};
use std::{borrow::Cow, collections::HashSet, fmt::Debug};
use store::{
Serialize, SerializeInfallible,
Serialize, SerializeInfallible, SerializedVersion,
write::{
Archiver, BatchBuilder, BitmapClass, BlobOp, DirectoryClass, IntoOperations, Operation,
assert::HashedValue,
Archive, Archiver, BatchBuilder, BitmapClass, BlobOp, DirectoryClass, IntoOperations,
Operation,
},
};
use utils::BlobHash;
@ -35,6 +35,7 @@ pub trait IndexableObject: Sync + Send {
pub trait IndexableAndSerializableObject:
IndexableObject
+ SerializedVersion
+ rkyv::Archive
+ for<'a> rkyv::Serialize<
rkyv::api::high::HighSerializer<
@ -49,7 +50,7 @@ pub trait IndexableAndSerializableObject:
#[derive(Debug)]
pub struct ObjectIndexBuilder<C: IndexableObject, N: IndexableAndSerializableObject> {
tenant_id: Option<u32>,
current: Option<HashedValue<C>>,
current: Option<Archive<C>>,
changes: Option<N>,
}
@ -68,7 +69,7 @@ impl<C: IndexableObject, N: IndexableAndSerializableObject> ObjectIndexBuilder<C
}
}
pub fn with_current(mut self, current: HashedValue<C>) -> Self {
pub fn with_current(mut self, current: Archive<C>) -> Self {
self.current = Some(current);
self
}
@ -78,7 +79,7 @@ impl<C: IndexableObject, N: IndexableAndSerializableObject> ObjectIndexBuilder<C
self
}
pub fn with_current_opt(mut self, current: Option<HashedValue<C>>) -> Self {
pub fn with_current_opt(mut self, current: Option<Archive<C>>) -> Self {
self.current = current;
self
}
@ -91,7 +92,7 @@ impl<C: IndexableObject, N: IndexableAndSerializableObject> ObjectIndexBuilder<C
self.changes.as_mut()
}
pub fn current(&self) -> Option<&HashedValue<C>> {
pub fn current(&self) -> Option<&Archive<C>> {
self.current.as_ref()
}

View file

@ -8,8 +8,8 @@ use std::slice::IterMut;
use jmap_proto::types::property::Property;
use store::{
Serialize,
write::{Archiver, BatchBuilder, MaybeDynamicId, TagValue, ValueClass, assert::HashedValue},
Serialize, SerializedVersion,
write::{Archive, Archiver, BatchBuilder, MaybeDynamicId, TagValue, ValueClass},
};
pub struct TagManager<
@ -27,7 +27,7 @@ pub struct TagManager<
>,
>,
> {
current: HashedValue<Vec<T>>,
current: Archive<Vec<T>>,
added: Vec<T>,
removed: Vec<T>,
last: LastTag,
@ -45,6 +45,7 @@ impl<
+ Clone
+ Sync
+ Send
+ SerializedVersion
+ rkyv::Archive
+ for<'a> rkyv::Serialize<
rkyv::api::high::HighSerializer<
@ -55,7 +56,7 @@ impl<
>,
> TagManager<T>
{
pub fn new(current: HashedValue<Vec<T>>) -> Self {
pub fn new(current: Archive<Vec<T>>) -> Self {
Self {
current,
added: Vec::new(),

View file

@ -20,8 +20,8 @@ use jmap_proto::types::collection::Collection;
use jmap_proto::types::property::Property;
use store::dispatch::lookup::KeyValue;
use store::write::serialize::rkyv_deserialize;
use store::write::{Archive, Archiver, now};
use store::{Serialize, U32_LEN};
use store::write::{AlignedBytes, Archive, Archiver, now};
use store::{Serialize, SerializedVersion, U32_LEN};
use trc::AddContext;
use super::ETag;
@ -86,7 +86,7 @@ impl LockRequestHandler for Server {
let mut lock_data = if let Some(lock_data) = self
.in_memory_store()
.key_get::<Archive>(resource_hash.as_slice())
.key_get::<Archive<AlignedBytes>>(resource_hash.as_slice())
.await
.caused_by(trc::location!())?
{
@ -388,7 +388,7 @@ impl LockRequestHandler for Server {
resource_state.document_id.filter(|&id| id != u32::MAX)
{
if let Some(archive) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
resource_state.account_id,
resource_state.collection,
document_id,
@ -454,7 +454,7 @@ struct LockCache<'x> {
enum LockArchive<'x> {
Unarchived(&'x ArchivedLockData),
Archived(Archive),
Archived(Archive<AlignedBytes>),
}
#[derive(Default)]
@ -550,7 +550,7 @@ impl<'x> LockCaches<'x> {
) -> trc::Result<bool> {
if let Some(lock_archive) = server
.in_memory_store()
.key_get::<Archive>(resource_state.lock_key().as_slice())
.key_get::<Archive<AlignedBytes>>(resource_state.lock_key().as_slice())
.await
.caused_by(trc::location!())?
{
@ -582,6 +582,12 @@ struct LockItem {
owner_dav: Option<DeadProperty>,
}
// Version tag embedded in the serialized bytes of `LockData`; 0 is the initial schema.
impl SerializedVersion for LockData {
fn serialize_version() -> u8 {
0
}
}
impl LockItem {
pub fn to_active_lock(&self, href: String) -> ActiveLock {
ActiveLock::new(

View file

@ -4,10 +4,11 @@
* SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
*/
use std::fmt::Write;
use jmap_proto::types::property::Property;
use store::write::{Archive, BatchBuilder, MaybeDynamicValue, Operation, ValueClass, ValueOp};
use store::{
U32_LEN,
write::{Archive, BatchBuilder, MaybeDynamicValue, Operation, ValueClass, ValueOp},
};
pub mod acl;
pub mod lock;
@ -21,19 +22,9 @@ pub trait ExtractETag {
fn etag(&self) -> Option<String>;
}
impl<T: AsRef<[u8]>> ETag for T {
impl<T> ETag for Archive<T> {
fn etag(&self) -> String {
let mut hasher = store::blake3::Hasher::new();
hasher.update(self.as_ref());
let hash = hasher.finalize();
let mut etag = String::with_capacity(2 + hash.as_bytes().len() * 2);
etag.push('"');
for byte in hash.as_bytes() {
let _ = write!(&mut etag, "{:02x}", byte);
}
etag.push('"');
etag
format!("\"{}\"", self.hash)
}
}
@ -46,7 +37,9 @@ impl ExtractETag for BatchBuilder {
class: ValueClass::Property(p_id),
op: ValueOp::Set(MaybeDynamicValue::Static(value)),
} if *p_id == p_value => {
return Archive::try_unpack_bytes(value).map(|bytes| bytes.etag());
return value
.get(value.len() - U32_LEN..)
.map(|v| format!("\"{}\"", u32::from_be_bytes(v.try_into().unwrap())));
}
_ => {}
}

View file

@ -16,7 +16,7 @@ use jmap_proto::types::{
};
use store::{
ahash::AHashMap,
write::{Archive, BatchBuilder, assert::HashedValue, log::ChangeLogBuilder, now},
write::{log::ChangeLogBuilder, now, AlignedBytes, Archive, BatchBuilder},
};
use trc::AddContext;
use utils::map::bitmap::Bitmap;
@ -313,7 +313,7 @@ async fn move_container(
return Err(DavError::Code(StatusCode::BAD_GATEWAY));
}
let node = server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
from_account_id,
Collection::FileNode,
from_document_id,
@ -408,7 +408,7 @@ async fn copy_container(
let now = now() as i64;
for (document_id, _) in copy_files.into_iter() {
let node_ = server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
from_account_id,
Collection::FileNode,
document_id,
@ -527,7 +527,7 @@ async fn overwrite_and_delete_item(
// dest_node is the current file at the destination
let dest_node = server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
to_account_id,
Collection::FileNode,
to_document_id,
@ -541,7 +541,7 @@ async fn overwrite_and_delete_item(
// source_node is the file to be copied
let source_node_ = server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
from_account_id,
Collection::FileNode,
from_document_id,
@ -599,7 +599,7 @@ async fn overwrite_item(
// dest_node is the current file at the destination
let dest_node = server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
to_account_id,
Collection::FileNode,
to_document_id,
@ -613,7 +613,7 @@ async fn overwrite_item(
// source_node is the file to be copied
let mut source_node = server
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
from_account_id,
Collection::FileNode,
from_document_id,
@ -659,7 +659,7 @@ async fn move_item(
let parent_id = destination.document_id.map(|id| id + 1).unwrap_or(0);
let node = server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
from_account_id,
Collection::FileNode,
from_document_id,
@ -722,7 +722,7 @@ async fn copy_item(
let parent_id = destination.document_id.map(|id| id + 1).unwrap_or(0);
let mut node = server
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
from_account_id,
Collection::FileNode,
from_document_id,
@ -755,7 +755,7 @@ async fn rename_item(
let from_document_id = from_resource.resource.document_id;
let node = server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
from_account_id,
Collection::FileNode,
from_document_id,

View file

@ -12,7 +12,7 @@ use hyper::StatusCode;
use jmap_proto::types::{
acl::Acl, collection::Collection, property::Property, type_state::DataType,
};
use store::write::{Archive, BatchBuilder, assert::HashedValue, log::ChangeLogBuilder};
use store::write::{log::ChangeLogBuilder, AlignedBytes, Archive, BatchBuilder};
use trc::AddContext;
use utils::map::bitmap::Bitmap;
@ -105,7 +105,7 @@ impl FileDeleteRequestHandler for Server {
let mut changes = ChangeLogBuilder::new();
for document_id in sorted_ids {
if let Some(node) = self
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::FileNode,
document_id,

View file

@ -10,7 +10,7 @@ use groupware::file::{FileNode, hierarchy::FileHierarchy};
use http_proto::HttpResponse;
use hyper::StatusCode;
use jmap_proto::types::{acl::Acl, collection::Collection, property::Property};
use store::write::Archive;
use store::write::{AlignedBytes, Archive};
use trc::AddContext;
use crate::{
@ -50,7 +50,7 @@ impl FileGetRequestHandler for Server {
// Fetch node
let node_ = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::FileNode,
resource.resource,

View file

@ -11,10 +11,7 @@ use groupware::file::FileNode;
use hyper::StatusCode;
use jmap_proto::types::{collection::Collection, type_state::DataType};
use store::write::{
BatchBuilder,
assert::HashedValue,
log::{Changes, LogInsert},
now,
log::{Changes, LogInsert}, now, Archive, BatchBuilder
};
use crate::{
@ -137,7 +134,7 @@ impl FromFileItem for FileItemId {
pub(crate) async fn update_file_node(
server: &Server,
access_token: &AccessToken,
node: HashedValue<FileNode>,
node: Archive<FileNode>,
mut new_node: FileNode,
account_id: u32,
document_id: u32,
@ -213,7 +210,7 @@ pub(crate) async fn insert_file_node(
pub(crate) async fn delete_file_node(
server: &Server,
access_token: &AccessToken,
node: HashedValue<FileNode>,
node: Archive<FileNode>,
account_id: u32,
document_id: u32,
) -> trc::Result<()> {

View file

@ -17,7 +17,7 @@ use groupware::file::{FileNode, hierarchy::FileHierarchy};
use http_proto::HttpResponse;
use hyper::StatusCode;
use jmap_proto::types::{acl::Acl, collection::Collection, property::Property};
use store::write::{Archive, assert::HashedValue};
use store::write::{AlignedBytes, Archive};
use trc::AddContext;
use crate::{
@ -68,7 +68,7 @@ impl FilePropPatchRequestHandler for Server {
// Fetch node
let node_ = self
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::FileNode,
resource.resource,
@ -99,7 +99,7 @@ impl FilePropPatchRequestHandler for Server {
account_id,
collection: resource.collection,
document_id: resource.resource.into(),
etag: node_.inner.etag().clone().into(),
etag: node_.etag().into(),
lock_token: None,
path: resource_.resource.unwrap(),
}],
@ -162,7 +162,7 @@ impl FilePropPatchRequestHandler for Server {
.await
.caused_by(trc::location!())?
} else {
node_.inner.etag().into()
node_.etag().into()
};
Ok(HttpResponse::new(StatusCode::MULTI_STATUS)

View file

@ -13,8 +13,7 @@ use jmap_proto::types::{
acl::Acl, collection::Collection, property::Property, type_state::DataType,
};
use store::write::{
Archive, BatchBuilder,
assert::HashedValue,
AlignedBytes, Archive, BatchBuilder,
log::{Changes, LogInsert},
now,
};
@ -66,7 +65,7 @@ impl FileUpdateRequestHandler for Server {
if let Some(document_id) = files.files.by_name(resource_name).map(|r| r.document_id) {
// Update
let node_archive_ = self
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::FileNode,
document_id,
@ -98,7 +97,7 @@ impl FileUpdateRequestHandler for Server {
account_id,
collection: resource.collection,
document_id: Some(document_id),
etag: node_archive_.inner.etag().into(),
etag: node_archive_.etag().into(),
lock_token: None,
path: resource_name,
}],
@ -193,7 +192,7 @@ impl FileUpdateRequestHandler for Server {
// Verify that parent is a collection
if parent_id > 0
&& self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::FileNode,
parent_id - 1,

View file

@ -10,7 +10,7 @@ use store::{
Deserialize, IterateParams, Serialize, Store, U32_LEN, ValueKey,
write::{
AssignedIds, BatchBuilder, DirectoryClass, MaybeDynamicId, MaybeDynamicValue,
SerializeWithId, ValueClass, assert::HashedValue, key::DeserializeBigEndian,
SerializeWithId, ValueClass, assert::LegacyHashedValue, key::DeserializeBigEndian,
},
};
use trc::AddContext;
@ -790,7 +790,7 @@ impl ManageDirectory for Store {
// Fetch principal
let mut principal = self
.get_value::<HashedValue<Principal>>(ValueKey::from(ValueClass::Directory(
.get_value::<LegacyHashedValue<Principal>>(ValueKey::from(ValueClass::Directory(
DirectoryClass::Principal(principal_id),
)))
.await

View file

@ -4,6 +4,8 @@
* SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
*/
use store::SerializedVersion;
#[derive(
rkyv::Archive, rkyv::Deserialize, rkyv::Serialize, Debug, Default, Clone, PartialEq, Eq,
)]
@ -21,3 +23,9 @@ pub struct EmailAddress {
pub name: Option<String>,
pub email: String,
}
// Version tag embedded in the serialized bytes of `Identity`; 0 is the initial schema.
impl SerializedVersion for Identity {
fn serialize_version() -> u8 {
0
}
}

View file

@ -16,7 +16,7 @@ use store::{
Serialize, SerializeInfallible,
query::Filter,
roaring::RoaringBitmap,
write::{Archive, Archiver, BatchBuilder, assert::HashedValue, log::ChangeLogBuilder},
write::{AlignedBytes, Archive, Archiver, BatchBuilder, log::ChangeLogBuilder},
};
use trc::AddContext;
@ -101,7 +101,7 @@ impl MailboxDestroy for Server {
// otherwise delete it.
let mut destroy_ids = RoaringBitmap::new();
for (message_id, mailbox_ids) in self
.get_properties::<HashedValue<Archive>, _>(
.get_properties::<Archive<AlignedBytes>, _>(
account_id,
Collection::Email,
&message_ids,
@ -191,7 +191,7 @@ impl MailboxDestroy for Server {
// Obtain mailbox
if let Some(mailbox_) = self
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Mailbox,
document_id,

View file

@ -6,6 +6,7 @@
use common::config::jmap::settings::SpecialUse;
use jmap_proto::types::value::AclGrant;
use store::SerializedVersion;
pub mod destroy;
pub mod index;
@ -38,6 +39,18 @@ pub struct UidMailbox {
pub uid: u32,
}
// Version tag embedded in the serialized bytes of `Mailbox`; 0 is the initial schema.
impl SerializedVersion for Mailbox {
fn serialize_version() -> u8 {
0
}
}
// Version tag embedded in the serialized bytes of `UidMailbox`; 0 is the initial schema.
impl SerializedVersion for UidMailbox {
fn serialize_version() -> u8 {
0
}
}
#[derive(Debug)]
pub struct ExpandPath<'x> {
pub path: Vec<&'x str>,

View file

@ -12,7 +12,7 @@ use mail_parser::Message;
use spam_filter::{
SpamFilterInput, analysis::init::SpamFilterInit, modules::bayes::BayesClassifier,
};
use store::write::{Archive, TaskQueueClass};
use store::write::{AlignedBytes, Archive, TaskQueueClass};
use trc::StoreEvent;
use utils::BlobHash;
@ -59,7 +59,7 @@ impl EmailBayesTrain for Server {
learn_spam: bool,
) -> trc::Result<TaskQueueClass> {
let metadata = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
document_id,

View file

@ -16,7 +16,8 @@ use mail_parser::parsers::fields::thread::thread_name;
use store::{
BlobClass, Serialize, SerializeInfallible,
write::{
Archive, Archiver, BatchBuilder, MaybeDynamicId, TagValue, TaskQueueClass, ValueClass,
AlignedBytes, Archive, Archiver, BatchBuilder, MaybeDynamicId, TagValue, TaskQueueClass,
ValueClass,
log::{Changes, LogInsert},
},
};
@ -59,7 +60,7 @@ impl EmailCopy for Server {
// Obtain metadata
let account_id = resource_token.account_id;
let mut metadata = if let Some(metadata) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
from_account_id,
Collection::Email,
from_message_id,

View file

@ -26,7 +26,7 @@ use rasn_cms::{
};
use rsa::{Pkcs1v15Encrypt, RsaPublicKey, pkcs1::DecodeRsaPublicKey};
use sequoia_openpgp as openpgp;
use store::{Deserialize, write::Archive};
use store::{Deserialize, SerializedVersion, write::Archive};
const P: openpgp::policy::StandardPolicy<'static> = openpgp::policy::StandardPolicy::new();
@ -84,6 +84,12 @@ pub struct EncryptionParams {
pub certs: Vec<Vec<u8>>,
}
// Version tag embedded in the serialized bytes of `EncryptionParams`; 0 is the initial schema.
impl SerializedVersion for EncryptionParams {
fn serialize_version() -> u8 {
0
}
}
#[derive(
rkyv::Serialize,
rkyv::Deserialize,

View file

@ -16,7 +16,7 @@ use store::{
ahash::AHashMap,
roaring::RoaringBitmap,
write::{
Archive, BatchBuilder, BitmapClass, MaybeDynamicId, TagValue, ValueClass,
AlignedBytes, Archive, BatchBuilder, BitmapClass, MaybeDynamicId, TagValue, ValueClass,
log::ChangeLogBuilder,
},
};
@ -64,7 +64,7 @@ impl EmailDeletion for Server {
// Fetch mailboxes and threadIds
let mut thread_ids: AHashMap<u32, i32> = AHashMap::new();
for (document_id, mailboxes) in self
.get_properties::<Archive, _>(
.get_properties::<Archive<AlignedBytes>, _>(
account_id,
Collection::Email,
&document_ids,
@ -445,7 +445,7 @@ impl EmailDeletion for Server {
.core
.storage
.data
.get_value::<Archive>(ValueKey {
.get_value::<Archive<AlignedBytes>>(ValueKey {
account_id,
collection: Collection::Email.into(),
document_id,
@ -474,7 +474,7 @@ impl EmailDeletion for Server {
.core
.storage
.data
.get_value::<Archive>(ValueKey {
.get_value::<Archive<AlignedBytes>>(ValueKey {
account_id,
collection: Collection::Email.into(),
document_id,

View file

@ -37,8 +37,8 @@ use store::{
ahash::AHashSet,
query::Filter,
write::{
Archive, AssignedIds, BatchBuilder, BitmapClass, MaybeDynamicId, MaybeDynamicValue,
SerializeWithId, TagValue, TaskQueueClass, ValueClass,
AlignedBytes, Archive, AssignedIds, BatchBuilder, BitmapClass, MaybeDynamicId,
MaybeDynamicValue, SerializeWithId, TagValue, TaskQueueClass, ValueClass,
log::{ChangeLogBuilder, Changes, LogInsert},
now,
},
@ -389,7 +389,12 @@ impl EmailIngest for Server {
};
if do_encrypt && !message.is_encrypted() {
if let Some(encrypt_params_) = self
.get_property::<Archive>(account_id, Collection::Principal, 0, Property::Parameters)
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Principal,
0,
Property::Parameters,
)
.await
.caused_by(trc::location!())?
{

View file

@ -18,6 +18,7 @@ use rkyv::{
string::ArchivedString,
vec::ArchivedVec,
};
use store::SerializedVersion;
use utils::BlobHash;
#[derive(rkyv::Serialize, rkyv::Deserialize, rkyv::Archive, Debug)]
@ -31,6 +32,12 @@ pub struct MessageMetadata {
pub raw_headers: Vec<u8>,
}
// Version tag embedded in the serialized bytes of `MessageMetadata`; 0 is the initial schema.
impl SerializedVersion for MessageMetadata {
fn serialize_version() -> u8 {
0
}
}
#[derive(rkyv::Serialize, rkyv::Deserialize, rkyv::Archive, Debug)]
pub struct MessageMetadataContents {
pub html_body: Vec<u16>,

View file

@ -5,6 +5,7 @@
*/
use jmap_proto::types::type_state::DataType;
use store::SerializedVersion;
use utils::map::bitmap::Bitmap;
#[derive(
@ -25,3 +26,9 @@ pub struct Keys {
pub p256dh: Vec<u8>,
pub auth: Vec<u8>,
}
// Version tag embedded in the serialized bytes of `PushSubscription`; 0 is the initial schema.
impl SerializedVersion for PushSubscription {
fn serialize_version() -> u8 {
0
}
}

View file

@ -9,7 +9,7 @@ use jmap_proto::types::{collection::Collection, property::Property};
use store::{
SerializeInfallible,
query::Filter,
write::{Archive, BatchBuilder, assert::HashedValue},
write::{AlignedBytes, Archive, BatchBuilder},
};
use trc::AddContext;
@ -59,7 +59,7 @@ impl SieveScriptActivate for Server {
// Deactivate scripts
for document_id in active_ids {
if let Some(sieve) = self
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::SieveScript,
document_id,
@ -88,7 +88,7 @@ impl SieveScriptActivate for Server {
// Activate script
if let Some(document_id) = activate_id {
if let Some(sieve) = self
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::SieveScript,
document_id,

View file

@ -6,7 +6,7 @@
use common::{Server, auth::ResourceToken, storage::index::ObjectIndexBuilder};
use jmap_proto::types::{collection::Collection, property::Property};
use store::write::{Archive, BatchBuilder, assert::HashedValue};
use store::write::{AlignedBytes, Archive, BatchBuilder};
use trc::AddContext;
use super::SieveScript;
@ -30,7 +30,7 @@ impl SieveScriptDelete for Server {
// Fetch record
let account_id = resource_token.account_id;
let obj_ = self
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::SieveScript,
document_id,

View file

@ -24,7 +24,7 @@ use store::{
Deserialize, Serialize, SerializeInfallible,
ahash::{AHashSet, RandomState},
query::Filter,
write::{Archive, Archiver, BatchBuilder, BlobOp, LegacyBincode, assert::HashedValue, now},
write::{AlignedBytes, Archive, Archiver, BatchBuilder, BlobOp, LegacyBincode, now},
};
use trc::{AddContext, SieveEvent};
use utils::config::utils::ParseValue;
@ -575,7 +575,7 @@ impl SieveScriptIngest for Server {
let (script, script_name) = self.sieve_script_compile(account_id, document_id).await?;
let seen_ids = if let Some(seen_ids_archive_) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::SieveScript,
document_id,
@ -636,7 +636,7 @@ impl SieveScriptIngest for Server {
) -> trc::Result<(Sieve, String)> {
// Obtain script object
let script_object = self
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::SieveScript,
document_id,
@ -652,7 +652,6 @@ impl SieveScriptIngest for Server {
// Obtain the sieve script length
let unarchived_script = script_object
.inner
.unarchive::<SieveScript>()
.caused_by(trc::location!())?;
let script_offset = u32::from(unarchived_script.size) as usize;

View file

@ -8,7 +8,7 @@ use std::{collections::HashSet, sync::Arc};
use rkyv::collections::swiss_table::ArchivedHashSet;
use sieve::Sieve;
use store::{ahash::RandomState, blake3, write::now};
use store::{SerializedVersion, ahash::RandomState, blake3, write::now};
use utils::BlobHash;
pub mod activate;
@ -37,6 +37,12 @@ pub struct SeenIds {
pub has_changes: bool,
}
// Version tag embedded in the serialized bytes of `SeenIdHash`; 0 is the initial schema.
impl SerializedVersion for SeenIdHash {
fn serialize_version() -> u8 {
0
}
}
#[derive(
rkyv::Archive, rkyv::Deserialize, rkyv::Serialize, Debug, Default, Clone, PartialEq, Eq,
)]
@ -49,6 +55,12 @@ pub struct SieveScript {
pub vacation_response: Option<VacationResponse>,
}
// Version tag embedded in the serialized bytes of `SieveScript`; 0 is the initial schema.
impl SerializedVersion for SieveScript {
fn serialize_version() -> u8 {
0
}
}
#[derive(
rkyv::Archive, rkyv::Deserialize, rkyv::Serialize, Debug, Default, Clone, PartialEq, Eq,
)]

View file

@ -4,6 +4,7 @@
* SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
*/
use store::SerializedVersion;
use utils::map::vec_map::VecMap;
pub mod index;
@ -22,6 +23,12 @@ pub struct EmailSubmission {
pub delivery_status: VecMap<String, DeliveryStatus>,
}
// Version tag embedded in the serialized bytes of `EmailSubmission`; 0 is the initial schema.
impl SerializedVersion for EmailSubmission {
fn serialize_version() -> u8 {
0
}
}
#[derive(
rkyv::Archive, rkyv::Deserialize, rkyv::Serialize, Debug, Default, Clone, PartialEq, Eq,
)]

View file

@ -6,6 +6,7 @@ resolver = "2"
[dependencies]
utils = { path = "../utils" }
store = { path = "../store" }
common = { path = "../common" }
jmap_proto = { path = "../jmap-proto" }
trc = { path = "../trc" }

View file

@ -9,6 +9,7 @@ pub mod index;
use dav_proto::schema::request::DeadProperty;
use jmap_proto::types::value::AclGrant;
use store::SerializedVersion;
use utils::BlobHash;
#[derive(
@ -36,3 +37,9 @@ pub struct FileProperties {
pub media_type: Option<String>,
pub executable: bool,
}
// Version tag embedded in the serialized bytes of `FileNode`; 0 is the initial schema.
impl SerializedVersion for FileNode {
fn serialize_version() -> u8 {
0
}
}

View file

@ -16,16 +16,19 @@ use common::{
use serde::Deserialize;
use serde_json::json;
use std::future::Future;
use store::rand::{
Rng,
distr::{Alphanumeric, StandardUniform},
rng,
};
use store::{
Serialize,
dispatch::lookup::KeyValue,
write::{Archive, Archiver},
};
use store::{
rand::{
Rng,
distr::{Alphanumeric, StandardUniform},
rng,
},
write::AlignedBytes,
};
use trc::AddContext;
use crate::auth::oauth::OAuthStatus;
@ -148,7 +151,10 @@ impl OAuthApiHandler for Server {
.core
.storage
.lookup
.key_get::<Archive>(KeyValue::<()>::build_key(KV_OAUTH, code.as_bytes()))
.key_get::<Archive<AlignedBytes>>(KeyValue::<()>::build_key(
KV_OAUTH,
code.as_bytes(),
))
.await?
{
let oauth = auth_code_

View file

@ -7,6 +7,7 @@
use http_proto::{HttpRequest, request::fetch_body};
use hyper::header::CONTENT_TYPE;
use serde::{Deserialize, Serialize};
use store::SerializedVersion;
use utils::map::vec_map::VecMap;
pub mod auth;
@ -55,6 +56,12 @@ pub struct OAuthCode {
pub params: String,
}
// Version tag embedded in the serialized bytes of `OAuthCode`; 0 is the initial schema.
impl SerializedVersion for OAuthCode {
fn serialize_version() -> u8 {
0
}
}
#[derive(Debug, Serialize, Deserialize)]
pub struct DeviceAuthGet {
code: Option<String>,

View file

@ -13,7 +13,10 @@ use common::{
};
use hyper::StatusCode;
use std::future::Future;
use store::{dispatch::lookup::KeyValue, write::Archive};
use store::{
dispatch::lookup::KeyValue,
write::{AlignedBytes, Archive},
};
use trc::AddContext;
use http_proto::*;
@ -76,7 +79,10 @@ impl TokenHandler for Server {
.core
.storage
.lookup
.key_get::<Archive>(KeyValue::<()>::build_key(KV_OAUTH, code.as_bytes()))
.key_get::<Archive<AlignedBytes>>(KeyValue::<()>::build_key(
KV_OAUTH,
code.as_bytes(),
))
.await?
{
Some(auth_code_) => {
@ -145,7 +151,10 @@ impl TokenHandler for Server {
.core
.storage
.lookup
.key_get::<Archive>(KeyValue::<()>::build_key(KV_OAUTH, device_code.as_bytes()))
.key_get::<Archive<AlignedBytes>>(KeyValue::<()>::build_key(
KV_OAUTH,
device_code.as_bytes(),
))
.await?
{
let oauth = auth_code_

View file

@ -19,7 +19,7 @@ use mail_parser::MessageParser;
use serde_json::json;
use store::{
Deserialize, Serialize,
write::{Archive, Archiver, BatchBuilder},
write::{AlignedBytes, Archive, Archiver, BatchBuilder},
};
use trc::AddContext;
@ -39,7 +39,7 @@ pub trait CryptoHandler: Sync + Send {
impl CryptoHandler for Server {
async fn handle_crypto_get(&self, access_token: Arc<AccessToken>) -> trc::Result<HttpResponse> {
let ec = if let Some(params_) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
access_token.primary_id(),
Collection::Principal,
0,
@ -132,7 +132,7 @@ impl CryptoHandler for Server {
.parse("Subject: test\r\ntest\r\n".as_bytes())
.unwrap()
.encrypt(
<Archive as Deserialize>::deserialize(params.as_slice())?
<Archive<AlignedBytes> as Deserialize>::deserialize(params.as_slice())?
.unarchive::<EncryptionParams>()?,
)
.await

View file

@ -30,7 +30,9 @@ use smtp::{
};
use store::{
Deserialize, IterateParams, ValueKey,
write::{Archive, QueueClass, ReportEvent, ValueClass, key::DeserializeBigEndian, now},
write::{
AlignedBytes, Archive, QueueClass, ReportEvent, ValueClass, key::DeserializeBigEndian, now,
},
};
use trc::AddContext;
use utils::url_params::UrlParams;
@ -713,7 +715,7 @@ async fn fetch_queued_messages(
.iterate(
IterateParams::new(from_key, to_key).ascending(),
|key, value| {
let message_ = <Archive as Deserialize>::deserialize(value)
let message_ = <Archive<AlignedBytes> as Deserialize>::deserialize(value)
.add_context(|ctx| ctx.ctx(trc::Key::Key, key))?;
let message = message_
.unarchive::<queue::Message>()

View file

@ -23,7 +23,7 @@ use serde_json::json;
use services::index::Indexer;
use store::{
Serialize, rand,
write::{Archive, Archiver, BatchBuilder, ValueClass, assert::HashedValue},
write::{AlignedBytes, Archive, Archiver, BatchBuilder, ValueClass},
};
use trc::AddContext;
use utils::url_params::UrlParams;
@ -332,7 +332,7 @@ pub async fn reset_imap_uids(server: &Server, account_id: u32) -> trc::Result<(u
.unwrap_or_default()
{
let mailbox = server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Mailbox,
mailbox_id,
@ -373,7 +373,7 @@ pub async fn reset_imap_uids(server: &Server, account_id: u32) -> trc::Result<(u
.unwrap_or_default()
{
let uids = server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
message_id,

View file

@ -21,7 +21,7 @@ use parking_lot::Mutex;
use std::sync::{Arc, atomic::Ordering};
use store::{
query::log::{Change, Query},
write::Archive,
write::{AlignedBytes, Archive},
};
use trc::AddContext;
use utils::topological::TopologicalSort;
@ -161,7 +161,7 @@ impl<T: SessionStream> SessionData<T> {
for (mailbox_id, mailbox_) in self
.server
.get_properties::<Archive, _>(
.get_properties::<Archive<AlignedBytes>, _>(
account_id,
Collection::Mailbox,
&mailbox_ids,
@ -644,7 +644,7 @@ impl<T: SessionStream> SessionData<T> {
Ok(access_token.is_member(account_id)
|| self
.server
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Mailbox,
document_id,

View file

@ -11,7 +11,7 @@ use common::{NextMailboxState, listener::SessionStream};
use email::mailbox::UidMailbox;
use imap_proto::protocol::{Sequence, expunge, select::Exists};
use jmap_proto::types::{collection::Collection, property::Property};
use store::write::Archive;
use store::write::{AlignedBytes, Archive};
use trc::AddContext;
use crate::core::ImapId;
@ -52,7 +52,7 @@ impl<T: SessionStream> SessionData<T> {
let mut uid_map = BTreeMap::new();
for (message_id, uid_mailbox_) in self
.server
.get_properties::<Archive, _>(
.get_properties::<Archive<AlignedBytes>, _>(
mailbox.account_id,
Collection::Email,
&message_ids,
@ -227,7 +227,7 @@ impl<T: SessionStream> SessionData<T> {
pub async fn get_uid_validity(&self, mailbox: &MailboxId) -> trc::Result<u32> {
self.server
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
mailbox.account_id,
Collection::Mailbox,
mailbox.mailbox_id,

View file

@ -26,7 +26,7 @@ use jmap_proto::types::{
acl::Acl, collection::Collection, property::Property, state::StateChange, type_state::DataType,
value::AclGrant,
};
use store::write::{Archive, BatchBuilder, assert::HashedValue, log::ChangeLogBuilder};
use store::write::{AlignedBytes, Archive, BatchBuilder, log::ChangeLogBuilder};
use trc::AddContext;
use utils::map::bitmap::Bitmap;
@ -435,11 +435,11 @@ impl<T: SessionStream> SessionData<T> {
&self,
arguments: &Arguments,
validate: bool,
) -> trc::Result<(MailboxId, HashedValue<Archive>, Arc<AccessToken>)> {
) -> trc::Result<(MailboxId, Archive<AlignedBytes>, Arc<AccessToken>)> {
if let Some(mailbox) = self.get_mailbox_by_name(&arguments.mailbox_name) {
if let Some(values) = self
.server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
mailbox.account_id,
Collection::Mailbox,
mailbox.mailbox_id,
@ -452,7 +452,6 @@ impl<T: SessionStream> SessionData<T> {
if !validate
|| access_token.is_member(mailbox.account_id)
|| values
.inner
.unarchive::<email::mailbox::Mailbox>()
.caused_by(trc::location!())?
.acls

View file

@ -32,7 +32,7 @@ use jmap_proto::{
use store::{
SerializeInfallible,
roaring::RoaringBitmap,
write::{Archive, BatchBuilder, ValueClass, assert::HashedValue, log::ChangeLogBuilder},
write::{AlignedBytes, Archive, BatchBuilder, ValueClass, log::ChangeLogBuilder},
};
use super::ImapContext;
@ -474,7 +474,7 @@ impl<T: SessionStream> SessionData<T> {
// Obtain mailbox tags
if let (Some(mailboxes), Some(thread_id)) = (
self.server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
id,

View file

@ -25,7 +25,7 @@ use jmap_proto::types::{
use store::{
SerializeInfallible,
roaring::RoaringBitmap,
write::{Archive, BatchBuilder, assert::HashedValue, log::ChangeLogBuilder},
write::{AlignedBytes, Archive, BatchBuilder, log::ChangeLogBuilder},
};
use super::{ImapContext, ToModSeq};
@ -194,7 +194,7 @@ impl<T: SessionStream> SessionData<T> {
for (id, mailbox_ids) in self
.server
.get_properties::<HashedValue<Archive>, _>(
.get_properties::<Archive<AlignedBytes>, _>(
account_id,
Collection::Email,
deleted_ids,
@ -214,7 +214,7 @@ impl<T: SessionStream> SessionData<T> {
// Remove deleted flag
let (mut keywords, thread_id) = if let (Some(keywords), Some(thread_id)) = (
self.server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
id,

View file

@ -44,7 +44,7 @@ use store::{
Serialize, SerializeInfallible,
query::log::{Change, Query},
rkyv::rend::u16_le,
write::{Archive, Archiver, BatchBuilder, assert::HashedValue, serialize::rkyv_deserialize},
write::{AlignedBytes, Archive, Archiver, BatchBuilder, serialize::rkyv_deserialize},
};
use super::{FromModSeq, ImapContext};
@ -310,7 +310,7 @@ impl<T: SessionStream> SessionData<T> {
// Obtain attributes and keywords
let (metadata_, keywords_) = if let (Some(email), Some(keywords)) = (
self.server
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
id,
@ -319,7 +319,7 @@ impl<T: SessionStream> SessionData<T> {
.await
.imap_ctx(&arguments.tag, trc::location!())?,
self.server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
id,
@ -344,7 +344,6 @@ impl<T: SessionStream> SessionData<T> {
.unarchive::<MessageMetadata>()
.imap_ctx(&arguments.tag, trc::location!())?;
let keywords = keywords_
.inner
.unarchive::<Vec<Keyword>>()
.imap_ctx(&arguments.tag, trc::location!())?;
@ -553,8 +552,9 @@ impl<T: SessionStream> SessionData<T> {
if set_seen_flag {
set_seen_ids.push((
Id::from_parts(thread_id, id),
HashedValue {
Archive {
hash: keywords_.hash,
version: keywords_.version,
inner: rkyv_deserialize::<_, Vec<Keyword>>(keywords)
.imap_ctx(&arguments.tag, trc::location!())?,
},

View file

@ -18,7 +18,7 @@ use imap_proto::{
use jmap_proto::types::{
acl::Acl, collection::Collection, property::Property, state::StateChange, type_state::DataType,
};
use store::write::{Archive, BatchBuilder, assert::HashedValue};
use store::write::{AlignedBytes, Archive, BatchBuilder};
use trc::AddContext;
use super::ImapContext;
@ -88,7 +88,7 @@ impl<T: SessionStream> SessionData<T> {
// Obtain mailbox
let mailbox = self
.server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
params.account_id,
Collection::Mailbox,
mailbox_id,

View file

@ -20,7 +20,10 @@ use imap_proto::{
receiver::Request,
};
use jmap_proto::types::{collection::Collection, id::Id, keyword::Keyword, property::Property};
use store::{Deserialize, U32_LEN, write::Archive};
use store::{
Deserialize, U32_LEN,
write::{AlignedBytes, Archive},
};
use store::{
IndexKeyPrefix, IterateParams, ValueKey,
roaring::RoaringBitmap,
@ -254,7 +257,7 @@ impl<T: SessionStream> SessionData<T> {
.caused_by(trc::location!())? as u64,
Status::UidValidity => u32::from(
self.server
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
mailbox.account_id,
Collection::Mailbox,
mailbox.mailbox_id,

View file

@ -33,7 +33,7 @@ use jmap_proto::types::{
use store::{
SerializeInfallible,
query::log::{Change, Query},
write::{Archive, BatchBuilder, ValueClass, assert::HashedValue, log::ChangeLogBuilder},
write::{AlignedBytes, Archive, BatchBuilder, ValueClass, log::ChangeLogBuilder},
};
use trc::AddContext;
@ -207,7 +207,7 @@ impl<T: SessionStream> SessionData<T> {
// Obtain current keywords
let (mut keywords, thread_id) = if let (Some(keywords), Some(thread_id)) = (
self.server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
*id,
@ -330,7 +330,7 @@ impl<T: SessionStream> SessionData<T> {
if seen_changed {
if let Some(mailboxes) = self
.server
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
*id,

View file

@ -16,7 +16,7 @@ use imap_proto::{Command, ResponseCode, StatusResponse, receiver::Request};
use jmap_proto::types::{
collection::Collection, property::Property, state::StateChange, type_state::DataType,
};
use store::write::{Archive, BatchBuilder, assert::HashedValue};
use store::write::{AlignedBytes, Archive, BatchBuilder};
use super::ImapContext;
@ -96,7 +96,7 @@ impl<T: SessionStream> SessionData<T> {
// Obtain mailbox
let mailbox = self
.server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Mailbox,
mailbox_id,

View file

@ -7,7 +7,7 @@
use std::fmt::Display;
use store::{
Serialize,
Serialize, SerializedVersion,
write::{MaybeDynamicId, TagValue},
};
@ -68,6 +68,12 @@ pub enum Keyword {
Other(String),
}
impl SerializedVersion for Keyword {
fn serialize_version() -> u8 {
0
}
}
impl JsonObjectParser for Keyword {
fn parse(parser: &mut Parser<'_>) -> trc::Result<Self>
where

View file

@ -24,7 +24,7 @@ use jmap_proto::{
use mail_builder::encoders::base64::base64_encode;
use sha1::{Digest, Sha1};
use sha2::{Sha256, Sha512};
use store::{BlobClass, write::Archive};
use store::{write::{AlignedBytes, Archive}, BlobClass};
use trc::AddContext;
use utils::map::vec_map::VecMap;
@ -239,7 +239,7 @@ impl BlobOperations for Server {
}
if include_mailbox {
if let Some(mailboxes) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
req_account_id,
Collection::Email,
*document_id,

View file

@ -28,7 +28,10 @@ use jmap_proto::{
},
};
use store::{BlobClass, write::Archive};
use store::{
BlobClass,
write::{AlignedBytes, Archive},
};
use trc::{AddContext, StoreEvent};
use utils::BlobHash;
@ -159,7 +162,7 @@ impl EmailGet for Server {
continue;
}
let metadata_ = match self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
id.document_id(),
@ -226,7 +229,7 @@ impl EmailGet for Server {
}
Property::MailboxIds => {
if let Some(mailboxes_) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
id.document_id(),
@ -263,7 +266,7 @@ impl EmailGet for Server {
}
Property::Keywords => {
if let Some(keywords_) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
id.document_id(),

View file

@ -39,10 +39,7 @@ use mail_builder::{
};
use mail_parser::MessageParser;
use store::{
SerializeInfallible,
ahash::AHashSet,
roaring::RoaringBitmap,
write::{Archive, BatchBuilder, assert::HashedValue, log::ChangeLogBuilder},
ahash::AHashSet, roaring::RoaringBitmap, write::{log::ChangeLogBuilder, AlignedBytes, Archive, BatchBuilder}, SerializeInfallible
};
use trc::AddContext;
@ -768,14 +765,14 @@ impl EmailSet for Server {
// Obtain current keywords and mailboxes
let document_id = id.document_id();
let (mut mailboxes, mut keywords) = if let (Some(mailboxes), Some(keywords)) = (
self.get_property::<HashedValue<Archive>>(
self.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
document_id,
Property::MailboxIds,
)
.await?,
self.get_property::<HashedValue<Archive>>(
self.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
document_id,

View file

@ -18,7 +18,7 @@ use jmap_proto::{
};
use mail_parser::decoders::html::html_to_text;
use nlp::language::{Language, search_snippet::generate_snippet, stemmer::Stemmer};
use store::{backend::MAX_TOKEN_LENGTH, write::Archive};
use store::{backend::MAX_TOKEN_LENGTH, write::{AlignedBytes, Archive}};
use trc::AddContext;
use utils::BlobHash;
@ -122,7 +122,7 @@ impl EmailSearchSnippet for Server {
continue;
}
let metadata_ = match self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
document_id,

View file

@ -16,10 +16,7 @@ use jmap_proto::{
},
};
use store::{
Serialize,
rkyv::{option::ArchivedOption, vec::ArchivedVec},
roaring::RoaringBitmap,
write::{Archive, Archiver, BatchBuilder},
rkyv::{option::ArchivedOption, vec::ArchivedVec}, roaring::RoaringBitmap, write::{AlignedBytes, Archive, Archiver, BatchBuilder}, Serialize
};
use trc::AddContext;
use utils::sanitize_email;
@ -85,7 +82,7 @@ impl IdentityGet for Server {
continue;
}
let _identity = if let Some(identity) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Identity,
document_id,

View file

@ -18,7 +18,7 @@ use jmap_proto::{
},
};
use std::future::Future;
use store::write::{Archive, BatchBuilder, log::ChangeLogBuilder};
use store::write::{log::ChangeLogBuilder, AlignedBytes, Archive, BatchBuilder};
use store::{Serialize, write::Archiver};
use trc::AddContext;
use utils::sanitize_email;
@ -121,7 +121,7 @@ impl IdentitySet for Server {
// Obtain identity
let document_id = id.document_id();
let mut identity = if let Some(identity) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Identity,
document_id,

View file

@ -15,7 +15,7 @@ use jmap_proto::{
value::{Object, Value},
},
};
use store::write::Archive;
use store::write::{AlignedBytes, Archive};
use trc::AddContext;
use crate::changes::state::StateManager;
@ -98,7 +98,7 @@ impl MailboxGet for Server {
let archived_mailbox_ = if fetch_properties {
match self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Mailbox,
document_id,

View file

@ -29,11 +29,7 @@ use store::{
SerializeInfallible,
query::Filter,
roaring::RoaringBitmap,
write::{
Archive, BatchBuilder,
assert::{AssertValue, HashedValue},
log::ChangeLogBuilder,
},
write::{AlignedBytes, Archive, BatchBuilder, assert::AssertValue, log::ChangeLogBuilder},
};
use trc::AddContext;
use utils::config::utils::ParseValue;
@ -63,7 +59,7 @@ pub trait MailboxSet: Sync + Send {
fn mailbox_set_item(
&self,
changes_: Object<SetValue>,
update: Option<(u32, HashedValue<Mailbox>)>,
update: Option<(u32, Archive<Mailbox>)>,
ctx: &SetContext,
) -> impl Future<Output = trc::Result<Result<ObjectIndexBuilder<Mailbox, Mailbox>, SetError>>> + Send;
}
@ -158,7 +154,7 @@ impl MailboxSet for Server {
// Obtain mailbox
let document_id = id.document_id();
if let Some(mailbox) = self
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Mailbox,
document_id,
@ -287,7 +283,7 @@ impl MailboxSet for Server {
async fn mailbox_set_item(
&self,
changes_: Object<SetValue>,
update: Option<(u32, HashedValue<Mailbox>)>,
update: Option<(u32, Archive<Mailbox>)>,
ctx: &SetContext<'_>,
) -> trc::Result<Result<ObjectIndexBuilder<Mailbox, Mailbox>, SetError>> {
// Parse properties
@ -412,7 +408,7 @@ impl MailboxSet for Server {
let parent_document_id = mailbox_parent_id - 1;
if let Some(mailbox_) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
ctx.account_id,
Collection::Mailbox,
parent_document_id,

View file

@ -19,8 +19,7 @@ use jmap_proto::{
},
};
use store::{
BitmapKey, ValueKey,
write::{Archive, ValueClass, now},
write::{now, AlignedBytes, Archive, ValueClass}, BitmapKey, ValueKey
};
use trc::{AddContext, ServerEvent};
use utils::map::bitmap::Bitmap;
@ -85,7 +84,7 @@ impl PushSubscriptionFetch for Server {
continue;
}
let push_ = if let Some(push) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::PushSubscription,
document_id,
@ -168,7 +167,7 @@ impl PushSubscriptionFetch for Server {
.core
.storage
.data
.get_value::<Archive>(ValueKey {
.get_value::<Archive<AlignedBytes>>(ValueKey {
account_id,
collection: Collection::PushSubscription.into(),
document_id,

View file

@ -22,9 +22,7 @@ use jmap_proto::{
use rand::distr::Alphanumeric;
use std::future::Future;
use store::{
Serialize,
rand::{Rng, rng},
write::{Archive, Archiver, BatchBuilder, now},
rand::{rng, Rng}, write::{now, AlignedBytes, Archive, Archiver, BatchBuilder}, Serialize
};
use trc::AddContext;
use utils::map::bitmap::Bitmap;
@ -138,7 +136,7 @@ impl PushSubscriptionSet for Server {
// Obtain push subscription
let document_id = id.document_id();
let mut push = if let Some(push) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::PushSubscription,
document_id,

View file

@ -15,7 +15,7 @@ use jmap_proto::{
value::{Object, Value},
},
};
use store::{BlobClass, write::Archive};
use store::{write::{AlignedBytes, Archive}, BlobClass};
use trc::AddContext;
use crate::changes::state::StateManager;
@ -73,7 +73,7 @@ impl SieveScriptGet for Server {
continue;
}
let sieve_ = if let Some(sieve) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::SieveScript,
document_id,

View file

@ -28,10 +28,7 @@ use jmap_proto::{
use rand::distr::Alphanumeric;
use sieve::compiler::ErrorType;
use store::{
BlobClass,
query::Filter,
rand::{Rng, rng},
write::{Archive, BatchBuilder, assert::HashedValue, log::ChangeLogBuilder},
query::Filter, rand::{rng, Rng}, write::{log::ChangeLogBuilder, AlignedBytes, Archive, BatchBuilder}, BlobClass
};
use trc::AddContext;
@ -56,7 +53,7 @@ pub trait SieveScriptSet: Sync + Send {
fn sieve_set_item(
&self,
changes_: Object<SetValue>,
update: Option<(u32, HashedValue<SieveScript>)>,
update: Option<(u32, Archive<SieveScript>)>,
ctx: &SetContext,
session_id: u64,
) -> impl Future<
@ -177,7 +174,7 @@ impl SieveScriptSet for Server {
// Obtain sieve script
let document_id = id.document_id();
if let Some(sieve) = self
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::SieveScript,
document_id,
@ -337,7 +334,7 @@ impl SieveScriptSet for Server {
async fn sieve_set_item(
&self,
changes_: Object<SetValue>,
update: Option<(u32, HashedValue<SieveScript>)>,
update: Option<(u32, Archive<SieveScript>)>,
ctx: &SetContext<'_>,
session_id: u64,
) -> trc::Result<

View file

@ -21,7 +21,7 @@ use jmap_proto::{
use smtp::queue::{ArchivedStatus, Message, spool::SmtpSpool};
use smtp_proto::ArchivedResponse;
use std::future::Future;
use store::{rkyv::option::ArchivedOption, write::Archive};
use store::{rkyv::option::ArchivedOption, write::{AlignedBytes, Archive}};
use trc::AddContext;
use utils::map::vec_map::VecMap;
@ -84,7 +84,7 @@ impl EmailSubmissionGet for Server {
continue;
}
let submission_ = if let Some(submission) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::EmailSubmission,
document_id,

View file

@ -38,7 +38,7 @@ use smtp::{
queue::spool::SmtpSpool,
};
use smtp_proto::{MailFrom, RcptTo, request::parser::Rfc5321Parser};
use store::write::{Archive, BatchBuilder, assert::HashedValue, log::ChangeLogBuilder, now};
use store::write::{log::ChangeLogBuilder, now, AlignedBytes, Archive, BatchBuilder};
use trc::AddContext;
use utils::{BlobHash, map::vec_map::VecMap, sanitize_email};
@ -121,7 +121,7 @@ impl EmailSubmissionSet for Server {
// Obtain submission
let document_id = id.document_id();
let submission = if let Some(submission) = self
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::EmailSubmission,
document_id,
@ -225,7 +225,7 @@ impl EmailSubmissionSet for Server {
for id in will_destroy {
let document_id = id.document_id();
if let Some(submission) = self
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::EmailSubmission,
document_id,
@ -458,7 +458,7 @@ impl EmailSubmissionSet for Server {
// Fetch identity's mailFrom
let identity_mail_from = if let Some(identity) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Identity,
submission.identity_id,
@ -499,7 +499,7 @@ impl EmailSubmissionSet for Server {
// Obtain message metadata
let metadata_ = if let Some(metadata) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Email,
submission.email_id,

View file

@ -19,7 +19,10 @@ use jmap_proto::{
},
};
use std::future::Future;
use store::{query::Filter, write::Archive};
use store::{
query::Filter,
write::{AlignedBytes, Archive},
};
use trc::AddContext;
use crate::{JmapMethods, changes::state::StateManager};
@ -81,7 +84,7 @@ impl VacationResponseGet for Server {
if do_get {
if let Some(document_id) = self.get_vacation_sieve_script_id(account_id).await? {
if let Some(sieve_) = self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::SieveScript,
document_id,

View file

@ -26,8 +26,7 @@ use mail_builder::MessageBuilder;
use mail_parser::decoders::html::html_to_text;
use std::future::Future;
use store::write::{
Archive, BatchBuilder,
assert::HashedValue,
AlignedBytes, Archive, BatchBuilder,
log::{Changes, LogInsert},
};
use trc::AddContext;
@ -131,7 +130,7 @@ impl VacationResponseSet for Server {
let (mut sieve, prev_sieve) = if let Some(document_id) = document_id {
let prev_sieve = self
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::SieveScript,
document_id,

View file

@ -11,7 +11,7 @@ use directory::Permission;
use email::sieve::SieveScript;
use imap_proto::receiver::Request;
use jmap_proto::types::{blob::BlobSection, collection::Collection, property::Property};
use store::write::Archive;
use store::write::{AlignedBytes, Archive};
use trc::AddContext;
use utils::BlobHash;
@ -37,7 +37,7 @@ impl<T: SessionStream> Session<T> {
let document_id = self.get_script_id(account_id, &name).await?;
let sieve_ = self
.server
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::SieveScript,
document_id,

View file

@ -10,7 +10,7 @@ use common::listener::SessionStream;
use directory::Permission;
use email::sieve::SieveScript;
use jmap_proto::types::{collection::Collection, property::Property};
use store::write::Archive;
use store::write::{AlignedBytes, Archive};
use trc::AddContext;
use crate::core::{Session, StatusResponse};
@ -39,7 +39,7 @@ impl<T: SessionStream> Session<T> {
for document_id in document_ids {
if let Some(script_) = self
.server
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::SieveScript,
document_id,

View file

@ -14,7 +14,7 @@ use jmap_proto::types::{collection::Collection, property::Property};
use sieve::compiler::ErrorType;
use store::{
query::Filter,
write::{Archive, BatchBuilder, assert::HashedValue, log::LogInsert},
write::{AlignedBytes, Archive, BatchBuilder, log::LogInsert},
};
use trc::AddContext;
@ -100,7 +100,7 @@ impl<T: SessionStream> Session<T> {
// Obtain script values
let script = self
.server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::SieveScript,
document_id,

View file

@ -8,10 +8,10 @@ use std::time::Instant;
use common::{listener::SessionStream, storage::index::ObjectIndexBuilder};
use directory::Permission;
use email::sieve::{SieveScript};
use email::sieve::SieveScript;
use imap_proto::receiver::Request;
use jmap_proto::types::{collection::Collection, property::Property};
use store::write::{Archive, BatchBuilder, assert::HashedValue, log::ChangeLogBuilder};
use store::write::{AlignedBytes, Archive, BatchBuilder, log::ChangeLogBuilder};
use trc::AddContext;
use crate::core::{Command, ResponseCode, Session, StatusResponse};
@ -60,7 +60,7 @@ impl<T: SessionStream> Session<T> {
// Obtain script values
let script = self
.server
.get_property::<HashedValue<Archive>>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::SieveScript,
document_id,

View file

@ -12,7 +12,7 @@ use jmap_proto::types::{collection::Collection, property::Property};
use store::{
IndexKey, IterateParams, SerializeInfallible, U32_LEN,
ahash::AHashMap,
write::{Archive, key::DeserializeBigEndian},
write::{AlignedBytes, Archive, key::DeserializeBigEndian},
};
use trc::AddContext;
@ -63,7 +63,7 @@ impl<T: SessionStream> Session<T> {
.caused_by(trc::location!())?;
let uid_validity = u32::from(
self.server
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
account_id,
Collection::Mailbox,
INBOX_ID,
@ -124,7 +124,7 @@ impl<T: SessionStream> Session<T> {
// Sort by UID
for (message_id, uid_mailbox) in self
.server
.get_properties::<Archive, _>(
.get_properties::<Archive<AlignedBytes>, _>(
account_id,
Collection::Email,
&message_ids,

View file

@ -10,7 +10,7 @@ use common::listener::SessionStream;
use directory::Permission;
use email::message::metadata::MessageMetadata;
use jmap_proto::types::{collection::Collection, property::Property};
use store::write::Archive;
use store::write::{AlignedBytes, Archive};
use trc::AddContext;
use crate::{Session, protocol::response::Response};
@ -27,7 +27,7 @@ impl<T: SessionStream> Session<T> {
if let Some(message) = mailbox.messages.get(msg.saturating_sub(1) as usize) {
if let Some(metadata_) = self
.server
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
mailbox.account_id,
Collection::Email,
message.id,

View file

@ -20,7 +20,7 @@ use store::{
fts::index::FtsDocument,
roaring::RoaringBitmap,
write::{
Archive, BatchBuilder, BlobOp, MaybeDynamicId, TaskQueueClass, ValueClass,
AlignedBytes, Archive, BatchBuilder, BlobOp, MaybeDynamicId, TaskQueueClass, ValueClass,
key::{DeserializeBigEndian, KeySerializer},
now,
},
@ -166,7 +166,7 @@ impl Indexer for Server {
match event.action {
EmailTaskAction::Index => {
match self
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
event.account_id,
Collection::Email,
event.document_id,

View file

@ -12,7 +12,7 @@ use std::{
use common::expr::{self, functions::ResolveVariable, *};
use smtp_proto::{ArchivedResponse, Response};
use store::write::now;
use store::{SerializedVersion, write::now};
use utils::BlobHash;
pub mod dsn;
@ -67,6 +67,12 @@ pub struct Message {
pub span_id: u64,
}
impl SerializedVersion for Message {
fn serialize_version() -> u8 {
0
}
}
#[derive(rkyv::Serialize, rkyv::Deserialize, rkyv::Archive, Debug, Clone, PartialEq, Eq)]
pub enum QuotaKey {
Size { key: Vec<u8>, id: u64 },

View file

@ -11,7 +11,9 @@ use std::borrow::Cow;
use std::future::Future;
use std::time::{Duration, SystemTime};
use store::write::key::DeserializeBigEndian;
use store::write::{Archive, Archiver, BatchBuilder, BlobOp, QueueClass, ValueClass, now};
use store::write::{
AlignedBytes, Archive, Archiver, BatchBuilder, BlobOp, QueueClass, ValueClass, now,
};
use store::{IterateParams, Serialize, SerializeInfallible, U64_LEN, ValueKey};
use trc::ServerEvent;
use utils::BlobHash;
@ -44,7 +46,7 @@ pub trait SmtpSpool: Sync + Send {
fn read_message_archive(
&self,
id: QueueId,
) -> impl Future<Output = trc::Result<Option<Archive>>> + Send;
) -> impl Future<Output = trc::Result<Option<Archive<AlignedBytes>>>> + Send;
}
impl SmtpSpool for Server {
@ -171,9 +173,14 @@ impl SmtpSpool for Server {
}
}
async fn read_message_archive(&self, id: QueueId) -> trc::Result<Option<Archive>> {
async fn read_message_archive(
&self,
id: QueueId,
) -> trc::Result<Option<Archive<AlignedBytes>>> {
self.store()
.get_value::<Archive>(ValueKey::from(ValueClass::Queue(QueueClass::Message(id))))
.get_value::<Archive<AlignedBytes>>(ValueKey::from(ValueClass::Queue(
QueueClass::Message(id),
)))
.await
}
}

View file

@ -51,6 +51,7 @@ arc-swap = "1.6.0"
bitpacking = "0.9.2"
memchr = { version = "2" }
rkyv = { version = "0.8.10", features = ["little_endian"] }
gxhash = "3.4.1"
[dev-dependencies]
tokio = { version = "1.23", features = ["full"] }

View file

@ -4,7 +4,7 @@
* SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
*/
use std::{borrow::Cow, sync::Arc};
use std::{borrow::Cow, collections::HashSet, sync::Arc};
pub mod backend;
pub mod config;
@ -68,6 +68,10 @@ pub trait SerializeInfallible {
fn serialize(&self) -> Vec<u8>;
}
pub trait SerializedVersion {
fn serialize_version() -> u8;
}
// Key serialization flags
pub(crate) const WITH_SUBSPACE: u32 = 1;
@ -117,6 +121,7 @@ pub struct LogKey {
pub const U64_LEN: usize = std::mem::size_of::<u64>();
pub const U32_LEN: usize = std::mem::size_of::<u32>();
pub const U16_LEN: usize = std::mem::size_of::<u16>();
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum BlobClass {
@ -782,3 +787,21 @@ impl Stores {
}
}
}
impl SerializedVersion for () {
fn serialize_version() -> u8 {
0
}
}
impl<T: SerializedVersion> SerializedVersion for Vec<T> {
fn serialize_version() -> u8 {
T::serialize_version()
}
}
impl<T: SerializedVersion, S> SerializedVersion for HashSet<T, S> {
fn serialize_version() -> u8 {
T::serialize_version()
}
}

View file

@ -6,11 +6,7 @@
use crate::{Deserialize, U32_LEN, U64_LEN};
#[derive(Debug, Clone)]
pub struct HashedValue<T> {
pub hash: u64,
pub inner: T,
}
use super::Archive;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum AssertValue {
@ -21,10 +17,10 @@ pub enum AssertValue {
None,
}
impl<T: Deserialize + Default> HashedValue<T> {
pub fn take(&mut self) -> T {
std::mem::take(&mut self.inner)
}
#[derive(Debug, Clone)]
pub struct LegacyHashedValue<T: Deserialize> {
pub hash: u64,
pub inner: T,
}
pub trait ToAssertValue {
@ -55,13 +51,25 @@ impl ToAssertValue for u32 {
}
}
impl<T> ToAssertValue for HashedValue<T> {
impl<T> ToAssertValue for Archive<T> {
fn to_assert_value(&self) -> AssertValue {
AssertValue::U32(self.hash)
}
}
impl<T> ToAssertValue for &Archive<T> {
fn to_assert_value(&self) -> AssertValue {
AssertValue::U32(self.hash)
}
}
impl<T: Deserialize> ToAssertValue for LegacyHashedValue<T> {
fn to_assert_value(&self) -> AssertValue {
AssertValue::Hash(self.hash)
}
}
impl<T> ToAssertValue for &HashedValue<T> {
impl<T: Deserialize> ToAssertValue for &LegacyHashedValue<T> {
fn to_assert_value(&self) -> AssertValue {
AssertValue::Hash(self.hash)
}
@ -70,8 +78,13 @@ impl<T> ToAssertValue for &HashedValue<T> {
impl AssertValue {
pub fn matches(&self, bytes: &[u8]) -> bool {
match self {
AssertValue::U32(v) => bytes.len() == U32_LEN && u32::deserialize(bytes).unwrap() == *v,
AssertValue::U64(v) => bytes.len() == U64_LEN && u64::deserialize(bytes).unwrap() == *v,
AssertValue::U32(v) => bytes
.get(bytes.len() - U32_LEN..)
.is_some_and(|b| b == v.to_be_bytes()),
AssertValue::U64(v) => bytes
.get(bytes.len() - U64_LEN..)
.is_some_and(|b| b == v.to_be_bytes()),
AssertValue::Hash(v) => xxhash_rust::xxh3::xxh3_64(bytes) == *v,
AssertValue::None => false,
AssertValue::Some => true,
@ -83,18 +96,11 @@ impl AssertValue {
}
}
impl<T: Deserialize> Deserialize for HashedValue<T> {
impl<T: Deserialize> Deserialize for LegacyHashedValue<T> {
fn deserialize(bytes: &[u8]) -> trc::Result<Self> {
Ok(HashedValue {
Ok(LegacyHashedValue {
hash: xxhash_rust::xxh3::xxh3_64(bytes),
inner: T::deserialize(bytes)?,
})
}
fn deserialize_owned(bytes: Vec<u8>) -> trc::Result<Self> {
Ok(HashedValue {
hash: xxhash_rust::xxh3::xxh3_64(&bytes),
inner: T::deserialize_owned(bytes)?,
})
}
}

View file

@ -33,7 +33,14 @@ pub mod serialize;
pub(crate) const ARCHIVE_ALIGNMENT: usize = 16;
#[derive(Debug, Clone)]
pub enum Archive {
pub struct Archive<T> {
pub inner: T,
pub version: u8,
pub hash: u32,
}
#[derive(Debug, Clone)]
pub enum AlignedBytes {
Aligned(AlignedVec<ARCHIVE_ALIGNMENT>),
Vec(Vec<u8>),
}
@ -584,8 +591,8 @@ impl QueueClass {
}
}
impl AsRef<[u8]> for Archive {
impl<T: AsRef<[u8]>> AsRef<[u8]> for Archive<T> {
fn as_ref(&self) -> &[u8] {
self.as_bytes()
self.inner.as_ref()
}
}

View file

@ -4,29 +4,59 @@
* SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
*/
use std::borrow::Cow;
use rkyv::util::AlignedVec;
use crate::{Deserialize, Serialize, SerializeInfallible, U32_LEN, Value};
use crate::{Deserialize, Serialize, SerializeInfallible, SerializedVersion, U32_LEN, Value};
use super::{ARCHIVE_ALIGNMENT, Archive, Archiver, LegacyBincode, assert::HashedValue};
use super::{ARCHIVE_ALIGNMENT, AlignedBytes, Archive, Archiver, LegacyBincode};
const MAGIC_MARKER: u8 = 1 << 7;
const LZ4_COMPRESSES: u8 = 1 << 6;
const LZ4_COMPRESSED: u8 = 1 << 6;
const ARCHIVE_UNCOMPRESSED: u8 = MAGIC_MARKER;
const ARCHIVE_LZ4_COMPRESSED: u8 = MAGIC_MARKER | LZ4_COMPRESSES;
const ARCHIVE_LZ4_COMPRESSED: u8 = MAGIC_MARKER | LZ4_COMPRESSED;
const COMPRESS_WATERMARK: usize = 8192;
const HASH_SEED: i64 = 791120;
impl Deserialize for Archive {
const MARKER_MASK: u8 = MAGIC_MARKER | LZ4_COMPRESSED;
const VERSION_MASK: u8 = !MARKER_MASK;
impl Deserialize for Archive<AlignedBytes> {
fn deserialize(bytes: &[u8]) -> trc::Result<Self> {
match bytes.split_last() {
Some((&ARCHIVE_UNCOMPRESSED, archive)) => {
let mut bytes = AlignedVec::with_capacity(archive.len());
bytes.extend_from_slice(archive);
Ok(Archive::Aligned(bytes))
let (contents, marker, hash) = bytes
.split_at_checked(bytes.len() - (U32_LEN + 1))
.and_then(|(contents, marker)| {
marker.split_first().and_then(|(marker, archive_hash)| {
let hash = gxhash::gxhash32(contents, HASH_SEED);
if hash.to_be_bytes().as_slice() == archive_hash {
Some((contents, *marker, hash))
} else {
None
}
})
})
.ok_or_else(|| {
trc::StoreEvent::DataCorruption
.into_err()
.details("Archive integrity compromised")
.ctx(trc::Key::Value, bytes)
.caused_by(trc::location!())
})?;
match marker & MARKER_MASK {
ARCHIVE_UNCOMPRESSED => {
let mut bytes = AlignedVec::with_capacity(contents.len());
bytes.extend_from_slice(contents);
Ok(Archive {
hash,
version: marker & VERSION_MASK,
inner: AlignedBytes::Aligned(bytes),
})
}
Some((&ARCHIVE_LZ4_COMPRESSED, archive)) => aligned_lz4_deflate(archive),
ARCHIVE_LZ4_COMPRESSED => aligned_lz4_deflate(contents).map(|inner| Archive {
hash,
version: marker & VERSION_MASK,
inner,
}),
_ => Err(trc::StoreEvent::DataCorruption
.into_err()
.details("Invalid archive marker.")
@ -36,20 +66,50 @@ impl Deserialize for Archive {
}
fn deserialize_owned(mut bytes: Vec<u8>) -> trc::Result<Self> {
match bytes.last() {
Some(&ARCHIVE_UNCOMPRESSED) => {
bytes.pop();
let (contents, marker, hash) = bytes
.split_at_checked(bytes.len() - (U32_LEN + 1))
.and_then(|(contents, marker)| {
marker.split_first().and_then(|(marker, archive_hash)| {
let hash = gxhash::gxhash32(contents, HASH_SEED);
if hash.to_be_bytes().as_slice() == archive_hash {
Some((contents, *marker, hash))
} else {
None
}
})
})
.ok_or_else(|| {
trc::StoreEvent::DataCorruption
.into_err()
.details("Archive integrity compromised")
.ctx(trc::Key::Value, bytes.as_slice())
.caused_by(trc::location!())
})?;
match marker & MARKER_MASK {
ARCHIVE_UNCOMPRESSED => {
bytes.truncate(contents.len());
if bytes.as_ptr().addr() & (ARCHIVE_ALIGNMENT - 1) == 0 {
Ok(Archive::Vec(bytes))
Ok(Archive {
hash,
version: marker & VERSION_MASK,
inner: AlignedBytes::Vec(bytes),
})
} else {
let mut aligned = AlignedVec::with_capacity(bytes.len());
aligned.extend_from_slice(&bytes);
Ok(Archive::Aligned(aligned))
Ok(Archive {
hash,
version: marker & VERSION_MASK,
inner: AlignedBytes::Aligned(aligned),
})
}
}
Some(&ARCHIVE_LZ4_COMPRESSED) => {
aligned_lz4_deflate(bytes.get(..bytes.len() - 1).unwrap_or_default())
}
ARCHIVE_LZ4_COMPRESSED => aligned_lz4_deflate(contents).map(|inner| Archive {
hash,
version: marker & VERSION_MASK,
inner,
}),
_ => Err(trc::StoreEvent::DataCorruption
.into_err()
.details("Invalid archive marker.")
@ -60,7 +120,7 @@ impl Deserialize for Archive {
}
#[inline]
fn aligned_lz4_deflate(archive: &[u8]) -> trc::Result<Archive> {
fn aligned_lz4_deflate(archive: &[u8]) -> trc::Result<AlignedBytes> {
lz4_flex::block::uncompressed_size(archive)
.and_then(|(uncompressed_size, archive)| {
let mut bytes = AlignedVec::with_capacity(uncompressed_size);
@ -69,7 +129,7 @@ fn aligned_lz4_deflate(archive: &[u8]) -> trc::Result<Archive> {
bytes.set_len(uncompressed_size);
}
lz4_flex::decompress_into(archive, &mut bytes)?;
Ok(Archive::Aligned(bytes))
Ok(AlignedBytes::Aligned(bytes))
})
.map_err(|err| {
trc::StoreEvent::DecompressError
@ -82,6 +142,7 @@ fn aligned_lz4_deflate(archive: &[u8]) -> trc::Result<Archive> {
impl<T> Serialize for Archiver<T>
where
T: rkyv::Archive
+ SerializedVersion
+ for<'a> rkyv::Serialize<
rkyv::api::high::HighSerializer<
rkyv::util::AlignedVec,
@ -103,88 +164,119 @@ where
if input_len > COMPRESS_WATERMARK {
let mut bytes =
vec![
ARCHIVE_LZ4_COMPRESSED;
lz4_flex::block::get_maximum_output_size(input_len) + U32_LEN + 1
ARCHIVE_LZ4_COMPRESSED | (T::serialize_version() & VERSION_MASK);
lz4_flex::block::get_maximum_output_size(input_len) + (U32_LEN * 2) + 1
];
bytes[0..U32_LEN].copy_from_slice(&(input_len as u32).to_le_bytes());
let bytes_len = lz4_flex::compress_into(input, &mut bytes[U32_LEN..]).unwrap()
+ U32_LEN
+ 1;
if bytes_len < input_len {
bytes.truncate(bytes_len);
let compressed_len =
lz4_flex::compress_into(input, &mut bytes[U32_LEN..]).unwrap();
if compressed_len < input_len {
bytes[..U32_LEN].copy_from_slice(&(input_len as u32).to_le_bytes());
let hash = gxhash::gxhash32(&bytes[..compressed_len + U32_LEN], HASH_SEED);
bytes[compressed_len + U32_LEN + 1..compressed_len + (U32_LEN * 2) + 1]
.copy_from_slice(&hash.to_be_bytes());
bytes.truncate(compressed_len + (U32_LEN * 2) + 1);
} else {
bytes.clear();
bytes.extend_from_slice(input);
bytes.push(ARCHIVE_UNCOMPRESSED);
bytes.push(ARCHIVE_UNCOMPRESSED | (T::serialize_version() & VERSION_MASK));
bytes.extend_from_slice(&gxhash::gxhash32(input, HASH_SEED).to_be_bytes());
}
bytes
} else {
let mut bytes = Vec::with_capacity(input_len + 1);
let mut bytes = Vec::with_capacity(input_len + U32_LEN + 1);
bytes.extend_from_slice(input);
bytes.push(ARCHIVE_UNCOMPRESSED);
bytes.push(ARCHIVE_UNCOMPRESSED | (T::serialize_version() & VERSION_MASK));
bytes.extend_from_slice(&gxhash::gxhash32(input, HASH_SEED).to_be_bytes());
bytes
}
})
}
}
impl Archive {
pub fn try_unpack_bytes(bytes: &[u8]) -> Option<Cow<[u8]>> {
match bytes.split_last() {
Some((&ARCHIVE_UNCOMPRESSED, archive)) => Some(archive.into()),
Some((&ARCHIVE_LZ4_COMPRESSED, archive)) => {
lz4_flex::decompress_size_prepended(archive)
.ok()
.map(Cow::Owned)
}
_ => None,
}
}
impl Archive<AlignedBytes> {
#[inline]
pub fn as_bytes(&self) -> &[u8] {
match self {
Archive::Vec(bytes) => bytes.as_slice(),
Archive::Aligned(bytes) => bytes.as_slice(),
match &self.inner {
AlignedBytes::Vec(bytes) => bytes.as_slice(),
AlignedBytes::Aligned(bytes) => bytes.as_slice(),
}
}
pub fn unarchive<T>(&self) -> trc::Result<&<T as rkyv::Archive>::Archived>
where
T: rkyv::Archive,
T: rkyv::Archive + SerializedVersion,
T::Archived: for<'a> rkyv::bytecheck::CheckBytes<
rkyv::api::high::HighValidator<'a, rkyv::rancor::Error>,
> + rkyv::Deserialize<T, rkyv::api::high::HighDeserializer<rkyv::rancor::Error>>,
{
rkyv::access::<T::Archived, rkyv::rancor::Error>(self.as_bytes()).map_err(|err| {
trc::StoreEvent::DataCorruption
.caused_by(trc::location!())
if self.version == T::serialize_version() {
// SAFETY: Trusted and versioned input with integrity hash
Ok(unsafe { rkyv::access_unchecked::<T::Archived>(self.as_bytes()) })
} else {
Err(trc::StoreEvent::DataCorruption
.into_err()
.details(format!(
"Archive version mismatch, expected {} but got {}",
T::serialize_version(),
self.version
))
.ctx(trc::Key::Value, self.as_bytes())
.reason(err)
})
.caused_by(trc::location!()))
}
}
pub fn deserialize<T>(&self) -> trc::Result<T>
where
T: rkyv::Archive,
T: rkyv::Archive + SerializedVersion,
T::Archived: for<'a> rkyv::bytecheck::CheckBytes<
rkyv::api::high::HighValidator<'a, rkyv::rancor::Error>,
> + rkyv::Deserialize<T, rkyv::api::high::HighDeserializer<rkyv::rancor::Error>>,
{
rkyv::from_bytes(self.as_bytes()).map_err(|err| {
trc::StoreEvent::DeserializeError
.ctx(trc::Key::Value, self.as_bytes())
.caused_by(trc::location!())
.reason(err)
self.unarchive::<T>().and_then(|input| {
rkyv::deserialize(input).map_err(|err| {
trc::StoreEvent::DeserializeError
.ctx(trc::Key::Value, self.as_bytes())
.caused_by(trc::location!())
.reason(err)
})
})
}
pub fn to_unarchived<T>(&self) -> trc::Result<Archive<&<T as rkyv::Archive>::Archived>>
where
T: rkyv::Archive + SerializedVersion,
T::Archived: for<'a> rkyv::bytecheck::CheckBytes<
rkyv::api::high::HighValidator<'a, rkyv::rancor::Error>,
> + rkyv::Deserialize<T, rkyv::api::high::HighDeserializer<rkyv::rancor::Error>>,
{
self.unarchive::<T>().map(|inner| Archive {
hash: self.hash,
version: self.version,
inner,
})
}
pub fn into_deserialized<T>(&self) -> trc::Result<Archive<T>>
where
T: rkyv::Archive + SerializedVersion,
T::Archived: for<'a> rkyv::bytecheck::CheckBytes<
rkyv::api::high::HighValidator<'a, rkyv::rancor::Error>,
> + rkyv::Deserialize<T, rkyv::api::high::HighDeserializer<rkyv::rancor::Error>>,
{
self.deserialize::<T>().map(|inner| Archive {
hash: self.hash,
version: self.version,
inner,
})
}
pub fn into_inner(self) -> Vec<u8> {
let mut bytes = match self {
Archive::Vec(bytes) => bytes,
Archive::Aligned(bytes) => bytes.to_vec(),
let mut bytes = match self.inner {
AlignedBytes::Vec(bytes) => bytes,
AlignedBytes::Aligned(bytes) => bytes.to_vec(),
};
bytes.push(ARCHIVE_UNCOMPRESSED);
bytes.extend_from_slice(&self.hash.to_be_bytes());
bytes
}
}
@ -209,42 +301,14 @@ where
}
}
impl HashedValue<Archive> {
pub fn to_unarchived<T>(&self) -> trc::Result<HashedValue<&<T as rkyv::Archive>::Archived>>
where
T: rkyv::Archive,
T::Archived: for<'a> rkyv::bytecheck::CheckBytes<
rkyv::api::high::HighValidator<'a, rkyv::rancor::Error>,
> + rkyv::Deserialize<T, rkyv::api::high::HighDeserializer<rkyv::rancor::Error>>,
{
self.inner.unarchive::<T>().map(|inner| HashedValue {
hash: self.hash,
inner,
})
}
pub fn into_deserialized<T>(&self) -> trc::Result<HashedValue<T>>
where
T: rkyv::Archive,
T::Archived: for<'a> rkyv::bytecheck::CheckBytes<
rkyv::api::high::HighValidator<'a, rkyv::rancor::Error>,
> + rkyv::Deserialize<T, rkyv::api::high::HighDeserializer<rkyv::rancor::Error>>,
{
self.inner.deserialize::<T>().map(|inner| HashedValue {
hash: self.hash,
inner,
})
}
}
impl<T> HashedValue<&T>
impl<T> Archive<&T>
where
T: rkyv::Portable
+ for<'a> rkyv::bytecheck::CheckBytes<rkyv::api::high::HighValidator<'a, rkyv::rancor::Error>>
+ Sync
+ Send,
{
pub fn into_deserialized<V>(&self) -> trc::Result<HashedValue<V>>
pub fn into_deserialized<V>(&self) -> trc::Result<Archive<V>>
where
T: rkyv::Deserialize<V, rkyv::api::high::HighDeserializer<rkyv::rancor::Error>>,
{
@ -254,8 +318,9 @@ where
.caused_by(trc::location!())
.reason(err)
})
.map(|inner| HashedValue {
.map(|inner| Archive {
hash: self.hash,
version: self.version,
inner,
})
}
@ -409,7 +474,7 @@ impl<T: serde::Serialize + serde::de::DeserializeOwned + Sized + Sync + Send> De
}
}
impl From<Value<'static>> for Archive {
impl<T> From<Value<'static>> for Archive<T> {
fn from(_: Value<'static>) -> Self {
unimplemented!()
}

View file

@ -374,7 +374,7 @@ pub async fn jmap_tests() {
.await;
webhooks::test(&mut params).await;
email_query::test(&mut params, delete).await;
/*email_query::test(&mut params, delete).await;
email_get::test(&mut params).await;
email_set::test(&mut params).await;
email_parse::test(&mut params).await;
@ -387,7 +387,7 @@ pub async fn jmap_tests() {
mailbox::test(&mut params).await;
delivery::test(&mut params).await;
auth_acl::test(&mut params).await;
auth_limits::test(&mut params).await;
auth_limits::test(&mut params).await;*/
auth_oauth::test(&mut params).await;
event_source::test(&mut params).await;
push_subscription::test(&mut params).await;

View file

@ -19,7 +19,7 @@ use jmap_client::{
use jmap_proto::types::{collection::Collection, id::Id, property::Property};
use store::{
rand::{self, Rng},
write::Archive,
write::{AlignedBytes, Archive},
};
use super::assert_is_empty;
@ -232,7 +232,7 @@ async fn email_tests(server: Server, client: Arc<Client>) {
for email_id in &email_ids_in_mailbox {
if let Some(mailbox_tags) = server
.get_property::<Archive>(
.get_property::<Archive<AlignedBytes>>(
TEST_USER_ID,
Collection::Email,
email_id,

View file

@ -12,7 +12,9 @@ use common::{
};
use store::{
Deserialize, IterateParams, U64_LEN, ValueKey,
write::{Archive, QueueClass, ReportEvent, ValueClass, key::DeserializeBigEndian},
write::{
AlignedBytes, Archive, QueueClass, ReportEvent, ValueClass, key::DeserializeBigEndian,
},
};
use tokio::sync::mpsc::error::TryRecvError;
@ -186,8 +188,8 @@ impl QueueReceiver {
.iterate(
IterateParams::new(from_key, to_key).descending(),
|key, value| {
let value =
<Archive as Deserialize>::deserialize(value)?.deserialize::<Message>()?;
let value = <Archive<AlignedBytes> as Deserialize>::deserialize(value)?
.deserialize::<Message>()?;
assert_eq!(key.deserialize_be_u64(0)?, value.queue_id);
messages.push(value);
Ok(true)