mirror of https://github.com/stalwartlabs/mail-server.git
synced 2025-10-30 22:36:03 +08:00

DAV file management delete

This commit is contained in:
parent eadd36f4cb
commit 110ec14fe6
40 changed files with 1162 additions and 416 deletions

@@ -87,6 +87,7 @@ pub struct DefaultFolder {
 #[derive(
     rkyv::Archive, rkyv::Deserialize, rkyv::Serialize, Clone, Copy, PartialEq, Eq, Hash, Debug,
 )]
+#[rkyv(derive(Debug))]
 pub enum SpecialUse {
     Inbox,
     Trash,

@@ -9,6 +9,7 @@ use std::{sync::Arc, time::Duration};
 use directory::{Directory, QueryBy, Type, backend::internal::manage::ManageDirectory};
 use jmap_proto::types::{
     blob::BlobId, collection::Collection, property::Property, state::StateChange,
+    type_state::DataType,
 };
 use sieve::Sieve;
 use store::{

@@ -379,16 +380,15 @@ impl Server {
         })
     }

-    pub async fn get_properties<U, I, P>(
+    pub async fn get_properties<U, I>(
         &self,
         account_id: u32,
         collection: Collection,
         iterate: &I,
-        property: P,
+        property: Property,
     ) -> trc::Result<Vec<(u32, U)>>
     where
         I: DocumentSet + Send + Sync,
-        P: AsRef<Property> + Sync + Send,
         U: Deserialize + 'static,
     {
         let property: u8 = property.as_ref().into();

@@ -604,6 +604,17 @@ impl Server {
         }
     }

+    #[inline]
+    pub async fn broadcast_single_state_change(
+        &self,
+        account_id: u32,
+        change_id: u64,
+        data_type: DataType,
+    ) {
+        self.broadcast_state_change(StateChange::new(account_id).with_change(data_type, change_id))
+            .await;
+    }
+
     #[allow(clippy::blocks_in_conditions)]
     pub async fn put_blob(
         &self,

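Note: the new broadcast_single_state_change is only a convenience wrapper over the existing broadcast_state_change. A rough usage sketch (illustrative values, not part of the commit):

    // Hypothetical call site, mirroring the DAV handlers later in this diff.
    async fn after_write(server: &Server, account_id: u32, change_id: u64) {
        // One DataType per broadcast; the wrapper builds the StateChange itself.
        server
            .broadcast_single_state_change(account_id, change_id, DataType::FileNode)
            .await;
    }
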
@@ -45,7 +45,7 @@ use rustls::sign::CertifiedKey;
 use tokio::sync::{Notify, Semaphore, mpsc};
 use tokio_rustls::TlsConnector;
 use utils::{
-    bimap::IdBimap,
+    bimap::{IdBimap, IdBimapItem},
     cache::{Cache, CacheItemWeight, CacheWithTtl},
     snowflake::SnowflakeIdGenerator,
 };

@@ -250,11 +250,19 @@ pub struct NameWrapper(pub String);

 #[derive(Debug, Default)]
 pub struct Files {
-    pub files: IdBimap,
+    pub files: IdBimap<FileItem>,
     pub size: u64,
     pub modseq: Option<u64>,
 }

+#[derive(Debug, Default)]
+pub struct FileItem {
+    pub document_id: u32,
+    pub parent_id: Option<u32>,
+    pub name: String,
+    pub is_container: bool,
+}
+
 #[derive(Clone, Default)]
 pub struct Core {
     pub storage: Storage,

@@ -480,3 +488,30 @@ pub fn ip_to_bytes_prefix(prefix: u8, ip: &IpAddr) -> Vec<u8> {
         }
     }
 }
+
+impl Files {
+    pub fn subtree(&self, search_path: &str) -> impl Iterator<Item = &FileItem> {
+        let prefix = format!("{search_path}/");
+        self.files
+            .iter()
+            .filter(move |item| item.name.starts_with(&prefix) || item.name == search_path)
+    }
+
+    pub fn is_ancestor_of(&self, ancestor: u32, descendant: u32) -> bool {
+        let ancestor = &self.files.by_id(ancestor).unwrap().name;
+        let descendant = &self.files.by_id(descendant).unwrap().name;
+
+        let prefix = format!("{ancestor}/");
+        descendant.starts_with(&prefix) || descendant == ancestor
+    }
+}
+
+impl IdBimapItem for FileItem {
+    fn id(&self) -> &u32 {
+        &self.document_id
+    }
+
+    fn name(&self) -> &str {
+        &self.name
+    }
+}

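Note: is_ancestor_of compares materialized path names, so it also returns true when both ids refer to the same node. A hedged sketch of the cycle guard a MOVE handler would build on it (helper name is hypothetical):

    // Refuse to move a container underneath itself or its own subtree.
    fn may_move(files: &Files, source_id: u32, dest_id: u32) -> bool {
        !files.is_ancestor_of(source_id, dest_id)
    }
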
@@ -16,13 +16,22 @@ use utils::topological::{TopologicalSort, TopologicalSortIterator};
 use crate::Server;

 pub struct ExpandedFolders {
-    names: AHashMap<u32, (String, u32)>,
+    names: AHashMap<u32, (String, u32, bool)>,
     iter: TopologicalSortIterator<u32>,
 }

+#[derive(Debug, Clone)]
+pub struct ExpandedFolder {
+    pub name: String,
+    pub document_id: u32,
+    pub parent_id: Option<u32>,
+    pub is_container: bool,
+}
+
 pub trait FolderHierarchy: Sync + Send {
     fn name(&self) -> String;
     fn parent_id(&self) -> u32;
+    fn is_container(&self) -> bool;
 }

 pub trait TopologyBuilder: Sync + Send {

@@ -72,7 +81,10 @@ impl Server {
                 let parent_id = folder.parent_id();

                 topological_sort.insert(parent_id, document_id);
-                names.insert(document_id, (folder.name(), parent_id));
+                names.insert(
+                    document_id,
+                    (folder.name(), parent_id, folder.is_container()),
+                );

                 Ok(true)
             },

@@ -154,7 +166,7 @@ impl ExpandedFolders {
     where
         T: Fn(u32, &str) -> Option<String>,
     {
-        for (document_id, (name, _)) in &mut self.names {
+        for (document_id, (name, _, _)) in &mut self.names {
            if let Some(new_name) = formatter(*document_id - 1, name) {
                *name = new_name;
            }

@@ -162,22 +174,36 @@ impl ExpandedFolders {
         self
     }

-    pub fn into_iterator(mut self) -> impl Iterator<Item = (u32, String)> + Sync + Send {
+    pub fn into_iterator(mut self) -> impl Iterator<Item = ExpandedFolder> + Sync + Send {
         for folder_id in self.iter.by_ref() {
             if folder_id != 0 {
-                if let Some((name, parent_name, parent_id)) =
-                    self.names.get(&folder_id).and_then(|(name, parent_id)| {
-                        self.names
-                            .get(parent_id)
-                            .map(|(parent_name, _)| (name, parent_name, *parent_id))
+                if let Some((name, parent_name, parent_id, is_container)) = self
+                    .names
+                    .get(&folder_id)
+                    .and_then(|(name, parent_id, is_container)| {
+                        self.names.get(parent_id).map(|(parent_name, _, _)| {
+                            (name, parent_name, *parent_id, *is_container)
+                        })
                     })
                 {
                     let name = format!("{parent_name}/{name}");
-                    self.names.insert(folder_id, (name, parent_id));
+                    self.names
+                        .insert(folder_id, (name, parent_id, is_container));
                 }
             }
         }

-        self.names.into_iter().map(|(id, (name, _))| (id - 1, name))
+        self.names
+            .into_iter()
+            .map(|(id, (name, parent_id, is_container))| ExpandedFolder {
+                name,
+                document_id: id - 1,
+                is_container,
+                parent_id: if parent_id == 0 {
+                    None
+                } else {
+                    Some(parent_id - 1)
+                },
+            })
     }
 }

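Note: into_iterator now yields ExpandedFolder records instead of (id, name) tuples; callers destructure the struct, as the mailbox changes later in this diff do. A minimal consumption sketch (assuming a folders: ExpandedFolders value in scope):

    let by_name: AHashMap<String, u32> = folders
        .into_iterator()
        .map(|e| (e.name, e.document_id))
        .collect();
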
@@ -10,7 +10,7 @@ use store::{
     Serialize, SerializeInfallible,
     write::{
         Archiver, BatchBuilder, BitmapClass, BlobOp, DirectoryClass, IntoOperations, Operation,
-        ValueOp, assert::HashedValue,
+        assert::HashedValue,
     },
 };
 use utils::BlobHash;

@@ -22,18 +22,19 @@ pub enum IndexValue<'x> {
     Text { field: u8, value: Cow<'x, str> },
     U32 { field: u8, value: Option<u32> },
     U64 { field: u8, value: Option<u64> },
-    U32List { field: u8, value: &'x [u32] },
+    U32List { field: u8, value: Cow<'x, [u32]> },
     Tag { field: u8, is_set: bool },
     Blob { value: BlobHash },
     Quota { used: u32 },
-    Acl { value: &'x [AclGrant] },
+    Acl { value: Cow<'x, [AclGrant]> },
 }

-pub trait IndexableObject:
-    Debug
-    + Eq
-    + Sync
-    + Send
+pub trait IndexableObject: Sync + Send {
+    fn index_values(&self) -> impl Iterator<Item = IndexValue<'_>>;
+}
+
+pub trait IndexableAndSerializableObject:
+    IndexableObject
     + rkyv::Archive
     + for<'a> rkyv::Serialize<
         rkyv::api::high::HighSerializer<

@@ -43,23 +44,22 @@ pub trait IndexableObject:
         >,
     >
 {
-    fn index_values(&self) -> impl Iterator<Item = IndexValue<'_>>;
 }

 #[derive(Debug)]
-pub struct ObjectIndexBuilder<T: IndexableObject> {
+pub struct ObjectIndexBuilder<C: IndexableObject, N: IndexableAndSerializableObject> {
     tenant_id: Option<u32>,
-    current: Option<HashedValue<T>>,
-    changes: Option<T>,
+    current: Option<HashedValue<C>>,
+    changes: Option<N>,
 }

-impl<T: IndexableObject> Default for ObjectIndexBuilder<T> {
+impl<C: IndexableObject, N: IndexableAndSerializableObject> Default for ObjectIndexBuilder<C, N> {
     fn default() -> Self {
         Self::new()
     }
 }

-impl<T: IndexableObject> ObjectIndexBuilder<T> {
+impl<C: IndexableObject, N: IndexableAndSerializableObject> ObjectIndexBuilder<C, N> {
     pub fn new() -> Self {
         Self {
             current: None,

@@ -68,30 +68,30 @@ impl<T: IndexableObject> ObjectIndexBuilder<T> {
         }
     }

-    pub fn with_current(mut self, current: HashedValue<T>) -> Self {
+    pub fn with_current(mut self, current: HashedValue<C>) -> Self {
         self.current = Some(current);
         self
     }

-    pub fn with_changes(mut self, changes: T) -> Self {
+    pub fn with_changes(mut self, changes: N) -> Self {
         self.changes = Some(changes);
         self
     }

-    pub fn with_current_opt(mut self, current: Option<HashedValue<T>>) -> Self {
+    pub fn with_current_opt(mut self, current: Option<HashedValue<C>>) -> Self {
         self.current = current;
         self
     }

-    pub fn changes(&self) -> Option<&T> {
+    pub fn changes(&self) -> Option<&N> {
         self.changes.as_ref()
     }

-    pub fn changes_mut(&mut self) -> Option<&mut T> {
+    pub fn changes_mut(&mut self) -> Option<&mut N> {
         self.changes.as_mut()
     }

-    pub fn current(&self) -> Option<&HashedValue<T>> {
+    pub fn current(&self) -> Option<&HashedValue<C>> {
         self.current.as_ref()
     }

@@ -101,23 +101,35 @@ impl<T: IndexableObject> ObjectIndexBuilder<T> {
         }
     }

-impl<T: IndexableObject> IntoOperations for ObjectIndexBuilder<T> {
+impl<C: IndexableObject, N: IndexableAndSerializableObject> IntoOperations
+    for ObjectIndexBuilder<C, N>
+{
     fn build(self, batch: &mut BatchBuilder) -> trc::Result<()> {
         match (self.current, self.changes) {
             (None, Some(changes)) => {
                 // Insertion
-                build_batch(batch, &changes, self.tenant_id, true);
+                for item in changes.index_values() {
+                    build_index(batch, item, self.tenant_id, true);
+                }
                 batch.set(Property::Value, Archiver::new(changes).serialize()?);
             }
             (Some(current), Some(changes)) => {
                 // Update
                 batch.assert_value(Property::Value, &current);
-                merge_batch(batch, current.inner, changes, self.tenant_id)?;
+                for (current, change) in current.inner.index_values().zip(changes.index_values()) {
+                    if current != change {
+                        merge_index(batch, current, change, self.tenant_id)?;
+                    }
+                }
                 batch.set(Property::Value, Archiver::new(changes).serialize()?);
             }
             (Some(current), None) => {
                 // Deletion
                 batch.assert_value(Property::Value, &current);
-                build_batch(batch, &current.inner, self.tenant_id, false);
+                for item in current.inner.index_values() {
+                    build_index(batch, item, self.tenant_id, false);
+                }
+
+                batch.clear(Property::Value);
             }
             (None, None) => unreachable!(),

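Note: splitting the builder into C (the current, archived value) and N (the new, serializable value) lets deletions index the archived form while insertions serialize the new one. The turbofish patterns used throughout the rest of this diff, shown in isolation:

    // Insertion: there is no current value, so C is unit.
    let insert = ObjectIndexBuilder::<(), _>::new().with_changes(node);
    // Deletion: there is no new value, so N is unit.
    let delete = ObjectIndexBuilder::<_, ()>::new().with_current(current);
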
@@ -127,316 +139,301 @@ impl<T: IndexableObject> IntoOperations for ObjectIndexBuilder<T> {
     }
 }

-fn build_batch<T: IndexableObject>(
-    batch: &mut BatchBuilder,
-    object: &T,
-    tenant_id: Option<u32>,
-    set: bool,
-) {
-    for item in object.index_values() {
-        match item {
-            // (per-field arms equivalent to build_index below, one level deeper)
-        }
-    }
-}
-
-fn merge_batch<T: IndexableObject>(
-    batch: &mut BatchBuilder,
-    current: T,
-    changes: T,
-    tenant_id: Option<u32>,
-) -> trc::Result<()> {
-    let mut has_changes = current != changes;
-
-    for (current, change) in current.index_values().zip(changes.index_values()) {
-        if current == change {
-            continue;
-        }
-        // (per-field arms equivalent to merge_index below)
-        has_changes = true;
-    }
-
-    if has_changes {
-        batch.ops.push(Operation::Value {
-            class: Property::Value.into(),
-            op: ValueOp::Set(Archiver::new(changes).serialize()?.into()),
-        });
-    }
-
-    Ok(())
-}
+fn build_index(batch: &mut BatchBuilder, item: IndexValue<'_>, tenant_id: Option<u32>, set: bool) {
+    match item {
+        IndexValue::Text { field, value } => {
+            if !value.is_empty() {
+                batch.ops.push(Operation::Index {
+                    field,
+                    key: value.into_owned().into_bytes(),
+                    set,
+                });
+            }
+        }
+        IndexValue::U32 { field, value } => {
+            if let Some(value) = value {
+                batch.ops.push(Operation::Index {
+                    field,
+                    key: value.serialize(),
+                    set,
+                });
+            }
+        }
+        IndexValue::U64 { field, value } => {
+            if let Some(value) = value {
+                batch.ops.push(Operation::Index {
+                    field,
+                    key: value.serialize(),
+                    set,
+                });
+            }
+        }
+        IndexValue::U32List { field, value } => {
+            for item in value.as_ref() {
+                batch.ops.push(Operation::Index {
+                    field,
+                    key: (*item).serialize(),
+                    set,
+                });
+            }
+        }
+        IndexValue::Tag { field, is_set } => {
+            if is_set {
+                batch.ops.push(Operation::Bitmap {
+                    class: BitmapClass::Tag {
+                        field,
+                        value: ().into(),
+                    },
+                    set,
+                });
+            }
+        }
+        IndexValue::Blob { value } => {
+            if set {
+                batch.set(BlobOp::Link { hash: value }, vec![]);
+            } else {
+                batch.clear(BlobOp::Link { hash: value });
+            }
+        }
+        IndexValue::Acl { value } => {
+            for item in value.as_ref() {
+                batch.ops.push(Operation::acl(
+                    item.account_id,
+                    if set {
+                        item.grants.bitmap.serialize().into()
+                    } else {
+                        None
+                    },
+                ));
+            }
+        }
+        IndexValue::Quota { used } => {
+            let value = if set { used as i64 } else { -(used as i64) };
+
+            if let Some(account_id) = batch.last_account_id() {
+                batch.add(DirectoryClass::UsedQuota(account_id), value);
+            }
+
+            if let Some(tenant_id) = tenant_id {
+                batch.add(DirectoryClass::UsedQuota(tenant_id), value);
+            }
+        }
+    }
+}
+
+fn merge_index(
+    batch: &mut BatchBuilder,
+    current: IndexValue<'_>,
+    change: IndexValue<'_>,
+    tenant_id: Option<u32>,
+) -> trc::Result<()> {
+    match (current, change) {
+        (
+            IndexValue::Text {
+                field,
+                value: old_value,
+            },
+            IndexValue::Text {
+                value: new_value, ..
+            },
+        ) => {
+            if !old_value.is_empty() {
+                batch.ops.push(Operation::Index {
+                    field,
+                    key: old_value.into_owned().into_bytes(),
+                    set: false,
+                });
+            }
+
+            if !new_value.is_empty() {
+                batch.ops.push(Operation::Index {
+                    field,
+                    key: new_value.into_owned().into_bytes(),
+                    set: true,
+                });
+            }
+        }
+        (
+            IndexValue::U32 {
+                field,
+                value: old_value,
+            },
+            IndexValue::U32 {
+                value: new_value, ..
+            },
+        ) => {
+            if let Some(value) = old_value {
+                batch.ops.push(Operation::Index {
+                    field,
+                    key: value.serialize(),
+                    set: false,
+                });
+            }
+            if let Some(value) = new_value {
+                batch.ops.push(Operation::Index {
+                    field,
+                    key: value.serialize(),
+                    set: true,
+                });
+            }
+        }
+        (
+            IndexValue::U64 {
+                field,
+                value: old_value,
+            },
+            IndexValue::U64 {
+                value: new_value, ..
+            },
+        ) => {
+            if let Some(value) = old_value {
+                batch.ops.push(Operation::Index {
+                    field,
+                    key: value.serialize(),
+                    set: false,
+                });
+            }
+            if let Some(value) = new_value {
+                batch.ops.push(Operation::Index {
+                    field,
+                    key: value.serialize(),
+                    set: true,
+                });
+            }
+        }
+        (
+            IndexValue::U32List {
+                field,
+                value: old_value,
+            },
+            IndexValue::U32List {
+                value: new_value, ..
+            },
+        ) => {
+            let mut add_values = HashSet::new();
+            let mut remove_values = HashSet::new();
+
+            for current_value in old_value.as_ref() {
+                remove_values.insert(current_value);
+            }
+            for value in new_value.as_ref() {
+                if !remove_values.remove(&value) {
+                    add_values.insert(value);
+                }
+            }
+
+            for (values, set) in [(add_values, true), (remove_values, false)] {
+                for value in values {
+                    batch.ops.push(Operation::Index {
+                        field,
+                        key: value.serialize(),
+                        set,
+                    });
+                }
+            }
+        }
+        (
+            IndexValue::Tag {
+                field,
+                is_set: was_set,
+            },
+            IndexValue::Tag { is_set, .. },
+        ) => {
+            if was_set {
+                batch.ops.push(Operation::Bitmap {
+                    class: BitmapClass::Tag {
+                        field,
+                        value: ().into(),
+                    },
+                    set: false,
+                });
+            }
+            if is_set {
+                batch.ops.push(Operation::Bitmap {
+                    class: BitmapClass::Tag {
+                        field,
+                        value: ().into(),
+                    },
+                    set: true,
+                });
+            }
+        }
+        (IndexValue::Blob { value: old_hash }, IndexValue::Blob { value: new_hash }) => {
+            batch.clear(BlobOp::Link { hash: old_hash });
+            batch.set(BlobOp::Link { hash: new_hash }, vec![]);
+        }
+        (IndexValue::Acl { value: old_acl }, IndexValue::Acl { value: new_acl }) => {
+            match (!old_acl.is_empty(), !new_acl.is_empty()) {
+                (true, true) => {
+                    // Remove deleted ACLs
+                    for current_item in old_acl.as_ref() {
+                        if !new_acl
+                            .iter()
+                            .any(|item| item.account_id == current_item.account_id)
+                        {
+                            batch
+                                .ops
+                                .push(Operation::acl(current_item.account_id, None));
+                        }
+                    }
+
+                    // Update ACLs
+                    for item in new_acl.as_ref() {
+                        let mut add_item = true;
+                        for current_item in old_acl.as_ref() {
+                            if item.account_id == current_item.account_id {
+                                if item.grants == current_item.grants {
+                                    add_item = false;
+                                }
+                                break;
+                            }
+                        }
+                        if add_item {
+                            batch.ops.push(Operation::acl(
+                                item.account_id,
+                                item.grants.bitmap.serialize().into(),
+                            ));
+                        }
+                    }
+                }
+                (false, true) => {
+                    // Add all ACLs
+                    for item in new_acl.as_ref() {
+                        batch.ops.push(Operation::acl(
+                            item.account_id,
+                            item.grants.bitmap.serialize().into(),
+                        ));
+                    }
+                }
+                (true, false) => {
+                    // Remove all ACLs
+                    for item in old_acl.as_ref() {
+                        batch.ops.push(Operation::acl(item.account_id, None));
+                    }
+                }
+                _ => {}
+            }
+        }
+        (IndexValue::Quota { used: old_used }, IndexValue::Quota { used: new_used }) => {
+            let value = new_used as i64 - old_used as i64;
+            if let Some(account_id) = batch.last_account_id() {
+                batch.add(DirectoryClass::UsedQuota(account_id), value);
+            }
+
+            if let Some(tenant_id) = tenant_id {
+                batch.add(DirectoryClass::UsedQuota(tenant_id), value);
+            }
+        }
+        _ => unreachable!(),
+    }
+
+    Ok(())
+}
+
+impl IndexableObject for () {
+    fn index_values(&self) -> impl Iterator<Item = IndexValue<'_>> {
+        std::iter::empty()
+    }
+}
+
+impl IndexableAndSerializableObject for () {}

@@ -2,6 +2,7 @@ use common::{Server, auth::AccessToken};
 use hyper::StatusCode;
 use jmap_proto::types::{acl::Acl, collection::Collection};
 use trc::AddContext;
+use utils::map::bitmap::Bitmap;

 use crate::DavError;

@@ -12,8 +13,20 @@ pub(crate) trait DavAclHandler: Sync + Send {
         account_id: u32,
         collection: Collection,
         parent_id: Option<u32>,
-        check_acls: Acl,
+        check_acls: impl Into<Bitmap<Acl>> + Send,
     ) -> impl Future<Output = crate::Result<u32>> + Send;
+
+    #[allow(clippy::too_many_arguments)]
+    fn validate_child_or_parent_acl(
+        &self,
+        access_token: &AccessToken,
+        account_id: u32,
+        collection: Collection,
+        document_id: u32,
+        parent_id: Option<u32>,
+        child_acl: impl Into<Bitmap<Acl>> + Send,
+        parent_acl: impl Into<Bitmap<Acl>> + Send,
+    ) -> impl Future<Output = crate::Result<()>> + Send;
 }

 impl DavAclHandler for Server {

@@ -23,7 +36,7 @@ impl DavAclHandler for Server {
         account_id: u32,
         collection: Collection,
         parent_id: Option<u32>,
-        check_acls: Acl,
+        check_acls: impl Into<Bitmap<Acl>> + Send,
     ) -> crate::Result<u32> {
         match parent_id {
             Some(parent_id) => {

@@ -53,4 +66,43 @@ impl DavAclHandler for Server {
             }
         }
     }
+
+    async fn validate_child_or_parent_acl(
+        &self,
+        access_token: &AccessToken,
+        account_id: u32,
+        collection: Collection,
+        document_id: u32,
+        parent_id: Option<u32>,
+        child_acl: impl Into<Bitmap<Acl>> + Send,
+        parent_acl: impl Into<Bitmap<Acl>> + Send,
+    ) -> crate::Result<()> {
+        if access_token.is_member(account_id)
+            || self
+                .has_access_to_document(
+                    access_token,
+                    account_id,
+                    collection,
+                    document_id,
+                    child_acl,
+                )
+                .await
+                .caused_by(trc::location!())?
+            || (parent_id.is_some()
+                && self
+                    .has_access_to_document(
+                        access_token,
+                        account_id,
+                        collection,
+                        parent_id.unwrap(),
+                        parent_acl,
+                    )
+                    .await
+                    .caused_by(trc::location!())?)
+        {
+            Ok(())
+        } else {
+            Err(DavError::Code(StatusCode::FORBIDDEN))
+        }
+    }
 }

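Note: the rule grants access when the token is a member of the account, holds child_acl on the document itself, or holds parent_acl on the parent container. A condensed restatement (hypothetical helper, not part of the commit):

    fn allowed(is_member: bool, child_ok: bool, parent_ok: Option<bool>) -> bool {
        is_member || child_ok || parent_ok.unwrap_or(false)
    }
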
@@ -32,7 +32,10 @@ impl DavUriResource for Server {
             .split_once("/dav/")
             .ok_or(DavError::Code(StatusCode::NOT_FOUND))?;

-        let mut uri_parts = uri_parts.splitn(3, '/').filter(|x| !x.is_empty());
+        let mut uri_parts = uri_parts
+            .trim_end_matches('/')
+            .splitn(3, '/')
+            .filter(|x| !x.is_empty());
         let mut resource = UriResource {
             collection: uri_parts
                 .next()

@@ -70,7 +73,7 @@ impl DavUriResource for Server {

             // Obtain remaining path
             resource.account_id = Some(account_id);
-            resource.resource = uri_parts.next().map(|uri| uri.trim_end_matches('/'));
+            resource.resource = uri_parts.next();
         }

         Ok(resource)

@@ -79,6 +82,16 @@ impl DavUriResource for Server {

 impl<T> UriResource<T> {
     pub fn account_id(&self) -> crate::Result<u32> {
-        self.account_id.ok_or(DavError::Code(StatusCode::NOT_FOUND))
+        self.account_id.ok_or(DavError::Code(StatusCode::FORBIDDEN))
     }
 }

+impl<T> UriResource<Option<T>> {
+    pub fn unwrap(self) -> UriResource<T> {
+        UriResource {
+            collection: self.collection,
+            account_id: self.account_id,
+            resource: self.resource.unwrap(),
+        }
+    }
+}

@@ -4,9 +4,25 @@
  * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
  */

-use common::{Server, auth::AccessToken};
-use dav_proto::RequestHeaders;
+use std::sync::Arc;
+
+use common::{Files, Server, auth::AccessToken};
+use dav_proto::{Depth, RequestHeaders};
+use groupware::file::hierarchy::FileHierarchy;
 use http_proto::HttpResponse;
+use hyper::StatusCode;
+use jmap_proto::types::{acl::Acl, collection::Collection};
+use trc::AddContext;
+use utils::map::bitmap::Bitmap;
+
+use crate::{
+    DavError,
+    common::{
+        acl::DavAclHandler,
+        uri::{DavUriResource, UriResource},
+    },
+    file::{DavFileResource, FileItemId},
+};

 pub(crate) trait FileCopyMoveRequestHandler: Sync + Send {
     fn handle_file_copy_move_request(

@@ -24,6 +40,187 @@ impl FileCopyMoveRequestHandler for Server {
         headers: RequestHeaders<'_>,
         is_move: bool,
     ) -> crate::Result<HttpResponse> {
-        todo!()
+        // Validate source
+        let from_resource = self.validate_uri(access_token, headers.uri).await?;
+        let from_account_id = from_resource.account_id()?;
+        let from_files = self
+            .fetch_file_hierarchy(from_account_id)
+            .await
+            .caused_by(trc::location!())?;
+        let from_resource = from_files.map_resource::<FileItemId>(from_resource)?;
+
+        // Validate source ACLs
+        let mut child_acl = Bitmap::new();
+        let mut parent_acl = Bitmap::new();
+        match (from_resource.resource.is_container, is_move) {
+            (true, true) => {
+                child_acl.insert(Acl::Delete);
+                child_acl.insert(Acl::RemoveItems);
+                parent_acl.insert(Acl::RemoveItems);
+            }
+            (true, false) => {
+                child_acl.insert(Acl::Read);
+                child_acl.insert(Acl::ReadItems);
+                parent_acl.insert(Acl::ReadItems);
+            }
+            (false, true) => {
+                child_acl.insert(Acl::Delete);
+                parent_acl.insert(Acl::RemoveItems);
+            }
+            (false, false) => {
+                child_acl.insert(Acl::Read);
+                parent_acl.insert(Acl::ReadItems);
+            }
+        }
+        self.validate_child_or_parent_acl(
+            access_token,
+            from_account_id,
+            Collection::FileNode,
+            from_resource.resource.document_id,
+            from_resource.resource.parent_id,
+            child_acl,
+            parent_acl,
+        )
+        .await?;
+
+        // Validate destination
+        let to_resource = self
+            .validate_uri(
+                access_token,
+                headers
+                    .destination
+                    .ok_or(DavError::Code(StatusCode::BAD_GATEWAY))?,
+            )
+            .await?;
+        let to_account_id = to_resource
+            .account_id
+            .ok_or(DavError::Code(StatusCode::BAD_GATEWAY))?;
+        let to_files = if to_account_id == from_account_id {
+            from_files.clone()
+        } else {
+            self.fetch_file_hierarchy(to_account_id)
+                .await
+                .caused_by(trc::location!())?
+        };
+        let to_resource = to_files.map_destination::<FileItemId>(to_resource)?;
+        if from_resource.collection != to_resource.collection
+            || (from_resource.account_id == to_resource.account_id
+                && to_resource
+                    .resource
+                    .as_ref()
+                    .is_some_and(|r| r.document_id == from_resource.resource.document_id))
+        {
+            return Err(DavError::Code(StatusCode::BAD_GATEWAY));
+        }
+
+        // Validate destination ACLs
+        if let Some(to_resource) = &to_resource.resource {
+            let mut child_acl = Bitmap::new();
+
+            if to_resource.is_container {
+                child_acl.insert(Acl::ModifyItems);
+            } else {
+                child_acl.insert(Acl::Modify);
+            }
+
+            self.validate_child_or_parent_acl(
+                access_token,
+                to_account_id,
+                Collection::FileNode,
+                to_resource.document_id,
+                to_resource.parent_id,
+                child_acl,
+                Acl::ModifyItems,
+            )
+            .await?;
+        } else if !access_token.is_member(to_account_id) {
+            return Err(DavError::Code(StatusCode::FORBIDDEN));
+        }
+
+        match (
+            from_resource.resource.is_container,
+            to_resource.resource.as_ref().is_none_or(|r| r.is_container),
+            is_move,
+        ) {
+            (true, true, true) => {
+                move_container(
+                    self,
+                    from_files,
+                    to_files,
+                    from_resource,
+                    to_resource,
+                    headers.depth,
+                )
+                .await
+            }
+            (true, true, false) => {
+                copy_container(
+                    self,
+                    from_files,
+                    to_files,
+                    from_resource,
+                    to_resource,
+                    headers.depth,
+                )
+                .await
+            }
+            (false, false, true) => replace_item(from_resource, to_resource.unwrap()).await,
+            (false, false, false) => overwrite_item(from_resource, to_resource.unwrap()).await,
+            (false, true, true) => move_item(from_resource, to_resource).await,
+            (false, true, false) => copy_item(from_resource, to_resource).await,
+            _ => Err(DavError::Code(StatusCode::BAD_GATEWAY)),
+        }
     }
 }
+
+async fn move_container(
+    server: &Server,
+    from_files: Arc<Files>,
+    to_files: Arc<Files>,
+    from_resource: UriResource<FileItemId>,
+    to_resource: UriResource<Option<FileItemId>>,
+    depth: Depth,
+) -> crate::Result<HttpResponse> {
+    // check ancestors
+    todo!()
+}
+
+async fn copy_container(
+    server: &Server,
+    from_files: Arc<Files>,
+    to_files: Arc<Files>,
+    from_resource: UriResource<FileItemId>,
+    to_resource: UriResource<Option<FileItemId>>,
+    depth: Depth,
+) -> crate::Result<HttpResponse> {
+    // check ancestors
+    todo!()
+}
+
+async fn replace_item(
+    from_resource: UriResource<FileItemId>,
+    to_resource: UriResource<FileItemId>,
+) -> crate::Result<HttpResponse> {
+    todo!()
+}
+
+async fn overwrite_item(
+    from_resource: UriResource<FileItemId>,
+    to_resource: UriResource<FileItemId>,
+) -> crate::Result<HttpResponse> {
+    todo!()
+}
+
+async fn move_item(
+    from_resource: UriResource<FileItemId>,
+    to_resource: UriResource<Option<FileItemId>>,
+) -> crate::Result<HttpResponse> {
+    todo!()
+}
+
+async fn copy_item(
+    from_resource: UriResource<FileItemId>,
+    to_resource: UriResource<Option<FileItemId>>,
+) -> crate::Result<HttpResponse> {
+    todo!()
+}

@@ -4,9 +4,22 @@
  * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
  */

-use common::{Server, auth::AccessToken};
+use common::{Server, auth::AccessToken, storage::index::ObjectIndexBuilder};
 use dav_proto::RequestHeaders;
+use groupware::file::{FileNode, hierarchy::FileHierarchy};
 use http_proto::HttpResponse;
+use hyper::StatusCode;
+use jmap_proto::types::{
+    acl::Acl, collection::Collection, property::Property, type_state::DataType,
+};
+use store::write::{Archive, BatchBuilder, assert::HashedValue, log::ChangeLogBuilder};
+use trc::AddContext;
+use utils::map::bitmap::Bitmap;
+
+use crate::{
+    DavError,
+    common::{acl::DavAclHandler, uri::DavUriResource},
+};

 pub(crate) trait FileDeleteRequestHandler: Sync + Send {
     fn handle_file_delete_request(

@@ -22,6 +35,97 @@ impl FileDeleteRequestHandler for Server {
         access_token: &AccessToken,
         headers: RequestHeaders<'_>,
     ) -> crate::Result<HttpResponse> {
-        todo!()
+        // Validate URI
+        let resource = self.validate_uri(access_token, headers.uri).await?;
+        let account_id = resource.account_id()?;
+        let delete_path = resource
+            .resource
+            .filter(|r| !r.is_empty())
+            .ok_or(DavError::Code(StatusCode::FORBIDDEN))?;
+        let files = self
+            .fetch_file_hierarchy(account_id)
+            .await
+            .caused_by(trc::location!())?;
+
+        // Find ids to delete
+        let mut ids = files.subtree(delete_path).collect::<Vec<_>>();
+        if ids.is_empty() {
+            return Err(DavError::Code(StatusCode::NOT_FOUND));
+        }
+
+        // Sort ids descending from the deepest to the root
+        ids.sort_unstable_by(|a, b| b.name.len().cmp(&a.name.len()));
+        let (document_id, parent_id, is_container) = ids
+            .last()
+            .map(|a| (a.document_id, a.parent_id, a.is_container))
+            .unwrap();
+        let mut sorted_ids = Vec::with_capacity(ids.len());
+        sorted_ids.extend(ids.into_iter().map(|a| a.document_id));
+
+        // Validate ACLs
+        self.validate_child_or_parent_acl(
+            access_token,
+            account_id,
+            Collection::FileNode,
+            document_id,
+            parent_id,
+            if is_container {
+                Bitmap::new()
+                    .with_item(Acl::Delete)
+                    .with_item(Acl::RemoveItems)
+            } else {
+                Bitmap::new().with_item(Acl::RemoveItems)
+            },
+            Acl::RemoveItems,
+        )
+        .await?;
+
+        // Process deletions
+        let mut changes = ChangeLogBuilder::new();
+        for document_id in sorted_ids {
+            if let Some(submission) = self
+                .get_property::<HashedValue<Archive>>(
+                    account_id,
+                    Collection::FileNode,
+                    document_id,
+                    Property::Value,
+                )
+                .await?
+            {
+                // Update record
+                let mut batch = BatchBuilder::new();
+                batch
+                    .with_account_id(account_id)
+                    .with_collection(Collection::FileNode)
+                    .delete_document(document_id)
+                    .custom(
+                        ObjectIndexBuilder::<_, ()>::new()
+                            .with_tenant_id(access_token)
+                            .with_current(
+                                submission
+                                    .to_unarchived::<FileNode>()
+                                    .caused_by(trc::location!())?,
+                            ),
+                    )
+                    .caused_by(trc::location!())?;
+                self.store()
+                    .write(batch)
+                    .await
+                    .caused_by(trc::location!())?;
+                changes.log_delete(Collection::FileNode, document_id);
+            }
+        }
+
+        // Write changes
+        if !changes.is_empty() {
+            let change_id = self
+                .commit_changes(account_id, changes)
+                .await
+                .caused_by(trc::location!())?;
+            self.broadcast_single_state_change(account_id, change_id, DataType::FileNode)
+                .await;
+        }
+
+        Ok(HttpResponse::new(StatusCode::NO_CONTENT))
     }
 }

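Note: sorting by descending name length deletes the deepest nodes first, so a mid-batch failure never leaves a container whose children were only partially removed. A toy illustration of the ordering (paths are made up):

    let mut ids = vec!["a", "a/b", "a/b/c"];
    ids.sort_unstable_by(|a, b| b.len().cmp(&a.len()));
    assert_eq!(ids, ["a/b/c", "a/b", "a"]);
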
@@ -12,7 +12,7 @@ use dav_proto::{
 use groupware::file::{FileNode, hierarchy::FileHierarchy};
 use http_proto::HttpResponse;
 use hyper::StatusCode;
-use jmap_proto::types::{acl::Acl, collection::Collection};
+use jmap_proto::types::{acl::Acl, collection::Collection, type_state::DataType};
 use store::write::{BatchBuilder, log::LogInsert, now};
 use trc::AddContext;

@@ -92,13 +92,17 @@ impl FileMkColRequestHandler for Server {
             .with_collection(Collection::FileNode)
             .create_document()
             .log(LogInsert())
-            .custom(ObjectIndexBuilder::new().with_changes(node))
+            .custom(ObjectIndexBuilder::<(), _>::new().with_changes(node))
             .caused_by(trc::location!())?;
         self.store()
             .write(batch)
             .await
             .caused_by(trc::location!())?;

+        // Broadcast state change
+        self.broadcast_single_state_change(account_id, change_id, DataType::FileNode)
+            .await;
+
         Ok(HttpResponse::new(StatusCode::CREATED))
     }
 }

@@ -6,7 +6,7 @@

 use std::borrow::Cow;

-use common::Files;
+use common::{FileItem, Files};
 use hyper::StatusCode;

 use crate::{DavError, common::uri::UriResource};

@@ -22,39 +22,58 @@ pub mod propfind;
 pub mod proppatch;
 pub mod update;

-pub(crate) trait DavFileResource {
-    fn map_resource(&self, resource: UriResource<Option<&str>>) -> crate::Result<UriResource<u32>>;
+pub(crate) trait FromFileItem {
+    fn from_file_item(item: &FileItem) -> Self;
+}

-    fn map_resource_or_root(
+pub(crate) struct FileItemId {
+    pub document_id: u32,
+    pub parent_id: Option<u32>,
+    pub is_container: bool,
+}
+
+pub(crate) trait DavFileResource {
+    fn map_resource<T: FromFileItem>(
         &self,
         resource: UriResource<Option<&str>>,
-    ) -> crate::Result<UriResource<Option<u32>>>;
+    ) -> crate::Result<UriResource<T>>;

-    fn map_parent<'x>(&self, resource: &'x str) -> crate::Result<(Option<u32>, Cow<'x, str>)>;
+    fn map_destination<T: FromFileItem>(
+        &self,
+        resource: UriResource<Option<&str>>,
+    ) -> crate::Result<UriResource<Option<T>>>;
+
+    fn map_parent<'x, T: FromFileItem>(
+        &self,
+        resource: &'x str,
+    ) -> crate::Result<(Option<T>, Cow<'x, str>)>;

-    fn map_parent_resource<'x>(
+    fn map_parent_resource<'x, T: FromFileItem>(
         &self,
         resource: UriResource<Option<&'x str>>,
-    ) -> crate::Result<UriResource<(Option<u32>, Cow<'x, str>)>>;
+    ) -> crate::Result<UriResource<(Option<T>, Cow<'x, str>)>>;
 }

 impl DavFileResource for Files {
-    fn map_resource(&self, resource: UriResource<Option<&str>>) -> crate::Result<UriResource<u32>> {
+    fn map_resource<T: FromFileItem>(
+        &self,
+        resource: UriResource<Option<&str>>,
+    ) -> crate::Result<UriResource<T>> {
         resource
             .resource
             .and_then(|r| self.files.by_name(r))
             .map(|r| UriResource {
                 collection: resource.collection,
                 account_id: resource.account_id,
-                resource: r,
+                resource: T::from_file_item(r),
             })
             .ok_or(DavError::Code(StatusCode::NOT_FOUND))
     }

-    fn map_resource_or_root(
+    fn map_destination<T: FromFileItem>(
         &self,
         resource: UriResource<Option<&str>>,
-    ) -> crate::Result<UriResource<Option<u32>>> {
+    ) -> crate::Result<UriResource<Option<T>>> {
         Ok(UriResource {
             collection: resource.collection,
             account_id: resource.account_id,

@@ -62,7 +81,8 @@ impl DavFileResource for Files {
                 Some(
                     self.files
                         .by_name(resource)
-                        .ok_or(DavError::Code(StatusCode::NOT_FOUND))?,
+                        .map(T::from_file_item)
+                        .ok_or(DavError::Code(StatusCode::BAD_GATEWAY))?,
                 )
             } else {
                 None

@@ -70,12 +90,16 @@ impl DavFileResource for Files {
         })
     }

-    fn map_parent<'x>(&self, resource: &'x str) -> crate::Result<(Option<u32>, Cow<'x, str>)> {
+    fn map_parent<'x, T: FromFileItem>(
+        &self,
+        resource: &'x str,
+    ) -> crate::Result<(Option<T>, Cow<'x, str>)> {
         let (parent, child) = if let Some((parent, child)) = resource.rsplit_once('/') {
             (
                 Some(
                     self.files
                         .by_name(parent)
+                        .map(T::from_file_item)
                         .ok_or(DavError::Code(StatusCode::NOT_FOUND))?,
                 ),
                 child,

@@ -92,10 +116,10 @@ impl DavFileResource for Files {
         ))
     }

-    fn map_parent_resource<'x>(
+    fn map_parent_resource<'x, T: FromFileItem>(
         &self,
         resource: UriResource<Option<&'x str>>,
-    ) -> crate::Result<UriResource<(Option<u32>, Cow<'x, str>)>> {
+    ) -> crate::Result<UriResource<(Option<T>, Cow<'x, str>)>> {
         if let Some(r) = resource.resource {
             if self.files.by_name(r).is_none() {
                 self.map_parent(r).map(|r| UriResource {

@@ -111,3 +135,19 @@ impl DavFileResource for Files {
         }
     }
 }
+
+impl FromFileItem for u32 {
+    fn from_file_item(item: &FileItem) -> Self {
+        item.document_id
+    }
+}
+
+impl FromFileItem for FileItemId {
+    fn from_file_item(item: &FileItem) -> Self {
+        FileItemId {
+            document_id: item.document_id,
+            parent_id: item.parent_id,
+            is_container: item.is_container,
+        }
+    }
+}

@@ -16,7 +16,9 @@ use dav_proto::{
 use groupware::file::{FileNode, hierarchy::FileHierarchy};
 use http_proto::HttpResponse;
 use hyper::StatusCode;
-use jmap_proto::types::{acl::Acl, collection::Collection, property::Property};
+use jmap_proto::types::{
+    acl::Acl, collection::Collection, property::Property, type_state::DataType,
+};
 use store::write::{Archive, BatchBuilder, assert::HashedValue, log::Changes, now};
 use trc::AddContext;

@@ -131,8 +133,9 @@ impl FilePropPatchRequestHandler for Server {

             // Prepare write batch
             let mut batch = BatchBuilder::new();
+            let change_id = new_node.change_id;
             batch
-                .with_change_id(new_node.change_id)
+                .with_change_id(change_id)
                 .with_account_id(account_id)
                 .with_collection(Collection::FileNode)
                 .update_document(resource.resource)

@@ -148,6 +151,10 @@ impl FilePropPatchRequestHandler for Server {
                 .write(batch)
                 .await
                 .caused_by(trc::location!())?;
+
+            // Broadcast state change
+            self.broadcast_single_state_change(account_id, change_id, DataType::FileNode)
+                .await;
         }

         Ok(HttpResponse::new(StatusCode::MULTI_STATUS)

@@ -9,7 +9,9 @@ use dav_proto::RequestHeaders;
 use groupware::file::{FileNode, FileProperties, hierarchy::FileHierarchy};
 use http_proto::HttpResponse;
 use hyper::StatusCode;
-use jmap_proto::types::{acl::Acl, collection::Collection, property::Property};
+use jmap_proto::types::{
+    acl::Acl, collection::Collection, property::Property, type_state::DataType,
+};
 use store::write::{
     Archive, BatchBuilder,
     assert::HashedValue,

@@ -56,7 +58,7 @@ impl FileUpdateRequestHandler for Server {
             .resource
             .ok_or(DavError::Code(StatusCode::NOT_FOUND))?;

-        if let Some(document_id) = files.files.by_name(resource_name) {
+        if let Some(document_id) = files.files.by_name(resource_name).map(|r| r.document_id) {
             // Update
             let node_archive_ = self
                 .get_property::<HashedValue<Archive>>(

@@ -140,6 +142,10 @@ impl FileUpdateRequestHandler for Server {
                 .await
                 .caused_by(trc::location!())?;

+            // Broadcast state change
+            self.broadcast_single_state_change(account_id, change_id, DataType::FileNode)
+                .await;
+
             Ok(HttpResponse::new(StatusCode::OK))
         } else {
             // Insert

@@ -218,7 +224,7 @@ impl FileUpdateRequestHandler for Server {
                 .create_document()
                 .log(LogInsert())
                 .custom(
-                    ObjectIndexBuilder::new()
+                    ObjectIndexBuilder::<(), _>::new()
                         .with_changes(node)
                         .with_tenant_id(access_token),
                 )

@@ -228,6 +234,10 @@ impl FileUpdateRequestHandler for Server {
                 .await
                 .caused_by(trc::location!())?;

+            // Broadcast state change
+            self.broadcast_single_state_change(account_id, change_id, DataType::FileNode)
+                .await;
+
             Ok(HttpResponse::new(StatusCode::CREATED))
         }
     }

@@ -218,23 +218,24 @@ impl DavRequestHandler for Server {
         {
             Ok(response) => response,
             Err(DavError::Internal(err)) => {
-                let is_quota_error = matches!(
-                    err.event_type(),
-                    trc::EventType::Limit(trc::LimitEvent::Quota | trc::LimitEvent::TenantQuota)
-                );
+                let err_type = err.event_type();

                 trc::error!(err.span_id(session.session_id));

-                if is_quota_error {
-                    HttpResponse::new(StatusCode::PRECONDITION_FAILED)
+                match err_type {
+                    trc::EventType::Limit(
+                        trc::LimitEvent::Quota | trc::LimitEvent::TenantQuota,
+                    ) => HttpResponse::new(StatusCode::PRECONDITION_FAILED)
                         .with_xml_body(
                             ErrorResponse::new(BaseCondition::QuotaNotExceeded)
                                 .with_namespace(resource)
                                 .to_string(),
                         )
-                        .with_no_cache()
-                } else {
-                    HttpResponse::new(StatusCode::INTERNAL_SERVER_ERROR)
+                        .with_no_cache(),
+                    trc::EventType::Store(trc::StoreEvent::AssertValueFailed) => {
+                        HttpResponse::new(StatusCode::CONFLICT)
+                    }
+                    _ => HttpResponse::new(StatusCode::INTERNAL_SERVER_ERROR),
                 }
             }
             Err(DavError::Parse(err)) => HttpResponse::new(StatusCode::BAD_REQUEST),

@@ -101,7 +101,7 @@ impl MailboxDestroy for Server {
         // otherwise delete it.
         let mut destroy_ids = RoaringBitmap::new();
         for (message_id, mailbox_ids) in self
-            .get_properties::<HashedValue<Archive>, _, _>(
+            .get_properties::<HashedValue<Archive>, _>(
                 account_id,
                 Collection::Email,
                 &message_ids,

@@ -190,7 +190,7 @@ impl MailboxDestroy for Server {
         }

         // Obtain mailbox
-        if let Some(mailbox) = self
+        if let Some(mailbox_) = self
             .get_property::<HashedValue<Archive>>(
                 account_id,
                 Collection::Mailbox,

@@ -200,8 +200,8 @@ impl MailboxDestroy for Server {
             .await
             .caused_by(trc::location!())?
         {
-            let mailbox = mailbox
-                .into_deserialized::<Mailbox>()
+            let mailbox = mailbox_
+                .to_unarchived::<Mailbox>()
                 .caused_by(trc::location!())?;
             // Validate ACLs
             if access_token.is_shared(account_id) {

@@ -224,7 +224,7 @@ impl MailboxDestroy for Server {
                 .with_collection(Collection::Mailbox)
                 .delete_document(document_id)
                 .clear(Property::EmailIds)
-                .custom(ObjectIndexBuilder::new().with_current(mailbox))
+                .custom(ObjectIndexBuilder::<_, ()>::new().with_current(mailbox))
                 .caused_by(trc::location!())?;

             match self.core.storage.data.write(batch.build()).await {

@@ -5,13 +5,13 @@
 */

 use common::{
-    config::jmap::settings::SpecialUse,
+    config::jmap::settings::{ArchivedSpecialUse, SpecialUse},
     storage::{
         folder::FolderHierarchy,
-        index::{IndexValue, IndexableObject},
+        index::{IndexValue, IndexableAndSerializableObject, IndexableObject},
     },
 };
-use jmap_proto::types::property::Property;
+use jmap_proto::types::{property::Property, value::AclGrant};
 use store::write::{MaybeDynamicId, TagValue};

 use super::{ArchivedMailbox, ArchivedUidMailbox, Mailbox, UidMailbox};

@@ -41,14 +41,63 @@ impl IndexableObject for Mailbox {
             },
             IndexValue::U32List {
                 field: Property::IsSubscribed.into(),
-                value: &self.subscribers,
+                value: (&self.subscribers).into(),
             },
-            IndexValue::Acl { value: &self.acls },
+            IndexValue::Acl {
+                value: (&self.acls).into(),
+            },
         ]
         .into_iter()
     }
 }

+impl IndexableObject for &ArchivedMailbox {
+    fn index_values(&self) -> impl Iterator<Item = IndexValue<'_>> {
+        [
+            IndexValue::Text {
+                field: Property::Name.into(),
+                value: self.name.to_lowercase().into(),
+            },
+            IndexValue::Text {
+                field: Property::Role.into(),
+                value: self.role.as_str().unwrap_or_default().into(),
+            },
+            IndexValue::Tag {
+                field: Property::Role.into(),
+                is_set: !matches!(self.role, ArchivedSpecialUse::None),
+            },
+            IndexValue::U32 {
+                field: Property::ParentId.into(),
+                value: u32::from(self.parent_id).into(),
+            },
+            IndexValue::U32 {
+                field: Property::SortOrder.into(),
+                value: self.sort_order.as_ref().map(u32::from),
+            },
+            IndexValue::U32List {
+                field: Property::IsSubscribed.into(),
+                value: self
+                    .subscribers
+                    .iter()
+                    .map(u32::from)
+                    .collect::<Vec<_>>()
+                    .into(),
+            },
+            IndexValue::Acl {
+                value: self
+                    .acls
+                    .iter()
+                    .map(AclGrant::from)
+                    .collect::<Vec<_>>()
+                    .into(),
+            },
+        ]
+        .into_iter()
+    }
+}
+
+impl IndexableAndSerializableObject for Mailbox {}
+
 impl FolderHierarchy for ArchivedMailbox {
     fn name(&self) -> String {
         self.name.to_string()

@@ -57,6 +106,10 @@ impl FolderHierarchy for ArchivedMailbox {
     fn parent_id(&self) -> u32 {
         u32::from(self.parent_id)
     }
+
+    fn is_container(&self) -> bool {
+        true
+    }
 }

 impl From<&UidMailbox> for TagValue<MaybeDynamicId> {

@@ -102,7 +102,7 @@ impl MailboxFnc for Server {
                 }
                 batch
                     .create_document_with_id(document_id)
-                    .custom(ObjectIndexBuilder::new().with_changes(object))
+                    .custom(ObjectIndexBuilder::<(), _>::new().with_changes(object))
                     .caused_by(trc::location!())?;
                 mailbox_ids.insert(document_id);
             }

@@ -133,7 +133,7 @@ impl MailboxFnc for Server {
                 }
             })
             .into_iterator()
-            .map(|(document_id, name)| (name, document_id))
+            .map(|e| (e.name, e.document_id))
             .collect::<AHashMap<String, u32>>();

         let mut next_parent_id = 0;

@@ -177,7 +177,7 @@ impl MailboxFnc for Server {
                     .with_collection(Collection::Mailbox)
                     .create_document()
                     .custom(
-                        ObjectIndexBuilder::new()
+                        ObjectIndexBuilder::<(), _>::new()
                             .with_changes(Mailbox::new(name).with_parent_id(next_parent_id)),
                     )
                     .caused_by(trc::location!())?;

@@ -275,8 +275,8 @@ impl MailboxFnc for Server {
             folders
                 .format(|mailbox_id, _| (mailbox_id == INBOX_ID).then(|| "INBOX".to_string()))
                 .into_iterator()
-                .find(|(_, folder_name)| folder_name.eq_ignore_ascii_case(path))
-                .map(|(document_id, _)| document_id)
+                .find(|e| e.name.eq_ignore_ascii_case(path))
+                .map(|e| e.document_id)
         })
     }

@@ -20,6 +20,7 @@ pub const ARCHIVE_ID: u32 = 5;
 pub const TOMBSTONE_ID: u32 = u32::MAX - 1;

 #[derive(rkyv::Archive, rkyv::Deserialize, rkyv::Serialize, Debug, Clone, PartialEq, Eq)]
+#[rkyv(derive(Debug))]
 pub struct Mailbox {
     pub name: String,
     pub role: SpecialUse,

@@ -64,7 +64,7 @@ impl EmailDeletion for Server {
         // Fetch mailboxes and threadIds
         let mut thread_ids: AHashMap<u32, i32> = AHashMap::new();
         for (document_id, mailboxes) in self
-            .get_properties::<Archive, _, _>(
+            .get_properties::<Archive, _>(
                 account_id,
                 Collection::Email,
                 &document_ids,

@@ -86,7 +86,7 @@ impl EmailDeletion for Server {
             );
         }
         for (document_id, thread_id) in self
-            .get_properties::<u32, _, _>(
+            .get_properties::<u32, _>(
                 account_id,
                 Collection::Email,
                 &document_ids,

@@ -343,7 +343,7 @@ impl EmailDeletion for Server {
         // Find messages to destroy
         let mut destroy_ids = RoaringBitmap::new();
         for (document_id, cid) in self
-            .get_properties::<u64, _, _>(
+            .get_properties::<u64, _>(
                 account_id,
                 Collection::Email,
                 &deletion_candidates,

@ -29,7 +29,7 @@ impl SieveScriptDelete for Server {
|
|||
) -> trc::Result<bool> {
|
||||
// Fetch record
|
||||
let account_id = resource_token.account_id;
|
||||
let obj = self
|
||||
let obj_ = self
|
||||
.get_property::<HashedValue<Archive>>(
|
||||
account_id,
|
||||
Collection::SieveScript,
|
||||
|
|
@ -42,8 +42,9 @@ impl SieveScriptDelete for Server {
|
|||
.into_err()
|
||||
.caused_by(trc::location!())
|
||||
.document_id(document_id)
|
||||
})?
|
||||
.into_deserialized::<SieveScript>()
|
||||
})?;
|
||||
let obj = obj_
|
||||
.to_unarchived::<SieveScript>()
|
||||
.caused_by(trc::location!())?;
|
||||
|
||||
// Make sure the script is not active
|
||||
|
|
@ -59,7 +60,7 @@ impl SieveScriptDelete for Server {
|
|||
.delete_document(document_id)
|
||||
.clear(Property::EmailIds)
|
||||
.custom(
|
||||
ObjectIndexBuilder::new()
|
||||
ObjectIndexBuilder::<_, ()>::new()
|
||||
.with_current(obj)
|
||||
.with_tenant_id(resource_token),
|
||||
)
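
Note: deletion now un-indexes against the archived form of the script: the stored
value is converted with to_unarchived::<SieveScript>() instead of being fully
deserialized, and the IndexableObject impl for &ArchivedSieveScript (added below)
supplies the index values to remove. Condensed, with `stored` as a stand-in for
the fetched record, the new shape is:

    let obj = stored
        .to_unarchived::<SieveScript>()
        .caused_by(trc::location!())?;
    batch.custom(ObjectIndexBuilder::<_, ()>::new().with_current(obj));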

@@ -4,10 +4,10 @@
  * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
  */

-use common::storage::index::{IndexValue, IndexableObject};
+use common::storage::index::{IndexValue, IndexableAndSerializableObject, IndexableObject};
 use jmap_proto::types::property::Property;

-use super::SieveScript;
+use super::{ArchivedSieveScript, SieveScript};

 impl IndexableObject for SieveScript {
     fn index_values(&self) -> impl Iterator<Item = IndexValue<'_>> {

@@ -28,3 +28,27 @@ impl IndexableObject for SieveScript {
         .into_iter()
     }
 }
+
+impl IndexableAndSerializableObject for SieveScript {}
+
+impl IndexableObject for &ArchivedSieveScript {
+    fn index_values(&self) -> impl Iterator<Item = IndexValue<'_>> {
+        [
+            IndexValue::Text {
+                field: Property::Name.into(),
+                value: self.name.to_lowercase().into(),
+            },
+            IndexValue::U32 {
+                field: Property::IsActive.into(),
+                value: Some(self.is_active as u32),
+            },
+            IndexValue::Blob {
+                value: (&self.blob_hash).into(),
+            },
+            IndexValue::Quota {
+                used: u32::from(self.size),
+            },
+        ]
+        .into_iter()
+    }
+}

@@ -40,6 +40,7 @@ pub struct SeenIds {
 #[derive(
     rkyv::Archive, rkyv::Deserialize, rkyv::Serialize, Debug, Default, Clone, PartialEq, Eq,
 )]
+#[rkyv(derive(Debug))]
 pub struct SieveScript {
     pub name: String,
     pub is_active: bool,

@@ -51,6 +52,7 @@ pub struct SieveScript {
 #[derive(
     rkyv::Archive, rkyv::Deserialize, rkyv::Serialize, Debug, Default, Clone, PartialEq, Eq,
 )]
+#[rkyv(derive(Debug))]
 pub struct VacationResponse {
     pub from_date: Option<u64>,
     pub to_date: Option<u64>,

@@ -4,10 +4,10 @@
  * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
  */

-use common::storage::index::{IndexValue, IndexableObject};
+use common::storage::index::{IndexValue, IndexableAndSerializableObject, IndexableObject};
 use jmap_proto::types::property::Property;

-use super::EmailSubmission;
+use super::{ArchivedEmailSubmission, EmailSubmission};

 impl IndexableObject for EmailSubmission {
     fn index_values(&self) -> impl Iterator<Item = IndexValue<'_>> {

@@ -36,3 +36,33 @@ impl IndexableObject for EmailSubmission {
         .into_iter()
     }
 }
+
+impl IndexableObject for &ArchivedEmailSubmission {
+    fn index_values(&self) -> impl Iterator<Item = IndexValue<'_>> {
+        [
+            IndexValue::Text {
+                field: Property::UndoStatus.into(),
+                value: self.undo_status.as_index().into(),
+            },
+            IndexValue::U32 {
+                field: Property::EmailId.into(),
+                value: Some(u32::from(self.email_id)),
+            },
+            IndexValue::U32 {
+                field: Property::ThreadId.into(),
+                value: Some(u32::from(self.thread_id)),
+            },
+            IndexValue::U32 {
+                field: Property::IdentityId.into(),
+                value: Some(u32::from(self.identity_id)),
+            },
+            IndexValue::U64 {
+                field: Property::SendAt.into(),
+                value: Some(u64::from(self.send_at)),
+            },
+        ]
+        .into_iter()
+    }
+}
+
+impl IndexableAndSerializableObject for EmailSubmission {}

@@ -47,7 +47,7 @@ impl ThreadCache for Server {
         } else {
             let thread_cache = Arc::new(Threads {
                 threads: self
-                    .get_properties::<u32, _, _>(
+                    .get_properties::<u32, _>(
                         account_id,
                         Collection::Email,
                         &(),

@@ -6,7 +6,7 @@

 use std::sync::Arc;

-use common::{Files, Server};
+use common::{FileItem, Files, Server};
 use jmap_proto::types::collection::Collection;
 use percent_encoding::NON_ALPHANUMERIC;
 use trc::AddContext;

@@ -62,10 +62,16 @@ async fn build_file_hierarchy(server: &Server, account_id: u32) -> trc::Result<F
         modseq: None,
     };

-    for (id, name) in list.into_iterator() {
-        files.size +=
-            (std::mem::size_of::<u32>() + std::mem::size_of::<String>() + name.len()) as u64;
-        files.files.insert(id, name);
+    for expanded in list.into_iterator() {
+        files.size += (std::mem::size_of::<u32>()
+            + std::mem::size_of::<String>()
+            + expanded.name.len()) as u64;
+        files.files.insert(FileItem {
+            document_id: expanded.document_id,
+            parent_id: expanded.parent_id,
+            name: expanded.name,
+            is_container: expanded.is_container,
+        });
     }

     Ok(files)
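
Note: the hierarchy cache now stores whole FileItem records instead of bare
(id, name) pairs, so a single lookup yields the document id, parent id and
container flag needed by the DAV handlers. A sketch of how a built cache might
be queried (the path is hypothetical):

    if let Some(item) = files.files.by_name("folder/report.txt") {
        // document_id, parent_id and is_container come back from one lookup.
        let _ = (item.document_id, item.parent_id, item.is_container);
    }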

@@ -6,9 +6,9 @@

 use common::storage::{
     folder::FolderHierarchy,
-    index::{IndexValue, IndexableObject},
+    index::{IndexValue, IndexableAndSerializableObject, IndexableObject},
 };
-use jmap_proto::types::property::Property;
+use jmap_proto::types::{property::Property, value::AclGrant};

 use super::{ArchivedFileNode, FileNode};

@@ -29,7 +29,9 @@ impl IndexableObject for FileNode {
                 field: Property::ParentId.into(),
                 value: self.parent_id.into(),
             },
-            IndexValue::Acl { value: &self.acls },
+            IndexValue::Acl {
+                value: (&self.acls).into(),
+            },
         ]);

         if let Some(file) = &self.file {

@@ -52,6 +54,55 @@ impl IndexableObject for FileNode {
     }
 }

+impl IndexableObject for &ArchivedFileNode {
+    fn index_values(&self) -> impl Iterator<Item = IndexValue<'_>> {
+        let size = self.dead_properties.size() as u32
+            + self.display_name.as_ref().map_or(0, |n| n.len() as u32)
+            + self.name.len() as u32;
+
+        let mut values = Vec::with_capacity(6);
+
+        values.extend([
+            IndexValue::Text {
+                field: Property::Name.into(),
+                value: self.name.to_lowercase().into(),
+            },
+            IndexValue::U32 {
+                field: Property::ParentId.into(),
+                value: u32::from(self.parent_id).into(),
+            },
+            IndexValue::Acl {
+                value: self
+                    .acls
+                    .iter()
+                    .map(AclGrant::from)
+                    .collect::<Vec<_>>()
+                    .into(),
+            },
+        ]);
+
+        if let Some(file) = self.file.as_ref() {
+            let size = size + file.size;
+            values.extend([
+                IndexValue::Blob {
+                    value: (&file.blob_hash).into(),
+                },
+                IndexValue::U32 {
+                    field: Property::Size.into(),
+                    value: size.into(),
+                },
+                IndexValue::Quota { used: size },
+            ]);
+        } else {
+            values.push(IndexValue::Quota { used: size });
+        }
+
+        values.into_iter()
+    }
+}
+
+impl IndexableAndSerializableObject for FileNode {}
+
 impl FolderHierarchy for ArchivedFileNode {
     fn name(&self) -> String {
         self.name.to_string()

@@ -60,4 +111,8 @@ impl FolderHierarchy for ArchivedFileNode {
     fn parent_id(&self) -> u32 {
         u32::from(self.parent_id)
     }
+
+    fn is_container(&self) -> bool {
+        self.file.is_none()
+    }
 }

@@ -14,6 +14,7 @@ use utils::BlobHash;
 #[derive(
     rkyv::Archive, rkyv::Deserialize, rkyv::Serialize, Debug, Default, Clone, PartialEq, Eq,
 )]
+#[rkyv(derive(Debug))]
 pub struct FileNode {
     pub parent_id: u32,
     pub name: String,

@@ -29,6 +30,7 @@ pub struct FileNode {
 #[derive(
     rkyv::Archive, rkyv::Deserialize, rkyv::Serialize, Debug, Default, Clone, PartialEq, Eq,
 )]
+#[rkyv(derive(Debug))]
 pub struct FileProperties {
     pub blob_hash: BlobHash,
     pub size: u32,

@@ -161,7 +161,7 @@ impl<T: SessionStream> SessionData<T> {

         for (mailbox_id, mailbox_) in self
             .server
-            .get_properties::<Archive, _, _>(
+            .get_properties::<Archive, _>(
                 account_id,
                 Collection::Mailbox,
                 &mailbox_ids,

@@ -52,7 +52,7 @@ impl<T: SessionStream> SessionData<T> {
         let mut uid_map = BTreeMap::new();
         for (message_id, uid_mailbox_) in self
             .server
-            .get_properties::<Archive, _, _>(
+            .get_properties::<Archive, _>(
                 mailbox.account_id,
                 Collection::Email,
                 &message_ids,

@@ -93,7 +93,7 @@ impl<T: SessionStream> SessionData<T> {
             .with_account_id(params.account_id)
             .with_collection(Collection::Mailbox)
             .create_document()
-            .custom(ObjectIndexBuilder::new().with_changes(mailbox))
+            .custom(ObjectIndexBuilder::<(), _>::new().with_changes(mailbox))
             .imap_ctx(&arguments.tag, trc::location!())?;
         let mailbox_id = self
             .server

@@ -194,7 +194,7 @@ impl<T: SessionStream> SessionData<T> {

         for (id, mailbox_ids) in self
             .server
-            .get_properties::<HashedValue<Archive>, _, _>(
+            .get_properties::<HashedValue<Archive>, _>(
                 account_id,
                 Collection::Email,
                 deleted_ids,

@@ -143,7 +143,7 @@ impl<T: SessionStream> SessionData<T> {
             .with_account_id(params.account_id)
             .with_collection(Collection::Mailbox)
             .create_document()
-            .custom(ObjectIndexBuilder::new().with_changes(
+            .custom(ObjectIndexBuilder::<(), _>::new().with_changes(
                 email::mailbox::Mailbox::new(path_item).with_parent_id(parent_id),
             ))
             .imap_ctx(&arguments.tag, trc::location!())?;

@@ -607,3 +607,12 @@ impl JsonQueryable for Value {
         }
     }
 }
+
+impl From<&ArchivedAclGrant> for AclGrant {
+    fn from(value: &ArchivedAclGrant) -> Self {
+        Self {
+            account_id: u32::from(value.account_id),
+            grants: (&value.grants).into(),
+        }
+    }
+}
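
Note: this From impl backs the archived index_values implementations above:
IndexValue::Acl wants owned AclGrant values, so the rkyv-archived grants are
converted element by element. In isolation, assuming archived: &ArchivedFileNode:

    let grants: Vec<AclGrant> = archived.acls.iter().map(AclGrant::from).collect();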

@@ -65,7 +65,7 @@ pub trait MailboxSet: Sync + Send {
         changes_: Object<SetValue>,
         update: Option<(u32, HashedValue<Mailbox>)>,
         ctx: &SetContext,
-    ) -> impl Future<Output = trc::Result<Result<ObjectIndexBuilder<Mailbox>, SetError>>> + Send;
+    ) -> impl Future<Output = trc::Result<Result<ObjectIndexBuilder<Mailbox, Mailbox>, SetError>>> + Send;
 }

 impl MailboxSet for Server {

@@ -289,7 +289,7 @@ impl MailboxSet for Server {
         changes_: Object<SetValue>,
         update: Option<(u32, HashedValue<Mailbox>)>,
         ctx: &SetContext<'_>,
-    ) -> trc::Result<Result<ObjectIndexBuilder<Mailbox>, SetError>> {
+    ) -> trc::Result<Result<ObjectIndexBuilder<Mailbox, Mailbox>, SetError>> {
         // Parse properties
         let mut changes = update
             .as_ref()

@@ -60,7 +60,15 @@ pub trait SieveScriptSet: Sync + Send {
         ctx: &SetContext,
         session_id: u64,
     ) -> impl Future<
-        Output = trc::Result<Result<(ObjectIndexBuilder<SieveScript>, Option<Vec<u8>>), SetError>>,
+        Output = trc::Result<
+            Result<
+                (
+                    ObjectIndexBuilder<SieveScript, SieveScript>,
+                    Option<Vec<u8>>,
+                ),
+                SetError,
+            >,
+        >,
     > + Send;
 }

@@ -332,7 +340,15 @@ impl SieveScriptSet for Server {
         update: Option<(u32, HashedValue<SieveScript>)>,
         ctx: &SetContext<'_>,
         session_id: u64,
-    ) -> trc::Result<Result<(ObjectIndexBuilder<SieveScript>, Option<Vec<u8>>), SetError>> {
+    ) -> trc::Result<
+        Result<
+            (
+                ObjectIndexBuilder<SieveScript, SieveScript>,
+                Option<Vec<u8>>,
+            ),
+            SetError,
+        >,
+    > {
         // Vacation script cannot be modified
         if update
             .as_ref()

@@ -94,7 +94,7 @@ impl EmailSubmissionSet for Server {
             .with_account_id(account_id)
             .with_collection(Collection::EmailSubmission)
             .create_document()
-            .custom(ObjectIndexBuilder::new().with_changes(submission))
+            .custom(ObjectIndexBuilder::<(), _>::new().with_changes(submission))
             .caused_by(trc::location!())?;
         let document_id = self
             .store()

@@ -240,9 +240,9 @@ impl EmailSubmissionSet for Server {
             .with_collection(Collection::EmailSubmission)
             .delete_document(document_id)
             .custom(
-                ObjectIndexBuilder::new().with_current(
+                ObjectIndexBuilder::<_, ()>::new().with_current(
                     submission
-                        .into_deserialized::<EmailSubmission>()
+                        .to_unarchived::<EmailSubmission>()
                         .caused_by(trc::location!())?,
                 ),
             )

@@ -175,7 +175,7 @@ impl<T: SessionStream> Session<T> {
             .create_document()
             .log(LogInsert())
             .custom(
-                ObjectIndexBuilder::new()
+                ObjectIndexBuilder::<(), _>::new()
                     .with_changes(
                         SieveScript::new(name.clone(), blob_hash.clone())
                             .with_is_active(false)

@@ -124,7 +124,7 @@ impl<T: SessionStream> Session<T> {
         // Sort by UID
         for (message_id, uid_mailbox) in self
             .server
-            .get_properties::<Archive, _, _>(
+            .get_properties::<Archive, _>(
                 account_id,
                 Collection::Email,
                 &message_ids,

@@ -4,49 +4,96 @@
  * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
  */

-use std::{borrow::Borrow, rc::Rc};
+use std::{borrow::Borrow, hash::Hash, rc::Rc};

 use ahash::AHashMap;

-#[derive(Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+#[derive(Debug)]
 #[repr(transparent)]
-struct StringRef(Rc<String>);
+struct StringRef<T: IdBimapItem>(Rc<T>);
+
+#[derive(Debug)]
+#[repr(transparent)]
+struct IdRef<T: IdBimapItem>(Rc<T>);

 #[derive(Debug, Default)]
-pub struct IdBimap {
-    id_to_uri: AHashMap<u32, Rc<String>>,
-    uri_to_id: AHashMap<StringRef, u32>,
+pub struct IdBimap<T: IdBimapItem> {
+    id_to_name: AHashMap<IdRef<T>, Rc<T>>,
+    name_to_id: AHashMap<StringRef<T>, Rc<T>>,
 }

-impl IdBimap {
+impl<T: IdBimapItem> IdBimap<T> {
     pub fn with_capacity(capacity: usize) -> Self {
         Self {
-            id_to_uri: AHashMap::with_capacity(capacity),
-            uri_to_id: AHashMap::with_capacity(capacity),
+            id_to_name: AHashMap::with_capacity(capacity),
+            name_to_id: AHashMap::with_capacity(capacity),
         }
     }

-    pub fn insert(&mut self, id: u32, uri: impl Into<String>) {
-        let uri = Rc::new(uri.into());
-        self.id_to_uri.insert(id, uri.clone());
-        self.uri_to_id.insert(StringRef(uri), id);
+    pub fn insert(&mut self, item: T) {
+        let item = Rc::new(item);
+        self.id_to_name.insert(IdRef(item.clone()), item.clone());
+        self.name_to_id.insert(StringRef(item.clone()), item);
     }

-    pub fn by_name(&self, uri: &str) -> Option<u32> {
-        self.uri_to_id.get(uri).copied()
+    pub fn by_name(&self, name: &str) -> Option<&T> {
+        self.name_to_id.get(name).map(|v| v.as_ref())
     }

-    pub fn by_id(&self, id: u32) -> Option<&str> {
-        self.id_to_uri.get(&id).map(|x| x.as_str())
+    pub fn by_id(&self, id: u32) -> Option<&T> {
+        self.id_to_name.get(&id).map(|v| v.as_ref())
     }
+
+    pub fn iter(&self) -> impl Iterator<Item = &T> {
+        self.id_to_name.values().map(|v| v.as_ref())
+    }
 }

 // SAFETY: Safe because Rc<> are never returned from the struct
-unsafe impl Send for IdBimap {}
-unsafe impl Sync for IdBimap {}
+unsafe impl<T: IdBimapItem> Send for IdBimap<T> {}
+unsafe impl<T: IdBimapItem> Sync for IdBimap<T> {}

-impl Borrow<str> for StringRef {
+pub trait IdBimapItem: std::fmt::Debug {
+    fn id(&self) -> &u32;
+    fn name(&self) -> &str;
+}
+
+impl<T: IdBimapItem> Borrow<str> for StringRef<T> {
     fn borrow(&self) -> &str {
-        &self.0
+        self.0.name()
     }
 }
+
+impl<T: IdBimapItem> Borrow<u32> for IdRef<T> {
+    fn borrow(&self) -> &u32 {
+        self.0.id()
+    }
+}
+
+impl<T: IdBimapItem> PartialEq for StringRef<T> {
+    fn eq(&self, other: &Self) -> bool {
+        self.0.name() == other.0.name()
+    }
+}
+
+impl<T: IdBimapItem> Eq for StringRef<T> {}
+
+impl<T: IdBimapItem> PartialEq for IdRef<T> {
+    fn eq(&self, other: &Self) -> bool {
+        self.0.id() == other.0.id()
+    }
+}
+
+impl<T: IdBimapItem> Eq for IdRef<T> {}
+
+impl<T: IdBimapItem> Hash for StringRef<T> {
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        self.0.name().hash(state)
+    }
+}
+
+impl<T: IdBimapItem> Hash for IdRef<T> {
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        self.0.id().hash(state)
+    }
+}
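
Note: IdBimap is now generic over any item exposing an id and a name; both maps
share a single Rc<T> per entry, and the Borrow impls let lookups run against a
bare u32 or &str without building a key. A minimal sketch of a custom item type,
assuming the trait exactly as defined above (Label is hypothetical):

    #[derive(Debug)]
    struct Label {
        id: u32,
        name: String,
    }

    impl IdBimapItem for Label {
        fn id(&self) -> &u32 {
            &self.id
        }

        fn name(&self) -> &str {
            &self.name
        }
    }

    let mut map = IdBimap::with_capacity(4);
    map.insert(Label { id: 1, name: "inbox".into() });
    // Both directions resolve to the same shared entry.
    assert_eq!(map.by_name("inbox").map(|l| l.id), Some(1));
    assert_eq!(map.by_id(1).map(|l| l.name.as_str()), Some("inbox"));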

@@ -43,6 +43,8 @@ pub const BLOB_HASH_LEN: usize = 32;
     serde::Serialize,
     serde::Deserialize,
 )]
+#[rkyv(derive(Debug))]
+#[repr(transparent)]
 pub struct BlobHash(pub [u8; BLOB_HASH_LEN]);

 impl BlobHash {