Mirror of https://github.com/stalwartlabs/mail-server.git (synced 2025-09-12 15:04:22 +08:00)

Commit 1a8efb2182 (parent b75e3a8012): DAV Propfind partial impl
35 changed files with 770 additions and 156 deletions
|
@ -6,12 +6,12 @@
|
|||
|
||||
use ahash::AHashSet;
|
||||
use directory::{
|
||||
Permission, Principal, QueryBy, Type,
|
||||
backend::internal::{
|
||||
PrincipalField,
|
||||
lookup::DirectoryStore,
|
||||
manage::{ChangedPrincipals, ManageDirectory},
|
||||
PrincipalField,
|
||||
},
|
||||
Permission, Principal, QueryBy, Type,
|
||||
};
|
||||
use jmap_proto::{
|
||||
request::RequestMethod,
|
||||
|
@ -29,11 +29,11 @@ use utils::map::{
|
|||
};
|
||||
|
||||
use crate::{
|
||||
KV_TOKEN_REVISION, Server,
|
||||
listener::limiter::{ConcurrencyLimiter, LimiterResult},
|
||||
Server, KV_TOKEN_REVISION,
|
||||
};
|
||||
|
||||
use super::{roles::RolePermissions, AccessToken, ResourceToken, TenantInfo};
|
||||
use super::{AccessToken, ResourceToken, TenantInfo, roles::RolePermissions};
|
||||
|
||||
pub enum PrincipalOrId {
|
||||
Principal(Principal),
|
||||
|
@ -189,7 +189,7 @@ impl Server {
|
|||
Ok(Some(principal)) => {
|
||||
return self
|
||||
.build_access_token_from_principal(principal, revision)
|
||||
.await
|
||||
.await;
|
||||
}
|
||||
Ok(None) => Err(trc::AuthEvent::Error
|
||||
.into_err()
|
||||
|
@ -294,10 +294,11 @@ impl Server {
|
|||
}
|
||||
}
|
||||
Err(err) => {
|
||||
trc::error!(err
|
||||
.details("Failed to list principals")
|
||||
.caused_by(trc::location!())
|
||||
.account_id(*id));
|
||||
trc::error!(
|
||||
err.details("Failed to list principals")
|
||||
.caused_by(trc::location!())
|
||||
.account_id(*id)
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -330,10 +331,11 @@ impl Server {
|
|||
ids = members.into_iter();
|
||||
}
|
||||
Err(err) => {
|
||||
trc::error!(err
|
||||
.details("Failed to obtain principal")
|
||||
.caused_by(trc::location!())
|
||||
.account_id(id));
|
||||
trc::error!(
|
||||
err.details("Failed to obtain principal")
|
||||
.caused_by(trc::location!())
|
||||
.account_id(id)
|
||||
);
|
||||
}
|
||||
}
|
||||
} else if let Some(prev_ids) = ids_stack.pop() {
|
||||
|
@ -354,9 +356,10 @@ impl Server {
|
|||
)
|
||||
.await
|
||||
{
|
||||
trc::error!(err
|
||||
.details("Failed to increment principal revision")
|
||||
.account_id(id));
|
||||
trc::error!(
|
||||
err.details("Failed to increment principal revision")
|
||||
.account_id(id)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -371,9 +374,10 @@ impl Server {
|
|||
{
|
||||
Ok(revision) => (revision as u64).into(),
|
||||
Err(err) => {
|
||||
trc::error!(err
|
||||
.details("Failed to obtain principal revision")
|
||||
.account_id(id));
|
||||
trc::error!(
|
||||
err.details("Failed to obtain principal revision")
|
||||
.account_id(id)
|
||||
);
|
||||
None
|
||||
}
|
||||
}
|
||||
|
@ -436,6 +440,13 @@ impl AccessToken {
|
|||
.chain(self.access_to.iter().map(|(id, _)| id))
|
||||
}
|
||||
|
||||
pub fn all_ids(&self) -> impl Iterator<Item = u32> {
|
||||
[self.primary_id]
|
||||
.into_iter()
|
||||
.chain(self.member_of.iter().copied())
|
||||
.chain(self.access_to.iter().map(|(id, _)| *id))
|
||||
}
|
||||
|
||||
pub fn is_member(&self, account_id: u32) -> bool {
|
||||
self.primary_id == account_id
|
||||
|| self.member_of.contains(&account_id)
|
||||
|
|
|
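Note: the new all_ids() accessor yields the token's primary account id, its group memberships and the accounts shared with it as owned u32 values, which is convenient for collecting into a bitmap of visible accounts. A minimal sketch with a simplified stand-in for AccessToken (the access_to element layout here is an assumption):

    // Simplified stand-in for AccessToken; field types are assumptions.
    struct Token {
        primary_id: u32,
        member_of: Vec<u32>,
        access_to: Vec<(u32, u64)>, // (account id, grants) -- assumed layout
    }

    impl Token {
        // Chain the primary account, group memberships and shared accounts
        // into one owned iterator, as all_ids() does above.
        fn all_ids(&self) -> impl Iterator<Item = u32> + '_ {
            [self.primary_id]
                .into_iter()
                .chain(self.member_of.iter().copied())
                .chain(self.access_to.iter().map(|(id, _)| *id))
        }
    }

    fn main() {
        let token = Token { primary_id: 3, member_of: vec![7], access_to: vec![(9, 0)] };
        assert_eq!(token.all_ids().collect::<Vec<_>>(), vec![3, 7, 9]);
    }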
@ -12,6 +12,7 @@ pub struct DavConfig {
|
|||
pub dead_property_size: Option<usize>,
|
||||
pub live_property_size: usize,
|
||||
pub max_lock_timeout: u64,
|
||||
pub max_changes: usize,
|
||||
}
|
||||
|
||||
impl DavConfig {
|
||||
|
@ -27,6 +28,7 @@ impl DavConfig {
|
|||
.property("dav.limits.size.live-property")
|
||||
.unwrap_or(250),
|
||||
max_lock_timeout: config.property("dav.limits.timeout.max-lock").unwrap_or(60),
|
||||
max_changes: config.property("dav.limits.max-changes").unwrap_or(1000),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
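Note: the new dav.limits.max-changes setting (default 1000) caps how many changelog entries a single sync-collection query may return. A small sketch of how such a cap combines with the client-supplied limit, matching the min() used later in the PROPFIND changes path (the function name here is illustrative):

    // Effective limit is the smaller of the client-requested limit and the
    // server-wide dav.limits.max-changes value.
    fn effective_change_limit(client_limit: Option<u32>, max_changes: usize) -> usize {
        std::cmp::min(client_limit.unwrap_or(u32::MAX) as usize, max_changes)
    }

    fn main() {
        assert_eq!(effective_change_limit(None, 1000), 1000);
        assert_eq!(effective_change_limit(Some(50), 1000), 50);
    }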
@ -41,6 +41,7 @@ use mail_auth::{MX, Txt};
|
|||
use manager::webadmin::{Resource, WebAdminManager};
|
||||
use nlp::bayes::{TokenHash, Weights};
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
use rkyv::munge::Borrow;
|
||||
use rustls::sign::CertifiedKey;
|
||||
use tokio::sync::{Notify, Semaphore, mpsc};
|
||||
use tokio_rustls::TlsConnector;
|
||||
|
@ -515,6 +516,12 @@ impl Files {
|
|||
})
|
||||
}
|
||||
|
||||
pub fn tree_with_depth(&self, depth: usize) -> impl Iterator<Item = &FileItem> {
|
||||
self.files.iter().filter(move |item| {
|
||||
item.name.as_bytes().iter().filter(|&&c| c == b'/').count() <= depth
|
||||
})
|
||||
}
|
||||
|
||||
pub fn is_ancestor_of(&self, ancestor: u32, descendant: u32) -> bool {
|
||||
let ancestor = &self.files.by_id(ancestor).unwrap().name;
|
||||
let descendant = &self.files.by_id(descendant).unwrap().name;
|
||||
|
@ -533,3 +540,23 @@ impl IdBimapItem for FileItem {
|
|||
&self.name
|
||||
}
|
||||
}
|
||||
|
||||
impl std::hash::Hash for FileItem {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.document_id.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for FileItem {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.document_id == other.document_id
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for FileItem {}
|
||||
|
||||
impl std::borrow::Borrow<u32> for FileItem {
|
||||
fn borrow(&self) -> &u32 {
|
||||
&self.document_id
|
||||
}
|
||||
}
|
||||
|
|
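Note: two things happen in this hunk: tree_with_depth() derives an item's depth from the number of '/' separators in its stored path, and the Hash/PartialEq/Borrow<u32> impls let a FileItem be found in hashed collections by its document id alone. A self-contained sketch with a simplified FileItem:

    use std::borrow::Borrow;
    use std::collections::HashSet;
    use std::hash::{Hash, Hasher};

    struct FileItem {
        document_id: u32,
        name: String, // e.g. "folder/sub/file.txt"
    }

    // Depth is the number of '/' separators in the stored path.
    fn depth(item: &FileItem) -> usize {
        item.name.as_bytes().iter().filter(|&&c| c == b'/').count()
    }

    // Identity is the document id, so hashed lookups can use a plain u32.
    impl Hash for FileItem {
        fn hash<H: Hasher>(&self, state: &mut H) {
            self.document_id.hash(state);
        }
    }
    impl PartialEq for FileItem {
        fn eq(&self, other: &Self) -> bool {
            self.document_id == other.document_id
        }
    }
    impl Eq for FileItem {}
    impl Borrow<u32> for FileItem {
        fn borrow(&self) -> &u32 {
            &self.document_id
        }
    }

    fn main() {
        let item = FileItem { document_id: 7, name: "a/b/c.txt".into() };
        assert_eq!(depth(&item), 2);
        let set: HashSet<FileItem> = [item].into();
        assert!(set.contains(&7)); // lookup by document id via Borrow<u32>
    }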
|
@ -12,7 +12,7 @@ use ring::rand::SystemRandom;
|
|||
use ring::signature::{ECDSA_P256_SHA256_FIXED_SIGNING, EcdsaKeyPair, EcdsaSigningAlgorithm};
|
||||
use serde::Deserialize;
|
||||
use store::write::Archiver;
|
||||
use store::{Serialize, SerializedVersion};
|
||||
use store::{Serialize, SerializedVersion, SERIALIZE_OBJ_01_V1};
|
||||
use trc::AddContext;
|
||||
use trc::event::conv::AssertSuccess;
|
||||
|
||||
|
@ -212,7 +212,7 @@ pub struct SerializedCert {
|
|||
|
||||
impl SerializedVersion for SerializedCert {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
SERIALIZE_OBJ_01_V1
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -21,11 +21,11 @@ use jmap_proto::types::property::Property;
|
|||
use store::dispatch::lookup::KeyValue;
|
||||
use store::write::serialize::rkyv_deserialize;
|
||||
use store::write::{AlignedBytes, Archive, Archiver, now};
|
||||
use store::{Serialize, SerializedVersion, U32_LEN};
|
||||
use store::{SERIALIZE_OBJ_02_V1, Serialize, SerializedVersion, U32_LEN};
|
||||
use trc::AddContext;
|
||||
|
||||
use super::ETag;
|
||||
use super::uri::{DavUriResource, UriResource};
|
||||
use super::uri::{DavUriResource, OwnedUri};
|
||||
use crate::{DavError, DavErrorCondition, DavMethod};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
@ -63,14 +63,15 @@ impl LockRequestHandler for Server {
|
|||
headers: RequestHeaders<'_>,
|
||||
lock_info: Option<LockInfo>,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
let resource = self.validate_uri(access_token, headers.uri).await?;
|
||||
let resource_hash = resource
|
||||
.lock_key()
|
||||
.ok_or(DavError::Code(StatusCode::CONFLICT))?;
|
||||
let resource = self
|
||||
.validate_uri(access_token, headers.uri)
|
||||
.await?
|
||||
.into_owned_uri()?;
|
||||
let resource_hash = resource.lock_key();
|
||||
let resource_path = resource
|
||||
.resource
|
||||
.ok_or(DavError::Code(StatusCode::CONFLICT))?;
|
||||
let account_id = resource.account_id.unwrap();
|
||||
let account_id = resource.account_id;
|
||||
if !access_token.is_member(account_id) {
|
||||
return Err(DavError::Code(StatusCode::FORBIDDEN));
|
||||
}
|
||||
|
@ -584,7 +585,7 @@ struct LockItem {
|
|||
|
||||
impl SerializedVersion for LockData {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
SERIALIZE_OBJ_02_V1
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -700,13 +701,13 @@ impl LockItem {
|
|||
}
|
||||
}
|
||||
|
||||
impl UriResource<Option<&str>> {
|
||||
pub fn lock_key(&self) -> Option<Vec<u8>> {
|
||||
impl OwnedUri<'_> {
|
||||
pub fn lock_key(&self) -> Vec<u8> {
|
||||
let mut result = Vec::with_capacity(U32_LEN + 2);
|
||||
result.push(KV_LOCK_DAV);
|
||||
result.extend_from_slice(self.account_id?.to_be_bytes().as_slice());
|
||||
result.extend_from_slice(self.account_id.to_be_bytes().as_slice());
|
||||
result.push(u8::from(self.collection));
|
||||
Some(result)
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
|
|
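Note: lock_key() now always succeeds because OwnedUri carries a guaranteed account id; the key layout itself is unchanged: one marker byte, the account id in big-endian, then the collection byte. A sketch of that layout (the KV_LOCK_DAV value below is an assumed placeholder; the real constant is defined elsewhere in the crate):

    const KV_LOCK_DAV: u8 = 0; // assumed placeholder value

    fn lock_key(account_id: u32, collection: u8) -> Vec<u8> {
        let mut key = Vec::with_capacity(std::mem::size_of::<u32>() + 2);
        key.push(KV_LOCK_DAV);                             // key namespace marker
        key.extend_from_slice(&account_id.to_be_bytes());  // account id, big-endian
        key.push(collection);                              // collection discriminant
        key
    }

    fn main() {
        assert_eq!(lock_key(1, 3), vec![0, 0, 0, 0, 1, 3]);
    }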
|
@ -4,16 +4,33 @@
|
|||
* SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
|
||||
*/
|
||||
|
||||
use dav_proto::{
|
||||
Depth, RequestHeaders, Return,
|
||||
schema::request::{PropFind, SyncCollection},
|
||||
};
|
||||
use jmap_proto::types::property::Property;
|
||||
use store::{
|
||||
U32_LEN,
|
||||
write::{Archive, BatchBuilder, MaybeDynamicValue, Operation, ValueClass, ValueOp},
|
||||
};
|
||||
use uri::OwnedUri;
|
||||
|
||||
pub mod acl;
|
||||
pub mod lock;
|
||||
pub mod propfind;
|
||||
pub mod uri;
|
||||
|
||||
pub(crate) struct DavQuery<'x> {
|
||||
pub resource: OwnedUri<'x>,
|
||||
pub base_uri: &'x str,
|
||||
pub propfind: PropFind,
|
||||
pub from_change_id: Option<u64>,
|
||||
pub depth: usize,
|
||||
pub limit: Option<u32>,
|
||||
pub ret: Return,
|
||||
pub depth_no_root: bool,
|
||||
}
|
||||
|
||||
pub trait ETag {
|
||||
fn etag(&self) -> String;
|
||||
}
|
||||
|
@ -48,3 +65,46 @@ impl ExtractETag for BatchBuilder {
|
|||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<'x> DavQuery<'x> {
|
||||
pub fn propfind(
|
||||
resource: OwnedUri<'x>,
|
||||
propfind: PropFind,
|
||||
headers: RequestHeaders<'x>,
|
||||
) -> Self {
|
||||
Self {
|
||||
resource,
|
||||
propfind,
|
||||
base_uri: headers.base_uri().unwrap_or_default(),
|
||||
from_change_id: None,
|
||||
depth: match headers.depth {
|
||||
Depth::Zero => 0,
|
||||
_ => 1,
|
||||
},
|
||||
limit: None,
|
||||
ret: headers.ret,
|
||||
depth_no_root: headers.depth_no_root,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn changes(
|
||||
resource: OwnedUri<'x>,
|
||||
changes: SyncCollection,
|
||||
headers: RequestHeaders<'x>,
|
||||
) -> Self {
|
||||
Self {
|
||||
resource,
|
||||
propfind: changes.properties,
|
||||
base_uri: headers.base_uri().unwrap_or_default(),
|
||||
from_change_id: changes.sync_token.and_then(|s| s.parse().ok()),
|
||||
depth: if changes.level_inf { usize::MAX } else { 1 },
|
||||
limit: changes.limit,
|
||||
ret: headers.ret,
|
||||
depth_no_root: headers.depth_no_root,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn format_to_base_uri(&self, path: &str) -> String {
|
||||
format!("{}/{}", self.base_uri, path)
|
||||
}
|
||||
}
|
||||
|
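Note: DavQuery::propfind() and DavQuery::changes() above normalise the Depth request header into a plain usize: a PROPFIND treats anything other than "Depth: 0" as depth 1, while a sync-collection report with an infinite level walks the whole tree. A sketch with a simplified copy of the Depth enum:

    #[allow(dead_code)]
    enum Depth { Zero, One, Infinity, None }

    // Depth used by DavQuery::propfind(): only "Depth: 0" limits the query
    // to the addressed resource itself.
    fn propfind_depth(depth: Depth) -> usize {
        match depth {
            Depth::Zero => 0,
            _ => 1,
        }
    }

    // Depth used by DavQuery::changes(): an infinite sync level means no bound.
    fn changes_depth(level_inf: bool) -> usize {
        if level_inf { usize::MAX } else { 1 }
    }

    fn main() {
        assert_eq!(propfind_depth(Depth::Infinity), 1);
        assert_eq!(changes_depth(true), usize::MAX);
    }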
|
crates/dav/src/common/propfind.rs (new file, 139 lines)
|
@ -0,0 +1,139 @@
|
|||
/*
|
||||
* SPDX-FileCopyrightText: 2020 Stalwart Labs Ltd <hello@stalw.art>
|
||||
*
|
||||
* SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
|
||||
*/
|
||||
|
||||
use common::{Server, auth::AccessToken};
|
||||
use dav_proto::{
|
||||
Depth, RequestHeaders,
|
||||
schema::{
|
||||
request::PropFind,
|
||||
response::{BaseCondition, MultiStatus, Response},
|
||||
},
|
||||
};
|
||||
use http_proto::HttpResponse;
|
||||
use hyper::StatusCode;
|
||||
use jmap_proto::types::collection::Collection;
|
||||
use store::roaring::RoaringBitmap;
|
||||
|
||||
use crate::{
|
||||
DavErrorCondition,
|
||||
common::uri::DavUriResource,
|
||||
file::propfind::HandleFilePropFindRequest,
|
||||
principal::propfind::{PrincipalPropFind, PrincipalResource},
|
||||
};
|
||||
|
||||
use super::{DavQuery, uri::UriResource};
|
||||
|
||||
pub(crate) trait PropFindRequestHandler: Sync + Send {
|
||||
fn handle_propfind_request(
|
||||
&self,
|
||||
access_token: &AccessToken,
|
||||
headers: RequestHeaders<'_>,
|
||||
request: PropFind,
|
||||
) -> impl Future<Output = crate::Result<HttpResponse>> + Send;
|
||||
}
|
||||
|
||||
impl PropFindRequestHandler for Server {
|
||||
async fn handle_propfind_request(
|
||||
&self,
|
||||
access_token: &AccessToken,
|
||||
headers: RequestHeaders<'_>,
|
||||
request: PropFind,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
// Validate URI
|
||||
let resource = self.validate_uri(access_token, headers.uri).await?;
|
||||
|
||||
// Reject Infinity depth for certain queries
|
||||
let return_children = match headers.depth {
|
||||
Depth::One | Depth::None => true,
|
||||
Depth::Zero => false,
|
||||
Depth::Infinity => {
|
||||
if resource.account_id.is_none()
|
||||
|| matches!(resource.collection, Collection::FileNode)
|
||||
{
|
||||
return Err(DavErrorCondition::new(
|
||||
StatusCode::FORBIDDEN,
|
||||
BaseCondition::PropFindFiniteDepth,
|
||||
)
|
||||
.into());
|
||||
}
|
||||
true
|
||||
}
|
||||
};
|
||||
|
||||
// List shared resources
|
||||
if let Some(account_id) = resource.account_id {
|
||||
match resource.collection {
|
||||
Collection::FileNode => {
|
||||
self.handle_file_propfind_request(
|
||||
access_token,
|
||||
DavQuery::propfind(
|
||||
UriResource::new_owned(
|
||||
Collection::FileNode,
|
||||
account_id,
|
||||
resource.resource,
|
||||
),
|
||||
request,
|
||||
headers,
|
||||
),
|
||||
)
|
||||
.await
|
||||
}
|
||||
Collection::Calendar => todo!(),
|
||||
Collection::AddressBook => todo!(),
|
||||
Collection::Principal => {
|
||||
let mut response = MultiStatus::new(Vec::with_capacity(16));
|
||||
|
||||
if let Some(resource) = resource.resource {
|
||||
response.add_response(Response::new_status(
|
||||
[headers.format_to_base_uri(resource)],
|
||||
StatusCode::NOT_FOUND,
|
||||
));
|
||||
} else {
|
||||
self.prepare_principal_propfind_response(
|
||||
access_token,
|
||||
PrincipalResource::Id(account_id),
|
||||
&request,
|
||||
&mut response,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(HttpResponse::new(StatusCode::MULTI_STATUS)
|
||||
.with_xml_body(response.to_string()))
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
} else {
|
||||
let mut response = MultiStatus::new(Vec::with_capacity(16));
|
||||
|
||||
// Add container info
|
||||
if !headers.depth_no_root {
|
||||
let blah = 1;
|
||||
}
|
||||
|
||||
if return_children {
|
||||
let ids = if !matches!(resource.collection, Collection::Principal) {
|
||||
RoaringBitmap::from_iter(access_token.all_ids())
|
||||
} else {
|
||||
// Return all principals
|
||||
self.get_document_ids(u32::MAX, Collection::Principal)
|
||||
.await?
|
||||
.unwrap_or_default()
|
||||
};
|
||||
|
||||
self.prepare_principal_propfind_response(
|
||||
access_token,
|
||||
PrincipalResource::Ids(ids),
|
||||
&request,
|
||||
&mut response,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(HttpResponse::new(StatusCode::MULTI_STATUS).with_xml_body(response.to_string()))
|
||||
}
|
||||
}
|
||||
}
|
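Note: handle_propfind_request() above refuses "Depth: infinity" with 403 Forbidden and the DAV:propfind-finite-depth precondition when no account is addressed or the target is the file hierarchy, as RFC 4918 permits. A simplified sketch of that decision (types are stand-ins):

    #[allow(dead_code)]
    enum Depth { Zero, One, Infinity, None }

    // Returns whether children should be listed, or an error for a depth the
    // server refuses to serve.
    fn return_children(depth: Depth, is_file_hierarchy: bool) -> Result<bool, &'static str> {
        match depth {
            Depth::One | Depth::None => Ok(true),
            Depth::Zero => Ok(false),
            Depth::Infinity if is_file_hierarchy => Err("403 + DAV:propfind-finite-depth"),
            Depth::Infinity => Ok(true),
        }
    }

    fn main() {
        assert_eq!(return_children(Depth::Zero, true), Ok(false));
        assert!(return_children(Depth::Infinity, true).is_err());
    }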
|
@ -14,18 +14,22 @@ use trc::AddContext;
|
|||
|
||||
use crate::{DavError, DavResource};
|
||||
|
||||
pub(crate) struct UriResource<T> {
|
||||
pub(crate) struct UriResource<A, R> {
|
||||
pub collection: Collection,
|
||||
pub account_id: Option<u32>,
|
||||
pub resource: T,
|
||||
pub account_id: A,
|
||||
pub resource: R,
|
||||
}
|
||||
|
||||
pub(crate) type UnresolvedUri<'x> = UriResource<Option<u32>, Option<&'x str>>;
|
||||
pub(crate) type OwnedUri<'x> = UriResource<u32, Option<&'x str>>;
|
||||
//pub(crate) type DocumentUri<'x> = UriResource<u32, u32>;
|
||||
|
||||
pub(crate) trait DavUriResource: Sync + Send {
|
||||
fn validate_uri<'x>(
|
||||
&self,
|
||||
access_token: &AccessToken,
|
||||
uri: &'x str,
|
||||
) -> impl Future<Output = crate::Result<UriResource<Option<&'x str>>>> + Send;
|
||||
) -> impl Future<Output = crate::Result<UnresolvedUri<'x>>> + Send;
|
||||
}
|
||||
|
||||
impl DavUriResource for Server {
|
||||
|
@ -33,7 +37,7 @@ impl DavUriResource for Server {
|
|||
&self,
|
||||
access_token: &AccessToken,
|
||||
uri: &'x str,
|
||||
) -> crate::Result<UriResource<Option<&'x str>>> {
|
||||
) -> crate::Result<UnresolvedUri<'x>> {
|
||||
let (_, uri_parts) = uri
|
||||
.split_once("/dav/")
|
||||
.ok_or(DavError::Code(StatusCode::NOT_FOUND))?;
|
||||
|
@ -86,8 +90,28 @@ impl DavUriResource for Server {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T> UriResource<T> {
|
||||
pub fn account_id(&self) -> crate::Result<u32> {
|
||||
self.account_id.ok_or(DavError::Code(StatusCode::FORBIDDEN))
|
||||
impl<'x> UnresolvedUri<'x> {
|
||||
pub fn into_owned_uri(self) -> crate::Result<OwnedUri<'x>> {
|
||||
Ok(OwnedUri {
|
||||
collection: self.collection,
|
||||
account_id: self
|
||||
.account_id
|
||||
.ok_or(DavError::Code(StatusCode::NOT_FOUND))?,
|
||||
resource: self.resource,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl OwnedUri<'_> {
|
||||
pub fn new_owned(
|
||||
collection: Collection,
|
||||
account_id: u32,
|
||||
resource: Option<&str>,
|
||||
) -> OwnedUri<'_> {
|
||||
OwnedUri {
|
||||
collection,
|
||||
account_id,
|
||||
resource,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
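Note: this hunk makes UriResource generic over both the account id and the resource, so an UnresolvedUri (account still optional) is converted into an OwnedUri (account id guaranteed) once, instead of every handler calling the old account_id() accessor. A self-contained sketch with a simplified error type and without the collection field:

    struct UriResource<A, R> {
        account_id: A,
        resource: R,
    }

    type UnresolvedUri<'x> = UriResource<Option<u32>, Option<&'x str>>;
    type OwnedUri<'x> = UriResource<u32, Option<&'x str>>;

    impl<'x> UnresolvedUri<'x> {
        // Resolve the optional account id up front; later code can use it freely.
        fn into_owned_uri(self) -> Result<OwnedUri<'x>, &'static str> {
            Ok(OwnedUri {
                account_id: self.account_id.ok_or("404 Not Found")?,
                resource: self.resource,
            })
        }
    }

    fn main() {
        let uri = UriResource { account_id: Some(2u32), resource: Some("folder/a") };
        let owned = uri.into_owned_uri().unwrap();
        assert_eq!(owned.account_id, 2);
        assert_eq!(owned.resource, Some("folder/a"));
    }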
|
@ -16,7 +16,7 @@ use jmap_proto::types::{
|
|||
};
|
||||
use store::{
|
||||
ahash::AHashMap,
|
||||
write::{log::ChangeLogBuilder, now, AlignedBytes, Archive, BatchBuilder},
|
||||
write::{AlignedBytes, Archive, BatchBuilder, log::ChangeLogBuilder, now},
|
||||
};
|
||||
use trc::AddContext;
|
||||
use utils::map::bitmap::Bitmap;
|
||||
|
@ -50,8 +50,11 @@ impl FileCopyMoveRequestHandler for Server {
|
|||
is_move: bool,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
// Validate source
|
||||
let from_resource_ = self.validate_uri(access_token, headers.uri).await?;
|
||||
let from_account_id = from_resource_.account_id()?;
|
||||
let from_resource_ = self
|
||||
.validate_uri(access_token, headers.uri)
|
||||
.await?
|
||||
.into_owned_uri()?;
|
||||
let from_account_id = from_resource_.account_id;
|
||||
let from_files = self
|
||||
.fetch_file_hierarchy(from_account_id)
|
||||
.await
|
||||
|
@ -299,11 +302,11 @@ async fn move_container(
|
|||
access_token: &AccessToken,
|
||||
from_files: Arc<Files>,
|
||||
to_files: Arc<Files>,
|
||||
from_resource: UriResource<FileItemId>,
|
||||
from_resource: UriResource<u32, FileItemId>,
|
||||
destination: Destination,
|
||||
depth: Depth,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
let from_account_id = from_resource.account_id.unwrap();
|
||||
let from_account_id = from_resource.account_id;
|
||||
let to_account_id = destination.account_id;
|
||||
let from_document_id = from_resource.resource.document_id;
|
||||
let parent_id = destination.document_id.map(|id| id + 1).unwrap_or(0);
|
||||
|
@ -360,7 +363,7 @@ async fn copy_container(
|
|||
server: &Server,
|
||||
access_token: &AccessToken,
|
||||
from_files: Arc<Files>,
|
||||
from_resource: UriResource<FileItemId>,
|
||||
from_resource: UriResource<u32, FileItemId>,
|
||||
mut destination: Destination,
|
||||
depth: Depth,
|
||||
delete_source: bool,
|
||||
|
@ -373,7 +376,7 @@ async fn copy_container(
|
|||
_ => true,
|
||||
};
|
||||
|
||||
let from_account_id = from_resource.account_id.unwrap();
|
||||
let from_account_id = from_resource.account_id;
|
||||
let to_account_id = destination.account_id;
|
||||
let from_document_id = from_resource.resource.document_id;
|
||||
let parent_id = destination.document_id.map(|id| id + 1).unwrap_or(0);
|
||||
|
@ -517,10 +520,10 @@ async fn copy_container(
|
|||
async fn overwrite_and_delete_item(
|
||||
server: &Server,
|
||||
access_token: &AccessToken,
|
||||
from_resource: UriResource<FileItemId>,
|
||||
from_resource: UriResource<u32, FileItemId>,
|
||||
destination: Destination,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
let from_account_id = from_resource.account_id.unwrap();
|
||||
let from_account_id = from_resource.account_id;
|
||||
let to_account_id = destination.account_id;
|
||||
let from_document_id = from_resource.resource.document_id;
|
||||
let to_document_id = destination.document_id.unwrap();
|
||||
|
@ -589,10 +592,10 @@ async fn overwrite_and_delete_item(
|
|||
async fn overwrite_item(
|
||||
server: &Server,
|
||||
access_token: &AccessToken,
|
||||
from_resource: UriResource<FileItemId>,
|
||||
from_resource: UriResource<u32, FileItemId>,
|
||||
destination: Destination,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
let from_account_id = from_resource.account_id.unwrap();
|
||||
let from_account_id = from_resource.account_id;
|
||||
let to_account_id = destination.account_id;
|
||||
let from_document_id = from_resource.resource.document_id;
|
||||
let to_document_id = destination.document_id.unwrap();
|
||||
|
@ -650,10 +653,10 @@ async fn overwrite_item(
|
|||
async fn move_item(
|
||||
server: &Server,
|
||||
access_token: &AccessToken,
|
||||
from_resource: UriResource<FileItemId>,
|
||||
from_resource: UriResource<u32, FileItemId>,
|
||||
destination: Destination,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
let from_account_id = from_resource.account_id.unwrap();
|
||||
let from_account_id = from_resource.account_id;
|
||||
let to_account_id = destination.account_id;
|
||||
let from_document_id = from_resource.resource.document_id;
|
||||
let parent_id = destination.document_id.map(|id| id + 1).unwrap_or(0);
|
||||
|
@ -713,10 +716,10 @@ async fn move_item(
|
|||
async fn copy_item(
|
||||
server: &Server,
|
||||
access_token: &AccessToken,
|
||||
from_resource: UriResource<FileItemId>,
|
||||
from_resource: UriResource<u32, FileItemId>,
|
||||
destination: Destination,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
let from_account_id = from_resource.account_id.unwrap();
|
||||
let from_account_id = from_resource.account_id;
|
||||
let to_account_id = destination.account_id;
|
||||
let from_document_id = from_resource.resource.document_id;
|
||||
let parent_id = destination.document_id.map(|id| id + 1).unwrap_or(0);
|
||||
|
@ -748,10 +751,10 @@ async fn copy_item(
|
|||
async fn rename_item(
|
||||
server: &Server,
|
||||
access_token: &AccessToken,
|
||||
from_resource: UriResource<FileItemId>,
|
||||
from_resource: UriResource<u32, FileItemId>,
|
||||
destination: Destination,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
let from_account_id = from_resource.account_id.unwrap();
|
||||
let from_account_id = from_resource.account_id;
|
||||
let from_document_id = from_resource.resource.document_id;
|
||||
|
||||
let node = server
|
||||
|
|
|
@ -12,7 +12,7 @@ use hyper::StatusCode;
|
|||
use jmap_proto::types::{
|
||||
acl::Acl, collection::Collection, property::Property, type_state::DataType,
|
||||
};
|
||||
use store::write::{log::ChangeLogBuilder, AlignedBytes, Archive, BatchBuilder};
|
||||
use store::write::{AlignedBytes, Archive, BatchBuilder, log::ChangeLogBuilder};
|
||||
use trc::AddContext;
|
||||
use utils::map::bitmap::Bitmap;
|
||||
|
||||
|
@ -40,8 +40,11 @@ impl FileDeleteRequestHandler for Server {
|
|||
headers: RequestHeaders<'_>,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
// Validate URI
|
||||
let resource = self.validate_uri(access_token, headers.uri).await?;
|
||||
let account_id = resource.account_id()?;
|
||||
let resource = self
|
||||
.validate_uri(access_token, headers.uri)
|
||||
.await?
|
||||
.into_owned_uri()?;
|
||||
let account_id = resource.account_id;
|
||||
let delete_path = resource
|
||||
.resource
|
||||
.filter(|r| !r.is_empty())
|
||||
|
|
|
@ -40,8 +40,11 @@ impl FileGetRequestHandler for Server {
|
|||
is_head: bool,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
// Validate URI
|
||||
let resource_ = self.validate_uri(access_token, headers.uri).await?;
|
||||
let account_id = resource_.account_id()?;
|
||||
let resource_ = self
|
||||
.validate_uri(access_token, headers.uri)
|
||||
.await?
|
||||
.into_owned_uri()?;
|
||||
let account_id = resource_.account_id;
|
||||
let files = self
|
||||
.fetch_file_hierarchy(account_id)
|
||||
.await
|
||||
|
|
|
@ -45,8 +45,11 @@ impl FileMkColRequestHandler for Server {
|
|||
request: Option<MkCol>,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
// Validate URI
|
||||
let resource_ = self.validate_uri(access_token, headers.uri).await?;
|
||||
let account_id = resource_.account_id()?;
|
||||
let resource_ = self
|
||||
.validate_uri(access_token, headers.uri)
|
||||
.await?
|
||||
.into_owned_uri()?;
|
||||
let account_id = resource_.account_id;
|
||||
let files = self
|
||||
.fetch_file_hierarchy(account_id)
|
||||
.await
|
||||
|
|
|
@ -11,12 +11,17 @@ use groupware::file::FileNode;
|
|||
use hyper::StatusCode;
|
||||
use jmap_proto::types::{collection::Collection, type_state::DataType};
|
||||
use store::write::{
|
||||
log::{Changes, LogInsert}, now, Archive, BatchBuilder
|
||||
Archive, BatchBuilder,
|
||||
log::{Changes, LogInsert},
|
||||
now,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
DavError,
|
||||
common::{ExtractETag, uri::UriResource},
|
||||
common::{
|
||||
ExtractETag,
|
||||
uri::{OwnedUri, UriResource},
|
||||
},
|
||||
};
|
||||
|
||||
pub mod acl;
|
||||
|
@ -42,25 +47,26 @@ pub(crate) struct FileItemId {
|
|||
pub(crate) trait DavFileResource {
|
||||
fn map_resource<T: FromFileItem>(
|
||||
&self,
|
||||
resource: &UriResource<Option<&str>>,
|
||||
) -> crate::Result<UriResource<T>>;
|
||||
resource: &OwnedUri<'_>,
|
||||
) -> crate::Result<UriResource<u32, T>>;
|
||||
|
||||
fn map_parent<'x, T: FromFileItem>(
|
||||
&self,
|
||||
resource: &'x str,
|
||||
) -> Option<(Option<T>, Cow<'x, str>)>;
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn map_parent_resource<'x, T: FromFileItem>(
|
||||
&self,
|
||||
resource: &UriResource<Option<&'x str>>,
|
||||
) -> crate::Result<UriResource<(Option<T>, Cow<'x, str>)>>;
|
||||
resource: &OwnedUri<'x>,
|
||||
) -> crate::Result<UriResource<u32, (Option<T>, Cow<'x, str>)>>;
|
||||
}
|
||||
|
||||
impl DavFileResource for Files {
|
||||
fn map_resource<T: FromFileItem>(
|
||||
&self,
|
||||
resource: &UriResource<Option<&str>>,
|
||||
) -> crate::Result<UriResource<T>> {
|
||||
resource: &OwnedUri<'_>,
|
||||
) -> crate::Result<UriResource<u32, T>> {
|
||||
resource
|
||||
.resource
|
||||
.and_then(|r| self.files.by_name(r))
|
||||
|
@ -95,8 +101,8 @@ impl DavFileResource for Files {
|
|||
|
||||
fn map_parent_resource<'x, T: FromFileItem>(
|
||||
&self,
|
||||
resource: &UriResource<Option<&'x str>>,
|
||||
) -> crate::Result<UriResource<(Option<T>, Cow<'x, str>)>> {
|
||||
resource: &OwnedUri<'x>,
|
||||
) -> crate::Result<UriResource<u32, (Option<T>, Cow<'x, str>)>> {
|
||||
if let Some(r) = resource.resource {
|
||||
if self.files.by_name(r).is_none() {
|
||||
self.map_parent(r)
|
||||
|
|
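Note: map_resource() turns the textual path carried by an OwnedUri into a document id by looking it up in the cached file hierarchy; a path that is not present fails the request (the status and types below are assumptions, and the real version also converts the hit into the caller's FromFileItem type). A rough sketch:

    use std::collections::HashMap;

    struct Files {
        by_name: HashMap<String, u32>, // path -> document id
    }

    // Resolve a path within an account's file hierarchy to its document id.
    fn map_resource(files: &Files, account_id: u32, resource: Option<&str>) -> Result<(u32, u32), &'static str> {
        let document_id = resource
            .and_then(|path| files.by_name.get(path).copied())
            .ok_or("404 Not Found")?; // assumed status for a missing path
        Ok((account_id, document_id))
    }

    fn main() {
        let files = Files { by_name: HashMap::from([("docs/readme.md".to_string(), 12)]) };
        assert_eq!(map_resource(&files, 1, Some("docs/readme.md")), Ok((1, 12)));
        assert!(map_resource(&files, 1, None).is_err());
    }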
|
@ -4,26 +4,279 @@
|
|||
* SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
|
||||
*/
|
||||
|
||||
use common::{Server, auth::AccessToken};
|
||||
use dav_proto::{RequestHeaders, schema::request::PropFind};
|
||||
use common::{Server, auth::AccessToken};
|
||||
use dav_proto::schema::{
|
||||
property::{DavProperty, WebDavProperty},
|
||||
request::{DavPropertyValue, PropFind},
|
||||
response::{MultiStatus, PropStat, Response},
|
||||
};
|
||||
use groupware::file::{FileNode, hierarchy::FileHierarchy};
|
||||
use http_proto::HttpResponse;
|
||||
use hyper::StatusCode;
|
||||
use jmap_proto::types::{acl::Acl, collection::Collection, property::Property};
|
||||
use store::{
|
||||
ahash::AHashMap,
|
||||
dispatch::DocumentSet,
|
||||
query::log::Query,
|
||||
roaring::RoaringBitmap,
|
||||
write::{AlignedBytes, Archive},
|
||||
};
|
||||
use trc::AddContext;
|
||||
use utils::map::bitmap::Bitmap;
|
||||
|
||||
pub(crate) trait FilePropFindRequestHandler: Sync + Send {
|
||||
use crate::{
|
||||
common::DavQuery,
|
||||
principal::propfind::{PrincipalPropFind, PrincipalResource},
|
||||
};
|
||||
|
||||
pub(crate) trait HandleFilePropFindRequest: Sync + Send {
|
||||
fn handle_file_propfind_request(
|
||||
&self,
|
||||
access_token: &AccessToken,
|
||||
headers: RequestHeaders<'_>,
|
||||
request: PropFind,
|
||||
query: DavQuery<'_>,
|
||||
) -> impl Future<Output = crate::Result<HttpResponse>> + Send;
|
||||
}
|
||||
|
||||
impl FilePropFindRequestHandler for Server {
|
||||
impl HandleFilePropFindRequest for Server {
|
||||
async fn handle_file_propfind_request(
|
||||
&self,
|
||||
access_token: &AccessToken,
|
||||
headers: RequestHeaders<'_>,
|
||||
request: PropFind,
|
||||
query: DavQuery<'_>,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
todo!()
|
||||
let account_id = query.resource.account_id;
|
||||
let files = self
|
||||
.fetch_file_hierarchy(account_id)
|
||||
.await
|
||||
.caused_by(trc::location!())?;
|
||||
|
||||
// Obtain document ids
|
||||
let mut document_ids = if !access_token.is_member(account_id) {
|
||||
let todo = "query children acls";
|
||||
self.shared_containers(
|
||||
access_token,
|
||||
account_id,
|
||||
Collection::FileNode,
|
||||
Bitmap::<Acl>::from_iter([Acl::ReadItems, Acl::Read]),
|
||||
)
|
||||
.await
|
||||
.caused_by(trc::location!())?
|
||||
.into()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Filter by changelog
|
||||
let mut last_change_id = None;
|
||||
if let Some(change_id) = query.from_change_id {
|
||||
let changelog = self
|
||||
.store()
|
||||
.changes(account_id, Collection::FileNode, Query::Since(change_id))
|
||||
.await
|
||||
.caused_by(trc::location!())?;
|
||||
let limit = std::cmp::min(
|
||||
query.limit.unwrap_or(u32::MAX) as usize,
|
||||
self.core.dav.max_changes,
|
||||
);
|
||||
if changelog.to_change_id != 0 {
|
||||
last_change_id = Some(changelog.to_change_id);
|
||||
}
|
||||
let mut changes =
|
||||
RoaringBitmap::from_iter(changelog.changes.iter().map(|change| change.id() as u32));
|
||||
if changes.len() as usize > limit {
|
||||
changes = RoaringBitmap::from_sorted_iter(changes.into_iter().take(limit)).unwrap();
|
||||
}
|
||||
if let Some(document_ids) = &mut document_ids {
|
||||
*document_ids &= changes;
|
||||
} else {
|
||||
document_ids = Some(changes);
|
||||
}
|
||||
}
|
||||
|
||||
let mut response = MultiStatus::new(Vec::with_capacity(16));
|
||||
let mut paths = if let Some(resource) = query.resource.resource {
|
||||
files
|
||||
.subtree_with_depth(resource, query.depth)
|
||||
.filter(|item| {
|
||||
document_ids
|
||||
.as_ref()
|
||||
.is_none_or(|d| d.contains(item.document_id))
|
||||
})
|
||||
.map(|item| {
|
||||
(
|
||||
item.document_id,
|
||||
(query.format_to_base_uri(&item.name), item.is_container),
|
||||
)
|
||||
})
|
||||
.collect::<AHashMap<_, _>>()
|
||||
} else {
|
||||
if !query.depth_no_root || query.from_change_id.is_none() {
|
||||
self.prepare_principal_propfind_response(
|
||||
access_token,
|
||||
PrincipalResource::Id(account_id),
|
||||
&query.propfind,
|
||||
&mut response,
|
||||
)
|
||||
.await?;
|
||||
|
||||
if query.depth == 0 {
|
||||
return Ok(HttpResponse::new(StatusCode::MULTI_STATUS)
|
||||
.with_xml_body(response.to_string()));
|
||||
}
|
||||
}
|
||||
files
|
||||
.tree_with_depth(query.depth - 1)
|
||||
.filter(|item| {
|
||||
document_ids
|
||||
.as_ref()
|
||||
.is_none_or(|d| d.contains(item.document_id))
|
||||
})
|
||||
.map(|item| {
|
||||
(
|
||||
item.document_id,
|
||||
(query.format_to_base_uri(&item.name), item.is_container),
|
||||
)
|
||||
})
|
||||
.collect::<AHashMap<_, _>>()
|
||||
};
|
||||
|
||||
if paths.is_empty() && query.from_change_id.is_none() {
|
||||
response.add_response(Response::new_status(
|
||||
[query.format_to_base_uri(query.resource.resource.unwrap_or_default())],
|
||||
StatusCode::NOT_FOUND,
|
||||
));
|
||||
|
||||
return Ok(
|
||||
HttpResponse::new(StatusCode::MULTI_STATUS).with_xml_body(response.to_string())
|
||||
);
|
||||
}
|
||||
|
||||
let todo = "prefer minimal";
|
||||
|
||||
// Prepare response
|
||||
let (fields, is_all_prop) = match query.propfind {
|
||||
PropFind::PropName => {
|
||||
for (_, (path, is_container)) in paths {
|
||||
response.add_response(Response::new_propstat(
|
||||
path,
|
||||
vec![PropStat::new_list(all_properties(is_container))],
|
||||
));
|
||||
}
|
||||
|
||||
return Ok(
|
||||
HttpResponse::new(StatusCode::MULTI_STATUS).with_xml_body(response.to_string())
|
||||
);
|
||||
}
|
||||
PropFind::AllProp(items) => (
|
||||
items
|
||||
.into_iter()
|
||||
.filter(|v| matches!(v, DavProperty::DeadProperty(_)))
|
||||
.map(DavPropertyValue::empty)
|
||||
.collect::<Vec<_>>(),
|
||||
true,
|
||||
),
|
||||
PropFind::Prop(items) => (
|
||||
items
|
||||
.into_iter()
|
||||
.map(DavPropertyValue::empty)
|
||||
.collect::<Vec<_>>(),
|
||||
false,
|
||||
),
|
||||
};
|
||||
|
||||
for (document_id, node_) in self
|
||||
.get_properties::<Archive<AlignedBytes>, _>(
|
||||
account_id,
|
||||
Collection::FileNode,
|
||||
&Paths(&paths),
|
||||
Property::Value,
|
||||
)
|
||||
.await
|
||||
.caused_by(trc::location!())?
|
||||
{
|
||||
let node = node_.unarchive::<FileNode>().caused_by(trc::location!())?;
|
||||
let (node_path, _) = paths.remove(&document_id).unwrap();
|
||||
let is_container = node.file.is_none();
|
||||
let mut fields = if is_all_prop {
|
||||
let mut all_fields = all_properties(is_container);
|
||||
if !fields.is_empty() {
|
||||
all_fields.extend(fields.iter().cloned());
|
||||
}
|
||||
all_fields
|
||||
} else {
|
||||
fields.clone()
|
||||
};
|
||||
|
||||
// Fill properties
|
||||
for fields in &mut fields {}
|
||||
|
||||
// Add response
|
||||
response.add_response(Response::new_propstat(
|
||||
node_path,
|
||||
vec![PropStat::new_list(fields)],
|
||||
));
|
||||
}
|
||||
|
||||
Ok(HttpResponse::new(StatusCode::MULTI_STATUS).with_xml_body(response.to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
fn all_properties(is_container: bool) -> Vec<DavPropertyValue> {
|
||||
let mut props = vec![
|
||||
DavPropertyValue::empty(WebDavProperty::CreationDate),
|
||||
DavPropertyValue::empty(WebDavProperty::DisplayName),
|
||||
DavPropertyValue::empty(WebDavProperty::GetETag),
|
||||
DavPropertyValue::empty(WebDavProperty::GetLastModified),
|
||||
DavPropertyValue::empty(WebDavProperty::ResourceType),
|
||||
DavPropertyValue::empty(WebDavProperty::LockDiscovery),
|
||||
DavPropertyValue::empty(WebDavProperty::SupportedLock),
|
||||
DavPropertyValue::empty(WebDavProperty::CurrentUserPrincipal),
|
||||
DavPropertyValue::empty(WebDavProperty::SyncToken),
|
||||
DavPropertyValue::empty(WebDavProperty::Owner),
|
||||
DavPropertyValue::empty(WebDavProperty::SupportedPrivilegeSet),
|
||||
DavPropertyValue::empty(WebDavProperty::CurrentUserPrivilegeSet),
|
||||
DavPropertyValue::empty(WebDavProperty::Acl),
|
||||
DavPropertyValue::empty(WebDavProperty::AclRestrictions),
|
||||
DavPropertyValue::empty(WebDavProperty::InheritedAclSet),
|
||||
DavPropertyValue::empty(WebDavProperty::PrincipalCollectionSet),
|
||||
];
|
||||
|
||||
if is_container {
|
||||
props.extend([
|
||||
DavPropertyValue::empty(WebDavProperty::SupportedReportSet),
|
||||
DavPropertyValue::empty(WebDavProperty::QuotaAvailableBytes),
|
||||
DavPropertyValue::empty(WebDavProperty::QuotaUsedBytes),
|
||||
]);
|
||||
} else {
|
||||
props.extend([
|
||||
DavPropertyValue::empty(WebDavProperty::GetContentLanguage),
|
||||
DavPropertyValue::empty(WebDavProperty::GetContentLength),
|
||||
DavPropertyValue::empty(WebDavProperty::GetContentType),
|
||||
]);
|
||||
}
|
||||
|
||||
props
|
||||
}
|
||||
|
||||
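Note: the match on query.propfind above distinguishes the three PROPFIND request bodies defined by RFC 4918: <propname/> only reports which properties exist (hence the early return listing all_properties()), <allprop/> returns every live property plus any dead properties named in <include/>, and <prop/> returns exactly the named set. A simplified sketch of that selection:

    #[allow(dead_code)]
    enum PropFind {
        PropName,
        AllProp(Vec<String>), // extra (dead) properties from <include/>
        Prop(Vec<String>),
    }

    // Decide which property names a response should cover, given the set of
    // live properties the resource supports.
    fn requested(propfind: PropFind, live: &[&str]) -> Vec<String> {
        match propfind {
            PropFind::PropName => live.iter().map(|p| p.to_string()).collect(),
            PropFind::AllProp(extra) => live.iter().map(|p| p.to_string()).chain(extra).collect(),
            PropFind::Prop(named) => named,
        }
    }

    fn main() {
        let live = ["displayname", "getetag"];
        assert_eq!(requested(PropFind::Prop(vec!["getetag".into()]), &live), ["getetag"]);
        assert_eq!(requested(PropFind::AllProp(Vec::new()), &live).len(), 2);
    }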
struct Paths<'x>(&'x AHashMap<u32, (String, bool)>);
|
||||
|
||||
impl DocumentSet for Paths<'_> {
|
||||
fn min(&self) -> u32 {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn max(&self) -> u32 {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn contains(&self, id: u32) -> bool {
|
||||
self.0.contains_key(&id)
|
||||
}
|
||||
|
||||
fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
|
||||
fn iterate(&self) -> impl Iterator<Item = u32> {
|
||||
self.0.keys().copied()
|
||||
}
|
||||
}
|
||||
|
|
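Note: the Paths wrapper above is a thin adapter: the handler already knows which document ids it cares about (and the href each maps to), so it exposes that map through the store's DocumentSet interface and lets get_properties fetch exactly those documents. A sketch with a simplified trait standing in for store::dispatch::DocumentSet:

    use std::collections::HashMap;

    trait DocumentSet {
        fn contains(&self, id: u32) -> bool;
        fn len(&self) -> usize;
        fn iterate(&self) -> impl Iterator<Item = u32>;
    }

    struct Paths<'x>(&'x HashMap<u32, (String, bool)>); // id -> (href, is_container)

    impl DocumentSet for Paths<'_> {
        fn contains(&self, id: u32) -> bool {
            self.0.contains_key(&id)
        }
        fn len(&self) -> usize {
            self.0.len()
        }
        fn iterate(&self) -> impl Iterator<Item = u32> {
            self.0.keys().copied()
        }
    }

    fn main() {
        let mut map = HashMap::new();
        map.insert(4u32, ("/dav/file/a".to_string(), true));
        let set = Paths(&map);
        assert!(set.contains(4) && set.len() == 1);
        assert_eq!(set.iterate().collect::<Vec<_>>(), vec![4]);
    }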
|
@ -57,9 +57,12 @@ impl FilePropPatchRequestHandler for Server {
|
|||
request: PropertyUpdate,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
// Validate URI
|
||||
let resource_ = self.validate_uri(access_token, headers.uri).await?;
|
||||
let resource_ = self
|
||||
.validate_uri(access_token, headers.uri)
|
||||
.await?
|
||||
.into_owned_uri()?;
|
||||
let uri = headers.uri;
|
||||
let account_id = resource_.account_id()?;
|
||||
let account_id = resource_.account_id;
|
||||
let files = self
|
||||
.fetch_file_hierarchy(account_id)
|
||||
.await
|
||||
|
|
|
@ -52,8 +52,11 @@ impl FileUpdateRequestHandler for Server {
|
|||
_is_patch: bool,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
// Validate URI
|
||||
let resource = self.validate_uri(access_token, headers.uri).await?;
|
||||
let account_id = resource.account_id()?;
|
||||
let resource = self
|
||||
.validate_uri(access_token, headers.uri)
|
||||
.await?
|
||||
.into_owned_uri()?;
|
||||
let account_id = resource.account_id;
|
||||
let files = self
|
||||
.fetch_file_hierarchy(account_id)
|
||||
.await
|
||||
|
|
|
@ -3,3 +3,5 @@
|
|||
*
|
||||
* SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
|
||||
*/
|
||||
|
||||
pub mod propfind;
|
||||
|
|
crates/dav/src/principal/propfind.rs (new file, 41 lines)
|
@ -0,0 +1,41 @@
|
|||
/*
|
||||
* SPDX-FileCopyrightText: 2020 Stalwart Labs Ltd <hello@stalw.art>
|
||||
*
|
||||
* SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
|
||||
*/
|
||||
|
||||
use common::{Server, auth::AccessToken};
|
||||
use dav_proto::{
|
||||
RequestHeaders,
|
||||
schema::{request::PropFind, response::MultiStatus},
|
||||
};
|
||||
use http_proto::HttpResponse;
|
||||
use store::roaring::RoaringBitmap;
|
||||
|
||||
pub(crate) enum PrincipalResource<'x> {
|
||||
Id(u32),
|
||||
Uri(&'x str),
|
||||
Ids(RoaringBitmap),
|
||||
}
|
||||
|
||||
pub(crate) trait PrincipalPropFind: Sync + Send {
|
||||
fn prepare_principal_propfind_response(
|
||||
&self,
|
||||
access_token: &AccessToken,
|
||||
resource: PrincipalResource<'_>,
|
||||
request: &PropFind,
|
||||
response: &mut MultiStatus,
|
||||
) -> impl Future<Output = crate::Result<HttpResponse>> + Send;
|
||||
}
|
||||
|
||||
impl PrincipalPropFind for Server {
|
||||
async fn prepare_principal_propfind_response(
|
||||
&self,
|
||||
access_token: &AccessToken,
|
||||
resource: PrincipalResource<'_>,
|
||||
request: &PropFind,
|
||||
response: &mut MultiStatus,
|
||||
) -> crate::Result<HttpResponse> {
|
||||
todo!()
|
||||
}
|
||||
}
|
|
@ -22,13 +22,12 @@ use hyper::{StatusCode, header};
|
|||
|
||||
use crate::{
|
||||
DavError, DavMethod, DavResource,
|
||||
common::lock::LockRequestHandler,
|
||||
common::{lock::LockRequestHandler, propfind::PropFindRequestHandler},
|
||||
file::{
|
||||
acl::FileAclRequestHandler, changes::FileChangesRequestHandler,
|
||||
copy_move::FileCopyMoveRequestHandler, delete::FileDeleteRequestHandler,
|
||||
get::FileGetRequestHandler, mkcol::FileMkColRequestHandler,
|
||||
propfind::FilePropFindRequestHandler, proppatch::FilePropPatchRequestHandler,
|
||||
update::FileUpdateRequestHandler,
|
||||
proppatch::FilePropPatchRequestHandler, update::FileUpdateRequestHandler,
|
||||
},
|
||||
};
|
||||
|
||||
|
@ -73,19 +72,14 @@ impl DavRequestDispatcher for Server {
|
|||
let todo = "lock tokens, headers, etc";
|
||||
|
||||
match method {
|
||||
DavMethod::PROPFIND => match resource {
|
||||
DavResource::Card => todo!(),
|
||||
DavResource::Cal => todo!(),
|
||||
DavResource::Principal => todo!(),
|
||||
DavResource::File => {
|
||||
self.handle_file_propfind_request(
|
||||
&access_token,
|
||||
headers,
|
||||
PropFind::parse(&mut Tokenizer::new(&body))?,
|
||||
)
|
||||
.await
|
||||
}
|
||||
},
|
||||
DavMethod::PROPFIND => {
|
||||
self.handle_propfind_request(
|
||||
&access_token,
|
||||
headers,
|
||||
PropFind::parse(&mut Tokenizer::new(&body))?,
|
||||
)
|
||||
.await
|
||||
}
|
||||
DavMethod::PROPPATCH => match resource {
|
||||
DavResource::Card => todo!(),
|
||||
DavResource::Cal => todo!(),
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
* SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
|
||||
*/
|
||||
|
||||
use store::SerializedVersion;
|
||||
use store::{SerializedVersion, SERIALIZE_OBJ_03_V1};
|
||||
|
||||
#[derive(
|
||||
rkyv::Archive, rkyv::Deserialize, rkyv::Serialize, Debug, Default, Clone, PartialEq, Eq,
|
||||
|
@ -26,6 +26,6 @@ pub struct EmailAddress {
|
|||
|
||||
impl SerializedVersion for Identity {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
SERIALIZE_OBJ_03_V1
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
|
||||
use common::config::jmap::settings::SpecialUse;
|
||||
use jmap_proto::types::value::AclGrant;
|
||||
use store::SerializedVersion;
|
||||
use store::{SerializedVersion, SERIALIZE_OBJ_04_V1};
|
||||
|
||||
pub mod destroy;
|
||||
pub mod index;
|
||||
|
@ -41,7 +41,7 @@ pub struct UidMailbox {
|
|||
|
||||
impl SerializedVersion for Mailbox {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
SERIALIZE_OBJ_04_V1
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -26,7 +26,7 @@ use rasn_cms::{
|
|||
};
|
||||
use rsa::{Pkcs1v15Encrypt, RsaPublicKey, pkcs1::DecodeRsaPublicKey};
|
||||
use sequoia_openpgp as openpgp;
|
||||
use store::{Deserialize, SerializedVersion, write::Archive};
|
||||
use store::{write::Archive, Deserialize, SerializedVersion, SERIALIZE_OBJ_05_V1};
|
||||
|
||||
const P: openpgp::policy::StandardPolicy<'static> = openpgp::policy::StandardPolicy::new();
|
||||
|
||||
|
@ -86,7 +86,7 @@ pub struct EncryptionParams {
|
|||
|
||||
impl SerializedVersion for EncryptionParams {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
SERIALIZE_OBJ_05_V1
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -83,6 +83,7 @@ impl EmailDeletion for Server {
|
|||
.caused_by(trc::location!())?;
|
||||
let thread_id = u32::from(data.inner.thread_id);
|
||||
|
||||
// Log mailbox changes
|
||||
for mailbox in data.inner.mailboxes.iter() {
|
||||
changes.log_child_update(Collection::Mailbox, u32::from(mailbox.mailbox_id));
|
||||
}
|
||||
|
|
|
@ -20,7 +20,7 @@ use rkyv::{
|
|||
string::ArchivedString,
|
||||
vec::ArchivedVec,
|
||||
};
|
||||
use store::SerializedVersion;
|
||||
use store::{SerializedVersion, SERIALIZE_OBJ_06_V1, SERIALIZE_OBJ_07_V1};
|
||||
use utils::BlobHash;
|
||||
|
||||
use crate::mailbox::{ArchivedUidMailbox, UidMailbox};
|
||||
|
@ -48,13 +48,13 @@ impl IndexableAndSerializableObject for MessageData {}
|
|||
|
||||
impl SerializedVersion for MessageData {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
SERIALIZE_OBJ_06_V1
|
||||
}
|
||||
}
|
||||
|
||||
impl SerializedVersion for MessageMetadata {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
SERIALIZE_OBJ_07_V1
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
*/
|
||||
|
||||
use jmap_proto::types::type_state::DataType;
|
||||
use store::SerializedVersion;
|
||||
use store::{SerializedVersion, SERIALIZE_OBJ_08_V1};
|
||||
use utils::map::bitmap::Bitmap;
|
||||
|
||||
#[derive(
|
||||
|
@ -29,6 +29,6 @@ pub struct Keys {
|
|||
|
||||
impl SerializedVersion for PushSubscription {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
SERIALIZE_OBJ_08_V1
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,7 +8,7 @@ use std::sync::Arc;
|
|||
|
||||
use common::KV_SIEVE_ID;
|
||||
use sieve::Sieve;
|
||||
use store::{SerializedVersion, blake3};
|
||||
use store::{blake3, SerializedVersion, SERIALIZE_OBJ_09_V1};
|
||||
use utils::BlobHash;
|
||||
|
||||
pub mod activate;
|
||||
|
@ -38,7 +38,7 @@ pub struct SieveScript {
|
|||
|
||||
impl SerializedVersion for SieveScript {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
SERIALIZE_OBJ_09_V1
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
* SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-SEL
|
||||
*/
|
||||
|
||||
use store::SerializedVersion;
|
||||
use store::{SerializedVersion, SERIALIZE_OBJ_10_V1};
|
||||
use utils::map::vec_map::VecMap;
|
||||
|
||||
pub mod index;
|
||||
|
@ -25,7 +25,7 @@ pub struct EmailSubmission {
|
|||
|
||||
impl SerializedVersion for EmailSubmission {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
SERIALIZE_OBJ_10_V1
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -97,18 +97,3 @@ impl ThreadCache for Server {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
|
||||
// Obtain threadIds for matching messages
|
||||
let mut thread_ids = Vec::with_capacity(message_ids.size_hint().0);
|
||||
for document_id in message_ids {
|
||||
if let Some(thread_id) = thread_cache.threads.get(&document_id) {
|
||||
thread_ids.push((document_id, *thread_id));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(thread_ids)
|
||||
|
||||
*/
|
||||
|
|
|
@ -9,7 +9,7 @@ pub mod index;
|
|||
|
||||
use dav_proto::schema::request::DeadProperty;
|
||||
use jmap_proto::types::value::AclGrant;
|
||||
use store::SerializedVersion;
|
||||
use store::{SerializedVersion, SERIALIZE_OBJ_11_V1};
|
||||
use utils::BlobHash;
|
||||
|
||||
#[derive(
|
||||
|
@ -40,6 +40,6 @@ pub struct FileProperties {
|
|||
|
||||
impl SerializedVersion for FileNode {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
SERIALIZE_OBJ_11_V1
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
use http_proto::{HttpRequest, request::fetch_body};
|
||||
use hyper::header::CONTENT_TYPE;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use store::SerializedVersion;
|
||||
use store::{SERIALIZE_OBJ_12_V1, SerializedVersion};
|
||||
use utils::map::vec_map::VecMap;
|
||||
|
||||
pub mod auth;
|
||||
|
@ -58,7 +58,7 @@ pub struct OAuthCode {
|
|||
|
||||
impl SerializedVersion for OAuthCode {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
SERIALIZE_OBJ_12_V1
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -68,12 +68,6 @@ pub enum Keyword {
|
|||
Other(String),
|
||||
}
|
||||
|
||||
impl SerializedVersion for Keyword {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
}
|
||||
}
|
||||
|
||||
impl JsonObjectParser for Keyword {
|
||||
fn parse(parser: &mut Parser<'_>) -> trc::Result<Self>
|
||||
where
|
||||
|
|
|
@ -39,14 +39,14 @@ impl PrincipalGet for Server {
|
|||
//Property::Timezone,
|
||||
//Property::Capabilities,
|
||||
]);
|
||||
let email_submission_ids = self
|
||||
.get_document_ids(u32::MAX, Collection::EmailSubmission)
|
||||
let principal_ids = self
|
||||
.get_document_ids(u32::MAX, Collection::Principal)
|
||||
.await?
|
||||
.unwrap_or_default();
|
||||
let ids = if let Some(ids) = ids {
|
||||
ids
|
||||
} else {
|
||||
email_submission_ids
|
||||
principal_ids
|
||||
.iter()
|
||||
.take(self.core.jmap.get_max_objects)
|
||||
.map(Into::into)
|
||||
|
|
|
@ -12,7 +12,7 @@ use std::{
|
|||
|
||||
use common::expr::{self, functions::ResolveVariable, *};
|
||||
use smtp_proto::{ArchivedResponse, Response};
|
||||
use store::{SerializedVersion, write::now};
|
||||
use store::{SERIALIZE_OBJ_13_V1, SerializedVersion, write::now};
|
||||
use utils::BlobHash;
|
||||
|
||||
pub mod dsn;
|
||||
|
@ -69,7 +69,7 @@ pub struct Message {
|
|||
|
||||
impl SerializedVersion for Message {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
SERIALIZE_OBJ_13_V1
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -68,6 +68,24 @@ pub trait SerializeInfallible {
|
|||
fn serialize(&self) -> Vec<u8>;
|
||||
}
|
||||
|
||||
// Max 64 versions (2 ^ 6)
|
||||
pub const SERIALIZE_OBJ_01_V1: u8 = 0;
|
||||
pub const SERIALIZE_OBJ_02_V1: u8 = 1;
|
||||
pub const SERIALIZE_OBJ_03_V1: u8 = 2;
|
||||
pub const SERIALIZE_OBJ_04_V1: u8 = 3;
|
||||
pub const SERIALIZE_OBJ_05_V1: u8 = 4;
|
||||
pub const SERIALIZE_OBJ_06_V1: u8 = 5;
|
||||
pub const SERIALIZE_OBJ_07_V1: u8 = 6;
|
||||
pub const SERIALIZE_OBJ_08_V1: u8 = 7;
|
||||
pub const SERIALIZE_OBJ_09_V1: u8 = 8;
|
||||
pub const SERIALIZE_OBJ_10_V1: u8 = 9;
|
||||
pub const SERIALIZE_OBJ_11_V1: u8 = 10;
|
||||
pub const SERIALIZE_OBJ_12_V1: u8 = 11;
|
||||
pub const SERIALIZE_OBJ_13_V1: u8 = 12;
|
||||
pub const SERIALIZE_OBJ_14_V1: u8 = 13;
|
||||
pub const SERIALIZE_OBJ_15_V1: u8 = 14;
|
||||
pub const SERIALIZE_OBJ_16_V1: u8 = 15;
|
||||
|
||||
pub trait SerializedVersion {
|
||||
fn serialize_version() -> u8;
|
||||
}
|
||||
|
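Note: every archived object type now claims a distinct SERIALIZE_OBJ_* slot instead of hard-coding 0, so the version byte stored with an archive identifies which schema wrote it (the comment above caps this at 64 versions, i.e. the value fits in 6 bits). A self-contained sketch of the pattern and the mismatch check it enables:

    pub const SERIALIZE_OBJ_01_V1: u8 = 0;

    pub trait SerializedVersion {
        fn serialize_version() -> u8;
    }

    struct SerializedCert;

    impl SerializedVersion for SerializedCert {
        fn serialize_version() -> u8 {
            SERIALIZE_OBJ_01_V1
        }
    }

    // Compare the stored version byte against the type's constant before the
    // archive bytes are interpreted.
    fn check_version<T: SerializedVersion>(stored: u8) -> Result<(), String> {
        if stored == T::serialize_version() {
            Ok(())
        } else {
            Err(format!(
                "Archive version mismatch, expected {} but got {}",
                T::serialize_version(),
                stored
            ))
        }
    }

    fn main() {
        assert!(check_version::<SerializedCert>(0).is_ok());
        assert!(check_version::<SerializedCert>(1).is_err());
    }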
@ -796,6 +814,6 @@ impl Stores {
|
|||
|
||||
impl SerializedVersion for () {
|
||||
fn serialize_version() -> u8 {
|
||||
0
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -112,7 +112,7 @@ impl Deserialize for Archive<AlignedBytes> {
|
|||
}),
|
||||
_ => Err(trc::StoreEvent::DataCorruption
|
||||
.into_err()
|
||||
.details("Invalid archive marker.")
|
||||
.details("Invalid archive marker")
|
||||
.ctx(trc::Key::Value, bytes)
|
||||
.caused_by(trc::location!())),
|
||||
}
|
||||
|
@ -209,18 +209,56 @@ impl Archive<AlignedBytes> {
|
|||
rkyv::api::high::HighValidator<'a, rkyv::rancor::Error>,
|
||||
> + rkyv::Deserialize<T, rkyv::api::high::HighDeserializer<rkyv::rancor::Error>>,
|
||||
{
|
||||
if self.version == T::serialize_version() {
|
||||
let bytes = self.as_bytes();
|
||||
if self.version == T::serialize_version()
|
||||
&& bytes.len() >= std::mem::size_of::<T::Archived>()
|
||||
{
|
||||
// SAFETY: Trusted and versioned input with integrity hash
|
||||
Ok(unsafe { rkyv::access_unchecked::<T::Archived>(self.as_bytes()) })
|
||||
Ok(unsafe { rkyv::access_unchecked::<T::Archived>(bytes) })
|
||||
} else {
|
||||
Err(trc::StoreEvent::DataCorruption
|
||||
.into_err()
|
||||
.details(format!(
|
||||
"Archive version mismatch, expected {} but got {}",
|
||||
"Archive version mismatch, expected {} ({} bytes) but got {} ({} bytes)",
|
||||
T::serialize_version(),
|
||||
self.version
|
||||
std::mem::size_of::<T::Archived>(),
|
||||
self.version,
|
||||
bytes.len()
|
||||
))
|
||||
.ctx(trc::Key::Value, self.as_bytes())
|
||||
.ctx(trc::Key::Value, bytes)
|
||||
.caused_by(trc::location!()))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unarchive_untrusted<T>(&self) -> trc::Result<&<T as rkyv::Archive>::Archived>
|
||||
where
|
||||
T: rkyv::Archive + SerializedVersion,
|
||||
T::Archived: for<'a> rkyv::bytecheck::CheckBytes<
|
||||
rkyv::api::high::HighValidator<'a, rkyv::rancor::Error>,
|
||||
> + rkyv::Deserialize<T, rkyv::api::high::HighDeserializer<rkyv::rancor::Error>>,
|
||||
{
|
||||
let bytes = self.as_bytes();
|
||||
if self.version == T::serialize_version()
|
||||
&& bytes.len() >= std::mem::size_of::<T::Archived>()
|
||||
{
|
||||
rkyv::access::<T::Archived, rkyv::rancor::Error>(bytes).map_err(|err| {
|
||||
trc::StoreEvent::DeserializeError
|
||||
.ctx(trc::Key::Value, self.as_bytes())
|
||||
.details("Archive access failed")
|
||||
.caused_by(trc::location!())
|
||||
.reason(err)
|
||||
})
|
||||
} else {
|
||||
Err(trc::StoreEvent::DataCorruption
|
||||
.into_err()
|
||||
.details(format!(
|
||||
"Archive version mismatch, expected {} ({} bytes) but got {} ({} bytes)",
|
||||
T::serialize_version(),
|
||||
std::mem::size_of::<T::Archived>(),
|
||||
self.version,
|
||||
bytes.len()
|
||||
))
|
||||
.ctx(trc::Key::Value, bytes)
|
||||
.caused_by(trc::location!()))
|
||||
}
|
||||
}
|
||||
|
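Note: besides the reworded error, unarchive() gains a length guard and a new unarchive_untrusted() variant: trusted archives are still reinterpreted with rkyv::access_unchecked, but only after checking that the buffer is at least as large as the archived type, while untrusted input goes through rkyv::access and full byte validation. A minimal sketch of the guard itself (simplified, no rkyv involved):

    // A truncated or mis-versioned buffer is rejected before any bytes are
    // reinterpreted as the archived type.
    fn can_unarchive(stored_version: u8, expected_version: u8, len: usize, archived_size: usize) -> bool {
        stored_version == expected_version && len >= archived_size
    }

    fn main() {
        assert!(can_unarchive(3, 3, 64, 48));
        assert!(!can_unarchive(3, 3, 16, 48)); // truncated archive is rejected
    }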
|