Mirror of https://github.com/stalwartlabs/mail-server.git (synced 2025-11-08 12:51:07 +08:00)

Commit 48f255b31f (parent ff279b3a39): Iterate values rather than sending multiple get requests

18 changed files with 260 additions and 212 deletions
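The theme of the commit: callers that previously issued one get_property / get_value request per document id now make a single get_properties call that iterates a contiguous key range and yields (document_id, value) pairs. A minimal sketch of the before/after calling pattern, adapted from the IMAP mailbox fetch in the first hunk below (here `jmap` stands for the JMAP server handle, `self.jmap` in the session code; error handling is elided and simplified):

    // Before: one store round-trip per mailbox id.
    for mailbox_id in mailbox_ids {
        if let Some(values) = jmap
            .get_property::<Object<Value>>(account_id, Collection::Mailbox, mailbox_id, &Property::Value)
            .await?
        {
            // ...
        }
    }

    // After: one call that scans the property keys once and returns only the
    // ids present in `mailbox_ids` (any type implementing the new
    // PropertiesIterator trait, e.g. a RoaringBitmap).
    for (mailbox_id, values) in jmap
        .get_properties::<Object<Value>, _, _>(
            account_id,
            Collection::Mailbox,
            &mailbox_ids,
            Property::Value,
        )
        .await?
    {
        // ...
    }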
@@ -133,36 +133,29 @@ impl<T: SessionStream> SessionData<T> {
 
         // Fetch mailboxes
         let mut mailboxes = Vec::with_capacity(10);
-        for mailbox_id in mailbox_ids {
-            mailboxes.push(
-                match self
-                    .jmap
-                    .get_property::<Object<Value>>(
-                        account_id,
-                        Collection::Mailbox,
-                        mailbox_id,
-                        &Property::Value,
-                    )
-                    .await
-                    .map_err(|_| {})?
-                {
-                    Some(values) => (
-                        mailbox_id,
-                        values
-                            .properties
-                            .get(&Property::ParentId)
-                            .map(|parent_id| match parent_id {
-                                Value::Id(value) => value.document_id(),
-                                _ => 0,
-                            })
-                            .unwrap_or(0),
-                        values,
-                    ),
-                    None => {
-                        continue;
-                    }
-                },
-            );
+        for (mailbox_id, values) in self
+            .jmap
+            .get_properties::<Object<Value>, _, _>(
+                account_id,
+                Collection::Mailbox,
+                &mailbox_ids,
+                Property::Value,
+            )
+            .await
+            .map_err(|_| {})?
+        {
+            mailboxes.push((
+                mailbox_id,
+                values
+                    .properties
+                    .get(&Property::ParentId)
+                    .map(|parent_id| match parent_id {
+                        Value::Id(value) => value.document_id(),
+                        _ => 0,
+                    })
+                    .unwrap_or(0),
+                values,
+            ));
         }
 
         // Build tree
@@ -103,38 +103,35 @@ impl<T: SessionStream> SessionData<T> {
         let mut unassigned = Vec::new();
 
         // Obtain all message ids
-        for (uid_mailbox, message_id) in self
+        for (message_id, uid_mailbox) in self
             .jmap
-            .get_properties::<HashedValue<Vec<UidMailbox>>>(
+            .get_properties::<HashedValue<Vec<UidMailbox>>, _, _>(
                 mailbox.account_id,
                 Collection::Email,
-                message_ids.iter(),
+                &message_ids,
                 Property::MailboxIds,
             )
             .await?
             .into_iter()
-            .zip(message_ids.iter())
         {
             // Make sure the message is still in this mailbox
-            if let Some(uid_mailbox) = uid_mailbox {
-                if let Some(item) = uid_mailbox
-                    .inner
-                    .iter()
-                    .find(|item| item.mailbox_id == mailbox.mailbox_id)
-                {
-                    if item.uid > 0 {
-                        if assigned.insert(item.uid, message_id).is_some() {
-                            tracing::warn!(event = "error",
-                                context = "store",
-                                account_id = mailbox.account_id,
-                                collection = ?Collection::Mailbox,
-                                mailbox_id = mailbox.mailbox_id,
-                                message_id = message_id,
-                                "Duplicate UID");
-                        }
-                    } else {
-                        unassigned.push((message_id, uid_mailbox));
-                    }
-                }
-            }
+            if let Some(item) = uid_mailbox
+                .inner
+                .iter()
+                .find(|item| item.mailbox_id == mailbox.mailbox_id)
+            {
+                if item.uid > 0 {
+                    if assigned.insert(item.uid, message_id).is_some() {
+                        tracing::warn!(event = "error",
+                            context = "store",
+                            account_id = mailbox.account_id,
+                            collection = ?Collection::Mailbox,
+                            mailbox_id = mailbox.mailbox_id,
+                            message_id = message_id,
+                            "Duplicate UID");
+                    }
+                } else {
+                    unassigned.push((message_id, uid_mailbox));
+                }
+            }
         }
@@ -102,10 +102,8 @@ impl<T: SessionStream> SessionData<T> {
         // Group messages by thread
         let mut threads: AHashMap<u32, Vec<u32>> = AHashMap::new();
         let state = mailbox.state.lock();
-        for (document_id, thread_id) in result_set.results.into_iter().zip(thread_ids) {
-            if let (Some(thread_id), Some((imap_id, _))) =
-                (thread_id, state.map_result_id(document_id, is_uid))
-            {
+        for (document_id, thread_id) in thread_ids {
+            if let Some((imap_id, _)) = state.map_result_id(document_id, is_uid) {
                 threads.entry(thread_id).or_default().push(imap_id);
             }
         }
@@ -21,15 +21,20 @@
  * for more details.
  */
 
-use jmap_proto::types::{collection::Collection, property::Property};
-use store::{ahash::AHashMap, write::ValueClass, ValueKey};
+use std::collections::HashMap;
+
+use futures_util::TryFutureExt;
+use jmap_proto::{
+    error::method::MethodError,
+    types::{collection::Collection, property::Property},
+};
 use utils::CachedItem;
 
 use crate::JMAP;
 
 #[derive(Debug, Default)]
 pub struct Threads {
-    pub threads: AHashMap<u32, u32>,
+    pub threads: HashMap<u32, u32>,
     pub modseq: Option<u64>,
 }
@@ -38,11 +43,19 @@ impl JMAP {
         &self,
         account_id: u32,
         message_ids: impl Iterator<Item = u32>,
-    ) -> store::Result<Vec<Option<u32>>> {
+    ) -> Result<Vec<(u32, u32)>, MethodError> {
         // Obtain current state
         let modseq = self
             .store
             .get_last_change_id(account_id, Collection::Thread)
+            .map_err(|err| {
+                tracing::error!(event = "error",
+                    context = "store",
+                    account_id = account_id,
+                    error = ?err,
+                    "Failed to retrieve threads last change id");
+                MethodError::ServerPartialFail
+            })
             .await?;
 
         // Lock the cache
@@ -53,32 +66,22 @@ impl JMAP {
         let mut thread_cache = thread_cache_.get().await;
 
         // Invalidate cache if the modseq has changed
-        if thread_cache.modseq != modseq {
-            thread_cache.threads.clear();
+        if thread_cache.modseq.unwrap_or(0) < modseq.unwrap_or(0) {
+            thread_cache.threads = self
+                .get_properties::<u32, _, _>(account_id, Collection::Email, &(), Property::ThreadId)
+                .await?
+                .into_iter()
+                .collect();
+            thread_cache.modseq = modseq;
         }
 
         // Obtain threadIds for matching messages
         let mut thread_ids = Vec::with_capacity(message_ids.size_hint().0);
         for document_id in message_ids {
             if let Some(thread_id) = thread_cache.threads.get(&document_id) {
-                thread_ids.push((*thread_id).into());
-            } else if let Some(thread_id) = self
-                .store
-                .get_value::<u32>(ValueKey {
-                    account_id,
-                    collection: Collection::Email.into(),
-                    document_id,
-                    class: ValueClass::Property(Property::ThreadId.into()),
-                })
-                .await?
-            {
-                thread_ids.push(thread_id.into());
-                thread_cache.threads.insert(document_id, thread_id);
-            } else {
-                thread_ids.push(None);
+                thread_ids.push((document_id, *thread_id));
             }
         }
-        thread_cache.modseq = modseq;
 
         Ok(thread_ids)
     }
@@ -121,9 +121,8 @@ impl JMAP {
                     MethodError::ServerPartialFail
                 })?
                 .into_iter()
-                .zip(document_ids)
                 .filter_map(|(thread_id, document_id)| {
-                    Id::from_parts(thread_id?, document_id).into()
+                    Id::from_parts(thread_id, document_id).into()
                 })
                 .collect()
         };
@@ -449,14 +449,17 @@ impl JMAP {
         })?;
 
         if thread_ids.len() == 1 {
-            return Ok(thread_ids.into_iter().next().unwrap());
+            return Ok(thread_ids
+                .into_iter()
+                .next()
+                .map(|(_, thread_id)| thread_id));
         }
 
         // Find the most common threadId
         let mut thread_counts = VecMap::<u32, u32>::with_capacity(thread_ids.len());
         let mut thread_id = u32::MAX;
         let mut thread_count = 0;
-        for thread_id_ in thread_ids.iter().flatten() {
+        for (_, thread_id_) in thread_ids.iter() {
             let tc = thread_counts.get_mut_or_insert(*thread_id_);
             *tc += 1;
             if *tc > thread_count {
@@ -494,7 +497,11 @@ impl JMAP {
 
         // Move messages to the new threadId
         batch.with_collection(Collection::Email);
-        for old_thread_id in thread_ids.into_iter().flatten().collect::<AHashSet<_>>() {
+        for old_thread_id in thread_ids
+            .into_iter()
+            .map(|(_, thread_id)| thread_id)
+            .collect::<AHashSet<_>>()
+        {
             if thread_id != old_thread_id {
                 for document_id in self
                     .store
@@ -375,37 +375,34 @@ impl JMAP {
             .get_tag(account_id, Collection::Email, Property::Keywords, keyword)
             .await?
             .unwrap_or_default();
+
+        if keyword_doc_ids.is_empty() {
+            return Ok(keyword_doc_ids);
+        }
+        let keyword_thread_ids = self
+            .get_cached_thread_ids(account_id, keyword_doc_ids.iter())
+            .await?;
 
         let mut not_matched_ids = RoaringBitmap::new();
         let mut matched_ids = RoaringBitmap::new();
 
-        for keyword_doc_id in &keyword_doc_ids {
+        for (keyword_doc_id, thread_id) in keyword_thread_ids {
             if matched_ids.contains(keyword_doc_id) || not_matched_ids.contains(keyword_doc_id) {
                 continue;
             }
-            if let Some(thread_id) = self
-                .get_property::<u32>(
-                    account_id,
-                    Collection::Email,
-                    keyword_doc_id,
-                    &Property::ThreadId,
-                )
-                .await?
-            {
-                if let Some(thread_doc_ids) = self
-                    .get_tag(account_id, Collection::Email, Property::ThreadId, thread_id)
-                    .await?
-                {
-                    let mut thread_tag_intersection = thread_doc_ids.clone();
-                    thread_tag_intersection &= &keyword_doc_ids;
-
-                    if (match_all && thread_tag_intersection == thread_doc_ids)
-                        || (!match_all && !thread_tag_intersection.is_empty())
-                    {
-                        matched_ids |= &thread_doc_ids;
-                    } else if !thread_tag_intersection.is_empty() {
-                        not_matched_ids |= &thread_tag_intersection;
-                    }
-                }
+            if let Some(thread_doc_ids) = self
+                .get_tag(account_id, Collection::Email, Property::ThreadId, thread_id)
+                .await?
+            {
+                let mut thread_tag_intersection = thread_doc_ids.clone();
+                thread_tag_intersection &= &keyword_doc_ids;
+
+                if (match_all && thread_tag_intersection == thread_doc_ids)
+                    || (!match_all && !thread_tag_intersection.is_empty())
+                {
+                    matched_ids |= &thread_doc_ids;
+                } else if !thread_tag_intersection.is_empty() {
+                    not_matched_ids |= &thread_tag_intersection;
+                }
             }
         }
@@ -49,8 +49,11 @@ use store::{
     fts::FtsFilter,
     query::{sort::Pagination, Comparator, Filter, ResultSet, SortedResultSet},
     roaring::RoaringBitmap,
-    write::{BatchBuilder, BitmapClass, DirectoryClass, TagValue, ValueClass},
-    BitmapKey, BlobStore, Deserialize, FtsStore, LookupStore, Store, Stores, ValueKey,
+    write::{
+        key::DeserializeBigEndian, BatchBuilder, BitmapClass, DirectoryClass, TagValue, ValueClass,
+    },
+    BitmapKey, BlobStore, Deserialize, FtsStore, IterateParams, LookupStore, Store, Stores,
+    ValueKey, U32_LEN,
 };
 use tokio::sync::mpsc;
 use utils::{
@@ -451,44 +454,62 @@ impl JMAP {
         }
     }
 
-    pub async fn get_properties<U>(
+    pub async fn get_properties<U, I, P>(
         &self,
         account_id: u32,
         collection: Collection,
-        document_ids: impl Iterator<Item = u32>,
-        property: impl AsRef<Property>,
-    ) -> Result<Vec<Option<U>>, MethodError>
+        iterate: &I,
+        property: P,
+    ) -> Result<Vec<(u32, U)>, MethodError>
     where
+        I: PropertiesIterator + Send + Sync,
+        P: AsRef<Property>,
         U: Deserialize + 'static,
     {
-        let property = property.as_ref();
+        let property: u8 = property.as_ref().into();
+        let collection: u8 = collection.into();
+        let expected_results = iterate.len();
+        let mut results = Vec::with_capacity(expected_results);
 
-        match self
-            .store
-            .get_values::<U>(
-                document_ids
-                    .map(|document_id| ValueKey {
-                        account_id,
-                        collection: collection.into(),
-                        document_id,
-                        class: ValueClass::Property(property.into()),
-                    })
-                    .collect(),
+        self.store
+            .iterate(
+                IterateParams::new(
+                    ValueKey {
+                        account_id,
+                        collection,
+                        document_id: iterate.min(),
+                        class: ValueClass::Property(property),
+                    },
+                    ValueKey {
+                        account_id,
+                        collection,
+                        document_id: iterate.max(),
+                        class: ValueClass::Property(property),
+                    },
+                ),
+                |key, value| {
+                    let document_id = key.deserialize_be_u32(key.len() - U32_LEN)?;
+                    if iterate.contains(document_id) {
+                        results.push((document_id, U::deserialize(value)?));
+                        Ok(expected_results == 0 || results.len() < expected_results)
+                    } else {
+                        Ok(true)
+                    }
+                },
             )
             .await
-        {
-            Ok(value) => Ok(value),
-            Err(err) => {
+            .map_err(|err| {
                 tracing::error!(event = "error",
                     context = "store",
                     account_id = account_id,
                     collection = ?collection,
                     property = ?property,
                     error = ?err,
                     "Failed to retrieve properties");
-                Err(MethodError::ServerPartialFail)
-            }
-        }
+                MethodError::ServerPartialFail
+            })?;
+
+        Ok(results)
     }
 
     pub async fn get_document_ids(
@@ -770,3 +791,47 @@ impl UpdateResults for QueryResponse {
         }
     }
 }
+
+#[allow(clippy::len_without_is_empty)]
+pub trait PropertiesIterator {
+    fn min(&self) -> u32;
+    fn max(&self) -> u32;
+    fn contains(&self, id: u32) -> bool;
+    fn len(&self) -> usize;
+}
+
+impl PropertiesIterator for RoaringBitmap {
+    fn min(&self) -> u32 {
+        self.min().unwrap_or(0)
+    }
+
+    fn max(&self) -> u32 {
+        self.max().map(|m| m + 1).unwrap_or(0)
+    }
+
+    fn contains(&self, id: u32) -> bool {
+        self.contains(id)
+    }
+
+    fn len(&self) -> usize {
+        self.len() as usize
+    }
+}
+
+impl PropertiesIterator for () {
+    fn min(&self) -> u32 {
+        0
+    }
+
+    fn max(&self) -> u32 {
+        u32::MAX
+    }
+
+    fn contains(&self, _: u32) -> bool {
+        true
+    }
+
+    fn len(&self) -> usize {
+        0
+    }
+}
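In practice the new trait gives get_properties two modes, both visible at call sites elsewhere in this commit: pass a RoaringBitmap of document ids to fetch just those documents, or pass `&()` to scan every document in the collection. A usage sketch, assuming the same JMAP receiver and types as in the hunks above:

    // Fetch the ThreadId property of every message in the account (used to
    // rebuild the thread cache): `()` matches all document ids.
    let all_thread_ids: Vec<(u32, u32)> = self
        .get_properties::<u32, _, _>(account_id, Collection::Email, &(), Property::ThreadId)
        .await?;

    // Fetch mailbox values only for the ids in a RoaringBitmap: the range
    // scan is bounded by mailbox_ids.min()..mailbox_ids.max() + 1, and ids
    // outside the bitmap are skipped via PropertiesIterator::contains().
    let mailboxes = self
        .get_properties::<Object<Value>, _, _>(
            account_id,
            Collection::Mailbox,
            &mailbox_ids,
            Property::Value,
        )
        .await?;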
@@ -272,8 +272,7 @@ impl JMAP {
                 MethodError::ServerPartialFail
             })?
             .into_iter()
-            .flatten()
-            .for_each(|thread_id| {
+            .for_each(|(_, thread_id)| {
                 thread_ids.insert(thread_id);
             });
         Ok(thread_ids.len())
@@ -119,20 +119,19 @@ impl JMAP {
             && (paginate.is_some()
                 || (response.total.map_or(false, |total| total > 0) && filter_as_tree))
         {
-            for document_id in mailbox_ids {
-                let parent_id = self
-                    .get_property::<Object<Value>>(
-                        account_id,
-                        Collection::Mailbox,
-                        document_id,
-                        Property::Value,
-                    )
-                    .await?
-                    .and_then(|o| {
-                        o.properties
+            for (document_id, value) in self
+                .get_properties::<Object<Value>, _, _>(
+                    account_id,
+                    Collection::Mailbox,
+                    &mailbox_ids,
+                    Property::Value,
+                )
+                .await?
+            {
+                let parent_id = value
+                    .properties
                     .get(&Property::ParentId)
                     .and_then(|id| id.as_id().map(|id| id.document_id()))
-                    })
                     .unwrap_or(0);
                 hierarchy.insert(document_id + 1, parent_id);
                 tree.entry(parent_id)
@@ -32,7 +32,8 @@ jemallocator = "0.5.0"
 
 [features]
 #default = ["sqlite", "foundationdb", "postgres", "mysql", "rocks", "elastic", "s3", "redis"]
-default = ["sqlite", "postgres", "mysql", "rocks", "elastic", "s3", "redis"]
+#default = ["sqlite", "postgres", "mysql", "rocks", "elastic", "s3", "redis"]
+default = ["foundationdb", "postgres"]
 sqlite = ["store/sqlite"]
 foundationdb = ["store/foundation"]
 postgres = ["store/postgres"]
@@ -74,12 +74,7 @@ impl PostgresStore {
     pub(super) async fn create_tables(&self) -> crate::Result<()> {
         let conn = self.conn_pool.get().await?;
 
-        for table in [
-            SUBSPACE_VALUES,
-            SUBSPACE_LOGS,
-            SUBSPACE_BLOBS,
-            SUBSPACE_BITMAPS,
-        ] {
+        for table in [SUBSPACE_VALUES, SUBSPACE_LOGS, SUBSPACE_BLOBS] {
             let table = char::from(table);
             conn.execute(
                 &format!(
@@ -93,7 +88,7 @@ impl PostgresStore {
             .await?;
         }
 
-        for table in [SUBSPACE_INDEXES] {
+        for table in [SUBSPACE_INDEXES, SUBSPACE_BITMAPS] {
             let table = char::from(table);
             conn.execute(
                 &format!(
@@ -56,19 +56,6 @@ impl Store {
         }
     }
 
-    pub async fn get_values<U>(&self, key: Vec<impl Key>) -> crate::Result<Vec<Option<U>>>
-    where
-        U: Deserialize + 'static,
-    {
-        let mut results = Vec::with_capacity(key.len());
-
-        for key in key {
-            results.push(self.get_value(key).await?);
-        }
-
-        Ok(results)
-    }
-
     pub async fn get_bitmap(
         &self,
         key: BitmapKey<BitmapClass>,
@@ -244,8 +244,8 @@ impl<T: AsRef<ValueClass> + Sync + Send> Key for ValueKey<T> {
                 .write(0u8)
                 .write(self.account_id)
                 .write(self.collection)
-                .write_leb128(self.document_id)
-                .write(*field),
+                .write(*field)
+                .write(self.document_id),
             ValueClass::TermIndex => serializer
                 .write(1u8)
                 .write(self.account_id)
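The reordering above is what makes the range scan in get_properties possible: the document id is now the last component of a property ValueKey and is written as a plain fixed-width value rather than a LEB128-encoded one, so all keys for one (account, collection, field) are contiguous and sort by document id. A sketch of the assumed key layout and of how the iterator callback recovers the id; the layout line is an illustration inferred from the write order above, not the authoritative definition:

    // [0u8][account_id][collection][field][document_id as big-endian u32]
    //
    // get_properties() reads the trailing four bytes back out of each visited key:
    let document_id = key.deserialize_be_u32(key.len() - U32_LEN)?;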
@@ -28,7 +28,10 @@ use std::{borrow::Borrow, io::Write};
 pub trait Leb128_ {
     fn to_leb128_writer(self, out: &mut impl Write) -> std::io::Result<usize>;
     fn to_leb128_bytes(self, out: &mut Vec<u8>);
-    fn from_leb128_bytes(slice: &[u8]) -> Option<(Self, usize)>
+    fn from_leb128_bytes_pos(slice: &[u8]) -> Option<(Self, usize)>
+    where
+        Self: std::marker::Sized;
+    fn from_leb128_bytes(slice: &[u8]) -> Option<Self>
     where
         Self: std::marker::Sized;
     fn from_leb128_it<T, I>(it: T) -> Option<Self>
@@ -79,7 +82,7 @@ where
 pub trait Leb128Reader: AsRef<[u8]> {
     #[inline(always)]
     fn read_leb128<T: Leb128_>(&self) -> Option<(T, usize)> {
-        T::from_leb128_bytes(self.as_ref())
+        T::from_leb128_bytes_pos(self.as_ref())
     }
 
     #[inline(always)]
@@ -133,7 +136,7 @@ macro_rules! impl_unsigned_leb128 {
         }
 
         #[inline(always)]
-        fn from_leb128_bytes(slice: &[u8]) -> Option<($int_ty, usize)> {
+        fn from_leb128_bytes_pos(slice: &[u8]) -> Option<($int_ty, usize)> {
             let mut result = 0;
 
             for (shift, (pos, &byte)) in $shifts.into_iter().zip(slice.iter().enumerate()) {
@@ -148,6 +151,22 @@ macro_rules! impl_unsigned_leb128 {
             None
         }
 
+        #[inline(always)]
+        fn from_leb128_bytes(slice: &[u8]) -> Option<$int_ty> {
+            let mut result = 0;
+
+            for (shift, &byte) in $shifts.into_iter().zip(slice.iter()) {
+                if (byte & 0x80) == 0 {
+                    result |= (byte as $int_ty) << shift;
+                    return Some(result);
+                } else {
+                    result |= ((byte & 0x7F) as $int_ty) << shift;
+                }
+            }
+
+            None
+        }
+
         #[inline(always)]
         fn from_leb128_it<T, I>(it: T) -> Option<$int_ty>
         where
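To summarize the LEB128 change: the existing decoder that returns the value together with the number of bytes consumed is renamed to from_leb128_bytes_pos (read_leb128 now forwards to it), and the new from_leb128_bytes returns only the decoded value for callers that do not care about the position. A minimal sketch of the two variants, assuming u32 as the target type and the Leb128_ trait in scope; the byte string is the standard LEB128 example for 624485:

    let bytes = [0xE5u8, 0x8E, 0x26];
    // Value plus number of bytes consumed (the old behaviour, renamed).
    let (value, consumed) = u32::from_leb128_bytes_pos(&bytes).unwrap();
    assert_eq!((value, consumed), (624485, 3));
    // Value only (the new convenience variant).
    assert_eq!(u32::from_leb128_bytes(&bytes), Some(624485));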
|
@ -6,7 +6,8 @@ resolver = "2"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
#default = ["sqlite", "foundationdb", "postgres", "mysql", "rocks", "elastic", "s3", "redis"]
|
#default = ["sqlite", "foundationdb", "postgres", "mysql", "rocks", "elastic", "s3", "redis"]
|
||||||
default = ["sqlite", "postgres", "mysql", "rocks", "elastic", "s3", "redis"]
|
#default = ["sqlite", "postgres", "mysql", "rocks", "elastic", "s3", "redis"]
|
||||||
|
default = ["foundationdb", "postgres"]
|
||||||
sqlite = ["store/sqlite"]
|
sqlite = ["store/sqlite"]
|
||||||
foundationdb = ["store/foundation"]
|
foundationdb = ["store/foundation"]
|
||||||
postgres = ["store/postgres"]
|
postgres = ["store/postgres"]
|
||||||
|
|
|
||||||
|
|
@@ -56,7 +56,6 @@ pub async fn test(db: Store) {
         BatchBuilder::new()
             .with_account_id(0)
             .with_collection(0)
-            .with_account_id(0)
             .update_document(0)
             .set(ValueClass::Property(1), value.as_slice())
             .set(ValueClass::Property(0), "check1")
@@ -86,7 +85,6 @@ pub async fn test(db: Store) {
         BatchBuilder::new()
             .with_account_id(0)
             .with_collection(0)
-            .with_account_id(0)
             .update_document(0)
             .clear(ValueClass::Property(1))
             .build_batch(),
@@ -459,26 +459,21 @@ pub async fn test_filter(db: Store, fts: FtsStore) {
         .await
         .unwrap();
 
-        assert_eq!(
-            db.get_values::<String>(
-                sorted_docset
-                    .ids
-                    .into_iter()
-                    .map(|document_id| ValueKey {
-                        account_id: 0,
-                        collection: COLLECTION_ID,
-                        document_id: document_id as u32,
-                        class: ValueClass::Property(fields_u8["accession_number"])
-                    })
-                    .collect()
-            )
-            .await
-            .unwrap()
-            .into_iter()
-            .flatten()
-            .collect::<Vec<_>>(),
-            expected_results
-        );
+        let mut results = Vec::new();
+        for document_id in sorted_docset.ids {
+            results.push(
+                db.get_value::<String>(ValueKey {
+                    account_id: 0,
+                    collection: COLLECTION_ID,
+                    document_id: document_id as u32,
+                    class: ValueClass::Property(fields_u8["accession_number"]),
+                })
+                .await
+                .unwrap()
+                .unwrap(),
+            );
+        }
+        assert_eq!(results, expected_results);
     }
 }
@@ -554,25 +549,20 @@ pub async fn test_sort(db: Store) {
         .await
         .unwrap();
 
-        assert_eq!(
-            db.get_values::<String>(
-                sorted_docset
-                    .ids
-                    .into_iter()
-                    .map(|document_id| ValueKey {
-                        account_id: 0,
-                        collection: COLLECTION_ID,
-                        document_id: document_id as u32,
-                        class: ValueClass::Property(fields["accession_number"])
-                    })
-                    .collect()
-            )
-            .await
-            .unwrap()
-            .into_iter()
-            .flatten()
-            .collect::<Vec<_>>(),
-            expected_results
-        );
+        let mut results = Vec::new();
+        for document_id in sorted_docset.ids {
+            results.push(
+                db.get_value::<String>(ValueKey {
+                    account_id: 0,
+                    collection: COLLECTION_ID,
+                    document_id: document_id as u32,
+                    class: ValueClass::Property(fields["accession_number"]),
+                })
+                .await
+                .unwrap()
+                .unwrap(),
+            );
+        }
+        assert_eq!(results, expected_results);
     }
 }