mirror of
https://github.com/Foundry376/Mailspring.git
synced 2025-01-27 02:19:46 +08:00
feat(thread-search): Add client side search
Summary: This diff includes several updates: - Update sqlite version to use FTS5 - Adds new methods to DatabaseStore to create and update Search Indexes - Currently indexing subject, participants, and thread message bodies or snippets if the body is not available - Update initial sync to fetch 5000 message bodies per account - Adds a new SearchMatcher - Add new thread-search-index package to run in the worker window to init and keep thread search index up to date - Converts Thread to ES6 TODO: - Remove/Update suggestions dropdown - Add tests Test Plan: - TODO Reviewers: evan, bengotow Reviewed By: evan, bengotow Differential Revision: https://phab.nylas.com/D2826
This commit is contained in:
parent
79b365e930
commit
8a1aa6e9bb
19 changed files with 688 additions and 237 deletions
|
@ -1,15 +1,13 @@
|
|||
_ = require 'underscore'
|
||||
React = require 'react'
|
||||
{Actions} = require 'nylas-exports'
|
||||
{Actions, Utils} = require 'nylas-exports'
|
||||
{InjectedComponentSet, ListTabular} = require 'nylas-component-kit'
|
||||
|
||||
|
||||
snippet = (html) =>
|
||||
return "" unless html and typeof(html) is 'string'
|
||||
try
|
||||
@draftSanitizer ?= document.createElement('div')
|
||||
@draftSanitizer.innerHTML = html[0..400]
|
||||
text = @draftSanitizer.innerText
|
||||
text = Utils.extractTextFromHtml(html, maxLength: 400)
|
||||
text[0..200]
|
||||
catch
|
||||
return ""
|
||||
|
|
|
@ -47,7 +47,7 @@ class SearchSuggestionStore extends NylasStore
|
|||
next = MailboxPerspective.forSearch(current.accountIds, @_searchQuery.trim())
|
||||
Actions.focusMailboxPerspective(next)
|
||||
|
||||
else if FocusedPerspectiveStore.current().searchQuery
|
||||
else if current.isSearch()
|
||||
if @_perspectiveBeforeSearch
|
||||
Actions.focusMailboxPerspective(@_perspectiveBeforeSearch)
|
||||
@_perspectiveBeforeSearch = null
|
||||
|
|
10
internal_packages/thread-search-index/lib/main.es6
Normal file
10
internal_packages/thread-search-index/lib/main.es6
Normal file
|
@ -0,0 +1,10 @@
|
|||
import SearchIndexStore from './search-index-store'

/*
Package entry point for thread-search-index. This package runs in the
worker window (see "windowTypes" in package.json) and simply delegates
the package lifecycle to the SearchIndexStore singleton, which builds
and maintains the thread full-text search index.
*/

// Called by the package manager when the package is loaded.
export function activate() {
  SearchIndexStore.activate()
}

// Called by the package manager when the package is unloaded.
export function deactivate() {
  SearchIndexStore.deactivate()
}
|
172
internal_packages/thread-search-index/lib/search-index-store.es6
Normal file
172
internal_packages/thread-search-index/lib/search-index-store.es6
Normal file
|
@ -0,0 +1,172 @@
|
|||
import _ from 'underscore'
|
||||
import {
|
||||
Utils,
|
||||
Thread,
|
||||
AccountStore,
|
||||
DatabaseStore,
|
||||
NylasSyncStatusStore,
|
||||
QuotedHTMLTransformer,
|
||||
} from 'nylas-exports'
|
||||
|
||||
const INDEX_SIZE = 10000
|
||||
const MAX_INDEX_SIZE = 25000
|
||||
const CHUNKS_PER_ACCOUNT = 10
|
||||
const INDEXING_WAIT = 1000
|
||||
const MESSAGE_BODY_LENGTH = 50000
|
||||
|
||||
|
||||
class SearchIndexStore {
|
||||
|
||||
constuctor() {
|
||||
this.unsubscribers = []
|
||||
}
|
||||
|
||||
activate() {
|
||||
NylasSyncStatusStore.whenSyncComplete().then(() => {
|
||||
const date = Date.now()
|
||||
const accountIds = AccountStore.accounts().map(acc => acc.id)
|
||||
console.log('ThreadSearch: Initializing thread search index...')
|
||||
this.initializeIndex(accountIds)
|
||||
.then(() => {
|
||||
console.log('ThreadSearch: Index built successfully in ' + ((Date.now() - date) / 1000) + 's')
|
||||
this.unsubscribers = [
|
||||
DatabaseStore.listen(::this.onDataChanged),
|
||||
AccountStore.listen(::this.onAccountsChanged),
|
||||
]
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
initializeIndex(accountIds) {
|
||||
return DatabaseStore.searchIndexSize(Thread)
|
||||
.then((size) => {
|
||||
console.log('ThreadSearch: Current index size is ' + (size || 0) + ' threads')
|
||||
if (!size || size >= MAX_INDEX_SIZE || size === 0) {
|
||||
return this.clearIndex().thenReturn(true)
|
||||
}
|
||||
return Promise.resolve(false)
|
||||
})
|
||||
.then((shouldRebuild) => {
|
||||
if (shouldRebuild) {
|
||||
return this.buildIndex(accountIds)
|
||||
}
|
||||
return Promise.resolve()
|
||||
})
|
||||
}
|
||||
|
||||
onAccountsChanged() {
|
||||
const date = Date.now()
|
||||
const accountIds = AccountStore.accounts().map(acc => acc.id)
|
||||
return this.clearIndex()
|
||||
.then(() => this.buildIndex(accountIds))
|
||||
.then(() => {
|
||||
console.log('ThreadSearch: Index rebuilt successfully in ' + ((Date.now() - date) / 1000) + 's')
|
||||
})
|
||||
}
|
||||
|
||||
onDataChanged(change) {
|
||||
if (change.objectClass !== Thread.name) {
|
||||
return;
|
||||
}
|
||||
const {objects, type} = change
|
||||
let promises = []
|
||||
if (type === 'persist') {
|
||||
promises = objects.map(thread => this.updateThreadIndex(thread))
|
||||
} else if (type === 'unpersist') {
|
||||
promises = objects.map(thread => DatabaseStore.unindexModel(thread))
|
||||
}
|
||||
Promise.all(promises)
|
||||
}
|
||||
|
||||
clearIndex() {
|
||||
return (
|
||||
DatabaseStore.dropSearchIndex(Thread)
|
||||
.then(() => DatabaseStore.createSearchIndex(Thread))
|
||||
)
|
||||
}
|
||||
|
||||
buildIndex(accountIds) {
|
||||
const numAccounts = accountIds.length
|
||||
return Promise.resolve(accountIds)
|
||||
.each((accountId) => (
|
||||
this.indexThreadsForAccount(accountId, Math.floor(INDEX_SIZE / numAccounts))
|
||||
))
|
||||
}
|
||||
|
||||
indexThreadsForAccount(accountId, indexSize) {
|
||||
const chunkSize = Math.floor(indexSize / CHUNKS_PER_ACCOUNT)
|
||||
const chunks = Promise.resolve(_.times(CHUNKS_PER_ACCOUNT, () => chunkSize))
|
||||
|
||||
return chunks.each((size, idx) => {
|
||||
return DatabaseStore.findAll(Thread)
|
||||
.where({accountId})
|
||||
.limit(size)
|
||||
.offset(size * idx)
|
||||
.order(Thread.attributes.lastMessageReceivedTimestamp.descending())
|
||||
.then((threads) => {
|
||||
return Promise.all(
|
||||
threads.map(thread => this.indexThread(thread))
|
||||
).then(() => {
|
||||
return new Promise((resolve) => setTimeout(resolve, INDEXING_WAIT))
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
indexThread(thread) {
|
||||
return (
|
||||
this.getIndexData(thread)
|
||||
.then((indexData) => (
|
||||
DatabaseStore.indexModel(thread, indexData)
|
||||
))
|
||||
)
|
||||
}
|
||||
|
||||
updateThreadIndex(thread) {
|
||||
return (
|
||||
this.getIndexData(thread)
|
||||
.then((indexData) => (
|
||||
DatabaseStore.updateModelIndex(thread, indexData)
|
||||
))
|
||||
)
|
||||
}
|
||||
|
||||
getIndexData(thread) {
|
||||
const messageBodies = (
|
||||
thread.messages()
|
||||
.then((messages) => (
|
||||
Promise.resolve(
|
||||
messages
|
||||
.map(({body, snippet}) => (
|
||||
!_.isString(body) ?
|
||||
{snippet} :
|
||||
{body: QuotedHTMLTransformer.removeQuotedHTML(body)}
|
||||
))
|
||||
.map(({body, snippet}) => (
|
||||
snippet ?
|
||||
snippet :
|
||||
Utils.extractTextFromHtml(body, {maxLength: MESSAGE_BODY_LENGTH}).replace(/(\s)+/g, ' ')
|
||||
))
|
||||
.join(' ')
|
||||
)
|
||||
))
|
||||
)
|
||||
const participants = (
|
||||
thread.participants
|
||||
.map(({name, email}) => `${name} ${email}`)
|
||||
.join(" ")
|
||||
)
|
||||
|
||||
return Promise.props({
|
||||
participants,
|
||||
body: messageBodies,
|
||||
subject: thread.subject,
|
||||
})
|
||||
}
|
||||
|
||||
deactivate() {
|
||||
this.unsubscribers.forEach(unsub => unsub())
|
||||
}
|
||||
}
|
||||
|
||||
export default new SearchIndexStore();
|
17
internal_packages/thread-search-index/package.json
Normal file
17
internal_packages/thread-search-index/package.json
Normal file
|
@ -0,0 +1,17 @@
|
|||
{
|
||||
"name": "thread-search-index",
|
||||
"version": "0.1.0",
|
||||
"main": "./lib/main",
|
||||
"description": "Keeps search index up to date",
|
||||
"license": "GPL-3.0",
|
||||
"private": true,
|
||||
"engines": {
|
||||
"nylas": "*"
|
||||
},
|
||||
"dependencies": {
|
||||
},
|
||||
"windowTypes": {
|
||||
"work": true
|
||||
}
|
||||
}
|
||||
|
|
@ -118,6 +118,7 @@ class NylasSyncWorker
|
|||
|
||||
needed = [
|
||||
{model: 'threads'},
|
||||
{model: 'messages', maxFetchCount: 5000}
|
||||
{model: @_account.categoryCollection(), initialPageSize: 1000}
|
||||
{model: 'drafts'},
|
||||
{model: 'contacts'},
|
||||
|
@ -129,8 +130,8 @@ class NylasSyncWorker
|
|||
return if needed.length is 0
|
||||
|
||||
@fetchAllMetadata =>
|
||||
needed.forEach ({model, initialPageSize}) =>
|
||||
@fetchCollection(model, initialPageSize)
|
||||
needed.forEach ({model, initialPageSize, maxFetchCount}) =>
|
||||
@fetchCollection(model, {initialPageSize, maxFetchCount})
|
||||
|
||||
fetchAllMetadata: (finished) ->
|
||||
@_metadata = {}
|
||||
|
@ -162,7 +163,8 @@ class NylasSyncWorker
|
|||
return false if state.busy
|
||||
return true
|
||||
|
||||
fetchCollection: (model, initialPageSize = INITIAL_PAGE_SIZE) ->
|
||||
fetchCollection: (model, {initialPageSize, maxFetchCount} = {}) ->
|
||||
initialPageSize ?= INITIAL_PAGE_SIZE
|
||||
state = @_state[model] ? {}
|
||||
state.complete = false
|
||||
state.error = null
|
||||
|
@ -171,52 +173,63 @@ class NylasSyncWorker
|
|||
|
||||
if not state.count
|
||||
state.count = 0
|
||||
@fetchCollectionCount(model)
|
||||
@fetchCollectionCount(model, maxFetchCount)
|
||||
|
||||
if state.errorRequestRange
|
||||
{limit, offset} = state.errorRequestRange
|
||||
if state.fetched + limit > maxFetchCount
|
||||
limit = maxFetchCount - state.fetched
|
||||
state.errorRequestRange = null
|
||||
@fetchCollectionPage(model, {limit, offset})
|
||||
@fetchCollectionPage(model, {limit, offset}, {maxFetchCount})
|
||||
else
|
||||
limit = initialPageSize
|
||||
if state.fetched + limit > maxFetchCount
|
||||
limit = maxFetchCount - state.fetched
|
||||
@fetchCollectionPage(model, {
|
||||
limit: initialPageSize,
|
||||
limit: limit,
|
||||
offset: 0
|
||||
})
|
||||
}, {maxFetchCount})
|
||||
|
||||
@_state[model] = state
|
||||
@writeState()
|
||||
|
||||
fetchCollectionCount: (model) ->
|
||||
fetchCollectionCount: (model, maxFetchCount) ->
|
||||
@_fetchWithErrorHandling
|
||||
path: "/#{model}"
|
||||
qs: {view: 'count'}
|
||||
success: (response) =>
|
||||
@updateTransferState(model, count: response.count)
|
||||
@updateTransferState(model, count: Math.min(response.count, maxFetchCount ? response.count))
|
||||
|
||||
fetchCollectionPage: (model, params = {}) ->
|
||||
fetchCollectionPage: (model, params = {}, options = {}) ->
|
||||
requestStartTime = Date.now()
|
||||
requestOptions =
|
||||
metadataToAttach: @_metadata
|
||||
|
||||
error: (err) =>
|
||||
return if @_terminated
|
||||
@_fetchCollectionPageError(model, params, err)
|
||||
@_onFetchCollectionPageError(model, params, err)
|
||||
|
||||
success: (json) =>
|
||||
return if @_terminated
|
||||
|
||||
if model in ["labels", "folders"] and @_hasNoInbox(json)
|
||||
@_fetchCollectionPageError(model, params, "No inbox in #{model}")
|
||||
@_onFetchCollectionPageError(model, params, "No inbox in #{model}")
|
||||
return
|
||||
|
||||
lastReceivedIndex = params.offset + json.length
|
||||
moreToFetch = json.length is params.limit
|
||||
moreToFetch = if options.maxFetchCount
|
||||
json.length is params.limit and lastReceivedIndex < options.maxFetchCount
|
||||
else
|
||||
json.length is params.limit
|
||||
|
||||
if moreToFetch
|
||||
nextParams = _.extend({}, params, {offset: lastReceivedIndex})
|
||||
nextParams.limit = Math.min(Math.round(params.limit * 1.5), MAX_PAGE_SIZE)
|
||||
limit = Math.min(Math.round(params.limit * 1.5), MAX_PAGE_SIZE)
|
||||
if options.maxFetchCount
|
||||
limit = Math.min(limit, options.maxFetchCount - lastReceivedIndex)
|
||||
nextParams.limit = limit
|
||||
nextDelay = Math.max(0, 1500 - (Date.now() - requestStartTime))
|
||||
setTimeout(( => @fetchCollectionPage(model, nextParams)), nextDelay)
|
||||
setTimeout(( => @fetchCollectionPage(model, nextParams, options)), nextDelay)
|
||||
|
||||
@updateTransferState(model, {
|
||||
fetched: lastReceivedIndex,
|
||||
|
@ -252,7 +265,7 @@ class NylasSyncWorker
|
|||
@_backoff()
|
||||
error(err) if error
|
||||
|
||||
_fetchCollectionPageError: (model, params, err) ->
|
||||
_onFetchCollectionPageError: (model, params, err) ->
|
||||
@_backoff()
|
||||
@updateTransferState(model, {
|
||||
busy: false,
|
||||
|
|
|
@ -60,16 +60,16 @@ describe "NylasSyncWorker", ->
|
|||
it "should start querying for model collections and counts that haven't been fully cached", ->
|
||||
@worker.start()
|
||||
advanceClock()
|
||||
expect(@apiRequests.length).toBe(10)
|
||||
expect(@apiRequests.length).toBe(12)
|
||||
modelsRequested = _.compact _.map @apiRequests, ({model}) -> model
|
||||
expect(modelsRequested).toEqual(['threads', 'labels', 'drafts', 'contacts', 'events'])
|
||||
expect(modelsRequested).toEqual(['threads', 'messages', 'labels', 'drafts', 'contacts', 'events'])
|
||||
|
||||
countsRequested = _.compact _.map @apiRequests, ({requestOptions}) ->
|
||||
if requestOptions.qs?.view is 'count'
|
||||
return requestOptions.path
|
||||
|
||||
expect(modelsRequested).toEqual(['threads', 'labels', 'drafts', 'contacts', 'events'])
|
||||
expect(countsRequested).toEqual(['/threads', '/labels', '/drafts', '/contacts', '/events'])
|
||||
expect(modelsRequested).toEqual(['threads', 'messages', 'labels', 'drafts', 'contacts', 'events'])
|
||||
expect(countsRequested).toEqual(['/threads', '/messages', '/labels', '/drafts', '/contacts', '/events'])
|
||||
|
||||
it "should fetch 1000 labels and folders, to prevent issues where Inbox is not in the first page", ->
|
||||
labelsRequest = _.find @apiRequests, (r) -> r.model is 'labels'
|
||||
|
@ -295,6 +295,55 @@ describe "NylasSyncWorker", ->
|
|||
expect(@worker._state.threads.count).toBe(1200)
|
||||
expect(@apiRequests.length).toBe(1)
|
||||
|
||||
describe 'when maxFetchCount option is specified', ->
  it "should only fetch maxFetch count on the first request if it is less than initialPageSize", ->
    @worker._state.messages =
      count: 1000
      fetched: 0
    @worker.fetchCollection('messages', {initialPageSize: 30, maxFetchCount: 25})
    expect(@apiRequests[0].params.offset).toBe 0
    expect(@apiRequests[0].params.limit).toBe 25

  # FIX: test description read "sould" — typo in the spec name.
  it "should only fetch the maxFetchCount when restoring from saved state", ->
    @worker._state.messages =
      count: 1000
      fetched: 470
      errorRequestRange: {
        limit: 50,
        offset: 470,
      }
    @worker.fetchCollection('messages', {maxFetchCount: 500})
    expect(@apiRequests[0].params.offset).toBe 470
    expect(@apiRequests[0].params.limit).toBe 30
|
||||
|
||||
describe "fetchCollectionPage", ->
|
||||
beforeEach ->
|
||||
@apiRequests = []
|
||||
|
||||
describe 'when maxFetchCount option is specified', ->
|
||||
it 'should not fetch next page if maxFetchCount has been reached', ->
|
||||
@worker._state.messages =
|
||||
count: 1000
|
||||
fetched: 470
|
||||
@worker.fetchCollectionPage('messages', {limit: 30, offset: 470}, {maxFetchCount: 500})
|
||||
{success} = @apiRequests[0].requestOptions
|
||||
success({length: 30})
|
||||
expect(@worker._state.messages.fetched).toBe 500
|
||||
advanceClock(2000)
|
||||
expect(@apiRequests.length).toBe 1
|
||||
|
||||
it 'should limit by maxFetchCount when requesting the next page', ->
|
||||
@worker._state.messages =
|
||||
count: 1000
|
||||
fetched: 450
|
||||
@worker.fetchCollectionPage('messages', {limit: 30, offset: 450 }, {maxFetchCount: 500})
|
||||
{success} = @apiRequests[0].requestOptions
|
||||
success({length: 30})
|
||||
expect(@worker._state.messages.fetched).toBe 480
|
||||
advanceClock(2000)
|
||||
expect(@apiRequests[1].params.offset).toBe 480
|
||||
expect(@apiRequests[1].params.limit).toBe 20
|
||||
|
||||
describe "when an API request completes", ->
|
||||
beforeEach ->
|
||||
@worker.start()
|
||||
|
|
|
@ -48,7 +48,7 @@ function makeSqlite3Command() {
|
|||
|
||||
// Use our local version of npm (npm 3x) to build sqlite
|
||||
var npmPath = '"' + path.resolve(__dirname, '..', 'build', 'node_modules', '.bin', 'npm') + '"';
|
||||
return npmPath + " install https://github.com/bengotow/node-sqlite3/archive/bengotow/usleep.tar.gz --ignore-scripts && cd node_modules/sqlite3 && "+nodeGypPath+" configure rebuild --target="+targetElectronVersion+" --arch="+targetArch+" --target_platform="+targetPlatform+" --dist-url=https://atom.io/download/atom-shell --module_name=node_sqlite3 --module_path=../lib/binding/electron-v0.36-"+targetPlatform+"-"+targetArch
|
||||
return npmPath + " install https://github.com/bengotow/node-sqlite3/archive/bengotow/fts5.tar.gz --ignore-scripts && cd node_modules/sqlite3 && "+nodeGypPath+" configure rebuild --target="+targetElectronVersion+" --arch="+targetArch+" --target_platform="+targetPlatform+" --dist-url=https://atom.io/download/atom-shell --module_name=node_sqlite3 --module_path=../lib/binding/electron-v0.36-"+targetPlatform+"-"+targetArch
|
||||
}
|
||||
|
||||
function bootstrap() {
|
||||
|
|
|
@ -205,9 +205,36 @@ class NotCompositeMatcher extends Matcher
|
|||
wheres.push(matcher.whereSQL(klass))
|
||||
return "NOT (" + wheres.join(" AND ") + ")"
|
||||
|
||||
# Matcher that joins a model table against its FTS search-index table
# (e.g. `ThreadSearch`) and applies an FTS MATCH clause for the query.
class SearchMatcher extends Matcher
  constructor: (@searchQuery) ->
    super(null, null, null)
    @

  attribute: =>
    null

  value: =>
    null

  # The only way to truly check if a model matches this matcher is to run the query
  # again and check if the model is in the results. This is too expensive, so we
  # will always return true so models aren't excluded from the
  # SearchQuerySubscription result set
  evaluate: (model) =>
    true

  joinSQL: (klass) =>
    searchTable = "#{klass.name}Search"
    return "INNER JOIN `#{searchTable}` AS `M#{@muid}` ON `M#{@muid}`.`content_id` = `#{klass.name}`.`id`"

  whereSQL: (klass) =>
    # FIX: escape single quotes so a query containing `'` cannot terminate
    # the SQL string literal (syntax error / SQL injection). Double quotes
    # inside the query can still end the FTS phrase early, but cannot escape
    # the SQL string — NOTE(review): consider binding this as a parameter.
    escapedQuery = @searchQuery.replace(/'/g, "''")
    searchTable = "#{klass.name}Search"
    return "`#{searchTable}` MATCH '\"#{escapedQuery}\"'"
|
||||
|
||||
Matcher.muid = 0
|
||||
Matcher.Or = OrCompositeMatcher
|
||||
Matcher.And = AndCompositeMatcher
|
||||
Matcher.Not = NotCompositeMatcher
|
||||
Matcher.Search = SearchMatcher
|
||||
|
||||
module.exports = Matcher
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
_ = require 'underscore'
|
||||
DatabaseChangeRecord = require '../stores/database-change-record'
|
||||
QuerySubscription = require './query-subscription'
|
||||
|
||||
###
|
||||
|
|
|
@ -102,6 +102,11 @@ class ModelQuery
|
|||
@_matchers.push(new Matcher.Or(matchers))
|
||||
@
|
||||
|
||||
search: (query) ->
|
||||
@_assertNotFinalized()
|
||||
@_matchers.push(new Matcher.Search(query))
|
||||
@
|
||||
|
||||
# Public: Include specific joined data attributes in result objects.
|
||||
# - `attr` A {AttributeJoinedData} that you want to be populated in
|
||||
# the returned models. Note: This results in a LEFT OUTER JOIN.
|
||||
|
|
|
@ -1,187 +0,0 @@
|
|||
_ = require 'underscore'
|
||||
|
||||
Category = require './category'
|
||||
Contact = require './contact'
|
||||
Actions = require '../actions'
|
||||
Attributes = require '../attributes'
|
||||
ModelWithMetadata = require './model-with-metadata'
|
||||
|
||||
Function::getter = (prop, get) ->
|
||||
Object.defineProperty @prototype, prop, {get, configurable: yes}
|
||||
|
||||
###
|
||||
Public: The Thread model represents a Thread object served by the Nylas Platform API.
|
||||
For more information about Threads on the Nylas Platform, read the
|
||||
[Threads API Documentation](https://nylas.com/docs/api#threads)
|
||||
|
||||
## Attributes
|
||||
|
||||
`snippet`: {AttributeString} A short, ~140 character string with the content
|
||||
of the last message in the thread. Queryable.
|
||||
|
||||
`subject`: {AttributeString} The subject of the thread. Queryable.
|
||||
|
||||
`unread`: {AttributeBoolean} True if the thread is unread. Queryable.
|
||||
|
||||
`starred`: {AttributeBoolean} True if the thread is starred. Queryable.
|
||||
|
||||
`version`: {AttributeNumber} The version number of the thread.
|
||||
|
||||
`participants`: {AttributeCollection} A set of {Contact} models
|
||||
representing the participants in the thread.
|
||||
Note: Contacts on Threads do not have IDs.
|
||||
|
||||
`lastMessageReceivedTimestamp`: {AttributeDateTime} The timestamp of the
|
||||
last message on the thread.
|
||||
|
||||
This class also inherits attributes from {Model}
|
||||
|
||||
Section: Models
|
||||
###
|
||||
class Thread extends ModelWithMetadata
|
||||
|
||||
@attributes: _.extend {}, ModelWithMetadata.attributes,
|
||||
'snippet': Attributes.String
|
||||
modelKey: 'snippet'
|
||||
|
||||
'subject': Attributes.String
|
||||
queryable: true
|
||||
modelKey: 'subject'
|
||||
|
||||
'unread': Attributes.Boolean
|
||||
queryable: true
|
||||
modelKey: 'unread'
|
||||
|
||||
'starred': Attributes.Boolean
|
||||
queryable: true
|
||||
modelKey: 'starred'
|
||||
|
||||
'version': Attributes.Number
|
||||
queryable: true
|
||||
modelKey: 'version'
|
||||
|
||||
'categories': Attributes.Collection
|
||||
queryable: true
|
||||
modelKey: 'categories'
|
||||
itemClass: Category
|
||||
|
||||
'categoriesType': Attributes.String
|
||||
modelKey: 'categoriesType'
|
||||
|
||||
'participants': Attributes.Collection
|
||||
queryable: true
|
||||
joinOnField: 'email'
|
||||
modelKey: 'participants'
|
||||
itemClass: Contact
|
||||
|
||||
'hasAttachments': Attributes.Boolean
|
||||
modelKey: 'has_attachments'
|
||||
|
||||
'lastMessageReceivedTimestamp': Attributes.DateTime
|
||||
queryable: true
|
||||
modelKey: 'lastMessageReceivedTimestamp'
|
||||
jsonKey: 'last_message_received_timestamp'
|
||||
|
||||
'lastMessageSentTimestamp': Attributes.DateTime
|
||||
queryable: true
|
||||
modelKey: 'lastMessageSentTimestamp'
|
||||
jsonKey: 'last_message_sent_timestamp'
|
||||
|
||||
'inAllMail': Attributes.Boolean
|
||||
queryable: true
|
||||
modelKey: 'inAllMail'
|
||||
jsonKey: 'in_all_mail'
|
||||
|
||||
Object.defineProperty @attributes, "labels",
|
||||
enumerable: false
|
||||
get: -> @categories
|
||||
|
||||
Object.defineProperty @attributes, "folders",
|
||||
enumerable: false
|
||||
get: -> @categories
|
||||
|
||||
Object.defineProperty @prototype, "labels",
|
||||
enumerable: false
|
||||
get: -> @categories
|
||||
set: (v) -> @categories = v
|
||||
|
||||
Object.defineProperty @prototype, "folders",
|
||||
enumerable: false
|
||||
get: -> @categories
|
||||
set: (v) -> @categories = v
|
||||
|
||||
Object.defineProperty @prototype, "inAllMail",
|
||||
enumerable: false
|
||||
get: ->
|
||||
if @categoriesType is 'labels'
|
||||
inAllMail = _.any @categories, (cat) -> cat.name is 'all'
|
||||
return true if inAllMail
|
||||
inTrashOrSpam = _.any @categories, (cat) -> cat.name is 'trash' or cat.name is 'spam'
|
||||
return true if not inTrashOrSpam
|
||||
return false
|
||||
else
|
||||
return true
|
||||
|
||||
@naturalSortOrder: ->
|
||||
Thread.attributes.lastMessageReceivedTimestamp.descending()
|
||||
|
||||
@additionalSQLiteConfig:
|
||||
setup: ->
|
||||
['CREATE TABLE IF NOT EXISTS `Thread-Counts` (`category_id` TEXT PRIMARY KEY, `unread` INTEGER, `total` INTEGER)',
|
||||
'CREATE UNIQUE INDEX IF NOT EXISTS ThreadCountsIndex ON `Thread-Counts` (category_id DESC)',
|
||||
'CREATE INDEX IF NOT EXISTS ThreadListIndex ON Thread(last_message_received_timestamp DESC, id)',
|
||||
'CREATE INDEX IF NOT EXISTS ThreadListSentIndex ON Thread(last_message_sent_timestamp DESC, id)',
|
||||
'CREATE INDEX IF NOT EXISTS ThreadStarIndex ON Thread(account_id, starred)']
|
||||
|
||||
fromJSON: (json) ->
|
||||
super(json)
|
||||
|
||||
if json['folders']
|
||||
@categoriesType = 'folders'
|
||||
@categories = @constructor.attributes.categories.fromJSON(json['folders'])
|
||||
|
||||
if json['labels']
|
||||
@categoriesType = 'labels'
|
||||
@categories = @constructor.attributes.categories.fromJSON(json['labels'])
|
||||
|
||||
for attr in ['participants', 'categories']
|
||||
value = @[attr]
|
||||
continue unless value and value instanceof Array
|
||||
item.accountId = @accountId for item in value
|
||||
|
||||
@
|
||||
|
||||
# Public: Returns true if the thread has a {Category} with the given
|
||||
# name. Note, only catgories of type `Category.Types.Standard` have valid
|
||||
# `names`
|
||||
#
|
||||
# * `id` A {String} {Category} name
|
||||
#
|
||||
categoryNamed: (name) -> return _.findWhere(@categories, {name})
|
||||
|
||||
sortedCategories: ->
|
||||
return [] unless @labels
|
||||
out = []
|
||||
|
||||
CategoryStore = require '../stores/category-store'
|
||||
|
||||
isImportant = (l) -> l.name is 'important'
|
||||
isStandardCategory = (l) -> l.isStandardCategory()
|
||||
isUnhiddenStandardLabel = (l) ->
|
||||
not isImportant(l) and \
|
||||
isStandardCategory(l) and\
|
||||
not (l.isHiddenCategory())
|
||||
|
||||
importantLabel = _.find @labels, isImportant
|
||||
out = out.concat importantLabel if importantLabel
|
||||
|
||||
standardLabels = _.filter @labels, isUnhiddenStandardLabel
|
||||
out = out.concat standardLabels if standardLabels.length
|
||||
|
||||
userLabels = _.filter @labels, (l) ->
|
||||
not isImportant(l) and not isStandardCategory(l)
|
||||
out = out.concat _.sortBy(userLabels, 'displayName') if userLabels.length
|
||||
|
||||
out
|
||||
|
||||
module.exports = Thread
|
243
src/flux/models/thread.es6
Normal file
243
src/flux/models/thread.es6
Normal file
|
@ -0,0 +1,243 @@
|
|||
import _ from 'underscore'
|
||||
import Message from './message'
|
||||
import Contact from './contact'
|
||||
import Category from './category'
|
||||
import Attributes from '../attributes'
|
||||
import DatabaseStore from '../stores/database-store'
|
||||
import ModelWithMetadata from './model-with-metadata'
|
||||
|
||||
|
||||
/**
|
||||
Public: The Thread model represents a Thread object served by the Nylas Platform API.
|
||||
For more information about Threads on the Nylas Platform, read the
|
||||
[Threads API Documentation](https://nylas.com/docs/api#threads)
|
||||
|
||||
Attributes
|
||||
|
||||
`snippet`: {AttributeString} A short, ~140 character string with the content
|
||||
of the last message in the thread. Queryable.
|
||||
|
||||
`subject`: {AttributeString} The subject of the thread. Queryable.
|
||||
|
||||
`unread`: {AttributeBoolean} True if the thread is unread. Queryable.
|
||||
|
||||
`starred`: {AttributeBoolean} True if the thread is starred. Queryable.
|
||||
|
||||
`version`: {AttributeNumber} The version number of the thread.
|
||||
|
||||
`participants`: {AttributeCollection} A set of {Contact} models
|
||||
representing the participants in the thread.
|
||||
Note: Contacts on Threads do not have IDs.
|
||||
|
||||
`lastMessageReceivedTimestamp`: {AttributeDateTime} The timestamp of the
|
||||
last message on the thread.
|
||||
|
||||
This class also inherits attributes from {Model}
|
||||
|
||||
Section: Models
|
||||
@class Thread
|
||||
*/
|
||||
class Thread extends ModelWithMetadata {
|
||||
|
||||
static attributes = _.extend({}, ModelWithMetadata.attributes, {
|
||||
'snippet': Attributes.String({
|
||||
modelKey: 'snippet',
|
||||
}),
|
||||
|
||||
'subject': Attributes.String({
|
||||
queryable: true,
|
||||
modelKey: 'subject',
|
||||
}),
|
||||
|
||||
'unread': Attributes.Boolean({
|
||||
queryable: true,
|
||||
modelKey: 'unread',
|
||||
}),
|
||||
|
||||
'starred': Attributes.Boolean({
|
||||
queryable: true,
|
||||
modelKey: 'starred',
|
||||
}),
|
||||
|
||||
'version': Attributes.Number({
|
||||
queryable: true,
|
||||
modelKey: 'version',
|
||||
}),
|
||||
|
||||
'categories': Attributes.Collection({
|
||||
queryable: true,
|
||||
modelKey: 'categories',
|
||||
itemClass: Category,
|
||||
}),
|
||||
|
||||
'categoriesType': Attributes.String({
|
||||
modelKey: 'categoriesType',
|
||||
}),
|
||||
|
||||
'participants': Attributes.Collection({
|
||||
queryable: true,
|
||||
joinOnField: 'email',
|
||||
modelKey: 'participants',
|
||||
itemClass: Contact,
|
||||
}),
|
||||
|
||||
'hasAttachments': Attributes.Boolean({
|
||||
modelKey: 'has_attachments',
|
||||
}),
|
||||
|
||||
'lastMessageReceivedTimestamp': Attributes.DateTime({
|
||||
queryable: true,
|
||||
modelKey: 'lastMessageReceivedTimestamp',
|
||||
jsonKey: 'last_message_received_timestamp',
|
||||
}),
|
||||
|
||||
'lastMessageSentTimestamp': Attributes.DateTime({
|
||||
queryable: true,
|
||||
modelKey: 'lastMessageSentTimestamp',
|
||||
jsonKey: 'last_message_sent_timestamp',
|
||||
}),
|
||||
|
||||
'inAllMail': Attributes.Boolean({
|
||||
queryable: true,
|
||||
modelKey: 'inAllMail',
|
||||
jsonKey: 'in_all_mail',
|
||||
}),
|
||||
})
|
||||
|
||||
static naturalSortOrder = () => {
|
||||
return Thread.attributes.lastMessageReceivedTimestamp.descending()
|
||||
}
|
||||
|
||||
static additionalSQLiteConfig = {
|
||||
setup: () => [
|
||||
'CREATE TABLE IF NOT EXISTS `Thread-Counts` (`category_id` TEXT PRIMARY KEY, `unread` INTEGER, `total` INTEGER)',
|
||||
'CREATE UNIQUE INDEX IF NOT EXISTS ThreadCountsIndex ON `Thread-Counts` (category_id DESC)',
|
||||
'CREATE INDEX IF NOT EXISTS ThreadListIndex ON Thread(last_message_received_timestamp DESC, id)',
|
||||
'CREATE INDEX IF NOT EXISTS ThreadListSentIndex ON Thread(last_message_sent_timestamp DESC, id)',
|
||||
'CREATE INDEX IF NOT EXISTS ThreadStarIndex ON Thread(account_id, starred)',
|
||||
],
|
||||
}
|
||||
|
||||
static searchable = true
|
||||
|
||||
static searchFields = ['subject', 'participants', 'body']
|
||||
|
||||
messages() {
|
||||
return (
|
||||
DatabaseStore.findAll(Message)
|
||||
.where({threadId: this.id})
|
||||
.include(Message.attributes.body)
|
||||
)
|
||||
}
|
||||
|
||||
get labels() {
|
||||
return this.categories;
|
||||
}
|
||||
|
||||
set labels(labels) {
|
||||
this.categories = labels;
|
||||
}
|
||||
|
||||
get folders() {
|
||||
return this.categories;
|
||||
}
|
||||
|
||||
set folders(folders) {
|
||||
this.categories = folders;
|
||||
}
|
||||
|
||||
get inAllMail() {
|
||||
if (this.categoriesType === 'labels') {
|
||||
const inAllMail = _.any(this.categories, cat => cat.name === 'all')
|
||||
if (inAllMail) {
|
||||
return true;
|
||||
}
|
||||
const inTrashOrSpam = _.any(this.categories, cat => cat.name === 'trash' || cat.name === 'spam')
|
||||
if (!inTrashOrSpam) {
|
||||
return true;
|
||||
}
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
fromJSON(json) {
|
||||
super.fromJSON(json)
|
||||
|
||||
if (json.folders) {
|
||||
this.categoriesType = 'folders'
|
||||
this.categories = Thread.attributes.categories.fromJSON(json.folders)
|
||||
}
|
||||
|
||||
if (json.labels) {
|
||||
this.categoriesType = 'labels'
|
||||
this.categories = Thread.attributes.categories.fromJSON(json.labels)
|
||||
}
|
||||
|
||||
['participants', 'categories'].forEach((attr) => {
|
||||
const value = this[attr]
|
||||
if (!(value && value instanceof Array)) {
|
||||
return;
|
||||
}
|
||||
value.forEach((item) => {
|
||||
item.accountId = this.accountId
|
||||
})
|
||||
})
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Public: Returns true if the thread has a {Category} with the given
|
||||
* name. Note, only catgories of type `Category.Types.Standard` have valid
|
||||
* `names`
|
||||
* - `id` A {String} {Category} name
|
||||
*/
|
||||
categoryNamed(name) {
|
||||
return _.findWhere(this.categories, {name})
|
||||
}
|
||||
|
||||
sortedCategories() {
|
||||
if (!this.categories) {
|
||||
return []
|
||||
}
|
||||
let out = []
|
||||
const isImportant = (l) => l.name === 'important'
|
||||
const isStandardCategory = (l) => l.isStandardCategory()
|
||||
const isUnhiddenStandardLabel = (l) => (
|
||||
!isImportant(l) &&
|
||||
isStandardCategory(l) &&
|
||||
!(l.isHiddenCategory())
|
||||
)
|
||||
|
||||
const importantLabel = _.find(this.categories, isImportant)
|
||||
if (importantLabel) {
|
||||
out = out.concat(importantLabel)
|
||||
}
|
||||
|
||||
const standardLabels = _.filter(this.categories, isUnhiddenStandardLabel)
|
||||
if (standardLabels.length > 0) {
|
||||
out = out.concat(standardLabels)
|
||||
}
|
||||
|
||||
const userLabels = _.filter(this.categories, (l) => (
|
||||
!isImportant(l) && !isStandardCategory(l)
|
||||
))
|
||||
if (userLabels.length > 0) {
|
||||
out = out.concat(_.sortBy(userLabels, 'displayName'))
|
||||
}
|
||||
return out
|
||||
}
|
||||
}
|
||||
|
||||
// Back-compat aliases: `Thread.attributes.labels` and `.folders` both
// resolve to the unified `categories` attribute. Non-enumerable so they
// don't appear as real attributes during serialization/iteration.
for (const alias of ['labels', 'folders']) {
  Object.defineProperty(Thread.attributes, alias, {
    enumerable: false,
    get: () => Thread.attributes.categories,
  });
}
|
||||
|
||||
export default Thread
|
|
@ -22,6 +22,12 @@ Utils =
|
|||
window.requestAnimationFrame(attempt)
|
||||
attempt()
|
||||
|
||||
# Extracts plain text from an HTML string by parsing it into a detached
# document and reading the body's innerText.
#   html      - the HTML {String}; null/undefined/blank yields ""
#   maxLength - optional cap: only the first `maxLength` characters of the
#               raw HTML are parsed (keeps snippeting large bodies cheap;
#               note it may cut an HTML tag mid-way, which DOMParser tolerates)
extractTextFromHtml: (html, {maxLength} = {}) ->
  if (html ? "").trim().length is 0 then return ""
  if maxLength and html.length > maxLength
    html = html.slice(0, maxLength)
  (new DOMParser()).parseFromString(html, "text/html").body.innerText
|
||||
|
||||
registeredObjectReviver: (k,v) ->
|
||||
type = v?.__constructorName
|
||||
return v unless type
|
||||
|
|
|
@ -51,6 +51,11 @@ class DatabaseSetupQueryBuilder
|
|||
|
||||
if klass.additionalSQLiteConfig?.setup?
|
||||
queries = queries.concat(klass.additionalSQLiteConfig.setup())
|
||||
queries
|
||||
|
||||
if klass.searchable is true
|
||||
DatabaseStore = require './database-store'
|
||||
queries.push(DatabaseStore.createSearchIndexSql(klass))
|
||||
|
||||
return queries
|
||||
|
||||
module.exports = DatabaseSetupQueryBuilder
|
||||
|
|
|
@ -267,6 +267,7 @@ class DatabaseStore extends NylasStore
|
|||
str = results.map((row) -> row.detail).join('\n') + " for " + query
|
||||
return if str.indexOf("SCAN") is -1
|
||||
return if str.indexOf('Thread-Counts') > 0
|
||||
return if str.indexOf('ThreadSearch') > 0
|
||||
@_prettyConsoleLog(str)
|
||||
|
||||
# Important: once the user begins a transaction, queries need to run
|
||||
|
@ -510,6 +511,77 @@ class DatabaseStore extends NylasStore
|
|||
|
||||
return @_triggerPromise
|
||||
|
||||
# Builds the DDL for a model class's full-text search index: an fts5
# virtual table named "#{klass.name}Search" with a porter-stemmed unicode
# tokenizer, an UNINDEXED `content_id` column holding the model id, and one
# indexed column per entry in `klass.searchFields`.
# Returns the CREATE VIRTUAL TABLE {String}.
# Throws if no class is given or the class declares no `searchFields`.
createSearchIndexSql: (klass) =>
  throw new Error("DatabaseStore::createSearchIndexSql - You must provide a class") unless klass
  # Bug fix: this guard previously re-tested `klass`, so a class without
  # `searchFields` slipped through and produced malformed SQL below.
  throw new Error("DatabaseStore::createSearchIndexSql - #{klass.name} must expose an array of `searchFields`") unless klass.searchFields
  searchTableName = "#{klass.name}Search"
  searchFields = klass.searchFields
  return (
    "CREATE VIRTUAL TABLE IF NOT EXISTS `#{searchTableName}` " +
    "USING fts5(
      tokenize='porter unicode61',
      content_id UNINDEXED,
      #{searchFields.join(', ')}
    )"
  )
|
||||
|
||||
# Creates the fts5 search table for `klass` (no-op if it already exists).
# Returns the query {Promise}.
createSearchIndex: (klass) =>
  @_query(@createSearchIndexSql(klass))
|
||||
|
||||
# Counts the rows currently present in the search index for `klass`.
# Returns a {Promise} resolving to the row count.
searchIndexSize: (klass) =>
  countSql = "SELECT COUNT(content_id) as count from `#{klass.name}Search`"
  @_query(countSql).then((result) => result[0].count)
|
||||
|
||||
# Drops the fts5 search table for `klass`, if it exists.
# Returns the query {Promise}. Throws if no class is given.
dropSearchIndex: (klass) =>
  # Bug fix: the error message previously named createSearchIndex
  # (copy-paste), which would mislead anyone debugging a bad call site.
  throw new Error("DatabaseStore::dropSearchIndex - You must provide a class") unless klass
  searchTableName = "#{klass.name}Search"
  sql = "DROP TABLE IF EXISTS `#{searchTableName}`"
  @_query(sql)
|
||||
|
||||
# Inserts a new row into the model's fts5 table, keyed by content_id.
#   model     - the model being indexed; its constructor name picks the table
#   indexData - object mapping search-field names to the text to index
# Returns the query {Promise}.
indexModel: (model, indexData) =>
  table = "#{model.constructor.name}Search"
  fields = Object.keys(indexData)
  columnsSql = ['content_id'].concat(fields).join(', ')
  # One placeholder for content_id plus one per indexed field.
  placeholdersSql = ['?'].concat(fields.map(-> '?')).join(', ')
  values = [model.id].concat(fields.map((field) -> indexData[field]))
  @_query("INSERT INTO `#{table}`(#{columnsSql}) VALUES (#{placeholdersSql})", values)
|
||||
|
||||
# Updates the fts5 row for `model`, inserting one via `indexModel` if the
# model has not been indexed yet.
#   model     - the indexed model; its constructor name picks the table
#   indexData - object mapping search-field names to the new values
# Returns a {Promise} resolving when the write completes.
# NOTE(review): the SELECT-then-write pair is not atomic; presumably callers
# serialize writes per model — confirm if concurrent indexing is possible.
updateModelIndex: (model, indexData) =>
  searchTableName = "#{model.constructor.name}Search"
  # Probe for an existing row first so we know whether to INSERT or UPDATE.
  exists = (
    "SELECT rowid FROM `#{searchTableName}` WHERE `#{searchTableName}`.`content_id` = ?"
  )
  return @_query(exists, [model.id])
  .then((results) =>
    isIndexed = results.length > 0
    if (not isIndexed)
      return @indexModel(model, indexData)

    indexFields = Object.keys(indexData)
    # Placeholder order: one value per SET clause, then content_id for WHERE.
    values = indexFields.map((key) => indexData[key]).concat([model.id])
    setSql = (
      indexFields
      .map((key) => "`#{key}` = ?")
      .join(', ')
    )
    sql = (
      "UPDATE `#{searchTableName}` SET #{setSql} WHERE `#{searchTableName}`.`content_id` = ?"
    )
    return @_query(sql, values)
  )
|
||||
|
||||
# Deletes the fts5 row for `model`, if one exists.
# Returns the query {Promise}.
unindexModel: (model) =>
  table = "#{model.constructor.name}Search"
  @_query("DELETE FROM `#{table}` WHERE `#{table}`.`content_id` = ?", [model.id])
|
||||
|
||||
|
||||
module.exports = new DatabaseStore()
|
||||
module.exports.ChangeRecord = DatabaseChangeRecord
|
||||
|
|
|
@ -38,6 +38,14 @@ class NylasSyncStatusStore extends NylasStore
|
|||
return false if not @isSyncCompleteForAccount(acctId)
|
||||
return true
|
||||
|
||||
# Returns a {Promise} that resolves once initial sync is complete for every
# account. Resolves immediately if sync has already finished; otherwise
# subscribes to store changes and resolves (unsubscribing first) when
# `isSyncComplete` flips to true.
whenSyncComplete: =>
  return Promise.resolve() if @isSyncComplete()
  return new Promise (resolve) =>
    unsubscribe = @listen =>
      if @isSyncComplete()
        unsubscribe()
        resolve()
|
||||
|
||||
busy: =>
|
||||
for accountId, states of @_statesByAccount
|
||||
for key, state of states
|
||||
|
|
|
@ -6,7 +6,7 @@ AccountStore = require './flux/stores/account-store'
|
|||
CategoryStore = require './flux/stores/category-store'
|
||||
DatabaseStore = require './flux/stores/database-store'
|
||||
OutboxStore = require './flux/stores/outbox-store'
|
||||
SearchSubscription = require './search-subscription'
|
||||
SearchQuerySubscription = require './search-query-subscription'
|
||||
ThreadCountsStore = require './flux/stores/thread-counts-store'
|
||||
MutableQuerySubscription = require './flux/models/mutable-query-subscription'
|
||||
Thread = require './flux/models/thread'
|
||||
|
@ -170,7 +170,7 @@ class SearchMailboxPerspective extends MailboxPerspective
|
|||
super(other) and other.searchQuery is @searchQuery
|
||||
|
||||
threads: =>
|
||||
new SearchSubscription(@searchQuery, @accountIds)
|
||||
new SearchQuerySubscription(@searchQuery, @accountIds)
|
||||
|
||||
canReceiveThreadsFromAccountIds: =>
|
||||
false
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
_ = require 'underscore'
|
||||
Rx = require 'rx-lite'
|
||||
|
||||
NylasAPI = require './flux/nylas-api'
|
||||
AccountStore = require './flux/stores/account-store'
|
||||
DatabaseStore = require './flux/stores/database-store'
|
||||
|
@ -8,29 +7,41 @@ Thread = require './flux/models/thread'
|
|||
Actions = require './flux/actions'
|
||||
MutableQuerySubscription = require './flux/models/mutable-query-subscription'
|
||||
|
||||
class SearchSubscription extends MutableQuerySubscription
|
||||
|
||||
constructor: (@_terms, @_accountIds) ->
|
||||
class SearchQuerySubscription extends MutableQuerySubscription
|
||||
|
||||
constructor: (@_searchQuery, @_accountIds) ->
|
||||
super(null, {asResultSet: true})
|
||||
@_searchQueryVersion = 0
|
||||
_.defer => @performSearch()
|
||||
|
||||
@_termsVersion = 0
|
||||
_.defer => @retrievePage(0)
|
||||
searchQuery: =>
|
||||
@_searchQuery
|
||||
|
||||
terms: =>
|
||||
@_terms
|
||||
|
||||
setTerms: (terms) =>
|
||||
@_terms = terms
|
||||
@_termsVersion += 1
|
||||
@retrievePage(0)
|
||||
setSearchQuery: (searchQuery) =>
|
||||
@_searchQuery = searchQuery
|
||||
@_searchQueryVersion += 1
|
||||
@performSearch()
|
||||
|
||||
replaceRange: (range) =>
|
||||
# TODO
|
||||
|
||||
# Accessing Data
|
||||
performSearch: =>
|
||||
@performLocalSearch()
|
||||
@performRemoteSearch()
|
||||
|
||||
retrievePage: (idx) =>
|
||||
termsVersion = @_termsVersion += 1
|
||||
# Runs the search against the local fts5 index. Scopes the query to a single
# account when only one is being searched, caps results at 20 threads, and
# only swaps the subscription's query to the local one when it actually
# matched something — an empty local result shouldn't blank the list while
# the remote search is still in flight.
performLocalSearch: =>
  dbQuery = DatabaseStore.findAll(Thread)
  if @_accountIds.length is 1
    dbQuery = dbQuery.where(accountId: @_accountIds[0])
  dbQuery = dbQuery.search(@_searchQuery).limit(20)
  dbQuery.then((results) =>
    if results.length > 0
      @replaceQuery(dbQuery)
  )
|
||||
|
||||
performRemoteSearch: (idx) =>
|
||||
searchQueryVersion = @_searchQueryVersion += 1
|
||||
resultCount = 0
|
||||
resultIds = []
|
||||
|
||||
|
@ -38,8 +49,11 @@ class SearchSubscription extends MutableQuerySubscription
|
|||
# Don't emit a "result" until we have at least one thread to display.
|
||||
# Otherwise it will show "No Results Found"
|
||||
if resultIds.length > 0 or resultCount is @_accountIds.length
|
||||
query = DatabaseStore.findAll(Thread).where(id: resultIds).order(Thread.attributes.lastMessageReceivedTimestamp.descending())
|
||||
@replaceQuery(query)
|
||||
if @_set?.ids().length > 0
|
||||
currentResultIds = @_set.ids()
|
||||
resultIds = _.uniq(currentResultIds.concat(resultIds))
|
||||
dbQuery = DatabaseStore.findAll(Thread).where(id: resultIds).order(Thread.attributes.lastMessageReceivedTimestamp.descending())
|
||||
@replaceQuery(dbQuery)
|
||||
|
||||
@_accountsFailed = []
|
||||
@_updateSearchError()
|
||||
|
@ -47,14 +61,14 @@ class SearchSubscription extends MutableQuerySubscription
|
|||
@_accountIds.forEach (aid) =>
|
||||
NylasAPI.makeRequest
|
||||
method: 'GET'
|
||||
path: "/threads/search?q=#{encodeURIComponent(@_terms)}"
|
||||
path: "/threads/search?q=#{encodeURIComponent(@_searchQuery)}"
|
||||
accountId: aid
|
||||
json: true
|
||||
timeout: 45000
|
||||
returnsModel: true
|
||||
|
||||
.then (threads) =>
|
||||
return unless @_termsVersion is termsVersion
|
||||
return unless @_searchQueryVersion is searchQueryVersion
|
||||
resultCount += 1
|
||||
resultIds = resultIds.concat _.pluck(threads, 'id')
|
||||
resultReturned()
|
||||
|
@ -88,4 +102,4 @@ class SearchSubscription extends MutableQuerySubscription
|
|||
id: 'search-error:dismiss'
|
||||
}]
|
||||
|
||||
module.exports = SearchSubscription
|
||||
module.exports = SearchQuerySubscription
|
Loading…
Reference in a new issue