Merging in new observables for thread list

commit 7a67c1fd349c575a91b162024cc03050e86574c9
Author: Ben Gotow <bengotow@gmail.com>
Date:   Fri Jan 8 11:14:07 2016 -0800

    WIP

commit 891f23487827a447ec95406ef26f1473a0c07de6
Author: Ben Gotow <bengotow@gmail.com>
Date:   Wed Jan 6 15:25:09 2016 -0800

    WIP

commit 3c323cd4beb2df2fae2439a556d3129404d942cc
Author: Ben Gotow <bengotow@gmail.com>
Date:   Mon Jan 4 17:46:11 2016 -0800

    WIP

commit ec7090ea9e1969fea2ea583f80a9a2ac41e6c8b0
Author: Ben Gotow <bengotow@gmail.com>
Date:   Mon Jan 4 17:22:07 2016 -0800

    Remove unused LRUCache

commit e10c3919559d3c364cb7bb94d19094a2444c10f3
Author: Ben Gotow <bengotow@gmail.com>
Date:   Mon Jan 4 16:21:37 2016 -0800

    rm(database-view): Performance refactor of thread-list

    Summary:
    This diff removes the old DatabaseView class, which included lots of gross optimizations that have since been duplicated in QuerySubscription, and makes the thread list use the QuerySubscription class.

    This diff also substantially replaces the QuerySubscription class. The new implementation actually makes more queries but is less gross and more straightforward. It leverages a couple findings from database profiling:

    - Because of the sqlite page cache, asking for ids you've previously asked for is very fast.
        + Don't bother sorting in memory to avoid a query, just ask for ids again and fill in any missing objects.
    - Loading and inflating models is 4x+ slower than just grabbing ids

    I've also added more convenience classes around database queries:
    - QueryRange: Represents {offset, limit}, and can do boolean intersections
    - QueryResultSet: Better than passing an array of 50 items when you really mean items 150-200. Also tries hard to be immutable.

    This diff doesn't fully remove the concept of a "ModelView" because it's used /everywhere/ in the multiselect list source. There's a small shim that we can remove when we refactor that code. Ideally, I think we should rename ModelView to "MultiselectListDataSource" to follow iOS conventions (eg UITableViewDataSource). RIP 80char lines?

    Test Plan: They've gone to hell. WIP.

    Reviewers: evan, juan

    Differential Revision: https://phab.nylas.com/D2408

commit 32607eee8aafb7fa98b866347bdd2c0b963a602c
Author: Ben Gotow <bengotow@gmail.com>
Date:   Mon Jan 4 09:56:34 2016 -0800

    WIP

commit 5ab5fe74e94db6904bd77d224720ad9fc69fe6a7
Author: Ben Gotow <bengotow@gmail.com>
Date:   Wed Dec 30 22:56:46 2015 -0800

    redo scrollbars to not require counts

commit 361bb192d072dc8a69fd3ef143cad7bed214ebdc
Author: Ben Gotow <bengotow@gmail.com>
Date:   Wed Dec 30 17:50:57 2015 -0800

    wip

commit 079394de1cc3344fb6568efe00a52d7fc97fbd27
Author: Ben Gotow <bengotow@gmail.com>
Date:   Wed Dec 30 13:49:11 2015 -0800

    wip

commit 65142be03c27c653fe1147fdde6c2f9b046ade22
Author: Ben Gotow <bengotow@gmail.com>
Date:   Wed Dec 30 01:23:20 2015 -0800

    wip

commit 5d412ec276be1104175ad0f43c9d54e1cea857bf
Author: Ben Gotow <bengotow@gmail.com>
Date:   Tue Dec 29 22:49:58 2015 -0800

    Refactor start

commit d2b6eea884fcd2bd81ebe3985f2b2636a510c493
Author: Ben Gotow <bengotow@gmail.com>
Date:   Tue Dec 29 18:51:53 2015 -0800

    RIP DatabaseView
This commit is contained in:
Ben Gotow 2016-01-08 14:31:33 -08:00
parent 34f68dcc0d
commit d3f62d4bb0
22 changed files with 700 additions and 1310 deletions

View file

@ -1,54 +1,41 @@
NylasStore = require 'nylas-store'
Reflux = require 'reflux'
Rx = require 'rx-lite'
_ = require 'underscore'
{Message,
Actions,
DatabaseStore,
AccountStore,
FocusedContentStore,
DestroyDraftTask,
DatabaseView} = require 'nylas-exports'
MutableQuerySubscription,
QueryResultSetView,
DatabaseStore} = require 'nylas-exports'
class DraftListStore extends NylasStore
constructor: ->
@listenTo DatabaseStore, @_onDataChanged
@listenTo AccountStore, @_onAccountChanged
# It's important to listen to sendDraftSuccess because the
# _onDataChanged method will ignore our newly created draft because it
# has its draft bit set to false (since it's now a message)!
@listenTo Actions.sendDraftSuccess, => @_view.invalidate()
@_createView()
@subscription = new MutableQuerySubscription(@_queryForCurrentAccount(), {asResultSet: true})
$resultSet = Rx.Observable.fromPrivateQuerySubscription('draft-list', @subscription)
@_view = new QueryResultSetView $resultSet, ({start, end}) =>
@subscription.replaceQuery(@_queryForCurrentAccount().page(start, end))
view: =>
@_view
_createView: =>
_queryForCurrentAccount: =>
matchers = [Message.attributes.draft.equal(true)]
account = FocusedMailViewStore.mailView()?.account
if @unlisten
@unlisten()
@_view = null
matchers = [
Message.attributes.draft.equal(true)
]
if account?
matchers.push(Message.attributes.accountId.equal(account.id))
@_view = new DatabaseView Message,
matchers: matchers,
includes: [Message.attributes.body]
orders: [Message.attributes.date.descending()]
@unlisten = @_view.listen => @trigger({})
query = DatabaseStore.findAll(Message)
.include(Message.attributes.body)
.order(Message.attributes.date.descending())
.where(matchers)
.page(0, 1)
_onAccountChanged: =>
@_createView()
_onDataChanged: (change) =>
return unless change.objectClass is Message.name
return unless @_view
@_view.invalidate({change: change, shallow: true})
@subscription.replaceQuery(@_queryForCurrentAccount())
module.exports = new DraftListStore()

View file

@ -103,15 +103,12 @@ class EmptyState extends React.Component
render: ->
ContentComponent = ContentGeneric
messageOverride = null
if @props.dataView instanceof DatabaseView
if @props.dataView.klass is Message
messageOverride = "No messages to display."
if @state.layoutMode is 'list'
ContentComponent = ContentQuotes
if @state.syncing
messageOverride = "Please wait while we prepare your mailbox."
messageOverride = "Nothing to display."
if @state.layoutMode is 'list'
ContentComponent = ContentQuotes
if @state.syncing
messageOverride = "Please wait while we prepare your mailbox."
classes = classNames
'empty-state': true

View file

@ -0,0 +1,46 @@
_ = require 'underscore'
Rx = require 'rx-lite'
{NylasAPI,
Thread,
MutableQuerySubscription,
DatabaseStore} = require 'nylas-exports'
class PaginatingSearch
constructor: (@_terms, @_accountId) ->
@_version = 0
@subscription = new MutableQuerySubscription(null, {asResultSet: true})
_.defer => @retrievePage(0)
observable: =>
Rx.Observable.fromPrivateQuerySubscription('search-results', @subscription)
terms: =>
@_terms
setTerms: (terms) =>
@_terms = terms
@_version += 1
@retrievePage(0)
setRange: (range) =>
@retrievePage(Math.floor(range.start / 100))
# Accessing Data
retrievePage: (idx) =>
version = @_version += 1
NylasAPI.makeRequest
method: 'GET'
path: "/threads/search?q=#{encodeURIComponent(@_terms)}"
accountId: @_accountId
json: true
returnsModel: true
.then (threads) =>
return unless @_version is version
query = DatabaseStore.findAll(Thread).where(id: _.pluck(threads, 'id'))
@subscription.replaceQuery(query)
module.exports = PaginatingSearch

View file

@ -4,21 +4,20 @@ NylasStore = require 'nylas-store'
{Thread,
Message,
Actions,
SearchView,
DatabaseView,
DatabaseStore,
WorkspaceStore,
FocusedContentStore,
TaskQueueStatusStore,
FocusedMailViewStore} = require 'nylas-exports'
ThreadListViewFactory = require './thread-list-view-factory'
# Public: A mutable text container with undo/redo support and the ability
# to annotate logical regions in the text.
class ThreadListStore extends NylasStore
constructor: ->
@_resetInstanceVars()
@listenTo DatabaseStore, @_onDataChanged
@listenTo FocusedMailViewStore, @_onMailViewChanged
@createView()
@ -41,13 +40,17 @@ class ThreadListStore extends NylasStore
view: ->
@_view
createView: ->
mailViewFilter = FocusedMailViewStore.mailView()
return unless mailViewFilter
@setView(ThreadListViewFactory.viewForMailView(mailViewFilter))
Actions.setFocus(collection: 'thread', item: null)
setView: (view) ->
@_viewUnlisten() if @_viewUnlisten
@_view = view
@_viewUnlisten = view.listen ->
@trigger(@)
,@
@_viewUnlisten = view.listen(@_onViewDataChanged, @)
# Set up a one-time listener to focus an item in the new view
if WorkspaceStore.layoutMode() is 'split'
@ -58,33 +61,6 @@ class ThreadListStore extends NylasStore
@trigger(@)
createView: ->
mailViewFilter = FocusedMailViewStore.mailView()
return unless mailViewFilter
{account} = mailViewFilter
if mailViewFilter.searchQuery
@setView(new SearchView(mailViewFilter.searchQuery, account?.accountId))
else
matchers = []
matchers.push Thread.attributes.accountId.equal(account.accountId) if account?
matchers = matchers.concat(mailViewFilter.matchers())
view = new DatabaseView Thread, {matchers}, (ids) =>
DatabaseStore.findAll(Message)
.where(Message.attributes.threadId.in(ids))
.where(Message.attributes.accountId.equal(account.accountId))
.then (messages) ->
messagesByThread = {}
for id in ids
messagesByThread[id] = []
for message in messages
messagesByThread[message.threadId].push message
messagesByThread
@setView(view)
Actions.setFocus(collection: 'thread', item: null)
_onSelectRead: =>
items = @_view.itemsCurrentlyInViewMatching (item) -> not item.unread
@_view.selection.set(items)
@ -106,40 +82,30 @@ class ThreadListStore extends NylasStore
_onMailViewChanged: ->
@createView()
_onDataChanged: (change) ->
return unless @_view
if change.objectClass is Thread.name
_onViewDataChanged: ({previous, next} = {}) =>
if previous and next
focusedId = FocusedContentStore.focusedId('thread')
keyboardId = FocusedContentStore.keyboardCursorId('thread')
viewModeAutofocuses = WorkspaceStore.layoutMode() is 'split' or WorkspaceStore.topSheet().root is true
focusedIndex = @_view.indexOfId(focusedId)
keyboardIndex = @_view.indexOfId(keyboardId)
focusedIndex = previous.offsetOfId(focusedId)
keyboardIndex = previous.offsetOfId(keyboardId)
shiftIndex = (i) =>
if i > 0 and (@_view.get(i - 1)?.unread or i >= @_view.count())
if i > 0 and (next.modelAtOffset(i - 1)?.unread or i >= next.count())
return i - 1
else
return i
@_view.invalidate({change: change, shallow: true})
focusedLost = focusedIndex >= 0 and @_view.indexOfId(focusedId) is -1
keyboardLost = keyboardIndex >= 0 and @_view.indexOfId(keyboardId) is -1
focusedLost = focusedIndex >= 0 and next.offsetOfId(focusedId) is -1
keyboardLost = keyboardIndex >= 0 and next.offsetOfId(keyboardId) is -1
if viewModeAutofocuses and focusedLost
Actions.setFocus(collection: 'thread', item: @_view.get(shiftIndex(focusedIndex)))
Actions.setFocus(collection: 'thread', item: next.modelAtOffset(shiftIndex(focusedIndex)))
if keyboardLost
Actions.setCursorPosition(collection: 'thread', item: @_view.get(shiftIndex(keyboardIndex)))
if change.objectClass is Message.name
# Important: Until we optimize this so that it detects the set change
# and avoids a query, this should be defered since it's very unimportant
_.defer =>
threadIds = _.uniq _.map change.objects, (m) -> m.threadId
@_view.invalidateMetadataFor(threadIds)
Actions.setCursorPosition(collection: 'thread', item: next.modelAtOffset(shiftIndex(keyboardIndex)))
@trigger(@)
module.exports = new ThreadListStore()

View file

@ -0,0 +1,97 @@
Rx = require 'rx-lite'
{Thread,
Message,
AccountStore,
DatabaseStore,
QuerySubscription,
QueryResultSet,
QueryResultSetView,
MutableQuerySubscription} = require 'nylas-exports'
PaginatingSearch = require './paginating-search'
_flatMapJoiningMessages = ($threadsResultSet) =>
# DatabaseView leverages `QuerySubscription` for threads /and/ for the
# messages on each thread, which are passed out as `thread.metadata`.
$messagesResultSets = {}
# 2. when we receive a set of threads, we check to see if we have message
# observables for each thread. If threads have been added to the result set,
# we make a single database query and load /all/ the message metadata for
# the new threads at once. (This is a performance optimization - it's about
# ~80msec faster than making 100 queries for 100 new thread ids separately.)
Rx.Observable.zip([
$threadsResultSet,
$threadsResultSet.flatMapLatest (threadsResultSet) =>
missingIds = threadsResultSet.ids().filter (id) -> not $messagesResultSets[id]
return Rx.Observable.from([[]]) if missingIds.length is 0
Rx.Observable.fromPromise(DatabaseStore.findAll(Message, threadId: missingIds))
])
# 3. when that finishes, we group the loaded messages by threadId and create
# the missing observables. Creating a query subscription would normally load
# an initial result set. To avoid that, we just hand new subscriptions the
# results we loaded in #2.
.flatMapLatest ([threadsResultSet, messagesForNewThreads]) =>
messagesGrouped = {}
for message in messagesForNewThreads
messagesGrouped[message.threadId] ?= []
messagesGrouped[message.threadId].push(message)
oldSets = $messagesResultSets
$messagesResultSets = {}
sets = threadsResultSet.ids().map (id) =>
$messagesResultSets[id] = oldSets[id] || _observableForThreadMessages(id, messagesGrouped[id])
$messagesResultSets[id]
sets.unshift(Rx.Observable.from([threadsResultSet]))
# 4. We use `combineLatest` to merge the message observables into a single
# stream (like Promise.all). When /any/ of them emit a new result set, we
# trigger.
Rx.Observable.combineLatest(sets)
.flatMapLatest ([threadsResultSet, messagesResultSets...]) =>
threadsWithMetadata = {}
threadsResultSet.models().map (thread, idx) ->
thread = new thread.constructor(thread)
thread.metadata = messagesResultSets[idx]?.models()
threadsWithMetadata[thread.id] = thread
Rx.Observable.from([QueryResultSet.setByApplyingModels(threadsResultSet, threadsWithMetadata)])
_observableForThreadMessages = (id, initialModels) ->
subscription = new QuerySubscription(DatabaseStore.findAll(Message, threadId: id), {
asResultSet: true,
initialModels: initialModels
})
Rx.Observable.fromPrivateQuerySubscription('message-'+id, subscription)
module.exports = ThreadListViewFactory =
viewForMailView: (mailViewFilter) =>
if mailViewFilter.searchQuery
ThreadListViewFactory.viewForSearch(mailViewFilter.searchQuery, mailViewFilter.account?.id)
else
matchers = []
if mailViewFilter.account
matchers.push Thread.attributes.accountId.equal(mailViewFilter.account.id)
matchers = matchers.concat(mailViewFilter.matchers())
query = DatabaseStore.findAll(Thread).where(matchers).limit(0)
ThreadListViewFactory.viewForQuery(query)
viewForSearch: (terms, accountId) =>
search = new PaginatingSearch(terms, accountId)
$resultSet = _flatMapJoiningMessages(search.observable())
return new QueryResultSetView $resultSet, ({start, end}) =>
search.setRange({start, end})
viewForQuery: (query) =>
subscription = new MutableQuerySubscription(query, {asResultSet: true})
$resultSet = Rx.Observable.fromPrivateQuerySubscription('thread-list', subscription)
$resultSet = _flatMapJoiningMessages($resultSet)
return new QueryResultSetView $resultSet, ({start, end}) =>
subscription.replaceQuery(query.clone().page(start, end))

View file

@ -1,402 +0,0 @@
_ = require 'underscore'
EventEmitter = require('events').EventEmitter
proxyquire = require 'proxyquire'
Label = require '../src/flux/models/label'
Thread = require '../src/flux/models/thread'
Message = require '../src/flux/models/message'
DatabaseStore = require '../src/flux/stores/database-store'
DatabaseView = proxyquire '../src/flux/stores/database-view',
DatabaseStore: DatabaseStore
describe "DatabaseView", ->
beforeEach ->
@queries = []
spyOn(DatabaseStore, 'run').andCallFake (query) =>
new Promise (resolve, reject) =>
query.resolve = resolve
@queries.push(query)
describe "constructor", ->
it "should require a model class", ->
expect(( -> new DatabaseView())).toThrow()
expect(( -> new DatabaseView(Thread))).not.toThrow()
view = new DatabaseView(Thread)
expect(view.klass).toBe(Thread)
it "should optionally populate matchers and includes", ->
config =
matchers: [Message.attributes.accountId.equal('asd')]
includes: [Message.attributes.body]
view = new DatabaseView(Message, config)
expect(view._matchers).toEqual(config.matchers)
expect(view._includes).toEqual(config.includes)
it "should optionally populate ordering", ->
config =
orders: [Message.attributes.date.descending()]
view = new DatabaseView(Message, config)
expect(view._orders).toEqual(config.orders)
it "should optionally accept a metadata provider", ->
provider = ->
view = new DatabaseView(Message, {}, provider)
expect(view._metadataProvider).toEqual(provider)
it "should initialize the row count to -1", ->
view = new DatabaseView(Message)
expect(view.count()).toBe(-1)
it "should immediately start fetching a row count", ->
config =
matchers: [Message.attributes.accountId.equal('asd')]
view = new DatabaseView(Message, config)
# Count query
expect(@queries[0]._count).toEqual(true)
expect(@queries[0]._matchers).toEqual(config.matchers)
describe "instance methods", ->
beforeEach ->
config =
matchers: [Message.attributes.accountId.equal('asd')]
@view = new DatabaseView(Message, config)
@view._pages =
0:
items: [new Thread(id: 'a'), new Thread(id: 'b'), new Thread(id: 'c')]
metadata: {'a': 'a-metadata', 'b': 'b-metadata', 'c': 'c-metadata'}
loaded: true
1:
items: [new Thread(id: 'd'), new Thread(id: 'e'), new Thread(id: 'f')]
metadata: {'d': 'd-metadata', 'e': 'e-metadata', 'f': 'f-metadata'}
loaded: true
@view._count = 1
spyOn(@view, 'invalidateRetainedRange').andCallFake ->
describe "setMetadataProvider", ->
it "should empty the page cache and re-fetch all pages", ->
@view.setMetadataProvider( -> false)
expect(@view._pages).toEqual({})
expect(@view.invalidateRetainedRange).toHaveBeenCalled()
describe "setMatchers", ->
it "should reset the row count", ->
@view.setMatchers([])
expect(@view._count).toEqual(-1)
it "should empty the page cache and re-fetch all pages", ->
@view.setMatchers([])
expect(@view._pages).toEqual({})
expect(@view.invalidateRetainedRange).toHaveBeenCalled()
describe "setIncludes", ->
it "should empty the page cache and re-fetch all pages", ->
@view.setIncludes([])
expect(@view._pages).toEqual({})
expect(@view.invalidateRetainedRange).toHaveBeenCalled()
describe "invalidate", ->
it "should clear the metadata cache for each page and re-fetch", ->
@view.invalidate({shallow: false})
expect(@view.invalidateRetainedRange).toHaveBeenCalled()
expect(@view._pages[0].metadata).toEqual({})
describe "when the shallow option is provided", ->
it "should refetch items in each page, but not flush the item metadata cache", ->
beforeMetadata = @view._pages[0].metadata
@view.invalidate({shallow: true})
expect(@view.invalidateRetainedRange).toHaveBeenCalled()
expect(@view._pages[0].metadata).toEqual(beforeMetadata)
describe "when the shallow option is provided with specific changed items", ->
it "should determine whether changes to these items make page(s) invalid", ->
spyOn(@view, 'invalidateAfterDatabaseChange').andCallFake ->
@view.invalidate({shallow: true, change: {objects: ['a'], type: 'persist'}})
expect(@view.invalidateAfterDatabaseChange).toHaveBeenCalled()
describe "invalidateMetadataFor", ->
it "should clear cached metadata for just the items whose ids are provided", ->
expect(@view._pages[0].metadata).toEqual({'a': 'a-metadata', 'b': 'b-metadata', 'c': 'c-metadata'})
expect(@view._pages[1].metadata).toEqual({'d': 'd-metadata', 'e': 'e-metadata', 'f': 'f-metadata'})
@view.invalidateMetadataFor(['b', 'e'])
expect(@view._pages[0].metadata['b']).toBe(undefined)
expect(@view._pages[1].metadata['e']).toBe(undefined)
it "should re-retrieve page metadata for only impacted pages", ->
spyOn(@view, 'retrievePageMetadata')
@view.invalidateMetadataFor(['e'])
expect(@view.retrievePageMetadata).toHaveBeenCalled()
expect(@view.retrievePageMetadata.calls[0].args[0]).toEqual('1')
describe "invalidateAfterDatabaseChange with serverIds", ->
beforeEach ->
@inbox = new Label(id: 'l-1', name: 'inbox', displayName: 'Inbox')
@a = new Thread(clientId: 'client-a', serverId: null, subject: 'a', labels:[@inbox], lastMessageReceivedTimestamp: new Date(1428526885604))
@view = new DatabaseView Thread,
matchers: [Thread.attributes.labels.contains('l-1')]
@view._pages =
"0":
items: [@a]
metadata: {'a': 'a-metadata'}
loaded: true
spyOn(@view, 'invalidateRetainedRange')
it "should replace items even when their serverId changes", ->
a = new Thread(@a)
a.serverId = "server-a"
@view.invalidateAfterDatabaseChange({objects:[a], type: 'persist'})
expect(@view.invalidateRetainedRange).not.toHaveBeenCalled()
describe "invalidateAfterDatabaseChange", ->
beforeEach ->
@inbox = new Label(id: 'l-1', name: 'inbox', displayName: 'Inbox')
@archive = new Label(id: 'l-2', name: 'archive', displayName: 'archive')
@a = new Thread(id: 'a', subject: 'a', labels:[@inbox], lastMessageReceivedTimestamp: new Date(1428526885604))
@b = new Thread(id: 'b', subject: 'b', labels:[@inbox], lastMessageReceivedTimestamp: new Date(1428526885604))
@c = new Thread(id: 'c', subject: 'c', labels:[@inbox], lastMessageReceivedTimestamp: new Date(1428526885604))
@d = new Thread(id: 'd', subject: 'd', labels:[@inbox], lastMessageReceivedTimestamp: new Date(1428526885604))
@e = new Thread(id: 'e', subject: 'e', labels:[@inbox], lastMessageReceivedTimestamp: new Date(1428526885604))
@f = new Thread(id: 'f', subject: 'f', labels:[@inbox], lastMessageReceivedTimestamp: new Date(1428526885604))
@view = new DatabaseView Thread,
matchers: [Thread.attributes.labels.contains('l-1')]
@view._pages =
"0":
items: [@a, @b, @c]
metadata: {'a': 'a-metadata', 'b': 'b-metadata', 'c': 'c-metadata'}
loaded: true
"1":
items: [@d, @e, @f]
metadata: {'d': 'd-metadata', 'e': 'e-metadata', 'f': 'f-metadata'}
loaded: true
spyOn(@view, 'invalidateRetainedRange')
it "should invalidate the entire range if more than 5 items are provided", ->
@view.invalidateAfterDatabaseChange({objects:[@a, @b, @c, @d, @e, @f], type:'persist'})
expect(@view.invalidateRetainedRange).toHaveBeenCalled()
it "should invalidate the entire range if a provided item is in the set but no longer matches the set", ->
a = new Thread(@a)
a.labels = [@archive]
@view.invalidateAfterDatabaseChange({objects:[a], type:'persist'})
expect(@view.invalidateRetainedRange).toHaveBeenCalled()
it "should invalidate the entire range if a provided item is not in the set but matches the set", ->
incoming = new Thread(id: 'a', subject: 'a', labels:[@inbox], lastMessageReceivedTimestamp: new Date())
@view.invalidateAfterDatabaseChange({objects:[incoming], type:'persist'})
expect(@view.invalidateRetainedRange).toHaveBeenCalled()
it "should invalidate the entire range if a provided item matches the set and the value of it's sorting attribute has changed", ->
a = new Thread(@a)
a.lastMessageReceivedTimestamp = new Date(1428526909533)
@view.invalidateAfterDatabaseChange({objects:[a], type:'persist'})
expect(@view.invalidateRetainedRange).toHaveBeenCalled()
it "should not do anything if no provided items are in the set or belong in the set", ->
archived = new Thread(id: 'zz', labels: [@archive])
@view.invalidateAfterDatabaseChange({objects:[archived], type: 'persist'})
expect(@view.invalidateRetainedRange).not.toHaveBeenCalled()
it "should replace items in place otherwise", ->
a = new Thread(@a)
a.subject = 'Subject changed, nothing to see here!'
@view.invalidateAfterDatabaseChange({objects:[a], type: 'persist'})
expect(@view.invalidateRetainedRange).not.toHaveBeenCalled()
a = new Thread(@a)
a.labels = [@inbox, @archive] # not realistic, but doesn't change membership in set
@view.invalidateAfterDatabaseChange({objects:[a], type: 'persist'})
expect(@view.invalidateRetainedRange).not.toHaveBeenCalled()
it "should attach the metadata field to replaced items", ->
spyOn(@view._emitter, 'emit')
subject = 'Subject changed, nothing to see here!'
runs ->
e = new Thread(@e)
e.subject = subject
@view.invalidateAfterDatabaseChange({objects:[e], type: 'persist'})
waitsFor ->
advanceClock(1)
@view._emitter.emit.callCount > 0
runs ->
expect(@view._pages[1].items[1].id).toEqual(@e.id)
expect(@view._pages[1].items[1].subject).toEqual(subject)
expect(@view._pages[1].items[1].metadata).toEqual(@view._pages[1].metadata[@e.id])
describe "when items have been removed", ->
beforeEach ->
spyOn(@view._emitter, 'emit')
@start = @view._pages[1].lastTouchTime
runs ->
b = new Thread(@b)
b.labels = []
@view.invalidateAfterDatabaseChange({objects:[b], type: 'persist'})
waitsFor ->
advanceClock(1)
@view._emitter.emit.callCount > 0
it "should optimistically remove them and shift result pages", ->
expect(@view._pages[0].items).toEqual([@a, @c, @d])
expect(@view._pages[1].items).toEqual([@e, @f])
it "should change the lastTouchTime date of changed pages so that refreshes started before the replacement do not revert it's changes", ->
expect(@view._pages[0].lastTouchTime isnt @start).toEqual(true)
expect(@view._pages[1].lastTouchTime isnt @start).toEqual(true)
describe "when items have been unpersisted but still match criteria", ->
beforeEach ->
spyOn(@view._emitter, 'emit')
@start = @view._pages[1].lastTouchTime
runs ->
@view.invalidateAfterDatabaseChange({objects:[@b], type: 'unpersist'})
waitsFor ->
advanceClock(1)
@view._emitter.emit.callCount > 0
it "should optimistically remove them and shift result pages", ->
expect(@view._pages[0].items).toEqual([@a, @c, @d])
expect(@view._pages[1].items).toEqual([@e, @f])
it "should change the lastTouchTime date of changed pages so that refreshes started before the replacement do not revert it's changes", ->
expect(@view._pages[0].lastTouchTime isnt @start).toEqual(true)
expect(@view._pages[1].lastTouchTime isnt @start).toEqual(true)
describe "cullPages", ->
beforeEach ->
@view._retainedRange = {start: 200, end: 399}
@view._pages = {}
for i in [0..9]
@view._pages[i] =
items: [new Thread(id: 'a'), new Thread(id: 'b'), new Thread(id: 'c')]
metadata: {'a': 'a-metadata', 'b': 'b-metadata', 'c': 'c-metadata'}
loaded: true
it "should not remove pages in the retained range", ->
@view.cullPages()
expect(@view._pages[2]).toBeDefined()
expect(@view._pages[3]).toBeDefined()
expect(@view._pages[4]).toBeDefined()
it "should remove pages far from the retained range", ->
@view.cullPages()
expect(@view._pages[7]).not.toBeDefined()
expect(@view._pages[8]).not.toBeDefined()
expect(@view._pages[9]).not.toBeDefined()
describe "retrievePage", ->
beforeEach ->
@config =
matchers: [Message.attributes.accountId.equal('asd')]
orders: [Message.attributes.date.descending()]
@view = new DatabaseView(Message, @config)
@queries = []
it "should initialize the page and set loading to true", ->
@view.retrievePage(0)
expect(@view._pages[0].metadata).toEqual({})
expect(@view._pages[0].items).toEqual([])
expect(@view._pages[0].loading).toEqual(true)
it "should make a database query for the correct item range", ->
@view.retrievePage(2)
expect(@queries.length).toBe(1)
expect(@queries[0]._range).toEqual({offset: @view._pageSize * 2, limit: @view._pageSize})
expect(@queries[0]._matchers).toEqual(@config.matchers)
it "should order results properly", ->
@view.retrievePage(2)
expect(@queries.length).toBe(1)
expect(@queries[0]._orders).toEqual(@config.orders)
describe "once the database request has completed", ->
beforeEach ->
@view.retrievePage(0)
@completeQuery = =>
@items = [new Thread(id: 'model-a'), new Thread(id: 'model-b'), new Thread(id: 'model-c')]
@queries[0].resolve(@items)
@queries = []
spyOn(@view, 'loaded').andCallFake -> true
spyOn(@view._emitter, 'emit')
it "should populate the page items and call trigger", ->
runs ->
@completeQuery()
waitsFor ->
advanceClock(1)
@view._emitter.emit.callCount > 0
runs ->
expect(@view._pages[0].items).toEqual(@items)
expect(@view._emitter.emit).toHaveBeenCalled()
it "should set loading to false for the page", ->
runs ->
expect(@view._pages[0].loading).toEqual(true)
@completeQuery()
waitsFor ->
advanceClock(1)
@view._emitter.emit.callCount > 0
runs ->
expect(@view._pages[0].loading).toEqual(false)
describe "if an item metadata provider is configured", ->
beforeEach ->
@view._metadataProvider = (ids) ->
results = {}
for id in ids
results[id] = "metadata-for-#{id}"
Promise.resolve(results)
it "should set .metadata of each item", ->
runs ->
@completeQuery()
waitsFor ->
advanceClock(1)
@view._emitter.emit.callCount > 0
runs ->
expect(@view._pages[0].items[0].metadata).toEqual('metadata-for-model-a')
expect(@view._pages[0].items[1].metadata).toEqual('metadata-for-model-b')
it "should cache the metadata on the page object", ->
runs ->
@completeQuery()
waitsFor ->
advanceClock(1)
@view._emitter.emit.callCount > 0
runs ->
expect(@view._pages[0].metadata).toEqual
'model-a': 'metadata-for-model-a'
'model-b': 'metadata-for-model-b'
'model-c': 'metadata-for-model-c'
it "should always wait for metadata promises to resolve", ->
@resolves = []
@view._metadataProvider = (ids) =>
new Promise (resolve, reject) =>
results = {}
for id in ids
results[id] = "metadata-for-#{id}"
@resolves.push -> resolve(results)
runs ->
@completeQuery()
expect(@view._pages[0].items).toEqual([])
expect(@view._pages[0].metadata).toEqual({})
expect(@view._emitter.emit).not.toHaveBeenCalled()
waitsFor ->
@resolves.length > 0
runs ->
for resolve,idx in @resolves
resolve()
waitsFor ->
advanceClock(1)
@view._emitter.emit.callCount > 0
runs ->
expect(@view._pages[0].items[0].metadata).toEqual('metadata-for-model-a')
expect(@view._pages[0].items[1].metadata).toEqual('metadata-for-model-b')
expect(@view._emitter.emit).toHaveBeenCalled()

View file

@ -11,7 +11,7 @@ describe "QuerySubscriptionPool", ->
describe "add", ->
it "should add a new subscription with the callback", ->
callback = jasmine.createSpy('callback')
QuerySubscriptionPool.add(@query, {}, callback)
QuerySubscriptionPool.add(@query, callback)
expect(QuerySubscriptionPool._subscriptions[@queryKey]).toBeDefined()
subscription = QuerySubscriptionPool._subscriptions[@queryKey]
@ -19,7 +19,7 @@ describe "QuerySubscriptionPool", ->
it "should yield database changes to the subscription", ->
callback = jasmine.createSpy('callback')
QuerySubscriptionPool.add(@query, {}, callback)
QuerySubscriptionPool.add(@query, callback)
subscription = QuerySubscriptionPool._subscriptions[@queryKey]
spyOn(subscription, 'applyChangeRecord')
@ -29,12 +29,12 @@ describe "QuerySubscriptionPool", ->
describe "unsubscribe", ->
it "should return an unsubscribe method", ->
expect(QuerySubscriptionPool.add(@query, {}, -> ) instanceof Function).toBe(true)
expect(QuerySubscriptionPool.add(@query, -> ) instanceof Function).toBe(true)
it "should remove the callback from the subscription", ->
cb = ->
unsub = QuerySubscriptionPool.add(@query, {}, cb)
unsub = QuerySubscriptionPool.add(@query, cb)
subscription = QuerySubscriptionPool._subscriptions[@queryKey]
expect(subscription.hasCallback(cb)).toBe(true)
@ -42,7 +42,7 @@ describe "QuerySubscriptionPool", ->
expect(subscription.hasCallback(cb)).toBe(false)
it "should wait before removing th subscription to make sure it's not reused", ->
unsub = QuerySubscriptionPool.add(@query, {}, -> )
unsub = QuerySubscriptionPool.add(@query, -> )
expect(QuerySubscriptionPool._subscriptions[@queryKey]).toBeDefined()
unsub()
expect(QuerySubscriptionPool._subscriptions[@queryKey]).toBeDefined()

View file

@ -120,7 +120,7 @@ class ListTabular extends React.Component
# Expand the start/end so that you can advance the keyboard cursor fast and
# we have items to move to and then scroll to.
rangeStart = Math.max(0, rangeStart - 2)
rangeEnd = Math.min(rangeEnd + 2, @props.dataView.count())
rangeEnd = Math.min(rangeEnd + 2, @props.dataView.count() + 1)
# Final sanity check to prevent needless work
return if rangeStart is @state.renderedRangeStart and
@ -140,7 +140,12 @@ class ListTabular extends React.Component
innerStyles =
height: @props.dataView.count() * @props.itemHeight
<ScrollRegion ref="container" onScroll={@onScroll} tabIndex="-1" className="list-container list-tabular" scrollTooltipComponent={@props.scrollTooltipComponent} >
<ScrollRegion
ref="container"
onScroll={@onScroll}
tabIndex="-1"
className="list-container list-tabular"
scrollTooltipComponent={@props.scrollTooltipComponent}>
<div className="list-rows" style={innerStyles}>
{@_rows()}
</div>

View file

@ -47,8 +47,8 @@ class Scrollbar extends React.Component
@props.getScrollRegion()._recomputeDimensions(options)
@_recomputeDimensions(options)
_recomputeDimensions: ({avoidForcingLayout}) =>
if not avoidForcingLayout
_recomputeDimensions: ({useCachedValues}) =>
if not useCachedValues
trackNode = React.findDOMNode(@refs.track)
return unless trackNode
trackHeight = trackNode.clientHeight
@ -152,11 +152,25 @@ class ScrollRegion extends React.Component
@_mounted = true
@recomputeDimensions()
@_heightObserver = new MutationObserver (mutations) =>
recompute = false
mutations.forEach (mutation) ->
recompute ||= !mutation.oldValue or mutation.oldValue.indexOf('height:') isnt -1
@recomputeDimensions({useCachedValues: false}) if recompute
@_heightObserver.observe(React.findDOMNode(@refs.content), {
subtree: true,
attributes: true,
attributeOldValue: true,
attributeFilter: ['style']
})
componentWillReceiveProps: (props) =>
if @shouldInvalidateScrollbarComponent(props)
@_scrollbarComponent = null
componentWillUnmount: =>
@_heightObserver.disconnect()
@_mounted = false
shouldComponentUpdate: (newProps, newState) =>
@ -281,7 +295,7 @@ class ScrollRegion extends React.Component
scrollbar._recomputeDimensions(options)
@_recomputeDimensions(options)
_recomputeDimensions: ({avoidForcingLayout}) =>
_recomputeDimensions: ({useCachedValues}) =>
return unless @refs.content
contentNode = React.findDOMNode(@refs.content)
return unless contentNode
@ -292,7 +306,7 @@ class ScrollRegion extends React.Component
# force the browser to immediately flush any DOM changes and compute the
# height of the node. This hurts performance and also kind of unnecessary,
# since it's unlikely these values will change while scrolling.
if avoidForcingLayout
if useCachedValues
totalHeight = @state.totalHeight ? contentNode.scrollHeight
trackHeight = @state.trackHeight ? contentNode.scrollHeight
viewportHeight = @state.viewportHeight ? contentNode.clientHeight
@ -317,11 +331,11 @@ class ScrollRegion extends React.Component
# See Preferences > Signatures > textarea
return unless event.target is React.findDOMNode(@refs.content)
if not @state.scrolling
if @state.scrolling
@recomputeDimensions({useCachedValues: true})
else
@recomputeDimensions()
@_setSharedState(scrolling: true)
else
@recomputeDimensions({avoidForcingLayout: true})
@props.onScroll?(event)

View file

@ -0,0 +1,56 @@
_ = require 'underscore'
QueryRange = require './query-range'
QueryResultSet = require './query-result-set'
# TODO: Make mutator methods QueryResultSet.join(), QueryResultSet.clip...
# Public: A mutable counterpart to QueryResultSet. QuerySubscription builds
# results incrementally in a MutableQueryResultSet and hands consumers
# frozen copies via `immutableClone`.
class MutableQueryResultSet extends QueryResultSet

  # Public: Returns a frozen QueryResultSet copy of this set. The copy's
  # ids array and models hash are frozen as well, so it can be shared
  # without risk of downstream mutation.
  immutableClone: ->
    set = new QueryResultSet({
      _ids: [].concat(@_ids)
      _modelsHash: _.extend({}, @_modelsHash)
      _offset: @_offset
    })
    Object.freeze(set)
    Object.freeze(set._ids)
    Object.freeze(set._modelsHash)
    set

  # Public: Discard ids (and their cached models) that fall outside `range`.
  # No-op for an infinite range.
  clipToRange: (range) ->
    return if range.isInfinite()
    if range.offset > @_offset
      # Drop items that precede the start of the requested range.
      @_ids = @_ids.slice(range.offset - @_offset)
      @_offset = range.offset
    if range.limit < @_ids.length
      # Truncate items beyond the range's limit.
      @_ids.length = Math.max(0, range.limit)
    # Rebuild the models hash so it only retains models still referenced
    # by the (possibly shortened) ids array.
    old = @_modelsHash
    @_modelsHash = {}
    @_modelsHash[id] = old[id] for id in @ids()

  # Public: Add an array of models occupying `range` of the query result.
  # Ids are merged via addIdsInRange; models go into the lookup hash.
  addModelsInRange: (rangeModels, range) ->
    @addIdsInRange(_.pluck(rangeModels, 'id'), range)
    @_modelsHash[m.id] = m for m in rangeModels

  # Public: Add `rangeIds` occupying `range` of the query result. Throws
  # unless `range` overlaps or is adjacent to the ids already present,
  # since the set must remain one contiguous run of the result.
  addIdsInRange: (rangeIds, range) ->
    if @_offset is null or range.isInfinite()
      @_ids = rangeIds
      @_offset = range.offset
    else
      if range.end < @_offset - 1
        throw new Error("You can only add adjacent values (#{range.end} < #{@_offset - 1})")
      if range.offset > @_offset + @_ids.length
        throw new Error("You can only add adjacent values (#{range.offset} > #{@_offset + @_ids.length})")
      # Splice the new ids over the overlapping region of the existing ids,
      # keeping whatever lies before and after the incoming range.
      @_ids = [].concat(@_ids.slice(0, Math.max(range.offset - @_offset, 0)), rangeIds, @_ids.slice(Math.max(range.end - @_offset, 0)))
      @_offset = Math.min(@_offset, range.offset)

  # Public: Replace the cached model with the same id as `item`.
  replaceModel: (item) ->
    @_modelsHash[item.id] = item

  # Public: Remove `item`, known to be at `offset` in the query result,
  # from both the ids array and the model lookup hash.
  removeModelAtOffset: (item, offset) ->
    idx = offset - @_offset
    delete @_modelsHash[item.id]
    @_ids.splice(idx, 1)

module.exports = MutableQueryResultSet

View file

@ -0,0 +1,17 @@
QuerySubscription = require './query-subscription'
# Public: A QuerySubscription whose underlying query may be swapped out
# after construction. When the replacement query differs from the current
# one only in its range (offset/limit), the cached result set is retained
# and extended; otherwise the cache is discarded and rebuilt.
class MutableQuerySubscription extends QuerySubscription
  constructor: ->
    super

  # Public: Swap in `nextQuery` and refresh results. No-op when the new
  # query compiles to SQL identical to the current query's.
  replaceQuery: (nextQuery) ->
    return if @_query?.sql() is nextQuery.sql()

    # Compare both queries with their ranges zeroed out. If they match,
    # only offset/limit changed and the cached set is still valid.
    strippedSQL = (q) -> q.clone().offset(0).limit(0).sql()
    rangeIsOnlyChange = @_query? and strippedSQL(@_query) is strippedSQL(nextQuery)

    nextQuery.finalize()
    @_query = nextQuery
    @_set = null unless @_set and rangeIsOnlyChange
    @update()

module.exports = MutableQuerySubscription

View file

@ -0,0 +1,58 @@
# Public: QueryRange represents a {limit, offset} window into a query's
# result set. A range whose limit and offset are both null is "infinite"
# and covers the entire result. `start` aliases `offset`; `end` is
# `offset + limit` (exclusive).
class QueryRange
  # Public: Returns a QueryRange covering the entire result set.
  @infinite: ->
    return new QueryRange({limit: null, offset: null})

  # Public: Returns the smallest QueryRange containing both `a` and `b`.
  # Throws if the ranges neither overlap nor touch, since the union of
  # disjoint ranges is not expressible as a single contiguous range.
  @rangeWithUnion: (a, b) ->
    return QueryRange.infinite() if a.isInfinite() or b.isInfinite()
    if not a.intersects(b)
      throw new Error('You cannot union ranges which do not overlap.')
    new QueryRange
      start: Math.min(a.start, b.start)
      end: Math.max(a.end, b.end)

  # Public: Returns the portions of `a` not covered by `b` — an array of
  # zero, one, or two QueryRanges. Infinite ranges cannot be subtracted.
  @rangesBySubtracting: (a, b) ->
    return [] unless b
    if a.isInfinite() or b.isInfinite()
      throw new Error("You cannot subtract infinite ranges.")
    uncovered = []
    if b.start > a.start
      uncovered.push new QueryRange({start: a.start, end: Math.min(a.end, b.start)})
    if b.end < a.end
      uncovered.push new QueryRange({start: Math.max(a.start, b.end), end: a.end})
    uncovered

  Object.defineProperty @prototype, "start",
    enumerable: false
    get: -> @offset

  Object.defineProperty @prototype, "end",
    enumerable: false
    get: -> @offset + @limit

  # Accepts either {limit, offset} or {start, end}; one full pair is required.
  constructor: ({@limit, @offset, start, end} = {}) ->
    @offset ?= start if start?
    @limit ?= end - @offset if end?
    throw new Error("You must specify a limit") if @limit is undefined
    throw new Error("You must specify an offset") if @offset is undefined

  clone: ->
    return new QueryRange({@limit, @offset})

  isInfinite: ->
    return @limit is null and @offset is null

  isEqual: (b) ->
    return @start is b.start and @end is b.end

  # Public: Returns true if this range overlaps or touches `b`.
  # Fixed: the previous check (`@start <= b.start <= @end or
  # @start <= b.end <= @end`) returned false when `b` strictly contained
  # this range, because neither of b's endpoints fell inside it. The
  # symmetric form below handles containment and preserves the original
  # semantics that merely-touching ranges (b.start is @end) intersect.
  intersects: (b) ->
    return true if @isInfinite() or b.isInfinite()
    return @start <= b.end and b.start <= @end

  toString: ->
    "QueryRange{#{@start} - #{@end}}"

module.exports = QueryRange

View file

@ -0,0 +1,79 @@
_ = require 'underscore'
QueryRange = require './query-range'
###
Public: Instances of QueryResultSet hold a set of models retrieved
from the database at a given offset.
Complete vs Incomplete:
QueryResultSet keeps an array of item ids and a lookup table of models.
The lookup table may be incomplete if the QuerySubscription isn't finished
preparing results. You can use `isComplete` to determine whether the set
has every model.
Offset vs Index:
To avoid confusion, "index" refers to an item's position in an
array, and "offset" refers to it's position in the query result set. For example,
an item might be at index 20 in the _ids array, but at offset 120 in the result.
###
class QueryResultSet
  # Public: Returns a copy of `set` whose model lookup table is replaced
  # by `models`. `models` must be a hash of {id: model}, not an array.
  @setByApplyingModels: (set, models) ->
    if models instanceof Array
      throw new Error("setByApplyingModels: A hash of models is required.")
    set = set.clone()
    set._modelsHash = models
    set

  constructor: (other = {}) ->
    # Lookup table of {id: model}. May be missing entries — see isComplete.
    @_modelsHash = other._modelsHash ? {}
    # Offset of the first item of `_ids` within the overall query result.
    @_offset = other._offset ? null
    # Ordered array of item ids covered by this set.
    @_ids = other._ids ? []

  # Public: Returns a shallow copy (ids array and models hash are copied,
  # the models themselves are shared).
  clone: ->
    new @constructor({
      _ids: [].concat(@_ids)
      _modelsHash: _.extend({}, @_modelsHash)
      _offset: @_offset
    })

  # Public: True when every id in the set has a corresponding model in
  # the lookup table.
  isComplete: ->
    _.every @_ids, (id) => @_modelsHash[id]

  # Public: Returns the QueryRange (offset + limit) this set covers.
  range: ->
    new QueryRange(offset: @_offset, limit: @_ids.length)

  count: ->
    @_ids.length

  empty: ->
    @count() is 0

  ids: ->
    @_ids

  # Public: Returns the id at `offset` in the query result (not an array index).
  idAtOffset: (offset) ->
    @_ids[offset - @_offset]

  # Public: Returns the models in result order. Entries may be undefined
  # if the set is not complete.
  models: ->
    @_ids.map (id) => @_modelsHash[id]

  # Public: Number of models currently present in the lookup table.
  modelCacheCount: ->
    Object.keys(@_modelsHash).length

  # Public: Returns the model at `offset` in the query result.
  modelAtOffset: (offset) ->
    unless _.isNumber(offset)
      throw new Error("QueryResultSet.modelAtOffset() takes a numeric index. Maybe you meant modelWithId()?")
    @_modelsHash[@_ids[offset - @_offset]]

  modelWithId: (id) ->
    @_modelsHash[id]

  # Public: Returns the item's offset within the query result, or -1 if
  # the id is not in this set.
  offsetOfId: (id) ->
    idx = @_ids.indexOf(id)
    return -1 if idx is -1
    return @_offset + idx

module.exports = QueryResultSet

View file

@ -10,31 +10,35 @@ merge equivalent subscriptions, etc.
class QuerySubscriptionPool
constructor: ->
@_subscriptions = {}
@_cleanupChecks = []
@_setup()
add: (query, options, callback) =>
callback._registrationPoint = @_formatRegistrationPoint((new Error).stack)
add: (query, callback) =>
if NylasEnv.inDevMode()
callback._registrationPoint = @_formatRegistrationPoint((new Error).stack)
key = @_keyForQuery(query)
subscription = @_subscriptions[key]
if not subscription
subscription = new QuerySubscription(query, options)
subscription = new QuerySubscription(query)
@_subscriptions[key] = subscription
subscription.addCallback(callback)
return =>
subscription.removeCallback(callback)
# We could be in the middle of an update that will remove and then re-add
# the exact same subscription. Keep around the cached set for one tick
# to see if that happens.
_.defer => @checkIfSubscriptionNeeded(subscription)
@_scheduleCleanupCheckForSubscription(key)
checkIfSubscriptionNeeded: (subscription) =>
return unless subscription.callbackCount() is 0
key = @_keyForQuery(subscription.query())
delete @_subscriptions[key]
addPrivateSubscription: (key, subscription, callback) =>
@_subscriptions[key] = subscription
subscription.addCallback(callback)
return =>
subscription.removeCallback(callback)
@_scheduleCleanupCheckForSubscription(key)
printSubscriptions: =>
unless NylasEnv.inDevMode()
return console.log("printSubscriptions is only available in developer mode.")
for key, subscription of @_subscriptions
console.log(key)
console.group()
@ -43,6 +47,19 @@ class QuerySubscriptionPool
console.groupEnd()
return
_scheduleCleanupCheckForSubscription: (key) =>
# We unlisten / relisten to lots of subscriptions and setTimeout is actually
# /not/ that fast. Create one timeout for all checks, not one for each.
_.defer(@_runCleanupChecks) if @_cleanupChecks.length is 0
@_cleanupChecks.push(key)
_runCleanupChecks: =>
for key in @_cleanupChecks
subscription = @_subscriptions[key]
if subscription and subscription.callbackCount() is 0
delete @_subscriptions[key]
@_cleanupChecks = []
_formatRegistrationPoint: (stack) ->
stack = stack.split('\n')
ii = 0

View file

@ -1,25 +1,30 @@
_ = require 'underscore'
DatabaseChangeRecord = require '../stores/database-change-record'
DatabaseStore = require '../stores/database-store'
QueryRange = require './query-range'
MutableQueryResultSet = require './mutable-query-result-set'
class QuerySubscription
constructor: (@_query, @_options) ->
ModelQuery = require './query'
if not @_query or not (@_query instanceof ModelQuery)
throw new Error("QuerySubscription: Must be constructed with a ModelQuery. Got #{@_query}")
if @_query._count
throw new Error("QuerySubscriptionPool::add - You cannot listen to count queries.")
@_query.finalize()
@_limit = @_query.range().limit ? Infinity
@_offset = @_query.range().offset ? 0
@_callbacks = []
constructor: (@_query, @_options = {}) ->
@_set = null
@_version = 0
@_versionFetchInProgress = false
@_lastResultSet = null
@_refetchResultSet()
@_callbacks = []
@_lastResult = null
if @_query
if @_query._count
throw new Error("QuerySubscriptionPool::add - You cannot listen to count queries.")
@_query.finalize()
if @_options.initialModels
@_set = new MutableQueryResultSet()
@_set.addModelsInRange(@_options.initialModels, new QueryRange({
limit: @_options.initialModels.length,
offset: 0
}))
@_createResultAndTrigger()
else
@update()
query: =>
@_query
@ -29,10 +34,10 @@ class QuerySubscription
throw new Error("QuerySubscription:addCallback - expects a function, received #{callback}")
@_callbacks.push(callback)
# If we already have data, send it to our new observer. Users always expect
# callbacks to be fired asynchronously, so wait a tick.
if @_lastResultSet
_.defer => @_invokeCallback(callback)
if @_lastResult
process.nextTick =>
return unless @_lastResult
callback(@_lastResult)
hasCallback: (callback) =>
@_callbacks.indexOf(callback) isnt -1
@ -46,167 +51,45 @@ class QuerySubscription
@_callbacks.length
applyChangeRecord: (record) =>
return unless record.objectClass is @_query.objectClass()
return unless @_query and record.objectClass is @_query.objectClass()
return unless record.objects.length > 0
return @_invalidatePendingResultSet() unless @_lastResultSet
@_lastResultSet = [].concat(@_lastResultSet)
return @update() if not @_set
impactCount = 0
mustRefetchAllIds = false
if record.type is 'unpersist'
status = @_optimisticallyRemoveModels(record.objects)
for item in record.objects
offset = @_set.offsetOfId(item.id)
if offset isnt -1
@_set.removeModelAtOffset(item, offset)
impactCount += 1
else if record.type is 'persist'
status = @_optimisticallyUpdateModels(record.objects)
else
throw new Error("QuerySubscription: Unknown change record type: #{record.type}")
for item in record.objects
offset = @_set.offsetOfId(item.id)
itemIsInSet = offset isnt -1
itemShouldBeInSet = item.matches(@_query.matchers())
if status.setModified
@_invokeCallbacks()
if status.setFetchRequired
@_refetchResultSet()
if itemIsInSet and not itemShouldBeInSet
@_set.removeModelAtOffset(item, offset)
impactCount += 1
_refetchResultSet: =>
@_version += 1
else if itemShouldBeInSet and not itemIsInSet
@_set.replaceModel(item)
mustRefetchAllIds = true
impactCount += 1
return if @_versionFetchInProgress
@_versionFetchInProgress = true
fetchVersion = @_version
else if itemIsInSet
oldItem = @_set.modelWithId(item.id)
@_set.replaceModel(item)
impactCount += 1
mustRefetchAllIds = true if @_itemSortOrderHasChanged(oldItem, item)
DatabaseStore = require '../stores/database-store'
DatabaseStore.run(@_query, {format: false}).then (result) =>
@_versionFetchInProgress = false
if @_version is fetchVersion
@_lastResultSet = result
@_invokeCallbacks()
else
@_refetchResultSet()
_invalidatePendingResultSet: =>
@_version += 1
_resortResultSet: =>
sortDescriptors = @_query.orderSortDescriptors()
@_lastResultSet.sort (a, b) ->
for descriptor in sortDescriptors
if descriptor.direction is 'ASC'
direction = 1
else if descriptor.direction is 'DESC'
direction = -1
else
throw new Error("QuerySubscription: Unknown sort order: #{descriptor.direction}")
aValue = a[descriptor.attr.modelKey]
bValue = b[descriptor.attr.modelKey]
return -1 * direction if aValue < bValue
return 1 * direction if aValue > bValue
return 0
_optimisticallyRemoveModels: (items) =>
status =
setModified: false
setFetchRequired: false
lastLength = @_lastResultSet.length
for item in items
idx = _.findIndex @_lastResultSet, ({id}) -> id is item.id
if idx isnt -1
@_lastResultSet.splice(idx, 1)
status.setModified = true
# Removing items is an issue if we previosly had LIMIT items. This
# means there are likely more items to display in the place of the one
# we're removing and we need to re-fetch
if lastLength is @_limit
status.setFetchRequired = true
status
_optimisticallyUpdateModels: (items) =>
status =
setModified: false
setFetchRequired: false
sortNecessary = false
# Pull attributes of the query
sortDescriptors = @_query.orderSortDescriptors()
oldSetInfo =
length: @_lastResultSet.length
startItem: @_lastResultSet[0]
endItem: @_lastResultSet[@_limit - 1]
for item in items
# TODO
# This logic is duplicated across DatabaseView#invalidate and
# ModelView#indexOf
#
# This duplication should go away when we refactor/replace DatabaseView
# for using observables
idx = _.findIndex @_lastResultSet, ({id, clientId}) ->
id is item.id or item.clientId is clientId
itemIsInSet = idx isnt -1
itemShouldBeInSet = item.matches(@_query.matchers())
if itemIsInSet and not itemShouldBeInSet
# remove the item
@_lastResultSet.splice(idx, 1)
status.setModified = true
else if itemShouldBeInSet and not itemIsInSet
# insert the item, re-sort if a sort order is defined
if sortDescriptors.length > 0
sortNecessary = true
@_lastResultSet.push(item)
status.setModified = true
else if itemIsInSet
# update the item in the set, re-sort if a sort attribute's value has changed
if @_itemSortOrderHasChanged(@_lastResultSet[idx], item)
sortNecessary = true
@_lastResultSet[idx] = item
status.setModified = true
if sortNecessary
@_resortResultSet()
if sortNecessary and @_itemOnEdgeHasChanged(oldSetInfo)
status.setFetchRequired = true
# If items have been added, truncate the result set to the requested length
if @_lastResultSet.length > @_limit
@_lastResultSet.length = @_limit
hadMaxItems = oldSetInfo.length is @_limit
hasLostItems = @_lastResultSet.length < oldSetInfo.length
if hadMaxItems and hasLostItems
# Ex: We asked for 20 items and had 20 items. Now we have 19 items.
# We need to pull a nw item to fill slot #20.
status.setFetchRequired = true
status
_itemOnEdgeHasChanged: (oldSetInfo) ->
hasPrecedingItems = @_offset > 0
hasChangedStartItem = oldSetInfo.startItem isnt @_lastResultSet[0]
if hasPrecedingItems and hasChangedStartItem
# We've changed the identity of the item at index zero. We have no way
# of knowing if it would still sort at this position, or if another item
# from earlier in the range should be at index zero.
# Full re-fetch is necessary.
return true
hasTrailingItems = @_lastResultSet.length is @_limit
hasChangedEndItem = oldSetInfo.endItem isnt @_lastResultSet[@_limit - 1]
if hasTrailingItems and hasChangedEndItem
# We've changed he last item in the set, and the set is at it's LIMIT length.
# We have no way of knowing if the item should still be at this position
# since we can't see the next item.
# Full re-fetch is necessary.
return true
if impactCount > 0
@_set = null if mustRefetchAllIds
@update()
_itemSortOrderHasChanged: (old, updated) ->
for descriptor in @_query.orderSortDescriptors()
@ -219,15 +102,70 @@ class QuerySubscription
return false
_invokeCallbacks: =>
set = [].concat(@_lastResultSet)
resultForSet = @_query.formatResultObjects(set)
@_callbacks.forEach (callback) =>
callback(resultForSet)
update: =>
version = @_version += 1
desiredRange = @_query.range()
currentRange = @_set?.range()
if currentRange and not currentRange.isInfinite() and not desiredRange.isInfinite()
ranges = QueryRange.rangesBySubtracting(desiredRange, currentRange)
entireModels = true
else
ranges = [desiredRange]
entireModels = not @_set or @_set.modelCacheCount() is 0
Promise.each ranges, (range) =>
return unless version is @_version
@_fetchRange(range, {entireModels})
.then =>
return unless version is @_version
ids = @_set.ids().filter (id) => not @_set.modelWithId(id)
return if ids.length is 0
return DatabaseStore.findAll(@_query._klass, {id: ids}).then(@_appendToModelCache)
.then =>
return unless version is @_version
@_createResultAndTrigger()
_fetchRange: (range, {entireModels} = {}) =>
rangeQuery = undefined
unless range.isInfinite()
rangeQuery ?= @_query.clone()
rangeQuery.offset(range.offset).limit(range.limit)
unless entireModels
rangeQuery ?= @_query.clone()
rangeQuery.idsOnly()
rangeQuery ?= @_query
DatabaseStore.run(rangeQuery, {format: false}).then (results) =>
@_set = null unless @_set?.range().intersects(range)
@_set ?= new MutableQueryResultSet()
if entireModels
@_set.addModelsInRange(results, range)
else
@_set.addIdsInRange(results, range)
@_set.clipToRange(@_query.range())
_createResultAndTrigger: =>
unless @_set.isComplete()
console.warn("QuerySubscription: tried to publish a result set missing models.")
return
unless _.uniq(@_set.ids()).length is @_set.count()
throw new Error("")
if @_options.asResultSet
@_lastResult = @_set.immutableClone()
else
@_lastResult = @_query.formatResultObjects(@_set.models())
@_callbacks.forEach (callback) =>
callback(@_lastResult)
_invokeCallback: (callback) =>
set = [].concat(@_lastResultSet)
resultForSet = @_query.formatResultObjects(set)
callback(resultForSet)
module.exports = QuerySubscription

View file

@ -1,4 +1,5 @@
{Matcher, AttributeJoinedData} = require '../attributes'
QueryRange = require './query-range'
Utils = require './utils'
_ = require 'underscore'
@ -44,12 +45,23 @@ class ModelQuery
@_database || = require '../stores/database-store'
@_matchers = []
@_orders = []
@_range = {}
@_range = QueryRange.infinite()
@_returnOne = false
@_returnIds = false
@_includeJoinedData = []
@_count = false
@
clone: ->
q = new ModelQuery(@_klass, @_database).where(@_matchers).order(@_orders)
q._orders = [].concat(@_orders)
q._includeJoinedData = [].concat(@_includeJoinedData)
q._range = @_range.clone()
q._returnOne = @_returnOne
q._returnIds = @_returnIds
q._count = @_count
q
# Public: Add one or more where clauses to the query
#
# - `matchers` An {Array} of {Matcher} objects that add where clauses to the underlying query.
@ -132,6 +144,7 @@ class ModelQuery
limit: (limit) ->
@_assertNotFinalized()
throw new Error("Cannot use limit > 2 with one()") if @_returnOne and limit > 1
@_range = @_range.clone()
@_range.limit = limit
@
@ -143,9 +156,20 @@ class ModelQuery
#
offset: (offset) ->
@_assertNotFinalized()
@_range = @_range.clone()
@_range.offset = offset
@
# Public:
#
# A convenience method for setting both limit and offset given a desired page size.
#
page: (start, end, pageSize = 50, pagePadding = 100) ->
roundToPage = (n) -> Math.max(0, Math.floor(n / pageSize) * pageSize)
@offset(roundToPage(start - pagePadding))
@limit(roundToPage((end - start) + pagePadding * 2))
@
# Public: Set the `count` flag - instead of returning inflated models,
# the query will return the result `COUNT`.
#
@ -156,6 +180,10 @@ class ModelQuery
@_count = true
@
idsOnly: ->
@_returnIds = true
@
###
Query Execution
###
@ -179,6 +207,8 @@ class ModelQuery
if @_count
return result[0]['count'] / 1
else if @_returnIds
return result.map (row) -> row['id']
else
try
objects = result.map (row) =>
@ -206,6 +236,8 @@ class ModelQuery
if @_count
result = "COUNT(*) as count"
else if @_returnIds
result = "`#{@_klass.name}`.`id`"
else
result = "`#{@_klass.name}`.`data`"
@_includeJoinedData.forEach (attr) =>

View file

@ -1,425 +0,0 @@
_ = require 'underscore'
Utils = require '../models/utils'
DatabaseStore = require './database-store'
ModelView = require './model-view'
EventEmitter = require('events').EventEmitter
verbose = false
# A small helper class that prevents the DatabaseView from making too many
# queries. It tracks the number of jobs in flight via `increment` and allows
# a callback to run "when there are fewer then N ongoing queries".
# Sort of like _.throttle, but with a work threshold rather than a time threshold.
class TaskThrottler
constructor: (@_maxConcurrent) ->
@_inflight = 0
@_whenReady = null
whenReady: (fn) ->
if @_inflight < @_maxConcurrent
fn()
else
@_whenReady = fn
increment: ->
decremented = false
@_inflight += 1
# Returns a function that can be called once and only once to
# decrement the counter.
return =>
if not decremented
@_inflight -= 1
if @_whenReady and @_inflight < @_maxConcurrent
@_whenReady()
@_whenReady = null
decremented = true
# Public: DatabaseView abstracts away the process of paginating a query
# and loading ranges of data. It's very smart about deciding when
# results need to be refreshed. There are a few core concepts that
# make it flexible:
#
# - `matchers`: The where clauses that should be applied to queries.
# - `includes`: The include clauses that should be applied to queries.
#
# metadataProvider: For each item loaded, you can provide a promise
# that resolves with additional data for that item. The DatabaseView
# will not consider the page of data "loaded" until all the metadata
# promises have resolved. (Used for message metadata on threads)
#
# retainedRange: The retained range ({start, end}) represents the
# objects currently being displayed. React components displaying the
# view can alter the retained range as the user scrolls.
#
# Note: Do not make the retainedRange larger than you need. The
# DatabaseView may internally keep a larger set of items loaded
# for performance.
#
# Section: Database
#
class DatabaseView extends ModelView
constructor: (@klass, config = {}, @_metadataProvider) ->
super
@_pageSize = 100
@_throttler = new TaskThrottler(2)
@_matchers = config.matchers ? []
@_includes = config.includes ? []
@_orders = config.orders ? []
@_count = -1
@invalidateCount()
@invalidateRetainedRange()
@
log: ->
return unless verbose and not NylasEnv.inSpecMode()
if _.isString(arguments[0])
arguments[0] = "DatabaseView (#{@klass.name}): "+arguments[0]
console.log(arguments...)
metadataProvider: ->
@_metadataProvider
setMetadataProvider: (fn) ->
@_metadataProvider = fn
@_pages = {}
@invalidate()
matchers: ->
@_matchers
setMatchers: (matchers) ->
@_matchers = matchers
@_pages = {}
@_count = -1
@invalidate()
includes: ->
@_includes
setIncludes: (includes) ->
@_includes = includes
@_pages = {}
@invalidate()
orders: ->
@_orders
setOrders: (orders) ->
@_orders = orders
@_pages = {}
@invalidate()
# Accessing Data
count: ->
@_count
padRetainedRange: ({start, end}) ->
{start: start - @_pageSize / 2, end: end + @_pageSize / 2}
# Public: Call this method when the DatabaseStore triggers and will impact the
# data maintained by this DatabaseView. In the future, the DatabaseView will
# probably observe the DatabaseView directly.
#
# - `options` an Object with the following optional keys which can be used to
# optimize the behavior of the DatabaseView:
# - `change`: The change object provided by the DatabaseStore, with `items` and a `type`.
# - `shallow`: True if this change will not invalidate item metadata, only items.
#
# TODO: In order for the DatabaseView to monitor the DatabaseStore directly,
# it needs to have some way of detatching it's listener when it's no longer needed!
# Need a destructor...
#
invalidate: ({shallow, change} = {}) ->
if shallow and change
@invalidateAfterDatabaseChange(change)
else if shallow
@invalidateCount()
@invalidateRetainedRange()
else
@log('Invalidating entire range and all metadata.')
for idx, page of @_pages
page.metadata = {}
@invalidateCount()
@invalidateRetainedRange()
invalidateAfterDatabaseChange: (change) ->
items = change.objects
if items.length is 0
return
@selection.updateModelReferences(items)
@selection.removeItemsNotMatching(@_matchers)
if change.type is 'unpersist'
@selection.remove(item) for item in items
if items.length > 5
@log("invalidateAfterDatabaseChange on #{items.length} items would be expensive. Invalidating entire range.")
@invalidateCount()
@invalidateRetainedRange()
return
pagesCouldHaveChanged = false
didMakeOptimisticChange = false
sortAttribute = items[0].constructor.naturalSortOrder()?.attribute()
indexes = []
touchTime = Date.now()
spliceItem = (idx) =>
page = Math.floor(idx / @_pageSize)
pageIdx = idx - page * @_pageSize
# Remove the item in question from the page
@_pages[page]?.items.splice(pageIdx, 1)
# Update the page's `lastTouchTime`. This causes pending refreshes
# of page data to be cancelled. This is important because these refreshes
# would actually roll back this optimistic change.
@_pages[page]?.lastTouchTime = touchTime
# Iterate through the remaining pages. Take the first
# item from the next page, remove it, and put it at the
# end of our page (to fill the space left by splice above.)
while @_pages[page + 1] and @_pages[page + 1].items
item = @_pages[page + 1].items[0]
break unless item
@_pages[page + 1].items.splice(0, 1)
@_pages[page + 1].lastTouchTime = touchTime
@_pages[page].items.push(item)
page += 1
didMakeOptimisticChange = true
for item in items
# It's important that we check against an item's clientId to
# determine if it's in the set. Some item persistModel mutations
# change the serverId (but leave the clientId intact). If keyed off
# of the `id` then we would erroneously say that the item isn't in
# the set. This happens frequently in the DraftListStore when Draft
# items persist on the server and/or turn into Message items.
#
# TODO
# This logic is duplicated across QuerySubscription#_optimisticallyUpdateModels
# and ModelView#indexOf
#
# This duplication should go away when we refactor/replace DatabaseView
# for using observables
idx = @indexOfId(item.clientId)
itemIsInSet = idx isnt -1
itemShouldBeInSet = item.matches(@_matchers) and change.type isnt 'unpersist'
indexes.push(idx)
# The item matches our set but isn't in our items array
if not itemIsInSet and itemShouldBeInSet
@log("Item matches criteria but not found in cached set. Invalidating entire range.")
pagesCouldHaveChanged = true
# The item does not match our set, but is in our items array
else if itemIsInSet and not itemShouldBeInSet
@log("Item does not match criteria but is in cached set. Invalidating entire range.")
pagesCouldHaveChanged = true
# Remove the item and fire immediately. This means the user will see the item
# disappear, and then after the new page comes in the content could change, but
# they get immediate feedback.
spliceItem(idx)
# The value of the item's sort attribute has changed, and we don't
# know if it will be in the same position in a new page.
else if itemIsInSet and sortAttribute
existing = @get(idx)
existingSortValue = existing[sortAttribute.modelKey]
itemSortValue = item[sortAttribute.modelKey]
# http://stackoverflow.com/questions/4587060/determining-date-equality-in-javascript
if not (existingSortValue >= itemSortValue && existingSortValue <= itemSortValue)
@log("Item sort value has changed (#{itemSortValue} != #{existingSortValue}). Invalidating entire range.")
pagesCouldHaveChanged = true
if didMakeOptimisticChange
@trigger()
if pagesCouldHaveChanged
@invalidateCount()
@invalidateRetainedRange()
else
# No items have changed membership in our set. Just go through and
# replace all the old items with the new versions, and avoid all
# database queries. (!)
#
# NOTE: This code assumes sort order of items in the set never changes.
# May need to perform sort or extend above code to check particular sort
# fields for any changes.
#
@log("Items being swapped in place, page membership has not changed.", items)
for item, ii in items
idx = indexes[ii]
continue if idx is -1
page = Math.floor(idx / @_pageSize)
pageIdx = idx - page * @_pageSize
# Always copy the item so that a deep equals is never necessary
item = new @klass(item)
item.metadata = @_pages[page]?.metadata[item.id]
@_pages[page]?.items[pageIdx] = item
@trigger()
invalidateMetadataFor: (ids = []) ->
# This method should be called when you know that only the metadata for
# a given set of items has been dirtied. For example, when we have a view
# of Threads and their Messages change.
# This method only performs the metadata loading step and only re-fetches
# metadata for the items whose ids are provided.
for idx, page of @_pages
dirtied = false
if page.metadata
for id in ids
if page.metadata[id]
delete page.metadata[id]
dirtied = true
if dirtied
if ids.length < 5
@log("Invalidated metadata for items with ids: #{JSON.stringify(ids)}")
else
@log("Invalidated metadata for #{ids.length} items")
@retrievePageMetadata(idx, page.items)
invalidateCount: ->
DatabaseStore.findAll(@klass).where(@_matchers).count().then (count) =>
@_count = count
@trigger()
invalidateRetainedRange: ->
@_throttler.whenReady =>
for idx in @pagesRetained()
@retrievePage(idx)
retrieveDirtyInRetainedRange: ->
@_throttler.whenReady =>
for idx in @pagesRetained()
if not @_pages[idx] or @_pages[idx].lastTouchTime > @_pages[idx].lastLoadTime
@retrievePage(idx)
# Runs the page query (an offset/limit window over the matcher set) and, when
# results arrive, hands them to retrievePageMetadata() to finish populating
# the page. Stale responses are discarded via the touchTime version check.
retrievePage: (idx) ->
page = @_pages[idx] ? {
lastTouchTime: 0
lastLoadTime: 0
metadata: {}
items: []
}
page.loading = true
@_pages[idx] = page
# Even though we won't touch the items array for another 100msec, the data
# will reflect "now" since we make the query now.
touchTime = Date.now()
query = DatabaseStore.findAll(@klass).where(@_matchers)
query.offset(idx * @_pageSize).limit(@_pageSize)
query.include(attr) for attr in @_includes
query.order(@_orders) if @_orders.length > 0
# increment() returns the matching decrement callback; `finally` guarantees
# the throttler is released whether the query succeeds or fails.
decrement = @_throttler.increment()
query.run().finally(decrement).then (items) =>
# If the page is no longer in the cache at all, it may have fallen out of the
# retained range and been cleaned up.
return unless @_pages[idx]
# The data has been changed and is now "newer" than our query result. Applying
# our version of the items would roll it back. Abort!
if page.lastTouchTime >= touchTime
@log("Version #{touchTime} fetched, but out of date (current is #{page.lastTouchTime})")
return
# Now, fetch the messages for each thread. We could do this with a
# complex join, but then we'd get thread columns repeated over and over.
# This is reasonably fast because we don't store message bodies in messages
# anymore.
@retrievePageMetadata(idx, items)
# Loads (or reloads) the metadata for the items on page `idx`, attaches it to
# each item, and commits `items` to the page — freezing them so downstream
# components cannot mutate them.
# - idx: the page index being populated
# - items: the models returned by the page query
retrievePageMetadata: (idx, items) ->
  page = @_pages[idx]
  # Even though we won't touch the items array for another 100msec, the data
  # will reflect "now" since we make the query now.
  touchTime = Date.now()
  # This method can only be used once the page is loaded. If no page is present,
  # go ahead and retrieve it in full.
  if not page
    @retrievePage(idx)
    return
  # Only ask the provider for metadata we don't already have cached on the page.
  idsMissingMetadata = []
  for item in items
    if not page.metadata[item.id]
      idsMissingMetadata.push(item.id)
  metadataPromise = Promise.resolve({})
  if idsMissingMetadata.length > 0 and @_metadataProvider
    metadataPromise = @_metadataProvider(idsMissingMetadata)
  decrement = @_throttler.increment()
  metadataPromise.finally(decrement).then (results) =>
    # If we've started reloading since we made our query, don't do any more work
    if page.lastTouchTime >= touchTime
      @log("Metadata version #{touchTime} fetched, but out of date (current is #{page.lastTouchTime})")
      return
    # FIX: the loop index was previously named `idx`, shadowing this method's
    # page-index parameter inside the callback. Renamed to `itemIdx`.
    for item, itemIdx in items
      # Frozen items must be cloned before metadata can be attached to them.
      if Object.isFrozen(item)
        item = items[itemIdx] = new @klass(item)
      metadata = results[item.id] ? page.metadata[item.id]
      item.metadata = page.metadata[item.id] = metadata
      # Prevent anything from mutating these objects or their nested objects.
      # Accidentally modifying items somewhere downstream (in a component)
      # can trigger awful re-renders
      Utils.modelFreeze(item)
    @selection.updateModelReferences(items)
    @selection.removeItemsNotMatching(@_matchers)
    page.items = items
    page.loading = false
    page.lastLoadTime = touchTime
    page.lastTouchTime = touchTime
    # Trigger if this is the last page that needed to be loaded
    @trigger() if @loaded()
# Evicts pages that have drifted far from the retained range so the cache
# cannot grow without bound as the user scrolls.
cullPages: ->
  retained = @pagesRetained()
  first = +retained[0]
  last = +retained[retained.length - 1]
  # To avoid accumulating infinite pages in memory, cull any pages we've
  # loaded that are more than 2 pages away from the retained window.
  # (`+idx` coerces the object key — a string — back to a number. The
  # original `+idx > last and ...` guard was redundant: `+idx - last > 2`
  # already implies it, and likewise on the `first` side.)
  culled = (idx for idx in Object.keys(@_pages) when (+idx - last > 2) or (first - +idx > 2))
  @_pages = _.omit(@_pages, culled)
module.exports = DatabaseView

View file

@@ -6,119 +6,41 @@ module.exports =
# NOTE(review): this span looks like a unified diff with its +/- markers
# stripped (see the "@ -6,119 +6,41" hunk header above). The debounced
# `trigger` below is immediately followed by a simpler pass-through `trigger`
# that replaces it — confirm against the real file before treating this as
# runnable source.
class ModelView
constructor: ->
@_pageSize = 100
@_retainedRange = {start: 0, end: 50}
@_pages = {}
@_emitter = new EventEmitter()
@selection = new ModelViewSelection(@, @trigger)
@
# Accessing Data
trigger: =>
return if @_triggering
@_triggering = true
_.defer =>
@_triggering = false
@_emitter.emit('trigger')
# Replacement trigger: emits synchronously and forwards an argument.
trigger: (arg) =>
@_emitter.emit('trigger', arg)
# NOTE(review): diff residue — each pair below (implementation followed by a
# `throw` stub) appears to be the removed paging code and the added abstract
# stub that replaced it.
listen: (callback, bindContext) ->
eventHandler = (args) ->
callback.apply(bindContext, args)
eventHandler = ->
callback.apply(bindContext, arguments)
@_emitter.addListener('trigger', eventHandler)
# Returns a disposer; invoking it unsubscribes the handler.
return => @_emitter.removeListener('trigger', eventHandler)
loaded: ->
return false if @count() is -1
for idx in @pagesRetained()
if not @_pages[idx] or @_pages[idx].loading is true
return false
true
throw new Error("ModelView base class does not implement loaded()")
empty: ->
@count() <= 0
throw new Error("ModelView base class does not implement empty()")
# Returns the item at a flat row index by mapping it to (page, offset),
# or null when that page isn't loaded.
get: (idx) ->
unless _.isNumber(idx)
throw new Error("ModelView.get() takes a numeric index. Maybe you meant getById()?")
page = Math.floor(idx / @_pageSize)
pageIdx = idx - page * @_pageSize
@_pages[page]?.items[pageIdx] ? null
# NOTE(review): `klass` here is bare (not `@klass`) — if this line is live
# code it references an undefined name; verify against the original file.
getStub: ->
@_sample ?= new klass
throw new Error("ModelView base class does not implement get()")
getById: (id) ->
# Linear scan across all cached pages.
return null unless id
for pageIdx, page of @_pages
for item, itemIdx in page.items
return item if item.id is id
return null
throw new Error("ModelView base class does not implement getById()")
indexOfId: (id) ->
return -1 unless id
for pageIdx, page of @_pages
for item, itemIdx in page.items
if item.id is id or item.clientId is id
# NOTE(review): `pageIdx` is an object key (a string); the multiply
# relies on JS coercing it to a number.
return pageIdx * @_pageSize + itemIdx
return -1
throw new Error("ModelView base class does not implement indexOfId()")
count: ->
throw new Error("ModelView base class does not implement count()")
pageSize: ->
@_pageSize
# Inclusive range of page indices covering the retained row range.
pagesRetained: ->
[Math.floor(@_retainedRange.start / @_pageSize)..Math.floor(@_retainedRange.end / @_pageSize)]
itemsCurrentlyInViewMatching: (matchFn) ->
matchedItems = []
for index, page of @_pages
for item in (page.items ? [])
matchedItems.push item if matchFn(item)
return matchedItems
throw new Error("ModelView base class does not implement itemsCurrentlyInViewMatching()")
setRetainedRange: ({start, end}) ->
# Let subclasses pad the range, then clamp it to [0, count()]. No-op if
# the clamped range is unchanged.
{start, end} = @padRetainedRange({start, end})
start = Math.max(0, Math.min(@count(), start))
end = Math.max(0, Math.min(@count(), end))
return if start is @_retainedRange.start and
end is @_retainedRange.end
@_retainedRange = {start, end}
@retrieveDirtyInRetainedRange()
@cullPages()
# Optionally implement this method in subclasses to expand the retained range provided
# by a view or listener. (For example, to fetch pages before they're needed by the view)
padRetainedRange: ({start, end}) ->
{start, end}
# Optionally implement this method in subclasses to remove pages from the @_pages array
# after the retained range has changed.
cullPages: ->
false
invalidate: ({changed, shallow} = {}) ->
# "Total Refresh" - in a subclass, do something smarter
@invalidateRetainedRange()
invalidateMetadataFor: ->
# "Total Refresh" - in a subclass, do something smarter
@invalidateRetainedRange()
invalidateRetainedRange: ->
for idx in @pagesRetained()
@retrievePage(idx)
retrieveDirtyInRetainedRange: ->
for idx in @pagesRetained()
if not @_pages[idx]
@retrievePage(idx)
retrievePage: (page) ->
throw new Error("ModelView base class does not implement retrievePage()")
# NOTE(review): this trailing throw appears to belong to an abstract
# setRetainedRange() stub among the diff's added lines; confirm placement.
throw new Error("ModelView base class does not implement setRetainedRange()")

View file

@@ -0,0 +1,67 @@
_ = require 'underscore'
Rx = require 'rx-lite'
DatabaseStore = require './database-store'
Message = require '../models/message'
QuerySubscriptionPool = require '../models/query-subscription-pool'
QuerySubscription = require '../models/query-subscription'
MutableQuerySubscription = require '../models/mutable-query-subscription'
ModelView = require './model-view'
###
This class takes an observable which vends QueryResultSets and adapts it so that
you can make it the data source of a MultiselectList.
When the MultiselectList is refactored to take an Observable, this class should
go away!
###
# Adapts an Observable of QueryResultSets to the ModelView interface expected
# by MultiselectList. count() is only an estimate derived from the latest
# result set's range, since the true total isn't visible here.
class QueryResultSetView extends ModelView
constructor: ($resultSetObservable, @_setRetainedRange) ->
super
@_countEstimate = -1
@_resultSet = null
@_resultDesiredLast = null
$resultSetObservable.subscribe (nextResultSet) =>
# If the set ends exactly where we asked it to, there may be more rows
# beyond it — keep the estimate at least one past the end. Otherwise the
# set ended early, so its end IS the count.
if nextResultSet.range().end is @_resultDesiredLast
@_countEstimate = Math.max(@_countEstimate, nextResultSet.range().end + 1)
else
@_countEstimate = nextResultSet.range().end
previousResultSet = @_resultSet
@_resultSet = nextResultSet
@trigger({previous: previousResultSet, next: nextResultSet})
setRetainedRange: ({start, end}) ->
# Remember the last row we asked for so the estimate logic above can tell
# "ended early" apart from "ended where requested".
@_resultDesiredLast = end
@_setRetainedRange({start, end})
# Retrieving Data
count: ->
@_countEstimate
loaded: ->
@_resultSet isnt null
empty: =>
not @_resultSet or @_resultSet.empty()
get: (offset) =>
return null unless @_resultSet
@_resultSet.modelAtOffset(offset)
getById: (id) ->
# NOTE(review): unlike get()/indexOfId() there is no null guard here —
# calling getById before the first result set arrives will throw.
@_resultSet.modelWithId(id)
indexOfId: (id) ->
return -1 unless @_resultSet and id
@_resultSet.offsetOfId(id)
itemsCurrentlyInViewMatching: (matchFn) ->
return [] unless @_resultSet
@_resultSet.models().filter(matchFn)
module.exports = QueryResultSetView

View file

@@ -1,88 +0,0 @@
_ = require 'underscore'
DatabaseStore = require './database-store'
Thread = require '../models/thread'
ModelView = require './model-view'
NylasAPI = require '../nylas-api'
# ModelView over the /threads/search API endpoint. Pages are fetched via
# NylasAPI rather than the local database; fetched threads are persisted so
# the rest of the app can observe them.
class SearchView extends ModelView
constructor: (@_query, @_accountId) ->
super
@_queryResultTotal = -1
@_querySort = 'datetime'
# Deferred so construction completes before the first fetch fires.
_.defer => @retrievePage(0)
@
query: ->
@_query
setQuery: (query) ->
@_query = query
@invalidateRetainedRange()
# NOTE(review): @_querySort is stored but never read in this file — the API
# request below sends no sort parameter. Verify this is intentional.
setSortOrder: (sort) ->
@_querySort = sort
# Accessing Data
padRetainedRange: ({start, end}) ->
# Load the next page before the view needs it by padding the "retained range" used
# to retrieve pages.
{start: start, end: end + 100}
count: ->
@_queryResultTotal
# NOTE(review): the destructured parameter here is `{change}` while the
# ModelView base declares `{changed, shallow}` — confirm callers pass `change`.
invalidate: ({change}) ->
# Swap updated copies of changed items into place without refetching.
for key, page of @_pages
for item, idx in page.items
updated = _.find change.objects, (obj) -> obj.id is item.id
if updated
page.items[idx] = updated
@_emitter.emit('trigger')
retrievePage: (idx) ->
start = Date.now()
# For now, we never refresh a page we've already loaded. In the future, we may
# want to pull existing pages from the database ala WHERE `id` IN (ids from page)
return if @_pages[idx]
page =
items: []
loading: true
@_pages[idx] = page
# NOTE(review): the request derives no offset/limit from `idx`, and
# @_queryResultTotal is set to the length of a single response — every
# "page" appears to fetch the same full result list. Confirm the endpoint
# semantics before relying on paging here.
NylasAPI.makeRequest
method: 'GET'
path: "/threads/search?q=#{encodeURIComponent(@_query)}"
accountId: @_accountId
json: true
returnsModel: false
.then (json) =>
objects = []
@_queryResultTotal = json.length
for resultJSON in json
obj = (new Thread).fromJSON(resultJSON)
objects.push(obj)
# Persist the inflated threads so other stores/views see them.
if objects.length > 0
DatabaseStore.inTransaction (t) ->
t.persistModels(objects)
page.items = objects
page.loading = false
@_emitter.emit('trigger')
console.log("Search view fetched #{idx} in #{Date.now() - start} msec.")
.catch (error) =>
# On failure, present an empty result set rather than leaving the page
# stuck in its loading state.
@_queryResultTotal = 0
page.items = []
page.loading = false
@_emitter.emit('trigger')
module.exports = SearchView

View file

@@ -51,11 +51,12 @@ class NylasExports
@load "EdgehillAPI", 'flux/edgehill-api'
# The Database
# NOTE(review): diff residue — per the hunk header (-11 +12), the SearchView
# and DatabaseView loads appear to be the removed lines, and the
# QueryResultSet / QueryResultSetView / MutableQuerySubscription loads the
# additions; the remaining lines are unchanged context.
@load "ModelView", 'flux/stores/model-view'
@load "SearchView", 'flux/stores/search-view'
@load "DatabaseView", 'flux/stores/database-view'
@load "DatabaseStore", 'flux/stores/database-store'
@load "DatabaseTransaction", 'flux/stores/database-transaction'
@load "QueryResultSet", 'flux/models/query-result-set'
@load "QueryResultSetView", 'flux/stores/query-result-set-view'
@load "QuerySubscription", 'flux/models/query-subscription'
@load "MutableQuerySubscription", 'flux/models/mutable-query-subscription'
@load "QuerySubscriptionPool", 'flux/models/query-subscription-pool'
# Database Objects

View file

@@ -90,8 +90,14 @@ Rx.Observable.fromAction = (action) =>
observer.onNext(args...)
return Rx.Disposable.create(unsubscribe)
# Wraps a database query in an Observable backed by the QuerySubscriptionPool;
# disposing the Observable removes the subscription.
# NOTE(review): diff residue — the `(query, options)` signature line and the
# `add query, options, ...` call are the removed versions; the `(query)` /
# `add query, ...` lines are their replacements.
Rx.Observable.fromQuery = (query, options) =>
Rx.Observable.fromQuery = (query) =>
return Rx.Observable.create (observer) =>
unsubscribe = QuerySubscriptionPool.add query, options, (result) =>
unsubscribe = QuerySubscriptionPool.add query, (result) =>
observer.onNext(result)
return Rx.Disposable.create(unsubscribe)
# Creates an Observable fed by a named private subscription registered with
# the QuerySubscriptionPool; disposing the Observable unregisters it.
Rx.Observable.fromPrivateQuerySubscription = (name, subscription) =>
return Rx.Observable.create (observer) =>
unsubscribe = QuerySubscriptionPool.addPrivateSubscription name, subscription, (result) =>
observer.onNext(result)
return Rx.Disposable.create(unsubscribe)