feat(transactions): Explicit (and faster) database transactions

Summary:
Until now, we've been hiding transactions beneath the surface: when you call persistModel, you're implicitly creating a transaction.
You could create transactions explicitly with `atomically`, but that approach had several critical problems, which this diff fixes:

- Calling persistModel / unpersistModel within a transaction could cause the DatabaseStore to trigger. Other parts of the app could then make queries /during/
  the transaction, potentially before the COMMIT occurred and saved the changes. The new, explicit inTransaction syntax holds all change notifications until after the COMMIT and only then triggers.

- Calling `atomically` and then calling persistModel inside it forced us to check whether a transaction was already in progress, which was gross.

- Many parts of the code ran extensive logic inside a promise chained within `atomically`:

  BAD:

```
  DatabaseStore.atomically =>
    DatabaseStore.persistModel(draft).then =>
      GoMakeANetworkRequestThatReturnsAPromise
```

  OVERWHELMINGLY BETTER:

```
  DatabaseStore.inTransaction (t) =>
    t.persistModel(draft)
  .then =>
    GoMakeANetworkRequestThatReturnsAPromise
```

Having explicit transactions also puts us on equal footing with Sequelize and other ORMs. Note that you /have/ to call `DatabaseStore.inTransaction (t) =>`; there is no other way to access the methods that let you alter the database. :-)
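For reference, here is a minimal sketch of the resulting pattern. It is illustrative only: the `saveAndSendDraft` helper is made up for this example, while `DatabaseStore.findBy`, `Actions.sendDraft`, and the `Message` model are used as they appear elsewhere in this diff.

```
{DatabaseStore, Message, Actions} = require 'nylas-exports'

# Reads can still go through DatabaseStore, but writes only exist on the
# transaction object `t`. Observers are triggered only after the COMMIT.
saveAndSendDraft = (draft) ->
  DatabaseStore.inTransaction (t) ->
    DatabaseStore.findBy(Message, clientId: draft.clientId).then (existing) ->
      t.persistModel(existing ? draft)
  .then ->
    # The transaction has committed by the time this runs, so it is safe to
    # kick off follow-up work like network requests.
    Actions.sendDraft(draft.clientId)
```

The same shape appears throughout the diff below (for example in the MessageControls and FullContactStore changes).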

Other changes:
- This diff removes Message.labels and the Message-Labels table. We weren't using Message-level labels anywhere, and the table could grow very large.
- This diff changes the page size during initial sync from 250 => 200 in an effort to make transactions a bit faster.

Test Plan: Run tests!

Reviewers: juan, evan

Reviewed By: juan, evan

Differential Revision: https://phab.nylas.com/D2353
Ben Gotow 2015-12-17 11:46:05 -08:00
parent 9fee3c4cf1
commit ea76b7c442
45 changed files with 967 additions and 856 deletions

View file

@ -104,7 +104,9 @@ class MessageControls extends React.Component
accountId: AccountStore.current().id
body: @props.message.body
DatabaseStore.persistModel(draft).then =>
DatabaseStore.inTransaction (t) =>
t.persistModel(draft)
.then =>
Actions.sendDraft(draft.clientId)
dialog = remote.require('dialog')

View file

@ -32,7 +32,10 @@ FullContactStore = Reflux.createStore
contact.company = data.organizations?[0]?["name"]
contact.thirdPartyData ?= {}
contact.thirdPartyData["FullContact"] = data
DatabaseStore.persistModel(contact)
@trigger()
DatabaseStore.inTransaction (t) =>
t.persistModel(contact)
.then =>
@trigger()
module.exports = FullContactStore

View file

@ -119,9 +119,11 @@ class NylasSyncWorkerPool
_handleDeltaDeletion: (delta) =>
klass = NylasAPI._apiObjectToClassMap[delta.object]
return unless klass
DatabaseStore.find(klass, delta.id).then (model) ->
return Promise.resolve() unless model
return DatabaseStore.unpersistModel(model)
DatabaseStore.inTransaction (t) =>
t.find(klass, delta.id).then (model) ->
return Promise.resolve() unless model
return t.unpersistModel(model)
pool = new NylasSyncWorkerPool()
window.NylasSyncWorkerPool = pool

View file

@ -4,7 +4,7 @@ NylasLongConnection = require './nylas-long-connection'
ContactRankingsCache = require './contact-rankings-cache'
INITIAL_PAGE_SIZE = 30
MAX_PAGE_SIZE = 250
MAX_PAGE_SIZE = 200
# BackoffTimer is a small helper class that wraps setTimeout. It fires the function
# you provide at a regular interval, but backs off each time you call `backoff`.
@ -209,7 +209,8 @@ class NylasSyncWorker
writeState: ->
@_writeState ?= _.debounce =>
DatabaseStore.persistJSONBlob("NylasSyncWorker:#{@_account.id}", @_state)
DatabaseStore.inTransaction (t) =>
t.persistJSONBlob("NylasSyncWorker:#{@_account.id}", @_state)
,100
@_writeState()

View file

@ -24,7 +24,7 @@ class RefreshingJSONCache
reset: ->
# Clear db value, turn off any scheduled actions
DatabaseStore.persistJSONBlob(@key, {})
DatabaseStore.inTransaction (t) => t.persistJSONBlob(@key, {})
@end()
end: ->
@ -39,11 +39,12 @@ class RefreshingJSONCache
# Call fetch data function, save it to the database
@fetchData (newValue) =>
DatabaseStore.persistJSONBlob(@key, {
version: @version
time: Date.now()
value: newValue
})
DatabaseStore.inTransaction (t) =>
t.persistJSONBlob(@key, {
version: @version
time: Date.now()
value: newValue
})
fetchData: (callback) =>
throw new Error("Subclasses should override this method.")

View file

@ -1,7 +1,11 @@
_ = require 'underscore'
fs = require 'fs'
path = require 'path'
{NylasAPI, Thread, DatabaseStore, Actions} = require 'nylas-exports'
{NylasAPI,
Thread,
DatabaseStore,
DatabaseTransaction,
Actions} = require 'nylas-exports'
NylasSyncWorkerPool = require '../lib/nylas-sync-worker-pool'
fixturesPath = path.resolve(__dirname, 'fixtures')
@ -96,25 +100,26 @@ describe "NylasSyncWorkerPool", ->
"id": @thread.id,
"timestamp": "2015-08-26T17:36:45.297Z"
spyOn(DatabaseTransaction.prototype, '_query').andCallFake -> Promise.resolve([])
spyOn(DatabaseTransaction.prototype, 'unpersistModel')
it "should resolve if the object cannot be found", ->
spyOn(DatabaseStore, 'find').andCallFake (klass, id) =>
return Promise.resolve(null)
spyOn(DatabaseStore, 'unpersistModel')
waitsForPromise =>
NylasSyncWorkerPool._handleDeltaDeletion(@delta)
runs =>
expect(DatabaseStore.find).toHaveBeenCalledWith(Thread, 'idhere')
expect(DatabaseStore.unpersistModel).not.toHaveBeenCalled()
expect(DatabaseTransaction.prototype.unpersistModel).not.toHaveBeenCalled()
it "should call unpersistModel if the object exists", ->
spyOn(DatabaseStore, 'find').andCallFake (klass, id) =>
return Promise.resolve(@thread)
spyOn(DatabaseStore, 'unpersistModel')
waitsForPromise =>
NylasSyncWorkerPool._handleDeltaDeletion(@delta)
runs =>
expect(DatabaseStore.find).toHaveBeenCalledWith(Thread, 'idhere')
expect(DatabaseStore.unpersistModel).toHaveBeenCalledWith(@thread)
expect(DatabaseTransaction.prototype.unpersistModel).toHaveBeenCalledWith(@thread)
describe "handleModelResponse", ->
# SEE spec/nylas-api-spec.coffee

View file

@ -1,5 +1,5 @@
_ = require 'underscore'
{Actions, DatabaseStore, Account, Thread} = require 'nylas-exports'
{Actions, DatabaseStore, DatabaseTransaction, Account, Thread} = require 'nylas-exports'
NylasLongConnection = require '../lib/nylas-long-connection'
NylasSyncWorker = require '../lib/nylas-sync-worker'
@ -16,7 +16,7 @@ describe "NylasSyncWorker", ->
getThreads: (account, params, requestOptions) =>
@apiRequests.push({account, model:'threads', params, requestOptions})
spyOn(DatabaseStore, 'persistJSONBlob').andReturn(Promise.resolve())
spyOn(DatabaseTransaction.prototype, 'persistJSONBlob').andReturn(Promise.resolve())
spyOn(DatabaseStore, 'findJSONBlob').andCallFake (key) =>
if key is "NylasSyncWorker:#{TEST_ACCOUNT_ID}"
return Promise.resolve _.extend {}, {

View file

@ -49,8 +49,7 @@ class DeveloperBarStore extends NylasStore
triggerThrottled: ->
@_triggerThrottled ?= _.throttle(@trigger, 100)
if NylasEnv.getCurrentWindow().isVisible()
@_triggerThrottled()
@_triggerThrottled()
_setStoreDefaults: ->
@_curlHistoryIds = []

View file

@ -48,7 +48,7 @@ function makeSqlite3Command() {
// Use our local version of npm (npm 3x) to build sqlite
var npmPath = '"' + path.resolve(__dirname, '..', 'build', 'node_modules', '.bin', 'npm') + '"';
return npmPath + " install https://github.com/mapbox/node-sqlite3/archive/v3.1.1.tar.gz --ignore-scripts && cd node_modules/sqlite3 && "+nodeGypPath+" configure rebuild --target="+targetElectronVersion+" --arch="+targetArch+" --target_platform="+targetPlatform+" --dist-url=https://atom.io/download/atom-shell --module_name=node_sqlite3 --module_path=../lib/binding/node-v46-"+targetPlatform+"-"+targetArch
return npmPath + " install https://github.com/bengotow/node-sqlite3/archive/bengotow/usleep.tar.gz --ignore-scripts && cd node_modules/sqlite3 && "+nodeGypPath+" configure rebuild --target="+targetElectronVersion+" --arch="+targetArch+" --target_platform="+targetPlatform+" --dist-url=https://atom.io/download/atom-shell --module_name=node_sqlite3 --module_path=../lib/binding/node-v46-"+targetPlatform+"-"+targetArch
}
function bootstrap() {

View file

@ -4,48 +4,49 @@ Actions = require '../src/flux/actions'
NylasAPI = require '../src/flux/nylas-api'
Thread = require '../src/flux/models/thread'
DatabaseStore = require '../src/flux/stores/database-store'
DatabaseTransaction = require '../src/flux/stores/database-transaction'
describe "NylasAPI", ->
beforeEach ->
spyOn(DatabaseStore, "atomically").andCallFake (fn) -> fn()
spyOn(DatabaseStore, '_query').andCallFake => Promise.resolve([])
describe "handleModel404", ->
it "should unpersist the model from the cache that was requested", ->
model = new Thread(id: 'threadidhere')
spyOn(DatabaseStore, 'unpersistModel')
spyOn(DatabaseTransaction.prototype, 'unpersistModel')
spyOn(DatabaseStore, 'find').andCallFake (klass, id) =>
return Promise.resolve(model)
NylasAPI._handleModel404("/threads/#{model.id}")
advanceClock()
expect(DatabaseStore.find).toHaveBeenCalledWith(Thread, model.id)
expect(DatabaseStore.unpersistModel).toHaveBeenCalledWith(model)
expect(DatabaseTransaction.prototype.unpersistModel).toHaveBeenCalledWith(model)
it "should not do anything if the model is not in the cache", ->
spyOn(DatabaseStore, 'unpersistModel')
spyOn(DatabaseTransaction.prototype, 'unpersistModel')
spyOn(DatabaseStore, 'find').andCallFake (klass, id) =>
return Promise.resolve(null)
NylasAPI._handleModel404("/threads/1234")
advanceClock()
expect(DatabaseStore.find).toHaveBeenCalledWith(Thread, '1234')
expect(DatabaseStore.unpersistModel).not.toHaveBeenCalledWith()
expect(DatabaseTransaction.prototype.unpersistModel).not.toHaveBeenCalledWith()
it "should not do anything bad if it doesn't recognize the class", ->
spyOn(DatabaseStore, 'find')
spyOn(DatabaseStore, 'unpersistModel')
spyOn(DatabaseTransaction.prototype, 'unpersistModel')
waitsForPromise ->
NylasAPI._handleModel404("/asdasdasd/1234")
runs ->
expect(DatabaseStore.find).not.toHaveBeenCalled()
expect(DatabaseStore.unpersistModel).not.toHaveBeenCalled()
expect(DatabaseTransaction.prototype.unpersistModel).not.toHaveBeenCalled()
it "should not do anything bad if the endpoint only has a single segment", ->
spyOn(DatabaseStore, 'find')
spyOn(DatabaseStore, 'unpersistModel')
spyOn(DatabaseTransaction.prototype, 'unpersistModel')
waitsForPromise ->
NylasAPI._handleModel404("/account")
runs ->
expect(DatabaseStore.find).not.toHaveBeenCalled()
expect(DatabaseStore.unpersistModel).not.toHaveBeenCalled()
expect(DatabaseTransaction.prototype.unpersistModel).not.toHaveBeenCalled()
describe "handle401", ->
it "should post a notification", ->
@ -56,7 +57,7 @@ describe "NylasAPI", ->
describe "handleModelResponse", ->
beforeEach ->
spyOn(DatabaseStore, "persistModels").andCallFake (models) ->
spyOn(DatabaseTransaction.prototype, "persistModels").andCallFake (models) ->
Promise.resolve(models)
stubDB = ({models, testClass, testMatcher}) ->
@ -110,7 +111,7 @@ describe "NylasAPI", ->
waitsForPromise =>
NylasAPI._handleModelResponse(@dupes)
.then ->
models = DatabaseStore.persistModels.calls[0].args[0]
models = DatabaseTransaction.prototype.persistModels.calls[0].args[0]
expect(models.length).toBe 2
expect(models[0].id).toBe 'a'
expect(models[1].id).toBe 'b'
@ -131,7 +132,7 @@ describe "NylasAPI", ->
NylasAPI._handleModelResponse(json)
.then (models) ->
expect(models.length).toBe 1
models = DatabaseStore.persistModels.calls[0].args[0]
models = DatabaseTransaction.prototype.persistModels.calls[0].args[0]
expect(models.length).toBe 1
expect(models[0].id).toBe 'b'
@ -146,7 +147,7 @@ describe "NylasAPI", ->
stubDB models: [@existing]
verifyUpdateHappened = (responseModels) ->
changedModels = DatabaseStore.persistModels.calls[0].args[0]
changedModels = DatabaseTransaction.prototype.persistModels.calls[0].args[0]
expect(changedModels.length).toBe 2
expect(changedModels[1].id).toBe 'b'
expect(changedModels[1].starred).toBe true
@ -167,7 +168,7 @@ describe "NylasAPI", ->
NylasAPI._handleModelResponse(@json).then verifyUpdateHappened
verifyUpdateStopped = (responseModels) ->
changedModels = DatabaseStore.persistModels.calls[0].args[0]
changedModels = DatabaseTransaction.prototype.persistModels.calls[0].args[0]
expect(changedModels.length).toBe 1
expect(changedModels[0].id).toBe 'a'
expect(changedModels[0].unread).toBe true
@ -202,7 +203,7 @@ describe "NylasAPI", ->
"metadata": require('../src/flux/models/metadata')
verifyUpdateHappened = (klass, responseModels) ->
changedModels = DatabaseStore.persistModels.calls[0].args[0]
changedModels = DatabaseTransaction.prototype.persistModels.calls[0].args[0]
expect(changedModels.length).toBe 2
expect(changedModels[0].id).toBe 'a'
expect(changedModels[1].id).toBe 'b'

View file

@ -21,10 +21,12 @@ ServiceHub = require 'service-hub'
pathwatcher = require 'pathwatcher'
{clipboard} = require 'electron'
Account = require "../src/flux/models/account"
AccountStore = require "../src/flux/stores/account-store"
Contact = require '../src/flux/models/contact'
{TaskQueue, ComponentRegistry} = require "nylas-exports"
{Account,
Contact,
TaskQueue,
AccountStore,
DatabaseStore,
ComponentRegistry} = require "nylas-exports"
NylasEnv.themes.loadBaseStylesheets()
NylasEnv.themes.requireStylesheet '../static/jasmine'
@ -112,6 +114,8 @@ beforeEach ->
ComponentRegistry._clear()
global.localStorage.clear()
DatabaseStore._transactionQueue = undefined
TaskQueue._queue = []
TaskQueue._completed = []
TaskQueue._onlineStatus = true

View file

@ -20,7 +20,7 @@ describe "DatabaseStore", ->
DatabaseStore._inTransaction = false
spyOn(ModelQuery.prototype, 'where').andCallThrough()
spyOn(DatabaseStore, '_accumulateAndTrigger').andCallFake -> Promise.resolve()
spyOn(DatabaseStore, 'accumulateAndTrigger').andCallFake -> Promise.resolve()
@performed = []
@ -122,368 +122,82 @@ describe "DatabaseStore", ->
q = DatabaseStore.findAll(TestModel, testMatchers)
expect(q.sql()).toBe("SELECT `TestModel`.`data` FROM `TestModel` WHERE `TestModel`.`id` = 'b' ")
describe "persistModel", ->
it "should throw an exception if the model is not a subclass of Model", ->
expect(-> DatabaseStore.persistModel({id: 'asd', subject: 'bla'})).toThrow()
it "should call through to persistModels", ->
spyOn(DatabaseStore, 'persistModels').andReturn Promise.resolve()
DatabaseStore.persistModel(testModelInstance)
advanceClock()
expect(DatabaseStore.persistModels.callCount).toBe(1)
describe "persistModels", ->
it "should cause the DatabaseStore to trigger with a change that contains the models", ->
waitsForPromise ->
DatabaseStore.persistModels([testModelInstanceA, testModelInstanceB]).then ->
expect(DatabaseStore._accumulateAndTrigger).toHaveBeenCalled()
change = DatabaseStore._accumulateAndTrigger.mostRecentCall.args[0]
expect(change).toEqual
objectClass: TestModel.name,
objectIds: [testModelInstanceA.id, testModelInstanceB.id]
objects: [testModelInstanceA, testModelInstanceB]
type:'persist'
it "should call through to _writeModels after checking them", ->
spyOn(DatabaseStore, '_writeModels').andReturn Promise.resolve()
DatabaseStore.persistModels([testModelInstanceA, testModelInstanceB])
advanceClock()
expect(DatabaseStore._writeModels.callCount).toBe(1)
it "should throw an exception if the models are not the same class,\
since it cannot be specified by the trigger payload", ->
expect(-> DatabaseStore.persistModels([testModelInstanceA, new Label()])).toThrow()
it "should throw an exception if the models are not a subclass of Model", ->
expect(-> DatabaseStore.persistModels([{id: 'asd', subject: 'bla'}])).toThrow()
describe "mutationHooks", ->
beforeEach ->
@beforeShouldThrow = false
@beforeShouldReject = false
@beforeDatabaseChange = jasmine.createSpy('beforeDatabaseChange').andCallFake =>
throw new Error("beforeShouldThrow") if @beforeShouldThrow
new Promise (resolve, reject) =>
setTimeout =>
return resolve(new Error("beforeShouldReject")) if @beforeShouldReject
resolve("value")
, 1000
@afterDatabaseChange = jasmine.createSpy('afterDatabaseChange').andCallFake =>
new Promise (resolve, reject) ->
setTimeout(( => resolve()), 1000)
@hook = {@beforeDatabaseChange, @afterDatabaseChange}
DatabaseStore.addMutationHook(@hook)
@writeModelsResolve = null
spyOn(DatabaseStore, '_writeModels').andCallFake =>
new Promise (resolve, reject) =>
@writeModelsResolve = resolve
afterEach ->
DatabaseStore.removeMutationHook(@hook)
it "should run pre-mutation hooks, wait to write models, and then run post-mutation hooks", ->
DatabaseStore.persistModels([testModelInstanceA, testModelInstanceB])
advanceClock()
expect(@beforeDatabaseChange).toHaveBeenCalledWith(
DatabaseStore._query,
{
objects: [testModelInstanceA, testModelInstanceB]
objectIds: [testModelInstanceA.id, testModelInstanceB.id]
objectClass: testModelInstanceA.constructor.name
type: 'persist'
},
undefined
)
expect(DatabaseStore._writeModels).not.toHaveBeenCalled()
advanceClock(1100)
advanceClock()
expect(DatabaseStore._writeModels).toHaveBeenCalled()
expect(@afterDatabaseChange).not.toHaveBeenCalled()
@writeModelsResolve()
advanceClock()
advanceClock()
expect(@afterDatabaseChange).toHaveBeenCalledWith(
DatabaseStore._query,
{
objects: [testModelInstanceA, testModelInstanceB]
objectIds: [testModelInstanceA.id, testModelInstanceB.id]
objectClass: testModelInstanceA.constructor.name
type: 'persist'
},
"value"
)
it "should carry on if a pre-mutation hook throws", ->
@beforeShouldThrow = true
DatabaseStore.persistModels([testModelInstanceA, testModelInstanceB])
advanceClock()
expect(@beforeDatabaseChange).toHaveBeenCalled()
advanceClock()
advanceClock()
expect(DatabaseStore._writeModels).toHaveBeenCalled()
it "should carry on if a pre-mutation hook rejects", ->
@beforeShouldReject = true
DatabaseStore.persistModels([testModelInstanceA, testModelInstanceB])
advanceClock()
expect(@beforeDatabaseChange).toHaveBeenCalled()
advanceClock()
advanceClock()
expect(DatabaseStore._writeModels).toHaveBeenCalled()
it "should be atomic: other persistModels calls should not run during the pre+write+post series", ->
DatabaseStore.persistModels([testModelInstanceA])
DatabaseStore.persistModels([testModelInstanceB])
# Expect the entire flow (before, write, after) to be called once
# before anything is called twice.
advanceClock()
advanceClock()
expect(@beforeDatabaseChange.callCount).toBe(1)
advanceClock(1100)
advanceClock()
expect(DatabaseStore._writeModels.callCount).toBe(1)
@writeModelsResolve()
advanceClock(1100)
advanceClock()
expect(@afterDatabaseChange.callCount).toBe(1)
advanceClock()
# The second call to persistModels can start now
expect(@beforeDatabaseChange.callCount).toBe(2)
describe "unpersistModel", ->
it "should delete the model by id", ->
describe "inTransaction", ->
it "calls the provided function inside an exclusive transaction", ->
waitsForPromise =>
DatabaseStore.unpersistModel(testModelInstance).then =>
expect(@performed.length).toBe(3)
expect(@performed[0].query).toBe("BEGIN EXCLUSIVE TRANSACTION")
expect(@performed[1].query).toBe("DELETE FROM `TestModel` WHERE `id` = ?")
expect(@performed[1].values[0]).toBe('1234')
expect(@performed[2].query).toBe("COMMIT")
it "should cause the DatabaseStore to trigger() with a change that contains the model", ->
waitsForPromise ->
DatabaseStore.unpersistModel(testModelInstance).then ->
expect(DatabaseStore._accumulateAndTrigger).toHaveBeenCalled()
change = DatabaseStore._accumulateAndTrigger.mostRecentCall.args[0]
expect(change).toEqual({
objectClass: TestModel.name,
objectIds: [testModelInstance.id]
objects: [testModelInstance],
type:'unpersist'
})
describe "when the model has collection attributes", ->
it "should delete all of the elements in the join tables", ->
TestModel.configureWithCollectionAttribute()
waitsForPromise =>
DatabaseStore.unpersistModel(testModelInstance).then =>
expect(@performed.length).toBe(4)
expect(@performed[0].query).toBe("BEGIN EXCLUSIVE TRANSACTION")
expect(@performed[2].query).toBe("DELETE FROM `TestModel-Label` WHERE `id` = ?")
expect(@performed[2].values[0]).toBe('1234')
expect(@performed[3].query).toBe("COMMIT")
describe "when the model has joined data attributes", ->
it "should delete the element in the joined data table", ->
TestModel.configureWithJoinedDataAttribute()
waitsForPromise =>
DatabaseStore.unpersistModel(testModelInstance).then =>
expect(@performed.length).toBe(4)
expect(@performed[0].query).toBe("BEGIN EXCLUSIVE TRANSACTION")
expect(@performed[2].query).toBe("DELETE FROM `TestModelBody` WHERE `id` = ?")
expect(@performed[2].values[0]).toBe('1234')
expect(@performed[3].query).toBe("COMMIT")
describe "_writeModels", ->
it "should compose a REPLACE INTO query to save the model", ->
TestModel.configureWithCollectionAttribute()
DatabaseStore._writeModels([testModelInstance])
expect(@performed[0].query).toBe("REPLACE INTO `TestModel` (id,data,client_id,server_id) VALUES (?,?,?,?)")
it "should save the model JSON into the data column", ->
DatabaseStore._writeModels([testModelInstance])
expect(@performed[0].values[1]).toEqual(JSON.stringify(testModelInstance))
describe "when the model defines additional queryable attributes", ->
beforeEach ->
TestModel.configureWithAllAttributes()
@m = new TestModel
id: 'local-6806434c-b0cd'
datetime: new Date()
string: 'hello world',
boolean: true,
number: 15
it "should populate additional columns defined by the attributes", ->
DatabaseStore._writeModels([@m])
expect(@performed[0].query).toBe("REPLACE INTO `TestModel` (id,data,datetime,string-json-key,boolean,number) VALUES (?,?,?,?,?,?)")
it "should use the JSON-form values of the queryable attributes", ->
json = @m.toJSON()
DatabaseStore._writeModels([@m])
values = @performed[0].values
expect(values[2]).toEqual(json['datetime'])
expect(values[3]).toEqual(json['string-json-key'])
expect(values[4]).toEqual(json['boolean'])
expect(values[5]).toEqual(json['number'])
describe "when the model has collection attributes", ->
beforeEach ->
TestModel.configureWithCollectionAttribute()
@m = new TestModel(id: 'local-6806434c-b0cd')
@m.labels = [new Label(id: 'a'),new Label(id: 'b')]
DatabaseStore._writeModels([@m])
it "should delete all association records for the model from join tables", ->
expect(@performed[1].query).toBe('DELETE FROM `TestModel-Label` WHERE `id` IN (\'local-6806434c-b0cd\')')
it "should insert new association records into join tables in a single query", ->
expect(@performed[2].query).toBe('INSERT OR IGNORE INTO `TestModel-Label` (`id`, `value`) VALUES (?,?),(?,?)')
expect(@performed[2].values).toEqual(['local-6806434c-b0cd', 'a','local-6806434c-b0cd', 'b'])
describe "model collection attributes query building", ->
beforeEach ->
TestModel.configureWithCollectionAttribute()
@m = new TestModel(id: 'local-6806434c-b0cd')
@m.labels = []
it "should page association records into multiple queries correctly", ->
@m.labels.push(new Label(id: "id-#{i}")) for i in [0..199]
DatabaseStore._writeModels([@m])
collectionAttributeQueries = _.filter @performed, (i) ->
i.query.indexOf('INSERT OR IGNORE INTO `TestModel-Label`') == 0
expect(collectionAttributeQueries.length).toBe(1)
expect(collectionAttributeQueries[0].values[399]).toEqual('id-199')
it "should page association records into multiple queries correctly", ->
@m.labels.push(new Label(id: "id-#{i}")) for i in [0..200]
DatabaseStore._writeModels([@m])
collectionAttributeQueries = _.filter @performed, (i) ->
i.query.indexOf('INSERT OR IGNORE INTO `TestModel-Label`') == 0
expect(collectionAttributeQueries.length).toBe(2)
expect(collectionAttributeQueries[0].values[399]).toEqual('id-199')
expect(collectionAttributeQueries[1].values[1]).toEqual('id-200')
it "should page association records into multiple queries correctly", ->
@m.labels.push(new Label(id: "id-#{i}")) for i in [0..201]
DatabaseStore._writeModels([@m])
collectionAttributeQueries = _.filter @performed, (i) ->
i.query.indexOf('INSERT OR IGNORE INTO `TestModel-Label`') == 0
expect(collectionAttributeQueries.length).toBe(2)
expect(collectionAttributeQueries[0].values[399]).toEqual('id-199')
expect(collectionAttributeQueries[1].values[1]).toEqual('id-200')
expect(collectionAttributeQueries[1].values[3]).toEqual('id-201')
describe "when the model has joined data attributes", ->
beforeEach ->
TestModel.configureWithJoinedDataAttribute()
it "should not include the value to the joined attribute in the JSON written to the main model table", ->
@m = new TestModel(clientId: 'local-6806434c-b0cd', serverId: 'server-1', body: 'hello world')
DatabaseStore._writeModels([@m])
expect(@performed[0].values).toEqual(['server-1', '{"client_id":"local-6806434c-b0cd","server_id":"server-1","id":"server-1"}', 'local-6806434c-b0cd', 'server-1'])
it "should write the value to the joined table if it is defined", ->
@m = new TestModel(id: 'local-6806434c-b0cd', body: 'hello world')
DatabaseStore._writeModels([@m])
expect(@performed[1].query).toBe('REPLACE INTO `TestModelBody` (`id`, `value`) VALUES (?, ?)')
expect(@performed[1].values).toEqual([@m.id, @m.body])
it "should not write the value to the joined table if it undefined", ->
@m = new TestModel(id: 'local-6806434c-b0cd')
DatabaseStore._writeModels([@m])
expect(@performed.length).toBe(1)
describe "atomically", ->
it "sets up an exclusive transaction", ->
waitsForPromise =>
DatabaseStore.atomically( =>
DatabaseStore.inTransaction( =>
DatabaseStore._query("TEST")
).then =>
expect(@performed.length).toBe 3
expect(@performed[0].query).toBe "BEGIN EXCLUSIVE TRANSACTION"
expect(@performed[0].query).toBe "BEGIN IMMEDIATE TRANSACTION"
expect(@performed[1].query).toBe "TEST"
expect(@performed[2].query).toBe "COMMIT"
it "preserves resolved values", ->
waitsForPromise =>
DatabaseStore.atomically( =>
DatabaseStore.inTransaction( =>
DatabaseStore._query("TEST")
return Promise.resolve("myValue")
).then (myValue) =>
expect(myValue).toBe "myValue"
it "always fires a COMMIT, even if the promise fails", ->
it "always fires a COMMIT, even if the body function fails", ->
waitsForPromise =>
DatabaseStore.atomically( =>
DatabaseStore.inTransaction( =>
throw new Error("BOOO")
).catch =>
expect(@performed.length).toBe 2
expect(@performed[0].query).toBe "BEGIN EXCLUSIVE TRANSACTION"
expect(@performed[0].query).toBe "BEGIN IMMEDIATE TRANSACTION"
expect(@performed[1].query).toBe "COMMIT"
it "can be called multiple times and get queued", ->
waitsForPromise =>
Promise.all([
DatabaseStore.atomically( -> )
DatabaseStore.atomically( -> )
DatabaseStore.atomically( -> )
DatabaseStore.inTransaction( -> )
DatabaseStore.inTransaction( -> )
DatabaseStore.inTransaction( -> )
]).then =>
expect(@performed.length).toBe 6
expect(@performed[0].query).toBe "BEGIN EXCLUSIVE TRANSACTION"
expect(@performed[0].query).toBe "BEGIN IMMEDIATE TRANSACTION"
expect(@performed[1].query).toBe "COMMIT"
expect(@performed[2].query).toBe "BEGIN EXCLUSIVE TRANSACTION"
expect(@performed[2].query).toBe "BEGIN IMMEDIATE TRANSACTION"
expect(@performed[3].query).toBe "COMMIT"
expect(@performed[4].query).toBe "BEGIN EXCLUSIVE TRANSACTION"
expect(@performed[4].query).toBe "BEGIN IMMEDIATE TRANSACTION"
expect(@performed[5].query).toBe "COMMIT"
it "carries on if one of them fails, but still calls the COMMIT for the failed block", ->
caughtError = false
DatabaseStore.atomically( => DatabaseStore._query("ONE") )
DatabaseStore.atomically( => throw new Error("fail") ).catch ->
DatabaseStore.inTransaction( => DatabaseStore._query("ONE") )
DatabaseStore.inTransaction( => throw new Error("fail") ).catch ->
caughtError = true
DatabaseStore.atomically( => DatabaseStore._query("THREE") )
DatabaseStore.inTransaction( => DatabaseStore._query("THREE") )
advanceClock(100)
expect(@performed.length).toBe 8
expect(@performed[0].query).toBe "BEGIN EXCLUSIVE TRANSACTION"
expect(@performed[0].query).toBe "BEGIN IMMEDIATE TRANSACTION"
expect(@performed[1].query).toBe "ONE"
expect(@performed[2].query).toBe "COMMIT"
expect(@performed[3].query).toBe "BEGIN EXCLUSIVE TRANSACTION"
expect(@performed[3].query).toBe "BEGIN IMMEDIATE TRANSACTION"
expect(@performed[4].query).toBe "COMMIT"
expect(@performed[5].query).toBe "BEGIN EXCLUSIVE TRANSACTION"
expect(@performed[5].query).toBe "BEGIN IMMEDIATE TRANSACTION"
expect(@performed[6].query).toBe "THREE"
expect(@performed[7].query).toBe "COMMIT"
expect(caughtError).toBe true
it "is actually running in series and blocks on never-finishing specs", ->
resolver = null
DatabaseStore.atomically( -> )
DatabaseStore.inTransaction( -> )
advanceClock(100)
expect(@performed.length).toBe 2
expect(@performed[0].query).toBe "BEGIN EXCLUSIVE TRANSACTION"
expect(@performed[0].query).toBe "BEGIN IMMEDIATE TRANSACTION"
expect(@performed[1].query).toBe "COMMIT"
DatabaseStore.atomically( -> new Promise (resolve, reject) -> resolver = resolve)
DatabaseStore.inTransaction( -> new Promise (resolve, reject) -> resolver = resolve)
advanceClock(100)
blockedPromiseDone = false
DatabaseStore.atomically( -> ).then =>
DatabaseStore.inTransaction( -> ).then =>
blockedPromiseDone = true
advanceClock(100)
expect(@performed.length).toBe 3
expect(@performed[2].query).toBe "BEGIN EXCLUSIVE TRANSACTION"
expect(@performed[2].query).toBe "BEGIN IMMEDIATE TRANSACTION"
expect(blockedPromiseDone).toBe false
# Now that we've made our assertion about blocking, we need to clean up
@ -501,9 +215,9 @@ describe "DatabaseStore", ->
v2 = null
v3 = null
Promise.all([
DatabaseStore.atomically( -> "a" ).then (val) -> v1 = val
DatabaseStore.atomically( -> "b" ).then (val) -> v2 = val
DatabaseStore.atomically( -> "c" ).then (val) -> v3 = val
DatabaseStore.inTransaction( -> "a" ).then (val) -> v1 = val
DatabaseStore.inTransaction( -> "b" ).then (val) -> v2 = val
DatabaseStore.inTransaction( -> "c" ).then (val) -> v3 = val
]).then =>
expect(v1).toBe "a"
expect(v2).toBe "b"
@ -511,16 +225,14 @@ describe "DatabaseStore", ->
it "can be called multiple times and get queued", ->
waitsForPromise =>
DatabaseStore.atomically( -> )
.then -> DatabaseStore.atomically( -> )
.then -> DatabaseStore.atomically( -> )
DatabaseStore.inTransaction( -> )
.then -> DatabaseStore.inTransaction( -> )
.then -> DatabaseStore.inTransaction( -> )
.then =>
expect(@performed.length).toBe 6
expect(@performed[0].query).toBe "BEGIN EXCLUSIVE TRANSACTION"
expect(@performed[0].query).toBe "BEGIN IMMEDIATE TRANSACTION"
expect(@performed[1].query).toBe "COMMIT"
expect(@performed[2].query).toBe "BEGIN EXCLUSIVE TRANSACTION"
expect(@performed[2].query).toBe "BEGIN IMMEDIATE TRANSACTION"
expect(@performed[3].query).toBe "COMMIT"
expect(@performed[4].query).toBe "BEGIN EXCLUSIVE TRANSACTION"
expect(@performed[4].query).toBe "BEGIN IMMEDIATE TRANSACTION"
expect(@performed[5].query).toBe "COMMIT"
describe "DatabaseStore::_accumulateAndTrigger", ->

View file

@ -0,0 +1,299 @@
_ = require 'underscore'
Label = require '../../src/flux/models/label'
Thread = require '../../src/flux/models/thread'
TestModel = require '../fixtures/db-test-model'
ModelQuery = require '../../src/flux/models/query'
DatabaseTransaction = require '../../src/flux/stores/database-transaction'
testMatchers = {'id': 'b'}
testModelInstance = new TestModel(id: "1234")
testModelInstanceA = new TestModel(id: "AAA")
testModelInstanceB = new TestModel(id: "BBB")
describe "DatabaseTransaction", ->
beforeEach ->
@databaseMutationHooks = []
@performed = []
@database =
_query: jasmine.createSpy('database._query').andCallFake (query, values=[], options={}) =>
@performed.push({query, values})
Promise.resolve([])
accumulateAndTrigger: jasmine.createSpy('database.accumulateAndTrigger')
mutationHooks: => @databaseMutationHooks
@transaction = new DatabaseTransaction(@database)
describe "execute", ->
describe "persistModel", ->
it "should throw an exception if the model is not a subclass of Model", ->
expect(=> @transaction.persistModel({id: 'asd', subject: 'bla'})).toThrow()
it "should call through to persistModels", ->
spyOn(@transaction, 'persistModels').andReturn Promise.resolve()
@transaction.persistModel(testModelInstance)
advanceClock()
expect(@transaction.persistModels.callCount).toBe(1)
describe "persistModels", ->
it "should call accumulateAndTrigger with a change that contains the models", ->
runs =>
@transaction.execute (t) =>
t.persistModels([testModelInstanceA, testModelInstanceB])
waitsFor =>
@database.accumulateAndTrigger.callCount > 0
runs =>
change = @database.accumulateAndTrigger.mostRecentCall.args[0]
expect(change).toEqual
objectClass: TestModel.name,
objectIds: [testModelInstanceA.id, testModelInstanceB.id]
objects: [testModelInstanceA, testModelInstanceB]
type:'persist'
it "should call through to _writeModels after checking them", ->
spyOn(@transaction, '_writeModels').andReturn Promise.resolve()
@transaction.persistModels([testModelInstanceA, testModelInstanceB])
advanceClock()
expect(@transaction._writeModels.callCount).toBe(1)
it "should throw an exception if the models are not the same class,\
since it cannot be specified by the trigger payload", ->
expect(=> @transaction.persistModels([testModelInstanceA, new Label()])).toThrow()
it "should throw an exception if the models are not a subclass of Model", ->
expect(=> @transaction.persistModels([{id: 'asd', subject: 'bla'}])).toThrow()
describe "mutationHooks", ->
beforeEach ->
@beforeShouldThrow = false
@beforeShouldReject = false
@hook =
beforeDatabaseChange: jasmine.createSpy('beforeDatabaseChange').andCallFake =>
throw new Error("beforeShouldThrow") if @beforeShouldThrow
new Promise (resolve, reject) =>
setTimeout =>
return resolve(new Error("beforeShouldReject")) if @beforeShouldReject
resolve("value")
, 1000
afterDatabaseChange: jasmine.createSpy('afterDatabaseChange').andCallFake =>
new Promise (resolve, reject) ->
setTimeout(( => resolve()), 1000)
@databaseMutationHooks.push(@hook)
@writeModelsResolve = null
spyOn(@transaction, '_writeModels').andCallFake =>
new Promise (resolve, reject) =>
@writeModelsResolve = resolve
it "should run pre-mutation hooks, wait to write models, and then run post-mutation hooks", ->
@transaction.persistModels([testModelInstanceA, testModelInstanceB])
advanceClock()
expect(@hook.beforeDatabaseChange).toHaveBeenCalledWith(
@transaction._query,
{
objects: [testModelInstanceA, testModelInstanceB]
objectIds: [testModelInstanceA.id, testModelInstanceB.id]
objectClass: testModelInstanceA.constructor.name
type: 'persist'
},
undefined
)
expect(@transaction._writeModels).not.toHaveBeenCalled()
advanceClock(1100)
advanceClock()
expect(@transaction._writeModels).toHaveBeenCalled()
expect(@hook.afterDatabaseChange).not.toHaveBeenCalled()
@writeModelsResolve()
advanceClock()
advanceClock()
expect(@hook.afterDatabaseChange).toHaveBeenCalledWith(
@transaction._query,
{
objects: [testModelInstanceA, testModelInstanceB]
objectIds: [testModelInstanceA.id, testModelInstanceB.id]
objectClass: testModelInstanceA.constructor.name
type: 'persist'
},
"value"
)
it "should carry on if a pre-mutation hook throws", ->
@beforeShouldThrow = true
@transaction.persistModels([testModelInstanceA, testModelInstanceB])
advanceClock(1000)
expect(@hook.beforeDatabaseChange).toHaveBeenCalled()
advanceClock()
advanceClock()
expect(@transaction._writeModels).toHaveBeenCalled()
it "should carry on if a pre-mutation hook rejects", ->
@beforeShouldReject = true
@transaction.persistModels([testModelInstanceA, testModelInstanceB])
advanceClock(1000)
expect(@hook.beforeDatabaseChange).toHaveBeenCalled()
advanceClock()
advanceClock()
expect(@transaction._writeModels).toHaveBeenCalled()
describe "unpersistModel", ->
it "should delete the model by id", ->
waitsForPromise =>
@transaction.execute =>
@transaction.unpersistModel(testModelInstance)
.then =>
expect(@performed.length).toBe(3)
expect(@performed[0].query).toBe("BEGIN IMMEDIATE TRANSACTION")
expect(@performed[1].query).toBe("DELETE FROM `TestModel` WHERE `id` = ?")
expect(@performed[1].values[0]).toBe('1234')
expect(@performed[2].query).toBe("COMMIT")
it "should call accumulateAndTrigger with a change that contains the model", ->
runs =>
@transaction.execute =>
@transaction.unpersistModel(testModelInstance)
waitsFor =>
@database.accumulateAndTrigger.callCount > 0
runs =>
change = @database.accumulateAndTrigger.mostRecentCall.args[0]
expect(change).toEqual({
objectClass: TestModel.name,
objectIds: [testModelInstance.id]
objects: [testModelInstance],
type:'unpersist'
})
describe "when the model has collection attributes", ->
it "should delete all of the elements in the join tables", ->
TestModel.configureWithCollectionAttribute()
waitsForPromise =>
@transaction.execute (t) =>
t.unpersistModel(testModelInstance)
.then =>
expect(@performed.length).toBe(4)
expect(@performed[0].query).toBe("BEGIN IMMEDIATE TRANSACTION")
expect(@performed[2].query).toBe("DELETE FROM `TestModel-Label` WHERE `id` = ?")
expect(@performed[2].values[0]).toBe('1234')
expect(@performed[3].query).toBe("COMMIT")
describe "when the model has joined data attributes", ->
it "should delete the element in the joined data table", ->
TestModel.configureWithJoinedDataAttribute()
waitsForPromise =>
@transaction.execute (t) =>
t.unpersistModel(testModelInstance)
.then =>
expect(@performed.length).toBe(4)
expect(@performed[0].query).toBe("BEGIN IMMEDIATE TRANSACTION")
expect(@performed[2].query).toBe("DELETE FROM `TestModelBody` WHERE `id` = ?")
expect(@performed[2].values[0]).toBe('1234')
expect(@performed[3].query).toBe("COMMIT")
describe "_writeModels", ->
it "should compose a REPLACE INTO query to save the model", ->
TestModel.configureWithCollectionAttribute()
@transaction._writeModels([testModelInstance])
expect(@performed[0].query).toBe("REPLACE INTO `TestModel` (id,data,client_id,server_id) VALUES (?,?,?,?)")
it "should save the model JSON into the data column", ->
@transaction._writeModels([testModelInstance])
expect(@performed[0].values[1]).toEqual(JSON.stringify(testModelInstance))
describe "when the model defines additional queryable attributes", ->
beforeEach ->
TestModel.configureWithAllAttributes()
@m = new TestModel
id: 'local-6806434c-b0cd'
datetime: new Date()
string: 'hello world',
boolean: true,
number: 15
it "should populate additional columns defined by the attributes", ->
@transaction._writeModels([@m])
expect(@performed[0].query).toBe("REPLACE INTO `TestModel` (id,data,datetime,string-json-key,boolean,number) VALUES (?,?,?,?,?,?)")
it "should use the JSON-form values of the queryable attributes", ->
json = @m.toJSON()
@transaction._writeModels([@m])
values = @performed[0].values
expect(values[2]).toEqual(json['datetime'])
expect(values[3]).toEqual(json['string-json-key'])
expect(values[4]).toEqual(json['boolean'])
expect(values[5]).toEqual(json['number'])
describe "when the model has collection attributes", ->
beforeEach ->
TestModel.configureWithCollectionAttribute()
@m = new TestModel(id: 'local-6806434c-b0cd')
@m.labels = [new Label(id: 'a'),new Label(id: 'b')]
@transaction._writeModels([@m])
it "should delete all association records for the model from join tables", ->
expect(@performed[1].query).toBe('DELETE FROM `TestModel-Label` WHERE `id` IN (\'local-6806434c-b0cd\')')
it "should insert new association records into join tables in a single query", ->
expect(@performed[2].query).toBe('INSERT OR IGNORE INTO `TestModel-Label` (`id`, `value`) VALUES (?,?),(?,?)')
expect(@performed[2].values).toEqual(['local-6806434c-b0cd', 'a','local-6806434c-b0cd', 'b'])
describe "model collection attributes query building", ->
beforeEach ->
TestModel.configureWithCollectionAttribute()
@m = new TestModel(id: 'local-6806434c-b0cd')
@m.labels = []
it "should page association records into multiple queries correctly", ->
@m.labels.push(new Label(id: "id-#{i}")) for i in [0..199]
@transaction._writeModels([@m])
collectionAttributeQueries = _.filter @performed, (i) ->
i.query.indexOf('INSERT OR IGNORE INTO `TestModel-Label`') == 0
expect(collectionAttributeQueries.length).toBe(1)
expect(collectionAttributeQueries[0].values[399]).toEqual('id-199')
it "should page association records into multiple queries correctly", ->
@m.labels.push(new Label(id: "id-#{i}")) for i in [0..200]
@transaction._writeModels([@m])
collectionAttributeQueries = _.filter @performed, (i) ->
i.query.indexOf('INSERT OR IGNORE INTO `TestModel-Label`') == 0
expect(collectionAttributeQueries.length).toBe(2)
expect(collectionAttributeQueries[0].values[399]).toEqual('id-199')
expect(collectionAttributeQueries[1].values[1]).toEqual('id-200')
it "should page association records into multiple queries correctly", ->
@m.labels.push(new Label(id: "id-#{i}")) for i in [0..201]
@transaction._writeModels([@m])
collectionAttributeQueries = _.filter @performed, (i) ->
i.query.indexOf('INSERT OR IGNORE INTO `TestModel-Label`') == 0
expect(collectionAttributeQueries.length).toBe(2)
expect(collectionAttributeQueries[0].values[399]).toEqual('id-199')
expect(collectionAttributeQueries[1].values[1]).toEqual('id-200')
expect(collectionAttributeQueries[1].values[3]).toEqual('id-201')
describe "when the model has joined data attributes", ->
beforeEach ->
TestModel.configureWithJoinedDataAttribute()
it "should not include the value to the joined attribute in the JSON written to the main model table", ->
@m = new TestModel(clientId: 'local-6806434c-b0cd', serverId: 'server-1', body: 'hello world')
@transaction._writeModels([@m])
expect(@performed[0].values).toEqual(['server-1', '{"client_id":"local-6806434c-b0cd","server_id":"server-1","id":"server-1"}', 'local-6806434c-b0cd', 'server-1'])
it "should write the value to the joined table if it is defined", ->
@m = new TestModel(id: 'local-6806434c-b0cd', body: 'hello world')
@transaction._writeModels([@m])
expect(@performed[1].query).toBe('REPLACE INTO `TestModelBody` (`id`, `value`) VALUES (?, ?)')
expect(@performed[1].values).toEqual([@m.id, @m.body])
it "should not write the value to the joined table if it undefined", ->
@m = new TestModel(id: 'local-6806434c-b0cd')
@transaction._writeModels([@m])
expect(@performed.length).toBe(1)

View file

@ -1,6 +1,7 @@
Message = require '../../src/flux/models/message'
Actions = require '../../src/flux/actions'
DatabaseStore = require '../../src/flux/stores/database-store'
DatabaseTransaction = require '../../src/flux/stores/database-transaction'
DraftStoreProxy = require '../../src/flux/stores/draft-store-proxy'
DraftChangeSet = DraftStoreProxy.DraftChangeSet
_ = require 'underscore'
@ -171,9 +172,8 @@ describe "DraftStoreProxy", ->
@draft = new Message(draft: true, clientId: 'client-id', body: 'A', subject: 'initial')
@proxy = new DraftStoreProxy('client-id', @draft)
spyOn(DatabaseStore, "atomically").andCallFake (fn) ->
return Promise.resolve(fn())
spyOn(DatabaseStore, "persistModel").andReturn Promise.resolve()
spyOn(DatabaseTransaction.prototype, "persistModel").andReturn Promise.resolve()
spyOn(DatabaseTransaction.prototype, "_query").andReturn Promise.resolve()
spyOn(Actions, "queueTask").andReturn Promise.resolve()
it "should ignore the update unless it applies to the current draft", ->
@ -192,17 +192,18 @@ describe "DraftStoreProxy", ->
it "atomically commits changes", ->
spyOn(DatabaseStore, "findBy").andReturn(Promise.resolve(@draft))
spyOn(DatabaseStore, 'inTransaction').andCallThrough()
waitsForPromise =>
@proxy.changes.add({body: "123"}, {immediate: true}).then =>
expect(DatabaseStore.atomically).toHaveBeenCalled()
expect(DatabaseStore.atomically.calls.length).toBe 1
expect(DatabaseStore.inTransaction).toHaveBeenCalled()
expect(DatabaseStore.inTransaction.calls.length).toBe 1
it "persist the applied changes", ->
spyOn(DatabaseStore, "findBy").andReturn(Promise.resolve(@draft))
waitsForPromise =>
@proxy.changes.add({body: "123"}, {immediate: true}).then =>
expect(DatabaseStore.persistModel).toHaveBeenCalled()
updated = DatabaseStore.persistModel.calls[0].args[0]
expect(DatabaseTransaction.prototype.persistModel).toHaveBeenCalled()
updated = DatabaseTransaction.prototype.persistModel.calls[0].args[0]
expect(updated.body).toBe "123"
it "queues a SyncbackDraftTask", ->
@ -218,8 +219,8 @@ describe "DraftStoreProxy", ->
spyOn(DatabaseStore, "findBy").andReturn(Promise.resolve(null))
waitsForPromise =>
@proxy.changes.add({body: "123"}, {immediate: true}).then =>
expect(DatabaseStore.persistModel).toHaveBeenCalled()
updated = DatabaseStore.persistModel.calls[0].args[0]
expect(DatabaseTransaction.prototype.persistModel).toHaveBeenCalled()
updated = DatabaseTransaction.prototype.persistModel.calls[0].args[0]
expect(updated.body).toBe "123"
expect(Actions.queueTask).toHaveBeenCalled()
task = Actions.queueTask.calls[0].args[0]
@ -227,10 +228,11 @@ describe "DraftStoreProxy", ->
it "does nothing if the draft is marked as destroyed", ->
spyOn(DatabaseStore, "findBy").andReturn(Promise.resolve(@draft))
spyOn(DatabaseStore, 'inTransaction').andCallThrough()
waitsForPromise =>
@proxy._destroyed = true
@proxy.changes.add({body: "123"}, {immediate: true}).then =>
expect(DatabaseStore.atomically).not.toHaveBeenCalled()
expect(DatabaseStore.inTransaction).not.toHaveBeenCalled()
describe "draft pristine body", ->
describe "when the draft given to the session is pristine", ->

View file

@ -5,6 +5,7 @@ Contact = require '../../src/flux/models/contact'
ModelQuery = require '../../src/flux/models/query'
AccountStore = require '../../src/flux/stores/account-store'
DatabaseStore = require '../../src/flux/stores/database-store'
DatabaseTransaction = require '../../src/flux/stores/database-transaction'
DraftStore = require '../../src/flux/stores/draft-store'
ComposerExtension = require '../../src/extensions/composer-extension'
SendDraftTask = require '../../src/flux/tasks/send-draft'
@ -36,6 +37,7 @@ class TestExtension extends ComposerExtension
describe "DraftStore", ->
beforeEach ->
spyOn(DatabaseTransaction.prototype, '_query').andCallFake -> Promise.resolve([])
spyOn(NylasEnv, 'newWindow').andCallFake ->
for id, session of DraftStore._draftSessions
if session.teardown
@ -156,7 +158,8 @@ describe "DraftStore", ->
spyOn(DatabaseStore, 'run').andCallFake (query) ->
return Promise.resolve(fakeMessage2) if query._klass is Message
return Promise.reject(new Error('Not Stubbed'))
spyOn(DatabaseStore, 'persistModel').andCallFake -> Promise.resolve()
spyOn(DatabaseTransaction.prototype, 'persistModel').andCallFake -> Promise.resolve()
afterEach ->
# Have to cleanup the DraftStoreProxy objects or we'll get a memory
@ -169,9 +172,9 @@ describe "DraftStore", ->
runs ->
DraftStore._onComposeReply({threadId: fakeThread.id, messageId: fakeMessage1.id})
waitsFor ->
DatabaseStore.persistModel.callCount > 0
DatabaseTransaction.prototype.persistModel.callCount > 0
runs ->
@model = DatabaseStore.persistModel.mostRecentCall.args[0]
@model = DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0]
it "should include quoted text", ->
expect(@model.body.indexOf('blockquote') > 0).toBe(true)
@ -192,9 +195,9 @@ describe "DraftStore", ->
runs ->
DraftStore._onComposeReply({threadId: fakeThread.id, messageId: msgWithReplyTo.id})
waitsFor ->
DatabaseStore.persistModel.callCount > 0
DatabaseTransaction.prototype.persistModel.callCount > 0
runs ->
@model = DatabaseStore.persistModel.mostRecentCall.args[0]
@model = DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0]
expect(@model.to).toEqual(msgWithReplyTo.replyTo)
expect(@model.cc.length).toBe 0
expect(@model.bcc.length).toBe 0
@ -205,9 +208,9 @@ describe "DraftStore", ->
runs ->
DraftStore._onComposeReply({threadId: fakeThread.id, messageId: msgFromMe.id})
waitsFor ->
DatabaseStore.persistModel.callCount > 0
DatabaseTransaction.prototype.persistModel.callCount > 0
runs ->
@model = DatabaseStore.persistModel.mostRecentCall.args[0]
@model = DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0]
expect(@model.to).toEqual(msgFromMe.to)
expect(@model.cc.length).toBe 0
expect(@model.bcc.length).toBe 0
@ -217,9 +220,9 @@ describe "DraftStore", ->
runs ->
DraftStore._onComposeReplyAll({threadId: fakeThread.id, messageId: fakeMessage1.id})
waitsFor ->
DatabaseStore.persistModel.callCount > 0
DatabaseTransaction.prototype.persistModel.callCount > 0
runs ->
@model = DatabaseStore.persistModel.mostRecentCall.args[0]
@model = DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0]
it "should include quoted text", ->
expect(@model.body.indexOf('blockquote') > 0).toBe(true)
@ -252,9 +255,9 @@ describe "DraftStore", ->
runs ->
DraftStore._onComposeReply({threadId: fakeThread.id, messageId: msgWithReplyTo.id})
waitsFor ->
DatabaseStore.persistModel.callCount > 0
DatabaseTransaction.prototype.persistModel.callCount > 0
runs ->
@model = DatabaseStore.persistModel.mostRecentCall.args[0]
@model = DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0]
it "addresses the draft to all of the message's 'ReplyTo' recipients", ->
expect(@model.to).toEqual(msgWithReplyTo.replyTo)
@ -270,9 +273,9 @@ describe "DraftStore", ->
runs ->
DraftStore._onComposeReplyAll({threadId: fakeThread.id, messageId: msgWithReplyToDuplicates.id})
waitsFor ->
DatabaseStore.persistModel.callCount > 0
DatabaseTransaction.prototype.persistModel.callCount > 0
runs ->
model = DatabaseStore.persistModel.mostRecentCall.args[0]
model = DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0]
ccEmails = model.cc.map (cc) -> cc.email
expect(ccEmails.sort()).toEqual(['1@1.com', '2@2.com', '4@4.com'])
toEmails = model.to.map (to) -> to.email
@ -284,9 +287,9 @@ describe "DraftStore", ->
runs ->
DraftStore._onComposeReplyAll({threadId: fakeThread.id, messageId: msgFromMe.id})
waitsFor ->
DatabaseStore.persistModel.callCount > 0
DatabaseTransaction.prototype.persistModel.callCount > 0
runs ->
@model = DatabaseStore.persistModel.mostRecentCall.args[0]
@model = DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0]
expect(@model.to).toEqual(msgFromMe.to)
expect(@model.cc).toEqual(msgFromMe.cc)
expect(@model.bcc.length).toBe 0
@ -296,9 +299,9 @@ describe "DraftStore", ->
runs ->
DraftStore._onComposeForward({threadId: fakeThread.id, messageId: fakeMessageWithFiles.id})
waitsFor ->
DatabaseStore.persistModel.callCount > 0
DatabaseTransaction.prototype.persistModel.callCount > 0
runs ->
@model = DatabaseStore.persistModel.mostRecentCall.args[0]
@model = DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0]
expect(@model.files.length).toBe 2
expect(@model.files[0].filename).toBe "test.jpg"
@ -307,9 +310,9 @@ describe "DraftStore", ->
runs ->
DraftStore._onComposeForward({threadId: fakeThread.id, messageId: fakeMessage1.id})
waitsFor ->
DatabaseStore.persistModel.callCount > 0
DatabaseTransaction.prototype.persistModel.callCount > 0
runs ->
@model = DatabaseStore.persistModel.mostRecentCall.args[0]
@model = DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0]
it "should include quoted text", ->
expect(@model.body.indexOf('blockquote') > 0).toBe(true)
@ -334,18 +337,18 @@ describe "DraftStore", ->
runs ->
DraftStore._onComposeReply({threadId: fakeThread.id, messageId: fakeMessage1.id, popout: true}).catch (error) -> throw new Error (error)
waitsFor ->
DatabaseStore.persistModel.callCount > 0
DatabaseTransaction.prototype.persistModel.callCount > 0
runs ->
@model = DatabaseStore.persistModel.mostRecentCall.args[0]
@model = DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0]
expect(Actions.composePopoutDraft).toHaveBeenCalledWith(@model.clientId)
it "can popout a forward", ->
runs ->
DraftStore._onComposeForward({threadId: fakeThread.id, messageId: fakeMessage1.id, popout: true}).catch (error) -> throw new Error (error)
waitsFor ->
DatabaseStore.persistModel.callCount > 0
DatabaseTransaction.prototype.persistModel.callCount > 0
runs ->
@model = DatabaseStore.persistModel.mostRecentCall.args[0]
@model = DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0]
expect(Actions.composePopoutDraft).toHaveBeenCalledWith(@model.clientId)
describe "_newMessageWithContext", ->
@ -355,7 +358,7 @@ describe "DraftStore", ->
@_callNewMessageWithContext = (context, attributesCallback, modelCallback) ->
waitsForPromise ->
DraftStore._newMessageWithContext(context, attributesCallback).then ->
model = DatabaseStore.persistModel.mostRecentCall.args[0]
model = DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0]
modelCallback(model) if modelCallback
it "should create a new message", ->
@ -828,7 +831,7 @@ describe "DraftStore", ->
it "should give extensions a chance to customize the draft via ext.prepareNewDraft", ->
received = null
spyOn(DatabaseStore, 'persistModel').andCallFake (draft) ->
spyOn(DatabaseTransaction.prototype, 'persistModel').andCallFake (draft) ->
received = draft
Promise.resolve()
waitsForPromise ->
@ -862,7 +865,7 @@ describe "DraftStore", ->
describe "should correctly instantiate drafts for a wide range of mailto URLs", ->
beforeEach ->
spyOn(DatabaseStore, 'persistModel').andCallFake (draft) ->
spyOn(DatabaseTransaction.prototype, 'persistModel').andCallFake (draft) ->
Promise.resolve()
links = [
@ -948,7 +951,7 @@ describe "DraftStore", ->
waitsForPromise ->
DraftStore._onHandleMailtoLink({}, link).then ->
expectedDraft = expected[idx]
received = DatabaseStore.persistModel.mostRecentCall.args[0]
received = DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0]
expect(received['subject']).toEqual(expectedDraft['subject'])
expect(received['body']).toEqual(expectedDraft['body']) if expectedDraft['body']
for attr in ['to', 'cc', 'bcc']

View file

@ -1,5 +1,6 @@
_ = require 'underscore'
DatabaseStore = require '../../src/flux/stores/database-store'
DatabaseTransaction = require '../../src/flux/stores/database-transaction'
ThreadCountsStore = require '../../src/flux/stores/thread-counts-store'
Thread = require '../../src/flux/models/thread'
Folder = require '../../src/flux/models/folder'
@ -173,9 +174,14 @@ describe "ThreadCountsStore", ->
})
it "should persist the new counts to the database", ->
spyOn(DatabaseStore, 'persistJSONBlob')
ThreadCountsStore._saveCounts()
expect(DatabaseStore.persistJSONBlob).toHaveBeenCalledWith(ThreadCountsStore.JSONBlobKey, ThreadCountsStore._counts)
spyOn(DatabaseStore, '_query').andCallFake -> Promise.resolve([])
spyOn(DatabaseTransaction.prototype, 'persistJSONBlob')
runs =>
ThreadCountsStore._saveCounts()
waitsFor =>
DatabaseTransaction.prototype.persistJSONBlob.callCount > 0
runs =>
expect(DatabaseTransaction.prototype.persistJSONBlob).toHaveBeenCalledWith(ThreadCountsStore.JSONBlobKey, ThreadCountsStore._counts)
describe "CategoryDatabaseMutationObserver", ->
beforeEach ->

View file

@ -12,7 +12,6 @@ ChangeLabelsTask = require '../../src/flux/tasks/change-labels-task'
testLabels = {}
testThreads = {}
testMessages = {}
describe "ChangeLabelsTask", ->
beforeEach ->
@ -25,7 +24,6 @@ describe "ChangeLabelsTask", ->
Promise.resolve items.map (item) =>
return testLabels[item] if testLabels[item]
return testThreads[item] if testThreads[item]
return testMessages[item] if testMessages[item]
item
testLabels = @testLabels =
@ -38,21 +36,11 @@ describe "ChangeLabelsTask", ->
't2': new Thread(id: 't2', labels: _.values(@testLabels))
't3': new Thread(id: 't3', labels: [@testLabels['l2'], @testLabels['l3']])
testMessages = @testMessages =
'm1': new Message(id: 'm1', labels: [@testLabels['l1']])
'm2': new Message(id: 'm2', labels: _.values(@testLabels))
'm3': new Message(id: 'm3', labels: [@testLabels['l2'], @testLabels['l3']])
@basicThreadTask = new ChangeLabelsTask
labelsToAdd: ["l1", "l2"]
labelsToRemove: ["l3"]
threads: ['t1']
@basicMessageTask = new ChangeLabelsTask
labelsToAdd: ["l1", "l2"]
labelsToRemove: ["l3"]
messages: ['m1']
describe "description", ->
it "should include the name of the added label if it's the only mutation and it was provided as an object", ->
task = new ChangeLabelsTask(labelsToAdd: ["l1"], labelsToRemove: [], threads: ['t1'])
@ -75,11 +63,12 @@ describe "ChangeLabelsTask", ->
expect(task.description()).toEqual("Changed labels on 3 threads")
describe "performLocal", ->
it "should throw an exception if task has not been given a label, or messages and threads", ->
it "should throw an exception if task has not been given a label, has been given messages, or no threads", ->
badTasks = [
new ChangeLabelsTask(),
new ChangeLabelsTask(threads: [123]),
new ChangeLabelsTask(threads: [123], messages: ["foo"]),
new ChangeLabelsTask(labelsToAdd: ['l2'], labelsToRemove: ['l1'], messages: [123]),
new ChangeLabelsTask(threads: "Thread"),
]
goodTasks = [
@ -88,11 +77,6 @@ describe "ChangeLabelsTask", ->
labelsToRemove: ['l1']
threads: ['t1']
)
new ChangeLabelsTask(
labelsToAdd: ['l2']
labelsToRemove: []
messages: ['m1']
)
]
caught = []
succeeded = []
@ -191,5 +175,3 @@ describe "ChangeLabelsTask", ->
out = task.requestBodyForModel(testThreads['t3'])
expect(out).toEqual(labels: ['l2', 'l3'])
out = task.requestBodyForModel(testMessages['m3'])
expect(out).toEqual(labels: ['l2', 'l3'])

View file

@ -1,16 +1,17 @@
_ = require 'underscore'
Folder = require '../../src/flux/models/folder'
Thread = require '../../src/flux/models/thread'
Message = require '../../src/flux/models/message'
Actions = require '../../src/flux/actions'
NylasAPI = require '../../src/flux/nylas-api'
Query = require '../../src/flux/models/query'
DatabaseStore = require '../../src/flux/stores/database-store'
Task = require '../../src/flux/tasks/task'
ChangeMailTask = require '../../src/flux/tasks/change-mail-task'
{APIError} = require '../../src/flux/errors'
{Utils} = require '../../src/flux/models/utils'
{APIError,
Folder,
Thread,
Message,
Actions,
NylasAPI,
Query,
DatabaseStore,
DatabaseTransaction,
Task,
Utils,
ChangeMailTask} = require 'nylas-exports'
describe "ChangeMailTask", ->
beforeEach ->
@ -47,8 +48,9 @@ describe "ChangeMailTask", ->
models = models[0]
Promise.resolve(models)
spyOn(DatabaseStore, 'persistModels').andReturn(Promise.resolve())
spyOn(DatabaseStore, 'persistModel').andReturn(Promise.resolve())
spyOn(DatabaseTransaction.prototype, 'persistModels').andReturn(Promise.resolve())
spyOn(DatabaseTransaction.prototype, 'persistModel').andReturn(Promise.resolve())
spyOn(DatabaseTransaction.prototype, '_query').andReturn(Promise.resolve([]))
it "leaves subclasses to implement changesToModel", ->
task = new ChangeMailTask()
@ -101,7 +103,7 @@ describe "ChangeMailTask", ->
waitsForPromise =>
@task._performLocalThreads().then =>
expect(@task._applyChanges).toHaveBeenCalledWith(@task.threads)
expect(DatabaseStore.persistModels).toHaveBeenCalledWith([@threadAChanged])
expect(DatabaseTransaction.prototype.persistModels).toHaveBeenCalledWith([@threadAChanged])
describe "when processNestedMessages is overridden to return true", ->
it "fetches messages on changed threads and appends them to the messages to update", ->
@ -122,7 +124,7 @@ describe "ChangeMailTask", ->
waitsForPromise =>
@task._performLocalMessages().then =>
expect(@task._applyChanges).toHaveBeenCalledWith(@task.messages)
expect(DatabaseStore.persistModels).toHaveBeenCalledWith([@threadBMesage1])
expect(DatabaseTransaction.prototype.persistModels).toHaveBeenCalledWith([@threadBMesage1])
describe "_applyChanges", ->
beforeEach ->

View file

@ -1,8 +1,11 @@
DestroyCategoryTask = require "../../src/flux/tasks/destroy-category-task"
NylasAPI = require "../../src/flux/nylas-api"
Task = require '../../src/flux/tasks/task'
{APIError} = require '../../src/flux/errors'
{Label, Folder, DatabaseStore} = require "nylas-exports"
{DestroyCategoryTask,
NylasAPI,
Task,
APIError,
Label,
Folder,
DatabaseStore,
DatabaseTransaction} = require "nylas-exports"
describe "DestroyCategoryTask", ->
pathOf = (fn) ->
@ -25,18 +28,21 @@ describe "DestroyCategoryTask", ->
new DestroyCategoryTask
category: category
beforeEach ->
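# Stub the transaction's raw queries so its BEGIN/COMMIT statements resolve without a real database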
spyOn(DatabaseTransaction.prototype, '_query').andCallFake -> Promise.resolve([])
spyOn(DatabaseTransaction.prototype, 'persistModel').andCallFake -> Promise.resolve()
describe "performLocal", ->
beforeEach ->
spyOn(DatabaseStore, 'persistModel')
it "sets an is deleted flag and persists the category", ->
it "sets an `isDeleted` flag and persists the category", ->
task = makeTask(Folder)
task.performLocal()
expect(DatabaseStore.persistModel).toHaveBeenCalled()
model = DatabaseStore.persistModel.calls[0].args[0]
expect(model.serverId).toEqual "server-444"
expect(model.isDeleted).toBe true
runs =>
task.performLocal()
waitsFor =>
DatabaseTransaction.prototype.persistModel.callCount > 0
runs =>
model = DatabaseTransaction.prototype.persistModel.calls[0].args[0]
expect(model.serverId).toEqual "server-444"
expect(model.isDeleted).toBe true
describe "performRemote", ->
it "throws error when no category present", ->
@ -86,8 +92,6 @@ describe "DestroyCategoryTask", ->
describe "when request fails", ->
beforeEach ->
spyOn(NylasEnv, 'emitError')
spyOn(DatabaseStore, 'persistModel').andCallFake ->
Promise.resolve()
spyOn(NylasAPI, 'makeRequest').andCallFake ->
Promise.reject(new APIError({statusCode: 403}))
@ -100,7 +104,7 @@ describe "DestroyCategoryTask", ->
expect(status).toEqual Task.Status.Failed
expect(task._notifyUserOfError).toHaveBeenCalled()
expect(NylasEnv.emitError).toHaveBeenCalled()
expect(DatabaseStore.persistModel).toHaveBeenCalled()
model = DatabaseStore.persistModel.calls[0].args[0]
expect(DatabaseTransaction.prototype.persistModel).toHaveBeenCalled()
model = DatabaseTransaction.prototype.persistModel.calls[0].args[0]
expect(model.serverId).toEqual "server-444"
expect(model.isDeleted).toBe false

View file

@ -1,16 +1,19 @@
NylasAPI = require '../../src/flux/nylas-api'
Actions = require '../../src/flux/actions'
{APIError} = require '../../src/flux/errors'
EventRSVPTask = require '../../src/flux/tasks/event-rsvp'
DatabaseStore = require '../../src/flux/stores/database-store'
Event = require '../../src/flux/models/event'
AccountStore = require '../../src/flux/stores/account-store'
_ = require 'underscore'
{NylasAPI,
Event,
Actions,
APIError,
EventRSVPTask,
DatabaseStore,
DatabaseTransaction,
AccountStore} = require 'nylas-exports'
describe "EventRSVPTask", ->
beforeEach ->
spyOn(DatabaseStore, 'find').andCallFake => Promise.resolve(@event)
spyOn(DatabaseStore, 'persistModel').andCallFake -> Promise.resolve()
spyOn(DatabaseTransaction.prototype, '_query').andCallFake -> Promise.resolve([])
spyOn(DatabaseTransaction.prototype, 'persistModel').andCallFake -> Promise.resolve()
@myName = "Ben Tester"
@myEmail = "tester@nylas.com"
@event = new Event
@ -43,7 +46,7 @@ describe "EventRSVPTask", ->
it "should trigger an action to persist the change", ->
@task.performLocal()
advanceClock()
expect(DatabaseStore.persistModel).toHaveBeenCalled()
expect(DatabaseTransaction.prototype.persistModel).toHaveBeenCalled()
describe "performRemote", ->
it "should make the POST request to the message endpoint", ->
@ -89,4 +92,4 @@ describe "EventRSVPTask", ->
@task.performLocal()
@task.performRemote()
advanceClock()
expect(DatabaseStore.persistModel).toHaveBeenCalled()
expect(DatabaseTransaction.prototype.persistModel).toHaveBeenCalled()

View file

@ -1,15 +1,18 @@
NylasAPI = require '../../src/flux/nylas-api'
Actions = require '../../src/flux/actions'
SyncbackDraftTask = require '../../src/flux/tasks/syncback-draft'
FileUploadTask = require '../../src/flux/tasks/file-upload-task'
SendDraftTask = require '../../src/flux/tasks/send-draft'
DatabaseStore = require '../../src/flux/stores/database-store'
{APIError} = require '../../src/flux/errors'
Message = require '../../src/flux/models/message'
TaskQueue = require '../../src/flux/stores/task-queue'
SoundRegistry = require '../../src/sound-registry'
_ = require 'underscore'
{APIError,
Actions,
DatabaseStore,
DatabaseTransaction,
Message,
Task,
TaskQueue,
SendDraftTask,
SyncbackDraftTask,
FileUploadTask,
NylasAPI,
SoundRegistry} = require 'nylas-exports'
describe "SendDraftTask", ->
describe "isDependentTask", ->
it "should return true if there are SyncbackDraftTasks for the same draft", ->
@ -92,9 +95,11 @@ describe "SendDraftTask", ->
Promise.resolve(response)
spyOn(DatabaseStore, 'run').andCallFake (klass, id) =>
Promise.resolve(@draft)
spyOn(DatabaseStore, 'unpersistModel').andCallFake (draft) ->
spyOn(DatabaseTransaction.prototype, '_query').andCallFake ->
Promise.resolve([])
spyOn(DatabaseTransaction.prototype, 'unpersistModel').andCallFake (draft) ->
Promise.resolve()
spyOn(DatabaseStore, 'persistModel').andCallFake (draft) ->
spyOn(DatabaseTransaction.prototype, 'persistModel').andCallFake (draft) ->
Promise.resolve()
spyOn(SoundRegistry, "playSound")
spyOn(Actions, "postNotification")
@ -136,8 +141,8 @@ describe "SendDraftTask", ->
expect(@draft.serverId).toBeUndefined()
waitsForPromise =>
@task.performRemote().then =>
expect(DatabaseStore.persistModel).toHaveBeenCalled()
model = DatabaseStore.persistModel.calls[0].args[0]
expect(DatabaseTransaction.prototype.persistModel).toHaveBeenCalled()
model = DatabaseTransaction.prototype.persistModel.calls[0].args[0]
expect(model.clientId).toBe @draftClientId
expect(model.serverId).toBe @serverMessageId
expect(model.draft).toBe false
@ -190,10 +195,10 @@ describe "SendDraftTask", ->
it "should write the saved message to the database with the same client ID", ->
waitsForPromise =>
@task.performRemote().then =>
expect(DatabaseStore.persistModel).toHaveBeenCalled()
expect(DatabaseStore.persistModel.mostRecentCall.args[0].clientId).toEqual(@draftClientId)
expect(DatabaseStore.persistModel.mostRecentCall.args[0].serverId).toEqual('1233123AEDF1')
expect(DatabaseStore.persistModel.mostRecentCall.args[0].draft).toEqual(false)
expect(DatabaseTransaction.prototype.persistModel).toHaveBeenCalled()
expect(DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0].clientId).toEqual(@draftClientId)
expect(DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0].serverId).toEqual('1233123AEDF1')
expect(DatabaseTransaction.prototype.persistModel.mostRecentCall.args[0].draft).toEqual(false)
describe "failing performRemote", ->
beforeEach ->
@ -211,9 +216,11 @@ describe "SendDraftTask", ->
email: 'dummy@nylas.com'
@task = new SendDraftTask("local-1234")
spyOn(Actions, "dequeueTask")
spyOn(DatabaseStore, 'unpersistModel').andCallFake (draft) ->
spyOn(DatabaseTransaction.prototype, '_query').andCallFake ->
Promise.resolve([])
spyOn(DatabaseTransaction.prototype, 'unpersistModel').andCallFake (draft) ->
Promise.resolve()
spyOn(DatabaseStore, 'persistModel').andCallFake (draft) ->
spyOn(DatabaseTransaction.prototype, 'persistModel').andCallFake (draft) ->
Promise.resolve()
describe "when the server responds with `Invalid message public ID`", ->
@ -293,4 +300,3 @@ describe "SendDraftTask", ->
task.onDependentTaskError(fileUploadTask, new Error("Oh no"))
expect(task._notifyUserOfError).toHaveBeenCalled()
expect(task._notifyUserOfError.calls.length).toBe 1

View file

@ -1,6 +1,9 @@
SyncbackCategoryTask = require "../../src/flux/tasks/syncback-category-task"
NylasAPI = require "../../src/flux/nylas-api"
{Label, Folder, DatabaseStore} = require "nylas-exports"
{Label,
NylasAPI,
Folder,
DatabaseStore,
SyncbackCategoryTask,
DatabaseTransaction} = require "nylas-exports"
describe "SyncbackCategoryTask", ->
describe "performRemote", ->
@ -24,7 +27,8 @@ describe "SyncbackCategoryTask", ->
beforeEach ->
spyOn(NylasAPI, "makeRequest").andCallFake ->
Promise.resolve(id: "server-444")
spyOn(DatabaseStore, "persistModel")
spyOn(DatabaseTransaction.prototype, "_query").andCallFake => Promise.resolve([])
spyOn(DatabaseTransaction.prototype, "persistModel")
it "sends API req to /labels if user uses labels", ->
task = makeTask(Label)
@ -51,7 +55,7 @@ describe "SyncbackCategoryTask", ->
task = makeTask(Label)
task.performRemote({})
.then ->
expect(DatabaseStore.persistModel).toHaveBeenCalled()
model = DatabaseStore.persistModel.calls[0].args[0]
expect(DatabaseTransaction.prototype.persistModel).toHaveBeenCalled()
model = DatabaseTransaction.prototype.persistModel.calls[0].args[0]
expect(model.clientId).toBe "local-444"
expect(model.serverId).toBe "server-444"

View file

@ -1,17 +1,17 @@
_ = require 'underscore'
NylasAPI = require '../../src/flux/nylas-api'
Task = require '../../src/flux/tasks/task'
Actions = require '../../src/flux/actions'
Message = require '../../src/flux/models/message'
Account = require '../../src/flux/models/account'
Contact = require '../../src/flux/models/contact'
{APIError} = require '../../src/flux/errors'
AccountStore = require '../../src/flux/stores/account-store'
DatabaseStore = require '../../src/flux/stores/database-store'
TaskQueue = require '../../src/flux/stores/task-queue'
SyncbackDraftTask = require '../../src/flux/tasks/syncback-draft'
{DatabaseTransaction,
SyncbackDraftTask,
DatabaseStore,
AccountStore,
TaskQueue,
Contact,
Message,
Account,
Actions,
Task,
APIError,
NylasAPI} = require 'nylas-exports'
inboxError =
message: "No draft with public id bvn4aydxuyqlbmzowh4wraysg",
@ -41,13 +41,11 @@ describe "SyncbackDraftTask", ->
else if clientId is "missingDraftId" then Promise.resolve()
else return Promise.resolve()
spyOn(DatabaseStore, "persistModel").andCallFake ->
spyOn(DatabaseTransaction.prototype, "_query").andCallFake ->
Promise.resolve([])
spyOn(DatabaseTransaction.prototype, "persistModel").andCallFake ->
Promise.resolve()
spyOn(DatabaseStore, "_wrapInTransaction").andCallFake (fn) ->
fn()
return Promise.resolve()
describe "queueing multiple tasks", ->
beforeEach ->
@taskA = new SyncbackDraftTask("draft-123")

View file

@ -2,7 +2,6 @@ _ = require 'underscore'
moment = require 'moment'
File = require './file'
Label = require './label'
Utils = require './utils'
Folder = require './folder'
Model = require './model'
@ -149,11 +148,6 @@ class Message extends Model
modelKey: 'folder'
itemClass: Folder
'labels': Attributes.Collection
queryable: true
modelKey: 'labels'
itemClass: Label
@naturalSortOrder: ->
Message.attributes.date.ascending()

View file

@ -215,10 +215,11 @@ class NylasAPI
if klass and klassId and klassId.length > 0
unless NylasEnv.inSpecMode()
console.warn("Deleting #{klass.name}:#{klassId} due to API 404")
DatabaseStore.find(klass, klassId).then (model) ->
if model
return DatabaseStore.unpersistModel(model)
else return Promise.resolve()
DatabaseStore.inTransaction (t) ->
t.find(klass, klassId).then (model) ->
return Promise.resolve() unless model
return t.unpersistModel(model)
else
return Promise.resolve()
@ -283,36 +284,37 @@ class NylasAPI
# Step 3: Retrieve any existing models from the database for the given IDs.
ids = _.pluck(unlockedJSONs, 'id')
DatabaseStore = require './stores/database-store'
DatabaseStore.atomically =>
DatabaseStore.findAll(klass).where(klass.attributes.id.in(ids)).then (models) ->
existingModels = {}
existingModels[model.id] = model for model in models
DatabaseStore.findAll(klass).where(klass.attributes.id.in(ids)).then (models) ->
existingModels = {}
existingModels[model.id] = model for model in models
responseModels = []
changedModels = []
responseModels = []
changedModels = []
# Step 4: Merge the response data into the existing data for each model,
# skipping changes when we already have the given version
unlockedJSONs.forEach (json) =>
model = existingModels[json.id]
# Step 4: Merge the response data into the existing data for each model,
# skipping changes when we already have the given version
unlockedJSONs.forEach (json) =>
model = existingModels[json.id]
isSameOrNewerVersion = model and model.version? and json.version? and model.version >= json.version
isAlreadySent = model and model.draft is false and json.draft is true
isSameOrNewerVersion = model and model.version? and json.version? and model.version >= json.version
isAlreadySent = model and model.draft is false and json.draft is true
if isSameOrNewerVersion
json._delta?.ignoredBecause = "JSON v#{json.version} <= model v#{model.version}"
else if isAlreadySent
json._delta?.ignoredBecause = "Model #{model.id} is already sent!"
else
model ?= new klass()
model.fromJSON(json)
changedModels.push(model)
responseModels.push(model)
if isSameOrNewerVersion
json._delta?.ignoredBecause = "JSON v#{json.version} <= model v#{model.version}"
else if isAlreadySent
json._delta?.ignoredBecause = "Model #{model.id} is already sent!"
else
model ?= new klass()
model.fromJSON(json)
changedModels.push(model)
responseModels.push(model)
# Step 5: Save models that have changed, and then return all of the models
# that were in the response body.
DatabaseStore.persistModels(changedModels).then ->
return Promise.resolve(responseModels)
# Step 5: Save models that have changed, and then return all of the models
# that were in the response body.
DatabaseStore.inTransaction (t) ->
t.persistModels(changedModels)
.then ->
return Promise.resolve(responseModels)
_apiObjectToClassMap:
"file": require('./models/file')

View file

@ -164,7 +164,6 @@ class AccountStore
_importFakeData: (dir) =>
fs = require 'fs-plus'
path = require 'path'
DatabaseStore = require './database-store'
Message = require '../models/message'
Account = require '../models/account'
Thread = require '../models/thread'
@ -232,12 +231,14 @@ class AccountStore
for filename in fs.readdirSync(downloadsDir)
fs.copySync(path.join(downloadsDir, filename), path.join(NylasEnv.getConfigDirPath(), 'downloads', filename))
Promise.all([
DatabaseStore.persistModel(account),
DatabaseStore.persistModels(_.values(labels)),
DatabaseStore.persistModels(messages),
DatabaseStore.persistModels(threads)
]).then =>
DatabaseStore.inTransaction (t) =>
Promise.all([
t.persistModel(account),
t.persistModels(_.values(labels)),
t.persistModels(messages),
t.persistModels(threads)
])
.then =>
Actions.selectAccount account.id
module.exports = new AccountStore()

View file

@ -8,18 +8,14 @@ Actions = require '../actions'
ModelQuery = require '../models/query'
NylasStore = require '../../global/nylas-store'
PromiseQueue = require 'promise-queue'
PriorityUICoordinator = require '../../priority-ui-coordinator'
DatabaseSetupQueryBuilder = require './database-setup-query-builder'
DatabaseChangeRecord = require './database-change-record'
PriorityUICoordinator = require '../../priority-ui-coordinator'
DatabaseTransaction = require './database-transaction'
{ipcRenderer} = require 'electron'
{AttributeCollection, AttributeJoinedData} = require '../attributes'
{tableNameForJoin} = require '../models/utils'
DatabaseVersion = 16
DatabasePhase =
Setup: 'setup'
Ready: 'ready'
@ -32,10 +28,14 @@ DEBUG_MISSING_ACCOUNT_ID = false
BEGIN_TRANSACTION = 'BEGIN TRANSACTION'
COMMIT = 'COMMIT'
TXINDEX = 0
class JSONBlobQuery extends ModelQuery
formatResultObjects: (objects) =>
return objects[0]?.json || null
###
Public: N1 is built on top of a custom database layer modeled after
ActiveRecord. For many parts of the application, the database is the source
@ -65,6 +65,7 @@ _onDataChanged: (change) ->
```
The local cache changes very frequently, and your stores and components should
carefully choose when to refresh their data. The `change` object passed to your
event handler allows you to decide whether to refresh your data and exposes
@ -404,91 +405,6 @@ class DatabaseStore extends NylasStore
result = modelQuery.formatResultObjects(result) unless options.format is false
Promise.resolve(result)
# Public: Asynchronously writes `model` to the cache and triggers a change event.
#
# - `model` A {Model} to write to the database.
#
# Returns a {Promise} that
# - resolves after the database queries are complete and any listening
# database callbacks have finished
# - rejects if any database query fails or one of the triggering
# callbacks failed
persistModel: (model) =>
unless model and model instanceof Model
throw new Error("DatabaseStore::persistModel - You must pass an instance of the Model class.")
@persistModels([model])
# Public: Asynchronously writes `models` to the cache and triggers a single change
# event. Note: Models must be of the same class to be persisted in a batch operation.
#
# - `models` An {Array} of {Model} objects to write to the database.
#
# Returns a {Promise} that
# - resolves after the database queries are complete and any listening
# database callbacks have finished
# - rejects if any database query fails or one of the triggering
# callbacks failed
persistModels: (models=[]) =>
return Promise.resolve() if models.length is 0
klass = models[0].constructor
clones = []
ids = {}
unless models[0] instanceof Model
throw new Error("DatabaseStore::persistModels - You must pass an array of items which descend from the Model class.")
for model in models
unless model and model.constructor is klass
throw new Error("DatabaseStore::persistModels - When you batch persist objects, they must be of the same type")
if ids[model.id]
throw new Error("DatabaseStore::persistModels - You must pass an array of models with different ids. ID #{model.id} is in the set multiple times.")
clones.push(model.clone())
ids[model.id] = true
# Note: It's important that we clone the objects since other code could mutate
# them during the save process. We want to guarantee that the models you send to
# persistModels are saved exactly as they were sent.
@atomicMutation =>
metadata =
objectClass: clones[0].constructor.name
objectIds: Object.keys(ids)
objects: clones
type: 'persist'
@_runMutationHooks('beforeDatabaseChange', metadata).then (data) =>
@_writeModels(clones).then =>
@_runMutationHooks('afterDatabaseChange', metadata, data)
@_accumulateAndTrigger(metadata)
# Public: Asynchronously removes `model` from the cache and triggers a change event.
#
# - `model` A {Model} to write to the database.
#
# Returns a {Promise} that
# - resolves after the database queries are complete and any listening
# database callbacks have finished
# - rejects if any database query fails or one of the triggering
# callbacks failed
unpersistModel: (model) =>
model = model.clone()
@atomicMutation =>
metadata =
objectClass: model.constructor.name,
objectIds: [model.id]
objects: [model],
type: 'unpersist'
@_runMutationHooks('beforeDatabaseChange', metadata).then (data) =>
@_deleteModel(model).then =>
@_runMutationHooks('afterDatabaseChange', metadata, data)
@_accumulateAndTrigger(metadata)
persistJSONBlob: (id, json) ->
JSONBlob = require '../models/json-blob'
@persistModel(new JSONBlob({id, json}))
findJSONBlob: (id) ->
JSONBlob = require '../models/json-blob'
new JSONBlobQuery(JSONBlob, @).where({id}).one()
@ -513,50 +429,32 @@ class DatabaseStore extends NylasStore
removeMutationHook: (hook) ->
@_databaseMutationHooks = _.without(@_databaseMutationHooks, hook)
_runMutationHooks: (selectorName, metadata, data = []) ->
beforePromises = @_databaseMutationHooks.map (hook, idx) =>
Promise.try =>
hook[selectorName](@_query, metadata, data[idx])
Promise.all(beforePromises).catch (e) =>
unless NylasEnv.inSpecMode()
console.warn("DatabaseStore Hook: #{selectorName} failed", e)
Promise.resolve([])
atomically: (fn) =>
@_atomicallyQueue ?= new PromiseQueue(1, Infinity)
@_atomicallyQueue.add(=> @_ensureInTransaction(fn))
atomicMutation: (fn) =>
@_mutationQueue ?= new PromiseQueue(1, Infinity)
@_mutationQueue.add(=> @_ensureInTransaction(fn))
_ensureInTransaction: (fn) ->
return fn() if @_inTransaction
@_wrapInTransaction(fn)
_wrapInTransaction: (fn) ->
@_inTransaction = true
@_query("BEGIN EXCLUSIVE TRANSACTION")
.then =>
# NOTE: The value that `fn` resolves to is propagated all the way back to
# the original caller of `atomically`
fn()
.finally (val) =>
@_query("COMMIT")
@_inTransaction = false
mutationHooks: ->
@_databaseMutationHooks
########################################################################
########################### PRIVATE METHODS ############################
########################################################################
# Public: Opens a new database transaction for writing changes.
# DatabaseStore.inTransaction makes the following guarantees:
#
# - No other calls to `inTransaction` will run until the promise has finished.
#
# - No other process will be able to write to sqlite while the provided function
# is running. "BEGIN IMMEDIATE TRANSACTION" semantics are:
# + No other connection will be able to write any changes.
# + Other connections can read from the database, but they will not see
# pending changes.
#
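# For example (the model and attribute below are illustrative); the value the
# provided function resolves to is propagated back to the caller of `inTransaction`:
#
#   DatabaseStore.inTransaction (t) =>
#     t.findBy(Thread, clientId: myClientId).then (thread) ->
#       thread.starred = true
#       t.persistModel(thread).then -> thread
#   .then (thread) ->
#     # continue working with `thread` outside the transaction
#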
inTransaction: (fn) ->
t = new DatabaseTransaction(@)
@_transactionQueue ?= new PromiseQueue(1, Infinity)
@_transactionQueue.add -> t.execute(fn)
# _accumulateAndTrigger is a guarded version of trigger that can accumulate changes.
# This means that even if you're a bad person and call `persistModel` 100 times
# from 100 task objects queued at the same time, it will only create one
# `trigger` event. This is important since the database triggering impacts
# the entire application.
_accumulateAndTrigger: (change) =>
accumulateAndTrigger: (change) =>
@_triggerPromise ?= new Promise (resolve, reject) =>
@_resolve = resolve
@ -587,132 +485,6 @@ class DatabaseStore extends NylasStore
return @_triggerPromise
# Fires the queries required to write models to the DB
#
# Returns a promise that:
# - resolves when all write queries are complete
# - rejects if any query fails
_writeModels: (models) =>
promises = []
# IMPORTANT: This method assumes that all the models you
# provide are of the same class, and have different ids!
# Avoid trying to write too many objects at a time - sqlite can only handle
# value sets `(?,?)...` of less than SQLITE_MAX_COMPOUND_SELECT (500),
# and we don't know ahead of time whether we'll hit that or not.
if models.length > 50
return Promise.all([
@_writeModels(models[0..49])
@_writeModels(models[50..models.length])
])
klass = models[0].constructor
attributes = _.values(klass.attributes)
columnAttributes = _.filter attributes, (attr) ->
attr.queryable && attr.columnSQL && attr.jsonKey != 'id'
# Compute the columns in the model table and a question mark string
columns = ['id', 'data']
marks = ['?', '?']
columnAttributes.forEach (attr) ->
columns.push(attr.jsonKey)
marks.push('?')
columnsSQL = columns.join(',')
marksSet = "(#{marks.join(',')})"
# Prepare a batch insert VALUES (?,?,?), (?,?,?)... by assembling
# an array of the values and a corresponding question mark set
values = []
marks = []
ids = []
for model in models
json = model.toJSON(joined: false)
ids.push(model.id)
values.push(model.id, JSON.stringify(json, Utils.registeredObjectReplacer))
columnAttributes.forEach (attr) ->
values.push(json[attr.jsonKey])
marks.push(marksSet)
marksSQL = marks.join(',')
promises.push @_query("REPLACE INTO `#{klass.name}` (#{columnsSQL}) VALUES #{marksSQL}", values)
# For each join table property, find all the items in the join table for this
# model and delete them. Insert each new value back into the table.
collectionAttributes = _.filter attributes, (attr) ->
attr.queryable && attr instanceof AttributeCollection
collectionAttributes.forEach (attr) =>
joinTable = tableNameForJoin(klass, attr.itemClass)
promises.push @_query("DELETE FROM `#{joinTable}` WHERE `id` IN ('#{ids.join("','")}')")
joinMarks = []
joinedValues = []
for model in models
joinedModels = model[attr.modelKey]
if joinedModels
for joined in joinedModels
joinMarks.push('(?,?)')
joinedValues.push(model.id, joined.id)
unless joinedValues.length is 0
# Write no more than 200 items (400 values) at once to avoid sqlite limits
# 399 values: slices:[0..0]
# 400 values: slices:[0..0]
# 401 values: slices:[0..1]
slicePageCount = Math.ceil(joinedValues.length / 400) - 1
for slice in [0..slicePageCount] by 1
[ms, me] = [slice*200, slice*200 + 199]
[vs, ve] = [slice*400, slice*400 + 399]
promises.push @_query("INSERT OR IGNORE INTO `#{joinTable}` (`id`, `value`) VALUES #{joinMarks[ms..me].join(',')}", joinedValues[vs..ve])
# For each joined data property stored in another table...
values = []
marks = []
joinedDataAttributes = _.filter attributes, (attr) ->
attr instanceof AttributeJoinedData
joinedDataAttributes.forEach (attr) =>
for model in models
if model[attr.modelKey]?
promises.push @_query("REPLACE INTO `#{attr.modelTable}` (`id`, `value`) VALUES (?, ?)", [model.id, model[attr.modelKey]])
return Promise.all(promises)
# Fires the queries required to delete models from the DB
#
# Returns a promise that:
# - resolves when all deletion queries are complete
# - rejects if any query fails
_deleteModel: (model) =>
promises = []
klass = model.constructor
attributes = _.values(klass.attributes)
# Delete the primary record
promises.push @_query("DELETE FROM `#{klass.name}` WHERE `id` = ?", [model.id])
# For each join table property, find all the items in the join table for this
# model and delete them.
collectionAttributes = _.filter attributes, (attr) ->
attr.queryable && attr instanceof AttributeCollection
collectionAttributes.forEach (attr) =>
joinTable = tableNameForJoin(klass, attr.itemClass)
promises.push @_query("DELETE FROM `#{joinTable}` WHERE `id` = ?", [model.id])
joinedDataAttributes = _.filter attributes, (attr) ->
attr instanceof AttributeJoinedData
joinedDataAttributes.forEach (attr) =>
promises.push @_query("DELETE FROM `#{attr.modelTable}` WHERE `id` = ?", [model.id])
return Promise.all(promises)
module.exports = new DatabaseStore()
module.exports.ChangeRecord = DatabaseChangeRecord

View file

@ -0,0 +1,263 @@
_ = require 'underscore'
Model = require '../models/model'
Utils = require '../models/utils'
{AttributeCollection, AttributeJoinedData} = require '../attributes'
{tableNameForJoin} = require '../models/utils'
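# A DatabaseTransaction is created by DatabaseStore.inTransaction and handed to the
# function you provide. It exposes the read helpers below along with persistModel,
# persistModels, unpersistModel and persistJSONBlob. Writes accumulate change records
# that are handed back to the DatabaseStore once the transaction has run.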
class DatabaseTransaction
constructor: (@database) ->
@_changeRecords = []
@_opened = false
find: (args...) => @database.find(args...)
findBy: (args...) => @database.findBy(args...)
findAll: (args...) => @database.findAll(args...)
count: (args...) => @database.count(args...)
findJSONBlob: (args...) => @database.findJSONBlob(args...)
execute: (fn) =>
if @_opened
throw new Error("DatabaseTransaction:execute was already called")
start = Date.now()
@_query("BEGIN IMMEDIATE TRANSACTION")
.then =>
@_opened = true
fn(@)
.finally =>
if @_opened
@_query("COMMIT")
@_opened = false
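# Hand the accumulated change records to the DatabaseStore for triggering,
# now that COMMIT has been issued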
global.setImmediate =>
for record in @_changeRecords
@database.accumulateAndTrigger(record)
# Mutating the Database
persistJSONBlob: (id, json) ->
JSONBlob = require '../models/json-blob'
@persistModel(new JSONBlob({id, json}))
# Public: Asynchronously writes `model` to the cache and triggers a change event.
#
# - `model` A {Model} to write to the database.
#
# Returns a {Promise} that
# - resolves after the database queries are complete and any listening
# database callbacks have finished
# - rejects if any database query fails or one of the triggering
# callbacks failed
persistModel: (model) =>
unless model and model instanceof Model
throw new Error("DatabaseTransaction::persistModel - You must pass an instance of the Model class.")
@persistModels([model])
# Public: Asynchronously writes `models` to the cache and triggers a single change
# event. Note: Models must be of the same class to be persisted in a batch operation.
#
# - `models` An {Array} of {Model} objects to write to the database.
#
# Returns a {Promise} that
# - resolves after the database queries are complete and any listening
# database callbacks have finished
# - rejects if any database query fails or one of the triggering
# callbacks failed
persistModels: (models=[], {}) =>
return Promise.resolve() if models.length is 0
klass = models[0].constructor
clones = []
ids = {}
unless models[0] instanceof Model
throw new Error("DatabaseTransaction::persistModels - You must pass an array of items which descend from the Model class.")
for model in models
unless model and model.constructor is klass
throw new Error("DatabaseTransaction::persistModels - When you batch persist objects, they must be of the same type")
if ids[model.id]
throw new Error("DatabaseTransaction::persistModels - You must pass an array of models with different ids. ID #{model.id} is in the set multiple times.")
clones.push(model.clone())
ids[model.id] = true
# Note: It's important that we clone the objects since other code could mutate
# them during the save process. We want to guarantee that the models you send to
# persistModels are saved exactly as they were sent.
metadata =
objectClass: clones[0].constructor.name
objectIds: Object.keys(ids)
objects: clones
type: 'persist'
@_runMutationHooks('beforeDatabaseChange', metadata).then (data) =>
@_writeModels(clones).then =>
@_runMutationHooks('afterDatabaseChange', metadata, data)
@_changeRecords.push(metadata)
# Public: Asynchronously removes `model` from the cache and triggers a change event.
#
# - `model` A {Model} to write to the database.
#
# Returns a {Promise} that
# - resolves after the database queries are complete and any listening
# database callbacks have finished
# - rejects if any database query fails or one of the triggering
# callbacks failed
unpersistModel: (model) =>
model = model.clone()
metadata =
objectClass: model.constructor.name,
objectIds: [model.id]
objects: [model],
type: 'unpersist'
@_runMutationHooks('beforeDatabaseChange', metadata).then (data) =>
@_deleteModel(model).then =>
@_runMutationHooks('afterDatabaseChange', metadata, data)
@_changeRecords.push(metadata)
########################################################################
########################### PRIVATE METHODS ############################
########################################################################
_query: =>
@database._query(arguments...)
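# A mutation hook is an object implementing `beforeDatabaseChange` and `afterDatabaseChange`.
# Whatever `beforeDatabaseChange` resolves to is handed back to the same hook's
# `afterDatabaseChange` after the writes have run. A minimal sketch (the hook body is
# illustrative):
#
#   hook =
#     beforeDatabaseChange: (query, metadata) ->
#       Promise.resolve(startedAt: Date.now())
#     afterDatabaseChange: (query, metadata, beforeData) ->
#       console.log(metadata.type, metadata.objectIds, beforeData.startedAt)
#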
_runMutationHooks: (selectorName, metadata, data = []) =>
beforePromises = @database.mutationHooks().map (hook, idx) =>
Promise.try =>
hook[selectorName](@_query, metadata, data[idx])
Promise.all(beforePromises).catch (e) =>
unless NylasEnv.inSpecMode()
console.warn("DatabaseTransaction Hook: #{selectorName} failed", e)
Promise.resolve([])
# Fires the queries required to write models to the DB
#
# Returns a promise that:
# - resolves when all write queries are complete
# - rejects if any query fails
_writeModels: (models) =>
promises = []
# IMPORTANT: This method assumes that all the models you
# provide are of the same class, and have different ids!
# Avoid trying to write too many objects at a time - sqlite can only handle
# value sets `(?,?)...` of less than SQLITE_MAX_COMPOUND_SELECT (500),
# and we don't know ahead of time whether we'll hit that or not.
if models.length > 50
return Promise.all([
@_writeModels(models[0..49])
@_writeModels(models[50..models.length])
])
klass = models[0].constructor
attributes = _.values(klass.attributes)
columnAttributes = _.filter attributes, (attr) ->
attr.queryable && attr.columnSQL && attr.jsonKey != 'id'
# Compute the columns in the model table and a question mark string
columns = ['id', 'data']
marks = ['?', '?']
columnAttributes.forEach (attr) ->
columns.push(attr.jsonKey)
marks.push('?')
columnsSQL = columns.join(',')
marksSet = "(#{marks.join(',')})"
# Prepare a batch insert VALUES (?,?,?), (?,?,?)... by assembling
# an array of the values and a corresponding question mark set
values = []
marks = []
ids = []
for model in models
json = model.toJSON(joined: false)
ids.push(model.id)
values.push(model.id, JSON.stringify(json, Utils.registeredObjectReplacer))
columnAttributes.forEach (attr) ->
values.push(json[attr.jsonKey])
marks.push(marksSet)
marksSQL = marks.join(',')
promises.push @_query("REPLACE INTO `#{klass.name}` (#{columnsSQL}) VALUES #{marksSQL}", values)
# For each join table property, find all the items in the join table for this
# model and delete them. Insert each new value back into the table.
collectionAttributes = _.filter attributes, (attr) ->
attr.queryable && attr instanceof AttributeCollection
collectionAttributes.forEach (attr) =>
joinTable = tableNameForJoin(klass, attr.itemClass)
promises.push @_query("DELETE FROM `#{joinTable}` WHERE `id` IN ('#{ids.join("','")}')")
joinMarks = []
joinedValues = []
for model in models
joinedModels = model[attr.modelKey]
if joinedModels
for joined in joinedModels
joinMarks.push('(?,?)')
joinedValues.push(model.id, joined.id)
unless joinedValues.length is 0
# Write no more than 200 items (400 values) at once to avoid sqlite limits
# 399 values: slices:[0..0]
# 400 values: slices:[0..0]
# 401 values: slices:[0..1]
slicePageCount = Math.ceil(joinedValues.length / 400) - 1
for slice in [0..slicePageCount] by 1
[ms, me] = [slice*200, slice*200 + 199]
[vs, ve] = [slice*400, slice*400 + 399]
promises.push @_query("INSERT OR IGNORE INTO `#{joinTable}` (`id`, `value`) VALUES #{joinMarks[ms..me].join(',')}", joinedValues[vs..ve])
# For each joined data property stored in another table...
values = []
marks = []
joinedDataAttributes = _.filter attributes, (attr) ->
attr instanceof AttributeJoinedData
joinedDataAttributes.forEach (attr) =>
for model in models
if model[attr.modelKey]?
promises.push @_query("REPLACE INTO `#{attr.modelTable}` (`id`, `value`) VALUES (?, ?)", [model.id, model[attr.modelKey]])
return Promise.all(promises)
# Fires the queries required to delete models from the DB
#
# Returns a promise that:
# - resolves when all deletion queries are complete
# - rejects if any query fails
_deleteModel: (model) =>
promises = []
klass = model.constructor
attributes = _.values(klass.attributes)
# Delete the primary record
promises.push @_query("DELETE FROM `#{klass.name}` WHERE `id` = ?", [model.id])
# For each join table property, find all the items in the join table for this
# model and delete them.
collectionAttributes = _.filter attributes, (attr) ->
attr.queryable && attr instanceof AttributeCollection
collectionAttributes.forEach (attr) =>
joinTable = tableNameForJoin(klass, attr.itemClass)
promises.push @_query("DELETE FROM `#{joinTable}` WHERE `id` = ?", [model.id])
joinedDataAttributes = _.filter attributes, (attr) ->
attr instanceof AttributeJoinedData
joinedDataAttributes.forEach (attr) =>
promises.push @_query("DELETE FROM `#{attr.modelTable}` WHERE `id` = ?", [model.id])
return Promise.all(promises)
module.exports = DatabaseTransaction

View file

@ -165,8 +165,8 @@ class DraftStoreProxy
# underneath us
inMemoryDraft = @_draft
DatabaseStore.atomically =>
DatabaseStore.findBy(Message, clientId: inMemoryDraft.clientId).then (draft) =>
DatabaseStore.inTransaction (t) =>
t.findBy(Message, clientId: inMemoryDraft.clientId).then (draft) =>
# This can happen if we get a "delete" delta, or something else
# strange happens. In this case, we'll use the @_draft we have in
# memory to apply the changes to. On the `persistModel` in the
@ -177,9 +177,9 @@ class DraftStoreProxy
if not draft then draft = inMemoryDraft
updatedDraft = @changes.applyToModel(draft)
return DatabaseStore.persistModel(updatedDraft).then =>
Actions.queueTask(new SyncbackDraftTask(@draftClientId))
return t.persistModel(updatedDraft)
.then =>
Actions.queueTask(new SyncbackDraftTask(@draftClientId))
DraftStoreProxy.DraftChangeSet = DraftChangeSet

View file

@ -208,7 +208,9 @@ class DraftStore
.then ({draft}) =>
draft.body = body + "\n\n" + draft.body
draft.pristine = false
DatabaseStore.persistModel(draft).then =>
DatabaseStore.inTransaction (t) =>
t.persistModel(draft)
.then =>
Actions.sendDraft(draft.clientId)
_onComposeReply: (context) =>
@ -242,7 +244,9 @@ class DraftStore
# doesn't need to do a query for it a second from now when the composer wants it.
@_draftSessions[draft.clientId] = new DraftStoreProxy(draft.clientId, draft)
DatabaseStore.persistModel(draft).then =>
DatabaseStore.inTransaction (t) =>
t.persistModel(draft)
.then =>
Promise.resolve(draftClientId: draft.clientId, draft: draft)
_newMessageWithContext: (args, attributesCallback) =>

View file

@ -69,7 +69,9 @@ class SearchView extends ModelView
obj = (new Thread).fromJSON(resultJSON)
objects.push(obj)
DatabaseStore.persistModels(objects) if objects.length > 0
if objects.length > 0
DatabaseStore.inTransaction (t) ->
t.persistModels(objects)
page.items = objects
page.loading = false

View file

@ -273,7 +273,8 @@ class TaskQueue
_updateSoon: =>
@_updateSoonThrottled ?= _.throttle =>
DatabaseStore.persistJSONBlob(JSONBlobStorageKey, @_queue ? [])
DatabaseStore.inTransaction (t) =>
t.persistJSONBlob(JSONBlobStorageKey, @_queue ? [])
_.defer =>
@_processQueue()
@trigger()

View file

@ -132,7 +132,8 @@ class ThreadCountsStore extends NylasStore
@_counts[key] += count
delete @_deltas[key]
DatabaseStore.persistJSONBlob(JSONBlobKey, @_counts)
DatabaseStore.inTransaction (t) =>
t.persistJSONBlob(JSONBlobKey, @_counts)
@trigger()
_fetchCountForCategory: (cat) =>

View file

@ -19,6 +19,7 @@ class ChangeLabelsTask extends ChangeMailTask
constructor: ({@labelsToAdd, @labelsToRemove}={}) ->
@labelsToAdd ?= []
@labelsToRemove ?= []
super
label: -> "Applying labels…"
@ -36,6 +37,9 @@ class ChangeLabelsTask extends ChangeMailTask
isDependentTask: (other) -> other instanceof SyncbackCategoryTask
performLocal: ->
if @messages.length
return Promise.reject(new Error("ChangeLabelsTask: N1 does not support viewing or changing labels on individual messages."))
if @labelsToAdd.length is 0 and @labelsToRemove.length is 0
return Promise.reject(new Error("ChangeLabelsTask: Must specify `labelsToAdd` or `labelsToRemove`"))
if @threads.length > 0 and @messages.length > 0

View file

@ -131,7 +131,11 @@ class ChangeMailTask extends Task
changed = @_applyChanges(@threads)
changedIds = _.pluck(changed, 'id')
DatabaseStore.persistModels(changed).then =>
return Promise.resolve() if changed.length is 0
DatabaseStore.inTransaction (t) =>
t.persistModels(changed)
.then =>
if @processNestedMessages()
DatabaseStore.findAll(Message).where(Message.attributes.threadId.in(changedIds)).then (messages) =>
@messages = [].concat(messages, @messages)
@ -141,7 +145,11 @@ class ChangeMailTask extends Task
_performLocalMessages: ->
changed = @_applyChanges(@messages)
DatabaseStore.persistModels(changed)
return Promise.resolve() if changed.length is 0
DatabaseStore.inTransaction (t) ->
t.persistModels(changed)
_applyChanges: (modelArray) ->
changed = []

View file

@ -28,7 +28,8 @@ class CreateMetadataTask extends Task
performLocal: ->
return Promise.reject(new Error("Must pass a type")) unless @type?
@metadatum = new Metadata({@type, @publicId, @key, @value})
return DatabaseStore.persistModel(@metadatum)
DatabaseStore.inTransaction (t) =>
t.persistModel(@metadatum)
performRemote: ->
new Promise (resolve, reject) =>

View file

@ -29,7 +29,8 @@ class DestroyCategoryTask extends Task
if not @category
return Promise.reject(new Error("Attempt to call DestroyCategoryTask.performLocal without @category."))
@category.isDeleted = true
DatabaseStore.persistModel @category
DatabaseStore.inTransaction (t) =>
t.persistModel(@category)
performRemote: ->
if not @category
@ -54,7 +55,9 @@ class DestroyCategoryTask extends Task
if err.statusCode in NylasAPI.PermanentErrorCodes
# Revert isDeleted flag
@category.isDeleted = false
DatabaseStore.persistModel(@category).then =>
DatabaseStore.inTransaction (t) =>
t.persistModel(@category)
.then =>
NylasEnv.emitError(
new Error("Deleting category responded with #{err.statusCode}!")
)
@ -65,13 +68,13 @@ class DestroyCategoryTask extends Task
_notifyUserOfError: (category = @category) ->
displayName = category.displayName
label = if category instanceof Label
displayType = if category instanceof Label
'label'
else
'folder'
msg = "The #{label} #{displayName} could not be deleted."
if label is 'folder'
msg = "The #{displayType} #{displayName} could not be deleted."
if displayType is 'folder'
msg += " Make sure the folder you want to delete is empty before deleting it."
NylasEnv.showErrorDialog(msg)

View file

@ -38,7 +38,8 @@ class DestroyDraftTask extends Task
find.include(Message.attributes.body).then (draft) =>
return Promise.resolve() unless draft
@draft = draft
DatabaseStore.unpersistModel(draft)
DatabaseStore.inTransaction (t) =>
t.unpersistModel(draft)
performRemote: ->
# We don't need to do anything if we weren't able to find the draft
@ -74,7 +75,9 @@ class DestroyDraftTask extends Task
if err.statusCode in NylasAPI.PermanentErrorCodes
Actions.postNotification({message: "Unable to delete this draft. Restoring...", type: "error"})
DatabaseStore.persistModel(@draft).then =>
DatabaseStore.inTransaction (t) =>
t.persistModel(@draft)
.then =>
Promise.resolve(Task.Status.Failed)
else
Promise.resolve(Task.Status.Retry)

View file

@ -38,8 +38,12 @@ class DestroyMetadataTask extends Task
if (models ? []).length is 0
resolve()
else
Promise.settle(models.map (m) -> DatabaseStore.unpersistModel(m))
.then(resolve).catch(reject)
DatabaseStore.inTransaction (t) ->
promises = models.map (m) ->
t.unpersistModel(m)
Promise.settle(promises)
.then(resolve)
.catch(reject)
.catch (error) ->
console.error "Error finding Metadata to destroy", error
console.error error.stack

View file

@ -15,17 +15,18 @@ class EventRSVPTask extends Task
super
performLocal: ->
DatabaseStore.find(Event, @event.id).then (e) =>
e ?= @event
@_previousParticipantsState = Utils.deepClone(e.participants)
participants = []
for p in e.participants
if p['email'] == @myEmail
p['status'] = @RSVPResponse
participants.push p
e.participants = participants
@event = e
DatabaseStore.persistModel(e)
DatabaseStore.inTransaction (t) =>
t.find(Event, @event.id).then (e) =>
e ?= @event
@_previousParticipantsState = Utils.deepClone(e.participants)
participants = []
for p in e.participants
if p['email'] == @myEmail
p['status'] = @RSVPResponse
participants.push p
e.participants = participants
@event = e
t.persistModel(e)
performRemote: ->
NylasAPI.makeRequest
@ -42,10 +43,11 @@ class EventRSVPTask extends Task
.catch APIError, (err) =>
##TODO event already accepted/declined/etc
@event.participants = @_previousParticipantsState
DatabaseStore.persistModel(@event).then ->
return Promise.resolve(Task.Status.Failed)
.catch (err) ->
return Promise.resolve(Task.Status.Failed)
DatabaseStore.inTransaction (t) =>
t.persistModel(@event).then ->
return Promise.resolve(Task.Status.Failed)
.catch (err) ->
return Promise.resolve(Task.Status.Failed)
onOtherError: -> Promise.resolve()
onTimeoutError: -> Promise.resolve()

View file

@ -86,7 +86,9 @@ class SendDraftTask extends Task
# with a valid serverId.
@draft = @draft.clone().fromJSON(json)
@draft.draft = false
DatabaseStore.persistModel(@draft).then =>
DatabaseStore.inTransaction (t) =>
t.persistModel(@draft)
.then =>
if NylasEnv.config.get("core.sending.sounds")
SoundRegistry.playSound('send')
Actions.sendDraftSuccess

View file

@ -25,10 +25,11 @@ module.exports = class SyncbackCategoryTask extends Task
if not @category
return Promise.reject(new Error("Attempt to call SyncbackCategoryTask.performLocal without @category."))
if @_shouldChangeBackwards()
DatabaseStore.unpersistModel @category
else
DatabaseStore.persistModel @category
DatabaseStore.inTransaction (t) =>
if @_shouldChangeBackwards()
t.unpersistModel @category
else
t.persistModel @category
performRemote: ->
if @category instanceof Label
@ -49,7 +50,8 @@ module.exports = class SyncbackCategoryTask extends Task
# This is where we update the existing model with the newly
# created serverId.
@category.serverId = json.id
DatabaseStore.persistModel @category
DatabaseStore.inTransaction (t) =>
t.persistModel @category
.then ->
return Promise.resolve(Task.Status.Success)
.catch APIError, (err) =>

View file

@ -85,12 +85,12 @@ class SyncbackDraftTask extends Task
# below. We currently have no way of locking between processes. Maybe a
# log-style data structure would be better suited for drafts.
#
DatabaseStore.atomically =>
DatabaseStore.inTransaction (t) =>
@getLatestLocalDraft().then (draft) ->
if not draft then draft = oldDraft
draft.version = json.version
draft.serverId = json.id
DatabaseStore.persistModel(draft)
t.persistModel(draft)
.then =>
return Promise.resolve(Task.Status.Success)
@ -135,4 +135,5 @@ class SyncbackDraftTask extends Task
delete newDraft.threadId
delete newDraft.replyToMessageId
DatabaseStore.persistModel(newDraft)
DatabaseStore.inTransaction (t) =>
t.persistModel(newDraft)

View file

@ -257,7 +257,7 @@ class Task
_handleRemoteError: (err, status) =>
# Sometimes users just indicate that a task Failed, but don't provide
# the error object
err ?= new Error("Unexpected remote error in #{Task.constructor.name}")
err ?= new Error("Unexpected error in #{Task.constructor.name}.performRemote")
if status isnt Task.Status.Failed
@queueState.debugStatus = Task.DebugStatus.UncaughtError

View file

@ -55,6 +55,7 @@ class NylasExports
@load "SearchView", 'flux/stores/search-view'
@load "DatabaseView", 'flux/stores/database-view'
@load "DatabaseStore", 'flux/stores/database-store'
@load "DatabaseTransaction", 'flux/stores/database-transaction'
@load "QuerySubscriptionPool", 'flux/models/query-subscription-pool'
# Database Objects
@ -94,6 +95,7 @@ class NylasExports
@require "SendDraftTask", 'flux/tasks/send-draft'
@require "FileUploadTask", 'flux/tasks/file-upload-task'
@require "DestroyDraftTask", 'flux/tasks/destroy-draft'
@require "ChangeMailTask", 'flux/tasks/change-mail-task'
@require "ChangeLabelsTask", 'flux/tasks/change-labels-task'
@require "ChangeFolderTask", 'flux/tasks/change-folder-task'
@require "SyncbackCategoryTask", 'flux/tasks/syncback-category-task'