Mirror of https://github.com/Foundry376/Mailspring.git, synced 2025-12-09 06:01:09 +08:00
refactor(DB): promisify database
Summary: WIP. The app launches and works for me; I still need to fix the tests (just the DB ones :D).

I changed the `DatabaseProxy` into a `DatabaseConnection` object. It requests a fully instantiated database from the backend Browser process. If we're already set up, it connects straight to the existing DB. If we're not set up, it creates a new database and runs the initial schema migration on it. The `_createNewDatabase` promise won't resolve until that migration has run. Until we add a more sophisticated migration system and get rid of the stupid `modelClassMap` that's shoved in `Utils`, I'm passing in a series of `_setupQueries()` to get everything started.

The `DatabaseConnection` is also the one responsible for queuing up queries until the DB is fully populated and ready to go. We actually receive a lot of queries before we're set up, because many Stores make DB requests on `require` in their `constructor` or `init` methods (remember all the times we call `_populateStore` in `init`). Those queries are now aggregated by the `DatabaseConnection` and then executed sequentially, as sketched below.

Methods like `persistModel` now resolve only after both the queries have completed AND their corresponding `triggerLater` has completed as well.

Test Plan: in progress

Reviewers: bengotow

Reviewed By: bengotow

Differential Revision: https://phab.nylas.com/D1688
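A minimal sketch of the queuing behavior described in the summary. This is a hypothetical illustration, not the shipped code from this diff: `SketchConnection`, `_dispatch`, and the `prepare` argument are stand-ins for the real `DatabaseConnection`, its `ipc.send` / `database-result` round-trip, and `DatabaseManager.prepare`.

```coffee
# Hypothetical sketch of the queuing described above. Queries issued
# before the backend database is prepared are buffered, then flushed in
# order once connect() completes. _dispatch stubs the real IPC round-trip.
class SketchConnection
  constructor: ->
    @_isConnected = false
    @_pending = []

  # Returns a promise that settles when the (stubbed) backend answers,
  # which is what lets persistModel-style methods chain on completion.
  query: (sql, values = []) ->
    new Promise (resolve, reject) =>
      record = {sql, values, resolve, reject}
      if @_isConnected then @_dispatch(record) else @_pending.push(record)

  # `prepare` stands in for DatabaseManager.prepare across the IPC bridge:
  # it invokes its callback once the DB exists and setup queries have run.
  connect: (prepare) ->
    prepare =>
      @_isConnected = true
      @_dispatch(record) for record in @_pending
      @_pending = []

  _dispatch: (record) ->
    record.resolve([])   # the real class resolves with the query result

# Queries made during `require` or `init` still resolve after setup:
conn = new SketchConnection()
conn.query("SELECT 1").then -> console.log "ran after connect"
conn.connect (done) -> setTimeout(done, 10)
```

Under the same assumptions, "resolve only after both the query and the trigger complete" amounts to chaining the two promises, e.g. `queryPromise.then -> triggerPromise`.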
This commit is contained in: parent 6f7017a887, commit e8d24ea1b5
17 changed files with 924 additions and 682 deletions
@@ -6,22 +6,22 @@
  display: inline-block;
  position: relative;
  font-size: @font-size-small;
  margin-bottom: @spacing-standard;
  margin-right: @spacing-standard;
  margin: 0 @spacing-standard @spacing-standard @spacing-standard;
  background: @background-off-primary;
  box-shadow: inset 0 0 1px 1px rgba(0,0,0,0.09);
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
  width: calc(~"50% - 7.5px");
  width: calc(~"50% - 23px");
  border-radius: 4px;

  &.non-image-attachment {
    width: calc(~"50% - 23px");
    margin-left: 15px;
    &:nth-child(even) {
      margin-left: 0;
    }
    margin-left: @spacing-standard;
  }

  &:nth-child(even) {
    margin-left: 0;
  }

  &.file-upload {

@@ -44,9 +44,6 @@
    white-space: nowrap;
  }

  &:nth-child(even) {
    margin-right: 0;
  }
  &:hover {
    cursor: default;
  }

@@ -61,7 +58,6 @@
  .attachment-file-name {
    font-weight: @font-weight-medium;
  }
  margin-left: 15px;
  .attachment-file-and-name {
    position: relative;
    z-index: 2;
@@ -349,7 +349,18 @@ class ComposerView extends React.Component
    _.sortBy _.reject(@_uploadsAndFiles(), Utils.looksLikeImage), @_fileSort

  _uploadsAndFiles: ->
    _.compact(@state.uploads.concat(@state.files))
    # When uploads finish, they stay attached to the object at 100%
    # completion. Eventually the DB trigger will make its way to a window
    # and the files will appear on the draft.
    #
    # In this case we want to show the file instead of the upload
    uploads = _.filter @state.uploads, (upload) =>
      for file in @state.files
        linkedUpload = FileUploadStore.linkedUpload(file)
        return false if linkedUpload and linkedUpload.uploadId is upload.uploadId
      return true

    _.compact(uploads.concat(@state.files))

  _onFileUploadStoreChange: =>
    @setState uploads: FileUploadStore.uploadsForMessage(@props.localId)
@@ -1,7 +1,7 @@
{
  "name": "nylas",
  "productName": "Nylas",
  "version": "0.1.3",
  "version": "0.1.4",
  "description": "An email OS",
  "main": "./src/browser/main.js",
  "repository": {
spec-nylas/fixtures/db-test-model.coffee (new file, 74 lines)

@@ -0,0 +1,74 @@
Tag = require '../../src/flux/models/tag'
Model = require '../../src/flux/models/model'
Attributes = require '../../src/flux/attributes'

class TestModel extends Model
  @attributes =
    'id': Attributes.String
      queryable: true
      modelKey: 'id'

TestModel.configureBasic = ->
  TestModel.additionalSQLiteConfig = undefined
  TestModel.attributes =
    'id': Attributes.String
      queryable: true
      modelKey: 'id'

TestModel.configureWithAllAttributes = ->
  TestModel.additionalSQLiteConfig = undefined
  TestModel.attributes =
    'datetime': Attributes.DateTime
      queryable: true
      modelKey: 'datetime'
    'string': Attributes.String
      queryable: true
      modelKey: 'string'
      jsonKey: 'string-json-key'
    'boolean': Attributes.Boolean
      queryable: true
      modelKey: 'boolean'
    'number': Attributes.Number
      queryable: true
      modelKey: 'number'
    'other': Attributes.String
      modelKey: 'other'

TestModel.configureWithCollectionAttribute = ->
  TestModel.additionalSQLiteConfig = undefined
  TestModel.attributes =
    'id': Attributes.String
      queryable: true
      modelKey: 'id'
    'tags': Attributes.Collection
      queryable: true
      modelKey: 'tags'
      itemClass: Tag

TestModel.configureWithJoinedDataAttribute = ->
  TestModel.additionalSQLiteConfig = undefined
  TestModel.attributes =
    'id': Attributes.String
      queryable: true
      modelKey: 'id'
    'body': Attributes.JoinedData
      modelTable: 'TestModelBody'
      modelKey: 'body'

TestModel.configureWithAdditionalSQLiteConfig = ->
  TestModel.attributes =
    'id': Attributes.String
      queryable: true
      modelKey: 'id'
    'body': Attributes.JoinedData
      modelTable: 'TestModelBody'
      modelKey: 'body'
  TestModel.additionalSQLiteConfig =
    setup: ->
      ['CREATE INDEX IF NOT EXISTS ThreadListIndex ON Thread(last_message_timestamp DESC, namespace_id, id)']
    writeModel: jasmine.createSpy('additionalWriteModel')
    deleteModel: jasmine.createSpy('additionalDeleteModel')

module.exports = TestModel
spec-nylas/stores/database-connection-spec.coffee (new file, 64 lines)

@@ -0,0 +1,64 @@
ipc = require 'ipc'
TestModel = require '../fixtures/db-test-model'
Attributes = require '../../src/flux/attributes'
DatabaseConnection = require '../../src/flux/stores/database-connection'

describe "DatabaseConnection", ->
  beforeEach ->
    @connection = new DatabaseConnection()
    # Emulate a working DB
    spyOn(ipc, 'send').andCallFake (messageType, {queryKey}) ->
      return unless messageType is "database-query"
      err = null
      result = []
      @connection._onDatabaseResult({queryKey, err, result})

  describe "_setupQueriesForTable", ->
    it "should return the queries for creating the table and the primary unique index", ->
      TestModel.attributes =
        'attrQueryable': Attributes.DateTime
          queryable: true
          modelKey: 'attrQueryable'
          jsonKey: 'attr_queryable'

        'attrNonQueryable': Attributes.Collection
          modelKey: 'attrNonQueryable'
          jsonKey: 'attr_non_queryable'
      queries = @connection._setupQueriesForTable(TestModel)
      expected = [
        'CREATE TABLE IF NOT EXISTS `TestModel` (id TEXT PRIMARY KEY,data BLOB,attr_queryable INTEGER)',
        'CREATE UNIQUE INDEX IF NOT EXISTS `TestModel_id` ON `TestModel` (`id`)'
      ]
      for query, i in queries
        expect(query).toBe(expected[i])

    it "should correctly create join tables for models that have queryable collections", ->
      TestModel.configureWithCollectionAttribute()
      queries = @connection._setupQueriesForTable(TestModel)
      expected = [
        'CREATE TABLE IF NOT EXISTS `TestModel` (id TEXT PRIMARY KEY,data BLOB)',
        'CREATE UNIQUE INDEX IF NOT EXISTS `TestModel_id` ON `TestModel` (`id`)',
        'CREATE TABLE IF NOT EXISTS `TestModel-Tag` (id TEXT KEY, `value` TEXT)'
        'CREATE UNIQUE INDEX IF NOT EXISTS `TestModel_Tag_id_val` ON `TestModel-Tag` (`id`,`value`)',
      ]
      for query, i in queries
        expect(query).toBe(expected[i])

    it "should use the correct column type for each attribute", ->
      TestModel.configureWithAllAttributes()
      queries = @connection._setupQueriesForTable(TestModel)
      expect(queries[0]).toBe('CREATE TABLE IF NOT EXISTS `TestModel` (id TEXT PRIMARY KEY,data BLOB,datetime INTEGER,string-json-key TEXT,boolean INTEGER,number INTEGER)')

    describe "when the model provides additional sqlite config", ->
      it "the setup method should return these queries", ->
        TestModel.configureWithAdditionalSQLiteConfig()
        spyOn(TestModel.additionalSQLiteConfig, 'setup').andCallThrough()
        queries = @connection._setupQueriesForTable(TestModel)
        expect(TestModel.additionalSQLiteConfig.setup).toHaveBeenCalledWith()
        expect(queries.pop()).toBe('CREATE INDEX IF NOT EXISTS ThreadListIndex ON Thread(last_message_timestamp DESC, namespace_id, id)')

      it "should not fail if additional config is present, but setup is undefined", ->
        delete TestModel.additionalSQLiteConfig['setup']
        @m = new TestModel(id: 'local-6806434c-b0cd', body: 'hello world')
        expect( => @connection._setupQueriesForTable(TestModel)).not.toThrow()
@@ -1,73 +1,10 @@
DatabaseStore = require '../../src/flux/stores/database-store'
Model = require '../../src/flux/models/model'
ModelQuery = require '../../src/flux/models/query'
Attributes = require '../../src/flux/attributes'
Tag = require '../../src/flux/models/tag'
_ = require 'underscore'
ipc = require 'ipc'

class TestModel extends Model
  @attributes =
    'id': Attributes.String
      queryable: true
      modelKey: 'id'

TestModel.configureWithAllAttributes = ->
  TestModel.additionalSQLiteConfig = undefined
  TestModel.attributes =
    'datetime': Attributes.DateTime
      queryable: true
      modelKey: 'datetime'
    'string': Attributes.String
      queryable: true
      modelKey: 'string'
      jsonKey: 'string-json-key'
    'boolean': Attributes.Boolean
      queryable: true
      modelKey: 'boolean'
    'number': Attributes.Number
      queryable: true
      modelKey: 'number'
    'other': Attributes.String
      modelKey: 'other'

TestModel.configureWithCollectionAttribute = ->
  TestModel.additionalSQLiteConfig = undefined
  TestModel.attributes =
    'id': Attributes.String
      queryable: true
      modelKey: 'id'
    'tags': Attributes.Collection
      queryable: true
      modelKey: 'tags'
      itemClass: Tag

TestModel.configureWithJoinedDataAttribute = ->
  TestModel.additionalSQLiteConfig = undefined
  TestModel.attributes =
    'id': Attributes.String
      queryable: true
      modelKey: 'id'
    'body': Attributes.JoinedData
      modelTable: 'TestModelBody'
      modelKey: 'body'

TestModel.configureWithAdditionalSQLiteConfig = ->
  TestModel.attributes =
    'id': Attributes.String
      queryable: true
      modelKey: 'id'
    'body': Attributes.JoinedData
      modelTable: 'TestModelBody'
      modelKey: 'body'
  TestModel.additionalSQLiteConfig =
    setup: ->
      ['CREATE INDEX IF NOT EXISTS ThreadListIndex ON Thread(last_message_timestamp DESC, namespace_id, id)']
    writeModel: jasmine.createSpy('additionalWriteModel')
    deleteModel: jasmine.createSpy('additionalDeleteModel')

Tag = require '../../src/flux/models/tag'
TestModel = require '../fixtures/db-test-model'
ModelQuery = require '../../src/flux/models/query'
DatabaseStore = require '../../src/flux/stores/database-store'

testMatchers = {'id': 'b'}
testModelInstance = new TestModel(id: '1234')
@@ -76,25 +13,23 @@ testModelInstanceB = new TestModel(id: 'BBB')

describe "DatabaseStore", ->
  beforeEach ->
    TestModel.configureBasic()
    spyOn(ModelQuery.prototype, 'where').andCallThrough()
    spyOn(DatabaseStore, 'triggerSoon')
    spyOn(DatabaseStore, '_triggerSoon').andCallFake -> Promise.resolve()

    # Emulate a working DB
    spyOn(ipc, 'send').andCallFake (messageType, {queryKey}) ->
      return unless messageType is "database-query"
      err = null
      result = []
      DatabaseStore._dbConnection._onDatabaseResult({queryKey, err, result})
    spyOn(DatabaseStore._dbConnection, "_isConnected").andReturn true

    @performed = []
    @transactionCount = 0

    # Pass spyTx() to functions that take a tx reference to log
    # performed queries to the @performed array.
    @spyTx = ->
      execute: (query, values, success) =>
        @performed.push({query: query, values: values})
        success() if success

    # Spy on the DatabaseStore and use spyTx() to generate new
    # transactions instead of using the real websql transaction.
    spyOn(DatabaseStore, 'inTransaction').andCallFake (options, callback) =>
      @transactionCount += 1
      callback(@spyTx())
      Promise.resolve()
    oldQuery = DatabaseStore._query
    spyOn(DatabaseStore, "_query").andCallFake (query, values=[], options={}) =>
      @performed.push({query: query, values: values})
      oldQuery(query, values, options)

  describe "find", ->
    it "should return a ModelQuery for retrieving a single item by Id", ->
@@ -131,63 +66,65 @@ describe "DatabaseStore", ->

  describe "persistModel", ->
    it "should cause the DatabaseStore to trigger with a change that contains the model", ->
      DatabaseStore.persistModel(testModelInstance)
      expect(DatabaseStore.triggerSoon).toHaveBeenCalled()
      waitsForPromise ->
        DatabaseStore.persistModel(testModelInstance).then ->
          expect(DatabaseStore._triggerSoon).toHaveBeenCalled()

      change = DatabaseStore.triggerSoon.mostRecentCall.args[0]
      expect(change).toEqual({objectClass: TestModel.name, objects: [testModelInstance], type:'persist'})
          change = DatabaseStore._triggerSoon.mostRecentCall.args[0]
          expect(change).toEqual({objectClass: TestModel.name, objects: [testModelInstance], type:'persist'})
        .catch (err) ->
          console.log err

    it "should call through to writeModels", ->
      spyOn(DatabaseStore, 'writeModels')
    it "should call through to _writeModels", ->
      spyOn(DatabaseStore, '_writeModels')
      DatabaseStore.persistModel(testModelInstance)
      expect(DatabaseStore.writeModels.callCount).toBe(1)
      expect(DatabaseStore._writeModels.callCount).toBe(1)

  describe "persistModels", ->
    it "should cause the DatabaseStore to trigger with a change that contains the models", ->
      DatabaseStore.persistModels([testModelInstanceA, testModelInstanceB])
      expect(DatabaseStore.triggerSoon).toHaveBeenCalled()
      waitsForPromise ->
        DatabaseStore.persistModels([testModelInstanceA, testModelInstanceB]).then ->
          expect(DatabaseStore._triggerSoon).toHaveBeenCalled()

      change = DatabaseStore.triggerSoon.mostRecentCall.args[0]
      expect(change).toEqual
        objectClass: TestModel.name,
        objects: [testModelInstanceA, testModelInstanceB]
        type:'persist'
          change = DatabaseStore._triggerSoon.mostRecentCall.args[0]
          expect(change).toEqual
            objectClass: TestModel.name,
            objects: [testModelInstanceA, testModelInstanceB]
            type:'persist'

    it "should call through to writeModels after checking them", ->
      spyOn(DatabaseStore, 'writeModels')
    it "should call through to _writeModels after checking them", ->
      spyOn(DatabaseStore, '_writeModels')
      DatabaseStore.persistModels([testModelInstanceA, testModelInstanceB])
      expect(DatabaseStore.writeModels.callCount).toBe(1)

    it "should only open one database transaction to write all the models", ->
      DatabaseStore.persistModels([testModelInstanceA, testModelInstanceB])
      expect(@transactionCount).toBe(1)
      expect(DatabaseStore._writeModels.callCount).toBe(1)

    it "should throw an exception if the models are not the same class,\
        since it cannot be specified by the trigger payload", ->
      expect(-> DatabaseStore.persistModels([testModelInstanceA, new Tag()])).toThrow()

  describe "unpersistModel", ->
    it "should delete the model by Id", ->
      DatabaseStore.unpersistModel(testModelInstance)
      expect(@performed.length).toBe(3)
      expect(@performed[1].query).toBe("DELETE FROM `TestModel` WHERE `id` = ?")
      expect(@performed[1].values[0]).toBe('1234')
    it "should delete the model by Id", -> waitsForPromise =>
      DatabaseStore.unpersistModel(testModelInstance).then =>
        expect(@performed.length).toBe(3)
        expect(@performed[1].query).toBe("DELETE FROM `TestModel` WHERE `id` = ?")
        expect(@performed[1].values[0]).toBe('1234')

    it "should cause the DatabaseStore to trigger() with a change that contains the model", ->
      DatabaseStore.unpersistModel(testModelInstance)
      expect(DatabaseStore.triggerSoon).toHaveBeenCalled()
      waitsForPromise ->
        DatabaseStore.unpersistModel(testModelInstance).then ->
          expect(DatabaseStore._triggerSoon).toHaveBeenCalled()

      change = DatabaseStore.triggerSoon.mostRecentCall.args[0]
      expect(change).toEqual({objectClass: TestModel.name, objects: [testModelInstance], type:'unpersist'})
          change = DatabaseStore._triggerSoon.mostRecentCall.args[0]
          expect(change).toEqual({objectClass: TestModel.name, objects: [testModelInstance], type:'unpersist'})

    describe "when the model provides additional sqlite config", ->
      beforeEach ->
        TestModel.configureWithAdditionalSQLiteConfig()

      it "should call the deleteModel method and provide the transaction and model", ->
        DatabaseStore.unpersistModel(testModelInstance)
        expect(TestModel.additionalSQLiteConfig.deleteModel).toHaveBeenCalled()
        expect(TestModel.additionalSQLiteConfig.deleteModel.mostRecentCall.args[1]).toBe(testModelInstance)
      it "should call the deleteModel method and provide the model", ->
        waitsForPromise ->
          DatabaseStore.unpersistModel(testModelInstance).then ->
            expect(TestModel.additionalSQLiteConfig.deleteModel).toHaveBeenCalled()
            expect(TestModel.additionalSQLiteConfig.deleteModel.mostRecentCall.args[0]).toBe(testModelInstance)

      it "should not fail if additional config is present, but deleteModel is not defined", ->
        delete TestModel.additionalSQLiteConfig['deleteModel']
@@ -196,76 +133,29 @@ describe "DatabaseStore", ->
    describe "when the model has collection attributes", ->
      it "should delete all of the elements in the join tables", ->
        TestModel.configureWithCollectionAttribute()
        DatabaseStore.unpersistModel(testModelInstance)
        expect(@performed.length).toBe(4)
        expect(@performed[2].query).toBe("DELETE FROM `TestModel-Tag` WHERE `id` = ?")
        expect(@performed[2].values[0]).toBe('1234')
        waitsForPromise =>
          DatabaseStore.unpersistModel(testModelInstance).then =>
            expect(@performed.length).toBe(4)
            expect(@performed[2].query).toBe("DELETE FROM `TestModel-Tag` WHERE `id` = ?")
            expect(@performed[2].values[0]).toBe('1234')

    describe "when the model has joined data attributes", ->
      it "should delete the element in the joined data table", ->
        TestModel.configureWithJoinedDataAttribute()
        DatabaseStore.unpersistModel(testModelInstance)
        expect(@performed.length).toBe(4)
        expect(@performed[2].query).toBe("DELETE FROM `TestModelBody` WHERE `id` = ?")
        expect(@performed[2].values[0]).toBe('1234')
        waitsForPromise =>
          DatabaseStore.unpersistModel(testModelInstance).then =>
            expect(@performed.length).toBe(4)
            expect(@performed[2].query).toBe("DELETE FROM `TestModelBody` WHERE `id` = ?")
            expect(@performed[2].values[0]).toBe('1234')

  describe "queriesForTableSetup", ->
    it "should return the queries for creating the table and the primary unique index", ->
      TestModel.attributes =
        'attrQueryable': Attributes.DateTime
          queryable: true
          modelKey: 'attrQueryable'
          jsonKey: 'attr_queryable'

        'attrNonQueryable': Attributes.Collection
          modelKey: 'attrNonQueryable'
          jsonKey: 'attr_non_queryable'
      queries = DatabaseStore.queriesForTableSetup(TestModel)
      expected = [
        'CREATE TABLE IF NOT EXISTS `TestModel` (id TEXT PRIMARY KEY,data BLOB,attr_queryable INTEGER)',
        'CREATE UNIQUE INDEX IF NOT EXISTS `TestModel_id` ON `TestModel` (`id`)'
      ]
      for query, i in queries
        expect(query).toBe(expected[i])

    it "should correctly create join tables for models that have queryable collections", ->
      TestModel.configureWithCollectionAttribute()
      queries = DatabaseStore.queriesForTableSetup(TestModel)
      expected = [
        'CREATE TABLE IF NOT EXISTS `TestModel` (id TEXT PRIMARY KEY,data BLOB)',
        'CREATE UNIQUE INDEX IF NOT EXISTS `TestModel_id` ON `TestModel` (`id`)',
        'CREATE TABLE IF NOT EXISTS `TestModel-Tag` (id TEXT KEY, `value` TEXT)'
        'CREATE UNIQUE INDEX IF NOT EXISTS `TestModel_Tag_id_val` ON `TestModel-Tag` (`id`,`value`)',
      ]
      for query, i in queries
        expect(query).toBe(expected[i])

    it "should use the correct column type for each attribute", ->
      TestModel.configureWithAllAttributes()
      queries = DatabaseStore.queriesForTableSetup(TestModel)
      expect(queries[0]).toBe('CREATE TABLE IF NOT EXISTS `TestModel` (id TEXT PRIMARY KEY,data BLOB,datetime INTEGER,string-json-key TEXT,boolean INTEGER,number INTEGER)')

    describe "when the model provides additional sqlite config", ->
      it "the setup method should return these queries", ->
        TestModel.configureWithAdditionalSQLiteConfig()
        spyOn(TestModel.additionalSQLiteConfig, 'setup').andCallThrough()
        queries = DatabaseStore.queriesForTableSetup(TestModel)
        expect(TestModel.additionalSQLiteConfig.setup).toHaveBeenCalledWith()
        expect(queries.pop()).toBe('CREATE INDEX IF NOT EXISTS ThreadListIndex ON Thread(last_message_timestamp DESC, namespace_id, id)')

      it "should not fail if additional config is present, but setup is undefined", ->
        delete TestModel.additionalSQLiteConfig['setup']
        @m = new TestModel(id: 'local-6806434c-b0cd', body: 'hello world')
        expect( => DatabaseStore.queriesForTableSetup(TestModel)).not.toThrow()

  describe "writeModels", ->
  describe "_writeModels", ->
    it "should compose a REPLACE INTO query to save the model", ->
      TestModel.configureWithCollectionAttribute()
      DatabaseStore.writeModels(@spyTx(), [testModelInstance])
      DatabaseStore._writeModels([testModelInstance])
      expect(@performed[0].query).toBe("REPLACE INTO `TestModel` (id,data) VALUES (?,?)")

    it "should save the model JSON into the data column", ->
      DatabaseStore.writeModels(@spyTx(), [testModelInstance])
      DatabaseStore._writeModels([testModelInstance])
      expect(@performed[0].values[1]).toEqual(JSON.stringify(testModelInstance))

    describe "when the model defines additional queryable attributes", ->

@@ -279,12 +169,12 @@ describe "DatabaseStore", ->
          number: 15

      it "should populate additional columns defined by the attributes", ->
        DatabaseStore.writeModels(@spyTx(), [@m])
        DatabaseStore._writeModels([@m])
        expect(@performed[0].query).toBe("REPLACE INTO `TestModel` (id,data,datetime,string-json-key,boolean,number) VALUES (?,?,?,?,?,?)")

      it "should use the JSON-form values of the queryable attributes", ->
        json = @m.toJSON()
        DatabaseStore.writeModels(@spyTx(), [@m])
        DatabaseStore._writeModels([@m])

        values = @performed[0].values
        expect(values[2]).toEqual(json['datetime'])

@@ -297,7 +187,7 @@ describe "DatabaseStore", ->
        TestModel.configureWithCollectionAttribute()
        @m = new TestModel(id: 'local-6806434c-b0cd')
        @m.tags = [new Tag(id: 'a'),new Tag(id: 'b')]
        DatabaseStore.writeModels(@spyTx(), [@m])
        DatabaseStore._writeModels([@m])

      it "should delete all association records for the model from join tables", ->
        expect(@performed[1].query).toBe('DELETE FROM `TestModel-Tag` WHERE `id` IN (\'local-6806434c-b0cd\')')

@@ -312,26 +202,27 @@ describe "DatabaseStore", ->

      it "should write the value to the joined table if it is defined", ->
        @m = new TestModel(id: 'local-6806434c-b0cd', body: 'hello world')
        DatabaseStore.writeModels(@spyTx(), [@m])
        DatabaseStore._writeModels([@m])
        expect(@performed[1].query).toBe('REPLACE INTO `TestModelBody` (`id`, `value`) VALUES (?, ?)')
        expect(@performed[1].values).toEqual([@m.id, @m.body])

      it "should not write the value to the joined table if it is undefined", ->
        @m = new TestModel(id: 'local-6806434c-b0cd')
        DatabaseStore.writeModels(@spyTx(), [@m])
        DatabaseStore._writeModels([@m])
        expect(@performed.length).toBe(1)

    describe "when the model provides additional sqlite config", ->
      beforeEach ->
        TestModel.configureWithAdditionalSQLiteConfig()

      it "should call the writeModel method and provide the transaction and model", ->
        tx = @spyTx()
      it "should call the writeModel method and provide the model", ->
        @m = new TestModel(id: 'local-6806434c-b0cd', body: 'hello world')
        DatabaseStore.writeModels(tx, [@m])
        expect(TestModel.additionalSQLiteConfig.writeModel).toHaveBeenCalledWith(tx, @m)
        DatabaseStore._writeModels([@m])
        expect(TestModel.additionalSQLiteConfig.writeModel).toHaveBeenCalledWith(@m)

      it "should not fail if additional config is present, but writeModel is not defined", ->
        delete TestModel.additionalSQLiteConfig['writeModel']
        @m = new TestModel(id: 'local-6806434c-b0cd', body: 'hello world')
        expect( => DatabaseStore.writeModels(@spyTx(), [@m])).not.toThrow()
        expect( => DatabaseStore._writeModels([@m])).not.toThrow()

  describe "DatabaseStore::_triggerSoon", ->
@@ -64,14 +64,20 @@ describe 'FileUploadStore', ->
      expect(FileUploadStore._fileUploads[123]).toBe @uploadData

  describe 'when a file has been uploaded', ->
    it 'adds to the linked files and removes from uploads', ->
    it 'removes from uploads', ->
      FileUploadStore._fileUploads[123] = @uploadData
      Actions.fileUploaded
        file: @file
        uploadData: @uploadData
      expect(FileUploadStore._linkedFiles["id_123"]).toBe @uploadData
      expect(FileUploadStore._fileUploads[123]).not.toBeDefined()

    it 'adds to the linked files', ->
      FileUploadStore._fileUploads[123] = @uploadData
      Actions.linkFileToUpload
        file: @file
        uploadData: @uploadData
      expect(FileUploadStore._linkedFiles["id_123"]).toBe @uploadData

  describe 'when a file has been aborted', ->
    it 'removes it from the uploads', ->
      FileUploadStore._fileUploads[123] = @uploadData
@@ -172,9 +172,10 @@ class Atom extends Model
    StyleManager = require './style-manager'
    ActionBridge = require './flux/action-bridge'
    MenuManager = require './menu-manager'
    {devMode, safeMode, resourcePath} = @getLoadSettings()
    configDirPath = @getConfigDirPath()

    {devMode, safeMode, resourcePath} = @getLoadSettings()

    # Add 'exports' to module search path.
    exportsPath = path.join(resourcePath, 'exports')
    require('module').globalPaths.push(exportsPath)
@@ -1,24 +1,25 @@
Config = require '../config'
AtomWindow = require './atom-window'
BrowserWindow = require 'browser-window'
ApplicationMenu = require './application-menu'
NylasProtocolHandler = require './nylas-protocol-handler'
AutoUpdateManager = require './auto-update-manager'
WindowManager = require './window-manager'
Config = require '../config'
dialog = require 'dialog'
DatabaseManager = require './database-manager'
ApplicationMenu = require './application-menu'
AutoUpdateManager = require './auto-update-manager'
NylasProtocolHandler = require './nylas-protocol-handler'

_ = require 'underscore'
fs = require 'fs-plus'
Menu = require 'menu'
os = require 'os'
app = require 'app'
ipc = require 'ipc'
path = require 'path'
os = require 'os'
net = require 'net'
url = require 'url'
exec = require('child_process').exec
Menu = require 'menu'
path = require 'path'
dialog = require 'dialog'
querystring = require 'querystring'
{EventEmitter} = require 'events'
_ = require 'underscore'

socketPath =
  if process.platform is 'win32'

@@ -76,7 +77,7 @@ class Application
    @config = new Config({configDirPath, @resourcePath})
    @config.load()

    @databases = {}
    @databaseManager = new DatabaseManager({@resourcePath})
    @windowManager = new WindowManager({@resourcePath, @config, @devMode, @safeMode})
    @autoUpdateManager = new AutoUpdateManager(@version, @config, @specMode)
    @applicationMenu = new ApplicationMenu(@version)

@@ -97,72 +98,6 @@ class Application
    for urlToOpen in (urlsToOpen || [])
      @openUrl(urlToOpen)

  prepareDatabaseInterface: ->
    return @dblitePromise if @dblitePromise

    # configure a listener that watches for incoming queries over IPC,
    # executes them, and returns the responses to the remote renderer processes
    ipc.on 'database-query', (event, {databasePath, queryKey, query, values}) =>
      db = @databases[databasePath]
      done = (err, result) ->
        unless err
          runtime = db.lastQueryTime()
          if runtime > 250
            console.log("Query #{queryKey}: #{query} took #{runtime}msec")
        event.sender.send('database-result', {queryKey, err, result})

      return done(new Error("Database not prepared.")) unless db
      if query[0..5] is 'SELECT'
        db.query(query, values, null, done)
      else
        db.query(query, values, done)

    # return a promise that resolves after we've configured dblite for our platform
    return @dblitePromise = new Promise (resolve, reject) =>
      dblite = require('../../vendor/dblite-custom').withSQLite('3.8.6+')
      vendor = path.join(@resourcePath.replace('app.asar', 'app.asar.unpacked'), '/vendor')

      if process.platform is 'win32'
        dblite.bin = "#{vendor}/sqlite3-win32.exe"
        resolve(dblite)
      else if process.platform is 'linux'
        exec "uname -a", (err, stdout, stderr) ->
          arch = if stdout.toString().indexOf('x86_64') is -1 then "32" else "64"
          dblite.bin = "#{vendor}/sqlite3-linux-#{arch}"
          resolve(dblite)
      else if process.platform is 'darwin'
        dblite.bin = "#{vendor}/sqlite3-darwin"
        resolve(dblite)

  prepareDatabase: (databasePath, callback) ->
    @prepareDatabaseInterface().then (dblite) =>
      # Avoid opening a new connection to an existing database
      return callback() if @databases[databasePath]

      # Create a new database for the requested path
      db = dblite(databasePath)

      # By default, dblite stops all query execution when a query returns an error.
      # We want to propagate those errors out, but still allow queries to be made.
      db.ignoreErrors = true
      @databases[databasePath] = db

      # Tell the person who requested the database that they can begin making queries
      callback()

  closeDBConnection: (databasePath) ->
    @databases[databasePath]?.close()
    delete @databases[databasePath]

  deleteAllDatabases: ->
    for path, val of @databases
      @closeDBConnection(path)
      fs.unlinkSync(path)

  closeDBConnections: ->
    for path, val of @databases
      @closeDBConnection(path)

  # Creates server to listen for additional atom application launches.
  #
  # You can run the atom command multiple times, but after the first launch

@@ -193,6 +128,10 @@ class Application
  setupJavaScriptArguments: ->
    app.commandLine.appendSwitch 'js-flags', '--harmony'

  _logout: =>
    @config.set('nylas', null)
    @config.set('edgehill', null)

  # Registers basic application commands, non-idempotent.
  # Note: If these events are triggered while an application window is open, the window
  # needs to manually bubble them up to the Application instance via IPC or they won't be

@@ -227,10 +166,7 @@ class Application
    @on 'application:run-benchmarks', ->
      @runBenchmarks()

    @on 'application:logout', =>
      @deleteAllDatabases()
      @config.set('nylas', null)
      @config.set('edgehill', null)
    @on 'application:logout', @_logout

    @on 'application:quit', => app.quit()
    @on 'application:inspect', ({x,y, atomWindow}) ->

@@ -283,11 +219,11 @@ class Application

    # Called after the app has closed all windows.
    app.on 'will-quit', =>
      @closeDBConnections()
      @databaseManager.closeDatabaseConnections()
      @deleteSocketFile()

    app.on 'will-exit', =>
      @closeDBConnections()
      @databaseManager.closeDatabaseConnections()
      @deleteSocketFile()

    app.on 'open-file', (event, pathToOpen) ->

@@ -343,6 +279,12 @@ class Application
      clipboard ?= require 'clipboard'
      clipboard.writeText(selectedText, 'selection')

    # configure a listener that watches for incoming queries over IPC,
    # executes them, and returns the responses to the remote renderer processes
    ipc.on 'database-query', @databaseManager.onIPCDatabaseQuery

    @databaseManager.on "setup-error", @_logout

  # Public: Executes the given command.
  #
  # If it isn't handled globally, delegate to the currently focused window.
src/browser/database-manager.coffee (new file, 144 lines)

@@ -0,0 +1,144 @@
_ = require 'underscore'
fs = require 'fs-plus'
path = require 'path'
exec = require('child_process').exec

{EventEmitter} = require 'events'

# The singleton Browser interface to the Nylas Mail database.
class DatabaseManager
  _.extend @prototype, EventEmitter.prototype

  constructor: ({@resourcePath}) ->
    @_databases = {}
    @_prepPromises = {}

  _query: (db, query, values) ->
    return new Promise (resolve, reject) ->
      onQueryComplete = (err, result) ->
        if err
          reject(err)
        else
          runtime = db.lastQueryTime()
          if runtime > 250
            console.log("Query #{query} took #{runtime}msec")
          resolve(result)

      if query[0..5] is 'SELECT'
        db.query(query, values, null, onQueryComplete)
      else
        db.query(query, values, onQueryComplete)

  # Public: Called by `DatabaseConnection` in each window to ensure the DB
  # is fully set up
  #
  # - `databasePath` The database we want to prepare
  # - `callback` A callback that's fired once the DB is set up. We can't
  #   return a promise because they don't work across the IPC bridge.
  #
  # Returns nothing
  prepare: (databasePath, callback) =>
    if @_databases[databasePath]
      callback()
    else
      @_prepPromises[databasePath] ?= @_createNewDatabase(databasePath).then (db) =>
        @_databases[databasePath] = db
        return Promise.resolve()

      @_prepPromises[databasePath].then(callback).catch (err) ->
        console.error "Error preparing the database"
        console.error err

    return

  ## TODO: In the future, migrations shouldn't come as DB creates from
  # data objects in Utils. For now we'll pass them in here so we can
  # access them later. This also prevents us from adding an extra argument
  # to a bunch of functions in the chain.
  addSetupQueries: (databasePath, setupQueries=[]) =>
    @_setupQueries ?= {}
    @_setupQueries[databasePath] = setupQueries

  _closeDatabaseConnection: (databasePath) ->
    @_databases[databasePath]?.close()
    delete @_databases[databasePath]

  closeDatabaseConnections: ->
    for path, val of @_databases
      @_closeDatabaseConnection(path)

  onIPCDatabaseQuery: (event, {databasePath, queryKey, query, values}) =>
    db = @_databases[databasePath]

    if not db
      err = new Error("Database not prepared"); result = null
      event.sender.send('database-result', {queryKey, err, result})
      return

    @_query(db, query, values).then (result) ->
      err = null
      event.sender.send('database-result', {queryKey, err, result})
    .catch (err) ->
      result = null
      event.sender.send('database-result', {queryKey, err, result})

  # Resolves when a new database has been created and the initial setup
  # migration has run successfully.
  _createNewDatabase: (databasePath) ->
    @_getDBAdapter().then (dbAdapter) =>
      # Create a new database for the requested path
      db = dbAdapter(databasePath)

      # By default, dblite stops all query execution when a query
      # returns an error. We want to propagate those errors out, but
      # still allow queries to be made.
      db.ignoreErrors = true

      setupQueries = @_setupQueries?[databasePath] ? []

      # Resolves when the DB has been initialized
      return @_runSetupQueries(db, setupQueries)

  # Takes a set of queries to initialize the database with
  #
  # - `db` The database to initialize
  # - `setupQueries` A list of string queries to execute in order
  #
  # Returns a {Promise} that:
  #   - resolves when all setup queries are complete
  #   - rejects if any query had an error
  _runSetupQueries: (db, setupQueries=[]) ->
    setupPromise = Promise.all setupQueries.map (query) =>
      @_query(db, query, [])

    setupPromise.then ->
      return Promise.resolve(db)
    .catch (err) =>
      @emit "setup-error", err
      @_deleteAllDatabases()
      console.error "There was an error setting up the database #{err?.message}"
      return Promise.reject(err)

  _getDBAdapter: ->
    # return a promise that resolves after we've configured dblite for our platform
    return new Promise (resolve, reject) =>
      dblite = require('../../vendor/dblite-custom').withSQLite('3.8.6+')
      vendor = path.join(@resourcePath.replace('app.asar', 'app.asar.unpacked'), '/vendor')

      if process.platform is 'win32'
        dblite.bin = "#{vendor}/sqlite3-win32.exe"
        resolve(dblite)
      else if process.platform is 'linux'
        exec "uname -a", (err, stdout, stderr) ->
          arch = if stdout.toString().indexOf('x86_64') is -1 then "32" else "64"
          dblite.bin = "#{vendor}/sqlite3-linux-#{arch}"
          resolve(dblite)
      else if process.platform is 'darwin'
        dblite.bin = "#{vendor}/sqlite3-darwin"
        resolve(dblite)

  _deleteAllDatabases: ->
    for path, val of @_databases
      @_closeDatabaseConnection(path)
      fs.unlinkSync(path)

module.exports = DatabaseManager
@@ -92,6 +92,7 @@ class Actions
  @uploadStateChanged: ActionScopeGlobal
  @fileAborted: ActionScopeGlobal
  @downloadStateChanged: ActionScopeGlobal
  @linkFileToUpload: ActionScopeGlobal
  @fileUploaded: ActionScopeGlobal
  @multiWindowNotification: ActionScopeGlobal
  @sendDraftSuccess: ActionScopeGlobal
src/flux/stores/database-connection.coffee (new file, 171 lines)

@@ -0,0 +1,171 @@
_ = require 'underscore'
ipc = require 'ipc'
remote = require 'remote'

PriorityUICoordinator = require '../../priority-ui-coordinator'

{AttributeCollection, AttributeJoinedData} = require '../attributes'

{modelClassMap,
 tableNameForJoin} = require '../models/utils'

DEBUG_TO_LOG = false

# The DatabaseConnection dispatches queries to the Browser process via IPC and listens
# for results. It maintains a hash of `_queryRecords` representing queries that are
# currently running and fires promise callbacks when complete.
#
class DatabaseConnection
  constructor: (@_databasePath) ->
    @_queryId = 0
    @_windowId = remote.getCurrentWindow().id
    @_isConnected = false
    @_queryRecords = {}
    @_pendingQueries = []

    ipc.on 'database-result', @_onDatabaseResult

    return @

  # This grabs a reference to the database from the browser backend
  connect: ->
    @_isConnected = false
    databaseManager = remote.getGlobal('application').databaseManager

    ## TODO Make this a nicer migration-based system
    if atom.isMainWindow()
      databaseManager.addSetupQueries(@_databasePath, @_setupQueries())

    databaseManager.prepare @_databasePath, =>
      @_isConnected = true
      @_flushPendingQueries()

  # Executes a query via IPC and returns a promise that resolves or
  # rejects when the query is complete.
  #
  # We don't know if the query is complete until the `database-result` ipc
  # command returns, so we need to cache the Promise's resolve and reject
  # handlers
  query: (query, values=[], options={}) =>
    if not query
      throw new Error("no query")

    return new Promise (resolve, reject) =>
      @_queryId += 1
      queryKey = "#{@_windowId}-#{@_queryId}"

      @_queryRecords[queryKey] = {
        query: query
        start: Date.now()
        values: values
        reject: reject
        resolve: resolve
        options: options
      }

      if @isConnected()
        databasePath = @_databasePath
        ipc.send('database-query', {databasePath, queryKey, query, values})
      else
        @_pendingQueries.push({queryKey, query, values})

  isConnected: -> @_isConnected

  _flushPendingQueries: =>
    qs = _.clone(@_pendingQueries)
    @_pendingQueries = []
    for queryArgs in qs
      {queryKey, query, values} = queryArgs
      databasePath = @_databasePath
      ipc.send('database-query', {databasePath, queryKey, query, values})

  _onDatabaseResult: ({queryKey, err, result}) =>
    record = @_queryRecords[queryKey]
    return unless record

    {query, start, values, reject, resolve, options} = record

    @_logQuery(query, start, result)

    if options.evaluateImmediately
      uiBusyPromise = Promise.resolve()
    else
      uiBusyPromise = PriorityUICoordinator.settle

    uiBusyPromise.then =>
      delete @_queryRecords[queryKey]
      if err
        @_logQueryError(err.message, query, values)
        reject(err)
      else
        resolve(result)

  _logQuery: (query, start, result) ->
    duration = Date.now() - start
    metadata =
      duration: duration
      resultLength: result?.length

    console.debug(DEBUG_TO_LOG, "DatabaseStore: (#{duration}) #{query}", metadata)
    if duration > 300
      atom.errorReporter.shipLogs("Poor Query Performance")

  _logQueryError: (message, query, values) ->
    console.error("DatabaseStore: Query #{query}, #{JSON.stringify(values)} failed #{message ? ""}")

  ## TODO: Make these a nicer migration-based system
  _setupQueries: ->
    queries = []
    queries.push "PRAGMA journal_mode=WAL;"
    for key, klass of modelClassMap()
      continue unless klass.attributes
      queries = queries.concat @_setupQueriesForTable(klass)
    return queries

  _setupQueriesForTable: (klass) =>
    attributes = _.values(klass.attributes)
    queries = []

    # Identify attributes of this class that can be matched against. These
    # attributes need their own columns in the table
    columnAttributes = _.filter attributes, (attr) ->
      attr.queryable && attr.columnSQL && attr.jsonKey != 'id'

    columns = ['id TEXT PRIMARY KEY', 'data BLOB']
    columnAttributes.forEach (attr) ->
      columns.push(attr.columnSQL())

    columnsSQL = columns.join(',')
    queries.unshift("CREATE TABLE IF NOT EXISTS `#{klass.name}` (#{columnsSQL})")
    queries.push("CREATE UNIQUE INDEX IF NOT EXISTS `#{klass.name}_id` ON `#{klass.name}` (`id`)")

    # Identify collection attributes that can be matched against. These require
    # JOIN tables. (Right now the only one of these is Thread.tags)
    collectionAttributes = _.filter attributes, (attr) ->
      attr.queryable && attr instanceof AttributeCollection
    collectionAttributes.forEach (attribute) ->
      joinTable = tableNameForJoin(klass, attribute.itemClass)
      joinIndexName = "#{joinTable.replace('-', '_')}_id_val"
      queries.push("CREATE TABLE IF NOT EXISTS `#{joinTable}` (id TEXT KEY, `value` TEXT)")
      queries.push("CREATE UNIQUE INDEX IF NOT EXISTS `#{joinIndexName}` ON `#{joinTable}` (`id`,`value`)")

    joinedDataAttributes = _.filter attributes, (attr) ->
      attr instanceof AttributeJoinedData
    joinedDataAttributes.forEach (attribute) ->
      queries.push("CREATE TABLE IF NOT EXISTS `#{attribute.modelTable}` (id TEXT PRIMARY KEY, `value` TEXT)")

    if klass.additionalSQLiteConfig?.setup?
      queries = queries.concat(klass.additionalSQLiteConfig.setup())
    queries

module.exports = DatabaseConnection
@ -1,106 +1,18 @@
|
|||
async = require 'async'
|
||||
remote = require 'remote'
|
||||
_ = require 'underscore'
|
||||
Actions = require '../actions'
|
||||
path = require 'path'
|
||||
|
||||
Model = require '../models/model'
|
||||
Actions = require '../actions'
|
||||
LocalLink = require '../models/local-link'
|
||||
ModelQuery = require '../models/query'
|
||||
PriorityUICoordinator = require '../../priority-ui-coordinator'
|
||||
NylasStore = require '../../../exports/nylas-store'
|
||||
DatabaseConnection = require './database-connection'
|
||||
|
||||
{AttributeCollection, AttributeJoinedData} = require '../attributes'
|
||||
{modelFromJSON,
|
||||
modelClassMap,
|
||||
tableNameForJoin,
|
||||
|
||||
{tableNameForJoin,
|
||||
generateTempId,
|
||||
isTempId} = require '../models/utils'
|
||||
fs = require 'fs-plus'
|
||||
path = require 'path'
|
||||
ipc = require 'ipc'
|
||||
|
||||
{Listener, Publisher} = require '../modules/reflux-coffee'
|
||||
CoffeeHelpers = require '../coffee-helpers'
|
||||
|
||||
silent = atom.getLoadSettings().isSpec
|
||||
printToConsole = false
|
||||
|
||||
# The DatabaseProxy dispatches queries to the Browser process via IPC and listens
|
||||
# for results. It maintains a hash of `queryRecords` representing queries that are
|
||||
# currently running and fires the correct callbacks when data is received.
|
||||
#
|
||||
class DatabaseProxy
|
||||
constructor: (@databasePath) ->
|
||||
@windowId = remote.getCurrentWindow().id
|
||||
@queryRecords = {}
|
||||
@queryId = 0
|
||||
|
||||
ipc.on 'database-result', ({queryKey, err, result}) =>
|
||||
record = @queryRecords[queryKey]
|
||||
return unless record
|
||||
|
||||
{callback, options, start, query} = record
|
||||
|
||||
duration = Date.now() - start
|
||||
metadata =
|
||||
duration: duration
|
||||
result_length: result?.length
|
||||
|
||||
console.debug(printToConsole, "DatabaseStore: (#{duration}) #{query}", metadata)
|
||||
if duration > 300
|
||||
atom.errorReporter.shipLogs("Poor Query Performance")
|
||||
|
||||
waits = Promise.resolve()
|
||||
waits = PriorityUICoordinator.settle unless options.evaluateImmediately
|
||||
waits.then =>
|
||||
callback(err, result) if callback
|
||||
delete @queryRecords[queryKey]
|
||||
|
||||
@
|
||||
|
||||
query: (query, values, callback, options) ->
|
||||
@queryId += 1
|
||||
queryKey = "#{@windowId}-#{@queryId}"
|
||||
@queryRecords[queryKey] = {
|
||||
callback: callback,
|
||||
query: query,
|
||||
options: options,
|
||||
start: Date.now()
|
||||
}
|
||||
ipc.send('database-query', {@databasePath, queryKey, query, values})
|
||||
|
||||
# DatabasePromiseTransaction converts the callback syntax of the Database
|
||||
# into a promise syntax with nice features like serial execution of many
|
||||
# queries in the same promise.
|
||||
#
|
||||
class DatabasePromiseTransaction
|
||||
constructor: (@_db, @_resolve, @_reject) ->
|
||||
@_running = 0
|
||||
|
||||
execute: (query, values, querySuccess, queryFailure, options = {}) ->
|
||||
# Wrap any user-provided success callback in one that checks query time
|
||||
callback = (err, result) =>
|
||||
if err
|
||||
console.error("DatabaseStore: Query #{query}, #{JSON.stringify(values)} failed #{err.message}")
|
||||
queryFailure(err) if queryFailure
|
||||
@_reject(err)
|
||||
else
|
||||
querySuccess(result) if querySuccess
|
||||
|
||||
# The user can attach things to the finish promise to run code after
|
||||
# the completion of all pending queries in the transaction. We fire
|
||||
# the resolve function after a delay because we need to wait for the
|
||||
# transaction to be GC'd and give up it's lock
|
||||
@_running -= 1
|
||||
if @_running == 0
|
||||
@_resolve(result)
|
||||
|
||||
@_running += 1
|
||||
@_db.query(query, values || [], callback, options)
|
||||
|
||||
executeInSeries: (queries) ->
|
||||
async.eachSeries queries
|
||||
, (query, callback) =>
|
||||
@execute(query, [], -> callback())
|
||||
, (err) =>
|
||||
@_resolve()
|
||||
|
||||
###
|
||||
Public: Nylas Mail is built on top of a custom database layer modeled after
|
||||
|
|
@ -146,267 +58,37 @@ are in your displayed set before refreshing.
|
|||
|
||||
Section: Database
|
||||
###
|
||||
class DatabaseStore
|
||||
@include: CoffeeHelpers.includeModule
|
||||
|
||||
@include Publisher
|
||||
@include Listener
|
||||
class DatabaseStore extends NylasStore
|
||||
|
||||
constructor: ->
|
||||
@_root = atom.isMainWindow()
|
||||
@_triggerPromise = null
|
||||
@_localIdLookupCache = {}
|
||||
@_db = null
|
||||
|
||||
if atom.inSpecMode()
|
||||
@_dbPath = null
|
||||
@_databasePath = path.join(atom.getConfigDirPath(),'edgehill.test.db')
|
||||
else
|
||||
@_dbPath = path.join(atom.getConfigDirPath(),'edgehill.db')
|
||||
@_databasePath = path.join(atom.getConfigDirPath(),'edgehill.db')
|
||||
|
||||
# Setup the database tables
|
||||
_.defer => @openDatabase({createTables: @_root})
|
||||
@_dbConnection = new DatabaseConnection(@_databasePath)
|
||||
|
||||
# It's important that this defer is here because we can't let queries
|
||||
# commence while the app is in its `require` phase. We'll queue all of
|
||||
# the reqeusts before the DB is setup and handle them properly later
|
||||
_.defer =>
|
||||
@_dbConnection.connect() unless atom.inSpecMode()
|
||||
|
||||
inTransaction: (options = {}, callback) =>
|
||||
new Promise (resolve, reject) =>
|
||||
aquire = =>
|
||||
db = @_db || options.database
|
||||
return setTimeout(aquire, 50) unless db
|
||||
callback(new DatabasePromiseTransaction(db, resolve, reject))
|
||||
aquire()
|
||||
|
||||
forEachClass: (callback) =>
|
||||
classMap = modelClassMap()
|
||||
for key, klass of classMap
|
||||
callback(klass) if klass.attributes
|
||||
|
||||
openDatabase: (options = {createTables: false}, callback) =>
|
||||
app = remote.getGlobal('application')
|
||||
app.prepareDatabase @_dbPath, =>
|
||||
database = new DatabaseProxy(@_dbPath)
|
||||
|
||||
if options.createTables
|
||||
# Initialize the database and setup our schema. Note that we try to do this every
|
||||
# time right now and just do `IF NOT EXISTS`. In the future we need much better migration
|
||||
# support.
|
||||
@inTransaction {database: database}, (tx) =>
|
||||
tx.execute('PRAGMA journal_mode=WAL;')
|
||||
queries = []
|
||||
@forEachClass (klass) =>
|
||||
queries = queries.concat(@queriesForTableSetup(klass))
|
||||
tx.executeInSeries(queries)
|
||||
.then =>
|
||||
@_db = database
|
||||
callback() if callback
|
||||
.catch ->
|
||||
# An error occured - most likely a schema change. Log the user out so the
|
||||
# database is compeltely reset.
|
||||
atom.logout()
|
||||
else
|
||||
@_db = database
|
||||
callback() if callback
|
||||
|
||||
closeDBConnection: =>
|
||||
app = remote.getGlobal('application')
|
||||
app.closeDBConnection(@_dbPath)
|
||||
@_db = null
|
||||
@trigger({})
|
||||
|
||||
# TriggerSoon is a guarded version of trigger that can accumulate changes.
|
||||
# This means that even if you're a bad person and call `persistModel` 100 times
|
||||
# from 100 task objects queued at the same time, it will only create one
|
||||
# `trigger` event. This is important since the database triggering impacts
|
||||
# the entire application.
|
||||
triggerSoon: (change) =>
|
||||
flush = =>
|
||||
return unless @_changeAccumulated
|
||||
clearTimeout(@_changeFireTimer) if @_changeFireTimer
|
||||
@trigger(@_changeAccumulated)
|
||||
@_changeAccumulated = null
|
||||
@_changeFireTimer = null
|
||||
|
||||
set = (change) =>
|
||||
clearTimeout(@_changeFireTimer) if @_changeFireTimer
|
||||
@_changeAccumulated = change
|
||||
@_changeFireTimer = setTimeout(flush, 20)
|
||||
|
||||
concat = (change) =>
|
||||
@_changeAccumulated.objects.push(change.objects...)
|
||||
|
||||
if not @_changeAccumulated
|
||||
set(change)
|
||||
else if @_changeAccumulated.objectClass is change.objectClass and @_changeAccumulated.type is change.type
|
||||
concat(change)
|
||||
else
|
||||
flush()
|
||||
set(change)

  writeModels: (tx, models) =>
    # IMPORTANT: This method assumes that all the models you
    # provide are of the same class, and have different ids!

    # Avoid trying to write too many objects at a time - sqlite can only handle
    # value sets `(?,?)...` of less than SQLITE_MAX_COMPOUND_SELECT (500),
    # and we don't know ahead of time whether we'll hit that or not.
    if models.length > 100
      @writeModels(tx, models[0..99])
      @writeModels(tx, models[100..models.length])
      return

    klass = models[0].constructor
    attributes = _.values(klass.attributes)
    ids = []

    columnAttributes = _.filter attributes, (attr) ->
      attr.queryable && attr.columnSQL && attr.jsonKey != 'id'

    # Compute the columns in the model table and a question mark string
    columns = ['id', 'data']
    marks = ['?', '?']
    columnAttributes.forEach (attr) ->
      columns.push(attr.jsonKey)
      marks.push('?')
    columnsSQL = columns.join(',')
    marksSet = "(#{marks.join(',')})"

    # Prepare a batch insert VALUES (?,?,?), (?,?,?)... by assembling
    # an array of the values and a corresponding question mark set
    values = []
    marks = []
    for model in models
      json = model.toJSON()
      ids.push(model.id)
      values.push(model.id, JSON.stringify(json))
      columnAttributes.forEach (attr) ->
        values.push(json[attr.jsonKey])
      marks.push(marksSet)

    marksSQL = marks.join(',')
    tx.execute("REPLACE INTO `#{klass.name}` (#{columnsSQL}) VALUES #{marksSQL}", values)

    # For each join table property, find all the items in the join table for this
    # model and delete them. Insert each new value back into the table.
    collectionAttributes = _.filter attributes, (attr) ->
      attr.queryable && attr instanceof AttributeCollection

    collectionAttributes.forEach (attr) ->
      joinTable = tableNameForJoin(klass, attr.itemClass)

      tx.execute("DELETE FROM `#{joinTable}` WHERE `id` IN ('#{ids.join("','")}')")

      joinMarks = []
      joinedValues = []
      for model in models
        joinedModels = model[attr.modelKey]
        if joinedModels
          for joined in joinedModels
            joinMarks.push('(?,?)')
            joinedValues.push(model.id, joined.id)

      unless joinedValues.length is 0
        # Write no more than 200 items (400 values) at once to avoid sqlite limits
        for slice in [0..Math.floor(joinedValues.length / 400)] by 1
          [ms, me] = [slice*200, slice*200 + 199]
          [vs, ve] = [slice*400, slice*400 + 399]
          tx.execute("INSERT OR IGNORE INTO `#{joinTable}` (`id`, `value`) VALUES #{joinMarks[ms..me].join(',')}", joinedValues[vs..ve])

    # For each joined data property stored in another table...
    values = []
    marks = []
    joinedDataAttributes = _.filter attributes, (attr) ->
      attr instanceof AttributeJoinedData

    joinedDataAttributes.forEach (attr) ->
      for model in models
        if model[attr.modelKey]?
          tx.execute("REPLACE INTO `#{attr.modelTable}` (`id`, `value`) VALUES (?, ?)", [model.id, model[attr.modelKey]])

    # For each model, execute any other code the model wants to run.
    # This allows model classes to do things like update a full-text table
    # that holds a composite of several fields
    if klass.additionalSQLiteConfig?.writeModel?
      for model in models
        klass.additionalSQLiteConfig.writeModel(tx, model)
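
  # To make the batch REPLACE above concrete: assuming two hypothetical Thread
  # models with a single queryable `subject` column, writeModels assembles:
  #
  #   REPLACE INTO `Thread` (id,data,subject) VALUES (?,?,?),(?,?,?)
  #   # values: ['t1', '<json>', 'Hello', 't2', '<json>', 'Re: Hello']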

  deleteModel: (tx, model) =>
    klass = model.constructor
    attributes = _.values(klass.attributes)

    # Delete the primary record
    tx.execute("DELETE FROM `#{klass.name}` WHERE `id` = ?", [model.id])

    # For each join table property, find all the items in the join table for this
    # model and delete them.
    collectionAttributes = _.filter attributes, (attr) ->
      attr.queryable && attr instanceof AttributeCollection

    collectionAttributes.forEach (attr) ->
      joinTable = tableNameForJoin(klass, attr.itemClass)
      tx.execute("DELETE FROM `#{joinTable}` WHERE `id` = ?", [model.id])

    joinedDataAttributes = _.filter attributes, (attr) ->
      attr instanceof AttributeJoinedData

    joinedDataAttributes.forEach (attr) ->
      tx.execute("DELETE FROM `#{attr.modelTable}` WHERE `id` = ?", [model.id])

    # Execute any other code the model wants to run.
    # This allows model classes to do things like update a full-text table
    # that holds a composite of several fields, or update entirely
    # separate database systems
    klass.additionalSQLiteConfig?.deleteModel?(tx, model)

  # Public: Asynchronously writes `model` to the cache and triggers a change event.
  # Returns a promise that resolves when the query has been completed and
  # rejects when the query has failed.
  #
  # - `model` A {Model} to write to the database.
  #
  persistModel: (model) =>
    @inTransaction {}, (tx) =>
      tx.execute('BEGIN TRANSACTION')
      @writeModels(tx, [model])
      tx.execute('COMMIT')
      @triggerSoon({objectClass: model.constructor.name, objects: [model], type: 'persist'})

  # If a query is made while the connection is being set up, the
  # DatabaseConnection will queue the queries and fire them after it has
  # been set up. The Promise returned here won't resolve until that happens.
  _query: (query, values=[], options={}) =>
    return @_dbConnection.query(query, values, options)
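
  # A minimal sketch of the queueing described above. The shape below is
  # illustrative only, not the actual DatabaseConnection implementation:
  #
  #   class QueueingConnection
  #     constructor: ->
  #       @_ready = false
  #       @_queued = []
  #
  #     query: (sql, values = [], options = {}) ->
  #       new Promise (resolve, reject) =>
  #         item = {sql, values, options, resolve, reject}
  #         if @_ready then @_run(item) else @_queued.push(item)
  #
  #     # Called once the database exists and migrations have run. Flushes the
  #     # backlog in order, then serves new queries directly.
  #     onSetupComplete: ->
  #       @_ready = true
  #       @_run(item) for item in @_queued
  #       @_queued = []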

  # Public: Asynchronously writes `models` to the cache and triggers a single change
  # event. Note: Models must be of the same class to be persisted in a batch operation.
  #
  # - `models` An {Array} of {Model} objects to write to the database.
  #
  persistModels: (models) =>
    klass = models[0].constructor
    @inTransaction {}, (tx) =>
      tx.execute('BEGIN TRANSACTION')
      ids = {}
      for model in models
        unless model.constructor == klass
          throw new Error("persistModels(): When you batch persist objects, they must be of the same type")
        if ids[model.id]
          throw new Error("persistModels(): You must pass an array of models with different ids. ID #{model.id} is in the set multiple times.")
        ids[model.id] = true

      @writeModels(tx, models)
      tx.execute('COMMIT')
      @triggerSoon({objectClass: models[0].constructor.name, objects: models, type: 'persist'})

  # Public: Asynchronously removes `model` from the cache and triggers a change event.
  #
  # - `model` A {Model} to remove from the database.
  #
  unpersistModel: (model) =>
    @inTransaction {}, (tx) =>
      tx.execute('BEGIN TRANSACTION')
      @deleteModel(tx, model)
      tx.execute('COMMIT')
      @triggerSoon({objectClass: model.constructor.name, objects: [model], type: 'unpersist'})

  swapModel: ({oldModel, newModel, localId}) =>
    @inTransaction {}, (tx) =>
      tx.execute('BEGIN TRANSACTION')
      @deleteModel(tx, oldModel)
      @writeModels(tx, [newModel])
      @writeModels(tx, [new LocalLink(id: localId, objectId: newModel.id)]) if localId
      tx.execute('COMMIT')
      @triggerSoon({objectClass: newModel.constructor.name, objects: [oldModel, newModel], type: 'swap'})
      Actions.didSwapModel({oldModel, newModel, localId})

  ########################################################################
  ########################### PUBLIC METHODS #############################
  ########################################################################

  ###
  ActiveRecord-style Querying

@@ -479,9 +161,10 @@ class DatabaseStore
  # - `class` The class of the {Model} you're trying to retrieve.
  # - `localId` The {String} localId of the object.
  #
  # Returns a {Promise} that resolves with the Model associated with the localId,
  # or rejects if no matching object is found.
  # Returns a {Promise} that:
  # - resolves with the Model associated with the localId
  # - rejects if no matching object is found
  #
  # Note: When fetching an object by local Id, joined attributes
  # (like body, stored in a separate table) are always included.
  #

@@ -500,8 +183,8 @@ class DatabaseStore
  # - `localId` (optional) The {String} localId. If you don't pass a LocalId, one
  #   will be automatically assigned.
  #
  # Returns a {Promise} that resolves with the localId assigned to the model.
  #
  # Returns a {Promise} that:
  # - resolves with the localId assigned to the model
  bindToLocalId: (model, localId = null) =>
    return Promise.reject(new Error("You must provide a model to bindToLocalId")) unless model

@@ -524,7 +207,8 @@ class DatabaseStore
  #
  # - `model` A {Model} object to assign a localId.
  #
  # Returns a {Promise} that resolves with the {String} localId.
  # Returns a {Promise} that:
  # - resolves with the {String} localId.
  localIdForModel: (model) =>
    return Promise.reject(new Error("You must provide a model to localIdForModel")) unless model

@@ -539,55 +223,289 @@ class DatabaseStore
    else
      @bindToLocalId(model).then(resolve).catch(reject)

  # Heavy Lifting

  # Public: Executes a {ModelQuery} on the local database.
  #
  # - `modelQuery` A {ModelQuery} to execute.
  #
  # Returns a {Promise} that resolves with the result of the database query.
  #
  # Returns a {Promise} that
  # - resolves with the result of the database query.
  run: (modelQuery) =>
    @inTransaction {readonly: true}, (tx) ->
      tx.execute(modelQuery.sql(), [], null, null, modelQuery.executeOptions())
    @_query(modelQuery.sql(), [], null, null, modelQuery.executeOptions())
    .then (result) ->
      Promise.resolve(modelQuery.formatResult(result))
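
  # Usage sketch (hypothetical; assumes the Thread model class and the
  # query-builder API used elsewhere in this store):
  #
  #   DatabaseStore.run(DatabaseStore.findAll(Thread)).then (threads) ->
  #     console.log("loaded #{threads.length} threads")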

  queriesForTableSetup: (klass) =>
    attributes = _.values(klass.attributes)
    queries = []
  # Public: Asynchronously writes `model` to the cache and triggers a change event.
  #
  # - `model` A {Model} to write to the database.
  #
  # Returns a {Promise} that
  # - resolves after the database queries are complete and any listening
  #   database callbacks have finished
  # - rejects if any database query fails or one of the triggering
  #   callbacks failed
  persistModel: (model) =>
    return Promise.all([
      Promise.all([
        @_query('BEGIN TRANSACTION')
        @_writeModels([model])
        @_query('COMMIT')
      ]),
      @_triggerSoon({objectClass: model.constructor.name, objects: [model], type: 'persist'})
    ])
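
  # Usage sketch (assuming `draft` is a Message model already in hand): the
  # returned promise resolves only after COMMIT has run and the accumulated
  # change event has fired.
  #
  #   DatabaseStore.persistModel(draft).then ->
  #     console.log("draft saved and listeners notified")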

  # Public: Asynchronously writes `models` to the cache and triggers a single change
  # event. Note: Models must be of the same class to be persisted in a batch operation.
  #
  # - `models` An {Array} of {Model} objects to write to the database.
  #
  # Returns a {Promise} that
  # - resolves after the database queries are complete and any listening
  #   database callbacks have finished
  # - rejects if any database query fails or one of the triggering
  #   callbacks failed
  persistModels: (models) =>
    klass = models[0].constructor
    ids = {}
    for model in models
      unless model.constructor == klass
        throw new Error("persistModels(): When you batch persist objects, they must be of the same type")
      if ids[model.id]
        throw new Error("persistModels(): You must pass an array of models with different ids. ID #{model.id} is in the set multiple times.")
      ids[model.id] = true

    return Promise.all([
      Promise.all([
        @_query('BEGIN TRANSACTION')
        @_writeModels(models)
        @_query('COMMIT')
      ]),
      @_triggerSoon({objectClass: models[0].constructor.name, objects: models, type: 'persist'})
    ])

  # Public: Asynchronously removes `model` from the cache and triggers a change event.
  #
  # - `model` A {Model} to remove from the database.
  #
  # Returns a {Promise} that
  # - resolves after the database queries are complete and any listening
  #   database callbacks have finished
  # - rejects if any database query fails or one of the triggering
  #   callbacks failed
  unpersistModel: (model) =>
    return Promise.all([
      Promise.all([
        @_query('BEGIN TRANSACTION')
        @_deleteModel(model)
        @_query('COMMIT')
      ]),
      @_triggerSoon({objectClass: model.constructor.name, objects: [model], type: 'unpersist'})
    ])

  # Public: Given an `oldModel` with a unique `localId`, swaps the old item
  # for `newModel` in the database.
  #
  # - `args` An arguments hash with:
  #   - `oldModel` The old model
  #   - `newModel` The new model
  #   - `localId` The localId to reference
  #
  # Returns a {Promise} that
  # - resolves after the database queries are complete and any listening
  #   database callbacks have finished
  # - rejects if any database query fails or one of the triggering
  #   callbacks failed
  swapModel: ({oldModel, newModel, localId}) =>
    queryPromise = Promise.all([
      @_query('BEGIN TRANSACTION')
      @_deleteModel(oldModel)
      @_writeModels([newModel])
      @_writeModels([new LocalLink(id: localId, objectId: newModel.id)]) if localId
      @_query('COMMIT')
    ])

    swapPromise = new Promise (resolve, reject) ->
      Actions.didSwapModel({oldModel, newModel, localId})
      resolve()

    triggerPromise = @_triggerSoon({objectClass: newModel.constructor.name, objects: [oldModel, newModel], type: 'swap'})

    return Promise.all([queryPromise, swapPromise, triggerPromise])
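
  # Usage sketch (hypothetical draft-save flow): replace a local draft with
  # the server's copy while the same localId keeps resolving to the new model.
  #
  #   DatabaseStore.swapModel(oldModel: localDraft, newModel: serverDraft, localId: localId)
  #     .then -> console.log("draft swapped")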

  ########################################################################
  ########################### PRIVATE METHODS ############################
  ########################################################################

  # _triggerSoon is a guarded version of trigger that can accumulate changes.
  # This means that even if you're a bad person and call `persistModel` 100 times
  # from 100 task objects queued at the same time, it will only create one
  # `trigger` event. This is important since the database triggering impacts
  # the entire application.
  _triggerSoon: (change) =>
    @_triggerPromise ?= new Promise (resolve, reject) =>
      @_resolve = resolve

    flush = =>
      return unless @_changeAccumulated
      clearTimeout(@_changeFireTimer) if @_changeFireTimer
      @trigger(@_changeAccumulated)
      @_changeAccumulated = null
      @_changeFireTimer = null
      @_resolve?()
      @_triggerPromise = null

    set = (change) =>
      clearTimeout(@_changeFireTimer) if @_changeFireTimer
      @_changeAccumulated = change
      @_changeFireTimer = setTimeout(flush, 20)

    concat = (change) =>
      @_changeAccumulated.objects.push(change.objects...)

    if not @_changeAccumulated
      set(change)
    else if @_changeAccumulated.objectClass is change.objectClass and @_changeAccumulated.type is change.type
      concat(change)
    else
      flush()
      set(change)

    return @_triggerPromise

  # Fires the queries required to write models to the DB
  #
  # Returns a promise that:
  # - resolves when all write queries are complete
  # - rejects if any query fails
  _writeModels: (models) =>
    promises = []

    # IMPORTANT: This method assumes that all the models you
    # provide are of the same class, and have different ids!

    # Avoid trying to write too many objects at a time - sqlite can only handle
    # value sets `(?,?)...` of less than SQLITE_MAX_COMPOUND_SELECT (500),
    # and we don't know ahead of time whether we'll hit that or not.
    if models.length > 100
      return Promise.all([
        @_writeModels(models[0..99])
        @_writeModels(models[100..models.length])
      ])

    klass = models[0].constructor
    attributes = _.values(klass.attributes)
    ids = []

    # Identify attributes of this class that can be matched against. These
    # attributes need their own columns in the table
    columnAttributes = _.filter attributes, (attr) ->
      attr.queryable && attr.columnSQL && attr.jsonKey != 'id'

    columns = ['id TEXT PRIMARY KEY', 'data BLOB']
    # Compute the columns in the model table and a question mark string
    columns = ['id', 'data']
    marks = ['?', '?']
    columnAttributes.forEach (attr) ->
      columns.push(attr.columnSQL())

      columns.push(attr.jsonKey)
      marks.push('?')
    columnsSQL = columns.join(',')
    queries.unshift("CREATE TABLE IF NOT EXISTS `#{klass.name}` (#{columnsSQL})")
    queries.push("CREATE UNIQUE INDEX IF NOT EXISTS `#{klass.name}_id` ON `#{klass.name}` (`id`)")
    marksSet = "(#{marks.join(',')})"

    # Identify collection attributes that can be matched against. These require
    # JOIN tables. (Right now the only one of these is Thread.tags)
    # Prepare a batch insert VALUES (?,?,?), (?,?,?)... by assembling
    # an array of the values and a corresponding question mark set
    values = []
    marks = []
    for model in models
      json = model.toJSON()
      ids.push(model.id)
      values.push(model.id, JSON.stringify(json))
      columnAttributes.forEach (attr) ->
        values.push(json[attr.jsonKey])
      marks.push(marksSet)

    marksSQL = marks.join(',')
    promises.push @_query("REPLACE INTO `#{klass.name}` (#{columnsSQL}) VALUES #{marksSQL}", values)

    # For each join table property, find all the items in the join table for this
    # model and delete them. Insert each new value back into the table.
    collectionAttributes = _.filter attributes, (attr) ->
      attr.queryable && attr instanceof AttributeCollection
    collectionAttributes.forEach (attribute) ->
      joinTable = tableNameForJoin(klass, attribute.itemClass)
      joinIndexName = "#{joinTable.replace('-', '_')}_id_val"
      queries.push("CREATE TABLE IF NOT EXISTS `#{joinTable}` (id TEXT KEY, `value` TEXT)")
      queries.push("CREATE UNIQUE INDEX IF NOT EXISTS `#{joinIndexName}` ON `#{joinTable}` (`id`,`value`)")

    collectionAttributes.forEach (attr) =>
      joinTable = tableNameForJoin(klass, attr.itemClass)

      promises.push @_query("DELETE FROM `#{joinTable}` WHERE `id` IN ('#{ids.join("','")}')")

      joinMarks = []
      joinedValues = []
      for model in models
        joinedModels = model[attr.modelKey]
        if joinedModels
          for joined in joinedModels
            joinMarks.push('(?,?)')
            joinedValues.push(model.id, joined.id)

      unless joinedValues.length is 0
        # Write no more than 200 items (400 values) at once to avoid sqlite limits
        for slice in [0..Math.floor(joinedValues.length / 400)] by 1
          [ms, me] = [slice*200, slice*200 + 199]
          [vs, ve] = [slice*400, slice*400 + 399]
          promises.push @_query("INSERT OR IGNORE INTO `#{joinTable}` (`id`, `value`) VALUES #{joinMarks[ms..me].join(',')}", joinedValues[vs..ve])

    # For each joined data property stored in another table...
    values = []
    marks = []
    joinedDataAttributes = _.filter attributes, (attr) ->
      attr instanceof AttributeJoinedData

    joinedDataAttributes.forEach (attr) =>
      for model in models
        if model[attr.modelKey]?
          promises.push @_query("REPLACE INTO `#{attr.modelTable}` (`id`, `value`) VALUES (?, ?)", [model.id, model[attr.modelKey]])

    # For each model, execute any other code the model wants to run.
    # This allows model classes to do things like update a full-text table
    # that holds a composite of several fields
    if klass.additionalSQLiteConfig?.writeModel?
      for model in models
        promises = promises.concat klass.additionalSQLiteConfig.writeModel(model)

    return Promise.all(promises)

  # Fires the queries required to delete models from the DB
  #
  # Returns a promise that:
  # - resolves when all deletion queries are complete
  # - rejects if any query fails
  _deleteModel: (model) =>
    promises = []

    klass = model.constructor
    attributes = _.values(klass.attributes)

    # Delete the primary record
    promises.push @_query("DELETE FROM `#{klass.name}` WHERE `id` = ?", [model.id])

    # For each join table property, find all the items in the join table for this
    # model and delete them.
    collectionAttributes = _.filter attributes, (attr) ->
      attr.queryable && attr instanceof AttributeCollection

    collectionAttributes.forEach (attr) =>
      joinTable = tableNameForJoin(klass, attr.itemClass)
      promises.push @_query("DELETE FROM `#{joinTable}` WHERE `id` = ?", [model.id])

    joinedDataAttributes = _.filter attributes, (attr) ->
      attr instanceof AttributeJoinedData
    joinedDataAttributes.forEach (attribute) ->
      queries.push("CREATE TABLE IF NOT EXISTS `#{attribute.modelTable}` (id TEXT PRIMARY KEY, `value` TEXT)")

    if klass.additionalSQLiteConfig?.setup?
      queries = queries.concat(klass.additionalSQLiteConfig.setup())
    queries
    joinedDataAttributes.forEach (attr) =>
      promises.push @_query("DELETE FROM `#{attr.modelTable}` WHERE `id` = ?", [model.id])

    # Execute any other code the model wants to run.
    # This allows model classes to do things like update a full-text table
    # that holds a composite of several fields, or update entirely
    # separate database systems
    promises = promises.concat klass.additionalSQLiteConfig?.deleteModel?(model)

    return Promise.all(promises)


module.exports = new DatabaseStore()

@@ -22,6 +22,7 @@ Section: Drafts
###
class DraftChangeSet
  constructor: (@localId, @_onChange) ->
    @_commitChain = Promise.resolve()
    @reset()

  reset: ->

@@ -41,18 +42,20 @@ class DraftChangeSet
    @_timer = setTimeout(@commit, 5000)
  commit: =>
    if Object.keys(@_pending).length is 0
      return Promise.resolve(true)
    @_commitChain = @_commitChain.then =>
      if Object.keys(@_pending).length is 0
        return Promise.resolve(true)

    DatabaseStore = require './database-store'
    DatabaseStore.findByLocalId(Message, @localId).then (draft) =>
      if not draft
        throw new Error("Tried to commit a draft that had already been removed from the database. DraftId: #{@localId}")
      draft = @applyToModel(draft)
      @_saving = @_pending
      @_pending = {}
      DatabaseStore.persistModel(draft).then =>
        @_saving = {}
      DatabaseStore = require './database-store'
      return DatabaseStore.findByLocalId(Message, @localId).then (draft) =>
        if not draft
          throw new Error("Tried to commit a draft that had already been removed from the database. DraftId: #{@localId}")
        @_saving = @_pending
        @_pending = {}
        draft = @applyToModel(draft)
        return DatabaseStore.persistModel(draft).then =>
          @_saving = {}
    return @_commitChain
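
  # The chain above serializes commits so overlapping saves cannot interleave.
  # A standalone sketch of the same pattern (illustrative names):
  #
  #   chain = Promise.resolve()
  #   enqueue = (work) ->
  #     chain = chain.then(work)
  #   enqueue(-> saveA())  # runs first
  #   enqueue(-> saveB())  # runs only after saveA's promise settles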

  applyToModel: (model) =>
    if model

@@ -133,9 +136,10 @@ class DraftStoreProxy
    return unless @_draft

    # Is this change an update to our draft?
    myDraft = _.find(change.objects, (obj) => obj.id == @_draft.id)
    if myDraft
      @_draft = _.extend @_draft, myDraft
    myDrafts = _.filter(change.objects, (obj) => obj.id == @_draft.id)

    if myDrafts.length > 0
      @_draft = _.extend @_draft, _.last(myDrafts)
    @trigger()

  _onDraftSwapped: (change) ->

@@ -14,6 +14,7 @@ FileUploadStore = Reflux.createStore
    # From Tasks
    @listenTo Actions.uploadStateChanged, @_onUploadStateChanged
    @listenTo Actions.linkFileToUpload, @_onLinkFileToUpload
    @listenTo Actions.fileUploaded, @_onFileUploaded
    @listenTo Actions.fileAborted, @_onFileAborted

@@ -73,8 +74,11 @@ FileUploadStore = Reflux.createStore
      }
    })

  _onFileUploaded: ({file, uploadData}) ->
  _onLinkFileToUpload: ({file, uploadData}) ->
    @_linkedFiles[file.id] = uploadData
    @trigger()

  _onFileUploaded: ({file, uploadData}) ->
    delete @_fileUploads[uploadData.uploadId]
    @trigger()

@@ -23,7 +23,7 @@ class NamespaceStore
  constructor: ->
    @_items = []
    @_current = null

    saveState = atom.config.get(saveStateKey)
    if saveState and _.isObject(saveState)
      @_current = (new Namespace).fromJSON(saveState)

@@ -95,6 +95,21 @@ class FileUploadTask
  _onRemoteSuccess: (file, resolve, reject) =>
    clearInterval(@progress)
    @req = null

    # The minute we know what file is associated with the upload, we need
    # to fire an Action to notify a popout window's FileUploadStore that
    # these two objects are linked. We unfortunately can't wait until
    # `_attachFileToDraft` resolves, because that will resolve after the
    # DB transaction is completed AND all of the callbacks have fired.
    # Unfortunately, the callback chain includes a render method, which
    # means that the upload would be left on the page for a split second
    # before we know the file has been uploaded.
    #
    # Associating the upload with the file ahead of time lets the
    # Composer know which ones to ignore when de-duping the upload/file
    # listing.
    Actions.linkFileToUpload(file: file, uploadData: @_uploadData("completed"))

    @_attachFileToDraft(file).then =>
      Actions.uploadStateChanged @_uploadData("completed")
      Actions.fileUploaded(file: file, uploadData: @_uploadData("completed"))