From 450afedaec4e51a4513772e304103b80139941fe Mon Sep 17 00:00:00 2001 From: Ben Gotow Date: Fri, 30 Sep 2016 15:24:34 -0700 Subject: [PATCH] es6(db): Move DatabaseStore to ES6 --- spec/action-bridge-spec.coffee | 2 +- .../query-subscription-pool-spec.coffee | 2 +- spec/models/query-subscription-spec.coffee | 2 +- spec/nylas-api-spec.coffee | 2 +- spec/package-manager-spec.coffee | 2 +- spec/stores/contact-store-spec.coffee | 2 +- spec/stores/database-store-spec.coffee | 2 +- spec/stores/draft-editing-session-spec.coffee | 2 +- spec/stores/message-store-spec.coffee | 2 +- spec/stores/task-queue-spec.coffee | 2 +- spec/tasks/change-folder-task-spec.coffee | 2 +- spec/tasks/change-labels-task-spec.coffee | 2 +- src/components/list-selection.coffee | 2 +- src/flux/action-bridge.coffee | 2 +- src/flux/models/query-subscription-pool.es6 | 2 +- src/flux/models/query.es6 | 2 +- src/flux/modules/reflux-coffee.coffee | 2 + src/flux/nylas-api.coffee | 2 +- src/flux/stores/account-store.coffee | 2 +- src/flux/stores/contact-ranking-store.coffee | 2 +- src/flux/stores/contact-store.coffee | 2 +- .../stores/database-setup-query-builder.es6 | 2 +- src/flux/stores/database-store.coffee | 641 --------------- src/flux/stores/database-store.es6 | 774 ++++++++++++++++++ src/flux/stores/draft-editing-session.coffee | 2 +- src/flux/stores/draft-factory.coffee | 2 +- src/flux/stores/draft-store.coffee | 2 +- src/flux/stores/file-upload-store.coffee | 2 +- src/flux/stores/focused-contacts-store.coffee | 2 +- src/flux/stores/focused-content-store.coffee | 2 +- src/flux/stores/mail-rules-store.coffee | 2 +- src/flux/stores/message-store.coffee | 2 +- .../stores/nylas-sync-status-store.coffee | 2 +- .../stores/task-queue-status-store.coffee | 2 +- src/flux/stores/task-queue.coffee | 2 +- src/flux/stores/thread-counts-store.coffee | 2 +- src/global/nylas-observables.coffee | 2 +- src/mail-rules-processor.coffee | 2 +- src/mailbox-perspective.coffee | 2 +- src/package-manager.coffee | 2 +- 40 files changed, 813 insertions(+), 678 deletions(-) delete mode 100644 src/flux/stores/database-store.coffee create mode 100644 src/flux/stores/database-store.es6 diff --git a/spec/action-bridge-spec.coffee b/spec/action-bridge-spec.coffee index 79e39c06b..6f0c0d93c 100644 --- a/spec/action-bridge-spec.coffee +++ b/spec/action-bridge-spec.coffee @@ -1,7 +1,7 @@ Reflux = require 'reflux' Actions = require '../src/flux/actions' Message = require('../src/flux/models/message').default -DatabaseStore = require '../src/flux/stores/database-store' +DatabaseStore = require('../src/flux/stores/database-store').default AccountStore = require '../src/flux/stores/account-store' ActionBridge = require '../src/flux/action-bridge', _ = require 'underscore' diff --git a/spec/models/query-subscription-pool-spec.coffee b/spec/models/query-subscription-pool-spec.coffee index 59407928e..009fca9c1 100644 --- a/spec/models/query-subscription-pool-spec.coffee +++ b/spec/models/query-subscription-pool-spec.coffee @@ -1,5 +1,5 @@ QuerySubscriptionPool = require('../../src/flux/models/query-subscription-pool').default -DatabaseStore = require '../../src/flux/stores/database-store' +DatabaseStore = require('../../src/flux/stores/database-store').default Label = require '../../src/flux/models/label' describe "QuerySubscriptionPool", -> diff --git a/spec/models/query-subscription-spec.coffee b/spec/models/query-subscription-spec.coffee index 0db86d6ee..51822a2fe 100644 --- a/spec/models/query-subscription-spec.coffee +++ 
b/spec/models/query-subscription-spec.coffee @@ -1,4 +1,4 @@ -DatabaseStore = require '../../src/flux/stores/database-store' +DatabaseStore = require('../../src/flux/stores/database-store').default QueryRange = require('../../src/flux/models/query-range').default MutableQueryResultSet = require('../../src/flux/models/mutable-query-result-set').default diff --git a/spec/nylas-api-spec.coffee b/spec/nylas-api-spec.coffee index 55c6142bd..3d390013e 100644 --- a/spec/nylas-api-spec.coffee +++ b/spec/nylas-api-spec.coffee @@ -5,7 +5,7 @@ NylasAPI = require '../src/flux/nylas-api' Thread = require('../src/flux/models/thread').default Message = require('../src/flux/models/message').default AccountStore = require '../src/flux/stores/account-store' -DatabaseStore = require '../src/flux/stores/database-store' +DatabaseStore = require('../src/flux/stores/database-store').default DatabaseTransaction = require('../src/flux/stores/database-transaction').default describe "NylasAPI", -> diff --git a/spec/package-manager-spec.coffee b/spec/package-manager-spec.coffee index 066cd6869..2c9dfdb71 100644 --- a/spec/package-manager-spec.coffee +++ b/spec/package-manager-spec.coffee @@ -1,6 +1,6 @@ path = require 'path' Package = require '../src/package' -DatabaseStore = require '../src/flux/stores/database-store' +DatabaseStore = require('../src/flux/stores/database-store').default {Disposable} = require 'event-kit' describe "PackageManager", -> diff --git a/spec/stores/contact-store-spec.coffee b/spec/stores/contact-store-spec.coffee index fe294a0c8..2581a56a0 100644 --- a/spec/stores/contact-store-spec.coffee +++ b/spec/stores/contact-store-spec.coffee @@ -5,7 +5,7 @@ Contact = require '../../src/flux/models/contact' NylasAPI = require '../../src/flux/nylas-api' ContactStore = require '../../src/flux/stores/contact-store' ContactRankingStore = require '../../src/flux/stores/contact-ranking-store' -DatabaseStore = require '../../src/flux/stores/database-store' +DatabaseStore = require('../../src/flux/stores/database-store').default AccountStore = require '../../src/flux/stores/account-store' {mockObservable} = NylasTestUtils diff --git a/spec/stores/database-store-spec.coffee b/spec/stores/database-store-spec.coffee index 13cdad609..d6b70319f 100644 --- a/spec/stores/database-store-spec.coffee +++ b/spec/stores/database-store-spec.coffee @@ -4,7 +4,7 @@ Label = require '../../src/flux/models/label' Thread = require('../../src/flux/models/thread').default TestModel = require '../fixtures/db-test-model' ModelQuery = require('../../src/flux/models/query').default -DatabaseStore = require '../../src/flux/stores/database-store' +DatabaseStore = require('../../src/flux/stores/database-store').default testMatchers = {'id': 'b'} testModelInstance = new TestModel(id: "1234") diff --git a/spec/stores/draft-editing-session-spec.coffee b/spec/stores/draft-editing-session-spec.coffee index b554ff61a..315fd3f4e 100644 --- a/spec/stores/draft-editing-session-spec.coffee +++ b/spec/stores/draft-editing-session-spec.coffee @@ -1,6 +1,6 @@ Message = require('../../src/flux/models/message').default Actions = require '../../src/flux/actions' -DatabaseStore = require '../../src/flux/stores/database-store' +DatabaseStore = require('../../src/flux/stores/database-store').default DatabaseTransaction = require('../../src/flux/stores/database-transaction').default DraftEditingSession = require '../../src/flux/stores/draft-editing-session' DraftChangeSet = DraftEditingSession.DraftChangeSet diff --git 
a/spec/stores/message-store-spec.coffee b/spec/stores/message-store-spec.coffee index 8c5f3287d..a8c8b513e 100644 --- a/spec/stores/message-store-spec.coffee +++ b/spec/stores/message-store-spec.coffee @@ -5,7 +5,7 @@ Message = require('../../src/flux/models/message').default FocusedContentStore = require '../../src/flux/stores/focused-content-store' FocusedPerspectiveStore = require '../../src/flux/stores/focused-perspective-store' MessageStore = require '../../src/flux/stores/message-store' -DatabaseStore = require '../../src/flux/stores/database-store' +DatabaseStore = require('../../src/flux/stores/database-store').default ChangeUnreadTask = require('../../src/flux/tasks/change-unread-task').default Actions = require '../../src/flux/actions' diff --git a/spec/stores/task-queue-spec.coffee b/spec/stores/task-queue-spec.coffee index c7b92779d..6fcbbf9de 100644 --- a/spec/stores/task-queue-spec.coffee +++ b/spec/stores/task-queue-spec.coffee @@ -1,5 +1,5 @@ Actions = require '../../src/flux/actions' -DatabaseStore = require '../../src/flux/stores/database-store' +DatabaseStore = require('../../src/flux/stores/database-store').default TaskQueue = require '../../src/flux/stores/task-queue' Task = require('../../src/flux/tasks/task').default TaskRegistry = require('../../src/task-registry').default diff --git a/spec/tasks/change-folder-task-spec.coffee b/spec/tasks/change-folder-task-spec.coffee index 503e9d06f..5ceddb39b 100644 --- a/spec/tasks/change-folder-task-spec.coffee +++ b/spec/tasks/change-folder-task-spec.coffee @@ -5,7 +5,7 @@ Message = require('../../src/flux/models/message').default Actions = require '../../src/flux/actions' NylasAPI = require '../../src/flux/nylas-api' Query = require('../../src/flux/models/query').default -DatabaseStore = require '../../src/flux/stores/database-store' +DatabaseStore = require('../../src/flux/stores/database-store').default ChangeFolderTask = require('../../src/flux/tasks/change-folder-task').default ChangeMailTask = require('../../src/flux/tasks/change-mail-task').default diff --git a/spec/tasks/change-labels-task-spec.coffee b/spec/tasks/change-labels-task-spec.coffee index f5624c13f..f5c5fbb4f 100644 --- a/spec/tasks/change-labels-task-spec.coffee +++ b/spec/tasks/change-labels-task-spec.coffee @@ -4,7 +4,7 @@ Thread = require('../../src/flux/models/thread').default Message = require('../../src/flux/models/message').default Actions = require '../../src/flux/actions' NylasAPI = require '../../src/flux/nylas-api' -DatabaseStore = require '../../src/flux/stores/database-store' +DatabaseStore = require('../../src/flux/stores/database-store').default ChangeLabelsTask = require('../../src/flux/tasks/change-labels-task').default ChangeMailTask = require('../../src/flux/tasks/change-mail-task').default diff --git a/src/components/list-selection.coffee b/src/components/list-selection.coffee index 811242ff1..3f4b8c45a 100644 --- a/src/components/list-selection.coffee +++ b/src/components/list-selection.coffee @@ -1,7 +1,7 @@ _ = require 'underscore' Model = require '../flux/models/model' -DatabaseStore = require '../flux/stores/database-store' +DatabaseStore = require('../flux/stores/database-store').default module.exports = class ListSelection diff --git a/src/flux/action-bridge.coffee b/src/flux/action-bridge.coffee index e410db2bc..b4c15d648 100644 --- a/src/flux/action-bridge.coffee +++ b/src/flux/action-bridge.coffee @@ -1,7 +1,7 @@ _ = require 'underscore' Actions = require './actions' Model = require './models/model' -DatabaseStore = require 
'./stores/database-store' +DatabaseStore = require('./stores/database-store').default DatabaseChangeRecord = require('./stores/database-change-record').default Utils = require './models/utils' diff --git a/src/flux/models/query-subscription-pool.es6 b/src/flux/models/query-subscription-pool.es6 index 60cfc86ae..1a92e2092 100644 --- a/src/flux/models/query-subscription-pool.es6 +++ b/src/flux/models/query-subscription-pool.es6 @@ -99,7 +99,7 @@ class QuerySubscriptionPool { } _setup() { - DatabaseStore = DatabaseStore || require('../stores/database-store'); + DatabaseStore = DatabaseStore || require('../stores/database-store').default; DatabaseStore.listen(this._onChange); } diff --git a/src/flux/models/query.es6 b/src/flux/models/query.es6 index 3e3586410..edb27bdb0 100644 --- a/src/flux/models/query.es6 +++ b/src/flux/models/query.es6 @@ -45,7 +45,7 @@ export default class ModelQuery { // constructor(klass, database) { this._klass = klass; - this._database = database || require('./database-store'); + this._database = database || require('./database-store').default; this._matchers = []; this._orders = []; this._distinct = false; diff --git a/src/flux/modules/reflux-coffee.coffee b/src/flux/modules/reflux-coffee.coffee index f33e696e6..be89736f9 100644 --- a/src/flux/modules/reflux-coffee.coffee +++ b/src/flux/modules/reflux-coffee.coffee @@ -77,6 +77,8 @@ module.exports = if listenable == this return 'Listener is not able to listen to itself' if !_.isFunction(listenable.listen) + console.log require('util').inspect(listenable) + console.log((new Error()).stack) return listenable + ' is missing a listen method' if listenable.hasListener and listenable.hasListener(this) return 'Listener cannot listen to this listenable because of circular loop' diff --git a/src/flux/nylas-api.coffee b/src/flux/nylas-api.coffee index cf1e24df0..ccb76e094 100644 --- a/src/flux/nylas-api.coffee +++ b/src/flux/nylas-api.coffee @@ -8,7 +8,7 @@ IdentityStore = require('./stores/identity-store').default Actions = require './actions' {APIError} = require './errors' PriorityUICoordinator = require '../priority-ui-coordinator' -DatabaseStore = require './stores/database-store' +DatabaseStore = require('./stores/database-store').default async = require 'async' # A 0 code is when an error returns without a status code. 
These are diff --git a/src/flux/stores/account-store.coffee b/src/flux/stores/account-store.coffee index d24542ad7..84e45ad83 100644 --- a/src/flux/stores/account-store.coffee +++ b/src/flux/stores/account-store.coffee @@ -3,7 +3,7 @@ NylasStore = require 'nylas-store' Actions = require '../actions' Account = require('../models/account').default Utils = require '../models/utils' -DatabaseStore = require './database-store' +DatabaseStore = require('./database-store').default keytar = require 'keytar' NylasAPI = null diff --git a/src/flux/stores/contact-ranking-store.coffee b/src/flux/stores/contact-ranking-store.coffee index be43de824..7a8bda9b3 100644 --- a/src/flux/stores/contact-ranking-store.coffee +++ b/src/flux/stores/contact-ranking-store.coffee @@ -1,6 +1,6 @@ Rx = require 'rx-lite' NylasStore = require 'nylas-store' -DatabaseStore = require './database-store' +DatabaseStore = require('./database-store').default AccountStore = require './account-store' class ContactRankingStore extends NylasStore diff --git a/src/flux/stores/contact-store.coffee b/src/flux/stores/contact-store.coffee index 200830515..54f93efca 100644 --- a/src/flux/stores/contact-store.coffee +++ b/src/flux/stores/contact-store.coffee @@ -7,7 +7,7 @@ Contact = require '../models/contact' Utils = require '../models/utils' NylasStore = require 'nylas-store' RegExpUtils = require '../../regexp-utils' -DatabaseStore = require './database-store' +DatabaseStore = require('./database-store').default AccountStore = require './account-store' ContactRankingStore = require './contact-ranking-store' _ = require 'underscore' diff --git a/src/flux/stores/database-setup-query-builder.es6 b/src/flux/stores/database-setup-query-builder.es6 index 7782e1a94..dc6af137d 100644 --- a/src/flux/stores/database-setup-query-builder.es6 +++ b/src/flux/stores/database-setup-query-builder.es6 @@ -84,7 +84,7 @@ export default class DatabaseSetupQueryBuilder { } if (klass.searchable === true) { - const DatabaseStore = require('./database-store'); + const DatabaseStore = require('./database-store').default; queries.push(DatabaseStore.createSearchIndexSql(klass)); } diff --git a/src/flux/stores/database-store.coffee b/src/flux/stores/database-store.coffee deleted file mode 100644 index f2a18dfac..000000000 --- a/src/flux/stores/database-store.coffee +++ /dev/null @@ -1,641 +0,0 @@ -_ = require 'underscore' -async = require 'async' -path = require 'path' -fs = require 'fs' -sqlite3 = require 'sqlite3' -Model = require '../models/model' -Utils = require '../models/utils' -Actions = require '../actions' -ModelQuery = require('../models/query').default -NylasStore = require '../../global/nylas-store' -PromiseQueue = require 'promise-queue' -PriorityUICoordinator = require '../../priority-ui-coordinator' -DatabaseSetupQueryBuilder = require('./database-setup-query-builder').default -DatabaseChangeRecord = require('./database-change-record').default -DatabaseTransaction = require('./database-transaction').default -JSONBlob = null - -{remote, ipcRenderer} = require 'electron' - -DatabaseVersion = 23 -DatabasePhase = - Setup: 'setup' - Ready: 'ready' - Close: 'close' - -DEBUG_TO_LOG = false -DEBUG_QUERY_PLANS = NylasEnv.inDevMode() - -BEGIN_TRANSACTION = 'BEGIN TRANSACTION' -COMMIT = 'COMMIT' - -TXINDEX = 0 - -### -Public: N1 is built on top of a custom database layer modeled after -ActiveRecord. For many parts of the application, the database is the source -of truth. 
Data is retrieved from the API, written to the database, and changes -to the database trigger Stores and components to refresh their contents. - -The DatabaseStore is available in every application window and allows you to -make queries against the local cache. Every change to the local cache is -broadcast as a change event, and listening to the DatabaseStore keeps the -rest of the application in sync. - -## Listening for Changes - -To listen for changes to the local cache, subscribe to the DatabaseStore and -inspect the changes that are sent to your listener method. - -```coffeescript -@unsubscribe = DatabaseStore.listen(@_onDataChanged, @) - -... - -_onDataChanged: (change) -> - return unless change.objectClass is Message - return unless @_myMessageID in _.map change.objects, (m) -> m.id - - # Refresh Data - -``` - - -The local cache changes very frequently, and your stores and components should -carefully choose when to refresh their data. The `change` object passed to your -event handler allows you to decide whether to refresh your data and exposes -the following keys: - -`objectClass`: The {Model} class that has been changed. If multiple types of models -were saved to the database, you will receive multiple change events. - -`objects`: An {Array} of {Model} instances that were either created, updated or -deleted from the local cache. If your component or store presents a single object -or a small collection of objects, you should look to see if any of the objects -are in your displayed set before refreshing. - -Section: Database -### -class DatabaseStore extends NylasStore - - constructor: -> - @_triggerPromise = null - @_inflightTransactions = 0 - @_open = false - @_waiting = [] - - @setupEmitter() - @_emitter.setMaxListeners(100) - - if NylasEnv.inSpecMode() - @_databasePath = path.join(NylasEnv.getConfigDirPath(),'edgehill.test.db') - else - @_databasePath = path.join(NylasEnv.getConfigDirPath(),'edgehill.db') - - @_databaseMutationHooks = [] - - # Listen to events from the application telling us when the database is ready, - # should be closed so it can be deleted, etc. - ipcRenderer.on('database-phase-change', @_onPhaseChange) - _.defer => @_onPhaseChange() - - _onPhaseChange: (event) => - return if NylasEnv.inSpecMode() - - app = remote.getGlobal('application') - phase = app.databasePhase() - - if phase is DatabasePhase.Setup and NylasEnv.isWorkWindow() - @_openDatabase => - @_checkDatabaseVersion {allowNotSet: true}, => - @_runDatabaseSetup => - app.setDatabasePhase(DatabasePhase.Ready) - setTimeout(@_runDatabaseAnalyze, 60 * 1000) - - else if phase is DatabasePhase.Ready - @_openDatabase => - @_checkDatabaseVersion {}, => - @_open = true - w() for w in @_waiting - @_waiting = [] - - else if phase is DatabasePhase.Close - @_open = false - @_db?.close() - @_db = null - - # When 3rd party components register new models, we need to refresh the - # database schema to prepare those tables. This method may be called - # extremely frequently as new models are added when packages load. 
- refreshDatabaseSchema: -> - return unless NylasEnv.isWorkWindow() - app = remote.getGlobal('application') - phase = app.databasePhase() - if phase isnt DatabasePhase.Setup - app.setDatabasePhase(DatabasePhase.Setup) - - _openDatabase: (ready) => - return ready() if @_db - - if NylasEnv.isWorkWindow() - # Since only the main window calls `_runDatabaseSetup`, it's important that - # it is also the only window with permission to create the file on disk - mode = sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE - else - mode = sqlite3.OPEN_READWRITE - - @_db = new sqlite3.Database @_databasePath, mode, (err) => - return @_handleSetupError(err) if err - - # https://www.sqlite.org/wal.html - # WAL provides more concurrency as readers do not block writers and a writer - # does not block readers. Reading and writing can proceed concurrently. - @_db.run("PRAGMA journal_mode = WAL;") - - # Note: These are properties of the connection, so they must be set regardless - # of whether the database setup queries are run. - - # https://www.sqlite.org/intern-v-extern-blob.html - # A database page size of 8192 or 16384 gives the best performance for large BLOB I/O. - @_db.run("PRAGMA main.page_size = 8192;") - @_db.run("PRAGMA main.cache_size = 20000;") - @_db.run("PRAGMA main.synchronous = NORMAL;") - @_db.configure('busyTimeout', 10000) - @_db.on 'profile', (query, msec) => - if msec > 100 - @_prettyConsoleLog("#{msec}msec: #{query}") - else - console.debug(DEBUG_TO_LOG, "#{msec}: #{query}") - - ready() - - _checkDatabaseVersion: ({allowNotSet} = {}, ready) => - @_db.get 'PRAGMA user_version', (err, {user_version}) => - return @_handleSetupError(err) if err - emptyVersion = user_version is 0 - wrongVersion = user_version/1 isnt DatabaseVersion - if wrongVersion and not (emptyVersion and allowNotSet) - return @_handleSetupError(new Error("Incorrect database schema version: #{user_version} not #{DatabaseVersion}")) - ready() - - _runDatabaseSetup: (ready) => - builder = new DatabaseSetupQueryBuilder() - - @_db.serialize => - async.each builder.setupQueries(), (query, callback) => - console.debug(DEBUG_TO_LOG, "DatabaseStore: #{query}") - @_db.run(query, [], callback) - , (err) => - return @_handleSetupError(err) if err - @_db.run "PRAGMA user_version=#{DatabaseVersion}", (err) => - return @_handleSetupError(err) if err - - exportPath = path.join(NylasEnv.getConfigDirPath(), 'mail-rules-export.json') - if fs.existsSync(exportPath) - try - row = JSON.parse(fs.readFileSync(exportPath)) - @inTransaction (t) -> t.persistJSONBlob('MailRules-V2', row['json']) - fs.unlink(exportPath) - catch err - console.log("Could not re-import mail rules: #{err}") - ready() - - _runDatabaseAnalyze: => - builder = new DatabaseSetupQueryBuilder() - async.each builder.analyzeQueries(), (query, callback) => - @_db.run(query, [], callback) - , (err) => - console.log("Completed ANALYZE of database") - - _handleSetupError: (err = (new Error("Manually called _handleSetupError"))) => - NylasEnv.reportError(err, {}, noWindows: true) - - # Temporary: export mail rules. They're the only bit of data in the cache - # we can't rebuild. Should be moved to cloud metadata store soon. 
- @_db.all "SELECT * FROM JSONBlob WHERE id = 'MailRules-V2' LIMIT 1", [], (mailsRulesErr, results = []) => - if not mailsRulesErr and results.length is 1 - exportPath = path.join(NylasEnv.getConfigDirPath(), 'mail-rules-export.json') - try - fs.writeFileSync(exportPath, results[0]['data']) - catch writeErr - console.log("Could not write mail rules to file: #{writeErr}") - - app = require('electron').remote.getGlobal('application') - app.rebuildDatabase() - - _prettyConsoleLog: (q) => - q = q.replace(/%/g, '%%') - q = "color:black |||%c " + q - q = q.replace(/`(\w+)`/g, "||| color:purple |||%c$&||| color:black |||%c") - - colorRules = - 'color:green': ['SELECT', 'INSERT INTO', 'VALUES', 'WHERE', 'FROM', 'JOIN', 'ORDER BY', 'DESC', 'ASC', 'INNER', 'OUTER', 'LIMIT', 'OFFSET', 'IN'] - 'color:red; background-color:#ffdddd;': ['SCAN TABLE'] - - for style, keywords of colorRules - for keyword in keywords - q = q.replace(new RegExp("\\b#{keyword}\\b", 'g'), "||| #{style} |||%c#{keyword}||| color:black |||%c") - - q = q.split('|||') - colors = [] - msg = [] - for i in [0...q.length] - if i % 2 is 0 - colors.push(q[i]) - else - msg.push(q[i]) - - console.log(msg.join(''), colors...) - - - # Returns a promise that resolves when the query has been completed and - # rejects when the query has failed. - # - # If a query is made while the connection is being setup, the - # DatabaseConnection will queue the queries and fire them after it has - # been setup. The Promise returned here wont resolve until that happens - _query: (query, values=[]) => - new Promise (resolve, reject) => - if not @_open - @_waiting.push => @_query(query, values).then(resolve, reject) - return - - if query.indexOf("SELECT ") is 0 - fn = 'all' - else - fn = 'run' - - if query.indexOf("SELECT ") is 0 - if DEBUG_QUERY_PLANS - @_db.all "EXPLAIN QUERY PLAN #{query}", values, (err, results=[]) => - str = results.map((row) -> row.detail).join('\n') + " for " + query - return if str.indexOf('ThreadCounts') > 0 - return if str.indexOf('ThreadSearch') > 0 - if str.indexOf('SCAN') isnt -1 and str.indexOf('COVERING INDEX') is -1 - @_prettyConsoleLog(str) - - # Important: once the user begins a transaction, queries need to run - # in serial. This ensures that the subsequent "COMMIT" call - # actually runs after the other queries in the transaction, and that - # no other code can execute "BEGIN TRANS." until the previously - # queued BEGIN/COMMIT have been processed. - - # We don't exit serial execution mode until the last pending transaction has - # finished executing. - - if query.indexOf "BEGIN" is 0 - @_db.serialize() if @_inflightTransactions is 0 - @_inflightTransactions += 1 - - @_db[fn] query, values, (err, results) => - if err - console.error("DatabaseStore: Query #{query}, #{JSON.stringify(values)} failed #{err.toString()}") - - if query is COMMIT - @_inflightTransactions -= 1 - @_db.parallelize() if @_inflightTransactions is 0 - - return reject(err) if err - return resolve(results) - - ######################################################################## - ########################### PUBLIC METHODS ############################# - ######################################################################## - - ### - ActiveRecord-style Querying - ### - - # Public: Creates a new Model Query for retrieving a single model specified by - # the class and id. - # - # - `class` The class of the {Model} you're trying to retrieve. 
- # - `id` The {String} id of the {Model} you're trying to retrieve - # - # Example: - # ```coffee - # DatabaseStore.find(Thread, 'id-123').then (thread) -> - # # thread is a Thread object, or null if no match was found. - # ``` - # - # Returns a {ModelQuery} - # - find: (klass, id) => - throw new Error("DatabaseStore::find - You must provide a class") unless klass - throw new Error("DatabaseStore::find - You must provide a string id. You may have intended to use findBy.") unless _.isString(id) - new ModelQuery(klass, @).where({id:id}).one() - - # Public: Creates a new Model Query for retrieving a single model matching the - # predicates provided. - # - # - `class` The class of the {Model} you're trying to retrieve. - # - `predicates` An {Array} of {matcher} objects. The set of predicates the - # returned model must match. - # - # Returns a {ModelQuery} - # - findBy: (klass, predicates = []) => - throw new Error("DatabaseStore::findBy - You must provide a class") unless klass - new ModelQuery(klass, @).where(predicates).one() - - # Public: Creates a new Model Query for retrieving all models matching the - # predicates provided. - # - # - `class` The class of the {Model} you're trying to retrieve. - # - `predicates` An {Array} of {matcher} objects. The set of predicates the - # returned model must match. - # - # Returns a {ModelQuery} - # - findAll: (klass, predicates = []) => - throw new Error("DatabaseStore::findAll - You must provide a class") unless klass - new ModelQuery(klass, @).where(predicates) - - # Public: Creates a new Model Query that returns the {Number} of models matching - # the predicates provided. - # - # - `class` The class of the {Model} you're trying to retrieve. - # - `predicates` An {Array} of {matcher} objects. The set of predicates the - # returned model must match. - # - # Returns a {ModelQuery} - # - count: (klass, predicates = []) => - throw new Error("DatabaseStore::count - You must provide a class") unless klass - new ModelQuery(klass, @).where(predicates).count() - - # Public: Modelify converts the provided array of IDs or models (or a mix of - # IDs and models) into an array of models of the `klass` provided by querying for the missing items. - # - # Modelify is efficient and uses a single database query. It resolves Immediately - # if no query is necessary. - # - # - `class` The {Model} class desired. - # - 'arr' An {Array} with a mix of string model IDs and/or models. 
- # - modelify: (klass, arr) => - if not _.isArray(arr) or arr.length is 0 - return Promise.resolve([]) - - ids = [] - clientIds = [] - for item in arr - if item instanceof klass - if not item.serverId - clientIds.push(item.clientId) - else - continue - else if _.isString(item) - if Utils.isTempId(item) - clientIds.push(item) - else - ids.push(item) - else - throw new Error("modelify: Not sure how to convert #{item} into a #{klass.name}") - - if ids.length is 0 and clientIds.length is 0 - return Promise.resolve(arr) - - queries = - modelsFromIds: [] - modelsFromClientIds: [] - - if ids.length - queries.modelsFromIds = @findAll(klass).where(klass.attributes.id.in(ids)) - # Allow Sqlite to exit early once it's found a single match for each ID - queries.modelsFromIds.limit(ids.length) - - if clientIds.length - queries.modelsFromClientIds = @findAll(klass).where(klass.attributes.clientId.in(clientIds)) - # Allow Sqlite to exit early once it's found a single match for each ID - queries.modelsFromClientIds.limit(clientIds.length) - - Promise.props(queries).then ({modelsFromIds, modelsFromClientIds}) => - modelsByString = {} - modelsByString[model.id] = model for model in modelsFromIds - modelsByString[model.clientId] = model for model in modelsFromClientIds - - arr = arr.map (item) -> - if item instanceof klass - return item - else - return modelsByString[item] - - return Promise.resolve(arr) - - # Public: Executes a {ModelQuery} on the local database. - # - # - `modelQuery` A {ModelQuery} to execute. - # - # Returns a {Promise} that - # - resolves with the result of the database query. - # - run: (modelQuery, options = {format: true}) => - @_query(modelQuery.sql(), []).then (result) => - result = modelQuery.inflateResult(result) - result = modelQuery.formatResult(result) unless options.format is false - Promise.resolve(result) - - findJSONBlob: (id) -> - JSONBlob ?= require('../models/json-blob').default - new JSONBlob.Query(JSONBlob, @).where({id}).one() - - # Private: Mutation hooks allow you to observe changes to the database and - # add additional functionality before and after the REPLACE / INSERT queries. - # - # beforeDatabaseChange: Run queries, etc. and return a promise. The DatabaseStore - # will proceed with changes once your promise has finished. You cannot call - # persistModel or unpersistModel from this hook. - # - # afterDatabaseChange: Run queries, etc. after the REPLACE / INSERT queries - # - # Warning: this is very low level. If you just want to watch for changes, You - # should subscribe to the DatabaseStore's trigger events. - # - addMutationHook: ({beforeDatabaseChange, afterDatabaseChange}) -> - throw new Error("DatabaseStore:addMutationHook - You must provide a beforeDatabaseChange function") unless beforeDatabaseChange - throw new Error("DatabaseStore:addMutationHook - You must provide a afterDatabaseChange function") unless afterDatabaseChange - @_databaseMutationHooks.push({beforeDatabaseChange, afterDatabaseChange}) - - removeMutationHook: (hook) -> - @_databaseMutationHooks = _.without(@_databaseMutationHooks, hook) - - mutationHooks: -> - @_databaseMutationHooks - - - # Public: Opens a new database transaction for writing changes. - # DatabaseStore.inTransacion makes the following guarantees: - # - # - No other calls to `inTransaction` will run until the promise has finished. - # - # - No other process will be able to write to sqlite while the provided function - # is running. 
"BEGIN IMMEDIATE TRANSACTION" semantics are: - # + No other connection will be able to write any changes. - # + Other connections can read from the database, but they will not see - # pending changes. - # - # @param fn {function} callback that will be executed inside a database transaction - # Returns a {Promise} that resolves when the transaction has successfully - # completed. - inTransaction: (fn) -> - t = new DatabaseTransaction(@) - @_transactionQueue ?= new PromiseQueue(1, Infinity) - @_transactionQueue.add -> - t.execute(fn) - - # _accumulateAndTrigger is a guarded version of trigger that can accumulate changes. - # This means that even if you're a bad person and call `persistModel` 100 times - # from 100 task objects queued at the same time, it will only create one - # `trigger` event. This is important since the database triggering impacts - # the entire application. - accumulateAndTrigger: (change) => - @_triggerPromise ?= new Promise (resolve, reject) => - @_resolve = resolve - - flush = => - return unless @_changeAccumulated - clearTimeout(@_changeFireTimer) if @_changeFireTimer - @trigger(new DatabaseChangeRecord(@_changeAccumulated)) - @_changeAccumulated = null - @_changeAccumulatedLookup = null - @_changeFireTimer = null - @_resolve?() - @_triggerPromise = null - - set = (change) => - clearTimeout(@_changeFireTimer) if @_changeFireTimer - @_changeAccumulated = change - @_changeAccumulatedLookup = {} - for obj, idx in @_changeAccumulated.objects - @_changeAccumulatedLookup[obj.id] = idx - @_changeFireTimer = setTimeout(flush, 10) - - concat = (change) => - # When we join new models into our set, replace existing ones so the same - # model cannot exist in the change record set multiple times. - for obj in change.objects - idx = @_changeAccumulatedLookup[obj.id] - if idx - @_changeAccumulated.objects[idx] = obj - else - @_changeAccumulatedLookup[obj.id] = @_changeAccumulated.objects.length - @_changeAccumulated.objects.push(obj) - - if not @_changeAccumulated - set(change) - else if @_changeAccumulated.objectClass is change.objectClass and @_changeAccumulated.type is change.type - concat(change) - else - flush() - set(change) - - return @_triggerPromise - - - # Search Index Operations - - createSearchIndexSql: (klass) => - throw new Error("DatabaseStore::createSearchIndex - You must provide a class") unless klass - throw new Error("DatabaseStore::createSearchIndex - #{klass.name} must expose an array of `searchFields`") unless klass - searchTableName = "#{klass.name}Search" - searchFields = klass.searchFields - return ( - "CREATE VIRTUAL TABLE IF NOT EXISTS `#{searchTableName}` " + - "USING fts5( - tokenize='porter unicode61', - content_id UNINDEXED, - #{searchFields.join(', ')} - )" - ) - - createSearchIndex: (klass) => - sql = @createSearchIndexSql(klass) - @_query(sql) - - searchIndexSize: (klass) => - searchTableName = "#{klass.name}Search" - sql = "SELECT COUNT(content_id) as count FROM `#{searchTableName}`" - return @_query(sql).then((result) => result[0].count) - - isIndexEmptyForAccount: (accountId, modelKlass) => - modelTable = modelKlass.name - searchTable = "#{modelTable}Search" - sql = ( - "SELECT `#{searchTable}`.`content_id` FROM `#{searchTable}` INNER JOIN `#{modelTable}` - ON `#{modelTable}`.id = `#{searchTable}`.`content_id` WHERE `#{modelTable}`.`account_id` = ? 
- LIMIT 1" - ) - return @_query(sql, [accountId]).then((result) => result.length is 0) - - dropSearchIndex: (klass) => - throw new Error("DatabaseStore::createSearchIndex - You must provide a class") unless klass - searchTableName = "#{klass.name}Search" - sql = "DROP TABLE IF EXISTS `#{searchTableName}`" - @_query(sql) - - isModelIndexed: (model, isIndexed) => - return Promise.resolve(true) if isIndexed is true - searchTableName = "#{model.constructor.name}Search" - exists = ( - "SELECT rowid FROM `#{searchTableName}` WHERE `#{searchTableName}`.`content_id` = ?" - ) - return @_query(exists, [model.id]).then((results) => - return Promise.resolve(results.length > 0) - ) - - indexModel: (model, indexData, isModelIndexed) => - searchTableName = "#{model.constructor.name}Search" - @isModelIndexed(model, isModelIndexed) - .then((isIndexed) => - if (isIndexed) - return @updateModelIndex(model, indexData, isIndexed) - - indexFields = Object.keys(indexData) - keysSql = 'content_id, ' + indexFields.join(", ") - valsSql = '?, ' + indexFields.map(=> '?').join(", ") - values = [model.id].concat(indexFields.map((k) => indexData[k])) - sql = ( - "INSERT INTO `#{searchTableName}`(#{keysSql}) VALUES (#{valsSql})" - ) - return @_query(sql, values) - ) - - updateModelIndex: (model, indexData, isModelIndexed) => - searchTableName = "#{model.constructor.name}Search" - @isModelIndexed(model, isModelIndexed) - .then((isIndexed) => - if (not isIndexed) - return @indexModel(model, indexData, isIndexed) - - indexFields = Object.keys(indexData) - values = indexFields.map((key) => indexData[key]).concat([model.id]) - setSql = ( - indexFields - .map((key) => "`#{key}` = ?") - .join(', ') - ) - sql = ( - "UPDATE `#{searchTableName}` SET #{setSql} WHERE `#{searchTableName}`.`content_id` = ?" - ) - return @_query(sql, values) - ) - - unindexModel: (model) => - searchTableName = "#{model.constructor.name}Search" - sql = ( - "DELETE FROM `#{searchTableName}` WHERE `#{searchTableName}`.`content_id` = ?" 
- ) - return @_query(sql, [model.id]) - - unindexModelsForAccount: (accountId, modelKlass) => - modelTable = modelKlass.name - searchTableName = "#{modelTable}Search" - sql = ( - "DELETE FROM `#{searchTableName}` WHERE `#{searchTableName}`.`content_id` IN - (SELECT `id` FROM `#{modelTable}` WHERE `#{modelTable}`.`account_id` = ?)" - ) - return @_query(sql, [accountId]) - -module.exports = new DatabaseStore() -module.exports.ChangeRecord = DatabaseChangeRecord diff --git a/src/flux/stores/database-store.es6 b/src/flux/stores/database-store.es6 new file mode 100644 index 000000000..600767eb3 --- /dev/null +++ b/src/flux/stores/database-store.es6 @@ -0,0 +1,774 @@ +/* eslint global-require: 0 */ +import async from 'async'; +import path from 'path'; +import fs from 'fs'; +import sqlite3 from 'sqlite3'; +import PromiseQueue from 'promise-queue'; +import NylasStore from '../../global/nylas-store'; +import {remote, ipcRenderer} from 'electron'; + +import Utils from '../models/utils'; +import Query from '../models/query'; +import DatabaseChangeRecord from './database-change-record'; +import DatabaseTransaction from './database-transaction'; +import DatabaseSetupQueryBuilder from './database-setup-query-builder'; + +const DatabaseVersion = 23; +const DatabasePhase = { + Setup: 'setup', + Ready: 'ready', + Close: 'close', +} + +const DEBUG_TO_LOG = false; +const DEBUG_QUERY_PLANS = NylasEnv.inDevMode(); + +const COMMIT = 'COMMIT'; + +let JSONBlob = null; + +/* +Public: N1 is built on top of a custom database layer modeled after +ActiveRecord. For many parts of the application, the database is the source +of truth. Data is retrieved from the API, written to the database, and changes +to the database trigger Stores and components to refresh their contents. + +The DatabaseStore is available in every application window and allows you to +make queries against the local cache. Every change to the local cache is +broadcast as a change event, and listening to the DatabaseStore keeps the +rest of the application in sync. + +## Listening for Changes + +To listen for changes to the local cache, subscribe to the DatabaseStore and +inspect the changes that are sent to your listener method. + +```coffeescript +@unsubscribe = DatabaseStore.listen(@_onDataChanged, @) + +... + +_onDataChanged: (change) -> + return unless change.objectClass is Message + return unless @_myMessageID in _.map change.objects, (m) -> m.id + + # Refresh Data + +``` + +The local cache changes very frequently, and your stores and components should +carefully choose when to refresh their data. The \`change\` object passed to your +event handler allows you to decide whether to refresh your data and exposes +the following keys: + +\`objectClass\`: The {Model} class that has been changed. If multiple types of models +were saved to the database, you will receive multiple change events. + +\`objects\`: An {Array} of {Model} instances that were either created, updated or +deleted from the local cache. If your component or store presents a single object +or a small collection of objects, you should look to see if any of the objects +are in your displayed set before refreshing. 
+ +Section: Database +*/ +class DatabaseStore extends NylasStore { + + static ChangeRecord = DatabaseChangeRecord; + + constructor() { + super(); + + this._triggerPromise = null; + this._inflightTransactions = 0; + this._open = false; + this._waiting = []; + + this.setupEmitter(); + this._emitter.setMaxListeners(100); + + if (NylasEnv.inSpecMode()) { + this._databasePath = path.join(NylasEnv.getConfigDirPath(), 'edgehill.test.db'); + } else { + this._databasePath = path.join(NylasEnv.getConfigDirPath(), 'edgehill.db'); + } + + this._databaseMutationHooks = []; + + // Listen to events from the application telling us when the database is ready, + // should be closed so it can be deleted, etc. + ipcRenderer.on('database-phase-change', () => this._onPhaseChange()); + setTimeout(() => this._onPhaseChange(), 0); + } + + _onPhaseChange() { + if (NylasEnv.inSpecMode()) { + return; + } + + const app = remote.getGlobal('application') + const phase = app.databasePhase() + + if (phase === DatabasePhase.Setup && NylasEnv.isWorkWindow()) { + this._openDatabase(() => { + this._checkDatabaseVersion({allowNotSet: true}, () => { + this._runDatabaseSetup(() => { + app.setDatabasePhase(DatabasePhase.Ready); + setTimeout(() => this._runDatabaseAnalyze(), 60 * 1000); + }); + }); + }); + } else if (phase === DatabasePhase.Ready) { + this._openDatabase(() => { + this._checkDatabaseVersion({}, () => { + this._open = true; + for (const w of this._waiting) { + w(); + } + this._waiting = []; + }); + }); + } else if (phase === DatabasePhase.Close) { + this._open = false; + if (this._db) { + this._db.close(); + this._db = null; + } + } + } + + // When 3rd party components register new models, we need to refresh the + // database schema to prepare those tables. This method may be called + // extremely frequently as new models are added when packages load. + refreshDatabaseSchema() { + if (!NylasEnv.isWorkWindow()) { + return; + } + const app = remote.getGlobal('application'); + const phase = app.databasePhase(); + if (phase !== DatabasePhase.Setup) { + app.setDatabasePhase(DatabasePhase.Setup); + } + } + + _openDatabase(ready) { + if (this._db) { + ready(); + return; + } + + let mode = sqlite3.OPEN_READWRITE; + if (NylasEnv.isWorkWindow()) { + // Since only the main window calls \`_runDatabaseSetup\`, it's important that + // it is also the only window with permission to create the file on disk + mode = sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE; + } + + this._db = new sqlite3.Database(this._databasePath, mode, (err) => { + if (err) { + this._handleSetupError(err); + return; + } + + // https://www.sqlite.org/wal.html + // WAL provides more concurrency as readers do not block writers and a writer + // does not block readers. Reading and writing can proceed concurrently. + this._db.run(`PRAGMA journal_mode = WAL;`); + + // Note: These are properties of the connection, so they must be set regardless + // of whether the database setup queries are run. + + // https://www.sqlite.org/intern-v-extern-blob.html + // A database page size of 8192 or 16384 gives the best performance for large BLOB I/O. 
+ this._db.run(`PRAGMA main.page_size = 8192;`); + this._db.run(`PRAGMA main.cache_size = 20000;`); + this._db.run(`PRAGMA main.synchronous = NORMAL;`); + this._db.configure('busyTimeout', 10000); + this._db.on('profile', (query, msec) => { + if (msec > 100) { + this._prettyConsoleLog(`${msec}msec: ${query}`); + } else { + console.debug(DEBUG_TO_LOG, `${msec}: ${query}`); + } + }); + ready(); + }); + } + + _checkDatabaseVersion({allowNotSet} = {}, ready) { + this._db.get('PRAGMA user_version', (err, result) => { + if (err) { + return this._handleSetupError(err) + } + const emptyVersion = (result.user_version === 0); + const wrongVersion = (result.user_version / 1 !== DatabaseVersion); + if (wrongVersion && !(emptyVersion && allowNotSet)) { + return this._handleSetupError(new Error(`Incorrect database schema version: ${result.user_version} not ${DatabaseVersion}`)); + } + return ready(); + }); + } + + _runDatabaseSetup(ready) { + const builder = new DatabaseSetupQueryBuilder() + + this._db.serialize(() => { + async.each(builder.setupQueries(), (query, callback) => { + console.debug(DEBUG_TO_LOG, `DatabaseStore: ${query}`); + this._db.run(query, [], callback); + }, (err) => { + if (err) { + return this._handleSetupError(err); + } + return this._db.run(`PRAGMA user_version=${DatabaseVersion}`, (versionErr) => { + if (versionErr) { + return this._handleSetupError(versionErr); + } + + const exportPath = path.join(NylasEnv.getConfigDirPath(), 'mail-rules-export.json') + if (fs.existsSync(exportPath)) { + try { + const row = JSON.parse(fs.readFileSync(exportPath)); + this.inTransaction(t => t.persistJSONBlob('MailRules-V2', row.json)); + fs.unlink(exportPath); + } catch (mailRulesError) { + console.log(`Could not re-import mail rules: ${mailRulesError}`); + } + } + return ready(); + }); + }); + }); + } + + _runDatabaseAnalyze() { + const builder = new DatabaseSetupQueryBuilder(); + async.each(builder.analyzeQueries(), (query, callback) => { + this._db.run(query, [], callback); + }, (err) => { + console.log(`Completed ANALYZE of database`, err); + }); + } + + _handleSetupError(err = (new Error(`Manually called _handleSetupError`))) { + NylasEnv.reportError(err, {}, {noWindows: true}); + + // Temporary: export mail rules. They're the only bit of data in the cache + // we can't rebuild. Should be moved to cloud metadata store soon. 
+ this._db.all(`SELECT * FROM JSONBlob WHERE id = 'MailRules-V2' LIMIT 1`, [], (mailsRulesErr, results = []) => { + if (!mailsRulesErr && results.length === 1) { + const exportPath = path.join(NylasEnv.getConfigDirPath(), 'mail-rules-export.json'); + try { + fs.writeFileSync(exportPath, results[0].data); + } catch (writeErr) { + console.log(`Could not write mail rules to file: ${writeErr}`); + } + } + + const app = remote.getGlobal('application'); + app.rebuildDatabase(); + }); + } + + _prettyConsoleLog(qa) { + let q = qa.replace(/%/g, '%%'); + q = `color:black |||%c ${q}`; + q = q.replace(/`(\w+)`/g, "||| color:purple |||%c$&||| color:black |||%c"); + + const colorRules = { + 'color:green': ['SELECT', 'INSERT INTO', 'VALUES', 'WHERE', 'FROM', 'JOIN', 'ORDER BY', 'DESC', 'ASC', 'INNER', 'OUTER', 'LIMIT', 'OFFSET', 'IN'], + 'color:red; background-color:#ffdddd;': ['SCAN TABLE'], + }; + + for (const style of Object.keys(colorRules)) { + for (const keyword of colorRules[style]) { + q = q.replace(new RegExp(`\\b${keyword}\\b`, 'g'), `||| ${style} |||%c${keyword}||| color:black |||%c`); + } + } + + q = q.split('|||'); + const colors = []; + const msg = []; + for (let i = 0; i < q.length; i ++) { + if (i % 2 === 0) { + colors.push(q[i]); + } else { + msg.push(q[i]); + } + } + + console.log(msg.join(''), ...colors); + } + + // Returns a promise that resolves when the query has been completed and + // rejects when the query has failed. + // + // If a query is made while the connection is being setup, the + // DatabaseConnection will queue the queries and fire them after it has + // been setup. The Promise returned here wont resolve until that happens + _query(query, values = []) { + return new Promise((resolve, reject) => { + if (!this._open) { + this._waiting.push(() => this._query(query, values).then(resolve, reject)); + return; + } + + const fn = (query.indexOf(`SELECT `) === 0) ? 'all' : 'run'; + + if (query.indexOf(`SELECT `) === 0) { + if (DEBUG_QUERY_PLANS) { + this._db.all(`EXPLAIN QUERY PLAN ${query}`, values, (err, results = []) => { + const str = `${results.map(row => row.detail).join('\n')} for ${query}`; + if (str.indexOf('ThreadCounts') > 0) { + return; + } + if (str.indexOf('ThreadSearch') > 0) { + return; + } + if ((str.indexOf('SCAN') !== -1) && (str.indexOf('COVERING INDEX') === -1)) { + this._prettyConsoleLog(str); + } + }); + } + } + + // Important: once the user begins a transaction, queries need to run + // in serial. This ensures that the subsequent `COMMIT` call + // actually runs after the other queries in the transaction, and that + // no other code can execute `BEGIN TRANS.` until the previously + // queued BEGIN/COMMIT have been processed. + + // We don't exit serial execution mode until the last pending transaction has + // finished executing. + + if (query.indexOf(`BEGIN`) === 0) { + if (this._inflightTransactions === 0) { + this._db.serialize(); + } + this._inflightTransactions += 1; + } + + this._db[fn](query, values, (err, results) => { + if (err) { + console.error(`DatabaseStore: Query ${query}, ${JSON.stringify(values)} failed ${err.toString()}`); + } + + if (query === COMMIT) { + this._inflightTransactions -= 1; + if (this._inflightTransactions === 0) { + this._db.parallelize(); + } + } + if (err) { + reject(err) + } else { + resolve(results) + } + }); + }); + } + + // PUBLIC METHODS ############################# + + // ActiveRecord-style Querying + + // Public: Creates a new Model Query for retrieving a single model specified by + // the class and id. 
+ // + // - \`class\` The class of the {Model} you're trying to retrieve. + // - \`id\` The {String} id of the {Model} you're trying to retrieve + // + // Example: + // ```coffee + // DatabaseStore.find(Thread, 'id-123').then (thread) -> + // // thread is a Thread object, or null if no match was found. + // ``` + // + // Returns a {Query} + // + find(klass, id) { + if (!klass) { + throw new Error(`DatabaseStore::find - You must provide a class`); + } + if (typeof id !== 'string') { + throw new Error(`DatabaseStore::find - You must provide a string id. You may have intended to use findBy.`); + } + return new Query(klass, this).where({id}).one(); + } + + // Public: Creates a new Model Query for retrieving a single model matching the + // predicates provided. + // + // - \`class\` The class of the {Model} you're trying to retrieve. + // - \`predicates\` An {Array} of {matcher} objects. The set of predicates the + // returned model must match. + // + // Returns a {Query} + // + findBy(klass, predicates = []) { + if (!klass) { + throw new Error(`DatabaseStore::findBy - You must provide a class`); + } + return new Query(klass, this).where(predicates).one(); + } + + // Public: Creates a new Model Query for retrieving all models matching the + // predicates provided. + // + // - \`class\` The class of the {Model} you're trying to retrieve. + // - \`predicates\` An {Array} of {matcher} objects. The set of predicates the + // returned model must match. + // + // Returns a {Query} + // + findAll(klass, predicates = []) { + if (!klass) { + throw new Error(`DatabaseStore::findAll - You must provide a class`); + } + return new Query(klass, this).where(predicates); + } + + // Public: Creates a new Model Query that returns the {Number} of models matching + // the predicates provided. + // + // - \`class\` The class of the {Model} you're trying to retrieve. + // - \`predicates\` An {Array} of {matcher} objects. The set of predicates the + // returned model must match. + // + // Returns a {Query} + // + count(klass, predicates = []) { + if (!klass) { + throw new Error(`DatabaseStore::count - You must provide a class`); + } + return new Query(klass, this).where(predicates).count(); + } + + // Public: Modelify converts the provided array of IDs or models (or a mix of + // IDs and models) into an array of models of the \`klass\` provided by querying for the missing items. + // + // Modelify is efficient and uses a single database query. It resolves Immediately + // if no query is necessary. + // + // - \`class\` The {Model} class desired. + // - 'arr' An {Array} with a mix of string model IDs and/or models. 
+ // + modelify(klass, arr) { + if (!(arr instanceof Array) || (arr.length === 0)) { + return Promise.resolve([]); + } + + const ids = [] + const clientIds = [] + for (const item of arr) { + if (item instanceof klass) { + if (!item.serverId) { + clientIds.push(item.clientId); + } else { + continue; + } + } else if (typeof(item) === 'string') { + if (Utils.isTempId(item)) { + clientIds.push(item); + } else { + ids.push(item); + } + } else { + throw new Error(`modelify: Not sure how to convert ${item} into a ${klass.name}`); + } + } + if ((ids.length === 0) && (clientIds.length === 0)) { + return Promise.resolve(arr); + } + + const queries = { + modelsFromIds: [], + modelsFromClientIds: [], + } + + if (ids.length) { + queries.modelsFromIds = this.findAll(klass).where(klass.attributes.id.in(ids)); + } + if (clientIds.length) { + queries.modelsFromClientIds = this.findAll(klass).where(klass.attributes.clientId.in(clientIds)); + } + + return Promise.props(queries).then(({modelsFromIds, modelsFromClientIds}) => { + const modelsByString = {}; + for (const model of modelsFromIds) { + modelsByString[model.id] = model; + } + for (const model of modelsFromClientIds) { + modelsByString[model.clientId] = model; + } + + return Promise.resolve(arr.map(item => + (item instanceof klass ? item : modelsByString[item])) + ); + }); + } + + // Public: Executes a {Query} on the local database. + // + // - \`modelQuery\` A {Query} to execute. + // + // Returns a {Promise} that + // - resolves with the result of the database query. + // + run(modelQuery, options = {format: true}) { + return this._query(modelQuery.sql(), []).then((result) => { + let transformed = modelQuery.inflateResult(result); + if (options.format !== false) { + transformed = modelQuery.formatResult(transformed) + } + return Promise.resolve(transformed); + }); + } + + findJSONBlob(id) { + JSONBlob = JSONBlob || require('../models/json-blob').default; + return new JSONBlob.Query(JSONBlob, this).where({id}).one(); + } + + // Private: Mutation hooks allow you to observe changes to the database and + // add additional functionality before and after the REPLACE / INSERT queries. + // + // beforeDatabaseChange: Run queries, etc. and return a promise. The DatabaseStore + // will proceed with changes once your promise has finished. You cannot call + // persistModel or unpersistModel from this hook. + // + // afterDatabaseChange: Run queries, etc. after the REPLACE / INSERT queries + // + // Warning: this is very low level. If you just want to watch for changes, you + // should subscribe to the DatabaseStore's trigger events. + // + addMutationHook({beforeDatabaseChange, afterDatabaseChange}) { + if (!beforeDatabaseChange) { + throw new Error(`DatabaseStore:addMutationHook - You must provide a beforeDatabaseChange function`); + } + if (!afterDatabaseChange) { + throw new Error(`DatabaseStore:addMutationHook - You must provide an afterDatabaseChange function`); + } + this._databaseMutationHooks.push({beforeDatabaseChange, afterDatabaseChange}); + } + + removeMutationHook(hook) { + this._databaseMutationHooks = this._databaseMutationHooks.filter(h => h !== hook); + } + + mutationHooks() { + return this._databaseMutationHooks; + } + + + // Public: Opens a new database transaction for writing changes. + // DatabaseStore.inTransaction makes the following guarantees: + // + // - No other calls to \`inTransaction\` will run until the promise has finished. + // + // - No other process will be able to write to sqlite while the provided function + // is running. 
`BEGIN IMMEDIATE TRANSACTION` semantics are: + // + No other connection will be able to write any changes. + // + Other connections can read from the database, but they will not see + // pending changes. + // + // @param fn {function} callback that will be executed inside a database transaction + // Returns a {Promise} that resolves when the transaction has successfully + // completed. + inTransaction(fn) { + const t = new DatabaseTransaction(this); + this._transactionQueue = this._transactionQueue || new PromiseQueue(1, Infinity); + return this._transactionQueue.add(() => + t.execute(fn) + ); + } + + // _accumulateAndTrigger is a guarded version of trigger that can accumulate changes. + // This means that even if you're a bad person and call \`persistModel\` 100 times + // from 100 task objects queued at the same time, it will only create one + // \`trigger\` event. This is important since the database triggering impacts + // the entire application. + accumulateAndTrigger(change) { + this._triggerPromise = this._triggerPromise || new Promise((resolve) => { + this._resolve = resolve; + }); + + const flush = () => { + if (!this._changeAccumulated) { + return; + } + if (this._changeFireTimer) { + clearTimeout(this._changeFireTimer); + } + this.trigger(new DatabaseChangeRecord(this._changeAccumulated)); + this._changeAccumulated = null; + this._changeAccumulatedLookup = null; + this._changeFireTimer = null; + if (this._resolve) { + this._resolve(); + } + this._triggerPromise = null; + }; + + const set = (_change) => { + if (this._changeFireTimer) { + clearTimeout(this._changeFireTimer); + } + this._changeAccumulated = _change; + this._changeAccumulatedLookup = {}; + this._changeAccumulated.objects.forEach((obj, idx) => { + this._changeAccumulatedLookup[obj.id] = idx; + }); + this._changeFireTimer = setTimeout(flush, 10); + }; + + const concat = (_change) => { + // When we join new models into our set, replace existing ones so the same + // model cannot exist in the change record set multiple times. 
+      for (const obj of _change.objects) {
+        const idx = this._changeAccumulatedLookup[obj.id]
+        if (idx !== undefined) {
+          this._changeAccumulated.objects[idx] = obj;
+        } else {
+          this._changeAccumulatedLookup[obj.id] = this._changeAccumulated.objects.length
+          this._changeAccumulated.objects.push(obj);
+        }
+      }
+    };
+
+    if (!this._changeAccumulated) {
+      set(change);
+    } else if ((this._changeAccumulated.objectClass === change.objectClass) && (this._changeAccumulated.type === change.type)) {
+      concat(change);
+    } else {
+      flush();
+      set(change);
+    }
+
+    return this._triggerPromise;
+  }
+
+
+  // Search Index Operations
+
+  createSearchIndexSql(klass) {
+    if (!klass) {
+      throw new Error(`DatabaseStore::createSearchIndex - You must provide a class`);
+    }
+    if (!klass.searchFields) {
+      throw new Error(`DatabaseStore::createSearchIndex - ${klass.name} must expose an array of \`searchFields\``);
+    }
+    const searchTableName = `${klass.name}Search`;
+    const searchFields = klass.searchFields;
+    return (
+      `CREATE VIRTUAL TABLE IF NOT EXISTS \`${searchTableName}\` ` +
+      `USING fts5(
+        tokenize='porter unicode61',
+        content_id UNINDEXED,
+        ${searchFields.join(', ')}
+      )`
+    );
+  }
+
+  createSearchIndex(klass) {
+    const sql = this.createSearchIndexSql(klass);
+    return this._query(sql);
+  }
+
+  searchIndexSize(klass) {
+    const searchTableName = `${klass.name}Search`;
+    const sql = `SELECT COUNT(content_id) as count FROM \`${searchTableName}\``;
+    return this._query(sql).then((result) => result[0].count);
+  }
+
+  isIndexEmptyForAccount(accountId, modelKlass) {
+    const modelTable = modelKlass.name
+    const searchTable = `${modelTable}Search`
+    const sql = (
+      `SELECT \`${searchTable}\`.\`content_id\` FROM \`${searchTable}\` INNER JOIN \`${modelTable}\`
+      ON \`${modelTable}\`.id = \`${searchTable}\`.\`content_id\` WHERE \`${modelTable}\`.\`account_id\` = ?
+      LIMIT 1`
+    );
+    return this._query(sql, [accountId]).then(result => result.length === 0);
+  }
+
+  dropSearchIndex(klass) {
+    if (!klass) {
+      throw new Error(`DatabaseStore::dropSearchIndex - You must provide a class`);
+    }
+    const searchTableName = `${klass.name}Search`
+    const sql = `DROP TABLE IF EXISTS \`${searchTableName}\``
+    return this._query(sql);
+  }
+
+  isModelIndexed(model, isIndexed) {
+    if (isIndexed === true) {
+      return Promise.resolve(true);
+    }
+    const searchTableName = `${model.constructor.name}Search`
+    const exists = (
+      `SELECT rowid FROM \`${searchTableName}\` WHERE \`${searchTableName}\`.\`content_id\` = ?`
+    )
+    return this._query(exists, [model.id]).then((results) =>
+      Promise.resolve(results.length > 0)
+    )
+  }
+
+  indexModel(model, indexData, isModelIndexed) {
+    const searchTableName = `${model.constructor.name}Search`;
+    return this.isModelIndexed(model, isModelIndexed).then((isIndexed) => {
+      if (isIndexed) {
+        return this.updateModelIndex(model, indexData, isIndexed);
+      }
+
+      const indexFields = Object.keys(indexData)
+      const keysSql = `content_id, ${indexFields.join(`, `)}`
+      const valsSql = `?, ${indexFields.map(() => '?').join(', ')}`
+      const values = [model.id].concat(indexFields.map(k => indexData[k]))
+      const sql = (
+        `INSERT INTO \`${searchTableName}\`(${keysSql}) VALUES (${valsSql})`
+      )
+      return this._query(sql, values);
+    });
+  }
+
+  updateModelIndex(model, indexData, isModelIndexed) {
+    const searchTableName = `${model.constructor.name}Search`;
+    return this.isModelIndexed(model, isModelIndexed).then((isIndexed) => {
+      if (!isIndexed) {
+        return this.indexModel(model, indexData, isIndexed);
+      }
+
+      const indexFields = Object.keys(indexData);
+      const values = indexFields.map(key => indexData[key]).concat([model.id]);
+      const setSql = (
+        indexFields
+          .map((key) => `\`${key}\` = ?`)
+          .join(', ')
+      );
+      const sql = (
+        `UPDATE \`${searchTableName}\` SET ${setSql} WHERE \`${searchTableName}\`.\`content_id\` = ?`
+      );
+      return this._query(sql, values);
+    });
+  }
+
+  unindexModel(model) {
+    const searchTableName = `${model.constructor.name}Search`;
+    const sql = (
+      `DELETE FROM \`${searchTableName}\` WHERE \`${searchTableName}\`.\`content_id\` = ?`
+    );
+    return this._query(sql, [model.id]);
+  }
+
+  unindexModelsForAccount(accountId, modelKlass) {
+    const modelTable = modelKlass.name;
+    const searchTableName = `${modelTable}Search`;
+    const sql = (
+      `DELETE FROM \`${searchTableName}\` WHERE \`${searchTableName}\`.\`content_id\` IN
+      (SELECT \`id\` FROM \`${modelTable}\` WHERE \`${modelTable}\`.\`account_id\` = ?)`
+    );
+    return this._query(sql, [accountId]);
+  }
+}
+
+export default new DatabaseStore();
diff --git a/src/flux/stores/draft-editing-session.coffee b/src/flux/stores/draft-editing-session.coffee
index 59ae7d049..91fbe0181 100644
--- a/src/flux/stores/draft-editing-session.coffee
+++ b/src/flux/stores/draft-editing-session.coffee
@@ -3,7 +3,7 @@ Actions = require '../actions'
 NylasAPI = require '../nylas-api'
 AccountStore = require './account-store'
 ContactStore = require './contact-store'
-DatabaseStore = require './database-store'
+DatabaseStore = require('./database-store').default
 UndoStack = require('../../undo-stack').default
 DraftHelpers = require '../stores/draft-helpers'
 ExtensionRegistry = require '../../extension-registry'
diff --git a/src/flux/stores/draft-factory.coffee b/src/flux/stores/draft-factory.coffee
index a03f0aefc..3a96df974 100644
--- a/src/flux/stores/draft-factory.coffee
+++ b/src/flux/stores/draft-factory.coffee
@@ 
-1,7 +1,7 @@ _ = require 'underscore' Actions = require '../actions' -DatabaseStore = require './database-store' +DatabaseStore = require('./database-store').default AccountStore = require './account-store' ContactStore = require './contact-store' MessageStore = require './message-store' diff --git a/src/flux/stores/draft-store.coffee b/src/flux/stores/draft-store.coffee index f9f9bb5db..ce16f5341 100644 --- a/src/flux/stores/draft-store.coffee +++ b/src/flux/stores/draft-store.coffee @@ -6,7 +6,7 @@ NylasAPI = require '../nylas-api' DraftEditingSession = require './draft-editing-session' DraftHelpers = require './draft-helpers' DraftFactory = require './draft-factory' -DatabaseStore = require './database-store' +DatabaseStore = require('./database-store').default AccountStore = require './account-store' TaskQueueStatusStore = require './task-queue-status-store' FocusedContentStore = require './focused-content-store' diff --git a/src/flux/stores/file-upload-store.coffee b/src/flux/stores/file-upload-store.coffee index d078e6205..dbd2d88a3 100644 --- a/src/flux/stores/file-upload-store.coffee +++ b/src/flux/stores/file-upload-store.coffee @@ -7,7 +7,7 @@ Actions = require '../actions' Utils = require '../models/utils' Message = require('../models/message').default DraftStore = require './draft-store' -DatabaseStore = require './database-store' +DatabaseStore = require('./database-store').default Promise.promisifyAll(fs) mkdirpAsync = Promise.promisify(mkdirp) diff --git a/src/flux/stores/focused-contacts-store.coffee b/src/flux/stores/focused-contacts-store.coffee index 5de05f122..359cc9ac9 100644 --- a/src/flux/stores/focused-contacts-store.coffee +++ b/src/flux/stores/focused-contacts-store.coffee @@ -8,7 +8,7 @@ Thread = require('../models/thread').default Contact = require '../models/contact' MessageStore = require './message-store' AccountStore = require './account-store' -DatabaseStore = require './database-store' +DatabaseStore = require('./database-store').default FocusedContentStore = require './focused-content-store' # A store that handles the focuses collections of and individual contacts diff --git a/src/flux/stores/focused-content-store.coffee b/src/flux/stores/focused-content-store.coffee index ef2dc1475..50d74205f 100644 --- a/src/flux/stores/focused-content-store.coffee +++ b/src/flux/stores/focused-content-store.coffee @@ -2,7 +2,7 @@ _ = require 'underscore' Reflux = require 'reflux' AccountStore = require './account-store' WorkspaceStore = require './workspace-store' -DatabaseStore = require './database-store' +DatabaseStore = require('./database-store').default FocusedPerspectiveStore = require './focused-perspective-store' MailboxPerspective = require '../../mailbox-perspective' Actions = require '../actions' diff --git a/src/flux/stores/mail-rules-store.coffee b/src/flux/stores/mail-rules-store.coffee index 860d7abc5..ea06ce7b7 100644 --- a/src/flux/stores/mail-rules-store.coffee +++ b/src/flux/stores/mail-rules-store.coffee @@ -2,7 +2,7 @@ NylasStore = require 'nylas-store' _ = require 'underscore' Rx = require 'rx-lite' AccountStore = require './account-store' -DatabaseStore = require './database-store' +DatabaseStore = require('./database-store').default TaskQueueStatusStore = require './task-queue-status-store' ReprocessMailRulesTask = require('../tasks/reprocess-mail-rules-task').default Utils = require '../models/utils' diff --git a/src/flux/stores/message-store.coffee b/src/flux/stores/message-store.coffee index 50da9194c..18c7e7b78 100644 --- 
a/src/flux/stores/message-store.coffee +++ b/src/flux/stores/message-store.coffee @@ -3,7 +3,7 @@ Actions = require "../actions" Message = require("../models/message").default Thread = require("../models/thread").default Utils = require '../models/utils' -DatabaseStore = require "./database-store" +DatabaseStore = require("./database-store").default FocusedPerspectiveStore = require './focused-perspective-store' FocusedContentStore = require "./focused-content-store" ChangeUnreadTask = require('../tasks/change-unread-task').default diff --git a/src/flux/stores/nylas-sync-status-store.coffee b/src/flux/stores/nylas-sync-status-store.coffee index 90154f925..b59de12f5 100644 --- a/src/flux/stores/nylas-sync-status-store.coffee +++ b/src/flux/stores/nylas-sync-status-store.coffee @@ -1,7 +1,7 @@ _ = require 'underscore' Rx = require 'rx-lite' AccountStore = require './account-store' -DatabaseStore = require './database-store' +DatabaseStore = require('./database-store').default NylasStore = require 'nylas-store' ModelsForSync = [ diff --git a/src/flux/stores/task-queue-status-store.coffee b/src/flux/stores/task-queue-status-store.coffee index d6e573c0d..b2586f777 100644 --- a/src/flux/stores/task-queue-status-store.coffee +++ b/src/flux/stores/task-queue-status-store.coffee @@ -1,7 +1,7 @@ _ = require 'underscore' Rx = require 'rx-lite' NylasStore = require 'nylas-store' -DatabaseStore = require './database-store' +DatabaseStore = require('./database-store').default AccountStore = require './account-store' TaskQueue = require './task-queue' diff --git a/src/flux/stores/task-queue.coffee b/src/flux/stores/task-queue.coffee index 72a649b90..42b431068 100644 --- a/src/flux/stores/task-queue.coffee +++ b/src/flux/stores/task-queue.coffee @@ -10,7 +10,7 @@ TaskRegistry = require('../../task-registry').default Utils = require "../models/utils" Reflux = require 'reflux' Actions = require '../actions' -DatabaseStore = require './database-store' +DatabaseStore = require('./database-store').default {APIError, TimeoutError} = require '../errors' diff --git a/src/flux/stores/thread-counts-store.coffee b/src/flux/stores/thread-counts-store.coffee index 25b8b1fa2..e311d5378 100644 --- a/src/flux/stores/thread-counts-store.coffee +++ b/src/flux/stores/thread-counts-store.coffee @@ -1,7 +1,7 @@ Reflux = require 'reflux' _ = require 'underscore' NylasStore = require 'nylas-store' -DatabaseStore = require './database-store' +DatabaseStore = require('./database-store').default Thread = require('../models/thread').default ### diff --git a/src/global/nylas-observables.coffee b/src/global/nylas-observables.coffee index 26354b30e..49cfca9ad 100644 --- a/src/global/nylas-observables.coffee +++ b/src/global/nylas-observables.coffee @@ -2,7 +2,7 @@ Rx = require 'rx-lite' _ = require 'underscore' Category = require '../flux/models/category' QuerySubscriptionPool = require('../flux/models/query-subscription-pool').default -DatabaseStore = require '../flux/stores/database-store' +DatabaseStore = require('../flux/stores/database-store').default CategoryOperators = sort: -> diff --git a/src/mail-rules-processor.coffee b/src/mail-rules-processor.coffee index 77562c158..085abf38e 100644 --- a/src/mail-rules-processor.coffee +++ b/src/mail-rules-processor.coffee @@ -6,7 +6,7 @@ Category = require './flux/models/category' Thread = require('./flux/models/thread').default Message = require('./flux/models/message').default AccountStore = require './flux/stores/account-store' -DatabaseStore = require './flux/stores/database-store' 
+DatabaseStore = require('./flux/stores/database-store').default TaskQueueStatusStore = require './flux/stores/task-queue-status-store' {ConditionMode, ConditionTemplates} = require './mail-rules-templates' diff --git a/src/mailbox-perspective.coffee b/src/mailbox-perspective.coffee index 042927379..cfd78d403 100644 --- a/src/mailbox-perspective.coffee +++ b/src/mailbox-perspective.coffee @@ -4,7 +4,7 @@ Utils = require './flux/models/utils' TaskFactory = require('./flux/tasks/task-factory').default AccountStore = require './flux/stores/account-store' CategoryStore = require './flux/stores/category-store' -DatabaseStore = require './flux/stores/database-store' +DatabaseStore = require('./flux/stores/database-store').default OutboxStore = require('./flux/stores/outbox-store').default ThreadCountsStore = require './flux/stores/thread-counts-store' RecentlyReadStore = require('./flux/stores/recently-read-store').default diff --git a/src/package-manager.coffee b/src/package-manager.coffee index b6e6a8c08..3bf7bfb56 100644 --- a/src/package-manager.coffee +++ b/src/package-manager.coffee @@ -10,7 +10,7 @@ Q = require 'q' Actions = require './flux/actions' Package = require './package' ThemePackage = require './theme-package' -DatabaseStore = require './flux/stores/database-store' +DatabaseStore = require('./flux/stores/database-store').default APMWrapper = require './apm-wrapper' basePackagePaths = null
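
The new store's read/write split, run() for queries and inTransaction() for writes, is only described in the comments of the ES6 file above. Below is a minimal consumer sketch, assuming the relative import paths used elsewhere in this patch and assuming DatabaseTransaction exposes a persistModel() method (the hook comments forbid calling it from hooks, but its signature is not shown in this diff).

// Usage sketch only -- not part of the commit. `persistModel` on the
// transaction object is an assumption; findAll/run/inTransaction come
// from the diff above.
import DatabaseStore from './src/flux/stores/database-store';
import Thread from './src/flux/models/thread';

// Reads: build a query, then hand it to run(), which inflates and formats rows.
DatabaseStore.run(DatabaseStore.findAll(Thread)).then((threads) => {
  console.log(`loaded ${threads.length} threads`);
});

// Writes: inTransaction() serializes callers through a PromiseQueue and wraps
// them in BEGIN IMMEDIATE TRANSACTION, so only one writer runs at a time.
DatabaseStore.inTransaction((t) => {
  return DatabaseStore.run(DatabaseStore.findAll(Thread)).then((threads) => {
    if (threads.length === 0) { return Promise.resolve(); }
    // ...mutate the first model here...
    return t.persistModel(threads[0]); // assumed DatabaseTransaction API
  });
});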
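
The mutation-hook contract specifies only that beforeDatabaseChange must return a promise the store waits on, and that afterDatabaseChange runs after the REPLACE / INSERT queries. A sketch of registering and removing a hook; the arguments passed to each callback are not spelled out in this hunk, so they are left as a rest parameter and unused.

// Hook sketch only -- callback argument shapes are not defined in this diff,
// so both hooks simply resolve. Never call persistModel/unpersistModel here.
import DatabaseStore from './src/flux/stores/database-store';

const loggingHook = {
  beforeDatabaseChange: (...args) => {
    // The store waits on this promise before issuing its REPLACE / INSERT queries.
    return Promise.resolve({observedAt: Date.now()});
  },
  afterDatabaseChange: (...args) => {
    // Runs once the REPLACE / INSERT queries have completed.
    return Promise.resolve();
  },
};

DatabaseStore.addMutationHook(loggingHook);
// Later, when the observer is no longer needed:
DatabaseStore.removeMutationHook(loggingHook);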
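
The search-index methods require the model class to expose a searchFields array (used to build the fts5 virtual table) and expect indexModel() to receive a plain object keyed by those same fields, as the generated INSERT implies. The sketch below uses a hypothetical searchFields value; the real field list for any given model is not part of this diff.

// Search-index sketch only. The searchFields shown are hypothetical; the
// DatabaseStore calls themselves come from the diff above.
import DatabaseStore from './src/flux/stores/database-store';
import Thread from './src/flux/models/thread';

// createSearchIndexSql() throws unless the class declares searchFields,
// e.g. something like: Thread.searchFields = ['subject', 'body'];
DatabaseStore.createSearchIndex(Thread);

// indexData keys must match the searchFields used to build the virtual table.
function indexThread(thread) {
  const indexData = {
    subject: thread.subject || '',
    body: '', // derive the indexable body text in real code
  };
  return DatabaseStore.indexModel(thread, indexData);
}

// Cleanup paths provided by the store:
//   DatabaseStore.unindexModel(thread)
//   DatabaseStore.unindexModelsForAccount(accountId, Thread)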