Mirror of https://github.com/Foundry376/Mailspring.git, synced 2025-09-04 19:54:32 +08:00
feat(work): Create the "Work" window, move TaskQueue, Nylas sync workers
Summary:
- Move sync workers and Edgehill token checks to the work window.
- Move the task queue and database setup to the work window.
- Move ContactStore background refresh to the work window.
- Store the task queue in the database (WIP). The TaskQueue now puts tasks in the database instead of in a file, which also means it can be observed.
- Move all delta sync and initial sync into a package; add NylasSyncStatusStore, which exposes read-only sync state.
- DraftStore no longer reads task status. Once you set the "sending" bit on a draft, it never gets unset, but in practice that's fine.
- If your package lists windowTypes, you *only* get loaded in those window types. If you specify no windowTypes, you get loaded in the root window. This means that onboarding, worker-ui, worker-sync, etc. no longer get loaded into the main window.
- ActivitySidebar has a small dedicated store that observes the task queue, since the queue is no longer in the main window.
- Move "Toggle Component Regions" / "Toggle React Remote" to the Developer menu.
- Move the sync worker specs, and update the DraftStore specs so they no longer rely on TaskQueue at all.

Test Plan: Run existing tests; all pass.

Reviewers: dillon, evan

Reviewed By: evan

Differential Revision: https://phab.nylas.com/D1936
Parent: 41a3529f16
Commit: 1a576d92dc
63 changed files with 710 additions and 575 deletions
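For reference, the windowTypes behavior described in the summary is driven by each package's package.json manifest. A minimal illustrative sketch, modeled on the worker-sync manifest added later in this diff (the exact fields a real package needs may differ):

    {
      "name": "worker-sync",
      "main": "./lib/main",
      "windowTypes": {
        "work": true
      }
    }

A package that omits "windowTypes" entirely is loaded only in the root (main) window.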
@@ -38,6 +38,7 @@ class NylasExports
  # API Endpoints
  @load "NylasAPI", 'flux/nylas-api'
  @load "NylasSyncStatusStore", 'flux/stores/nylas-sync-status-store'
  @load "EdgehillAPI", 'flux/edgehill-api'

  # The Database

@@ -68,8 +69,9 @@ class NylasExports
  # The Task Queue
  @require "Task", 'flux/tasks/task'
  @require "TaskQueue", 'flux/stores/task-queue'
  @require "TaskRegistry", "task-registry"
  @require "TaskQueue", 'flux/stores/task-queue'
  @load "TaskQueueStatusStore", 'flux/stores/task-queue-status-store'
  @require "UndoRedoStore", 'flux/stores/undo-redo-store'

  # Tasks
@@ -11,6 +11,7 @@
  "dependencies": {
  },
  "windowTypes": {
    "default": true,
    "composer": true
  }
}
@@ -9,6 +9,7 @@
    "atom": "*"
  },
  "windowTypes": {
    "default": true,
    "composer": true
  },
  "dependencies": {
@@ -13,6 +13,7 @@
  "dependencies": {
  },
  "windowTypes": {
    "default": true,
    "composer": true
  }
}
@@ -667,7 +667,7 @@ describe "populated composer", ->

  describe "when the DraftStore `isSending` isn't stubbed out", ->
    beforeEach ->
      DraftStore._pendingEnqueue = {}
      DraftStore._draftsSending = {}

    it "doesn't send twice in a popout", ->
      spyOn(Actions, "queueTask")
@@ -11,6 +11,7 @@
  "dependencies": {
  },
  "windowTypes": {
    "default": true,
    "composer": true
  }
}
@@ -5,6 +5,8 @@ NotificationStore = require './notifications-store'
{Actions,
 TaskQueue,
 AccountStore,
 NylasSyncStatusStore,
 TaskQueueStatusStore,
 NylasAPI} = require 'nylas-exports'
{TimeoutTransitionGroup} = require 'nylas-component-kit'

@@ -21,10 +23,9 @@ class ActivitySidebar extends React.Component

  componentDidMount: =>
    @_unlisteners = []
    @_unlisteners.push AccountStore.listen @_onAccountsChanged
    @_unlisteners.push TaskQueue.listen @_onDataChanged
    @_unlisteners.push TaskQueueStatusStore.listen @_onDataChanged
    @_unlisteners.push NylasSyncStatusStore.listen @_onDataChanged
    @_unlisteners.push NotificationStore.listen @_onDataChanged
    @_onAccountsChanged()

  componentWillUnmount: =>
    unlisten() for unlisten in @_unlisteners

@@ -53,12 +54,13 @@ class ActivitySidebar extends React.Component
    incomplete = 0
    error = null

    for model, modelState of @state.sync
      incomplete += 1 unless modelState.complete
      error ?= modelState.error
      if modelState.count
        count += modelState.count / 1
        fetched += modelState.fetched / 1
    for acctId, state of @state.sync
      for model, modelState of state
        incomplete += 1 unless modelState.complete
        error ?= modelState.error
        if modelState.count
          count += modelState.count / 1
          fetched += modelState.fetched / 1

    progress = (fetched / count) * 100 if count > 0

@@ -102,24 +104,16 @@ class ActivitySidebar extends React.Component
        </div>
      </div>

  _onAccountsChanged: =>
    account = AccountStore.current()
    return unless account
    @_worker = NylasAPI.workerForAccount(account)
    @_workerUnlisten() if @_workerUnlisten
    @_workerUnlisten = @_worker.listen(@_onDataChanged, @)
    @_onDataChanged()

  _onTryAgain: =>
    @_worker.resumeFetches()
    # TODO

  _onDataChanged: =>
    @setState(@_getStateFromStores())

  _getStateFromStores: =>
    tasks: TaskQueue.queue()
    notifications: NotificationStore.notifications()
    sync: @_worker?.state()
    tasks: TaskQueueStatusStore.queue()
    sync: NylasSyncStatusStore.state()

module.exports = ActivitySidebar
@@ -9,8 +9,5 @@
    "atom": "*"
  },
  "dependencies": {
  },
  "windowTypes": {
    "preferences": true
  }
}
@ -3,8 +3,8 @@ React = require 'react'
|
|||
classNames = require 'classnames'
|
||||
{RetinaImg} = require 'nylas-component-kit'
|
||||
{DatabaseView,
|
||||
AccountStore,
|
||||
NylasAPI,
|
||||
NylasSyncStatusStore,
|
||||
WorkspaceStore} = require 'nylas-exports'
|
||||
|
||||
EmptyMessages = [{
|
||||
|
@ -73,14 +73,13 @@ class EmptyState extends React.Component
|
|||
constructor: (@props) ->
|
||||
@state =
|
||||
layoutMode: WorkspaceStore.layoutMode()
|
||||
syncing: false
|
||||
syncing: NylasSyncStatusStore.busy()
|
||||
active: false
|
||||
|
||||
componentDidMount: ->
|
||||
@_unlisteners = []
|
||||
@_unlisteners.push WorkspaceStore.listen(@_onChange, @)
|
||||
@_unlisteners.push AccountStore.listen(@_onAccountsChanged, @)
|
||||
@_onAccountsChanged()
|
||||
@_unlisteners.push NylasSyncStatusStore.listen(@_onChange, @)
|
||||
|
||||
shouldComponentUpdate: (nextProps, nextState) ->
|
||||
# Avoid deep comparison of dataView, which is a very complex object
|
||||
|
@ -88,16 +87,8 @@ class EmptyState extends React.Component
|
|||
return true if nextProps.dataView isnt @props.dataView
|
||||
return not _.isEqual(nextState, @state)
|
||||
|
||||
_onAccountsChanged: ->
|
||||
account = AccountStore.current()
|
||||
@_worker = NylasAPI.workerForAccount(account)
|
||||
@_workerUnlisten() if @_workerUnlisten
|
||||
@_workerUnlisten = @_worker.listen(@_onChange, @)
|
||||
@setState(syncing: @_worker.busy())
|
||||
|
||||
componentWillUnmount: ->
|
||||
unlisten() for unlisten in @_unlisteners
|
||||
@_workerUnlisten() if @_workerUnlisten
|
||||
|
||||
componentDidUpdate: ->
|
||||
if @props.visible and not @state.active
|
||||
|
@ -129,7 +120,6 @@ class EmptyState extends React.Component
|
|||
_onChange: ->
|
||||
@setState
|
||||
layoutMode: WorkspaceStore.layoutMode()
|
||||
syncing: @_worker.busy()
|
||||
|
||||
syncing: NylasSyncStatusStore.busy()
|
||||
|
||||
module.exports = EmptyState
|
||||
|
|
|
@@ -12,6 +12,7 @@ NylasStore = require 'nylas-store'
 ChangeStarredTask,
 FocusedContentStore,
 ArchiveThreadHelper,
 TaskQueueStatusStore,
 FocusedCategoryStore} = require 'nylas-exports'

# Public: A mutable text container with undo/redo support and the ability

@@ -177,7 +178,7 @@ class ThreadListStore extends NylasStore
    focusedId = FocusedContentStore.focusedId('thread')
    keyboardId = FocusedContentStore.keyboardCursorId('thread')

    task.waitForPerformLocal().then =>
    TaskQueueStatusStore.waitForPerformLocal(task).then =>
      if focusedId in selectedThreadIds
        Actions.setFocus(collection: 'thread', item: null)
      if keyboardId in selectedThreadIds

@@ -222,7 +223,7 @@ class ThreadListStore extends NylasStore
    nextFocus = null

    # Archive the current thread
    task.waitForPerformLocal().then ->
    TaskQueueStatusStore.waitForPerformLocal(task).then =>
      Actions.setFocus(collection: 'thread', item: nextFocus)
      Actions.setCursorPosition(collection: 'thread', item: nextKeyboard)
    Actions.queueTask(task)
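The two ThreadListStore hunks above show the new pattern for windows that no longer host the TaskQueue: instead of waiting on the task itself, callers wait on TaskQueueStatusStore and then fire the work-window-scoped queueTask action. A minimal illustrative sketch (not part of the commit; task stands for any Task instance):

    {Actions, TaskQueueStatusStore} = require 'nylas-exports'

    # The queue itself now lives in the work window; this window only observes
    # its status, so we wait on the status store rather than on the task.
    TaskQueueStatusStore.waitForPerformLocal(task).then ->
      Actions.setFocus(collection: 'thread', item: null)

    # queueTask is a work-window-scoped action, so the ActionBridge relays it
    # to the work window, where the TaskQueue performs and persists the task.
    Actions.queueTask(task)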
internal_packages/worker-sync/lib/main.coffee  (new file, 6 lines)

@@ -0,0 +1,6 @@
NylasSyncWorkerPool = require './nylas-sync-worker-pool'

module.exports =
  activate: ->

  deactivate: ->
internal_packages/worker-sync/lib/nylas-sync-worker-pool.coffee  (new file, 125 lines)

@@ -0,0 +1,125 @@
_ = require 'underscore'
|
||||
|
||||
{NylasAPI,
|
||||
Actions,
|
||||
AccountStore,
|
||||
DatabaseStore,
|
||||
DatabaseObjectRegistry} = require 'nylas-exports'
|
||||
|
||||
NylasLongConnection = require './nylas-long-connection'
|
||||
NylasSyncWorker = require './nylas-sync-worker'
|
||||
|
||||
|
||||
class NylasSyncWorkerPool
|
||||
|
||||
constructor: ->
|
||||
@_workers = []
|
||||
AccountStore.listen(@_onAccountsChanged, @)
|
||||
@_onAccountsChanged()
|
||||
|
||||
_onAccountsChanged: ->
|
||||
return if atom.inSpecMode()
|
||||
|
||||
accounts = AccountStore.items()
|
||||
workers = _.map(accounts, @workerForAccount)
|
||||
|
||||
# Stop the workers that are not in the new workers list.
|
||||
# These accounts are no longer in our database, so we shouldn't
|
||||
# be listening.
|
||||
old = _.without(@_workers, workers...)
|
||||
worker.cleanup() for worker in old
|
||||
|
||||
@_workers = workers
|
||||
|
||||
workers: =>
|
||||
@_workers
|
||||
|
||||
workerForAccount: (account) =>
|
||||
worker = _.find @_workers, (c) -> c.account().id is account.id
|
||||
return worker if worker
|
||||
|
||||
worker = new NylasSyncWorker(NylasAPI, account)
|
||||
connection = worker.connection()
|
||||
|
||||
connection.onStateChange (state) ->
|
||||
Actions.longPollStateChanged({accountId: account.id, state: state})
|
||||
if state == NylasLongConnection.State.Connected
|
||||
## TODO use OfflineStatusStore
|
||||
Actions.longPollConnected()
|
||||
else
|
||||
## TODO use OfflineStatusStore
|
||||
Actions.longPollOffline()
|
||||
|
||||
connection.onDeltas (deltas) =>
|
||||
@_handleDeltas(deltas)
|
||||
|
||||
@_workers.push(worker)
|
||||
worker.start()
|
||||
worker
|
||||
|
||||
_cleanupAccountWorkers: ->
|
||||
for worker in @_workers
|
||||
worker.cleanup()
|
||||
@_workers = []
|
||||
|
||||
_handleDeltas: (deltas) ->
|
||||
Actions.longPollReceivedRawDeltas(deltas)
|
||||
|
||||
# Create a (non-enumerable) reference from the attributes which we carry forward
|
||||
# back to their original deltas. This allows us to mark the deltas that the
|
||||
# app ignores later in the process.
|
||||
deltas.forEach (delta) ->
|
||||
if delta.attributes
|
||||
Object.defineProperty(delta.attributes, '_delta', { get: -> delta })
|
||||
|
||||
{create, modify, destroy} = @_clusterDeltas(deltas)
|
||||
|
||||
# Apply all the deltas to create objects. Gets promises for handling
|
||||
# each type of model in the `create` hash, waits for them all to resolve.
|
||||
create[type] = NylasAPI._handleModelResponse(_.values(dict)) for type, dict of create
|
||||
Promise.props(create).then (created) =>
|
||||
# Apply all the deltas to modify objects. Gets promises for handling
|
||||
# each type of model in the `modify` hash, waits for them all to resolve.
|
||||
modify[type] = NylasAPI._handleModelResponse(_.values(dict)) for type, dict of modify
|
||||
Promise.props(modify).then (modified) =>
|
||||
|
||||
# Now that we've persisted creates/updates, fire an action
|
||||
# that allows other parts of the app to update based on new models
|
||||
# (notifications)
|
||||
if _.flatten(_.values(created)).length > 0
|
||||
Actions.didPassivelyReceiveNewModels(created)
|
||||
|
||||
# Apply all of the deletions
|
||||
destroyPromises = destroy.map(@_handleDeltaDeletion)
|
||||
Promise.settle(destroyPromises).then =>
|
||||
Actions.longPollProcessedDeltas()
|
||||
|
||||
_clusterDeltas: (deltas) ->
|
||||
# Group deltas by object type so we can mutate the cache efficiently.
|
||||
# NOTE: This code must not just accumulate creates, modifies and destroys
|
||||
# but also de-dupe them. We cannot call "persistModels(itemA, itemA, itemB)"
|
||||
# or it will throw an exception - use the last received copy of each model
|
||||
# we see.
|
||||
create = {}
|
||||
modify = {}
|
||||
destroy = []
|
||||
for delta in deltas
|
||||
if delta.event is 'create'
|
||||
create[delta.object] ||= {}
|
||||
create[delta.object][delta.attributes.id] = delta.attributes
|
||||
else if delta.event is 'modify'
|
||||
modify[delta.object] ||= {}
|
||||
modify[delta.object][delta.attributes.id] = delta.attributes
|
||||
else if delta.event is 'delete'
|
||||
destroy.push(delta)
|
||||
|
||||
{create, modify, destroy}
|
||||
|
||||
_handleDeltaDeletion: (delta) =>
|
||||
klass = NylasAPI._apiObjectToClassMap[delta.object]
|
||||
return unless klass
|
||||
DatabaseStore.find(klass, delta.id).then (model) ->
|
||||
return Promise.resolve() unless model
|
||||
return DatabaseStore.unpersistModel(model)
|
||||
|
||||
module.exports = new NylasSyncWorkerPool()
|
|
@@ -1,8 +1,6 @@
_ = require 'underscore'
{DatabaseStore} = require 'nylas-exports'
NylasLongConnection = require './nylas-long-connection'
DatabaseStore = require './stores/database-store'
{Publisher} = require './modules/reflux-coffee'
CoffeeHelpers = require './coffee-helpers'

PAGE_SIZE = 250

@@ -37,9 +35,6 @@ class BackoffTimer
module.exports =
class NylasSyncWorker

  @include: CoffeeHelpers.includeModule
  @include Publisher

  constructor: (api, account) ->
    @_api = api
    @_account = account

@@ -164,6 +159,5 @@ class NylasSyncWorker
      DatabaseStore.persistJSONObject("NylasSyncWorker:#{@_account.id}", @_state)
    ,100
    @_writeState()
    @trigger()

NylasSyncWorker.BackoffTimer = BackoffTimer
internal_packages/worker-sync/package.json  (new executable file, 16 lines)

@@ -0,0 +1,16 @@
{
  "name": "worker-sync",
  "version": "0.1.0",
  "main": "./lib/main",
  "description": "Mail sync classes for the worker window",
  "license": "Proprietary",
  "private": true,
  "engines": {
    "atom": "*"
  },
  "dependencies": {
  },
  "windowTypes": {
    "work": true
  }
}
|
@ -0,0 +1,140 @@
|
|||
_ = require 'underscore'
|
||||
fs = require 'fs'
|
||||
path = require 'path'
|
||||
{NylasAPI, Thread, DatabaseStore, Actions} = require 'nylas-exports'
|
||||
NylasSyncWorkerPool = require '../lib/nylas-sync-worker-pool'
|
||||
|
||||
fixturesPath = path.resolve(__dirname, 'fixtures')
|
||||
|
||||
describe "NylasSyncWorkerPool", ->
|
||||
|
||||
describe "handleDeltas", ->
|
||||
beforeEach ->
|
||||
@sampleDeltas = JSON.parse(fs.readFileSync("#{fixturesPath}/sample.json"))
|
||||
@sampleClustered = JSON.parse(fs.readFileSync("#{fixturesPath}/sample-clustered.json"))
|
||||
|
||||
it "should immediately fire the received raw deltas event", ->
|
||||
spyOn(Actions, 'longPollReceivedRawDeltas')
|
||||
spyOn(NylasSyncWorkerPool, '_clusterDeltas').andReturn({create: {}, modify: {}, destroy: []})
|
||||
NylasSyncWorkerPool._handleDeltas(@sampleDeltas)
|
||||
expect(Actions.longPollReceivedRawDeltas).toHaveBeenCalled()
|
||||
|
||||
it "should call helper methods for all creates first, then modifications, then destroys", ->
|
||||
spyOn(Actions, 'longPollProcessedDeltas')
|
||||
|
||||
handleDeltaDeletionPromises = []
|
||||
resolveDeltaDeletionPromises = ->
|
||||
fn() for fn in handleDeltaDeletionPromises
|
||||
handleDeltaDeletionPromises = []
|
||||
|
||||
spyOn(NylasSyncWorkerPool, '_handleDeltaDeletion').andCallFake ->
|
||||
new Promise (resolve, reject) ->
|
||||
handleDeltaDeletionPromises.push(resolve)
|
||||
|
||||
handleModelResponsePromises = []
|
||||
resolveModelResponsePromises = ->
|
||||
fn() for fn in handleModelResponsePromises
|
||||
handleModelResponsePromises = []
|
||||
|
||||
spyOn(NylasAPI, '_handleModelResponse').andCallFake ->
|
||||
new Promise (resolve, reject) ->
|
||||
handleModelResponsePromises.push(resolve)
|
||||
|
||||
spyOn(NylasSyncWorkerPool, '_clusterDeltas').andReturn(JSON.parse(JSON.stringify(@sampleClustered)))
|
||||
NylasSyncWorkerPool._handleDeltas(@sampleDeltas)
|
||||
|
||||
createTypes = Object.keys(@sampleClustered['create'])
|
||||
expect(NylasAPI._handleModelResponse.calls.length).toEqual(createTypes.length)
|
||||
expect(NylasAPI._handleModelResponse.calls[0].args[0]).toEqual(_.values(@sampleClustered['create'][createTypes[0]]))
|
||||
expect(NylasSyncWorkerPool._handleDeltaDeletion.calls.length).toEqual(0)
|
||||
|
||||
NylasAPI._handleModelResponse.reset()
|
||||
resolveModelResponsePromises()
|
||||
advanceClock()
|
||||
|
||||
modifyTypes = Object.keys(@sampleClustered['modify'])
|
||||
expect(NylasAPI._handleModelResponse.calls.length).toEqual(modifyTypes.length)
|
||||
expect(NylasAPI._handleModelResponse.calls[0].args[0]).toEqual(_.values(@sampleClustered['modify'][modifyTypes[0]]))
|
||||
expect(NylasSyncWorkerPool._handleDeltaDeletion.calls.length).toEqual(0)
|
||||
|
||||
NylasAPI._handleModelResponse.reset()
|
||||
resolveModelResponsePromises()
|
||||
advanceClock()
|
||||
|
||||
destroyCount = @sampleClustered['destroy'].length
|
||||
expect(NylasSyncWorkerPool._handleDeltaDeletion.calls.length).toEqual(destroyCount)
|
||||
expect(NylasSyncWorkerPool._handleDeltaDeletion.calls[0].args[0]).toEqual(@sampleClustered['destroy'][0])
|
||||
|
||||
expect(Actions.longPollProcessedDeltas).not.toHaveBeenCalled()
|
||||
|
||||
resolveDeltaDeletionPromises()
|
||||
advanceClock()
|
||||
|
||||
expect(Actions.longPollProcessedDeltas).toHaveBeenCalled()
|
||||
|
||||
describe "clusterDeltas", ->
|
||||
beforeEach ->
|
||||
@sampleDeltas = JSON.parse(fs.readFileSync("#{fixturesPath}/sample.json"))
|
||||
@expectedClustered = JSON.parse(fs.readFileSync("#{fixturesPath}/sample-clustered.json"))
|
||||
|
||||
it "should collect create/modify events into a hash by model type", ->
|
||||
{create, modify} = NylasSyncWorkerPool._clusterDeltas(@sampleDeltas)
|
||||
expect(create).toEqual(@expectedClustered.create)
|
||||
expect(modify).toEqual(@expectedClustered.modify)
|
||||
|
||||
it "should collect destroys into an array", ->
|
||||
{destroy} = NylasSyncWorkerPool._clusterDeltas(@sampleDeltas)
|
||||
expect(destroy).toEqual(@expectedClustered.destroy)
|
||||
|
||||
describe "handleDeltaDeletion", ->
|
||||
beforeEach ->
|
||||
@thread = new Thread(id: 'idhere')
|
||||
@delta =
|
||||
"cursor": "bb95ddzqtr2gpmvgrng73t6ih",
|
||||
"object": "thread",
|
||||
"event": "delete",
|
||||
"id": @thread.id,
|
||||
"timestamp": "2015-08-26T17:36:45.297Z"
|
||||
|
||||
it "should resolve if the object cannot be found", ->
|
||||
spyOn(DatabaseStore, 'find').andCallFake (klass, id) =>
|
||||
return Promise.resolve(null)
|
||||
spyOn(DatabaseStore, 'unpersistModel')
|
||||
waitsForPromise =>
|
||||
NylasSyncWorkerPool._handleDeltaDeletion(@delta)
|
||||
runs =>
|
||||
expect(DatabaseStore.find).toHaveBeenCalledWith(Thread, 'idhere')
|
||||
expect(DatabaseStore.unpersistModel).not.toHaveBeenCalled()
|
||||
|
||||
it "should call unpersistModel if the object exists", ->
|
||||
spyOn(DatabaseStore, 'find').andCallFake (klass, id) =>
|
||||
return Promise.resolve(@thread)
|
||||
spyOn(DatabaseStore, 'unpersistModel')
|
||||
waitsForPromise =>
|
||||
NylasSyncWorkerPool._handleDeltaDeletion(@delta)
|
||||
runs =>
|
||||
expect(DatabaseStore.find).toHaveBeenCalledWith(Thread, 'idhere')
|
||||
expect(DatabaseStore.unpersistModel).toHaveBeenCalledWith(@thread)
|
||||
|
||||
# These specs are on hold because this function is changing very soon
|
||||
|
||||
xdescribe "handleModelResponse", ->
|
||||
it "should reject if no JSON is provided", ->
|
||||
it "should resolve if an empty JSON array is provided", ->
|
||||
|
||||
describe "if JSON contains the same object more than once", ->
|
||||
it "should warn", ->
|
||||
it "should omit duplicates", ->
|
||||
|
||||
describe "if JSON contains objects which are of unknown types", ->
|
||||
it "should warn and resolve", ->
|
||||
|
||||
describe "when the object type is `thread`", ->
|
||||
it "should check that models are acceptable", ->
|
||||
|
||||
describe "when the object type is `draft`", ->
|
||||
it "should check that models are acceptable", ->
|
||||
|
||||
it "should call persistModels to save all of the received objects", ->
|
||||
|
||||
it "should resolve with the objects", ->
|
|
@@ -1,9 +1,7 @@
_ = require 'underscore'
DatabaseStore = require '../src/flux/stores/database-store'
NylasLongConnection = require '../src/flux/nylas-long-connection'
NylasSyncWorker = require '../src/flux/nylas-sync-worker'
Account = require '../src/flux/models/account'
Thread = require '../src/flux/models/thread'
{DatabaseStore, Account, Thread} = require 'nylas-exports'
NylasLongConnection = require '../lib/nylas-long-connection'
NylasSyncWorker = require '../lib/nylas-sync-worker'

describe "NylasSyncWorker", ->
  beforeEach ->
@ -6,14 +6,12 @@ React = require 'react/addons'
|
|||
Actions,
|
||||
Contact,
|
||||
Message} = require 'nylas-exports'
|
||||
{ResizableRegion} = require 'nylas-component-kit'
|
||||
|
||||
DeveloperBarStore = require './developer-bar-store'
|
||||
DeveloperBarTask = require './developer-bar-task'
|
||||
DeveloperBarCurlItem = require './developer-bar-curl-item'
|
||||
DeveloperBarLongPollItem = require './developer-bar-long-poll-item'
|
||||
|
||||
DeveloperBarClosedHeight = 30
|
||||
|
||||
class DeveloperBar extends React.Component
|
||||
@displayName: "DeveloperBar"
|
||||
|
@ -22,7 +20,6 @@ class DeveloperBar extends React.Component
|
|||
|
||||
constructor: (@props) ->
|
||||
@state = _.extend @_getStateFromStores(),
|
||||
height: DeveloperBarClosedHeight
|
||||
section: 'curl'
|
||||
filter: ''
|
||||
|
||||
|
@ -35,17 +32,8 @@ class DeveloperBar extends React.Component
|
|||
@activityStoreUnsubscribe() if @activityStoreUnsubscribe
|
||||
|
||||
render: =>
|
||||
# TODO WARNING: This 1px height is necessary to fix a redraw issue in the thread
|
||||
# list in Chrome 42 (Electron 0.26.0). Do not remove unless you've verified that
|
||||
# scrolling works fine now and repaints aren't visible.
|
||||
return <div style={height:1}></div> unless @state.visible
|
||||
|
||||
<ResizableRegion className="developer-bar"
|
||||
initialHeight={@state.height}
|
||||
minHeight={DeveloperBarClosedHeight}
|
||||
handle={ResizableRegion.Handle.Top}>
|
||||
<div className="developer-bar">
|
||||
<div className="controls">
|
||||
{@_caret()}
|
||||
<div className="btn-container pull-left">
|
||||
<div className="btn" onClick={ => @_onExpandSection('queue')}>
|
||||
<span>Queue Length: {@state.queue?.length}</span>
|
||||
|
@ -67,25 +55,13 @@ class DeveloperBar extends React.Component
|
|||
<div className="btn-container pull-right">
|
||||
<div className="btn" onClick={Actions.sendFeedback}>Feedback</div>
|
||||
</div>
|
||||
<div className="btn-container pull-right">
|
||||
<div className="btn" onClick={@_onToggleRegions}>Component Regions</div>
|
||||
</div>
|
||||
<div className="btn-container pull-right">
|
||||
<div className="btn" onClick={@_onToggleReactRemoteContainer}>React Remote Container</div>
|
||||
</div>
|
||||
</div>
|
||||
{@_sectionContent()}
|
||||
<div className="footer">
|
||||
<div className="btn" onClick={@_onClear}>Clear</div>
|
||||
<input className="filter" placeholder="Filter..." value={@state.filter} onChange={@_onFilter} />
|
||||
</div>
|
||||
</ResizableRegion>
|
||||
|
||||
_caret: =>
|
||||
if @state.height > DeveloperBarClosedHeight
|
||||
<i className="fa fa-caret-square-o-down" onClick={@_onHide}></i>
|
||||
else
|
||||
<i className="fa fa-caret-square-o-up" onClick={@_onShow}></i>
|
||||
</div>
|
||||
|
||||
_sectionContent: =>
|
||||
expandedDiv = <div></div>
|
||||
|
@ -133,11 +109,7 @@ class DeveloperBar extends React.Component
|
|||
expandedDiv
|
||||
|
||||
_onChange: =>
|
||||
# The developer bar is hidden almost all the time. Rather than render when
|
||||
# API requests come in, etc., just ignore changes from our store and retrieve
|
||||
# state when we open.
|
||||
if @state.visible and @state.height > DeveloperBarClosedHeight
|
||||
@setState(@_getStateFromStores())
|
||||
@setState(@_getStateFromStores())
|
||||
|
||||
_onClear: =>
|
||||
Actions.clearDeveloperConsole()
|
||||
|
@ -148,28 +120,11 @@ class DeveloperBar extends React.Component
|
|||
_onDequeueAll: =>
|
||||
Actions.dequeueAllTasks()
|
||||
|
||||
_onHide: =>
|
||||
@setState
|
||||
height: DeveloperBarClosedHeight
|
||||
|
||||
_onShow: =>
|
||||
@setState(@_getStateFromStores())
|
||||
@setState(height: 200) if @state.height < 100
|
||||
|
||||
_onExpandSection: (section) =>
|
||||
@setState(@_getStateFromStores())
|
||||
@setState(section: section)
|
||||
@_onShow()
|
||||
|
||||
_onToggleRegions: =>
|
||||
Actions.toggleComponentRegions()
|
||||
|
||||
_onToggleReactRemoteContainer: =>
|
||||
{ReactRemote} = require('nylas-exports')
|
||||
ReactRemote.toggleContainerVisible()
|
||||
|
||||
_getStateFromStores: =>
|
||||
visible: DeveloperBarStore.visible()
|
||||
queue: TaskQueue._queue
|
||||
completed: TaskQueue._completed
|
||||
curlHistory: DeveloperBarStore.curlHistory()
|
|
@ -6,8 +6,11 @@ module.exports =
|
|||
item: null
|
||||
|
||||
activate: (@state={}) ->
|
||||
WorkspaceStore.defineSheet 'Main', {root: true},
|
||||
popout: ['Center']
|
||||
|
||||
ComponentRegistry.register DeveloperBar,
|
||||
location: WorkspaceStore.Sheet.Global.Footer
|
||||
location: WorkspaceStore.Location.Center
|
||||
|
||||
deactivate: ->
|
||||
ComponentRegistry.unregister DeveloperBar
|
|
@ -1,13 +1,16 @@
|
|||
{
|
||||
"name": "developer-bar",
|
||||
"name": "worker-ui",
|
||||
"version": "0.1.0",
|
||||
"main": "./lib/main",
|
||||
"description": "Developer bar at the very bottom of the window",
|
||||
"description": "Interface for the worker window",
|
||||
"license": "Proprietary",
|
||||
"private": true,
|
||||
"engines": {
|
||||
"atom": "*"
|
||||
},
|
||||
"dependencies": {
|
||||
},
|
||||
"windowTypes": {
|
||||
"work": true
|
||||
}
|
||||
}
|
|
@ -6,24 +6,19 @@
|
|||
border-top:1px solid rgba(0,0,0,0.7);
|
||||
color:white;
|
||||
font-size:12px;
|
||||
order:1000;
|
||||
display:flex;
|
||||
flex-direction:column;
|
||||
|
||||
.resizable {
|
||||
display: flex;
|
||||
width:100%;
|
||||
flex-direction: column;
|
||||
.resizeBar {
|
||||
z-index:10;
|
||||
}
|
||||
}
|
||||
height:100%;
|
||||
|
||||
.controls {
|
||||
z-index:2;
|
||||
background-color: rgba(80,80,80,1);
|
||||
position: relative;
|
||||
min-height:30px;
|
||||
-webkit-app-region: drag;
|
||||
.btn-container {
|
||||
-webkit-app-region: no-drag;
|
||||
}
|
||||
}
|
||||
|
||||
.footer {
|
|
@@ -6,6 +6,7 @@
  'cmd-q': 'application:quit'
  'cmd-h': 'application:hide'
  'cmd-1': 'application:show-main-window'
  'cmd-2': 'application:show-work-window'
  'cmd-m': 'application:minimize'
  'cmd-alt-h': 'application:hide-other-applications'
  'alt-cmd-ctrl-m': 'application:zoom'
@ -55,7 +55,8 @@
|
|||
{
|
||||
label: 'Developer'
|
||||
submenu: [
|
||||
{ label: 'Toggle Hacker Theme', command: 'application:toggle-theme' }
|
||||
{ label: 'Toggle Component Regions', command: 'window:toggle-component-regions' }
|
||||
{ label: 'Toggle React Remote', command: 'window:toggle-react-remote' }
|
||||
{ type: 'separator' }
|
||||
{ label: 'Relaunch with Debug Flags...', command: 'application:open-dev' }
|
||||
{ type: 'separator' }
|
||||
|
@ -75,6 +76,7 @@
|
|||
{ label: 'Zoom', command: 'application:zoom' }
|
||||
{ type: 'separator' }
|
||||
{ label: 'Message Viewer', command: 'application:show-main-window' }
|
||||
{ label: 'Activity', command: 'application:show-work-window' }
|
||||
{ type: 'separator' }
|
||||
{ label: 'Bring All to Front', command: 'application:bring-all-windows-to-front' }
|
||||
]
|
||||
|
|
|
@ -36,7 +36,8 @@
|
|||
{
|
||||
label: 'Developer'
|
||||
submenu: [
|
||||
{ label: 'Toggle Hacker Theme', command: 'application:toggle-theme' }
|
||||
{ label: 'Toggle Component Regions', command: 'window:toggle-component-regions' }
|
||||
{ label: 'Toggle React Remote', command: 'window:toggle-react-remote' }
|
||||
{ type: 'separator' }
|
||||
{ label: 'Relaunch with &Debug Flags...', command: 'application:open-dev' }
|
||||
{ type: 'separator' }
|
||||
|
|
|
@ -38,7 +38,8 @@
|
|||
{
|
||||
label: 'Developer'
|
||||
submenu: [
|
||||
{ label: 'Toggle Hacker Theme', command: 'application:toggle-theme' }
|
||||
{ label: 'Toggle Component Regions', command: 'window:toggle-component-regions' }
|
||||
{ label: 'Toggle React Remote', command: 'window:toggle-react-remote' }
|
||||
{ type: 'separator' }
|
||||
{ label: 'Relaunch with &Debug Flags...', command: 'application:open-dev' }
|
||||
{ type: 'separator' }
|
||||
|
|
|
@ -12,14 +12,14 @@ ipc =
|
|||
|
||||
describe "ActionBridge", ->
|
||||
|
||||
describe "in the editor window", ->
|
||||
describe "in the work window", ->
|
||||
beforeEach ->
|
||||
spyOn(atom, "getWindowType").andReturn "default"
|
||||
spyOn(atom, "isMainWindow").andReturn true
|
||||
spyOn(atom, "isWorkWindow").andReturn true
|
||||
@bridge = new ActionBridge(ipc)
|
||||
|
||||
it "should have the role Role.ROOT", ->
|
||||
expect(@bridge.role).toBe(ActionBridge.Role.ROOT)
|
||||
it "should have the role Role.WORK", ->
|
||||
expect(@bridge.role).toBe(ActionBridge.Role.WORK)
|
||||
|
||||
it "should rebroadcast global actions", ->
|
||||
spyOn(@bridge, 'onRebroadcast')
|
||||
|
@ -44,10 +44,10 @@ describe "ActionBridge", ->
|
|||
testAction('bla')
|
||||
expect(@bridge.onRebroadcast).not.toHaveBeenCalled()
|
||||
|
||||
describe "in a secondary window", ->
|
||||
describe "in another window", ->
|
||||
beforeEach ->
|
||||
spyOn(atom, "getWindowType").andReturn "popout"
|
||||
spyOn(atom, "isMainWindow").andReturn false
|
||||
spyOn(atom, "isWorkWindow").andReturn false
|
||||
@bridge = new ActionBridge(ipc)
|
||||
@message = new Message
|
||||
id: 'test-id'
|
||||
|
@ -87,12 +87,12 @@ describe "ActionBridge", ->
|
|||
@bridge.onRebroadcast(ActionBridge.TargetWindows.ALL, 'didSwapModel', [{oldModel: '1', newModel: 2}])
|
||||
expect(ipc.send).toHaveBeenCalledWith('action-bridge-rebroadcast-to-all', 'popout', 'didSwapModel', '[{"oldModel":"1","newModel":2}]')
|
||||
|
||||
describe "when called with TargetWindows.MAIN", ->
|
||||
describe "when called with TargetWindows.WORK", ->
|
||||
it "should broadcast the action over IPC to the main window only", ->
|
||||
spyOn(ipc, 'send')
|
||||
Actions.didSwapModel.firing = false
|
||||
@bridge.onRebroadcast(ActionBridge.TargetWindows.MAIN, 'didSwapModel', [{oldModel: '1', newModel: 2}])
|
||||
expect(ipc.send).toHaveBeenCalledWith('action-bridge-rebroadcast-to-main', 'popout', 'didSwapModel', '[{"oldModel":"1","newModel":2}]')
|
||||
@bridge.onRebroadcast(ActionBridge.TargetWindows.WORK, 'didSwapModel', [{oldModel: '1', newModel: 2}])
|
||||
expect(ipc.send).toHaveBeenCalledWith('action-bridge-rebroadcast-to-work', 'popout', 'didSwapModel', '[{"oldModel":"1","newModel":2}]')
|
||||
|
||||
it "should not do anything if the current invocation of the Action was triggered by itself", ->
|
||||
spyOn(ipc, 'send')
|
||||
|
|
|
@ -51,113 +51,6 @@ describe "NylasAPI", ->
|
|||
expect(Actions.postNotification).toHaveBeenCalled()
|
||||
expect(Actions.postNotification.mostRecentCall.args[0].message).toEqual("Nylas can no longer authenticate with your mail provider. You will not be able to send or receive mail. Please log out and sign in again.")
|
||||
|
||||
describe "handleDeltas", ->
|
||||
beforeEach ->
|
||||
@sampleDeltas = JSON.parse(fs.readFileSync('./spec-nylas/fixtures/delta-sync/sample.json'))
|
||||
@sampleClustered = JSON.parse(fs.readFileSync('./spec-nylas/fixtures/delta-sync/sample-clustered.json'))
|
||||
|
||||
it "should immediately fire the received raw deltas event", ->
|
||||
spyOn(Actions, 'longPollReceivedRawDeltas')
|
||||
spyOn(NylasAPI, '_clusterDeltas').andReturn({create: {}, modify: {}, destroy: []})
|
||||
NylasAPI._handleDeltas(@sampleDeltas)
|
||||
expect(Actions.longPollReceivedRawDeltas).toHaveBeenCalled()
|
||||
|
||||
it "should call helper methods for all creates first, then modifications, then destroys", ->
|
||||
spyOn(Actions, 'longPollProcessedDeltas')
|
||||
|
||||
handleDeltaDeletionPromises = []
|
||||
resolveDeltaDeletionPromises = ->
|
||||
fn() for fn in handleDeltaDeletionPromises
|
||||
handleDeltaDeletionPromises = []
|
||||
|
||||
spyOn(NylasAPI, '_handleDeltaDeletion').andCallFake ->
|
||||
new Promise (resolve, reject) ->
|
||||
handleDeltaDeletionPromises.push(resolve)
|
||||
|
||||
handleModelResponsePromises = []
|
||||
resolveModelResponsePromises = ->
|
||||
fn() for fn in handleModelResponsePromises
|
||||
handleModelResponsePromises = []
|
||||
|
||||
spyOn(NylasAPI, '_handleModelResponse').andCallFake ->
|
||||
new Promise (resolve, reject) ->
|
||||
handleModelResponsePromises.push(resolve)
|
||||
|
||||
NylasAPI._handleDeltas(@sampleDeltas)
|
||||
|
||||
createTypes = Object.keys(@sampleClustered['create'])
|
||||
expect(NylasAPI._handleModelResponse.calls.length).toEqual(createTypes.length)
|
||||
expect(NylasAPI._handleModelResponse.calls[0].args[0]).toEqual(_.values(@sampleClustered['create'][createTypes[0]]))
|
||||
expect(NylasAPI._handleDeltaDeletion.calls.length).toEqual(0)
|
||||
|
||||
NylasAPI._handleModelResponse.reset()
|
||||
resolveModelResponsePromises()
|
||||
advanceClock()
|
||||
|
||||
modifyTypes = Object.keys(@sampleClustered['modify'])
|
||||
expect(NylasAPI._handleModelResponse.calls.length).toEqual(modifyTypes.length)
|
||||
expect(NylasAPI._handleModelResponse.calls[0].args[0]).toEqual(_.values(@sampleClustered['modify'][modifyTypes[0]]))
|
||||
expect(NylasAPI._handleDeltaDeletion.calls.length).toEqual(0)
|
||||
|
||||
NylasAPI._handleModelResponse.reset()
|
||||
resolveModelResponsePromises()
|
||||
advanceClock()
|
||||
|
||||
destroyCount = @sampleClustered['destroy'].length
|
||||
expect(NylasAPI._handleDeltaDeletion.calls.length).toEqual(destroyCount)
|
||||
expect(NylasAPI._handleDeltaDeletion.calls[0].args[0]).toEqual(@sampleClustered['destroy'][0])
|
||||
|
||||
expect(Actions.longPollProcessedDeltas).not.toHaveBeenCalled()
|
||||
|
||||
resolveDeltaDeletionPromises()
|
||||
advanceClock()
|
||||
|
||||
expect(Actions.longPollProcessedDeltas).toHaveBeenCalled()
|
||||
|
||||
describe "clusterDeltas", ->
|
||||
beforeEach ->
|
||||
@sampleDeltas = JSON.parse(fs.readFileSync('./spec-nylas/fixtures/delta-sync/sample.json'))
|
||||
@expectedClustered = JSON.parse(fs.readFileSync('./spec-nylas/fixtures/delta-sync/sample-clustered.json'))
|
||||
|
||||
it "should collect create/modify events into a hash by model type", ->
|
||||
{create, modify} = NylasAPI._clusterDeltas(@sampleDeltas)
|
||||
expect(create).toEqual(@expectedClustered.create)
|
||||
expect(modify).toEqual(@expectedClustered.modify)
|
||||
|
||||
it "should collect destroys into an array", ->
|
||||
{destroy} = NylasAPI._clusterDeltas(@sampleDeltas)
|
||||
expect(destroy).toEqual(@expectedClustered.destroy)
|
||||
|
||||
describe "handleDeltaDeletion", ->
|
||||
beforeEach ->
|
||||
@thread = new Thread(id: 'idhere')
|
||||
@delta =
|
||||
"cursor": "bb95ddzqtr2gpmvgrng73t6ih",
|
||||
"object": "thread",
|
||||
"event": "delete",
|
||||
"id": @thread.id,
|
||||
"timestamp": "2015-08-26T17:36:45.297Z"
|
||||
|
||||
it "should resolve if the object cannot be found", ->
|
||||
spyOn(DatabaseStore, 'find').andCallFake (klass, id) =>
|
||||
return Promise.resolve(null)
|
||||
spyOn(DatabaseStore, 'unpersistModel')
|
||||
waitsForPromise =>
|
||||
NylasAPI._handleDeltaDeletion(@delta)
|
||||
runs =>
|
||||
expect(DatabaseStore.find).toHaveBeenCalledWith(Thread, 'idhere')
|
||||
expect(DatabaseStore.unpersistModel).not.toHaveBeenCalled()
|
||||
|
||||
it "should call unpersistModel if the object exists", ->
|
||||
spyOn(DatabaseStore, 'find').andCallFake (klass, id) =>
|
||||
return Promise.resolve(@thread)
|
||||
spyOn(DatabaseStore, 'unpersistModel')
|
||||
waitsForPromise =>
|
||||
NylasAPI._handleDeltaDeletion(@delta)
|
||||
runs =>
|
||||
expect(DatabaseStore.find).toHaveBeenCalledWith(Thread, 'idhere')
|
||||
expect(DatabaseStore.unpersistModel).toHaveBeenCalledWith(@thread)
|
||||
|
||||
# These specs are on hold because this function is changing very soon
|
||||
|
||||
xdescribe "handleModelResponse", ->
|
||||
|
|
|
@ -6,7 +6,6 @@ AccountStore = require '../../src/flux/stores/account-store'
|
|||
DatabaseStore = require '../../src/flux/stores/database-store'
|
||||
DraftStore = require '../../src/flux/stores/draft-store'
|
||||
DraftStoreExtension = require '../../src/flux/stores/draft-store-extension'
|
||||
TaskQueue = require '../../src/flux/stores/task-queue'
|
||||
SendDraftTask = require '../../src/flux/tasks/send-draft'
|
||||
DestroyDraftTask = require '../../src/flux/tasks/destroy-draft'
|
||||
Actions = require '../../src/flux/actions'
|
||||
|
@ -563,6 +562,7 @@ describe "DraftStore", ->
|
|||
draftLocalId = "local-123"
|
||||
beforeEach ->
|
||||
DraftStore._draftSessions = {}
|
||||
DraftStore._draftsSending = {}
|
||||
proxy =
|
||||
prepare: -> Promise.resolve(proxy)
|
||||
teardown: ->
|
||||
|
@ -572,11 +572,9 @@ describe "DraftStore", ->
|
|||
DraftStore._draftSessions[draftLocalId] = proxy
|
||||
spyOn(DraftStore, "_doneWithSession").andCallThrough()
|
||||
spyOn(DraftStore, "trigger")
|
||||
TaskQueue._queue = []
|
||||
|
||||
it "sets the sending state when sending", ->
|
||||
spyOn(atom, "isMainWindow").andReturn true
|
||||
spyOn(TaskQueue, "_updateSoon")
|
||||
spyOn(Actions, "queueTask").andCallThrough()
|
||||
runs ->
|
||||
DraftStore._onSendDraft(draftLocalId)
|
||||
|
|
|
@ -72,21 +72,13 @@ describe "TaskQueue", ->
|
|||
TaskQueue.enqueue(@unstartedTask)
|
||||
expect(@unstartedTask.runLocal).toHaveBeenCalled()
|
||||
|
||||
it "add it to the queue after `performLocalComplete` has run", ->
|
||||
task = new Task()
|
||||
spyOn(atom, "isMainWindow").andReturn true
|
||||
waitsForPromise ->
|
||||
TaskQueue.enqueue(task)
|
||||
task.waitForPerformLocal().then ->
|
||||
expect(TaskQueue._queue.length).toBe 1
|
||||
expect(TaskQueue._queue[0]).toBe task
|
||||
|
||||
it "notifies the queue should be processed", ->
|
||||
spyOn(TaskQueue, "_processQueue").andCallThrough()
|
||||
spyOn(TaskQueue, "_processTask")
|
||||
|
||||
TaskQueue.enqueue(@unstartedTask)
|
||||
advanceClock()
|
||||
advanceClock()
|
||||
expect(TaskQueue._processQueue).toHaveBeenCalled()
|
||||
expect(TaskQueue._processTask).toHaveBeenCalledWith(@unstartedTask)
|
||||
expect(TaskQueue._processTask.calls.length).toBe(1)
|
||||
|
@ -152,6 +144,7 @@ describe "TaskQueue", ->
|
|||
spyOn(TaskQueue, "_processQueue")
|
||||
TaskQueue.dequeue(@unstartedTask)
|
||||
advanceClock(20)
|
||||
advanceClock()
|
||||
expect(TaskQueue._processQueue).toHaveBeenCalled()
|
||||
expect(TaskQueue._processQueue.calls.length).toBe(1)
|
||||
|
||||
|
@ -211,4 +204,3 @@ describe "TaskQueue", ->
|
|||
TaskQueue._queue = [task]
|
||||
TaskQueue._processTask(task)
|
||||
expect(task.queueState.isProcessing).toBe true
|
||||
|
||||
|
|
|
@ -384,6 +384,7 @@ window.fakeSetTimeout = (callback, ms) ->
|
|||
id
|
||||
|
||||
window.fakeClearTimeout = (idToClear) ->
|
||||
window.timeouts ?= []
|
||||
window.timeouts = window.timeouts.filter ([id]) -> id != idToClear
|
||||
|
||||
window.fakeSetInterval = (callback, ms) ->
|
||||
|
@ -401,6 +402,7 @@ window.advanceClock = (delta=1) ->
|
|||
window.now += delta
|
||||
callbacks = []
|
||||
|
||||
window.timeouts ?= []
|
||||
window.timeouts = window.timeouts.filter ([id, strikeTime, callback]) ->
|
||||
if strikeTime <= window.now
|
||||
callbacks.push(callback)
|
||||
|
|
|
@@ -355,6 +355,9 @@ class Atom extends Model
  isMainWindow: ->
    !!@getLoadSettings().mainWindow

  isWorkWindow: ->
    @getWindowType() is 'work'

  getWindowType: ->
    @getLoadSettings().windowType
@ -520,16 +523,6 @@ class Atom extends Model
|
|||
toggleFullScreen: ->
|
||||
@setFullScreen(!@isFullScreen())
|
||||
|
||||
# Schedule the window to be shown and focused on the next tick.
|
||||
#
|
||||
# This is done in a next tick to prevent a white flicker from occurring
|
||||
# if called synchronously.
|
||||
displayWindow: ({maximize}={}) ->
|
||||
setImmediate =>
|
||||
@show()
|
||||
@focus()
|
||||
@maximize() if maximize
|
||||
|
||||
# Get the dimensions of this window.
|
||||
#
|
||||
# Returns an {Object} with the following keys:
|
||||
|
@ -605,7 +598,7 @@ class Atom extends Model
|
|||
|
||||
# Call this method when establishing a real application window.
|
||||
startRootWindow: ->
|
||||
{resourcePath, safeMode} = @getLoadSettings()
|
||||
{resourcePath, safeMode, windowType} = @getLoadSettings()
|
||||
|
||||
CommandInstaller = require './command-installer'
|
||||
CommandInstaller.installAtomCommand resourcePath, false, (error) ->
|
||||
|
@ -614,34 +607,44 @@ class Atom extends Model
|
|||
console.warn error.message if error?
|
||||
|
||||
dimensions = @restoreWindowDimensions()
|
||||
|
||||
@loadConfig()
|
||||
@keymaps.loadBundledKeymaps()
|
||||
@themes.loadBaseStylesheets()
|
||||
@packages.loadPackages()
|
||||
@deserializeRootWindow()
|
||||
@packages.activate()
|
||||
@keymaps.loadUserKeymap()
|
||||
@requireUserInitScript() unless safeMode
|
||||
@menu.update()
|
||||
|
||||
@commands.add 'atom-workspace',
|
||||
'atom-workspace:add-account': =>
|
||||
@newWindow
|
||||
title: 'Add an Account'
|
||||
width: 340
|
||||
height: 550
|
||||
toolbar: false
|
||||
resizable: false
|
||||
windowType: 'onboarding'
|
||||
windowProps:
|
||||
page: 'add-account'
|
||||
|
||||
# Make sure we can't be made so small that the interface looks like crap
|
||||
@getCurrentWindow().setMinimumSize(875, 500)
|
||||
|
||||
maximize = dimensions?.maximized and process.platform isnt 'darwin'
|
||||
@displayWindow({maximize})
|
||||
@show()
|
||||
@focus()
|
||||
@maximize() if maximize
|
||||
|
||||
cover = document.getElementById("application-loading-cover")
|
||||
wait = (time, fn) -> setTimeout(fn, time)
|
||||
|
||||
wait 1, =>
|
||||
cover.classList.add("showing")
|
||||
|
||||
wait 220, =>
|
||||
@loadConfig()
|
||||
@keymaps.loadBundledKeymaps()
|
||||
@themes.loadBaseStylesheets()
|
||||
@packages.loadPackages(windowType)
|
||||
@deserializeRootWindow()
|
||||
@packages.activate()
|
||||
@keymaps.loadUserKeymap()
|
||||
@requireUserInitScript() unless safeMode
|
||||
@menu.update()
|
||||
|
||||
@commands.add 'atom-workspace',
|
||||
'atom-workspace:add-account': =>
|
||||
@newWindow
|
||||
title: 'Add an Account'
|
||||
width: 340
|
||||
height: 550
|
||||
toolbar: false
|
||||
resizable: false
|
||||
windowType: 'onboarding'
|
||||
windowProps:
|
||||
page: 'add-account'
|
||||
|
||||
# Make sure we can't be made so small that the interface looks like crap
|
||||
@getCurrentWindow().setMinimumSize(875, 500)
|
||||
wait 20, =>
|
||||
cover.classList.add('visible')
|
||||
|
||||
# Call this method when establishing a secondary application window
|
||||
# displaying a specific set of packages.
|
||||
|
@ -652,6 +655,9 @@ class Atom extends Model
|
|||
windowType,
|
||||
windowPackages} = @getLoadSettings()
|
||||
|
||||
cover = document.getElementById("application-loading-cover")
|
||||
cover.remove()
|
||||
|
||||
@loadConfig()
|
||||
|
||||
@keymaps.loadBundledKeymaps()
|
||||
|
@ -908,6 +914,6 @@ class Atom extends Model
|
|||
finishUnload: ->
|
||||
_.defer =>
|
||||
if remote.getGlobal('application').quitting
|
||||
remote.quit()
|
||||
remote.require('app').quit()
|
||||
else
|
||||
@close()
|
||||
|
|
|
@ -96,7 +96,7 @@ class Application
|
|||
if test
|
||||
@runSpecs({exitWhenDone: specsOnCommandLine, @resourcePath, specDirectory, specFilePattern, logFile})
|
||||
else
|
||||
@windowManager.ensurePrimaryWindowOnscreen()
|
||||
@openWindowsForTokenState()
|
||||
for urlToOpen in (urlsToOpen || [])
|
||||
@openUrl(urlToOpen)
|
||||
|
||||
|
@ -132,7 +132,7 @@ class Application
|
|||
# retry the deletion a few times.
|
||||
deleteFileWithRetry: (filePath, callback, retries = 5) ->
|
||||
callbackWithRetry = (err) =>
|
||||
if err
|
||||
if err and err.message.indexOf('no such file') is -1
|
||||
console.log("File Error: #{err.message} - retrying in 150msec")
|
||||
setTimeout =>
|
||||
@deleteFileWithRetry(filePath, callback, retries - 1)
|
||||
|
@ -141,7 +141,7 @@ class Application
|
|||
callback(null)
|
||||
|
||||
if not fs.existsSync(filePath)
|
||||
callback(null)
|
||||
return callback(null)
|
||||
|
||||
if retries > 0
|
||||
fs.unlink(filePath, callbackWithRetry)
|
||||
|
@ -152,15 +152,28 @@ class Application
|
|||
setupJavaScriptArguments: ->
|
||||
app.commandLine.appendSwitch 'js-flags', '--harmony'
|
||||
|
||||
openWindowsForTokenState: =>
|
||||
hasToken = @config.get('edgehill.credentials')
|
||||
if hasToken
|
||||
@windowManager.showMainWindow()
|
||||
@windowManager.ensureWorkWindow()
|
||||
else
|
||||
@windowManager.newOnboardingWindow().showWhenLoaded()
|
||||
|
||||
_logout: =>
|
||||
@setDatabasePhase('close')
|
||||
@windowManager.closeMainWindow()
|
||||
@windowManager.unregisterAllHotWindows()
|
||||
@windowManager.closeAllWindows()
|
||||
@deleteFileWithRetry path.join(configDirPath,'edgehill.db'), =>
|
||||
@config.set('tokens', null)
|
||||
@config.set('nylas', null)
|
||||
@config.set('edgehill', null)
|
||||
@setDatabasePhase('setup')
|
||||
@openWindowsForTokenState()
|
||||
|
||||
_loginSuccessful: =>
|
||||
@openWindowsForTokenState()
|
||||
@windowManager.mainWindow().once 'window:loaded', =>
|
||||
@windowManager.onboardingWindow()?.close()
|
||||
|
||||
databasePhase: ->
|
||||
@_databasePhase
|
||||
|
@ -177,16 +190,22 @@ class Application
|
|||
atomWindow.browserWindow.webContents.send('database-phase-change', phase)
|
||||
|
||||
rebuildDatabase: =>
|
||||
return if @_databasePhase is 'close'
|
||||
@setDatabasePhase('close')
|
||||
@windowManager.closeMainWindow()
|
||||
dialog.showMessageBox
|
||||
type: 'info'
|
||||
message: 'Upgrading Nylas'
|
||||
detail: 'Welcome back to Nylas! We need to rebuild your mailbox to support new features. Please wait a few moments while we re-sync your mail.'
|
||||
buttons: ['OK']
|
||||
@deleteFileWithRetry path.join(configDirPath,'edgehill.db'), =>
|
||||
@setDatabasePhase('setup')
|
||||
@windowManager.showMainWindow()
|
||||
@windowManager.closeAllWindows()
|
||||
|
||||
# Return immediately so that the client window which called this
|
||||
# method via remote is not blocked.
|
||||
_.defer =>
|
||||
dialog.showMessageBox
|
||||
type: 'info'
|
||||
message: 'Upgrading Nylas'
|
||||
detail: 'Welcome back to Nylas! We need to rebuild your mailbox to support new features. Please wait a few moments while we re-sync your mail.'
|
||||
buttons: ['OK']
|
||||
|
||||
@deleteFileWithRetry path.join(configDirPath,'edgehill.db'), =>
|
||||
@setDatabasePhase('setup')
|
||||
@openWindowsForTokenState()
|
||||
|
||||
# Registers basic application commands, non-idempotent.
|
||||
# Note: If these events are triggered while an application window is open, the window
|
||||
|
@ -232,6 +251,7 @@ class Application
|
|||
@on 'application:send-feedback', => @windowManager.sendToMainWindow('send-feedback')
|
||||
@on 'application:open-preferences', => @windowManager.sendToMainWindow('open-preferences')
|
||||
@on 'application:show-main-window', => @windowManager.ensurePrimaryWindowOnscreen()
|
||||
@on 'application:show-work-window', => @windowManager.showWorkWindow()
|
||||
@on 'application:check-for-update', => @autoUpdateManager.check()
|
||||
@on 'application:install-update', =>
|
||||
@quitting = true
|
||||
|
@ -290,6 +310,9 @@ class Application
|
|||
@openUrl(urlToOpen)
|
||||
event.preventDefault()
|
||||
|
||||
ipc.on 'set-badge-value', (event, value) =>
|
||||
app.dock?.setBadge?(value)
|
||||
|
||||
ipc.on 'new-window', (event, options) =>
|
||||
@windowManager.newWindow(options)
|
||||
|
||||
|
@ -328,17 +351,20 @@ class Application
|
|||
return unless atomWindow.browserWindow.webContents
|
||||
atomWindow.browserWindow.webContents.send('action-bridge-message', args...)
|
||||
|
||||
ipc.on 'action-bridge-rebroadcast-to-main', (event, args...) =>
|
||||
mainWindow = @windowManager.mainWindow()
|
||||
return if not mainWindow or not mainWindow.browserWindow.webContents
|
||||
return if BrowserWindow.fromWebContents(event.sender) is mainWindow
|
||||
mainWindow.browserWindow.webContents.send('action-bridge-message', args...)
|
||||
ipc.on 'action-bridge-rebroadcast-to-work', (event, args...) =>
|
||||
workWindow = @windowManager.workWindow()
|
||||
return if not workWindow or not workWindow.browserWindow.webContents
|
||||
return if BrowserWindow.fromWebContents(event.sender) is workWindow
|
||||
workWindow.browserWindow.webContents.send('action-bridge-message', args...)
|
||||
|
||||
clipboard = null
|
||||
ipc.on 'write-text-to-selection-clipboard', (event, selectedText) ->
|
||||
clipboard ?= require 'clipboard'
|
||||
clipboard.writeText(selectedText, 'selection')
|
||||
|
||||
ipc.on 'login-successful', (event) =>
|
||||
@_loginSuccessful()
|
||||
|
||||
# Public: Executes the given command.
|
||||
#
|
||||
# If it isn't handled globally, delegate to the currently focused window.
|
||||
|
|
|
@ -9,24 +9,15 @@ class WindowManager
|
|||
constructor: ({@devMode, @safeMode, @resourcePath, @config}) ->
|
||||
@_windows = []
|
||||
@_mainWindow = null
|
||||
@_workWindow = null
|
||||
@_hotWindows = {}
|
||||
|
||||
@config.onDidChange 'edgehill.credentials', =>
|
||||
@ensurePrimaryWindowOnscreen()
|
||||
|
||||
ensurePrimaryWindowOnscreen: ->
|
||||
return if global.application.quitting
|
||||
hasToken = @config.get('edgehill.credentials')
|
||||
if hasToken
|
||||
@showMainWindow()
|
||||
else
|
||||
onboarding = @onboardingWindow() ? @newOnboardingWindow()
|
||||
onboarding.showWhenLoaded()
|
||||
|
||||
@closeMainWindow()
|
||||
@unregisterAllHotWindows()
|
||||
for win in @_windows
|
||||
win.close() unless win is onboarding
|
||||
closeAllWindows: ->
|
||||
@closeMainWindow()
|
||||
@closeWorkWindow()
|
||||
@unregisterAllHotWindows()
|
||||
for win in @_windows
|
||||
win.close()
|
||||
|
||||
windows: ->
|
||||
@_windows
|
||||
|
@ -71,6 +62,8 @@ class WindowManager
|
|||
@_mainWindow.focus()
|
||||
else if !@_mainWindow.isVisible()
|
||||
@_mainWindow.showWhenLoaded()
|
||||
else
|
||||
@_mainWindow.focus()
|
||||
|
||||
else
|
||||
if @devMode
|
||||
|
@@ -88,6 +81,39 @@ class WindowManager
      neverClose: true
      mainWindow: true

  ###
  Work Window
  ###

  workWindow: ->
    @_workWindow

  closeWorkWindow: ->
    return unless @_workWindow
    @_workWindow.neverClose = false
    @_workWindow.close()
    @_workWindow = null

  ensureWorkWindow: ->
    console.log('ensureWorkWindow')
    @_workWindow ?= @newWindow
      windowType: 'work'
      title: 'Activity'
      toolbar: false
      neverClose: true
      width: 800
      height: 400
      hidden: true

  showWorkWindow: ->
    return unless @_workWindow
    if @_workWindow.isMinimized()
      @_workWindow.restore()
      @_workWindow.focus()
    else if !@_workWindow.isVisible()
      @_workWindow.showWhenLoaded()
    else
      @_workWindow.focus()

  ###
  Onboarding Window
@ -252,7 +278,7 @@ class WindowManager
|
|||
win = new AtomWindow(options)
|
||||
newLoadSettings = _.extend(win.loadSettings(), options)
|
||||
win.setLoadSettings(newLoadSettings)
|
||||
win.showWhenLoaded()
|
||||
win.showWhenLoaded() unless options.hidden
|
||||
return win
|
||||
|
||||
# Tries to create a new hot window. Since we're updating an existing
|
||||
|
@ -409,7 +435,8 @@ class WindowManager
|
|||
@quitCheck ?= _.debounce =>
|
||||
noVisibleWindows = @visibleWindows().length is 0
|
||||
mainWindowLoading = @mainWindow() and not @mainWindow().isLoaded()
|
||||
if noVisibleWindows and not mainWindowLoading
|
||||
workWindowLoading = @workWindow() and not @workWindow().isLoaded()
|
||||
if noVisibleWindows and not mainWindowLoading and not workWindowLoading
|
||||
app.quit()
|
||||
, 10000
|
||||
@quitCheck()
|
||||
|
|
|
@@ -22,8 +22,6 @@ class ComponentRegistry
    @_registry = {}
    @_showComponentRegions = false

    @listenTo Actions.toggleComponentRegions, @_onToggleComponentRegions

  # Public: Register a new component with the Component Registry.
  # Typically, packages call this method from their main `activate` method

@@ -160,7 +158,7 @@ class ComponentRegistry

  # Showing Component Regions

  _onToggleComponentRegions: ->
  toggleComponentRegions: ->
    @_showComponentRegions = !@_showComponentRegions
    @trigger(@)
@@ -101,16 +101,7 @@ class ListTabular extends React.Component
    # Determine the exact range of rows we want onscreen
    rangeStart = Math.floor(scrollTop / @props.itemHeight)
    rangeSize = Math.ceil(window.innerHeight / @props.itemHeight)
    rangeEnd = rangeStart + rangeSize

    # 1. Clip this range to the number of available items
    #
    # 2. Expand the range by a bit so that we prepare items offscreen
    #    before they're seen. This works because we force a compositor
    #    layer using transform:translate3d(0,0,0)
    #
    rangeStart = Math.max(0, rangeStart - rangeSize)
    rangeEnd = Math.min(rangeEnd + rangeSize, @props.dataView.count())
    rangeEnd = Math.min(rangeStart + rangeSize, @props.dataView.count())

    # Final sanity check to prevent needless work
    return if rangeStart is @state.renderedRangeStart and
@@ -7,12 +7,12 @@ TaskRegistry = require '../task-registry'
DatabaseObjectRegistry = require '../database-object-registry'
Role =
- ROOT: 'root',
+ WORK: 'work',
SECONDARY: 'secondary'
TargetWindows =
ALL: 'all',
- MAIN: 'main'
+ WORK: 'work'
Message =
DATABASE_STORE_TRIGGER: 'db-store-trigger'
@@ -41,7 +41,7 @@ class ActionBridge
constructor: (ipc) ->
@ipc = ipc
@initiatorId = atom.getWindowType()
- @role = if atom.isMainWindow() then Role.ROOT else Role.SECONDARY
+ @role = if atom.isWorkWindow() then Role.WORK else Role.SECONDARY
# Listen for action bridge messages from other windows
@ipc.on('action-bridge-message', @onIPCMessage)
@@ -58,11 +58,11 @@ class ActionBridge
@onRebroadcast(TargetWindows.ALL, Message.DATABASE_STORE_TRIGGER, [change])
DatabaseStore.listen(databaseCallback, @)
- if @role isnt Role.ROOT
+ if @role isnt Role.WORK
# Observe all mainWindow actions fired in this window and re-broadcast
# them to other windows so the central application stores can take action
- Actions.mainWindowActions.forEach (name) =>
- callback = => @onRebroadcast(TargetWindows.MAIN, name, arguments)
+ Actions.workWindowActions.forEach (name) =>
+ callback = => @onRebroadcast(TargetWindows.WORK, name, arguments)
Actions[name].listen(callback, @)
onIPCMessage: (initiatorId, name, json) =>
@@ -89,7 +89,7 @@ class ActionBridge
params = []
args.forEach (arg) ->
if arg instanceof Function
- throw new Error("ActionBridge cannot forward action argument of type `function` to main window.")
+ throw new Error("ActionBridge cannot forward action argument of type `function` to work window.")
params.push(arg[0])
json = Utils.serializeRegisteredObjects(params)
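Illustrative sketch (not from the diff above) of the role check the bridge now performs; only the Role values come from the hunk, the helper name is invented:

    Role = {ROOT: 'root', WORK: 'work', SECONDARY: 'secondary'}

    # Every window except the work window re-broadcasts work-scoped actions,
    # mirroring `if @role isnt Role.WORK` above.
    shouldRebroadcastWorkActions = (role) -> role isnt Role.WORK

    console.log shouldRebroadcastWorkActions(Role.SECONDARY)  # true  (main, composer, onboarding...)
    console.log shouldRebroadcastWorkActions(Role.WORK)       # false (the work window handles them itself)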
@@ -2,7 +2,7 @@ Reflux = require 'reflux'
ActionScopeWindow = 'window'
ActionScopeGlobal = 'global'
- ActionScopeMainWindow = 'main'
+ ActionScopeWorkWindow = 'work'
###
Public: In the Flux {Architecture.md}, almost every user action
@@ -103,30 +103,32 @@ class Actions
*Scope: Main Window*
###
- @queueTask: ActionScopeMainWindow
+ @queueTask: ActionScopeWorkWindow
+ @undoTaskId: ActionScopeWorkWindow
###
Public: Dequeue all {Task}s from the {TaskQueue}. Use with care.
*Scope: Main Window*
###
- @dequeueAllTasks: ActionScopeMainWindow
- @dequeueTask: ActionScopeMainWindow
+ @dequeueAllTasks: ActionScopeWorkWindow
+ @dequeueTask: ActionScopeWorkWindow
###
Public: Dequeue a {Task} matching the description provided.
*Scope: Main Window*
###
- @dequeueMatchingTask: ActionScopeMainWindow
+ @dequeueMatchingTask: ActionScopeWorkWindow
- @longPollStateChanged: ActionScopeMainWindow
- @longPollReceivedRawDeltas: ActionScopeMainWindow
- @longPollProcessedDeltas: ActionScopeMainWindow
- @longPollConnected: ActionScopeMainWindow
- @longPollOffline: ActionScopeMainWindow
- @didMakeAPIRequest: ActionScopeMainWindow
- @sendFeedback: ActionScopeMainWindow
+ @longPollStateChanged: ActionScopeWorkWindow
+ @longPollReceivedRawDeltas: ActionScopeWorkWindow
+ @longPollProcessedDeltas: ActionScopeWorkWindow
+ @longPollConnected: ActionScopeWorkWindow
+ @longPollOffline: ActionScopeWorkWindow
+ @didMakeAPIRequest: ActionScopeWorkWindow
+ @sendFeedback: ActionScopeWorkWindow
###
@@ -150,8 +152,6 @@ class Actions
###
@clearDeveloperConsole: ActionScopeWindow
- @toggleComponentRegions: ActionScopeWindow
###
Public: Select the provided account ID in the current window.
@@ -484,17 +484,17 @@ create = (obj, name, scope) ->
obj[name].scope = scope
obj[name].sync = true
- scopes = {'window': [], 'global': [], 'main': []}
+ scopes = {'window': [], 'global': [], 'work': []}
for name in Object.getOwnPropertyNames(Actions)
continue if name in ['length', 'name', 'arguments', 'caller', 'prototype']
- continue unless Actions[name] in ['window', 'global', 'main']
+ continue unless Actions[name] in ['window', 'global', 'work']
scope = Actions[name]
scopes[scope].push(name)
create(Actions, name, scope)
Actions.windowActions = scopes['window']
- Actions.mainWindowActions = scopes['main']
+ Actions.workWindowActions = scopes['work']
Actions.globalActions = scopes['global']
module.exports = Actions
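Illustrative sketch (not from the diff above) of how the scope constants drive the `scopes` hash; the `Actions` object here is a toy stand-in, not the real module:

    ActionScopeWorkWindow = 'work'

    Actions =
      queueTask: ActionScopeWorkWindow
      dequeueAllTasks: ActionScopeWorkWindow
      logout: 'global'

    scopes = {'window': [], 'global': [], 'work': []}
    for name in Object.keys(Actions)
      continue unless Actions[name] in ['window', 'global', 'work']
      scopes[Actions[name]].push(name)

    console.log scopes['work']   # ['queueTask', 'dequeueAllTasks'] - these get re-broadcast to the work window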
@@ -14,7 +14,7 @@ class EdgehillAPI
# Always ask Edgehill Server for our tokens at launch. This way accounts
# added elsewhere will appear, and we'll also handle the 0.2.5=>0.3.0 upgrade.
- if atom.isMainWindow()
+ if atom.isWorkWindow()
existing = @_getCredentials()
if existing and existing.username
@setUserIdentifierAndRetrieveTokens(existing.username)
@@ -70,9 +70,8 @@ class EdgehillAPI
success: (userData={}) =>
@setTokens(userData.tokens)
if atom.getWindowType() is 'onboarding'
- setTimeout ->
- atom.close()
- , 2500
+ ipc = require 'ipc'
+ ipc.send('login-successful')
error: (apiError) =>
console.error apiError
@@ -130,11 +130,10 @@ Utils =
modelFreeze: (o) ->
Object.freeze(o)
- for key, prop of o
- continue unless o.hasOwnProperty(key)
- continue unless typeof prop is 'object' and prop isnt null
- continue if Object.isFrozen(prop)
- Utils.modelFreeze(prop)
+ Object.getOwnPropertyNames(o).forEach (key) ->
+ val = o[key]
+ if typeof val is 'object' and val isnt null and not Object.isFrozen(val)
+ Utils.modelFreeze(val)
generateTempId: ->
s4 = ->
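Illustrative sketch (not from the diff above) of why walking own property names matters: non-enumerable properties, like the `_delta` reference used elsewhere in this commit, also get frozen. Standalone approximation with invented data:

    modelFreeze = (o) ->
      Object.freeze(o)
      Object.getOwnPropertyNames(o).forEach (key) ->
        val = o[key]
        if typeof val is 'object' and val isnt null and not Object.isFrozen(val)
          modelFreeze(val)

    obj = {a: {b: 1}}
    Object.defineProperty(obj, 'hidden', {value: {c: 2}, enumerable: false})
    modelFreeze(obj)
    console.log Object.isFrozen(obj.a)        # true
    console.log Object.isFrozen(obj.hidden)   # true - a `for key, prop of` loop would have skipped it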
@@ -4,8 +4,6 @@ Actions = require './actions'
{APIError} = require './errors'
PriorityUICoordinator = require '../priority-ui-coordinator'
DatabaseStore = require './stores/database-store'
- NylasSyncWorker = require './nylas-sync-worker'
- NylasLongConnection = require './nylas-long-connection'
async = require 'async'
PermanentErrorCodes = [400, 404, 500]
@@ -14,7 +12,6 @@ CancelledErrorCode = -123
# This is lazy-loaded
AccountStore = null
class NylasAPIOptimisticChangeTracker
constructor: ->
@_locks = {}
@@ -100,11 +97,6 @@ class NylasAPI
atom.config.onDidChange('tokens', @_onConfigChanged)
@_onConfigChanged()
- if atom.isMainWindow()
- AccountStore = require './stores/account-store'
- AccountStore.listen(@_onAccountsChanged, @)
- @_onAccountsChanged()
_onConfigChanged: =>
prev = {@AppID, @APIRoot, @APITokens}
@@ -128,61 +120,13 @@ class NylasAPI
current = {@AppID, @APIRoot, @APITokens}
- if atom.isMainWindow() and not _.isEqual(prev, current)
+ if atom.isWorkWindow() and not _.isEqual(prev, current)
@APITokens.forEach (token) =>
@makeRequest
path: "/account"
auth: {'user': token, 'pass': '', sendImmediately: true}
returnsModel: true
- _onAccountsChanged: ->
- return if atom.inSpecMode()
- AccountStore = require './stores/account-store'
- accounts = AccountStore.items()
- workers = _.map(accounts, @workerForAccount)
- # Stop the workers that are not in the new workers list.
- # These accounts are no longer in our database, so we shouldn't
- # be listening.
- old = _.without(@_workers, workers...)
- worker.cleanup() for worker in old
- @_workers = workers
- workers: =>
- @_workers
- workerForAccount: (account) =>
- worker = _.find @_workers, (c) -> c.account().id is account.id
- return worker if worker
- worker = new NylasSyncWorker(@, account)
- connection = worker.connection()
- connection.onStateChange (state) ->
- Actions.longPollStateChanged({accountId: account.id, state: state})
- if state == NylasLongConnection.State.Connected
- ## TODO use OfflineStatusStore
- Actions.longPollConnected()
- else
- ## TODO use OfflineStatusStore
- Actions.longPollOffline()
- connection.onDeltas (deltas) =>
- PriorityUICoordinator.settle.then =>
- @_handleDeltas(deltas)
- @_workers.push(worker)
- worker.start()
- worker
- _cleanupAccountWorkers: ->
- for worker in @_workers
- worker.cleanup()
- @_workers = []
# Delegates to node's request object.
# On success, it will call the passed in success callback with options.
# On error it will create a new APIError object that wraps the error,
@@ -269,66 +213,6 @@ class NylasAPI
return Promise.resolve()
- _handleDeltas: (deltas) ->
- Actions.longPollReceivedRawDeltas(deltas)
- # Create a (non-enumerable) reference from the attributes which we carry forward
- # back to their original deltas. This allows us to mark the deltas that the
- # app ignores later in the process.
- deltas.forEach (delta) ->
- if delta.attributes
- Object.defineProperty(delta.attributes, '_delta', { get: -> delta })
- {create, modify, destroy} = @_clusterDeltas(deltas)
- # Apply all the deltas to create objects. Gets promises for handling
- # each type of model in the `create` hash, waits for them all to resolve.
- create[type] = @_handleModelResponse(_.values(dict)) for type, dict of create
- Promise.props(create).then (created) =>
- # Apply all the deltas to modify objects. Gets promises for handling
- # each type of model in the `modify` hash, waits for them all to resolve.
- modify[type] = @_handleModelResponse(_.values(dict)) for type, dict of modify
- Promise.props(modify).then (modified) =>
- # Now that we've persisted creates/updates, fire an action
- # that allows other parts of the app to update based on new models
- # (notifications)
- if _.flatten(_.values(created)).length > 0
- Actions.didPassivelyReceiveNewModels(created)
- # Apply all of the deletions
- destroyPromises = destroy.map(@_handleDeltaDeletion)
- Promise.settle(destroyPromises).then =>
- Actions.longPollProcessedDeltas()
- _clusterDeltas: (deltas) ->
- # Group deltas by object type so we can mutate the cache efficiently.
- # NOTE: This code must not just accumulate creates, modifies and destroys
- # but also de-dupe them. We cannot call "persistModels(itemA, itemA, itemB)"
- # or it will throw an exception - use the last received copy of each model
- # we see.
- create = {}
- modify = {}
- destroy = []
- for delta in deltas
- if delta.event is 'create'
- create[delta.object] ||= {}
- create[delta.object][delta.attributes.id] = delta.attributes
- else if delta.event is 'modify'
- modify[delta.object] ||= {}
- modify[delta.object][delta.attributes.id] = delta.attributes
- else if delta.event is 'delete'
- destroy.push(delta)
- {create, modify, destroy}
- _handleDeltaDeletion: (delta) =>
- klass = @_apiObjectToClassMap[delta.object]
- return unless klass
- DatabaseStore.find(klass, delta.id).then (model) ->
- return Promise.resolve() unless model
- return DatabaseStore.unpersistModel(model)
# Returns a Promise that resolves when any parsed out models (if any)
# have been created and persisted to the database.
_handleModelResponse: (jsons) ->
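Since the clustering logic above is leaving this file, an illustrative standalone sketch (not from the diff) of the de-duping behavior its comment describes; the sample deltas are invented:

    clusterDeltas = (deltas) ->
      create = {}; modify = {}; destroy = []
      for delta in deltas
        if delta.event is 'create'
          create[delta.object] ||= {}
          create[delta.object][delta.attributes.id] = delta.attributes
        else if delta.event is 'modify'
          modify[delta.object] ||= {}
          modify[delta.object][delta.attributes.id] = delta.attributes
        else if delta.event is 'delete'
          destroy.push(delta)
      {create, modify, destroy}

    deltas = [
      {event: 'modify', object: 'thread', attributes: {id: 't1', unread: true}}
      {event: 'modify', object: 'thread', attributes: {id: 't1', unread: false}}
      {event: 'delete', object: 'message', id: 'm9'}
    ]
    console.log clusterDeltas(deltas).modify.thread['t1'].unread   # false - only the last received copy is kept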
@@ -91,7 +91,7 @@ class RankingsJSONCache extends JSONCache
super(key: 'RankingsJSONCache', version: 1, maxAge: 60 * 60 * 1000 * 24)
refreshValue: (callback) =>
- return unless atom.isMainWindow()
+ return unless atom.isWorkWindow()
accountId = AccountStore.current()?.id
return unless accountId
@@ -15,9 +15,11 @@ PriorityUICoordinator = require '../../priority-ui-coordinator'
{tableNameForJoin,
generateTempId,
+ serializeRegisteredObjects,
+ deserializeRegisteredObjects,
isTempId} = require '../models/utils'
- DatabaseVersion = 10
+ DatabaseVersion = 59
DatabasePhase =
Setup: 'setup'
@@ -100,7 +102,7 @@ class DatabaseStore extends NylasStore
app = require('remote').getGlobal('application')
phase = app.databasePhase()
- if phase is DatabasePhase.Setup and atom.isMainWindow()
+ if phase is DatabasePhase.Setup and atom.isWorkWindow()
@_openDatabase =>
@_checkDatabaseVersion {allowNotSet: true}, =>
@_runDatabaseSetup =>
@@ -122,7 +124,7 @@ class DatabaseStore extends NylasStore
# database schema to prepare those tables. This method may be called
# extremely frequently as new models are added when packages load.
refreshDatabaseSchema: ->
- return unless atom.isMainWindow()
+ return unless atom.isWorkWindow()
app = require('remote').getGlobal('application')
phase = app.databasePhase()
if phase isnt DatabasePhase.Setup
@@ -131,7 +133,7 @@ class DatabaseStore extends NylasStore
_openDatabase: (ready) =>
return ready() if @_db
- if atom.isMainWindow()
+ if atom.isWorkWindow()
# Since only the main window calls `_runDatabaseSetup`, it's important that
# it is also the only window with permission to create the file on disk
mode = sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE
@@ -166,6 +168,7 @@ class DatabaseStore extends NylasStore
_handleSetupError: (err) =>
console.error(err)
+ console.log(atom.getWindowType())
app = require('remote').getGlobal('application')
app.rebuildDatabase()
@@ -556,14 +559,16 @@ class DatabaseStore extends NylasStore
@_triggerSoon({objectClass: newModel.constructor.name, objects: [oldModel, newModel], type: 'swap'})
persistJSONObject: (key, json) ->
+ jsonString = serializeRegisteredObjects(json)
@_query(BEGIN_TRANSACTION)
- @_query("REPLACE INTO `JSONObject` (`key`,`data`) VALUES (?,?)", [key, JSON.stringify(json)])
+ @_query("REPLACE INTO `JSONObject` (`key`,`data`) VALUES (?,?)", [key, jsonString])
@_query(COMMIT)
@trigger({objectClass: 'JSONObject', objects: [{key: key, json: json}], type: 'persist'})
findJSONObject: (key) ->
@_query("SELECT `data` FROM `JSONObject` WHERE key = ? LIMIT 1", [key]).then (results) =>
return Promise.resolve(null) unless results[0]
- data = JSON.parse(results[0].data)
+ data = deserializeRegisteredObjects(results[0].data)
Promise.resolve(data)
########################################################################
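Illustrative sketch (not from the diff above) of how a caller might use the JSONObject helpers; the key name is invented, and it assumes DatabaseStore is available via nylas-exports and that writes happen where the database is writable:

    {DatabaseStore} = require 'nylas-exports'

    # Persist an arbitrary blob under a key; registered model objects inside it
    # go through serializeRegisteredObjects before hitting SQLite.
    DatabaseStore.persistJSONObject('my-package-state', {lastRunAt: Date.now()})

    # Any window can read it back, or observe changes via DatabaseStore triggers.
    DatabaseStore.findJSONObject('my-package-state').then (json) ->
      console.log json?.lastRunAt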
@@ -79,7 +79,7 @@ class DraftStore
#
# As a result, we keep track of the intermediate time between when we
# request to queue something, and when it appears on the queue.
- @_pendingEnqueue = {}
+ @_draftsSending = {}
ipc.on 'mailto', @_onHandleMailtoLink
@@ -112,10 +112,7 @@ class DraftStore
# Public: Look up the sending state of the given draft Id.
# In popout windows the existence of the window is the sending state.
isSendingDraft: (draftLocalId) ->
- if atom.isMainWindow()
- task = TaskQueue.findTask(SendDraftTask, {draftLocalId})
- return task? or @_pendingEnqueue[draftLocalId]
- else return @_pendingEnqueue[draftLocalId]
+ return @_draftsSending[draftLocalId]?
###
Composer Extensions
@@ -394,28 +391,15 @@ class DraftStore
# The user request to send the draft
_onSendDraft: (draftLocalId) =>
- @_pendingEnqueue[draftLocalId] = true
+ @_draftsSending[draftLocalId] = true
@trigger(draftLocalId)
@sessionForLocalId(draftLocalId).then (session) =>
@_runExtensionsBeforeSend(session)
# Immediately save any pending changes so we don't save after sending
session.changes.commit().then =>
task = new SendDraftTask(draftLocalId, {fromPopout: @_isPopout()})
- if atom.isMainWindow()
- # We need to wait for performLocal to finish before `trigger`ing.
- # Only when `performLocal` is done will the task be on the
- # TaskQueue. When we `trigger` listeners should be able to call
- # `isSendingDraft` and have it accurately return true.
- task.waitForPerformLocal().then =>
- # As far as this window is concerned, we're not making any more
- # edits and are destroying the session. If there are errors down
- # the line, we'll make a new session and handle them later
- @_doneWithSession(session)
- @_pendingEnqueue[draftLocalId] = false
- @trigger(draftLocalId)
Actions.queueTask(task)
@_doneWithSession(session)
atom.close() if @_isPopout()
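Illustrative sketch (not from the diff above) of what the simplified sending flag means for callers; the draft id is invented and DraftStore is assumed to come from nylas-exports:

    {DraftStore} = require 'nylas-exports'

    # After _onSendDraft has run for this draft the flag stays set; there is no
    # longer a TaskQueue lookup behind this call, so it is cheap in any window.
    if DraftStore.isSendingDraft('local-draft-123')   # hypothetical draft id
      console.log 'Send already in flight - disable the Send button'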
src/flux/stores/nylas-sync-status-store.coffee (new file, 40 lines)
@@ -0,0 +1,40 @@
+ _ = require 'underscore'
+ AccountStore = require './account-store'
+ DatabaseStore = require './database-store'
+ NylasStore = require 'nylas-store'
+ class NylasSyncStatusStore extends NylasStore
+ constructor: ->
+ @_statesByAccount = {}
+ @listenTo AccountStore, @_onAccountsChanged
+ @listenTo DatabaseStore, @_onChange
+ @_onAccountsChanged()
+ _onAccountsChanged: =>
+ promises = []
+ AccountStore.items().forEach (item) =>
+ return if @_statesByAccount[item.id]
+ promises.push DatabaseStore.findJSONObject("NylasSyncWorker:#{item.id}").then (json) =>
+ @_statesByAccount[item.id] = json ? {}
+ Promise.all(promises).then =>
+ @trigger()
+ _onChange: (change) =>
+ if change.objectClass is 'JSONObject' and change.objects[0].key.indexOf('NylasSyncWorker') is 0
+ [worker, accountId] = change.objects[0].key.split(':')
+ @_statesByAccount[accountId] = change.objects[0].json
+ @trigger()
+ state: =>
+ @_statesByAccount
+ busy: =>
+ for accountId, states of @_statesByAccount
+ for key, state of states
+ if state.busy
+ return true
+ false
+ module.exports = new NylasSyncStatusStore()
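Illustrative sketch (not from the diff above) of reading the read-only sync state from another window; assumes the store is exposed through nylas-exports:

    {NylasSyncStatusStore} = require 'nylas-exports'

    # A status indicator might re-render whenever the persisted sync state changes:
    unsubscribe = NylasSyncStatusStore.listen ->
      console.log 'Any account still syncing?', NylasSyncStatusStore.busy()
      console.log 'Per-account state:', NylasSyncStatusStore.state()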
src/flux/stores/task-queue-status-store.coffee (new file, 39 lines)
@@ -0,0 +1,39 @@
+ _ = require 'underscore'
+ NylasStore = require 'nylas-store'
+ DatabaseStore = require './database-store'
+ AccountStore = require './account-store'
+ TaskQueue = require './task-queue'
+ # Public: The TaskQueueStatusStore allows you to inspect the task queue from
+ # any window, even though the queue itself only runs in the work window.
+ #
+ class TaskQueueStatusStore extends NylasStore
+ constructor: ->
+ @_queue = []
+ @_waiting = []
+ @listenTo DatabaseStore, @_onChange
+ DatabaseStore.findJSONObject(TaskQueue.JSONObjectStorageKey).then (json) =>
+ @_queue = json || []
+ @trigger()
+ _onChange: (change) =>
+ if change.objectClass is 'JSONObject' and change.objects[0].key is 'task-queue'
+ @_queue = change.objects[0].json
+ @_waiting = @_waiting.filter ({taskId, resolve}) =>
+ task = _.findWhere(@_queue, {id: taskId})
+ if not task or task.queueState.localComplete
+ resolve()
+ return false
+ return true
+ @trigger()
+ queue: ->
+ @_queue
+ waitForPerformLocal: (task) ->
+ new Promise (resolve, reject) =>
+ @_waiting.push({taskId: task.id, resolve: resolve})
+ module.exports = new TaskQueueStatusStore()
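Illustrative sketch (not from the diff above) of the intended calling pattern for the store; `MyTask` is a hypothetical Task subclass and nylas-exports is assumed to expose both names:

    {Actions, TaskQueueStatusStore} = require 'nylas-exports'

    task = new MyTask()        # hypothetical Task subclass
    Actions.queueTask(task)

    # Resolves once the task's performLocal has completed in the work window and
    # the serialized queue has round-tripped through the database:
    TaskQueueStatusStore.waitForPerformLocal(task).then ->
      console.log "#{task.constructor.name} is now on the queue"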
@@ -10,12 +10,16 @@ Task = require "../tasks/task"
Utils = require "../models/utils"
Reflux = require 'reflux'
Actions = require '../actions'
TaskRegistry = require '../../task-registry'
+ DatabaseStore = require './database-store'
{APIError,
TimeoutError} = require '../errors'
- if not atom.isMainWindow() and not atom.inSpecMode() then return
+ JSONObjectStorageKey = 'task-queue'
+ if not atom.isWorkWindow() and not atom.inSpecMode()
+ module.exports = {JSONObjectStorageKey}
+ return
###
Public: The TaskQueue is a Flux-compatible Store that manages a queue of {Task}
@@ -71,9 +75,10 @@ class TaskQueue
@_queue = []
@_completed = []
- @_restoreQueueFromDisk()
+ @_restoreQueue()
@listenTo(Actions.queueTask, @enqueue)
+ @listenTo(Actions.undoTaskId, @enqueueUndoOfTaskId)
@listenTo(Actions.dequeueTask, @dequeue)
@listenTo(Actions.dequeueAllTasks, @dequeueAll)
@listenTo(Actions.dequeueMatchingTask, @dequeueMatching)
@@ -115,15 +120,14 @@ class TaskQueue
@_dequeueObsoleteTasks(task)
task.runLocal().then =>
@_queue.push(task)
- # We want to make sure the task has made it onto the queue before
- # `performLocalComplete` runs. Code in the `performLocalComplete`
- # callback might depend on knowing that the Task is present in the
- # queue. For example, when we're sending a message I want to know if
- # there's already a task on the queue so I don't double-send.
- task.performLocalComplete()
@_updateSoon()
+ enqueueUndoOfTaskId: (taskId) =>
+ task = _.findWhere(@_queue, {id: taskId})
+ task ?= _.findWhere(@_completed, {id: taskId})
+ if task
+ @enqueue(task.createUndoTask())
dequeue: (taskOrId) =>
task = @_resolveTaskArgument(taskOrId)
if not task
@@ -190,7 +194,6 @@ class TaskQueue
for otherTask in obsolete
@dequeue(otherTask)
_taskIsBlocked: (task) =>
_.any @_queue, (otherTask) ->
task.shouldWaitForTask(otherTask) and task isnt otherTask
@@ -203,38 +206,22 @@ class TaskQueue
else
return _.findWhere(@_queue, id: taskOrId)
- _restoreQueueFromDisk: =>
- try
- queueFile = path.join(atom.getConfigDirPath(), 'pending-tasks.json')
- queue = Utils.deserializeRegisteredObjects(fs.readFileSync(queueFile))
+ _restoreQueue: =>
+ DatabaseStore.findJSONObject(JSONObjectStorageKey).then (queue = []) =>
# We need to set the processing bit back to false so it gets
# re-retried upon inflation
for task in queue
task.queueState ?= {}
task.queueState.isProcessing = false
@_queue = queue
- catch e
- if not atom.inSpecMode()
- console.log("Queue deserialization failed with error: #{e.toString()}")
- _saveQueueToDisk: =>
- # It's very important that we debounce saving here. When the user bulk-archives
- # items, they can easily process 1000 tasks at the same moment. We can't try to
- # save 1000 times! (Do not remove debounce without a plan!)
- @_saveDebounced ?= _.debounce =>
- queueFile = path.join(atom.getConfigDirPath(), 'pending-tasks.json')
- queueJSON = Utils.serializeRegisteredObjects((@_queue ? []))
- fs.writeFile(queueFile, queueJSON)
- , 150
- @_saveDebounced()
_updateSoon: =>
@_updateSoonThrottled ?= _.throttle =>
- @_processQueue()
- @_saveQueueToDisk()
- @trigger()
- , 10, {leading: false}
+ DatabaseStore.persistJSONObject(JSONObjectStorageKey, @_queue ? [])
+ _.defer =>
+ @_processQueue()
+ @trigger()
+ , 10
@_updateSoonThrottled()
module.exports = new TaskQueue()
@@ -22,7 +22,7 @@ class UndoRedoStore
atom.commands.add('body', {'core:redo': => @redo() })
_onTaskQueued: (task) =>
- if task.canBeUndone() and not task.isUndo()
+ if task.canBeUndone()
@_redo = []
@_undo.push(task)
@trigger() unless task._isReverting
@@ -31,7 +31,7 @@ class UndoRedoStore
topTask = @_undo.pop()
return unless topTask
@trigger()
- Actions.queueTask(topTask.createUndoTask())
+ Actions.undoTaskId(topTask.id)
@_redo.push(topTask.createIdenticalTask())
redo: =>
@@ -1,7 +1,5 @@
Reflux = require 'reflux'
_ = require 'underscore'
- remote = require 'remote'
- app = remote.require 'app'
CategoryStore = require './category-store'
AccountStore = require './account-store'
DatabaseStore = require './database-store'
@@ -88,6 +86,7 @@ UnreadCountStore = Reflux.createStore
_setBadge: (val) ->
# NOTE: Do not underestimate how long this can take. It's a synchronous
# remote call and can take ~50+msec.
- _.defer => app.dock?.setBadge?(val)
+ ipc = require 'ipc'
+ ipc.send('set-badge-value', val)
module.exports = UnreadCountStore
@@ -36,7 +36,6 @@ class DestroyDraftTask extends Task
return Promise.reject(new Error("Attempt to call DestroyDraftTask.performLocal without draftLocalId or draftId"))
find.then (draft) =>
- console.log("Found draft: ", draft)
return Promise.resolve() unless draft
@draft = draft
DatabaseStore.unpersistModel(draft)
@@ -3,6 +3,7 @@ _ = require 'underscore'
Actions = require '../actions'
DatabaseStore = require '../stores/database-store'
+ TaskQueueStatusStore = require '../stores/task-queue-status-store'
NylasAPI = require '../nylas-api'
Task = require './task'
@@ -104,7 +105,7 @@ class SyncbackDraftTask extends Task
if existingAccountDraft.accountId isnt acct.id
DestroyDraftTask = require './destroy-draft'
destroy = new DestroyDraftTask(draftId: existingAccountDraft.id)
- promise = destroy.waitForPerformLocal().then =>
+ promise = TaskQueueStatusStore.waitForPerformLocal(destroy).then =>
@detatchFromRemoteID(existingAccountDraft, acct.id).then (newAccountDraft) =>
Promise.resolve(newAccountDraft)
Actions.queueTask(destroy)
@@ -47,10 +47,6 @@ class Task
constructor: ->
@_rememberedToCallSuper = true
- @_performLocalCompletePromise = new Promise (resolve, reject) =>
- # This is called by the `TaskQueue` immediately after `performLocal`
- # has finished and the task has been added to the Queue.
- @performLocalComplete = resolve
@id = generateTempId()
@creationDate = new Date()
@@ -111,15 +107,6 @@ class Task
performRemote: ->
Promise.resolve(Task.Status.Finished)
- waitForPerformLocal: ->
- if not atom.isMainWindow()
- throw new Error("waitForPerformLocal is only supported in the main window. In
- secondary windows, tasks are serialized and sent to the main
- window, and cannot be observed.")
- if not @_performLocalCompletePromise
- throw new Error("This #{@constructor.name} Task did not call `super` in its constructor! You must call `super`")
- @_performLocalCompletePromise
cancel: ->
# We ignore requests to cancel and carry on. Subclasses that want to support
# cancellation or dequeue requests while running should implement cancel.
@@ -276,6 +276,8 @@ class PackageManager
getAvailablePackagePaths: (windowType) ->
packagePaths = []
+ loadPackagesWhenNoTypesSpecified = windowType is 'default'
for packageDirPath in @packageDirPaths
for packagePath in fs.listSync(packageDirPath)
# Ignore files in package directory
@@ -288,7 +290,11 @@ class PackageManager
packagePaths = _.filter packagePaths, (packagePath) ->
try
{windowTypes} = Package.loadMetadata(packagePath) ? {}
- return windowType of (windowTypes ? {})
+ if windowTypes
+ return windowTypes[windowType]?
+ else if loadPackagesWhenNoTypesSpecified
+ return true
+ return false
catch
return false
@@ -369,10 +375,7 @@ class PackageManager
packagePaths = _.uniq packagePaths, (packagePath) -> path.basename(packagePath)
@loadPackage(packagePath) for packagePath in packagePaths
@emit 'loaded'
- if windowType
- @emitter.emit 'did-load-window-packages', windowType
- else
- @emitter.emit 'did-load-initial-packages'
+ @emitter.emit 'did-load-initial-packages'
loadPackage: (nameOrPath) ->
return pack if pack = @getLoadedPackage(nameOrPath)
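Illustrative sketch (not from the diff above) of the filter applied to hand-written metadata objects rather than packages loaded from disk; the package names are invented:

    workerSync = {name: 'worker-sync', windowTypes: {'work': true}}
    legacyPack = {name: 'legacy-pack'}   # declares no windowTypes

    shouldLoad = (metadata, windowType) ->
      {windowTypes} = metadata ? {}
      if windowTypes
        windowTypes[windowType]?
      else
        windowType is 'default'   # packages with no windowTypes only load in the root window

    console.log shouldLoad(workerSync, 'work')      # true
    console.log shouldLoad(workerSync, 'default')   # false
    console.log shouldLoad(legacyPack, 'default')   # true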
src/react-remote/react-remote-parent.js (vendored)
@@ -161,6 +161,7 @@ ipc.on('from-react-remote-window', function(json) {
var parentListenersAttached = false;
var reactRemoteContainer = document.createElement('div');
+ reactRemoteContainer.style.left = '-10000px';
reactRemoteContainer.style.top = '40px';
reactRemoteContainer.style.backgroundColor = 'white';
reactRemoteContainer.style.position = 'absolute';
reactRemoteContainer.style.zIndex = 10000;
@@ -58,17 +58,29 @@ class WindowEventHandler
@subscribe $(window), 'unload', -> atom.removeEditorWindow()
- @subscribeToCommand $(window), 'window:toggle-full-screen', -> atom.toggleFullScreen()
+ @subscribeToCommand $(window), 'window:toggle-full-screen', ->
+ atom.toggleFullScreen()
- @subscribeToCommand $(window), 'window:close', -> atom.close()
+ @subscribeToCommand $(window), 'window:close', ->
+ atom.close()
@subscribeToCommand $(window), 'window:reload', =>
@reloadRequested = true
atom.reload()
- @subscribeToCommand $(window), 'window:toggle-dev-tools', -> atom.toggleDevTools()
+ @subscribeToCommand $(window), 'window:toggle-dev-tools', ->
+ atom.toggleDevTools()
- @subscribeToCommand $(window), 'window:open-errorreporter-logs', -> atom.errorReporter.openLogs()
+ @subscribeToCommand $(window), 'window:open-errorreporter-logs', ->
+ atom.errorReporter.openLogs()
+ @subscribeToCommand $(window), 'window:toggle-component-regions', ->
+ ComponentRegistry = require './component-registry'
+ ComponentRegistry.toggleComponentRegions()
+ @subscribeToCommand $(window), 'window:toggle-react-remote', ->
+ ReactRemote = require './react-remote/react-remote-parent'
+ ReactRemote.toggleContainerVisible()
if process.platform in ['win32', 'linux']
@subscribeToCommand $(window), 'window:toggle-menu-bar', ->
File diff suppressed because one or more lines are too long
@@ -3,6 +3,7 @@
// global scope. We need to do it here before React loads.
window.__REACT_DEVTOOLS_GLOBAL_HOOK__ = {}
function registerRuntimeTranspilers(hotreload) {
// This sets require.extensions['.coffee'].
require('coffee-script').register();