Remove isomorphic-core and related packages

Ben Gotow 2017-06-24 20:34:01 -07:00
parent d53606922f
commit 3c56e2fbfd
50 changed files with 26 additions and 3303 deletions

View file

@ -43,7 +43,7 @@
"semi": "off",
"no-mixed-operators": "off",
"import/extensions": ["error", "never", { "json": "always" }],
"import/no-unresolved": ["error", {"ignore": ["nylas-exports", "nylas-component-kit", "electron", "nylas-store", "react-dom/server", "nylas-observables", "windows-shortcuts", "moment-round", "better-sqlite3", "chrono-node", "event-kit", "enzyme", "isomorphic-core"]}],
"import/no-unresolved": ["error", {"ignore": ["nylas-exports", "nylas-component-kit", "electron", "nylas-store", "react-dom/server", "nylas-observables", "windows-shortcuts", "moment-round", "better-sqlite3", "chrono-node", "event-kit", "enzyme"]}],
"import/no-extraneous-dependencies": "off",
"import/newline-after-import": "off",
"import/prefer-default-export": "off",

.gitignore
View file

@ -48,10 +48,6 @@ spec-saved-state.json
# Symlinked Jasmine config files
**/jasmine/config.json
!packages/isomorphic-core/spec/jasmine/config.json
# Symlinked isomorphic-core Specs
packages/client-app/spec/isomorphic-core
# Elastic Beanstalk Files
.elasticbeanstalk/*

View file

@ -53,7 +53,7 @@
"upload-client": "grunt upload-client --gruntfile=packages/client-app/build/Gruntfile.js --base=./",
"cloud": "pm2 stop all; pm2 delete all; pm2 start packages/cloud-core/pm2-dev.yml --no-daemon",
"cloud-debug": "pm2 stop all; pm2 delete all; pm2 start packages/cloud-core/pm2-debug-cloud-api.yml --no-daemon",
"test-cloud": "cd packages/cloud-api && npm test && cd ../cloud-core && npm test && cd ../cloud-workers && npm test && cd ../isomorphic-core && npm test",
"test-cloud": "cd packages/cloud-api && npm test && cd ../cloud-core && npm test && cd ../cloud-workers && npm test",
"stop": "npm run stop-cloud",
"stop-cloud": "pm2 stop all; pm2 delete all;",
"build-cloud": "docker build .",

View file

@ -2,12 +2,6 @@
"ignore_dirs": [
"build/node_modules",
"apm/node_modules",
"node_modules",
"src/K2/node_modules",
"src/K2/packages/local-sync/node_modules",
"src/K2/packages/isomorphic-core/node_modules",
"src/K2/packages/cloud-api/node_modules",
"src/K2/packages/cloud-workers/node_modules",
"src/K2/packages/cloud-core/node_modules"
"node_modules"
]
}

View file

@ -692,8 +692,6 @@
3485716: src/flux/attributes/attribute-collection.js
3490067: src/flux/attributes/attribute-joined-data.js
3490067: src/flux/attributes/attribute-joined-data.js
3492659: src/flux/attributes/attribute-serverid.js
3492659: src/flux/attributes/attribute-serverid.js
3494243: src/flux/actions.js
3449351: src/flux/models/model.js
3494243: src/flux/actions.js
@ -735,7 +733,6 @@
3536620: node_modules/reflux/src/listenTo.js
3536620: node_modules/reflux/src/listenTo.js
3538124: node_modules/reflux/src/listenToMany.js
3492659: src/flux/attributes/attribute-serverid.js
3538124: node_modules/reflux/src/listenToMany.js
3494243: src/flux/actions.js
3509768: node_modules/reflux/package.json

View file

@ -89,17 +89,6 @@ module.exports = (grunt) => {
callback();
}
/**
* We don't need the K2 folder anymore since the previous step hard
* copied the client-sync package (and its isomorphic-core dependency)
* into /internal_packages. The remains of the folder are N1-Cloud
* pieces that aren't necessary
*/
function removeUnnecessaryFiles(buildPath, electronVersion, platform, arch, callback) {
fs.removeSync(path.join(buildPath, 'src', 'K2'))
callback();
}
function runTranspilers(buildPath, electronVersion, platform, arch, callback) {
console.log("---> Running babel and coffeescript transpilers")
@ -263,7 +252,6 @@ module.exports = (grunt) => {
runCopyPlatformSpecificResources,
runCopyAPM,
runCopySymlinkedPackages,
removeUnnecessaryFiles,
runTranspilers,
],
},

View file

@ -1,7 +1,7 @@
/* eslint global-require: 0 */
import crypto from 'crypto';
import {CommonProviderSettings} from 'isomorphic-core'
import {CommonProviderSettings} from 'imap-provider-settings';
import {
N1CloudAPI,
NylasAPI,

View file

@ -13,13 +13,11 @@
"url": "https://github.com/nylas/nylas-mail/issues"
},
"dependencies": {
"analytics-node": "2.x.x",
"async": "^0.9",
"babel-core": "6.22.0",
"babel-preset-electron": "1.4.15",
"babel-preset-react": "6.22.0",
"babel-regenerator-runtime": "6.5.0",
"base64-stream": "0.1.3",
"better-sqlite3": "bengotow/better-sqlite3#a888061ad334c76d2db4c06554c90785cc6e7cce",
"bluebird": "3.4.x",
"chromium-net-errors": "1.0.3",
@ -37,32 +35,18 @@
"emoji-data": "^0.2.0",
"encoding": "0.1.12",
"enzyme": "2.7.1",
"esdoc": "^0.5.2",
"esdoc-es7-plugin": "0.0.3",
"event-kit": "^1.0.2",
"fs-plus": "^2.3.2",
"getmac": "1.x.x",
"googleapis": "9.0.0",
"guid": "0.0.10",
"hapi": "16.1.0",
"hapi-auth-basic": "^4.2.0",
"hapi-boom-decorators": "2.2.2",
"hapi-plugin-websocket": "^0.9.2",
"hapi-swagger": "7.6.0",
"he": "1.1.0",
"iconv": "2.2.1",
"immutable": "3.7.5",
"inert": "4.0.0",
"is-online": "6.1.0",
"isomorphic-core": "0.x.x",
"imap-provider-settings": "github:nylas/imap-provider-settings#2fdcd34d59b",
"jasmine-json": "~0.0",
"jasmine-react-helpers": "^0.2",
"jasmine-reporters": "1.x.x",
"jasmine-tagged": "^1.1.2",
"joi": "8.4.2",
"jsx-transform": "^2.3.0",
"juice": "^1.4",
"kbpgp": "^2.0.52",
"keytar": "3.0.0",
"less-cache": "0.21",
"lru-cache": "^4.0.1",
@ -73,16 +57,13 @@
"moment-round": "^1.0.1",
"moment-timezone": "0.5.4",
"mousetrap": "^1.5.3",
"nock": "^2",
"node-emoji": "^1.2.1",
"node-uuid": "^1.4",
"nslog": "^3",
"optimist": "0.4.0",
"papaparse": "^4.1.2",
"pathwatcher": "~6.2",
"pick-react-known-prop": "0.x.x",
"promise-queue": "2.1.1",
"property-accessors": "^1",
"proxyquire": "1.3.1",
"q": "^1.0.1",
"raven": "1.1.4",
@ -99,9 +80,7 @@
"rx-lite": "4.0.8",
"rx-lite-testing": "^4.0.7",
"sanitize-html": "1.9.0",
"season": "^5.1",
"semver": "^4.2",
"sequelize": "nylas/sequelize#nylas-3.40.0",
"simplemde": "jstejada/simplemde-markdown-editor#input-style-support",
"source-map-support": "^0.3.2",
"sqlite3": "https://github.com/bengotow/node-sqlite3/archive/bengotow/usleep-v3.1.4.tar.gz",
@ -109,11 +88,12 @@
"tld": "^0.0.2",
"underscore": "1.8.x",
"underscore.string": "^3.0",
"vision": "4.1.0",
"windows-shortcuts": "emorikawa/windows-shortcuts#b0a0fc7"
},
"devDependencies": {
"donna": "^1.0.15",
"esdoc": "^0.5.2",
"esdoc-es7-plugin": "0.0.3",
"gitbook": "^3.2.2",
"gitbook-cli": "^2.3.0",
"gitbook-plugin-anchors": "^0.7.1",

View file

@ -0,0 +1 @@
/Users/bengotow/Work/F376/Projects/Nylas2/nylas-mail/packages/isomorphic-core/spec

View file

@ -3,9 +3,9 @@ import path from 'path';
import createDebug from 'debug';
import childProcess from 'child_process';
import PromiseQueue from 'promise-queue';
import {remote, ipcRenderer} from 'electron';
import {remote} from 'electron';
import LRU from "lru-cache";
import {StringUtils, ExponentialBackoffScheduler} from 'isomorphic-core';
import {ExponentialBackoffScheduler} from '../../backoff-schedulers';
import NylasStore from '../../global/nylas-store';
import Utils from '../models/utils';
@ -24,6 +24,17 @@ const MAX_RETRY_LOCK_DELAY = 500;
let JSONBlob = null;
function trimTo(str, size) {
const g = window || global || {}
const TRIM_SIZE = size || process.env.TRIM_SIZE || g.TRIM_SIZE || 256;
let trimmed = str;
if (str.length >= TRIM_SIZE) {
trimmed = `${str.slice(0, TRIM_SIZE / 2)}…${str.slice(str.length - TRIM_SIZE / 2, str.length)}`
}
return trimmed
}
/*
Public: N1 is built on top of a custom database layer modeled after
ActiveRecord. For many parts of the application, the database is the source

View file

@ -1,7 +1,7 @@
import isOnline from 'is-online'
import NylasStore from 'nylas-store'
import {ExponentialBackoffScheduler} from 'isomorphic-core'
import Actions from '../actions'
import isOnline from 'is-online';
import NylasStore from 'nylas-store';
import {ExponentialBackoffScheduler} from '../../backoff-schedulers';
import Actions from '../actions';
const CHECK_ONLINE_INTERVAL = 30 * 1000

View file

@ -7,8 +7,7 @@ import * as NylasAPIHelpers from '../flux/nylas-api-helpers'
/**
* This ingests deltas from multiple sources. One is from local-sync, the
* other is from n1-cloud. Both sources use
* isomorphic-core/src/delta-stream-builder to generate the delta stream.
* other is from n1-cloud. Both sources use delta-stream-builder to generate the delta stream.
*
* In both cases we are given the JSON serialized form of a `Transaction`
* model. An example Thread delta would look like:

View file

@ -1,28 +0,0 @@
# Isomorphic Core
Isomorphic refers to JavaScript that can be run on both the client and the
server.
This is shared code for mail and utilities that is designed to run both on
deployed cloud servers and from within the Nylas Mail client.
Use through a regular import, e.g. `import {IMAPConnection} from 'isomorphic-core'`.
It is required as a dependency in the package.json of other modules.
This library isn't on the npm registry, but works as a dependency thanks to
`lerna bootstrap`
See index.js for what gets explicitly exported by this library.
## Important Usage Notes:
Since this code runs in both the client and the server, you must be careful
with what libraries you use. Some common gotchas:
- You can't use `NylasEnv` or `NylasExports`. These are injected only in the
client.
- If you require a 3rd party library, it must be added to the "dependencies" of
isomorphic-core's `package.json`
- You may use modern javascript syntax. Both the client and server get compiled
with the same .babelrc setting
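For reference, a short illustrative example of how client and cloud code consumed this package before this commit (both named exports appear in the import sites changed above and in the index.js deleted just below; the constructor values here are made up):
import {CommonProviderSettings, ExponentialBackoffScheduler} from 'isomorphic-core'
// illustrative values; real call sites pass their own delays
const backoff = new ExponentialBackoffScheduler({baseDelay: 15 * 1000, maxDelay: 5 * 60 * 1000})
const gmailDefaults = CommonProviderSettings.gmail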

View file

@ -1,31 +0,0 @@
/* eslint global-require: 0 */
module.exports = {
Provider: {
Gmail: 'gmail',
IMAP: 'imap',
},
Imap: require('imap'),
Errors: require('./src/errors'),
IMAPErrors: require('./src/imap-errors'),
SMTPErrors: require('./src/smtp-errors'),
loadModels: require('./src/load-models'),
AuthHelpers: require('./src/auth-helpers'),
PromiseUtils: require('./src/promise-utils'),
DatabaseTypes: require('./src/database-types'),
IMAPConnection: require('./src/imap-connection').default,
IMAPConnectionPool: require('./src/imap-connection-pool'),
SendmailClient: require('./src/sendmail-client'),
DeltaStreamBuilder: require('./src/delta-stream-builder'),
HookTransactionLog: require('./src/hook-transaction-log'),
HookIncrementVersionOnSave: require('./src/hook-increment-version-on-save'),
BackoffScheduler: require('./src/backoff-schedulers').BackoffScheduler,
ExponentialBackoffScheduler: require('./src/backoff-schedulers').ExponentialBackoffScheduler,
CommonProviderSettings: require('imap-provider-settings').CommonProviderSettings,
MetricsReporter: require('./src/metrics-reporter').default,
MessageUtils: require('./src/message-utils'),
ModelUtils: require('./src/model-utils').default,
executeJasmine: require('./spec/jasmine/execute').default,
StringUtils: require('./src/string-utils'),
TLSUtils: require('./src/tls-utils'),
DBUtils: require('./src/db-utils'),
}

View file

@ -1,30 +0,0 @@
{
"name": "isomorphic-core",
"version": "0.0.1",
"description": "Packages use isomorphically on n1-cloud and client-sync",
"main": "index.js",
"scripts": {
"test": "babel-node spec/run.es6"
},
"dependencies": {
"atob": "2.0.3",
"btoa": "1.1.2",
"imap": "github:jstejada/node-imap#fix-parse-body-list",
"imap-provider-settings": "github:nylas/imap-provider-settings#2fdcd34d59b",
"jasmine": "2.x.x",
"joi": "8.4.2",
"libhoney": "1.0.0-beta.2",
"nodemailer": "2.5.0",
"promise-props": "1.0.0",
"promise.prototype.finally": "1.0.1",
"rx-lite": "4.0.8",
"sequelize": "3.28.0",
"underscore": "1.8.3",
"xoauth2": "1.2.0",
"he": "1.1.0",
"iconv": "2.2.1",
"mimelib": "0.2.19"
},
"author": "Nylas",
"license": "ISC"
}

View file

@ -1,71 +0,0 @@
import {BackoffScheduler, ExponentialBackoffScheduler} from '../src/backoff-schedulers'
describe('BackoffSchedulers', function describeBlock() {
describe('BackoffScheduler', () => {
function linearBackoff(base, numTries) {
return base * numTries
}
it('calculates the next delay correctly with no jitter', () => {
const scheduler = new BackoffScheduler({
jitter: false,
baseDelay: 2,
maxDelay: 5,
getNextBackoffDelay: linearBackoff,
})
expect(scheduler.nextDelay()).toEqual(0)
expect(scheduler.nextDelay()).toEqual(2)
expect(scheduler.nextDelay()).toEqual(4)
expect(scheduler.nextDelay()).toEqual(5)
expect(scheduler.nextDelay()).toEqual(5)
})
it('calculates the next delay correctly with jitter', () => {
spyOn(Math, 'random').andReturn(0.5)
const scheduler = new BackoffScheduler({
jitter: true,
baseDelay: 2,
maxDelay: 5,
getNextBackoffDelay: linearBackoff,
})
expect(scheduler.nextDelay()).toEqual(0)
expect(scheduler.nextDelay()).toEqual(1)
expect(scheduler.nextDelay()).toEqual(2)
expect(scheduler.nextDelay()).toEqual(3)
expect(scheduler.nextDelay()).toEqual(4)
expect(scheduler.nextDelay()).toEqual(5)
expect(scheduler.nextDelay()).toEqual(5)
})
});
describe('ExponentialBackoffScheduler', () => {
it('calculates the next delay correctly with no jitter', () => {
const scheduler = new ExponentialBackoffScheduler({
jitter: false,
baseDelay: 2,
maxDelay: 10,
})
expect(scheduler.nextDelay()).toEqual(2)
expect(scheduler.nextDelay()).toEqual(4)
expect(scheduler.nextDelay()).toEqual(8)
expect(scheduler.nextDelay()).toEqual(10)
expect(scheduler.nextDelay()).toEqual(10)
})
it('calculates the next delay correctly with jitter', () => {
spyOn(Math, 'random').andReturn(0.5)
const scheduler = new ExponentialBackoffScheduler({
jitter: true,
baseDelay: 2,
maxDelay: 10,
})
expect(scheduler.nextDelay()).toEqual(1)
expect(scheduler.nextDelay()).toEqual(2)
expect(scheduler.nextDelay()).toEqual(4)
expect(scheduler.nextDelay()).toEqual(8)
expect(scheduler.nextDelay()).toEqual(10)
expect(scheduler.nextDelay()).toEqual(10)
})
});
});
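The schedulers exercised by these specs lived in src/backoff-schedulers.js, which is not reproduced in this diff. The following is a minimal sketch inferred from the expectations above, not the actual source:
class BackoffScheduler {
  constructor({baseDelay, maxDelay, getNextBackoffDelay, jitter = true} = {}) {
    this._baseDelay = baseDelay
    this._maxDelay = maxDelay
    this._getNextBackoffDelay = getNextBackoffDelay
    this._jitter = jitter
    this._numTries = 0
  }
  nextDelay() {
    // compute the raw delay, optionally scale it by a random factor, then cap it
    let delay = this._getNextBackoffDelay(this._baseDelay, this._numTries)
    if (this._jitter) {
      delay *= Math.random()
    }
    this._numTries += 1
    return Math.min(delay, this._maxDelay)
  }
}
class ExponentialBackoffScheduler extends BackoffScheduler {
  constructor(options = {}) {
    super(Object.assign({}, options, {
      getNextBackoffDelay: (base, numTries) => base * Math.pow(2, numTries),
    }))
  }
}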

View file

@ -1,204 +0,0 @@
import IMAPConnectionPool from '../src/imap-connection-pool';
import IMAPConnection from '../src/imap-connection';
import IMAPErrors from '../src/imap-errors';
describe('IMAPConnectionPool', function describeBlock() {
beforeEach(() => {
this.account = {
id: 'test-account',
decryptedCredentials: () => { return {}; },
connectionSettings: {
imap_host: 'imap.foobar.com',
},
};
IMAPConnectionPool._poolMap = {};
this.logger = {};
spyOn(IMAPConnection.prototype, 'connect').andCallFake(function connectFake() {
return this;
});
spyOn(IMAPConnection.prototype, 'end').andCallFake(() => {});
});
it('opens IMAP connection and properly returns to pool at end of scope', async () => {
let invokedCallback = false;
await IMAPConnectionPool.withConnectionsForAccount(this.account, {
desiredCount: 1,
logger: this.logger,
socketTimeout: 5 * 1000,
onConnected: ([conn]) => {
expect(conn instanceof IMAPConnection).toBe(true);
invokedCallback = true;
return false;
},
});
expect(invokedCallback).toBe(true);
expect(IMAPConnection.prototype.connect.calls.length).toBe(1);
expect(IMAPConnection.prototype.end.calls.length).toBe(0);
});
it('opens multiple IMAP connections and properly returns to pool at end of scope', async () => {
let invokedCallback = false;
await IMAPConnectionPool.withConnectionsForAccount(this.account, {
desiredCount: 2,
logger: this.logger,
socketTimeout: 5 * 1000,
onConnected: ([conn, otherConn]) => {
expect(conn instanceof IMAPConnection).toBe(true);
expect(otherConn instanceof IMAPConnection).toBe(true);
invokedCallback = true;
return false;
},
});
expect(invokedCallback).toBe(true);
expect(IMAPConnection.prototype.connect.calls.length).toBe(2);
expect(IMAPConnection.prototype.end.calls.length).toBe(0);
});
it('opens an IMAP connection properly and only returns to pool on done', async () => {
let invokedCallback = false;
let doneCallback = null;
await IMAPConnectionPool.withConnectionsForAccount(this.account, {
desiredCount: 1,
logger: this.logger,
socketTimeout: 5 * 1000,
onConnected: ([conn], done) => {
expect(conn instanceof IMAPConnection).toBe(true);
invokedCallback = true;
doneCallback = done;
return true;
},
});
expect(invokedCallback).toBe(true);
expect(IMAPConnection.prototype.connect.calls.length).toBe(1);
expect(IMAPConnection.prototype.end.calls.length).toBe(0);
expect(IMAPConnectionPool._poolMap[this.account.id]._availableConns.length === 2);
doneCallback();
expect(IMAPConnectionPool._poolMap[this.account.id]._availableConns.length === 3);
});
it('does not call connect if already connected', async () => {
let invokedCallback = false;
await IMAPConnectionPool.withConnectionsForAccount(this.account, {
desiredCount: 1,
logger: this.logger,
socketTimeout: 5 * 1000,
onConnected: ([conn]) => {
expect(conn instanceof IMAPConnection).toBe(true);
invokedCallback = true;
return false;
},
});
expect(invokedCallback).toBe(true);
expect(IMAPConnection.prototype.connect.calls.length).toBe(1);
expect(IMAPConnection.prototype.end.calls.length).toBe(0);
invokedCallback = false;
await IMAPConnectionPool.withConnectionsForAccount(this.account, {
desiredCount: 1,
logger: this.logger,
socketTimeout: 5 * 1000,
onConnected: ([conn]) => {
expect(conn instanceof IMAPConnection).toBe(true);
invokedCallback = true;
return false;
},
});
expect(invokedCallback).toBe(true);
expect(IMAPConnection.prototype.connect.calls.length).toBe(1);
expect(IMAPConnection.prototype.end.calls.length).toBe(0);
});
it('waits for an available IMAP connection', async () => {
let invokedCallback = false;
let doneCallback = null;
await IMAPConnectionPool.withConnectionsForAccount(this.account, {
desiredCount: 3,
logger: this.logger,
socketTimeout: 5 * 1000,
onConnected: ([conn], done) => {
expect(conn instanceof IMAPConnection).toBe(true);
invokedCallback = true;
doneCallback = done;
return true;
},
});
expect(invokedCallback).toBe(true);
expect(IMAPConnection.prototype.connect.calls.length).toBe(3);
expect(IMAPConnection.prototype.end.calls.length).toBe(0);
invokedCallback = false;
const promise = IMAPConnectionPool.withConnectionsForAccount(this.account, {
desiredCount: 1,
logger: this.logger,
socketTimeout: 5 * 1000,
onConnected: ([conn]) => {
expect(conn instanceof IMAPConnection).toBe(true);
invokedCallback = true;
return false;
},
});
expect(IMAPConnectionPool._poolMap[this.account.id]._queue.length).toBe(1)
doneCallback();
await promise;
expect(invokedCallback).toBe(true);
expect(IMAPConnection.prototype.connect.calls.length).toBe(3);
expect(IMAPConnection.prototype.end.calls.length).toBe(0);
});
it('does not retry on IMAP connection timeout', async () => {
let invokeCount = 0;
try {
await IMAPConnectionPool.withConnectionsForAccount(this.account, {
desiredCount: 1,
logger: this.logger,
socketTimeout: 5 * 1000,
onConnected: ([conn]) => {
expect(conn instanceof IMAPConnection).toBe(true);
if (invokeCount === 0) {
invokeCount += 1;
throw new IMAPErrors.IMAPConnectionTimeoutError();
}
invokeCount += 1;
return false;
},
});
} catch (err) {
expect(err instanceof IMAPErrors.IMAPConnectionTimeoutError).toBe(true);
}
expect(invokeCount).toBe(1);
expect(IMAPConnection.prototype.connect.calls.length).toBe(1);
expect(IMAPConnection.prototype.end.calls.length).toBe(1);
});
it('does not retry on other IMAP error', async () => {
let invokeCount = 0;
let errorCount = 0;
try {
await IMAPConnectionPool.withConnectionsForAccount(this.account, {
desiredCount: 1,
logger: this.logger,
socketTimeout: 5 * 1000,
onConnected: ([conn]) => {
expect(conn instanceof IMAPConnection).toBe(true);
if (invokeCount === 0) {
invokeCount += 1;
throw new IMAPErrors.IMAPSocketError();
}
invokeCount += 1;
return false;
},
});
} catch (err) {
errorCount += 1;
}
expect(invokeCount).toBe(1);
expect(errorCount).toBe(1);
expect(IMAPConnection.prototype.connect.calls.length).toBe(1);
expect(IMAPConnection.prototype.end.calls.length).toBe(1);
});
});

View file

@ -1,11 +0,0 @@
{
"spec_dir": "spec",
"spec_files": [
"**/*[sS]pec.{js,es6}"
],
"helpers": [
"helpers/**/*.{js,es6}"
],
"stopSpecOnExpectationFailure": false,
"random": false
}

View file

@ -1,10 +0,0 @@
import Jasmine from 'jasmine'
import JasmineExtensions from './extensions'
export default function execute(extendOpts) {
const jasmine = new Jasmine()
jasmine.loadConfigFile('spec/jasmine/config.json')
const jasmineExtensions = new JasmineExtensions()
jasmineExtensions.extend(extendOpts)
jasmine.execute()
}

View file

@ -1,56 +0,0 @@
import applyPolyfills from './polyfills'
export default class JasmineExtensions {
extend({beforeEach, afterEach} = {}) {
applyPolyfills()
global.it = this._makeItAsync(global.it)
global.fit = this._makeItAsync(global.fit)
global.beforeAll = this._makeEachOrAllFnAsync(global.beforeAll)
global.afterAll = this._makeEachOrAllFnAsync(global.afterAll)
global.beforeEach = this._makeEachOrAllFnAsync(global.beforeEach)
global.afterEach = this._makeEachOrAllFnAsync(global.afterEach)
if (beforeEach) {
global.beforeEach(beforeEach)
}
if (afterEach) {
global.afterEach(afterEach)
}
}
_runAsync(userFn, done) {
if (!userFn) {
done()
return true
}
const resp = userFn.apply(this);
if (resp && resp.then) {
return resp.then(done).catch((error) => {
// Throwing an error doesn't register as stopping the test. Instead, run an
// expect() that will fail and show us the error. We still need to call done()
// afterwards, or it will take the full timeout to fail.
expect(error).toBeUndefined()
done()
})
}
done()
return resp
}
_makeEachOrAllFnAsync(jasmineEachFn) {
const self = this;
return (userFn) => {
return jasmineEachFn(function asyncEachFn(done) {
self._runAsync.call(this, userFn, done)
})
}
}
_makeItAsync(jasmineIt) {
const self = this;
return (desc, userFn) => {
return jasmineIt(desc, function asyncIt(done) {
self._runAsync.call(this, userFn, done)
})
}
}
}
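In practice, these wrappers let a spec simply return a Promise (or be an async function) and have Jasmine wait on it. A small illustrative spec, assuming the extensions have been applied:
it('waits for a returned promise before completing', async () => {
  const value = await Promise.resolve(42)
  expect(value).toBe(42)
})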

View file

@ -1,19 +0,0 @@
// We use Jasmine 1 in the client tests and Jasmine 2 in the cloud tests,
// but isomorphic-core tests need to be run in both environments. Tests in
// isomorphic-core should use Jasmine 1 syntax, and then we can add polyfills
// here to make sure that they exist when we run in a Jasmine 2 environment.
export default function applyPolyfills() {
const origSpyOn = global.spyOn;
// There's no prototype to modify, so we have to modify the return
// values of spyOn as they're created.
global.spyOn = (object, methodName) => {
const originalValue = object[methodName]
const spy = origSpyOn(object, methodName)
object[methodName].originalValue = originalValue;
spy.andReturn = spy.and.returnValue;
spy.andCallFake = spy.and.callFake;
Object.defineProperty(spy.calls, 'length', {get: function getLength() { return this.count(); }})
return spy;
}
}
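With this polyfill applied, Jasmine 1-style spy assertions like the ones used in the specs above run unchanged under Jasmine 2, for example (inside a spec):
const api = {fetch() { return 'real' }}
spyOn(api, 'fetch').andReturn('stubbed')  // Jasmine 1 spelling of and.returnValue
expect(api.fetch()).toBe('stubbed')
expect(api.fetch.calls.length).toBe(1)    // `length` getter added by the polyfill above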

View file

@ -1,124 +0,0 @@
/*
const {parseFromImap, parseSnippet, parseContacts} = require('../src/message-utils');
const {forEachJSONFixture, forEachHTMLAndTXTFixture, ACCOUNT_ID, getTestDatabase} = require('./helpers');
xdescribe('MessageUtils', function MessageUtilsSpecs() {
beforeEach(() => {
waitsForPromise(async () => {
const db = await getTestDatabase()
const folder = await db.Folder.create({
id: 'test-folder-id',
accountId: ACCOUNT_ID,
version: 1,
name: 'Test Folder',
role: null,
});
this.options = { accountId: ACCOUNT_ID, db, folder };
})
})
describe("parseFromImap", () => {
forEachJSONFixture('MessageUtils/parseFromImap', (filename, json) => {
it(`should correctly build message properties for ${filename}`, () => {
const {imapMessage, desiredParts, result} = json;
// requiring these to match makes it overly arduous to generate test
// cases from real accounts
const excludeKeys = new Set(['id', 'accountId', 'folderId', 'folder', 'labels']);
waitsForPromise(async () => {
const actual = await parseFromImap(imapMessage, desiredParts, this.options);
for (const key of Object.keys(result)) {
if (!excludeKeys.has(key)) {
expect(actual[key]).toEqual(result[key]);
}
}
});
});
})
});
});
const snippetTestCases = [{
purpose: 'trim whitespace in basic plaintext',
body: '<pre>The quick brown fox\n\n\tjumps over the lazy</pre>',
snippet: 'The quick brown fox jumps over the lazy',
}, {
purpose: 'truncate long plaintext without breaking words',
body: '<pre>The quick brown fox jumps over the lazy dog and then the lazy dog rolls over and sighs. The fox turns around in a circle and then jumps onto a bush! It grins wickedly and wags its fat tail. As the lazy dog puts its head on its paws and cracks a sleepy eye open, a slow grin forms on its face. The fox has fallen into the bush and is yelping and squeaking.</pre>',
snippet: 'The quick brown fox jumps over the lazy dog and then the lazy dog rolls over and sighs. The fox turns',
}, {
purpose: 'process basic HTML correctly',
body: '<html><title>All About Ponies</title><h1>PONIES AND RAINBOWS AND UNICORNS</h1><p>Unicorns are native to the hillsides of Flatagonia.</p></html>',
snippet: 'PONIES AND RAINBOWS AND UNICORNS Unicorns are native to the hillsides of Flatagonia.',
}, {
purpose: 'properly strip rogue styling inside of <body> and trim whitespace in HTML',
body: '<html>\n <head></head>\n <body>\n <style>\n body { width: 100% !important; min-width: 100%; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: 100%; -ms-text-size-adjust: 100%; margin: 0; padding: 0; background: #fafafa;\n </style>\n <p>Look ma, no CSS!</p></body></html>',
snippet: 'Look ma, no CSS!',
}, {
purpose: 'properly process <br/> and <div/>',
body: '<p>Unicorns are <div>native</div>to the<br/>hillsides of<br/>Flatagonia.</p>',
snippet: 'Unicorns are native to the hillsides of Flatagonia.',
}, {
purpose: 'properly strip out HTML comments',
body: '<p>Unicorns are<!-- an HTML comment! -->native to the</p>',
snippet: 'Unicorns are native to the',
}, {
purpose: "don't add extraneous spaces after text format markup",
body: `
<td style="padding: 0px 10px">
Hey there, <b>Nylas</b>!<br>
You have a new follower on Product Hunt.
</td>`,
snippet: 'Hey there, Nylas! You have a new follower on Product Hunt.',
},
]
const contactsTestCases = [{
purpose: "not erroneously split contact names on commas",
// NOTE: inputs must be in same format as output by mimelib.parseHeader
input: ['"Little Bo Peep, The Hill" <bopeep@example.com>'],
output: [{name: "Little Bo Peep, The Hill", email: "bopeep@example.com"}],
}, {
purpose: "extract two separate contacts, removing quotes properly & respecing unicode",
input: ['AppleBees Zé <a@example.com>, "Tiger Zen" b@example.com'],
output: [
{name: 'AppleBees Zé', email: 'a@example.com'},
{name: 'Tiger Zen', email: 'b@example.com'},
],
}, {
purpose: "correctly concatenate multiple array elements (from multiple header lines)",
input: ['Yubi Key <yubi@example.com>', 'Smokey the Bear <smokey@example.com>'],
output: [
{name: 'Yubi Key', email: 'yubi@example.com'},
{name: 'Smokey the Bear', email: 'smokey@example.com'},
],
},
]
describe('MessageUtilsHelpers', function MessageUtilsHelperSpecs() {
describe('parseSnippet (basic)', () => {
snippetTestCases.forEach(({purpose, body, snippet}) => {
it(`should ${purpose}`, () => {
const parsedSnippet = parseSnippet(body);
expect(parsedSnippet).toEqual(snippet);
});
});
});
describe('parseSnippet (real world)', () => {
forEachHTMLAndTXTFixture('MessageUtils/parseSnippet', (filename, html, txt) => {
it(`should correctly extract the snippet from the html`, () => {
const parsedSnippet = parseSnippet(html);
expect(parsedSnippet).toEqual(txt);
});
});
});
describe('parseContacts (basic)', () => {
contactsTestCases.forEach(({purpose, input, output}) => {
it(`should ${purpose}`, () => {
const parsedContacts = parseContacts(input);
expect(parsedContacts).toEqual(output);
});
});
});
});
*/

View file

@ -1,2 +0,0 @@
import executeJasmine from './jasmine/execute'
executeJasmine()

View file

@ -1,281 +0,0 @@
/* eslint camelcase: 0 */
import _ from 'underscore'
import Joi from 'joi'
import atob from 'atob';
import nodemailer from 'nodemailer';
import {CommonProviderSettings} from 'imap-provider-settings';
import {INSECURE_TLS_OPTIONS, SECURE_TLS_OPTIONS} from './tls-utils';
import IMAPConnection from './imap-connection'
import {NylasError, RetryableError} from './errors'
import {convertSmtpError} from './smtp-errors'
const {GMAIL_CLIENT_ID, GMAIL_CLIENT_SECRET} = process.env;
const imapSmtpSettings = Joi.object().keys({
imap_host: [Joi.string().ip().required(), Joi.string().hostname().required()],
imap_port: Joi.number().integer().required(),
imap_username: Joi.string().required(),
imap_password: Joi.string().required(),
smtp_host: [Joi.string().ip().required(), Joi.string().hostname().required()],
smtp_port: Joi.number().integer().required(),
smtp_username: Joi.string().required(),
smtp_password: Joi.string().required(),
// new options - not required() for backcompat
smtp_security: Joi.string(),
imap_security: Joi.string(),
imap_allow_insecure_ssl: Joi.boolean(),
smtp_allow_insecure_ssl: Joi.boolean(),
// TODO: deprecated options - eventually remove!
smtp_custom_config: Joi.object(),
ssl_required: Joi.boolean(),
}).required();
const resolvedGmailSettings = Joi.object().keys({
xoauth2: Joi.string().required(),
expiry_date: Joi.number().integer().required(),
}).required();
const office365Settings = Joi.object().keys({
name: Joi.string().required(),
type: Joi.string().valid('office365').required(),
email: Joi.string().required(),
password: Joi.string().required(),
username: Joi.string().required(),
}).required();
export const SUPPORTED_PROVIDERS = new Set(
['gmail', 'office365', 'imap', 'icloud', 'yahoo', 'fastmail']
);
export function generateXOAuth2Token(username, accessToken) {
// See https://developers.google.com/gmail/xoauth2_protocol
// for more details.
const s = `user=${username}\x01auth=Bearer ${accessToken}\x01\x01`
return new Buffer(s).toString('base64');
}
export function googleSettings(googleToken, email) {
const connectionSettings = Object.assign({
imap_username: email,
smtp_username: email,
}, CommonProviderSettings.gmail);
const connectionCredentials = {
expiry_date: Math.floor(googleToken.expiry_date / 1000),
};
if (GMAIL_CLIENT_ID && GMAIL_CLIENT_SECRET) {
// cloud-only credentials
connectionCredentials.client_id = GMAIL_CLIENT_ID;
connectionCredentials.client_secret = GMAIL_CLIENT_SECRET;
connectionCredentials.access_token = googleToken.access_token;
connectionCredentials.refresh_token = googleToken.refresh_token;
}
if (googleToken.xoauth2) {
connectionCredentials.xoauth2 = googleToken.xoauth2;
} else {
connectionCredentials.xoauth2 = generateXOAuth2Token(email, googleToken.access_token)
}
return {connectionSettings, connectionCredentials}
}
export function credentialsForProvider({provider, settings, email}) {
if (provider === "gmail") {
const {connectionSettings, connectionCredentials} = googleSettings(settings, email)
return {connectionSettings, connectionCredentials}
} else if (provider === "office365") {
const connectionSettings = CommonProviderSettings[provider];
const connectionCredentials = {
imap_username: email,
imap_password: settings.password || settings.imap_password,
smtp_username: email,
smtp_password: settings.password || settings.smtp_password,
}
return {connectionSettings, connectionCredentials}
} else if (SUPPORTED_PROVIDERS.has(provider)) {
const connectionSettings = _.pick(settings, [
'imap_host', 'imap_port', 'imap_security',
'smtp_host', 'smtp_port', 'smtp_security',
'smtp_allow_insecure_ssl',
'imap_allow_insecure_ssl',
]);
// BACKCOMPAT ONLY - remove eventually & make _security params required!
if (!connectionSettings.imap_security) {
switch (connectionSettings.imap_port) {
case 993:
connectionSettings.imap_security = "SSL / TLS";
break;
default:
connectionSettings.imap_security = "none";
break;
}
}
if (!connectionSettings.smtp_security) {
switch (connectionSettings.smtp_port) {
case 465:
connectionSettings.smtp_security = "SSL / TLS";
break;
default:
connectionSettings.smtp_security = 'STARTTLS';
break;
}
}
// END BACKCOMPAT
const connectionCredentials = _.pick(settings, [
'imap_username', 'imap_password',
'smtp_username', 'smtp_password',
]);
return {connectionSettings, connectionCredentials}
}
throw new Error(`Invalid provider: ${provider}`)
}
function bearerToken(xoauth2) {
// We have to unpack the access token from the entire XOAuth2
// token because it is re-packed during the SMTP connection login.
// https://github.com/nodemailer/smtp-connection/blob/master/lib/smtp-connection.js#L1418
const bearer = "Bearer ";
const decoded = atob(xoauth2);
const tokenIndex = decoded.indexOf(bearer) + bearer.length;
return decoded.substring(tokenIndex, decoded.length - 2);
}
export function smtpConfigFromSettings(provider, connectionSettings, connectionCredentials) {
const {smtp_host, smtp_port, smtp_security, smtp_allow_insecure_ssl} = connectionSettings;
const config = {
host: smtp_host,
port: smtp_port,
secure: smtp_security === 'SSL / TLS',
};
if (smtp_security === 'STARTTLS') {
config.requireTLS = true;
}
if (smtp_allow_insecure_ssl) {
config.tls = INSECURE_TLS_OPTIONS;
} else {
config.tls = SECURE_TLS_OPTIONS;
}
if (provider === 'gmail') {
const {xoauth2} = connectionCredentials;
if (!xoauth2) {
throw new Error("Missing XOAuth2 Token")
}
const token = bearerToken(xoauth2);
config.auth = { user: connectionSettings.smtp_username, xoauth2: token }
} else if (SUPPORTED_PROVIDERS.has(provider)) {
const {smtp_username, smtp_password} = connectionCredentials
config.auth = { user: smtp_username, pass: smtp_password}
} else {
throw new Error(`${provider} not yet supported`)
}
return config;
}
export function imapAuthRouteConfig() {
return {
description: 'Authenticates a new account.',
tags: ['accounts'],
auth: false,
validate: {
payload: {
email: Joi.string().email().required(),
name: Joi.string().required(),
provider: Joi.string().valid(...SUPPORTED_PROVIDERS).required(),
settings: Joi.alternatives().try(imapSmtpSettings, office365Settings, resolvedGmailSettings),
},
},
}
}
export function imapAuthHandler(upsertAccount) {
const MAX_RETRIES = 2
const authHandler = (request, reply, retryNum = 0) => {
const dbStub = {};
const {email, provider, name} = request.payload;
const connectionChecks = [];
const {connectionSettings, connectionCredentials} = credentialsForProvider(request.payload)
const smtpConfig = smtpConfigFromSettings(provider, connectionSettings, connectionCredentials);
const smtpTransport = nodemailer.createTransport(Object.assign({
connectionTimeout: 30000,
}, smtpConfig));
// All IMAP accounts require a valid SMTP server for sending, and we never
// want to allow folks to connect accounts and find out later that they
// entered the wrong SMTP credentials. So verify here also!
const smtpVerifyPromise = smtpTransport.verify().catch((error) => {
throw convertSmtpError(error);
});
connectionChecks.push(smtpVerifyPromise);
connectionChecks.push(IMAPConnection.connect({
settings: Object.assign({}, connectionSettings, connectionCredentials),
logger: request.logger,
db: dbStub,
}));
Promise.all(connectionChecks).then((results) => {
for (const result of results) {
// close any IMAP connections we opened
if (result && result.end) { result.end(); }
}
const accountParams = {
name: name,
provider: provider,
emailAddress: email,
connectionSettings: connectionSettings,
}
return upsertAccount(accountParams, connectionCredentials)
})
.then(({account, token}) => {
const response = account.toJSON();
response.account_token = token.value;
reply(JSON.stringify(response));
return
})
.catch((err) => {
const logger = request.logger.child({
account_name: name,
account_provider: provider,
account_email: email,
connection_settings: connectionSettings,
error_name: err.name,
error_message: err.message,
error_tb: err.stack,
})
if (err instanceof RetryableError) {
if (retryNum < MAX_RETRIES) {
setTimeout(() => {
request.logger.info(`${err.name}. Retry #${retryNum + 1}`)
authHandler(request, reply, retryNum + 1)
}, 100)
return
}
logger.error('Encountered retryable error while attempting to authenticate')
reply({message: err.userMessage, type: "api_error"}).code(err.statusCode);
return
}
logger.error("Error trying to authenticate")
let userMessage = "Please contact support@nylas.com. An unforeseen error has occurred.";
let statusCode = 500;
if (err instanceof NylasError) {
if (err.userMessage) {
userMessage = err.userMessage;
}
if (err.statusCode) {
statusCode = err.statusCode;
}
}
reply({message: userMessage, type: "api_error"}).code(statusCode);
return;
})
}
return authHandler
}
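A hedged sketch of the flow the handler above performs, shown standalone: resolve connection settings for a plain IMAP account, then build and verify the nodemailer SMTP transport. Host names, credentials, and the require path are illustrative assumptions:
import nodemailer from 'nodemailer'
import {credentialsForProvider, smtpConfigFromSettings} from './auth-helpers' // path assumed
const {connectionSettings, connectionCredentials} = credentialsForProvider({
  provider: 'imap',
  email: 'user@example.com',
  settings: {
    imap_host: 'imap.example.com', imap_port: 993, imap_security: 'SSL / TLS',
    imap_username: 'user@example.com', imap_password: 'hunter2',
    smtp_host: 'smtp.example.com', smtp_port: 587, smtp_security: 'STARTTLS',
    smtp_username: 'user@example.com', smtp_password: 'hunter2',
  },
})
const smtpConfig = smtpConfigFromSettings('imap', connectionSettings, connectionCredentials)
const transport = nodemailer.createTransport(Object.assign({connectionTimeout: 30000}, smtpConfig))
transport.verify().then(() => console.log('SMTP credentials accepted'))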

View file

@ -1,55 +0,0 @@
const Sequelize = require('sequelize');
module.exports = {
JSONColumn(fieldName, options = {}) {
return Object.assign(options, {
type: options.columnType || Sequelize.TEXT,
get() {
try {
const val = this.getDataValue(fieldName);
if (!val) {
const {defaultValue} = options
return defaultValue ? Object.assign({}, defaultValue) : {};
}
return JSON.parse(val);
} catch (err) {
console.error(err);
return null
}
},
set(val) {
this.setDataValue(fieldName, JSON.stringify(val));
},
defaultValue: undefined,
})
},
JSONArrayColumn(fieldName, options = {}) {
return Object.assign(options, {
type: Sequelize.TEXT,
get() {
try {
const val = this.getDataValue(fieldName);
if (!val) {
const {defaultValue} = options
return defaultValue || [];
}
const arr = JSON.parse(val)
if (!Array.isArray(arr)) {
throw new Error('JSONArrayType should be an array')
}
return JSON.parse(val);
} catch (err) {
console.error(err);
return null
}
},
set(val) {
if (!Array.isArray(val)) {
throw new Error('JSONArrayType should be an array')
}
this.setDataValue(fieldName, JSON.stringify(val));
},
defaultValue: undefined,
})
},
}
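A hedged usage sketch of these helpers inside a Sequelize model definition; the model name, field names, and require path are assumptions, not taken from this commit:
const Sequelize = require('sequelize');
const {JSONColumn, JSONArrayColumn} = require('./database-types'); // path assumed
module.exports = (sequelize) => {
  return sequelize.define('syncbackRequest', {
    status: Sequelize.STRING,
    props: JSONColumn('props', {defaultValue: {}}),  // stored as TEXT, exposed as a JS object
    labelIds: JSONArrayColumn('labelIds'),           // stored as TEXT, exposed as a JS array
  });
};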

View file

@ -1,11 +0,0 @@
// In order to be able to represent 4-byte characters such as some emoji, we
// must use the 'utf8mb4' character set on MySQL. Any table using this
// character set can't have indexes on fields longer than this length without
// triggering the error
//
// ERROR 1071 (42000): Specified key was too long; max key length is 767 bytes
//
// (or, without sql_mode = TRADITIONAL - getting silently truncated!)
const MAX_INDEXABLE_LENGTH = 191;
export {MAX_INDEXABLE_LENGTH};
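A small illustrative sketch of the intended use: cap indexed string columns at this length when declaring Sequelize attributes (the column definition and require path are assumptions):
const Sequelize = require('sequelize');
const {MAX_INDEXABLE_LENGTH} = require('./db-utils'); // path assumed
const threadId = {
  // 191 chars * 4 bytes (utf8mb4) = 764 bytes, under MySQL's 767-byte index key limit
  type: Sequelize.STRING(MAX_INDEXABLE_LENGTH),
  allowNull: true,
};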

View file

@ -1,123 +0,0 @@
const _ = require('underscore');
const Rx = require('rx-lite')
const stream = require('stream');
const DELTA_CONNECTION_TIMEOUT_MS = 15 * 60000;
const OBSERVABLE_TIMEOUT_MS = DELTA_CONNECTION_TIMEOUT_MS - (1 * 60000);
/**
* A Transaction references objects that changed. This finds and inflates
* those objects.
*
* Resolves to an array of transactions with their `attributes` set to be
* the inflated model they reference.
*/
function inflateTransactions(db, accountId, transactions = [], sourceName) {
const transactionJSONs = transactions.map((t) => (t.toJSON ? t.toJSON() : t))
transactionJSONs.forEach((t) => {
t.cursor = t.id;
t.accountId = accountId;
});
const byModel = _.groupBy(transactionJSONs, "object");
const byObjectIds = _.groupBy(transactionJSONs, "objectId");
return Promise.all(Object.keys(byModel).map((modelName) => {
const modelIds = byModel[modelName].filter(t => t.event !== 'delete').map(t => t.objectId);
const modelConstructorName = modelName.charAt(0).toUpperCase() + modelName.slice(1);
const ModelKlass = db[modelConstructorName]
let includes = [];
if (ModelKlass.requiredAssociationsForJSON) {
includes = ModelKlass.requiredAssociationsForJSON(db)
}
return ModelKlass.findAll({
where: {id: modelIds},
include: includes,
}).then((models) => {
const remaining = _.difference(modelIds, models.map(m => `${m.id}`))
if (remaining.length !== 0) {
const badTrans = byModel[modelName].filter(t =>
remaining.includes(t.objectId))
console.error(`While inflating ${sourceName} transactions, we couldn't find models for some ${modelName} IDs`, remaining, badTrans)
}
for (const model of models) {
const transactionsForModel = byObjectIds[model.id];
for (const t of transactionsForModel) {
t.attributes = model.toJSON();
}
}
});
})).then(() => transactionJSONs)
}
function stringifyTransactions(db, accountId, transactions = [], sourceName) {
return inflateTransactions(db, accountId, transactions, sourceName)
.then((transactionJSONs) => {
return `${transactionJSONs.map(JSON.stringify).join("\n")}\n`;
});
}
function transactionsSinceCursor(db, cursor, accountId) {
return db.Transaction.streamAll({where: { id: {$gt: cursor || 0}, accountId }});
}
module.exports = {
DELTA_CONNECTION_TIMEOUT_MS: DELTA_CONNECTION_TIMEOUT_MS,
buildAPIStream(request, {databasePromise, cursor, accountId, deltasSource}) {
return databasePromise.then((db) => {
const source = Rx.Observable.merge(
transactionsSinceCursor(db, cursor, accountId).flatMap((ts) =>
stringifyTransactions(db, accountId, ts, "initial")),
deltasSource.flatMap((t) =>
stringifyTransactions(db, accountId, [t], "new")),
Rx.Observable.interval(1000).map(() => "\n")
).timeout(OBSERVABLE_TIMEOUT_MS);
const outputStream = stream.Readable();
outputStream._read = () => { return };
const disposable = source.subscribe((str) => outputStream.push(str))
// See the following for why we need to set up the listeners on the raw
// stream.
// http://stackoverflow.com/questions/26221000/detecting-when-a-long-request-has-ended-in-nodejs-express
// https://github.com/hapijs/discuss/issues/322#issuecomment-235999544
//
// Hapi's disconnect event only fires on error or unexpected aborts: https://hapijs.com/api#response-events
request.raw.req.on('error', (error) => {
request.logger.error({err: error}, 'Delta connection stream errored')
disposable.dispose()
})
request.raw.req.on('close', () => {
request.logger.info('Delta connection stream was closed')
disposable.dispose()
})
request.raw.req.on('end', () => {
request.logger.info('Delta connection stream ended')
disposable.dispose()
})
request.on("disconnect", () => {
request.logger.info('Delta connection request was disconnected')
disposable.dispose()
});
return outputStream;
});
},
buildDeltaObservable({db, cursor, accountId, deltasSource}) {
return Rx.Observable.merge(
transactionsSinceCursor(db, cursor, accountId).flatMap((ts) =>
inflateTransactions(db, accountId, ts, "initial")),
deltasSource.flatMap((t) =>
inflateTransactions(db, accountId, [t], "new"))
)
},
buildCursor({databasePromise}) {
return databasePromise.then(({Transaction}) => {
return Transaction.findOne({order: [['id', 'DESC']]}).then((t) => {
return t ? t.id : 0;
});
});
},
}
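A hedged sketch of a consumer of buildAPIStream(): a Hapi route handler replying with the readable delta stream (DeltaStreamBuilder being the export name this module had in index.js). The route path, auth shape, and the getDatabase/observeNewDeltas helpers are assumptions for illustration, not code from this repository:
server.route({  // `server` is an already-configured Hapi server (hapi 16 reply-style API)
  method: 'GET',
  path: '/delta/streaming',
  handler: (request, reply) => {
    DeltaStreamBuilder.buildAPIStream(request, {
      databasePromise: getDatabase(),          // assumed: resolves to the sequelize db handle
      cursor: request.query.cursor,            // the client's last-seen transaction id
      accountId: request.auth.credentials.id,  // assumed auth credential shape
      deltasSource: observeNewDeltas(),        // assumed: Rx observable of new Transaction rows
    }).then((outputStream) => reply(outputStream))
  },
})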

View file

@ -1,15 +0,0 @@
export function isClientEnv() {
return typeof window !== 'undefined' && typeof window.NylasEnv !== 'undefined'
}
export function isCloudEnv() {
return !isClientEnv()
}
export function inDevMode() {
if (isClientEnv()) {
return window.NylasEnv.inDevMode();
}
return process.env.NODE_ENV !== 'production';
}

View file

@ -1,27 +0,0 @@
export class NylasError extends Error {
toJSON() {
let json = {}
if (super.toJSON) {
// Chromium `Error`s have a `toJSON`, but Node `Error`s do NOT!
json = super.toJSON()
}
Object.getOwnPropertyNames(this).forEach((key) => {
json[key] = this[key];
});
return json
}
}
export class APIError extends NylasError {
constructor(message, statusCode, data) {
super(message);
this.statusCode = statusCode;
this.data = data;
}
}
/**
* An abstract base class that can be used to indicate Errors that may fix
* themselves when retried
*/
export class RetryableError extends NylasError { }
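A brief illustrative example of the intended pattern, which the auth handler earlier in this commit also follows: concrete subclasses mark failures that callers may retry, and callers check with instanceof. The error class and retry loop below are assumptions:
class TransientNetworkError extends RetryableError { }
async function withRetries(operation, maxRetries = 2) {
  for (let attempt = 0; attempt <= maxRetries; attempt += 1) {
    try {
      return await operation()
    } catch (err) {
      if (!(err instanceof RetryableError) || attempt === maxRetries) {
        throw err
      }
      // retryable and attempts remain: loop again (a real caller might add backoff here)
    }
  }
}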

View file

@ -1,20 +0,0 @@
const _ = require('underscore');
module.exports = (db) => {
for (const modelName of Object.keys(db)) {
const model = db[modelName];
const allIgnoredFields = (changedFields) => {
return _.isEqual(changedFields, ['syncState']);
}
model.beforeCreate('increment-version-c', (instance) => {
instance.version = 1;
});
model.beforeUpdate('increment-version-u', (instance) => {
if (!allIgnoredFields(Object.keys(instance._changed))) {
instance.version = instance.version ? instance.version + 1 : 1;
}
});
}
}

View file

@ -1,64 +0,0 @@
const _ = require('underscore')
module.exports = (db, sequelize, {only, onCreatedTransaction} = {}) => {
if (!db.Transaction) {
throw new Error("Cannot enable transaction logging, there is no Transaction model class in this database.")
}
const isTransaction = ($modelOptions) => {
return $modelOptions.name.singular === "transaction"
}
const allIgnoredFields = (changedFields) => {
return _.isEqual(changedFields, ['updatedAt', 'version'])
}
const transactionLogger = (event) => {
return ({dataValues, _changed, $modelOptions}) => {
let name = $modelOptions.name.singular;
if (name === 'metadatum') {
name = 'metadata';
}
if (name === 'reference') {
return;
}
if (name === 'message' && dataValues.isDraft) {
// TODO: when draft syncing support added, remove this and force
// transactions for all drafts in db to sync to app
return;
}
if ((only && !only.includes(name)) || isTransaction($modelOptions)) {
return;
}
const changedFields = Object.keys(_changed)
if (event !== 'delete' && (changedFields.length === 0 || allIgnoredFields(changedFields))) {
return;
}
const accountId = db.accountId ? db.accountId : dataValues.accountId;
if (!accountId) {
throw new Error("Assertion failure: Cannot create a transaction - could not resolve accountId.")
}
const transactionData = Object.assign({event}, {
object: name,
objectId: dataValues.id,
accountId: accountId,
changedFields: changedFields,
});
db.Transaction.create(transactionData).then(onCreatedTransaction)
}
}
sequelize.addHook("afterCreate", transactionLogger("create"))
sequelize.addHook("afterUpdate", transactionLogger("modify"))
// NOTE: Hooking UPSERT requires Sequelize 4.x. We're
// on version 3 right now, but leaving this here for when we upgrade.
sequelize.addHook("afterUpsert", transactionLogger("modify"))
sequelize.addHook("afterDestroy", transactionLogger("delete"))
}
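A hedged sketch of wiring this hook up during database initialization; the model names passed to `only`, the deltaStreamSubject sink, and the require path are assumptions:
const HookTransactionLog = require('./hook-transaction-log'); // path assumed
HookTransactionLog(db, sequelize, {
  only: ['message', 'thread', 'folder', 'label'],  // assumed: limit which models emit Transactions
  onCreatedTransaction: (transaction) => {
    deltaStreamSubject.onNext(transaction);        // assumed sink feeding a deltas observable
  },
});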

View file

@ -1,209 +0,0 @@
const _ = require('underscore');
const {RetryableError} = require('./errors')
const {IMAPConnectionNotReadyError} = require('./imap-errors');
/*
IMAPBox uses Proxy to wrap the "box" exposed by node-imap. It provides higher-level
primitives, but you can still call through to properties / methods of the node-imap
box, ala `imapbox.uidvalidity`
*/
class IMAPBox {
constructor(imapConn, box) {
this._conn = imapConn
this._box = box
return new Proxy(this, {
get(obj, prop) {
const val = (prop in obj) ? obj[prop] : obj._box[prop];
if (_.isFunction(val)) {
const myBox = obj._box.name;
const openBox = obj._conn.getOpenBoxName()
if (myBox !== openBox) {
return () => {
throw new RetryableError(`IMAPBox::${prop} - Mailbox is no longer selected on the IMAPConnection (${myBox} != ${openBox}).`);
}
}
}
return val;
},
})
}
_withPreparedConnection(cb) {
return this._conn._withPreparedConnection(cb)
}
/**
* @param {array|string} range - can be a single message identifier,
* a message identifier range (e.g. '2504:2507' or '*' or '2504:*'),
* an array of message identifiers, or an array of message identifier ranges.
* @param {Object} options
* @param {function} forEachMessageCallback - function to be called with each
* message as it comes in
* @return {Promise} that will feed each message as it becomes ready
*/
async fetchEach(range, options, forEachMessageCallback) {
if (!options) {
throw new Error("IMAPBox.fetch now requires an options object.")
}
if (range.length === 0) {
return Promise.resolve()
}
return this._withPreparedConnection((imap) => {
return new Promise((resolve, reject) => {
const f = imap.fetch(range, options);
f.on('message', (imapMessage) => {
const parts = {};
let headers = null;
let attributes = null;
imapMessage.on('attributes', (attrs) => {
attributes = attrs;
});
imapMessage.on('body', (stream, info) => {
const chunks = [];
stream.on('data', (chunk) => {
chunks.push(chunk);
});
stream.once('end', () => {
const full = Buffer.concat(chunks);
if (info.which === 'HEADER') {
headers = full;
} else {
parts[info.which] = full;
}
});
});
imapMessage.once('end', () => {
// attributes is an object containing ascii strings, but parts and
// headers are undecoded binary Buffers (since the data for mime
// parts cannot be decoded to strings without looking up charset data
// in metadata, and this function's job is only to fetch the raw data)
forEachMessageCallback({attributes, headers, parts});
});
})
f.once('error', reject);
f.once('end', resolve);
})
});
}
/**
* @return {Promise} that resolves to requested message
*/
async fetchMessage(uid) {
if (!uid) {
throw new Error("IMAPConnection.fetchMessage requires a message uid.")
}
let message;
await this.fetchEach([uid], {bodies: ['HEADER', 'TEXT']}, (msg) => { message = msg; })
return message
}
async fetchMessageStream(uid, {fetchOptions, onFetchComplete} = {}) {
if (!uid) {
throw new Error("IMAPConnection.fetchStream requires a message uid.")
}
if (!fetchOptions) {
throw new Error("IMAPConnection.fetchStream requires an options object.")
}
return this._withPreparedConnection((imap) => {
return new Promise((resolve, reject) => {
const f = imap.fetch(uid, fetchOptions);
f.on('message', (imapMessage) => {
imapMessage.on('body', (stream) => {
resolve(stream)
})
})
f.once('error', reject)
f.once('end', onFetchComplete || (() => {}));
})
})
}
/**
* @param {array|string} range - can be a single message identifier,
* a message identifier range (e.g. '2504:2507' or '*' or '2504:*'),
* an array of message identifiers, or an array of message identifier ranges.
* @return {Promise} that resolves to a map of uid -> attributes for every
* message in the range
*/
async fetchUIDAttributes(range, fetchOptions = {}) {
return this._withPreparedConnection((imap) => {
return new Promise((resolve, reject) => {
const attributesByUID = {};
const f = imap.fetch(range, fetchOptions);
f.on('message', (msg) => {
msg.on('attributes', (attrs) => {
attributesByUID[attrs.uid] = attrs;
})
});
f.once('error', reject);
f.once('end', () => resolve(attributesByUID));
})
});
}
addFlags(range, flags) {
if (!this._conn._imap) {
throw new IMAPConnectionNotReadyError(`IMAPBox::addFlags`)
}
return this._withPreparedConnection((imap) => imap.addFlagsAsync(range, flags))
}
delFlags(range, flags) {
if (!this._conn._imap) {
throw new IMAPConnectionNotReadyError(`IMAPBox::delFlags`)
}
return this._withPreparedConnection((imap) => imap.delFlagsAsync(range, flags))
}
moveFromBox(range, folderName) {
if (!this._conn._imap) {
throw new IMAPConnectionNotReadyError(`IMAPBox::moveFromBox`)
}
return this._withPreparedConnection((imap) => imap.moveAsync(range, folderName))
}
setLabels(range, labels) {
if (!this._conn._imap) {
throw new IMAPConnectionNotReadyError(`IMAPBox::setLabels`)
}
return this._withPreparedConnection((imap) => imap.setLabelsAsync(range, labels))
}
removeLabels(range, labels) {
if (!this._conn._imap) {
throw new IMAPConnectionNotReadyError(`IMAPBox::removeLabels`)
}
return this._withPreparedConnection((imap) => imap.delLabelsAsync(range, labels))
}
append(rawMime, options) {
if (!this._conn._imap) {
throw new IMAPConnectionNotReadyError(`IMAPBox::append`)
}
return this._withPreparedConnection((imap) => imap.appendAsync(rawMime, options))
}
search(criteria) {
if (!this._conn._imap) {
throw new IMAPConnectionNotReadyError(`IMAPBox::search`)
}
return this._withPreparedConnection((imap) => imap.searchAsync(criteria))
}
closeBox({expunge = true} = {}) {
if (!this._conn._imap) {
throw new IMAPConnectionNotReadyError(`IMAPBox::closeBox`)
}
return this._withPreparedConnection((imap) => imap.closeBoxAsync(expunge))
}
}
module.exports = IMAPBox;
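A hedged usage sketch of fetchUIDAttributes() and fetchEach(); the box would be obtained from an open IMAPConnection, and the range and body parts here are illustrative:
async function downloadAll(box) {  // `box` is an IMAPBox for the currently open mailbox
  const attrsByUID = await box.fetchUIDAttributes('1:*')
  const uids = Object.keys(attrsByUID)
  await box.fetchEach(uids, {bodies: ['HEADER', 'TEXT']}, ({attributes, headers, parts}) => {
    // headers/parts arrive as raw, undecoded Buffers; attributes carries uid, flags, date, etc.
    console.log(attributes.uid, headers.length, Object.keys(parts))
  })
}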

View file

@ -1,120 +0,0 @@
const IMAPConnection = require('./imap-connection').default;
const {inDevMode} = require('./env-helpers')
const MAX_DEV_MODE_CONNECTIONS = 3
const MAX_GMAIL_CONNECTIONS = 7;
const MAX_O365_CONNECTIONS = 5;
const MAX_ICLOUD_CONNECTIONS = 5;
const MAX_IMAP_CONNECTIONS = 5;
class AccountConnectionPool {
constructor(maxConnections) {
this._availableConns = new Array(maxConnections).fill(null);
this._queue = [];
}
async _genConnection(account, socketTimeout, logger) {
const settings = account.connectionSettings;
const credentials = account.decryptedCredentials();
if (!settings || !settings.imap_host) {
throw new Error("_genConnection: There are no IMAP connection settings for this account.");
}
if (!credentials) {
throw new Error("_genConnection: There are no IMAP connection credentials for this account.");
}
const conn = new IMAPConnection({
db: null,
settings: Object.assign({}, settings, credentials, {socketTimeout}),
logger,
account,
});
return conn.connect();
}
async withConnections({account, desiredCount, logger, socketTimeout, onConnected}) {
// If we wake up from the first await but don't have enough connections in
// the pool then we need to prepend ourselves to the queue until there are
// enough. This guarantees that the queue is fair.
let prependToQueue = false;
while (this._availableConns.length < desiredCount) {
await new Promise((resolve) => {
if (prependToQueue) {
this._queue.unshift(resolve);
} else {
this._queue.push(resolve);
}
});
prependToQueue = true;
}
let conns = [];
let keepOpen = false;
let calledOnDone = false;
const onDone = () => {
if (calledOnDone) { return }
calledOnDone = true
keepOpen = false;
conns.filter(Boolean).forEach(conn => conn.end());
conns.filter(Boolean).forEach(conn => conn.removeAllListeners());
conns.fill(null);
this._availableConns = conns.concat(this._availableConns);
if (this._queue.length > 0) {
const resolveWaitForConnection = this._queue.shift();
resolveWaitForConnection();
}
};
try {
for (let i = 0; i < desiredCount; ++i) {
conns.push(this._availableConns.shift());
}
conns = await Promise.all(
conns.map(() => this._genConnection(account, socketTimeout, logger))
);
keepOpen = await onConnected(conns, onDone);
if (!keepOpen) {
onDone();
}
} catch (err) {
onDone()
throw err;
}
}
}
class IMAPConnectionPool {
constructor() {
this._poolMap = {};
}
_maxConnectionsForAccount(account) {
if (inDevMode()) {
return MAX_DEV_MODE_CONNECTIONS;
}
switch (account.provider) {
case 'gmail': return MAX_GMAIL_CONNECTIONS;
case 'office365': return MAX_O365_CONNECTIONS;
case 'icloud': return MAX_ICLOUD_CONNECTIONS;
case 'imap': return MAX_IMAP_CONNECTIONS;
default: return MAX_DEV_MODE_CONNECTIONS;
}
}
async withConnectionsForAccount(account, {desiredCount, logger, socketTimeout, onConnected}) {
if (!this._poolMap[account.id]) {
this._poolMap[account.id] = new AccountConnectionPool(this._maxConnectionsForAccount(account));
}
const pool = this._poolMap[account.id];
await pool.withConnections({account, desiredCount, logger, socketTimeout, onConnected});
}
}
module.exports = new IMAPConnectionPool();

View file

@ -1,414 +0,0 @@
import Imap from 'imap';
import _ from 'underscore';
import xoauth2 from 'xoauth2';
import EventEmitter from 'events';
import {INSECURE_TLS_OPTIONS, SECURE_TLS_OPTIONS} from './tls-utils';
import PromiseUtils from './promise-utils';
import IMAPBox from './imap-box';
import {RetryableError} from './errors'
import {
convertImapError,
IMAPConnectionTimeoutError,
IMAPConnectionNotReadyError,
IMAPConnectionEndedError,
} from './imap-errors';
const Capabilities = {
Gmail: 'X-GM-EXT-1',
Quota: 'QUOTA',
UIDPlus: 'UIDPLUS',
Condstore: 'CONDSTORE',
Search: 'ESEARCH',
Sort: 'SORT',
}
const ONE_HOUR_SECS = 60 * 60;
const AUTH_TIMEOUT_MS = 30 * 1000;
const DEFAULT_SOCKET_TIMEOUT_MS = 30 * 1000;
export default class IMAPConnection extends EventEmitter {
static DefaultSocketTimeout = DEFAULT_SOCKET_TIMEOUT_MS;
static connect(...args) {
return new IMAPConnection(...args).connect()
}
static asyncResolveIMAPSettings(baseSettings) {
let autotls;
// BACKCOMPAT ONLY - remove the if conditional on this eventually
if (baseSettings.imap_security) {
switch (baseSettings.imap_security) {
case 'STARTTLS':
autotls = 'required';
break;
case 'SSL / TLS':
autotls = 'never';
break;
default:
autotls = 'always';
break;
}
} else {
// old code used the default value
autotls = 'never';
}
const settings = {
host: baseSettings.imap_host,
port: baseSettings.imap_port,
user: baseSettings.imap_username,
password: baseSettings.imap_password,
// TODO: ssl_required is a deprecated setting, remove eventually
tls: baseSettings.imap_security === 'SSL / TLS' || baseSettings.ssl_required,
autotls: autotls,
socketTimeout: baseSettings.socketTimeout || DEFAULT_SOCKET_TIMEOUT_MS,
authTimeout: baseSettings.authTimeout || AUTH_TIMEOUT_MS,
}
// TODO: second part of || is for backcompat only, remove eventually (old
// settings were insecure by default)
if (baseSettings.imap_allow_insecure_ssl || baseSettings.imap_allow_insecure_ssl === undefined) {
settings.tlsOptions = INSECURE_TLS_OPTIONS;
} else {
settings.tlsOptions = SECURE_TLS_OPTIONS;
}
if (process.env.NYLAS_DEBUG) {
settings.debug = console.log;
}
// This account uses XOAuth2, and we have the client_id + refresh token
if (baseSettings.refresh_token) {
const xoauthFields = ['client_id', 'client_secret', 'imap_username', 'refresh_token'];
if (Object.keys(_.pick(baseSettings, xoauthFields)).length !== 4) {
return Promise.reject(new Error(`IMAPConnection: Expected ${xoauthFields.join(',')} when given refresh_token`))
}
return new Promise((resolve, reject) => {
xoauth2.createXOAuth2Generator({
clientId: baseSettings.client_id,
clientSecret: baseSettings.client_secret,
user: baseSettings.imap_username,
refreshToken: baseSettings.refresh_token,
})
.getToken((err, token) => {
if (err) { return reject(err) }
delete settings.password;
settings.xoauth2 = token;
settings.expiry_date = Math.floor(Date.now() / 1000) + ONE_HOUR_SECS;
return resolve(settings);
});
});
}
// This account uses XOAuth2, and we have a token given to us by the
// backend, which has the client secret.
if (baseSettings.xoauth2) {
delete settings.password;
settings.xoauth2 = baseSettings.xoauth2;
settings.expiry_date = baseSettings.expiry_date;
}
return Promise.resolve(settings);
}
constructor({db, account, settings, logger} = {}) {
super();
if (!(settings instanceof Object)) {
throw new Error("IMAPConnection: Must be instantiated with `settings`")
}
if (!logger) {
throw new Error("IMAPConnection: Must be instantiated with `logger`")
}
this._logger = logger;
this._db = db;
this._account = account;
this._queue = [];
this._currentOperation = null;
this._baseSettings = settings;
this._resolvedSettings = null;
this._imap = null;
this._connectPromise = null;
this._isOpeningBox = false;
this._lastOpenDuration = null;
}
async connect() {
if (!this._connectPromise) {
this._connectPromise = new Promise(async (resolve, reject) => {
try {
this._resolvedSettings = await IMAPConnection.asyncResolveIMAPSettings(this._baseSettings)
await this._buildUnderlyingConnection()
resolve(this)
} catch (err) {
reject(err)
}
})
}
return this._connectPromise;
}
end() {
if (this._imap) {
this._imap.end();
this._imap = null;
}
this._queue = [];
this._connectPromise = null;
}
async _buildUnderlyingConnection() {
this._imap = PromiseUtils.promisifyAll(new Imap(this._resolvedSettings));
return this._withPreparedConnection(() => {
return new Promise((resolve) => {
// `mail` event is emitted when new mail arrives in the currently open mailbox.
let lastMailEventBox = null;
this._imap.on('mail', () => {
// Fix https://github.com/mscdex/node-imap/issues/585
if (this._isOpeningBox) { return }
if (!this._imap) { return }
if (lastMailEventBox === null || lastMailEventBox === this._imap._box.name) {
// Fix https://github.com/mscdex/node-imap/issues/445
this.emit('mail');
}
lastMailEventBox = this._imap._box.name
});
// Emitted if the UID validity value for the currently open mailbox
// changes during the current session.
this._imap.on('uidvalidity', () => this.emit('uidvalidity'))
// Emitted when message metadata (e.g. flags) changes externally.
this._imap.on('update', () => this.emit('update'))
this._imap.on('alert', (msg) => {
this._logger.info({imap_server_msg: msg}, `IMAP server message`)
});
this._imap.once('ready', () => {
resolve()
});
this._imap.once('error', () => {
this.end();
});
this._imap.once('end', () => {
this._logger.warn('Underlying IMAP connection has ended')
this.end();
});
this._imap.connect();
});
})
}
/**
* @return {Promise} that resolves/rejects when the Promise returned by the
* passed-in callback resolves or rejects, and additionally will reject when
* the IMAP connection closes, ends or times out.
* This is important for 2 main reasons:
* - node-imap can sometimes hang the current operation after the connection
* has emitted an `end` event. For this reason, we need to manually reject
* and end() on `end` event.
* - node-imap does not seem to respect the socketTimeout setting, so it won't
* actually time out an operation after the specified timeout has passed.
* For this reason, we have to manually reject when the timeout period has
* passed.
* @param {function} callback - This callback will receive as a single arg
* a node-imap connection instance, and should return a Promise.
*/
async _withPreparedConnection(callback) {
if (!this._imap) {
throw new IMAPConnectionNotReadyError(`IMAPConnection::_withPreparedConnection`)
}
if (this._isOpeningBox) {
throw new RetryableError('IMAPConnection: Cannot operate on connection while opening a box.')
}
let onEnded = null;
let onErrored = null;
try {
return await new Promise(async (resolve, reject) => {
const socketTimeout = setTimeout(() => {
reject(new IMAPConnectionTimeoutError('Socket timed out'))
}, this._resolvedSettings.socketTimeout)
const wrappedResolve = (result) => {
clearTimeout(socketTimeout)
resolve(result)
}
const wrappedReject = (error) => {
clearTimeout(socketTimeout)
const convertedError = convertImapError(error)
reject(convertedError)
this.end()
}
onEnded = () => {
wrappedReject(new IMAPConnectionEndedError())
};
onErrored = (error) => {
wrappedReject(error);
};
this._imap.on('error', onErrored);
this._imap.on('end', onEnded);
try {
const result = await callback(this._imap)
wrappedResolve(result)
} catch (error) {
wrappedReject(error)
}
})
} finally {
if (this._imap) {
this._imap.removeListener('error', onErrored);
this._imap.removeListener('end', onEnded);
}
}
}
getResolvedSettings() {
return this._resolvedSettings
}
getOpenBoxName() {
return (this._imap && this._imap._box) ? this._imap._box.name : null;
}
serverSupports(capability) {
if (!this._imap) {
throw new IMAPConnectionNotReadyError(`IMAPConnection::serverSupports`)
}
return this._imap.serverSupports(capability);
}
getLastOpenDuration() {
if (this._isOpeningBox) {
throw new RetryableError('IMAPConnection: Cannot operate on connection while opening a box.')
}
return this._lastOpenDuration;
}
/**
* @return {Promise} that resolves to instance of IMAPBox
*/
async openBox(folderName, {readOnly = false, refetchBoxInfo = false} = {}) {
if (!folderName) {
throw new Error('IMAPConnection::openBox - You must provide a folder name')
}
if (!this._imap) {
throw new IMAPConnectionNotReadyError(`IMAPConnection::openBox`)
}
if (!refetchBoxInfo && folderName === this.getOpenBoxName()) {
return Promise.resolve(new IMAPBox(this, this._imap._box));
}
return this._withPreparedConnection(async (imap) => {
try {
this._isOpeningBox = true
this._lastOpenDuration = null;
const before = Date.now();
const box = await imap.openBoxAsync(folderName, readOnly)
this._lastOpenDuration = Date.now() - before;
this._isOpeningBox = false
return new IMAPBox(this, box)
} finally {
this._isOpeningBox = false
}
})
}
async getLatestBoxStatus(folderName) {
if (!folderName) {
throw new Error('IMAPConnection::getLatestBoxStatus - You must provide a folder name')
}
if (folderName === this.getOpenBoxName()) {
// If the box is already open, we need to re-issue a SELECT in order to
// get the latest stats from the box (e.g. latest uidnext, etc)
return this.openBox(folderName, {refetchBoxInfo: true})
}
return this._withPreparedConnection((imap) => imap.statusAsync(folderName))
}
async getBoxes() {
if (!this._imap) {
throw new IMAPConnectionNotReadyError(`IMAPConnection::getBoxes`)
}
return this._withPreparedConnection((imap) => imap.getBoxesAsync())
}
async addBox(folderName) {
if (!this._imap) {
throw new IMAPConnectionNotReadyError(`IMAPConnection::addBox`)
}
return this._withPreparedConnection((imap) => imap.addBoxAsync(folderName))
}
async renameBox(oldFolderName, newFolderName) {
if (!this._imap) {
throw new IMAPConnectionNotReadyError(`IMAPConnection::renameBox`)
}
return this._withPreparedConnection((imap) => imap.renameBoxAsync(oldFolderName, newFolderName))
}
async delBox(folderName) {
if (!this._imap) {
throw new IMAPConnectionNotReadyError(`IMAPConnection::delBox`)
}
return this._withPreparedConnection((imap) => imap.delBoxAsync(folderName))
}
async runOperation(operation, ctx) {
if (!this._imap) {
throw new IMAPConnectionNotReadyError(`IMAPConnection::runOperation`)
}
return new Promise((resolve, reject) => {
this._queue.push({operation, ctx, resolve, reject});
if (this._imap.state === 'authenticated' && !this._currentOperation) {
this._processNextOperation();
}
});
}
_processNextOperation() {
if (this._currentOperation) {
return;
}
this._currentOperation = this._queue.shift();
if (!this._currentOperation) {
this.emit('queue-empty');
return;
}
const {operation, ctx, resolve, reject} = this._currentOperation;
const resultPromise = operation.run(this._db, this, ctx);
if (resultPromise.constructor.name !== "Promise") {
reject(new Error(`Expected ${operation.constructor.name} to return promise.`))
}
resultPromise.then((maybeResult) => {
this._currentOperation = null;
// this._logger.info({
// operation_type: operation.constructor.name,
// operation_description: operation.description(),
// }, `Finished sync operation`)
resolve(maybeResult);
this._processNextOperation();
})
.catch((err) => {
this._currentOperation = null;
this._logger.error({
err: err,
operation_type: operation.constructor.name,
operation_description: operation.description(),
}, `IMAPConnection - operation errored`)
reject(err);
})
}
}
IMAPConnection.Capabilities = Capabilities;
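A hedged usage sketch of the connection class above (the module path and the concrete settings values are placeholders, not real credentials or confirmed paths):
import IMAPConnection from './imap-connection'; // path assumed

async function listBoxes(db, account, logger) {
  const conn = await IMAPConnection.connect({
    db,
    account,
    logger,
    settings: {
      imap_host: 'imap.example.com',
      imap_port: 993,
      imap_username: 'user@example.com',
      imap_password: 'hunter2',
      imap_security: 'SSL / TLS',
      imap_allow_insecure_ssl: false,
    },
  });
  try {
    const boxes = await conn.getBoxes();
    logger.info({boxes: Object.keys(boxes)}, 'Mailboxes on server');
    return await conn.openBox('INBOX', {readOnly: true});
  } finally {
    conn.end();
  }
}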

View file

@@ -1,150 +0,0 @@
import {NylasError, RetryableError} from './errors'
export class IMAPRetryableError extends RetryableError {
constructor(msg) {
super(msg)
this.userMessage = "We were unable to reach your IMAP provider. Please try again.";
this.statusCode = 408;
}
}
/**
* IMAPErrors that originate from NodeIMAP. See `convertImapError` for
* documentation on underlying causes
*/
export class IMAPSocketError extends IMAPRetryableError { }
export class IMAPConnectionTimeoutError extends IMAPRetryableError { }
export class IMAPAuthenticationTimeoutError extends IMAPRetryableError { }
export class IMAPTransientAuthenticationError extends IMAPRetryableError { }
export class IMAPProtocolError extends NylasError {
constructor(msg) {
super(msg)
this.userMessage = "IMAP protocol error. Please contact support@nylas.com."
this.statusCode = 401
}
}
export class IMAPAuthenticationError extends NylasError {
constructor(msg) {
super(msg)
this.userMessage = "Incorrect IMAP username or password.";
this.statusCode = 401;
}
}
export class IMAPConnectionNotReadyError extends IMAPRetryableError {
constructor(funcName) {
super(`${funcName} - You must call connect() first.`);
}
}
export class IMAPConnectionEndedError extends IMAPRetryableError {
constructor(msg = "The IMAP Connection was ended.") {
super(msg);
}
}
/**
* Certificate validation failures may correct themselves over long spans
* of time, but not over the short spans of time in which it'd make sense
* for us to retry.
*/
export class IMAPCertificateError extends NylasError {
constructor(msg) {
super(msg)
this.userMessage = "We couldn't make a secure connection to your SMTP server. Please contact support@nylas.com."
this.statusCode = 401
}
}
/**
* IMAPErrors may come from:
*
* 1. Underlying IMAP provider (Fastmail, Yahoo, etc)
* 2. Node IMAP
* 3. K2 code
*
* NodeIMAP puts a `source` attribute on `Error` objects to indicate where
* a particular error came from. See https://github.com/mscdex/node-imap/blob/master/lib/Connection.js
*
* These may have the following values:
*
* - "socket-timeout": Created by NodeIMAP when `config.socketTimeout`
* expires on the base Node `net.Socket` and socket.on('timeout') fires
* Message: 'Socket timed out while talking to server'
*
* - "timeout": Created by NodeIMAP when `config.connTimeout` has been
* reached when trying to connect the socket.
* Message: 'Timed out while connecting to server'
*
* - "socket": Created by Node's `net.Socket` on error. See:
* https://nodejs.org/api/net.html#net_event_error_1
* Message: Various from `net.Socket`
*
* - "protocol": Created by NodeIMAP when `bad` or `no` types come back
* from the IMAP protocol.
* Message: Various from underlying IMAP protocol
*
* - "authentication": Created by underlying IMAP connection or NodeIMAP
* in a few scenarios.
* Message: Various from underlying IMAP connection
* OR: No supported authentication method(s) available. Unable to login.
* OR: Logging in is disabled on this server
*
* - "timeout-auth": Created by NodeIMAP when `config.authTimeout` has
* been reached when trying to authenticate
* Message: 'Timed out while authenticating with server'
*
*/
export function convertImapError(imapError) {
let error = imapError;
if (/try again/i.test(imapError.message)) {
error = new RetryableError(imapError)
error.source = imapError.source
return error
}
if (/system error/i.test(imapError.message)) {
// System Errors encountered in the wild so far have been retryable.
error = new RetryableError(imapError)
error.source = imapError.source
return error
}
if (/user is authenticated but not connected/i.test(imapError.message)) {
// We need to treat this type of error as retryable
// See https://github.com/mscdex/node-imap/issues/523 for more details
error = new RetryableError(imapError)
error.source = imapError.source
return error
}
if (/server unavailable/i.test(imapError.message)) {
// Server Unavailable errors encountered in the wild so far have been retryable.
error = new RetryableError(imapError)
error.source = imapError.source
return error
}
switch (imapError.source) {
case "socket-timeout":
error = new IMAPConnectionTimeoutError(imapError); break;
case "timeout":
error = new IMAPConnectionTimeoutError(imapError); break;
case "socket":
if (imapError.code === "UNABLE_TO_VERIFY_LEAF_SIGNATURE") {
error = new IMAPCertificateError(imapError);
} else {
error = new IMAPSocketError(imapError);
}
break;
case "protocol":
error = new IMAPProtocolError(imapError); break;
case "authentication":
error = new IMAPAuthenticationError(imapError); break;
case "timeout-auth":
error = new IMAPAuthenticationTimeoutError(imapError); break;
default:
break;
}
error.source = imapError.source
return error
}
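A small, hypothetical illustration of the mapping documented above; the `source` and `code` values mirror the node-imap behavior described in the comment:
import {convertImapError, IMAPConnectionTimeoutError, IMAPCertificateError} from './imap-errors';

const timeoutErr = new Error('Socket timed out while talking to server');
timeoutErr.source = 'socket-timeout';
console.log(convertImapError(timeoutErr) instanceof IMAPConnectionTimeoutError); // true

const certErr = new Error('unable to verify the first certificate');
certErr.source = 'socket';
certErr.code = 'UNABLE_TO_VERIFY_LEAF_SIGNATURE';
console.log(convertImapError(certErr) instanceof IMAPCertificateError); // true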

View file

@@ -1,33 +0,0 @@
const fs = require('fs');
const path = require('path');
function loadModels(Sequelize, sequelize, {loadShared = true, modelDirs = [], schema} = {}) {
if (loadShared) {
modelDirs.unshift(path.join(__dirname, 'models'))
}
const db = {};
for (const modelsDir of modelDirs) {
for (const filename of fs.readdirSync(modelsDir)) {
if (filename.endsWith('.js') || filename.endsWith('.es6')) {
let model = sequelize.import(path.join(modelsDir, filename));
if (schema) {
model = model.schema(schema);
}
db[model.name[0].toUpperCase() + model.name.substr(1)] = model;
}
}
}
Object.keys(db).forEach((modelName) => {
if ("associate" in db[modelName]) {
db[modelName].associate(db);
}
});
return db;
}
module.exports = loadModels
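A hedged sketch of how this loader is typically wired up (the module path and the extra model directory are assumptions for illustration):
const Sequelize = require('sequelize');
const loadModels = require('./load-models'); // path assumed

const sequelize = new Sequelize('sqlite::memory:', {logging: false});
// Load the shared models bundled next to this file plus any package-specific
// model directories, then run each model's `associate` hook.
const db = loadModels(Sequelize, sequelize, {
  modelDirs: ['/path/to/package/models'], // assumed directory
});
console.log(Object.keys(db)); // e.g. ['Account', 'AccountToken', 'Transaction', ...]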

View file

@@ -1,511 +0,0 @@
/* eslint no-useless-escape: 0 */
import mailcomposer from 'mailcomposer'
const mimelib = require('mimelib');
const encoding = require('encoding');
const he = require('he');
const os = require('os');
const fs = require('fs');
const path = require('path');
const mkdirp = require('mkdirp');
const btoa = require('btoa')
const {APIError} = require('./errors');
const {deepClone} = require('./model-utils').default;
// Aiming for SNIPPET_SIZE in length, but SNIPPET_MAX_SIZE is the hard db cutoff
const SNIPPET_SIZE = 100;
const SNIPPET_MAX_SIZE = 255;
// Copied from regexp-utils.coffee.
// FIXME @karim: share code, somehow.
// Test cases: https://regex101.com/r/cK0zD8/4
// Catches link tags containing an href which is:
// - Non empty
// - Not a mailto: link
// Returns the following capturing groups:
// 1. start of the opening a tag to href="
// 2. The contents of the href without quotes
// 3. the rest of the opening a tag
// 4. the contents of the a tag
// 5. the closing tag
function urlLinkTagRegex() {
return new RegExp(/(<a.*?href\s*?=\s*?['"])((?!mailto).+?)(['"].*?>)([\s\S]*?)(<\/a>)/gim);
}
// Format of input: ['a@example.com, B <b@example.com>', 'c@example.com'],
// where each element of the array is the unparsed contents of a single
// element of the same header field. (It's totally valid to have multiple
// From/To/etc. headers on the same email.)
function parseContacts(input) {
if (!input || input.length === 0 || !input[0]) {
return [];
}
let contacts = [];
for (const headerLine of input) {
const values = mimelib.parseAddresses(headerLine);
if (!values || values.length === 0) {
continue;
}
contacts = contacts.concat(values.map(v => {
if (!v || v.length === 0) {
return null
}
const {name, address: email} = v;
return {name, email};
})
.filter(c => c != null))
}
return contacts;
}
function parseSnippet(body) {
const doc = new DOMParser().parseFromString(body, 'text/html')
const skipTags = new Set(['TITLE', 'SCRIPT', 'STYLE', 'IMG']);
const noSpaceTags = new Set(['B', 'I', 'STRONG', 'EM', 'SPAN']);
const treeWalker = document.createTreeWalker(doc, NodeFilter.SHOW_ELEMENT | NodeFilter.SHOW_TEXT, (node) => {
if (skipTags.has(node.tagName)) {
// skip this node and all its children
return NodeFilter.FILTER_REJECT;
}
if (node.nodeType === Node.TEXT_NODE) {
const nodeValue = node.nodeValue ? node.nodeValue.trim() : null;
if (nodeValue) {
return NodeFilter.FILTER_ACCEPT;
}
return NodeFilter.FILTER_SKIP;
}
return NodeFilter.FILTER_ACCEPT;
});
let extractedText = "";
let lastNodeTag = "";
while (treeWalker.nextNode()) {
if (treeWalker.currentNode.nodeType === Node.ELEMENT_NODE) {
lastNodeTag = treeWalker.currentNode.nodeName;
} else {
if (extractedText && !noSpaceTags.has(lastNodeTag)) {
extractedText += " ";
}
extractedText += treeWalker.currentNode.nodeValue;
if (extractedText.length > SNIPPET_MAX_SIZE) {
break;
}
}
}
const snippetText = extractedText.trim();
// clean up and trim snippet
let trimmed = snippetText.replace(/[\n\r]/g, ' ').replace(/\s\s+/g, ' ').substr(0, SNIPPET_MAX_SIZE);
if (trimmed) {
// TODO: strip quoted text from snippets also
// trim down to approx. SNIPPET_SIZE w/out cutting off words right in the
// middle (if possible)
const wordBreak = trimmed.indexOf(' ', SNIPPET_SIZE);
if (wordBreak !== -1) {
trimmed = trimmed.substr(0, wordBreak);
}
}
return trimmed;
}
// In goes arrays of text, out comes arrays of RFC2822 Message-Ids. Luckily,
// these days almost all text in In-Reply-To, Message-Id, and References headers
// actually conforms to the spec.
function parseReferences(input) {
if (!input || !input.length || !input[0]) {
return [];
}
const references = new Set();
for (const headerLine of input) {
for (const ref of headerLine.split(/[\s,]+/)) {
if (/^<.*>$/.test(ref)) {
references.add(ref);
}
}
}
return Array.from(references);
}
function htmlifyPlaintext(text) {
const escapedText = he.escape(text);
return `<pre class="nylas-plaintext">${escapedText}</pre>`;
}
function replaceMessageIdInBodyTrackingLinks(messageId, originalBody) {
const regex = new RegExp(`(https://.+?)MESSAGE_ID`, 'g')
return originalBody.replace(regex, `$1${messageId}`)
}
function stripTrackingLinksFromBody(originalBody) {
let body = originalBody.replace(/<img class="n1-open"[^<]+src="([a-zA-Z0-9-_:/.]*)">/g, () => {
return "";
});
body = body.replace(urlLinkTagRegex(), (match, prefix, url, suffix, content, closingTag) => {
const param = url.split("?")[1];
if (param) {
const link = decodeURIComponent(param.split("=")[1]);
return `${prefix}${link}${suffix}${content}${closingTag}`;
}
return match;
});
return body;
}
function buildTrackingBodyForRecipient({baseMessage, recipient, usesOpenTracking, usesLinkTracking} = {}) {
const {id: messageId, body} = baseMessage
const encodedEmail = btoa(recipient.email)
.replace(/\+/g, '-')
.replace(/\//g, '_');
let customBody = body
if (usesOpenTracking) {
customBody = customBody.replace(/<img class="n1-open"[^<]+src="([a-zA-Z0-9-_:/.]*)">/g, (match, url) => {
return `<img class="n1-open" width="0" height="0" style="border:0; width:0; height:0;" src="${url}?r=${encodedEmail}">`;
});
}
if (usesLinkTracking) {
customBody = customBody.replace(urlLinkTagRegex(), (match, prefix, url, suffix, content, closingTag) => {
return `${prefix}${url}&r=${encodedEmail}${suffix}${content}${closingTag}`;
});
}
return replaceMessageIdInBodyTrackingLinks(messageId, customBody);
}
function getReplyHeaders(messageReplyingTo) {
let inReplyTo;
let references;
if (messageReplyingTo.headerMessageId) {
inReplyTo = messageReplyingTo.headerMessageId;
if (messageReplyingTo.references) {
const refById = {};
for (const ref of messageReplyingTo.references) {
refById[ref.id] = ref;
}
references = [];
for (const referenceId of messageReplyingTo.referencesOrder) {
references.push(refById[referenceId].rfc2822MessageId);
}
if (!references.includes(messageReplyingTo.headerMessageId)) {
references.push(messageReplyingTo.headerMessageId);
}
} else {
references = [messageReplyingTo.headerMessageId];
}
}
return {inReplyTo, references}
}
function bodyFromParts(imapMessage, desiredParts) {
let body = '';
for (const {id, mimeType, transferEncoding, charset} of desiredParts) {
let decoded = '';
// see https://www.w3.org/Protocols/rfc1341/5_Content-Transfer-Encoding.html
if ((/quot(ed)?[-/]print(ed|able)?/gi).test(transferEncoding)) {
decoded = mimelib.decodeQuotedPrintable(imapMessage.parts[id], charset);
} else if ((/base64/gi).test(transferEncoding)) {
decoded = mimelib.decodeBase64(imapMessage.parts[id], charset);
} else {
// Treat this as having no encoding and decode based only on the charset
//
// According to https://tools.ietf.org/html/rfc2045#section-5.2,
// this should default to ascii; however, if we don't get a charset,
// it's possible clients (like nodemailer) encoded the data as utf-8
// anyway. Since ascii is a strict subset of utf-8, it's safer to
// try and decode as utf-8 if we don't have the charset.
//
// (This applies to decoding quoted-printable and base64 as well. The
// mimelib library, if charset is null, will default to utf-8)
//
decoded = encoding.convert(imapMessage.parts[id], 'utf-8', charset).toString('utf-8');
}
// desiredParts are in order of the MIME tree walk, e.g. 1.1, 1.2, 2...,
// and for multipart/alternative arrays, we have already pulled out the
// highest fidelity part (generally HTML).
//
// Therefore, the correct way to display multiple parts is to simply
// concatenate later ones with the body of the previous MIME parts.
//
// This may seem kind of weird, but some MUAs _do_ send out whack stuff
// like an HTML body followed by a plaintext footer.
if (mimeType === 'text/plain') {
body += htmlifyPlaintext(decoded);
} else {
body += decoded;
}
}
// sometimes decoding results in a NUL-terminated body string, which makes
// SQLite blow up with an 'unrecognized token' error
body = body.replace(/\0/g, '');
return body;
}
// Since we only fetch the MIME structure and specific desired MIME parts from
// IMAP, we unfortunately can't use an existing library like mailparser to parse
// the message, and have to do fun stuff like deal with character sets and
// content-transfer-encodings ourselves.
async function parseFromImap(imapMessage, desiredParts, {db, accountId, folder}) {
const {Message, Label} = db;
const {attributes} = imapMessage;
// this key name can change depending on which subset of headers we're downloading,
// so to prevent having to update this code every time we change the set,
// dynamically look up the key instead
const headerKey = Object.keys(imapMessage.parts).filter(k => k.startsWith('HEADER'))[0]
const headers = imapMessage.parts[headerKey].toString('ascii')
const parsedHeaders = mimelib.parseHeaders(headers);
for (const key of ['x-gm-thrid', 'x-gm-msgid', 'x-gm-labels']) {
parsedHeaders[key] = attributes[key];
}
const parsedMessage = {
to: parseContacts(parsedHeaders.to),
cc: parseContacts(parsedHeaders.cc),
bcc: parseContacts(parsedHeaders.bcc),
from: parseContacts(parsedHeaders.from),
replyTo: parseContacts(parsedHeaders['reply-to']),
accountId: accountId,
body: bodyFromParts(imapMessage, desiredParts),
snippet: null,
unread: !attributes.flags.includes('\\Seen'),
starred: attributes.flags.includes('\\Flagged'),
// We limit drafts to the drafts and all mail folders because some clients
// may send messages and improperly leave the draft flag set, and also
// because we want to exclude drafts moved to the trash from the drafts view
// see https://github.com/nylas/cloud-core/commit/1433921a166ddcba7c269158d65febb7928767d8
// & associated phabricator bug https://phab.nylas.com/T5696
isDraft: (
['drafts', 'all'].includes(folder.role) &&
(
attributes.flags.includes('\\Draft') ||
(parsedHeaders['x-gm-labels'] || []).includes('\\Draft')
)
),
// We prefer the date from the message headers because the date is one of
// the fields we use for generating unique message IDs, and the server
// INTERNALDATE, `attributes.date`, may differ across accounts for the same
// message. If the Date header is not included in the message, we fall
// back to the INTERNALDATE and it's possible we'll generate different IDs
// for the same message delivered to different accounts (which is better
// than having message ID collisions for different messages, which could
// happen if we did not include the date).
date: parsedHeaders.date ? parsedHeaders.date[0] : imapMessage.attributes.date,
folderImapUID: attributes.uid,
folderId: folder.id,
folder: null,
labels: [],
headerMessageId: parseReferences(parsedHeaders['message-id'])[0],
// References are not saved on the message model itself, but are later
// converted to associated Reference objects so we can index them. Since we
// don't do tree threading, we don't need to care about In-Reply-To
// separately, and can simply associate them all in the same way.
// Generally, References already contains the Message-IDs in In-Reply-To,
// but we concat and dedupe just in case.
references: parseReferences(
(parsedHeaders.references || []).concat(
(parsedHeaders['in-reply-to'] || []), (parsedHeaders['message-id'] || [])
)
),
gMsgId: parsedHeaders['x-gm-msgid'],
gThrId: parsedHeaders['x-gm-thrid'],
subject: parsedHeaders.subject ? parsedHeaders.subject[0] : '(no subject)',
}
/**
* mimelib will return a string date with the leading zero of single
* digit dates truncated. e.g. February 1 instead of February 01. When
* we set Message Date headers, we use javascript's `toUTCString` which
* zero pads digit dates. To make the hashes line up, we need to ensure
* that the date string used in the ID generation is also zero-padded.
*/
const messageForHashing = deepClone(parsedMessage)
messageForHashing.date = Message.dateString(parsedMessage.date);
// Inversely to `buildForSend`, we leave the date header as it is so that the
// format is consistent for the generative IDs, then convert it to a Date object
parsedMessage.id = Message.hash(messageForHashing)
parsedMessage.date = new Date(Date.parse(parsedMessage.date));
parsedMessage.snippet = parseSnippet(parsedMessage.body);
parsedMessage.folder = folder;
const xGmLabels = parsedHeaders['x-gm-labels']
if (xGmLabels) {
parsedMessage.folderImapXGMLabels = JSON.stringify(xGmLabels)
parsedMessage.labels = await Label.findXGMLabels(xGmLabels)
}
if (process.env.NYLAS_DEBUG) {
const outJSON = JSON.stringify({imapMessage, desiredParts, result: parsedMessage});
const outDir = path.join(os.tmpdir(), "k2-parse-output", folder.name)
const outFile = path.join(outDir, imapMessage.attributes.uid.toString());
mkdirp.sync(outDir);
fs.writeFileSync(outFile, outJSON);
}
return parsedMessage;
}
async function buildForSend(db, json) {
const {Thread, Message, Reference} = db
let replyToThread;
let replyToMessage;
if (json.thread_id != null) {
replyToThread = await Thread.find({
where: {id: json.thread_id},
include: [{
model: Message,
as: 'messages',
attributes: ['id'],
}],
});
}
if (json.reply_to_message_id != null) {
replyToMessage = await Message.findById(
json.reply_to_message_id,
{ include: [{model: Reference, as: 'references', attributes: ['id', 'rfc2822MessageId']}] }
)
}
if (replyToThread && replyToMessage) {
if (!replyToThread.messages.find((msg) => msg.id === replyToMessage.id)) {
throw new APIError(`Message ${replyToMessage.id} is not in thread ${replyToThread.id}`, 400)
}
}
let thread;
let replyHeaders = {};
let inReplyToLocalMessageId;
if (replyToMessage) {
inReplyToLocalMessageId = replyToMessage.id;
replyHeaders = getReplyHeaders(replyToMessage);
thread = await replyToMessage.getThread();
} else if (replyToThread) {
thread = replyToThread;
const previousMessages = thread.messages.filter(msg => !msg.isDraft);
if (previousMessages.length > 0) {
const lastMessage = previousMessages[previousMessages.length - 1]
inReplyToLocalMessageId = lastMessage.id;
replyHeaders = getReplyHeaders(lastMessage);
}
}
const {inReplyTo, references} = replyHeaders
const date = new Date()
const message = {
accountId: json.account_id,
threadId: thread ? thread.id : null,
headerMessageId: Message.buildHeaderMessageId(json.client_id),
from: json.from,
to: json.to,
cc: json.cc,
bcc: json.bcc,
replyTo: json.reply_to,
subject: json.subject,
body: json.body,
unread: false,
isDraft: json.draft,
isSent: false,
version: 0,
date: date,
inReplyToLocalMessageId: inReplyToLocalMessageId,
uploads: json.uploads,
}
// We have to clone the message and change the date for hashing because the
// date we get later when we parse from IMAP is a different format, per the
// nodemailer buildmail function that gives us the raw message and replaces
// the date header with this modified UTC string
// https://github.com/nodemailer/buildmail/blob/master/lib/buildmail.js#L470
const messageForHashing = deepClone(message)
messageForHashing.date = Message.dateString(date)
message.id = Message.hash(messageForHashing)
message.body = replaceMessageIdInBodyTrackingLinks(message.id, message.body)
const instance = Message.build(message)
// TODO we set these temporary properties which aren't stored in the database
// model because SendmailClient requires them to send the message with the
// correct headers.
// This should be cleaned up
instance.inReplyTo = inReplyTo;
instance.references = references;
return instance;
}
const formatParticipants = (participants) => {
// Something weird happens with the mime building when the participant name
// has an @ symbol in it (e.g. a name and email of hello@gmail.com turns into
// 'hello@ <gmail.com hello@gmail.com>'), so replace it with whitespace.
return participants.map(p => `${p.name.replace('@', ' ')} <${p.email}>`).join(',');
}
// Transforms the message into a json object with the properties formatted in
// the way mailer libraries (e.g. nodemailer, mailcomposer) expect.
function getMailerPayload(message) {
const msgData = {};
for (const field of ['from', 'to', 'cc', 'bcc']) {
if (message[field]) {
msgData[field] = formatParticipants(message[field])
}
}
msgData.date = message.date;
msgData.subject = message.subject;
msgData.html = message.body;
msgData.messageId = message.headerMessageId || message.message_id_header;
msgData.attachments = []
const uploads = message.uploads || []
for (const upload of uploads) {
msgData.attachments.push({
filename: upload.filename,
content: fs.createReadStream(upload.targetPath),
cid: upload.inline ? upload.id : null,
})
}
if (message.replyTo) {
msgData.replyTo = formatParticipants(message.replyTo);
}
msgData.inReplyTo = message.inReplyTo;
msgData.references = message.references;
// message.headers is usually unset, but in the case that we do add
// headers elsewhere, we don't want to override them here
msgData.headers = message.headers || {};
msgData.headers['User-Agent'] = `NylasMailer-K2`
return msgData;
}
async function buildMime(message, {includeBcc = false} = {}) {
const payload = getMailerPayload(message)
const builder = mailcomposer(payload)
const mimeNode = await (new Promise((resolve, reject) => {
builder.build((error, result) => (
error ? reject(error) : resolve(result)
))
}));
if (!includeBcc || !message.bcc || message.bcc.length === 0) {
return mimeNode.toString('ascii')
}
return `Bcc: ${formatParticipants(message.bcc)}\n${mimeNode.toString('ascii')}`
}
module.exports = {
buildForSend,
getReplyHeaders,
parseFromImap,
parseSnippet,
parseContacts,
stripTrackingLinksFromBody,
buildTrackingBodyForRecipient,
replaceMessageIdInBodyTrackingLinks,
getMailerPayload,
buildMime,
}
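A hypothetical illustration of the header format `parseContacts` expects (each array element is the raw value of one To/From/etc. header line, as described in the comment above; the module path is assumed):
const {parseContacts} = require('./message-utils'); // path assumed

const contacts = parseContacts(['a@example.com, B <b@example.com>', 'c@example.com']);
// Roughly: [{name: '', email: 'a@example.com'},
//           {name: 'B', email: 'b@example.com'},
//           {name: '', email: 'c@example.com'}]
console.log(contacts.map(c => c.email)); // ['a@example.com', 'b@example.com', 'c@example.com']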

View file

@@ -1,82 +0,0 @@
import os from 'os'
import {isClientEnv, isCloudEnv} from './env-helpers'
class MetricsReporter {
constructor() {
this._honey = null
if (isCloudEnv()) {
const LibHoney = require('libhoney') // eslint-disable-line
this._honey = new LibHoney({
writeKey: process.env.HONEY_WRITE_KEY,
dataset: process.env.HONEY_DATASET,
})
}
}
async collectCPUUsage() {
return new Promise((resolve) => {
const startUsage = process.cpuUsage();
const sampleDuration = 400;
setTimeout(() => {
const {user, system} = process.cpuUsage(startUsage);
const fractionToPercent = 100.0;
resolve(Math.round((user + system) / (sampleDuration * 1000.0) * fractionToPercent));
}, sampleDuration);
});
}
sendToHoneycomb(info) {
if (!this._honey) {
throw new Error('Metrics Reporter: Honeycomb is not available in this environment')
}
this._honey.sendNow(info);
}
async reportEvent(info) {
if (!info.nylasId) {
throw new Error("Metrics Reporter: You must include an nylasId");
}
const logger = global.Logger.child({accountEmail: info.emailAddress})
const {workingSetSize, privateBytes, sharedBytes} = process.getProcessMemoryInfo();
info.hostname = os.hostname();
info.cpus = os.cpus().length;
info.arch = os.arch();
info.platform = process.platform;
info.version = NylasEnv.getVersion();
info.processWorkingSetSize = workingSetSize;
info.processPrivateBytes = privateBytes;
info.processSharedBytes = sharedBytes;
try {
if (isClientEnv()) {
if (NylasEnv.inDevMode()) { return }
if (!info.accountId) {
throw new Error("Metrics Reporter: You must include an accountId");
}
const {N1CloudAPI, NylasAPIRequest} = require('nylas-exports') // eslint-disable-line
const req = new NylasAPIRequest({
api: N1CloudAPI,
options: {
path: `/ingest-metrics`,
method: 'POST',
body: info,
accountId: info.accountId,
},
});
await req.run()
} else {
this.sendToHoneycomb(info)
}
logger.log(info, "Metrics Reporter: Submitted.", info);
} catch (err) {
logger.warn("Metrics Reporter: Submission Failed.", {error: err, ...info});
}
}
}
export default new MetricsReporter();

View file

@@ -1,34 +0,0 @@
/* eslint no-unused-vars: 0 */
module.exports = {
up: function up(queryInterface, Sequelize) {
return queryInterface.createTable('Users', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER,
},
first_name: {
type: Sequelize.STRING,
},
last_name: {
type: Sequelize.STRING,
},
bio: {
type: Sequelize.TEXT,
},
createdAt: {
allowNull: false,
type: Sequelize.DATE,
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE,
},
});
},
down: function down(queryInterface, Sequelize) {
return queryInterface.dropTable('Users');
},
};

View file

@@ -1,64 +0,0 @@
import _ from 'underscore'
function deepClone(object, customizer, stackSeen = [], stackRefs = []) {
let newObject;
if (!_.isObject(object)) { return object; }
if (_.isFunction(object)) { return object; }
if (_.isArray(object)) {
// http://perfectionkills.com/how-ecmascript-5-still-does-not-allow-to-subclass-an-array/
newObject = [];
} else if (object instanceof Date) {
// You can't clone dates by iterating through `getOwnPropertyNames`
// of the Date object. We need to special-case Dates.
newObject = new Date(object);
} else {
newObject = Object.create(Object.getPrototypeOf(object));
}
// Circular reference check
const seenIndex = stackSeen.indexOf(object);
if (seenIndex >= 0) { return stackRefs[seenIndex]; }
stackSeen.push(object); stackRefs.push(newObject);
// It's important to use getOwnPropertyNames instead of Object.keys to
// get the non-enumerable items as well.
for (const key of Array.from(Object.getOwnPropertyNames(object))) {
const newVal = deepClone(object[key], customizer, stackSeen, stackRefs);
if (_.isFunction(customizer)) {
newObject[key] = customizer(key, newVal);
} else {
newObject[key] = newVal;
}
}
return newObject;
}
function copyModel(Model, model, updates = {}) {
const fields = Object.keys(model.dataValues)
const data = {}
for (const field of fields) {
// We can't just copy over the values directly from `dataValues` because
// they are the raw values, and we would ignore custom getters.
// Rather, we access them from the model instance.
// For example, our JSON database type is simply a string, and the custom
// getter parses it into JSON. We want the parsed JSON, not the raw string.
data[field] = model[field]
}
return Model.build(Object.assign({}, data, updates))
}
function isValidId(value) {
if (value == null) { return false; }
if (isNaN(parseInt(value, 36))) {
return false
}
return true
}
export default {
deepClone,
copyModel,
isValidId,
}
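A brief, hypothetical demonstration of the special cases `deepClone` handles (Dates and circular references), per the comments above; the require style matches the one used in message-utils:
const {deepClone} = require('./model-utils').default;

const original = {created: new Date(0), tags: ['a', 'b']};
original.self = original; // circular reference

const copy = deepClone(original);
console.log(copy.created instanceof Date); // true  - Dates are cloned, not iterated
console.log(copy.self === copy);           // true  - cycles map back to the new object
console.log(copy.tags !== original.tags);  // true  - arrays are copied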

View file

@@ -1,24 +0,0 @@
module.exports = (sequelize, Sequelize) => {
const AccountToken = sequelize.define('accountToken', {
value: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
},
}, {
indexes: [
{ fields: ['value'] },
],
classMethods: {
associate: ({Account}) => {
AccountToken.belongsTo(Account, {
onDelete: "CASCADE",
foreignKey: {
allowNull: false,
},
});
},
},
});
return AccountToken;
};

View file

@@ -1,129 +0,0 @@
const crypto = require('crypto');
const {JSONColumn, JSONArrayColumn} = require('../database-types');
const {credentialsForProvider, smtpConfigFromSettings} = require('../auth-helpers');
const {MAX_INDEXABLE_LENGTH} = require('../db-utils');
const {DB_ENCRYPTION_ALGORITHM, DB_ENCRYPTION_PASSWORD} = process.env;
module.exports = (sequelize, Sequelize) => {
const Account = sequelize.define('account', {
id: { type: Sequelize.STRING(65), primaryKey: true },
name: Sequelize.STRING,
provider: Sequelize.STRING,
emailAddress: Sequelize.STRING(MAX_INDEXABLE_LENGTH),
connectionSettings: JSONColumn('connectionSettings'),
connectionCredentials: Sequelize.TEXT,
syncPolicy: JSONColumn('syncPolicy'),
syncError: JSONColumn('syncError'),
firstSyncCompletion: {
type: Sequelize.STRING(14),
allowNull: true,
defaultValue: null,
},
lastSyncCompletions: JSONArrayColumn('lastSyncCompletions'),
}, {
indexes: [
{
unique: true,
fields: ['id'],
},
],
classMethods: {
associate(data = {}) {
Account.hasMany(data.AccountToken, {as: 'tokens', onDelete: 'cascade', hooks: true})
},
hash({emailAddress, connectionSettings} = {}) {
const idString = `${emailAddress}${JSON.stringify(connectionSettings)}`;
return crypto.createHash('sha256').update(idString, 'utf8').digest('hex')
},
upsertWithCredentials(accountParams, credentials) {
if (!accountParams || !credentials || !accountParams.emailAddress || !accountParams.connectionSettings) {
throw new Error("Need to pass accountParams and credentials to upsertWithCredentials")
}
const id = Account.hash(accountParams)
return Account.findById(id).then((existing) => {
const account = existing || Account.build(Object.assign({id}, accountParams))
// always update with the latest credentials
account.setCredentials(credentials);
return account.save().then((saved) => {
return sequelize.models.accountToken.create({accountId: saved.id}).then((token) => {
return Promise.resolve({account: saved, token: token})
})
});
});
},
},
instanceMethods: {
toJSON() {
return {
id: this.id,
name: this.name,
object: 'account',
organization_unit: (this.provider === 'gmail') ? 'label' : 'folder',
provider: this.provider,
email_address: this.emailAddress,
connection_settings: this.connectionSettings,
sync_policy: this.syncPolicy,
sync_error: this.syncError,
first_sync_completion: this.firstSyncCompletion / 1,
last_sync_completions: this.lastSyncCompletions,
created_at: this.createdAt,
}
},
errored() {
return this.syncError != null;
},
setCredentials(json) {
if (!(json instanceof Object)) {
throw new Error("Call setCredentials with JSON!")
}
if (DB_ENCRYPTION_ALGORITHM && DB_ENCRYPTION_PASSWORD) {
const cipher = crypto.createCipher(DB_ENCRYPTION_ALGORITHM, DB_ENCRYPTION_PASSWORD)
let crypted = cipher.update(JSON.stringify(json), 'utf8', 'hex')
crypted += cipher.final('hex');
this.connectionCredentials = crypted;
} else {
this.connectionCredentials = JSON.stringify(json);
}
},
decryptedCredentials() {
let dec = null;
if (DB_ENCRYPTION_ALGORITHM && DB_ENCRYPTION_PASSWORD) {
const decipher = crypto.createDecipher(DB_ENCRYPTION_ALGORITHM, DB_ENCRYPTION_PASSWORD)
dec = decipher.update(this.connectionCredentials, 'hex', 'utf8')
dec += decipher.final('utf8');
} else {
dec = this.connectionCredentials;
}
try {
return JSON.parse(dec);
} catch (err) {
return null;
}
},
smtpConfig() {
// We always call credentialsForProvider() here because n1Cloud
// sometimes needs to send emails for accounts which did not have their
// full SMTP settings saved to the database.
const {connectionSettings, connectionCredentials} = credentialsForProvider({
provider: this.provider,
settings: Object.assign({}, this.decryptedCredentials(), this.connectionSettings),
email: this.emailAddress,
});
return smtpConfigFromSettings(this.provider, connectionSettings, connectionCredentials);
},
},
});
return Account;
};

View file

@@ -1,26 +0,0 @@
const {JSONArrayColumn} = require('../database-types');
const {MAX_INDEXABLE_LENGTH} = require('../db-utils');
module.exports = (sequelize, Sequelize) => {
return sequelize.define('transaction', {
event: Sequelize.STRING(MAX_INDEXABLE_LENGTH),
object: Sequelize.STRING(MAX_INDEXABLE_LENGTH),
objectId: Sequelize.STRING(MAX_INDEXABLE_LENGTH),
accountId: Sequelize.STRING(MAX_INDEXABLE_LENGTH),
changedFields: JSONArrayColumn('changedFields'),
}, {
indexes: [
{ fields: ['accountId'] },
],
instanceMethods: {
toJSON: function toJSON() {
return {
id: `${this.id}`,
event: this.event,
object: this.object,
objectId: `${this.objectId}`,
}
},
},
});
}

View file

@@ -1,56 +0,0 @@
/* eslint no-restricted-syntax: 0 */
require('promise.prototype.finally')
const props = require('promise-props');
const _ = require('underscore')
global.Promise.prototype.thenReturn = function thenReturn(value) {
return this.then(function then() { return Promise.resolve(value); })
}
function sleep(ms) {
return new Promise((resolve) => setTimeout(resolve, ms))
}
function each(iterable, iterator) {
return Promise.resolve(iterable).then((array) => {
return new Promise((resolve, reject) => {
Array.from(array).reduce((prevPromise, item, idx, len) => (
prevPromise.then(() => Promise.resolve(iterator(item, idx, len)))
), Promise.resolve())
.then(() => resolve(iterable))
.catch((err) => reject(err))
})
})
}
function promisify(nodeFn) {
return function wrapper(...fnArgs) {
return new Promise((resolve, reject) => {
nodeFn.call(this, ...fnArgs, (err, ...results) => {
if (err) {
reject(err)
return
}
resolve(...results)
});
})
}
}
function promisifyAll(obj) {
for (const key in obj) {
if (!key.endsWith('Async') && _.isFunction(obj[key])) {
obj[`${key}Async`] = promisify(obj[key])
}
}
return obj
}
module.exports = {
each,
sleep,
promisify,
promisifyAll,
props: props,
}
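A short usage sketch of the helpers above, assuming they live in a `promise-utils` module alongside this file:
const fs = require('fs');
const {promisify, each, sleep} = require('./promise-utils'); // path assumed

const readFileAsync = promisify(fs.readFile);

async function demo() {
  const pkg = await readFileAsync('package.json', 'utf8');
  console.log(JSON.parse(pkg).name);

  // `each` runs the iterator serially, one item at a time.
  await each(['a', 'b', 'c'], async (item) => {
    await sleep(10);
    console.log(item); // printed in order
  });
}
demo();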

View file

@@ -1,79 +0,0 @@
/* eslint no-useless-escape: 0 */
import nodemailer from 'nodemailer'
import {APIError} from './errors'
import {convertSmtpError} from './smtp-errors'
import {getMailerPayload, buildMime} from './message-utils'
const MAX_RETRIES = 1;
class SendmailClient {
constructor(account, logger) {
this._transporter = nodemailer.createTransport(Object.assign(account.smtpConfig(), {pool: true}));
this._logger = logger;
}
async _send(msgData) {
let error;
let results;
// disable nodemailer's automatic X-Mailer header
msgData.xMailer = false;
for (let i = 0; i <= MAX_RETRIES; i++) {
try {
results = await this._transporter.sendMail(msgData);
} catch (err) {
// Keep retrying for MAX_RETRIES
error = convertSmtpError(err);
this._logger.error(err);
}
if (!results) {
continue;
}
const {rejected, pending} = results;
if ((rejected && rejected.length > 0) || (pending && pending.length > 0)) {
// At least one recipient was rejected by the server,
// but at least one recipient got it. Don't retry; throw an
// error so that we surface the failure to the client.
throw new APIError('Sending to at least one recipient failed', 402, {results});
}
return
}
this._logger.error('Max sending retries reached');
let userMessage = 'Sending failed';
let statusCode = 500;
if (error && error.userMessage && error.statusCode) {
userMessage = `Sending failed - ${error.userMessage}`;
statusCode = error.statusCode;
}
const {host, port, secure} = this._transporter.transporter.options;
throw new APIError(userMessage, statusCode, {
originalError: error,
smtp_host: host,
smtp_port: port,
smtp_use_ssl: secure,
});
}
async send(message) {
if (message.isSent) {
throw new Error(`Cannot send message ${message.id}, it has already been sent`);
}
const payload = getMailerPayload(message)
await this._send(payload);
}
async sendCustom(customMessage, recipients) {
const envelope = {};
for (const field of Object.keys(recipients)) {
envelope[field] = recipients[field].map(r => r.email);
}
envelope.from = customMessage.from.map(c => c.email)
const raw = await buildMime(customMessage);
await this._send({raw, envelope});
}
}
module.exports = SendmailClient;

View file

@@ -1,68 +0,0 @@
import {NylasError, RetryableError} from './errors'
export class SMTPRetryableError extends RetryableError {
constructor(msg) {
super(msg)
this.userMessage = "We were unable to reach your SMTP server. Please try again."
this.statusCode = 408
}
}
export class SMTPConnectionTimeoutError extends SMTPRetryableError { }
export class SMTPConnectionEndedError extends SMTPRetryableError { }
export class SMTPConnectionTLSError extends SMTPRetryableError { }
export class SMTPProtocolError extends NylasError {
constructor(msg) {
super(msg)
this.userMessage = "SMTP protocol error. Please check your SMTP settings."
this.statusCode = 401
}
}
export class SMTPConnectionDNSError extends NylasError {
constructor(msg) {
super(msg)
this.userMessage = "We were unable to look up your SMTP host. Please check the SMTP server name."
this.statusCode = 401
}
}
export class SMTPAuthenticationError extends NylasError {
constructor(msg) {
super(msg)
this.userMessage = "Incorrect SMTP username or password."
this.statusCode = 401
}
}
/* Nodemailer's errors are just regular old Error objects, so we have to
* test the error message to determine more about what they mean
*/
export function convertSmtpError(err) {
// TODO: what error is thrown if you're offline?
// TODO: what error is thrown if the message you're sending is too large?
if (/(?:connection timeout)|(?:connect etimedout)/i.test(err.message)) {
return new SMTPConnectionTimeoutError(err)
}
if (/(?:connection|socket) closed?/i.test(err.message)) {
const smtpErr = new SMTPConnectionEndedError(err)
if (err.code) {
// e.g. https://github.com/nodemailer/nodemailer/blob/master/lib/smtp-transport/index.js#L184-L185
smtpErr.code = err.code;
}
return smtpErr;
}
if (/error initiating tls/i.test(err.message)) {
return new SMTPConnectionTLSError(err);
}
if (/getaddrinfo enotfound/i.test(err.message)) {
return new SMTPConnectionDNSError(err);
}
if (/unknown protocol/i.test(err.message)) {
return new SMTPProtocolError(err);
}
if (/(?:invalid login)|(?:username and password not accepted)|(?:incorrect username or password)|(?:authentication failed)/i.test(err.message)) {
return new SMTPAuthenticationError(err);
}
return err;
}
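A hypothetical example of the message-based mapping described in the comment above (the error messages are illustrative strings, not captured server output):
import {convertSmtpError, SMTPAuthenticationError, SMTPConnectionDNSError} from './smtp-errors';

const authErr = convertSmtpError(new Error('Invalid login: 535-5.7.8 Username and Password not accepted'));
console.log(authErr instanceof SMTPAuthenticationError); // true
console.log(authErr.statusCode);                         // 401

const dnsErr = convertSmtpError(new Error('getaddrinfo ENOTFOUND smtp.example.com'));
console.log(dnsErr instanceof SMTPConnectionDNSError);   // true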

View file

@@ -1,9 +0,0 @@
export function trimTo(str, size) {
const g = (typeof window !== 'undefined' ? window : global) || {}
const TRIM_SIZE = size || process.env.TRIM_SIZE || g.TRIM_SIZE || 256;
let trimmed = str;
if (str.length >= TRIM_SIZE) {
trimmed = `${str.slice(0, TRIM_SIZE / 2)}…${str.slice(str.length - TRIM_SIZE / 2, str.length)}`
}
return trimmed
}
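A tiny, hypothetical example assuming the default TRIM_SIZE of 256 (the module path is assumed):
import {trimTo} from './string-utils'; // path assumed

const long = 'x'.repeat(1000);
console.log(trimTo(long).length);    // 257: first 128 chars + '…' + last 128 chars
console.log(trimTo('short string')); // returned unchanged (below the threshold)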

View file

@@ -1,15 +0,0 @@
import constants from 'constants';
const INSECURE_TLS_OPTIONS = {
secureProtocol: 'SSLv23_method',
rejectUnauthorized: false,
}
const SECURE_TLS_OPTIONS = {
secureProtocol: 'SSLv23_method',
// See similar code in cloud-core for explanation of each flag:
// https://github.com/nylas/cloud-core/blob/e70f9e023b880090564b62fca8532f56ec77bfc3/sync-engine/inbox/auth/generic.py#L397-L435
secureOptions: constants.SSL_OP_NO_SSLv3 | constants.SSL_OP_NO_SSLv2 | constants.SSL_OP_NO_COMPRESSION | constants.SSL_OP_CIPHER_SERVER_PREFERENCE | constants.SSL_OP_SINGLE_DH_USE | constants.SSL_OP_SINGLE_ECDH_USE,
}
export {SECURE_TLS_OPTIONS, INSECURE_TLS_OPTIONS};

View file

@@ -107,42 +107,6 @@ async function electronRebuild() {
})
}
const getJasmineDir = (packageName) => path.resolve(
path.join('packages', packageName, 'spec', 'jasmine')
)
const getJasmineConfigPath = (packageName) => path.resolve(
path.join(getJasmineDir(packageName), 'config.json')
)
function linkJasmineConfigs() {
console.log("\n---> Linking Jasmine configs");
const linkToPackages = ['cloud-api', 'cloud-core', 'cloud-workers']
const from = getJasmineConfigPath('isomorphic-core')
for (const packageName of linkToPackages) {
const packageDir = path.join('packages', packageName)
if (!fs.existsSync(packageDir)) {
console.log("\n---> No cloud packages to link. Moving on")
return
}
const jasmineDir = getJasmineDir(packageName)
if (!fs.existsSync(jasmineDir)) {
fs.mkdirSync(jasmineDir)
}
const to = getJasmineConfigPath(packageName)
unlinkIfExistsSync(to)
fs.symlinkSync(from, to, 'file')
}
}
function linkIsomorphicCoreSpecs() {
console.log("\n---> Linking isomorphic-core specs to client-app specs")
const from = path.resolve(path.join('packages', 'isomorphic-core', 'spec'))
const to = path.resolve(path.join('packages', 'client-app', 'spec', 'isomorphic-core'))
unlinkIfExistsSync(to)
fs.symlinkSync(from, to, 'dir')
}
function getInstallTarget() {
const {INSTALL_TARGET} = process.env
if (!INSTALL_TARGET) {
@@ -175,8 +139,6 @@ async function main() {
await npm('install', {cwd: 'packages/client-app'})
}
await electronRebuild();
linkJasmineConfigs();
linkIsomorphicCoreSpecs();
}
} catch (err) {
console.error(err);