mirror of
https://github.com/Foundry376/Mailspring.git
synced 2025-09-08 13:44:53 +08:00
[local-sync] Remove logger and metrics from local-sync and nylas-core
This commit is contained in:
parent
3ff98f45a9
commit
296846adf5
11 changed files with 45 additions and 200 deletions
|
@ -1,8 +1,9 @@
|
|||
import {createLogger} from './src/shared/logger'
|
||||
|
||||
/**
 * Package entry point. Installs a package-wide structured logger on
 * `global.Logger`, then boots the two local-sync halves: the HTTP API
 * server and the background sync worker.
 *
 * Side effects only; returns nothing.
 */
export function activate() {
  global.Logger = createLogger('local-sync')
  require('./src/local-api/app.js');
  require('./src/local-sync-worker/app.js');
}
|
||||
|
||||
export function deactivate() {
|
||||
|
|
|
@ -1,6 +1,3 @@
|
|||
const Metrics = require(`../local-sync-metrics`)
|
||||
Metrics.startCapturing('nylas-k2-api')
|
||||
|
||||
const Hapi = require('hapi');
|
||||
const HapiSwagger = require('hapi-swagger');
|
||||
const HapiBoom = require('hapi-boom-decorators')
|
||||
|
@ -10,20 +7,9 @@ const Vision = require('vision');
|
|||
const Package = require('../../package');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const {Logger} = require(`nylas-core`);
|
||||
const LocalDatabaseConnector = require('../shared/local-database-connector')
|
||||
const SchedulerUtils = require('../shared/scheduler-utils')
|
||||
|
||||
global.Metrics = Metrics
|
||||
global.Logger = Logger.createLogger('nylas-k2-api')
|
||||
|
||||
const onUnhandledError = (err) => {
|
||||
global.Logger.fatal(err, 'Unhandled error')
|
||||
global.Metrics.reportError(err)
|
||||
}
|
||||
process.on('uncaughtException', onUnhandledError)
|
||||
process.on('unhandledRejection', onUnhandledError)
|
||||
|
||||
const server = new Hapi.Server({
|
||||
connections: {
|
||||
router: {
|
||||
|
|
|
@ -1,22 +1,8 @@
|
|||
const Metrics = require(`../local-sync-metrics`)
|
||||
Metrics.startCapturing('nylas-k2-dashboard')
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const Inert = require('inert');
|
||||
const Hapi = require('hapi');
|
||||
const HapiWebSocket = require('hapi-plugin-websocket');
|
||||
const {Logger} = require(`nylas-core`);
|
||||
|
||||
global.Metrics = Metrics
|
||||
global.Logger = Logger.createLogger('nylas-k2-dashboard')
|
||||
|
||||
const onUnhandledError = (err) => {
|
||||
global.Logger.fatal(err, 'Unhandled error')
|
||||
global.Metrics.reportError(err)
|
||||
}
|
||||
process.on('uncaughtException', onUnhandledError)
|
||||
process.on('unhandledRejection', onUnhandledError)
|
||||
|
||||
const server = new Hapi.Server();
|
||||
server.connection({ port: process.env.PORT });
|
||||
|
|
|
@ -1,50 +0,0 @@
|
|||
// Thin wrapper around the SignalFx ingest client. Datapoints are only
// shipped from deployed environments; in development every call is a
// no-op, so local runs never need a token.
const os = require('os')
const signalfx = require('signalfx')

const {env: {NODE_ENV, SIGNALFX_TOKEN}, pid} = process

// Created on demand by startCapturing(); stays null while reporting is
// disabled, which also short-circuits reportMetric().
let signalfxClient = null

// Datapoint categories accepted by SignalFx; pass one of these as the
// `type` argument to reportMetric().
const MetricTypes = {
  Gauge: 'gauges',
  Counter: 'counters',
  CumulativeCounter: 'cumulative_counters',
}

// Report only when NODE_ENV is set and is not 'development'.
const shouldReport = NODE_ENV && NODE_ENV !== 'development'

const Metrics = {

  MetricTypes,

  // Initialize the ingest client, tagging every future datapoint with
  // the service name, host, pid and environment.
  startCapturing(name) {
    if (!shouldReport) { return }
    const dimensions = {
      name,
      host: os.hostname(),
      pid: pid.toString(),
      env: NODE_ENV,
    }
    signalfxClient = new signalfx.Ingest(SIGNALFX_TOKEN, {dimensions})
  },

  // NOTE(review): body was empty upstream — errors are accepted and
  // discarded here; confirm this no-op is intentional.
  reportError(error) {
  },

  // Send a single datapoint. Throws when required fields are missing so
  // misuse is caught immediately in deployed environments.
  reportMetric({name, value, type, dimensions = {}} = {}) {
    if (!signalfxClient || !shouldReport) { return }
    if (!name) {
      throw new Error('Metrics.reportMetric requires a metric.name')
    }
    if (value == null) {
      throw new Error('Metrics.reportMetric requires a metric.value')
    }
    if (!type) {
      throw new Error('Metrics.reportMetric requires a metric.type from Metrics.MetricTypes')
    }
    signalfxClient.send({
      [type]: [{metric: name, value, timestamp: Date.now(), dimensions}],
    })
  },
}

module.exports = Metrics
|
|
@ -1,20 +1,5 @@
|
|||
const Metrics = require('../local-sync-metrics')
|
||||
Metrics.startCapturing('nylas-k2-sync')
|
||||
|
||||
const {Logger} = require('nylas-core')
|
||||
const LocalDatabaseConnector = require('../shared/local-database-connector')
|
||||
|
||||
global.Metrics = Metrics
|
||||
global.Logger = Logger.createLogger('nylas-k2-sync')
|
||||
|
||||
const onUnhandledError = (err) => {
|
||||
global.Logger.fatal(err, 'Unhandled error')
|
||||
global.Metrics.reportError(err)
|
||||
}
|
||||
process.on('uncaughtException', onUnhandledError)
|
||||
process.on('unhandledRejection', onUnhandledError)
|
||||
|
||||
|
||||
const prepareEnvironmentInfo = (callback) => {
|
||||
if (process.env.NODE_ENV === 'development') {
|
||||
const os = require('os')
|
||||
|
|
|
@ -1,19 +1,6 @@
|
|||
const Metrics = require(`../local-sync-metrics`)
|
||||
Metrics.startCapturing('nylas-k2-message-processor')
|
||||
|
||||
const {LocalPubsubConnector, Logger} = require(`nylas-core`)
|
||||
const {processors} = require('./processors')
|
||||
const LocalDatabaseConnector = require('../shared/local-database-connector')
|
||||
|
||||
global.Metrics = Metrics
|
||||
global.Logger = Logger.createLogger('nylas-k2-message-processor')
|
||||
|
||||
const onUnhandledError = (err) => {
|
||||
global.Logger.fatal(err, 'Unhandled error')
|
||||
global.Metrics.reportError(err)
|
||||
}
|
||||
process.on('uncaughtException', onUnhandledError)
|
||||
process.on('unhandledRejection', onUnhandledError)
|
||||
const LocalPubsubConnector = require('../shared/local-pubsub-connector')
|
||||
|
||||
// List of the attributes of Message that the processor should be allowed to change.
|
||||
// The message may move between folders, get starred, etc. while it's being
|
||||
|
|
42
packages/local-sync/src/shared/logger.js
Normal file
42
packages/local-sync/src/shared/logger.js
Normal file
|
@ -0,0 +1,42 @@
|
|||
const _ = require('underscore')
|
||||
|
||||
function Logger(boundArgs = {}) {
|
||||
if (!_.isObject(boundArgs)) {
|
||||
throw new Error('Logger: Bound arguments must be an object')
|
||||
}
|
||||
const logger = {}
|
||||
const loggerFns = ['log', 'info', 'warn', 'error']
|
||||
loggerFns.forEach((logFn) => {
|
||||
logger[logFn] = (first, ...args) => {
|
||||
if (first instanceof Error || !_.isObject(first)) {
|
||||
if (_.isEmpty(boundArgs)) {
|
||||
return console[logFn](first, ...args)
|
||||
}
|
||||
return console[logFn](boundArgs, first, ...args)
|
||||
}
|
||||
return console[logFn]({...boundArgs, ...first}, ...args)
|
||||
}
|
||||
})
|
||||
logger.child = (extraBoundArgs) => Logger({...boundArgs, ...extraBoundArgs})
|
||||
return logger
|
||||
}
|
||||
|
||||
/**
 * Build the package-level logger for `name`, decorated with
 * `forAccount(account)`, which returns a child logger bound to the
 * account's id and email. Children are cached per account id so
 * repeated lookups hand back the same instance.
 *
 * @param {string} name - logical service name bound to every log line
 */
function createLogger(name) {
  const accountLoggers = new Map()
  const rootLogger = Logger({name})

  const forAccount = (account = {}) => {
    let cached = accountLoggers.get(account.id)
    if (cached === undefined) {
      cached = rootLogger.child({
        account_id: account.id,
        account_email: account.emailAddress,
      })
      accountLoggers.set(account.id, cached)
    }
    return cached
  }

  return Object.assign(rootLogger, {forAccount})
}
|
||||
|
||||
module.exports = {createLogger}
|
|
@ -6,7 +6,6 @@ module.exports = {
|
|||
Imap: require('imap'),
|
||||
IMAPConnection: require('./imap-connection'),
|
||||
MessageTypes: require('./message-types'),
|
||||
Logger: require('./logger'),
|
||||
IMAPErrors: require('./imap-errors'),
|
||||
PromiseUtils: require('./promise-utils'),
|
||||
}
|
||||
|
|
|
@ -1,58 +0,0 @@
|
|||
// Build the list of bunyan output streams for a service. Development
// gets colorized pm2-style output split across stdout/stderr; every
// other environment writes plain stdout plus a CloudWatch stream.
const os = require('os');
const createCWStream = require('bunyan-cloudwatch')
const PrettyStream = require('bunyan-prettystream')

// CloudWatch destination: one log group per environment, one stream
// per service + host, fixed to us-east-1.
const cloudwatchConfig = (name, env) => ({
  logGroupName: `k2-${env}`,
  logStreamName: `${name}-${env}-${os.hostname()}`,
  cloudWatchLogsOptions: {
    region: 'us-east-1',
  },
})

// Shared plain stdout stream used outside development.
const stdoutStream = {
  level: 'info',
  stream: process.stdout,
}

const getLogStreams = (name, env) => {
  if (env === 'development') {
    // Pretty-print: below-error levels go to stdout, errors to stderr.
    const prettyStdOut = new PrettyStream({mode: 'pm2', lessThan: 'error'});
    const prettyStdErr = new PrettyStream({mode: 'pm2'});
    prettyStdOut.pipe(process.stdout);
    prettyStdErr.pipe(process.stderr);
    return [
      {
        type: 'raw',
        level: 'error',
        stream: prettyStdErr,
        reemitErrorEvents: true,
      },
      {
        type: 'raw',
        level: 'debug',
        stream: prettyStdOut,
        reemitErrorEvents: true,
      },
    ]
  }
  return [
    stdoutStream,
    {
      type: 'raw',
      reemitErrorEvents: true,
      stream: createCWStream(cloudwatchConfig(name, env)),
    },
  ]
}

module.exports = {getLogStreams}
|
|
@ -1,30 +0,0 @@
|
|||
const bunyan = require('bunyan')
|
||||
const {getLogStreams} = require('./log-streams')
|
||||
const NODE_ENV = process.env.NODE_ENV || 'unknown'
|
||||
|
||||
function createLogger(name, env = NODE_ENV) {
|
||||
const childLogs = new Map()
|
||||
const logger = bunyan.createLogger({
|
||||
name,
|
||||
env,
|
||||
serializers: bunyan.stdSerializers,
|
||||
streams: getLogStreams(name, env),
|
||||
})
|
||||
|
||||
return Object.assign(logger, {
|
||||
forAccount(account = {}) {
|
||||
if (!childLogs.has(account.id)) {
|
||||
const childLog = logger.child({
|
||||
account_id: account.id,
|
||||
account_email: account.emailAddress,
|
||||
})
|
||||
childLogs.set(account.id, childLog)
|
||||
}
|
||||
return childLogs.get(account.id)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createLogger,
|
||||
}
|
|
@ -4,9 +4,6 @@
|
|||
"description": "Core shared packages",
|
||||
"main": "index.js",
|
||||
"dependencies": {
|
||||
"bunyan": "1.8.0",
|
||||
"bunyan-cloudwatch": "2.0.0",
|
||||
"bunyan-prettystream": "github:emorikawa/node-bunyan-prettystream",
|
||||
"imap": "0.8.18",
|
||||
"promise-props": "1.0.0",
|
||||
"promise.prototype.finally": "1.0.1",
|
||||
|
|
Loading…
Add table
Reference in a new issue