Migrate from Bull to BullMQ for background processing

Andris Reinman 2023-03-10 15:47:47 +02:00
parent 4370a79192
commit e7cef85d0f
No known key found for this signature in database
GPG key ID: DC6C83F4D584D364
8 changed files with 467 additions and 349 deletions
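Orientation note: in Bull a queue both produces and consumes jobs through queue.process(); BullMQ splits consumption into a dedicated Worker and requires an explicit job name on add(). Every diff below follows that pattern. A minimal sketch with an assumed 'example' queue; none of these names come from this commit:

    const { Queue, Worker } = require('bullmq');

    // BullMQ worker connections must allow blocking commands,
    // hence maxRetriesPerRequest: null
    const connection = { host: '127.0.0.1', port: 6379, maxRetriesPerRequest: null };

    // producer: the job name ('example-job') is a new, required first argument
    const queue = new Queue('example', { connection });
    queue.add('example-job', { some: 'payload' }, { attempts: 5 }).catch(err => console.error(err));

    // consumer: new Worker(name, handler, opts) replaces Bull's queue.process(handler)
    const worker = new Worker(
        'example',
        async job => {
            // job.data carries the payload passed to queue.add()
            return true;
        },
        { connection, concurrency: 1 }
    );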

View file

@@ -1,8 +1,7 @@
module.exports = {
upgrade: true,
reject: [
-// FIXME: v4.x.x throws if not maxRetriesPerRequest: null, enableReadyCheck: false
-// https://github.com/OptimalBits/bull/blob/develop/CHANGELOG.md#breaking-changes
-'bull',
// mongodb 5.x driver does not support callbacks, only promises
'mongodb'
]
};
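The mongodb pin relates to the comment above: the 5.x driver drops the callback API entirely, so callback-style calls have to move to promises before upgrading. A small sketch of the difference; the collection and handler names are illustrative, not from this repo:

    // mongodb 4.x still accepts callbacks:
    db.collection('messages').findOne({ _id: id }, (err, doc) => {
        if (err) {
            return handleError(err); // hypothetical handler
        }
        handleDoc(doc); // hypothetical handler
    });

    // mongodb 5.x is promise-only:
    let doc = await db.collection('messages').findOne({ _id: id });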

View file

@@ -4,7 +4,7 @@ const log = require('npmlog');
const config = require('wild-config');
const Gelf = require('gelf');
const os = require('os');
-const Queue = require('bull');
+const { Queue, Worker } = require('bullmq');
const db = require('./lib/db');
const errors = require('./lib/errors');
const crypto = require('crypto');
@@ -16,6 +16,7 @@ const { getClient } = require('./lib/elasticsearch');
let loggelf;
let processlock;
+let queueWorkers = {};
const LOCK_EXPIRE_TTL = 5;
const LOCK_RENEW_TTL = 2;
@@ -71,14 +72,6 @@ class Indexer {
return;
}
-let hasFeatureFlag = await db.redis.sismember(`feature:indexing`, entry.user.toString());
-if (!hasFeatureFlag) {
-log.silly('Indexer', `Feature flag not set, skipping user=%s command=%s message=%s`, entry.user, entry.command, entry.message);
-return;
-} else {
-log.verbose('Indexer', `Feature flag set, processing user=%s command=%s message=%s`, entry.user, entry.command, entry.message);
-}
switch (entry.command) {
case 'EXISTS':
payload = {
@@ -111,7 +104,15 @@ class Indexer {
}
if (payload) {
-await indexingQueue.add(payload, {
+let hasFeatureFlag = await db.redis.sismember(`feature:indexing`, entry.user.toString());
+if (!hasFeatureFlag) {
+log.silly('Indexer', `Feature flag not set, skipping user=%s command=%s message=%s`, entry.user, entry.command, entry.message);
+return;
+} else {
+log.verbose('Indexer', `Feature flag set, processing user=%s command=%s message=%s`, entry.user, entry.command, entry.message);
+}
+await indexingQueue.add('journal', payload, {
removeOnComplete: 100,
removeOnFail: 100,
attempts: 5,
@@ -311,7 +312,7 @@ module.exports.start = callback => {
return setTimeout(() => process.exit(1), 3000);
}
-indexingQueue = new Queue('indexing', typeof config.dbs.redis === 'object' ? { redis: config.dbs.redis } : config.dbs.redis);
+indexingQueue = new Queue('indexing', db.queueConf);
processlock = counters(db.redis).processlock;
@@ -321,189 +322,191 @@ module.exports.start = callback => {
});
const esclient = getClient();
-indexingQueue.process(async job => {
+queueWorkers.indexing = new Worker(
+'indexing',
+async job => {
try {
if (!job || !job.data) {
return false;
}
const data = job.data;

const dateKeyTdy = new Date().toISOString().substring(0, 10).replace(/-/g, '');
const dateKeyYdy = new Date(Date.now() - 24 * 3600 * 1000).toISOString().substring(0, 10).replace(/-/g, '');
const tombstoneTdy = `indexer:tomb:${dateKeyTdy}`;
const tombstoneYdy = `indexer:tomb:${dateKeyYdy}`;

switch (data.action) {
case 'new': {
// check tombstone for race conditions (might be already deleted)
let [[err1, isDeleted1], [err2, isDeleted2]] = await db.redis
.multi()
.sismember(tombstoneTdy, data.message)
.sismember(tombstoneYdy, data.message)
.exec();

if (err1) {
log.verbose('Indexing', 'Failed checking tombstone key=%s error=%s', tombstoneTdy, err1.message);
}

if (err2) {
log.verbose('Indexing', 'Failed checking tombstone key=%s error=%s', tombstoneYdy, err2.message);
}

if (isDeleted1 || isDeleted2) {
log.info('Indexing', 'Document tombstone found, skip index message=%s', data.message);
break;
}

// fetch message from DB
let messageData = await db.database.collection('messages').findOne(
{
_id: new ObjectId(data.message),
// shard key
mailbox: new ObjectId(data.mailbox),
uid: data.uid
},
{
projection: {
bodystructure: false,
envelope: false,
'mimeTree.childNodes': false,
'mimeTree.header': false
}
}
);

const now = new Date();

let messageObj = removeEmptyKeys({
user: messageData.user.toString(),
mailbox: messageData.mailbox.toString(),
thread: messageData.thread ? messageData.thread.toString() : null,
uid: messageData.uid,
answered: messageData.flags ? messageData.flags.includes('\\Answered') : null,
attachments:
(messageData.attachments &&
messageData.attachments.map(attachment =>
removeEmptyKeys({
cid: attachment.cid || null,
contentType: attachment.contentType || null,
size: attachment.size,
filename: attachment.filename,
id: attachment.id,
disposition: attachment.disposition
})
)) ||
null,
bcc: formatAddresses(messageData.mimeTree && messageData.mimeTree.parsedHeader && messageData.mimeTree.parsedHeader.bcc),
cc: formatAddresses(messageData.mimeTree && messageData.mimeTree.parsedHeader && messageData.mimeTree.parsedHeader.cc),
// Time when stored
created: now.toISOString(),
// Internal Date
idate: (messageData.idate && messageData.idate.toISOString()) || now.toISOString(),
// Header Date
hdate: (messageData.hdate && messageData.hdate.toISOString()) || now.toISOString(),
draft: messageData.flags ? messageData.flags.includes('\\Draft') : null,
flagged: messageData.flags ? messageData.flags.includes('\\Flagged') : null,
flags: messageData.flags || [],
from: formatAddresses(messageData.mimeTree && messageData.mimeTree.parsedHeader && messageData.mimeTree.parsedHeader.from),
// do not index authentication and transport headers
headers: messageData.headers
? messageData.headers.filter(header => !/^x|^received|^arc|^dkim|^authentication/gi.test(header.key))
: null,
inReplyTo: messageData.inReplyTo || null,
msgid: messageData.msgid || null,
replyTo: formatAddresses(
messageData.mimeTree && messageData.mimeTree.parsedHeader && messageData.mimeTree.parsedHeader['reply-to']
),
size: messageData.size || null,
subject: messageData.subject || '',
to: formatAddresses(messageData.mimeTree && messageData.mimeTree.parsedHeader && messageData.mimeTree.parsedHeader.to),
unseen: messageData.flags ? !messageData.flags.includes('\\Seen') : null,
html: (messageData.html && messageData.html.join('\n')) || null,
text: messageData.text || null,
modseq: data.modseq
});

let indexResponse = await esclient.index({
id: messageData._id.toString(),
index: config.elasticsearch.index,
body: messageObj,
refresh: false
});

log.verbose(
'Indexing',
'Document index result=%s message=%s',
indexResponse.body && indexResponse.body.result,
indexResponse.body && indexResponse.body._id
);
break;
}

case 'delete': {
let deleteResponse;
try {
deleteResponse = await esclient.delete({
id: data.message,
index: config.elasticsearch.index,
refresh: false
});
} catch (err) {
if (err.meta && err.meta.body && err.meta.body.result === 'not_found') {
// set tombstone to prevent indexing this message in case of race conditions
await db.redis
.multi()
.sadd(tombstoneTdy, data.message)
.expire(tombstoneTdy, 24 * 3600)
.exec();
}
throw err;
}

log.verbose(
'Indexing',
'Document delete result=%s message=%s',
deleteResponse.body && deleteResponse.body.result,
deleteResponse.body && deleteResponse.body._id
);
break;
}

case 'update': {
let updateRequest = {
id: data.message,
index: config.elasticsearch.index,
refresh: false
};

if (data.modseq && typeof data.modseq === 'number') {
updateRequest.body = {
script: {
lang: 'painless',
source: `
if( ctx._source.modseq >= params.modseq) {
ctx.op = 'none';
} else {
@@ -514,49 +517,56 @@ module.exports.start = callback => {
ctx._source.modseq = params.modseq;
}
`,
params: {
modseq: data.modseq,
draft: data.flags.includes('\\Draft'),
flagged: data.flags.includes('\\Flagged'),
flags: data.flags || [],
unseen: !data.flags.includes('\\Seen')
}
}
};
} else {
updateRequest.body = {
doc: removeEmptyKeys({
draft: data.flags ? data.flags.includes('\\Draft') : null,
flagged: data.flags ? data.flags.includes('\\Flagged') : null,
flags: data.flags || [],
unseen: data.flags ? !data.flags.includes('\\Seen') : null
})
};
}

let updateResponse = await esclient.update(updateRequest);
log.verbose(
'Indexing',
'Document update result=%s message=%s',
updateResponse.body && updateResponse.body.result,
updateResponse.body && updateResponse.body._id
);
}
}

// loggelf({ _msg: 'hello world' });
} catch (err) {
if (err.meta && err.meta.body && err.meta.body.result === 'not_found') {
// missing document, ignore
log.error('Indexing', 'Failed to process indexing request, document not found message=%s', err.meta.body._id);
return;
}

log.error('Indexing', err);
throw err;
}
-});
+},
+Object.assign(
+{
+concurrency: 1
+},
+db.queueConf
+)
+);
callback();
});
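Note on the rethrow at the end of the handler above: in BullMQ, a handler that resolves completes the job, while a thrown error fails the current attempt and hands the job back for retry under the attempts option set at add() time. A sketch of that contract; the backoff settings here are illustrative, since the indexer's own add() options are partly outside this diff:

    // producer (see indexingQueue.add above): allow 5 tries
    indexingQueue.add('journal', payload, {
        attempts: 5,
        backoff: { type: 'exponential', delay: 2000 }
    }).catch(err => log.error('Indexing', err));

    // worker: throwing re-queues the job with growing delays
    // (2000, 4000, 8000, ... ms) until all 5 attempts are spent,
    // after which the job moves to the failed set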

View file

@@ -1,17 +1,40 @@
'use strict';
const config = require('wild-config');
const tools = require('./tools');
const mongodb = require('mongodb');
const Redis = require('ioredis');
+const redisUrl = require('./redis-url');
+const log = require('npmlog');
+const packageData = require('../package.json');
const MongoClient = mongodb.MongoClient;
module.exports.database = false;
module.exports.gridfs = false;
module.exports.users = false;
module.exports.senderDb = false;
module.exports.redis = false;
module.exports.redisConfig = false;
+const REDIS_CONF = Object.assign(
+{
+// some defaults
+maxRetriesPerRequest: null,
+showFriendlyErrorStack: true,
+retryStrategy(times) {
+const delay = !times ? 1000 : Math.min(2 ** times * 500, 15 * 1000);
+log.info('Redis', 'Connection retry times=%s delay=%s', times, delay);
+return delay;
+},
+connectionName: `${packageData.name}@${packageData.version}[${process.pid}]`
+},
+typeof config.dbs.redis === 'string' ? redisUrl(config.dbs.redis) : config.dbs.redis || {}
+);
+
+module.exports.redisConfig = REDIS_CONF;
+
+module.exports.queueConf = {
+connection: Object.assign({}, REDIS_CONF, { connectionName: `${REDIS_CONF.connectionName}[notify]` }),
+prefix: `wd:bull`
+};
+
+module.exports.redis = new Redis(REDIS_CONF);
let getDBConnection = (main, config, callback) => {
if (main) {
@@ -70,10 +93,7 @@ module.exports.connect = callback => {
}
module.exports.senderDb = sdb || module.exports.database;
-module.exports.redisConfig = tools.redisConfig(config.dbs.redis);
-module.exports.redis = new Redis(module.exports.redisConfig);
-module.exports.redis.connect(() => callback(null, module.exports.database));
+callback();
});
});
});
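The point of queueConf above is that every Queue and Worker in the other files can share one connection config and one key prefix; the maxRetriesPerRequest: null default it inherits from REDIS_CONF matters because BullMQ refuses to run blocking worker connections without it. A consumption sketch; the 'some-queue' name is hypothetical:

    const { Queue, Worker } = require('bullmq');
    const db = require('./lib/db');

    // producer and consumer reuse the same ioredis options and the wd:bull prefix
    const someQueue = new Queue('some-queue', db.queueConf);
    const someWorker = new Worker('some-queue', async job => job.data, Object.assign({ concurrency: 1 }, db.queueConf));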

View file

@@ -2,7 +2,7 @@
// Pushes events to processing queue
-const Queue = require('bull');
+const { Queue } = require('bullmq');
const log = require('npmlog');
let webhooksQueue;
@@ -54,13 +54,8 @@ module.exports = {
if (!webhooksQueue) {
webhooksQueue = new Queue('webhooks', {
-createClient(type /*, config*/) {
-if (type === 'bclient') {
-// most probably never called
-return redisClient.duplicate();
-}
-return redisClient;
-}
+connection: redisClient,
+prefix: `wd:bull`
});
}
@@ -73,7 +68,7 @@ module.exports = {
});
try {
-let job = await webhooksQueue.add(data, {
+let job = await webhooksQueue.add('webhook', data, {
removeOnComplete: true,
removeOnFail: 500,
attempts: 5,

View file

@@ -79,6 +79,7 @@ class MailboxHandler {
return callback(err);
}
+console.log('MAILBOX CREATED');
publish(this.redis, {
ev: MAILBOX_CREATED,
user,

lib/redis-url.js Normal file
View file

@@ -0,0 +1,74 @@
'use strict';
module.exports = redisConf => {
let parsedRedisUrl = new URL(redisConf);
let parsedUrl = {};
let usernameAllowed = false;
for (let key of parsedRedisUrl.searchParams.keys()) {
let value = parsedRedisUrl.searchParams.get(key);
if (!value) {
continue;
}
switch (key) {
case 'password':
parsedUrl.password = value;
break;
case 'db':
{
if (value && !isNaN(value)) {
parsedUrl.db = Number(value);
}
}
break;
case 'allowUsernameInURI':
if (/^(true|1|yes|y)$/i.test(value)) {
usernameAllowed = true;
}
break;
}
}
for (let key of ['hostname', 'port', 'password', 'pathname', 'protocol', 'username']) {
let value = parsedRedisUrl[key];
if (!value) {
continue;
}
switch (key) {
case 'hostname':
parsedUrl.host = value;
break;
case 'port':
parsedUrl.port = Number(value);
break;
case 'password':
parsedUrl.password = value;
break;
case 'username':
if (usernameAllowed) {
parsedUrl.username = value;
}
break;
case 'pathname': {
let pathname = value.slice(1);
if (pathname && !isNaN(pathname)) {
parsedUrl.db = Number(pathname);
}
break;
}
case 'protocol':
if (value.toLowerCase() === 'rediss:') {
parsedUrl.tls = {};
}
break;
}
}
return parsedUrl;
};
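A usage sketch for the parser above; the URL is an example, not from this commit. rediss: switches on TLS, the pathname picks the database, and the username is only honored with allowUsernameInURI:

    const redisUrl = require('./lib/redis-url');

    redisUrl('rediss://admin:secret@redis.example.com:6380/3?allowUsernameInURI=true');
    // returns { host: 'redis.example.com', port: 6380, password: 'secret', db: 3, tls: {}, username: 'admin' }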

View file

@@ -42,7 +42,6 @@
"supertest": "6.3.3"
},
"dependencies": {
"restify-cors-middleware2": "2.2.1",
"@fidm/x509": "1.2.1",
"@opensearch-project/opensearch": "2.2.0",
"@phc/pbkdf2": "1.1.14",
@@ -54,7 +53,7 @@
"base32.js": "0.1.0",
"bcryptjs": "2.4.3",
"bson": "5.0.1",
"bull": "3.29.3",
"bullmq": "3.10.1",
"fido2-lib": "3.3.5",
"gelf": "2.0.1",
"generate-password": "1.7.0",
@@ -77,7 +76,7 @@
"mailsplit": "5.4.0",
"mobileconfig": "2.4.0",
"mongo-cursor-pagination": "8.1.3",
"mongodb": "4.12.1",
"mongodb": "4.14.0",
"mongodb-extended-json": "1.11.1",
"msgpack5": "6.0.2",
"node-forge": "1.3.1",
@@ -90,6 +89,7 @@
"pwnedpasswords": "1.0.6",
"qrcode": "1.5.1",
"restify": "11.1.0",
"restify-cors-middleware2": "2.2.1",
"restify-logger": "2.0.1",
"saslprep": "1.0.3",
"seq-index": "1.1.0",

View file

@@ -4,7 +4,7 @@ const log = require('npmlog');
const config = require('wild-config');
const Gelf = require('gelf');
const os = require('os');
-const Queue = require('bull');
+const { Queue, Worker } = require('bullmq');
const db = require('./lib/db');
const tools = require('./lib/tools');
const { ObjectId } = require('mongodb');
@@ -13,6 +13,7 @@ const packageData = require('./package.json');
const { MARKED_SPAM, MARKED_HAM } = require('./lib/events');
let loggelf;
+let queueWorkers = {};
async function postWebhook(webhook, data) {
let res;
@@ -117,133 +118,151 @@ module.exports.start = callback => {
}
};
-const webhooksQueue = new Queue('webhooks', typeof config.dbs.redis === 'object' ? { redis: config.dbs.redis } : config.dbs.redis);
-const webhooksPostQueue = new Queue('webhooks_post', typeof config.dbs.redis === 'object' ? { redis: config.dbs.redis } : config.dbs.redis);
+const webhooksPostQueue = new Queue('webhooks_post', db.queueConf);

-webhooksQueue.process(async job => {
+queueWorkers.webhooks = new Worker(
+'webhooks',
+async job => {
try {
if (!job || !job.data || !job.data.ev) {
return false;
}

const data = job.data;

let evtList = ['*'];
let typeParts = data.ev.split('.');
typeParts.pop();
for (let i = 1; i <= typeParts.length; i++) {
evtList.push(typeParts.slice(0, i).join('.') + '.*');
}
evtList.push(data.ev);

const query = { type: { $in: evtList } };
if (data.user) {
query.user = { $in: [new ObjectId(data.user), null] };
}

let whid = new ObjectId();
let count = 0;

let webhooks = await db.users.collection('webhooks').find(query).toArray();

if (!webhooks.length) {
// ignore this event
return;
}

if ([MARKED_SPAM, MARKED_HAM].includes(data.ev)) {
let message = new ObjectId(data.message);
data.message = data.id;
delete data.id;

let messageData = await db.database.collection('messages').findOne(
{ _id: message },
{
projection: {
_id: true,
uid: true,
msgid: true,
subject: true,
mailbox: true,
mimeTree: true,
idate: true
}
}
);

if (!messageData) {
// message already deleted?
return;
}

let parsedHeader = (messageData.mimeTree && messageData.mimeTree.parsedHeader) || {};

let from = parsedHeader.from ||
parsedHeader.sender || [
{
name: '',
address: (messageData.meta && messageData.meta.from) || ''
}
];

let addresses = {
to: [].concat(parsedHeader.to || []),
cc: [].concat(parsedHeader.cc || []),
bcc: [].concat(parsedHeader.bcc || [])
};

tools.decodeAddresses(from);
tools.decodeAddresses(addresses.to);
tools.decodeAddresses(addresses.cc);
tools.decodeAddresses(addresses.bcc);

if (from && from[0]) {
data.from = from[0];
}

for (let addrType of ['to', 'cc', 'bcc']) {
if (addresses[addrType] && addresses[addrType].length) {
data[addrType] = addresses[addrType];
}
}

data.messageId = messageData.msgid;
data.subject = messageData.subject;
data.date = messageData.idate.toISOString();
}

for (let webhook of webhooks) {
count++;
try {
await webhooksPostQueue.add(
+'webhook',
{ data: Object.assign({ id: `${whid.toHexString()}:${count}` }, data), webhook },
{
removeOnComplete: true,
removeOnFail: 500,
attempts: 5,
backoff: {
type: 'exponential',
delay: 2000
}
}
);
} catch (err) {
// ignore?
log.error('Events', err);
}
}
} catch (err) {
log.error('Webhooks', err);
throw err;
}
-});
+},
+Object.assign(
+{
+concurrency: 1
+},
+db.queueConf
+)
+);

-webhooksPostQueue.process(async job => {
+queueWorkers.webhooksPost = new Worker(
+'webhooks_post',
+async job => {
if (!job || !job.data) {
return false;
}
const { data, webhook } = job.data;
return await postWebhook(webhook, data);
-});
+},
+Object.assign(
+{
+concurrency: 1
+},
+db.queueConf
+)
+);
callback();
};