Reddit plugin configuration now comes from the config file instead of options; added scheduling; refactored the sync mutex.

azivner 2018-01-13 22:51:39 -05:00
parent fbfaff6ab8
commit 9839ea019e
5 changed files with 63 additions and 49 deletions

View file

@@ -7,7 +7,8 @@ const log = require('../services/log');
 const utils = require('../services/utils');
 const unescape = require('unescape');
 const attributes = require('../services/attributes');
-const options = require('../services/options');
+const sync_mutex = require('../services/sync_mutex');
+const config = require('../services/config');

 const REDDIT_ROOT = 'reddit_root';
@@ -106,7 +107,7 @@ async function getDateNoteIdForReddit(dateTimeStr, rootNoteId) {
     return redditDateNoteId;
 }

-async function importReddit(accountName, afterId = null) {
+async function importComments(accountName, afterId = null) {
     let rootNoteId = await sql.getFirstValue(`SELECT notes.note_id FROM notes JOIN attributes USING(note_id)
         WHERE attributes.name = '${REDDIT_ROOT}' AND notes.is_deleted = 0`);
@@ -173,38 +174,47 @@ async function importReddit(accountName, afterId = null) {
     if (listing.data.after && importedComments > 0) {
         log.info("Reddit: Importing from next page of comments ...");

-        importedComments += await importReddit(accountName, listing.data.after);
+        importedComments += await importComments(accountName, listing.data.after);
     }

     return importedComments;
 }

+let redditAccounts = [];
+
+async function runImport() {
+    // technically mutex shouldn't be necessary but we want to avoid doing potentially expensive import
+    // concurrently with sync
+    await sync_mutex.doExclusively(async () => {
+        let importedComments = 0;
+
+        for (const account of redditAccounts) {
+            log.info("Reddit: Importing account " + account);
+
+            importedComments += await importComments(account);
+        }
+
+        log.info(`Reddit: Imported ${importedComments} comments.`);
+    });
+}
+
 sql.dbReady.then(async () => {
-    const enabledOption = await options.getOptionOrNull("reddit_enabled");
-    const accountsOption = await options.getOptionOrNull("reddit_accounts");
-
-    if (!enabledOption) {
-        await options.createOption("reddit_enabled", "false", true);
-        await options.createOption("reddit_accounts", "[]", true);
-
-        return;
-    }
-
-    if (enabledOption.opt_value !== "true") {
-        return;
-    }
-
-    if (!accountsOption) {
-        log.info("Reddit: No reddit accounts defined in option 'reddit_accounts'");
-    }
-
-    const redditAccounts = JSON.parse(accountsOption.opt_value);
-
-    let importedComments = 0;
-
-    for (const account of redditAccounts) {
-        log.info("Reddit: Importing account " + account);
-
-        importedComments += await importReddit(account);
-    }
-
-    log.info(`Reddit: Imported ${importedComments} comments.`);
+    console.log(config);
+
+    if (!config['Reddit'] || config['Reddit']['enabled'] !== true) {
+        return;
+    }
+
+    const redditAccountsStr = config['Reddit']['accounts'];
+
+    if (!redditAccountsStr) {
+        log.info("Reddit: No reddit accounts defined in option 'reddit_accounts'");
+    }
+
+    redditAccounts = redditAccountsStr.split(",").map(s => s.trim());
+
+    const pollingIntervalInSeconds = config['Reddit']['pollingIntervalInSeconds'] || 3600;
+
+    setInterval(runImport, pollingIntervalInSeconds * 1000);
+    setTimeout(runImport, 1000);
 });
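
For reference, the lookups above assume the parsed config carries a 'Reddit' section. A minimal sketch of the shape the plugin expects (the section and key names come from the code above; everything else, including how values are parsed, is an assumption and not shown in this diff):

    // hypothetical parsed-config shape assumed by the plugin code above;
    // in an INI-style config file this would correspond to a [Reddit] section
    const config = {
        Reddit: {
            enabled: true,                     // compared with !== true above, so it must parse to boolean true
            accounts: "account1, account2",    // comma-separated list, split and trimmed by the plugin
            pollingIntervalInSeconds: 3600     // optional; the code falls back to 3600 seconds
        }
    };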

View file

@@ -23,9 +23,8 @@ async function regularBackup() {
 async function backupNow() {
     // we don't want to backup DB in the middle of sync with potentially inconsistent DB state
-    const releaseMutex = await sync_mutex.acquire();
-
-    try {
+    await sync_mutex.doExclusively(async () => {
         const now = utils.nowDate();

         const backupFile = dataDir.BACKUP_DIR + "/" + "backup-" + utils.getDateTimeForFile() + ".db";
@@ -37,10 +36,7 @@ async function backupNow() {
         await sql.doInTransaction(async () => {
             await options.setOption('last_backup_date', now);
         });
-    }
-    finally {
-        releaseMutex();
-    }
+    });
 }

 async function cleanupOldBackups() {

View file

@@ -217,18 +217,14 @@ async function runAllChecks() {
 async function runChecks() {
     let errorList;
     let elapsedTimeMs;

-    const releaseMutex = await sync_mutex.acquire();
-
-    try {
+    await sync_mutex.doExclusively(async () => {
         const startTime = new Date();

         errorList = await runAllChecks();

         elapsedTimeMs = new Date().getTime() - startTime.getTime();
-    }
-    finally {
-        releaseMutex();
-    }
+    });

     if (errorList.length > 0) {
         log.info(`Consistency checks failed (took ${elapsedTimeMs}ms) with these errors: ` + JSON.stringify(errorList));

View file

@@ -20,25 +20,25 @@ let proxyToggle = true;
 let syncServerCertificate = null;

 async function sync() {
-    const releaseMutex = await sync_mutex.acquire();
-
     try {
-        if (!await sql.isDbUpToDate()) {
-            return {
-                success: false,
-                message: "DB not up to date"
-            };
-        }
-
-        const syncContext = await login();
-
-        await pushSync(syncContext);
-        await pullSync(syncContext);
-        await pushSync(syncContext);
-        await checkContentHash(syncContext);
+        await sync_mutex.doExclusively(async () => {
+            if (!await sql.isDbUpToDate()) {
+                return {
+                    success: false,
+                    message: "DB not up to date"
+                };
+            }
+
+            const syncContext = await login();
+
+            await pushSync(syncContext);
+            await pullSync(syncContext);
+            await pushSync(syncContext);
+            await checkContentHash(syncContext);
+        });

         return {
             success: true
@@ -64,9 +64,6 @@ async function sync() {
             }
         }
     }
-    finally {
-        releaseMutex();
-    }
 }

 async function login() {

View file

@@ -4,5 +4,20 @@
  */

 const Mutex = require('async-mutex').Mutex;
+const instance = new Mutex();

-module.exports = new Mutex();
+async function doExclusively(func) {
+    const releaseMutex = await instance.acquire();
+
+    try {
+        await func();
+    }
+    finally {
+        releaseMutex();
+    }
+}
+
+module.exports = {
+    doExclusively
+};
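
The new helper centralizes the acquire/try/finally pattern that the callers above previously repeated. A minimal usage sketch under the same assumptions as this diff (the doSomethingExpensive name is a placeholder, not part of the commit):

    const sync_mutex = require('./sync_mutex');

    async function doSomethingExpensive() {
        // everything inside the callback runs while holding the shared mutex;
        // the lock is released even if the callback throws
        await sync_mutex.doExclusively(async () => {
            // ... long-running work that must not overlap with sync ...
        });
    }

Note that doExclusively awaits func() but does not propagate its return value, which is why runChecks() above collects its results into variables declared outside the callback.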