fix DB setup

zadam 2020-06-20 21:42:41 +02:00
parent 6207203b35
commit 027afab6b1
19 changed files with 133 additions and 118 deletions

View file

@@ -1,7 +1,7 @@
"use strict";
const repository = require('../../services/repository');
-const noteCacheService = require('../../services/note_cache/note_cache.js');
+const noteCacheService = require('../../services/note_cache/note_cache_service');
const protectedSessionService = require('../../services/protected_session');
const noteRevisionService = require('../../services/note_revisions');
const utils = require('../../services/utils');

View file

@@ -146,7 +146,7 @@ function update(req) {
function syncFinished() {
// after first sync finishes, the application is ready to be used
// this is meaningless but at the same time harmless (idempotent) for further syncs
-sqlInit.dbInitialized();
+sqlInit.setDbAsInitialized();
}
function queueSector(req) {

View file

@@ -80,6 +80,8 @@ function apiRoute(method, path, routeHandler) {
function route(method, path, middleware, routeHandler, resultHandler, transactional = true) {
router[method](path, ...middleware, (req, res, next) => {
+const start = Date.now();
try {
cls.namespace.bindEmitter(req);
cls.namespace.bindEmitter(res);
@@ -103,6 +105,12 @@ function route(method, path, middleware, routeHandler, resultHandler, transactio
res.sendStatus(500);
}
+const time = Date.now() - start;
+if (time >= 10) {
+console.log(`Slow request: ${time}ms - ${method} ${path}`);
+}
});
}
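Taken together, the two hunks above time every request inside the shared route() wrapper and log anything that takes 10 ms or more. Condensed into one place as a sketch only - the cls binding, result handling and transaction plumbing of the real wrapper are elided:

    router[method](path, ...middleware, (req, res, next) => {
        const start = Date.now();

        // ... bind the CLS namespace, invoke routeHandler, send the response ...

        const time = Date.now() - start;

        if (time >= 10) {
            console.log(`Slow request: ${time}ms - ${method} ${path}`);
        }
    });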

View file

@@ -126,10 +126,12 @@ if (!fs.existsSync(dataDir.BACKUP_DIR)) {
fs.mkdirSync(dataDir.BACKUP_DIR, 0o700);
}
-setInterval(cls.wrap(regularBackup), 4 * 60 * 60 * 1000);
+sqlInit.dbReady.then(() => {
+setInterval(cls.wrap(regularBackup), 4 * 60 * 60 * 1000);
-// kickoff first backup soon after start up
-setTimeout(cls.wrap(regularBackup), 5 * 60 * 1000);
+// kickoff first backup soon after start up
+setTimeout(cls.wrap(regularBackup), 5 * 60 * 1000);
+});
module.exports = {
backupNow,
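The same change is repeated across the backup, consistency check, note erasure, script scheduler and sync services: background timers are no longer started at require() time but only once sqlInit.dbReady resolves. A minimal sketch of the pattern, assuming dbReady is the promise exported from sql_init and cls.wrap binds the callback to the CLS namespace (regularBackup stands in for the real job):

    const sqlInit = require('./sql_init');
    const cls = require('./cls');

    sqlInit.dbReady.then(() => {
        // nothing touches the database until initialization/migration has finished
        setInterval(cls.wrap(regularBackup), 4 * 60 * 60 * 1000);   // every 4 hours
        setTimeout(cls.wrap(regularBackup), 5 * 60 * 1000);         // first run ~5 minutes after startup
    });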

View file

@@ -699,10 +699,12 @@ function runOnDemandChecks(autoFix) {
consistencyChecks.runChecks();
}
-setInterval(cls.wrap(runPeriodicChecks), 60 * 60 * 1000);
+sqlInit.dbReady.then(() => {
+setInterval(cls.wrap(runPeriodicChecks), 60 * 60 * 1000);
-// kickoff checks soon after startup (to not block the initial load)
-setTimeout(cls.wrap(runPeriodicChecks), 20 * 1000);
+// kickoff checks soon after startup (to not block the initial load)
+setTimeout(cls.wrap(runPeriodicChecks), 20 * 1000);
+});
module.exports = {
runOnDemandChecks

View file

@@ -9,4 +9,6 @@ eventService.subscribe(eventService.ENTITY_CHANGED, ({entityName, entity}) => {
}
});
-hoistedNote.setHoistedNoteId(optionService.getOption('hoistedNoteId'));
+sqlInit.dbReady.then(() => {
+hoistedNote.setHoistedNoteId(optionService.getOption('hoistedNoteId'));
+});

View file

@@ -296,7 +296,7 @@ function importZip(taskContext, fileBuffer, importRootNote) {
}
});
-if(noteMeta) {
+if (noteMeta) {
const includeNoteLinks = (noteMeta.attributes || [])
.filter(attr => attr.type === 'relation' && attr.name === 'includeNoteLink');

View file

@@ -3,23 +3,31 @@
const sql = require('../sql.js');
const eventService = require('../events.js');
const noteCache = require('./note_cache');
+const sqlInit = require('../sql_init');
const Note = require('./entities/note');
const Branch = require('./entities/branch');
const Attribute = require('./entities/attribute');
+sqlInit.dbReady.then(() => {
+load();
+});
function load() {
noteCache.reset();
-sql.getRows(`SELECT noteId, title, type, mime, isProtected, dateCreated, dateModified, utcDateCreated, utcDateModified, contentLength FROM notes WHERE isDeleted = 0`, [])
-.map(row => new Note(noteCache, row));
+for (const row of sql.iterateRows(`SELECT noteId, title, type, mime, isProtected, dateCreated, dateModified, utcDateCreated, utcDateModified, contentLength FROM notes WHERE isDeleted = 0`, [])) {
+new Note(noteCache, row);
+}
-sql.getRows(`SELECT branchId, noteId, parentNoteId, prefix FROM branches WHERE isDeleted = 0`, [])
-.map(row => new Branch(noteCache, row));
+for (const row of sql.iterateRows(`SELECT branchId, noteId, parentNoteId, prefix FROM branches WHERE isDeleted = 0`, [])) {
+new Branch(noteCache, row);
+}
-sql.getRows(`SELECT attributeId, noteId, type, name, value, isInheritable FROM attributes WHERE isDeleted = 0`, []).map(row => new Attribute(noteCache, row));
+for (const row of sql.iterateRows(`SELECT attributeId, noteId, type, name, value, isInheritable FROM attributes WHERE isDeleted = 0`, [])) {
+new Attribute(noteCache, row);
+}
noteCache.loaded = true;
noteCache.loadedResolve();
}
eventService.subscribe([eventService.ENTITY_CHANGED, eventService.ENTITY_DELETED, eventService.ENTITY_SYNCED], ({entityName, entity}) => {
@@ -144,7 +152,5 @@ eventService.subscribe([eventService.ENTITY_CHANGED, eventService.ENTITY_DELETED
});
eventService.subscribe(eventService.ENTER_PROTECTED_SESSION, () => {
-noteCache.loadedPromise.then(() => noteCache.decryptProtectedNotes());
+noteCache.decryptProtectedNotes();
});
-load();
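The loader also stops materializing full result sets: sql.getRows(...).map(...) built an array of every row before constructing the cache entities, while sql.iterateRows() (added to sql.js below) hands back better-sqlite3's statement iterator so rows are consumed one at a time. A trimmed sketch of the difference (column list shortened; the Note constructor is assumed to register itself in noteCache):

    // before: whole result set loaded into an array, then mapped
    sql.getRows(`SELECT noteId, title FROM notes WHERE isDeleted = 0`, [])
        .map(row => new Note(noteCache, row));

    // after: rows are streamed from the prepared statement
    for (const row of sql.iterateRows(`SELECT noteId, title FROM notes WHERE isDeleted = 0`, [])) {
        new Note(noteCache, row);
    }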

View file

@@ -154,7 +154,7 @@ function getNotePath(noteId) {
return {
noteId: noteId,
-branchId: getBranch(noteId, parentNote.noteId).branchId,
+branchId: noteCache.getBranch(noteId, parentNote.noteId).branchId,
title: noteTitle,
notePath: retPath,
path: retPath.join('/')

View file

@@ -762,10 +762,12 @@ function duplicateNote(noteId, parentNoteId) {
};
}
-// first cleanup kickoff 5 minutes after startup
-setTimeout(cls.wrap(eraseDeletedNotes), 5 * 60 * 1000);
+sqlInit.dbReady.then(() => {
+// first cleanup kickoff 5 minutes after startup
+setTimeout(cls.wrap(eraseDeletedNotes), 5 * 60 * 1000);
-setInterval(cls.wrap(eraseDeletedNotes), 4 * 3600 * 1000);
+setInterval(cls.wrap(eraseDeletedNotes), 4 * 3600 * 1000);
+});
module.exports = {
createNewNote,

View file

@@ -1,6 +1,7 @@
const scriptService = require('./script');
const repository = require('./repository');
const cls = require('./cls');
+const sqlInit = require('./sql_init');
function runNotesWithLabel(runAttrValue) {
const notes = repository.getEntities(`
@@ -20,8 +21,10 @@ function runNotesWithLabel(runAttrValue) {
}
}
-setTimeout(cls.wrap(() => runNotesWithLabel('backendStartup')), 10 * 1000);
+sqlInit.dbReady.then(() => {
+setTimeout(cls.wrap(() => runNotesWithLabel('backendStartup')), 10 * 1000);
-setInterval(cls.wrap(() => runNotesWithLabel('hourly')), 3600 * 1000);
+setInterval(cls.wrap(() => runNotesWithLabel('hourly')), 3600 * 1000);
-setInterval(cls.wrap(() => runNotesWithLabel('daily')), 24 * 3600 * 1000);
+setInterval(cls.wrap(() => runNotesWithLabel('daily')), 24 * 3600 * 1000);
+});

View file

@@ -24,7 +24,7 @@ function triggerSync() {
// it's ok to not wait for it here
syncService.sync().then(res => {
if (res.success) {
-sqlInit.dbInitialized();
+sqlInit.setDbAsInitialized();
}
});
}

View file

@@ -47,8 +47,10 @@ function isLocalSourceId(srcId) {
const currentSourceId = createSourceId();
// this will also refresh source IDs
-cls.wrap(() => saveSourceId(currentSourceId));
+// very ugly
+setTimeout(() => {
+sqlInit.dbReady.then(cls.wrap(() => saveSourceId(currentSourceId)));
+}, 1000);
function getCurrentSourceId() {
return currentSourceId;

View file

@@ -2,12 +2,11 @@
const log = require('./log');
const cls = require('./cls');
+const Database = require('better-sqlite3');
+const dataDir = require('./data_dir');
-let dbConnection;
-function setDbConnection(connection) {
-dbConnection = connection;
-}
+const dbConnection = new Database(dataDir.DOCUMENT_PATH);
+dbConnection.pragma('journal_mode = WAL');
[`exit`, `SIGINT`, `SIGUSR1`, `SIGUSR2`, `SIGTERM`].forEach(eventType => {
process.on(eventType, () => {
@@ -88,7 +87,7 @@ function rollback() {
}
function getRow(query, params = []) {
-return wrap(() => stmt(query).get(params), query);
+return wrap(query, s => s.get(params));
}
function getRowOrNull(query, params = []) {
@@ -135,7 +134,11 @@ function getManyRows(query, params) {
}
function getRows(query, params = []) {
-return wrap(() => stmt(query).all(params), query);
+return wrap(query, s => s.all(params));
}
+function iterateRows(query, params = []) {
+return stmt(query).iterate(params);
+}
function getMap(query, params = []) {
@@ -171,7 +174,7 @@ function getColumn(query, params = []) {
function execute(query, params = []) {
startTransactionIfNecessary();
-return wrap(() => stmt(query).run(params), query);
+return wrap(query, s => s.run(params));
}
function executeWithoutTransaction(query, params = []) {
@@ -181,57 +184,39 @@ function executeWithoutTransaction(query, params = []) {
function executeMany(query, params) {
startTransactionIfNecessary();
// essentially just alias
getManyRows(query, params);
}
function executeScript(query) {
startTransactionIfNecessary();
-return wrap(() => stmt.run(query), query);
+return dbConnection.exec(query);
}
-function wrap(func, query) {
-if (!dbConnection) {
-throw new Error("DB connection not initialized yet");
-}
+function wrap(query, func) {
+const startTimestamp = Date.now();
-const thisError = new Error();
+const result = func(stmt(query));
-try {
-const startTimestamp = Date.now();
+const milliseconds = Date.now() - startTimestamp;
-const result = func(dbConnection);
-const milliseconds = Date.now() - startTimestamp;
-if (milliseconds >= 300) {
-if (query.includes("WITH RECURSIVE")) {
-log.info(`Slow recursive query took ${milliseconds}ms.`);
-}
-else {
-log.info(`Slow query took ${milliseconds}ms: ${query}`);
-}
+if (milliseconds >= 100) {
+if (query.includes("WITH RECURSIVE")) {
+log.info(`Slow recursive query took ${milliseconds}ms.`);
+}
+else {
+log.info(`Slow query took ${milliseconds}ms: ${query}`);
+}
-return result;
-}
-catch (e) {
-log.error("Error executing query. Inner exception: " + e.stack + thisError.stack);
-thisError.message = e.stack;
-throw thisError;
-}
+return result;
}
function startTransactionIfNecessary() {
-if (!cls.get('isTransactional')
-|| cls.get('isInTransaction')) {
+if (!cls.get('isTransactional') || dbConnection.inTransaction) {
return;
}
-cls.set('isInTransaction', true);
beginTransaction();
}
@@ -246,7 +231,7 @@ function transactional(func) {
try {
const ret = func();
-if (cls.get('isInTransaction')) {
+if (dbConnection.inTransaction) {
commit();
// note that sync rows sent from this action will be sent again by scheduled periodic ping
@@ -256,7 +241,7 @@
return ret;
}
catch (e) {
-if (cls.get('isInTransaction')) {
+if (dbConnection.inTransaction) {
rollback();
}
@@ -264,22 +249,17 @@ }
}
finally {
cls.namespace.set('isTransactional', false);
-if (cls.namespace.get('isInTransaction')) {
-cls.namespace.set('isInTransaction', false);
-// resolving even for rollback since this is just semaphore for allowing another write transaction to proceed
-}
}
}
module.exports = {
-setDbConnection,
insert,
replace,
getValue,
getRow,
getRowOrNull,
getRows,
+iterateRows,
getManyRows,
getMap,
getColumn,
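sql.js now opens its own better-sqlite3 connection at require() time and the helper signature flips from wrap(func, query) to wrap(query, func), with the prepared statement passed into the callback; transaction state comes from better-sqlite3's dbConnection.inTransaction flag instead of CLS bookkeeping. A minimal sketch of the new shape - the statement cache is an assumption about how stmt() works here, and the file path is illustrative:

    const Database = require('better-sqlite3');

    const dbConnection = new Database('document.db');   // real code uses dataDir.DOCUMENT_PATH
    dbConnection.pragma('journal_mode = WAL');

    const statementCache = {};

    function stmt(sql) {
        // compile each query once, reuse the prepared statement afterwards
        return statementCache[sql] = statementCache[sql] || dbConnection.prepare(sql);
    }

    function wrap(query, func) {
        const startTimestamp = Date.now();
        const result = func(stmt(query));
        const milliseconds = Date.now() - startTimestamp;

        if (milliseconds >= 100) {
            console.log(`Slow query took ${milliseconds}ms: ${query}`);
        }

        return result;
    }

    const getRow = (query, params = []) => wrap(query, s => s.get(params));
    const getRows = (query, params = []) => wrap(query, s => s.all(params));
    const execute = (query, params = []) => wrap(query, s => s.run(params));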

View file

@@ -1,28 +1,21 @@
const log = require('./log');
const dataDir = require('./data_dir');
const fs = require('fs');
const resourceDir = require('./resource_dir');
const appInfo = require('./app_info');
const sql = require('./sql');
const cls = require('./cls');
const utils = require('./utils');
const optionService = require('./options');
const port = require('./port');
const Option = require('../entities/option');
const TaskContext = require('./task_context.js');
-const Database = require('better-sqlite3');
-const dbConnection = new Database(dataDir.DOCUMENT_PATH);
-dbConnection.pragma('journal_mode = WAL');
+const dbReady = utils.deferred();
-sql.setDbConnection(dbConnection);
-const dbReady = initDbConnection();
+initDbConnection();
function schemaExists() {
-const tableResults = sql.getRows("SELECT name FROM sqlite_master WHERE type='table' AND name='options'");
-return tableResults.length === 1;
+return !!sql.getValue(`SELECT name FROM sqlite_master
+WHERE type = 'table' AND name = 'options'`);
}
function isDbInitialized() {
@@ -32,35 +25,34 @@ function isDbInitialized() {
const initialized = sql.getValue("SELECT value FROM options WHERE name = 'initialized'");
-// !initialized may be removed in the future, required only for migration
-return !initialized || initialized === 'true';
+return initialized === 'true';
}
function initDbConnection() {
-cls.init(() => {
-if (!isDbInitialized()) {
-log.info(`DB not initialized, please visit setup page` + (utils.isElectron() ? '' : ` - http://[your-server-host]:${port} to see instructions on how to initialize Trilium.`));
+if (!isDbInitialized()) {
+log.info(`DB not initialized, please visit setup page` + (utils.isElectron() ? '' : ` - http://[your-server-host]:${port} to see instructions on how to initialize Trilium.`));
-return;
-}
+return;
+}
-const currentDbVersion = getDbVersion();
+const currentDbVersion = getDbVersion();
-if (currentDbVersion > appInfo.dbVersion) {
-log.error(`Current DB version ${currentDbVersion} is newer than app db version ${appInfo.dbVersion} which means that it was created by newer and incompatible version of Trilium. Upgrade to latest version of Trilium to resolve this issue.`);
+if (currentDbVersion > appInfo.dbVersion) {
+log.error(`Current DB version ${currentDbVersion} is newer than app db version ${appInfo.dbVersion} which means that it was created by newer and incompatible version of Trilium. Upgrade to latest version of Trilium to resolve this issue.`);
-utils.crash();
-}
+utils.crash();
+}
-if (!isDbUpToDate()) {
-// avoiding circular dependency
-const migrationService = require('./migration');
+if (!isDbUpToDate()) {
+// avoiding circular dependency
+const migrationService = require('./migration');
-migrationService.migrate();
-}
+migrationService.migrate();
+}
-require('./options_init').initStartupOptions();
-});
+require('./options_init').initStartupOptions();
+dbReady.resolve();
}
function createInitialDatabase(username, password, theme) {
@@ -156,7 +148,7 @@ function isDbUpToDate() {
return upToDate;
}
-function dbInitialized() {
+function setDbAsInitialized() {
if (!isDbInitialized()) {
optionService.setOption('initialized', 'true');
@@ -174,5 +166,5 @@ module.exports = {
isDbUpToDate,
createInitialDatabase,
createDatabaseForSync,
-dbInitialized
+setDbAsInitialized
};
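With the connection moved into sql.js, sql_init.js is reduced to orchestration: dbReady is a deferred created up front, and initDbConnection() resolves it only after the initialization check, DB version check, migration and startup options have all passed. When the DB is not initialized yet it returns early and dbReady stays pending; setup and the initial sync mark the DB as initialized via setDbAsInitialized (see the hunks above). A simplified sketch of that control flow, with error handling and migration details omitted:

    const dbReady = utils.deferred();

    function initDbConnection() {
        if (!isDbInitialized()) {
            // setup page will be shown; dbReady stays pending for now
            return;
        }

        // ... DB version check, migration, startup options ...

        dbReady.resolve();
    }

    initDbConnection();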

View file

@@ -368,12 +368,14 @@ function getMaxSyncId() {
return sql.getValue('SELECT MAX(id) FROM sync');
}
-setInterval(cls.wrap(sync), 60000);
+sqlInit.dbReady.then(() => {
+setInterval(cls.wrap(sync), 60000);
-// kickoff initial sync immediately
-setTimeout(cls.wrap(sync), 3000);
+// kickoff initial sync immediately
+setTimeout(cls.wrap(sync), 3000);
-setInterval(cls.wrap(updatePushStats), 1000);
+setInterval(cls.wrap(updatePushStats), 1000);
+});
module.exports = {
sync,

View file

@@ -5,7 +5,6 @@ const repository = require('./repository');
const Branch = require('../entities/branch');
const syncTableService = require('./sync_table');
const protectedSessionService = require('./protected_session');
-const noteCacheService = require('./note_cache/note_cache.js');
function getNotes(noteIds) {
// we return also deleted notes which have been specifically asked for
@@ -23,8 +22,6 @@ function getNotes(noteIds) {
protectedSessionService.decryptNotes(notes);
-noteCacheService.loadedPromise;
notes.forEach(note => {
note.isProtected = !!note.isProtected
});

View file

@@ -259,6 +259,22 @@ function timeLimit(promise, limitMs) {
});
}
+function deferred() {
+return (() => {
+let resolve, reject;
+let promise = new Promise((res, rej) => {
+resolve = res;
+reject = rej;
+});
+promise.resolve = resolve;
+promise.reject = reject;
+return promise;
+})();
+}
module.exports = {
randomSecureToken,
randomString,
@@ -290,5 +306,6 @@ module.exports = {
getNoteTitle,
removeTextFileExtension,
formatDownloadTitle,
-timeLimit
+timeLimit,
+deferred
};
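deferred() is the piece that makes the dbReady handshake possible: it returns an ordinary Promise with its resolve and reject functions attached as properties, so one module can create the promise and another code path can settle it later. A small usage sketch with illustrative names:

    const utils = require('./utils');

    const ready = utils.deferred();

    // consumers treat it as a normal promise
    ready.then(() => console.log('initialization finished'));

    // the producer settles it whenever the work is actually done
    ready.resolve();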

View file

@@ -127,10 +127,10 @@ function closeSetupWindow() {
}
}
-function registerGlobalShortcuts() {
+async function registerGlobalShortcuts() {
const {globalShortcut} = require('electron');
-sqlInit.dbReady;
+await sqlInit.dbReady;
const allActions = keyboardActionsService.getKeyboardActions();