trilium/src/public/javascripts/services/ws.js

import utils from './utils.js';
import toastService from "./toast.js";
import server from "./server.js";
import LoadResults from "./load_results.js";
import Branch from "../entities/branch.js";
import Attribute from "../entities/attribute.js";
import options from "./options.js";
import treeCache from "./tree_cache.js";

const $outstandingSyncsCount = $("#outstanding-syncs-count");

const messageHandlers = [];

let ws;
let lastAcceptedSyncId = window.glob.maxSyncIdAtLoad;
let lastProcessedSyncId = window.glob.maxSyncIdAtLoad;
let lastPingTs;
let syncDataQueue = [];

function logError(message) {
    console.log(utils.now(), message); // needs to be separate from .trace()
    console.trace();

    if (ws && ws.readyState === 1) {
        ws.send(JSON.stringify({
            type: 'log-error',
            error: message
        }));
    }
}

function subscribeToMessages(messageHandler) {
    messageHandlers.push(messageHandler);
}
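
// Example (hypothetical consumer code, not part of this module): another frontend service
// can subscribe to raw websocket messages and react to specific message types, e.g.:
//
//     import ws from "./ws.js";
//
//     ws.subscribeToMessages(message => {
//         if (message.type === 'sync-pull-in-progress') {
//             console.log("sync pull from the sync server has started");
//         }
//     });
//
// The toast notifications further down in this file use exactly this mechanism.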

// used to serialize sync operations
let consumeQueuePromise = null;

async function handleMessage(event) {
    const message = JSON.parse(event.data);

    for (const messageHandler of messageHandlers) {
        messageHandler(message);
    }

    if (message.type === 'sync') {
        const syncRows = message.data;
        lastPingTs = Date.now();

        $outstandingSyncsCount.html(message.outstandingSyncs);

        if (syncRows.length > 0) {
            console.debug(utils.now(), "Sync data: ", syncRows);

            syncDataQueue.push(...syncRows);

            // we set lastAcceptedSyncId even before sync processing and send ping so that backend can start sending more updates
            lastAcceptedSyncId = Math.max(lastAcceptedSyncId, syncRows[syncRows.length - 1].id);
            sendPing();

            // first wait for all the preceding consumers to finish
            while (consumeQueuePromise) {
                await consumeQueuePromise;
            }

            try {
                // it's my turn so start it up
                consumeQueuePromise = consumeSyncData();

                await consumeQueuePromise;
            }
            finally {
                // finish and set to null to signal somebody else can pick it up
                consumeQueuePromise = null;
            }
        }
    }
    else if (message.type === 'sync-hash-check-failed') {
        toastService.showError("Sync check failed!", 60000);
    }
    else if (message.type === 'consistency-checks-failed') {
        toastService.showError("Consistency checks failed! See logs for details.", 50 * 60000);
    }
}

let syncIdReachedListeners = [];

function waitForSyncId(desiredSyncId) {
    if (desiredSyncId <= lastProcessedSyncId) {
        return Promise.resolve();
    }

    return new Promise((res, rej) => {
        syncIdReachedListeners.push({
            desiredSyncId,
            resolvePromise: res,
            start: Date.now()
        })
    });
}

function waitForMaxKnownSyncId() {
    return waitForSyncId(server.getMaxKnownSyncId());
}
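
// Example (hypothetical usage, not part of this module): after an update is sent to the
// backend, a caller can wait until the resulting sync rows have been processed locally
// before reading entities again, e.g.:
//
//     await server.post('some-endpoint', payload); // hypothetical endpoint
//     await ws.waitForMaxKnownSyncId();            // local entity cache now reflects the change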

function checkSyncIdListeners() {
    syncIdReachedListeners
        .filter(l => l.desiredSyncId <= lastProcessedSyncId)
        .forEach(l => l.resolvePromise());

    syncIdReachedListeners = syncIdReachedListeners
        .filter(l => l.desiredSyncId > lastProcessedSyncId);

    syncIdReachedListeners.filter(l => Date.now() > l.start + 60000)
        .forEach(l => console.log(`Waiting for syncId ${l.desiredSyncId} while current is ${lastProcessedSyncId} for ${Math.floor((Date.now() - l.start) / 1000)}s`));
}

async function runSafely(syncHandler, syncData) {
    try {
        return await syncHandler(syncData);
    }
    catch (e) {
        console.log(`Sync handler failed with ${e.message}: ${e.stack}`);
    }
}

async function consumeSyncData() {
    if (syncDataQueue.length > 0) {
        const allSyncData = syncDataQueue;
        syncDataQueue = [];

        try {
            await processSyncRows(allSyncData);
        }
        catch (e) {
            logError(`Encountered error ${e.message}: ${e.stack}, reloading frontend.`);

            // if there's an error in updating the frontend then the easy option to recover is to reload the frontend completely
            utils.reloadApp();
        }

        lastProcessedSyncId = Math.max(lastProcessedSyncId, allSyncData[allSyncData.length - 1].id);
    }

    checkSyncIdListeners();
}

function connectWebSocket() {
    const loc = window.location;
    const webSocketUri = (loc.protocol === "https:" ? "wss:" : "ws:")
        + "//" + loc.host + loc.pathname;

    // use wss for secure messaging
    const ws = new WebSocket(webSocketUri);
    ws.onopen = () => console.debug(utils.now(), `Connected to server ${webSocketUri} with WebSocket`);
    ws.onmessage = handleMessage;

    // we're not handling ws.onclose here because reconnection is done in sendPing()
    return ws;
}

async function sendPing() {
    if (Date.now() - lastPingTs > 30000) {
        console.log(utils.now(), "Lost websocket connection to the backend");
    }

    if (ws.readyState === ws.OPEN) {
        ws.send(JSON.stringify({
            type: 'ping',
            lastSyncId: lastAcceptedSyncId
        }));
    }
    else if (ws.readyState === ws.CLOSED || ws.readyState === ws.CLOSING) {
        console.log(utils.now(), "WS closed or closing, trying to reconnect");

        ws = connectWebSocket();
    }
}
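
// Module initialization: the websocket is opened on the next tick (setTimeout with 0 delay)
// and sendPing() then runs every second. Besides detecting a stale connection, the ping
// reports lastAcceptedSyncId back to the backend and reconnects the websocket if it has
// been closed (see the comment in connectWebSocket()).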
setTimeout(() => {
    ws = connectWebSocket();

    lastPingTs = Date.now();

    setInterval(sendPing, 1000);
}, 0);

subscribeToMessages(message => {
    if (message.type === 'sync-pull-in-progress') {
        toastService.showPersistent({
            id: 'sync',
            title: "Sync status",
            message: "Sync update in progress",
            icon: "refresh"
        });
    }
    else if (message.type === 'sync-pull-finished') {
        // this gives user a chance to see the toast in case of fast sync finish
        setTimeout(() => toastService.closePersistent('sync'), 1000);
    }
});
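
// Applies incoming sync rows to the client-side entity cache (notes, branches, attributes, ...)
// and records the changes in a LoadResults instance which is then broadcast via the
// 'entitiesReloaded' event so that the rest of the frontend can refresh itself.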
async function processSyncRows(syncRows) {
    const loadResults = new LoadResults(treeCache);

    syncRows.filter(sync => sync.entityName === 'notes').forEach(sync => {
        const note = treeCache.notes[sync.entityId];

        if (note) {
            note.update(sync.entity);

            loadResults.addNote(sync.entityId, sync.sourceId);
        }
    });

    syncRows.filter(sync => sync.entityName === 'branches').forEach(sync => {
        let branch = treeCache.branches[sync.entityId];
        const childNote = treeCache.notes[sync.entity.noteId];
        const parentNote = treeCache.notes[sync.entity.parentNoteId];

        if (branch) {
            if (sync.entity.isDeleted) {
                if (childNote) {
                    childNote.parents = childNote.parents.filter(parentNoteId => parentNoteId !== sync.entity.parentNoteId);
                    delete childNote.parentToBranch[sync.entity.parentNoteId];
                }

                if (parentNote) {
                    parentNote.children = parentNote.children.filter(childNoteId => childNoteId !== sync.entity.noteId);
                    delete parentNote.childToBranch[sync.entity.noteId];
                }
            }
            else {
                branch.update(sync.entity);
                loadResults.addBranch(sync.entityId, sync.sourceId);

                if (childNote) {
                    childNote.addParent(branch.parentNoteId, branch.branchId);
                }

                if (parentNote) {
                    parentNote.addChild(branch.noteId, branch.branchId);
                }
            }
        }
        else if (!sync.entity.isDeleted) {
            if (childNote || parentNote) {
                branch = new Branch(treeCache, sync.entity);
                treeCache.branches[branch.branchId] = branch;

                loadResults.addBranch(sync.entityId, sync.sourceId);

                if (childNote) {
                    childNote.addParent(branch.parentNoteId, branch.branchId);
                }

                if (parentNote) {
                    parentNote.addChild(branch.noteId, branch.branchId);
                }
            }
        }
    });

    syncRows.filter(sync => sync.entityName === 'note_reordering').forEach(sync => {
        for (const branchId in sync.positions) {
            const branch = treeCache.branches[branchId];

            if (branch) {
                branch.notePosition = sync.positions[branchId];
            }
        }

        loadResults.addNoteReordering(sync.entityId, sync.sourceId);
    });

    // missing reloading the relation target note
    syncRows.filter(sync => sync.entityName === 'attributes').forEach(sync => {
        let attribute = treeCache.attributes[sync.entityId];
        const sourceNote = treeCache.notes[sync.entity.noteId];
        const targetNote = sync.entity.type === 'relation' && treeCache.notes[sync.entity.value];

        if (attribute) {
            attribute.update(sync.entity);
            loadResults.addAttribute(sync.entityId, sync.sourceId);

            if (sync.entity.isDeleted) {
                if (sourceNote) {
                    sourceNote.attributes = sourceNote.attributes.filter(attributeId => attributeId !== attribute.attributeId);
                }

                if (targetNote) {
                    targetNote.targetRelations = targetNote.targetRelations.filter(attributeId => attributeId !== attribute.attributeId);
                }
            }
        }
        else if (!sync.entity.isDeleted) {
            if (sourceNote || targetNote) {
                attribute = new Attribute(treeCache, sync.entity);
                treeCache.attributes[attribute.attributeId] = attribute;

                loadResults.addAttribute(sync.entityId, sync.sourceId);

                if (sourceNote && !sourceNote.attributes.includes(attribute.attributeId)) {
                    sourceNote.attributes.push(attribute.attributeId);
                }

                if (targetNote && !targetNote.targetRelations.includes(attribute.attributeId)) {
                    targetNote.targetRelations.push(attribute.attributeId);
                }
            }
        }
    });

    syncRows.filter(sync => sync.entityName === 'note_contents').forEach(sync => {
        delete treeCache.noteComplementPromises[sync.entityId];

        loadResults.addNoteContent(sync.entityId, sync.sourceId);
    });

    syncRows.filter(sync => sync.entityName === 'note_revisions').forEach(sync => {
        loadResults.addNoteRevision(sync.entityId, sync.noteId, sync.sourceId);
    });

    syncRows.filter(sync => sync.entityName === 'options').forEach(sync => {
        options.set(sync.entity.name, sync.entity.value);

        loadResults.addOption(sync.entity.name);
    });

    const appContext = (await import("./app_context.js")).default;
    appContext.trigger('entitiesReloaded', {loadResults});
}

export default {
    logError,
    subscribeToMessages,
    waitForSyncId,
    waitForMaxKnownSyncId
};