From 56e2b44c25eddcb2224178414ee3a7925f3736f0 Mon Sep 17 00:00:00 2001
From: azivner
Date: Mon, 26 Nov 2018 22:22:16 +0100
Subject: [PATCH] fix import

---
 src/services/export/tar.js | 13 ++++++-----
 src/services/import/tar.js | 46 ++++++++++++++++++++++++++++----------
 src/services/note_cache.js |  6 ++---
 src/services/repository.js |  1 +
 4 files changed, 45 insertions(+), 21 deletions(-)

diff --git a/src/services/export/tar.js b/src/services/export/tar.js
index 6c7e364b1..e65044d17 100644
--- a/src/services/export/tar.js
+++ b/src/services/export/tar.js
@@ -126,10 +126,11 @@ async function exportToTar(branch, format, res) {
             meta.dirFileName = getUniqueFilename(existingFileNames, baseFileName);
             meta.children = [];

+            // namespace is shared by children in the same note
             const childExistingNames = {};

             for (const childBranch of childBranches) {
-                const note = await getNote(childBranch, existingFileNames);
+                const note = await getNote(childBranch, childExistingNames);

                 // can be undefined if export is disabled for this note
                 if (note) {
@@ -160,27 +161,27 @@ async function exportToTar(branch, format, res) {
         if (noteMeta.isClone) {
             const content = "Note is present at " + notePaths[noteMeta.noteId];

-            pack.entry({name: path + '/' + noteMeta.dataFileName, size: content.length}, content);
+            pack.entry({name: path + noteMeta.dataFileName, size: content.length}, content);
             return;
         }

         const note = await repository.getNote(noteMeta.noteId);

-        notePaths[note.noteId] = path + '/' + (noteMeta.dataFileName || noteMeta.dirFileName);
+        notePaths[note.noteId] = path + (noteMeta.dataFileName || noteMeta.dirFileName);

         if (noteMeta.dataFileName) {
             const content = prepareContent(note, noteMeta.format);

-            pack.entry({name: path + '/' + noteMeta.dataFileName, size: content.length}, content);
+            pack.entry({name: path + noteMeta.dataFileName, size: content.length}, content);
         }

         if (noteMeta.children && noteMeta.children.length > 0) {
-            const directoryPath = path + '/' + noteMeta.dirFileName;
+            const directoryPath = path + noteMeta.dirFileName;

             pack.entry({name: directoryPath, type: 'directory'});

             for (const childMeta of noteMeta.children) {
-                await saveNote(childMeta, directoryPath);
+                await saveNote(childMeta, directoryPath + '/');
             }
         }
     }
diff --git a/src/services/import/tar.js b/src/services/import/tar.js
index 16ee842c7..4bc7ccd90 100644
--- a/src/services/import/tar.js
+++ b/src/services/import/tar.js
@@ -3,6 +3,7 @@
 const Attribute = require('../../entities/attribute');
 const Link = require('../../entities/link');
 const utils = require('../../services/utils');
+const log = require('../../services/log');
 const repository = require('../../services/repository');
 const noteService = require('../../services/notes');
 const Branch = require('../../entities/branch');
@@ -15,6 +16,8 @@ const mimeTypes = require('mime-types');
 async function importTar(fileBuffer, importRootNote) {
     // maps from original noteId (in tar file) to newly generated noteId
     const noteIdMap = {};
+    const attributes = [];
+    const links = [];
     // path => noteId
     const createdPaths = { '/': importRootNote.noteId, '\\': importRootNote.noteId };
     const mdReader = new commonmark.Parser();
@@ -134,7 +137,7 @@ async function importTar(fileBuffer, importRootNote) {
         return { type, mime };
     }

-    async function saveAttributes(note, noteMeta) {
+    async function saveAttributesAndLinks(note, noteMeta) {
         if (!noteMeta) {
             return;
         }
@@ -146,14 +149,14 @@ async function importTar(fileBuffer, importRootNote) {
                 attr.value = getNewNoteId(attr.value);
             }

-            await new Attribute(attr).save();
+            attributes.push(attr);
         }

         for (const link of noteMeta.links) {
             link.noteId = note.noteId;
             link.targetNoteId = getNewNoteId(link.targetNoteId);

-            await new Link(link).save();
+            links.push(link);
         }
     }

@@ -178,14 +181,12 @@ async function importTar(fileBuffer, importRootNote) {
             isExpanded: noteMeta ? noteMeta.isExpanded : false
         }));

-        await saveAttributes(note, noteMeta);
+        await saveAttributesAndLinks(note, noteMeta);

         if (!firstNote) {
             firstNote = note;
         }

-        console.log(filePath);
-
         createdPaths[filePath] = noteId;
     }

@@ -207,8 +208,6 @@ async function importTar(fileBuffer, importRootNote) {
         const parentNoteId = getParentNoteId(filePath, parentNoteMeta);

         if (noteMeta && noteMeta.isClone) {
-            console.log(`Creating branch for ${noteId} in ${parentNoteId}`);
-
             await new Branch({
                 noteId,
                 parentNoteId,
@@ -247,7 +246,7 @@ async function importTar(fileBuffer, importRootNote) {
             isExpanded: noteMeta ? noteMeta.isExpanded : false
         }));

-        await saveAttributes(note, noteMeta);
+        await saveAttributesAndLinks(note, noteMeta);

         if (!firstNote) {
             firstNote = note;
@@ -257,7 +256,6 @@ async function importTar(fileBuffer, importRootNote) {
                 filePath = getTextFileWithoutExtension(filePath);
             }

-            console.log(filePath);
             createdPaths[filePath] = noteId;
         }
     }
@@ -273,6 +271,7 @@ async function importTar(fileBuffer, importRootNote) {
         if (filePath.endsWith("/")) {
             filePath = filePath.substr(0, filePath.length - 1);
         }
+
         return filePath;
     }

@@ -298,9 +297,12 @@ async function importTar(fileBuffer, importRootNote) {
             else if (header.type === 'directory') {
                 await saveDirectory(filePath);
             }
-            else {
+            else if (header.type === 'file') {
                 await saveNote(filePath, content);
             }
+            else {
+                log.info("Ignoring tar import entry with type " + header.type);
+            }

             next(); // ready for next entry
         });
@@ -309,7 +311,27 @@ async function importTar(fileBuffer, importRootNote) {


     return new Promise(resolve => {
-        extract.on('finish', function() {
+        extract.on('finish', async function() {
+            const createdNoteIds = {};
+
+            for (const path in createdPaths) {
+                createdNoteIds[createdPaths[path]] = true;
+            }
+
+            // we're saving attributes and links only now so that all relation and link target notes
+            // are already in the database (we don't want to have "broken" relations, not even transitionally)
+            for (const attr of attributes) {
+                if (attr.value in createdNoteIds) {
+                    await new Attribute(attr).save();
+                }
+            }
+
+            for (const link of links) {
+                if (link.targetNoteId in createdNoteIds) {
+                    await new Link(link).save();
+                }
+            }
+
             resolve(firstNote);
         });

diff --git a/src/services/note_cache.js b/src/services/note_cache.js
index 11227ac73..e19cac5f6 100644
--- a/src/services/note_cache.js
+++ b/src/services/note_cache.js
@@ -327,11 +327,11 @@ eventService.subscribe(eventService.ENTITY_CHANGED, async ({entityName, entity})

         if (attribute.type === 'label' && attribute.name === 'archived') {
             // we're not using label object directly, since there might be other non-deleted archived label
-            const hideLabel = await repository.getEntity(`SELECT * FROM attributes WHERE isDeleted = 0 AND type = 'label'
+            const archivedLabel = await repository.getEntity(`SELECT * FROM attributes WHERE isDeleted = 0 AND type = 'label'
                 AND name = 'archived' AND noteId = ?`, [attribute.noteId]);

-            if (hideLabel) {
-                archived[attribute.noteId] = hideLabel.isInheritable ? 1 : 0;
+            if (archivedLabel) {
+                archived[attribute.noteId] = archivedLabel.isInheritable ? 1 : 0;
             }
             else {
                 delete archived[attribute.noteId];
diff --git a/src/services/repository.js b/src/services/repository.js
index 205d831aa..26d700463 100644
--- a/src/services/repository.js
+++ b/src/services/repository.js
@@ -107,6 +107,7 @@ async function updateEntity(entity) {

             // it seems to be better to handle deletion and update separately
             await eventService.emit(entity.isDeleted ? eventService.ENTITY_DELETED : eventService.ENTITY_CHANGED, eventPayload);
+        }
     });
 }

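The core of the import fix above is a two-phase approach: notes are created first while their attributes and links are only collected, and the queued attributes and links are persisted later in the tar extractor's finish handler, filtered against the set of noteIds that were actually created, so a relation never points at a note that is missing from the database, not even transiently. The sketch below shows that pattern in isolation; it is illustrative only and uses hypothetical names (importEntries, persistRelation) and plain objects rather than Trilium's Attribute/Link entities.

// Two-phase import sketch (assumptions, not Trilium's actual API).
// Phase 1 "creates" notes and queues their relations; phase 2 saves only
// relations whose target note was actually created.
async function importEntries(entries, persistRelation) {
    const createdNoteIds = {};   // noteId => true, filled while notes are created
    const queuedRelations = [];  // collected instead of being saved immediately

    // phase 1: create every note and queue its outgoing relations
    for (const entry of entries) {
        createdNoteIds[entry.noteId] = true;

        for (const relation of entry.relations || []) {
            queuedRelations.push(relation);
        }
    }

    // phase 2: persist only relations with an existing target, so the store
    // never contains a broken relation, not even transitionally
    for (const relation of queuedRelations) {
        if (relation.targetNoteId in createdNoteIds) {
            await persistRelation(relation);
        }
    }
}

// usage: the relation pointing at the missing note 'c' is dropped
importEntries(
    [
        {noteId: 'a', relations: [{targetNoteId: 'b'}]},
        {noteId: 'b', relations: [{targetNoteId: 'c'}]}
    ],
    async relation => console.log('saved', relation) // the patch instead calls new Attribute(...).save() / new Link(...).save()
);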