fix import

azivner 2018-11-26 22:22:16 +01:00
parent 4d5a17583f
commit 56e2b44c25
4 changed files with 45 additions and 21 deletions


@@ -126,10 +126,11 @@ async function exportToTar(branch, format, res) {
         meta.dirFileName = getUniqueFilename(existingFileNames, baseFileName);
         meta.children = [];
 
+        // namespace is shared by children in the same note
         const childExistingNames = {};
 
         for (const childBranch of childBranches) {
-            const note = await getNote(childBranch, existingFileNames);
+            const note = await getNote(childBranch, childExistingNames);
 
             // can be undefined if export is disabled for this note
             if (note) {
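The bug fixed in this hunk is that child filenames were deduplicated against the parent's name set instead of a per-directory one. A tiny illustrative sketch of the per-directory naming idea follows; the getUniqueFilename body here is an assumed stand-in for the example, not the project's implementation:

function getUniqueFilename(existingFileNames, baseFileName) {
    // each directory passes its own map, so names only collide within one directory
    const index = existingFileNames[baseFileName] || 0;

    existingFileNames[baseFileName] = index + 1;

    return index === 0 ? baseFileName : index + "_" + baseFileName;
}

const rootNames = {};
const childNames = {};   // separate namespace per directory, as in the fix

console.log(getUniqueFilename(rootNames, 'note.html'));   // note.html
console.log(getUniqueFilename(childNames, 'note.html'));  // note.html (different directory)
console.log(getUniqueFilename(childNames, 'note.html'));  // 1_note.html (same directory, deduplicated)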
@@ -160,27 +161,27 @@ async function exportToTar(branch, format, res) {
         if (noteMeta.isClone) {
             const content = "Note is present at " + notePaths[noteMeta.noteId];
 
-            pack.entry({name: path + '/' + noteMeta.dataFileName, size: content.length}, content);
+            pack.entry({name: path + noteMeta.dataFileName, size: content.length}, content);
             return;
         }
 
         const note = await repository.getNote(noteMeta.noteId);
 
-        notePaths[note.noteId] = path + '/' + (noteMeta.dataFileName || noteMeta.dirFileName);
+        notePaths[note.noteId] = path + (noteMeta.dataFileName || noteMeta.dirFileName);
 
         if (noteMeta.dataFileName) {
             const content = prepareContent(note, noteMeta.format);
 
-            pack.entry({name: path + '/' + noteMeta.dataFileName, size: content.length}, content);
+            pack.entry({name: path + noteMeta.dataFileName, size: content.length}, content);
         }
 
         if (noteMeta.children && noteMeta.children.length > 0) {
-            const directoryPath = path + '/' + noteMeta.dirFileName;
+            const directoryPath = path + noteMeta.dirFileName;
 
             pack.entry({name: directoryPath, type: 'directory'});
 
             for (const childMeta of noteMeta.children) {
-                await saveNote(childMeta, directoryPath);
+                await saveNote(childMeta, directoryPath + '/');
             }
         }
     }
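For context, a minimal sketch of the path convention the changed lines adopt, using the same tar-stream pack.entry calls as above; the addNote helper and the note objects are invented for the example, only the prefix-plus-trailing-slash rule mirrors the commit:

const tarStream = require('tar-stream');

const pack = tarStream.pack();

// entries are named prefix + fileName; the prefix already ends with '/' (or is '' at the root),
// and '/' is appended only when recursing into a directory entry
function addNote(prefix, note) {
    if (note.dataFileName) {
        pack.entry({name: prefix + note.dataFileName, size: note.content.length}, note.content);
    }

    if (note.children && note.children.length > 0) {
        const directoryPath = prefix + note.dirFileName;

        pack.entry({name: directoryPath, type: 'directory'});

        for (const child of note.children) {
            addNote(directoryPath + '/', child);
        }
    }
}

addNote('', {
    dataFileName: 'root.html',
    dirFileName: 'root',
    content: '<p>root</p>',
    children: [
        {dataFileName: 'child.html', content: '<p>child</p>', children: []}
    ]
});

pack.finalize();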


@@ -3,6 +3,7 @@
 const Attribute = require('../../entities/attribute');
 const Link = require('../../entities/link');
 const utils = require('../../services/utils');
+const log = require('../../services/log');
 const repository = require('../../services/repository');
 const noteService = require('../../services/notes');
 const Branch = require('../../entities/branch');
@@ -15,6 +16,8 @@ const mimeTypes = require('mime-types');
 async function importTar(fileBuffer, importRootNote) {
     // maps from original noteId (in tar file) to newly generated noteId
     const noteIdMap = {};
+    const attributes = [];
+    const links = [];
     // path => noteId
     const createdPaths = { '/': importRootNote.noteId, '\\': importRootNote.noteId };
     const mdReader = new commonmark.Parser();
@@ -134,7 +137,7 @@ async function importTar(fileBuffer, importRootNote) {
         return { type, mime };
     }
 
-    async function saveAttributes(note, noteMeta) {
+    async function saveAttributesAndLinks(note, noteMeta) {
         if (!noteMeta) {
             return;
         }
@@ -146,14 +149,14 @@ async function importTar(fileBuffer, importRootNote) {
                 attr.value = getNewNoteId(attr.value);
             }
 
-            await new Attribute(attr).save();
+            attributes.push(attr);
         }
 
         for (const link of noteMeta.links) {
             link.noteId = note.noteId;
             link.targetNoteId = getNewNoteId(link.targetNoteId);
 
-            await new Link(link).save();
+            links.push(link);
         }
     }
@@ -178,14 +181,12 @@ async function importTar(fileBuffer, importRootNote) {
             isExpanded: noteMeta ? noteMeta.isExpanded : false
         }));
 
-        await saveAttributes(note, noteMeta);
+        await saveAttributesAndLinks(note, noteMeta);
 
         if (!firstNote) {
            firstNote = note;
        }
 
-        console.log(filePath);
-
         createdPaths[filePath] = noteId;
     }
@@ -207,8 +208,6 @@ async function importTar(fileBuffer, importRootNote) {
         const parentNoteId = getParentNoteId(filePath, parentNoteMeta);
 
         if (noteMeta && noteMeta.isClone) {
-            console.log(`Creating branch for ${noteId} in ${parentNoteId}`);
-
             await new Branch({
                 noteId,
                 parentNoteId,
@@ -247,7 +246,7 @@ async function importTar(fileBuffer, importRootNote) {
             isExpanded: noteMeta ? noteMeta.isExpanded : false
         }));
 
-        await saveAttributes(note, noteMeta);
+        await saveAttributesAndLinks(note, noteMeta);
 
         if (!firstNote) {
             firstNote = note;
@@ -257,7 +256,6 @@ async function importTar(fileBuffer, importRootNote) {
                 filePath = getTextFileWithoutExtension(filePath);
             }
 
-            console.log(filePath);
             createdPaths[filePath] = noteId;
         }
     }
@@ -273,6 +271,7 @@ async function importTar(fileBuffer, importRootNote) {
 
         if (filePath.endsWith("/")) {
             filePath = filePath.substr(0, filePath.length - 1);
         }
+
         return filePath;
     }
@@ -298,9 +297,12 @@ async function importTar(fileBuffer, importRootNote) {
             else if (header.type === 'directory') {
                 await saveDirectory(filePath);
             }
-            else {
+            else if (header.type === 'file') {
                 await saveNote(filePath, content);
             }
+            else {
+                log.info("Ignoring tar import entry with type " + header.type);
+            }
 
             next(); // ready for next entry
         });
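A rough sketch of how such entries arrive from tar-stream, for readers unfamiliar with the library; readTar and handleEntry are hypothetical names, but the extract/entry/finish events and the PassThrough piping follow tar-stream's documented API:

const tarStream = require('tar-stream');
const stream = require('stream');

// reads a tar held in a Buffer and hands each entry (header + collected content) to handleEntry
function readTar(fileBuffer, handleEntry) {
    const extract = tarStream.extract();

    extract.on('entry', function(header, entryStream, next) {
        const chunks = [];

        entryStream.on('data', chunk => chunks.push(chunk));
        entryStream.on('end', async () => {
            // header.type is 'file', 'directory', 'symlink', etc.; unknown types can simply be skipped
            await handleEntry(header, Buffer.concat(chunks));

            next(); // ready for the next entry
        });
    });

    return new Promise(resolve => {
        extract.on('finish', resolve);

        const bufferStream = new stream.PassThrough();
        bufferStream.end(fileBuffer);
        bufferStream.pipe(extract);
    });
}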
@@ -309,7 +311,27 @@ async function importTar(fileBuffer, importRootNote) {
 
     });
 
     return new Promise(resolve => {
-        extract.on('finish', function() {
+        extract.on('finish', async function() {
+            const createdNoteIds = {};
+
+            for (const path in createdPaths) {
+                createdNoteIds[createdPaths[path]] = true;
+            }
+
+            // we're saving attributes and links only now so that all relation and link target notes
+            // are already in the database (we don't want to have "broken" relations, not even transitionally)
+            for (const attr of attributes) {
+                if (attr.value in createdNoteIds) {
+                    await new Attribute(attr).save();
+                }
+            }
+
+            for (const link of links) {
+                if (link.targetNoteId in createdNoteIds) {
+                    await new Link(link).save();
+                }
+            }
+
             resolve(firstNote);
         });
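The core of this commit is the two-phase import shown above: notes are created while the tar streams in, and attributes and links are persisted only at the end, once every possible target note exists. A condensed, self-contained sketch of that pattern follows; entries, saveNote and saveAttribute are hypothetical helpers, and the attr.value in createdNoteIds check is the same one the commit uses:

async function importNotes(entries, saveNote, saveAttribute) {
    const createdNoteIds = {};
    const attributes = [];

    // phase 1: create all notes first, only collecting their attributes
    for (const entry of entries) {
        const note = await saveNote(entry);

        createdNoteIds[note.noteId] = true;

        for (const attr of entry.attributes || []) {
            attributes.push({...attr, noteId: note.noteId});
        }
    }

    // phase 2: persist attributes whose target note really was part of this import,
    // so no relation ever points at a note that does not exist yet
    for (const attr of attributes) {
        if (attr.value in createdNoteIds) {
            await saveAttribute(attr);
        }
    }
}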


@@ -327,11 +327,11 @@ eventService.subscribe(eventService.ENTITY_CHANGED, async ({entityName, entity})
         if (attribute.type === 'label' && attribute.name === 'archived') {
             // we're not using label object directly, since there might be other non-deleted archived label
-            const hideLabel = await repository.getEntity(`SELECT * FROM attributes WHERE isDeleted = 0 AND type = 'label'
+            const archivedLabel = await repository.getEntity(`SELECT * FROM attributes WHERE isDeleted = 0 AND type = 'label'
                                                            AND name = 'archived' AND noteId = ?`, [attribute.noteId]);
 
-            if (hideLabel) {
-                archived[attribute.noteId] = hideLabel.isInheritable ? 1 : 0;
+            if (archivedLabel) {
+                archived[attribute.noteId] = archivedLabel.isInheritable ? 1 : 0;
             }
             else {
                 delete archived[attribute.noteId];


@@ -107,6 +107,7 @@ async function updateEntity(entity) {
 
             // it seems to be better to handle deletion and update separately
             await eventService.emit(entity.isDeleted ? eventService.ENTITY_DELETED : eventService.ENTITY_CHANGED, eventPayload);
         }
     });
+}