// trilium/src/routes/api/import.js
"use strict";
2018-04-01 23:42:12 +08:00
const repository = require('../../services/repository');
const labelService = require('../../services/labels');
const noteService = require('../../services/notes');
const tar = require('tar-stream');
const stream = require('stream');
const path = require('path');
2017-12-03 12:41:18 +08:00
/**
 * Splits a tar entry name into the note's base name and the payload kind.
 *
 * Import archives store each note as a pair of entries: "<name>.dat" holds the
 * note content and "<name>.meta" holds its JSON metadata.
 *
 * @param {string} name - entry name from the tar header (may contain directories)
 * @returns {{name: string, key: string}} base name without extension and either "data" or "meta"
 * @throws {Error} when the entry has neither a .dat nor a .meta extension
 */
function getFileName(name) {
    if (name.endsWith(".dat")) {
        // slice with a negative index drops the ".dat" suffix (substr is deprecated)
        return { name: name.slice(0, -4), key: "data" };
    }
    else if (name.endsWith(".meta")) {
        return { name: name.slice(0, -5), key: "meta" };
    }
    else {
        throw new Error("Unknown file type in import archive: " + name);
    }
}
2017-12-03 12:41:18 +08:00
/**
 * Parses an uploaded tar archive into a tree of file descriptors.
 *
 * Each archive entry is "<name>.dat" (raw content) or "<name>.meta" (JSON
 * metadata, parsed into an object). Entries sharing a base name are merged into
 * one descriptor { meta, data, children }; directory structure in the archive
 * becomes the children arrays.
 *
 * @param {{buffer: Buffer}} file - uploaded file (multer-style), whole archive held in memory
 * @returns {Promise<Array>} resolves with the top-level descriptors, rejects on a
 *                           corrupted archive, unknown entry type or malformed meta JSON
 */
async function parseImportFile(file) {
    const fileMap = {};
    const files = [];

    const extract = tar.extract();

    return new Promise((resolve, reject) => {
        // "entryStream" avoids shadowing the required 'stream' module used below
        extract.on('entry', function (header, entryStream, next) {
            let name, key;

            try {
                ({name, key} = getFileName(header.name));
            }
            catch (e) {
                // previously this threw inside the event handler and the promise never settled
                reject(e);
                return;
            }

            let noteFile = fileMap[name];

            if (!noteFile) {
                noteFile = fileMap[name] = {
                    children: []
                };

                const parentFileName = path.dirname(header.name);

                // top-level entries have dirname "."; everything else hangs off its parent.
                // NOTE(review): assumes the parent's entries appear in the archive before
                // its children (fileMap[parentFileName] would otherwise be undefined) - confirm
                if (parentFileName && parentFileName !== '.') {
                    fileMap[parentFileName].children.push(noteFile);
                }
                else {
                    files.push(noteFile);
                }
            }

            const chunks = [];

            entryStream.on("data", chunk => chunks.push(chunk));

            // entryStream is the content body (might be an empty stream);
            // next() must be called before tar-stream emits the following entry
            entryStream.on('end', function () {
                try {
                    noteFile[key] = Buffer.concat(chunks);

                    if (key === "meta") {
                        noteFile[key] = JSON.parse(noteFile[key].toString("UTF-8"));
                    }
                }
                catch (e) {
                    reject(e);
                    return;
                }

                next(); // ready for next entry
            });

            entryStream.resume(); // auto drain the stream
        });

        // without this a corrupted archive would leave the promise pending forever
        extract.on('error', reject);

        extract.on('finish', function () {
            resolve(files);
        });

        const bufferStream = new stream.PassThrough();
        bufferStream.end(file.buffer);
        bufferStream.pipe(extract);
    });
}
/**
 * Route handler: imports an uploaded tar archive as a subtree of an existing note.
 *
 * @param {object} req - Express request; expects req.params.parentNoteId and
 *                       the uploaded archive in req.file
 * @returns {Promise<Array|undefined>} [404, message] tuple when the parent note
 *                                     is missing, otherwise undefined on success
 */
async function importTar(req) {
    const {parentNoteId} = req.params;
    const {file} = req;

    const parentNote = await repository.getNote(parentNoteId);

    // refuse to import under a note that doesn't exist
    if (!parentNote) {
        return [404, `Note ${parentNoteId} doesn't exist.`];
    }

    const files = await parseImportFile(file);

    await importNotes(files, parentNoteId);
}
/**
 * Recursively creates notes (with their labels) from parsed import descriptors.
 *
 * @param {Array} files - descriptors produced by parseImportFile ({meta, data, children})
 * @param {string} parentNoteId - note under which this level is created
 * @throws {Error} when a descriptor's meta version is not 1
 */
async function importNotes(files, parentNoteId) {
    for (const file of files) {
        const {meta} = file;

        if (meta.version !== 1) {
            throw new Error("Can't read meta data version " + meta.version);
        }

        // binary "file" notes keep their raw buffer; everything else becomes text
        if (meta.type !== 'file') {
            file.data = file.data.toString("UTF-8");
        }

        const noteId = await noteService.createNote(parentNoteId, meta.title, file.data, {
            type: meta.type,
            mime: meta.mime
        });

        for (const {name, value} of meta.labels) {
            await labelService.createLabel(noteId, name, value);
        }

        if (file.children.length > 0) {
            await importNotes(file.children, noteId);
        }
    }
}
2018-03-31 03:34:07 +08:00
// public API of this module - wired up as an Express route handler elsewhere
module.exports = {
    importTar
};