// trilium — src/routes/api/import.js
// API route: import a tar archive of exported notes under an existing parent note.
"use strict";
const express = require('express');
const router = express.Router();
const sql = require('../../services/sql');
const auth = require('../../services/auth');
const labels = require('../../services/labels');
const notes = require('../../services/notes');
const wrap = require('express-promise-wrap').wrap;
const tar = require('tar-stream');
const multer = require('multer')();
const stream = require('stream');
const path = require('path');
/**
 * Splits a tar entry name into its base name and content kind.
 *
 * @param {string} name - entry name ending in ".dat" (raw note content)
 *                        or ".meta" (JSON metadata)
 * @returns {{name: string, key: string}} base name with the extension
 *          stripped, and key "data" or "meta"
 * @throws {Error} when the name carries neither recognized extension
 */
function getFileName(name) {
    let key;

    if (name.endsWith(".dat")) {
        key = "data";
        name = name.slice(0, -4); // strip ".dat"
    }
    else if (name.endsWith(".meta")) {
        key = "meta";
        name = name.slice(0, -5); // strip ".meta"
    }
    else {
        throw new Error("Unknown file type in import archive: " + name);
    }

    return {name, key};
}
/**
 * Parses an uploaded tar archive (a multer file object) into a tree of
 * note files.
 *
 * Each note is represented by a pair of archive entries sharing a base
 * name: "<name>.meta" (JSON metadata) and "<name>.dat" (content).
 * Directory nesting in the archive becomes parent/child nesting in the
 * resulting tree.
 *
 * @param file - multer file object; file.buffer holds the tar bytes
 * @returns {Promise<Array>} root nodes of the parsed tree, each node
 *          shaped as {meta, data, children}
 */
async function parseImportFile(file) {
    const fileMap = {};
    const files = [];

    const extract = tar.extract();

    return new Promise((resolve, reject) => {
        extract.on('entry', function(header, entryStream, next) {
            let name;
            let key;

            try {
                ({name, key} = getFileName(header.name));
            }
            catch (e) {
                // previously this throw escaped the listener as an uncaught
                // exception; surface it as a rejection instead
                reject(e);
                return;
            }

            let noteFile = fileMap[name];

            if (!noteFile) {
                noteFile = fileMap[name] = {
                    children: []
                };

                // NOTE(review): assumes a parent's entries appear in the
                // archive before its children's, so fileMap[parentFileName]
                // already exists here — confirm against the export writer
                const parentFileName = path.dirname(header.name);

                if (parentFileName && parentFileName !== '.') {
                    fileMap[parentFileName].children.push(noteFile);
                }
                else {
                    files.push(noteFile);
                }
            }

            const chunks = [];

            entryStream.on("data", chunk => chunks.push(chunk));

            entryStream.on('end', function() {
                noteFile[key] = Buffer.concat(chunks);

                if (key === "meta") {
                    noteFile[key] = JSON.parse(noteFile[key].toString("UTF-8"));
                }

                next(); // ready for the next entry
            });

            entryStream.resume(); // auto-drain the content stream
        });

        // without this a corrupt archive left the promise pending forever
        extract.on('error', reject);

        extract.on('finish', function() {
            resolve(files);
        });

        const bufferStream = new stream.PassThrough();
        bufferStream.end(file.buffer);
        bufferStream.pipe(extract);
    });
}
/**
 * POST /:parentNoteId — imports an uploaded tar archive (form field
 * "upload") as a subtree of notes under the given parent note.
 *
 * Responds 400 when no file was uploaded, 404 when the parent note does
 * not exist, otherwise {} on success. The whole subtree is imported in a
 * single transaction.
 */
router.post('/:parentNoteId', auth.checkApiAuthOrElectron, multer.single('upload'), wrap(async (req, res, next) => {
    const sourceId = req.headers.source_id;
    const parentNoteId = req.params.parentNoteId;
    const file = req.file;

    // multer leaves req.file undefined when the "upload" field is missing;
    // without this guard parseImportFile crashes on file.buffer
    if (!file) {
        return res.status(400).send("No file has been uploaded.");
    }

    const note = await sql.getRow("SELECT * FROM notes WHERE noteId = ?", [parentNoteId]);

    if (!note) {
        return res.status(404).send(`Note ${parentNoteId} doesn't exist.`);
    }

    const files = await parseImportFile(file);

    // import atomically — either the whole subtree lands or nothing does
    await sql.doInTransaction(async () => {
        await importNotes(files, parentNoteId, sourceId);
    });

    res.send({});
}));
/**
 * Recursively creates notes (and their labels) from a parsed import tree.
 *
 * @param {Array} files - parsed nodes shaped as {meta, data, children}
 * @param {string} parentNoteId - noteId under which these files are created
 * @param {string} sourceId - source identifier propagated to created notes
 * @throws {Error} when a file's metadata version is not supported
 */
async function importNotes(files, parentNoteId, sourceId) {
    for (const file of files) {
        if (file.meta.version !== 1) {
            throw new Error("Can't read meta data version " + file.meta.version);
        }

        if (file.meta.type !== 'file') {
            // non-binary note types store their content as a string
            file.data = file.data.toString("UTF-8");
        }

        const noteId = await notes.createNote(parentNoteId, file.meta.title, file.data, {
            type: file.meta.type,
            mime: file.meta.mime,
            sourceId: sourceId
        });

        // guard against archives whose metadata omits the labels array —
        // previously this crashed with a TypeError on a missing property
        for (const attr of file.meta.labels || []) {
            await labels.createLabel(noteId, attr.name, attr.value);
        }

        if (file.children.length > 0) {
            await importNotes(file.children, noteId, sourceId);
        }
    }
}
// export the configured import router
module.exports = router;