"use strict";

const express = require('express');
const router = express.Router();
const sql = require('../../services/sql');
const auth = require('../../services/auth');
const labels = require('../../services/labels');
const notes = require('../../services/notes');
const wrap = require('express-promise-wrap').wrap;
const tar = require('tar-stream');
const multer = require('multer')();
const stream = require('stream');
const path = require('path');
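
// Archive entries use a "<name>.dat" / "<name>.meta" naming scheme: .dat holds a
// note's content and .meta holds its JSON metadata. getFileName() splits an entry
// name into the base name and the key ("data" or "meta") under which its bytes
// will be stored; any other suffix is rejected.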
function getFileName(name) {
    let key;

    if (name.endsWith(".dat")) {
        key = "data";
        name = name.substr(0, name.length - 4);
    }
    else if (name.endsWith(".meta")) {
        key = "meta";
        name = name.substr(0, name.length - 5);
    }
    else {
        throw new Error("Unknown file type in import archive: " + name);
    }

    return {name, key};
}
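
// Parses the uploaded tar archive into a tree of {meta, data, children} objects
// that mirrors the directory layout of the archive. Based on how importNotes()
// consumes it, a .meta entry is JSON roughly like (illustrative values):
//   { "version": 1, "title": "...", "type": "text", "mime": "text/html",
//     "labels": [{ "name": "...", "value": "..." }] }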
async function parseImportFile(file) {
    const fileMap = {};
    const files = [];

    const extract = tar.extract();
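
    // Tar entries stream in one at a time. Each entry's bytes are collected and
    // attached to the matching file object, which is created (and linked into its
    // parent directory's children, assuming the parent entry appeared earlier in
    // the archive) on first sight of either its .dat or .meta entry.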
    extract.on('entry', function(header, stream, next) {
        let {name, key} = getFileName(header.name);

        let file = fileMap[name];

        if (!file) {
            file = fileMap[name] = {
                children: []
            };

            let parentFileName = path.dirname(header.name);

            if (parentFileName && parentFileName !== '.') {
                fileMap[parentFileName].children.push(file);
            }
            else {
                files.push(file);
            }
        }

        const chunks = [];

        stream.on("data", function (chunk) {
            chunks.push(chunk);
        });

        // header is the tar header
        // stream is the content body (might be an empty stream)
        // call next when you are done with this entry
        stream.on('end', function() {
            file[key] = Buffer.concat(chunks);

            if (key === "meta") {
                file[key] = JSON.parse(file[key].toString("UTF-8"));
            }

            next(); // ready for next entry
        });

        stream.resume(); // just auto drain the stream
    });
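
    // The upload is held as an in-memory Buffer (multer without options uses memory
    // storage), so wrap it in a PassThrough stream and pipe it through the tar
    // extractor; the promise resolves with the root-level files once the whole
    // archive has been read.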
    return new Promise(resolve => {
        extract.on('finish', function() {
            resolve(files);
        });

        const bufferStream = new stream.PassThrough();
        bufferStream.end(file.buffer);

        bufferStream.pipe(extract);
    });
}
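
// Handler for the tar import request: checks that the target parent note exists,
// parses the uploaded archive and imports its entries as child notes. Returns a
// [status, message] pair when the parent note cannot be found, which the caller
// is presumably responsible for turning into the HTTP response.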
async function importTar(req) {
    const sourceId = req.headers.source_id;
    const parentNoteId = req.params.parentNoteId;
    const file = req.file;

    const note = await sql.getRow("SELECT * FROM notes WHERE noteId = ?", [parentNoteId]);

    if (!note) {
        return [404, `Note ${parentNoteId} doesn't exist.`];
    }

    const files = await parseImportFile(file);

    await importNotes(files, parentNoteId, sourceId);
}
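
// Recursively creates a note for every imported file: the content becomes the note
// body (decoded to a string for everything except type 'file'), each meta.labels
// entry becomes a label on the new note, and children are imported beneath it.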
async function importNotes(files, parentNoteId, sourceId) {
    for (const file of files) {
        if (file.meta.version !== 1) {
            throw new Error("Can't read meta data version " + file.meta.version);
        }

        if (file.meta.type !== 'file') {
            file.data = file.data.toString("UTF-8");
        }

        const noteId = await notes.createNote(parentNoteId, file.meta.title, file.data, {
            type: file.meta.type,
            mime: file.meta.mime,
            sourceId: sourceId
        });

        for (const attr of file.meta.labels) {
            await labels.createLabel(noteId, attr.name, attr.value);
        }

        if (file.children.length > 0) {
            await importNotes(file.children, noteId, sourceId);
        }
    }
}

module.exports = {
    importTar
};
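
// Minimal wiring sketch, illustrative only: the route path and the "upload" field
// name are assumptions, not taken from this file, and the real app may map the
// [status, message] return value onto the response elsewhere.
//
//   router.post('/api/import/:parentNoteId', multer.single('upload'), wrap(async (req, res) => {
//       const result = await importTar(req);
//
//       if (Array.isArray(result)) {
//           res.status(result[0]).send(result[1]);
//       }
//       else {
//           res.sendStatus(200);
//       }
//   }));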