mirror of https://github.com/zadam/trilium.git (synced 2025-01-07 15:49:01 +08:00)
import notes from tar archive, closes #63
parent 60bba46d80
commit 1501fa8dbf
9 changed files with 175 additions and 117 deletions
package-lock.json (generated): 2 changes
@@ -1,6 +1,6 @@
 {
   "name": "trilium",
-  "version": "0.6.2",
+  "version": "0.7.0-beta",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
@@ -57,6 +57,7 @@
     "session-file-store": "^1.1.2",
     "simple-node-logger": "^0.93.30",
     "sqlite": "^2.9.0",
+    "tar-stream": "^1.5.5",
     "unescape": "^1.0.1",
     "ws": "^3.3.2"
   },
@@ -6,6 +6,26 @@ function exportSubTree(noteId) {
     download(url);
 }
 
-function importSubTree(noteId) {
-    let importNoteId;
-
-}
+function importSubTree(noteId) {
+    importNoteId = noteId;
+
+    $("#import-upload").trigger('click');
+}
+
+$("#import-upload").change(async function() {
+    const formData = new FormData();
+    formData.append('upload', this.files[0]);
+
+    await $.ajax({
+        url: baseApiUrl + 'import/' + importNoteId,
+        headers: server.getHeaders(),
+        data: formData,
+        type: 'POST',
+        contentType: false, // NEEDED, DON'T OMIT THIS
+        processData: false, // NEEDED, DON'T OMIT THIS
+    });
+
+    await noteTree.reload();
+});
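The jQuery call above disables contentType and processData so the multipart FormData body reaches the server untouched. Below is a minimal sketch (not part of this commit) of the same upload done with the browser's fetch API instead of $.ajax; baseApiUrl, server.getHeaders() and noteTree.reload() are assumed to exist exactly as in the snippet above.

// Sketch only: same upload via fetch. FormData sets the multipart
// Content-Type (including the boundary) automatically, so no special flags are needed.
async function uploadImportFile(parentNoteId, fileInput) {
    const formData = new FormData();
    formData.append('upload', fileInput.files[0]); // same field name the route expects

    const response = await fetch(baseApiUrl + 'import/' + parentNoteId, {
        method: 'POST',
        headers: server.getHeaders(), // auth/source headers, as in the $.ajax call
        body: formData
    });

    if (!response.ok) {
        throw new Error('Import failed: ' + response.status);
    }

    await noteTree.reload(); // refresh the tree after import
}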
@@ -226,10 +226,10 @@ if (isElectron()) {
 }
 
 function uploadAttachment() {
-    $("#file-upload").trigger('click');
+    $("#attachment-upload").trigger('click');
 }
 
-$("#file-upload").change(async function() {
+$("#attachment-upload").change(async function() {
     const formData = new FormData();
     formData.append('upload', this.files[0]);
 
@@ -31,6 +31,10 @@ async function exportNote(noteTreeId, directory, pack) {
     const noteTree = await sql.getRow("SELECT * FROM note_tree WHERE noteTreeId = ?", [noteTreeId]);
     const note = await sql.getRow("SELECT * FROM notes WHERE noteId = ?", [noteTree.noteId]);
 
+    if (note.isProtected) {
+        return;
+    }
+
     const content = note.type === 'text' ? html.prettyPrint(note.content, {indent_size: 2}) : note.content;
 
     const childFileName = directory + sanitize(note.title);
@@ -2,104 +2,128 @@
 
 const express = require('express');
 const router = express.Router();
-const fs = require('fs');
 const sql = require('../../services/sql');
-const data_dir = require('../../services/data_dir');
 const utils = require('../../services/utils');
 const sync_table = require('../../services/sync_table');
 const auth = require('../../services/auth');
 const notes = require('../../services/notes');
 const wrap = require('express-promise-wrap').wrap;
+const tar = require('tar-stream');
+const multer = require('multer')();
+const stream = require('stream');
+const path = require('path');
 
-router.get('/:directory/to/:parentNoteId', auth.checkApiAuth, wrap(async (req, res, next) => {
-    const directory = req.params.directory.replace(/[^0-9a-zA-Z_-]/gi, '');
+function getFileName(name) {
+    let key;
+
+    if (name.endsWith(".dat")) {
+        key = "data";
+        name = name.substr(0, name.length - 4);
+    }
+    else if (name.endsWith((".meta"))) {
+        key = "meta";
+        name = name.substr(0, name.length - 5);
+    }
+    else {
+        throw new Error("Unknown file type in import archive: " + name);
+    }
+
+    return {name, key};
+}
+
+async function parseImportFile(file) {
+    const fileMap = {};
+    const files = [];
+
+    const extract = tar.extract();
+
+    extract.on('entry', function(header, stream, next) {
+        let {name, key} = getFileName(header.name);
+
+        let file = fileMap[name];
+
+        if (!file) {
+            file = fileMap[name] = {
+                children: []
+            };
+
+            let parentFileName = path.dirname(header.name);
+
+            if (parentFileName && parentFileName !== '.') {
+                fileMap[parentFileName].children.push(file);
+            }
+            else {
+                files.push(file);
+            }
+        }
+
+        const chunks = [];
+
+        stream.on("data", function (chunk) {
+            chunks.push(chunk);
+        });
+
+        // header is the tar header
+        // stream is the content body (might be an empty stream)
+        // call next when you are done with this entry
+
+        stream.on('end', function() {
+            file[key] = Buffer.concat(chunks);
+
+            if (key === "meta") {
+                file[key] = JSON.parse(file[key].toString("UTF-8"));
+            }
+
+            next(); // ready for next entry
+        });
+
+        stream.resume(); // just auto drain the stream
+    });
+
+    return new Promise(resolve => {
+        extract.on('finish', function() {
+            resolve(files);
+        });
+
+        const bufferStream = new stream.PassThrough();
+        bufferStream.end(file.buffer);
+
+        bufferStream.pipe(extract);
+    });
+}
+
+router.post('/:parentNoteId', auth.checkApiAuthOrElectron, multer.single('upload'), wrap(async (req, res, next) => {
+    const sourceId = req.headers.source_id;
+    const parentNoteId = req.params.parentNoteId;
+    const file = req.file;
 
-    const dir = data_dir.EXPORT_DIR + '/' + directory;
+    const note = await sql.getRow("SELECT * FROM notes WHERE noteId = ?", [parentNoteId]);
 
-    await sql.doInTransaction(async () => await importNotes(dir, parentNoteId));
+    if (!note) {
+        return res.status(404).send(`Note ${parentNoteId} doesn't exist.`);
+    }
+
+    const files = await parseImportFile(file);
+
+    await sql.doInTransaction(async () => {
+        await importNotes(files, parentNoteId, sourceId);
+    });
 
     res.send({});
 }));
 
-async function importNotes(dir, parentNoteId) {
-    const parent = await sql.getRow("SELECT * FROM notes WHERE noteId = ?", [parentNoteId]);
-
-    if (!parent) {
-        return;
-    }
-
-    const fileList = fs.readdirSync(dir);
-
-    for (const file of fileList) {
-        const path = dir + '/' + file;
-
-        if (fs.lstatSync(path).isDirectory()) {
-            continue;
+async function importNotes(files, parentNoteId, sourceId) {
+    for (const file of files) {
+        if (file.meta.type !== 'file') {
+            file.data = file.data.toString("UTF-8");
         }
 
-        if (!file.endsWith('.html')) {
-            continue;
-        }
-
-        const fileNameWithoutExt = file.substr(0, file.length - 5);
-
-        let noteTitle;
-        let notePos;
-
-        const match = fileNameWithoutExt.match(/^([0-9]{4})-(.*)$/);
-        if (match) {
-            notePos = parseInt(match[1]);
-            noteTitle = match[2];
-        }
-        else {
-            let maxPos = await sql.getValue("SELECT MAX(notePosition) FROM note_tree WHERE parentNoteId = ? AND isDeleted = 0", [parentNoteId]);
-            if (maxPos) {
-                notePos = maxPos + 1;
-            }
-            else {
-                notePos = 0;
-            }
-
-            noteTitle = fileNameWithoutExt;
-        }
-
-        const noteText = fs.readFileSync(path, "utf8");
-
-        const noteId = utils.newNoteId();
-        const noteTreeId = utils.newNoteRevisionId();
-
-        const now = utils.nowDate();
-
-        await sql.insert('note_tree', {
-            noteTreeId: noteTreeId,
-            noteId: noteId,
-            parentNoteId: parentNoteId,
-            notePosition: notePos,
-            isExpanded: 0,
-            isDeleted: 0,
-            dateModified: now
+        const noteId = await notes.createNote(parentNoteId, file.meta.title, file.data, {
+            type: file.meta.type,
+            mime: file.meta.mime,
+            attributes: file.meta.attributes,
+            sourceId: sourceId
         });
 
-        await sync_table.addNoteTreeSync(noteTreeId);
-
-        await sql.insert('notes', {
-            noteId: noteId,
-            title: noteTitle,
-            content: noteText,
-            isDeleted: 0,
-            isProtected: 0,
-            type: 'text',
-            mime: 'text/html',
-            dateCreated: now,
-            dateModified: now
-        });
-
-        await sync_table.addNoteSync(noteId);
-
-        const noteDir = dir + '/' + fileNameWithoutExt;
-
-        if (fs.existsSync(noteDir) && fs.lstatSync(noteDir).isDirectory()) {
-            await importNotes(noteDir, noteId);
+        if (file.children.length > 0) {
+            await importNotes(file.children, noteId, sourceId);
        }
     }
 }
 
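For reference, here is a minimal standalone sketch (not part of this commit) of the tar-stream flow that parseImportFile relies on: extract emits one 'entry' event per archive member, each entry's content arrives as a stream that must be consumed before calling next(), and 'finish' fires once the whole archive has been read. The archive path and the shape of the result are illustrative only.

const fs = require('fs');
const tar = require('tar-stream');

// Sketch: collect every entry of a tar archive into a { name: Buffer } map
// (assumes a small archive that fits comfortably in memory).
function readTarEntries(tarPath) {
    return new Promise((resolve, reject) => {
        const entries = {};
        const extract = tar.extract();

        extract.on('entry', (header, entryStream, next) => {
            const chunks = [];

            entryStream.on('data', chunk => chunks.push(chunk));
            entryStream.on('end', () => {
                entries[header.name] = Buffer.concat(chunks);
                next(); // ready for the next entry
            });

            entryStream.resume();
        });

        extract.on('finish', () => resolve(entries));
        extract.on('error', reject);

        fs.createReadStream(tarPath).pipe(extract);
    });
}

// Usage: readTarEntries('notes-export.tar').then(entries => console.log(Object.keys(entries)));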
@@ -83,6 +83,37 @@ async function createNewNote(parentNoteId, noteOpts, dataKey, sourceId) {
     };
 }
 
+async function createNote(parentNoteId, title, content = "", extraOptions = {}) {
+    const note = {
+        title: title,
+        content: extraOptions.json ? JSON.stringify(content, null, '\t') : content,
+        target: 'into',
+        isProtected: extraOptions.isProtected !== undefined ? extraOptions.isProtected : false,
+        type: extraOptions.type,
+        mime: extraOptions.mime
+    };
+
+    if (extraOptions.json) {
+        note.type = "code";
+        note.mime = "application/json";
+    }
+
+    if (!note.type) {
+        note.type = "text";
+        note.mime = "text/html";
+    }
+
+    const {noteId} = await createNewNote(parentNoteId, note, extraOptions.dataKey, extraOptions.sourceId);
+
+    if (extraOptions.attributes) {
+        for (const attrName in extraOptions.attributes) {
+            await attributes.createAttribute(noteId, attrName, extraOptions.attributes[attrName]);
+        }
+    }
+
+    return noteId;
+}
+
 async function protectNoteRecursively(noteId, dataKey, protect, sourceId) {
     const note = await sql.getRow("SELECT * FROM notes WHERE noteId = ?", [noteId]);
 
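A hedged usage sketch of the new notes.createNote helper, mirroring how the import route calls it (not part of this commit; parentNoteId, sourceId and the attribute name/value are placeholders):

// Sketch: create a text note with one attribute under an existing parent,
// inside a transaction, as the import route does.
await sql.doInTransaction(async () => {
    const noteId = await notes.createNote(parentNoteId, 'Imported note', '<p>Hello</p>', {
        type: 'text',                                       // falls back to text/html when omitted
        mime: 'text/html',
        attributes: { original_file_name: 'hello.html' },   // hypothetical attribute
        sourceId: sourceId                                  // so sync can attribute the change
    });

    console.log('created note', noteId);
});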
@@ -307,6 +338,7 @@ async function deleteNote(noteTreeId, sourceId) {
 
 module.exports = {
     createNewNote,
+    createNote,
     updateNote,
     deleteNote,
     protectNoteRecursively
@@ -27,35 +27,10 @@ function ScriptContext(dataKey) {
         return notes.length > 0 ? notes[0] : null;
     };
 
-    this.createNote = async function (parentNoteId, title, content = "", extraOptions = {}) {
-        const note = {
-            title: title,
-            content: extraOptions.json ? JSON.stringify(content, null, '\t') : content,
-            target: 'into',
-            isProtected: extraOptions.isProtected !== undefined ? extraOptions.isProtected : false,
-            type: extraOptions.type,
-            mime: extraOptions.mime
-        };
+    this.createNote = async function(parentNoteId, title, content = "", extraOptions = {}) {
+        extraOptions.dataKey = dataKey;
 
-        if (extraOptions.json) {
-            note.type = "code";
-            note.mime = "application/json";
-        }
-
-        if (!note.type) {
-            note.type = "text";
-            note.mime = "text/html";
-        }
-
-        const noteId = (await notes.createNewNote(parentNoteId, note, dataKey)).noteId;
-
-        if (extraOptions.attributes) {
-            for (const attrName in extraOptions.attributes) {
-                await attributes.createAttribute(noteId, attrName, extraOptions.attributes[attrName]);
-            }
-        }
-
-        return noteId;
+        notes.createNote(parentNoteId, title, content, extraOptions);
     };
 
     this.createAttribute = attributes.createAttribute;
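The script-side createNote now just attaches the script's dataKey and forwards everything to notes.createNote. A minimal sketch of how a user script might call it (sketch only; the way the context object is exposed to scripts is assumed here, and the parent note ID and attribute are placeholders):

// Sketch: inside a script executed with this ScriptContext available as `ctx`.
await ctx.createNote('rootNoteId', 'Shopping list', { items: [] }, {
    json: true,                           // stored as a code note with application/json mime
    attributes: { is_task_list: 'true' }  // hypothetical attribute name/value
});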
@@ -56,6 +56,8 @@
                 <img src="images/icons/search.png" alt="Search in notes"/>
             </a>
         </div>
+
+        <input type="file" id="import-upload" style="display: none" />
     </div>
 
     <div id="search-box" class="hide-toggle" style="grid-area: search; display: none; padding: 10px; margin-top: 10px;">
@@ -167,7 +169,7 @@
             </table>
         </div>
 
-        <input type="file" id="file-upload" style="display: none" />
+        <input type="file" id="attachment-upload" style="display: none" />
     </div>
 
     <div id="attribute-list">