const sql = require('./sql');
const utils = require('./utils');
const options = require('./options');
const log = require('./log');
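
// getHash() chains utils.hash() over the JSON serialization of each row:
// every row feeds into the running hash, so a change in any single row
// (or in the row order) produces a different result.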
function getHash(rows) {
    let hash = '';

    for (const row of rows) {
        hash = utils.hash(hash + JSON.stringify(row));
    }

    return hash;
}
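
// getHashes() computes one content hash per table by selecting the relevant
// columns in a fixed ORDER BY, hashing the rows with getHash(), and logging
// how long the whole computation took.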
async function getHashes() {
    const startTime = new Date();
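
    // Each entry below hashes all rows of one table. The deterministic ORDER BY
    // keeps the row order stable, so identical data always yields the same hash.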
    const hashes = {
        notes: getHash(await sql.getAll(`
            SELECT
              note_id,
              note_title,
              note_text,
              type,
              date_modified,
              is_protected,
              is_deleted
            FROM notes
            ORDER BY note_id`)),

        notes_tree: getHash(await sql.getAll(`
            SELECT
              note_tree_id,
              note_id,
              parent_note_id,
              note_position,
              date_modified,
              is_deleted,
              prefix
            FROM notes_tree
            ORDER BY note_tree_id`)),

        notes_history: getHash(await sql.getAll(`
            SELECT
              note_history_id,
              note_id,
              note_title,
              note_text,
              date_modified_from,
              date_modified_to
            FROM notes_history
            ORDER BY note_history_id`)),

        recent_notes: getHash(await sql.getAll(`
            SELECT
              note_tree_id,
              note_path,
              date_accessed,
              is_deleted
            FROM recent_notes
            ORDER BY note_path`)),
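
        // only options flagged as synced (is_synced = 1) take part in the content hash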
        options: getHash(await sql.getAll(`
            SELECT
              opt_name,
              opt_value
            FROM options
            WHERE is_synced = 1
            ORDER BY opt_name`)),

        // we don't include image data on purpose because it is quite large;
        // the checksum is good enough to represent the data anyway
        images: getHash(await sql.getAll(`
            SELECT
              image_id,
              format,
              checksum,
              name,
              is_deleted,
              date_modified,
              date_created
            FROM images
            ORDER BY image_id`)),

        attributes: getHash(await sql.getAll(`
            SELECT
              attribute_id,
              note_id,
              name,
              value,
              date_modified,
              date_created
            FROM attributes
            ORDER BY attribute_id`))
    };

    const elapseTimeMs = new Date().getTime() - startTime.getTime();

    log.info(`Content hash computation took ${elapseTimeMs}ms`);

    return hashes;
}
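
// Illustrative usage (hypothetical caller, not part of this module):
//   const hashes = await getHashes();
//   if (hashes.notes !== theirHashes.notes) { /* notes differ, e.g. trigger a fuller comparison */ }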

module.exports = {
    getHashes
};