Mirror of https://github.com/zadam/trilium.git, synced 2024-09-20 23:55:59 +08:00.
apply new query parsing to note autocomplete
This commit is contained in:
parent
b26100479d
commit
32dde426fd
|
@ -19,7 +19,7 @@ class Attribute {
|
|||
|
||||
this.noteCache.notes[this.noteId].ownedAttributes.push(this);
|
||||
|
||||
const key = `${this.type-this.name}`;
|
||||
const key = `${this.type}-${this.name}`;
|
||||
this.noteCache.attributeIndex[key] = this.noteCache.attributeIndex[key] || [];
|
||||
this.noteCache.attributeIndex[key].push(this);
|
||||
|
||||
|
|
|
@ -28,6 +28,8 @@ class AttributeExistsExp {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
return resultNoteSet;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -29,6 +29,8 @@ class FieldComparisonExp {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
return resultNoteSet;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -11,15 +11,64 @@ class NoteCacheFulltextExp {
|
|||
execute(noteSet, searchContext) {
|
||||
// has deps on SQL which breaks unit test so needs to be dynamically required
|
||||
const noteCacheService = require('../../note_cache/note_cache_service');
|
||||
|
||||
const resultNoteSet = new NoteSet();
|
||||
|
||||
function searchDownThePath(note, tokens, path) {
|
||||
if (tokens.length === 0) {
|
||||
const retPath = noteCacheService.getSomePath(note, path);
|
||||
|
||||
if (retPath) {
|
||||
const noteId = retPath[retPath.length - 1];
|
||||
searchContext.noteIdToNotePath[noteId] = retPath;
|
||||
|
||||
resultNoteSet.add(noteCache.notes[noteId]);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (!note.parents.length === 0 || note.noteId === 'root') {
|
||||
return;
|
||||
}
|
||||
|
||||
const foundAttrTokens = [];
|
||||
|
||||
for (const attribute of note.ownedAttributes) {
|
||||
for (const token of tokens) {
|
||||
if (attribute.name.toLowerCase().includes(token)
|
||||
|| attribute.value.toLowerCase().includes(token)) {
|
||||
foundAttrTokens.push(token);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const parentNote of note.parents) {
|
||||
const title = noteCacheService.getNoteTitle(note.noteId, parentNote.noteId).toLowerCase();
|
||||
const foundTokens = foundAttrTokens.slice();
|
||||
|
||||
for (const token of tokens) {
|
||||
if (title.includes(token)) {
|
||||
foundTokens.push(token);
|
||||
}
|
||||
}
|
||||
|
||||
if (foundTokens.length > 0) {
|
||||
const remainingTokens = tokens.filter(token => !foundTokens.includes(token));
|
||||
|
||||
searchDownThePath(parentNote, remainingTokens, path.concat([note.noteId]));
|
||||
}
|
||||
else {
|
||||
searchDownThePath(parentNote, tokens, path.concat([note.noteId]));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const candidateNotes = this.getCandidateNotes(noteSet);
|
||||
|
||||
for (const note of candidateNotes) {
|
||||
// autocomplete should be able to find notes by their noteIds as well (only leafs)
|
||||
if (this.tokens.length === 1 && note.noteId === this.tokens[0]) {
|
||||
this.searchDownThePath(note, [], [], resultNoteSet, searchContext);
|
||||
searchDownThePath(note, [], []);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -52,7 +101,7 @@ class NoteCacheFulltextExp {
|
|||
if (foundTokens.length > 0) {
|
||||
const remainingTokens = this.tokens.filter(token => !foundTokens.includes(token));
|
||||
|
||||
this.searchDownThePath(parentNote, remainingTokens, [note.noteId], resultNoteSet, searchContext);
|
||||
searchDownThePath(parentNote, remainingTokens, [note.noteId]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -80,56 +129,6 @@ class NoteCacheFulltextExp {
|
|||
|
||||
return candidateNotes;
|
||||
}
|
||||
|
||||
searchDownThePath(note, tokens, path, resultNoteSet, searchContext) {
|
||||
if (tokens.length === 0) {
|
||||
const retPath = noteCacheService.getSomePath(note, path);
|
||||
|
||||
if (retPath) {
|
||||
const noteId = retPath[retPath.length - 1];
|
||||
searchContext.noteIdToNotePath[noteId] = retPath;
|
||||
|
||||
resultNoteSet.add(noteCache.notes[noteId]);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (!note.parents.length === 0 || note.noteId === 'root') {
|
||||
return;
|
||||
}
|
||||
|
||||
const foundAttrTokens = [];
|
||||
|
||||
for (const attribute of note.ownedAttributes) {
|
||||
for (const token of tokens) {
|
||||
if (attribute.name.toLowerCase().includes(token)
|
||||
|| attribute.value.toLowerCase().includes(token)) {
|
||||
foundAttrTokens.push(token);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const parentNote of note.parents) {
|
||||
const title = noteCacheService.getNoteTitle(note.noteId, parentNote.noteId).toLowerCase();
|
||||
const foundTokens = foundAttrTokens.slice();
|
||||
|
||||
for (const token of tokens) {
|
||||
if (title.includes(token)) {
|
||||
foundTokens.push(token);
|
||||
}
|
||||
}
|
||||
|
||||
if (foundTokens.length > 0) {
|
||||
const remainingTokens = tokens.filter(token => !foundTokens.includes(token));
|
||||
|
||||
this.searchDownThePath(parentNote, remainingTokens, path.concat([note.noteId]), resultNoteSet, searchContext);
|
||||
}
|
||||
else {
|
||||
this.searchDownThePath(parentNote, tokens, path.concat([note.noteId]), resultNoteSet, searchContext);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = NoteCacheFulltextExp;
|
||||
|
|
|
@ -20,15 +20,13 @@ class NoteContentFulltextExp {
|
|||
JOIN note_contents ON notes.noteId = note_contents.noteId
|
||||
WHERE isDeleted = 0 AND isProtected = 0 AND ${wheres.join(' AND ')}`);
|
||||
|
||||
const results = [];
|
||||
|
||||
for (const noteId of noteIds) {
|
||||
if (noteSet.hasNoteId(noteId) && noteId in noteCache.notes) {
|
||||
resultNoteSet.add(noteCache.notes[noteId]);
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
return resultNoteSet;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
*/
|
||||
function parens(tokens) {
|
||||
if (tokens.length === 0) {
|
||||
throw new Error("Empty expression.");
|
||||
return [];
|
||||
}
|
||||
|
||||
while (true) {
|
||||
|
|
|
@ -1,14 +1,16 @@
|
|||
"use strict";
|
||||
|
||||
const NoteCacheFulltextExp = require("./expressions/note_cache_fulltext");
|
||||
const lexer = require('./lexer');
|
||||
const parens = require('./parens');
|
||||
const parser = require('./parser');
|
||||
const NoteSet = require("./note_set");
|
||||
const SearchResult = require("./search_result");
|
||||
const noteCache = require('../note_cache/note_cache');
|
||||
const noteCacheService = require('../note_cache/note_cache_service');
|
||||
const hoistedNoteService = require('../hoisted_note');
|
||||
const utils = require('../utils');
|
||||
|
||||
async function findNotesWithExpression(expression) {
|
||||
|
||||
const hoistedNote = noteCache.notes[hoistedNoteService.getHoistedNoteId()];
|
||||
const allNotes = (hoistedNote && hoistedNote.noteId !== 'root')
|
||||
? hoistedNote.subtreeNotes
|
||||
|
@ -23,7 +25,7 @@ async function findNotesWithExpression(expression) {
|
|||
const noteSet = await expression.execute(allNoteSet, searchContext);
|
||||
|
||||
let searchResults = noteSet.notes
|
||||
.map(note => searchContext.noteIdToNotePath[note.noteId] || getSomePath(note))
|
||||
.map(note => searchContext.noteIdToNotePath[note.noteId] || noteCacheService.getSomePath(note))
|
||||
.filter(notePathArray => notePathArray.includes(hoistedNoteService.getHoistedNoteId()))
|
||||
.map(notePathArray => new SearchResult(notePathArray));
|
||||
|
||||
|
@ -40,24 +42,30 @@ async function findNotesWithExpression(expression) {
|
|||
return searchResults;
|
||||
}
|
||||
|
||||
function parseQueryToExpression(query) {
|
||||
const {fulltextTokens, expressionTokens} = lexer(query);
|
||||
const structuredExpressionTokens = parens(expressionTokens);
|
||||
const expression = parser(fulltextTokens, structuredExpressionTokens, false);
|
||||
|
||||
return expression;
|
||||
}
|
||||
|
||||
async function searchNotesForAutocomplete(query) {
|
||||
if (!query.trim().length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const tokens = query
|
||||
.trim() // necessary because even with .split() trailing spaces are tokens which causes havoc
|
||||
.toLowerCase()
|
||||
.split(/[ -]/)
|
||||
.filter(token => token !== '/'); // '/' is used as separator
|
||||
const expression = parseQueryToExpression(query);
|
||||
|
||||
const expression = new NoteCacheFulltextExp(tokens);
|
||||
if (!expression) {
|
||||
return [];
|
||||
}
|
||||
|
||||
let searchResults = await findNotesWithExpression(expression);
|
||||
|
||||
searchResults = searchResults.slice(0, 200);
|
||||
|
||||
highlightSearchResults(searchResults, tokens);
|
||||
highlightSearchResults(searchResults, query);
|
||||
|
||||
return searchResults.map(result => {
|
||||
return {
|
||||
|
@ -68,7 +76,13 @@ async function searchNotesForAutocomplete(query) {
|
|||
});
|
||||
}
|
||||
|
||||
function highlightSearchResults(searchResults, tokens) {
|
||||
function highlightSearchResults(searchResults, query) {
|
||||
let tokens = query
|
||||
.trim() // necessary because even with .split() trailing spaces are tokens which causes havoc
|
||||
.toLowerCase()
|
||||
.split(/[ -]/)
|
||||
.filter(token => token !== '/');
|
||||
|
||||
// we remove < signs because they can cause trouble in matching and overwriting existing highlighted chunks
|
||||
// which would make the resulting HTML string invalid.
|
||||
// { and } are used for marking <b> and </b> tag (to avoid matches on single 'b' character)
|
||||
|
|
Loading…
Reference in a new issue