apply new query parsing to note autocomplete

zadam 2020-05-21 00:39:17 +02:00
parent b26100479d
commit 32dde426fd
7 changed files with 84 additions and 69 deletions
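In short: note autocomplete stops tokenizing the query by hand and building a NoteCacheFulltextExp directly, and instead runs the query through the new lexer → parens → parser pipeline. A condensed before/after of that control flow (a sketch lifted from the diffs below; it assumes the requires added in the search service and is not runnable on its own):

    // before – searchNotesForAutocomplete() built the expression itself:
    //   const tokens = query.trim().toLowerCase().split(/[ -]/).filter(token => token !== '/');
    //   const expression = new NoteCacheFulltextExp(tokens);

    // after – the raw query goes through the shared parsing pipeline:
    const {fulltextTokens, expressionTokens} = lexer(query);
    const structuredExpressionTokens = parens(expressionTokens);
    const expression = parser(fulltextTokens, structuredExpressionTokens, false);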

View file

@@ -19,7 +19,7 @@ class Attribute {
         this.noteCache.notes[this.noteId].ownedAttributes.push(this);

-        const key = `${this.type-this.name}`;
+        const key = `${this.type}-${this.name}`;

         this.noteCache.attributeIndex[key] = this.noteCache.attributeIndex[key] || [];
         this.noteCache.attributeIndex[key].push(this);
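The key fix matters more than it looks: inside a template literal, ${this.type-this.name} evaluates the arithmetic expression this.type - this.name, which for two strings is NaN, so every attribute would land under the same "NaN" index key. A standalone illustration with example values (not Trilium code):

    const type = 'label';
    const name = 'archived';

    console.log(`${type-name}`);     // "NaN"  – subtraction of two strings
    console.log(`${type}-${name}`);  // "label-archived" – the intended attributeIndex key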

View file

@@ -28,6 +28,8 @@ class AttributeExistsExp {
                 }
             }
         }
+
+        return resultNoteSet;
     }
 }

View file

@@ -29,6 +29,8 @@ class FieldComparisonExp {
                 }
             }
         }
+
+        return resultNoteSet;
     }
 }
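Both hunks above fix the same omission: execute() populated resultNoteSet but never returned it, so callers such as findNotesWithExpression() got undefined back. A minimal sketch of the contract these expression classes satisfy (simplified, with a stub NoteSet and a hypothetical expression class, not the actual Trilium code):

    class NoteSet {                                  // stub standing in for ./note_set
        constructor() { this.notes = []; }
        add(note) { this.notes.push(note); }
    }

    class SomeExp {                                  // hypothetical expression class
        execute(inputNoteSet, searchContext) {
            const resultNoteSet = new NoteSet();

            for (const note of inputNoteSet.notes) {
                if (note.title !== undefined) {      // placeholder for the real matching logic
                    resultNoteSet.add(note);
                }
            }

            return resultNoteSet;                    // the missing piece both hunks add back
        }
    }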

View file

@@ -11,15 +11,64 @@ class NoteCacheFulltextExp {
     execute(noteSet, searchContext) {
         // has deps on SQL which breaks unit test so needs to be dynamically required
         const noteCacheService = require('../../note_cache/note_cache_service');
         const resultNoteSet = new NoteSet();

+        function searchDownThePath(note, tokens, path) {
+            if (tokens.length === 0) {
+                const retPath = noteCacheService.getSomePath(note, path);
+
+                if (retPath) {
+                    const noteId = retPath[retPath.length - 1];
+                    searchContext.noteIdToNotePath[noteId] = retPath;
+
+                    resultNoteSet.add(noteCache.notes[noteId]);
+                }
+
+                return;
+            }
+
+            if (!note.parents.length === 0 || note.noteId === 'root') {
+                return;
+            }
+
+            const foundAttrTokens = [];
+
+            for (const attribute of note.ownedAttributes) {
+                for (const token of tokens) {
+                    if (attribute.name.toLowerCase().includes(token)
+                        || attribute.value.toLowerCase().includes(token)) {
+                        foundAttrTokens.push(token);
+                    }
+                }
+            }
+
+            for (const parentNote of note.parents) {
+                const title = noteCacheService.getNoteTitle(note.noteId, parentNote.noteId).toLowerCase();
+                const foundTokens = foundAttrTokens.slice();
+
+                for (const token of tokens) {
+                    if (title.includes(token)) {
+                        foundTokens.push(token);
+                    }
+                }
+
+                if (foundTokens.length > 0) {
+                    const remainingTokens = tokens.filter(token => !foundTokens.includes(token));
+
+                    searchDownThePath(parentNote, remainingTokens, path.concat([note.noteId]));
+                }
+                else {
+                    searchDownThePath(parentNote, tokens, path.concat([note.noteId]));
+                }
+            }
+        }
+
         const candidateNotes = this.getCandidateNotes(noteSet);

         for (const note of candidateNotes) {
             // autocomplete should be able to find notes by their noteIds as well (only leafs)
             if (this.tokens.length === 1 && note.noteId === this.tokens[0]) {
-                this.searchDownThePath(note, [], [], resultNoteSet, searchContext);
+                searchDownThePath(note, [], []);

                 continue;
             }
@@ -52,7 +101,7 @@ class NoteCacheFulltextExp {
                 if (foundTokens.length > 0) {
                     const remainingTokens = this.tokens.filter(token => !foundTokens.includes(token));

-                    this.searchDownThePath(parentNote, remainingTokens, [note.noteId], resultNoteSet, searchContext);
+                    searchDownThePath(parentNote, remainingTokens, [note.noteId]);
                 }
             }
         }
@@ -80,56 +129,6 @@ class NoteCacheFulltextExp {
         return candidateNotes;
     }

-    searchDownThePath(note, tokens, path, resultNoteSet, searchContext) {
-        if (tokens.length === 0) {
-            const retPath = noteCacheService.getSomePath(note, path);
-
-            if (retPath) {
-                const noteId = retPath[retPath.length - 1];
-                searchContext.noteIdToNotePath[noteId] = retPath;
-
-                resultNoteSet.add(noteCache.notes[noteId]);
-            }
-
-            return;
-        }
-
-        if (!note.parents.length === 0 || note.noteId === 'root') {
-            return;
-        }
-
-        const foundAttrTokens = [];
-
-        for (const attribute of note.ownedAttributes) {
-            for (const token of tokens) {
-                if (attribute.name.toLowerCase().includes(token)
-                    || attribute.value.toLowerCase().includes(token)) {
-                    foundAttrTokens.push(token);
-                }
-            }
-        }
-
-        for (const parentNote of note.parents) {
-            const title = noteCacheService.getNoteTitle(note.noteId, parentNote.noteId).toLowerCase();
-            const foundTokens = foundAttrTokens.slice();
-
-            for (const token of tokens) {
-                if (title.includes(token)) {
-                    foundTokens.push(token);
-                }
-            }
-
-            if (foundTokens.length > 0) {
-                const remainingTokens = tokens.filter(token => !foundTokens.includes(token));
-
-                this.searchDownThePath(parentNote, remainingTokens, path.concat([note.noteId]), resultNoteSet, searchContext);
-            }
-            else {
-                this.searchDownThePath(parentNote, tokens, path.concat([note.noteId]), resultNoteSet, searchContext);
-            }
-        }
-    }
 }

 module.exports = NoteCacheFulltextExp;
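The net effect of the hunks above: searchDownThePath stops being an instance method that has to thread resultNoteSet and searchContext through every recursive call and becomes a closure declared inside execute(), where it captures both directly. A compressed sketch of that shape (illustrative only, reusing the NoteSet stub from the earlier sketch, not the full class):

    class NoteCacheFulltextExp {                     // abbreviated
        constructor(tokens) { this.tokens = tokens; }

        execute(noteSet, searchContext) {
            const resultNoteSet = new NoteSet();

            // closure: resultNoteSet and searchContext are captured, so recursive
            // calls no longer need them as extra parameters
            function searchDownThePath(note, tokens, path) {
                if (tokens.length === 0) {
                    resultNoteSet.add(note);         // simplified – the real code resolves a note path first
                    return;
                }
                // ...recurse towards note.parents with the remaining tokens...
            }

            for (const note of noteSet.notes) {
                searchDownThePath(note, this.tokens, []);
            }

            return resultNoteSet;
        }
    }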

View file

@@ -20,15 +20,13 @@ class NoteContentFulltextExp {
                 JOIN note_contents ON notes.noteId = note_contents.noteId
                 WHERE isDeleted = 0 AND isProtected = 0 AND ${wheres.join(' AND ')}`);

-        const results = [];
-
         for (const noteId of noteIds) {
             if (noteSet.hasNoteId(noteId) && noteId in noteCache.notes) {
                 resultNoteSet.add(noteCache.notes[noteId]);
             }
         }

-        return results;
+        return resultNoteSet;
     }
 }

View file

@@ -3,7 +3,7 @@
  */
 function parens(tokens) {
     if (tokens.length === 0) {
-        throw new Error("Empty expression.");
+        return [];
     }

     while (true) {
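The motivation is presumably that a plain autocomplete query carries no attribute/operator syntax at all, so the expression-token side of the lexer output is legitimately empty and should not abort parsing. A minimal illustration of the changed behaviour (require path as used by the new search service; the call itself is just an example):

    const parens = require('./parens');

    console.log(parens([]));   // []  – previously this threw Error("Empty expression.")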

View file

@@ -1,14 +1,16 @@
 "use strict";

-const NoteCacheFulltextExp = require("./expressions/note_cache_fulltext");
+const lexer = require('./lexer');
+const parens = require('./parens');
+const parser = require('./parser');
 const NoteSet = require("./note_set");
 const SearchResult = require("./search_result");
 const noteCache = require('../note_cache/note_cache');
+const noteCacheService = require('../note_cache/note_cache_service');
 const hoistedNoteService = require('../hoisted_note');
 const utils = require('../utils');

 async function findNotesWithExpression(expression) {
     const hoistedNote = noteCache.notes[hoistedNoteService.getHoistedNoteId()];

     const allNotes = (hoistedNote && hoistedNote.noteId !== 'root')
         ? hoistedNote.subtreeNotes
@@ -23,7 +25,7 @@ async function findNotesWithExpression(expression) {
     const noteSet = await expression.execute(allNoteSet, searchContext);

     let searchResults = noteSet.notes
-        .map(note => searchContext.noteIdToNotePath[note.noteId] || getSomePath(note))
+        .map(note => searchContext.noteIdToNotePath[note.noteId] || noteCacheService.getSomePath(note))
         .filter(notePathArray => notePathArray.includes(hoistedNoteService.getHoistedNoteId()))
         .map(notePathArray => new SearchResult(notePathArray));
@@ -40,24 +42,30 @@ async function findNotesWithExpression(expression) {
     return searchResults;
 }

+function parseQueryToExpression(query) {
+    const {fulltextTokens, expressionTokens} = lexer(query);
+    const structuredExpressionTokens = parens(expressionTokens);
+    const expression = parser(fulltextTokens, structuredExpressionTokens, false);
+
+    return expression;
+}
+
 async function searchNotesForAutocomplete(query) {
     if (!query.trim().length) {
         return [];
     }

-    const tokens = query
-        .trim() // necessary because even with .split() trailing spaces are tokens which causes havoc
-        .toLowerCase()
-        .split(/[ -]/)
-        .filter(token => token !== '/'); // '/' is used as separator
-
-    const expression = new NoteCacheFulltextExp(tokens);
+    const expression = parseQueryToExpression(query);
+
+    if (!expression) {
+        return [];
+    }

     let searchResults = await findNotesWithExpression(expression);

     searchResults = searchResults.slice(0, 200);

-    highlightSearchResults(searchResults, tokens);
+    highlightSearchResults(searchResults, query);

     return searchResults.map(result => {
         return {
@@ -68,7 +76,13 @@ async function searchNotesForAutocomplete(query) {
     });
 }

-function highlightSearchResults(searchResults, tokens) {
+function highlightSearchResults(searchResults, query) {
+    let tokens = query
+        .trim() // necessary because even with .split() trailing spaces are tokens which causes havoc
+        .toLowerCase()
+        .split(/[ -]/)
+        .filter(token => token !== '/');
+
     // we remove < signs because they can cause trouble in matching and overwriting existing highlighted chunks
     // which would make the resulting HTML string invalid.
     // { and } are used for marking <b> and </b> tag (to avoid matches on single 'b' character)
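Since the search path no longer produces a flat token list, highlightSearchResults() now receives the raw query and derives the highlight tokens itself. Roughly what that derivation yields (the query string is an invented example):

    const query = "My To-Do list";

    const tokens = query
        .trim()
        .toLowerCase()
        .split(/[ -]/)                    // split on spaces and hyphens
        .filter(token => token !== '/');  // '/' is used as separator

    console.log(tokens);                  // ["my", "to", "do", "list"]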