feat: implement highlight parser

Author: Steven
Date: 2024-01-15 22:30:06 +08:00
Parent: 46bd470640
Commit: 3f4b361fad
6 changed files with 118 additions and 0 deletions

View file

@@ -28,6 +28,7 @@ const (
	StrikethroughNode
	EscapingCharacterNode
	MathNode
	HighlightNode
)

type Node interface {

View file

@@ -191,3 +191,17 @@ func (*Math) Type() NodeType {
func (n *Math) Restore() string {
	return fmt.Sprintf("$%s$", n.Content)
}

type Highlight struct {
	BaseInline

	Content string
}

func (*Highlight) Type() NodeType {
	return HighlightNode
}

func (n *Highlight) Restore() string {
	return fmt.Sprintf("==%s==", n.Content)
}

View file

@@ -0,0 +1,58 @@
package parser

import (
	"errors"

	"github.com/usememos/memos/plugin/gomark/ast"
	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

type HighlightParser struct{}

func NewHighlightParser() InlineParser {
	return &HighlightParser{}
}

func (*HighlightParser) Match(tokens []*tokenizer.Token) (int, bool) {
	if len(tokens) < 5 {
		return 0, false
	}

	prefixTokens := tokens[:2]
	if prefixTokens[0].Type != prefixTokens[1].Type {
		return 0, false
	}
	prefixTokenType := prefixTokens[0].Type
	if prefixTokenType != tokenizer.EqualSign {
		return 0, false
	}

	cursor, matched := 2, false
	for ; cursor < len(tokens)-1; cursor++ {
		token, nextToken := tokens[cursor], tokens[cursor+1]
		if token.Type == tokenizer.Newline || nextToken.Type == tokenizer.Newline {
			return 0, false
		}
		if token.Type == prefixTokenType && nextToken.Type == prefixTokenType {
			matched = true
			break
		}
	}
	if !matched {
		return 0, false
	}

	return cursor + 2, true
}

func (p *HighlightParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
	size, ok := p.Match(tokens)
	if size == 0 || !ok {
		return nil, errors.New("not matched")
	}

	contentTokens := tokens[2 : size-2]
	return &ast.Highlight{
		Content: tokenizer.Stringify(contentTokens),
	}, nil
}
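
Not part of the commit, but a minimal end-to-end sketch of how the new parser is meant to be used: tokenize a string, let HighlightParser consume the leading ==...== span, then re-serialize the node with the restore package. The github.com/usememos/memos/plugin/gomark/parser import path is an assumption inferred from the tokenizer path above; every call used here appears in the diff.

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/ast"
	"github.com/usememos/memos/plugin/gomark/parser"
	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
	"github.com/usememos/memos/plugin/gomark/restore"
)

func main() {
	// '=' is now tokenized as EqualSign, so the parser can see the ==...== delimiters.
	tokens := tokenizer.Tokenize("==Hello world==")

	// Parse returns an *ast.Highlight whose Content is the text between the delimiters.
	node, err := parser.NewHighlightParser().Parse(tokens)
	if err != nil {
		fmt.Println("input does not start with a highlight span:", err)
		return
	}

	// restore.Restore re-serializes the node back to its ==...== source form.
	fmt.Println(restore.Restore([]ast.Node{node}))
}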

View file

@@ -0,0 +1,41 @@
package parser

import (
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/usememos/memos/plugin/gomark/ast"
	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
	"github.com/usememos/memos/plugin/gomark/restore"
)

func TestHighlightParser(t *testing.T) {
	tests := []struct {
		text      string
		highlight ast.Node
	}{
		{
			text:      "==Hello world!",
			highlight: nil,
		},
		{
			text: "==Hello==",
			highlight: &ast.Highlight{
				Content: "Hello",
			},
		},
		{
			text: "==Hello world==",
			highlight: &ast.Highlight{
				Content: "Hello world",
			},
		},
	}

	for _, test := range tests {
		tokens := tokenizer.Tokenize(test.text)
		node, _ := NewHighlightParser().Parse(tokens)
		require.Equal(t, restore.Restore([]ast.Node{test.highlight}), restore.Restore([]ast.Node{node}))
	}
}

View file

@@ -80,6 +80,7 @@ var defaultInlineParsers = []InlineParser{
	NewAutoLinkParser(),
	NewBoldParser(),
	NewItalicParser(),
	NewHighlightParser(),
	NewCodeParser(),
	NewMathParser(),
	NewTagParser(),

View file

@@ -19,6 +19,7 @@ const (
	LessThan TokenType = "<"
	GreaterThan TokenType = ">"
	DollarSign TokenType = "$"
	EqualSign TokenType = "="
	Backslash TokenType = "\\"
	Newline TokenType = "\n"
	Space TokenType = " "
@@ -77,6 +78,8 @@ func Tokenize(text string) []*Token {
		tokens = append(tokens, NewToken(Dot, "."))
	case '$':
		tokens = append(tokens, NewToken(DollarSign, "$"))
	case '=':
		tokens = append(tokens, NewToken(EqualSign, "="))
	case '\\':
		tokens = append(tokens, NewToken(Backslash, `\`))
	case '\n':
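
Also not part of the commit: a tiny sketch showing the tokenizer change in isolation. Every '=' character becomes its own EqualSign token, so a ==...== span begins and ends with two consecutive EqualSign tokens, which is exactly the pattern HighlightParser.Match looks for (and why it requires at least five tokens). Import path as shown in the diff above.

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

func main() {
	// "==hi==" yields EqualSign, EqualSign, <content token(s)>, EqualSign, EqualSign.
	for _, token := range tokenizer.Tokenize("==hi==") {
		fmt.Println(token.Type)
	}
}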