chore: implement referenced content node

Steven 2024-01-23 21:40:59 +08:00
parent d7f02b94e5
commit a316e239ce
7 changed files with 132 additions and 3 deletions

View file

@@ -33,6 +33,7 @@ const (
 	HighlightNode
 	SubscriptNode
 	SuperscriptNode
+	ReferencedContentNode
 )

 type Node interface {

View file

@@ -241,9 +241,10 @@ func (*EmbeddedContent) Type() NodeType {
 }

 func (n *EmbeddedContent) Restore() string {
-	result := fmt.Sprintf("![[%s]]", n.ResourceName)
+	params := ""
 	if n.Params != "" {
-		result += fmt.Sprintf("?%s", n.Params)
+		params = fmt.Sprintf("?%s", n.Params)
 	}
+	result := fmt.Sprintf("![[%s%s]]", n.ResourceName, params)
 	return result
 }
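
The hunk above changes the restored output: previously the optional "?params" suffix was appended after the closing "]]", while the new code places it inside the brackets. A small standalone snippet (not part of the commit, using only fields visible in this diff) to illustrate:

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/ast"
)

func main() {
	n := &ast.EmbeddedContent{ResourceName: "resources/101", Params: "align=center"}
	// Old behavior: "![[resources/101]]?align=center"
	// New behavior: the params are restored inside the brackets.
	fmt.Println(n.Restore()) // ![[resources/101?align=center]]
}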

View file

@@ -233,3 +233,23 @@ func (*Superscript) Type() NodeType {
 func (n *Superscript) Restore() string {
 	return fmt.Sprintf("^%s^", n.Content)
 }
+
+type ReferencedContent struct {
+	BaseInline
+
+	ResourceName string
+	Params       string
+}
+
+func (*ReferencedContent) Type() NodeType {
+	return ReferencedContentNode
+}
+
+func (n *ReferencedContent) Restore() string {
+	params := ""
+	if n.Params != "" {
+		params = fmt.Sprintf("?%s", n.Params)
+	}
+	result := fmt.Sprintf("[[%s%s]]", n.ResourceName, params)
+	return result
+}
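
ReferencedContent mirrors EmbeddedContent but restores without the leading exclamation mark. A brief illustrative snippet (not part of the commit) showing the round-tripped text with and without params:

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/ast"
)

func main() {
	plain := &ast.ReferencedContent{ResourceName: "memos/1"}
	withParams := &ast.ReferencedContent{ResourceName: "resources/101", Params: "align=center"}
	fmt.Println(plain.Restore())      // [[memos/1]]
	fmt.Println(withParams.Restore()) // [[resources/101?align=center]]
}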

View file

@@ -13,7 +13,7 @@ func NewEmbeddedContentParser() *EmbeddedContentParser {

 func (*EmbeddedContentParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
-	if len(matchedTokens) < 5 {
+	if len(matchedTokens) < 6 {
 		return nil, 0
 	}
 	if matchedTokens[0].Type != tokenizer.ExclamationMark || matchedTokens[1].Type != tokenizer.LeftSquareBracket || matchedTokens[2].Type != tokenizer.LeftSquareBracket {
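
The minimum token count rises from 5 to 6 because the shortest embedded content, ![[x]], needs six tokens (!, [, [, text, ], ]), while the shortest referenced content, [[x]], needs only five. A quick check of that assumption (the snippet presumes the tokenizer emits one token per punctuation mark and one per text run, which matches how the parsers index into matchedTokens):

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

func main() {
	fmt.Println(len(tokenizer.Tokenize("![[x]]"))) // expected: 6
	fmt.Println(len(tokenizer.Tokenize("[[x]]")))  // expected: 5
}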

View file

@@ -80,6 +80,7 @@ var defaultInlineParsers = []InlineParser{
 	NewSubscriptParser(),
 	NewSuperscriptParser(),
 	NewMathParser(),
+	NewReferencedContentParser(),
 	NewTagParser(),
 	NewStrikethroughParser(),
 	NewLineBreakParser(),
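
Registering NewReferencedContentParser() here is what lets "[[...]]" spans be recognized at all, assuming the inline parsers are consulted in order with the first successful Match winning; that dispatch code is not part of this diff, so the sketch below is a purely hypothetical illustration written as if it lived in the same parser package:

// Hypothetical first-match dispatch over the registered inline parsers;
// the project's real dispatcher is not shown in this commit and may differ.
func matchFirstInline(tokens []*tokenizer.Token) (ast.Node, int) {
	for _, p := range defaultInlineParsers {
		if node, size := p.Match(tokens); node != nil && size > 0 {
			return node, size
		}
	}
	return nil, 0
}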

View file

@@ -0,0 +1,45 @@
package parser

import (
	"github.com/usememos/memos/plugin/gomark/ast"
	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

type ReferencedContentParser struct{}

func NewReferencedContentParser() *ReferencedContentParser {
	return &ReferencedContentParser{}
}

func (*ReferencedContentParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
	matchedTokens := tokenizer.GetFirstLine(tokens)
	if len(matchedTokens) < 5 {
		return nil, 0
	}
	if matchedTokens[0].Type != tokenizer.LeftSquareBracket || matchedTokens[1].Type != tokenizer.LeftSquareBracket {
		return nil, 0
	}

	contentTokens := []*tokenizer.Token{}
	matched := false
	for index, token := range matchedTokens[2 : len(matchedTokens)-1] {
		if token.Type == tokenizer.RightSquareBracket && matchedTokens[2+index+1].Type == tokenizer.RightSquareBracket {
			matched = true
			break
		}
		contentTokens = append(contentTokens, token)
	}
	if !matched {
		return nil, 0
	}

	resourceName, params := tokenizer.Stringify(contentTokens), ""
	questionMarkIndex := tokenizer.FindUnescaped(contentTokens, tokenizer.QuestionMark)
	if questionMarkIndex > 0 {
		resourceName, params = tokenizer.Stringify(contentTokens[:questionMarkIndex]), tokenizer.Stringify(contentTokens[questionMarkIndex+1:])
	}
	return &ast.ReferencedContent{
		ResourceName: resourceName,
		Params:       params,
	}, len(contentTokens) + 4
}
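
The second return value tells the caller how many tokens were consumed: the content tokens plus the two "[" and two "]" tokens. A hypothetical standalone usage (not part of the commit; exact token counts depend on the tokenizer's granularity):

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/ast"
	"github.com/usememos/memos/plugin/gomark/parser"
	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

func main() {
	tokens := tokenizer.Tokenize("[[memos/1?align=center]] trailing text")
	node, size := parser.NewReferencedContentParser().Match(tokens)
	if ref, ok := node.(*ast.ReferencedContent); ok {
		fmt.Println(ref.ResourceName, ref.Params) // memos/1 align=center
	}
	// size is len(contentTokens) + 4, so a caller can resume right after "]]".
	fmt.Println(size)
}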

View file

@@ -0,0 +1,61 @@
package parser

import (
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/usememos/memos/plugin/gomark/ast"
	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
	"github.com/usememos/memos/plugin/gomark/restore"
)

func TestReferencedContentParser(t *testing.T) {
	tests := []struct {
		text              string
		referencedContent ast.Node
	}{
		{
			text:              "[[Hello world]",
			referencedContent: nil,
		},
		{
			text: "[[Hello world]]",
			referencedContent: &ast.ReferencedContent{
				ResourceName: "Hello world",
			},
		},
		{
			text: "[[memos/1]]",
			referencedContent: &ast.ReferencedContent{
				ResourceName: "memos/1",
			},
		},
		{
			text: "[[resources/101]]111\n123",
			referencedContent: &ast.ReferencedContent{
				ResourceName: "resources/101",
			},
		},
		{
			text: "[[resources/101?align=center]]",
			referencedContent: &ast.ReferencedContent{
				ResourceName: "resources/101",
				Params:       "align=center",
			},
		},
		{
			text: "[[resources/6uxnhT98q8vN8anBbUbRGu?align=center]]",
			referencedContent: &ast.ReferencedContent{
				ResourceName: "resources/6uxnhT98q8vN8anBbUbRGu",
				Params:       "align=center",
			},
		},
	}

	for _, test := range tests {
		tokens := tokenizer.Tokenize(test.text)
		node, _ := NewReferencedContentParser().Match(tokens)
		require.Equal(t, restore.Restore([]ast.Node{test.referencedContent}), restore.Restore([]ast.Node{node}))
	}
}
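
To exercise the new parser locally, running go test ./plugin/gomark/... -run TestReferencedContentParser from the repository root should be enough (the package path is inferred from the import paths above).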