chore: implement part of html renderer

This commit is contained in:
Steven 2023-12-13 23:50:05 +08:00
parent 453707d18c
commit 43ef9eaced
35 changed files with 449 additions and 91 deletions

View file

@ -1,7 +1,9 @@
package ast
import "fmt"
type BaseBlock struct {
Node
BaseNode
}
type LineBreak struct {
@ -14,6 +16,10 @@ func (*LineBreak) Type() NodeType {
return NodeTypeLineBreak
}
// String returns the node type name; a line break carries no extra content.
func (n *LineBreak) String() string {
	return n.Type().String()
}
type Paragraph struct {
BaseBlock
@ -26,6 +32,14 @@ func (*Paragraph) Type() NodeType {
return NodeTypeParagraph
}
// String returns a debug representation: the node type name followed by
// the String form of each child, space-separated.
func (n *Paragraph) String() string {
	result := n.Type().String()
	for i := range n.Children {
		result += " " + n.Children[i].String()
	}
	return result
}
type CodeBlock struct {
BaseBlock
@ -39,6 +53,10 @@ func (*CodeBlock) Type() NodeType {
return NodeTypeCodeBlock
}
// String returns a debug representation: "<type> <language> <content>".
func (n *CodeBlock) String() string {
	return n.Type().String() + " " + n.Language + " " + n.Content
}
type Heading struct {
BaseBlock
@ -52,6 +70,14 @@ func (*Heading) Type() NodeType {
return NodeTypeHeading
}
// String returns a debug representation: "<type> <level>" followed by the
// String form of each child, space-separated.
func (n *Heading) String() string {
	result := fmt.Sprintf("%s %d", n.Type().String(), n.Level)
	for i := range n.Children {
		result += " " + n.Children[i].String()
	}
	return result
}
type HorizontalRule struct {
BaseBlock
@ -65,6 +91,10 @@ func (*HorizontalRule) Type() NodeType {
return NodeTypeHorizontalRule
}
// String returns the node type name; a horizontal rule carries no content
// worth printing beyond its symbol, which is omitted here.
func (n *HorizontalRule) String() string {
	return n.Type().String()
}
type Blockquote struct {
BaseBlock
@ -76,3 +106,11 @@ var NodeTypeBlockquote = NewNodeType("Blockquote")
func (*Blockquote) Type() NodeType {
return NodeTypeBlockquote
}
// String returns a debug representation: the node type name followed by
// the String form of each child, space-separated.
func (n *Blockquote) String() string {
	result := n.Type().String()
	for i := range n.Children {
		result += " " + n.Children[i].String()
	}
	return result
}

View file

@ -1,7 +1,7 @@
package ast
type BaseInline struct {
Node
BaseNode
}
type Text struct {
@ -16,10 +16,14 @@ func (*Text) Type() NodeType {
return NodeTypeText
}
// String returns a debug representation: "<type> <content>".
func (n *Text) String() string {
	return n.Type().String() + " " + n.Content
}
type Bold struct {
BaseInline
// Symbol is "*" or "_"
// Symbol is "*" or "_".
Symbol string
Content string
}
@ -30,10 +34,14 @@ func (*Bold) Type() NodeType {
return NodeTypeBold
}
// String returns a debug representation: "<type> <symbol> <content>".
func (n *Bold) String() string {
	return n.Type().String() + " " + n.Symbol + " " + n.Content
}
type Italic struct {
BaseInline
// Symbol is "*" or "_"
// Symbol is "*" or "_".
Symbol string
Content string
}
@ -44,10 +52,14 @@ func (*Italic) Type() NodeType {
return NodeTypeItalic
}
// String returns a debug representation: "<type> <symbol> <content>".
func (n *Italic) String() string {
	return n.Type().String() + " " + n.Symbol + " " + n.Content
}
type BoldItalic struct {
BaseInline
// Symbol is "*" or "_"
// Symbol is "*" or "_".
Symbol string
Content string
}
@ -58,6 +70,10 @@ func (*BoldItalic) Type() NodeType {
return NodeTypeBoldItalic
}
// String returns a debug representation: "<type> <symbol> <content>".
func (n *BoldItalic) String() string {
	return n.Type().String() + " " + n.Symbol + " " + n.Content
}
type Code struct {
BaseInline
@ -70,6 +86,10 @@ func (*Code) Type() NodeType {
return NodeTypeCode
}
// String returns a debug representation: "<type> <content>".
func (n *Code) String() string {
	return n.Type().String() + " " + n.Content
}
type Image struct {
BaseInline
@ -83,6 +103,10 @@ func (*Image) Type() NodeType {
return NodeTypeImage
}
// String returns a debug representation: "<type> <alt text> <url>".
func (n *Image) String() string {
	return n.Type().String() + " " + n.AltText + " " + n.URL
}
type Link struct {
BaseInline
@ -96,6 +120,10 @@ func (*Link) Type() NodeType {
return NodeTypeLink
}
// String returns a debug representation: "<type> <text> <url>".
func (n *Link) String() string {
	return n.Type().String() + " " + n.Text + " " + n.URL
}
type Tag struct {
BaseInline
@ -108,6 +136,10 @@ func (*Tag) Type() NodeType {
return NodeTypeTag
}
// String returns a debug representation: "<type> <content>".
func (n *Tag) String() string {
	return n.Type().String() + " " + n.Content
}
type Strikethrough struct {
BaseInline
@ -119,3 +151,7 @@ var NodeTypeStrikethrough = NewNodeType("Strikethrough")
func (*Strikethrough) Type() NodeType {
return NodeTypeStrikethrough
}
// String returns a debug representation: "<type> <content>".
func (n *Strikethrough) String() string {
	return n.Type().String() + " " + n.Content
}

View file

@ -1,7 +1,36 @@
package ast
// Node is the interface implemented by every AST node. It combines type
// identification, debug stringification, and doubly-linked tree navigation
// (parent, previous/next sibling, children).
type Node interface {
	// Type returns the node's type.
	Type() NodeType

	// String returns a string representation of this node.
	// This method is used for debugging.
	String() string

	// GetParent returns the parent node of this node, or nil for a root.
	GetParent() Node

	// GetPrevSibling returns the previous sibling of this node, or nil if
	// this node is the first of its siblings.
	GetPrevSibling() Node

	// GetNextSibling returns the next sibling of this node, or nil if this
	// node is the last of its siblings.
	GetNextSibling() Node

	// GetChildren returns the child nodes of this node.
	GetChildren() []Node

	// SetParent sets the parent node of this node.
	SetParent(Node)

	// SetPrevSibling sets the previous sibling of this node.
	SetPrevSibling(Node)

	// SetNextSibling sets the next sibling of this node.
	SetNextSibling(Node)

	// SetChildren sets the child nodes of this node.
	SetChildren([]Node)
}
type NodeType int
@ -18,3 +47,45 @@ func NewNodeType(name string) NodeType {
nodeTypeIndex++
return nodeTypeIndex
}
// BaseNode provides the tree-linkage fields and accessors shared by all
// concrete AST node types (embedded via BaseBlock / BaseInline). The zero
// value is an unlinked node: nil parent, nil siblings, no children.
type BaseNode struct {
	parent      Node
	prevSibling Node
	nextSibling Node
	children    []Node
}

// GetParent returns the parent node, or nil for an unlinked/root node.
func (n *BaseNode) GetParent() Node {
	return n.parent
}

// GetPrevSibling returns the previous sibling, or nil if none is set.
func (n *BaseNode) GetPrevSibling() Node {
	return n.prevSibling
}

// GetNextSibling returns the next sibling, or nil if none is set.
func (n *BaseNode) GetNextSibling() Node {
	return n.nextSibling
}

// GetChildren returns the child nodes (may be nil).
func (n *BaseNode) GetChildren() []Node {
	return n.children
}

// SetParent sets the parent node.
func (n *BaseNode) SetParent(node Node) {
	n.parent = node
}

// SetPrevSibling sets the previous sibling.
func (n *BaseNode) SetPrevSibling(node Node) {
	n.prevSibling = node
}

// SetNextSibling sets the next sibling.
func (n *BaseNode) SetNextSibling(node Node) {
	n.nextSibling = node
}

// SetChildren replaces the child slice; the slice is stored, not copied.
func (n *BaseNode) SetChildren(nodes []Node) {
	n.children = nodes
}

View file

@ -1,6 +1,8 @@
package parser
import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
@ -21,10 +23,10 @@ func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (int, bool) {
contentTokens := []*tokenizer.Token{}
for _, token := range tokens[2:] {
contentTokens = append(contentTokens, token)
if token.Type == tokenizer.Newline {
break
}
contentTokens = append(contentTokens, token)
}
if len(contentTokens) == 0 {
return 0, false
@ -33,15 +35,18 @@ func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (int, bool) {
return len(contentTokens) + 2, true
}
func (p *BlockquoteParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (p *BlockquoteParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens)
if size == 0 || !ok {
return nil
return nil, errors.New("not matched")
}
contentTokens := tokens[2:size]
children := ParseInline(contentTokens)
return &ast.Blockquote{
Children: children,
blockquote := &ast.Blockquote{}
children, err := ParseInline(blockquote, contentTokens)
if err != nil {
return nil, err
}
blockquote.Children = children
return blockquote, nil
}

View file

@ -31,6 +31,7 @@ func TestBlockquoteParser(t *testing.T) {
&ast.Text{
Content: "Hello",
},
&ast.LineBreak{},
},
},
},
@ -42,6 +43,7 @@ func TestBlockquoteParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.blockquote, NewBlockquoteParser().Parse(tokens))
node, _ := NewBlockquoteParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.blockquote}), StringifyNodes([]ast.Node{node}))
}
}

View file

@ -1,6 +1,8 @@
package parser
import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
@ -43,10 +45,10 @@ func (*BoldParser) Match(tokens []*tokenizer.Token) (int, bool) {
return cursor + 2, true
}
func (p *BoldParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (p *BoldParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens)
if size == 0 || !ok {
return nil
return nil, errors.New("not matched")
}
prefixTokenType := tokens[0].Type
@ -54,5 +56,5 @@ func (p *BoldParser) Parse(tokens []*tokenizer.Token) ast.Node {
return &ast.Bold{
Symbol: prefixTokenType,
Content: tokenizer.Stringify(contentTokens),
}
}, nil
}

View file

@ -1,6 +1,8 @@
package parser
import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
@ -43,10 +45,10 @@ func (*BoldItalicParser) Match(tokens []*tokenizer.Token) (int, bool) {
return cursor + 3, true
}
func (p *BoldItalicParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (p *BoldItalicParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens)
if size == 0 || !ok {
return nil
return nil, errors.New("not matched")
}
prefixTokenType := tokens[0].Type
@ -54,5 +56,5 @@ func (p *BoldItalicParser) Parse(tokens []*tokenizer.Token) ast.Node {
return &ast.BoldItalic{
Symbol: prefixTokenType,
Content: tokenizer.Stringify(contentTokens),
}
}, nil
}

View file

@ -44,6 +44,7 @@ func TestBoldItalicParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.boldItalic, NewBoldItalicParser().Parse(tokens))
node, _ := NewBoldItalicParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.boldItalic}), StringifyNodes([]ast.Node{node}))
}
}

View file

@ -44,6 +44,7 @@ func TestBoldParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.bold, NewBoldParser().Parse(tokens))
node, _ := NewBoldParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.bold}), StringifyNodes([]ast.Node{node}))
}
}

View file

@ -1,6 +1,8 @@
package parser
import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
@ -36,14 +38,14 @@ func (*CodeParser) Match(tokens []*tokenizer.Token) (int, bool) {
return len(contentTokens) + 2, true
}
func (p *CodeParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (p *CodeParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens)
if size == 0 || !ok {
return nil
return nil, errors.New("not matched")
}
contentTokens := tokens[1 : size-1]
return &ast.Code{
Content: tokenizer.Stringify(contentTokens),
}
}, nil
}

View file

@ -1,6 +1,8 @@
package parser
import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
@ -51,10 +53,10 @@ func (*CodeBlockParser) Match(tokens []*tokenizer.Token) (int, bool) {
return cursor, true
}
func (p *CodeBlockParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (p *CodeBlockParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens)
if size == 0 || !ok {
return nil
return nil, errors.New("not matched")
}
languageToken := tokens[3]
@ -73,5 +75,5 @@ func (p *CodeBlockParser) Parse(tokens []*tokenizer.Token) ast.Node {
if languageToken != nil {
codeBlock.Language = languageToken.String()
}
return codeBlock
return codeBlock, nil
}

View file

@ -58,6 +58,7 @@ func TestCodeBlockParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.codeBlock, NewCodeBlockParser().Parse(tokens))
node, _ := NewCodeBlockParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.codeBlock}), StringifyNodes([]ast.Node{node}))
}
}

View file

@ -32,6 +32,7 @@ func TestCodeParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.code, NewCodeParser().Parse(tokens))
node, _ := NewCodeParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.code}), StringifyNodes([]ast.Node{node}))
}
}

View file

@ -1,6 +1,8 @@
package parser
import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
@ -47,10 +49,10 @@ func (*HeadingParser) Match(tokens []*tokenizer.Token) (int, bool) {
return cursor, true
}
func (p *HeadingParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (p *HeadingParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens)
if size == 0 || !ok {
return nil
return nil, errors.New("not matched")
}
level := 0
@ -61,10 +63,15 @@ func (p *HeadingParser) Parse(tokens []*tokenizer.Token) ast.Node {
break
}
}
contentTokens := tokens[level+1 : size]
children := ParseInline(contentTokens)
return &ast.Heading{
heading := &ast.Heading{
Level: level,
Children: children,
}
children, err := ParseInline(heading, contentTokens)
if err != nil {
return nil, err
}
heading.Children = children
return heading, nil
}

View file

@ -75,6 +75,7 @@ Hello World`,
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.heading, NewHeadingParser().Parse(tokens))
node, _ := NewHeadingParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.heading}), StringifyNodes([]ast.Node{node}))
}
}

View file

@ -1,6 +1,8 @@
package parser
import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
@ -27,13 +29,13 @@ func (*HorizontalRuleParser) Match(tokens []*tokenizer.Token) (int, bool) {
return 3, true
}
func (p *HorizontalRuleParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (p *HorizontalRuleParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens)
if size == 0 || !ok {
return nil
return nil, errors.New("not matched")
}
return &ast.HorizontalRule{
Symbol: tokens[0].Type,
}
}, nil
}

View file

@ -44,6 +44,7 @@ func TestHorizontalRuleParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.horizontalRule, NewHorizontalRuleParser().Parse(tokens))
node, _ := NewHorizontalRuleParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.horizontalRule}), StringifyNodes([]ast.Node{node}))
}
}

View file

@ -1,6 +1,8 @@
package parser
import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
@ -52,10 +54,10 @@ func (*ImageParser) Match(tokens []*tokenizer.Token) (int, bool) {
return cursor + len(contentTokens) + 1, true
}
func (p *ImageParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (p *ImageParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens)
if size == 0 || !ok {
return nil
return nil, errors.New("not matched")
}
altTextTokens := []*tokenizer.Token{}
@ -69,5 +71,5 @@ func (p *ImageParser) Parse(tokens []*tokenizer.Token) ast.Node {
return &ast.Image{
AltText: tokenizer.Stringify(altTextTokens),
URL: tokenizer.Stringify(contentTokens),
}
}, nil
}

View file

@ -39,6 +39,7 @@ func TestImageParser(t *testing.T) {
}
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.image, NewImageParser().Parse(tokens))
node, _ := NewImageParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.image}), StringifyNodes([]ast.Node{node}))
}
}

View file

@ -1,6 +1,8 @@
package parser
import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
@ -42,10 +44,10 @@ func (*ItalicParser) Match(tokens []*tokenizer.Token) (int, bool) {
return len(contentTokens) + 2, true
}
func (p *ItalicParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (p *ItalicParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens)
if size == 0 || !ok {
return nil
return nil, errors.New("not matched")
}
prefixTokenType := tokens[0].Type
@ -53,5 +55,5 @@ func (p *ItalicParser) Parse(tokens []*tokenizer.Token) ast.Node {
return &ast.Italic{
Symbol: prefixTokenType,
Content: tokenizer.Stringify(contentTokens),
}
}, nil
}

View file

@ -43,6 +43,7 @@ func TestItalicParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.italic, NewItalicParser().Parse(tokens))
node, _ := NewItalicParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.italic}), StringifyNodes([]ast.Node{node}))
}
}

View file

@ -1,6 +1,8 @@
package parser
import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
@ -21,11 +23,11 @@ func (*LineBreakParser) Match(tokens []*tokenizer.Token) (int, bool) {
return 1, true
}
func (p *LineBreakParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (p *LineBreakParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens)
if size == 0 || !ok {
return nil
return nil, errors.New("not matched")
}
return &ast.LineBreak{}
return &ast.LineBreak{}, nil
}

View file

@ -1,6 +1,8 @@
package parser
import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
@ -51,10 +53,10 @@ func (*LinkParser) Match(tokens []*tokenizer.Token) (int, bool) {
return 4 + len(urlTokens) + len(textTokens), true
}
func (p *LinkParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (p *LinkParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens)
if size == 0 || !ok {
return nil
return nil, errors.New("not matched")
}
textTokens := []*tokenizer.Token{}
@ -68,5 +70,5 @@ func (p *LinkParser) Parse(tokens []*tokenizer.Token) ast.Node {
return &ast.Link{
Text: tokenizer.Stringify(textTokens),
URL: tokenizer.Stringify(urlTokens),
}
}, nil
}

View file

@ -39,6 +39,7 @@ func TestLinkParser(t *testing.T) {
}
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.link, NewLinkParser().Parse(tokens))
node, _ := NewLinkParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.link}), StringifyNodes([]ast.Node{node}))
}
}

View file

@ -1,6 +1,8 @@
package parser
import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
@ -29,15 +31,18 @@ func (*ParagraphParser) Match(tokens []*tokenizer.Token) (int, bool) {
return len(contentTokens), true
}
func (p *ParagraphParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (p *ParagraphParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens)
if size == 0 || !ok {
return nil
return nil, errors.New("not matched")
}
contentTokens := tokens[:size]
children := ParseInline(contentTokens)
return &ast.Paragraph{
Children: children,
paragraph := &ast.Paragraph{}
children, err := ParseInline(paragraph, contentTokens)
if err != nil {
return nil, err
}
paragraph.Children = children
return paragraph, nil
}

View file

@ -32,6 +32,7 @@ func TestParagraphParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.paragraph, NewParagraphParser().Parse(tokens))
node, _ := NewParagraphParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.paragraph}), StringifyNodes([]ast.Node{node}))
}
}

View file

@ -14,7 +14,7 @@ type Context struct {
type BaseParser interface {
Match(tokens []*tokenizer.Token) (int, bool)
Parse(tokens []*tokenizer.Token) ast.Node
Parse(tokens []*tokenizer.Token) (ast.Node, error)
}
type InlineParser interface {
@ -36,16 +36,23 @@ var defaultBlockParsers = []BlockParser{
func Parse(tokens []*tokenizer.Token) ([]ast.Node, error) {
nodes := []ast.Node{}
var prevNode ast.Node
for len(tokens) > 0 {
for _, blockParser := range defaultBlockParsers {
cursor, matched := blockParser.Match(tokens)
size, matched := blockParser.Match(tokens)
if matched {
node := blockParser.Parse(tokens)
if node == nil {
node, err := blockParser.Parse(tokens)
if err != nil {
return nil, errors.New("parse error")
}
tokens = tokens[size:]
if prevNode != nil {
prevNode.SetNextSibling(node)
node.SetPrevSibling(prevNode)
}
prevNode = node
nodes = append(nodes, node)
tokens = tokens[cursor:]
break
}
}
@ -62,27 +69,40 @@ var defaultInlineParsers = []InlineParser{
NewCodeParser(),
NewTagParser(),
NewStrikethroughParser(),
NewLineBreakParser(),
NewTextParser(),
}
func ParseInline(tokens []*tokenizer.Token) []ast.Node {
func ParseInline(parent ast.Node, tokens []*tokenizer.Token) ([]ast.Node, error) {
nodes := []ast.Node{}
var lastNode ast.Node
var prevNode ast.Node
for len(tokens) > 0 {
for _, inlineParser := range defaultInlineParsers {
cursor, matched := inlineParser.Match(tokens)
size, matched := inlineParser.Match(tokens)
if matched {
node := inlineParser.Parse(tokens)
if node.Type() == ast.NodeTypeText && lastNode != nil && lastNode.Type() == ast.NodeTypeText {
lastNode.(*ast.Text).Content += node.(*ast.Text).Content
} else {
nodes = append(nodes, node)
lastNode = node
node, err := inlineParser.Parse(tokens)
if err != nil {
return nil, errors.New("parse error")
}
tokens = tokens[cursor:]
tokens = tokens[size:]
node.SetParent(parent)
if prevNode != nil {
if prevNode.Type() == ast.NodeTypeText && node.Type() == ast.NodeTypeText {
prevNode.(*ast.Text).Content += node.(*ast.Text).Content
break
}
prevNode.SetNextSibling(node)
node.SetPrevSibling(prevNode)
}
nodes = append(nodes, node)
prevNode = node
break
}
}
}
return nodes
parent.SetChildren(nodes)
return nodes, nil
}

View file

@ -43,7 +43,7 @@ func TestParser(t *testing.T) {
},
},
{
text: "Hello **world**!",
text: "Hello **world**!\nHere is a new line.",
nodes: []ast.Node{
&ast.Paragraph{
Children: []ast.Node{
@ -59,6 +59,14 @@ func TestParser(t *testing.T) {
},
},
},
&ast.LineBreak{},
&ast.Paragraph{
Children: []ast.Node{
&ast.Text{
Content: "Here is a new line.",
},
},
},
},
},
{
@ -89,8 +97,17 @@ func TestParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
nodes, err := Parse(tokens)
require.NoError(t, err)
require.Equal(t, test.nodes, nodes)
nodes, _ := Parse(tokens)
require.Equal(t, StringifyNodes(test.nodes), StringifyNodes(nodes))
}
}
func StringifyNodes(nodes []ast.Node) string {
var result string
for _, node := range nodes {
if node != nil {
result += node.String()
}
}
return result
}

View file

@ -1,6 +1,8 @@
package parser
import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
@ -36,14 +38,14 @@ func (*StrikethroughParser) Match(tokens []*tokenizer.Token) (int, bool) {
return cursor + 2, true
}
func (p *StrikethroughParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (p *StrikethroughParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens)
if size == 0 || !ok {
return nil
return nil, errors.New("not matched")
}
contentTokens := tokens[2 : size-2]
return &ast.Strikethrough{
Content: tokenizer.Stringify(contentTokens),
}
}, nil
}

View file

@ -40,6 +40,7 @@ func TestStrikethroughParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.strikethrough, NewStrikethroughParser().Parse(tokens))
node, _ := NewStrikethroughParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.strikethrough}), StringifyNodes([]ast.Node{node}))
}
}

View file

@ -1,6 +1,8 @@
package parser
import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
@ -32,14 +34,14 @@ func (*TagParser) Match(tokens []*tokenizer.Token) (int, bool) {
return len(contentTokens) + 1, true
}
func (p *TagParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (p *TagParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens)
if size == 0 || !ok {
return nil
return nil, errors.New("not matched")
}
contentTokens := tokens[1:size]
return &ast.Tag{
Content: tokenizer.Stringify(contentTokens),
}
}, nil
}

View file

@ -38,6 +38,7 @@ func TestTagParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.tag, NewTagParser().Parse(tokens))
node, _ := NewTagParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.tag}), StringifyNodes([]ast.Node{node}))
}
}

View file

@ -20,11 +20,11 @@ func (*TextParser) Match(tokens []*tokenizer.Token) (int, bool) {
return 1, true
}
func (*TextParser) Parse(tokens []*tokenizer.Token) ast.Node {
func (*TextParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
if len(tokens) == 0 {
return &ast.Text{}
return &ast.Text{}, nil
}
return &ast.Text{
Content: tokens[0].String(),
}
}, nil
}

View file

@ -0,0 +1,87 @@
package html
import (
"bytes"
"fmt"
"github.com/usememos/memos/plugin/gomark/ast"
)
// HTMLRenderer is a simple renderer that converts AST to HTML.
// It accumulates markup into an internal buffer; one renderer instance is
// intended for a single Render call.
// nolint
type HTMLRenderer struct {
	output  *bytes.Buffer
	context *renderContext
}

// renderContext holds per-render state. It is currently empty — presumably
// reserved for future use as the renderer implementation grows.
type renderContext struct {
}
// NewHTMLRenderer creates a new HTMLRenderer with an empty output buffer
// and a fresh render context.
func NewHTMLRenderer() *HTMLRenderer {
	renderer := &HTMLRenderer{}
	renderer.output = new(bytes.Buffer)
	renderer.context = &renderContext{}
	return renderer
}
// RenderNode renders a single AST node to HTML, appending to the output
// buffer.
//
// For Blockquote nodes, sibling pointers drive tag merging: the first
// Blockquote in a run of consecutive Blockquote siblings writes the opening
// <blockquote>, recursively renders the following Blockquote siblings, and
// then writes the closing tag. RenderNodes skips the followers so they are
// not emitted twice.
//
// NOTE(review): Text and CodeBlock content is written verbatim with no HTML
// escaping — markup in the input passes straight through. Confirm whether
// escaping is handled upstream or is still TODO in this partial renderer.
func (r *HTMLRenderer) RenderNode(node ast.Node) {
	prevSibling, nextSibling := node.GetPrevSibling(), node.GetNextSibling()
	switch n := node.(type) {
	case *ast.LineBreak:
		r.output.WriteString("<br>")
	case *ast.Paragraph:
		r.output.WriteString("<p>")
		r.RenderNodes(n.Children)
		r.output.WriteString("</p>")
	case *ast.CodeBlock:
		r.output.WriteString("<pre><code>")
		r.output.WriteString(n.Content)
		r.output.WriteString("</code></pre>")
	case *ast.Heading:
		r.output.WriteString(fmt.Sprintf("<h%d>", n.Level))
		r.RenderNodes(n.Children)
		r.output.WriteString(fmt.Sprintf("</h%d>", n.Level))
	case *ast.HorizontalRule:
		r.output.WriteString("<hr>")
	case *ast.Blockquote:
		// Only the first Blockquote in a run opens the shared element.
		if prevSibling == nil || prevSibling.Type() != ast.NodeTypeBlockquote {
			r.output.WriteString("<blockquote>")
		}
		r.RenderNodes(n.Children)
		// Chain directly into the next Blockquote sibling so the whole run
		// shares one enclosing <blockquote>.
		if nextSibling != nil && nextSibling.Type() == ast.NodeTypeBlockquote {
			r.RenderNode(nextSibling)
		}
		if prevSibling == nil || prevSibling.Type() != ast.NodeTypeBlockquote {
			r.output.WriteString("</blockquote>")
		}
	case *ast.Text:
		r.output.WriteString(n.Content)
	default:
		// Handle other block types if needed.
	}
}
// RenderNodes renders a slice of AST nodes to HTML.
//
// A Blockquote whose immediately preceding sibling is also a Blockquote is
// skipped here, because RenderNode already emitted it while chaining a run
// of consecutive Blockquote siblings into one element.
func (r *HTMLRenderer) RenderNodes(nodes []ast.Node) {
	for _, node := range nodes {
		prev := node.GetPrevSibling()
		alreadyRendered := prev != nil &&
			prev.Type() == node.Type() &&
			node.Type() == ast.NodeTypeBlockquote
		if alreadyRendered {
			continue
		}
		r.RenderNode(node)
	}
}
// Render renders the AST to HTML and returns the accumulated markup.
// astRoot is the list of top-level block nodes produced by the parser.
// Render appends to the renderer's buffer, so a renderer should be used
// for a single Render call.
func (r *HTMLRenderer) Render(astRoot []ast.Node) string {
	r.RenderNodes(astRoot)
	return r.output.String()
}

View file

@ -0,0 +1,36 @@
package html
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/usememos/memos/plugin/gomark/parser"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
// TestHTMLRenderer tokenizes and parses markdown snippets, renders the
// resulting AST to HTML, and checks the output against the expected markup.
func TestHTMLRenderer(t *testing.T) {
	tests := []struct {
		text     string
		expected string
	}{
		{
			text:     "Hello world!",
			expected: `<p>Hello world!</p>`,
		},
		{
			text:     "> Hello\n> world!",
			expected: `<blockquote>Hello<br>world!</blockquote>`,
		},
	}
	for _, test := range tests {
		tokens := tokenizer.Tokenize(test.text)
		nodes, err := parser.Parse(tokens)
		require.NoError(t, err)
		actual := NewHTMLRenderer().Render(nodes)
		// Use require.Equal for consistent failure reporting with the other
		// testify-based assertions in this package (the previous manual
		// if/t.Errorf duplicated what require already provides).
		require.Equal(t, test.expected, actual)
	}
}