diff --git a/plugin/gomark/ast/block.go b/plugin/gomark/ast/block.go index 7eef5823..ff254e52 100644 --- a/plugin/gomark/ast/block.go +++ b/plugin/gomark/ast/block.go @@ -1,7 +1,9 @@ package ast +import "fmt" + type BaseBlock struct { - Node + BaseNode } type LineBreak struct { @@ -14,6 +16,10 @@ func (*LineBreak) Type() NodeType { return NodeTypeLineBreak } +func (n *LineBreak) String() string { + return n.Type().String() +} + type Paragraph struct { BaseBlock @@ -26,6 +32,14 @@ func (*Paragraph) Type() NodeType { return NodeTypeParagraph } +func (n *Paragraph) String() string { + str := n.Type().String() + for _, child := range n.Children { + str += " " + child.String() + } + return str +} + type CodeBlock struct { BaseBlock @@ -39,6 +53,10 @@ func (*CodeBlock) Type() NodeType { return NodeTypeCodeBlock } +func (n *CodeBlock) String() string { + return n.Type().String() + " " + n.Language + " " + n.Content +} + type Heading struct { BaseBlock @@ -52,6 +70,14 @@ func (*Heading) Type() NodeType { return NodeTypeHeading } +func (n *Heading) String() string { + str := n.Type().String() + " " + fmt.Sprintf("%d", n.Level) + for _, child := range n.Children { + str += " " + child.String() + } + return str +} + type HorizontalRule struct { BaseBlock @@ -65,6 +91,10 @@ func (*HorizontalRule) Type() NodeType { return NodeTypeHorizontalRule } +func (n *HorizontalRule) String() string { + return n.Type().String() +} + type Blockquote struct { BaseBlock @@ -76,3 +106,11 @@ var NodeTypeBlockquote = NewNodeType("Blockquote") func (*Blockquote) Type() NodeType { return NodeTypeBlockquote } + +func (n *Blockquote) String() string { + str := n.Type().String() + for _, child := range n.Children { + str += " " + child.String() + } + return str +} diff --git a/plugin/gomark/ast/inline.go b/plugin/gomark/ast/inline.go index d21fe1a1..58236864 100644 --- a/plugin/gomark/ast/inline.go +++ b/plugin/gomark/ast/inline.go @@ -1,7 +1,7 @@ package ast type BaseInline struct { - Node + 
BaseNode } type Text struct { @@ -16,10 +16,14 @@ func (*Text) Type() NodeType { return NodeTypeText } +func (n *Text) String() string { + return n.Type().String() + " " + n.Content +} + type Bold struct { BaseInline - // Symbol is "*" or "_" + // Symbol is "*" or "_". Symbol string Content string } @@ -30,10 +34,14 @@ func (*Bold) Type() NodeType { return NodeTypeBold } +func (n *Bold) String() string { + return n.Type().String() + " " + n.Symbol + " " + n.Content +} + type Italic struct { BaseInline - // Symbol is "*" or "_" + // Symbol is "*" or "_". Symbol string Content string } @@ -44,10 +52,14 @@ func (*Italic) Type() NodeType { return NodeTypeItalic } +func (n *Italic) String() string { + return n.Type().String() + " " + n.Symbol + " " + n.Content +} + type BoldItalic struct { BaseInline - // Symbol is "*" or "_" + // Symbol is "*" or "_". Symbol string Content string } @@ -58,6 +70,10 @@ func (*BoldItalic) Type() NodeType { return NodeTypeBoldItalic } +func (n *BoldItalic) String() string { + return n.Type().String() + " " + n.Symbol + " " + n.Content +} + type Code struct { BaseInline @@ -70,6 +86,10 @@ func (*Code) Type() NodeType { return NodeTypeCode } +func (n *Code) String() string { + return n.Type().String() + " " + n.Content +} + type Image struct { BaseInline @@ -83,6 +103,10 @@ func (*Image) Type() NodeType { return NodeTypeImage } +func (n *Image) String() string { + return n.Type().String() + " " + n.AltText + " " + n.URL +} + type Link struct { BaseInline @@ -96,6 +120,10 @@ func (*Link) Type() NodeType { return NodeTypeLink } +func (n *Link) String() string { + return n.Type().String() + " " + n.Text + " " + n.URL +} + type Tag struct { BaseInline @@ -108,6 +136,10 @@ func (*Tag) Type() NodeType { return NodeTypeTag } +func (n *Tag) String() string { + return n.Type().String() + " " + n.Content +} + type Strikethrough struct { BaseInline @@ -119,3 +151,7 @@ var NodeTypeStrikethrough = NewNodeType("Strikethrough") func (*Strikethrough) Type() 
NodeType { return NodeTypeStrikethrough } + +func (n *Strikethrough) String() string { + return n.Type().String() + " " + n.Content +} diff --git a/plugin/gomark/ast/node.go b/plugin/gomark/ast/node.go index ebd0e856..1877c38e 100644 --- a/plugin/gomark/ast/node.go +++ b/plugin/gomark/ast/node.go @@ -1,7 +1,36 @@ package ast type Node interface { + // Type returns a node type. Type() NodeType + + // String returns a string representation of this node. + // This method is used for debugging. + String() string + + // GetParent returns a parent node of this node. + GetParent() Node + + // GetPrevSibling returns a previous sibling node of this node. + GetPrevSibling() Node + + // GetNextSibling returns a next sibling node of this node. + GetNextSibling() Node + + // GetChildren returns children nodes of this node. + GetChildren() []Node + + // SetParent sets a parent node to this node. + SetParent(Node) + + // SetPrevSibling sets a previous sibling node to this node. + SetPrevSibling(Node) + + // SetNextSibling sets a next sibling node to this node. + SetNextSibling(Node) + + // SetChildren sets children nodes to this node. 
+ SetChildren([]Node) } type NodeType int @@ -18,3 +47,45 @@ func NewNodeType(name string) NodeType { nodeTypeIndex++ return nodeTypeIndex } + +type BaseNode struct { + parent Node + + prevSibling Node + + nextSibling Node + + children []Node +} + +func (n *BaseNode) GetParent() Node { + return n.parent +} + +func (n *BaseNode) GetPrevSibling() Node { + return n.prevSibling +} + +func (n *BaseNode) GetNextSibling() Node { + return n.nextSibling +} + +func (n *BaseNode) GetChildren() []Node { + return n.children +} + +func (n *BaseNode) SetParent(node Node) { + n.parent = node +} + +func (n *BaseNode) SetPrevSibling(node Node) { + n.prevSibling = node +} + +func (n *BaseNode) SetNextSibling(node Node) { + n.nextSibling = node +} + +func (n *BaseNode) SetChildren(nodes []Node) { + n.children = nodes +} diff --git a/plugin/gomark/parser/blockquote.go b/plugin/gomark/parser/blockquote.go index fa9425c4..a0d8b0c7 100644 --- a/plugin/gomark/parser/blockquote.go +++ b/plugin/gomark/parser/blockquote.go @@ -1,6 +1,8 @@ package parser import ( + "errors" + "github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/parser/tokenizer" ) @@ -21,10 +23,10 @@ func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (int, bool) { contentTokens := []*tokenizer.Token{} for _, token := range tokens[2:] { + contentTokens = append(contentTokens, token) if token.Type == tokenizer.Newline { break } - contentTokens = append(contentTokens, token) } if len(contentTokens) == 0 { return 0, false @@ -33,15 +35,18 @@ func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (int, bool) { return len(contentTokens) + 2, true } -func (p *BlockquoteParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (p *BlockquoteParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { size, ok := p.Match(tokens) if size == 0 || !ok { - return nil + return nil, errors.New("not matched") } contentTokens := tokens[2:size] - children := ParseInline(contentTokens) - return 
&ast.Blockquote{ - Children: children, + blockquote := &ast.Blockquote{} + children, err := ParseInline(blockquote, contentTokens) + if err != nil { + return nil, err } + blockquote.Children = children + return blockquote, nil } diff --git a/plugin/gomark/parser/blockquote_test.go b/plugin/gomark/parser/blockquote_test.go index 2e0f2ae0..c3d5a049 100644 --- a/plugin/gomark/parser/blockquote_test.go +++ b/plugin/gomark/parser/blockquote_test.go @@ -31,6 +31,7 @@ func TestBlockquoteParser(t *testing.T) { &ast.Text{ Content: "Hello", }, + &ast.LineBreak{}, }, }, }, @@ -42,6 +43,7 @@ func TestBlockquoteParser(t *testing.T) { for _, test := range tests { tokens := tokenizer.Tokenize(test.text) - require.Equal(t, test.blockquote, NewBlockquoteParser().Parse(tokens)) + node, _ := NewBlockquoteParser().Parse(tokens) + require.Equal(t, StringifyNodes([]ast.Node{test.blockquote}), StringifyNodes([]ast.Node{node})) } } diff --git a/plugin/gomark/parser/bold.go b/plugin/gomark/parser/bold.go index 36cf2434..530b8a99 100644 --- a/plugin/gomark/parser/bold.go +++ b/plugin/gomark/parser/bold.go @@ -1,6 +1,8 @@ package parser import ( + "errors" + "github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/parser/tokenizer" ) @@ -43,10 +45,10 @@ func (*BoldParser) Match(tokens []*tokenizer.Token) (int, bool) { return cursor + 2, true } -func (p *BoldParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (p *BoldParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { size, ok := p.Match(tokens) if size == 0 || !ok { - return nil + return nil, errors.New("not matched") } prefixTokenType := tokens[0].Type @@ -54,5 +56,5 @@ func (p *BoldParser) Parse(tokens []*tokenizer.Token) ast.Node { return &ast.Bold{ Symbol: prefixTokenType, Content: tokenizer.Stringify(contentTokens), - } + }, nil } diff --git a/plugin/gomark/parser/bold_italic.go b/plugin/gomark/parser/bold_italic.go index a9736594..44f43a3e 100644 --- a/plugin/gomark/parser/bold_italic.go +++ 
b/plugin/gomark/parser/bold_italic.go @@ -1,6 +1,8 @@ package parser import ( + "errors" + "github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/parser/tokenizer" ) @@ -43,10 +45,10 @@ func (*BoldItalicParser) Match(tokens []*tokenizer.Token) (int, bool) { return cursor + 3, true } -func (p *BoldItalicParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (p *BoldItalicParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { size, ok := p.Match(tokens) if size == 0 || !ok { - return nil + return nil, errors.New("not matched") } prefixTokenType := tokens[0].Type @@ -54,5 +56,5 @@ func (p *BoldItalicParser) Parse(tokens []*tokenizer.Token) ast.Node { return &ast.BoldItalic{ Symbol: prefixTokenType, Content: tokenizer.Stringify(contentTokens), - } + }, nil } diff --git a/plugin/gomark/parser/bold_italic_test.go b/plugin/gomark/parser/bold_italic_test.go index 85579859..b31790e5 100644 --- a/plugin/gomark/parser/bold_italic_test.go +++ b/plugin/gomark/parser/bold_italic_test.go @@ -44,6 +44,7 @@ func TestBoldItalicParser(t *testing.T) { for _, test := range tests { tokens := tokenizer.Tokenize(test.text) - require.Equal(t, test.boldItalic, NewBoldItalicParser().Parse(tokens)) + node, _ := NewBoldItalicParser().Parse(tokens) + require.Equal(t, StringifyNodes([]ast.Node{test.boldItalic}), StringifyNodes([]ast.Node{node})) } } diff --git a/plugin/gomark/parser/bold_test.go b/plugin/gomark/parser/bold_test.go index 43864e4f..9ab395b9 100644 --- a/plugin/gomark/parser/bold_test.go +++ b/plugin/gomark/parser/bold_test.go @@ -44,6 +44,7 @@ func TestBoldParser(t *testing.T) { for _, test := range tests { tokens := tokenizer.Tokenize(test.text) - require.Equal(t, test.bold, NewBoldParser().Parse(tokens)) + node, _ := NewBoldParser().Parse(tokens) + require.Equal(t, StringifyNodes([]ast.Node{test.bold}), StringifyNodes([]ast.Node{node})) } } diff --git a/plugin/gomark/parser/code.go b/plugin/gomark/parser/code.go index b6dff80c..94cfd572 
100644 --- a/plugin/gomark/parser/code.go +++ b/plugin/gomark/parser/code.go @@ -1,6 +1,8 @@ package parser import ( + "errors" + "github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/parser/tokenizer" ) @@ -36,14 +38,14 @@ func (*CodeParser) Match(tokens []*tokenizer.Token) (int, bool) { return len(contentTokens) + 2, true } -func (p *CodeParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (p *CodeParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { size, ok := p.Match(tokens) if size == 0 || !ok { - return nil + return nil, errors.New("not matched") } contentTokens := tokens[1 : size-1] return &ast.Code{ Content: tokenizer.Stringify(contentTokens), - } + }, nil } diff --git a/plugin/gomark/parser/code_block.go b/plugin/gomark/parser/code_block.go index 8ae48176..4eaf3a74 100644 --- a/plugin/gomark/parser/code_block.go +++ b/plugin/gomark/parser/code_block.go @@ -1,6 +1,8 @@ package parser import ( + "errors" + "github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/parser/tokenizer" ) @@ -51,10 +53,10 @@ func (*CodeBlockParser) Match(tokens []*tokenizer.Token) (int, bool) { return cursor, true } -func (p *CodeBlockParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (p *CodeBlockParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { size, ok := p.Match(tokens) if size == 0 || !ok { - return nil + return nil, errors.New("not matched") } languageToken := tokens[3] @@ -73,5 +75,5 @@ func (p *CodeBlockParser) Parse(tokens []*tokenizer.Token) ast.Node { if languageToken != nil { codeBlock.Language = languageToken.String() } - return codeBlock + return codeBlock, nil } diff --git a/plugin/gomark/parser/code_block_test.go b/plugin/gomark/parser/code_block_test.go index dca3cc24..4a46b9aa 100644 --- a/plugin/gomark/parser/code_block_test.go +++ b/plugin/gomark/parser/code_block_test.go @@ -58,6 +58,7 @@ func TestCodeBlockParser(t *testing.T) { for _, test := range tests { tokens := 
tokenizer.Tokenize(test.text) - require.Equal(t, test.codeBlock, NewCodeBlockParser().Parse(tokens)) + node, _ := NewCodeBlockParser().Parse(tokens) + require.Equal(t, StringifyNodes([]ast.Node{test.codeBlock}), StringifyNodes([]ast.Node{node})) } } diff --git a/plugin/gomark/parser/code_test.go b/plugin/gomark/parser/code_test.go index c1ee6ffd..8c4822d8 100644 --- a/plugin/gomark/parser/code_test.go +++ b/plugin/gomark/parser/code_test.go @@ -32,6 +32,7 @@ func TestCodeParser(t *testing.T) { for _, test := range tests { tokens := tokenizer.Tokenize(test.text) - require.Equal(t, test.code, NewCodeParser().Parse(tokens)) + node, _ := NewCodeParser().Parse(tokens) + require.Equal(t, StringifyNodes([]ast.Node{test.code}), StringifyNodes([]ast.Node{node})) } } diff --git a/plugin/gomark/parser/heading.go b/plugin/gomark/parser/heading.go index eaefcb33..8613af02 100644 --- a/plugin/gomark/parser/heading.go +++ b/plugin/gomark/parser/heading.go @@ -1,6 +1,8 @@ package parser import ( + "errors" + "github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/parser/tokenizer" ) @@ -47,10 +49,10 @@ func (*HeadingParser) Match(tokens []*tokenizer.Token) (int, bool) { return cursor, true } -func (p *HeadingParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (p *HeadingParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { size, ok := p.Match(tokens) if size == 0 || !ok { - return nil + return nil, errors.New("not matched") } level := 0 @@ -61,10 +63,15 @@ func (p *HeadingParser) Parse(tokens []*tokenizer.Token) ast.Node { break } } + contentTokens := tokens[level+1 : size] - children := ParseInline(contentTokens) - return &ast.Heading{ - Level: level, - Children: children, + heading := &ast.Heading{ + Level: level, } + children, err := ParseInline(heading, contentTokens) + if err != nil { + return nil, err + } + heading.Children = children + return heading, nil } diff --git a/plugin/gomark/parser/heading_test.go 
b/plugin/gomark/parser/heading_test.go index 638d0466..759c1d18 100644 --- a/plugin/gomark/parser/heading_test.go +++ b/plugin/gomark/parser/heading_test.go @@ -75,6 +75,7 @@ Hello World`, for _, test := range tests { tokens := tokenizer.Tokenize(test.text) - require.Equal(t, test.heading, NewHeadingParser().Parse(tokens)) + node, _ := NewHeadingParser().Parse(tokens) + require.Equal(t, StringifyNodes([]ast.Node{test.heading}), StringifyNodes([]ast.Node{node})) } } diff --git a/plugin/gomark/parser/horizontal_rule.go b/plugin/gomark/parser/horizontal_rule.go index a008d443..0f104539 100644 --- a/plugin/gomark/parser/horizontal_rule.go +++ b/plugin/gomark/parser/horizontal_rule.go @@ -1,6 +1,8 @@ package parser import ( + "errors" + "github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/parser/tokenizer" ) @@ -27,13 +29,13 @@ func (*HorizontalRuleParser) Match(tokens []*tokenizer.Token) (int, bool) { return 3, true } -func (p *HorizontalRuleParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (p *HorizontalRuleParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { size, ok := p.Match(tokens) if size == 0 || !ok { - return nil + return nil, errors.New("not matched") } return &ast.HorizontalRule{ Symbol: tokens[0].Type, - } + }, nil } diff --git a/plugin/gomark/parser/horizontal_rule_test.go b/plugin/gomark/parser/horizontal_rule_test.go index f45878f1..1cc34428 100644 --- a/plugin/gomark/parser/horizontal_rule_test.go +++ b/plugin/gomark/parser/horizontal_rule_test.go @@ -44,6 +44,7 @@ func TestHorizontalRuleParser(t *testing.T) { for _, test := range tests { tokens := tokenizer.Tokenize(test.text) - require.Equal(t, test.horizontalRule, NewHorizontalRuleParser().Parse(tokens)) + node, _ := NewHorizontalRuleParser().Parse(tokens) + require.Equal(t, StringifyNodes([]ast.Node{test.horizontalRule}), StringifyNodes([]ast.Node{node})) } } diff --git a/plugin/gomark/parser/image.go b/plugin/gomark/parser/image.go index 
83be30bd..deb72988 100644 --- a/plugin/gomark/parser/image.go +++ b/plugin/gomark/parser/image.go @@ -1,6 +1,8 @@ package parser import ( + "errors" + "github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/parser/tokenizer" ) @@ -52,10 +54,10 @@ func (*ImageParser) Match(tokens []*tokenizer.Token) (int, bool) { return cursor + len(contentTokens) + 1, true } -func (p *ImageParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (p *ImageParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { size, ok := p.Match(tokens) if size == 0 || !ok { - return nil + return nil, errors.New("not matched") } altTextTokens := []*tokenizer.Token{} @@ -69,5 +71,5 @@ func (p *ImageParser) Parse(tokens []*tokenizer.Token) ast.Node { return &ast.Image{ AltText: tokenizer.Stringify(altTextTokens), URL: tokenizer.Stringify(contentTokens), - } + }, nil } diff --git a/plugin/gomark/parser/image_test.go b/plugin/gomark/parser/image_test.go index 176030fc..f88e2a69 100644 --- a/plugin/gomark/parser/image_test.go +++ b/plugin/gomark/parser/image_test.go @@ -39,6 +39,7 @@ func TestImageParser(t *testing.T) { } for _, test := range tests { tokens := tokenizer.Tokenize(test.text) - require.Equal(t, test.image, NewImageParser().Parse(tokens)) + node, _ := NewImageParser().Parse(tokens) + require.Equal(t, StringifyNodes([]ast.Node{test.image}), StringifyNodes([]ast.Node{node})) } } diff --git a/plugin/gomark/parser/italic.go b/plugin/gomark/parser/italic.go index e9e32fef..1b29afeb 100644 --- a/plugin/gomark/parser/italic.go +++ b/plugin/gomark/parser/italic.go @@ -1,6 +1,8 @@ package parser import ( + "errors" + "github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/parser/tokenizer" ) @@ -42,10 +44,10 @@ func (*ItalicParser) Match(tokens []*tokenizer.Token) (int, bool) { return len(contentTokens) + 2, true } -func (p *ItalicParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (p *ItalicParser) Parse(tokens 
[]*tokenizer.Token) (ast.Node, error) { size, ok := p.Match(tokens) if size == 0 || !ok { - return nil + return nil, errors.New("not matched") } prefixTokenType := tokens[0].Type @@ -53,5 +55,5 @@ func (p *ItalicParser) Parse(tokens []*tokenizer.Token) ast.Node { return &ast.Italic{ Symbol: prefixTokenType, Content: tokenizer.Stringify(contentTokens), - } + }, nil } diff --git a/plugin/gomark/parser/italic_test.go b/plugin/gomark/parser/italic_test.go index 60c6ae53..f3627f71 100644 --- a/plugin/gomark/parser/italic_test.go +++ b/plugin/gomark/parser/italic_test.go @@ -43,6 +43,7 @@ func TestItalicParser(t *testing.T) { for _, test := range tests { tokens := tokenizer.Tokenize(test.text) - require.Equal(t, test.italic, NewItalicParser().Parse(tokens)) + node, _ := NewItalicParser().Parse(tokens) + require.Equal(t, StringifyNodes([]ast.Node{test.italic}), StringifyNodes([]ast.Node{node})) } } diff --git a/plugin/gomark/parser/line_break.go b/plugin/gomark/parser/line_break.go index 6017f72a..9d98d6a9 100644 --- a/plugin/gomark/parser/line_break.go +++ b/plugin/gomark/parser/line_break.go @@ -1,6 +1,8 @@ package parser import ( + "errors" + "github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/parser/tokenizer" ) @@ -21,11 +23,11 @@ func (*LineBreakParser) Match(tokens []*tokenizer.Token) (int, bool) { return 1, true } -func (p *LineBreakParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (p *LineBreakParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { size, ok := p.Match(tokens) if size == 0 || !ok { - return nil + return nil, errors.New("not matched") } - return &ast.LineBreak{} + return &ast.LineBreak{}, nil } diff --git a/plugin/gomark/parser/link.go b/plugin/gomark/parser/link.go index 3b5fd643..196a5764 100644 --- a/plugin/gomark/parser/link.go +++ b/plugin/gomark/parser/link.go @@ -1,6 +1,8 @@ package parser import ( + "errors" + "github.com/usememos/memos/plugin/gomark/ast" 
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" ) @@ -51,10 +53,10 @@ func (*LinkParser) Match(tokens []*tokenizer.Token) (int, bool) { return 4 + len(urlTokens) + len(textTokens), true } -func (p *LinkParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (p *LinkParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { size, ok := p.Match(tokens) if size == 0 || !ok { - return nil + return nil, errors.New("not matched") } textTokens := []*tokenizer.Token{} @@ -68,5 +70,5 @@ func (p *LinkParser) Parse(tokens []*tokenizer.Token) ast.Node { return &ast.Link{ Text: tokenizer.Stringify(textTokens), URL: tokenizer.Stringify(urlTokens), - } + }, nil } diff --git a/plugin/gomark/parser/link_test.go b/plugin/gomark/parser/link_test.go index 421ab38a..6b256406 100644 --- a/plugin/gomark/parser/link_test.go +++ b/plugin/gomark/parser/link_test.go @@ -39,6 +39,7 @@ func TestLinkParser(t *testing.T) { } for _, test := range tests { tokens := tokenizer.Tokenize(test.text) - require.Equal(t, test.link, NewLinkParser().Parse(tokens)) + node, _ := NewLinkParser().Parse(tokens) + require.Equal(t, StringifyNodes([]ast.Node{test.link}), StringifyNodes([]ast.Node{node})) } } diff --git a/plugin/gomark/parser/paragraph.go b/plugin/gomark/parser/paragraph.go index 09e69047..41c598d3 100644 --- a/plugin/gomark/parser/paragraph.go +++ b/plugin/gomark/parser/paragraph.go @@ -1,6 +1,8 @@ package parser import ( + "errors" + "github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/parser/tokenizer" ) @@ -29,15 +31,18 @@ func (*ParagraphParser) Match(tokens []*tokenizer.Token) (int, bool) { return len(contentTokens), true } -func (p *ParagraphParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (p *ParagraphParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { size, ok := p.Match(tokens) if size == 0 || !ok { - return nil + return nil, errors.New("not matched") } contentTokens := tokens[:size] - children := ParseInline(contentTokens) 
- return &ast.Paragraph{ - Children: children, + paragraph := &ast.Paragraph{} + children, err := ParseInline(paragraph, contentTokens) + if err != nil { + return nil, err } + paragraph.Children = children + return paragraph, nil } diff --git a/plugin/gomark/parser/paragraph_test.go b/plugin/gomark/parser/paragraph_test.go index 8c3bfe3c..387bc06a 100644 --- a/plugin/gomark/parser/paragraph_test.go +++ b/plugin/gomark/parser/paragraph_test.go @@ -32,6 +32,7 @@ func TestParagraphParser(t *testing.T) { for _, test := range tests { tokens := tokenizer.Tokenize(test.text) - require.Equal(t, test.paragraph, NewParagraphParser().Parse(tokens)) + node, _ := NewParagraphParser().Parse(tokens) + require.Equal(t, StringifyNodes([]ast.Node{test.paragraph}), StringifyNodes([]ast.Node{node})) } } diff --git a/plugin/gomark/parser/parser.go b/plugin/gomark/parser/parser.go index 123eead3..33f602e3 100644 --- a/plugin/gomark/parser/parser.go +++ b/plugin/gomark/parser/parser.go @@ -14,7 +14,7 @@ type Context struct { type BaseParser interface { Match(tokens []*tokenizer.Token) (int, bool) - Parse(tokens []*tokenizer.Token) ast.Node + Parse(tokens []*tokenizer.Token) (ast.Node, error) } type InlineParser interface { @@ -36,16 +36,23 @@ var defaultBlockParsers = []BlockParser{ func Parse(tokens []*tokenizer.Token) ([]ast.Node, error) { nodes := []ast.Node{} + var prevNode ast.Node for len(tokens) > 0 { for _, blockParser := range defaultBlockParsers { - cursor, matched := blockParser.Match(tokens) + size, matched := blockParser.Match(tokens) if matched { - node := blockParser.Parse(tokens) - if node == nil { + node, err := blockParser.Parse(tokens) + if err != nil { return nil, errors.New("parse error") } + + tokens = tokens[size:] + if prevNode != nil { + prevNode.SetNextSibling(node) + node.SetPrevSibling(prevNode) + } + prevNode = node nodes = append(nodes, node) - tokens = tokens[cursor:] break } } @@ -62,27 +69,40 @@ var defaultInlineParsers = []InlineParser{ NewCodeParser(), 
NewTagParser(), NewStrikethroughParser(), + NewLineBreakParser(), NewTextParser(), } -func ParseInline(tokens []*tokenizer.Token) []ast.Node { +func ParseInline(parent ast.Node, tokens []*tokenizer.Token) ([]ast.Node, error) { nodes := []ast.Node{} - var lastNode ast.Node + var prevNode ast.Node for len(tokens) > 0 { for _, inlineParser := range defaultInlineParsers { - cursor, matched := inlineParser.Match(tokens) + size, matched := inlineParser.Match(tokens) if matched { - node := inlineParser.Parse(tokens) - if node.Type() == ast.NodeTypeText && lastNode != nil && lastNode.Type() == ast.NodeTypeText { - lastNode.(*ast.Text).Content += node.(*ast.Text).Content - } else { - nodes = append(nodes, node) - lastNode = node + node, err := inlineParser.Parse(tokens) + if err != nil { + return nil, errors.New("parse error") } - tokens = tokens[cursor:] + + tokens = tokens[size:] + node.SetParent(parent) + if prevNode != nil { + if prevNode.Type() == ast.NodeTypeText && node.Type() == ast.NodeTypeText { + prevNode.(*ast.Text).Content += node.(*ast.Text).Content + break + } + + prevNode.SetNextSibling(node) + node.SetPrevSibling(prevNode) + } + + nodes = append(nodes, node) + prevNode = node break } } } - return nodes + parent.SetChildren(nodes) + return nodes, nil } diff --git a/plugin/gomark/parser/parser_test.go b/plugin/gomark/parser/parser_test.go index 62b06ab9..c5fed67c 100644 --- a/plugin/gomark/parser/parser_test.go +++ b/plugin/gomark/parser/parser_test.go @@ -43,7 +43,7 @@ func TestParser(t *testing.T) { }, }, { - text: "Hello **world**!", + text: "Hello **world**!\nHere is a new line.", nodes: []ast.Node{ &ast.Paragraph{ Children: []ast.Node{ @@ -59,6 +59,14 @@ func TestParser(t *testing.T) { }, }, }, + &ast.LineBreak{}, + &ast.Paragraph{ + Children: []ast.Node{ + &ast.Text{ + Content: "Here is a new line.", + }, + }, + }, }, }, { @@ -89,8 +97,17 @@ func TestParser(t *testing.T) { for _, test := range tests { tokens := tokenizer.Tokenize(test.text) - nodes, err 
:= Parse(tokens) - require.NoError(t, err) - require.Equal(t, test.nodes, nodes) + nodes, _ := Parse(tokens) + require.Equal(t, StringifyNodes(test.nodes), StringifyNodes(nodes)) } } + +func StringifyNodes(nodes []ast.Node) string { + var result string + for _, node := range nodes { + if node != nil { + result += node.String() + } + } + return result +} diff --git a/plugin/gomark/parser/strikethrough.go b/plugin/gomark/parser/strikethrough.go index fde8acc5..d32ffc3c 100644 --- a/plugin/gomark/parser/strikethrough.go +++ b/plugin/gomark/parser/strikethrough.go @@ -1,6 +1,8 @@ package parser import ( + "errors" + "github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/parser/tokenizer" ) @@ -36,14 +38,14 @@ func (*StrikethroughParser) Match(tokens []*tokenizer.Token) (int, bool) { return cursor + 2, true } -func (p *StrikethroughParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (p *StrikethroughParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { size, ok := p.Match(tokens) if size == 0 || !ok { - return nil + return nil, errors.New("not matched") } contentTokens := tokens[2 : size-2] return &ast.Strikethrough{ Content: tokenizer.Stringify(contentTokens), - } + }, nil } diff --git a/plugin/gomark/parser/strikethrough_test.go b/plugin/gomark/parser/strikethrough_test.go index 28a04bcc..7e9efe6f 100644 --- a/plugin/gomark/parser/strikethrough_test.go +++ b/plugin/gomark/parser/strikethrough_test.go @@ -40,6 +40,7 @@ func TestStrikethroughParser(t *testing.T) { for _, test := range tests { tokens := tokenizer.Tokenize(test.text) - require.Equal(t, test.strikethrough, NewStrikethroughParser().Parse(tokens)) + node, _ := NewStrikethroughParser().Parse(tokens) + require.Equal(t, StringifyNodes([]ast.Node{test.strikethrough}), StringifyNodes([]ast.Node{node})) } } diff --git a/plugin/gomark/parser/tag.go b/plugin/gomark/parser/tag.go index 80fc15ed..3deccad3 100644 --- a/plugin/gomark/parser/tag.go +++ 
b/plugin/gomark/parser/tag.go @@ -1,6 +1,8 @@ package parser import ( + "errors" + "github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/parser/tokenizer" ) @@ -32,14 +34,14 @@ func (*TagParser) Match(tokens []*tokenizer.Token) (int, bool) { return len(contentTokens) + 1, true } -func (p *TagParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (p *TagParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { size, ok := p.Match(tokens) if size == 0 || !ok { - return nil + return nil, errors.New("not matched") } contentTokens := tokens[1:size] return &ast.Tag{ Content: tokenizer.Stringify(contentTokens), - } + }, nil } diff --git a/plugin/gomark/parser/tag_test.go b/plugin/gomark/parser/tag_test.go index 54d541e7..cc746742 100644 --- a/plugin/gomark/parser/tag_test.go +++ b/plugin/gomark/parser/tag_test.go @@ -38,6 +38,7 @@ func TestTagParser(t *testing.T) { for _, test := range tests { tokens := tokenizer.Tokenize(test.text) - require.Equal(t, test.tag, NewTagParser().Parse(tokens)) + node, _ := NewTagParser().Parse(tokens) + require.Equal(t, StringifyNodes([]ast.Node{test.tag}), StringifyNodes([]ast.Node{node})) } } diff --git a/plugin/gomark/parser/text.go b/plugin/gomark/parser/text.go index a2523ff2..eb2f107f 100644 --- a/plugin/gomark/parser/text.go +++ b/plugin/gomark/parser/text.go @@ -20,11 +20,11 @@ func (*TextParser) Match(tokens []*tokenizer.Token) (int, bool) { return 1, true } -func (*TextParser) Parse(tokens []*tokenizer.Token) ast.Node { +func (*TextParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { if len(tokens) == 0 { - return &ast.Text{} + return &ast.Text{}, nil } return &ast.Text{ Content: tokens[0].String(), - } + }, nil } diff --git a/plugin/gomark/renderer/html/html.go b/plugin/gomark/renderer/html/html.go new file mode 100644 index 00000000..9c8759c8 --- /dev/null +++ b/plugin/gomark/renderer/html/html.go @@ -0,0 +1,87 @@ +package html + +import ( + "bytes" + "fmt" + + 
"github.com/usememos/memos/plugin/gomark/ast" +) + +// HTMLRenderer is a simple renderer that converts AST to HTML. +// nolint +type HTMLRenderer struct { + output *bytes.Buffer + context *renderContext +} + +type renderContext struct { +} + +// NewHTMLRenderer creates a new HTMLRenderer. +func NewHTMLRenderer() *HTMLRenderer { + return &HTMLRenderer{ + output: new(bytes.Buffer), + context: &renderContext{}, + } +} + +// RenderNode renders a single AST node to HTML. +func (r *HTMLRenderer) RenderNode(node ast.Node) { + prevSibling, nextSibling := node.GetPrevSibling(), node.GetNextSibling() + + switch n := node.(type) { + case *ast.LineBreak: + r.output.WriteString("<br>") + case *ast.Paragraph: + r.output.WriteString("<p>") + r.RenderNodes(n.Children) + r.output.WriteString("</p>") + case *ast.CodeBlock: + r.output.WriteString("<pre>")
+		r.output.WriteString(n.Content)
+		r.output.WriteString("</pre>") + case *ast.Heading: + r.output.WriteString(fmt.Sprintf("<h%d>", n.Level)) + r.RenderNodes(n.Children) + r.output.WriteString(fmt.Sprintf("</h%d>", n.Level)) + case *ast.HorizontalRule: + r.output.WriteString("<hr>") + case *ast.Blockquote: + if prevSibling == nil || prevSibling.Type() != ast.NodeTypeBlockquote { + r.output.WriteString("<blockquote>") + } + r.RenderNodes(n.Children) + if nextSibling != nil && nextSibling.Type() == ast.NodeTypeBlockquote { + r.RenderNode(nextSibling) + } + if prevSibling == nil || prevSibling.Type() != ast.NodeTypeBlockquote { + r.output.WriteString("</blockquote>") + } + case *ast.Text: + r.output.WriteString(n.Content) + default: + // Handle other block types if needed. + } +} + +// RenderNodes renders a slice of AST nodes to HTML. +func (r *HTMLRenderer) RenderNodes(nodes []ast.Node) { + for _, node := range nodes { + prevSibling := node.GetPrevSibling() + if prevSibling != nil { + if prevSibling.Type() == node.Type() { + if node.Type() == ast.NodeTypeBlockquote { + continue + } + } + } + + r.RenderNode(node) + } +} + +// Render renders the AST to HTML. +func (r *HTMLRenderer) Render(astRoot []ast.Node) string { + r.RenderNodes(astRoot) + return r.output.String() +} diff --git a/plugin/gomark/renderer/html/html_test.go b/plugin/gomark/renderer/html/html_test.go new file mode 100644 index 00000000..83dc5f0d --- /dev/null +++ b/plugin/gomark/renderer/html/html_test.go @@ -0,0 +1,36 @@ +package html + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/usememos/memos/plugin/gomark/parser" + "github.com/usememos/memos/plugin/gomark/parser/tokenizer" +) + +func TestHTMLRenderer(t *testing.T) { + tests := []struct { + text string + expected string + }{ + { + text: "Hello world!", + expected: `<p>Hello world!</p>`, + }, + { + text: "> Hello\n> world!", + expected: `<blockquote>Hello<br>world!</blockquote>`, + }, + } + + for _, test := range tests { + tokens := tokenizer.Tokenize(test.text) + nodes, err := parser.Parse(tokens) + require.NoError(t, err) + actual := NewHTMLRenderer().Render(nodes) + if actual != test.expected { + t.Errorf("expected: %s, actual: %s", test.expected, actual) + } + } +}