diff --git a/plugin/gomark/ast/inline.go b/plugin/gomark/ast/inline.go index 8e82c54d..82450fb0 100644 --- a/plugin/gomark/ast/inline.go +++ b/plugin/gomark/ast/inline.go @@ -101,3 +101,13 @@ type Strikethrough struct { func (*Strikethrough) Type() NodeType { return StrikethroughNode } + +type EscapingCharacter struct { + BaseInline + + Symbol string +} + +func (*EscapingCharacter) Type() NodeType { + return EscapingCharacterNode +} diff --git a/plugin/gomark/ast/node.go b/plugin/gomark/ast/node.go index 9e4c5315..6552555e 100644 --- a/plugin/gomark/ast/node.go +++ b/plugin/gomark/ast/node.go @@ -23,6 +23,7 @@ const ( LinkNode TagNode StrikethroughNode + EscapingCharacterNode ) type Node interface { diff --git a/plugin/gomark/parser/escaping_character.go b/plugin/gomark/parser/escaping_character.go new file mode 100644 index 00000000..8b3f74ed --- /dev/null +++ b/plugin/gomark/parser/escaping_character.go @@ -0,0 +1,41 @@ +package parser + +import ( + "errors" + + "github.com/usememos/memos/plugin/gomark/ast" + "github.com/usememos/memos/plugin/gomark/parser/tokenizer" +) + +type EscapingCharacterParser struct{} + +func NewEscapingCharacterParser() *EscapingCharacterParser { + return &EscapingCharacterParser{} +} + +func (*EscapingCharacterParser) Match(tokens []*tokenizer.Token) (int, bool) { + if len(tokens) == 0 { + return 0, false + } + if tokens[0].Type != tokenizer.Backslash { + return 0, false + } + if len(tokens) == 1 { + return 0, false + } + if tokens[1].Type == tokenizer.Newline || tokens[1].Type == tokenizer.Space || tokens[1].Type == tokenizer.Text || tokens[1].Type == tokenizer.Number { + return 0, false + } + return 2, true +} + +func (p *EscapingCharacterParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) { + size, ok := p.Match(tokens) + if size == 0 || !ok { + return nil, errors.New("not matched") + } + + return &ast.EscapingCharacter{ + Symbol: tokens[1].Value, + }, nil +} diff --git a/plugin/gomark/parser/escaping_character_test.go 
b/plugin/gomark/parser/escaping_character_test.go new file mode 100644 index 00000000..da6b2d31 --- /dev/null +++ b/plugin/gomark/parser/escaping_character_test.go @@ -0,0 +1,30 @@ +package parser + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/usememos/memos/plugin/gomark/ast" + "github.com/usememos/memos/plugin/gomark/parser/tokenizer" +) + +func TestEscapingCharacterParser(t *testing.T) { + tests := []struct { + text string + node ast.Node + }{ + { + text: `\# 123`, + node: &ast.EscapingCharacter{ + Symbol: "#", + }, + }, + } + + for _, test := range tests { + tokens := tokenizer.Tokenize(test.text) + node, _ := NewEscapingCharacterParser().Parse(tokens) + require.Equal(t, StringifyNodes([]ast.Node{test.node}), StringifyNodes([]ast.Node{node})) + } +} diff --git a/plugin/gomark/parser/parser.go b/plugin/gomark/parser/parser.go index 545d5a50..9c647f36 100644 --- a/plugin/gomark/parser/parser.go +++ b/plugin/gomark/parser/parser.go @@ -71,6 +71,7 @@ func ParseBlockWithParsers(tokens []*tokenizer.Token, blockParsers []BlockParser } var defaultInlineParsers = []InlineParser{ + NewEscapingCharacterParser(), NewBoldItalicParser(), NewImageParser(), NewLinkParser(), diff --git a/plugin/gomark/parser/parser_test.go b/plugin/gomark/parser/parser_test.go index 602d09bc..b1c489e5 100644 --- a/plugin/gomark/parser/parser_test.go +++ b/plugin/gomark/parser/parser_test.go @@ -26,6 +26,34 @@ func TestParser(t *testing.T) { }, }, }, + { + text: "# Hello world!", + nodes: []ast.Node{ + &ast.Heading{ + Level: 1, + Children: []ast.Node{ + &ast.Text{ + Content: "Hello world!", + }, + }, + }, + }, + }, + { + text: "\\# Hello world!", + nodes: []ast.Node{ + &ast.Paragraph{ + Children: []ast.Node{ + &ast.EscapingCharacter{ + Symbol: "#", + }, + &ast.Text{ + Content: " Hello world!", + }, + }, + }, + }, + }, { text: "**Hello** world!", nodes: []ast.Node{ diff --git a/plugin/gomark/parser/tokenizer/tokenizer.go 
b/plugin/gomark/parser/tokenizer/tokenizer.go index e138ba82..de217037 100644 --- a/plugin/gomark/parser/tokenizer/tokenizer.go +++ b/plugin/gomark/parser/tokenizer/tokenizer.go @@ -17,6 +17,7 @@ const ( PlusSign TokenType = "+" Dot TokenType = "." GreaterThan TokenType = ">" + Backslash TokenType = "\\" Newline TokenType = "\n" Space TokenType = " " ) @@ -70,6 +71,8 @@ func Tokenize(text string) []*Token { tokens = append(tokens, NewToken(PlusSign, "+")) case '.': tokens = append(tokens, NewToken(Dot, ".")) + case '\\': + tokens = append(tokens, NewToken(Backslash, `\`)) case '\n': tokens = append(tokens, NewToken(Newline, "\n")) case ' ': diff --git a/plugin/gomark/render/html/html.go b/plugin/gomark/render/html/html.go index 47979ac6..7757aca4 100644 --- a/plugin/gomark/render/html/html.go +++ b/plugin/gomark/render/html/html.go @@ -59,6 +59,8 @@ func (r *HTMLRender) RenderNode(node ast.Node) { r.renderTag(n) case *ast.Strikethrough: r.renderStrikethrough(n) + case *ast.EscapingCharacter: + r.renderEscapingCharacter(n) case *ast.Text: r.renderText(n) default: @@ -199,3 +201,8 @@ func (r *HTMLRender) renderStrikethrough(node *ast.Strikethrough) { r.output.WriteString(node.Content) r.output.WriteString(``) } + +func (r *HTMLRender) renderEscapingCharacter(node *ast.EscapingCharacter) { + r.output.WriteString("\\") + r.output.WriteString(node.Symbol) +} diff --git a/plugin/gomark/render/html/html_test.go b/plugin/gomark/render/html/html_test.go index fb6499a5..1a76b66f 100644 --- a/plugin/gomark/render/html/html_test.go +++ b/plugin/gomark/render/html/html_test.go @@ -42,6 +42,10 @@ func TestHTMLRender(t *testing.T) { text: "#article #memo", expected: `
<p><span>#article</span> <span>#memo</span></p>
`, }, + { + text: "#article \\#memo", + expected: `<p><span>#article</span> \#memo</p>
`, + }, { text: "* Hello\n* world!", expected: `