2023-05-23 21:11:01 +08:00
|
|
|
package parser
|
|
|
|
|
2023-12-13 21:00:13 +08:00
|
|
|
import (
|
2023-12-13 23:50:05 +08:00
|
|
|
"errors"
|
|
|
|
|
2023-12-13 21:00:13 +08:00
|
|
|
"github.com/usememos/memos/plugin/gomark/ast"
|
|
|
|
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
|
|
|
|
)
|
2023-05-23 21:11:01 +08:00
|
|
|
|
|
|
|
// ItalicParser recognizes italic spans delimited by a matching pair of
// "*" or "_" tokens (e.g. *foo* or _foo_).
type ItalicParser struct {
	// ContentTokens holds the tokens between the opening and closing
	// delimiter symbols.
	// NOTE(review): not written by Match or Parse in this file —
	// presumably kept for API compatibility; verify against callers.
	ContentTokens []*tokenizer.Token
}
|
|
|
|
|
|
|
|
func NewItalicParser() *ItalicParser {
|
|
|
|
return &ItalicParser{}
|
|
|
|
}
|
|
|
|
|
2023-12-13 21:00:13 +08:00
|
|
|
func (*ItalicParser) Match(tokens []*tokenizer.Token) (int, bool) {
|
2023-05-23 21:11:01 +08:00
|
|
|
if len(tokens) < 3 {
|
2023-12-13 21:00:13 +08:00
|
|
|
return 0, false
|
2023-05-23 21:11:01 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
prefixTokens := tokens[:1]
|
2023-12-16 08:51:29 +08:00
|
|
|
if prefixTokens[0].Type != tokenizer.Asterisk && prefixTokens[0].Type != tokenizer.Underscore {
|
2023-12-13 21:00:13 +08:00
|
|
|
return 0, false
|
2023-05-23 21:11:01 +08:00
|
|
|
}
|
|
|
|
prefixTokenType := prefixTokens[0].Type
|
|
|
|
contentTokens := []*tokenizer.Token{}
|
|
|
|
matched := false
|
|
|
|
for _, token := range tokens[1:] {
|
|
|
|
if token.Type == tokenizer.Newline {
|
2023-12-13 21:00:13 +08:00
|
|
|
return 0, false
|
2023-05-23 21:11:01 +08:00
|
|
|
}
|
|
|
|
if token.Type == prefixTokenType {
|
|
|
|
matched = true
|
|
|
|
break
|
|
|
|
}
|
|
|
|
contentTokens = append(contentTokens, token)
|
|
|
|
}
|
|
|
|
if !matched || len(contentTokens) == 0 {
|
2023-12-13 21:00:13 +08:00
|
|
|
return 0, false
|
|
|
|
}
|
|
|
|
|
|
|
|
return len(contentTokens) + 2, true
|
|
|
|
}
|
|
|
|
|
2023-12-13 23:50:05 +08:00
|
|
|
func (p *ItalicParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
|
2023-12-13 21:00:13 +08:00
|
|
|
size, ok := p.Match(tokens)
|
|
|
|
if size == 0 || !ok {
|
2023-12-13 23:50:05 +08:00
|
|
|
return nil, errors.New("not matched")
|
2023-05-23 21:11:01 +08:00
|
|
|
}
|
|
|
|
|
2023-12-13 21:00:13 +08:00
|
|
|
prefixTokenType := tokens[0].Type
|
|
|
|
contentTokens := tokens[1 : size-1]
|
|
|
|
return &ast.Italic{
|
|
|
|
Symbol: prefixTokenType,
|
|
|
|
Content: tokenizer.Stringify(contentTokens),
|
2023-12-13 23:50:05 +08:00
|
|
|
}, nil
|
2023-05-23 21:11:01 +08:00
|
|
|
}
|