mirror of https://github.com/usememos/memos.git (synced 2025-12-16 05:43:06 +08:00)

chore: add missing punctuation in comments

commit 6cb96ef65e, parent 739fd2cde6
10 changed files with 48 additions and 66 deletions
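Most hunks below only add the terminating period that Go doc-comment style (and linters such as godot) expect; a handful also merge duplicate switch cases, add documented empty default branches, tighten test type assertions, and drop an unused helper. The punctuation pattern, schematically (not a line from the diff):

```go
// Before (flagged by a punctuation linter):
// Option configures the markdown service

// After:
// Option configures the markdown service.
type Option func(*config)
```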
@@ -10,7 +10,7 @@ import (
 
 type tagExtension struct{}
 
-// TagExtension is a goldmark extension for #tag syntax
+// TagExtension is a goldmark extension for #tag syntax.
 var TagExtension = &tagExtension{}
 
 // Extend extends the goldmark parser with tag support.
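Only the doc comment changes here; the body of Extend sits outside the hunk. For orientation, a goldmark inline-syntax extension typically wires its parser in roughly this way (a sketch against goldmark's public API, not the code in this file; the package name mdext and the priority 500 are assumptions):

```go
package mdext

import (
	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/parser"
	"github.com/yuin/goldmark/util"
)

type tagExtension struct{}

// TagExtension is a goldmark extension for #tag syntax.
var TagExtension = &tagExtension{}

// Extend extends the goldmark parser with tag support.
func (*tagExtension) Extend(m goldmark.Markdown) {
	// NewTagParser is the inline parser defined elsewhere in this package
	// (its signature appears later in this diff).
	m.Parser().AddOptions(
		parser.WithInlineParsers(util.Prioritized(NewTagParser(), 500)),
	)
}
```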
@@ -10,7 +10,7 @@ import (
 
 type wikilinkExtension struct{}
 
-// WikilinkExtension is a goldmark extension for [[...]] wikilink syntax
+// WikilinkExtension is a goldmark extension for [[...]] wikilink syntax.
 var WikilinkExtension = &wikilinkExtension{}
 
 // Extend extends the goldmark parser with wikilink support.
@@ -17,7 +17,7 @@ import (
 	storepb "github.com/usememos/memos/proto/gen/store"
 )
 
-// ExtractedData contains all metadata extracted from markdown in a single pass
+// ExtractedData contains all metadata extracted from markdown in a single pass.
 type ExtractedData struct {
 	Tags     []string
 	Property *storepb.MemoPayload_Property

@@ -57,12 +57,12 @@ type Service interface {
 	RenameTag(content []byte, oldTag, newTag string) (string, error)
 }
 
-// service implements the Service interface
+// service implements the Service interface.
 type service struct {
 	md goldmark.Markdown
 }
 
-// Option configures the markdown service
+// Option configures the markdown service.
 type Option func(*config)
 
 type config struct {
@@ -70,21 +70,21 @@ type config struct {
 	enableWikilink bool
 }
 
-// WithTagExtension enables #tag parsing
+// WithTagExtension enables #tag parsing.
 func WithTagExtension() Option {
 	return func(c *config) {
 		c.enableTags = true
 	}
 }
 
-// WithWikilinkExtension enables [[wikilink]] parsing
+// WithWikilinkExtension enables [[wikilink]] parsing.
 func WithWikilinkExtension() Option {
 	return func(c *config) {
 		c.enableWikilink = true
 	}
 }
 
-// NewService creates a new markdown service with the given options
+// NewService creates a new markdown service with the given options.
 func NewService(opts ...Option) Service {
 	cfg := &config{}
 	for _, opt := range opts {
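This hunk shows the whole functional-options surface: two Option constructors that flip flags on the private config, and NewService applying them in order. A usage sketch based on the signatures above (that ExtractTags is exposed on the Service interface is assumed here):

```go
func exampleUsage() ([]string, error) {
	// Enable both inline syntaxes via functional options.
	svc := NewService(
		WithTagExtension(),      // sets config.enableTags
		WithWikilinkExtension(), // sets config.enableWikilink
	)

	// ExtractTags walks the parsed AST and returns the tags it finds,
	// lowercased and de-duplicated (see uniqueLowercase further down).
	return svc.ExtractTags([]byte("notes on #Work and #work"))
}
```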
@@ -115,14 +115,14 @@ func NewService(opts ...Option) Service {
 	}
 }
 
-// parse is an internal helper to parse content into AST
+// parse is an internal helper to parse content into AST.
 func (s *service) parse(content []byte) (gast.Node, error) {
 	reader := text.NewReader(content)
 	doc := s.md.Parser().Parse(reader)
 	return doc, nil
 }
 
-// ExtractTags returns all #tags found in content
+// ExtractTags returns all #tags found in content.
 func (s *service) ExtractTags(content []byte) ([]string, error) {
 	root, err := s.parse(content)
 	if err != nil {

@@ -153,7 +153,7 @@ func (s *service) ExtractTags(content []byte) ([]string, error) {
 	return uniqueLowercase(tags), nil
 }
 
-// ExtractProperties computes boolean properties about the content
+// ExtractProperties computes boolean properties about the content.
 func (s *service) ExtractProperties(content []byte) (*storepb.MemoPayload_Property, error) {
 	root, err := s.parse(content)
 	if err != nil {
@@ -171,15 +171,9 @@ func (s *service) ExtractProperties(content []byte) (*storepb.MemoPayload_Proper
+		case gast.KindLink, mast.KindWikilink:
 			prop.HasLink = true
-		case mast.KindWikilink:
-			prop.HasLink = true
+		case gast.KindCodeBlock, gast.KindFencedCodeBlock, gast.KindCodeSpan:
 			prop.HasCode = true
-		case gast.KindCodeSpan:
-			prop.HasCode = true
 		case east.KindTaskCheckBox:
 			prop.HasTaskList = true
 			if checkBox, ok := n.(*east.TaskCheckBox); ok {

@@ -187,6 +181,8 @@ func (s *service) ExtractProperties(content []byte) (*storepb.MemoPayload_Proper
 				prop.HasIncompleteTasks = true
 			}
 		}
+		default:
+			// No special handling for other node types
 		}
 
 		return gast.WalkContinue, nil
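The added default branches carry only a comment. A Go switch without a default already does nothing for unmatched values and execution continues after the switch, so behavior is unchanged; the explicit branch just documents that the omission is deliberate and satisfies linters that flag switches with no default. Schematically:

```go
switch n.Kind() {
case gast.KindCodeBlock, gast.KindFencedCodeBlock, gast.KindCodeSpan:
	prop.HasCode = true
default:
	// No special handling for other node types.
}
// Control reaches this point for every node kind either way.
```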
@@ -199,7 +195,7 @@ func (s *service) ExtractProperties(content []byte) (*storepb.MemoPayload_Proper
 	return prop, nil
 }
 
-// ExtractReferences returns all wikilink references found in content
+// ExtractReferences returns all wikilink references found in content.
 func (s *service) ExtractReferences(content []byte) ([]string, error) {
 	root, err := s.parse(content)
 	if err != nil {

@@ -229,7 +225,7 @@ func (s *service) ExtractReferences(content []byte) ([]string, error) {
 	return references, nil
 }
 
-// RenderMarkdown renders goldmark AST back to markdown text
+// RenderMarkdown renders goldmark AST back to markdown text.
 func (s *service) RenderMarkdown(content []byte) (string, error) {
 	root, err := s.parse(content)
 	if err != nil {

@@ -240,7 +236,7 @@ func (s *service) RenderMarkdown(content []byte) (string, error) {
 	return mdRenderer.Render(root, content), nil
 }
 
-// RenderHTML renders markdown content to HTML using goldmark's built-in HTML renderer
+// RenderHTML renders markdown content to HTML using goldmark's built-in HTML renderer.
 func (s *service) RenderHTML(content []byte) (string, error) {
 	var buf bytes.Buffer
 	if err := s.md.Convert(content, &buf); err != nil {

@@ -249,7 +245,7 @@ func (s *service) RenderHTML(content []byte) (string, error) {
 	return buf.String(), nil
 }
 
-// GenerateSnippet creates a plain text summary from markdown content
+// GenerateSnippet creates a plain text summary from markdown content.
 func (s *service) GenerateSnippet(content []byte, maxLength int) (string, error) {
 	root, err := s.parse(content)
 	if err != nil {
@@ -265,6 +261,8 @@ func (s *service) GenerateSnippet(content []byte, maxLength int) (string, error)
 		switch n.Kind() {
 		case gast.KindCodeBlock, gast.KindFencedCodeBlock, gast.KindCodeSpan:
 			return gast.WalkSkipChildren, nil
+		default:
+			// Continue walking for other node types
 		}
 
 		// Add space before block elements (except first)

@@ -273,6 +271,8 @@ func (s *service) GenerateSnippet(content []byte, maxLength int) (string, error)
 			if buf.Len() > 0 && lastNodeWasBlock {
 				buf.WriteByte(' ')
 			}
+		default:
+			// No space needed for other node types
 		}
 	}
 

@@ -281,6 +281,8 @@ func (s *service) GenerateSnippet(content []byte, maxLength int) (string, error)
 		switch n.Kind() {
 		case gast.KindParagraph, gast.KindHeading, gast.KindListItem:
 			lastNodeWasBlock = true
+		default:
+			// Not a block element
 		}
 		return gast.WalkContinue, nil
 	}
@@ -321,14 +323,14 @@ func (s *service) GenerateSnippet(content []byte, maxLength int) (string, error)
 	return strings.TrimSpace(snippet), nil
 }
 
-// ValidateContent checks if the markdown content is valid
+// ValidateContent checks if the markdown content is valid.
 func (s *service) ValidateContent(content []byte) error {
 	// Try to parse the content
 	_, err := s.parse(content)
 	return err
 }
 
-// ExtractAll extracts tags, properties, and references in a single parse for efficiency
+// ExtractAll extracts tags, properties, and references in a single parse for efficiency.
 func (s *service) ExtractAll(content []byte) (*ExtractedData, error) {
 	root, err := s.parse(content)
 	if err != nil {
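ExtractAll exists so callers that need several kinds of metadata pay for one parse instead of calling ExtractTags, ExtractProperties, and ExtractReferences separately. A sketch of the call pattern, using the ExtractedData fields shown earlier in this diff (that ExtractAll is exposed on the Service interface is assumed):

```go
func indexMemo(svc Service, content []byte) error {
	// A single parse yields tags and content properties together.
	data, err := svc.ExtractAll(content)
	if err != nil {
		return err
	}

	_ = data.Tags     // []string of #tags found in the content
	_ = data.Property // *storepb.MemoPayload_Property with HasLink, HasCode, task flags
	return nil
}
```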
@@ -362,15 +364,9 @@ func (s *service) ExtractAll(content []byte) (*ExtractedData, error) {
+		case gast.KindLink, mast.KindWikilink:
 			data.Property.HasLink = true
-		case mast.KindWikilink:
-			data.Property.HasLink = true
+		case gast.KindCodeBlock, gast.KindFencedCodeBlock, gast.KindCodeSpan:
 			data.Property.HasCode = true
-		case gast.KindCodeSpan:
-			data.Property.HasCode = true
 		case east.KindTaskCheckBox:
 			data.Property.HasTaskList = true
 			if checkBox, ok := n.(*east.TaskCheckBox); ok {

@@ -378,6 +374,8 @@ func (s *service) ExtractAll(content []byte) (*ExtractedData, error) {
 				data.Property.HasIncompleteTasks = true
 			}
 		}
+		default:
+			// No special handling for other node types
 		}
 
 		return gast.WalkContinue, nil
@@ -393,7 +391,7 @@ func (s *service) ExtractAll(content []byte) (*ExtractedData, error) {
 	return data, nil
 }
 
-// RenameTag renames all occurrences of oldTag to newTag in content
+// RenameTag renames all occurrences of oldTag to newTag in content.
 func (s *service) RenameTag(content []byte, oldTag, newTag string) (string, error) {
 	root, err := s.parse(content)
 	if err != nil {

@@ -425,7 +423,7 @@ func (s *service) RenameTag(content []byte, oldTag, newTag string) (string, erro
 	return mdRenderer.Render(root, content), nil
 }
 
-// uniqueLowercase returns unique lowercase strings from input
+// uniqueLowercase returns unique lowercase strings from input.
 func uniqueLowercase(strs []string) []string {
 	seen := make(map[string]bool)
 	var result []string

@@ -441,7 +439,7 @@ func uniqueLowercase(strs []string) []string {
 	return result
 }
 
-// truncateAtWord truncates a string at the last word boundary before maxLength
+// truncateAtWord truncates a string at the last word boundary before maxLength.
 func truncateAtWord(s string, maxLength int) string {
 	if len(s) <= maxLength {
 		return s
@@ -446,7 +446,7 @@ func TestTruncateAtWord(t *testing.T) {
 	}
 }
 
-// Benchmark tests
+// Benchmark tests.
 func BenchmarkGenerateSnippet(b *testing.B) {
 	svc := NewService()
 	content := []byte(`# Large Document
@@ -10,7 +10,7 @@ import (
 
 type tagParser struct{}
 
-// NewTagParser creates a new inline parser for #tag syntax
+// NewTagParser creates a new inline parser for #tag syntax.
 func NewTagParser() parser.InlineParser {
 	return &tagParser{}
 }

@@ -20,8 +20,8 @@ func (*tagParser) Trigger() []byte {
 	return []byte{'#'}
 }
 
-// Parse parses #tag syntax
-func (p *tagParser) Parse(parent gast.Node, block text.Reader, pc parser.Context) gast.Node {
+// Parse parses #tag syntax.
+func (*tagParser) Parse(_ gast.Node, block text.Reader, _ parser.Context) gast.Node {
 	line, _ := block.PeekLine()
 
 	// Must start with #
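Besides the comment period, this hunk renames the receiver and the unused parameters to the blank identifier: the method still satisfies goldmark's parser.InlineParser interface, but the signature now states that only the reader is consulted (the style linters such as revive's unused-parameter rule push for). A minimal illustration with a hypothetical noopParser (the '~' trigger is made up):

```go
type noopParser struct{}

// Trigger tells goldmark which byte should invoke this parser.
func (*noopParser) Trigger() []byte { return []byte{'~'} }

// Parse ignores the parent node and the parser context, so both are declared
// as _; returning nil tells goldmark the trigger did not actually match.
func (*noopParser) Parse(_ gast.Node, block text.Reader, _ parser.Context) gast.Node {
	_, _ = block.PeekLine() // inspect the current line, then decline
	return nil
}
```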
@@ -110,7 +110,8 @@ func TestTagParser(t *testing.T) {
 				require.NotNil(t, node, "Expected tag to be parsed")
 				require.IsType(t, &mast.TagNode{}, node)
 
-				tagNode := node.(*mast.TagNode)
+				tagNode, ok := node.(*mast.TagNode)
+				require.True(t, ok, "Expected node to be *mast.TagNode")
 				assert.Equal(t, tt.expectedTag, string(tagNode.Tag))
 			} else {
 				assert.Nil(t, node, "Expected tag NOT to be parsed")

@@ -137,7 +138,8 @@ func TestTagParser_MultipleTags(t *testing.T) {
 	// Parse first tag
 	node1 := p.Parse(nil, reader, ctx)
 	require.NotNil(t, node1)
-	tagNode1 := node1.(*mast.TagNode)
+	tagNode1, ok := node1.(*mast.TagNode)
+	require.True(t, ok, "Expected node1 to be *mast.TagNode")
 	assert.Equal(t, "tag1", string(tagNode1.Tag))
 
 	// Advance past the space

@@ -146,7 +148,8 @@ func TestTagParser_MultipleTags(t *testing.T) {
 	// Parse second tag
 	node2 := p.Parse(nil, reader, ctx)
 	require.NotNil(t, node2)
-	tagNode2 := node2.(*mast.TagNode)
+	tagNode2, ok := node2.(*mast.TagNode)
+	require.True(t, ok, "Expected node2 to be *mast.TagNode")
 	assert.Equal(t, "tag2", string(tagNode2.Tag))
 }
 
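Each of these test hunks swaps a bare type assertion for the two-value ("comma, ok") form. A bare assertion panics when the dynamic type does not match, which aborts the run with a stack trace; the two-value form turns the mismatch into an ordinary, readable test failure via require.True. The pattern in isolation (assertTagNode is a hypothetical helper):

```go
func assertTagNode(t *testing.T, node gast.Node) *mast.TagNode {
	// Old form: tagNode := node.(*mast.TagNode)
	// panics on a mismatch instead of failing the test cleanly.
	tagNode, ok := node.(*mast.TagNode)
	require.True(t, ok, "Expected node to be *mast.TagNode")
	return tagNode
}
```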
@@ -12,7 +12,7 @@ import (
 
 type wikilinkParser struct{}
 
-// NewWikilinkParser creates a new inline parser for [[...]] wikilink syntax
+// NewWikilinkParser creates a new inline parser for [[...]] wikilink syntax.
 func NewWikilinkParser() parser.InlineParser {
 	return &wikilinkParser{}
 }

@@ -23,7 +23,7 @@ func (*wikilinkParser) Trigger() []byte {
 }
 
 // Parse parses [[target]] or [[target?params]] wikilink syntax.
-func (*wikilinkParser) Parse(parent gast.Node, block text.Reader, pc parser.Context) gast.Node {
+func (*wikilinkParser) Parse(_ gast.Node, block text.Reader, _ parser.Context) gast.Node {
 	line, _ := block.PeekLine()
 
 	// Must start with [[

@@ -64,8 +64,8 @@ func (*wikilinkParser) Parse(parent gast.Node, block text.Reader, pc parser.Cont
 	return node
 }
 
-// findClosingBrackets finds the position of ]] in the byte slice
-// Returns -1 if not found
+// findClosingBrackets finds the position of ]] in the byte slice.
+// Returns -1 if not found.
 func findClosingBrackets(data []byte) int {
 	for i := 0; i < len(data)-1; i++ {
 		if data[i] == ']' && data[i+1] == ']' {
@@ -75,7 +75,7 @@ func findClosingBrackets(data []byte) int {
 	return -1
 }
 
-// parseTargetAndParams splits content on ? to extract target and parameters
+// parseTargetAndParams splits content on ? to extract target and parameters.
 func parseTargetAndParams(content []byte) (target []byte, params []byte) {
 	// Find ? separator
 	idx := bytes.IndexByte(content, '?')
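Only the first statement of parseTargetAndParams falls inside this hunk. Judging from the doc comment and the bytes.IndexByte call, the remainder plausibly looks like the sketch below; this is an assumption about the unshown body, not the file's actual code:

```go
// parseTargetAndParams splits content on ? to extract target and parameters.
func parseTargetAndParams(content []byte) (target []byte, params []byte) {
	// Find ? separator.
	idx := bytes.IndexByte(content, '?')
	if idx < 0 {
		// No parameters: the whole content is the target.
		return content, nil
	}
	// Bytes before ? form the target; bytes after it form the params.
	return content[:idx], content[idx+1:]
}
```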
@@ -124,7 +124,8 @@ func TestWikilinkParser(t *testing.T) {
 				require.NotNil(t, node, "Expected wikilink to be parsed")
 				require.IsType(t, &mast.WikilinkNode{}, node)
 
-				wikilinkNode := node.(*mast.WikilinkNode)
+				wikilinkNode, ok := node.(*mast.WikilinkNode)
+				require.True(t, ok, "Expected node to be *mast.WikilinkNode")
 				assert.Equal(t, tt.expectedTarget, string(wikilinkNode.Target))
 				assert.Equal(t, tt.expectedParams, string(wikilinkNode.Params))
 			} else {
@@ -252,7 +252,7 @@ func (r *MarkdownRenderer) renderListItem(node *gast.ListItem, source []byte, de
 
 	// Add list marker
 	if list.IsOrdered() {
-		r.buf.WriteString(fmt.Sprintf("%d. ", list.Start))
+		fmt.Fprintf(r.buf, "%d. ", list.Start)
 		list.Start++ // Increment for next item
 	} else {
 		r.buf.WriteString("- ")
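This is the one behavior-neutral rewrite in the renderer: fmt.Fprintf formats straight into the writer (a bytes.Buffer pointer satisfies io.Writer), whereas the old form built a temporary string with fmt.Sprintf and then copied it in with WriteString. Side by side, runnable on its own:

```go
package main

import (
	"bytes"
	"fmt"
)

func main() {
	var buf bytes.Buffer

	// Old form: format into a temporary string, then copy it into the buffer.
	buf.WriteString(fmt.Sprintf("%d. ", 3))

	// New form: format directly into the io.Writer, no intermediate string.
	fmt.Fprintf(&buf, "%d. ", 4)

	fmt.Println(buf.String()) // "3. 4. "
}
```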
@@ -6,7 +6,6 @@ import (
 	"log/slog"
 	"strings"
 	"time"
-	"unicode/utf8"
 
 	"github.com/lithammer/shortuuid/v4"
 	"github.com/pkg/errors"
@@ -845,25 +844,6 @@ func (s *APIV1Service) getMemoContentSnippet(content string) (string, error) {
 	return snippet, nil
 }
 
-func substring(s string, length int) string {
-	if length <= 0 {
-		return ""
-	}
-
-	runeCount := 0
-	byteIndex := 0
-	for byteIndex < len(s) {
-		_, size := utf8.DecodeRuneInString(s[byteIndex:])
-		byteIndex += size
-		runeCount++
-		if runeCount == length {
-			break
-		}
-	}
-
-	return s[:byteIndex]
-}
-
 // parseMemoOrderBy parses the order_by field and sets the appropriate ordering in memoFind.
 // Follows AIP-132: supports comma-separated list of fields with optional "desc" suffix.
 // Example: "pinned desc, display_time desc" or "create_time asc".
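The deleted substring helper (and, with it, the unicode/utf8 import dropped in the previous hunk) truncated a string after a given number of runes rather than bytes, so multi-byte characters were never cut in half; snippet truncation presumably now lives entirely in the markdown service shown earlier. What the helper guarded against, in isolation:

```go
package main

import (
	"fmt"
	"unicode/utf8"
)

func main() {
	s := "héllo" // 'é' is two bytes in UTF-8

	// Byte slicing can split a rune and produce invalid UTF-8.
	fmt.Println(s[:2], utf8.ValidString(s[:2])) // "h\xc3" false

	// Rune-aware truncation (what the removed helper did): advance
	// rune by rune and cut only at a rune boundary.
	byteIndex, runeCount := 0, 0
	for byteIndex < len(s) && runeCount < 2 {
		_, size := utf8.DecodeRuneInString(s[byteIndex:])
		byteIndex += size
		runeCount++
	}
	fmt.Println(s[:byteIndex], utf8.ValidString(s[:byteIndex])) // "hé" true
}
```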