🎨 Improve highlighting of numbers and letters in global search preview. Fixes https://github.com/siyuan-note/siyuan/issues/8100
This commit is contained in:
parent
2494412f77
commit
a5be0036e9
3 changed files with 10 additions and 7 deletions
|
@ -676,7 +676,7 @@ func GetDoc(startID, endID, id string, index int, keyword string, mode int, size
|
|||
// 支持代码块搜索定位 https://github.com/siyuan-note/siyuan/issues/5520
|
||||
if ast.NodeCodeBlockCode == n.Type && 0 < len(keywords) && !treenode.IsChartCodeBlockCode(n) {
|
||||
text := string(n.Tokens)
|
||||
text = search.EncloseHighlighting(text, keywords, search.SearchMarkLeft, search.SearchMarkRight, Conf.Search.CaseSensitive)
|
||||
text = search.EncloseHighlighting(text, keywords, search.SearchMarkLeft, search.SearchMarkRight, Conf.Search.CaseSensitive, false)
|
||||
n.Tokens = gulu.Str.ToBytes(text)
|
||||
}
|
||||
|
||||
|
|
|
@ -1001,7 +1001,7 @@ func stringQuery(query string) string {
|
|||
func markReplaceSpan(n *ast.Node, unlinks *[]*ast.Node, keywords []string, markSpanDataType string, luteEngine *lute.Lute) bool {
|
||||
text := n.Content()
|
||||
if ast.NodeText == n.Type {
|
||||
text = search.EncloseHighlighting(text, keywords, search.GetMarkSpanStart(markSpanDataType), search.GetMarkSpanEnd(), Conf.Search.CaseSensitive)
|
||||
text = search.EncloseHighlighting(text, keywords, search.GetMarkSpanStart(markSpanDataType), search.GetMarkSpanEnd(), Conf.Search.CaseSensitive, false)
|
||||
n.Tokens = gulu.Str.ToBytes(text)
|
||||
if bytes.Contains(n.Tokens, []byte(search.MarkDataType)) {
|
||||
linkTree := parse.Inline("", n.Tokens, luteEngine.ParseOptions)
|
||||
|
@ -1022,7 +1022,7 @@ func markReplaceSpan(n *ast.Node, unlinks *[]*ast.Node, keywords []string, markS
|
|||
}
|
||||
|
||||
startTag := search.GetMarkSpanStart(markSpanDataType)
|
||||
text = search.EncloseHighlighting(text, keywords, startTag, search.GetMarkSpanEnd(), Conf.Search.CaseSensitive)
|
||||
text = search.EncloseHighlighting(text, keywords, startTag, search.GetMarkSpanEnd(), Conf.Search.CaseSensitive, false)
|
||||
if strings.Contains(text, search.MarkDataType) {
|
||||
dataType := search.GetMarkSpanStart(n.TextMarkType + " " + search.MarkDataType)
|
||||
text = strings.ReplaceAll(text, startTag, dataType)
|
||||
|
@ -1075,7 +1075,7 @@ func markReplaceSpanWithSplit(text string, keywords []string, replacementStart,
|
|||
// 虚拟引用和反链提及关键字按最长匹配优先 https://github.com/siyuan-note/siyuan/issues/7465
|
||||
sort.Slice(keywords, func(i, j int) bool { return len(keywords[i]) > len(keywords[j]) })
|
||||
|
||||
tmp := search.EncloseHighlighting(text, keywords, replacementStart, replacementEnd, Conf.Search.CaseSensitive)
|
||||
tmp := search.EncloseHighlighting(text, keywords, replacementStart, replacementEnd, Conf.Search.CaseSensitive, true)
|
||||
parts := strings.Split(tmp, replacementEnd)
|
||||
buf := bytes.Buffer{}
|
||||
for i := 0; i < len(parts); i++ {
|
||||
|
|
|
@ -33,7 +33,7 @@ func MarkText(text string, keyword string, beforeLen int, caseSensitive bool) (p
|
|||
}
|
||||
text = util.EscapeHTML(text)
|
||||
keywords := SplitKeyword(keyword)
|
||||
marked = EncloseHighlighting(text, keywords, "<mark>", "</mark>", caseSensitive)
|
||||
marked = EncloseHighlighting(text, keywords, "<mark>", "</mark>", caseSensitive, false)
|
||||
|
||||
pos = strings.Index(marked, "<mark>")
|
||||
if 0 > pos {
|
||||
|
@ -81,14 +81,17 @@ func SplitKeyword(keyword string) (keywords []string) {
|
|||
return
|
||||
}
|
||||
|
||||
func EncloseHighlighting(text string, keywords []string, openMark, closeMark string, caseSensitive bool) (ret string) {
|
||||
func EncloseHighlighting(text string, keywords []string, openMark, closeMark string, caseSensitive, splitWords bool) (ret string) {
|
||||
ic := "(?i)"
|
||||
if caseSensitive {
|
||||
ic = "(?)"
|
||||
}
|
||||
re := ic + "("
|
||||
for i, k := range keywords {
|
||||
wordBoundary := lex.IsASCIILetterNums(gulu.Str.ToBytes(k)) // Improve virtual reference split words https://github.com/siyuan-note/siyuan/issues/7833
|
||||
wordBoundary := false
|
||||
if splitWords {
|
||||
wordBoundary = lex.IsASCIILetterNums(gulu.Str.ToBytes(k)) // Improve virtual reference split words https://github.com/siyuan-note/siyuan/issues/7833
|
||||
}
|
||||
k = regexp.QuoteMeta(k)
|
||||
re += "("
|
||||
if wordBoundary {
|
||||
|
|
Loading…
Add table
Reference in a new issue