Browse Source

:art: Improve highlighting of numbers and letters in global search preview Fix https://github.com/siyuan-note/siyuan/issues/8100

Liang Ding 2 years ago
parent
commit
a5be0036e9
3 changed files with 10 additions and 7 deletions
  1. 1 1
      kernel/model/file.go
  2. 3 3
      kernel/model/search.go
  3. 6 3
      kernel/search/mark.go

+ 1 - 1
kernel/model/file.go

@@ -676,7 +676,7 @@ func GetDoc(startID, endID, id string, index int, keyword string, mode int, size
 			// 支持代码块搜索定位 https://github.com/siyuan-note/siyuan/issues/5520
 			if ast.NodeCodeBlockCode == n.Type && 0 < len(keywords) && !treenode.IsChartCodeBlockCode(n) {
 				text := string(n.Tokens)
-				text = search.EncloseHighlighting(text, keywords, search.SearchMarkLeft, search.SearchMarkRight, Conf.Search.CaseSensitive)
+				text = search.EncloseHighlighting(text, keywords, search.SearchMarkLeft, search.SearchMarkRight, Conf.Search.CaseSensitive, false)
 				n.Tokens = gulu.Str.ToBytes(text)
 			}


+ 3 - 3
kernel/model/search.go

@@ -1001,7 +1001,7 @@ func stringQuery(query string) string {
 func markReplaceSpan(n *ast.Node, unlinks *[]*ast.Node, keywords []string, markSpanDataType string, luteEngine *lute.Lute) bool {
 	text := n.Content()
 	if ast.NodeText == n.Type {
-		text = search.EncloseHighlighting(text, keywords, search.GetMarkSpanStart(markSpanDataType), search.GetMarkSpanEnd(), Conf.Search.CaseSensitive)
+		text = search.EncloseHighlighting(text, keywords, search.GetMarkSpanStart(markSpanDataType), search.GetMarkSpanEnd(), Conf.Search.CaseSensitive, false)
		n.Tokens = gulu.Str.ToBytes(text)
 		if bytes.Contains(n.Tokens, []byte(search.MarkDataType)) {
 			linkTree := parse.Inline("", n.Tokens, luteEngine.ParseOptions)
@@ -1022,7 +1022,7 @@ func markReplaceSpan(n *ast.Node, unlinks *[]*ast.Node, keywords []string, markS
 		}

 		startTag := search.GetMarkSpanStart(markSpanDataType)
-		text = search.EncloseHighlighting(text, keywords, startTag, search.GetMarkSpanEnd(), Conf.Search.CaseSensitive)
+		text = search.EncloseHighlighting(text, keywords, startTag, search.GetMarkSpanEnd(), Conf.Search.CaseSensitive, false)
 		if strings.Contains(text, search.MarkDataType) {
 			dataType := search.GetMarkSpanStart(n.TextMarkType + " " + search.MarkDataType)
 			text = strings.ReplaceAll(text, startTag, dataType)
@@ -1075,7 +1075,7 @@ func markReplaceSpanWithSplit(text string, keywords []string, replacementStart,
 	// 虚拟引用和反链提及关键字按最长匹配优先 https://github.com/siyuan-note/siyuan/issues/7465
 	sort.Slice(keywords, func(i, j int) bool { return len(keywords[i]) > len(keywords[j]) })

-	tmp := search.EncloseHighlighting(text, keywords, replacementStart, replacementEnd, Conf.Search.CaseSensitive)
+	tmp := search.EncloseHighlighting(text, keywords, replacementStart, replacementEnd, Conf.Search.CaseSensitive, true)
 	parts := strings.Split(tmp, replacementEnd)
 	buf := bytes.Buffer{}
 	for i := 0; i < len(parts); i++ {

+ 6 - 3
kernel/search/mark.go

@@ -33,7 +33,7 @@ func MarkText(text string, keyword string, beforeLen int, caseSensitive bool) (p
 	}
 	text = util.EscapeHTML(text)
 	keywords := SplitKeyword(keyword)
-	marked = EncloseHighlighting(text, keywords, "<mark>", "</mark>", caseSensitive)
+	marked = EncloseHighlighting(text, keywords, "<mark>", "</mark>", caseSensitive, false)

 	pos = strings.Index(marked, "<mark>")
 	if 0 > pos {
@@ -81,14 +81,17 @@ func SplitKeyword(keyword string) (keywords []string) {
 	return
 }

-func EncloseHighlighting(text string, keywords []string, openMark, closeMark string, caseSensitive bool) (ret string) {
+func EncloseHighlighting(text string, keywords []string, openMark, closeMark string, caseSensitive, splitWords bool) (ret string) {
 	ic := "(?i)"
 	if caseSensitive {
 		ic = "(?)"
 	}
 	re := ic + "("
 	for i, k := range keywords {
-		wordBoundary := lex.IsASCIILetterNums(gulu.Str.ToBytes(k)) // Improve virtual reference split words https://github.com/siyuan-note/siyuan/issues/7833
+		wordBoundary := false
+		if splitWords {
+			wordBoundary = lex.IsASCIILetterNums(gulu.Str.ToBytes(k)) // Improve virtual reference split words https://github.com/siyuan-note/siyuan/issues/7833
+		}
 		k = regexp.QuoteMeta(k)
 		re += "("
 		if wordBoundary {