⚡ Improve global searching (group by doc) performance https://github.com/siyuan-note/siyuan/issues/11951
parent 5f56e42dee
commit f75e383728
1 changed file with 9 additions and 1 deletion

@@ -328,6 +328,7 @@ func SearchRefBlock(id, rootID, keyword string, beforeLen int, isSquareBrackets,
     for _, ref := range refs {
         btsID = append(btsID, ref.DefBlockRootID)
     }
+    btsID = gulu.Str.RemoveDuplicatedElem(btsID)
     bts := treenode.GetBlockTrees(btsID)
     for _, ref := range refs {
         tree := cachedTrees[ref.DefBlockRootID]

@@ -369,6 +370,7 @@ func SearchRefBlock(id, rootID, keyword string, beforeLen int, isSquareBrackets,
     for _, b := range ret {
         btsID = append(btsID, b.RootID)
     }
+    btsID = gulu.Str.RemoveDuplicatedElem(btsID)
     bts := treenode.GetBlockTrees(btsID)
     for _, b := range ret {
         tree := cachedTrees[b.RootID]
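
Both SearchRefBlock hunks make the same small change: the collected root IDs are deduplicated before the batched block-tree lookup, so a document referenced by many results is only resolved once. The sketch below shows what such a dedup step does; removeDuplicated is a hypothetical stand-in for gulu.Str.RemoveDuplicatedElem, not the library's actual implementation.

```go
package main

import "fmt"

// removeDuplicated is a hypothetical stand-in for gulu.Str.RemoveDuplicatedElem:
// it keeps the first occurrence of each ID and preserves the original order.
func removeDuplicated(ids []string) []string {
	seen := make(map[string]bool, len(ids))
	var out []string
	for _, id := range ids {
		if !seen[id] {
			seen[id] = true
			out = append(out, id)
		}
	}
	return out
}

func main() {
	// Many refs can point into the same document, so the raw root-ID list
	// usually contains duplicates.
	rootIDs := []string{"doc-a", "doc-b", "doc-a", "doc-a", "doc-c"}
	fmt.Println(removeDuplicated(rootIDs)) // [doc-a doc-b doc-c]
}
```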

@@ -899,11 +901,17 @@ func FullTextSearchBlock(query string, boxes, paths []string, types map[string]b
     rootMap := map[string]bool{}
     var rootIDs []string
     contentSorts := map[string]int{}
+    var btsID []string
+    for _, b := range blocks {
+        btsID = append(btsID, b.RootID)
+    }
+    btsID = gulu.Str.RemoveDuplicatedElem(btsID)
+    bts := treenode.GetBlockTrees(btsID)
     for _, b := range blocks {
         if _, ok := rootMap[b.RootID]; !ok {
             rootMap[b.RootID] = true
             rootIDs = append(rootIDs, b.RootID)
-            tree, _ := LoadTreeByBlockID(b.RootID)
+            tree, _ := loadTreeByBlockTree(bts[b.RootID])
             if nil == tree {
                 continue
             }
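
In FullTextSearchBlock, the per-document LoadTreeByBlockID call inside the loop is replaced by a single batched treenode.GetBlockTrees lookup up front, with loadTreeByBlockTree then reading each pre-resolved entry. The sketch below illustrates that collect, dedup, batch-fetch, then index pattern under simplified assumptions; the types and the getBlockTreesBatch / loadTree helpers are hypothetical stand-ins, not the kernel's real API.

```go
package main

import "fmt"

// BlockTree and Tree are simplified stand-ins for the kernel types.
type BlockTree struct{ RootID string }
type Tree struct{ ID string }

// getBlockTreesBatch is a hypothetical batched lookup in the spirit of
// treenode.GetBlockTrees: one call resolves every root ID at once instead of
// hitting storage once per block.
func getBlockTreesBatch(rootIDs []string) map[string]*BlockTree {
	ret := make(map[string]*BlockTree, len(rootIDs))
	for _, id := range rootIDs {
		ret[id] = &BlockTree{RootID: id}
	}
	return ret
}

// loadTree is a hypothetical stand-in for loadTreeByBlockTree: it builds a
// tree from an already-resolved block-tree entry.
func loadTree(bt *BlockTree) *Tree {
	if bt == nil {
		return nil
	}
	return &Tree{ID: bt.RootID}
}

func main() {
	rootIDs := []string{"doc-a", "doc-b", "doc-c"}

	// Fetch the block trees for all root IDs in one batch, then load each
	// tree from the pre-fetched entry instead of calling a per-ID loader
	// inside the loop.
	bts := getBlockTreesBatch(rootIDs)
	for _, id := range rootIDs {
		tree := loadTree(bts[id])
		if tree == nil {
			continue
		}
		fmt.Println("loaded tree for document", tree.ID)
	}
}
```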