Merge remote-tracking branch 'origin/dev' into dev
commit 2615f1a5d1
15 changed files with 82 additions and 51 deletions
@@ -1078,7 +1078,7 @@
 "export6": "About the handling of anchor text in PDF annotations when exporting",
 "export7": "File Name - Page Number - Anchor Text",
 "export8": "Just anchor text",
-"export9": "Anchor hash (only for exporting Notebook)",
+"export9": "Anchor hash",
 "graphConfig2": "Reference Count filter",
 "selectOpen": "Always Select Opened Doc",
 "selectOpen1": "Select Opened Doc",

@@ -1078,7 +1078,7 @@
 "export6": "Sobre el manejo del texto ancla en las anotaciones PDF al exportar",
 "export7": "Nombre de archivo - Número de página - Texto ancla",
 "export8": "Sólo texto ancla",
-"export9": "Hash de anclaje (sólo para exportar Notebook)",
+"export9": "Hash de anclaje",
 "graphConfig2": "Filtro de recuento de referencias",
 "selectOpen": "Seleccionar siempre el documento abierto",
 "selectOpen1": "Seleccionar documento abierto",

@@ -1078,7 +1078,7 @@
 "export6": "À propos de la gestion du texte d'ancrage dans les annotations PDF lors de l'exportation",
 "export7": "Nom de fichier - Numéro de page - Texte d'ancrage",
 "export8": "Anchor text only",
-"export9": "Hash d'ancrage (uniquement pour l'exportation de Notebook)",
+"export9": "Hash d'ancrage",
 "graphConfig2": "Filtre de compte de blocs de référence",
 "selectOpen": "Localisez toujours les documents ouverts",
 "selectOpen1": "Localiser les documents ouverts",

@@ -1078,7 +1078,7 @@
 "export6": "エクスポート時の PDF 注釈内のアンカーテキストの処理方法",
 "export7": "ファイル名 - ページ番号 - アンカーテキスト",
 "export8": "アンカーテキストのみ",
-"export9": "アンカーハッシュ (ノートブックのエクスポート専用)",
+"export9": "アンカーハッシュ",
 "graphConfig2": "参照カウントフィルタ",
 "selectOpen": "常に開いているドキュメントを選択",
 "selectOpen1": "開いているドキュメントをツリーで選択",

@@ -1078,7 +1078,7 @@
 "export6": "導出時關於 PDF 標註引出處錨文字的處理方式",
 "export7": "文件名 - 頁碼 - 錨文字",
 "export8": "僅錨文字",
-"export9": "錨點哈希(僅支援導出筆記本)",
+"export9": "錨點哈希",
 "graphConfig2": "引用塊次數過濾",
 "selectOpen": "定位打開的文檔",
 "selectOpen1": "定位打開的文檔",

@@ -1078,7 +1078,7 @@
 "export6": "导出时关于 PDF 标注引出处锚文本的处理方式",
 "export7": "文件名 - 页码 - 锚文本",
 "export8": "仅锚文本",
-"export9": "锚点哈希(仅支持导出笔记本)",
+"export9": "锚点哈希",
 "graphConfig2": "引用块次数过滤",
 "selectOpen": "始终定位打开的文档",
 "selectOpen1": "定位打开的文档",

app/stage/protyle/js/lute/lute.min.js (vendored, 2 changes)
File diff suppressed because one or more lines are too long

@@ -8,7 +8,7 @@ require (
     github.com/88250/epub v0.0.0-20230830085737-c19055cd1f48
     github.com/88250/go-humanize v0.0.0-20240424102817-4f78fac47ea7
     github.com/88250/gulu v1.2.3-0.20240612035750-c9cf5f7a4d02
-    github.com/88250/lute v1.7.7-0.20240624141038-a066485dd2b3
+    github.com/88250/lute v1.7.7-0.20240628132915-65118a2b453f
     github.com/88250/pdfcpu v0.3.14-0.20230401044135-c7369a99720c
     github.com/88250/vitess-sqlparser v0.0.0-20210205111146-56a2ded2aba1
     github.com/ClarkThan/ahocorasick v0.0.0-20231011042242-30d1ef1347f4

@@ -12,8 +12,8 @@ github.com/88250/go-sqlite3 v1.14.13-0.20231214121541-e7f54c482950 h1:Pa5hMiBceT
 github.com/88250/go-sqlite3 v1.14.13-0.20231214121541-e7f54c482950/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
 github.com/88250/gulu v1.2.3-0.20240612035750-c9cf5f7a4d02 h1:3e5+yobj655pTeKOYMbJrnc1mE51ZkbXIxquTYZuYCY=
 github.com/88250/gulu v1.2.3-0.20240612035750-c9cf5f7a4d02/go.mod h1:MUfzyfmbPrRDZLqxc7aPrVYveatTHRfoUa5TynPS0i8=
-github.com/88250/lute v1.7.7-0.20240624141038-a066485dd2b3 h1:mqoLTyzwLxx0i8bSaZsm/PDLc670ECDD1FYHtTbv2+k=
-github.com/88250/lute v1.7.7-0.20240624141038-a066485dd2b3/go.mod h1:VDAzL8b+oCh+e3NAlmwwLzC53ten0rZlS8NboB7ljtk=
+github.com/88250/lute v1.7.7-0.20240628132915-65118a2b453f h1:AFhzS6Pk/ITWge7YLhCxUKYM9vgBbhhKiJSbdjyPox8=
+github.com/88250/lute v1.7.7-0.20240628132915-65118a2b453f/go.mod h1:VDAzL8b+oCh+e3NAlmwwLzC53ten0rZlS8NboB7ljtk=
 github.com/88250/pdfcpu v0.3.14-0.20230401044135-c7369a99720c h1:Dl/8S9iLyPMTElnWIBxmjaLiWrkI5P4a21ivwAn5pU0=
 github.com/88250/pdfcpu v0.3.14-0.20230401044135-c7369a99720c/go.mod h1:S5YT38L/GCjVjmB4PB84PymA1qfopjEhfhTNQilLpv4=
 github.com/88250/vitess-sqlparser v0.0.0-20210205111146-56a2ded2aba1 h1:48T899JQDwyyRu9yXHePYlPdHtpJfrJEUGBMH3SMBWY=

@@ -2618,16 +2618,15 @@ func exportPandocConvertZip(exportNotebook bool, boxID, baseFolderName string, d
     }
 
     exportRefMode := Conf.Export.BlockRefMode
-    if !exportNotebook && 5 == exportRefMode {
-        // 非笔记本导出不支持锚点哈希,将其切换为锚文本块链
-        exportRefMode = 2
-    }
 
     var defBlockIDs []string
-    if exportNotebook && 5 == exportRefMode {
-        // Add a Ref export mode `Anchor hash` for notebook Markdown exporting https://github.com/siyuan-note/siyuan/issues/10265
-        // 导出笔记本时导出锚点哈希,这里先记录下所有定义块的 ID
+    if 5 == exportRefMode {
+        // 导出锚点哈希,这里先记录下所有定义块的 ID
         walked := map[string]bool{}
         for _, p := range docPaths {
             if walked[p] {
                 continue
             }
 
             docIAL := box.docIAL(p)
             if nil == docIAL {
                 continue
@@ -2638,18 +2637,25 @@ func exportPandocConvertZip(exportNotebook bool, boxID, baseFolderName string, d
                 continue
             }
             ast.Walk(tree.Root, func(n *ast.Node, entering bool) ast.WalkStatus {
-                if !entering {
+                if !entering || !treenode.IsBlockRef(n) {
                     return ast.WalkContinue
                 }
 
-                if treenode.IsBlockRef(n) {
-                    defID, _, _ := treenode.GetBlockRef(n)
+                defID, _, _ := treenode.GetBlockRef(n)
                 if defBt := treenode.GetBlockTree(defID); nil != defBt {
                     docPaths = append(docPaths, defBt.Path)
                     docPaths = gulu.Str.RemoveDuplicatedElem(docPaths)
 
                     defBlockIDs = append(defBlockIDs, defID)
                     defBlockIDs = gulu.Str.RemoveDuplicatedElem(defBlockIDs)
 
                     walked[defBt.Path] = true
                 }
                 return ast.WalkContinue
             })
         }
         defBlockIDs = gulu.Str.RemoveDuplicatedElem(defBlockIDs)
         docPaths = gulu.Str.RemoveDuplicatedElem(docPaths)
     }
 
     luteEngine := util.NewLute()

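To make the collection step above easier to follow outside the repository, here is a minimal, self-contained sketch of the same idea with plain data: walk a list of document paths, skip paths already visited via a walked set, gather the definition block IDs each document references, and deduplicate once at the end. The collectDefIDs, docRefs, and removeDuplicated names are illustrative stand-ins, not SiYuan APIs; the real code goes through treenode.GetBlockRef, treenode.GetBlockTree, and gulu.Str.RemoveDuplicatedElem.

    package main

    import "fmt"

    // collectDefIDs sketches the walk above with plain data: each document path
    // contributes the definition IDs it references, the walked set skips paths
    // that were already handled, and duplicates are removed once after the loop.
    func collectDefIDs(docPaths []string, docRefs map[string][]string) []string {
        var defBlockIDs []string
        walked := map[string]bool{}
        for _, p := range docPaths {
            if walked[p] {
                continue
            }
            walked[p] = true
            defBlockIDs = append(defBlockIDs, docRefs[p]...)
        }
        return removeDuplicated(defBlockIDs)
    }

    // removeDuplicated keeps the first occurrence of each ID, preserving order,
    // roughly the role gulu.Str.RemoveDuplicatedElem plays in the real code.
    func removeDuplicated(in []string) (out []string) {
        seen := map[string]bool{}
        for _, s := range in {
            if seen[s] {
                continue
            }
            seen[s] = true
            out = append(out, s)
        }
        return
    }

    func main() {
        refs := map[string][]string{
            "doc-a.sy": {"def-1", "def-2"},
            "doc-b.sy": {"def-2"},
        }
        fmt.Println(collectDefIDs([]string{"doc-a.sy", "doc-b.sy", "doc-a.sy"}, refs))
        // Output: [def-1 def-2]
    }
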
@@ -1152,12 +1152,12 @@ func fullTextSearchRefBlock(keyword string, beforeLen int, onlyDoc bool) (ret []
 
     if ignoreLines := getRefSearchIgnoreLines(); 0 < len(ignoreLines) {
         // Support ignore search results https://github.com/siyuan-note/siyuan/issues/10089
-        notLike := bytes.Buffer{}
+        buf := bytes.Buffer{}
         for _, line := range ignoreLines {
-            notLike.WriteString(" AND ")
-            notLike.WriteString(line)
+            buf.WriteString(" AND ")
+            buf.WriteString(line)
         }
-        stmt += notLike.String()
+        stmt += buf.String()
     }
 
     orderBy := ` ORDER BY CASE

@@ -1272,12 +1272,12 @@ func fullTextSearchByFTS(query, boxFilter, pathFilter, typeFilter, orderBy strin
 
     if ignoreLines := getSearchIgnoreLines(); 0 < len(ignoreLines) {
         // Support ignore search results https://github.com/siyuan-note/siyuan/issues/10089
-        notLike := bytes.Buffer{}
+        buf := bytes.Buffer{}
         for _, line := range ignoreLines {
-            notLike.WriteString(" AND ")
-            notLike.WriteString(line)
+            buf.WriteString(" AND ")
+            buf.WriteString(line)
         }
-        stmt += notLike.String()
+        stmt += buf.String()
     }
 
     stmt += " " + orderBy

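The only change in these two search hunks is the buffer's name, but the pattern is worth spelling out: each configured ignore line is expected to already be a complete SQL predicate, and a bytes.Buffer stitches them onto the statement instead of rebuilding a growing string on every iteration. A minimal stdlib-only sketch; the predicate and function name below are illustrative, not from the repository.

    package main

    import (
        "bytes"
        "fmt"
    )

    // appendIgnoreClauses glues pre-built SQL predicates onto a statement, the
    // way the renamed buf buffer does above. Each ignore line must already be a
    // valid predicate fragment; nothing is escaped or validated here.
    func appendIgnoreClauses(stmt string, ignoreLines []string) string {
        buf := bytes.Buffer{}
        for _, line := range ignoreLines {
            buf.WriteString(" AND ")
            buf.WriteString(line)
        }
        return stmt + buf.String()
    }

    func main() {
        stmt := "SELECT * FROM blocks WHERE content LIKE '%keyword%'"
        ignore := []string{"content NOT LIKE '%draft%'"} // illustrative predicate
        fmt.Println(appendIgnoreClauses(stmt, ignore))
    }

Renaming notLike to buf fits that reading: the buffer carries whatever clauses the ignore file supplies, not only NOT LIKE filters.
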
@@ -386,11 +386,7 @@ func Timing(c *gin.Context) {
 }
 
 func Recover(c *gin.Context) {
-    defer func() {
-        logging.Recover()
-        c.Status(http.StatusInternalServerError)
-    }()
-
+    defer logging.Recover()
     c.Next()
 }
 
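Both shapes of Recover in this hunk rely on the same Go mechanic: a call deferred before c.Next() still runs while a later handler is panicking, and a recover() executed directly inside that deferred function stops the panic (the one-liner variant assumes logging.Recover calls recover internally). Here is a self-contained sketch of that mechanic without the gin or logging dependencies; recoverWrap and setStatus are illustrative names.

    package main

    import "fmt"

    // recoverWrap stands in for the middleware: the deferred closure is
    // registered before next() runs, so when next() panics the closure still
    // executes, recover() swallows the panic, and the status can be set to 500.
    func recoverWrap(next func(), setStatus func(int)) {
        defer func() {
            if r := recover(); r != nil {
                fmt.Println("recovered:", r)
                setStatus(500)
            }
        }()
        next()
    }

    func main() {
        status := 0
        recoverWrap(func() { panic("handler blew up") }, func(code int) { status = code })
        fmt.Println("status:", status) // 500; the process keeps running
    }
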
@@ -112,7 +112,9 @@ func ResetVirtualBlockRefCache() {
         return
     }
 
-    keywords := sql.QueryVirtualRefKeywords(Conf.Search.VirtualRefName, Conf.Search.VirtualRefAlias, Conf.Search.VirtualRefAnchor, Conf.Search.VirtualRefDoc)
+    searchIgnoreLines := getSearchIgnoreLines()
+    refSearchIgnoreLines := getRefSearchIgnoreLines()
+    keywords := sql.QueryVirtualRefKeywords(Conf.Search.VirtualRefName, Conf.Search.VirtualRefAlias, Conf.Search.VirtualRefAnchor, Conf.Search.VirtualRefDoc, searchIgnoreLines, refSearchIgnoreLines)
     virtualBlockRefCache.Set("virtual_ref", keywords, 1)
 }
 
@@ -180,9 +180,16 @@ func QueryBlockAliases(rootID string) (ret []string) {
     return
 }
 
-func queryNames() (ret []string) {
+func queryNames(searchIgnoreLines []string) (ret []string) {
     ret = []string{}
-    sqlStmt := "SELECT name FROM blocks WHERE name != '' LIMIT ?"
+    sqlStmt := "SELECT name FROM blocks WHERE name != ''"
+    buf := bytes.Buffer{}
+    for _, line := range searchIgnoreLines {
+        buf.WriteString(" AND ")
+        buf.WriteString(line)
+    }
+    sqlStmt += buf.String()
+    sqlStmt += " LIMIT ?"
     rows, err := query(sqlStmt, 10240)
     if nil != err {
         logging.LogErrorf("sql query [%s] failed: %s", sqlStmt, err)
@@ -213,9 +220,16 @@ func queryNames() (ret []string) {
     return
 }
 
-func queryAliases() (ret []string) {
+func queryAliases(searchIgnoreLines []string) (ret []string) {
     ret = []string{}
-    sqlStmt := "SELECT alias FROM blocks WHERE alias != '' LIMIT ?"
+    sqlStmt := "SELECT alias FROM blocks WHERE alias != ''"
+    buf := bytes.Buffer{}
+    for _, line := range searchIgnoreLines {
+        buf.WriteString(" AND ")
+        buf.WriteString(line)
+    }
+    sqlStmt += buf.String()
+    sqlStmt += " LIMIT ?"
     rows, err := query(sqlStmt, 10240)
     if nil != err {
         logging.LogErrorf("sql query [%s] failed: %s", sqlStmt, err)
@@ -274,9 +288,15 @@ func queryDocIDsByTitle(title string, excludeIDs []string) (ret []string) {
     return
 }
 
-func queryDocTitles() (ret []string) {
+func queryDocTitles(searchIgnoreLines []string) (ret []string) {
     ret = []string{}
     sqlStmt := "SELECT content FROM blocks WHERE type = 'd'"
+    buf := bytes.Buffer{}
+    for _, line := range searchIgnoreLines {
+        buf.WriteString(" AND ")
+        buf.WriteString(line)
+    }
+    sqlStmt += buf.String()
     rows, err := query(sqlStmt)
     if nil != err {
         logging.LogErrorf("sql query [%s] failed: %s", sqlStmt, err)

@@ -44,18 +44,18 @@ func GetRefDuplicatedDefRootIDs() (ret []string) {
     return
 }
 
-func QueryVirtualRefKeywords(name, alias, anchor, doc bool) (ret []string) {
+func QueryVirtualRefKeywords(name, alias, anchor, doc bool, searchIgnoreLines, refSearchIgnoreLines []string) (ret []string) {
     if name {
-        ret = append(ret, queryNames()...)
+        ret = append(ret, queryNames(searchIgnoreLines)...)
     }
     if alias {
-        ret = append(ret, queryAliases()...)
+        ret = append(ret, queryAliases(searchIgnoreLines)...)
     }
     if anchor {
-        ret = append(ret, queryRefTexts()...)
+        ret = append(ret, queryRefTexts(refSearchIgnoreLines)...)
     }
     if doc {
-        ret = append(ret, queryDocTitles()...)
+        ret = append(ret, queryDocTitles(searchIgnoreLines)...)
     }
     ret = gulu.Str.RemoveDuplicatedElem(ret)
     sort.SliceStable(ret, func(i, j int) bool {
@@ -64,9 +64,16 @@ func QueryVirtualRefKeywords(name, alias, anchor, doc bool) (ret []string) {
     return
 }
 
-func queryRefTexts() (ret []string) {
+func queryRefTexts(refSearchIgnoreLines []string) (ret []string) {
     ret = []string{}
-    sqlStmt := "SELECT DISTINCT content FROM refs LIMIT 10240"
+    sqlStmt := "SELECT DISTINCT content FROM refs WHERE 1 = 1"
+    buf := bytes.Buffer{}
+    for _, line := range refSearchIgnoreLines {
+        buf.WriteString(" AND ")
+        buf.WriteString(line)
+    }
+    sqlStmt += buf.String()
+    sqlStmt += " LIMIT 10240"
     rows, err := query(sqlStmt)
     if nil != err {
         logging.LogErrorf("sql query failed: %s", sqlStmt, err)
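The WHERE 1 = 1 introduced in queryRefTexts is the usual trick for uniform clause appending: with an always-true predicate already in place, zero or more " AND ..." fragments can be tacked on without special-casing the first one, and the LIMIT is re-added at the end. A small sketch of that shape, using the same buffer loop as the earlier sketch; the function name is illustrative and the predicates come from the caller.

    package main

    import (
        "bytes"
        "fmt"
    )

    // buildRefTextsStmt mirrors the new statement-building shape: start from an
    // always-true WHERE clause, append each ignore predicate with " AND ", then
    // put the LIMIT back on the end.
    func buildRefTextsStmt(refSearchIgnoreLines []string) string {
        sqlStmt := "SELECT DISTINCT content FROM refs WHERE 1 = 1"
        buf := bytes.Buffer{}
        for _, line := range refSearchIgnoreLines {
            buf.WriteString(" AND ")
            buf.WriteString(line)
        }
        sqlStmt += buf.String()
        sqlStmt += " LIMIT 10240"
        return sqlStmt
    }

    func main() {
        fmt.Println(buildRefTextsStmt(nil))
        fmt.Println(buildRefTextsStmt([]string{"content NOT LIKE '%tmp%'"}))
    }
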
@@ -361,12 +368,12 @@ func QueryRefsRecent(onlyDoc bool, ignoreLines []string) (ret []*Ref) {
     stmt += " WHERE 1 = 1"
     if 0 < len(ignoreLines) {
         // Support ignore search results https://github.com/siyuan-note/siyuan/issues/10089
-        notLike := bytes.Buffer{}
+        buf := bytes.Buffer{}
         for _, line := range ignoreLines {
-            notLike.WriteString(" AND ")
-            notLike.WriteString(line)
+            buf.WriteString(" AND ")
+            buf.WriteString(line)
         }
-        stmt += notLike.String()
+        stmt += buf.String()
     }
     stmt += " GROUP BY r.def_block_id ORDER BY r.id DESC LIMIT 32"
     rows, err := query(stmt)