Merge remote-tracking branch 'origin/dev' into dev

This commit is contained in:
Vanessa 2024-12-05 11:09:20 +08:00
commit 3ff44adf59
21 changed files with 189 additions and 58 deletions

API.md
View file

@ -73,7 +73,8 @@
* Endpoint: `http://127.0.0.1:6806`
* Both are POST methods
* An interface with parameters is required; the parameter is a JSON string placed in the body, and the header Content-Type is `application/json`
* Return value
````json
@ -328,7 +329,8 @@ View API token in <kbd>Settings - About</kbd>, request header: `Authorization: T
```
* `notebook`: Notebook ID
* `path`: Document path, which needs to start with / and separate levels with / (path here corresponds to the database hpath field)
* `markdown`: GFM Markdown content
* Return value
@ -381,8 +383,8 @@ Rename a document by `id`:
}
```
* `id`: Document ID
* `title`: New document title
* Return value
```json
@ -416,7 +418,7 @@ Rename a document by `id`:
"data": null
}
```
Remove a document by `id`:
* `/api/filetree/removeDocByID`
@ -428,7 +430,7 @@ Remove a document by `id`:
}
```
* `id`: Document ID
* Return value
```json
@ -465,6 +467,30 @@ Remove a document by `id`:
}
```
Move documents by `id`:
* `/api/filetree/moveDocsByID`
* Parameters
```json
{
"fromIDs": ["20210917220056-yxtyl7i"],
"toID": "20210817205410-2kvfpfn"
}
```
* `fromIDs`: IDs of the source documents
* `toID`: Target parent ID
* Return value
```json
{
"code": 0,
"msg": "",
"data": null
}
```
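As a hedged illustration, here is a minimal Go sketch of calling this new endpoint from a client. The token and IDs are placeholders; the endpoint and `Authorization: Token` header follow the conventions described at the top of this document:
```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// moveDocsByID posts to /api/filetree/moveDocsByID and reports a non-zero
// return code as an error.
func moveDocsByID(token string, fromIDs []string, toID string) error {
	payload, err := json.Marshal(map[string]any{"fromIDs": fromIDs, "toID": toID})
	if err != nil {
		return err
	}
	req, err := http.NewRequest("POST", "http://127.0.0.1:6806/api/filetree/moveDocsByID", bytes.NewReader(payload))
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Token "+token)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	var ret struct {
		Code int    `json:"code"`
		Msg  string `json:"msg"`
	}
	if err = json.NewDecoder(resp.Body).Decode(&ret); err != nil {
		return err
	}
	if ret.Code != 0 {
		return fmt.Errorf("moveDocsByID failed: %s", ret.Msg)
	}
	return nil
}

func main() {
	// Placeholder values taken from the request example above.
	if err := moveDocsByID("<api-token>", []string{"20210917220056-yxtyl7i"}, "20210817205410-2kvfpfn"); err != nil {
		fmt.Println(err)
	}
}
```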
### Get human-readable path based on path
* `/api/filetree/getHPathByPath`
@ -510,7 +536,7 @@ Remove a document by `id`:
"data": "/foo/bar"
}
```
### Get storage path based on ID
* `/api/filetree/getPathByID`
@ -522,7 +548,7 @@ Remove a document by `id`:
}
```
* `id`: Block ID
* Return value
```json
@ -545,8 +571,8 @@ Remove a document by `id`:
}
```
* `path`: Human-readable path
* `notebook`: Notebook ID
* Return value
```json
@ -570,7 +596,8 @@ Remove a document by `id`:
* `"/assets/"`: workspace/data/assets/ folder
* `"/assets/sub/"`: workspace/data/assets/sub/ folder
Under normal circumstances it is recommended to use the first method, which stores files in the assets folder of the workspace; putting them in a subdirectory has some side effects, please refer to the assets chapter of the user guide.
* `file[]`: Uploaded file list
* Return value
@ -588,7 +615,9 @@ Remove a document by `id`:
```
* `errFiles`: List of filenames with errors in upload processing
* `succMap`: For successfully processed files, the key is the filename at upload time and the value is assets/foo-id.png, which is used to replace the asset link address in the existing Markdown content with the uploaded address (a multipart upload sketch follows)
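For illustration only, a hedged Go sketch of a multipart upload exercising the `file[]` field. The `/api/asset/upload` route and the `assetsDirPath` field name are assumptions taken from the wider API document, not from this hunk:
```go
package apiclient

import (
	"bytes"
	"fmt"
	"io"
	"mime/multipart"
	"net/http"
	"os"
	"path/filepath"
)

// UploadAsset posts one file to the upload endpoint and prints the raw
// response so errFiles and succMap can be inspected.
func UploadAsset(token, assetsDirPath, filePath string) error {
	var buf bytes.Buffer
	w := multipart.NewWriter(&buf)
	// assetsDirPath is assumed to be the storage-path parameter, e.g. "/assets/".
	if err := w.WriteField("assetsDirPath", assetsDirPath); err != nil {
		return err
	}
	f, err := os.Open(filePath)
	if err != nil {
		return err
	}
	defer f.Close()
	part, err := w.CreateFormFile("file[]", filepath.Base(filePath))
	if err != nil {
		return err
	}
	if _, err = io.Copy(part, f); err != nil {
		return err
	}
	w.Close() // finalize the multipart body before sending
	req, err := http.NewRequest("POST", "http://127.0.0.1:6806/api/asset/upload", &buf)
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", w.FormDataContentType())
	req.Header.Set("Authorization", "Token "+token)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return err
	}
	fmt.Println(string(body)) // contains errFiles and succMap
	return nil
}
```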
## Blocks
@ -613,7 +642,8 @@ Remove a document by `id`:
* `previousID`: The ID of the previous block, used to anchor the insertion position
* `parentID`: The ID of the parent block, used to anchor the insertion position
`nextID`, `previousID`, and `parentID` must have at least one value, using priority: `nextID` > `previousID` > `parentID`
* Return value
```json
@ -820,7 +850,8 @@ Remove a document by `id`:
* `id`: Block ID to move
* `previousID`: The ID of the previous block, used to anchor the insertion position
* `parentID`: The ID of the parent block, used to anchor the insertion position. `previousID` and `parentID` cannot both be empty; if both are present, `previousID` takes precedence
* Return value
```json
@ -860,7 +891,7 @@ Remove a document by `id`:
}
```
* `id`: Block ID to fold
* Return value
```json
@ -882,7 +913,7 @@ Remove a document by `id`:
}
```
* `id`: Block ID to unfold
* Return value
```json
@ -1380,7 +1411,8 @@ Remove a document by `id`:
"timeout": 7000
}
```
* `timeout`: The duration of the message display in milliseconds. This field can be omitted; the default is 7000 milliseconds
* Return value
```json
@ -1405,7 +1437,8 @@ Remove a document by `id`:
"timeout": 7000
}
```
* `timeout`: The duration of the message display in milliseconds. This field can be omitted; the default is 7000 milliseconds
* Return value
```json
@ -1457,7 +1490,8 @@ Remove a document by `id`:
* `base32` | `base32-std`
* `base32-hex`
* `hex`
* `responseEncoding`: The encoding scheme used by `body` in the response data; the default is `text`. Optional values are as follows
* `text`
* `base64` | `base64-std`
@ -1484,7 +1518,8 @@ Remove a document by `id`:
}
```
* `bodyEncoding`: The encoding scheme used by `body`, consistent with the `responseEncoding` field in the request; the default is `text`. Optional values are as follows (a decoding sketch follows this list)
* `text`
* `base64` | `base64-std`
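Purely as an illustration (this helper is not part of the API), a Go sketch that decodes a returned `body` according to the advertised encoding, covering the schemes listed in this section:
```go
package main

import (
	"encoding/base32"
	"encoding/base64"
	"encoding/hex"
	"fmt"
)

// decodeBody maps the documented encoding names onto Go's standard decoders;
// "text" (the default) passes through unchanged.
func decodeBody(body, encoding string) ([]byte, error) {
	switch encoding {
	case "", "text":
		return []byte(body), nil
	case "base64", "base64-std":
		return base64.StdEncoding.DecodeString(body)
	case "base32", "base32-std":
		return base32.StdEncoding.DecodeString(body)
	case "base32-hex":
		return base32.HexEncoding.DecodeString(body)
	case "hex":
		return hex.DecodeString(body)
	default:
		return nil, fmt.Errorf("unsupported encoding: %q", encoding)
	}
}

func main() {
	raw, err := decodeBody("aGVsbG8=", "base64")
	fmt.Println(string(raw), err) // hello <nil>
}
```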

View file

@ -381,8 +381,8 @@
}
```
* `id`: Document ID
* `title`: New title
* Return value
```json
@ -416,7 +416,7 @@
"data": null
}
```
Remove a document by `id`:
* `/api/filetree/removeDocByID`
@ -428,7 +428,7 @@
}
```
* `id`: Document ID
* Return value
```json
@ -465,6 +465,30 @@
}
```
Move documents by `id`:
* `/api/filetree/moveDocsByID`
* Parameters
```json
{
"fromIDs": ["20210917220056-yxtyl7i"],
"toID": "20210817205410-2kvfpfn"
}
```
* `fromIDs`: Source document IDs
* `toID`: Target parent document ID
* Return value
```json
{
"code": 0,
"msg": "",
"data": null
}
```
### Get human-readable path based on path
* `/api/filetree/getHPathByPath`
@ -522,7 +546,7 @@
}
```
* `id`: Block ID
* Return value
```json
@ -545,8 +569,8 @@
}
```
* `path`: Human-readable path
* `notebook`: Notebook ID
* Return value
```json
@ -860,7 +884,7 @@
}
```
* `id`: ID of the block to fold
* Return value
```json
@ -882,7 +906,7 @@
}
```
* `id`: ID of the block to unfold
* Return value
```json
@ -1063,7 +1087,7 @@
]
}
```
### Flush transaction
* `/api/sqlite/flushTransaction`

View file

@ -1,7 +1,7 @@
{
"empty": "Leer",
"newRowInRelation": "Erstellen Sie einen neuen Eintrag in ${x} <b class='ft__on-surface'>${y}</b>",
"keyContent": "Schlüsselinhalt",
"copyKeyContent": "Primärschlüsseltext kopieren",
"addDesc": "Beschreibung hinzufügen",
"dataRepoAutoPurgeIndexRetentionDays": "Daten-Snapshot-Aufbewahrungstage",
"dataRepoAutoPurgeRetentionIndexesDaily": "Daten-Snapshots pro Tag",

View file

@ -1,7 +1,7 @@
{
"empty": "Empty",
"newRowInRelation": "Create a new entry in ${x} <b class='ft__on-surface'>${y}</b>",
"keyContent": "Key content",
"copyKeyContent": "Copy primary key text",
"addDesc": "Add description",
"dataRepoAutoPurgeIndexRetentionDays": "Data snapshot retention days",
"dataRepoAutoPurgeRetentionIndexesDaily": "Data snapshots per day",

View file

@ -1,7 +1,7 @@
{
"empty": "Vacío",
"newRowInRelation": "Crear una nueva entrada en ${x} <b class='ft__on-surface'>${y}</b>",
"keyContent": "Contenido de la clave",
"copyKeyContent": "Copiar texto de la clave principal",
"addDesc": "Agregar descripción",
"dataRepoAutoPurgeIndexRetentionDays": "Días de retención de instantáneas de datos",
"dataRepoAutoPurgeRetentionIndexesDaily": "Número de instantáneas de datos por día",

View file

@ -1,6 +1,6 @@
{
"newRowInRelation": "Créer une nouvelle entrée dans ${x} <b class='ft__on-surface'>${y}</b>",
"keyContent": "Contenu de la clé",
"copyKeyContent": "Copier le texte de la clé principale",
"addDesc": "Ajouter une description",
"dataRepoAutoPurgeIndexRetentionDays": "Jours de rétention des instantanés de données",
"dataRepoAutoPurgeRetentionIndexesDaily": "Nombre d'instantanés de données par jour",

View file

@ -1,7 +1,7 @@
{
"empty": "ריק",
"newRowInRelation": "צור ערך חדש ב-${x} <b class='ft__on-surface'>${y}</b>",
"keyContent": "תוכן המפתח",
"copyKeyContent": "העתק טקסט מפתח ראשי",
"addDesc": "הוסף תיאור",
"dataRepoAutoPurgeIndexRetentionDays": "ימי שמירת תמונות נתונים",
"dataRepoAutoPurgeRetentionIndexesDaily": "מספר תמונות נתונים ביום",

View file

@ -1,7 +1,7 @@
{
"empty": "Vuoto",
"newRowInRelation": "Crea una nuova voce in ${x} <b class='ft__on-surface'>${y}</b>",
"keyContent": "Contenuto della chiave",
"copyKeyContent": "Copia il testo della chiave primaria",
"addDesc": "Aggiungi descrizione",
"dataRepoAutoPurgeIndexRetentionDays": "Giorni di conservazione degli snapshot dei dati",
"dataRepoAutoPurgeRetentionIndexesDaily": "Numero di snapshot dei dati al giorno",

View file

@ -1,7 +1,7 @@
{
"empty": "空白",
"newRowInRelation": "${x} に新しい項目を作成 <b class='ft__on-surface'>${y}</b>",
"keyContent": "キーコンテンツ",
"copyKeyContent": "主キーのテキストをコピー",
"addDesc": "説明を追加",
"dataRepoAutoPurgeIndexRetentionDays": "データスナップショットの保持日数",
"dataRepoAutoPurgeRetentionIndexesDaily": "データスナップショットの毎日の保持数",

View file

@ -1,7 +1,7 @@
{
"empty": "Pusty",
"newRowInRelation": "Utwórz nowy wpis w ${x} <b class='ft__on-surface'>${y}</b>",
"keyContent": "Zawartość klucza",
"copyKeyContent": "Skopiuj tekst klucza głównego",
"addDesc": "Dodaj opis",
"dataRepoAutoPurgeIndexRetentionDays": "Dni przechowywania migawek danych",
"dataRepoAutoPurgeRetentionIndexesDaily": "Liczba migawek danych dziennie",

View file

@ -1,7 +1,7 @@
{
"empty": "Пусто",
"newRowInRelation": "Создать новую запись в ${x} <b class='ft__on-surface'>${y}</b>",
"keyContent": "Содержимое ключа",
"copyKeyContent": "Скопировать текст основного ключа",
"addDesc": "Добавить описание",
"dataRepoAutoPurgeIndexRetentionDays": "Срок хранения снимков данных",
"dataRepoAutoPurgeRetentionIndexesDaily": "Количество снимков данных в день",

View file

@ -1,7 +1,7 @@
{
"empty": "空白",
"newRowInRelation": "在 ${x} 中新建條目 <b class='ft__on-surface'>${y}</b>",
"keyContent": "主鍵內容",
"copyKeyContent": "複製主鍵文本",
"addDesc": "添加描述",
"dataRepoAutoPurgeIndexRetentionDays": "數據快照保留天數",
"dataRepoAutoPurgeRetentionIndexesDaily": "數據快照每天保留個數",

View file

@ -1,7 +1,7 @@
{
"empty": "空白",
"newRowInRelation": "在 ${x} 中新建条目 <b class='ft__on-surface'>${y}</b>",
"keyContent": "主键内容",
"copyKeyContent": "复制主键文本",
"addDesc": "添加描述",
"dataRepoAutoPurgeIndexRetentionDays": "数据快照保留天数",
"dataRepoAutoPurgeRetentionIndexesDaily": "数据快照每天保留个数",

View file

@ -298,7 +298,7 @@ export const avContextmenu = (protyle: IProtyle, rowElement: HTMLElement, positi
});
const copyMenu: IMenu[] = [{
iconHTML: "",
label: window.siyuan.languages.keyContent,
label: window.siyuan.languages.copyKeyContent,
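// The label now names the action ("Copy primary key text") rather than the field it reads from.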
click() {
let text = "";
rowElements.forEach((item, i) => {

View file

@ -472,9 +472,7 @@ func moveDocs(c *gin.Context) {
if util.InvalidIDPattern(toNotebook, ret) {
return
}
callback := arg["callback"]
err := model.MoveDocs(fromPaths, toNotebook, toPath, callback)
if err != nil {
ret.Code = -1
@ -484,6 +482,61 @@ func moveDocs(c *gin.Context) {
}
}
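// moveDocsByID resolves each source block ID to its document tree path, then
// delegates to the same model.MoveDocs call as the path-based handler above.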
func moveDocsByID(c *gin.Context) {
ret := gulu.Ret.NewResult()
defer c.JSON(http.StatusOK, ret)
arg, ok := util.JsonArg(c, ret)
if !ok {
return
}
fromIDsArg := arg["fromIDs"].([]any)
var fromIDs []string
for _, fromIDArg := range fromIDsArg {
fromID := fromIDArg.(string)
if util.InvalidIDPattern(fromID, ret) {
return
}
fromIDs = append(fromIDs, fromID)
}
toID := arg["toID"].(string)
if util.InvalidIDPattern(toID, ret) {
return
}
var fromPaths []string
for _, fromID := range fromIDs {
tree, err := model.LoadTreeByBlockID(fromID)
if err != nil {
ret.Code = -1
ret.Msg = err.Error()
ret.Data = map[string]interface{}{"closeTimeout": 7000}
return
}
fromPaths = append(fromPaths, tree.Path)
}
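// Several block IDs may resolve to the same document path; de-duplicate before moving.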
fromPaths = gulu.Str.RemoveDuplicatedElem(fromPaths)
toTree, err := model.LoadTreeByBlockID(toID)
if err != nil {
ret.Code = -1
ret.Msg = err.Error()
ret.Data = map[string]interface{}{"closeTimeout": 7000}
return
}
toNotebook := toTree.Box
toPath := toTree.Path
callback := arg["callback"]
err = model.MoveDocs(fromPaths, toNotebook, toPath, callback)
if err != nil {
ret.Code = -1
ret.Msg = err.Error()
ret.Data = map[string]interface{}{"closeTimeout": 7000}
return
}
}
func removeDoc(c *gin.Context) {
ret := gulu.Ret.NewResult()
defer c.JSON(http.StatusOK, ret)
@ -1061,8 +1114,14 @@ func getDoc(c *gin.Context) {
if nil != isBacklinkArg {
isBacklink = isBacklinkArg.(bool)
}
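// highlight defaults to true so existing clients keep keyword highlighting unless they opt out.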
highlightArg := arg["highlight"]
highlight := true
if nil != highlightArg {
highlight = highlightArg.(bool)
}
blockCount, content, parentID, parent2ID, rootID, typ, eof, scroll, boxID, docPath, isBacklinkExpand, err := model.GetDoc(startID, endID, id, index, query, queryTypes, queryMethod, mode, size, isBacklink)
blockCount, content, parentID, parent2ID, rootID, typ, eof, scroll, boxID, docPath, isBacklinkExpand, err := model.GetDoc(startID, endID, id, index, query, queryTypes, queryMethod, mode, size, isBacklink, highlight)
if model.ErrBlockNotFound == err {
ret.Code = 3
return

View file

@ -145,7 +145,11 @@ func getDocHistoryContent(c *gin.Context) {
if nil != k {
keyword = k.(string)
}
id, rootID, content, isLargeDoc, err := model.GetDocHistoryContent(historyPath, keyword)
highlight := true
if val, ok := arg["highlight"]; ok {
highlight = val.(bool)
}
id, rootID, content, isLargeDoc, err := model.GetDocHistoryContent(historyPath, keyword, highlight)
if err != nil {
ret.Code = -1
ret.Msg = err.Error()

View file

@ -56,7 +56,11 @@ func getBackmentionDoc(c *gin.Context) {
if val, ok := arg["containChildren"]; ok {
containChildren = val.(bool)
}
backlinks := model.GetBackmentionDoc(defID, refTreeID, keyword, containChildren)
highlight := true
if val, ok := arg["highlight"]; ok {
highlight = val.(bool)
}
backlinks := model.GetBackmentionDoc(defID, refTreeID, keyword, containChildren, highlight)
ret.Data = map[string]interface{}{
"backmentions": backlinks,
}
@ -78,7 +82,11 @@ func getBacklinkDoc(c *gin.Context) {
if val, ok := arg["containChildren"]; ok {
containChildren = val.(bool)
}
backlinks := model.GetBacklinkDoc(defID, refTreeID, keyword, containChildren)
highlight := true
if val, ok := arg["highlight"]; ok {
highlight = val.(bool)
}
backlinks := model.GetBacklinkDoc(defID, refTreeID, keyword, containChildren, highlight)
ret.Data = map[string]interface{}{
"backlinks": backlinks,
}
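The optional `highlight` flag with a `true` default is added to four handlers in this commit (getDoc, getDocHistoryContent, getBackmentionDoc, getBacklinkDoc). As a sketch only (hypothetical, not part of the codebase), the repeated pattern could be captured in one helper:
```go
// optBool is a hypothetical helper illustrating the pattern above: read an
// optional boolean argument from the request JSON, falling back to a default.
func optBool(arg map[string]interface{}, key string, def bool) bool {
	if val, ok := arg[key]; ok {
		if b, ok := val.(bool); ok {
			return b
		}
	}
	return def
}
```
Each handler's block would then reduce to `highlight := optBool(arg, "highlight", true)`.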

View file

@ -109,6 +109,7 @@ func ServeAPI(ginServer *gin.Engine) {
ginServer.Handle("POST", "/api/filetree/removeDocByID", model.CheckAuth, model.CheckAdminRole, model.CheckReadonly, removeDocByID)
ginServer.Handle("POST", "/api/filetree/removeDocs", model.CheckAuth, model.CheckAdminRole, model.CheckReadonly, removeDocs)
ginServer.Handle("POST", "/api/filetree/moveDocs", model.CheckAuth, model.CheckAdminRole, model.CheckReadonly, moveDocs)
ginServer.Handle("POST", "/api/filetree/moveDocsByID", model.CheckAuth, model.CheckAdminRole, model.CheckReadonly, moveDocsByID)
ginServer.Handle("POST", "/api/filetree/duplicateDoc", model.CheckAuth, model.CheckAdminRole, model.CheckReadonly, duplicateDoc)
ginServer.Handle("POST", "/api/filetree/getHPathByPath", model.CheckAuth, getHPathByPath)
ginServer.Handle("POST", "/api/filetree/getHPathsByPaths", model.CheckAuth, getHPathsByPaths)

View file

@ -62,7 +62,7 @@ type Backlink struct {
node *ast.Node // only used for sorting in document content order
}
func GetBackmentionDoc(defID, refTreeID, keyword string, containChildren bool) (ret []*Backlink) {
func GetBackmentionDoc(defID, refTreeID, keyword string, containChildren, highlight bool) (ret []*Backlink) {
var keywords []string
keyword = strings.TrimSpace(keyword)
if "" != keyword {
@ -102,7 +102,7 @@ func GetBackmentionDoc(defID, refTreeID, keyword string, containChildren bool) (
var refTree *parse.Tree
trees := filesys.LoadTrees(mentionBlockIDs)
for id, tree := range trees {
backlink := buildBacklink(id, tree, mentionKeywords, luteEngine)
backlink := buildBacklink(id, tree, mentionKeywords, highlight, luteEngine)
if nil != backlink {
ret = append(ret, backlink)
}
@ -118,7 +118,7 @@ func GetBackmentionDoc(defID, refTreeID, keyword string, containChildren bool) (
return
}
func GetBacklinkDoc(defID, refTreeID, keyword string, containChildren bool) (ret []*Backlink) {
func GetBacklinkDoc(defID, refTreeID, keyword string, containChildren, highlight bool) (ret []*Backlink) {
var keywords []string
keyword = strings.TrimSpace(keyword)
if "" != keyword {
@ -150,7 +150,7 @@ func GetBacklinkDoc(defID, refTreeID, keyword string, containChildren bool) (ret
luteEngine := util.NewLute()
for _, linkRef := range linkRefs {
backlink := buildBacklink(linkRef.ID, refTree, keywords, luteEngine)
backlink := buildBacklink(linkRef.ID, refTree, keywords, highlight, luteEngine)
if nil != backlink {
ret = append(ret, backlink)
}
@ -191,7 +191,7 @@ func sortBacklinks(backlinks []*Backlink, tree *parse.Tree) {
})
}
func buildBacklink(refID string, refTree *parse.Tree, keywords []string, luteEngine *lute.Lute) (ret *Backlink) {
func buildBacklink(refID string, refTree *parse.Tree, keywords []string, highlight bool, luteEngine *lute.Lute) (ret *Backlink) {
n := treenode.GetNodeInTree(refTree, refID)
if nil == n {
return
@ -199,7 +199,7 @@ func buildBacklink(refID string, refTree *parse.Tree, keywords []string, luteEng
renderNodes, expand := getBacklinkRenderNodes(n)
if 0 < len(keywords) {
if highlight && 0 < len(keywords) {
for _, renderNode := range renderNodes {
var unlinks []*ast.Node

View file

@ -599,7 +599,7 @@ func StatTree(id string) (ret *util.BlockStatResult) {
}
}
func GetDoc(startID, endID, id string, index int, query string, queryTypes map[string]bool, queryMethod, mode int, size int, isBacklink bool) (blockCount int, dom, parentID, parent2ID, rootID, typ string, eof, scroll bool, boxID, docPath string, isBacklinkExpand bool, err error) {
func GetDoc(startID, endID, id string, index int, query string, queryTypes map[string]bool, queryMethod, mode int, size int, isBacklink, highlight bool) (blockCount int, dom, parentID, parent2ID, rootID, typ string, eof, scroll bool, boxID, docPath string, isBacklinkExpand bool, err error) {
//os.MkdirAll("pprof", 0755)
//cpuProfile, _ := os.Create("pprof/GetDoc")
//pprof.StartCPUProfile(cpuProfile)
@ -844,7 +844,7 @@ func GetDoc(startID, endID, id string, index int, query string, queryTypes map[s
}
}
if 0 < len(keywords) {
if highlight && 0 < len(keywords) {
hitBlock := false
for p := n.Parent; nil != p; p = p.Parent {
if p.ID == id {

View file

@ -147,7 +147,7 @@ func ClearWorkspaceHistory() (err error) {
return
}
func GetDocHistoryContent(historyPath, keyword string) (id, rootID, content string, isLargeDoc bool, err error) {
func GetDocHistoryContent(historyPath, keyword string, highlight bool) (id, rootID, content string, isLargeDoc bool, err error) {
if !gulu.File.IsExist(historyPath) {
logging.LogWarnf("doc history [%s] not exist", historyPath)
return
@ -185,7 +185,7 @@ func GetDocHistoryContent(historyPath, keyword string) (id, rootID, content stri
n.RemoveIALAttr("heading-fold")
n.RemoveIALAttr("fold")
if 0 < len(keywords) {
if highlight && 0 < len(keywords) {
if markReplaceSpan(n, &unlinks, keywords, search.MarkDataType, luteEngine) {
return ast.WalkContinue
}