Merge remote-tracking branch 'origin/dev' into dev
commit 920dca9ad9
3 changed files with 73 additions and 174 deletions
@@ -36,7 +36,10 @@ func searchHistory(c *gin.Context) {
	}

	query := arg["query"].(string)
	page := int(arg["page"].(float64))
	page := 1
	if nil != arg["page"] {
		page = int(arg["page"].(float64))
	}
	histories := model.FullTextSearchHistory(query, page)
	ret.Data = map[string]interface{}{
		"histories": histories,
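The hunk above (and the two handler hunks that follow) introduces the same optional-paging pattern: the JSON body is decoded into a map, so a numeric `page` field arrives as a `float64`, and the handler falls back to page 1 when the field is missing. Below is a minimal standalone sketch of that pattern; the `readPage` helper and the sample payloads are illustrative, not part of the commit:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// readPage mirrors the handler logic above: default to page 1 and only
// read arg["page"] when the caller actually sent it. JSON numbers decode
// into interface{} as float64, hence the float64 assertion.
func readPage(arg map[string]interface{}) int {
	page := 1
	if nil != arg["page"] {
		page = int(arg["page"].(float64))
	}
	return page
}

func main() {
	for _, body := range []string{
		`{"query": "foo"}`,            // no page -> defaults to 1
		`{"query": "foo", "page": 3}`, // explicit page
	} {
		arg := map[string]interface{}{}
		json.Unmarshal([]byte(body), &arg)
		fmt.Println(readPage(arg))
	}
}
```

Asserting `float64` directly would panic when the key is absent, which is why the nil check comes first.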
@@ -75,7 +78,16 @@ func getAssetsHistory(c *gin.Context) {
	ret := gulu.Ret.NewResult()
	defer c.JSON(http.StatusOK, ret)

	histories, err := model.GetAssetsHistory()
	arg, ok := util.JsonArg(c, ret)
	if !ok {
		return
	}

	page := 1
	if nil != arg["page"] {
		page = int(arg["page"].(float64))
	}
	histories, err := model.GetAssetsHistory(page)
	if nil != err {
		ret.Code = -1
		ret.Msg = err.Error()
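Since the handler now reads its arguments through `util.JsonArg`, a caller passes the page number in a JSON request body. A hypothetical client call is sketched below; the route path and kernel port are assumptions for illustration, as only the handler function appears in this diff:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Hypothetical endpoint and port; the diff only shows the handler,
	// not how it is routed.
	const url = "http://127.0.0.1:6806/api/history/getAssetsHistory"

	body, _ := json.Marshal(map[string]interface{}{"page": 2})
	resp, err := http.Post(url, "application/json", bytes.NewReader(body))
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()

	var ret map[string]interface{}
	json.NewDecoder(resp.Body).Decode(&ret)
	fmt.Println(ret)
}
```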
@@ -114,7 +126,11 @@ func getDocHistory(c *gin.Context) {
	}

	notebook := arg["notebook"].(string)
	histories, err := model.GetDocHistory(notebook)
	page := 1
	if nil != arg["page"] {
		page = int(arg["page"].(float64))
	}
	histories, err := model.GetDocHistory(notebook, page)
	if nil != err {
		ret.Code = -1
		ret.Msg = err.Error()
@@ -100,6 +100,7 @@ func WriteTree(tree *parse.Tree) (err error) {
		newP := parse.NewParagraph()
		tree.Root.AppendChild(newP)
		tree.Root.SetIALAttr("updated", util.TimeFromID(newP.ID))
		treenode.ReindexBlockTree(tree)
	}

	renderer := render.NewJSONRenderer(tree, luteEngine.RenderOptions)
@@ -20,7 +20,6 @@ import (
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"io/fs"
	"os"
	"path/filepath"
@@ -262,90 +261,34 @@ type HistoryItem struct {
	Path string `json:"path"`
}

const maxHistory = 32
func GetDocHistory(boxID string, page int) (ret []*History, err error) {
	pageSize := 32
	from := (page - 1) * pageSize
	to := page * pageSize

func GetDocHistory(boxID string) (ret []*History, err error) {
	ret = []*History{}
	table := "histories_fts_case_insensitive"
	projections := "type, op, title, content, path, created"
	stmt := "SELECT " + projections + " FROM " + table + " WHERE path LIKE '%/" + boxID + "/%.sy'"
	stmt += " ORDER BY created DESC LIMIT " + strconv.Itoa(from) + ", " + strconv.Itoa(to)
	sqlHistories := sql.SelectHistoriesRawStmt(stmt)
	ret = fromSQLHistories(sqlHistories)
	return
}

	historyDir := util.HistoryDir
	if !gulu.File.IsDir(historyDir) {
		return
	}
func FullTextSearchHistory(query string, page int) (ret []*History) {
	query = gulu.Str.RemoveInvisible(query)
	query = stringQuery(query)

	historyBoxDirs, err := filepath.Glob(historyDir + "/*/" + boxID)
	if nil != err {
		logging.LogErrorf("read dir [%s] failed: %s", historyDir, err)
		return
	}
	sort.Slice(historyBoxDirs, func(i, j int) bool {
		return historyBoxDirs[i] > historyBoxDirs[j]
	})
	pageSize := 32
	from := (page - 1) * pageSize
	to := page * pageSize

	luteEngine := NewLute()
	count := 0
	for _, historyBoxDir := range historyBoxDirs {
		var docs []*HistoryItem
		itemCount := 0
		filepath.Walk(historyBoxDir, func(path string, info fs.FileInfo, err error) error {
			if info.IsDir() {
				return nil
			}

			if !strings.HasSuffix(info.Name(), ".sy") {
				return nil
			}

			data, err := filelock.NoLockFileRead(path)
			if nil != err {
				logging.LogErrorf("read file [%s] failed: %s", path, err)
				return nil
			}
			historyTree, err := parse.ParseJSONWithoutFix(data, luteEngine.ParseOptions)
			if nil != err {
				logging.LogErrorf("parse tree from file [%s] failed, remove it", path)
				os.RemoveAll(path)
				return nil
			}
			historyName := historyTree.Root.IALAttr("title")
			if "" == historyName {
				historyName = info.Name()
			}

			docs = append(docs, &HistoryItem{
				Title: historyTree.Root.IALAttr("title"),
				Path: path,
			})
			itemCount++
			if maxHistory < itemCount {
				return io.EOF
			}
			return nil
		})

		if 1 > len(docs) {
			continue
		}

		timeDir := filepath.Base(filepath.Dir(historyBoxDir))
		t := timeDir[:strings.LastIndex(timeDir, "-")]
		if ti, parseErr := time.Parse("2006-01-02-150405", t); nil == parseErr {
			t = ti.Format("2006-01-02 15:04:05")
		}

		ret = append(ret, &History{
			HCreated: t,
			Items: docs,
		})

		count++
		if maxHistory <= count {
			break
		}
	}

	sort.Slice(ret, func(i, j int) bool {
		return ret[i].HCreated > ret[j].HCreated
	})
	table := "histories_fts_case_insensitive"
	projections := "type, op, title, content, path, created"
	stmt := "SELECT " + projections + " FROM " + table + " WHERE " + table + " MATCH '{title content}:(" + query + ")'"
	stmt += " ORDER BY created DESC LIMIT " + strconv.Itoa(from) + ", " + strconv.Itoa(to)
	sqlHistories := sql.SelectHistoriesRawStmt(stmt)
	ret = fromSQLHistories(sqlHistories)
	return
}
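Both rewritten functions now delegate to the `histories_fts_case_insensitive` table and page the result with a `LIMIT` clause built from `from` and `to` (SQLite reads `LIMIT x, y` as offset `x`, row count `y`). Below is a small sketch of the statement the code above produces for a given page, using the same string construction and a hypothetical notebook ID:

```go
package main

import (
	"fmt"
	"strconv"
)

// docHistoryStmt rebuilds the statement exactly as the hunk above does.
func docHistoryStmt(boxID string, page int) string {
	pageSize := 32
	from := (page - 1) * pageSize
	to := page * pageSize

	table := "histories_fts_case_insensitive"
	projections := "type, op, title, content, path, created"
	stmt := "SELECT " + projections + " FROM " + table + " WHERE path LIKE '%/" + boxID + "/%.sy'"
	stmt += " ORDER BY created DESC LIMIT " + strconv.Itoa(from) + ", " + strconv.Itoa(to)
	return stmt
}

func main() {
	// Hypothetical notebook ID. Page 1 ends with "LIMIT 0, 32",
	// page 2 ends with "LIMIT 32, 64".
	fmt.Println(docHistoryStmt("20220101000000-example", 1))
	fmt.Println(docHistoryStmt("20220101000000-example", 2))
}
```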
@@ -368,7 +311,6 @@ func GetNotebookHistory() (ret []*History, err error) {
		return iTimeDir > jTimeDir
	})

	historyCount := 0
	for _, historyNotebookConf := range historyNotebookConfs {
		timeDir := filepath.Base(filepath.Dir(filepath.Dir(filepath.Dir(historyNotebookConf))))
		t := timeDir[:strings.LastIndex(timeDir, "-")]
@@ -389,18 +331,11 @@ func GetNotebookHistory() (ret []*History, err error) {

		ret = append(ret, &History{
			HCreated: t,
			Items: []*HistoryItem{
				{
					Title: c.Name,
					Path: filepath.Dir(filepath.Dir(historyNotebookConf)),
				},
			},
			Items: []*HistoryItem{{
				Title: c.Name,
				Path: filepath.Dir(filepath.Dir(historyNotebookConf)),
			}},
		})

		historyCount++
		if maxHistory <= historyCount {
			break
		}
	}

	sort.Slice(ret, func(i, j int) bool {
@@ -409,73 +344,17 @@ func GetNotebookHistory() (ret []*History, err error) {
	return
}

func GetAssetsHistory() (ret []*History, err error) {
	ret = []*History{}
func GetAssetsHistory(page int) (ret []*History, err error) {
	pageSize := 32
	from := (page - 1) * pageSize
	to := page * pageSize

	historyDir := util.HistoryDir
	if !gulu.File.IsDir(historyDir) {
		return
	}

	historyAssetsDirs, err := filepath.Glob(historyDir + "/*/assets")
	if nil != err {
		logging.LogErrorf("read dir [%s] failed: %s", historyDir, err)
		return
	}
	sort.Slice(historyAssetsDirs, func(i, j int) bool {
		return historyAssetsDirs[i] > historyAssetsDirs[j]
	})

	historyCount := 0
	for _, historyAssetsDir := range historyAssetsDirs {
		var assets []*HistoryItem
		itemCount := 0
		filepath.Walk(historyAssetsDir, func(path string, info fs.FileInfo, err error) error {
			if isSkipFile(info.Name()) {
				if info.IsDir() {
					return filepath.SkipDir
				}
				return nil
			}
			if info.IsDir() {
				return nil
			}

			assets = append(assets, &HistoryItem{
				Title: info.Name(),
				Path: filepath.ToSlash(strings.TrimPrefix(path, util.WorkspaceDir)),
			})
			itemCount++
			if maxHistory < itemCount {
				return io.EOF
			}
			return nil
		})

		if 1 > len(assets) {
			continue
		}

		timeDir := filepath.Base(filepath.Dir(historyAssetsDir))
		t := timeDir[:strings.LastIndex(timeDir, "-")]
		if ti, parseErr := time.Parse("2006-01-02-150405", t); nil == parseErr {
			t = ti.Format("2006-01-02 15:04:05")
		}

		ret = append(ret, &History{
			HCreated: t,
			Items: assets,
		})

		historyCount++
		if maxHistory <= historyCount {
			break
		}
	}

	sort.Slice(ret, func(i, j int) bool {
		return ret[i].HCreated > ret[j].HCreated
	})
	table := "histories_fts_case_insensitive"
	projections := "type, op, title, content, path, created"
	stmt := "SELECT " + projections + " FROM " + table + " WHERE path LIKE '%/assets/%'"
	stmt += " ORDER BY created DESC LIMIT " + strconv.Itoa(from) + ", " + strconv.Itoa(to)
	sqlHistories := sql.SelectHistoriesRawStmt(stmt)
	ret = fromSQLHistories(sqlHistories)
	return
}
@@ -600,6 +479,7 @@ const (
	HistoryOpUpdate = "update"
	HistoryOpDelete = "delete"
	HistoryOpFormat = "format"
	HistoryOpSync = "sync"
)

func GetHistoryDir(suffix string) (ret string, err error) {
@@ -633,7 +513,7 @@ func ReindexHistory() (err error) {
		return
	}

var validOps = []string{HistoryOpClean, HistoryOpUpdate, HistoryOpDelete, HistoryOpFormat}
var validOps = []string{HistoryOpClean, HistoryOpUpdate, HistoryOpDelete, HistoryOpFormat, HistoryOpSync}

func indexHistoryDir(name string, luteEngine *lute.Lute) (err error) {
	op := name[strings.LastIndex(name, "-")+1:]
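indexHistoryDir takes the operation from the suffix of the history directory name (everything after the last `-`), and the accepted operations now include `HistoryOpSync`. Below is a self-contained sketch of that naming convention; the directory name is hypothetical, the membership check is a plain loop rather than whatever helper the real code uses, and the "clean" value is an assumption since only the other constant values appear in this diff:

```go
package main

import (
	"fmt"
	"strings"
)

// Mirrors validOps above; "clean" is assumed, the other values are shown in the diff.
var validOps = []string{"clean", "update", "delete", "format", "sync"}

func main() {
	// History folders are named "<time>-<op>", e.g. "2006-01-02-150405-sync".
	name := "2022-06-01-120000-sync" // hypothetical directory name

	op := name[strings.LastIndex(name, "-")+1:]
	t := name[:strings.LastIndex(name, "-")]

	ok := false
	for _, v := range validOps {
		if v == op {
			ok = true
			break
		}
	}
	fmt.Printf("time part %q, op %q, accepted: %v\n", t, op, ok)
}
```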
@@ -642,7 +522,7 @@ func indexHistoryDir(name string, luteEngine *lute.Lute) (err error) {
		return
	}
	t := name[:strings.LastIndex(name, "-")]
	tt, parseErr := time.Parse("2006-01-02-150405", t)
	tt, parseErr := time.ParseInLocation("2006-01-02-150405", t, time.Local)
	if nil != parseErr {
		logging.LogWarnf("parse history dir time [%s] failed: %s", t, parseErr)
		return
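The switch from `time.Parse` to `time.ParseInLocation` changes how the timestamp embedded in the directory name is interpreted: `time.Parse` treats a zone-less layout as UTC, while `ParseInLocation` pins it to the given location, here the local time zone. A minimal illustration with a hypothetical timestamp:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02-150405"
	const value = "2022-06-01-120000" // hypothetical history dir timestamp

	utc, _ := time.Parse(layout, value)                       // interpreted as 12:00 UTC
	local, _ := time.ParseInLocation(layout, value, time.Local) // interpreted as 12:00 local time

	// The two Unix timestamps differ by the local UTC offset
	// (they are equal only when the local zone is UTC).
	fmt.Println(utc.Unix(), local.Unix())
}
```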
@@ -654,7 +534,7 @@ func indexHistoryDir(name string, luteEngine *lute.Lute) (err error) {
	filepath.Walk(entryPath, func(path string, info os.FileInfo, err error) error {
		if strings.HasSuffix(info.Name(), ".sy") {
			docs = append(docs, path)
		} else if strings.Contains(path, "assets/") {
		} else if strings.Contains(path, "assets"+string(os.PathSeparator)) {
			assets = append(assets, path)
		}
		return nil
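The change in this hunk swaps the hard-coded forward slash for `os.PathSeparator`, so asset paths are also matched on Windows, where `filepath.Walk` reports backslash-separated paths. A small sketch of the difference, using a hypothetical Windows-style path:

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	// filepath.Walk reports OS-native separators: "assets/x.png" on Unix,
	// `assets\x.png` on Windows. Matching on "assets"+os.PathSeparator
	// therefore follows the platform, while a literal "assets/" only
	// matches forward-slash paths.
	winPath := `history\2022-06-01-120000-sync\assets\image.png` // hypothetical Windows-style path
	sep := "assets" + string(os.PathSeparator)

	fmt.Println(strings.Contains(winPath, "assets/")) // false
	fmt.Println(strings.Contains(winPath, sep))       // true on Windows, false when run on Unix
}
```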
@@ -714,15 +594,7 @@ func indexHistoryDir(name string, luteEngine *lute.Lute) (err error) {
	return
}

func FullTextSearchHistory(query string, page int) (ret []*History) {
	query = gulu.Str.RemoveInvisible(query)
	query = stringQuery(query)

	table := "histories_fts_case_insensitive"
	projections := "type, op, title, content, path, created"
	stmt := "SELECT " + projections + " FROM " + table + " WHERE " + table + " MATCH '{title content}:(" + query + ")'"
	stmt += " ORDER BY created DESC LIMIT " + strconv.Itoa(page)
	sqlHistories := sql.SelectHistoriesRawStmt(stmt)
func fromSQLHistories(sqlHistories []*sql.History) (ret []*History) {
	if 1 > len(sqlHistories) {
		ret = []*History{}
		return
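The `MATCH '{title content}:(...)'` expression appears to use SQLite FTS column-filter syntax, which restricts the full-text match to the title and content columns of the FTS table. Under that assumption, a statement built the way the code above builds it would look roughly like this (hypothetical search term, with the escaping done by `stringQuery` omitted):

```go
package main

import "fmt"

func main() {
	table := "histories_fts_case_insensitive"
	projections := "type, op, title, content, path, created"
	query := "kernel" // hypothetical, already sanitized by stringQuery in the real code

	stmt := "SELECT " + projections + " FROM " + table +
		" WHERE " + table + " MATCH '{title content}:(" + query + ")'"
	fmt.Println(stmt)
	// SELECT type, op, title, content, path, created FROM histories_fts_case_insensitive
	// WHERE histories_fts_case_insensitive MATCH '{title content}:(kernel)'
}
```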
@@ -746,7 +618,17 @@ func FullTextSearchHistory(query string, page int) (ret []*History) {
				Items: items,
			})
			items = []*HistoryItem{}
			items = append(items, &HistoryItem{
				Title: sqlHistory.Title,
				Path: filepath.Join(util.HistoryDir, sqlHistory.Path),
			})
		}
	}
	if 1 < len(items) {
		ret = append(ret, &History{
			HCreated: time.Unix(tmpTime, 0).Format("2006-01-02 15:04:05"),
			Items: items,
		})
	}
	return
}
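fromSQLHistories appears to fold the flat rows returned by the query into one History entry per creation timestamp, collecting the individual rows as HistoryItems and flushing the current bucket whenever the timestamp changes. The surrounding loop is only partially visible in this hunk, so the sketch below is a guess at the overall shape rather than a copy of the function, with simplified stand-in types and sample data:

```go
package main

import (
	"fmt"
	"time"
)

// Simplified stand-ins for the real sql.History / model.History types.
type row struct {
	Title   string
	Path    string
	Created int64
}

type group struct {
	HCreated string
	Items    []string
}

// groupByCreated buckets consecutive rows that share a creation timestamp.
func groupByCreated(rows []row) (ret []group) {
	if len(rows) == 0 {
		return
	}
	var items []string
	tmpTime := rows[0].Created
	for _, r := range rows {
		if r.Created != tmpTime {
			// timestamp changed: flush the previous bucket and start a new one
			ret = append(ret, group{HCreated: time.Unix(tmpTime, 0).Format("2006-01-02 15:04:05"), Items: items})
			items = nil
			tmpTime = r.Created
		}
		items = append(items, r.Title)
	}
	if len(items) > 0 {
		ret = append(ret, group{HCreated: time.Unix(tmpTime, 0).Format("2006-01-02 15:04:05"), Items: items})
	}
	return
}

func main() {
	// Illustrative rows only; the real data comes from sql.SelectHistoriesRawStmt.
	rows := []row{
		{"doc A", "a.sy", 1654084800},
		{"doc B", "b.sy", 1654084800},
		{"doc C", "c.sy", 1654171200},
	}
	for _, g := range groupByCreated(rows) {
		fmt.Println(g.HCreated, g.Items)
	}
}
```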