[server] Limit copying max 100 files in single req
parent cbdd116cea
commit d0621eea3e

2 changed files with 8 additions and 0 deletions
@@ -1,6 +1,7 @@
 package api
 
 import (
+	"fmt"
 	"github.com/ente-io/museum/pkg/controller/file_copy"
 	"net/http"
 	"os"
@@ -27,6 +28,7 @@ type FileHandler struct {
 
 // DefaultMaxBatchSize is the default maximum API batch size unless specified otherwise
 const DefaultMaxBatchSize = 1000
+const DefaultCopyBatchSize = 100
 
 // CreateOrUpdate creates an entry for a file
 func (h *FileHandler) CreateOrUpdate(c *gin.Context) {
@@ -67,6 +69,10 @@ func (h *FileHandler) CopyFiles(c *gin.Context) {
 		handler.Error(c, stacktrace.Propagate(err, ""))
 		return
 	}
+	if len(req.CollectionFileItems) > DefaultCopyBatchSize {
+		handler.Error(c, stacktrace.Propagate(ente.NewBadRequestWithMessage(fmt.Sprintf("more than %d items", DefaultCopyBatchSize)), ""))
+		return
+	}
 	response, err := h.FileCopyCtrl.CopyFiles(c, req)
 	if err != nil {
 		handler.Error(c, stacktrace.Propagate(err, ""))
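For context, a minimal standalone sketch of the rule this hunk enforces, assuming only what the diff shows; validateCopyBatchSize is a hypothetical stand-in for the inline check, and the real handler wraps the failure in ente.NewBadRequestWithMessage and reports it through handler.Error:

	package main

	import "fmt"

	// DefaultCopyBatchSize mirrors the constant added in the handler.
	const DefaultCopyBatchSize = 100

	// validateCopyBatchSize is a hypothetical stand-in for the guard added
	// to CopyFiles: it rejects any request that asks to copy more than
	// DefaultCopyBatchSize files in a single call.
	func validateCopyBatchSize(itemCount int) error {
		if itemCount > DefaultCopyBatchSize {
			return fmt.Errorf("more than %d items", DefaultCopyBatchSize)
		}
		return nil
	}

	func main() {
		fmt.Println(validateCopyBatchSize(100)) // <nil>: exactly at the limit passes
		fmt.Println(validateCopyBatchSize(101)) // error: more than 100 items
	}

The comparison is strict, so a batch of exactly 100 items is still accepted; only 101 or more is rejected as a bad request.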
@@ -17,6 +17,8 @@ import (
 	"time"
 )
 
+const ()
+
 type FileCopyController struct {
 	S3Config       *s3config.S3Config
 	FileController *controller.FileController