Browse Source

[server] Limit copying to a maximum of 100 files in a single request

Neeraj Gupta 1 year ago
parent
commit
d0621eea3e
2 changed files with 8 additions and 0 deletions
  1. server/pkg/api/file.go (+6 −0)
  2. server/pkg/controller/file_copy/file_copy.go (+2 −0)

+ 6 - 0
server/pkg/api/file.go

@@ -1,6 +1,7 @@
 package api
 
 import (
+	"fmt"
 	"github.com/ente-io/museum/pkg/controller/file_copy"
 	"net/http"
 	"os"
@@ -27,6 +28,7 @@ type FileHandler struct {
 
 // DefaultMaxBatchSize is the default maximum API batch size unless specified otherwise
 const DefaultMaxBatchSize = 1000
+const DefaultCopyBatchSize = 100
 
 // CreateOrUpdate creates an entry for a file
 func (h *FileHandler) CreateOrUpdate(c *gin.Context) {
@@ -67,6 +69,10 @@ func (h *FileHandler) CopyFiles(c *gin.Context) {
 		handler.Error(c, stacktrace.Propagate(err, ""))
 		return
 	}
+	if len(req.CollectionFileItems) > DefaultCopyBatchSize {
+		handler.Error(c, stacktrace.Propagate(ente.NewBadRequestWithMessage(fmt.Sprintf("more than %d items", DefaultCopyBatchSize)), ""))
+		return
+	}
 	response, err := h.FileCopyCtrl.CopyFiles(c, req)
 	if err != nil {
 		handler.Error(c, stacktrace.Propagate(err, ""))

+ 2 - 0
server/pkg/controller/file_copy/file_copy.go

@@ -17,6 +17,8 @@ import (
 	"time"
 )
 
+const ()
+
 type FileCopyController struct {
 	S3Config       *s3config.S3Config
 	FileController *controller.FileController