parent c0f47a58f2
commit a6355e298e
19 changed files with 706 additions and 265 deletions
@@ -28,7 +28,7 @@ It can serve local filesystem, S3 (compatible) Object Storage, Google Cloud Stor
- Per user and per directory permission management: list directory contents, upload, overwrite, download, delete, rename, create directories, create symlinks, change owner/group and mode, change access and modification times.
- Per user files/folders ownership mapping: you can map all the users to the system account that runs SFTPGo (all platforms are supported) or you can run SFTPGo as root user and map each user or group of users to a different system account (\*NIX only).
- Per user IP filters are supported: login can be restricted to specific ranges of IP addresses or to a specific IP address.
- Per user and per directory file extensions filters are supported: files can be allowed or denied based on their extensions.
- Per user and per directory shell like patterns filters are supported: files can be allowed or denied based on shell like patterns.
- Virtual folders are supported: directories outside the user home directory can be exposed as virtual folders.
- Configurable custom commands and/or HTTP notifications on file upload, download, pre-delete, delete, rename, on SSH commands and on user add, update and delete.
- Automatically terminating idle connections.
@@ -32,8 +32,8 @@ var (
portablePublicKeys []string
portablePermissions []string
portableSSHCommands []string
portableAllowedExtensions []string
portableDeniedExtensions []string
portableAllowedPatterns []string
portableDeniedPatterns []string
portableFsProvider int
portableS3Bucket string
portableS3Region string
@@ -174,7 +174,7 @@ Please take a look at the usage below to customize the serving parameters`,
},
},
Filters: dataprovider.UserFilters{
FileExtensions: parseFileExtensionsFilters(),
FilePatterns: parsePatternsFilesFilters(),
},
},
}
@@ -217,16 +217,16 @@ value`)
portableCmd.Flags().StringSliceVarP(&portablePermissions, "permissions", "g", []string{"list", "download"},
`User's permissions. "*" means any
permission`)
portableCmd.Flags().StringArrayVar(&portableAllowedExtensions, "allowed-extensions", []string{},
`Allowed file extensions case
insensitive. The format is
/dir::ext1,ext2.
For example: "/somedir::.jpg,.png"`)
portableCmd.Flags().StringArrayVar(&portableDeniedExtensions, "denied-extensions", []string{},
`Denied file extensions case
insensitive. The format is
/dir::ext1,ext2.
For example: "/somedir::.jpg,.png"`)
portableCmd.Flags().StringArrayVar(&portableAllowedPatterns, "allowed-patterns", []string{},
`Allowed file patterns case insensitive.
The format is:
/dir::pattern1,pattern2.
For example: "/somedir::*.jpg,a*b?.png"`)
portableCmd.Flags().StringArrayVar(&portableDeniedPatterns, "denied-patterns", []string{},
`Denied file patterns case insensitive.
The format is:
/dir::pattern1,pattern2.
For example: "/somedir::*.jpg,a*b?.png"`)
portableCmd.Flags().BoolVarP(&portableAdvertiseService, "advertise-service", "S", false,
`Advertise SFTP/FTP service using
multicast DNS`)
@@ -287,42 +287,42 @@ parallel`)
rootCmd.AddCommand(portableCmd)
}

func parseFileExtensionsFilters() []dataprovider.ExtensionsFilter {
var extensions []dataprovider.ExtensionsFilter
for _, val := range portableAllowedExtensions {
p, exts := getExtensionsFilterValues(strings.TrimSpace(val))
func parsePatternsFilesFilters() []dataprovider.PatternsFilter {
var patterns []dataprovider.PatternsFilter
for _, val := range portableAllowedPatterns {
p, exts := getPatternsFilterValues(strings.TrimSpace(val))
if len(p) > 0 {
extensions = append(extensions, dataprovider.ExtensionsFilter{
Path: path.Clean(p),
AllowedExtensions: exts,
DeniedExtensions: []string{},
patterns = append(patterns, dataprovider.PatternsFilter{
Path: path.Clean(p),
AllowedPatterns: exts,
DeniedPatterns: []string{},
})
}
}
for _, val := range portableDeniedExtensions {
p, exts := getExtensionsFilterValues(strings.TrimSpace(val))
for _, val := range portableDeniedPatterns {
p, exts := getPatternsFilterValues(strings.TrimSpace(val))
if len(p) > 0 {
found := false
for index, e := range extensions {
for index, e := range patterns {
if path.Clean(e.Path) == path.Clean(p) {
extensions[index].DeniedExtensions = append(extensions[index].DeniedExtensions, exts...)
patterns[index].DeniedPatterns = append(patterns[index].DeniedPatterns, exts...)
found = true
break
}
}
if !found {
extensions = append(extensions, dataprovider.ExtensionsFilter{
Path: path.Clean(p),
AllowedExtensions: []string{},
DeniedExtensions: exts,
patterns = append(patterns, dataprovider.PatternsFilter{
Path: path.Clean(p),
AllowedPatterns: []string{},
DeniedPatterns: exts,
})
}
}
}
return extensions
return patterns
}

func getExtensionsFilterValues(value string) (string, []string) {
func getPatternsFilterValues(value string) (string, []string) {
if strings.Contains(value, "::") {
dirExts := strings.Split(value, "::")
if len(dirExts) > 1 {
@@ -334,7 +334,7 @@ func getExtensionsFilterValues(value string) (string, []string) {
exts = append(exts, cleanedExt)
}
}
if len(dir) > 0 && len(exts) > 0 {
if dir != "" && len(exts) > 0 {
return dir, exts
}
}
@@ -903,6 +903,50 @@ func validatePublicKeys(user *User) error {
return nil
}

func validateFiltersPatternExtensions(user *User) error {
if len(user.Filters.FilePatterns) == 0 {
user.Filters.FilePatterns = []PatternsFilter{}
return nil
}
filteredPaths := []string{}
var filters []PatternsFilter
for _, f := range user.Filters.FilePatterns {
cleanedPath := filepath.ToSlash(path.Clean(f.Path))
if !path.IsAbs(cleanedPath) {
return &ValidationError{err: fmt.Sprintf("invalid path %#v for file patterns filter", f.Path)}
}
if utils.IsStringInSlice(cleanedPath, filteredPaths) {
return &ValidationError{err: fmt.Sprintf("duplicate file patterns filter for path %#v", f.Path)}
}
if len(f.AllowedPatterns) == 0 && len(f.DeniedPatterns) == 0 {
return &ValidationError{err: fmt.Sprintf("empty file patterns filter for path %#v", f.Path)}
}
f.Path = cleanedPath
allowed := make([]string, 0, len(f.AllowedPatterns))
denied := make([]string, 0, len(f.DeniedPatterns))
for _, pattern := range f.AllowedPatterns {
_, err := path.Match(pattern, "abc")
if err != nil {
return &ValidationError{err: fmt.Sprintf("invalid file pattern filter %v", pattern)}
}
allowed = append(allowed, strings.ToLower(pattern))
}
for _, pattern := range f.DeniedPatterns {
_, err := path.Match(pattern, "abc")
if err != nil {
return &ValidationError{err: fmt.Sprintf("invalid file pattern filter %v", pattern)}
}
denied = append(denied, strings.ToLower(pattern))
}
f.AllowedPatterns = allowed
f.DeniedPatterns = denied
filters = append(filters, f)
filteredPaths = append(filteredPaths, cleanedPath)
}
user.Filters.FilePatterns = filters
return nil
}

func validateFiltersFileExtensions(user *User) error {
if len(user.Filters.FileExtensions) == 0 {
user.Filters.FileExtensions = []ExtensionsFilter{}
@@ -922,6 +966,16 @@ func validateFiltersFileExtensions(user *User) error {
return &ValidationError{err: fmt.Sprintf("empty file extensions filter for path %#v", f.Path)}
}
f.Path = cleanedPath
allowed := make([]string, 0, len(f.AllowedExtensions))
denied := make([]string, 0, len(f.DeniedExtensions))
for _, ext := range f.AllowedExtensions {
allowed = append(allowed, strings.ToLower(ext))
}
for _, ext := range f.DeniedExtensions {
denied = append(denied, strings.ToLower(ext))
}
f.AllowedExtensions = allowed
f.DeniedExtensions = denied
filters = append(filters, f)
filteredPaths = append(filteredPaths, cleanedPath)
}
@@ -929,6 +983,13 @@ func validateFiltersFileExtensions(user *User) error {
return nil
}

func validateFileFilters(user *User) error {
if err := validateFiltersFileExtensions(user); err != nil {
return err
}
return validateFiltersPatternExtensions(user)
}

func validateFilters(user *User) error {
if len(user.Filters.AllowedIP) == 0 {
user.Filters.AllowedIP = []string{}
@@ -970,7 +1031,7 @@ func validateFilters(user *User) error {
return &ValidationError{err: fmt.Sprintf("invalid protocol: %#v", p)}
}
}
return validateFiltersFileExtensions(user)
return validateFileFilters(user)
}

func saveGCSCredentials(user *User) error {
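The new validation above relies on Go's `path.Match` rejecting a malformed pattern regardless of the name being matched. A minimal, standalone sketch of that idea; the helper name is illustrative and not part of this change:

```go
package main

import (
	"fmt"
	"path"
)

// checkPattern mirrors the validation idea above: path.Match returns
// ErrBadPattern for a syntactically invalid pattern, no matter which
// throwaway name it is matched against.
func checkPattern(pattern string) error {
	if _, err := path.Match(pattern, "abc"); err != nil {
		return fmt.Errorf("invalid file pattern filter %v: %w", pattern, err)
	}
	return nil
}

func main() {
	fmt.Println(checkPattern("*.jpg")) // <nil>
	fmt.Println(checkPattern("a\\"))   // error: trailing backslash is a bad pattern
}
```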
@@ -81,25 +81,46 @@ func (c *CachedUser) IsExpired() bool {
// ExtensionsFilter defines filters based on file extensions.
// These restrictions do not apply to files listing for performance reasons, so
// a denied file cannot be downloaded/overwritten/renamed but will still be
// it will still be listed in the list of files.
// in the list of files.
// System commands such as Git and rsync interacts with the filesystem directly
// and they are not aware about these restrictions so they are not allowed
// inside paths with extensions filters
type ExtensionsFilter struct {
// SFTP/SCP path, if no other specific filter is defined, the filter apply for
// Virtual path, if no other specific filter is defined, the filter apply for
// sub directories too.
// For example if filters are defined for the paths "/" and "/sub" then the
// filters for "/" are applied for any file outside the "/sub" directory
Path string `json:"path"`
// only files with these, case insensitive, extensions are allowed.
// Shell like expansion is not supported so you have to specify ".jpg" and
// not "*.jpg"
// not "*.jpg". If you want shell like patterns use pattern filters
AllowedExtensions []string `json:"allowed_extensions,omitempty"`
// files with these, case insensitive, extensions are not allowed.
// Denied file extensions are evaluated before the allowed ones
DeniedExtensions []string `json:"denied_extensions,omitempty"`
}

// PatternsFilter defines filters based on shell like patterns.
// These restrictions do not apply to files listing for performance reasons, so
// a denied file cannot be downloaded/overwritten/renamed but will still be
// in the list of files.
// System commands such as Git and rsync interacts with the filesystem directly
// and they are not aware about these restrictions so they are not allowed
// inside paths with extensions filters
type PatternsFilter struct {
// Virtual path, if no other specific filter is defined, the filter apply for
// sub directories too.
// For example if filters are defined for the paths "/" and "/sub" then the
// filters for "/" are applied for any file outside the "/sub" directory
Path string `json:"path"`
// files with these, case insensitive, patterns are allowed.
// Denied file patterns are evaluated before the allowed ones
AllowedPatterns []string `json:"allowed_patterns,omitempty"`
// files with these, case insensitive, patterns are not allowed.
// Denied file patterns are evaluated before the allowed ones
DeniedPatterns []string `json:"denied_patterns,omitempty"`
}

// UserFilters defines additional restrictions for a user
type UserFilters struct {
// only clients connecting from these IP/Mask are allowed.
@@ -118,6 +139,8 @@ type UserFilters struct {
// filters based on file extensions.
// Please note that these restrictions can be easily bypassed.
FileExtensions []ExtensionsFilter `json:"file_extensions,omitempty"`
// filter based on shell patterns
FilePatterns []PatternsFilter `json:"file_patterns,omitempty"`
// max size allowed for a single upload, 0 means unlimited
MaxUploadFileSize int64 `json:"max_upload_file_size,omitempty"`
}
@@ -444,11 +467,15 @@ func (u *User) GetAllowedLoginMethods() []string {
}

// IsFileAllowed returns true if the specified file is allowed by the file restrictions filters
func (u *User) IsFileAllowed(sftpPath string) bool {
func (u *User) IsFileAllowed(virtualPath string) bool {
return u.isFilePatternAllowed(virtualPath) && u.isFileExtensionAllowed(virtualPath)
}

func (u *User) isFileExtensionAllowed(virtualPath string) bool {
if len(u.Filters.FileExtensions) == 0 {
return true
}
dirsForPath := utils.GetDirsForSFTPPath(path.Dir(sftpPath))
dirsForPath := utils.GetDirsForSFTPPath(path.Dir(virtualPath))
var filter ExtensionsFilter
for _, dir := range dirsForPath {
for _, f := range u.Filters.FileExtensions {
@@ -457,12 +484,12 @@ func (u *User) IsFileAllowed(sftpPath string) bool {
break
}
}
if len(filter.Path) > 0 {
if filter.Path != "" {
break
}
}
if len(filter.Path) > 0 {
toMatch := strings.ToLower(sftpPath)
if filter.Path != "" {
toMatch := strings.ToLower(virtualPath)
for _, denied := range filter.DeniedExtensions {
if strings.HasSuffix(toMatch, denied) {
return false
@@ -478,6 +505,42 @@ func (u *User) IsFileAllowed(sftpPath string) bool {
return true
}

func (u *User) isFilePatternAllowed(virtualPath string) bool {
if len(u.Filters.FilePatterns) == 0 {
return true
}
dirsForPath := utils.GetDirsForSFTPPath(path.Dir(virtualPath))
var filter PatternsFilter
for _, dir := range dirsForPath {
for _, f := range u.Filters.FilePatterns {
if f.Path == dir {
filter = f
break
}
}
if filter.Path != "" {
break
}
}
if filter.Path != "" {
toMatch := strings.ToLower(path.Base(virtualPath))
for _, denied := range filter.DeniedPatterns {
matched, err := path.Match(denied, toMatch)
if err != nil || matched {
return false
}
}
for _, allowed := range filter.AllowedPatterns {
matched, err := path.Match(allowed, toMatch)
if err == nil && matched {
return true
}
}
return len(filter.AllowedPatterns) == 0
}
return true
}

// IsLoginFromAddrAllowed returns true if the login is allowed from the specified remoteAddr.
// If AllowedIP is defined only the specified IP/Mask can login.
// If DeniedIP is defined the specified IP/Mask cannot login.
@@ -711,6 +774,8 @@ func (u *User) getACopy() User {
copy(filters.DeniedLoginMethods, u.Filters.DeniedLoginMethods)
filters.FileExtensions = make([]ExtensionsFilter, len(u.Filters.FileExtensions))
copy(filters.FileExtensions, u.Filters.FileExtensions)
filters.FilePatterns = make([]PatternsFilter, len(u.Filters.FilePatterns))
copy(filters.FilePatterns, u.Filters.FilePatterns)
filters.DeniedProtocols = make([]string, len(u.Filters.DeniedProtocols))
copy(filters.DeniedProtocols, u.Filters.DeniedProtocols)
fsConfig := Filesystem{
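To illustrate the matching rules implemented by `isFilePatternAllowed` above, here is a small, self-contained sketch assuming the same semantics: patterns are matched against the lowercased base name, denied patterns are checked first, and an empty allowed list means anything not denied is accepted. The helper name is invented for the example:

```go
package main

import (
	"fmt"
	"path"
	"strings"
)

// allowedByPatterns is a simplified stand-in for isFilePatternAllowed,
// evaluating a single filter against a file name.
func allowedByPatterns(name string, denied, allowed []string) bool {
	toMatch := strings.ToLower(path.Base(name))
	for _, d := range denied {
		if matched, err := path.Match(d, toMatch); err != nil || matched {
			return false // denied patterns are evaluated before the allowed ones
		}
	}
	for _, a := range allowed {
		if matched, err := path.Match(a, toMatch); err == nil && matched {
			return true
		}
	}
	return len(allowed) == 0 // no allowed list: everything not denied is allowed
}

func main() {
	denied := []string{"*.zip"}
	allowed := []string{"*.jpg", "a*b?.png"}
	fmt.Println(allowedByPatterns("/photos/Holiday.JPG", denied, allowed)) // true
	fmt.Println(allowedByPatterns("/photos/archive.zip", denied, allowed)) // false
	fmt.Println(allowedByPatterns("/photos/notes.txt", denied, allowed))   // false
}
```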
@@ -41,10 +41,14 @@ For each account, the following properties can be configured:
  - `SSH`
  - `FTP`
  - `DAV`
- `file_extensions`, list of struct. These restrictions do not apply to files listing for performance reasons, so a denied file cannot be downloaded/overwritten/renamed but it will still be listed in the list of files. Please note that these restrictions can be easily bypassed. Each struct contains the following fields:
  - `allowed_extensions`, list of, case insensitive, allowed files extension. Shell like expansion is not supported so you have to specify `.jpg` and not `*.jpg`. Any file that does not end with this suffix will be denied
  - `denied_extensions`, list of, case insensitive, denied files extension. Denied file extensions are evaluated before the allowed ones
  - `path`, SFTP/SCP path, if no other specific filter is defined, the filter apply for sub directories too. For example if filters are defined for the paths `/` and `/sub` then the filters for `/` are applied for any file outside the `/sub` directory
- `file_extensions`, list of struct. Deprecated, please use `file_patterns`. These restrictions do not apply to files listing for performance reasons, so a denied file cannot be downloaded/overwritten/renamed but it will still be in the list of files. Please note that these restrictions can be easily bypassed. Each struct contains the following fields:
  - `allowed_extensions`, list of, case insensitive, allowed file extensions. Shell like expansion is not supported so you have to specify `.jpg` and not `*.jpg`. Any file that does not end with this suffix will be denied
  - `denied_extensions`, list of, case insensitive, denied file extensions. Denied file extensions are evaluated before the allowed ones
  - `path`, exposed virtual path, if no other specific filter is defined, the filter apply for sub directories too. For example if filters are defined for the paths `/` and `/sub` then the filters for `/` are applied for any file outside the `/sub` directory
- `file_patterns`, list of struct. These restrictions do not apply to files listing for performance reasons, so a denied file cannot be downloaded/overwritten/renamed but it will still be in the list of files. Please note that these restrictions can be easily bypassed. For syntax details take a look [here](https://golang.org/pkg/path/#Match). Each struct contains the following fields:
  - `allowed_patterns`, list of, case insensitive, allowed file patterns. Examples: `*.jpg`, `a*b?.png`. Any non matching file will be denied
  - `denied_patterns`, list of, case insensitive, denied file patterns. Denied file patterns are evaluated before the allowed ones
  - `path`, exposed virtual path, if no other specific filter is defined, the filter apply for sub directories too. For example if filters are defined for the paths `/` and `/sub` then the filters for `/` are applied for any file outside the `/sub` directory
- `fs_provider`, filesystem to serve via SFTP. Local filesystem (0), S3 Compatible Object Storage (1), Google Cloud Storage (2) and Azure Blob Storage (3) are supported
- `s3_bucket`, required for S3 filesystem
- `s3_region`, required for S3 filesystem. Must match the region for your bucket. You can find here the list of available [AWS regions](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html#concepts-available-regions). For example if your bucket is at `Frankfurt` you have to set the region to `eu-central-1`
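As an illustration of the `file_patterns` entries described above, a filter limiting a directory to images could be built with the `PatternsFilter` type added by this change. The import path is assumed and the values are made up:

```go
package main

import (
	"fmt"

	"github.com/drakkan/sftpgo/dataprovider" // assumed import path for the package shown in this diff
)

func main() {
	// A user limited to images inside /photos; the filter also applies to
	// sub directories unless a more specific path filter is defined.
	filters := dataprovider.UserFilters{
		FilePatterns: []dataprovider.PatternsFilter{
			{
				Path:            "/photos",
				AllowedPatterns: []string{"*.jpg", "*.png"},
				DeniedPatterns:  []string{"*.zip"},
			},
		},
	}
	fmt.Printf("%+v\n", filters)
}
```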
@@ -15,90 +15,92 @@ Usage:
sftpgo portable [flags]

Flags:
-C, --advertise-credentials If the SFTP/FTP service is
advertised via multicast DNS, this
flag allows to put username/password
inside the advertised TXT record
-S, --advertise-service Advertise SFTP/FTP service using
multicast DNS
--allowed-extensions stringArray Allowed file extensions case
insensitive. The format is
/dir::ext1,ext2.
For example: "/somedir::.jpg,.png"
-C, --advertise-credentials If the SFTP/FTP service is
advertised via multicast DNS, this
flag allows to put username/password
inside the advertised TXT record
-S, --advertise-service Advertise SFTP/FTP service using
multicast DNS
--allowed-patterns stringArray Allowed file patterns case insensitive.
The format is:
/dir::pattern1,pattern2.
For example: "/somedir::*.jpg,a*b?.png"
--az-access-tier string Leave empty to use the default
container setting
--az-account-key string
--az-account-name string
--az-container string
--az-endpoint string Leave empty to use the default:
"blob.core.windows.net"
--az-key-prefix string Allows to restrict access to the
virtual folder identified by this
prefix and its contents
--az-sas-url string Shared access signature URL
--az-upload-concurrency int How many parts are uploaded in
parallel (default 2)
--az-upload-part-size int The buffer size for multipart uploads
(MB) (default 4)
--az-endpoint string Leave empty to use the default:
"blob.core.windows.net"
--az-key-prefix string Allows to restrict access to the
virtual folder identified by this
prefix and its contents
--az-sas-url string Shared access signature URL
--az-upload-concurrency int How many parts are uploaded in
parallel (default 2)
--az-upload-part-size int The buffer size for multipart uploads
(MB) (default 4)
--az-use-emulator
--denied-extensions stringArray Denied file extensions case
insensitive. The format is
/dir::ext1,ext2.
For example: "/somedir::.jpg,.png"
-d, --directory string Path to the directory to serve.
This can be an absolute path or a path
relative to the current directory
(default ".")
-f, --fs-provider int 0 => local filesystem
1 => AWS S3 compatible
2 => Google Cloud Storage
3 => Azure Blob Storage
--ftpd-cert string Path to the certificate file for FTPS
--ftpd-key string Path to the key file for FTPS
--ftpd-port int 0 means a random unprivileged port,
< 0 disabled (default -1)
--gcs-automatic-credentials int 0 means explicit credentials using
a JSON credentials file, 1 automatic
(default 1)
--denied-patterns stringArray Denied file patterns case insensitive.
The format is:
/dir::pattern1,pattern2.
For example: "/somedir::*.jpg,a*b?.png"
-d, --directory string Path to the directory to serve.
This can be an absolute path or a path
relative to the current directory
(default ".")
-f, --fs-provider int 0 => local filesystem
1 => AWS S3 compatible
2 => Google Cloud Storage
3 => Azure Blob Storage
--ftpd-cert string Path to the certificate file for FTPS
--ftpd-key string Path to the key file for FTPS
--ftpd-port int 0 means a random unprivileged port,
< 0 disabled (default -1)
--gcs-automatic-credentials int 0 means explicit credentials using
a JSON credentials file, 1 automatic
(default 1)
--gcs-bucket string
--gcs-credentials-file string Google Cloud Storage JSON credentials
file
--gcs-key-prefix string Allows to restrict access to the
virtual folder identified by this
prefix and its contents
--gcs-credentials-file string Google Cloud Storage JSON credentials
file
--gcs-key-prefix string Allows to restrict access to the
virtual folder identified by this
prefix and its contents
--gcs-storage-class string
-h, --help help for portable
-l, --log-file-path string Leave empty to disable logging
-v, --log-verbose Enable verbose logs
-p, --password string Leave empty to use an auto generated
value
-g, --permissions strings User's permissions. "*" means any
permission (default [list,download])
-h, --help help for portable
-l, --log-file-path string Leave empty to disable logging
-v, --log-verbose Enable verbose logs
-p, --password string Leave empty to use an auto generated
value
-g, --permissions strings User's permissions. "*" means any
permission (default [list,download])
-k, --public-key strings
--s3-access-key string
--s3-access-secret string
--s3-bucket string
--s3-endpoint string
--s3-key-prefix string Allows to restrict access to the
virtual folder identified by this
prefix and its contents
--s3-key-prefix string Allows to restrict access to the
virtual folder identified by this
prefix and its contents
--s3-region string
--s3-storage-class string
--s3-upload-concurrency int How many parts are uploaded in
parallel (default 2)
--s3-upload-part-size int The buffer size for multipart uploads
(MB) (default 5)
-s, --sftpd-port int 0 means a random unprivileged port
-c, --ssh-commands strings SSH commands to enable.
"*" means any supported SSH command
including scp
(default [md5sum,sha1sum,cd,pwd,scp])
-u, --username string Leave empty to use an auto generated
value
--webdav-cert string Path to the certificate file for WebDAV
over HTTPS
--webdav-key string Path to the key file for WebDAV over
HTTPS
--webdav-port int 0 means a random unprivileged port,
< 0 disabled (default -1)
--s3-upload-concurrency int How many parts are uploaded in
parallel (default 2)
--s3-upload-part-size int The buffer size for multipart uploads
(MB) (default 5)
-s, --sftpd-port int 0 means a random unprivileged port
-c, --ssh-commands strings SSH commands to enable.
"*" means any supported SSH command
including scp
(default [md5sum,sha1sum,cd,pwd,scp])
-u, --username string Leave empty to use an auto generated
value
--webdav-cert string Path to the certificate file for WebDAV
over HTTPS
--webdav-key string Path to the key file for WebDAV over
HTTPS
--webdav-port int 0 means a random unprivileged port,
< 0 disabled (default -1)
```

In portable mode, SFTPGo can advertise the SFTP/FTP services and, optionally, the credentials via multicast DNS, so there is a standard way to discover the service and to automatically connect to it.
@@ -44,7 +44,7 @@ Let's see a sample usage for each REST API.
Command:

```console
python sftpgo_api_cli add-user test_username --password "test_pwd" --home-dir="/tmp/test_home_dir" --uid 33 --gid 1000 --max-sessions 2 --quota-size 0 --quota-files 3 --permissions "list" "download" "upload" "delete" "rename" "create_dirs" "overwrite" --subdirs-permissions "/dir1::list,download" "/dir2::*" --upload-bandwidth 100 --download-bandwidth 60 --status 0 --expiration-date 2019-01-01 --allowed-ip "192.168.1.1/32" --fs S3 --s3-bucket test --s3-region eu-west-1 --s3-access-key accesskey --s3-access-secret secret --s3-endpoint "http://127.0.0.1:9000" --s3-storage-class Standard --s3-key-prefix "vfolder/" --s3-upload-part-size 10 --s3-upload-concurrency 4 --denied-login-methods "password" "keyboard-interactive" --allowed-extensions "/dir1::.jpg,.png" "/dir2::.rar,.png" --denied-extensions "/dir3::.zip,.rar" --denied-protocols DAV FTP
python sftpgo_api_cli add-user test_username --password "test_pwd" --home-dir="/tmp/test_home_dir" --uid 33 --gid 1000 --max-sessions 2 --quota-size 0 --quota-files 3 --permissions "list" "download" "upload" "delete" "rename" "create_dirs" "overwrite" --subdirs-permissions "/dir1::list,download" "/dir2::*" --upload-bandwidth 100 --download-bandwidth 60 --status 0 --expiration-date 2019-01-01 --allowed-ip "192.168.1.1/32" --fs S3 --s3-bucket test --s3-region eu-west-1 --s3-access-key accesskey --s3-access-secret secret --s3-endpoint "http://127.0.0.1:9000" --s3-storage-class Standard --s3-key-prefix "vfolder/" --s3-upload-part-size 10 --s3-upload-concurrency 4 --denied-login-methods "password" "keyboard-interactive" --allowed-patterns "/dir1::*.jpg,*.png" "/dir2::*.rar,*.png" --denied-patterns "/dir3::*.zip,*.rar" --denied-protocols DAV FTP
```

Output:
@@ -80,25 +80,25 @@ Output:
"DAV",
"FTP"
],
"file_extensions": [
"file_patterns": [
{
"allowed_extensions": [
".jpg",
".png"
"allowed_patterns": [
"*.jpg",
"*.png"
],
"path": "/dir1"
},
{
"allowed_extensions": [
".rar",
".png"
"allowed_patterns": [
"*.rar",
"*.png"
],
"path": "/dir2"
},
{
"denied_extensions": [
".zip",
".rar"
"denied_patterns": [
"*.zip",
"*.rar"
],
"path": "/dir3"
}
@@ -144,7 +144,7 @@ Output:
Command:

```console
python sftpgo_api_cli update-user 9576 test_username --password "test_pwd" --home-dir="/tmp/test_home_dir" --uid 0 --gid 33 --max-sessions 3 --quota-size 0 --quota-files 4 --permissions "*" --subdirs-permissions "/dir1::list,download,create_symlinks" --upload-bandwidth 90 --download-bandwidth 80 --status 1 --expiration-date "" --allowed-ip "" --denied-ip "192.168.1.0/24" --denied-login-methods "" --fs local --virtual-folders "/vdir1::/tmp/mapped1::-1::-1" "/vdir2::/tmp/mapped2::100::104857600" --allowed-extensions "" --denied-extensions "" --max-upload-file-size 104857600 --denied-protocols ""
python sftpgo_api_cli update-user 9576 test_username --password "test_pwd" --home-dir="/tmp/test_home_dir" --uid 0 --gid 33 --max-sessions 3 --quota-size 0 --quota-files 4 --permissions "*" --subdirs-permissions "/dir1::list,download,create_symlinks" --upload-bandwidth 90 --download-bandwidth 80 --status 1 --expiration-date "" --allowed-ip "" --denied-ip "192.168.1.0/24" --denied-login-methods "" --fs local --virtual-folders "/vdir1::/tmp/mapped1::-1::-1" "/vdir2::/tmp/mapped2::100::104857600" --allowed-patterns "" --denied-patterns "" --max-upload-file-size 104857600 --denied-protocols ""
```

Output:
@@ -182,29 +182,6 @@ Output:
"denied_ip": [
"192.168.1.0/24"
],
"file_extensions": [
{
"allowed_extensions": [
".jpg",
".png"
],
"path": "/dir1"
},
{
"allowed_extensions": [
".rar",
".png"
],
"path": "/dir2"
},
{
"denied_extensions": [
".zip",
".rar"
],
"path": "/dir3"
}
],
"max_upload_file_size": 104857600
},
"gid": 33,
@@ -279,7 +256,8 @@ Output:
"filters": {
"denied_ip": [
"192.168.1.0/24"
]
],
"max_upload_file_size": 104857600
},
"gid": 33,
"home_dir": "/tmp/test_home_dir",
@@ -81,7 +81,7 @@ class SFTPGoApiRequests:
s3_region='', s3_access_key='', s3_access_secret='', s3_endpoint='', s3_storage_class='',
s3_key_prefix='', gcs_bucket='', gcs_key_prefix='', gcs_storage_class='', gcs_credentials_file='',
gcs_automatic_credentials='automatic', denied_login_methods=[], virtual_folders=[],
denied_extensions=[], allowed_extensions=[], s3_upload_part_size=0, s3_upload_concurrency=0,
denied_patterns=[], allowed_patterns=[], s3_upload_part_size=0, s3_upload_concurrency=0,
max_upload_file_size=0, denied_protocols=[], az_container='', az_account_name='', az_account_key='',
az_sas_url='', az_endpoint='', az_upload_part_size=0, az_upload_concurrency=0, az_key_prefix='',
az_use_emulator=False, az_access_tier=''):
@@ -103,8 +103,8 @@ class SFTPGoApiRequests:
if virtual_folders:
user.update({'virtual_folders':self.buildVirtualFolders(virtual_folders)})

user.update({'filters':self.buildFilters(allowed_ip, denied_ip, denied_login_methods, denied_extensions,
allowed_extensions, max_upload_file_size, denied_protocols)})
user.update({'filters':self.buildFilters(allowed_ip, denied_ip, denied_login_methods, denied_patterns,
allowed_patterns, max_upload_file_size, denied_protocols)})
user.update({'filesystem':self.buildFsConfig(fs_provider, s3_bucket, s3_region, s3_access_key, s3_access_secret,
s3_endpoint, s3_storage_class, s3_key_prefix, gcs_bucket,
gcs_key_prefix, gcs_storage_class, gcs_credentials_file,
@@ -158,7 +158,7 @@ class SFTPGoApiRequests:
permissions.update({directory:values})
return permissions

def buildFilters(self, allowed_ip, denied_ip, denied_login_methods, denied_extensions, allowed_extensions,
def buildFilters(self, allowed_ip, denied_ip, denied_login_methods, denied_patterns, allowed_patterns,
max_upload_file_size, denied_protocols):
filters = {"max_upload_file_size":max_upload_file_size}
if allowed_ip:
@@ -181,11 +181,11 @@ class SFTPGoApiRequests:
filters.update({'denied_protocols':[]})
else:
filters.update({'denied_protocols':denied_protocols})
extensions_filter = []
extensions_denied = []
extensions_allowed = []
if denied_extensions:
for e in denied_extensions:
patterns_filter = []
patterns_denied = []
patterns_allowed = []
if denied_patterns:
for e in denied_patterns:
if '::' in e:
directory = None
values = []
@@ -195,10 +195,10 @@ class SFTPGoApiRequests:
else:
values = [v.strip() for v in value.split(',') if v.strip()]
if directory:
extensions_denied.append({'path':directory, 'denied_extensions':values,
'allowed_extensions':[]})
if allowed_extensions:
for e in allowed_extensions:
patterns_denied.append({'path':directory, 'denied_patterns':values,
'allowed_patterns':[]})
if allowed_patterns:
for e in allowed_patterns:
if '::' in e:
directory = None
values = []
@@ -208,27 +208,27 @@ class SFTPGoApiRequests:
else:
values = [v.strip() for v in value.split(',') if v.strip()]
if directory:
extensions_allowed.append({'path':directory, 'allowed_extensions':values,
'denied_extensions':[]})
if extensions_allowed and extensions_denied:
for allowed in extensions_allowed:
for denied in extensions_denied:
patterns_allowed.append({'path':directory, 'allowed_patterns':values,
'denied_patterns':[]})
if patterns_allowed and patterns_denied:
for allowed in patterns_allowed:
for denied in patterns_denied:
if allowed.get('path') == denied.get('path'):
allowed.update({'denied_extensions':denied.get('denied_extensions')})
extensions_filter.append(allowed)
for denied in extensions_denied:
allowed.update({'denied_patterns':denied.get('denied_patterns')})
patterns_filter.append(allowed)
for denied in patterns_denied:
found = False
for allowed in extensions_allowed:
for allowed in patterns_allowed:
if allowed.get('path') == denied.get('path'):
found = True
if not found:
extensions_filter.append(denied)
elif extensions_allowed:
extensions_filter = extensions_allowed
elif extensions_denied:
extensions_filter = extensions_denied
if allowed_extensions or denied_extensions:
filters.update({'file_extensions':extensions_filter})
patterns_filter.append(denied)
elif patterns_allowed:
patterns_filter = patterns_allowed
elif patterns_denied:
patterns_filter = patterns_denied
if allowed_patterns or denied_patterns:
filters.update({'file_patterns':patterns_filter})
return filters

def buildFsConfig(self, fs_provider, s3_bucket, s3_region, s3_access_key, s3_access_secret, s3_endpoint,
@@ -275,7 +275,7 @@ class SFTPGoApiRequests:
subdirs_permissions=[], allowed_ip=[], denied_ip=[], fs_provider='local', s3_bucket='', s3_region='',
s3_access_key='', s3_access_secret='', s3_endpoint='', s3_storage_class='', s3_key_prefix='', gcs_bucket='',
gcs_key_prefix='', gcs_storage_class='', gcs_credentials_file='', gcs_automatic_credentials='automatic',
denied_login_methods=[], virtual_folders=[], denied_extensions=[], allowed_extensions=[],
denied_login_methods=[], virtual_folders=[], denied_patterns=[], allowed_patterns=[],
s3_upload_part_size=0, s3_upload_concurrency=0, max_upload_file_size=0, denied_protocols=[], az_container="",
az_account_name='', az_account_key='', az_sas_url='', az_endpoint='', az_upload_part_size=0,
az_upload_concurrency=0, az_key_prefix='', az_use_emulator=False, az_access_tier=''):
@@ -283,8 +283,8 @@ class SFTPGoApiRequests:
quota_size, quota_files, self.buildPermissions(perms, subdirs_permissions), upload_bandwidth, download_bandwidth,
status, expiration_date, allowed_ip, denied_ip, fs_provider, s3_bucket, s3_region, s3_access_key,
s3_access_secret, s3_endpoint, s3_storage_class, s3_key_prefix, gcs_bucket, gcs_key_prefix, gcs_storage_class,
gcs_credentials_file, gcs_automatic_credentials, denied_login_methods, virtual_folders, denied_extensions,
allowed_extensions, s3_upload_part_size, s3_upload_concurrency, max_upload_file_size, denied_protocols,
gcs_credentials_file, gcs_automatic_credentials, denied_login_methods, virtual_folders, denied_patterns,
allowed_patterns, s3_upload_part_size, s3_upload_concurrency, max_upload_file_size, denied_protocols,
az_container, az_account_name, az_account_key, az_sas_url, az_endpoint, az_upload_part_size,
az_upload_concurrency, az_key_prefix, az_use_emulator, az_access_tier)
r = requests.post(self.userPath, json=u, auth=self.auth, verify=self.verify)
@@ -295,8 +295,8 @@ class SFTPGoApiRequests:
expiration_date=0, subdirs_permissions=[], allowed_ip=[], denied_ip=[], fs_provider='local',
s3_bucket='', s3_region='', s3_access_key='', s3_access_secret='', s3_endpoint='', s3_storage_class='',
s3_key_prefix='', gcs_bucket='', gcs_key_prefix='', gcs_storage_class='', gcs_credentials_file='',
gcs_automatic_credentials='automatic', denied_login_methods=[], virtual_folders=[], denied_extensions=[],
allowed_extensions=[], s3_upload_part_size=0, s3_upload_concurrency=0, max_upload_file_size=0,
gcs_automatic_credentials='automatic', denied_login_methods=[], virtual_folders=[], denied_patterns=[],
allowed_patterns=[], s3_upload_part_size=0, s3_upload_concurrency=0, max_upload_file_size=0,
denied_protocols=[], disconnect=0, az_container='', az_account_name='', az_account_key='', az_sas_url='',
az_endpoint='', az_upload_part_size=0, az_upload_concurrency=0, az_key_prefix='', az_use_emulator=False,
az_access_tier=''):
@@ -304,8 +304,8 @@ class SFTPGoApiRequests:
quota_size, quota_files, self.buildPermissions(perms, subdirs_permissions), upload_bandwidth, download_bandwidth,
status, expiration_date, allowed_ip, denied_ip, fs_provider, s3_bucket, s3_region, s3_access_key,
s3_access_secret, s3_endpoint, s3_storage_class, s3_key_prefix, gcs_bucket, gcs_key_prefix, gcs_storage_class,
gcs_credentials_file, gcs_automatic_credentials, denied_login_methods, virtual_folders, denied_extensions,
allowed_extensions, s3_upload_part_size, s3_upload_concurrency, max_upload_file_size, denied_protocols,
gcs_credentials_file, gcs_automatic_credentials, denied_login_methods, virtual_folders, denied_patterns,
allowed_patterns, s3_upload_part_size, s3_upload_concurrency, max_upload_file_size, denied_protocols,
az_container, az_account_name, az_account_key, az_sas_url, az_endpoint, az_upload_part_size,
az_upload_concurrency, az_key_prefix, az_use_emulator, az_access_tier)
r = requests.put(urlparse.urljoin(self.userPath, 'user/' + str(user_id)), params={'disconnect':disconnect},
@@ -607,12 +607,12 @@ def addCommonUserArguments(parser):
help='Allowed IP/Mask in CIDR notation. For example "192.168.2.0/24" or "2001:db8::/32". Default: %(default)s')
parser.add_argument('-N', '--denied-ip', type=str, nargs='+', default=[],
help='Denied IP/Mask in CIDR notation. For example "192.168.2.0/24" or "2001:db8::/32". Default: %(default)s')
parser.add_argument('--denied-extensions', type=str, nargs='*', default=[], help='Denied file extensions case insensitive. '
+'The format is /dir::ext1,ext2. For example: "/somedir::.jpg,.png" "/otherdir/subdir::.zip,.rar". ' +
'You have to set both denied and allowed extensions to update existing values or none to preserve them.' +
' If you only set allowed or denied extensions the missing one is assumed to be an empty list. Default: %(default)s')
parser.add_argument('--allowed-extensions', type=str, nargs='*', default=[], help='Allowed file extensions case insensitive. '
+'The format is /dir::ext1,ext2. For example: "/somedir::.jpg,.png" "/otherdir/subdir::.zip,.rar". ' +
parser.add_argument('--denied-patterns', type=str, nargs='*', default=[], help='Denied file patterns case insensitive. '
+'The format is /dir::pattern1,pattern2. For example: "/somedir::*.jpg,*.png" "/otherdir/subdir::a*b?.zip,*.rar". ' +
'You have to set both denied and allowed patterns to update existing values or none to preserve them.' +
' If you only set allowed or denied patterns the missing one is assumed to be an empty list. Default: %(default)s')
parser.add_argument('--allowed-patterns', type=str, nargs='*', default=[], help='Allowed file patterns case insensitive. '
+'The format is /dir::pattern1,pattern2. For example: "/somedir::*.jpg,a*b?.png" "/otherdir/subdir::*.zip,*.rar". ' +
'Default: %(default)s')
parser.add_argument('--fs', type=str, default='local', choices=['local', 'S3', 'GCS', "AzureBlob"],
help='Filesystem provider. Default: %(default)s')
@@ -804,7 +804,7 @@ if __name__ == '__main__':
args.denied_ip, args.fs, args.s3_bucket, args.s3_region, args.s3_access_key, args.s3_access_secret,
args.s3_endpoint, args.s3_storage_class, args.s3_key_prefix, args.gcs_bucket, args.gcs_key_prefix,
args.gcs_storage_class, args.gcs_credentials_file, args.gcs_automatic_credentials,
args.denied_login_methods, args.virtual_folders, args.denied_extensions, args.allowed_extensions,
args.denied_login_methods, args.virtual_folders, args.denied_patterns, args.allowed_patterns,
args.s3_upload_part_size, args.s3_upload_concurrency, args.max_upload_file_size, args.denied_protocols,
args.az_container, args.az_account_name, args.az_account_key, args.az_sas_url, args.az_endpoint,
args.az_upload_part_size, args.az_upload_concurrency, args.az_key_prefix, args.az_use_emulator,
@@ -817,7 +817,7 @@ if __name__ == '__main__':
args.s3_access_key, args.s3_access_secret, args.s3_endpoint, args.s3_storage_class,
args.s3_key_prefix, args.gcs_bucket, args.gcs_key_prefix, args.gcs_storage_class,
args.gcs_credentials_file, args.gcs_automatic_credentials, args.denied_login_methods,
args.virtual_folders, args.denied_extensions, args.allowed_extensions, args.s3_upload_part_size,
args.virtual_folders, args.denied_patterns, args.allowed_patterns, args.s3_upload_part_size,
args.s3_upload_concurrency, args.max_upload_file_size, args.denied_protocols, args.disconnect,
args.az_container, args.az_account_name, args.az_account_key, args.az_sas_url, args.az_endpoint,
args.az_upload_part_size, args.az_upload_concurrency, args.az_key_prefix, args.az_use_emulator,
@@ -519,12 +519,20 @@ func TestDownloadErrors(t *testing.T) {
DeniedExtensions: []string{".zip"},
},
}
u.Filters.FilePatterns = []dataprovider.PatternsFilter{
{
Path: "/sub2",
AllowedPatterns: []string{},
DeniedPatterns: []string{"*.jpg"},
},
}
user, _, err := httpd.AddUser(u, http.StatusOK)
assert.NoError(t, err)
client, err := getFTPClient(user, true)
if assert.NoError(t, err) {
testFilePath1 := filepath.Join(user.HomeDir, subDir1, "file.zip")
testFilePath2 := filepath.Join(user.HomeDir, subDir2, "file.zip")
testFilePath3 := filepath.Join(user.HomeDir, subDir2, "file.jpg")
err = os.MkdirAll(filepath.Dir(testFilePath1), os.ModePerm)
assert.NoError(t, err)
err = os.MkdirAll(filepath.Dir(testFilePath2), os.ModePerm)
@@ -533,11 +541,15 @@ func TestDownloadErrors(t *testing.T) {
assert.NoError(t, err)
err = ioutil.WriteFile(testFilePath2, []byte("file2"), os.ModePerm)
assert.NoError(t, err)
err = ioutil.WriteFile(testFilePath3, []byte("file3"), os.ModePerm)
assert.NoError(t, err)
localDownloadPath := filepath.Join(homeBasePath, testDLFileName)
err = ftpDownloadFile(path.Join("/", subDir1, "file.zip"), localDownloadPath, 5, client, 0)
assert.Error(t, err)
err = ftpDownloadFile(path.Join("/", subDir2, "file.zip"), localDownloadPath, 5, client, 0)
assert.Error(t, err)
err = ftpDownloadFile(path.Join("/", subDir2, "file.jpg"), localDownloadPath, 5, client, 0)
assert.Error(t, err)
err = ftpDownloadFile("/missing.zip", localDownloadPath, 5, client, 0)
assert.Error(t, err)
err = client.Quit()
@@ -774,6 +774,40 @@ func compareUserFilters(expected *dataprovider.User, actual *dataprovider.User)
if err := compareUserFileExtensionsFilters(expected, actual); err != nil {
return err
}
return compareUserFilePatternsFilters(expected, actual)
}

func checkFilterMatch(expected []string, actual []string) bool {
if len(expected) != len(actual) {
return false
}
for _, e := range expected {
if !utils.IsStringInSlice(strings.ToLower(e), actual) {
return false
}
}
return true
}

func compareUserFilePatternsFilters(expected *dataprovider.User, actual *dataprovider.User) error {
if len(expected.Filters.FilePatterns) != len(actual.Filters.FilePatterns) {
return errors.New("file patterns mismatch")
}
for _, f := range expected.Filters.FilePatterns {
found := false
for _, f1 := range actual.Filters.FilePatterns {
if path.Clean(f.Path) == path.Clean(f1.Path) {
if !checkFilterMatch(f.AllowedPatterns, f1.AllowedPatterns) ||
!checkFilterMatch(f.DeniedPatterns, f1.DeniedPatterns) {
return errors.New("file patterns contents mismatch")
}
found = true
}
}
if !found {
return errors.New("file patterns contents mismatch")
}
}
return nil
}

@@ -785,19 +819,10 @@ func compareUserFileExtensionsFilters(expected *dataprovider.User, actual *datap
found := false
for _, f1 := range actual.Filters.FileExtensions {
if path.Clean(f.Path) == path.Clean(f1.Path) {
if len(f.AllowedExtensions) != len(f1.AllowedExtensions) || len(f.DeniedExtensions) != len(f1.DeniedExtensions) {
if !checkFilterMatch(f.AllowedExtensions, f1.AllowedExtensions) ||
!checkFilterMatch(f.DeniedExtensions, f1.DeniedExtensions) {
return errors.New("file extensions contents mismatch")
}
for _, e := range f.AllowedExtensions {
if !utils.IsStringInSlice(e, f1.AllowedExtensions) {
return errors.New("file extensions contents mismatch")
}
}
for _, e := range f.DeniedExtensions {
if !utils.IsStringInSlice(e, f1.DeniedExtensions) {
return errors.New("file extensions contents mismatch")
}
}
found = true
}
}
@@ -378,6 +378,45 @@ func TestAddUserInvalidFilters(t *testing.T) {
_, _, err = httpd.AddUser(u, http.StatusBadRequest)
assert.NoError(t, err)
u.Filters.FileExtensions = nil
u.Filters.FilePatterns = []dataprovider.PatternsFilter{
{
Path: "relative",
AllowedPatterns: []string{},
DeniedPatterns: []string{},
},
}
_, _, err = httpd.AddUser(u, http.StatusBadRequest)
assert.NoError(t, err)
u.Filters.FilePatterns = []dataprovider.PatternsFilter{
{
Path: "/",
AllowedPatterns: []string{},
DeniedPatterns: []string{},
},
}
_, _, err = httpd.AddUser(u, http.StatusBadRequest)
assert.NoError(t, err)
u.Filters.FilePatterns = []dataprovider.PatternsFilter{
{
Path: "/subdir",
AllowedPatterns: []string{"*.zip"},
},
{
Path: "/subdir",
AllowedPatterns: []string{"*.rar"},
DeniedPatterns: []string{"*.jpg"},
},
}
_, _, err = httpd.AddUser(u, http.StatusBadRequest)
assert.NoError(t, err)
u.Filters.FilePatterns = []dataprovider.PatternsFilter{
{
Path: "/subdir",
AllowedPatterns: []string{"a\\"},
},
}
_, _, err = httpd.AddUser(u, http.StatusBadRequest)
assert.NoError(t, err)
u.Filters.DeniedProtocols = []string{"invalid"}
_, _, err = httpd.AddUser(u, http.StatusBadRequest)
assert.NoError(t, err)
@@ -689,6 +728,11 @@ func TestUpdateUser(t *testing.T) {
AllowedExtensions: []string{".zip", ".rar"},
DeniedExtensions: []string{".jpg", ".png"},
})
user.Filters.FilePatterns = append(user.Filters.FilePatterns, dataprovider.PatternsFilter{
Path: "/subdir",
AllowedPatterns: []string{"*.zip", "*.rar"},
DeniedPatterns: []string{"*.jpg", "*.png"},
})
user.Filters.MaxUploadFileSize = 4096
user.UploadBandwidth = 1024
user.DownloadBandwidth = 512
@@ -2411,8 +2455,10 @@ func TestWebUserAddMock(t *testing.T) {
form.Set("permissions", "*")
form.Set("sub_dirs_permissions", " /subdir::list ,download ")
form.Set("virtual_folders", fmt.Sprintf(" /vdir:: %v :: 2 :: 1024", mappedDir))
form.Set("allowed_extensions", "/dir1::.jpg,.png")
form.Set("allowed_extensions", "/dir2::.jpg,.png\n/dir2::.ico")
form.Set("denied_extensions", "/dir1::.zip")
form.Set("allowed_patterns", "/dir2::*.jpg,*.png")
form.Set("denied_patterns", "/dir1::*.zip")
b, contentType, _ := getMultipartFormData(form, "", "")
// test invalid url escape
req, _ := http.NewRequest(http.MethodPost, webUserPath+"?a=%2", &b)
@@ -2546,8 +2592,27 @@ func TestWebUserAddMock(t *testing.T) {
assert.Equal(t, v.QuotaFiles, 2)
assert.Equal(t, v.QuotaSize, int64(1024))
}
extFilters := newUser.Filters.FileExtensions[0]
assert.True(t, utils.IsStringInSlice(".zip", extFilters.DeniedExtensions))
assert.Len(t, newUser.Filters.FileExtensions, 2)
for _, filter := range newUser.Filters.FileExtensions {
if filter.Path == "/dir1" {
assert.True(t, utils.IsStringInSlice(".zip", filter.DeniedExtensions))
}
if filter.Path == "/dir2" {
assert.True(t, utils.IsStringInSlice(".jpg", filter.AllowedExtensions))
assert.True(t, utils.IsStringInSlice(".png", filter.AllowedExtensions))
assert.True(t, utils.IsStringInSlice(".ico", filter.AllowedExtensions))
}
}
assert.Len(t, newUser.Filters.FilePatterns, 2)
for _, filter := range newUser.Filters.FilePatterns {
if filter.Path == "/dir1" {
assert.True(t, utils.IsStringInSlice("*.zip", filter.DeniedPatterns))
}
if filter.Path == "/dir2" {
assert.True(t, utils.IsStringInSlice("*.jpg", filter.AllowedPatterns))
assert.True(t, utils.IsStringInSlice("*.png", filter.AllowedPatterns))
}
}
req, _ = http.NewRequest(http.MethodDelete, userPath+"/"+strconv.FormatInt(newUser.ID, 10), nil)
rr = executeRequest(req)
checkResponseCode(t, http.StatusOK, rr.Code)
@@ -218,6 +218,45 @@ func TestCompareUserFilters(t *testing.T) {
}
err = checkUser(expected, actual)
assert.Error(t, err)
actual.Filters.FileExtensions = nil
actual.Filters.FilePatterns = nil
expected.Filters.FileExtensions = nil
expected.Filters.FilePatterns = nil
expected.Filters.FilePatterns = append(expected.Filters.FilePatterns, dataprovider.PatternsFilter{
Path: "/",
AllowedPatterns: []string{"*.jpg", "*.png"},
DeniedPatterns: []string{"*.zip", "*.rar"},
})
err = checkUser(expected, actual)
assert.Error(t, err)
actual.Filters.FilePatterns = append(actual.Filters.FilePatterns, dataprovider.PatternsFilter{
Path: "/sub",
AllowedPatterns: []string{"*.jpg", "*.png"},
DeniedPatterns: []string{"*.zip", "*.rar"},
})
err = checkUser(expected, actual)
assert.Error(t, err)
actual.Filters.FilePatterns[0] = dataprovider.PatternsFilter{
Path: "/",
AllowedPatterns: []string{"*.jpg"},
DeniedPatterns: []string{"*.zip", "*.rar"},
}
err = checkUser(expected, actual)
assert.Error(t, err)
actual.Filters.FilePatterns[0] = dataprovider.PatternsFilter{
Path: "/",
AllowedPatterns: []string{"*.tiff", "*.png"},
DeniedPatterns: []string{"*.zip", "*.rar"},
}
err = checkUser(expected, actual)
assert.Error(t, err)
actual.Filters.FilePatterns[0] = dataprovider.PatternsFilter{
Path: "/",
AllowedPatterns: []string{"*.jpg", "*.png"},
DeniedPatterns: []string{"*.tar.gz", "*.rar"},
}
err = checkUser(expected, actual)
assert.Error(t, err)
}

func TestCompareUserFields(t *testing.T) {
@ -2,7 +2,7 @@ openapi: 3.0.3
|
|||
info:
|
||||
title: SFTPGo
|
||||
description: 'SFTPGo REST API'
|
||||
version: 2.0.2
|
||||
version: 2.0.3
|
||||
|
||||
servers:
|
||||
- url: /api/v1
|
||||
|
@ -869,12 +869,32 @@ components:
        - 'SSH'
        - 'FTP'
        - 'DAV'
    PatternsFilter:
      type: object
      properties:
        path:
          type: string
          description: exposed virtual path, if no other specific filter is defined, the filter applies to sub directories too. For example, if filters are defined for the paths "/" and "/sub", then the filters for "/" are applied to any file outside the "/sub" directory
        allowed_patterns:
          type: array
          items:
            type: string
          nullable: true
          description: case insensitive list of allowed shell like file patterns.
          example: [ "*.jpg", "a*b?.png" ]
        denied_patterns:
          type: array
          items:
            type: string
          nullable: true
          description: case insensitive list of denied shell like file patterns. Denied patterns are evaluated before the allowed ones
          example: [ "*.zip" ]
    ExtensionsFilter:
      type: object
      properties:
        path:
          type: string
          description: exposed SFTPGo path, if no other specific filter is defined, the filter applies to sub directories too. For example, if filters are defined for the paths "/" and "/sub", then the filters for "/" are applied to any file outside the "/sub" directory
          description: exposed virtual path, if no other specific filter is defined, the filter applies to sub directories too. For example, if filters are defined for the paths "/" and "/sub", then the filters for "/" are applied to any file outside the "/sub" directory
        allowed_extensions:
          type: array
          items:
@ -918,12 +938,18 @@ components:
            $ref: '#/components/schemas/SupportedProtocols'
          nullable: true
          description: if null or empty any available protocol is allowed
        file_patterns:
          type: array
          items:
            $ref: '#/components/schemas/PatternsFilter'
          nullable: true
          description: filters based on shell like file patterns. These restrictions do not apply to files listing for performance reasons, so a denied file cannot be downloaded/overwritten/renamed but it will still be in the list of files. Please note that these restrictions can be easily bypassed
        file_extensions:
          type: array
          items:
            $ref: '#/components/schemas/ExtensionsFilter'
          nullable: true
          description: filters based on file extensions. These restrictions do not apply to files listing for performance reasons, so a denied file cannot be downloaded/overwritten/renamed but it will still be listed in the list of files. Please note that these restrictions can be easily bypassed
          description: filters based on shell like patterns. Deprecated, use file_patterns. These restrictions do not apply to files listing for performance reasons, so a denied file cannot be downloaded/overwritten/renamed but it will still be in the list of files. Please note that these restrictions can be easily bypassed
        max_upload_file_size:
          type: integer
          format: int64
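A minimal, hedged illustration of the new file_patterns filter documented above: the stand-in Go types below only mirror the schema fields (path, allowed_patterns, denied_patterns), they are not the actual SFTPGo dataprovider structs, and the printed JSON is an example payload built from the schema's own examples.

package main

import (
    "encoding/json"
    "fmt"
)

// patternsFilter mirrors the PatternsFilter schema fields, for illustration only.
type patternsFilter struct {
    Path            string   `json:"path"`
    AllowedPatterns []string `json:"allowed_patterns,omitempty"`
    DeniedPatterns  []string `json:"denied_patterns,omitempty"`
}

// userFilters mirrors the filters object that now carries file_patterns.
type userFilters struct {
    FilePatterns []patternsFilter `json:"file_patterns,omitempty"`
}

func main() {
    filters := userFilters{
        FilePatterns: []patternsFilter{
            {
                Path:            "/",
                AllowedPatterns: []string{"*.jpg", "a*b?.png"},
                DeniedPatterns:  []string{"*.zip"},
            },
        },
    }
    out, _ := json.MarshalIndent(filters, "", "  ")
    fmt.Println(string(out)) // the kind of JSON body a client could send for the new filter
}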
httpd/web.go
@ -6,7 +6,6 @@ import (
    "html/template"
    "io/ioutil"
    "net/http"
    "path"
    "path/filepath"
    "strconv"
    "strings"
@ -308,8 +307,8 @@ func getSliceFromDelimitedValues(values, delimiter string) []string {
    return result
}

func getFileExtensionsFromPostField(value string, extesionsType int) []dataprovider.ExtensionsFilter {
    var result []dataprovider.ExtensionsFilter
func getListFromPostFields(value string) map[string][]string {
    result := make(map[string][]string)
    for _, cleaned := range getSliceFromDelimitedValues(value, "\n") {
        if strings.Contains(cleaned, "::") {
            dirExts := strings.Split(cleaned, "::")

@ -319,22 +318,16 @@ func getFileExtensionsFromPostField(value string, extesionsType int) []dataprovider.ExtensionsFilter {
            exts := []string{}
            for _, e := range strings.Split(dirExts[1], ",") {
                cleanedExt := strings.TrimSpace(e)
                if len(cleanedExt) > 0 {
                if cleanedExt != "" {
                    exts = append(exts, cleanedExt)
                }
            }
            if len(dir) > 0 {
                filter := dataprovider.ExtensionsFilter{
                    Path: dir,
                }
                if extesionsType == 1 {
                    filter.AllowedExtensions = exts
                    filter.DeniedExtensions = []string{}
            if dir != "" {
                if _, ok := result[dir]; ok {
                    result[dir] = append(result[dir], exts...)
                } else {
                    filter.DeniedExtensions = exts
                    filter.AllowedExtensions = []string{}
                    result[dir] = exts
                }
                result = append(result, filter)
            }
        }
    }
@ -342,6 +335,42 @@ func getFileExtensionsFromPostField(value string, extesionsType int) []dataprovider.ExtensionsFilter {
    return result
}

func getFilePatternsFromPostField(value string, extesionsType int) []dataprovider.PatternsFilter {
    var result []dataprovider.PatternsFilter
    for dir, values := range getListFromPostFields(value) {
        filter := dataprovider.PatternsFilter{
            Path: dir,
        }
        if extesionsType == 1 {
            filter.AllowedPatterns = values
            filter.DeniedPatterns = []string{}
        } else {
            filter.DeniedPatterns = values
            filter.AllowedPatterns = []string{}
        }
        result = append(result, filter)
    }
    return result
}

func getFileExtensionsFromPostField(value string, extesionsType int) []dataprovider.ExtensionsFilter {
    var result []dataprovider.ExtensionsFilter
    for dir, values := range getListFromPostFields(value) {
        filter := dataprovider.ExtensionsFilter{
            Path: dir,
        }
        if extesionsType == 1 {
            filter.AllowedExtensions = values
            filter.DeniedExtensions = []string{}
        } else {
            filter.DeniedExtensions = values
            filter.AllowedExtensions = []string{}
        }
        result = append(result, filter)
    }
    return result
}

func getFiltersFromUserPostFields(r *http.Request) dataprovider.UserFilters {
    var filters dataprovider.UserFilters
    filters.AllowedIP = getSliceFromDelimitedValues(r.Form.Get("allowed_ip"), ",")
@ -351,33 +380,16 @@ func getFiltersFromUserPostFields(r *http.Request) dataprovider.UserFilters {
    allowedExtensions := getFileExtensionsFromPostField(r.Form.Get("allowed_extensions"), 1)
    deniedExtensions := getFileExtensionsFromPostField(r.Form.Get("denied_extensions"), 2)
    extensions := []dataprovider.ExtensionsFilter{}
    if len(allowedExtensions) > 0 && len(deniedExtensions) > 0 {
        for _, allowed := range allowedExtensions {
            for _, denied := range deniedExtensions {
                if path.Clean(allowed.Path) == path.Clean(denied.Path) {
                    allowed.DeniedExtensions = append(allowed.DeniedExtensions, denied.DeniedExtensions...)
                }
            }
            extensions = append(extensions, allowed)
        }
        for _, denied := range deniedExtensions {
            found := false
            for _, allowed := range allowedExtensions {
                if path.Clean(denied.Path) == path.Clean(allowed.Path) {
                    found = true
                    break
                }
            }
            if !found {
                extensions = append(extensions, denied)
            }
        }
    } else if len(allowedExtensions) > 0 {
        extensions = append(extensions, allowedExtensions...)
    } else if len(deniedExtensions) > 0 {
        extensions = append(extensions, deniedExtensions...)
    }
    extensions = append(extensions, allowedExtensions...)
    extensions = append(extensions, deniedExtensions...)
    filters.FileExtensions = extensions
    allowedPatterns := getFilePatternsFromPostField(r.Form.Get("allowed_patterns"), 1)
    deniedPatterns := getFilePatternsFromPostField(r.Form.Get("denied_patterns"), 2)
    patterns := []dataprovider.PatternsFilter{}
    patterns = append(patterns, allowedPatterns...)
    patterns = append(patterns, deniedPatterns...)
    filters.FilePatterns = patterns

    return filters
}
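The parsing in the hunks above reads one "dir::value1,value2" entry per line from the admin web form textareas and groups the values per directory. A small standalone sketch of that format follows; it is an illustration only, not the handler code above, and the sample strings are made up.

package main

import (
    "fmt"
    "strings"
)

// parseDirValues groups comma separated values per directory, one
// "dir::value1,value2" entry per line, mirroring what getListFromPostFields
// does for the allowed_patterns/denied_patterns form fields.
func parseDirValues(value string) map[string][]string {
    result := make(map[string][]string)
    for _, line := range strings.Split(value, "\n") {
        line = strings.TrimSpace(line)
        if !strings.Contains(line, "::") {
            continue
        }
        parts := strings.SplitN(line, "::", 2)
        dir := strings.TrimSpace(parts[0])
        if dir == "" {
            continue
        }
        for _, v := range strings.Split(parts[1], ",") {
            if v = strings.TrimSpace(v); v != "" {
                result[dir] = append(result[dir], v)
            }
        }
    }
    return result
}

func main() {
    allowed := "/somedir::*.jpg, *.png\n/somedir/sub::*.zip"
    fmt.Println(parseDirValues(allowed))
    // map[/somedir:[*.jpg *.png] /somedir/sub:[*.zip]]
}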
@ -103,9 +103,9 @@ func (s *Service) StartPortableMode(sftpdPort, ftpPort, webdavPort int, enabledS
    s.advertiseServices(advertiseService, advertiseCredentials)

    logger.InfoToConsole("Portable mode ready, SFTP port: %v, user: %#v, password: %#v, public keys: %v, directory: %#v, "+
        "permissions: %+v, enabled ssh commands: %v file extensions filters: %+v %v", sftpdConf.BindPort, s.PortableUser.Username,
        "permissions: %+v, enabled ssh commands: %v file patterns filters: %+v %v", sftpdConf.BindPort, s.PortableUser.Username,
        printablePassword, s.PortableUser.PublicKeys, s.getPortableDirToServe(), s.PortableUser.Permissions,
        sftpdConf.EnabledSSHCommands, s.PortableUser.Filters.FileExtensions, s.getServiceOptionalInfoString())
        sftpdConf.EnabledSSHCommands, s.PortableUser.Filters.FilePatterns, s.getServiceOptionalInfoString())
    return nil
}
@ -394,12 +394,12 @@ func (c *Configuration) handleSftpConnection(channel ssh.Channel, connection *Co
    // Create the server instance for the channel using the handler we created above.
    server := sftp.NewRequestServer(channel, handler, sftp.WithRSAllocator())

    defer server.Close()
    if err := server.Serve(); err == io.EOF {
        connection.Log(logger.LevelDebug, "connection closed, sending exit status")
        exitStatus := sshSubsystemExitStatus{Status: uint32(0)}
        _, err = channel.SendRequest("exit-status", false, ssh.Marshal(&exitStatus))
        connection.Log(logger.LevelDebug, "sent exit status %+v error: %v", exitStatus, err)
        server.Close()
    } else if err != nil {
        connection.Log(logger.LevelWarn, "connection closed with error: %v", err)
    }
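The hunk above moves the Close call into a defer, so the request server is shut down on every exit path rather than only after a clean io.EOF. A tiny standalone illustration of that pattern, with made-up types that are not SFTPGo code:

package main

import (
    "errors"
    "fmt"
    "io"
)

// fakeServer is a made-up stand-in used only to show the defer pattern.
type fakeServer struct{ err error }

func (s *fakeServer) Serve() error { return s.err }
func (s *fakeServer) Close()       { fmt.Println("closed") }

func handle(s *fakeServer) {
    defer s.Close() // runs on the io.EOF path and on any other error path
    if err := s.Serve(); err == io.EOF {
        fmt.Println("clean shutdown")
    } else if err != nil {
        fmt.Println("error:", err)
    }
}

func main() {
    handle(&fakeServer{err: io.EOF})
    handle(&fakeServer{err: errors.New("network error")})
}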
@ -2554,7 +2554,8 @@ func TestBandwidthAndConnections(t *testing.T) {
    assert.NoError(t, err)
}

func TestExtensionsFilters(t *testing.T) {
//nolint:dupl
func TestPatternsFilters(t *testing.T) {
    usePubKey := true
    u := getTestUser(usePubKey)
    user, _, err := httpd.AddUser(u, http.StatusOK)

@ -2569,12 +2570,14 @@ func TestExtensionsFilters(t *testing.T) {
        assert.NoError(t, err)
        err = sftpUploadFile(testFilePath, testFileName, testFileSize, client)
        assert.NoError(t, err)
        err = sftpUploadFile(testFilePath, testFileName+".zip", testFileSize, client)
        assert.NoError(t, err)
    }
    user.Filters.FileExtensions = []dataprovider.ExtensionsFilter{
    user.Filters.FilePatterns = []dataprovider.PatternsFilter{
        {
            Path: "/",
            AllowedExtensions: []string{".zip"},
            DeniedExtensions: []string{},
            Path: "/",
            AllowedPatterns: []string{"*.zIp"},
            DeniedPatterns: []string{},
        },
    }
    _, _, err = httpd.UpdateUser(user, http.StatusOK, "")

@ -2590,6 +2593,75 @@ func TestExtensionsFilters(t *testing.T) {
        assert.Error(t, err)
        err = client.Remove(testFileName)
        assert.Error(t, err)
        err = sftpDownloadFile(testFileName+".zip", localDownloadPath, testFileSize, client)
        assert.NoError(t, err)
        err = client.Mkdir("dir.zip")
        assert.NoError(t, err)
        err = client.Rename("dir.zip", "dir1.zip")
        assert.NoError(t, err)
    }
    _, err = httpd.RemoveUser(user, http.StatusOK)
    assert.NoError(t, err)
    err = os.Remove(testFilePath)
    assert.NoError(t, err)
    err = os.Remove(localDownloadPath)
    assert.NoError(t, err)
    err = os.RemoveAll(user.GetHomeDir())
    assert.NoError(t, err)
}

//nolint:dupl
func TestExtensionsFilters(t *testing.T) {
    usePubKey := true
    u := getTestUser(usePubKey)
    user, _, err := httpd.AddUser(u, http.StatusOK)
    assert.NoError(t, err)
    testFileSize := int64(131072)
    testFilePath := filepath.Join(homeBasePath, testFileName)
    localDownloadPath := filepath.Join(homeBasePath, testDLFileName)
    client, err := getSftpClient(user, usePubKey)
    if assert.NoError(t, err) {
        defer client.Close()
        err = createTestFile(testFilePath, testFileSize)
        assert.NoError(t, err)
        err = sftpUploadFile(testFilePath, testFileName, testFileSize, client)
        assert.NoError(t, err)
        err = sftpUploadFile(testFilePath, testFileName+".zip", testFileSize, client)
        assert.NoError(t, err)
        err = sftpUploadFile(testFilePath, testFileName+".jpg", testFileSize, client)
        assert.NoError(t, err)
    }
    user.Filters.FileExtensions = []dataprovider.ExtensionsFilter{
        {
            Path: "/",
            AllowedExtensions: []string{".zIp", ".jPg"},
            DeniedExtensions: []string{},
        },
    }
    user.Filters.FilePatterns = []dataprovider.PatternsFilter{
        {
            Path: "/",
            AllowedPatterns: []string{"*.jPg", "*.zIp"},
            DeniedPatterns: []string{},
        },
    }
    _, _, err = httpd.UpdateUser(user, http.StatusOK, "")
    assert.NoError(t, err)
    client, err = getSftpClient(user, usePubKey)
    if assert.NoError(t, err) {
        defer client.Close()
        err = sftpUploadFile(testFilePath, testFileName, testFileSize, client)
        assert.Error(t, err)
        err = sftpDownloadFile(testFileName, localDownloadPath, testFileSize, client)
        assert.Error(t, err)
        err = client.Rename(testFileName, testFileName+"1")
        assert.Error(t, err)
        err = client.Remove(testFileName)
        assert.Error(t, err)
        err = sftpDownloadFile(testFileName+".zip", localDownloadPath, testFileSize, client)
        assert.NoError(t, err)
        err = sftpDownloadFile(testFileName+".jpg", localDownloadPath, testFileSize, client)
        assert.NoError(t, err)
        err = client.Mkdir("dir.zip")
        assert.NoError(t, err)
        err = client.Rename("dir.zip", "dir1.zip")
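The tests above depend on the patterns matching case insensitively (an allow list of "*.zIp" still accepts an uploaded ".zip" file). One plausible way to obtain that behaviour is to lower-case both the pattern and the file name before a shell style match; the actual SFTPGo matching code is not part of this diff, so the snippet below is only a hedged sketch.

package main

import (
    "fmt"
    "path"
    "strings"
)

// matchesAny reports whether name matches any of the shell like patterns,
// ignoring case by lower-casing both sides before path.Match.
func matchesAny(patterns []string, name string) bool {
    for _, p := range patterns {
        if ok, err := path.Match(strings.ToLower(p), strings.ToLower(name)); err == nil && ok {
            return true
        }
    }
    return false
}

func main() {
    allowed := []string{"*.zIp", "*.jPg"}
    fmt.Println(matchesAny(allowed, "file.zip")) // true
    fmt.Println(matchesAny(allowed, "file.rar")) // false
}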
@ -5731,6 +5803,44 @@ func TestUserPerms(t *testing.T) {
    assert.True(t, user.HasPerm(dataprovider.PermDownload, "/p/1/test/file.dat"))
}

//nolint:dupl
func TestFilterFilePatterns(t *testing.T) {
    user := getTestUser(true)
    pattern := dataprovider.PatternsFilter{
        Path: "/test",
        AllowedPatterns: []string{"*.jpg", "*.png"},
        DeniedPatterns: []string{"*.pdf"},
    }
    filters := dataprovider.UserFilters{
        FilePatterns: []dataprovider.PatternsFilter{pattern},
    }
    user.Filters = filters
    assert.True(t, user.IsFileAllowed("/test/test.jPg"))
    assert.False(t, user.IsFileAllowed("/test/test.pdf"))
    assert.True(t, user.IsFileAllowed("/test.pDf"))

    filters.FilePatterns = append(filters.FilePatterns, dataprovider.PatternsFilter{
        Path: "/",
        AllowedPatterns: []string{"*.zip", "*.rar", "*.pdf"},
        DeniedPatterns: []string{"*.gz"},
    })
    user.Filters = filters
    assert.False(t, user.IsFileAllowed("/test1/test.gz"))
    assert.True(t, user.IsFileAllowed("/test1/test.zip"))
    assert.False(t, user.IsFileAllowed("/test/sub/test.pdf"))
    assert.False(t, user.IsFileAllowed("/test1/test.png"))

    filters.FilePatterns = append(filters.FilePatterns, dataprovider.PatternsFilter{
        Path: "/test/sub",
        DeniedPatterns: []string{"*.tar"},
    })
    user.Filters = filters
    assert.False(t, user.IsFileAllowed("/test/sub/sub/test.tar"))
    assert.True(t, user.IsFileAllowed("/test/sub/test.gz"))
    assert.False(t, user.IsFileAllowed("/test/test.zip"))
}

//nolint:dupl
func TestFilterFileExtensions(t *testing.T) {
    user := getTestUser(true)
    extension := dataprovider.ExtensionsFilter{
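TestFilterFilePatterns above also pins down how per-directory filters are resolved: the filter attached to the deepest parent directory of the file applies, denied patterns are checked before allowed ones, and a non-empty allow list rejects anything it does not match. The following is a hedged, self-contained sketch of that resolution logic; the real IsFileAllowed implementation is not shown in this diff.

package main

import (
    "fmt"
    "path"
    "strings"
)

type patternsFilter struct {
    Path            string
    AllowedPatterns []string
    DeniedPatterns  []string
}

// isAllowed picks the filter defined for the closest parent directory, then
// applies denied patterns first and the allow list last (case insensitive).
func isAllowed(filters []patternsFilter, virtualPath string) bool {
    dir := path.Dir(virtualPath)
    name := strings.ToLower(path.Base(virtualPath))
    var best *patternsFilter
    for i := range filters {
        f := &filters[i]
        if dir == f.Path || f.Path == "/" || strings.HasPrefix(dir, f.Path+"/") {
            if best == nil || len(f.Path) > len(best.Path) {
                best = f
            }
        }
    }
    if best == nil {
        return true // no filter defined for this directory
    }
    for _, p := range best.DeniedPatterns {
        if ok, _ := path.Match(strings.ToLower(p), name); ok {
            return false
        }
    }
    if len(best.AllowedPatterns) == 0 {
        return true
    }
    for _, p := range best.AllowedPatterns {
        if ok, _ := path.Match(strings.ToLower(p), name); ok {
            return true
        }
    }
    return false
}

func main() {
    filters := []patternsFilter{
        {Path: "/test", AllowedPatterns: []string{"*.jpg", "*.png"}, DeniedPatterns: []string{"*.pdf"}},
        {Path: "/", AllowedPatterns: []string{"*.zip", "*.rar", "*.pdf"}, DeniedPatterns: []string{"*.gz"}},
    }
    fmt.Println(isAllowed(filters, "/test/test.jPg"))     // true: allowed under /test
    fmt.Println(isAllowed(filters, "/test/sub/test.pdf")) // false: the /test filter applies to subdirectories too
    fmt.Println(isAllowed(filters, "/test1/test.gz"))     // false: denied by the "/" filter
}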
@ -119,7 +119,7 @@
{{- end}}
{{- end}}</textarea>
        <small id="subDirsHelpBlock" class="form-text text-muted">
            One virtual directory path per line as dir::perms, for example /somedir::list,download
            One exposed virtual directory path per line as dir::perms, for example /somedir::list,download
        </small>
    </div>
</div>
@ -241,6 +241,36 @@
    </div>
</div>

<div class="form-group row">
    <label for="idFilePatternsDenied" class="col-sm-2 col-form-label">Denied file patterns</label>
    <div class="col-sm-10">
        <textarea class="form-control" id="idFilePatternsDenied" name="denied_patterns" rows="3"
            aria-describedby="deniedPatternsHelpBlock">{{range $index, $filter := .User.Filters.FilePatterns -}}
{{if $filter.DeniedPatterns -}}
{{$filter.Path}}::{{range $idx, $p := $filter.DeniedPatterns}}{{if $idx}},{{end}}{{$p}}{{end}}
{{- end}}
{{- end}}</textarea>
        <small id="deniedPatternsHelpBlock" class="form-text text-muted">
            One exposed virtual directory per line as dir::pattern1,pattern2, for example /subdir::*.zip,*.rar
        </small>
    </div>
</div>

<div class="form-group row">
    <label for="idFilePatternsAllowed" class="col-sm-2 col-form-label">Allowed file patterns</label>
    <div class="col-sm-10">
        <textarea class="form-control" id="idFilePatternsAllowed" name="allowed_patterns" rows="3"
            aria-describedby="allowedPatternsHelpBlock">{{range $index, $filter := .User.Filters.FilePatterns -}}
{{if $filter.AllowedPatterns -}}
{{$filter.Path}}::{{range $idx, $p := $filter.AllowedPatterns}}{{if $idx}},{{end}}{{$p}}{{end}}
{{- end}}
{{- end}}</textarea>
        <small id="allowedPatternsHelpBlock" class="form-text text-muted">
            One exposed virtual directory per line as dir::pattern1,pattern2, for example /somedir::*.jpg,*.png
        </small>
    </div>
</div>

<div class="form-group row">
    <label for="idFilesExtensionsDenied" class="col-sm-2 col-form-label">Denied file extensions</label>
    <div class="col-sm-10">
@ -251,7 +281,7 @@
{{- end}}
{{- end}}</textarea>
        <small id="deniedExtensionsHelpBlock" class="form-text text-muted">
            One directory per line as dir::extensions1,extensions2, for example /subdir::.zip,.rar
            One exposed virtual directory per line as dir::extension1,extension2, for example /subdir::.zip,.rar. Deprecated, use file patterns
        </small>
    </div>
</div>

@ -266,7 +296,7 @@
{{- end}}
{{- end}}</textarea>
        <small id="allowedExtensionsHelpBlock" class="form-text text-muted">
            One directory per line as dir::extensions1,extensions2, for example /somedir::.jpg,.png
            One exposed virtual directory per line as dir::extension1,extension2, for example /somedir::.jpg,.png. Deprecated, use file patterns
        </small>
    </div>
</div>
@ -536,11 +536,19 @@ func TestDownloadErrors(t *testing.T) {
            DeniedExtensions: []string{".zipp"},
        },
    }
    u.Filters.FilePatterns = []dataprovider.PatternsFilter{
        {
            Path: "/sub2",
            AllowedPatterns: []string{},
            DeniedPatterns: []string{"*.jpg"},
        },
    }
    user, _, err := httpd.AddUser(u, http.StatusOK)
    assert.NoError(t, err)
    client := getWebDavClient(user)
    testFilePath1 := filepath.Join(user.HomeDir, subDir1, "file.zipp")
    testFilePath2 := filepath.Join(user.HomeDir, subDir2, "file.zipp")
    testFilePath3 := filepath.Join(user.HomeDir, subDir2, "file.jpg")
    err = os.MkdirAll(filepath.Dir(testFilePath1), os.ModePerm)
    assert.NoError(t, err)
    err = os.MkdirAll(filepath.Dir(testFilePath2), os.ModePerm)

@ -549,11 +557,15 @@ func TestDownloadErrors(t *testing.T) {
    assert.NoError(t, err)
    err = ioutil.WriteFile(testFilePath2, []byte("file2"), os.ModePerm)
    assert.NoError(t, err)
    err = ioutil.WriteFile(testFilePath3, []byte("file3"), os.ModePerm)
    assert.NoError(t, err)
    localDownloadPath := filepath.Join(homeBasePath, testDLFileName)
    err = downloadFile(path.Join("/", subDir1, "file.zipp"), localDownloadPath, 5, client)
    assert.Error(t, err)
    err = downloadFile(path.Join("/", subDir2, "file.zipp"), localDownloadPath, 5, client)
    assert.Error(t, err)
    err = downloadFile(path.Join("/", subDir2, "file.jpg"), localDownloadPath, 5, client)
    assert.Error(t, err)
    err = downloadFile(path.Join("missing.zip"), localDownloadPath, 5, client)
    assert.Error(t, err)