gcs: add support for automatic credentials

We now also support implicit credentials using the Application Default Credentials strategy.
Nicola Murino 2020-02-19 09:41:15 +01:00
parent c8cc81cf4a
commit ae8ed75ae5
15 changed files with 215 additions and 115 deletions


@ -587,7 +587,7 @@ Other notes:
Each user can be mapped with a Google Cloud Storage bucket or a bucket virtual folder, this way the mapped bucket/virtual folder is exposed over SFTP/SCP. This backend is very similar to the S3 backend and it has the same limitations.
To connect SFTPGo to Google Cloud Storage you can use the Application Default Credentials (ADC) strategy to try to find your application's credentials automatically, or you can explicitly provide a JSON credentials file that you can obtain from the Google Cloud Console; take a look [here](https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application) for details.
You can optionally specify a [storage class](https://cloud.google.com/storage/docs/storage-classes) too, leave blank to use the default storage class.
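The difference between the two modes comes down to how the Google Cloud Storage client is constructed; the `NewGCSFs` change further down in this commit does exactly this. A minimal sketch, assuming a hypothetical helper name `newGCSClient`:

```go
package example

import (
	"context"

	"cloud.google.com/go/storage"
	"google.golang.org/api/option"
)

// newGCSClient is a hypothetical helper illustrating the selection logic:
// with automatic credentials the client is created without options and the
// Google client library resolves credentials on its own (for example from
// GOOGLE_APPLICATION_CREDENTIALS, the gcloud configuration or the GCE
// metadata server); otherwise an explicit JSON credentials file is used.
func newGCSClient(ctx context.Context, automaticCredentials int, credentialFile string) (*storage.Client, error) {
	if automaticCredentials > 0 {
		return storage.NewClient(ctx)
	}
	return storage.NewClient(ctx, option.WithCredentialsFile(credentialFile))
}
```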
@ -618,29 +618,30 @@ Usage:
sftpgo portable [flags]
Flags:
-C, --advertise-credentials If the SFTP service is advertised via multicast DNS this flag allows to put username/password inside the advertised TXT record
-S, --advertise-service Advertise SFTP service using multicast DNS (default true)
-d, --directory string Path to the directory to serve. This can be an absolute path or a path relative to the current directory (default ".")
-f, --fs-provider int 0 means local filesystem, 1 Amazon S3 compatible, 2 Google Cloud Storage
--gcs-automatic-credentials int 0 means explicit credentials using a JSON credentials file, 1 automatic (default 1)
--gcs-bucket string
--gcs-credentials-file string Google Cloud Storage JSON credentials file
--gcs-key-prefix string Allows to restrict access to the virtual folder identified by this prefix and its contents
--gcs-storage-class string
-h, --help help for portable
-l, --log-file-path string Leave empty to disable logging
-p, --password string Leave empty to use an auto generated value
-g, --permissions strings User's permissions. "*" means any permission (default [list,download])
-k, --public-key strings
--s3-access-key string
--s3-access-secret string
--s3-bucket string
--s3-endpoint string
--s3-key-prefix string Allows to restrict access to the virtual folder identified by this prefix and its contents
--s3-region string
--s3-storage-class string
-s, --sftpd-port int 0 means a random non privileged port
-c, --ssh-commands strings SSH commands to enable. "*" means any supported SSH command including scp (default [md5sum,sha1sum,cd,pwd])
-u, --username string Leave empty to use an auto generated value
```
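For example, assuming Application Default Credentials are available in the environment and a hypothetical bucket named `mybucket`, a command such as `sftpgo portable --fs-provider 2 --gcs-bucket mybucket` should serve the bucket contents over SFTP using automatic credentials (the default for `--gcs-automatic-credentials`).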
In portable mode SFTPGo can advertise the SFTP service and, optionally, the credentials via multicast DNS, so there is a standard way to discover the service and to automatically connect to it.
@ -696,6 +697,7 @@ For each account the following properties can be configured:
- `s3_key_prefix`, allows to restrict access to the virtual folder identified by this prefix and its contents
- `gcs_bucket`, required for GCS filesystem
- `gcs_credentials`, Google Cloud Storage JSON credentials base64 encoded
- `gcs_automatic_credentials`, integer. Set to 1 to use the Application Default Credentials strategy or set to 0 to use explicit credentials via `gcs_credentials`
- `gcs_storage_class`
- `gcs_key_prefix`, allows to restrict access to the virtual folder identified by this prefix and its contents
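These GCS properties map onto the `vfs.GCSFsConfig` fields changed by this commit. A minimal sketch of a GCS-backed filesystem configuration relying on automatic credentials, assuming the upstream `dataprovider` and `vfs` import paths and a hypothetical bucket name:

```go
package main

import (
	"github.com/drakkan/sftpgo/dataprovider"
	"github.com/drakkan/sftpgo/vfs"
)

func main() {
	// Sketch only: field names follow the vfs.GCSFsConfig and
	// dataprovider.Filesystem definitions touched by this commit.
	fsConfig := dataprovider.Filesystem{
		Provider: 2, // 2 selects the Google Cloud Storage backend
		GCSConfig: vfs.GCSFsConfig{
			Bucket:               "mybucket",        // hypothetical bucket name
			AutomaticCredentials: 1,                 // 1 = use the Application Default Credentials strategy
			StorageClass:         "",                // empty = bucket default storage class
			KeyPrefix:            "somedir/subdir/", // optional virtual folder restriction
		},
	}
	_ = fsConfig
}
```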


@ -35,6 +35,7 @@ var (
portableS3KeyPrefix string
portableGCSBucket string
portableGCSCredentialsFile string
portableGCSAutoCredentials int
portableGCSStorageClass string
portableGCSKeyPrefix string
portableCmd = &cobra.Command{
@ -48,12 +49,16 @@ Please take a look at the usage below to customize the serving parameters`,
Run: func(cmd *cobra.Command, args []string) {
portableDir := directoryToServe
if !filepath.IsAbs(portableDir) {
if portableFsProvider == 0 {
portableDir, _ = filepath.Abs(portableDir)
} else {
portableDir = os.TempDir()
}
}
permissions := make(map[string][]string)
permissions["/"] = portablePermissions
portableGCSCredentials := ""
if portableFsProvider == 2 && len(portableGCSCredentialsFile) > 0 {
fi, err := os.Stat(portableGCSCredentialsFile)
if err != nil {
fmt.Printf("Invalid GCS credentials file: %v\n", err)
@ -69,6 +74,7 @@ Please take a look at the usage below to customize the serving parameters`,
fmt.Printf("Unable to read credentials file: %v\n", err) fmt.Printf("Unable to read credentials file: %v\n", err)
} }
portableGCSCredentials = base64.StdEncoding.EncodeToString(creds) portableGCSCredentials = base64.StdEncoding.EncodeToString(creds)
portableGCSAutoCredentials = 0
} }
service := service.Service{ service := service.Service{
ConfigDir: defaultConfigDir, ConfigDir: defaultConfigDir,
@ -100,10 +106,11 @@ Please take a look at the usage below to customize the serving parameters`,
KeyPrefix: portableS3KeyPrefix,
},
GCSConfig: vfs.GCSFsConfig{
Bucket: portableGCSBucket,
Credentials: portableGCSCredentials,
AutomaticCredentials: portableGCSAutoCredentials,
StorageClass: portableGCSStorageClass,
KeyPrefix: portableGCSKeyPrefix,
},
},
},
@ -147,5 +154,7 @@ func init() {
portableCmd.Flags().StringVar(&portableGCSKeyPrefix, "gcs-key-prefix", "", "Allows to restrict access to the virtual folder "+ portableCmd.Flags().StringVar(&portableGCSKeyPrefix, "gcs-key-prefix", "", "Allows to restrict access to the virtual folder "+
"identified by this prefix and its contents") "identified by this prefix and its contents")
portableCmd.Flags().StringVar(&portableGCSCredentialsFile, "gcs-credentials-file", "", "Google Cloud Storage JSON credentials file") portableCmd.Flags().StringVar(&portableGCSCredentialsFile, "gcs-credentials-file", "", "Google Cloud Storage JSON credentials file")
portableCmd.Flags().IntVar(&portableGCSAutoCredentials, "gcs-automatic-credentials", 1, "0 means explicit credentials using a JSON "+
"credentials file, 1 automatic")
rootCmd.AddCommand(portableCmd) rootCmd.AddCommand(portableCmd)
} }


@ -813,6 +813,9 @@ func addCredentialsToUser(user *User) error {
if user.FsConfig.Provider != 2 {
return nil
}
if user.FsConfig.GCSConfig.AutomaticCredentials > 0 {
return nil
}
cred, err := ioutil.ReadFile(user.getGCSCredentialsFilePath())
if err != nil {
return err


@ -419,10 +419,11 @@ func (u *User) getACopy() User {
KeyPrefix: u.FsConfig.S3Config.KeyPrefix,
},
GCSConfig: vfs.GCSFsConfig{
Bucket: u.FsConfig.GCSConfig.Bucket,
CredentialFile: u.FsConfig.GCSConfig.CredentialFile,
AutomaticCredentials: u.FsConfig.GCSConfig.AutomaticCredentials,
StorageClass: u.FsConfig.GCSConfig.StorageClass,
KeyPrefix: u.FsConfig.GCSConfig.KeyPrefix,
},
}


@ -462,6 +462,9 @@ func compareUserFsConfig(expected *dataprovider.User, actual *dataprovider.User)
expected.FsConfig.GCSConfig.KeyPrefix+"/" != actual.FsConfig.GCSConfig.KeyPrefix {
return errors.New("GCS key prefix mismatch")
}
if expected.FsConfig.GCSConfig.AutomaticCredentials != actual.FsConfig.GCSConfig.AutomaticCredentials {
return errors.New("GCS automatic credentials mismatch")
}
return nil
}


@ -352,6 +352,7 @@ func TestAddUserInvalidFsConfig(t *testing.T) {
}
u.FsConfig.GCSConfig.KeyPrefix = "somedir/subdir/"
u.FsConfig.GCSConfig.Credentials = ""
u.FsConfig.GCSConfig.AutomaticCredentials = 0
_, _, err = httpd.AddUser(u, http.StatusBadRequest)
if err != nil {
t.Errorf("unexpected error adding user with invalid fs config: %v", err)
@ -519,6 +520,14 @@ func TestUserGCSConfig(t *testing.T) {
if err != nil {
t.Errorf("unable to add user: %v", err)
}
os.RemoveAll(credentialsPath)
os.MkdirAll(credentialsPath, 0700)
user.FsConfig.GCSConfig.Credentials = ""
user.FsConfig.GCSConfig.AutomaticCredentials = 1
user, _, err = httpd.UpdateUser(user, http.StatusOK)
if err != nil {
t.Errorf("unable to update user: %v", err)
}
user.FsConfig.Provider = 1
user.FsConfig.S3Config.Bucket = "test1"
user.FsConfig.S3Config.Region = "us-east-1"
@ -1937,6 +1946,26 @@ func TestWebUserGCSMock(t *testing.T) {
if updateUser.FsConfig.GCSConfig.KeyPrefix != user.FsConfig.GCSConfig.KeyPrefix {
t.Error("GCS key prefix mismatch")
}
form.Set("gcs_auto_credentials", "on")
b, contentType, _ = getMultipartFormData(form, "", "")
req, _ = http.NewRequest(http.MethodPost, webUserPath+"/"+strconv.FormatInt(user.ID, 10), &b)
req.Header.Set("Content-Type", contentType)
rr = executeRequest(req)
checkResponseCode(t, http.StatusSeeOther, rr.Code)
req, _ = http.NewRequest(http.MethodGet, userPath+"?limit=1&offset=0&order=ASC&username="+user.Username, nil)
rr = executeRequest(req)
checkResponseCode(t, http.StatusOK, rr.Code)
err = render.DecodeJSON(rr.Body, &users)
if err != nil {
t.Errorf("Error decoding users: %v", err)
}
if len(users) != 1 {
t.Errorf("1 user is expected")
}
updateUser = users[0]
if updateUser.FsConfig.GCSConfig.AutomaticCredentials != 1 {
t.Error("GCS automatic credentials mismatch")
}
req, _ = http.NewRequest(http.MethodDelete, userPath+"/"+strconv.FormatInt(user.ID, 10), nil)
rr = executeRequest(req)
checkResponseCode(t, http.StatusOK, rr.Code)


@ -286,8 +286,13 @@ func TestCompareUserFsConfig(t *testing.T) {
t.Errorf("S3 key prefix does not match") t.Errorf("S3 key prefix does not match")
} }
expected.FsConfig.S3Config.KeyPrefix = "" expected.FsConfig.S3Config.KeyPrefix = ""
}
func TestCompareUserGCSConfig(t *testing.T) {
expected := &dataprovider.User{}
actual := &dataprovider.User{}
expected.FsConfig.GCSConfig.KeyPrefix = "somedir/subdir"
err := compareUserFsConfig(expected, actual)
if err == nil {
t.Errorf("GCS key prefix does not match")
}
@ -304,6 +309,12 @@ func TestCompareUserFsConfig(t *testing.T) {
t.Errorf("GCS storage class does not match") t.Errorf("GCS storage class does not match")
} }
expected.FsConfig.GCSConfig.StorageClass = "" expected.FsConfig.GCSConfig.StorageClass = ""
expected.FsConfig.GCSConfig.AutomaticCredentials = 1
err = compareUserFsConfig(expected, actual)
if err == nil {
t.Errorf("GCS automatic credentials does not match")
}
expected.FsConfig.GCSConfig.AutomaticCredentials = 0
}
func TestGCSWebInvalidFormFile(t *testing.T) {


@ -2,7 +2,7 @@ openapi: 3.0.1
info:
title: SFTPGo
description: 'SFTPGo REST API'
version: 1.8.0
servers:
- url: /api/v1
@ -987,6 +987,16 @@ components:
type: string
format: byte
description: Google Cloud Storage JSON credentials base64 encoded. This field must be populated only when adding/updating an user. It will be always omitted, since there are sensitive data, when you search/get users. The credentials will be stored in the configured "credentials_path"
automatic_credentials:
type: integer
nullable: true
enum:
- 0
- 1
description: >
Automatic credentials:
* `0` - disabled, explicit credentials, using a JSON credentials file, must be provided. This is the default value if the field is null
* `1` - enabled, we try to use the Application Default Credentials (ADC) strategy to find your application's credentials
storage_class:
type: string
key_prefix:


@ -246,6 +246,12 @@ func getFsConfigFromUserPostFields(r *http.Request) (dataprovider.Filesystem, er
fs.GCSConfig.Bucket = r.Form.Get("gcs_bucket")
fs.GCSConfig.StorageClass = r.Form.Get("gcs_storage_class")
fs.GCSConfig.KeyPrefix = r.Form.Get("gcs_key_prefix")
autoCredentials := r.Form.Get("gcs_auto_credentials")
if len(autoCredentials) > 0 {
fs.GCSConfig.AutomaticCredentials = 1
} else {
fs.GCSConfig.AutomaticCredentials = 0
}
credentials, _, err := r.FormFile("gcs_credential_file")
if err == http.ErrMissingFile {
return fs, nil
@ -262,6 +268,7 @@ func getFsConfigFromUserPostFields(r *http.Request) (dataprovider.Filesystem, er
return fs, err
}
fs.GCSConfig.Credentials = base64.StdEncoding.EncodeToString(fileBytes)
fs.GCSConfig.AutomaticCredentials = 0
}
return fs, nil
}


@ -75,7 +75,8 @@ class SFTPGoApiRequests:
max_sessions=0, quota_size=0, quota_files=0, permissions={}, upload_bandwidth=0, download_bandwidth=0,
status=1, expiration_date=0, allowed_ip=[], denied_ip=[], fs_provider='local', s3_bucket='',
s3_region='', s3_access_key='', s3_access_secret='', s3_endpoint='', s3_storage_class='',
s3_key_prefix='', gcs_bucket='', gcs_key_prefix='', gcs_storage_class='', gcs_credentials_file='',
gcs_automatic_credentials='automatic'):
user = {'id':user_id, 'username':username, 'uid':uid, 'gid':gid,
'max_sessions':max_sessions, 'quota_size':quota_size, 'quota_files':quota_files,
'upload_bandwidth':upload_bandwidth, 'download_bandwidth':download_bandwidth,
@ -95,7 +96,8 @@ class SFTPGoApiRequests:
user.update({'filters':self.buildFilters(allowed_ip, denied_ip)})
user.update({'filesystem':self.buildFsConfig(fs_provider, s3_bucket, s3_region, s3_access_key, s3_access_secret,
s3_endpoint, s3_storage_class, s3_key_prefix, gcs_bucket,
gcs_key_prefix, gcs_storage_class, gcs_credentials_file,
gcs_automatic_credentials)})
return user
def buildPermissions(self, root_perms, subdirs_perms):
@ -130,7 +132,8 @@ class SFTPGoApiRequests:
return filters
def buildFsConfig(self, fs_provider, s3_bucket, s3_region, s3_access_key, s3_access_secret, s3_endpoint,
s3_storage_class, s3_key_prefix, gcs_bucket, gcs_key_prefix, gcs_storage_class,
gcs_credentials_file, gcs_automatic_credentials):
fs_config = {'provider':0}
if fs_provider == 'S3':
s3config = {'bucket':s3_bucket, 'region':s3_region, 'access_key':s3_access_key, 'access_secret':
@ -139,9 +142,14 @@ class SFTPGoApiRequests:
fs_config.update({'provider':1, 's3config':s3config})
elif fs_provider == 'GCS':
gcsconfig = {'bucket':gcs_bucket, 'key_prefix':gcs_key_prefix, 'storage_class':gcs_storage_class}
if gcs_automatic_credentials == "automatic":
gcsconfig.update({'automatic_credentials':1})
else:
gcsconfig.update({'automatic_credentials':0})
if gcs_credentials_file:
with open(gcs_credentials_file) as creds:
gcsconfig.update({'credentials':base64.b64encode(creds.read().encode('UTF-8')).decode('UTF-8'),
'automatic_credentials':0})
fs_config.update({'provider':2, 'gcsconfig':gcsconfig})
return fs_config
@ -158,12 +166,12 @@ class SFTPGoApiRequests:
quota_files=0, perms=[], upload_bandwidth=0, download_bandwidth=0, status=1, expiration_date=0,
subdirs_permissions=[], allowed_ip=[], denied_ip=[], fs_provider='local', s3_bucket='', s3_region='',
s3_access_key='', s3_access_secret='', s3_endpoint='', s3_storage_class='', s3_key_prefix='', gcs_bucket='',
gcs_key_prefix='', gcs_storage_class='', gcs_credentials_file='', gcs_automatic_credentials='automatic'):
u = self.buildUserObject(0, username, password, public_keys, home_dir, uid, gid, max_sessions,
quota_size, quota_files, self.buildPermissions(perms, subdirs_permissions), upload_bandwidth, download_bandwidth,
status, expiration_date, allowed_ip, denied_ip, fs_provider, s3_bucket, s3_region, s3_access_key,
s3_access_secret, s3_endpoint, s3_storage_class, s3_key_prefix, gcs_bucket, gcs_key_prefix, gcs_storage_class,
gcs_credentials_file, gcs_automatic_credentials)
r = requests.post(self.userPath, json=u, auth=self.auth, verify=self.verify)
self.printResponse(r)
@ -171,12 +179,13 @@ class SFTPGoApiRequests:
quota_size=0, quota_files=0, perms=[], upload_bandwidth=0, download_bandwidth=0, status=1,
expiration_date=0, subdirs_permissions=[], allowed_ip=[], denied_ip=[], fs_provider='local',
s3_bucket='', s3_region='', s3_access_key='', s3_access_secret='', s3_endpoint='', s3_storage_class='',
s3_key_prefix='', gcs_bucket='', gcs_key_prefix='', gcs_storage_class='', gcs_credentials_file='',
gcs_automatic_credentials='automatic'):
u = self.buildUserObject(user_id, username, password, public_keys, home_dir, uid, gid, max_sessions,
quota_size, quota_files, self.buildPermissions(perms, subdirs_permissions), upload_bandwidth, download_bandwidth,
status, expiration_date, allowed_ip, denied_ip, fs_provider, s3_bucket, s3_region, s3_access_key,
s3_access_secret, s3_endpoint, s3_storage_class, s3_key_prefix, gcs_bucket, gcs_key_prefix, gcs_storage_class,
gcs_credentials_file, gcs_automatic_credentials)
r = requests.put(urlparse.urljoin(self.userPath, 'user/' + str(user_id)), json=u, auth=self.auth, verify=self.verify)
self.printResponse(r)
@ -448,6 +457,8 @@ def addCommonUserArguments(parser):
' Default: %(default)s')
parser.add_argument('--gcs-storage-class', type=str, default='', help='Default: %(default)s')
parser.add_argument('--gcs-credentials-file', type=str, default='', help='Default: %(default)s')
parser.add_argument('--gcs-automatic-credentials', type=str, default='automatic', choices=['explicit', 'automatic'],
help='If you provide a credentials file this argument will be setted to "explicit". Default: %(default)s')
if __name__ == '__main__':
@ -558,7 +569,7 @@ if __name__ == '__main__':
args.status, getDatetimeAsMillisSinceEpoch(args.expiration_date), args.subdirs_permissions, args.allowed_ip,
args.denied_ip, args.fs, args.s3_bucket, args.s3_region, args.s3_access_key, args.s3_access_secret,
args.s3_endpoint, args.s3_storage_class, args.s3_key_prefix, args.gcs_bucket, args.gcs_key_prefix,
args.gcs_storage_class, args.gcs_credentials_file, args.gcs_automatic_credentials)
elif args.command == 'update-user':
api.updateUser(args.id, args.username, args.password, args.public_keys, args.home_dir, args.uid, args.gid,
args.max_sessions, args.quota_size, args.quota_files, args.permissions, args.upload_bandwidth,
@ -566,7 +577,7 @@ if __name__ == '__main__':
args.subdirs_permissions, args.allowed_ip, args.denied_ip, args.fs, args.s3_bucket, args.s3_region,
args.s3_access_key, args.s3_access_secret, args.s3_endpoint, args.s3_storage_class,
args.s3_key_prefix, args.gcs_bucket, args.gcs_key_prefix, args.gcs_storage_class,
args.gcs_credentials_file, args.gcs_automatic_credentials)
elif args.command == 'delete-user':
api.deleteUser(args.id)
elif args.command == 'get-users':


@ -172,52 +172,60 @@ func (s *Service) StartPortableMode(sftpdPort int, enabledSSHCommands []string,
config.SetSFTPDConfig(sftpdConf)
err = s.Start()
if err != nil {
return err
}
var mDNSService *zeroconf.Server
if advertiseService {
version := utils.GetAppVersion()
meta := []string{
fmt.Sprintf("version=%v", version.GetVersionAsString()),
}
if advertiseCredentials {
logger.InfoToConsole("Advertising credentials via multicast DNS")
meta = append(meta, fmt.Sprintf("user=%v", s.PortableUser.Username))
if len(s.PortableUser.Password) > 0 {
meta = append(meta, fmt.Sprintf("password=%v", s.PortableUser.Password))
} else {
logger.InfoToConsole("Unable to advertise key based credentials via multicast DNS, we don't have the private key")
}
}
mDNSService, err = zeroconf.Register(
fmt.Sprintf("SFTPGo portable %v", sftpdConf.BindPort), // service instance name
"_sftp-ssh._tcp", // service type and protocol
"local.", // service domain
sftpdConf.BindPort, // service port
meta, // service metadata
nil, // register on all network interfaces
)
if err != nil {
mDNSService = nil
logger.WarnToConsole("Unable to advertise SFTP service via multicast DNS: %v", err)
} else {
logger.InfoToConsole("SFTP service advertised via multicast DNS")
}
}
sig := make(chan os.Signal, 1)
signal.Notify(sig, os.Interrupt, syscall.SIGTERM)
go func() {
<-sig
if mDNSService != nil {
logger.InfoToConsole("unregistering multicast DNS service")
mDNSService.Shutdown()
}
s.Stop()
}()
var dirToServe string
if s.PortableUser.FsConfig.Provider == 1 {
dirToServe = s.PortableUser.FsConfig.S3Config.KeyPrefix
} else if s.PortableUser.FsConfig.Provider == 2 {
dirToServe = s.PortableUser.FsConfig.GCSConfig.KeyPrefix
} else {
dirToServe = s.PortableUser.HomeDir
}
logger.InfoToConsole("Portable mode ready, SFTP port: %v, user: %#v, password: %#v, public keys: %v, directory: %#v, "+
"permissions: %v, enabled ssh commands: %v", sftpdConf.BindPort, s.PortableUser.Username, s.PortableUser.Password,
s.PortableUser.PublicKeys, dirToServe, s.PortableUser.Permissions, sftpdConf.EnabledSSHCommands)
return nil
}


@ -203,13 +203,13 @@
</div>
<div class="form-group row s3">
<label for="idS3Bucket" class="col-sm-2 col-form-label">Bucket</label>
<div class="col-sm-3">
<input type="text" class="form-control" id="idS3Bucket" name="s3_bucket" placeholder=""
value="{{.User.FsConfig.S3Config.Bucket}}" maxlength="255">
</div>
<div class="col-sm-2"></div>
<label for="idS3Region" class="col-sm-2 col-form-label">Region</label>
<div class="col-sm-3">
<input type="text" class="form-control" id="idS3Region" name="s3_region" placeholder=""
value="{{.User.FsConfig.S3Config.Region}}" maxlength="255">
@ -217,13 +217,13 @@
</div>
<div class="form-group row s3">
<label for="idS3AccessKey" class="col-sm-2 col-form-label">Access Key</label>
<div class="col-sm-3">
<input type="text" class="form-control" id="idS3AccessKey" name="s3_access_key" placeholder=""
value="{{.User.FsConfig.S3Config.AccessKey}}" maxlength="255">
</div>
<div class="col-sm-2"></div>
<label for="idS3AccessSecret" class="col-sm-2 col-form-label">Access Secret</label>
<div class="col-sm-3">
<input type="text" class="form-control" id="idS3AccessSecret" name="s3_access_secret" placeholder=""
value="{{.User.FsConfig.S3Config.AccessSecret}}" maxlength="1000">
@ -231,13 +231,13 @@
</div>
<div class="form-group row s3">
<label for="idS3StorageClass" class="col-sm-2 col-form-label">Storage Class</label>
<div class="col-sm-3">
<input type="text" class="form-control" id="idS3StorageClass" name="s3_storage_class" placeholder=""
value="{{.User.FsConfig.S3Config.StorageClass}}" maxlength="1000">
</div>
<div class="col-sm-2"></div>
<label for="idS3Endpoint" class="col-sm-2 col-form-label">Endpoint</label>
<div class="col-sm-3">
<input type="text" class="form-control" id="idS3Endpoint" name="s3_endpoint" placeholder=""
value="{{.User.FsConfig.S3Config.Endpoint}}" maxlength="255">
@ -245,7 +245,7 @@
</div>
<div class="form-group row s3">
<label for="idS3KeyPrefix" class="col-sm-2 col-form-label">Key Prefix</label>
<div class="col-sm-10">
<input type="text" class="form-control" id="idS3KeyPrefix" name="s3_key_prefix" placeholder=""
value="{{.User.FsConfig.S3Config.KeyPrefix}}" maxlength="255" aria-describedby="S3KeyPrefixHelpBlock">
@ -256,7 +256,7 @@
</div>
<div class="form-group row gcs">
<label for="idGCSBucket" class="col-sm-2 col-form-label">Bucket</label>
<div class="col-sm-10">
<input type="text" class="form-control" id="idGCSBucket" name="gcs_bucket" placeholder=""
value="{{.User.FsConfig.GCSConfig.Bucket}}" maxlength="255">
@ -264,7 +264,7 @@
</div>
<div class="form-group row gcs">
<label for="idGCSCredentialFile" class="col-sm-2 col-form-label">Credentials file</label>
<div class="col-sm-4">
<input type="file" class="form-control-file" id="idGCSCredentialFile" name="gcs_credential_file"
aria-describedby="GCSCredentialsHelpBlock">
@ -273,15 +273,23 @@
</small>
</div>
<div class="col-sm-1"></div>
<label for="idGCSStorageClass" class="col-sm-2 col-form-label">Storage Class</label>
<div class="col-sm-3">
<input type="text" class="form-control" id="idGCSStorageClass" name="gcs_storage_class" placeholder=""
value="{{.User.FsConfig.GCSConfig.StorageClass}}" maxlength="255">
</div>
</div>
<div class="form-group gcs">
<div class="form-check">
<input type="checkbox" class="form-check-input" id="idGCSAutoCredentials" name="gcs_auto_credentials"
{{if gt .User.FsConfig.GCSConfig.AutomaticCredentials 0}}checked{{end}}>
<label for="idGCSAutoCredentials" class="form-check-label">Automatic credentials</label>
</div>
</div>
<div class="form-group row gcs"> <div class="form-group row gcs">
<label for="idGCSKeyPrefix" class="col-sm-2 col-form-label">GCS Key Prefix</label> <label for="idGCSKeyPrefix" class="col-sm-2 col-form-label">Key Prefix</label>
<div class="col-sm-10"> <div class="col-sm-10">
<input type="text" class="form-control" id="idGCSKeyPrefix" name="gcs_key_prefix" placeholder="" <input type="text" class="form-control" id="idGCSKeyPrefix" name="gcs_key_prefix" placeholder=""
value="{{.User.FsConfig.GCSConfig.KeyPrefix}}" maxlength="255" aria-describedby="GCSKeyPrefixHelpBlock"> value="{{.User.FsConfig.GCSConfig.KeyPrefix}}" maxlength="255" aria-describedby="GCSKeyPrefixHelpBlock">
@ -341,12 +349,15 @@
function onFilesystemChanged(val){
if (val == '1'){
$('.form-group.row.gcs').hide();
$('.form-group.gcs').hide();
$('.form-group.row.s3').show();
} else if (val == '2'){
$('.form-group.row.gcs').show();
$('.form-group.gcs').show();
$('.form-group.row.s3').hide();
} else {
$('.form-group.row.gcs').hide();
$('.form-group.gcs').hide();
$('.form-group.row.s3').hide();
}
}


@ -21,10 +21,8 @@ import (
)
var (
// we can use fields selection only when we don't need directory-like results
// with folders
gcsDefaultFieldsSelection = []string{"Name", "Size", "Deleted", "Updated"}
)
@ -37,10 +35,11 @@ type GCSFsConfig struct {
// folder. The prefix, if not empty, must not start with "/" and must
// end with "/".
// If empty the whole bucket contents will be available
KeyPrefix string `json:"key_prefix,omitempty"`
CredentialFile string `json:"-"`
Credentials string `json:"credentials,omitempty"`
AutomaticCredentials int `json:"automatic_credentials,omitempty"`
StorageClass string `json:"storage_class,omitempty"`
}
// GCSFs is a Fs implementation for Google Cloud Storage.
@ -67,7 +66,11 @@ func NewGCSFs(connectionID, localTempDir string, config GCSFsConfig) (Fs, error)
return fs, err
}
ctx := context.Background()
if fs.config.AutomaticCredentials > 0 {
fs.svc, err = storage.NewClient(ctx)
} else {
fs.svc, err = storage.NewClient(ctx, option.WithCredentialsFile(fs.config.CredentialFile))
}
return fs, err
}
@ -97,10 +100,6 @@ func (fs GCSFs) Stat(name string) (os.FileInfo, error) {
}
prefix := fs.getPrefixForStat(name)
query := &storage.Query{Prefix: prefix, Delimiter: "/"}
ctx, cancelFn := context.WithDeadline(context.Background(), time.Now().Add(fs.ctxTimeout))
defer cancelFn()
bkt := fs.svc.Bucket(fs.config.Bucket)
@ -299,7 +298,7 @@ func (GCSFs) Chtimes(name string, atime, mtime time.Time) error {
// a list of directory entries.
func (fs GCSFs) ReadDir(dirname string) ([]os.FileInfo, error) {
var result []os.FileInfo
// dirname must be already cleaned
prefix := ""
if len(dirname) > 0 && dirname != "." {
prefix = strings.TrimPrefix(dirname, "/")
@ -308,10 +307,6 @@ func (fs GCSFs) ReadDir(dirname string) ([]os.FileInfo, error) {
}
}
query := &storage.Query{Prefix: prefix, Delimiter: "/"}
ctx, cancelFn := context.WithDeadline(context.Background(), time.Now().Add(fs.ctxTimeout))
defer cancelFn()
bkt := fs.svc.Bucket(fs.config.Bucket)


@ -325,7 +325,7 @@ func (S3Fs) Chtimes(name string, atime, mtime time.Time) error {
// a list of directory entries.
func (fs S3Fs) ReadDir(dirname string) ([]os.FileInfo, error) {
var result []os.FileInfo
// dirname must be already cleaned
prefix := ""
if dirname != "/" && dirname != "." {
prefix = strings.TrimPrefix(dirname, "/")


@ -109,7 +109,7 @@ func ValidateGCSFsConfig(config *GCSFsConfig, credentialsFilePath string) error
config.KeyPrefix += "/"
}
}
if len(config.Credentials) == 0 && config.AutomaticCredentials == 0 {
fi, err := os.Stat(credentialsFilePath)
if err != nil {
return fmt.Errorf("invalid credentials %v", err)