// Package dataprovider provides data access.
// It abstracts different data providers and exposes a common API.
package dataprovider

import (
	"bufio"
	"bytes"
	"context"
	"crypto/sha1"
	"crypto/sha256"
	"crypto/sha512"
	"crypto/subtle"
	"crypto/x509"
	"encoding/base64"
	"encoding/json"
	"errors"
	"fmt"
	"hash"
	"io"
	"net"
	"net/http"
	"net/url"
	"os"
	"os/exec"
	"path"
	"path/filepath"
	"regexp"
	"runtime"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/GehirnInc/crypt"
	"github.com/GehirnInc/crypt/apr1_crypt"
	"github.com/GehirnInc/crypt/md5_crypt"
	"github.com/GehirnInc/crypt/sha512_crypt"
	"github.com/alexedwards/argon2id"
	"github.com/go-chi/render"
	"github.com/rs/xid"
	"golang.org/x/crypto/bcrypt"
	"golang.org/x/crypto/pbkdf2"
	"golang.org/x/crypto/ssh"

	"github.com/drakkan/sftpgo/httpclient"
	"github.com/drakkan/sftpgo/kms"
	"github.com/drakkan/sftpgo/logger"
	"github.com/drakkan/sftpgo/metrics"
	"github.com/drakkan/sftpgo/utils"
	"github.com/drakkan/sftpgo/vfs"
)

const (
	// SQLiteDataProviderName defines the name for the SQLite database provider
	SQLiteDataProviderName = "sqlite"
	// PGSQLDataProviderName defines the name for the PostgreSQL database provider
	PGSQLDataProviderName = "postgresql"
	// MySQLDataProviderName defines the name for the MySQL database provider
	MySQLDataProviderName = "mysql"
	// BoltDataProviderName defines the name for the bbolt key/value store provider
	BoltDataProviderName = "bolt"
	// MemoryDataProviderName defines the name for the memory provider
	MemoryDataProviderName = "memory"
	// CockroachDataProviderName defines the name for the CockroachDB provider
	CockroachDataProviderName = "cockroachdb"
	// DumpVersion defines the version for the dump.
	// For restore/load we support the current version and the previous one
	DumpVersion = 7

	argonPwdPrefix            = "$argon2id$"
	bcryptPwdPrefix           = "$2a$"
	pbkdf2SHA1Prefix          = "$pbkdf2-sha1$"
	pbkdf2SHA256Prefix        = "$pbkdf2-sha256$"
	pbkdf2SHA512Prefix        = "$pbkdf2-sha512$"
	pbkdf2SHA256B64SaltPrefix = "$pbkdf2-b64salt-sha256$"
	md5cryptPwdPrefix         = "$1$"
	md5cryptApr1PwdPrefix     = "$apr1$"
	sha512cryptPwdPrefix      = "$6$"
	trackQuotaDisabledError   = "please enable track_quota in your configuration to use this method"
	operationAdd              = "add"
	operationUpdate           = "update"
	operationDelete           = "delete"
	sqlPrefixValidChars       = "abcdefghijklmnopqrstuvwxyz_0123456789"
)

// Supported algorithms for hashing passwords.
// These algorithms can be used when SFTPGo hashes a plain text password
const (
	// HashingAlgoBcrypt defines the value to select the bcrypt hashing algorithm
	HashingAlgoBcrypt = "bcrypt"
	// HashingAlgoArgon2ID defines the value to select the argon2id hashing algorithm
	HashingAlgoArgon2ID = "argon2id"
)

// ordering constants
const (
	// OrderASC defines ascending ordering
	OrderASC = "ASC"
	// OrderDESC defines descending ordering
	OrderDESC = "DESC"
)

var (
	// SupportedProviders defines the supported data providers
	SupportedProviders = []string{SQLiteDataProviderName, PGSQLDataProviderName, MySQLDataProviderName,
		BoltDataProviderName, MemoryDataProviderName, CockroachDataProviderName}
	// ValidPerms defines all the valid permissions for a user
	ValidPerms = []string{PermAny, PermListItems, PermDownload, PermUpload, PermOverwrite, PermRename, PermDelete,
		PermCreateDirs, PermCreateSymlinks, PermChmod, PermChown, PermChtimes}
	// ValidLoginMethods defines all the valid login methods
	ValidLoginMethods = []string{SSHLoginMethodPublicKey, LoginMethodPassword, SSHLoginMethodKeyboardInteractive,
		SSHLoginMethodKeyAndPassword, SSHLoginMethodKeyAndKeyboardInt, LoginMethodTLSCertificate,
		LoginMethodTLSCertificateAndPwd}
	// SSHMultiStepsLoginMethods defines the supported multi-step authentication methods
	SSHMultiStepsLoginMethods = []string{SSHLoginMethodKeyAndPassword, SSHLoginMethodKeyAndKeyboardInt}
	// ErrNoAuthTryed defines the error for connections closed before authentication
	ErrNoAuthTryed = errors.New("no auth tryed")
	// ValidProtocols defines all the valid protocols
	ValidProtocols = []string{"SSH", "FTP", "DAV"}
	// ErrNoInitRequired defines the error returned by InitProvider if no initialization/update is required
	ErrNoInitRequired = errors.New("the data provider is up to date")
	// ErrInvalidCredentials defines the error to return if the supplied credentials are invalid
	ErrInvalidCredentials = errors.New("invalid credentials")
	validTLSUsernames     = []string{string(TLSUsernameNone), string(TLSUsernameCN)}
	config                Config
	provider              Provider
	sqlPlaceholders       []string
	hashPwdPrefixes       = []string{argonPwdPrefix, bcryptPwdPrefix, pbkdf2SHA1Prefix, pbkdf2SHA256Prefix,
		pbkdf2SHA512Prefix, pbkdf2SHA256B64SaltPrefix, md5cryptPwdPrefix, md5cryptApr1PwdPrefix, sha512cryptPwdPrefix}
	pbkdfPwdPrefixes        = []string{pbkdf2SHA1Prefix, pbkdf2SHA256Prefix, pbkdf2SHA512Prefix, pbkdf2SHA256B64SaltPrefix}
	pbkdfPwdB64SaltPrefixes = []string{pbkdf2SHA256B64SaltPrefix}
	unixPwdPrefixes         = []string{md5cryptPwdPrefix, md5cryptApr1PwdPrefix, sha512cryptPwdPrefix}
	logSender               = "dataProvider"
	availabilityTicker      *time.Ticker
	availabilityTickerDone  chan bool
	credentialsDirPath      string
	sqlTableUsers           = "users"
	sqlTableFolders         = "folders"
	sqlTableFoldersMapping  = "folders_mapping"
	sqlTableAdmins          = "admins"
	sqlTableSchemaVersion   = "schema_version"
	argon2Params            *argon2id.Params
	lastLoginMinDelay       = 10 * time.Minute
	usernameRegex           = regexp.MustCompile("^[a-zA-Z0-9-_.~]+$")
)

type schemaVersion struct {
	Version int
}

// BcryptOptions defines the options for bcrypt password hashing
type BcryptOptions struct {
	Cost int `json:"cost" mapstructure:"cost"`
}

// Argon2Options defines the options for argon2 password hashing
type Argon2Options struct {
	Memory      uint32 `json:"memory" mapstructure:"memory"`
	Iterations  uint32 `json:"iterations" mapstructure:"iterations"`
	Parallelism uint8  `json:"parallelism" mapstructure:"parallelism"`
}

// PasswordHashing defines the configuration for password hashing
type PasswordHashing struct {
	BcryptOptions BcryptOptions `json:"bcrypt_options" mapstructure:"bcrypt_options"`
	Argon2Options Argon2Options `json:"argon2_options" mapstructure:"argon2_options"`
	// Algorithm to use for hashing passwords. Available algorithms: argon2id, bcrypt. Default: bcrypt
	Algo string `json:"algo" mapstructure:"algo"`
}
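// The corresponding JSON configuration keys follow the struct tags above. A
// minimal sketch (the values shown are illustrative, not authoritative defaults):
//
//	"password_hashing": {
//	  "bcrypt_options": {"cost": 10},
//	  "argon2_options": {"memory": 65536, "iterations": 1, "parallelism": 2},
//	  "algo": "bcrypt"
//	}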

// UserActions defines the actions to execute on user create, update, delete.
type UserActions struct {
	// Valid values are add, update, delete. Empty slice to disable
	ExecuteOn []string `json:"execute_on" mapstructure:"execute_on"`
	// Absolute path to an external program or an HTTP URL
	Hook string `json:"hook" mapstructure:"hook"`
}
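// An illustrative configuration (the script path is hypothetical) that runs a
// local program when users are added or deleted:
//
//	"actions": {
//	  "execute_on": ["add", "delete"],
//	  "hook": "/usr/local/bin/sftpgo_user_hook"
//	}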

// ProviderStatus defines the provider status
type ProviderStatus struct {
	Driver   string `json:"driver"`
	IsActive bool   `json:"is_active"`
	Error    string `json:"error"`
}

// Config provider configuration
type Config struct {
	// Driver name, must be one of the SupportedProviders
	Driver string `json:"driver" mapstructure:"driver"`
	// Database name. For driver sqlite this can be the database name relative to the config dir
	// or the absolute path to the SQLite database.
	Name string `json:"name" mapstructure:"name"`
	// Database host
	Host string `json:"host" mapstructure:"host"`
	// Database port
	Port int `json:"port" mapstructure:"port"`
	// Database username
	Username string `json:"username" mapstructure:"username"`
	// Database password
	Password string `json:"password" mapstructure:"password"`
	// Used for drivers mysql and postgresql.
	// 0 disable SSL/TLS connections.
	// 1 require ssl.
	// 2 set ssl mode to verify-ca for driver postgresql and skip-verify for driver mysql.
	// 3 set ssl mode to verify-full for driver postgresql and preferred for driver mysql.
	SSLMode int `json:"sslmode" mapstructure:"sslmode"`
	// Custom database connection string.
	// If not empty this connection string will be used instead of building one using the previous parameters
	ConnectionString string `json:"connection_string" mapstructure:"connection_string"`
	// prefix for SQL tables
	SQLTablesPrefix string `json:"sql_tables_prefix" mapstructure:"sql_tables_prefix"`
	// Set the preferred way to track users quota between the following choices:
	// 0, disable quota tracking. REST API to scan user dir and update quota will do nothing
	// 1, quota is updated each time a user uploads or deletes a file, even if the user has no quota restrictions
	// 2, quota is updated each time a user uploads or deletes a file, but only for users with quota restrictions
	//    and for virtual folders.
	//    With this configuration the "quota scan" REST API can still be used to periodically update space usage
	//    for users without quota restrictions
	TrackQuota int `json:"track_quota" mapstructure:"track_quota"`
	// Sets the maximum number of open connections for the mysql and postgresql drivers.
	// Default 0 (unlimited)
	PoolSize int `json:"pool_size" mapstructure:"pool_size"`
	// Users default base directory.
	// If no home dir is defined while adding a new user, and this value is
	// a valid absolute path, then the user home dir will be automatically
	// defined as the path obtained joining the base dir and the username
	UsersBaseDir string `json:"users_base_dir" mapstructure:"users_base_dir"`
	// Actions to execute on user add, update, delete.
	// Update action will not be fired for internal updates such as the last login or the user quota fields.
	Actions UserActions `json:"actions" mapstructure:"actions"`
	// Absolute path to an external program or an HTTP URL to invoke for users authentication.
	// Leave empty to use builtin authentication.
	// If the authentication succeeds the user will be automatically added/updated inside the defined data provider.
	// Actions defined for users added/updated will not be executed in this case.
	// This method is slower than built-in authentication methods, but it's very flexible as anyone can
	// easily write their own authentication hooks.
	ExternalAuthHook string `json:"external_auth_hook" mapstructure:"external_auth_hook"`
	// ExternalAuthScope defines the scope for the external authentication hook.
	// - 0 means all supported authentication scopes, the external hook will be executed for password,
	//   public key, keyboard interactive authentication and TLS certificates
	// - 1 means passwords only
	// - 2 means public keys only
	// - 4 means keyboard interactive only
	// - 8 means TLS certificates only
	// you can combine the scopes, for example 3 means password and public key, 5 password and keyboard
	// interactive and so on
	ExternalAuthScope int `json:"external_auth_scope" mapstructure:"external_auth_scope"`
	// CredentialsPath defines the directory for storing user provided credential files such as
	// Google Cloud Storage credentials. It can be a path relative to the config dir or an
	// absolute path
	CredentialsPath string `json:"credentials_path" mapstructure:"credentials_path"`
	// Absolute path to an external program or an HTTP URL to invoke just before the user login.
	// This program/URL allows you to modify or create the user trying to login.
	// It is useful if you have users with dynamic fields to update just before the login.
	// Please note that if you want to create a new user, the pre-login hook response must
	// include all the mandatory user fields.
	//
	// The pre-login hook must finish within 30 seconds.
	//
	// If an error happens while executing the "PreLoginHook" then login will be denied.
	// PreLoginHook and ExternalAuthHook are mutually exclusive.
	// Leave empty to disable.
	PreLoginHook string `json:"pre_login_hook" mapstructure:"pre_login_hook"`
	// Absolute path to an external program or an HTTP URL to invoke after the user login.
	// Based on the configured scope you can choose to be notified of failed logins,
	// successful logins or both
	PostLoginHook string `json:"post_login_hook" mapstructure:"post_login_hook"`
	// PostLoginScope defines the scope for the post-login hook.
	// - 0 means notify both failed and successful logins
	// - 1 means notify failed logins
	// - 2 means notify successful logins
	PostLoginScope int `json:"post_login_scope" mapstructure:"post_login_scope"`
	// Absolute path to an external program or an HTTP URL to invoke just before password
	// authentication. This hook allows you to externally check the provided password,
	// its main use case is to easily support things like password+OTP for protocols
	// without keyboard interactive support such as FTP and WebDAV. You can ask your users
	// to login using a string consisting of a fixed password and a One Time Token, you
	// can verify the token inside the hook and ask SFTPGo to verify the fixed part.
	CheckPasswordHook string `json:"check_password_hook" mapstructure:"check_password_hook"`
	// CheckPasswordScope defines the scope for the check password hook.
	// - 0 means all protocols
	// - 1 means SSH
	// - 2 means FTP
	// - 4 means WebDAV
	// you can combine the scopes, for example 6 means FTP and WebDAV
	CheckPasswordScope int `json:"check_password_scope" mapstructure:"check_password_scope"`
	// Defines how the database will be initialized/updated:
	// - 0 means automatically
	// - 1 means manually using the initprovider sub-command
	UpdateMode int `json:"update_mode" mapstructure:"update_mode"`
	// PasswordHashing defines the configuration for password hashing
	PasswordHashing PasswordHashing `json:"password_hashing" mapstructure:"password_hashing"`
	// PreferDatabaseCredentials indicates whether credential files (currently used for Google
	// Cloud Storage) should be stored in the database instead of in the directory specified by
	// CredentialsPath.
	PreferDatabaseCredentials bool `json:"prefer_database_credentials" mapstructure:"prefer_database_credentials"`
	// SkipNaturalKeysValidation allows using any UTF-8 character for natural keys such as username,
	// admin name, folder name. These keys are used in URIs for the REST API and Web admin. By default
	// only unreserved URI characters are allowed: ALPHA / DIGIT / "-" / "." / "_" / "~".
	SkipNaturalKeysValidation bool `json:"skip_natural_keys_validation" mapstructure:"skip_natural_keys_validation"`
	// Verifying argon2 passwords has a high memory and computational cost,
	// by enabling in-memory password caching you reduce this cost.
	PasswordCaching bool `json:"password_caching" mapstructure:"password_caching"`
	// DelayedQuotaUpdate defines the number of seconds to accumulate quota updates.
	// If there are a lot of uploads, accumulating quota updates can save you many
	// queries to the data provider.
	// If you want to track quotas, a scheduled quota update is recommended in any case: the stored
	// quota size may be incorrect for several reasons, such as an unexpected shutdown, temporary provider
	// failures, files copied outside of SFTPGo, and so on.
	// 0 means immediate quota update.
	DelayedQuotaUpdate int `json:"delayed_quota_update" mapstructure:"delayed_quota_update"`
}
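// A minimal sketch of the corresponding JSON configuration for the SQLite
// driver. Keys follow the struct tags above; the values are illustrative only
// and the enclosing "data_provider" section name is an assumption that may
// differ depending on how the configuration file is organized:
//
//	"data_provider": {
//	  "driver": "sqlite",
//	  "name": "sftpgo.db",
//	  "track_quota": 2,
//	  "external_auth_scope": 3,
//	  "delayed_quota_update": 0
//	}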

// BackupData defines the structure for the backup/restore files
type BackupData struct {
	Users   []User                  `json:"users"`
	Folders []vfs.BaseVirtualFolder `json:"folders"`
	Admins  []Admin                 `json:"admins"`
	Version int                     `json:"version"`
}

// HasFolder returns true if the folder with the given name is included
func (d *BackupData) HasFolder(name string) bool {
	for _, folder := range d.Folders {
		if folder.Name == name {
			return true
		}
	}
	return false
}

type keyboardAuthHookRequest struct {
	RequestID string   `json:"request_id"`
	Username  string   `json:"username,omitempty"`
	IP        string   `json:"ip,omitempty"`
	Password  string   `json:"password,omitempty"`
	Answers   []string `json:"answers,omitempty"`
	Questions []string `json:"questions,omitempty"`
}

type keyboardAuthHookResponse struct {
	Instruction string   `json:"instruction"`
	Questions   []string `json:"questions"`
	Echos       []bool   `json:"echos"`
	AuthResult  int      `json:"auth_result"`
	CheckPwd    int      `json:"check_password"`
}

type checkPasswordRequest struct {
	Username string `json:"username"`
	IP       string `json:"ip"`
	Password string `json:"password"`
	Protocol string `json:"protocol"`
}

type checkPasswordResponse struct {
	// 0 KO, 1 OK, 2 partial success, -1 not executed
	Status int `json:"status"`
	// for status = 2 this is the password to check against the one stored
	// inside the SFTPGo data provider
	ToVerify string `json:"to_verify"`
}
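// An illustrative hook response for a partial success, asking SFTPGo to verify
// only the fixed part of a password+OTP string against the stored password:
//
//	{"status": 2, "to_verify": "fixed-password-part"}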

// ValidationError raised if input data is not valid
type ValidationError struct {
	err string
}

// Validation error details
func (e *ValidationError) Error() string {
	return fmt.Sprintf("Validation error: %s", e.err)
}

// NewValidationError returns a validation error
func NewValidationError(error string) *ValidationError {
	return &ValidationError{
		err: error,
	}
}

// MethodDisabledError raised if a method is disabled in config file.
// For example, if user management is disabled, this error is raised
// every time a user operation is done using the REST API
type MethodDisabledError struct {
	err string
}

// Method disabled error details
func (e *MethodDisabledError) Error() string {
	return fmt.Sprintf("Method disabled error: %s", e.err)
}

// RecordNotFoundError raised if a requested user is not found
type RecordNotFoundError struct {
	err string
}

// Record not found error details
func (e *RecordNotFoundError) Error() string {
	return fmt.Sprintf("not found: %s", e.err)
}

// GetQuotaTracking returns the configured mode for user's quota tracking
func GetQuotaTracking() int {
	return config.TrackQuota
}

// Provider defines the interface that data providers must implement.
type Provider interface {
	validateUserAndPass(username, password, ip, protocol string) (User, error)
	validateUserAndPubKey(username string, pubKey []byte) (User, string, error)
	validateUserAndTLSCert(username, protocol string, tlsCert *x509.Certificate) (User, error)
	updateQuota(username string, filesAdd int, sizeAdd int64, reset bool) error
	getUsedQuota(username string) (int, int64, error)
	userExists(username string) (User, error)
	addUser(user *User) error
	updateUser(user *User) error
	deleteUser(user *User) error
	getUsers(limit int, offset int, order string) ([]User, error)
	dumpUsers() ([]User, error)
	updateLastLogin(username string) error
	getFolders(limit, offset int, order string) ([]vfs.BaseVirtualFolder, error)
	getFolderByName(name string) (vfs.BaseVirtualFolder, error)
	addFolder(folder *vfs.BaseVirtualFolder) error
	updateFolder(folder *vfs.BaseVirtualFolder) error
	deleteFolder(folder *vfs.BaseVirtualFolder) error
	updateFolderQuota(name string, filesAdd int, sizeAdd int64, reset bool) error
	getUsedFolderQuota(name string) (int, int64, error)
	dumpFolders() ([]vfs.BaseVirtualFolder, error)
	adminExists(username string) (Admin, error)
	addAdmin(admin *Admin) error
	updateAdmin(admin *Admin) error
	deleteAdmin(admin *Admin) error
	getAdmins(limit int, offset int, order string) ([]Admin, error)
	dumpAdmins() ([]Admin, error)
	validateAdminAndPass(username, password, ip string) (Admin, error)
	checkAvailability() error
	close() error
	reloadConfig() error
	initializeDatabase() error
	migrateDatabase() error
	revertDatabase(targetVersion int) error
}

type fsValidatorHelper interface {
	GetGCSCredentialsFilePath() string
	GetEncrytionAdditionalData() string
}

// Initialize the data provider.
// An error is returned if the configured driver is invalid or if the data provider cannot be initialized
func Initialize(cnf Config, basePath string, checkAdmins bool) error {
	var err error
	config = cnf

	if filepath.IsAbs(config.CredentialsPath) {
		credentialsDirPath = config.CredentialsPath
	} else {
		credentialsDirPath = filepath.Join(basePath, config.CredentialsPath)
	}
	vfs.SetCredentialsDirPath(credentialsDirPath)

	argon2Params = &argon2id.Params{
		Memory:      cnf.PasswordHashing.Argon2Options.Memory,
		Iterations:  cnf.PasswordHashing.Argon2Options.Iterations,
		Parallelism: cnf.PasswordHashing.Argon2Options.Parallelism,
		SaltLength:  16,
		KeyLength:   32,
	}

	if config.PasswordHashing.Algo == HashingAlgoBcrypt {
		if config.PasswordHashing.BcryptOptions.Cost > bcrypt.MaxCost {
			err = fmt.Errorf("invalid bcrypt cost %v, max allowed %v", config.PasswordHashing.BcryptOptions.Cost, bcrypt.MaxCost)
			logger.WarnToConsole("Unable to initialize data provider: %v", err)
			providerLog(logger.LevelWarn, "Unable to initialize data provider: %v", err)
			return err
		}
	}

	if err = validateHooks(); err != nil {
		return err
	}
	err = createProvider(basePath)
	if err != nil {
		return err
	}
	if cnf.UpdateMode == 0 {
		err = provider.initializeDatabase()
		if err != nil && err != ErrNoInitRequired {
			logger.WarnToConsole("Unable to initialize data provider: %v", err)
			providerLog(logger.LevelWarn, "Unable to initialize data provider: %v", err)
			return err
		}
		if err == nil {
			logger.DebugToConsole("Data provider successfully initialized")
		}
		err = provider.migrateDatabase()
		if err != nil && err != ErrNoInitRequired {
			providerLog(logger.LevelWarn, "database migration error: %v", err)
			return err
		}
		if checkAdmins {
			err = checkDefaultAdmin()
			if err != nil {
				providerLog(logger.LevelWarn, "check default admin error: %v", err)
				return err
			}
		}
	} else {
		providerLog(logger.LevelInfo, "database initialization/migration skipped, manual mode is configured")
	}
	startAvailabilityTimer()
	delayedQuotaUpdater.start()
	return nil
}
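// A usage sketch from calling code (illustrative only; configDir is a
// hypothetical variable holding the configuration directory):
//
//	cfg := dataprovider.Config{Driver: dataprovider.SQLiteDataProviderName, Name: "sftpgo.db"}
//	if err := dataprovider.Initialize(cfg, configDir, true); err != nil {
//		logger.WarnToConsole("unable to initialize data provider: %v", err)
//	}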

func validateHooks() error {
	var hooks []string
	if config.PreLoginHook != "" && !strings.HasPrefix(config.PreLoginHook, "http") {
		hooks = append(hooks, config.PreLoginHook)
	}
	if config.ExternalAuthHook != "" && !strings.HasPrefix(config.ExternalAuthHook, "http") {
		hooks = append(hooks, config.ExternalAuthHook)
	}
	if config.PostLoginHook != "" && !strings.HasPrefix(config.PostLoginHook, "http") {
		hooks = append(hooks, config.PostLoginHook)
	}
	if config.CheckPasswordHook != "" && !strings.HasPrefix(config.CheckPasswordHook, "http") {
		hooks = append(hooks, config.CheckPasswordHook)
	}

	for _, hook := range hooks {
		if !filepath.IsAbs(hook) {
			return fmt.Errorf("invalid hook: %#v must be an absolute path", hook)
		}
		_, err := os.Stat(hook)
		if err != nil {
			providerLog(logger.LevelWarn, "invalid hook: %v", err)
			return err
		}
	}

	return nil
}

func validateSQLTablesPrefix() error {
	if config.SQLTablesPrefix != "" {
		for _, char := range config.SQLTablesPrefix {
			if !strings.Contains(sqlPrefixValidChars, strings.ToLower(string(char))) {
				return errors.New("invalid sql_tables_prefix only chars in range 'a..z', 'A..Z', '0-9' and '_' are allowed")
			}
		}
		sqlTableUsers = config.SQLTablesPrefix + sqlTableUsers
		sqlTableFolders = config.SQLTablesPrefix + sqlTableFolders
		sqlTableFoldersMapping = config.SQLTablesPrefix + sqlTableFoldersMapping
		sqlTableAdmins = config.SQLTablesPrefix + sqlTableAdmins
		sqlTableSchemaVersion = config.SQLTablesPrefix + sqlTableSchemaVersion
		providerLog(logger.LevelDebug, "sql table for users %#v, folders %#v folders mapping %#v admins %#v schema version %#v",
			sqlTableUsers, sqlTableFolders, sqlTableFoldersMapping, sqlTableAdmins, sqlTableSchemaVersion)
	}
	return nil
}
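// For example, with "sql_tables_prefix" set to "sftpgo_" the tables above become
// "sftpgo_users", "sftpgo_folders", "sftpgo_folders_mapping", "sftpgo_admins"
// and "sftpgo_schema_version".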

func checkDefaultAdmin() error {
	admins, err := provider.getAdmins(1, 0, OrderASC)
	if err != nil {
		return err
	}
	if len(admins) > 0 {
		return nil
	}
	logger.Debug(logSender, "", "no admins found, try to create the default one")
	// we need to create the default admin
	admin := &Admin{}
	admin.setDefaults()
	return provider.addAdmin(admin)
}

// InitializeDatabase creates the initial database structure
func InitializeDatabase(cnf Config, basePath string) error {
	config = cnf

	if filepath.IsAbs(config.CredentialsPath) {
		credentialsDirPath = config.CredentialsPath
	} else {
		credentialsDirPath = filepath.Join(basePath, config.CredentialsPath)
	}

	err := createProvider(basePath)
	if err != nil {
		return err
	}
	err = provider.initializeDatabase()
	if err != nil && err != ErrNoInitRequired {
		return err
	}
	return provider.migrateDatabase()
}

// RevertDatabase restores schema and/or data to a previous version
func RevertDatabase(cnf Config, basePath string, targetVersion int) error {
	config = cnf

	if filepath.IsAbs(config.CredentialsPath) {
		credentialsDirPath = config.CredentialsPath
	} else {
		credentialsDirPath = filepath.Join(basePath, config.CredentialsPath)
	}

	err := createProvider(basePath)
	if err != nil {
		return err
	}
	err = provider.initializeDatabase()
	if err != nil && err != ErrNoInitRequired {
		return err
	}
	return provider.revertDatabase(targetVersion)
}

// CheckAdminAndPass validates the given admin and password connecting from ip
func CheckAdminAndPass(username, password, ip string) (Admin, error) {
	return provider.validateAdminAndPass(username, password, ip)
}

// CheckCachedUserCredentials checks the credentials for a cached user
func CheckCachedUserCredentials(user *CachedUser, password, loginMethod, protocol string, tlsCert *x509.Certificate) error {
	if loginMethod != LoginMethodPassword {
		_, err := checkUserAndTLSCertificate(&user.User, protocol, tlsCert)
		if err != nil {
			return err
		}
		if loginMethod == LoginMethodTLSCertificate {
			if !user.User.IsLoginMethodAllowed(LoginMethodTLSCertificate, nil) {
				return fmt.Errorf("certificate login method is not allowed for user %#v", user.User.Username)
			}
			return nil
		}
	}
	if err := checkLoginConditions(&user.User); err != nil {
		return err
	}
	if password == "" {
		return ErrInvalidCredentials
	}
	if user.Password != "" {
		if password == user.Password {
			return nil
		}
	} else {
		if ok, _ := isPasswordOK(&user.User, password); ok {
			return nil
		}
	}
	return ErrInvalidCredentials
}

// CheckCompositeCredentials checks multiple credentials.
// WebDAV users can send both a password and a TLS certificate within the same request
func CheckCompositeCredentials(username, password, ip, loginMethod, protocol string, tlsCert *x509.Certificate) (User, string, error) {
	if loginMethod == LoginMethodPassword {
		user, err := CheckUserAndPass(username, password, ip, protocol)
		return user, loginMethod, err
	}
	user, err := CheckUserBeforeTLSAuth(username, ip, protocol, tlsCert)
	if err != nil {
		return user, loginMethod, err
	}
	if !user.IsTLSUsernameVerificationEnabled() {
		// for backward compatibility with 2.0.x we only check the password and change the login method here
		// in future updates we have to return an error
		user, err := CheckUserAndPass(username, password, ip, protocol)
		return user, LoginMethodPassword, err
	}
	user, err = checkUserAndTLSCertificate(&user, protocol, tlsCert)
	if err != nil {
		return user, loginMethod, err
	}
	if loginMethod == LoginMethodTLSCertificate && !user.IsLoginMethodAllowed(LoginMethodTLSCertificate, nil) {
		return user, loginMethod, fmt.Errorf("certificate login method is not allowed for user %#v", user.Username)
	}
	if loginMethod == LoginMethodTLSCertificateAndPwd {
		if config.ExternalAuthHook != "" && (config.ExternalAuthScope == 0 || config.ExternalAuthScope&1 != 0) {
			user, err = doExternalAuth(username, password, nil, "", ip, protocol, nil)
			if err != nil {
				return user, loginMethod, err
			}
		}
		if config.PreLoginHook != "" {
			user, err = executePreLoginHook(username, LoginMethodPassword, ip, protocol)
			if err != nil {
				return user, loginMethod, err
			}
		}
		user, err = checkUserAndPass(&user, password, ip, protocol)
	}
	return user, loginMethod, err
}

// CheckUserBeforeTLSAuth checks if a user exists before trying mutual TLS
func CheckUserBeforeTLSAuth(username, ip, protocol string, tlsCert *x509.Certificate) (User, error) {
	if config.ExternalAuthHook != "" && (config.ExternalAuthScope == 0 || config.ExternalAuthScope&8 != 0) {
		return doExternalAuth(username, "", nil, "", ip, protocol, tlsCert)
	}
	if config.PreLoginHook != "" {
		return executePreLoginHook(username, LoginMethodTLSCertificate, ip, protocol)
	}
	return UserExists(username)
}

// CheckUserAndTLSCert returns the SFTPGo user with the given username and checks whether the
// given TLS certificate allows authentication without a password
func CheckUserAndTLSCert(username, ip, protocol string, tlsCert *x509.Certificate) (User, error) {
	if config.ExternalAuthHook != "" && (config.ExternalAuthScope == 0 || config.ExternalAuthScope&8 != 0) {
		user, err := doExternalAuth(username, "", nil, "", ip, protocol, tlsCert)
		if err != nil {
			return user, err
		}
		return checkUserAndTLSCertificate(&user, protocol, tlsCert)
	}
	if config.PreLoginHook != "" {
		user, err := executePreLoginHook(username, LoginMethodTLSCertificate, ip, protocol)
		if err != nil {
			return user, err
		}
		return checkUserAndTLSCertificate(&user, protocol, tlsCert)
	}
	return provider.validateUserAndTLSCert(username, protocol, tlsCert)
}

// CheckUserAndPass retrieves the SFTPGo user with the given username and password if a match is
// found, or returns an error otherwise
func CheckUserAndPass(username, password, ip, protocol string) (User, error) {
	if config.ExternalAuthHook != "" && (config.ExternalAuthScope == 0 || config.ExternalAuthScope&1 != 0) {
		user, err := doExternalAuth(username, password, nil, "", ip, protocol, nil)
		if err != nil {
			return user, err
		}
		return checkUserAndPass(&user, password, ip, protocol)
	}
	if config.PreLoginHook != "" {
		user, err := executePreLoginHook(username, LoginMethodPassword, ip, protocol)
		if err != nil {
			return user, err
		}
		return checkUserAndPass(&user, password, ip, protocol)
	}
	return provider.validateUserAndPass(username, password, ip, protocol)
}

// CheckUserAndPubKey retrieves the SFTP user with the given username and public key if a match is
// found, or returns an error otherwise
func CheckUserAndPubKey(username string, pubKey []byte, ip, protocol string) (User, string, error) {
	if config.ExternalAuthHook != "" && (config.ExternalAuthScope == 0 || config.ExternalAuthScope&2 != 0) {
		user, err := doExternalAuth(username, "", pubKey, "", ip, protocol, nil)
		if err != nil {
			return user, "", err
		}
		return checkUserAndPubKey(&user, pubKey)
	}
	if config.PreLoginHook != "" {
		user, err := executePreLoginHook(username, SSHLoginMethodPublicKey, ip, protocol)
		if err != nil {
			return user, "", err
		}
		return checkUserAndPubKey(&user, pubKey)
	}
	return provider.validateUserAndPubKey(username, pubKey)
}

// CheckKeyboardInteractiveAuth checks the keyboard interactive authentication and returns
// the authenticated user or an error
func CheckKeyboardInteractiveAuth(username, authHook string, client ssh.KeyboardInteractiveChallenge, ip, protocol string) (User, error) {
	var user User
	var err error
	if config.ExternalAuthHook != "" && (config.ExternalAuthScope == 0 || config.ExternalAuthScope&4 != 0) {
		user, err = doExternalAuth(username, "", nil, "1", ip, protocol, nil)
	} else if config.PreLoginHook != "" {
		user, err = executePreLoginHook(username, SSHLoginMethodKeyboardInteractive, ip, protocol)
	} else {
		user, err = provider.userExists(username)
	}
	if err != nil {
		return user, err
	}
	return doKeyboardInteractiveAuth(&user, authHook, client, ip, protocol)
}

// UpdateLastLogin updates the last login fields for the given SFTP user
func UpdateLastLogin(user *User) error {
	lastLogin := utils.GetTimeFromMsecSinceEpoch(user.LastLogin)
	diff := -time.Until(lastLogin)
	if diff < 0 || diff > lastLoginMinDelay {
		err := provider.updateLastLogin(user.Username)
		if err == nil {
			webDAVUsersCache.updateLastLogin(user.Username)
		}
		return err
	}
	return nil
}

// UpdateUserQuota updates the quota for the given SFTP user adding filesAdd and sizeAdd.
// If reset is true filesAdd and sizeAdd indicate the total files and the total size instead of the difference.
func UpdateUserQuota(user *User, filesAdd int, sizeAdd int64, reset bool) error {
	if config.TrackQuota == 0 {
		return &MethodDisabledError{err: trackQuotaDisabledError}
	} else if config.TrackQuota == 2 && !reset && !user.HasQuotaRestrictions() {
		return nil
	}
	if filesAdd == 0 && sizeAdd == 0 && !reset {
		return nil
	}
	if config.DelayedQuotaUpdate == 0 || reset {
		if reset {
			delayedQuotaUpdater.resetUserQuota(user.Username)
		}
		return provider.updateQuota(user.Username, filesAdd, sizeAdd, reset)
	}
	delayedQuotaUpdater.updateUserQuota(user.Username, filesAdd, sizeAdd)
	return nil
}
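// Example (illustrative): record one uploaded file of 1024 bytes for a user;
// with delayed_quota_update > 0 the change is accumulated in memory instead of
// being written to the provider immediately:
//
//	err := dataprovider.UpdateUserQuota(&user, 1, 1024, false)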

// UpdateVirtualFolderQuota updates the quota for the given virtual folder adding filesAdd and sizeAdd.
// If reset is true filesAdd and sizeAdd indicate the total files and the total size instead of the difference.
func UpdateVirtualFolderQuota(vfolder *vfs.BaseVirtualFolder, filesAdd int, sizeAdd int64, reset bool) error {
	if config.TrackQuota == 0 {
		return &MethodDisabledError{err: trackQuotaDisabledError}
	}
	if filesAdd == 0 && sizeAdd == 0 && !reset {
		return nil
	}
	if config.DelayedQuotaUpdate == 0 || reset {
		if reset {
			delayedQuotaUpdater.resetFolderQuota(vfolder.Name)
		}
		return provider.updateFolderQuota(vfolder.Name, filesAdd, sizeAdd, reset)
	}
	delayedQuotaUpdater.updateFolderQuota(vfolder.Name, filesAdd, sizeAdd)
	return nil
}

// GetUsedQuota returns the used quota for the given SFTP user.
func GetUsedQuota(username string) (int, int64, error) {
	if config.TrackQuota == 0 {
		return 0, 0, &MethodDisabledError{err: trackQuotaDisabledError}
	}
	files, size, err := provider.getUsedQuota(username)
	if err != nil {
		return files, size, err
	}
	delayedFiles, delayedSize := delayedQuotaUpdater.getUserPendingQuota(username)
	return files + delayedFiles, size + delayedSize, err
}

// GetUsedVirtualFolderQuota returns the used quota for the given virtual folder.
func GetUsedVirtualFolderQuota(name string) (int, int64, error) {
	if config.TrackQuota == 0 {
		return 0, 0, &MethodDisabledError{err: trackQuotaDisabledError}
	}
	files, size, err := provider.getUsedFolderQuota(name)
	if err != nil {
		return files, size, err
	}
	delayedFiles, delayedSize := delayedQuotaUpdater.getFolderPendingQuota(name)
	return files + delayedFiles, size + delayedSize, err
}

// AddAdmin adds a new SFTPGo admin
func AddAdmin(admin *Admin) error {
	return provider.addAdmin(admin)
}

// UpdateAdmin updates an existing SFTPGo admin
func UpdateAdmin(admin *Admin) error {
	return provider.updateAdmin(admin)
}

// DeleteAdmin deletes an existing SFTPGo admin
func DeleteAdmin(username string) error {
	admin, err := provider.adminExists(username)
	if err != nil {
		return err
	}
	return provider.deleteAdmin(&admin)
}

// AdminExists returns the admin with the given username if it exists
func AdminExists(username string) (Admin, error) {
	return provider.adminExists(username)
}

// UserExists checks if the given SFTPGo username exists, returns an error if no match is found
func UserExists(username string) (User, error) {
	return provider.userExists(username)
}

// AddUser adds a new SFTPGo user.
func AddUser(user *User) error {
	err := provider.addUser(user)
	if err == nil {
		executeAction(operationAdd, user)
	}
	return err
}

// UpdateUser updates an existing SFTPGo user.
func UpdateUser(user *User) error {
	err := provider.updateUser(user)
	if err == nil {
		webDAVUsersCache.swap(user)
		cachedPasswords.Remove(user.Username)
		executeAction(operationUpdate, user)
	}
	return err
}

// DeleteUser deletes an existing SFTPGo user.
func DeleteUser(username string) error {
	user, err := provider.userExists(username)
	if err != nil {
		return err
	}
	err = provider.deleteUser(&user)
	if err == nil {
		RemoveCachedWebDAVUser(user.Username)
		delayedQuotaUpdater.resetUserQuota(username)
		cachedPasswords.Remove(username)
		executeAction(operationDelete, &user)
	}
	return err
}

// ReloadConfig reloads provider configuration.
// Currently only implemented for the memory provider, it allows reloading the users
// from the configured file, if defined
func ReloadConfig() error {
	return provider.reloadConfig()
}

// GetAdmins returns an array of admins respecting limit and offset
func GetAdmins(limit, offset int, order string) ([]Admin, error) {
	return provider.getAdmins(limit, offset, order)
}

// GetUsers returns an array of users respecting limit and offset
func GetUsers(limit, offset int, order string) ([]User, error) {
	return provider.getUsers(limit, offset, order)
}

// AddFolder adds a new virtual folder.
func AddFolder(folder *vfs.BaseVirtualFolder) error {
	return provider.addFolder(folder)
}

// UpdateFolder updates the specified virtual folder
func UpdateFolder(folder *vfs.BaseVirtualFolder, users []string) error {
	err := provider.updateFolder(folder)
	if err == nil {
		for _, user := range users {
			RemoveCachedWebDAVUser(user)
		}
	}
	return err
}

// DeleteFolder deletes an existing folder.
func DeleteFolder(folderName string) error {
	folder, err := provider.getFolderByName(folderName)
	if err != nil {
		return err
	}
	err = provider.deleteFolder(&folder)
	if err == nil {
		for _, user := range folder.Users {
			RemoveCachedWebDAVUser(user)
		}
		delayedQuotaUpdater.resetFolderQuota(folderName)
	}
	return err
}

// GetFolderByName returns the folder with the specified name if any
func GetFolderByName(name string) (vfs.BaseVirtualFolder, error) {
	return provider.getFolderByName(name)
}

// GetFolders returns an array of folders respecting limit and offset
func GetFolders(limit, offset int, order string) ([]vfs.BaseVirtualFolder, error) {
	return provider.getFolders(limit, offset, order)
}

// DumpData returns all users, folders and admins
func DumpData() (BackupData, error) {
	var data BackupData
	users, err := provider.dumpUsers()
	if err != nil {
		return data, err
	}
	folders, err := provider.dumpFolders()
	if err != nil {
		return data, err
	}
	admins, err := provider.dumpAdmins()
	if err != nil {
		return data, err
	}
	data.Users = users
	data.Folders = folders
	data.Admins = admins
	data.Version = DumpVersion
	return data, err
}

// ParseDumpData tries to parse data as BackupData
func ParseDumpData(data []byte) (BackupData, error) {
	var dump BackupData
	err := json.Unmarshal(data, &dump)
	return dump, err
}
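// Illustrative round trip between DumpData and ParseDumpData from calling code:
//
//	backup, _ := dataprovider.DumpData()
//	data, _ := json.Marshal(backup)
//	restored, err := dataprovider.ParseDumpData(data)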

// GetProviderStatus returns the status of the configured data provider
func GetProviderStatus() ProviderStatus {
	err := provider.checkAvailability()
	status := ProviderStatus{
		Driver: config.Driver,
	}
	if err == nil {
		status.IsActive = true
	} else {
		status.IsActive = false
		status.Error = err.Error()
	}
	return status
}
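// Example (illustrative): using the status in a health check from calling code:
//
//	status := dataprovider.GetProviderStatus()
//	if !status.IsActive {
//		return fmt.Errorf("data provider %v is unavailable: %v", status.Driver, status.Error)
//	}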

// Close releases all provider resources.
// This method is used in test cases.
// Closing an uninitialized provider is not supported
func Close() error {
	if availabilityTicker != nil {
		availabilityTicker.Stop()
		availabilityTickerDone <- true
		availabilityTicker = nil
	}
	return provider.close()
}

func createProvider(basePath string) error {
	var err error
	sqlPlaceholders = getSQLPlaceholders()
	if err = validateSQLTablesPrefix(); err != nil {
		return err
	}
	logSender = fmt.Sprintf("dataprovider_%v", config.Driver)

	switch config.Driver {
	case SQLiteDataProviderName:
		return initializeSQLiteProvider(basePath)
	case PGSQLDataProviderName, CockroachDataProviderName:
		return initializePGSQLProvider()
	case MySQLDataProviderName:
		return initializeMySQLProvider()
	case BoltDataProviderName:
		return initializeBoltProvider(basePath)
	case MemoryDataProviderName:
		initializeMemoryProvider(basePath)
		return nil
	default:
		return fmt.Errorf("unsupported data provider: %v", config.Driver)
	}
}
|
|
|
|
|
func buildUserHomeDir(user *User) {
	if user.HomeDir == "" {
		if config.UsersBaseDir != "" {
			user.HomeDir = filepath.Join(config.UsersBaseDir, user.Username)
		} else if user.FsConfig.Provider == vfs.SFTPFilesystemProvider {
			user.HomeDir = filepath.Join(os.TempDir(), user.Username)
		}
	}
}

func isVirtualDirOverlapped(dir1, dir2 string, fullCheck bool) bool {
	if dir1 == dir2 {
		return true
	}
	if fullCheck {
		if len(dir1) > len(dir2) {
			if strings.HasPrefix(dir1, dir2+"/") {
				return true
			}
		}
		if len(dir2) > len(dir1) {
			if strings.HasPrefix(dir2, dir1+"/") {
				return true
			}
		}
	}
	return false
}

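// Overlap semantics, for illustration: with fullCheck enabled two virtual
// paths overlap when they are equal or when one is an ancestor of the other;
// the "/" boundary prevents false positives on common prefixes. Without
// fullCheck only exact matches count.
//
//	isVirtualDirOverlapped("/data", "/data/sub", true)  // true
//	isVirtualDirOverlapped("/data", "/database", true)  // false
//	isVirtualDirOverlapped("/data", "/data/sub", false) // false
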
func isMappedDirOverlapped(dir1, dir2 string, fullCheck bool) bool {
	if dir1 == dir2 {
		return true
	}
	if fullCheck {
		if len(dir1) > len(dir2) {
			if strings.HasPrefix(dir1, dir2+string(os.PathSeparator)) {
				return true
			}
		}
		if len(dir2) > len(dir1) {
			if strings.HasPrefix(dir2, dir1+string(os.PathSeparator)) {
				return true
			}
		}
	}
	return false
}

func validateFolderQuotaLimits(folder vfs.VirtualFolder) error {
	if folder.QuotaSize < -1 {
		return &ValidationError{err: fmt.Sprintf("invalid quota_size: %v folder path %#v", folder.QuotaSize, folder.MappedPath)}
	}
	if folder.QuotaFiles < -1 {
		return &ValidationError{err: fmt.Sprintf("invalid quota_files: %v folder path %#v", folder.QuotaFiles, folder.MappedPath)}
	}
	if (folder.QuotaSize == -1 && folder.QuotaFiles != -1) || (folder.QuotaFiles == -1 && folder.QuotaSize != -1) {
		return &ValidationError{err: fmt.Sprintf("virtual folder quota_size and quota_files must be both -1 or >= 0, quota_size: %v quota_files: %v",
			folder.QuotaSize, folder.QuotaFiles)}
	}
	return nil
}

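// The validation above only constrains the shape of the limits: quota_size
// and quota_files must either both be -1 or both be >= 0. For illustration
// (the numeric values are examples only):
//
//	vfs.VirtualFolder{QuotaSize: -1, QuotaFiles: -1}        // accepted
//	vfs.VirtualFolder{QuotaSize: 1048576, QuotaFiles: 100}  // accepted
//	vfs.VirtualFolder{QuotaSize: -1, QuotaFiles: 100}       // rejected
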
func getVirtualFolderIfInvalid(folder *vfs.BaseVirtualFolder) *vfs.BaseVirtualFolder {
	if err := ValidateFolder(folder); err == nil {
		return folder
	}
	// we try to get the folder from the data provider if only the Name is populated
	if folder.MappedPath != "" {
		return folder
	}
	if folder.Name == "" {
		return folder
	}
	if folder.FsConfig.Provider != vfs.LocalFilesystemProvider {
		return folder
	}
	if f, err := GetFolderByName(folder.Name); err == nil {
		return &f
	}
	return folder
}
2020-06-07 21:30:18 +00:00
|
|
|
func validateUserVirtualFolders(user *User) error {
|
2021-03-21 18:15:47 +00:00
|
|
|
if len(user.VirtualFolders) == 0 {
|
2020-02-23 10:30:26 +00:00
|
|
|
user.VirtualFolders = []vfs.VirtualFolder{}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
var virtualFolders []vfs.VirtualFolder
|
2021-03-21 18:15:47 +00:00
|
|
|
mappedPaths := make(map[string]bool)
|
|
|
|
virtualPaths := make(map[string]bool)
|
2020-02-23 10:30:26 +00:00
|
|
|
for _, v := range user.VirtualFolders {
|
|
|
|
cleanedVPath := filepath.ToSlash(path.Clean(v.VirtualPath))
|
|
|
|
if !path.IsAbs(cleanedVPath) || cleanedVPath == "/" {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("invalid virtual folder %#v", v.VirtualPath)}
|
|
|
|
}
|
2020-06-07 21:30:18 +00:00
|
|
|
if err := validateFolderQuotaLimits(v); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2021-02-01 18:04:15 +00:00
|
|
|
folder := getVirtualFolderIfInvalid(&v.BaseVirtualFolder)
|
2021-02-04 18:09:43 +00:00
|
|
|
if err := ValidateFolder(folder); err != nil {
|
2021-02-01 18:04:15 +00:00
|
|
|
return err
|
2020-02-23 10:30:26 +00:00
|
|
|
}
|
2021-02-01 18:04:15 +00:00
|
|
|
cleanedMPath := folder.MappedPath
|
2021-03-21 18:15:47 +00:00
|
|
|
if folder.IsLocalOrLocalCrypted() {
|
|
|
|
if isMappedDirOverlapped(cleanedMPath, user.GetHomeDir(), true) {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("invalid mapped folder %#v cannot be inside or contain the user home dir %#v",
|
|
|
|
folder.MappedPath, user.GetHomeDir())}
|
|
|
|
}
|
|
|
|
for mPath := range mappedPaths {
|
|
|
|
if folder.IsLocalOrLocalCrypted() && isMappedDirOverlapped(mPath, cleanedMPath, false) {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("invalid mapped folder %#v overlaps with mapped folder %#v",
|
|
|
|
v.MappedPath, mPath)}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
mappedPaths[cleanedMPath] = true
|
2020-02-23 10:30:26 +00:00
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
for vPath := range virtualPaths {
|
|
|
|
if isVirtualDirOverlapped(vPath, cleanedVPath, false) {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("invalid virtual folder %#v overlaps with virtual folder %#v",
|
|
|
|
v.VirtualPath, vPath)}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
virtualPaths[cleanedVPath] = true
|
2020-02-23 10:30:26 +00:00
|
|
|
virtualFolders = append(virtualFolders, vfs.VirtualFolder{
|
2021-02-01 18:04:15 +00:00
|
|
|
BaseVirtualFolder: *folder,
|
|
|
|
VirtualPath: cleanedVPath,
|
|
|
|
QuotaSize: v.QuotaSize,
|
|
|
|
QuotaFiles: v.QuotaFiles,
|
2020-02-23 10:30:26 +00:00
|
|
|
})
|
|
|
|
}
|
|
|
|
user.VirtualFolders = virtualFolders
|
|
|
|
return nil
|
|
|
|
}
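// For illustration, a user configuration that passes the checks above: the
// virtual path is absolute and not "/", and the local mapped path does not
// overlap the user home directory or another mapped path. Names and paths
// are examples only.
//
//	user.VirtualFolders = []vfs.VirtualFolder{
//		{
//			BaseVirtualFolder: vfs.BaseVirtualFolder{Name: "shared", MappedPath: "/srv/shared"},
//			VirtualPath:       "/shared",
//			QuotaSize:         -1,
//			QuotaFiles:        -1,
//		},
//	}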
|
|
|
|
|
2019-10-07 16:19:01 +00:00
|
|
|
func validatePermissions(user *User) error {
|
2019-12-29 22:27:32 +00:00
|
|
|
if len(user.Permissions) == 0 {
|
2020-01-19 22:23:09 +00:00
|
|
|
return &ValidationError{err: "please grant some permissions to this user"}
|
2019-12-29 22:27:32 +00:00
|
|
|
}
|
2019-12-25 17:20:19 +00:00
|
|
|
permissions := make(map[string][]string)
|
|
|
|
if _, ok := user.Permissions["/"]; !ok {
|
2020-04-30 13:06:15 +00:00
|
|
|
return &ValidationError{err: "permissions for the root dir \"/\" must be set"}
|
2019-10-07 16:19:01 +00:00
|
|
|
}
|
2019-12-25 17:20:19 +00:00
|
|
|
for dir, perms := range user.Permissions {
|
2020-02-10 18:28:35 +00:00
|
|
|
if len(perms) == 0 && dir == "/" {
|
2020-01-19 22:23:09 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("no permissions granted for the directory: %#v", dir)}
|
2019-12-25 17:20:19 +00:00
|
|
|
}
|
2020-02-19 21:39:30 +00:00
|
|
|
if len(perms) > len(ValidPerms) {
|
|
|
|
return &ValidationError{err: "invalid permissions"}
|
|
|
|
}
|
2019-12-25 17:20:19 +00:00
|
|
|
for _, p := range perms {
|
|
|
|
if !utils.IsStringInSlice(p, ValidPerms) {
|
2020-01-19 22:23:09 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("invalid permission: %#v", p)}
|
2019-12-25 17:20:19 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
cleanedDir := filepath.ToSlash(path.Clean(dir))
|
|
|
|
if cleanedDir != "/" {
|
|
|
|
cleanedDir = strings.TrimSuffix(cleanedDir, "/")
|
|
|
|
}
|
|
|
|
if !path.IsAbs(cleanedDir) {
|
2020-01-19 22:23:09 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("cannot set permissions for non absolute path: %#v", dir)}
|
2019-12-25 17:20:19 +00:00
|
|
|
}
|
2020-02-10 18:28:35 +00:00
|
|
|
if dir != cleanedDir && cleanedDir == "/" {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("cannot set permissions for invalid subdirectory: %#v is an alias for \"/\"", dir)}
|
|
|
|
}
|
2019-12-25 17:20:19 +00:00
|
|
|
if utils.IsStringInSlice(PermAny, perms) {
|
|
|
|
permissions[cleanedDir] = []string{PermAny}
|
|
|
|
} else {
|
2021-01-17 21:29:08 +00:00
|
|
|
permissions[cleanedDir] = utils.RemoveDuplicates(perms)
|
2019-12-25 17:20:19 +00:00
|
|
|
}
|
2019-10-07 16:19:01 +00:00
|
|
|
}
|
2019-12-25 17:20:19 +00:00
|
|
|
user.Permissions = permissions
|
2019-10-07 16:19:01 +00:00
|
|
|
return nil
|
|
|
|
}
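// For illustration, a permissions map that satisfies the rules above: the
// root path "/" is mandatory, every key must be an absolute cleaned path and
// PermAny replaces any other value granted for the same directory. The
// "/uploads" entry and its permission names are examples only.
//
//	user.Permissions = map[string][]string{
//		"/":        {PermAny},
//		"/uploads": {PermListItems, PermUpload},
//	}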
|
|
|
|
|
2019-12-30 17:37:50 +00:00
|
|
|
func validatePublicKeys(user *User) error {
|
|
|
|
if len(user.PublicKeys) == 0 {
|
|
|
|
user.PublicKeys = []string{}
|
|
|
|
}
|
|
|
|
for i, k := range user.PublicKeys {
|
|
|
|
_, _, _, _, err := ssh.ParseAuthorizedKey([]byte(k))
|
|
|
|
if err != nil {
|
2020-01-19 22:23:09 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("could not parse key nr. %d: %s", i, err)}
|
2019-12-30 17:37:50 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2020-11-15 21:04:48 +00:00
|
|
|
func validateFiltersPatternExtensions(user *User) error {
|
|
|
|
if len(user.Filters.FilePatterns) == 0 {
|
|
|
|
user.Filters.FilePatterns = []PatternsFilter{}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
filteredPaths := []string{}
|
|
|
|
var filters []PatternsFilter
|
|
|
|
for _, f := range user.Filters.FilePatterns {
|
|
|
|
cleanedPath := filepath.ToSlash(path.Clean(f.Path))
|
|
|
|
if !path.IsAbs(cleanedPath) {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("invalid path %#v for file patterns filter", f.Path)}
|
|
|
|
}
|
|
|
|
if utils.IsStringInSlice(cleanedPath, filteredPaths) {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("duplicate file patterns filter for path %#v", f.Path)}
|
|
|
|
}
|
|
|
|
if len(f.AllowedPatterns) == 0 && len(f.DeniedPatterns) == 0 {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("empty file patterns filter for path %#v", f.Path)}
|
|
|
|
}
|
|
|
|
f.Path = cleanedPath
|
|
|
|
allowed := make([]string, 0, len(f.AllowedPatterns))
|
|
|
|
denied := make([]string, 0, len(f.DeniedPatterns))
|
|
|
|
for _, pattern := range f.AllowedPatterns {
|
|
|
|
_, err := path.Match(pattern, "abc")
|
|
|
|
if err != nil {
|
2020-11-16 18:21:50 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("invalid file pattern filter %#v", pattern)}
|
2020-11-15 21:04:48 +00:00
|
|
|
}
|
|
|
|
allowed = append(allowed, strings.ToLower(pattern))
|
|
|
|
}
|
|
|
|
for _, pattern := range f.DeniedPatterns {
|
|
|
|
_, err := path.Match(pattern, "abc")
|
|
|
|
if err != nil {
|
2020-11-16 18:21:50 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("invalid file pattern filter %#v", pattern)}
|
2020-11-15 21:04:48 +00:00
|
|
|
}
|
|
|
|
denied = append(denied, strings.ToLower(pattern))
|
|
|
|
}
|
|
|
|
f.AllowedPatterns = allowed
|
|
|
|
f.DeniedPatterns = denied
|
|
|
|
filters = append(filters, f)
|
|
|
|
filteredPaths = append(filteredPaths, cleanedPath)
|
|
|
|
}
|
|
|
|
user.Filters.FilePatterns = filters
|
|
|
|
return nil
|
|
|
|
}
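// For illustration, a patterns filter that passes the validation above: the
// path is absolute, at least one of the two lists is non empty and every
// entry is a valid path.Match pattern (patterns are also lower-cased). The
// values are examples only.
//
//	user.Filters.FilePatterns = []PatternsFilter{
//		{
//			Path:            "/uploads",
//			AllowedPatterns: []string{"*.jpg", "*.png"},
//			DeniedPatterns:  []string{"*.exe"},
//		},
//	}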
|
|
|
|
|
2020-03-01 21:10:29 +00:00
|
|
|
func validateFiltersFileExtensions(user *User) error {
|
|
|
|
if len(user.Filters.FileExtensions) == 0 {
|
|
|
|
user.Filters.FileExtensions = []ExtensionsFilter{}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
filteredPaths := []string{}
|
|
|
|
var filters []ExtensionsFilter
|
|
|
|
for _, f := range user.Filters.FileExtensions {
|
|
|
|
cleanedPath := filepath.ToSlash(path.Clean(f.Path))
|
|
|
|
if !path.IsAbs(cleanedPath) {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("invalid path %#v for file extensions filter", f.Path)}
|
|
|
|
}
|
|
|
|
if utils.IsStringInSlice(cleanedPath, filteredPaths) {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("duplicate file extensions filter for path %#v", f.Path)}
|
|
|
|
}
|
|
|
|
if len(f.AllowedExtensions) == 0 && len(f.DeniedExtensions) == 0 {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("empty file extensions filter for path %#v", f.Path)}
|
|
|
|
}
|
|
|
|
f.Path = cleanedPath
|
2020-11-15 21:04:48 +00:00
|
|
|
allowed := make([]string, 0, len(f.AllowedExtensions))
|
|
|
|
denied := make([]string, 0, len(f.DeniedExtensions))
|
|
|
|
for _, ext := range f.AllowedExtensions {
|
|
|
|
allowed = append(allowed, strings.ToLower(ext))
|
|
|
|
}
|
|
|
|
for _, ext := range f.DeniedExtensions {
|
|
|
|
denied = append(denied, strings.ToLower(ext))
|
|
|
|
}
|
|
|
|
f.AllowedExtensions = allowed
|
|
|
|
f.DeniedExtensions = denied
|
2020-03-01 21:10:29 +00:00
|
|
|
filters = append(filters, f)
|
|
|
|
filteredPaths = append(filteredPaths, cleanedPath)
|
|
|
|
}
|
|
|
|
user.Filters.FileExtensions = filters
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2020-11-15 21:04:48 +00:00
|
|
|
func validateFileFilters(user *User) error {
|
|
|
|
if err := validateFiltersFileExtensions(user); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
return validateFiltersPatternExtensions(user)
|
|
|
|
}
|
|
|
|
|
2021-02-28 11:10:40 +00:00
|
|
|
func checkEmptyFiltersStruct(user *User) {
|
2019-12-30 17:37:50 +00:00
|
|
|
if len(user.Filters.AllowedIP) == 0 {
|
|
|
|
user.Filters.AllowedIP = []string{}
|
|
|
|
}
|
|
|
|
if len(user.Filters.DeniedIP) == 0 {
|
|
|
|
user.Filters.DeniedIP = []string{}
|
|
|
|
}
|
2020-02-19 21:39:30 +00:00
|
|
|
if len(user.Filters.DeniedLoginMethods) == 0 {
|
|
|
|
user.Filters.DeniedLoginMethods = []string{}
|
|
|
|
}
|
2020-08-17 10:49:20 +00:00
|
|
|
if len(user.Filters.DeniedProtocols) == 0 {
|
|
|
|
user.Filters.DeniedProtocols = []string{}
|
|
|
|
}
|
2021-02-28 11:10:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func validateFilters(user *User) error {
|
|
|
|
checkEmptyFiltersStruct(user)
|
2019-12-30 17:37:50 +00:00
|
|
|
for _, IPMask := range user.Filters.DeniedIP {
|
|
|
|
_, _, err := net.ParseCIDR(IPMask)
|
|
|
|
if err != nil {
|
2020-01-19 22:23:09 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("could not parse denied IP/Mask %#v : %v", IPMask, err)}
|
2019-12-30 17:37:50 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
for _, IPMask := range user.Filters.AllowedIP {
|
|
|
|
_, _, err := net.ParseCIDR(IPMask)
|
|
|
|
if err != nil {
|
2020-01-19 22:23:09 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("could not parse allowed IP/Mask %#v : %v", IPMask, err)}
|
2019-12-30 17:37:50 +00:00
|
|
|
}
|
|
|
|
}
|
2021-02-28 11:10:40 +00:00
|
|
|
if len(user.Filters.DeniedLoginMethods) >= len(ValidLoginMethods) {
|
2020-02-19 21:39:30 +00:00
|
|
|
return &ValidationError{err: "invalid denied_login_methods"}
|
|
|
|
}
|
|
|
|
for _, loginMethod := range user.Filters.DeniedLoginMethods {
|
2021-02-28 11:10:40 +00:00
|
|
|
if !utils.IsStringInSlice(loginMethod, ValidLoginMethods) {
|
2020-02-19 21:39:30 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("invalid login method: %#v", loginMethod)}
|
|
|
|
}
|
|
|
|
}
|
2020-08-17 10:49:20 +00:00
|
|
|
if len(user.Filters.DeniedProtocols) >= len(ValidProtocols) {
|
|
|
|
return &ValidationError{err: "invalid denied_protocols"}
|
2020-03-01 21:10:29 +00:00
|
|
|
}
|
2020-08-17 10:49:20 +00:00
|
|
|
for _, p := range user.Filters.DeniedProtocols {
|
|
|
|
if !utils.IsStringInSlice(p, ValidProtocols) {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("invalid protocol: %#v", p)}
|
|
|
|
}
|
|
|
|
}
|
2021-02-28 11:10:40 +00:00
|
|
|
if user.Filters.TLSUsername != "" {
|
|
|
|
if !utils.IsStringInSlice(string(user.Filters.TLSUsername), validTLSUsernames) {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("invalid TLS username: %#v", user.Filters.TLSUsername)}
|
|
|
|
}
|
|
|
|
}
|
2020-11-15 21:04:48 +00:00
|
|
|
return validateFileFilters(user)
|
2019-12-30 17:37:50 +00:00
|
|
|
}
|
|
|
|
|
2021-03-21 18:15:47 +00:00
|
|
|
func saveGCSCredentials(fsConfig *vfs.Filesystem, helper fsValidatorHelper) error {
|
|
|
|
if fsConfig.Provider != vfs.GCSFilesystemProvider {
|
2020-01-31 18:04:00 +00:00
|
|
|
return nil
|
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
if fsConfig.GCSConfig.Credentials.GetPayload() == "" {
|
2020-01-31 18:04:00 +00:00
|
|
|
return nil
|
|
|
|
}
|
2020-10-22 08:42:40 +00:00
|
|
|
if config.PreferDatabaseCredentials {
|
2021-03-21 18:15:47 +00:00
|
|
|
if fsConfig.GCSConfig.Credentials.IsPlain() {
|
|
|
|
fsConfig.GCSConfig.Credentials.SetAdditionalData(helper.GetEncrytionAdditionalData())
|
|
|
|
err := fsConfig.GCSConfig.Credentials.Encrypt()
|
2020-11-22 20:53:04 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
2020-10-22 08:42:40 +00:00
|
|
|
return nil
|
2020-01-31 18:04:00 +00:00
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
if fsConfig.GCSConfig.Credentials.IsPlain() {
|
|
|
|
fsConfig.GCSConfig.Credentials.SetAdditionalData(helper.GetEncrytionAdditionalData())
|
|
|
|
err := fsConfig.GCSConfig.Credentials.Encrypt()
|
2020-11-22 20:53:04 +00:00
|
|
|
if err != nil {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("could not encrypt GCS credentials: %v", err)}
|
|
|
|
}
|
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
creds, err := json.Marshal(fsConfig.GCSConfig.Credentials)
|
2020-11-22 20:53:04 +00:00
|
|
|
if err != nil {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("could not marshal GCS credentials: %v", err)}
|
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
credentialsFilePath := helper.GetGCSCredentialsFilePath()
|
2020-11-25 13:18:12 +00:00
|
|
|
err = os.MkdirAll(filepath.Dir(credentialsFilePath), 0700)
|
|
|
|
if err != nil {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("could not create GCS credentials dir: %v", err)}
|
|
|
|
}
|
2021-02-25 20:53:04 +00:00
|
|
|
err = os.WriteFile(credentialsFilePath, creds, 0600)
|
2020-01-31 18:04:00 +00:00
|
|
|
if err != nil {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("could not save GCS credentials: %v", err)}
|
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
fsConfig.GCSConfig.Credentials = kms.NewEmptySecret()
|
2020-01-31 18:04:00 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2021-03-21 18:15:47 +00:00
|
|
|
func validateFilesystemConfig(fsConfig *vfs.Filesystem, helper fsValidatorHelper) error {
|
|
|
|
if fsConfig.Provider == vfs.S3FilesystemProvider {
|
|
|
|
if err := fsConfig.S3Config.Validate(); err != nil {
|
2020-01-19 22:23:09 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("could not validate s3config: %v", err)}
|
2020-01-19 06:41:05 +00:00
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
if err := fsConfig.S3Config.EncryptCredentials(helper.GetEncrytionAdditionalData()); err != nil {
|
2020-12-12 09:31:09 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("could not encrypt s3 access secret: %v", err)}
|
2020-01-19 06:41:05 +00:00
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
fsConfig.GCSConfig = vfs.GCSFsConfig{}
|
|
|
|
fsConfig.AzBlobConfig = vfs.AzBlobFsConfig{}
|
|
|
|
fsConfig.CryptConfig = vfs.CryptFsConfig{}
|
|
|
|
fsConfig.SFTPConfig = vfs.SFTPFsConfig{}
|
2020-01-19 06:41:05 +00:00
|
|
|
return nil
|
2021-03-21 18:15:47 +00:00
|
|
|
} else if fsConfig.Provider == vfs.GCSFilesystemProvider {
|
|
|
|
if err := fsConfig.GCSConfig.Validate(helper.GetGCSCredentialsFilePath()); err != nil {
|
2020-01-31 18:04:00 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("could not validate GCS config: %v", err)}
|
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
fsConfig.S3Config = vfs.S3FsConfig{}
|
|
|
|
fsConfig.AzBlobConfig = vfs.AzBlobFsConfig{}
|
|
|
|
fsConfig.CryptConfig = vfs.CryptFsConfig{}
|
|
|
|
fsConfig.SFTPConfig = vfs.SFTPFsConfig{}
|
2020-01-31 18:04:00 +00:00
|
|
|
return nil
|
2021-03-21 18:15:47 +00:00
|
|
|
} else if fsConfig.Provider == vfs.AzureBlobFilesystemProvider {
|
|
|
|
if err := fsConfig.AzBlobConfig.Validate(); err != nil {
|
2020-10-25 07:18:48 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("could not validate Azure Blob config: %v", err)}
|
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
if err := fsConfig.AzBlobConfig.EncryptCredentials(helper.GetEncrytionAdditionalData()); err != nil {
|
2020-12-12 09:31:09 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("could not encrypt Azure blob account key: %v", err)}
|
2020-10-25 07:18:48 +00:00
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
fsConfig.S3Config = vfs.S3FsConfig{}
|
|
|
|
fsConfig.GCSConfig = vfs.GCSFsConfig{}
|
|
|
|
fsConfig.CryptConfig = vfs.CryptFsConfig{}
|
|
|
|
fsConfig.SFTPConfig = vfs.SFTPFsConfig{}
|
2020-12-05 12:48:13 +00:00
|
|
|
return nil
|
2021-03-21 18:15:47 +00:00
|
|
|
} else if fsConfig.Provider == vfs.CryptedFilesystemProvider {
|
|
|
|
if err := fsConfig.CryptConfig.Validate(); err != nil {
|
2020-12-05 12:48:13 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("could not validate Crypt fs config: %v", err)}
|
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
if err := fsConfig.CryptConfig.EncryptCredentials(helper.GetEncrytionAdditionalData()); err != nil {
|
2020-12-12 09:31:09 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("could not encrypt Crypt fs passphrase: %v", err)}
|
2020-12-05 12:48:13 +00:00
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
fsConfig.S3Config = vfs.S3FsConfig{}
|
|
|
|
fsConfig.GCSConfig = vfs.GCSFsConfig{}
|
|
|
|
fsConfig.AzBlobConfig = vfs.AzBlobFsConfig{}
|
|
|
|
fsConfig.SFTPConfig = vfs.SFTPFsConfig{}
|
2020-12-12 09:31:09 +00:00
|
|
|
return nil
|
2021-03-21 18:15:47 +00:00
|
|
|
} else if fsConfig.Provider == vfs.SFTPFilesystemProvider {
|
|
|
|
if err := fsConfig.SFTPConfig.Validate(); err != nil {
|
2020-12-12 09:31:09 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("could not validate SFTP fs config: %v", err)}
|
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
if err := fsConfig.SFTPConfig.EncryptCredentials(helper.GetEncrytionAdditionalData()); err != nil {
|
2020-12-12 09:31:09 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("could not encrypt SFTP fs credentials: %v", err)}
|
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
fsConfig.S3Config = vfs.S3FsConfig{}
|
|
|
|
fsConfig.GCSConfig = vfs.GCSFsConfig{}
|
|
|
|
fsConfig.AzBlobConfig = vfs.AzBlobFsConfig{}
|
|
|
|
fsConfig.CryptConfig = vfs.CryptFsConfig{}
|
2020-10-25 07:18:48 +00:00
|
|
|
return nil
|
2020-01-19 06:41:05 +00:00
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
fsConfig.Provider = vfs.LocalFilesystemProvider
|
|
|
|
fsConfig.S3Config = vfs.S3FsConfig{}
|
|
|
|
fsConfig.GCSConfig = vfs.GCSFsConfig{}
|
|
|
|
fsConfig.AzBlobConfig = vfs.AzBlobFsConfig{}
|
|
|
|
fsConfig.CryptConfig = vfs.CryptFsConfig{}
|
|
|
|
fsConfig.SFTPConfig = vfs.SFTPFsConfig{}
|
2020-01-19 06:41:05 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2020-01-31 22:26:56 +00:00
|
|
|
func validateBaseParams(user *User) error {
|
2020-09-18 17:21:24 +00:00
|
|
|
if user.Username == "" {
|
|
|
|
return &ValidationError{err: "username is mandatory"}
|
2019-07-20 10:26:52 +00:00
|
|
|
}
|
2021-03-04 08:48:53 +00:00
|
|
|
if !config.SkipNaturalKeysValidation && !usernameRegex.MatchString(user.Username) {
|
2021-02-08 20:32:59 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("username %#v is not valid, the following characters are allowed: a-zA-Z0-9-_.~",
|
|
|
|
user.Username)}
|
2021-01-17 21:29:08 +00:00
|
|
|
}
|
2020-09-18 17:21:24 +00:00
|
|
|
if user.HomeDir == "" {
|
|
|
|
return &ValidationError{err: "home_dir is mandatory"}
|
|
|
|
}
|
|
|
|
if user.Password == "" && len(user.PublicKeys) == 0 {
|
2020-01-19 22:23:09 +00:00
|
|
|
return &ValidationError{err: "please set a password or at least a public_key"}
|
2019-07-20 10:26:52 +00:00
|
|
|
}
|
|
|
|
if !filepath.IsAbs(user.HomeDir) {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("home_dir must be an absolute path, actual value: %v", user.HomeDir)}
|
|
|
|
}
|
2020-01-31 22:26:56 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2020-03-22 13:03:06 +00:00
|
|
|
func createUserPasswordHash(user *User) error {
|
2021-02-15 18:38:53 +00:00
|
|
|
if user.Password != "" && !user.IsPasswordHashed() {
|
2021-04-25 07:38:33 +00:00
|
|
|
if config.PasswordHashing.Algo == HashingAlgoBcrypt {
|
2021-04-20 11:55:09 +00:00
|
|
|
pwd, err := bcrypt.GenerateFromPassword([]byte(user.Password), config.PasswordHashing.BcryptOptions.Cost)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
user.Password = string(pwd)
|
|
|
|
} else {
|
|
|
|
pwd, err := argon2id.CreateHash(user.Password, argon2Params)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
user.Password = pwd
|
2020-03-22 13:03:06 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2021-02-04 18:09:43 +00:00
|
|
|
// ValidateFolder returns an error if the folder is not valid
|
|
|
|
// FIXME: this should be defined as Folder struct method
|
|
|
|
func ValidateFolder(folder *vfs.BaseVirtualFolder) error {
|
2021-02-01 18:04:15 +00:00
|
|
|
if folder.Name == "" {
|
|
|
|
return &ValidationError{err: "folder name is mandatory"}
|
|
|
|
}
|
2021-03-04 08:48:53 +00:00
|
|
|
if !config.SkipNaturalKeysValidation && !usernameRegex.MatchString(folder.Name) {
|
2021-02-08 20:32:59 +00:00
|
|
|
return &ValidationError{err: fmt.Sprintf("folder name %#v is not valid, the following characters are allowed: a-zA-Z0-9-_.~",
|
|
|
|
folder.Name)}
|
2021-02-01 18:04:15 +00:00
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
if folder.FsConfig.Provider == vfs.LocalFilesystemProvider || folder.FsConfig.Provider == vfs.CryptedFilesystemProvider ||
|
|
|
|
folder.MappedPath != "" {
|
|
|
|
cleanedMPath := filepath.Clean(folder.MappedPath)
|
|
|
|
if !filepath.IsAbs(cleanedMPath) {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("invalid folder mapped path %#v", folder.MappedPath)}
|
|
|
|
}
|
|
|
|
folder.MappedPath = cleanedMPath
|
|
|
|
}
|
|
|
|
if folder.HasRedactedSecret() {
|
|
|
|
return errors.New("cannot save a folder with a redacted secret")
|
|
|
|
}
|
|
|
|
if err := validateFilesystemConfig(&folder.FsConfig, folder); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
if err := saveGCSCredentials(&folder.FsConfig, folder); err != nil {
|
|
|
|
return err
|
2020-06-07 21:30:18 +00:00
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2021-01-25 20:31:33 +00:00
|
|
|
// ValidateUser returns an error if the user is not valid
|
|
|
|
// FIXME: this should be defined as User struct method
|
|
|
|
func ValidateUser(user *User) error {
|
2020-11-30 20:46:34 +00:00
|
|
|
user.SetEmptySecretsIfNil()
|
2020-01-31 22:26:56 +00:00
|
|
|
buildUserHomeDir(user)
|
|
|
|
if err := validateBaseParams(user); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2019-10-07 16:19:01 +00:00
|
|
|
if err := validatePermissions(user); err != nil {
|
|
|
|
return err
|
2019-07-20 10:26:52 +00:00
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
if user.hasRedactedSecret() {
|
|
|
|
return errors.New("cannot save a user with a redacted secret")
|
|
|
|
}
|
|
|
|
if err := validateFilesystemConfig(&user.FsConfig, user); err != nil {
|
2020-01-19 06:41:05 +00:00
|
|
|
return err
|
|
|
|
}
|
2020-06-07 21:30:18 +00:00
|
|
|
if err := validateUserVirtualFolders(user); err != nil {
|
2020-02-23 10:30:26 +00:00
|
|
|
return err
|
|
|
|
}
|
2019-11-13 10:36:21 +00:00
|
|
|
if user.Status < 0 || user.Status > 1 {
|
|
|
|
return &ValidationError{err: fmt.Sprintf("invalid user status: %v", user.Status)}
|
|
|
|
}
|
2020-03-22 13:03:06 +00:00
|
|
|
if err := createUserPasswordHash(user); err != nil {
|
|
|
|
return err
|
2019-07-20 10:26:52 +00:00
|
|
|
}
|
2019-12-30 17:37:50 +00:00
|
|
|
if err := validatePublicKeys(user); err != nil {
|
|
|
|
return err
|
2019-12-29 16:21:25 +00:00
|
|
|
}
|
2019-12-30 17:37:50 +00:00
|
|
|
if err := validateFilters(user); err != nil {
|
|
|
|
return err
|
2019-07-20 10:26:52 +00:00
|
|
|
}
|
2021-03-21 18:15:47 +00:00
|
|
|
if err := saveGCSCredentials(&user.FsConfig, user); err != nil {
|
2020-01-31 18:04:00 +00:00
|
|
|
return err
|
|
|
|
}
|
2019-07-20 10:26:52 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2021-01-05 08:50:22 +00:00
|
|
|
func checkLoginConditions(user *User) error {
|
2019-11-13 10:36:21 +00:00
|
|
|
if user.Status < 1 {
|
|
|
|
return fmt.Errorf("user %#v is disabled", user.Username)
|
|
|
|
}
|
|
|
|
if user.ExpirationDate > 0 && user.ExpirationDate < utils.GetTimeAsMsSinceEpoch(time.Now()) {
|
|
|
|
return fmt.Errorf("user %#v is expired, expiration timestamp: %v current timestamp: %v", user.Username,
|
|
|
|
user.ExpirationDate, utils.GetTimeAsMsSinceEpoch(time.Now()))
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2020-08-19 17:36:12 +00:00
|
|
|
func isPasswordOK(user *User, password string) (bool, error) {
|
2021-04-20 07:39:36 +00:00
|
|
|
if config.PasswordCaching {
|
|
|
|
found, match := cachedPasswords.Check(user.Username, password)
|
|
|
|
if found {
|
|
|
|
return match, nil
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-12-29 06:43:59 +00:00
|
|
|
match := false
|
2020-08-19 17:36:12 +00:00
|
|
|
var err error
|
2019-08-12 16:31:31 +00:00
|
|
|
if strings.HasPrefix(user.Password, argonPwdPrefix) {
|
|
|
|
match, err = argon2id.ComparePasswordAndHash(password, user.Password)
|
|
|
|
if err != nil {
|
2019-09-06 13:19:01 +00:00
|
|
|
providerLog(logger.LevelWarn, "error comparing password with argon hash: %v", err)
|
2020-08-19 17:36:12 +00:00
|
|
|
return match, err
|
2019-08-12 16:31:31 +00:00
|
|
|
}
|
|
|
|
} else if strings.HasPrefix(user.Password, bcryptPwdPrefix) {
|
|
|
|
if err = bcrypt.CompareHashAndPassword([]byte(user.Password), []byte(password)); err != nil {
|
2019-09-06 13:19:01 +00:00
|
|
|
providerLog(logger.LevelWarn, "error comparing password with bcrypt hash: %v", err)
|
2020-08-19 17:36:12 +00:00
|
|
|
return match, err
|
2019-08-12 16:31:31 +00:00
|
|
|
}
|
|
|
|
match = true
|
2019-08-17 13:20:49 +00:00
|
|
|
} else if utils.IsStringPrefixInSlice(user.Password, pbkdfPwdPrefixes) {
|
|
|
|
match, err = comparePbkdf2PasswordAndHash(password, user.Password)
|
|
|
|
if err != nil {
|
2020-08-19 17:36:12 +00:00
|
|
|
return match, err
|
2019-08-17 13:20:49 +00:00
|
|
|
}
|
2019-12-29 06:43:59 +00:00
|
|
|
} else if utils.IsStringPrefixInSlice(user.Password, unixPwdPrefixes) {
|
|
|
|
match, err = compareUnixPasswordAndHash(user, password)
|
|
|
|
if err != nil {
|
2020-08-19 17:36:12 +00:00
|
|
|
return match, err
|
2019-09-15 06:34:44 +00:00
|
|
|
}
|
2019-08-12 16:31:31 +00:00
|
|
|
}
|
2021-04-20 07:39:36 +00:00
|
|
|
if err == nil && match {
|
|
|
|
cachedPasswords.Add(user.Username, password)
|
|
|
|
}
|
2020-08-19 17:36:12 +00:00
|
|
|
return match, err
|
|
|
|
}
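// Stored password formats are detected by prefix, in the order checked above:
// argon2id, bcrypt, the supported pbkdf2 variants and finally unix crypt
// style hashes; anything else fails verification. On a successful match the
// plain password is added to the in-memory password cache.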
|
|
|
|
|
2021-02-28 11:10:40 +00:00
|
|
|
func checkUserAndTLSCertificate(user *User, protocol string, tlsCert *x509.Certificate) (User, error) {
|
|
|
|
err := checkLoginConditions(user)
|
|
|
|
if err != nil {
|
|
|
|
return *user, err
|
|
|
|
}
|
|
|
|
switch protocol {
|
2021-03-01 18:28:11 +00:00
|
|
|
case "FTP", "DAV":
|
2021-02-28 11:10:40 +00:00
|
|
|
if user.Filters.TLSUsername == TLSUsernameCN {
|
|
|
|
if user.Username == tlsCert.Subject.CommonName {
|
|
|
|
return *user, nil
|
|
|
|
}
|
|
|
|
return *user, fmt.Errorf("CN %#v does not match username %#v", tlsCert.Subject.CommonName, user.Username)
|
|
|
|
}
|
|
|
|
return *user, errors.New("TLS certificate is not valid")
|
|
|
|
default:
|
|
|
|
return *user, fmt.Errorf("certificate authentication is not supported for protocol %v", protocol)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-02-16 18:11:36 +00:00
|
|
|
func checkUserAndPass(user *User, password, ip, protocol string) (User, error) {
|
|
|
|
err := checkLoginConditions(user)
|
2020-08-19 17:36:12 +00:00
|
|
|
if err != nil {
|
2021-02-16 18:11:36 +00:00
|
|
|
return *user, err
|
2020-08-19 17:36:12 +00:00
|
|
|
}
|
2021-01-05 08:50:22 +00:00
|
|
|
if user.Password == "" {
|
2021-03-21 18:15:47 +00:00
|
|
|
return *user, errors.New("credentials cannot be null or empty")
|
2020-08-19 17:36:12 +00:00
|
|
|
}
|
2021-04-04 20:32:25 +00:00
|
|
|
if !user.Filters.Hooks.CheckPasswordDisabled {
|
|
|
|
hookResponse, err := executeCheckPasswordHook(user.Username, password, ip, protocol)
|
|
|
|
if err != nil {
|
|
|
|
providerLog(logger.LevelDebug, "error executing check password hook: %v", err)
|
|
|
|
return *user, errors.New("unable to check credentials")
|
|
|
|
}
|
|
|
|
switch hookResponse.Status {
|
|
|
|
case -1:
|
|
|
|
// no hook configured
|
|
|
|
case 1:
|
|
|
|
providerLog(logger.LevelDebug, "password accepted by check password hook")
|
|
|
|
return *user, nil
|
|
|
|
case 2:
|
|
|
|
providerLog(logger.LevelDebug, "partial success from check password hook")
|
|
|
|
password = hookResponse.ToVerify
|
|
|
|
default:
|
|
|
|
providerLog(logger.LevelDebug, "password rejected by check password hook, status: %v", hookResponse.Status)
|
|
|
|
return *user, ErrInvalidCredentials
|
|
|
|
}
|
2020-08-19 17:36:12 +00:00
|
|
|
}
|
|
|
|
|
2021-02-16 18:11:36 +00:00
|
|
|
match, err := isPasswordOK(user, password)
|
2019-08-12 16:31:31 +00:00
|
|
|
if !match {
|
2020-08-31 17:25:17 +00:00
|
|
|
err = ErrInvalidCredentials
|
2019-08-12 16:31:31 +00:00
|
|
|
}
|
2021-02-16 18:11:36 +00:00
|
|
|
return *user, err
|
2019-08-12 16:31:31 +00:00
|
|
|
}
|
|
|
|
|
2021-02-16 18:11:36 +00:00
|
|
|
func checkUserAndPubKey(user *User, pubKey []byte) (User, string, error) {
|
|
|
|
err := checkLoginConditions(user)
|
2019-11-13 10:36:21 +00:00
|
|
|
if err != nil {
|
2021-02-16 18:11:36 +00:00
|
|
|
return *user, "", err
|
2019-11-13 10:36:21 +00:00
|
|
|
}
|
2019-08-12 16:31:31 +00:00
|
|
|
if len(user.PublicKeys) == 0 {
|
2021-02-16 18:11:36 +00:00
|
|
|
return *user, "", ErrInvalidCredentials
|
2019-08-12 16:31:31 +00:00
|
|
|
}
|
|
|
|
for i, k := range user.PublicKeys {
|
2019-09-05 19:35:53 +00:00
|
|
|
storedPubKey, comment, _, _, err := ssh.ParseAuthorizedKey([]byte(k))
|
2019-08-12 16:31:31 +00:00
|
|
|
if err != nil {
|
2019-09-06 13:19:01 +00:00
|
|
|
providerLog(logger.LevelWarn, "error parsing stored public key %d for user %v: %v", i, user.Username, err)
|
2021-02-16 18:11:36 +00:00
|
|
|
return *user, "", err
|
2019-08-12 16:31:31 +00:00
|
|
|
}
|
2020-04-09 21:32:42 +00:00
|
|
|
if bytes.Equal(storedPubKey.Marshal(), pubKey) {
|
2020-05-15 18:08:53 +00:00
|
|
|
certInfo := ""
|
|
|
|
cert, ok := storedPubKey.(*ssh.Certificate)
|
|
|
|
if ok {
|
|
|
|
certInfo = fmt.Sprintf(" %v ID: %v Serial: %v CA: %v", cert.Type(), cert.KeyId, cert.Serial,
|
|
|
|
ssh.FingerprintSHA256(cert.SignatureKey))
|
|
|
|
}
|
2021-02-16 18:11:36 +00:00
|
|
|
return *user, fmt.Sprintf("%v:%v%v", ssh.FingerprintSHA256(storedPubKey), comment, certInfo), nil
|
2019-08-12 16:31:31 +00:00
|
|
|
}
|
|
|
|
}
|
2021-02-16 18:11:36 +00:00
|
|
|
return *user, "", ErrInvalidCredentials
|
2019-08-12 16:31:31 +00:00
|
|
|
}
|
|
|
|
|
2020-08-19 17:36:12 +00:00
|
|
|
func compareUnixPasswordAndHash(user *User, password string) (bool, error) {
|
2020-09-04 19:08:09 +00:00
|
|
|
var crypter crypt.Crypter
|
2019-12-29 06:43:59 +00:00
|
|
|
if strings.HasPrefix(user.Password, sha512cryptPwdPrefix) {
|
2020-09-04 19:08:09 +00:00
|
|
|
crypter = sha512_crypt.New()
|
|
|
|
} else if strings.HasPrefix(user.Password, md5cryptPwdPrefix) {
|
|
|
|
crypter = md5_crypt.New()
|
|
|
|
} else if strings.HasPrefix(user.Password, md5cryptApr1PwdPrefix) {
|
|
|
|
crypter = apr1_crypt.New()
|
2019-12-29 06:43:59 +00:00
|
|
|
} else {
|
2020-09-04 19:08:09 +00:00
|
|
|
return false, errors.New("unix crypt: invalid or unsupported hash format")
|
2019-12-29 06:43:59 +00:00
|
|
|
}
|
2020-09-04 19:08:09 +00:00
|
|
|
if err := crypter.Verify(user.Password, []byte(password)); err != nil {
|
|
|
|
return false, err
|
|
|
|
}
|
|
|
|
return true, nil
|
2019-12-29 06:43:59 +00:00
|
|
|
}
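// The crypters above cover the standard crypt(3) identifiers: sha512-crypt
// ("$6$"), md5-crypt ("$1$") and Apache apr1 ("$apr1$"). For illustration
// only, a sha512-crypt value has the shape:
//
//	$6$<salt>$<derived hash>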
|
|
|
|
|
2019-08-17 13:20:49 +00:00
|
|
|
func comparePbkdf2PasswordAndHash(password, hashedPassword string) (bool, error) {
|
|
|
|
vals := strings.Split(hashedPassword, "$")
|
|
|
|
if len(vals) != 5 {
|
|
|
|
return false, fmt.Errorf("pbkdf2: hash is not in the correct format")
|
|
|
|
}
|
2020-04-11 10:25:21 +00:00
|
|
|
iterations, err := strconv.Atoi(vals[2])
|
|
|
|
if err != nil {
|
|
|
|
return false, err
|
|
|
|
}
|
|
|
|
expected, err := base64.StdEncoding.DecodeString(vals[4])
|
|
|
|
if err != nil {
|
|
|
|
return false, err
|
|
|
|
}
|
|
|
|
var salt []byte
|
|
|
|
if utils.IsStringPrefixInSlice(hashedPassword, pbkdfPwdB64SaltPrefixes) {
|
|
|
|
salt, err = base64.StdEncoding.DecodeString(vals[3])
|
|
|
|
if err != nil {
|
|
|
|
return false, err
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
salt = []byte(vals[3])
|
|
|
|
}
|
2019-08-17 13:20:49 +00:00
|
|
|
var hashFunc func() hash.Hash
|
2020-04-11 10:25:21 +00:00
|
|
|
if strings.HasPrefix(hashedPassword, pbkdf2SHA256Prefix) || strings.HasPrefix(hashedPassword, pbkdf2SHA256B64SaltPrefix) {
|
2019-08-17 13:20:49 +00:00
|
|
|
hashFunc = sha256.New
|
|
|
|
} else if strings.HasPrefix(hashedPassword, pbkdf2SHA512Prefix) {
|
|
|
|
hashFunc = sha512.New
|
|
|
|
} else if strings.HasPrefix(hashedPassword, pbkdf2SHA1Prefix) {
|
|
|
|
hashFunc = sha1.New
|
|
|
|
} else {
|
|
|
|
return false, fmt.Errorf("pbkdf2: invalid or unsupported hash format %v", vals[1])
|
|
|
|
}
|
2020-04-11 10:25:21 +00:00
|
|
|
df := pbkdf2.Key([]byte(password), salt, iterations, len(expected), hashFunc)
|
2020-03-28 15:09:06 +00:00
|
|
|
return subtle.ConstantTimeCompare(df, expected) == 1, nil
|
2019-08-17 13:20:49 +00:00
|
|
|
}
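// The pbkdf2 hashes verified above are "$" separated into five fields: an
// empty leading field, the algorithm identifier, the iteration count, the
// salt (base64 encoded for the b64 salt variants) and the base64 encoded
// derived key. For illustration only (the iteration count is an example):
//
//	$pbkdf2-sha256$150000$<salt>$<base64 derived key>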
|
|
|
|
|
2020-01-31 18:04:00 +00:00
|
|
|
func addCredentialsToUser(user *User) error {
|
2021-03-21 18:15:47 +00:00
|
|
|
if err := addFolderCredentialsToUser(user); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
if user.FsConfig.Provider != vfs.GCSFilesystemProvider {
|
2020-01-31 18:04:00 +00:00
|
|
|
return nil
|
|
|
|
}
|
2020-02-19 08:41:15 +00:00
|
|
|
if user.FsConfig.GCSConfig.AutomaticCredentials > 0 {
|
|
|
|
return nil
|
|
|
|
}
|
2020-10-22 08:42:40 +00:00
|
|
|
|
|
|
|
// Don't read from file if credentials have already been set
|
2020-11-22 20:53:04 +00:00
|
|
|
if user.FsConfig.GCSConfig.Credentials.IsValid() {
|
2020-10-22 08:42:40 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2021-03-21 18:15:47 +00:00
|
|
|
cred, err := os.ReadFile(user.GetGCSCredentialsFilePath())
|
2020-01-31 18:04:00 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2020-11-22 20:53:04 +00:00
|
|
|
return json.Unmarshal(cred, &user.FsConfig.GCSConfig.Credentials)
|
2020-01-31 18:04:00 +00:00
|
|
|
}
|
|
|
|
|
2021-03-21 18:15:47 +00:00
|
|
|
func addFolderCredentialsToUser(user *User) error {
|
|
|
|
for idx := range user.VirtualFolders {
|
|
|
|
f := &user.VirtualFolders[idx]
|
|
|
|
if f.FsConfig.Provider != vfs.GCSFilesystemProvider {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if f.FsConfig.GCSConfig.AutomaticCredentials > 0 {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
// Don't read from file if credentials have already been set
|
|
|
|
if f.FsConfig.GCSConfig.Credentials.IsValid() {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
cred, err := os.ReadFile(f.GetGCSCredentialsFilePath())
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
err = json.Unmarshal(cred, f.FsConfig.GCSConfig.Credentials)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2019-07-20 10:26:52 +00:00
|
|
|
func getSSLMode() string {
|
2021-03-23 18:14:15 +00:00
|
|
|
if config.Driver == PGSQLDataProviderName || config.Driver == CockroachDataProviderName {
|
2019-07-20 10:26:52 +00:00
|
|
|
if config.SSLMode == 0 {
|
|
|
|
return "disable"
|
|
|
|
} else if config.SSLMode == 1 {
|
|
|
|
return "require"
|
|
|
|
} else if config.SSLMode == 2 {
|
|
|
|
return "verify-ca"
|
|
|
|
} else if config.SSLMode == 3 {
|
|
|
|
return "verify-full"
|
|
|
|
}
|
|
|
|
} else if config.Driver == MySQLDataProviderName {
|
|
|
|
if config.SSLMode == 0 {
|
|
|
|
return "false"
|
|
|
|
} else if config.SSLMode == 1 {
|
|
|
|
return "true"
|
|
|
|
} else if config.SSLMode == 2 {
|
|
|
|
return "skip-verify"
|
|
|
|
} else if config.SSLMode == 3 {
|
|
|
|
return "preferred"
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return ""
|
|
|
|
}
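// SSL mode mapping derived from the switch above (config.SSLMode -> driver
// specific value):
//
//	PostgreSQL / CockroachDB: 0 "disable", 1 "require", 2 "verify-ca", 3 "verify-full"
//	MySQL:                    0 "false", 1 "true", 2 "skip-verify", 3 "preferred"
//
// Any other combination yields an empty string.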
|
2019-09-06 13:19:01 +00:00
|
|
|
|
2019-09-13 16:45:36 +00:00
|
|
|
func startAvailabilityTimer() {
|
2020-07-24 21:39:38 +00:00
|
|
|
availabilityTicker = time.NewTicker(30 * time.Second)
|
2019-09-28 20:48:52 +00:00
|
|
|
availabilityTickerDone = make(chan bool)
|
2019-09-13 16:45:36 +00:00
|
|
|
checkDataprovider()
|
|
|
|
go func() {
|
2019-09-28 20:48:52 +00:00
|
|
|
for {
|
|
|
|
select {
|
|
|
|
case <-availabilityTickerDone:
|
|
|
|
return
|
|
|
|
case <-availabilityTicker.C:
|
|
|
|
checkDataprovider()
|
|
|
|
}
|
2019-09-13 16:45:36 +00:00
|
|
|
}
|
|
|
|
}()
|
|
|
|
}
|
|
|
|
|
|
|
|
func checkDataprovider() {
|
|
|
|
err := provider.checkAvailability()
|
|
|
|
if err != nil {
|
|
|
|
providerLog(logger.LevelWarn, "check availability error: %v", err)
|
|
|
|
}
|
|
|
|
metrics.UpdateDataProviderAvailability(err)
|
|
|
|
}
|
|
|
|
|
2020-01-21 09:54:05 +00:00
|
|
|
func terminateInteractiveAuthProgram(cmd *exec.Cmd, isFinished bool) {
|
|
|
|
if isFinished {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
providerLog(logger.LevelInfo, "kill interactive auth program after an unexpected error")
|
2020-04-30 12:23:55 +00:00
|
|
|
err := cmd.Process.Kill()
|
|
|
|
if err != nil {
|
|
|
|
providerLog(logger.LevelDebug, "error killing interactive auth program: %v", err)
|
|
|
|
}
|
2020-01-21 09:54:05 +00:00
|
|
|
}
|
|
|
|
|
2020-04-01 21:25:23 +00:00
|
|
|
func validateKeyboardAuthResponse(response keyboardAuthHookResponse) error {
|
|
|
|
if len(response.Questions) == 0 {
|
|
|
|
err := errors.New("interactive auth error: hook response does not contain questions")
|
|
|
|
providerLog(logger.LevelInfo, "%v", err)
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
if len(response.Questions) != len(response.Echos) {
|
|
|
|
err := fmt.Errorf("interactive auth error, http hook response questions don't match echos: %v %v",
|
|
|
|
len(response.Questions), len(response.Echos))
|
|
|
|
providerLog(logger.LevelInfo, "%v", err)
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
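// For illustration, a hook response that passes the validation above: the
// questions and echos slices must have the same, non-zero length. Field
// values are examples only.
//
//	response := keyboardAuthHookResponse{
//		Instruction: "OTP required",
//		Questions:   []string{"Code: "},
//		Echos:       []bool{false},
//		CheckPwd:    0,
//	}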
|
|
|
|
|
|
|
|
func sendKeyboardAuthHTTPReq(url *url.URL, request keyboardAuthHookRequest) (keyboardAuthHookResponse, error) {
|
|
|
|
var response keyboardAuthHookResponse
|
2020-04-26 21:29:09 +00:00
|
|
|
httpClient := httpclient.GetHTTPClient()
|
2020-04-01 21:25:23 +00:00
|
|
|
reqAsJSON, err := json.Marshal(request)
|
|
|
|
if err != nil {
|
|
|
|
providerLog(logger.LevelWarn, "error serializing keyboard interactive auth request: %v", err)
|
|
|
|
return response, err
|
|
|
|
}
|
|
|
|
resp, err := httpClient.Post(url.String(), "application/json", bytes.NewBuffer(reqAsJSON))
|
|
|
|
if err != nil {
|
|
|
|
providerLog(logger.LevelWarn, "error getting keyboard interactive auth hook HTTP response: %v", err)
|
|
|
|
return response, err
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
if resp.StatusCode != http.StatusOK {
|
|
|
|
return response, fmt.Errorf("wrong keyboard interactive auth http status code: %v, expected 200", resp.StatusCode)
|
|
|
|
}
|
|
|
|
err = render.DecodeJSON(resp.Body, &response)
|
|
|
|
return response, err
|
|
|
|
}
|
|
|
|
|
2021-02-16 18:11:36 +00:00
|
|
|
func executeKeyboardInteractiveHTTPHook(user *User, authHook string, client ssh.KeyboardInteractiveChallenge, ip, protocol string) (int, error) {
|
2020-04-01 21:25:23 +00:00
|
|
|
authResult := 0
|
|
|
|
var url *url.URL
|
|
|
|
url, err := url.Parse(authHook)
|
|
|
|
if err != nil {
|
|
|
|
providerLog(logger.LevelWarn, "invalid url for keyboard interactive hook %#v, error: %v", authHook, err)
|
|
|
|
return authResult, err
|
|
|
|
}
|
|
|
|
requestID := xid.New().String()
|
|
|
|
req := keyboardAuthHookRequest{
|
|
|
|
Username: user.Username,
|
2020-08-04 16:03:28 +00:00
|
|
|
IP: ip,
|
2020-04-01 21:25:23 +00:00
|
|
|
Password: user.Password,
|
|
|
|
RequestID: requestID,
|
|
|
|
}
|
|
|
|
var response keyboardAuthHookResponse
|
|
|
|
for {
|
|
|
|
response, err = sendKeyboardAuthHTTPReq(url, req)
|
|
|
|
if err != nil {
|
|
|
|
return authResult, err
|
|
|
|
}
|
|
|
|
if response.AuthResult != 0 {
|
|
|
|
return response.AuthResult, err
|
|
|
|
}
|
|
|
|
if err = validateKeyboardAuthResponse(response); err != nil {
|
|
|
|
return authResult, err
|
|
|
|
}
|
2020-08-19 17:36:12 +00:00
|
|
|
answers, err := getKeyboardInteractiveAnswers(client, response, user, ip, protocol)
|
2020-04-01 21:25:23 +00:00
|
|
|
if err != nil {
|
|
|
|
return authResult, err
|
|
|
|
}
|
|
|
|
req = keyboardAuthHookRequest{
|
|
|
|
RequestID: requestID,
|
|
|
|
Username: user.Username,
|
|
|
|
Password: user.Password,
|
|
|
|
Answers: answers,
|
|
|
|
Questions: response.Questions,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func getKeyboardInteractiveAnswers(client ssh.KeyboardInteractiveChallenge, response keyboardAuthHookResponse,
|
2021-02-16 18:11:36 +00:00
|
|
|
user *User, ip, protocol string) ([]string, error) {
|
2020-02-16 10:43:52 +00:00
|
|
|
questions := response.Questions
|
|
|
|
answers, err := client(user.Username, response.Instruction, questions, response.Echos)
|
|
|
|
if err != nil {
|
|
|
|
providerLog(logger.LevelInfo, "error getting interactive auth client response: %v", err)
|
2020-04-01 21:25:23 +00:00
|
|
|
return answers, err
|
2020-02-16 10:43:52 +00:00
|
|
|
}
|
|
|
|
if len(answers) != len(questions) {
|
|
|
|
err = fmt.Errorf("client answers does not match questions, expected: %v actual: %v", questions, answers)
|
|
|
|
providerLog(logger.LevelInfo, "keyboard interactive auth error: %v", err)
|
2020-04-01 21:25:23 +00:00
|
|
|
return answers, err
|
2020-02-16 10:43:52 +00:00
|
|
|
}
|
|
|
|
if len(answers) == 1 && response.CheckPwd > 0 {
|
2020-08-19 17:36:12 +00:00
|
|
|
_, err = checkUserAndPass(user, answers[0], ip, protocol)
|
2020-04-01 21:25:23 +00:00
|
|
|
providerLog(logger.LevelInfo, "interactive auth hook requested password validation for user %#v, validation error: %v",
|
2020-02-16 10:43:52 +00:00
|
|
|
user.Username, err)
|
|
|
|
if err != nil {
|
2020-04-01 21:25:23 +00:00
|
|
|
return answers, err
|
2020-02-16 10:43:52 +00:00
|
|
|
}
|
|
|
|
answers[0] = "OK"
|
|
|
|
}
|
2020-04-01 21:25:23 +00:00
|
|
|
return answers, err
|
|
|
|
}
|
|
|
|
|
|
|
|
func handleProgramInteractiveQuestions(client ssh.KeyboardInteractiveChallenge, response keyboardAuthHookResponse,
|
2021-02-16 18:11:36 +00:00
|
|
|
user *User, stdin io.WriteCloser, ip, protocol string) error {
|
2020-08-19 17:36:12 +00:00
|
|
|
answers, err := getKeyboardInteractiveAnswers(client, response, user, ip, protocol)
|
2020-04-01 21:25:23 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2020-02-16 10:43:52 +00:00
|
|
|
for _, answer := range answers {
|
|
|
|
if runtime.GOOS == "windows" {
|
|
|
|
answer += "\r"
|
|
|
|
}
|
|
|
|
answer += "\n"
|
|
|
|
_, err = stdin.Write([]byte(answer))
|
|
|
|
if err != nil {
|
|
|
|
providerLog(logger.LevelError, "unable to write client answer to keyboard interactive program: %v", err)
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2021-02-16 18:11:36 +00:00
|
|
|
func executeKeyboardInteractiveProgram(user *User, authHook string, client ssh.KeyboardInteractiveChallenge, ip, protocol string) (int, error) {
|
2020-04-01 21:25:23 +00:00
|
|
|
authResult := 0
|
2020-01-21 09:54:05 +00:00
|
|
|
ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second)
|
|
|
|
defer cancel()
|
2020-04-01 21:25:23 +00:00
|
|
|
cmd := exec.CommandContext(ctx, authHook)
|
2020-01-21 09:54:05 +00:00
|
|
|
cmd.Env = append(os.Environ(),
|
|
|
|
fmt.Sprintf("SFTPGO_AUTHD_USERNAME=%v", user.Username),
|
2020-08-04 16:03:28 +00:00
|
|
|
fmt.Sprintf("SFTPGO_AUTHD_IP=%v", ip),
|
2020-01-21 09:54:05 +00:00
|
|
|
fmt.Sprintf("SFTPGO_AUTHD_PASSWORD=%v", user.Password))
|
|
|
|
stdout, err := cmd.StdoutPipe()
|
|
|
|
if err != nil {
|
2020-04-01 21:25:23 +00:00
|
|
|
return authResult, err
|
2020-01-21 09:54:05 +00:00
|
|
|
}
|
|
|
|
stdin, err := cmd.StdinPipe()
|
|
|
|
if err != nil {
|
2020-04-01 21:25:23 +00:00
|
|
|
return authResult, err
|
2020-01-21 09:54:05 +00:00
|
|
|
}
|
|
|
|
err = cmd.Start()
|
|
|
|
if err != nil {
|
2020-04-01 21:25:23 +00:00
|
|
|
return authResult, err
|
2020-01-21 09:54:05 +00:00
|
|
|
}
|
|
|
|
var once sync.Once
|
|
|
|
scanner := bufio.NewScanner(stdout)
|
|
|
|
for scanner.Scan() {
|
2020-04-01 21:25:23 +00:00
|
|
|
var response keyboardAuthHookResponse
|
|
|
|
err = json.Unmarshal(scanner.Bytes(), &response)
|
2020-01-21 09:54:05 +00:00
|
|
|
if err != nil {
|
|
|
|
providerLog(logger.LevelInfo, "interactive auth error parsing response: %v", err)
|
|
|
|
once.Do(func() { terminateInteractiveAuthProgram(cmd, false) })
|
|
|
|
break
|
|
|
|
}
|
|
|
|
if response.AuthResult != 0 {
|
|
|
|
authResult = response.AuthResult
|
|
|
|
break
|
|
|
|
}
|
2020-04-01 21:25:23 +00:00
|
|
|
if err = validateKeyboardAuthResponse(response); err != nil {
|
2020-01-21 09:54:05 +00:00
|
|
|
once.Do(func() { terminateInteractiveAuthProgram(cmd, false) })
|
|
|
|
break
|
|
|
|
}
|
|
|
|
go func() {
|
2020-08-19 17:36:12 +00:00
|
|
|
err := handleProgramInteractiveQuestions(client, response, user, stdin, ip, protocol)
|
2020-01-21 09:54:05 +00:00
|
|
|
if err != nil {
|
|
|
|
once.Do(func() { terminateInteractiveAuthProgram(cmd, false) })
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
}
|
|
|
|
stdin.Close()
|
|
|
|
once.Do(func() { terminateInteractiveAuthProgram(cmd, true) })
|
2020-04-30 12:23:55 +00:00
|
|
|
go func() {
|
|
|
|
_, err := cmd.Process.Wait()
|
|
|
|
if err != nil {
|
|
|
|
providerLog(logger.LevelWarn, "error waiting for #%v process to exit: %v", authHook, err)
|
|
|
|
}
|
|
|
|
}()
|
2020-04-01 21:25:23 +00:00
|
|
|
|
|
|
|
return authResult, err
|
|
|
|
}
|
|
|
|
|
2021-02-16 18:11:36 +00:00
|
|
|
func doKeyboardInteractiveAuth(user *User, authHook string, client ssh.KeyboardInteractiveChallenge, ip, protocol string) (User, error) {
|
2020-04-01 21:25:23 +00:00
|
|
|
var authResult int
|
|
|
|
var err error
|
|
|
|
if strings.HasPrefix(authHook, "http") {
|
2020-08-19 17:36:12 +00:00
|
|
|
authResult, err = executeKeyboardInteractiveHTTPHook(user, authHook, client, ip, protocol)
|
2020-04-01 21:25:23 +00:00
|
|
|
} else {
|
2020-08-19 17:36:12 +00:00
|
|
|
authResult, err = executeKeyboardInteractiveProgram(user, authHook, client, ip, protocol)
|
2020-04-01 21:25:23 +00:00
|
|
|
}
|
2020-04-03 20:30:30 +00:00
|
|
|
if err != nil {
|
2021-02-16 18:11:36 +00:00
|
|
|
return *user, err
|
2020-04-03 20:30:30 +00:00
|
|
|
}
|
2020-01-21 09:54:05 +00:00
|
|
|
if authResult != 1 {
|
2021-02-16 18:11:36 +00:00
|
|
|
return *user, fmt.Errorf("keyboard interactive auth failed, result: %v", authResult)
|
2020-01-21 09:54:05 +00:00
|
|
|
}
|
2021-02-16 18:11:36 +00:00
|
|
|
err = checkLoginConditions(user)
|
2020-02-23 17:50:59 +00:00
|
|
|
if err != nil {
|
2021-02-16 18:11:36 +00:00
|
|
|
return *user, err
|
2020-02-23 17:50:59 +00:00
|
|
|
}
|
2021-02-16 18:11:36 +00:00
|
|
|
return *user, nil
|
2020-01-21 09:54:05 +00:00
|
|
|
}
|
|
|
|
|
2020-08-19 17:36:12 +00:00
|
|
|
func isCheckPasswordHookDefined(protocol string) bool {
|
2021-01-05 08:50:22 +00:00
|
|
|
if config.CheckPasswordHook == "" {
|
2020-08-19 17:36:12 +00:00
|
|
|
return false
|
|
|
|
}
|
|
|
|
if config.CheckPasswordScope == 0 {
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
switch protocol {
|
|
|
|
case "SSH":
|
|
|
|
return config.CheckPasswordScope&1 != 0
|
|
|
|
case "FTP":
|
|
|
|
return config.CheckPasswordScope&2 != 0
|
|
|
|
case "DAV":
|
|
|
|
return config.CheckPasswordScope&4 != 0
|
|
|
|
default:
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
}
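// config.CheckPasswordScope is a bit mask interpreted above: 0 enables the
// hook for every protocol, the 1 bit enables it for SSH, the 2 bit for FTP
// and the 4 bit for WebDAV. For illustration, a scope of 6 enables the hook
// for FTP and WebDAV only.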
|
|
|
|
|
|
|
|
func getPasswordHookResponse(username, password, ip, protocol string) ([]byte, error) {
|
|
|
|
if strings.HasPrefix(config.CheckPasswordHook, "http") {
|
|
|
|
var result []byte
|
|
|
|
var url *url.URL
|
|
|
|
url, err := url.Parse(config.CheckPasswordHook)
|
|
|
|
if err != nil {
|
|
|
|
providerLog(logger.LevelWarn, "invalid url for check password hook %#v, error: %v", config.CheckPasswordHook, err)
|
|
|
|
return result, err
|
|
|
|
}
|
|
|
|
req := checkPasswordRequest{
|
|
|
|
Username: username,
|
|
|
|
Password: password,
|
|
|
|
IP: ip,
|
|
|
|
Protocol: protocol,
|
|
|
|
}
|
|
|
|
reqAsJSON, err := json.Marshal(req)
|
|
|
|
if err != nil {
|
|
|
|
return result, err
|
|
|
|
}
|
|
|
|
httpClient := httpclient.GetHTTPClient()
|
|
|
|
resp, err := httpClient.Post(url.String(), "application/json", bytes.NewBuffer(reqAsJSON))
|
|
|
|
if err != nil {
|
|
|
|
providerLog(logger.LevelWarn, "error getting check password hook response: %v", err)
|
|
|
|
return result, err
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
if resp.StatusCode != http.StatusOK {
|
|
|
|
return result, fmt.Errorf("wrong http status code from chek password hook: %v, expected 200", resp.StatusCode)
|
|
|
|
}
|
2021-02-25 20:53:04 +00:00
|
|
|
return io.ReadAll(resp.Body)
|
2020-08-19 17:36:12 +00:00
|
|
|
}
|
|
|
|
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
|
|
|
defer cancel()
|
|
|
|
cmd := exec.CommandContext(ctx, config.CheckPasswordHook)
|
|
|
|
cmd.Env = append(os.Environ(),
|
|
|
|
fmt.Sprintf("SFTPGO_AUTHD_USERNAME=%v", username),
|
|
|
|
fmt.Sprintf("SFTPGO_AUTHD_PASSWORD=%v", password),
|
|
|
|
fmt.Sprintf("SFTPGO_AUTHD_IP=%v", ip),
|
|
|
|
fmt.Sprintf("SFTPGO_AUTHD_PROTOCOL=%v", protocol),
|
|
|
|
)
|
|
|
|
return cmd.Output()
|
|
|
|
}
|
|
|
|
|
|
|
|
func executeCheckPasswordHook(username, password, ip, protocol string) (checkPasswordResponse, error) {
|
|
|
|
var response checkPasswordResponse
|
|
|
|
|
|
|
|
if !isCheckPasswordHookDefined(protocol) {
|
|
|
|
response.Status = -1
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2021-03-21 18:15:47 +00:00
|
|
|
startTime := time.Now()
|
2020-08-19 17:36:12 +00:00
|
|
|
out, err := getPasswordHookResponse(username, password, ip, protocol)
|
2021-03-21 18:15:47 +00:00
|
|
|
providerLog(logger.LevelDebug, "check password hook executed, error: %v, elapsed: %v", err, time.Since(startTime))
|
2020-08-19 17:36:12 +00:00
|
|
|
if err != nil {
|
|
|
|
return response, err
|
|
|
|
}
|
|
|
|
err = json.Unmarshal(out, &response)
|
|
|
|
return response, err
|
|
|
|
}
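// For illustration, how the hook status is interpreted by the callers in
// this file: 1 accepts the password as is, 2 is a partial success and the
// returned ToVerify value is checked against the stored hash, any other
// status rejects the login; -1 is only set internally when no hook is
// configured for the protocol.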
|
|
|
|
|
2020-08-12 14:15:12 +00:00
|
|
|
func getPreLoginHookResponse(loginMethod, ip, protocol string, userAsJSON []byte) ([]byte, error) {
|
2020-04-01 21:25:23 +00:00
|
|
|
if strings.HasPrefix(config.PreLoginHook, "http") {
|
|
|
|
var url *url.URL
|
|
|
|
var result []byte
|
|
|
|
url, err := url.Parse(config.PreLoginHook)
|
|
|
|
if err != nil {
|
|
|
|
providerLog(logger.LevelWarn, "invalid url for pre-login hook %#v, error: %v", config.PreLoginHook, err)
|
|
|
|
return result, err
|
|
|
|
}
|
|
|
|
q := url.Query()
|
|
|
|
q.Add("login_method", loginMethod)
|
2020-08-04 16:03:28 +00:00
|
|
|
q.Add("ip", ip)
|
2020-08-12 14:15:12 +00:00
|
|
|
q.Add("protocol", protocol)
|
2020-04-01 21:25:23 +00:00
|
|
|
url.RawQuery = q.Encode()
|
2020-04-26 21:29:09 +00:00
|
|
|
httpClient := httpclient.GetHTTPClient()
|
2020-04-01 21:25:23 +00:00
|
|
|
resp, err := httpClient.Post(url.String(), "application/json", bytes.NewBuffer(userAsJSON))
|
|
|
|
if err != nil {
|
|
|
|
providerLog(logger.LevelWarn, "error getting pre-login hook response: %v", err)
|
|
|
|
return result, err
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
if resp.StatusCode == http.StatusNoContent {
|
|
|
|
return result, nil
|
|
|
|
}
|
|
|
|
if resp.StatusCode != http.StatusOK {
|
|
|
|
return result, fmt.Errorf("wrong pre-login hook http status code: %v, expected 200", resp.StatusCode)
|
|
|
|
}
|
2021-02-25 20:53:04 +00:00
|
|
|
return io.ReadAll(resp.Body)
|
2020-04-01 21:25:23 +00:00
|
|
|
}
|
2020-04-26 21:29:09 +00:00
|
|
|
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
2020-04-01 21:25:23 +00:00
|
|
|
defer cancel()
|
|
|
|
cmd := exec.CommandContext(ctx, config.PreLoginHook)
|
|
|
|
cmd.Env = append(os.Environ(),
|
|
|
|
fmt.Sprintf("SFTPGO_LOGIND_USER=%v", string(userAsJSON)),
|
|
|
|
fmt.Sprintf("SFTPGO_LOGIND_METHOD=%v", loginMethod),
|
2020-08-04 16:03:28 +00:00
|
|
|
fmt.Sprintf("SFTPGO_LOGIND_IP=%v", ip),
|
2020-08-19 17:36:12 +00:00
|
|
|
fmt.Sprintf("SFTPGO_LOGIND_PROTOCOL=%v", protocol),
|
2020-04-01 21:25:23 +00:00
|
|
|
)
|
|
|
|
return cmd.Output()
|
|
|
|
}
|
|
|
|
|
2020-08-12 14:15:12 +00:00
|
|
|
func executePreLoginHook(username, loginMethod, ip, protocol string) (User, error) {
	u, userAsJSON, err := getUserAndJSONForHook(username)
	if err != nil {
		return u, err
	}
	if u.Filters.Hooks.PreLoginDisabled {
		return u, nil
	}
	startTime := time.Now()
	out, err := getPreLoginHookResponse(loginMethod, ip, protocol, userAsJSON)
	if err != nil {
		return u, fmt.Errorf("pre-login hook error: %v, elapsed %v", err, time.Since(startTime))
	}
	providerLog(logger.LevelDebug, "pre-login hook completed, elapsed: %v", time.Since(startTime))
	if utils.IsByteArrayEmpty(out) {
		providerLog(logger.LevelDebug, "empty response from pre-login hook, no modification requested for user %#v id: %v",
			username, u.ID)
		if u.ID == 0 {
			return u, &RecordNotFoundError{err: fmt.Sprintf("username %#v does not exist", username)}
		}
		return u, nil
	}

	userID := u.ID
	userPwd := u.Password
	userUsedQuotaSize := u.UsedQuotaSize
	userUsedQuotaFiles := u.UsedQuotaFiles
	userLastQuotaUpdate := u.LastQuotaUpdate
	userLastLogin := u.LastLogin
	err = json.Unmarshal(out, &u)
	if err != nil {
		return u, fmt.Errorf("invalid pre-login hook response %#v, error: %v", string(out), err)
	}
	u.ID = userID
	u.UsedQuotaSize = userUsedQuotaSize
	u.UsedQuotaFiles = userUsedQuotaFiles
	u.LastQuotaUpdate = userLastQuotaUpdate
	u.LastLogin = userLastLogin
	if userID == 0 {
		err = provider.addUser(&u)
	} else {
		err = provider.updateUser(&u)
		if err == nil {
			webDAVUsersCache.swap(&u)
			if u.Password != userPwd {
				cachedPasswords.Remove(username)
			}
		}
	}
	if err != nil {
		return u, err
	}
	providerLog(logger.LevelDebug, "user %#v added/updated from pre-login hook response, id: %v", username, userID)
	if userID == 0 {
		return provider.userExists(username)
	}
	return u, nil
}

// ExecutePostLoginHook executes the post login hook if defined.
// PostLoginScope 1 limits notifications to failed logins, PostLoginScope 2
// to successful ones; any other scope notifies both.
func ExecutePostLoginHook(user *User, loginMethod, ip, protocol string, err error) {
	if config.PostLoginHook == "" {
		return
	}
	if config.PostLoginScope == 1 && err == nil {
		return
	}
	if config.PostLoginScope == 2 && err != nil {
		return
	}

	go func() {
		status := "0"
		if err == nil {
			status = "1"
		}

		user.PrepareForRendering()
		userAsJSON, err := json.Marshal(user)
		if err != nil {
			providerLog(logger.LevelWarn, "error serializing user in post login hook: %v", err)
			return
		}
		if strings.HasPrefix(config.PostLoginHook, "http") {
			var url *url.URL
			url, err := url.Parse(config.PostLoginHook)
			if err != nil {
				providerLog(logger.LevelDebug, "invalid post-login hook %#v", config.PostLoginHook)
				return
			}
			q := url.Query()
			q.Add("login_method", loginMethod)
			q.Add("ip", ip)
			q.Add("protocol", protocol)
			q.Add("status", status)
			url.RawQuery = q.Encode()

			startTime := time.Now()
			respCode := 0
			httpClient := httpclient.GetRetraybleHTTPClient()
			resp, err := httpClient.Post(url.String(), "application/json", bytes.NewBuffer(userAsJSON))
			if err == nil {
				respCode = resp.StatusCode
				resp.Body.Close()
			}
			providerLog(logger.LevelDebug, "post login hook executed, response code: %v, elapsed: %v err: %v",
				respCode, time.Since(startTime), err)
			return
		}
		ctx, cancel := context.WithTimeout(context.Background(), 20*time.Second)
		defer cancel()
		cmd := exec.CommandContext(ctx, config.PostLoginHook)
		cmd.Env = append(os.Environ(),
			fmt.Sprintf("SFTPGO_LOGIND_USER=%v", string(userAsJSON)),
			fmt.Sprintf("SFTPGO_LOGIND_IP=%v", ip),
			fmt.Sprintf("SFTPGO_LOGIND_METHOD=%v", loginMethod),
			fmt.Sprintf("SFTPGO_LOGIND_STATUS=%v", status),
			fmt.Sprintf("SFTPGO_LOGIND_PROTOCOL=%v", protocol))
		startTime := time.Now()
		err = cmd.Run()
		providerLog(logger.LevelDebug, "post login hook executed, elapsed %v err: %v", time.Since(startTime), err)
	}()
}

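// getExternalAuthResponse returns the raw external auth hook response.
// For an HTTP hook the credentials are sent as a JSON POST body, otherwise
// the configured command is executed with the SFTPGO_AUTHD_* environment
// variables set. The client TLS certificate, if any, is PEM encoded first.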
func getExternalAuthResponse(username, password, pkey, keyboardInteractive, ip, protocol string, cert *x509.Certificate, userAsJSON []byte) ([]byte, error) {
	var tlsCert string
	if cert != nil {
		var err error
		tlsCert, err = utils.EncodeTLSCertToPem(cert)
		if err != nil {
			return nil, err
		}
	}
	if strings.HasPrefix(config.ExternalAuthHook, "http") {
		var url *url.URL
		var result []byte
		url, err := url.Parse(config.ExternalAuthHook)
		if err != nil {
			providerLog(logger.LevelWarn, "invalid url for external auth hook %#v, error: %v", config.ExternalAuthHook, err)
			return result, err
		}
		httpClient := httpclient.GetHTTPClient()
		authRequest := make(map[string]string)
		authRequest["username"] = username
		authRequest["ip"] = ip
		authRequest["password"] = password
		authRequest["public_key"] = pkey
		authRequest["protocol"] = protocol
		authRequest["keyboard_interactive"] = keyboardInteractive
		authRequest["tls_cert"] = tlsCert
		if len(userAsJSON) > 0 {
			authRequest["user"] = string(userAsJSON)
		}
		authRequestAsJSON, err := json.Marshal(authRequest)
		if err != nil {
			providerLog(logger.LevelWarn, "error serializing external auth request: %v", err)
			return result, err
		}
		resp, err := httpClient.Post(url.String(), "application/json", bytes.NewBuffer(authRequestAsJSON))
		if err != nil {
			providerLog(logger.LevelWarn, "error getting external auth hook HTTP response: %v", err)
			return result, err
		}
		defer resp.Body.Close()
		providerLog(logger.LevelDebug, "external auth hook executed, response code: %v", resp.StatusCode)
		if resp.StatusCode != http.StatusOK {
			return result, fmt.Errorf("wrong external auth http status code: %v, expected 200", resp.StatusCode)
		}

		return io.ReadAll(resp.Body)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	cmd := exec.CommandContext(ctx, config.ExternalAuthHook)
	cmd.Env = append(os.Environ(),
		fmt.Sprintf("SFTPGO_AUTHD_USERNAME=%v", username),
		fmt.Sprintf("SFTPGO_AUTHD_USER=%v", string(userAsJSON)),
		fmt.Sprintf("SFTPGO_AUTHD_IP=%v", ip),
		fmt.Sprintf("SFTPGO_AUTHD_PASSWORD=%v", password),
		fmt.Sprintf("SFTPGO_AUTHD_PUBLIC_KEY=%v", pkey),
		fmt.Sprintf("SFTPGO_AUTHD_PROTOCOL=%v", protocol),
		fmt.Sprintf("SFTPGO_AUTHD_TLS_CERT=%v", strings.ReplaceAll(tlsCert, "\n", "\\n")),
		fmt.Sprintf("SFTPGO_AUTHD_KEYBOARD_INTERACTIVE=%v", keyboardInteractive))
	return cmd.Output()
}

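// updateUserFromExtAuthResponse merges the login credentials into the user
// returned by the external auth hook: the password is set if not empty and
// the public key is appended if not already present.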
func updateUserFromExtAuthResponse(user *User, password, pkey string) {
	if password != "" {
		user.Password = password
	}
	if pkey != "" && !utils.IsStringPrefixInSlice(pkey, user.PublicKeys) {
		user.PublicKeys = append(user.PublicKeys, pkey)
	}
}

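// doExternalAuth authenticates the user against the external auth hook and
// stores the returned user. An empty hook response means the stored user is
// used as-is, an empty username in the response means the authentication
// failed; otherwise the returned user is added to or updated in the data
// provider.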
func doExternalAuth(username, password string, pubKey []byte, keyboardInteractive, ip, protocol string, tlsCert *x509.Certificate) (User, error) {
	var user User

	u, userAsJSON, err := getUserAndJSONForHook(username)
	if err != nil {
		return user, err
	}

	if u.Filters.Hooks.ExternalAuthDisabled {
		return u, nil
	}

	pkey, err := utils.GetSSHPublicKeyAsString(pubKey)
	if err != nil {
		return user, err
	}

	startTime := time.Now()
	out, err := getExternalAuthResponse(username, password, pkey, keyboardInteractive, ip, protocol, tlsCert, userAsJSON)
	if err != nil {
		return user, fmt.Errorf("external auth error: %v, elapsed: %v", err, time.Since(startTime))
	}
	providerLog(logger.LevelDebug, "external auth completed, elapsed: %v", time.Since(startTime))
	if utils.IsByteArrayEmpty(out) {
		providerLog(logger.LevelDebug, "empty response from external hook, no modification requested for user %#v id: %v",
			username, u.ID)
		if u.ID == 0 {
			return u, &RecordNotFoundError{err: fmt.Sprintf("username %#v does not exist", username)}
		}
		return u, nil
	}
	err = json.Unmarshal(out, &user)
	if err != nil {
		return user, fmt.Errorf("invalid external auth response: %v", err)
	}
	// an empty username means authentication failure
	if user.Username == "" {
		return user, ErrInvalidCredentials
	}
	updateUserFromExtAuthResponse(&user, password, pkey)
	// some users want to map multiple login usernames to a single SFTPGo account,
	// for example an SFTP user logs in as "user1" or "user2" and the external auth
	// hook returns "user" in both cases, so we use the username returned from
	// the external auth hook and not the one used to log in
	if user.Username != username {
		u, err = provider.userExists(user.Username)
	}
	if u.ID > 0 && err == nil {
		user.ID = u.ID
		user.UsedQuotaSize = u.UsedQuotaSize
		user.UsedQuotaFiles = u.UsedQuotaFiles
		user.LastQuotaUpdate = u.LastQuotaUpdate
		user.LastLogin = u.LastLogin
		err = provider.updateUser(&user)
		if err == nil {
			webDAVUsersCache.swap(&user)
			cachedPasswords.Add(user.Username, password)
		}
		return user, err
	}
	err = provider.addUser(&user)
	if err != nil {
		return user, err
	}
	return provider.userExists(user.Username)
}

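// getUserAndJSONForHook returns the stored user, if any, and its JSON
// serialization to pass to hooks. If the user does not exist, a new User
// with the given username and ID 0 is returned instead of an error.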
func getUserAndJSONForHook(username string) (User, []byte, error) {
	var userAsJSON []byte
	u, err := provider.userExists(username)
	if err != nil {
		if _, ok := err.(*RecordNotFoundError); !ok {
			return u, userAsJSON, err
		}
		u = User{
			ID:       0,
			Username: username,
		}
	}
	userAsJSON, err = json.Marshal(u)
	if err != nil {
		return u, userAsJSON, err
	}
	return u, userAsJSON, err
}

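// providerLog logs a data provider message using the package log sender.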
func providerLog(level logger.LogLevel, format string, v ...interface{}) {
	logger.Log(level, logSender, "", format, v...)
}

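// executeNotificationCommand executes the configured actions hook as an
// external command, passing the operation and the user as environment
// variables. The hook path must be absolute.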
func executeNotificationCommand(operation string, commandArgs []string, userAsJSON []byte) error {
	if !filepath.IsAbs(config.Actions.Hook) {
		err := fmt.Errorf("invalid notification command %#v", config.Actions.Hook)
		logger.Warn(logSender, "", "unable to execute notification command: %v", err)
		return err
	}

	ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)
	defer cancel()

	cmd := exec.CommandContext(ctx, config.Actions.Hook, commandArgs...)
	cmd.Env = append(os.Environ(),
		fmt.Sprintf("SFTPGO_USER_ACTION=%v", operation),
		fmt.Sprintf("SFTPGO_USER=%v", string(userAsJSON)))

	startTime := time.Now()
	err := cmd.Run()
	providerLog(logger.LevelDebug, "executed command %#v with arguments: %+v, elapsed: %v, error: %v",
		config.Actions.Hook, commandArgs, time.Since(startTime), err)
	return err
}

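// executeAction asynchronously notifies the configured hook, if any, about
// the given user operation, either via an HTTP POST or by executing an
// external command. For operations other than delete the user is reloaded
// from the data provider before the notification is sent.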
func executeAction(operation string, user *User) {
	if !utils.IsStringInSlice(operation, config.Actions.ExecuteOn) {
		return
	}
	if config.Actions.Hook == "" {
		return
	}

	go func() {
		if operation != operationDelete {
			var err error
			u, err := provider.userExists(user.Username)
			if err != nil {
				providerLog(logger.LevelWarn, "unable to get the user to notify for operation %#v: %v", operation, err)
				return
			}
			user = &u
		}
		user.PrepareForRendering()
		userAsJSON, err := json.Marshal(user)
		if err != nil {
			providerLog(logger.LevelWarn, "unable to serialize user as JSON for operation %#v: %v", operation, err)
			return
		}
		if strings.HasPrefix(config.Actions.Hook, "http") {
			var url *url.URL
			url, err := url.Parse(config.Actions.Hook)
			if err != nil {
				providerLog(logger.LevelWarn, "invalid http_notification_url %#v for operation %#v: %v", config.Actions.Hook, operation, err)
				return
			}
			q := url.Query()
			q.Add("action", operation)
			url.RawQuery = q.Encode()
			startTime := time.Now()
			httpClient := httpclient.GetRetraybleHTTPClient()
			resp, err := httpClient.Post(url.String(), "application/json", bytes.NewBuffer(userAsJSON))
			respCode := 0
			if err == nil {
				respCode = resp.StatusCode
				resp.Body.Close()
			}
			providerLog(logger.LevelDebug, "notified operation %#v to URL: %v status code: %v, elapsed: %v err: %v",
				operation, url.String(), respCode, time.Since(startTime), err)
		} else {
			executeNotificationCommand(operation, user.getNotificationFieldsAsSlice(operation), userAsJSON) //nolint:errcheck // the error is used in test cases only
		}
	}()
}