feat(configuration): replace viper with koanf (#2053)
This commit replaces github.com/spf13/viper with github.com/knadh/koanf. Koanf is a very similar library to viper, with fewer dependencies and several quality-of-life differences. This change also allows most configuration options to be defined via environment variables. Lastly, it enables the use of split configuration files, which can be configured by setting the --config flag multiple times.

Co-authored-by: Amir Zarrinkafsh <nightah@me.com>
This commit is contained in:
parent 3d656eb5db
commit a7e867a699
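The switch to koanf is what enables both environment-variable configuration and multiple configuration files. As rough orientation only (this is not Authelia's actual wiring; the file names, prefix handling, and key mapping below are illustrative assumptions), koanf merges providers in load order like this:

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/knadh/koanf"
	"github.com/knadh/koanf/parsers/yaml"
	"github.com/knadh/koanf/providers/env"
	"github.com/knadh/koanf/providers/file"
)

func main() {
	k := koanf.New(".")

	// Later files override earlier ones, mirroring `--config a.yml --config b.yml`.
	for _, path := range []string{"config.yml", "config-acl.yml"} {
		if err := k.Load(file.Provider(path), yaml.Parser()); err != nil {
			log.Fatalf("error loading %s: %v", path, err)
		}
	}

	// Environment variables are loaded last, so they override file values.
	// The naive AUTHELIA_LOG_LEVEL -> log.level mapping here is only a sketch;
	// multi-word keys need smarter handling than a plain underscore-to-dot swap.
	if err := k.Load(env.Provider("AUTHELIA_", ".", func(s string) string {
		return strings.ReplaceAll(strings.ToLower(strings.TrimPrefix(s, "AUTHELIA_")), "_", ".")
	}), nil); err != nil {
		log.Fatalf("error loading environment: %v", err)
	}

	fmt.Println("log.level =", k.String("log.level"))
}
```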
@@ -12,7 +12,7 @@ func getXFlags(branch, build, extra string) (flags []string, err error) {
 	if branch == "" {
 		out, _, err := utils.RunCommandAndReturnOutput("git rev-parse --abbrev-ref HEAD")
 		if err != nil {
-			return flags, err
+			return flags, fmt.Errorf("error getting branch with git rev-parse: %w", err)
 		}
 
 		if out == "" {
@@ -24,17 +24,17 @@ func getXFlags(branch, build, extra string) (flags []string, err error) {
 
 	gitTagCommit, _, err := utils.RunCommandAndReturnOutput("git rev-list --tags --max-count=1")
 	if err != nil {
-		return flags, err
+		return flags, fmt.Errorf("error getting tag commit with git rev-list: %w", err)
 	}
 
-	tag, _, err := utils.RunCommandAndReturnOutput("git describe --tags --abbrev=0 " + gitTagCommit)
+	tag, _, err := utils.RunCommandAndReturnOutput(fmt.Sprintf("git describe --tags --abbrev=0 %s", gitTagCommit))
 	if err != nil {
-		return flags, err
+		return flags, fmt.Errorf("error getting tag with git describe: %w", err)
 	}
 
 	commit, _, err := utils.RunCommandAndReturnOutput("git rev-parse HEAD")
 	if err != nil {
-		return flags, err
+		return flags, fmt.Errorf("error getting commit with git rev-parse: %w", err)
 	}
 
 	var states []string
@@ -136,7 +136,7 @@ func main() {
 		cobraCommands = append(cobraCommands, command)
 	}
 
-	cobraCommands = append(cobraCommands, commands.HashPasswordCmd, commands.CertificatesCmd, commands.RSACmd, xflagsCmd)
+	cobraCommands = append(cobraCommands, commands.NewHashPasswordCmd(), commands.NewCertificatesCmd(), commands.NewRSACmd(), xflagsCmd)
 
 	rootCmd.PersistentFlags().BoolVar(&buildkite, "buildkite", false, "Set CI flag for Buildkite")
 	rootCmd.PersistentFlags().StringVar(&logLevel, "log-level", "info", "Set the log level for the command")
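The only functional change in the hunks above is that bare `return flags, err` statements now wrap the error with `%w`, so callers gain context without losing access to the underlying error. A minimal, standalone illustration of that pattern (the helper below is hypothetical, not the project's utils package):

```go
package main

import (
	"errors"
	"fmt"
	"os/exec"
)

func currentBranch() (string, error) {
	out, err := exec.Command("git", "rev-parse", "--abbrev-ref", "HEAD").Output()
	if err != nil {
		// %w wraps err so callers can still inspect it with errors.Is / errors.As.
		return "", fmt.Errorf("error getting branch with git rev-parse: %w", err)
	}

	return string(out), nil
}

func main() {
	if _, err := currentBranch(); err != nil {
		var exitErr *exec.ExitError
		// The wrapped error is still reachable behind the added context.
		fmt.Println(err, errors.As(err, &exitErr))
	}
}
```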
@@ -1,21 +0,0 @@
-package main
-
-const fmtAutheliaLong = `authelia %s
-
-An open-source authentication and authorization server providing
-two-factor authentication and single sign-on (SSO) for your
-applications via a web portal.
-
-Documentation is available at: https://www.authelia.com/docs
-`
-
-const fmtAutheliaBuild = `Last Tag: %s
-State: %s
-Branch: %s
-Commit: %s
-Build Number: %s
-Build OS: %s
-Build Arch: %s
-Build Date: %s
-Extra: %s
-`
@@ -1,188 +1,14 @@
 package main
 
 import (
-    "fmt"
-    "os"
-    "runtime"
-
-    "github.com/sirupsen/logrus"
-    "github.com/spf13/cobra"
-
-    "github.com/authelia/authelia/internal/authentication"
-    "github.com/authelia/authelia/internal/authorization"
     "github.com/authelia/authelia/internal/commands"
-    "github.com/authelia/authelia/internal/configuration"
     "github.com/authelia/authelia/internal/logging"
-    "github.com/authelia/authelia/internal/middlewares"
-    "github.com/authelia/authelia/internal/notification"
-    "github.com/authelia/authelia/internal/oidc"
-    "github.com/authelia/authelia/internal/regulation"
-    "github.com/authelia/authelia/internal/server"
-    "github.com/authelia/authelia/internal/session"
-    "github.com/authelia/authelia/internal/storage"
-    "github.com/authelia/authelia/internal/utils"
 )
 
-var configPathFlag string
-
-//nolint:gocyclo // TODO: Consider refactoring/simplifying, time permitting.
-func startServer() {
-    logger := logging.Logger()
-    config, errs := configuration.Read(configPathFlag)
-
-    if len(errs) > 0 {
-        for _, err := range errs {
-            logger.Error(err)
-        }
-
-        os.Exit(1)
-    }
-
-    autheliaCertPool, errs, nonFatalErrs := utils.NewX509CertPool(config.CertificatesDirectory)
-    if len(errs) > 0 {
-        for _, err := range errs {
-            logger.Error(err)
-        }
-
-        os.Exit(2)
-    }
-
-    if len(nonFatalErrs) > 0 {
-        for _, err := range nonFatalErrs {
-            logger.Warn(err)
-        }
-    }
-
-    if err := logging.InitializeLogger(config.Logging.Format, config.Logging.FilePath, config.Logging.KeepStdout); err != nil {
-        logger.Fatalf("Cannot initialize logger: %v", err)
-    }
-
-    logger.Infof("Authelia %s is starting", utils.Version())
-
-    switch config.Logging.Level {
-    case "error":
-        logger.Info("Logging severity set to error")
-        logging.SetLevel(logrus.ErrorLevel)
-    case "warn":
-        logger.Info("Logging severity set to warn")
-        logging.SetLevel(logrus.WarnLevel)
-    case "info":
-        logger.Info("Logging severity set to info")
-        logging.SetLevel(logrus.InfoLevel)
-    case "debug":
-        logger.Info("Logging severity set to debug")
-        logging.SetLevel(logrus.DebugLevel)
-    case "trace":
-        logger.Info("Logging severity set to trace")
-        logging.SetLevel(logrus.TraceLevel)
-    }
-
-    if os.Getenv("ENVIRONMENT") == "dev" {
-        logger.Info("===> Authelia is running in development mode. <===")
-    }
-
-    var storageProvider storage.Provider
-
-    switch {
-    case config.Storage.PostgreSQL != nil:
-        storageProvider = storage.NewPostgreSQLProvider(*config.Storage.PostgreSQL)
-    case config.Storage.MySQL != nil:
-        storageProvider = storage.NewMySQLProvider(*config.Storage.MySQL)
-    case config.Storage.Local != nil:
-        storageProvider = storage.NewSQLiteProvider(config.Storage.Local.Path)
-    default:
-        logger.Fatalf("Unrecognized storage backend")
-    }
-
-    var (
-        userProvider authentication.UserProvider
-        err          error
-    )
-
-    switch {
-    case config.AuthenticationBackend.File != nil:
-        userProvider = authentication.NewFileUserProvider(config.AuthenticationBackend.File)
-    case config.AuthenticationBackend.LDAP != nil:
-        userProvider, err = authentication.NewLDAPUserProvider(config.AuthenticationBackend, autheliaCertPool)
-        if err != nil {
-            logger.Fatalf("Failed to Check LDAP Authentication Backend: %v", err)
-        }
-    default:
-        logger.Fatalf("Unrecognized authentication backend")
-    }
-
-    var notifier notification.Notifier
-
-    switch {
-    case config.Notifier.SMTP != nil:
-        notifier = notification.NewSMTPNotifier(*config.Notifier.SMTP, autheliaCertPool)
-    case config.Notifier.FileSystem != nil:
-        notifier = notification.NewFileNotifier(*config.Notifier.FileSystem)
-    default:
-        logger.Fatalf("Unrecognized notifier")
-    }
-
-    if !config.Notifier.DisableStartupCheck {
-        _, err = notifier.StartupCheck()
-        if err != nil {
-            logger.Fatalf("Error during notifier startup check: %s", err)
-        }
-    }
-
-    clock := utils.RealClock{}
-    authorizer := authorization.NewAuthorizer(config)
-    sessionProvider := session.NewProvider(config.Session, autheliaCertPool)
-    regulator := regulation.NewRegulator(config.Regulation, storageProvider, clock)
-
-    oidcProvider, err := oidc.NewOpenIDConnectProvider(config.IdentityProviders.OIDC)
-    if err != nil {
-        logger.Fatalf("Error initializing OpenID Connect Provider: %+v", err)
-    }
-
-    providers := middlewares.Providers{
-        Authorizer:      authorizer,
-        UserProvider:    userProvider,
-        Regulator:       regulator,
-        OpenIDConnect:   oidcProvider,
-        StorageProvider: storageProvider,
-        Notifier:        notifier,
-        SessionProvider: sessionProvider,
-    }
-
-    server.StartServer(*config, providers)
-}
-
 func main() {
     logger := logging.Logger()
 
-    version := utils.Version()
-
-    rootCmd := &cobra.Command{
-        Use: "authelia",
-        Run: func(cmd *cobra.Command, args []string) {
-            startServer()
-        },
-        Version: version,
-        Short:   fmt.Sprintf("authelia %s", version),
-        Long:    fmt.Sprintf(fmtAutheliaLong, version),
-    }
-
-    rootCmd.Flags().StringVar(&configPathFlag, "config", "", "Configuration file")
-
-    buildCmd := &cobra.Command{
-        Use:   "build",
-        Short: "Show the build of Authelia",
-        Run: func(cmd *cobra.Command, args []string) {
-            fmt.Printf(fmtAutheliaBuild, utils.BuildTag, utils.BuildState, utils.BuildBranch, utils.BuildCommit,
-                utils.BuildNumber, runtime.GOOS, runtime.GOARCH, utils.BuildDate, utils.BuildExtra)
-        },
-    }
-
-    rootCmd.AddCommand(buildCmd, commands.HashPasswordCmd,
-        commands.ValidateConfigCmd, commands.CertificatesCmd,
-        commands.RSACmd)
-
-    if err := rootCmd.Execute(); err != nil {
+    if err := commands.NewRootCmd().Execute(); err != nil {
         logger.Fatal(err)
     }
 }
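The new entry point defers everything to `commands.NewRootCmd()`, whose definition lives outside this diff. A hypothetical sketch of the constructor-style pattern the refactor adopts (names and flags here are illustrative, not the real root command):

```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

// A constructor-style root command: commands are built by New*Cmd() functions
// instead of package-level variables populated in init().
func newRootCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "authelia-example",
		Short: "illustrative root command",
		RunE: func(cmd *cobra.Command, _ []string) error {
			configs, err := cmd.Flags().GetStringSlice("config")
			if err != nil {
				return err
			}

			fmt.Println("would start the server with configs:", configs)

			return nil
		},
	}

	// Repeating --config (or passing a comma-separated list) yields multiple files.
	cmd.PersistentFlags().StringSliceP("config", "c", []string{}, "Configuration files")

	return cmd
}

func main() {
	if err := newRootCmd().Execute(); err != nil {
		os.Exit(1)
	}
}
```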
@@ -29,21 +29,16 @@ default_redirection_url: https://home.example.com/
 ## Server Configuration
 ##
 server:
 
   ## The address to listen on.
   host: 0.0.0.0
 
   ## The port to listen on.
   port: 9091
 
-  ## Authelia by default doesn't accept TLS communication on the server port. This section overrides this behaviour.
-  tls:
-    ## The path to the DER base64/PEM format private key.
-    key: ""
-    # key: /config/ssl/key.pem
-
-    ## The path to the DER base64/PEM format public certificate.
-    certificate: ""
-    # certificate: /config/ssl/cert.pem
+  ## Set the single level path Authelia listens on.
+  ## Must be alphanumeric chars and should not contain any slashes.
+  path: ""
 
   ## Buffers usually should be configured to be the same value.
   ## Explanation at https://www.authelia.com/docs/configuration/server.html
@@ -52,16 +47,23 @@ server:
   read_buffer_size: 4096
   write_buffer_size: 4096
 
-  ## Set the single level path Authelia listens on.
-  ## Must be alphanumeric chars and should not contain any slashes.
-  path: ""
-
   ## Enables the pprof endpoint.
   enable_pprof: false
 
   ## Enables the expvars endpoint.
   enable_expvars: false
 
+  ## Authelia by default doesn't accept TLS communication on the server port. This section overrides this behaviour.
+  tls:
+    ## The path to the DER base64/PEM format private key.
+    key: ""
+
+    ## The path to the DER base64/PEM format public certificate.
+    certificate: ""
+
+##
+## Log Configuration
+##
 log:
   ## Level of verbosity for logs: info, debug, trace.
   level: debug
@@ -6,17 +6,61 @@ has_children: true
 ---
 
 # Configuration
 
-Authelia uses a YAML file as configuration file. A template with all possible options can be
-found [here](https://github.com/authelia/authelia/blob/master/config.template.yml), at the root of the repository.
+Authelia has several methods of configuration available to it. The order of precedence is as follows:
+
+1. [Secrets](./secrets.md)
+2. [Environment Variables](#environment)
+3. [Files](#files) (in order of them being specified)
+
+This order of precedence puts higher weight on things higher in the list. This means anything specified in the
+[files](#files) is overridden by [environment variables](#environment) if specified, and anything specified by
+[environment variables](#environment) is overridden by [secrets](./secrets.md) if specified.
+
+## Files
 
 When running **Authelia**, you can specify your configuration by passing the file path as shown below.
 
 ```console
 $ authelia --config config.custom.yml
 ```
 
-## Documentation
+You can have multiple configuration files which will be merged in the order specified. If duplicate keys are specified
+the last one to be specified is the one that takes precedence. Example:
+
+```console
+$ authelia --config config.yml --config config-acl.yml --config config-other.yml
+$ authelia --config config.yml,config-acl.yml,config-other.yml
+```
+
+Authelia's configuration files use the YAML format. A template with all possible options can be found at the root of the
+repository [here](https://github.com/authelia/authelia/blob/master/config.template.yml).
+
+## Environment
+
+You may also provide the configuration by using environment variables. Environment variables are applied after the
+configuration file meaning anything specified as part of the environment overrides the configuration files. The
+environment variables must be prefixed with `AUTHELIA_`.
+
+_**Please Note:** It is not possible to configure the access control rules section or OpenID Connect identity provider
+section using environment variables at this time._
+
+_**Please Note:** There are compatability issues with Kubernetes and this particular configuration option. You must ensure you
+have the `enableServiceLinks: false` setting in your pod spec._
+
+Underscores replace indented configuration sections or subkeys. For example the following environment variables replace
+the configuration snippet that follows it:
+
+```
+AUTHELIA_LOG_LEVEL=info
+AUTHELIA_SERVER_READ_BUFFER_SIZE=4096
+```
+
+```yaml
+log:
+  level: info
+server:
+  read_buffer_size: 4096
+```
+
+# Documentation
 
 We document the configuration in two ways:
@@ -33,7 +77,7 @@ We document the configuration in two ways:
 - The `required` label changes color. When required it will be red, when not required it will be green, when the
   required state depends on another configuration value it is yellow.
 
-## Validation
+# Validation
 
 Authelia validates the configuration when it starts. This process checks multiple factors including configuration keys
 that don't exist, configuration keys that have changed, the values of the keys are valid, and that a configuration
@@ -50,7 +94,7 @@ integrations, it only checks that your configuration syntax is valid.
 $ authelia validate-config configuration.yml
 ```
 
-## Duration Notation Format
+# Duration Notation Format
 
 We have implemented a string based notation for configuration options that take a duration. This section describes its
 usage. You can use this implementation in: session for expiration, inactivity, and remember_me_duration; and regulation
@@ -74,12 +118,12 @@ Examples:
 * 1 day: 1d
 * 10 hours: 10h
 
-## TLS Configuration
+# TLS Configuration
 
 Various sections of the configuration use a uniform configuration section called TLS. Notably LDAP and SMTP.
 This section documents the usage.
 
-### Server Name
+## Server Name
 <div markdown="1">
 type: string
 {: .label .label-config .label-purple }
@@ -92,7 +136,7 @@ required: no
 The key `server_name` overrides the name checked against the certificate in the verification process. Useful if you
 require to use a direct IP address for the address of the backend service but want to verify a specific SNI.
 
-### Skip Verify
+## Skip Verify
 <div markdown="1">
 type: boolean
 {: .label .label-config .label-purple }
@@ -103,9 +147,9 @@ required: no
 </div>
 
 The key `skip_verify` completely negates validating the certificate of the backend service. This is not recommended,
-instead you should tweak the `server_name` option, and the global option [certificates_directory](./miscellaneous.md#certificates_directory).
+instead you should tweak the `server_name` option, and the global option [certificates directory](./miscellaneous.md#certificates_directory).
 
-### Minimum Version
+## Minimum Version
 <div markdown="1">
 type: string
 {: .label .label-config .label-purple }
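To make the "last file wins" rule above concrete, here is a small standalone koanf snippet (not Authelia code) showing a duplicate key being overridden by the provider loaded last:

```go
package main

import (
	"fmt"
	"log"

	"github.com/knadh/koanf"
	"github.com/knadh/koanf/providers/confmap"
)

func main() {
	k := koanf.New(".")

	// First "file": log.level = info.
	if err := k.Load(confmap.Provider(map[string]interface{}{"log.level": "info"}, "."), nil); err != nil {
		log.Fatal(err)
	}

	// Second "file": the duplicate key wins because it is loaded last.
	if err := k.Load(confmap.Provider(map[string]interface{}{"log.level": "debug"}, "."), nil); err != nil {
		log.Fatal(err)
	}

	fmt.Println(k.String("log.level")) // prints "debug"
}
```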
@@ -7,32 +7,30 @@ nav_order: 10
 
 # Secrets
 
-Configuration of Authelia requires some secrets and passwords.
-Even if they can be set in the configuration file, the recommended
-way to set secrets is to use environment variables as described
-below.
+Configuration of Authelia requires some secrets and passwords. Even if they can be set in the configuration file or
+standard environment variables, the recommended way to set secrets is to use environment variables as described below.
 
 ## Environment variables
 
-A secret can be configured using an environment variable with the
-prefix AUTHELIA_ followed by the path of the option capitalized
-and with dots replaced by underscores followed by the suffix _FILE.
-
-The contents of the environment variable must be a path to a file
-containing the secret data. This file must be readable by the
-user the Authelia daemon is running as.
+A secret value can be loaded by Authelia when the configuration key ends with one of the following words: `key`,
+`secret`, `password`, or `token`.
+
+If you take the expected environment variable for the configuration option with the `_FILE` suffix at the end. The value
+of these environment variables must be the path of a file that is readable by the Authelia process, if they are not,
+Authelia will fail to load. Authelia will automatically remove the newlines from the end of the files contents.
 
 For instance the LDAP password can be defined in the configuration
 at the path **authentication_backend.ldap.password**, so this password
 could alternatively be set using the environment variable called
-**AUTHELIA_AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE**.
+**AUTHELIA__AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE**.
 
-Here is the list of the environment variables which are considered
-secrets and can be defined. Any other option defined using an
-environment variable will not be replaced.
+Here is the list of the environment variables which are considered secrets and can be defined. Please note that only
+secrets can be loaded into the configuration if they end with one of the suffixes above, you can set the value of any
+other configuration using the environment but instead of loading a file the value of the environment variable is used.
 
 |Configuration Key |Environment Variable |
 |:-----------------------------------------------:|:------------------------------------------------------:|
+|tls_key |AUTHELIA_TLS_KEY_FILE |
 |jwt_secret |AUTHELIA_JWT_SECRET_FILE |
 |duo_api.secret_key |AUTHELIA_DUO_API_SECRET_KEY_FILE |
 |session.secret |AUTHELIA_SESSION_SECRET_FILE |
@@ -47,21 +45,21 @@ environment variable will not be replaced.
 
 ## Secrets in configuration file
 
-If for some reason you prefer keeping the secrets in the configuration
-file, be sure to apply the right permissions to the file in order to
-prevent secret leaks if an another application gets compromised on your
-server. The UNIX permissions should probably be something like 600.
+If for some reason you decide on keeping the secrets in the configuration file, it is strongly recommended that you
+ensure the permissions of the configuration file are appropriately set so that other users or processes cannot access
+this file. Generally the UNIX permissions that are appropriate are 0600.
 
 ## Secrets exposed in an environment variable
 
-**DEPRECATION NOTICE:** This backwards compatibility feature **has been removed** in 4.18.0+.
+In all versions 4.30.0+ you can technically set secrets using the environment variables without the `_FILE` suffix by
+setting the value to the value you wish to set in configuration, however we strongly urge people not to use this option
+and instead use the file-based secrets above.
 
-Prior to implementing file secrets you were able to define the
-values of secrets in the environment variables themselves
-in plain text instead of referencing a file. **This is no longer available
-as an option**, please see the table above for the file based replacements. See
-[this article](https://diogomonica.com/2017/03/27/why-you-shouldnt-use-env-variables-for-secret-data/)
-for reasons why this was removed.
+Prior to implementing file secrets the only way you were able to define secret values was either via configuration or
+via environment variables in plain text.
+
+See [this article](https://diogomonica.com/2017/03/27/why-you-shouldnt-use-env-variables-for-secret-data/) for reasons
+why setting them via the file counterparts is highly encouraged.
 
 ## Docker
 
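As a rough illustration of the `_FILE` behaviour described above (the variable holds a path, the secret is read from that file, trailing newlines are stripped), the following standalone sketch mimics the documented rules; it is not Authelia's implementation:

```go
package main

import (
	"fmt"
	"log"
	"os"
	"strings"
)

// loadFileSecret reads a secret whose location is given by an environment
// variable ending in _FILE, trimming trailing newlines as documented.
func loadFileSecret(envName string) (string, error) {
	path := os.Getenv(envName)
	if path == "" {
		return "", fmt.Errorf("%s is not set", envName)
	}

	data, err := os.ReadFile(path)
	if err != nil {
		return "", fmt.Errorf("could not read secret file for %s: %w", envName, err)
	}

	return strings.TrimRight(string(data), "\n"), nil
}

func main() {
	secret, err := loadFileSecret("AUTHELIA_JWT_SECRET_FILE")
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println("loaded a secret of length", len(secret))
}
```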
@@ -15,9 +15,9 @@ The server section configures and tunes the http server module Authelia uses.
 server:
   host: 0.0.0.0
   port: 9091
+  path: ""
   read_buffer_size: 4096
   write_buffer_size: 4096
-  path: ""
   enable_pprof: false
   enable_expvars: false
   tls:
@@ -58,30 +58,6 @@ required: no
 
 Defines the port to listen on. See also [host](#host).
 
-### read_buffer_size
-<div markdown="1">
-type: integer
-{: .label .label-config .label-purple }
-default: 4096
-{: .label .label-config .label-blue }
-required: no
-{: .label .label-config .label-green }
-</div>
-
-Configures the maximum request size. The default of 4096 is generally sufficient for most use cases.
-
-### write_buffer_size
-<div markdown="1">
-type: integer
-{: .label .label-config .label-purple }
-default: 4096
-{: .label .label-config .label-blue }
-required: no
-{: .label .label-config .label-green }
-</div>
-
-Configures the maximum response size. The default of 4096 is generally sufficient for most use cases.
-
 ### path
 <div markdown="1">
 type: string
@@ -110,6 +86,30 @@ server:
   path: authelia
 ```
 
+### read_buffer_size
+<div markdown="1">
+type: integer
+{: .label .label-config .label-purple }
+default: 4096
+{: .label .label-config .label-blue }
+required: no
+{: .label .label-config .label-green }
+</div>
+
+Configures the maximum request size. The default of 4096 is generally sufficient for most use cases.
+
+### write_buffer_size
+<div markdown="1">
+type: integer
+{: .label .label-config .label-purple }
+default: 4096
+{: .label .label-config .label-blue }
+required: no
+{: .label .label-config .label-green }
+</div>
+
+Configures the maximum response size. The default of 4096 is generally sufficient for most use cases.
+
 ### enable_pprof
 <div markdown="1">
 type: boolean
go.mod (3 changes)

@@ -18,6 +18,8 @@ require (
     github.com/golang/mock v1.6.0
     github.com/google/uuid v1.3.0
     github.com/jackc/pgx/v4 v4.13.0
+    github.com/knadh/koanf v1.1.1
+    github.com/mitchellh/mapstructure v1.4.1
     github.com/ory/fosite v0.40.2
     github.com/ory/herodot v0.9.7
     github.com/otiai10/copy v1.6.0
@@ -26,7 +28,6 @@ require (
     github.com/simia-tech/crypt v0.5.0
     github.com/sirupsen/logrus v1.8.1
     github.com/spf13/cobra v1.2.1
-    github.com/spf13/viper v1.8.1
     github.com/stretchr/testify v1.7.0
     github.com/tebeka/selenium v0.9.9
     github.com/tstranex/u2f v1.0.0
go.sum (6 changes)

@@ -817,6 +817,7 @@ github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhB
 github.com/jmoiron/sqlx v1.3.1/go.mod h1:2BljVx/86SuTyjE+aPYlHCTNvZrnJXghYGpNiXLBMCQ=
 github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901/go.mod h1:Z86h9688Y0wesXCyonoVr47MasHilkuLMqGhRZ4Hpak=
 github.com/joho/godotenv v1.2.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
+github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
 github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
 github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
 github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
@@ -854,6 +855,8 @@ github.com/klauspost/compress v1.12.2 h1:2KCfW3I9M7nSc5wOqXAlW2v2U6v+w6cbjvbfp+O
 github.com/klauspost/compress v1.12.2/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg=
 github.com/knadh/koanf v0.14.1-0.20201201075439-e0853799f9ec/go.mod h1:H5mEFsTeWizwFXHKtsITL5ipsLTuAMQoGuQpp+1JL9U=
 github.com/knadh/koanf v1.0.0/go.mod h1:vrMMuhIH0k7EoxiMbVfFlRvJYmxcT2Eha3DH8Tx5+X4=
+github.com/knadh/koanf v1.1.1 h1:doO5UBvSXcmngdr/u54HKe+Uz4ZZw0/YHVzSsnE3vD4=
+github.com/knadh/koanf v1.1.1/go.mod h1:xpPTwMhsA/aaQLAilyCCqfpEiY1gpa160AiCuWHJUjY=
 github.com/konsorten/go-windows-terminal-sequences v0.0.0-20180402223658-b729f2633dfe/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
 github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
 github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
@@ -947,6 +950,7 @@ github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/le
 github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
 github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
 github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
+github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
 github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
 github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
 github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
@@ -963,6 +967,7 @@ github.com/mitchellh/mapstructure v1.3.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RR
 github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag=
 github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
 github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
+github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
 github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
 github.com/moby/term v0.0.0-20200915141129-7f0af18e79f2/go.mod h1:TjQg8pa4iejrUrjiz0MCtMV38jdMNW4doKSiBrEvCQQ=
 github.com/moby/term v0.0.0-20201216013528-df9cb8a40635/go.mod h1:FBS0z0QWA44HXygs7VXDUOGoN/1TV3RuWkLO04am3wc=
@@ -1257,7 +1262,6 @@ github.com/spf13/viper v1.3.1/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DM
 github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
 github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
 github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg=
-github.com/spf13/viper v1.8.1 h1:Kq1fyeebqsBfbjZj4EL7gj2IO0mMaiyjYUWcUsl2O44=
 github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns=
 github.com/sqs/goreturns v0.0.0-20181028201513-538ac6014518 h1:iD+PFTQwKEmbwSdwfvP5ld2WEI/g7qbdhmHJ2ASfYGs=
 github.com/sqs/goreturns v0.0.0-20181028201513-538ac6014518/go.mod h1:CKI4AZ4XmGV240rTHfO0hfE83S6/a3/Q1siZJ/vXf7A=
internal/commands/build-info.go (new file)

@@ -0,0 +1,29 @@
+package commands
+
+import (
+    "fmt"
+    "runtime"
+
+    "github.com/spf13/cobra"
+
+    "github.com/authelia/authelia/internal/utils"
+)
+
+func newBuildInfoCmd() (cmd *cobra.Command) {
+    cmd = &cobra.Command{
+        Use:   "build-info",
+        Short: "Show the build information of Authelia",
+        Long:  buildLong,
+        RunE:  cmdBuildInfoRunE,
+        Args:  cobra.NoArgs,
+    }
+
+    return cmd
+}
+
+func cmdBuildInfoRunE(_ *cobra.Command, _ []string) (err error) {
+    _, err = fmt.Printf(fmtAutheliaBuild, utils.BuildTag, utils.BuildState, utils.BuildBranch, utils.BuildCommit,
+        utils.BuildNumber, runtime.GOOS, runtime.GOARCH, utils.BuildDate, utils.BuildExtra)
+
+    return err
+}
@@ -9,67 +9,192 @@ import (
 	"crypto/x509"
 	"crypto/x509/pkix"
 	"encoding/pem"
+	"fmt"
 	"log"
 	"math/big"
 	"net"
 	"os"
-	"path"
-	"strings"
+	"path/filepath"
 	"time"
 
 	"github.com/spf13/cobra"
 )
 
-var (
-	host                       string
-	validFrom                  string
-	validFor                   time.Duration
-	isCA                       bool
-	rsaBits                    int
-	ecdsaCurve                 string
-	ed25519Key                 bool
-	certificateTargetDirectory string
-)
-
-func init() {
-	CertificatesGenerateCmd.PersistentFlags().StringVar(&host, "host", "", "Comma-separated hostnames and IPs to generate a certificate for")
-	err := CertificatesGenerateCmd.MarkPersistentFlagRequired("host")
+// NewCertificatesCmd returns a new Certificates Cmd.
+func NewCertificatesCmd() (cmd *cobra.Command) {
+	cmd = &cobra.Command{
+		Use:   "certificates",
+		Short: "Commands related to certificate generation",
+		Args:  cobra.NoArgs,
+	}
+
+	cmd.PersistentFlags().StringSlice("host", []string{}, "Comma-separated hostnames and IPs to generate a certificate for")
+
+	err := cmd.MarkPersistentFlagRequired("host")
 	if err != nil {
 		log.Fatal(err)
 	}
 
-	CertificatesGenerateCmd.PersistentFlags().StringVar(&validFrom, "start-date", "", "Creation date formatted as Jan 1 15:04:05 2011")
-	CertificatesGenerateCmd.PersistentFlags().DurationVar(&validFor, "duration", 365*24*time.Hour, "Duration that certificate is valid for")
-	CertificatesGenerateCmd.PersistentFlags().BoolVar(&isCA, "ca", false, "Whether this cert should be its own Certificate Authority")
-	CertificatesGenerateCmd.PersistentFlags().IntVar(&rsaBits, "rsa-bits", 2048, "Size of RSA key to generate. Ignored if --ecdsa-curve is set")
-	CertificatesGenerateCmd.PersistentFlags().StringVar(&ecdsaCurve, "ecdsa-curve", "", "ECDSA curve to use to generate a key. Valid values are P224, P256 (recommended), P384, P521")
-	CertificatesGenerateCmd.PersistentFlags().BoolVar(&ed25519Key, "ed25519", false, "Generate an Ed25519 key")
-	CertificatesGenerateCmd.PersistentFlags().StringVar(&certificateTargetDirectory, "dir", "", "Target directory where the certificate and keys will be stored")
-
-	CertificatesCmd.AddCommand(CertificatesGenerateCmd)
+	cmd.AddCommand(newCertificatesGenerateCmd())
+
+	return cmd
 }
 
-func publicKey(priv interface{}) interface{} {
-	switch k := priv.(type) {
-	case *rsa.PrivateKey:
-		return &k.PublicKey
-	case *ecdsa.PrivateKey:
-		return &k.PublicKey
-	case ed25519.PrivateKey:
-		return k.Public().(ed25519.PublicKey)
-	default:
-		return nil
-	}
+func newCertificatesGenerateCmd() (cmd *cobra.Command) {
+	cmd = &cobra.Command{
+		Use:   "generate",
+		Short: "Generate a self-signed certificate",
+		Args:  cobra.NoArgs,
+		Run:   cmdCertificatesGenerateRun,
+	}
+
+	cmd.Flags().String("start-date", "", "Creation date formatted as Jan 1 15:04:05 2011")
+	cmd.Flags().Duration("duration", 365*24*time.Hour, "Duration that certificate is valid for")
+	cmd.Flags().Bool("ca", false, "Whether this cert should be its own Certificate Authority")
+	cmd.Flags().Int("rsa-bits", 2048, "Size of RSA key to generate. Ignored if --ecdsa-curve is set")
+	cmd.Flags().String("ecdsa-curve", "", "ECDSA curve to use to generate a key. Valid values are P224, P256 (recommended), P384, P521")
+	cmd.Flags().Bool("ed25519", false, "Generate an Ed25519 key")
+	cmd.Flags().String("dir", "", "Target directory where the certificate and keys will be stored")
+
+	return cmd
 }
 
-//nolint:gocyclo // TODO: Consider refactoring/simplifying, time permitting.
-func generateSelfSignedCertificate(cmd *cobra.Command, args []string) {
+func cmdCertificatesGenerateRun(cmd *cobra.Command, _ []string) {
 	// implementation retrieved from https://golang.org/src/crypto/tls/generate_cert.go
-	var priv interface{}
-
-	var err error
+	ecdsaCurve, err := cmd.Flags().GetString("ecdsa-curve")
+	if err != nil {
+		fmt.Printf("Failed to parse ecdsa-curve flag: %v\n", err)
+		os.Exit(1)
+	}
+
+	ed25519Key, err := cmd.Flags().GetBool("ed25519")
+	if err != nil {
+		fmt.Printf("Failed to parse ed25519 flag: %v\n", err)
+		os.Exit(1)
+	}
+
+	rsaBits, err := cmd.Flags().GetInt("rsa-bits")
+	if err != nil {
+		fmt.Printf("Failed to parse rsa-bits flag: %v\n", err)
+		os.Exit(1)
+	}
+
+	hosts, err := cmd.Flags().GetStringSlice("host")
+	if err != nil {
+		fmt.Printf("Failed to parse host flag: %v\n", err)
+		os.Exit(1)
+	}
+
+	validFrom, err := cmd.Flags().GetString("start-date")
+	if err != nil {
+		fmt.Printf("Failed to parse start-date flag: %v\n", err)
+		os.Exit(1)
+	}
+
+	validFor, err := cmd.Flags().GetDuration("duration")
+	if err != nil {
+		fmt.Printf("Failed to parse duration flag: %v\n", err)
+		os.Exit(1)
+	}
+
+	isCA, err := cmd.Flags().GetBool("ca")
+	if err != nil {
+		fmt.Printf("Failed to parse ca flag: %v\n", err)
+		os.Exit(1)
+	}
+
+	certificateTargetDirectory, err := cmd.Flags().GetString("dir")
+	if err != nil {
+		fmt.Printf("Failed to parse dir flag: %v\n", err)
+		os.Exit(1)
+	}
+
+	cmdCertificatesGenerateRunExtended(hosts, ecdsaCurve, validFrom, certificateTargetDirectory, ed25519Key, isCA, rsaBits, validFor)
+}
+
+func cmdCertificatesGenerateRunExtended(hosts []string, ecdsaCurve, validFrom, certificateTargetDirectory string, ed25519Key, isCA bool, rsaBits int, validFor time.Duration) {
+	priv, err := getPrivateKey(ecdsaCurve, ed25519Key, rsaBits)
+
+	if err != nil {
+		fmt.Printf("Failed to generate private key: %v\n", err)
+		os.Exit(1)
+	}
+
+	var notBefore time.Time
+
+	switch len(validFrom) {
+	case 0:
+		notBefore = time.Now()
+	default:
+		notBefore, err = time.Parse("Jan 2 15:04:05 2006", validFrom)
+		if err != nil {
+			fmt.Printf("Failed to parse start date: %v\n", err)
+			os.Exit(1)
+		}
+	}
+
+	notAfter := notBefore.Add(validFor)
+
+	serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)
+
+	serialNumber, err := rand.Int(rand.Reader, serialNumberLimit)
+	if err != nil {
+		fmt.Printf("Failed to generate serial number: %v\n", err)
+		os.Exit(1)
+	}
+
+	template := x509.Certificate{
+		SerialNumber: serialNumber,
+		Subject: pkix.Name{
+			Organization: []string{"Acme Co"},
+		},
+		NotBefore: notBefore,
+		NotAfter:  notAfter,
+
+		KeyUsage:              x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,
+		ExtKeyUsage:           []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
+		BasicConstraintsValid: true,
+	}
+
+	for _, h := range hosts {
+		if ip := net.ParseIP(h); ip != nil {
+			template.IPAddresses = append(template.IPAddresses, ip)
+		} else {
+			template.DNSNames = append(template.DNSNames, h)
+		}
+	}
+
+	if isCA {
+		template.IsCA = true
+		template.KeyUsage |= x509.KeyUsageCertSign
+	}
+
+	certPath := filepath.Join(certificateTargetDirectory, "cert.pem")
+
+	derBytes, err := x509.CreateCertificate(rand.Reader, &template, &template, publicKey(priv), priv)
+	if err != nil {
+		fmt.Printf("Failed to create certificate: %v\n", err)
+		os.Exit(1)
+	}
+
+	writePEM(derBytes, "CERTIFICATE", certPath)
+
+	fmt.Printf("Certificate Public Key written to %s\n", certPath)
+
+	keyPath := filepath.Join(certificateTargetDirectory, "key.pem")
+
+	privBytes, err := x509.MarshalPKCS8PrivateKey(priv)
+	if err != nil {
+		fmt.Printf("Failed to marshal private key: %v\n", err)
+		os.Exit(1)
+	}
+
+	writePEM(privBytes, "PRIVATE KEY", keyPath)
+
+	fmt.Printf("Certificate Private Key written to %s\n", keyPath)
+}
+
+func getPrivateKey(ecdsaCurve string, ed25519Key bool, rsaBits int) (priv interface{}, err error) {
 	switch ecdsaCurve {
 	case "":
 		if ed25519Key {
@@ -86,114 +211,39 @@ func generateSelfSignedCertificate(cmd *cobra.Command, args []string) {
 	case "P521":
 		priv, err = ecdsa.GenerateKey(elliptic.P521(), rand.Reader)
 	default:
-		log.Fatalf("Unrecognized elliptic curve: %q", ecdsaCurve)
+		err = fmt.Errorf("unrecognized elliptic curve: %q", ecdsaCurve)
 	}
 
+	return priv, err
+}
+
+func writePEM(bytes []byte, blockType, path string) {
+	keyOut, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
 	if err != nil {
-		log.Fatalf("Failed to generate private key: %v", err)
+		fmt.Printf("Failed to open %s for writing: %v\n", path, err)
+		os.Exit(1)
 	}
 
-	var notBefore time.Time
-
-	if len(validFrom) == 0 {
-		notBefore = time.Now()
-	} else {
-		notBefore, err = time.Parse("Jan 2 15:04:05 2006", validFrom)
-		if err != nil {
-			log.Fatalf("Failed to parse creation date: %v", err)
-		}
-	}
-
-	notAfter := notBefore.Add(validFor)
-
-	serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)
-	serialNumber, err := rand.Int(rand.Reader, serialNumberLimit)
-
-	if err != nil {
-		log.Fatalf("Failed to generate serial number: %v", err)
-	}
-
-	template := x509.Certificate{
-		SerialNumber: serialNumber,
-		Subject: pkix.Name{
-			Organization: []string{"Acme Co"},
-		},
-		NotBefore: notBefore,
-		NotAfter:  notAfter,
-
-		KeyUsage:              x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,
-		ExtKeyUsage:           []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
-		BasicConstraintsValid: true,
-	}
-
-	hosts := strings.Split(host, ",")
-	for _, h := range hosts {
-		if ip := net.ParseIP(h); ip != nil {
-			template.IPAddresses = append(template.IPAddresses, ip)
-		} else {
-			template.DNSNames = append(template.DNSNames, h)
-		}
-	}
-
-	if isCA {
-		template.IsCA = true
-		template.KeyUsage |= x509.KeyUsageCertSign
-	}
-
-	derBytes, err := x509.CreateCertificate(rand.Reader, &template, &template, publicKey(priv), priv)
-	if err != nil {
-		log.Fatalf("Failed to create certificate: %v", err)
-	}
-
-	certPath := path.Join(certificateTargetDirectory, "cert.pem")
-	certOut, err := os.Create(certPath)
-
-	if err != nil {
-		log.Fatalf("Failed to open %s for writing: %v", certPath, err)
-	}
-
-	if err := pem.Encode(certOut, &pem.Block{Type: "CERTIFICATE", Bytes: derBytes}); err != nil {
-		log.Fatalf("Failed to write data to cert.pem: %v", err)
-	}
-
-	if err := certOut.Close(); err != nil {
-		log.Fatalf("Error closing %s: %v", certPath, err)
-	}
-
-	log.Printf("wrote %s\n", certPath)
-
-	keyPath := path.Join(certificateTargetDirectory, "key.pem")
-	keyOut, err := os.OpenFile(keyPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
-
-	if err != nil {
-		log.Fatalf("Failed to open %s for writing: %v", keyPath, err)
-		return
-	}
-
-	privBytes, err := x509.MarshalPKCS8PrivateKey(priv)
-	if err != nil {
-		log.Fatalf("Unable to marshal private key: %v", err)
-	}
-
-	if err := pem.Encode(keyOut, &pem.Block{Type: "PRIVATE KEY", Bytes: privBytes}); err != nil {
-		log.Fatalf("Failed to write data to %s: %v", keyPath, err)
+	if err := pem.Encode(keyOut, &pem.Block{Type: blockType, Bytes: bytes}); err != nil {
+		fmt.Printf("Failed to write data to %s: %v\n", path, err)
+		os.Exit(1)
 	}
 
 	if err := keyOut.Close(); err != nil {
-		log.Fatalf("Error closing %s: %v", keyPath, err)
+		fmt.Printf("Error closing %s: %v\n", path, err)
+		os.Exit(1)
 	}
-
-	log.Printf("wrote %s\n", keyPath)
 }
 
-// CertificatesCmd certificate helper command.
-var CertificatesCmd = &cobra.Command{
-	Use:   "certificates",
-	Short: "Commands related to certificate generation",
-}
-
-// CertificatesGenerateCmd certificate generation command.
-var CertificatesGenerateCmd = &cobra.Command{
-	Use:   "generate",
-	Short: "Generate a self-signed certificate",
-	Run:   generateSelfSignedCertificate,
-}
+func publicKey(priv interface{}) interface{} {
+	switch k := priv.(type) {
+	case *rsa.PrivateKey:
+		return &k.PublicKey
+	case *ecdsa.PrivateKey:
+		return &k.PublicKey
+	case ed25519.PrivateKey:
+		return k.Public().(ed25519.PublicKey)
+	default:
+		return nil
+	}
+}
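The rewritten command reads every option through `cmd.Flags().Get*` instead of package-level variables bound at init time. A compact standalone example of defining and reading a typed cobra flag that way (hypothetical command, not part of the diff):

```go
package main

import (
	"fmt"
	"os"
	"time"

	"github.com/spf13/cobra"
)

func main() {
	cmd := &cobra.Command{
		Use: "generate-example",
		Run: func(cmd *cobra.Command, _ []string) {
			// Typed accessors replace the old package-level flag variables.
			validFor, err := cmd.Flags().GetDuration("duration")
			if err != nil {
				fmt.Printf("Failed to parse duration flag: %v\n", err)
				os.Exit(1)
			}

			fmt.Println("certificate would be valid for", validFor)
		},
	}

	cmd.Flags().Duration("duration", 365*24*time.Hour, "Duration that certificate is valid for")

	if err := cmd.Execute(); err != nil {
		os.Exit(1)
	}
}
```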
internal/commands/completion.go (new file)

@@ -0,0 +1,45 @@
+package commands
+
+import (
+    "fmt"
+    "os"
+
+    "github.com/spf13/cobra"
+)
+
+func newCompletionCmd() (cmd *cobra.Command) {
+    cmd = &cobra.Command{
+        Use:                   "completion [bash|zsh|fish|powershell]",
+        Short:                 "Generate completion script",
+        Long:                  completionLong,
+        Args:                  cobra.ExactValidArgs(1),
+        ValidArgs:             []string{"bash", "zsh", "fish", "powershell"},
+        DisableFlagsInUseLine: true,
+        Run:                   cmdCompletionRun,
+    }
+
+    return cmd
+}
+
+func cmdCompletionRun(cmd *cobra.Command, args []string) {
+    var err error
+
+    switch args[0] {
+    case "bash":
+        err = cmd.Root().GenBashCompletion(os.Stdout)
+    case "zsh":
+        err = cmd.Root().GenZshCompletion(os.Stdout)
+    case "fish":
+        err = cmd.Root().GenFishCompletion(os.Stdout, true)
+    case "powershell":
+        err = cmd.Root().GenPowerShellCompletionWithDesc(os.Stdout)
+    default:
+        fmt.Printf("Invalid shell provided for completion command: %s\n", args[0])
+        os.Exit(1)
+    }
+
+    if err != nil {
+        fmt.Printf("Error generating completion: %v\n", err)
+        os.Exit(1)
+    }
+}
internal/commands/configuration.go (new file, 75 lines)
@@ -0,0 +1,75 @@
package commands

import (
	"os"

	"github.com/spf13/cobra"

	"github.com/authelia/authelia/internal/configuration"
	"github.com/authelia/authelia/internal/configuration/schema"
	"github.com/authelia/authelia/internal/configuration/validator"
	"github.com/authelia/authelia/internal/logging"
)

// cmdWithConfigFlags is used for commands which require access to the configuration to add the flag to the command.
func cmdWithConfigFlags(cmd *cobra.Command) {
	cmd.Flags().StringSliceP("config", "c", []string{}, "Configuration files")
}

var config *schema.Configuration

func newCmdWithConfigPreRun(ensureConfigExists, validateKeys, validateConfiguration bool) func(cmd *cobra.Command, args []string) {
	return func(cmd *cobra.Command, _ []string) {
		logger := logging.Logger()

		configs, err := cmd.Root().Flags().GetStringSlice("config")
		if err != nil {
			logger.Fatalf("Error reading flags: %v", err)
		}

		if ensureConfigExists && len(configs) == 1 {
			created, err := configuration.EnsureConfigurationExists(configs[0])
			if err != nil {
				logger.Fatal(err)
			}

			if created {
				logger.Warnf("Configuration did not exist so a default one has been generated at %s, you will need to configure this", configs[0])
				os.Exit(0)
			}
		}

		var keys []string

		val := schema.NewStructValidator()

		keys, config, err = configuration.Load(val, configuration.NewDefaultSources(configs, configuration.DefaultEnvPrefix, configuration.DefaultEnvDelimiter)...)
		if err != nil {
			logger.Fatalf("Error occurred loading configuration: %v", err)
		}

		if validateKeys {
			validator.ValidateKeys(keys, configuration.DefaultEnvPrefix, val)
		}

		if validateConfiguration {
			validator.ValidateConfiguration(config, val)
		}

		warnings := val.Warnings()
		if len(warnings) != 0 {
			for _, warning := range warnings {
				logger.Warnf("Configuration: %+v", warning)
			}
		}

		errs := val.Errors()
		if len(errs) != 0 {
			for _, err := range errs {
				logger.Errorf("Configuration: %+v", err)
			}

			logger.Fatalf("Can't continue due to the errors loading the configuration")
		}
	}
}
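For orientation, a minimal sketch (not part of the commit) of how these two helpers are meant to be combined on a command inside package commands. The `serve` command name and its Run body are hypothetical and only illustrate the wiring; note the PreRun reads the flag from cmd.Root().Flags(), so when a command is nested under a root the --config flag must be registered on that root, exactly as NewRootCmd does further below.

package commands

import (
	"fmt"

	"github.com/spf13/cobra"
)

// newExampleServeCmd is a hypothetical subcommand: cmdWithConfigFlags adds the
// repeatable --config flag, and the PreRun built by newCmdWithConfigPreRun loads
// and validates the configuration into the package-level `config` variable
// before Run executes.
func newExampleServeCmd() (cmd *cobra.Command) {
	cmd = &cobra.Command{
		Use:    "serve",
		Short:  "Hypothetical command that needs the loaded configuration",
		PreRun: newCmdWithConfigPreRun(true, true, true),
		Run: func(_ *cobra.Command, _ []string) {
			// By the time Run executes, PreRun has populated `config` or logged fatally and exited.
			fmt.Printf("configuration loaded: %v\n", config != nil)
		},
	}

	// Register the --config flag on this command; used standalone, cmd.Root() is the command itself.
	cmdWithConfigFlags(cmd)

	return cmd
}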
internal/commands/const.go (new file, 77 lines)
@@ -0,0 +1,77 @@
package commands

const cmdAutheliaExample = `authelia --config /etc/authelia/config.yml --config /etc/authelia/access-control.yml
authelia --config /etc/authelia/config.yml,/etc/authelia/access-control.yml
authelia --config /etc/authelia/config/
`

const fmtAutheliaLong = `authelia %s

An open-source authentication and authorization server providing
two-factor authentication and single sign-on (SSO) for your
applications via a web portal.

Documentation is available at: https://www.authelia.com/docs
`

const fmtAutheliaBuild = `Last Tag: %s
State: %s
Branch: %s
Commit: %s
Build Number: %s
Build OS: %s
Build Arch: %s
Build Date: %s
Extra: %s
`

const buildLong = `Show the build information of Authelia

This outputs detailed version information about the specific version
of the Authelia binary. This information is embedded into Authelia
by the continuous integration.

This could be vital in debugging if you're not using a particular
tagged build of Authelia. It's suggested to provide it along with
your issue.
`

const completionLong = `To load completions:

Bash:

$ source <(authelia completion bash)

# To load completions for each session, execute once:
# Linux:
$ authelia completion bash > /etc/bash_completion.d/authelia
# macOS:
$ authelia completion bash > /usr/local/etc/bash_completion.d/authelia

Zsh:

# If shell completion is not already enabled in your environment,
# you will need to enable it. You can execute the following once:

$ echo "autoload -U compinit; compinit" >> ~/.zshrc

# To load completions for each session, execute once:
$ authelia completion zsh > "${fpath[1]}/_authelia"

# You will need to start a new shell for this setup to take effect.

fish:

$ authelia completion fish | source

# To load completions for each session, execute once:
$ authelia completion fish > ~/.config/fish/completions/authelia.fish

PowerShell:

PS> authelia completion powershell | Out-String | Invoke-Expression

# To load completions for every new session, run:
PS> authelia completion powershell > authelia.ps1
# and source this file from your PowerShell profile.
`
@@ -2,60 +2,67 @@ package commands
 
 import (
 	"fmt"
-	"log"
 
 	"github.com/simia-tech/crypt"
 	"github.com/spf13/cobra"
 
 	"github.com/authelia/authelia/internal/authentication"
 	"github.com/authelia/authelia/internal/configuration/schema"
+	"github.com/authelia/authelia/internal/logging"
 )
 
-func init() {
-	HashPasswordCmd.Flags().BoolP("sha512", "z", false, fmt.Sprintf("use sha512 as the algorithm (changes iterations to %d, change with -i)", schema.DefaultPasswordSHA512Configuration.Iterations))
-	HashPasswordCmd.Flags().IntP("iterations", "i", schema.DefaultPasswordConfiguration.Iterations, "set the number of hashing iterations")
-	HashPasswordCmd.Flags().StringP("salt", "s", "", "set the salt string")
-	HashPasswordCmd.Flags().IntP("memory", "m", schema.DefaultPasswordConfiguration.Memory, "[argon2id] set the amount of memory param (in MB)")
-	HashPasswordCmd.Flags().IntP("parallelism", "p", schema.DefaultPasswordConfiguration.Parallelism, "[argon2id] set the parallelism param")
-	HashPasswordCmd.Flags().IntP("key-length", "k", schema.DefaultPasswordConfiguration.KeyLength, "[argon2id] set the key length param")
-	HashPasswordCmd.Flags().IntP("salt-length", "l", schema.DefaultPasswordConfiguration.SaltLength, "set the auto-generated salt length")
-}
-
-// HashPasswordCmd password hashing command.
-var HashPasswordCmd = &cobra.Command{
-	Use:   "hash-password [password]",
-	Short: "Hash a password to be used in file-based users database. Default algorithm is argon2id.",
-	Run: func(cobraCmd *cobra.Command, args []string) {
-		sha512, _ := cobraCmd.Flags().GetBool("sha512")
-		iterations, _ := cobraCmd.Flags().GetInt("iterations")
-		salt, _ := cobraCmd.Flags().GetString("salt")
-		keyLength, _ := cobraCmd.Flags().GetInt("key-length")
-		saltLength, _ := cobraCmd.Flags().GetInt("salt-length")
-		memory, _ := cobraCmd.Flags().GetInt("memory")
-		parallelism, _ := cobraCmd.Flags().GetInt("parallelism")
-
-		var err error
-		var hash string
-		var algorithm authentication.CryptAlgo
-
-		if sha512 {
-			if iterations == schema.DefaultPasswordConfiguration.Iterations {
-				iterations = schema.DefaultPasswordSHA512Configuration.Iterations
-			}
-			algorithm = authentication.HashingAlgorithmSHA512
-		} else {
-			algorithm = authentication.HashingAlgorithmArgon2id
-		}
-		if salt != "" {
-			salt = crypt.Base64Encoding.EncodeToString([]byte(salt))
-		}
-
-		hash, err = authentication.HashPassword(args[0], salt, algorithm, iterations, memory*1024, parallelism, keyLength, saltLength)
-		if err != nil {
-			log.Fatalf("Error occurred during hashing: %s\n", err)
-		} else {
-			fmt.Printf("Password hash: %s\n", hash)
-		}
-	},
-	Args: cobra.MinimumNArgs(1),
+// NewHashPasswordCmd returns a new Hash Password Cmd.
+func NewHashPasswordCmd() (cmd *cobra.Command) {
+	cmd = &cobra.Command{
+		Use:   "hash-password [password]",
+		Short: "Hash a password to be used in file-based users database. Default algorithm is argon2id.",
+		Args:  cobra.MinimumNArgs(1),
+		Run:   cmdHashPasswordRun,
+	}
+
+	cmd.Flags().BoolP("sha512", "z", false, fmt.Sprintf("use sha512 as the algorithm (changes iterations to %d, change with -i)", schema.DefaultPasswordSHA512Configuration.Iterations))
+	cmd.Flags().IntP("iterations", "i", schema.DefaultPasswordConfiguration.Iterations, "set the number of hashing iterations")
+	cmd.Flags().StringP("salt", "s", "", "set the salt string")
+	cmd.Flags().IntP("memory", "m", schema.DefaultPasswordConfiguration.Memory, "[argon2id] set the amount of memory param (in MB)")
+	cmd.Flags().IntP("parallelism", "p", schema.DefaultPasswordConfiguration.Parallelism, "[argon2id] set the parallelism param")
+	cmd.Flags().IntP("key-length", "k", schema.DefaultPasswordConfiguration.KeyLength, "[argon2id] set the key length param")
+	cmd.Flags().IntP("salt-length", "l", schema.DefaultPasswordConfiguration.SaltLength, "set the auto-generated salt length")
+
+	return cmd
+}
+
+func cmdHashPasswordRun(cmd *cobra.Command, args []string) {
+	sha512, _ := cmd.Flags().GetBool("sha512")
+	iterations, _ := cmd.Flags().GetInt("iterations")
+	salt, _ := cmd.Flags().GetString("salt")
+	keyLength, _ := cmd.Flags().GetInt("key-length")
+	saltLength, _ := cmd.Flags().GetInt("salt-length")
+	memory, _ := cmd.Flags().GetInt("memory")
+	parallelism, _ := cmd.Flags().GetInt("parallelism")
+
+	var (
+		hash      string
+		algorithm authentication.CryptAlgo
+	)
+
+	if sha512 {
+		if iterations == schema.DefaultPasswordConfiguration.Iterations {
+			iterations = schema.DefaultPasswordSHA512Configuration.Iterations
+		}
+
+		algorithm = authentication.HashingAlgorithmSHA512
+	} else {
+		algorithm = authentication.HashingAlgorithmArgon2id
+	}
+
+	if salt != "" {
+		salt = crypt.Base64Encoding.EncodeToString([]byte(salt))
+	}
+
+	hash, err := authentication.HashPassword(args[0], salt, algorithm, iterations, memory*1024, parallelism, keyLength, saltLength)
+	if err != nil {
+		logging.Logger().Fatalf("Error occurred during hashing: %v\n", err)
+	}
+
+	fmt.Printf("Password hash: %s\n", hash)
 }
internal/commands/root.go (new file, 155 lines)
@@ -0,0 +1,155 @@
package commands

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"

	"github.com/authelia/authelia/internal/authentication"
	"github.com/authelia/authelia/internal/authorization"
	"github.com/authelia/authelia/internal/configuration/schema"
	"github.com/authelia/authelia/internal/logging"
	"github.com/authelia/authelia/internal/middlewares"
	"github.com/authelia/authelia/internal/notification"
	"github.com/authelia/authelia/internal/oidc"
	"github.com/authelia/authelia/internal/regulation"
	"github.com/authelia/authelia/internal/server"
	"github.com/authelia/authelia/internal/session"
	"github.com/authelia/authelia/internal/storage"
	"github.com/authelia/authelia/internal/utils"
)

// NewRootCmd returns a new Root Cmd.
func NewRootCmd() (cmd *cobra.Command) {
	version := utils.Version()

	cmd = &cobra.Command{
		Use:     "authelia",
		Example: cmdAutheliaExample,
		Short:   fmt.Sprintf("authelia %s", version),
		Long:    fmt.Sprintf(fmtAutheliaLong, version),
		Version: version,
		Args:    cobra.NoArgs,
		PreRun:  newCmdWithConfigPreRun(true, true, true),
		Run:     cmdRootRun,
	}

	cmdWithConfigFlags(cmd)

	cmd.AddCommand(
		newBuildInfoCmd(),
		NewCertificatesCmd(),
		newCompletionCmd(),
		NewHashPasswordCmd(),
		NewRSACmd(),
		newValidateConfigCmd(),
	)

	return cmd
}

func cmdRootRun(_ *cobra.Command, _ []string) {
	logger := logging.Logger()

	logger.Infof("Authelia %s is starting", utils.Version())

	if os.Getenv("ENVIRONMENT") == "dev" {
		logger.Info("===> Authelia is running in development mode. <===")
	}

	if err := logging.InitializeLogger(config.Log, true); err != nil {
		logger.Fatalf("Cannot initialize logger: %v", err)
	}

	providers, warnings, errors := getProviders(config)
	if len(warnings) != 0 {
		for _, err := range warnings {
			logger.Warn(err)
		}
	}

	if len(errors) != 0 {
		for _, err := range errors {
			logger.Error(err)
		}

		logger.Fatalf("Errors occurred provisioning providers.")
	}

	server.Start(*config, providers)
}

func getProviders(config *schema.Configuration) (providers middlewares.Providers, warnings []error, errors []error) {
	autheliaCertPool, warnings, errors := utils.NewX509CertPool(config.CertificatesDirectory)
	if len(warnings) != 0 || len(errors) != 0 {
		return providers, warnings, errors
	}

	var storageProvider storage.Provider

	switch {
	case config.Storage.PostgreSQL != nil:
		storageProvider = storage.NewPostgreSQLProvider(*config.Storage.PostgreSQL)
	case config.Storage.MySQL != nil:
		storageProvider = storage.NewMySQLProvider(*config.Storage.MySQL)
	case config.Storage.Local != nil:
		storageProvider = storage.NewSQLiteProvider(config.Storage.Local.Path)
	default:
		errors = append(errors, fmt.Errorf("unrecognized storage provider"))
	}

	var (
		userProvider authentication.UserProvider
		err          error
	)

	switch {
	case config.AuthenticationBackend.File != nil:
		userProvider = authentication.NewFileUserProvider(config.AuthenticationBackend.File)
	case config.AuthenticationBackend.LDAP != nil:
		userProvider, err = authentication.NewLDAPUserProvider(config.AuthenticationBackend, autheliaCertPool)
		if err != nil {
			errors = append(errors, fmt.Errorf("failed to check LDAP authentication backend: %w", err))
		}
	default:
		errors = append(errors, fmt.Errorf("unrecognized user provider"))
	}

	var notifier notification.Notifier

	switch {
	case config.Notifier.SMTP != nil:
		notifier = notification.NewSMTPNotifier(*config.Notifier.SMTP, autheliaCertPool)
	case config.Notifier.FileSystem != nil:
		notifier = notification.NewFileNotifier(*config.Notifier.FileSystem)
	default:
		errors = append(errors, fmt.Errorf("unrecognized notifier provider"))
	}

	if notifier != nil {
		if _, err := notifier.StartupCheck(); err != nil {
			errors = append(errors, fmt.Errorf("failed to check notification provider: %w", err))
		}
	}

	clock := utils.RealClock{}
	authorizer := authorization.NewAuthorizer(config)
	sessionProvider := session.NewProvider(config.Session, autheliaCertPool)
	regulator := regulation.NewRegulator(config.Regulation, storageProvider, clock)

	oidcProvider, err := oidc.NewOpenIDConnectProvider(config.IdentityProviders.OIDC)
	if err != nil {
		errors = append(errors, err)
	}

	return middlewares.Providers{
		Authorizer:      authorizer,
		UserProvider:    userProvider,
		Regulator:       regulator,
		OpenIDConnect:   oidcProvider,
		StorageProvider: storageProvider,
		Notifier:        notifier,
		SessionProvider: sessionProvider,
	}, warnings, errors
}
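For orientation, a sketch of the kind of entrypoint that consumes NewRootCmd. The cmd/authelia main package is not part of this excerpt, so the exact file layout is an assumption, but Execute is the standard cobra entrypoint and both packages used here are from this commit.

package main

import (
	"github.com/authelia/authelia/internal/commands"
	"github.com/authelia/authelia/internal/logging"
)

func main() {
	// NewRootCmd wires the config flags, the PreRun that loads and validates the
	// configuration, and all subcommands (build-info, certificates, completion,
	// hash-password, rsa, validate-config).
	if err := commands.NewRootCmd().Execute(); err != nil {
		logging.Logger().Fatal(err)
	}
}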
@@ -1,79 +1,106 @@
 package commands
 
 import (
-	"log"
+	"fmt"
 	"os"
-	"path"
+	"path/filepath"
 
 	"github.com/spf13/cobra"
 
 	"github.com/authelia/authelia/internal/utils"
 )
 
-var rsaTargetDirectory string
-
-func init() {
-	RSAGenerateCmd.PersistentFlags().StringVar(&rsaTargetDirectory, "dir", "", "Target directory where the keypair will be stored")
-
-	RSACmd.AddCommand(RSAGenerateCmd)
-}
-
-func generateRSAKeypair(cmd *cobra.Command, args []string) {
-	privateKey, publicKey := utils.GenerateRsaKeyPair(2048)
-
-	keyPath := path.Join(rsaTargetDirectory, "key.pem")
-	keyOut, err := os.OpenFile(keyPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
-	if err != nil {
-		log.Fatalf("Failed to open %s for writing: %v", keyPath, err)
-		return
-	}
-
-	_, err = keyOut.WriteString(utils.ExportRsaPrivateKeyAsPemStr(privateKey))
-	if err != nil {
-		log.Fatalf("Unable to write private key: %v", err)
-		return
-	}
-
-	if err := keyOut.Close(); err != nil {
-		log.Fatalf("Unable to close private key file: %v", err)
-		return
-	}
-
-	keyPath = path.Join(rsaTargetDirectory, "key.pub")
-	keyOut, err = os.OpenFile(keyPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
-	if err != nil {
-		log.Fatalf("Failed to open %s for writing: %v", keyPath, err)
-		return
-	}
-
-	publicPem, err := utils.ExportRsaPublicKeyAsPemStr(publicKey)
-	if err != nil {
-		log.Fatalf("Unable to marshal public key: %v", err)
-	}
-
-	_, err = keyOut.WriteString(publicPem)
-	if err != nil {
-		log.Fatalf("Unable to write private key: %v", err)
-		return
-	}
-
-	if err := keyOut.Close(); err != nil {
-		log.Fatalf("Unable to close public key file: %v", err)
-		return
-	}
-}
-
-// RSACmd RSA helper command.
-var RSACmd = &cobra.Command{
-	Use:   "rsa",
-	Short: "Commands related to rsa keypair generation",
-}
-
-// RSAGenerateCmd certificate generation command.
-var RSAGenerateCmd = &cobra.Command{
-	Use:   "generate",
-	Short: "Generate a RSA keypair",
-	Run:   generateRSAKeypair,
+// NewRSACmd returns a new RSA Cmd.
+func NewRSACmd() (cmd *cobra.Command) {
+	cmd = &cobra.Command{
+		Use:   "rsa",
+		Short: "Commands related to rsa keypair generation",
+		Args:  cobra.NoArgs,
+	}
+
+	cmd.AddCommand(newRSAGenerateCmd())
+
+	return cmd
+}
+
+func newRSAGenerateCmd() (cmd *cobra.Command) {
+	cmd = &cobra.Command{
+		Use:   "generate",
+		Short: "Generate a RSA keypair",
+		Args:  cobra.NoArgs,
+		Run:   cmdRSAGenerateRun,
+	}
+
+	cmd.Flags().StringP("dir", "d", "", "Target directory where the keypair will be stored")
+	cmd.Flags().IntP("key-size", "b", 2048, "Sets the key size in bits")
+
+	return cmd
+}
+
+func cmdRSAGenerateRun(cmd *cobra.Command, _ []string) {
+	bits, err := cmd.Flags().GetInt("key-size")
+	if err != nil {
+		fmt.Printf("Failed to parse key-size flag: %v\n", err)
+		return
+	}
+
+	privateKey, publicKey := utils.GenerateRsaKeyPair(bits)
+
+	rsaTargetDirectory, err := cmd.Flags().GetString("dir")
+	if err != nil {
+		fmt.Printf("Failed to parse dir flag: %v\n", err)
+		return
+	}
+
+	keyPath := filepath.Join(rsaTargetDirectory, "key.pem")
+
+	keyOut, err := os.OpenFile(keyPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
+	if err != nil {
+		fmt.Printf("Failed to open %s for writing: %v\n", keyPath, err)
+		return
+	}
+
+	defer func() {
+		if err := keyOut.Close(); err != nil {
+			fmt.Printf("Unable to close private key file: %v\n", err)
+			os.Exit(1)
+		}
+	}()
+
+	_, err = keyOut.WriteString(utils.ExportRsaPrivateKeyAsPemStr(privateKey))
+	if err != nil {
+		fmt.Printf("Failed to write private key: %v\n", err)
+		return
+	}
+
+	fmt.Printf("RSA Private Key written to %s\n", keyPath)
+
+	certPath := filepath.Join(rsaTargetDirectory, "key.pub")
+
+	certOut, err := os.OpenFile(certPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
+	if err != nil {
+		fmt.Printf("Failed to open %s for writing: %v\n", keyPath, err)
+		return
+	}
+
+	defer func() {
+		if err := certOut.Close(); err != nil {
+			fmt.Printf("Failed to close public key file: %v\n", err)
+			os.Exit(1)
+		}
+	}()
+
+	publicPem, err := utils.ExportRsaPublicKeyAsPemStr(publicKey)
+	if err != nil {
+		fmt.Printf("Failed to marshal public key: %v\n", err)
+		return
+	}
+
+	_, err = certOut.WriteString(publicPem)
+	if err != nil {
+		fmt.Printf("Failed to write private key: %v\n", err)
+		return
+	}
+
+	fmt.Printf("RSA Public Key written to %s\n", certPath)
 }
@@ -1,40 +1,66 @@
 package commands
 
 import (
-	"fmt"
 	"log"
 	"os"
 
 	"github.com/spf13/cobra"
 
 	"github.com/authelia/authelia/internal/configuration"
+	"github.com/authelia/authelia/internal/configuration/schema"
+	"github.com/authelia/authelia/internal/configuration/validator"
+	"github.com/authelia/authelia/internal/logging"
 )
 
-// ValidateConfigCmd uses the internal configuration reader to validate the configuration.
-var ValidateConfigCmd = &cobra.Command{
-	Use:   "validate-config [yaml]",
-	Short: "Check a configuration against the internal configuration validation mechanisms.",
-	Run: func(cobraCmd *cobra.Command, args []string) {
-		configPath := args[0]
-		if _, err := os.Stat(configPath); err != nil {
-			log.Fatalf("Error Loading Configuration: %s\n", err)
-		}
-
-		// TODO: Actually use the configuration to validate some providers like Notifier
-		_, errs := configuration.Read(configPath)
-		if len(errs) != 0 {
-			str := "Errors"
-			if len(errs) == 1 {
-				str = "Error"
-			}
-
-			errors := ""
-
-			for _, err := range errs {
-				errors += fmt.Sprintf("\t%s\n", err.Error())
-			}
-
-			log.Fatalf("%s occurred parsing configuration:\n%s", str, errors)
-		} else {
-			log.Println("Configuration parsed successfully without errors.")
-		}
-	},
-	Args: cobra.MinimumNArgs(1),
+func newValidateConfigCmd() (cmd *cobra.Command) {
+	cmd = &cobra.Command{
+		Use:   "validate-config [yaml]",
+		Short: "Check a configuration against the internal configuration validation mechanisms",
+		Args:  cobra.MinimumNArgs(1),
+		Run:   cmdValidateConfigRun,
+	}
+
+	return cmd
+}
+
+func cmdValidateConfigRun(_ *cobra.Command, args []string) {
+	logger := logging.Logger()
+
+	configPath := args[0]
+	if _, err := os.Stat(configPath); err != nil {
+		logger.Fatalf("Error Loading Configuration: %v\n", err)
+	}
+
+	val := schema.NewStructValidator()
+
+	keys, conf, err := configuration.Load(val, configuration.NewYAMLFileSource(configPath))
+	if err != nil {
+		logger.Fatalf("Error occurred loading configuration: %v", err)
+	}
+
+	validator.ValidateKeys(keys, configuration.DefaultEnvPrefix, val)
+	validator.ValidateConfiguration(conf, val)
+
+	warnings := val.Warnings()
+	errors := val.Errors()
+
+	if len(warnings) != 0 {
+		logger.Warn("Warnings occurred while loading the configuration:")
+
+		for _, warn := range warnings {
+			logger.Warnf("  %+v", warn)
+		}
+	}
+
+	if len(errors) != 0 {
+		logger.Error("Errors occurred while loading the configuration:")
+
+		for _, err := range errors {
+			logger.Errorf("  %+v", err)
+		}
+
+		logger.Fatal("Can't continue due to errors")
+	}
+
+	log.Println("Configuration parsed successfully without errors.")
 }
@@ -29,21 +29,16 @@ default_redirection_url: https://home.example.com/
 ## Server Configuration
 ##
 server:
 
   ## The address to listen on.
   host: 0.0.0.0
 
   ## The port to listen on.
   port: 9091
 
-  ## Authelia by default doesn't accept TLS communication on the server port. This section overrides this behaviour.
-  tls:
-    ## The path to the DER base64/PEM format private key.
-    key: ""
-    # key: /config/ssl/key.pem
-
-    ## The path to the DER base64/PEM format public certificate.
-    certificate: ""
-    # certificate: /config/ssl/cert.pem
+  ## Set the single level path Authelia listens on.
+  ## Must be alphanumeric chars and should not contain any slashes.
+  path: ""
 
   ## Buffers usually should be configured to be the same value.
   ## Explanation at https://www.authelia.com/docs/configuration/server.html
@@ -52,16 +47,23 @@ server:
   read_buffer_size: 4096
   write_buffer_size: 4096
 
-  ## Set the single level path Authelia listens on.
-  ## Must be alphanumeric chars and should not contain any slashes.
-  path: ""
-
   ## Enables the pprof endpoint.
   enable_pprof: false
 
   ## Enables the expvars endpoint.
   enable_expvars: false
 
+  ## Authelia by default doesn't accept TLS communication on the server port. This section overrides this behaviour.
+  tls:
+    ## The path to the DER base64/PEM format private key.
+    key: ""
+
+    ## The path to the DER base64/PEM format public certificate.
+    certificate: ""
+
+##
+## Log Configuration
+##
 log:
   ## Level of verbosity for logs: info, debug, trace.
   level: debug
internal/configuration/configuration_test.go (new file, 19 lines)
@@ -0,0 +1,19 @@
package configuration

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/authelia/authelia/internal/utils"
)

func TestShouldHaveSameChecksumForBothTemplates(t *testing.T) {
	sumRoot, err := utils.HashSHA256FromPath("../../config.template.yml")
	assert.NoError(t, err)

	sumInternal, err := utils.HashSHA256FromPath("./config.template.yml")
	assert.NoError(t, err)

	assert.Equal(t, sumRoot, sumInternal, "Ensure both ./config.template.yml and ./internal/configuration/config.template.yml are exactly the same.")
}
@@ -1,3 +1,30 @@
 package configuration
 
-const windows = "windows"
+import (
+	"errors"
+)
+
+// DefaultEnvPrefix is the default environment prefix.
+const DefaultEnvPrefix = "AUTHELIA_"
+
+// DefaultEnvDelimiter is the default environment delimiter.
+const DefaultEnvDelimiter = "_"
+
+const (
+	constSecretSuffix = "_FILE"
+
+	constDelimiter = "."
+
+	constWindows = "windows"
+)
+
+const (
+	errFmtSecretAlreadyDefined = "secrets: error loading secret into key '%s': it's already defined in other " +
+		"configuration sources"
+	errFmtSecretIOIssue         = "secrets: error loading secret path %s into key '%s': %v"
+	errFmtGenerateConfiguration = "error occurred generating configuration: %+v"
+)
+
+var secretSuffixes = []string{"key", "secret", "password", "token"}
+var errNoSources = errors.New("no sources provided")
+var errNoValidator = errors.New("no validator provided")
internal/configuration/helpers.go (new file, 55 lines)
@@ -0,0 +1,55 @@
package configuration

import (
	"io/ioutil"
	"strings"

	"github.com/authelia/authelia/internal/utils"
)

func getEnvConfigMap(keys []string, prefix, delimiter string) (keyMap map[string]string, ignoredKeys []string) {
	keyMap = make(map[string]string)

	for _, key := range keys {
		if strings.Contains(key, delimiter) {
			originalKey := prefix + strings.ToUpper(strings.ReplaceAll(key, constDelimiter, delimiter))
			keyMap[originalKey] = key
		}

		// Secret envs should be ignored by the env parser.
		if isSecretKey(key) {
			originalKey := strings.ToUpper(strings.ReplaceAll(key, constDelimiter, delimiter)) + constSecretSuffix

			ignoredKeys = append(ignoredKeys, prefix+originalKey)
		}
	}

	return keyMap, ignoredKeys
}

func getSecretConfigMap(keys []string, prefix, delimiter string) (keyMap map[string]string) {
	keyMap = make(map[string]string)

	for _, key := range keys {
		if isSecretKey(key) {
			originalKey := strings.ToUpper(strings.ReplaceAll(key, constDelimiter, delimiter)) + constSecretSuffix

			keyMap[prefix+originalKey] = key
		}
	}

	return keyMap
}

func isSecretKey(key string) (isSecretKey bool) {
	return utils.IsStringInSliceSuffix(key, secretSuffixes)
}

func loadSecret(path string) (value string, err error) {
	content, err := ioutil.ReadFile(path)
	if err != nil {
		return "", err
	}

	return strings.TrimRight(string(content), "\n"), err
}
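As a quick illustration of what the two map builders produce (the tests below assert the same behaviour), a small in-package sketch using the prefix and delimiter defaults from const.go; the function itself is illustrative and not part of the commit.

package configuration

import "fmt"

// exampleKeyMapping shows how dotted configuration keys are translated into
// environment variable names. Keys containing the delimiter go into the env
// map; keys ending in a secret suffix go into the secrets map instead and are
// ignored by the plain env parser.
func exampleKeyMapping() {
	keys := []string{"my.non_secret.config_item", "mysecret.password"}

	envMap, ignored := getEnvConfigMap(keys, DefaultEnvPrefix, DefaultEnvDelimiter)
	secretMap := getSecretConfigMap(keys, DefaultEnvPrefix, DefaultEnvDelimiter)

	// envMap["AUTHELIA_MY_NON_SECRET_CONFIG_ITEM"] == "my.non_secret.config_item"
	// ignored == []string{"AUTHELIA_MYSECRET_PASSWORD_FILE"}
	// secretMap["AUTHELIA_MYSECRET_PASSWORD_FILE"] == "mysecret.password"
	fmt.Println(envMap, ignored, secretMap)
}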
internal/configuration/helpers_test.go (new file, 85 lines)
@@ -0,0 +1,85 @@
package configuration

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestIsSecretKey(t *testing.T) {
	assert.True(t, isSecretKey("my_fake_token"))
	assert.False(t, isSecretKey("my_fake_tokenz"))
	assert.True(t, isSecretKey("my_.fake.secret"))
	assert.True(t, isSecretKey("my.password"))
	assert.False(t, isSecretKey("my.passwords"))
	assert.False(t, isSecretKey("my.passwords"))
}

func TestGetEnvConfigMaps(t *testing.T) {
	var (
		key string
		ok  bool
	)

	input := []string{
		"my.non_secret.config_item",
		"myother.configkey",
		"mysecret.password",
		"mysecret.user_password",
	}

	keys, ignoredKeys := getEnvConfigMap(input, DefaultEnvPrefix, DefaultEnvDelimiter)

	key, ok = keys[DefaultEnvPrefix+"MY_NON_SECRET_CONFIG_ITEM"]
	assert.True(t, ok)
	assert.Equal(t, key, "my.non_secret.config_item")

	key, ok = keys[DefaultEnvPrefix+"MYSECRET_USER_PASSWORD"]
	assert.True(t, ok)
	assert.Equal(t, key, "mysecret.user_password")

	key, ok = keys[DefaultEnvPrefix+"MYOTHER_CONFIGKEY"]
	assert.False(t, ok)
	assert.Equal(t, key, "")

	key, ok = keys[DefaultEnvPrefix+"MYSECRET_PASSWORD"]
	assert.False(t, ok)
	assert.Equal(t, key, "")

	assert.Len(t, ignoredKeys, 3)
	assert.Contains(t, ignoredKeys, DefaultEnvPrefix+"MYOTHER_CONFIGKEY_FILE")
	assert.Contains(t, ignoredKeys, DefaultEnvPrefix+"MYSECRET_PASSWORD_FILE")
	assert.Contains(t, ignoredKeys, DefaultEnvPrefix+"MYSECRET_USER_PASSWORD_FILE")
}

func TestGetSecretConfigMap(t *testing.T) {
	var (
		key string
		ok  bool
	)

	input := []string{
		"my.non_secret.config_item",
		"myother.configkey",
		"mysecret.password",
		"mysecret.user_password",
	}

	keys := getSecretConfigMap(input, DefaultEnvPrefix, DefaultEnvDelimiter)

	key, ok = keys[DefaultEnvPrefix+"MY_NON_SECRET_CONFIG_ITEM_FILE"]
	assert.False(t, ok)
	assert.Equal(t, key, "")

	key, ok = keys[DefaultEnvPrefix+"MYOTHER_CONFIGKEY_FILE"]
	assert.True(t, ok)
	assert.Equal(t, key, "myother.configkey")

	key, ok = keys[DefaultEnvPrefix+"MYSECRET_PASSWORD_FILE"]
	assert.True(t, ok)
	assert.Equal(t, key, "mysecret.password")

	key, ok = keys[DefaultEnvPrefix+"MYSECRET_USER_PASSWORD_FILE"]
	assert.True(t, ok)
	assert.Equal(t, key, "mysecret.user_password")
}
internal/configuration/koanf_callbacks.go
Normal file
50
internal/configuration/koanf_callbacks.go
Normal file
|
@ -0,0 +1,50 @@
|
||||||
|
package configuration
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/authelia/authelia/internal/configuration/schema"
|
||||||
|
"github.com/authelia/authelia/internal/configuration/validator"
|
||||||
|
"github.com/authelia/authelia/internal/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
// koanfEnvironmentCallback returns a koanf callback to map the environment vars to Configuration keys.
|
||||||
|
func koanfEnvironmentCallback(keyMap map[string]string, ignoredKeys []string, prefix, delimiter string) func(key, value string) (finalKey string, finalValue interface{}) {
|
||||||
|
return func(key, value string) (finalKey string, finalValue interface{}) {
|
||||||
|
if k, ok := keyMap[key]; ok {
|
||||||
|
return k, value
|
||||||
|
}
|
||||||
|
|
||||||
|
if utils.IsStringInSlice(key, ignoredKeys) {
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
formattedKey := strings.TrimPrefix(key, prefix)
|
||||||
|
formattedKey = strings.ReplaceAll(strings.ToLower(formattedKey), delimiter, constDelimiter)
|
||||||
|
|
||||||
|
if utils.IsStringInSlice(formattedKey, validator.ValidKeys) {
|
||||||
|
return formattedKey, value
|
||||||
|
}
|
||||||
|
|
||||||
|
return key, value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// koanfEnvironmentSecretsCallback returns a koanf callback to map the environment vars to Configuration keys.
|
||||||
|
func koanfEnvironmentSecretsCallback(keyMap map[string]string, validator *schema.StructValidator) func(key, value string) (finalKey string, finalValue interface{}) {
|
||||||
|
return func(key, value string) (finalKey string, finalValue interface{}) {
|
||||||
|
k, ok := keyMap[key]
|
||||||
|
if !ok {
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
v, err := loadSecret(value)
|
||||||
|
if err != nil {
|
||||||
|
validator.Push(fmt.Errorf(errFmtSecretIOIssue, value, k, err))
|
||||||
|
return k, ""
|
||||||
|
}
|
||||||
|
|
||||||
|
return k, v
|
||||||
|
}
|
||||||
|
}
|
internal/configuration/koanf_callbacks_test.go (new file, 129 lines)
@@ -0,0 +1,129 @@
package configuration

import (
	"fmt"
	"io/ioutil"
	"path/filepath"
	"runtime"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/authelia/authelia/internal/configuration/schema"
)

func TestKoanfEnvironmentCallback(t *testing.T) {
	var (
		key   string
		value interface{}
	)

	keyMap := map[string]string{
		DefaultEnvPrefix + "KEY_EXAMPLE_UNDERSCORE": "key.example_underscore",
	}
	ignoredKeys := []string{DefaultEnvPrefix + "SOME_SECRET"}

	callback := koanfEnvironmentCallback(keyMap, ignoredKeys, DefaultEnvPrefix, DefaultEnvDelimiter)

	key, value = callback(DefaultEnvPrefix+"KEY_EXAMPLE_UNDERSCORE", "value")
	assert.Equal(t, "key.example_underscore", key)
	assert.Equal(t, "value", value)

	key, value = callback(DefaultEnvPrefix+"KEY_EXAMPLE", "value")
	assert.Equal(t, DefaultEnvPrefix+"KEY_EXAMPLE", key)
	assert.Equal(t, "value", value)

	key, value = callback(DefaultEnvPrefix+"THEME", "value")
	assert.Equal(t, "theme", key)
	assert.Equal(t, "value", value)

	key, value = callback(DefaultEnvPrefix+"SOME_SECRET", "value")
	assert.Equal(t, "", key)
	assert.Nil(t, value)
}

func TestKoanfSecretCallbackWithValidSecrets(t *testing.T) {
	var (
		key   string
		value interface{}
	)

	keyMap := map[string]string{
		"AUTHELIA__JWT_SECRET":                  "jwt_secret",
		"AUTHELIA_JWT_SECRET":                   "jwt_secret",
		"AUTHELIA_FAKE_KEY":                     "fake_key",
		"AUTHELIA__FAKE_KEY":                    "fake_key",
		"AUTHELIA_STORAGE_MYSQL_FAKE_PASSWORD":  "storage.mysql.fake_password",
		"AUTHELIA__STORAGE_MYSQL_FAKE_PASSWORD": "storage.mysql.fake_password",
	}

	dir, err := ioutil.TempDir("", "authelia-test-callbacks")
	assert.NoError(t, err)

	secretOne := filepath.Join(dir, "secert_one")
	secretTwo := filepath.Join(dir, "secret_two")

	assert.NoError(t, testCreateFile(secretOne, "value one", 0600))
	assert.NoError(t, testCreateFile(secretTwo, "value two", 0600))

	val := schema.NewStructValidator()

	callback := koanfEnvironmentSecretsCallback(keyMap, val)

	key, value = callback("AUTHELIA_FAKE_KEY", secretOne)
	assert.Equal(t, "fake_key", key)
	assert.Equal(t, "value one", value)

	key, value = callback("AUTHELIA__STORAGE_MYSQL_FAKE_PASSWORD", secretTwo)
	assert.Equal(t, "storage.mysql.fake_password", key)
	assert.Equal(t, "value two", value)
}

func TestKoanfSecretCallbackShouldIgnoreUndetectedSecrets(t *testing.T) {
	keyMap := map[string]string{
		"AUTHELIA__JWT_SECRET": "jwt_secret",
		"AUTHELIA_JWT_SECRET":  "jwt_secret",
	}

	val := schema.NewStructValidator()

	callback := koanfEnvironmentSecretsCallback(keyMap, val)

	key, value := callback("AUTHELIA__SESSION_DOMAIN", "/tmp/not-a-path")
	assert.Equal(t, "", key)
	assert.Nil(t, value)

	assert.Len(t, val.Errors(), 0)
	assert.Len(t, val.Warnings(), 0)
}

func TestKoanfSecretCallbackShouldErrorOnFSError(t *testing.T) {
	if runtime.GOOS == constWindows {
		t.Skip("skipping test due to being on windows")
	}

	keyMap := map[string]string{
		"AUTHELIA__THEME": "theme",
		"AUTHELIA_THEME":  "theme",
	}

	dir, err := ioutil.TempDir("", "authelia-test-callbacks")
	assert.NoError(t, err)

	secret := filepath.Join(dir, "inaccessible")

	assert.NoError(t, testCreateFile(secret, "secret", 0000))

	val := schema.NewStructValidator()

	callback := koanfEnvironmentSecretsCallback(keyMap, val)

	key, value := callback("AUTHELIA_THEME", secret)
	assert.Equal(t, "theme", key)
	assert.Equal(t, "", value)

	require.Len(t, val.Errors(), 1)
	assert.Len(t, val.Warnings(), 0)
	assert.EqualError(t, val.Errors()[0], fmt.Sprintf(errFmtSecretIOIssue, secret, "theme", fmt.Sprintf("open %s: permission denied", secret)))
}
internal/configuration/provider.go (new file, 75 lines)
@@ -0,0 +1,75 @@
package configuration

import (
	"fmt"

	"github.com/knadh/koanf"
	"github.com/mitchellh/mapstructure"

	"github.com/authelia/authelia/internal/configuration/schema"
)

// Load the configuration given the provided options and sources.
func Load(val *schema.StructValidator, sources ...Source) (keys []string, configuration *schema.Configuration, err error) {
	if val == nil {
		return keys, configuration, errNoValidator
	}

	ko := koanf.NewWithConf(koanf.Conf{
		Delim:       constDelimiter,
		StrictMerge: false,
	})

	err = loadSources(ko, val, sources...)
	if err != nil {
		return ko.Keys(), configuration, err
	}

	configuration = &schema.Configuration{}

	unmarshal(ko, val, "", configuration)

	return ko.Keys(), configuration, nil
}

func unmarshal(ko *koanf.Koanf, val *schema.StructValidator, path string, o interface{}) {
	c := koanf.UnmarshalConf{
		DecoderConfig: &mapstructure.DecoderConfig{
			DecodeHook: mapstructure.ComposeDecodeHookFunc(
				mapstructure.StringToTimeDurationHookFunc(),
				mapstructure.StringToSliceHookFunc(","),
			),
			Metadata:         nil,
			Result:           o,
			WeaklyTypedInput: true,
		},
	}

	if err := ko.UnmarshalWithConf(path, o, c); err != nil {
		val.Push(fmt.Errorf("error occurred during unmarshalling configuration: %w", err))
	}
}

func loadSources(ko *koanf.Koanf, val *schema.StructValidator, sources ...Source) (err error) {
	if len(sources) == 0 {
		return errNoSources
	}

	for _, source := range sources {
		err := source.Load(val)
		if err != nil {
			val.Push(fmt.Errorf("failed to load configuration from %s source: %+v", source.Name(), err))

			continue
		}

		err = source.Merge(ko, val)
		if err != nil {
			val.Push(fmt.Errorf("failed to merge configuration from %s source: %+v", source.Name(), err))

			continue
		}
	}

	return nil
}
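A compact sketch of how a caller outside the package is expected to drive Load. The validator, Load, ValidateKeys and ValidateConfiguration all come from this commit; NewDefaultSources is defined in the sources file of this commit (not shown in this excerpt), so the comment about its composition and the config path are assumptions for illustration.

package main

import (
	"fmt"

	"github.com/authelia/authelia/internal/configuration"
	"github.com/authelia/authelia/internal/configuration/schema"
	"github.com/authelia/authelia/internal/configuration/validator"
)

func main() {
	val := schema.NewStructValidator()

	// NewDefaultSources appears to build the default chain of sources (YAML file(s),
	// environment variables, secrets) for the given prefix and delimiter.
	keys, config, err := configuration.Load(val,
		configuration.NewDefaultSources([]string{"/etc/authelia/config.yml"}, configuration.DefaultEnvPrefix, configuration.DefaultEnvDelimiter)...)
	if err != nil {
		panic(err)
	}

	// The returned keys feed the key validator; the struct feeds the configuration validator.
	validator.ValidateKeys(keys, configuration.DefaultEnvPrefix, val)
	validator.ValidateConfiguration(config, val)

	fmt.Printf("warnings: %d, errors: %d\n", len(val.Warnings()), len(val.Errors()))
}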
internal/configuration/provider_test.go (new file, 287 lines; the remainder of the file is not shown in this excerpt)
@@ -0,0 +1,287 @@
package configuration

import (
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"runtime"
	"sort"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/authelia/authelia/internal/configuration/schema"
	"github.com/authelia/authelia/internal/configuration/validator"
	"github.com/authelia/authelia/internal/utils"
)

func TestShouldErrorSecretNotExist(t *testing.T) {
	testReset()

	dir, err := ioutil.TempDir("", "authelia-test-secret-not-exist")
	assert.NoError(t, err)

	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"JWT_SECRET_FILE", filepath.Join(dir, "jwt")))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"DUO_API_SECRET_KEY_FILE", filepath.Join(dir, "duo")))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"SESSION_SECRET_FILE", filepath.Join(dir, "session")))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE", filepath.Join(dir, "authentication")))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"NOTIFIER_SMTP_PASSWORD_FILE", filepath.Join(dir, "notifier")))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"SESSION_REDIS_PASSWORD_FILE", filepath.Join(dir, "redis")))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"SESSION_REDIS_HIGH_AVAILABILITY_SENTINEL_PASSWORD_FILE", filepath.Join(dir, "redis-sentinel")))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"STORAGE_MYSQL_PASSWORD_FILE", filepath.Join(dir, "mysql")))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"STORAGE_POSTGRES_PASSWORD_FILE", filepath.Join(dir, "postgres")))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"TLS_KEY_FILE", filepath.Join(dir, "tls")))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"IDENTITY_PROVIDERS_OIDC_ISSUER_PRIVATE_KEY_FILE", filepath.Join(dir, "oidc-key")))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"IDENTITY_PROVIDERS_OIDC_HMAC_SECRET_FILE", filepath.Join(dir, "oidc-hmac")))

	val := schema.NewStructValidator()
	_, _, err = Load(val, NewEnvironmentSource(DefaultEnvPrefix, DefaultEnvDelimiter), NewSecretsSource(DefaultEnvPrefix, DefaultEnvDelimiter))

	assert.NoError(t, err)
	assert.Len(t, val.Warnings(), 0)

	errs := val.Errors()
	require.Len(t, errs, 12)

	sort.Sort(utils.ErrSliceSortAlphabetical(errs))

	errFmt := utils.GetExpectedErrTxt("filenotfound")

	// ignore the errors before this as they are checked by the validator.
	assert.EqualError(t, errs[0], fmt.Sprintf(errFmtSecretIOIssue, filepath.Join(dir, "authentication"), "authentication_backend.ldap.password", fmt.Sprintf(errFmt, filepath.Join(dir, "authentication"))))
	assert.EqualError(t, errs[1], fmt.Sprintf(errFmtSecretIOIssue, filepath.Join(dir, "duo"), "duo_api.secret_key", fmt.Sprintf(errFmt, filepath.Join(dir, "duo"))))
	assert.EqualError(t, errs[2], fmt.Sprintf(errFmtSecretIOIssue, filepath.Join(dir, "jwt"), "jwt_secret", fmt.Sprintf(errFmt, filepath.Join(dir, "jwt"))))
	assert.EqualError(t, errs[3], fmt.Sprintf(errFmtSecretIOIssue, filepath.Join(dir, "mysql"), "storage.mysql.password", fmt.Sprintf(errFmt, filepath.Join(dir, "mysql"))))
	assert.EqualError(t, errs[4], fmt.Sprintf(errFmtSecretIOIssue, filepath.Join(dir, "notifier"), "notifier.smtp.password", fmt.Sprintf(errFmt, filepath.Join(dir, "notifier"))))
	assert.EqualError(t, errs[5], fmt.Sprintf(errFmtSecretIOIssue, filepath.Join(dir, "oidc-hmac"), "identity_providers.oidc.hmac_secret", fmt.Sprintf(errFmt, filepath.Join(dir, "oidc-hmac"))))
	assert.EqualError(t, errs[6], fmt.Sprintf(errFmtSecretIOIssue, filepath.Join(dir, "oidc-key"), "identity_providers.oidc.issuer_private_key", fmt.Sprintf(errFmt, filepath.Join(dir, "oidc-key"))))
	assert.EqualError(t, errs[7], fmt.Sprintf(errFmtSecretIOIssue, filepath.Join(dir, "postgres"), "storage.postgres.password", fmt.Sprintf(errFmt, filepath.Join(dir, "postgres"))))
	assert.EqualError(t, errs[8], fmt.Sprintf(errFmtSecretIOIssue, filepath.Join(dir, "redis"), "session.redis.password", fmt.Sprintf(errFmt, filepath.Join(dir, "redis"))))
	assert.EqualError(t, errs[9], fmt.Sprintf(errFmtSecretIOIssue, filepath.Join(dir, "redis-sentinel"), "session.redis.high_availability.sentinel_password", fmt.Sprintf(errFmt, filepath.Join(dir, "redis-sentinel"))))
	assert.EqualError(t, errs[10], fmt.Sprintf(errFmtSecretIOIssue, filepath.Join(dir, "session"), "session.secret", fmt.Sprintf(errFmt, filepath.Join(dir, "session"))))
	assert.EqualError(t, errs[11], fmt.Sprintf(errFmtSecretIOIssue, filepath.Join(dir, "tls"), "tls_key", fmt.Sprintf(errFmt, filepath.Join(dir, "tls"))))
}

func TestLoadShouldReturnErrWithoutValidator(t *testing.T) {
	_, _, err := Load(nil, NewEnvironmentSource(DefaultEnvPrefix, DefaultEnvDelimiter))
	assert.EqualError(t, err, "no validator provided")
}

func TestLoadShouldReturnErrWithoutSources(t *testing.T) {
	_, _, err := Load(schema.NewStructValidator())
	assert.EqualError(t, err, "no sources provided")
}

func TestShouldHaveNotifier(t *testing.T) {
	testReset()

	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"SESSION_SECRET", "abc"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"STORAGE_MYSQL_PASSWORD", "abc"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"JWT_SECRET", "abc"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"AUTHENTICATION_BACKEND_LDAP_PASSWORD", "abc"))

	val := schema.NewStructValidator()
	_, config, err := Load(val, NewDefaultSources([]string{"./test_resources/config.yml"}, DefaultEnvPrefix, DefaultEnvDelimiter)...)

	assert.NoError(t, err)
	assert.Len(t, val.Errors(), 0)
	assert.Len(t, val.Warnings(), 0)
	assert.NotNil(t, config.Notifier)
}

func TestShouldValidateConfigurationWithEnv(t *testing.T) {
	testReset()

	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"SESSION_SECRET", "abc"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"STORAGE_MYSQL_PASSWORD", "abc"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"JWT_SECRET", "abc"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"AUTHENTICATION_BACKEND_LDAP_PASSWORD", "abc"))

	val := schema.NewStructValidator()
	_, _, err := Load(val, NewDefaultSources([]string{"./test_resources/config.yml"}, DefaultEnvPrefix, DefaultEnvDelimiter)...)

	assert.NoError(t, err)
	assert.Len(t, val.Errors(), 0)
	assert.Len(t, val.Warnings(), 0)
}

func TestShouldNotIgnoreInvalidEnvs(t *testing.T) {
	testReset()

	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"SESSION_SECRET", "an env session secret"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"STORAGE_MYSQL_PASSWORD", "an env storage mysql password"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"STORAGE_MYSQL", "a bad env"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"JWT_SECRET", "an env jwt secret"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"AUTHENTICATION_BACKEND_LDAP_PASSWORD", "an env authentication backend ldap password"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"AUTHENTICATION_BACKEND_LDAP_URL", "an env authentication backend ldap password"))

	val := schema.NewStructValidator()
	keys, _, err := Load(val, NewDefaultSources([]string{"./test_resources/config.yml"}, DefaultEnvPrefix, DefaultEnvDelimiter)...)

	assert.NoError(t, err)

	validator.ValidateKeys(keys, DefaultEnvPrefix, val)

	require.Len(t, val.Warnings(), 1)
	assert.Len(t, val.Errors(), 0)

	assert.EqualError(t, val.Warnings()[0], fmt.Sprintf("configuration environment variable not expected: %sSTORAGE_MYSQL", DefaultEnvPrefix))
}

func TestShouldValidateAndRaiseErrorsOnNormalConfigurationAndSecret(t *testing.T) {
	testReset()

	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"SESSION_SECRET", "an env session secret"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"SESSION_SECRET_FILE", "./test_resources/example_secret"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"STORAGE_MYSQL_PASSWORD", "an env storage mysql password"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"JWT_SECRET_FILE", "./test_resources/example_secret"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"AUTHENTICATION_BACKEND_LDAP_PASSWORD", "an env authentication backend ldap password"))

	val := schema.NewStructValidator()
	_, config, err := Load(val, NewDefaultSources([]string{"./test_resources/config.yml"}, DefaultEnvPrefix, DefaultEnvDelimiter)...)

	assert.NoError(t, err)
	require.Len(t, val.Errors(), 1)
	assert.Len(t, val.Warnings(), 0)

	assert.EqualError(t, val.Errors()[0], "secrets: error loading secret into key 'session.secret': it's already defined in other configuration sources")

	assert.Equal(t, "example_secret value", config.JWTSecret)
	assert.Equal(t, "example_secret value", config.Session.Secret)
	assert.Equal(t, "an env storage mysql password", config.Storage.MySQL.Password)
	assert.Equal(t, "an env authentication backend ldap password", config.AuthenticationBackend.LDAP.Password)
}

func TestShouldRaiseIOErrOnUnreadableFile(t *testing.T) {
	if runtime.GOOS == constWindows {
		t.Skip("skipping test due to being on windows")
	}

	testReset()

	dir, err := ioutil.TempDir("", "authelia-conf")
	assert.NoError(t, err)

	assert.NoError(t, os.WriteFile(filepath.Join(dir, "myconf.yml"), []byte("server:\n port: 9091\n"), 0000))

	cfg := filepath.Join(dir, "myconf.yml")

	val := schema.NewStructValidator()
	_, _, err = Load(val, NewYAMLFileSource(cfg))

	assert.NoError(t, err)
	require.Len(t, val.Errors(), 1)
	assert.Len(t, val.Warnings(), 0)
	assert.EqualError(t, val.Errors()[0], fmt.Sprintf("failed to load configuration from yaml file(%s) source: open %s: permission denied", cfg, cfg))
}

func TestShouldValidateConfigurationWithEnvSecrets(t *testing.T) {
	testReset()

	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"SESSION_SECRET_FILE", "./test_resources/example_secret"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"STORAGE_MYSQL_PASSWORD_FILE", "./test_resources/example_secret"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"JWT_SECRET_FILE", "./test_resources/example_secret"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE", "./test_resources/example_secret"))

	val := schema.NewStructValidator()
	_, config, err := Load(val, NewDefaultSources([]string{"./test_resources/config.yml"}, DefaultEnvPrefix, DefaultEnvDelimiter)...)

	assert.NoError(t, err)
	assert.Len(t, val.Errors(), 0)
	assert.Len(t, val.Warnings(), 0)

	assert.Equal(t, "example_secret value", config.JWTSecret)
	assert.Equal(t, "example_secret value", config.Session.Secret)
	assert.Equal(t, "example_secret value", config.AuthenticationBackend.LDAP.Password)
	assert.Equal(t, "example_secret value", config.Storage.MySQL.Password)
}

func TestShouldValidateAndRaiseErrorsOnBadConfiguration(t *testing.T) {
	testReset()

	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"SESSION_SECRET", "abc"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"STORAGE_MYSQL_PASSWORD", "abc"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"JWT_SECRET", "abc"))
	assert.NoError(t, os.Setenv(DefaultEnvPrefix+"AUTHENTICATION_BACKEND_LDAP_PASSWORD", "abc"))

	val := schema.NewStructValidator()
	keys, _, err := Load(val, NewDefaultSources([]string{"./test_resources/config_bad_keys.yml"}, DefaultEnvPrefix, DefaultEnvDelimiter)...)

	assert.NoError(t, err)

	validator.ValidateKeys(keys, DefaultEnvPrefix, val)

	require.Len(t, val.Errors(), 2)
	assert.Len(t, val.Warnings(), 0)

	assert.EqualError(t, val.Errors()[0], "configuration key not expected: loggy_file")
	assert.EqualError(t, val.Errors()[1], "invalid configuration key 'logs_level' was replaced by 'log.level'")
}

func TestShouldNotReadConfigurationOnFSAccessDenied(t *testing.T) {
	if runtime.GOOS == constWindows {
		t.Skip("skipping test due to being on windows")
	}

	testReset()

	dir, err := ioutil.TempDir("", "authelia-config")
	assert.NoError(t, err)

	cfg := filepath.Join(dir, "config.yml")
	assert.NoError(t, testCreateFile(filepath.Join(dir, "config.yml"), "port: 9091\n", 0000))

	val := schema.NewStructValidator()
	_, _, err = Load(val, NewYAMLFileSource(cfg))

	assert.NoError(t, err)
	require.Len(t, val.Errors(), 1)

	assert.EqualError(t, val.Errors()[0], fmt.Sprintf("failed to load configuration from yaml file(%s) source: open %s: permission denied", cfg, cfg))
}

func TestShouldNotLoadDirectoryConfiguration(t *testing.T) {
	testReset()

	dir, err := ioutil.TempDir("", "authelia-config")
	assert.NoError(t, err)

	val := schema.NewStructValidator()
	_, _, err = Load(val, NewYAMLFileSource(dir))

	assert.NoError(t, err)
	require.Len(t, val.Errors(), 1)
	assert.Len(t, val.Warnings(), 0)

	expectedErr := fmt.Sprintf(utils.GetExpectedErrTxt("yamlisdir"), dir)
	assert.EqualError(t, val.Errors()[0], fmt.Sprintf("failed to load configuration from yaml file(%s) source: %s", dir, expectedErr))
}
|
|
||||||
|
func testReset() {
|
||||||
|
testUnsetEnvName("STORAGE_MYSQL")
|
||||||
|
testUnsetEnvName("JWT_SECRET")
|
||||||
|
testUnsetEnvName("DUO_API_SECRET_KEY")
|
||||||
|
testUnsetEnvName("SESSION_SECRET")
|
||||||
|
testUnsetEnvName("AUTHENTICATION_BACKEND_LDAP_PASSWORD")
|
||||||
|
testUnsetEnvName("AUTHENTICATION_BACKEND_LDAP_URL")
|
||||||
|
testUnsetEnvName("NOTIFIER_SMTP_PASSWORD")
|
||||||
|
testUnsetEnvName("SESSION_REDIS_PASSWORD")
|
||||||
|
testUnsetEnvName("SESSION_REDIS_HIGH_AVAILABILITY_SENTINEL_PASSWORD")
|
||||||
|
testUnsetEnvName("STORAGE_MYSQL_PASSWORD")
|
||||||
|
testUnsetEnvName("STORAGE_POSTGRES_PASSWORD")
|
||||||
|
testUnsetEnvName("TLS_KEY")
|
||||||
|
testUnsetEnvName("PORT")
|
||||||
|
testUnsetEnvName("IDENTITY_PROVIDERS_OIDC_ISSUER_PRIVATE_KEY")
|
||||||
|
testUnsetEnvName("IDENTITY_PROVIDERS_OIDC_HMAC_SECRET")
|
||||||
|
}
|
||||||
|
|
||||||
|
func testUnsetEnvName(name string) {
|
||||||
|
_ = os.Unsetenv(DefaultEnvPrefix + name)
|
||||||
|
_ = os.Unsetenv(DefaultEnvPrefix + name + constSecretSuffix)
|
||||||
|
}
|
||||||
|
|
||||||
|
func testCreateFile(path, value string, perm os.FileMode) (err error) {
|
||||||
|
return os.WriteFile(path, []byte(value), perm)
|
||||||
|
}
|
|
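The tests above rely on a naming convention rather than an explicit mapping: a secret can be supplied through an environment variable whose name is the configuration key upper-cased, underscore-delimited, prefixed, and suffixed with _FILE, with the value being a path to a file containing the secret. The snippet below is a minimal, hypothetical sketch of that lookup assuming the AUTHELIA_ prefix; the real logic lives in unexported helpers (getSecretConfigMap and the koanf callbacks) referenced elsewhere in this commit but not shown in this diff.

package main

import (
    "fmt"
    "strings"
)

// secretEnvName builds the expected *_FILE environment variable name for a
// configuration key, e.g. "storage.mysql.password" -> "AUTHELIA_STORAGE_MYSQL_PASSWORD_FILE".
func secretEnvName(prefix, key string) string {
    return prefix + strings.ToUpper(strings.ReplaceAll(key, ".", "_")) + "_FILE"
}

func main() {
    // A few known keys; in Authelia the full list comes from validator.ValidKeys.
    keys := []string{"jwt_secret", "session.secret", "storage.mysql.password"}

    lookup := make(map[string]string, len(keys))
    for _, key := range keys {
        lookup[secretEnvName("AUTHELIA_", key)] = key
    }

    // The env name resolves back to the key the secret file contents are loaded into.
    fmt.Println(lookup["AUTHELIA_SESSION_SECRET_FILE"])         // session.secret
    fmt.Println(lookup["AUTHELIA_STORAGE_MYSQL_PASSWORD_FILE"]) // storage.mysql.password
}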
@ -1,99 +0,0 @@
package configuration

import (
    _ "embed" // Embed config.template.yml.
    "errors"
    "fmt"
    "io/ioutil"
    "os"
    "strings"

    "github.com/spf13/viper"
    "gopkg.in/yaml.v2"

    "github.com/authelia/authelia/internal/configuration/schema"
    "github.com/authelia/authelia/internal/configuration/validator"
    "github.com/authelia/authelia/internal/logging"
)

// Read a YAML configuration and create a Configuration object out of it.
func Read(configPath string) (*schema.Configuration, []error) {
    logger := logging.Logger()

    if configPath == "" {
        return nil, []error{errors.New("No config file path provided")}
    }

    _, err := os.Stat(configPath)
    if err != nil {
        errs := []error{
            fmt.Errorf("Unable to find config file: %v", configPath),
            fmt.Errorf("Generating config file: %v", configPath),
        }

        err = generateConfigFromTemplate(configPath)
        if err != nil {
            errs = append(errs, err)
        } else {
            errs = append(errs, fmt.Errorf("Generated configuration at: %v", configPath))
        }

        return nil, errs
    }

    file, err := ioutil.ReadFile(configPath)
    if err != nil {
        return nil, []error{fmt.Errorf("Failed to %v", err)}
    }

    var data interface{}

    err = yaml.Unmarshal(file, &data)
    if err != nil {
        return nil, []error{fmt.Errorf("Error malformed %v", err)}
    }

    viper.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))

    // Dynamically load the secret env names from the SecretNames map.
    for _, secretName := range validator.SecretNames {
        _ = viper.BindEnv(validator.SecretNameToEnvName(secretName))
    }

    viper.SetConfigFile(configPath)

    _ = viper.ReadInConfig()

    var configuration schema.Configuration

    viper.Unmarshal(&configuration) //nolint:errcheck // TODO: Legacy code, consider refactoring time permitting.

    val := schema.NewStructValidator()
    validator.ValidateSecrets(&configuration, val, viper.GetViper())
    validator.ValidateConfiguration(&configuration, val)
    validator.ValidateKeys(val, viper.AllKeys())

    if val.HasErrors() {
        return nil, val.Errors()
    }

    if val.HasWarnings() {
        for _, warn := range val.Warnings() {
            logger.Warnf(warn.Error())
        }
    }

    return &configuration, nil
}

//go:embed config.template.yml
var cfg []byte

func generateConfigFromTemplate(configPath string) error {
    err := ioutil.WriteFile(configPath, cfg, 0600)
    if err != nil {
        return fmt.Errorf("Unable to generate %v: %v", configPath, err)
    }

    return nil
}
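The file above is the legacy viper-based reader this commit deletes. For contrast, here is a minimal sketch of how a caller inside this repository moves from the old single Read call to the new source-based Load; the error handling is illustrative, and only the call shapes are taken from the tests in this diff.

package main

import (
    "log"

    "github.com/authelia/authelia/internal/configuration"
    "github.com/authelia/authelia/internal/configuration/schema"
    "github.com/authelia/authelia/internal/configuration/validator"
)

func main() {
    // Old (removed in this commit): parsing, secrets, and validation behind one call.
    // config, errs := configuration.Read("./configuration.yml")

    // New: explicit sources (YAML files, environment, secrets) merged in order,
    // with problems collected on a StructValidator instead of returned directly.
    val := schema.NewStructValidator()

    keys, config, err := configuration.Load(val, configuration.NewDefaultSources(
        []string{"./configuration.yml"},
        configuration.DefaultEnvPrefix,
        configuration.DefaultEnvDelimiter)...)
    if err != nil {
        log.Fatal(err)
    }

    // Key validation is now a separate, explicit step.
    validator.ValidateKeys(keys, configuration.DefaultEnvPrefix, val)

    for _, e := range val.Errors() {
        log.Println(e)
    }

    _ = config
}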
@ -1,282 +0,0 @@
package configuration

import (
    "io/ioutil"
    "os"
    "path"
    "runtime"
    "sort"
    "testing"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"

    "github.com/authelia/authelia/internal/authentication"
    "github.com/authelia/authelia/internal/utils"
)

func createTestingTempFile(t *testing.T, dir, name, content string) {
    err := ioutil.WriteFile(path.Join(dir, name), []byte(content), 0600)
    require.NoError(t, err)
}

func resetEnv() {
    _ = os.Unsetenv("AUTHELIA_JWT_SECRET_FILE")
    _ = os.Unsetenv("AUTHELIA_DUO_API_SECRET_KEY_FILE")
    _ = os.Unsetenv("AUTHELIA_SESSION_SECRET_FILE")
    _ = os.Unsetenv("AUTHELIA_SESSION_SECRET_FILE")
    _ = os.Unsetenv("AUTHELIA_AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE")
    _ = os.Unsetenv("AUTHELIA_NOTIFIER_SMTP_PASSWORD_FILE")
    _ = os.Unsetenv("AUTHELIA_SESSION_REDIS_PASSWORD_FILE")
    _ = os.Unsetenv("AUTHELIA_SESSION_REDIS_HIGH_AVAILABILITY_SENTINEL_PASSWORD_FILE")
    _ = os.Unsetenv("AUTHELIA_STORAGE_MYSQL_PASSWORD_FILE")
    _ = os.Unsetenv("AUTHELIA_STORAGE_POSTGRES_PASSWORD_FILE")
}

func setupEnv(t *testing.T) string {
    resetEnv()

    dirEnv := os.Getenv("AUTHELIA_TESTING_DIR")
    if dirEnv != "" {
        return dirEnv
    }

    dir := "/tmp/authelia" + utils.RandomString(10, authentication.HashingPossibleSaltCharacters) + "/"
    err := os.MkdirAll(dir, 0700)
    require.NoError(t, err)

    createTestingTempFile(t, dir, "jwt", "secret_from_env")
    createTestingTempFile(t, dir, "duo", "duo_secret_from_env")
    createTestingTempFile(t, dir, "session", "session_secret_from_env")
    createTestingTempFile(t, dir, "authentication", "ldap_secret_from_env")
    createTestingTempFile(t, dir, "notifier", "smtp_secret_from_env")
    createTestingTempFile(t, dir, "redis", "redis_secret_from_env")
    createTestingTempFile(t, dir, "redis-sentinel", "redis-sentinel_secret_from_env")
    createTestingTempFile(t, dir, "mysql", "mysql_secret_from_env")
    createTestingTempFile(t, dir, "postgres", "postgres_secret_from_env")

    require.NoError(t, os.Setenv("AUTHELIA_TESTING_DIR", dir))

    return dir
}

func TestShouldErrorNoConfigPath(t *testing.T) {
    _, errors := Read("")

    require.Len(t, errors, 1)

    require.EqualError(t, errors[0], "No config file path provided")
}

func TestShouldErrorSecretNotExist(t *testing.T) {
    dir := "/path/not/exist"

    require.NoError(t, os.Setenv("AUTHELIA_JWT_SECRET_FILE", dir+"jwt"))
    require.NoError(t, os.Setenv("AUTHELIA_DUO_API_SECRET_KEY_FILE", dir+"duo"))
    require.NoError(t, os.Setenv("AUTHELIA_SESSION_SECRET_FILE", dir+"session"))
    require.NoError(t, os.Setenv("AUTHELIA_AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE", dir+"authentication"))
    require.NoError(t, os.Setenv("AUTHELIA_NOTIFIER_SMTP_PASSWORD_FILE", dir+"notifier"))
    require.NoError(t, os.Setenv("AUTHELIA_SESSION_REDIS_PASSWORD_FILE", dir+"redis"))
    require.NoError(t, os.Setenv("AUTHELIA_SESSION_REDIS_HIGH_AVAILABILITY_SENTINEL_PASSWORD_FILE", dir+"redis-sentinel"))
    require.NoError(t, os.Setenv("AUTHELIA_STORAGE_MYSQL_PASSWORD_FILE", dir+"mysql"))
    require.NoError(t, os.Setenv("AUTHELIA_STORAGE_POSTGRES_PASSWORD_FILE", dir+"postgres"))

    _, errors := Read("./test_resources/config.yml")

    require.Len(t, errors, 12)

    if runtime.GOOS == windows {
        assert.EqualError(t, errors[0], "error loading secret file (jwt_secret): open /path/not/existjwt: The system cannot find the path specified.")
        assert.EqualError(t, errors[1], "error loading secret file (session.secret): open /path/not/existsession: The system cannot find the path specified.")
        assert.EqualError(t, errors[2], "error loading secret file (duo_api.secret_key): open /path/not/existduo: The system cannot find the path specified.")
        assert.EqualError(t, errors[3], "error loading secret file (session.redis.password): open /path/not/existredis: The system cannot find the path specified.")
        assert.EqualError(t, errors[4], "error loading secret file (session.redis.high_availability.sentinel_password): open /path/not/existredis-sentinel: The system cannot find the path specified.")
        assert.EqualError(t, errors[5], "error loading secret file (authentication_backend.ldap.password): open /path/not/existauthentication: The system cannot find the path specified.")
        assert.EqualError(t, errors[6], "error loading secret file (notifier.smtp.password): open /path/not/existnotifier: The system cannot find the path specified.")
        assert.EqualError(t, errors[7], "error loading secret file (storage.mysql.password): open /path/not/existmysql: The system cannot find the path specified.")
    } else {
        assert.EqualError(t, errors[0], "error loading secret file (jwt_secret): open /path/not/existjwt: no such file or directory")
        assert.EqualError(t, errors[1], "error loading secret file (session.secret): open /path/not/existsession: no such file or directory")
        assert.EqualError(t, errors[2], "error loading secret file (duo_api.secret_key): open /path/not/existduo: no such file or directory")
        assert.EqualError(t, errors[3], "error loading secret file (session.redis.password): open /path/not/existredis: no such file or directory")
        assert.EqualError(t, errors[4], "error loading secret file (session.redis.high_availability.sentinel_password): open /path/not/existredis-sentinel: no such file or directory")
        assert.EqualError(t, errors[5], "error loading secret file (authentication_backend.ldap.password): open /path/not/existauthentication: no such file or directory")
        assert.EqualError(t, errors[6], "error loading secret file (notifier.smtp.password): open /path/not/existnotifier: no such file or directory")
        assert.EqualError(t, errors[7], "error loading secret file (storage.mysql.password): open /path/not/existmysql: no such file or directory")
    }

    assert.EqualError(t, errors[8], "Provide a JWT secret using \"jwt_secret\" key")
    assert.EqualError(t, errors[9], "Please provide a password to connect to the LDAP server")
    assert.EqualError(t, errors[10], "The session secret must be set when using the redis sentinel session provider")
    assert.EqualError(t, errors[11], "the SQL username and password must be provided")
}

func TestShouldErrorPermissionsOnLocalFS(t *testing.T) {
    if runtime.GOOS == windows {
        t.Skip("skipping test due to being on windows")
    }

    resetEnv()

    _ = os.Mkdir("/tmp/noperms/", 0000)
    _, errors := Read("/tmp/noperms/configuration.yml")

    require.Len(t, errors, 3)

    require.EqualError(t, errors[0], "Unable to find config file: /tmp/noperms/configuration.yml")
    require.EqualError(t, errors[1], "Generating config file: /tmp/noperms/configuration.yml")
    require.EqualError(t, errors[2], "Unable to generate /tmp/noperms/configuration.yml: open /tmp/noperms/configuration.yml: permission denied")
}

func TestShouldErrorAndGenerateConfigFile(t *testing.T) {
    _, errors := Read("./nonexistent.yml")
    _ = os.Remove("./nonexistent.yml")

    require.Len(t, errors, 3)

    require.EqualError(t, errors[0], "Unable to find config file: ./nonexistent.yml")
    require.EqualError(t, errors[1], "Generating config file: ./nonexistent.yml")
    require.EqualError(t, errors[2], "Generated configuration at: ./nonexistent.yml")
}

func TestShouldErrorPermissionsConfigFile(t *testing.T) {
    resetEnv()

    _ = ioutil.WriteFile("/tmp/authelia/permissions.yml", []byte{}, 0000) // nolint:gosec
    _, errors := Read("/tmp/authelia/permissions.yml")

    if runtime.GOOS == windows {
        require.Len(t, errors, 5)
        assert.EqualError(t, errors[0], "Provide a JWT secret using \"jwt_secret\" key")
        assert.EqualError(t, errors[1], "Please provide `ldap` or `file` object in `authentication_backend`")
        assert.EqualError(t, errors[2], "Set domain of the session object")
        assert.EqualError(t, errors[3], "A storage configuration must be provided. It could be 'local', 'mysql' or 'postgres'")
        assert.EqualError(t, errors[4], "A notifier configuration must be provided")
    } else {
        require.Len(t, errors, 1)

        assert.EqualError(t, errors[0], "Failed to open /tmp/authelia/permissions.yml: permission denied")
    }
}

func TestShouldErrorParseBadConfigFile(t *testing.T) {
    _, errors := Read("./test_resources/config_bad_quoting.yml")

    require.Len(t, errors, 1)

    require.EqualError(t, errors[0], "Error malformed yaml: line 27: did not find expected alphabetic or numeric character")
}

func TestShouldParseConfigFile(t *testing.T) {
    dir := setupEnv(t)

    require.NoError(t, os.Setenv("AUTHELIA_JWT_SECRET_FILE", dir+"jwt"))
    require.NoError(t, os.Setenv("AUTHELIA_DUO_API_SECRET_KEY_FILE", dir+"duo"))
    require.NoError(t, os.Setenv("AUTHELIA_SESSION_SECRET_FILE", dir+"session"))
    require.NoError(t, os.Setenv("AUTHELIA_AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE", dir+"authentication"))
    require.NoError(t, os.Setenv("AUTHELIA_NOTIFIER_SMTP_PASSWORD_FILE", dir+"notifier"))
    require.NoError(t, os.Setenv("AUTHELIA_SESSION_REDIS_PASSWORD_FILE", dir+"redis"))
    require.NoError(t, os.Setenv("AUTHELIA_SESSION_REDIS_HIGH_AVAILABILITY_SENTINEL_PASSWORD_FILE", dir+"redis-sentinel"))
    require.NoError(t, os.Setenv("AUTHELIA_STORAGE_MYSQL_PASSWORD_FILE", dir+"mysql"))
    require.NoError(t, os.Setenv("AUTHELIA_STORAGE_POSTGRES_PASSWORD_FILE", dir+"postgres"))

    config, errors := Read("./test_resources/config.yml")

    require.Len(t, errors, 0)

    assert.Equal(t, 9091, config.Server.Port)
    assert.Equal(t, "debug", config.Logging.Level)
    assert.Equal(t, "https://home.example.com:8080/", config.DefaultRedirectionURL)
    assert.Equal(t, "authelia.com", config.TOTP.Issuer)
    assert.Equal(t, "secret_from_env", config.JWTSecret)

    assert.Equal(t, "api-123456789.example.com", config.DuoAPI.Hostname)
    assert.Equal(t, "ABCDEF", config.DuoAPI.IntegrationKey)
    assert.Equal(t, "duo_secret_from_env", config.DuoAPI.SecretKey)

    assert.Equal(t, "session_secret_from_env", config.Session.Secret)
    assert.Equal(t, "ldap_secret_from_env", config.AuthenticationBackend.LDAP.Password)
    assert.Equal(t, "smtp_secret_from_env", config.Notifier.SMTP.Password)
    assert.Equal(t, "redis_secret_from_env", config.Session.Redis.Password)
    assert.Equal(t, "redis-sentinel_secret_from_env", config.Session.Redis.HighAvailability.SentinelPassword)
    assert.Equal(t, "mysql_secret_from_env", config.Storage.MySQL.Password)

    assert.Equal(t, "deny", config.AccessControl.DefaultPolicy)
    assert.Len(t, config.AccessControl.Rules, 12)

    require.NotNil(t, config.Session)
    require.NotNil(t, config.Session.Redis)
    require.NotNil(t, config.Session.Redis.HighAvailability)
}

func TestShouldParseAltConfigFile(t *testing.T) {
    dir := setupEnv(t)

    require.NoError(t, os.Setenv("AUTHELIA_STORAGE_POSTGRES_PASSWORD_FILE", dir+"postgres"))
    require.NoError(t, os.Setenv("AUTHELIA_AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE", dir+"authentication"))
    require.NoError(t, os.Setenv("AUTHELIA_JWT_SECRET_FILE", dir+"jwt"))
    require.NoError(t, os.Setenv("AUTHELIA_SESSION_SECRET_FILE", dir+"session"))

    config, errors := Read("./test_resources/config_alt.yml")
    require.Len(t, errors, 0)

    assert.Equal(t, 9091, config.Server.Port)
    assert.Equal(t, "debug", config.Logging.Level)
    assert.Equal(t, "https://home.example.com:8080/", config.DefaultRedirectionURL)
    assert.Equal(t, "authelia.com", config.TOTP.Issuer)
    assert.Equal(t, "secret_from_env", config.JWTSecret)

    assert.Equal(t, "api-123456789.example.com", config.DuoAPI.Hostname)
    assert.Equal(t, "ABCDEF", config.DuoAPI.IntegrationKey)
    assert.Equal(t, "postgres_secret_from_env", config.Storage.PostgreSQL.Password)

    assert.Equal(t, "deny", config.AccessControl.DefaultPolicy)
    assert.Len(t, config.AccessControl.Rules, 12)
}

func TestShouldNotParseConfigFileWithOldOrUnexpectedKeys(t *testing.T) {
    dir := setupEnv(t)

    require.NoError(t, os.Setenv("AUTHELIA_JWT_SECRET_FILE", dir+"jwt"))
    require.NoError(t, os.Setenv("AUTHELIA_DUO_API_SECRET_KEY_FILE", dir+"duo"))
    require.NoError(t, os.Setenv("AUTHELIA_SESSION_SECRET_FILE", dir+"session"))
    require.NoError(t, os.Setenv("AUTHELIA_AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE", dir+"authentication"))
    require.NoError(t, os.Setenv("AUTHELIA_NOTIFIER_SMTP_PASSWORD_FILE", dir+"notifier"))
    require.NoError(t, os.Setenv("AUTHELIA_SESSION_REDIS_PASSWORD_FILE", dir+"redis"))
    require.NoError(t, os.Setenv("AUTHELIA_STORAGE_MYSQL_PASSWORD_FILE", dir+"mysql"))

    _, errors := Read("./test_resources/config_bad_keys.yml")
    require.Len(t, errors, 2)

    // Sort error slice to prevent shenanigans that somehow occur
    sort.Slice(errors, func(i, j int) bool {
        return errors[i].Error() < errors[j].Error()
    })
    assert.EqualError(t, errors[0], "config key not expected: loggy_file")
    assert.EqualError(t, errors[1], "invalid configuration key 'logs_level' was replaced by 'log.level'")
}

func TestShouldValidateConfigurationTemplate(t *testing.T) {
    resetEnv()

    _, errors := Read("../../config.template.yml")
    assert.Len(t, errors, 0)
}

func TestShouldOnlyAllowEnvOrConfig(t *testing.T) {
    dir := setupEnv(t)

    resetEnv()
    require.NoError(t, os.Setenv("AUTHELIA_JWT_SECRET_FILE", dir+"jwt"))
    require.NoError(t, os.Setenv("AUTHELIA_DUO_API_SECRET_KEY_FILE", dir+"duo"))
    require.NoError(t, os.Setenv("AUTHELIA_SESSION_SECRET_FILE", dir+"session"))
    require.NoError(t, os.Setenv("AUTHELIA_AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE", dir+"authentication"))
    require.NoError(t, os.Setenv("AUTHELIA_NOTIFIER_SMTP_PASSWORD_FILE", dir+"notifier"))
    require.NoError(t, os.Setenv("AUTHELIA_SESSION_REDIS_PASSWORD_FILE", dir+"redis"))
    require.NoError(t, os.Setenv("AUTHELIA_STORAGE_MYSQL_PASSWORD_FILE", dir+"mysql"))

    _, errors := Read("./test_resources/config_with_secret.yml")

    require.Len(t, errors, 1)
    require.EqualError(t, errors[0], "error loading secret (jwt_secret): it's already defined in the config file")
}
@ -2,25 +2,25 @@ package schema

// AccessControlConfiguration represents the configuration related to ACLs.
type AccessControlConfiguration struct {
    DefaultPolicy string `mapstructure:"default_policy"`
    DefaultPolicy string `koanf:"default_policy"`
    Networks []ACLNetwork `mapstructure:"networks"`
    Networks []ACLNetwork `koanf:"networks"`
    Rules []ACLRule `mapstructure:"rules"`
    Rules []ACLRule `koanf:"rules"`
}

// ACLNetwork represents one ACL network group entry; "weak" coerces a single value into slice.
type ACLNetwork struct {
    Name string `mapstructure:"name"`
    Name string `koanf:"name"`
    Networks []string `mapstructure:"networks"`
    Networks []string `koanf:"networks"`
}

// ACLRule represents one ACL rule entry; "weak" coerces a single value into slice.
type ACLRule struct {
    Domains []string `mapstructure:"domain,weak"`
    Domains []string `koanf:"domain"`
    Policy string `mapstructure:"policy"`
    Policy string `koanf:"policy"`
    Subjects [][]string `mapstructure:"subject,weak"`
    Subjects [][]string `koanf:"subject"`
    Networks []string `mapstructure:"networks"`
    Networks []string `koanf:"networks"`
    Resources []string `mapstructure:"resources"`
    Resources []string `koanf:"resources"`
    Methods []string `mapstructure:"methods"`
    Methods []string `koanf:"methods"`
}

// DefaultACLNetwork represents the default configuration related to access control network group configuration.
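Because these structs now carry koanf tags, a YAML access control block decodes into them directly via koanf's Unmarshal, which matches fields by the koanf tag. A minimal standalone sketch, assuming the pre-v2 koanf import paths used elsewhere in this commit and trimming the structs to two fields:

package main

import (
    "fmt"
    "log"

    "github.com/knadh/koanf"
    "github.com/knadh/koanf/parsers/yaml"
    "github.com/knadh/koanf/providers/rawbytes"
)

type ACLRule struct {
    Domains []string `koanf:"domain"`
    Policy  string   `koanf:"policy"`
}

type AccessControl struct {
    DefaultPolicy string    `koanf:"default_policy"`
    Rules         []ACLRule `koanf:"rules"`
}

func main() {
    raw := []byte(`
access_control:
  default_policy: deny
  rules:
    - domain: [public.example.com]
      policy: bypass
`)

    k := koanf.New(".")
    if err := k.Load(rawbytes.Provider(raw), yaml.Parser()); err != nil {
        log.Fatal(err)
    }

    var acl AccessControl
    if err := k.Unmarshal("access_control", &acl); err != nil {
        log.Fatal(err)
    }

    fmt.Printf("%+v\n", acl) // {DefaultPolicy:deny Rules:[{Domains:[public.example.com] Policy:bypass}]}
}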
@ -2,45 +2,45 @@ package schema

// LDAPAuthenticationBackendConfiguration represents the configuration related to LDAP server.
type LDAPAuthenticationBackendConfiguration struct {
    Implementation string `mapstructure:"implementation"`
    Implementation string `koanf:"implementation"`
    URL string `mapstructure:"url"`
    URL string `koanf:"url"`
    BaseDN string `mapstructure:"base_dn"`
    BaseDN string `koanf:"base_dn"`
    AdditionalUsersDN string `mapstructure:"additional_users_dn"`
    AdditionalUsersDN string `koanf:"additional_users_dn"`
    UsersFilter string `mapstructure:"users_filter"`
    UsersFilter string `koanf:"users_filter"`
    AdditionalGroupsDN string `mapstructure:"additional_groups_dn"`
    AdditionalGroupsDN string `koanf:"additional_groups_dn"`
    GroupsFilter string `mapstructure:"groups_filter"`
    GroupsFilter string `koanf:"groups_filter"`
    GroupNameAttribute string `mapstructure:"group_name_attribute"`
    GroupNameAttribute string `koanf:"group_name_attribute"`
    UsernameAttribute string `mapstructure:"username_attribute"`
    UsernameAttribute string `koanf:"username_attribute"`
    MailAttribute string `mapstructure:"mail_attribute"`
    MailAttribute string `koanf:"mail_attribute"`
    DisplayNameAttribute string `mapstructure:"display_name_attribute"`
    DisplayNameAttribute string `koanf:"display_name_attribute"`
    User string `mapstructure:"user"`
    User string `koanf:"user"`
    Password string `mapstructure:"password"`
    Password string `koanf:"password"`
    StartTLS bool `mapstructure:"start_tls"`
    StartTLS bool `koanf:"start_tls"`
    TLS *TLSConfig `mapstructure:"tls"`
    TLS *TLSConfig `koanf:"tls"`
}

// FileAuthenticationBackendConfiguration represents the configuration related to file-based backend.
type FileAuthenticationBackendConfiguration struct {
    Path string `mapstructure:"path"`
    Path string `koanf:"path"`
    Password *PasswordConfiguration `mapstructure:"password"`
    Password *PasswordConfiguration `koanf:"password"`
}

// PasswordConfiguration represents the configuration related to password hashing.
type PasswordConfiguration struct {
    Iterations int `mapstructure:"iterations"`
    Iterations int `koanf:"iterations"`
    KeyLength int `mapstructure:"key_length"`
    KeyLength int `koanf:"key_length"`
    SaltLength int `mapstructure:"salt_length"`
    SaltLength int `koanf:"salt_length"`
    Algorithm string `mapstrucutre:"algorithm"`
    Memory int `mapstructure:"memory"`
    Memory int `koanf:"memory"`
    Parallelism int `mapstructure:"parallelism"`
    Parallelism int `koanf:"parallelism"`
}

// AuthenticationBackendConfiguration represents the configuration related to the authentication backend.
type AuthenticationBackendConfiguration struct {
    DisableResetPassword bool `mapstructure:"disable_reset_password"`
    DisableResetPassword bool `koanf:"disable_reset_password"`
    RefreshInterval string `mapstructure:"refresh_interval"`
    RefreshInterval string `koanf:"refresh_interval"`
    LDAP *LDAPAuthenticationBackendConfiguration `mapstructure:"ldap"`
    LDAP *LDAPAuthenticationBackendConfiguration `koanf:"ldap"`
    File *FileAuthenticationBackendConfiguration `mapstructure:"file"`
    File *FileAuthenticationBackendConfiguration `koanf:"file"`
}

// DefaultPasswordConfiguration represents the default configuration related to Argon2id hashing.
@ -2,10 +2,10 @@ package schema

// Configuration object extracted from YAML configuration file.
type Configuration struct {
    Theme string `mapstructure:"theme"`
    Theme string `koanf:"theme"`
    CertificatesDirectory string `mapstructure:"certificates_directory"`
    CertificatesDirectory string `koanf:"certificates_directory"`
    JWTSecret string `mapstructure:"jwt_secret"`
    JWTSecret string `koanf:"jwt_secret"`
    DefaultRedirectionURL string `mapstructure:"default_redirection_url"`
    DefaultRedirectionURL string `koanf:"default_redirection_url"`

    Host string `koanf:"host"` // Deprecated: To be Removed. TODO: Remove in 4.33.0.
    Port int `koanf:"port"` // Deprecated: To be Removed. TODO: Remove in 4.33.0.

@ -15,15 +15,15 @@ type Configuration struct {
    LogFormat string `koanf:"log_format"` // Deprecated: To be Removed. TODO: Remove in 4.33.0.
    LogFilePath string `koanf:"log_file_path"` // Deprecated: To be Removed. TODO: Remove in 4.33.0.

    Logging LogConfiguration `mapstructure:"log"`
    Log LogConfiguration `koanf:"log"`
    IdentityProviders IdentityProvidersConfiguration `mapstructure:"identity_providers"`
    IdentityProviders IdentityProvidersConfiguration `koanf:"identity_providers"`
    AuthenticationBackend AuthenticationBackendConfiguration `mapstructure:"authentication_backend"`
    AuthenticationBackend AuthenticationBackendConfiguration `koanf:"authentication_backend"`
    Session SessionConfiguration `mapstructure:"session"`
    Session SessionConfiguration `koanf:"session"`
    TOTP *TOTPConfiguration `mapstructure:"totp"`
    TOTP *TOTPConfiguration `koanf:"totp"`
    DuoAPI *DuoAPIConfiguration `mapstructure:"duo_api"`
    DuoAPI *DuoAPIConfiguration `koanf:"duo_api"`
    AccessControl AccessControlConfiguration `mapstructure:"access_control"`
    AccessControl AccessControlConfiguration `koanf:"access_control"`
    Regulation *RegulationConfiguration `mapstructure:"regulation"`
    Regulation *RegulationConfiguration `koanf:"regulation"`
    Storage StorageConfiguration `mapstructure:"storage"`
    Storage StorageConfiguration `koanf:"storage"`
    Notifier *NotifierConfiguration `mapstructure:"notifier"`
    Notifier *NotifierConfiguration `koanf:"notifier"`
    Server ServerConfiguration `mapstructure:"server"`
    Server ServerConfiguration `koanf:"server"`
}
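One of the points of this commit is that koanf lets most of the keys above be set from the environment. A minimal sketch, assuming the AUTHELIA_ prefix and a simple lower-case/underscore-to-dot transform; the project's real callback is more involved because some key names themselves contain underscores (e.g. jwt_secret).

package main

import (
    "fmt"
    "log"
    "os"
    "strings"

    "github.com/knadh/koanf"
    "github.com/knadh/koanf/providers/env"
)

func main() {
    // Running with AUTHELIA_LOG_LEVEL=debug would surface as the key "log.level".
    _ = os.Setenv("AUTHELIA_LOG_LEVEL", "debug")

    k := koanf.New(".")

    err := k.Load(env.Provider("AUTHELIA_", ".", func(s string) string {
        // Naive transform for illustration only: strip the prefix, lower-case,
        // and treat every underscore as a delimiter.
        return strings.ReplaceAll(strings.ToLower(strings.TrimPrefix(s, "AUTHELIA_")), "_", ".")
    }), nil)
    if err != nil {
        log.Fatal(err)
    }

    fmt.Println(k.String("log.level")) // debug
}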
@ -2,7 +2,7 @@ package schema

// DuoAPIConfiguration represents the configuration related to Duo API.
type DuoAPIConfiguration struct {
    Hostname string `mapstructure:"hostname"`
    Hostname string `koanf:"hostname"`
    IntegrationKey string `mapstructure:"integration_key"`
    IntegrationKey string `koanf:"integration_key"`
    SecretKey string `mapstructure:"secret_key"`
    SecretKey string `koanf:"secret_key"`
}
@ -4,42 +4,43 @@ import "time"

// IdentityProvidersConfiguration represents the IdentityProviders 2.0 configuration for Authelia.
type IdentityProvidersConfiguration struct {
    OIDC *OpenIDConnectConfiguration `mapstructure:"oidc"`
    OIDC *OpenIDConnectConfiguration `koanf:"oidc"`
}

// OpenIDConnectConfiguration configuration for OpenID Connect.
type OpenIDConnectConfiguration struct {
    // This secret must be 32 bytes long
    HMACSecret string `mapstructure:"hmac_secret"`
    HMACSecret string `koanf:"hmac_secret"`
    IssuerPrivateKey string `mapstructure:"issuer_private_key"`
    IssuerPrivateKey string `koanf:"issuer_private_key"`

    AccessTokenLifespan time.Duration `mapstructure:"access_token_lifespan"`
    AccessTokenLifespan time.Duration `koanf:"access_token_lifespan"`
    AuthorizeCodeLifespan time.Duration `mapstructure:"authorize_code_lifespan"`
    AuthorizeCodeLifespan time.Duration `koanf:"authorize_code_lifespan"`
    IDTokenLifespan time.Duration `mapstructure:"id_token_lifespan"`
    IDTokenLifespan time.Duration `koanf:"id_token_lifespan"`
    RefreshTokenLifespan time.Duration `mapstructure:"refresh_token_lifespan"`
    RefreshTokenLifespan time.Duration `koanf:"refresh_token_lifespan"`
    EnableClientDebugMessages bool `mapstructure:"enable_client_debug_messages"`
    MinimumParameterEntropy int `mapstructure:"minimum_parameter_entropy"`
    Clients []OpenIDConnectClientConfiguration `mapstructure:"clients"`
    EnableClientDebugMessages bool `koanf:"enable_client_debug_messages"`
    MinimumParameterEntropy int `koanf:"minimum_parameter_entropy"`
    Clients []OpenIDConnectClientConfiguration `koanf:"clients"`
}

// OpenIDConnectClientConfiguration configuration for an OpenID Connect client.
type OpenIDConnectClientConfiguration struct {
    ID string `mapstructure:"id"`
    ID string `koanf:"id"`
    Description string `mapstructure:"description"`
    Description string `koanf:"description"`
    Secret string `mapstructure:"secret"`
    Secret string `koanf:"secret"`
    Public bool `mapstructure:"public"`
    Public bool `koanf:"public"`

    Policy string `mapstructure:"authorization_policy"`
    Policy string `koanf:"authorization_policy"`

    Audience []string `mapstructure:"audience"`
    Audience []string `koanf:"audience"`
    Scopes []string `mapstructure:"scopes"`
    Scopes []string `koanf:"scopes"`
    RedirectURIs []string `mapstructure:"redirect_uris"`
    RedirectURIs []string `koanf:"redirect_uris"`
    GrantTypes []string `mapstructure:"grant_types"`
    GrantTypes []string `koanf:"grant_types"`
    ResponseTypes []string `mapstructure:"response_types"`
    ResponseTypes []string `koanf:"response_types"`
    ResponseModes []string `mapstructure:"response_modes"`
    ResponseModes []string `koanf:"response_modes"`

    UserinfoSigningAlgorithm string `mapstructure:"userinfo_signing_algorithm"`
    UserinfoSigningAlgorithm string `koanf:"userinfo_signing_algorithm"`
}

// DefaultOpenIDConnectConfiguration contains defaults for OIDC.
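For orientation, here is an illustrative value for the client list above as it would look after unmarshalling; the identifier, secret, and URI are made up, and the struct is trimmed to a few fields.

package main

import "fmt"

type OIDCClient struct {
    ID           string   `koanf:"id"`
    Secret       string   `koanf:"secret"`
    Policy       string   `koanf:"authorization_policy"`
    Scopes       []string `koanf:"scopes"`
    RedirectURIs []string `koanf:"redirect_uris"`
}

func main() {
    // Hypothetical client purely for illustration of the field mapping.
    client := OIDCClient{
        ID:           "example-app",
        Secret:       "an_example_secret",
        Policy:       "two_factor",
        Scopes:       []string{"openid", "profile", "email"},
        RedirectURIs: []string{"https://app.example.com/callback"},
    }

    fmt.Printf("%+v\n", client)
}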
@ -2,10 +2,10 @@ package schema

// LogConfiguration represents the logging configuration.
type LogConfiguration struct {
    Level string `mapstructure:"level"`
    Level string `koanf:"level"`
    Format string `mapstructure:"format"`
    Format string `koanf:"format"`
    FilePath string `mapstructure:"file_path"`
    FilePath string `koanf:"file_path"`
    KeepStdout bool `mapstructure:"keep_stdout"`
    KeepStdout bool `koanf:"keep_stdout"`
}

// DefaultLoggingConfiguration is the default logging configuration.
@ -2,29 +2,29 @@ package schema

// FileSystemNotifierConfiguration represents the configuration of the notifier writing emails in a file.
type FileSystemNotifierConfiguration struct {
    Filename string `mapstructure:"filename"`
    Filename string `koanf:"filename"`
}

// SMTPNotifierConfiguration represents the configuration of the SMTP server to send emails with.
type SMTPNotifierConfiguration struct {
    Host string `mapstructure:"host"`
    Host string `koanf:"host"`
    Port int `mapstructure:"port"`
    Port int `koanf:"port"`
    Username string `mapstructure:"username"`
    Username string `koanf:"username"`
    Password string `mapstructure:"password"`
    Password string `koanf:"password"`
    Identifier string `mapstructure:"identifier"`
    Identifier string `koanf:"identifier"`
    Sender string `mapstructure:"sender"`
    Sender string `koanf:"sender"`
    Subject string `mapstructure:"subject"`
    Subject string `koanf:"subject"`
    StartupCheckAddress string `mapstructure:"startup_check_address"`
    StartupCheckAddress string `koanf:"startup_check_address"`
    DisableRequireTLS bool `mapstructure:"disable_require_tls"`
    DisableRequireTLS bool `koanf:"disable_require_tls"`
    DisableHTMLEmails bool `mapstructure:"disable_html_emails"`
    DisableHTMLEmails bool `koanf:"disable_html_emails"`
    TLS *TLSConfig `mapstructure:"tls"`
    TLS *TLSConfig `koanf:"tls"`
}

// NotifierConfiguration represents the configuration of the notifier to use when sending notifications to users.
type NotifierConfiguration struct {
    DisableStartupCheck bool `mapstructure:"disable_startup_check"`
    DisableStartupCheck bool `koanf:"disable_startup_check"`
    FileSystem *FileSystemNotifierConfiguration `mapstructure:"filesystem"`
    FileSystem *FileSystemNotifierConfiguration `koanf:"filesystem"`
    SMTP *SMTPNotifierConfiguration `mapstructure:"smtp"`
    SMTP *SMTPNotifierConfiguration `koanf:"smtp"`
}

// DefaultSMTPNotifierConfiguration represents default configuration parameters for the SMTP notifier.
@ -2,9 +2,9 @@ package schema

// RegulationConfiguration represents the configuration related to regulation.
type RegulationConfiguration struct {
    MaxRetries int `mapstructure:"max_retries"`
    MaxRetries int `koanf:"max_retries"`
    FindTime string `mapstructure:"find_time"`
    FindTime string `koanf:"find_time,weak"`
    BanTime string `mapstructure:"ban_time"`
    BanTime string `koanf:"ban_time,weak"`
}

// DefaultRegulationConfiguration represents default configuration parameters for the regulator.
@ -2,21 +2,21 @@ package schema

// ServerConfiguration represents the configuration of the http server.
type ServerConfiguration struct {
    Host string `mapstructure:"host"`
    Host string `koanf:"host"`
    Port int `mapstructure:"port"`
    Port int `koanf:"port"`
    Path string `mapstructure:"path"`
    Path string `koanf:"path"`
    ReadBufferSize int `mapstructure:"read_buffer_size"`
    ReadBufferSize int `koanf:"read_buffer_size"`
    WriteBufferSize int `mapstructure:"write_buffer_size"`
    WriteBufferSize int `koanf:"write_buffer_size"`
    EnablePprof bool `mapstructure:"enable_endpoint_pprof"`
    EnablePprof bool `koanf:"enable_endpoint_pprof"`
    EnableExpvars bool `mapstructure:"enable_endpoint_expvars"`
    EnableExpvars bool `koanf:"enable_endpoint_expvars"`

    TLS ServerTLSConfiguration `mapstructure:"tls"`
    TLS ServerTLSConfiguration `koanf:"tls"`
}

// ServerTLSConfiguration represents the configuration of the http servers TLS options.
type ServerTLSConfiguration struct {
    Certificate string `mapstructure:"certificate"`
    Certificate string `koanf:"certificate"`
    Key string `mapstructure:"key"`
    Key string `koanf:"key"`
}

// DefaultServerConfiguration represents the default values of the ServerConfiguration.
@ -2,42 +2,42 @@ package schema

// RedisNode Represents a Node.
type RedisNode struct {
    Host string `mapstructure:"host"`
    Host string `koanf:"host"`
    Port int `mapstructure:"port"`
    Port int `koanf:"port"`
}

// RedisHighAvailabilityConfiguration holds configuration variables for Redis Cluster/Sentinel.
type RedisHighAvailabilityConfiguration struct {
    SentinelName string `mapstructure:"sentinel_name"`
    SentinelName string `koanf:"sentinel_name"`
    SentinelPassword string `mapstructure:"sentinel_password"`
    SentinelPassword string `koanf:"sentinel_password"`
    Nodes []RedisNode `mapstructure:"nodes"`
    Nodes []RedisNode `koanf:"nodes"`
    RouteByLatency bool `mapstructure:"route_by_latency"`
    RouteByLatency bool `koanf:"route_by_latency"`
    RouteRandomly bool `mapstructure:"route_randomly"`
    RouteRandomly bool `koanf:"route_randomly"`
}

// RedisSessionConfiguration represents the configuration related to redis session store.
type RedisSessionConfiguration struct {
    Host string `mapstructure:"host"`
    Host string `koanf:"host"`
    Port int `mapstructure:"port"`
    Port int `koanf:"port"`
    Username string `mapstructure:"username"`
    Username string `koanf:"username"`
    Password string `mapstructure:"password"`
    Password string `koanf:"password"`
    DatabaseIndex int `mapstructure:"database_index"`
    DatabaseIndex int `koanf:"database_index"`
    MaximumActiveConnections int `mapstructure:"maximum_active_connections"`
    MaximumActiveConnections int `koanf:"maximum_active_connections"`
    MinimumIdleConnections int `mapstructure:"minimum_idle_connections"`
    MinimumIdleConnections int `koanf:"minimum_idle_connections"`
    TLS *TLSConfig `mapstructure:"tls"`
    TLS *TLSConfig `koanf:"tls"`
    HighAvailability *RedisHighAvailabilityConfiguration `mapstructure:"high_availability"`
    HighAvailability *RedisHighAvailabilityConfiguration `koanf:"high_availability"`
}

// SessionConfiguration represents the configuration related to user sessions.
type SessionConfiguration struct {
    Name string `mapstructure:"name"`
    Name string `koanf:"name"`
    Domain string `mapstructure:"domain"`
    Domain string `koanf:"domain"`
    SameSite string `mapstructure:"same_site"`
    SameSite string `koanf:"same_site"`
    Secret string `mapstructure:"secret"`
    Secret string `koanf:"secret"`
    Expiration string `mapstructure:"expiration"`
    Expiration string `koanf:"expiration"`
    Inactivity string `mapstructure:"inactivity"`
    Inactivity string `koanf:"inactivity"`
    RememberMeDuration string `mapstructure:"remember_me_duration"`
    RememberMeDuration string `koanf:"remember_me_duration"`
    Redis *RedisSessionConfiguration `mapstructure:"redis"`
    Redis *RedisSessionConfiguration `koanf:"redis"`
}

// DefaultSessionConfiguration is the default session configuration.
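The Redis and high-availability blocks are pointers, so they stay nil unless the corresponding keys are present, which is what the new tests assert with require.NotNil. A minimal sketch of that behaviour with trimmed structs, again assuming the pre-v2 koanf import paths used in this commit:

package main

import (
    "fmt"
    "log"

    "github.com/knadh/koanf"
    "github.com/knadh/koanf/parsers/yaml"
    "github.com/knadh/koanf/providers/rawbytes"
)

type Redis struct {
    Host string `koanf:"host"`
}

type Session struct {
    Name  string `koanf:"name"`
    Redis *Redis `koanf:"redis"`
}

// load parses a YAML snippet and unmarshals the "session" sub-tree.
func load(raw string) Session {
    k := koanf.New(".")
    if err := k.Load(rawbytes.Provider([]byte(raw)), yaml.Parser()); err != nil {
        log.Fatal(err)
    }

    var s Session
    if err := k.Unmarshal("session", &s); err != nil {
        log.Fatal(err)
    }

    return s
}

func main() {
    fmt.Println(load("session:\n  name: authelia_session\n").Redis == nil)                     // true
    fmt.Println(load("session:\n  name: authelia_session\n  redis:\n    host: redis\n").Redis) // &{redis}
}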
@ -2,7 +2,7 @@ package schema

// TLSConfig is a representation of the TLS configuration.
type TLSConfig struct {
    MinimumVersion string `mapstructure:"minimum_version"`
    MinimumVersion string `koanf:"minimum_version"`
    SkipVerify bool `mapstructure:"skip_verify"`
    SkipVerify bool `koanf:"skip_verify"`
    ServerName string `mapstructure:"server_name"`
    ServerName string `koanf:"server_name"`
}
@ -2,32 +2,32 @@ package schema

// LocalStorageConfiguration represents the configuration when using local storage.
type LocalStorageConfiguration struct {
    Path string `mapstructure:"path"`
    Path string `koanf:"path"`
}

// SQLStorageConfiguration represents the configuration of the SQL database.
type SQLStorageConfiguration struct {
    Host string `mapstructure:"host"`
    Host string `koanf:"host"`
    Port int `mapstructure:"port"`
    Port int `koanf:"port"`
    Database string `mapstructure:"database"`
    Database string `koanf:"database"`
    Username string `mapstructure:"username"`
    Username string `koanf:"username"`
    Password string `mapstructure:"password"`
    Password string `koanf:"password"`
}

// MySQLStorageConfiguration represents the configuration of a MySQL database.
type MySQLStorageConfiguration struct {
    SQLStorageConfiguration `mapstructure:",squash"`
    SQLStorageConfiguration `koanf:",squash"`
}

// PostgreSQLStorageConfiguration represents the configuration of a Postgres database.
type PostgreSQLStorageConfiguration struct {
    SQLStorageConfiguration `mapstructure:",squash"`
    SQLStorageConfiguration `koanf:",squash"`
    SSLMode string `mapstructure:"sslmode"`
    SSLMode string `koanf:"sslmode"`
}

// StorageConfiguration represents the configuration of the storage backend.
type StorageConfiguration struct {
    Local *LocalStorageConfiguration `mapstructure:"local"`
    Local *LocalStorageConfiguration `koanf:"local"`
    MySQL *MySQLStorageConfiguration `mapstructure:"mysql"`
    MySQL *MySQLStorageConfiguration `koanf:"mysql"`
    PostgreSQL *PostgreSQLStorageConfiguration `mapstructure:"postgres"`
    PostgreSQL *PostgreSQLStorageConfiguration `koanf:"postgres"`
}
@ -2,9 +2,9 @@ package schema

// TOTPConfiguration represents the configuration related to TOTP options.
type TOTPConfiguration struct {
    Issuer string `mapstructure:"issuer"`
    Issuer string `koanf:"issuer"`
    Period int `mapstructure:"period"`
    Period int `koanf:"period"`
    Skew *int `mapstructure:"skew"`
    Skew *int `koanf:"skew"`
}

var defaultOtpSkew = 1
126
internal/configuration/sources.go
Normal file
126
internal/configuration/sources.go
Normal file
|
@ -0,0 +1,126 @@
|
||||||
|
package configuration
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/knadh/koanf"
|
||||||
|
"github.com/knadh/koanf/parsers/yaml"
|
||||||
|
"github.com/knadh/koanf/providers/env"
|
||||||
|
"github.com/knadh/koanf/providers/file"
|
||||||
|
|
||||||
|
"github.com/authelia/authelia/internal/configuration/schema"
|
||||||
|
"github.com/authelia/authelia/internal/configuration/validator"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NewYAMLFileSource returns a Source configured to load from a specified YAML path. If there is an issue accessing this
|
||||||
|
// path it also returns an error.
|
||||||
|
func NewYAMLFileSource(path string) (source *YAMLFileSource) {
|
||||||
|
return &YAMLFileSource{
|
||||||
|
koanf: koanf.New(constDelimiter),
|
||||||
|
path: path,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewYAMLFileSources returns a slice of Source configured to load from specified YAML files.
|
||||||
|
func NewYAMLFileSources(paths []string) (sources []*YAMLFileSource) {
	for _, path := range paths {
		source := NewYAMLFileSource(path)

		sources = append(sources, source)
	}

	return sources
}

// Name of the Source.
func (s YAMLFileSource) Name() (name string) {
	return fmt.Sprintf("yaml file(%s)", s.path)
}

// Merge the YAMLFileSource koanf.Koanf into the provided one.
func (s *YAMLFileSource) Merge(ko *koanf.Koanf, _ *schema.StructValidator) (err error) {
	return ko.Merge(s.koanf)
}

// Load the Source into the YAMLFileSource koanf.Koanf.
func (s *YAMLFileSource) Load(_ *schema.StructValidator) (err error) {
	if s.path == "" {
		return errors.New("invalid yaml path source configuration")
	}

	return s.koanf.Load(file.Provider(s.path), yaml.Parser())
}

// NewEnvironmentSource returns a Source configured to load from environment variables.
func NewEnvironmentSource(prefix, delimiter string) (source *EnvironmentSource) {
	return &EnvironmentSource{
		koanf:     koanf.New(constDelimiter),
		prefix:    prefix,
		delimiter: delimiter,
	}
}

// Name of the Source.
func (s EnvironmentSource) Name() (name string) {
	return "environment"
}

// Merge the EnvironmentSource koanf.Koanf into the provided one.
func (s *EnvironmentSource) Merge(ko *koanf.Koanf, _ *schema.StructValidator) (err error) {
	return ko.Merge(s.koanf)
}

// Load the Source into the EnvironmentSource koanf.Koanf.
func (s *EnvironmentSource) Load(_ *schema.StructValidator) (err error) {
	keyMap, ignoredKeys := getEnvConfigMap(validator.ValidKeys, s.prefix, s.delimiter)

	return s.koanf.Load(env.ProviderWithValue(s.prefix, constDelimiter, koanfEnvironmentCallback(keyMap, ignoredKeys, s.prefix, s.delimiter)), nil)
}

// NewSecretsSource returns a Source configured to load from secrets.
func NewSecretsSource(prefix, delimiter string) (source *SecretsSource) {
	return &SecretsSource{
		koanf:     koanf.New(constDelimiter),
		prefix:    prefix,
		delimiter: delimiter,
	}
}

// Name of the Source.
func (s SecretsSource) Name() (name string) {
	return "secrets"
}

// Merge the SecretsSource koanf.Koanf into the provided one.
func (s *SecretsSource) Merge(ko *koanf.Koanf, val *schema.StructValidator) (err error) {
	for _, key := range s.koanf.Keys() {
		value, ok := ko.Get(key).(string)

		if ok && value != "" {
			val.Push(fmt.Errorf(errFmtSecretAlreadyDefined, key))
		}
	}

	return ko.Merge(s.koanf)
}

// Load the Source into the SecretsSource koanf.Koanf.
func (s *SecretsSource) Load(val *schema.StructValidator) (err error) {
	keyMap := getSecretConfigMap(validator.ValidKeys, s.prefix, s.delimiter)

	return s.koanf.Load(env.ProviderWithValue(s.prefix, constDelimiter, koanfEnvironmentSecretsCallback(keyMap, val)), nil)
}

// NewDefaultSources returns a slice of Source configured to load from specified YAML files.
func NewDefaultSources(filePaths []string, prefix, delimiter string) (sources []Source) {
	fileSources := NewYAMLFileSources(filePaths)
	for _, source := range fileSources {
		sources = append(sources, source)
	}

	sources = append(sources, NewEnvironmentSource(prefix, delimiter))
	sources = append(sources, NewSecretsSource(prefix, delimiter))

	return sources
}
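The constructors above are intended to be combined in order: YAML files first, then environment variables, then file-based secrets, with each later source merged over the earlier ones. The following is a minimal sketch of such a caller, assuming an "AUTHELIA_" prefix, a "_" delimiter and a "." koanf delimiter purely for illustration; the values and the loading loop actually used by the provider are not shown in this hunk.

package main

import (
	"fmt"

	"github.com/knadh/koanf"

	"github.com/authelia/authelia/internal/configuration"
	"github.com/authelia/authelia/internal/configuration/schema"
)

// loadAll is a hypothetical helper, not part of this change: it loads each
// default source and merges it into a single koanf.Koanf, collecting any
// problems in the struct validator rather than aborting.
func loadAll(paths []string) (*koanf.Koanf, *schema.StructValidator) {
	val := schema.NewStructValidator()
	ko := koanf.New(".")

	for _, source := range configuration.NewDefaultSources(paths, "AUTHELIA_", "_") {
		if err := source.Load(val); err != nil {
			val.Push(fmt.Errorf("failed to load %s source: %w", source.Name(), err))
			continue
		}

		if err := source.Merge(ko, val); err != nil {
			val.Push(fmt.Errorf("failed to merge %s source: %w", source.Name(), err))
		}
	}

	return ko, val
}

func main() {
	ko, val := loadAll([]string{"configuration.yml"})

	fmt.Println("loaded keys:", len(ko.Keys()), "validation errors:", len(val.Errors()))
}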
internal/configuration/template.go (Normal file, 31 lines)
@ -0,0 +1,31 @@
package configuration

import (
	_ "embed"
	"fmt"
	"io/ioutil"
	"os"
)

//go:embed config.template.yml
var template []byte

// EnsureConfigurationExists is an auxiliary function to the main Configuration tools that ensures the Configuration
// template is created if it doesn't already exist.
func EnsureConfigurationExists(path string) (created bool, err error) {
	_, err = os.Stat(path)
	if err != nil {
		if os.IsNotExist(err) {
			err := ioutil.WriteFile(path, template, 0600)
			if err != nil {
				return false, fmt.Errorf(errFmtGenerateConfiguration, err)
			}

			return true, nil
		}

		return false, fmt.Errorf(errFmtGenerateConfiguration, err)
	}

	return false, nil
}
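A minimal sketch of a caller, assuming a hypothetical path; it is not part of the change itself and real callers may handle the result differently.

package main

import (
	"log"

	"github.com/authelia/authelia/internal/configuration"
)

func main() {
	// Generate the embedded template at the given path if no file exists yet.
	created, err := configuration.EnsureConfigurationExists("/config/configuration.yml")
	if err != nil {
		log.Fatal(err)
	}

	if created {
		log.Println("a configuration template was generated at /config/configuration.yml; edit it and restart")
	}
}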
internal/configuration/template_test.go (Normal file, 59 lines)
@ -0,0 +1,59 @@
package configuration

import (
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"runtime"
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/authelia/authelia/internal/utils"
)

func TestShouldGenerateConfiguration(t *testing.T) {
	dir, err := ioutil.TempDir("", "authelia-config")
	assert.NoError(t, err)

	cfg := filepath.Join(dir, "config.yml")

	created, err := EnsureConfigurationExists(cfg)
	assert.NoError(t, err)
	assert.True(t, created)

	_, err = os.Stat(cfg)
	assert.NoError(t, err)
}

func TestShouldNotGenerateConfigurationOnFSAccessDenied(t *testing.T) {
	if runtime.GOOS == constWindows {
		t.Skip("skipping test due to being on windows")
	}

	dir, err := ioutil.TempDir("", "authelia-config")
	assert.NoError(t, err)

	assert.NoError(t, os.Mkdir(filepath.Join(dir, "zero"), 0000))

	cfg := filepath.Join(dir, "zero", "config.yml")

	created, err := EnsureConfigurationExists(cfg)
	assert.EqualError(t, err, fmt.Sprintf("error occurred generating configuration: stat %s: permission denied", cfg))
	assert.False(t, created)
}

func TestShouldNotGenerateConfiguration(t *testing.T) {
	dir, err := ioutil.TempDir("", "authelia-config")
	assert.NoError(t, err)

	cfg := filepath.Join(dir, "..", "not-a-dir", "config.yml")

	created, err := EnsureConfigurationExists(cfg)

	expectedErr := fmt.Sprintf(utils.GetExpectedErrTxt("pathnotfound"), cfg)

	assert.EqualError(t, err, fmt.Sprintf(errFmtGenerateConfiguration, expectedErr))
	assert.False(t, created)
}
internal/configuration/test_resources/example_secret (Normal file, 1 line)
@ -0,0 +1 @@
example_secret value
internal/configuration/types.go (Normal file, 34 lines)
@ -0,0 +1,34 @@
package configuration

import (
	"github.com/knadh/koanf"

	"github.com/authelia/authelia/internal/configuration/schema"
)

// Source is an abstract representation of a configuration Source implementation.
type Source interface {
	Name() (name string)
	Merge(ko *koanf.Koanf, val *schema.StructValidator) (err error)
	Load(val *schema.StructValidator) (err error)
}

// YAMLFileSource is a configuration Source with a YAML File.
type YAMLFileSource struct {
	koanf *koanf.Koanf
	path  string
}

// EnvironmentSource is a configuration Source which loads values from the environment.
type EnvironmentSource struct {
	koanf     *koanf.Koanf
	prefix    string
	delimiter string
}

// SecretsSource loads environment variables that have a value pointing to a file.
type SecretsSource struct {
	koanf     *koanf.Koanf
	prefix    string
	delimiter string
}
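Because Source is an interface over a private koanf.Koanf, further sources can be added without touching the existing providers. The sketch below is hypothetical (a MapSource does not exist in this change) and assumes the package delimiter is "."; it shows the minimal shape such an implementation takes using koanf's confmap provider.

package configuration

import (
	"github.com/knadh/koanf"
	"github.com/knadh/koanf/providers/confmap"

	"github.com/authelia/authelia/internal/configuration/schema"
)

// MapSource is a hypothetical in-memory Source of default values.
type MapSource struct {
	koanf  *koanf.Koanf
	values map[string]interface{}
}

// NewMapSource returns a MapSource with the given values.
func NewMapSource(values map[string]interface{}) (source *MapSource) {
	return &MapSource{
		koanf:  koanf.New("."),
		values: values,
	}
}

// Name of the Source.
func (s MapSource) Name() (name string) {
	return "map"
}

// Load the values into the MapSource koanf.Koanf.
func (s *MapSource) Load(_ *schema.StructValidator) (err error) {
	return s.koanf.Load(confmap.Provider(s.values, "."), nil)
}

// Merge the MapSource koanf.Koanf into the provided one.
func (s *MapSource) Merge(ko *koanf.Koanf, _ *schema.StructValidator) (err error) {
	return ko.Merge(s.koanf)
}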
@ -12,7 +12,7 @@ import (

// IsPolicyValid check if policy is valid.
func IsPolicyValid(policy string) (isValid bool) {
-	return policy == denyPolicy || policy == oneFactorPolicy || policy == twoFactorPolicy || policy == bypassPolicy
+	return policy == policyDeny || policy == policyOneFactor || policy == policyTwoFactor || policy == policyBypass
}

// IsResourceValid check if a resource is valid.
@ -52,7 +52,7 @@ func IsNetworkValid(network string) (isValid bool) {
// ValidateAccessControl validates access control configuration.
func ValidateAccessControl(configuration *schema.AccessControlConfiguration, validator *schema.StructValidator) {
	if configuration.DefaultPolicy == "" {
-		configuration.DefaultPolicy = denyPolicy
+		configuration.DefaultPolicy = policyDeny
	}

	if !IsPolicyValid(configuration.DefaultPolicy) {
@ -73,7 +73,7 @@ func ValidateAccessControl(configuration *schema.AccessControlConfiguration, val
// ValidateRules validates an ACL Rule configuration.
func ValidateRules(configuration schema.AccessControlConfiguration, validator *schema.StructValidator) {
	if configuration.Rules == nil || len(configuration.Rules) == 0 {
-		if configuration.DefaultPolicy != oneFactorPolicy && configuration.DefaultPolicy != twoFactorPolicy {
+		if configuration.DefaultPolicy != policyOneFactor && configuration.DefaultPolicy != policyTwoFactor {
			validator.Push(fmt.Errorf("Default Policy [%s] is invalid, access control rules must be provided or a policy must either be 'one_factor' or 'two_factor'", configuration.DefaultPolicy))

			return
@ -103,7 +103,7 @@ func ValidateRules(configuration schema.AccessControlConfiguration, validator *s
		validateMethods(rulePosition, rule, validator)

-		if rule.Policy == bypassPolicy && len(rule.Subjects) != 0 {
+		if rule.Policy == policyBypass && len(rule.Subjects) != 0 {
			validator.Push(fmt.Errorf(errAccessControlInvalidPolicyWithSubjects, rulePosition, rule.Domains, rule.Subjects))
		}
	}
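The constant rename does not alter behaviour; only the four policy strings are accepted. A hypothetical table-driven test (not part of this change) illustrating that invariant:

package validator

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestIsPolicyValidSketch checks each known policy constant plus an invalid value.
func TestIsPolicyValidSketch(t *testing.T) {
	for policy, expected := range map[string]bool{
		policyBypass:    true,
		policyOneFactor: true,
		policyTwoFactor: true,
		policyDeny:      true,
		"invalid":       false,
	} {
		assert.Equal(t, expected, IsPolicyValid(policy), policy)
	}
}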
@ -18,7 +18,7 @@ type AccessControl struct {

func (suite *AccessControl) SetupTest() {
	suite.validator = schema.NewStructValidator()
-	suite.configuration.DefaultPolicy = denyPolicy
+	suite.configuration.DefaultPolicy = policyDeny
	suite.configuration.Networks = schema.DefaultACLNetwork
	suite.configuration.Rules = schema.DefaultACLRule
}
@ -71,7 +71,7 @@ func (suite *AccessControl) TestShouldRaiseErrorWithNoRulesDefined() {
func (suite *AccessControl) TestShouldRaiseWarningWithNoRulesDefined() {
	suite.configuration.Rules = []schema.ACLRule{}

-	suite.configuration.DefaultPolicy = twoFactorPolicy
+	suite.configuration.DefaultPolicy = policyTwoFactor

	ValidateRules(suite.configuration, suite.validator)
@ -10,7 +10,7 @@ import (
	"github.com/authelia/authelia/internal/utils"
)

-// ValidateAuthenticationBackend validates and update authentication backend configuration.
+// ValidateAuthenticationBackend validates and updates the authentication backend configuration.
func ValidateAuthenticationBackend(configuration *schema.AuthenticationBackendConfiguration, validator *schema.StructValidator) {
	if configuration.LDAP == nil && configuration.File == nil {
		validator.Push(errors.New("Please provide `ldap` or `file` object in `authentication_backend`"))
@ -36,7 +36,7 @@ func ValidateAuthenticationBackend(configuration *schema.AuthenticationBackendCo
	}
}

-//nolint:gocyclo // TODO: Consider refactoring/simplifying, time permitting.
+// validateFileAuthenticationBackend validates and updates the file authentication backend configuration.
func validateFileAuthenticationBackend(configuration *schema.FileAuthenticationBackendConfiguration, validator *schema.StructValidator) {
	if configuration.Path == "" {
		validator.Push(errors.New("Please provide a `path` for the users database in `authentication_backend`"))
@ -45,26 +45,6 @@ func validateFileAuthenticationBackend(configuration *schema.FileAuthenticationB
	if configuration.Password == nil {
		configuration.Password = &schema.DefaultPasswordConfiguration
	} else {
-		if configuration.Password.Algorithm == "" {
-			configuration.Password.Algorithm = schema.DefaultPasswordConfiguration.Algorithm
-		} else {
-			configuration.Password.Algorithm = strings.ToLower(configuration.Password.Algorithm)
-			if configuration.Password.Algorithm != argon2id && configuration.Password.Algorithm != sha512 {
-				validator.Push(fmt.Errorf("Unknown hashing algorithm supplied, valid values are argon2id and sha512, you configured '%s'", configuration.Password.Algorithm))
-			}
-		}
-
-		// Iterations (time)
-		if configuration.Password.Iterations == 0 {
-			if configuration.Password.Algorithm == argon2id {
-				configuration.Password.Iterations = schema.DefaultPasswordConfiguration.Iterations
-			} else {
-				configuration.Password.Iterations = schema.DefaultPasswordSHA512Configuration.Iterations
-			}
-		} else if configuration.Password.Iterations < 1 {
-			validator.Push(fmt.Errorf("The number of iterations specified is invalid, must be 1 or more, you configured %d", configuration.Password.Iterations))
-		}
-
		// Salt Length
		switch {
		case configuration.Password.SaltLength == 0:
@ -73,28 +53,55 @@ func validateFileAuthenticationBackend(configuration *schema.FileAuthenticationB
			validator.Push(fmt.Errorf("The salt length must be 2 or more, you configured %d", configuration.Password.SaltLength))
		}

-		if configuration.Password.Algorithm == argon2id {
-			// Parallelism
-			if configuration.Password.Parallelism == 0 {
-				configuration.Password.Parallelism = schema.DefaultPasswordConfiguration.Parallelism
-			} else if configuration.Password.Parallelism < 1 {
-				validator.Push(fmt.Errorf("Parallelism for argon2id must be 1 or more, you configured %d", configuration.Password.Parallelism))
-			}
-
-			// Memory
-			if configuration.Password.Memory == 0 {
-				configuration.Password.Memory = schema.DefaultPasswordConfiguration.Memory
-			} else if configuration.Password.Memory < configuration.Password.Parallelism*8 {
-				validator.Push(fmt.Errorf("Memory for argon2id must be %d or more (parallelism * 8), you configured memory as %d and parallelism as %d", configuration.Password.Parallelism*8, configuration.Password.Memory, configuration.Password.Parallelism))
-			}
-
-			// Key Length
-			if configuration.Password.KeyLength == 0 {
-				configuration.Password.KeyLength = schema.DefaultPasswordConfiguration.KeyLength
-			} else if configuration.Password.KeyLength < 16 {
-				validator.Push(fmt.Errorf("Key length for argon2id must be 16, you configured %d", configuration.Password.KeyLength))
-			}
+		switch configuration.Password.Algorithm {
+		case "":
+			configuration.Password.Algorithm = schema.DefaultPasswordConfiguration.Algorithm
+			fallthrough
+		case hashArgon2id:
+			validateFileAuthenticationBackendArgon2id(configuration, validator)
+		case hashSHA512:
+			validateFileAuthenticationBackendSHA512(configuration)
+		default:
+			validator.Push(fmt.Errorf("Unknown hashing algorithm supplied, valid values are argon2id and sha512, you configured '%s'", configuration.Password.Algorithm))
		}
+
+		if configuration.Password.Iterations < 1 {
+			validator.Push(fmt.Errorf("The number of iterations specified is invalid, must be 1 or more, you configured %d", configuration.Password.Iterations))
+		}
	}
}
+
+func validateFileAuthenticationBackendSHA512(configuration *schema.FileAuthenticationBackendConfiguration) {
+	// Iterations (time)
+	if configuration.Password.Iterations == 0 {
+		configuration.Password.Iterations = schema.DefaultPasswordSHA512Configuration.Iterations
+	}
+}
+
+func validateFileAuthenticationBackendArgon2id(configuration *schema.FileAuthenticationBackendConfiguration, validator *schema.StructValidator) {
+	// Iterations (time)
+	if configuration.Password.Iterations == 0 {
+		configuration.Password.Iterations = schema.DefaultPasswordConfiguration.Iterations
+	}
+
+	// Parallelism
+	if configuration.Password.Parallelism == 0 {
+		configuration.Password.Parallelism = schema.DefaultPasswordConfiguration.Parallelism
+	} else if configuration.Password.Parallelism < 1 {
+		validator.Push(fmt.Errorf("Parallelism for argon2id must be 1 or more, you configured %d", configuration.Password.Parallelism))
+	}
+
+	// Memory
+	if configuration.Password.Memory == 0 {
+		configuration.Password.Memory = schema.DefaultPasswordConfiguration.Memory
+	} else if configuration.Password.Memory < configuration.Password.Parallelism*8 {
+		validator.Push(fmt.Errorf("Memory for argon2id must be %d or more (parallelism * 8), you configured memory as %d and parallelism as %d", configuration.Password.Parallelism*8, configuration.Password.Memory, configuration.Password.Parallelism))
+	}
+
+	// Key Length
+	if configuration.Password.KeyLength == 0 {
+		configuration.Password.KeyLength = schema.DefaultPasswordConfiguration.KeyLength
+	} else if configuration.Password.KeyLength < 16 {
+		validator.Push(fmt.Errorf("Key length for argon2id must be 16, you configured %d", configuration.Password.KeyLength))
+	}
}
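The replacement switch relies on Go's fallthrough so that an unset algorithm is first replaced with the default and then validated by the same case as an explicitly configured argon2id. A standalone sketch of just that control flow, with literal strings standing in for the schema defaults:

package main

import "fmt"

func main() {
	algorithm := ""

	switch algorithm {
	case "":
		// Mirrors defaulting to schema.DefaultPasswordConfiguration.Algorithm.
		algorithm = "argon2id"
		fallthrough
	case "argon2id":
		// Reached both for an explicit "argon2id" and for the empty value above.
		fmt.Println("validating argon2id parameters for", algorithm)
	case "sha512":
		fmt.Println("validating sha512 parameters")
	default:
		fmt.Println("unknown hashing algorithm:", algorithm)
	}
}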
@ -97,7 +97,7 @@ func (suite *FileBasedAuthenticationBackend) TestShouldSetDefaultConfigurationWh
	ValidateAuthenticationBackend(&suite.configuration, suite.validator)

	suite.Assert().False(suite.validator.HasWarnings())
-	suite.Assert().False(suite.validator.HasErrors())
+	suite.Assert().Len(suite.validator.Errors(), 0)

	suite.Assert().Equal(schema.DefaultPasswordConfiguration.KeyLength, suite.configuration.File.Password.KeyLength)
	suite.Assert().Equal(schema.DefaultPasswordConfiguration.Iterations, suite.configuration.File.Password.Iterations)
@ -115,7 +115,7 @@ func (suite *FileBasedAuthenticationBackend) TestShouldSetDefaultConfigurationWh
	ValidateAuthenticationBackend(&suite.configuration, suite.validator)

	suite.Assert().False(suite.validator.HasWarnings())
-	suite.Assert().False(suite.validator.HasErrors())
+	suite.Assert().Len(suite.validator.Errors(), 0)

	suite.Assert().Equal(schema.DefaultPasswordSHA512Configuration.KeyLength, suite.configuration.File.Password.KeyLength)
	suite.Assert().Equal(schema.DefaultPasswordSHA512Configuration.Iterations, suite.configuration.File.Password.Iterations)
@ -14,8 +14,8 @@ func newDefaultConfig() schema.Configuration {
	config := schema.Configuration{}
	config.Server.Host = loopback
	config.Server.Port = 9090
-	config.Logging.Level = "info"
-	config.Logging.Format = "text"
+	config.Log.Level = "info"
+	config.Log.Format = "text"
	config.JWTSecret = testJWTSecret
	config.AuthenticationBackend.File = &schema.FileAuthenticationBackendConfiguration{
		Path: "/a/path",
@ -81,7 +81,10 @@ func TestShouldRaiseErrorWithUndefinedJWTSecretKey(t *testing.T) {

	ValidateConfiguration(&config, validator)
	require.Len(t, validator.Errors(), 1)
+	require.Len(t, validator.Warnings(), 1)

	assert.EqualError(t, validator.Errors()[0], "Provide a JWT secret using \"jwt_secret\" key")
+	assert.EqualError(t, validator.Warnings()[0], "No access control rules have been defined so the default policy two_factor will be applied to all requests")
}

func TestShouldRaiseErrorWithBadDefaultRedirectionURL(t *testing.T) {
@ -91,16 +94,24 @@ func TestShouldRaiseErrorWithBadDefaultRedirectionURL(t *testing.T) {

	ValidateConfiguration(&config, validator)
	require.Len(t, validator.Errors(), 1)
+	require.Len(t, validator.Warnings(), 1)

	assert.EqualError(t, validator.Errors()[0], "Value for \"default_redirection_url\" is invalid: the url 'bad_default_redirection_url' is not absolute because it doesn't start with a scheme like 'http://' or 'https://'")
+	assert.EqualError(t, validator.Warnings()[0], "No access control rules have been defined so the default policy two_factor will be applied to all requests")
}

func TestShouldNotOverrideCertificatesDirectoryAndShouldPassWhenBlank(t *testing.T) {
	validator := schema.NewStructValidator()
	config := newDefaultConfig()

	ValidateConfiguration(&config, validator)
-	require.Len(t, validator.Errors(), 0)
+
+	assert.Len(t, validator.Errors(), 0)
+	require.Len(t, validator.Warnings(), 1)

	require.Equal(t, "", config.CertificatesDirectory)
+
+	assert.EqualError(t, validator.Warnings()[0], "No access control rules have been defined so the default policy two_factor will be applied to all requests")
}

func TestShouldRaiseErrorOnInvalidCertificatesDirectory(t *testing.T) {
@ -111,6 +122,7 @@ func TestShouldRaiseErrorOnInvalidCertificatesDirectory(t *testing.T) {
	ValidateConfiguration(&config, validator)

	require.Len(t, validator.Errors(), 1)
+	require.Len(t, validator.Warnings(), 1)

	if runtime.GOOS == "windows" {
		assert.EqualError(t, validator.Errors()[0], "Error checking certificate directory: CreateFile not-a-real-file.go: The system cannot find the file specified.")
@ -118,12 +130,18 @@ func TestShouldRaiseErrorOnInvalidCertificatesDirectory(t *testing.T) {
		assert.EqualError(t, validator.Errors()[0], "Error checking certificate directory: stat not-a-real-file.go: no such file or directory")
	}

+	assert.EqualError(t, validator.Warnings()[0], "No access control rules have been defined so the default policy two_factor will be applied to all requests")
+
	validator = schema.NewStructValidator()
	config.CertificatesDirectory = "const.go"

	ValidateConfiguration(&config, validator)

	require.Len(t, validator.Errors(), 1)
+	require.Len(t, validator.Warnings(), 1)

	assert.EqualError(t, validator.Errors()[0], "The path const.go specified for certificate_directory is not a directory")
+	assert.EqualError(t, validator.Warnings()[0], "No access control rules have been defined so the default policy two_factor will be applied to all requests")
}

func TestShouldNotRaiseErrorOnValidCertificatesDirectory(t *testing.T) {
@ -133,5 +151,8 @@ func TestShouldNotRaiseErrorOnValidCertificatesDirectory(t *testing.T) {

	ValidateConfiguration(&config, validator)

-	require.Len(t, validator.Errors(), 0)
+	assert.Len(t, validator.Errors(), 0)
+	require.Len(t, validator.Warnings(), 1)
+
+	assert.EqualError(t, validator.Warnings()[0], "No access control rules have been defined so the default policy two_factor will be applied to all requests")
}
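The new assertions above distinguish validator warnings from errors. A hypothetical helper (the package and function names are illustrative and not part of the change) showing the split those tests rely on: warnings such as the default-policy notice are reported but tolerated, while errors abort start-up.

package config

import (
	"log"

	"github.com/authelia/authelia/internal/configuration/schema"
)

// reportValidation logs warnings and stops the process if any errors were pushed.
func reportValidation(val *schema.StructValidator) {
	for _, warning := range val.Warnings() {
		log.Printf("configuration warning: %v", warning)
	}

	if len(val.Errors()) > 0 {
		for _, err := range val.Errors() {
			log.Printf("configuration error: %v", err)
		}

		log.Fatal("configuration validation failed")
	}
}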
@ -1,65 +1,36 @@
package validator

+import "regexp"
+
const (
	loopback           = "127.0.0.1"
	oauth2InstalledApp = "urn:ietf:wg:oauth:2.0:oob"
)

+// Policy constants.
const (
-	errFmtDeprecatedConfigurationKey = "[DEPRECATED] The %s configuration option is deprecated and will be " +
-		"removed in %s, please use %s instead"
-	errFmtReplacedConfigurationKey = "invalid configuration key '%s' was replaced by '%s'"
-
-	errFmtLoggingLevelInvalid = "the log level '%s' is invalid, must be one of: %s"
-	errFmtSessionSecretRedisProvider      = "The session secret must be set when using the %s session provider"
-	errFmtSessionRedisPortRange           = "The port must be between 1 and 65535 for the %s session provider"
-	errFmtSessionRedisHostRequired        = "The host must be provided when using the %s session provider"
-	errFmtSessionRedisHostOrNodesRequired = "Either the host or a node must be provided when using the %s session provider"
-
-	errFmtOIDCServerClientRedirectURI = "OIDC client with ID '%s' redirect URI %s has an invalid scheme '%s', " +
-		"should be http or https"
-	errFmtOIDCClientRedirectURIPublic = "openid connect provider: client with ID '%s' redirect URI '%s' is " +
-		"only valid for the public client type, not the confidential client type"
-	errFmtOIDCClientRedirectURIAbsolute = "openid connect provider: client with ID '%s' redirect URI '%s' is invalid " +
-		"because it has no scheme when it should be http or https"
-	errFmtOIDCServerClientRedirectURICantBeParsed = "OIDC client with ID '%s' has an invalid redirect URI '%s' " +
-		"could not be parsed: %v"
-	errFmtOIDCServerClientInvalidPolicy = "OIDC client with ID '%s' has an invalid policy '%s', " +
-		"should be either 'one_factor' or 'two_factor'"
-	errFmtOIDCServerClientInvalidSecret = "OIDC client with ID '%s' has an empty secret" //nolint:gosec
-	errFmtOIDCClientPublicInvalidSecret = "openid connect provider: client with ID '%s' is public but does not have an empty secret" //nolint:gosec
-	errFmtOIDCServerClientInvalidScope = "OIDC client with ID '%s' has an invalid scope '%s', " +
-		"must be one of: '%s'"
-	errFmtOIDCServerClientInvalidGrantType = "OIDC client with ID '%s' has an invalid grant type '%s', " +
-		"must be one of: '%s'"
-	errFmtOIDCServerClientInvalidResponseMode = "OIDC client with ID '%s' has an invalid response mode '%s', " +
-		"must be one of: '%s'"
-	errFmtOIDCServerClientInvalidUserinfoAlgorithm = "OIDC client with ID '%s' has an invalid userinfo signing " +
-		"algorithm '%s', must be one of: '%s'"
-	errFmtOIDCServerInsecureParameterEntropy = "SECURITY ISSUE: OIDC minimum parameter entropy is configured to an " +
-		"unsafe value, it should be above 8 but it's configured to %d."
-
-	errFileHashing = "config key incorrect: authentication_backend.file.hashing should be " +
-		"authentication_backend.file.password"
-	errFilePHashing = "config key incorrect: authentication_backend.file.password_hashing should be " +
-		"authentication_backend.file.password"
-	errFilePOptions = "config key incorrect: authentication_backend.file.password_options should be " +
-		"authentication_backend.file.password"
-
-	bypassPolicy    = "bypass"
-	oneFactorPolicy = "one_factor"
-	twoFactorPolicy = "two_factor"
-	denyPolicy      = "deny"
-
-	argon2id = "argon2id"
-	sha512   = "sha512"
-
+	policyBypass    = "bypass"
+	policyOneFactor = "one_factor"
+	policyTwoFactor = "two_factor"
+	policyDeny      = "deny"
+)
+
+// Hashing constants.
+const (
+	hashArgon2id = "argon2id"
+	hashSHA512   = "sha512"
+)
+
+// Scheme constants.
+const (
	schemeLDAP  = "ldap"
	schemeLDAPS = "ldaps"
	schemeHTTP  = "http"
	schemeHTTPS = "https"
+)
+
+// Test constants.
+const (
	testBadTimer      = "-1"
	testInvalidPolicy = "invalid"
	testJWTSecret     = "a_secret"
@ -70,9 +41,58 @@ const (
	testModeDisabled = "disable"
	testTLSCert      = "/tmp/cert.pem"
	testTLSKey       = "/tmp/key.pem"
+)
+
-	errAccessControlInvalidPolicyWithSubjects = "Policy [bypass] for rule #%d domain %s with subjects %s is invalid. " +
-		"It is not supported to configure both policy bypass and subjects. For more information see: " +
+// OpenID Error constants.
+const (
+	errFmtOIDCClientsDuplicateID        = "openid connect provider: one or more clients have the same ID"
+	errFmtOIDCClientsWithEmptyID        = "openid connect provider: one or more clients have been configured with an empty ID"
+	errFmtOIDCNoClientsConfigured       = "openid connect provider: no clients are configured"
+	errFmtOIDCNoPrivateKey              = "openid connect provider: issuer private key must be provided"
+	errFmtOIDCClientInvalidSecret       = "openid connect provider: client with ID '%s' has an empty secret"
+	errFmtOIDCClientPublicInvalidSecret = "openid connect provider: client with ID '%s' is public but does not have " +
+		"an empty secret"
+	errFmtOIDCClientRedirectURI = "openid connect provider: client with ID '%s' redirect URI %s has an " +
+		"invalid scheme %s, should be http or https"
+	errFmtOIDCClientRedirectURICantBeParsed = "openid connect provider: client with ID '%s' has an invalid redirect " +
+		"URI '%s' could not be parsed: %v"
+	errFmtOIDCClientRedirectURIPublic = "openid connect provider: client with ID '%s' redirect URI '%s' is " +
+		"only valid for the public client type, not the confidential client type"
+	errFmtOIDCClientRedirectURIAbsolute = "openid connect provider: client with ID '%s' redirect URI '%s' is invalid " +
+		"because it has no scheme when it should be http or https"
+	errFmtOIDCClientInvalidPolicy = "openid connect provider: client with ID '%s' has an invalid policy " +
+		"'%s', should be either 'one_factor' or 'two_factor'"
+	errFmtOIDCClientInvalidScope = "openid connect provider: client with ID '%s' has an invalid scope " +
+		"'%s', must be one of: '%s'"
+	errFmtOIDCClientInvalidGrantType = "openid connect provider: client with ID '%s' has an invalid grant type " +
+		"'%s', must be one of: '%s'"
+	errFmtOIDCClientInvalidResponseMode = "openid connect provider: client with ID '%s' has an invalid response mode " +
+		"'%s', must be one of: '%s'"
+	errFmtOIDCClientInvalidUserinfoAlgorithm = "openid connect provider: client with ID '%s' has an invalid userinfo signing " +
+		"algorithm '%s', must be one of: '%s'"
+	errFmtOIDCServerInsecureParameterEntropy = "openid connect provider: SECURITY ISSUE - minimum parameter entropy is " +
+		"configured to an unsafe value, it should be above 8 but it's configured to %d"
+)
+
+// Error constants.
+const (
+	errFmtDeprecatedConfigurationKey = "the %s configuration option is deprecated and will be " +
+		"removed in %s, please use %s instead"
+	errFmtReplacedConfigurationKey = "invalid configuration key '%s' was replaced by '%s'"
+
+	errFmtLoggingLevelInvalid = "the log level '%s' is invalid, must be one of: %s"
+
+	errFmtSessionSecretRedisProvider      = "the session secret must be set when using the %s session provider"
+	errFmtSessionRedisPortRange           = "the port must be between 1 and 65535 for the %s session provider"
+	errFmtSessionRedisHostRequired        = "the host must be provided when using the %s session provider"
+	errFmtSessionRedisHostOrNodesRequired = "either the host or a node must be provided when using the %s session provider"
+
+	errFileHashing  = "config key incorrect: authentication_backend.file.hashing should be authentication_backend.file.password"
+	errFilePHashing = "config key incorrect: authentication_backend.file.password_hashing should be authentication_backend.file.password"
+	errFilePOptions = "config key incorrect: authentication_backend.file.password_options should be authentication_backend.file.password"
+
+	errAccessControlInvalidPolicyWithSubjects = "policy [bypass] for rule #%d domain %s with subjects %s is invalid. It is " +
+		"not supported to configure both policy bypass and subjects. For more information see: " +
		"https://www.authelia.com/docs/configuration/access-control.html#combining-subjects-and-the-bypass-policy"
)
@ -84,28 +104,16 @@ var validOIDCGrantTypes = []string{"implicit", "refresh_token", "authorization_c
var validOIDCResponseModes = []string{"form_post", "query", "fragment"}
var validOIDCUserinfoAlgorithms = []string{"none", "RS256"}

-// SecretNames contains a map of secret names.
-var SecretNames = map[string]string{
-	"JWTSecret":                     "jwt_secret",
-	"SessionSecret":                 "session.secret",
-	"DUOSecretKey":                  "duo_api.secret_key",
-	"RedisPassword":                 "session.redis.password",
-	"RedisSentinelPassword":         "session.redis.high_availability.sentinel_password",
-	"LDAPPassword":                  "authentication_backend.ldap.password",
-	"SMTPPassword":                  "notifier.smtp.password",
-	"MySQLPassword":                 "storage.mysql.password",
-	"PostgreSQLPassword":            "storage.postgres.password",
-	"OpenIDConnectHMACSecret":       "identity_providers.oidc.hmac_secret",
-	"OpenIDConnectIssuerPrivateKey": "identity_providers.oidc.issuer_private_key",
-}
+var reKeyReplacer = regexp.MustCompile(`\[\d+]`)

-// validKeys is a list of valid keys that are not secret names. For the sake of consistency please place any secret in
+// ValidKeys is a list of valid keys that are not secret names. For the sake of consistency please place any secret in
// the secret names map and reuse it in relevant sections.
-var validKeys = []string{
+var ValidKeys = []string{
	// Root Keys.
	"certificates_directory",
	"theme",
	"default_redirection_url",
+	"jwt_secret",

	// Log keys.
	"log.level",
@ -139,14 +147,26 @@ var validKeys = []string{
	"totp.period",
	"totp.skew",

+	// DUO API Keys.
+	"duo_api.hostname",
+	"duo_api.secret_key",
+	"duo_api.integration_key",
+
	// Access Control Keys.
-	"access_control.rules",
	"access_control.default_policy",
	"access_control.networks",
+	"access_control.rules",
+	"access_control.rules[].domain",
+	"access_control.rules[].methods",
+	"access_control.rules[].networks",
+	"access_control.rules[].subject",
+	"access_control.rules[].policy",
+	"access_control.rules[].resources",

	// Session Keys.
	"session.name",
	"session.domain",
+	"session.secret",
	"session.same_site",
	"session.expiration",
	"session.inactivity",
@ -156,6 +176,7 @@ var validKeys = []string{
	"session.redis.host",
	"session.redis.port",
	"session.redis.username",
+	"session.redis.password",
	"session.redis.database_index",
	"session.redis.maximum_active_connections",
	"session.redis.minimum_idle_connections",
@ -163,6 +184,7 @@ var validKeys = []string{
	"session.redis.tls.skip_verify",
	"session.redis.tls.server_name",
	"session.redis.high_availability.sentinel_name",
+	"session.redis.high_availability.sentinel_password",
	"session.redis.high_availability.nodes",
	"session.redis.high_availability.route_by_latency",
	"session.redis.high_availability.route_randomly",
@ -180,12 +202,14 @@ var validKeys = []string{
	"storage.mysql.port",
	"storage.mysql.database",
	"storage.mysql.username",
+	"storage.mysql.password",

	// PostgreSQL Storage Keys.
	"storage.postgres.host",
	"storage.postgres.port",
	"storage.postgres.database",
	"storage.postgres.username",
+	"storage.postgres.password",
	"storage.postgres.sslmode",

	// FileSystem Notifier Keys.
@ -193,9 +217,10 @@ var validKeys = []string{
	"notifier.disable_startup_check",

	// SMTP Notifier Keys.
-	"notifier.smtp.username",
	"notifier.smtp.host",
	"notifier.smtp.port",
+	"notifier.smtp.username",
+	"notifier.smtp.password",
	"notifier.smtp.identifier",
	"notifier.smtp.sender",
	"notifier.smtp.subject",
@ -211,10 +236,6 @@ var validKeys = []string{
	"regulation.find_time",
	"regulation.ban_time",

-	// DUO API Keys.
-	"duo_api.hostname",
-	"duo_api.integration_key",
-
	// Authentication Backend Keys.
	"authentication_backend.disable_reset_password",
	"authentication_backend.refresh_interval",
@ -232,6 +253,7 @@ var validKeys = []string{
	"authentication_backend.ldap.mail_attribute",
	"authentication_backend.ldap.display_name_attribute",
	"authentication_backend.ldap.user",
+	"authentication_backend.ldap.password",
	"authentication_backend.ldap.start_tls",
	"authentication_backend.ldap.tls.minimum_version",
	"authentication_backend.ldap.tls.skip_verify",
@ -247,12 +269,22 @@ var validKeys = []string{
	"authentication_backend.file.password.parallelism",

	// Identity Provider Keys.
-	"identity_providers.oidc.clients",
+	"identity_providers.oidc.hmac_secret",
+	"identity_providers.oidc.issuer_private_key",
	"identity_providers.oidc.id_token_lifespan",
	"identity_providers.oidc.access_token_lifespan",
	"identity_providers.oidc.refresh_token_lifespan",
	"identity_providers.oidc.authorize_code_lifespan",
	"identity_providers.oidc.enable_client_debug_messages",
+	"identity_providers.oidc.clients",
+	"identity_providers.oidc.clients[].id",
+	"identity_providers.oidc.clients[].description",
+	"identity_providers.oidc.clients[].secret",
+	"identity_providers.oidc.clients[].redirect_uris",
+	"identity_providers.oidc.clients[].authorization_policy",
+	"identity_providers.oidc.clients[].scopes",
+	"identity_providers.oidc.clients[].grant_types",
+	"identity_providers.oidc.clients[].response_types",
}

var replacedKeys = map[string]string{
@ -265,8 +297,8 @@ var replacedKeys = map[string]string{

var specificErrorKeys = map[string]string{
	"google_analytics": "config key removed: google_analytics - this functionality has been deprecated",
-	"notifier.smtp.trusted_cert": "invalid configuration key `notifier.smtp.trusted_cert` it has been removed, " +
-		"option has been replaced by the global option `certificates_directory`",
+	"notifier.smtp.trusted_cert": "invalid configuration key 'notifier.smtp.trusted_cert' it has been removed, " +
+		"option has been replaced by the global option 'certificates_directory'",

	"authentication_backend.file.password_options.algorithm":  errFilePOptions,
	"authentication_backend.file.password_options.iterations": errFilePOptions,
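Exporting validKeys as ValidKeys lets the configuration sources above build their environment and secret key maps from the canonical key list. The helper below is a hypothetical illustration (the real logic lives in getEnvConfigMap and getSecretConfigMap, which are not part of this hunk) of why the map is derived from ValidKeys rather than by reversing variable names: a naive translation of AUTHELIA_JWT_SECRET back into a key would produce "jwt.secret" instead of the valid key "jwt_secret". The "AUTHELIA_" prefix and "." delimiter are assumptions for illustration only.

package validator

import "strings"

// envKeyMapSketch derives an environment-variable name for each valid key and
// maps it back to that key, so lookups never have to guess where underscores
// belong in the original key.
func envKeyMapSketch(validKeys []string, prefix, delimiter string) map[string]string {
	keyMap := make(map[string]string, len(validKeys))

	for _, key := range validKeys {
		name := prefix + strings.ToUpper(strings.ReplaceAll(key, delimiter, "_"))
		keyMap[name] = key
	}

	return keyMap
}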
@ -18,7 +18,7 @@ func ValidateIdentityProviders(configuration *schema.IdentityProvidersConfigurat
func validateOIDC(configuration *schema.OpenIDConnectConfiguration, validator *schema.StructValidator) {
	if configuration != nil {
		if configuration.IssuerPrivateKey == "" {
-			validator.Push(fmt.Errorf("OIDC Server issuer private key must be provided"))
+			validator.Push(fmt.Errorf(errFmtOIDCNoPrivateKey))
		}

		if configuration.AccessTokenLifespan == time.Duration(0) {
@ -44,7 +44,7 @@ func validateOIDC(configuration *schema.OpenIDConnectConfiguration, validator *s
		validateOIDCClients(configuration, validator)

		if len(configuration.Clients) == 0 {
-			validator.Push(fmt.Errorf("OIDC Server has no clients defined"))
+			validator.Push(fmt.Errorf(errFmtOIDCNoClientsConfigured))
		}
	}
}
@ -74,14 +74,14 @@ func validateOIDCClients(configuration *schema.OpenIDConnectConfiguration, valid
		}
	} else {
		if client.Secret == "" {
-			validator.Push(fmt.Errorf(errFmtOIDCServerClientInvalidSecret, client.ID))
+			validator.Push(fmt.Errorf(errFmtOIDCClientInvalidSecret, client.ID))
		}
	}

	if client.Policy == "" {
		configuration.Clients[c].Policy = schema.DefaultOpenIDConnectClientConfiguration.Policy
-	} else if client.Policy != oneFactorPolicy && client.Policy != twoFactorPolicy {
-		validator.Push(fmt.Errorf(errFmtOIDCServerClientInvalidPolicy, client.ID, client.Policy))
+	} else if client.Policy != policyOneFactor && client.Policy != policyTwoFactor {
+		validator.Push(fmt.Errorf(errFmtOIDCClientInvalidPolicy, client.ID, client.Policy))
	}

	validateOIDCClientScopes(c, configuration, validator)
@ -94,11 +94,11 @@ func validateOIDCClients(configuration *schema.OpenIDConnectConfiguration, valid
	}

	if invalidID {
-		validator.Push(fmt.Errorf("OIDC Server has one or more clients with an empty ID"))
+		validator.Push(fmt.Errorf(errFmtOIDCClientsWithEmptyID))
	}

	if duplicateIDs {
-		validator.Push(fmt.Errorf("OIDC Server has clients with duplicate ID's"))
+		validator.Push(fmt.Errorf(errFmtOIDCClientsDuplicateID))
	}
}

@ -115,7 +115,7 @@ func validateOIDCClientScopes(c int, configuration *schema.OpenIDConnectConfigur
	for _, scope := range configuration.Clients[c].Scopes {
		if !utils.IsStringInSlice(scope, validOIDCScopes) {
			validator.Push(fmt.Errorf(
-				errFmtOIDCServerClientInvalidScope,
+				errFmtOIDCClientInvalidScope,
				configuration.Clients[c].ID, scope, strings.Join(validOIDCScopes, "', '")))
		}
	}
@ -130,7 +130,7 @@ func validateOIDCClientGrantTypes(c int, configuration *schema.OpenIDConnectConf
	for _, grantType := range configuration.Clients[c].GrantTypes {
		if !utils.IsStringInSlice(grantType, validOIDCGrantTypes) {
			validator.Push(fmt.Errorf(
-				errFmtOIDCServerClientInvalidGrantType,
+				errFmtOIDCClientInvalidGrantType,
				configuration.Clients[c].ID, grantType, strings.Join(validOIDCGrantTypes, "', '")))
		}
	}
@ -152,7 +152,7 @@ func validateOIDCClientResponseModes(c int, configuration *schema.OpenIDConnectC
	for _, responseMode := range configuration.Clients[c].ResponseModes {
		if !utils.IsStringInSlice(responseMode, validOIDCResponseModes) {
			validator.Push(fmt.Errorf(
-				errFmtOIDCServerClientInvalidResponseMode,
+				errFmtOIDCClientInvalidResponseMode,
				configuration.Clients[c].ID, responseMode, strings.Join(validOIDCResponseModes, "', '")))
		}
	}
@ -162,7 +162,7 @@ func validateOIDDClientUserinfoAlgorithm(c int, configuration *schema.OpenIDConn
	if configuration.Clients[c].UserinfoSigningAlgorithm == "" {
		configuration.Clients[c].UserinfoSigningAlgorithm = schema.DefaultOpenIDConnectClientConfiguration.UserinfoSigningAlgorithm
	} else if !utils.IsStringInSlice(configuration.Clients[c].UserinfoSigningAlgorithm, validOIDCUserinfoAlgorithms) {
-		validator.Push(fmt.Errorf(errFmtOIDCServerClientInvalidUserinfoAlgorithm,
+		validator.Push(fmt.Errorf(errFmtOIDCClientInvalidUserinfoAlgorithm,
			configuration.Clients[c].ID, configuration.Clients[c].UserinfoSigningAlgorithm, strings.Join(validOIDCUserinfoAlgorithms, ", ")))
	}
}
@ -181,7 +181,7 @@ func validateOIDCClientRedirectURIs(client schema.OpenIDConnectClientConfigurati

		parsedURL, err := url.Parse(redirectURI)
		if err != nil {
-			validator.Push(fmt.Errorf(errFmtOIDCServerClientRedirectURICantBeParsed, client.ID, redirectURI, err))
+			validator.Push(fmt.Errorf(errFmtOIDCClientRedirectURICantBeParsed, client.ID, redirectURI, err))
			continue
		}

@ -191,7 +191,7 @@ func validateOIDCClientRedirectURIs(client schema.OpenIDConnectClientConfigurati
		}

		if parsedURL.Scheme != schemeHTTPS && parsedURL.Scheme != schemeHTTP {
-			validator.Push(fmt.Errorf(errFmtOIDCServerClientRedirectURI, client.ID, redirectURI, parsedURL.Scheme))
+			validator.Push(fmt.Errorf(errFmtOIDCClientRedirectURI, client.ID, redirectURI, parsedURL.Scheme))
		}
	}
}
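For reference, a client literal that satisfies the validations above would look like the hypothetical sketch below; the field values are illustrative and mirror the shapes used in the tests that follow.

package validator

import "github.com/authelia/authelia/internal/configuration/schema"

// exampleOIDCClient passes validateOIDCClients: a non-empty ID and secret, a
// supported policy and an absolute http(s) redirect URI; omitted fields fall
// back to the defaults applied above.
var exampleOIDCClient = schema.OpenIDConnectClientConfiguration{
	ID:     "example-app",
	Secret: "an-example-secret",
	Policy: policyTwoFactor,
	RedirectURIs: []string{
		"https://example-app.example.com/oauth2/callback",
	},
}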
@@ -25,8 +25,8 @@ func TestShouldRaiseErrorWhenInvalidOIDCServerConfiguration(t *testing.T) {
 
 	require.Len(t, validator.Errors(), 2)
 
-	assert.EqualError(t, validator.Errors()[0], "OIDC Server issuer private key must be provided")
-	assert.EqualError(t, validator.Errors()[1], "OIDC Server has no clients defined")
+	assert.EqualError(t, validator.Errors()[0], errFmtOIDCNoPrivateKey)
+	assert.EqualError(t, validator.Errors()[1], errFmtOIDCNoClientsConfigured)
 }
 
 func TestShouldRaiseErrorWhenOIDCServerIssuerPrivateKeyPathInvalid(t *testing.T) {
@@ -42,7 +42,7 @@ func TestShouldRaiseErrorWhenOIDCServerIssuerPrivateKeyPathInvalid(t *testing.T)
 	require.Len(t, validator.Errors(), 1)
 
-	assert.EqualError(t, validator.Errors()[0], "OIDC Server has no clients defined")
+	assert.EqualError(t, validator.Errors()[0], errFmtOIDCNoClientsConfigured)
 }
 
 func TestShouldRaiseErrorWhenOIDCServerClientBadValues(t *testing.T) {
@@ -79,7 +79,7 @@ func TestShouldRaiseErrorWhenOIDCServerClientBadValues(t *testing.T) {
 			{
 				ID: "client-check-uri-parse",
 				Secret: "a-secret",
-				Policy: twoFactorPolicy,
+				Policy: policyTwoFactor,
 				RedirectURIs: []string{
 					"http://abc@%two",
 				},
@@ -87,7 +87,7 @@ func TestShouldRaiseErrorWhenOIDCServerClientBadValues(t *testing.T) {
 			{
 				ID: "client-check-uri-abs",
 				Secret: "a-secret",
-				Policy: twoFactorPolicy,
+				Policy: policyTwoFactor,
 				RedirectURIs: []string{
 					"google.com",
 				},
@@ -101,14 +101,14 @@ func TestShouldRaiseErrorWhenOIDCServerClientBadValues(t *testing.T) {
 	require.Len(t, validator.Errors(), 8)
 
 	assert.Equal(t, schema.DefaultOpenIDConnectClientConfiguration.Policy, config.OIDC.Clients[0].Policy)
-	assert.EqualError(t, validator.Errors()[0], fmt.Sprintf(errFmtOIDCServerClientInvalidSecret, ""))
-	assert.EqualError(t, validator.Errors()[1], fmt.Sprintf(errFmtOIDCServerClientRedirectURI, "", "tcp://google.com", "tcp"))
-	assert.EqualError(t, validator.Errors()[2], fmt.Sprintf(errFmtOIDCServerClientInvalidPolicy, "a-client", "a-policy"))
-	assert.EqualError(t, validator.Errors()[3], fmt.Sprintf(errFmtOIDCServerClientInvalidPolicy, "a-client", "a-policy"))
-	assert.EqualError(t, validator.Errors()[4], fmt.Sprintf(errFmtOIDCServerClientRedirectURICantBeParsed, "client-check-uri-parse", "http://abc@%two", errors.New("parse \"http://abc@%two\": invalid URL escape \"%tw\"")))
+	assert.EqualError(t, validator.Errors()[0], fmt.Sprintf(errFmtOIDCClientInvalidSecret, ""))
+	assert.EqualError(t, validator.Errors()[1], fmt.Sprintf(errFmtOIDCClientRedirectURI, "", "tcp://google.com", "tcp"))
+	assert.EqualError(t, validator.Errors()[2], fmt.Sprintf(errFmtOIDCClientInvalidPolicy, "a-client", "a-policy"))
+	assert.EqualError(t, validator.Errors()[3], fmt.Sprintf(errFmtOIDCClientInvalidPolicy, "a-client", "a-policy"))
+	assert.EqualError(t, validator.Errors()[4], fmt.Sprintf(errFmtOIDCClientRedirectURICantBeParsed, "client-check-uri-parse", "http://abc@%two", errors.New("parse \"http://abc@%two\": invalid URL escape \"%tw\"")))
 	assert.EqualError(t, validator.Errors()[5], fmt.Sprintf(errFmtOIDCClientRedirectURIAbsolute, "client-check-uri-abs", "google.com"))
-	assert.EqualError(t, validator.Errors()[6], "OIDC Server has one or more clients with an empty ID")
-	assert.EqualError(t, validator.Errors()[7], "OIDC Server has clients with duplicate ID's")
+	assert.EqualError(t, validator.Errors()[6], errFmtOIDCClientsWithEmptyID)
+	assert.EqualError(t, validator.Errors()[7], errFmtOIDCClientsDuplicateID)
 }
 
 func TestShouldRaiseErrorWhenOIDCClientConfiguredWithBadScopes(t *testing.T) {
@@ -134,7 +134,7 @@ func TestShouldRaiseErrorWhenOIDCClientConfiguredWithBadScopes(t *testing.T) {
 	ValidateIdentityProviders(config, validator)
 
 	require.Len(t, validator.Errors(), 1)
-	assert.EqualError(t, validator.Errors()[0], "OIDC client with ID 'good_id' has an invalid scope "+
+	assert.EqualError(t, validator.Errors()[0], "openid connect provider: client with ID 'good_id' has an invalid scope "+
 		"'bad_scope', must be one of: 'openid', 'email', 'profile', 'groups', 'offline_access'")
 }
 
@@ -161,7 +161,7 @@ func TestShouldRaiseErrorWhenOIDCClientConfiguredWithBadGrantTypes(t *testing.T)
 	ValidateIdentityProviders(config, validator)
 
 	require.Len(t, validator.Errors(), 1)
-	assert.EqualError(t, validator.Errors()[0], "OIDC client with ID 'good_id' has an invalid grant type "+
+	assert.EqualError(t, validator.Errors()[0], "openid connect provider: client with ID 'good_id' has an invalid grant type "+
 		"'bad_grant_type', must be one of: 'implicit', 'refresh_token', 'authorization_code', "+
 		"'password', 'client_credentials'")
 }
@@ -189,7 +189,7 @@ func TestShouldRaiseErrorWhenOIDCClientConfiguredWithBadResponseModes(t *testing
 	ValidateIdentityProviders(config, validator)
 
 	require.Len(t, validator.Errors(), 1)
-	assert.EqualError(t, validator.Errors()[0], "OIDC client with ID 'good_id' has an invalid response mode "+
+	assert.EqualError(t, validator.Errors()[0], "openid connect provider: client with ID 'good_id' has an invalid response mode "+
 		"'bad_responsemode', must be one of: 'form_post', 'query', 'fragment'")
 }
 
@@ -216,7 +216,7 @@ func TestShouldRaiseErrorWhenOIDCClientConfiguredWithBadUserinfoAlg(t *testing.T
 	ValidateIdentityProviders(config, validator)
 
 	require.Len(t, validator.Errors(), 1)
-	assert.EqualError(t, validator.Errors()[0], "OIDC client with ID 'good_id' has an invalid userinfo "+
+	assert.EqualError(t, validator.Errors()[0], "openid connect provider: client with ID 'good_id' has an invalid userinfo "+
 		"signing algorithm 'rs256', must be one of: 'none, RS256'")
 }
 
@@ -245,7 +245,7 @@ func TestValidateIdentityProvidersShouldRaiseWarningOnSecurityIssue(t *testing.T
 	assert.Len(t, validator.Errors(), 0)
 	require.Len(t, validator.Warnings(), 1)
 
-	assert.EqualError(t, validator.Warnings()[0], "SECURITY ISSUE: OIDC minimum parameter entropy is configured to an unsafe value, it should be above 8 but it's configured to 1.")
+	assert.EqualError(t, validator.Warnings()[0], "openid connect provider: SECURITY ISSUE - minimum parameter entropy is configured to an unsafe value, it should be above 8 but it's configured to 1")
 }
 
 func TestValidateIdentityProvidersShouldRaiseErrorsOnInvalidClientTypes(t *testing.T) {
@@ -345,7 +345,7 @@ func TestValidateIdentityProvidersShouldSetDefaultValues(t *testing.T) {
 				ID: "b-client",
 				Description: "Normal Description",
 				Secret: "b-client-secret",
-				Policy: oneFactorPolicy,
+				Policy: policyOneFactor,
 				UserinfoSigningAlgorithm: "RS256",
 				RedirectURIs: []string{
 					"https://google.com",
@@ -375,11 +375,11 @@ func TestValidateIdentityProvidersShouldSetDefaultValues(t *testing.T) {
 	assert.Len(t, validator.Errors(), 0)
 
 	// Assert Clients[0] Policy is set to the default, and the default doesn't override Clients[1]'s Policy.
-	assert.Equal(t, config.OIDC.Clients[0].Policy, twoFactorPolicy)
-	assert.Equal(t, config.OIDC.Clients[1].Policy, oneFactorPolicy)
+	assert.Equal(t, policyTwoFactor, config.OIDC.Clients[0].Policy)
+	assert.Equal(t, policyOneFactor, config.OIDC.Clients[1].Policy)
 
-	assert.Equal(t, config.OIDC.Clients[0].UserinfoSigningAlgorithm, "none")
-	assert.Equal(t, config.OIDC.Clients[1].UserinfoSigningAlgorithm, "RS256")
+	assert.Equal(t, "none", config.OIDC.Clients[0].UserinfoSigningAlgorithm)
+	assert.Equal(t, "RS256", config.OIDC.Clients[1].UserinfoSigningAlgorithm)
 
 	// Assert Clients[0] Description is set to the Clients[0] ID, and Clients[1]'s Description is not overridden.
 	assert.Equal(t, config.OIDC.Clients[0].ID, config.OIDC.Clients[0].Description)
@@ -3,35 +3,38 @@ package validator
 import (
 	"errors"
 	"fmt"
+	"strings"
 
 	"github.com/authelia/authelia/internal/configuration/schema"
 	"github.com/authelia/authelia/internal/utils"
 )
 
-// ValidateKeys determines if a provided key is valid.
-func ValidateKeys(validator *schema.StructValidator, keys []string) {
+// ValidateKeys determines if all provided keys are valid.
+func ValidateKeys(keys []string, prefix string, validator *schema.StructValidator) {
 	var errStrings []string
 
 	for _, key := range keys {
-		if utils.IsStringInSlice(key, validKeys) {
+		expectedKey := reKeyReplacer.ReplaceAllString(key, "[]")
+
+		if utils.IsStringInSlice(expectedKey, ValidKeys) {
 			continue
 		}
 
-		if isSecretKey(key) {
-			continue
-		}
-
-		if newKey, ok := replacedKeys[key]; ok {
+		if newKey, ok := replacedKeys[expectedKey]; ok {
 			validator.Push(fmt.Errorf(errFmtReplacedConfigurationKey, key, newKey))
 			continue
 		}
 
-		if err, ok := specificErrorKeys[key]; ok {
+		if err, ok := specificErrorKeys[expectedKey]; ok {
 			if !utils.IsStringInSlice(err, errStrings) {
 				errStrings = append(errStrings, err)
 			}
 		} else {
-			validator.Push(fmt.Errorf("config key not expected: %s", key))
+			if strings.HasPrefix(key, prefix) {
+				validator.PushWarning(fmt.Errorf("configuration environment variable not expected: %s", key))
+			} else {
+				validator.Push(fmt.Errorf("configuration key not expected: %s", key))
+			}
 		}
 	}
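As an aside, the index-normalisation step in ValidateKeys can be tried in isolation. The snippet below is only an illustrative sketch: the regular expression and the example key are assumptions, since reKeyReplacer and ValidKeys are defined elsewhere in the validator package and are not shown in this hunk.

package main

import (
	"fmt"
	"regexp"
)

// Hypothetical stand-in for the package-level reKeyReplacer used by ValidateKeys.
var reKeyReplacer = regexp.MustCompile(`\[\d+\]`)

func main() {
	key := "access_control.rules[0].domain" // hypothetical koanf-style key containing an index
	// Replace the concrete index with the generic "[]" form before comparing against the valid key list.
	fmt.Println(reKeyReplacer.ReplaceAllString(key, "[]")) // access_control.rules[].domain
}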
@@ -12,25 +12,41 @@ import (
 )
 
 func TestShouldValidateGoodKeys(t *testing.T) {
-	configKeys := validKeys
+	configKeys := ValidKeys
 	val := schema.NewStructValidator()
-	ValidateKeys(val, configKeys)
+	ValidateKeys(configKeys, "AUTHELIA_", val)
 
 	require.Len(t, val.Errors(), 0)
 }
 
 func TestShouldNotValidateBadKeys(t *testing.T) {
-	configKeys := validKeys
+	configKeys := ValidKeys
 	configKeys = append(configKeys, "bad_key")
 	configKeys = append(configKeys, "totp.skewy")
 	val := schema.NewStructValidator()
-	ValidateKeys(val, configKeys)
+	ValidateKeys(configKeys, "AUTHELIA_", val)
 
 	errs := val.Errors()
 	require.Len(t, errs, 2)
 
-	assert.EqualError(t, errs[0], "config key not expected: bad_key")
-	assert.EqualError(t, errs[1], "config key not expected: totp.skewy")
+	assert.EqualError(t, errs[0], "configuration key not expected: bad_key")
+	assert.EqualError(t, errs[1], "configuration key not expected: totp.skewy")
+}
+
+func TestShouldNotValidateBadEnvKeys(t *testing.T) {
+	configKeys := ValidKeys
+	configKeys = append(configKeys, "AUTHELIA__BAD_ENV_KEY")
+	configKeys = append(configKeys, "AUTHELIA_BAD_ENV_KEY")
+
+	val := schema.NewStructValidator()
+	ValidateKeys(configKeys, "AUTHELIA_", val)
+
+	warns := val.Warnings()
+	assert.Len(t, val.Errors(), 0)
+	require.Len(t, warns, 2)
+
+	assert.EqualError(t, warns[0], "configuration environment variable not expected: AUTHELIA__BAD_ENV_KEY")
+	assert.EqualError(t, warns[1], "configuration environment variable not expected: AUTHELIA_BAD_ENV_KEY")
 }
 
 func TestAllSpecificErrorKeys(t *testing.T) {
@@ -48,7 +64,7 @@ func TestAllSpecificErrorKeys(t *testing.T) {
 	}
 
 	val := schema.NewStructValidator()
-	ValidateKeys(val, configKeys)
+	ValidateKeys(configKeys, "AUTHELIA_", val)
 
 	errs := val.Errors()
 
@@ -72,7 +88,7 @@ func TestSpecificErrorKeys(t *testing.T) {
 	}
 
 	val := schema.NewStructValidator()
-	ValidateKeys(val, configKeys)
+	ValidateKeys(configKeys, "AUTHELIA_", val)
 
 	errs := val.Errors()
 
@@ -95,7 +111,7 @@ func TestReplacedErrors(t *testing.T) {
 	}
 
 	val := schema.NewStructValidator()
-	ValidateKeys(val, configKeys)
+	ValidateKeys(configKeys, "AUTHELIA_", val)
 
 	warns := val.Warnings()
 	errs := val.Errors()
@@ -109,18 +125,3 @@ func TestReplacedErrors(t *testing.T) {
 	assert.EqualError(t, errs[3], fmt.Sprintf(errFmtReplacedConfigurationKey, "logs_file_path", "log.file_path"))
 	assert.EqualError(t, errs[4], fmt.Sprintf(errFmtReplacedConfigurationKey, "logs_level", "log.level"))
 }
-
-func TestSecretKeysDontRaiseErrors(t *testing.T) {
-	configKeys := []string{}
-
-	for _, key := range SecretNames {
-		configKeys = append(configKeys, SecretNameToEnvName(key))
-		configKeys = append(configKeys, key)
-	}
-
-	val := schema.NewStructValidator()
-	ValidateKeys(val, configKeys)
-
-	assert.Len(t, val.Warnings(), 0)
-	assert.Len(t, val.Errors(), 0)
-}
@@ -12,16 +12,16 @@ import (
 func ValidateLogging(configuration *schema.Configuration, validator *schema.StructValidator) {
 	applyDeprecatedLoggingConfiguration(configuration, validator) // TODO: DEPRECATED LINE. Remove in 4.33.0.
 
-	if configuration.Logging.Level == "" {
-		configuration.Logging.Level = schema.DefaultLoggingConfiguration.Level
+	if configuration.Log.Level == "" {
+		configuration.Log.Level = schema.DefaultLoggingConfiguration.Level
 	}
 
-	if configuration.Logging.Format == "" {
-		configuration.Logging.Format = schema.DefaultLoggingConfiguration.Format
+	if configuration.Log.Format == "" {
+		configuration.Log.Format = schema.DefaultLoggingConfiguration.Format
 	}
 
-	if !utils.IsStringInSlice(configuration.Logging.Level, validLoggingLevels) {
-		validator.Push(fmt.Errorf(errFmtLoggingLevelInvalid, configuration.Logging.Level, strings.Join(validLoggingLevels, ", ")))
+	if !utils.IsStringInSlice(configuration.Log.Level, validLoggingLevels) {
+		validator.Push(fmt.Errorf(errFmtLoggingLevelInvalid, configuration.Log.Level, strings.Join(validLoggingLevels, ", ")))
 	}
 }
 
@@ -30,24 +30,24 @@ func applyDeprecatedLoggingConfiguration(configuration *schema.Configuration, va
 	if configuration.LogLevel != "" {
 		validator.PushWarning(fmt.Errorf(errFmtDeprecatedConfigurationKey, "log_level", "4.33.0", "log.level"))
 
-		if configuration.Logging.Level == "" {
-			configuration.Logging.Level = configuration.LogLevel
+		if configuration.Log.Level == "" {
+			configuration.Log.Level = configuration.LogLevel
 		}
 	}
 
 	if configuration.LogFormat != "" {
 		validator.PushWarning(fmt.Errorf(errFmtDeprecatedConfigurationKey, "log_format", "4.33.0", "log.format"))
 
-		if configuration.Logging.Format == "" {
-			configuration.Logging.Format = configuration.LogFormat
+		if configuration.Log.Format == "" {
+			configuration.Log.Format = configuration.LogFormat
 		}
 	}
 
 	if configuration.LogFilePath != "" {
 		validator.PushWarning(fmt.Errorf(errFmtDeprecatedConfigurationKey, "log_file_path", "4.33.0", "log.file_path"))
 
-		if configuration.Logging.FilePath == "" {
-			configuration.Logging.FilePath = configuration.LogFilePath
+		if configuration.Log.FilePath == "" {
+			configuration.Log.FilePath = configuration.LogFilePath
 		}
 	}
 }
@@ -20,20 +20,20 @@ func TestShouldSetDefaultLoggingValues(t *testing.T) {
 	assert.Len(t, validator.Warnings(), 0)
 	assert.Len(t, validator.Errors(), 0)
 
-	require.NotNil(t, config.Logging.KeepStdout)
+	require.NotNil(t, config.Log.KeepStdout)
 
-	assert.Equal(t, "", config.LogLevel)
-	assert.Equal(t, "", config.LogFormat)
-	assert.Equal(t, "", config.LogFilePath)
+	assert.Equal(t, "", config.LogLevel) // TODO: DEPRECATED TEST. Remove in 4.33.0.
+	assert.Equal(t, "", config.LogFormat) // TODO: DEPRECATED TEST. Remove in 4.33.0.
+	assert.Equal(t, "", config.LogFilePath) // TODO: DEPRECATED TEST. Remove in 4.33.0.
 
-	assert.Equal(t, "info", config.Logging.Level)
-	assert.Equal(t, "text", config.Logging.Format)
-	assert.Equal(t, "", config.Logging.FilePath)
+	assert.Equal(t, "info", config.Log.Level)
+	assert.Equal(t, "text", config.Log.Format)
+	assert.Equal(t, "", config.Log.FilePath)
 }
 
 func TestShouldRaiseErrorOnInvalidLoggingLevel(t *testing.T) {
 	config := &schema.Configuration{
-		Logging: schema.LogConfiguration{
+		Log: schema.LogConfiguration{
 			Level: "TRACE",
 		},
 	}
@@ -63,15 +63,15 @@ func TestShouldMigrateDeprecatedLoggingConfig(t *testing.T) {
 	assert.Len(t, validator.Errors(), 0)
 	require.Len(t, validator.Warnings(), 3)
 
-	require.NotNil(t, config.Logging.KeepStdout)
+	require.NotNil(t, config.Log.KeepStdout)
 
 	assert.Equal(t, "trace", config.LogLevel)
 	assert.Equal(t, "json", config.LogFormat)
 	assert.Equal(t, "/a/b/c", config.LogFilePath)
 
-	assert.Equal(t, "trace", config.Logging.Level)
-	assert.Equal(t, "json", config.Logging.Format)
-	assert.Equal(t, "/a/b/c", config.Logging.FilePath)
+	assert.Equal(t, "trace", config.Log.Level)
+	assert.Equal(t, "json", config.Log.Format)
+	assert.Equal(t, "/a/b/c", config.Log.FilePath)
 
 	assert.EqualError(t, validator.Warnings()[0], fmt.Sprintf(errFmtDeprecatedConfigurationKey, "log_level", "4.33.0", "log.level"))
 	assert.EqualError(t, validator.Warnings()[1], fmt.Sprintf(errFmtDeprecatedConfigurationKey, "log_format", "4.33.0", "log.format"))
@@ -80,7 +80,7 @@ func TestShouldMigrateDeprecatedLoggingConfig(t *testing.T) {
 
 func TestShouldRaiseErrorsAndNotOverwriteConfigurationWhenUsingDeprecatedLoggingConfig(t *testing.T) {
 	config := &schema.Configuration{
-		Logging: schema.LogConfiguration{
+		Log: schema.LogConfiguration{
 			Level: "info",
 			Format: "text",
 			FilePath: "/x/y/z",
@@ -95,12 +95,12 @@ func TestShouldRaiseErrorsAndNotOverwriteConfigurationWhenUsingDeprecatedLogging
 	ValidateLogging(config, validator)
 
-	require.NotNil(t, config.Logging.KeepStdout)
+	require.NotNil(t, config.Log.KeepStdout)
 
-	assert.Equal(t, "info", config.Logging.Level)
-	assert.Equal(t, "text", config.Logging.Format)
-	assert.True(t, config.Logging.KeepStdout)
-	assert.Equal(t, "/x/y/z", config.Logging.FilePath)
+	assert.Equal(t, "info", config.Log.Level)
+	assert.Equal(t, "text", config.Log.Format)
+	assert.True(t, config.Log.KeepStdout)
+	assert.Equal(t, "/x/y/z", config.Log.FilePath)
 
 	assert.Len(t, validator.Errors(), 0)
 	require.Len(t, validator.Warnings(), 3)
@@ -1,89 +0,0 @@
-package validator
-
-import (
-	"fmt"
-	"io/ioutil"
-	"strings"
-
-	"github.com/spf13/viper"
-
-	"github.com/authelia/authelia/internal/configuration/schema"
-)
-
-// SecretNameToEnvName converts a secret name into the env name.
-func SecretNameToEnvName(secretName string) (envName string) {
-	return "authelia." + secretName + ".file"
-}
-
-func isSecretKey(value string) (isSecretKey bool) {
-	for _, secretKey := range SecretNames {
-		if value == secretKey || value == SecretNameToEnvName(secretKey) {
-			return true
-		}
-	}
-
-	return false
-}
-
-// ValidateSecrets checks that secrets are either specified by config file/env or by file references.
-func ValidateSecrets(configuration *schema.Configuration, validator *schema.StructValidator, viper *viper.Viper) {
-	configuration.JWTSecret = getSecretValue(SecretNames["JWTSecret"], validator, viper)
-	configuration.Session.Secret = getSecretValue(SecretNames["SessionSecret"], validator, viper)
-
-	if configuration.DuoAPI != nil {
-		configuration.DuoAPI.SecretKey = getSecretValue(SecretNames["DUOSecretKey"], validator, viper)
-	}
-
-	if configuration.Session.Redis != nil {
-		configuration.Session.Redis.Password = getSecretValue(SecretNames["RedisPassword"], validator, viper)
-
-		if configuration.Session.Redis.HighAvailability != nil {
-			configuration.Session.Redis.HighAvailability.SentinelPassword =
-				getSecretValue(SecretNames["RedisSentinelPassword"], validator, viper)
-		}
-	}
-
-	if configuration.AuthenticationBackend.LDAP != nil {
-		configuration.AuthenticationBackend.LDAP.Password = getSecretValue(SecretNames["LDAPPassword"], validator, viper)
-	}
-
-	if configuration.Notifier != nil && configuration.Notifier.SMTP != nil {
-		configuration.Notifier.SMTP.Password = getSecretValue(SecretNames["SMTPPassword"], validator, viper)
-	}
-
-	if configuration.Storage.MySQL != nil {
-		configuration.Storage.MySQL.Password = getSecretValue(SecretNames["MySQLPassword"], validator, viper)
-	}
-
-	if configuration.Storage.PostgreSQL != nil {
-		configuration.Storage.PostgreSQL.Password = getSecretValue(SecretNames["PostgreSQLPassword"], validator, viper)
-	}
-
-	if configuration.IdentityProviders.OIDC != nil {
-		configuration.IdentityProviders.OIDC.HMACSecret = getSecretValue(SecretNames["OpenIDConnectHMACSecret"], validator, viper)
-		configuration.IdentityProviders.OIDC.IssuerPrivateKey = getSecretValue(SecretNames["OpenIDConnectIssuerPrivateKey"], validator, viper)
-	}
-}
-
-func getSecretValue(name string, validator *schema.StructValidator, viper *viper.Viper) string {
-	configValue := viper.GetString(name)
-	fileEnvValue := viper.GetString(SecretNameToEnvName(name))
-
-	// Error Checking.
-	if fileEnvValue != "" && configValue != "" {
-		validator.Push(fmt.Errorf("error loading secret (%s): it's already defined in the config file", name))
-	}
-
-	// Derive Secret.
-	if fileEnvValue != "" {
-		content, err := ioutil.ReadFile(fileEnvValue)
-		if err != nil {
-			validator.Push(fmt.Errorf("error loading secret file (%s): %s", name, err))
-		} else {
-			// TODO: Test this functionality.
-			return strings.TrimRight(string(content), "\n")
-		}
-	}
-
-	return configValue
-}
@@ -1,18 +0,0 @@
-package validator
-
-import (
-	"testing"
-
-	"github.com/stretchr/testify/assert"
-)
-
-func TestShouldValidateCorrectSecretKeys(t *testing.T) {
-	assert.True(t, isSecretKey("jwt_secret"))
-	assert.True(t, isSecretKey("authelia.jwt_secret.file"))
-	assert.False(t, isSecretKey("totp.issuer"))
-}
-
-func TestShouldCreateCorrectSecretEnvNames(t *testing.T) {
-	assert.Equal(t, "authelia.jwt_secret.file", SecretNameToEnvName("jwt_secret"))
-	assert.Equal(t, "authelia.not_a_real_secret.file", SecretNameToEnvName("not_a_real_secret"))
-}
@@ -361,7 +361,7 @@ func verifySessionHasUpToDateProfile(ctx *middlewares.AutheliaCtx, targetURL *ur
 		}
 	} else {
 		ctx.Logger.Debugf("Updated profile detected for %s.", userSession.Username)
-		if ctx.Configuration.Logging.Level == "trace" {
+		if ctx.Configuration.Log.Level == "trace" {
 			generateVerifySessionHasUpToDateProfileTraceLogs(ctx, userSession, details)
 		}
 		userSession.Emails = details.Emails
@@ -6,45 +6,44 @@ import (
 
 	logrus_stack "github.com/Gurpartap/logrus-stack"
 	"github.com/sirupsen/logrus"
+
+	"github.com/authelia/authelia/internal/configuration/schema"
 )
 
-// Logger return the standard logrus logger.
+// Logger returns the standard logrus logger.
 func Logger() *logrus.Logger {
 	return logrus.StandardLogger()
 }
 
-// SetLevel set the level of the logger.
-func SetLevel(level logrus.Level) {
-	logrus.SetLevel(level)
-}
-
-// InitializeLogger initialize logger.
-func InitializeLogger(format, filename string, stdout bool) error {
+// InitializeLogger configures the default loggers stack levels, formatting, and the output destinations.
+func InitializeLogger(config schema.LogConfiguration, log bool) error {
+	setLevelStr(config.Level, log)
+
 	callerLevels := []logrus.Level{}
 	stackLevels := []logrus.Level{logrus.PanicLevel, logrus.FatalLevel, logrus.ErrorLevel}
 	logrus.AddHook(logrus_stack.NewHook(callerLevels, stackLevels))
 
-	if format == logFormatJSON {
+	if config.Format == logFormatJSON {
 		logrus.SetFormatter(&logrus.JSONFormatter{})
 	} else {
 		logrus.SetFormatter(&logrus.TextFormatter{})
 	}
 
-	if filename != "" {
-		f, err := os.OpenFile(filename, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0600)
+	if config.FilePath != "" {
+		f, err := os.OpenFile(config.FilePath, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0600)
 
 		if err != nil {
 			return err
 		}
 
-		if format != logFormatJSON {
+		if config.Format != logFormatJSON {
 			logrus.SetFormatter(&logrus.TextFormatter{
 				DisableColors: true,
 				FullTimestamp: true,
 			})
 		}
 
-		if stdout {
+		if config.KeepStdout {
 			logLocations := io.MultiWriter(os.Stdout, f)
 			logrus.SetOutput(logLocations)
 		} else {
@@ -54,3 +53,26 @@ func InitializeLogger(format, filename string, stdout bool) error {
 
 	return nil
 }
+
+func setLevelStr(level string, log bool) {
+	switch level {
+	case "error":
+		logrus.SetLevel(logrus.ErrorLevel)
+	case "warn":
+		logrus.SetLevel(logrus.WarnLevel)
+	case "info":
+		logrus.SetLevel(logrus.InfoLevel)
+	case "debug":
+		logrus.SetLevel(logrus.DebugLevel)
+	case "trace":
+		logrus.SetLevel(logrus.TraceLevel)
+	default:
+		level = "info (default)"
+
+		logrus.SetLevel(logrus.InfoLevel)
+	}
+
+	if log {
+		logrus.Infof("Log severity set to %s", level)
+	}
+}
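A minimal usage sketch of the new InitializeLogger signature follows, mirroring the calls in the logger tests further down. The logging import path is an assumption for illustration; only schema.LogConfiguration appears in this diff.

package main

import (
	"github.com/authelia/authelia/internal/configuration/schema"
	"github.com/authelia/authelia/internal/logging" // assumed import path for the package above
)

func main() {
	// Level, format, file path and stdout handling now travel together in one struct.
	config := schema.LogConfiguration{Level: "debug", Format: "json", FilePath: "/tmp/authelia.log", KeepStdout: true}

	if err := logging.InitializeLogger(config, true); err != nil {
		panic(err)
	}

	logging.Logger().Info("logger configured")
}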
@@ -9,6 +9,8 @@ import (
 
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
+
+	"github.com/authelia/authelia/internal/configuration/schema"
 )
 
 func TestShouldWriteLogsToFile(t *testing.T) {
@@ -20,7 +22,7 @@ func TestShouldWriteLogsToFile(t *testing.T) {
 	defer os.RemoveAll(dir)
 
 	path := fmt.Sprintf("%s/authelia.log", dir)
-	err = InitializeLogger("text", path, false)
+	err = InitializeLogger(schema.LogConfiguration{Format: "text", FilePath: path, KeepStdout: false}, false)
 	require.NoError(t, err)
 
 	Logger().Info("This is a test")
@@ -43,7 +45,7 @@ func TestShouldWriteLogsToFileAndStdout(t *testing.T) {
 	defer os.RemoveAll(dir)
 
 	path := fmt.Sprintf("%s/authelia.log", dir)
-	err = InitializeLogger("text", path, true)
+	err = InitializeLogger(schema.LogConfiguration{Format: "text", FilePath: path, KeepStdout: true}, false)
 	require.NoError(t, err)
 
 	Logger().Info("This is a test")
@@ -66,7 +68,7 @@ func TestShouldFormatLogsAsJSON(t *testing.T) {
 	defer os.RemoveAll(dir)
 
 	path := fmt.Sprintf("%s/authelia.log", dir)
-	err = InitializeLogger("json", path, false)
+	err = InitializeLogger(schema.LogConfiguration{Format: "json", FilePath: path, KeepStdout: false}, false)
 	require.NoError(t, err)
 
 	Logger().Info("This is a test")
@@ -153,8 +153,8 @@ func registerRoutes(configuration schema.Configuration, providers middlewares.Pr
 	return handler
 }
 
-// StartServer start Authelia server with the given configuration and providers.
-func StartServer(configuration schema.Configuration, providers middlewares.Providers) {
+// Start Authelia's internal webserver with the given configuration and providers.
+func Start(configuration schema.Configuration, providers middlewares.Providers) {
 	logger := logging.Logger()
 
 	handler := registerRoutes(configuration, providers)
@@ -192,10 +192,10 @@ func StartServer(configuration schema.Configuration, providers middlewares.Provi
 	}
 
 	if configuration.Server.TLS.Certificate != "" && configuration.Server.TLS.Key != "" {
-		logger.Infof("Authelia is listening for TLS connections on %s%s", addrPattern, configuration.Server.Path)
+		logger.Infof("Listening for TLS connections on %s%s", addrPattern, configuration.Server.Path)
 		logger.Fatal(server.ServeTLS(listener, configuration.Server.TLS.Certificate, configuration.Server.TLS.Key))
 	} else {
-		logger.Infof("Authelia is listening for non-TLS connections on %s%s", addrPattern, configuration.Server.Path)
+		logger.Infof("Listening for non-TLS connections on %s%s", addrPattern, configuration.Server.Path)
 		logger.Fatal(server.Serve(listener))
 	}
 }
@@ -46,7 +46,7 @@ func waitUntilAutheliaBackendIsReady(dockerEnvironment *DockerEnvironment) error
 		90*time.Second,
 		dockerEnvironment,
 		"authelia-backend",
-		[]string{"Authelia is listening for"})
+		[]string{"Listening for"})
 }
 
 func waitUntilAutheliaFrontendIsReady(dockerEnvironment *DockerEnvironment) error {
@@ -39,7 +39,7 @@ func (s *CLISuite) SetupTest() {
 }
 
 func (s *CLISuite) TestShouldPrintBuildInformation() {
-	output, err := s.Exec("authelia-backend", []string{"authelia", s.testArg, s.coverageArg, "build"})
+	output, err := s.Exec("authelia-backend", []string{"authelia", s.testArg, s.coverageArg, "build-info"})
 	s.Assert().Nil(err)
 	s.Assert().Contains(output, "Last Tag: ")
 	s.Assert().Contains(output, "State: ")
@@ -86,76 +86,76 @@ func (s *CLISuite) TestShouldHashPasswordSHA512() {
 func (s *CLISuite) TestShouldGenerateCertificateRSA() {
 	output, err := s.Exec("authelia-backend", []string{"authelia", s.testArg, s.coverageArg, "certificates", "generate", "--host", "*.example.com", "--dir", "/tmp/"})
 	s.Assert().Nil(err)
-	s.Assert().Contains(output, "wrote /tmp/cert.pem")
-	s.Assert().Contains(output, "wrote /tmp/key.pem")
+	s.Assert().Contains(output, "Certificate Public Key written to /tmp/cert.pem")
+	s.Assert().Contains(output, "Certificate Private Key written to /tmp/key.pem")
 }
 
 func (s *CLISuite) TestShouldGenerateCertificateRSAWithIPAddress() {
 	output, err := s.Exec("authelia-backend", []string{"authelia", s.testArg, s.coverageArg, "certificates", "generate", "--host", "127.0.0.1", "--dir", "/tmp/"})
 	s.Assert().Nil(err)
-	s.Assert().Contains(output, "wrote /tmp/cert.pem")
-	s.Assert().Contains(output, "wrote /tmp/key.pem")
+	s.Assert().Contains(output, "Certificate Public Key written to /tmp/cert.pem")
+	s.Assert().Contains(output, "Certificate Private Key written to /tmp/key.pem")
}
 
 func (s *CLISuite) TestShouldGenerateCertificateRSAWithStartDate() {
 	output, err := s.Exec("authelia-backend", []string{"authelia", s.testArg, s.coverageArg, "certificates", "generate", "--host", "*.example.com", "--dir", "/tmp/", "--start-date", "'Jan 1 15:04:05 2011'"})
 	s.Assert().Nil(err)
-	s.Assert().Contains(output, "wrote /tmp/cert.pem")
-	s.Assert().Contains(output, "wrote /tmp/key.pem")
+	s.Assert().Contains(output, "Certificate Public Key written to /tmp/cert.pem")
+	s.Assert().Contains(output, "Certificate Private Key written to /tmp/key.pem")
 }
 
 func (s *CLISuite) TestShouldFailGenerateCertificateRSAWithStartDate() {
 	output, err := s.Exec("authelia-backend", []string{"authelia", s.testArg, s.coverageArg, "certificates", "generate", "--host", "*.example.com", "--dir", "/tmp/", "--start-date", "Jan"})
 	s.Assert().NotNil(err)
-	s.Assert().Contains(output, "Failed to parse creation date: parsing time \"Jan\" as \"Jan 2 15:04:05 2006\": cannot parse \"\" as \"2\"")
+	s.Assert().Contains(output, "Failed to parse start date: parsing time \"Jan\" as \"Jan 2 15:04:05 2006\": cannot parse \"\" as \"2\"")
 }
 
 func (s *CLISuite) TestShouldGenerateCertificateCA() {
 	output, err := s.Exec("authelia-backend", []string{"authelia", s.testArg, s.coverageArg, "certificates", "generate", "--host", "*.example.com", "--dir", "/tmp/", "--ca"})
 	s.Assert().Nil(err)
-	s.Assert().Contains(output, "wrote /tmp/cert.pem")
-	s.Assert().Contains(output, "wrote /tmp/key.pem")
+	s.Assert().Contains(output, "Certificate Public Key written to /tmp/cert.pem")
+	s.Assert().Contains(output, "Certificate Private Key written to /tmp/key.pem")
 }
 
 func (s *CLISuite) TestShouldGenerateCertificateEd25519() {
 	output, err := s.Exec("authelia-backend", []string{"authelia", s.testArg, s.coverageArg, "certificates", "generate", "--host", "*.example.com", "--dir", "/tmp/", "--ed25519"})
 	s.Assert().Nil(err)
-	s.Assert().Contains(output, "wrote /tmp/cert.pem")
-	s.Assert().Contains(output, "wrote /tmp/key.pem")
+	s.Assert().Contains(output, "Certificate Public Key written to /tmp/cert.pem")
+	s.Assert().Contains(output, "Certificate Private Key written to /tmp/key.pem")
 }
 
 func (s *CLISuite) TestShouldFailGenerateCertificateECDSA() {
 	output, err := s.Exec("authelia-backend", []string{"authelia", s.testArg, s.coverageArg, "certificates", "generate", "--host", "*.example.com", "--dir", "/tmp/", "--ecdsa-curve", "invalid"})
 	s.Assert().NotNil(err)
-	s.Assert().Contains(output, "Unrecognized elliptic curve: \"invalid\"")
+	s.Assert().Contains(output, "Failed to generate private key: unrecognized elliptic curve: \"invalid\"")
 }
 
 func (s *CLISuite) TestShouldGenerateCertificateECDSAP224() {
 	output, err := s.Exec("authelia-backend", []string{"authelia", s.testArg, s.coverageArg, "certificates", "generate", "--host", "*.example.com", "--dir", "/tmp/", "--ecdsa-curve", "P224"})
 	s.Assert().Nil(err)
-	s.Assert().Contains(output, "wrote /tmp/cert.pem")
-	s.Assert().Contains(output, "wrote /tmp/key.pem")
+	s.Assert().Contains(output, "Certificate Public Key written to /tmp/cert.pem")
+	s.Assert().Contains(output, "Certificate Private Key written to /tmp/key.pem")
 }
 
 func (s *CLISuite) TestShouldGenerateCertificateECDSAP256() {
 	output, err := s.Exec("authelia-backend", []string{"authelia", s.testArg, s.coverageArg, "certificates", "generate", "--host", "*.example.com", "--dir", "/tmp/", "--ecdsa-curve", "P256"})
 	s.Assert().Nil(err)
-	s.Assert().Contains(output, "wrote /tmp/cert.pem")
-	s.Assert().Contains(output, "wrote /tmp/key.pem")
+	s.Assert().Contains(output, "Certificate Public Key written to /tmp/cert.pem")
+	s.Assert().Contains(output, "Certificate Private Key written to /tmp/key.pem")
 }
 
 func (s *CLISuite) TestShouldGenerateCertificateECDSAP384() {
 	output, err := s.Exec("authelia-backend", []string{"authelia", s.testArg, s.coverageArg, "certificates", "generate", "--host", "*.example.com", "--dir", "/tmp/", "--ecdsa-curve", "P384"})
 	s.Assert().Nil(err)
-	s.Assert().Contains(output, "wrote /tmp/cert.pem")
-	s.Assert().Contains(output, "wrote /tmp/key.pem")
+	s.Assert().Contains(output, "Certificate Public Key written to /tmp/cert.pem")
+	s.Assert().Contains(output, "Certificate Private Key written to /tmp/key.pem")
 }
 
 func (s *CLISuite) TestShouldGenerateCertificateECDSAP521() {
 	output, err := s.Exec("authelia-backend", []string{"authelia", s.testArg, s.coverageArg, "certificates", "generate", "--host", "*.example.com", "--dir", "/tmp/", "--ecdsa-curve", "P521"})
 	s.Assert().Nil(err)
-	s.Assert().Contains(output, "wrote /tmp/cert.pem")
-	s.Assert().Contains(output, "wrote /tmp/key.pem")
+	s.Assert().Contains(output, "Certificate Public Key written to /tmp/cert.pem")
+	s.Assert().Contains(output, "Certificate Private Key written to /tmp/key.pem")
 }
 
 func TestCLISuite(t *testing.T) {
@@ -5,7 +5,7 @@ import (
 	"crypto/x509"
 	"fmt"
 	"io/ioutil"
-	"path"
+	"path/filepath"
 	"strings"
 
 	"github.com/authelia/authelia/internal/configuration/schema"
@@ -28,10 +28,10 @@ func NewTLSConfig(config *schema.TLSConfig, defaultMinVersion uint16, certPool *
 }
 
 // NewX509CertPool generates a x509.CertPool from the system PKI and the directory specified.
-func NewX509CertPool(directory string) (certPool *x509.CertPool, errors []error, nonFatalErrors []error) {
+func NewX509CertPool(directory string) (certPool *x509.CertPool, warnings []error, errors []error) {
 	certPool, err := x509.SystemCertPool()
 	if err != nil {
-		nonFatalErrors = append(nonFatalErrors, fmt.Errorf("could not load system certificate pool which may result in untrusted certificate issues: %v", err))
+		warnings = append(warnings, fmt.Errorf("could not load system certificate pool which may result in untrusted certificate issues: %v", err))
 		certPool = x509.NewCertPool()
 	}
 
@@ -48,7 +48,7 @@ func NewX509CertPool(directory string) (certPool *x509.CertPool, errors []error,
 		nameLower := strings.ToLower(certFileInfo.Name())
 
 		if !certFileInfo.IsDir() && (strings.HasSuffix(nameLower, ".cer") || strings.HasSuffix(nameLower, ".crt") || strings.HasSuffix(nameLower, ".pem")) {
-			certPath := path.Join(directory, certFileInfo.Name())
+			certPath := filepath.Join(directory, certFileInfo.Name())
 
 			logger.Tracef("Found possible cert %s, attempting to add it to the pool", certPath)
 
@@ -65,7 +65,7 @@ func NewX509CertPool(directory string) (certPool *x509.CertPool, errors []error,
 
 	logger.Tracef("Finished scan of directory %s for certificates", directory)
 
-	return certPool, errors, nonFatalErrors
+	return certPool, warnings, errors
 }
 
 // TLSStringToTLSConfigVersion returns a go crypto/tls version for a tls.Config based on string input.
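A sketch of how a caller might consume the reordered NewX509CertPool return values, treating the second slice as non-fatal warnings and the third as errors; the directory path and the standalone main wrapper are illustrative only.

package main

import (
	"log"

	"github.com/authelia/authelia/internal/utils"
)

func main() {
	pool, warnings, errs := utils.NewX509CertPool("/config/certificates")

	for _, w := range warnings {
		log.Printf("certificate warning: %v", w) // e.g. no system pool available on Windows
	}

	for _, e := range errs {
		log.Printf("certificate error: %v", e) // e.g. a file that could not be imported
	}

	_ = pool
}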
@@ -77,50 +77,50 @@ func TestShouldReturnZeroAndErrorOnInvalidTLSVersions(t *testing.T) {
 }
 
 func TestShouldReturnErrWhenX509DirectoryNotExist(t *testing.T) {
-	pool, errs, nonFatalErrs := NewX509CertPool("/tmp/asdfzyxabc123/not/a/real/dir")
+	pool, warnings, errors := NewX509CertPool("/tmp/asdfzyxabc123/not/a/real/dir")
 	assert.NotNil(t, pool)
 
 	if runtime.GOOS == windows {
-		require.Len(t, nonFatalErrs, 1)
-		assert.EqualError(t, nonFatalErrs[0], "could not load system certificate pool which may result in untrusted certificate issues: crypto/x509: system root pool is not available on Windows")
+		require.Len(t, warnings, 1)
+		assert.EqualError(t, warnings[0], "could not load system certificate pool which may result in untrusted certificate issues: crypto/x509: system root pool is not available on Windows")
 	} else {
-		assert.Len(t, nonFatalErrs, 0)
+		assert.Len(t, warnings, 0)
 	}
 
-	require.Len(t, errs, 1)
+	require.Len(t, errors, 1)
 
 	if runtime.GOOS == windows {
-		assert.EqualError(t, errs[0], "could not read certificates from directory open /tmp/asdfzyxabc123/not/a/real/dir: The system cannot find the path specified.")
+		assert.EqualError(t, errors[0], "could not read certificates from directory open /tmp/asdfzyxabc123/not/a/real/dir: The system cannot find the path specified.")
 	} else {
-		assert.EqualError(t, errs[0], "could not read certificates from directory open /tmp/asdfzyxabc123/not/a/real/dir: no such file or directory")
+		assert.EqualError(t, errors[0], "could not read certificates from directory open /tmp/asdfzyxabc123/not/a/real/dir: no such file or directory")
 	}
 }
 
 func TestShouldNotReturnErrWhenX509DirectoryExist(t *testing.T) {
-	pool, errs, nonFatalErrs := NewX509CertPool("/tmp")
+	pool, warnings, errors := NewX509CertPool("/tmp")
 	assert.NotNil(t, pool)
 
 	if runtime.GOOS == windows {
-		require.Len(t, nonFatalErrs, 1)
-		assert.EqualError(t, nonFatalErrs[0], "could not load system certificate pool which may result in untrusted certificate issues: crypto/x509: system root pool is not available on Windows")
+		require.Len(t, warnings, 1)
+		assert.EqualError(t, warnings[0], "could not load system certificate pool which may result in untrusted certificate issues: crypto/x509: system root pool is not available on Windows")
 	} else {
-		assert.Len(t, nonFatalErrs, 0)
+		assert.Len(t, warnings, 0)
 	}
 
-	assert.Len(t, errs, 0)
+	assert.Len(t, errors, 0)
 }
 
 func TestShouldReadCertsFromDirectoryButNotKeys(t *testing.T) {
-	pool, errs, nonFatalErrs := NewX509CertPool("../suites/common/ssl/")
+	pool, warnings, errors := NewX509CertPool("../suites/common/ssl/")
 	assert.NotNil(t, pool)
-	require.Len(t, errs, 1)
+	require.Len(t, errors, 1)
 
 	if runtime.GOOS == "windows" {
-		require.Len(t, nonFatalErrs, 1)
-		assert.EqualError(t, nonFatalErrs[0], "could not load system certificate pool which may result in untrusted certificate issues: crypto/x509: system root pool is not available on Windows")
+		require.Len(t, warnings, 1)
+		assert.EqualError(t, warnings[0], "could not load system certificate pool which may result in untrusted certificate issues: crypto/x509: system root pool is not available on Windows")
 	} else {
-		assert.Len(t, nonFatalErrs, 0)
+		assert.Len(t, warnings, 0)
 	}
 
-	assert.EqualError(t, errs[0], "could not import certificate key.pem")
+	assert.EqualError(t, errors[0], "could not import certificate key.pem")
 }
|
||||||
clean = "clean"
|
clean = "clean"
|
||||||
tagged = "tagged"
|
tagged = "tagged"
|
||||||
unknown = "unknown"
|
unknown = "unknown"
|
||||||
|
|
||||||
|
errFmtLinuxNotFound = "open %s: no such file or directory"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
|
42
internal/utils/errs.go
Normal file
42
internal/utils/errs.go
Normal file
|
@ -0,0 +1,42 @@
|
||||||
|
package utils
|
||||||
|
|
||||||
|
import "runtime"
|
||||||
|
|
||||||
|
// ErrSliceSortAlphabetical is a helper type that can be used with sort.Sort to sort a slice of errors in alphabetical
|
||||||
|
// order. Usage is simple just do sort.Sort(ErrSliceSortAlphabetical([]error{})).
|
||||||
|
type ErrSliceSortAlphabetical []error
|
||||||
|
|
||||||
|
func (s ErrSliceSortAlphabetical) Len() int { return len(s) }
|
||||||
|
|
||||||
|
func (s ErrSliceSortAlphabetical) Less(i, j int) bool { return s[i].Error() < s[j].Error() }
|
||||||
|
|
||||||
|
func (s ErrSliceSortAlphabetical) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||||
|
|
||||||
|
// GetExpectedErrTxt returns error text for expected errs.
|
||||||
|
func GetExpectedErrTxt(err string) string {
|
||||||
|
switch err {
|
||||||
|
case "pathnotfound":
|
||||||
|
switch runtime.GOOS {
|
||||||
|
case windows:
|
||||||
|
return "open %s: The system cannot find the path specified."
|
||||||
|
default:
|
||||||
|
return errFmtLinuxNotFound
|
||||||
|
}
|
||||||
|
case "filenotfound":
|
||||||
|
switch runtime.GOOS {
|
||||||
|
case windows:
|
||||||
|
return "open %s: The system cannot find the file specified."
|
||||||
|
default:
|
||||||
|
return errFmtLinuxNotFound
|
||||||
|
}
|
||||||
|
case "yamlisdir":
|
||||||
|
switch runtime.GOOS {
|
||||||
|
case windows:
|
||||||
|
return "read %s: The handle is invalid."
|
||||||
|
default:
|
||||||
|
return "read %s: is a directory"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ""
|
||||||
|
}
|
|
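A self-contained example of the sorting helper introduced in this file; the type is reproduced verbatim so the sketch compiles on its own.

package main

import (
	"errors"
	"fmt"
	"sort"
)

// ErrSliceSortAlphabetical sorts a slice of errors by their Error() text, as in the new utils file above.
type ErrSliceSortAlphabetical []error

func (s ErrSliceSortAlphabetical) Len() int           { return len(s) }
func (s ErrSliceSortAlphabetical) Less(i, j int) bool { return s[i].Error() < s[j].Error() }
func (s ErrSliceSortAlphabetical) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }

func main() {
	errs := []error{errors.New("b"), errors.New("a")}
	sort.Sort(ErrSliceSortAlphabetical(errs))
	fmt.Println(errs) // [a b]
}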
@@ -2,12 +2,34 @@ package utils
 
 import (
 	"crypto/sha256"
-	"fmt"
+	"encoding/hex"
+	"io"
+	"os"
 )
 
 // HashSHA256FromString takes an input string and calculates the SHA256 checksum returning it as a base16 hash string.
 func HashSHA256FromString(input string) (output string) {
-	sum := sha256.Sum256([]byte(input))
+	hash := sha256.New()
 
-	return fmt.Sprintf("%x", sum)
+	hash.Write([]byte(input))
+
+	return hex.EncodeToString(hash.Sum(nil))
+}
+
+// HashSHA256FromPath takes a path string and calculates the SHA256 checksum of the file at the path returning it as a base16 hash string.
+func HashSHA256FromPath(path string) (output string, err error) {
+	file, err := os.Open(path)
+	if err != nil {
+		return "", err
+	}
+
+	defer file.Close()
+
+	hash := sha256.New()
+
+	if _, err := io.Copy(hash, file); err != nil {
+		return "", err
+	}
+
+	return hex.EncodeToString(hash.Sum(nil)), nil
 }

internal/utils/hashing_test.go (new file, 67 lines)
@@ -0,0 +1,67 @@
+package utils
+
+import (
+	"fmt"
+	"io/ioutil"
+	"os"
+	"path/filepath"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestShouldHashString(t *testing.T) {
+	input := "input"
+	anotherInput := "another"
+
+	sum := HashSHA256FromString(input)
+
+	assert.Equal(t, "c96c6d5be8d08a12e7b5cdc1b207fa6b2430974c86803d8891675e76fd992c20", sum)
+
+	anotherSum := HashSHA256FromString(anotherInput)
+
+	assert.Equal(t, "ae448ac86c4e8e4dec645729708ef41873ae79c6dff84eff73360989487f08e5", anotherSum)
+	assert.NotEqual(t, sum, anotherSum)
+
+	randomInput := RandomString(40, AlphaNumericCharacters)
+	randomSum := HashSHA256FromString(randomInput)
+
+	assert.NotEqual(t, randomSum, sum)
+	assert.NotEqual(t, randomSum, anotherSum)
+}
+
+func TestShouldHashPath(t *testing.T) {
+	dir, err := ioutil.TempDir("", "authelia-hashing")
+	assert.NoError(t, err)
+
+	err = os.WriteFile(filepath.Join(dir, "myfile"), []byte("output\n"), 0600)
+	assert.NoError(t, err)
+
+	err = os.WriteFile(filepath.Join(dir, "anotherfile"), []byte("another\n"), 0600)
+	assert.NoError(t, err)
+
+	err = os.WriteFile(filepath.Join(dir, "randomfile"), []byte(RandomString(40, AlphaNumericCharacters)+"\n"), 0600)
+	assert.NoError(t, err)
+
+	sum, err := HashSHA256FromPath(filepath.Join(dir, "myfile"))
+
+	assert.NoError(t, err)
+	assert.Equal(t, "9aff6ba4b042b9d09991a9fbf8c80ddbd2a9c433638339cd831bed955e39f106", sum)
+
+	anotherSum, err := HashSHA256FromPath(filepath.Join(dir, "anotherfile"))
+
+	assert.NoError(t, err)
+	assert.Equal(t, "33a7b215065f2ee8635efb72620bc269a1efb889ba3026560334da7366742374", anotherSum)
+
+	randomSum, err := HashSHA256FromPath(filepath.Join(dir, "randomfile"))
+
+	assert.NoError(t, err)
+	assert.NotEqual(t, randomSum, sum)
+	assert.NotEqual(t, randomSum, anotherSum)
+
+	sum, err = HashSHA256FromPath(filepath.Join(dir, "notafile"))
+	assert.Equal(t, "", sum)
+
+	errTxt := GetExpectedErrTxt("filenotfound")
+	assert.EqualError(t, err, fmt.Sprintf(errTxt, filepath.Join(dir, "notafile")))
+}
@@ -46,6 +46,17 @@ func IsStringInSlice(needle string, haystack []string) (inSlice bool) {
 	return false
 }
 
+// IsStringInSliceSuffix checks if the needle string has one of the suffixes in the haystack.
+func IsStringInSliceSuffix(needle string, haystack []string) (hasSuffix bool) {
+	for _, straw := range haystack {
+		if strings.HasSuffix(needle, straw) {
+			return true
+		}
+	}
+
+	return false
+}
+
 // IsStringInSliceFold checks if a single string is in a slice of strings but uses strings.EqualFold to compare them.
 func IsStringInSliceFold(needle string, haystack []string) (inSlice bool) {
 	for _, b := range haystack {
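IsStringInSliceSuffix is a plain suffix matcher, and the test hunk below exercises it directly. As a standalone sketch of a hypothetical use (the extension list here is an assumption for illustration, not something stated in the diff), it could filter candidate configuration paths by file extension:

package main

import (
	"fmt"
	"strings"
)

// IsStringInSliceSuffix reports whether needle ends with any entry in haystack
// (reproduced here so the sketch compiles on its own).
func IsStringInSliceSuffix(needle string, haystack []string) bool {
	for _, straw := range haystack {
		if strings.HasSuffix(needle, straw) {
			return true
		}
	}

	return false
}

func main() {
	// Hypothetical: accept only YAML configuration paths.
	extensions := []string{".yml", ".yaml"}

	fmt.Println(IsStringInSliceSuffix("configuration.yml", extensions))  // true
	fmt.Println(IsStringInSliceSuffix("configuration.toml", extensions)) // false
}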
@@ -131,3 +131,14 @@ func TestShouldNotFindStringInSliceFold(t *testing.T) {
 	assert.False(t, IsStringInSliceFold(a, slice))
 	assert.False(t, IsStringInSliceFold(b, slice))
 }
+
+func TestIsStringInSliceSuffix(t *testing.T) {
+	suffixes := []string{"apple", "banana"}
+
+	assert.True(t, IsStringInSliceSuffix("apple.banana", suffixes))
+	assert.True(t, IsStringInSliceSuffix("a.banana", suffixes))
+	assert.True(t, IsStringInSliceSuffix("a_banana", suffixes))
+	assert.True(t, IsStringInSliceSuffix("an.apple", suffixes))
+	assert.False(t, IsStringInSliceSuffix("an.orange", suffixes))
+	assert.False(t, IsStringInSliceSuffix("an.apple.orange", suffixes))
+}