Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

First pass at enabling s3 path style config #317

Draft
wants to merge 2 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,2 +1,4 @@
.idea/
dist/
tmp/
vendor/
14 changes: 10 additions & 4 deletions cmd/root.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,16 @@ import (
"os"
"strings"

log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/spf13/viper"

"github.com/databacker/mysql-backup/pkg/config"
"github.com/databacker/mysql-backup/pkg/core"
"github.com/databacker/mysql-backup/pkg/database"
databacklog "github.com/databacker/mysql-backup/pkg/log"
"github.com/databacker/mysql-backup/pkg/storage/credentials"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/spf13/viper"
)

type execs interface {
Expand Down Expand Up @@ -56,6 +57,9 @@ func rootCmd(execs execs) (*cobra.Command, error) {
AWS_ACCESS_KEY_ID: AWS Key ID
AWS_SECRET_ACCESS_KEY: AWS Secret Access Key
AWS_REGION: Region in which the bucket resides

It also supports one non-standard option:
	   AWS_S3_USE_PATH_STYLE: Force legacy path-style bucket addressing (defaults to false)
`,
PersistentPreRunE: func(c *cobra.Command, args []string) error {
bindFlags(cmd, v)
Expand Down Expand Up @@ -145,6 +149,7 @@ func rootCmd(execs execs) (*cobra.Command, error) {
AccessKeyID: v.GetString("aws-access-key-id"),
SecretAccessKey: v.GetString("aws-secret-access-key"),
Region: v.GetString("aws-region"),
S3UsePathStyle: v.GetBool("aws-s3-use-path-style"),
},
SMB: credentials.SMBCreds{
Username: v.GetString("smb-user"),
Expand Down Expand Up @@ -185,6 +190,7 @@ func rootCmd(execs execs) (*cobra.Command, error) {
pflags.String("aws-access-key-id", "", "Access Key for s3 and s3 interoperable systems; ignored if not using s3.")
pflags.String("aws-secret-access-key", "", "Secret Access Key for s3 and s3 interoperable systems; ignored if not using s3.")
pflags.String("aws-region", "", "Region for s3 and s3 interoperable systems; ignored if not using s3.")
pflags.Bool("aws-s3-use-path-style", false, "Force the use of legacy path-style bucket routing; ignored if not using s3.")

// smb options
pflags.String("smb-user", "", "SMB username")
Expand Down
67 changes: 34 additions & 33 deletions docs/configuration.md
Original file line number Diff line number Diff line change
Expand Up @@ -61,39 +61,40 @@ Various sample configuration files are available in the [sample-configs](../samp

The following are the environment variables, CLI flags and configuration file options for: backup(B), restore (R), prune (P).

| Purpose | Backup / Restore / Prune | CLI Flag | Env Var | Config Key | Default |
| --- | --- | --- | --- | --- | --- |
| config file path | BRP | `config` | `DB_DUMP_CONFIG` | | |
| hostname or unix domain socket path (starting with a slash) to connect to database. Required. | BR | `server` | `DB_SERVER` | `database.server` | |
| port to use to connect to database. Optional. | BR | `port` | `DB_PORT` | `database.port` | 3306 |
| username for the database | BR | `user` | `DB_USER` | `database.credentials.username` | |
| password for the database | BR | `pass` | `DB_PASS` | `database.credentials.password` | |
| names of databases to dump, comma-separated | B | `include` | `DB_NAMES` | `dump.include` | all databases in the server |
| names of databases to exclude from the dump | B | `exclude` | `DB_NAMES_EXCLUDE` | `dump.exclude` | |
| do not include `USE <database>;` statement in the dump | B | `no-database-name` | `NO_DATABASE_NAME` | `dump.noDatabaseName` | `false` |
| restore to a specific database | R | `restore --database` | `RESTORE_DATABASE` | `restore.database` | |
| how often to do a dump or prune, in minutes | BP | `dump --frequency` | `DB_DUMP_FREQ` | `dump.schedule.frequency` | `1440` (in minutes), i.e. once per day |
| what time to do the first dump or prune | BP | `dump --begin` | `DB_DUMP_BEGIN` | `dump.schedule.begin` | `0`, i.e. immediately |
| cron schedule for dumps or prunes | BP | `dump --cron` | `DB_DUMP_CRON` | `dump.schedule.cron` | |
| run the backup or prune a single time and exit | BP | `dump --once` | `RUN_ONCE` | `dump.schedule.once` | `false` |
| enable debug logging | BRP | `debug` | `DEBUG` | `logging` | `false` |
| where to put the dump file; see [backup](./backup.md) | BP | `dump --target` | `DB_DUMP_TARGET` | `dump.targets` | |
| where the restore file exists; see [restore](./restore.md) | R | `restore --target` | `DB_RESTORE_TARGET` | `restore.target` | |
| replace any `:` in the dump filename with `-` | BP | `dump --safechars` | `DB_DUMP_SAFECHARS` | `database.safechars` | `false` |
| AWS access key ID, used only if a target does not have one | BRP | `aws-access-key-id` | `AWS_ACCESS_KEY_ID` | `dump.targets[s3-target].accessKeyId` | |
| AWS secret access key, used only if a target does not have one | BRP | `aws-secret-access-key` | `AWS_SECRET_ACCESS_KEY` | `dump.targets[s3-target].secretAccessKey` | |
| AWS default region, used only if a target does not have one | BRP | `aws-region` | `AWS_REGION` | `dump.targets[s3-target].region` | |
| alternative endpoint URL for S3-interoperable systems, used only if a target does not have one | BR | `aws-endpoint-url` | `AWS_ENDPOINT_URL` | `dump.targets[s3-target].endpoint` | |
| SMB username, used only if a target does not have one | BRP | `smb-user` | `SMB_USER` | `dump.targets[smb-target].username` | |
| SMB password, used only if a target does not have one | BRP | `smb-pass` | `SMB_PASS` | `dump.targets[smb-target].password` | |
| compression to use, one of: `bzip2`, `gzip` | BP | `compression` | `DB_DUMP_COMPRESSION` | `dump.compression` | `gzip` |
| when in container, run the dump or restore with `nice`/`ionice` | BR | `` | `NICE` | `` | `false` |
| filename to save the target backup file | B | `dump --filename-pattern` | `DB_DUMP_FILENAME_PATTERN` | `dump.filenamePattern` | |
| directory with scripts to execute before backup | B | `dump --pre-backup-scripts` | `DB_DUMP_PRE_BACKUP_SCRIPTS` | `dump.scripts.preBackup` | in container, `/scripts.d/pre-backup/` |
| directory with scripts to execute after backup | B | `dump --post-backup-scripts` | `DB_DUMP_POST_BACKUP_SCRIPTS` | `dump.scripts.postBackup` | in container, `/scripts.d/post-backup/` |
| directory with scripts to execute before restore | R | `restore --pre-restore-scripts` | `DB_DUMP_PRE_RESTORE_SCRIPTS` | `restore.scripts.preRestore` | in container, `/scripts.d/pre-restore/` |
| directory with scripts to execute after restore | R | `restore --post-restore-scripts` | `DB_DUMP_POST_RESTORE_SCRIPTS` | `restore.scripts.postRestore` | in container, `/scripts.d/post-restore/` |
| retention policy for backups | BP | `dump --retention` | `RETENTION` | `prune.retention` | Infinite |
| Purpose | Backup / Restore / Prune | CLI Flag | Env Var | Config Key | Default |
|------------------------------------------------------------------------------------------------|--------------------------|----------------------------------|--------------------------------|-------------------------------------------|------------------------------------------|
| config file path | BRP | `config` | `DB_DUMP_CONFIG` | | |
| hostname or unix domain socket path (starting with a slash) to connect to database. Required. | BR | `server` | `DB_SERVER` | `database.server` | |
| port to use to connect to database. Optional. | BR | `port` | `DB_PORT` | `database.port` | 3306 |
| username for the database | BR | `user` | `DB_USER` | `database.credentials.username` | |
| password for the database | BR | `pass` | `DB_PASS` | `database.credentials.password` | |
| names of databases to dump, comma-separated | B | `include` | `DB_NAMES` | `dump.include` | all databases in the server |
| names of databases to exclude from the dump | B | `exclude` | `DB_NAMES_EXCLUDE` | `dump.exclude` | |
| do not include `USE <database>;` statement in the dump | B | `no-database-name` | `NO_DATABASE_NAME` | `dump.noDatabaseName` | `false` |
| restore to a specific database | R | `restore --database` | `RESTORE_DATABASE` | `restore.database` | |
| how often to do a dump or prune, in minutes | BP | `dump --frequency` | `DB_DUMP_FREQ` | `dump.schedule.frequency` | `1440` (in minutes), i.e. once per day |
| what time to do the first dump or prune | BP | `dump --begin` | `DB_DUMP_BEGIN` | `dump.schedule.begin` | `0`, i.e. immediately |
| cron schedule for dumps or prunes | BP | `dump --cron` | `DB_DUMP_CRON` | `dump.schedule.cron` | |
| run the backup or prune a single time and exit | BP | `dump --once` | `RUN_ONCE` | `dump.schedule.once` | `false` |
| enable debug logging | BRP | `debug` | `DEBUG` | `logging` | `false` |
| where to put the dump file; see [backup](./backup.md) | BP | `dump --target` | `DB_DUMP_TARGET` | `dump.targets` | |
| where the restore file exists; see [restore](./restore.md) | R | `restore --target` | `DB_RESTORE_TARGET` | `restore.target` | |
| replace any `:` in the dump filename with `-` | BP | `dump --safechars` | `DB_DUMP_SAFECHARS` | `database.safechars` | `false` |
| AWS access key ID, used only if a target does not have one | BRP | `aws-access-key-id` | `AWS_ACCESS_KEY_ID` | `dump.targets[s3-target].accessKeyId` | |
| AWS secret access key, used only if a target does not have one | BRP | `aws-secret-access-key` | `AWS_SECRET_ACCESS_KEY` | `dump.targets[s3-target].secretAccessKey` | |
| AWS default region, used only if a target does not have one | BRP | `aws-region` | `AWS_REGION` | `dump.targets[s3-target].region` | |
| Use legacy path-style S3 bucket routing                                                        | BRP                      | `aws-s3-use-path-style`          | `AWS_S3_USE_PATH_STYLE`        | `dump.targets[s3-target].usePathStyle`    | `false`                                  |
| alternative endpoint URL for S3-interoperable systems, used only if a target does not have one | BR | `aws-endpoint-url` | `AWS_ENDPOINT_URL` | `dump.targets[s3-target].endpoint` | |
| SMB username, used only if a target does not have one | BRP | `smb-user` | `SMB_USER` | `dump.targets[smb-target].username` | |
| SMB password, used only if a target does not have one | BRP | `smb-pass` | `SMB_PASS` | `dump.targets[smb-target].password` | |
| compression to use, one of: `bzip2`, `gzip` | BP | `compression` | `DB_DUMP_COMPRESSION` | `dump.compression` | `gzip` |
| when in container, run the dump or restore with `nice`/`ionice` | BR | `` | `NICE` | `` | `false` |
| filename to save the target backup file | B | `dump --filename-pattern` | `DB_DUMP_FILENAME_PATTERN` | `dump.filenamePattern` | |
| directory with scripts to execute before backup | B | `dump --pre-backup-scripts` | `DB_DUMP_PRE_BACKUP_SCRIPTS` | `dump.scripts.preBackup` | in container, `/scripts.d/pre-backup/` |
| directory with scripts to execute after backup | B | `dump --post-backup-scripts` | `DB_DUMP_POST_BACKUP_SCRIPTS` | `dump.scripts.postBackup` | in container, `/scripts.d/post-backup/` |
| directory with scripts to execute before restore | R | `restore --pre-restore-scripts` | `DB_DUMP_PRE_RESTORE_SCRIPTS` | `restore.scripts.preRestore` | in container, `/scripts.d/pre-restore/` |
| directory with scripts to execute after restore | R | `restore --post-restore-scripts` | `DB_DUMP_POST_RESTORE_SCRIPTS` | `restore.scripts.postRestore` | in container, `/scripts.d/post-restore/` |
| retention policy for backups | BP | `dump --retention` | `RETENTION` | `prune.retention` | Infinite |

## Configuration File

Expand Down
17 changes: 11 additions & 6 deletions pkg/config/local.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,14 @@ package config
import (
"fmt"

"gopkg.in/yaml.v3"

"github.com/databacker/mysql-backup/pkg/remote"
"github.com/databacker/mysql-backup/pkg/storage"
"github.com/databacker/mysql-backup/pkg/storage/credentials"
"github.com/databacker/mysql-backup/pkg/storage/s3"
"github.com/databacker/mysql-backup/pkg/storage/smb"
"github.com/databacker/mysql-backup/pkg/util"
"gopkg.in/yaml.v3"
)

type ConfigSpec struct {
Expand Down Expand Up @@ -129,11 +130,12 @@ func (t *Target) UnmarshalYAML(n *yaml.Node) error {
}

type S3Target struct {
Type string `yaml:"type"`
URL string `yaml:"url"`
Region string `yaml:"region"`
Endpoint string `yaml:"endpoint"`
Credentials AWSCredentials `yaml:"credentials"`
Type string `yaml:"type"`
URL string `yaml:"url"`
Region string `yaml:"region"`
Endpoint string `yaml:"endpoint"`
Credentials AWSCredentials `yaml:"credentials"`
UsePathStyle bool `yaml:"usePathStyle"`
}

func (s S3Target) Storage() (storage.Storage, error) {
Expand All @@ -148,6 +150,9 @@ func (s S3Target) Storage() (storage.Storage, error) {
if s.Endpoint != "" {
opts = append(opts, s3.WithEndpoint(s.Endpoint))
}
if s.UsePathStyle {
opts = append(opts, s3.WithPathStyle())
}
if s.Credentials.AccessKeyId != "" {
opts = append(opts, s3.WithAccessKeyId(s.Credentials.AccessKeyId))
}
Expand Down
1 change: 1 addition & 0 deletions pkg/storage/credentials/creds.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,5 @@ type AWSCreds struct {
SecretAccessKey string
Endpoint string
Region string
S3UsePathStyle bool
}
3 changes: 3 additions & 0 deletions pkg/storage/parse.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,9 @@ func ParseURL(url string, creds credentials.Creds) (Storage, error) {
if creds.AWS.SecretAccessKey != "" {
opts = append(opts, s3.WithSecretAccessKey(creds.AWS.SecretAccessKey))
}
if creds.AWS.S3UsePathStyle {
opts = append(opts, s3.WithPathStyle())
}
store = s3.New(*u, opts...)
default:
return nil, fmt.Errorf("unknown url protocol: %s", u.Scheme)
Expand Down
23 changes: 16 additions & 7 deletions pkg/storage/s3/s3.go
Original file line number Diff line number Diff line change
Expand Up @@ -179,10 +179,13 @@ func (s *S3) Remove(target string, logger *log.Entry) error {

func (s *S3) getClient(logger *log.Entry) (*s3.Client, error) {
// Get the AWS config
var opts []func(*config.LoadOptions) error
var (
cfgOpts []func(*config.LoadOptions) error
clientOpts []func(*s3.Options)
)
if s.endpoint != "" {
cleanEndpoint := getEndpoint(s.endpoint)
opts = append(opts,
cfgOpts = append(cfgOpts,
config.WithEndpointResolverWithOptions(
aws.EndpointResolverWithOptionsFunc(func(service, region string, options ...interface{}) (aws.Endpoint, error) {
return aws.Endpoint{URL: cleanEndpoint}, nil
Expand All @@ -191,27 +194,33 @@ func (s *S3) getClient(logger *log.Entry) (*s3.Client, error) {
)
}
if logger.Level == log.TraceLevel {
opts = append(opts, config.WithClientLogMode(aws.LogRequestWithBody|aws.LogResponse))
cfgOpts = append(cfgOpts, config.WithClientLogMode(aws.LogRequestWithBody|aws.LogResponse))
}
if s.region != "" {
opts = append(opts, config.WithRegion(s.region))
cfgOpts = append(cfgOpts, config.WithRegion(s.region))
}
if s.accessKeyId != "" {
opts = append(opts, config.WithCredentialsProvider(credentials.NewStaticCredentialsProvider(
cfgOpts = append(cfgOpts, config.WithCredentialsProvider(credentials.NewStaticCredentialsProvider(
s.accessKeyId,
s.secretAccessKey,
"",
)))
}
cfg, err := config.LoadDefaultConfig(context.TODO(),
opts...,
cfgOpts...,
)
if err != nil {
return nil, fmt.Errorf("failed to load AWS config: %v", err)
}

// build client options list with path style config
clientOpts = append(clientOpts, func(opts *s3.Options) {
opts.UsePathStyle = s.pathStyle
})

// Create a new S3 service client
return s3.NewFromConfig(cfg), nil

return s3.NewFromConfig(cfg, clientOpts...), nil
}

// getEndpoint returns a clean (for AWS client) endpoint. Normally, this is unchanged,
Expand Down
2 changes: 1 addition & 1 deletion pkg/util/parse.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import (
"strings"
)

// smartParse parse a url, but convert "/" into "file:///"
// SmartParse parses a URL, but converts a leading "/" into "file:///"
func SmartParse(raw string) (*url.URL, error) {
if strings.HasPrefix(raw, "/") {
raw = "file://" + raw
Expand Down
Loading