From ebbcdfb3ff030898e7bb1dff34c509fcc31605d9 Mon Sep 17 00:00:00 2001 From: Dennis Urban Date: Sun, 18 Aug 2024 11:29:55 +0200 Subject: [PATCH] Implement Backblaze B2 as remote provider Signed-off-by: Dennis Urban (github@dennisurban.de) --- README.md | 2 +- config.go | 78 ++++---- config_test.go | 518 ++++++++++++++++++++++++------------------------- go.mod | 2 +- pg_back.conf | 8 +- upload.go | 83 ++++---- 6 files changed, 339 insertions(+), 352 deletions(-) diff --git a/README.md b/README.md index 0de1ec9..f113df1 100644 --- a/README.md +++ b/README.md @@ -208,7 +208,7 @@ WARNING: Azure support is not guaranted because there are no free solutions for testing on it When set to `b2`, files are uploaded to Backblaze B2. The `--b2-*` family of options can be used to tweak the access to -the bucket. `--b2-concurrent-uploads` can be used to upload the file through parallel HTTP connections. +the bucket. `--b2-concurrent-connections` can be used to upload the file through parallel HTTP connections. The `--upload-prefix` option can be used to place the files in a remote directory, as most cloud storage treat prefix as directories. 
The filename and diff --git a/config.go b/config.go index e2fd4cf..309ba29 100644 --- a/config.go +++ b/config.go @@ -96,13 +96,11 @@ type options struct { S3ForcePath bool S3DisableTLS bool - B2Region string - B2Bucket string - B2Endpoint string - B2KeyID string - B2AppKey string - B2ForcePath bool - B2ConcurrentUploads int + B2Bucket string + B2KeyID string + B2AppKey string + B2ForcePath bool + B2ConcurrentConnections int SFTPHost string SFTPPort string @@ -129,24 +127,24 @@ func defaultOptions() options { } return options{ - NoConfigFile: false, - Directory: "/var/backups/postgresql", - Format: 'c', - DirJobs: 1, - CompressLevel: -1, - Jobs: 1, - PauseTimeout: 3600, - PurgeInterval: -30 * 24 * time.Hour, - PurgeKeep: 0, - SumAlgo: "none", - CfgFile: defaultCfgFile, - TimeFormat: timeFormat, - WithRolePasswords: true, - Upload: "none", - Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + NoConfigFile: false, + Directory: "/var/backups/postgresql", + Format: 'c', + DirJobs: 1, + CompressLevel: -1, + Jobs: 1, + PauseTimeout: 3600, + PurgeInterval: -30 * 24 * time.Hour, + PurgeKeep: 0, + SumAlgo: "none", + CfgFile: defaultCfgFile, + TimeFormat: timeFormat, + WithRolePasswords: true, + Upload: "none", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, } } @@ -303,13 +301,11 @@ func parseCli(args []string) (options, []string, error) { pflag.StringVar(&opts.ListRemote, "list-remote", "none", "list the remote files on s3, gcs, sftp, azure instead of dumping. 
DBNAMEs become\nglobs to select files") purgeRemote := pflag.String("purge-remote", "no", "purge the file on remote location after upload, with the same rules\nas the local directory") - pflag.StringVar(&opts.B2Region, "b2-region", "", "B2 region") pflag.StringVar(&opts.B2Bucket, "b2-bucket", "", "B2 bucket") - pflag.StringVar(&opts.B2Endpoint, "b2-endpoint", "", "B2 endpoint") pflag.StringVar(&opts.B2KeyID, "b2-key-id", "", "B2 access key ID") pflag.StringVar(&opts.B2AppKey, "b2-app-key", "", "B2 app key") B2ForcePath := pflag.String("b2-force-path", "no", "force path style addressing instead of virtual hosted bucket\naddressing") - B2ConcurrentUploads := pflag.Int("b2-concurrent-uploads", 5, "set the amount of concurrent b2 http uploads") + B2ConcurrentConnections := pflag.Int("b2-concurrent-connections", 5, "set the amount of concurrent b2 http connections") pflag.StringVar(&opts.S3Region, "s3-region", "", "S3 region") pflag.StringVar(&opts.S3Bucket, "s3-bucket", "", "S3 bucket") @@ -490,10 +486,10 @@ func parseCli(args []string) (options, []string, error) { return opts, changed, fmt.Errorf("invalid value for --b2-force-path: %s", err) } - if *B2ConcurrentUploads <= 0 { - return opts, changed, fmt.Errorf("b2 concurrent uploads must be more than 0 (current %d)", *B2ConcurrentUploads) + if *B2ConcurrentConnections <= 0 { + return opts, changed, fmt.Errorf("b2 concurrent connections must be more than 0 (current %d)", *B2ConcurrentConnections) } else { - opts.B2ConcurrentUploads = *B2ConcurrentUploads + opts.B2ConcurrentConnections = *B2ConcurrentConnections } case "s3": @@ -530,8 +526,8 @@ func validateConfigurationFile(cfg *ini.File) error { "purge_older_than", "purge_min_keep", "checksum_algorithm", "pre_backup_hook", "post_backup_hook", "encrypt", "cipher_pass", "cipher_public_key", "cipher_private_key", "encrypt_keep_source", "upload", "purge_remote", - "b2_region", "b2_bucket", "b2_endpoint", "b2_key_id", "b2_app_key", "b2_force_path", - 
"b2_concurrent_uploads", "s3_region", "s3_bucket", "s3_endpoint", + "b2_bucket", "b2_key_id", "b2_app_key", "b2_force_path", + "b2_concurrent_connections", "s3_region", "s3_bucket", "s3_endpoint", "s3_profile", "s3_key_id", "s3_secret", "s3_force_path", "s3_tls", "sftp_host", "sftp_port", "sftp_user", "sftp_password", "sftp_directory", "sftp_identity", "sftp_ignore_hostkey", "gcs_bucket", "gcs_endpoint", "gcs_keyfile", @@ -634,13 +630,11 @@ func loadConfigurationFile(path string) (options, error) { opts.UploadPrefix = s.Key("upload_prefix").MustString("") opts.PurgeRemote = s.Key("purge_remote").MustBool(false) - opts.B2Region = s.Key("b2_region").MustString("") opts.B2Bucket = s.Key("b2_bucket").MustString("") - opts.B2Endpoint = s.Key("b2_endpoint").MustString("") opts.B2KeyID = s.Key("b2_key_id").MustString("") opts.B2AppKey = s.Key("b2_app_key").MustString("") opts.B2ForcePath = s.Key("b2_force_path").MustBool(false) - opts.B2ConcurrentUploads = s.Key("b2_concurrent_uploads").MustInt(5) + opts.B2ConcurrentConnections = s.Key("b2_concurrent_connections").MustInt(5) opts.S3Region = s.Key("s3_region").MustString("") opts.S3Bucket = s.Key("s3_bucket").MustString("") @@ -700,8 +694,8 @@ func loadConfigurationFile(path string) (options, error) { } } - if opts.B2ConcurrentUploads <= 0 { - return opts, fmt.Errorf("b2 concurrent uploads must be more than 0 (current %d)", opts.B2ConcurrentUploads) + if opts.B2ConcurrentConnections <= 0 { + return opts, fmt.Errorf("b2 concurrent connections must be more than 0 (current %d)", opts.B2ConcurrentConnections) } // Validate upload option @@ -888,20 +882,16 @@ func mergeCliAndConfigOptions(cliOpts options, configOpts options, onCli []strin case "purge-remote": opts.PurgeRemote = cliOpts.PurgeRemote - case "b2-region": - opts.B2Region = cliOpts.B2Region case "b2-bucket": opts.B2Bucket = cliOpts.B2Bucket - case "b2-endpoint": - opts.B2Endpoint = cliOpts.B2Endpoint case "b2-key-id": opts.B2KeyID = cliOpts.B2KeyID case "b2-app-key": 
opts.B2AppKey = cliOpts.B2AppKey case "b2-force-path": opts.B2ForcePath = cliOpts.B2ForcePath - case "b2-concurrent-uploads": - opts.B2ConcurrentUploads = cliOpts.B2ConcurrentUploads + case "b2-concurrent-connections": + opts.B2ConcurrentConnections = cliOpts.B2ConcurrentConnections case "s3-region": opts.S3Region = cliOpts.S3Region diff --git a/config_test.go b/config_test.go index 8dae470..0072f22 100644 --- a/config_test.go +++ b/config_test.go @@ -182,23 +182,23 @@ func TestDefaultOptions(t *testing.T) { } var want = options{ - Directory: "/var/backups/postgresql", - Format: 'c', - DirJobs: 1, - CompressLevel: -1, - Jobs: 1, - PauseTimeout: 3600, - PurgeInterval: -30 * 24 * time.Hour, - PurgeKeep: 0, - SumAlgo: "none", - CfgFile: "/etc/pg_back/pg_back.conf", - TimeFormat: timeFormat, - WithRolePasswords: true, - Upload: "none", - Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + Directory: "/var/backups/postgresql", + Format: 'c', + DirJobs: 1, + CompressLevel: -1, + Jobs: 1, + PauseTimeout: 3600, + PurgeInterval: -30 * 24 * time.Hour, + PurgeKeep: 0, + SumAlgo: "none", + CfgFile: "/etc/pg_back/pg_back.conf", + TimeFormat: timeFormat, + WithRolePasswords: true, + Upload: "none", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, } got := defaultOptions() @@ -227,24 +227,24 @@ func TestParseCli(t *testing.T) { { []string{"-b", "test", "-Z", "2", "a", "b"}, options{ - Directory: "test", - Dbnames: []string{"a", "b"}, - Format: 'c', - DirJobs: 1, - CompressLevel: 2, - Jobs: 1, - PauseTimeout: 3600, - PurgeInterval: -30 * 24 * time.Hour, - PurgeKeep: 0, - SumAlgo: "none", - CfgFile: "/etc/pg_back/pg_back.conf", - TimeFormat: timeFormat, - WithRolePasswords: true, - Upload: "none", - Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + Directory: "test", + Dbnames: []string{"a", "b"}, + 
Format: 'c', + DirJobs: 1, + CompressLevel: 2, + Jobs: 1, + PauseTimeout: 3600, + PurgeInterval: -30 * 24 * time.Hour, + PurgeKeep: 0, + SumAlgo: "none", + CfgFile: "/etc/pg_back/pg_back.conf", + TimeFormat: timeFormat, + WithRolePasswords: true, + Upload: "none", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, }, false, false, @@ -254,24 +254,24 @@ func TestParseCli(t *testing.T) { { []string{"-t", "--without-templates"}, options{ - Directory: "/var/backups/postgresql", - WithTemplates: false, - Format: 'c', - DirJobs: 1, - CompressLevel: -1, - Jobs: 1, - PauseTimeout: 3600, - PurgeInterval: -30 * 24 * time.Hour, - PurgeKeep: 0, - SumAlgo: "none", - CfgFile: "/etc/pg_back/pg_back.conf", - TimeFormat: timeFormat, - WithRolePasswords: true, - Upload: "none", - Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + Directory: "/var/backups/postgresql", + WithTemplates: false, + Format: 'c', + DirJobs: 1, + CompressLevel: -1, + Jobs: 1, + PauseTimeout: 3600, + PurgeInterval: -30 * 24 * time.Hour, + PurgeKeep: 0, + SumAlgo: "none", + CfgFile: "/etc/pg_back/pg_back.conf", + TimeFormat: timeFormat, + WithRolePasswords: true, + Upload: "none", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, }, false, false, @@ -305,25 +305,25 @@ func TestParseCli(t *testing.T) { { []string{"--upload", "wrong"}, options{ - Directory: "/var/backups/postgresql", - Format: 'c', - DirJobs: 1, - CompressLevel: -1, - Jobs: 1, - PauseTimeout: 3600, - PurgeInterval: -30 * 24 * time.Hour, - PurgeKeep: 0, - SumAlgo: "none", - CfgFile: "/etc/pg_back/pg_back.conf", - TimeFormat: timeFormat, - Encrypt: true, - CipherPassphrase: "testpass", - WithRolePasswords: true, - Upload: "wrong", - Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + Directory: 
"/var/backups/postgresql", + Format: 'c', + DirJobs: 1, + CompressLevel: -1, + Jobs: 1, + PauseTimeout: 3600, + PurgeInterval: -30 * 24 * time.Hour, + PurgeKeep: 0, + SumAlgo: "none", + CfgFile: "/etc/pg_back/pg_back.conf", + TimeFormat: timeFormat, + Encrypt: true, + CipherPassphrase: "testpass", + WithRolePasswords: true, + Upload: "wrong", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, }, false, false, @@ -333,25 +333,25 @@ func TestParseCli(t *testing.T) { { []string{"--download", "wrong"}, options{ - Directory: "/var/backups/postgresql", - Format: 'c', - DirJobs: 1, - CompressLevel: -1, - Jobs: 1, - PauseTimeout: 3600, - PurgeInterval: -30 * 24 * time.Hour, - PurgeKeep: 0, - SumAlgo: "none", - CfgFile: "/etc/pg_back/pg_back.conf", - TimeFormat: timeFormat, - Encrypt: true, - CipherPassphrase: "testpass", - WithRolePasswords: true, - Upload: "none", - Download: "wrong", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + Directory: "/var/backups/postgresql", + Format: 'c', + DirJobs: 1, + CompressLevel: -1, + Jobs: 1, + PauseTimeout: 3600, + PurgeInterval: -30 * 24 * time.Hour, + PurgeKeep: 0, + SumAlgo: "none", + CfgFile: "/etc/pg_back/pg_back.conf", + TimeFormat: timeFormat, + Encrypt: true, + CipherPassphrase: "testpass", + WithRolePasswords: true, + Upload: "none", + Download: "wrong", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, }, false, false, @@ -369,25 +369,25 @@ func TestParseCli(t *testing.T) { { []string{"--cipher-pass", "mypass"}, options{ - Directory: "/var/backups/postgresql", - Format: 'c', - DirJobs: 1, - CompressLevel: -1, - Jobs: 1, - PauseTimeout: 3600, - PurgeInterval: -30 * 24 * time.Hour, - PurgeKeep: 0, - SumAlgo: "none", - CfgFile: "/etc/pg_back/pg_back.conf", - TimeFormat: timeFormat, - Decrypt: false, - CipherPassphrase: "mypass", - WithRolePasswords: true, - Upload: "none", - 
Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + Directory: "/var/backups/postgresql", + Format: 'c', + DirJobs: 1, + CompressLevel: -1, + Jobs: 1, + PauseTimeout: 3600, + PurgeInterval: -30 * 24 * time.Hour, + PurgeKeep: 0, + SumAlgo: "none", + CfgFile: "/etc/pg_back/pg_back.conf", + TimeFormat: timeFormat, + Decrypt: false, + CipherPassphrase: "mypass", + WithRolePasswords: true, + Upload: "none", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, }, false, false, @@ -397,25 +397,25 @@ func TestParseCli(t *testing.T) { { []string{"--cipher-private-key", "mykey"}, options{ - Directory: "/var/backups/postgresql", - Format: 'c', - DirJobs: 1, - CompressLevel: -1, - Jobs: 1, - PauseTimeout: 3600, - PurgeInterval: -30 * 24 * time.Hour, - PurgeKeep: 0, - SumAlgo: "none", - CfgFile: "/etc/pg_back/pg_back.conf", - TimeFormat: timeFormat, - Decrypt: false, - CipherPrivateKey: "mykey", - WithRolePasswords: true, - Upload: "none", - Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + Directory: "/var/backups/postgresql", + Format: 'c', + DirJobs: 1, + CompressLevel: -1, + Jobs: 1, + PauseTimeout: 3600, + PurgeInterval: -30 * 24 * time.Hour, + PurgeKeep: 0, + SumAlgo: "none", + CfgFile: "/etc/pg_back/pg_back.conf", + TimeFormat: timeFormat, + Decrypt: false, + CipherPrivateKey: "mykey", + WithRolePasswords: true, + Upload: "none", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, }, false, false, @@ -425,25 +425,25 @@ func TestParseCli(t *testing.T) { { []string{"--cipher-public-key", "fakepubkey"}, options{ - Directory: "/var/backups/postgresql", - Format: 'c', - DirJobs: 1, - CompressLevel: -1, - Jobs: 1, - PauseTimeout: 3600, - PurgeInterval: -30 * 24 * time.Hour, - PurgeKeep: 0, - SumAlgo: "none", - CfgFile: 
"/etc/pg_back/pg_back.conf", - TimeFormat: timeFormat, - Decrypt: false, - CipherPublicKey: "fakepubkey", - WithRolePasswords: true, - Upload: "none", - Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + Directory: "/var/backups/postgresql", + Format: 'c', + DirJobs: 1, + CompressLevel: -1, + Jobs: 1, + PauseTimeout: 3600, + PurgeInterval: -30 * 24 * time.Hour, + PurgeKeep: 0, + SumAlgo: "none", + CfgFile: "/etc/pg_back/pg_back.conf", + TimeFormat: timeFormat, + Decrypt: false, + CipherPublicKey: "fakepubkey", + WithRolePasswords: true, + Upload: "none", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, }, false, false, @@ -471,7 +471,7 @@ func TestParseCli(t *testing.T) { defaultOptions(), false, false, - "b2 concurrent uploads must be more than 0 (current 0)", + "b2 concurrent connections must be more than 0 (current 0)", "", }, } @@ -541,94 +541,94 @@ func TestLoadConfigurationFile(t *testing.T) { []string{"backup_directory = test", "port = 5433"}, false, options{ - Directory: "test", - Port: 5433, - Format: 'c', - DirJobs: 1, - CompressLevel: -1, - Jobs: 1, - PauseTimeout: 3600, - PurgeInterval: -30 * 24 * time.Hour, - PurgeKeep: 0, - SumAlgo: "none", - CfgFile: "/etc/pg_back/pg_back.conf", - TimeFormat: timeFormat, - WithRolePasswords: true, - Upload: "none", - Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + Directory: "test", + Port: 5433, + Format: 'c', + DirJobs: 1, + CompressLevel: -1, + Jobs: 1, + PauseTimeout: 3600, + PurgeInterval: -30 * 24 * time.Hour, + PurgeKeep: 0, + SumAlgo: "none", + CfgFile: "/etc/pg_back/pg_back.conf", + TimeFormat: timeFormat, + WithRolePasswords: true, + Upload: "none", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, }, }, { // ensure comma separated lists work 
[]string{"backup_directory = test", "include_dbs = a, b, postgres", "compress_level = 9"}, false, options{ - Directory: "test", - Dbnames: []string{"a", "b", "postgres"}, - Format: 'c', - DirJobs: 1, - CompressLevel: 9, - Jobs: 1, - PauseTimeout: 3600, - PurgeInterval: -30 * 24 * time.Hour, - PurgeKeep: 0, - SumAlgo: "none", - CfgFile: "/etc/pg_back/pg_back.conf", - TimeFormat: timeFormat, - WithRolePasswords: true, - Upload: "none", - Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + Directory: "test", + Dbnames: []string{"a", "b", "postgres"}, + Format: 'c', + DirJobs: 1, + CompressLevel: 9, + Jobs: 1, + PauseTimeout: 3600, + PurgeInterval: -30 * 24 * time.Hour, + PurgeKeep: 0, + SumAlgo: "none", + CfgFile: "/etc/pg_back/pg_back.conf", + TimeFormat: timeFormat, + WithRolePasswords: true, + Upload: "none", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, }, }, { []string{"timestamp_format = rfc3339"}, false, options{ - Directory: "/var/backups/postgresql", - Format: 'c', - DirJobs: 1, - CompressLevel: -1, - Jobs: 1, - PauseTimeout: 3600, - PurgeInterval: -30 * 24 * time.Hour, - PurgeKeep: 0, - SumAlgo: "none", - CfgFile: "/etc/pg_back/pg_back.conf", - TimeFormat: timeFormat, - WithRolePasswords: true, - Upload: "none", - Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + Directory: "/var/backups/postgresql", + Format: 'c', + DirJobs: 1, + CompressLevel: -1, + Jobs: 1, + PauseTimeout: 3600, + PurgeInterval: -30 * 24 * time.Hour, + PurgeKeep: 0, + SumAlgo: "none", + CfgFile: "/etc/pg_back/pg_back.conf", + TimeFormat: timeFormat, + WithRolePasswords: true, + Upload: "none", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, }, }, { []string{"timestamp_format = legacy"}, false, options{ - Directory: "/var/backups/postgresql", 
- Format: 'c', - DirJobs: 1, - CompressLevel: -1, - Jobs: 1, - PauseTimeout: 3600, - PurgeInterval: -30 * 24 * time.Hour, - PurgeKeep: 0, - SumAlgo: "none", - CfgFile: "/etc/pg_back/pg_back.conf", - TimeFormat: "2006-01-02_15-04-05", - WithRolePasswords: true, - Upload: "none", - Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + Directory: "/var/backups/postgresql", + Format: 'c', + DirJobs: 1, + CompressLevel: -1, + Jobs: 1, + PauseTimeout: 3600, + PurgeInterval: -30 * 24 * time.Hour, + PurgeKeep: 0, + SumAlgo: "none", + CfgFile: "/etc/pg_back/pg_back.conf", + TimeFormat: "2006-01-02_15-04-05", + WithRolePasswords: true, + Upload: "none", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, }, }, { @@ -675,12 +675,12 @@ func TestLoadConfigurationFile(t *testing.T) { PgDumpOpts: []string{"-O", "-x"}, WithBlobs: 1, }}, - WithRolePasswords: true, - Upload: "none", - Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + WithRolePasswords: true, + Upload: "none", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, }, }, { @@ -719,16 +719,16 @@ func TestLoadConfigurationFile(t *testing.T) { PgDumpOpts: []string{}, WithBlobs: 2, }}, - WithRolePasswords: false, - Upload: "none", - Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + WithRolePasswords: false, + Upload: "none", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, }, }, { - []string{"b2_concurrent_uploads = 0"}, + []string{"b2_concurrent_connections = 0"}, true, defaultOptions(), }, @@ -772,33 +772,33 @@ func TestMergeCliAndConfigoptions(t *testing.T) { } want := options{ - BinDirectory: "/bin", - Directory: "test", - Host: "localhost", - Port: 5433, - 
Username: "test", - ConnDb: "postgres", - ExcludeDbs: []string{"a", "b"}, - Dbnames: []string{"b", "c", "d"}, - WithTemplates: true, - Format: 'd', - DirJobs: 2, - CompressLevel: 4, - Jobs: 4, - PauseTimeout: 60, - PurgeInterval: -7 * 24 * time.Hour, - PurgeKeep: 5, - SumAlgo: "sha256", - PreHook: "touch /tmp/pre-hook", - PostHook: "touch /tmp/post-hook", - CfgFile: "/etc/pg_back/pg_back.conf", - TimeFormat: timeFormat, - WithRolePasswords: true, - Upload: "none", - Download: "none", - ListRemote: "none", - AzureEndpoint: "blob.core.windows.net", - B2ConcurrentUploads: 5, + BinDirectory: "/bin", + Directory: "test", + Host: "localhost", + Port: 5433, + Username: "test", + ConnDb: "postgres", + ExcludeDbs: []string{"a", "b"}, + Dbnames: []string{"b", "c", "d"}, + WithTemplates: true, + Format: 'd', + DirJobs: 2, + CompressLevel: 4, + Jobs: 4, + PauseTimeout: 60, + PurgeInterval: -7 * 24 * time.Hour, + PurgeKeep: 5, + SumAlgo: "sha256", + PreHook: "touch /tmp/pre-hook", + PostHook: "touch /tmp/post-hook", + CfgFile: "/etc/pg_back/pg_back.conf", + TimeFormat: timeFormat, + WithRolePasswords: true, + Upload: "none", + Download: "none", + ListRemote: "none", + AzureEndpoint: "blob.core.windows.net", + B2ConcurrentConnections: 5, } cliOptList := []string{ diff --git a/go.mod b/go.mod index 3e9a717..7a7ecf3 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,7 @@ require ( cloud.google.com/go/storage v1.42.0 filippo.io/age v1.2.0 github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.4.0 + github.com/Backblaze/blazer v0.6.1 github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be github.com/aws/aws-sdk-go v1.54.6 github.com/google/go-cmp v0.6.0 @@ -26,7 +27,6 @@ require ( cloud.google.com/go/iam v1.1.8 // indirect github.com/Azure/azure-sdk-for-go/sdk/azcore v1.13.0 // indirect github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect - github.com/Backblaze/blazer v0.6.1 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/go-logr/logr v1.4.2 // 
indirect github.com/go-logr/stdr v1.2.2 // indirect diff --git a/pg_back.conf b/pg_back.conf index 0322fc3..2929ff5 100644 --- a/pg_back.conf +++ b/pg_back.conf @@ -22,7 +22,7 @@ backup_directory = /var/backups/postgresql host = port = user = -dbname = +dbname = # Weither to dump role passwords when running pg_dump dump_role_passwords = true @@ -104,7 +104,7 @@ pause_timeout = 3600 # Commands to execute before and after dumping. The post-backup # command is always executed even in case of failure. -pre_backup_hook = +pre_backup_hook = post_backup_hook = # Upload resulting files to a remote location. Possible values are: none, @@ -162,13 +162,11 @@ upload = none -# Backblaze B2 Access information. Region, Endpoint, Bucket, Key-ID and App-Key are mandatory. +# Backblaze B2 Access information. Bucket, Key-ID and App-Key are mandatory. -# b2_region = # b2_bucket = -# b2_endpoint = # b2_key_id = # b2_app_key = # b2_force_path = false -# b2_concurrent_uploads = 5 +# b2_concurrent_connections = 5 # # Per database options. Use a ini section named the same as the diff --git a/upload.go b/upload.go index 2a24ea0..06dd414 100644 --- a/upload.go +++ b/upload.go @@ -26,11 +26,20 @@ package main import ( - "cloud.google.com/go/storage" "context" "errors" "fmt" + "io" + "net" + "os" + "os/user" + "path/filepath" + "strings" + "time" + + "cloud.google.com/go/storage" "github.com/Azure/azure-sdk-for-go/sdk/storage/azblob" + "github.com/Backblaze/blazer/b2" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/credentials" "github.com/aws/aws-sdk-go/aws/session" @@ -42,14 +51,6 @@ import ( "golang.org/x/crypto/ssh/knownhosts" "google.golang.org/api/iterator" "google.golang.org/api/option" - "io" - "net" - "os" - "os/user" - "path/filepath" - "strings" - "time" - "github.com/Backblaze/blazer/b2" ) // A Repo is a remote service where we can upload files @@ -120,16 +121,14 @@ func NewRepo(kind string, opts options) (Repo, error) { } type b2repo struct { - appKey string - b2Bucket *b2.Bucket - b2Client *b2.Client - bucket string - concurrentUploads int - 
ctx context.Context - endpoint string - forcePath bool - keyID string - region string + appKey string + b2Bucket *b2.Bucket + b2Client *b2.Client + bucket string + concurrentConnections int + ctx context.Context + forcePath bool + keyID string } type s3repo struct { @@ -146,17 +145,15 @@ func NewB2Repo(opts options) (*b2repo, error) { r := &b2repo{ - appKey: opts.B2AppKey, - bucket: opts.B2Bucket, - concurrentUploads: opts.B2ConcurrentUploads, - ctx: context.Background(), - endpoint: opts.B2Endpoint, - forcePath: opts.B2ForcePath, - keyID: opts.B2KeyID, - region: opts.B2Region, - } - - l.Verbosef("starting b2 client with %d connections to %s %s \n", r.concurrentUploads, r.endpoint, r.bucket) + appKey: opts.B2AppKey, + bucket: opts.B2Bucket, + concurrentConnections: opts.B2ConcurrentConnections, + ctx: context.Background(), + forcePath: opts.B2ForcePath, + keyID: opts.B2KeyID, + } + + l.Verbosef("starting b2 client with %d connections to bucket %s\n", r.concurrentConnections, r.bucket) client, err := b2.NewClient(r.ctx, r.keyID, r.appKey) if err != nil { @@ -184,40 +181,38 @@ func (r *b2repo) Upload(path string, target string) error { defer f.Close() w := r.b2Bucket.Object(target).NewWriter(r.ctx) - w.ConcurrentUploads = r.concurrentUploads + w.ConcurrentUploads = r.concurrentConnections l.Infof("uploading %s to B2 bucket %s\n", path, r.bucket) if _, err := io.Copy(w, f); err != nil { w.Close() return err } return w.Close() } func (r *b2repo) Download(target string, path string) error { - - file, err := os.Create(path) + f, err := os.Create(path) if err != nil { return fmt.Errorf("download error: %w", err) } - defer file.Close() + defer f.Close() bucket := r.b2Bucket - remoteFile := bucket.Object(path).NewReader(r.ctx) - defer remoteFile.Close() + l.Infof("downloading %s from B2 bucket %s to %s\n", target, r.bucket, path) + + rf := bucket.Object(target).NewReader(r.ctx) + rf.ConcurrentDownloads = r.concurrentConnections + defer rf.Close() - localFile, err := os.Create(target) - if err != nil { - return err - } - if _, err := io.Copy(file, remoteFile); err != nil { - localFile.Close() + if _, err := io.Copy(f, rf); err != nil { return err } - return localFile.Close() + + return nil } func (r *b2repo) Close() error {