From: intrigeri
Date: Sat, 9 Jan 2010 22:14:17 +0000 (+0100)
Subject: dup: support backups to Amazon S3 buckets
X-Git-Tag: backupninja-0.9.7~6
X-Git-Url: https://git.stderr.nl/gitweb?a=commitdiff_plain;h=64c4d190a3ab6b4fa60b10208b32f3b270a72482;p=matthijs%2Fupstream%2Fbackupninja.git

dup: support backups to Amazon S3 buckets

Thanks to stefan for the patch.
This fixes Redmine bug #658.
---

diff --git a/AUTHORS b/AUTHORS
index 89b505f..c8686eb 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -30,4 +30,5 @@ Matthew Palmer -- halt loglevel feature
 dan@garthwaite.org -- reportspace bugfix
 Tuomas Jormola -- "when = manual" option
 Ian Beckwith -- dup bandwidthlimit fix
-Olivier Berger -- dup debug output bugfix, reportinfo option
\ No newline at end of file
+Olivier Berger -- dup debug output bugfix, reportinfo option
+stefan -- dup support for Amazon S3 buckets
diff --git a/ChangeLog b/ChangeLog
index 12c1436..f4009d7 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -59,6 +59,7 @@ version 0.9.7 -- UNRELEASED
     . Report duplicity output as "info" so that it can be included in
       report e-mail when reportinfo is on (Closes: #563734)
     . Fix include/exclude paths with spaces
+    . Support backups to Amazon S3 buckets, thanks to stefan for the patch.
  helper changes
    dup:
     . Do not propose to exclude /home/*/.gnupg twice anymore
diff --git a/README b/README
index 03d96fe..04eda2f 100644
--- a/README
+++ b/README
@@ -182,6 +182,16 @@ blank by hitting return.
 The included helper program "ninjahelper" will walk you through creating
 an rdiff-backup configuration, and will set up the ssh keys for you.
 
+
+Amazon Simple Storage Service (S3)
+==================================
+
+Duplicity can store backups in Amazon S3 buckets, taking care of encryption.
+Since it performs incremental backups, it minimizes the number of requests per
+operation, thereby reducing costs. The boto Python interface to Amazon
+Web Services is needed to use duplicity with S3 (Debian package: python-boto).
+
+
 
 INSTALLATION
 ============
diff --git a/examples/example.dup b/examples/example.dup
index 2b59fe5..ea4d66e 100644
--- a/examples/example.dup
+++ b/examples/example.dup
@@ -8,6 +8,8 @@
 ## passed directly to duplicity, e.g. to increase verbosity set this to:
 ## options = --verbosity 8
+## when using the Amazon S3 backend to create buckets in Europe:
+## options = --s3-european-buckets --s3-use-new-style
 ##
 ## Default:
 # options =
 
@@ -158,11 +160,21 @@ exclude = /home/*/.gnupg
 ## examples include:
 ## desturl = file:///usr/local/backup
 ## desturl = rsync://user@other.host//var/backup/bla
+## desturl = s3+http://
 ## the default value of this configuration option is not set:
 ##
 ## Default:
 # desturl =
 
+## Amazon Web Services Access Key ID and Secret Access Key, needed for backups
+## to S3 buckets.
+## awsaccesskeyid = YOUR_AWS_ACCESS_KEY_ID
+## awssecretaccesskey = YOUR_AWS_SECRET_KEY
+##
+## Default:
+# awsaccesskeyid =
+# awssecretaccesskey =
+
 ## bandwith limit, in kbit/s ; default is 0, i.e. no limit an example
 ## setting would be:
 ## bandwidthlimit = 128
diff --git a/handlers/dup.helper.in b/handlers/dup.helper.in
index 2fafb99..ae48e4c 100644
--- a/handlers/dup.helper.in
+++ b/handlers/dup.helper.in
@@ -403,6 +403,12 @@ keep = $dup_keep
 # bandwithlimit. For details, see duplicity manpage, section "URL FORMAT".
 #desturl = file:///usr/local/backup
 #desturl = rsync://user@other.host//var/backup/bla
+#desturl = s3+http://your_bucket
+
+# Amazon Web Services Access Key ID and Secret Access Key, needed for backups
+# to S3 buckets.
+#awsaccesskeyid = YOUR_AWS_ACCESS_KEY_ID
+#awssecretaccesskey = YOUR_AWS_SECRET_KEY
 
 # bandwith limit, in kbit/s ; default is 0, i.e. no limit
 #bandwidthlimit = 128
diff --git a/handlers/dup.in b/handlers/dup.in
index 2f55b9c..ffae48c 100644
--- a/handlers/dup.in
+++ b/handlers/dup.in
@@ -26,6 +26,8 @@ setsection dest
 getconf incremental yes
 getconf keep 60
 getconf desturl
+getconf awsaccesskeyid
+getconf awssecretaccesskey
 getconf sshoptions
 getconf bandwidthlimit 0
 getconf desthost
@@ -38,6 +40,9 @@ destdir=${destdir%/}
 [ -n "$desturl" -o -n "$destdir" ] || fatal "The destination directory (destdir) must be set when desturl is not used."
 [ -n "$include" -o -n "$vsinclude" ] || fatal "No source includes specified"
 [ -n "$password" ] || fatal "The password option must be set."
+if [ "`echo $desturl | @AWK@ -F ':' '{print $1}'`" == "s3+http" ]; then
+   [ -n "$awsaccesskeyid" -a -n "$awssecretaccesskey" ] || fatal "AWS access keys must be set for S3 backups."
+fi
 
 ### VServers
 # If vservers are configured, check that the ones listed in $vsnames do exist.
@@ -227,6 +232,12 @@ set +o noglob
 
 execstr_source=${execstr_source//\\*/\\\\\\*}
 
+### If desturl is an S3 URL export the AWS environment variables
+if [ "`echo $desturl | @AWK@ -F ':' '{print $1}'`" == "s3+http" ]; then
+   export AWS_ACCESS_KEY_ID="$awsaccesskeyid"
+   export AWS_SECRET_ACCESS_KEY="$awssecretaccesskey"
+fi
+
 ### Cleanup commands (duplicity >= 0.4.4)
 
 # cleanup
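
Usage sketch (illustration only, not part of the commit above): with this patch
applied, an S3 job in /etc/backup.d/ might look roughly like the following. The
section layout follows the stock example.dup; the file name, password, bucket
name and both AWS keys are placeholders, not values taken from the patch.

   ## /etc/backup.d/90-s3.dup (hypothetical example)
   ## the --s3-* options are only needed when creating buckets in Europe
   options = --s3-european-buckets --s3-use-new-style

   [gpg]
   password = some_backup_passphrase

   [source]
   include = /etc
   include = /home

   [dest]
   incremental = yes
   keep = 60
   desturl = s3+http://your_bucket
   awsaccesskeyid = YOUR_AWS_ACCESS_KEY_ID
   awssecretaccesskey = YOUR_AWS_SECRET_KEY

Before running duplicity, the handler exports the two keys as
AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY, which is how duplicity's boto
backend picks up the credentials for the s3+http:// destination.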