Dackup
Revision history for Perl module Dackup:
0.44 Mon Jan 11 12:06:16 IST 2010
    - add a file test (patch by Leo Lapworth)
    - do not backup the cache file (patch by Leo Lapworth)
    - note in documentation that S3 bucket must already exist
      (patch by Leo Lapworth)
    - add prefix to S3 and CloudFiles documentation
      (patch by Leo Lapworth)

0.43 Sat Jan 5 10:56:50 GMT 2010
    - Minor doc patch for 'prefix' from Leo

0.42 Tue May 5 16:44:23 BST 2009
    - fix bug with ssh transport and directories containing spaces
NAME
Dackup - Flexible file backup
SYNOPSIS
  use Dackup;

  my $source = Dackup::Target::Filesystem->new(
      prefix => '/home/acme/important/' );
  my $destination = Dackup::Target::Filesystem->new(
      prefix => '/home/acme/backup/' );
  my $dackup = Dackup->new(
      source      => $source,
      destination => $destination,
      delete      => 0,
      dry_run     => 0,
      verbose     => 1,
      throttle    => '1Mbps',
  );
  $dackup->backup;
DESCRIPTION
This module is an attempt at flexible file backup. It supports copying
to and from filesystems, remote hosts via SSH, Amazon's Simple Storage
Service and Mosso's CloudFiles. At every stage, it checks the MD5 hash
of the source and destination files.
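The digests being compared are ordinary MD5 file digests. As a minimal
illustration (not Dackup's internal code), a file's digest can be
computed with Digest::MD5 like this:

  use Digest::MD5;

  # Compute the hex MD5 digest of a file's contents.
  sub md5_of_file {
      my $path = shift;
      open my $fh, '<:raw', $path or die "Cannot open $path: $!";
      return Digest::MD5->new->addfile($fh)->hexdigest;
  }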
It uses an MD5 cache to speed up operations, stored by default in your
home directory (you can change the location with the directory
parameter). It's just a cache, so you can delete it, but the next sync
may be a little slower.
It will update new and changed files. If you pass in delete => 1 then it
will also delete files in the destination that are not present in the
source.
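A rough sketch of that decision, assuming each side's entries are a
hashref of { relative_path => md5_digest } (a hypothetical helper, not
Dackup's actual internals):

  # Hypothetical sketch: decide what to copy and what to delete.
  sub diff_entries {
      my ( $src, $dst ) = @_;
      my ( @to_update, @to_delete );
      for my $key ( keys %$src ) {
          push @to_update, $key
              if !exists $dst->{$key} || $dst->{$key} ne $src->{$key};
      }
      for my $key ( keys %$dst ) {
          push @to_delete, $key unless exists $src->{$key};
      }
      return ( \@to_update, \@to_delete );
  }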
lib/Dackup.pm (excerpt)
        || confess "Unable to create directory $directory: $!";
    }

    # Set up the MD5 cache file (dackup.db) and hand this Dackup
    # instance to both targets.
    my $filename = file( $directory, 'dackup.db' );
    my $cache    = Dackup::Cache->new( filename => $filename );
    $self->cache($cache);
    $self->source->dackup($self);
    $self->destination->dackup($self);
}
sub backup {
    my $self        = shift;
    my $source      = $self->source;
    my $destination = $self->destination;
    my $delete      = $self->delete;
    my $dry_run     = $self->dry_run;
    my $verbose     = $self->verbose;

    # List every entry on each side, then diff the two sets to
    # decide what to copy and what to delete.
    my $source_entries      = $source->entries;
    my $destination_entries = $destination->entries;
lib/Dackup.pm (excerpt, continued)
    return \@entries_to_update, \@entries_to_delete;
}
1;
__END__
=head1 NAME
Dackup - Flexible file backup
=head1 SYNOPSIS
  use Dackup;

  my $source = Dackup::Target::Filesystem->new(
      prefix => '/home/acme/important/' );
  my $destination = Dackup::Target::Filesystem->new(
      prefix => '/home/acme/backup/' );
  my $dackup = Dackup->new(
      source      => $source,
      destination => $destination,
      delete      => 0,
      dry_run     => 0,
      verbose     => 1,
      throttle    => '1Mbps',
  );
  $dackup->backup;
=head1 DESCRIPTION
This module is an attempt at flexible file backup. It supports copying
to and from filesystems, remote hosts via SSH, Amazon's Simple Storage
Service and Mosso's CloudFiles. At every stage, it checks the MD5 hash
of the source and destination files.
It uses an MD5 cache to speed up operations, stored by default in your
home directory (you can change the location with the directory
parameter). It's just a cache, so you can delete it, but the next sync
may be a little slower.

It will update new and changed files. If you pass in delete => 1 then
it will also delete files in the destination that are not present in
the source.
lib/Dackup/Target/CloudFiles.pm (excerpt)
    # Remove the corresponding object from the CloudFiles container.
    my $object = $self->object($entry);
    $object->delete;
}
1;
__END__
=head1 NAME
Dackup::Target::CloudFiles - Flexible file backup to/from CloudFiles
=head1 SYNOPSIS
  use Dackup;
  use Net::Amazon::S3;
  use Net::Mosso::CloudFiles;

  my $s3 = Net::Amazon::S3->new(
      aws_access_key_id     => 'XXX',
      aws_secret_access_key => 'YYY',
      retry                 => 1,
  );
  my $client = Net::Amazon::S3::Client->new( s3 => $s3 );
  my $bucket = $client->bucket( name => 'important' );

  my $cloudfiles = Net::Mosso::CloudFiles->new(
      user => 'myuser',
      key  => 'ZZZ',
  );
  my $container = $cloudfiles->container('backup');

  my $source      = Dackup::Target::S3->new( bucket => $bucket );
  my $destination = Dackup::Target::CloudFiles->new(
      container => $container,
      prefix    => 'important_backup/',    # optional
  );

  my $dackup = Dackup->new(
      directory   => '/home/acme/dackup',
      source      => $source,
      destination => $destination,
      delete      => 1,
  );
  $dackup->backup;
=head1 DESCRIPTION
This is a Dackup target for Mosso's CloudFiles service.
=head1 AUTHOR
Leon Brocard <acme@astray.com>
=head1 COPYRIGHT
lib/Dackup/Target/Filesystem.pm (excerpt)
    my $file_stream = Data::Stream::Bulk::Path::Class->new(
        dir        => Path::Class::Dir->new($prefix),
        only_files => 1,
    );
    my @entries;
    until ( $file_stream->is_done ) {
        foreach my $filename ( $file_stream->items ) {

            # Do not backup the cache db
            next if $filename->basename() eq 'dackup.db';
            my $key  = $filename->relative($prefix)->stringify;
            my $stat = $filename->stat
                || confess "Unable to stat $filename";
            my $ctime    = $stat->ctime;
            my $mtime    = $stat->mtime;
            my $size     = $stat->size;
            my $inodenum = $stat->ino;
lib/Dackup/Target/Filesystem.pm (excerpt, continued)
    my $filename = $self->filename($entry);
    unlink($filename) || confess("Error deleting $filename: $!");
}
1;
__END__
=head1 NAME
Dackup::Target::Filesystem - Flexible file backup to/from the filesystem
=head1 SYNOPSIS
  use Dackup;

  my $source = Dackup::Target::Filesystem->new(
      prefix => '/home/acme/important/' );
  my $destination = Dackup::Target::Filesystem->new(
      prefix => '/home/acme/backup/' );
  my $dackup = Dackup->new(
      source      => $source,
      destination => $destination,
      delete      => 0,
  );
  $dackup->backup;
=head1 DESCRIPTION
This is a Dackup target for the filesystem.
=head1 AUTHOR
Leon Brocard <acme@astray.com>
=head1 COPYRIGHT
lib/Dackup/Target/S3.pm (excerpt)
    # Remove the corresponding object from the S3 bucket.
    my $object = $self->object($entry);
    $object->delete;
}
1;
__END__
=head1 NAME
Dackup::Target::S3 - Flexible file backup to/from Amazon S3
=head1 SYNOPSIS
  use Dackup;
  use Net::Amazon::S3;

  my $s3 = Net::Amazon::S3->new(
      aws_access_key_id     => 'XXX',
      aws_secret_access_key => 'YYY',
      retry                 => 1,
  );
  my $client = Net::Amazon::S3::Client->new( s3 => $s3 );

  # You must have already created this bucket,
  # see Net::Amazon::S3::Client
  my $bucket = $client->bucket( name => 'mybackups' );

  my $source = Dackup::Target::Filesystem->new(
      prefix => '/home/acme/important/' );
  my $destination = Dackup::Target::S3->new(
      bucket => $bucket,
      prefix => 'important_backup/',    # optional
  );

  my $dackup = Dackup->new(
      source      => $source,
      destination => $destination,
      delete      => 1,
  );
  $dackup->backup;
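If the bucket does not exist yet, Net::Amazon::S3::Client can create it
first via its create_bucket method (the acl_short and
location_constraint values below are illustrative; see that module's
documentation):

  # One-off bucket creation before the first backup
  my $bucket = $client->create_bucket(
      name                => 'mybackups',
      acl_short           => 'private',
      location_constraint => 'US',
  );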
=head1 DESCRIPTION
This is a Dackup target for Amazon's Simple Storage Service.
=head1 AUTHOR
Leon Brocard <acme@astray.com>
=head1 COPYRIGHT
lib/Dackup/Target/SSH.pm (excerpt)
    $ssh->system("rm -f $filename")
        || die "rm -f $filename failed: " . $ssh->error;
}
1;
__END__
=head1 NAME
Dackup::Target::SSH - Flexible file backup to/from remote hosts via SSH
=head1 SYNOPSIS
  use Dackup;
  use Net::OpenSSH;

  my $ssh = Net::OpenSSH->new('acme:password@backuphost');
  $ssh->error
      and die "Couldn't establish SSH connection: " . $ssh->error;

  my $source = Dackup::Target::Filesystem->new(
      prefix => '/home/acme/important/' );
  my $destination = Dackup::Target::SSH->new(
      ssh    => $ssh,
      prefix => '/home/acme/important_backup/',
  );
  my $dackup = Dackup->new(
      source      => $source,
      destination => $destination,
      delete      => 0,
  );
  $dackup->backup;
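The inline password above is shown for brevity; Net::OpenSSH also
supports key-based authentication through its key_path constructor
option, for example:

  # Connect with an SSH key instead of an inline password
  my $ssh = Net::OpenSSH->new(
      'backuphost',
      user     => 'acme',
      key_path => '/home/acme/.ssh/id_rsa',
  );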
=head1 DESCRIPTION
This is a Dackup target for a remote host via SSH.
=head1 AUTHOR
Leon Brocard <acme@astray.com>
=head1 COPYRIGHT
t/filesystem.t (excerpt)
ok( -d "$source_dir", "source_dir exists" );
ok( -d "$destination_dir", "destination_dir exists" );
my $TESTFILES = 3;
# create some test files
for ( my $i = 1; $i <= $TESTFILES; $i++ ) {
my $file = $source_dir->file("test$i.txt");
my $fh = $file->openw();
$fh->print("File to backup $i");
$fh->close();
}
my $source = Dackup::Target::Filesystem->new( prefix => $source_dir );
my $destination
= Dackup::Target::Filesystem->new( prefix => $destination_dir );
my $dackup = Dackup->new(
directory => $source_dir, # So we can test the db being here
source => $source,
destination => $destination,
delete => 0,
);
my $num_backedup_first = $dackup->backup;
is( $num_backedup_first, $TESTFILES,
"We backed up the correct number of files" );
ok( -r $dackup->cache->filename(), "Cache exists in source" );
my $dest_cache
= file( $destination_dir, $dackup->cache->filename()->basename() );
ok( !-r $dest_cache, "Cache does not exist on destination" );
# check test files
for ( my $i = 1; $i <= $TESTFILES; $i++ ) {
my $file = $destination_dir->file("test$i.txt");
my $content = $file->slurp();
my $to_match = "File to backup $i";
is( $content, $to_match, "Got matching content for file $i" );
}
my $num_backedup_second = $dackup->backup;
is( $num_backedup_second, 0, "We didn't need to backup anything" );