script/_genpass-id
# my ($self, $l, $r) = @_;
# $self->path([]);
# $self->errors([]);
# $self->mem({});
# $self->cur_mem_key(undef);
# my ($key, $res, $backup) = $self->_merge(undef, $l, $r);
# {
# success => !@{ $self->errors },
# error => (@{ $self->errors } ?
# join(", ",
# map { sprintf("/%s: %s", join("/", @{ $_->[0] }), $_->[1]) }
# @{ $self->errors }) : ''),
# result => $res,
# backup => $backup,
# };
#}
#
#sub _process_todo {
# my ($self) = @_;
script/_genpass-id
# return ($key, undef, undef, 1);
# }
# } else {
# $self->mem->{$memkey} = {res=>undef, todo=>[]};
# $self->cur_mem_key($memkey);
# my ($newkey, $res, $backup) = $mh->$meth($key, $l, $r);
# $self->mem->{$memkey}{res} = [$newkey, $res, $backup];
# $self->_process_todo;
# return ($newkey, $res, $backup);
# }
# } else {
# $self->_process_todo;
# return $mh->$meth($key, $l, $r);
# }
script/_genpass-id
# my $c = $mm->config;
# return $self->merge_SCALAR_SCALAR($key, $l, $r) unless $c->recurse_array;
# return if $c->wanted_path && !$mm->_path_is_included($mm->path, $c->wanted_path);
#
# my @res;
# my @backup;
# my $la = @$l;
# my $lb = @$r;
# push @{ $mm->path }, -1;
# for my $i (0..($la > $lb ? $la : $lb)-1) {
# $mm->path->[-1] = $i;
# if ($i < $la && $i < $lb) {
# push @backup, $l->[$i];
# my ($subnewkey, $subres, $subbackup, $is_circular) = $mm->_merge($i, $l->[$i], $r->[$i], $c->default_mode);
# last if @{ $mm->errors };
# if ($is_circular) {
# push @res, undef;
# push @{ $mm->mem->{ $mm->cur_mem_key }{todo} }, sub {
# my ($subnewkey, $subres, $subbackup) = @_;
# $res[$i] = $subres;
# }
# } else {
# push @res, $subres;
# }
script/_genpass-id
# } else {
# push @res, $r->[$i];
# }
# }
# pop @{ $mm->path };
# ($key, \@res, \@backup);
#}
#
#sub _prefilter_hash {
# my ($self, $h, $desc, $sub) = @_;
# my $mm = $self->merger;
script/_genpass-id
# my $mm = $self->merger;
# my $c = $mm->config;
#
#
# my $res = {};
# my $backup = {};
#
# my %k = map {$_=>1} keys(%$hl), keys(%$hr);
# push @{ $mm->path }, "";
# K:
# for my $k (keys %k) {
script/_genpass-id
# if (!$do_merge) {
# $res->{$k} = $hl->{$k} if $hl->{$k};
# next K;
# }
#
# $backup->{$k} = $hl->{$k}[1] if $hl->{$k} && $hr->{$k};
# if ($hl->{$k}) {
# push @o, $hl->{$k};
# }
# if ($hr->{$k}) {
# my %m = map {$_=>$mm->modes->{$_}->precedence_level} keys %{ $hr->{$k} };
script/_genpass-id
# my $mh = $mm->modes->{$o[$i][0]};
# if (@o == 1 &&
# (($hl->{$k} && $mh->can("merge_left_only")) ||
# ($hr->{$k} && $mh->can("merge_right_only")))) {
# my $meth = $hl->{$k} ? "merge_left_only" : "merge_right_only";
# my ($subnewkey, $v, $subbackup, $is_circular, $newmode) = $mh->$meth($k, $o[$i][1]);
# next K unless defined($subnewkey);
# $final_mode = $newmode;
# $v = $res;
# } else {
# $final_mode = $o[$i][0];
script/_genpass-id
# my $m = $mm->combine_rules->{"$final_mode+$o[$i][0]"}
# or do {
# $mm->push_error("Can't merge $final_mode + $o[$i][0]");
# return;
# };
# my ($subnewkey, $subbackup);
# ($subnewkey, $v, $subbackup, $is_circular) = $mm->_merge($k, $v, $o[$i][1], $m->[0]);
# return if @{ $mm->errors };
# if ($is_circular) {
# if ($i < $#o) {
# $mm->push_error("Can't handle circular at $i of $#o merges (mode $m->[0]): not the last merge");
# return;
# }
# push @{ $mm->mem->{ $mm->cur_mem_key }{todo} }, sub {
# my ($subnewkey, $subres, $subbackup) = @_;
# my $final_mode = $m->[1];
# $res->{$k} = [$m->[1], $subres];
# if ($c->readd_prefix) {
# $self->_readd_prefix($res, $k, $c->default_mode);
# } else {
script/_genpass-id
# }
# }
# $res->{$k} = [$final_mode, $v] unless $is_circular;
# }
# pop @{ $mm->path };
# ($res, $backup);
#}
#
#sub _readd_prefix {
# my ($self, $hh, $k, $defmode) = @_;
# my $mm = $self->merger;
script/_genpass-id
#
# my $okr = $self->_gen_right($r, $mode, sub {$_[0] eq $ok});
# return if @{ $mm->errors };
#
# push @{ $mm->path }, $ok;
# my ($res, $backup);
# {
# local $c->{readd_prefix} = 0;
# ($res, $backup) = $self->_merge_gen($okl, $okr, $mode);
# }
# pop @{ $mm->path };
# return if @{ $mm->errors };
#
#
script/_genpass-id
# $mm->push_error("Invalid config value `include_merge_regex`: invalid regex: $@");
# return;
# }
# }
#
# my ($res, $backup) = $self->_merge_gen($hl, $hr, $mode, $em, $im, $emr, $imr);
# return if @{ $mm->errors };
#
#
# if ($c->readd_prefix) {
# for my $k (keys %$res) {
script/_genpass-id
#
# if ($config_replaced) {
# $mm->config($orig_c);
# }
#
# ($key, $res, $backup);
#}
#
#1;
#
#__END__
script/_genpass-wordlist (same embedded merge code as script/_genpass-id above)
MANIFEST.SKIP
# Avoid Module::Build generated and utility files.
\bBuild$
\b_build/
# Avoid temp and backup files.
~$
\.old$
\#$
\b\.#
\.bak$
bin/git-hook-setup
sub write_hooks {
    my ($dir, $hooks) = @_;
    for my $hook (keys %{ $hooks }) {
        my $file   = catfile($dir, $hook);
        # never clobber an existing backup: try hook~, then hook1~, hook2~, ...
        my $backup = catfile($dir, "$hook~");
        my $count  = 1;
        while (-e $backup) {
            $backup = catfile($dir, "$hook$count~");
            $count++;
        }
        system 'cp', $file, $backup;
        # catfile() returns a plain string, so open it directly rather than
        # calling ->openw on it
        open my $fh, '>', $file or die "Can't write $file: $!";
        print {$fh} $hooks->{$hook};
        close $fh;
        chmod 0755, $file;
    }
}
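A hypothetical call (hook name and body invented for illustration) that installs a pre-commit hook into a repository:

write_hooks('.git/hooks', { 'pre-commit' => "#!/bin/sh\nexec prove -q t/\n" });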
MANIFEST.SKIP
\bblibdirs\.ts$ # 6.18 through 6.25 generated this
\b_eumm/ # 7.05_05 and above
\.tar\.gz$
# Avoid temp and backup files.
~$
\.old$
\#$
\b\.#
\.bak$
lib/App/GitHub/FixRepositoryName.pm
# All of the above do the same thing, basically
=head1 DESCRIPTION
App::GitHub::FixRepositoryName will automatically find and update the github repository URLs in .git/config (so that they have
the right casing). It will first make a backup of your .git/config AND it will prompt you before writing out
the new config (and show it to you first).
=head1 INSTALL
You can install L<App::GitHub::FixRepositoryName> by using L<CPAN>:
lib/App/GitHub/FixRepositoryName.pm
A commandline application that will fix a given .git/config to have the right repository name(s)
Usage: github-fix-repository-name [...] <path1> <path2> ...
--backup-to <directory> Backup 'config' to <directory> (default is the same directory)
--no-backup Do not make a backup first
--always-yes Assume yes when asking to write out the new config
--help, -h, -? This help
lib/App/GitHub/FixRepositoryName.pm
warn $error if $error;
warn <<'_END_';
Usage: github-fix-repository-name [...] <path>
--backup-to <directory> Backup 'config' to <directory> (default is the same directory)
--no-backup Do not make a backup first
--always-yes Assume yes when asking to write out the new config
--help, -h, -? This help
lib/App/GitHub/FixRepositoryName.pm
}
sub run {
my $self = shift;
my ($backup_to, $no_backup, $always_yes, $help);
GetOptions(
'help|h|?' => \$help,
'backup-to=s' => \$backup_to,
'no-backup' => \$no_backup,
'always-yes|Y' => \$always_yes,
);
if ($help) {
do_usage;
lib/App/GitHub/FixRepositoryName.pm
}
my @fix = @ARGV ? @ARGV : qw/./;
for my $path (@fix) {
$self->_try_to_fix_file_or_directory( $path,
backup_to => $backup_to, no_backup => $no_backup, always_yes => $always_yes );
}
}
sub _try_to_fix_file_or_directory {
my $self = shift;
lib/App/GitHub/FixRepositoryName.pm
if (! -s _ ) {
carp "File \"$file\" is empty";
return;
}
my ($backup_file);
my ($content, $original_content) = $self->fix_file( $file );
if ($content eq $original_content) {
$print->( "Nothing to do to \"$file\"\n" );
return;
}
lib/App/GitHub/FixRepositoryName.pm
unless ($Y) {
$print->( "Abandoning update to \"$file\"\n" );
return;
}
}
unless ( $given{no_backup} ) {
$backup_file = $self->_backup_file( $file, to => $given{backup_to}, template => $given{backup_template} );
$print->( "Made a backup of \"$file\" to \"$backup_file\"\n" );
}
File::AtomicWrite->write_file({ file => $file, input => \$content });
$print->( "Fixup of \"$file\" complete\n" );
$file = Path::Class::File->new( "$file" );
return wantarray ? ($file, $backup_file) : $file;
}
}
# TODO: Factor this out to a CPAN module
sub _backup_file {
my $self = shift;
my $file = shift;
my %given = @_;
croak "Wasn't given file to backup" unless defined $file;
croak "Can't read file \"$file\"" unless -r $file;
$file = Path::Class::File->new( "$file" );
my $to = $given{to} || $file->parent;
lib/App/GitHub/FixRepositoryName.pm
$to = Path::Class::Dir->new( "$to" );
$to->mkpath unless -e $to;
croak "Backup destination \"$to\" is not a directory (or doesn't exist)" unless -d $to;
croak "Cannot write to backup destination \"$to\"" unless -w _;
my $template = $given{template} || '.backup-%basename-%date-%tmp';
if ($template =~ m/%fullpath\b/) {
my $value = $file.'';
$value =~ s!/+!-!g;
$template =~ s/%fullpath\b/$value/g;
lib/App/GitHub/FixRepositoryName.pm
if ($template =~ m/%sha1\b/) {
my $value = sha1_hex scalar $file->slurp;
$template =~ s/%sha1\b/$value/g;
}
my ($handle, $backup_file);
if ($tmp) {
($handle, $backup_file) = tempfile( $template, DIR => "$to", UNLINK => 0 );
}
else {
$backup_file = $to->file( $template );
$handle = $backup_file->openw or croak "Couldn't open \"$backup_file\": $!";
}
$handle->print( scalar $file->slurp );
close $handle;
my $file_size = -s $file;
my $backup_file_size = -s $backup_file;
croak "Couldn't backup \"$file\" ($file_size) to \"$backup_file\" ($backup_file_size): size doesn't match!" unless $file_size == $backup_file_size;
return Path::Class::File->new( $backup_file );
}
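For illustration, a call such as the following (arguments invented) would write the backup under /tmp/backups using the default '.backup-%basename-%date-%tmp' template:

my $backup = $self->_backup_file( '.git/config', to => '/tmp/backups' );
print "backed up to $backup\n";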
=head1 AUTHOR
Robert Krimen, C<< <rkrimen at cpan.org> >>
App::Glacier
============
Command line utility for working with Amazon Glacier, an online service
that provides storage for data archiving and backup.
Usage:
glacier COMMAND [OPTIONS] [ARGS]
[GithubMeta]
; Enable issue tracking using Github
issues = 1
[PruneFiles]
; Get rid of backup files
match = ~$
match = \.bak$
[NextRelease]
format = %-7v %{yyyy-MM-dd}d
lib/App/Gre.pm
other. One level is the "include" filters like -perl, -nophp, or
-ext=c. The second level is the "exclude" filters like -xname=foo
or -xbinary.
Why are they independent? Consider if the script had a default
filter to remove all backup files (-xname='~$') which would have
to mix with additional command line filters. The following would
try to search for bash files (files whose first line starts with
#!/bin/bash) that aren't backups:
$ gre -xname='~$' -line1='^#!/bin/bash'
It wouldn't work if they weren't independent: filters are additive,
so this would have added all files which are not backups and then added
all files which are bash files (some of which may be backup files).
The reason the filters have to be additive is to let commands like
this work:
$ gre -html -js
lib/App/Gre.pm
Then you wouldn't have a chance to disable it:
$ gre -line1='^#!/bin/bash' -noxname='~$' -xname='~$'
It would still filter out the backup files.
The result should be intuitive. For example, if you want to
search everything except one file that's messing up the search add:
$ gre -xname=INBOX.mbox -ext=mbox qwerty
and you wouldn't have to worry about order of these filters.
If you want to remove all the builtin "exclude" filters, use -x on
the command line. By default, gre will exclude backup files, swap
files, core dumps, .git directories, .svn directories, binary files,
minimized js files, and more. See the output of -c for the full
list.
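To make the two levels concrete, here is a minimal sketch of the selection rule described above (illustrative Perl, not gre's actual code): a file passes if it matches at least one include filter and none of the exclude filters.

sub select_file {
    my ($file, $includes, $excludes) = @_;            # arrayrefs of predicate code refs
    return 0 unless grep { $_->($file) } @$includes;  # include filters are additive (OR)
    return 0 if     grep { $_->($file) } @$excludes;  # exclude filters veto independently
    return 1;
}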
"exclude" filters also have another property which the regular
lib/App/Greple/subst/desumasu.pm
=item B<--overwrite>
To update the file, use these options. Option B<--create> makes a new
file with extension C<.new>. Option B<--replace> updates the target
file with a backup, while option B<--overwrite> does it without a backup.
=back
See L<App::Greple::subst> for other options.
lib/App/Greple/subst.pm
original filename.
=item B<--replace>
Replace the target file with the converted result. The original file is
renamed to a backup name with the ".bak" suffix.
=item B<--overwrite>
Overwrite the target file with the converted result, making no backup.
=back
=head1 DICTIONARY
lib/App/Greple/subst.pm
--update::update \
--update::discard
option --diff --subst --update::diff
option --create --subst --update::create
option --replace --subst --update::update --with-backup
option --overwrite --subst --update::update
option --with-stat --epilogue subst_show_stat
option --stat --update::discard --with-stat
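Given the option stack above, a run like this (dictionary and file names are placeholders) substitutes in place and keeps a C<.bak> copy:

greple -Msubst --dict my.dict --replace -- target.txt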
lib/App/Greple/update.pm
greple -Mupdate
Options:
--update replace file content
--with-backup make backup files
--diff produce diff output
-U# specify unified diff context length
--discard simply discard the output
lib/App/Greple/update.pm
The file is also not updated if the output is empty. This is to
prevent the contents of the file from being erased if none of the
match strings are included. If you want to intentionally empty a
file, you need to think of another way.
=item B<--with-backup>[=I<suffix>]
Back up the original file with the C<.bak> suffix. If the optional
parameter is given, it is used as the suffix string. If the backup file
already exists, C<.bak_1>, C<.bak_2> ... are used.
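A minimal sketch of that suffix search (helper name is illustrative, not the module's actual code):

sub backup_name {
    my ($file, $suffix) = @_;
    $suffix = '.bak' if !defined $suffix || $suffix eq '';
    my $name = "$file$suffix";                                        # try file.bak first
    my $n = 1;
    $name = sprintf("%s%s_%d", $file, $suffix, $n++) while -e $name;  # then .bak_1, .bak_2, ...
    return $name;
}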
lib/App/Greple/update.pm
our $debug = 0;
our $remember_data = 1;
our $opt_update_diffcmd = "diff -u";
our $opt_suffix = '';
our $opt_backup;
our $opt_U = '';
my $current_file;
my $contents;
my @update_diffcmd;
lib/App/Greple/update.pm
sub update_initialize {
@update_diffcmd = shellwords $opt_update_diffcmd;
if ($opt_U ne '') {
@update_diffcmd = ('diff', "-U$opt_U");
}
if (defined $opt_backup) {
$opt_suffix = $opt_backup ne '' ? $opt_backup : '.bak';
}
}
sub update_begin {
my %arg = @_;
lib/App/Greple/update.pm
builtin diffcmd=s $opt_update_diffcmd
builtin update-suffix=s $opt_suffix
builtin U=i $opt_U
builtin remember! $remember_data
builtin with-backup:s $opt_backup
option default \
--prologue update_initialize \
--begin update_begin
lib/App/Greple/dig.pm
## files
##
expand is_dots -name .*
expand is_version -name *,v
expand is_backup ( -name *~ -o -name *.swp )
expand is_image ( -iname *.jpg -o -iname *.jpeg -o \
-iname *.gif -o -iname *.png -o \
-iname *.ico -o \
-iname *.heic -o -iname *.heif -o \
-iname *.svg -o \
lib/App/Greple/dig.pm
-prune -o \
-type f \
) \
! is_dots \
! is_version \
! is_backup \
! is_image \
! is_archive \
! is_pdf \
! is_db \
! is_others \
setup/import_export/ascii.pl
my ($self, $elements_to_save, $file) = @_ ;
if($self->{CREATE_BACKUP} && -e $file)
{
use File::Copy;
copy($file,"$file.bak") or die "export_pod: Copy failed while making backup copy: $!";
}
write_file($file, $self->transform_elements_to_ascii_buffer()) ;
#~ open FH, ">:encoding(utf8)", $file_name;
#~ print FH $self->transform_elements_to_ascii_buffer() ;
t/in/status.dat
last_hard_state_change=1279065910
last_time_ok=1280288856
last_time_warning=0
last_time_unknown=0
last_time_critical=0
plugin_output=/dev/mapper/misc-backup 109G 40G 69G 37% /backup
long_plugin_output=
performance_data=
last_check=1280288856
next_check=0
check_options=0
lib/App/InvestSim/Values.pm
# two arguments:
# - the string to validate.
# - the validation event: 'key' when the validation is for a key entered by the
# user, 'focusout' when the value is complete and it can be validated
# entirely. That second event is also used to validate values read from a
# backup file.
# On failure, these methods must all return 0 (and not just '' which is the
# default false value), as that is expected by Tcl.
# An integer that is allowed to be 0 or more.
sub validate_non_negative_integer {
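    # (hypothetical completion -- the excerpt cuts off here) honour the
    # contract above: partial input is fine on 'key', the full string must
    # be a non-negative integer on 'focusout', and failure returns 0, not ''.
    my ($value, $event) = @_;
    return 1 if $event eq 'key' && $value eq '';
    return $value =~ /^\d+$/ ? 1 : 0;
}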
* lib/App/Jawk.pm: restored, for ease of implementation.
Dist::Zilla wanted it by default.
2012-12-16 14:04 joshr
* .Makefile.PL: backup
2012-12-16 14:14 joshr
* .README, README: 'file moved from README to .README'
lib/App/JobLog.pm
with special valence so the log has to handle its own escaping of spaces and colons and so forth. We don't want to
worry about character encoding so the log has to be in Unicode (utf8).
B<App::JobLog> keeps its documents, by default, in a hidden directory in your home directory called F<.joblog/>. These
documents are a README file explaining to anyone who stumbles across the directory what it's function is, a log, called
F<log>, a configuration file, called F<config.ini>, a vacation file, called F<vacation>, and perhaps log backups.
To perform any action with B<App::JobLog> one invokes the executable with a command and a list of options. These commands
are listed below.
=head2 COMMANDS
lib/App/LDAP/Command/Export.pm
App::LDAP::Command::Export
=head1 SYNOPSIS
$ sudo ldap export backup.ldif
back up the whole DIT
$ ldap export people.ldif --base ou=people,dc=example,dc=com
dump user information without password
$ sudo ldap export people.ldif --base ou=people,dc=example,dc=com
lib/App/LXC/Container/Data/common.pm
'/usr/share/lintian/overrides ignore',
'/usr/share/man ignore',
'/usr/share/misc/magic.mgc ignore',
'/usr/src nomerge',
'/var nomerge',
'/var/backups ignore',
'/var/cache nomerge',
# Note that /var/lib would break the start of a container due to
# /var/lib/lxc* behaving unexpectedly:
'/var/lib nomerge',
'/var/lib/dpkg ignore',
lib/App/LedgerSMB/Admin.pm
Provides base version tracking and management routines
=head2 App::LedgerSMB::Admin::Database
Routines to reload, create, update, backup, and restore databases
=head2 App::LedgerSMB::Admin::Database::Setting
Look up settings in a LedgerSMB database
bin/licensecheck
I<Since v2.10.10.>
regular expression of files to skip
when more than one F<PATH> is provided
S<(default value: some backup and VCS files)>
=item B<-r>, B<--recursive>
I<Since v2.10.7.>
bin/licensecheck
check|c=s
ignore|i=s
recursive|r
);
$OPT{check} = 'common source files';
$OPT{ignore} = 'some backup and VCS files';
=head2 Parsing contents
=over 16
bin/licensecheck
)$
!;
# From dpkg-source
my $default_ignore_regex = q!
# Ignore general backup files
~$|
# Ignore emacs recovery files
(?:^|/)\.#|
# Ignore vi swap files
(?:^|/)\..*\.swp$|
bin/licensecheck
(?:^|/),,.*(?:$|/.*$)|
# File-names that should be ignored (never directories)
(?:^|/)(?:DEADJOE|\.cvsignore|\.arch-inventory|\.bzrignore|\.gitignore)$|
# File or directory names that should be ignored
(?:^|/)(?:CVS|RCS|\.pc|\.deps|\{arch\}|\.arch-ids|\.svn|\.hg|_darcs|\.git|
\.shelf|_MTN|\.bzr(?:\.backup|tags)?)(?:$|/.*$)
!;
my $check_regex = $OPT{check};
if ( !$check_regex or $check_regex eq 'common source files' ) {
$check_regex = qr/$default_check_regex/x;
bin/licensecheck
else {
$check_regex = qr/$check_regex/;
}
my $ignore_regex = $OPT{ignore};
if ( !$ignore_regex or $ignore_regex eq 'some backup and VCS files' ) {
$ignore_regex = qr/$default_ignore_regex/x;
}
else {
$ignore_regex = qr/$ignore_regex/;
}
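A sketch of how these compiled patterns would typically gate a file walk (loop is illustrative, not licensecheck's actual driver code):

for my $path (@paths) {
    next if $path =~ $ignore_regex;     # skip backup and VCS files
    next unless $path =~ $check_regex;  # only look at common source files
    # ... scan $path for license notices ...
}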
lib/App/LintPrereqs.pm
use Config::IOD;
use Fcntl qw(:DEFAULT);
use File::Find;
use File::Which;
use Filename::Backup qw(check_backup_filename);
use IPC::System::Options 'system', -log=>1;
use Module::CoreList::More;
use Proc::ChildError qw(explain_child_error);
use Scalar::Util 'looks_like_number';
use Sort::Sub qw(prereq_ala_perlancar);
lib/App/LintPrereqs.pm
));
last unless @dirs;
find(
sub {
return unless -f;
return if check_backup_filename(filename=>$_);
push @{$files{Runtime}}, "$File::Find::dir/$_";
},
@dirs
);
}
lib/App/LintPrereqs.pm
));
last unless @dirs;
find(
sub {
return unless -f;
return if check_backup_filename(filename=>$_);
return unless /\.(t|pl|pm)$/;
push @{$files{Test}}, "$File::Find::dir/$_";
},
@dirs
);
lib/App/LintPrereqs.pm
cmdline_aliases => {F=>{}},
description => <<'_',
`lint-prereqs` can attempt to automatically fix the errors by
adding/removing/moving prereqs in `dist.ini`. Not all errors can be
automatically fixed. When modifying `dist.ini`, a backup in `dist.ini~` will be
created.
_
},
},
lib/App/LintPrereqs.pm
for my $cmd (@{ $e->{remedy_cmds} }) {
system @$cmd;
if ($?) {
$e->{remedy} = "(FIX FAILED: ".explain_child_error().") $e->{remedy}";
$resmeta->{'cmdline.exit_code'} = 1;
# restore dist.ini from backup
rename "dist.ini~", "dist.ini";
last FIX;
}
}
}
lib/App/LintPrereqs.pm
Attempt to automatically fix the errors.
C<lint-prereqs> can attempt to automatically fix the errors by
adding/removing/moving prereqs in C<dist.ini>. Not all errors can be
automatically fixed. When modifying C<dist.ini>, a backup in C<dist.ini~> will be
created.
=item * B<perl_version> => I<str>
Perl version to use (overrides scan_prereqsE<sol>dist.ini).
lib/App/MBUtiny.pm
=encoding utf-8
=head1 NAME
App::MBUtiny - Websites and any file system elements backup tool
=head1 VERSION
Version 1.13
=head1 SYNOPSIS
# mbutiny test
# mbutiny backup
# mbutiny restore
# mbutiny report
=head1 DESCRIPTION
A backup tool for websites and any file system elements
=head2 FEATURES
=over 4
lib/App/MBUtiny.pm
=item Backup small databases
=item Run external utilities for object preparation
=item Supported storage of backups on local drives
=item Supported storage of backups on remote SFTP storages
=item Supported storage of backups on remote FTP storages
=item Supported storage of backups on remote HTTP storages
=item Easy configuration
=item Monitoring feature enabled
lib/App/MBUtiny.pm
=head2 CONFIGURATION
By default the configuration file is located in the C</etc/mbutiny> directory.
Every configuration directive is described in detail in the C<mbutiny.conf> file; also
see the C<hosts/foo.conf.sample> file for MBUtiny backup host configuration.
=head2 CRONTAB
To automatically launch the program, we recommend using standard scheduling tools, such as crontab
0 2 * * * mbutiny -l backup >/dev/null 2>>/var/log/mbutiny-error.log
Or for selected hosts only:
0 2 * * * mbutiny -l backup foo bar >/dev/null 2>>/var/log/mbutiny-error.log
15 2 * * * mbutiny -l backup baz >/dev/null 2>>/var/log/mbutiny-error.log
For daily reporting:
0 9 * * * mbutiny -l report >/dev/null 2>>/var/log/mbutiny-error.log
=head2 COLLECTOR
The collector is a monitoring server that collects data on the status of performed backups.
It allows you to build reports on the data collected from various servers.
How does it work?
+------------+
lib/App/MBUtiny.pm
To install the collector you need an Apache 2.2/2.4 web server and the CGI/FastCGI script.
See C<collector.cgi.sample> in the C</etc/mbutiny> directory.
=head2 HTTP SERVER
If you want to use the HTTP server as a storage for backups, you need to install the CGI/FastCGI
script on an Apache 2.2/2.4 web server.
See C<server.cgi>
=head1 INTERNAL METHODS
lib/App/MBUtiny.pm
=item B<rstdir>
my $rstdir = $app->rstdir;
Returns the path to restored backups
=back
=head1 HISTORY
lib/App/MBUtiny.pm
my $name = _getName($pair); # Backup name
my $host = node($pair, $name); # Config section
my $hostskip = (!@arguments || grep {lc($name) eq lc($_)} @arguments) ? 0 : 1;
my $enabled = value($host, 'enable') ? 1 : 0;
if ($hostskip || !$enabled) {
$self->log_info("Skip testing for \"%s\" backup host section", $name);
next;
}
my $tbl = Text::SimpleTable->new(@{(TEST_HEADERS)});
$self->log_info("Start testing for \"%s\" backup host section", $name);
push @header, ["Backup name", $name];
push @errors, $self->getdbi->dsn, $self->getdbi->error, "" if $self->getdbi->error;
#
# Loading backup data
#
my $buday = (value($host, 'buday') // $self->config('buday')) || 0;
my $buweek = (value($host, 'buweek') // $self->config('buweek')) || 0;
my $bumonth = (value($host, 'bumonth') // $self->config('bumonth')) || 0;
push @header, (
["Daily backups", $buday],
["Weekly backups", $buweek],
["Monthly backups", $bumonth],
);
# Get mask vars
my $arc = $self->_getArc($host);
my $arcmask = value($host, 'arcmask') || ARC_MASK;
lib/App/MBUtiny.pm
my @dates = $self->_getDates($buday, $buweek, $bumonth);
# Get paths
push @header, (
["Work directory", $self->datadir],
["Directory for backups", $self->objdir],
["Directory for restores", $self->rstdir],
);
# Regular objects
my $objects = array($host, 'object');
lib/App/MBUtiny.pm
push @errors, $storage->error, "";
$ostat = 0;
};
my $last_file = (sort {$b cmp $a} @filelist)[0];
if ($files_number && $last_file) {
push @header, ["Last backup file", $last_file];
my $list = hash($storage->{list});
foreach my $k (keys %$list) {
my $l = array($list, $k);
my $st = (grep {$_ eq $last_file} @$l) ? 1 : 0;
$tbl->row(sprintf("%s storage", $k),
lib/App/MBUtiny.pm
push @report, $self->_report_summary($ostat ? "All tests successful" : "Errors occurred while testing"); # Summary table
push @report, $tbl->draw() || ''; # Table
push @report, $self->_report_errors(@errors); # List of occurred errors
if ($TTY || $self->verbosemode) { # Draw to TTY
printf("%s\n\n", "~" x 94);
printf("The %s for %s backup host\n\n", $report_name, $name);
print join("\n", @report, "");
}
#
lib/App/MBUtiny.pm
if ($sent) { $self->debug(sprintf("Mail has been sent to: %s", $to)) }
else { $self->error(sprintf("Mail was not sent to: %s", $to)) }
}
# Finish testing
$self->log_info("Finish testing for \"%s\" backup host section", $name);
# General status
$status = 0 unless $ostat;
}
return $status;
});
__PACKAGE__->register_handler(
handler => "backup",
description => "Backup hosts",
code => sub {
### CODE:
my ($self, $meta, @arguments) = @_;
$self->configure or return 0;
lib/App/MBUtiny.pm
my $name = _getName($pair); # Backup name
my $host = node($pair, $name); # Config section
my $hostskip = (!@arguments || grep {lc($name) eq lc($_)} @arguments) ? 0 : 1;
my $enabled = value($host, 'enable') ? 1 : 0;
if ($hostskip || !$enabled) {
$self->log_info("Skip backup process for \"%s\" backup host section", $name);
next;
}
my $tbl = Text::SimpleTable->new(@{(TABLE_HEADERS)});
$self->log_info("Start backup process for \"%s\" backup host section", $name);
push @header, ["Backup name", $name];
push @errors, $self->getdbi->dsn, $self->getdbi->error, "" if $self->getdbi->error;
#
# Loading backup data
#
my $buday = (value($host, 'buday') // $self->config('buday')) || 0;
my $buweek = (value($host, 'buweek') // $self->config('buweek')) || 0;
my $bumonth = (value($host, 'bumonth') // $self->config('bumonth')) || 0;
push @header, (
["Daily backups", $buday],
["Weekly backups", $buweek],
["Monthly backups", $bumonth],
);
# Get mask vars
my $arc = $self->_getArc($host);
my $arcmask = value($host, 'arcmask') || ARC_MASK;
lib/App/MBUtiny.pm
$step = "Storages testing";
$self->debug($step);
my $storage = new App::MBUtiny::Storage(
name => $name, # Backup name
host => $host, # Host config section
path => $self->objdir, # Where is located backup archive
fixup => sub {
my $strg = shift; # Storage object
my $oper = shift // 'noop'; # Operation name
my $colret;
if ($oper =~ /^(del)|(rem)/i) {
lib/App/MBUtiny.pm
$ostat ? 'All processes successful' : 'Errors have occurred!',
$ostat ? 'PASS' : 'FAIL'
);
push @header, ["Summary status", $ostat ? 'PASS' : 'FAIL'];
my @report;
my $report_name = $ostat ? "backup report" : "backup error report";
push @report, $self->_report_common(@header); # Common information
push @report, $self->_report_summary($ostat ? "Backup is done" : "Errors occurred while performing backup"); # Summary table
push @report, $tbl->draw() || ''; # Table
push @report, $self->_report_errors(@errors); # List of occurred errors
if ($TTY || $self->verbosemode) { # Draw to TTY
printf("%s\n\n", "~" x 114);
printf("The %s for %s backup host\n\n", $report_name, $name);
print join("\n", @report, "");
}
#
lib/App/MBUtiny.pm
my $sent = sendmail(%ma);
if ($sent) { $self->debug(sprintf("Mail has been sent to: %s", $to)) }
else { $self->error(sprintf("Mail was not sent to: %s", $to)) }
}
# Finish backup
$self->log_info("Finish backup process for \"%s\" backup host section", $name);
# General status
$status = 0 unless $ostat;
}
lib/App/MBUtiny.pm
my $name = _getName($pair); # Backup name
my $host = node($pair, $name); # Config section
my $hostskip = (!@arguments || grep {lc($name) eq lc($_)} @arguments) ? 0 : 1;
my $enabled = value($host, 'enable') ? 1 : 0;
if ($hostskip || !$enabled) {
$self->log_info("Skip restore process for \"%s\" backup host section", $name);
next;
}
my $tbl = Text::SimpleTable->new(@{(TABLE_HEADERS)});
$self->log_info("Start restore process for \"%s\" backup host section", $name);
push @header, ["Backup name", $name];
push @errors, $self->getdbi->dsn, $self->getdbi->error, "" if $self->getdbi->error;
# Get mask vars
my $arc = $self->_getArc($host);
lib/App/MBUtiny.pm
$step = "Storages testing";
$self->debug($step);
my $storage = new App::MBUtiny::Storage(
name => $name, # Backup name
host => $host, # Host config section
path => $self->rstdir, # Where is located restored backup archive
validate => sub {
my $strg = shift; # storage object
my $file = shift; # fetched file
if ($info{size}) { # Valid sizes
my $size = filesize($file) // 0;
lib/App/MBUtiny.pm
arcdef => $arc,
archive=> $archive_file,
dirdst => $restore_dir,
);
if ($st) {
push @header, ["Location of restored backup", $restore_dir];
$self->log_info("Downloaded backup archive: %s", $archive_file);
$self->log_info("Location of restored backup: %s", $restore_dir);
} else {
my $msg = sprintf("Extracting archive \"%s\" failed: %s", $archive_file, $self->error);
$self->log_error($msg);
push @errors, $msg, "";
$ostat = 0;
lib/App/MBUtiny.pm
push @report, $self->_report_summary($ostat ? "Restore is done" : "Errors occurred while performing restore"); # Summary table
push @report, $tbl->draw() || ''; # Table
push @report, $self->_report_errors(@errors); # List of occurred errors
if ($TTY || $self->verbosemode) { # Draw to TTY
printf("%s\n\n", "~" x 114);
printf("The %s for %s backup host\n\n", $report_name, $name);
print join("\n", @report, "");
}
# Finish restore
$self->log_info("Finish restore process for \"%s\" backup host section", $name);
# General status
$status = 0 unless $ostat;
}
lib/App/MBUtiny.pm
my $host = node($pair, $name); # Config section
my $hostskip = (!@arguments || grep {lc($name) eq lc($_)} @arguments) ? 0 : 1;
my $enabled = value($host, 'enable') ? 1 : 0;
$tbl_hosts->row($name, ($hostskip || !$enabled) ? 'SKIP' : 'PASS');
if ($hostskip || !$enabled) {
$self->log_info("Skip reporting for \"%s\" backup host section", $name);
next;
}
my $lcols = $self->_getCollector($host);
push @collectors, @$lcols;
}
lib/App/MBUtiny.pm
}
#
# Collectors processing
#
my @backups;
if (@ok_collectors) {
my $collector = new App::MBUtiny::Collector(
collector_config => [@ok_collectors],
dbi => $self->getdbi, # For local storage only
);
@backups = $collector->report(); # start => 1561799600;
if ($collector->error) {
$self->log_error(sprintf("Collector error: %s", $collector->error));
push @errors, $collector->error, "";
}
}
#
# Get report data about LAST backups on collector for each available host
#
my %requires;
foreach (@req_hosts) {$requires{$_} = 0};
foreach my $rec (@backups) {
push @comments, sprintf("%s: %s", uv2null($rec->{file}), $rec->{comment}), "" if $rec->{comment};
push @errors, uv2null($rec->{file}), $rec->{error}, "" if $rec->{error};
my $nm = $rec->{name} || 'virtual';
$tbl_report->row(
sprintf("%s\n%s", $nm, uv2null($rec->{addr})),
lib/App/MBUtiny.pm
push @report, $tbl_report->draw(); # Report table
push @report, "Comments:", "", @comments, "" if @comments;
push @report, $self->_report_errors(@errors); # List of occurred errors
if ($TTY || $self->verbosemode) { # Draw to TTY
printf("%s\n\n", "~" x 106);
printf("The %s for all backup hosts on %s\n\n", $report_name, $hostname);
print join("\n", @report, "");
}
#
# SendMail (Send report)
#
if ($send_report) {
unshift @report, $self->_report_title($report_name, "last backups");
push @report, $self->_report_footer();
my %ma = (); foreach my $k (keys %$sm) { $ma{"-".$k} = $sm->{$k} };
$ma{"-subject"} = sprintf("%s %s (%s on %s)", PROJECTNAME, $report_name, "last backups", $hostname);
$ma{"-message"} = join("\n", @report);
# Send!
my $sent = sendmail(%ma);
if ($sent) { $self->debug(sprintf("Mail has been sent to: %s", $to)) }
lib/App/MBUtiny.pm
for (my $i=0; $i<$period; $i++) {
my ( $y, $m, $d, $wd ) = (localtime( time - $i * 86400 ))[5,4,3,6];
my $date = sprintf( "%04d%02d%02d", ($y+1900), ($m+1), $d );
if (($i < $buday)
|| (($i < $buweek * 7) && $wd == 0) # do weekly backups on sunday
|| (($i < $bumonth * 30) && $d == 1)) # do monthly backups on 1-st day of month
{
$dates{ $date } = 1;
} else {
$dates{ $date } = 0;
}
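# Restated as a standalone predicate (a sketch, not MBUtiny's API): a day
# $i days back, with weekday $wd (0 = Sunday) and day-of-month $d, is kept if
sub _keep_date_example {
    my ($i, $wd, $d, $buday, $buweek, $bumonth) = @_;
    return ($i < $buday)                          # within the daily window
        || ($i < $buweek * 7   && $wd == 0)       # a Sunday within the weekly window
        || ($i < $bumonth * 30 && $d == 1);       # a 1st-of-month within the monthly window
}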
lib/App/MBUtiny.pm
my $self = shift;
my $title = shift || "report";
my $name = shift || "virtual";
return (
sprintf("Dear %s user,", PROJECTNAME),"",
sprintf("This is a automatic-generated %s for %s backup\non %s, created by %s/%s",
$title, $name, $hostname, __PACKAGE__, $VERSION),"",
"Sections of this report:","",
" * Common information",
" * Summary",
" * List of occurred errors","",
share/public_html/static/music_worklet_inprogress/decoder/src/mhfs_cl_track.h
blockvf_error code;
uint32_t extradata;
} mhfs_cl_track_blockvf_data;
typedef struct {
// for backup and restore
ma_decoder backupDecoder;
unsigned backupFileOffset;
mhfs_cl_track_allocs allocs;
ma_decoder_config decoderConfig;
ma_decoder decoder;
bool dec_initialized;
share/public_html/static/music_worklet_inprogress/decoder/src/mhfs_cl_track.h view on Meta::CPAN
if(pAllocs->allocptrs[i] == p)
{
const size_t osz = pAllocs->allocsizes[i];
const size_t orsz = ceil8(pAllocs->allocsizes[i]);
const size_t rsz = ceil8(sz);
// avoid losing the start of backup by moving it down
if(rsz < orsz)
{
uint8_t *ogalloc = p;
memmove(ogalloc+rsz, ogalloc+orsz, sz);
}
share/public_html/static/music_worklet_inprogress/decoder/src/mhfs_cl_track.h view on Meta::CPAN
return NULL;
}
// we moved the data down so we can't fail
newalloc = p;
}
// move the backup data forward
else if(rsz > orsz)
{
memmove(newalloc+rsz, newalloc+orsz, osz);
}
share/public_html/static/music_worklet_inprogress/decoder/src/mhfs_cl_track.h view on Meta::CPAN
}
MHFSCLTR_PRINT("%s: %zu failed to find\n", __func__, sz);
return NULL;
}
static inline void mhfs_cl_track_allocs_backup_or_restore(mhfs_cl_track *pTrack, const bool backup)
{
// copy ma_decoder and blockvf fileoffset
if(backup)
{
pTrack->backupDecoder = pTrack->decoder;
pTrack->backupFileOffset = pTrack->vf.fileoffset;
}
else
{
pTrack->decoder = pTrack->backupDecoder;
pTrack->vf.fileoffset = pTrack->backupFileOffset;
}
// copy the allocations
mhfs_cl_track_allocs *pAllocs = &pTrack->allocs;
for(unsigned i = 0; i < MHFS_CL_TRACK_MAX_ALLOCS; i++)
share/public_html/static/music_worklet_inprogress/decoder/src/mhfs_cl_track.h view on Meta::CPAN
{
const size_t offset = ceil8(pAllocs->allocsizes[i]);
uint8_t *allocBuf = pAllocs->allocptrs[i];
const uint8_t *srcBuf;
uint8_t *destBuf;
if(backup)
{
srcBuf = allocBuf;
destBuf = allocBuf + offset;
}
else
share/public_html/static/music_worklet_inprogress/decoder/src/mhfs_cl_track.h view on Meta::CPAN
memcpy(destBuf, srcBuf, pAllocs->allocsizes[i]);
}
}
}
static inline void mhfs_cl_track_allocs_backup(mhfs_cl_track *pTrack)
{
return mhfs_cl_track_allocs_backup_or_restore(pTrack, true);
}
static inline void mhfs_cl_track_allocs_restore(mhfs_cl_track *pTrack)
{
return mhfs_cl_track_allocs_backup_or_restore(pTrack, false);
}
void mhfs_cl_track_init(mhfs_cl_track *pTrack, const unsigned blocksize)
{
for(unsigned i = 0; i < MHFS_CL_TRACK_MAX_ALLOCS; i++)
share/public_html/static/music_worklet_inprogress/decoder/src/mhfs_cl_track.h view on Meta::CPAN
static inline void mhfs_cl_track_blockvf_ma_decoder_call_before(mhfs_cl_track *pTrack, const bool bSaveDecoder)
{
pTrack->vfData.code = BLOCKVF_SUCCESS;
if(bSaveDecoder)
{
mhfs_cl_track_allocs_backup(pTrack);
}
}
static inline mhfs_cl_error mhfs_cl_track_blockvf_ma_decoder_call_after(mhfs_cl_track *pTrack, const bool bRestoreDecoder, uint32_t *pNeededOffset)
{
lib/App/MatrixTool/Command/resolve.pm
my $self = shift;
my ( $opts, $server_name ) = @_;
$self->http_client->resolve_matrix( $server_name )->then( sub {
my @res = @_;
# SRV records yield a 'weight' field, A/AAAA-based backup does not
defined $res[0]->{weight}
? $self->output_info( "Resolved $server_name by SRV" )
: $self->output_info( "Using legacy IP address fallback" );
try_repeat {
repository.type = git
[GatherDir]
exclude_match = ^\.git(ignore|/.*)$
exclude_match = (^|/)#[^/]+#$ ; emacs autosave
exclude_match = ~$ ; emacs backup
[ExecDir]
[PruneCruft]
[MinimumPerl] ; with Perl::MinimumVersion
MANIFEST.SKIP
\bbuild.com$
# and Module::Build::Tiny generated files
\b_build_params$
# Avoid temp and backup files.
~$
\.old$
\#$
\b\.#
\.bak$
MANIFEST.SKIP
\bbuild.com$
# and Module::Build::Tiny generated files
\b_build_params$
# Avoid temp and backup files.
~$
\.old$
\#$
\b\.#
\.bak$
mt-aws-glacier
==============
Perl Multithreaded multipart sync to Amazon Glacier service.
## Intro
Amazon Glacier is an archive/backup service with very low storage price. However with some caveats in usage and archive retrieval prices.
[Read more about Amazon Glacier][amazon glacier]
*mt-aws-glacier* is a client application for Amazon Glacier, written in Perl programming language, for *nix.
[amazon glacier]:http://aws.amazon.com/glacier/
* Read Amazon Glacier pricing [FAQ][Amazon Glacier faq] again, really. Beware of retrieval fee.
* Before using this program, you should read Amazon Glacier documentation and understand, in general, Amazon Glacier workflows and entities. This documentation
does not define any new layer of abstraction over Amazon Glacier entities.
* In general, all Amazon Glacier clients store metadata (filenames, file metadata) in their own formats, incompatible with each other. To restore a backup made with `mt-aws-glacier` you'll
need `mt-aws-glacier`; other software will most likely restore your data but lose filenames.
* With low "partsize" option you pay a bit more (Amazon charges for each upload request)
* For backups created with older versions (0.7x) of mt-aws-glacier, the Journal file is **required to restore the backup**.
* Use a **Journal file** only with **same vault** ( more info [here](#what-is-journal) and [here](#how-to-maintain-a-relation-between-my-journal-files-and-my-vaults) and [here](https://github.com/vsespb/mt-aws-glacier/issues/50))
* When working with CD-ROM/CIFS/other non-Unix/non-POSIX filesystems, you might need to set `leaf-optimization` to `0`
* Please report any bugs or issues (using GitHub issues). Well, any feedback is welcomed.
* If you want to contribute to the source code, please contact me first and describe what you want to do
## Usage
1. Create a directory containing the files to back up. Example: `/data/backup`
2. Create config file, say, glacier.cfg
key=YOURKEY
secret=YOURSECRET
# region: eu-west-1, us-east-1 etc
(note that Amazon Glacier does not return an error if the vault already exists, etc.)
4. Choose a filename for the Journal, for example, `journal.log`
5. Sync your files
./mtglacier sync --config glacier.cfg --dir /data/backup --vault myvault --journal journal.log --concurrency 3
6. Add more files and sync again
7. Check that your local files have not been modified since the last sync
./mtglacier check-local-hash --config glacier.cfg --dir /data/backup --journal journal.log
8. Delete some files from your backup location
9. Initiate archive restore job on Amazon side
./mtglacier restore --config glacier.cfg --dir /data/backup --vault myvault --journal journal.log --max-number-of-files 10
10. Wait 4+ hours for Amazon Glacier to complete archive retrieval
11. Download restored files back to backup location
./mtglacier restore-completed --config glacier.cfg --dir /data/backup --vault myvault --journal journal.log
12. Delete all your files from vault
./mtglacier purge-vault --config glacier.cfg --vault myvault --journal journal.log
* Each text line in the file represents one record
* It's an append-only file. The file is opened in append-only mode, and new records are only added to the end. This guarantees that
you can recover the Journal file to a previous state in case of a bug in the program, a crash, or power/filesystem issues. You can even use `chattr +a` to set append-only protection on the Journal.
* As the Journal file is append-only, it's easy to perform incremental backups of it
#### Why is the Journal a file in the local filesystem, and not in online Cloud storage (like Amazon S3 or Amazon DynamoDB)?
The Journal is needed to restore a backup, and we can expect that if you need to restore a backup, you have lost your filesystem, together with the Journal.
However, the Journal is also needed to perform *new backups* (`sync` command), to determine which files are already in Glacier and which are not, and also to check local file integrity (`check-local-hash` command).
Actually, you usually perform new backups every day, while you restore backups (and lose your filesystem) very rarely.
So a fast (local) journal is essential to perform new backups quickly and cheaply (important for users who back up thousands or millions of files).
And if you lose your journal, you can restore it from Amazon Glacier (see the `retrieve-inventory` command). It's also recommended to back up your journal
to another backup system (Amazon S3? Dropbox?) with another tool, because retrieving inventory from Amazon Glacier is pretty slow.
Also, some users might want to back up the *same* files from *multiple* different locations. They will need a *synchronization* solution for their journal files.
Anyway, I think the problem of putting Journals into the cloud can be automated and solved with a three-line bash script.
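For instance, something along these lines could run from cron after each sync (bucket and paths are placeholders; assumes the AWS CLI is installed):

    aws s3 cp /path/to/journal.log s3://my-journal-bucket/journal.log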
#### How to maintain a relation between my journal files and my vaults?
9. It's better to keep the relation between *vault* and transfer root (`--dir` option) in one place, such as the config file.
#### Why Journal (and metadata stored in Amazon Glacier) does not contain file's metadata (like permissions)?
If you want to store permissions, put your files into archives before backing up to Amazon Glacier. There are lots of different things that could be stored as file metadata,
and most of them are not portable. Take a look at archive file formats: different formats allow storing different metadata.
It's possible that in the future `mtglacier` will support some other metadata things.
## Specification for some commands
_Uploads what_: a file, pointed to by `filename`.
_Filename in Journal and Amazon Glacier metadata_: A relative path from `dir` to `filename`
./mtglacier upload-file --config glacier.cfg --vault myvault --journal journal.log --dir /data/backup --filename /data/backup/dir1/myfile
(this will upload content of `/data/backup/dir1/myfile` to Amazon Glacier and use `dir1/myfile` as filename for Journal )
./mtglacier upload-file --config glacier.cfg --vault myvault --journal journal.log --dir data/backup --filename data/backup/dir1/myfile
(Let's assume current directory is `/home`. Then this will upload content of `/home/data/backup/dir1/myfile` to Amazon Glacier and use `dir1/myfile` as filename for Journal)
NOTE: file `filename` should be inside directory `dir`
NOTE: both `--filename` and `--dir` are resolved to full paths before determining the relative path from `--dir` to `--filename`. Thus you'll get an error
if parent directories are unreadable. Also if you have a `/dir/ds` symlink to the `/dir/d3` directory, then `--dir /dir` `--filename /dir/ds/file` will result in relative