Result:
found more than 886 distributions - search limited to the first 2001 files matching your query


BackPAN-Index


MANIFEST.SKIP  view on Meta::CPAN

\bBuild.bat$
\bBuild.COM$
\bBUILD.COM$
\bbuild.com$

# Avoid temp and backup files.
~$
\.old$
\#$
\b\.#
\.bak$



BackPAN-Version-Discover


MANIFEST.SKIP  view on Meta::CPAN

\bMakeMaker-\d
\bpm_to_blib\.ts$
\bpm_to_blib$
\bblibdirs\.ts$         # 6.18 through 6.25 generated this

# Avoid temp and backup files.
~$
\.old$
\#$
\b\.#
\.bak$



Backup-Duplicity-YADW


lib/Backup/Duplicity/YADW.pm  view on Meta::CPAN

	$self->_conf($conf);
	$self->_init_logs;
	$self->_write_pidfile;
}

sub backup {

	args_pos
		my $self,
		my $type => 'Str';

lib/Backup/Duplicity/YADW.pm  view on Meta::CPAN


	args_pos
		my $self,
		my $cmds;

	my $days = $self->_conf->get('days2keepbackups');

	if ( !defined $days ) {
		confess "missing configuration days2keepbackups";
	}
	elsif ( !$days ) {

		#		confess "days2keepbackups must be greater than 0";
	}

	push @$cmds, $days . 'D';
}


sub expire {

	args_pos my $self;

	$self->_log( 'info', "removing old backups" );

	my @cmd = ( 'duplicity', 'remove-older-than' );

	$self->_get_expire_days( \@cmd );
	push @cmd, '--force';

lib/Backup/Duplicity/YADW.pm  view on Meta::CPAN


sub verify {

	args_pos my $self;

	$self->_log( 'info', "verifying backups" );

	my @cmd = ( 'duplicity', 'verify' );

	$self->_get_verbosity( \@cmd );
	$self->_get_exclude_device_files( \@cmd );

lib/Backup/Duplicity/YADW.pm  view on Meta::CPAN

	args_pos my $self;

	$self->_get_syslog;

	if ( $self->use_syslog ) {
		openlog( 'backups', $$, 'user' );
	}

	$self->_log( 'info', "$0 @ARGV" );
}

lib/Backup/Duplicity/YADW.pm  view on Meta::CPAN

               dry_run    => 0,
               use_syslog => 1,
               verbose    => 0
               );
              
  $yadw->backup();
  $yadw->verify();
  $yadw->expire();

  $yadw->restore("/my/file/location");

lib/Backup/Duplicity/YADW.pm  view on Meta::CPAN


=head2 new( [ %attributes ] )

Constructor - 'nuff said

=head2 backup( $type )

Tell duplicity to do a backup.  Requires either 'full' or 'inc' for a type.
Returns true on success.
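
For example (a minimal usage sketch; 'full' and 'inc' are the only types
documented above):

  $yadw->backup('full');   # full backup
  $yadw->backup('inc');    # incremental backup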

=head2 expire( )

Tell duplicity to "remove-older-than <days in conf file>".
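
With C<days2keepbackups> set to 30, the command assembled by the code above
would look roughly like this (30 is illustrative; other options are omitted):

  duplicity remove-older-than 30D --force ...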

lib/Backup/Duplicity/YADW.pm  view on Meta::CPAN


Equivalent to "collection-status" in duplicity.  Returns true on success.

=head2 verify( )

Tell duplicity to verify backups.  Returns true on success.

=head2 restore( %args )

Tell duplicity to do a restore.

lib/Backup/Duplicity/YADW.pm  view on Meta::CPAN


Returns true on success.

=head1 SEE ALSO

yadw (ready to use backup script)

=head1 AUTHOR

John Gravatt <john@gravatt.org>



Backup-EZ


lib/Backup/EZ.pm  view on Meta::CPAN

use Data::Dumper;

#
# CONSTANTS
#
use constant EXCLUDE_FILE            => '/etc/ezbackup/ezbackup_exclude.rsync';
use constant CONF                    => '/etc/ezbackup/ezbackup.conf';
use constant COPIES                  => 30;
use constant DEST_HOSTNAME           => 'localhost';
use constant DEST_APPEND_MACH_ID     => 0;
use constant USE_SUDO                => 0;
use constant IGNORE_VANISHED         => 0;
use constant DEFAULT_ARCHIVE_OPTS    => '-az';
use constant ARCHIVE_NO_RECURSE_OPTS => '-dlptgoDz';

=head1 NAME

Backup::EZ - Simple backups based on rsync

=head1 VERSION

version 0.46

lib/Backup/EZ.pm  view on Meta::CPAN

=head1 SYNOPSIS

  use Backup::EZ;

  my $ez = Backup::EZ->new;
  $ez->backup;

=head1 DESCRIPTION

Backup::EZ is backup software that is designed to be as easy to use
as possible, yet still provide a robust solution.

If you only want to run backups, see the included command line utility
"ezbackup".  See the README for configuration instructions.

=head1 SUBROUTINES/METHODS

=head2 new

lib/Backup/EZ.pm  view on Meta::CPAN

    my $self = shift;
    my $msg  = shift;

    my $line = (caller)[2];

    openlog "ezbackup", $self->{syslog_option}, LOG_SYSLOG;
    syslog LOG_DEBUG, "($line) $msg";
    closelog;
}

#sub _error {
#	my $self = shift;
#	my $msg  = shift;
#
#	openlog "ezbackup", $self->{syslog_option}, LOG_LOCAL7;
#	syslog LOG_ERR, $msg;
#	closelog;
#}

sub _info {
    my $self = shift;
    my $msg  = shift;

    openlog "ezbackup", $self->{syslog_option}, LOG_SYSLOG;
    syslog LOG_INFO, $msg;
    closelog;
}

sub _read_conf {

lib/Backup/EZ.pm  view on Meta::CPAN

    my %conf = $config->getall;
    _debug( $self, Dumper \%conf );

    foreach my $key ( keys %conf ) {

        if ( !defined $conf{backup_host} ) {
            $conf{backup_host} = DEST_HOSTNAME;
        }

        if ( !defined $conf{copies} ) {
            $conf{copies} = COPIES;
        }

lib/Backup/EZ.pm  view on Meta::CPAN

}

sub _get_dest_username {
    my $self = shift;

    if ( $self->{conf}->{backup_user} ) {
        return $self->{conf}->{backup_user};
    }

    if ( $ENV{USER} ) {
        return $ENV{USER};
    }

lib/Backup/EZ.pm  view on Meta::CPAN

}

sub _get_dest_hostname {
    my $self = shift;

    return $self->{conf}->{backup_host};
}

sub _get_dest_tmp_dir {
    my $self = shift;

    return sprintf( "%s/%s", $self->get_dest_dir, ".tmp" );
}

sub _get_dest_backup_dir {
    my $self = shift;

    return sprintf( "%s/%s", $self->get_dest_dir, $self->{datestamp} );
}

lib/Backup/EZ.pm  view on Meta::CPAN

#
#    # uncoverable branch true
#    confess if $?;
#}

sub _full_backup_chunked {
    my $self = shift;
    my $dir  = shift;

    $self->_rsync2(
        dir          => $dir->dirname,

lib/Backup/EZ.pm  view on Meta::CPAN

            );
        }
    }
}

sub _full_backup {
    my $self = shift;
    my $dir  = shift;

    if ( $dir->chunked ) {
        $self->_full_backup_chunked($dir);
    }
    else {
        $self->_rsync2(
            dir          => $dir->dirname,
            archive_opts => DEFAULT_ARCHIVE_OPTS,
            extra_opts   => $dir->excludes(),
        );
    }
}

sub _inc_backup_chunked {
    my $self            = shift;
    my $dir             = shift;
    my $last_backup_dir = shift;
    my $link_dest       = shift;

    $self->_rsync2(
        dir          => $dir->dirname,
        archive_opts => ARCHIVE_NO_RECURSE_OPTS,

lib/Backup/EZ.pm  view on Meta::CPAN

            );
        }
    }
}

sub _inc_backup {
    my $self            = shift;
    my $dir             = shift;
    my $last_backup_dir = shift;

    my $link_dest = sprintf(
        "%s/%s/%s",
        $self->get_dest_dir,    #
        $last_backup_dir,       #
        $dir->dirname,          #
    );

    if ( $dir->chunked ) {
        $self->_inc_backup_chunked( $dir, $last_backup_dir, $link_dest );
    }
    else {
        $self->_rsync2(
            dir          => $dir->dirname,
            archive_opts => DEFAULT_ARCHIVE_OPTS,

lib/Backup/EZ.pm  view on Meta::CPAN

    my $self = shift;

    pdump $self->{conf};
}

=head2 backup

Invokes the backup process.  Takes no args.

=cut

sub backup {
    my $self = shift;

    $self->_mk_dest_dir( $self->get_dest_dir );
    my @backups = $self->get_list_of_backups;
    $self->_set_datestamp;
    $self->_mk_dest_dir( $self->_get_dest_tmp_dir, $self->{dryrun} );

    foreach my $dir ( $self->_get_dirs ) {

        my $dirname = $dir->dirname();
        if ( -d $dirname ) {

            $self->_info("backing up $dirname");

            if ( !@backups ) {

                # full
                $self->_full_backup($dir);
            }
            else {
                # incremental
                $self->_inc_backup( $dir, $backups[$#backups] );
            }
        }
        else {
            $self->_info("skipping $dirname because it does not exist");
        }

    }

    $self->_ssh(
        sprintf( "mv %s %s",
            $self->_get_dest_tmp_dir, $self->_get_dest_backup_dir ),
        $self->{dryrun}
    );

    $self->expire();

    return 1;
}

=head2 expire

Expire backups.  Gets a list of current backups and removes old ones that are
beyond the cutoff (see "copies" in the conf file).

=cut

sub expire {
    my $self = shift;

    my @list = $self->get_list_of_backups;

    while ( scalar(@list) > $self->{conf}->{copies} ) {

        my $subdir = shift @list;
        my $del_dir = sprintf( "%s/%s", $self->get_dest_dir, $subdir );

lib/Backup/EZ.pm  view on Meta::CPAN


        $self->_ssh($cmd);
    }
}

=head2 get_backup_host

Returns the backup_host name.

=cut

sub get_backup_host {
    my $self = shift;
    return $self->{conf}->{backup_host};
}

=head2 get_dest_dir

Returns the dest_dir.

lib/Backup/EZ.pm  view on Meta::CPAN

    }

    return sprintf( "%s/%s", $self->{conf}->{dest_dir}, $hostname );
}

=head2 get_list_of_backups

Returns an array of backups.  They are in the format of "YYYY-MM-DD_HH:MM:SS".

=cut

sub get_list_of_backups {
    my $self = shift;

    my @backups;

    my @list = $self->_ssh( sprintf( "ls %s", $self->get_dest_dir ) );

    foreach my $e (@list) {
        chomp $e;

        if ( $e =~ /^\d\d\d\d-\d\d-\d\d_\d\d:\d\d:\d\d$/ ) {
            push( @backups, $e );
        }
    }

    return @backups;
}

1;



Backup-Hanoi


lib/Backup/Hanoi.pm  view on Meta::CPAN

package Backup::Hanoi;
# ABSTRACT: select backup according to algo
$Backup::Hanoi::VERSION = '0.005';
use strict;
use warnings;

use v5.6.0;

lib/Backup/Hanoi.pm  view on Meta::CPAN

    # the number of devices predicts the size of the cycles
    my $device_count = scalar @{$devices};

    die "three devices needed.\n" if ($device_count < 3);

    # half a hanoi cycle is just what we need for backup
    my $hanoi_cycles_half = (2**$device_count) / 2;

    my $self = {    devices           => $devices,
                    hanoi_cycles_half => $hanoi_cycles_half,
               };

lib/Backup/Hanoi.pm  view on Meta::CPAN


=encoding UTF-8

=head1 NAME

Backup::Hanoi - select backup according to algo

=head1 VERSION

version 0.005

lib/Backup/Hanoi.pm  view on Meta::CPAN

This is an early release.
This code is currently not used in production by the author.
Use it with care!

 my @devices = ('A', 'B', 'C', 'D');
 my $backup  = Backup::Hanoi->new(\@devices);

 # calculate the next 100 backup cycles
 for (0 .. 99) {
     print $backup->get_device_for_cycle($_);
     print "\n";
 }
 
 # enhanced combination of FIFO for initialisation
 # and Hanoi algorithm for overwriting
 for (-3 .. 99) {
     print $backup->get_device_for_cycle($_);
     print "\n";
 }

See also the script L<backup-hanoi>.
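
For orientation, the classic Tower-of-Hanoi rotation rule can be sketched as
below. This is an independent illustration, not this module's code (which also
supports the negative "FIFO" cycles shown above):

 use strict;
 use warnings;

 # device index for cycle $n = number of trailing zero bits of $n + 1,
 # capped at the last device
 sub hanoi_device {
     my ( $devices, $cycle ) = @_;
     my $n   = $cycle + 1;
     my $idx = 0;
     while ( $n % 2 == 0 ) {
         $n /= 2;
         $idx++;
     }
     $idx = $#{$devices} if $idx > $#{$devices};
     return $devices->[$idx];
 }

 # with four devices this prints the familiar A B A C A B A D pattern
 print hanoi_device( [qw(A B C D)], $_ ), "\n" for 0 .. 7;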

=head1 FUNCTIONS

=head2 new



Backup-Omni


lib/Backup/Omni/Restore/Filesystem/Single.pm  view on Meta::CPAN

    my $rc = $?;

    unless (grep(/Restore successfully/, @results)) {

        $self->throw_msg(
            'backup.omni.restore.filesystem.single.submit',
            'nosubmit',
            $self->session
        );

    }

lib/Backup/Omni/Restore/Filesystem/Single.pm  view on Meta::CPAN

  
=over 4

=item B<-host>

The name of the host that the backup was performed on.

=item B<-session>

The session id of the backup.

=item B<-target>

The target system to restore the file to.

lib/Backup/Omni/Restore/Filesystem/Single.pm  view on Meta::CPAN


The path on the host that was backed up. Defaults to "/archive".

=item B<-label>

The label that was defined for this backup. Defaults to "/archive".

=back
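
Putting the parameters above together, a call might be sketched as follows
(all values are illustrative and the exact calling convention is an
assumption based on this parameter list):

  my $restore = Backup::Omni::Restore::Filesystem::Single->new(
      -host    => 'filesrv01',       # illustrative
      -session => '2013/01/28-1',    # illustrative session id
      -target  => 'restorehost',
      -path    => '/archive',
      -label   => '/archive',
  );
  $restore->submit;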

=head2 submit



BackupPC-Backups-Info


lib/BackupPC/Backups/Info.pm  view on Meta::CPAN

use warnings;
use base 'Error::Helper';

=head1 NAME

BackupPC::Backups::Info - Retrieves info on BackupPC backups.

=head1 VERSION

Version 0.1.1

lib/BackupPC/Backups/Info.pm  view on Meta::CPAN

	return $self->{pcdir};
}

=head2 get_parsed

This parses the raw backups file and then returns an array of hashes.
For an explanation of the hashes, please see BACKUP HASH.

One argument is taken and that is the machine name.

   my @parsed=$bpcinfo->get_parsed($machine);
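
Each element can then be inspected using the field names described under
BACKUP HASH below, for example:

   foreach my $backup (@parsed) {
       print "$backup->{num} $backup->{type} $backup->{nFiles}\n";
   }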

lib/BackupPC/Backups/Info.pm  view on Meta::CPAN

	#will store what we return
	my @parsed;

	my $int=0;
	while( defined( $lines[$int] ) ){
		my %backup;
		( $backup{num}, $backup{type}, $backup{startTime}, $backup{endTime},
		  $backup{nFiles}, $backup{size}, $backup{nFilesExist}, $backup{sizeExist},
		  $backup{nFilesNew}, $backup{sizeNew}, $backup{xferErrs}, $backup{xferBadFile},
		  $backup{xferBadShare}, $backup{tarErrs}, $backup{compress},
		  $backup{sizeExistComp}, $backup{sizeNewComp}, $backup{noFill},
		  $backup{fillFromNum}, $backup{mangle}, $backup{xferMethod}, $backup{level} )=split(/\t/, $lines[$int]);

		if ( $backup{compress} eq ''){
			$backup{compress}=0;
		}
		
		push( @parsed, \%backup );
		
		$int++;
	}

	#save info on the last

lib/BackupPC/Backups/Info.pm  view on Meta::CPAN

	return @parsed;
}

=head2 get_raw

This retrieves the raw data from a backups file for a machine.

The section on the backups file in
L<https://backuppc.github.io/backuppc/BackupPC.html#Storage-layout>
is suggested reading if you plan on actually using this.

    my $raw=$bpcinfo->get_raw('foo');
    if ($bpcinfo->error){
        warn('something errored');

lib/BackupPC/Backups/Info.pm  view on Meta::CPAN

		$self->{errorString}='"'.$machineDir.'" does not exist';
		$self->warn;
		return undef;
	}

	my $backupsFile=$machineDir.'/backups';

	my $fh;
	if (! open( $fh, '<', $backupsFile ) ){
		$self->{error}=7;
		$self->{errorString}='failed to open "'.$backupsFile.'"';
		$self->warn;
	};

	my $data='';
	while ( my $line=$fh->getline ){

lib/BackupPC/Backups/Info.pm  view on Meta::CPAN

	return keys(%{$self->{parsed}});
}

=head2 read_in_all

This reads in the backups files for each machine.

Currently this just attempts to read in all via get_parsed
and ignores any errors, just proceeding to the next one.

As long as list_machines does not error, this will not error.

lib/BackupPC/Backups/Info.pm  view on Meta::CPAN


	my $pcdir=$self->get_pc_dir;
	
	my $int=0;
	while( defined( $machines[$int] ) ){
		if ( -f $pcdir.'/'.$machines[$int].'/backups' ){
			$self->get_parsed( $machines[$int] );
		}
				
		$int++;
	}

lib/BackupPC/Backups/Info.pm  view on Meta::CPAN

	return 1;
}

=head1 BACKUP HASH

Based on __TOPDIR__/pc/$host/backup from
L<https://backuppc.github.io/backuppc/BackupPC.html#Storage-layout>.

=head2 num

The backup number for the current hash.

=head2 type

Either 'incr' or 'full'.

=head2 startTime

The unix start time of the backup.

=head2 endTime

The unix end time of the backup.

=head2 nFiles

Number of files backed up.

lib/BackupPC/Backups/Info.pm  view on Meta::CPAN


Total size of files not in the pool.

=head2 xferErrs

Number of warnings/errors from the backup method.

=head2 xferBadFile

Number of errors from the backup method in regards to bad files.

=head2 xferBadShare

Number of errors from smbclient that were bad share errors.

lib/BackupPC/Backups/Info.pm  view on Meta::CPAN


Number of errors from BackupPC_tarExtract.

=head2 compress

The compression level used on this backup. Zero means no compression.

Please note that while BackupPC may leave this field blank if none is used, this module
will check for a blank value and set it to zero.

=head2 sizeExistComp

lib/BackupPC/Backups/Info.pm  view on Meta::CPAN


Total compressed size of new files in the pool.

=head2 noFill

Set if this backup has not been filled in with the most recent previous filled or full backup.
See $Conf{IncrFill} in the BackupPC docs.

=head2 fillFromNum

If filled, this is the backup it was filled from.

=head2 mangle

Set if this backup has mangled file names and attributes. Always true for backups in v1.4.0
and above. False for all backups prior to v1.4.0.

=head2 xferMethod

The value of $Conf{XferMethod} when this dump was done.

lib/BackupPC/Backups/Info.pm  view on Meta::CPAN


Zane C. Bowers-Hadley, C<< <vvelox at vvelox.net> >>

=head1 BUGS

Please report any bugs or feature requests to C<bug-backuppc-backups-info at rt.cpan.org>, or through
the web interface at L<http://rt.cpan.org/NoAuth/ReportBug.html?Queue=BackupPC-Backups-Info>.  I will be notified, and then you'll
automatically be notified of progress on your bug as I make changes.





BackupPC-XS


lib/BackupPC/XS.pm  view on Meta::CPAN


=head2 BackupPC::XS::AttribCache

Maintain a cache of directories, with full share/path semantics.

    $ac = BackupPC::XS::AttribCache::new($host, $backupNum, $shareNameUM, $compress);

    $attrHash = $ac->get($fileName, $allocateIfMissing, $dontReadInode);
    $ac->set($fileName, $attrHash, $dontOverwriteInode);
    $ac->delete($fileName);

lib/BackupPC/XS.pm  view on Meta::CPAN


If you specify :all (see SYNOPSIS), then the BPC_FTYPE_ values are exported.

=head1 SEE ALSO

BackupPC, backuppc.sourceforge.net.

rsync-bpc.

=head1 AUTHOR



Badger


lib/Badger/Filesystem/Visitor.pm  view on Meta::CPAN

inclusive options (0/1 flags, names, regexen, subroutines, or list refs
containing any of the above).

    $dir->visit( 
        no_files    => '*.bak',     
        no_dirs     => ['tmp', qr/backup/i],
        not_in_dirs => ['.svn', '.DS_Store'],
    );

When the visit is done, the L<collect()> method can be called to return
a list (in list context) or reference to a list (in scalar context) of the 



Basset


lib/Basset/Object.pm  view on Meta::CPAN

Copies the object. B<Be warned>! Copy does a B<deep> copy of the object. So any objects/references/etc
pointed to by the original object will also be copied.

You may optionally pass in a different object/structure and copy that instead.

 my $backupBoard = $game->copy($game->board);

=cut

=pod



Batch-Batchrun


lib/Batch/Batchrun/Retain.pm  view on Meta::CPAN

#=========================================================================
#  File: Retain.pm
#
#  Usage: Subroutine
#
#  Purpose: Copy and Compress files saving to backup directories
#
#   EXAMPLE - 
#  use Batch::Batchrun::Retain;
#
#  retain(FILE=>test, LIMIT=>5, DIR=>/apps/irmprod/archive,COMPRESS=>yes, 

lib/Batch/Batchrun/Retain.pm  view on Meta::CPAN

__END__

=head1 NAME
 
 
Retain - keep backup copies of a file


=head1 SYNOPSIS


lib/Batch/Batchrun/Retain.pm  view on Meta::CPAN


retain(FILE=>'test', LIMIT=>5, DIR=>'/apps/irmprod/archive', COMPRESS=>'yes', DELETE=>'no');

=head1 DESCRIPTION

The C<retain> function provides a convenient way to keep backups of files. It keeps
a determined number of files in numbered directories. Arguments are passed using named
parameters.  Each name is case insensitive.  Of the several parameters only FILE and DIR 
are required. 

=head2 REQUIRED PARAMETERS

lib/Batch/Batchrun/Retain.pm  view on Meta::CPAN


=over 4

=item B<COMPRESS>

compress the backup copies of the file. True values are indicated by 
passing 1 or yes. (unix only - defaults to no)

=item B<CHMOD>

the numeric mode to use when creating the backup file
(defaults to 0775)

=item B<DELETE>

deletes the original file if specified. True values are indicated by 
passing 1 or yes. (defaults to no)


=item B<LIMIT>

number of backup copies to keep.

=item B<PREFIX>

the prefix to use for each numbered directory.  The numbered directory will 
automatically be created if it does not exist. (defaults to bk)



Batch-Interpreter


MANIFEST.SKIP  view on Meta::CPAN

\bBuild.bat$
\bBuild.COM$
\bBUILD.COM$
\bbuild.com$

# Avoid temp and backup files.
~$
\.old$
\#$
\b\.#
\.bak$



Beagle


lib/Beagle/Cmd/Command/shell.pm  view on Meta::CPAN

                show_time($start) if enabled_devel;

            }
            else {

                # backup settings
                my ( $devel, $cache, $root ) =
                  ( enabled_devel(), enabled_cache(), current_root('not die') );

                my $start = Time::HiRes::time();
                eval { Beagle::Cmd->run };



Benchmark-Featureset-SetOps


MANIFEST.SKIP  view on Meta::CPAN

\bBuild.bat$

# Avoid Devel::Cover generated files
\bcover_db

# Avoid temp and backup files.
~$
\#$
\.#
\.bak$
\.old$



Benchmark-Perl-Formance-Cargo


share/PerlCritic/Critic/Utils.pm  view on Meta::CPAN

            @newfiles = File::Spec->no_upwards(@newfiles);
            @newfiles = grep { not $SKIP_DIR{$_} } @newfiles;
            push @queue, map { File::Spec->catfile($file, $_) } @newfiles;
        }

        if ( (-f $file) && ! _is_backup($file) && _is_perl($file) ) {
            push @code_files, $file;
        }
    }
    return @code_files;
}


#-----------------------------------------------------------------------------
# Decide if it's some sort of backup file

sub _is_backup {
    my ($file) = @_;
    return 1 if $file =~ m{ [.] swp \z}xms;
    return 1 if $file =~ m{ [.] bak \z}xms;
    return 1 if $file =~ m{  ~ \z}xms;
    return 1 if $file =~ m{ \A [#] .+ [#] \z}xms;

share/PerlCritic/Critic/Utils.pm  view on Meta::CPAN


Given a list of directories, recursively searches through all the
directories (depth first) and returns a list of paths for all the
files that are Perl code files.  Any administrative files for CVS or
Subversion are skipped, as are things that look like temporary or
backup files.

A Perl code file is:

=over



BenchmarkAnything-Storage-Backend-SQL


lib/BenchmarkAnything/Storage/Backend/SQL.pm  view on Meta::CPAN

    tables              => {
        unit_table                       => 'bench_units',
        benchmark_table                  => 'benchs',
        benchmark_value_table            => 'bench_values',
        subsume_type_table               => 'bench_subsume_types',
        benchmark_backup_value_table     => 'bench_backup_values',
        additional_type_table            => 'bench_additional_types',
        additional_value_table           => 'bench_additional_values',
        additional_relation_table        => 'bench_additional_relations',
        additional_type_relation_table   => 'bench_additional_type_relations',
        backup_additional_relation_table => 'bench_backup_additional_relations',
    },
};

my $hr_column_ba_mapping = {
    bench_value_id => 'VALUE_ID',

lib/BenchmarkAnything/Storage/Backend/SQL.pm  view on Meta::CPAN

        $or_self->{query}->copy_additional_values({
            new_bench_value_id => $i_bench_value_id,
            old_bench_value_id => $hr_atts->{rows}[0]{bench_value_id},
        });

        for my $hr_backup_row ( @{$hr_atts->{rows}} ) {

            if ( $hr_backup_row->{bench_subsume_type_rank} == 1 ) {
                if ( $hr_atts->{backup} ) {
                    # copy data rows to backup table
                    $or_self->{query}->copy_benchmark_backup_value({
                        new_bench_value_id => $i_bench_value_id,
                        old_bench_value_id => $hr_backup_row->{bench_value_id},
                    });
                    my $i_bench_backup_value_id = $or_self->{query}->last_insert_id(
                        $or_self->{config}{tables}{benchmark_backup_value_table},
                        'bench_backup_value_id',
                    );
                    $or_self->{query}->copy_benchmark_backup_additional_relations({
                        new_bench_value_id => $i_bench_backup_value_id,
                        old_bench_value_id => $hr_backup_row->{bench_value_id},
                    });
                }
            }
            else {
                # update bench_value_id in backup table
                $or_self->{query}->update_benchmark_backup_value({
                    new_bench_value_id => $i_bench_value_id,
                    old_bench_value_id => $hr_backup_row->{bench_value_id},
                });
            }

            # now lets remove the old rows
            $or_self->{query}->delete_benchmark_additional_relations(
                $hr_backup_row->{bench_value_id},
            );
            $or_self->{query}->delete_benchmark_value(
                $hr_backup_row->{bench_value_id},
            );

        }

    };

lib/BenchmarkAnything/Storage/Backend/SQL.pm  view on Meta::CPAN

    my $or_strp = DateTime::Format::Strptime->new( pattern => '%F %T', );

    my @a_rows;
    my $i_counter   = 0;
    my $i_sum_value = 0;
    my $b_backup    = ((not exists $hr_options->{backup}) || $hr_options->{backup}) ? 1 : 0;
    my $s_last_key  = q##;

    while ( my $hr_values = $or_data_values->fetchrow_hashref() ) {

        my $s_act_key = join '__',

lib/BenchmarkAnything/Storage/Backend/SQL.pm  view on Meta::CPAN


            if ( $i_counter ) {
                $or_self->$fn_add_subsumed_point({
                    rows    => \@a_rows,
                    VALUE   => $i_sum_value / $i_counter,
                    backup  => $b_backup,
                    type_id => $hr_subsume_type->{bench_subsume_type_id}
                });
            }

            @a_rows         = ();

lib/BenchmarkAnything/Storage/Backend/SQL.pm  view on Meta::CPAN


    if ( $i_counter ) {
        $or_self->$fn_add_subsumed_point({
            rows    => \@a_rows,
            VALUE   => $i_sum_value / $i_counter,
            backup  => $b_backup,
            type_id => $hr_subsume_type->{bench_subsume_type_id}
        });
    }

    return 1;

lib/BenchmarkAnything/Storage/Backend/SQL.pm  view on Meta::CPAN

=head3 subsume

This is a maintenance function for reducing the number of data points in the
database. Calling this function reduces the rows in the benchmark values table
by building an average value for all benchmark data points grouped by specific
columns. By default all old grouped columns will be added to backup tables for
rebuilding the original state.
It is highly recommended to do this periodically for better search performance.

    my $b_success = $or_bench->subsume({
        subsume_type        => 'month',
        exclude_additionals => [qw/ benchmark_date /],
        date_from           => '2013-01-01 00:00:00',
        date_to             => '2014-01-01 00:00:00',
        backup              => 0,
    });

=over 4

=item subsume_type

lib/BenchmarkAnything/Storage/Backend/SQL.pm  view on Meta::CPAN


=item exclude_additionals

Array Reference of additional values that should be excluded from grouping.

=item backup

By default all subsumed rows will be inserted into backup tables. If this
isn't desired, a false value must be passed.

=back

=head3 init_search_engine( $force )

lib/BenchmarkAnything/Storage/Backend/SQL.pm  view on Meta::CPAN

    tables => {
        unit_table                       => 'bench_units',
        benchmark_table                  => 'benchs',
        benchmark_value_table            => 'bench_values',
        subsume_type_table               => 'bench_subsume_types',
        benchmark_backup_value_table     => 'bench_backup_values',
        additional_type_table            => 'bench_additional_types',
        additional_value_table           => 'bench_additional_values',
        additional_relation_table        => 'bench_additional_relations',
        additional_type_relation_table   => 'bench_additional_type_relations',
        backup_additional_relation_table => 'bench_backup_additional_relations',
    }

=item select_cache [optional]

In case of a true value the module cache some select results



BerkeleyDB-Manager


lib/BerkeleyDB/Manager.pm  view on Meta::CPAN

=item log_auto_remove

Enables automatic removal of logs.

Normally logs should be removed after being backed up, but if you are not
interested in having full snapshot backups for catastrophic recovery scenarios,
you can enable this.

See L<http://www.oracle.com/technology/documentation/berkeley-db/db/ref/transapp/logfile.html>.

Defaults to false.
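
A hedged usage sketch (attribute names other than C<log_auto_remove> are
assumptions, not taken from this excerpt):

    my $manager = BerkeleyDB::Manager->new(
        home            => 'db_env',   # hypothetical environment home
        log_auto_remove => 1,
    );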



BerkeleyDB


ppport.h  view on Meta::CPAN

av_top_index|5.017009|5.003007|p
av_top_index_skip_len_mg|5.025010||Viu
av_undef|5.003007|5.003007|
av_unshift|5.003007|5.003007|
ax|5.003007|5.003007|
backup_one_GCB|5.025003||Viu
backup_one_LB|5.023007||Viu
backup_one_SB|5.021009||Viu
backup_one_WB|5.021009||Viu
bad_type_gv|5.019002||Viu
bad_type_pv|5.016000||Viu
BADVERSION|5.011004||Viu
BASEOP|5.003007||Viu
BhkDISABLE|5.013003||xV



Biblio-ILL-ISO


Makefile.PL  view on Meta::CPAN

use File::Find;

find( \&filecheck, "." );

sub filecheck {
    unlink if /~$/;  # Remove any emacs backup files
    die "Aborting: Swapfile $_ found" if /\.swp$/;
}

&WriteMakefile(
	'NAME'		=> 'Biblio::ILL::ISO',



Biblio-Isis


lib/Biblio/Isis/Manual.pod  view on Meta::CPAN


Sorted Link file (long terms)

=item C<xxxxxx.BKP>

Master file backup

=item C<xxxxxx.XHF>

Hit file index

lib/Biblio/Isis/Manual.pod  view on Meta::CPAN


always 0 for a user database file (1 for system message files)

=back

(the last four fields are used for statistics during backup/restore).

=head2 C. Master file block format

The Master file records are stored consecutively, one after the other,
each record occupying exactly C<MFRL> bytes. The file is stored as

lib/Biblio/Isis/Manual.pod  view on Meta::CPAN

As indicated above, as Master file records are updated the C<MST> file
grows in size and there will be lost space in the file which cannot be
used. The reorganization facilities allow this space to be reclaimed by
recompacting the file.

During the backup phase a Master file backup file is created (C<.BKP>).
The structure and format of this file are the same as the Master file
(C<.MST>), except that a Crossreference file is not required as all the
records are adjacent. Records marked for deletion are not backed up.
Because only the latest copy of each record is backed up, the system
does not allow you to perform a backup whenever an Inverted file update
is pending for one or more records.

During the restore phase the backup file is read sequentially and the
program recreates the C<MST> and C<XRF> files. At this point all records which
were marked for logical deletion (before the backup) are now marked as
physically deleted (by setting C<XRFMFB = -1> and C<XRFMFP = 0>).
Deleted records are detected by checking holes in the C<MFN> numbering.

=head1 Inverted file structure and record formats



BigIP-iControl


lib/BigIP/iControl.pm  view on Meta::CPAN

	return %res
}

=head3 save_configuration ($filename)

	$ic->save_configuration('backup.ucs');

	# is equivalent to

	$ic->save_configuration('backup');
	
	# Not specifying a filename will use today's date in the
	# format YYYYMMDD as the filename.

	$ic->save_configuration();

lib/BigIP/iControl.pm  view on Meta::CPAN

	print $ic->download_file('/config/bigip.conf');

This method provides direct access to files on the target system. The method returns a scalar containing
the contents of the file.

This method may be useful for downloading configuration files for versioning or backups.
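
For example, saving a copy locally (the local filename is illustrative):

	my $conf = $ic->download_file('/config/bigip.conf');
	open my $fh, '>', 'bigip.conf.bak' or die $!;
	print {$fh} $conf;
	close $fh;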

=cut

sub download_file {
	my ($self,$file_name)	= @_;



Bigtop


lib/Bigtop/Backend/Init/Std.pm  view on Meta::CPAN


# Avoid Module::Build generated and utility files.
\bBuild$
\b_build

# Avoid temp and backup files.
~$
\.tmp$
\.old$
\.bak$
\#$



Bio-AGP-LowLevel


MANIFEST.SKIP  view on Meta::CPAN


# Avoid Module::Build generated and utility files.
\bBuild$
\b_build/

# Avoid temp and backup files.
~$
\.old$
\#$
\b\.#
\.bak$



Bio-GMOD


TODO  view on Meta::CPAN



Write DTD for defaults; convert Adaptor.pm to parsing XML

Future items:
1. Option to back up software prior to install.

2. Starting and stopping of servers - is this working / robust
    during or after an install?

3. Server monitoring



Bio-MUST-Core


lib/Bio/MUST/Core/Tree.pm  view on Meta::CPAN

        $mode == 2 ? $tree->get_entities  :
        $mode == 1 ? $tree->get_internals :
                     $tree->get_terminals
    };

    # Note: old labels are backed up in specified attributes and vice-versa
    # TODO: allow appending acc for terminal nodes?
    for my $node (@nodes) {
        my $label     = $node->get_name;
        my $attribute = $node->get_generic($key);
        $node->set_generic($key => $label);

lib/Bio/MUST/Core/Tree.pm  view on Meta::CPAN


sub store_tpl {
    my $self    = shift;
    my $outfile = shift;

    # backup and discard branch lengths
    # Note: I have to do that since I cannot clone the tree (Bio::Phylo bug?)
    my @branch_lengths;
    for my $node ( @{ $self->tree->get_entities } ) {
        push @branch_lengths, $node->get_branch_length;
        $node->set_branch_length(undef);



Bio-ToolBox


scripts/manipulate_datasets.pl  view on Meta::CPAN

	}
	else {
		# need to sort by the mean of provided column indices
		# we will generate a temporary column of the mean
		# first need to set the target of mean which is needed by combine function
		my $original = $opt_target;    # keep a backup just in case
		$opt_target = 'mean';
		combine_function(@indices);
		my $i = $Data->last_column;
		$opt_target = $original;       # restore backup just in case
		$Data->sort_data( $i, $direction );
		$Data->delete_column($i);      # delete the temporary column
	}

	# remove any pre-existing sorted metadata since no longer valid

scripts/manipulate_datasets.pl  view on Meta::CPAN

performed as specified using command line options. As such, the program can
be called in shell scripts.

The program keeps track of the number of manipulations performed, and if
any are performed, it will write the changed data out to file. Unless an
output file name is provided, it will overwrite the input file (NO backup is
made!).

=head1 FUNCTIONS

This is a list of the functions available for manipulating columns. These may 



Bio-WGS2NCBI


lib/Bio/WGS2NCBI.pm  view on Meta::CPAN

		
		# have a FASTA file
		if ( $file =~ /(.+)\.fsa$/ ) {
			my $stem = $1;
		
			# make backup of FASTA file
			rename "${INDIR}/${file}", "${INDIR}/${file}.bak";
			
			# read file, look op non-missing residue positions, write truncated
			open my $fh,  '<', "${INDIR}/${file}.bak" or die $!;
			open my $out, '>', "${INDIR}/${file}"     or die $!;

lib/Bio/WGS2NCBI.pm  view on Meta::CPAN

				INFO "$id\t$i1 .. $i2";
				$coord{$id} = [ $i1, $i2  ];
				$seq->trunc( $i1 + 1, $i2 + 1 )->write_fasta($out);	
			}
			
			# make backup of TBL file, open handle for writing		
			rename "${INDIR}/${stem}.tbl", "${INDIR}/${stem}.tbl.bak";
			open my $outtbl, '>', "${INDIR}/${stem}.tbl" or die $!;
			
			# initialize variables
			my $tr = Bio::WGS2NCBI::TableReader->new( 



BioPerl-Run


lib/Bio/DB/SoapEUtilities/FetchAdaptor/seq.pm  view on Meta::CPAN

		              \s*(.*?)
		              \s*(?: \( (.*?) \) )?\.?
		              $}xms ) { 
        ($organelle, $abbr_name, $common) = ($1, $2, $3); # optional
    } else {
        $abbr_name = $get->('source'); # nothing caught; this is a backup!
    }

    # Convert data in classification lines into classification array.
    my @class = split(/; /, $get->('taxonomy'));



BioPerl


Bio/SeqFeature/Tools/Unflattener.pm  view on Meta::CPAN

   # of genbank records
   #
   # if no resolver tag is specified, we revert to the normal
   # resolver_method
   if ($resolver_tag) {
       my $backup_resolver_method = $resolver_method;
       # closure: $resolver_tag is remembered by this sub
       my $sub = 
	 sub {
	     my ($self, $sf, @possible_container_sfs) = @_;
	     my @container_sfs = ();

Bio/SeqFeature/Tools/Unflattener.pm  view on Meta::CPAN

		       }
		       $match;
		   } @possible_container_sfs;
	     } 
	     else {
		 return $backup_resolver_method->($sf, @possible_container_sfs);
	     }
	     return map {$_=>0} @container_sfs;
	 };
       $resolver_method = $sub;
   }


