Result:
found more than 886 distributions - search limited to the first 2001 files matching your query


App-Sandy


ppport.h  view on Meta::CPAN

av_top_index|5.017009|5.003007|p
av_top_index_skip_len_mg|5.025010||Viu
av_undef|5.003007|5.003007|
av_unshift|5.003007|5.003007|
ax|5.003007|5.003007|
backup_one_GCB|5.025003||Viu
backup_one_LB|5.023007||Viu
backup_one_SB|5.021009||Viu
backup_one_WB|5.021009||Viu
bad_type_gv|5.019002||Viu
bad_type_pv|5.016000||Viu
BADVERSION|5.011004||Viu
BASEOP|5.003007||Viu
BhkDISABLE|5.013003||xV



App-ScanPrereqs


lib/App/ScanPrereqs.pm  view on Meta::CPAN

        sub {
            no warnings 'once';

            return unless -f;
            my $path = "$File::Find::dir/$_";
            if (Filename::Type::Backup::check_backup_filename(filename=>$_)) {
                log_debug("Skipping backup file %s ...", $path);
                return;
            }
            if (/\A(\.git)\z/) {
                log_debug("Skipping %s ...", $path);
                return;



App-SeismicUnixGui


lib/App/SeismicUnixGui/big_streams/BackupProject.pl  view on Meta::CPAN

my $tar_input      = $HOME . '/'. $project_directory;

=head2 Verify project is a true SUG project

collect project names
compare backup project against project names

=cut

my @PROJECT_HOME_aref = $L_SU_local_user_constants->get_PROJECT_HOMES_aref();
my @project_name_aref = $L_SU_local_user_constants->get_project_names();

lib/App/SeismicUnixGui/big_streams/BackupProject.pl  view on Meta::CPAN


=pod
 
 check to see that the project directory contains Project.config
 If Project.config exists then
 copy this file with the Project during the backup
 
=cut

if ( $project_exists ) {



App-SimpleBackuper


lib/App/SimpleBackuper/Backup.pm  view on Meta::CPAN

}

sub Backup {
	my($options, $state) = @_;
	
	my($backups, $files, $parts, $blocks) = @{ $state->{db} }{qw(backups files parts blocks)};
	
	die "Backup '$options->{\"backup-name\"}' already exists" if grep { $backups->unpack($_)->{name} eq $options->{'backup-name'} } @$backups;
	
	$state->{$_} = 0 foreach qw(last_backup_id last_file_id last_block_id bytes_processed bytes_in_last_backup total_weight);
	
	print "Preparing to backup: " if $options->{verbose};
	$state->{profile}->{init_ids} = - time();
	foreach (@$backups) {
		my $id = $backups->unpack($_)->{id};
		$state->{last_backup_id} = $id if ! $state->{last_backup_id} or $state->{last_backup_id} < $id;
	}
	#print "last backup id $state->{last_backup_id}, ";
	foreach (@$files) {
		my $file = $files->unpack($_);
		$state->{last_file_id} = $file->{id} if ! $state->{last_file_id} or $state->{last_file_id} < $file->{id};
		if($file->{versions} and @{ $file->{versions} } and $file->{versions}->[-1]->{backup_id_max} == $state->{last_backup_id}) {
			$state->{bytes_in_last_backup} += $file->{versions}->[-1]->{size};
		}
	}
	#print "last file id $state->{last_file_id}, ";
	foreach (@$blocks) {
		my $id = $blocks->unpack($_)->{id};

lib/App/SimpleBackuper/Backup.pm  view on Meta::CPAN

	for(my $q = 0; $q <= $#$parts; $q++) {
		$state->{total_weight} += $parts->unpack($parts->[ $q ])->{size};
	}
	print fmt_weight($state->{total_weight}).", " if $options->{verbose};
	
	my $cur_backup = {name => $options->{'backup-name'}, id => ++$state->{last_backup_id}, files_cnt => 0, max_files_cnt => 0};
	$backups->upsert({ id => $cur_backup->{id} }, $cur_backup);
	
	{
		$state->{blocks_info} = _BlocksInfo($options, $state);
		
		$state->{blocks2delete_prio2size2chunks} = {};

lib/App/SimpleBackuper/Backup.pm  view on Meta::CPAN

					$file //= {
						parent_id	=> $file_id,
						id			=> ++$state->{last_file_id},
						name		=> $path_node,
						versions	=> [ {
							backup_id_min	=> $state->{last_backup_id},
							backup_id_max	=> 0,
							uid				=> 0,
							gid				=> 0,
							size			=> 0,
							mode			=> 0,
							mtime			=> 0,

lib/App/SimpleBackuper/Backup.pm  view on Meta::CPAN

	
	while(my($full_path, $dir2upd) = each %dirs2upd) {
		print "Updating dir $full_path..." if $options->{verbose};
		my $file = $files->find_by_parent_id_name($dir2upd->{parent_id}, $dir2upd->{filename});
		my @stat = lstat($full_path);
		if(@stat and $file->{versions}->[-1]->{backup_id_max} != $state->{last_backup_id}) {
			my($uid, $gid) =_proc_uid_gid($stat[4], $stat[5], $state->{db}->{uids_gids});
			if($file->{versions}->[-1]->{backup_id_max} == $state->{last_backup_id} - 1) {
				$file->{versions}->[-1] = {
					%{ $file->{versions}->[-1] },
					backup_id_max	=> $state->{last_backup_id},
					uid				=> $uid,
					gid				=> $gid,
					size			=> $stat[7],
					mode			=> $stat[2],
					mtime			=> $stat[9],

lib/App/SimpleBackuper/Backup.pm  view on Meta::CPAN

					symlink_to		=> undef,
					parts			=> [],
				};
			} else {
				push @{ $file->{versions} }, {
					backup_id_min	=> $state->{last_backup_id},
					backup_id_max	=> $state->{last_backup_id},
					uid				=> $uid,
					gid				=> $gid,
					size			=> $stat[7],
					mode			=> $stat[2],
					mtime			=> $stat[9],

lib/App/SimpleBackuper/Backup.pm  view on Meta::CPAN

					parts			=> [],
				}
			}
			$files->upsert({ id => $file->{id}, parent_id => $file->{parent_id} }, $file);
			
			my $backup = $backups->find_row({ id => $state->{last_backup_id} });
			$backup->{files_cnt}++;
			$backup->{max_files_cnt}++;
			$backups->upsert({ id => $backup->{id} }, $backup );
		}
		
		print "OK\n" if $options->{verbose};
	}
	
	my $backup = $backups->find_row({ id => $state->{last_backup_id} });
	$backup->{is_done} = 1;
	$backups->upsert({ id => $backup->{id} }, $backup );
	
	App::SimpleBackuper::BackupDB($options, $state);
	
	_print_progress($state) if ! $options->{quiet};
}

sub _print_progress {
	print "Progress: ";
	if($_[0]->{bytes_in_last_backup}) {
		printf "processed %s of %s in last backup, ", fmt_weight($_[0]->{bytes_processed}), fmt_weight($_[0]->{bytes_in_last_backup});
	}
	printf "total backups weight %s.\n", fmt_weight($_[0]->{total_weight});
}

use Text::Glob qw(match_glob);
use Fcntl ':mode'; # For S_ISDIR & same
use App::SimpleBackuper::RegularFile;

lib/App/SimpleBackuper/Backup.pm  view on Meta::CPAN

	else {
		printf ", stat: %s:%s %o %s modified at %s", scalar getpwuid($stat[4]), scalar getgrgid($stat[5]), $stat[2], fmt_weight($stat[7]), fmt_datetime($stat[9]) if $options->{verbose};
	}
	
	
	my($backups, $blocks, $files, $parts, $uids_gids) = @{ $state->{db} }{qw(backups blocks files parts uids_gids)};
	
	
	my($uid, $gid) = _proc_uid_gid($stat[4], $stat[5], $uids_gids);
	
	
	my($file); {
		my($filename) = $task->[0] =~ /([^\/]+)\/?$/;
		$file = $files->find_by_parent_id_name($task->[2], $filename);
		if($file) {
			print ", is old file #$file->{id}" if $options->{verbose};
			if($file->{versions}->[-1]->{backup_id_max} == $state->{last_backup_id}) {
				print ", is already backuped.\n" if $options->{verbose};
				return;
			}
		} else {
			$file = {
				parent_id	=> $task->[2],

lib/App/SimpleBackuper/Backup.pm  view on Meta::CPAN

	}
	
	$state->{bytes_processed} += $file->{versions}->[-1]->{size} if @{ $file->{versions} };
	
	my %version = (
		backup_id_min	=> $state->{last_backup_id},
		backup_id_max	=> $state->{last_backup_id},
		uid				=> $uid,
		gid				=> $gid,
		size			=> $stat[7],
		mode			=> $stat[2],
		mtime			=> $stat[9],

lib/App/SimpleBackuper/Backup.pm  view on Meta::CPAN

			$version{parts} = $file->{versions}->[-1]->{parts}; # If mtime not changed then file not changed
			
			$version{block_id} = $file->{versions}->[-1]->{block_id};
			
			my $block = $blocks->find_row({ id => $version{block_id} });
			confess "File has lost block #$version{block_id} in backup "
				.$backups->find_row({ id => $version{backup_id_min} })->{name}
				."..".$backups->find_row({ id => $version{backup_id_max} })->{name}
				if ! $block;
			$block->{last_backup_id} = $state->{last_backup_id};
			$blocks->upsert({ id => $block->{id} }, $block);
			
			print ", mtime is not changed.\n" if $options->{verbose};
		} else {
			print @{ $file->{versions} } ? ", mtime changed.\n" : "\n" if $options->{verbose};

lib/App/SimpleBackuper/Backup.pm  view on Meta::CPAN

						$block_ids{ $part->{block_id} }++;
					}
					$part{size} = $part->{size};
					$part{aes_key} = $part->{aes_key};
					$part{aes_iv} = $part->{aes_iv};
					print "backuped earlier (".fmt_weight($read)." -> ".fmt_weight($part->{size}).");\n" if $options->{verbose};
				} else {
					
					print fmt_weight($read) if $options->{verbose};
					
					$state->{profile}->{math} -= time;

lib/App/SimpleBackuper/Backup.pm  view on Meta::CPAN

				$part->{block_id} //= $block->{id};
				$parts->upsert({ hash => $part->{hash} }, $part);
			}
				
			
			$block->{last_backup_id} = $state->{last_backup_id};
			$blocks->upsert({ id => $block->{id} }, $block);
			
			$version{block_id} = $block->{id};
		}
	}

lib/App/SimpleBackuper/Backup.pm  view on Meta::CPAN

		print ", skip not supported file type\n" if $options->{verbose};
		return;
	}
	
	
	# If file version not changed, use old version with wider backup ids range
	if(	@{ $file->{versions} }
		and (
			$file->{versions}->[-1]->{backup_id_max} + 1 == $state->{last_backup_id}
			or $file->{versions}->[-1]->{backup_id_max} == $state->{last_backup_id}
		)
		and $file->{versions}->[-1]->{uid}	== $version{uid}
		and $file->{versions}->[-1]->{gid}	== $version{gid}
		and $file->{versions}->[-1]->{size}	== $version{size}
		and $file->{versions}->[-1]->{mode}	== $version{mode}

lib/App/SimpleBackuper/Backup.pm  view on Meta::CPAN

				or $file->{versions}->[-1]->{symlink_to} eq $version{symlink_to}
			)
		)
		and join(' ', map { $_->{hash} } @{ $file->{versions}->[-1]->{parts} }) eq join(' ', map { $_->{hash} } @{ $version{parts} })
	) {
		$file->{versions}->[-1]->{backup_id_max} = $state->{last_backup_id};
	} else {
		push @{ $file->{versions} }, \%version;
	}
	
	$files->upsert({ parent_id => $file->{parent_id}, id => $file->{id} }, $file );
	
	my $backup = $backups->find_row({ id => $state->{last_backup_id} });
	$backup->{files_cnt}++;
	$backup->{max_files_cnt}++;
	$backups->upsert({ id => $backup->{id} }, $backup );

	
	$state->{longest_files} ||= [];
	if(	@{ $state->{longest_files} } < $SIZE_OF_TOP_FILES
		or $state->{longest_files}->[-1]->{time} < $file_time_spent

lib/App/SimpleBackuper/Backup.pm  view on Meta::CPAN

}

sub _free_up_space {
	my($options, $state, $protected_block_ids) = @_;
	
	my($backups, $files, $blocks, $parts) = @{ $state->{db} }{qw(backups files blocks parts)};
	
	my $deleted = 0;
	while(1) {
		my($block_id, @files) = _get_block_to_delete($state);
		last if ! $block_id;
		next if exists $protected_block_ids->{ $block_id };
		my $block = $blocks->find_row({ id => $block_id });
		next if ! $block;
		next if $block->{last_backup_id} == $state->{last_backup_id};
		
		$deleted += App::SimpleBackuper::_BlockDelete($options, $state, $block, $state->{blocks_info}->{ $block_id }->[2]);
		last if $deleted;
	}
	



App-SimplenoteSync


bin/simplenotesync  view on Meta::CPAN

and a local directory of text files on your computer

=head1 WARNING

Please note that this software is still in its development stages --- I STRONGLY
urge you to back up all of your data before running it, to ensure nothing is lost.
If you run C<simplenotesync> on an empty local folder, the net result will be to
copy the remote notes to the local folder, effectively performing a backup.

=head1 CONFIGURATION

TODO x-platform!



App-Slackeria


MANIFEST.SKIP  view on Meta::CPAN


# Avoid Module::Build generated and utility files.
\bBuild$
\b_build/

# Avoid temp and backup files.
~$
\.old$
\#$
\b\.#
\.bak$



App-Slaughter


lib/Slaughter/API/generic.pm  view on Meta::CPAN

    {
        if ( -e $dst )
        {

            #
            #  If we've been given "Backup" then we backup, otherwise
            # we just remove the old file.
            #
            my $backup = $params{ 'Backup' } || "true";

            if ( $backup =~ /true/i )
            {
                $::verbose && print "\tMoving existing file out of the way.\n";
                RunCommand( Cmd => "mv $dst $dst.old" );
            }
            else
            {
                $::verbose &&
                  print "\tOverwriting existing file without creating backup\n";
            }
        }


        #



App-Smbxfer


lib/App/Smbxfer.pm  view on Meta::CPAN

motivation for its existence was a limitation in smbclient causing a timeout
that precluded transfer of large files.

An especially useful way to apply this modulino is to invoke it as a
non-interactive command-line tool.  This can be an effective way to create cron
jobs for backup of data TO Samba shares.

As a module, it provides functions to conduct file transfers, get information on
Samba filesystem objects, and to perform validations on "SMB path specs," Samba
location identifiers of the form:



App-SnerpVortex


bin/snerp-projector  view on Meta::CPAN

}

sub purge_project {
	my $project = shift;

	_log("Purging backups from $project->{git_base_dir}...");

	system(
		"/bin/rm", "-r", "-f",
		"$project->{git_base_dir}/.git/refs/original",
		"$project->{git_base_dir}/.git/refs/logs/",



App-SpamcupNG


Changes  view on Meta::CPAN


  Change: fd4a9c2ba0d8f70daa97048b69acd54b49989788
  Author: alceu.freitas <alceu.freitas@semantix.com.br>
  Date : 2022-01-06 18:17:56 +0000

    fix: rolling backup version bump 

  Change: bb4f78f631a17f1d8b63b086e09d939033f03846
  Author: alceu.freitas <alceu.freitas@semantix.com.br>
  Date : 2022-01-06 18:17:09 +0000



App-Spanel-BuildBindZones


lib/App/Spanel/BuildBindZones.pm  view on Meta::CPAN

            log_info "User $user is migrated, skipping";
            next;
        }
        local $CWD = "$user/sysetc";
        for my $yaml_file (glob "zone=*") {
            # skip backup files
            next if $yaml_file =~ /~$/;

            log_info "Processing file $yaml_file ...";
            my ($domain) = $yaml_file =~ /^zone=(.+)/;
            #if (my $err = $code_validate_domain->($domain)) {



App-Spoor


MANIFEST.SKIP  view on Meta::CPAN

\bbuild.com$

# and Module::Build::Tiny generated files
\b_build_params$

# Avoid temp and backup files.
~$
\.old$
\#$
\b\.#
\.bak$



App-SpreadRevolutionaryDate


lib/App/SpreadRevolutionaryDate/Config.pm  view on Meta::CPAN

  # Rewind configuration file if needed and read it
  seek $filename, $file_start, 0 if $file_start;
  $self->parse_file($filename);

  # Backup multivalued options so command line arguments can override them
  my %args_list_backup;
  my %args_list = $config_targets->varlist(".");
  foreach my $arg_list (keys %args_list) {
    next unless $self->_argcount($arg_list) == ARGCOUNT_LIST;
    push @{$args_list_backup{$arg_list}}, @{$self->$arg_list};
    $self->_default($arg_list);
  }

  # Rewind command line arguments and process them
  @ARGV = @orig_argv;
  $self->parse_command_line;

  # Restore multivalued options if not overridden by command line arguments
  foreach my $arg_list (keys %args_list_backup) {
    unless (scalar @{$self->$arg_list}) {
      $self->$arg_list($_) foreach (@{$args_list_backup{$arg_list}});
    }
  }

  # Add targets defined with targets option
  @targets = @{$self->targets};



App-Sqitch


t/lib/DBIEngineTest.pm  view on Meta::CPAN

        }

        # Make a temporary copy of the deploy file so we can restore it later.
        my $deploy_file = $rev_change->deploy_file;
        my $tmp_dir = dir( tempdir CLEANUP => 1 );
        my $backup = $tmp_dir->file($deploy_file->basename);
        ok $deploy_file->copy_to($backup), 'Back up deploy file';

        # Modify the file so that its hash digest will vary.
        delete $rev_change->{script_hash};
        my $fh = $deploy_file->opena or die "Cannot open $deploy_file: $!\n";
        try {

t/lib/DBIEngineTest.pm  view on Meta::CPAN

            lives_ok {
                ok $engine->log_deploy_change($rev_change), 'Deploy the reworked change';
            } 'The deploy should not fail';
        } finally {
            # Restore the reworked script.
            $backup->copy_to($deploy_file);
        };

        # Make sure that change_id_for() chokes on the dupe.
        MOCKVENT: {
            my $sqitch_mocker = Test::MockModule->new(ref $sqitch);



App-Standby


lib/App/Standby/DB.pm  view on Meta::CPAN

        if($db_version < 2) {
            # do upgrades to version 2 here
            # remove column class from table groups
            my $sql = <<EOS;
BEGIN TRANSACTION;
CREATE TEMPORARY TABLE groups_backup(id,name);
DROP TABLE groups;
CREATE TABLE IF NOT EXISTS groups (
        id INTEGER PRIMARY KEY ASC,
        name TEXT
);
INSERT INTO groups SELECT id,name FROM groups_backup;
DROP TABLE groups_backup;
COMMIT;
EOS
            # TODO HIGH do multi-statement queries work as expected?
            $dbh->do($sql);
        }

lib/App/Standby/DB.pm  view on Meta::CPAN

        }
        if($db_version < 5) {
            # remove column class from table contacts
            my $sql = <<EOS;
BEGIN TRANSACTION;
CREATE TEMPORARY TABLE contacts_backup(id,name,cellphone,group_id,is_enabled,ordinal);
DROP TABLE contacts;
CREATE TABLE IF NOT EXISTS contacts (
        id INTEGER PRIMARY KEY ASC,
        name TEXT,
        cellphone TEXT,
        group_id INTEGER,
        is_enabled INTEGER,
        ordinal INTEGER,
        CONSTRAINT fk_gid FOREIGN KEY (group_id) REFERENCES groups (id) ON DELETE CASCADE
);
INSERT INTO contacts SELECT id,name,cellphone,group_id,is_enabled,ordinal FROM contacts_backup;
DROP TABLE contacts_backup;
COMMIT;
EOS
            # TODO HIGH do multi-statement queries work as expected?
            $dbh->do($sql);
        }
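A sketch of a defensive alternative, in case multi-statement do() does not work as
expected with the DBD in use: split the script and run each statement separately,
letting DBI drive the transaction. This assumes plain DBI with AutoCommit enabled
and that no statement contains an embedded semicolon; it is not code from the module.

 my @statements = grep { /\S/ } split /;\s*\n/, $sql;
 $dbh->begin_work;
 for my $stmt (@statements) {
     # BEGIN TRANSACTION / COMMIT are handled by begin_work()/commit() here
     next if $stmt =~ /^\s*(?:BEGIN\s+TRANSACTION|COMMIT)\s*$/i;
     $dbh->do($stmt);
 }
 $dbh->commit;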



App-Standup-Diary


MANIFEST.SKIP  view on Meta::CPAN

\bbuild.com$

# and Module::Build::Tiny generated files
\b_build_params$

# Avoid temp and backup files.
~$
\.old$
\#$
\b\.#
\.bak$



App-Sysadmin-Log-Simple


MANIFEST.SKIP  view on Meta::CPAN

\bBuild.bat$
\bBuild.COM$
\bBUILD.COM$
\bbuild.com$

# Avoid temp and backup files.
~$
\.old$
\#$
\b\.#
\.bak$



App-TaggedDirUtils


lib/App/TaggedDirUtils.pm  view on Meta::CPAN

            test => 0,
            'x.doc.show_result' => 0,
        },
        {
            summary => 'Backup all my mediadirs to Google Drive',
            src => q{[[prog]] --has-tag media --lacks-file .git -l /media/budi /media/ujang | td map '"rclone copy -v -v $_->{abs_path} mygdrive:/backup/$_->{name}"' | bash},
            src_plang => 'bash',
            test => 0,
            'x.doc.show_result' => 0,
        },
    ],

lib/App/TaggedDirUtils.pm  view on Meta::CPAN

=back

A "tagged directory" is a directory which has one or more tags: usually empty
files called F<.tag-TAGNAME>, where I<TAGNAME> is some tag name.

You can back up, rsync, or do whatever you like with a tagged directory, just
like a normal filesystem directory. The utilities provided in this distribution
help you handle tagged directories.
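As a rough illustration (the helper below is hypothetical and not part of this
distribution's API), checking whether a directory carries a given tag is just a
file-existence test against the F<.tag-TAGNAME> convention described above:

 # minimal sketch, relying only on the .tag-TAGNAME naming convention
 sub dir_has_tag {
     my ($dir, $tag) = @_;
     return -f "$dir/.tag-$tag";
 }

 print "media dir\n" if dir_has_tag("/media/budi", "media");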

=head1 FUNCTIONS



App-TemplateCMD


templates/perl/package/MANIFEST.SKIP  view on Meta::CPAN

\bbuild.com$

# Avoid Devel::Cover generated files
\bcover_db

# Avoid temp and backup files.
~$
\.tmp$
\.old$
\.bak$
\#$



App-Templer


t/style-no-trailing-whitespace.t  view on Meta::CPAN

    my $file = $File::Find::name;

    # We don't care about directories
    return if ( !-f $file );

    # Nor about backup files.
    return if ( $file =~ /~$/ );

    # or Makefiles
    return if ( $file =~ /Makefile/ );



App-Test-Generator


MANIFEST.SKIP  view on Meta::CPAN

\bBuild.bat$
\bBuild.COM$
\bBUILD.COM$
\bbuild.com$

# Avoid temp and backup files.
~$
\.old$
\#$
\b\.#
\.bak$



App-TimeTracker-Command-Jira


MANIFEST.SKIP  view on Meta::CPAN

CPAN.SKIP
t/000_standard__*
Debian_CPANTS.txt
nytprof.out

# Temp, old, emacs, vim, backup files.
~$
\.old$
\.swp$
\.tar$
\.tar\.gz$



App-Toot


MANIFEST.SKIP  view on Meta::CPAN


# Avoid Module::Build generated and utility files.
\bBuild$
\b_build/

# Avoid temp and backup files.
~$
\.old$
\#$
\b\.#
\.bak$



App-Transpierce


lib/App/Transpierce.pm  view on Meta::CPAN


__END__

=head1 NAME

App::Transpierce - backup and modify important files

=head1 SYNOPSIS

	# exports the script into the current directory
	transpierce --self-export

lib/App/Transpierce.pm  view on Meta::CPAN

This distribution provides the C<transpierce> script, which can be used for
per-task management of files that must be backed up before modification.

Transpierce means to pierce through. The module makes it easier to penetrate
system files and alter them by working on local copies. You only poke single
holes in the system by performing file deployment and (if needed) backup restoration.
If you think that's not how it's supposed to be done, you're right - but
sometimes it is what needs to be done.

Suppose you must reproduce a bug that only happens in a very specific
environment. Or you have to hotfix something quickly and a full release cycle
will not be fast enough. Do you change live files? Or make copies as backups
and then do the modifications? Are you sure you reverted all unwanted changes?

This script will set up a small working environment for you, which consists of:

=over



App-TrimHistories


lib/App/TrimHistories.pm  view on Meta::CPAN

    v => 1.1,
    summary => 'Keep only a certain number of sets of file histories, '.
        'delete the rest',
    description => <<'_',

This script can be used to delete old backup or log files. The files must be
named with timestamps, e.g. `mydb-2017-06-14.sql.gz`. By default, it keeps only
7 daily, 4 weekly, and 6 monthly histories. The rest will be deleted.

_
    args => {
        files => {
            'x.name.is_plural' => 1,
            'x.name.singular' => 'file',
            schema => ['array*', of=>'filename*'],
            summary => 'Each file name must be unique and contain a date, '.
                'e.g. `backup-2017-06-14.tar.gz`',
            req => 1,
            pos => 0,
            greedy => 1,
        },
        sets => {

lib/App/TrimHistories.pm  view on Meta::CPAN


 trim_histories(%args) -> [status, msg, payload, meta]

Keep only a certain number of sets of file histories, delete the rest.

This script can be used to delete old backup or log files. The files must be
named with timestamps, e.g. C<mydb-2017-06-14.sql.gz>. By default, it keeps only
7 daily, 4 weekly, and 6 monthly histories. The rest will be deleted.

This function is not exported.
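A minimal usage sketch (the file names are invented for illustration; assumes the
enveloped C<[status, msg, payload, meta]> result shown above, with 200 meaning
success):

 use App::TrimHistories;

 my $res = App::TrimHistories::trim_histories(
     files => [glob "mydb-*.sql.gz"],
     sets  => ["daily", 7, "weekly", 4, "monthly", 6],  # the documented default
 );
 die "trim failed: $res->[1]\n" unless $res->[0] == 200;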

lib/App/TrimHistories.pm  view on Meta::CPAN


=item * B<discard_young_histories> => I<bool>

=item * B<files>* => I<array[filename]>

Each file name must be unique and contain a date, e.g. `backup-2017-06-14.tar.gz`.

=item * B<sets> => I<array[str]> (default: ["daily",7,"weekly",4,"monthly",6])

History sets to keep.



App-UpfUtils


script/upf-add-group  view on Meta::CPAN


=head1 SYNOPSIS

Usage:

 % upf-add-group [--backup] [--etc-dir=s] [--format=name] [--gid=s]
     [--json] [--max-gid=s] [--members-json=s] [--members=s] [--min-gid=s]
     [--(no)naked-res] [--no-backup] [--nobackup] [--page-result[=program]]
     <group>
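For instance (the group name and GID here are invented for illustration), a run
that backs up any files it modifies might look like:

 % upf-add-group --backup --gid=1001 mygroup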

=head1 OPTIONS

C<*> marks required options.

=head2 Main options

=over

=item B<--backup>

Whether to back up when modifying files.

The backup is written with a `.bak` extension in the same directory. An
unmodified file will not be backed up. A previous backup will be overwritten.


=item B<--gid>=I<s>

Pick a specific new GID.



App-Utils


bin/move-merge  view on Meta::CPAN


sub help { print <<"#EOT" }
# move-merge merges directories into one target directory, version $VERSION
#
# Move and merge directories into the destination directory, with file
# renaming.  The script is useful in incremental backups with rsync.
#
# Usage: move-merge [switches] [destinationdir] [dirs]
#  -h  Print help and exit.
#  -v  Print version of the program and exit.
#
# An illustrative Scenario:  Assume that we are making regular backups
# of the directory /home/user into /backup/user while saving the old
# files into directory /backup/user-old/041201-085451, where
# 041201-085451 is a time-stamp-named directory with the structure
# similar to the previous /backup/user directory.  When this is
# periodically repeated, the directory /backup/user-old/ accumulates a
# lot of directories and it needs to be cleaned periodically.
# Before cleaning, it may be useful to merge the tagged directories
# with:   move-merge m 0*
#EOT

bin/move-merge  view on Meta::CPAN


The command C<move-merge> merges a list of source directories into the target
directory, unifying their subdirectory structures.  Each final file is replaced
by a same-named directory inside which the file's versions are saved under the
names of their source directories.  This is particularly useful for merging
backup directories produced by C<rsync>-based backups.  For example,
let us assume that we are making regular backups of the directory C</home/user>
into the directory C</backup/user> while saving the old and deleted files into
the directory C</backup/user-old/220203-105750>, where C<220203-105750> contains
the time-stamped version of the old files in the same directory structure as the
original backup.  After collecting a number of such backups, we can run the
command C<move-merge m 2*> which will collect and merge all versions into the
directory C<m>.
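As a rough illustration (the paths and the second time stamp are invented), the
transformation described above turns per-snapshot copies of a file into a
per-file directory of versions:

 # before: one copy of notes.txt per time-stamped snapshot
 /backup/user-old/220203-105750/notes.txt
 /backup/user-old/220301-093000/notes.txt

 # after running "move-merge m 2*" inside /backup/user-old:
 m/notes.txt/220203-105750
 m/notes.txt/220301-093000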

=head1 AUTHOR



App-VTide


example/hooks.pl  view on Meta::CPAN

        # keep all names lower case
        $$name_ref = lc $$name_ref;
    },
    start_pre => sub {
        my ($self, $name, $dir) = @_;
        my $global = path($self->config->global_config)->parent->path('backups');
        $global->mkpath;
        my $backup = path( $global, $name . '.yml' );
        path($dir, '.vtide.yml')->copy($backup);
    },
    refresh_session_missing => sub {
        my ($self, $name, $dir) = @_;

        my $global = path($self->config->global_config)->parent->path('backups');
        my $backup = path( $global, $name . '.yml' );
        warn "    But backup exists\n" if -f $backup;
    },
};



App-WHMCSUtils


lib/App/WHMCSUtils.pm  view on Meta::CPAN

    );
}

$SPEC{restore_whmcs_client} = {
    v => 1.1,
    summary => "Restore a missing client from SQL database backup",
    args => {
        sql_backup_file => {
            schema => 'filename*',
            description => <<'_',

Can accept either `.sql` or `.sql.gz`.

The SQL dump will first be extracted into a directory of separate files, one
per table.

_
        },
        sql_backup_dir => {
            summary => 'Directory containing per-table SQL files',
            schema => 'dirname*',
            description => <<'_',


lib/App/WHMCSUtils.pm  view on Meta::CPAN

            default => 1,
        },
    },
    args_rels => {
        'req_one&' => [
            ['sql_backup_file', 'sql_backup_dir'],
            ['client_email', 'client_id'],
        ],
    },
    deps => {
        prog => "mysql-sql-dump-extract-tables",

lib/App/WHMCSUtils.pm  view on Meta::CPAN

sub restore_whmcs_client {
    my %args = @_;

    local $CWD;

    my $sql_backup_dir;
    my $decompress = 0;
    if ($args{sql_backup_file}) {
        return [404, "No such file: $args{sql_backup_file}"]
            unless -f $args{sql_backup_file};
        my $pt = path($args{sql_backup_file});
        my $basename = $pt->basename;
        if ($basename =~ /(.+)\.sql\z/i) {
            $sql_backup_dir = $1;
        } elsif ($basename =~ /(.+)\.sql\.gz\z/i) {
            $sql_backup_dir = $1;
            $decompress = 1;
        } else {
            return [412, "SQL backup file should be named *.sql or *.sql.gz: ".
                        "$args{sql_backup_file}"];
        }
        if (-d $sql_backup_dir) {
            log_info "SQL backup dir '$sql_backup_dir' already exists, ".
                "skipped extracting";
        } else {
            mkdir $sql_backup_dir, 0755
                or return [500, "Can't mkdir '$sql_backup_dir': $!"];
            $CWD = $sql_backup_dir;
            my @cmd;
            if ($decompress) {
                push @cmd, "zcat", $pt->absolute->stringify, \"|";
            } else {
                push @cmd, "cat", $pt->absolute->stringify, \"|";
            }
            push @cmd, "mysql-sql-dump-extract-tables",
                "--include-table-pattern", '^(tblclients|tblinvoices|tblinvoiceitems|tblorders)$';
            system({shell=>1, die=>1, log=>1}, @cmd);
        }
    } elsif ($args{sql_backup_dir}) {
        $sql_backup_dir = $args{sql_backup_dir};
        return [404, "No such dir: $sql_backup_dir"]
            unless -d $sql_backup_dir;
        $CWD = $sql_backup_dir;
    }

    my @sql;

    my $clientid = $args{client_id};
  FIND_CLIENT:
    {
        open my $fh, "<", "tblclients"
            or return [500, "Can't open $sql_backup_dir/tblclients: $!"];
        my $clientemail;
        $clientemail = lc $args{client_email} if defined $args{client_email};
        while (<$fh>) {
            next unless /^INSERT INTO `tblclients` \(`id`, `firstname`, `lastname`, `companyname`, `email`, [^)]+\) VALUES \((\d+),'(.*?)','(.*?)','(.*?)','(.*?)',/;
            my ($rid, $rfirstname, $rlastname, $rcompanyname, $remail) = ($1, $2, $3, $4, $5);
            if (defined $clientid) {
                # find by ID
                if ($rid == $clientid) {
                    $clientemail = $remail;
                    push @sql, $_;
                    log_info "Found client ID=%s in backup", $clientid;
                    last FIND_CLIENT;
                }
            } else {
                # find by email
                if (lc $remail eq $clientemail) {
                    $clientid = $rid;
                    push @sql, $_;
                    log_info "Found client email=%s in backup: ID=%s", $clientemail, $clientid;
                    last FIND_CLIENT;
                }
            }
        }
        return [404, "Couldn't find client email=$clientemail in database backup, please check the email or try another backup"];
    }

    my @invoiceids;
  FIND_INVOICES:
    {
        last unless $args{restore_invoices};
        open my $fh, "<", "tblinvoices"
            or return [500, "Can't open $sql_backup_dir/tblinvoices: $!"];
        while (<$fh>) {
            next unless /^INSERT INTO `tblinvoices` \(`id`, `userid`, [^)]+\) VALUES \((\d+),(\d+),/;
            my ($rid, $ruserid) = ($1, $2);
            if ($ruserid == $clientid) {
                push @invoiceids, $rid;
                push @sql, $_;
                log_info "Found client invoice in backup: ID=%s", $rid;
            }
        }
        log_info "Number of invoices found for client in backup: %d", ~~@invoiceids if @invoiceids;
    }

  FIND_INVOICEITEMS:
    {
        last unless @invoiceids;
        open my $fh, "<", "tblinvoiceitems"
            or return [500, "Can't open $sql_backup_dir/tblinvoiceitems: $!"];
        while (<$fh>) {
            next unless /^INSERT INTO `tblinvoiceitems` \(`id`, `invoiceid`, `userid`, [^)]+\) VALUES \((\d+),(\d+),(\d+)/;
            my ($rid, $rinvoiceid, $ruserid) = ($1, $2, $3);
            if (grep {$rinvoiceid == $_} @invoiceids) {
                log_trace "Adding invoice item %s for invoice #%s", $rid, $rinvoiceid;

lib/App/WHMCSUtils.pm  view on Meta::CPAN


  FIND_HOSTINGS:
    {
        last unless $args{restore_hostings};
        open my $fh, "<", "tblhosting"
            or return [500, "Can't open $sql_backup_dir/tblhosting: $!"];
        while (<$fh>) {
            next unless /^INSERT INTO `tblhosting` \(`id`, `userid`, [^)]+\) VALUES \((\d+),(\d+),(\d+)/;
            my ($rid, $ruserid) = ($1, $2, $3);
            if ($ruserid == $clientid) {
                log_trace "Found hosting for client in backup: ID=%d", $rid;
                push @sql, $_;
            }
        }
    }

  FIND_DOMAINS:
    {
        last unless $args{restore_domains};
        open my $fh, "<", "tbldomains"
            or return [500, "Can't open $sql_backup_dir/tbldomains: $!"];
        while (<$fh>) {
            next unless /^INSERT INTO `tbldomains` \(`id`, `userid`, [^)]+\) VALUES \((\d+),(\d+),(\d+)/;
            my ($rid, $ruserid) = ($1, $2, $3);
            if ($ruserid == $clientid) {
                log_trace "Found domain for client in backup: ID=%d", $rid;
                push @sql, $_;
            }
        }
    }

lib/App/WHMCSUtils.pm  view on Meta::CPAN


Usage:

 restore_whmcs_client(%args) -> [$status_code, $reason, $payload, \%result_meta]

Restore a missing client from SQL database backup.

This function is not exported.

This function supports dry-run operation.
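A minimal usage sketch (the path and e-mail address are invented; assumes the
enveloped C<[$status_code, $reason, $payload, \%result_meta]> result shown
above, with 200 meaning success):

 use App::WHMCSUtils;

 my $res = App::WHMCSUtils::restore_whmcs_client(
     sql_backup_file => "/path/to/whmcs.sql.gz",
     client_email    => 'customer@example.com',
 );
 die "restore failed: $res->[1]\n" unless $res->[0] == 200;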

lib/App/WHMCSUtils.pm  view on Meta::CPAN


=item * B<restore_hostings> => I<bool> (default: 1)

=item * B<restore_invoices> => I<bool> (default: 1)

=item * B<sql_backup_dir> => I<dirname>

Directory containing per-table SQL files.

=item * B<sql_backup_file> => I<filename>

Can accept either C<.sql> or C<.sql.gz>.

The SQL dump will first be extracted into a directory of separate files, one
per table.


