DBD-PgLite

 View this release on MetaCPAN, or search on MetaCPAN.

lib/DBD/PgLite/MirrorPgToSQLite.pm  view on Meta::CPAN

	if ($opt{sl_dbh}->selectrow_array("select name from sqlite_master where name = ? and type = 'view'",{},$vn)) {
		eval { $opt{sl_dbh}->do("drop view $vn") };
	}
	eval { $opt{sl_dbh}->do("create view $vn as $def"); };
	warn "    *** COULD NOT CREATE VIEW $sn.$vn *** \n" if $@;
}

# Expand a list of table-name patterns into concrete table names.
#
# Arguments: $pg (DBI handle to PostgreSQL), $sn (schema name), @pats
# (literal table names, or regex patterns written as /pattern/).
# Returns the de-duplicated list of matching table names (unordered).
sub tablelist {
	my ($pg,$sn,@pats) = @_;
	my %tables; # prevent duplicate table names
	for my $pat (@pats) {
		# A pattern of the form /regex/ is matched case-insensitively
		# against pg_tables; anything else is taken as a literal name.
		if ($pat =~ s/^\/(.+)\/$/$1/) {
			# Pass the schema and pattern as bind values instead of
			# interpolating them into the SQL, avoiding quoting breakage
			# and SQL injection via crafted schema/pattern strings.
			my $res = $pg->selectcol_arrayref(
				"select tablename from pg_tables where lower(schemaname) = lower(?) and tablename ~* ?",
				{}, $sn, $pat);
			$tables{$_}++ for @$res;
		} else {
			$tables{$pat}++;
		}
	}
	return keys %tables;
}

# Expand a list of view-name patterns into concrete view names.
#
# Arguments: $pg (DBI handle to PostgreSQL), $sn (schema name), @pats
# (literal view names, or regex patterns written as /pattern/).
# Returns the de-duplicated list of matching view names (unordered).
sub viewlist {
	my ($pg,$sn,@pats) = @_;
	my %views; # prevent duplicate view names
	for my $pat (@pats) {
		# A pattern of the form /regex/ is matched case-insensitively
		# against pg_views; anything else is taken as a literal name.
		if ($pat =~ s/^\/(.+)\/$/$1/) {
			# Bind values rather than string interpolation: protects
			# against quoting breakage and SQL injection.
			my $res = $pg->selectcol_arrayref(
				"select viewname from pg_views where lower(schemaname) = lower(?) and viewname ~* ?",
				{}, $sn, $pat);
			$views{$_}++ for @$res;
		} else {
			$views{$pat}++;
		}
	}
	return keys %views;
}


# Return the SQLite "create table" statement and column count for the
# Postgres table $sn.$tn, consulting the on-disk cache first.
#
# Returns a two-element list: ($create_sql, $column_count).
sub get_schema {
	my ($sn,$tn,%opt) = @_;
	# Constructing a schema definition can be rather slow, so a cached
	# copy (good for up to a week) is used when available.
	my @hit = cached_schema($sn,$tn,undef,undef,%opt);
	return @hit if @hit;
	my @columns = col_def($sn,$tn,%opt);
	my $ncols = scalar @columns;
	# Append a primary-key clause when Postgres reports one; an empty
	# first element means "no usable primary key".
	my @pk = $opt{pg_dbh}->primary_key(undef,$sn,$tn);
	if (@pk && $pk[0] ne '') {
		push @columns, "primary key (" . join(',',@pk) . ")";
	}
	my $ddl = "create table $tn (\n  " . join(",\n  ", @columns) . "\n)\n";
	# Store for next time, then hand back what we built.
	cached_schema($sn,$tn,$ddl,$ncols,%opt);
	return ($ddl, $ncols);
}

# Read or write the on-disk schema cache for table $sn.$tn.
#
# Write mode ($cnt true): stores [$creat, $cnt] via Storable and returns
# nothing meaningful.  Read mode ($cnt false): returns the cached
# ($create_sql, $column_count) pair if the cache file is younger than a
# week, otherwise the empty list.
sub cached_schema {
	my ($sn,$tn,$creat,$cnt,%opt) = @_;
	# Cache files are keyed on database name as well as schema/table so
	# several mirrored databases can share one cache directory.
	my $database = $opt{pg_dbh}->{mbl_dbh} 
	  ? $opt{pg_dbh}->{mbl_dbh}->[0]->{Name}
	  : $opt{pg_dbh}->{Name};
	unless (-d $opt{cachedir}) {
		mkdir $opt{cachedir};
		chmod 0777, $opt{cachedir};
	}
	# Per-user subdirectory avoids permission clashes in a shared cache dir.
	my $uid = (getpwuid($>))[0] || $>;
	mkdir "$opt{cachedir}/$uid" unless -d "$opt{cachedir}/$uid";
	my $fn = "$opt{cachedir}/$uid/$database.$sn.$tn";
	if ($cnt) {
		Storable::store [$creat,$cnt], $fn;
	} elsif (-f $fn && time-(stat $fn)[9]<7*24*60*60) {
		# Parenthesize retrieve() so that "|| []" guards its RETURN VALUE.
		# The unparenthesized form parsed as retrieve($fn || []), which
		# never protected against a corrupt/empty cache file returning undef.
		my $ret = Storable::retrieve($fn) || [];
		return @$ret;
	} else {
		return ();
	}
}

# Build SQLite column definitions for the Postgres table $sn.$tn.
#
# Returns a list of strings of the form "name type" or
# "name type not null", one per column, in column order.
sub col_def {
	my ($sn,$tn,%opt) = @_;
	my $sth = $opt{pg_dbh}->column_info(undef,$sn,$tn,undef);
	# NOTE(review): DBI's column_info may already return an active handle;
	# the explicit execute is preserved from the original code.
	$sth->execute;
	my $rows = $sth->fetchall_arrayref;
	# Slice out COLUMN_NAME (3), TYPE_NAME (5) and NULLABLE (10) from each
	# column_info row and render them as a column definition.
	my @defs = map {
		my ($name,$type,$nullable) = @{$_}[3,5,10];
		$name . ' ' . $type . ($nullable ? '' : ' not null');
	} @$rows;
	$sth->finish;
	return @defs;
}

# Create or clear the mirror run's lockfile.
#
# $action is 'create' (die if the lockfile already exists, otherwise
# write our PID into it) or 'clear' (remove it, warning if absent).
# Any other action is silently ignored, as in the original interface.
sub lockfile {
	my ($action,$lockfile) = @_;
	if ($action eq 'create') {
		# An existing lockfile means another mirror run is (or was) active.
		die "ERROR: Lockfile $lockfile exists - cannot continue" if -f $lockfile;
		# Lexical filehandle instead of the package-global bareword LOCK.
		open my $lock, ">", $lockfile or die "ERROR: Could not open lockfile $lockfile: $!";
		print {$lock} $$; # record our PID for troubleshooting stale locks
		# Buffered write errors only surface at close, so check it.
		close $lock or die "ERROR: Could not write lockfile $lockfile: $!";
	} elsif ($action eq 'clear') {
		if (-f $lockfile) {
			unlink $lockfile or die "ERROR: Could not remove lockfile $lockfile: $!";
		} else {
			# (Dropped the redundant "unless -f $lockfile" guard: this
			# branch already implies the file does not exist.)
			warn "WARNING: Lockfile $lockfile does not exist - cannot clear";
		}
	}
}

1;

__END__

=pod

=head1 NAME

DBD::PgLite::MirrorPgToSQLite - Mirror tables from PostgreSQL to SQLite

=head1 SYNOPSIS

 use DBD::PgLite::MirrorPgToSQLite qw(pg_to_sqlite);
 pg_to_sqlite(
     sqlite_file => '/var/pg_mirror/news.sqlite',
     pg_dbh      => $dbh,
     schema      => 'news',



( run in 1.897 second using v1.01-cache-2.11-cpan-39bf76dae61 )