- added the ability to set on_connect_do and the various sql_maker
options as part of Storage::DBI's connect_info.
0.06003 2006-05-19 15:37:30
- make find_or_create_related check defined() instead of truth
- don't unnecessarily fetch rels for cascade_update
- don't set_columns explicitly in update_or_create; instead use
update($hashref) so InflateColumn works
- fix for has_many prefetch with 0 related rows
- make limit error if rows => 0
- added memory cycle tests and a long-needed weaken call
0.06002 2006-04-20 00:42:41
- fix set_from_related to accept undef
- fix to Dumper-induced hash iteration bug
- fix to copy() with non-composed resultsource
- fix to ->search without args to clone rs but maintain cache
- grab $self->dbh once per function in Storage::DBI
- nuke ResultSource caching of ->resultset for consistency reasons
- fix for -and conditions when updating or deleting on a ResultSet
- Fix exception text for nonexistent key in ResultSet::find()
0.05999_04 2006-03-18 19:20:49
- Fix for delete on full-table resultsets
- Removed caching on count() and added _count for pager()
- ->connection does nothing if ->storage defined and no args
(and hence ->connect acts like ->clone under the same conditions)
- Storage::DBI throws better exception if no connect info
- columns_info_for made more robust / informative
- ithreads compat added, fork compat improved
- weaken result_source in all resultsets
- Make pg seq extractor less sensitive.
0.05999_03 2006-03-14 01:58:10
- has_many prefetch fixes
- deploy now adds drop statements before creates
- deploy outputs debugging statements if DBIX_CLASS_STORAGE_DBI_DEBUG
is set
0.05999_02 2006-03-10 13:31:37
- remove test dep on YAML
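The connect_info entry at the top of this changelog excerpt refers to attributes accepted by Storage::DBI. A minimal illustrative sketch (schema class, DSN and option values are made up) of passing on_connect_do together with sql_maker options such as quote_char and name_sep:

my $schema = My::Schema->connect(
  $dsn, $user, $pass,
  {
    on_connect_do => [ 'SET NAMES utf8' ],  # statements run on every (re)connect
    quote_char    => '`',                   # handed through to the sql_maker
    name_sep      => '.',
  },
);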
lib/DBIx/Class/AccessorGroup.pm
package DBIx::Class::AccessorGroup;
use strict;
use warnings;
use base qw/Class::Accessor::Grouped/;
use Scalar::Util qw/weaken blessed/;
use namespace::clean;
my $successfully_loaded_components;
sub get_component_class {
my $class = $_[0]->get_inherited($_[1]);
# It's already an object, just go for it.
return $class if blessed $class;
if (defined $class and ! $successfully_loaded_components->{$class} ) {
$_[0]->ensure_class_loaded($class);
no strict 'refs';
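# The only strong reference to the 'loaded' marker ends up inside the
# loaded package itself; the cache slot assigned below is then weakened,
# so if that package is ever torn down the cached already-loaded flag
# expires along with it.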
$successfully_loaded_components->{$class}
= ${"${class}::__LOADED__BY__DBIC__CAG__COMPONENT_CLASS__"}
= do { \(my $anon = 'loaded') };
weaken($successfully_loaded_components->{$class});
}
$class;
};
sub set_component_class {
shift->set_inherited(@_);
}
1;
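A hedged usage sketch, mirroring the lines in Storage.pm excerpted further below: a 'component_class' group accessor is resolved through get_component_class()/set_component_class() above, and the named class is loaded on first read.

__PACKAGE__->mk_group_accessors(component_class => 'cursor_class');
__PACKAGE__->cursor_class('DBIx::Class::Cursor');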
lib/DBIx/Class/CDBICompat/ColumnsAsHash.pm
tie $self->{$col}, 'DBIx::Class::CDBICompat::Tied::ColumnValue',
$self, $col;
}
}
package DBIx::Class::CDBICompat::Tied::ColumnValue;
use Carp;
use Scalar::Util qw(weaken isweak);
sub TIESCALAR {
my($class, $obj, $col) = @_;
my $self = [$obj, $col];
weaken $self->[0];
return bless $self, $class;
}
sub FETCH {
my $self = shift;
my($obj, $col) = @$self;
my $class = ref $obj;
my $id = $obj->id;
lib/DBIx/Class/CDBICompat/LiveObjectIndex.pm
package # hide from PAUSE
DBIx::Class::CDBICompat::LiveObjectIndex;
use strict;
use warnings;
use Scalar::Util qw/weaken/;
use base qw/Class::Data::Inheritable/;
__PACKAGE__->mk_classdata('purge_object_index_every' => 1000);
__PACKAGE__->mk_classdata('live_object_index' => { });
__PACKAGE__->mk_classdata('live_object_init_count' => { });
# Caching is on by default, but a classic CDBI hack to turn it off is to
# set this variable false.
$Class::DBI::Weaken_Is_Available = 1
lib/DBIx/Class/CDBICompat/LiveObjectIndex.pm
return $self if $self->nocache;
# Because the insert will die() if it can't insert into the db (or should)
# we can be sure the object *was* inserted if we got this far. In which
# case, given primary keys are unique and ID only returns a
# value if the object has all its primary keys, we can be sure there
# isn't a real one in the object index already because such a record
# cannot have existed without the insert failing.
if (my $key = $self->ID) {
my $live = $self->live_object_index;
weaken($live->{$key} = $self);
$self->purge_dead_from_object_index
if ++$self->live_object_init_count->{count}
% $self->purge_object_index_every == 0;
}
return $self;
}
sub inflate_result {
my ($class, @rest) = @_;
my $new = $class->next::method(@rest);
return $new if $new->nocache;
if (my $key = $new->ID) {
#warn "Key $key";
my $live = $class->live_object_index;
return $live->{$key} if $live->{$key};
weaken($live->{$key} = $new);
$class->purge_dead_from_object_index
if ++$class->live_object_init_count->{count}
% $class->purge_object_index_every == 0;
}
return $new;
}
1;
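A self-contained sketch of the idiom the two methods above rely on (all names here are hypothetical): a weak-valued index that never keeps its objects alive by itself, combined with a periodic purge of slots whose objects have already been destroyed.

use strict;
use warnings;
use Scalar::Util 'weaken';

my %index;

sub remember {
  my ($key, $obj) = @_;
  # the index entry does not count as a reference keeping $obj alive
  weaken( $index{$key} = $obj );
}

sub purge_dead {
  defined $index{$_} or delete $index{$_}
    for keys %index;
}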
lib/DBIx/Class/Relationship/Base.pm
package DBIx::Class::Relationship::Base;
use strict;
use warnings;
use base qw/DBIx::Class/;
use Scalar::Util qw/weaken blessed/;
use Try::Tiny;
use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION';
use namespace::clean;
=head1 NAME
DBIx::Class::Relationship::Base - Inter-table relationships
=head1 SYNOPSIS
lib/DBIx/Class/Relationship/Base.pm
)->search_related('me', $query, $attrs)
}
else {
# FIXME - this conditional doesn't seem correct - got to figure out
# at some point what it does. Also the entire UNRESOLVABLE_CONDITION
# business seems shady - we could simply not query *at all*
if ($cond eq UNRESOLVABLE_CONDITION) {
my $reverse = $rsrc->reverse_relationship_info($rel);
foreach my $rev_rel (keys %$reverse) {
if ($reverse->{$rev_rel}{attrs}{accessor} && $reverse->{$rev_rel}{attrs}{accessor} eq 'multi') {
weaken($attrs->{related_objects}{$rev_rel}[0] = $self);
} else {
weaken($attrs->{related_objects}{$rev_rel} = $self);
}
}
}
elsif (ref $cond eq 'ARRAY') {
$cond = [ map {
if (ref $_ eq 'HASH') {
my $hash;
foreach my $key (keys %$_) {
my $newkey = $key !~ /\./ ? "me.$key" : $key;
$hash->{$newkey} = $_->{$key};
lib/DBIx/Class/ResultSet.pm
package DBIx::Class::ResultSet;
use strict;
use warnings;
use base qw/DBIx::Class/;
use DBIx::Class::Carp;
use DBIx::Class::ResultSetColumn;
use Scalar::Util qw/blessed weaken reftype/;
use DBIx::Class::_Util qw(
fail_on_internal_wantarray fail_on_internal_call UNRESOLVABLE_CONDITION
);
use Try::Tiny;
BEGIN {
# De-duplication in _merge_attr() is disabled, but left in for reference
# (the merger is used for other things that ought not to be de-duped)
*__HM_DEDUP = sub () { 0 };
}
lib/DBIx/Class/ResultSource.pm
use base qw/DBIx::Class::ResultSource::RowParser DBIx::Class/;
use DBIx::Class::ResultSet;
use DBIx::Class::ResultSourceHandle;
use DBIx::Class::Carp;
use DBIx::Class::_Util 'UNRESOLVABLE_CONDITION';
use SQL::Abstract::Util 'is_literal_value';
use Devel::GlobalDestruction;
use Try::Tiny;
use Scalar::Util qw/blessed weaken isweak/;
use namespace::clean;
__PACKAGE__->mk_group_accessors(simple => qw/
source_name name source_info
_ordered_columns _columns _primaries _unique_constraints
_relationships resultset_attributes
column_info_from_storage
/);
lib/DBIx/Class/ResultSource.pm
# we are trying to save to reattach back to the source we are destroying.
# The relevant code checking refcounts is in ::Schema::DESTROY()
# if we are not a schema instance holder - we don't matter
return if(
! ref $_[0]->{schema}
or
isweak $_[0]->{schema}
);
# weaken our schema hold forcing the schema to find somewhere else to live
# during global destruction (if we have not yet bailed out) this will throw
# which will serve as a signal to not try doing anything else
# however beware - on older perls the exception seems randomly untrappable
# due to some weird race condition during thread joining :(((
local $@;
eval {
weaken $_[0]->{schema};
# if schema is still there reintroduce ourselves with strong refs back to us
if ($_[0]->{schema}) {
my $srcregs = $_[0]->{schema}->source_registrations;
for (keys %$srcregs) {
next unless $srcregs->{$_};
$srcregs->{$_} = $_[0] if $srcregs->{$_} == $_[0];
}
}
lib/DBIx/Class/Schema.pm
package DBIx::Class::Schema;
use strict;
use warnings;
use base 'DBIx::Class';
use DBIx::Class::Carp;
use Try::Tiny;
use Scalar::Util qw/weaken blessed/;
use DBIx::Class::_Util qw(refcount quote_sub is_exception scope_guard);
use Devel::GlobalDestruction;
use namespace::clean;
__PACKAGE__->mk_classdata('class_mappings' => {});
__PACKAGE__->mk_classdata('source_registrations' => {});
__PACKAGE__->mk_classdata('storage_type' => '::DBI');
__PACKAGE__->mk_classdata('storage');
__PACKAGE__->mk_classdata('exception_action');
__PACKAGE__->mk_classdata('stacktrace' => $ENV{DBIC_TRACE} || 0);
lib/DBIx/Class/Schema.pm
=cut
sub register_extra_source { shift->_register_source(@_, { extra => 1 }) }
sub _register_source {
my ($self, $source_name, $source, $params) = @_;
$source = $source->new({ %$source, source_name => $source_name });
$source->schema($self);
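# the schema keeps a strong reference to this source via
# source_registrations just below, so the source's back-link to the schema
# must be weak or the two would form a reference cycle; when registering
# against a class name (not an instance) there is nothing to weaken,
# hence the ref() guard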
weaken $source->{schema} if ref($self);
my %reg = %{$self->source_registrations};
$reg{$source_name} = $source;
$self->source_registrations(\%reg);
return $source if $params->{extra};
my $rs_class = $source->result_class;
if ($rs_class and my $rsrc = try { $rs_class->result_source_instance } ) {
my %map = %{$self->class_mappings};
lib/DBIx/Class/Schema.pm
### NO detected_reinvoked_destructor check
### This code very much relies on being called multiple times
return if $global_phase_destroy ||= in_global_destruction;
my $self = shift;
my $srcs = $self->source_registrations;
for my $source_name (keys %$srcs) {
# find first source that is not about to be GCed (someone other than $self
# holds a reference to it) and reattach to it, weakening our own link
#
# during global destruction (if we have not yet bailed out) this should throw
# which will serve as a signal to not try doing anything else
# however beware - on older perls the exception seems randomly untrappable
# due to some weird race condition during thread joining :(((
if (length ref $srcs->{$source_name} and refcount($srcs->{$source_name}) > 1) {
local $@;
eval {
$srcs->{$source_name}->schema($self);
weaken $srcs->{$source_name};
1;
} or do {
$global_phase_destroy = 1;
};
last;
}
}
}
lib/DBIx/Class/Schema/Versioned.pm
package DBIx::Class::Schema::Versioned;
use strict;
use warnings;
use base 'DBIx::Class::Schema';
use DBIx::Class::Carp;
use Time::HiRes qw/gettimeofday/;
use Try::Tiny;
use Scalar::Util 'weaken';
use namespace::clean;
__PACKAGE__->mk_classdata('_filedata');
__PACKAGE__->mk_classdata('upgrade_directory');
__PACKAGE__->mk_classdata('backup_directory');
__PACKAGE__->mk_classdata('do_backup');
__PACKAGE__->mk_classdata('do_diff_on_init');
=head1 METHODS
lib/DBIx/Class/Schema/Versioned.pm
my $self = shift;
$self->next::method(@_);
$self->_on_connect();
return $self;
}
sub _on_connect
{
my ($self) = @_;
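# hand the version schema a *weak* copy of our storage: the connect()
# coderef below closes over it, and a strong closure would keep the main
# storage (and through it the main schema) alive indefinitely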
weaken (my $w_storage = $self->storage );
$self->{vschema} = DBIx::Class::Version->connect(
sub { $w_storage->dbh },
# proxy some flags from the main storage
{ map { $_ => $w_storage->$_ } qw( unsafe ) },
);
my $conn_attrs = $w_storage->_dbic_connect_attributes || {};
my $vtable = $self->{vschema}->resultset('Table');
lib/DBIx/Class/Storage.pm
use mro 'c3';
{
package # Hide from PAUSE
DBIx::Class::Storage::NESTED_ROLLBACK_EXCEPTION;
use base 'DBIx::Class::Exception';
}
use DBIx::Class::Carp;
use DBIx::Class::Storage::BlockRunner;
use Scalar::Util qw/blessed weaken/;
use DBIx::Class::Storage::TxnScopeGuard;
use Try::Tiny;
use namespace::clean;
__PACKAGE__->mk_group_accessors(simple => qw/debug schema transaction_depth auto_savepoint savepoints/);
__PACKAGE__->mk_group_accessors(component_class => 'cursor_class');
__PACKAGE__->cursor_class('DBIx::Class::Cursor');
sub cursor { shift->cursor_class(@_); }
lib/DBIx/Class/Storage.pm
=head2 set_schema
Used to reset the schema class or object which owns this
storage object, such as during L<DBIx::Class::Schema/clone>.
=cut
sub set_schema {
my ($self, $schema) = @_;
$self->schema($schema);
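# keep only a weak back-link: the schema already holds a strong reference
# to this storage object, and a strong link in both directions would be a
# reference cycle leaking both objects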
weaken $self->{schema} if ref $self->{schema};
}
=head2 connected
Returns true if we have an open storage connection, false
if it is not (yet) open.
=cut
sub connected { die "Virtual method!" }
lib/DBIx/Class/Storage/BlockRunner.pm
package # hide from pause until we figure it all out
DBIx::Class::Storage::BlockRunner;
use warnings;
use strict;
use DBIx::Class::Exception;
use DBIx::Class::Carp;
use Context::Preserve 'preserve_context';
use DBIx::Class::_Util qw(is_exception qsub);
use Scalar::Util qw(weaken blessed reftype);
use Try::Tiny;
use Moo;
use namespace::clean;
=head1 NAME
DBIx::Class::Storage::BlockRunner - Try running a block of code until success, with configurable retry logic
=head1 DESCRIPTION
lib/DBIx/Class/Storage/BlockRunner.pm
);
local $storage->{_in_do_block} = 1 unless $storage->{_in_do_block};
return $self->_run($cref, @_);
}
# this is the actual recursing worker
sub _run {
# internal method - we know that both refs are strong-held by the
# calling scope of run(), hence safe to weaken everything
weaken( my $self = shift );
weaken( my $cref = shift );
my $args = @_ ? \@_ : [];
# from this point on (defined $txn_init_depth) is an indicator for wrap_txn
# save a bit on method calls
my $txn_init_depth = $self->wrap_txn ? $self->storage->transaction_depth : undef;
my $txn_begin_ok;
my $run_err = '';
lib/DBIx/Class/Storage/DBI.pm
package DBIx::Class::Storage::DBI;
# -*- mode: cperl; cperl-indent-level: 2 -*-
use strict;
use warnings;
use base qw/DBIx::Class::Storage::DBIHacks DBIx::Class::Storage/;
use mro 'c3';
use DBIx::Class::Carp;
use Scalar::Util qw/refaddr weaken reftype blessed/;
use Context::Preserve 'preserve_context';
use Try::Tiny;
use SQL::Abstract::Util qw(is_plain_value is_literal_value);
use DBIx::Class::_Util qw(quote_sub perlstring serialize detected_reinvoked_destructor sigwarn_silencer);
use namespace::clean;
# default cursor class, overridable in connect_info attributes
__PACKAGE__->cursor_class('DBIx::Class::Storage::DBI::Cursor');
__PACKAGE__->mk_group_accessors('inherited' => qw/
lib/DBIx/Class/Storage/DBI.pm
{
my %seek_and_destroy;
sub _arm_global_destructor {
# quick "garbage collection" pass - prevents the registry
# from slowly growing with a bunch of undef-valued keys
defined $seek_and_destroy{$_} or delete $seek_and_destroy{$_}
for keys %seek_and_destroy;
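# register ourselves weakly: the END block below only needs to reach
# storages that are still otherwise alive, and this registry must never
# be the thing keeping them alive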
weaken (
$seek_and_destroy{ refaddr($_[0]) } = $_[0]
);
}
END {
local $?; # just in case the DBI destructor changes it somehow
# destroy just the object if not native to this process
$_->_verify_pid for (grep
{ defined $_ }
lib/DBIx/Class/Storage/DBI.pm
local $DBI::connect_via = 'connect' if $INC{'Apache/DBI.pm'} && $ENV{MOD_PERL};
# this odd anonymous coderef dereference is in fact really
# necessary to avoid the unwanted effect described in perl5
# RT#75792
#
# in addition the coderef itself can't reside inside the try{} block below
# as it somehow triggers a leak under perl -d
my $dbh_error_handler_installer = sub {
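# $_[0] here is the storage instance ($_[1] the fresh $dbh) - capture the
# storage weakly, as the handler installed on the $dbh below would
# otherwise keep the storage (and its schema) alive for the entire
# lifetime of the database handle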
weaken (my $weak_self = $_[0]);
# the coderef is blessed so we can distinguish it from externally
# supplied handles (which must be preserved)
$_[1]->{HandleError} = bless sub {
if ($weak_self) {
$weak_self->throw_exception("DBI Exception: $_[0]");
}
else {
# the handler may be invoked by something totally out of
# the scope of DBIC
lib/DBIx/Class/Storage/DBI/Cursor.pm
package DBIx::Class::Storage::DBI::Cursor;
use strict;
use warnings;
use base 'DBIx::Class::Cursor';
use Try::Tiny;
use Scalar::Util qw(refaddr weaken);
use DBIx::Class::_Util 'detected_reinvoked_destructor';
use namespace::clean;
__PACKAGE__->mk_group_accessors('simple' =>
qw/storage args attrs/
);
=head1 NAME
DBIx::Class::Storage::DBI::Cursor - Object representing a query cursor on a
lib/DBIx/Class/Storage/DBI/Cursor.pm
attrs => $attrs,
}, ref $class || $class;
if (DBIx::Class::_ENV_::HAS_ITHREADS) {
# quick "garbage collection" pass - prevents the registry
# from slowly growing with a bunch of undef-valued keys
defined $cursor_registry{$_} or delete $cursor_registry{$_}
for keys %cursor_registry;
weaken( $cursor_registry{ refaddr($self) } = $self )
}
return $self;
}
sub CLONE {
for (keys %cursor_registry) {
# once marked we no longer care about them, hence no need to keep them
# in the registry, let alone renumber the keys (all addresses are now
# different)
lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm
use strict;
use warnings;
use base qw/
DBIx::Class::Storage::DBI::Sybase
DBIx::Class::Storage::DBI::AutoCast
DBIx::Class::Storage::DBI::IdentityInsert
/;
use mro 'c3';
use DBIx::Class::Carp;
use Scalar::Util qw/blessed weaken/;
use Sub::Name();
use Data::Dumper::Concise 'Dumper';
use Try::Tiny;
use Context::Preserve 'preserve_context';
use DBIx::Class::_Util 'sigwarn_silencer';
use namespace::clean;
__PACKAGE__->sql_limit_dialect ('GenericSubQ');
__PACKAGE__->sql_quote_char ([qw/[ ]/]);
__PACKAGE__->datetime_parser_type(
lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm
# create storage for insert/(update blob) transactions,
# unless this is that storage
return if $self->_parent_storage;
my $writer_storage = (ref $self)->new;
$writer_storage->_is_writer_storage(1); # just info
$writer_storage->connect_info($self->connect_info);
$writer_storage->auto_cast($self->auto_cast);
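# the parent keeps the writer storage alive via _writer_storage below,
# so the child's back-link to its parent must be weak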
weaken ($writer_storage->{_parent_storage} = $self);
$self->_writer_storage($writer_storage);
# create a bulk storage unless connect_info is a coderef
return if ref($self->_dbi_connect_info->[0]) eq 'CODE';
my $bulk_storage = (ref $self)->new;
$bulk_storage->_is_bulk_storage(1); # for special ->disconnect acrobatics
$bulk_storage->connect_info($self->connect_info);
# this is why connect_info can not be a coderef for the bulk storage
# (checked above) - the DSN string itself needs bulkLogin appended
$bulk_storage->_dbi_connect_info->[0] .= ';bulkLogin=1';
weaken ($bulk_storage->{_parent_storage} = $self);
$self->_bulk_storage($bulk_storage);
}
for my $method (@also_proxy_to_extra_storages) {
no strict 'refs';
no warnings 'redefine';
my $replaced = __PACKAGE__->can($method);
*{$method} = Sub::Name::subname $method => sub {
lib/DBIx/Class/Storage/TxnScopeGuard.pm
package DBIx::Class::Storage::TxnScopeGuard;
use strict;
use warnings;
use Try::Tiny;
use Scalar::Util qw(weaken blessed refaddr);
use DBIx::Class;
use DBIx::Class::_Util qw(is_exception detected_reinvoked_destructor);
use DBIx::Class::Carp;
use namespace::clean;
sub new {
my ($class, $storage) = @_;
my $guard = {
inactivated => 0,
lib/DBIx/Class/Storage/TxnScopeGuard.pm
};
# we are starting with an already set $@ - in order for things to work we need to
# be able to recognize it upon destruction - store its weakref
# recording it before doing the txn_begin stuff
#
# FIXME FRAGILE - any eval that fails but *does not* rethrow between here
# and the unwind will trample over $@ and invalidate the entire mechanism
# There has got to be a saner way of doing this...
if (is_exception $@) {
weaken(
$guard->{existing_exception_ref} = (ref($@) eq '') ? \$@ : $@
);
}
$storage->txn_begin;
weaken( $guard->{dbh} = $storage->_dbh );
bless $guard, ref $class || $class;
$guard;
}
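# Typical use of the guard, for reference (a sketch, not part of this file):
#
#   my $guard = $schema->txn_scope_guard;
#   $schema->resultset('Artist')->create({ name => 'obscure band' });
#   $guard->commit;  # without this, destruction of $guard rolls back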
sub commit {
my $self = shift;
$self->{storage}->throw_exception("Refusing to execute multiple commits on scope guard $self")
lib/DBIx/Class/_Util.pm
}
}
# FIXME - this is not supposed to be here
# Carp::Skip to the rescue soon
use DBIx::Class::Carp '^DBIx::Class|^DBICTest';
use B ();
use Carp 'croak';
use Storable 'nfreeze';
use Scalar::Util qw(weaken blessed reftype refaddr);
use Sub::Quote qw(qsub quote_sub);
use base 'Exporter';
our @EXPORT_OK = qw(
sigwarn_silencer modver_gt_or_eq modver_gt_or_eq_and_lt
fail_on_internal_wantarray fail_on_internal_call
refdesc refcount hrefaddr
scope_guard is_exception detected_reinvoked_destructor emit_loud_diag
quote_sub qsub perlstring serialize
UNRESOLVABLE_CONDITION
lib/DBIx/Class/_Util.pm
for keys %$destruction_registry;
if (! length ref $_[0]) {
printf STDERR '%s() expects a blessed reference %s',
(caller(0))[3],
Carp::longmess,
;
return undef; # don't know wtf to do
}
elsif (! defined $destruction_registry->{ my $addr = refaddr($_[0]) } ) {
weaken( $destruction_registry->{$addr} = $_[0] );
return 0;
}
else {
carp_unique ( sprintf (
'Preventing *MULTIPLE* DESTROY() invocations on %s - an *EXTREMELY '
. 'DANGEROUS* condition which is *ALMOST CERTAINLY GLOBAL* within your '
. 'application, affecting *ALL* classes without active protection against '
. 'this. Diagnose and fix the root cause ASAP!!!%s',
refdesc $_[0],
( ( $INC{'Devel/StackTrace.pm'} and ! do { local $@; eval { Devel::StackTrace->VERSION(2) } } )
lib/DBIx/Class/_Util.pm
if (
$want and $fr->[0] =~ /^(?:DBIx::Class|DBICx::)/
) {
DBIx::Class::Exception->throw( sprintf (
"Improper use of %s instance in list context at %s line %d\n\n Stacktrace starts",
$argdesc, @{$fr}[1,2]
), 'with_stacktrace');
}
my $mark = [];
weaken ( $list_ctx_ok_stack_marker = $mark );
$mark;
}
}
sub fail_on_internal_call {
my ($fr, $argdesc);
{
package DB;
$fr = [ caller(1) ];
$argdesc = ref $DB::args[0]
t/51threadtxn.t
use strict;
use warnings;
use Test::More;
plan skip_all => 'DBIC does not actively support threads before perl 5.8.5'
if $] < '5.008005';
use DBIx::Class::Optional::Dependencies ();
use Scalar::Util 'weaken';
use lib qw(t/lib);
use DBICTest;
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_PG_${_}" } qw/DSN USER PASS/};
plan skip_all => 'Set $ENV{DBICTEST_PG_DSN}, _USER and _PASS to run this test'
. ' (note: creates and drops a table named artist!)' unless ($dsn && $user);
plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('rdbms_pg')
unless DBIx::Class::Optional::Dependencies->req_ok_for ('rdbms_pg');
t/51threadtxn.t
$parent_rs = $schema->resultset('CD')->search({ year => 1901 });
is ($parent_rs->count, 2);
};
ok(!$@) or diag "Creation eval failed: $@";
my @children;
while(@children < $num_children) {
my $newthread = async {
my $tid = threads->tid;
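# the coderef handed to txn_do below deliberately works with weak copies,
# so the saved closure does not itself pin the schema or the parent
# resultset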
weaken(my $weak_schema = $schema);
weaken(my $weak_parent_rs = $parent_rs);
$schema->txn_do(sub {
my $child_rs = $weak_schema->resultset('CD')->search({ year => 1901 });
my $row = $weak_parent_rs->next;
if($row && $row->get_column('artist') =~ /^(?:123|456)$/) {
$weak_schema->resultset('CD')->create({ title => "test success $tid", artist => $tid, year => scalar(@children) });
}
});
sleep(1); # tasty crashes without this
};
die "Thread creation failed: $! $@" if !defined $newthread;
t/52leaks.t
*CORE::GLOBAL::bless = sub { goto $bless_override };
}
use strict;
use warnings;
use Test::More;
use lib qw(t/lib);
use DBICTest::RunMode;
use DBICTest::Util::LeakTracer qw(populate_weakregistry assert_empty_weakregistry visit_refs);
use Scalar::Util qw(weaken blessed reftype);
use DBIx::Class::_Util qw(hrefaddr sigwarn_silencer modver_gt_or_eq modver_gt_or_eq_and_lt);
BEGIN {
plan skip_all => "Your perl version $] appears to leak like a sieve - skipping test"
if DBIx::Class::_ENV_::PEEPEENESS;
}
my $TB = Test::More->builder;
if ($ENV{DBICTEST_IN_PERSISTENT_ENV}) {
# without this explicit close TB warns in END after a ->reset
t/52leaks.t
if ( modver_gt_or_eq( 'Test::More', '1.200' ) ) {
open ($TB->$_, '>&', *STDERR)
for qw( failure_output todo_output );
open ($TB->output, '>&', *STDOUT);
}
# so done_testing can work on every persistent pass
$TB->reset;
}
# this is what holds all weakened refs to be checked for leakage
my $weak_registry = {};
# whether or not to invoke IC::DT
my $has_dt;
# Skip the heavy-duty leak tracing when just doing an install
unless (DBICTest::RunMode->is_plain) {
# redefine the bless override so that we can catch each and every object created
no warnings qw/redefine once/;
t/52leaks.t
or push @circreffed, $r;
}
if (@circreffed) {
is (scalar @circreffed, 1, 'One resultset expected to leak');
# this is useless on its own, it is to showcase the circref-diag
# and eventually test it when it is operational
local $TODO = 'Needs Data::Entangled or somesuch - see RT#82942';
while (@circreffed) {
weaken (my $r = shift @circreffed);
populate_weakregistry( (my $mini_registry = {}), $r );
assert_empty_weakregistry( $mini_registry );
$r->result_source(undef);
}
}
}
assert_empty_weakregistry ($weak_registry);
t/71mysql.t
use strict;
use warnings;
use Test::More;
use Test::Exception;
use Test::Warn;
use DBI::Const::GetInfoType;
use Scalar::Util qw/weaken/;
use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_mysql')
unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_mysql');
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MYSQL_${_}" } qw/DSN USER PASS/};
t/71mysql.t
{
local $ENV{MOD_PERL} = 'boogiewoogie';
my $schema = DBICTest::Schema->connect($dsn, $user, $pass);
ok (! $schema->storage->_get_dbh->{mysql_auto_reconnect}, 'mysql_auto_reconnect unset regardless of ENV' );
# Make sure hardcore forking action still works even if mysql_auto_reconnect
# is true (test inspired by ether)
my $schema_autorecon = DBICTest::Schema->connect($dsn, $user, $pass, { mysql_auto_reconnect => 1 });
my $orig_dbh = $schema_autorecon->storage->_get_dbh;
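# keep only a weak reference to the original handle - this test variable
# must not be what keeps the $dbh alive across the fork exercised below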
weaken $orig_dbh;
ok ($orig_dbh, 'Got weak $dbh ref');
ok ($orig_dbh->{mysql_auto_reconnect}, 'mysql_auto_reconnect is properly set if explicitly requested' );
my $rs = $schema_autorecon->resultset('Artist');
my ($parent_in, $child_out);
pipe( $parent_in, $child_out ) or die "Pipe open failed: $!";
my $pid = fork();
if (! defined $pid ) {
t/74mssql.t
use strict;
use warnings;
use Test::More;
use Test::Exception;
use Scalar::Util 'weaken';
use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;
my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_MSSQL_${_}" } qw/DSN USER PASS/};
plan skip_all => 'Set $ENV{DBICTEST_MSSQL_DSN}, _USER and _PASS to run this test'
unless ($dsn);
t/74mssql.t
eval { $dbh->do("DROP TABLE money_test") };
$dbh->do(<<'SQL');
CREATE TABLE money_test (
id INT IDENTITY PRIMARY KEY,
amount MONEY NULL
)
SQL
});
my $rs = $schema->resultset('Money');
weaken(my $rs_cp = $rs); # nested closure refcounting is an utter mess in perl
my $row;
lives_ok {
$row = $rs->create({ amount => 100 });
} 'inserted a money value';
cmp_ok $rs->find($row->id)->amount, '==', 100, 'money value round-trip';
lives_ok {
$row->update({ amount => 200 });
t/74mssql.t
$rs->create({ amount => 1000 + $_ }) for (1..3);
my $artist_rs = $schema->resultset('Artist')->search({
name => { -like => 'Artist %' }
});
$rs->next;
my $map = [ ['Artist 1', '1002.00'], ['Artist 2', '1003.00'] ];
weaken(my $a_rs_cp = $artist_rs);
local $TODO = 'Transaction handling with multiple active statements will '
.'need eager cursor support.'
unless $wrapper eq 'no_transaction';
lives_and {
my @results;
$wrappers->{$wrapper}->( sub {
while (my $money = $rs_cp->next) {
t/99dbic_sqlt_parser.t
package
)) {
warnings_exist {
push @schemas, create_schema({
args => { parser_args => { $parser_args_key => $s } }
});
} qr/\Qparser_args => {\E.+?is deprecated.+\Q@{[__FILE__]}/,
"deprecated crazy parser_arg '$parser_args_key' warned";
}
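# weaken-then-check idiom: if nothing else still holds the schema object,
# the weak reference collapses to undef, which is what the ok() below asserts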
Scalar::Util::weaken ($s);
ok (!$s, 'Schema not leaked');
isa_ok ($_, 'SQL::Translator::Schema', "SQLT schema object $_ produced")
for @schemas;
}
# make sure classname-style works
lives_ok { isa_ok (create_schema ({ schema => 'DBICTest::Schema' }), 'SQL::Translator::Schema', 'SQLT schema object produced') };
t/lib/DBICTest/BaseSchema.pm
package #hide from pause
DBICTest::BaseSchema;
use strict;
use warnings;
use base qw(DBICTest::Base DBIx::Class::Schema);
use Fcntl qw(:DEFAULT :seek :flock);
use Scalar::Util 'weaken';
use Time::HiRes 'sleep';
use DBICTest::Util::LeakTracer qw(populate_weakregistry assert_empty_weakregistry);
use DBICTest::Util qw( local_umask await_flock dbg DEBUG_TEST_CONCURRENCY_LOCKS );
use namespace::clean;
sub capture_executed_sql_bind {
my ($self, $cref) = @_;
$self->throw_exception("Expecting a coderef to run") unless ref $cref eq 'CODE';
t/lib/DBICTest/BaseSchema.pm
lock_name => "$lockpath",
};
}
}
if ($INC{'Test/Builder.pm'}) {
populate_weakregistry ( $weak_registry, $self->storage );
my $cur_connect_call = $self->storage->on_connect_call;
# without this weaken() the sub added below *sometimes* leaks
# ( can't reproduce locally :/ )
weaken( my $wlocker = $locker );
$self->storage->on_connect_call([
(ref $cur_connect_call eq 'ARRAY'
? @$cur_connect_call
: ($cur_connect_call || ())
),
[ sub { populate_weakregistry( $weak_registry, $_[0]->_dbh ) } ],
( !$wlocker ? () : (
require Data::Dumper::Concise
and
t/lib/DBICTest/Util/LeakTracer.pm
package DBICTest::Util::LeakTracer;
use warnings;
use strict;
use Carp;
use Scalar::Util qw(isweak weaken blessed reftype);
use DBIx::Class::_Util qw(refcount hrefaddr refdesc);
use DBIx::Class::Optional::Dependencies;
use Data::Dumper::Concise;
use DBICTest::Util qw( stacktrace visit_namespaces );
use constant {
CV_TRACING => !DBICTest::RunMode->is_plain && DBIx::Class::Optional::Dependencies->req_ok_for ('test_leaks_heavy'),
SKIP_SCALAR_REFS => ( "$]" < 5.008004 ),
};
use base 'Exporter';
t/lib/DBICTest/Util/LeakTracer.pm
croak 'Expecting a registry hashref' unless ref $weak_registry eq 'HASH';
croak 'Target is not a reference' unless length ref $target;
my $refaddr = hrefaddr $target;
# a registry could be fed to itself or another registry via recursive sweeps
return $target if $reg_of_regs{$refaddr};
return $target if SKIP_SCALAR_REFS and reftype($target) eq 'SCALAR';
weaken( $reg_of_regs{ hrefaddr($weak_registry) } = $weak_registry )
unless( $reg_of_regs{ hrefaddr($weak_registry) } );
# an explicit "garbage collection" pass every time we store a ref
# if we do not do this the registry will keep growing, appearing as if
# the traced program is continuously, slowly leaking memory
for my $reg (values %reg_of_regs) {
(defined $reg->{$_}{weakref}) or delete $reg->{$_}
for keys %$reg;
}
if (! defined $weak_registry->{$refaddr}{weakref}) {
$weak_registry->{$refaddr} = {
stacktrace => stacktrace(1),
weakref => $target,
};
weaken( $weak_registry->{$refaddr}{weakref} );
$refs_traced++;
}
my $desc = refdesc $target;
$weak_registry->{$refaddr}{slot_names}{$desc} = 1;
if ($note) {
$note =~ s/\s*\Q$desc\E\s*//g;
$weak_registry->{$refaddr}{slot_names}{$note} = 1;
}
t/lib/DBICTest/Util/LeakTracer.pm
# Regenerate the slot names on a thread spawn
sub CLONE {
my @individual_regs = grep { scalar keys %{$_||{}} } values %reg_of_regs;
%reg_of_regs = ();
for my $reg (@individual_regs) {
my @live_slots = grep { defined $_->{weakref} } values %$reg
or next;
$reg = {}; # get a fresh hashref in the new thread ctx
weaken( $reg_of_regs{hrefaddr($reg)} = $reg );
for my $slot_info (@live_slots) {
my $new_addr = hrefaddr $slot_info->{weakref};
# replace all slot names
$slot_info->{slot_names} = { map {
my $name = $_;
$name =~ s/\(0x[0-9A-F]+\)/sprintf ('(%s)', $new_addr)/ieg;
($name => 1);
} keys %{$slot_info->{slot_names}} };
t/storage/savepoints.t
use strict;
use warnings;
use Test::More;
use Test::Exception;
use DBIx::Class::Optional::Dependencies;
use DBIx::Class::_Util qw(sigwarn_silencer scope_guard);
use Scalar::Util 'weaken';
use lib qw(t/lib);
use DBICTest;
{
package # moar hide
DBICTest::SVPTracerObj;
use base 'DBIx::Class::Storage::Statistics';
t/storage/savepoints.t
'commit from inner transaction');
is $ars->search({ name => 'in_inner_transaction_rolling_back' })->first,
undef,
'rollback from inner transaction';
# make sure a fresh txn will work after above
$schema->storage->txn_do(sub { ok "noop" } );
### Make sure a non-existent savepoint release doesn't infloop itself
{
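# a weak alias is used inside the closure so the coderef handed to txn_do
# does not itself end up keeping the schema alive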
weaken( my $s = $schema );
throws_ok {
$s->storage->txn_do(sub { $s->svp_release('wibble') })
} qr/Savepoint 'wibble' does not exist/,
"Calling svp_release on a non-existant savepoint throws expected error"
;
}
### cleanupz
$schema->storage->dbh->do ("DROP TABLE artist");