Algorithm-LibLinear
lib/Algorithm/LibLinear.pm
    L2R_LR_DUAL => 7,
    L2R_L2LOSS_SVR => 11,
    L2R_L2LOSS_SVR_DUAL => 12,
    L2R_L1LOSS_SVR_DUAL => 13,
    ONECLASS_SVM => 21,
);
sub new {
    args
        my $class => ClassName,
        my $bias => +{ isa => Num, default => -1.0, },
        my $cost => +{ isa => Num, default => 1, },
        my $epsilon => +{ isa => Num, optional => 1, },
        my $loss_sensitivity => +{ isa => Num, default => 0.1, },
        my $nu => +{ isa => Num, default => 0.5, },
        my $regularize_bias => +{ isa => Bool, default => 1, },
        my $solver => +{
            isa => $SolverDescriptor,
            default => 'L2R_L2LOSS_SVC_DUAL',
        },
        my $weights => +{ isa => ArrayRef[$ClassWeight], default => [], };
    # The default stopping tolerance depends on the chosen solver.
    $epsilon //= $default_eps{$solver};
    # Split per-class weight specs into the parallel label/weight arrays
    # expected by the underlying training parameter object.
    my (@weight_labels, @weights);
    for my $weight (@$weights) {
        push @weight_labels, $weight->{label};
        push @weights, $weight->{weight};
    }
    my $training_parameter = Algorithm::LibLinear::TrainingParameter->new(
        $solvers{$solver},
        $epsilon,
        $cost,
        \@weight_labels,
        \@weights,
        $loss_sensitivity,
        $nu,
        $regularize_bias,
    );
    bless +{
        bias => $bias,
        training_parameter => $training_parameter,
    } => $class;
}
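# Illustrative usage sketch (comment only, not part of the module). Given the
# constructor arguments validated above, a classifier might be configured as:
#
#   use Algorithm::LibLinear;
#
#   my $learner = Algorithm::LibLinear->new(
#       solver  => 'L2R_LR',    # L2-regularized logistic regression
#       cost    => 1,
#       epsilon => 0.01,        # omit to fall back to the per-solver default
#       weights => [
#           +{ label => 1,  weight => 1.0 },
#           +{ label => -1, weight => 2.0 },
#       ],
#   );
#
# The 'L2R_LR' solver name and the parameter values are assumptions made for
# illustration; any key of %solvers is accepted.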
sub bias { $_[0]->{bias} }
sub cost { $_[0]->training_parameter->cost }
sub cross_validation {
    args
        my $self => $InstanceOfPackage,
        my $data_set => InstanceOf['Algorithm::LibLinear::DataSet'],
        my $num_folds => Int;
    my $targets = $self->training_parameter->cross_validation(
        $data_set->as_problem(bias => $self->bias),
        $num_folds,
    );
    my @labels = map { $_->{label} } @{ $data_set->as_arrayref };
    if ($self->is_regression_solver) {
        my $total_square_error = sum map {
            ($targets->[$_] - $labels[$_]) ** 2;
        } (0 .. $data_set->size - 1);
        # Returns mean squared error.
        # TODO: Squared correlation coefficient (see train.c in LIBLINEAR.)
        return $total_square_error / $data_set->size;
    } else {
        my $num_corrects =
            grep { $targets->[$_] == $labels[$_] } (0 .. $data_set->size - 1);
        return $num_corrects / $data_set->size;
    }
}
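# Illustrative usage sketch for cross_validation (comment only). It assumes
# the companion Algorithm::LibLinear::DataSet class takes a data_set arrayref
# of +{ feature => \%index_to_value, label => $label } entries; see that
# class's documentation for the exact constructor.
#
#   use Algorithm::LibLinear::DataSet;
#
#   my $data_set = Algorithm::LibLinear::DataSet->new(data_set => [
#       +{ feature => +{ 1 => 0.5, 2 => 1.0 }, label => 1 },
#       +{ feature => +{ 1 => -0.3, 3 => 0.8 }, label => -1 },
#       # ...
#   ]);
#
#   # Fraction of correctly predicted labels for classification solvers,
#   # mean squared error for regression solvers (see the branches above).
#   my $score = $learner->cross_validation(data_set => $data_set, num_folds => 5);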
sub epsilon { $_[0]->training_parameter->epsilon }
sub find_cost_parameter {
    args
        my $self => $InstanceOfPackage,
        my $data_set => InstanceOf['Algorithm::LibLinear::DataSet'],
        my $initial => +{ isa => Num, default => -1.0 },
        my $num_folds => Int,
        my $update => +{ isa => Bool, default => 0, };
    my ($cost, undef, $accuracy) = @{
        $self->find_parameters(
            data_set => $data_set,
            initial_cost => $initial,
            initial_loss_sensitivity => -1.0,
            num_folds => $num_folds,
            update => $update,
        )
    };
    return [ $cost, $accuracy ];
}
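# Illustrative usage sketch (comment only). find_cost_parameter runs a cross
# validation based search over the cost parameter and returns an arrayref of
# the chosen cost and the accuracy it achieved:
#
#   my ($best_cost, $accuracy) = @{
#       $learner->find_cost_parameter(
#           data_set  => $data_set,
#           num_folds => 5,
#           update    => 1,   # presumably writes the found cost back into the training parameters
#       )
#   };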
sub find_parameters {
    args
        my $self => $InstanceOfPackage,
        my $data_set => InstanceOf['Algorithm::LibLinear::DataSet'],
        my $initial_cost => +{ isa => Num, default => -1.0, },
        my $initial_loss_sensitivity => +{ isa => Num, default => -1.0, },
        my $num_folds => Int,
        my $update => +{ isa => Bool, default => 0, };
    my $solver_type = $self->training_parameter->solver_type;
    if (none { $solver_type == $_ } @solvers{
        qw/L2R_LR L2R_L2LOSS_SVC L2R_L2LOSS_SVR/,
    }) {
        my %solver_types = reverse %solvers;
        my $solver = $solver_types{$solver_type};
        Carp::croak('Operation is unsupported with this solver: ', $solver);
    }
    $self->training_parameter->find_parameters(
        $data_set->as_problem(bias => $self->bias),
        $num_folds,
        $initial_cost,
        $initial_loss_sensitivity,
        $update,
    );
}
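# Illustrative usage sketch (comment only). find_parameters is supported only
# for the L2R_LR, L2R_L2LOSS_SVC and L2R_L2LOSS_SVR solvers; an initial value
# of -1.0 asks the underlying LIBLINEAR routine to search for that parameter.
# Judging from how find_cost_parameter destructures the result above, the
# return value is assumed to be an arrayref of (cost, loss sensitivity,
# accuracy):
#
#   my ($cost, $loss_sensitivity, $accuracy) = @{
#       $learner->find_parameters(
#           data_set                 => $data_set,
#           initial_cost             => -1.0,
#           initial_loss_sensitivity => -1.0,   # loss sensitivity applies to the SVR solver
#           num_folds                => 5,
#           update                   => 0,
#       )
#   };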
sub is_regression_solver { $_[0]->training_parameter->is_regression_solver }
sub loss_sensitivity { $_[0]->training_parameter->loss_sensitivity }
sub training_parameter { $_[0]->{training_parameter} }