Algorithm-LibLinear


Changes


0.24    Mon 4 Apr 2022
        - Bundle LIBLINEAR 2.44.

0.23    Wed 18 Nov 2020
        - Bundle LIBLINEAR 2.42, featuring automatic switching from dual CD
          solver to primal Newton solver when convergence is slow.

0.22    Fri 28 Aug 2020
        - Algorithm::LibLinear: Make find_parameters() / find_cost_parameter()
          throw an error when the given solver type is unsupported.
        - Fix documentation errors.
        - Fix a broken test case.

0.21    Sun 23 Aug 2020
        - Bundle LIBLINEAR 2.41.
        - Bump minimum required Perl version from 5.14 to 5.16 to use the
          T_AVREF_REFCOUNT_FIXED typemap.
        - Fix memory leaks in several methods which return ArrayRefs.
        - Switch from Smart::Args to Smart::Args::TypeTiny for fewer
          dependencies.
        - Algorithm::LibLinear: Support new "ONECLASS_SVM" solver.

Changes


0.06    Mon Jun 3 2013
        - Fix predict() rounding the result value even when the model is
          trained for regression.
        - predict_probability() now returns an empty array when the given
          model is not for logistic regression-based classification.

0.05    Sat Jun 1 2013
        - Fix incorrect behaviour of predict_values() due to my
          misunderstanding.
        - Relax type checking to not raise an error on regression problems.

0.04    Sun May 19 2013
        - Fix incorrect sparse vector scaling.

0.03    Fri May 3 2013
        - Fix a test to pass on environments where long double is enabled.

0.02    Sat Mar 10 2013
        - Release for CPAN.

lib/Algorithm/LibLinear.pm

        my $self => $InstanceOfPackage,
        my $data_set => InstanceOf['Algorithm::LibLinear::DataSet'],
        my $num_folds => Int;

    my $targets = $self->training_parameter->cross_validation(
        $data_set->as_problem(bias => $self->bias),
        $num_folds,
    );
    my @labels = map { $_->{label} } @{ $data_set->as_arrayref };
    if ($self->is_regression_solver) {
        my $total_square_error = sum map {
            ($targets->[$_] - $labels[$_]) ** 2;
        } (0 .. $data_set->size - 1);
        # Returns mean squared error.
        # TODO: Squared correlation coefficient (see train.c in LIBLINEAR.)
        return $total_square_error / $data_set->size;
    } else {
        my $num_corrects =
            grep { $targets->[$_] == $labels[$_] } (0 .. $data_set->size - 1);
        return $num_corrects / $data_set->size;
    }
}
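
# A minimal usage sketch for cross_validation(), assuming a prepared
# $data_set (an Algorithm::LibLinear::DataSet) that is not shown in this
# excerpt: with a regression solver the return value is the mean squared
# error computed above, and with a classification solver it is the
# fraction of correctly predicted labels.
#
#   my $learner = Algorithm::LibLinear->new(solver => 'L2R_L2LOSS_SVR');
#   my $mse = $learner->cross_validation(data_set => $data_set, num_folds => 5);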

sub epsilon { $_[0]->training_parameter->epsilon }

sub find_cost_parameter {

lib/Algorithm/LibLinear/Model.pm

=head2 predict(feature => $hashref)

In case of classification, returns the predicted class label.

In case of regression, returns the value of the estimated function for the given feature.

=head2 predict_probability(feature => $hashref)

Returns an ArrayRef of the probabilities that the given feature belongs to each class.

This method will raise an error if the model is not a classifier based on logistic regression (i.e., C<< not $classifier->is_probability_model >>).

=head2 predict_values(feature => $hashref)

Returns an ArrayRef of decision values of each class (higher is better).

=head2 save(filename => $path)

Writes the model out as a LIBLINEAR model file.
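
A minimal usage sketch of the methods above, assuming a prepared
C<$data_set> and the C<'L2R_LR'> solver name (neither appears in this
excerpt):

  my $learner = Algorithm::LibLinear->new(solver => 'L2R_LR');
  my $model = $learner->train(data_set => $data_set);

  my $label = $model->predict(feature => +{ 1 => 0.5, 3 => 1.0 });
  my $probabilities =
      $model->predict_probability(feature => +{ 1 => 0.5, 3 => 1.0 });
  my $decision_values =
      $model->predict_values(feature => +{ 1 => 0.5, 3 => 1.0 });

  $model->save(filename => 'trained.model');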

=cut

src/liblinear.xs
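
# ll_save() writes the wrapped struct model to disk via LIBLINEAR's
# save_model(), croaking with strerror(errno) on failure; ll_DESTROY()
# releases the underlying model when the Perl wrapper object is
# garbage-collected.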


void
ll_save(self, filename)
    struct model *self;
    const char *filename;
CODE:
    if (save_model(filename, self) != 0) {
        Perl_croak(
          aTHX_
          "Error occured during save process: %s",
          errno == 0 ? "unknown error" : strerror(errno)
        );
    }

void
ll_DESTROY(self)
    struct model *self;
CODE:
    free_and_destroy_model(&self);

MODULE = Algorithm::LibLinear  PACKAGE = Algorithm::LibLinear::TrainingParameter  PREFIX = ll_

src/liblinear/linear.cpp
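
// Excerpt from the bundled LIBLINEAR parameter search: C is swept
// geometrically (C *= ratio each iteration), keeping the best
// cross-validation accuracy for classification solvers or the lowest
// mean squared error for L2R_L2LOSS_SVR, and stopping early once the
// weight vector has stayed unchanged for five consecutive C values.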

			if(current_rate > *best_score)
			{
				*best_C = param_tmp->C;
				*best_score = current_rate;
			}

			info("log2c=%7.2f\trate=%g\n",log(param_tmp->C)/log(2.0),100.0*current_rate);
		}
		else if(param_tmp->solver_type == L2R_L2LOSS_SVR)
		{
			double total_error = 0.0;
			for(i=0; i<prob->l; i++)
			{
				double y = prob->y[i];
				double v = target[i];
				total_error += (v-y)*(v-y);
			}
			double current_error = total_error/prob->l;
			if(current_error < *best_score)
			{
				*best_C = param_tmp->C;
				*best_score = current_error;
			}

			info("log2c=%7.2f\tp=%7.2f\tMean squared error=%g\n",log(param_tmp->C)/log(2.0),param_tmp->p,current_error);
		}

		num_unchanged_w++;
		if(num_unchanged_w == 5)
			break;
		param_tmp->C = param_tmp->C*ratio;
	}

	if(param_tmp->C > max_C)
		info("WARNING: maximum C reached.\n");

src/liblinear/linear.cpp

	{
		int j;
		for(j=0; j<nr_w; j++)
			fprintf(fp, "%.17g ", model_->w[i*nr_w+j]);
		fprintf(fp, "\n");
	}

	setlocale(LC_ALL, old_locale);
	free(old_locale);

	if (ferror(fp) != 0 || fclose(fp) != 0) return -1;
	else return 0;
}

//
// FSCANF helps to handle fscanf failures.
// Its do-while block avoids the ambiguity when
// if (...)
//    FSCANF();
// is used
//

src/liblinear/linear.cpp

	for(i=0; i<w_size; i++)
	{
		int j;
		for(j=0; j<nr_w; j++)
			FSCANF(fp, "%lf ", &model_->w[i*nr_w+j]);
	}

	setlocale(LC_ALL, old_locale);
	free(old_locale);

	if (ferror(fp) != 0 || fclose(fp) != 0) return NULL;

	return model_;
}

int get_nr_feature(const model *model_)
{
	return model_->nr_feature;
}

int get_nr_class(const model *model_)

t/Algorithm/LibLinear/FeatureScaling.t

{
    my $data_set_with_additional_feature = Algorithm::LibLinear::DataSet->new(
        data_set => [ +{ feature => +{ 1 => 2.0, 6 => 1.0, }, label => 1, } ],
    );
    lives_and {
        my $scaled_data_set =
            $scale->scale(data_set => $data_set_with_additional_feature);
        my $scaled_feature = $scaled_data_set->as_arrayref->[0]{feature};
        is 0 + keys %$scaled_feature, 3,
            'New feature should be omitted since scaling factor is unknown.';
    } 'Scaling data set with unknown feature should not raise error.';
}

{
    my $data_set = Algorithm::LibLinear::DataSet->new(
        data_set => [
            +{ feature => +{ 1 => 1, 3 => 1 }, label => 1 },
            +{ feature => +{ 2 => 2 }, label => 1 },
            +{ feature => +{ 2 => 1 }, label => 1 },
        ]
    );

t/Algorithm/LibLinear/TrainingParameter.t

            ],
        ],
    );
}

my $data_set = Algorithm::LibLinear::DataSet->new(data_set => [
    +{ feature => +{}, label => 1 },
]);

my @cases = (
    +{ constructor_params => [ cost => 0 ], error_pattern => qr/C <= 0/ },
    +{ constructor_params => [ epsilon => 0 ], error_pattern => qr/eps <= 0/ },
    +{
      constructor_params => [
        loss_sensitivity => -1,
        solver => 'L2R_L2LOSS_SVR',
      ],
      error_pattern => qr/p < 0/,
    },
);
for my $case (@cases) {
    my $learner = Algorithm::LibLinear->new(@{ $case->{constructor_params} });
    throws_ok {
        $learner->train(data_set => $data_set);
    } $case->{error_pattern};
    throws_ok {
        $learner->cross_validation(data_set => $data_set, num_folds => 5);
    } $case->{error_pattern};
}

done_testing;


