Algorithm-LibLinear
src/liblinear.xs
TYPEMAP: <<'EOT'
TYPEMAP
AV * T_AVREF_REFCOUNT_FIXED
struct model * T_LIBLINEAR_MODEL
struct parameter * T_LIBLINEAR_TRAINING_PARAMETER
struct problem * T_LIBLINEAR_PROBLEM

INPUT
T_LIBLINEAR_MODEL
    if (SvROK($arg) &&
        sv_derived_from($arg, \"Algorithm::LibLinear::Model::Raw\")) {
        IV tmp = SvIV((SV*)SvRV($arg));
        $var = INT2PTR($type,tmp);
    }
    else {
        Perl_croak(aTHX_ \"%s: %s is not of type %s\",
            ${$ALIAS?\q[GvNAME(CvGV(cv))]:\qq[\"$pname\"]},
            \"$var\", \"$ntype\");
    }
T_LIBLINEAR_TRAINING_PARAMETER
    if (SvROK($arg) &&
        sv_derived_from($arg, \"Algorithm::LibLinear::TrainingParameter\")) {
        IV tmp = SvIV((SV*)SvRV($arg));
        $var = INT2PTR($type,tmp);
    }
    else {
        Perl_croak(aTHX_ \"%s: %s is not of type %s\",
            ${$ALIAS?\q[GvNAME(CvGV(cv))]:\qq[\"$pname\"]},
            \"$var\", \"$ntype\");
    }
T_LIBLINEAR_PROBLEM
    if (SvROK($arg) &&
        sv_derived_from($arg, \"Algorithm::LibLinear::Problem\")) {
        IV tmp = SvIV((SV*)SvRV($arg));
        $var = INT2PTR($type,tmp);
    }
    else {
        Perl_croak(aTHX_ \"%s: %s is not of type %s\",
            ${$ALIAS?\q[GvNAME(CvGV(cv))]:\qq[\"$pname\"]},
            \"$var\", \"$ntype\");
    }

OUTPUT
T_LIBLINEAR_MODEL
    sv_setref_pv($arg, \"Algorithm::LibLinear::Model::Raw\", (void*)$var);
T_LIBLINEAR_TRAINING_PARAMETER
    sv_setref_pv(
        $arg, \"Algorithm::LibLinear::TrainingParameter\", (void*)$var);
T_LIBLINEAR_PROBLEM
    sv_setref_pv($arg, \"Algorithm::LibLinear::Problem\", (void*)$var);
EOT
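The INPUT/OUTPUT entries above follow the stock T_PTROBJ pattern: a raw C pointer is stashed as the IV of a blessed scalar reference on the way out and recovered, after a class check, on the way back in. A minimal sketch of that round-trip outside the typemap, assuming an SV *sv and a struct model *model already exist:

    /* OUTPUT direction: bless the pointer into the Raw model class. */
    sv_setref_pv(sv, "Algorithm::LibLinear::Model::Raw", (void *)model);

    /* INPUT direction: verify the class, then unpack the IV back into a pointer. */
    if (SvROK(sv) && sv_derived_from(sv, "Algorithm::LibLinear::Model::Raw")) {
        IV tmp = SvIV((SV *)SvRV(sv));
        model = INT2PTR(struct model *, tmp);
    }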
PROTOTYPES: DISABLE
struct model *
ll_train(klass, problem_, parameter_)
struct problem *problem_;
struct parameter *parameter_;
CODE:
validate_parameter(aTHX_ problem_, parameter_);
RETVAL = train(problem_, parameter_);
OUTPUT:
RETVAL
struct model *
ll_load(klass, filename)
const char *filename;
CODE:
RETVAL = load_model(filename);
if (!RETVAL) {
Perl_croak(aTHX_ "Failed to load a model from file: %s.", filename);
}
OUTPUT:
RETVAL
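For reference, ll_train and ll_load wrap LIBLINEAR's train() and load_model() from <linear.h>; load_model() returns NULL on failure, which the XSUB above converts into a croak. A hedged sketch of the same check in plain C ("model.txt" is a placeholder path; free_and_destroy_model is LIBLINEAR's own destructor, not shown in this excerpt):

    #include <stdio.h>
    #include <linear.h>

    int main(void) {
        struct model *m = load_model("model.txt");      /* NULL on failure */
        if (m == NULL) {
            fprintf(stderr, "Failed to load a model from file: %s.\n", "model.txt");
            return 1;
        }
        printf("classes: %d, features: %d\n", get_nr_class(m), get_nr_feature(m));
        free_and_destroy_model(&m);
        return 0;
    }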
double
ll_bias(self, label)
struct model *self;
int label;
CODE:
RETVAL = get_decfun_bias(self, label);
OUTPUT:
RETVAL
AV *
ll_class_labels(self)
struct model *self;
CODE:
RETVAL = newAV();
av_extend(RETVAL, self->nr_class - 1);
for (int i = 0; i < self->nr_class; ++i) {
av_push(RETVAL, newSViv(self->label[i]));
}
OUTPUT:
RETVAL
double
ll_coefficient(self, feature, label)
struct model *self;
int feature;
int label;
CODE:
RETVAL = get_decfun_coef(self, feature, label);
OUTPUT:
RETVAL
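ll_bias and ll_coefficient expose get_decfun_bias and get_decfun_coef, i.e. the trained weights of the decision function for one class. As an illustration (not part of this file), the decision value for the class at label index k can be recomputed from those accessors; x is assumed to be a dense, 1-based feature array:

    #include <linear.h>

    /* dec_k(x) = sum_j coef(j, k) * x[j] + bias(k); feature indices are 1-based,
       and the bias term only matters for models trained with a bias feature. */
    double decision_value(const struct model *m, const double *x, int k) {
        double dec = get_decfun_bias(m, k);
        int j, nr_feature = get_nr_feature(m);
        for (j = 1; j <= nr_feature; ++j)
            dec += get_decfun_coef(m, j, k) * x[j];
        return dec;
    }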
bool
ll_is_oneclass_model(self)
struct model* self;
CODE:
RETVAL = check_oneclass_model(self);
OUTPUT:
RETVAL
bool
ll_is_probability_model(self)
struct model *self;
CODE:
RETVAL = check_probability_model(self);
OUTPUT:
RETVAL
bool
ll_is_regression_model(self)
struct model *self;
CODE:
RETVAL = check_regression_model(self);
OUTPUT:
RETVAL
int
ll_num_classes(self)
struct model *self;
CODE:
RETVAL = get_nr_class(self);
OUTPUT:
RETVAL
int
ll_num_features(self)
struct model *self;
CODE:
RETVAL = get_nr_feature(self);
OUTPUT:
RETVAL
SV *
ll_predict(self, feature_hash)
struct model *self;
HV *feature_hash;
CODE:
struct feature_node *feature_vector = hv2feature(aTHX_ feature_hash);
double prediction = predict(self, feature_vector);
Safefree(feature_vector);
RETVAL = is_regression_solver(&self->param) ?
newSVnv(prediction) : newSViv((int)prediction);
OUTPUT:
RETVAL
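hv2feature (defined earlier in the file, outside this excerpt) converts a Perl hash of feature index => value pairs into LIBLINEAR's sparse struct feature_node array. Built by hand in C, the vector {1 => 0.5, 3 => 1.0} and the predict() call consuming it would look roughly like this (a sketch; any bias node the real converter appends for biased models is omitted, and m stands for a trained struct model *):

    /* Indices must be ascending; the array ends with a sentinel whose index is -1. */
    struct feature_node x[] = {
        { 1, 0.5 },
        { 3, 1.0 },
        { -1, 0.0 }                     /* terminator */
    };
    double predicted = predict(m, x);   /* class label, or a real value for SVR models */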
AV *
ll_predict_probability(self, feature_hash)
struct model *self;
HV *feature_hash;
CODE:
RETVAL = newAV();
if (check_probability_model(self)) {
struct feature_node *feature_vector = hv2feature(aTHX_ feature_hash);
double *estimated_probabilities;
int num_classes = get_nr_class(self);
Newx(estimated_probabilities, num_classes, double);
predict_probability(self, feature_vector, estimated_probabilities);
av_extend(RETVAL, num_classes - 1);
for (int i = 0; i < num_classes; ++i) {
av_push(RETVAL, newSVnv(estimated_probabilities[i]));
}
Safefree(feature_vector);
Safefree(estimated_probabilities);
}
OUTPUT:
RETVAL
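predict_probability() works the same way but needs a caller-allocated buffer of get_nr_class() doubles, and is only meaningful for solvers that pass check_probability_model() (the logistic-regression solvers); the estimates come back in the same order as the label array that ll_class_labels reports. A fragment continuing the previous sketch (m and x as before, plus <stdio.h>/<stdlib.h>):

    int i, nr_class = get_nr_class(m);
    double *prob = malloc(nr_class * sizeof *prob);
    if (prob != NULL && check_probability_model(m)) {
        double predicted = predict_probability(m, x, prob);   /* also returns the label */
        for (i = 0; i < nr_class; ++i)
            printf("P(y = %d) = %g\n", m->label[i], prob[i]);
        (void)predicted;
    }
    free(prob);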
AV *
ll_predict_values(self, feature_hash)
struct model *self;
HV *feature_hash;
CODE:
struct feature_node *feature_vector = hv2feature(aTHX_ feature_hash);
int num_classes = get_nr_class(self);
int num_decision_values =
    num_classes == 2 && self->param.solver_type != MCSVM_CS
        ? 1 : num_classes;
double *decision_values;
Newx(decision_values, num_decision_values, double);
predict_values(self, feature_vector, decision_values);
RETVAL = newAV();
av_extend(RETVAL, num_decision_values - 1);
for (int i = 0; i < num_decision_values; ++i) {
av_push(RETVAL, newSVnv(decision_values[i]));
}
Safefree(decision_values);
Safefree(feature_vector);
OUTPUT:
RETVAL
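predict_values() fills one slot per internal weight vector: nr_class slots in general, but only one for a two-class model trained with anything other than the Crammer-Singer solver (MCSVM_CS), which is why the buffer is sized before the call. A direct C fragment, m and x as in the earlier sketches:

    int nr_class = get_nr_class(m);
    int nr_values = (nr_class == 2 && m->param.solver_type != MCSVM_CS)
        ? 1 : nr_class;
    double *dec = malloc(nr_values * sizeof *dec);
    double predicted = predict_values(m, x, dec);   /* fills dec[0 .. nr_values-1] */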
double
ll_rho(self)
struct model *self;
CODE:
RETVAL = get_decfun_rho(self);
OUTPUT:
RETVAL
void
ll_save(self, filename)
struct model *self;
const char *filename;
CODE:
if (save_model(filename, self) != 0) {
Perl_croak(
aTHX_
double *weights_ = RETVAL->weight;
for (int i = 0; i < num_weights; ++i) {
weight_labels_[i] = SvIV(*av_fetch(weight_labels, i, 0));
weights_[i] = SvNV(*av_fetch(weights, i, 0));
}
} XCPT_TRY_END
XCPT_CATCH {
free_parameter(aTHX_ RETVAL);
XCPT_RETHROW;
}
OUTPUT:
RETVAL
AV *
ll_cross_validation(self, problem_, num_folds)
struct parameter *self;
struct problem *problem_;
int num_folds;
CODE:
validate_parameter(aTHX_ problem_, self);
double *targets;
Newx(targets, problem_->l, double);
cross_validation(problem_, self, num_folds, targets);
RETVAL = newAV();
av_extend(RETVAL, problem_->l - 1);
for (int i = 0; i < problem_->l; ++i) {
av_push(RETVAL, newSVnv(targets[i]));
}
Safefree(targets);
OUTPUT:
RETVAL
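cross_validation() fills targets[i] with the value predicted for instance i while it was held out, so the usual follow-up is to compare against the true labels in prob->y for accuracy (or to accumulate squared error for regression solvers). A sketch, assuming an already-built struct problem *prob and struct parameter *param:

    double *targets = malloc(prob->l * sizeof *targets);
    int i, correct = 0;
    cross_validation(prob, param, 5, targets);          /* 5-fold */
    for (i = 0; i < prob->l; ++i)
        if (targets[i] == prob->y[i])
            ++correct;
    printf("CV accuracy: %.2f%%\n", 100.0 * correct / prob->l);
    free(targets);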
AV *
ll_find_parameters(self, problem_, num_folds, initial_C, initial_p, update)
struct parameter *self;
struct problem *problem_;
int num_folds;
double initial_C;
double initial_p;
bool update;
if (is_regression_model) {
self->p = best_p;
}
}
RETVAL = newAV();
av_push(RETVAL, newSVnv(best_C));
av_push(
RETVAL,
is_regression_model ? newSVnv(best_p) : newSVsv(&PL_sv_undef));
av_push(RETVAL, newSVnv(accuracy));
OUTPUT:
RETVAL
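ll_find_parameters appears to wrap LIBLINEAR's find_parameters() (added around LIBLINEAR 2.30), which cross-validates over a grid of C values (and, for the L2-loss SVR solver, p) and reports the best pair together with its score. A hedged sketch of the direct call; non-positive start values ask the library to choose its own starting points:

    double best_C, best_p, best_score;
    find_parameters(prob, param, 5, -1.0, -1.0, &best_C, &best_p, &best_score);
    /* best_score is CV accuracy for classification solvers and
       mean squared error for regression solvers. */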
bool
ll_is_regression_solver(self)
struct parameter *self;
CODE:
RETVAL = is_regression_solver(self);
OUTPUT:
RETVAL
double
ll_cost(self)
struct parameter *self;
CODE:
RETVAL = self->C;
OUTPUT:
RETVAL
double
ll_epsilon(self)
struct parameter *self;
CODE:
RETVAL = self->eps;
OUTPUT:
RETVAL
double
ll_loss_sensitivity(self)
struct parameter *self;
CODE:
RETVAL = self->p;
OUTPUT:
RETVAL
int
ll_solver_type(self)
struct parameter *self;
CODE:
RETVAL = self->solver_type;
OUTPUT:
RETVAL
AV *
ll_weights(self)
struct parameter *self;
CODE:
RETVAL = newAV();
av_extend(RETVAL, self->nr_weight - 1);
for (int i = 0; i < self->nr_weight; ++i) {
av_push(RETVAL, newSVnv(self->weight[i]));
}
OUTPUT:
RETVAL
AV *
ll_weight_labels(self)
struct parameter *self;
CODE:
RETVAL = newAV();
av_extend(RETVAL, self->nr_weight - 1);
for (int i = 0; i < self->nr_weight; ++i) {
av_push(RETVAL, newSViv(self->weight_label[i]));
}
OUTPUT:
RETVAL
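nr_weight, weight_label and weight are the per-class penalty weights of struct parameter: weight[i] multiplies C for the class whose label is weight_label[i]. Filling them directly in C looks like this (labels and factors are made up for illustration, and a struct parameter param is assumed):

    /* Penalize mistakes on class 1 three times as heavily as on class -1. */
    static int    labels[]  = { 1, -1 };
    static double weights[] = { 3.0, 1.0 };

    param.nr_weight    = 2;
    param.weight_label = labels;
    param.weight       = weights;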
void
ll_DESTROY(self)
struct parameter *self;
CODE:
free_parameter(aTHX_ self);
MODULE = Algorithm::LibLinear PACKAGE = Algorithm::LibLinear::Problem PREFIX = ll_
features_[i] =
hv2feature(aTHX_ feature_hash, max_feature_index, bias);
}
RETVAL->bias = bias;
RETVAL->n = max_feature_index;
} XCPT_TRY_END
XCPT_CATCH {
free_problem(aTHX_ RETVAL);
XCPT_RETHROW;
}
OUTPUT:
RETVAL
double
ll_bias(self)
struct problem *self;
CODE:
RETVAL = self->bias;
OUTPUT:
RETVAL
int
ll_data_set_size(self)
struct problem *self;
CODE:
RETVAL = self->l;
OUTPUT:
RETVAL
int
ll_num_features(self)
struct problem *self;
CODE:
RETVAL = self->n;
OUTPUT:
RETVAL
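These accessors expose the raw struct problem fields: l is the number of training instances, n the number of features, and bias the value of the appended bias feature (negative means none). A hand-built two-instance problem in C, for comparison with what the Problem constructor assembles from Perl data; when bias >= 0, LIBLINEAR expects n to count one extra bias feature and every row to end with a (n, bias) node before the terminator, which is presumably why hv2feature receives bias above:

    double y[] = { 1.0, -1.0 };                                     /* labels / targets */
    struct feature_node row0[] = { {1, 0.5}, {3, 1.0}, {-1, 0.0} };
    struct feature_node row1[] = { {2, 2.0}, {-1, 0.0} };
    struct feature_node *rows[] = { row0, row1 };

    struct problem prob;
    prob.l    = 2;        /* ll_data_set_size */
    prob.n    = 3;        /* ll_num_features: highest feature index in use */
    prob.bias = -1.0;     /* ll_bias: < 0 means no bias feature appended   */
    prob.y    = y;
    prob.x    = rows;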
void
ll_DESTROY(self)
struct problem *self;
CODE:
free_problem(aTHX_ self);