Algorithm-LibLinear
view release on metacpan or search on metacpan
src/liblinear.xs view on Meta::CPAN
struct model *self;
HV *feature_hash;
CODE:
RETVAL = newAV();
if (check_probability_model(self)) {
struct feature_node *feature_vector = hv2feature(aTHX_ feature_hash);
double *estimated_probabilities;
int num_classes = get_nr_class(self);
Newx(estimated_probabilities, num_classes, double);
predict_probability(self, feature_vector, estimated_probabilities);
av_extend(RETVAL, num_classes - 1);
for (int i = 0; i < num_classes; ++i) {
av_push(RETVAL, newSVnv(estimated_probabilities[i]));
}
Safefree(feature_vector);
Safefree(estimated_probabilities);
}
OUTPUT:
RETVAL
AV *
ll_predict_values(self, feature_hash)
struct model *self;
HV *feature_hash;
CODE:
struct feature_node *feature_vector = hv2feature(aTHX_ feature_hash);
int num_classes = get_nr_class(self);
int num_decision_values =
(num_classes == 2 && self->param.solver_type != MCSVM_CS) ?
1 : num_classes;
double *decision_values;
Newx(decision_values, num_decision_values, double);
predict_values(self, feature_vector, decision_values);
RETVAL = newAV();
av_extend(RETVAL, num_decision_values - 1);
for (int i = 0; i < num_decision_values; ++i) {
av_push(RETVAL, newSVnv(decision_values[i]));
}
Safefree(decision_values);
Safefree(feature_vector);
OUTPUT:
RETVAL
double
ll_rho(self)
struct model *self;
CODE:
/* Thin accessor: returns the rho constant of the model's decision
 * function via LIBLINEAR's get_decfun_rho(). */
RETVAL = get_decfun_rho(self);
OUTPUT:
RETVAL
void
ll_save(self, filename)
struct model *self;
const char *filename;
CODE:
    /* Persist the trained model to |filename| via LIBLINEAR's
     * save_model(), which returns non-zero on failure. On error,
     * report strerror(errno) when errno was set by the failed I/O;
     * otherwise fall back to a generic message. */
    if (save_model(filename, self) != 0) {
        Perl_croak(
            aTHX_
            "Error occurred during save process: %s",
            errno == 0 ? "unknown error" : strerror(errno)
        );
    }
void
ll_DESTROY(self)
struct model *self;
CODE:
/* Perl-level destructor: free_and_destroy_model() releases the model's
 * internal arrays and the struct itself, and nulls the local pointer. */
free_and_destroy_model(&self);
MODULE = Algorithm::LibLinear PACKAGE = Algorithm::LibLinear::TrainingParameter PREFIX = ll_
PROTOTYPES: DISABLE
struct parameter *
ll_new(klass, solver_type, epsilon, cost, weight_labels, weights, loss_sensitivity, nu, regularize_bias)
int solver_type;
double epsilon;
double cost;
AV *weight_labels;
AV *weights;
double loss_sensitivity;
double nu;
bool regularize_bias;
CODE:
    /* Construct a LIBLINEAR struct parameter from Perl-level training
     * options. |weight_labels| and |weights| must be parallel arrays of
     * the same length. Croaks (after freeing the partially-built
     * parameter) on any inconsistency. */
    int num_weights = av_len(weight_labels) + 1;
    if (av_len(weights) + 1 != num_weights) {
        Perl_croak(
            aTHX_
            "The number of weight labels is not equal to the number of"
            " weights.");
    }
    // |init_sol| is initialized within |alloc_parameter|.
    RETVAL = alloc_parameter(aTHX_ num_weights);
    RETVAL->solver_type = solver_type;
    RETVAL->eps = epsilon;
    RETVAL->C = cost;
    RETVAL->p = loss_sensitivity;
    RETVAL->nu = nu;
    RETVAL->regularize_bias = regularize_bias ? 1 : 0;
    dXCPT;
    XCPT_TRY_START {
        int *weight_labels_ = RETVAL->weight_label;
        double *weights_ = RETVAL->weight;
        for (int i = 0; i < num_weights; ++i) {
            /* av_fetch() returns NULL for a hole in a sparse array;
             * dereferencing it unchecked would segfault. Croak instead —
             * the XCPT handler below frees RETVAL and rethrows. */
            SV **label_sv = av_fetch(weight_labels, i, 0);
            SV **weight_sv = av_fetch(weights, i, 0);
            if (label_sv == NULL || weight_sv == NULL) {
                Perl_croak(
                    aTHX_
                    "Missing element in weight labels or weights.");
            }
            weight_labels_[i] = SvIV(*label_sv);
            weights_[i] = SvNV(*weight_sv);
        }
    } XCPT_TRY_END
    XCPT_CATCH {
        free_parameter(aTHX_ RETVAL);
        XCPT_RETHROW;
    }
OUTPUT:
    RETVAL
AV *
ll_cross_validation(self, problem_, num_folds)
struct parameter *self;
struct problem *problem_;
int num_folds;
( run in 0.429 second using v1.01-cache-2.11-cpan-796a6f069b2 )