# This file was automatically generated by Dist::Zilla::Plugin::ModuleBuild v6.008.
use strict;
use warnings;
use Module::Build 0.28;
my %module_build_args = (
"build_requires" => {
"Module::Build" => "0.28"
},
"configure_requires" => {
"Module::Build" => "0.28"
},
"dist_abstract" => "Perl wrapper for XGBoost library L<https://github.com/dmlc/xgboost>",
"dist_author" => [
"Pablo Rodr\x{ed}guez Gonz\x{e1}lez <pablo.rodriguez.gonzalez\@gmail.com>"
],
"dist_name" => "AI-XGBoost",
"dist_version" => "0.11",
"license" => "apache",
"module_name" => "AI::XGBoost",
"recursive_test_files" => 1,
"requires" => {
"Alien::XGBoost" => 0,
"Carp" => 0,
"Exception::Class" => 0,
"Exporter::Easy" => 0,
"FFI::Platypus" => 0,
"Moose" => 0,
"NativeCall" => 0,
"namespace::autoclean" => 0,
"perl" => "5.010",
"strict" => 0,
"utf8" => 0,
"warnings" => 0
},
"test_requires" => {
"Test::More" => 0,
"Test::Most" => 0
}
);
my %fallback_build_requires = (
"Module::Build" => "0.28",
"Test::More" => 0,
"Test::Most" => 0
);
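# Module::Build 0.4004 was the first release to understand test_requires;
# on older versions fall back to listing the test prerequisites as build_requires.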
unless ( eval { Module::Build->VERSION(0.4004) } ) {
delete $module_build_args{test_requires};
$module_build_args{build_requires} = \%fallback_build_requires;
}
my $build = Module::Build->new(%module_build_args);
$build->create_build_script;
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"license" : [
"apache_2_0"
],
"meta-spec" : {
"url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec",
"version" : 2
},
"name" : "AI-XGBoost",
"prereqs" : {
"build" : {
"requires" : {
"Module::Build" : "0.28"
}
},
"configure" : {
"requires" : {
"Module::Build" : "0.28"
}
},
"develop" : {
"requires" : {
"Pod::Coverage::TrustPod" : "0",
"Pod::Weaver" : "0",
"Pod::Weaver::Section::Contributors" : "0",
"Test::Pod" : "1.41",
"Test::Pod::Coverage" : "1.08",
"Test::Synopsis" : "0"
}
},
"runtime" : {
"requires" : {
"Alien::XGBoost" : "0",
"Carp" : "0",
"Exception::Class" : "0",
"Exporter::Easy" : "0",
"FFI::Platypus" : "0",
"Moose" : "0",
"NativeCall" : "0",
"namespace::autoclean" : "0",
"perl" : "5.010",
"strict" : "0",
"utf8" : "0",
"warnings" : "0"
}
},
"test" : {
"requires" : {
"Test::More" : "0",
"Test::Most" : "0"
}
}
},
"release_status" : "stable",
"resources" : {
"bugtracker" : {
"web" : "https://github.com/pablrod/p5-AI-XGBoost/issues"
},
"homepage" : "https://github.com/pablrod/p5-AI-XGBoost",
"repository" : {
"type" : "git",
"url" : "https://github.com/pablrod/p5-AI-XGBoost.git",
"web" : "https://github.com/pablrod/p5-AI-XGBoost"
}
},
---
abstract: 'Perl wrapper for XGBoost library L<https://github.com/dmlc/xgboost>'
author:
- 'Pablo Rodríguez González <pablo.rodriguez.gonzalez@gmail.com>'
build_requires:
Module::Build: '0.28'
Test::More: '0'
Test::Most: '0'
configure_requires:
Module::Build: '0.28'
dynamic_config: 0
generated_by: 'Dist::Zilla version 6.008, CPAN::Meta::Converter version 2.150005'
license: apache
meta-spec:
url: http://module-build.sourceforge.net/META-spec-v1.4.html
version: '1.4'
name: AI-XGBoost
requires:
Alien::XGBoost: '0'
Carp: '0'
Exception::Class: '0'
Exporter::Easy: '0'
FFI::Platypus: '0'
Moose: '0'
NativeCall: '0'
namespace::autoclean: '0'
perl: '5.010'
strict: '0'
utf8: '0'
warnings: '0'
resources:
bugtracker: https://github.com/pablrod/p5-AI-XGBoost/issues
homepage: https://github.com/pablrod/p5-AI-XGBoost
repository: https://github.com/pablrod/p5-AI-XGBoost.git
version: '0.11'
x_contributors:
- 'Ruben <me@ruben.tech>'
x_serialization_backend: 'YAML::Tiny version 1.69'
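The prerequisites above are plain data; as an aside, they can be read back programmatically, for example with L<CPAN::Meta> (a sketch, not part of this distribution):
use v5.10;
use CPAN::Meta;
# Load the distribution metadata and list its runtime requirements
my $meta = CPAN::Meta->load_file('META.json');
my $reqs = $meta->effective_prereqs->requirements_for( 'runtime', 'requires' );
say "$_ => ", $reqs->requirements_for_module($_) for sort $reqs->required_modules;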
[PodWeaver]
[PodSyntaxTests]
[PodCoverageTests]
[AutoPrereqs]
[PerlTidy]
perltidyrc = .perltidyrc
[Test::Perl::Critic]
[Prereqs]
NativeCall = 0
[Prereqs / TestRequires]
Test::Most = 0 ; for done_testing
[Prereqs / DevelopRequires]
Pod::Weaver = 0
Pod::Weaver::Section::Contributors = 0
examples/basic.pl
use aliased 'AI::XGBoost::DMatrix';
use AI::XGBoost qw(train);
# We are going to solve a binary classification problem:
# Mushroom poisonous or not
my $train_data = DMatrix->From(file => 'agaricus.txt.train');
my $test_data = DMatrix->From(file => 'agaricus.txt.test');
# With XGBoost we can solve this problem using the 'gbtree' booster
# (gradient boosted regression trees) with the 'binary:logistic'
# objective (logistic regression for binary classification)
# The XGBoost tree booster has a lot of parameters that we can tune
# (https://github.com/dmlc/xgboost/blob/master/doc/parameter.md)
my $booster = train(data => $train_data, number_of_rounds => 10, params => {
objective => 'binary:logistic',
eta => 1.0,
max_depth => 2,
silent => 1
});
# For binary classification, predictions are probability scores in [0, 1]
# indicating how likely the label is to be positive (1 in the first column of agaricus.txt.test)
my $predictions = $booster->predict(data => $test_data);
say join "\n", @$predictions[0 .. 10];
examples/capi_raw.pl
use strict;
use warnings;
use AI::XGBoost::CAPI::RAW;
use FFI::Platypus;
# Create the train and test DMatrix handles from the agaricus example files
my ( $dtrain, $dtest ) = ( 0, 0 );
AI::XGBoost::CAPI::RAW::XGDMatrixCreateFromFile( 'agaricus.txt.train', 0, \$dtrain );
AI::XGBoost::CAPI::RAW::XGDMatrixCreateFromFile( 'agaricus.txt.test', 0, \$dtest );
my $booster = 0;
AI::XGBoost::CAPI::RAW::XGBoosterCreate( [$dtrain] , 1, \$booster);
for my $iter (0 .. 10) {
AI::XGBoost::CAPI::RAW::XGBoosterUpdateOneIter($booster, $iter, $dtrain);
}
my $out_len = 0;
my $out_result = 0;
AI::XGBoost::CAPI::RAW::XGBoosterPredict($booster, $dtest, 0, 0, \$out_len, \$out_result);
my $ffi = FFI::Platypus->new();
my $predictions = $ffi->cast(opaque => "float[$out_len]", $out_result);
#say join "\n", @$predictions;
AI::XGBoost::CAPI::RAW::XGBoosterFree($booster);
AI::XGBoost::CAPI::RAW::XGDMatrixFree($dtrain);
AI::XGBoost::CAPI::RAW::XGDMatrixFree($dtest);
examples/iris.pl
use aliased 'AI::XGBoost::DMatrix';
use AI::XGBoost qw(train);
use Data::Dataset::Classic::Iris;
# We are going to solve a multiclass classification problem:
# determining the plant species from a set of flower measurements
# XGBoost uses numbers for classes, so we encode the species names
my %class = (
setosa => 0,
versicolor => 1,
virginica => 2
);
my $iris = Data::Dataset::Classic::Iris::get();
# Split train and test, label and features
my $train_dataset = [map {$iris->{$_}} grep {$_ ne 'species'} keys %$iris];
my $test_dataset = [map {$iris->{$_}} grep {$_ ne 'species'} keys %$iris];
sub transpose {
# Transposing without using PDL, Data::Table, Data::Frame or other modules
# to keep minimal dependencies
my $array = shift;
my @aux = ();
for my $row (@$array) {
for my $column (0 .. scalar @$row - 1) {
lib/AI/XGBoost.pm
use aliased 'AI::XGBoost::DMatrix';
use AI::XGBoost qw(train);
# We are going to solve a binary classification problem:
# Mushroom poisonous or not
my $train_data = DMatrix->From(file => 'agaricus.txt.train');
my $test_data = DMatrix->From(file => 'agaricus.txt.test');
# With XGBoost we can solve this problem using the 'gbtree' booster
# (gradient boosted regression trees) with the 'binary:logistic'
# objective (logistic regression for binary classification)
# The XGBoost tree booster has a lot of parameters that we can tune
# (https://github.com/dmlc/xgboost/blob/master/doc/parameter.md)
my $booster = train(data => $train_data, number_of_rounds => 10, params => {
objective => 'binary:logistic',
eta => 1.0,
max_depth => 2,
silent => 1
});
# For binary classification, predictions are probability scores in [0, 1]
# indicating how likely the label is to be positive (1 in the first column of agaricus.txt.test)
my $predictions = $booster->predict(data => $test_data);
say join "\n", @$predictions[0 .. 10];
use aliased 'AI::XGBoost::DMatrix';
use AI::XGBoost qw(train);
use Data::Dataset::Classic::Iris;
# We are going to solve a multiclass classification problem:
# determining the plant species from a set of flower measurements
# XGBoost uses numbers for classes, so we encode the species names
my %class = (
setosa => 0,
versicolor => 1,
virginica => 2
);
my $iris = Data::Dataset::Classic::Iris::get();
# Split train and test, label and features
my $train_dataset = [map {$iris->{$_}} grep {$_ ne 'species'} keys %$iris];
my $test_dataset = [map {$iris->{$_}} grep {$_ ne 'species'} keys %$iris];
sub transpose {
# Transposing without using PDL, Data::Table, Data::Frame or other modules
# to keep minimal dependencies
my $array = shift;
my @aux = ();
for my $row (@$array) {
for my $column (0 .. scalar @$row - 1) {
lib/AI/XGBoost.pm
my $predictions = $booster->predict(data => $test_data);
=head1 DESCRIPTION
Perl wrapper for XGBoost library.
The easiest way to use the wrapper is through C<train>, but first
the data to be used must be wrapped in a C<DMatrix> object.
This is a work in progress; feedback, comments, issues, suggestions and
pull requests are welcome!
The XGBoost library is used via L<Alien::XGBoost>. That means it is downloaded,
compiled and installed automatically if it's not available on your system.
=head1 FUNCTIONS
=head2 train
Performs gradient boosting using the data and parameters passed, returning a trained booster.
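For example, something along these lines (mirroring the synopsis; the file name is only illustrative):
use aliased 'AI::XGBoost::DMatrix';
use AI::XGBoost qw(train);
my $train_data = DMatrix->From(file => 'agaricus.txt.train');
my $booster    = train(
    data             => $train_data,
    number_of_rounds => 10,
    params           => { objective => 'binary:logistic', max_depth => 2 },
);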
lib/AI/XGBoost.pm
=head2 VERSIONS
=over 4
=item 0.2
Full C API, "easy" to use, with PDL support, as L<AI::XGBoost::CAPI>.
"Easy" means clients don't have to use L<FFI::Platypus> or modules
dealing with C structures directly.
=item 0.25
Alien package for libxgboost.so/xgboost.dll
=item 0.3
Object oriented API Moose based with DMatrix and Booster classes
=item 0.4
Complete object oriented API
=item 0.5
Use perl signatures (L<https://metacpan.org/pod/distribution/perl/pod/perlexperiment.pod#Subroutine-signatures>)
=back
=head1 SEE ALSO
=over 4
=item L<AI::MXNet>
=item L<FFI::Platypus>
lib/AI/XGBoost/Booster.pm
my %args = @_;
my ( $dtrain, $grad, $hess ) = @args{qw(dtrain grad hess)};
XGBoosterBoostOneIter( $self->_handle, $dtrain, $grad, $hess );
return $self;
}
sub predict {
my $self = shift;
my %args = @_;
my $data = $args{'data'};
my $result = XGBoosterPredict( $self->_handle, $data->handle );
my $result_size = scalar @$result;
my $matrix_rows = $data->num_row;
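# XGBoosterPredict returns a flat arrayref; if it holds more values than rows
# (e.g. one score per class for multiclass objectives) reshape it into one
# arrayref per row, otherwise return it as-is.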
if ( $result_size != $matrix_rows && $result_size % $matrix_rows == 0 ) {
my $col_size = $result_size / $matrix_rows;
return [ map { [ @$result[ $_ * $col_size .. $_ * $col_size + $col_size - 1 ] ] } 0 .. $matrix_rows - 1 ];
}
return $result;
}
sub set_param {
my $self = shift;
my ( $name, $value ) = @_;
XGBoosterSetParam( $self->_handle, $name, $value );
return $self;
}
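# Example (hypothetical values): $booster->set_param( max_depth => 4 )
# returns $self, so calls can be chained.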
sub set_attr {
lib/AI/XGBoost/Booster.pm
use aliased 'AI::XGBoost::DMatrix';
use AI::XGBoost qw(train);
# We are going to solve a binary classification problem:
# Mushroom poisonous or not
my $train_data = DMatrix->From(file => 'agaricus.txt.train');
my $test_data = DMatrix->From(file => 'agaricus.txt.test');
# With XGBoost we can solve this problem using the 'gbtree' booster
# (gradient boosted regression trees) with the 'binary:logistic'
# objective (logistic regression for binary classification)
# The XGBoost tree booster has a lot of parameters that we can tune
# (https://github.com/dmlc/xgboost/blob/master/doc/parameter.md)
my $booster = train(data => $train_data, number_of_rounds => 10, params => {
objective => 'binary:logistic',
eta => 1.0,
max_depth => 2,
silent => 1
});
# For binary classification, predictions are probability scores in [0, 1]
# indicating how likely the label is to be positive (1 in the first column of agaricus.txt.test)
my $predictions = $booster->predict(data => $test_data);
say join "\n", @$predictions[0 .. 10];
=head1 DESCRIPTION
Booster objects control training, prediction and evaluation
Work In Progress, the API may change. Comments and suggestions are welcome!
=head1 METHODS
=head2 update
Update one iteration
=head3 Parameters
=over 4
lib/AI/XGBoost/CAPI.pm
XGBoosterSetParam
XGBoosterSetAttr
XGBoosterGetAttr
XGBoosterGetAttrNames
XGBoosterUpdateOneIter
XGBoosterBoostOneIter
XGBoosterPredict
XGBoosterFree
XGBoosterDumpModel
XGBoosterDumpModelEx
XGBoosterDumpModelWithFeatures
XGBoosterDumpModelExWithFeatures
)
]
]
);
use AI::XGBoost::CAPI::RAW;
use FFI::Platypus;
use Exception::Class ( 'XGBoostException' );
our $VERSION = '0.11'; # VERSION
lib/AI/XGBoost/CAPI.pm
}
sub XGDMatrixSetFloatInfo {
my ( $matrix, $info, $data ) = @_;
_CheckCall( AI::XGBoost::CAPI::RAW::XGDMatrixSetFloatInfo( $matrix, $info, $data, scalar @$data ) );
}
sub XGDMatrixGetFloatInfo {
my ( $matrix, $info ) = @_;
my $out_len = 0;
my $out_result = 0;
_CheckCall( AI::XGBoost::CAPI::RAW::XGDMatrixGetFloatInfo( $matrix, $info, \$out_len, \$out_result ) );
my $ffi = FFI::Platypus->new();
return $ffi->cast( opaque => "float[$out_len]", $out_result );
}
sub XGDMatrixSetUintInfo {
my ( $matrix, $info, $data ) = @_;
_CheckCall( AI::XGBoost::CAPI::RAW::XGDMatrixSetUintInfo( $matrix, $info, $data, scalar @$data ) );
}
sub XGDMatrixGetUintInfo {
my ( $matrix, $info ) = @_;
my $out_len = 0;
my $out_result = 0;
_CheckCall( AI::XGBoost::CAPI::RAW::XGDMatrixGetUintInfo( $matrix, $info, \$out_len, \$out_result ) );
my $ffi = FFI::Platypus->new();
return $ffi->cast( opaque => "uint32[$out_len]", $out_result );
}
sub XGDMatrixSaveBinary {
my ( $matrix, $filename, $silent ) = @_;
$silent //= 1;
_CheckCall( AI::XGBoost::CAPI::RAW::XGDMatrixSaveBinary( $matrix, $filename, $silent ) );
}
sub XGDMatrixSliceDMatrix {
my ( $matrix, $list_of_indices ) = @_;
lib/AI/XGBoost/CAPI.pm
if ($success) {
my $ffi = FFI::Platypus->new();
return $ffi->cast( opaque => "string", $value );
}
return ();
}
sub XGBoosterGetAttrNames {
my ($booster) = @_;
my $out_len = 0;
my $out_result = 0;
_CheckCall( AI::XGBoost::CAPI::RAW::XGBoosterGetAttrNames( $booster, \$out_len, \$out_result ) );
my $ffi = FFI::Platypus->new();
$out_result = $ffi->cast( opaque => "opaque[$out_len]", $out_result );
return [ map { $ffi->cast( opaque => "string", $_ ) } @$out_result ];
}
sub XGBoosterUpdateOneIter {
my ( $booster, $iter, $train_matrix ) = @_;
_CheckCall( AI::XGBoost::CAPI::RAW::XGBoosterUpdateOneIter( $booster, $iter, $train_matrix ) );
return ();
}
sub XGBoosterBoostOneIter {
my ( $booster, $train_matrix, $gradient, $hessian ) = @_;
my $out_result = 0;
_CheckCall(
AI::XGBoost::CAPI::RAW::XGBoosterBoostOneIter(
$booster, $train_matrix, $gradient, $hessian, scalar(@$gradient)
)
);
return ();
}
sub XGBoosterEvalOneIter {
my ( $booster, $iter, $matrices, $matrices_names ) = @_;
my $out_result = 0;
my $number_of_matrices = scalar @$matrices;
my $ffi = FFI::Platypus->new();
my $array_of_opaque_matrices_names = [ map { $ffi->cast( string => "opaque", $_ ) } @$matrices_names ];
_CheckCall(
AI::XGBoost::CAPI::RAW::XGBoosterEvalOneIter(
$booster, $iter, $matrices, $array_of_opaque_matrices_names,
$number_of_matrices, \$out_result
)
);
$out_result = $ffi->cast( opaque => "opaque[$number_of_matrices]", $out_result );
return [ map { $ffi->cast( opaque => "string", $_ ) } @$out_result ];
}
sub XGBoosterPredict {
my ( $booster, $data_matrix, $option_mask, $ntree_limit ) = @_;
$option_mask //= 0;
$ntree_limit //= 0;
my $out_len = 0;
my $out_result = 0;
_CheckCall(
AI::XGBoost::CAPI::RAW::XGBoosterPredict( $booster, $data_matrix, $option_mask,
$ntree_limit, \$out_len, \$out_result
)
);
my $ffi = FFI::Platypus->new();
return $ffi->cast( opaque => "float[$out_len]", $out_result );
}
sub XGBoosterDumpModel {
my ( $booster, $feature_map, $with_stats ) = @_;
$feature_map //= "";
$with_stats //= 1;
my $out_len = 0;
my $out_result = 0;
_CheckCall(
AI::XGBoost::CAPI::RAW::XGBoosterDumpModel( $booster, $feature_map, $with_stats, \$out_len, \$out_result ) );
my $ffi = FFI::Platypus->new();
$out_result = $ffi->cast( opaque => "opaque[$out_len]", $out_result );
return [ map { $ffi->cast( opaque => "string", $_ ) } @$out_result ];
}
sub XGBoosterDumpModelEx {
my ( $booster, $feature_map, $with_stats, $format ) = @_;
$feature_map //= "";
$with_stats //= 1;
my $out_len = 0;
my $out_result = 0;
_CheckCall(
AI::XGBoost::CAPI::RAW::XGBoosterDumpModelEx(
$booster, $feature_map, $with_stats, $format, \$out_len, \$out_result
)
);
my $ffi = FFI::Platypus->new();
$out_result = $ffi->cast( opaque => "opaque[$out_len]", $out_result );
return [ map { $ffi->cast( opaque => "string", $_ ) } @$out_result ];
}
sub XGBoosterDumpModelWithFeatures {
my ( $booster, $feature_names, $feature_types, $with_stats ) = @_;
$with_stats //= 1;
my $out_len = 0;
my $out_result = 0;
my $ffi = FFI::Platypus->new();
my $number_of_features = scalar @$feature_names;
my $array_of_opaque_feature_names = [ map { $ffi->cast( string => "opaque", $_ ) } @$feature_names ];
my $array_of_opaque_feature_types = [ map { $ffi->cast( string => "opaque", $_ ) } @$feature_types ];
_CheckCall(
AI::XGBoost::CAPI::RAW::XGBoosterDumpModelWithFeatures( $booster, $number_of_features,
$array_of_opaque_feature_names,
$array_of_opaque_feature_types,
$with_stats, \$out_len, \$out_result
)
);
$out_result = $ffi->cast( opaque => "opaque[$out_len]", $out_result );
return [ map { $ffi->cast( opaque => "string", $_ ) } @$out_result ];
}
sub XGBoosterDumpModelExWithFeatures {
my ( $booster, $feature_names, $feature_types, $with_stats, $format ) = @_;
my $out_len = 0;
my $out_result = 0;
my $ffi = FFI::Platypus->new();
my $number_of_features = scalar @$feature_names;
my $array_of_opaque_feature_names = [ map { $ffi->cast( string => "opaque", $_ ) } @$feature_names ];
my $array_of_opaque_feature_types = [ map { $ffi->cast( string => "opaque", $_ ) } @$feature_types ];
_CheckCall(
AI::XGBoost::CAPI::RAW::XGBoosterDumpModelExWithFeatures( $booster, $number_of_features,
$array_of_opaque_feature_names,
$array_of_opaque_feature_types,
$with_stats, $format, \$out_len, \$out_result
)
);
$out_result = $ffi->cast( opaque => "opaque[$out_len]", $out_result );
return [ map { $ffi->cast( opaque => "string", $_ ) } @$out_result ];
}
sub XGBoosterFree {
my ($booster) = @_;
_CheckCall( AI::XGBoost::CAPI::RAW::XGBoosterFree($booster) );
return ();
}
# _CheckCall
#
lib/AI/XGBoost/CAPI.pm
XGBoosterFree($booster);
XGDMatrixFree($dtrain);
XGDMatrixFree($dtest);
=head1 DESCRIPTION
Perlified wrapper for the C API
=head2 Error handling
XGBoost C API functions return an integer status to signal the presence or absence of an error.
In this module errors are reported instead by throwing exceptions built with L<Exception::Class>.
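For example, a call can be wrapped in C<eval> and the exception inspected (a sketch; the file name is made up and the function is imported the same way as the functions used in the synopsis):
use AI::XGBoost::CAPI qw(XGDMatrixCreateFromFile);
my $matrix = eval { XGDMatrixCreateFromFile('no_such_file.txt') };
if ( my $error = $@ ) {
    warn 'XGBoost error: ' . $error->error
        if ref $error && $error->isa('XGBoostException');
}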
=head1 FUNCTIONS
=head2 XGDMatrixCreateFromFile
Load a data matrix
Parameters:
lib/AI/XGBoost/CAPI.pm
=back
=item ntree_limit
Limit the number of trees used for prediction (only valid for boosted trees).
When set to 0, all trees are used.
=back
Returns an arrayref with the predictions corresponding to the rows of the data matrix.
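A sketch, reusing the booster and test matrix handles from the synopsis (option_mask and ntree_limit default to 0):
my $predictions = XGBoosterPredict( $booster, $dtest );
printf "%d predictions, first one: %f\n", scalar @$predictions, $predictions->[0];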
=head2 XGBoosterDumpModel
=head2 XGBoosterDumpModelEx
=head2 XGBoosterDumpModelWithFeatures
=head2 XGBoosterDumpModelExWithFeatures
=head2 XGBoosterFree
Free booster object
Parameters:
=over 4
=item booster
lib/AI/XGBoost/CAPI/RAW.pm
$ffi->attach( XGBoosterSaveModel => [qw(opaque string)] => 'int' );
$ffi->attach( XGBoosterLoadModelFromBuffer => [qw(opaque opaque uint64)] => 'int' );
$ffi->attach( XGBoosterGetModelRaw => [qw(opaque uint64* opaque*)] => 'int' );
$ffi->attach( XGBoosterDumpModel => [qw(opaque string int uint64* opaque*)] => 'int' );
$ffi->attach( XGBoosterDumpModelEx => [qw(opaque string int string uint64* opaque*)] => 'int' );
$ffi->attach( XGBoosterDumpModelWithFeatures => [qw(opaque int opaque[] opaque[] int uint64* opaque*)] => 'int' );
$ffi->attach(
XGBoosterDumpModelExWithFeatures => [qw(opaque int opaque[] opaque[] int string uint64* opaque*)] => 'int' );
$ffi->attach( XGBoosterSetAttr => [qw(opaque string string)] => 'int' );
$ffi->attach( XGBoosterGetAttr => [qw(opaque string opaque* int*)] => 'int' );
$ffi->attach( XGBoosterGetAttrNames => [qw(opaque uint64* opaque*)] => 'int' );
$ffi->attach( XGBoosterLoadRabitCheckpoint => [qw(opaque int)] => 'int' );
$ffi->attach( XGBoosterSaveRabitCheckpoint => [qw(opaque)] => 'int' );
lib/AI/XGBoost/CAPI/RAW.pm
my $booster = 0;
AI::XGBoost::CAPI::RAW::XGBoosterCreate( [$dtrain] , 1, \$booster);
for my $iter (0 .. 10) {
AI::XGBoost::CAPI::RAW::XGBoosterUpdateOneIter($booster, $iter, $dtrain);
}
my $out_len = 0;
my $out_result = 0;
AI::XGBoost::CAPI::RAW::XGBoosterPredict($booster, $dtest, 0, 0, \$out_len, \$out_result);
my $ffi = FFI::Platypus->new();
my $predictions = $ffi->cast(opaque => "float[$out_len]", $out_result);
#say join "\n", @$predictions;
AI::XGBoost::CAPI::RAW::XGBoosterFree($booster);
AI::XGBoost::CAPI::RAW::XGDMatrixFree($dtrain);
AI::XGBoost::CAPI::RAW::XGDMatrixFree($dtest);
=head1 DESCRIPTION
Wrapper for the C API.
lib/AI/XGBoost/CAPI/RAW.pm
=item nrow
number of rows
=item ncol
number of columns
=item missing
the value used to represent missing values
=item out
created dmatrix
=back
=head2 XGDMatrixCreateFromMat_omp
Create matrix content from dense matrix
lib/AI/XGBoost/CAPI/RAW.pm
=item nrow
number of rows
=item ncol
number of columns
=item missing
the value used to represent missing values
=item out
created dmatrix
=item nthread
number of threads (up to maximum cores available, if <=0 use all cores)
=back
=head2 XGDMatrixSliceDMatrix
Create a new dmatrix from sliced content of existing matrix
Parameters:
=over 4
lib/AI/XGBoost/CAPI/RAW.pm
Parameters:
=over 4
=item handle
the handle to the DMatrix
=item out
The address to hold number of rows.
=back
=head2 XGDMatrixNumCol
Get number of cols.
Parameters:
=over 4
=item handle
the handle to the DMatrix
=item out
The address to hold number of cols.
=back
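Both calls follow the same out-parameter pattern, for example (a sketch reusing the $dtrain handle from the synopsis):
my ( $rows, $cols ) = ( 0, 0 );
AI::XGBoost::CAPI::RAW::XGDMatrixNumRow( $dtrain, \$rows );
AI::XGBoost::CAPI::RAW::XGDMatrixNumCol( $dtrain, \$cols );
print "$rows rows, $cols columns\n";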
=head2 XGDMatrixSaveBinary
Save a data matrix to a binary file
Parameters:
=over 4
lib/AI/XGBoost/CAPI/RAW.pm
=item handle
an instance of a data matrix
=item field
field name
=item out_len
used to set result length
=item out_dptr
pointer to the result
=back
=head2 XGDMatrixGetUIntInfo
Get uint32 info vector from matrix
Parameters:
=over 4
lib/AI/XGBoost/CAPI/RAW.pm
=item field
field name
=item out_len
The length of the field
=item out_dptr
pointer to the result
=back
=head2 XGDMatrixFree
Free space in data matrix
=head2 XGBoosterCreate
Create xgboost learner
lib/AI/XGBoost/CAPI/RAW.pm
=item dmats
matrices that are set to be cached
=item len
length of dmats
=item out
handle to the result booster
=back
=head2 XGBoosterFree
Free obj in handle
Parameters:
=over 4
lib/AI/XGBoost/CAPI/RAW.pm
=back
=item ntree_limit
Limit the number of trees used for prediction (only valid for boosted trees).
When set to 0, all trees are used.
=item out_len
used to store the length of the returned result
=item out_result
used to set a pointer to array
=back
=head2 XGBoosterLoadModel
Load a model from an existing file
Parameters:
lib/AI/XGBoost/CAPI/RAW.pm
=back
=head2 XGBoosterLoadModelFromBuffer
=head2 XGBoosterGetModelRaw
=head2 XGBoosterDumpModel
=head2 XGBoosterDumpModelEx
=head2 XGBoosterDumpModelWithFeatures
=head2 XGBoosterDumpModelExWithFeatures
=head2 XGBoosterSetAttr
=head2 XGBoosterGetAttr
=head2 XGBoosterGetAttrNames
=head2 XGBoosterLoadRabitCheckpoint
=head2 XGBoosterSaveRabitCheckpoint
lib/AI/XGBoost/DMatrix.pm
=head1 SYNOPSIS
use aliased 'AI::XGBoost::DMatrix';
my $train_data = DMatrix->FromFile(filename => 'agaricus.txt.train');
=head1 DESCRIPTION
XGBoost DMatrix perl model
Work In Progress, the API may change. Comments and suggestions are welcome!
=head1 METHODS
=head2 From
Construct a DMatrix from different sources. Based on the parameters given,
dispatches to the correct From* method.
Refer to the From* methods to see what can be done; a short sketch follows the parameter list below.
lib/AI/XGBoost/DMatrix.pm
=head3 Parameters
=over 4
=item filename
File to read
=item silent
Suppress messages
=back
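For example (a sketch; both calls should end up building the same matrix):
my $from_file  = DMatrix->FromFile( filename => 'agaricus.txt.train', silent => 1 );
my $dispatched = DMatrix->From( file => 'agaricus.txt.train' );    # presumably routed to FromFile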
=head2 FromMat
Construct a DMatrix from a two-dimensional array
=head3 Parameters
=over 4
misc/using_capi.c
XGDMatrixCreateFromFile("agaricus.txt.test", 0, &dtest);
XGDMatrixCreateFromFile("agaricus.txt.train", 0, &dtrain);
DMatrixHandle cache[] = {dtrain};
BoosterHandle booster;
XGBoosterCreate(cache, 1, &booster);
for (int iter = 0; iter < 11; iter++) {
XGBoosterUpdateOneIter(booster, iter, dtrain);
}
bst_ulong out_len;
const float *out_result;
XGBoosterPredict(booster, dtest, 0, 0, &out_len, &out_result);
printf("Length: %ld\n", out_len);
for (int output = 0; output < out_len; output++) {
printf("%f\n", out_result[output]);
}
}