AI-Nerl
examples/digits/deep_digits.pl view on Meta::CPAN
use strict;
use warnings;
use v5.10;            # for 'say'
use PDL;
use PDL::IO::FITS;
use PDL::Constants 'E';
use lib 'lib';
use lib '../../lib';
use AI::Nerl;
use FindBin qw($Bin);
chdir $Bin;
unless (-e "t10k-labels-idx1-ubyte.fits"){ die <<"NODATA";}
pull this data by running get_digits.sh
convert it to FITS by running idx_to_fits.pl
NODATA
my $images = rfits('t10k-images-idx3-ubyte.fits');
my $labels = rfits('t10k-labels-idx1-ubyte.fits');
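# one-hot encode the labels: $y holds a 10-element indicator vector per example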
my $y = identity(10)->range($labels->transpose)->sever;
say 't10k data loaded';
my $nerl = AI::Nerl->new(
# type => image,dims=>[28,28],...
scale_input => 1/256,
);
$nerl->init_network(l1 => 784, l2 => 7, l3 => 10); # other options: method => 'batch', hidden => ..., etc.
my $prev_nerl = $nerl;
my $prev_cost = 10000;
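The excerpt stops before any training happens. Below is a minimal sketch of how $prev_nerl and $prev_cost might be used to keep the best network across a few hidden-layer sizes; the ->network accessor and ->cost method are assumptions for illustration, not AI::Nerl's documented interface, while train() is the method named in AI::Nerl::Network's comments.

# Sketch only; not from the distribution.
for my $hidden (5, 10, 20) {
    my $try = AI::Nerl->new(scale_input => 1/256);
    $try->init_network(l1 => 784, l2 => $hidden, l3 => 10);
    $try->network->train($images, $y);             # hypothetical accessor to the wrapped network
    my $cost = $try->network->cost($images, $y);   # hypothetical cost method
    if ($cost < $prev_cost) {                      # keep the cheapest model seen so far
        $prev_nerl = $try;
        $prev_cost = $cost;
    }
}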
examples/digits/digits.pl view on Meta::CPAN
use strict;
use warnings;
use v5.10;            # for 'say'
use PDL;
use PDL::IO::FITS;
use PDL::Constants 'E';
use lib 'lib';
use lib '../../lib';
use AI::Nerl;
use FindBin qw($Bin);
chdir $Bin;
unless (-e "t10k-labels-idx1-ubyte.fits"){ die <<"NODATA";}
pull this data by running get_digits.sh
convert it to FITS by running idx_to_fits.pl
NODATA
my $images = rfits('t10k-images-idx3-ubyte.fits');
my $labels = rfits('t10k-labels-idx1-ubyte.fits');
my $y = identity(10)->range($labels->transpose)->sever;
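# map the one-hot targets from {0,1} to {-1,1}, presumably to match a tanh-style output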
$y *= 2;
$y -= 1;
say 't10k data loaded';
my $nerl = AI::Nerl->new(
# type => image,dims=>[28,28],...
scale_input => 1/256,
# train_x => $images(0:99),
# train_y => $y(0:99),
# test_x => $images(8000:8999),
# test_y => $y(8000:8999),
# cv_x => $images(9000:9999),
# cv_y => $y(9000:9999),
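# Note: "$images(0:99)" above is PDL::NiceSlice syntax; with plain PDL the same
# train/test/cv slices could be written as follows (a sketch; split points copied
# from the commented-out arguments above):
#     my $train_x = $images->slice('0:99');
#     my $test_x  = $images->slice('8000:8999');
#     my $cv_x    = $images->slice('9000:9999');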
examples/digits/idx_to_fits.pl view on Meta::CPAN
use strict;
use warnings;
use PDL;
use PDL::NiceSlice;
use File::Slurp;
use PDL::IO::FITS;
use FindBin qw($Bin);
chdir $Bin;
die "usage: $0 <idx-file>\n" unless $ARGV[0];
my $img_filename = $ARGV[0];
my $img_data = read_file($img_filename, binmode => ':raw');
# 'IDX' format described here: http://yann.lecun.com/exdb/mnist/
# The first 4 bytes are the magic number; its last byte is the number of dimensions.
my @header  = map { ord } split '', substr($img_data, 0, 4, '');
my $numdims = $header[3];

# Each dimension size is a 32-bit big-endian integer (4 bytes per dimension).
my @dims = map { ord } split '', substr($img_data, 0, 4 * $numdims, '');
for (0 .. $numdims - 1) {
    # rebuild each size from its two low-order bytes (MNIST sizes fit in 16 bits)
    $dims[$_] = 256 * $dims[4 * $_ + 2] + $dims[4 * $_ + 3];
}
@dims = @dims[0 .. $numdims - 1];
#die join ' ',@dims;
#my @img_data = map{ord}split('',$img_data);
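# the remaining bytes after the header are the raw unsigned-byte pixel (or label) values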
my $img_pdl = pdl(unpack('C*',$img_data));
use PDL::Graphics2D;
if (!defined $dims[1]) {            # labels: a single dimension of N bytes
    $img_pdl = $img_pdl->squeeze;
}
elsif ($dims[1] == 28) {            # images: N x 28 x 28
    # flatten each 28x28 image to 784 values; after transpose, dim 0 indexes the examples
    @dims = (28**2, $dims[0]);
    $img_pdl = $img_pdl->reshape(@dims)->transpose();
    #imag2d($img_pdl(3000)->reshape(28,28)/256);
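    # Not shown above: after the if/elsif, the piddle is presumably written out with
    # PDL::IO::FITS's wfits. A minimal sketch, assuming the output name simply appends
    # ".fits" to the input name:
    #     $img_pdl->wfits("$img_filename.fits");
    # The per-dimension sizes could equally have been decoded with unpack 'N*'
    # (big-endian 32-bit integers) instead of the byte arithmetic above.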
lib/AI/Nerl.pm view on Meta::CPAN
use PDL;
use AI::Nerl::Network;
# ABSTRACT: Neural networks with backpropagation.
# main_module
our $VERSION = 0.03;
#A Nerl is a mechanism to build neural networks?
#Give it training,test, and cv data?
#it settles on a learning rate and stuff?
#or maybe it's also a language for guided training?
#or maybe a visual gui thing?
#Not exactly sure. Maybe I'm tinkering with forces better left alone.
#That's a great excuse for failing horribly.
=head1 NAME

AI::Nerl - A sort of stackable neural network builder thing.

=head1 SYNOPSIS
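
A minimal usage sketch based on the bundled examples (examples/digits/deep_digits.pl) rather than the module's own synopsis:

    use AI::Nerl;

    my $nerl = AI::Nerl->new(scale_input => 1/256);
    $nerl->init_network(l1 => 784, l2 => 7, l3 => 10);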
lib/AI/Nerl/Network.pm view on Meta::CPAN
=head1 COPYRIGHT

Copyright 2012 by Zach Morgan.

This package is free software; you can redistribute it and/or modify it under the
same terms as Perl itself.

=cut
# Simple nn with 1 hidden layer
# train with $nn->train(data,labels);
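# A direct-usage sketch (an assumption pieced together from the comment above and the
# attributes declared below; not verbatim from the distribution):
#   my $nn = AI::Nerl::Network->new(l1 => 784, l2 => 7, l3 => 10, scale_input => 1/256);
#   $nn->train($x, $y);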
has scale_input => (
is => 'ro',
required => 0,
isa => 'Num',
default => 0,
);
# number of input, hidden, and output neurons
has [qw/ l1 l2 l3 /] => (