AI-ActivationFunctions-0.01/AI-ActivationFunctions-0.01/Makefile.PL view on Meta::CPAN
use 5.008;
use ExtUtils::MakeMaker;

WriteMakefile(
    NAME          => 'AI::ActivationFunctions',
    VERSION_FROM  => 'lib/AI/ActivationFunctions.pm',
    ABSTRACT_FROM => 'lib/AI/ActivationFunctions.pm',
    AUTHOR        => 'Ulisses Manzo Castello <umcastello@gmail.com>',
    LICENSE       => 'perl_5',
    PREREQ_PM     => {
        'Test::More' => 0,
        'Exporter'   => 0,
        # PDL is recommended but not required
        'PDL'        => 0,   # in Build.PL this would be 'recommends'
    },
    META_MERGE => {
        'meta-spec' => { version => 2 },
        resources   => {
            repository => {
                type => 'git',
                url  => 'https://github.com/seuusuario/ai-activationfunctions.git',
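Note that PREREQ_PM makes PDL a hard install-time requirement, which contradicts the "recommended but not required" comment. A soft dependency can instead be declared through META_MERGE's spec-v2 prereqs; a minimal sketch, not part of this distribution:

    # Sketch: drop 'PDL' from PREREQ_PM and declare it as a runtime
    # recommendation via the CPAN::Meta v2 prereqs structure, which
    # ExtUtils::MakeMaker merges into the generated META files.
    META_MERGE => {
        'meta-spec' => { version => 2 },
        prereqs     => {
            runtime => { recommends => { 'PDL' => '0' } },
        },
    },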
AI-ActivationFunctions-0.01/AI-ActivationFunctions-0.01/t/00-load.t view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
use Test::More tests => 1;

BEGIN {
    use_ok('AI::ActivationFunctions');
}

diag("Testing AI::ActivationFunctions $AI::ActivationFunctions::VERSION");
AI-ActivationFunctions-0.01/AI-ActivationFunctions-0.01/t/basic.t view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
use Test::More tests => 15;   # 15 assertions below

# Load and import
use AI::ActivationFunctions qw(relu prelu leaky_relu sigmoid tanh softmax);

# Test 1: ReLU
is(relu(5), 5, 'relu(5) = 5');
is(relu(-5), 0, 'relu(-5) = 0');
is(relu(0), 0, 'relu(0) = 0');

# Test 2: PReLU
is(prelu(2, 0.1), 2, 'prelu(2, 0.1) = 2');
is(sprintf("%.1f", prelu(-2, 0.1)), "-0.2", 'prelu(-2, 0.1) = -0.2');
is(sprintf("%.2f", prelu(-2)), "-0.02", 'prelu(-2) with default alpha = -0.02');

# Test 3: Leaky ReLU
is(leaky_relu(2), 2, 'leaky_relu(2) = 2');
is(sprintf("%.2f", leaky_relu(-2)), "-0.02", 'leaky_relu(-2) = -0.02');

# Test 4: Sigmoid
my $sigmoid0 = sigmoid(0);
ok($sigmoid0 > 0.49 && $sigmoid0 < 0.51, "sigmoid(0) ≈ 0.5 ($sigmoid0)");
my $sigmoid1 = sigmoid(1);
ok($sigmoid1 > 0.73 && $sigmoid1 < 0.74, "sigmoid(1) ≈ 0.731 ($sigmoid1)");

# Test 5: Tanh
my $tanh0 = tanh(0);
ok(abs($tanh0) < 0.001, "tanh(0) ≈ 0 ($tanh0)");
my $tanh1 = tanh(1);
ok($tanh1 > 0.76 && $tanh1 < 0.77, "tanh(1) ≈ 0.761 ($tanh1)");

# Test 6: Softmax
my $scores = [1, 2, 3];
my $probs = softmax($scores);
is(ref($probs), 'ARRAY', 'softmax returns an arrayref');
is(scalar @$probs, 3, 'softmax returns 3 elements');
my $sum = 0;
$sum += $_ for @$probs;
ok(abs($sum - 1) < 0.0001, "softmax sums to ≈ 1 ($sum)");
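The module source itself is not shown in this view, but the contract these tests pin down (arrayref in, arrayref out, probabilities summing to 1) suggests an implementation along these lines. A minimal sketch, with max-subtraction added for numerical stability (an assumption, not necessarily what 0.01 does):

# Sketch: numerically stable softmax over a plain arrayref.
sub softmax_sketch {
    my ($scores) = @_;
    my $max = $scores->[0];
    $_ > $max and $max = $_ for @$scores;      # shift by the max for stability
    my @exps = map { exp($_ - $max) } @$scores;
    my $sum  = 0;
    $sum += $_ for @exps;
    return [ map { $_ / $sum } @exps ];        # probabilities summing to 1
}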
AI-ActivationFunctions-0.01/AI-ActivationFunctions-0.01/t/pdl.t view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
use Test::More;

# Skip if PDL is not installed
BEGIN {
    eval {
        require PDL;
        1;
    } or do {
        plan skip_all => 'PDL is not installed';
        exit 0;
    };
}

# Continue with the tests
plan tests => 1;

# Check that PDL and our module can be loaded together
use_ok('AI::ActivationFunctions');

# Note: the current module has no PDL support,
# so we only test that it loads.
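If PDL support were added later, the scalar functions could be vectorized over whole ndarrays; relu, for instance, reduces to a mask multiply. A sketch only, since the current module has no PDL code:

# Sketch: PDL-backed relu. The overloaded '>' yields a 0/1 mask,
# so multiplying zeroes out the negative entries elementwise.
use PDL;
sub relu_pdl {
    my ($x) = @_;            # $x is a PDL ndarray
    return $x * ($x > 0);
}
print relu_pdl(pdl(-2, -1, 0, 1, 2)), "\n";   # prints [0 0 0 1 2]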
AI-ActivationFunctions-0.01/AI-ActivationFunctions-0.01/META.json view on Meta::CPAN
      },
      "configure" : {
         "requires" : {
            "ExtUtils::MakeMaker" : "0"
         }
      },
      "runtime" : {
         "requires" : {
            "Exporter" : "0",
            "PDL" : "0",
            "Test::More" : "0"
         }
      }
   },
   "release_status" : "stable",
   "resources" : {
      "repository" : {
         "type" : "git",
         "url" : "https://github.com/seuusuario/ai-activationfunctions.git",
         "web" : "https://github.com/seuusuario/ai-activationfunctions"
      }
AI-ActivationFunctions-0.01/AI-ActivationFunctions-0.01/META.yml view on Meta::CPAN
meta-spec:
  url: http://module-build.sourceforge.net/META-spec-v1.4.html
  version: '1.4'
name: AI-ActivationFunctions
no_index:
  directory:
    - t
    - inc
requires:
  Exporter: '0'
  PDL: '0'
  Test::More: '0'
resources:
  repository: https://github.com/seuusuario/ai-activationfunctions.git
version: '0.01'
x_serialization_backend: 'CPAN::Meta::YAML version 0.018'
create_structure.sh view on Meta::CPAN
use 5.010001;
use ExtUtils::MakeMaker;

WriteMakefile(
    NAME          => 'AI::ActivationFunctions',
    VERSION_FROM  => 'lib/AI/ActivationFunctions.pm',
    ABSTRACT_FROM => 'lib/AI/ActivationFunctions.pm',
    AUTHOR        => 'Ulisses Manzo Castello <umcastello@gmail.com>',
    LICENSE       => 'perl_5',
    PREREQ_PM     => {
        'Test::More' => 0,
        'Exporter'   => 0,
        'Carp'       => 0,
    },
    TEST_REQUIRES => {
        'Test::More' => 0,
    },
    META_MERGE => {
        'meta-spec' => { version => 2 },
        resources   => {
            repository => {
                type => 'git',
                url  => 'https://github.com/test/ai-activationfunctions.git',
            },
        },
    },
t/extended.t view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
use Test::More tests => 8;   # plan first

# Import specific functions
use AI::ActivationFunctions qw(elu swish gelu relu_derivative sigmoid_derivative);

# Test ELU
my $elu_pos = elu(1, 1);
is(sprintf("%.1f", $elu_pos), "1.0", 'elu(1,1) = 1.0');
my $elu_neg = elu(-1, 1);
ok($elu_neg > -0.64 && $elu_neg < -0.63, "elu(-1,1) ≈ -0.632 ($elu_neg)");

# Test Swish
my $swish1 = swish(1);
ok($swish1 > 0.73 && $swish1 < 0.74, "swish(1) ≈ 0.731 ($swish1)");

# Test GELU
my $gelu0 = gelu(0);
ok(abs($gelu0) < 0.001, "gelu(0) ≈ 0 ($gelu0)");
my $gelu1 = gelu(1);
ok($gelu1 > 0.84 && $gelu1 < 0.85, "gelu(1) ≈ 0.841 ($gelu1)");

# Test derivatives
is(relu_derivative(5), 1, 'relu_derivative(5) = 1');
is(relu_derivative(-5), 0, 'relu_derivative(-5) = 0');
my $sigmoid_deriv = sigmoid_derivative(0);
ok($sigmoid_deriv > 0.24 && $sigmoid_deriv < 0.26,
   "sigmoid_derivative(0) ≈ 0.25 ($sigmoid_deriv)");
test_all.sh view on Meta::CPAN
# 1. Create the structure
#mkdir -p AI-ActivationFunctions-simple/lib/AI
#mkdir AI-ActivationFunctions-simple/t
#mkdir AI-ActivationFunctions-simple/examples
#cd AI-ActivationFunctions-simple

# 2. Copy the files above into their respective directories

# 3. Smoke-test that the module loads
perl -Ilib -e 'use AI::ActivationFunctions; print "Loaded!\n";'

# 4. Run the quick test
perl test_quick.pl

# 5. Run the formal tests
prove -Ilib t/

# 6. Run the example
perl examples/simple.pl
test_minimal.pl view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;

# MINIMAL test - not using the module yet
print "1. Testing basic functions...\n";

# Define the functions locally first
sub relu {
    my ($x) = @_;
    return $x > 0 ? $x : 0;
}

sub tanh_simple {
    my ($x) = @_;
    my $e2x = exp(2 * $x);
    return ($e2x - 1) / ($e2x + 1);
}

# Test
print "  relu(5)  = " . relu(5) . " (expected: 5)\n";
print "  relu(-3) = " . relu(-3) . " (expected: 0)\n";
print "  tanh(0)  = " . tanh_simple(0) . " (expected: ~0)\n";

print "\n2. Now testing the module...\n";

# Try to load the module
eval {
    # Add lib to @INC
    unshift @INC, 'lib';
    require AI::ActivationFunctions;
    print "  ✓ Module loaded\n";
    # Test one function
    my $test = AI::ActivationFunctions::relu(10);
    print "  ✓ relu(10) = $test\n";
    1;
} or do {
    print "  ✗ Error: $@\n";
    # Show the file if there was an error
    if (-f 'lib/AI/ActivationFunctions.pm') {
        print "\nFile contents (first 20 lines):\n";
test_quick.pl view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
use FindBin qw($Bin);
use lib "$Bin/lib";

# Check that the module loads
eval {
    require AI::ActivationFunctions;
    AI::ActivationFunctions->import(qw(relu prelu sigmoid));
    1;
} or die "Error loading module: $@";

print "=== Quick Test ===\n\n";
print "relu(5) = " . AI::ActivationFunctions::relu(5) . "\n";
print "relu(-3) = " . AI::ActivationFunctions::relu(-3) . "\n";
print "prelu(-2, 0.1) = " . AI::ActivationFunctions::prelu(-2, 0.1) . "\n";
print "sigmoid(0) = " . AI::ActivationFunctions::sigmoid(0) . "\n";

my $arr = [-2, -1, 0, 1, 2];
my $result = AI::ActivationFunctions::relu($arr);
print "relu([-2,-1,0,1,2]) = [" . join(", ", @$result) . "]\n";