MANIFEST view on Meta::CPAN
AI-ActivationFunctions-0.01/examples/simple.pl
AI-ActivationFunctions-0.01/lib/AI/ActivationFunctions.pm
AI-ActivationFunctions-0.01/Makefile.PL
AI-ActivationFunctions-0.01/META.json
AI-ActivationFunctions-0.01/META.yml
AI-ActivationFunctions-0.01/README.md
AI-ActivationFunctions-0.01/t/00-load.t
AI-ActivationFunctions-0.01/t/basic.t
AI-ActivationFunctions-0.01/t/extended.t
AI-ActivationFunctions-0.01/t/pdl.t
create_structure.sh
examples/neural_network.pl
examples/simple.pl
lib/AI/ActivationFunctions.pm
lib/AI/ActivationFunctions/PDL.pm
Makefile.PL
MANIFEST
META.json
META.yml
README.md
t/00-load.t
t/basic.t
t/extended.t
t/pdl.t
test_all.sh
test_minimal.pl
test_quick.pl
META.json view on Meta::CPAN
{
"abstract" : "Activation functions for neural networks in Perl",
"author" : [
"Ulisses Manzo Castello <umcastello@gmail.com>"
],
"dynamic_config" : 1,
"generated_by" : "ExtUtils::MakeMaker version 7.70, CPAN::Meta::Converter version 2.150010",
"license" : [
"perl_5"
],
"meta-spec" : {
"url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec",
"version" : 2
},
"name" : "AI-ActivationFunctions",
"no_index" : {
"directory" : [
"t",
"inc"
]
},
"prereqs" : {
"build" : {
"requires" : {
"ExtUtils::MakeMaker" : "0"
}
},
"configure" : {
"requires" : {
"ExtUtils::MakeMaker" : "0"
}
},
"runtime" : {
"requires" : {
"Exporter" : "0",
"PDL" : "0",
"Test::More" : "0"
}
}
},
"release_status" : "stable",
"resources" : {
"repository" : {
"type" : "git",
"url" : "https://github.com/seuusuario/ai-activationfunctions.git",
"web" : "https://github.com/seuusuario/ai-activationfunctions"
}
},
"version" : "0.01",
"x_serialization_backend" : "JSON::PP version 4.16"
}
META.yml view on Meta::CPAN
---
abstract: 'Activation functions for neural networks in Perl'
author:
- 'Ulisses Manzo Castello <umcastello@gmail.com>'
build_requires:
ExtUtils::MakeMaker: '0'
configure_requires:
ExtUtils::MakeMaker: '0'
dynamic_config: 1
generated_by: 'ExtUtils::MakeMaker version 7.70, CPAN::Meta::Converter version 2.150010'
license: perl
meta-spec:
url: http://module-build.sourceforge.net/META-spec-v1.4.html
version: '1.4'
name: AI-ActivationFunctions
no_index:
directory:
- t
- inc
requires:
Exporter: '0'
PDL: '0'
Test::More: '0'
resources:
repository: https://github.com/seuusuario/ai-activationfunctions.git
version: '0.01'
x_serialization_backend: 'CPAN::Meta::YAML version 0.018'
Makefile.PL view on Meta::CPAN
use 5.008;
use ExtUtils::MakeMaker;
WriteMakefile(
NAME => 'AI::ActivationFunctions',
VERSION_FROM => 'lib/AI/ActivationFunctions.pm',
ABSTRACT_FROM => 'lib/AI/ActivationFunctions.pm',
AUTHOR => 'Ulisses Manzo Castello <umcastello@gmail.com>',
LICENSE => 'perl_5',
PREREQ_PM => {
'Test::More' => 0,
'Exporter' => 0,
# PDL is recommended, but not required
'PDL' => 0, # in a Build.PL this would be a 'recommends'
},
META_MERGE => {
'meta-spec' => { version => 2 },
resources => {
repository => {
type => 'git',
url => 'https://github.com/seuusuario/ai-activationfunctions.git',
web => 'https://github.com/seuusuario/ai-activationfunctions',
},
},
},
);
README.md view on Meta::CPAN
# AI::ActivationFunctions
Activation functions for neural networks and machine learning in Perl.
## Installation
```bash
cpanm AI::ActivationFunctions
```

## Usage

```perl
use AI::ActivationFunctions qw(relu prelu sigmoid softmax);
# Basic usage
my $result = relu(-5); # returns 0
# With custom parameter
my $prelu = prelu(-2, 0.1); # returns -0.2
# Probability distribution
my $probs = softmax([1, 2, 3]);
```

## Functions

- `relu($x)` - Rectified Linear Unit
- `prelu($x, $alpha=0.01)` - Parametric ReLU
- `leaky_relu($x)` - Leaky ReLU
- `sigmoid($x)` - Sigmoid function
- `tanh($x)` - Hyperbolic tangent
- `softmax(\@array)` - Softmax function
- `elu($x, $alpha=1.0)` - Exponential Linear Unit
- `swish($x)` - Swish activation
- `gelu($x)` - Gaussian Error Linear Unit
- `relu_derivative($x)` - ReLU derivative
- `sigmoid_derivative($x)` - Sigmoid derivative
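
See `examples/simple.pl` for a demonstration of every function.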
create_structure.sh view on Meta::CPAN
#!/bin/bash
# save as create_structure.sh
# Create directories
#mkdir -p AI-ActivationFunctions-0.01/lib/AI/ActivationFunctions
#mkdir -p AI-ActivationFunctions-0.01/t
#mkdir -p AI-ActivationFunctions-0.01/examples
# Change into the directory
#cd AI-ActivationFunctions-0.01
# Create Makefile.PL (simplified for testing)
cat > Makefile.PL << 'EOF'
use 5.010001;
use ExtUtils::MakeMaker;
WriteMakefile(
NAME => 'AI::ActivationFunctions',
VERSION_FROM => 'lib/AI/ActivationFunctions.pm',
ABSTRACT_FROM => 'lib/AI/ActivationFunctions.pm',
AUTHOR => 'Ulisses Manzo Castello <umcastello@gmail.com>',
LICENSE => 'perl_5',
PREREQ_PM => {
'Test::More' => 0,
'Exporter' => 0,
'Carp' => 0,
},
TEST_REQUIRES => {
'Test::More' => 0,
},
META_MERGE => {
'meta-spec' => { version => 2 },
resources => {
repository => {
type => 'git',
url => 'https://github.com/test/ai-activationfunctions.git',
},
},
},
);
EOF
echo "Structure created! Now copy the .pm and .t files into their directories."
echo "Use the code provided earlier."
examples/neural_network.pl view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
use AI::ActivationFunctions qw(relu sigmoid softmax relu_derivative sigmoid_derivative);
print "=== Simple Neural Network Demo ===\n\n";
# Simple neural network layer simulation
sub neural_layer {
my ($inputs, $weights, $biases, $activation) = @_;
# Linear transformation: Wx + b
my @output;
for my $i (0..$#$weights) {
my $sum = $biases->[$i];
for my $j (0..$#$inputs) {
$sum += $weights->[$i][$j] * $inputs->[$j];
}
push @output, $sum;
}
# Apply the activation function element-wise
# (softmax would instead take the whole vector: $activation->(\@output))
return [ map { $activation->($_) } @output ];
}
# Training data: OR gate
my @training_data = (
{ input => [0, 0], output => [0] },
{ input => [0, 1], output => [1] },
{ input => [1, 0], output => [1] },
{ input => [1, 1], output => [1] },
);
# Initialize weights and biases randomly
my @weights = (
[rand() - 0.5, rand() - 0.5], # hidden layer weights
);
my @biases = (rand() - 0.5);
print "Initial weights: [", $weights[0][0], ", ", $weights[0][1], "]\n";
print "Initial bias: ", $biases[0], "\n\n";
# Simple forward pass
print "Forward pass through network:\n";
foreach my $example (@training_data) {
my $input = $example->{input};
my $target = $example->{output}[0];
# Forward pass
my $hidden = neural_layer($input, \@weights, \@biases, \&sigmoid);
my $prediction = $hidden->[0];
# Calculate error
my $error = $target - $prediction;
printf("Input: [%d, %d] -> Prediction: %.4f (Target: %d, Error: %.4f)\n",
$input->[0], $input->[1], $prediction, $target, $error);
}
# Backpropagation example
print "\nBackpropagation step (simplified):\n";
my $example = $training_data[1]; # the [0, 1] => 1 example
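# NOTE: the original file is truncated here. What follows is a minimal sketch
# of the announced simplified backpropagation step for this single sigmoid
# neuron, assuming squared-error loss and a hypothetical learning rate of 0.1.
my $lr = 0.1;
my $input = $example->{input};
my $target = $example->{output}[0];
my $hidden = neural_layer($input, \@weights, \@biases, \&sigmoid);
my $prediction = $hidden->[0];
# delta = (target - prediction) * sigmoid'(z); since prediction = sigmoid(z),
# sigmoid'(z) is simply prediction * (1 - prediction)
my $delta = ($target - $prediction) * $prediction * (1 - $prediction);
for my $j (0 .. $#$input) {
$weights[0][$j] += $lr * $delta * $input->[$j];
}
$biases[0] += $lr * $delta;
printf("After one update: weights [%.4f, %.4f], bias %.4f\n",
$weights[0][0], $weights[0][1], $biases[0]);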
examples/simple.pl view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
use FindBin qw($Bin);
use lib "$Bin/../lib";
use AI::ActivationFunctions qw(:all);
print "=== Full Demonstration ===\n\n";
print "1. Basic Functions:\n";
printf(" relu(5) = %d\n", relu(5));
printf(" relu(-3) = %d\n", relu(-3));
printf(" prelu(-2, 0.1) = %.1f\n", prelu(-2, 0.1));
printf(" sigmoid(0) = %.4f\n", sigmoid(0));
printf(" tanh(1) = %.4f\n", tanh(1));
print "\n2. Advanced Functions:\n";
printf(" elu(-1, 1) = %.4f\n", elu(-1, 1));
printf(" swish(1) = %.4f\n", swish(1));
printf(" gelu(1) = %.4f\n", gelu(1));
print "\n3. Derivatives (for backpropagation):\n";
printf(" relu_derivative(5) = %d\n", relu_derivative(5));
printf(" relu_derivative(-5) = %d\n", relu_derivative(-5));
printf(" sigmoid_derivative(0) = %.4f\n", sigmoid_derivative(0));
print "\n4. Softmax (distribution):\n";
my $scores = [1.0, 2.0, 3.0];
my $probs = softmax($scores);
printf(" Input: [%.1f, %.1f, %.1f]\n", @$scores);
printf(" Output: [%.4f, %.4f, %.4f]\n", @$probs);
printf(" Sum: %.4f\n", $probs->[0] + $probs->[1] + $probs->[2]);
print "\n=== End of Demonstration ===\n";
lib/AI/ActivationFunctions.pm view on Meta::CPAN
package AI::ActivationFunctions;
use strict;
use warnings;
use Exporter 'import';
our $VERSION = '0.01';
our $ABSTRACT = 'Activation functions for neural networks in Perl';
# Complete list of exportable functions
our @EXPORT_OK = qw(
relu prelu leaky_relu
sigmoid tanh softmax
elu swish gelu
relu_derivative sigmoid_derivative
);
our %EXPORT_TAGS = (
all => \@EXPORT_OK,
basic => [qw(relu prelu leaky_relu sigmoid tanh softmax)],
advanced => [qw(elu swish gelu)],
derivatives => [qw(relu_derivative sigmoid_derivative)],
);
# ReLU
sub relu {
my ($x) = @_;
return $x > 0 ? $x : 0;
}
# PReLU
sub prelu {
my ($x, $alpha) = @_;
$alpha //= 0.01;
return $x > 0 ? $x : $alpha * $x;
}
# Leaky ReLU
sub leaky_relu {
my ($x) = @_;
return prelu($x, 0.01);
}
# Sigmoid
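# Maps any real input into the open interval (0, 1); sigmoid(0) = 0.5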
sub sigmoid {
my ($x) = @_;
return 1 / (1 + exp(-$x));
}
# Tanh
sub tanh {
my ($x) = @_;
return 1 if $x > 20; # exp(2*$x) would overflow; tanh is ~1 beyond here
return -1 if $x < -20;
my $e2x = exp(2 * $x);
return ($e2x - 1) / ($e2x + 1);
}
# Softmax over an array reference
sub softmax {
my ($array) = @_;
return undef unless ref($array) eq 'ARRAY';
# Find the maximum (shifting by it avoids exp() overflow; softmax is shift-invariant)
my $max = $array->[0];
foreach my $val (@$array) {
$max = $val if $val > $max;
}
# Compute exponentials
my @exp_vals;
my $sum = 0;
foreach my $val (@$array) {
my $exp_val = exp($val - $max);
push @exp_vals, $exp_val;
$sum += $exp_val;
}
# Normalize so the outputs sum to 1
return [map { $_ / $sum } @exp_vals];
}
# ELU (Exponential Linear Unit)
sub elu {
my ($x, $alpha) = @_;
$alpha //= 1.0;
return $x > 0 ? $x : $alpha * (exp($x) - 1);
}
# Swish (Google)
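# Also known as SiLU: swish(x) = x * sigmoid(x)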
sub swish {
my ($x) = @_;
return $x * sigmoid($x);
}
# GELU (Gaussian Error Linear Unit)
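# Uses the tanh approximation: 0.5*x*(1 + tanh(sqrt(2/pi) * (x + 0.044715*x^3)))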
sub gelu {
my ($x) = @_;
return 0.5 * $x * (1 + tanh(sqrt(2/3.141592653589793) *
($x + 0.044715 * $x**3)));
}
# ReLU derivative
sub relu_derivative {
my ($x) = @_;
return $x > 0 ? 1 : 0;
}
# Sigmoid derivative
sub sigmoid_derivative {
my ($x) = @_;
my $s = sigmoid($x);
return $s * (1 - $s);
}
1;
=head1 NAME
AI::ActivationFunctions - Activation functions for neural networks in Perl
=head1 VERSION
Version 0.01
=head1 ABSTRACT
Activation functions for neural networks in Perl
=head1 SYNOPSIS
use AI::ActivationFunctions qw(relu prelu sigmoid);
my $result = relu(-5); # returns 0
my $prelu_result = prelu(-2, 0.1); # returns -0.2
# The functions are scalar-valued; use map for arrays
my @array_result = map { relu($_) } (-2, -1, 0, 1, 2); # (0, 0, 0, 1, 2)
=head1 DESCRIPTION
This module provides various activation functions commonly used in neural networks
and machine learning. It includes basic functions like ReLU and sigmoid, as well
as advanced functions like GELU and Swish.
=head1 FUNCTIONS
=head2 Basic Functions
=over 4
=item * relu($input)
Rectified Linear Unit. Returns max(0, $input).
=item * prelu($input, $alpha=0.01)
Parametric ReLU. Returns $input if $input > 0, else $alpha * $input.
=item * leaky_relu($input)
Leaky ReLU with alpha=0.01.
=item * sigmoid($input)
Sigmoid function: 1 / (1 + exp(-$input)).
=item * tanh($input)
Hyperbolic tangent function.
=item * softmax(\@array)
Softmax function for probability distributions.
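For example, C<softmax([1, 2, 3])> returns approximately C<[0.0900, 0.2447, 0.6652]>, which sums to 1.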
=back
=head2 Advanced Functions
=over 4
=item * elu($input, $alpha=1.0)
Exponential Linear Unit.
=item * swish($input)
Swish activation function.
=item * gelu($input)
Gaussian Error Linear Unit (used in transformers like BERT, GPT).
=back
=head2 Derivatives
=over 4
=item * relu_derivative($input)
Derivative of ReLU for backpropagation.
=item * sigmoid_derivative($input)
Derivative of sigmoid for backpropagation.
=back
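For example, one gradient-descent step for a single sigmoid unit might look like this (a minimal sketch; C<$lr> is a hypothetical learning rate and the loss is squared error):

    use AI::ActivationFunctions qw(sigmoid sigmoid_derivative);

    my ($x, $w, $b, $target, $lr) = (0.5, 0.3, 0.1, 1, 0.1);
    my $z    = $w * $x + $b;
    my $grad = (sigmoid($z) - $target) * sigmoid_derivative($z);
    $w -= $lr * $grad * $x;  # dL/dw = (prediction - target) * sigmoid'(z) * x
    $b -= $lr * $grad;       # dL/db = (prediction - target) * sigmoid'(z)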
=head1 EXPORT
By default nothing is exported. You can export specific functions:
use AI::ActivationFunctions qw(relu prelu); # specific functions
use AI::ActivationFunctions qw(:basic); # relu, prelu, leaky_relu, sigmoid, tanh, softmax
use AI::ActivationFunctions qw(:advanced); # elu, swish, gelu
use AI::ActivationFunctions qw(:derivatives); # relu_derivative, sigmoid_derivative
use AI::ActivationFunctions qw(:all); # everything
=head1 SEE ALSO
=over 4
=item * L<PDL> - Perl Data Language for numerical computing
=item * L<AI::TensorFlow> - Perl interface to TensorFlow
=item * L<AI::MXNet> - Perl interface to Apache MXNet
=back
=head1 AUTHOR
Ulisses Manzo Castello <umcastello@gmail.com>
=head1 LICENSE
This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself.
=cut
lib/AI/ActivationFunctions/PDL.pm view on Meta::CPAN
package AI::ActivationFunctions::PDL;
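# NOTE: despite the file name, this module currently contains plain-Perl
# scalar implementations only; PDL-aware (ndarray) versions are not yet
# implemented (see t/pdl.t).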
use strict;
use warnings;
use Exporter 'import';
our $VERSION = '0.01';
our @EXPORT_OK = qw(relu prelu leaky_relu sigmoid tanh softmax elu swish gelu relu_derivative sigmoid_derivative);
our %EXPORT_TAGS = (all => \@EXPORT_OK);
# ReLU - very simple
sub relu {
my ($x) = @_;
return $x > 0 ? $x : 0;
}
# PReLU
sub prelu {
my ($x, $alpha) = @_;
$alpha //= 0.01;
return $x > 0 ? $x : $alpha * $x;
}
# Leaky ReLU
sub leaky_relu {
my ($x) = @_;
return $x > 0 ? $x : 0.01 * $x;
}
# Sigmoid
sub sigmoid {
my ($x) = @_;
return 1 / (1 + exp(-$x));
}
# Tanh - correct version (Perl core has no tanh built-in)
sub tanh {
my ($x) = @_;
return 1 if $x > 20; # exp(2*$x) would overflow; tanh is ~1 beyond here
return -1 if $x < -20;
my $e2x = exp(2 * $x);
return ($e2x - 1) / ($e2x + 1);
}
# Softmax over an array reference
sub softmax {
my ($array) = @_;
# Find the maximum, for numerical stability
my $max = $array->[0];
foreach my $val (@$array) {
$max = $val if $val > $max;
}
# Compute exponentials
my @exp_vals;
my $sum = 0;
foreach my $val (@$array) {
my $exp_val = exp($val - $max);
push @exp_vals, $exp_val;
$sum += $exp_val;
}
# Normalize so the outputs sum to 1
return [map { $_ / $sum } @exp_vals];
}
# ELU (Exponential Linear Unit)
sub elu {
my ($x, $alpha) = @_;
$alpha //= 1.0;
return $x > 0 ? $x : $alpha * (exp($x) - 1);
}
# Swish (Google)
sub swish {
my ($x) = @_;
return $x * sigmoid($x);
}
# GELU (Gaussian Error Linear Unit) - used in BERT/GPT
sub gelu {
my ($x) = @_;
return 0.5 * $x * (1 + tanh(sqrt(2/3.141592653589793) *
($x + 0.044715 * $x**3)));
}
# ReLU derivative (for backpropagation)
sub relu_derivative {
my ($x) = @_;
return $x > 0 ? 1 : 0;
}
# Sigmoid derivative (for backpropagation)
sub sigmoid_derivative {
my ($x) = @_;
my $s = sigmoid($x);
return $s * (1 - $s);
}
1;
t/00-load.t view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
use Test::More tests => 1;
BEGIN {
use_ok('AI::ActivationFunctions');
}
diag("Testing AI::ActivationFunctions $AI::ActivationFunctions::VERSION");
t/basic.t view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
use Test::More tests => 17; # Now 17 tests!
# Load and import
use AI::ActivationFunctions qw(relu prelu leaky_relu sigmoid tanh softmax);
# Test 1: ReLU
is(relu(5), 5, 'relu(5) = 5');
is(relu(-5), 0, 'relu(-5) = 0');
is(relu(0), 0, 'relu(0) = 0');
# Test 2: PReLU
is(prelu(2, 0.1), 2, 'prelu(2, 0.1) = 2');
is(sprintf("%.1f", prelu(-2, 0.1)), "-0.2", 'prelu(-2, 0.1) = -0.2');
is(sprintf("%.2f", prelu(-2)), "-0.02", 'prelu(-2) with default alpha = -0.02');
# Test 3: Leaky ReLU
is(leaky_relu(2), 2, 'leaky_relu(2) = 2');
is(sprintf("%.2f", leaky_relu(-2)), "-0.02", 'leaky_relu(-2) = -0.02');
# Test 4: Sigmoid
my $sigmoid0 = sigmoid(0);
ok($sigmoid0 > 0.49 && $sigmoid0 < 0.51, "sigmoid(0) ≈ 0.5 ($sigmoid0)");
my $sigmoid1 = sigmoid(1);
ok($sigmoid1 > 0.73 && $sigmoid1 < 0.74, "sigmoid(1) ≈ 0.731 ($sigmoid1)");
# Test 5: Tanh
my $tanh0 = tanh(0);
ok(abs($tanh0) < 0.001, "tanh(0) ≈ 0 ($tanh0)");
my $tanh1 = tanh(1);
ok($tanh1 > 0.76 && $tanh1 < 0.77, "tanh(1) ≈ 0.761 ($tanh1)");
# Test 6: Softmax
my $scores = [1, 2, 3];
my $probs = softmax($scores);
is(ref($probs), 'ARRAY', 'softmax returns an arrayref');
is(scalar @$probs, 3, 'softmax returns 3 elements');
my $sum = 0;
$sum += $_ for @$probs;
ok(abs($sum - 1) < 0.0001, "softmax sums to ≈ 1 ($sum)");
# Check ordering (higher score = higher probability)
ok($probs->[2] > $probs->[1], "Prob[2] > Prob[1]");
ok($probs->[1] > $probs->[0], "Prob[1] > Prob[0]");
t/extended.t view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
use Test::More tests => 8; # Plan first
# Import specific functions
use AI::ActivationFunctions qw(elu swish gelu relu_derivative sigmoid_derivative);
# Test ELU
my $elu_pos = elu(1, 1);
is(sprintf("%.1f", $elu_pos), "1.0", 'elu(1,1) = 1.0');
my $elu_neg = elu(-1, 1);
ok($elu_neg > -0.64 && $elu_neg < -0.63, "elu(-1,1) ≈ -0.632 ($elu_neg)");
# Test Swish
my $swish1 = swish(1);
ok($swish1 > 0.73 && $swish1 < 0.74, "swish(1) ≈ 0.731 ($swish1)");
# Test GELU
my $gelu0 = gelu(0);
ok(abs($gelu0) < 0.001, "gelu(0) ≈ 0 ($gelu0)");
my $gelu1 = gelu(1);
ok($gelu1 > 0.84 && $gelu1 < 0.85, "gelu(1) ≈ 0.841 ($gelu1)");
# Test derivatives
is(relu_derivative(5), 1, 'relu_derivative(5) = 1');
is(relu_derivative(-5), 0, 'relu_derivative(-5) = 0');
my $sigmoid_deriv = sigmoid_derivative(0);
ok($sigmoid_deriv > 0.24 && $sigmoid_deriv < 0.26,
"sigmoid_derivative(0) ≈ 0.25 ($sigmoid_deriv)");
t/pdl.t view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
use Test::More;
# Skip all tests if PDL is not installed
BEGIN {
eval { require PDL; 1 }
or plan skip_all => 'PDL is not installed';
}
# Continue with the tests
plan tests => 1;
# Check that the module loads alongside PDL
use_ok('AI::ActivationFunctions');
# Note: the current module has no PDL support,
# so we only test that it loads.
test_all.sh view on Meta::CPAN
# 1. Create the structure
#mkdir -p AI-ActivationFunctions-simple/lib/AI
#mkdir AI-ActivationFunctions-simple/t
#mkdir AI-ActivationFunctions-simple/examples
#cd AI-ActivationFunctions-simple
# 2. Copy the files above into their respective directories
# 3. Smoke-test that the module loads
perl -Ilib -e "use AI::ActivationFunctions; print 'Loaded!\\n';"
# 4. Run the quick test
perl test_quick.pl
# 5. Run the formal test suite
prove -Ilib t/
# 6. Run the example
perl examples/simple.pl
test_minimal.pl view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
# MINIMAL test - without using the module yet
print "1. Testing basic functions...\n";
# Define the functions locally first
sub relu {
my ($x) = @_;
return $x > 0 ? $x : 0;
}
sub tanh_simple {
my ($x) = @_;
my $e2x = exp(2 * $x);
return ($e2x - 1) / ($e2x + 1);
}
# Test
print " relu(5) = " . relu(5) . " (expected: 5)\n";
print " relu(-3) = " . relu(-3) . " (expected: 0)\n";
print " tanh(0) = " . tanh_simple(0) . " (expected: ~0)\n";
print "\n2. Now testing the module...\n";
# Try to load the module
eval {
# Add lib to @INC
unshift @INC, 'lib';
require AI::ActivationFunctions;
print " ✓ Module loaded\n";
# Test one function
my $test = AI::ActivationFunctions::relu(10);
print " ✓ relu(10) = $test\n";
1;
} or do {
print " ✗ Error: $@\n";
# Show the file if there was an error
if (-f 'lib/AI/ActivationFunctions.pm') {
print "\nFile contents (first 20 lines):\n";
open my $fh, '<', 'lib/AI/ActivationFunctions.pm' or die $!;
my $linenum = 0;
while (<$fh>) {
$linenum++;
print "$linenum: $_";
last if $linenum >= 20;
}
close $fh;
}
};
print "\nDone!\n";
test_quick.pl view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
use FindBin qw($Bin);
use lib "$Bin/lib";
# Check that the module loads
eval {
require AI::ActivationFunctions;
AI::ActivationFunctions->import(qw(relu prelu sigmoid));
1;
} or die "Error loading module: $@";
print "=== Quick Test ===\n\n";
print "relu(5) = " . AI::ActivationFunctions::relu(5) . "\n";
print "relu(-3) = " . AI::ActivationFunctions::relu(-3) . "\n";
print "prelu(-2, 0.1) = " . AI::ActivationFunctions::prelu(-2, 0.1) . "\n";
print "sigmoid(0) = " . AI::ActivationFunctions::sigmoid(0) . "\n";
my $arr = [-2, -1, 0, 1, 2];
# relu() is scalar-only, so apply it element-wise with map
my $result = [ map { AI::ActivationFunctions::relu($_) } @$arr ];
print "relu([-2,-1,0,1,2]) = [" . join(", ", @$result) . "]\n";
print "\nOK!\n";