AI-ActivationFunctions

META.json

   },
   "name" : "AI-ActivationFunctions",
   "no_index" : {
      "directory" : [
         "t",
         "inc"
      ]
   },
   "prereqs" : {
      "build" : {
         "requires" : {
            "ExtUtils::MakeMaker" : "0"
         }
      },
      "configure" : {
         "requires" : {
            "ExtUtils::MakeMaker" : "0"
         }
      },
      "runtime" : {
         "requires" : {
            "Exporter" : "0",
            "PDL" : "0",
            "Test::More" : "0"
         }
      }
   },
   "release_status" : "stable",
   "resources" : {
      "repository" : {
         "type" : "git",
         "url" : "https://github.com/seuusuario/ai-activationfunctions.git",
         "web" : "https://github.com/seuusuario/ai-activationfunctions"
      }
   },
   "version" : "0.01",
   "x_serialization_backend" : "JSON::PP version 4.16"
}

META.yml

---
abstract: 'Activation functions for neural networks in Perl'
author:
  - 'Ulisses Manzo Castello <umcastello@gmail.com>'
build_requires:
  ExtUtils::MakeMaker: '0'
configure_requires:
  ExtUtils::MakeMaker: '0'
dynamic_config: 1
generated_by: 'ExtUtils::MakeMaker version 7.70, CPAN::Meta::Converter version 2.150010'
license: perl
meta-spec:
  url: http://module-build.sourceforge.net/META-spec-v1.4.html
  version: '1.4'
name: AI-ActivationFunctions
no_index:
  directory:
    - t
    - inc
requires:
  Exporter: '0'
  PDL: '0'
  Test::More: '0'
resources:
  repository: https://github.com/seuusuario/ai-activationfunctions.git
version: '0.01'
x_serialization_backend: 'CPAN::Meta::YAML version 0.018'

Makefile.PL

    AUTHOR        => 'Ulisses Manzo Castello <umcastello@gmail.com>',
    LICENSE       => 'perl_5',
    PREREQ_PM     => {
        'Test::More' => 0,
        'Exporter'   => 0,
        # PDL is recommended but not required
        'PDL'        => 0,  # in Build.PL this would be 'recommends'
    },
    META_MERGE    => {
        'meta-spec' => { version => 2 },
        resources   => {
            repository => {
                type => 'git',
                url  => 'https://github.com/seuusuario/ai-activationfunctions.git',
                web  => 'https://github.com/seuusuario/ai-activationfunctions',
            },
        },
    },
);
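
The comment above notes that under Module::Build PDL would be a 'recommends' rather than a hard prerequisite. ExtUtils::MakeMaker can express the same soft dependency through META_MERGE's prereqs key; a minimal sketch of that alternative (not the distribution's actual Makefile.PL):

    use ExtUtils::MakeMaker;

    WriteMakefile(
        NAME       => 'AI::ActivationFunctions',
        VERSION    => '0.01',
        PREREQ_PM  => {
            'Test::More' => 0,
            'Exporter'   => 0,
            # PDL intentionally absent from the hard requirements
        },
        META_MERGE => {
            'meta-spec' => { version => 2 },
            prereqs     => {
                runtime => {
                    # soft dependency: CPAN clients may offer to install
                    # it, but its absence does not fail the build
                    recommends => { 'PDL' => '0' },
                },
            },
        },
    );

With this layout the generated META files list PDL under runtime recommends instead of runtime requires, as shown in META.json above.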

README.md


```bash
cpanm AI::ActivationFunctions
```

## Usage

```perl
use AI::ActivationFunctions qw(relu prelu sigmoid softmax);

# Basic usage
my $result = relu(-5);  # returns 0

# With custom parameter
my $prelu = prelu(-2, 0.1);  # returns -0.2

# Probability distribution
my $probs = softmax([1, 2, 3]);
```

## Functions

`relu($x)` - Rectified Linear Unit
create_structure.sh

    PREREQ_PM     => {
        'Test::More' => 0,
        'Exporter'   => 0,
        'Carp'       => 0,
    },
    TEST_REQUIRES => {
        'Test::More' => 0,
    },
    META_MERGE    => {
        'meta-spec' => { version => 2 },
        resources   => {
            repository => {
                type => 'git',
                url  => 'https://github.com/test/ai-activationfunctions.git',
            },
        },
    },
);
EOF

echo "Estrutura criada! Agora copie os arquivos .pm e .t para os diretórios."

examples/simple.pl

printf("   elu(-1, 1)     = %.4f\n", elu(-1, 1));
printf("   swish(1)       = %.4f\n", swish(1));
printf("   gelu(1)        = %.4f\n", gelu(1));

print "\n3. Derivadas (para backpropagation):\n";
printf("   relu_derivative(5)     = %d\n", relu_derivative(5));
printf("   relu_derivative(-5)    = %d\n", relu_derivative(-5));
printf("   sigmoid_derivative(0)  = %.4f\n", sigmoid_derivative(0));

print "\n4. Softmax (distribuição):\n";
my $scores = [1.0, 2.0, 3.0];
my $probs = softmax($scores);
printf("   Entrada:  [%.1f, %.1f, %.1f]\n", @$scores);
printf("   Saída:    [%.4f, %.4f, %.4f]\n", @$probs);
printf("   Soma:     %.4f\n", $probs->[0] + $probs->[1] + $probs->[2]);

print "\n=== Fim da Demonstração ===\n";

lib/AI/ActivationFunctions.pm

Version 0.01

=head1 ABSTRACT

Activation functions for neural networks in Perl

=head1 SYNOPSIS

    use AI::ActivationFunctions qw(relu prelu sigmoid);

    my $result = relu(-5);  # returns 0
    my $prelu_result = prelu(-2, 0.1);  # returns -0.2

    # Array version works too
    my $array_result = relu([-2, -1, 0, 1, 2]);  # returns [0, 0, 0, 1, 2]

=head1 DESCRIPTION

This module provides various activation functions commonly used in neural networks
and machine learning. It includes basic functions like ReLU and sigmoid, as well
as advanced functions like GELU and Swish.

=head1 FUNCTIONS

=head2 Basic Functions
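
The excerpt ends before the function bodies, but the SYNOPSIS shows relu accepting either a plain scalar or an arrayref. One common way to support both shapes is to dispatch on ref() and recurse per element; a sketch under that assumption (the shipped implementation may differ):

    # Hypothetical sketch of scalar-or-arrayref dispatch.
    sub relu {
        my ($x) = @_;
        return [ map { relu($_) } @$x ] if ref($x) eq 'ARRAY';
        return $x > 0 ? $x : 0;
    }

    sub prelu {
        my ($x, $alpha) = @_;
        $alpha //= 0.01;    # assumed default negative-side slope
        return [ map { prelu($_, $alpha) } @$x ] if ref($x) eq 'ARRAY';
        return $x > 0 ? $x : $alpha * $x;
    }

This reproduces the SYNOPSIS behavior: relu(-5) is 0, prelu(-2, 0.1) is -0.2, and relu([-2, -1, 0, 1, 2]) is [0, 0, 0, 1, 2].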

t/basic.t

ok($sigmoid1 > 0.73 && $sigmoid1 < 0.74, "sigmoid(1) ≈ 0.731 ($sigmoid1)");

# Test 5: Tanh
my $tanh0 = tanh(0);
ok(abs($tanh0) < 0.001, "tanh(0) ≈ 0 ($tanh0)");

my $tanh1 = tanh(1);
ok($tanh1 > 0.76 && $tanh1 < 0.77, "tanh(1) ≈ 0.761 ($tanh1)");

# Test 6: Softmax
my $scores = [1, 2, 3];
my $probs = softmax($scores);

is(ref($probs), 'ARRAY', 'softmax returns an arrayref');
is(scalar @$probs, 3, 'softmax returns 3 elements');

my $sum = 0;
$sum += $_ for @$probs;
ok(abs($sum - 1) < 0.0001, "softmax sum ≈ 1 ($sum)");

# Check ordering (higher score = higher probability)
ok($probs->[2] > $probs->[1], "Prob[2] > Prob[1]");

test_all.sh

# 1. Create the structure
#mkdir -p AI-ActivationFunctions-simple/lib/AI
#mkdir AI-ActivationFunctions-simple/t
#mkdir AI-ActivationFunctions-simple/examples
#cd AI-ActivationFunctions-simple

# 2. Copy the files above into their respective directories

# 3. Check that the module loads
perl -Ilib -MAI::ActivationFunctions -e 'print "Loaded!\n"'

# 4. Run the quick test
perl test_quick.pl

# 5. Run the formal tests
prove -Ilib t/

test_quick.pl

} or die "Erro ao carregar módulo: $@";

print "=== Teste Rápido ===\n\n";

print "relu(5) = " . AI::ActivationFunctions::relu(5) . "\n";
print "relu(-3) = " . AI::ActivationFunctions::relu(-3) . "\n";
print "prelu(-2, 0.1) = " . AI::ActivationFunctions::prelu(-2, 0.1) . "\n";
print "sigmoid(0) = " . AI::ActivationFunctions::sigmoid(0) . "\n";

my $arr = [-2, -1, 0, 1, 2];
my $result = AI::ActivationFunctions::relu($arr);
print "relu([-2,-1,0,1,2]) = [" . join(", ", @$result) . "]\n";

print "\nOK!\n";


