AI-ML


MANIFEST

t/08-gradient-checking.t_
t/W1_grad_check.csv
t/W2_grad_check.csv
t/W3_grad_check.csv
t/b1_grad_check.csv
t/b2_grad_check.csv
t/b3_grad_check.csv
t/dW1_grad_check.csv
t/dW2_grad_check.csv
t/dW3_grad_check.csv
t/dataset_adjetivos.csv
t/db1_grad_check.csv
t/db2_grad_check.csv
t/db3_grad_check.csv
t/logistic.csv
t/pred-nn.csv
t/w1.csv
t/w2.csv
t/x.csv
t/x_grad_check.csv
t/y.csv

lib/AI/ML/LinearRegression.pm

            $lambda = $self->{reg};
            for my $i (1..$iters){

                $cost = sum( ( ($x x $thetas) - $y) ** 2) / (2 * $m) + ($lambda / (2 * $m)) * sum( $thetas->slice(x0 => 1) ** 2 );
                push @cost_values, $cost->get_element(0,0) if defined $self->{cost};

                $grads =  ($x->T x (($x x $thetas)-$y)) / $m;

                $reg_thetas = ($lambda / $m) * $thetas;
                # do not regularize theta 0
                $reg_thetas->set_element(0,0,0);
                $thetas = $thetas - $alpha * ( $grads + $reg_thetas );
            }
        }
        else{
            for my $i (1..$iters)
            {
                if( exists $self->{cost} ) {
                    $cost = sum( ( ($x x $thetas) - $y) ** 2) / (2 * $m);
                    push @cost_values, $cost->get_element(0,0);
                }
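
The loop above is batch gradient descent on the squared-error cost, with an optional ridge (L2) penalty whose update skips theta 0. Below is a minimal plain-Perl sketch of the same regularized update for a single feature plus an intercept; it uses scalars and List::Util instead of Math::Lapack::Matrix, and the data and hyperparameters are purely illustrative.

    #!/usr/bin/perl
    use strict;
    use warnings;
    use List::Util qw(sum0);

    # Illustrative data: y is roughly 2*x.
    my @x = (1, 2, 3, 4);
    my @y = (2.1, 3.9, 6.2, 8.1);
    my ($theta0, $theta1) = (0, 0);
    my ($alpha, $lambda, $iters) = (0.01, 0.1, 1000);
    my $m = scalar @x;

    for (1 .. $iters) {
        my @err   = map { $theta0 + $theta1 * $x[$_] - $y[$_] } 0 .. $m - 1;
        my $grad0 = sum0(@err) / $m;                                   # intercept gradient
        my $grad1 = sum0(map { $err[$_] * $x[$_] } 0 .. $m - 1) / $m;  # feature gradient
        $theta0 -= $alpha * $grad0;                                    # theta 0 is not regularized
        $theta1 -= $alpha * ($grad1 + ($lambda / $m) * $theta1);
    }
    printf "theta0 = %.4f, theta1 = %.4f\n", $theta0, $theta1;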

lib/AI/ML/LogisticRegression.pm

    if ( exists $self->{reg} ) {
        $lambda = $self->{reg};
        for my $i (1 .. $iters) {
            $h = sigmoid($x x $thetas);
            $reg = ($lambda / (2 * $m)) * sum( $thetas->slice(x0 => 1) ** 2 );
            $cost = (-1 / $m) * sum($y * log($h) + (1 - $y) * log(1-$h)) + $reg;

            push @cost_values, $cost->get_element(0,0) if exists $self->{cost};

            # do not regularize theta 0
            $reg_thetas = ($lambda / $m) * $thetas;
            $reg_thetas->set_element(0,0,0);

            $grad = ($x->T x ($h - $y)) / $m;

            $thetas = $thetas - $alpha * ( $grad + $reg_thetas );
        }
    }
    else {
        for my $i (1 .. $iters) {
            $h = sigmoid($x x $thetas);
            $cost = (-1 / $m) * sum(($y * log($h)) + ((1-$y) * log(1-$h)));
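
Here the hypothesis is sigmoid($x x $thetas) and the cost is the binary cross-entropy, with an L2 penalty in the first branch and none in the second. The sketch below reproduces the unregularized branch with plain scalars for one feature plus an intercept, again independent of Math::Lapack::Matrix; the data and hyperparameters are invented for illustration.

    #!/usr/bin/perl
    use strict;
    use warnings;
    use List::Util qw(sum0);

    sub sigmoid { 1 / (1 + exp(-$_[0])) }

    # Illustrative data: negative x maps to class 0, positive x to class 1.
    my @x = (-2, -1, 1, 2);
    my @y = ( 0,  0, 1, 1);
    my ($theta0, $theta1) = (0, 0);
    my ($alpha, $iters) = (0.1, 2000);
    my $m = scalar @x;
    my $cost;

    for (1 .. $iters) {
        my @h = map { sigmoid($theta0 + $theta1 * $_) } @x;
        $cost = (-1 / $m) * sum0(
            map { $y[$_] * log($h[$_]) + (1 - $y[$_]) * log(1 - $h[$_]) } 0 .. $m - 1
        );
        my $grad0 = sum0(map {  $h[$_] - $y[$_]           } 0 .. $m - 1) / $m;
        my $grad1 = sum0(map { ($h[$_] - $y[$_]) * $x[$_] } 0 .. $m - 1) / $m;
        $theta0 -= $alpha * $grad0;
        $theta1 -= $alpha * $grad1;
    }
    printf "final cost %.4f, p(y=1 | x=1.5) = %.3f\n", $cost, sigmoid($theta0 + $theta1 * 1.5);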

lib/AI/ML/NeuralNetwork.pm

    #    print STDERR $params->get_element($j,0)."\n";
    #}
  
    #my $epsilon = 1e-7;
    #my $J_plus = Math::Lapack::Matrix->zeros($n,1);
    #my $J_minus = Math::Lapack::Matrix->zeros($n,1);
    #my $grad_aprox = Math::Lapack::Matrix->zeros($n,1);
    
    #for my $i (0..$n-1){
    #    $theta_plus = $params;
    #    $theta_plus->set_element($i, 0, $theta_plus->get_element($i,0) + $epsilon);
    #    $J_plus->set_element($i, 0, _forward_prop_n($x, $y, _vector_to_hash($theta_plus, $n, %dims)));
    #
    #    $theta_minus = $params;
    #    $theta_minus->set_element($i, 0, $theta_minus->get_element($i,0) - $epsilon);
    #    $J_minus->set_element($i, 0, _forward_prop_n($x, $y, _vector_to_hash($theta_minus, $n, %dims)));
    #
    #    $grad_aprox->set_element($i, 0, ($J_plus->get_element($i,0) - $J_minus->get_element($i,0)) / (2*$epsilon));
    #}

} 
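
The commented-out block sketches gradient checking: each parameter is nudged by plus and minus epsilon, the cost is re-evaluated through _forward_prop_n, and the central difference (J_plus - J_minus) / (2 * epsilon) is compared against the analytic gradient. A self-contained plain-Perl illustration of the same idea, using a toy cost function in place of _forward_prop_n and plain arrays in place of Math::Lapack::Matrix, might look like this:

    #!/usr/bin/perl
    use strict;
    use warnings;

    # Toy cost J(t0, t1) = t0**2 + 3*t1, standing in for _forward_prop_n.
    sub cost { my ($t0, $t1) = @_; return $t0 ** 2 + 3 * $t1 }

    my $epsilon = 1e-7;
    my @params  = (1.5, -2.0);
    my @grads   = (2 * $params[0], 3);   # analytic gradient of the toy cost

    for my $i (0 .. $#params) {
        my @plus  = @params;  $plus[$i]  += $epsilon;   # J(theta + epsilon) for parameter $i
        my @minus = @params;  $minus[$i] -= $epsilon;   # J(theta - epsilon) for parameter $i
        my $approx = (cost(@plus) - cost(@minus)) / (2 * $epsilon);
        printf "param %d: analytic %.6f, numeric %.6f, diff %.2e\n",
            $i, $grads[$i], $approx, abs($grads[$i] - $approx);
    }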
    

=head2 _vector_to_hash


