Derivative of the ReLU function - Derivative of an activation function

Let's write the derivative of the ReLU function in Perl. The derivative of the ReLU function is used in the backpropagation method. The derivative is 1 when the input is greater than 0, and 0 otherwise.

use strict;
use warnings;

# Derivative of the ReLU function: 1 if $x > 0, otherwise 0
sub relu_derivative {
  my ($x) = @_;
  
  # Multiplying by 1 turns the boolean comparison result into 1 or 0
  my $relu_derivative = 1 * ($x > 0.0);
  
  return $relu_derivative;
}

my $value1 = 0.7;
my $relu_derivative1 = relu_derivative($value1);

# Prints 1
print "$relu_derivative1\n";

my $value2 = -0.4;
my $relu_derivative2 = relu_derivative($value2);

# Prints 0
print "$relu_derivative2\n";
# Creating a matrix filled with zeros (sub name assumed; the start of this sub was cut off)
sub mat_new_zero {
  my ($rows_length, $columns_length) = @_;
  
  my $values_length = $rows_length * $columns_length;
  
  my $mat = {
    rows_length => $rows_length,
    columns_length => $columns_length,
    values => [(0) x $values_length],
  };
  
  return $mat;
}

# Find the product of matrices

sub mat_mul {
  my ($mat1, $mat2) = @_;
  
  my $mat1_rows_length = $mat1->{rows_length};
  my $mat1_columns_length = $mat1->{columns_length};
  my $mat1_values = $mat1->{values};
  
  my $mat2_rows_length = $mat2->{rows_length};
  my $mat2_columns_length = $mat2->{columns_length};
  my $mat2_values = $mat2->{values};
  
  # Calculation of the matrix product
  my $mat_out_values = [];
  for (my $row = 0; $row < $mat1_rows_length; $row++) {
    for (my $col = 0; $col < $mat2_columns_length; $col++) {
      for (my $incol = 0; $incol < $mat1_columns_length; $incol++) {
        $mat_out_values->[$row + $col * $mat1_rows_length]
          += $mat1_values->[$row + $incol * $mat1_rows_length] * $mat2_values->[$incol + $col * $mat2_rows_length];
      }
    }
  }
  
  my $mat_out = {
    rows_length => $mat1_rows_length,
    columns_length => $mat2_columns_length,
    values => $mat_out_values,
  };
  
  return $mat_out;
}

# Creating a column-major matrix
sub mat_new {
  my ($values, $rows_length, $columns_length) = @_;
  
  my $mat = {
    rows_length => $rows_length,
    columns_length => $columns_length,
    values => $values,
  };
  
  return $mat;
}
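
As a small, hedged check of the mat_new and mat_mul subs above (the values are only examples): the values are stored column by column (column-major), so [1, 2, 3, 4] with 2 rows and 2 columns means the first column is (1, 2) and the second column is (3, 4).

my $mat_a = mat_new([1, 2, 3, 4], 2, 2);
my $mat_b = mat_new([5, 6, 7, 8], 2, 2);

my $mat_c = mat_mul($mat_a, $mat_b);

# Prints 23 34 31 46 (the column-major values of the product)
print join(" ", @{$mat_c->{values}}) . "\n";
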

# Transpose a matrix
sub mat_transpose {
  my ($mat) = @_;
  
  my $rows_length = $mat->{rows_length};
  my $columns_length = $mat->{columns_length};
  my $length = $rows_length * $columns_length;
  
  my $mat_trans = {};
  $mat_trans->{rows_length} = $columns_length;
  $mat_trans->{columns_length} = $rows_length;
  
  my $values = $mat->{values};
  my $mat_trans_values = [];
  for (my $row_index = 0; $row_index < $rows_length; $row_index++) {
    for (my $column_index = 0; $column_index < $columns_length; $column_index++) {
      $mat_trans_values->[$row_index * $columns_length + $column_index] = $values->[$column_index * $rows_length + $row_index];
    }
  }
  $mat_trans->{values} = $mat_trans_values;
  
  return $mat_trans;
}
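
A short, hedged usage example of mat_transpose, assuming the mat_new sub above; a 2 x 3 matrix becomes a 3 x 2 matrix:

# Column-major 2 x 3 matrix: columns (1, 2), (3, 4), (5, 6)
my $mat = mat_new([1, 2, 3, 4, 5, 6], 2, 3);

my $mat_trans = mat_transpose($mat);

# Prints 3 2
print "$mat_trans->{rows_length} $mat_trans->{columns_length}\n";

# Prints 1 3 5 2 4 6
print join(" ", @{$mat_trans->{values}}) . "\n";
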

# Divide each element of an array by a scalar
sub array_div_scalar {
  my ($nums, $scalar_num) = @_;
  
  my $nums_out = [];
  for (my $i = 0; $i < @$nums; $i++) {
    $nums_out->[$i] = $nums->[$i] / $scalar_num;
  }
  
  return $nums_out;
}
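
A quick, hedged usage example of array_div_scalar (the values are only examples):

my $halved = array_div_scalar([2, 4, 6], 2);

# Prints 1 2 3
print join(" ", @$halved) . "\n";
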

# softmax function
sub softmax {
  my ($nums) = @_;
  
  my $exp_total = 0;
  for (my $i = 0; $i < @$nums; $i++) {
    $exp_total += exp($nums->[$i]);
  }
  
  my $nums_out = [];
  for (my $i = 0; $i < @$nums; $i++) {
    $nums_out->[$i] = exp($nums->[$i]) / $exp_total;
  }
  
  return $nums_out;
}
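
A hedged usage example of the softmax sub above; the outputs are positive and sum to 1:

my $scores = [1, 2, 3];

my $probabilities = softmax($scores);

# Prints approximately 0.0900 0.2447 0.6652
printf("%.4f %.4f %.4f\n", @$probabilities);
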

# Derivative of the softmax cross entropy error
sub softmax_cross_entropy_cost_derivative {
  my ($softmax_outputs, $desired_outputs) = @_;
  
  my $length = @$softmax_outputs;
  
  my $softmax_cross_entropy_cost_derivative = [];
  for (my $i = 0; $i < @$softmax_outputs; $i++) {
    $softmax_cross_entropy_cost_derivative->[$i] = ($softmax_outputs->[$i] - $desired_outputs->[$i]) / $length;
  }
  
  return $softmax_cross_entropy_cost_derivative;
}
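
The derivative of the cross entropy error with respect to the softmax inputs is the softmax output minus the desired (one-hot) output; this sub additionally divides by the number of outputs. A hedged usage example, assuming the softmax and softmax_cross_entropy_cost_derivative subs above:

my $softmax_outputs = softmax([1, 2, 3]);

# One-hot desired output: the third class is the correct one
my $desired_outputs = [0, 0, 1];

my $derivative = softmax_cross_entropy_cost_derivative($softmax_outputs, $desired_outputs);

# Prints approximately 0.0300 0.0816 -0.1116
printf("%.4f %.4f %.4f\n", @$derivative);
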
