PHP code example of numpower / autograd

1. Go to this page and download the library: Download numpower/autograd library. Choose the download type "require" so the package ships with its Composer vendor/ directory.

2. Extract the ZIP file and open index.php.

3. Add this code to index.php:

<?php
require_once('vendor/autoload.php');

/* Start to develop here. Best regards https://php-download.com/ */
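
If you already use Composer, running "composer require numpower/autograd" produces the same vendor/autoload.php setup. Either way, the library is built on top of the NumPower extension, which provides the NDArray class used in the snippets below and must be installed in your PHP environment.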

numpower / autograd example snippets

 
use NumPower\Tensor;

$a = new Tensor([[1, 2], [3, 4]], requireGrad: true);
$b = new Tensor([[5, 6], [7, 8]], requireGrad: true);

$c = (($a + $b) / $b)->sum();

$c->backward();

$dc_Da = $a->grad();
$dc_Db = $b->grad();

echo "out: \n";
echo $c;
echo "\ndc_Da: \n";
echo $dc_Da;
echo "dc_Db: \n";
echo $dc_Db;

Expected output:

out: 
5.4619045257568
dc_Da: 
[[0.2, 0.166667]
 [0.142857, 0.125]]
dc_Db: 
[[-0.04, -0.0555556]
 [-0.0612245, -0.0625]]
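
Why these values: c = Σ (a_ij + b_ij) / b_ij, so ∂c/∂a_ij = 1 / b_ij and ∂c/∂b_ij = -a_ij / b_ij². With b = [[5, 6], [7, 8]], the top-left entries are 1/5 = 0.2 and -1/25 = -0.04, exactly as printed above.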

The same computation can be built and evaluated on the GPU. Constructing the tensors with the GPU flag (assuming here the library's useGpu named argument; this requires a CUDA-enabled NumPower build) moves everything onto the device:

$a = new Tensor([[1, 2], [3, 4]], useGpu: true); // Tensor stored on GPU
$b = new Tensor([[5, 6], [7, 8]], useGpu: true); // Tensor stored on GPU

$c = (($a + $b) / $b)->sum(); // Operation is performed on GPU

$c->backward(); // Back propagation is performed on GPU
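
Because both tensors live on the GPU, the comments above suggest the whole graph stays on the device: the forward arithmetic and the gradients computed by backward() avoid round-trips through host memory, and you only pay a transfer cost when reading results back on the CPU side.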
A more complete example: training a small feed-forward network (one hidden layer with ReLU activation, a sigmoid output, and binary cross-entropy loss) to learn XOR:

use NDArray as nd;
use NumPower\Tensor;
use NumPower\NeuralNetwork\Activations as activation;
use NumPower\NeuralNetwork\Losses as loss;

class SimpleModel
{
    public Tensor $weights_hidden_layer;
    public Tensor $weights_output_layer;
    public Tensor $hidden_bias;
    public Tensor $output_bias;
    private float $learningRate;

    public function __construct(int $inputDim = 2,
                                int $outputDim = 1,
                                int $hiddenSize = 16,
                                float $learningRate = 0.01
    )
    {
        $this->learningRate = $learningRate;
        // Initialize hidden layer weights
        $this->weights_hidden_layer = new Tensor(
            nd::uniform([$inputDim, $hiddenSize], -0.5, 0.5),
            name: 'weights_hidden_layer',
            requireGrad: true
        );
        // Initialize output layer weights
        $this->weights_output_layer = new Tensor(
            nd::uniform([$hiddenSize, $outputDim], -0.5, 0.5),
            name: 'weights_output_layer',
            requireGrad: true
        );
        // Initialize hidden layer bias
        $this->hidden_bias = new Tensor(
            nd::uniform([$hiddenSize], -0.5, 0.5),
            name: 'hidden_bias',
            requireGrad: true
        );
        // Initialize output layer bias
        $this->output_bias = new Tensor(
            nd::uniform([$outputDim], -0.5, 0.5),
            name: 'output_bias',
            requireGrad: true
        );
    }

    public function forward(Tensor $x, Tensor $y): array
    {
        // Forward pass - Hidden Layer
        $x = $x->matmul($this->weights_hidden_layer) + $this->hidden_bias;
        $x = activation::ReLU($x); // ReLU Activation

        // Forward pass - Output Layer
        $x = $x->matmul($this->weights_output_layer) + $this->output_bias;
        $x = activation::sigmoid($x); // Sigmoid Activation

        // Binary Cross Entropy Loss
        $loss = loss::BinaryCrossEntropy($x, $y, name: 'loss');
        return [$x, $loss];
    }

    public function backward(Tensor $loss)
    {
        // Trigger autograd
        $loss->backward();

        // SGD (Optimizer) - Update Hidden Layer weights and bias
        $dw_dLoss = $this->weights_hidden_layer->grad();

        $this->weights_hidden_layer -= ($dw_dLoss * $this->learningRate);
        $this->weights_hidden_layer->resetGradients();

        $this->hidden_bias -= ($this->hidden_bias->grad() * $this->learningRate);
        $this->hidden_bias->resetGradients();

        // SGD (Optimizer) - Update Output Layer weights and bias
        $db_dLoss = $this->weights_output_layer->grad();

        $this->weights_output_layer -= ($db_dLoss * $this->learningRate);
        $this->weights_output_layer->resetGradients();

        $this->output_bias -= $this->output_bias->grad() * $this->learningRate;
        $this->output_bias->resetGradients();
    }
}
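
The backward() method above is a hand-rolled stochastic gradient descent step: autograd fills in each parameter's gradient, every parameter w is then updated as w ← w − η · ∂loss/∂w with learning rate η (0.01 by default), and resetGradients() clears the accumulated gradients so the next iteration starts from zero.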
 
$num_epochs = 4000;
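// Training data: the XOR truth table (x holds the inputs, y the expected outputs)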
$x = new Tensor(nd::array([[0, 0], [1, 0], [1, 1], [0, 1]]), name: 'x');
$y = new Tensor(nd::array([[0], [1], [0], [1]]), name: 'y');

$model = new SimpleModel();

for ($current_epoch = 0; $current_epoch < $num_epochs; $current_epoch++) {
    // Forward Pass
    [$prediction, $loss] = $model->forward($x, $y);
    // Backward Pass
    $model->backward($loss);
    echo "\n Epoch ($current_epoch): ".$loss->getArray();
}

echo "\nPredicted:\n";
print_r($model->forward($x, $y)[0]->toArray());
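
The sigmoid outputs are probabilities rather than hard labels. A minimal sketch of turning them into 0/1 predictions, assuming only the toArray() call already used above (a nested PHP array of probabilities) and the conventional 0.5 decision threshold:

$probs = $model->forward($x, $y)[0]->toArray();
foreach ($probs as $i => $row) {
    // Round each probability to a hard class label at the 0.5 threshold
    echo "sample $i: p = " . round($row[0], 4) . " => class " . ($row[0] >= 0.5 ? 1 : 0) . "\n";
}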