PHP code examples for golivehost / brain

1. Go to the download page and download the golivehost/brain library, choosing the "require" download type.

2. Extract the ZIP file and open index.php.

3. Add the following code to index.php:

<?php
require_once('vendor/autoload.php');

/* Start developing here. Best regards, https://php-download.com/ */
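
Alternatively, if the package is published on Packagist under the same name (an assumption, not verified here), it can typically be pulled in directly with Composer:

composer require golivehost/brain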

    

golivehost / brain example snippets



use GoLiveHost\Brain\NeuralNetworks\NeuralNetwork;

// Training data for XOR problem
$trainingData = [
    ['input' => [0, 0], 'output' => [0]],
    ['input' => [0, 1], 'output' => [1]],
    ['input' => [1, 0], 'output' => [1]],
    ['input' => [1, 1], 'output' => [0]]
];

// Create and configure neural network
$net = new NeuralNetwork([
    'hiddenLayers' => [3],
    'activation' => 'sigmoid',
    'learningRate' => 0.3,
    'iterations' => 20000
]);

// Train the network
$result = $net->train($trainingData);
echo "Training completed in {$result['iterations']} iterations with error {$result['error']}\n";

// Test the network
foreach ($trainingData as $data) {
    $output = $net->run($data['input']);
    echo "Input: [" . implode(", ", $data['input']) . "] => Output: " . round($output[0]) . "\n";
}

// Save the model
$json = $net->toJSON();
file_put_contents('xor-model.json', $json);
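
// The saved JSON can be restored later; a minimal sketch using the Brain
// factory shown in the next snippet:
$restored = \GoLiveHost\Brain\Brain::fromJSON(file_get_contents('xor-model.json'));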

use GoLiveHost\Brain\Brain;

// Create a feedforward neural network
$nn = Brain::neuralNetwork([
    'hiddenLayers' => [20, 10],
    'activation' => 'relu',
    'dropout' => 0.2
]);

// Create an LSTM network
$lstm = Brain::lstm([
    'hiddenLayers' => [50, 25],
    'learningRate' => 0.01,
    'praxis' => 'adam'
]);

// Create a Liquid State Machine
$lsm = Brain::liquidStateMachine([
    'reservoirSize' => 100,
    'connectivity' => 0.1,
    'spectralRadius' => 0.9
]);

// Load a model from JSON (for example, the $json exported in the first snippet)
$model = Brain::fromJSON($json);

use GoLiveHost\Brain\NeuralNetworks\LSTM;

// Prepare sequence data
$sequences = [
    [
        'input' => [[0.1], [0.2], [0.3], [0.4], [0.5]],
        'output' => [[0.6], [0.7], [0.8]]
    ],
    // ... more sequences
];

// Create and train LSTM
$lstm = new LSTM([
    'inputSize' => 1,
    'hiddenLayers' => [50, 25],
    'outputSize' => 1,
    'learningRate' => 0.01,
    'iterations' => 1000,
    'batchSize' => 32,
    'praxis' => 'adam'
]);

$result = $lstm->train($sequences);

// Generate predictions
$testSequence = [[0.5], [0.6], [0.7], [0.8], [0.9]];
$predictions = $lstm->run($testSequence);

// Generate future values
$generated = $lstm->generate($testSequence, 10);

use GoLiveHost\Brain\GRU;

// Create and configure a GRU network for sequence data
$gru = new GRU([
    'inputSize' => 10,
    'hiddenSize' => 20,
    'outputSize' => 5,
    'learningRate' => 0.01,
    'activation' => 'tanh'
]);

// Train and run on sequence data prepared as in the LSTM example above
$result = $gru->train($sequences);
$output = $gru->run($inputSequence);

use GoLiveHost\Brain\NeuralNetworks\LiquidStateMachine;

// Create and configure a Liquid State Machine
$lsm = new LiquidStateMachine([
    'inputSize' => 3,
    'reservoirSize' => 100,
    'outputSize' => 2,
    'connectivity' => 0.1,
    'spectralRadius' => 0.9,
    'leakingRate' => 0.3,
    'regularization' => 0.001
]);

// Train and run on sequence data prepared as in the LSTM example above
$result = $lsm->train($sequences);
$outputs = $lsm->run($inputSequence);

use GoLiveHost\Brain\Utilities\CrossValidation;

// K-fold cross-validation
$results = CrossValidation::kFold($model, $data, 5);
echo "Average accuracy: " . $results['averageMetrics']['accuracy'] . "\n";

// Stratified k-fold for classification
$results = CrossValidation::stratifiedKFold(
    $model, 
    $data, 
    5, 
    function($item) { return $item['output'][0]; }
);

// Train/test split (here holding out 20% of the data as the test set)
$split = CrossValidation::trainTestSplit($data, 0.2);
$model->train($split['train']);
$testResults = CrossValidation::evaluateModel($model, $split['test']);

use GoLiveHost\Brain\Utilities\ModelCheckpoint;
use GoLiveHost\Brain\NeuralNetworks\NeuralNetwork;

$checkpoint = new ModelCheckpoint([
    'directory' => './checkpoints',
    'filePrefix' => 'my_model',
    'saveFrequency' => 100,
    'saveOnlyBest' => true,
    'monitorMetric' => 'error',
    'maxCheckpoints' => 5
]);

// During training loop
for ($epoch = 0; $epoch < 1000; $epoch++) {
    // ... training code ...
    
    $metrics = ['error' => $error, 'accuracy' => $accuracy];
    $checkpoint->save($model, $metrics, $epoch);
}

// Load the best model
$bestModel = $checkpoint->loadBest(NeuralNetwork::class);

use GoLiveHost\Brain\Layers\BatchNormalization;

$batchNorm = new BatchNormalization(100, [
    'epsilon' => 1e-5,
    'momentum' => 0.9
]);

// During training
$batchNorm->setTraining(true);
$normalized = $batchNorm->forward($batchData);

// During inference
$batchNorm->setTraining(false);
$output = $batchNorm->forward($input);

use GoLiveHost\Brain\Optimizers\Adam;
use GoLiveHost\Brain\Optimizers\RMSprop;
use GoLiveHost\Brain\Optimizers\AdaGrad;

// Adam optimizer
$adam = new Adam([
    'learningRate' => 0.001,
    'beta1' => 0.9,
    'beta2' => 0.999,
    'epsilon' => 1e-8
]);

// RMSprop optimizer
$rmsprop = new RMSprop([
    'learningRate' => 0.01,
    'decay' => 0.9,
    'epsilon' => 1e-8
]);

// AdaGrad optimizer
$adagrad = new AdaGrad([
    'learningRate' => 0.01,
    'epsilon' => 1e-8
]);

use GoLiveHost\Brain\Utilities\Normalizer;
use GoLiveHost\Brain\Utilities\DataFormatter;

// Normalization
$normalizer = new Normalizer();
$normalizer->fit($trainingData);
$normalizedData = $normalizer->transform($trainingData);

// Format data for sequences
$formatter = new DataFormatter();
$formattedSequences = $formatter->formatSequences($sequences);

use GoLiveHost\Brain\Utilities\ModelValidator;

// Validate neural network options
$validatedOptions = ModelValidator::validateNeuralNetworkOptions($options);

// Validate training data
ModelValidator::validateTrainingData($data);

// Validate sequence data
ModelValidator::validateTrainingData($sequences, true);

use GoLiveHost\Brain\Utilities\Matrix;

// Matrix multiplication
$result = Matrix::multiply($matrixA, $matrixB);

// Matrix addition
$sum = Matrix::add($matrixA, $matrixB);

// Matrix transpose
$transposed = Matrix::transpose($matrix);

// Element-wise multiplication
$hadamard = Matrix::elementMultiply($matrixA, $matrixB);

// Matrix inverse
$inverse = Matrix::inverse($matrix);

// Determinant
$det = Matrix::determinant($matrix);

use GoLiveHost\Brain\Utilities\Tensor;

// Apply function to each element
$doubled = Tensor::map($tensor, fn($x) => $x * 2);

// Tensor operations
$sum = Tensor::sum($tensor);
$mean = Tensor::mean($tensor);
$max = Tensor::max($tensor);
$min = Tensor::min($tensor);

// Reshape tensor
$reshaped = Tensor::reshape($tensor, [10, 10]);

// Export to standalone PHP class
$phpCode = $neuralNetwork->exportToPhp('MyNeuralNetwork');
file_put_contents('MyNeuralNetwork.php', $phpCode);

// Use the exported model
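// A minimal sketch; it assumes the generated MyNeuralNetwork class exposes a
// run() method mirroring the original network (check the exported file for the
// exact constructor and method signatures).
require_once 'MyNeuralNetwork.php';

$exported = new MyNeuralNetwork();
$prediction = $exported->run([1, 0]);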

use GoLiveHost\Brain\Exceptions\BrainException;
use GoLiveHost\Brain\NeuralNetworks\NeuralNetwork;

try {
    $model = new NeuralNetwork($options);
    $result = $model->train($data);
} catch (BrainException $e) {
    echo "Brain Error: " . $e->getMessage() . "\n";
}