public function testInput()
 {
     foreach ($this->network->getNodes($this->filterInput) as $neuron) {
        $this->assertEquals(0, $neuron->output());
     }
     $this->network->input([1])->output();
     foreach ($this->network->getNodes($this->filterInput) as $neuron) {
        $this->assertEquals(1, $neuron->output());
     }
 }
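/**
 * A possible companion test (a sketch, not part of the original suite). It
 * assumes input neurons simply report the most recent value passed to
 * input(), which is what testInput() above shows for the values 0 and 1.
 */
 public function testInputIsOverwrittenBySecondCall()
 {
     $this->network->input([1])->output();
     $this->network->input([0])->output();
     foreach ($this->network->getNodes($this->filterInput) as $neuron) {
         $this->assertEquals(0, $neuron->output());
     }
 }
/**
 * teachKit() runs the whole training kit until every sample is within $error
 * of its expectation; it returns the number of epochs used, or -1 if
 * $maxIterations is reached first.
 */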
 public function teachKit(array $kit, array $expectations, $error = 0.3, $maxIterations = 10000)
 {
      if (count($kit) != count($expectations)) {
          throw new \Exception("Kit and expectations quantities must be equal");
      }
     for ($i = 0; $i < $maxIterations; $i++) {
         $trueResults = 0;
         foreach ($expectations as $key => $expectation) {
             $result = $this->perceptron->input($kit[$key])->output();
              if (self::isTrueResult($result, $expectation, $error)) {
                  $trueResults++;
              } else {
                  $this->teach($kit[$key], $expectation);
              }
         }
         if ($trueResults == count($kit)) {
             return $i;
         }
     }
     return -1;
 }
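/**
 * A minimal sketch of the isTrueResult() helper that teachKit() relies on
 * (an assumption for illustration only; the library's own implementation may
 * differ). A result counts as correct when every output lies within $error of
 * the corresponding expectation.
 */
 protected static function isTrueResult(array $result, array $expectation, $error)
 {
     foreach ($expectation as $key => $expected) {
         if (abs($result[$key] - $expected) > $error) {
             return false;
         }
     }
     return true;
 }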
<?php

use Neural\BackpropagationTeacher;
use Neural\MultilayerPerceptron;
require_once '../vendor/autoload.php';
$p = new MultilayerPerceptron([4, 8, 5]);
$p->generateSynapses();
$t = new BackpropagationTeacher($p);
$startTime = microtime(true);
//Training kit: classify every 4-bit input by its number of set bits (one-hot encoded output)
$kit = [
    [0, 0, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0], [0, 1, 0, 0],
    [1, 0, 0, 0], [1, 0, 0, 1], [0, 1, 1, 0], [1, 1, 0, 0],
    [0, 0, 1, 1], [1, 0, 1, 0], [0, 1, 0, 1], [0, 1, 1, 1],
    [1, 0, 1, 1], [1, 1, 0, 1], [1, 1, 1, 0], [1, 1, 1, 1],
];
$expectations = [
    [1, 0, 0, 0, 0], [0, 1, 0, 0, 0], [0, 1, 0, 0, 0], [0, 1, 0, 0, 0],
    [0, 1, 0, 0, 0], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0],
    [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [0, 0, 0, 1, 0],
    [0, 0, 0, 1, 0], [0, 0, 0, 1, 0], [0, 0, 0, 1, 0], [0, 0, 0, 0, 1],
];
$epochs = $t->teachKit($kit, $expectations, 0.25);
$endTime = microtime(true);
echo 'Epochs: ' . $epochs . PHP_EOL;
echo 'Memory peak usage: ' . round(memory_get_peak_usage() / 1024, 1) . ' KB' . PHP_EOL;
//teachKit() returns -1 when the network did not converge, so guard the division
echo 'Seconds per epoch: ' . ($epochs > 0 ? round(($endTime - $startTime) / $epochs, 3) : 'n/a (did not converge)');
<?php

use Neural\Layer;
use Neural\MultilayerPerceptron;
use Neural\Node\Bias;
use Neural\Node\Input;
use Neural\Node\Neuron;
use Neural\Synapse;
require_once '../vendor/autoload.php';
$p = new MultilayerPerceptron([2, 2, 1]);
//Equivalent to:
$p = new MultilayerPerceptron();
$p->addLayer(new Layer())->toLastLayer()->addNode(new Input())->addNode(new Input())->addNode(new Bias());
$p->addLayer(new Layer())->toLastLayer()->addNode(new Neuron())->addNode(new Neuron())->addNode(new Bias());
$p->addLayer(new Layer())->toLastLayer()->addNode(new Neuron());
//Do not forget to add synapses:
$p->generateSynapses();
//Or you may direct the process:
$neuronFilter = function ($node) {
    return $node instanceof Neuron;
};
/** @var Neuron $secondLayerNeuron */
$secondLayerNeuron = iterator_to_array($p->getLayers()[1]->getNodes($neuronFilter))[0];
$input = iterator_to_array($p->getLayers()[0]->getNodes())[0];
$secondLayerNeuron->addSynapse(new Synapse($input));
//and so on...
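//A hedged sketch of how this manual wiring could be completed without
//generateSynapses(): connect every hidden-layer neuron to every node of the
//first layer, using only the getLayers()/getNodes()/addSynapse() calls shown
//above (running it after the single addSynapse() call above would duplicate
//that one connection).
$layers = $p->getLayers();
foreach ($layers[1]->getNodes($neuronFilter) as $neuron) {
    foreach ($layers[0]->getNodes() as $node) {
        $neuron->addSynapse(new Synapse($node));
    }
}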
<?php

use Neural\BackpropagationTeacher;
use Neural\MultilayerPerceptron;
require_once '../vendor/autoload.php';
$p = new MultilayerPerceptron([4, 4, 5]);
$p->generateSynapses();
$t = new BackpropagationTeacher($p);
//Training kit: classify every 4-bit input by its number of set bits (one-hot encoded output)
$kit = [
    [0, 0, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0], [0, 1, 0, 0],
    [1, 0, 0, 0], [1, 0, 0, 1], [0, 1, 1, 0], [1, 1, 0, 0],
    [0, 0, 1, 1], [1, 0, 1, 0], [0, 1, 0, 1], [0, 1, 1, 1],
    [1, 0, 1, 1], [1, 1, 0, 1], [1, 1, 1, 0], [1, 1, 1, 1],
];
$expectations = [
    [1, 0, 0, 0, 0], [0, 1, 0, 0, 0], [0, 1, 0, 0, 0], [0, 1, 0, 0, 0],
    [0, 1, 0, 0, 0], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0],
    [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [0, 0, 0, 1, 0],
    [0, 0, 0, 1, 0], [0, 0, 0, 1, 0], [0, 0, 0, 1, 0], [0, 0, 0, 0, 1],
];
//Prints the number of epochs needed, or -1 if the network did not converge
echo $t->teachKit($kit, $expectations, 0.25) . PHP_EOL;
$roundElements = function (&$r) {
    $r = round($r);
};
$test = [rand(0, 1), rand(0, 1), rand(0, 1), rand(0, 1)];
$result = $p->input($test)->output();
array_walk($result, $roundElements);
echo 'Result for [' . implode(', ', $test) . ']:' . PHP_EOL;
echo '[' . implode(', ', $result) . ']';
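//A small follow-up sketch: the training expectations one-hot encode the number
//of set bits, so instead of rounding each element you can take the index of
//the largest raw output as the predicted class.
$raw = $p->input($test)->output();
$predictedCount = array_search(max($raw), $raw);
echo PHP_EOL . 'Predicted number of set bits: ' . $predictedCount;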
<?php

use Neural\BackpropagationTeacher;
use Neural\MultilayerPerceptron;
require_once '../vendor/autoload.php';
//Create a neural network with 2 input neurons, one hidden layer with 2 neurons, and one output neuron:
$p = new MultilayerPerceptron([2, 2, 1]);
//You may add more hidden layers or neurons to layers: [2, 3, 2, 1]
//Automatically add synapses between the layers
$p->generateSynapses();
//Inspect the network before teaching (compare with the trace() call at the end)
$p->trace();
//Teacher with backpropagation algorithm
$t = new BackpropagationTeacher($p);
//Teach until it learns (error tolerance 0.3, at most 10000 epochs)
$learningResult = $t->teachKit([[1, 0], [0, 1], [1, 1], [0, 0]], [[1], [1], [0], [0]], 0.3, 10000);
if ($learningResult != -1) {
    echo '1,0: ' . round($p->input([1, 0])->output()[0]) . PHP_EOL;
    echo '0,1: ' . round($p->input([0, 1])->output()[0]) . PHP_EOL;
    echo '0,0: ' . round($p->input([0, 0])->output()[0]) . PHP_EOL;
    echo '1,1: ' . round($p->input([1, 1])->output()[0]) . PHP_EOL;
} else {
    echo 'The network did not converge within 10000 epochs.' . PHP_EOL;
}
$p->trace();
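//Optional: inspect the raw (unrounded) outputs as well, to see how close the
//trained network gets to the 0/1 targets; this uses only the input()/output()
//calls shown above.
foreach ([[1, 0], [0, 1], [1, 1], [0, 0]] as $pair) {
    echo 'raw ' . implode(',', $pair) . ': ' . round($p->input($pair)->output()[0], 4) . PHP_EOL;
}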