/**
 * Verifies that input values propagate to the input-layer neurons.
 *
 * Before any input is applied, every input neuron must output 0; after
 * feeding [1] through the network, each input neuron must output the
 * value it received.
 */
public function testInput()
{
    // All input neurons start with a zero output.
    foreach ($this->network->getNodes($this->filterInput) as $neuron) {
        // Fixed argument order: PHPUnit expects assertEquals($expected, $actual),
        // otherwise failure messages report the values the wrong way round.
        $this->assertEquals(0, $neuron->output());
    }

    // Push one value through the network, then re-check the input layer.
    $this->network->input([1])->output();

    foreach ($this->network->getNodes($this->filterInput) as $neuron) {
        $this->assertEquals(1, $neuron->output());
    }
}
/**
 * Trains the perceptron on a full kit of samples until every sample is
 * answered within the allowed error, or the iteration budget runs out.
 *
 * @param array $kit           Training inputs, one entry per sample.
 * @param array $expectations  Expected outputs, keyed identically to $kit.
 * @param float $error         Maximum tolerated deviation per output value.
 * @param int   $maxIterations Upper bound on full passes over the kit.
 *
 * @return int Number of epochs needed to converge, or -1 when the network
 *             failed to learn the kit within $maxIterations (sentinel kept
 *             for backward compatibility with existing callers).
 *
 * @throws \InvalidArgumentException When kit and expectations differ in size.
 */
public function teachKit(array $kit, array $expectations, $error = 0.3, $maxIterations = 10000)
{
    // Count once; the kit does not change during training.
    $sampleCount = count($kit);
    if ($sampleCount !== count($expectations)) {
        // Argument-validation failure: use the dedicated SPL exception
        // (subclass of Exception, so existing catch blocks still match).
        throw new \InvalidArgumentException("Kit and expectations quantities must be equals");
    }

    for ($i = 0; $i < $maxIterations; $i++) {
        $trueResults = 0;
        foreach ($expectations as $key => $expectation) {
            $result = $this->perceptron->input($kit[$key])->output();
            if (self::isTrueResult($result, $expectation, $error)) {
                $trueResults++;
            } else {
                // Wrong answer: run one training step on this sample.
                $this->teach($kit[$key], $expectation);
            }
        }

        // Every sample answered within tolerance — converged after $i epochs.
        if ($trueResults === $sampleCount) {
            return $i;
        }
    }

    return -1;
}
<?php

use Neural\BackpropagationTeacher;
use Neural\MultilayerPerceptron;

require_once '../vendor/autoload.php';

// Network: 4 inputs, one hidden layer of 4 neurons, 5 outputs
// (a one-hot encoding of how many bits are set in the input).
$perceptron = new MultilayerPerceptron([4, 4, 5]);
$perceptron->generateSynapses();

$teacher = new BackpropagationTeacher($perceptron);

// Every 4-bit combination, paired with the one-hot vector of its bit count.
$samples = [
    [0, 0, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0], [0, 1, 0, 0],
    [1, 0, 0, 0], [1, 0, 0, 1], [0, 1, 1, 0], [1, 1, 0, 0],
    [0, 0, 1, 1], [1, 0, 1, 0], [0, 1, 0, 1], [0, 1, 1, 1],
    [1, 0, 1, 1], [1, 1, 0, 1], [1, 1, 1, 0], [1, 1, 1, 1],
];
$targets = [
    [1, 0, 0, 0, 0], [0, 1, 0, 0, 0], [0, 1, 0, 0, 0], [0, 1, 0, 0, 0],
    [0, 1, 0, 0, 0], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0],
    [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [0, 0, 0, 1, 0],
    [0, 0, 0, 1, 0], [0, 0, 0, 1, 0], [0, 0, 0, 1, 0], [0, 0, 0, 0, 1],
];

// Train until convergence (prints the epoch count, or -1 on failure).
echo $teacher->teachKit($samples, $targets, 0.25) . PHP_EOL;

// Probe the trained network with one random input and show the
// rounded response vector.
$probe = [rand(0, 1), rand(0, 1), rand(0, 1), rand(0, 1)];
$response = array_map(function ($value) {
    return round($value);
}, $perceptron->input($probe)->output());

echo 'Result for [' . implode(', ', $probe) . ']:' . PHP_EOL;
echo '[' . implode(', ', $response) . ']';
<?php

use Neural\BackpropagationTeacher;
use Neural\MultilayerPerceptron;

require_once '../vendor/autoload.php';

// Network layout: 2 input neurons, one hidden layer of 2 neurons, 1 output.
// More hidden layers or neurons per layer are possible, e.g. [2, 3, 2, 1].
$perceptron = new MultilayerPerceptron([2, 2, 1]);
$perceptron->generateSynapses(); // wire up all synapses automatically
$perceptron->trace();

// Teacher using the backpropagation algorithm; train XOR until it
// converges (returns the epoch count) or gives up (returns -1).
$teacher = new BackpropagationTeacher($perceptron);
$epochs = $teacher->teachKit(
    [[1, 0], [0, 1], [1, 1], [0, 0]],
    [[1], [1], [0], [0]],
    0.3,
    10000
);

if ($epochs != -1) {
    // Print the rounded network answer for each input pair.
    foreach ([[1, 0], [0, 1], [0, 0], [1, 1]] as $pair) {
        echo implode(',', $pair) . ': ' . round($perceptron->input($pair)->output()[0]) . PHP_EOL;
    }
}

$perceptron->trace();