NeuralNetwork
  • Class
  • Tree

Classes

  • NetworkGraph
  • NeuralNetwork
  1 <?php
  2 /**
  3  * <b>Multi-layer Neural Network in PHP</b>
  4  * 
  5  * Loosely based on source code by {@link http://www.philbrierley.com Phil Brierley},
  6  * that was translated into PHP by 'dspink' in sep 2005
  7  * 
  8  * Algorithm was obtained from the excellent introductory book 
  9  * "{@link http://www.amazon.com/link/dp/0321204662 Artificial Intelligence - a guide to intelligent systems}"
 10  * by Michael Negnevitsky (ISBN 0-201-71159-1)
 11  *
 12  * <b>Example: learning the 'XOR'-function</b>
 13  * <code>
 14  * // Create a new neural network with 3 input neurons,
 15  * // 4 hidden neurons, and 1 output neuron
 16  * $n = new NeuralNetwork(3, 4, 1);
 17  * $n->setVerbose(false);
 18  * 
 19  * // Add test-data to the network. In this case,
 20  * // we want the network to learn the 'XOR'-function
 21  * $n->addTestData(array (-1, -1, 1), array (-1));
 22  * $n->addTestData(array (-1,  1, 1), array ( 1));
 23  * $n->addTestData(array ( 1, -1, 1), array ( 1));
 24  * $n->addTestData(array ( 1,  1, 1), array (-1));
 25  * 
 26  * // we try training the network for at most $max times
 27  * $i = 0; $max = 3;
 28  * 
 29  * // train the network in max 1000 epochs, with a max squared error of 0.01
 30  * while (!($success = $n->train(1000, 0.01)) && ++$i<$max) {
 31  *  echo "Round $i: No success...<hr />";
 32  * }
 33  * 
 34  * // print a message if the network was successfully trained
 35  * if ($success) {
 36  *     $epochs = $n->getEpoch();
 37  *     echo "Success in $epochs training rounds!<hr />";
 38  * }
 39  * 
 40  * // in any case, we print the output of the neural network
 41  * echo "<h2>End result</h2>";
 42  * for ($i = 0; $i < count($n->trainInputs); $i ++) {
 43  *     $output = $n->calculate($n->trainInputs[$i]);
 44  *     echo "<br />Testset $i; ";
 45  *     echo "expected output = (".implode(", ", $n->trainOutput[$i]).") ";
 46  *     echo "output from neural network = (".implode(", ", $output).")\n";
 47  * }
 48  * </code>
 49  * 
 50  * The resulting output could for example be something along the following lines:
 51  * 
 52  * <code>
 53  * Success in 719 training rounds!
 54  * Testset 0; expected output = (-1) output from neural network = (-0.986415991978)
 55  * Testset 1; expected output = (1) output from neural network = (0.992121412998)
 56  * Testset 2; expected output = (1) output from neural network = (0.992469534962)
 57  * Testset 3; expected output = (-1) output from neural network = (-0.990224120384)
 58  * </code>
 59  * 
 60  * ...which indicates the network has learned the task. 
 61  *  
 62  * @author E. Akerboom
 63  * @author {@link http://www.tremani.nl/ Tremani}, {@link http://maps.google.com/maps?f=q&hl=en&q=delft%2C+the+netherlands&ie=UTF8&t=k&om=1&ll=53.014783%2C4.921875&spn=36.882665%2C110.566406&z=4 Delft}, The Netherlands
 64  * @since feb 2007
 65  * @version 1.1
 66  * @license http://opensource.org/licenses/bsd-license.php BSD License
 67  */
 68 class NeuralNetwork {
    // Number of nodes per layer; index 0 is the input layer.
    protected $nodeCount = array ();
    // Last computed activation per [layer][node], filled by calculate().
    protected $nodeValue = array ();
    // Threshold (bias) per [layer][node], randomised by initWeights().
    protected $nodeThreshold = array ();
    // Connection weights, indexed as [layer][nodeInLayer][nodeInNextLayer].
    protected $edgeWeight = array ();
    // Learning rate per layer; index 0 doubles as the network-wide default.
    protected $learningRate = array (0.1);
    // Total number of layers (input + hidden + output).
    protected $layerCount = 0;
    // Weight corrections from the previous backpropagation step (momentum term).
    protected $previousWeightCorrection = array ();
    // Momentum factor [0.0 - 1.0] used by the backpropagation algorithm.
    protected $momentum = 0.8;
    // When true, train() and showWeights() emit HTML progress output.
    protected $isVerbose = true;
    // Guards against re-randomising weights on repeated train() calls.
    protected $weightsInitialized = false;

    // Training set: input vectors, desired outputs, and optional identifiers.
    public $trainInputs = array ();
    public $trainOutput = array ();
    public $trainDataID = array ();

    // Control set (used to detect overlearning; never trained on).
    public $controlInputs = array ();
    public $controlOutput = array ();
    public $controlDataID = array ();

    // Statistics of the most recent train() run.
    protected $epoch;
    protected $errorTrainingset;
    protected $errorControlset;
    protected $success;
 92 
 93     /**
 94      * Creates a neural network.
 95      * 
 96      * Example:
 97      * <code>
 98      * // create a network with 4 input nodes, 10 hidden nodes, and 4 output nodes
 99      * $n = new NeuralNetwork(4, 10, 4);
100      * 
101      * // create a network with 4 input nodes, 1 hidden layer with 10 nodes, 
102      * // another hidden layer with 10 nodes, and 4 output nodes
103      * $n = new NeuralNetwork(4, 10, 10, 4); 
104      * 
105      * // alternative syntax
106      * $n = new NeuralNetwork(array(4, 10, 10, 4));
107      * </code>
108      * 
109      * @param array $nodeCount The number of nodes in the consecutive layers.
110      */
111     public function __construct($nodeCount) {
112         require_once("NetworkGraph.php"); // Import NeworkGraph
113 
114         if (!is_array($nodeCount)) {
115             $nodeCount = func_get_args();
116         }
117         $this->nodeCount = $nodeCount;
118 
119         // store the number of layers
120         $this->layerCount = count($this->nodeCount);
121     }
122 
123     /**
124      * Exports the neural network
125      * 
126      * @returns array
127      */
128     public function export()
129     {
130         return array(
131             'layerCount' => $this->layerCount,
132             'nodeCount' => $this->nodeCount,
133             'edgeWeight' => $this->edgeWeight,
134             'nodeThreshold' => $this->nodeThreshold,
135             'learningRate' => $this->learningrate,
136             'momentum' => $this->momentum,
137             'isVerbose' => $this->isVerbose,
138             'weightsInitialized' => $this->weightsInitialized,
139         );
140     }
141 
142     /**
143      * Import a neural network
144      * @param array $nn_array An array of the neural network parameters
145      */
146     public function import($nn_array)
147     {
148         foreach ($nn_array as $key => $value)
149         {
150             $this->$key = $value;
151         }
152         return $this;
153     }
154 
155     /**
156      * Sets the learning rate between the different layers. 
157      *
158      * @param array $learningRate An array containing the learning rates [range 0.0 - 1.0]. 
159      * The size of this array is 'layerCount - 1'. You might also provide a single number. If that is
160      * the case, then this will be the learning rate for the whole network.
161      */
162     public function setLearningRate($learningRate) {
163         if (!is_array($learningRate)) {
164             $learningRate = func_get_args();
165         }
166 
167         $this->learningRate = $learningRate;
168     }
169 
170     /**
171      * Gets the learning rate for a specific layer
172      * 
173      * @param int $layer The layer to obtain the learning rate for
174      * @return float The learning rate for that layer
175      */
176     public function getLearningRate($layer) {
177         if (array_key_exists($layer, $this->learningRate)) {
178             return $this->learningRate[$layer];
179         }
180         return $this->learningRate[0];
181     }
182 
183     /**
184      * Sets the 'momentum' for the learning algorithm. The momentum should 
185      * accelerate the learning process and help avoid local minima.
186      * 
187      * @param float $momentum The momentum. Must be between 0.0 and 1.0; Usually between 0.5 and 0.9
188      */
189     public function setMomentum($momentum) {
190         $this->momentum = $momentum;
191     }
192 
193     /**
194      * Gets the momentum.
195      * 
196      * @return float The momentum
197      */
198     public function getMomentum() {
199         return $this->momentum;
200     }
201 
202     /**
203      * Calculate the output of the neural network for a given input vector
204      * 
205      * @param array $input The vector to calculate
206      * @return mixed The output of the network
207      */
208     public function calculate($input) {
209 
210         // put the input vector on the input nodes
211         foreach ($input as $index => $value) {
212             $this->nodeValue[0][$index] = $value;
213         }
214 
215         // iterate the hidden layers
216         for ($layer = 1; $layer < $this->layerCount; $layer ++) {
217 
218             $prev_layer = $layer -1;
219 
220             // iterate each node in this layer
221             for ($node = 0; $node < ($this->nodeCount[$layer]); $node ++) {
222                 $node_value = 0.0;
223 
224                 // each node in the previous layer has a connection to this node
225                 // on basis of this, calculate this node's value
226                 for ($prev_node = 0; $prev_node < ($this->nodeCount[$prev_layer]); $prev_node ++) {
227                     $inputnode_value = $this->nodeValue[$prev_layer][$prev_node];
228                     $edge_weight = $this->edgeWeight[$prev_layer][$prev_node][$node];
229 
230                     $node_value = $node_value + ($inputnode_value * $edge_weight);
231                 }
232 
233                 // apply the threshold
234                 $node_value = $node_value - $this->nodeThreshold[$layer][$node];
235 
236                 // apply the activation function
237                 $node_value = $this->activation($node_value);
238 
239                 // remember the outcome
240                 $this->nodeValue[$layer][$node] = $node_value;
241             }
242         }
243 
244         // return the values of the last layer (the output layer)
245         return $this->nodeValue[$this->layerCount - 1];
246     }
247 
248     /**
249      * Implements the standard (default) activation function for backpropagation networks, 
250      * the 'tanh' activation function.
251      * 
252      * @param float $value The preliminary output to apply this function to
253      * @return float The final output of the node
254      */
255     protected function activation($value) {
256         return tanh($value);
257         // return (1.0 / (1.0 + exp(- $value)));
258     }
259 
260     /**
261      * Implements the derivative of the activation function. By default, this is the 
262      * inverse of the 'tanh' activation function: 1.0 - tanh($value)*tanh($value);
263      * 
264      * @param float $value 'X'
265      * @return $float 
266      */
267     protected function derivativeActivation($value) {
268         $tanh = tanh($value);
269         return 1.0 - $tanh * $tanh;
270         //return $value * (1.0 - $value);
271     }
272 
273     /**
274      * Calculates the parameters for the NetworkGraph and returns it
275      * 
276      * @return NetworkGraph NetworkGraph class
277      */
278     public function networkgraph() {
279         $intNumberInputs = $this->nodeCount[0];
280         $intNumberHiddenLayers = $this->layerCount - 2;
281         $intNumberNeuronsOfHiddenLayer = $this->nodeCount[1];
282         $intNumberOfOutputs = $this->nodeCount[count($this->nodeCount) - 1];
283         return new NetworkGraph($intNumberInputs, $intNumberHiddenLayers, $intNumberNeuronsOfHiddenLayer, $intNumberOfOutputs);
284     }
285 
286     /**
287      * Add a test vector and its output
288      * 
289      * @param array $input An input vector
290      * @param array $output The corresponding output
291      * @param int $id (optional) An identifier for this piece of data
292      */
293     public function addTestData($input, $output, $id = null) {
294         $index = count($this->trainInputs);
295         foreach ($input as $node => $value) {
296             $this->trainInputs[$index][$node] = $value;
297         }
298 
299         foreach ($output as $node => $value) {
300             $this->trainOutput[$index][$node] = $value;
301         }
302 
303         $this->trainDataID[$index] = $id;
304     }
305 
306     /**
307      * Returns the identifiers of the data used to train the network (if available)
308      * 
309      * @return array An array of identifiers
310      */
311     public function getTestDataIDs() {
312         return $this->trainDataID;
313     }
314 
315     /**
316      * Add a set of control data to the network. 
317      * 
318      * This set of data is used to prevent 'overlearning' of the network. The 
319      * network will stop training if the results obtained for the control data 
320      * are worsening.
321      * 
322      * The data added as control data is not used for training.
323      * 
324      * @param array $input An input vector
325      * @param array $output The corresponding output
326      * @param int $id (optional) An identifier for this piece of data
327      */
328     public function addControlData($input, $output, $id = null) {
329         $index = count($this->controlInputs);
330         foreach ($input as $node => $value) {
331             $this->controlInputs[$index][$node] = $value;
332         }
333 
334         foreach ($output as $node => $value) {
335             $this->controlOutput[$index][$node] = $value;
336         }
337 
338         $this->controlDataID[$index] = $id;
339     }
340 
341     /**
342      * Returns the identifiers of the control data used during the training 
343      * of the network (if available)
344      * 
345      * @return array An array of identifiers
346      */
347     public function getControlDataIDs() {
348         return $this->controlDataID;
349     }
350 
351     /**
352      * Shows the current weights and thresholds
353      * 
354      * @param boolean $force Force the output, even if the network is {@link setVerbose() not verbose}. 
355      */
356     public function showWeights($force = false) {
357         if ($this->isVerbose() || $force) {
358             echo "<hr>";
359             echo "<br />Weights: <pre>".print_r($this->edgeWeight, true)."</pre>";
360             echo "<br />Thresholds: <pre>".print_r($this->nodeThreshold, true)."</pre>";
361         }
362     }
363 
364     /**
365      * Determines if the neural network displays status and error messages. By default, it does.
366      * 
367      * @param boolean $isVerbose 'true' if you want to display status and error messages, 'false' if you don't
368      */
369     public function setVerbose($isVerbose) {
370         $this->isVerbose = $isVerbose;
371     }
372 
373     /**
374      * Returns whether or not the network displays status and error messages.
375      * 
376      * @return boolean 'true' if status and error messages are displayed, 'false' otherwise
377      */
378     public function isVerbose() {
379         return $this->isVerbose;
380     }
381 
    /**
     * Loads a neural network from a file saved by the 'save()' function. Clears
     * the training and control data added so far.
     *
     * NOTE(review): the weights, thresholds and IDs are read back through
     * unserialize(); unserialize() on untrusted data is unsafe, so only
     * load files written by save() or another trusted source.
     *
     * @param string $filename The filename to load the network from
     * @return boolean 'true' on success, 'false' otherwise
     */
    public function load($filename) {
        if (file_exists($filename)) {
            // save() writes an INI file with sections [weights] and [identifiers]
            $data = parse_ini_file($filename);
            if (array_key_exists("edges", $data) && array_key_exists("thresholds", $data)) {
                // make sure all standard preparations performed
                $this->initWeights();

                // load data from file
                $this->edgeWeight = unserialize($data['edges']);
                $this->nodeThreshold = unserialize($data['thresholds']);

                // skip randomisation on the next train() call
                $this->weightsInitialized = true;

                // load IDs of training and control set
                if (array_key_exists("training_data", $data) && array_key_exists("control_data", $data)) {

                    // load the IDs
                    $this->trainDataID = unserialize($data['training_data']);
                    $this->controlDataID = unserialize($data['control_data']);

                    // if we do not reset the training and control data here, then we end up
                    // with a bunch of IDs that do not refer to the actual data we're training
                    // the network with.
                    $this->controlInputs = array ();
                    $this->controlOutput = array ();

                    $this->trainInputs = array ();
                    $this->trainOutput = array ();
                }

                return true;
            }
        }

        return false;
    }
425 
426     /**
427      * Saves a neural network to a file
428      * 
429      * @param string $filename The filename to save the neural network to
430      * @return boolean 'true' on success, 'false' otherwise
431      */
432     public function save($filename) {
433         $f = fopen($filename, "w");
434         if ($f) {
435             fwrite($f, "[weights]");
436             fwrite($f, "\r\nedges = \"".serialize($this->edgeWeight)."\"");
437             fwrite($f, "\r\nthresholds = \"".serialize($this->nodeThreshold)."\"");
438             fwrite($f, "\r\n");
439             fwrite($f, "[identifiers]");
440             fwrite($f, "\r\ntraining_data = \"".serialize($this->trainDataID)."\"");
441             fwrite($f, "\r\ncontrol_data = \"".serialize($this->controlDataID)."\"");
442             fclose($f);
443 
444             return true;
445         }
446 
447         return false;
448     }
449     
450     /**
451      * Resets the state of the neural network, so it is ready for a new 
452      * round of training.
453      */
454     public function clear() {
455         $this->initWeights();
456     }
457 
    /**
     * Start the training process.
     *
     * Runs backpropagation epochs until one of three stop conditions holds:
     * the squared error drops below $maxError (success), the number of
     * epochs exceeds $maxEpochs, or the control-set error starts rising
     * (overlearning). The results are stored and can be retrieved through
     * getEpoch(), getErrorTrainingSet(), getErrorControlSet() and
     * getTrainingSuccessful(). When verbose, emits an HTML progress table.
     *
     * @param int $maxEpochs The maximum number of epochs
     * @param float $maxError The maximum squared error in the training data
     * @return bool 'true' if the training was successful, 'false' otherwise
     */
    public function train($maxEpochs = 500, $maxError = 0.01) {

        // randomise the network once, unless weights were loaded/imported
        if (!$this->weightsInitialized) {
            $this->initWeights();
        }

        if ($this->isVerbose()) {
            echo "<table>";
            echo "<tr><th>#</th><th>error(trainingdata)</th><th>error(controldata)</th><th>slope(error(controldata))</th></tr>";
        }

        $epoch = 0;
        $errorControlSet = array ();
        $avgErrorControlSet = array ();
        $sample_count = 10;
        do {
            // one epoch: present as many randomly chosen patterns as there
            // are training samples
            for ($i = 0; $i < count($this->trainInputs); $i ++) {
                // select a training pattern at random
                $index = mt_rand(0, count($this->trainInputs) - 1);

                // determine the input, and the desired output
                $input = $this->trainInputs[$index];
                $desired_output = $this->trainOutput[$index];

                // calculate the actual output
                $output = $this->calculate($input);

                // change network weights
                $this->backpropagate($output, $desired_output);
            }

            // buy some time
            set_time_limit(300);

            //display the overall network error after each epoch
            $squaredError = $this->squaredErrorEpoch();
            // the control set is only evaluated every other epoch; on odd
            // epochs, $slope and $squaredErrorControlSet keep their values
            // from the previous even epoch
            if ($epoch % 2 == 0) {
                $squaredErrorControlSet = $this->squaredErrorControlSet();
                $errorControlSet[] = $squaredErrorControlSet;

                // smooth the control-set error over the last $sample_count samples
                if (count($errorControlSet) > $sample_count) {
                    $avgErrorControlSet[] = array_sum(array_slice($errorControlSet, -$sample_count)) / $sample_count;
                }

                // fit a line through the averaged errors; a positive slope
                // means the control-set error is rising (overlearning)
                list ($slope, $offset) = $this->fitLine($avgErrorControlSet);
                $controlset_msg = $squaredErrorControlSet;
            } else {
                $controlset_msg = "";
            }

            if ($this->isVerbose()) {
                echo "<tr><td><b>$epoch</b></td><td>$squaredError</td><td>$controlset_msg";
                echo "<script type='text/javascript'>window.scrollBy(0,100);</script>";
                echo "</td><td>$slope</td></tr>";
                echo "</td></tr>";

                flush();
                ob_flush();
            }

            // conditions for a 'successful' stop:
            // 1. the squared error is now lower than the provided maximum error
            $stop_1 = $squaredError <= $maxError || $squaredErrorControlSet <= $maxError;

            // conditions for an 'unsuccessful' stop
            // 1. the maximum number of epochs has been reached
            $stop_2 = $epoch ++ > $maxEpochs;

            // 2. the network's performance on the control data is getting worse
            $stop_3 = $slope > 0;

        } while (!$stop_1 && !$stop_2 && !$stop_3);

        // record the outcome of this training run
        $this->setEpoch($epoch);
        $this->setErrorTrainingSet($squaredError);
        $this->setErrorControlSet($squaredErrorControlSet);
        $this->setTrainingSuccessful($stop_1);

        if ($this->isVerbose()) {
            echo "</table>";
        }

        return $stop_1;
    }
554 
555     /**
556      * After training, this function is used to store the number of epochs the network 
557      * needed for training the network. An epoch is defined as the number of times 
558      * the complete trainingset is used for training.
559      * 
560      * @param int $epoch 
561      */
562     private function setEpoch($epoch) {
563         $this->epoch = $epoch;
564     }
565 
566     /**
567      * Gets the number of epochs the network needed for training.
568      * 
569      * @return int The number of epochs.
570      */
571     public function getEpoch() {
572         return $this->epoch;
573     }
574 
575     /**
576      * After training, this function is used to store the squared error between the
577      * desired output and the obtained output of the training data.
578      * 
579      * @param float $error The squared error of the training data
580      */
581     private function setErrorTrainingSet($error) {
582         $this->errorTrainingset = $error;
583     }
584 
585     /**
586      * Gets the squared error between the desired output and the obtained output of 
587      * the training data.
588      * 
589      * @return float The squared error of the training data
590      */
591     public function getErrorTrainingSet() {
592         return $this->errorTrainingset;
593     }
594 
595     /**
596      * After training, this function is used to store the squared error between the
597      * desired output and the obtained output of the control data.
598      * 
599      * @param float $error The squared error of the control data
600      */
601     private function setErrorControlSet($error) {
602         $this->errorControlset = $error;
603     }
604 
605     /**
606      * Gets the squared error between the desired output and the obtained output of 
607      * the control data.
608      * 
609      * @return float The squared error of the control data
610      */
611     public function getErrorControlSet() {
612         return $this->errorControlset;
613     }
614 
615     /**
616      * After training, this function is used to store whether or not the training
617      * was successful.
618      * 
619      * @param bool $success 'true' if the training was successful, 'false' otherwise
620      */
621     private function setTrainingSuccessful($success) {
622         $this->success = $success;
623     }
624 
625     /**
626      * Determines if the training was successful.
627      * 
628      * @return bool 'true' if the training was successful, 'false' otherwise
629      */
630     public function getTrainingSuccessful() {
631         return $this->success;
632     }
633 
634     /**
635      * Finds the least square fitting line for the given data. 
636      * 
637      * This function is used to determine if the network is overtraining itself. If 
638      * the line through the controlset's most recent squared errors is going 'up', 
639      * then it's time to stop training.
640      * 
641      * @param array $data The points to fit a line to. The keys of this array represent 
642      *                    the 'x'-value of the point, the corresponding value is the 
643      *                    'y'-value of the point.
644      * @return array An array containing, respectively, the slope and the offset of the fitted line.
645      */
646     private function fitLine($data) {
647         // based on 
648         //    http://mathworld.wolfram.com/LeastSquaresFitting.html
649 
650         $n = count($data);
651 
652         if ($n > 1) {
653             $sum_y = 0;
654             $sum_x = 0;
655             $sum_x2 = 0;
656             $sum_xy = 0;
657             foreach ($data as $x => $y) {
658                 $sum_x += $x;
659                 $sum_y += $y;
660                 $sum_x2 += $x * $x;
661                 $sum_xy += $x * $y;
662             }
663 
664             // implementation of formula (12)
665             $offset = ($sum_y * $sum_x2 - $sum_x * $sum_xy) / ($n * $sum_x2 - $sum_x * $sum_x);
666 
667             // implementation of formula (13)
668             $slope = ($n * $sum_xy - $sum_x * $sum_y) / ($n * $sum_x2 - $sum_x * $sum_x);
669 
670             return array ($slope, $offset);
671         } else {
672             return array (0.0, 0.0);
673         }
674     }
675 
676     /**
677      * Gets a random weight between [-0.25 .. 0.25]. Used to initialize the network.
678      * 
679      * @return float A random weight
680      */
681     private function getRandomWeight($layer) {
682         return ((mt_rand(0, 1000) / 1000) - 0.5) / 2;
683     }
684 
685     /**
686      * Randomise the weights in the neural network
687      */
688     function initWeights() {
689         // assign a random value to each edge between the layers, and randomise each threshold
690         //
691         // 1. start at layer '1' (so skip the input layer)
692         for ($layer = 1; $layer < $this->layerCount; $layer ++) {
693 
694             $prev_layer = $layer -1;
695 
696             // 2. in this layer, walk each node
697             for ($node = 0; $node < $this->nodeCount[$layer]; $node ++) {
698 
699                 // 3. randomise this node's threshold
700                 $this->nodeThreshold[$layer][$node] = $this->getRandomWeight($layer);
701 
702                 // 4. this node is connected to each node of the previous layer
703                 for ($prev_index = 0; $prev_index < $this->nodeCount[$prev_layer]; $prev_index ++) {
704 
705                     // 5. this is the 'edge' that needs to be reset / initialised
706                     $this->edgeWeight[$prev_layer][$prev_index][$node] = $this->getRandomWeight($prev_layer);
707 
708                     // 6. initialize the 'previous weightcorrection' at 0.0
709                     $this->previousWeightCorrection[$prev_layer][$prev_index] = 0.0;
710                 }
711             }
712         }
713     }
714 
    /**
     * Performs the backpropagation algorithm. This changes the weights and
     * thresholds of the network.
     *
     * NOTE(review): the derivative is applied to post-activation values
     * ($output[$node] / $this->nodeValue[$layer][$node]), not to the node's
     * net input — confirm against the intended gradient before relying on
     * exact gradient values; the network does converge in practice.
     *
     * @param array $output The output obtained by the network
     * @param array $desired_output The desired output
     */
    private function backpropagate($output, $desired_output) {

        $errorgradient = array ();
        $outputlayer = $this->layerCount - 1;

        $momentum = $this->getMomentum();

        // Propagate the difference between output and desired output through the layers.
        for ($layer = $this->layerCount - 1; $layer > 0; $layer --) {
            for ($node = 0; $node < $this->nodeCount[$layer]; $node ++) {

                // step 1: determine errorgradient
                if ($layer == $outputlayer) {
                    // for the output layer:
                    // 1a. calculate error between desired output and actual output
                    $error = $desired_output[$node] - $output[$node];

                    // 1b. calculate errorgradient
                    $errorgradient[$layer][$node] = $this->derivativeActivation($output[$node]) * $error;
                } else {
                    // for hidden layers:
                    // 1a. sum the product of edgeWeight and errorgradient of the 'next' layer
                    $next_layer = $layer +1;

                    $productsum = 0;
                    for ($next_index = 0; $next_index < ($this->nodeCount[$next_layer]); $next_index ++) {
                        $_errorgradient = $errorgradient[$next_layer][$next_index];
                        $_edgeWeight = $this->edgeWeight[$layer][$node][$next_index];

                        $productsum = $productsum + $_errorgradient * $_edgeWeight;
                    }

                    // 1b. calculate errorgradient
                    $nodeValue = $this->nodeValue[$layer][$node];
                    $errorgradient[$layer][$node] = $this->derivativeActivation($nodeValue) * $productsum;
                }

                // step 2: use the errorgradient to determine a weight correction for each node
                $prev_layer = $layer -1;
                $learning_rate = $this->getlearningRate($prev_layer);

                for ($prev_index = 0; $prev_index < ($this->nodeCount[$prev_layer]); $prev_index ++) {

                    // 2a. obtain nodeValue, edgeWeight and learning rate
                    $nodeValue = $this->nodeValue[$prev_layer][$prev_index];
                    $edgeWeight = $this->edgeWeight[$prev_layer][$prev_index][$node];

                    // 2b. calculate weight correction
                    $weight_correction = $learning_rate * $nodeValue * $errorgradient[$layer][$node];

                    // 2c. retrieve previous weight correction
                    // NOTE(review): the '@' suppresses an undefined-index notice on
                    // first use — initWeights() fills previousWeightCorrection as
                    // [$prev_layer][$prev_index], not [$layer][$node] as read here
                    $prev_weightcorrection = @$this->previousWeightCorrection[$layer][$node];

                    // 2d. combine those ('momentum learning') to a new weight
                    $new_weight = $edgeWeight + $weight_correction + $momentum * $prev_weightcorrection;

                    // 2e. assign the new weight to this edge
                    $this->edgeWeight[$prev_layer][$prev_index][$node] = $new_weight;

                    // 2f. remember this weightcorrection
                    $this->previousWeightCorrection[$layer][$node] = $weight_correction;
                }

                // step 3: use the errorgradient to determine threshold correction
                $threshold_correction = $learning_rate * -1 * $errorgradient[$layer][$node];
                $new_threshold = $this->nodeThreshold[$layer][$node] + $threshold_correction;

                $this->nodeThreshold[$layer][$node] = $new_threshold;
            }
        }
    }
792 
793     /**
794      * Calculate the root-mean-squared error of the output, given the
795      * trainingdata.
796      * 
797      * @return float The root-mean-squared error of the output
798      */
799     private function squaredErrorEpoch() {
800         $RMSerror = 0.0;
801         for ($i = 0; $i < count($this->trainInputs); $i ++) {
802             $RMSerror += $this->squaredError($this->trainInputs[$i], $this->trainOutput[$i]);
803         }
804         $RMSerror = $RMSerror / count($this->trainInputs);
805 
806         return sqrt($RMSerror);
807     }
808 
809     /**
810      * Calculate the root-mean-squared error of the output, given the
811      * controldata.
812      * 
813      * @return float The root-mean-squared error of the output
814      */
815     private function squaredErrorControlSet() {
816 
817         if (count($this->controlInputs) == 0) {
818             return 1.0;
819         }
820 
821         $RMSerror = 0.0;
822         for ($i = 0; $i < count($this->controlInputs); $i ++) {
823             $RMSerror += $this->squaredError($this->controlInputs[$i], $this->controlOutput[$i]);
824         }
825         $RMSerror = $RMSerror / count($this->controlInputs);
826 
827         return sqrt($RMSerror);
828     }
829 
830     /**
831      * Calculate the root-mean-squared error of the output, given the
832      * desired output.
833      * 
834      * @param array $input The input to test
835      * @param array $desired_output The desired output
836      * @return float The root-mean-squared error of the output compared to the desired output
837      */
838     private function squaredError($input, $desired_output) {
839         $output = $this->calculate($input);
840 
841         $RMSerror = 0.0;
842         foreach ($output as $node => $value) {
843             //calculate the error
844             $error = $output[$node] - $desired_output[$node];
845 
846             $RMSerror = $RMSerror + ($error * $error);
847         }
848 
849         return $RMSerror;
850     }
851 }
852 ?>
853 
NeuralNetwork API documentation generated by ApiGen