A simple neural network implemented in pure PHP, translated from my university classwork on neural networks (originally written in Python). It can be live-tested on https://3v4l.org (using PHP 8.1.16).
<?php

declare(strict_types=1);

final class Matrix{
    /**
     * Equivalent of numpy.random.random(size=(size_x, size_y)):
     * a matrix of uniformly distributed random floats in [0, 1].
     *
     * @param int $size_x number of rows
     * @param int $size_y number of columns
     * @return self
     */
    public static function random(int $size_x, int $size_y) : self{
        $result = array_fill(0, $size_x, []);
        for($i = 0; $i < $size_x; $i++){
            for($j = 0; $j < $size_y; $j++){
                $result[$i][$j] = mt_rand(0, 100_000_000) * 1e-8;
            }
        }
        return new self($result, $size_x, $size_y);
    }
    /**
     * @param list<list<float>> $values
     * @return self
     */
    public static function from(array $values) : self{
        return new self($values, count($values), count($values[0]) /* TODO: validate column counts */);
    }
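    /**
     * Equivalent of numpy.full((rows, columns), n): a matrix with every entry set to $n.
     *
     * @return self
     */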
    public static function n(int|float $n, int $rows, int $columns) : self{
        $values = array_fill(0, $rows, array_fill(0, $columns, $n));
        return new self($values, $rows, $columns);
    }
    /**
     * @param list<list<float>> $values
     * @param int $rows
     * @param int $columns
     */
    private function __construct(
        public array $values,
        public int $rows,
        public int $columns
    ){}
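    /**
     * Tiles this matrix to the requested shape (NumPy-style broadcasting):
     * entries repeat cyclically along each axis until the target size is reached.
     */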
    public function broadcast(int $rows, int $columns) : self{
        $result = [];
        for($i = 0; $i < $rows; $i++){
            $row = [];
            for($j = 0; $j < $columns; $j++){
                $row[] = $this->values[$i % $this->rows][$j % $this->columns];
            }
            $result[] = $row;
        }
        return new self($result, $rows, $columns);
    }
    /**
     * Applies $function to every entry (element-wise map) and returns the result as a new matrix.
     *
     * @param Closure(float) : float $function
     * @return self
     */
    public function apply(Closure $function) : self{
        $result = [];
        for($i = 0; $i < $this->rows; $i++){
            for($j = 0; $j < $this->columns; $j++){
                $result[$i][$j] = $function($this->values[$i][$j]);
            }
        }
        return new self($result, $this->rows, $this->columns);
    }
    /**
     * Combines this matrix with $x entry by entry using $function,
     * broadcasting both operands to a common shape first if their shapes differ.
     *
     * @param self $x
     * @param Closure(float, float) : float $function
     * @return self
     */
    public function broadcastAndApply(self $x, Closure $function) : self{
        if($this->rows !== $x->rows || $this->columns !== $x->columns){
            if($this->columns === $x->rows){
                $b = $this->broadcast($x->rows, $this->columns);
                $xb = $x->broadcast($x->rows, $this->columns);
                return $b->broadcastAndApply($xb, $function);
            }
            throw new LogicException("Cannot broadcast {$this->rows}x{$this->columns} and {$x->rows}x{$x->columns} matrices to a common shape");
        }
        $result = self::n(0, $this->rows, $this->columns);
        for($i = 0; $i < $result->rows; $i++){
            for($j = 0; $j < $result->columns; $j++){
                $result->values[$i][$j] = $function($this->values[$i][$j], $x->values[$i][$j]);
            }
        }
        return $result;
    }
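    /**
     * Swaps rows and columns (equivalent of numpy.ndarray.T).
     */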
    public function transpose() : self{
        $result = [];
        for($i = 0; $i < $this->columns; $i++){
            for($j = 0; $j < $this->rows; $j++){
                $result[$i][$j] = $this->values[$j][$i];
            }
        }
        return new self($result, $this->columns, $this->rows);
    }
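    /**
     * Matrix product (equivalent of numpy.dot); requires $this->columns === $x->rows.
     */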
    public function dot(self $x) : self{
        if($this->columns !== $x->rows){
            throw new LogicException("Cannot multiply a {$this->rows}x{$this->columns} matrix by a {$x->rows}x{$x->columns} matrix");
        }
        $result = [];
        for($i = 0; $i < $this->rows; $i++){
            $row = [];
            for($j = 0; $j < $x->columns; $j++){
                $product = 0;
                for($k = 0; $k < $this->columns; $k++){
                    $product += $this->values[$i][$k] * $x->values[$k][$j];
                }
                $row[] = $product;
            }
            $result[] = $row;
        }
        return new self($result, $this->rows, $x->columns);
    }
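    /** Element-wise addition, subtraction and multiplication; operands are broadcast to a common shape if needed. */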
    public function add(self $x) : self{
        return $this->broadcastAndApply($x, fn($a, $b) => $a + $b);
    }
    public function subtract(self $x) : self{
        return $this->broadcastAndApply($x, fn($a, $b) => $a - $b);
    }
    public function multiply(self $x) : self{
        return $this->broadcastAndApply($x, fn($a, $b) => $a * $b);
    }
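    /**
     * Renders the matrix in a numpy-like bracketed layout, one row per line,
     * with every entry formatted to 8 decimal places and padded to a common width.
     */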
    public function print() : string{
        $values = [];
        $space = 0;
        for($i = 0; $i < $this->rows; $i++){
            for($j = 0; $j < $this->columns; $j++){
                $values[$i][$j] = sprintf("%.8f", $this->values[$i][$j]);
                $space_this = strlen($values[$i][$j]);
                $space = $space_this > $space ? $space_this : $space;
            }
        }
        if(count($values) === 0){
            return "[]";
        }
        $result = [];
        for($i = 0; $i < $this->rows; $i++){
            $line = "[";
            for($j = 0; $j < $this->columns; $j++){
                $value = $values[$i][$j];
                $line .= $value;
                if($j !== $this->columns - 1){
                    $line .= ",";
                }
                $line .= str_repeat(" ", $space - strlen($value));
            }
            $line .= "]";
            $result[] = $line;
        }
        if(count($result) === 1){
            return $result[0];
        }
        $print = "[";
        $print .= array_shift($result) . PHP_EOL;
        foreach($result as $line){
            $print .= " " . $line . PHP_EOL;
        }
        $print[strlen($print) - 1] = "]";
        return $print;
    }
}
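// For illustration only (not part of the original script), the Matrix class can also be used on its own:
//   $a = Matrix::from([[1, 2], [3, 4]]);
//   $b = Matrix::from([[5], [6]]);
//   echo $a->dot($b)->print(); // 2x1 column: 1*5 + 2*6 = 17, 3*5 + 4*6 = 39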
final class NeuralNetwork{
    public Matrix $synaptic_weights;
    public function __construct(){
        // seed the random number generator, so it generates the same numbers every run
        mt_srand(1);
        // start with random weights in the range [-1, 1] for a single neuron with 3 inputs and 1 output
        $this->synaptic_weights = Matrix::random(3, 1)->apply(fn($x) => 2 * $x - 1);
    }
    // the sigmoid function, which describes an S-shaped curve;
    // it squashes the weighted sum of the inputs to a value between 0 and 1
    private function sigmoid(Matrix $x) : Matrix{
        return $x->apply(fn($n) => 1 / (1 + exp(-$n)));
    }
    // the derivative of the sigmoid function (the gradient of the curve);
    // $x already holds sigmoid outputs, so the derivative is simply $n * (1 - $n)
    private function sigmoid_derivative(Matrix $x) : Matrix{
        return $x->apply(fn($n) => $n * (1 - $n));
    }
    // train the network by repeatedly adjusting the synaptic weights
    public function train(Matrix $training_set_inputs, Matrix $training_set_outputs, int $number_of_training_iterations) : void{
        for($iteration = 0; $iteration < $number_of_training_iterations; $iteration++){
            // pass the training set through the network
            $output = $this->think($training_set_inputs);
            // calculate the error (the difference between the desired output and the predicted output)
            $error = $training_set_outputs->subtract($output);
            // multiply the error by the gradient of the sigmoid and by the inputs,
            // so zero inputs cause no adjustment to the corresponding weights
            $adjustment = $training_set_inputs->transpose()->dot($error->multiply($this->sigmoid_derivative($output)));
            // adjust the weights
            $this->synaptic_weights = $this->synaptic_weights->add($adjustment);
        }
    }
    // the network "thinks": pass the inputs through the single neuron and squash the result with the sigmoid
    public function think(Matrix $inputs) : Matrix{
        return $this->sigmoid($inputs->dot($this->synaptic_weights));
    }
}
// initialise a single-neuron neural network
$neural_network = new NeuralNetwork();
echo "Random starting synaptic weights:", PHP_EOL;
echo $neural_network->synaptic_weights->print(), PHP_EOL;
echo PHP_EOL;

// the training set: 4 examples, each consisting of 3 input values and 1 output value
$training_set_inputs = Matrix::from([[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1]]);
$training_set_outputs = Matrix::from([[0, 1, 1, 0]])->transpose();

// train the neural network using the training set:
// run the training loop 10,000 times, making small adjustments to the synaptic weights each time
$neural_network->train($training_set_inputs, $training_set_outputs, 10000);
echo "New synaptic weights after training:", PHP_EOL;
echo $neural_network->synaptic_weights->print(), PHP_EOL;
echo PHP_EOL;

// test the neural network with a new, unseen situation
echo "Consider new situation [1, 0, 0] -> ?:", PHP_EOL;
echo $neural_network->think(Matrix::from([[1, 0, 0]]))->print(), PHP_EOL;
echo PHP_EOL;
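// Expected behaviour (not a recorded output): in the training data the output always equals the
// first input value, so after training the network should produce a value close to 1 for [1, 0, 0].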
The corresponding Jupyter Notebook work: "Create NN model".