diff --git a/DIRECTORY.md b/DIRECTORY.md
index 30aa2c7f..18c67420 100644
--- a/DIRECTORY.md
+++ b/DIRECTORY.md
@@ -27,12 +27,21 @@
* [Bstnode](./DataStructures/BinarySearchTree/BSTNode.php)
* [Bstree](./DataStructures/BinarySearchTree/BSTree.php)
* [Duplicatekeyexception](./DataStructures/BinarySearchTree/DuplicateKeyException.php)
+ * Comparebinarytree
+ * [Binarytreenode](./DataStructures/CompareBinaryTree/BinaryTreeNode.php)
+ * [Comparebinarytree](./DataStructures/CompareBinaryTree/CompareBinaryTree.php)
* Disjointsets
* [Disjointset](./DataStructures/DisjointSets/DisjointSet.php)
* [Disjointsetnode](./DataStructures/DisjointSets/DisjointSetNode.php)
* [Doublylinkedlist](./DataStructures/DoublyLinkedList.php)
+ * Invertbinarytree
+ * [Binarytree](./DataStructures/InvertBinaryTree/BinaryTree.php)
+ * [Invertbinarytree](./DataStructures/InvertBinaryTree/InvertBinaryTree.php)
* [Node](./DataStructures/Node.php)
* [Queue](./DataStructures/Queue.php)
+ * Reverselinkedlist
+ * [Linkedlistitem](./DataStructures/ReverseLinkedList/LinkedListItem.php)
+ * [Reverselinkedlist](./DataStructures/ReverseLinkedList/ReverseLinkedList.php)
* Segmenttree
* [Segmenttree](./DataStructures/SegmentTree/SegmentTree.php)
* [Segmenttreenode](./DataStructures/SegmentTree/SegmentTreeNode.php)
@@ -50,6 +59,8 @@
* [Bellmanford](./Graphs/BellmanFord.php)
* [Breadthfirstsearch](./Graphs/BreadthFirstSearch.php)
* [Depthfirstsearch](./Graphs/DepthFirstSearch.php)
+ * [Dijkstras](./Graphs/Dijkstras.php)
+ * [Graphedge](./Graphs/GraphEdge.php)
## Maths
* [Absolutemax](./Maths/AbsoluteMax.php)
@@ -86,6 +97,10 @@
* [Problem8](./Maths/ProjectEuler/Problem8.php)
* [Problem9](./Maths/ProjectEuler/Problem9.php)
+## Neuralnetworks
+ * Perceptronclassifier
+ * [Neuralnetworkperceptronclassifier](./NeuralNetworks/PerceptronClassifier/NeuralNetworkPerceptronClassifier.php)
+
## Searches
* [Binarysearch](./Searches/BinarySearch.php)
* [Exponentialsearch](./Searches/ExponentialSearch.php)
@@ -139,9 +154,12 @@
* Datastructures
* [Avltreetest](./tests/DataStructures/AVLTreeTest.php)
* [Bstreetest](./tests/DataStructures/BSTreeTest.php)
+ * [Comparebinarytreetest](./tests/DataStructures/CompareBinaryTreeTest.php)
* [Disjointsettest](./tests/DataStructures/DisjointSetTest.php)
* [Doublylinkedlisttest](./tests/DataStructures/DoublyLinkedListTest.php)
+ * [Invertbinarytreetest](./tests/DataStructures/InvertBinaryTreeTest.php)
* [Queuetest](./tests/DataStructures/QueueTest.php)
+ * [Reverselinkedlisttest](./tests/DataStructures/ReverseLinkedListTest.php)
* [Segmenttreetest](./tests/DataStructures/SegmentTreeTest.php)
* [Singlylinkedlisttest](./tests/DataStructures/SinglyLinkedListTest.php)
* [Splaytreetest](./tests/DataStructures/SplayTreeTest.php)
@@ -151,10 +169,14 @@
* [Bellmanfordtest](./tests/Graphs/BellmanFordTest.php)
* [Breadthfirstsearchtest](./tests/Graphs/BreadthFirstSearchTest.php)
* [Depthfirstsearchtest](./tests/Graphs/DepthFirstSearchTest.php)
+ * [Dijkstrastest](./tests/Graphs/DijkstrasTest.php)
* Maths
* [Eratosthenessievetest](./tests/Maths/EratosthenesSieveTest.php)
* [Mathstest](./tests/Maths/MathsTest.php)
* [Projecteulertest](./tests/Maths/ProjectEulerTest.php)
+ * Neuralnetworks
+ * Perceptronclassifier
+ * [Neuralnetworkperceptronclassifiertest](./tests/NeuralNetworks/PerceptronClassifier/NeuralNetworkPerceptronClassifierTest.php)
* Searches
* [Searchestest](./tests/Searches/SearchesTest.php)
* Sorting
diff --git a/DataStructures/CompareBinaryTree/BinaryTreeNode.php b/DataStructures/CompareBinaryTree/BinaryTreeNode.php
new file mode 100644
index 00000000..d0146aea
--- /dev/null
+++ b/DataStructures/CompareBinaryTree/BinaryTreeNode.php
@@ -0,0 +1,17 @@
+<?php
+
+namespace DataStructures\CompareBinaryTree;
+
+class BinaryTreeNode
+{
+    public function __construct($value, ?BinaryTreeNode $left = null, ?BinaryTreeNode $right = null)
+    {
+        $this->value = $value;
+        $this->left = $left;
+        $this->right = $right;
+    }
+
+    public $value;
+    public ?BinaryTreeNode $left;
+    public ?BinaryTreeNode $right;
+}
diff --git a/DataStructures/CompareBinaryTree/CompareBinaryTree.php b/DataStructures/CompareBinaryTree/CompareBinaryTree.php
new file mode 100644
index 00000000..d4def6e4
--- /dev/null
+++ b/DataStructures/CompareBinaryTree/CompareBinaryTree.php
@@ -0,0 +1,35 @@
+<?php
+
+namespace DataStructures\CompareBinaryTree;
+
+/**
+ * Recursively compares two binary trees: the trees are equal when the
+ * node values and the shapes of the left and right subtrees are equal.
+ */
+class CompareBinaryTree
+{
+    public function areTreesEqual(?BinaryTreeNode $a, ?BinaryTreeNode $b): bool
+    {
+        if ($a === null && $b === null) {
+            return true;
+        }
+        if ($a === null || $b === null) {
+            return false;
+        }
+        if ($a->value !== $b->value) {
+            return false;
+        }
+        return $this->areTreesEqual($a->left, $b->left)
+            && $this->areTreesEqual($a->right, $b->right);
+    }
+}
diff --git a/DataStructures/InvertBinaryTree/BinaryTree.php b/DataStructures/InvertBinaryTree/BinaryTree.php
new file mode 100644
index 00000000..38419351
--- /dev/null
+++ b/DataStructures/InvertBinaryTree/BinaryTree.php
@@ -0,0 +1,43 @@
+<?php
+
+namespace DataStructures\InvertBinaryTree;
+
+class BinaryTree
+{
+    private ?BinaryTree $left = null;
+    private ?BinaryTree $right = null;
+    private $value;
+
+    public function setLeft(?BinaryTree $left)
+    {
+        $this->left = $left;
+        return $this;
+    }
+
+ public function getLeft(): ?BinaryTree
+ {
+ return $this->left;
+ }
+
+ public function setRight(?BinaryTree $right)
+ {
+ $this->right = $right;
+ return $this;
+ }
+
+ public function getRight(): ?BinaryTree
+ {
+ return $this->right;
+ }
+
+ public function setValue($value)
+ {
+ $this->value = $value;
+ return $this;
+ }
+
+ public function getValue()
+ {
+ return $this->value;
+ }
+}
diff --git a/DataStructures/InvertBinaryTree/InvertBinaryTree.php b/DataStructures/InvertBinaryTree/InvertBinaryTree.php
new file mode 100644
index 00000000..346f74c1
--- /dev/null
+++ b/DataStructures/InvertBinaryTree/InvertBinaryTree.php
@@ -0,0 +1,24 @@
+<?php
+
+namespace DataStructures\InvertBinaryTree;
+
+/**
+ * Inverts a binary tree: recursively swaps the left and right
+ * children of every node, so the tree becomes its own mirror
+ * image.
+ */
+class InvertBinaryTree
+{
+    public function invert(?BinaryTree $b): void
+    {
+        if ($b === null) {
+            return;
+        }
+
+        $tmp = $b->getLeft();
+ $b->setLeft($b->getRight());
+ $b->setRight($tmp);
+ $this->invert($b->getLeft());
+ $this->invert($b->getRight());
+ }
+}
diff --git a/DataStructures/ReverseLinkedList/LinkedListItem.php b/DataStructures/ReverseLinkedList/LinkedListItem.php
new file mode 100644
index 00000000..1e2ae822
--- /dev/null
+++ b/DataStructures/ReverseLinkedList/LinkedListItem.php
@@ -0,0 +1,43 @@
+<?php
+
+namespace DataStructures\ReverseLinkedList;
+
+class LinkedListItem
+{
+    private ?LinkedListItem $next = null;
+    private ?LinkedListItem $prev = null;
+    private $value;
+
+    public function setNext(?LinkedListItem $next)
+    {
+        $this->next = $next;
+        return $this;
+    }
+
+ public function getNext(): ?LinkedListItem
+ {
+ return $this->next;
+ }
+
+ public function setPrev(?LinkedListItem $prev)
+ {
+ $this->prev = $prev;
+ return $this;
+ }
+
+ public function getPrev(): ?LinkedListItem
+ {
+ return $this->prev;
+ }
+
+ public function setValue($value)
+ {
+ $this->value = $value;
+ return $this;
+ }
+
+ public function getValue()
+ {
+ return $this->value;
+ }
+}
diff --git a/DataStructures/ReverseLinkedList/ReverseLinkedList.php b/DataStructures/ReverseLinkedList/ReverseLinkedList.php
new file mode 100644
index 00000000..cde6384b
--- /dev/null
+++ b/DataStructures/ReverseLinkedList/ReverseLinkedList.php
@@ -0,0 +1,28 @@
+<?php
+
+namespace DataStructures\ReverseLinkedList;
+
+/**
+ * Reverses a linked list iteratively: walks the list once,
+ * flipping the next/prev pointers of every item, and returns
+ * the new head (the old tail).
+ */
+class ReverseLinkedList
+{
+    public function reverse(LinkedListItem $item): LinkedListItem
+    {
+        $next = $item->getNext();
+ $item->setNext(null);
+ while (true) {
+ $item->setPrev($next);
+ if (! $next) {
+ return $item;
+ }
+ $nextNext = $next->getNext();
+ $next->setNext($item);
+ $item = $next;
+ $next = $nextNext;
+ }
+ }
+}
diff --git a/Graphs/BellmanFord.php b/Graphs/BellmanFord.php
index 37dbcd7d..7173e6ea 100644
--- a/Graphs/BellmanFord.php
+++ b/Graphs/BellmanFord.php
@@ -1,23 +1,17 @@
 <?php
-class Edge
-{
-    public $start;
-    public $end;
-    public int $weight;
-}
 
 // The Bellman-Ford algorithm computes shortest paths from a single source vertex
 // to all other vertices in a weighted digraph.
 function bellmanFord(array $verticesNames, array $edges, string $start, bool $verbose = false)
 {
     $vertices = array_combine($verticesNames, array_fill(0, count($verticesNames), PHP_INT_MAX));
 
     $change = true;
     while ($change) {
         $change = false;
         foreach ($vertices as $vertice => $minWeight) {
if ($verbose) {
- echo "checking vertice $vertice\n";
+ echo "checking vertex $vertice\n";
}
if ($start === $vertice) {
$vertices[$vertice] = 0;
@@ -39,7 +33,8 @@ function bellmanFord(array $verticesNames, array $edges, string $start, bool $ve
foreach ($edges[$vertice] as $edge) {
if ($vertices[$edge->end] > $vertices[$vertice] + $edge->weight) {
if ($verbose) {
- echo "replace $vertice " . $vertices[$edge->end] . " with " . $vertices[$vertice] + $edge->weight . "\n ";
+ echo "replace $vertice " . $vertices[$edge->end] . " with "
+ . ($vertices[$vertice] + $edge->weight) . "\n ";
}
$vertices[$edge->end] = $vertices[$vertice] + $edge->weight;
$change = true;
diff --git a/Graphs/Dijkstras.php b/Graphs/Dijkstras.php
new file mode 100644
index 00000000..f6ba42f1
--- /dev/null
+++ b/Graphs/Dijkstras.php
@@ -0,0 +1,43 @@
+<?php
+
+/**
+ * Dijkstra's algorithm computes the shortest paths from a single source vertex
+ * to every other vertex in a graph with non-negative edge weights.
+ * @param string[] $verticesNames names of all vertices
+ * @param GraphEdge[] $edges all edges of the graph
+ * @param string $start name of the starting vertex
+ * @return array shortest distance from $start to every vertex
+ */
+function dijkstras(array $verticesNames, array $edges, string $start): array
+{
+    // distance from $start to every vertex, initially "infinite"
+    $vertices = array_combine($verticesNames, array_fill(0, count($verticesNames), PHP_INT_MAX));
+    $vertices[$start] = 0;
+
+    $visitedNodes = [];
+    $nextVertex = $start;
+    while (count($visitedNodes) < count($verticesNames)) {
+        // relax all edges leaving the current vertex
+        foreach ($edges as $edge) {
+            if ($edge->start == $nextVertex) { //consider only nodes connected to current one
+ $vertices[$edge->end] = min($vertices[$edge->end], $vertices[$nextVertex] + $edge->weight);
+ }
+ }
+
+ // find vertex with current lowest value to be starting point in next iteration
+ $minVertexName = null;
+ $minVertexWeight = PHP_INT_MAX;
+ foreach ($vertices as $name => $weight) {
+ if (in_array($name, $visitedNodes) || $name == $nextVertex) {
+ continue;
+ }
+ if ($weight <= $minVertexWeight) {
+ $minVertexName = $name;
+ $minVertexWeight = $weight;
+ }
+ }
+ $visitedNodes[] = $nextVertex;
+ $nextVertex = $minVertexName;
+ }
+ return $vertices;
+}
diff --git a/Graphs/GraphEdge.php b/Graphs/GraphEdge.php
new file mode 100644
index 00000000..df73f082
--- /dev/null
+++ b/Graphs/GraphEdge.php
@@ -0,0 +1,8 @@
+<?php
+
+class GraphEdge
+{
+    public $start;
+    public $end;
+    public int $weight;
+}
diff --git a/NeuralNetworks/PerceptronClassifier/NeuralNetworkPerceptronClassifier.php b/NeuralNetworks/PerceptronClassifier/NeuralNetworkPerceptronClassifier.php
new file mode 100644
--- /dev/null
+++ b/NeuralNetworks/PerceptronClassifier/NeuralNetworkPerceptronClassifier.php
@@ -0,0 +1,190 @@
+<?php
+
+namespace NeuralNetworks\PerceptronClassifier;
+
+/**
+ * Single-layer perceptron binary classifier with a sigmoid activation,
+ * trained by gradient descent on the log loss
+ * (see NeuralNetworks/PerceptronClassifier/README.md for the maths).
+ */
+class NeuralNetworkPerceptronClassifier
+{
+    /**
+     * @param array $X Inputs, one column per training example
+     * @param array $Y Labels (0 or 1), one per training example
+     * @param int $iterations Number of gradient-descent steps
+     * @param float $learningRate Step size for each update
+     * @return array [$W, $b] Trained weights and bias
+     */
+    public function trainModel(array $X, array $Y, int $iterations, float $learningRate): array
+    {
+        [$W, $b] = $this->initParams(count($X));
+
+ for ($i = 0; $i < $iterations; $i++) {
+ // Forward propagation
+ $A = $this->forwardPropagation($X, $W, $b);
+
+ // Compute cost
+ $cost = $this->computeCost($A, $Y);
+
+ // Backward propagation
+ [$dW, $db] = $this->backwardPropagation($A, $X, $Y);
+
+ // Update parameters
+ [$W, $b] = $this->updateParams($W, $b, $dW, $db, $learningRate);
+
+ if ($i % 100 == 0) {
+ echo "Iteration {$i} - Cost: {$cost}\n";
+ }
+ }
+
+ return [$W, $b];
+ }
+
+ /**
+ * @param array $X
+ * @param array $W
+ * @param float $b
+ * @return array
+ */
+ public function predict(array $X, array $W, float $b): array
+ {
+ $A = $this->forwardPropagation($X, $W, $b);
+ return array_map(fn($a) => $a > 0.5 ? 1 : 0, $A);
+ }
+
+ /**
+ * Stage 1. Prepare dataset
+ * @return array[]
+ */
+ public function generateTrainingSet(): array
+ {
+ $m = 50;
+
+ // Generate a 2 x m matrix with binary values (0 or 1)
+ $X = [];
+ for ($i = 0; $i < 2; $i++) {
+ for ($j = 0; $j < $m; $j++) {
+ $X[$i][$j] = rand(0, 1);
+ }
+ }
+
+        // Compute Y: y = 1 when X[0] == 1 and X[1] == 0 (i.e. x1 AND NOT x2), else y = 0
+ $Y = [];
+ for ($j = 0; $j < $m; $j++) {
+ $Y[$j] = ($X[0][$j] == 1 && $X[1][$j] == 0) ? 1 : 0;
+ }
+
+ return [$X, $Y];
+ }
+
+ /**
+ * Stage 2. Initialize model parameters
+ * @param int $n Number of features
+ * @return array [$W, $b] Weight and bias arrays
+ */
+ private function initParams(int $n): array
+ {
+ $W = [];
+ for ($i = 0; $i < $n; $i++) {
+            $W[$i] = mt_rand() / mt_getrandmax(); // random values in [0, 1]
+ }
+ $b = 0.0; // Bias initialized to zero
+ return [$W, $b];
+ }
+
+ /**
+ * Sigmoid Activation Function
+ * @param float $z
+ * @return float
+ */
+ private function sigmoid(float $z): float
+ {
+ return 1 / (1 + exp(-$z));
+ }
+
+ /**
+ * Stage 3. Forward Propagation
+ * @param array $X
+ * @param array $W
+ * @param float $b
+ * @return array
+ */
+    private function forwardPropagation(array $X, array $W, float $b): array
+    {
+        $A = [];
+        for ($j = 0; $j < count($X[0]); $j++) {
+            $sum = $b;
+            for ($i = 0; $i < count($W); $i++) {
+                $sum += $W[$i] * $X[$i][$j];
+            }
+            // activations, one per training example
+            $A[$j] = $this->sigmoid($sum);
+        }
+        return $A;
+    }
+
+ /**
+ * Stage 4. Compute Cost Function (Binary Cross-Entropy Loss)
+ * @param array $A
+ * @param array $Y
+ * @return float
+ */
+ private function computeCost(array $A, array $Y): float
+ {
+ $m = count($Y);
+ $cost = 0.0;
+ for ($i = 0; $i < $m; $i++) {
+ $cost += -($Y[$i] * log($A[$i]) + (1 - $Y[$i]) * log(1 - $A[$i]));
+ }
+ return $cost / $m;
+ }
+
+ /**
+ * Stage 5. Backward Propagation
+ * @param array $A
+ * @param array $X
+ * @param array $Y
+ * @return array
+ */
+ private function backwardPropagation(array $A, array $X, array $Y): array
+ {
+ $m = count($Y);
+ $dW = array_fill(0, count($X), 0.0);
+ $db = 0.0;
+
+ for ($j = 0; $j < $m; $j++) {
+ $dZ = $A[$j] - $Y[$j];
+ for ($i = 0; $i < count($X); $i++) {
+ $dW[$i] += $dZ * $X[$i][$j];
+ }
+ $db += $dZ;
+ }
+
+ // Average gradients
+ for ($i = 0; $i < count($dW); $i++) {
+ $dW[$i] /= $m;
+ }
+ $db /= $m;
+
+ return [$dW, $db];
+ }
+
+ /**
+     * Stage 6. Update Parameters
+ * @param array $W
+ * @param float $b
+ * @param array $dW
+ * @param float $db
+ * @param float $learningRate
+ * @return array
+ */
+ private function updateParams(array $W, float $b, array $dW, float $db, float $learningRate): array
+ {
+ for ($i = 0; $i < count($W); $i++) {
+ $W[$i] -= $learningRate * $dW[$i];
+ }
+ $b -= $learningRate * $db;
+
+ return [$W, $b];
+ }
+}
diff --git a/NeuralNetworks/PerceptronClassifier/README.md b/NeuralNetworks/PerceptronClassifier/README.md
new file mode 100644
index 00000000..870c9727
--- /dev/null
+++ b/NeuralNetworks/PerceptronClassifier/README.md
@@ -0,0 +1,100 @@
+## Maths behind the single Perceptron Neural Network with Activation Function
+
+This work is based on examples from the course https://www.coursera.org/learn/machine-learning-calculus prepared by Luis Serrano.
+
+Linear separation refers to data points in binary classification problems that can be separated by a linear decision boundary.
+If the data points can be separated by a line, linear function, or flat hyperplane, they are said to be linearly separable.
+
+Points in an n-dimensional space are linearly separable if they can be separated by a hyperplane
+
+$$w_1x_1 + w_2x_2 + \cdots + w_nx_n + b = 0$$
+
+For two-dimensional input data, if there is a line with equation $$w_1x_1 + w_2x_2 + b = 0$$
+
+that separates all samples of one class from the other class, then the class of an observation can be determined from the side of the line on which it falls.
+Such classification problems are called "linearly separable", i.e. separable by a linear combination of the features.
+
+![Linearly separable classes](chart/linear-separated.png)
+
+The input layer contains two nodes $x_1$ and $x_2$. Weight vector $W = \begin{bmatrix} w_1 & w_2\end{bmatrix}$ and bias ($b$) are the parameters to be updated during the model training.
+
+$$z^{(i)} = w_1x_1^{(i)} + w_2x_2^{(i)} + b = Wx^{(i)} + b.\tag{1}$$
+
+To be able to perform classification we need a nonlinear approach. This can be achieved with a sigmoid activation function, which maps most inputs to values close to $0$ or $1$, with a smooth transition in a narrow range around $0$.
+
+The sigmoid activation function is defined as
+
+$$a = \sigma\left(z\right) = \frac{1}{1+e^{-z}}.\tag{2}$$
+
+![Sigmoid activation function](chart/sigmoid.png)
+
+A threshold value of $0.5$ can then be used for predictions: $1$ (red) if $a > 0.5$ and $0$ (blue) otherwise:
+
+$$\hat{y} = \begin{cases} 1 & \mbox{if } a > 0.5 \\ 0 & \mbox{otherwise } \end{cases}\tag{10}$$
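+
+In PHP, the activation is a one-liner; this mirrors the private `sigmoid()` helper in `NeuralNetworkPerceptronClassifier.php`:
+
+```php
+// Squashes any real z into the open interval (0, 1)
+function sigmoid(float $z): float
+{
+    return 1 / (1 + exp(-$z));
+}
+```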
+
+The single perceptron neural network with sigmoid activation function can be expressed as:
+
+\begin{align}
+z^{(i)} &= W x^{(i)} + b,\\
+a^{(i)} &= \sigma\left(z^{(i)}\right).\tag{3}
+\end{align}
+
+
+With $m$ training examples organised as the columns of the ($2 \times m$) matrix $X$, the activation function can be applied element-wise, so the model can be written as:
+
+
+\begin{align}
+Z &= W X + b,\\
+A &= \sigma\left(Z\right).\tag{4}
+\end{align}
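+
+Element by element, the forward pass (4) looks like this in PHP; it is a sketch of the classifier's private `forwardPropagation()` method, which folds the sigmoid into the same loop:
+
+```php
+// One activation per training example: a_j = sigmoid(b + sum_i W_i * X[i][j])
+function forwardPropagation(array $X, array $W, float $b): array
+{
+    $A = [];
+    for ($j = 0; $j < count($X[0]); $j++) {
+        $sum = $b;
+        for ($i = 0; $i < count($W); $i++) {
+            $sum += $W[$i] * $X[$i][$j];
+        }
+        $A[$j] = sigmoid($sum);
+    }
+    return $A;
+}
+```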
+
+When dealing with classification problems, the most commonly used cost function is the **log loss**, which is described by the following equation
+
+$$\mathcal{L}\left(W, b\right) = \frac{1}{m}\sum_{i=1}^{m} \left( -y^{(i)}\log\left(a^{(i)}\right) - (1-y^{(i)})\log\left(1- a^{(i)}\right) \right),\tag{5}$$
+
+where $y^{(i)} \in \{0,1\}$ are the original labels and $a^{(i)}$ are the continuous output values of the forward propagation step (elements of array $A$).
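+
+In PHP the cost (5) is a straightforward average over the training examples, as in the classifier's private `computeCost()` method:
+
+```php
+// Binary cross-entropy (log loss) of predictions A against labels Y
+function computeCost(array $A, array $Y): float
+{
+    $m = count($Y);
+    $cost = 0.0;
+    for ($i = 0; $i < $m; $i++) {
+        $cost += -($Y[$i] * log($A[$i]) + (1 - $Y[$i]) * log(1 - $A[$i]));
+    }
+    return $cost / $m;
+}
+```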
+
+
+We want to minimize the cost function during training. To implement gradient descent, calculate the partial derivatives using the chain rule:
+
+
+\begin{align}
+\frac{\partial \mathcal{L} }{ \partial w_1 } &=
+\frac{1}{m}\sum_{i=1}^{m} \left(a^{(i)} - y^{(i)}\right)x_1^{(i)},\\
+\frac{\partial \mathcal{L} }{ \partial w_2 } &=
+\frac{1}{m}\sum_{i=1}^{m} \left(a^{(i)} - y^{(i)}\right)x_2^{(i)},\tag{7}\\
+\frac{\partial \mathcal{L} }{ \partial b } &=
+\frac{1}{m}\sum_{i=1}^{m} \left(a^{(i)} - y^{(i)}\right).
+\end{align}
+
+The equations above can be rewritten in matrix form:
+
+
+\begin{align}
+\frac{\partial \mathcal{L} }{ \partial W } &=
+\begin{bmatrix} \frac{\partial \mathcal{L} }{ \partial w_1 } &
+\frac{\partial \mathcal{L} }{ \partial w_2 }\end{bmatrix} = \frac{1}{m}\left(A - Y\right)X^T,\\
+\frac{\partial \mathcal{L} }{ \partial b } &= \frac{1}{m}\left(A - Y\right)\mathbf{1}.
+\tag{8}
+\end{align}
+
+where $\left(A - Y\right)$ is an array of shape ($1 \times m$), $X^T$ is an array of shape ($m \times 2$), and $\mathbf{1}$ is an ($m \times 1$) vector of ones.
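+
+The classifier's private `backwardPropagation()` method computes exactly these averaged gradients with plain loops:
+
+```php
+// dW = (1/m) (A - Y) X^T and db = (1/m) sum(A - Y), cf. equation (8)
+function backwardPropagation(array $A, array $X, array $Y): array
+{
+    $m = count($Y);
+    $dW = array_fill(0, count($X), 0.0);
+    $db = 0.0;
+    for ($j = 0; $j < $m; $j++) {
+        $dZ = $A[$j] - $Y[$j]; // element of (A - Y)
+        for ($i = 0; $i < count($X); $i++) {
+            $dW[$i] += $dZ * $X[$i][$j];
+        }
+        $db += $dZ;
+    }
+    for ($i = 0; $i < count($dW); $i++) {
+        $dW[$i] /= $m; // average over the m examples
+    }
+    $db /= $m;
+    return [$dW, $db];
+}
+```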
+
+Then you can update the parameters:
+
+\begin{align}
+W &= W - \alpha \frac{\partial \mathcal{L} }{ \partial W },\\
+b &= b - \alpha \frac{\partial \mathcal{L} }{ \partial b },
+\tag{9}\end{align}
+
+where $\alpha$ is the learning rate. Repeat the process in a loop until the cost function stops decreasing.
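+
+The gradient-descent step (9) is implemented by the classifier's private `updateParams()` method:
+
+```php
+// Move W and b against the gradient, scaled by the learning rate alpha
+function updateParams(array $W, float $b, array $dW, float $db, float $learningRate): array
+{
+    for ($i = 0; $i < count($W); $i++) {
+        $W[$i] -= $learningRate * $dW[$i];
+    }
+    $b -= $learningRate * $db;
+    return [$W, $b];
+}
+```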
+
+In the last step, apply the threshold to turn activations into class predictions:
+$$\hat{y} = \begin{cases} 1 & \mbox{if } a > 0.5 \\ 0 & \mbox{otherwise } \end{cases}\tag{10}$$
+
+
+### Dataset
+
+As a dataset we generate $m=50$ data points $(x_1, x_2)$, where $x_1, x_2 \in \{0,1\}$, and save them in the array `X` of shape $(2 \times m)$ (see `generateTrainingSet()`). The labels ($0$: blue, $1$: red) are calculated so that $y = 1$ if $x_1 = 1$ and $x_2 = 0$; in all other cases $y = 0$. The labels are saved in the array `Y` of shape $(1 \times m)$.
+
+![Training dataset](chart/dataset.png)
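+
+Putting it all together, training and prediction can be run as below (class and method names as defined in `NeuralNetworkPerceptronClassifier.php`; the require path is assumed relative to the repository root):
+
+```php
+<?php
+require_once 'NeuralNetworks/PerceptronClassifier/NeuralNetworkPerceptronClassifier.php';
+
+use NeuralNetworks\PerceptronClassifier\NeuralNetworkPerceptronClassifier;
+
+$classifier = new NeuralNetworkPerceptronClassifier();
+
+// Stage 1. Generate the (2 x m) inputs X and labels Y described above
+[$X, $Y] = $classifier->generateTrainingSet();
+
+// Stages 2-6. Train with 1000 gradient-descent steps and learning rate 0.1
+[$W, $b] = $classifier->trainModel($X, $Y, 1000, 0.1);
+
+// Predict labels for four new points, one column per sample
+print_r($classifier->predict([[0, 0, 1, 1], [0, 1, 1, 0]], $W, $b)); // [0, 0, 0, 1]
+```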
diff --git a/NeuralNetworks/PerceptronClassifier/chart/dataset.png b/NeuralNetworks/PerceptronClassifier/chart/dataset.png
new file mode 100644
index 00000000..95a5042a
Binary files /dev/null and b/NeuralNetworks/PerceptronClassifier/chart/dataset.png differ
diff --git a/NeuralNetworks/PerceptronClassifier/chart/linear-separated.png b/NeuralNetworks/PerceptronClassifier/chart/linear-separated.png
new file mode 100644
index 00000000..061be4bc
Binary files /dev/null and b/NeuralNetworks/PerceptronClassifier/chart/linear-separated.png differ
diff --git a/NeuralNetworks/PerceptronClassifier/chart/sigmoid.png b/NeuralNetworks/PerceptronClassifier/chart/sigmoid.png
new file mode 100644
index 00000000..98560311
Binary files /dev/null and b/NeuralNetworks/PerceptronClassifier/chart/sigmoid.png differ
diff --git a/tests/DataStructures/CompareBinaryTreeTest.php b/tests/DataStructures/CompareBinaryTreeTest.php
new file mode 100644
index 00000000..7ae5e581
--- /dev/null
+++ b/tests/DataStructures/CompareBinaryTreeTest.php
@@ -0,0 +1,91 @@
+<?php
+
+namespace tests\DataStructures;
+
+require_once __DIR__ . '/../../vendor/autoload.php';
+require_once __DIR__ . '/../../DataStructures/CompareBinaryTree/BinaryTreeNode.php';
+require_once __DIR__ . '/../../DataStructures/CompareBinaryTree/CompareBinaryTree.php';
+
+use DataStructures\CompareBinaryTree\BinaryTreeNode;
+use DataStructures\CompareBinaryTree\CompareBinaryTree;
+use PHPUnit\Framework\TestCase;
+
+class CompareBinaryTreeTest extends TestCase
+{
+    public function testBinaryTreesAreEqualWhenTheyAreEqualInReality()
+    {
+        $tree1 = new BinaryTreeNode(
+            'A',
+            new BinaryTreeNode('B', new BinaryTreeNode('F'), new BinaryTreeNode('E', null, new BinaryTreeNode('D'))),
+            new BinaryTreeNode('C', new BinaryTreeNode('G'))
+        );
+        $tree2 = new BinaryTreeNode(
+            'A',
+            new BinaryTreeNode('B', new BinaryTreeNode('F'), new BinaryTreeNode('E', null, new BinaryTreeNode('D'))),
+            new BinaryTreeNode('C', new BinaryTreeNode('G'))
+        );
+
+        $sut = new CompareBinaryTree();
+        $this->assertTrue($sut->areTreesEqual($tree1, $tree2));
+ }
+
+ public function testBinaryTreesAreNotEqualWhenAreNotEqualInReality()
+ {
+
+ $tree1 = new BinaryTreeNode(
+ 'A',
+ new BinaryTreeNode(
+ 'B',
+ new BinaryTreeNode(
+ 'F'
+ ),
+ new BinaryTreeNode(
+ 'E',
+ null,
+ new BinaryTreeNode(
+ 'D'
+ )
+ )
+ ),
+ new BinaryTreeNode(
+ 'C',
+ new BinaryTreeNode('G')
+ )
+ );
+
+ $tree2 = new BinaryTreeNode(
+ 'A',
+ new BinaryTreeNode(
+ 'B',
+ new BinaryTreeNode(
+ 'F'
+ ),
+ new BinaryTreeNode(
+ 'E',
+ null,
+ new BinaryTreeNode(
+ 'D'
+ )
+ )
+ ),
+ new BinaryTreeNode(
+ 'C'
+ )
+ );
+
+ $sut = new CompareBinaryTree();
+ $this->assertFalse($sut->areTreesEqual($tree1, $tree2));
+ }
+}
diff --git a/tests/DataStructures/InvertBinaryTreeTest.php b/tests/DataStructures/InvertBinaryTreeTest.php
new file mode 100644
index 00000000..ce73bb27
--- /dev/null
+++ b/tests/DataStructures/InvertBinaryTreeTest.php
@@ -0,0 +1,37 @@
+<?php
+
+namespace tests\DataStructures;
+
+require_once __DIR__ . '/../../vendor/autoload.php';
+require_once __DIR__ . '/../../DataStructures/InvertBinaryTree/BinaryTree.php';
+require_once __DIR__ . '/../../DataStructures/InvertBinaryTree/InvertBinaryTree.php';
+
+use DataStructures\InvertBinaryTree\BinaryTree;
+use DataStructures\InvertBinaryTree\InvertBinaryTree;
+use PHPUnit\Framework\TestCase;
+
+class InvertBinaryTreeTest extends TestCase
+{
+    public function testInvertBinaryTree()
+    {
+        $b = (new BinaryTree())->setValue(1);
+ $bl = (new BinaryTree())->setValue(3);
+ $b->setLeft($bl);
+ $br = (new BinaryTree())->setValue(2);
+ $b->setRight($br);
+ $br->setLeft((new BinaryTree())->setValue(4));
+ $br->setRight((new BinaryTree())->setValue(5));
+
+ $expected = (new BinaryTree())->setValue(1);
+ $expectedBr = (new BinaryTree())->setValue(3);
+ $expected->setRight($expectedBr);
+ $expectedBl = (new BinaryTree())->setValue(2);
+ $expected->setLeft($expectedBl);
+ $expectedBl->setRight((new BinaryTree())->setValue(4));
+ $expectedBl->setLeft((new BinaryTree())->setValue(5));
+
+ (new InvertBinaryTree())->invert($b);
+
+ $this->assertEquals($expected, $b);
+ }
+}
diff --git a/tests/DataStructures/ReverseLinkedListTest.php b/tests/DataStructures/ReverseLinkedListTest.php
new file mode 100644
index 00000000..667bd45a
--- /dev/null
+++ b/tests/DataStructures/ReverseLinkedListTest.php
@@ -0,0 +1,37 @@
+<?php
+
+namespace tests\DataStructures;
+
+require_once __DIR__ . '/../../vendor/autoload.php';
+require_once __DIR__ . '/../../DataStructures/ReverseLinkedList/LinkedListItem.php';
+require_once __DIR__ . '/../../DataStructures/ReverseLinkedList/ReverseLinkedList.php';
+
+use DataStructures\ReverseLinkedList\LinkedListItem;
+use DataStructures\ReverseLinkedList\ReverseLinkedList;
+use PHPUnit\Framework\TestCase;
+
+class ReverseLinkedListTest extends TestCase
+{
+    public function testReverseLinkedList()
+    {
+        $list = [1, 2, 3, 4, 5];
+
+        // build list 0 -> 1 -> 2 -> 3 -> 4 -> 5
+        $firstItem = (new LinkedListItem())->setValue(0);
+
+ $prevItem = $firstItem;
+
+ foreach ($list as $value) {
+ $item = new LinkedListItem();
+ $item->setValue($value);
+ $item->setPrev($prevItem);
+ $prevItem->setNext($item);
+ $prevItem = $item;
+ }
+
+ $newFirstItem = (new ReverseLinkedList())->reverse($firstItem);
+ do {
+ $this->assertEquals($newFirstItem->getValue(), array_pop($list));
+ } while ($newFirstItem = $newFirstItem->getNext());
+ }
+}
diff --git a/tests/Graphs/BellmanFordTest.php b/tests/Graphs/BellmanFordTest.php
index c483cb34..a743f820 100644
--- a/tests/Graphs/BellmanFordTest.php
+++ b/tests/Graphs/BellmanFordTest.php
@@ -1,6 +1,7 @@
 <?php
 
 require_once __DIR__ . '/../../vendor/autoload.php';
 require_once __DIR__ . '/../../Graphs/BellmanFord.php';
+require_once __DIR__ . '/../../Graphs/GraphEdge.php';
 
 use PHPUnit\Framework\TestCase;
@@ -25,7 +26,7 @@ public function testBellmanFord()
         ];
         $edges = [];
         foreach ($edgesRaw as $edgeRaw) {
-            $edge = new Edge();
+            $edge = new GraphEdge();
             $edge->start = $edgeRaw[0];
$edge->end = $edgeRaw[2];
$edge->weight = $edgeRaw[1];
@@ -36,13 +37,16 @@ public function testBellmanFord()
$result = bellmanFord($vertices, $edges, 'S');
- $this->assertEquals($result, [
- 'S' => 0,
- 'A' => 5,
- 'B' => 5,
- 'C' => 7,
- 'D' => 9,
- 'E' => 8
- ]);
+ $this->assertEquals(
+ [
+ 'S' => 0,
+ 'A' => 5,
+ 'B' => 5,
+ 'C' => 7,
+ 'D' => 9,
+ 'E' => 8
+ ],
+ $result
+ );
}
}
diff --git a/tests/Graphs/DijkstrasTest.php b/tests/Graphs/DijkstrasTest.php
new file mode 100644
index 00000000..f00ce0b7
--- /dev/null
+++ b/tests/Graphs/DijkstrasTest.php
@@ -0,0 +1,52 @@
+<?php
+
+require_once __DIR__ . '/../../vendor/autoload.php';
+require_once __DIR__ . '/../../Graphs/GraphEdge.php';
+require_once __DIR__ . '/../../Graphs/Dijkstras.php';
+
+use PHPUnit\Framework\TestCase;
+
+class DijkstrasTest extends TestCase
+{
+    public function testDijkstras()
+    {
+        $vertices = ['S', 'A', 'B', 'C', 'D', 'E'];
+
+        // edge format: [start, weight, end]; weights chosen to match the expected distances below
+        $edgesRaw = [
+            ['S', 5, 'A'],
+            ['S', 5, 'B'],
+            ['S', 9, 'C'],
+            ['A', 2, 'C'],
+            ['A', 7, 'D'],
+            ['B', 3, 'C'],
+            ['B', 4, 'D'],
+            ['B', 6, 'E'],
+            ['C', 1, 'E'],
+            ['D', 1, 'E'],
+            ['E', 2, 'D'],
+        ];
+        $edges = [];
+        foreach ($edgesRaw as $edgeRaw) {
+            $edge = new GraphEdge();
+            $edge->start = $edgeRaw[0];
+ $edge->end = $edgeRaw[2];
+ $edge->weight = $edgeRaw[1];
+ $edges[] = $edge;
+ }
+
+ $result = dijkstras($vertices, $edges, 'S');
+
+ $this->assertEquals(
+ [
+ 'S' => 0,
+ 'A' => 5,
+ 'B' => 5,
+ 'C' => 7,
+ 'D' => 9,
+ 'E' => 8
+ ],
+ $result
+ );
+ }
+}
diff --git a/tests/NeuralNetworks/PerceptronClassifier/NeuralNetworkPerceptronClassifierTest.php b/tests/NeuralNetworks/PerceptronClassifier/NeuralNetworkPerceptronClassifierTest.php
new file mode 100644
index 00000000..61eacc48
--- /dev/null
+++ b/tests/NeuralNetworks/PerceptronClassifier/NeuralNetworkPerceptronClassifierTest.php
@@ -0,0 +1,23 @@
+<?php
+
+require_once __DIR__ . '/../../../vendor/autoload.php';
+require_once __DIR__ . '/../../../NeuralNetworks/PerceptronClassifier/NeuralNetworkPerceptronClassifier.php';
+
+use NeuralNetworks\PerceptronClassifier\NeuralNetworkPerceptronClassifier;
+use PHPUnit\Framework\TestCase;
+
+class NeuralNetworkPerceptronClassifierTest extends TestCase
+{
+    public function testPerceptronClassifier()
+    {
+        $nnClassifier = new NeuralNetworkPerceptronClassifier();
+        // Stage 1. Generate training set
+        [$X, $Y] = $nnClassifier->generateTrainingSet();
+ // Train the model
+ [$W, $b] = $nnClassifier->trainModel($X, $Y, 1000, 0.1);
+
+ // Make predictions
+ $predictions = $nnClassifier->predict([[0, 0, 1, 1], [0, 1, 1, 0]], $W, $b);
+ $this->assertEquals([0, 0, 0, 1], $predictions);
+ }
+}