diff --git a/tests/test_algorithms/test_algorithms.py b/tests/test_algorithms/test_algorithms.py
deleted file mode 100644
index 61c35194..00000000
--- a/tests/test_algorithms/test_algorithms.py
+++ /dev/null
@@ -1,177 +0,0 @@
-"""Unit tests for algorithms/"""
-
-# Authors: Genevieve Hayes (modified by Kyle Nakamura)
-# License: BSD 3-clause
-
-import numpy as np
-from tests.globals import SEED
-
-from mlrose_ky import OneMax, DiscreteOpt, ContinuousOpt, hill_climb, random_hill_climb, simulated_annealing, genetic_alg, mimic
-
-
-def test_mimic_discrete_max():
-    """Test mimic function for a discrete maximization problem"""
-    problem = DiscreteOpt(5, OneMax())
-    best_state, best_fitness, _ = mimic(problem, random_state=SEED)
-    x = np.ones(5)
-    assert np.array_equal(best_state, x) and best_fitness == 5
-
-
-def test_mimic_discrete_min():
-    """Test mimic function for a discrete minimization problem"""
-    problem = DiscreteOpt(5, OneMax(), maximize=False)
-    best_state, best_fitness, _ = mimic(problem, random_state=SEED)
-    x = np.zeros(5)
-    assert np.array_equal(best_state, x) and best_fitness == 0
-
-
-def test_hill_climb_discrete_max():
-    """Test hill_climb function for a discrete maximization problem"""
-    problem = DiscreteOpt(5, OneMax())
-    best_state, best_fitness, _ = hill_climb(problem, restarts=10, random_state=SEED)
-    x = np.ones(5)
-    assert np.array_equal(best_state, x) and best_fitness == 5
-
-
-def test_hill_climb_continuous_max():
-    """Test hill_climb function for a continuous maximization problem"""
-    problem = ContinuousOpt(5, OneMax())
-    best_state, best_fitness, _ = hill_climb(problem, restarts=10, random_state=SEED)
-    x = np.ones(5)
-    assert np.array_equal(best_state, x) and best_fitness == 5
-
-
-def test_hill_climb_discrete_min():
-    """Test hill_climb function for a discrete minimization problem"""
-    problem = DiscreteOpt(5, OneMax(), maximize=False)
-    best_state, best_fitness, _ = hill_climb(problem, restarts=10, random_state=SEED)
-    x = np.zeros(5)
-    assert np.array_equal(best_state, x) and best_fitness == 0
-
-
-def test_hill_climb_continuous_min():
-    """Test hill_climb function for a continuous minimization problem"""
-    problem = ContinuousOpt(5, OneMax(), maximize=False)
-    best_state, best_fitness, _ = hill_climb(problem, restarts=10, random_state=SEED)
-    x = np.zeros(5)
-    assert np.array_equal(best_state, x) and best_fitness == 0
-
-
-def test_hill_climb_max_iters():
-    """Test hill_climb function with max_iters less than infinite"""
-    problem = DiscreteOpt(5, OneMax())
-    x = np.zeros(5)
-    best_state, best_fitness, _ = hill_climb(problem, max_iters=1, init_state=x, random_state=SEED)
-    assert best_fitness == 1
-
-
-def test_random_hill_climb_discrete_max():
-    """Test random_hill_climb function for a discrete maximization problem"""
-    problem = DiscreteOpt(5, OneMax())
-    best_state, best_fitness, _ = random_hill_climb(problem, restarts=10, random_state=SEED)
-    x = np.ones(5)
-    assert np.array_equal(best_state, x) and best_fitness == 5
-
-
-def test_random_hill_climb_continuous_max():
-    """Test random_hill_climb function for a continuous maximization problem"""
-    problem = ContinuousOpt(5, OneMax())
-    best_state, best_fitness, _ = random_hill_climb(problem, restarts=10, random_state=SEED)
-    x = np.ones(5)
-    assert np.array_equal(best_state, x) and best_fitness == 5
-
-
-def test_random_hill_climb_discrete_min():
-    """Test random_hill_climb function for a discrete minimization problem"""
-    problem = DiscreteOpt(5, OneMax(), maximize=False)
-    best_state, best_fitness, _ = random_hill_climb(problem, restarts=10, random_state=SEED)
-    x = np.zeros(5)
-    assert np.array_equal(best_state, x) and best_fitness == 0
-
-
-def test_random_hill_climb_continuous_min():
-    """Test random_hill_climb function for a continuous minimization problem"""
-    problem = ContinuousOpt(5, OneMax(), maximize=False)
-    best_state, best_fitness, _ = random_hill_climb(problem, restarts=10, random_state=SEED)
-    x = np.zeros(5)
-    assert np.array_equal(best_state, x) and best_fitness == 0
-
-
-def test_random_hill_climb_max_iters():
-    """Test random_hill_climb function with low max_iters"""
-    problem = DiscreteOpt(5, OneMax())
-    x = np.zeros(5)
-    best_state, best_fitness, _ = random_hill_climb(problem, max_attempts=1, max_iters=1, init_state=x, random_state=SEED)
-    assert best_fitness == 1
-
-
-def test_simulated_annealing_discrete_max():
-    """Test simulated_annealing function for a discrete maximization problem"""
-    problem = DiscreteOpt(5, OneMax())
-    best_state, best_fitness, _ = simulated_annealing(problem, random_state=SEED)
-    x = np.ones(5)
-    assert np.array_equal(best_state, x) and best_fitness == 5
-
-
-def test_simulated_annealing_continuous_max():
-    """Test simulated_annealing function for a continuous maximization problem"""
-    problem = ContinuousOpt(5, OneMax())
-    best_state, best_fitness, _ = simulated_annealing(problem, max_attempts=20, random_state=SEED)
-    x = np.ones(5)
-    assert np.array_equal(best_state, x) and best_fitness == 5
-
-
-def test_simulated_annealing_discrete_min():
-    """Test simulated_annealing function for a discrete minimization problem"""
-    problem = DiscreteOpt(5, OneMax(), maximize=False)
-    best_state, best_fitness, _ = simulated_annealing(problem, random_state=SEED)
-    x = np.zeros(5)
-    assert np.array_equal(best_state, x) and best_fitness == 0
-
-
-def test_simulated_annealing_continuous_min():
-    """Test simulated_annealing function for a continuous minimization problem"""
-    problem = ContinuousOpt(5, OneMax(), maximize=False)
-    best_state, best_fitness, _ = simulated_annealing(problem, random_state=SEED)
-    x = np.zeros(5)
-    assert np.array_equal(best_state, x) and best_fitness == 0
-
-
-def test_simulated_annealing_max_iters():
-    """Test simulated_annealing function with low max_iters"""
-    problem = DiscreteOpt(5, OneMax())
-    x = np.zeros(5)
-    best_state, best_fitness, _ = simulated_annealing(problem, max_attempts=1, max_iters=1, init_state=x, random_state=SEED)
-    assert best_fitness == 1
-
-
-def test_genetic_alg_discrete_max():
-    """Test genetic_alg function for a discrete maximization problem"""
-    problem = DiscreteOpt(5, OneMax())
-    best_state, best_fitness, _ = genetic_alg(problem, random_state=SEED)
-    x = np.ones(5)
-    assert np.array_equal(best_state, x) and best_fitness == 5
-
-
-def test_genetic_alg_continuous_max():
-    """Test genetic_alg function for a continuous maximization problem"""
-    problem = ContinuousOpt(5, OneMax())
-    best_state, best_fitness, _ = genetic_alg(problem, random_state=SEED)
-    x = np.ones(5)
-    assert np.allclose(best_state, x, atol=0.5) and best_fitness > 4
-
-
-def test_genetic_alg_discrete_min():
-    """Test genetic_alg function for a discrete minimization problem"""
-    problem = DiscreteOpt(5, OneMax(), maximize=False)
-    best_state, best_fitness, _ = genetic_alg(problem, random_state=SEED)
-    x = np.zeros(5)
-    assert np.array_equal(best_state, x) and best_fitness == 0
-
-
-def test_genetic_alg_continuous_min():
-    """Test genetic_alg function for a continuous minimization problem"""
-    problem = ContinuousOpt(5, OneMax(), maximize=False)
-    best_state, best_fitness, _ = genetic_alg(problem, random_state=SEED)
-    x = np.zeros(5)
-    assert np.allclose(best_state, x, atol=0.5) and best_fitness < 1
diff --git a/tests/test_algorithms/test_algorithms__crossovers.py b/tests/test_algorithms/test_algorithms__crossovers.py
index f562f7ed..11542748 100644
--- a/tests/test_algorithms/test_algorithms__crossovers.py
+++ b/tests/test_algorithms/test_algorithms__crossovers.py
@@ -3,13 +3,15 @@
 # Author: Kyle Nakamura
 # License: BSD 3-clause
 
+from unittest.mock import patch
+
 import numpy as np
 import pytest
-from unittest.mock import patch
+
+from mlrose_ky.algorithms.crossovers import OnePointCrossOver, TSPCrossOver, UniformCrossOver
 
 # noinspection PyProtectedMember
 from mlrose_ky.algorithms.crossovers._crossover_base import _CrossOverBase
-from mlrose_ky.algorithms.crossovers import OnePointCrossOver, TSPCrossOver, UniformCrossOver
 
 
 class MockOptProb:
diff --git a/tests/test_algorithms/test_algorithms__decay.py b/tests/test_algorithms/test_algorithms__decay.py
index 187c0c1e..2cd87f3a 100644
--- a/tests/test_algorithms/test_algorithms__decay.py
+++ b/tests/test_algorithms/test_algorithms__decay.py
@@ -4,6 +4,7 @@
 # License: BSD 3-clause
 
 import pytest
+
 from mlrose_ky import GeomDecay, ArithDecay, ExpDecay, CustomSchedule
 
 
diff --git a/tests/test_algorithms/test_algorithms__mutators.py b/tests/test_algorithms/test_algorithms__mutators.py
index 90bd4172..e7b85d0f 100644
--- a/tests/test_algorithms/test_algorithms__mutators.py
+++ b/tests/test_algorithms/test_algorithms__mutators.py
@@ -3,13 +3,15 @@
 # Author: Kyle Nakamura
 # License: BSD 3-clause
 
+from unittest.mock import patch
+
 import numpy as np
 import pytest
-from unittest.mock import patch
+
+from mlrose_ky.algorithms.mutators import ChangeOneMutator, DiscreteMutator, ShiftOneMutator, SwapMutator
 
 # noinspection PyProtectedMember
 from mlrose_ky.algorithms.mutators._mutator_base import _MutatorBase
-from mlrose_ky.algorithms.mutators import ChangeOneMutator, DiscreteMutator, ShiftOneMutator, SwapMutator
 
 
 class MockOptProb:
diff --git a/tests/test_algorithms/test_ga.py b/tests/test_algorithms/test_ga.py
new file mode 100644
index 00000000..34499ed9
--- /dev/null
+++ b/tests/test_algorithms/test_ga.py
@@ -0,0 +1,42 @@
+"""Unit tests for algorithms/ga.py"""
+
+# Authors: Kyle Nakamura
+# License: BSD 3-clause
+
+import numpy as np
+
+from mlrose_ky import DiscreteOpt, OneMax, ContinuousOpt
+from mlrose_ky.algorithms import genetic_alg
+from tests.globals import SEED
+
+
+class TestGeneticAlg:
+    """Unit tests for genetic_alg."""
+
+    def test_genetic_alg_discrete_max(self):
+        """Test genetic_alg function for a discrete maximization problem"""
+        problem = DiscreteOpt(5, OneMax())
+        best_state, best_fitness, _ = genetic_alg(problem, random_state=SEED)
+        x = np.ones(5)
+        assert np.array_equal(best_state, x) and best_fitness == 5
+
+    def test_genetic_alg_continuous_max(self):
+        """Test genetic_alg function for a continuous maximization problem"""
+        problem = ContinuousOpt(5, OneMax())
+        best_state, best_fitness, _ = genetic_alg(problem, random_state=SEED)
+        x = np.ones(5)
+        assert np.allclose(best_state, x, atol=0.5) and best_fitness > 4
+
+    def test_genetic_alg_discrete_min(self):
+        """Test genetic_alg function for a discrete minimization problem"""
+        problem = DiscreteOpt(5, OneMax(), maximize=False)
+        best_state, best_fitness, _ = genetic_alg(problem, random_state=SEED)
+        x = np.zeros(5)
+        assert np.array_equal(best_state, x) and best_fitness == 0
+
+    def test_genetic_alg_continuous_min(self):
+        """Test genetic_alg function for a continuous minimization problem"""
+        problem = ContinuousOpt(5, OneMax(), maximize=False)
+        best_state, best_fitness, _ = genetic_alg(problem, random_state=SEED)
+        x = np.zeros(5)
+        assert np.allclose(best_state, x, atol=0.5) and best_fitness < 1
diff --git a/tests/test_algorithms/test_gd.py b/tests/test_algorithms/test_gd.py
new file mode 100644
index 00000000..f4a22dd1
--- /dev/null
+++ b/tests/test_algorithms/test_gd.py
@@ -0,0 +1,14 @@
+"""Unit tests for algorithms/gd.py"""
+
+# Authors: Kyle Nakamura
+# License: BSD 3-clause
+
+from mlrose_ky import DiscreteOpt, OneMax, ContinuousOpt
+from mlrose_ky.algorithms import gradient_descent
+from tests.globals import SEED
+
+
+class TestGradientDescent:
+    """Unit tests for gradient_descent."""
+
+    pass
diff --git a/tests/test_algorithms/test_hc.py b/tests/test_algorithms/test_hc.py
new file mode 100644
index 00000000..82299556
--- /dev/null
+++ b/tests/test_algorithms/test_hc.py
@@ -0,0 +1,49 @@
+"""Unit tests for algorithms/hc.py"""
+
+# Authors: Kyle Nakamura
+# License: BSD 3-clause
+
+import numpy as np
+
+from mlrose_ky import DiscreteOpt, OneMax, ContinuousOpt
+from mlrose_ky.algorithms import hill_climb
+from tests.globals import SEED
+
+
+class TestHillClimb:
+    """Unit tests for hill_climb."""
+
+    def test_hill_climb_discrete_max(self):
+        """Test hill_climb function for a discrete maximization problem"""
+        problem = DiscreteOpt(5, OneMax())
+        best_state, best_fitness, _ = hill_climb(problem, restarts=10, random_state=SEED)
+        x = np.ones(5)
+        assert np.array_equal(best_state, x) and best_fitness == 5
+
+    def test_hill_climb_continuous_max(self):
+        """Test hill_climb function for a continuous maximization problem"""
+        problem = ContinuousOpt(5, OneMax())
+        best_state, best_fitness, _ = hill_climb(problem, restarts=10, random_state=SEED)
+        x = np.ones(5)
+        assert np.array_equal(best_state, x) and best_fitness == 5
+
+    def test_hill_climb_discrete_min(self):
+        """Test hill_climb function for a discrete minimization problem"""
+        problem = DiscreteOpt(5, OneMax(), maximize=False)
+        best_state, best_fitness, _ = hill_climb(problem, restarts=10, random_state=SEED)
+        x = np.zeros(5)
+        assert np.array_equal(best_state, x) and best_fitness == 0
+
+    def test_hill_climb_continuous_min(self):
+        """Test hill_climb function for a continuous minimization problem"""
+        problem = ContinuousOpt(5, OneMax(), maximize=False)
+        best_state, best_fitness, _ = hill_climb(problem, restarts=10, random_state=SEED)
+        x = np.zeros(5)
+        assert np.array_equal(best_state, x) and best_fitness == 0
+
+    def test_hill_climb_max_iters(self):
+        """Test hill_climb function with max_iters less than infinite"""
+        problem = DiscreteOpt(5, OneMax())
+        x = np.zeros(5)
+        best_state, best_fitness, _ = hill_climb(problem, max_iters=1, init_state=x, random_state=SEED)
+        assert best_fitness == 1
diff --git a/tests/test_algorithms/test_mimic.py b/tests/test_algorithms/test_mimic.py
new file mode 100644
index 00000000..e2b583e6
--- /dev/null
+++ b/tests/test_algorithms/test_mimic.py
@@ -0,0 +1,28 @@
+"""Unit tests for algorithms/mimic.py"""
+
+# Authors: Kyle Nakamura
+# License: BSD 3-clause
+
+import numpy as np
+
+from mlrose_ky import DiscreteOpt, OneMax
+from mlrose_ky.algorithms import mimic
+from tests.globals import SEED
+
+
+class TestMimic:
+    """Unit tests for mimic."""
+
+    def test_mimic_discrete_max(self):
+        """Test mimic function for a discrete maximization problem"""
+        problem = DiscreteOpt(5, OneMax())
+        best_state, best_fitness, _ = mimic(problem, random_state=SEED)
+        x = np.ones(5)
+        assert np.array_equal(best_state, x) and best_fitness == 5
+
+    def test_mimic_discrete_min(self):
+        """Test mimic function for a discrete minimization problem"""
+        problem = DiscreteOpt(5, OneMax(), maximize=False)
+        best_state, best_fitness, _ = mimic(problem, random_state=SEED)
+        x = np.zeros(5)
+        assert np.array_equal(best_state, x) and best_fitness == 0
diff --git a/tests/test_algorithms/test_rhc.py b/tests/test_algorithms/test_rhc.py
new file mode 100644
index 00000000..30606429
--- /dev/null
+++ b/tests/test_algorithms/test_rhc.py
@@ -0,0 +1,49 @@
+"""Unit tests for algorithms/rhc.py"""
+
+# Authors: Kyle Nakamura
+# License: BSD 3-clause
+
+import numpy as np
+
+from mlrose_ky import DiscreteOpt, OneMax, ContinuousOpt
+from mlrose_ky.algorithms import random_hill_climb
+from tests.globals import SEED
+
+
+class TestRandomHillClimb:
+    """Unit tests for random_hill_climb."""
+
+    def test_random_hill_climb_discrete_max(self):
+        """Test random_hill_climb function for a discrete maximization problem"""
+        problem = DiscreteOpt(5, OneMax())
+        best_state, best_fitness, _ = random_hill_climb(problem, restarts=10, random_state=SEED)
+        x = np.ones(5)
+        assert np.array_equal(best_state, x) and best_fitness == 5
+
+    def test_random_hill_climb_continuous_max(self):
+        """Test random_hill_climb function for a continuous maximization problem"""
+        problem = ContinuousOpt(5, OneMax())
+        best_state, best_fitness, _ = random_hill_climb(problem, restarts=10, random_state=SEED)
+        x = np.ones(5)
+        assert np.array_equal(best_state, x) and best_fitness == 5
+
+    def test_random_hill_climb_discrete_min(self):
+        """Test random_hill_climb function for a discrete minimization problem"""
+        problem = DiscreteOpt(5, OneMax(), maximize=False)
+        best_state, best_fitness, _ = random_hill_climb(problem, restarts=10, random_state=SEED)
+        x = np.zeros(5)
+        assert np.array_equal(best_state, x) and best_fitness == 0
+
+    def test_random_hill_climb_continuous_min(self):
+        """Test random_hill_climb function for a continuous minimization problem"""
+        problem = ContinuousOpt(5, OneMax(), maximize=False)
+        best_state, best_fitness, _ = random_hill_climb(problem, restarts=10, random_state=SEED)
+        x = np.zeros(5)
+        assert np.array_equal(best_state, x) and best_fitness == 0
+
+    def test_random_hill_climb_max_iters(self):
+        """Test random_hill_climb function with low max_iters"""
+        problem = DiscreteOpt(5, OneMax())
+        x = np.zeros(5)
+        best_state, best_fitness, _ = random_hill_climb(problem, max_attempts=1, max_iters=1, init_state=x, random_state=SEED)
+        assert best_fitness == 1
diff --git a/tests/test_algorithms/test_sa.py b/tests/test_algorithms/test_sa.py
new file mode 100644
index 00000000..2e940290
--- /dev/null
+++ b/tests/test_algorithms/test_sa.py
@@ -0,0 +1,49 @@
+"""Unit tests for algorithms/sa.py"""
+
+# Authors: Kyle Nakamura
+# License: BSD 3-clause
+
+import numpy as np
+
+from mlrose_ky import DiscreteOpt, OneMax, ContinuousOpt
+from mlrose_ky.algorithms import simulated_annealing
+from tests.globals import SEED
+
+
+class TestSimulatedAnnealing:
+    """Unit tests for simulated_annealing."""
+
+    def test_simulated_annealing_discrete_max(self):
+        """Test simulated_annealing function for a discrete maximization problem"""
+        problem = DiscreteOpt(5, OneMax())
+        best_state, best_fitness, _ = simulated_annealing(problem, random_state=SEED)
+        x = np.ones(5)
+        assert np.array_equal(best_state, x) and best_fitness == 5
+
+    def test_simulated_annealing_continuous_max(self):
+        """Test simulated_annealing function for a continuous maximization problem"""
+        problem = ContinuousOpt(5, OneMax())
+        best_state, best_fitness, _ = simulated_annealing(problem, max_attempts=20, random_state=SEED)
+        x = np.ones(5)
+        assert np.array_equal(best_state, x) and best_fitness == 5
+
+    def test_simulated_annealing_discrete_min(self):
+        """Test simulated_annealing function for a discrete minimization problem"""
+        problem = DiscreteOpt(5, OneMax(), maximize=False)
+        best_state, best_fitness, _ = simulated_annealing(problem, random_state=SEED)
+        x = np.zeros(5)
+        assert np.array_equal(best_state, x) and best_fitness == 0
+
+    def test_simulated_annealing_continuous_min(self):
+        """Test simulated_annealing function for a continuous minimization problem"""
+        problem = ContinuousOpt(5, OneMax(), maximize=False)
+        best_state, best_fitness, _ = simulated_annealing(problem, random_state=SEED)
+        x = np.zeros(5)
+        assert np.array_equal(best_state, x) and best_fitness == 0
+
+    def test_simulated_annealing_max_iters(self):
+        """Test simulated_annealing function with low max_iters"""
+        problem = DiscreteOpt(5, OneMax())
+        x = np.zeros(5)
+        best_state, best_fitness, _ = simulated_annealing(problem, max_attempts=1, max_iters=1, init_state=x, random_state=SEED)
+        assert best_fitness == 1