From a8f254941e5c35466f9a3bfe7928dffdb4c313b9 Mon Sep 17 00:00:00 2001
From: Polina Lakrisenko
Date: Wed, 7 Sep 2022 10:51:10 +0200
Subject: [PATCH] Fix typos (#926)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* fix typos
* fix more typos
Co-authored-by: Fabian Fröhlich
Co-authored-by: Yannik Schälte <31767307+yannikschaelte@users.noreply.github.com>
---
pypesto/objective/aesara.py | 14 +++++++-------
pypesto/objective/aggregated.py | 2 +-
pypesto/objective/base.py | 2 +-
pypesto/objective/finite_difference.py | 2 +-
4 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/pypesto/objective/aesara.py b/pypesto/objective/aesara.py
index e37210239..b9e654d6b 100644
--- a/pypesto/objective/aesara.py
+++ b/pypesto/objective/aesara.py
@@ -3,7 +3,7 @@
Adds an interface for the construction of loss functions
incorporating aesara models. This permits computation of derivatives using a
-combination of objective based methods and aeara based backpropagation.
+combination of objective based methods and aesara based backpropagation.
"""
import copy
@@ -43,7 +43,7 @@ class AesaraObjective(ObjectiveBase):
objective:
The `pypesto.ObjectiveBase` to wrap.
aet_x:
- Tensor variables that that define the variables of `aet_fun`
+ Tensor variables that define the variables of `aet_fun`
aet_fun:
Aesara function that maps `aet_x` to the variables of `objective`
coeff:
@@ -228,7 +228,7 @@ class AesaraObjectiveOp(Op):
Parameters
----------
obj:
- Base aseara objective
+ Base aesara objective
coeff:
Multiplicative coefficient for the objective function value
"""
@@ -248,7 +248,7 @@ def __init__(self, obj: AesaraObjective, coeff: Optional[float] = 1.0):
def perform(self, node, inputs, outputs, params=None): # noqa
# note that we use precomputed values from the outer
- # AesaraObjective.call_unprocessed here, which which means we can
+ # AesaraObjective.call_unprocessed here, which means we can
# ignore inputs here
log_prob = self._coeff * self._objective.inner_ret[FVAL]
outputs[0][0] = np.array(log_prob)
@@ -275,7 +275,7 @@ class AesaraObjectiveGradOp(Op):
Parameters
----------
obj:
- Base aseara objective
+ Base aesara objective
coeff:
Multiplicative coefficient for the objective function value
"""
@@ -321,7 +321,7 @@ class AesaraObjectiveHessOp(Op):
Parameters
----------
obj:
- Base aseara objective
+ Base aesara objective
coeff:
Multiplicative coefficient for the objective function value
"""
@@ -335,7 +335,7 @@ def __init__(self, obj: AesaraObjective, coeff: Optional[float] = 1.0):
def perform(self, node, inputs, outputs, params=None): # noqa
# note that we use precomputed values from the outer
- # AesaraObjective.call_unprocessed here, which which means we can
+ # AesaraObjective.call_unprocessed here, which means we can
# ignore inputs here
log_prob_hess = self._coeff * self._objective.inner_ret[HESS]
outputs[0][0] = log_prob_hess
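For orientation, the docstrings touched above describe how `AesaraObjective` ties a base `pypesto` objective to an aesara tensor expression (`aet_x`, `aet_fun`, `coeff`). A minimal sketch of that wiring, assuming a toy quadratic objective and an exponential reparameterization (both illustrative assumptions, not part of this patch):

    import aesara.tensor as at
    import numpy as np

    import pypesto
    from pypesto.objective.aesara import AesaraObjective

    # Illustrative base objective: f(x) = sum(x^2) with analytic gradient.
    base_objective = pypesto.Objective(
        fun=lambda x: float(np.sum(x**2)),
        grad=lambda x: 2.0 * x,
    )

    # Tensor variables (`aet_x`) and an aesara expression (`aet_fun`) mapping
    # them to the parameters of the wrapped objective; here a simple exp transform.
    x = at.vector("x")
    y = at.exp(x)

    # Derivatives combine the base objective's own gradient with aesara
    # backpropagation through `y`, scaled by `coeff`.
    obj = AesaraObjective(base_objective, x, y, coeff=1.0)
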
diff --git a/pypesto/objective/aggregated.py b/pypesto/objective/aggregated.py
index febece237..9ce6c084f 100644
--- a/pypesto/objective/aggregated.py
+++ b/pypesto/objective/aggregated.py
@@ -111,7 +111,7 @@ def get_config(self) -> dict:
def aggregate_results(rvals: Sequence[ResultDict]) -> ResultDict:
"""
- Aggregrate the results from the provided ResultDicts into a single one.
+ Aggregate the results from the provided ResultDicts into a single one.
Parameters
----------
diff --git a/pypesto/objective/base.py b/pypesto/objective/base.py
index c4a95529f..0b120eef8 100644
--- a/pypesto/objective/base.py
+++ b/pypesto/objective/base.py
@@ -255,7 +255,7 @@ def get_config(self) -> dict:
"""
Get the configuration information of the objective function.
- Return it as a dictonary.
+ Return it as a dictionary.
"""
info = {'type': self.__class__.__name__}
return info
diff --git a/pypesto/objective/finite_difference.py b/pypesto/objective/finite_difference.py
index 81f90d5b4..bb8f5c1a8 100644
--- a/pypesto/objective/finite_difference.py
+++ b/pypesto/objective/finite_difference.py
@@ -181,7 +181,7 @@ def _update(
# shape (n_delta, n_par, ...)
nablas = np.array(nablas)
- # The stability vector is the the absolute difference of Jacobian
+ # The stability vector is the absolute difference of Jacobian
# entries towards smaller and larger deltas, thus indicating the
# change in the approximation when changing delta.
# This is done separately for each parameter. Then, for each the delta