Fix typos (#926)
* fix typos

* fix more typos

Co-authored-by: Fabian Fröhlich <fabian@schaluck.com>
Co-authored-by: Yannik Schälte <31767307+yannikschaelte@users.noreply.github.com>
3 people authored Sep 7, 2022
1 parent 12ed10b commit a8f2549
Showing 4 changed files with 10 additions and 10 deletions.
14 changes: 7 additions & 7 deletions pypesto/objective/aesara.py
@@ -3,7 +3,7 @@
 Adds an interface for the construction of loss functions
 incorporating aesara models. This permits computation of derivatives using a
-combination of objective based methods and aeara based backpropagation.
+combination of objective based methods and aesara based backpropagation.
 """
 
 import copy
@@ -43,7 +43,7 @@ class AesaraObjective(ObjectiveBase):
     objective:
         The `pypesto.ObjectiveBase` to wrap.
     aet_x:
-        Tensor variables that that define the variables of `aet_fun`
+        Tensor variables that define the variables of `aet_fun`
     aet_fun:
         Aesara function that maps `aet_x` to the variables of `objective`
     coeff:
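The parameter list above suggests how such a wrapper might be constructed in practice. A minimal sketch, assuming `AesaraObjective` is importable from `pypesto.objective.aesara`, and using the Rosenbrock function and an exponential re-parameterization purely for illustration:

    import aesara.tensor as at
    import scipy.optimize as so

    import pypesto
    from pypesto.objective.aesara import AesaraObjective

    # Base objective to wrap (Rosenbrock, chosen only for illustration)
    objective = pypesto.Objective(fun=so.rosen, grad=so.rosen_der)

    # aet_x: tensor variables that define the variables of aet_fun
    aet_x = at.dvector('x')
    # aet_fun: aesara expression mapping aet_x to the variables of `objective`
    aet_fun = at.exp(aet_x)

    # The wrapped objective then evaluates objective(exp(x)); derivatives
    # combine objective-based methods with aesara backpropagation
    aesara_objective = AesaraObjective(objective, aet_x, aet_fun)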
@@ -228,7 +228,7 @@ class AesaraObjectiveOp(Op):
     Parameters
     ----------
     obj:
-        Base aseara objective
+        Base aesara objective
     coeff:
         Multiplicative coefficient for the objective function value
     """
@@ -248,7 +248,7 @@ def __init__(self, obj: AesaraObjective, coeff: Optional[float] = 1.0):
 
     def perform(self, node, inputs, outputs, params=None):  # noqa
         # note that we use precomputed values from the outer
-        # AesaraObjective.call_unprocessed here, which which means we can
+        # AesaraObjective.call_unprocessed here, which means we can
         # ignore inputs here
         log_prob = self._coeff * self._objective.inner_ret[FVAL]
         outputs[0][0] = np.array(log_prob)
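The comment fixed here describes a caching pattern: the outer `AesaraObjective` computes everything once, and the Op's `perform` ignores its graph inputs and reads the cached result. A standalone sketch of that pattern, where `CachedFvalOp` and the `cache` dict are hypothetical stand-ins for the wrapper and `inner_ret`:

    import numpy as np
    import aesara.tensor as at
    from aesara.graph.op import Op

    class CachedFvalOp(Op):
        """Toy Op returning a value precomputed outside the graph."""

        itypes = [at.dvector]  # parameter vector, ignored in perform
        otypes = [at.dscalar]  # scalar objective value

        def __init__(self, cache: dict, coeff: float = 1.0):
            self._cache = cache
            self._coeff = coeff

        def perform(self, node, inputs, outputs, params=None):  # noqa
            # inputs are ignored: the value was already computed by an
            # outer wrapper and stored in the cache, mirroring how the
            # Ops above read AesaraObjective.inner_ret
            outputs[0][0] = np.array(self._coeff * self._cache['fval'])

An outer caller would fill `cache['fval']` before evaluating the compiled function, which is exactly why `perform` can afford to ignore its inputs.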
@@ -275,7 +275,7 @@ class AesaraObjectiveGradOp(Op):
     Parameters
     ----------
     obj:
-        Base aseara objective
+        Base aesara objective
     coeff:
         Multiplicative coefficient for the objective function value
     """
@@ -321,7 +321,7 @@ class AesaraObjectiveHessOp(Op):
     Parameters
     ----------
     obj:
-        Base aseara objective
+        Base aesara objective
     coeff:
         Multiplicative coefficient for the objective function value
     """
@@ -335,7 +335,7 @@ def __init__(self, obj: AesaraObjective, coeff: Optional[float] = 1.0):
 
     def perform(self, node, inputs, outputs, params=None):  # noqa
         # note that we use precomputed values from the outer
-        # AesaraObjective.call_unprocessed here, which which means we can
+        # AesaraObjective.call_unprocessed here, which means we can
        # ignore inputs here
         log_prob_hess = self._coeff * self._objective.inner_ret[HESS]
         outputs[0][0] = log_prob_hess
2 changes: 1 addition & 1 deletion pypesto/objective/aggregated.py
@@ -111,7 +111,7 @@ def get_config(self) -> dict:
 
 def aggregate_results(rvals: Sequence[ResultDict]) -> ResultDict:
     """
-    Aggregrate the results from the provided ResultDicts into a single one.
+    Aggregate the results from the provided ResultDicts into a single one.
 
     Parameters
     ----------
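For context, aggregation here means merging the return dictionaries of several objectives into one. A simplified sketch of the additive idea, where the `FVAL`/`GRAD` keys and the plain-dict `ResultDict` are stand-ins (the actual function covers further return fields):

    from typing import Sequence

    import numpy as np

    FVAL, GRAD = 'fval', 'grad'

    def aggregate_results(rvals: Sequence[dict]) -> dict:
        result = {}
        if all(FVAL in r for r in rvals):
            # function values of independent objectives add up
            result[FVAL] = sum(r[FVAL] for r in rvals)
        if all(GRAD in r for r in rvals):
            # and so do their gradients, elementwise
            result[GRAD] = np.sum([r[GRAD] for r in rvals], axis=0)
        return result

    print(aggregate_results([{FVAL: 1.0, GRAD: np.array([1.0, 0.0])},
                             {FVAL: 2.0, GRAD: np.array([0.5, 0.5])}]))
    # {'fval': 3.0, 'grad': array([1.5, 0.5])}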
2 changes: 1 addition & 1 deletion pypesto/objective/base.py
@@ -255,7 +255,7 @@ def get_config(self) -> dict:
         """
         Get the configuration information of the objective function.
-        Return it as a dictonary.
+        Return it as a dictionary.
         """
         info = {'type': self.__class__.__name__}
         return info
2 changes: 1 addition & 1 deletion pypesto/objective/finite_difference.py
@@ -181,7 +181,7 @@ def _update(
         # shape (n_delta, n_par, ...)
         nablas = np.array(nablas)
 
-        # The stability vector is the the absolute difference of Jacobian
+        # The stability vector is the absolute difference of Jacobian
         # entries towards smaller and larger deltas, thus indicating the
         # change in the approximation when changing delta.
         # This is done separately for each parameter. Then, for each the delta
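The comment being fixed describes an adaptive step-size selection: compare finite-difference gradients across a grid of deltas and keep, per parameter, the delta whose approximation is most stable. A toy numpy sketch of that criterion, with illustrative names that are not pyPESTO's API:

    import numpy as np

    def select_deltas(nablas: np.ndarray, deltas: np.ndarray) -> np.ndarray:
        # stability vector: absolute difference of gradient entries
        # between neighboring deltas, shape (n_delta - 1, n_par)
        stab = np.abs(np.diff(nablas, axis=0))
        # per parameter, pick the delta whose approximation changes least
        best = np.argmin(stab, axis=0)
        return deltas[best]

    deltas = np.array([1e-1, 1e-2, 1e-3, 1e-4])
    nablas = np.array([[2.10], [2.01], [2.001], [2.05]])  # toy gradients
    print(select_deltas(nablas, deltas))  # -> [0.01]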
