Skip to content

Commit

Permalink
Include training loss tracking in MLP
Browse files Browse the repository at this point in the history
  • Loading branch information
ccomkhj committed Nov 8, 2023
1 parent 384cf5f commit 978d1a9
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 0 deletions.
5 changes: 5 additions & 0 deletions constrained_linear_regression/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,7 @@ def __init__(
epsilon=1e-8,
n_iter_no_change=10,
max_fun=15000,
training_losses=[],
):
super().__init__(
hidden_layer_sizes=hidden_layer_sizes,
Expand All @@ -153,12 +154,16 @@ def __init__(
n_iter_no_change=n_iter_no_change,
max_fun=max_fun,
)
self.training_losses = training_losses
assert shuffle is False, "shuffle should be False in the contrained ML."

@abc.abstractmethod
def fit(self, X, y):
    """Train the estimator on ``X`` / ``y``.

    Abstract hook: concrete constrained-regression subclasses must
    override this with their own fitting logic.
    """

def _save_mse(self, loss):
self.training_losses.append(loss)

def _verify_coef(self, feature_count, coef, value, idx=0):
if coef is not None:
coef_ = coef
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -371,6 +371,7 @@ def _loss_grad_constrained_lbfgs(
)
self._update_coef_using_constrain(coef_grads) # it throws error.
grad = _pack(coef_grads, intercept_grads)
self._save_mse(loss)
return loss, grad


Expand Down

0 comments on commit 978d1a9

Please sign in to comment.