Allow modifier loggers to be overwritten by subsequent calls (#896) (#899)

Co-authored-by: Konstantin Gulin <66528950+KSGulin@users.noreply.github.com>
bfineran and KSGulin authored Jun 21, 2022
1 parent f6251f3 commit 9902945
Showing 1 changed file with 2 additions and 5 deletions.
src/sparseml/pytorch/sparsification/modifier.py (2 additions & 5 deletions)
@@ -234,9 +234,6 @@ def initialize_loggers(self, loggers: Union[None, LoggerManager, List[BaseLogger
         :param loggers: the logger maanger to setup this modifier with for logging
             important info and milestones to
         """
-        if self._loggers_initialized and self._loggers:
-            return
-
         loggers = loggers or []
         if isinstance(loggers, List):
             loggers = LoggerManager(loggers)
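For context, the removed guard made initialize_loggers a no-op once a logger manager had been attached; with it gone, a later call simply replaces the earlier loggers, which is the behavior the commit title describes. Below is a minimal sketch of the new behavior; the modifier and logger names and their import paths (EpochRangeModifier, LoggerManager, PythonLogger, TensorBoardLogger) are assumptions about the sparseml API of this era, not something shown in this diff.

from sparseml.pytorch.sparsification import EpochRangeModifier
from sparseml.pytorch.utils import LoggerManager, PythonLogger, TensorBoardLogger

# illustrative setup: any Modifier subclass would do; EpochRangeModifier is
# used here only because it needs no model-specific arguments
modifier = EpochRangeModifier(start_epoch=0.0, end_epoch=10.0)

# first call attaches a console logger
modifier.initialize_loggers(LoggerManager([PythonLogger()]))

# before this commit the removed guard made this second call a silent no-op;
# after it, the call overwrites the previously attached logger manager
modifier.initialize_loggers(LoggerManager([TensorBoardLogger()]))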
@@ -709,7 +706,7 @@ def log_string(
         level = level or LOGGING_LEVELS["debug"]
         step = (
             loggers.epoch_to_step(epoch, steps_per_epoch)
-            if (epoch and steps_per_epoch)
+            if (epoch is not None) and (steps_per_epoch is not None)
             else None
         )
         loggers.log_string(tag=tag, string=string, step=step, level=level)
@@ -727,7 +724,7 @@ def log_scalar(
         loggers = loggers or self.loggers
         step = (
             loggers.epoch_to_step(epoch, steps_per_epoch)
-            if (epoch and steps_per_epoch)
+            if (epoch is not None) and (steps_per_epoch is not None)
             else None
         )
         loggers.log_scalar(tag=tag, value=value, step=step, level=level)
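The two condition changes above matter because the old code tested epoch and steps_per_epoch for truthiness, so a perfectly valid epoch of 0 (or 0.0) made the whole expression falsy and the step was silently dropped. A self-contained illustration, with epoch_to_step stubbed as plain multiplication purely for demonstration:

def epoch_to_step(epoch, steps_per_epoch):
    # stand-in for LoggerManager.epoch_to_step, for illustration only
    return round(epoch * steps_per_epoch)

epoch, steps_per_epoch = 0.0, 100

# old condition: 0.0 is falsy, so logging at epoch 0 loses its step
old_step = epoch_to_step(epoch, steps_per_epoch) if (epoch and steps_per_epoch) else None
assert old_step is None

# new condition: the conversion is only skipped when a value is actually missing
new_step = (
    epoch_to_step(epoch, steps_per_epoch)
    if (epoch is not None) and (steps_per_epoch is not None)
    else None
)
assert new_step == 0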
