Add step parameter to TensorBoardLogger.log_hyperparams (#20176)
* Add step to TensorBoardLogger.log_hyperparams

The metrics logged through this method were always written at step 0, so no meaningful graph could be made from them

* Change expected step value to None

---------

Co-authored-by: Jirka Borovec <[email protected]>
Co-authored-by: Luca Antiga <[email protected]>
Authored by 3 people on Dec 11, 2024 · 1 parent a9125c2 · commit 1129d4c
Showing 3 changed files with 18 additions and 10 deletions.
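As the commit message notes, hyperparameter metrics previously always landed at step 0. Below is a minimal usage sketch of the new signature before the diffs; the save directory, hyperparameter values, and step number are illustrative, and only the `step` argument is new in this commit.

from lightning.pytorch.loggers import TensorBoardLogger

# Illustrative logger; any TensorBoardLogger instance behaves the same way.
logger = TensorBoardLogger(save_dir="logs", name="demo")

# Before this change, the metrics passed here were written at step 0 regardless of progress.
# The new optional `step` argument attaches them to a chosen global step instead.
logger.log_hyperparams(
    {"lr": 1e-3, "batch_size": 32},
    metrics={"hp_metric": 0.42},
    step=100,
)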
src/lightning/fabric/loggers/tensorboard.py (16 changes: 10 additions & 6 deletions)
@@ -220,15 +220,19 @@ def log_metrics(self, metrics: Mapping[str, float], step: Optional[int] = None)
     @override
     @rank_zero_only
     def log_hyperparams(
-        self, params: Union[dict[str, Any], Namespace], metrics: Optional[dict[str, Any]] = None
+        self,
+        params: Union[dict[str, Any], Namespace],
+        metrics: Optional[dict[str, Any]] = None,
+        step: Optional[int] = None,
     ) -> None:
         """Record hyperparameters. TensorBoard logs with and without saved hyperparameters are incompatible, the
         hyperparameters are then not displayed in the TensorBoard. Please delete or move the previously saved logs to
         display the new ones with hyperparameters.

         Args:
-            params: a dictionary-like container with the hyperparameters
+            params: A dictionary-like container with the hyperparameters
             metrics: Dictionary with metric names as keys and measured quantities as values
+            step: Optional global step number for the logged metrics

         """
         params = _convert_params(params)
@@ -244,7 +248,7 @@ def log_hyperparams(
metrics = {"hp_metric": metrics}

if metrics:
self.log_metrics(metrics, 0)
self.log_metrics(metrics, step)

if _TENSORBOARD_AVAILABLE:
from torch.utils.tensorboard.summary import hparams
@@ -253,9 +257,9 @@

             exp, ssi, sei = hparams(params, metrics)
             writer = self.experiment._get_file_writer()
-            writer.add_summary(exp)
-            writer.add_summary(ssi)
-            writer.add_summary(sei)
+            writer.add_summary(exp, step)
+            writer.add_summary(ssi, step)
+            writer.add_summary(sei, step)

     @override
     @rank_zero_only
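For context on the hunk above, here is a standalone sketch (not part of the commit; directory, values, and step are assumptions) of what the three add_summary calls write when given a step, using the same hparams helper and the private file-writer accessor that the logger itself uses:

from torch.utils.tensorboard import SummaryWriter
from torch.utils.tensorboard.summary import hparams

writer = SummaryWriter(log_dir="logs/raw_demo")  # illustrative directory

# hparams() builds the three summaries TensorBoard's HPARAMS plugin expects:
# the experiment definition (exp), session start info (ssi), and session end info (sei).
exp, ssi, sei = hparams({"lr": 1e-3, "batch_size": 32}, {"hp_metric": 0.0})

file_writer = writer._get_file_writer()
for summary in (exp, ssi, sei):
    # Forwarding a global step here is exactly what the new `step` parameter enables.
    file_writer.add_summary(summary, 100)

# The metric itself is then logged as a scalar at the same step (the logger does this via log_metrics).
writer.add_scalar("hp_metric", 0.42, global_step=100)
writer.close()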
src/lightning/pytorch/loggers/tensorboard.py (10 changes: 7 additions & 3 deletions)
@@ -153,15 +153,19 @@ def save_dir(self) -> str:
     @override
     @rank_zero_only
     def log_hyperparams(
-        self, params: Union[dict[str, Any], Namespace], metrics: Optional[dict[str, Any]] = None
+        self,
+        params: Union[dict[str, Any], Namespace],
+        metrics: Optional[dict[str, Any]] = None,
+        step: Optional[int] = None,
     ) -> None:
         """Record hyperparameters. TensorBoard logs with and without saved hyperparameters are incompatible, the
         hyperparameters are then not displayed in the TensorBoard. Please delete or move the previously saved logs to
         display the new ones with hyperparameters.

         Args:
-            params: a dictionary-like container with the hyperparameters
+            params: A dictionary-like container with the hyperparameters
             metrics: Dictionary with metric names as keys and measured quantities as values
+            step: Optional global step number for the logged metrics

         """
         if _OMEGACONF_AVAILABLE:
@@ -175,7 +179,7 @@ def log_hyperparams(
         else:
             self.hparams.update(params)

-        return super().log_hyperparams(params=params, metrics=metrics)
+        return super().log_hyperparams(params=params, metrics=metrics, step=step)

     @override
     @rank_zero_only
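A hypothetical sketch of how the pass-through above might be used from a LightningModule hook; the model class, the choice of hook, and the metric value are assumptions, and it presumes the attached logger is a TensorBoardLogger:

import lightning.pytorch as pl

class DemoModel(pl.LightningModule):
    def on_validation_end(self) -> None:
        # Assumes self.logger is a TensorBoardLogger; `step` is forwarded through
        # the pytorch-level logger to the fabric-level implementation shown earlier.
        self.logger.log_hyperparams(
            dict(self.hparams),
            metrics={"hp_metric": 0.0},  # illustrative value
            step=self.global_step,
        )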
@@ -544,7 +544,7 @@ def test_step(self, batch, batch_idx):
"valid_loss_1",
}
assert mock_log_metrics.mock_calls == [
call({"hp_metric": -1}, 0),
call({"hp_metric": -1}, None),
call(metrics={"train_loss": ANY, "epoch": 0}, step=0),
call(metrics={"valid_loss_0_step": ANY, "valid_loss_2": ANY}, step=0),
call(metrics={"valid_loss_0_step": ANY, "valid_loss_2": ANY}, step=1),
