Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Log TensorBoard histograms #19851

Open
wants to merge 6 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 13 additions & 10 deletions src/lightning/fabric/loggers/tensorboard.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,19 +205,22 @@ def log_metrics(self, metrics: Mapping[str, float], step: Optional[int] = None)
metrics = _add_prefix(metrics, self._prefix, self.LOGGER_JOIN_CHAR)

for k, v in metrics.items():
if isinstance(v, Tensor):
if isinstance(v, Tensor) and v.ndim == 0:
v = v.item()

if isinstance(v, dict):
self.experiment.add_scalars(k, v, step)
else:
try:
try:
if isinstance(v, dict):
self.experiment.add_scalars(k, v, step)
elif isinstance(v, Tensor):
self.experiment.add_histogram(k, v, step)
else:
self.experiment.add_scalar(k, v, step)
# TODO(fabric): specify the possible exception
except Exception as ex:
raise ValueError(
f"\n you tried to log {v} which is currently not supported. Try a dict or a scalar/tensor."
) from ex

# TODO(fabric): specify the possible exception
except Exception as ex:
raise ValueError(
f"\n you tried to log {v} which is currently not supported. Try a dict or a scalar/tensor."
) from ex

@override
@rank_zero_only
Expand Down
2 changes: 1 addition & 1 deletion src/lightning/pytorch/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

- Relaxed the requirement for custom batch samplers to expose `drop_last` for prediction ([#19678](https://github.com/Lightning-AI/pytorch-lightning/pull/19678))

-
- Allow `LightningModule` to log `TensorBoard` histograms ([#19851](https://github.com/Lightning-AI/pytorch-lightning/pull/19851))

### Deprecated

Expand Down
8 changes: 5 additions & 3 deletions src/lightning/pytorch/core/module.py
Original file line number Diff line number Diff line change
Expand Up @@ -642,10 +642,12 @@ def __to_tensor(self, value: Union[Tensor, numbers.Number], name: str) -> Tensor
if isinstance(value, Tensor)
else torch.tensor(value, device=self.device, dtype=_get_default_dtype())
)
if not torch.numel(value) == 1:

# check tensor contains single element (implies value.ndim == 0), or is a non-empty 1D array
if not (torch.numel(value) == 1 or (torch.numel(value) > 0 and value.ndim == 1)):
raise ValueError(
f"`self.log({name}, {value})` was called, but the tensor must have a single element."
f" You can try doing `self.log({name}, {value}.mean())`"
f"`self.log({name}, {value})` was called, but the tensor must have a single element, "
f"or a single non-empty dimension. You can try doing `self.log({name}, {value}.mean())`"
)
value = value.squeeze()
return value
Expand Down
8 changes: 7 additions & 1 deletion tests/tests_pytorch/loggers/test_tensorboard.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,13 @@ def name(self):
@pytest.mark.parametrize("step_idx", [10, None])
def test_tensorboard_log_metrics(tmp_path, step_idx):
logger = TensorBoardLogger(tmp_path)
metrics = {"float": 0.3, "int": 1, "FloatTensor": torch.tensor(0.1), "IntTensor": torch.tensor(1)}
metrics = {
"float": 0.3,
"int": 1,
"FloatTensor": torch.tensor(0.1),
"IntTensor": torch.tensor(1),
"Histogram": torch.tensor([10, 100, 1000]),
}
logger.log_metrics(metrics, step_idx)


Expand Down
12 changes: 10 additions & 2 deletions tests/tests_pytorch/trainer/logging_/test_train_loop_logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -640,10 +640,18 @@ def training_step(self, *args):

class TestModel(BoringModel):
def on_train_start(self):
self.log("foo", torch.tensor([1.0, 2.0]))
self.log("foo", torch.tensor([])) # empty

model = TestModel()
with pytest.raises(ValueError, match="tensor must have a single element"):
with pytest.raises(ValueError, match="tensor must have a single element, or a single non-empty dimension."):
trainer.fit(model)

class TestModel(BoringModel):
def on_train_start(self):
self.log("foo", torch.tensor([[1.0], [2.0]])) # too-many dimensions

model = TestModel()
with pytest.raises(ValueError, match="tensor must have a single element, or a single non-empty dimension."):
trainer.fit(model)


Expand Down
Loading