From 6c723469cf1443932384ef2fd0e2ed2dd3a1dd76 Mon Sep 17 00:00:00 2001 From: Tanay Aggarwal Date: Tue, 31 Aug 2021 17:14:52 +0530 Subject: [PATCH] Incubation of Graphical Representation in TF2 of HParams This feature request was raised in "How to add graphs to hparams in tensorboard?" on PyTorch. Using add_hparams together with add_scalar is common in Machine Learning and Deep Learning applications; however, add_hparams generates its event file independently of add_scalar, so the TensorBoard log cannot be used intuitively, which goes against the convention of one event file per experiment. https://github.com/pytorch/pytorch/issues/64258#issue-983739478 --- torch/utils/tensorboard/writer.py | 34 +++++++++++++++++-------------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/torch/utils/tensorboard/writer.py b/torch/utils/tensorboard/writer.py index cd7f751a32f5f..5bca7cf91ea9a 100644 --- a/torch/utils/tensorboard/writer.py +++ b/torch/utils/tensorboard/writer.py @@ -297,21 +297,25 @@ def add_hparams( :scale: 50 % """ - torch._C._log_api_usage_once("tensorboard.logging.add_hparams") - if type(hparam_dict) is not dict or type(metric_dict) is not dict: - raise TypeError('hparam_dict and metric_dict should be dictionary.') - exp, ssi, sei = hparams(hparam_dict, metric_dict, hparam_domain_discrete) - - if not run_name: - run_name = str(time.time()) - logdir = os.path.join(self._get_file_writer().get_logdir(), run_name) - with SummaryWriter(log_dir=logdir) as w_hp: - w_hp.file_writer.add_summary(exp) - w_hp.file_writer.add_summary(ssi) - w_hp.file_writer.add_summary(sei) - for k, v in metric_dict.items(): - w_hp.add_scalar(k, v) - + def add_hparams(self, hparam_dict, metric_dict, hparam_domain_discrete=None, run_name=None): + torch._C._log_api_usage_once("tensorboard.logging.add_hparams") + exp, ssi, sei = hparams(hparam_dict, metric_dict, hparam_domain_discrete) + self.file_writer.add_summary(exp) + 
self.file_writer.add_summary(ssi) + self.file_writer.add_summary(sei) + for k, v in metric_dict.items(): + if v is not None: + self.add_scalar(k, v) + from torch.utils.tensorboard import SummaryWriter +for i in range(5): + save_metrics = {'train/acc': None, 'train/loss': None} + writer = SummaryWriter(f'runs/{i}') + for step in range(50): + writer.add_scalar('train/acc', 10*i+step, step) + writer.add_scalar('train/loss', 10*i - step, step) + writer.add_hparams({'lr': 0.1*i, 'bsize': i}, save_metrics) + writer.close() + def add_scalar( self, tag,