From 646818a4d966c51931793900673073d6d6a5fd9d Mon Sep 17 00:00:00 2001
From: Aarni Koskela
Date: Thu, 11 May 2023 09:46:43 +0300
Subject: [PATCH] Apply manual ruff fixes

---
 k_diffusion/gns.py   |  4 ++--
 k_diffusion/utils.py | 14 ++++++++++----
 2 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/k_diffusion/gns.py b/k_diffusion/gns.py
index 6cdbe0fc..d0c25d63 100644
--- a/k_diffusion/gns.py
+++ b/k_diffusion/gns.py
@@ -5,8 +5,8 @@ class DDPGradientStatsHook:
     def __init__(self, ddp_module):
         try:
             ddp_module.register_comm_hook(self, self._hook_fn)
-        except AttributeError:
-            raise ValueError('DDPGradientStatsHook does not support non-DDP wrapped modules')
+        except AttributeError as ae:
+            raise ValueError('DDPGradientStatsHook does not support non-DDP wrapped modules') from ae
         self._clear_state()
 
     def _clear_state(self):
diff --git a/k_diffusion/utils.py b/k_diffusion/utils.py
index 9afedb99..e1897e22 100644
--- a/k_diffusion/utils.py
+++ b/k_diffusion/utils.py
@@ -178,8 +178,11 @@ def __init__(self, optimizer, inv_gamma=1., power=1., warmup=0., min_lr=0.,
 
     def get_lr(self):
         if not self._get_lr_called_within_step:
-            warnings.warn("To get the last learning rate computed by the scheduler, "
-                          "please use `get_last_lr()`.")
+            warnings.warn(
+                "To get the last learning rate computed by the scheduler, "
+                "please use `get_last_lr()`.",
+                stacklevel=1,
+            )
 
         return self._get_closed_form_lr()
 
@@ -219,8 +222,11 @@ def __init__(self, optimizer, num_steps, decay=0.5, warmup=0., min_lr=0.,
 
     def get_lr(self):
         if not self._get_lr_called_within_step:
-            warnings.warn("To get the last learning rate computed by the scheduler, "
-                          "please use `get_last_lr()`.")
+            warnings.warn(
+                "To get the last learning rate computed by the scheduler, "
+                "please use `get_last_lr()`.",
+                stacklevel=1,
+            )
 
         return self._get_closed_form_lr()