ema.py

import torch
from torch import nn


class LitEma(nn.Module):
    def __init__(self, model, decay=0.9999, use_num_updates=True):
        super().__init__()
        if decay < 0.0 or decay > 1.0:
            raise ValueError("Decay must be between 0 and 1")

        self.m_name2s_name = {}
        self.register_buffer("decay", torch.tensor(decay, dtype=torch.float32))
        self.register_buffer(
            "num_updates",
            torch.tensor(0, dtype=torch.int)
            if use_num_updates
            else torch.tensor(-1, dtype=torch.int),
        )

        for name, p in model.named_parameters():
            if p.requires_grad:
                # '.' is not allowed in buffer names, so strip it from the
                # parameter name and remember the mapping
                s_name = name.replace(".", "")
                self.m_name2s_name.update({name: s_name})
                self.register_buffer(s_name, p.clone().detach().data)

        self.collected_params = []
    def reset_num_updates(self):
        del self.num_updates
        self.register_buffer("num_updates", torch.tensor(0, dtype=torch.int))
    def forward(self, model):
        """Update the shadow parameters with an exponential moving average of
        the model's current trainable parameters."""
        decay = self.decay

        if self.num_updates >= 0:
            self.num_updates += 1
            # warm up the decay so early updates track the model more closely
            decay = min(self.decay, (1 + self.num_updates) / (10 + self.num_updates))

        one_minus_decay = 1.0 - decay

        with torch.no_grad():
            m_param = dict(model.named_parameters())
            shadow_params = dict(self.named_buffers())

            for key in m_param:
                if m_param[key].requires_grad:
                    sname = self.m_name2s_name[key]
                    shadow_params[sname] = shadow_params[sname].type_as(m_param[key])
                    shadow_params[sname].sub_(
                        one_minus_decay * (shadow_params[sname] - m_param[key])
                    )
                else:
                    assert key not in self.m_name2s_name
    def copy_to(self, model):
        """Copy the shadow (EMA) parameters into the model's trainable parameters."""
        m_param = dict(model.named_parameters())
        shadow_params = dict(self.named_buffers())
        for key in m_param:
            if m_param[key].requires_grad:
                m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data)
            else:
                assert key not in self.m_name2s_name
    def store(self, parameters):
        """
        Save the current parameters for restoring later.
        Args:
            parameters: Iterable of `torch.nn.Parameter`; the parameters to be
                temporarily stored.
        """
        self.collected_params = [param.clone() for param in parameters]
    def restore(self, parameters):
        """
        Restore the parameters stored with the `store` method.
        Useful to validate the model with EMA parameters without affecting the
        original optimization process. Store the parameters before the
        `copy_to` method. After validation (or model saving), use this to
        restore the former parameters.
        Args:
            parameters: Iterable of `torch.nn.Parameter`; the parameters to be
                updated with the stored parameters.
        """
        for c_param, param in zip(self.collected_params, parameters):
            param.data.copy_(c_param.data)
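

# --- Usage sketch (added for illustration; not part of the original module) ---
# A minimal example of how LitEma is typically wired into a training loop:
# update the shadow weights after each optimizer step, then temporarily swap
# them in for evaluation via store / copy_to / restore. The toy model, data,
# and hyperparameters below are assumptions, not values from this file.
if __name__ == "__main__":
    model = nn.Linear(4, 2)
    optimizer = torch.optim.SGD(model.parameters(), lr=1e-2)
    model_ema = LitEma(model, decay=0.999)

    for _ in range(10):
        x = torch.randn(8, 4)
        loss = model(x).pow(2).mean()
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        model_ema(model)  # blend the shadow params toward the current params

    # Evaluate with EMA weights, then put the raw training weights back.
    model_ema.store(model.parameters())
    model_ema.copy_to(model)
    print("EMA weight norm:", model.weight.norm().item())
    model_ema.restore(model.parameters())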