model.py

import os

import torch
from omegaconf import OmegaConf

from sorawm.iopaint.model.anytext.ldm.util import instantiate_from_config


def get_state_dict(d):
    # Some checkpoints wrap their weights under a "state_dict" key;
    # fall back to the dict itself when that key is absent.
    return d.get("state_dict", d)


def load_state_dict(ckpt_path, location="cpu"):
    # Dispatch on file extension: safetensors checkpoints use their own
    # loader, everything else goes through torch.load.
    _, extension = os.path.splitext(ckpt_path)
    if extension.lower() == ".safetensors":
        # Imported lazily so safetensors stays an optional dependency.
        import safetensors.torch

        state_dict = safetensors.torch.load_file(ckpt_path, device=location)
    else:
        state_dict = get_state_dict(
            torch.load(ckpt_path, map_location=torch.device(location))
        )
    # Unwrap a possible {"state_dict": ...} container.
    state_dict = get_state_dict(state_dict)
    print(f"Loaded state_dict from [{ckpt_path}]")
    return state_dict


def create_model(config_path, device, cond_stage_path=None, use_fp16=False):
    config = OmegaConf.load(config_path)
    # if cond_stage_path:
    #     config.model.params.cond_stage_config.params.version = (
    #         cond_stage_path  # use pre-downloaded ckpts, in case blocked
    #     )
    config.model.params.cond_stage_config.params.device = str(device)
    if use_fp16:
        # Propagate the fp16 flag to the model and its sub-configs.
        config.model.params.use_fp16 = True
        config.model.params.control_stage_config.params.use_fp16 = True
        config.model.params.unet_config.params.use_fp16 = True
    # Instantiate on CPU; the caller decides when to move it to a device.
    model = instantiate_from_config(config.model).cpu()
    print(f"Loaded model config from [{config_path}]")
    return model
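

# Minimal usage sketch (assumed file paths; substitute the AnyText YAML
# config and checkpoint from your own checkout). create_model builds the
# model on CPU from the config, load_state_dict reads the weights, and the
# caller copies them in and moves the model to the target device.
if __name__ == "__main__":
    device = "cuda" if torch.cuda.is_available() else "cpu"
    model = create_model("configs/anytext.yaml", device, use_fp16=False)
    model.load_state_dict(load_state_dict("ckpts/anytext.ckpt", location="cpu"))
    model = model.to(device)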