mirror of https://github.com/comfyanonymous/ComfyUI.git synced 2025-08-03 07:26:31 +08:00

Don't unload/reload model from CPU uselessly.

comfyanonymous
2023-02-08 03:17:54 -05:00
parent e3e65947f2
commit a84cd0d1ad
3 changed files with 58 additions and 32 deletions


@@ -15,6 +15,7 @@ sys.path.append(os.path.join(sys.path[0], "comfy"))
 import comfy.samplers
 import comfy.sd
+import model_management
 
 supported_ckpt_extensions = ['.ckpt']
 supported_pt_extensions = ['.ckpt', '.pt', '.bin']
@@ -353,43 +354,39 @@ def common_ksampler(device, model, seed, steps, cfg, sampler_name, scheduler, po
         noise = torch.randn(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, generator=torch.manual_seed(seed), device="cpu")
 
-    real_model = None
-    try:
+    if device != "cpu":
+        model_management.load_model_gpu(model)
+        real_model = model.model
+    else:
+        #TODO: cpu support
         real_model = model.patch_model()
-        real_model.to(device)
 
-        noise = noise.to(device)
-        latent_image = latent_image.to(device)
+    noise = noise.to(device)
+    latent_image = latent_image.to(device)
 
-        positive_copy = []
-        negative_copy = []
+    positive_copy = []
+    negative_copy = []
 
-        for p in positive:
-            t = p[0]
-            if t.shape[0] < noise.shape[0]:
-                t = torch.cat([t] * noise.shape[0])
-            t = t.to(device)
-            positive_copy += [[t] + p[1:]]
-        for n in negative:
-            t = n[0]
-            if t.shape[0] < noise.shape[0]:
-                t = torch.cat([t] * noise.shape[0])
-            t = t.to(device)
-            negative_copy += [[t] + n[1:]]
+    for p in positive:
+        t = p[0]
+        if t.shape[0] < noise.shape[0]:
+            t = torch.cat([t] * noise.shape[0])
+        t = t.to(device)
+        positive_copy += [[t] + p[1:]]
+    for n in negative:
+        t = n[0]
+        if t.shape[0] < noise.shape[0]:
+            t = torch.cat([t] * noise.shape[0])
+        t = t.to(device)
+        negative_copy += [[t] + n[1:]]
 
-        if sampler_name in comfy.samplers.KSampler.SAMPLERS:
-            sampler = comfy.samplers.KSampler(real_model, steps=steps, device=device, sampler=sampler_name, scheduler=scheduler, denoise=denoise)
-        else:
-            #other samplers
-            pass
+    if sampler_name in comfy.samplers.KSampler.SAMPLERS:
+        sampler = comfy.samplers.KSampler(real_model, steps=steps, device=device, sampler=sampler_name, scheduler=scheduler, denoise=denoise)
+    else:
+        #other samplers
+        pass
 
-        samples = sampler.sample(noise, positive_copy, negative_copy, cfg=cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise)
-        samples = samples.cpu()
-        real_model.cpu()
-        model.unpatch_model()
-    except Exception as e:
-        if real_model is not None:
-            real_model.cpu()
-            model.unpatch_model()
-        raise e
+    samples = sampler.sample(noise, positive_copy, negative_copy, cfg=cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise)
+    samples = samples.cpu()
 
     return (samples, )
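
For readers following the change: the old path patched the model, moved the weights to the sampling device, and then moved them back to the CPU and unpatched after every single sample. The new path hands that responsibility to model_management.load_model_gpu(), so the model can stay resident between calls. Below is a minimal sketch of that keep-the-last-model-loaded pattern, assuming a hypothetical module-level cache; the names and internals (current_loaded_model, the body of load_model_gpu) are illustrative assumptions, not the actual model_management implementation from this commit.

# Minimal sketch of a "keep the last model resident" loader.
# current_loaded_model is an assumed module-level cache, not the real
# model_management state; model is the same patchable wrapper the diff
# uses (patch_model(), unpatch_model(), .model).

current_loaded_model = None

def load_model_gpu(model):
    global current_loaded_model
    if model is current_loaded_model:
        # Same model as last time: nothing to unload or reload.
        return
    if current_loaded_model is not None:
        # Push the previously resident model back to system RAM first.
        current_loaded_model.model.cpu()
        current_loaded_model.unpatch_model()
    model.patch_model()
    model.model.cuda()  # assumes a CUDA device; a real loader would pick the device
    current_loaded_model = model

With a loader like this, common_ksampler can simply take real_model = model.model and drop the per-call real_model.cpu()/model.unpatch_model() cleanup, which is exactly what the second hunk removes.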