
Add a /free route to unload models or free all memory.

A POST request to /free with: {"unload_models":true}
will unload models from VRAM.

A POST request to /free with: {"free_memory":true}
will unload models and free all cached data from the last run workflow.
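
For reference, the new endpoint can be driven by any plain HTTP client. A minimal Python sketch, assuming the server is listening on the default 127.0.0.1:8188:

import json
import urllib.request

def post_free(payload, host="127.0.0.1", port=8188):
    # POST a JSON body such as {"unload_models": True} or {"free_memory": True}
    # to the /free route added by this commit.
    req = urllib.request.Request(
        f"http://{host}:{port}/free",
        data=json.dumps(payload).encode("utf-8"),
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    with urllib.request.urlopen(req) as resp:
        return resp.status  # 200 on success

if __name__ == "__main__":
    post_free({"unload_models": True})  # unload models from VRAM
    post_free({"free_memory": True})    # also free cached data from the last run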
comfyanonymous
2024-01-04 14:28:11 -05:00
parent 8c6493578b
commit 6d281b4ff4
3 changed files with 44 additions and 2 deletions

server.py

@@ -507,6 +507,17 @@ class PromptServer():
             nodes.interrupt_processing()
             return web.Response(status=200)
 
+        @routes.post("/free")
+        async def post_interrupt(request):
+            json_data = await request.json()
+            unload_models = json_data.get("unload_models", False)
+            free_memory = json_data.get("free_memory", False)
+            if unload_models:
+                self.prompt_queue.set_flag("unload_models", unload_models)
+            if free_memory:
+                self.prompt_queue.set_flag("free_memory", free_memory)
+            return web.Response(status=200)
+
         @routes.post("/history")
         async def post_history(request):
             json_data = await request.json()
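
The handler above only records the flags on the prompt queue; the other files in this commit (not shown in this hunk) act on them between queue items. A rough, hypothetical sketch of that consumer side, assuming a get_flags() accessor paired with the set_flag() calls above and a comfy.model_management.unload_all_models() call; the executor.reset() call is likewise an assumption, not necessarily the commit's actual code:

import comfy.model_management

def handle_free_flags(prompt_queue, executor):
    # Hypothetical: read back the flags stored via set_flag() on the queue.
    flags = prompt_queue.get_flags()
    if flags.get("unload_models", False):
        # Drop all loaded models from VRAM.
        comfy.model_management.unload_all_models()
    if flags.get("free_memory", False):
        # Unload models and also discard cached data from the last executed workflow.
        comfy.model_management.unload_all_models()
        executor.reset()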