Skip to content

Commit e9e9a03

Browse files
Show a better error when the workflow OOMs. (comfyanonymous#8574)
1 parent d7430c5 commit e9e9a03

File tree

1 file changed

+7
-4
lines changed

1 file changed

+7
-4
lines changed

execution.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -429,17 +429,20 @@ def pre_execute_cb(call_index):
429429

430430
logging.error(f"!!! Exception during processing !!! {ex}")
431431
logging.error(traceback.format_exc())
432+
tips = ""
433+
434+
if isinstance(ex, comfy.model_management.OOM_EXCEPTION):
435+
tips = "This error means you ran out of memory on your GPU.\n\nTIPS: If the workflow worked before you might have accidentally set the batch_size to a large number."
436+
logging.error("Got an OOM, unloading all loaded models.")
437+
comfy.model_management.unload_all_models()
432438

433439
error_details = {
434440
"node_id": real_node_id,
435-
"exception_message": str(ex),
441+
"exception_message": "{}\n{}".format(ex, tips),
436442
"exception_type": exception_type,
437443
"traceback": traceback.format_tb(tb),
438444
"current_inputs": input_data_formatted
439445
}
440-
if isinstance(ex, comfy.model_management.OOM_EXCEPTION):
441-
logging.error("Got an OOM, unloading all loaded models.")
442-
comfy.model_management.unload_all_models()
443446

444447
return (ExecutionResult.FAILURE, error_details, ex)
445448

0 commit comments

Comments (0)