Currently getting this error on the latest version of ComfyUI:
```
0%| | 0/20 [00:00<?, ?it/s]
!!! Exception during processing !!! 'NoneType' object has no attribute 'prepare_state'
Traceback (most recent call last):
  File "P:\ComfyUI_windows_portable\ComfyUI\execution.py", line 328, in execute
    output_data, output_ui, has_subgraph = get_output_data(obj, input_data_all, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
                                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\ComfyUI\execution.py", line 203, in get_output_data
    return_values = _map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
                    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\ComfyUI\execution.py", line 174, in _map_node_over_list
    process_inputs(input_dict, i)
  File "P:\ComfyUI_windows_portable\ComfyUI\execution.py", line 163, in process_inputs
    results.append(getattr(obj, func)(**inputs))
                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-Prediction\nodes\nodes_pred.py", line 109, in sample
    samples = sample_pred(
              ^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-Prediction\nodes\nodes_pred.py", line 206, in sample_pred
    samples = sampler.sample(predictor_model, sigmas, extra_args, callback, noise, latent, noise_mask, disable_pbar)
              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\ComfyUI\comfy\samplers.py", line 707, in sample
    samples = self.sampler_function(model_k, noise, sigmas, extra_args=extra_args, callback=k_callback, disable=disable_pbar, **self.extra_options)
              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\utils\_contextlib.py", line 116, in decorate_context
    return func(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\ComfyUI\comfy\k_diffusion\sampling.py", line 155, in sample_euler
    denoised = model(x, sigma_hat * s_in, **extra_args)
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\ComfyUI\comfy\samplers.py", line 379, in __call__
    out = self.inner_model(x, sigma, model_options=model_options, seed=seed)
          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1736, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1747, in _call_impl
    return forward_call(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-Prediction\nodes\nodes_pred.py", line 64, in forward
    return self.apply_model(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-Prediction\nodes\nodes_pred.py", line 58, in apply_model
    return self.pred.predict_noise(x, timestep, self.inner_model, self.conds, model_options, seed)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-Prediction\nodes\nodes_pred.py", line 318, in predict_noise
    self.cached_prediction = self.predict_noise_uncached(x, timestep, model, conds, model_options, seed)
                             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI-Prediction\nodes\nodes_pred.py", line 358, in predict_noise_uncached
    return calc_cond_batch(model, [conds[self.cond_name]], x, timestep, model_options)[0]
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\ComfyUI\comfy\samplers.py", line 195, in calc_cond_batch
    return executor.execute(model, conds, x_in, timestep, model_options)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\ComfyUI\comfy\patcher_extension.py", line 110, in execute
    return self.original(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "P:\ComfyUI_windows_portable\ComfyUI\comfy\samplers.py", line 229, in _calc_cond_batch
    model.current_patcher.prepare_state(timestep)
    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
AttributeError: 'NoneType' object has no attribute 'prepare_state'
Prompt executed in 4.58 seconds
```
This seems to be caused by the recent ModelPatcher overhaul: `_calc_cond_batch` in `comfy/samplers.py` now calls `model.current_patcher.prepare_state(timestep)` unconditionally, but when ComfyUI-Prediction calls `calc_cond_batch` through its own sampling path, `current_patcher` is still `None`.
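For anyone hitting this in the meantime, here is a rough sketch of a possible local workaround (untested; the names `model_patcher` and `ensure_current_patcher` are my own, based only on the traceback, not on the actual ComfyUI-Prediction source): attach the ModelPatcher back-reference to the wrapped model before the custom sampling path calls `calc_cond_batch`.

```python
# Sketch only: assumes `model_patcher` is the ModelPatcher object the node
# receives as input, and that the object eventually passed to calc_cond_batch
# is `model_patcher.model`. Both assumptions come from reading the traceback,
# so treat this as a starting point rather than a confirmed fix.

def ensure_current_patcher(model_patcher):
    inner_model = model_patcher.model
    # Newer comfy/samplers.py dereferences model.current_patcher without a
    # None check, so make sure the back-reference exists before sampling.
    if getattr(inner_model, "current_patcher", None) is None:
        inner_model.current_patcher = model_patcher
    return inner_model

# e.g. call ensure_current_patcher(model_patcher) inside sample_pred()
# before sampler.sample(...) is invoked.
```

A proper fix probably belongs in the node itself, mirroring however ComfyUI core now sets `current_patcher` in its built-in sampling path.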