File "D:\AI\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI_VLM_nodes\nodes\llavaloader.py", line 4, in from llama_cpp import Llama raise RuntimeError(f"Failed ...
    from .kolors.pipelines.pipeline_stable_diffusion_xl_chatglm_256 import StableDiffusionXLPipeline
  File "E:\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI ...