Traceback (most recent call last):
File "C:\Users\xxxx\Deep\magi\magi_llm_app.py", line 786, in textgen_switcher
self.load_model('llama.cpp')
File "C:\Users\xxxx\Deep\magi\magi_llm_app.py", line 768, in load_model
cpp_model = LlamaCppModel.from_pretrained(
File "C:\Users\xxxx\Deep\magi\llamacpp_generate.py", line 12, in from_pretrained
return cls(**params)
File "C:\Users\xxxx\Deep\magi\llamacpp_generate.py", line 7, in __init__
self.model = Llama(**params)
File "C:\Users\xxxx\anaconda3\envs\magi\lib\site-packages\llama_cpp\llama.py", line 191, in __init__
raise ValueError(f"Model path does not exist: {model_path}")
ValueError: Model path does not exist:
--- Loading Exllama model...
INFO: Could not find files for the given pattern(s).
Injected compiler path: C:\Program Files\Microsoft Visual Studio\2022\Community\VC\Tools\MSVC\14.35.32215\bin\Hostx64\x64
E:\Backups\Deep Models\LLama\models\TheBloke_airoboros-33b-gpt4-GPTQ\gptq_model-4bit--1g.safetensors
Exception ignored in: <function Llama.__del__ at 0x0000021C1CA313F0>
Traceback (most recent call last):
File "C:\Users\xxxx\anaconda3\envs\magi\lib\site-packages\llama_cpp\llama.py", line 1333, in __del__
if self.ctx is not None:
AttributeError: 'Llama' object has no attribute 'ctx'
Traceback (most recent call last):
File "C:\Users\xxxx\Deep\magi\magi_llm_app.py", line 788, in textgen_switcher
self.load_model('Exllama')
File "C:\Users\xxxx\Deep\magi\magi_llm_app.py", line 760, in load_model
exllama_model = ExllamaModel.from_pretrained(
File "C:\Users\xxxx\Deep\magi\exllama_generate.py", line 64, in from_pretrained
model = ExLlama(config)
File "C:\Users\xxxx\Deep\magi\exllama\model.py", line 759, in __init__
layer = ExLlamaDecoderLayer(self.config, tensors, f"model.layers.{i}", i, sin, cos)
File "C:\Users\xxxx\Deep\magi\exllama\model.py", line 345, in __init__
self.self_attn = ExLlamaAttention(self.config, tensors, key + ".self_attn", sin, cos, self.index)
File "C:\Users\xxxx\Deep\magi\exllama\model.py", line 257, in __init__
self.q_proj = Ex4bitLinear(config, self.config.hidden_size, self.config.num_attention_heads * self.config.head_dim, False, tensors, key + ".q_proj")
File "C:\Users\xxxx\Deep\magi\exllama\model.py", line 165, in __init__
if self.groupsize is None: raise ValueError("Found group index but no groupsize. What do?")
ValueError: Found group index but no groupsize. What do?