Mirror of https://github.com/LostRuins/koboldcpp.git
fix for mamba processing

parent ba950716a9
commit 47c42fd45c
4 changed files with 24 additions and 3 deletions
@@ -54,7 +54,7 @@
     "ForceRebuild = False #@param {type:\"boolean\"}\r\n",
     "#@markdown <hr>\r\n",
     "LoadLLaVAmmproj = False #@param {type:\"boolean\"}\r\n",
-    "LLaVAmmproj = \"https://huggingface.co/concedo/koboldcpp-mmproj/resolve/main/llama-13b-mmproj-v1.5.Q4_1.gguf\" #@param [\"https://huggingface.co/concedo/koboldcpp-mmproj/resolve/main/llama-13b-mmproj-v1.5.Q4_1.gguf\",\"https://huggingface.co/concedo/koboldcpp-mmproj/resolve/main/mistral-7b-mmproj-v1.5-Q4_1.gguf\",\"https://huggingface.co/concedo/koboldcpp-mmproj/resolve/main/llama-7b-mmproj-v1.5-Q4_0.gguf\"]{allow-input: true}\r\n",
+    "LLaVAmmproj = \"https://huggingface.co/koboldcpp/mmproj/resolve/main/llama-13b-mmproj-v1.5.Q4_1.gguf\" #@param [\"https://huggingface.co/koboldcpp/mmproj/resolve/main/llama-13b-mmproj-v1.5.Q4_1.gguf\",\"https://huggingface.co/koboldcpp/mmproj/resolve/main/mistral-7b-mmproj-v1.5-Q4_1.gguf\",\"https://huggingface.co/koboldcpp/mmproj/resolve/main/llama-7b-mmproj-v1.5-Q4_0.gguf\"]{allow-input: true}\r\n",
     "VCommand = \"\"\r\n",
     "#@markdown <hr>\r\n",
     "LoadImgModel = False #@param {type:\"boolean\"}\r\n",
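For context, the hunk above only swaps the default LLaVA mmproj download URL in the Colab form. A minimal sketch of how such a form field plausibly feeds into the launch arguments is shown below; this is not the notebook's actual cell. The use of koboldcpp's --mmproj flag, the wget download, and the local file name are assumptions for illustration.

# Hypothetical sketch: wire the LoadLLaVAmmproj / LLaVAmmproj form values
# into the extra argument string (VCommand) appended to the koboldcpp launch command.
import subprocess

LoadLLaVAmmproj = True
LLaVAmmproj = "https://huggingface.co/koboldcpp/mmproj/resolve/main/llama-13b-mmproj-v1.5.Q4_1.gguf"

VCommand = ""
if LoadLLaVAmmproj:
    # Download the projector file, then pass it to koboldcpp via --mmproj
    # (flag assumed from koboldcpp's multimodal support; file name is illustrative).
    subprocess.run(["wget", "-O", "mmproj.gguf", LLaVAmmproj], check=True)
    VCommand = " --mmproj mmproj.gguf"

print("extra koboldcpp args:" + VCommand)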