edit colab (+1 squashed commits)

Squashed commits:

[c7ccb99d] update colab with llava
This commit is contained in:
Concedo 2024-03-12 15:21:28 +08:00
parent 6a32c14e86
commit a69bc44e7a
3 changed files with 14 additions and 3 deletions

View file

@ -52,6 +52,11 @@
"Layers = 99 #@param [99]{allow-input: true}\r\n",
"ContextSize = 4096 #@param [4096] {allow-input: true}\r\n",
"ForceRebuild = False #@param {type:\"boolean\"}\r\n",
"#@markdown <hr>\r\n",
"LoadLLaVAmmproj = False #@param {type:\"boolean\"}\r\n",
"LLaVAmmproj = \"https://huggingface.co/concedo/koboldcpp-mmproj/resolve/main/llama-13b-mmproj-v1.5.Q4_1.gguf\" #@param [\"https://huggingface.co/concedo/koboldcpp-mmproj/resolve/main/llama-13b-mmproj-v1.5.Q4_1.gguf\"]{allow-input: true}\r\n",
"Vcommand = \"\"\r\n",
"#@markdown <hr>\r\n",
"LoadImgModel = False #@param {type:\"boolean\"}\r\n",
"ImgModel = \"https://huggingface.co/koboldcpp/imgmodel/resolve/main/imgmodel_older_q4_0.gguf\" #@param [\"https://huggingface.co/koboldcpp/imgmodel/resolve/main/imgmodel_older_q4_0.gguf\"]{allow-input: true}\r\n",
"SCommand = \"\"\r\n",
@ -67,6 +72,10 @@
"kvers = kvers[0]\r\n",
"if ForceRebuild:\r\n",
" kvers = \"force_rebuild\"\r\n",
"if LLaVAmmproj and LoadLLaVAmmproj:\r\n",
" Vcommand = \"--mmproj vmodel.gguf\"\r\n",
"else:\r\n",
" SCommand = \"\"\r\n",
"if ImgModel and LoadImgModel:\r\n",
" SCommand = \"--sdconfig imodel.gguf clamped 4 quant\"\r\n",
"else:\r\n",
@ -79,9 +88,11 @@
"!apt update\r\n",
"!apt install aria2 -y\r\n",
"!aria2c -x 10 -o model.gguf --summary-interval=5 --download-result=default --allow-overwrite=true --file-allocation=none $Model\r\n",
"if Vcommand:\r\n",
" !aria2c -x 10 -o vmodel.gguf --summary-interval=5 --download-result=default --allow-overwrite=true --file-allocation=none $LLaVAmmproj\r\n",
"if SCommand:\r\n",
" !aria2c -x 10 -o imodel.gguf --summary-interval=5 --download-result=default --allow-overwrite=true --file-allocation=none $ImgModel\r\n",
"!python koboldcpp.py model.gguf --usecublas 0 mmq --multiuser --gpulayers $Layers --contextsize $ContextSize --quiet --remotetunnel $SCommand\r\n"
"!python koboldcpp.py model.gguf --usecublas 0 mmq --multiuser --gpulayers $Layers --contextsize $ContextSize --quiet --remotetunnel $Vcommand $SCommand\r\n"
]
}
],