Mirror of https://github.com/LostRuins/koboldcpp.git (synced 2025-09-11 01:24:36 +00:00)
try fix cloudflare tunnel (+2 squashed commits)

Squashed commits:
  [87d96bf2] update remote option
  [c30bc909] updated fixed colab (+1 squashed commit)
    Squashed commits:
      [97b77563] updated fixed colab (+2 squashed commits)
        Squashed commits:
          [d851b04c] replaced cloudflare manual dl with remotetunnel in colab
          [90ff1790] updated lite
parent 6dbb8d82b0
commit b0c7b88eac

3 changed files with 36 additions and 26 deletions
Changed file shown below: colab.ipynb (13 changed lines)
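The substance of the commit is easiest to see by lining up the two launch sequences. The sketch below is condensed from the colab.ipynb diff that follows (commands copied from the removed and added notebook cells, with the leading "!" prefixes dropped); it is a summary of the change, not an additional edit in the commit:

    # Before: download and run cloudflared by hand, then scrape nohup.out for the public URL
    wget -c https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64
    chmod +x cloudflared-linux-amd64
    nohup ./cloudflared-linux-amd64 tunnel --url http://localhost:5001 &
    sleep 5
    python koboldcpp.py model.ggml --usecublas 0 mmq --multiuser --gpulayers $Layers \
      --contextsize $ContextSize --hordeconfig concedo 1 1 \
      --onready "echo Connect to the link below && cat nohup.out | grep trycloudflare.com && rm nohup.out"

    # After: let koboldcpp bring up the Cloudflare tunnel itself
    python koboldcpp.py model.ggml --usecublas 0 mmq --multiuser --gpulayers $Layers \
      --contextsize $ContextSize --hordeconfig concedo 1 1 --remotetunnel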
colab.ipynb

@@ -42,7 +42,7 @@
  "source": [
  "#@title <b>v-- Enter your model below and then click this to start Koboldcpp</b>\r\n",
  "\r\n",
- "Model = \"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf\" #@param [\"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/MythoMax-L2-13B-GGUF/resolve/main/mythomax-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/ReMM-SLERP-L2-13B-GGUF/resolve/main/remm-slerp-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/Xwin-LM-13B-v0.2-GGUF/resolve/main/xwin-lm-13b-v0.2.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/Stheno-L2-13B-GGUF/resolve/main/stheno-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GGUF\"]{allow-input: true}\r\n",
+ "Model = \"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf\" #@param [\"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/MythoMax-L2-13B-GGUF/resolve/main/mythomax-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/ReMM-SLERP-L2-13B-GGUF/resolve/main/remm-slerp-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/Xwin-LM-13B-v0.2-GGUF/resolve/main/xwin-lm-13b-v0.2.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/Stheno-L2-13B-GGUF/resolve/main/stheno-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GGUF/resolve/main/mistral-7b-instruct-v0.1.Q4_K_S.gguf\"]{allow-input: true}\r\n",
  "Layers = 43 #@param [43]{allow-input: true}\r\n",
  "ContextSize = 4096 #@param [4096] {allow-input: true}\r\n",
  "\r\n",
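The first hunk only fixes the Mistral entry in the #@param list: the old value pointed at the model's repository page (https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GGUF), so selecting it and running the later "!wget $Model -O model.ggml" cell would save an HTML page instead of a model; the new value points at the actual mistral-7b-instruct-v0.1.Q4_K_S.gguf file. A quick sanity check, hypothetical and not part of the notebook, is to confirm the download starts with the GGUF magic bytes:

    # hypothetical check (not in the notebook): a real GGUF file begins with the ASCII magic "GGUF",
    # while a mistaken HTML download begins with something like "<!DO"
    head -c 4 model.ggml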
@@ -52,17 +52,12 @@
  "kvers = !(cat koboldcpp.py | grep 'KcppVersion = ' | cut -d '\"' -f2)\r\n",
  "kvers = kvers[0]\r\n",
  "!echo Finding prebuilt binary for {kvers}\r\n",
- "!wget -O koboldcpp_cublas.so -c https://kcppcolab.concedo.workers.dev/?{kvers}\r\n",
+ "!wget -O dlfile.tmp -c https://kcppcolab.concedo.workers.dev/?{kvers} && mv dlfile.tmp koboldcpp_cublas.so\r\n",
  "!test -f koboldcpp_cublas.so && echo Prebuilt Binary Exists || echo Prebuilt Binary Does Not Exist\r\n",
  "!test -f koboldcpp_cublas.so && echo Build Skipped || make koboldcpp_cublas LLAMA_CUBLAS=1\r\n",
  "!cp koboldcpp_cublas.so koboldcpp_cublas.dat\r\n",
- "!wget $Model -O model.ggml\r\n",
- "!wget -c https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64\r\n",
- "!chmod +x cloudflared-linux-amd64\r\n",
- "!nohup ./cloudflared-linux-amd64 tunnel --url http://localhost:5001 &\r\n",
- "!sleep 5\r\n",
- "!cat nohup.out\r\n",
- "!python koboldcpp.py model.ggml --usecublas 0 mmq --multiuser --gpulayers $Layers --contextsize $ContextSize --hordeconfig concedo 1 1 --onready \"echo Connect to the link below && cat nohup.out | grep trycloudflare.com && rm nohup.out\"\r\n"
+ "!wget $Model -O model.ggml\r\n",
+ "!python koboldcpp.py model.ggml --usecublas 0 mmq --multiuser --gpulayers $Layers --contextsize $ContextSize --hordeconfig concedo 1 1 --remotetunnel\r\n"
  ]
  }
  ],
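Two details in the second hunk carry the actual fix. First, the prebuilt binary is now downloaded to dlfile.tmp and only renamed to koboldcpp_cublas.so if wget succeeds, so a failed or interrupted download no longer leaves behind a partial file that would make the following test -f checks report "Prebuilt Binary Exists" and skip the rebuild. The same download-then-rename pattern as a standalone sketch (the helper name is illustrative and not from the repo; kvers stands for the version string the notebook extracts from koboldcpp.py):

    # illustrative helper, not part of the commit: fetch to a temp file and
    # move it into place only if the download completed successfully
    fetch_atomic() {
      local url="$1" dest="$2"
      wget -O dlfile.tmp -c "$url" && mv dlfile.tmp "$dest"
    }
    fetch_atomic "https://kcppcolab.concedo.workers.dev/?${kvers}" koboldcpp_cublas.so

Second, the manual cloudflared setup is replaced by koboldcpp's own --remotetunnel flag, which has the server bring up the Cloudflare tunnel itself and print the public trycloudflare.com URL once it is ready, so the nohup.out bookkeeping in the old --onready hook is no longer needed.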