mirror of https://github.com/LostRuins/koboldcpp.git
synced 2025-09-10 17:14:36 +00:00

renamed some labels

This commit is contained in:
parent a244b1ffd2
commit df7c2b9923

4 changed files with 43 additions and 14 deletions
@@ -54,8 +54,8 @@
 "FlashAttention = True #@param {type:\"boolean\"}\r\n",
 "FACommand = \"\"\r\n",
 "#@markdown <hr>\r\n",
-"LoadLLaVAmmproj = False #@param {type:\"boolean\"}\r\n",
-"LLaVAmmproj = \"https://huggingface.co/koboldcpp/mmproj/resolve/main/llama-13b-mmproj-v1.5.Q4_1.gguf\" #@param [\"https://huggingface.co/koboldcpp/mmproj/resolve/main/llama-13b-mmproj-v1.5.Q4_1.gguf\",\"https://huggingface.co/koboldcpp/mmproj/resolve/main/mistral-7b-mmproj-v1.5-Q4_1.gguf\",\"https://huggingface.co/koboldcpp/mmproj/resolve/main/llama-7b-mmproj-v1.5-Q4_0.gguf\",\"https://huggingface.co/koboldcpp/mmproj/resolve/main/LLaMA3-8B_mmproj-Q4_1.gguf\"]{allow-input: true}\r\n",
+"LoadVisionMMProjector = False #@param {type:\"boolean\"}\r\n",
+"Mmproj = \"https://huggingface.co/koboldcpp/mmproj/resolve/main/llama-13b-mmproj-v1.5.Q4_1.gguf\" #@param [\"https://huggingface.co/koboldcpp/mmproj/resolve/main/llama-13b-mmproj-v1.5.Q4_1.gguf\",\"https://huggingface.co/koboldcpp/mmproj/resolve/main/mistral-7b-mmproj-v1.5-Q4_1.gguf\",\"https://huggingface.co/koboldcpp/mmproj/resolve/main/llama-7b-mmproj-v1.5-Q4_0.gguf\",\"https://huggingface.co/koboldcpp/mmproj/resolve/main/LLaMA3-8B_mmproj-Q4_1.gguf\"]{allow-input: true}\r\n",
 "VCommand = \"\"\r\n",
 "#@markdown <hr>\r\n",
 "LoadImgModel = False #@param {type:\"boolean\"}\r\n",
@@ -71,7 +71,7 @@
 " raise RuntimeError(\"⚠️Colab did not give you a GPU due to usage limits, this can take a few hours before they let you back in. Check out https://lite.koboldai.net for a free alternative (that does not provide an API link but can load KoboldAI saves and chat cards) or subscribe to Colab Pro for immediate access.⚠️\")\r\n",
 "\r\n",
 "%cd /content\r\n",
-"if LLaVAmmproj and LoadLLaVAmmproj:\r\n",
+"if Mmproj and LoadVisionMMProjector:\r\n",
 " VCommand = \"--mmproj vmodel.gguf\"\r\n",
 "else:\r\n",
 " SCommand = \"\"\r\n",
@@ -99,7 +99,7 @@
 " Model = Model.replace(\"/blob/main/\", \"/resolve/main/\")\r\n",
 "!aria2c -x 10 -o model.gguf --summary-interval=5 --download-result=default --allow-overwrite=true --file-allocation=none $Model\r\n",
 "if VCommand:\r\n",
-" !aria2c -x 10 -o vmodel.gguf --summary-interval=5 --download-result=default --allow-overwrite=true --file-allocation=none $LLaVAmmproj\r\n",
+" !aria2c -x 10 -o vmodel.gguf --summary-interval=5 --download-result=default --allow-overwrite=true --file-allocation=none $Mmproj\r\n",
 "if SCommand:\r\n",
 " !aria2c -x 10 -o imodel.gguf --summary-interval=5 --download-result=default --allow-overwrite=true --file-allocation=none $ImgModel\r\n",
 "if WCommand:\r\n",
@@ -1272,6 +1272,7 @@
 "width": 512,
 "height": 512,
 "seed": -1,
+"clip_skip": -1,
 "sampler_name": "Euler a"
 },
 "schema": {
@@ -1297,6 +1298,9 @@
 "seed": {
 "type": "number"
 },
+"clip_skip": {
+"type": "number"
+},
 "sampler_name": {
 "type": "string"
 },
@@ -1356,6 +1360,7 @@
 "width": 512,
 "height": 512,
 "seed": -1,
+"clip_skip": -1,
 "sampler_name": "Euler a",
 "denoising_strength": 0.6,
 "init_images":["base64_image_data"],
@@ -1383,6 +1388,9 @@
 "seed": {
 "type": "number"
 },
+"clip_skip": {
+"type": "number"
+},
 "sampler_name": {
 "type": "string"
 },
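The two schema hunks above document the new clip_skip field on the image-generation payloads (default -1, type number), alongside the existing width, height, seed and sampler_name fields. Below is a minimal client-side sketch of a request that uses it; the base URL and the A1111-compatible /sdapi/v1/txt2img endpoint path are assumptions about a locally running KoboldCpp instance with an image model loaded, not something this commit changes.

```javascript
// Hedged sketch: a txt2img request including the newly documented clip_skip field.
// Assumes a local KoboldCpp instance at http://localhost:5001 exposing the
// A1111-compatible /sdapi/v1/txt2img endpoint; adjust both if your setup differs.
const payload = {
  prompt: "a watercolor fox",
  width: 512,
  height: 512,
  seed: -1,
  clip_skip: 2,              // leave at -1 (the documented default) to let the backend decide
  sampler_name: "Euler a",
  steps: 20,
  cfg_scale: 7
};

fetch("http://localhost:5001/sdapi/v1/txt2img", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify(payload)
})
  .then(r => r.json())
  .then(data => console.log("received", data.images ? data.images.length : 0, "image(s)"));
```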
klite.embd
@@ -12,7 +12,7 @@ Current version indicated by LITEVER below.
 -->
 
 <script>
-const LITEVER = 185;
+const LITEVER = 186;
 const urlParams = new URLSearchParams(window.location.search);
 var localflag = true;
 const STORAGE_PREFIX = (localflag?"e_":"")+"kaihordewebui_";
@@ -4355,6 +4355,7 @@ Current version indicated by LITEVER below.
 const XTTS_ID = 1000;
 const ALLTALK_ID = 1001;
 const OAI_TTS_ID = 1002;
+const BIG_HD_RES_PX = 512; //when saving oversized images, could be 640?
 const HD_RES_PX = 512;
 const NO_HD_RES_PX = 320;
 const SAVE_SLOTS = 6;
@@ -4527,6 +4528,7 @@ Current version indicated by LITEVER below.
 img_allowhd: true,
 img_crop: false,
 img_img2imgstr: 0.6,
+img_clipskip: -1,
 img_steps: 20,
 img_sampler: "Euler a",
 img_aspect:0, //0=square,1=portrait,2=landscape,3=bigsquare
@@ -5570,6 +5572,10 @@ Current version indicated by LITEVER below.
 a1111_t2i_payload.init_images = [req_payload.source_image];
 a1111_t2i_payload.denoising_strength = req_payload.params.denoising_strength;
 }
+if(req_payload.params.clip_skip && req_payload.params.clip_skip>0)
+{
+a1111_t2i_payload.clip_skip = req_payload.params.clip_skip;
+}
 
 if(localsettings.save_remote_images)
 {
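The block added above forwards clip_skip into the outgoing A1111 payload only when it is set and positive, because -1 is the documented "unset" default. This is not code from the commit, just an illustration of that guard as a small helper, with hypothetical names (copyOptionalParam, dstPayload) chosen for the example:

```javascript
// Illustrative only: optional image params use -1 to mean "unset", so they are
// copied into the outgoing payload only when they carry a usable positive value.
function copyOptionalParam(src, dst, key) {
  if (src && typeof src[key] === "number" && src[key] > 0) {
    dst[key] = src[key];
  }
}

// A clip_skip of -1 is skipped, 2 is forwarded.
const dstPayload = {};
copyOptionalParam({ clip_skip: -1 }, dstPayload, "clip_skip");
copyOptionalParam({ clip_skip: 2 }, dstPayload, "clip_skip");
console.log(dstPayload); // { clip_skip: 2 }
```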
@@ -10485,6 +10491,7 @@ Current version indicated by LITEVER below.
 document.getElementById("save_remote_images").checked = localsettings.save_remote_images;
 document.getElementById("img_cfgscale").value = localsettings.img_cfgscale;
 document.getElementById("img_img2imgstr").value = localsettings.img_img2imgstr;
+document.getElementById("img_clipskip").value = localsettings.img_clipskip;
 document.getElementById("img_aspect").value = localsettings.img_aspect;
 document.getElementById("img_sampler").value = localsettings.img_sampler;
 document.getElementById("img_steps").value = localsettings.img_steps;
@@ -10785,6 +10792,7 @@ Current version indicated by LITEVER below.
 
 localsettings.img_cfgscale = parseFloat(document.getElementById("img_cfgscale").value);
 localsettings.img_img2imgstr = parseFloat(document.getElementById("img_img2imgstr").value);
+localsettings.img_clipskip = parseInt(document.getElementById("img_clipskip").value);
 localsettings.img_aspect = parseInt(document.getElementById("img_aspect").value);
 localsettings.img_sampler = document.getElementById("img_sampler").value;
 localsettings.img_steps = parseInt(document.getElementById("img_steps").value);
@@ -10800,7 +10808,12 @@ Current version indicated by LITEVER below.
 {
 localsettings.img_img2imgstr = defaultsettings.img_img2imgstr;
 }
+if(isNaN(localsettings.img_clipskip))
+{
+localsettings.img_clipskip = defaultsettings.img_clipskip;
+}
 localsettings.img_img2imgstr = cleannum(localsettings.img_img2imgstr, 0.0, 1.0);
+localsettings.img_clipskip = cleannum(localsettings.img_clipskip, -1, 20);
 if(isNaN(localsettings.img_aspect))
 {
 localsettings.img_aspect = defaultsettings.img_aspect;
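The sanitation added above falls back to the default when the Clip Skip field does not parse as a number, then passes it through cleannum with bounds -1 and 20. cleannum is an existing Lite helper; its exact behaviour is assumed here to be a clamp into [min, max]. The standalone sketch below (hypothetical helper name clampClipSkip) shows the equivalent end-to-end handling:

```javascript
// Hedged sketch of the new Clip Skip sanitation, assuming cleannum(v, min, max)
// clamps v into [min, max].
function clampClipSkip(rawValue, defaultValue = -1) {
  let v = parseInt(rawValue);
  if (isNaN(v)) {
    v = defaultValue;                      // mirrors the isNaN fallback in the hunk
  }
  return Math.min(20, Math.max(-1, v));    // mirrors cleannum(v, -1, 20)
}

console.log(clampClipSkip("abc")); // -1 (treated as unset)
console.log(clampClipSkip("35"));  // 20 (clamped to the allowed maximum)
console.log(clampClipSkip("2"));   // 2
```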
@@ -13881,6 +13894,10 @@ Current version indicated by LITEVER below.
 genimg_payload["source_image"] = base64img;
 genimg_payload["params"]["denoising_strength"] = localsettings.img_img2imgstr;
 }
+if(localsettings.img_clipskip>0)
+{
+genimg_payload["params"]["clip_skip"] = localsettings.img_clipskip;
+}
 
 if(localsettings.generate_images_mode==1) //horde
 {
@@ -13931,7 +13948,7 @@ Current version indicated by LITEVER below.
 {
 //console.log(outputimg);
 let origImg = "data:image/jpeg;base64," + outputimg;
-let imgres = localsettings.img_allowhd?HD_RES_PX:NO_HD_RES_PX;
+let imgres = localsettings.img_allowhd?(localsettings.img_aspect==0?HD_RES_PX:BIG_HD_RES_PX):NO_HD_RES_PX;
 compressImage(origImg, (newDataUri) => {
 image_db[imgid].done = true;
 image_db[imgid].result = newDataUri;
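This hunk, and the three polling hunks further down, replace the old two-way imgres choice with a three-way one that uses the new BIG_HD_RES_PX constant for non-square aspects. The sketch below (hypothetical helper name pickSaveResolution) just spells out that inline ternary as a decision table; the constants are the ones defined earlier in klite.embd and are currently both 512:

```javascript
// Illustrative helper equivalent to the new inline expression:
// localsettings.img_allowhd ? (img_aspect==0 ? HD_RES_PX : BIG_HD_RES_PX) : NO_HD_RES_PX
const BIG_HD_RES_PX = 512; // oversized saves, "could be 640?" per the source comment
const HD_RES_PX = 512;
const NO_HD_RES_PX = 320;

function pickSaveResolution(allowHd, aspect) {
  if (!allowHd) {
    return NO_HD_RES_PX;                              // higher-res saving disabled
  }
  return (aspect === 0) ? HD_RES_PX : BIG_HD_RES_PX;  // 0 = square; other aspects get the big size
}

console.log(pickSaveResolution(false, 1)); // 320
console.log(pickSaveResolution(true, 0));  // 512
console.log(pickSaveResolution(true, 2));  // 512 today, larger if BIG_HD_RES_PX is ever raised
```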
@@ -13961,7 +13978,7 @@ Current version indicated by LITEVER below.
 {
 //console.log(outputimg);
 let origImg = "data:image/jpeg;base64," + outputimg;
-let imgres = localsettings.img_allowhd?HD_RES_PX:NO_HD_RES_PX;
+let imgres = localsettings.img_allowhd?(localsettings.img_aspect==0?HD_RES_PX:BIG_HD_RES_PX):NO_HD_RES_PX;
 compressImage(origImg, (newDataUri) => {
 image_db[imgid].done = true;
 image_db[imgid].result = newDataUri;
@@ -14610,7 +14627,7 @@ Current version indicated by LITEVER below.
 console.log("polling for pending images " + imagecount);
 for (let key in image_db) {
 let img = image_db[key];
-if (img.done == false && img.poll_category==1) {
+if (img.done == false && img.poll_category==1) { //horde image polling
 //call check
 fetch(stablehorde_poll_endpoint + "/" + key)
 .then(x => x.json())
@@ -14637,7 +14654,7 @@ Current version indicated by LITEVER below.
 img.queue = 0;
 let origImg = "data:image/jpeg;base64," + finalimg.generations[0].img;
 //console.log("Original image: " + origImg);
-let imgres = localsettings.img_allowhd?HD_RES_PX:NO_HD_RES_PX;
+let imgres = localsettings.img_allowhd?(localsettings.img_aspect==0?HD_RES_PX:BIG_HD_RES_PX):NO_HD_RES_PX;
 compressImage(origImg, (newDataUri) => {
 img.result = newDataUri;
 }, true, false, imgres,0.35,false);
@@ -14660,7 +14677,7 @@ Current version indicated by LITEVER below.
 delete image_db[key];
 });
 }
-else if (img.done == false && img.poll_category==2)
+else if (img.done == false && img.poll_category==2) //comfyui image polling
 {
 //comfyui polling
 fetch(localsettings.saved_comfy_url + comfy_history_endpoint + "/" + key, {
@@ -14687,7 +14704,7 @@ Current version indicated by LITEVER below.
 reader.onloadend = () => {
 img.queue = 0;
 let origImg = reader.result;
-let imgres = localsettings.img_allowhd?HD_RES_PX:NO_HD_RES_PX;
+let imgres = localsettings.img_allowhd?(localsettings.img_aspect==0?HD_RES_PX:BIG_HD_RES_PX):NO_HD_RES_PX;
 compressImage(origImg, (newDataUri) => {
 img.result = newDataUri;
 }, true, false, imgres,0.35,false);
@@ -19571,6 +19588,10 @@ Current version indicated by LITEVER below.
 </span>: </div>
 <input title="Img2Img Strength" type="text" inputmode="decimal" id="img_img2imgstr" style="width:60px">
 </div>
+<div class="inlinelabel">
+<div class="justifyleft" style="padding:4px">Clip Skip: </div>
+<input title="Clip Skip" type="text" inputmode="decimal" id="img_clipskip" style="width:60px">
+</div>
 <div class="inlinelabel">
 <div class="justifyleft" style="padding:4px">Save Higher-Res <span class="helpicon">?
 <span class="helptext">This option will result in larger save files which may be slower. Changing this setting only applies to NEW images.</span>
@@ -2102,7 +2102,7 @@ Enter Prompt:<br>
 self.send_response(503)
 self.end_headers(content_type='application/json')
 self.wfile.write(json.dumps({"detail": {
-"msg": "No LLaVA model loaded",
+"msg": "No Vision model loaded",
 "type": "service_unavailable",
 }}).encode())
 return
@@ -2982,7 +2982,7 @@ def show_gui():
 makefileentry(model_tab, "Model:", "Select GGUF or GGML Model File", model_var, 1,width=280, onchoosefile=on_picked_model_file,tooltiptxt="Select a GGUF or GGML model file on disk to be loaded.")
 makefileentry(model_tab, "Lora:", "Select Lora File",lora_var, 3,width=280,tooltiptxt="Select an optional GGML LoRA adapter to use.\nLeave blank to skip.")
 makefileentry(model_tab, "Lora Base:", "Select Lora Base File", lora_base_var, 5,width=280,tooltiptxt="Select an optional F16 GGML LoRA base file to use.\nLeave blank to skip.")
-makefileentry(model_tab, "LLaVA mmproj:", "Select LLaVA mmproj File", mmproj_var, 7,width=280,tooltiptxt="Select a mmproj file to use for LLaVA.\nLeave blank to skip.")
+makefileentry(model_tab, "Vision mmproj:", "Select Vision mmproj File", mmproj_var, 7,width=280,tooltiptxt="Select a mmproj file to use for vision models like LLaVA.\nLeave blank to skip.")
 makefileentry(model_tab, "Preloaded Story:", "Select Preloaded Story File", preloadstory_var, 9,width=280,tooltiptxt="Select an optional KoboldAI JSON savefile \nto be served on launch to any client.")
 makefileentry(model_tab, "ChatCompletions Adapter:", "Select ChatCompletions Adapter File", chatcompletionsadapter_var, 12, width=250, filetypes=[("JSON Adapter", "*.json")], tooltiptxt="Select an optional ChatCompletions Adapter JSON file to force custom instruct tags.")
 def pickpremadetemplate():
@@ -4661,7 +4661,7 @@ if __name__ == '__main__':
 advparser.add_argument("--quiet", help="Enable quiet mode, which hides generation inputs and outputs in the terminal. Quiet mode is automatically enabled when running a horde worker.", action='store_true')
 advparser.add_argument("--ssl", help="Allows all content to be served over SSL instead. A valid UNENCRYPTED SSL cert and key .pem files must be provided", metavar=('[cert_pem]', '[key_pem]'), nargs='+')
 advparser.add_argument("--nocertify", help="Allows insecure SSL connections. Use this if you have cert errors and need to bypass certificate restrictions.", action='store_true')
-advparser.add_argument("--mmproj", help="Select a multimodal projector file for LLaVA.", default="")
+advparser.add_argument("--mmproj", help="Select a multimodal projector file for vision models like LLaVA.", default="")
 advparser.add_argument("--password", help="Enter a password required to use this instance. This key will be required for all text endpoints. Image endpoints are not secured.", default=None)
 advparser.add_argument("--ignoremissing", help="Ignores all missing non-essential files, just skipping them instead.", action='store_true')
 advparser.add_argument("--chatcompletionsadapter", help="Select an optional ChatCompletions Adapter JSON file to force custom instruct tags.", default="")