Mirror of https://github.com/LostRuins/koboldcpp.git, synced 2025-09-11 01:24:36 +00:00.
noscript image gen

Commit 12c2efdadd, parent 95d1aaf4d4.
2 changed files with 61 additions and 20 deletions.
klite.embd (12 changed lines)
@@ -12,7 +12,7 @@ Current version indicated by LITEVER below.
 -->
 <script>
-const LITEVER = 230;
+const LITEVER = 231;
 const urlParams = new URLSearchParams(window.location.search);
 var localflag = urlParams.get('local'); //this will be replaced automatically in embedded kcpp
 const STORAGE_PREFIX = (localflag?"e_":"")+"kaihordewebui_";
@@ -5084,15 +5084,14 @@ Current version indicated by LITEVER below.
 .then((response) => response.json())
 .then((data) => {
 console.log("sync finished response: " + JSON.stringify(data));
+last_response_obj = JSON.parse(JSON.stringify(data));
 if (custom_oai_key != "" && data.choices != null && data.choices.length > 0) {
 let dch = data.choices[0];
 if (dch.text) {
 synchro_polled_response = dch.text;
-last_response_obj = JSON.parse(JSON.stringify(data));
 }
 else if (dch.message) {
 synchro_polled_response = dch.message.content;
-last_response_obj = JSON.parse(JSON.stringify(data));

 if(oaiemulatecompletionscontent!="" && synchro_polled_response!="" && synchro_polled_response.startsWith(oaiemulatecompletionscontent))
 {
@@ -7662,7 +7661,7 @@ Current version indicated by LITEVER below.
 }
 function togglescenarioautopick()
 {
-if(localflag)
+if(localflag || is_using_custom_ep())
 {
 document.getElementById("scenarioautopickbox").classList.add("hidden");
 }
@@ -12050,6 +12049,8 @@ Current version indicated by LITEVER below.

 function display_newgame() {
 mainmenu_untab(true);
+document.getElementById("keep_ai_selected").checked = true;
+document.getElementById("keep_memory").checked = false;
 document.getElementById("newgamecontainer").classList.remove("hidden");
 }

@@ -14917,6 +14918,7 @@ Current version indicated by LITEVER below.
 .then((response) => response.json())
 .then((data) => {
 console.log("sync finished response: " + JSON.stringify(data));
+last_response_obj = JSON.parse(JSON.stringify(data));
 if(custom_claude_key != "" && data.content && data.content.length > 0)
 {
 let comp = "";
@@ -15087,6 +15089,7 @@ Current version indicated by LITEVER below.
 .then((response) => response.json())
 .then((data) => {
 console.log("sync finished response: " + JSON.stringify(data));
+last_response_obj = JSON.parse(JSON.stringify(data));
 if (custom_gemini_key != "" && data.candidates != null && data.candidates.length>0 && data.candidates[0].output && data.candidates[0].output != "") {
 synchro_polled_response = data.candidates[0].output;
 }else if (custom_gemini_key != "" && data.candidates != null && data.candidates.length>0 && data.candidates[0].content && data.candidates[0].content.parts != null && data.candidates[0].content.parts.length>0) {
@@ -15160,6 +15163,7 @@ Current version indicated by LITEVER below.
 .then((response) => response.json())
 .then((data) => {
 console.log("sync finished response: " + JSON.stringify(data));
+last_response_obj = JSON.parse(JSON.stringify(data));
 if (custom_cohere_key != "" && data && data.text) {
 if (data.text) {
 synchro_polled_response = data.text
koboldcpp.py (59 changed lines)
@@ -2546,15 +2546,19 @@ class KcppServerRequestHandler(http.server.SimpleHTTPRequestHandler):
 status = str(parsed_dict['status'][0]) if 'status' in parsed_dict else "Ready To Generate"
 prompt = str(parsed_dict['prompt'][0]) if 'prompt' in parsed_dict else ""
 chatmsg = str(parsed_dict['chatmsg'][0]) if 'chatmsg' in parsed_dict else ""
+imgprompt = str(parsed_dict['imgprompt'][0]) if 'imgprompt' in parsed_dict else ""
 max_length = int(parsed_dict['max_length'][0]) if 'max_length' in parsed_dict else 100
 temperature = float(parsed_dict['temperature'][0]) if 'temperature' in parsed_dict else 0.75
 top_k = int(parsed_dict['top_k'][0]) if 'top_k' in parsed_dict else 100
 top_p = float(parsed_dict['top_p'][0]) if 'top_p' in parsed_dict else 0.9
 rep_pen = float(parsed_dict['rep_pen'][0]) if 'rep_pen' in parsed_dict else 1.0
 ban_eos_token = int(parsed_dict['ban_eos_token'][0]) if 'ban_eos_token' in parsed_dict else 0
+steps = int(parsed_dict['steps'][0]) if 'steps' in parsed_dict else 25
+cfg = int(parsed_dict['cfg'][0]) if 'cfg' in parsed_dict else 7
 genbtnval = (parsed_dict['generate'][0] if 'generate' in parsed_dict else "")
 gencommand = (genbtnval=="Generate" or genbtnval=="Send")
 chatmode = int(parsed_dict['chatmode'][0]) if 'chatmode' in parsed_dict else 0
+imgmode = int(parsed_dict['imgmode'][0]) if 'imgmode' in parsed_dict else 0
 human_name = str(parsed_dict['human_name'][0]) if 'human_name' in parsed_dict else "User"
 bot_name = str(parsed_dict['bot_name'][0]) if 'bot_name' in parsed_dict else "Assistant"
 stops = []
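The hunk above pulls the new image fields out of the same query dictionary the /noscript form already uses. For illustration, a minimal standalone sketch of that parsing pattern; the query string below is made up, and the real handler obtains parsed_dict from the request URL rather than a literal:

import urllib.parse as urlparse

query = "imgmode=1&imgprompt=a+red+fox&steps=25&cfg=7&generate=Generate"
parsed_dict = urlparse.parse_qs(query)  # every value arrives as a list of strings

# same "first value or default" pattern as the handler
imgprompt = str(parsed_dict['imgprompt'][0]) if 'imgprompt' in parsed_dict else ""
steps = int(parsed_dict['steps'][0]) if 'steps' in parsed_dict else 25
cfg = int(parsed_dict['cfg'][0]) if 'cfg' in parsed_dict else 7
imgmode = int(parsed_dict['imgmode'][0]) if 'imgmode' in parsed_dict else 0
genbtnval = parsed_dict['generate'][0] if 'generate' in parsed_dict else ""
gencommand = (genbtnval=="Generate" or genbtnval=="Send")

print(imgmode, imgprompt, steps, cfg, gencommand)  # 1 a red fox 25 7 True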
@@ -2568,6 +2572,12 @@ class KcppServerRequestHandler(http.server.SimpleHTTPRequestHandler):
 gencommand = False
 stops = [f"\n{human_name}:",f"\n{bot_name}:"]
 prefix = f"[This is a chat conversation log between {human_name} and {bot_name}.]\n"
+elif imgmode:
+if imgprompt:
+prompt = imgprompt
+max_length = 1
+else:
+gencommand = False

 if modelbusy.locked():
 status = "Model is currently busy, try again later."
@@ -2581,6 +2591,10 @@ class KcppServerRequestHandler(http.server.SimpleHTTPRequestHandler):
 epurl = f"{httpsaffix}://localhost:{args.port}"
 if args.host!="":
 epurl = f"{httpsaffix}://{args.host}:{args.port}"
+if imgmode and imgprompt:
+gen_payload = {"prompt":{"3":{"inputs":{"cfg":cfg,"steps":steps}},"6":{"inputs":{"text":imgprompt}}}}
+respjson = make_url_request(f'{epurl}/prompt', gen_payload)
+else:
 gen_payload = {"prompt": prefix+prompt,"max_length": max_length,"temperature": temperature,"top_k": top_k,"top_p": top_p,"rep_pen": rep_pen,"ban_eos_token":ban_eos_token, "stop_sequence":stops}
 respjson = make_url_request(f'{epurl}/api/v1/generate', gen_payload)
 reply = html.escape(respjson["results"][0]["text"])
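When imgmode is set, the handler reuses the ComfyUI-compatible /prompt endpoint that koboldcpp already emulates, packing the sampler settings into node "3" and the text prompt into node "6". As a rough sketch, the same request could be made directly with the standard library; the host, port, and presence of a loaded image model are assumptions here, not something this diff guarantees:

import json, urllib.request

epurl = "http://localhost:5001"  # assumed local instance with an image model loaded
gen_payload = {"prompt": {"3": {"inputs": {"cfg": 7, "steps": 25}},
                          "6": {"inputs": {"text": "a watercolor lighthouse at dusk"}}}}
req = urllib.request.Request(f"{epurl}/prompt",
                             data=json.dumps(gen_payload).encode("utf-8"),
                             headers={"Content-Type": "application/json"})
with urllib.request.urlopen(req) as resp:  # stands in for make_url_request in the handler
    print(json.load(resp))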
@@ -2592,9 +2606,12 @@ class KcppServerRequestHandler(http.server.SimpleHTTPRequestHandler):
 del parsed_dict["generate"]
 if "chatmsg" in parsed_dict:
 del parsed_dict["chatmsg"]
+if "imgprompt" in parsed_dict:
+del parsed_dict["imgprompt"]
 parsed_dict["prompt"] = prompt + reply
 parsed_dict["status"] = status
 parsed_dict["chatmode"] = ("1" if chatmode else "0")
+parsed_dict["imgmode"] = ("1" if imgmode else "0")
 updated_query_string = urlparse.urlencode(parsed_dict, doseq=True)
 updated_path = parsed_url._replace(query=updated_query_string).geturl()
 self.path = updated_path
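Because the no-JavaScript page keeps all of its state in the URL, the handler then strips the one-shot fields and folds the mode flags back into the query string before re-rendering. A small sketch of that urlencode/_replace round trip, using made-up values outside the handler:

import urllib.parse as urlparse

parsed_url = urlparse.urlparse("http://localhost:5001/noscript?generate=Generate&imgprompt=a+red+fox&chatmode=0")
parsed_dict = urlparse.parse_qs(parsed_url.query)
parsed_dict.pop("generate", None)   # drop one-shot fields, as the handler does
parsed_dict.pop("imgprompt", None)
parsed_dict["imgmode"] = "1"        # persist the mode flag for the next request
updated_query_string = urlparse.urlencode(parsed_dict, doseq=True)
print(parsed_url._replace(query=updated_query_string).geturl())
# http://localhost:5001/noscript?chatmode=0&imgmode=1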
@@ -2604,8 +2621,19 @@ class KcppServerRequestHandler(http.server.SimpleHTTPRequestHandler):
 self.end_headers(content_type='text/html')
 return

-bodycontent = f'''<b><u>{"Chat Mode" if chatmode else "Story Mode"}</u></b><br>'''
-if chatmode:
+imgbtn = '''<form action="/noscript" style="display: inline;">
+<input type="hidden" name="imgmode" value="1">
+<input type="submit" value="Image Mode">
+</form>'''
+
+bodycontent = f'''<b><u>{"Image Mode" if imgmode else ("Chat Mode" if chatmode else "Story Mode")}</u></b><br>'''
+optionscontent = ""
+if imgmode:
+bodycontent += f'''<p>Generated Image: {prompt if prompt else "None"}</p>
+{'<img src="view.png" width="320" width="320">' if prompt else ""}<br>
+<label>Image Prompt: </label><input type="text" size="40" value="" name="imgprompt">
+<input type="submit" name="generate" value="Generate"> (Be patient)'''
+elif chatmode:
 oldconvo = prompt.strip().replace(f"{human_name}:",f"<b>{human_name}:</b>").replace(f"{bot_name}:",f"<b>{bot_name}:</b>").replace("\n","<br>")
 oldconvo += f'''<input type="hidden" name="human_name" value="{human_name}"><input type="hidden" name="bot_name" value="{bot_name}">'''
 newconvo = '''Start a new conversation.<br>
@@ -2621,6 +2649,17 @@ class KcppServerRequestHandler(http.server.SimpleHTTPRequestHandler):
 <textarea name="prompt" cols="60" rows="8" wrap="soft" placeholder="Enter Prompt Here">{prompt}</textarea><br>
 <input type="submit" name="generate" value="Generate"> (Be patient)
 '''
+if not imgmode:
+optionscontent = f'''<label>Gen. Amount</label> <input type="text" size="4" value="{max_length}" name="max_length"><br>
+<label>Temperature</label> <input type="text" size="4" value="{temperature}" name="temperature"><br>
+<label>Top-K</label> <input type="text" size="4" value="{top_k}" name="top_k"><br>
+<label>Top-P</label> <input type="text" size="4" value="{top_p}" name="top_p"><br>
+<label>Rep. Pen</label> <input type="text" size="4" value="{rep_pen}" name="rep_pen"><br>
+<label>Prevent EOS</label> <input type="checkbox" name="ban_eos_token" value="1" {"checked" if ban_eos_token else ""}><br>'''
+else:
+optionscontent = f'''<label>Steps</label> <input type="text" size="4" value="{steps}" name="steps"><br>
+<label>Cfg. Scale</label> <input type="text" size="4" value="{cfg}" name="cfg"><br>'''
+
 finalhtml = f'''<!doctype html>
 <html lang="en"><head>
 <meta charset="utf-8">
@@ -2635,23 +2674,21 @@ class KcppServerRequestHandler(http.server.SimpleHTTPRequestHandler):
 <hr>
 <b>{status}</b><br>
 <hr>
-<label>Gen. Amount</label> <input type="text" size="4" value="{max_length}" name="max_length"><br>
-<label>Temperature</label> <input type="text" size="4" value="{temperature}" name="temperature"><br>
-<label>Top-K</label> <input type="text" size="4" value="{top_k}" name="top_k"><br>
-<label>Top-P</label> <input type="text" size="4" value="{top_p}" name="top_p"><br>
-<label>Rep. Pen</label> <input type="text" size="4" value="{rep_pen}" name="rep_pen"><br>
-<label>Prevent EOS</label> <input type="checkbox" name="ban_eos_token" value="1" {"checked" if ban_eos_token else ""}><br>
+{optionscontent}
 <input type="hidden" name="chatmode" value="{chatmode}">
+<input type="hidden" name="imgmode" value="{imgmode}">
 </form>
 <hr>
 <div style="display: inline-block;">
+Change Mode<br>
 <form action="/noscript" style="display: inline;">
-<input type="submit" value="Reset (Story Mode)">
+<input type="submit" value="Story Mode">
 </form>
 <form action="/noscript" style="display: inline;">
 <input type="hidden" name="chatmode" value="1">
-<input type="submit" value="Reset (Chat Mode)">
+<input type="submit" value="Chat Mode">
 </form>
+{imgbtn if "txt2img" in get_capabilities() else ""}
 </div>
 </div>
 </body></html>'''
@@ -2791,7 +2828,7 @@ class KcppServerRequestHandler(http.server.SimpleHTTPRequestHandler):
 response_body = (json.dumps([]).encode())
 else:
 response_body = (json.dumps([friendlysdmodelname]).encode())
-elif self.path=='/view' or self.path=='/api/view' or self.path.startswith('/view?') or self.path.startswith('/api/view?'): #emulate comfyui
+elif self.path=='/view' or self.path=='/view.png' or self.path=='/api/view' or self.path.startswith('/view?') or self.path.startswith('/api/view?'): #emulate comfyui
 content_type = 'image/png'
 response_body = lastgeneratedcomfyimg
 elif self.path=='/history' or self.path=='/api/history' or self.path.startswith('/api/history/') or self.path.startswith('/history/'): #emulate comfyui
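Taken together with the /view.png route added above, the new flow for the no-script UI is: submit the form with imgmode=1 and an imgprompt, wait for the synchronous generation, then load the finished picture from the ComfyUI-style view endpoint. An end-to-end sketch against a local instance; the base URL, the need for a loaded image model, and the output filename are all assumptions:

import urllib.parse, urllib.request

base = "http://localhost:5001"  # assumed local koboldcpp with txt2img capability
params = urllib.parse.urlencode({"imgmode": "1",
                                 "imgprompt": "a watercolor lighthouse at dusk",
                                 "steps": "25", "cfg": "7",
                                 "generate": "Generate"})
# /noscript blocks until the image is done, hence the "(Be patient)" hint in the form.
page = urllib.request.urlopen(f"{base}/noscript?{params}").read().decode("utf-8", "ignore")

# The returned page embeds <img src="view.png">; the same bytes can be saved directly.
with open("noscript_image.png", "wb") as f:
    f.write(urllib.request.urlopen(f"{base}/view.png").read())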