Mirror of https://github.com/LostRuins/koboldcpp.git
Synced 2025-09-11 09:34:37 +00:00

refactored some outputs

This commit is contained in:
  parent 5eb314a04b
  commit 8de44d1e41

1 changed file with 29 additions and 23 deletions:

  koboldcpp.py | 52 (+29, -23)
@@ -565,7 +565,12 @@ def exit_with_error(code, message, title="Error"):
     time.sleep(2)
     sys.exit(code)
 
-def utfprint(str):
+def utfprint(str, importance = 2): #0 = only debugmode, 1 = except quiet, 2 = always print
+    if args.debugmode < 1:
+        if importance==1 and (args.debugmode == -1 or args.quiet):
+            return
+        if importance==0:
+            return
     maxlen = 32000
     if args.debugmode >= 1:
         maxlen = 64000
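Note: as a quick illustration of how the three importance levels interact with the --quiet and --debugmode flags, here is a self-contained sketch that mirrors the checks above. demo_print and the SimpleNamespace stand-in for args are illustrative only and not part of this commit.

    from types import SimpleNamespace

    args = SimpleNamespace(debugmode=0, quiet=False)  # stand-in for the parsed CLI flags

    def demo_print(msg, importance=2):  # hypothetical helper mirroring utfprint's gating
        if args.debugmode < 1:
            if importance == 1 and (args.debugmode == -1 or args.quiet):
                return
            if importance == 0:
                return
        print(msg)

    demo_print("always shown")           # importance 2 (default): printed regardless of flags
    demo_print("hidden when quiet", 1)   # importance 1: skipped when --quiet or debugmode == -1
    demo_print("debug only", 0)          # importance 0: skipped unless debugmode >= 1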
@@ -1285,18 +1290,26 @@ def websearch(query):
     import urllib.parse
     import urllib.request
     import difflib
+    import random
     from html.parser import HTMLParser
     from concurrent.futures import ThreadPoolExecutor
     num_results = 3
     searchresults = []
-    if args.debugmode != -1 and not args.quiet:
-        print("Performing new websearch...")
+    utfprint("Performing new websearch...",1)
 
-    def fetch_searched_webpage(url):
+    def fetch_searched_webpage(url, random_agent=False):
+        uagent = 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)'
+        if random_agent:
+            agents = ["Mozilla/5.0 (Macintosh; Intel Mac OS X 13_2) Gecko/20100101 Firefox/114.0",
+                      "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36",
+                      "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:133.0) Gecko/20100101 Firefox/133.0",
+                      "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.1823.79 Safari/537.36 Edg/114.0.1823.79",
+                      "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.5938.132 Safari/537.36"]
+            uagent = random.choice(agents)
         if args.debugmode:
             utfprint(f"WebSearch URL: {url}")
         try:
-            req = urllib.request.Request(url, headers={'User-Agent': 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)'})
+            req = urllib.request.Request(url, headers={'User-Agent': uagent})
             with urllib.request.urlopen(req, timeout=15) as response:
                 html_content = response.read().decode('utf-8', errors='ignore')
                 return html_content
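Note: the random_agent path amounts to picking one entry from a fixed pool before the request is built. A minimal, self-contained sketch of the same idea follows; the agent strings and the build_request helper are placeholders for illustration, not the commit's exact code.

    import random
    import urllib.request

    AGENTS = [  # placeholder pool; the commit ships its own list of browser strings
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) ExampleBrowser/1.0",
        "Mozilla/5.0 (X11; Linux x86_64) ExampleBrowser/2.0",
    ]

    def build_request(url, random_agent=False):
        # Default to the crawler-style agent, switch to a random browser agent on demand.
        uagent = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
        if random_agent:
            uagent = random.choice(AGENTS)
        return urllib.request.Request(url, headers={"User-Agent": uagent})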
@@ -1307,12 +1320,10 @@ def websearch(query):
                     html_content = response.read().decode('utf-8', errors='ignore')
                     return html_content
             except Exception as e:
-                if args.debugmode != -1 and not args.quiet:
-                    print(f"Error fetching text from URL {url}: {e}")
+                utfprint(f"Error fetching text from URL {url}: {e}",1)
                 return ""
         except Exception as e:
-            if args.debugmode != -1 and not args.quiet:
-                print(f"Error fetching text from URL {url}: {e}")
+            utfprint(f"Error fetching text from URL {url}: {e}",1)
             return ""
     def fetch_webpages_parallel(urls):
         with ThreadPoolExecutor() as executor:
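Note: the unchanged fetch_webpages_parallel context above is what consumes these fetches. One way to express that pattern with ThreadPoolExecutor, using a dummy fetcher in place of fetch_searched_webpage (a sketch of the technique, not the function's actual body):

    from concurrent.futures import ThreadPoolExecutor

    def fetch_one(url):  # dummy stand-in for fetch_searched_webpage
        return f"<html>fetched {url}</html>"

    def fetch_parallel(urls):
        # executor.map returns results in input order, so they stay aligned with titles/descs.
        with ThreadPoolExecutor() as executor:
            return list(executor.map(fetch_one, urls))

    print(fetch_parallel(["https://example.com/a", "https://example.com/b"]))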
@@ -1389,13 +1400,15 @@ def websearch(query):
     search_url = f"https://html.duckduckgo.com/html/?q={encoded_query}"
 
     try:
-        search_html = fetch_searched_webpage(search_url)
+        search_html = fetch_searched_webpage(search_url, random_agent=True)
         parser = ExtractResultsParser()
         parser.feed(search_html)
         titles = parser.titles[:num_results]
         searchurls = parser.urls[:num_results]
         descs = parser.descs[:num_results]
         fetchedcontent = fetch_webpages_parallel(searchurls)
+        if len(descs)==0:
+            utfprint("No results found! Maybe something went wrong...",1)
         for i in range(len(descs)):
             # dive into the results to try and get even more details
             title = titles[i]
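Note: the new len(descs)==0 branch only logs a warning through utfprint; nothing is retried. For context, a small sketch of one way the search URL above can be produced with urllib.parse (the actual helper used for encoded_query in koboldcpp.py may differ):

    import urllib.parse

    def build_search_url(query):
        # quote_plus encodes spaces as '+' for the DuckDuckGo HTML endpoint.
        encoded_query = urllib.parse.quote_plus(query)
        return f"https://html.duckduckgo.com/html/?q={encoded_query}"

    print(build_search_url("koboldcpp web search"))
    # -> https://html.duckduckgo.com/html/?q=koboldcpp+web+search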
@@ -1425,8 +1438,7 @@ def websearch(query):
             searchresults.append({"title":title,"url":url,"desc":desc,"content":pagedesc})
 
     except Exception as e:
-        if args.debugmode != -1 and not args.quiet:
-            print(f"Error fetching URL {search_url}: {e}")
+        utfprint(f"Error fetching URL {search_url}: {e}",1)
         return ""
     if len(searchresults) > 0:
         websearch_lastquery = query
@@ -1770,8 +1782,7 @@ class ServerRequestHandler(http.server.SimpleHTTPRequestHandler):
         global last_non_horde_req_time
         last_non_horde_req_time = time.time()
 
-        if (args.debugmode != -1 and not is_quiet) or args.debugmode >= 1:
-            utfprint("\nOutput: " + recvtxt)
+        utfprint("\nOutput: " + recvtxt,1)
 
         if api_format == 1:
             res = {"data": {"seqs": [recvtxt]}}
@@ -2554,9 +2565,7 @@ Enter Prompt:<br>
             }}).encode())
             return
 
-        is_quiet = args.quiet
-        if (args.debugmode != -1 and not is_quiet) or args.debugmode >= 1:
-            utfprint("\nInput: " + json.dumps(genparams))
+        utfprint("\nInput: " + json.dumps(genparams),1)
 
         if args.foreground:
             bring_terminal_to_foreground()
@@ -2577,8 +2586,7 @@ Enter Prompt:<br>
                 self.end_headers(content_type='application/json')
                 self.wfile.write(genresp)
             except Exception as ex:
-                if args.debugmode:
-                    print(ex)
+                utfprint(ex,0)
                 print("Generate: The response could not be sent, maybe connection was terminated?")
                 handle.abort_generate()
                 time.sleep(0.2) #short delay
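Note: the same substitution repeats in the Generate Image and Transcribe handlers below. The shape of the change is: the raw exception now goes through utfprint at importance 0 (debug-only), while the human-readable warning keeps printing unconditionally. A self-contained sketch of that split, with a generic send step standing in for the handler code:

    def send_response(write, payload, debug=False):  # hypothetical illustration, not handler code
        try:
            write(payload)
        except Exception as ex:
            if debug:  # plays the role of utfprint(ex, 0)
                print(ex)
            print("The response could not be sent, maybe connection was terminated?")

    def broken_write(_payload):
        raise ConnectionError("client went away")

    send_response(broken_write, b"{}", debug=True)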
@@ -2604,8 +2612,7 @@ Enter Prompt:<br>
                 self.end_headers(content_type='application/json')
                 self.wfile.write(genresp)
             except Exception as ex:
-                if args.debugmode:
-                    print(ex)
+                utfprint(ex,0)
                 print("Generate Image: The response could not be sent, maybe connection was terminated?")
                 time.sleep(0.2) #short delay
                 return
@@ -2618,8 +2625,7 @@ Enter Prompt:<br>
                 self.end_headers(content_type='application/json')
                 self.wfile.write(genresp)
             except Exception as ex:
-                if args.debugmode:
-                    print(ex)
+                utfprint(ex,0)
                 print("Transcribe: The response could not be sent, maybe connection was terminated?")
                 time.sleep(0.2) #short delay
                 return