Fixed defective websearch

This commit is contained in:
Concedo 2025-01-04 16:47:38 +08:00
parent b37354bf73
commit 1559d4d2fb
2 changed files with 84 additions and 56 deletions

View file

@ -1281,6 +1281,9 @@ def detokenize_ids(tokids):
def websearch(query):
global websearch_lastquery
global websearch_lastresponse
# sanitize query
query = re.sub(r'[+\-\"\\/*^|<>~`]', '', query) # Remove blacklisted characters
query = re.sub(r'\s+', ' ', query).strip() # Replace multiple spaces with a single space
if not query or query=="":
return []
query = query[:300] # only search first 300 chars, due to search engine limits
@ -1406,9 +1409,12 @@ def websearch(query):
titles = parser.titles[:num_results]
searchurls = parser.urls[:num_results]
descs = parser.descs[:num_results]
fetchedcontent = fetch_webpages_parallel(searchurls)
if len(descs)==0:
if len(descs)==0 or len(titles)==0 or len(descs)==0:
utfprint("No results found! Maybe something went wrong...",1)
return []
fetchedcontent = fetch_webpages_parallel(searchurls)
for i in range(len(descs)):
# dive into the results to try and get even more details
title = titles[i]
@ -1439,7 +1445,7 @@ def websearch(query):
except Exception as e:
utfprint(f"Error fetching URL {search_url}: {e}",1)
return ""
return []
if len(searchresults) > 0:
websearch_lastquery = query
websearch_lastresponse = searchresults