Mirror of https://github.com/LostRuins/koboldcpp.git (synced 2025-09-10 09:04:36 +00:00)

Commit 1dd37933e3 (parent 18f227625b)

fixed grammar not resetting correctly

2 changed files with 7 additions and 4 deletions
@@ -1507,6 +1507,7 @@ static void load_grammar(const std::string & gammarstr)
     }

     if (!gammarstr.empty()) {
+        parsed_grammar = llama_grammar_parser();
         parsed_grammar.parse(gammarstr.c_str());
         // will be empty (default) if there are parse errors
         if (parsed_grammar.rules.empty()) {
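The one added line above reinitializes the long-lived parsed_grammar object before parsing, so rules left over from a previously loaded grammar can no longer leak into the next request. As a rough illustration of that bug class only, here is a minimal Python sketch; ToyGrammarParser, load_grammar_buggy and load_grammar_fixed are invented names and not part of koboldcpp, which applies the fix in the C++ line shown above.

# Hypothetical sketch, not koboldcpp code: a module-level parser that keeps its
# rules between calls shows the stale-state problem this commit fixes.
class ToyGrammarParser:
    def __init__(self):
        self.rules = []

    def parse(self, text):
        # Naive stand-in for a grammar parser: each non-empty line is a "rule".
        self.rules.extend(line for line in text.splitlines() if line.strip())

parsed_grammar = ToyGrammarParser()  # persistent state shared across requests

def load_grammar_buggy(grammar_str):
    # Old behaviour: rules from the previous grammar are still in the parser.
    if grammar_str:
        parsed_grammar.parse(grammar_str)

def load_grammar_fixed(grammar_str):
    # Mirrors the added C++ line: start every load from a fresh parser object.
    global parsed_grammar
    if grammar_str:
        parsed_grammar = ToyGrammarParser()
        parsed_grammar.parse(grammar_str)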
koboldcpp.py (10 changed lines)
@@ -69,6 +69,7 @@ multiplayer_story_data_compressed = None #stores the full compressed story of th
 multiplayer_turn_major = 1 # to keep track of when a client needs to sync their stories
 multiplayer_turn_minor = 1
 multiplayer_dataformat = "" # used to tell what is the data payload in saved story. set by client
+multiplayer_lastactive = 0 # timestamp of last activity
 preloaded_story = None
 chatcompl_adapter = None
 embedded_kailite = None
@@ -1798,7 +1799,7 @@ Enter Prompt:<br>

     def do_GET(self):
         global embedded_kailite, embedded_kcpp_docs, embedded_kcpp_sdui
-        global has_multiplayer, multiplayer_turn_major, multiplayer_turn_minor, multiplayer_story_data_compressed, multiplayer_dataformat, maxctx, maxhordelen, friendlymodelname, KcppVersion, totalgens, preloaded_story, exitcounter, currentusergenkey, friendlysdmodelname, fullsdmodelpath, mmprojpath, password, fullwhispermodelpath
+        global has_multiplayer, multiplayer_turn_major, multiplayer_turn_minor, multiplayer_story_data_compressed, multiplayer_dataformat, multiplayer_lastactive, maxctx, maxhordelen, friendlymodelname, KcppVersion, totalgens, preloaded_story, exitcounter, currentusergenkey, friendlysdmodelname, fullsdmodelpath, mmprojpath, password, fullwhispermodelpath
         self.path = self.path.rstrip('/')
         response_body = None
         content_type = 'application/json'
@@ -1942,7 +1943,7 @@ Enter Prompt:<br>
             if not has_multiplayer:
                 response_body = (json.dumps({"error":"Multiplayer not enabled!"}).encode())
             else:
-                response_body = (json.dumps({"turn_major":multiplayer_turn_major,"turn_minor":multiplayer_turn_minor,"idle":(0 if modelbusy.locked() else 1),"data_format":multiplayer_dataformat}).encode())
+                response_body = (json.dumps({"turn_major":multiplayer_turn_major,"turn_minor":multiplayer_turn_minor,"idle":(0 if (modelbusy.locked() or (time.time()-multiplayer_lastactive)<10) else 1),"data_format":multiplayer_dataformat}).encode())

         elif self.path=="/api/extra/multiplayer/getstory":
             if not has_multiplayer:
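The status response above (and the setstory response further down) now folds the new multiplayer_lastactive timestamp into the idle flag: the server reports itself busy while the model is locked or within 10 seconds of the last story update. A self-contained sketch of that expression, where compute_idle_flag is an invented helper name used only for illustration:

import time

IDLE_WINDOW_SECONDS = 10  # matches the hardcoded 10 in the diff

def compute_idle_flag(model_locked, multiplayer_lastactive):
    # Mirrors: 0 if (modelbusy.locked() or (time.time()-multiplayer_lastactive)<10) else 1
    recently_active = (time.time() - multiplayer_lastactive) < IDLE_WINDOW_SECONDS
    return 0 if (model_locked or recently_active) else 1

Because multiplayer_lastactive starts at 0, time.time() - 0 is far above the 10-second window, so a server that has never received a story update still reports idle whenever the model lock is free.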
@@ -1977,7 +1978,7 @@ Enter Prompt:<br>
                 return

     def do_POST(self):
-        global modelbusy, requestsinqueue, currentusergenkey, totalgens, pendingabortkey, multiplayer_turn_major, multiplayer_turn_minor, multiplayer_story_data_compressed, multiplayer_dataformat
+        global modelbusy, requestsinqueue, currentusergenkey, totalgens, pendingabortkey, multiplayer_turn_major, multiplayer_turn_minor, multiplayer_story_data_compressed, multiplayer_dataformat, multiplayer_lastactive
         contlenstr = self.headers['content-length']
         content_length = 0
         body = None
@@ -2126,12 +2127,13 @@ Enter Prompt:<br>
                 else:
                     multiplayer_story_data_compressed = str(storybody) #save latest story
                     multiplayer_dataformat = dataformat
+                    multiplayer_lastactive = int(time.time())
                     if fullupdate:
                         multiplayer_turn_minor = 1
                         multiplayer_turn_major += 1
                     else:
                         multiplayer_turn_minor += 1
-                    response_body = (json.dumps({"success":True,"turn_major":multiplayer_turn_major,"turn_minor":multiplayer_turn_minor,"idle":(0 if modelbusy.locked() else 1),"data_format":multiplayer_dataformat}).encode())
+                    response_body = (json.dumps({"success":True,"turn_major":multiplayer_turn_major,"turn_minor":multiplayer_turn_minor,"idle":(0 if (modelbusy.locked() or (time.time()-multiplayer_lastactive)<10) else 1),"data_format":multiplayer_dataformat}).encode())
             else:
                 response_code = 400
                 response_body = (json.dumps({"success":False, "error":"No story submitted!"}).encode())
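The write path above can be read as the condensed sketch below; the apply_story_update wrapper and the state dict are hypothetical, while the assignments and turn-counter logic follow the diff.

import time

def apply_story_update(state, storybody, dataformat, fullupdate):
    # state is a plain dict standing in for the module-level globals.
    state["story_data_compressed"] = str(storybody)  # save latest story
    state["dataformat"] = dataformat
    state["lastactive"] = int(time.time())           # new in this commit
    if fullupdate:
        state["turn_minor"] = 1
        state["turn_major"] += 1   # a major bump tells clients to resync the full story
    else:
        state["turn_minor"] += 1
    return state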