mirror of https://github.com/LostRuins/koboldcpp.git
synced 2025-09-10 17:14:36 +00:00
added field to show recent seed
This commit is contained in:
parent 9e0dee769b
commit 94e68fe474
5 changed files with 12 additions and 31 deletions
29  .github/workflows/python-check-requirements.yml (vendored)
@@ -1,29 +0,0 @@
-name: Python check requirements.txt
-
-on:
-  push:
-    paths:
-      - 'scripts/check-requirements.sh'
-      - 'convert*.py'
-      - 'requirements.txt'
-      - 'requirements/*.txt'
-  pull_request:
-    paths:
-      - 'scripts/check-requirements.sh'
-      - 'convert*.py'
-      - 'requirements.txt'
-      - 'requirements/*.txt'
-
-jobs:
-  python-check-requirements:
-    runs-on: ubuntu-latest
-    name: check-requirements
-    steps:
-      - name: Check out source repository
-        uses: actions/checkout@v3
-      - name: Set up Python environment
-        uses: actions/setup-python@v4
-        with:
-          python-version: "3.11"
-      - name: Run check-requirements.sh script
-        run: bash scripts/check-requirements.sh nocleanup
@@ -217,6 +217,10 @@ extern "C"
     int get_last_token_count() {
        return last_token_count;
     }
+    int get_last_seed()
+    {
+        return last_seed;
+    }
     int get_total_gens() {
        return total_gens;
     }
1  expose.h
@@ -102,5 +102,6 @@ extern bool generation_finished;
 extern float last_eval_time;
 extern float last_process_time;
 extern int last_token_count;
+extern int last_seed;
 extern int total_gens;
 extern stop_reason last_stop_reason;
@@ -39,6 +39,7 @@ bool generation_finished;
 float last_process_time = 0;
 float last_eval_time = 0;
 int last_token_count = 0;
+int last_seed = -1;
 int total_gens = 0;
 stop_reason last_stop_reason = stop_reason::INVALID;
 std::vector<std::string> generated_tokens;
@@ -1531,7 +1532,7 @@ generation_outputs gpttype_generate(const generation_inputs inputs, generation_o
     }
     if (kcpp_params->seed <= 0 || kcpp_params->seed==0xFFFFFFFF)
     {
-        kcpp_params->seed = time(NULL);
+        kcpp_params->seed = (((uint32_t)time(NULL)) % 1000000);
     }

     // tokenize the prompt
@@ -2051,6 +2052,7 @@ generation_outputs gpttype_generate(const generation_inputs inputs, generation_o
     last_eval_time = pt2;
     last_process_time = pt1;
     last_token_count = realnpredict;
+    last_seed = kcpp_params->seed;
     total_gens += 1;
     snprintf(output.text, sizeof(output.text), "%s", concat_output.c_str());
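Note on the change above: when no explicit seed is supplied, the seed is now taken from the wall clock and reduced modulo 1000000, so the value later stored in last_seed stays within six digits. A rough Python equivalent of that derivation (illustrative only, not code from this commit):

import time

# Mirrors the (((uint32_t)time(NULL)) % 1000000) expression: keep only the
# low six decimal digits of the current UNIX timestamp.
auto_seed = int(time.time()) % 1000000
print(auto_seed)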
@@ -226,6 +226,7 @@ def init_library():
     handle.get_last_eval_time.restype = ctypes.c_float
     handle.get_last_process_time.restype = ctypes.c_float
     handle.get_last_token_count.restype = ctypes.c_int
+    handle.get_last_seed.restype = ctypes.c_int
     handle.get_total_gens.restype = ctypes.c_int
     handle.get_last_stop_reason.restype = ctypes.c_int
     handle.abort_generate.restype = ctypes.c_bool
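A minimal sketch of how the new export could be exercised directly through ctypes; the library filename below is a placeholder assumption, only the get_last_seed symbol and its c_int return type come from this diff:

import ctypes

# Placeholder path for illustration; koboldcpp resolves the actual shared
# library inside init_library().
handle = ctypes.CDLL("./koboldcpp_default.so")

# As declared in the diff above.
handle.get_last_seed.restype = ctypes.c_int

# Returns -1 before any generation has run (see the last_seed = -1 initializer),
# otherwise the seed used by the most recent generation.
print(handle.get_last_seed())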
@@ -793,7 +794,8 @@ Enter Prompt:<br>
             lastc = handle.get_last_token_count()
             totalgens = handle.get_total_gens()
             stopreason = handle.get_last_stop_reason()
-            response_body = (json.dumps({"last_process":lastp,"last_eval":laste,"last_token_count":lastc, "total_gens":totalgens, "stop_reason":stopreason, "queue":requestsinqueue, "idle":(0 if modelbusy.locked() else 1), "hordeexitcounter":exitcounter}).encode())
+            lastseed = handle.get_last_seed()
+            response_body = (json.dumps({"last_process":lastp,"last_eval":laste,"last_token_count":lastc, "last_seed":lastseed, "total_gens":totalgens, "stop_reason":stopreason, "queue":requestsinqueue, "idle":(0 if modelbusy.locked() else 1), "hordeexitcounter":exitcounter}).encode())

         elif self.path.endswith('/api/extra/generate/check'):
             pendtxtStr = ""
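A hedged client-side sketch of reading the new field from the JSON payload built above; the endpoint path and port here are assumptions for illustration, only the last_seed key comes from this commit:

import json
import urllib.request

# Assumed URL for the status payload assembled above; adjust host, port and path.
with urllib.request.urlopen("http://localhost:5001/api/extra/perf") as resp:
    perf = json.loads(resp.read())

# New in this commit: the seed used by the most recent generation.
print(perf.get("last_seed"))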
@@ -2214,6 +2216,7 @@ def unload_libs():
     del handle.get_last_eval_time
     del handle.get_last_process_time
     del handle.get_last_token_count
+    del handle.get_last_seed
     del handle.get_total_gens
     del handle.get_last_stop_reason
     del handle.abort_generate