mirror of
https://github.com/kvcache-ai/ktransformers.git
synced 2025-09-10 06:14:58 +00:00
fix top_p = 0 bug
This commit is contained in:
parent
aeabd783b0
commit
5c7ed7b579
2 changed files with 4 additions and 0 deletions
```diff
@@ -208,6 +208,8 @@ class TransformersInterface(BackendInterfaceBase):
             temperature = self.model.generation_config.temperature
         if top_p is None:
             top_p = self.model.generation_config.top_p
+        if top_p == 0:
+            top_p = 0.0001
         generation_config, model_kwargs = self.model._prepare_generation_config(
             None, max_length=self.args.max_new_tokens,
             do_sample=True,
```
|
Loading…
Add table
Add a link
Reference in a new issue