fix top_p = 0 bug

dongjw 2025-04-01 20:38:33 +08:00
parent aeabd783b0
commit 5c7ed7b579
2 changed files with 4 additions and 0 deletions


@@ -368,6 +368,8 @@ class BalanceServeInterface(BackendInterfaceBase):
stop_criteria = [self.tokenizer.encode(self.tokenizer.eos_token, add_special_tokens=False),self.tokenizer.encode("<|im_end|>")]
query_add.stop_criteria = stop_criteria
query_add.sample_options.temperature = temperature
+if top_p == 0:
+    top_p = 0.0001
query_add.sample_options.top_p = top_p
query_add.estimated_length = min(self.args.cache_lens, query_length+self.args.max_new_tokens)
query_id = self.sched_client.add_query(query_add)


@@ -208,6 +208,8 @@ class TransformersInterface(BackendInterfaceBase):
temperature = self.model.generation_config.temperature
if top_p is None:
top_p = self.model.generation_config.top_p
+if top_p == 0:
+    top_p = 0.0001
generation_config, model_kwargs = self.model._prepare_generation_config(
None, max_length=self.args.max_new_tokens,
do_sample=True,
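
Background (not part of the commit): with nucleus sampling, a top_p of 0 leaves no tokens inside the cumulative-probability cutoff, so the sampler can error out or behave unpredictably; clamping to 0.0001 keeps only the most probable token, which is effectively greedy decoding. A minimal sketch, using a hypothetical top_p_candidates helper rather than the project's actual sampler, illustrates the failure mode:

# Toy sketch (not from this repo): why top_p == 0 needs the clamp.
# top_p_candidates is a hypothetical helper, not the project's sampler.
def top_p_candidates(probs, top_p):
    """Return indices kept by a simple nucleus (top-p) filter."""
    order = sorted(range(len(probs)), key=lambda i: probs[i], reverse=True)
    kept, cumulative = [], 0.0
    for i in order:
        if cumulative >= top_p:   # with top_p == 0 this triggers immediately
            break
        kept.append(i)
        cumulative += probs[i]
    return kept

probs = [0.7, 0.2, 0.1]
print(top_p_candidates(probs, 0.0))      # [] -> nothing left to sample from
print(top_p_candidates(probs, 0.0001))   # [0] -> keeps the most probable token

This is only an illustration of the cutoff behavior; the exact semantics of top_p depend on the backend's sampler implementation.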