Mirror of https://github.com/kvcache-ai/ktransformers.git, synced 2025-09-11 07:44:35 +00:00
remove causal mask
This commit is contained in:
parent cdb6f896bb
commit 006e8c6abc
1 changed file with 6 additions and 3 deletions
@@ -649,9 +649,12 @@ class KDeepseekV2Model(BaseInjectedModule):
         if per_layer_prefill_flag:
             causal_mask = None
         else:
-            causal_mask = self._update_causal_mask(
-                attention_mask, inputs_embeds, cache_position, past_key_values, output_attentions
-            )
+            if os.name == 'nt':
+                causal_mask = self._update_causal_mask(
+                    attention_mask, inputs_embeds, cache_position, past_key_values, output_attentions
+                )
+            else:
+                causal_mask = None
 
         # embed positions
         hidden_states = inputs_embeds
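For context, here is a minimal standalone sketch of the control flow this commit introduces: the explicit causal mask is only built when running on Windows (os.name == 'nt'); on other platforms it is left as None, and per-layer prefill skips it entirely. This is not the repository code: build_causal_mask is a hypothetical stand-in for the model's _update_causal_mask helper, and the reading that non-Windows attention paths handle causal masking without an explicit mask is inferred from the diff, not stated in it.

# Minimal sketch of the post-commit branching, under the assumptions above.
import os

import torch


def build_causal_mask(seq_len: int, dtype=torch.float32) -> torch.Tensor:
    # Standard additive causal mask: -inf above the diagonal, 0 elsewhere.
    # Hypothetical stand-in for the model's _update_causal_mask helper.
    mask = torch.full((seq_len, seq_len), float("-inf"), dtype=dtype)
    return torch.triu(mask, diagonal=1)


def select_causal_mask(seq_len: int, per_layer_prefill_flag: bool):
    if per_layer_prefill_flag:
        # Per-layer prefill path: no explicit mask, same as before the commit.
        return None
    if os.name == "nt":
        # Windows keeps the explicit mask (mirrors the os.name == 'nt' branch).
        return build_causal_mask(seq_len)
    # Other platforms now pass None, which is the change this commit makes.
    return None


if __name__ == "__main__":
    print(select_causal_mask(4, per_layer_prefill_flag=False))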