Mirror of https://github.com/LostRuins/koboldcpp.git, synced 2026-05-17 04:09:19 +00:00.
* move conversion code to a dedicated conversion directory and split the files akin to the src/models architecture --------- Co-authored-by: Sigbjørn Skjæret <sigbjorn.skjaeret@scala.com>
23 lines
786 B
Python
23 lines
786 B
Python
from __future__ import annotations
|
|
|
|
from .base import ModelBase, TextModel, gguf
|
|
|
|
|
|
@ModelBase.register("PLMForCausalLM")
class PLMModel(TextModel):
    """Conversion handler for PLM causal-LM checkpoints.

    Maps a Hugging Face ``PLMForCausalLM`` model onto the GGUF ``PLM``
    architecture: GPT-2 style vocabulary plus the PLM attention
    hyperparameters (KV LoRA rank, split rope/no-rope key widths).
    """

    model_arch = gguf.MODEL_ARCH.PLM

    def set_vocab(self):
        """Build the vocabulary using the shared GPT-2/BPE path."""
        self._set_vocab_gpt2()

    def set_gguf_parameters(self):
        """Emit the common parameters, then the PLM-specific ones.

        NOTE: the write order below is preserved deliberately — GGUF
        records key/value pairs in emission order.
        """
        super().set_gguf_parameters()
        hp = self.hparams
        writer = self.gguf_writer
        writer.add_vocab_size(hp["vocab_size"])
        writer.add_kv_lora_rank(hp["kv_lora_rank"])
        # Total key width = non-rotary part + rotary part of each head.
        writer.add_key_length(hp["qk_nope_head_dim"] + hp["qk_rope_head_dim"])
        writer.add_value_length(hp["v_head_dim"])
        # Only the rotary portion of the key participates in RoPE.
        writer.add_rope_dimension_count(hp["qk_rope_head_dim"])

    def prepare_tensors(self):
        """No PLM-specific tensor handling; defer to the base class."""
        super().prepare_tensors()