fix save and reload model state (#49)
Some checks failed
Lint with Black / lint (push) Has been cancelled
Lint with isort / lint (push) Has been cancelled

Co-authored-by: Srini Iyer <sviyer@meta.com>
This commit is contained in:
Srinivasan Iyer 2025-02-07 14:27:47 -08:00 committed by GitHub
parent fe45f69fbf
commit 22c7fe1d1c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -74,12 +74,10 @@ class LocalModelBase(nn.Module):
         self.boe_id = BOE_ID
-        self.norm = RMSNorm(args.dim, eps=args.norm_eps)
         self.layers = nn.ModuleList(
             [TransformerBlock(args) for _ in range(args.n_layers)]
         )
-        self.tok_embeddings = nn.Embedding(self.vocab_size, args.dim)
         if not self.use_rope:
             self.pos_embeddings = nn.Embedding(args.max_length, args.dim)
         else:
@@ -131,16 +129,18 @@ class LocalModelBase(nn.Module):
     def init_weights(self, init_std=None):
         self.rope.reset_parameters()
-        self.norm.reset_parameters()
+        if hasattr(self, "norm"):
+            self.norm.reset_parameters()

         init_std = init_std or (self.dim ** (-0.5))
-        nn.init.trunc_normal_(
-            self.tok_embeddings.weight,
-            mean=0.0,
-            std=init_std,
-            a=-3 * init_std,
-            b=3 * init_std,
-        )
+        if hasattr(self, "tok_embeddings"):
+            nn.init.trunc_normal_(
+                self.tok_embeddings.weight,
+                mean=0.0,
+                std=init_std,
+                a=-3 * init_std,
+                b=3 * init_std,
+            )
         if self.pos_embeddings is not None:
             nn.init.trunc_normal_(
                 self.pos_embeddings.weight,
@@ -212,6 +212,8 @@ class LocalEncoder(LocalModelBase):
         self.cross_attn_init_by_pooling = args.cross_attn_init_by_pooling
         self.cross_attn_nheads = args.cross_attn_nheads

+        self.tok_embeddings = nn.Embedding(self.vocab_size, args.dim)
+
         if self.cross_attn_encoder:
             self.cross_attn_layers = torch.nn.ModuleList()
             layers_to_add = args.n_layers if self.cross_attn_all_layers_encoder else 1
@@ -314,6 +316,8 @@ class LocalDecoder(LocalModelBase):
         self.cross_attn_init_by_pooling = args.cross_attn_init_by_pooling
         self.cross_attn_nheads = args.cross_attn_nheads

+        self.norm = RMSNorm(args.dim, eps=args.norm_eps)
+
         if self.cross_attn_decoder:
             self.cross_attn_layers = torch.nn.ModuleList()
             layers_to_add = args.n_layers if self.cross_attn_all_layers_decoder else 1