convert : Qwerky : use lora_rank_tokenshift and lora_rank_decay if present (#12667)

This commit is contained in:
Sigbjørn Skjæret 2025-03-31 16:36:25 +02:00 committed by GitHub
parent a8a1f33567
commit 403fbacbbc
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -3557,8 +3557,8 @@ class RWKV6Qwen2Model(Rwkv6Model):
head_size = hidden_size // num_attention_heads
rms_norm_eps = self.hparams["rms_norm_eps"]
intermediate_size = self.hparams["intermediate_size"]
- time_mix_extra_dim = 64 if hidden_size >= 4096 else 32
- time_decay_extra_dim = 128 if hidden_size >= 4096 else 64
+ time_mix_extra_dim = self.hparams.get("lora_rank_tokenshift", 64 if hidden_size >= 4096 else 32)
+ time_decay_extra_dim = self.hparams.get("lora_rank_decay", 128 if hidden_size >= 4096 else 64)
# RWKV isn't context limited
self.gguf_writer.add_context_length(1048576)