models/gemma3: remove final logit softcap (#9692)

Softcapping isn't in the whitepaper or reference implementation for the language model, so we should remove it. There is no discernible difference in output with it removed.
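For reference, the removed code rescaled the final logits as cap * tanh(logits / cap), with cap read from final_logit_softcapping (default 30.0). A minimal standalone sketch of that operation, using plain float64 slices rather than ollama's ml.Tensor API (softcap and capVal are illustrative names, not from the codebase):

package main

import (
	"fmt"
	"math"
)

// softcap mirrors the removed Scale(1/cap) -> Tanh -> Scale(cap)
// sequence: it maps each logit x to capVal * tanh(x/capVal),
// squashing every value into the open interval (-capVal, capVal).
func softcap(logits []float64, capVal float64) []float64 {
	out := make([]float64, len(logits))
	for i, x := range logits {
		out[i] = capVal * math.Tanh(x/capVal)
	}
	return out
}

func main() {
	// Large logits saturate near the cap; small ones pass through almost unchanged.
	fmt.Println(softcap([]float64{0.5, 10, 100, -100}, 30.0))
}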
Bruce MacDonald 2025-03-12 10:17:57 -07:00 committed by GitHub
parent 6b45b1d6b4
commit a70820daa0
GPG Key ID: B5690EEEBB952194

@@ -15,7 +15,6 @@ type TextOptions struct {
 	attnKeyLen, attnValLen        int
 	eps, ropeScale                float32
 	ropeLocalBase, ropeGlobalBase float32
-	finalLogitSoftcap             float32
 	largeModelScaling             bool
 }
@@ -66,7 +65,6 @@ func newTextModel(c ml.Config) *TextModel {
 			ropeLocalBase:  c.Float("rope.local.freq_base", 10000.0),
 			ropeGlobalBase: c.Float("rope.global.freq_base", 1000000.0),
 			ropeScale:      c.Float("rope.freq_scale", 1.0),
-			finalLogitSoftcap: c.Float("final_logit_softcapping", 30.0),
 		},
 	}
@@ -245,10 +243,5 @@ func (m *TextModel) Forward(ctx ml.Context, inputs, positions, outputs ml.Tensor
 	}

 	hiddenState = m.OutputNorm.Forward(ctx, hiddenState, m.eps)
-	hiddenState = m.Output.Forward(ctx, hiddenState)
-
-	// final logit softcap
-	hiddenState = hiddenState.Scale(ctx, 1.0/float64(m.TextOptions.finalLogitSoftcap))
-	hiddenState = hiddenState.Tanh(ctx)
-	return hiddenState.Scale(ctx, float64(m.TextOptions.finalLogitSoftcap))
+	return m.Output.Forward(ctx, hiddenState)
 }