Use a silu activation in mistral. (#991)

Laurent Mazare
2023-09-29 08:06:54 +02:00
committed by GitHub
parent 23b3576c47
commit 53510ce427
2 changed files with 5 additions and 1 deletion

@@ -9,6 +9,8 @@ pub enum Activation {
     #[serde(rename = "gated-gelu")]
     NewGelu,
     Relu,
+    Silu,
+    Sigmoid,
     Elu(f64),
     LeakyRelu(f64),
 }
@@ -20,6 +22,8 @@ impl super::Module for Activation {
             // https://github.com/huggingface/transformers/blob/12f043eaeaabfef6f6efea411d98e6f6d3c094b7/src/transformers/activations.py#L49-L78
             Self::NewGelu => xs.gelu(),
             Self::Relu => xs.relu(),
+            Self::Silu => crate::ops::silu(xs),
+            Self::Sigmoid => crate::ops::sigmoid(xs),
             &Self::Elu(alpha) => xs.elu(alpha),
             &Self::LeakyRelu(negative_slope) => crate::ops::leaky_relu(xs, negative_slope),
         }
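
The new Silu arm delegates to crate::ops::silu, i.e. silu(x) = x * sigmoid(x) = x / (1 + e^(-x)), also known as swish. As a rough, dependency-free illustration of what that function computes, here is a scalar sketch; the silu helper below is illustrative only and is not candle's implementation.

// Scalar sketch of the SiLU activation: silu(x) = x * sigmoid(x).
fn silu(x: f32) -> f32 {
    x / (1.0 + (-x).exp())
}

fn main() {
    // A few sample points; silu(0) = 0 and silu(x) approaches x for large positive x.
    for x in [-2.0f32, -1.0, 0.0, 1.0, 2.0] {
        println!("silu({x:+.1}) = {:+.4}", silu(x));
    }
}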

@@ -28,7 +28,7 @@ impl Config {
             num_hidden_layers: 32,
             num_attention_heads: 32,
             num_key_value_heads: 8,
-            hidden_act: Activation::Gelu, // TODO: silu
+            hidden_act: Activation::Silu,
             max_position_embeddings: 32768,
             rms_norm_eps: 1e-5,
             rope_theta: 10_000.,
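
With this change, the default Mistral 7B configuration selects the Silu variant, and the activation is applied through the Module implementation shown above. Below is a rough usage sketch, assuming the candle_core and candle_nn crate names as seen by an external dependent; the tensor values are arbitrary and the snippet is an illustration, not part of this commit.

use candle_core::{Device, Result, Tensor};
use candle_nn::{Activation, Module};

fn main() -> Result<()> {
    // Same activation the Mistral config now defaults to.
    let act = Activation::Silu;
    let xs = Tensor::new(&[-2.0f32, -1.0, 0.0, 1.0, 2.0], &Device::Cpu)?;
    // Module::forward dispatches to the Silu arm added in this commit.
    let ys = act.forward(&xs)?;
    println!("{ys}");
    Ok(())
}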