Use a silu activation in mistral. (#991)
@@ -9,6 +9,8 @@ pub enum Activation {
     #[serde(rename = "gated-gelu")]
     NewGelu,
     Relu,
+    Silu,
+    Sigmoid,
     Elu(f64),
     LeakyRelu(f64),
 }
@@ -20,6 +22,8 @@ impl super::Module for Activation {
             // https://github.com/huggingface/transformers/blob/12f043eaeaabfef6f6efea411d98e6f6d3c094b7/src/transformers/activations.py#L49-L78
             Self::NewGelu => xs.gelu(),
             Self::Relu => xs.relu(),
+            Self::Silu => crate::ops::silu(xs),
+            Self::Sigmoid => crate::ops::sigmoid(xs),
             &Self::Elu(alpha) => xs.elu(alpha),
             &Self::LeakyRelu(negative_slope) => crate::ops::leaky_relu(xs, negative_slope),
         }
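
For reference, SiLU (also known as swish) is defined elementwise as silu(x) = x * sigmoid(x) = x / (1 + e^-x); the new match arm delegates to crate::ops::silu rather than computing this inline. Below is a minimal standalone sketch of that definition in plain Rust, for illustration only (it is not the candle kernel):

// Elementwise SiLU on a scalar: silu(x) = x * sigmoid(x) = x / (1 + exp(-x)).
fn silu_scalar(x: f64) -> f64 {
    x / (1.0 + (-x).exp())
}

fn main() {
    // silu(0) = 0; for large positive x, silu(x) approaches x.
    assert!(silu_scalar(0.0).abs() < 1e-12);
    println!("silu(1.0) = {:.6}", silu_scalar(1.0)); // ~0.731059
}
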
@@ -28,7 +28,7 @@ impl Config {
             num_hidden_layers: 32,
             num_attention_heads: 32,
             num_key_value_heads: 8,
-            hidden_act: Activation::Gelu, // TODO: silu
+            hidden_act: Activation::Silu,
             max_position_embeddings: 32768,
             rms_norm_eps: 1e-5,
             rope_theta: 10_000.,
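
With the config now defaulting to Activation::Silu, the Mistral MLP applies the activation through the Module impl patched above. A hedged usage sketch of that path, assuming candle's public Tensor/Module API and the usual crate names candle_core / candle_nn (illustrative, not taken from this commit):

use candle_core::{Device, Result, Tensor};
use candle_nn::{Activation, Module};

fn main() -> Result<()> {
    // A tiny input tensor on the CPU.
    let xs = Tensor::new(&[-1.0f32, 0.0, 1.0], &Device::Cpu)?;
    // The match arm added above routes this call to crate::ops::silu.
    let ys = Activation::Silu.forward(&xs)?;
    println!("{ys}");
    Ok(())
}
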