Mirror of https://github.com/huggingface/candle.git, synced 2025-06-16 18:48:51 +00:00
Add the relu2 and relu6 activations. (#1201)
@@ -9,6 +9,8 @@ pub enum Activation {
     #[serde(rename = "gated-gelu")]
     NewGelu,
     Relu,
+    Relu2,
+    Relu6,
     Silu,
     Sigmoid,
     Elu(f64),
@@ -22,6 +24,8 @@ impl super::Module for Activation {
             // https://github.com/huggingface/transformers/blob/12f043eaeaabfef6f6efea411d98e6f6d3c094b7/src/transformers/activations.py#L49-L78
             Self::NewGelu => xs.gelu(),
             Self::Relu => xs.relu(),
+            Self::Relu2 => xs.relu()?.sqr(),
+            Self::Relu6 => xs.clamp(0f32, 6f32),
             Self::Silu => crate::ops::silu(xs),
             Self::Sigmoid => crate::ops::sigmoid(xs),
             &Self::Elu(alpha) => xs.elu(alpha),
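For reference, a minimal sketch (not part of the commit) of what the two new variants compute when driven through the Module impl above. It assumes the usual workspace crate aliases (`candle` for candle-core, `candle_nn` for candle-nn); the input values are arbitrary.

use candle::{Device, Result, Tensor};
use candle_nn::{Activation, Module};

fn main() -> Result<()> {
    let xs = Tensor::new(&[-2f32, 1., 3., 8.], &Device::Cpu)?;
    // Relu2 squares the ReLU output: [-2, 1, 3, 8] -> [0, 1, 9, 64].
    let relu2 = Activation::Relu2.forward(&xs)?;
    // Relu6 clamps into [0, 6]: [-2, 1, 3, 8] -> [0, 1, 3, 6].
    let relu6 = Activation::Relu6.forward(&xs)?;
    println!("relu2: {relu2}");
    println!("relu6: {relu6}");
    Ok(())
}

Both variants are expressed in terms of existing tensor ops (relu + sqr, and clamp), so no new kernels are needed.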
@@ -11,6 +11,7 @@ pub mod llama;
 pub mod mistral;
 pub mod mixformer;
 pub mod mpt;
+pub mod persimmon;
 pub mod quantized_blip;
 pub mod quantized_blip_text;
 pub mod quantized_llama;
candle-transformers/src/models/persimmon.rs (new file, +56 lines)
@@ -0,0 +1,56 @@
+use candle::DType;
+use serde::Deserialize;
+
+pub const DTYPE: DType = DType::F32;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub enum PositionEmbeddingType {
+    Absolute,
+    Alibi,
+}
+
+// https://github.com/huggingface/transformers/blob/main/src/transformers/models/persimmon/configuration_persimmon.py
+#[derive(Debug, Clone, PartialEq, Deserialize)]
+pub struct Config {
+    pub vocab_size: usize,
+    pub hidden_size: usize,
+    pub intermediate_size: usize,
+    pub num_hidden_layers: usize,
+    pub num_attention_heads: usize,
+    pub num_key_value_heads: usize,
+    pub hidden_act: candle_nn::Activation,
+    pub max_position_embeddings: usize,
+    pub initializer_range: f64,
+    pub layer_norm_eps: f64,
+    pub rms_norm_eps: f64,
+    pub use_cache: bool,
+    pub tie_word_embeddings: bool,
+    pub rope_theta: f64,
+    pub qk_layernorm: bool,
+    pub partial_rotary_factor: f64,
+}
+
+impl Config {
+    pub fn base_8b() -> Self {
+        // https://huggingface.co/adept/persimmon-8b-base/blob/main/config.json
+        Self {
+            hidden_act: candle_nn::Activation::Relu,
+            hidden_size: 4096,
+            initializer_range: 0.02,
+            intermediate_size: 16384,
+            layer_norm_eps: 1e-05,
+            max_position_embeddings: 16384,
+            num_attention_heads: 64,
+            num_hidden_layers: 36,
+            num_key_value_heads: 64,
+            qk_layernorm: true,
+            rms_norm_eps: 1e-06,
+            rope_theta: 25000.0,
+            tie_word_embeddings: false,
+            use_cache: true,
+            vocab_size: 262144,
+            partial_rotary_factor: 0.5,
+        }
+    }
+}