Mirror of https://github.com/huggingface/candle.git (synced 2025-06-15 18:28:24 +00:00)
Add hard-sigmoid and hard-swish activations (#1244)
* Add hard-sigmoid and hard-swish activations

* Update ops.rs

* Use / rather than div.

---------

Co-authored-by: Laurent <laurent.mazare@gmail.com>
@@ -13,7 +13,9 @@ pub enum Activation {
     Relu6,
     Silu,
     Sigmoid,
+    HardSigmoid,
     Swish,
+    HardSwish,
     Elu(f64),
     LeakyRelu(f64),
 }
@@ -29,7 +31,9 @@ impl super::Module for Activation {
             Self::Relu6 => xs.clamp(0f32, 6f32),
             Self::Silu => crate::ops::silu(xs),
             Self::Sigmoid => crate::ops::sigmoid(xs),
+            Self::HardSigmoid => crate::ops::hard_sigmoid(xs),
             Self::Swish => xs * crate::ops::sigmoid(xs)?,
+            Self::HardSwish => xs * crate::ops::hard_sigmoid(xs)?,
             &Self::Elu(alpha) => xs.elu(alpha),
             &Self::LeakyRelu(negative_slope) => crate::ops::leaky_relu(xs, negative_slope),
         }
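For context, a minimal sketch (not part of the commit) of how the two new variants can be invoked from user code through the Module trait. The crate names (candle-core, candle-nn), the CPU device, and the input values are illustrative assumptions for the example.

use candle_core::{Device, Result, Tensor};
use candle_nn::{Activation, Module};

fn main() -> Result<()> {
    let xs = Tensor::new(&[-4.0f32, -1.0, 0.0, 1.0, 4.0], &Device::Cpu)?;
    // HardSigmoid clamps (x + 3) / 6 into [0, 1].
    let hs = Activation::HardSigmoid.forward(&xs)?;
    // HardSwish multiplies the input by its hard-sigmoid, mirroring Swish.
    let hw = Activation::HardSwish.forward(&xs)?;
    println!("hard_sigmoid: {hs}\nhard_swish: {hw}");
    Ok(())
}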
@@ -44,6 +44,11 @@ pub fn sigmoid(xs: &Tensor) -> Result<Tensor> {
     (xs.neg()?.exp()? + 1.0)?.recip()
 }
 
+pub fn hard_sigmoid(xs: &Tensor) -> Result<Tensor> {
+    // TODO: Should we have a specialized op for this?
+    ((xs + 3.0)? / 6.0)?.clamp(0f32, 1f32)
+}
+
 pub fn leaky_relu(xs: &Tensor, negative_slope: f64) -> Result<Tensor> {
     let zeros = xs.zeros_like()?;
     xs.maximum(&zeros)? + xs.minimum(&zeros)? * negative_slope
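As a quick sanity check, a small sketch (assuming candle-core and candle-nn as dependencies; the chosen points are illustrative) compares the new hard_sigmoid against the exact sigmoid at its saturation points -3, 0, and 3.

use candle_core::{Device, Result, Tensor};

fn main() -> Result<()> {
    let xs = Tensor::new(&[-3.0f32, 0.0, 3.0], &Device::Cpu)?;
    // hard_sigmoid saturates exactly at +/-3: expected [0.0, 0.5, 1.0].
    let hard = candle_nn::ops::hard_sigmoid(&xs)?;
    // The exact sigmoid gives roughly [0.047, 0.5, 0.953] at the same points.
    let exact = candle_nn::ops::sigmoid(&xs)?;
    println!("hard:  {hard}\nexact: {exact}");
    Ok(())
}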