Add the swiglu activation from the chatglm PR. (#1246)

This commit is contained in:
Laurent Mazare
2023-11-02 20:01:34 +01:00
committed by GitHub
parent e08fbb6543
commit a2a20aeecc
2 changed files with 7 additions and 0 deletions

View File

@ -39,6 +39,11 @@ pub fn silu(xs: &Tensor) -> Result<Tensor> {
xs / (xs.neg()?.exp()? + 1.0)?
}
/// SwiGLU activation: splits `xs` into two halves along the last
/// dimension, applies SiLU to the first half, and multiplies the
/// result elementwise with the second half.
pub fn swiglu(xs: &Tensor) -> Result<Tensor> {
    let halves = xs.chunk(2, candle::D::Minus1)?;
    let gated = crate::ops::silu(&halves[0])?;
    gated * &halves[1]
}
pub fn sigmoid(xs: &Tensor) -> Result<Tensor> {
// TODO: Should we have a specialized op for this?
(xs.neg()?.exp()? + 1.0)?.recip()