Enable the new layer-norm. (#2213)
* Enable the new layer-norm.
* Shape fixes.
@@ -11,8 +11,8 @@
 //! use candle_nn::{LayerNorm, Module};
 //! # fn main() -> candle::Result<()> {
 //!
-//! let w = Tensor::new(1f32, &Cpu)?;
-//! let b = Tensor::new(0f32, &Cpu)?;
+//! let w = Tensor::new(&[1f32, 1f32, 1f32], &Cpu)?;
+//! let b = Tensor::new(&[0f32, 0f32, 0f32], &Cpu)?;
 //! let layer = LayerNorm::new(w, b, 1e-5);
 //!
 //! let xs = Tensor::new(
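The shape fix aligns the doc example with `LayerNorm::new`, which pairs a per-element weight and bias with the normalized (last) dimension: the scalar `w` and `b` did not match the rows being normalized. A minimal standalone version of the corrected example is sketched below; the `xs` values and the final `println!` are illustrative, since the hunk cuts off before the tensor literal.

use candle::{Device::Cpu, Tensor};
use candle_nn::{LayerNorm, Module};

fn main() -> candle::Result<()> {
    // Weight and bias sized to the last (normalized) dimension, per the fix.
    let w = Tensor::new(&[1f32, 1f32, 1f32], &Cpu)?;
    let b = Tensor::new(&[0f32, 0f32, 0f32], &Cpu)?;
    let layer = LayerNorm::new(w, b, 1e-5);

    // Illustrative 1x3x3 input; the diff truncates the original literal here.
    let xs = Tensor::new(&[[[1f32, 2., 3.], [4., 5., 6.], [9., 8., 7.]]], &Cpu)?;
    let ys = layer.forward(&xs)?;
    println!("{ys}");
    Ok(())
}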
@@ -107,6 +107,11 @@ impl LayerNorm {
 
 impl Module for LayerNorm {
     fn forward(&self, x: &Tensor) -> Result<Tensor> {
+        if x.is_contiguous() && self.remove_mean {
+            if let Some(bias) = self.bias.as_ref() {
+                return crate::ops::layer_norm(x, &self.weight, bias, self.eps as f32);
+            }
+        }
         let x_dtype = x.dtype();
         let internal_dtype = match x_dtype {
             DType::F16 | DType::BF16 => DType::F32,
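The five added lines route contiguous inputs with mean removal and a bias to the fused `crate::ops::layer_norm` kernel; anything else falls through to the pre-existing tensor-op path, which upcasts half-precision inputs to f32 before normalizing. A sketch of that fallback computation as a free function, using candle's public tensor API (`layer_norm_ref` is an illustrative name, not part of the crate):

use candle::{DType, Result, Tensor, D};

// Illustrative reference for the non-fused path: mean removal, variance
// normalization over the last dimension, then affine scale/shift.
fn layer_norm_ref(x: &Tensor, w: &Tensor, b: &Tensor, eps: f64) -> Result<Tensor> {
    let x_dtype = x.dtype();
    // Accumulate in f32 for half-precision inputs, as in the match above.
    let internal_dtype = match x_dtype {
        DType::F16 | DType::BF16 => DType::F32,
        d => d,
    };
    let hidden_size = x.dim(D::Minus1)? as f64;
    let x = x.to_dtype(internal_dtype)?;
    // Remove the mean over the last dimension.
    let mean = (x.sum_keepdim(D::Minus1)? / hidden_size)?;
    let x = x.broadcast_sub(&mean)?;
    // Divide by the (biased) standard deviation, stabilized by eps.
    let var = (x.sqr()?.sum_keepdim(D::Minus1)? / hidden_size)?;
    let x = x.broadcast_div(&(var + eps)?.sqrt()?)?;
    // Apply the learned scale and shift.
    x.to_dtype(x_dtype)?.broadcast_mul(w)?.broadcast_add(b)
}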