Add the prelu layer. (#1402)

Laurent Mazare
2023-12-03 17:06:09 +01:00
committed by GitHub
parent 8418154ee0
commit b5c283e86f
3 changed files with 51 additions and 4 deletions
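Only the linear.rs doc tweaks from this commit are excerpted below; the PReLU layer itself lives in the other changed files. As a rough sketch of the idea, not the commit's actual code (the struct layout, the `prelu` helper, its signature, and the 0.25 init constant are all assumptions for illustration): PReLU computes max(0, x) + weight * min(0, x), where the weight is learned.

use candle::{Module, Result, Tensor};
use candle_nn::{init::Init, VarBuilder};

/// Sketch of a PReLU layer: prelu(x) = max(0, x) + weight * min(0, x).
#[derive(Clone, Debug)]
pub struct PReLU {
    weight: Tensor,
}

impl Module for PReLU {
    fn forward(&self, xs: &Tensor) -> Result<Tensor> {
        let zeros = xs.zeros_like()?;
        // The positive part passes through unchanged; the negative part is
        // scaled by the learned weight. For simplicity this sketch assumes the
        // channel dimension is the last one so that broadcasting lines up; an
        // NCHW input would need the weight reshaped first.
        xs.maximum(&zeros)? + xs.minimum(&zeros)?.broadcast_mul(&self.weight)?
    }
}

/// Hypothetical constructor: one learnable weight per channel, initialized
/// to 0.25 as in the original PReLU paper.
pub fn prelu(num_channels: usize, vs: VarBuilder) -> Result<PReLU> {
    let weight = vs.get_with_hints(num_channels, "weight", Init::Const(0.25))?;
    Ok(PReLU { weight })
}

A per-channel weight mirrors PyTorch's nn.PReLU(num_parameters) behavior; a single scalar weight is the other common variant.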

@@ -56,7 +56,7 @@ impl super::Module for Linear {
 /// Create or initialize a new linear layer.
 ///
-/// This uses some default names for weight and biases, namely `"weight"` and `"bias"`.
+/// This uses some default names for weights and biases, namely `"weight"` and `"bias"`.
 pub fn linear(in_dim: usize, out_dim: usize, vs: crate::VarBuilder) -> Result<Linear> {
     let init_ws = crate::init::DEFAULT_KAIMING_NORMAL;
     let ws = vs.get_with_hints((out_dim, in_dim), "weight", init_ws)?;
@@ -69,6 +69,7 @@ pub fn linear(in_dim: usize, out_dim: usize, vs: crate::VarBuilder) -> Result<Linear> {
     Ok(Linear::new(ws, Some(bs)))
 }
+/// Create or initialize a new linear layer without biases.
 pub fn linear_no_bias(in_dim: usize, out_dim: usize, vs: crate::VarBuilder) -> Result<Linear> {
     let init_ws = crate::init::DEFAULT_KAIMING_NORMAL;
     let ws = vs.get_with_hints((out_dim, in_dim), "weight", init_ws)?;
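For context, a hedged usage sketch of the `linear` constructor shown above, assuming candle-nn's VarMap/VarBuilder helpers for variable creation (the 4 -> 2 shape is arbitrary):

use candle::{DType, Device, Module, Result, Tensor};
use candle_nn::{linear, VarBuilder, VarMap};

fn main() -> Result<()> {
    let varmap = VarMap::new();
    let vs = VarBuilder::from_varmap(&varmap, DType::F32, &Device::Cpu);
    // A 4 -> 2 layer: "weight" gets the default Kaiming-normal init and
    // "bias" is registered alongside it, per the doc comment above.
    let layer = linear(4, 2, vs)?;
    let xs = Tensor::zeros((1, 4), DType::F32, &Device::Cpu)?;
    let ys = layer.forward(&xs)?;
    println!("{ys}");
    Ok(())
}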