Add the prelu layer. (#1402)

This commit is contained in:
Laurent Mazare
2023-12-03 17:06:09 +01:00
committed by GitHub
parent 8418154ee0
commit b5c283e86f
3 changed files with 51 additions and 4 deletions

View File

@@ -15,7 +15,7 @@ pub mod sequential;
pub mod var_builder;
pub mod var_map;
-pub use activation::Activation;
+pub use activation::{prelu, Activation, PReLU};
pub use batch_norm::{batch_norm, BatchNorm, BatchNormConfig};
pub use conv::{
conv1d, conv2d, conv2d_no_bias, conv_transpose2d, conv_transpose2d_no_bias, Conv1d,