Expose the conv2d-transpose layers. (#761)

This commit is contained in:
Laurent Mazare
2023-09-07 07:04:52 +02:00
committed by GitHub
parent a17a7c42c1
commit 000fa00e31
2 changed files with 45 additions and 1 deletions

View File

@ -240,3 +240,44 @@ pub fn conv2d_no_bias(
    )?;
    Ok(Conv2d::new(ws, None, cfg))
}
/// Builds a 2D transposed-convolution layer with a learnable bias.
///
/// The weight tensor is created with shape
/// `(in_channels, out_channels, kernel_size, kernel_size)` and both the
/// weight and the bias are drawn from a uniform distribution `U(-b, b)`
/// with `b = 1 / (sqrt(out_channels) * kernel_size)`, i.e. the usual
/// `1/sqrt(fan_in)` heuristic for transposed convolutions.
///
/// Variables are fetched (or initialized) through `vs` under the names
/// `"weight"` and `"bias"`.
pub fn conv_transpose2d(
    in_channels: usize,
    out_channels: usize,
    kernel_size: usize,
    cfg: ConvTranspose2dConfig,
    vs: crate::VarBuilder,
) -> Result<ConvTranspose2d> {
    // Same bound for weight and bias, mirroring the PyTorch default init.
    let limit = 1. / (out_channels as f64).sqrt() / kernel_size as f64;
    let uniform = crate::Init::Uniform {
        lo: -limit,
        up: limit,
    };
    let weight_shape = (in_channels, out_channels, kernel_size, kernel_size);
    let ws = vs.get_with_hints(weight_shape, "weight", uniform)?;
    let bs = vs.get_with_hints(out_channels, "bias", uniform)?;
    Ok(ConvTranspose2d::new(ws, Some(bs), cfg))
}
/// Builds a 2D transposed-convolution layer without a bias term.
///
/// The weight is drawn from a uniform distribution `U(-b, b)` with
/// `b = 1 / (sqrt(out_channels) * kernel_size)` and fetched through `vs`
/// under the name `"weight"`.
pub fn conv_transpose2d_no_bias(
    in_channels: usize,
    out_channels: usize,
    kernel_size: usize,
    cfg: ConvTranspose2dConfig,
    vs: crate::VarBuilder,
) -> Result<ConvTranspose2d> {
    let bound = 1. / (out_channels as f64).sqrt() / kernel_size as f64;
    let init = crate::Init::Uniform {
        lo: -bound,
        up: bound,
    };
    // BUGFIX: the weight shape must be (in_channels, out_channels, k, k),
    // matching both `conv_transpose2d` above and the ConvTranspose2d weight
    // layout convention; the previous (out_channels, in_channels, k, k)
    // ordering produced a transposed (and, for in != out, incompatible)
    // weight tensor.
    let ws = vs.get_with_hints(
        (in_channels, out_channels, kernel_size, kernel_size),
        "weight",
        init,
    )?;
    Ok(ConvTranspose2d::new(ws, None, cfg))
}

View File

@ -16,7 +16,10 @@ pub mod var_map;
pub use activation::Activation;
pub use batch_norm::{batch_norm, BatchNorm, BatchNormConfig};
pub use conv::{
    conv1d, conv2d, conv2d_no_bias, conv_transpose2d, conv_transpose2d_no_bias, Conv1d,
    Conv1dConfig, Conv2d, Conv2dConfig, ConvTranspose2d, ConvTranspose2dConfig,
};
pub use embedding::{embedding, Embedding};
pub use func::{func, Func};
pub use group_norm::{group_norm, GroupNorm};