Rustfmt fix. (#1788)

Laurent Mazare
2024-03-02 10:35:07 +01:00
committed by GitHub
parent 3e3def4134
commit 314630638d
2 changed files with 10 additions and 3 deletions

@@ -186,7 +186,11 @@ impl DecoderLayer {
     fn new(rotary_emb: Arc<RotaryEmbedding>, cfg: &Config, vb: VarBuilder) -> Result<Self> {
         let self_attn = Attention::new(rotary_emb, cfg, vb.pp("self_attn"))?;
         let mlp = MLP::new(cfg, vb.pp("mlp"))?;
-        let input_layernorm = layer_norm(cfg.hidden_size, cfg.layer_norm_eps, vb.pp("input_layernorm"))?;
+        let input_layernorm = layer_norm(
+            cfg.hidden_size,
+            cfg.layer_norm_eps,
+            vb.pp("input_layernorm"),
+        )?;
         let post_attention_layernorm = layer_norm(
             cfg.hidden_size,
             cfg.layer_norm_eps,

@@ -316,8 +316,11 @@ impl DecoderLayer {
     fn new(rotary_emb: Arc<RotaryEmbedding>, cfg: &Config, vb: VarBuilder) -> Result<Self> {
         let self_attn = Attention::new(rotary_emb, cfg, vb.pp("self_attn"))?;
         let mlp = MLP::new(cfg, vb.pp("mlp"))?;
-        let input_layernorm =
-            candle_nn::layer_norm(cfg.hidden_size, cfg.layer_norm_eps, vb.pp("input_layernorm"))?;
+        let input_layernorm = candle_nn::layer_norm(
+            cfg.hidden_size,
+            cfg.layer_norm_eps,
+            vb.pp("input_layernorm"),
+        )?;
         let post_attention_layernorm = candle_nn::layer_norm(
             cfg.hidden_size,
             cfg.layer_norm_eps,
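
For reference, a minimal standalone sketch (not part of this commit) of the candle_nn::layer_norm helper that the reformatted lines call, assuming the candle-core and candle-nn crates as dependencies; the hidden size, epsilon, tensor shape, and the "input_layernorm" prefix are illustrative only:

use candle_core::{DType, Device, Module, Result, Tensor};
use candle_nn::{layer_norm, VarBuilder, VarMap};

fn main() -> Result<()> {
    let device = Device::Cpu;
    // A VarMap-backed VarBuilder creates the layer-norm weight and bias on first use.
    let varmap = VarMap::new();
    let vb = VarBuilder::from_varmap(&varmap, DType::F32, &device);
    // Same argument order as in the diff: size, epsilon, then the VarBuilder prefix.
    let ln = layer_norm(8, 1e-5, vb.pp("input_layernorm"))?;
    let xs = Tensor::randn(0f32, 1f32, (2, 8), &device)?;
    let ys = ln.forward(&xs)?;
    println!("{:?}", ys.shape());
    Ok(())
}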