Add the call to dense in the attention layer. (#96)

commit a3f3b93d16 (parent 0a2c82e301)
Author:    Laurent Mazare
Committer: GitHub
Date:      2023-07-06 23:22:08 +01:00


@@ -444,6 +444,7 @@ impl FalconAttention {
             .reshape((b_sz, self.num_heads, q_len, head_dim))?
             .transpose(1, 2)?
             .reshape((b_sz, q_len, self.num_heads * head_dim))?;
+        let attn_output = self.attn_output.forward(&attn_output)?;
         Ok(attn_output)
     }
 }
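
For reference, a minimal sketch of the patched output path, assuming candle's Tensor ops and a candle_nn-style Linear for the attn_output projection. The struct fields, method name, and crate imports below are reconstructed from the diff context, not copied from the full source file.

// Sketch of FalconAttention's output path after this commit, under the
// assumptions stated above (candle_core / candle_nn names are guesses).
use candle_core::{Result, Tensor};
use candle_nn::{Linear, Module};

struct FalconAttention {
    // Dense output projection: (num_heads * head_dim) -> hidden_size.
    attn_output: Linear,
    num_heads: usize,
}

impl FalconAttention {
    // `attn` holds the per-head attention results; b_sz, q_len, and
    // head_dim mirror the names used in the diff above.
    fn project(&self, attn: &Tensor, b_sz: usize, q_len: usize, head_dim: usize) -> Result<Tensor> {
        // Merge the heads back into one (b_sz, q_len, num_heads * head_dim) tensor.
        let attn_output = attn
            .reshape((b_sz, self.num_heads, q_len, head_dim))?
            .transpose(1, 2)?
            .reshape((b_sz, q_len, self.num_heads * head_dim))?;
        // The fix: apply the dense layer that was previously skipped, so the
        // concatenated heads get mixed by the learned output projection.
        let attn_output = self.attn_output.forward(&attn_output)?;
        Ok(attn_output)
    }
}

Without that final forward call, the merged head outputs were returned as-is and the attn_output weights were never used, which is what this one-line change corrects.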