Mirror of https://github.com/huggingface/candle.git
Add the call to dense in the attention layer. (#96)
@@ -444,6 +444,7 @@ impl FalconAttention {
             .reshape((b_sz, self.num_heads, q_len, head_dim))?
             .transpose(1, 2)?
             .reshape((b_sz, q_len, self.num_heads * head_dim))?;
+        let attn_output = self.attn_output.forward(&attn_output)?;
         Ok(attn_output)
     }
 }
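For context, here is a minimal sketch of the step this commit fixes: after attention is applied, the per-head outputs are merged back into the hidden dimension and must then go through the dense output projection (`attn_output`, a `candle_nn::Linear`). The struct layout and method name below are assumptions for illustration; only the tensor-reshaping chain and the added `forward` call come from the diff itself.

use candle_core::{Result, Tensor};
use candle_nn::{Linear, Module};

// Hypothetical stand-in for the relevant fields of FalconAttention.
struct FalconAttentionSketch {
    attn_output: Linear, // dense projection: (num_heads * head_dim) -> hidden_size
    num_heads: usize,
}

impl FalconAttentionSketch {
    /// Merge the per-head attention outputs and apply the dense projection.
    /// Input shape: (b_sz, num_heads, q_len, head_dim).
    fn project(&self, attn_output: &Tensor) -> Result<Tensor> {
        let (b_sz, _num_heads, q_len, head_dim) = attn_output.dims4()?;
        let attn_output = attn_output
            // (b_sz, num_heads, q_len, head_dim) -> (b_sz, q_len, num_heads, head_dim)
            .transpose(1, 2)?
            // Flatten the heads back into a single hidden dimension.
            .reshape((b_sz, q_len, self.num_heads * head_dim))?;
        // The call added by this commit: without it, the layer would return
        // the raw concatenated heads instead of mixing them through the
        // dense projection before the residual connection.
        let attn_output = self.attn_output.forward(&attn_output)?;
        Ok(attn_output)
    }
}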