From a3f3b93d16e08b78d3416d9caf7607c19bc11646 Mon Sep 17 00:00:00 2001
From: Laurent Mazare
Date: Thu, 6 Jul 2023 23:22:08 +0100
Subject: [PATCH] Add the call to dense in the attention layer. (#96)

---
 candle-examples/examples/falcon/model.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/candle-examples/examples/falcon/model.rs b/candle-examples/examples/falcon/model.rs
index efab97ca..df89b75c 100644
--- a/candle-examples/examples/falcon/model.rs
+++ b/candle-examples/examples/falcon/model.rs
@@ -444,6 +444,7 @@ impl FalconAttention {
             .reshape((b_sz, self.num_heads, q_len, head_dim))?
             .transpose(1, 2)?
             .reshape((b_sz, q_len, self.num_heads * head_dim))?;
+        let attn_output = self.attn_output.forward(&attn_output)?;
         Ok(attn_output)
     }
 }
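
Context for the one-line fix above: after scaled dot-product attention, the per-head outputs are merged back into shape (b_sz, q_len, num_heads * head_dim), and that merged tensor must still pass through the attention block's output projection (the "dense" layer, self.attn_output) before being returned; the pre-patch code returned the raw merged heads instead. The following is a minimal plain-Rust sketch of that flow, using hypothetical stand-in types (this Linear and attention_output are illustrations, not the candle API).

// Stand-in for a learned linear projection, y = W x + b.
struct Linear {
    weight: Vec<Vec<f32>>, // (out_dim, in_dim)
    bias: Vec<f32>,        // (out_dim)
}

impl Linear {
    fn forward(&self, x: &[f32]) -> Vec<f32> {
        self.weight
            .iter()
            .zip(&self.bias)
            .map(|(row, b)| row.iter().zip(x).map(|(w, v)| w * v).sum::<f32>() + b)
            .collect()
    }
}

// The step the patch restores: map the merged heads through the output
// projection instead of returning them directly.
fn attention_output(merged_heads: &[f32], attn_output: &Linear) -> Vec<f32> {
    attn_output.forward(merged_heads)
}

fn main() {
    // Identity-like 2x2 projection, purely for illustration.
    let proj = Linear {
        weight: vec![vec![1.0, 0.0], vec![0.0, 1.0]],
        bias: vec![0.0, 0.0],
    };
    let merged = [0.5, -0.25]; // pretend num_heads * head_dim == 2
    println!("{:?}", attention_output(&merged, &proj));
}

Without this projection the heads are merely concatenated; the dense layer is what lets the model mix information across heads, so omitting it silently changes the model's computation even though all shapes still line up.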