From af955f260cc20364b3e000c895dcb134a46e4e94 Mon Sep 17 00:00:00 2001
From: Laurent Mazare
Date: Mon, 15 Apr 2024 09:39:03 +0200
Subject: [PATCH] Make the falcon model cloneable. (#2067)

---
 candle-transformers/src/models/falcon.rs | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/candle-transformers/src/models/falcon.rs b/candle-transformers/src/models/falcon.rs
index 05d64567..3a3575aa 100644
--- a/candle-transformers/src/models/falcon.rs
+++ b/candle-transformers/src/models/falcon.rs
@@ -120,7 +120,7 @@ fn rotate_half(x: &Tensor) -> Result<Tensor> {
     Ok(x21)
 }
 
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 struct FalconRotaryEmbedding {
     inv_freq: Tensor,
     cache: Option<(usize, Tensor, Tensor)>,
@@ -186,7 +186,7 @@ fn masked_fill(on_false: &Tensor, mask: &Tensor, on_true: f32) -> Result<Tensor> {
     Ok(m)
 }
 
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 struct FalconAttention {
     query_key_value: Linear,
     dense: Linear,
@@ -321,7 +321,7 @@ impl FalconAttention {
     }
 }
 
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 struct FalconMlp {
     dense_h_to_4h: Linear,
     dense_4h_to_h: Linear,
@@ -346,7 +346,7 @@ impl FalconMlp {
     }
 }
 
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 struct FalconDecoderLayer {
     inp_layernorm: LayerNorm,
     self_attention: FalconAttention,
@@ -412,7 +412,7 @@ impl FalconDecoderLayer {
     }
 }
 
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 pub struct Falcon {
     word_embeddings: Embedding,
     blocks: Vec<FalconDecoderLayer>,