Remove some dead-code pragmas. (#137)

Author:    Laurent Mazare
Date:      2023-07-11 09:33:59 +01:00
Committed: via GitHub
Commit:    674eb35e10 (parent ae79c00e48)

7 changed files with 3 additions and 44 deletions
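For context on what the commit is undoing: `#![allow(dead_code)]` is an inner attribute that silences the dead-code lint for the whole file it sits in, while `#[allow(dead_code)]` is an outer attribute scoped to the single item that follows it. A minimal standalone sketch of the two forms plus the field-level variant this commit adds to `Dropout::pr` below (the names here are illustrative, not from the patch):

// Outer form: covers only the item that follows.
#[allow(dead_code)]
struct NeverConstructed;

struct Config {
    // Field-level form: the field is stored but never read,
    // and the attribute records that this is intentional.
    #[allow(dead_code)]
    reserved: u32,
    active: u32,
}

fn main() {
    let c = Config { reserved: 0, active: 1 };
    println!("{}", c.active);
}

Without the attributes, `cargo check` would warn that `NeverConstructed` is never constructed and that `reserved` is never read. Removing the blanket file-level pragmas, as this commit does, lets those warnings surface again for genuinely dead code.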


@@ -470,7 +470,6 @@ impl Map1 for Elu {
     }
 }
 
-#[allow(dead_code)]
 struct Sum<'a>(&'a [usize]);
 impl<'a> Map1 for Sum<'a> {
     fn f<T: DeviceRepr + WithDType + ValidAsZeroBits>(
@@ -507,7 +506,6 @@ impl<'a> Map1 for Sum<'a> {
     }
 }
 
-#[allow(dead_code)]
 struct FastSum<'a>(&'a [usize]);
 impl<'a> Map1 for FastSum<'a> {
     fn f<T: DeviceRepr + WithDType + ValidAsZeroBits>(


@@ -57,7 +57,6 @@ impl std::fmt::Debug for Tensor {
     }
 }
 
-#[allow(dead_code)]
 /// Options for Tensor pretty printing
 pub struct PrinterOptions {
     precision: usize,


@@ -1,5 +1,3 @@
-#![allow(dead_code)]
-
 #[cfg(feature = "mkl")]
 extern crate intel_mkl_src;
 
@@ -86,7 +84,7 @@ impl Default for Config {
 }
 
 impl Config {
-    fn all_mini_lm_l6_v2() -> Self {
+    fn _all_mini_lm_l6_v2() -> Self {
         // https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2/blob/main/config.json
         Self {
             vocab_size: 30522,
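The rename from `all_mini_lm_l6_v2` to `_all_mini_lm_l6_v2` relies on a compiler convention rather than an attribute: items whose names start with an underscore are skipped by the dead-code lint. A tiny illustration (hypothetical function, not from the patch):

fn _kept_for_reference() -> u32 {
    // Never called anywhere, yet rustc emits no dead_code warning
    // because the leading underscore marks it as intentionally unused.
    42
}

fn main() {}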
@@ -121,6 +119,7 @@ fn linear(size1: usize, size2: usize, vb: VarBuilder) -> Result<Linear> {
 }
 
 struct Dropout {
+    #[allow(dead_code)]
     pr: f64,
 }
 
@@ -156,8 +155,6 @@ struct BertEmbeddings {
     token_type_embeddings: Embedding,
     layer_norm: LayerNorm,
     dropout: Dropout,
-    position_ids: Tensor,
-    token_type_ids: Tensor,
 }
 
 impl BertEmbeddings {
@@ -182,17 +179,12 @@ impl BertEmbeddings {
             config.layer_norm_eps,
             vb.pp("LayerNorm"),
         )?;
-        let position_ids: Vec<_> = (0..config.max_position_embeddings as u32).collect();
-        let position_ids = Tensor::new(&position_ids[..], vb.device())?.unsqueeze(0)?;
-        let token_type_ids = position_ids.zeros_like()?;
         Ok(Self {
             word_embeddings,
             position_embeddings: Some(position_embeddings),
             token_type_embeddings,
             layer_norm,
             dropout: Dropout::new(config.hidden_dropout_prob),
-            position_ids,
-            token_type_ids,
         })
     }
 
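The five deleted lines removed a cached pair of tensors: a `[1, max_position_embeddings]` row of position ids and a same-shaped all-zero tensor of token-type ids. If equivalent tensors are ever needed again, the same three candle calls that appear in the deleted code (`Tensor::new`, `unsqueeze`, `zeros_like`) suffice. A hedged standalone sketch, with `max_len` standing in for `config.max_position_embeddings`:

use candle::{Device, Result, Tensor};

fn position_and_token_type_ids(max_len: usize, device: &Device) -> Result<(Tensor, Tensor)> {
    // 0, 1, ..., max_len-1 as u32, then a leading batch dimension:
    // shape goes from [max_len] to [1, max_len].
    let ids: Vec<u32> = (0..max_len as u32).collect();
    let position_ids = Tensor::new(&ids[..], device)?.unsqueeze(0)?;
    // Token-type ids are all zeros with the same shape and dtype.
    let token_type_ids = position_ids.zeros_like()?;
    Ok((position_ids, token_type_ids))
}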


@@ -1,4 +1,3 @@
-#![allow(dead_code)]
 // TODO: Add an offline mode.
 #[cfg(feature = "mkl")]
 extern crate intel_mkl_src;


@@ -28,22 +28,6 @@ fn layer_norm(size: usize, eps: f64, vb: VarBuilder) -> Result<LayerNorm> {
     Ok(LayerNorm::new(weight, bias, eps))
 }
 
-#[derive(Debug)]
-struct Dropout {
-    pr: f64,
-}
-
-impl Dropout {
-    fn new(pr: f64) -> Self {
-        Self { pr }
-    }
-
-    fn forward(&self, x: &Tensor) -> Result<Tensor> {
-        // TODO
-        Ok(x.clone())
-    }
-}
-
 fn embedding(vocab_size: usize, hidden_size: usize, vb: VarBuilder) -> Result<Embedding> {
     let embeddings = vb.get((vocab_size, hidden_size), "weight")?;
     Ok(Embedding::new(embeddings, hidden_size))
@@ -345,7 +329,6 @@ impl FalconAttention {
 struct FalconMlp {
     dense_h_to_4h: Linear,
     dense_4h_to_h: Linear,
-    dropout: Dropout,
 }
 
 impl FalconMlp {
@@ -354,11 +337,9 @@ impl FalconMlp {
         let b = cfg.bias;
         let dense_h_to_4h = linear(h, 4 * h, b, vb.pp("dense_h_to_4h"))?;
         let dense_4h_to_h = linear(4 * h, h, b, vb.pp("dense_4h_to_h"))?;
-        let dropout = Dropout::new(cfg.hidden_dropout);
         Ok(Self {
             dense_h_to_4h,
             dense_4h_to_h,
-            dropout,
         })
     }
 
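Unlike the bert example, which keeps its `Dropout` behind a field-level allow, the falcon file deletes the type outright. That is behavior-preserving because the deleted `forward` was already the identity (`Ok(x.clone())`). Removing a struct field also forces the constructor cleanup seen in the second and third hunks: a struct literal that still mentioned `dropout` would no longer compile. A reduced illustration (hypothetical types, not candle code):

struct Mlp {
    scale: f64,
    // dropout: Dropout,   // once the field is removed here...
}

fn build() -> Mlp {
    Mlp {
        scale: 1.0,
        // dropout,        // ...this initializer must go too, or rustc
        //                 // rejects it (E0560: no field named `dropout`).
    }
}

fn main() {
    println!("{}", build().scale);
}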


@@ -1,16 +1,8 @@
 use super::*;
-use candle::{DType, Device, Result, Shape, Tensor};
+use candle::{Device, Result, Tensor};
 use std::collections::HashMap;
 use std::sync::{Arc, Mutex};
 
-#[allow(dead_code)]
-#[derive(Clone)]
-struct NamedVar {
-    path: String,
-    dtype: DType,
-    shape: Shape,
-}
-
 #[derive(Clone)]
 pub struct VarBuilder {
     path: Vec<String>,
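The import line shrinks in the same hunk because `DType` and `Shape` were referenced only by the deleted `NamedVar`; leaving them in place would just trade the dead-code warning for an `unused_imports` one. A sketch of the after state (assuming, as the hunk suggests, no other use of the two types in the file):

use candle::{Device, Result, Tensor};

fn main() -> Result<()> {
    // The remaining imports are all exercised:
    let t = Tensor::new(&[0u32, 1, 2], &Device::Cpu)?;
    println!("{t}");
    Ok(())
}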


@@ -1,5 +1,3 @@
-#![allow(dead_code)]
-
 use anyhow::Result;
 use candle::Tensor;
 