Mirror of https://github.com/huggingface/candle.git (synced 2025-06-16 02:38:10 +00:00)
Removing cuda default.
This seems very important for many exploring users, who are typically on laptops without GPUs. More README instructions will be added in a follow-up.
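Every example gets the same change: the default device is now chosen at compile time through the `cuda` cargo feature instead of always calling `Device::new_cuda(0)`, and the existing `--cpu` flag still forces CPU execution. Below is a condensed sketch of that selection logic, not the examples' literal code; it assumes the crate is imported as `candle_core`, that errors are handled with an `anyhow`-style `Result`, and that a `cuda` feature is declared in the consuming Cargo.toml (the real examples use a clap-derived `cpu` flag).

    use anyhow::Result;
    use candle_core::Device;

    // Picks the device the same way the patched examples do: the compile-time
    // `cuda` feature sets the default, and an explicit cpu flag overrides it.
    fn select_device(force_cpu: bool) -> Result<Device> {
        // Only compiled when the binary is built with `--features cuda`.
        #[cfg(feature = "cuda")]
        let default_device = Device::new_cuda(0)?;

        // Fallback for builds without the cuda feature: stay on CPU and tell
        // the user how to opt back into the GPU path.
        #[cfg(not(feature = "cuda"))]
        let default_device = {
            println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
            Device::Cpu
        };

        Ok(if force_cpu { Device::Cpu } else { default_device })
    }

With this in place, a plain `cargo run` on an example stays on CPU, while building with `--features cuda` restores the GPU default.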
@@ -495,10 +495,19 @@ struct Args {
 
 impl Args {
     fn build_model_and_tokenizer(&self) -> Result<(BertModel, Tokenizer)> {
+        #[cfg(feature = "cuda")]
+        let default_device = Device::new_cuda(0)?;
+
+        #[cfg(not(feature = "cuda"))]
+        let default_device = {
+            println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
+            Device::Cpu
+        };
+
         let device = if self.cpu {
             Device::Cpu
         } else {
-            Device::new_cuda(0)?
+            default_device
         };
         let default_model = "sentence-transformers/all-MiniLM-L6-v2".to_string();
         let default_revision = "refs/pr/21".to_string();
@@ -119,10 +119,19 @@ struct Args {
 
 fn main() -> Result<()> {
     let args = Args::parse();
+
+    #[cfg(feature = "cuda")]
+    let default_device = Device::new_cuda(0)?;
+
+    #[cfg(not(feature = "cuda"))]
+    let default_device = {
+        println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
+        Device::Cpu
+    };
     let device = if args.cpu {
         Device::Cpu
     } else {
-        Device::new_cuda(0)?
+        default_device
     };
 
     let start = std::time::Instant::now();
@@ -133,10 +133,20 @@ fn main() -> Result<()> {
     use tokenizers::Tokenizer;
 
     let args = Args::parse();
+
+    #[cfg(feature = "cuda")]
+    let default_device = Device::new_cuda(0)?;
+
+    #[cfg(not(feature = "cuda"))]
+    let default_device = {
+        println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
+        Device::Cpu
+    };
+
     let device = if args.cpu {
         Device::Cpu
     } else {
-        Device::new_cuda(0)?
+        default_device
     };
     let config = Config::config_7b();
     let cache = model::Cache::new(!args.no_kv_cache, &config, &device);
@@ -41,10 +41,18 @@ fn main() -> Result<()> {
     use tokenizers::Tokenizer;
 
     let args = Args::parse();
+    #[cfg(feature = "cuda")]
+    let default_device = Device::new_cuda(0)?;
+
+    #[cfg(not(feature = "cuda"))]
+    let default_device = {
+        println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
+        Device::Cpu
+    };
     let device = if args.cpu {
         Device::Cpu
     } else {
-        Device::new_cuda(0)?
+        default_device
     };
 
     let mut tokenizer = Tokenizer::from_file(args.tokenizer).map_err(E::msg)?;
@@ -257,10 +257,20 @@ struct Args {
 
 fn main() -> Result<()> {
     let args = Args::parse();
+
+    #[cfg(feature = "cuda")]
+    let default_device = Device::new_cuda(0)?;
+
+    #[cfg(not(feature = "cuda"))]
+    let default_device = {
+        println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
+        Device::Cpu
+    };
+
     let device = if args.cpu {
         Device::Cpu
     } else {
-        Device::new_cuda(0)?
+        default_device
     };
     let default_model = "openai/whisper-tiny.en".to_string();
     let path = std::path::PathBuf::from(default_model.clone());