Removing cuda default.

This seems very important for a lot of exploring users, who are usually on
laptops without GPUs.

Adding more README instructions in a follow-up.
Nicolas Patry
2023-07-14 16:52:15 +02:00
parent 88f666781f
commit 4ed56d7861
12 changed files with 62 additions and 14 deletions
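
Every hunk below applies the same pattern, so it may help to read it once in consolidated form. The sketch below is illustrative only and is not part of this commit; the free-function name, the `anyhow::Result` alias, and the `candle_core` crate path are assumptions based on how the examples are written.

use anyhow::Result;       // the examples appear to use anyhow's Result (assumption)
use candle_core::Device;  // crate path assumed; the examples import Device from candle

// Illustrative helper mirroring the per-example logic added in this commit:
// the default device is CUDA only when the `cuda` cargo feature is enabled,
// otherwise it falls back to CPU and prints a hint; the `cpu` flag always forces CPU.
fn select_device(cpu: bool) -> Result<Device> {
    // Note: as in the examples, the CUDA device is created eagerly here,
    // before the `cpu` flag is consulted.
    #[cfg(feature = "cuda")]
    let default_device = Device::new_cuda(0)?;
    #[cfg(not(feature = "cuda"))]
    let default_device = {
        println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
        Device::Cpu
    };
    Ok(if cpu { Device::Cpu } else { default_device })
}

In the hunks shown here the commit keeps this logic inline in each example rather than introducing a shared helper.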


@@ -495,10 +495,19 @@ struct Args {
 impl Args {
     fn build_model_and_tokenizer(&self) -> Result<(BertModel, Tokenizer)> {
+        #[cfg(feature = "cuda")]
+        let default_device = Device::new_cuda(0)?;
+        #[cfg(not(feature = "cuda"))]
+        let default_device = {
+            println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
+            Device::Cpu
+        };
         let device = if self.cpu {
             Device::Cpu
         } else {
-            Device::new_cuda(0)?
+            default_device
         };
         let default_model = "sentence-transformers/all-MiniLM-L6-v2".to_string();
         let default_revision = "refs/pr/21".to_string();
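
One consequence of the shape above is worth noting: when the `cuda` feature is compiled in, `Device::new_cuda(0)?` runs before `self.cpu` is checked, so a CUDA build still initializes (and can fail on) the GPU even when the user forces CPU. A lazier variant would check the flag first; the following is only a sketch of an alternative, not what the commit does, and the names and crate path are illustrative.

use anyhow::Result;
use candle_core::Device; // crate path assumed, as above

// Sketch of a lazy alternative: only touch CUDA when CPU was not requested.
fn select_device_lazy(cpu: bool) -> Result<Device> {
    if cpu {
        return Ok(Device::Cpu);
    }
    #[cfg(feature = "cuda")]
    let device = Device::new_cuda(0)?;
    #[cfg(not(feature = "cuda"))]
    let device = {
        println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
        Device::Cpu
    };
    Ok(device)
}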


@@ -119,10 +119,19 @@ struct Args {
 fn main() -> Result<()> {
     let args = Args::parse();
+    #[cfg(feature = "cuda")]
+    let default_device = Device::new_cuda(0)?;
+    #[cfg(not(feature = "cuda"))]
+    let default_device = {
+        println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
+        Device::Cpu
+    };
     let device = if args.cpu {
         Device::Cpu
     } else {
-        Device::new_cuda(0)?
+        default_device
     };
     let start = std::time::Instant::now();


@@ -133,10 +133,20 @@ fn main() -> Result<()> {
     use tokenizers::Tokenizer;
     let args = Args::parse();
+    #[cfg(feature = "cuda")]
+    let default_device = Device::new_cuda(0)?;
+    #[cfg(not(feature = "cuda"))]
+    let default_device = {
+        println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
+        Device::Cpu
+    };
     let device = if args.cpu {
         Device::Cpu
     } else {
-        Device::new_cuda(0)?
+        default_device
     };
     let config = Config::config_7b();
     let cache = model::Cache::new(!args.no_kv_cache, &config, &device);


@@ -41,10 +41,18 @@ fn main() -> Result<()> {
     use tokenizers::Tokenizer;
     let args = Args::parse();
+    #[cfg(feature = "cuda")]
+    let default_device = Device::new_cuda(0)?;
+    #[cfg(not(feature = "cuda"))]
+    let default_device = {
+        println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
+        Device::Cpu
+    };
     let device = if args.cpu {
         Device::Cpu
     } else {
-        Device::new_cuda(0)?
+        default_device
     };
     let mut tokenizer = Tokenizer::from_file(args.tokenizer).map_err(E::msg)?;


@@ -257,10 +257,20 @@ struct Args {
 fn main() -> Result<()> {
     let args = Args::parse();
+    #[cfg(feature = "cuda")]
+    let default_device = Device::new_cuda(0)?;
+    #[cfg(not(feature = "cuda"))]
+    let default_device = {
+        println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
+        Device::Cpu
+    };
     let device = if args.cpu {
         Device::Cpu
     } else {
-        Device::new_cuda(0)?
+        default_device
     };
     let default_model = "openai/whisper-tiny.en".to_string();
     let path = std::path::PathBuf::from(default_model.clone());