From 4ed56d78610647914056974c04fb19c53b00953a Mon Sep 17 00:00:00 2001 From: Nicolas Patry Date: Fri, 14 Jul 2023 16:52:15 +0200 Subject: [PATCH] Removing cuda default. This seems very important for many exploring users, who are usually on laptops without GPUs. Adding more README instructions in a follow-up. --- Cargo.toml | 6 ++++-- candle-core/Cargo.toml | 2 +- candle-examples/Cargo.toml | 2 +- candle-examples/examples/bert/main.rs | 11 ++++++++++- candle-examples/examples/falcon/main.rs | 11 ++++++++++- candle-examples/examples/llama/main.rs | 12 +++++++++++- candle-examples/examples/musicgen/main.rs | 10 +++++++++- candle-examples/examples/whisper/main.rs | 12 +++++++++++- candle-nn/Cargo.toml | 2 +- candle-pyo3/Cargo.toml | 2 +- candle-transformers/Cargo.toml | 2 +- candle-wasm-example/Cargo.toml | 4 ++-- 12 files changed, 62 insertions(+), 14 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index efbc80d8..46e9fe3c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,13 +2,15 @@ members = [ "candle-core", "candle-examples", - "candle-kernels", "candle-hub", "candle-nn", - "candle-pyo3", "candle-transformers", "candle-wasm-example", ] +exclude = [ + "candle-kernels", + "candle-pyo3", +] [profile.release-with-debug] inherits = "release" diff --git a/candle-core/Cargo.toml b/candle-core/Cargo.toml index 6af01139..41ee0a43 100644 --- a/candle-core/Cargo.toml +++ b/candle-core/Cargo.toml @@ -34,6 +34,6 @@ zip = { version = "0.6.6", default-features=false } anyhow = { version = "1", features = ["backtrace"] } [features] -default = ["cuda"] +default = [] cuda = ["dep:cudarc", "dep:candle-kernels"] mkl = ["dep:libc", "dep:intel-mkl-src"] diff --git a/candle-examples/Cargo.toml b/candle-examples/Cargo.toml index 2cbef233..96674844 100644 --- a/candle-examples/Cargo.toml +++ b/candle-examples/Cargo.toml @@ -28,6 +28,6 @@ tokenizers = { version = "0.13.3", default-features=false, features=["onig"] } wav = "1.0.0" [features] -default = ["cuda"] +default = [] cuda = ["candle/cuda", 
"candle-nn/cuda", "candle-transformers/cuda"] mkl = ["dep:intel-mkl-src", "candle/mkl", "candle-nn/mkl", "candle-transformers/mkl"] diff --git a/candle-examples/examples/bert/main.rs b/candle-examples/examples/bert/main.rs index bf419072..aae8bc50 100644 --- a/candle-examples/examples/bert/main.rs +++ b/candle-examples/examples/bert/main.rs @@ -495,10 +495,19 @@ struct Args { impl Args { fn build_model_and_tokenizer(&self) -> Result<(BertModel, Tokenizer)> { + #[cfg(feature = "cuda")] + let default_device = Device::new_cuda(0)?; + + #[cfg(not(feature = "cuda"))] + let default_device = { + println!("Running on CPU, to run on GPU, run this example with `--features cuda`"); + Device::Cpu + }; + let device = if self.cpu { Device::Cpu } else { - Device::new_cuda(0)? + default_device }; let default_model = "sentence-transformers/all-MiniLM-L6-v2".to_string(); let default_revision = "refs/pr/21".to_string(); diff --git a/candle-examples/examples/falcon/main.rs b/candle-examples/examples/falcon/main.rs index 5cc7b065..7e20c7d2 100644 --- a/candle-examples/examples/falcon/main.rs +++ b/candle-examples/examples/falcon/main.rs @@ -119,10 +119,19 @@ struct Args { fn main() -> Result<()> { let args = Args::parse(); + + #[cfg(feature = "cuda")] + let default_device = Device::new_cuda(0)?; + + #[cfg(not(feature = "cuda"))] + let default_device = { + println!("Running on CPU, to run on GPU, run this example with `--features cuda`"); + Device::Cpu + }; let device = if args.cpu { Device::Cpu } else { - Device::new_cuda(0)? 
+ default_device }; let start = std::time::Instant::now(); diff --git a/candle-examples/examples/llama/main.rs b/candle-examples/examples/llama/main.rs index 7ba87c70..203b4606 100644 --- a/candle-examples/examples/llama/main.rs +++ b/candle-examples/examples/llama/main.rs @@ -133,10 +133,20 @@ fn main() -> Result<()> { use tokenizers::Tokenizer; let args = Args::parse(); + + #[cfg(feature = "cuda")] + let default_device = Device::new_cuda(0)?; + + #[cfg(not(feature = "cuda"))] + let default_device = { + println!("Running on CPU, to run on GPU, run this example with `--features cuda`"); + Device::Cpu + }; + let device = if args.cpu { Device::Cpu } else { - Device::new_cuda(0)? + default_device }; let config = Config::config_7b(); let cache = model::Cache::new(!args.no_kv_cache, &config, &device); diff --git a/candle-examples/examples/musicgen/main.rs b/candle-examples/examples/musicgen/main.rs index cb94d3df..90b464c3 100644 --- a/candle-examples/examples/musicgen/main.rs +++ b/candle-examples/examples/musicgen/main.rs @@ -41,10 +41,18 @@ fn main() -> Result<()> { use tokenizers::Tokenizer; let args = Args::parse(); + #[cfg(feature = "cuda")] + let default_device = Device::new_cuda(0)?; + + #[cfg(not(feature = "cuda"))] + let default_device = { + println!("Running on CPU, to run on GPU, run this example with `--features cuda`"); + Device::Cpu + }; let device = if args.cpu { Device::Cpu } else { - Device::new_cuda(0)? 
+ default_device }; let mut tokenizer = Tokenizer::from_file(args.tokenizer).map_err(E::msg)?; diff --git a/candle-examples/examples/whisper/main.rs b/candle-examples/examples/whisper/main.rs index d0329f4d..09ef4593 100644 --- a/candle-examples/examples/whisper/main.rs +++ b/candle-examples/examples/whisper/main.rs @@ -257,10 +257,20 @@ struct Args { fn main() -> Result<()> { let args = Args::parse(); + + #[cfg(feature = "cuda")] + let default_device = Device::new_cuda(0)?; + + #[cfg(not(feature = "cuda"))] + let default_device = { + println!("Running on CPU, to run on GPU, run this example with `--features cuda`"); + Device::Cpu + }; + let device = if args.cpu { Device::Cpu } else { - Device::new_cuda(0)? + default_device }; let default_model = "openai/whisper-tiny.en".to_string(); let path = std::path::PathBuf::from(default_model.clone()); diff --git a/candle-nn/Cargo.toml b/candle-nn/Cargo.toml index 73a4954c..c7339ce0 100644 --- a/candle-nn/Cargo.toml +++ b/candle-nn/Cargo.toml @@ -19,6 +19,6 @@ intel-mkl-src = {version="0.8.1", optional=true, features = ["mkl-dynamic-lp64-i anyhow = { version = "1", features = ["backtrace"] } [features] -default = ["cuda"] +default = [] cuda = ["candle/cuda"] mkl = ["dep:intel-mkl-src", "candle/mkl"] diff --git a/candle-pyo3/Cargo.toml b/candle-pyo3/Cargo.toml index 225fd221..d173ec07 100644 --- a/candle-pyo3/Cargo.toml +++ b/candle-pyo3/Cargo.toml @@ -21,5 +21,5 @@ pyo3 = { version = "0.19.0", features = ["extension-module"] } half = { version = "2.3.1", features = ["num-traits"] } [features] -default = ["cuda"] +default = [] cuda = ["candle/cuda"] diff --git a/candle-transformers/Cargo.toml b/candle-transformers/Cargo.toml index 048e0f6b..8f5c409a 100644 --- a/candle-transformers/Cargo.toml +++ b/candle-transformers/Cargo.toml @@ -20,6 +20,6 @@ rand = "0.8.5" wav = "1.0.0" [features] -default = ["cuda"] +default = [] cuda = ["candle/cuda", "candle-nn/cuda"] mkl = ["dep:intel-mkl-src", "candle/mkl", "candle-nn/mkl"] diff 
--git a/candle-wasm-example/Cargo.toml b/candle-wasm-example/Cargo.toml index 57e97584..e4a2319c 100644 --- a/candle-wasm-example/Cargo.toml +++ b/candle-wasm-example/Cargo.toml @@ -14,8 +14,8 @@ readme = "README.md" crate-type = ["cdylib"] [dependencies] -candle = { path = "../candle-core", default-features=false } -candle-nn = { path = "../candle-nn", default-features=false } +candle = { path = "../candle-core" } +candle-nn = { path = "../candle-nn" } wasm-bindgen = "0.2.87" getrandom = { version = "0.2", features = ["js"] } tokenizers = { version = "0.13.3", default-features=false, features=["unstable_wasm"] }