Removing cuda default.

This seems very important for many exploring users, who are typically on laptops
without GPUs.

Adding more README instructions in a follow-up.
This commit is contained in:
Nicolas Patry 2023-07-14 16:52:15 +02:00
parent 88f666781f
commit 4ed56d7861
12 changed files with 62 additions and 14 deletions

View File

@@ -2,13 +2,15 @@
members = [
"candle-core",
"candle-examples",
"candle-kernels",
"candle-hub",
"candle-nn",
"candle-pyo3",
"candle-transformers",
"candle-wasm-example",
]
exclude = [
"candle-kernels",
"candle-pyo3",
]
[profile.release-with-debug]
inherits = "release"

View File

@@ -34,6 +34,6 @@ zip = { version = "0.6.6", default-features=false }
anyhow = { version = "1", features = ["backtrace"] }
[features]
default = ["cuda"]
default = []
cuda = ["dep:cudarc", "dep:candle-kernels"]
mkl = ["dep:libc", "dep:intel-mkl-src"]

View File

@@ -28,6 +28,6 @@ tokenizers = { version = "0.13.3", default-features=false, features=["onig"] }
wav = "1.0.0"
[features]
default = ["cuda"]
default = []
cuda = ["candle/cuda", "candle-nn/cuda", "candle-transformers/cuda"]
mkl = ["dep:intel-mkl-src", "candle/mkl", "candle-nn/mkl", "candle-transformers/mkl"]

View File

@@ -495,10 +495,19 @@ struct Args {
impl Args {
fn build_model_and_tokenizer(&self) -> Result<(BertModel, Tokenizer)> {
#[cfg(feature = "cuda")]
let default_device = Device::new_cuda(0)?;
#[cfg(not(feature = "cuda"))]
let default_device = {
println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
Device::Cpu
};
let device = if self.cpu {
Device::Cpu
} else {
Device::new_cuda(0)?
default_device
};
let default_model = "sentence-transformers/all-MiniLM-L6-v2".to_string();
let default_revision = "refs/pr/21".to_string();

View File

@@ -119,10 +119,19 @@ struct Args {
fn main() -> Result<()> {
let args = Args::parse();
#[cfg(feature = "cuda")]
let default_device = Device::new_cuda(0)?;
#[cfg(not(feature = "cuda"))]
let default_device = {
println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
Device::Cpu
};
let device = if args.cpu {
Device::Cpu
} else {
Device::new_cuda(0)?
default_device
};
let start = std::time::Instant::now();

View File

@@ -133,10 +133,20 @@ fn main() -> Result<()> {
use tokenizers::Tokenizer;
let args = Args::parse();
#[cfg(feature = "cuda")]
let default_device = Device::new_cuda(0)?;
#[cfg(not(feature = "cuda"))]
let default_device = {
println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
Device::Cpu
};
let device = if args.cpu {
Device::Cpu
} else {
Device::new_cuda(0)?
default_device
};
let config = Config::config_7b();
let cache = model::Cache::new(!args.no_kv_cache, &config, &device);

View File

@@ -41,10 +41,18 @@ fn main() -> Result<()> {
use tokenizers::Tokenizer;
let args = Args::parse();
#[cfg(feature = "cuda")]
let default_device = Device::new_cuda(0)?;
#[cfg(not(feature = "cuda"))]
let default_device = {
println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
Device::Cpu
};
let device = if args.cpu {
Device::Cpu
} else {
Device::new_cuda(0)?
default_device
};
let mut tokenizer = Tokenizer::from_file(args.tokenizer).map_err(E::msg)?;

View File

@@ -257,10 +257,20 @@ struct Args {
fn main() -> Result<()> {
let args = Args::parse();
#[cfg(feature = "cuda")]
let default_device = Device::new_cuda(0)?;
#[cfg(not(feature = "cuda"))]
let default_device = {
println!("Running on CPU, to run on GPU, run this example with `--features cuda`");
Device::Cpu
};
let device = if args.cpu {
Device::Cpu
} else {
Device::new_cuda(0)?
default_device
};
let default_model = "openai/whisper-tiny.en".to_string();
let path = std::path::PathBuf::from(default_model.clone());

View File

@@ -19,6 +19,6 @@ intel-mkl-src = {version="0.8.1", optional=true, features = ["mkl-dynamic-lp64-i
anyhow = { version = "1", features = ["backtrace"] }
[features]
default = ["cuda"]
default = []
cuda = ["candle/cuda"]
mkl = ["dep:intel-mkl-src", "candle/mkl"]

View File

@@ -21,5 +21,5 @@ pyo3 = { version = "0.19.0", features = ["extension-module"] }
half = { version = "2.3.1", features = ["num-traits"] }
[features]
default = ["cuda"]
default = []
cuda = ["candle/cuda"]

View File

@@ -20,6 +20,6 @@ rand = "0.8.5"
wav = "1.0.0"
[features]
default = ["cuda"]
default = []
cuda = ["candle/cuda", "candle-nn/cuda"]
mkl = ["dep:intel-mkl-src", "candle/mkl", "candle-nn/mkl"]

View File

@@ -14,8 +14,8 @@ readme = "README.md"
crate-type = ["cdylib"]
[dependencies]
candle = { path = "../candle-core", default-features=false }
candle-nn = { path = "../candle-nn", default-features=false }
candle = { path = "../candle-core" }
candle-nn = { path = "../candle-nn" }
wasm-bindgen = "0.2.87"
getrandom = { version = "0.2", features = ["js"] }
tokenizers = { version = "0.13.3", default-features=false, features=["unstable_wasm"] }