From c6019e9635526e2e4feb565b37d3ce72aa4e81ae Mon Sep 17 00:00:00 2001
From: laurent
Date: Wed, 25 Sep 2024 12:08:20 +0200
Subject: [PATCH] Use the newly minted gguf file.

---
 candle-examples/examples/flux/main.rs | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/candle-examples/examples/flux/main.rs b/candle-examples/examples/flux/main.rs
index 641b72f5..e8609a8f 100644
--- a/candle-examples/examples/flux/main.rs
+++ b/candle-examples/examples/flux/main.rs
@@ -167,8 +167,10 @@ fn run(args: Args) -> Result<()> {
     println!("{timesteps:?}");
     if quantized {
         let model_file = match model {
-            Model::Schnell => bf_repo.get("flux1-schnell.safetensors")?,
-            Model::Dev => bf_repo.get("flux1-dev.safetensors")?,
+            Model::Schnell => api
+                .repo(hf_hub::Repo::model("lmz/candle-flux".to_string()))
+                .get("flux1-schnell.gguf")?,
+            Model::Dev => todo!(),
         };
         let vb = candle_transformers::quantized_var_builder::VarBuilder::from_gguf(
             model_file, &device,