Mirror of https://github.com/huggingface/candle.git (synced 2025-06-18 11:37:11 +00:00)
[ONNX] Do not generate values for constants. (#1272)
* Do not generate values for constants.
* Add an ONNX-based example using SqueezeNet.
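Here the "constants" are the graph initializers: tensors whose values are already stored in the ONNX file, so the evaluator no longer needs placeholder inputs synthesized for them. The sketch below distills that filtering idea; it is an illustration rather than code from this commit, and the helper name `runtime_input_names` (plus the assumption that `candle_onnx::read_file` returns the prost-generated `candle_onnx::onnx::ModelProto`) is ours.

```rust
use std::collections::HashSet;

// Sketch only: list the graph inputs that still need user-provided tensors,
// skipping the ones backed by an initializer (i.e. the constants).
fn runtime_input_names(model: &candle_onnx::onnx::ModelProto) -> Vec<String> {
    let graph = model.graph.as_ref().expect("model has no graph");
    // Initializer names identify the constant tensors baked into the model.
    let constants: HashSet<&str> = graph
        .initializer
        .iter()
        .map(|i| i.name.as_str())
        .collect();
    graph
        .input
        .iter()
        .filter(|input| !constants.contains(input.name.as_str()))
        .map(|input| input.name.clone())
        .collect()
}
```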
@@ -16,6 +16,7 @@ candle-datasets = { path = "../candle-datasets", version = "0.3.0" }
 candle-nn = { path = "../candle-nn", version = "0.3.0" }
 candle-transformers = { path = "../candle-transformers", version = "0.3.0" }
 candle-flash-attn = { path = "../candle-flash-attn", version = "0.3.0", optional = true }
+candle-onnx = { path = "../candle-onnx", version = "0.3.0" }
 cudarc = { workspace = true, optional = true }
 half = { workspace = true, optional = true }
 image = { workspace = true }
candle-examples/examples/squeezenet-onnx/README.md (new file, 10 lines)
@@ -0,0 +1,10 @@
+## Using ONNX models in Candle
+
+This example demonstrates how to run ONNX-based models in Candle; the model
+used here is a small SqueezeNet variant.
+
+You can run the example with the following command:
+
+```bash
+cargo run --example squeezenet-onnx --release -- --image candle-examples/examples/yolo-v8/assets/bike.jpg
+```
candle-examples/examples/squeezenet-onnx/main.rs (new file, 57 lines)
@@ -0,0 +1,57 @@
+#[cfg(feature = "mkl")]
+extern crate intel_mkl_src;
+
+#[cfg(feature = "accelerate")]
+extern crate accelerate_src;
+
+use candle::{IndexOp, D};
+use clap::Parser;
+
+#[derive(Parser)]
+struct Args {
+    #[arg(long)]
+    image: String,
+
+    #[arg(long)]
+    model: Option<String>,
+}
+
+pub fn main() -> anyhow::Result<()> {
+    let args = Args::parse();
+    let image = candle_examples::imagenet::load_image224(args.image)?;
+
+    println!("loaded image {image:?}");
+
+    let model = match args.model {
+        Some(model) => std::path::PathBuf::from(model),
+        None => hf_hub::api::sync::Api::new()?
+            .model("lmz/candle-onnx".into())
+            .get("squeezenet1.1-7.onnx")?,
+    };
+
+    let model = candle_onnx::read_file(model)?;
+    let graph = model.graph.as_ref().unwrap();
+    let mut inputs = std::collections::HashMap::new();
+    inputs.insert(graph.input[0].name.to_string(), image.unsqueeze(0)?);
+    let mut outputs = candle_onnx::simple_eval(&model, inputs)?;
+    let logits = outputs.remove(&graph.output[0].name).unwrap();
+    let prs = candle_nn::ops::softmax(&logits, D::Minus1)?
+        .i(0)?
+        .to_vec1::<f32>()?;
+
+    // Sort the predictions and take the top 5
+    let mut top: Vec<_> = prs.iter().enumerate().collect();
+    top.sort_by(|a, b| b.1.partial_cmp(a.1).unwrap());
+    let top = top.into_iter().take(5).collect::<Vec<_>>();
+
+    // Print the top predictions
+    for &(i, p) in &top {
+        println!(
+            "{:50}: {:.2}%",
+            candle_examples::imagenet::CLASSES[i],
+            p * 100.0
+        );
+    }
+
+    Ok(())
+}
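As a usage note: thanks to the optional `--model` flag defined in `Args` above, the example can also run against an already-downloaded ONNX file instead of fetching it from the Hugging Face Hub. The local path below is only an illustration:

```bash
cargo run --example squeezenet-onnx --release -- \
  --image candle-examples/examples/yolo-v8/assets/bike.jpg \
  --model ./squeezenet1.1-7.onnx
```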
@@ -35,33 +35,34 @@ pub fn main() -> Result<()> {
         }
         Command::SimpleEval { file } => {
             let model = candle_onnx::read_file(file)?;
-            let inputs = model
-                .graph
-                .as_ref()
-                .unwrap()
-                .input
-                .iter()
-                .map(|input| {
-                    use candle_onnx::onnx::tensor_proto::DataType;
+            let graph = model.graph.as_ref().unwrap();
+            let constants: std::collections::HashSet<_> =
+                graph.initializer.iter().map(|i| i.name.as_str()).collect();
+            let mut inputs = std::collections::HashMap::new();
+            for input in graph.input.iter() {
+                use candle_onnx::onnx::tensor_proto::DataType;
+                if constants.contains(input.name.as_str()) {
+                    continue;
+                }
 
                 let type_ = input.r#type.as_ref().expect("no type for input");
                 let type_ = type_.value.as_ref().expect("no type.value for input");
                 let value = match type_ {
                     candle_onnx::onnx::type_proto::Value::TensorType(tt) => {
                         let dt = match DataType::try_from(tt.elem_type) {
                             Ok(dt) => match candle_onnx::dtype(dt) {
                                 Some(dt) => dt,
                                 None => {
                                     anyhow::bail!(
                                         "unsupported 'value' data-type {dt:?} for {}",
                                         input.name
                                     )
                                 }
                             },
                             type_ => anyhow::bail!("unsupported input type {type_:?}"),
                         };
                         let shape = tt.shape.as_ref().expect("no tensortype.shape for input");
                         let dims = shape
                             .dim
                             .iter()
                             .map(|dim| match dim.value.as_ref().expect("no dim value") {
@@ -69,16 +70,16 @@ pub fn main() -> Result<()> {
                                 candle_onnx::onnx::tensor_shape_proto::dimension::Value::DimParam(_) => anyhow::bail!("DimParam is unsupported for input {}", input.name),
                             })
                             .collect::<Result<Vec<usize>>>()?;
                         Tensor::zeros(dims, dt, &Device::Cpu)?
                     }
                     type_ => anyhow::bail!("unsupported input type {type_:?}"),
                 };
-                Ok::<_, anyhow::Error>((input.name.clone(), value))
-            })
-            .collect::<Result<_>>()?;
+                println!("input {}: {value:?}", input.name);
+                inputs.insert(input.name.clone(), value);
+            }
             let outputs = candle_onnx::simple_eval(&model, inputs)?;
             for (name, value) in outputs.iter() {
-                println!("{name}: {value:?}")
+                println!("output {name}: {value:?}")
             }
         }
     }
@@ -382,7 +382,7 @@ pub fn simple_eval(
                 Some([p]) => *p as usize,
                 Some([p1, p2, p3, p4]) => {
                     if p1 != p2 || p1 != p3 || p1 != p4 {
-                        bail!("pads to be the same {pads:?} {}", node.name)
+                        bail!("pads have to be the same {pads:?} {}", node.name)
                     }
                     *p1 as usize
                 }
@@ -396,7 +396,7 @@ pub fn simple_eval(
                 Some([p1, p2]) => {
                     if p1 != p2 {
                         bail!(
-                            "strides to be the same on both axis {pads:?} {}",
+                            "strides have to be the same on both axis {pads:?} {}",
                             node.name
                         )
                     }
@@ -412,7 +412,7 @@ pub fn simple_eval(
                 Some([p1, p2]) => {
                     if p1 != p2 {
                         bail!(
-                            "dilations to be the same on both axis {pads:?} {}",
+                            "dilations have to be the same on both axis {pads:?} {}",
                             node.name
                         )
                     }