mirror of
https://github.com/huggingface/candle.git
synced 2025-06-20 04:00:28 +00:00
ONNX: add ArgMin, ArgMax and LeakyRelu (#2246)
* Add basic RandomUniform implementation * Use is_some to check if seed is present * Added Exp operator implementation * Added ArgMin operator implementation * Added tests for ArgMin * ArgMin now returns a tensor with i64 * Added tests from pytorch examples * Added ArgMax operator implementation * Added tests for ArgMax * Added LeakyRelu implementation * Added a test for LeakyRelu * Typo fix * Fix a weird automatic RustRover change --------- Co-authored-by: Mateusz Okulus <mmokulus@gmail.com>
This commit is contained in:
@ -1027,6 +1027,59 @@ pub fn simple_eval(
|
||||
};
|
||||
values.insert(node.output[0].clone(), output);
|
||||
}
|
||||
"ArgMin" => {
    // ONNX ArgMin: index of the minimum element along `axis`, returned
    // as an i64 tensor; `keepdims` (default 1) controls rank retention.
    let input = get(&node.input[0])?;
    // `axis` defaults to 0 and may be negative (counted from the back).
    let axis_attr: i64 = get_attr_opt(node, "axis")?.copied().unwrap_or(0);
    let rank: i64 = input.rank().try_into().unwrap();
    // Valid ONNX axes lie in [-rank, rank-1].
    if !(-rank..rank).contains(&axis_attr) {
        bail!(
            "axis ({}) out of accepted range [-rank, rank-1] which was [{}, {}]",
            axis_attr,
            -rank,
            rank - 1
        )
    }
    let dim = input.normalize_axis(axis_attr)?;
    let keepdims: i64 = get_attr_opt(node, "keepdims")?.copied().unwrap_or(1);
    let select_last_index: i64 =
        get_attr_opt(node, "select_last_index")?.copied().unwrap_or(0);
    // candle's argmin always picks the first occurrence, so the
    // select_last_index=1 variant cannot be honored.
    if select_last_index == 1 {
        bail!("select_last_index for ArgMin is currently not supported")
    }
    let indices = match keepdims {
        1 => input.argmin_keepdim(dim)?,
        _ => input.argmin(dim)?,
    };
    // ONNX mandates int64 output indices.
    values.insert(node.output[0].clone(), indices.to_dtype(DType::I64)?);
}
|
||||
"ArgMax" => {
    // ONNX ArgMax: index of the maximum element along `axis`, returned
    // as an i64 tensor; `keepdims` (default 1) controls rank retention.
    let input = get(&node.input[0])?;
    // `axis` defaults to 0 and may be negative (counted from the back).
    let axis_i64: i64 = get_attr_opt(node, "axis")?.copied().unwrap_or(0);
    let rank_i64: i64 = input.rank().try_into().unwrap();
    // Valid ONNX axes lie in [-rank, rank-1].
    if axis_i64 < -rank_i64 || axis_i64 >= rank_i64 {
        bail!(
            "axis ({}) out of accepted range [-rank, rank-1] which was [{}, {}]",
            axis_i64,
            -rank_i64,
            rank_i64 - 1
        )
    }
    let axis = input.normalize_axis(axis_i64)?;
    let keepdims: i64 = get_attr_opt(node, "keepdims")?.copied().unwrap_or(1);
    let select_last_index: i64 =
        get_attr_opt(node, "select_last_index")?.copied().unwrap_or(0);
    if select_last_index == 1 {
        // Fixed: this message previously said "ArgMin" — a copy-paste
        // leftover from the ArgMin arm that misreported the failing op.
        bail!("select_last_index for ArgMax is currently not supported")
    }
    let output = if keepdims == 1 {
        input.argmax_keepdim(axis)?
    } else {
        input.argmax(axis)?
    }
    // ONNX mandates int64 output indices.
    .to_dtype(DType::I64)?;
    values.insert(node.output[0].clone(), output);
}
|
||||
"LeakyRelu" => {
    // ONNX LeakyRelu: element-wise x for x >= 0, alpha * x otherwise.
    // Only floating-point inputs are accepted.
    let input = get(&node.input[0])?;
    let dt = input.dtype();
    // Exhaustive dtype gate: float types pass through, integer types
    // are rejected (adding a DType variant forces revisiting this match).
    match dt {
        DType::BF16 | DType::F16 | DType::F32 | DType::F64 => (),
        DType::U8 | DType::U32 | DType::I64 => bail!(
            "unsupported dtype {}, only float types are allowed for LeakyRelu",
            dt.as_str()
        ),
    }
    // `alpha` defaults to 0.01 per the ONNX spec; widen f32 -> f64 for
    // the candle op.
    let alpha: f64 = get_attr_opt::<f32>(node, "alpha")?
        .copied()
        .unwrap_or(0.01)
        .into();
    let output = candle_nn::ops::leaky_relu(input, alpha)?;
    values.insert(node.output[0].clone(), output);
}
|
||||
op_type => bail!("unsupported op_type {op_type} for op {node:?}"),
|
||||
}
|
||||
}
|
||||
|
Reference in New Issue
Block a user