mirror of
https://github.com/huggingface/candle.git
synced 2025-06-16 10:38:54 +00:00
Add some backprop tests.
This commit is contained in:
@ -169,7 +169,7 @@ impl Tensor {
|
||||
}
|
||||
|
||||
pub fn var<A: crate::device::NdArray>(array: A, device: Device) -> Result<Self> {
|
||||
Self::new_impl(array, device, false)
|
||||
Self::new_impl(array, device, true)
|
||||
}
|
||||
|
||||
pub(crate) fn same_shape_binary_op(&self, rhs: &Self, op: &'static str) -> Result<&Shape> {
|
||||
@ -329,6 +329,7 @@ impl Tensor {
|
||||
|
||||
pub fn backward(&self) -> Result<HashMap<TensorId, Tensor>> {
|
||||
let sorted_nodes = self.sorted_nodes();
|
||||
println!("{}", sorted_nodes.len());
|
||||
let mut grads = HashMap::new();
|
||||
grads.insert(self.id, self.ones_like());
|
||||
for node in sorted_nodes.iter() {
|
||||
|
19
tests/grad_tests.rs
Normal file
19
tests/grad_tests.rs
Normal file
@ -0,0 +1,19 @@
|
||||
use anyhow::{Context, Result};
|
||||
use candle::{Device, Tensor};
|
||||
|
||||
#[test]
fn simple_grad() -> Result<()> {
    // Build y = x^2 + 5x + 5 from a gradient-tracked variable x, then
    // verify both the forward values and the gradient dy/dx = 2x + 5.
    let x = Tensor::var(&[3f32, 1., 4.], Device::Cpu)?;
    let five = Tensor::new(&[5f32, 5., 5.], Device::Cpu)?;

    // Forward pass: y = x*x + 5*x + 5, element-wise.
    let five_x = x.mul(&five)?;
    let x_squared = x.mul(&x)?;
    let y = x_squared.add(&five_x)?.add(&five)?;

    // Backward pass: collect gradients keyed by tensor id.
    let grads = y.backward()?;
    let grad_x = grads.get(&x.id()).context("no grad for x")?;

    // x is unchanged by the computation.
    assert_eq!(x.to_vec1::<f32>()?, [3., 1., 4.]);
    // y = x^2 + 5.x + 5
    assert_eq!(y.to_vec1::<f32>()?, [29., 11., 41.]);
    // dy/dx = 2.x + 5
    assert_eq!(grad_x.to_vec1::<f32>()?, [11., 7., 13.]);
    Ok(())
}
|
Reference in New Issue
Block a user