Add a backprop test.

This commit is contained in:
laurent
2023-06-20 20:54:35 +01:00
parent c4c303b6f1
commit a419a9da72
2 changed files with 21 additions and 1 deletions

19
tests/grad_tests.rs Normal file
View File

@ -0,0 +1,19 @@
use anyhow::{Context, Result};
use candle::{Device, Tensor};
#[test]
fn simple_grad() -> Result<()> {
    // Evaluate y = x^2 + 5x + 5 at x = [3, 1, 4] and check that
    // backprop yields the analytic gradient dy/dx = 2x + 5.
    let x = Tensor::var(&[3f32, 1., 4.], Device::Cpu)?;
    let five = Tensor::new(&[5f32, 5., 5.], Device::Cpu)?;
    let x_sq = x.mul(&x)?;
    let five_x = x.mul(&five)?;
    let y = x_sq.add(&five_x)?.add(&five)?;
    let grads = y.backward()?;
    let dy_dx = grads.get(&x.id()).context("no grad for x")?;
    // Input must be untouched by the forward/backward passes.
    assert_eq!(x.to_vec1::<f32>()?, [3., 1., 4.]);
    // Forward: y = x^2 + 5x + 5 evaluated elementwise.
    assert_eq!(y.to_vec1::<f32>()?, [29., 11., 41.]);
    // Backward: dy/dx = 2x + 5 evaluated elementwise.
    assert_eq!(dy_dx.to_vec1::<f32>()?, [11., 7., 13.]);
    Ok(())
}