Abstract the gradient storage.

Author: laurent
Date: 2023-06-21 14:29:48 +01:00
parent 68f525f321
commit 7adffafeda
5 changed files with 87 additions and 37 deletions


@@ -6,7 +6,7 @@ fn simple_grad() -> Result<()> {
     let x = Tensor::var(&[3f32, 1., 4.], Device::Cpu)?;
     let y = (((&x * &x)? + &x * 5f64)? + 4f64)?;
     let grads = y.backward()?;
-    let grad_x = grads.get(&x.id()).context("no grad for x")?;
+    let grad_x = grads.get(&x).context("no grad for x")?;
     assert_eq!(x.to_vec1::<f32>()?, [3., 1., 4.]);
     // y = x^2 + 5.x + 4
     assert_eq!(y.to_vec1::<f32>()?, [28., 10., 40.]);

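The hunk above replaces grads.get(&x.id()) with grads.get(&x): backward() now returns a dedicated gradient store that is queried with the tensor itself, keeping the raw id an implementation detail. The store's implementation is not shown in this diff; a minimal sketch of such a wrapper, assuming the name GradStore, a HashMap field, and crate-internal access to candle's Tensor and TensorId types (with TensorId implementing Hash + Eq), might look like:

use std::collections::HashMap;

// Hypothetical sketch only; the actual type in this commit is not shown here.
// Internally keyed by TensorId, but callers pass a &Tensor, so the id never
// leaks into test or user code.
pub struct GradStore(HashMap<TensorId, Tensor>);

impl GradStore {
    // Create an empty gradient store.
    pub fn new() -> Self {
        Self(HashMap::new())
    }

    // Fetch the gradient recorded for `tensor`, if any.
    pub fn get(&self, tensor: &Tensor) -> Option<&Tensor> {
        self.0.get(&tensor.id())
    }

    // Record the gradient of `tensor`, returning any previous value.
    pub fn insert(&mut self, tensor: &Tensor, grad: Tensor) -> Option<Tensor> {
        self.0.insert(tensor.id(), grad)
    }
}

With a wrapper of this shape, the test only changes at the call site: grads.get(&x) instead of grads.get(&x.id()).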

@@ -2,7 +2,7 @@ use candle::{DType, Device, Result, Tensor};
 #[test]
 fn zeros() -> Result<()> {
-    let tensor = Tensor::zeros((5, 2), DType::F32, Device::Cpu);
+    let tensor = Tensor::zeros((5, 2), DType::F32, Device::Cpu)?;
     let (dim1, dim2) = tensor.shape().r2()?;
     assert_eq!(dim1, 5);
     assert_eq!(dim2, 2);
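
Note the added `?` on the Tensor::zeros call: the constructor returns a Result, which the test can propagate because it itself returns Result<()>. Outside such a function the error must be handled explicitly; a small sketch, reusing the test's imports (make_buffer is a hypothetical helper name):

use candle::{DType, Device, Result, Tensor};

fn make_buffer() -> Result<Tensor> {
    // Tensor::zeros is fallible, so propagate its error with `?`.
    Tensor::zeros((5, 2), DType::F32, Device::Cpu)
}

fn main() {
    // In a non-Result context, match on the error instead of using `?`.
    match make_buffer() {
        Ok(t) => println!("created tensor with shape {:?}", t.shape()),
        Err(e) => eprintln!("failed to create tensor: {e}"),
    }
}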