Mirror of https://github.com/huggingface/candle.git
Lazy detach. (#1242)
@@ -920,6 +920,10 @@ impl BackpropOp {
         };
         Self(op)
     }
+
+    pub(crate) fn is_none(&self) -> bool {
+        self.0.is_none()
+    }
 }
 
 impl std::ops::Deref for BackpropOp {
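The first hunk adds a small accessor to BackpropOp, the newtype wrapping the optional op recorded for gradient tracking; detach uses it below to tell whether a tensor carries any backprop history. A minimal standalone sketch of the pattern, assuming a placeholder Op enum rather than candle's real one:

// Standalone sketch, not the candle source: `Op` is a stand-in for candle's
// real op enum; only the newtype-over-Option pattern is the point here.
enum Op {
    Add,
}

struct BackpropOp(Option<Op>);

impl BackpropOp {
    // Constructor for a tensor with no recorded op (no gradient history).
    fn none() -> Self {
        BackpropOp(None)
    }

    // The accessor added in the hunk above: true when there is no op to
    // backpropagate through, i.e. the tensor is already "detached".
    fn is_none(&self) -> bool {
        self.0.is_none()
    }
}

// Mirrors the Deref impl shown as context in the diff.
impl std::ops::Deref for BackpropOp {
    type Target = Option<Op>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

fn main() {
    assert!(BackpropOp::none().is_none());
    assert!(!BackpropOp(Some(Op::Add)).is_none());
}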
@@ -1807,7 +1807,12 @@ impl Tensor {
 
     /// Returns a new tensor detached from the current graph, gradient are not propagated through
     /// this new node. The storage of this tensor is shared with the initial tensor.
+    ///
+    /// If the tensor is already detached from the computation graph, the same tensor is returned.
     pub fn detach(&self) -> Result<Tensor> {
+        if self.op.is_none() && !self.is_variable {
+            Ok(self.clone())
+        } else {
             let tensor_ = Tensor_ {
                 id: TensorId::new(),
                 storage: self.storage.clone(),
@@ -1819,6 +1824,7 @@ impl Tensor {
             };
             Ok(Tensor(Arc::new(tensor_)))
         }
+    }
 
     /// If the target device is the same as the tensor device, only a shallow copy is performed.
     pub fn to_device(&self, device: &Device) -> Result<Tensor> {
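With both hunks applied, detach only allocates a new node when there is something to detach: a tensor with no recorded op that is not a variable is returned as-is (a cheap clone of the same Arc), otherwise a new Tensor_ shares the storage but carries no op. A hedged usage sketch against the candle-core public API as it stood around this commit (detach still returns Result<Tensor> here; the surrounding method names are assumptions based on the crate's public surface, not part of this diff):

// Sketch assuming the candle-core crate with `Device`, `Tensor`, and `Var`.
use candle_core::{Device, Tensor, Var};

fn main() -> candle_core::Result<()> {
    let dev = Device::Cpu;
    let x = Var::new(&[3f32], &dev)?;

    // Gradients flow through y.
    let y = x.as_tensor().sqr()?;
    // z shares y's storage but has no recorded op, so gradients stop here.
    let z = y.detach()?;

    // A plain tensor has no op and is not a variable: detaching it is now
    // just a clone of the same node rather than a fresh allocation.
    let w = Tensor::new(&[1f32, 2f32], &dev)?;
    let w2 = w.detach()?;

    let grads = y.sum_all()?.backward()?;
    assert!(grads.get(&x).is_some()); // x still receives a gradient through y
    let _ = (z, w2);
    Ok(())
}

Because the storage is shared in both branches, detach never copies tensor data; only the autograd bookkeeping changes.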