Mirror of https://github.com/huggingface/candle.git (synced 2025-06-18 03:28:50 +00:00)
Llama more training (#297)
* Rework the var-builder to handle initializations.
* Add some helper functions for layer creation.
* Improve the layer initializations.
* Get initialized variables.
* Precompute the rot embeddings when training llamas (see the sketch after this list).
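The last bullet refers to computing the rotary (RoPE) cos/sin tables once up front instead of rebuilding them on every forward pass during training. A minimal plain-Rust sketch of that precomputation; the names `head_dim`, `max_seq_len`, and `theta` are illustrative and not taken from the commit:

/// Precompute RoPE cos/sin tables for every position up to `max_seq_len`.
/// Returned as (cos, sin), each of length `max_seq_len * head_dim / 2`.
fn precompute_rope(head_dim: usize, max_seq_len: usize, theta: f32) -> (Vec<f32>, Vec<f32>) {
    let half = head_dim / 2;
    let mut cos = Vec::with_capacity(max_seq_len * half);
    let mut sin = Vec::with_capacity(max_seq_len * half);
    for pos in 0..max_seq_len {
        for i in 0..half {
            // Standard RoPE frequency: theta^(-2i / head_dim).
            let freq = theta.powf(-2.0 * i as f32 / head_dim as f32);
            let angle = pos as f32 * freq;
            cos.push(angle.cos());
            sin.push(angle.sin());
        }
    }
    (cos, sin)
}

Precomputing these tables once trades a small amount of memory for avoiding repeated trigonometric work inside the training loop.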
@@ -28,3 +28,15 @@ impl Embedding {
         Ok(values)
     }
 }
+
+pub fn embedding(in_size: usize, out_size: usize, vb: crate::VarBuilder) -> Result<Embedding> {
+    let embeddings = vb.get_or_init(
+        (in_size, out_size),
+        "weight",
+        crate::Init::Randn {
+            mean: 0.,
+            stdev: 1.,
+        },
+    )?;
+    Ok(Embedding::new(embeddings, out_size))
+}
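For context, a hedged usage sketch of the new `embedding` helper inside a model constructor. The `Config` struct and its fields are hypothetical, and the `embedding` helper, `Embedding`, `Result`, and `crate::VarBuilder` from the file above are assumed to be in scope:

// Hypothetical config; the real model's configuration is not shown in this commit.
struct Config {
    vocab_size: usize,
    hidden_size: usize,
}

fn build_token_embedding(cfg: &Config, vb: crate::VarBuilder) -> Result<Embedding> {
    // Per the commit message, the reworked var-builder handles initialization here:
    // the "weight" tensor is created with the given Init when missing, or the
    // already-stored value is reused.
    embedding(cfg.vocab_size, cfg.hidden_size, vb)
}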