Mirror of https://github.com/huggingface/candle.git (synced 2025-06-19 19:58:35 +00:00)
Llama more training (#297)
* Rework the var-builder to handle initializations.
* Add some helper functions for layer creation.
* Improve the layer initializations.
* Get initialized variables.
* Precompute the rot embeddings when training llamas.
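Taken together, these changes let a training loop create initialized layers through a VarBuilder backed by a VarMap and then hand the resulting variables to an optimizer. Below is a minimal sketch of that flow, assuming the present-day candle_core / candle_nn crate names and API (VarMap::new, VarBuilder::from_varmap, linear, VarBuilder::pp); the exact signatures at the time of this commit may differ.

use candle_core::{DType, Device, Result, Tensor};
use candle_nn::{linear, Module, VarBuilder, VarMap};

fn main() -> Result<()> {
    let device = Device::Cpu;
    // VarMap owns the trainable variables; VarBuilder hands out initialized
    // tensors keyed by a dotted path, so layer creation and initialization
    // happen in one place.
    let varmap = VarMap::new();
    let vb = VarBuilder::from_varmap(&varmap, DType::F32, &device);
    // The `linear` helper builds the weight (and bias) through the builder.
    let layer = linear(4, 2, vb.pp("fc1"))?;
    let xs = Tensor::zeros((1, 4), DType::F32, &device)?;
    let ys = layer.forward(&xs)?;
    println!("{ys}");
    // Every variable created through the builder is visible in the map,
    // which is what an optimizer such as SGD trains over.
    println!("{} trainable tensors", varmap.all_vars().len());
    Ok(())
}

In current candle-nn, the exported SGD optimizer is built from the Vec<Var> returned by varmap.all_vars(), which is how these initialized variables become trainable.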
@@ -15,9 +15,9 @@ pub mod vision;
 pub use activation::Activation;
 pub use conv::{Conv1d, Conv1dConfig};
-pub use embedding::Embedding;
-pub use layer_norm::LayerNorm;
-pub use linear::Linear;
+pub use embedding::{embedding, Embedding};
+pub use init::Init;
+pub use layer_norm::{layer_norm, LayerNorm};
+pub use linear::{linear, linear_no_bias, Linear};
 pub use optim::SGD;
-pub use var_builder::VarBuilder;
+pub use var_builder::{VarBuilder, VarMap};
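The last bullet of the commit message refers to the Llama training path precomputing the rotary-embedding cos/sin tables once instead of rebuilding them every step. Below is a rough sketch of that general technique using the candle tensor API; precompute_rotary, head_dim, and max_seq_len are illustrative names rather than the code added by this commit, and the tensor calls follow the current candle_core API.

use candle_core::{DType, Device, Result, Tensor};

// Precompute the cos/sin tables used by rotary embeddings so they can be
// reused across training steps. `precompute_rotary` is a hypothetical helper,
// not the function added by this commit.
fn precompute_rotary(
    head_dim: usize,
    max_seq_len: usize,
    device: &Device,
) -> Result<(Tensor, Tensor)> {
    // theta_i = 10000^(-2i / head_dim) for i in 0..head_dim/2
    let theta: Vec<f32> = (0..head_dim)
        .step_by(2)
        .map(|i| 1f32 / 10000f32.powf(i as f32 / head_dim as f32))
        .collect();
    let theta_len = theta.len();
    let theta = Tensor::from_vec(theta, (1, theta_len), device)?;
    let positions = Tensor::arange(0u32, max_seq_len as u32, device)?
        .to_dtype(DType::F32)?
        .reshape((max_seq_len, 1))?;
    // Outer product position x theta -> (max_seq_len, head_dim / 2).
    let freqs = positions.matmul(&theta)?;
    Ok((freqs.cos()?, freqs.sin()?))
}

fn main() -> Result<()> {
    let device = Device::Cpu;
    let (cos, sin) = precompute_rotary(64, 2048, &device)?;
    println!("cos: {:?}, sin: {:?}", cos.shape(), sin.shape());
    Ok(())
}

Caching these two tables up front means each training step only needs to index into them for the current sequence length.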