mirror of https://github.com/huggingface/candle.git
synced 2025-06-19 03:54:56 +00:00

Commit message: Ok.
@@ -15,7 +15,7 @@ use anyhow::{Error as E, Result};
 use clap::Parser;

 use candle::{DType, Device, Tensor};
-use candle_hub::{Repo, api::Api, RepoType};
+use candle_hub::{api::Api, Repo, RepoType};
 use std::collections::HashMap;
 use std::sync::{Arc, Mutex};

@@ -439,7 +439,10 @@ async fn main() -> Result<()> {
         .to_vec();

     let mut filenames = vec![];
-    for rfilename in ["model-00001-of-00002.safetensors", "model-00002-of-00002.safetensors"]{
+    for rfilename in [
+        "model-00001-of-00002.safetensors",
+        "model-00002-of-00002.safetensors",
+    ] {
         let filename = api.get(&repo, rfilename).await?;
         filenames.push(filename);
     }
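
Note: the two file names follow the Hub convention for sharded checkpoints, `model-XXXXX-of-NNNNN.safetensors`. A minimal sketch of deriving those names when the shard count is known up front — in practice the repo's index JSON is the authoritative list, and `num_shards` here is an assumption:

    fn shard_names(num_shards: usize) -> Vec<String> {
        // Shard indices are 1-based and zero-padded to five digits.
        (1..=num_shards)
            .map(|i| format!("model-{i:05}-of-{num_shards:05}.safetensors"))
            .collect()
    }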

@@ -1,13 +1,16 @@
+use super::*;
+use candle::{Device, Result, Tensor};
+use half::f16;
 use memmap2::MmapOptions;
-use candle::{Device, Result, Shape, Tensor, WithDType};
+use safetensors::{
+    tensor::{Dtype, TensorView},
+    SafeTensors,
+};
 use std::fs::File;
 use std::path::PathBuf;
-use super::*;
-use safetensors::{SafeTensors, tensor::{Dtype, TensorView}};
-use half::f16;

-fn convert<'a>(view: TensorView<'a>, device: &Device) -> Result<Tensor>{
-    match view.dtype(){
+fn convert(view: TensorView<'_>, device: &Device) -> Result<Tensor> {
+    match view.dtype() {
         Dtype::F16 => {
             let v = view.data();
             if (v.as_ptr() as usize) % 2 == 0 {
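
Note: the `% 2 == 0` check guards the zero-copy path. safetensors stores raw little-endian tensor bytes after a JSON header, so an f16 payload is not guaranteed to start on a 2-byte boundary, and reinterpreting an unaligned `&[u8]` as `&[f16]` would be undefined behavior. When the pointer is unaligned, the code falls back to decoding byte pairs into a fresh vector (the `c` seen at the top of the next hunk). A minimal sketch of that fallback:

    use half::f16;

    // Decode little-endian f16 values two bytes at a time; works at any
    // alignment because it never reinterprets the original slice in place.
    fn decode_f16_unaligned(v: &[u8]) -> Vec<f16> {
        v.chunks_exact(2)
            .map(|b| f16::from_le_bytes([b[0], b[1]]))
            .collect()
    }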

@@ -25,76 +28,82 @@ fn convert<'a>(view: TensorView<'a>, device: &Device) -> Result<Tensor>{
             }
             Tensor::from_slice(&c, view.shape(), device)
         }

         }
-        dt => todo!("Unhandled dtype {dt:?}")
+        dt => todo!("Unhandled dtype {dt:?}"),
     }
 }

-pub struct VarBuilder<'a>{
+pub struct VarBuilder<'a> {
     routing: HashMap<String, usize>,
     safetensors: Vec<SafeTensors<'a>>,
     device: Device,
 }

-impl<'a> VarBuilder<'a>{
-    pub fn new(safetensors: Vec<SafeTensors<'a>>, device: Device) -> Self{
+impl<'a> VarBuilder<'a> {
+    pub fn new(safetensors: Vec<SafeTensors<'a>>, device: Device) -> Self {
         let mut routing = HashMap::new();
-        for (index, sf) in safetensors.iter().enumerate(){
-            for k in sf.names(){
+        for (index, sf) in safetensors.iter().enumerate() {
+            for k in sf.names() {
                 routing.insert(k.to_string(), index);
             }
         }

-        Self{
+        Self {
             safetensors,
             device,
-            routing
+            routing,
         }
     }

-    pub fn get(&self, tensor_name: &str) -> Result<Tensor>{
+    pub fn get(&self, tensor_name: &str) -> Result<Tensor> {
         // Unwrap or 0 just to let the proper error flow.
         let index = self.routing.get(tensor_name).unwrap_or(&0);
         let view = self.safetensors[*index].tensor(tensor_name).unwrap();
         let tensor = convert(view, &self.device)?;
         Ok(tensor)
-
     }
 }

-impl Linear{
-    fn load(prefix: &str, vb: &VarBuilder) -> Result<Self>{
+impl Linear {
+    fn load(prefix: &str, vb: &VarBuilder) -> Result<Self> {
         let weight = vb.get(&format!("{prefix}.weight"))?;
         Ok(Self::new(weight))
     }

-    fn load_multi(prefixes: &[&str], vb: &VarBuilder) -> Result<Self>{
-        let weights: Vec<_> = prefixes.iter().map(|p| vb.get(&format!("{p}.weight")).unwrap()).collect();
-        println!("shapes {:?}", weights.iter().map(|w| w.shape()).collect::<Vec<_>>());
+    fn load_multi(prefixes: &[&str], vb: &VarBuilder) -> Result<Self> {
+        let weights: Vec<_> = prefixes
+            .iter()
+            .map(|p| vb.get(&format!("{p}.weight")).unwrap())
+            .collect();
         let weight = Tensor::cat(&weights, 0)?;
         Ok(Self::new(weight))
     }
 }

-impl RmsNorm{
-    fn load(prefix: &str, vb: &VarBuilder) -> Result<Self>{
+impl RmsNorm {
+    fn load(prefix: &str, vb: &VarBuilder) -> Result<Self> {
         let scale = vb.get(&format!("{prefix}.weight"))?;
         Ok(Self::new(scale))
     }
 }

-impl CausalSelfAttention{
-    fn load(prefix: &str, vb: &VarBuilder, cache: &Cache, config: &Config) -> Result<Self>{
-        let c_attn = Linear::load_multi(&[&format!("{prefix}.q_proj"), &format!("{prefix}.k_proj"), &format!("{prefix}.v_proj")], vb)?;
+impl CausalSelfAttention {
+    fn load(prefix: &str, vb: &VarBuilder, cache: &Cache, config: &Config) -> Result<Self> {
+        let c_attn = Linear::load_multi(
+            &[
+                &format!("{prefix}.q_proj"),
+                &format!("{prefix}.k_proj"),
+                &format!("{prefix}.v_proj"),
+            ],
+            vb,
+        )?;
         let o_proj = Linear::load(&format!("{prefix}.o_proj"), vb)?;
-        Ok(Self::new(c_attn,o_proj, config.n_head, cache))
+        Ok(Self::new(c_attn, o_proj, config.n_head, cache))
     }
 }

-impl Mlp{
-    fn load(prefix: &str, vb: &VarBuilder, config: &Config) -> Result<Self>{
+impl Mlp {
+    fn load(prefix: &str, vb: &VarBuilder) -> Result<Self> {
         let c_fc1 = Linear::load(&format!("{prefix}.gate_proj"), vb)?;
         let c_fc2 = Linear::load(&format!("{prefix}.up_proj"), vb)?;
         let c_proj = Linear::load(&format!("{prefix}.down_proj"), vb)?;
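
Note: `VarBuilder` indexes every tensor name across all shards once, so `get` is a single `HashMap` lookup rather than a scan of each file, and `load_multi` fuses the separate q/k/v projection matrices into one weight by concatenating along dim 0, so attention can produce Q, K and V with a single matmul. A minimal sketch of that fusion, with arbitrary stand-in sizes:

    use candle::{DType, Device, Result, Tensor};

    // Three (out, in) matrices stacked row-wise into one (3 * out, in)
    // weight, mirroring what Linear::load_multi does with shard tensors.
    fn main() -> Result<()> {
        let dev = Device::Cpu;
        let q = Tensor::zeros((8, 4), DType::F32, &dev)?;
        let k = Tensor::zeros((8, 4), DType::F32, &dev)?;
        let v = Tensor::zeros((8, 4), DType::F32, &dev)?;
        let qkv = Tensor::cat(&[&q, &k, &v], 0)?;
        assert_eq!(qkv.shape().dims(), &[24, 4]);
        Ok(())
    }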

@@ -102,27 +111,43 @@ impl Mlp{
     }
 }

-impl Block{
-    fn load(prefix: &str, vb: &VarBuilder, cache: &Cache, config: &Config) -> Result<Self>{
+impl Block {
+    fn load(prefix: &str, vb: &VarBuilder, cache: &Cache, config: &Config) -> Result<Self> {
         let attn = CausalSelfAttention::load(&format!("{prefix}.self_attn"), vb, cache, config)?;
-        let mlp = Mlp::load(&format!("{prefix}.mlp"), vb, config)?;
+        let mlp = Mlp::load(&format!("{prefix}.mlp"), vb)?;
         let input_layernorm = RmsNorm::load(&format!("{prefix}.input_layernorm"), vb)?;
-        let post_attention_layernorm = RmsNorm::load(&format!("{prefix}.post_attention_layernorm"), vb)?;
-        Ok(Self::new(input_layernorm, attn, post_attention_layernorm, mlp))
+        let post_attention_layernorm =
+            RmsNorm::load(&format!("{prefix}.post_attention_layernorm"), vb)?;
+        Ok(Self::new(
+            input_layernorm,
+            attn,
+            post_attention_layernorm,
+            mlp,
+        ))
     }
 }

-impl Llama{
-    pub fn load(device: &Device, filenames: &[PathBuf], cache: &Cache, config: &Config) -> Result<Self>{
-        let handles: Vec<_> = filenames.iter().map(|f| {
-            let file = File::open(f).unwrap();
-            let buffer = unsafe { MmapOptions::new().map(&file).unwrap() };
-            buffer
-        }).collect();
-        let tensors: Vec<_> = handles.iter().map(|h| {
-            let tensors = SafeTensors::deserialize(h).unwrap();
-            tensors
-        }).collect();
+impl Llama {
+    pub fn load(
+        device: &Device,
+        filenames: &[PathBuf],
+        cache: &Cache,
+        config: &Config,
+    ) -> Result<Self> {
+        let handles: Vec<_> = filenames
+            .iter()
+            .map(|f| {
+                let file = File::open(f).unwrap();
+                unsafe { MmapOptions::new().map(&file).unwrap() }
+            })
+            .collect();
+        let tensors: Vec<_> = handles
+            .iter()
+            .map(|h| {
+                let tensors = SafeTensors::deserialize(h).unwrap();
+                tensors
+            })
+            .collect();

         let vb = VarBuilder::new(tensors, device.clone());

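
Note: the two-stage collect is a borrow-checker requirement, not a style choice: `SafeTensors::deserialize` borrows the mapped bytes zero-copy, so the `Mmap` handles must be collected first and kept alive for as long as the `SafeTensors` values (and hence the `VarBuilder`) exist. The `unsafe` block is the usual memmap2 caveat: the mapping is undefined behavior if the file is truncated or rewritten while mapped. A minimal standalone sketch of the pattern, with "weights.safetensors" as a placeholder path:

    use memmap2::MmapOptions;
    use safetensors::SafeTensors;
    use std::fs::File;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        let file = File::open("weights.safetensors")?;
        // SAFETY: relies on the file not being modified while mapped.
        let mmap = unsafe { MmapOptions::new().map(&file)? };
        // Borrows from `mmap`: no tensor data is copied here.
        let tensors = SafeTensors::deserialize(&mmap)?;
        for name in tensors.names() {
            println!("{name}: {:?}", tensors.tensor(name)?.shape());
        }
        Ok(())
    }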

@@ -130,15 +155,10 @@ impl Llama{
         let wte = Embedding::new(embedding);
         let lm_head = Linear::load("lm_head", &vb)?;
         let norm = RmsNorm::load("model.norm", &vb)?;
-        let blocks: Vec<_> = (0..config.n_layer).map(|i| Block::load(&format!("model.layers.{i}"), &vb, cache, config).unwrap()).collect();
+        let blocks: Vec<_> = (0..config.n_layer)
+            .map(|i| Block::load(&format!("model.layers.{i}"), &vb, cache, config).unwrap())
+            .collect();

-        Ok(Self::new(
-            wte,
-            blocks,
-            norm,
-            lm_head
-        ))
+        Ok(Self::new(wte, blocks, norm, lm_head))
     }
 }
-
-
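
Note: with the per-module loaders above, assembling the model reduces to stacking `config.n_layer` blocks under the `model.layers.{i}` prefixes plus the embedding, final norm, and LM head. A hedged usage sketch; `Config::config_7b` and the `Cache::new` signature are assumptions about the surrounding example code, named here only for illustration:

    fn load_llama(filenames: &[std::path::PathBuf]) -> anyhow::Result<Llama> {
        let device = Device::Cpu;
        let config = Config::config_7b();
        let cache = Cache::new(&config, &device);
        Ok(Llama::load(&device, filenames, &cache, &config)?)
    }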

@@ -397,7 +397,6 @@ impl Api {
             let parallel_failures_semaphore = parallel_failures_semaphore.clone();
             let progress = progressbar.clone();
             handles.push(tokio::spawn(async move {
-                println!("Start {start:?} - {stop:?}");
                 let mut chunk = Self::download_chunk(&client, &url, &filename, start, stop).await;
                 let mut i = 0;
                 if parallel_failures > 0 {
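
Note: this loop splits a download into byte ranges fetched by concurrent tokio tasks, with `parallel_failures_semaphore` bounding how many retries may be in flight at once; the removed `println!` was per-chunk debug noise. A hedged sketch of what a range fetch like `download_chunk` might boil down to — the real candle_hub implementation may differ:

    use reqwest::header::RANGE;

    async fn fetch_range(
        client: &reqwest::Client,
        url: &str,
        start: usize,
        stop: usize,
    ) -> reqwest::Result<bytes::Bytes> {
        client
            .get(url)
            // HTTP range requests use inclusive byte offsets.
            .header(RANGE, format!("bytes={start}-{stop}"))
            .send()
            .await?
            .bytes()
            .await
    }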

@@ -53,9 +53,9 @@ impl Cache {
         let commit_hash = std::fs::read_to_string(commit_path).ok()?;
         let mut pointer_path = self.pointer_path(repo, &commit_hash);
         pointer_path.push(filename);
-        if pointer_path.exists(){
+        if pointer_path.exists() {
             Some(pointer_path)
-        }else{
+        } else {
             None
         }
     }
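
Note: this mirrors the Hub cache layout: a refs file maps a revision (branch or tag) to a commit hash, and the actual files live under a per-commit snapshot directory; returning `None` signals the caller to download. A minimal sketch of the lookup, where the `refs/` and `snapshots/` directory names are assumptions based on that layout:

    use std::path::{Path, PathBuf};

    fn lookup(cache_root: &Path, revision: &str, filename: &str) -> Option<PathBuf> {
        // refs/<revision> holds the commit hash the revision points at.
        let commit_hash = std::fs::read_to_string(cache_root.join("refs").join(revision)).ok()?;
        let pointer_path = cache_root
            .join("snapshots")
            .join(commit_hash.trim())
            .join(filename);
        pointer_path.exists().then_some(pointer_path)
    }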

@@ -150,7 +150,7 @@ impl Repo {

     /// The normalized folder name of the repo within the cache directory
     pub fn folder_name(&self) -> String {
-        let prefix = match self.repo_type{
+        let prefix = match self.repo_type {
             RepoType::Model => "models",
             RepoType::Dataset => "datasets",
             RepoType::Space => "spaces",
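
Note: the prefix namespaces the three repo types so a model, a dataset, and a space sharing one id cannot collide in the same cache directory. A hedged sketch of how the full folder name might be derived from the prefix; the `--` separator is an assumption for illustration, not necessarily what candle_hub uses:

    fn folder_name(prefix: &str, repo_id: &str) -> String {
        // e.g. ("models", "user/llama-weights") -> "models--user--llama-weights"
        format!("{prefix}--{}", repo_id.replace('/', "--"))
    }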