Also fix the aspect ratio in the wasm example. (#556)

* Also fix the aspect ratio in the wasm example.

* Add the yolo lib.

* Update the build script.
This commit is contained in:
Laurent Mazare
2023-08-22 22:20:08 +01:00
committed by GitHub
parent f9ecc84477
commit 7687a0f453
6 changed files with 68 additions and 26 deletions

1
.gitignore vendored
View File

@ -25,6 +25,7 @@ flamegraph.svg
*.swp *.swp
trace-*.json trace-*.json
candle-wasm-examples/*/build
candle-wasm-examples/*/*.bin candle-wasm-examples/*/*.bin
candle-wasm-examples/*/*.jpeg candle-wasm-examples/*/*.jpeg
candle-wasm-examples/*/*.wav candle-wasm-examples/*/*.wav

View File

@ -0,0 +1,2 @@
# Compile the wasm example crate in release mode for the wasm32 browser target.
cargo build --target wasm32-unknown-unknown --release
# Generate the JS glue + .wasm artifacts into ./build for loading from a web page.
wasm-bindgen ../../target/wasm32-unknown-unknown/release/m.wasm --out-dir build --target web

View File

@ -0,0 +1,25 @@
use candle_wasm_example_yolo::worker::Model as M;
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
pub struct Model {
// Worker-side model wrapped so it can be exposed to JavaScript through wasm-bindgen.
inner: M,
}
#[wasm_bindgen]
impl Model {
    /// Constructs a `Model` from raw safetensors weight bytes.
    ///
    /// Failures from the worker-side loader are surfaced to JavaScript
    /// as a `JsError`.
    #[wasm_bindgen(constructor)]
    pub fn new(data: Vec<u8>) -> Result<Model, JsError> {
        // `?` converts the worker-side error into a JsError for the JS caller.
        Ok(Self {
            inner: M::load_(&data)?,
        })
    }

    /// Runs detection on an encoded image buffer and returns the detected
    /// bounding boxes serialized as a JSON string.
    #[wasm_bindgen]
    pub fn run(&self, image: Vec<u8>) -> Result<String, JsError> {
        let detections = self.inner.run(image)?;
        // Hand the result to JS as JSON text rather than a structured object.
        Ok(serde_json::to_string(&detections)?)
    }
}
fn main() {}

View File

@ -1,6 +1,6 @@
mod app; mod app;
mod coco_classes; mod coco_classes;
mod model; mod model;
mod worker; pub mod worker;
pub use app::App; pub use app::App;
pub use worker::Worker; pub use worker::Worker;

View File

@ -5,8 +5,8 @@ use candle_nn::{
}; };
use image::DynamicImage; use image::DynamicImage;
const CONFIDENCE_THRESHOLD: f32 = 0.5; const CONFIDENCE_THRESHOLD: f32 = 0.25;
const NMS_THRESHOLD: f32 = 0.4; const NMS_THRESHOLD: f32 = 0.45;
// Model architecture from https://github.com/ultralytics/ultralytics/issues/189 // Model architecture from https://github.com/ultralytics/ultralytics/issues/189
// https://github.com/tinygrad/tinygrad/blob/master/examples/yolov8.py // https://github.com/tinygrad/tinygrad/blob/master/examples/yolov8.py

View File

@ -27,46 +27,62 @@ pub struct ModelData {
pub weights: Vec<u8>, pub weights: Vec<u8>,
} }
struct Model { pub struct Model {
model: YoloV8, model: YoloV8,
} }
impl Model { impl Model {
fn run( pub fn run(&self, image_data: Vec<u8>) -> Result<Vec<Vec<Bbox>>> {
&self,
_link: &WorkerLink<Worker>,
_id: HandlerId,
image_data: Vec<u8>,
) -> Result<Vec<Vec<Bbox>>> {
console_log!("image data: {}", image_data.len()); console_log!("image data: {}", image_data.len());
let image_data = std::io::Cursor::new(image_data); let image_data = std::io::Cursor::new(image_data);
let original_image = image::io::Reader::new(image_data) let original_image = image::io::Reader::new(image_data)
.with_guessed_format()? .with_guessed_format()?
.decode() .decode()
.map_err(candle::Error::wrap)?; .map_err(candle::Error::wrap)?;
let image = { let (width, height) = {
let data = original_image let w = original_image.width() as usize;
.resize_exact(640, 640, image::imageops::FilterType::Triangle) let h = original_image.height() as usize;
.to_rgb8() if w < h {
.into_raw(); let w = w * 640 / h;
Tensor::from_vec(data, (640, 640, 3), &Device::Cpu)?.permute((2, 0, 1))? // Sizes have to be divisible by 32.
(w / 32 * 32, 640)
} else {
let h = h * 640 / w;
(640, h / 32 * 32)
}
}; };
let image = (image.unsqueeze(0)?.to_dtype(DType::F32)? * (1. / 255.))?; let image_t = {
let predictions = self.model.forward(&image)?.squeeze(0)?; let img = original_image.resize_exact(
width as u32,
height as u32,
image::imageops::FilterType::CatmullRom,
);
let data = img.to_rgb8().into_raw();
Tensor::from_vec(
data,
(img.height() as usize, img.width() as usize, 3),
&Device::Cpu,
)?
.permute((2, 0, 1))?
};
let image_t = (image_t.unsqueeze(0)?.to_dtype(DType::F32)? * (1. / 255.))?;
let predictions = self.model.forward(&image_t)?.squeeze(0)?;
console_log!("generated predictions {predictions:?}"); console_log!("generated predictions {predictions:?}");
let bboxes = report(&predictions, original_image, 640, 640)?; let bboxes = report(&predictions, original_image, width, height)?;
Ok(bboxes) Ok(bboxes)
} }
}
impl Model { pub fn load_(weights: &[u8]) -> Result<Self> {
fn load(md: ModelData) -> Result<Self> {
let dev = &Device::Cpu; let dev = &Device::Cpu;
let weights = safetensors::tensor::SafeTensors::deserialize(&md.weights)?; let weights = safetensors::tensor::SafeTensors::deserialize(weights)?;
let vb = VarBuilder::from_safetensors(vec![weights], DType::F32, dev); let vb = VarBuilder::from_safetensors(vec![weights], DType::F32, dev);
let model = YoloV8::load(vb, Multiples::s(), 80)?; let model = YoloV8::load(vb, Multiples::s(), 80)?;
Ok(Self { model }) Ok(Self { model })
} }
pub fn load(md: ModelData) -> Result<Self> {
Self::load_(&md.weights)
}
} }
pub struct Worker { pub struct Worker {
@ -112,9 +128,7 @@ impl yew_agent::Worker for Worker {
WorkerInput::Run(image_data) => match &mut self.model { WorkerInput::Run(image_data) => match &mut self.model {
None => Err("model has not been set yet".to_string()), None => Err("model has not been set yet".to_string()),
Some(model) => { Some(model) => {
let result = model let result = model.run(image_data).map_err(|e| e.to_string());
.run(&self.link, id, image_data)
.map_err(|e| e.to_string());
Ok(WorkerOutput::ProcessingDone(result)) Ok(WorkerOutput::ProcessingDone(result))
} }
}, },