Bump the crate versions to v0.2.3. (#886)

* Bump the crate version.

* Also update the Python bindings.
Author: Laurent Mazare
Date: 2023-09-18 12:14:03 +01:00
Committed by: GitHub
Parent: 12696b7b2d
Commit: 7dd8e12472
16 changed files with 51 additions and 37 deletions

candle-flash-attn/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "candle-flash-attn"
-version = "0.2.2"
+version = "0.2.3"
 edition = "2021"

 description = "Flash attention layer for the candle ML framework."
@@ -11,7 +11,7 @@ license = "MIT OR Apache-2.0"
 readme = "README.md"

 [dependencies]
-candle = { path = "../candle-core", features = ["cuda"], version = "0.2.2", package = "candle-core" }
+candle = { path = "../candle-core", features = ["cuda"], version = "0.2.3", package = "candle-core" }
 half = { version = "2.3.1", features = ["num-traits"] }

 [build-dependencies]
@@ -21,4 +21,4 @@ rayon = "1.7.0"

 [dev-dependencies]
 anyhow = { version = "1", features = ["backtrace"] }
-candle-nn = { path = "../candle-nn", version = "0.2.2", features = ["cuda"] }
+candle-nn = { path = "../candle-nn", version = "0.2.3", features = ["cuda"] }
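
Note: the reason a release like this touches 16 files is that each crate's Cargo.toml repeats both its own version and the versions of its path dependencies (candle-core, candle-nn). A minimal sketch of one way to centralize that, assuming the crates sit in a single Cargo workspace; the workspace layout and manifest snippets below are illustrative, not taken from this commit, but the [workspace.package] / workspace = true inheritance is standard Cargo:

# Workspace root Cargo.toml (hypothetical layout)
[workspace]
members = ["candle-core", "candle-nn", "candle-flash-attn"]

[workspace.package]
version = "0.2.3"
edition = "2021"
license = "MIT OR Apache-2.0"

[workspace.dependencies]
# Versions of the in-tree crates are declared once here.
candle = { path = "candle-core", package = "candle-core", version = "0.2.3" }
candle-nn = { path = "candle-nn", version = "0.2.3" }

# candle-flash-attn/Cargo.toml (sketch)
[package]
name = "candle-flash-attn"
version.workspace = true   # inherits 0.2.3 from the root manifest
edition.workspace = true
license.workspace = true

[dependencies]
# workspace = true pulls the path/version from [workspace.dependencies];
# extra features can still be added per crate.
candle = { workspace = true, features = ["cuda"] }
half = { version = "2.3.1", features = ["num-traits"] }

[dev-dependencies]
candle-nn = { workspace = true, features = ["cuda"] }

With this layout, a version bump edits only the version key and the [workspace.dependencies] entries in the root manifest, rather than every crate's Cargo.toml.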