Update for 0.3.1. (#1324)

Laurent Mazare
2023-11-11 18:48:52 +00:00
committed by GitHub
parent f1e678b39c
commit a209ce8ceb
20 changed files with 54 additions and 54 deletions

candle-flash-attn/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "candle-flash-attn"
-version = "0.3.0"
+version = "0.3.1"
 edition = "2021"
 description = "Flash attention layer for the candle ML framework."
@@ -11,7 +11,7 @@ license = "MIT OR Apache-2.0"
 readme = "README.md"
 [dependencies]
-candle = { path = "../candle-core", features = ["cuda"], version = "0.3.0", package = "candle-core" }
+candle = { path = "../candle-core", features = ["cuda"], version = "0.3.1", package = "candle-core" }
 half = { version = "2.3.1", features = ["num-traits"] }
 [build-dependencies]
@@ -21,4 +21,4 @@ rayon = "1.7.0"
 [dev-dependencies]
 anyhow = { version = "1", features = ["backtrace"] }
-candle-nn = { path = "../candle-nn", version = "0.3.0", features = ["cuda"] }
+candle-nn = { path = "../candle-nn", version = "0.3.1", features = ["cuda"] }