From 7ba8a34821e5603ddb383fcd2e7c2607617a2027 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 18 Jul 2024 16:50:38 +0000
Subject: [PATCH] build(deps): bump flash-attn from 2.5.9.post1 to 2.6.1

Bumps [flash-attn](https://github.com/Dao-AILab/flash-attention) from 2.5.9.post1 to 2.6.1.
- [Release notes](https://github.com/Dao-AILab/flash-attention/releases)
- [Commits](https://github.com/Dao-AILab/flash-attention/compare/v2.5.9.post1...v2.6.1)

---
updated-dependencies:
- dependency-name: flash-attn
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]
---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 4e3321c..11f416a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -65,7 +65,7 @@ dev = [
 flash_attn = [
     # it's easier to install flash-attn from wheel rather than like this as extra
     # "https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.6/flash_attn-2.5.6+cu118torch2.0cxx11abiFALSE-cp311-cp311-linux_x86_64.whl",
-    "flash-attn==2.5.9.post1",
+    "flash-attn==2.6.1",
     "packaging", # FIXME: temporary, until https://github.com/Dao-AILab/flash-attention/pull/937 is released
     "ninja"
 ]
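
For reviewers who want to confirm the bump resolved correctly, here is a minimal sketch (not part of the patch) that checks the installed flash-attn version using only the standard-library `importlib.metadata`; it assumes the project was installed with the `flash_attn` extra in the current environment.

```python
# Minimal sanity check, assuming flash-attn was installed via the
# flash_attn extra (or a prebuilt wheel) in this environment.
import importlib.metadata

try:
    # "flash-attn" is the distribution name pinned in pyproject.toml.
    version = importlib.metadata.version("flash-attn")
    assert version == "2.6.1", f"expected flash-attn 2.6.1, found {version}"
    print(f"flash-attn {version} is installed")
except importlib.metadata.PackageNotFoundError:
    print("flash-attn is not installed; install the flash_attn extra or a wheel")
```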