Remove flash-attn from vllm extra (#542)
alvarobartt committed Apr 16, 2024
1 parent d5d7d3e commit d9e0aa7
Showing 1 changed file with 1 addition and 1 deletion.
pyproject.toml (2 changes: 1 addition & 1 deletion)

@@ -66,7 +66,7 @@ mistralai = ["mistralai >= 0.1.0"]
ollama = ["ollama >= 0.1.7"]
openai = ["openai >= 1.0.0"]
vertexai = ["google-cloud-aiplatform >= 1.38.0"]
vllm = ["vllm >= 0.2.1", "filelock >= 3.13.4", "flash-attn >= 2.5.7"]
vllm = ["vllm >= 0.2.1", "filelock >= 3.13.4"]

[project.urls]
Documentation = "https://distilabel.argilla.io/"
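With flash-attn dropped from the extra, installing distilabel[vllm] no longer pulls in FlashAttention, so projects that still need it must declare the dependency themselves. A minimal sketch of a downstream pyproject.toml (the project name and distilabel version pin are hypothetical; the flash-attn pin mirrors the one removed above):

# Sketch: keep FlashAttention as an explicit dependency,
# since the distilabel "vllm" extra no longer installs it.
[project]
name = "my-pipeline"              # hypothetical project name
version = "0.1.0"
dependencies = [
    "distilabel[vllm]",           # still brings in vllm and filelock
    "flash-attn >= 2.5.7",        # same pin that was removed from the extra
]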
