Update README.md
README.md (CHANGED)
@@ -7,7 +7,7 @@ language:
 |------------------------------------------------|-----------------|------------------|----------------|---------------------------------------------------------------------------|
 | [Flash-Attention 2.7.4.post1](https://huggingface.co/lym00/win_amd64_prebuilt_wheels/blob/main/flash_attn-2.7.4.post1-cp312-cp312-win_amd64.whl) | 3.12 | 2.8.0.dev | 12.8.1 | [Dao-AILab/flash-attention](https://github.com/Dao-AILab/flash-attention) |
 | [SageAttention2.2.0](https://huggingface.co/lym00/win_amd64_prebuilt_wheels/blob/main/sageattention-2.2.0-cp312-cp312-win_amd64.whl) | 3.12 | 2.8.0.dev | 12.8.1 | [jt-zhang/SageAttention2_plus](https://huggingface.co/jt-zhang/SageAttention2_plus) |
-| SageAttention3 (pending
+| SageAttention3 (pending approval) | 3.12 | 2.9.0.dev | 12.9.1 | [jt-zhang/SageAttention3](https://huggingface.co/jt-zhang/SageAttention3) |
 | Flash-Attention_2.8.1 | 3.12 | 2.9.0.dev | 12.9.1 | [Dao-AILab/flash-attention](https://github.com/Dao-AILab/flash-attention) |
 | xformers_0.0.31.post1 | 3.12 | 2.9.0.dev | 12.9.1 | [facebookresearch/xformers](https://github.com/facebookresearch/xformers) |
 | INSERT | INSERT | INSERT | INSERT | INSERT |
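As a quick sanity check after installing any of these wheels (a sketch, not part of the commit: the version strings come from the table above, and the import names `flash_attn` and `sageattention` are the usual package names for these projects):

```python
# Sanity-check sketch for the prebuilt win_amd64 wheels listed above.
# Assumes the wheels were installed into a Python 3.12 environment with a
# matching CUDA-enabled torch build, e.g.:
#   pip install flash_attn-2.7.4.post1-cp312-cp312-win_amd64.whl
#   pip install sageattention-2.2.0-cp312-cp312-win_amd64.whl
import torch

# Torch/CUDA versions should line up with the table row for the wheel you
# installed (e.g. 2.8.0.dev / 12.8.x for the first two wheels).
print("torch:", torch.__version__, "cuda:", torch.version.cuda)
print("cuda available:", torch.cuda.is_available())

import flash_attn
print("flash_attn:", flash_attn.__version__)  # expect 2.7.4.post1

import sageattention  # SageAttention 2.2.0; an ImportError here usually
                      # means a Python/torch/CUDA ABI mismatch with the wheel
print("sageattention imported OK")
```

If any import fails, re-check that the Python, Torch, and CUDA columns of the table match your environment before filing an issue against the upstream repos.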