jbilcke-hf (HF Staff) committed
Commit afaa1bd · 1 Parent(s): 7e21e7f

adding fallback instructions

Files changed (1): requirements.txt (+9 -0)
requirements.txt CHANGED
@@ -20,6 +20,15 @@ torchdata==0.11.0
 torchao==0.12.0
 torchcodec==0.5.0
 flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.7cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
+#
+# if that doesn't work, we can still try to revert back to this:
+#
+#torch==2.6.0
+#torchvision==0.21.0
+#torchdata==0.10.1
+#torchao==0.9.0
+#torchcodec==0.4.0
+#flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.6cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
 
 # something broke in Transformers > 4.55.4
 transformers==4.55.4
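
One detail worth flagging for anyone applying the fallback: the flash-attn wheel URL encodes the torch version it was built against (torch2.7 in the active pin, torch2.6 in the commented fallback), so the revert has to swap torch and the wheel together. Below is a minimal sanity-check sketch, assuming the fallback pins above are what got installed; the script is illustrative only and not part of the commit.

# Sketch: confirm the installed packages match the fallback pins from the
# commented-out block above, then try importing flash-attn, whose prebuilt
# wheel is tied to a specific torch version (the URL encodes "torch2.6").
import importlib.metadata as md

# Mirrors the commented-out fallback pins in requirements.txt.
FALLBACK_PINS = {
    "torch": "2.6.0",
    "torchvision": "0.21.0",
    "torchdata": "0.10.1",
    "torchao": "0.9.0",
    "torchcodec": "0.4.0",
}

for pkg, expected in FALLBACK_PINS.items():
    try:
        installed = md.version(pkg)
    except md.PackageNotFoundError:
        print(f"{pkg}: not installed (expected {expected})")
        continue
    status = "OK" if installed == expected else f"mismatch (expected {expected})"
    print(f"{pkg}=={installed}  {status}")

# An ImportError here usually means the wheel was built against a different
# torch version than the one installed.
try:
    import flash_attn
    print(f"flash_attn imported OK ({flash_attn.__version__})")
except ImportError as exc:
    print(f"flash_attn import failed: {exc}")

If this check passes but things still break at runtime, the remaining suspect is the transformers pin, which the commit deliberately holds at 4.55.4 on both stacks.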