torchlibrosa==0.1.0
pytorch-lightning==2.3.0
jupyter==1.0.0
torchmetrics==0.11.4
huggingface_hub==0.20.2
calmsize
librosa==0.10.1
einops==0.6.0
pytorch-memlab==0.3.0
transformers==4.18.0
omegaconf==2.3.0
sentencepiece==0.2.0
taming-transformers-rom1504
soundfile
gradio
torchdyn
ninja
protobuf
torchaudio
openai
flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu118torch2.1cxx11abiFALSE-cp39-cp39-linux_x86_64.whl