--no-binary=opencc
cn2an
ffmpeg-python
g2pk2
g2p_en
jieba
jieba_fast
kernels
ko_pron
modelscope
opencc
peft
py-cpuinfo
pypinyin
split-lang
torchcodec
transformers
tensorboard
ToJyutping
wordsegment
x_transformers
onnxruntime; platform_machine == "aarch64" or platform_machine == "arm64"
onnxruntime-gpu; platform_machine == "x86_64" or platform_machine == "AMD64"
python_mecab_ko; sys_platform != 'win32'
fastapi[standard]>=0.115.2
fast_langdetect>=0.3.1
gradio
librosa==0.10.2
pydantic<=2.10.6
pyopenjtalk>=0.4.1
https://github.com/XXXXRT666/flash-attention/releases/download/v2.8.3/flash_attn-2.8.3+cu12torch2.8cxx11abiTRUE-cp310-cp310-linux_x86_64.whl