Unverified commit e683ecc9, authored by Ahmet Öner and committed by GitHub

Merge pull request #286 from ahmetoner/release-1.8.1

Release 1.8.1
parents 53f60f3f dbc3dfc6
@@ -4,9 +4,13 @@ Changelog
Unreleased
----------
[1.8.1] (2025-02-18)
--------------------
### Fixed
- Fixed issues with Torch CUDA and cuDNN
- Updated Torch and Torchaudio dependencies for multi-architecture support
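The two fixes above are easiest to confirm at runtime. A minimal sanity check, assuming only a standard PyTorch installation inside the service environment (these are stock `torch` APIs, not project code):

```python
import torch

# Report what the installed torch build can actually use.
print("torch version:", torch.__version__)                      # "+cpu" suffix means the CPU-only wheel
print("CUDA available:", torch.cuda.is_available())
print("CUDA runtime:", torch.version.cuda)                       # None on CPU-only builds
print("cuDNN available:", torch.backends.cudnn.is_available())
print("cuDNN version:", torch.backends.cudnn.version())          # None when cuDNN is missing
```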
[1.8.0] (2025-02-17)
--------------------
@@ -270,6 +274,7 @@ Unreleased
- mp3 support by using FFmpeg instead of librosa in #8
- add language detection endpoint in #9
[1.8.1]: https://github.com/ahmetoner/whisper-asr-webservice/releases/tag/v1.8.1
[1.8.0]: https://github.com/ahmetoner/whisper-asr-webservice/releases/tag/v1.8.0
[1.7.1]: https://github.com/ahmetoner/whisper-asr-webservice/releases/tag/v1.7.1
[1.7.0]: https://github.com/ahmetoner/whisper-asr-webservice/releases/tag/v1.7.0
......
@@ -9,7 +9,7 @@ Whisper ASR Box is a general-purpose speech recognition toolkit. Whisper Models
## Features
-Current release (v1.8.0) supports following whisper models:
+Current release (v1.8.1) supports following whisper models:
- [openai/whisper](https://github.com/openai/whisper)@[v20240930](https://github.com/openai/whisper/releases/tag/v20240930)
- [SYSTRAN/faster-whisper](https://github.com/SYSTRAN/faster-whisper)@[v1.1.0](https://github.com/SYSTRAN/faster-whisper/releases/tag/v1.1.0)
......
@@ -2,7 +2,7 @@ Whisper is a general-purpose speech recognition model. It is trained on a large
## Features
-Current release (v1.8.0) supports following whisper models:
+Current release (v1.8.1) supports following whisper models:
- [openai/whisper](https://github.com/openai/whisper)@[v20240930](https://github.com/openai/whisper/releases/tag/v20240930)
- [SYSTRAN/faster-whisper](https://github.com/SYSTRAN/faster-whisper)@[v1.1.0](https://github.com/SYSTRAN/faster-whisper/releases/tag/v1.1.0)
......
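For context, the engines listed in the two feature sections above are exposed through the service's HTTP API. A minimal client sketch, assuming a local deployment on the default port 9000 and the `/asr` endpoint with an `audio_file` form field as described in the project's docs; treat the URL, port, and parameter names as assumptions rather than a definitive reference:

```python
import requests

# Hypothetical local deployment; adjust host, port, and parameters to your setup.
ASR_URL = "http://localhost:9000/asr"
params = {"task": "transcribe", "language": "en", "output": "json"}  # assumed query parameters

with open("sample.wav", "rb") as audio:
    response = requests.post(ASR_URL, params=params, files={"audio_file": audio})

response.raise_for_status()
print(response.json())
```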
@@ -1030,7 +1030,7 @@ description = "Lightweight in-process concurrent programming"
optional = false
python-versions = ">=3.7"
groups = ["main"]
markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" markers = "(sys_platform == \"darwin\" or platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"x86_64\") and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"
files = [
{file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"},
{file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"},
@@ -1934,6 +1934,199 @@ files = [
{file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"},
]
[[package]]
name = "nvidia-cublas-cu12"
version = "12.4.5.8"
description = "CUBLAS native runtime libraries"
optional = false
python-versions = ">=3"
groups = ["main"]
markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\""
files = [
{file = "nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0f8aa1706812e00b9f19dfe0cdb3999b092ccb8ca168c0db5b8ea712456fd9b3"},
{file = "nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_x86_64.whl", hash = "sha256:2fc8da60df463fdefa81e323eef2e36489e1c94335b5358bcb38360adf75ac9b"},
{file = "nvidia_cublas_cu12-12.4.5.8-py3-none-win_amd64.whl", hash = "sha256:5a796786da89203a0657eda402bcdcec6180254a8ac22d72213abc42069522dc"},
]
[[package]]
name = "nvidia-cuda-cupti-cu12"
version = "12.4.127"
description = "CUDA profiling tools runtime libs."
optional = false
python-versions = ">=3"
groups = ["main"]
markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\""
files = [
{file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:79279b35cf6f91da114182a5ce1864997fd52294a87a16179ce275773799458a"},
{file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:9dec60f5ac126f7bb551c055072b69d85392b13311fcc1bcda2202d172df30fb"},
{file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:5688d203301ab051449a2b1cb6690fbe90d2b372f411521c86018b950f3d7922"},
]
[[package]]
name = "nvidia-cuda-nvrtc-cu12"
version = "12.4.127"
description = "NVRTC native runtime libraries"
optional = false
python-versions = ">=3"
groups = ["main"]
markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\""
files = [
{file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0eedf14185e04b76aa05b1fea04133e59f465b6f960c0cbf4e37c3cb6b0ea198"},
{file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a178759ebb095827bd30ef56598ec182b85547f1508941a3d560eb7ea1fbf338"},
{file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:a961b2f1d5f17b14867c619ceb99ef6fcec12e46612711bcec78eb05068a60ec"},
]
[[package]]
name = "nvidia-cuda-runtime-cu12"
version = "12.4.127"
description = "CUDA Runtime native Libraries"
optional = false
python-versions = ">=3"
groups = ["main"]
markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\""
files = [
{file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:961fe0e2e716a2a1d967aab7caee97512f71767f852f67432d572e36cb3a11f3"},
{file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:64403288fa2136ee8e467cdc9c9427e0434110899d07c779f25b5c068934faa5"},
{file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:09c2e35f48359752dfa822c09918211844a3d93c100a715d79b59591130c5e1e"},
]
[[package]]
name = "nvidia-cudnn-cu12"
version = "9.1.0.70"
description = "cuDNN runtime libraries"
optional = false
python-versions = ">=3"
groups = ["main"]
markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\""
files = [
{file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f"},
{file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-win_amd64.whl", hash = "sha256:6278562929433d68365a07a4a1546c237ba2849852c0d4b2262a486e805b977a"},
]
[package.dependencies]
nvidia-cublas-cu12 = "*"
[[package]]
name = "nvidia-cufft-cu12"
version = "11.2.1.3"
description = "CUFFT native runtime libraries"
optional = false
python-versions = ">=3"
groups = ["main"]
markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\""
files = [
{file = "nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:5dad8008fc7f92f5ddfa2101430917ce2ffacd86824914c82e28990ad7f00399"},
{file = "nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f083fc24912aa410be21fa16d157fed2055dab1cc4b6934a0e03cba69eb242b9"},
{file = "nvidia_cufft_cu12-11.2.1.3-py3-none-win_amd64.whl", hash = "sha256:d802f4954291101186078ccbe22fc285a902136f974d369540fd4a5333d1440b"},
]
[package.dependencies]
nvidia-nvjitlink-cu12 = "*"
[[package]]
name = "nvidia-curand-cu12"
version = "10.3.5.147"
description = "CURAND native runtime libraries"
optional = false
python-versions = ">=3"
groups = ["main"]
markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\""
files = [
{file = "nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1f173f09e3e3c76ab084aba0de819c49e56614feae5c12f69883f4ae9bb5fad9"},
{file = "nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a88f583d4e0bb643c49743469964103aa59f7f708d862c3ddb0fc07f851e3b8b"},
{file = "nvidia_curand_cu12-10.3.5.147-py3-none-win_amd64.whl", hash = "sha256:f307cc191f96efe9e8f05a87096abc20d08845a841889ef78cb06924437f6771"},
]
[[package]]
name = "nvidia-cusolver-cu12"
version = "11.6.1.9"
description = "CUDA solver native runtime libraries"
optional = false
python-versions = ">=3"
groups = ["main"]
markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\""
files = [
{file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_aarch64.whl", hash = "sha256:d338f155f174f90724bbde3758b7ac375a70ce8e706d70b018dd3375545fc84e"},
{file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_x86_64.whl", hash = "sha256:19e33fa442bcfd085b3086c4ebf7e8debc07cfe01e11513cc6d332fd918ac260"},
{file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-win_amd64.whl", hash = "sha256:e77314c9d7b694fcebc84f58989f3aa4fb4cb442f12ca1a9bde50f5e8f6d1b9c"},
]
[package.dependencies]
nvidia-cublas-cu12 = "*"
nvidia-cusparse-cu12 = "*"
nvidia-nvjitlink-cu12 = "*"
[[package]]
name = "nvidia-cusparse-cu12"
version = "12.3.1.170"
description = "CUSPARSE native runtime libraries"
optional = false
python-versions = ">=3"
groups = ["main"]
markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\""
files = [
{file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_aarch64.whl", hash = "sha256:9d32f62896231ebe0480efd8a7f702e143c98cfaa0e8a76df3386c1ba2b54df3"},
{file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ea4f11a2904e2a8dc4b1833cc1b5181cde564edd0d5cd33e3c168eff2d1863f1"},
{file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-win_amd64.whl", hash = "sha256:9bc90fb087bc7b4c15641521f31c0371e9a612fc2ba12c338d3ae032e6b6797f"},
]
[package.dependencies]
nvidia-nvjitlink-cu12 = "*"
[[package]]
name = "nvidia-cusparselt-cu12"
version = "0.6.2"
description = "NVIDIA cuSPARSELt"
optional = false
python-versions = "*"
groups = ["main"]
markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\""
files = [
{file = "nvidia_cusparselt_cu12-0.6.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:067a7f6d03ea0d4841c85f0c6f1991c5dda98211f6302cb83a4ab234ee95bef8"},
{file = "nvidia_cusparselt_cu12-0.6.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:df2c24502fd76ebafe7457dbc4716b2fec071aabaed4fb7691a201cde03704d9"},
{file = "nvidia_cusparselt_cu12-0.6.2-py3-none-win_amd64.whl", hash = "sha256:0057c91d230703924c0422feabe4ce768841f9b4b44d28586b6f6d2eb86fbe70"},
]
[[package]]
name = "nvidia-nccl-cu12"
version = "2.21.5"
description = "NVIDIA Collective Communication Library (NCCL) Runtime"
optional = false
python-versions = ">=3"
groups = ["main"]
markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\""
files = [
{file = "nvidia_nccl_cu12-2.21.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:8579076d30a8c24988834445f8d633c697d42397e92ffc3f63fa26766d25e0a0"},
]
[[package]]
name = "nvidia-nvjitlink-cu12"
version = "12.4.127"
description = "Nvidia JIT LTO Library"
optional = false
python-versions = ">=3"
groups = ["main"]
markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\""
files = [
{file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:4abe7fef64914ccfa909bc2ba39739670ecc9e820c83ccc7a6ed414122599b83"},
{file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:06b3b9b25bf3f8af351d664978ca26a16d2c5127dbd53c0497e28d1fb9611d57"},
{file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:fd9020c501d27d135f983c6d3e244b197a7ccad769e34df53a42e276b0e25fa1"},
]
[[package]]
name = "nvidia-nvtx-cu12"
version = "12.4.127"
description = "NVIDIA Tools Extension"
optional = false
python-versions = ">=3"
groups = ["main"]
markers = "sys_platform == \"darwin\" and platform_system == \"Linux\" and platform_machine == \"x86_64\""
files = [
{file = "nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7959ad635db13edf4fc65c06a6e9f9e55fc2f92596db928d169c0bb031e88ef3"},
{file = "nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:781e950d9b9f60d8241ccea575b32f5105a5baf4c2351cab5256a24869f12a1a"},
{file = "nvidia_nvtx_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:641dccaaa1139f3ffb0d3164b4b84f9d253397e38246a4f2f36728b48566d485"},
]
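Every `nvidia-*` entry added above is gated by a PEP 508 environment marker, which is how an installer decides whether the CUDA wheels are pulled on a given machine. A small sketch of how such a marker is evaluated, using the `packaging` library; the marker string is the one `torch` attaches to these CUDA components in its dependency list further down, and the sample environments are illustrative, not taken from the lock file:

```python
from packaging.markers import Marker

# Marker used by torch for its nvidia-* dependencies in this lock file.
marker = Marker('platform_system == "Linux" and platform_machine == "x86_64"')

# Evaluate against explicit environments instead of the current interpreter.
linux_x86 = {"platform_system": "Linux", "platform_machine": "x86_64", "sys_platform": "linux"}
mac_arm = {"platform_system": "Darwin", "platform_machine": "arm64", "sys_platform": "darwin"}

print(marker.evaluate(linux_x86))  # True  -> CUDA wheels are installed
print(marker.evaluate(mac_arm))    # False -> CUDA wheels are skipped
```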
[[package]]
name = "omegaconf"
version = "2.3.0"
@@ -3530,6 +3723,64 @@ dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"]
docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"]
testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"]
[[package]]
name = "torch"
version = "2.6.0"
description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration"
optional = false
python-versions = ">=3.9.0"
groups = ["main"]
markers = "sys_platform == \"darwin\" or platform_machine != \"x86_64\""
files = [
{file = "torch-2.6.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:6860df13d9911ac158f4c44031609700e1eba07916fff62e21e6ffa0a9e01961"},
{file = "torch-2.6.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c4f103a49830ce4c7561ef4434cc7926e5a5fe4e5eb100c19ab36ea1e2b634ab"},
{file = "torch-2.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:56eeaf2ecac90da5d9e35f7f35eb286da82673ec3c582e310a8d1631a1c02341"},
{file = "torch-2.6.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:09e06f9949e1a0518c5b09fe95295bc9661f219d9ecb6f9893e5123e10696628"},
{file = "torch-2.6.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:7979834102cd5b7a43cc64e87f2f3b14bd0e1458f06e9f88ffa386d07c7446e1"},
{file = "torch-2.6.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:ccbd0320411fe1a3b3fec7b4d3185aa7d0c52adac94480ab024b5c8f74a0bf1d"},
{file = "torch-2.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:46763dcb051180ce1ed23d1891d9b1598e07d051ce4c9d14307029809c4d64f7"},
{file = "torch-2.6.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:94fc63b3b4bedd327af588696559f68c264440e2503cc9e6954019473d74ae21"},
{file = "torch-2.6.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:2bb8987f3bb1ef2675897034402373ddfc8f5ef0e156e2d8cfc47cacafdda4a9"},
{file = "torch-2.6.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:b789069020c5588c70d5c2158ac0aa23fd24a028f34a8b4fcb8fcb4d7efcf5fb"},
{file = "torch-2.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:7e1448426d0ba3620408218b50aa6ada88aeae34f7a239ba5431f6c8774b1239"},
{file = "torch-2.6.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:9a610afe216a85a8b9bc9f8365ed561535c93e804c2a317ef7fabcc5deda0989"},
{file = "torch-2.6.0-cp313-cp313-manylinux1_x86_64.whl", hash = "sha256:4874a73507a300a5d089ceaff616a569e7bb7c613c56f37f63ec3ffac65259cf"},
{file = "torch-2.6.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:a0d5e1b9874c1a6c25556840ab8920569a7a4137afa8a63a32cee0bc7d89bd4b"},
{file = "torch-2.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:510c73251bee9ba02ae1cb6c9d4ee0907b3ce6020e62784e2d7598e0cfa4d6cc"},
{file = "torch-2.6.0-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:ff96f4038f8af9f7ec4231710ed4549da1bdebad95923953a25045dcf6fd87e2"},
{file = "torch-2.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:9ea955317cfcd3852b1402b62af258ce735c2edeee42ca9419b6bc889e5ae053"},
{file = "torch-2.6.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:bb2c6c3e65049f081940f5ab15c9136c7de40d3f01192541c920a07c7c585b7e"},
{file = "torch-2.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:683410f97984103148e31b38a8631acf31c3034c020c0f4d26171e7626d8317a"},
{file = "torch-2.6.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:265f70de5fd45b864d924b64be1797f86e76c8e48a02c2a3a6fc7ec247d2226c"},
]
[package.dependencies]
filelock = "*"
fsspec = "*"
jinja2 = "*"
networkx = "*"
nvidia-cublas-cu12 = {version = "12.4.5.8", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-cuda-cupti-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-cuda-nvrtc-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-cuda-runtime-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-cudnn-cu12 = {version = "9.1.0.70", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-cufft-cu12 = {version = "11.2.1.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-curand-cu12 = {version = "10.3.5.147", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-cusolver-cu12 = {version = "11.6.1.9", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-cusparse-cu12 = {version = "12.3.1.170", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-cusparselt-cu12 = {version = "0.6.2", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-nccl-cu12 = {version = "2.21.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-nvjitlink-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-nvtx-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
setuptools = {version = "*", markers = "python_version >= \"3.12\""}
sympy = {version = "1.13.1", markers = "python_version >= \"3.9\""}
triton = {version = "3.2.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
typing-extensions = ">=4.10.0"
[package.extras]
opt-einsum = ["opt-einsum (>=3.3)"]
optree = ["optree (>=0.13.0)"]
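The lock file now carries two `torch` entries with the same version but different wheels: the standard `2.6.0` build above and the `2.6.0+cpu` build below, selected by the markers on each entry. A quick way to see which variant a given environment resolved to, assuming only that `torch` is installed:

```python
from importlib.metadata import version

# The local version suffix distinguishes the wheel variants:
#   "2.6.0"     -> standard PyPI build (CUDA-enabled on Linux x86_64)
#   "2.6.0+cpu" -> CPU-only build from the pytorch-cpu source
print(version("torch"))
```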
[[package]]
name = "torch"
version = "2.6.0+cpu"
@@ -3537,6 +3788,7 @@ description = "Tensors and Dynamic neural networks in Python with strong GPU acc
optional = false
python-versions = ">=3.9.0"
groups = ["main"]
markers = "platform_machine == \"x86_64\" and sys_platform != \"darwin\""
files = [
{file = "torch-2.6.0+cpu-cp310-cp310-linux_x86_64.whl", hash = "sha256:35a9e78b7e4096968b54c1a198687b981569c50ae93e661aa430f9fd208da102"},
{file = "torch-2.6.0+cpu-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:90832f4d118c566b8652a2196ac695fc1f14cf420db27b5a1b41c7eaaf2141e9"},
@@ -3614,6 +3866,40 @@ primePy = ">=1.3"
torch = ">=1.7.0"
torchaudio = ">=0.7.0"
[[package]]
name = "torchaudio"
version = "2.6.0"
description = "An audio package for PyTorch"
optional = false
python-versions = "*"
groups = ["main"]
markers = "sys_platform == \"darwin\" or platform_machine != \"x86_64\""
files = [
{file = "torchaudio-2.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0eda1cd876f44fc014dc04aa680db2fa355a83df5d834398db6dd5f5cd911f4c"},
{file = "torchaudio-2.6.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:22798d5d8e37869bd5875d37f42270efbeb8ae94bda97fed40c1c5e0e1c62fa3"},
{file = "torchaudio-2.6.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:9d8e07789452efdb8132d62afe21f2293a72805f26c2891c6c53e4e4df38ddf6"},
{file = "torchaudio-2.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:c6386bfa478afae2137715bb60f35520e3b05f5fc6d3bcc6969cf9cdfb11c09c"},
{file = "torchaudio-2.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c12fc41241b8dfce3ccc1917f1c81a0f92f532d9917706600046f1eb21d2d765"},
{file = "torchaudio-2.6.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:377b177a3d683a9163e4cab5a06f0346dac9ff96fa527477338fd90fc6a2a4b6"},
{file = "torchaudio-2.6.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:0f0db5c997d031c34066d8be1c0ce7d2a1f2b6c016a92885b20b00bfeb17b753"},
{file = "torchaudio-2.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:52182f6de4e7b342d139e54b703185d428de9cce3c4cf914a9b2ab2359d192a3"},
{file = "torchaudio-2.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d0e4b08c42325bf4b887de9a25c44ed882997001740e1bd7d901f65581cf1ab"},
{file = "torchaudio-2.6.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:715aa21f6bdbd085454c313ae3a2c7cc07bf2e8cf05752f819afb5b4c57f4e6f"},
{file = "torchaudio-2.6.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:6291d9507dc1d6b4ffe8843fbfb201e6c8270dd8c42ad70bb76226c0ebdcad56"},
{file = "torchaudio-2.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:86d6239792bf94741a41acd6fe3d549faaf0d50e7275d17d076a190bd007e2f9"},
{file = "torchaudio-2.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:66f2e0bd5ab56fd81419d2f5afb74a9a70141688594646441756c8c24f424a73"},
{file = "torchaudio-2.6.0-cp313-cp313-manylinux1_x86_64.whl", hash = "sha256:52f15185349c370fc1faa84e8b8b2782c007472db9d586a16bba314130b322f2"},
{file = "torchaudio-2.6.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:b521ea9618fb4c29a6f8071628170c222291f46a48a3bf424cfeb488f54af714"},
{file = "torchaudio-2.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:393fa74ec40d167f0170728ea21c9b5e0f830648fd02df7db2bf7e62f64245ec"},
{file = "torchaudio-2.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04803a969710bdb77a4ddfdb85a32fa9b9e0310dc91f7eb7e54d6083dd69bfab"},
{file = "torchaudio-2.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8c1a4d08e35a9ceaadadbff6e60bcb3442482f800369be350103dfd08b4ddf52"},
{file = "torchaudio-2.6.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:72e77055d8e742475c6dfacf59fab09b1fc94d4423e14897e188b67cad3851c6"},
{file = "torchaudio-2.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:d855da878a28c2e5e6fb3d76fcddd544f4d957a320b29602cea5af2fe0ad1f3a"},
]
[package.dependencies]
torch = "2.6.0"
[[package]]
name = "torchaudio"
version = "2.6.0+cpu"
@@ -3621,6 +3907,7 @@ description = "An audio package for PyTorch"
optional = false
python-versions = "*"
groups = ["main"]
markers = "platform_machine == \"x86_64\" and sys_platform != \"darwin\""
files = [
{file = "torchaudio-2.6.0+cpu-cp310-cp310-linux_x86_64.whl", hash = "sha256:2de25e3df4c1bfcb06589a115b246b169d3391adde0a9d1913fcb8bd0daf95a8"},
{file = "torchaudio-2.6.0+cpu-cp310-cp310-win_amd64.whl", hash = "sha256:a38f6c413a83bc1089d4eecd0acd88e8190df6e0c4423ee45ba59cc0a8001324"},
@@ -3768,7 +4055,7 @@ description = "A language and compiler for custom Deep Learning operations"
optional = false
python-versions = "*"
groups = ["main"]
markers = "platform_machine == \"x86_64\" and sys_platform == \"linux\" or sys_platform == \"linux2\"" markers = "(platform_machine == \"x86_64\" or sys_platform == \"linux2\") and (sys_platform == \"darwin\" or sys_platform == \"linux\" or sys_platform == \"linux2\") and (platform_system == \"Linux\" or sys_platform == \"linux2\" or sys_platform == \"linux\")"
files = [
{file = "triton-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e54983cd51875855da7c68ec05c05cf8bb08df361b1d5b69e05e40b0c9bd62"},
{file = "triton-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8009a1fb093ee8546495e96731336a33fb8856a38e45bb4ab6affd6dbc3ba220"},
@@ -4154,4 +4441,4 @@ propcache = ">=0.2.0"
[metadata]
lock-version = "2.1"
python-versions = "<3.13,>=3.10"
-content-hash = "97dfc6c23816f1e1d45f6ac3118f043dfb34856e2922bfc2a43ae66bfae00f50"
+content-hash = "0c0767a6fae119e6bd7a65f853052c14a66c95b2d8b1e11b72a2d8488c27652c"
@@ -29,8 +29,16 @@ whisperx = "^3.3.1"
tqdm = "^4.67.1"
llvmlite = "^0.44.0"
numba = "^0.61.0"
-torch = { version = "2.6.0", source = "pytorch-cpu" }
-torchaudio = { version = "2.6.0", source = "pytorch-cpu" }
+torch = [
+    { version = "2.6.0", source = "pypi", markers = "sys_platform == 'darwin'" },
+    { version = "2.6.0", source = "pypi", markers = "platform_machine == 'aarch64' and sys_platform != 'darwin'" },
+    { version = "2.6.0", source = "pytorch-cpu", markers = "platform_machine == 'x86_64' and sys_platform != 'darwin'" },
+]
+torchaudio = [
+    { version = "2.6.0", source = "pypi", markers = "sys_platform == 'darwin'" },
+    { version = "2.6.0", source = "pypi", markers = "platform_machine == 'aarch64' and sys_platform != 'darwin'" },
+    { version = "2.6.0", source = "pytorch-cpu", markers = "platform_machine == 'x86_64' and sys_platform != 'darwin'" },
+]
[poetry.group.dev.dependencies]
pytest = "^8.3.4"
......
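The new `torch`/`torchaudio` arrays above split the dependency three ways: macOS pulls the regular PyPI wheels, non-macOS aarch64 pulls PyPI wheels as well, and non-macOS x86_64 pulls the CPU-only wheels from the `pytorch-cpu` source. A small stdlib-only sketch that mirrors those markers to show which branch applies on the current interpreter (the branch labels are mine, not Poetry output):

```python
import platform
import sys

# Mirror the three environment markers used in pyproject.toml above.
machine = platform.machine()          # e.g. "x86_64", "aarch64", "arm64"
is_darwin = sys.platform == "darwin"

if is_darwin:
    branch = "pypi (sys_platform == 'darwin')"
elif machine == "aarch64":
    branch = "pypi (aarch64, non-darwin)"
elif machine == "x86_64":
    branch = "pytorch-cpu (x86_64, non-darwin)"
else:
    branch = "no matching marker for this platform"

print(f"{machine=} {sys.platform=} -> {branch}")
```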