Commit 54afe4a6 authored by Ahmet Öner

Upgrade faster-whisper to v1.1.0

parent f6507969
...
@@ -14,6 +14,11 @@ Unreleased
 - Refactor classes, Add comments, implement abstract methods, and add factory method for engine selection
+
+### Changed
+
+- Upgraded
+  - [SYSTRAN/faster-whisper](https://github.com/SYSTRAN/faster-whisper) to [v1.1.0](https://github.com/SYSTRAN/faster-whisper/releases/tag/v1.1.0)

 [1.6.0] (2024-10-06)
 --------------------
...
...
@@ -15,8 +15,7 @@ export ASR_ENGINE=faster_whisper
 export ASR_MODEL=base
 ```
-Available ASR_MODELs are `tiny`, `base`, `small`, `medium`, `large`, `large-v1`, `large-v2`, `large-v3`, `turbo`(only
-OpenAI Whisper) and `large-v3-turbo`(only OpenAI Whisper).
+Available ASR_MODELs are `tiny`, `base`, `small`, `medium`, `large`, `large-v1`, `large-v2`, `large-v3`, `turbo` and `large-v3-turbo`.

 For English-only applications, the `.en` models tend to perform better, especially for the `tiny.en` and `base.en`
 models. We observed that the difference becomes less significant for the `small.en` and `medium.en` models.
...
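The model names listed in the docs change above are passed straight through to the upgraded library. As a point of reference (not part of this commit), here is a minimal sketch of loading one of those models with faster-whisper 1.1.0; the audio path `audio.wav`, the `cpu` device, and the `int8` compute type are illustrative choices, not values taken from this repository.

```python
from faster_whisper import WhisperModel

# Any of the documented ASR_MODEL names ("tiny", "base", ..., "large-v3-turbo") can be used here.
# Device and compute type are assumptions for a CPU-only sketch, not settings from this repo.
model = WhisperModel("base", device="cpu", compute_type="int8")

# transcribe() returns a lazy generator of segments plus info about the detected language.
segments, info = model.transcribe("audio.wav", beam_size=5)
print(f"Detected language: {info.language} (p={info.language_probability:.2f})")
for segment in segments:
    print(f"[{segment.start:.2f}s -> {segment.end:.2f}s] {segment.text}")
```

The `turbo` and `large-v3-turbo` entries no longer carry the "(only OpenAI Whisper)" caveat because faster-whisper gained support for the turbo models in the 1.1.0 release this commit pulls in.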
...
@@ -375,21 +375,22 @@ standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "htt
 [[package]]
 name = "faster-whisper"
-version = "1.0.3"
+version = "1.1.0"
 description = "Faster Whisper transcription with CTranslate2"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "faster-whisper-1.0.3.tar.gz", hash = "sha256:1a145db86450b56aaa623c8df7d4ef86e8a1159900f60533e2890e98e8453a17"},
-    {file = "faster_whisper-1.0.3-py3-none-any.whl", hash = "sha256:364d0e378ab232ed26f39656e5c98548b38045224e206b20f7d8c90e2745b9d3"},
+    {file = "faster-whisper-1.1.0.tar.gz", hash = "sha256:cea4bba5d4527173fdbacafa56f2ffb17dd322688f6c3fdf5fd7b6b6c193ce17"},
+    {file = "faster_whisper-1.1.0-py3-none-any.whl", hash = "sha256:0f2d025676bbff1e46c4108b6f9a82578d6e33826c174af2990e45b33fab6182"},
 ]

 [package.dependencies]
-av = ">=11.0,<13"
+av = ">=11"
 ctranslate2 = ">=4.0,<5"
 huggingface-hub = ">=0.13"
 onnxruntime = ">=1.14,<2"
 tokenizers = ">=0.13,<1"
+tqdm = "*"

 [package.extras]
 conversion = ["transformers[torch] (>=4.23)"]
...
...
@@ -2003,4 +2004,4 @@ files = [
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "3a007512112802c1d81dea2788a1e5523f701cf84ba97a163e7ac1882707e119"
+content-hash = "c190cb1d67e7b336841b59fbfa4e1df855724eaf804fd08c92e6e1e1a35840d6"
...
@@ -26,7 +26,7 @@ fastapi = "^0.115.0"
 llvmlite = "^0.43.0"
 numba = "^0.60.0"
 openai-whisper = "^20240930"
-faster-whisper = "^1.0.3"
+faster-whisper = "^1.1.0"
 torch = [
     { markers = "sys_platform == 'darwin' and platform_machine == 'arm64'", url = "https://download.pytorch.org/whl/cpu/torch-1.13.1-cp310-none-macosx_11_0_arm64.whl" },
     { markers = "sys_platform == 'linux' and platform_machine == 'arm64'", url = "https://download.pytorch.org/whl/cpu/torch-1.13.1-cp310-none-macosx_11_0_arm64.whl" },
...