Merge pull request #225507 from MayNiklas/update-whisper

openai-whisper: 20230124 -> 20230314
Martin Weinelt, 2023-04-10 17:11:47 +02:00 (committed via GitHub)
commit e4daee5b6e
2 changed files with 36 additions and 6 deletions

pkgs/development/python-modules/openai-whisper/default.nix

@@ -2,6 +2,7 @@
 , fetchFromGitHub
 , buildPythonPackage
 , substituteAll
+, cudaSupport ? false

 # runtime
 , ffmpeg
@@ -9,10 +10,15 @@
 # propagates
 , numpy
 , torch
+, torchWithCuda
 , tqdm
 , more-itertools
 , transformers
 , ffmpeg-python
+, numba
+, openai-triton
+, scipy
+, tiktoken

 # tests
 , pytestCheckHook
@@ -20,14 +26,14 @@
 buildPythonPackage rec {
   pname = "whisper";
-  version = "20230124";
+  version = "20230314";
   format = "setuptools";

   src = fetchFromGitHub {
     owner = "openai";
     repo = pname;
     rev = "refs/tags/v${version}";
-    hash = "sha256-+3fs/EXK5NGlISuMTk7r2ZZ4tNFKbNFNkVS2LmHBvwk=";
+    hash = "sha256-qQCELjRFeRCT1k1CBc3netRtFvt+an/EbkrgnmiX/mc=";
   };

   patches = [
@@ -39,13 +45,31 @@ buildPythonPackage rec {
   propagatedBuildInputs = [
     numpy
-    torch
     tqdm
     more-itertools
     transformers
     ffmpeg-python
+    numba
+    scipy
+    tiktoken
+  ] ++ lib.optionals (!cudaSupport) [
+    torch
+  ] ++ lib.optionals (cudaSupport) [
+    openai-triton
+    torchWithCuda
   ];

+  postPatch = ''
+    substituteInPlace requirements.txt \
+      --replace "tiktoken==0.3.1" "tiktoken>=0.3.1"
+  ''
+  # openai-triton is only needed for CUDA support.
+  # triton needs CUDA to be built.
+  # -> by making it optional, we can build whisper without unfree packages enabled
+  + lib.optionalString (!cudaSupport) ''
+    sed -i '/if sys.platform.startswith("linux") and platform.machine() == "x86_64":/{N;d}' setup.py
+  '';
+
   preCheck = ''
     export HOME=$TMPDIR
   '';
@@ -56,14 +80,18 @@ buildPythonPackage rec {
   disabledTests = [
     # requires network access to download models
+    "test_tokenizer"
     "test_transcribe"
+    # requires NVIDIA drivers
+    "test_dtw_cuda_equivalence"
+    "test_median_filter_equivalence"
   ];

   meta = with lib; {
+    changelog = "https://github.com/openai/whisper/blob/v${version}/CHANGELOG.md";
     description = "General-purpose speech recognition model";
     homepage = "https://github.com/openai/whisper";
     license = licenses.mit;
-    maintainers = with maintainers; [ hexa ];
+    maintainers = with maintainers; [ hexa MayNiklas ];
   };
 }
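The new cudaSupport flag keeps CUDA acceleration opt-in, so the package still builds with only free packages. A minimal usage sketch (not part of this commit; the shell.nix below is illustrative and assumes allowUnfree so the CUDA toolchain can be evaluated):

# shell.nix (illustrative): request the CUDA-enabled variant via an override
{ pkgs ? import <nixpkgs> { config.allowUnfree = true; } }:

pkgs.mkShell {
  packages = [
    (pkgs.python3.withPackages (ps: [
      (ps.openai-whisper.override { cudaSupport = true; })
    ]))
  ];
}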

pkgs/top-level/python-packages.nix

@@ -6815,7 +6815,9 @@ self: super: with self; {
   openai-triton = callPackage ../development/python-modules/openai-triton { llvmPackages = pkgs.llvmPackages_rocm; };

-  openai-whisper = callPackage ../development/python-modules/openai-whisper { };
+  openai-whisper = callPackage ../development/python-modules/openai-whisper {
+    cudaSupport = pkgs.config.cudaSupport or false;
+  };

   openant = callPackage ../development/python-modules/openant { };
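Because the flag is fed from pkgs.config.cudaSupport, the CUDA variant can also be selected globally instead of per package. A rough sketch, assuming the standard nixpkgs config mechanism:

# Illustrative only: evaluating nixpkgs with cudaSupport enabled makes the
# callPackage override above pick torchWithCuda and openai-triton by default.
let
  pkgs = import <nixpkgs> {
    config = {
      allowUnfree = true;  # the CUDA toolchain is unfree
      cudaSupport = true;  # read via `pkgs.config.cudaSupport or false`
    };
  };
in
pkgs.python3Packages.openai-whisper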