diff --git a/backend/python/transformers/requirements-cpu.txt b/backend/python/transformers/requirements-cpu.txt
index d6e385868..1292ef236 100644
--- a/backend/python/transformers/requirements-cpu.txt
+++ b/backend/python/transformers/requirements-cpu.txt
@@ -1,6 +1,6 @@
 torch==2.7.1
-llvmlite==0.43.0
-numba==0.60.0
+llvmlite==0.47.0
+numba==0.64.0
 accelerate
 transformers
 bitsandbytes
diff --git a/backend/python/transformers/requirements-cublas12.txt b/backend/python/transformers/requirements-cublas12.txt
index 028e4f191..b16fc13e1 100644
--- a/backend/python/transformers/requirements-cublas12.txt
+++ b/backend/python/transformers/requirements-cublas12.txt
@@ -1,7 +1,7 @@
 torch==2.7.1
 accelerate
-llvmlite==0.43.0
-numba==0.60.0
+llvmlite==0.47.0
+numba==0.64.0
 transformers
 bitsandbytes
 sentence-transformers==5.2.2
diff --git a/backend/python/transformers/requirements-cublas13.txt b/backend/python/transformers/requirements-cublas13.txt
index fa375851c..04c1e6740 100644
--- a/backend/python/transformers/requirements-cublas13.txt
+++ b/backend/python/transformers/requirements-cublas13.txt
@@ -1,7 +1,7 @@
 --extra-index-url https://download.pytorch.org/whl/cu130
 torch==2.9.0
-llvmlite==0.43.0
-numba==0.60.0
+llvmlite==0.47.0
+numba==0.64.0
 transformers
 bitsandbytes
 sentence-transformers==5.2.2
diff --git a/backend/python/transformers/requirements-hipblas.txt b/backend/python/transformers/requirements-hipblas.txt
index 49ccf3f25..67749fa8b 100644
--- a/backend/python/transformers/requirements-hipblas.txt
+++ b/backend/python/transformers/requirements-hipblas.txt
@@ -3,7 +3,7 @@ torch==2.8.0+rocm6.4
 accelerate
 transformers
-llvmlite==0.43.0
-numba==0.60.0
+llvmlite==0.47.0
+numba==0.64.0
 bitsandbytes
 sentence-transformers==5.2.2
 protobuf==6.33.5
\ No newline at end of file
diff --git a/backend/python/transformers/requirements-intel.txt b/backend/python/transformers/requirements-intel.txt
index 233b84bd5..e92f10863 100644
--- a/backend/python/transformers/requirements-intel.txt
+++ b/backend/python/transformers/requirements-intel.txt
@@ -2,7 +2,7 @@
 torch
 optimum[openvino]
-llvmlite==0.43.0
-numba==0.60.0
+llvmlite==0.47.0
+numba==0.64.0
 transformers
 bitsandbytes
 sentence-transformers==5.2.2
diff --git a/backend/python/transformers/requirements-mps.txt b/backend/python/transformers/requirements-mps.txt
index 3439a720e..81aa1fa37 100644
--- a/backend/python/transformers/requirements-mps.txt
+++ b/backend/python/transformers/requirements-mps.txt
@@ -1,6 +1,6 @@
 torch==2.7.1
-llvmlite==0.43.0
-numba==0.60.0
+llvmlite==0.47.0
+numba==0.64.0
 accelerate
 transformers
 bitsandbytes