diff --git a/backend/python/transformers/requirements-cpu.txt b/backend/python/transformers/requirements-cpu.txt
index e397245e3..bd6003bb1 100644
--- a/backend/python/transformers/requirements-cpu.txt
+++ b/backend/python/transformers/requirements-cpu.txt
@@ -1,6 +1,6 @@
 torch==2.7.1
-llvmlite==0.43.0
-numba==0.60.0
+llvmlite==0.47.0
+numba==0.64.0
 accelerate
 transformers
 bitsandbytes
diff --git a/backend/python/transformers/requirements-cublas12.txt b/backend/python/transformers/requirements-cublas12.txt
index 07ae834c7..02d078da2 100644
--- a/backend/python/transformers/requirements-cublas12.txt
+++ b/backend/python/transformers/requirements-cublas12.txt
@@ -1,7 +1,7 @@
 torch==2.7.1
 accelerate
-llvmlite==0.43.0
-numba==0.60.0
+llvmlite==0.47.0
+numba==0.64.0
 transformers
 bitsandbytes
 sentence-transformers==5.2.3
diff --git a/backend/python/transformers/requirements-cublas13.txt b/backend/python/transformers/requirements-cublas13.txt
index a82220a08..06197a7a5 100644
--- a/backend/python/transformers/requirements-cublas13.txt
+++ b/backend/python/transformers/requirements-cublas13.txt
@@ -1,7 +1,7 @@
 --extra-index-url https://download.pytorch.org/whl/cu130
 torch==2.9.0
-llvmlite==0.43.0
-numba==0.60.0
+llvmlite==0.47.0
+numba==0.64.0
 transformers
 bitsandbytes
 sentence-transformers==5.2.3
diff --git a/backend/python/transformers/requirements-hipblas.txt b/backend/python/transformers/requirements-hipblas.txt
index 2c346dfa6..63f6da7ca 100644
--- a/backend/python/transformers/requirements-hipblas.txt
+++ b/backend/python/transformers/requirements-hipblas.txt
@@ -3,7 +3,7 @@ torch==2.8.0+rocm6.4
 accelerate
 transformers
-llvmlite==0.43.0
-numba==0.60.0
+llvmlite==0.47.0
+numba==0.64.0
 bitsandbytes
 sentence-transformers==5.2.3
 protobuf==6.33.5
\ No newline at end of file
diff --git a/backend/python/transformers/requirements-intel.txt b/backend/python/transformers/requirements-intel.txt
index 61f8d3ba9..148e018a8 100644
--- a/backend/python/transformers/requirements-intel.txt
+++ b/backend/python/transformers/requirements-intel.txt
@@ -2,7 +2,7 @@
 torch
 optimum[openvino]
-llvmlite==0.43.0
-numba==0.60.0
+llvmlite==0.47.0
+numba==0.64.0
 transformers
 bitsandbytes
 sentence-transformers==5.2.3
diff --git a/backend/python/transformers/requirements-mps.txt b/backend/python/transformers/requirements-mps.txt
index 579fb95c7..d2851f25a 100644
--- a/backend/python/transformers/requirements-mps.txt
+++ b/backend/python/transformers/requirements-mps.txt
@@ -1,6 +1,6 @@
 torch==2.7.1
-llvmlite==0.43.0
-numba==0.60.0
+llvmlite==0.47.0
+numba==0.64.0
 accelerate
 transformers
 bitsandbytes