# Source: OpenLLM/cllama/spec.py (12 lines, 269 B; captured 2024-05-18 12:41:54 +08:00)
# Mapping of GPU accelerator identifier -> memory capacity in gigabytes.
# Key names follow the Kubernetes/GKE accelerator-type naming scheme
# (e.g. "nvidia-tesla-t4", "nvidia-l4") — presumably used elsewhere to
# select a GPU with enough memory for a model; verify against callers,
# which are not visible in this file.
GPU_MEMORY = {
"nvidia-tesla-t4": 16,
"nvidia-tesla-v100": 16,
"nvidia-tesla-p100": 16,
"nvidia-tesla-p4": 8,
# NOTE(review): a K80 board carries two 12 GB GPUs; 12 here matches the
# per-GPU figure — confirm that is the intended granularity.
"nvidia-tesla-k80": 12,
"nvidia-tesla-a100": 40,
"nvidia-tesla-a100-80gb": 80,
"nvidia-tesla-a10g": 24,
"nvidia-l4": 24,
}