fix(dolly): make sure to use GPU when available

Map `device_map` to `"auto"` when a GPU is available.

Signed-off-by: aarnphm-ec2-dev <29749331+aarnphm@users.noreply.github.com>
This commit is contained in:
aarnphm-ec2-dev
2023-06-15 05:52:25 +00:00
parent dfe71d7867
commit b3d924e6d6

View File

@@ -38,7 +38,7 @@ class DollyV2(openllm.LLM["transformers.Pipeline", "transformers.PreTrainedToken
@property
def import_kwargs(self):
model_kwds = {
"device_map": "auto" if torch.cuda.is_available() and torch.cuda.device_count() > 1 else None,
"device_map": "auto" if torch.cuda.is_available() else None,
"torch_dtype": torch.bfloat16,
}
tokenizer_kwds = {"padding_side": "left"}