Spaces:
Running on Zero
Running on Zero
Commit ·
5de3b84
1
Parent(s): 840aba4
:bug: remove mps
Browse files
- context_window_gradio.py +1 -1
context_window_gradio.py
CHANGED
|
@@ -11,7 +11,7 @@ from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer, BitsAndB
|
|
| 11 |
|
| 12 |
|
| 13 |
# quantization_config = BitsAndBytesConfig(load_in_4bit=True)
|
| 14 |
-
torch_device = "cuda" if torch.cuda.is_available() else "mps" if torch.backends.mps.is_available() else "cpu"
|
| 15 |
|
| 16 |
torch_dtype = torch.bfloat16 if torch_device in ["cuda", "mps"] else torch.float32
|
| 17 |
|
|
|
|
| 11 |
|
| 12 |
|
| 13 |
# quantization_config = BitsAndBytesConfig(load_in_4bit=True)
|
| 14 |
+
torch_device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 15 |
|
| 16 |
torch_dtype = torch.bfloat16 if torch_device in ["cuda", "mps"] else torch.float32
|
| 17 |
|