Readme from_pretrained model version fix
Browse files
README.md
CHANGED
|
@@ -39,8 +39,8 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
|
|
| 39 |
|
| 40 |
device = "cuda" # the device to load the model onto
|
| 41 |
|
| 42 |
-
model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")
|
| 43 |
-
tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")
|
| 44 |
|
| 45 |
messages = [
|
| 46 |
{"role": "user", "content": "What is your favourite condiment?"},
|
|
|
|
| 39 |
|
| 40 |
device = "cuda" # the device to load the model onto
|
| 41 |
|
| 42 |
+
model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-Instruct-v0.2")
|
| 43 |
+
tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.2")
|
| 44 |
|
| 45 |
messages = [
|
| 46 |
{"role": "user", "content": "What is your favourite condiment?"},
|