Update README.md
README.md CHANGED

@@ -92,7 +92,6 @@ LORA_NAME = "ibm-granite/granite-uncertainty-3.0-8b-lora"
 device=torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 
 # Load model
-token = os.getenv("HF_MISTRAL_TOKEN")
 tokenizer = AutoTokenizer.from_pretrained(BASE_NAME,padding_side='left',trust_remote_code=True, token=token)
 model_base = AutoModelForCausalLM.from_pretrained(BASE_NAME,device_map="auto")
 model_UQ = PeftModel.from_pretrained(model_base, LORA_NAME)
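For context, here is a minimal sketch of how the loading snippet reads after this change. The imports, the `BASE_NAME` value, and the explicit `token` assignment are assumptions not shown in this hunk; note that the surviving `token=token` argument still requires `token` to be defined somewhere earlier in the README.

```python
# Minimal sketch, assuming BASE_NAME is the Granite 3.0 8B instruct base checkpoint
# and that a Hugging Face token (or None for public access) is defined up front.
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
from peft import PeftModel

BASE_NAME = "ibm-granite/granite-3.0-8b-instruct"  # assumption: base model for the LoRA
LORA_NAME = "ibm-granite/granite-uncertainty-3.0-8b-lora"
token = None  # assumption: pass a HF access token string here if the checkpoints are gated

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Load tokenizer, base model, and attach the uncertainty LoRA adapter
tokenizer = AutoTokenizer.from_pretrained(BASE_NAME, padding_side='left',
                                          trust_remote_code=True, token=token)
model_base = AutoModelForCausalLM.from_pretrained(BASE_NAME, device_map="auto")
model_UQ = PeftModel.from_pretrained(model_base, LORA_NAME)
```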