rudranshn committed on
Commit
564b70e
·
verified ·
1 Parent(s): 8d166ef

Upload README.md with huggingface_hub

Browse files
Files changed (1) hide show
  1. README.md +16 -7
README.md CHANGED
@@ -48,14 +48,23 @@ Trained on the [lucasmccabe/logiqa](https://huggingface.co/datasets/lucasmccabe/
48
  from transformers import AutoModelForCausalLM, AutoTokenizer
49
  import torch
50
 
51
- model = AutoModelForCausalLM.from_pretrained("rudransh/circuit", torch_dtype=torch.float16)
52
- tokenizer = AutoTokenizer.from_pretrained("rudransh/circuit")
53
-
54
- prompt = "If all squares are rectangles, and all rectangles have four sides, what can we conclude about squares?"
55
-
56
- inputs = tokenizer(prompt, return_tensors="pt")
 
 
 
 
 
 
 
 
 
57
  outputs = model.generate(**inputs, max_new_tokens=150)
58
- print(tokenizer.decode(outputs[0], skip_special_tokens=True)
59
 
60
  ```
61
 
 
48
  from transformers import AutoModelForCausalLM, AutoTokenizer
49
  import torch
50
 
51
+ model = AutoModelForCausalLM.from_pretrained(
52
+ "rudranshjoshi/circuit",
53
+ torch_dtype=torch.float16,
54
+ trust_remote_code=True
55
+ )
56
+ tokenizer = AutoTokenizer.from_pretrained(
57
+ "rudranshjoshi/circuit",
58
+ trust_remote_code=True
59
+ )
60
+
61
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
62
+ model.to(device)
63
+
64
+ prompt = "Your prompt here"
65
+ inputs = tokenizer(prompt, return_tensors="pt").to(device)
66
  outputs = model.generate(**inputs, max_new_tokens=150)
67
+ print(tokenizer.decode(outputs[0], skip_special_tokens=True))
68
 
69
  ```
70