bnurpek committed
Commit bc1e1fa · 1 Parent(s): 800dde5

Push model using huggingface_hub.

Files changed (2)
  1. README.md +3 -3
  2. model.safetensors +1 -1
README.md CHANGED
@@ -25,7 +25,7 @@ You can then generate text as follows:
 ```python
 from transformers import pipeline
 
-generator = pipeline("text-generation", model="bnurpek//tmp/tmpx6toxjox/bnurpek/gpt2-256t-pos-7")
+generator = pipeline("text-generation", model="bnurpek//tmp/tmp1v9fuz61/bnurpek/gpt2-256t-pos-7")
 outputs = generator("Hello, my llama is cute")
 ```
 
@@ -35,8 +35,8 @@ If you want to use the model for training or to obtain the outputs from the valu
 from transformers import AutoTokenizer
 from trl import AutoModelForCausalLMWithValueHead
 
-tokenizer = AutoTokenizer.from_pretrained("bnurpek//tmp/tmpx6toxjox/bnurpek/gpt2-256t-pos-7")
-model = AutoModelForCausalLMWithValueHead.from_pretrained("bnurpek//tmp/tmpx6toxjox/bnurpek/gpt2-256t-pos-7")
+tokenizer = AutoTokenizer.from_pretrained("bnurpek//tmp/tmp1v9fuz61/bnurpek/gpt2-256t-pos-7")
+model = AutoModelForCausalLMWithValueHead.from_pretrained("bnurpek//tmp/tmp1v9fuz61/bnurpek/gpt2-256t-pos-7")
 
 inputs = tokenizer("Hello, my llama is cute", return_tensors="pt")
 outputs = model(**inputs, labels=inputs["input_ids"])
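Both hunks above only swap the temporary local path used during the push for the updated one; the usage itself is unchanged. For context, a minimal sketch of unpacking the value-head model's outputs, under the assumption that TRL's AutoModelForCausalLMWithValueHead returns a (lm_logits, loss, value) tuple; the repository id is copied verbatim from the new README text and still embeds a temporary /tmp directory.

```python
from transformers import AutoTokenizer
from trl import AutoModelForCausalLMWithValueHead

# Copied from the updated README; replace with the actual Hub repo id if the
# embedded /tmp path does not resolve.
repo = "bnurpek//tmp/tmp1v9fuz61/bnurpek/gpt2-256t-pos-7"

tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLMWithValueHead.from_pretrained(repo)

inputs = tokenizer("Hello, my llama is cute", return_tensors="pt")

# Assumption: the forward pass returns (lm_logits, loss, value).
# lm_logits: (batch, seq_len, vocab_size) language-model logits,
# value:     (batch, seq_len) per-token scalar estimates from the value head,
# loss:      the causal LM loss, computed because labels are passed through.
lm_logits, loss, value = model(**inputs, labels=inputs["input_ids"])
print(lm_logits.shape, value.shape, loss)
```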
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:77d5686a5e539d3c7068e47b4310dfe375b0d1a057ab598f49c64ff34bccb617
+oid sha256:1a31929daa0e3f46108172ed6a814d8b5f9592d49d9bdc3fc78c733085dc5c28
 size 497777468