Commit fe48cb9 · Parent: 336e5d3

Update app.py

app.py CHANGED
@@ -49,11 +49,7 @@ logger = CSVLogger("out", name, flush_logs_every_n_steps=log_interval)
 
 fabric = L.Fabric(devices=1, strategy='auto', precision=None, loggers=logger)
 
-checkpoint_path = Path("out/redpajama/iter-023999-ckpt.pth")
-config = Config.from_name(model_name)
-model = GPT(config)
 
-load_checkpoint(fabric, model, checkpoint_path)
 
 #print(model.transformer.h[0].mlp.fc.weight)
 
@@ -92,7 +88,7 @@ def generate( model, config, idx, max_new_tokens, temperature=1.0, top_k=None):
 checkpoint_dir = Path('./checkpoints/meta-llama/Llama-2-7b-chat-hf')
 token = Tokenizer(checkpoint_dir = checkpoint_dir)
 
-def tsaigpt(start:str , model= model, max_new_tokens = 300, num_samples =2, toke
+def tsaigpt(start:str , max_new_tokens = 300, num_samples =2, tokeniser= token):
 
 
 
@@ -113,7 +109,11 @@ def tsaigpt(start:str , model= model, max_new_tokens = 300, num_samples =2, toke
     device_type = 'cuda' if 'cuda' in device else 'cpu' # for later use in torch.autocast
     ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[dtype]
     ctx = nullcontext() if device_type == 'cpu' else torch.amp.autocast(device_type=device_type, dtype=ptdtype)
+    checkpoint_path = Path("out/redpajama/iter-023999-ckpt.pth")
+    config = Config.from_name(model_name)
+    model = GPT(config)
 
+    load_checkpoint(fabric, model, checkpoint_path)
     model.eval()
     model.to(device)
     if compile:
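Taken together, the hunks move model construction and checkpoint loading from module scope into the body of tsaigpt, and drop the model= model default from its signature. Below is a minimal sketch of how the affected portion of tsaigpt reads once this commit is applied; it assumes, as the hunk context suggests, that Config, GPT, load_checkpoint, fabric, model_name, device, dtype, compile and token are all defined earlier in app.py, and it is a reading of the diff rather than the full function.

import torch
from contextlib import nullcontext
from pathlib import Path

def tsaigpt(start: str, max_new_tokens=300, num_samples=2, tokeniser=token):
    # Unchanged context around the hunk: choose an autocast context for the device.
    device_type = 'cuda' if 'cuda' in device else 'cpu'
    ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16,
               'float16': torch.float16}[dtype]
    ctx = nullcontext() if device_type == 'cpu' else torch.amp.autocast(
        device_type=device_type, dtype=ptdtype)

    # Added by this commit: build the model and load the pretrained weights
    # inside the function rather than at import time.
    checkpoint_path = Path("out/redpajama/iter-023999-ckpt.pth")
    config = Config.from_name(model_name)
    model = GPT(config)
    load_checkpoint(fabric, model, checkpoint_path)

    model.eval()
    model.to(device)
    if compile:
        ...  # remainder of the function (compilation and sampling) is not shown in this diff

One consequence of this layout is that every call to tsaigpt rebuilds the GPT model and reloads the checkpoint from disk, which keeps module import light but adds loading cost to each call.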