Update app.py
app.py CHANGED
@@ -34,7 +34,7 @@ def generate_text(user_prompt):
     # Generate text with the complete prompt and limit the maximum length to 256 tokens
     output = model.generate(
         input_ids=prompt_encoded,
-        max_length=
+        max_length=1550,
         num_beams=1,
         num_return_sequences=1,
         do_sample=True,
@@ -48,9 +48,11 @@ def generate_text(user_prompt):
     generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
 
     # Extract the assistant's response
-    assistant_response = generated_text.split("
+    assistant_response = generated_text.split("<|user|>")[-1]
     assistant_response = assistant_response.replace(f"{user_prompt}", "").strip()
     assistant_response = assistant_response.replace(system, "").strip()
+    assistant_response = assistant_response.replace("<|system|>", "").strip()
+    assistant_response = assistant_response.replace("<|assistant|>", "").strip()
 
     return assistant_response
 
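For context, here is a sketch of how generate_text reads after this commit, assembled only from the lines visible in the hunks above. Everything outside the changed hunks is an assumption for illustration: the imports, the model/tokenizer loading, the checkpoint name, the system prompt, and the chat-style prompt construction are not part of the Space's actual code.

```python
# Sketch only: the generate-and-extract flow after this commit. Names outside
# the diffed hunks (MODEL_ID, system, the prompt template) are assumptions.
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"  # hypothetical checkpoint
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)

system = "You are a helpful assistant."  # placeholder system prompt


def generate_text(user_prompt):
    # Chat-style prompt with explicit role markers; the real app builds
    # prompt_encoded elsewhere, so this template is an assumption.
    prompt = f"<|system|>\n{system}\n<|user|>\n{user_prompt}\n<|assistant|>\n"
    prompt_encoded = tokenizer.encode(prompt, return_tensors="pt")

    # max_length counts prompt + completion tokens, so 1550 leaves headroom
    # for long prompts; max_new_tokens would bound only the completion.
    output = model.generate(
        input_ids=prompt_encoded,
        max_length=1550,
        num_beams=1,
        num_return_sequences=1,
        do_sample=True,
    )

    generated_text = tokenizer.decode(output[0], skip_special_tokens=True)

    # skip_special_tokens only drops tokens registered as special in the
    # tokenizer, so the role markers are also stripped manually as plain text.
    assistant_response = generated_text.split("<|user|>")[-1]
    assistant_response = assistant_response.replace(f"{user_prompt}", "").strip()
    assistant_response = assistant_response.replace(system, "").strip()
    assistant_response = assistant_response.replace("<|system|>", "").strip()
    assistant_response = assistant_response.replace("<|assistant|>", "").strip()

    return assistant_response
```

With do_sample=True and num_beams=1 this is plain sampling, so outputs vary between runs; the manual marker stripping is what keeps the echoed prompt and role tags out of the response the Space displays.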