Update app.py
app.py
CHANGED
@@ -7,6 +7,7 @@ API_URL = "https://api-inference.huggingface.co/models/"
 client = InferenceClient(
     "mistralai/Mistral-7B-Instruct-v0.1"
 )
+
 # Flag to track whether initial greeting has been displayed
 initial_greeting_displayed = False
 
@@ -23,6 +24,13 @@ def format_prompt(message, history):
     prompt += f"[INST] {message} [/INST]"
     return prompt
 
+def generate_initial_prompt():
+    initial_prompt = format_prompt("", [])
+    for output in generate(initial_prompt, []):
+        print(output, end='')
+
+generate_initial_prompt() # Call the function to display initial greeting
+
 
 def generate(prompt, history, temperature=0.9, max_new_tokens=2048, top_p=0.95, repetition_penalty=1.0):
     temperature = float(temperature)
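
Note: in the updated file, generate_initial_prompt() runs at module level before def generate(...) is defined a few lines further down, so executing app.py as committed would raise NameError: name 'generate' is not defined (unless generate is also defined earlier in the file, which this diff does not show). Below is a minimal sketch of the intended flow with the call moved after the definition. The bodies of format_prompt and generate are not part of this diff, so the versions here are assumptions modeled on the common Mistral-7B-Instruct streaming template, not the Space's actual code.

# Sketch only: the format_prompt and generate bodies are assumed (not shown
# in the diff); the streaming call uses huggingface_hub's
# InferenceClient.text_generation.
from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.1")

def format_prompt(message, history):
    # Simplified stand-in for the existing helper: wrap each turn in
    # Mistral instruction tags.
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST] {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt

def generate(prompt, history, temperature=0.9, max_new_tokens=2048,
             top_p=0.95, repetition_penalty=1.0):
    # Assumed body: stream tokens from the hosted model and yield the
    # accumulated text, which is how the new helper's loop consumes it.
    stream = client.text_generation(
        prompt,
        temperature=float(temperature),
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        stream=True,
        details=True,
        return_full_text=False,
    )
    output = ""
    for response in stream:
        output += response.token.text
        yield output

def generate_initial_prompt():
    # The helper this commit adds: request a response to an empty prompt.
    # Because generate yields cumulative text, each print repeats the text
    # produced so far.
    initial_prompt = format_prompt("", [])
    for output in generate(initial_prompt, []):
        print(output, end='')

# Calling the helper only after generate() is defined avoids the NameError.
generate_initial_prompt()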
|