Update app.py
app.py CHANGED

@@ -27,23 +27,23 @@ def wingpt(model_name, sl, tl, input_text):
         device_map="auto"
     )
     tokenizer = AutoTokenizer.from_pretrained(model_name)
-
+    input_json = '{"input_text": input_text}'
     messages = [
-        {"role": "system", "content": f"Translate this
+        {"role": "system", "content": f"Translate this to {tl} language"},
        {"role": "user", "content": input_text}
     ]

     text = tokenizer.apply_chat_template(
         messages,
         tokenize=False,
-        add_generation_prompt=
+        add_generation_prompt=True
     )
     model_inputs = tokenizer([text], return_tensors="pt").to(model.device)

     generated_ids = model.generate(
         **model_inputs,
         max_new_tokens=512,
-        temperature=0
+        temperature=0.1
     )

     generated_ids = [
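For context, this is roughly how the wingpt helper reads once the hunk above is applied. It is a minimal sketch: the start of the from_pretrained call and the decode tail after "generated_ids = [" are not shown in the diff and are assumed to follow the usual transformers chat-template pattern.

from transformers import AutoModelForCausalLM, AutoTokenizer

def wingpt(model_name, sl, tl, input_text):
    # Load the model; only device_map="auto" is visible in the hunk, the other
    # from_pretrained arguments here are assumptions.
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        torch_dtype="auto",
        device_map="auto"
    )
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    # Added by this commit; note it is a plain string literal, not an f-string,
    # and it is not referenced again in the hunk.
    input_json = '{"input_text": input_text}'
    # sl (source language) is not used in the prompt; only the target language tl is.
    messages = [
        {"role": "system", "content": f"Translate this to {tl} language"},
        {"role": "user", "content": input_text}
    ]
    text = tokenizer.apply_chat_template(
        messages,
        tokenize=False,
        add_generation_prompt=True
    )
    model_inputs = tokenizer([text], return_tensors="pt").to(model.device)
    # temperature=0.1 is what the commit sets; without do_sample=True transformers
    # decodes greedily and may warn that temperature is unused.
    generated_ids = model.generate(
        **model_inputs,
        max_new_tokens=512,
        temperature=0.1
    )
    # Assumed tail: drop the prompt tokens and decode only the generated continuation.
    generated_ids = [
        output_ids[len(input_ids):]
        for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
    ]
    return tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]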
@@ -147,7 +147,7 @@ if submit_button:
         output_ids = model.generate(input_ids)
         # Decode the translated text
         translated_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
-
+    elif 'Unbabel' in model_name:
         pipe = pipeline("text-generation", model=model_name, torch_dtype=torch.bfloat16, device_map="auto")
         # We use the tokenizer’s chat template to format each message - see https://huggingface.co/docs/transformers/main/en/chat_templating
         messages = [{"role": "user",
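The messages line above is cut off in the diff. A standalone sketch of how an Unbabel/TowerInstruct-style branch is usually written, with the prompt wording, generation settings, and the helper name translate_with_tower as assumptions:

import torch
from transformers import pipeline

# Hypothetical standalone version of the Unbabel branch; the prompt wording
# follows the TowerInstruct model-card pattern and is an assumption here.
def translate_with_tower(model_name, src_lang, tgt_lang, input_text):
    pipe = pipeline("text-generation", model=model_name, torch_dtype=torch.bfloat16, device_map="auto")
    messages = [{
        "role": "user",
        "content": f"Translate the following text from {src_lang} into {tgt_lang}.\n"
                   f"{src_lang}: {input_text}\n{tgt_lang}:"
    }]
    # Format with the tokenizer's chat template, as the comment in app.py notes.
    prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    # return_full_text=False keeps only the model's continuation, not the prompt.
    outputs = pipe(prompt, max_new_tokens=256, do_sample=False, return_full_text=False)
    return outputs[0]["generated_text"].strip()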
@@ -159,7 +159,7 @@ if submit_button:
         if start_marker in translated_text:
             translated_text = translated_text.split(start_marker)[1].strip()
         translated_text = translated_text.replace('Answer:', '').strip() if translated_text.startswith('Answer:') else translated_text
-
+    elif 'Argos' in model_name:
         import argostranslate.translate
         # Translate
         try:
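Argos Translate needs a language package installed before argostranslate.translate can run, which is what the try/except around this branch guards. A minimal standalone sketch, assuming from_code/to_code hold ISO language codes and that the package lookup mirrors the app's fallback message:

import argostranslate.package
import argostranslate.translate

# Hypothetical standalone version of the Argos branch; the package lookup and
# the ISO codes are assumptions, only the fallback message mirrors the diff.
def translate_with_argos(input_text, from_code, to_code):
    argostranslate.package.update_package_index()
    available = argostranslate.package.get_available_packages()
    pkg = next(
        (p for p in available if p.from_code == from_code and p.to_code == to_code),
        None,
    )
    if pkg is None:
        # Corresponds to the "No Argos model for ..." message in app.py
        return f"No Argos model for {from_code} to {to_code}. Try other model or languages combination!"
    argostranslate.package.install_from_path(pkg.download())
    return argostranslate.translate.translate(input_text, from_code, to_code)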
@@ -169,7 +169,7 @@ if submit_button:
             translated_text = f"No Argos model for {sselected_language} to {tselected_language}. Try other model or languages combination!"
         except Exception as error:
             translated_text = error
-
+    elif model_name == "winninghealth/WiNGPT-Babel-2":
         translated_text = wingpt(model_name, sselected_language, tselected_language, input_text)

     # Display the translated text
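With the new elif in place, the WiNGPT-Babel-2 branch is only reached when that exact model name is selected, and it delegates to the wingpt helper updated in the first hunk. A small usage example, with illustrative argument values:

# Example call matching the dispatch line above; the language names and text are illustrative.
translated_text = wingpt(
    "winninghealth/WiNGPT-Babel-2",
    "English",                    # sl: source language (currently unused in the prompt)
    "French",                     # tl: target language inserted into the system message
    "Hello, how are you today?"
)
print(translated_text)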