Spaces:
Runtime error
Runtime error
Commit
·
67aa790
1
Parent(s):
2b256e9
fix
Browse files
app.py
CHANGED
|
@@ -8,6 +8,7 @@ from apscheduler.schedulers.background import BackgroundScheduler
|
|
| 8 |
from agents import DeepResearchAgent, get_llms
|
| 9 |
import threading
|
| 10 |
from queue import Queue
|
|
|
|
| 11 |
|
| 12 |
lock = threading.Lock()
|
| 13 |
app = FastAPI()
|
|
@@ -19,6 +20,8 @@ reply_count = 0
|
|
| 19 |
# 启动时设置计数器重置
|
| 20 |
last_reset_time = datetime.now()
|
| 21 |
|
|
|
|
|
|
|
| 22 |
# HTML模板
|
| 23 |
html_template = """
|
| 24 |
<!DOCTYPE html>
|
|
@@ -327,41 +330,41 @@ def form_get():
|
|
| 327 |
@app.post("/", response_class=HTMLResponse)
|
| 328 |
def form_post(topic: str = Form(...)):
|
| 329 |
global reply_count
|
|
|
|
| 330 |
start_time = time.time()
|
| 331 |
|
| 332 |
-
|
| 333 |
-
|
| 334 |
-
|
| 335 |
-
|
| 336 |
-
# 等待轮到当前请求
|
| 337 |
-
while request_queue.queue[0] != request_id:
|
| 338 |
-
time.sleep(10) # 等待一段时间后再检查
|
| 339 |
-
# 根据自己的request_id,判断前面还有多少人在排队
|
| 340 |
-
len = 0
|
| 341 |
-
for i in request_queue.queue:
|
| 342 |
-
if i == request_id:
|
| 343 |
-
break
|
| 344 |
-
len += 1
|
| 345 |
-
return Template(html_template).render(idea="", error=f"The server is busy. Please try again later.There are {len}", reply_count=reply_count)
|
| 346 |
|
| 347 |
with lock:
|
|
|
|
|
|
|
| 348 |
start_time = time.time()
|
| 349 |
# 检查是否超过每日最大回复次数
|
| 350 |
if reply_count >= MAX_REPLIES_PER_DAY:
|
| 351 |
error_message = "Today's maximum number of replies has been reached. Please try again tomorrow."
|
|
|
|
|
|
|
| 352 |
return Template(html_template).render(idea="", error=error_message, reply_count=reply_count)
|
| 353 |
try:
|
| 354 |
main_llm, cheap_llm = get_llms()
|
| 355 |
deep_research_agent = DeepResearchAgent(llm=main_llm, cheap_llm=cheap_llm, improve_cnt=1, max_chain_length=5, min_chain_length=3, max_chain_numbers=1)
|
| 356 |
print(f"begin to generate idea of topic {topic}")
|
| 357 |
idea, related_experiments, entities, idea_chain, ideas, trend, future, human, year = deep_research_agent.generate_idea_with_chain(topic)
|
|
|
|
|
|
|
| 358 |
idea_md = markdown.markdown(idea)
|
| 359 |
# 更新每日回复次数
|
| 360 |
reply_count += 1
|
| 361 |
end_time = time.time()
|
| 362 |
time_taken = round(end_time - start_time, 2)
|
|
|
|
|
|
|
| 363 |
return Template(html_template).render(idea=idea_md, error=None, reply_count=reply_count, time_taken=time_taken)
|
| 364 |
except Exception as e:
|
| 365 |
end_time = time.time()
|
| 366 |
time_taken = round(end_time - start_time, 2)
|
|
|
|
|
|
|
| 367 |
return Template(html_template).render(idea="", error=str(e), reply_count=reply_count, time_taken=time_taken)
|
|
|
|
| 8 |
from agents import DeepResearchAgent, get_llms
|
| 9 |
import threading
|
| 10 |
from queue import Queue
|
| 11 |
+
import logging
|
| 12 |
|
| 13 |
lock = threading.Lock()
|
| 14 |
app = FastAPI()
|
|
|
|
| 20 |
# 启动时设置计数器重置
|
| 21 |
last_reset_time = datetime.now()
|
| 22 |
|
| 23 |
+
is_processing = False
|
| 24 |
+
|
| 25 |
# HTML模板
|
| 26 |
html_template = """
|
| 27 |
<!DOCTYPE html>
|
|
|
|
@app.post("/", response_class=HTMLResponse)
def form_post(topic: str = Form(...)):
    """Generate a research idea for *topic* and render the result page.

    Work is serialized: a module-level ``lock`` ensures only one request is
    processed at a time, and the ``is_processing`` flag lets queued callers
    be rejected quickly with a "busy" page instead of blocking. A daily
    reply cap (``MAX_REPLIES_PER_DAY``) is enforced while holding the lock.

    Returns the rendered HTML template in every case (success, busy,
    cap reached, or generation failure).
    """
    global reply_count
    global is_processing
    start_time = time.time()

    # Best-effort busy check. NOTE: this read is intentionally done without
    # the lock (TOCTOU is acceptable here) — it only short-circuits obvious
    # contention; the ``with lock`` below is the actual serialization point.
    if is_processing:
        error = "The server is processing another request. Please try again later."
        return Template(html_template).render(idea="", error=error, reply_count=reply_count)

    with lock:
        is_processing = True
        try:
            # Lazy %-style args avoid eager f-string formatting in logging calls.
            logging.info("Processing request for topic: %s", topic)
            # Restart the timer now that we actually hold the lock, so queue
            # wait time is not billed to the generation step.
            start_time = time.time()

            # Enforce the daily reply cap.
            if reply_count >= MAX_REPLIES_PER_DAY:
                error_message = "Today's maximum number of replies has been reached. Please try again tomorrow."
                logging.info("Today's maximum number of replies has been reached. Please try again tomorrow.")
                return Template(html_template).render(idea="", error=error_message, reply_count=reply_count)

            try:
                main_llm, cheap_llm = get_llms()
                deep_research_agent = DeepResearchAgent(llm=main_llm, cheap_llm=cheap_llm, improve_cnt=1, max_chain_length=5, min_chain_length=3, max_chain_numbers=1)
                print(f"begin to generate idea of topic {topic}")
                idea, related_experiments, entities, idea_chain, ideas, trend, future, human, year = deep_research_agent.generate_idea_with_chain(topic)

                # Preserve line breaks when the idea text is rendered as HTML.
                idea = idea.replace("\n", "<br>")
                idea_md = markdown.markdown(idea)
                # Count this successful reply against the daily cap.
                reply_count += 1
                time_taken = round(time.time() - start_time, 2)
                logging.info("Successfully generated idea for topic: %s", topic)
                return Template(html_template).render(idea=idea_md, error=None, reply_count=reply_count, time_taken=time_taken)
            except Exception as e:
                # Generation failed: report the error on the page but keep
                # the server alive for the next request.
                time_taken = round(time.time() - start_time, 2)
                logging.error("Failed to generate idea for topic: %s, Error: %s", topic, str(e))
                return Template(html_template).render(idea="", error=str(e), reply_count=reply_count, time_taken=time_taken)
        finally:
            # BUGFIX: the original cleared the flag by hand on each exit path,
            # so an exception raised outside the inner try (e.g. while
            # rendering the cap-reached template) left is_processing stuck at
            # True and the server permanently "busy". finally guarantees the
            # flag is always released.
            is_processing = False
|