"""Run each benchmark question through the agent graph and save the answers.

Reads questions from ``questions.json`` (each entry is expected to have
"task_id", "question" and "file_name" keys -- inferred from usage below),
invokes the LangGraph ``graph`` once per question with Langfuse tracing,
and writes the collected answers to ``final_answers_gpt_v2.json``.
A failed invocation is recorded with an "ERROR" placeholder so the output
stays aligned one-to-one with the input question set.
"""
from graph import graph
from langchain_core.messages import SystemMessage, HumanMessage
from langfuse.langchain import CallbackHandler
import json
from prompts import evaluation_prompt, production_prompt
from tqdm import tqdm

langfuse_handler = CallbackHandler()

# Load the benchmark questions (JSON is UTF-8 per spec; be explicit).
with open("questions.json", "r", encoding="utf-8") as file:
    questions = json.load(file)

final_answers = []
for question in tqdm(questions):
    messages = [
        SystemMessage(production_prompt),
        HumanMessage(question["question"]),
    ]
    try:
        response = graph.invoke(
            input={
                'messages': messages,
                'filename': question["file_name"],
                'file_extension': "",
            },
            config={
                'callbacks': [langfuse_handler],
                'recursion_limit': 10,
                'metadata': {"langfuse_tags": ["gpt"]},
            },
        )
        answer = {
            "task_id": question["task_id"],
            "submitted_answer": response["messages"][-1].content,
        }
    except Exception as e:
        # Record a placeholder so one failing task doesn't abort the whole
        # run; include the task_id so the failure can be found and retried.
        answer = {
            "task_id": question["task_id"],
            "submitted_answer": "ERROR",
        }
        print(f"Task {question['task_id']} failed: {e}")
    # Append here rather than in a `finally`: a `finally` would re-append the
    # previous iteration's answer (or raise NameError on the first iteration)
    # if a non-Exception such as KeyboardInterrupt escaped graph.invoke.
    final_answers.append(answer)

with open("final_answers_gpt_v2.json", "w", encoding="utf-8") as file:
    json.dump(final_answers, file, indent=4)