from fastapi import FastAPI, Depends

from modules.shared import *
from modules.query_api import *
from modules.utils import *
from llms.llama import *

app = FastAPI()


@app.get("/")
def greet_json():
    """Root endpoint: simple status message while the Space is under construction."""
    return {"msg": "Space under construction"}
@app.post("/eurostat/fetch-dataflows")
async def fetch_eurostat_dataflows(api_key: str = Depends(authenticate)) -> dict:
output_file = f"{EUROSTAT_DATA_PATH}/dataflows.jsonl"
return handle_fetch_and_parse(api_key, EUROSTAT_API_DATAFLOWS, output_file)
@app.post("/istat/fetch-dataflows")
async def fetch_istat_dataflows(api_key: str = Depends(authenticate)) -> dict:
output_file = f"{ISTAT_DATA_PATH}/dataflows.jsonl"
return handle_fetch_and_parse(api_key, ISTAT_API_DATAFLOWS, output_file)
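
# Illustrative client call (a sketch, not part of the app): triggers the
# Eurostat dataflow fetch; the ISTAT endpoint works the same way. How the
# API key must be supplied depends on how `authenticate` is defined in the
# imported modules; the "X-API-Key" header below is an assumption.
#
#   import requests
#
#   resp = requests.post(
#       "http://localhost:8000/eurostat/fetch-dataflows",
#       headers={"X-API-Key": "<your-api-key>"},
#   )
#   print(resp.json())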
@app.post("/istat/prompt")
async def answer_user_query(prompt: str, api_key: str = Depends(authenticate)) -> dict:
"""
Endpoint to process a user's query using ISTAT data.
Args:
prompt (str): The user's question or prompt.
api_key (str): API key for authentication, injected using the `authenticate` dependency.
Returns:
dict: Final data based on the user's query.
"""
return process_user_query(prompt)
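
# Illustrative client call (a sketch, not part of the app): because `prompt`
# is declared as a bare `str` on a POST route, FastAPI treats it as a query
# parameter. The API-key header name is again an assumption; adapt it to
# whatever `authenticate` actually expects.
#
#   import requests
#
#   resp = requests.post(
#       "http://localhost:8000/istat/prompt",
#       params={"prompt": "Resident population of Italy by region"},
#       headers={"X-API-Key": "<your-api-key>"},
#   )
#   print(resp.json())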