import streamlit as st
from openai import OpenAI


class FineTunedChatbot:
    """Thin wrapper around a fine-tuned OpenAI chat model."""

    def __init__(self, api_key, model_name):
        # Use an explicit OpenAI client (openai>=1.0 style) rather than
        # setting module-level globals.
        self.client = OpenAI(api_key=api_key)
        self.model_name = model_name

    def get_response(self, query_text):
        try:
            response = self.client.chat.completions.create(
                model=self.model_name,
                messages=[
                    {"role": "system", "content": "You are a German chatbot. You should help the user by answering their question."},
                    {"role": "user", "content": query_text},
                ],
            )
            # Extract the assistant's reply from the first choice
            answer = response.choices[0].message.content
            return {'answer': answer}
        except Exception as e:
            return {'error': f'Error processing query: {e}'}
# Fine-tuned model name
MODEL_NAME = "ft:gpt-4o-mini-2024-07-18:brenin::AkWzHofJ"

# Streamlit app UI
st.title("Chat with Fine-Tuned GPT")
st.markdown("This chatbot uses a fine-tuned GPT model.")

# Initialize chatbot with the API key stored in Streamlit secrets
chatbot = FineTunedChatbot(api_key=st.secrets["OPENAI_API_KEY"], model_name=MODEL_NAME)

# User input field
user_input = st.text_input("Enter your question:")

if user_input:
    # Fetch the response and render either the answer or the error
    response = chatbot.get_response(user_input)
    if 'answer' in response:
        st.markdown(f"**Assistant:** {response['answer']}")
    else:
        st.markdown(f"**Error:** {response['error']}")