Chris Ellerson committed
Commit 62fb43b · 1 Parent(s): 3f90b63

initial commit of agent with score of 60
Files changed:
- README.md +10 -0
- app.py +72 -56
- requirements.txt +3 -2
README.md
CHANGED
@@ -121,6 +121,16 @@ When deploying to Hugging Face Spaces, you need to add your API keys as secrets:
 4. For X.AI's API, also set:
    - `XAI_API_BASE` - The API base URL
 
+5. **Important**: If you're using OpenAIServerModel, ensure the requirements.txt includes:
+   ```
+   smolagents[openai]
+   openai
+   ```
+
+   If the space gives an error about OpenAI modules, rebuild the space after updating requirements.txt.
+
+6. After adding all secrets, go to the "Factory" tab in the Space settings and click "Rebuild Space" to apply the changes.
+
 
 ## Usage
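The steps above assume the Space reads these secrets from its environment at startup. A minimal sketch of that lookup, assuming the key secret is named `XAI_API_KEY` (only `XAI_API_BASE` is named above; the default base URL matches the one used in app.py):

```python
import os

# XAI_API_BASE is the secret documented above; XAI_API_KEY is an assumed name for the key secret.
xai_key = os.environ.get("XAI_API_KEY")
api_base = os.environ.get("XAI_API_BASE", "https://api.x.ai/v1")  # default public endpoint

if xai_key:
    print(f"X.AI key found, using API base {api_base}")
else:
    print("XAI_API_KEY not set; the agent will fall back to another provider.")
```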
app.py
CHANGED
@@ -63,78 +63,94 @@ class BasicAgent:
 
             print(f"Agent config - Model Type: {model_type}, Model ID: {model_id}")
 
-            if xai_key:
-                # Use X.AI API with OpenAIServerModel
-                api_base = os.environ.get("XAI_API_BASE", "https://api.x.ai/v1")
-                self.gaia_agent = GAIAAgent(
-                    model_type="OpenAIServerModel",
-                    model_id="grok-3-latest", # X.AI's model
-                    api_key=xai_key,
-                    api_base=api_base,
-                    temperature=temperature,
-                    executor_type="local",
-                    verbose=verbose
-                )
-                print(f"Using OpenAIServerModel with X.AI API at {api_base}")
-            elif model_type == "HfApiModel" and hf_token:
-                # Use Hugging Face API
-                self.gaia_agent = GAIAAgent(
-                    model_type="HfApiModel",
-                    model_id=model_id,
-                    api_key=hf_token,
-                    temperature=temperature,
-                    executor_type="local",
-                    verbose=verbose
-                )
-                print(f"Using HfApiModel with model_id: {model_id}")
-            elif openai_key:
-                # Default to OpenAI API
-                api_base = os.environ.get("AGENT_API_BASE")
-                kwargs = {
-                    "model_type": "OpenAIServerModel",
-                    "model_id": model_id,
-                    "api_key": openai_key,
-                    "temperature": temperature,
-                    "executor_type": "local",
-                    "verbose": verbose
-                }
-                if api_base:
-                    kwargs["api_base"] = api_base
-                    print(f"Using custom API base: {api_base}")
-
-                self.gaia_agent = GAIAAgent(**kwargs)
-                print(f"Using OpenAIServerModel with model_id: {model_id}")
-            else:
-                # Fallback to using whatever token we have
-                print("WARNING: Using fallback initialization with available token")
-                if hf_token:
-                    self.gaia_agent = GAIAAgent(
-                        model_type="
-                        model_id="
-                        api_key=
-                        temperature=temperature,
-                        executor_type="local",
-                        verbose=verbose
-                    )
-                elif openai_key:
-                    self.gaia_agent = GAIAAgent(
-                        model_type="
-                        model_id=
-                        api_key=
-                        temperature=temperature,
-                        executor_type="local",
-                        verbose=verbose
-                    )
-                else:
-                    self.gaia_agent = GAIAAgent(
-                        model_type="
-                        model_id="
-                        api_key=
-                        api_base=os.environ.get("XAI_API_BASE", "https://api.x.ai/v1"),
-                        temperature=temperature,
-                        executor_type="local",
-                        verbose=verbose
-                    )
+            try:
+                if xai_key:
+                    # Use X.AI API with OpenAIServerModel
+                    api_base = os.environ.get("XAI_API_BASE", "https://api.x.ai/v1")
+                    self.gaia_agent = GAIAAgent(
+                        model_type="OpenAIServerModel",
+                        model_id="grok-3-latest", # X.AI's model
+                        api_key=xai_key,
+                        api_base=api_base,
+                        temperature=temperature,
+                        executor_type="local",
+                        verbose=verbose
+                    )
+                    print(f"Using OpenAIServerModel with X.AI API at {api_base}")
+                elif model_type == "HfApiModel" and hf_token:
+                    # Use Hugging Face API
+                    self.gaia_agent = GAIAAgent(
+                        model_type="HfApiModel",
+                        model_id=model_id,
+                        api_key=hf_token,
+                        temperature=temperature,
+                        executor_type="local",
+                        verbose=verbose
+                    )
+                    print(f"Using HfApiModel with model_id: {model_id}")
+                elif openai_key:
+                    # Default to OpenAI API
+                    api_base = os.environ.get("AGENT_API_BASE")
+                    kwargs = {
+                        "model_type": "OpenAIServerModel",
+                        "model_id": model_id,
+                        "api_key": openai_key,
+                        "temperature": temperature,
+                        "executor_type": "local",
+                        "verbose": verbose
+                    }
+                    if api_base:
+                        kwargs["api_base"] = api_base
+                        print(f"Using custom API base: {api_base}")
+
+                    self.gaia_agent = GAIAAgent(**kwargs)
+                    print(f"Using OpenAIServerModel with model_id: {model_id}")
+                else:
+                    # Fallback to using whatever token we have
+                    print("WARNING: Using fallback initialization with available token")
+                    if hf_token:
+                        self.gaia_agent = GAIAAgent(
+                            model_type="HfApiModel",
+                            model_id="mistralai/Mistral-7B-Instruct-v0.2",
+                            api_key=hf_token,
+                            temperature=temperature,
+                            executor_type="local",
+                            verbose=verbose
+                        )
+                    elif openai_key:
+                        self.gaia_agent = GAIAAgent(
+                            model_type="OpenAIServerModel",
+                            model_id="gpt-3.5-turbo",
+                            api_key=openai_key,
+                            temperature=temperature,
+                            executor_type="local",
+                            verbose=verbose
+                        )
+                    else:
+                        self.gaia_agent = GAIAAgent(
+                            model_type="OpenAIServerModel",
+                            model_id="grok-3-latest",
+                            api_key=xai_key,
+                            api_base=os.environ.get("XAI_API_BASE", "https://api.x.ai/v1"),
+                            temperature=temperature,
+                            executor_type="local",
+                            verbose=verbose
+                        )
+            except ImportError as ie:
+                # Handle OpenAI module errors specifically
+                if "openai" in str(ie).lower() and hf_token:
+                    print(f"OpenAI module error: {ie}. Falling back to HfApiModel.")
+                    self.gaia_agent = GAIAAgent(
+                        model_type="HfApiModel",
+                        model_id="mistralai/Mistral-7B-Instruct-v0.2",
+                        api_key=hf_token,
+                        temperature=temperature,
+                        executor_type="local",
+                        verbose=verbose
+                    )
+                    print(f"Using HfApiModel with model_id: mistralai/Mistral-7B-Instruct-v0.2 (fallback)")
+                else:
+                    raise
         else:
             # No API keys available, log the error
             print("ERROR: No API keys found. Please set at least one of these environment variables:")
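The app.py change above wraps model selection in try/except ImportError so that a missing `openai` extra falls back to HfApiModel instead of crashing the Space. Below is a standalone sketch of that pattern: `GAIAAgent` is replaced by a plain callable so the sketch runs on its own, the constructor keywords and model IDs are taken from the diff, and the helper names `pick_model_config` and `init_agent` are illustrative only.

```python
import os

def pick_model_config(xai_key=None, hf_token=None, openai_key=None, model_id="gpt-3.5-turbo"):
    """Return constructor kwargs in roughly the same priority order as app.py: X.AI, then HF, then OpenAI."""
    if xai_key:
        return {"model_type": "OpenAIServerModel",
                "model_id": "grok-3-latest",
                "api_key": xai_key,
                "api_base": os.environ.get("XAI_API_BASE", "https://api.x.ai/v1")}
    if hf_token:
        return {"model_type": "HfApiModel",
                "model_id": "mistralai/Mistral-7B-Instruct-v0.2",
                "api_key": hf_token}
    if openai_key:
        return {"model_type": "OpenAIServerModel",
                "model_id": model_id,
                "api_key": openai_key}
    raise RuntimeError("No API keys found")

def init_agent(make_agent, xai_key=None, hf_token=None, openai_key=None):
    """Build the agent, falling back to HfApiModel if the OpenAI extra is missing."""
    config = pick_model_config(xai_key=xai_key, hf_token=hf_token, openai_key=openai_key)
    try:
        return make_agent(**config)
    except ImportError as ie:
        # Mirror the commit: an ImportError mentioning openai retries with HfApiModel.
        if "openai" in str(ie).lower() and hf_token:
            return make_agent(model_type="HfApiModel",
                              model_id="mistralai/Mistral-7B-Instruct-v0.2",
                              api_key=hf_token)
        raise

# Example usage: `dict` stands in for GAIAAgent, and the token is a dummy value.
print(init_agent(dict, hf_token="hf_dummy_token"))
```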
requirements.txt
CHANGED
@@ -1,6 +1,7 @@
 gradio
 requests
-smolagents
+smolagents[openai]
 python-dotenv
 pandas
-numpy
+numpy
+openai
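Since the README ties OpenAI-module errors to a Space rebuild, a small optional startup check (not part of this commit) can surface the missing extra early:

```python
import importlib.util

def openai_extra_available() -> bool:
    """True if the openai package pulled in by smolagents[openai] is importable."""
    return importlib.util.find_spec("openai") is not None

if not openai_extra_available():
    print("openai is not installed: add smolagents[openai] and openai to requirements.txt, "
          "then rebuild the Space (Settings -> Factory -> Rebuild Space).")
```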