Spaces:
Sleeping
Sleeping
Upload folder using huggingface_hub
Browse files- .env.example +2 -0
- .gitattributes +1 -0
- .gitignore +175 -0
- .gradio/certificate.pem +31 -0
- LICENSE +21 -0
- README.md +68 -7
- TODO.md +12 -0
- activate-env.sh +66 -0
- environment.yml +15 -0
- logo.png +3 -0
- main.py +218 -0
- prompts/system.txt +66 -0
- tools.py +450 -0
- utils/logger.py +41 -0
.env.example
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
GOOGLE_MAPS_API_KEY=your_api_key_here
|
| 2 |
+
ANTHROPIC_API_KEY=your_api_key_here
|
.gitattributes
CHANGED
|
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
logo.png filter=lfs diff=lfs merge=lfs -text
|
.gitignore
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Byte-compiled / optimized / DLL files
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[cod]
|
| 4 |
+
*$py.class
|
| 5 |
+
|
| 6 |
+
# C extensions
|
| 7 |
+
*.so
|
| 8 |
+
|
| 9 |
+
# Distribution / packaging
|
| 10 |
+
.Python
|
| 11 |
+
build/
|
| 12 |
+
develop-eggs/
|
| 13 |
+
dist/
|
| 14 |
+
downloads/
|
| 15 |
+
eggs/
|
| 16 |
+
.eggs/
|
| 17 |
+
lib/
|
| 18 |
+
lib64/
|
| 19 |
+
parts/
|
| 20 |
+
sdist/
|
| 21 |
+
var/
|
| 22 |
+
wheels/
|
| 23 |
+
share/python-wheels/
|
| 24 |
+
*.egg-info/
|
| 25 |
+
.installed.cfg
|
| 26 |
+
*.egg
|
| 27 |
+
MANIFEST
|
| 28 |
+
|
| 29 |
+
# PyInstaller
|
| 30 |
+
# Usually these files are written by a python script from a template
|
| 31 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
| 32 |
+
*.manifest
|
| 33 |
+
*.spec
|
| 34 |
+
|
| 35 |
+
# Installer logs
|
| 36 |
+
pip-log.txt
|
| 37 |
+
pip-delete-this-directory.txt
|
| 38 |
+
|
| 39 |
+
# Unit test / coverage reports
|
| 40 |
+
htmlcov/
|
| 41 |
+
.tox/
|
| 42 |
+
.nox/
|
| 43 |
+
.coverage
|
| 44 |
+
.coverage.*
|
| 45 |
+
.cache
|
| 46 |
+
nosetests.xml
|
| 47 |
+
coverage.xml
|
| 48 |
+
*.cover
|
| 49 |
+
*.py,cover
|
| 50 |
+
.hypothesis/
|
| 51 |
+
.pytest_cache/
|
| 52 |
+
cover/
|
| 53 |
+
|
| 54 |
+
# Translations
|
| 55 |
+
*.mo
|
| 56 |
+
*.pot
|
| 57 |
+
|
| 58 |
+
# Django stuff:
|
| 59 |
+
*.log
|
| 60 |
+
local_settings.py
|
| 61 |
+
db.sqlite3
|
| 62 |
+
db.sqlite3-journal
|
| 63 |
+
|
| 64 |
+
# Flask stuff:
|
| 65 |
+
instance/
|
| 66 |
+
.webassets-cache
|
| 67 |
+
|
| 68 |
+
# Scrapy stuff:
|
| 69 |
+
.scrapy
|
| 70 |
+
|
| 71 |
+
# Sphinx documentation
|
| 72 |
+
docs/_build/
|
| 73 |
+
|
| 74 |
+
# PyBuilder
|
| 75 |
+
.pybuilder/
|
| 76 |
+
target/
|
| 77 |
+
|
| 78 |
+
# Jupyter Notebook
|
| 79 |
+
.ipynb_checkpoints
|
| 80 |
+
|
| 81 |
+
# IPython
|
| 82 |
+
profile_default/
|
| 83 |
+
ipython_config.py
|
| 84 |
+
|
| 85 |
+
# pyenv
|
| 86 |
+
# For a library or package, you might want to ignore these files since the code is
|
| 87 |
+
# intended to run in multiple environments; otherwise, check them in:
|
| 88 |
+
# .python-version
|
| 89 |
+
|
| 90 |
+
# pipenv
|
| 91 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
| 92 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
| 93 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
| 94 |
+
# install all needed dependencies.
|
| 95 |
+
#Pipfile.lock
|
| 96 |
+
|
| 97 |
+
# UV
|
| 98 |
+
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
|
| 99 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
| 100 |
+
# commonly ignored for libraries.
|
| 101 |
+
#uv.lock
|
| 102 |
+
|
| 103 |
+
# poetry
|
| 104 |
+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
| 105 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
| 106 |
+
# commonly ignored for libraries.
|
| 107 |
+
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
| 108 |
+
#poetry.lock
|
| 109 |
+
|
| 110 |
+
# pdm
|
| 111 |
+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
| 112 |
+
#pdm.lock
|
| 113 |
+
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
| 114 |
+
# in version control.
|
| 115 |
+
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
|
| 116 |
+
.pdm.toml
|
| 117 |
+
.pdm-python
|
| 118 |
+
.pdm-build/
|
| 119 |
+
|
| 120 |
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
| 121 |
+
__pypackages__/
|
| 122 |
+
|
| 123 |
+
# Celery stuff
|
| 124 |
+
celerybeat-schedule
|
| 125 |
+
celerybeat.pid
|
| 126 |
+
|
| 127 |
+
# SageMath parsed files
|
| 128 |
+
*.sage.py
|
| 129 |
+
|
| 130 |
+
# Environments
|
| 131 |
+
.env
|
| 132 |
+
.venv
|
| 133 |
+
env/
|
| 134 |
+
venv/
|
| 135 |
+
ENV/
|
| 136 |
+
env.bak/
|
| 137 |
+
venv.bak/
|
| 138 |
+
|
| 139 |
+
# Spyder project settings
|
| 140 |
+
.spyderproject
|
| 141 |
+
.spyproject
|
| 142 |
+
|
| 143 |
+
# Rope project settings
|
| 144 |
+
.ropeproject
|
| 145 |
+
|
| 146 |
+
# mkdocs documentation
|
| 147 |
+
/site
|
| 148 |
+
|
| 149 |
+
# mypy
|
| 150 |
+
.mypy_cache/
|
| 151 |
+
.dmypy.json
|
| 152 |
+
dmypy.json
|
| 153 |
+
|
| 154 |
+
# Pyre type checker
|
| 155 |
+
.pyre/
|
| 156 |
+
|
| 157 |
+
# pytype static type analyzer
|
| 158 |
+
.pytype/
|
| 159 |
+
|
| 160 |
+
# Cython debug symbols
|
| 161 |
+
cython_debug/
|
| 162 |
+
|
| 163 |
+
# PyCharm
|
| 164 |
+
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
| 165 |
+
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
| 166 |
+
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
| 167 |
+
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
| 168 |
+
#.idea/
|
| 169 |
+
|
| 170 |
+
# PyPI configuration file
|
| 171 |
+
.pypirc
|
| 172 |
+
|
| 173 |
+
# Logs
|
| 174 |
+
logs/
|
| 175 |
+
*.log
|
.gradio/certificate.pem
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
-----BEGIN CERTIFICATE-----
|
| 2 |
+
MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
|
| 3 |
+
TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
|
| 4 |
+
cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
|
| 5 |
+
WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
|
| 6 |
+
ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
|
| 7 |
+
MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
|
| 8 |
+
h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
|
| 9 |
+
0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
|
| 10 |
+
A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
|
| 11 |
+
T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
|
| 12 |
+
B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
|
| 13 |
+
B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
|
| 14 |
+
KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
|
| 15 |
+
OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
|
| 16 |
+
jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
|
| 17 |
+
qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
|
| 18 |
+
rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
|
| 19 |
+
HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
|
| 20 |
+
hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
|
| 21 |
+
ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
|
| 22 |
+
3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
|
| 23 |
+
NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
|
| 24 |
+
ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
|
| 25 |
+
TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
|
| 26 |
+
jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
|
| 27 |
+
oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
|
| 28 |
+
4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
|
| 29 |
+
mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
|
| 30 |
+
emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
|
| 31 |
+
-----END CERTIFICATE-----
|
LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
MIT License
|
| 2 |
+
|
| 3 |
+
Copyright (c) 2025 Tiago Silva
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 6 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 7 |
+
in the Software without restriction, including without limitation the rights
|
| 8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 9 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 10 |
+
furnished to do so, subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included in all
|
| 13 |
+
copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 21 |
+
SOFTWARE.
|
README.md
CHANGED
|
@@ -1,12 +1,73 @@
|
|
| 1 |
---
|
| 2 |
-
title:
|
| 3 |
-
|
| 4 |
-
colorFrom: blue
|
| 5 |
-
colorTo: green
|
| 6 |
sdk: gradio
|
| 7 |
sdk_version: 5.12.0
|
| 8 |
-
app_file: app.py
|
| 9 |
-
pinned: false
|
| 10 |
---
|
|
|
|
| 11 |
|
| 12 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
---
|
| 2 |
+
title: botty-mcbotface
|
| 3 |
+
app_file: main.py
|
|
|
|
|
|
|
| 4 |
sdk: gradio
|
| 5 |
sdk_version: 5.12.0
|
|
|
|
|
|
|
| 6 |
---
|
| 7 |
+
# 🤖 Botty McBotface
|
| 8 |
|
| 9 |
+
<p align="center">
|
| 10 |
+
<img src="logo.png" alt="Botty McBotface Logo" width="400"/>
|
| 11 |
+
</p>
|
| 12 |
+
|
| 13 |
+
> 💬 Just what you needed, another AI chatbot trying to remember things and use tools without breaking everything 🤦
|
| 14 |
+
|
| 15 |
+
## ✨ Features
|
| 16 |
+
|
| 17 |
+
- 🧠 Long-term memory system
|
| 18 |
+
- 🛠️ Integrated tools support
|
| 19 |
+
- 🌍 Location awareness
|
| 20 |
+
- 🌤️ Weather information
|
| 21 |
+
- 🔍 Place search capabilities
|
| 22 |
+
- 🖥️ Gradio-powered GUI
|
| 23 |
+
|
| 24 |
+
## 🛠️ Installation
|
| 25 |
+
|
| 26 |
+
1. Clone this repository:
|
| 27 |
+
```sh
|
| 28 |
+
git clone https://github.com/tsilva/botty-mcbotface.git
|
| 29 |
+
```
|
| 30 |
+
2. Navigate to the project directory:
|
| 31 |
+
```sh
|
| 32 |
+
cd botty-mcbotface
|
| 33 |
+
```
|
| 34 |
+
|
| 35 |
+
## Environment Setup
|
| 36 |
+
|
| 37 |
+
The project uses a Conda environment defined in `environment.yml`. To set up and activate the environment:
|
| 38 |
+
|
| 39 |
+
1. Ensure you have Miniconda or Anaconda installed
|
| 40 |
+
2. **IMPORTANT:** You must SOURCE the activation script (do not run with bash/sh):
|
| 41 |
+
```bash
|
| 42 |
+
source activate-env.sh
|
| 43 |
+
# or
|
| 44 |
+
. activate-env.sh
|
| 45 |
+
```
|
| 46 |
+
|
| 47 |
+
⚠️ Running with `bash activate-env.sh` or `./activate-env.sh` will not work!
|
| 48 |
+
|
| 49 |
+
The script will:
|
| 50 |
+
- Check for Miniconda installation
|
| 51 |
+
- Create the environment if it doesn't exist
|
| 52 |
+
- Activate the environment automatically
|
| 53 |
+
|
| 54 |
+
Note: Using `./activate-env.sh` won't work as the script needs to be sourced to modify your current shell environment.
|
| 55 |
+
|
| 56 |
+
## 🚀 Usage
|
| 57 |
+
|
| 58 |
+
1. Run the chatbot:
|
| 59 |
+
```sh
|
| 60 |
+
python main.py
|
| 61 |
+
```
|
| 62 |
+
|
| 63 |
+
2. For development with auto-reload:
|
| 64 |
+
```sh
|
| 65 |
+
gradio main.py
|
| 66 |
+
```
|
| 67 |
+
This will automatically restart the app when you make changes to the source files.
|
| 68 |
+
|
| 69 |
+
3. Interact with the chatbot through the GUI.
|
| 70 |
+
|
| 71 |
+
## 📄 License
|
| 72 |
+
|
| 73 |
+
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
|
TODO.md
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# https://docs.anthropic.com/en/docs/build-with-claude/tool-use
|
| 2 |
+
# https://www.gradio.app/guides/plot-component-for-maps
|
| 3 |
+
|
| 4 |
+
- [ ] Host app
|
| 5 |
+
- [ ] Improve layout
|
| 6 |
+
- [ ] Add response streaming support
|
| 7 |
+
- [ ] Return place photos
|
| 8 |
+
- [ ] Return map response
|
| 9 |
+
- [ ] Figure out how to develop a test suite
|
| 10 |
+
- [ ] Use langchain instead?
|
| 11 |
+
- [ ] Optimize nearby places response
|
| 12 |
+
- [ ] Create external bot
|
activate-env.sh
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Detect if the script is being sourced
|
| 2 |
+
(return 0 2>/dev/null) && sourced=1 || sourced=0
|
| 3 |
+
|
| 4 |
+
if [ $sourced -eq 0 ]; then
|
| 5 |
+
echo "Error: This script needs to be sourced. Run:"
|
| 6 |
+
echo " source activate-env.sh"
|
| 7 |
+
echo " or"
|
| 8 |
+
echo " . activate-env.sh"
|
| 9 |
+
exit 1
|
| 10 |
+
fi
|
| 11 |
+
|
| 12 |
+
# Check if Miniconda is installed
|
| 13 |
+
if ! command -v conda &> /dev/null; then
|
| 14 |
+
echo "Miniconda is not installed. Please install Miniconda and try again."
|
| 15 |
+
exit 1
|
| 16 |
+
fi
|
| 17 |
+
|
| 18 |
+
# Check if environment.yml exists
|
| 19 |
+
if [ ! -f "environment.yml" ]; then
|
| 20 |
+
echo "environment.yml not found in the current directory. Please provide an environment.yml file."
|
| 21 |
+
exit 1
|
| 22 |
+
fi
|
| 23 |
+
|
| 24 |
+
# Extract environment name from environment.yml
|
| 25 |
+
env_name=$(grep "^name:" environment.yml | awk '{print $2}')
|
| 26 |
+
|
| 27 |
+
if [ -z "$env_name" ]; then
|
| 28 |
+
echo "Environment name not found in environment.yml. Please ensure the file has a 'name' field."
|
| 29 |
+
exit 1
|
| 30 |
+
fi
|
| 31 |
+
|
| 32 |
+
# Check if Conda is initialized
|
| 33 |
+
if ! conda info &> /dev/null; then
|
| 34 |
+
echo "Conda is not initialized. Run 'conda init' and restart your shell."
|
| 35 |
+
exit 1
|
| 36 |
+
fi
|
| 37 |
+
|
| 38 |
+
# Check if the environment already exists
|
| 39 |
+
if conda env list | grep -q "^$env_name\s"; then
|
| 40 |
+
echo "Activating existing environment: $env_name"
|
| 41 |
+
else
|
| 42 |
+
echo "Environment $env_name not found. Creating it from environment.yml..."
|
| 43 |
+
# Initialize conda for the shell
|
| 44 |
+
eval "$(conda shell.bash hook)"
|
| 45 |
+
|
| 46 |
+
conda env create -f environment.yml
|
| 47 |
+
|
| 48 |
+
if [ $? -ne 0 ]; then
|
| 49 |
+
echo "Failed to create the environment. Check your environment.yml for errors."
|
| 50 |
+
exit 1
|
| 51 |
+
fi
|
| 52 |
+
|
| 53 |
+
echo "Environment $env_name created successfully."
|
| 54 |
+
fi
|
| 55 |
+
|
| 56 |
+
# Initialize conda and activate environment
|
| 57 |
+
. $(conda info --base)/etc/profile.d/conda.sh
|
| 58 |
+
conda activate "$env_name"
|
| 59 |
+
|
| 60 |
+
# Confirm activation (but don't exit since we're sourcing)
|
| 61 |
+
if [ "$CONDA_DEFAULT_ENV" = "$env_name" ]; then
|
| 62 |
+
echo "Environment $env_name is now active."
|
| 63 |
+
else
|
| 64 |
+
echo "Failed to activate environment $env_name."
|
| 65 |
+
return 1
|
| 66 |
+
fi
|
environment.yml
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: botty-mcbotface
|
| 2 |
+
channels:
|
| 3 |
+
- conda-forge
|
| 4 |
+
- defaults
|
| 5 |
+
dependencies:
|
| 6 |
+
- python=3.11
|
| 7 |
+
- pip
|
| 8 |
+
- pip:
|
| 9 |
+
- python-dotenv
|
| 10 |
+
- anthropic
|
| 11 |
+
- openai
|
| 12 |
+
- langchain
|
| 13 |
+
- googlemaps
|
| 14 |
+
- gradio>=5.0.0
|
| 15 |
+
|
logo.png
ADDED
|
Git LFS Details
|
main.py
ADDED
|
@@ -0,0 +1,218 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# https://docs.anthropic.com/en/docs/build-with-claude/tool-use
|
| 2 |
+
# https://www.gradio.app/guides/plot-component-for-maps
|
| 3 |
+
# TODO: ask LLM what to improve
|
| 4 |
+
# TODO: improve layout
|
| 5 |
+
# TODO: add starter examples
|
| 6 |
+
# TODO: add multimodality (input/ooutput): gr.Image, gr.Video, gr.Audio, gr.File, gr.HTML, gr.Gallery, gr.Plot, gr.Map
|
| 7 |
+
# TODO: add streaming support
|
| 8 |
+
# TODO: trim down data from nearby places results (filling up context too much)
|
| 9 |
+
# TODO: host in spaces
|
| 10 |
+
|
| 11 |
+
from dotenv import load_dotenv
|
| 12 |
+
load_dotenv()
|
| 13 |
+
|
| 14 |
+
import time
|
| 15 |
+
import json
|
| 16 |
+
import anthropic
|
| 17 |
+
import gradio as gr
|
| 18 |
+
from gradio import ChatMessage
|
| 19 |
+
|
| 20 |
+
from tools import TOOLS_SPECS, TOOLS_FUNCTIONS
|
| 21 |
+
from utils.logger import setup_logger
|
| 22 |
+
|
| 23 |
+
# Setup logger
|
| 24 |
+
logger = setup_logger()
|
| 25 |
+
|
| 26 |
+
system_memory = []
|
| 27 |
+
|
| 28 |
+
app_context = {
|
| 29 |
+
"model_id" : "claude-3-5-sonnet-20241022",
|
| 30 |
+
"max_tokens" : 1024,
|
| 31 |
+
"system_memory" : system_memory,
|
| 32 |
+
"system_memory_max_size" : 5
|
| 33 |
+
}
|
| 34 |
+
|
| 35 |
+
tools_cache = {}
|
| 36 |
+
|
| 37 |
+
# TODO: this is a hack, if I don't fix this, multiple chats won't be supported
|
| 38 |
+
claude_history = []
|
| 39 |
+
|
| 40 |
+
client = anthropic.Anthropic()
|
| 41 |
+
|
| 42 |
+
def load_system_prompt():
|
| 43 |
+
with open('prompts/system.txt', 'r') as f:
|
| 44 |
+
return f.read().strip()
|
| 45 |
+
|
| 46 |
+
def get_memory_string():
|
| 47 |
+
return "\n".join([f"{index}: {value}" for index, value in enumerate(list(system_memory))]).strip()
|
| 48 |
+
|
| 49 |
+
def get_memory_markdown():
|
| 50 |
+
return "\n".join([f"{index}. {value}" for index, value in enumerate(list(system_memory))]).strip()
|
| 51 |
+
|
| 52 |
+
def prompt_claude():
|
| 53 |
+
# Build the system prompt including all memories the user asked to remember
|
| 54 |
+
system_prompt_memory_str = get_memory_string()
|
| 55 |
+
system_prompt_text = load_system_prompt()
|
| 56 |
+
if system_prompt_memory_str: system_prompt_text += f"\n\nHere are the memories the user asked you to remember:\n{system_prompt_memory_str}"
|
| 57 |
+
|
| 58 |
+
# Send message to Claude
|
| 59 |
+
model_id = app_context["model_id"]
|
| 60 |
+
max_tokens = app_context["max_tokens"]
|
| 61 |
+
message = client.messages.create(
|
| 62 |
+
model=model_id,
|
| 63 |
+
max_tokens=max_tokens,
|
| 64 |
+
temperature=0.0,
|
| 65 |
+
tools=TOOLS_SPECS.values(),
|
| 66 |
+
system=[{
|
| 67 |
+
"type": "text",
|
| 68 |
+
"text": system_prompt_text,
|
| 69 |
+
"cache_control": {"type": "ephemeral"} # Prompt caching references the entire prompt - tools, system, and messages (in that order) up to and including the block designated with cache_control.
|
| 70 |
+
}],
|
| 71 |
+
messages=claude_history
|
| 72 |
+
)
|
| 73 |
+
return message
|
| 74 |
+
|
| 75 |
+
def get_tool_generator(cached_yield, tool_function, app_context, tool_input):
|
| 76 |
+
"""Helper function to either yield cached result or run tool function"""
|
| 77 |
+
if cached_yield: yield cached_yield
|
| 78 |
+
else: yield from tool_function(app_context, **tool_input)
|
| 79 |
+
|
| 80 |
+
def chatbot(message, history):
|
| 81 |
+
logger.info(f"New message received: {message[:50]}...")
|
| 82 |
+
try:
|
| 83 |
+
# Store in claude history
|
| 84 |
+
claude_history.append({
|
| 85 |
+
"role": "user",
|
| 86 |
+
"content": message
|
| 87 |
+
})
|
| 88 |
+
|
| 89 |
+
messages = []
|
| 90 |
+
|
| 91 |
+
done = False
|
| 92 |
+
while not done:
|
| 93 |
+
done = True
|
| 94 |
+
|
| 95 |
+
claude_response = prompt_claude()
|
| 96 |
+
for content in claude_response.content:
|
| 97 |
+
if content.type == "text":
|
| 98 |
+
message = ChatMessage(
|
| 99 |
+
role="assistant",
|
| 100 |
+
content=content.text
|
| 101 |
+
)
|
| 102 |
+
messages.append(message)
|
| 103 |
+
yield messages, get_memory_markdown()
|
| 104 |
+
|
| 105 |
+
# Store in claude history
|
| 106 |
+
claude_history.append({
|
| 107 |
+
"role": "assistant",
|
| 108 |
+
"content": content.text
|
| 109 |
+
})
|
| 110 |
+
elif content.type == "tool_use":
|
| 111 |
+
tool_id = content.id
|
| 112 |
+
tool_name = content.name
|
| 113 |
+
tool_input = content.input
|
| 114 |
+
tool_key = f"{tool_name}_{json.dumps(tool_input)}" # TODO: sort input
|
| 115 |
+
tool_cached_yield = tools_cache.get(tool_key)
|
| 116 |
+
|
| 117 |
+
# Say that we're calling the tool
|
| 118 |
+
message = ChatMessage(
|
| 119 |
+
role="assistant",
|
| 120 |
+
content="...",
|
| 121 |
+
metadata={
|
| 122 |
+
"title" : f"🛠️ Using tool `{tool_name}`",
|
| 123 |
+
"status": "pending"
|
| 124 |
+
}
|
| 125 |
+
)
|
| 126 |
+
messages.append(message)
|
| 127 |
+
yield messages, get_memory_markdown()
|
| 128 |
+
|
| 129 |
+
# Call the tool
|
| 130 |
+
print(f"Calling {tool_name}({json.dumps(tool_input, indent=2)})")
|
| 131 |
+
tool_result = None
|
| 132 |
+
tool_statuses = []
|
| 133 |
+
tool_function = TOOLS_FUNCTIONS[tool_name]
|
| 134 |
+
tool_generator = get_tool_generator(tool_cached_yield, tool_function, app_context, tool_input)
|
| 135 |
+
tool_error = False
|
| 136 |
+
start_time = time.time()
|
| 137 |
+
try:
|
| 138 |
+
for tool_yield in tool_generator:
|
| 139 |
+
# Update tool status
|
| 140 |
+
status = tool_yield.get("status")
|
| 141 |
+
status_type = tool_yield.get("status_type", "current")
|
| 142 |
+
if status_type == "step": tool_statuses.append(status)
|
| 143 |
+
else: tool_statuses = tool_statuses[:-1] + [status]
|
| 144 |
+
message.content = "\n".join(tool_statuses)
|
| 145 |
+
|
| 146 |
+
# In case the tool is done, mark it as done
|
| 147 |
+
if "result" in tool_yield:
|
| 148 |
+
tool_result = tool_yield["result"]
|
| 149 |
+
|
| 150 |
+
print(f"Tool {tool_name} result: {json.dumps(tool_result, indent=2)}")
|
| 151 |
+
tools_cache[tool_key] = tool_yield
|
| 152 |
+
duration = time.time() - start_time
|
| 153 |
+
message.metadata["status"] = "done"
|
| 154 |
+
message.metadata["duration"] = duration
|
| 155 |
+
message.metadata["title"] = f"🛠️ Used tool `{tool_name}`"
|
| 156 |
+
|
| 157 |
+
# Update the chat history
|
| 158 |
+
yield messages, get_memory_markdown()
|
| 159 |
+
except Exception as tool_exception:
|
| 160 |
+
tool_error = str(tool_exception)
|
| 161 |
+
message.metadata["status"] = "done"
|
| 162 |
+
message.content = tool_error
|
| 163 |
+
message.metadata["title"] = f"💥 Tool `{tool_name}` failed"
|
| 164 |
+
|
| 165 |
+
# Store final result in claude history
|
| 166 |
+
claude_history.extend([
|
| 167 |
+
{
|
| 168 |
+
"role": "assistant",
|
| 169 |
+
"content": [
|
| 170 |
+
{
|
| 171 |
+
"type": "tool_use",
|
| 172 |
+
"id": tool_id,
|
| 173 |
+
"name" : tool_name,
|
| 174 |
+
"input" : tool_input
|
| 175 |
+
}
|
| 176 |
+
]
|
| 177 |
+
},
|
| 178 |
+
{
|
| 179 |
+
"role": "user",
|
| 180 |
+
"content": [
|
| 181 |
+
{
|
| 182 |
+
"type": "tool_result",
|
| 183 |
+
"tool_use_id": tool_id,
|
| 184 |
+
"content": str(tool_error) if tool_error else str(tool_result),
|
| 185 |
+
"is_error" : bool(tool_error)
|
| 186 |
+
}
|
| 187 |
+
]
|
| 188 |
+
}
|
| 189 |
+
])
|
| 190 |
+
|
| 191 |
+
done = False
|
| 192 |
+
else:
|
| 193 |
+
raise Exception(f"Unknown content type {type(content)}")
|
| 194 |
+
logger.debug(f"Generated response: {messages[-1].content[:50]}...")
|
| 195 |
+
return messages, get_memory_markdown()
|
| 196 |
+
except Exception as e:
|
| 197 |
+
logger.error(f"Error processing message: {str(e)}", exc_info=True)
|
| 198 |
+
return "Sorry, an error occurred while processing your message."
|
| 199 |
+
|
| 200 |
+
with gr.Blocks() as demo:
|
| 201 |
+
|
| 202 |
+
memory = gr.Markdown(render=False)
|
| 203 |
+
with gr.Row(equal_height=True):
|
| 204 |
+
with gr.Column(scale=80, min_width=600):
|
| 205 |
+
gr.Markdown("<center><h1>Botty McBotface</h1></center>")
|
| 206 |
+
gr.ChatInterface(
|
| 207 |
+
fn=chatbot,
|
| 208 |
+
type="messages",
|
| 209 |
+
description="Botty McBotFace is really just another chatbot.",
|
| 210 |
+
additional_outputs=[memory],
|
| 211 |
+
)
|
| 212 |
+
with gr.Column(scale=20, min_width=150, variant="compact"):
|
| 213 |
+
gr.Markdown("<center><h1>Memory</h1></center>")
|
| 214 |
+
memory.render()
|
| 215 |
+
|
| 216 |
+
if __name__ == "__main__":
|
| 217 |
+
logger.info("Starting Botty McBotface...")
|
| 218 |
+
demo.launch()
|
prompts/system.txt
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
### 🌟 **Concise and Personalized Travel Assistant** 🌟
|
| 2 |
+
|
| 3 |
+
You’re a location-aware assistant focused on delivering **brief**, **relevant**, and **personalized** travel insights, while ensuring the **location context is always accurate and updated**! 😊
|
| 4 |
+
|
| 5 |
+
---
|
| 6 |
+
|
| 7 |
+
### **Core Guidelines:**
|
| 8 |
+
1. **Be Concise:**
|
| 9 |
+
- Provide clear, straightforward answers without unnecessary details.
|
| 10 |
+
|
| 11 |
+
2. **Save User Preferences:**
|
| 12 |
+
- Use `tool_save_memory` to store **relevant travel-related preferences** (e.g., food, music, arts).
|
| 13 |
+
- Avoid saving **personal or sensitive information** (e.g., name, tax ID).
|
| 14 |
+
|
| 15 |
+
3. **Keep Location Context Accurate:**
|
| 16 |
+
- **Always geocode new locations using `tool_geocode`.**
|
| 17 |
+
- Save the geocoded location to memory with `tool_save_memory` to ensure all queries are based on the correct context. Make sure you save the radius as well.
|
| 18 |
+
|
| 19 |
+
4. **Personalized Results:**
|
| 20 |
+
- Saved preferences and locations are added to the **system prompt for future sessions**, enabling tailored recommendations.
|
| 21 |
+
|
| 22 |
+
---
|
| 23 |
+
|
| 24 |
+
### **How Location Context Works:**
|
| 25 |
+
1. **Always Update Context:**
|
| 26 |
+
- When a location is mentioned or inferred, geocode it immediately.
|
| 27 |
+
- Save it as the **current search context** for future use.
|
| 28 |
+
|
| 29 |
+
2. **Examples:**
|
| 30 |
+
- User: "I’m in Porto."
|
| 31 |
+
- **Geocode "Porto, Portugal."**
|
| 32 |
+
- Save as context using `tool_save_memory`.
|
| 33 |
+
- User: "Events in Tokyo."
|
| 34 |
+
- **Geocode "Tokyo, Japan."**
|
| 35 |
+
- Update context to Tokyo.
|
| 36 |
+
- User: "Cafes in Denmark?"
|
| 37 |
+
- **Geocode "Denmark."**
|
| 38 |
+
- Context updates to Denmark.
|
| 39 |
+
|
| 40 |
+
---
|
| 41 |
+
|
| 42 |
+
### **Saving User Preferences:**
|
| 43 |
+
**Save Only Relevant Preferences:**
|
| 44 |
+
- Food (e.g., "loves Italian cuisine," "prefers vegan options").
|
| 45 |
+
- Music (e.g., "enjoys jazz," "loves live bands").
|
| 46 |
+
- Arts (e.g., "modern art enthusiast").
|
| 47 |
+
|
| 48 |
+
**Avoid Saving:**
|
| 49 |
+
- Personal identifiers (e.g., name, contact details).
|
| 50 |
+
- Irrelevant or sensitive data.
|
| 51 |
+
|
| 52 |
+
---
|
| 53 |
+
|
| 54 |
+
### **Style & Features:**
|
| 55 |
+
- **Friendly & Brief Tone:** Be warm but concise.
|
| 56 |
+
- **Markdown & Emojis:** Use **bold**, `code`, ### headers, and emojis to stay engaging (🌍, 🎶, 🍷).
|
| 57 |
+
|
| 58 |
+
---
|
| 59 |
+
|
| 60 |
+
### **Key Notes:**
|
| 61 |
+
- **Always keep the location context up to date.**
|
| 62 |
+
- **Geocode every new location** immediately with `tool_geocode`.
|
| 63 |
+
- Save the updated location using `tool_save_memory` to ensure accuracy.
|
| 64 |
+
- Relevant preferences help tailor suggestions for food, music, and arts.
|
| 65 |
+
|
| 66 |
+
Let’s explore the world with precision and fun! 😊✨
|
tools.py
ADDED
|
@@ -0,0 +1,450 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
|
| 3 |
+
# Tool spec (Anthropic tool-use schema) for tool_save_memory: lets the model
# persist short user facts (preferences, current location context) into the
# session memory list, which the description says is fed back into the
# system prompt.
TOOL_SAVE_MEMORY = {
    "name": "tool_save_memory",
    "description": "Used to store information the user requested to remember. Can optionally specify index to overwrite existing memories. Memorized information will be used in system prompt.",
    "input_schema": {
        "type": "object",
        "properties": {
            "memory_data": {
                "type": "string",
                "description": "Summarized version of the information to remember, compressed to use the least tokens possible while preserving all relevant facts"
            },
            # Optional slot number; when omitted the implementation appends.
            "index": {
                "type": "integer",
                "description": "Optional index where to store the memory. If provided, overwrites existing memory at that index. If not provided, appends to end of memory list.",
                "minimum": 0
            }
        },
        "required": ["memory_data"]
    }
}
|
| 22 |
+
def tool_save_memory(app_context, memory_data: str, index: int = None):
    """Store a memory string in the session's shared memory list.

    Generator tool: yields a single dict with a human-readable ``status``
    and a ``result`` (always ``None`` — the side effect is the point).

    Args:
        app_context: Shared state dict holding ``system_memory`` (list of
            strings) and ``system_memory_max_size`` (int capacity).
        memory_data: The summarized fact to remember.
        index: Optional slot to overwrite; when ``None`` the memory is
            appended.
    """
    system_memory = app_context["system_memory"]
    system_memory_max_size = app_context["system_memory_max_size"]

    if index is not None and 0 <= index < len(system_memory):
        system_memory[index] = memory_data
        status = f"✅ Updated memory `{index}`: `{memory_data}`."
    elif len(system_memory) < system_memory_max_size:
        system_memory.append(memory_data)
        status = f"✅ Added new memory: `{memory_data}`."
    else:
        # Previously the code appended and then truncated to max size, which
        # silently dropped the new memory while still reporting success.
        # Report the failure honestly so the model can overwrite a slot.
        status = (
            f"⚠️ Memory full (`{system_memory_max_size}` slots); `{memory_data}` was not saved. "
            f"Overwrite an existing slot by passing its index."
        )

    yield {
        "status" : status,
        "result" : None
    }
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
# Tool spec (Anthropic tool-use schema) for tool_delete_memory: removes one
# entry from the session memory list by its slot index.
TOOL_DELETE_MEMORY = {
    "name": "tool_delete_memory",
    "description": "Used to discard information that was previously stored in memory.",
    "input_schema": {
        "type": "object",
        "properties": {
            "memory_index": {
                "type": "integer",
                "description": "The index of the memory slot to discard. The system prompt enumerates all memories at all times, prefixed by their memory slot, this is what should be referenced."
            }
        },
        "required": ["memory_index"]
    }
}
|
| 55 |
+
def tool_delete_memory(app_context, memory_index: int):
    """Delete one entry from the session's shared memory list.

    Generator tool: yields a single dict with ``status`` text and a
    ``result`` of ``None``.

    Args:
        app_context: Shared state dict holding ``system_memory`` (list).
        memory_index: Slot index to remove, as enumerated in the system
            prompt.
    """
    system_memory = app_context["system_memory"]

    # The index comes from the model; guard it so a stale or out-of-range
    # value produces an error status instead of an uncaught IndexError.
    if not 0 <= memory_index < len(system_memory):
        yield {
            "status" : f"❌ No memory exists at index `{memory_index}`.",
            "result" : None
        }
        return

    memory_data = system_memory.pop(memory_index)

    yield {
        "status" : f"✅ Deleted memory: `{memory_data}`.",
        "result" : None
    }
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
# Tool spec (Anthropic tool-use schema) for tool_places_nearby: a Google
# Places Nearby Search wrapper. The large "type" enum mirrors the place
# types accepted by the Places API (grouped by category below).
TOOL_PLACES_NEARBY = {
    "name": "tool_places_nearby",
    "description": "Search for places using Google Places API with various filtering options",
    "input_schema": {
        "type": "object",
        "properties": {
            "type": {
                "type": "string",
                "description": "Type of place to search for",
                "enum": [
                    # Automotive
                    "car_dealer", "car_rental", "car_repair", "car_wash", "electric_vehicle_charging_station", "gas_station", "parking", "rest_stop",

                    # Business
                    "corporate_office", "farm", "ranch",

                    # Culture
                    "art_gallery", "art_studio", "auditorium", "cultural_landmark", "historical_place", "monument", "museum", "performing_arts_theater", "sculpture",

                    # Education
                    "library", "preschool", "primary_school", "school", "secondary_school", "university",

                    # Entertainment and Recreation
                    "adventure_sports_center", "amphitheatre", "amusement_center", "amusement_park", "aquarium", "banquet_hall", "barbecue_area", "botanical_garden",
                    "bowling_alley", "casino", "childrens_camp", "comedy_club", "community_center", "concert_hall", "convention_center", "cultural_center",
                    "cycling_park", "dance_hall", "dog_park", "event_venue", "ferris_wheel", "garden", "hiking_area", "historical_landmark", "internet_cafe",
                    "karaoke", "marina", "movie_rental", "movie_theater", "national_park", "night_club", "observation_deck", "off_roading_area", "opera_house",
                    "park", "philharmonic_hall", "picnic_ground", "planetarium", "plaza", "roller_coaster", "skateboard_park", "state_park", "tourist_attraction",
                    "video_arcade", "visitor_center", "water_park", "wedding_venue", "wildlife_park", "wildlife_refuge", "zoo",

                    # Facilities
                    "public_bath", "public_bathroom", "stable",

                    # Finance
                    "accounting", "atm", "bank",

                    # Food and Drink
                    "acai_shop", "afghani_restaurant", "african_restaurant", "american_restaurant", "asian_restaurant", "bagel_shop", "bakery", "bar",
                    "bar_and_grill", "barbecue_restaurant", "brazilian_restaurant", "breakfast_restaurant", "brunch_restaurant", "buffet_restaurant", "cafe",
                    "cafeteria", "candy_store", "cat_cafe", "chinese_restaurant", "chocolate_factory", "chocolate_shop", "coffee_shop", "confectionery",
                    "deli", "dessert_restaurant", "dessert_shop", "diner", "dog_cafe", "donut_shop", "fast_food_restaurant", "fine_dining_restaurant",
                    "food_court", "french_restaurant", "greek_restaurant", "hamburger_restaurant", "ice_cream_shop", "indian_restaurant", "indonesian_restaurant",
                    "italian_restaurant", "japanese_restaurant", "juice_shop", "korean_restaurant", "lebanese_restaurant", "meal_delivery", "meal_takeaway",
                    "mediterranean_restaurant", "mexican_restaurant", "middle_eastern_restaurant", "pizza_restaurant", "pub", "ramen_restaurant", "restaurant",
                    "sandwich_shop", "seafood_restaurant", "spanish_restaurant", "steak_house", "sushi_restaurant", "tea_house", "thai_restaurant",
                    "turkish_restaurant", "vegan_restaurant", "vegetarian_restaurant", "vietnamese_restaurant", "wine_bar",

                    # Geographical Areas
                    "administrative_area_level_1", "administrative_area_level_2", "country", "locality", "postal_code", "school_district",

                    # Government
                    "city_hall", "courthouse", "embassy", "fire_station", "government_office", "local_government_office", "neighborhood_police_station",
                    "police", "post_office",

                    # Health and Wellness
                    "chiropractor", "dental_clinic", "dentist", "doctor", "drugstore", "hospital", "massage", "medical_lab", "pharmacy", "physiotherapist",
                    "sauna", "skin_care_clinic", "spa", "tanning_studio", "wellness_center", "yoga_studio",

                    # Housing
                    "apartment_building", "apartment_complex", "condominium_complex", "housing_complex",

                    # Lodging
                    "bed_and_breakfast", "budget_japanese_inn", "campground", "camping_cabin", "cottage", "extended_stay_hotel", "farmstay", "guest_house",
                    "hostel", "hotel", "inn", "japanese_inn", "lodging", "mobile_home_park", "motel", "private_guest_room", "resort_hotel", "rv_park",

                    # Natural Features
                    "beach",

                    # Places of Worship
                    "church", "hindu_temple", "mosque", "synagogue",

                    # Services
                    "astrologer", "barber_shop", "beautician", "beauty_salon", "body_art_service", "catering_service", "cemetery", "child_care_agency",
                    "consultant", "courier_service", "electrician", "florist", "food_delivery", "foot_care", "funeral_home", "hair_care", "hair_salon",
                    "insurance_agency", "laundry", "lawyer", "locksmith", "makeup_artist", "moving_company", "nail_salon", "painter", "plumber",
                    "psychic", "real_estate_agency", "roofing_contractor", "storage", "summer_camp_organizer", "tailor", "telecommunications_service_provider",
                    "tour_agency", "tourist_information_center", "travel_agency", "veterinary_care",

                    # Shopping
                    "asian_grocery_store", "auto_parts_store", "bicycle_store", "book_store", "butcher_shop", "cell_phone_store", "clothing_store",
                    "convenience_store", "department_store", "discount_store", "electronics_store", "food_store", "furniture_store", "gift_shop",
                    "grocery_store", "hardware_store", "home_goods_store", "home_improvement_store", "jewelry_store", "liquor_store", "market", "pet_store",
                    "shoe_store", "shopping_mall", "sporting_goods_store", "store", "supermarket", "warehouse_store", "wholesaler",

                    # Sports
                    "arena", "athletic_field", "fishing_charter", "fishing_pond", "fitness_center", "golf_course", "gym", "ice_skating_rink", "playground",
                    "ski_resort", "sports_activity_location", "sports_club", "sports_coaching", "sports_complex", "stadium", "swimming_pool",

                    # Transportation
                    "airport", "airstrip", "bus_station", "bus_stop", "ferry_terminal", "heliport", "international_airport", "light_rail_station",
                    "park_and_ride", "subway_station", "taxi_stand", "train_station", "transit_depot", "transit_station", "truck_stop",

                    # Table B Additional Types
                    "administrative_area_level_3", "administrative_area_level_4", "administrative_area_level_5", "administrative_area_level_6",
                    "administrative_area_level_7", "archipelago", "colloquial_area", "continent", "establishment", "finance", "floor", "food",
                    "general_contractor", "geocode", "health", "intersection", "landmark", "natural_feature", "neighborhood", "place_of_worship",
                    "plus_code", "point_of_interest", "political", "post_box", "postal_code_prefix", "postal_code_suffix", "postal_town", "premise",
                    "room", "route", "street_address", "street_number", "sublocality", "sublocality_level_1", "sublocality_level_2", "sublocality_level_3",
                    "sublocality_level_4", "sublocality_level_5", "subpremise", "town_square"
                ]
            },
            # Search center, typically produced by tool_geocode.
            "location": {
                "type": "object",
                "properties": {
                    "latitude": {"type": "number", "minimum": -90, "maximum": 90},
                    "longitude": {"type": "number", "minimum": -180, "maximum": 180}
                },
                "required": ["latitude", "longitude"],
                "description": "Geographic coordinates of the search center point"
            },
            "radius": {
                "type": "integer",
                "description": "Search radius in meters",
                "minimum": 1,
                "maximum": 50000
            },
            "keyword": {
                "type": "string",
                "description": "Term to match against all content indexed for this place"
            },
            "language": {
                "type": "string",
                "description": "The language code for the results (e.g., 'en', 'pt')"
            },
            "min_price": {
                "type": "integer",
                "minimum": 0,
                "maximum": 4,
                "description": "Minimum price level (0=most affordable, 4=most expensive)"
            },
            "max_price": {
                "type": "integer",
                "minimum": 0,
                "maximum": 4,
                "description": "Maximum price level (0=most affordable, 4=most expensive)"
            },
            "name": {
                "type": "string",
                "description": "Terms to match against place names"
            },
            "open_now": {
                "type": "boolean",
                "description": "Return only places that are currently open"
            },
            "rank_by": {
                "type": "string",
                "enum": ["prominence", "distance"],
                "description": "Order in which to rank results"
            },
            "page_token": {
                "type": "string",
                "description": "Token for retrieving the next page of results"
            }
        },
        "required": ["location"]
    }
}
|
| 223 |
+
def tool_places_nearby(
    app_context,
    location: dict,
    type: str = None,
    radius: int = None,
    keyword: str = None,
    language: str = None,
    min_price: int = None,
    max_price: int = None,
    name: str = None,
    open_now: bool = False,
    rank_by: str = None,
    page_token: str = None
) -> dict:
    """Search for nearby places via the Google Places Nearby Search API.

    Generator tool: first yields a progress ``status`` dict, then a dict
    with the final ``status`` and the list of place results in ``result``.

    Args:
        app_context: Shared state dict (unused here, kept for the common
            tool signature).
        location: Dict with ``latitude`` and ``longitude`` keys.
        type: Optional Places API place type (see TOOL_PLACES_NEARBY enum).
        radius: Search radius in meters; defaults to 1000 when ranking by
            prominence. Ignored when ``rank_by`` is ``'distance'``.
        keyword, language, min_price, max_price, name, open_now, rank_by,
        page_token: Passed through to the Places API when set.
    """
    import googlemaps

    yield {"status" : "⏳ Searching for locations..."}

    # TODO: reuse a single gmaps client across tools.
    gmaps = googlemaps.Client(key=os.getenv('GOOGLE_MAPS_API_KEY'))

    # The googlemaps client expects the center as a (lat, lng) tuple.
    location_tuple = (location['latitude'], location['longitude'])

    # Build params dict; None entries are stripped below.
    params = {
        'type': type,
        'location': location_tuple,
        'keyword': keyword,
        'language': language,
        'min_price': min_price,
        'max_price': max_price,
        'name': name,
        'open_now': open_now,
        'rank_by': rank_by,
        'page_token': page_token
    }

    # The Places API rejects requests that combine rank_by=distance with a
    # radius, so only attach a radius (defaulting to 1000 m) when ranking
    # by prominence. Previously a caller-supplied radius was sent even
    # alongside rank_by=distance, producing an invalid request.
    if rank_by != 'distance':
        params['radius'] = radius if radius is not None else 1000

    # Remove unset optional parameters.
    params = {k: v for k, v in params.items() if v is not None}

    # Make the API call.
    result = gmaps.places_nearby(**params)
    locations = result.get('results', [])

    yield {
        "status" : f"✅ Found `{len(locations)}` locations.",
        "result" : locations
    }
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
# Tool spec (Anthropic tool-use schema) for tool_calculator: basic binary
# arithmetic on two numbers.
# NOTE(review): the description mentions "precision tracking", but the
# implementation (tool_calculator) performs plain arithmetic only — confirm
# or reword.
TOOL_CALCULATOR = {
    "name": "tool_calculator",
    "description": "Perform mathematical operations with error handling and precision tracking",
    "input_schema": {
        "type": "object",
        "properties": {
            "first_number": {
                "type": "number",
                "description": "First operand for the calculation"
            },
            "second_number": {
                "type": "number",
                "description": "Second operand for the calculation"
            },
            "operation": {
                "type": "string",
                "description": "Mathematical operation to perform",
                "enum": ["add", "subtract", "multiply", "divide"]
            }
        },
        "required": ["first_number", "second_number", "operation"]
    }
}
|
| 300 |
+
def tool_calculator(app_context, first_number, second_number, operation: str):
    """Perform a basic arithmetic operation on two numbers.

    Generator tool: yields a single dict with a ``status`` message and the
    numeric ``result`` (``None`` for division by zero or an unknown
    operation).

    Bug fixed: the original yielded a copy-pasted "Searching for
    locations..." string under the ``result`` key and then ``return``ed the
    computed value from a generator, so the actual result was never
    delivered to the caller.

    Args:
        app_context: Shared state dict (unused, kept for the common tool
            signature).
        first_number: First operand.
        second_number: Second operand.
        operation: One of "add", "subtract", "multiply", "divide".
    """
    x, y = first_number, second_number

    result = None
    if operation == "add": result = x + y
    elif operation == "subtract": result = x - y
    elif operation == "multiply": result = x * y
    elif operation == "divide": result = x / y if y != 0 else None

    if result is None:
        status = f"❌ Could not compute `{x} {operation} {y}` (unknown operation or division by zero)."
    else:
        status = f"✅ `{x} {operation} {y}` = `{result}`."

    yield {
        "status" : status,
        "result" : result
    }
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
# Tool spec (Anthropic tool-use schema) for tool_geocode: resolves a
# free-form address to coordinates (center point plus an approximate
# radius) for use as the search context of tool_places_nearby.
TOOL_GEOCODE = {
    "name": "tool_geocode",
    "description": "Convert addresses into latitude and longitude coordinates using Google Geocoding API",
    "input_schema": {
        "type": "object",
        "properties": {
            "address": {
                "type": "string",
                "description": "Address to convert to coordinates (e.g. 'Porto, Portugal' or 'Avenida dos Aliados, Porto')"
            }
        },
        "required": ["address"]
    }
}
|
| 327 |
+
def tool_geocode(app_context, address: str) -> dict:
    """Geocode an address to a center point plus an approximate radius.

    Generator tool: yields a progress ``status`` dict, then a final dict
    whose ``result`` holds ``{"center": {"lat", "lng"}, "radius": meters}``.
    The radius is the great-circle distance from the bounding box center to
    its northeast corner, in meters (matching the units that
    tool_places_nearby expects).

    Fixes over the original:
    - 'bounds' is not present for every geocoding result (e.g. precise
      street addresses); fall back to the always-provided 'viewport'.
    - Empty geocode results no longer raise IndexError.
    - The haversine distance is in kilometers but was reported and returned
      as meters; it is now converted (* 1000).

    Args:
        app_context: Shared state dict (unused, kept for the common tool
            signature).
        address: Free-form address or place name to geocode.
    """
    import math
    import googlemaps

    def _haversine(lat1, lon1, lat2, lon2):
        """Great-circle distance between two (lat, lon) points in kilometers."""
        lat1, lon1, lat2, lon2 = map(math.radians, [lat1, lon1, lat2, lon2])

        # Mean radius of Earth in kilometers.
        R = 6371.0

        dlat = lat2 - lat1
        dlon = lon2 - lon1

        a = math.sin(dlat / 2)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2)**2
        c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
        return R * c

    yield {"status" : f"⏳ Geocoding '{address}'..."}

    # TODO: reuse gmaps client
    gmaps = googlemaps.Client(key=os.getenv('GOOGLE_MAPS_API_KEY'))

    result = gmaps.geocode(address)
    if not result:
        yield {
            "status" : f"❌ Could not geocode `{address}`.",
            "result" : None
        }
        return

    geometry = result[0]['geometry']

    # 'bounds' only exists for results with a well-defined extent (cities,
    # countries); 'viewport' is always returned, so use it as a fallback.
    bounds = geometry.get('bounds') or geometry['viewport']
    northeast = bounds['northeast']
    southwest = bounds['southwest']

    # Center of the bounding box.
    center_lat = (northeast['lat'] + southwest['lat']) / 2
    center_lng = (northeast['lng'] + southwest['lng']) / 2
    center = {"lat": center_lat, "lng": center_lng}

    # Distance from center to the NE corner, converted km -> m.
    radius = _haversine(center_lat, center_lng, northeast['lat'], northeast['lng']) * 1000

    yield {
        "status" : f"✅ Geocoded `{address}` to center=`({center_lat},{center_lng}), radius={radius}m`.",
        "result" : {
            "center": center,
            "radius" : radius
        }
    }
|
| 383 |
+
|
| 384 |
+
|
| 385 |
+
# Tool spec (Anthropic tool-use schema) for tool_place_details: Place
# Details lookup keyed by a place_id obtained from tool_places_nearby.
TOOL_PLACE_DETAILS = {
    "name": "tool_place_details",
    "description": "Get detailed information about a specific place using its place_id from Google Places API",
    "input_schema": {
        "type": "object",
        "properties": {
            "place_id": {
                "type": "string",
                "description": "The place_id of the location to get details for. This can be obtained from the results of places_nearby searches."
            },
            "language": {
                "type": "string",
                "description": "The language code for the results (e.g., 'en', 'pt')"
            },
            # Optional projection; omitting it returns every available field.
            "fields": {
                "type": "array",
                "description": "List of specific fields to return. If empty, returns all available fields.",
                "items": {
                    "type": "string",
                    "enum": [
                        "address_component", "adr_address", "business_status",
                        "formatted_address", "geometry", "icon", "name",
                        "photo", "place_id", "plus_code", "type",
                        "url", "utc_offset", "vicinity", "formatted_phone_number",
                        "international_phone_number", "opening_hours",
                        "website", "price_level", "rating", "review",
                        "user_ratings_total"
                    ]
                }
            }
        },
        "required": ["place_id"]
    }
}
|
| 419 |
+
def tool_place_details(app_context, place_id: str, language: str = None, fields: list = None) -> dict:
    """Fetch detailed information for one place from the Place Details API.

    Generator tool: yields a progress ``status`` dict, then a final dict
    whose ``result`` is the API's ``result`` payload for the place.

    Args:
        app_context: Shared state dict (unused, kept for the common tool
            signature).
        place_id: Place identifier from a previous places_nearby search.
        language: Optional language code for the returned details.
        fields: Optional list of fields to restrict the response to.
    """
    import googlemaps

    yield {"status" : f"⏳ Looking up details on location..."}

    client = googlemaps.Client(key=os.getenv('GOOGLE_MAPS_API_KEY'))

    # Only forward the optional arguments that were actually supplied.
    request_args = {'place_id': place_id}
    if language:
        request_args['language'] = language
    if fields:
        request_args['fields'] = fields

    response = client.place(**request_args)
    details = response.get('result', {})

    yield {
        "status" : f"✅ Location details fetched.",
        "result" : details
    }
|
| 437 |
+
|
| 438 |
+
|
| 439 |
+
# Registry pairing each tool's JSON spec (what the model sees) with the
# Python generator that implements it.
TOOLS = (
    (TOOL_SAVE_MEMORY, tool_save_memory),
    (TOOL_DELETE_MEMORY, tool_delete_memory),
    (TOOL_CALCULATOR, tool_calculator),
    (TOOL_PLACES_NEARBY, tool_places_nearby),
    (TOOL_GEOCODE, tool_geocode),
    (TOOL_PLACE_DETAILS, tool_place_details)
)

# Tool name -> spec dict (for the API "tools" parameter).
TOOLS_SPECS = {tool[0]["name"]: tool[0] for tool in TOOLS}

# Tool name -> implementation (for dispatching tool-use calls).
TOOLS_FUNCTIONS = {tool[0]["name"]: tool[1] for tool in TOOLS}
|
utils/logger.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os
|
| 3 |
+
from datetime import datetime
|
| 4 |
+
from logging.handlers import RotatingFileHandler
|
| 5 |
+
|
| 6 |
+
def setup_logger(name="botty"):
    """Create and configure the application logger.

    Logs at DEBUG level to a rotating file under ``logs/`` (file named by
    the date of first use, max 5 MB per file, 5 backups) and at INFO level
    to the console.

    Args:
        name: Logger name passed to ``logging.getLogger``.

    Returns:
        The configured ``logging.Logger`` instance.
    """
    # Create logs directory if it doesn't exist (race-free form).
    os.makedirs('logs', exist_ok=True)

    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)

    # logging.getLogger returns the same object for a given name, so a
    # second call would stack duplicate handlers and emit every record
    # multiple times; configure handlers only on first use.
    if logger.handlers:
        return logger

    # Formatters: verbose for the file, terse for the console.
    file_formatter = logging.Formatter(
        '%(asctime)s | %(levelname)s | %(module)s:%(lineno)d | %(message)s'
    )
    console_formatter = logging.Formatter(
        '%(levelname)s: %(message)s'
    )

    # File handler (rotating, max 5MB per file, keep 5 backup files).
    file_handler = RotatingFileHandler(
        f'logs/botty_{datetime.now().strftime("%Y%m%d")}.log',
        maxBytes=5*1024*1024,
        backupCount=5
    )
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(file_formatter)

    # Console handler.
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(console_formatter)

    # Add handlers to logger.
    logger.addHandler(file_handler)
    logger.addHandler(console_handler)

    return logger
|