Roman Jurowetzki committed
Commit 709e549 · 1 Parent(s): cced517
.gitignore ADDED
@@ -0,0 +1 @@
+ __pycache__
Dockerfile ADDED
@@ -0,0 +1,27 @@
+ FROM python:3.11-slim
+
+ RUN pip install poetry==1.6.1
+
+ RUN poetry config virtualenvs.create false
+
+ WORKDIR /code
+
+ COPY ./pyproject.toml ./README.md ./poetry.lock* ./
+
+ COPY ./package[s] ./packages
+
+ RUN poetry install --no-interaction --no-ansi --no-root
+
+ COPY ./app ./app
+
+ RUN poetry install --no-interaction --no-ansi
+
+ EXPOSE 8080
+
+ # BuildKit secret mount (e.g. a Hugging Face Spaces build secret). RUN requires a
+ # command, so a placeholder read simply checks that the secret is present at build time.
+ RUN --mount=type=secret,id=TOGETHER_API_KEY,mode=0444,required=true \
+     cat /run/secrets/TOGETHER_API_KEY > /dev/null
+
+ # Uvicorn serves on port 7860, the Hugging Face Spaces default (EXPOSE above says 8080).
+ CMD ["uvicorn", "app.server:app", "--host", "0.0.0.0", "--port", "7860"]
README copy.md ADDED
@@ -0,0 +1,79 @@
+ # my-app
+
+ ## Installation
+
+ Install the LangChain CLI if you haven't already:
+
+ ```bash
+ pip install -U langchain-cli
+ ```
+
+ ## Adding packages
+
+ ```bash
+ # adding packages from
+ # https://github.com/langchain-ai/langchain/tree/master/templates
+ langchain app add $PROJECT_NAME
+
+ # adding custom GitHub repo packages
+ langchain app add --repo $OWNER/$REPO
+ # or with whole git string (supports other git providers):
+ # langchain app add git+https://github.com/hwchase17/chain-of-verification
+
+ # with a custom api mount point (defaults to `/{package_name}`)
+ langchain app add $PROJECT_NAME --api_path=/my/custom/path/rag
+ ```
+
+ Note: packages are removed by their API path
+
+ ```bash
+ langchain app remove my/custom/path/rag
+ ```
+
+ ## Setup LangSmith (Optional)
+ LangSmith will help us trace, monitor and debug LangChain applications.
+ You can sign up for LangSmith [here](https://smith.langchain.com/).
+ If you don't have access, you can skip this section.
+
+
+ ```shell
+ export LANGCHAIN_TRACING_V2=true
+ export LANGCHAIN_API_KEY=<your-api-key>
+ export LANGCHAIN_PROJECT=<your-project> # if not specified, defaults to "default"
+ ```
+
+ ## Launch LangServe
+
+ ```bash
+ langchain serve
+ ```
+
+ ## Running in Docker
+
+ This project folder includes a Dockerfile that allows you to easily build and host your LangServe app.
+
+ ### Building the Image
+
+ To build the image, simply run:
+
+ ```shell
+ docker build . -t my-langserve-app
+ ```
+
+ If you tag your image with something other than `my-langserve-app`,
+ note it for use in the next step.
+
+ ### Running the Image Locally
+
+ To run the image, you'll need to include any environment variables
+ necessary for your application.
+
+ In the example below, we inject the `OPENAI_API_KEY` environment
+ variable with the value set in your local environment
+ (`$OPENAI_API_KEY`).
+
+ We also expose port 8080 with the `-p 8080:8080` option.
+
+ ```shell
+ docker run -e OPENAI_API_KEY=$OPENAI_API_KEY -p 8080:8080 my-langserve-app
+ ```
app/__init__.py ADDED
File without changes
app/server.py ADDED
@@ -0,0 +1,22 @@
+ from fastapi import FastAPI
+ from fastapi.responses import RedirectResponse
+ from langserve import add_routes
+ from pirate_speak.chain import chain as pirate_speak_chain
+
+ app = FastAPI()
+
+
+ @app.get("/")
+ async def redirect_root_to_docs():
+     return RedirectResponse("/docs")
+
+
+ # Edit this to add the chain you want to add
+ add_routes(app, pirate_speak_chain,
+            path="/pirate-speak",
+            playground_type="chat")
+
+ if __name__ == "__main__":
+     import uvicorn
+
+     uvicorn.run(app, host="0.0.0.0", port=8000)
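`add_routes` mounts the chain under `/pirate-speak`, so LangServe exposes, among others, a `POST /pirate-speak/invoke` endpoint whose JSON body wraps the chain input under an `"input"` key. Below is a minimal sketch of calling it directly over HTTP, assuming the server above is running locally on port 8000 and that the chain expects the `text` and `chat_history` keys defined by the prompt in `pirate_speak/chain.py`:

```python
import httpx

# Sketch: call the LangServe invoke endpoint directly over HTTP.
# Assumes app/server.py is running locally on port 8000 and that the chain's
# prompt requires the keys "text" and "chat_history" (see pirate_speak/chain.py).
payload = {
    "input": {
        "text": "Tell me a joke!",
        "chat_history": [],  # prior messages; empty for a fresh conversation
    }
}

response = httpx.post("http://127.0.0.1:8000/pirate-speak/invoke", json=payload)
response.raise_for_status()
print(response.json()["output"])  # LangServe returns the chain result under "output"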
app/test_endpoint.ipynb ADDED
@@ -0,0 +1,51 @@
+ {
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "ename": "HTTPStatusError",
+ "evalue": "Server error '500 Internal Server Error' for url 'http://127.0.0.1:8000/pirate-speak/invoke'\nFor more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500 for Internal Server Error",
+ "output_type": "error",
+ "traceback": [
+ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+ "\u001b[0;31mHTTPStatusError\u001b[0m Traceback (most recent call last)",
+ "File \u001b[0;32m~/.python/current/lib/python3.10/site-packages/langserve/client.py:157\u001b[0m, in \u001b[0;36m_raise_for_status\u001b[0;34m(response)\u001b[0m\n\u001b[1;32m 156\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m--> 157\u001b[0m response\u001b[39m.\u001b[39;49mraise_for_status()\n\u001b[1;32m 158\u001b[0m \u001b[39mexcept\u001b[39;00m httpx\u001b[39m.\u001b[39mHTTPStatusError \u001b[39mas\u001b[39;00m e:\n",
+ "File \u001b[0;32m~/.local/lib/python3.10/site-packages/httpx/_models.py:761\u001b[0m, in \u001b[0;36mResponse.raise_for_status\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 760\u001b[0m message \u001b[39m=\u001b[39m message\u001b[39m.\u001b[39mformat(\u001b[39mself\u001b[39m, error_type\u001b[39m=\u001b[39merror_type)\n\u001b[0;32m--> 761\u001b[0m \u001b[39mraise\u001b[39;00m HTTPStatusError(message, request\u001b[39m=\u001b[39mrequest, response\u001b[39m=\u001b[39m\u001b[39mself\u001b[39m)\n",
+ "\u001b[0;31mHTTPStatusError\u001b[0m: Server error '500 Internal Server Error' for url 'http://127.0.0.1:8000/pirate-speak/invoke'\nFor more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500",
+ "\nDuring handling of the above exception, another exception occurred:\n",
+ "\u001b[0;31mHTTPStatusError\u001b[0m Traceback (most recent call last)",
+ "\u001b[1;32m/workspaces/RPAI2024-bot/my-app/app/test_endpoint.ipynb Cell 1\u001b[0m line \u001b[0;36m7\n\u001b[1;32m <a href='vscode-notebook-cell://codespaces%2Bmusical-parakeet-j4w67p6ww59fjgww/workspaces/RPAI2024-bot/my-app/app/test_endpoint.ipynb#W0sdnNjb2RlLXJlbW90ZQ%3D%3D?line=3'>4</a>\u001b[0m rag_app \u001b[39m=\u001b[39m RemoteRunnable(\u001b[39m\"\u001b[39m\u001b[39mhttp://127.0.0.1:8000/pirate-speak/\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[1;32m <a href='vscode-notebook-cell://codespaces%2Bmusical-parakeet-j4w67p6ww59fjgww/workspaces/RPAI2024-bot/my-app/app/test_endpoint.ipynb#W0sdnNjb2RlLXJlbW90ZQ%3D%3D?line=5'>6</a>\u001b[0m \u001b[39m# call the API with a question\u001b[39;00m\n\u001b[0;32m----> <a href='vscode-notebook-cell://codespaces%2Bmusical-parakeet-j4w67p6ww59fjgww/workspaces/RPAI2024-bot/my-app/app/test_endpoint.ipynb#W0sdnNjb2RlLXJlbW90ZQ%3D%3D?line=6'>7</a>\u001b[0m answer \u001b[39m=\u001b[39m rag_app\u001b[39m.\u001b[39;49minvoke(\u001b[39m\"\u001b[39;49m\u001b[39mTell me a joke!\u001b[39;49m\u001b[39m\"\u001b[39;49m)\n\u001b[1;32m <a href='vscode-notebook-cell://codespaces%2Bmusical-parakeet-j4w67p6ww59fjgww/workspaces/RPAI2024-bot/my-app/app/test_endpoint.ipynb#W0sdnNjb2RlLXJlbW90ZQ%3D%3D?line=8'>9</a>\u001b[0m \u001b[39mprint\u001b[39m(answer)\n",
+ "File \u001b[0;32m~/.python/current/lib/python3.10/site-packages/langserve/client.py:356\u001b[0m, in \u001b[0;36mRemoteRunnable.invoke\u001b[0;34m(self, input, config, **kwargs)\u001b[0m\n\u001b[1;32m 354\u001b[0m \u001b[39mif\u001b[39;00m kwargs:\n\u001b[1;32m 355\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mNotImplementedError\u001b[39;00m(\u001b[39m\"\u001b[39m\u001b[39mkwargs not implemented yet.\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[0;32m--> 356\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_call_with_config(\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_invoke, \u001b[39minput\u001b[39;49m, config\u001b[39m=\u001b[39;49mconfig)\n",
+ "File \u001b[0;32m~/.python/current/lib/python3.10/site-packages/langchain_core/runnables/base.py:1596\u001b[0m, in \u001b[0;36mRunnable._call_with_config\u001b[0;34m(self, func, input, config, run_type, **kwargs)\u001b[0m\n\u001b[1;32m 1592\u001b[0m context \u001b[39m=\u001b[39m copy_context()\n\u001b[1;32m 1593\u001b[0m context\u001b[39m.\u001b[39mrun(_set_config_context, child_config)\n\u001b[1;32m 1594\u001b[0m output \u001b[39m=\u001b[39m cast(\n\u001b[1;32m 1595\u001b[0m Output,\n\u001b[0;32m-> 1596\u001b[0m context\u001b[39m.\u001b[39;49mrun(\n\u001b[1;32m 1597\u001b[0m call_func_with_variable_args, \u001b[39m# type: ignore[arg-type]\u001b[39;49;00m\n\u001b[1;32m 1598\u001b[0m func, \u001b[39m# type: ignore[arg-type]\u001b[39;49;00m\n\u001b[1;32m 1599\u001b[0m \u001b[39minput\u001b[39;49m, \u001b[39m# type: ignore[arg-type]\u001b[39;49;00m\n\u001b[1;32m 1600\u001b[0m config,\n\u001b[1;32m 1601\u001b[0m run_manager,\n\u001b[1;32m 1602\u001b[0m \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs,\n\u001b[1;32m 1603\u001b[0m ),\n\u001b[1;32m 1604\u001b[0m )\n\u001b[1;32m 1605\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mBaseException\u001b[39;00m \u001b[39mas\u001b[39;00m e:\n\u001b[1;32m 1606\u001b[0m run_manager\u001b[39m.\u001b[39mon_chain_error(e)\n",
+ "File \u001b[0;32m~/.python/current/lib/python3.10/site-packages/langchain_core/runnables/config.py:380\u001b[0m, in \u001b[0;36mcall_func_with_variable_args\u001b[0;34m(func, input, config, run_manager, **kwargs)\u001b[0m\n\u001b[1;32m 378\u001b[0m \u001b[39mif\u001b[39;00m run_manager \u001b[39mis\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mNone\u001b[39;00m \u001b[39mand\u001b[39;00m accepts_run_manager(func):\n\u001b[1;32m 379\u001b[0m kwargs[\u001b[39m\"\u001b[39m\u001b[39mrun_manager\u001b[39m\u001b[39m\"\u001b[39m] \u001b[39m=\u001b[39m run_manager\n\u001b[0;32m--> 380\u001b[0m \u001b[39mreturn\u001b[39;00m func(\u001b[39minput\u001b[39;49m, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n",
+ "File \u001b[0;32m~/.python/current/lib/python3.10/site-packages/langserve/client.py:343\u001b[0m, in \u001b[0;36mRemoteRunnable._invoke\u001b[0;34m(self, input, run_manager, config, **kwargs)\u001b[0m\n\u001b[1;32m 334\u001b[0m \u001b[39m\u001b[39m\u001b[39m\"\"\"Invoke the runnable with the given input and config.\"\"\"\u001b[39;00m\n\u001b[1;32m 335\u001b[0m response \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39msync_client\u001b[39m.\u001b[39mpost(\n\u001b[1;32m 336\u001b[0m \u001b[39m\"\u001b[39m\u001b[39m/invoke\u001b[39m\u001b[39m\"\u001b[39m,\n\u001b[1;32m 337\u001b[0m json\u001b[39m=\u001b[39m{\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 341\u001b[0m },\n\u001b[1;32m 342\u001b[0m )\n\u001b[0;32m--> 343\u001b[0m output, callback_events \u001b[39m=\u001b[39m _decode_response(\n\u001b[1;32m 344\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_lc_serializer, response, is_batch\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m\n\u001b[1;32m 345\u001b[0m )\n\u001b[1;32m 347\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_use_server_callback_events \u001b[39mand\u001b[39;00m callback_events:\n\u001b[1;32m 348\u001b[0m handle_callbacks(run_manager, callback_events)\n",
+ "File \u001b[0;32m~/.python/current/lib/python3.10/site-packages/langserve/client.py:230\u001b[0m, in \u001b[0;36m_decode_response\u001b[0;34m(serializer, response, is_batch)\u001b[0m\n\u001b[1;32m 223\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39m_decode_response\u001b[39m(\n\u001b[1;32m 224\u001b[0m serializer: Serializer,\n\u001b[1;32m 225\u001b[0m response: httpx\u001b[39m.\u001b[39mResponse,\n\u001b[1;32m 226\u001b[0m \u001b[39m*\u001b[39m,\n\u001b[1;32m 227\u001b[0m is_batch: \u001b[39mbool\u001b[39m \u001b[39m=\u001b[39m \u001b[39mFalse\u001b[39;00m,\n\u001b[1;32m 228\u001b[0m ) \u001b[39m-\u001b[39m\u001b[39m>\u001b[39m Tuple[Any, Union[List[CallbackEventDict], List[List[CallbackEventDict]]]]:\n\u001b[1;32m 229\u001b[0m \u001b[39m \u001b[39m\u001b[39m\"\"\"Decode the response.\"\"\"\u001b[39;00m\n\u001b[0;32m--> 230\u001b[0m _raise_for_status(response)\n\u001b[1;32m 231\u001b[0m obj \u001b[39m=\u001b[39m response\u001b[39m.\u001b[39mjson()\n\u001b[1;32m 232\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39misinstance\u001b[39m(obj, \u001b[39mdict\u001b[39m):\n",
+ "File \u001b[0;32m~/.python/current/lib/python3.10/site-packages/langserve/client.py:165\u001b[0m, in \u001b[0;36m_raise_for_status\u001b[0;34m(response)\u001b[0m\n\u001b[1;32m 162\u001b[0m \u001b[39mif\u001b[39;00m e\u001b[39m.\u001b[39mresponse\u001b[39m.\u001b[39mtext:\n\u001b[1;32m 163\u001b[0m message \u001b[39m+\u001b[39m\u001b[39m=\u001b[39m \u001b[39mf\u001b[39m\u001b[39m\"\u001b[39m\u001b[39m for \u001b[39m\u001b[39m{\u001b[39;00me\u001b[39m.\u001b[39mresponse\u001b[39m.\u001b[39mtext\u001b[39m}\u001b[39;00m\u001b[39m\"\u001b[39m\n\u001b[0;32m--> 165\u001b[0m \u001b[39mraise\u001b[39;00m httpx\u001b[39m.\u001b[39mHTTPStatusError(\n\u001b[1;32m 166\u001b[0m message\u001b[39m=\u001b[39mmessage,\n\u001b[1;32m 167\u001b[0m request\u001b[39m=\u001b[39m_sanitize_request(e\u001b[39m.\u001b[39mrequest),\n\u001b[1;32m 168\u001b[0m response\u001b[39m=\u001b[39me\u001b[39m.\u001b[39mresponse,\n\u001b[1;32m 169\u001b[0m )\n",
+ "\u001b[0;31mHTTPStatusError\u001b[0m: Server error '500 Internal Server Error' for url 'http://127.0.0.1:8000/pirate-speak/invoke'\nFor more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500 for Internal Server Error"
+ ]
+ }
+ ],
+ "source": [
+ "from langserve.client import RemoteRunnable\n",
+ "\n",
+ "# Initialize the RemoteRunnable with your API \n",
+ "rag_app = RemoteRunnable(\"http://127.0.0.1:8000/pirate-speak/\")\n",
+ "\n",
+ "# call the API with a question\n",
+ "answer = rag_app.invoke(\"Tell me a joke!\")\n",
+ "\n",
+ "print(answer)\n"
+ ]
+ }
+ ],
+ "metadata": {
+ "language_info": {
+ "name": "python"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+ }
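The 500 error captured in this notebook is consistent with the input shape rather than the endpoint itself: `rag_app.invoke("Tell me a joke!")` sends a bare string, while the chain's prompt requires the variables `text` and `chat_history`, so prompt formatting fails on the server (a missing `TOGETHER_API_KEY` on the server side would produce a similar 500). A sketch of a corrected client call, assuming the same locally running app:

```python
from langserve.client import RemoteRunnable

# Sketch of a corrected call: pass a dict with the keys the prompt expects
# instead of a bare string. Assumes the LangServe app from app/server.py is
# running locally on port 8000.
rag_app = RemoteRunnable("http://127.0.0.1:8000/pirate-speak/")

answer = rag_app.invoke({"text": "Tell me a joke!", "chat_history": []})
print(answer)  # the Together LLM wrapper returns a plain string completion
```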
packages/README.md ADDED
File without changes
packages/pirate-speak/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2023 LangChain, Inc.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
packages/pirate-speak/README.md ADDED
@@ -0,0 +1,67 @@
+
+ # pirate-speak
+
+ This template converts user input into pirate speak.
+
+ ## Environment Setup
+
+ Set the `OPENAI_API_KEY` environment variable to access the OpenAI models.
+
+ ## Usage
+
+ To use this package, you should first have the LangChain CLI installed:
+
+ ```shell
+ pip install -U langchain-cli
+ ```
+
+ To create a new LangChain project and install this as the only package, you can do:
+
+ ```shell
+ langchain app new my-app --package pirate-speak
+ ```
+
+ If you want to add this to an existing project, you can just run:
+
+ ```shell
+ langchain app add pirate-speak
+ ```
+
+ And add the following code to your `server.py` file:
+ ```python
+ from pirate_speak.chain import chain as pirate_speak_chain
+
+ add_routes(app, pirate_speak_chain, path="/pirate-speak")
+ ```
+
+ (Optional) Let's now configure LangSmith.
+ LangSmith will help us trace, monitor and debug LangChain applications.
+ You can sign up for LangSmith [here](https://smith.langchain.com/).
+ If you don't have access, you can skip this section.
+
+
+ ```shell
+ export LANGCHAIN_TRACING_V2=true
+ export LANGCHAIN_API_KEY=<your-api-key>
+ export LANGCHAIN_PROJECT=<your-project> # if not specified, defaults to "default"
+ ```
+
+ If you are inside this directory, then you can spin up a LangServe instance directly by:
+
+ ```shell
+ langchain serve
+ ```
+
+ This will start the FastAPI app with a server running locally at
+ [http://localhost:8000](http://localhost:8000)
+
+ We can see all templates at [http://127.0.0.1:8000/docs](http://127.0.0.1:8000/docs)
+ We can access the playground at [http://127.0.0.1:8000/pirate-speak/playground](http://127.0.0.1:8000/pirate-speak/playground)
+
+ We can access the template from code with:
+
+ ```python
+ from langserve.client import RemoteRunnable
+
+ runnable = RemoteRunnable("http://localhost:8000/pirate-speak")
+ ```
packages/pirate-speak/pirate_speak/__init__.py ADDED
File without changes
packages/pirate-speak/pirate_speak/chain.py ADDED
@@ -0,0 +1,34 @@
+ # from langchain_community.chat_models import ChatOpenAI
+ from langchain_together import Together
+ from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
+
+ import os
+ from dotenv import load_dotenv
+ load_dotenv()
+ together_api_key = os.getenv("TOGETHER_API_KEY")
+
+ # System prompt (Danish): "You are a young man from Randers who fits every stereotype.
+ # You answer the way such a guy would."
+ _prompt = ChatPromptTemplate.from_messages(
+     [
+         (
+             "system",
+             "Du er en ung mand fra Randers og du opfylder alle stereotyper. Du svarer som sådan en ville gøre.",
+         ),
+         MessagesPlaceholder("chat_history"),
+         ("human", "{text}"),
+     ]
+ )
+ _model = Together(
+     model="meta-llama/Llama-3-70b-chat-hf",
+     temperature=0.7,
+     top_k=50,
+     top_p=0.7,
+     repetition_penalty=1,
+     together_api_key=together_api_key
+ )
+
+
+ # if you update this, you MUST also update ../pyproject.toml
+ # with the new `tool.langserve.export_attr`
+ chain = _prompt | _model
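Because the prompt combines a `MessagesPlaceholder("chat_history")` with a `{text}` human message, the chain's input is a dict with those two keys, where `chat_history` is a list of messages and may be empty. A minimal local sketch, assuming `TOGETHER_API_KEY` is available via the environment or a `.env` file as in the module above:

```python
from langchain_core.messages import AIMessage, HumanMessage

from pirate_speak.chain import chain

# Sketch: invoke the chain locally. Assumes TOGETHER_API_KEY is set in the
# environment or in a .env file picked up by load_dotenv() in chain.py.
result = chain.invoke(
    {
        "text": "Tell me a joke!",
        "chat_history": [
            HumanMessage(content="Hi there!"),
            AIMessage(content="Hva så!"),  # the persona answers in Danish
        ],
    }
)
print(result)  # Together is a completion-style LLM, so the output is a plain string
```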
packages/pirate-speak/poetry.lock ADDED
The diff for this file is too large to render. See raw diff
 
packages/pirate-speak/pyproject.toml ADDED
@@ -0,0 +1,31 @@
+ [tool.poetry]
+ name = "pirate-speak"
+ version = "0.0.1"
+ description = "Get started with a simple template that speaks like a pirate"
+ authors = []
+ readme = "README.md"
+
+ [tool.poetry.dependencies]
+ python = ">=3.8.1,<4.0"
+ openai = "<2"
+ langchain-community = ">=0.0.7,<0.2"
+ langchain-core = ">=0.1.4,<0.2"
+
+ [tool.poetry.group.dev.dependencies]
+ langchain-cli = ">=0.0.21"
+ fastapi = ">=0.104.0,<1"
+ sse-starlette = "^1.6.5"
+
+ [tool.langserve]
+ export_module = "pirate_speak.chain"
+ export_attr = "chain"
+
+ [tool.templates-hub]
+ use-case = "chatbot"
+ author = "LangChain"
+ integrations = ["OpenAI"]
+ tags = ["getting-started"]
+
+ [build-system]
+ requires = ["poetry-core"]
+ build-backend = "poetry.core.masonry.api"
packages/pirate-speak/tests/__init__.py ADDED
File without changes
pyproject.toml ADDED
@@ -0,0 +1,26 @@
+ [tool.poetry]
+ name = "my-app"
+ version = "0.1.0"
+ description = ""
+ authors = ["Your Name <[email protected]>"]
+ readme = "README.md"
+ packages = [
+     { include = "app" },
+ ]
+
+ [tool.poetry.dependencies]
+ python = "^3.11"
+ uvicorn = "^0.23.2"
+ langserve = {extras = ["server"], version = ">=0.0.30"}
+ pydantic = "2.6.0"
+ pirate-speak = {path = "packages/pirate-speak", develop = true}
+ python-dotenv = "1"
+ langchain-together = "0.1.0"
+
+
+ [tool.poetry.group.dev.dependencies]
+ langchain-cli = ">=0.0.15"
+
+ [build-system]
+ requires = ["poetry-core"]
+ build-backend = "poetry.core.masonry.api"