themanas021 committed
Commit 0a44d42 · 1 Parent(s): 8c633e5

Create app.py

Files changed (1)
  1. app.py +39 -0
app.py ADDED
@@ -0,0 +1,39 @@
+ from langchain import PromptTemplate, LLMChain
+ import chainlit as cl
+ from langchain import HuggingFaceHub
+
+ repo_id = "tiiuae/falcon-7b-instruct"
+ llm = HuggingFaceHub(
+     huggingfacehub_api_token='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',
+     repo_id=repo_id,
+     model_kwargs={"temperature":0.3, "max_new_tokens":500}
+ )
+
+ template = """Question: {question}
+
+ Answer: Let's think step by step."""
+
+
+ @cl.on_chat_start
+ def main():
+     # Instantiate the chain for that user session
+     prompt = PromptTemplate(template=template, input_variables=["question"])
+     llm_chain = LLMChain(prompt=prompt, llm=llm, verbose=True)
+
+     # Store the chain in the user session
+     cl.user_session.set("llm_chain", llm_chain)
+
+
+ @cl.on_message
+ async def main(message: cl.Message):
+     # Retrieve the chain from the user session
+     llm_chain = cl.user_session.get("llm_chain")  # type: LLMChain
+
+     # Call the chain asynchronously
+     res = await llm_chain.acall(message.content, callbacks=[cl.AsyncLangchainCallbackHandler()])
+
+     # Do any post processing here
+
+     # "res" is a Dict. For this chain, we get the response by reading the "text" key.
+     # This varies from chain to chain, you should check which key to read.
+     await cl.Message(content=res["text"]).send()
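Usage note (not part of the commit): a minimal sketch of exercising the same chain outside Chainlit, e.g. from a plain Python shell, to check the HuggingFaceHub setup. It assumes the same legacy langchain release used in app.py and a valid token exposed via the HUGGINGFACEHUB_API_TOKEN environment variable instead of the hard-coded string; the sample question is purely illustrative.

# Standalone sanity check, assuming the legacy langchain API used in app.py above.
import os

from langchain import HuggingFaceHub, LLMChain, PromptTemplate

repo_id = "tiiuae/falcon-7b-instruct"

# Read the token from the environment rather than hard-coding it.
llm = HuggingFaceHub(
    huggingfacehub_api_token=os.environ["HUGGINGFACEHUB_API_TOKEN"],
    repo_id=repo_id,
    model_kwargs={"temperature": 0.3, "max_new_tokens": 500},
)

template = """Question: {question}

Answer: Let's think step by step."""

prompt = PromptTemplate(template=template, input_variables=["question"])
llm_chain = LLMChain(prompt=prompt, llm=llm)

if __name__ == "__main__":
    # Synchronous call; app.py uses the async `acall` variant inside Chainlit.
    res = llm_chain("What is the capital of France?")
    print(res["text"])

The app itself is started with Chainlit's CLI (chainlit run app.py -w), which registers the @cl.on_chat_start and @cl.on_message handlers defined above.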