/
11_P170.py
69 lines (61 loc) · 2.05 KB
/
11_P170.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
# Build a chatbot that can hold multiple conversation histories (book P170).
import chainlit as cl
import os
from langchain_openai import ChatOpenAI
from langchain.memory import ConversationBufferMemory, RedisChatMessageHistory
from langchain.chains import ConversationChain
from langchain.schema import HumanMessage
# Shared chat model for all sessions. Requires OPENAI_API_KEY in the environment.
chat = ChatOpenAI(openai_api_key=os.environ["OPENAI_API_KEY"], model = "gpt-4-0613")
@cl.on_chat_start
async def on_chat_start():
    """Start (or resume) a conversation thread.

    Asks the user for a thread ID, loads that thread's history from Redis,
    replays the stored messages into the UI, and stores a ConversationChain
    in the user session for ``on_message`` to use.
    """
    # Keep asking until the user actually supplies a thread ID
    # (AskUserMessage returns None on timeout).
    thread_id = None
    while not thread_id:
        res = await cl.AskUserMessage(
            content="私は会話の文脈を考慮した返答ができるチャットボットです。スレッドIDを入力してください。",
            timeout=600,
        ).send()
        if res:
            thread_id = res['content']

    # Persist the conversation per thread ID in Redis so separate threads
    # keep separate histories. Requires REDIS_URL in the environment.
    history = RedisChatMessageHistory(
        session_id=thread_id,
        url=os.environ["REDIS_URL"],
    )

    # Full-history memory backed by Redis. NOTE: ConversationBufferMemory
    # takes no ``k`` parameter — the original passed a dangling ``k =``
    # (a syntax error); ``k`` only applies to ConversationBufferWindowMemory.
    memory = ConversationBufferMemory(
        return_messages=True,
        chat_memory=history,
    )

    chain = ConversationChain(
        memory=memory,
        llm=chat,
    )

    # Replay the stored history into the UI so the user sees prior turns.
    memory_message_result = chain.memory.load_memory_variables({})
    messages = memory_message_result["history"]
    for message in messages:
        if isinstance(message, HumanMessage):
            await cl.Message(
                author="User",
                content=f"{message.content}",
            ).send()
        else:
            await cl.Message(
                author="ChatBot",
                content=f"{message.content}",
            ).send()

    # Hand the chain to on_message via the per-user session store.
    cl.user_session.set("chain", chain)
@cl.on_message
async def on_message(input_message):
    """Run the incoming user message through the session's conversation
    chain and send the model's reply back to the UI."""
    conversation = cl.user_session.get("chain")
    reply = conversation(input_message)["response"]
    await cl.Message(content=reply).send()