-
Notifications
You must be signed in to change notification settings - Fork 0
/
chatbot_lib.py
55 lines (46 loc) · 1.72 KB
/
chatbot_lib.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
import os
from langchain.memory import ConversationSummaryBufferMemory
from langchain.llms.bedrock import Bedrock
from langchain.chains import ConversationChain
import json
import streamlit as st
# Course catalog used to ground the chatbot's answers.
# NOTE(review): the original comment referenced "CPSC.json", but the file
# actually loaded is "UBCCourses.json" — comment corrected to match the code.
# Use a context manager so the file handle is closed promptly (the original
# json.load(open(...)) leaked the handle).
with open("UBCCourses.json") as _course_file:
    course_data = json.load(_course_file)

# Courses the user has already taken; defaults to a single space when the
# Streamlit session has not stored any yet.
courses_taken = " "
if "courses_taken" in st.session_state:
    # Pull the previously saved value from the Streamlit session state.
    courses_taken = st.session_state["courses_taken"]
def get_llm():
    """Construct and return a Bedrock LLM client for Anthropic Claude v2.1.

    Connection details (AWS credentials profile, region, endpoint URL) are
    read from optional environment variables; any that are unset are passed
    through as ``None`` and Bedrock falls back to its defaults.
    """
    env = os.environ.get  # hoist the lookup; all three settings are optional
    return Bedrock(
        credentials_profile_name=env("BWB_PROFILE_NAME"),  # optional AWS profile
        region_name=env("BWB_REGION_NAME"),                # optional AWS region
        endpoint_url=env("BWB_ENDPOINT_URL"),              # optional endpoint override
        model_id="anthropic.claude-v2:1",                  # Claude v2.1 on Bedrock
    )
def get_memory():
    """Create conversation memory that summarizes older turns.

    The same Bedrock LLM is used to compress prior messages into a running
    summary once the history exceeds the 20,000-token budget.
    """
    return ConversationSummaryBufferMemory(
        llm=get_llm(),
        max_token_limit=20000,  # summarize history beyond this token budget
    )
def get_chat_response(input_text, memory):
    """Run one conversation turn and return the model's reply as a string.

    The prompt interpolates the course catalog before, and the user's
    taken courses after, the user's message.

    NOTE(review): the literal " + " separators inside the f-string are
    reproduced verbatim from the original — they appear in the prompt text
    sent to the model; confirm whether that was intended.
    """
    chain = ConversationChain(
        llm=get_llm(),
        memory=memory,
        verbose=True,  # print the chain's internal states while running
    )
    prompt = f"{course_data} + {input_text} + {courses_taken}"
    return chain.predict(input=prompt)