st-Phi3Mini-128k-Chat.py
import streamlit as st
from gradio_client import Client
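
# This app needs the `streamlit` and `gradio_client` packages, plus a valid
# Hugging Face access token (set inside create_client() below).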
if "hf_model" not in st.session_state:
st.session_state.hf_model = "Phi-3-mini-128k-instruct"
# Initialize chat history
if "messages" not in st.session_state:
st.session_state.messages = []
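
# Cache the Gradio API client so it is created only once and reused across
# Streamlit reruns, instead of reconnecting on every user interaction.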
@st.cache_resource
def create_client():
    yourHFtoken = "hf_xxxxxxxxxxxxxxxxxx"  # put your Hugging Face token here
    print(f'loading the API gradio client for {st.session_state.hf_model}')
    client = Client("eswardivi/Phi-3-mini-128k-instruct", hf_token=yourHFtoken)
    return client
# FUNCTION TO LOG ALL CHAT MESSAGES INTO chathistoryPhi3mini.txt
def writehistory(text):
    with open('chathistoryPhi3mini.txt', 'a', encoding='utf-8') as f:
        f.write(text)
        f.write('\n')
# AVATARS: a single emoji (e.g. "🧑‍💻", "🤖", "🦖") or a local image path
# such as './man.png' or './robot.png'. Shortcodes are not supported.
av_us = '🧑‍💻'
av_ass = '🤖'
### START STREAMLIT UI
st.image('https://github.com/fabiomatricardi/ChatBOTMastery/raw/main/Phi3mini128-logo.png')
st.markdown("### *powered by Streamlit & Gradio_client*", unsafe_allow_html=True)
#st.subheader(f"Free ChatBot using {st.session_state.hf_model}")
st.markdown('---')
client = create_client()
# Display the chat messages from history on every app rerun
for message in st.session_state.messages:
    if message["role"] == "user":
        with st.chat_message(message["role"], avatar=av_us):
            st.markdown(message["content"])
    else:
        with st.chat_message(message["role"], avatar=av_ass):
            st.markdown(message["content"])
# Accept user input
if myprompt := st.chat_input("What is an AI model?"):
    # Add the user message to the chat history
    st.session_state.messages.append({"role": "user", "content": myprompt})
    # Display the user message in a chat message container and log it
    with st.chat_message("user", avatar=av_us):
        st.markdown(myprompt)
        usertext = f"user: {myprompt}"
        writehistory(usertext)
    # Display the assistant response in a chat message container
    with st.chat_message("assistant", avatar=av_ass):
        message_placeholder = st.empty()
        full_response = ""
        # Call the hosted Gradio app; submit() returns a job that streams results
        res = client.submit(
            myprompt,
            0,     # float (numeric value between 0 and 1) for the 'Temperature' slider component
            True,  # bool for the 'Sampling' checkbox component
            800,   # float (numeric value between 128 and 4096) for the 'Max new tokens' slider component
            api_name="/chat"
        )
        # Iterate over the partial outputs to simulate typing
        for r in res:
            full_response = r
            message_placeholder.markdown(r + "▌")
        message_placeholder.markdown(full_response)
        asstext = f"assistant: {full_response}"
        writehistory(asstext)
    st.session_state.messages.append({"role": "assistant", "content": full_response})
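
# To run the app locally (assuming the dependencies above are installed and a
# valid Hugging Face token has been set in create_client()):
#   streamlit run st-Phi3Mini-128k-Chat.py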