Update app.py
app.py CHANGED

@@ -93,12 +93,7 @@ if st.session_state.logged_in:
     st.chat_message("user").markdown(prompt)
 
     # Add user message to chat history
-
-    st.session_state.messages.append({"role": "user", "content": prompt})
-
-    # API Call
-    bot = ChatBot(
-        protocol={"role": "system", "content": f"""
+    st.session_state.messages.append({"role": "system", "content": f"""
 You are a helpful assistant assiting users on GPU selections.
 Here's the data:
 {df.to_markdown(index=False)}
@@ -106,10 +101,14 @@ if st.session_state.logged_in:
 User may ask what is the best GPU selection.
 You will need to ask user: 1) type of task, 2) size of data, 3) size of models.
 You will then make a suggestion of what type of GPU or instance is the best for the user.
-"""}
-    )
-
-    #
+"""})
+    st.session_state.messages.append({"role": "user", "content": prompt})
+
+    # API Call
+    bot = ChatBot()
+    st.write(st.session_state.messages.copy())
+    st.type(st.session_state.messages.copy())
+    bot.history = st.session_state.messages.copy()  # Update history from messages
     response = bot.generate_response(prompt)
 
     # Display assistant response in chat message container
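To make the new flow easier to follow, here is a minimal, self-contained sketch of the pattern this commit introduces: the system prompt is appended to st.session_state.messages and copied into the bot's history, instead of being passed to ChatBot via a protocol= argument. The ChatBot stub, the placeholder system prompt, and the st.chat_input label below are illustrative stand-ins for the app's real class, the df-based prompt, and the login-gated UI. Note also that the committed st.type(...) call is not part of the Streamlit API and would raise AttributeError at runtime, so the sketch uses st.write(type(...)) for that debug line instead.

# sketch_app.py - illustration only, not the committed code
import streamlit as st

class ChatBot:  # stand-in for the app's real ChatBot class
    def __init__(self):
        self.history = []  # list of {"role": ..., "content": ...} dicts

    def generate_response(self, prompt: str) -> str:
        # the real class presumably sends self.history to an LLM API; this stub just echoes
        return f"(stub) {len(self.history)} messages in history; you asked: {prompt}"

SYSTEM_PROMPT = "You are a helpful assistant assisting users on GPU selections."  # the app interpolates df.to_markdown(index=False) here

if "messages" not in st.session_state:
    st.session_state.messages = []

if prompt := st.chat_input("Ask about GPUs"):
    st.chat_message("user").markdown(prompt)

    # Add system prompt and user message to chat history (the new behaviour)
    st.session_state.messages.append({"role": "system", "content": SYSTEM_PROMPT})
    st.session_state.messages.append({"role": "user", "content": prompt})

    # API Call
    bot = ChatBot()  # no protocol= argument any more
    st.write(type(st.session_state.messages))  # debug output; st.type(...) does not exist
    bot.history = st.session_state.messages.copy()  # update history from messages
    response = bot.generate_response(prompt)

    # Display assistant response in chat message container
    with st.chat_message("assistant"):
        st.markdown(response)

The net effect of the diff is that the conversation state, including the system prompt, now lives entirely in st.session_state.messages, and a fresh ChatBot is re-seeded from that list on each turn rather than receiving the prompt in its constructor.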