# import os
# import streamlit as st
# from langchain.chains import ConversationChain
# from langchain_openai import ChatOpenAI
# from langchain.memory import ConversationBufferMemory
# os.environ["OPENAI_API_KEY"] = ""
#
# # Initialize the chatbot
# @st.cache_resource
# def init_chatbot():
#     memory = ConversationBufferMemory()
#     chatbot = ConversationChain(
#         llm=ChatOpenAI(model="gpt-4o-mini"),
#         memory=memory,
#         verbose=False
#     )
#     return chatbot
#
# # Streamlit Application
# chatbot = init_chatbot()
# st.title("Langchain Chatbot")
# st.write("Hi, I'm a chatbot built with Langchain powered by GPT. How can I assist you today?")
# user_input = st.text_input("You:", placeholder="Ask me anything....")
# if user_input:
#     with st.spinner("Thinking......"):
#         resp = chatbot.run(user_input)
#         st.write(f"Chatbot: {resp}")
import os
import streamlit as st
from langchain.chains import ConversationChain
from langchain_openai import ChatOpenAI
from langchain.memory import ConversationBufferMemory
from streamlit_extras.add_vertical_space import add_vertical_space

# Set up OpenAI API Key securely
if "openai_api_key" in st.secrets:
    os.environ["OPENAI_API_KEY"] = st.secrets["openai_api_key"]
else:
    st.error("🔑 OpenAI API Key is missing! Please add it to Streamlit secrets.")
    st.stop()
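# For local runs, st.secrets is typically read from .streamlit/secrets.toml; the key
# name must match the lookup above. A minimal entry might look like this (the value
# shown is a placeholder, not a real key):
#
#   openai_api_key = "sk-..."
#
# On Streamlit Community Cloud or Hugging Face Spaces, the same key/value pair is
# added through the app's secrets settings instead of a local file.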
# Initialize chatbot with memory
def init_chatbot(model="gpt-4o-mini"):
    try:
        # ConversationBufferMemory keeps the full message history and feeds it
        # back into the prompt on every turn, so the model sees prior context
        memory = ConversationBufferMemory()
        chatbot = ConversationChain(llm=ChatOpenAI(model=model), memory=memory, verbose=False)
        return chatbot
    except Exception as e:
        st.error(f"⚠️ Error initializing chatbot: {e}")
        return None
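# Create the chatbot once per session; st.session_state persists it across
# Streamlit's script reruns, so the conversation memory is not reset on each interaction.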
| if "chatbot" not in st.session_state: | |
| st.session_state.chatbot = init_chatbot() | |
| # Custom Styling | |
| st.markdown(""" | |
| <style> | |
| body { | |
| background-color: #f5f5f5; | |
| } | |
| .stChatMessage { | |
| padding: 10px; | |
| border-radius: 10px; | |
| margin: 5px 0; | |
| } | |
| .user-message { | |
| background-color: #dcf8c6; | |
| text-align: right; | |
| } | |
| .bot-message { | |
| background-color: #ffffff; | |
| } | |
| </style> | |
| """, unsafe_allow_html=True) | |
# Sidebar - Model Selection
st.sidebar.title("⚙️ Settings")
model_choice = st.sidebar.radio("Select Model", ("gpt-4o-mini", "gpt-4", "gpt-3.5-turbo"))

# Update chatbot model if changed (guard against a failed initialization above)
if st.session_state.chatbot and model_choice != st.session_state.chatbot.llm.model_name:
    st.session_state.chatbot = init_chatbot(model_choice)
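# Note: switching models rebuilds the chain with a fresh ConversationBufferMemory,
# so changing the model also resets the conversation context.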
# Title and Description
st.title("💬 LangChain AI Chatbot")
st.write("### Hi, I'm a chatbot powered by GPT. How can I assist you today?")

# Chat history storage
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# User Input via Chat Input (better UX)
user_input = st.chat_input("Type your message here...")
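# st.chat_input returns None until the user submits a message, so the block
# below only runs when there is a new message to process.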
# Process input
if user_input:
    with st.spinner("Thinking..."):
        try:
            response = st.session_state.chatbot.run(user_input)
            if response:
                st.session_state.chat_history.append(("user", user_input))
                st.session_state.chat_history.append(("bot", response))
        except Exception as e:
            st.error(f"⚠️ Error generating response: {e}")

# Display chat history
st.write("### 🗨️ Conversation")
for role, text in st.session_state.chat_history:
    with st.chat_message(role):
        st.markdown(f"**{role.capitalize()}**: {text}")

# Collapsible Chat History
with st.expander("📜 View Full Chat History"):
    for role, text in st.session_state.chat_history:
        st.write(f"**{role.capitalize()}**: {text}")

# Add spacing
add_vertical_space(2)

# Footer
st.markdown("---")
st.markdown("Developed with ❤️ using Streamlit & LangChain")