
Embedchain + Mistral Streamlit chatbot (#1017)

Sidharth Mohanty 1 year ago
parent
commit
0272459435

+ 113 - 0
docs/integration/streamlit-mistral.mdx

@@ -0,0 +1,113 @@
+---
+title: '🚀 Streamlit'
+description: 'Integrate with Streamlit to plug and play with any LLM'
+---
+
+In this example, we will learn how to use `mistralai/Mistral-7B-v0.1` and Embedchain together with Streamlit to build a simple RAG chatbot.
+
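+At its core, the app wires three Embedchain calls into a Streamlit chat UI: build an app from a YAML config with `App.from_config`, `add()` sources to the knowledge base, and `chat()` over them. Here is a minimal sketch of that flow without the UI (the token value, source URL, and question below are placeholders):
+
+```python
+import os
+from embedchain import Pipeline as App
+
+# Placeholder token; the Streamlit app instead reads it from the sidebar input.
+os.environ["HUGGINGFACE_ACCESS_TOKEN"] = "hf_..."
+
+app = App.from_config(config_path="config.yaml")
+app.add("https://example.com/my-notes")  # index a source (placeholder URL)
+
+# With `stream: true` in config.yaml, chat() streams the answer in chunks.
+answer = "".join(app.chat("What do my notes say?"))
+print(answer)
+```
+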
+## Setup
+
+<Accordion title="Customize using code.">
+    1. Install Embedchain and Streamlit
+    ```bash
+    pip install embedchain
+    pip install streamlit
+    ```
+    <Tabs>
+        <Tab title="app.py">
+        ```python
+            import os
+            from embedchain import Pipeline as App
+            import streamlit as st
+
+            with st.sidebar:
+                huggingface_access_token = st.text_input("Hugging Face Token", key="chatbot_api_key", type="password")
+                "[Get Hugging Face Access Token](https://huggingface.co/settings/tokens)"
+                "[View the source code](https://github.com/embedchain/examples/mistral-streamlit)"
+
+
+            st.title("💬 Chatbot")
+            st.caption("🚀 An Embedchain app powered by Mistral!")
+            if "messages" not in st.session_state:
+                st.session_state.messages = [
+                    {
+                        "role": "assistant",
+                        "content": """
+                    Hi! I'm a chatbot. I can answer questions and learn new things!\n
+                    Ask me anything, and if you want me to learn something, use `/add <source>`.\n
+                    I can learn almost anything. :)
+                    """,
+                    }
+                ]
+
+            for message in st.session_state.messages:
+                with st.chat_message(message["role"]):
+                    st.markdown(message["content"])
+
+            if prompt := st.chat_input("Ask me anything!"):
+                if not st.session_state.chatbot_api_key:
+                    st.error("Please enter your Hugging Face Access Token")
+                    st.stop()
+
+                os.environ["HUGGINGFACE_ACCESS_TOKEN"] = st.session_state.chatbot_api_key
+                app = App.from_config(config_path="config.yaml")
+
+                if prompt.startswith("/add"):
+                    with st.chat_message("user"):
+                        st.markdown(prompt)
+                        st.session_state.messages.append({"role": "user", "content": prompt})
+                    prompt = prompt.replace("/add", "").strip()
+                    with st.chat_message("assistant"):
+                        message_placeholder = st.empty()
+                        message_placeholder.markdown("Adding to knowledge base...")
+                        app.add(prompt)
+                        message_placeholder.markdown(f"Added {prompt} to knowledge base!")
+                        st.session_state.messages.append({"role": "assistant", "content": f"Added {prompt} to knowledge base!"})
+                        st.stop()
+
+                with st.chat_message("user"):
+                    st.markdown(prompt)
+                    st.session_state.messages.append({"role": "user", "content": prompt})
+
+                with st.chat_message("assistant"):
+                    msg_placeholder = st.empty()
+                    msg_placeholder.markdown("Thinking...")
+                    full_response = ""
+
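+                    # `stream: true` in config.yaml makes app.chat() stream the answer in chunks;
+                    # accumulate them and render the full response once streaming finishes.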
+                    for response in app.chat(prompt):
+                        msg_placeholder.empty()
+                        full_response += response
+
+                    msg_placeholder.markdown(full_response)
+                    st.session_state.messages.append({"role": "assistant", "content": full_response})
+            ```
+        </Tab>
+        <Tab title="config.yaml">
+        ```yaml
+            app:
+              config:
+                name: 'mistral-streamlit-app'
+
+            llm:
+              provider: huggingface
+              config:
+                model: 'mistralai/Mistral-7B-v0.1'
+                temperature: 0.1
+                max_tokens: 250
+                top_p: 0.1
+                stream: true
+
+            embedder:
+              provider: huggingface
+              config:
+                model: 'sentence-transformers/all-mpnet-base-v2'
+        ```
+        </Tab>
+    </Tabs>
+</Accordion>
+
+## Run it locally
+
+```bash
+streamlit run app.py
+```
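+
+Once the app is running, paste your Hugging Face access token into the sidebar, then ask questions in the chat box. To ground the bot on your own data, type `/add <source>` in the chat, for example `/add https://example.com/my-notes` (a placeholder URL); the app indexes the source into its knowledge base and answers later questions against it.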

+ 2 - 1
docs/mint.json

@@ -77,7 +77,8 @@
         "group": "🔗 Integrations",
           "pages": [
             "integration/langsmith",
-            "integration/chainlit"
+            "integration/chainlit",
+            "integration/streamlit-mistral"
           ]
         },
         "get-started/faq"

+ 7 - 0
examples/mistral-streamlit/README.md

@@ -0,0 +1,7 @@
+### Streamlit Chatbot App (Embedchain + Mistral)
+
+To run it locally,
+
+```bash
+streamlit run app.py
+```
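+
+The app expects `config.yaml` next to `app.py` and the dependencies from `requirements.txt` installed (`pip install -r requirements.txt`). A Hugging Face access token is entered in the sidebar at runtime; use `/add <source>` in the chat to add new sources to the knowledge base.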

+ 64 - 0
examples/mistral-streamlit/app.py

@@ -0,0 +1,64 @@
+import os
+from embedchain import Pipeline as App
+import streamlit as st
+
+with st.sidebar:
+    huggingface_access_token = st.text_input("Hugging Face Token", key="chatbot_api_key", type="password")
+    "[Get Hugging Face Access Token](https://huggingface.co/settings/tokens)"
+    "[View the source code](https://github.com/embedchain/examples/mistral-streamlit)"
+
+
+st.title("💬 Chatbot")
+st.caption("🚀 An Embedchain app powered by Mistral!")
+if "messages" not in st.session_state:
+    st.session_state.messages = [
+        {
+            "role": "assistant",
+            "content": """
+        Hi! I'm a chatbot. I can answer questions and learn new things!\n
+        Ask me anything, and if you want me to learn something, use `/add <source>`.\n
+        I can learn almost anything. :)
+        """,
+        }
+    ]
+
+for message in st.session_state.messages:
+    with st.chat_message(message["role"]):
+        st.markdown(message["content"])
+
+if prompt := st.chat_input("Ask me anything!"):
+    if not st.session_state.chatbot_api_key:
+        st.error("Please enter your Hugging Face Access Token")
+        st.stop()
+
+    os.environ["HUGGINGFACE_ACCESS_TOKEN"] = st.session_state.chatbot_api_key
+    app = App.from_config(config_path="config.yaml")
+
+    if prompt.startswith("/add"):
+        with st.chat_message("user"):
+            st.markdown(prompt)
+            st.session_state.messages.append({"role": "user", "content": prompt})
+        prompt = prompt.replace("/add", "").strip()
+        with st.chat_message("assistant"):
+            message_placeholder = st.empty()
+            message_placeholder.markdown("Adding to knowledge base...")
+            app.add(prompt)
+            message_placeholder.markdown(f"Added {prompt} to knowledge base!")
+            st.session_state.messages.append({"role": "assistant", "content": f"Added {prompt} to knowledge base!"})
+            st.stop()
+
+    with st.chat_message("user"):
+        st.markdown(prompt)
+        st.session_state.messages.append({"role": "user", "content": prompt})
+
+    with st.chat_message("assistant"):
+        msg_placeholder = st.empty()
+        msg_placeholder.markdown("Thinking...")
+        full_response = ""
+
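+        # `stream: true` in config.yaml makes app.chat() stream the answer in chunks;
+        # accumulate them and render the full response once streaming finishes.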
+        for response in app.chat(prompt):
+            msg_placeholder.empty()
+            full_response += response
+
+        msg_placeholder.markdown(full_response)
+        st.session_state.messages.append({"role": "assistant", "content": full_response})

+ 17 - 0
examples/mistral-streamlit/config.yaml

@@ -0,0 +1,17 @@
+app:
+  config:
+    name: 'mistral-streamlit-app'
+
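+# Hugging Face-hosted LLM; app.py supplies HUGGINGFACE_ACCESS_TOKEN from the sidebar input.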
+llm:
+  provider: huggingface
+  config:
+    model: 'mistralai/Mixtral-8x7B-Instruct-v0.1'
+    temperature: 0.1
+    max_tokens: 250
+    top_p: 0.1
+    stream: true
+
+embedder:
+  provider: huggingface
+  config:
+    model: 'sentence-transformers/all-mpnet-base-v2'

+ 2 - 0
examples/mistral-streamlit/requirements.txt

@@ -0,0 +1,2 @@
+streamlit==1.29.0
+embedchain