mirror of
https://github.com/mudler/LocalAI.git
synced 2024-12-22 14:02:24 +00:00
f37a4ec9c8
**Description** This PR fixes #1038 Added Streamlit example and also updated readme for examples. **[Signed commits](../CONTRIBUTING.md#signing-off-on-commits-developer-certificate-of-origin)** - [X] Yes, I signed my commits. <!-- Thank you for contributing to LocalAI! Contributing Conventions: 1. Include descriptive PR titles with [<component-name>] prepended. 2. Build and test your changes before submitting a PR. 3. Sign your commits By following the community's contribution conventions upfront, the review process will be accelerated and your PR merged more quickly. -->
70 lines
2.0 KiB
Python
70 lines
2.0 KiB
Python
import streamlit as st
|
|
import time
|
|
import requests
|
|
import json
|
|
|
|
def ask(prompt):
    """Send *prompt* to the local LocalAI chat-completions endpoint and
    return the assistant's reply text.

    Parameters:
        prompt: the user's question, sent as a single "user" role message.

    Returns:
        The assistant message content from the first choice in the response.

    Raises:
        requests.HTTPError: on a non-2xx response (was previously parsed
            anyway and surfaced as a confusing KeyError).
        requests.Timeout / requests.ConnectionError: if the server is
            unreachable or hangs.
    """
    url = 'http://localhost:8080/v1/chat/completions'

    myobj = {
        "model": "ggml-gpt4all-j.bin",
        "messages": [{"role": "user", "content": prompt}],
        "temperature": 0.9
    }
    myheaders = { "Content-Type" : "application/json" }

    # timeout so a hung/absent server cannot block the Streamlit UI forever
    x = requests.post(url, json = myobj, headers=myheaders, timeout=120)
    x.raise_for_status()  # fail loudly on HTTP errors instead of KeyError below

    print(x.text)  # debug trace of the raw API response

    # response.json() replaces the redundant json.loads(x.text)
    json_data = x.json()

    return json_data["choices"][0]["message"]["content"]
|
|
|
|
|
|
def main():
    """Streamlit chat UI: renders the stored conversation, accepts a new
    prompt via the chat input, queries the local LLM through ask(), and
    appends both sides of the exchange to st.session_state.messages."""
    # Page setup
    st.set_page_config(page_title="Ask your LLM")
    st.header("Ask your Question 💬")

    # Initialize chat history
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Display chat messages from history on app rerun
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # BUG FIX: the scroll script below targets id "end-of-chat", but no
    # element with that id was ever rendered, so getElementById always
    # returned null. Render the anchor at the end of the history first.
    st.markdown('<div id="end-of-chat"></div>', unsafe_allow_html=True)

    # Scroll to bottom
    # NOTE(review): Streamlit sanitizes markdown HTML and typically does not
    # execute <script> tags, so this may be a no-op — confirm in the browser;
    # a components.html() call would be the reliable alternative.
    st.markdown(
        """
        <script>
        var element = document.getElementById("end-of-chat");
        element.scrollIntoView({behavior: "smooth"});
        </script>
        """,
        unsafe_allow_html=True,
    )

    # React to user input
    if prompt := st.chat_input("What is up?"):
        # Display user message in chat message container
        st.chat_message("user").markdown(prompt)
        # Add user message to chat history
        st.session_state.messages.append({"role": "user", "content": prompt})
        print(f"User has asked the following question: {prompt}")

        # Process: blocking call to the local LLM, with a spinner while we wait
        response = ""
        with st.spinner('Processing...'):
            response = ask(prompt)

        # Display assistant response in chat message container
        with st.chat_message("assistant"):
            st.markdown(response)
        # Add assistant response to chat history
        st.session_state.messages.append({"role": "assistant", "content": response})
|
|
|
|
# Script entry point: launch the chat UI when run directly
# (e.g. `streamlit run <this file>`), but not when imported as a module.
if __name__ == "__main__":
    main()