Building a Streamlit Q&A app using OpenAI's Assistant API

by Sre Chakra Yeddula | Nov 2023
# Import necessary libraries
import openai
import streamlit as st
from bs4 import BeautifulSoup
import requests
import pdfkit
import time
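# NOTE: this listing assumes the openai package (v1.x, with the beta Assistants endpoints),
# streamlit, beautifulsoup4, requests, and pdfkit are installed. pdfkit also needs the
# wkhtmltopdf binary on the system; the path used in text_to_pdf below is a typical
# Homebrew/Linux location and may need adjusting for your install.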

# Set your OpenAI Assistant ID here
assistant_id = 'asst_enteryourownassistant'

# Initialize the OpenAI client (make sure to set your API key in the sidebar within the app)
client = openai
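# (The openai module itself is aliased as the client here so the API key can be set later
# from the sidebar; with openai>=1.0 you could equally build an explicit client once the key
# is known, e.g. from openai import OpenAI; client = OpenAI(api_key=api_key).)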

# Initialize session state variables for file IDs and chat management
if "file_id_list" not in st.session_state:
    st.session_state.file_id_list = []

if "start_chat" not in st.session_state:
    st.session_state.start_chat = False

if "thread_id" not in st.session_state:
    st.session_state.thread_id = None

# Set up the Streamlit page with a title and icon
st.set_page_config(page_title="ChatGPT-like Chat App Using Assistants API", page_icon=":speech_balloon:")

# Define functions for scraping, converting text to PDF, and uploading to OpenAI
def scrape_website(url):
    """Scrape text from a website URL."""
    response = requests.get(url)
    soup = BeautifulSoup(response.text, "html.parser")
    return soup.get_text()

def text_to_pdf(text, filename):
    """Convert text content to a PDF file."""
    path_wkhtmltopdf = '/usr/local/bin/wkhtmltopdf'
    config = pdfkit.configuration(wkhtmltopdf=path_wkhtmltopdf)
    pdfkit.from_string(text, filename, configuration=config)
    return filename

def upload_to_openai(filepath):
    """Upload a file to OpenAI and return its file ID."""
    with open(filepath, "rb") as file:
        response = openai.files.create(file=file.read(), purpose="assistants")
    return response.id

# Create a sidebar for API key configuration and additional features
st.sidebar.header("Configuration")
api_key = st.sidebar.text_input("Enter your OpenAI API key", type="password")
if api_key:
    openai.api_key = api_key

# Additional features in the sidebar for web scraping and file uploading
st.sidebar.header("Additional Features")
website_url = st.sidebar.text_input("Enter a website URL to scrape and organize into a PDF", key="website_url")

# Button to scrape a website, convert to PDF, and upload to OpenAI
if st.sidebar.button("Scrape and Upload"):
    # Scrape, convert, and upload process
    scraped_text = scrape_website(website_url)
    pdf_path = text_to_pdf(scraped_text, "scraped_content.pdf")
    file_id = upload_to_openai(pdf_path)
    st.session_state.file_id_list.append(file_id)
    # st.sidebar.write(f"File ID: {file_id}")

# Sidebar option for users to upload their own files
uploaded_file = st.sidebar.file_uploader("Upload a file to OpenAI embeddings", key="file_uploader")

# Button to upload a user's file and store the file ID
if st.sidebar.button("Upload File"):
    # Upload the file provided by the user
    if uploaded_file:
        with open(f"{uploaded_file.name}", "wb") as f:
            f.write(uploaded_file.getbuffer())
        additional_file_id = upload_to_openai(f"{uploaded_file.name}")
        st.session_state.file_id_list.append(additional_file_id)
        st.sidebar.write(f"Additional File ID: {additional_file_id}")

# Display all file IDs
if st.session_state.file_id_list:
    st.sidebar.write("Uploaded File IDs:")
    for file_id in st.session_state.file_id_list:
        st.sidebar.write(file_id)
        # Associate files with the assistant
        assistant_file = client.beta.assistants.files.create(
            assistant_id=assistant_id,
            file_id=file_id
        )
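        # Note: Streamlit re-runs this script from the top on every interaction, so this
        # association call is repeated for each stored file ID on every rerun.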

# Button to start the chat session
if st.sidebar.button("Start Chat"):
    # Check that files are uploaded before starting the chat
    if st.session_state.file_id_list:
        st.session_state.start_chat = True
        # Create a thread once and store its ID in session state
        thread = client.beta.threads.create()
        st.session_state.thread_id = thread.id
        st.write("thread id: ", thread.id)
    else:
        st.sidebar.warning("Please upload at least one file to start the chat.")

# Define the function to process messages with citations
def process_message_with_citations(message):
    """Extract content and annotations from the message and format citations as footnotes."""
    message_content = message.content[0].text
    annotations = message_content.annotations if hasattr(message_content, 'annotations') else []
    citations = []

    # Iterate over the annotations and add footnotes
    for index, annotation in enumerate(annotations):
        # Replace the annotated text with a footnote marker
        message_content.value = message_content.value.replace(annotation.text, f' [{index + 1}]')

        # Gather citations based on annotation attributes
        if (file_citation := getattr(annotation, 'file_citation', None)):
            # Retrieve the cited file details (dummy response here since we can't call OpenAI)
            cited_file = {'filename': 'cited_document.pdf'}  # This should be replaced with actual file retrieval
            citations.append(f'[{index + 1}] {file_citation.quote} from {cited_file["filename"]}')
        elif (file_path := getattr(annotation, 'file_path', None)):
            # Placeholder for a file download citation
            cited_file = {'filename': 'downloaded_document.pdf'}  # This should be replaced with actual file retrieval
            citations.append(f'[{index + 1}] Click [here](#) to download {cited_file["filename"]}')  # The download link should be replaced with the actual download path

    # Add footnotes to the end of the message content
    full_response = message_content.value + '\n\n' + '\n'.join(citations)
    return full_response
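# (For real citations, the placeholder filenames above could be looked up with something like
# client.files.retrieve(file_citation.file_id).filename, assuming the annotation carries a file_id.)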

# Main chat interface setup
st.title("OpenAI Assistants API Chat")
st.write("This is a simple chat application that uses OpenAI's API to generate responses.")

# Only show the chat interface if the chat has been started
if st.session_state.start_chat:
    # Initialize the model and messages list if not already in session state
    if "openai_model" not in st.session_state:
        st.session_state.openai_model = "gpt-4-1106-preview"
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Display existing messages in the chat
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # Chat input for the user
    if prompt := st.chat_input("What is up?"):
        # Add the user message to the state and display it
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        # Add the user's message to the existing thread
        client.beta.threads.messages.create(
            thread_id=st.session_state.thread_id,
            role="user",
            content=prompt
        )

        # Create a run with additional instructions
        run = client.beta.threads.runs.create(
            thread_id=st.session_state.thread_id,
            assistant_id=assistant_id,
            instructions="Please answer the queries using the data provided in the files. When adding other information, mark it clearly as such with a different color."
        )

        # Poll until the run completes, then retrieve the assistant's messages
        while run.status != 'completed':
            time.sleep(1)
            run = client.beta.threads.runs.retrieve(
                thread_id=st.session_state.thread_id,
                run_id=run.id
            )
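        # Note: this simple loop assumes the run eventually completes; a more robust version
        # would also break out on terminal statuses such as 'failed', 'cancelled', or 'expired',
        # and handle 'requires_action' if tools requiring client-side output are enabled.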

        # Retrieve messages added by the assistant
        messages = client.beta.threads.messages.list(
            thread_id=st.session_state.thread_id
        )

        # Process and display the assistant's messages
        assistant_messages_for_run = [
            message for message in messages
            if message.run_id == run.id and message.role == "assistant"
        ]
        for message in assistant_messages_for_run:
            full_response = process_message_with_citations(message)
            st.session_state.messages.append({"role": "assistant", "content": full_response})
            with st.chat_message("assistant"):
                st.markdown(full_response, unsafe_allow_html=True)
else:
    # Prompt the user to start the chat
    st.write("Please upload files and click 'Start Chat' to begin the conversation.")
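The assistant_id near the top of the listing ('asst_enteryourownassistant') is a placeholder: the app assumes an Assistant with the retrieval tool already exists. If you would rather create one in code than in the OpenAI dashboard, a minimal sketch with the v1 Python client could look like this (the name, instructions, and API key shown are illustrative placeholders, not part of the original app):

from openai import OpenAI

client = OpenAI(api_key="sk-...")  # hypothetical: supply your own API key
assistant = client.beta.assistants.create(
    name="Streamlit Q&A Assistant",  # illustrative name
    instructions="Answer questions using the uploaded files.",  # illustrative instructions
    tools=[{"type": "retrieval"}],  # retrieval tool so uploaded files can be searched
    model="gpt-4-1106-preview"
)
print(assistant.id)  # paste this value into assistant_id in the listing above

With the ID in place, run the app itself with streamlit run app.py (assuming the listing is saved as app.py).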
