Galileo
Search
⌃K

Example: Short Summary App

First, install Galileo's llm-monitor library:
pip install llm-monitor
Below is an example Python Streamlit app that shows how to integrate Galileo Monitor into your LLM application:
import streamlit as st
​
from langchain.chains import LLMChain
from langchain.chat_models import ChatOpenAI
from langchain.prompts import PromptTemplate
​
from llm_monitor import MonitorHandler
​
​
class MonitoringApp:
    """Streamlit helper that runs a summarization chain and links out to the Galileo console."""

    def __init__(self):
        # Console page where the monitored prompt traces can be inspected.
        self.galileo_console_url = "https://console.dev.rungalileo.io/prompt-monitoring"

    def run_llm(self, llm, user_prompt):
        """Summarize *user_prompt* with *llm* and render the result in Streamlit.

        Args:
            llm: A LangChain chat model (its callbacks may include Galileo's
                MonitorHandler — see the caller).
            user_prompt: Raw text to summarize; empty/None input is rejected
                with an on-page message.
        """
        # BUG FIX: st.text_area returns "" (not None) when the box is empty, so
        # the original `user_prompt is None` check could never fire from the UI.
        # Truthiness catches both None and the empty string.
        if not user_prompt:
            st.write("Please provide a text to summarize.")
        else:
            prompt = PromptTemplate.from_template("Summarize the following in a point by point manner: {user_prompt}")
            chain = LLMChain(llm=llm, prompt=prompt)
            result = chain.run(user_prompt=user_prompt)
            st.write(f"{result}\n")
            # unsafe_allow_html lets the anchor tag render as a clickable link.
            st.write(f"<a href='{self.galileo_console_url}'>Galileo Link</a>", unsafe_allow_html=True)
​
​
def run_streamlit_app(app, llm):
    """Draw the Text Summarizer page and hand the entered document to *app*.

    Args:
        app: A MonitoringApp instance whose run_llm does the summarization.
        llm: The LangChain chat model passed through to app.run_llm.
    """
    st.markdown("""## Text Summarizer""")
    st.markdown(
        """Welcome! Acme has built this summarizer for you.
Please paste your text and we'll summarize this for you.""")
    st.markdown("""---""")
    document = st.text_area("Enter the document you'd like summarized")
    # Only run the (potentially slow) LLM call on an explicit button press.
    if st.button("Summarize"):
        app.run_llm(llm, document)
        st.markdown("""---""")
​
def main():
    """Wire Galileo monitoring into the LLM and launch the Streamlit UI."""
    # The MonitorHandler callback streams prompt/response traces to the
    # Galileo project named "example_app".
    monitor = MonitorHandler(project_name="example_app")
    llm = ChatOpenAI(
        temperature=0,  # deterministic summaries
        callbacks=[monitor],
    )
    run_streamlit_app(MonitoringApp(), llm)


if __name__ == "__main__":
    main()
Link on GitHub.