Skip to content

Commit

Permalink
Merge pull request #524 from GoogleCloudPlatform/chatbot_scaffold
Browse files Browse the repository at this point in the history
Add Streamlit Gemini chatbot scaffold
  • Loading branch information
takumiohym authored Sep 26, 2024
2 parents 26d42e4 + 0361779 commit 41137c3
Show file tree
Hide file tree
Showing 8 changed files with 703 additions and 0 deletions.
13 changes: 13 additions & 0 deletions scaffolds/streamlit_on_cloudrun/llm_chatbot/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Container image for the Streamlit Gemini chatbot, deployed to Cloud Run.
FROM python:3.10.14

WORKDIR /app

# Copy and install requirements first so Docker layer caching is effective:
# app-code changes don't force a dependency re-install.
# --no-cache-dir keeps the pip download cache out of the image.
COPY requirements.txt /app
RUN pip install --no-cache-dir -r requirements.txt

COPY assets /app/assets
COPY app.py /app

# Cloud Run routes traffic to this port.
EXPOSE 8080

# Exec form (JSON array) so Streamlit runs as PID 1 and receives SIGTERM
# directly for clean shutdown; shell form would wrap it in /bin/sh, which
# does not forward signals.
CMD ["streamlit", "run", "--server.port", "8080", "--server.enableCORS", "false", "app.py"]
16 changes: 16 additions & 0 deletions scaffolds/streamlit_on_cloudrun/llm_chatbot/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Streamlit Application - Chat with Gemini

This directory includes the files to build and deploy a chatbot application on Streamlit.

For step-by-step instructions, refer to `build_app.ipynb`, which includes a detailed explanation of each file.

Or, if you want to quickly deploy and check the application, follow the steps below:

1. Open `deploy.sh` and change the PROJECT_ID variable to your project id.

2. Run `deploy.sh` to deploy the application to Cloud Run.
```
. deploy.sh
```

3. Follow the instructions printed at the end of the `deploy.sh` command, and open the application via Cloud Shell.
73 changes: 73 additions & 0 deletions scaffolds/streamlit_on_cloudrun/llm_chatbot/app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
"""Streamlit Chatbot App"""

import os
import time

import streamlit as st
import vertexai
from vertexai.generative_models import Content, GenerativeModel, Part

st.set_page_config(page_title="Chat with Gemini", page_icon="♊")

st.title("Chat with Gemini")

st.markdown("Welcome to this simple web application to chat with Gemini")

PROJECT_ID = os.environ.get("GCP_PROJECT")
LOCATION = os.environ.get("GCP_REGION")

vertexai.init(project=PROJECT_ID, location=LOCATION)

if "gemini_model" not in st.session_state:
st.session_state["gemini_model"] = "gemini-1.5-flash-001"

model = GenerativeModel(st.session_state["gemini_model"])

# Initialize chat history
if "messages" not in st.session_state:
st.session_state.messages = []

# Display chat messages from history on app rerun
for message in st.session_state.messages:
with st.chat_message(name=message["role"], avatar=message["avatar"]):
st.markdown(message["content"])


def generate_response(input_text):
    """Send *input_text* to Gemini with the prior conversation as context.

    The last entry in ``st.session_state.messages`` is the user message
    that produced *input_text*, so it is excluded from the replayed
    history and delivered via ``send_message`` instead.
    """
    past_turns = st.session_state.messages[:-1]
    history = []
    for turn in past_turns:
        text_part = Part.from_text(turn["content"])
        history.append(Content(role=turn["role"], parts=[text_part]))
    session = model.start_chat(history=history)
    return session.send_message(input_text)


def stream(text):
    """Yield *text* one space-delimited word at a time, with a short pause.

    Each chunk keeps a trailing space so the concatenation of all yielded
    chunks reconstructs the words; the 20 ms sleep between words produces
    a typing effect when fed to ``st.write_stream``.
    """
    words = text.split(" ")
    for word in words:
        yield f"{word} "
        time.sleep(0.02)


# React to user input. Streamlit reruns the script top-to-bottom on every
# interaction; the walrus assignment is truthy only when the user submits.
# Fix: placeholder typo "promt" -> "prompt".
if prompt := st.chat_input("Write a prompt"):
    # 1. Echo the user message in the chat pane.
    with st.chat_message(name="user", avatar=None):
        st.write(prompt)
    # 2. Persist the user message so future reruns can re-render it.
    st.session_state.messages.append(
        {"role": "user", "content": prompt, "avatar": None}
    )

    # 3. Call Gemini and stream the response word by word.
    with st.chat_message(name="assistant", avatar="assets/gemini-icon.png"):
        response = generate_response(prompt)
        st.write_stream(stream(response.text))
    # 4. Persist the assistant response.
    st.session_state.messages.append(
        {
            "role": "assistant",
            "content": response.text,
            "avatar": "assets/gemini-icon.png",
        }
    )
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading

0 comments on commit 41137c3

Please sign in to comment.