Skip to content

Commit

Permalink
Update app error handling
Browse files Browse the repository at this point in the history
  • Loading branch information
JoshuaC215 committed Dec 10, 2024
1 parent 08bb9b5 commit 1fb349d
Show file tree
Hide file tree
Showing 2 changed files with 57 additions and 27 deletions.
63 changes: 36 additions & 27 deletions src/streamlit_app.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,10 @@

import streamlit as st
from dotenv import load_dotenv
from httpx import ConnectError, ConnectTimeout
from pydantic import ValidationError
from streamlit.runtime.scriptrunner import get_script_run_ctx

from client import AgentClient
from client import AgentClient, AgentClientError
from schema import ChatHistory, ChatMessage
from schema.task_data import TaskData, TaskDataStatus

Expand Down Expand Up @@ -59,9 +58,11 @@ async def main() -> None:
port = os.getenv("PORT", 80)
agent_url = f"http://{host}:{port}"
try:
st.session_state.agent_client = AgentClient(base_url=agent_url)
except (ConnectError, ConnectTimeout) as e:
with st.spinner("Connecting to agent service..."):
st.session_state.agent_client = AgentClient(base_url=agent_url)
except AgentClientError as e:
st.error(f"Error connecting to agent service: {e}")
st.markdown("The service might be booting up. Try again in a few seconds.")
st.stop()

Check warning on line 66 in src/streamlit_app.py

View check run for this annotation

Codecov / codecov/patch

src/streamlit_app.py#L63-L66

Added lines #L63 - L66 were not covered by tests
agent_client: AgentClient = st.session_state.agent_client

Expand Down Expand Up @@ -140,25 +141,29 @@ async def amessage_iter() -> AsyncGenerator[ChatMessage, None]:
if user_input := st.chat_input():
messages.append(ChatMessage(type="human", content=user_input))
st.chat_message("human").write(user_input)
if use_streaming:
stream = agent_client.astream(
message=user_input,
model=model,
thread_id=st.session_state.thread_id,
)
await draw_messages(stream, is_new=True)
else:
response = await agent_client.ainvoke(
message=user_input,
model=model,
thread_id=st.session_state.thread_id,
)
messages.append(response)
st.chat_message("ai").write(response.content)
st.rerun() # Clear stale containers
try:
if use_streaming:
stream = agent_client.astream(
message=user_input,
model=model,
thread_id=st.session_state.thread_id,
)
await draw_messages(stream, is_new=True)
else:
response = await agent_client.ainvoke(
message=user_input,
model=model,
thread_id=st.session_state.thread_id,
)
messages.append(response)
st.chat_message("ai").write(response.content)
st.rerun() # Clear stale containers
except AgentClientError as e:
st.error(f"Error generating response: {e}")
st.stop()

# If messages have been generated, show feedback widget
if len(messages) > 0:
if len(messages) > 0 and st.session_state.last_message:
with st.session_state.last_message:
await handle_feedback()

Expand Down Expand Up @@ -321,12 +326,16 @@ async def handle_feedback() -> None:
normalized_score = (feedback + 1) / 5.0

agent_client: AgentClient = st.session_state.agent_client
await agent_client.acreate_feedback(
run_id=latest_run_id,
key="human-feedback-stars",
score=normalized_score,
kwargs={"comment": "In-line human feedback"},
)
try:
await agent_client.acreate_feedback(

Check warning on line 330 in src/streamlit_app.py

View check run for this annotation

Codecov / codecov/patch

src/streamlit_app.py#L329-L330

Added lines #L329 - L330 were not covered by tests
run_id=latest_run_id,
key="human-feedback-stars",
score=normalized_score,
kwargs={"comment": "In-line human feedback"},
)
except AgentClientError as e:
st.error(f"Error recording feedback: {e}")
st.stop()

Check warning on line 338 in src/streamlit_app.py

View check run for this annotation

Codecov / codecov/patch

src/streamlit_app.py#L336-L338

Added lines #L336 - L338 were not covered by tests
st.session_state.last_feedback = (latest_run_id, feedback)
st.toast("Feedback recorded", icon=":material/reviews:")

Expand Down
21 changes: 21 additions & 0 deletions tests/app/test_streamlit_app.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import pytest
from streamlit.testing.v1 import AppTest

from client import AgentClientError
from schema import ChatHistory, ChatMessage
from schema.models import OpenAIModelName

Expand Down Expand Up @@ -139,3 +140,23 @@ async def amessage_iter() -> AsyncGenerator[ChatMessage, None]:
assert tool_status.markdown[2].value == "42"
assert response.markdown[-1].value == "The answer is 42"
assert not at.exception


@pytest.mark.asyncio
async def test_app_init_error(mock_agent_client):
    """Test that an AgentClientError raised while generating a response is
    surfaced to the user as a Streamlit error, without an unhandled exception.

    NOTE(review): despite the name, this exercises the streaming-response
    error path (``astream`` raising), not client initialization — consider
    renaming to ``test_app_generate_error`` in a follow-up (renaming here
    would change the test's public identifier).
    """
    at = AppTest.from_file("../../src/streamlit_app.py").run()

    # Simulate the agent client failing when the streaming response is requested.
    PROMPT = "What is 6 * 7?"
    mock_agent_client.astream.side_effect = AgentClientError("Error connecting to agent")

    at.toggle[0].set_value(True)  # Use Streaming = True
    at.chat_input[0].set_value(PROMPT).run()

    # The user's prompt is echoed back, then the app shows the error message
    # from the except-AgentClientError branch instead of an AI reply.
    assert at.chat_message[0].avatar == "assistant"
    assert at.chat_message[1].avatar == "user"
    assert at.chat_message[1].markdown[0].value == PROMPT
    assert at.error[0].value == "Error generating response: Error connecting to agent"
    assert not at.exception

0 comments on commit 1fb349d

Please sign in to comment.