
Commit eada5ff: clean up

RogerHYang committed Jan 19, 2024
1 parent f885342 commit eada5ff
Showing 1 changed file with 12 additions and 15 deletions.
@@ -187,21 +187,14 @@ async def task() -> None:
         assert llm_attributes.pop(f"{LLM_INPUT_MESSAGES}.0.{MESSAGE_CONTENT}", None) is not None
         assert llm_attributes.pop(f"{LLM_INPUT_MESSAGES}.1.{MESSAGE_ROLE}", None) is not None
         assert llm_attributes.pop(f"{LLM_INPUT_MESSAGES}.1.{MESSAGE_CONTENT}", None) is not None
-        if status_code == 200:
-            assert llm_span.status.status_code == trace_api.StatusCode.OK
-            assert llm_attributes.pop(OUTPUT_VALUE, None) == answer
-            if not is_stream:
-                # FIXME: currently we can't capture messages when streaming
-                assert (
-                    llm_attributes.pop(f"{LLM_OUTPUT_MESSAGES}.0.{MESSAGE_ROLE}", None)
-                    == "assistant"
-                )
-                assert (
-                    llm_attributes.pop(f"{LLM_OUTPUT_MESSAGES}.0.{MESSAGE_CONTENT}", None) == answer
-                )
-        elif status_code == 400 and not is_stream:
-            # FIXME: currently we can't capture errors when streaming
-            assert llm_span.status.status_code == trace_api.StatusCode.ERROR
+        assert llm_span.status.status_code == trace_api.StatusCode.OK
+        assert llm_attributes.pop(OUTPUT_VALUE, None) == answer
+        if not is_stream:
+            # FIXME: currently we can't capture messages when streaming
+            assert (
+                llm_attributes.pop(f"{LLM_OUTPUT_MESSAGES}.0.{MESSAGE_ROLE}", None) == "assistant"
+            )
+            assert llm_attributes.pop(f"{LLM_OUTPUT_MESSAGES}.0.{MESSAGE_CONTENT}", None) == answer
         assert llm_attributes == {}
 
         # FIXME: maybe chunking spans should be discarded?
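
Note: the assertions in this hunk follow a pop-and-verify pattern. Each expected attribute is popped off llm_attributes, so the trailing assert llm_attributes == {} fails if the span carries anything the test did not account for. Below is a self-contained sketch of that pattern; the attribute keys and values are illustrative stand-ins, not the instrumentation's actual output.

# Illustrative sketch of the pop-and-verify pattern used above.
# The keys and values here are made up for the example.
attributes = {
    "llm.output_messages.0.message.role": "assistant",
    "llm.output_messages.0.message.content": "42",
}
assert attributes.pop("llm.output_messages.0.message.role", None) == "assistant"
assert attributes.pop("llm.output_messages.0.message.content", None) == "42"
assert attributes == {}  # any attribute not explicitly checked would surface here
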
@@ -287,6 +280,10 @@ def openai_api_key(monkeypatch: pytest.MonkeyPatch) -> None:
 
 @pytest.fixture(scope="module")
 def seed() -> Iterator[int]:
+    """
+    Use rolling seeds to help debugging, because the rolling pseudo-random values
+    allow conditional breakpoints to be hit precisely (and repeatably).
+    """
     return count()
 
 
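Note: the docstring added above explains the design choice. Since seed returns itertools.count(), successive next(seed) calls yield 0, 1, 2, ... in the same order on every run, so a conditional breakpoint on a specific value fires at the same test iteration repeatably. Below is a minimal sketch of how such a fixture might be consumed; the test function is a hypothetical illustration, not part of this commit.

# Hypothetical usage sketch -- not part of this commit.
from itertools import count
from typing import Iterator

import pytest


@pytest.fixture(scope="module")
def seed() -> Iterator[int]:
    return count()


def test_with_rolling_seed(seed: Iterator[int]) -> None:
    for _ in range(5):
        value = next(seed)  # deterministic and increasing: 0, 1, 2, ...
        # A conditional breakpoint on a specific value (e.g. value == 3) stops
        # at the same call on every run, which is what the docstring describes.
        assert value >= 0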
