Skip to content

Commit

Permalink
`.content` was removed in haystack 2.9.0; use `.text` instead
Browse files Browse the repository at this point in the history
  • Loading branch information
lambda-science authored Jan 16, 2025
1 parent f5ff3a0 commit 4b138b7
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 9 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -380,7 +380,7 @@ def _get_llm_input_message_attributes(arguments: Mapping[str, Any]) -> Iterator[
map(lambda x: isinstance(x, ChatMessage), messages)
):
for message_index, message in enumerate(messages):
if (content := message.content) is not None:
if (content := message.text) is not None:
yield f"{LLM_INPUT_MESSAGES}.{message_index}.{MESSAGE_CONTENT}", content
if (role := message.role) is not None:
yield f"{LLM_INPUT_MESSAGES}.{message_index}.{MESSAGE_ROLE}", role
Expand Down Expand Up @@ -409,7 +409,7 @@ def _get_llm_output_message_attributes(response: Mapping[str, Any]) -> Iterator[
continue
if finish_reason == "tool_calls":
try:
tool_calls = json.loads(reply.content)
tool_calls = json.loads(reply.text)
except json.JSONDecodeError:
continue
for tool_call_index, tool_call in enumerate(tool_calls):
Expand All @@ -426,7 +426,7 @@ def _get_llm_output_message_attributes(response: Mapping[str, Any]) -> Iterator[
tool_name,
)
else:
yield f"{LLM_OUTPUT_MESSAGES}.{reply_index}.{MESSAGE_CONTENT}", reply.content
yield f"{LLM_OUTPUT_MESSAGES}.{reply_index}.{MESSAGE_CONTENT}", reply.text
yield f"{LLM_OUTPUT_MESSAGES}.{reply_index}.{MESSAGE_ROLE}", reply.role.value
elif isinstance(reply, str):
yield f"{LLM_OUTPUT_MESSAGES}.0.{MESSAGE_CONTENT}", reply
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -448,7 +448,7 @@ def test_tool_calling_llm_span_has_expected_attributes(
}
}
)
assert "get_current_weather" in response["llm"]["replies"][0].content
assert "get_current_weather" in response["llm"]["replies"][0].text

spans = in_memory_span_exporter.get_finished_spans()
assert len(spans) == 2
Expand Down Expand Up @@ -527,7 +527,7 @@ def test_openai_chat_generator_llm_span_has_expected_attributes(
}
}
)
assert "argentina" in response["llm"]["replies"][0].content.lower()
assert "argentina" in response["llm"]["replies"][0].text.lower()
spans = in_memory_span_exporter.get_finished_spans()
assert len(spans) == 2
span = spans[0]
Expand Down Expand Up @@ -729,8 +729,8 @@ def test_cohere_reranker_span_has_expected_attributes(
)
ranker_response = response["ranker"]
assert len(response_documents := ranker_response["documents"]) == 2
assert "Lionel Messi" in response_documents[0].content
assert "Paul Graham" in response_documents[1].content
assert "Lionel Messi" in response_documents[0].text
assert "Paul Graham" in response_documents[1].text

spans = in_memory_span_exporter.get_finished_spans()
assert len(spans) == 2
Expand Down Expand Up @@ -872,9 +872,9 @@ def test_openai_document_embedder_embedding_span_has_expected_attributes(
)
assert (response_documents := response["embedder"].get("documents")) is not None
assert len(response_documents) == 2
assert "Argentina won the World Cup in 2022." == response_documents[0].content
assert "Argentina won the World Cup in 2022." == response_documents[0].text
assert response_documents[0].embedding is not None
assert "France won the World Cup in 2018." == response_documents[1].content
assert "France won the World Cup in 2018." == response_documents[1].text
assert response_documents[1].embedding is not None

spans = in_memory_span_exporter.get_finished_spans()
Expand Down

0 comments on commit 4b138b7

Please sign in to comment.