Commit

added more test cases
adeelehsan committed Dec 11, 2024
1 parent 9be1e4e commit 201da08
Showing 2 changed files with 206 additions and 10 deletions.
207 changes: 203 additions & 4 deletions int_tests/test_query.py
@@ -4,8 +4,9 @@
from vectara.core import RequestOptions
from vectara.factory import Factory
from vectara import CoreDocument, CoreDocumentPart, SearchCorporaParameters, KeyedSearchCorpus, \
-    ContextConfiguration, CustomerSpecificReranker, GenerationParameters, ModelParameters, CitationParameters, \
-    ChatParameters, QueryStreamedResponse, QueryFullResponse
+    ContextConfiguration, CustomerSpecificReranker, GenerationParameters, CitationParameters, \
+    QueryStreamedResponse, QueryFullResponse, MmrReranker, NoneReranker, UserFunctionReranker, \
+    ChainReranker


class TestMultipleCorporaQuery(unittest.TestCase):
@@ -64,9 +65,9 @@ def setUp(self):
citations=CitationParameters(
style="none",
),
-enable_factual_consistency_score=True,
+enable_factual_consistency_score=False,
)
-self.chat_params = ChatParameters(store=True)

self.request_options = RequestOptions(timeout_in_seconds=100)

def test_query(self):
@@ -77,6 +78,204 @@ def test_query(self):
self.assertIsNotNone(response.summary)
self.assertGreater(len(response.search_results), 0)

def test_query_with_different_lambda(self):
search = SearchCorporaParameters(
corpora=[
KeyedSearchCorpus(
corpus_key="test-search-1",
metadata_filter="",
lexical_interpolation=0,
),
KeyedSearchCorpus(
corpus_key="test-search-2",
metadata_filter="",
lexical_interpolation=0,
)
],
context_configuration=ContextConfiguration(
sentences_before=2,
sentences_after=2,
),
reranker=CustomerSpecificReranker(
reranker_id="rnk_272725719"
),
)

response = self.client.query(query="Robot Utility Models", search=search,
generation=self.generation_params,
request_options=self.request_options)
self.assertIsInstance(response, QueryFullResponse)
self.assertIsNotNone(response.summary)
self.assertGreater(len(response.search_results), 0)

search = SearchCorporaParameters(
corpora=[
KeyedSearchCorpus(
corpus_key="test-search-1",
metadata_filter="",
lexical_interpolation=0.1,
),
KeyedSearchCorpus(
corpus_key="test-search-2",
metadata_filter="",
lexical_interpolation=0.1,
)
],
context_configuration=ContextConfiguration(
sentences_before=2,
sentences_after=2,
),
reranker=CustomerSpecificReranker(
reranker_id="rnk_272725719"
),
)

response = self.client.query(query="Robot Utility Models", search=search,
generation=self.generation_params,
request_options=self.request_options)
self.assertIsInstance(response, QueryFullResponse)
self.assertIsNotNone(response.summary)
self.assertGreater(len(response.search_results), 0)

def test_query_with_mmr_reranker(self):
search = SearchCorporaParameters(
corpora=[
KeyedSearchCorpus(
corpus_key="test-search-1",
metadata_filter="",
lexical_interpolation=0,
),
KeyedSearchCorpus(
corpus_key="test-search-2",
metadata_filter="",
lexical_interpolation=0,
)
],
context_configuration=ContextConfiguration(
sentences_before=2,
sentences_after=2,
),
reranker=MmrReranker(
diversity_bias=0.3
),
)

response = self.client.query(query="Robot Utility Models", search=search,
generation=self.generation_params,
request_options=self.request_options)
self.assertIsInstance(response, QueryFullResponse)
self.assertIsNotNone(response.summary)
self.assertGreater(len(response.search_results), 0)

def test_query_with_none_reranker(self):
search = SearchCorporaParameters(
corpora=[
KeyedSearchCorpus(
corpus_key="test-search-1",
metadata_filter="",
lexical_interpolation=0,
),
KeyedSearchCorpus(
corpus_key="test-search-2",
metadata_filter="",
lexical_interpolation=0,
)
],
context_configuration=ContextConfiguration(
sentences_before=2,
sentences_after=2,
),
reranker=NoneReranker(),
)

response = self.client.query(query="Robot Utility Models", search=search,
generation=self.generation_params,
request_options=self.request_options)
self.assertIsInstance(response, QueryFullResponse)
self.assertIsNotNone(response.summary)
self.assertGreater(len(response.search_results), 0)

def test_query_with_udf_reranker(self):
search = SearchCorporaParameters(
corpora=[
KeyedSearchCorpus(
corpus_key="test-search-1",
metadata_filter="",
lexical_interpolation=0,
),
KeyedSearchCorpus(
corpus_key="test-search-2",
metadata_filter="",
lexical_interpolation=0,
)
],
context_configuration=ContextConfiguration(
sentences_before=2,
sentences_after=2,
),
reranker=UserFunctionReranker(
user_function="if (get('$.score') < 0.7) null else get('$.score') + 1"
),
)

response = self.client.query(query="Robot Utility Models", search=search,
generation=self.generation_params,
request_options=self.request_options)
self.assertIsInstance(response, QueryFullResponse)
for result in response.search_results:
self.assertGreater(result.score, 1)

def test_query_with_chain_reranker(self):
search = SearchCorporaParameters(
corpora=[
KeyedSearchCorpus(
corpus_key="test-search-1",
metadata_filter="",
lexical_interpolation=0,
),
KeyedSearchCorpus(
corpus_key="test-search-2",
metadata_filter="",
lexical_interpolation=0,
)
],
context_configuration=ContextConfiguration(
sentences_before=2,
sentences_after=2,
),
reranker=ChainReranker(
rerankers=[
CustomerSpecificReranker(
reranker_id="rnk_272725719"
),
UserFunctionReranker(
user_function="if (get('$.score') < 0.7) null else get('$.score') + 1"),
]
)
)

response = self.client.query(query="Robot Utility Models", search=search,
generation=self.generation_params,
request_options=self.request_options)
self.assertIsInstance(response, QueryFullResponse)
for result in response.search_results:
self.assertGreater(result.score, 1)

def test_query_with_fcs_enabled(self):
generation_params = GenerationParameters(
response_language="eng",
citations=CitationParameters(
style="none",
),
enable_factual_consistency_score=True,
)
response = self.client.query(query="Robot Utility Models", search=self.search_params,
generation=generation_params,
request_options=self.request_options)
self.assertIsInstance(response, QueryFullResponse)
self.assertIsNotNone(response.summary)
self.assertGreater(len(response.search_results), 0)

def test_query_stream(self):
response = self.client.query_stream(query="Robot Utility Models", search=self.search_params,
generation=self.generation_params,
9 changes: 3 additions & 6 deletions int_tests/vectara_int_tests/managers/test_chat.py
@@ -37,8 +37,8 @@ def setUp(self):
id="my-doc-id",
document_parts=[
CoreDocumentPart(
text="Robot Utility Models are trained on a diverse set of environments and objects, and then can "
"be deployed in novel environments with novel objects without any further data or training.",
text="""Robot Utility Models are trained on a diverse set of environments and objects, and then can
be deployed in novel environments with novel objects without any further data or training.""",
)
],
)
@@ -60,21 +60,18 @@ def test_get_chat(self):

def test_list_chats(self):
chat_ids = [self.chat_id]
-print(self.chat_id)
for _ in range(2):
response = self.client.chat(
query="Robot Utility Models",
search=self.search_params,
generation=self.generation_params,
chat=self.chat_params
)
-print(response.chat_id)
chat_ids.append(response.chat_id)

response = self.client.chats.list()
for chat in response:
-print(f'{chat.id} -- response')
-# self.assertIn(chat.id, chat_ids)
+self.assertIn(chat.id, chat_ids)

def test_delete_chat(self):
response = self.client.chats.delete(chat_id=self.chat_id)
