From d8948dbb6651c569a5c7a53d9fb4d2ef8b600aad Mon Sep 17 00:00:00 2001
From: Maksym
Date: Sun, 18 Feb 2024 15:31:37 -0500
Subject: [PATCH] LLMFactCheck

---
 src/get_result.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/get_result.py b/src/get_result.py
index f0e0df7..e3ce69c 100644
--- a/src/get_result.py
+++ b/src/get_result.py
@@ -31,7 +31,7 @@ def get_result(model_info, prompt, model_type):
         # Interact with the Llama model
         print(chunk)
         response = model(prompt=chunk, max_tokens=256, temperature=0.5,
-                         top_p=0.95, repeat_penalty=1.2, top_k=150, echo=True)
+                         top_p=0.95, repeat_penalty=1.2, top_k=150, echo=False)
         result_text += response["choices"][0]["text"]
         print(result_text)
     return result_text
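
Patch note: a minimal sketch of the patched call follows, assuming llama-cpp-python's
Llama API; the model path and prompt chunk are hypothetical stand-ins, not values from
this repository. The rationale: with echo=True, the text returned in
choices[0]["text"] includes the prompt itself, so accumulating it across chunks
duplicated each prompt inside result_text; echo=False keeps only the generated
continuation.

    # Sketch only: assumes llama-cpp-python; path and chunk are hypothetical.
    from llama_cpp import Llama

    model = Llama(model_path="models/llama-2-7b.Q4_K_M.gguf")  # hypothetical path

    result_text = ""
    chunk = "Verify: aspirin irreversibly inhibits COX-1."  # hypothetical chunk
    response = model(prompt=chunk, max_tokens=256, temperature=0.5,
                     top_p=0.95, repeat_penalty=1.2, top_k=150, echo=False)
    # echo=False: the returned text is the model's continuation only, so
    # result_text accumulates answers without repeating each prompt.
    result_text += response["choices"][0]["text"]
    print(result_text)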