diff --git a/src/get_result.py b/src/get_result.py
index f0e0df7..e3ce69c 100644
--- a/src/get_result.py
+++ b/src/get_result.py
@@ -31,7 +31,7 @@ def get_result(model_info, prompt, model_type):
         # Interact with the Llama model
         print(chunk)
         response = model(prompt=chunk, max_tokens=256, temperature=0.5,
-                         top_p=0.95, repeat_penalty=1.2, top_k=150, echo=True)
+                         top_p=0.95, repeat_penalty=1.2, top_k=150, echo=False)
         result_text += response["choices"][0]["text"]
         print(result_text)
     return result_text