Commit

LLMFactCheck
mlupei committed Feb 18, 2024
1 parent d842804 commit c7ab889
Showing 1 changed file with 6 additions and 9 deletions.
15 changes: 6 additions & 9 deletions src/get_result.py
@@ -23,17 +23,14 @@ def get_result(model_info, prompt, model_type):
         # If using a standalone Llama model
         model = model_info
         full_prompt = prompt
-        prompt = full_prompt
-        prompt_template='''{prompt}'''
-        prompt_chunks = [prompt_template]
         result_text = ""
-        for chunk in prompt_chunks:
 
 
         # Interact with the Llama model
-            print(chunk)
-            response = model(prompt=chunk, max_tokens=256, temperature=0.5,
+        print(full_prompt)
+        response = model(prompt=full_prompt, max_tokens=256, temperature=0.5,
                          top_p=0.95, repeat_penalty=1.2, top_k=150, echo=True)
-            result_text += response["choices"][0]["text"]
-            print(result_text)
+        result_text += response["choices"][0]["text"]
+        print(result_text)
         return result_text
 
     else:
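For orientation, below is a minimal sketch of how the Llama branch of get_result reads after this commit. It assumes model_info is a llama_cpp.Llama instance from llama-cpp-python, which when called returns a completion dict shaped like {"choices": [{"text": ...}]}; the branch condition and the rest of the function are outside this hunk and appear here only as hypothetical placeholders.

def get_result(model_info, prompt, model_type):
    if model_type == "llama":  # hypothetical check; the real condition is outside this hunk
        # If using a standalone Llama model
        model = model_info
        full_prompt = prompt
        result_text = ""

        # Interact with the Llama model: a single call on the full prompt
        # replaces the previous loop over prompt chunks.
        print(full_prompt)
        response = model(prompt=full_prompt, max_tokens=256, temperature=0.5,
                         top_p=0.95, repeat_penalty=1.2, top_k=150, echo=True)
        result_text += response["choices"][0]["text"]
        print(result_text)
        return result_text
    else:
        # Other model types are handled in the unchanged part of the file.
        raise NotImplementedError("sketch covers only the Llama branch")

The design change is straightforward: the old chunk list only ever held the single prompt template, so looping over it added nothing; calling the model once with the full prompt removes the redundant loop while keeping the same sampling parameters.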
