diff --git a/app/src/components/DetailedTranscription.tsx b/app/src/components/DetailedTranscription.tsx index 842727f..52fc3e3 100644 --- a/app/src/components/DetailedTranscription.tsx +++ b/app/src/components/DetailedTranscription.tsx @@ -124,8 +124,8 @@ const DetailedTranscription = ({ {showTranslation ? ( - - {transcription.translation || "No translation available"} + + {transcription.translation ? "```"+transcription.translation+"```" : "No translation available"} ) : (
@@ -173,7 +173,7 @@ const DetailedTranscription = ({

Summary

- + {transcription.summary || "No summary available"} diff --git a/service/summarizer.py b/service/summarizer.py index 006e212..aa59c1e 100644 --- a/service/summarizer.py +++ b/service/summarizer.py @@ -41,6 +41,6 @@ def summarize_using_openai(text): #Using Ollama and llama3.2 model, summarize the English translation def summarize_using_ollama(text): - response = ollama.generate(model= "llama3.2", prompt = "Provide highlights of conversion inbullet points without pretext:"+text+"\n \n") + response = ollama.generate(model="llama3.2", prompt=text + "\n\n" + "Provide highlights of the above conversation in Markdown bullet points, ready for direct inclusion in a file, with no pretext, and formatted as a multiline string.") summary = response["response"] return summary