llm chat, post svelte refactor #50

Draft · wants to merge 12 commits into base: main
7 changes: 5 additions & 2 deletions src/lib/config.ts
@@ -24,8 +24,11 @@ export const SOF_HOSTS = [
// },
{
id: "epic",
name: "Epic Demo",
url: "https://fhir.epic.com/interconnect-fhir-oauth/api/FHIR/R4",
//name: "Epic Demo",
//url: "https://fhir.epic.com/interconnect-fhir-oauth/api/FHIR/R4",
name: "Epic, UW Medicine Epic (production)",
// per https://open.epic.com/MyApps/Endpoints :
url: "https://fhir.epic.medical.washington.edu/FHIR-Proxy/api/FHIR/R4",
clientId: import.meta.env.VITE_EPIC_CLIENT_ID,
note: "<a href='https://fhir.epic.com/Documentation?docId=testpatients' target='_blank' rel='noreferrer'>Test patient credentials <Icon name='box-arrow-up-right' /></a>"
},
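A note for anyone testing this host entry locally: the `clientId` is read from a Vite environment variable, so a `VITE_EPIC_CLIENT_ID` value has to be available at build time. A minimal sanity-check sketch (not part of this diff; the warning text is illustrative):

    // Sketch only: confirm the Vite env var backing the new Epic entry is set,
    // e.g. via a line like VITE_EPIC_CLIENT_ID=<registered client id> in .env.local.
    const epicClientId = import.meta.env.VITE_EPIC_CLIENT_ID;
    if (!epicClientId) {
      console.warn("VITE_EPIC_CLIENT_ID is not set; the UW Medicine Epic host will fail to authorize.");
    }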
109 changes: 109 additions & 0 deletions src/lib/utils/llmChat.js
@@ -0,0 +1,109 @@
let fhirResources = null;
let messages = [];

function initLLMChat(resources) {
fhirResources = resources;
const llmChatContent = document.getElementById('llm-chat-content');
const chatInput = document.getElementById('chat-input');
const sendMessageButton = document.getElementById('send-message');

sendMessageButton.addEventListener('click', sendMessage);
chatInput.addEventListener('keypress', (e) => {
if (e.key === 'Enter') sendMessage();
});
}

function insertMessageIntoUi(role, userMessage) {
const chatMessages = document.getElementById('chat-messages');

// Create a new table row for the user message
const row = document.createElement('tr');

// Create cells for the request
const requestCell = document.createElement('td');
requestCell.textContent = userMessage;

// Create empty cells for response and tokens
const responseCell = document.createElement('td');
const promptTokensCell = document.createElement('td');
const completionTokensCell = document.createElement('td');
const costCell = document.createElement('td');

// Append cells to the row
row.appendChild(requestCell);
row.appendChild(responseCell);
row.appendChild(promptTokensCell);
row.appendChild(completionTokensCell);
row.appendChild(costCell);

// Append the row to the chat messages table
chatMessages.appendChild(row);

// Return the row for later updates
return row;
}

// Send the chat input (plus the FHIR resources as context on the first send) to the LLM service; rows are rendered via insertMessageIntoUi
async function sendMessage() {
const chatInput = document.getElementById('chat-input');
const userMessage = chatInput.value.trim();
if (userMessage.length === 0) return;

// Append the FHIR resources as the first message
if (messages.length === 0) {
messages.push({
role: "user",
content: [{ type: "text", text: JSON.stringify(fhirResources) }]
});
}

// Append the user message
messages.push({
role: "user",
content: [{ type: "text", text: userMessage }]
});

// Insert the user message into the UI and get the row reference
const row = insertMessageIntoUi('user', userMessage);

chatInput.value = '';

try {
// FIXME config for this url...
const response = await fetch('https://llm-service.fl.mcjustin.dev.cirg.uw.edu/api/chat', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ messages: messages }), // Send the messages array
});

if (!response.ok) {
throw new Error('Failed to get LLM response');
}

const data = await response.json();
// Append the assistant's response
messages.push({
role: "assistant",
content: [{ type: "text", text: data.content }]
});

const promptTokens = data.prompt_tokens;
const completionTokens = data.completion_tokens;
const costInput = parseInt(promptTokens) * 0.15 / 1000000;
const costOutput = parseInt(completionTokens) * 0.6 / 1000000;
const cost = costInput + costOutput;

// Update the existing row with the response and token counts
row.cells[1].textContent = data.content; // Response
row.cells[2].textContent = promptTokens; // Prompt Tokens
row.cells[3].textContent = completionTokens; // Completion Tokens
row.cells[4].textContent = costInput.toString().substring(0,7) + " + " + costOutput.toString().substring(0,7) + " = " + cost.toString().substring(0,7);
} catch (error) {
console.error('Error sending message to LLM:', error);
row.cells[1].textContent = 'Failed to get a response. Please try again.'; // Update response cell with error message
}
}

export { initLLMChat };
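For reviewers, a short sketch of what sendMessage() assumes about the chat service's JSON response and how the cost column is computed (the rates are the ones hard-coded above; the token counts here are made up for illustration):

    // Assumed response shape, based on the fields read in sendMessage():
    //   { content: string, prompt_tokens: number, completion_tokens: number }
    // Cost arithmetic: $0.15 per 1M prompt tokens, $0.60 per 1M completion tokens.
    const promptTokens = 1200;      // illustrative value
    const completionTokens = 300;   // illustrative value
    const costInput = promptTokens * 0.15 / 1000000;     // 0.00018
    const costOutput = completionTokens * 0.6 / 1000000; // 0.00018
    console.log(`${costInput} + ${costOutput} = ${costInput + costOutput}`);
    // logs: 0.00018 + 0.00018 = 0.00036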
1 change: 1 addition & 0 deletions src/lib/utils/retreiveIPS.js
@@ -71,6 +71,7 @@ async function retrieve(){
$("#ips-loader").hide();
return;
}
// TODO look at these in the debugger... actually, is this used at all?
const decoded = await Promise.all(retrieveResult.shcs.map(verify));
const data = decoded.map((e) => e.fhirBundle);
prepareSHLContents(data);
58 changes: 57 additions & 1 deletion src/routes/(viewer)/ips/+page.svelte
@@ -21,6 +21,8 @@

import { SHOW_VIEWER_DEMO } from "$lib/config";

import { initLLMChat } from '$lib/utils/llmChat.js';

let shlContents: Bundle[] = [];

let loading: boolean;
@@ -54,6 +56,8 @@
}
}

let showLlmChat = false;

let displayModeText:string;
$: {
if ($displayMode) {
@@ -150,6 +154,9 @@
if (retrieveResult.shcs) {
const decoded = await Promise.all(retrieveResult.shcs.map(verify));
const data = decoded.map((e) => e.fhirBundle);

data.forEach(ipsBundle => initLLMChat(ipsBundle)); // Call initLLMChat for each bundle

shlContents = data;
}
}
@@ -186,6 +193,28 @@

</script>

<Row class="mx-0 my-4">
<Col>
<div id="llm-chat-content" style="display: block; margin: 30px;">
<table id="chat-messages" style="width: 100%; border-collapse: collapse;">
<thead>
<tr>
<th>Your request</th>
<th>LLM Chat Response</th>
<th>Prompt Tokens</th>
<th>Response Tokens</th>
<th>Cost in US$ (prompt + response = total)</th>
</tr>
</thead>
<tbody>
<!-- Messages will be appended here -->
</tbody>
</table>
<input type="text" id="chat-input" placeholder="Ask a large language model about your health...">
<button id="send-message">Send to LLM</button>
</div>
</Col>
</Row>
<Row class="d-flex justify-content-start mx-0 pb-4">
<Col class="d-flex justify-content-start align-items-center">
Displaying FHIR Resources Using:
@@ -245,6 +274,33 @@
<IPSContent content={shlContents[0]} mode={$displayMode} />
{/if}

<!-- This at least renders here and doesn't break the page...
<Row class="mx-0 my-4">
<Col>
<div id="llm-chat-content" style="display: block; margin: 30px;">
<table id="chat-messages" style="width: 100%; border-collapse: collapse;">
<thead>
<tr>
<th>Your request</th>
<th>LLM Chat Response</th>
<th>Prompt Tokens</th>
<th>Response Tokens</th>
<th>Cost in US$ (prompt + response = total)</th>
</tr>
</thead>
<tbody>
-->
<!-- Messages will be appended here -->
<!--
</tbody>
</table>
<input type="text" id="chat-input" placeholder="Ask a large language model about your health...">
<button id="send-message">Send to LLM</button>
</div>
</Col>
</Row>
-->

<style lang="css">
:global(.loader) {
width: 100%;
@@ -289,4 +345,4 @@
background-position: 150% 0, 0 0, 70px 5px, 70px 38px, 0px 66px;
}
}
</style>
</style>
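One review observation, with a sketch rather than a definitive fix (the helper below is hypothetical and not part of this PR): initLLMChat is called once per decoded bundle, so with multiple bundles the click/keypress listeners are attached repeatedly and fhirResources ends up holding only the last bundle. Collecting the bundles and initializing once would avoid both:

    // Sketch only: initialize the chat a single time with every decoded bundle.
    import { initLLMChat } from '$lib/utils/llmChat.js';

    function initLLMChatForBundles(bundles) {
      // One call means one set of listeners; the whole array becomes the FHIR context.
      initLLMChat(bundles);
    }

    // in +page.svelte, in place of the per-bundle forEach:
    // initLLMChatForBundles(data);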