Fixes for cloudflare deployment (#3)
bhackett1024 authored Jan 14, 2025
1 parent 6bc2183 commit 6543f33
Showing 11 changed files with 307 additions and 339 deletions.
113 changes: 0 additions & 113 deletions app/components/chat/BaseChat.tsx
@@ -9,7 +9,6 @@ import { Menu } from '~/components/sidebar/Menu.client';
import { IconButton } from '~/components/ui/IconButton';
import { Workbench } from '~/components/workbench/Workbench.client';
import { classNames } from '~/utils/classNames';
import { MODEL_LIST, PROVIDER_LIST, initializeModelList } from '~/utils/constants';
import { Messages } from './Messages.client';
import { SendButton } from './SendButton.client';
import { APIKeyManager } from './APIKeyManager';
@@ -25,7 +24,6 @@ import GitCloneButton from './GitCloneButton';
import FilePreview from './FilePreview';
import { ModelSelector } from '~/components/chat/ModelSelector';
import { SpeechRecognitionButton } from '~/components/chat/SpeechRecognition';
import type { IProviderSetting, ProviderInfo } from '~/types/model';
import { ScreenshotStateManager } from './ScreenshotStateManager';
import { toast } from 'react-toastify';

@@ -43,11 +41,6 @@ interface BaseChatProps {
enhancingPrompt?: boolean;
promptEnhanced?: boolean;
input?: string;
model?: string;
setModel?: (model: string) => void;
provider?: ProviderInfo;
setProvider?: (provider: ProviderInfo) => void;
providerList?: ProviderInfo[];
handleStop?: () => void;
sendMessage?: (event: React.UIEvent, messageInput?: string, simulation?: boolean) => void;
handleInputChange?: (event: React.ChangeEvent<HTMLTextAreaElement>) => void;
@@ -69,11 +62,6 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
showChat = true,
chatStarted = false,
isStreaming = false,
model,
setModel,
provider,
setProvider,
providerList,
input = '',
enhancingPrompt,
handleInputChange,
@@ -93,22 +81,6 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
ref,
) => {
const TEXTAREA_MAX_HEIGHT = chatStarted ? 400 : 200;
const [apiKeys, setApiKeys] = useState<Record<string, string>>(() => {
const savedKeys = Cookies.get('apiKeys');

if (savedKeys) {
try {
return JSON.parse(savedKeys);
} catch (error) {
console.error('Failed to parse API keys from cookies:', error);
return {};
}
}

return {};
});
const [modelList, setModelList] = useState(MODEL_LIST);
const [isModelSettingsCollapsed, setIsModelSettingsCollapsed] = useState(false);
const [isListening, setIsListening] = useState(false);
const [recognition, setRecognition] = useState<SpeechRecognition | null>(null);
const [transcript, setTranscript] = useState('');
@@ -120,50 +92,6 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
useEffect(() => {
// Load API keys from cookies on component mount

let parsedApiKeys: Record<string, string> | undefined = {};

try {
const storedApiKeys = Cookies.get('apiKeys');

if (storedApiKeys) {
const parsedKeys = JSON.parse(storedApiKeys);

if (typeof parsedKeys === 'object' && parsedKeys !== null) {
setApiKeys(parsedKeys);
parsedApiKeys = parsedKeys;
}
}
} catch (error) {
console.error('Error loading API keys from cookies:', error);

// Clear invalid cookie data
Cookies.remove('apiKeys');
}

let providerSettings: Record<string, IProviderSetting> | undefined = undefined;

try {
const savedProviderSettings = Cookies.get('providers');

if (savedProviderSettings) {
const parsedProviderSettings = JSON.parse(savedProviderSettings);

if (typeof parsedProviderSettings === 'object' && parsedProviderSettings !== null) {
providerSettings = parsedProviderSettings;
}
}
} catch (error) {
console.error('Error loading Provider Settings from cookies:', error);

// Clear invalid cookie data
Cookies.remove('providers');
}

initializeModelList({ apiKeys: parsedApiKeys, providerSettings }).then((modelList) => {
console.log('Model List: ', modelList);
setModelList(modelList);
});

if (typeof window !== 'undefined' && ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window)) {
const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
const recognition = new SpeechRecognition();
@@ -351,31 +279,6 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
<rect className={classNames(styles.PromptEffectLine)} pathLength="100" strokeLinecap="round"></rect>
<rect className={classNames(styles.PromptShine)} x="48" y="24" width="70" height="1"></rect>
</svg>
<div>
<div className={isModelSettingsCollapsed ? 'hidden' : ''}>
<ModelSelector
key={provider?.name + ':' + modelList.length}
model={model}
setModel={setModel}
modelList={modelList}
provider={provider}
setProvider={setProvider}
providerList={providerList || (PROVIDER_LIST as ProviderInfo[])}
apiKeys={apiKeys}
/>
{(providerList || []).length > 0 && provider && (
<APIKeyManager
provider={provider}
apiKey={apiKeys[provider.name] || ''}
setApiKey={(key) => {
const newApiKeys = { ...apiKeys, [provider.name]: key };
setApiKeys(newApiKeys);
Cookies.set('apiKeys', JSON.stringify(newApiKeys));
}}
/>
)}
</div>
</div>
<FilePreview
files={uploadedFiles}
imageDataList={imageDataList}
@@ -476,7 +379,6 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
show={input.length > 0 || isStreaming || uploadedFiles.length > 0}
simulation={false}
isStreaming={isStreaming}
disabled={!providerList || providerList.length === 0}
onClick={(event) => {
if (isStreaming) {
handleStop?.();
@@ -492,7 +394,6 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
show={(input.length > 0 || uploadedFiles.length > 0) && chatStarted}
simulation={true}
isStreaming={isStreaming}
disabled={!providerList || providerList.length === 0}
onClick={(event) => {
if (input.length > 0 || uploadedFiles.length > 0) {
handleSendMessage?.(event, undefined, true);
@@ -530,20 +431,6 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
disabled={isStreaming}
/>
{chatStarted && <ClientOnly>{() => <ExportChatButton exportChat={exportChat} />}</ClientOnly>}
<IconButton
title="Model Settings"
className={classNames('transition-all flex items-center gap-1', {
'bg-bolt-elements-item-backgroundAccent text-bolt-elements-item-contentAccent':
isModelSettingsCollapsed,
'bg-bolt-elements-item-backgroundDefault text-bolt-elements-item-contentDefault':
!isModelSettingsCollapsed,
})}
onClick={() => setIsModelSettingsCollapsed(!isModelSettingsCollapsed)}
disabled={!providerList || providerList.length === 0}
>
<div className={`i-ph:caret-${isModelSettingsCollapsed ? 'right' : 'down'} text-lg`} />
{isModelSettingsCollapsed ? <span className="text-xs">{model}</span> : <span />}
</IconButton>
</div>
{input.length > 3 ? (
<div className="text-xs text-bolt-elements-textTertiary">
53 changes: 6 additions & 47 deletions app/components/chat/Chat.client.tsx
@@ -12,18 +12,16 @@ import { useMessageParser, usePromptEnhancer, useShortcuts, useSnapScroll } from
import { description, useChatHistory } from '~/lib/persistence';
import { chatStore } from '~/lib/stores/chat';
import { workbenchStore } from '~/lib/stores/workbench';
import { DEFAULT_MODEL, DEFAULT_PROVIDER, PROMPT_COOKIE_KEY, PROVIDER_LIST } from '~/utils/constants';
import { PROMPT_COOKIE_KEY } from '~/utils/constants';
import { cubicEasingFn } from '~/utils/easings';
import { createScopedLogger, renderLogger } from '~/utils/logger';
import { BaseChat } from './BaseChat';
import Cookies from 'js-cookie';
import { debounce } from '~/utils/debounce';
import { useSettings } from '~/lib/hooks/useSettings';
import type { ProviderInfo } from '~/types/model';
import { useSearchParams } from '@remix-run/react';
import { createSampler } from '~/utils/sampler';
import { saveProjectPrompt } from './Messages.client';
import { uint8ArrayToBase64 } from '~/lib/replay/ReplayProtocolClient';
import type { SimulationPromptClientData } from '~/lib/replay/SimulationPrompt';
import { getIFrameSimulationData } from '~/lib/replay/Recording';
import { getCurrentIFrame } from '../workbench/Preview';
@@ -120,27 +118,15 @@ export const ChatImpl = memo(
const [imageDataList, setImageDataList] = useState<string[]>([]); // Move here
const [searchParams, setSearchParams] = useSearchParams();
const files = useStore(workbenchStore.files);
const { activeProviders, promptId } = useSettings();

const [model, setModel] = useState(() => {
const savedModel = Cookies.get('selectedModel');
return savedModel || DEFAULT_MODEL;
});
const [provider, setProvider] = useState(() => {
const savedProvider = Cookies.get('selectedProvider');
return (PROVIDER_LIST.find((p) => p.name === savedProvider) || DEFAULT_PROVIDER) as ProviderInfo;
});
const { promptId } = useSettings();

const { showChat } = useStore(chatStore);

const [animationScope, animate] = useAnimate();

const [apiKeys, setApiKeys] = useState<Record<string, string>>({});

const { messages, isLoading, input, handleInputChange, setInput, stop, append } = useChat({
api: '/api/chat',
body: {
apiKeys,
files,
promptId,
},
@@ -167,7 +153,6 @@ });
});
useEffect(() => {
const prompt = searchParams.get('prompt');
console.log(prompt, searchParams, model, provider);

if (prompt) {
setSearchParams({});
@@ -177,12 +162,12 @@
content: [
{
type: 'text',
text: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${prompt}`,
text: prompt,
},
] as any, // Type assertion to bypass compiler check
});
}
}, [model, provider, searchParams]);
}, [searchParams]);

const { enhancingPrompt, promptEnhanced, enhancePrompt, resetEnhancer } = usePromptEnhancer();
const { parsedMessages, parseMessages } = useMessageParser();
@@ -294,7 +279,7 @@
content: [
{
type: 'text',
text: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${_input}`,
text: _input,
},
...imageDataList.map((imageData) => ({
type: 'image',
@@ -314,7 +299,7 @@
content: [
{
type: 'text',
text: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${_input}`,
text: _input,
},
...imageDataList.map((imageData) => ({
type: 'image',
@@ -363,24 +348,6 @@

const [messageRef, scrollRef] = useSnapScroll();

useEffect(() => {
const storedApiKeys = Cookies.get('apiKeys');

if (storedApiKeys) {
setApiKeys(JSON.parse(storedApiKeys));
}
}, []);

const handleModelChange = (newModel: string) => {
setModel(newModel);
Cookies.set('selectedModel', newModel, { expires: 30 });
};

const handleProviderChange = (newProvider: ProviderInfo) => {
setProvider(newProvider);
Cookies.set('selectedProvider', newProvider.name, { expires: 30 });
};

return (
<BaseChat
ref={animationScope}
@@ -392,11 +359,6 @@
enhancingPrompt={enhancingPrompt}
promptEnhanced={promptEnhanced}
sendMessage={sendMessage}
model={model}
setModel={handleModelChange}
provider={provider}
setProvider={handleProviderChange}
providerList={activeProviders}
messageRef={messageRef}
scrollRef={scrollRef}
handleInputChange={(e) => {
@@ -424,9 +386,6 @@
setInput(input);
scrollTextArea();
},
model,
provider,
apiKeys,
);
}}
uploadedFiles={uploadedFiles}
59 changes: 59 additions & 0 deletions app/lib/.server/llm/chat-anthropic.ts
@@ -0,0 +1,59 @@
import type { CoreMessage } from "ai";
import Anthropic from "@anthropic-ai/sdk";
import { ChatStreamController } from "~/utils/chatStreamController";
import type { ContentBlockParam, MessageParam } from "@anthropic-ai/sdk/resources/messages/messages.mjs";

const MaxMessageTokens = 8192;

function convertContentToAnthropic(content: any): ContentBlockParam[] {
if (typeof content === "string") {
return [{ type: "text", text: content }];
}
if (Array.isArray(content)) {
return content.flatMap(convertContentToAnthropic);
}
if (content.type === "text" && typeof content.text === "string") {
return [{ type: "text", text: content.text }];
}
console.log("AnthropicUnknownContent", JSON.stringify(content, null, 2));
return [];
}

export async function chatAnthropic(chatController: ChatStreamController, apiKey: string, systemPrompt: string, messages: CoreMessage[]) {
const anthropic = new Anthropic({ apiKey });

const messageParams: MessageParam[] = [];

messageParams.push({
role: "assistant",
content: systemPrompt,
});

for (const message of messages) {
const role = message.role == "user" ? "user" : "assistant";
const content = convertContentToAnthropic(message.content);
messageParams.push({
role,
content,
});
}

const response = await anthropic.messages.create({
model: "claude-3-5-sonnet-20241022",
messages: messageParams,
max_tokens: MaxMessageTokens,
});

for (const content of response.content) {
if (content.type === "text") {
chatController.writeText(content.text);
} else {
console.log("AnthropicUnknownResponse", JSON.stringify(content, null, 2));
}
}

const tokens = response.usage.input_tokens + response.usage.output_tokens;
console.log("AnthropicTokens", tokens);

chatController.writeUsage({ completionTokens: response.usage.output_tokens, promptTokens: response.usage.input_tokens });
}
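
Note: the new chat-anthropic.ts module is meant to be driven from a streaming chat route; a minimal sketch of that wiring follows. The ChatStreamController constructor, the helper name streamAnthropicChat, and the Response plumbing are assumptions for illustration — only the writeText/writeUsage methods are confirmed by the diff above.

import type { CoreMessage } from "ai";
import { ChatStreamController } from "~/utils/chatStreamController";
import { chatAnthropic } from "~/lib/.server/llm/chat-anthropic";

// Hypothetical streaming handler: runs chatAnthropic and returns its output as a streamed Response.
export async function streamAnthropicChat(
  apiKey: string,
  systemPrompt: string,
  messages: CoreMessage[],
): Promise<Response> {
  const stream = new ReadableStream({
    async start(controller) {
      // Assumption: ChatStreamController wraps the underlying stream controller
      // and exposes the writeText/writeUsage methods used in chat-anthropic.ts.
      const chatController = new ChatStreamController(controller);
      try {
        await chatAnthropic(chatController, apiKey, systemPrompt, messages);
      } finally {
        controller.close();
      }
    },
  });

  return new Response(stream, {
    headers: { "Content-Type": "text/plain; charset=utf-8" },
  });
}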