Added the ability to use practically any LLM you can dream of within Bolt.new

Cole Medin
2024-10-13 13:53:43 -05:00
parent ffa9f11360
commit 90a206f2d4
14 changed files with 642 additions and 126 deletions

app/components/chat/BaseChat.tsx

@@ -1,3 +1,5 @@
+// @ts-nocheck
+// Preventing TS checks with files presented in the video for a better presentation.
 import type { Message } from 'ai';
 import React, { type RefCallback } from 'react';
 import { ClientOnly } from 'remix-utils/client-only';
@@ -5,11 +7,22 @@ import { Menu } from '~/components/sidebar/Menu.client';
 import { IconButton } from '~/components/ui/IconButton';
 import { Workbench } from '~/components/workbench/Workbench.client';
 import { classNames } from '~/utils/classNames';
+import { MODEL_LIST } from '~/utils/constants';
 import { Messages } from './Messages.client';
 import { SendButton } from './SendButton.client';
 import styles from './BaseChat.module.scss';
+const EXAMPLE_PROMPTS = [
+  { text: 'Build a todo app in React using Tailwind' },
+  { text: 'Build a simple blog using Astro' },
+  { text: 'Create a cookie consent form using Material UI' },
+  { text: 'Make a space invaders game' },
+  { text: 'How do I center a div?' },
+];
+const TEXTAREA_MIN_HEIGHT = 76;
 interface BaseChatProps {
   textareaRef?: React.RefObject<HTMLTextAreaElement> | undefined;
   messageRef?: RefCallback<HTMLDivElement> | undefined;
@@ -21,22 +34,14 @@ interface BaseChatProps {
   enhancingPrompt?: boolean;
   promptEnhanced?: boolean;
   input?: string;
+  model: string;
+  setModel: (model: string) => void;
   handleStop?: () => void;
   sendMessage?: (event: React.UIEvent, messageInput?: string) => void;
   handleInputChange?: (event: React.ChangeEvent<HTMLTextAreaElement>) => void;
   enhancePrompt?: () => void;
 }
-const EXAMPLE_PROMPTS = [
-  { text: 'Build a todo app in React using Tailwind' },
-  { text: 'Build a simple blog using Astro' },
-  { text: 'Create a cookie consent form using Material UI' },
-  { text: 'Make a space invaders game' },
-  { text: 'How do I center a div?' },
-];
-const TEXTAREA_MIN_HEIGHT = 76;
 export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
   (
     {
@@ -50,6 +55,8 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
       promptEnhanced = false,
       messages,
       input = '',
+      model,
+      setModel,
       sendMessage,
       handleInputChange,
       enhancePrompt,
@@ -103,6 +110,20 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
                 'sticky bottom-0': chatStarted,
               })}
             >
+              {/* Model selection dropdown */}
+              <div className="mb-2">
+                <select
+                  value={model}
+                  onChange={(e) => setModel(e.target.value)}
+                  className="w-full p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none"
+                >
+                  {MODEL_LIST.map((modelOption) => (
+                    <option key={modelOption.name} value={modelOption.name}>
+                      {modelOption.label}
+                    </option>
+                  ))}
+                </select>
+              </div>
               <div
                 className={classNames(
                   'shadow-sm border border-bolt-elements-borderColor bg-bolt-elements-prompt-background backdrop-filter backdrop-blur-[8px] rounded-lg overflow-hidden',
@@ -210,4 +231,4 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
       </div>
     );
   },
-);
+);
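
The new dropdown is driven by MODEL_LIST from ~/utils/constants, and the two files below lean on DEFAULT_MODEL and MODEL_REGEX from the same module; that file is presumably among the 14 changed files but is not shown in this excerpt. As a minimal sketch of the shape these components appear to assume (the interface name, the concrete model entries, and the exact regex are illustrative assumptions, not taken from this diff):

// app/utils/constants.ts, sketch only: shape inferred from how the three components use it
export interface ModelInfo {
  name: string; // value kept in state and embedded in the [Model: ...] tag
  label: string; // text shown in the <select> dropdown
  provider: string; // e.g. 'Anthropic', 'OpenAI', 'Ollama'; provider names here are illustrative
}

export const MODEL_LIST: ModelInfo[] = [
  { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet', provider: 'Anthropic' },
  { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI' },
];

export const DEFAULT_MODEL = MODEL_LIST[0].name;

// Matches the '[Model: ...]' prefix that Chat.client.tsx prepends to user messages,
// so it can be stripped again before rendering (see UserMessage.tsx below).
export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;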

app/components/chat/Chat.client.tsx

@@ -1,3 +1,5 @@
+// @ts-nocheck
+// Preventing TS checks with files presented in the video for a better presentation.
 import { useStore } from '@nanostores/react';
 import type { Message } from 'ai';
 import { useChat } from 'ai/react';
@@ -9,6 +11,7 @@ import { useChatHistory } from '~/lib/persistence';
 import { chatStore } from '~/lib/stores/chat';
 import { workbenchStore } from '~/lib/stores/workbench';
 import { fileModificationsToHTML } from '~/utils/diff';
+import { DEFAULT_MODEL } from '~/utils/constants';
 import { cubicEasingFn } from '~/utils/easings';
 import { createScopedLogger, renderLogger } from '~/utils/logger';
 import { BaseChat } from './BaseChat';
@@ -70,6 +73,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
   const textareaRef = useRef<HTMLTextAreaElement>(null);
   const [chatStarted, setChatStarted] = useState(initialMessages.length > 0);
+  const [model, setModel] = useState(DEFAULT_MODEL);
   const { showChat } = useStore(chatStore);
@@ -178,7 +182,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
        * manually reset the input and we'd have to manually pass in file attachments. However, those
        * aren't relevant here.
        */
-      append({ role: 'user', content: `${diff}\n\n${_input}` });
+      append({ role: 'user', content: `[Model: ${model}]\n\n${diff}\n\n${_input}` });
       /**
        * After sending a new message we reset all modifications since the model
@@ -186,7 +190,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
        */
      workbenchStore.resetAllFileModifications();
    } else {
-      append({ role: 'user', content: _input });
+      append({ role: 'user', content: `[Model: ${model}]\n\n${_input}` });
    }
    setInput('');
@@ -209,6 +213,8 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
       enhancingPrompt={enhancingPrompt}
       promptEnhanced={promptEnhanced}
       sendMessage={sendMessage}
+      model={model}
+      setModel={setModel}
       messageRef={messageRef}
       scrollRef={scrollRef}
       handleInputChange={handleInputChange}
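
Tagging each outgoing user message with [Model: ...] is what lets the server choose a provider per request without changing the useChat wiring on the client. The server-side handling lives in the remaining files of this commit; a rough sketch of the idea, with a hypothetical helper name and a placeholder fallback (not code from the commit):

// Sketch: recover the model tag from the latest user message and strip it before calling the LLM.
const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/; // assumed format, mirroring the prefix added in sendMessage above

function extractModelFromMessages(messages: { role: string; content: string }[]) {
  const lastUserMessage = [...messages].reverse().find((message) => message.role === 'user');
  const match = lastUserMessage?.content.match(MODEL_REGEX);
  const model = match ? match[1] : 'default-model'; // placeholder fallback; the real code would likely use DEFAULT_MODEL

  // Strip the bookkeeping prefix so the provider never sees it.
  const cleanedMessages = messages.map((message) =>
    message.role === 'user' ? { ...message, content: message.content.replace(MODEL_REGEX, '') } : message,
  );

  return { model, messages: cleanedMessages };
}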

app/components/chat/UserMessage.tsx

@@ -1,4 +1,7 @@
+// @ts-nocheck
+// Preventing TS checks with files presented in the video for a better presentation.
 import { modificationsRegex } from '~/utils/diff';
+import { MODEL_REGEX } from '~/utils/constants';
 import { Markdown } from './Markdown';
 interface UserMessageProps {
@@ -14,5 +17,5 @@ export function UserMessage({ content }: UserMessageProps) {
 }
 function sanitizeUserMessage(content: string) {
-  return content.replace(modificationsRegex, '').trim();
+  return content.replace(modificationsRegex, '').replace(MODEL_REGEX, '').trim();
 }
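
With the extra .replace(MODEL_REGEX, ''), the tag is stripped before the message is rendered, so the chat history shows only the user's own text. For example, assuming MODEL_REGEX matches the '[Model: ...]' prefix added in Chat.client.tsx (the model name here is illustrative):

// sanitizeUserMessage('[Model: gpt-4o]\n\nBuild a todo app in React using Tailwind')
// returns 'Build a todo app in React using Tailwind'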