Merge remote-tracking branch 'upstream/main'

Andrew Trokhymenko
2024-11-21 23:31:30 -05:00
22 changed files with 374 additions and 217 deletions

View File

@@ -10,11 +10,7 @@ interface APIKeyManagerProps {
labelForGetApiKey?: string;
}
export const APIKeyManager: React.FC<APIKeyManagerProps> = ({
provider,
apiKey,
setApiKey,
}) => {
export const APIKeyManager: React.FC<APIKeyManagerProps> = ({ provider, apiKey, setApiKey }) => {
const [isEditing, setIsEditing] = useState(false);
const [tempKey, setTempKey] = useState(apiKey);
@@ -24,15 +20,29 @@ export const APIKeyManager: React.FC<APIKeyManagerProps> = ({
};
return (
<div className="flex items-center gap-2 mt-2 mb-2">
<span className="text-sm text-bolt-elements-textSecondary">{provider?.name} API Key:</span>
<div className="flex items-start sm:items-center mt-2 mb-2 flex-col sm:flex-row">
<div>
<span className="text-sm text-bolt-elements-textSecondary">{provider?.name} API Key:</span>
{!isEditing && (
<div className="flex items-center mb-4">
<span className="flex-1 text-xs text-bolt-elements-textPrimary mr-2">
{apiKey ? '••••••••' : 'Not set (will still work if set in .env file)'}
</span>
<IconButton onClick={() => setIsEditing(true)} title="Edit API Key">
<div className="i-ph:pencil-simple" />
</IconButton>
</div>
)}
</div>
{isEditing ? (
<>
<div className="flex items-center gap-3 mt-2">
<input
type="password"
value={tempKey}
placeholder="Your API Key"
onChange={(e) => setTempKey(e.target.value)}
className="flex-1 p-1 text-sm rounded border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus"
className="flex-1 px-2 py-1 text-xs lg:text-sm rounded border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus"
/>
<IconButton onClick={handleSave} title="Save API Key">
<div className="i-ph:check" />
@@ -40,20 +50,15 @@ export const APIKeyManager: React.FC<APIKeyManagerProps> = ({
<IconButton onClick={() => setIsEditing(false)} title="Cancel">
<div className="i-ph:x" />
</IconButton>
</>
</div>
) : (
<>
<span className="flex-1 text-sm text-bolt-elements-textPrimary">
{apiKey ? '••••••••' : 'Not set (will still work if set in .env file)'}
</span>
<IconButton onClick={() => setIsEditing(true)} title="Edit API Key">
<div className="i-ph:pencil-simple" />
</IconButton>
{provider?.getApiKeyLink && <IconButton onClick={() => window.open(provider?.getApiKeyLink)} title="Edit API Key">
<span className="mr-2">{provider?.labelForGetApiKey || 'Get API Key'}</span>
<div className={provider?.icon || "i-ph:key"} />
</IconButton>}
{provider?.getApiKeyLink && (
<IconButton className="ml-auto" onClick={() => window.open(provider?.getApiKeyLink)} title="Edit API Key">
<span className="mr-2 text-xs lg:text-sm">{provider?.labelForGetApiKey || 'Get API Key'}</span>
<div className={provider?.icon || 'i-ph:key'} />
</IconButton>
)}
</>
)}
</div>

View File

@@ -29,9 +29,9 @@ const EXAMPLE_PROMPTS = [
const providerList = PROVIDER_LIST;
const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList }) => {
const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList, apiKeys }) => {
return (
<div className="mb-2 flex gap-2">
<div className="mb-2 flex gap-2 flex-col sm:flex-row">
<select
value={provider?.name}
onChange={(e) => {
@@ -51,8 +51,7 @@ const ModelSelector = ({ model, setModel, provider, setProvider, modelList, prov
key={provider?.name}
value={model}
onChange={(e) => setModel(e.target.value)}
style={{ maxWidth: '70%' }}
className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all lg:max-w-[70%] "
>
{[...modelList]
.filter((e) => e.provider == provider?.name && e.name)
@@ -193,25 +192,25 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
ref={ref}
className={classNames(
styles.BaseChat,
'relative flex h-full w-full overflow-hidden bg-bolt-elements-background-depth-1',
'relative flex flex-col lg:flex-row h-full w-full overflow-hidden bg-bolt-elements-background-depth-1',
)}
data-chat-visible={showChat}
>
<ClientOnly>{() => <Menu />}</ClientOnly>
<div ref={scrollRef} className="flex overflow-y-auto w-full h-full">
<div className={classNames(styles.Chat, 'flex flex-col flex-grow min-w-[var(--chat-min-width)] h-full')}>
<div ref={scrollRef} className="flex flex-col lg:flex-row overflow-y-auto w-full h-full">
<div className={classNames(styles.Chat, 'flex flex-col flex-grow lg:min-w-[var(--chat-min-width)] h-full')}>
{!chatStarted && (
<div id="intro" className="mt-[26vh] max-w-chat mx-auto text-center">
<h1 className="text-6xl font-bold text-bolt-elements-textPrimary mb-4 animate-fade-in">
<div id="intro" className="mt-[26vh] max-w-chat mx-auto text-center px-4 lg:px-0">
<h1 className="text-3xl lg:text-6xl font-bold text-bolt-elements-textPrimary mb-4 animate-fade-in">
Where ideas begin
</h1>
<p className="text-xl mb-8 text-bolt-elements-textSecondary animate-fade-in animation-delay-200">
<p className="text-md lg:text-xl mb-8 text-bolt-elements-textSecondary animate-fade-in animation-delay-200">
Bring ideas to life in seconds or get help on existing projects.
</p>
</div>
)}
<div
className={classNames('pt-6 px-6', {
className={classNames('pt-6 px-2 sm:px-6', {
'h-full flex flex-col': chatStarted,
})}
>
@@ -220,7 +219,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
return chatStarted ? (
<Messages
ref={messageRef}
className="flex flex-col w-full flex-1 max-w-chat px-4 pb-6 mx-auto z-1"
className="flex flex-col w-full flex-1 max-w-chat pb-6 mx-auto z-1"
messages={messages}
isStreaming={isStreaming}
/>
@@ -228,9 +227,12 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
}}
</ClientOnly>
<div
className={classNames('relative w-full max-w-chat mx-auto z-prompt', {
'sticky bottom-0': chatStarted,
})}
className={classNames(
' bg-bolt-elements-background-depth-2 p-3 rounded-lg border border-bolt-elements-borderColor relative w-full max-w-chat mx-auto z-prompt mb-6',
{
'sticky bottom-2': chatStarted,
},
)}
>
<ModelSelector
key={provider?.name + ':' + modelList.length}
@@ -240,7 +242,9 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
provider={provider}
setProvider={setProvider}
providerList={PROVIDER_LIST}
apiKeys={apiKeys}
/>
{provider && (
<APIKeyManager
provider={provider}
@@ -248,7 +252,6 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
setApiKey={(key) => updateApiKey(provider.name, key)}
/>
)}
<FilePreview
files={uploadedFiles}
imageDataList={imageDataList}
@@ -257,7 +260,6 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
setImageDataList?.(imageDataList.filter((_, i) => i !== index));
}}
/>
<div
className={classNames(
'shadow-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background backdrop-filter backdrop-blur-[8px] rounded-lg overflow-hidden transition-all',
@@ -265,7 +267,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
>
<textarea
ref={textareaRef}
className={`w-full pl-4 pt-4 pr-16 focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus resize-none text-md text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary bg-transparent transition-all`}
className={`w-full pl-4 pt-4 pr-16 focus:outline-none focus:ring-0 focus:border-none focus:shadow-none resize-none text-md text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary bg-transparent transition-all`}
onKeyDown={(event) => {
if (event.key === 'Enter') {
if (event.shiftKey) {
@@ -347,7 +349,6 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
) : null}
</div>
</div>
<div className="bg-bolt-elements-background-depth-1 pb-6">{/* Ghost Element */}</div>
</div>
</div>
{!chatStarted && (

View File

@@ -4,7 +4,7 @@ import { classNames } from '~/utils/classNames';
import { AssistantMessage } from './AssistantMessage';
import { UserMessage } from './UserMessage';
import * as Tooltip from '@radix-ui/react-tooltip';
import { useLocation, useNavigate } from '@remix-run/react';
import { useLocation } from '@remix-run/react';
import { db, chatId } from '~/lib/persistence/useChatHistory';
import { forkChat } from '~/lib/persistence/db';
import { toast } from 'react-toastify';
@@ -19,7 +19,6 @@ interface MessagesProps {
export const Messages = React.forwardRef<HTMLDivElement, MessagesProps>((props: MessagesProps, ref) => {
const { id, isStreaming = false, messages = [] } = props;
const location = useLocation();
const navigate = useNavigate();
const handleRewind = (messageId: string) => {
const searchParams = new URLSearchParams(location.search);
@@ -69,53 +68,57 @@ export const Messages = React.forwardRef<HTMLDivElement, MessagesProps>((props:
<div className="grid grid-col-1 w-full">
{isUserMessage ? <UserMessage content={content} /> : <AssistantMessage content={content} />}
</div>
{!isUserMessage && (<div className="flex gap-2">
<Tooltip.Root>
<Tooltip.Trigger asChild>
{messageId && (<button
onClick={() => handleRewind(messageId)}
key='i-ph:arrow-u-up-left'
className={classNames(
'i-ph:arrow-u-up-left',
'text-xl text-bolt-elements-textSecondary hover:text-bolt-elements-textPrimary transition-colors'
{!isUserMessage && (
<div className="flex gap-2 flex-col lg:flex-row">
<Tooltip.Root>
<Tooltip.Trigger asChild>
{messageId && (
<button
onClick={() => handleRewind(messageId)}
key="i-ph:arrow-u-up-left"
className={classNames(
'i-ph:arrow-u-up-left',
'text-xl text-bolt-elements-textSecondary hover:text-bolt-elements-textPrimary transition-colors',
)}
/>
)}
/>)}
</Tooltip.Trigger>
<Tooltip.Portal>
<Tooltip.Content
className="bg-bolt-elements-tooltip-background text-bolt-elements-textPrimary px-3 py-2 rounded-lg text-sm shadow-lg"
sideOffset={5}
style={{zIndex: 1000}}
>
Revert to this message
<Tooltip.Arrow className="fill-bolt-elements-tooltip-background" />
</Tooltip.Content>
</Tooltip.Portal>
</Tooltip.Root>
</Tooltip.Trigger>
<Tooltip.Portal>
<Tooltip.Content
className="bg-bolt-elements-tooltip-background text-bolt-elements-textPrimary px-3 py-2 rounded-lg text-sm shadow-lg"
sideOffset={5}
style={{ zIndex: 1000 }}
>
Revert to this message
<Tooltip.Arrow className="fill-bolt-elements-tooltip-background" />
</Tooltip.Content>
</Tooltip.Portal>
</Tooltip.Root>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<button
onClick={() => handleFork(messageId)}
key='i-ph:git-fork'
className={classNames(
'i-ph:git-fork',
'text-xl text-bolt-elements-textSecondary hover:text-bolt-elements-textPrimary transition-colors'
)}
/>
</Tooltip.Trigger>
<Tooltip.Portal>
<Tooltip.Content
className="bg-bolt-elements-tooltip-background text-bolt-elements-textPrimary px-3 py-2 rounded-lg text-sm shadow-lg"
sideOffset={5}
style={{zIndex: 1000}}
>
Fork chat from this message
<Tooltip.Arrow className="fill-bolt-elements-tooltip-background" />
</Tooltip.Content>
</Tooltip.Portal>
</Tooltip.Root>
</div>)}
<Tooltip.Root>
<Tooltip.Trigger asChild>
<button
onClick={() => handleFork(messageId)}
key="i-ph:git-fork"
className={classNames(
'i-ph:git-fork',
'text-xl text-bolt-elements-textSecondary hover:text-bolt-elements-textPrimary transition-colors',
)}
/>
</Tooltip.Trigger>
<Tooltip.Portal>
<Tooltip.Content
className="bg-bolt-elements-tooltip-background text-bolt-elements-textPrimary px-3 py-2 rounded-lg text-sm shadow-lg"
sideOffset={5}
style={{ zIndex: 1000 }}
>
Fork chat from this message
<Tooltip.Arrow className="fill-bolt-elements-tooltip-background" />
</Tooltip.Content>
</Tooltip.Portal>
</Tooltip.Root>
</div>
)}
</div>
);
})

View File

@@ -1,4 +1,5 @@
import { useStore } from '@nanostores/react';
import useViewport from '~/lib/hooks';
import { chatStore } from '~/lib/stores/chat';
import { workbenchStore } from '~/lib/stores/workbench';
import { classNames } from '~/utils/classNames';
@@ -9,6 +10,8 @@ export function HeaderActionButtons({}: HeaderActionButtonsProps) {
const showWorkbench = useStore(workbenchStore.showWorkbench);
const { showChat } = useStore(chatStore);
const isSmallViewport = useViewport(1024);
const canHideChat = showWorkbench || !showChat;
return (
@@ -16,7 +19,7 @@ export function HeaderActionButtons({}: HeaderActionButtonsProps) {
<div className="flex border border-bolt-elements-borderColor rounded-md overflow-hidden">
<Button
active={showChat}
disabled={!canHideChat}
disabled={!canHideChat || isSmallViewport} // expand button is disabled on mobile as it's needed
onClick={() => {
if (canHideChat) {
chatStore.setKey('showChat', !showChat);

View File

@@ -16,6 +16,7 @@ import { cubicEasingFn } from '~/utils/easings';
import { renderLogger } from '~/utils/logger';
import { EditorPanel } from './EditorPanel';
import { Preview } from './Preview';
import useViewport from '~/lib/hooks';
interface WorkspaceProps {
chatStarted?: boolean;
@@ -65,6 +66,8 @@ export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) =>
const files = useStore(workbenchStore.files);
const selectedView = useStore(workbenchStore.currentView);
const isSmallViewport = useViewport(1024);
const setSelectedView = (view: WorkbenchViewType) => {
workbenchStore.currentView.set(view);
};
@@ -128,18 +131,20 @@ export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) =>
className={classNames(
'fixed top-[calc(var(--header-height)+1.5rem)] bottom-6 w-[var(--workbench-inner-width)] mr-4 z-0 transition-[left,width] duration-200 bolt-ease-cubic-bezier',
{
'w-full': isSmallViewport,
'left-0': showWorkbench && isSmallViewport,
'left-[var(--workbench-left)]': showWorkbench,
'left-[100%]': !showWorkbench,
},
)}
>
<div className="absolute inset-0 px-6">
<div className="absolute inset-0 px-2 lg:px-6">
<div className="h-full flex flex-col bg-bolt-elements-background-depth-2 border border-bolt-elements-borderColor shadow-sm rounded-lg overflow-hidden">
<div className="flex items-center px-3 py-2 border-b border-bolt-elements-borderColor">
<Slider selected={selectedView} options={sliderOptions} setSelected={setSelectedView} />
<div className="ml-auto" />
{selectedView === 'code' && (
<>
<div className="flex overflow-y-auto">
<PanelHeaderButton
className="mr-1 text-sm"
onClick={() => {
@@ -165,29 +170,32 @@ export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) =>
<PanelHeaderButton
className="mr-1 text-sm"
onClick={() => {
const repoName = prompt("Please enter a name for your new GitHub repository:", "bolt-generated-project");
const repoName = prompt(
'Please enter a name for your new GitHub repository:',
'bolt-generated-project',
);
if (!repoName) {
alert("Repository name is required. Push to GitHub cancelled.");
alert('Repository name is required. Push to GitHub cancelled.');
return;
}
const githubUsername = prompt("Please enter your GitHub username:");
const githubUsername = prompt('Please enter your GitHub username:');
if (!githubUsername) {
alert("GitHub username is required. Push to GitHub cancelled.");
alert('GitHub username is required. Push to GitHub cancelled.');
return;
}
const githubToken = prompt("Please enter your GitHub personal access token:");
const githubToken = prompt('Please enter your GitHub personal access token:');
if (!githubToken) {
alert("GitHub token is required. Push to GitHub cancelled.");
alert('GitHub token is required. Push to GitHub cancelled.');
return;
}
workbenchStore.pushToGitHub(repoName, githubUsername, githubToken);
workbenchStore.pushToGitHub(repoName, githubUsername, githubToken);
}}
>
<div className="i-ph:github-logo" />
Push to GitHub
</PanelHeaderButton>
</>
</div>
)}
<IconButton
icon="i-ph:x-circle"

View File

@@ -35,6 +35,8 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
case "xAI":
return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY;
case "Cohere":
return env.COHERE_API_KEY;
default:
return "";
}

View File

@@ -7,6 +7,11 @@ import { createGoogleGenerativeAI } from '@ai-sdk/google';
import { ollama } from 'ollama-ai-provider';
import { createOpenRouter } from "@openrouter/ai-sdk-provider";
import { createMistral } from '@ai-sdk/mistral';
import { createCohere } from '@ai-sdk/cohere'
export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ?
parseInt(process.env.DEFAULT_NUM_CTX, 10) :
32768;
export function getAnthropicModel(apiKey: string, model: string) {
const anthropic = createAnthropic({
@@ -22,14 +27,16 @@ export function getOpenAILikeModel(baseURL: string, apiKey: string, model: strin
baseURL,
apiKey,
});
// console.log('OpenAI client created:', !!openai);
const client = openai(model);
// console.log('OpenAI model client:', !!client);
return client;
// return {
// model: client,
// provider: 'OpenAILike' // Correctly identifying the actual provider
// };
return openai(model);
}
export function getCohereAIModel(apiKey:string, model: string){
const cohere = createCohere({
apiKey,
});
return cohere(model);
}
export function getOpenAIModel(apiKey: string, model: string) {
@@ -76,7 +83,7 @@ export function getHuggingFaceModel(apiKey: string, model: string) {
export function getOllamaModel(baseURL: string, model: string) {
let Ollama = ollama(model, {
numCtx: 32768,
numCtx: DEFAULT_NUM_CTX,
});
Ollama.config.baseURL = `${baseURL}/api`;
@@ -150,6 +157,8 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
return getLMStudioModel(baseURL, model);
case 'xAI':
return getXAIModel(apiKey, model);
case 'Cohere':
return getCohereAIModel(apiKey, model);
default:
return getOllamaModel(baseURL, model);
}
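The Ollama context window is now configurable instead of being hard-coded. A minimal usage sketch of the override, assuming the module path shown and an illustrative base URL, model name, and value:

// .env: DEFAULT_NUM_CTX=24576  (illustrative; falls back to 32768 when unset)
import { getOllamaModel } from '~/lib/.server/llm/model';

// getOllamaModel now builds the client with numCtx: DEFAULT_NUM_CTX,
// so this instance would run with a 24576-token context instead of the old fixed 32768.
const ollamaModel = getOllamaModel('http://localhost:11434', 'qwen2.5-coder');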

View File

@@ -58,7 +58,6 @@ function extractPropertiesFromMessage(message: Message): { model: string; provid
return { model, provider, content: cleanedContent };
}
export function streamText(
messages: Messages,
env: Env,
@@ -68,8 +67,6 @@ export function streamText(
let currentModel = DEFAULT_MODEL;
let currentProvider = DEFAULT_PROVIDER;
// console.log('StreamText:', JSON.stringify(messages));
const processedMessages = messages.map((message) => {
if (message.role === 'user') {
const { model, provider, content } = extractPropertiesFromMessage(message);
@@ -83,25 +80,19 @@ export function streamText(
return { ...message, content };
}
return message; // No changes for non-user messages
});
const modelDetails = MODEL_LIST.find((m) => m.name === currentModel);
// console.log('Message content:', messages[0].content);
// console.log('Extracted properties:', extractPropertiesFromMessage(messages[0]));
const dynamicMaxTokens =
modelDetails && modelDetails.maxTokenAllowed
? modelDetails.maxTokenAllowed
: MAX_TOKENS;
const llmClient = getModel(currentProvider, currentModel, env, apiKeys);
// console.log('LLM Client:', llmClient);
const llmConfig = {
...options,
model: llmClient, //getModel(currentProvider, currentModel, env, apiKeys),
provider: currentProvider,
system: getSystemPrompt(),
maxTokens: MAX_TOKENS,
messages: convertToCoreMessages(processedMessages),
};
// console.log('LLM Config:', llmConfig);
return _streamText(llmConfig);
}
return _streamText({
model: getModel(currentProvider, currentModel, env, apiKeys),
system: getSystemPrompt(),
maxTokens: dynamicMaxTokens,
messages: convertToCoreMessages(processedMessages),
...options,
});
}

View File

@@ -2,3 +2,4 @@ export * from './useMessageParser';
export * from './usePromptEnhancer';
export * from './useShortcuts';
export * from './useSnapScroll';
export { default } from './useViewport';

View File

@@ -1,4 +1,5 @@
import { useState } from 'react';
import type { ProviderInfo } from '~/types/model';
import { createScopedLogger } from '~/utils/logger';
const logger = createScopedLogger('usePromptEnhancement');
@@ -13,54 +14,54 @@ export function usePromptEnhancer() {
};
const enhancePrompt = async (
input: string,
input: string,
setInput: (value: string) => void,
model: string,
provider: string,
apiKeys?: Record<string, string>
provider: ProviderInfo,
apiKeys?: Record<string, string>,
) => {
setEnhancingPrompt(true);
setPromptEnhanced(false);
const requestBody: any = {
message: input,
model,
provider,
};
if (apiKeys) {
requestBody.apiKeys = apiKeys;
}
const response = await fetch('/api/enhancer', {
method: 'POST',
body: JSON.stringify(requestBody),
});
const reader = response.body?.getReader();
const originalInput = input;
if (reader) {
const decoder = new TextDecoder();
let _input = '';
let _error;
try {
setInput('');
while (true) {
const { value, done } = await reader.read();
if (done) {
break;
}
_input += decoder.decode(value);
logger.trace('Set input', _input);
setInput(_input);
}
} catch (error) {
@@ -70,10 +71,10 @@ export function usePromptEnhancer() {
if (_error) {
logger.error(_error);
}
setEnhancingPrompt(false);
setPromptEnhanced(true);
setTimeout(() => {
setInput(_input);
});

View File

@@ -0,0 +1,18 @@
import { useState, useEffect } from 'react';
const useViewport = (threshold = 1024) => {
const [isSmallViewport, setIsSmallViewport] = useState(window.innerWidth < threshold);
useEffect(() => {
const handleResize = () => setIsSmallViewport(window.innerWidth < threshold);
window.addEventListener('resize', handleResize);
return () => {
window.removeEventListener('resize', handleResize);
};
}, [threshold]);
return isSmallViewport;
};
export default useViewport;
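A minimal usage sketch of the new hook, mirroring how the Workbench and header buttons consume it (the component and rendered markup are illustrative, not from this diff):

import useViewport from '~/lib/hooks';

function ExamplePanel() {
  // true while window.innerWidth < 1024; re-evaluated on every resize event
  const isSmallViewport = useViewport(1024);

  // hypothetical consumer: hide a desktop-only control on small screens
  return isSmallViewport ? null : <button>Open workbench</button>;
}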

View File

@@ -2,7 +2,7 @@ import { type ActionFunctionArgs } from '@remix-run/cloudflare';
import { StreamingTextResponse, parseStreamPart } from 'ai';
import { streamText } from '~/lib/.server/llm/stream-text';
import { stripIndents } from '~/utils/stripIndent';
import type { StreamingOptions } from '~/lib/.server/llm/stream-text';
import type { ProviderInfo } from '~/types/model';
const encoder = new TextEncoder();
const decoder = new TextDecoder();
@@ -12,25 +12,27 @@ export async function action(args: ActionFunctionArgs) {
}
async function enhancerAction({ context, request }: ActionFunctionArgs) {
const { message, model, provider, apiKeys } = await request.json<{
const { message, model, provider, apiKeys } = await request.json<{
message: string;
model: string;
provider: string;
provider: ProviderInfo;
apiKeys?: Record<string, string>;
}>();
// Validate 'model' and 'provider' fields
const { name: providerName } = provider;
// validate 'model' and 'provider' fields
if (!model || typeof model !== 'string') {
throw new Response('Invalid or missing model', {
status: 400,
statusText: 'Bad Request'
statusText: 'Bad Request',
});
}
if (!provider || typeof provider !== 'string') {
if (!providerName || typeof providerName !== 'string') {
throw new Response('Invalid or missing provider', {
status: 400,
statusText: 'Bad Request'
statusText: 'Bad Request',
});
}
@@ -39,7 +41,9 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
[
{
role: 'user',
content: `[Model: ${model}]\n\n[Provider: ${provider}]\n\n` + stripIndents`
content:
`[Model: ${model}]\n\n[Provider: ${providerName}]\n\n` +
stripIndents`
I want you to improve the user prompt that is wrapped in \`<original_prompt>\` tags.
IMPORTANT: Only respond with the improved prompt and nothing else!
@@ -52,23 +56,24 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
],
context.cloudflare.env,
undefined,
apiKeys
apiKeys,
);
const transformStream = new TransformStream({
transform(chunk, controller) {
const text = decoder.decode(chunk);
const lines = text.split('\n').filter(line => line.trim() !== '');
const lines = text.split('\n').filter((line) => line.trim() !== '');
for (const line of lines) {
try {
const parsed = parseStreamPart(line);
if (parsed.type === 'text') {
controller.enqueue(encoder.encode(parsed.value));
}
} catch (e) {
// Skip invalid JSON lines
console.warn('Failed to parse stream part:', line);
// skip invalid JSON lines
console.warn('Failed to parse stream part:', line, e);
}
}
},
@@ -83,7 +88,7 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
if (error instanceof Error && error.message?.includes('API key')) {
throw new Response('Invalid or missing API key', {
status: 401,
statusText: 'Unauthorized'
statusText: 'Unauthorized',
});
}

View File

@@ -12,12 +12,12 @@ const PROVIDER_LIST: ProviderInfo[] = [
{
name: 'Anthropic',
staticModels: [
{ name: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet (new)', provider: 'Anthropic' },
{ name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet (old)', provider: 'Anthropic' },
{ name: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku (new)', provider: 'Anthropic' },
{ name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic' },
{ name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic' },
{ name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic' }
{ name: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet (new)', provider: 'Anthropic', maxTokenAllowed: 8000 },
{ name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet (old)', provider: 'Anthropic', maxTokenAllowed: 8000 },
{ name: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku (new)', provider: 'Anthropic', maxTokenAllowed: 8000 },
{ name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic', maxTokenAllowed: 8000 },
{ name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic', maxTokenAllowed: 8000 },
{ name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic', maxTokenAllowed: 8000 }
],
getApiKeyLink: "https://console.anthropic.com/settings/keys",
},
@@ -36,23 +36,40 @@ const PROVIDER_LIST: ProviderInfo[] = [
],
getDynamicModels: getOpenAILikeModels
},
{
name: 'Cohere',
staticModels: [
{ name: 'command-r-plus-08-2024', label: 'Command R plus Latest', provider: 'Cohere', maxTokenAllowed: 4096 },
{ name: 'command-r-08-2024', label: 'Command R Latest', provider: 'Cohere', maxTokenAllowed: 4096 },
{ name: 'command-r-plus', label: 'Command R plus', provider: 'Cohere', maxTokenAllowed: 4096 },
{ name: 'command-r', label: 'Command R', provider: 'Cohere', maxTokenAllowed: 4096 },
{ name: 'command', label: 'Command', provider: 'Cohere', maxTokenAllowed: 4096 },
{ name: 'command-nightly', label: 'Command Nightly', provider: 'Cohere', maxTokenAllowed: 4096 },
{ name: 'command-light', label: 'Command Light', provider: 'Cohere', maxTokenAllowed: 4096 },
{ name: 'command-light-nightly', label: 'Command Light Nightly', provider: 'Cohere', maxTokenAllowed: 4096 },
{ name: 'c4ai-aya-expanse-8b', label: 'c4AI Aya Expanse 8b', provider: 'Cohere', maxTokenAllowed: 4096 },
{ name: 'c4ai-aya-expanse-32b', label: 'c4AI Aya Expanse 32b', provider: 'Cohere', maxTokenAllowed: 4096 },
],
getApiKeyLink: 'https://dashboard.cohere.com/api-keys'
},
{
name: 'OpenRouter',
staticModels: [
{ name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenRouter' },
{ name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI', maxTokenAllowed: 8000 },
{
name: 'anthropic/claude-3.5-sonnet',
label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)',
provider: 'OpenRouter'
, maxTokenAllowed: 8000
},
{ name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter' },
{ name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter' },
{ name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter' },
{ name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter' },
{ name: 'x-ai/grok-beta', label: 'xAI Grok Beta (OpenRouter)', provider: 'OpenRouter' },
{ name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter' },
{ name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter' },
{ name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter' }
{ name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'x-ai/grok-beta', label: 'xAI Grok Beta (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 4096 }
],
getDynamicModels: getOpenRouterModels,
getApiKeyLink: 'https://openrouter.ai/settings/keys',
@@ -69,59 +86,59 @@ const PROVIDER_LIST: ProviderInfo[] = [
}, {
name: 'Groq',
staticModels: [
{ name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq' },
{ name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq' },
{ name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq' },
{ name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq' },
{ name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq' }
{ name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
{ name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
{ name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
{ name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
{ name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 }
],
getApiKeyLink: 'https://console.groq.com/keys'
},
{
name: 'HuggingFace',
staticModels: [
{ name: 'Qwen/Qwen2.5-Coder-32B-Instruct', label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)', provider: 'HuggingFace' },
{ name: '01-ai/Yi-1.5-34B-Chat', label: 'Yi-1.5-34B-Chat (HuggingFace)', provider: 'HuggingFace' },
{ name: 'codellama/CodeLlama-34b-Instruct-hf', label: 'CodeLlama-34b-Instruct (HuggingFace)', provider: 'HuggingFace' },
{ name: 'NousResearch/Hermes-3-Llama-3.1-8B', label: 'Hermes-3-Llama-3.1-8B (HuggingFace)', provider: 'HuggingFace' }
{ name: 'Qwen/Qwen2.5-Coder-32B-Instruct', label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
{ name: '01-ai/Yi-1.5-34B-Chat', label: 'Yi-1.5-34B-Chat (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
{ name: 'codellama/CodeLlama-34b-Instruct-hf', label: 'CodeLlama-34b-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
{ name: 'NousResearch/Hermes-3-Llama-3.1-8B', label: 'Hermes-3-Llama-3.1-8B (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 }
],
getApiKeyLink: 'https://huggingface.co/settings/tokens'
},
{
name: 'OpenAI',
staticModels: [
{ name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI' },
{ name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
{ name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
{ name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' }
{ name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI', maxTokenAllowed: 8000 },
{ name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 },
{ name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI', maxTokenAllowed: 8000 },
{ name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 }
],
getApiKeyLink: "https://platform.openai.com/api-keys",
}, {
name: 'xAI',
staticModels: [
{ name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI' }
{ name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI', maxTokenAllowed: 8000 }
],
getApiKeyLink: 'https://docs.x.ai/docs/quickstart#creating-an-api-key'
}, {
name: 'Deepseek',
staticModels: [
{ name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek' },
{ name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek' }
{ name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek', maxTokenAllowed: 8000 },
{ name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek', maxTokenAllowed: 8000 }
],
getApiKeyLink: 'https://platform.deepseek.com/api_keys'
}, {
name: 'Mistral',
staticModels: [
{ name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral' },
{ name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral' },
{ name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral' },
{ name: 'open-codestral-mamba', label: 'Codestral Mamba', provider: 'Mistral' },
{ name: 'open-mistral-nemo', label: 'Mistral Nemo', provider: 'Mistral' },
{ name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral' },
{ name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral' },
{ name: 'codestral-latest', label: 'Codestral', provider: 'Mistral' },
{ name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral' }
{ name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral', maxTokenAllowed: 8000 },
{ name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral', maxTokenAllowed: 8000 },
{ name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral', maxTokenAllowed: 8000 },
{ name: 'open-codestral-mamba', label: 'Codestral Mamba', provider: 'Mistral', maxTokenAllowed: 8000 },
{ name: 'open-mistral-nemo', label: 'Mistral Nemo', provider: 'Mistral', maxTokenAllowed: 8000 },
{ name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral', maxTokenAllowed: 8000 },
{ name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral', maxTokenAllowed: 8000 },
{ name: 'codestral-latest', label: 'Codestral', provider: 'Mistral', maxTokenAllowed: 8000 },
{ name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral', maxTokenAllowed: 8000 }
],
getApiKeyLink: 'https://console.mistral.ai/api-keys/'
}, {
@@ -165,7 +182,8 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
return data.models.map((model: OllamaModel) => ({
name: model.name,
label: `${model.name} (${model.details.parameter_size})`,
provider: 'Ollama'
provider: 'Ollama',
maxTokenAllowed:8000,
}));
} catch (e) {
return [];
@@ -218,8 +236,9 @@ async function getOpenRouterModels(): Promise<ModelInfo[]> {
name: m.id,
label: `${m.name} - in:$${(m.pricing.prompt * 1_000_000).toFixed(
2)} out:$${(m.pricing.completion * 1_000_000).toFixed(2)} - context ${Math.floor(
m.context_length / 1000)}k`,
provider: 'OpenRouter'
m.context_length / 1000)}k`,
provider: 'OpenRouter',
maxTokenAllowed:8000,
}));
}

View File

@@ -25,6 +25,7 @@ export interface ModelInfo {
name: string;
label: string;
provider: string;
maxTokenAllowed: number;
}
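An entry satisfying the extended interface looks like the following sketch, mirroring the Anthropic entries added in the provider list above:

const exampleModel: ModelInfo = {
  name: 'claude-3-5-sonnet-latest',
  label: 'Claude 3.5 Sonnet (new)',
  provider: 'Anthropic',
  maxTokenAllowed: 8000, // streamText uses this as dynamicMaxTokens, falling back to MAX_TOKENS
};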
export interface ProviderInfo {