From bfaaf86c69aa558dda35dc638e30108e5d58dc69 Mon Sep 17 00:00:00 2001
From: Aaron Bolton
Date: Mon, 18 Nov 2024 20:48:35 +0000
Subject: [PATCH 01/98] Created DEFAULT_NUM_CTX VAR with a default of 32768

---
 .env.example                 | 7 +++++++
 app/lib/.server/llm/model.ts | 6 +++++-
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/.env.example b/.env.example
index 46a21e8..386d407 100644
--- a/.env.example
+++ b/.env.example
@@ -56,3 +56,10 @@ XAI_API_KEY=
 
 # Include this environment variable if you want more logging for debugging locally
 VITE_LOG_LEVEL=debug
+
+# Example Context Values for qwen2.5-coder:32b
+#
+# DEFAULT_NUM_CTX=32768 # Consumes 36GB of VRAM
+# DEFAULT_NUM_CTX=24576 # Consumes 32GB of VRAM
+# DEFAULT_NUM_CTX=12288 # Consumes 26GB of VRAM
+# DEFAULT_NUM_CTX=6144 # Consumes 24GB of VRAM
\ No newline at end of file
diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts
index 6be9d11..266dd69 100644
--- a/app/lib/.server/llm/model.ts
+++ b/app/lib/.server/llm/model.ts
@@ -8,6 +8,10 @@ import { ollama } from 'ollama-ai-provider';
 import { createOpenRouter } from "@openrouter/ai-sdk-provider";
 import { createMistral } from '@ai-sdk/mistral';
 
+export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ?
+  parseInt(process.env.DEFAULT_NUM_CTX, 10) :
+  32768;
+
 export function getAnthropicModel(apiKey: string, model: string) {
   const anthropic = createAnthropic({
     apiKey,
@@ -58,7 +62,7 @@ export function getGroqModel(apiKey: string, model: string) {
 
 export function getOllamaModel(baseURL: string, model: string) {
   let Ollama = ollama(model, {
-    numCtx: 32768,
+    numCtx: DEFAULT_NUM_CTX,
   });
 
   Ollama.config.baseURL = `${baseURL}/api`;

From 53594234819346a3d8940b9101ebd9ec69b25641 Mon Sep 17 00:00:00 2001
From: Aaron Bolton
Date: Tue, 19 Nov 2024 07:46:51 +0000
Subject: [PATCH 02/98] DEFAULT_NUM_CTX additions

adding further changes for DEFAULT_NUM_CTX, including docs

---
 .env.example        |  3 ++-
 CONTRIBUTING.md     | 16 ++++++++++++++++
 Dockerfile          |  8 ++++++--
 docker-compose.yaml |  2 ++
 4 files changed, 26 insertions(+), 3 deletions(-)

diff --git a/.env.example b/.env.example
index 386d407..9710a8e 100644
--- a/.env.example
+++ b/.env.example
@@ -62,4 +62,5 @@ VITE_LOG_LEVEL=debug
 # DEFAULT_NUM_CTX=32768 # Consumes 36GB of VRAM
 # DEFAULT_NUM_CTX=24576 # Consumes 32GB of VRAM
 # DEFAULT_NUM_CTX=12288 # Consumes 26GB of VRAM
-# DEFAULT_NUM_CTX=6144 # Consumes 24GB of VRAM
\ No newline at end of file
+# DEFAULT_NUM_CTX=6144 # Consumes 24GB of VRAM
+DEFAULT_NUM_CTX=
\ No newline at end of file
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 1bf3bfb..23f2b8d 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,4 +1,7 @@
 # Contributing to Bolt.new Fork
+## DEFAULT_NUM_CTX
+
+The `DEFAULT_NUM_CTX` environment variable can be used to limit the maximum context size (in tokens) used by models such as qwen2.5-coder. For example, to limit the context to 24576 tokens (which uses 32GB of VRAM), set `DEFAULT_NUM_CTX=24576` in your `.env.local` file.
 
 First off, thank you for considering contributing to Bolt.new! This fork aims to expand the capabilities of the original project by integrating multiple LLM providers and enhancing functionality. Every contribution helps make Bolt.new a better tool for developers worldwide.
 
@@ -80,6 +83,19 @@ ANTHROPIC_API_KEY=XXX
 ```bash
 VITE_LOG_LEVEL=debug
 ```
+
+  - Optionally set context size:
+```bash
+DEFAULT_NUM_CTX=32768
+```
+
+Some example context values for the qwen2.5-coder:32b model:
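+
+* DEFAULT_NUM_CTX=32768 - Consumes 36GB of VRAM
+* DEFAULT_NUM_CTX=24576 - Consumes 32GB of VRAM
+* DEFAULT_NUM_CTX=12288 - Consumes 26GB of VRAM
+* DEFAULT_NUM_CTX=6144 - Consumes 24GB of VRAM
+
 **Important**: Never commit your `.env.local` file to version control. It's already included in .gitignore.
 
 ### 🚀 Running the Development Server
diff --git a/Dockerfile b/Dockerfile
index 3b5a74c..1d68673 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -25,6 +25,7 @@ ARG OPEN_ROUTER_API_KEY
 ARG GOOGLE_GENERATIVE_AI_API_KEY
 ARG OLLAMA_API_BASE_URL
 ARG VITE_LOG_LEVEL=debug
+ARG DEFAULT_NUM_CTX
 
 ENV WRANGLER_SEND_METRICS=false \
     GROQ_API_KEY=${GROQ_API_KEY} \
@@ -33,7 +34,8 @@ ENV WRANGLER_SEND_METRICS=false \
     OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} \
     GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY} \
     OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL} \
-    VITE_LOG_LEVEL=${VITE_LOG_LEVEL}
+    VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
+    DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}
 
 # Pre-configure wrangler to disable metrics
 RUN mkdir -p /root/.config/.wrangler && \
@@ -54,6 +56,7 @@ ARG OPEN_ROUTER_API_KEY
 ARG GOOGLE_GENERATIVE_AI_API_KEY
 ARG OLLAMA_API_BASE_URL
 ARG VITE_LOG_LEVEL=debug
+ARG DEFAULT_NUM_CTX
 
 ENV GROQ_API_KEY=${GROQ_API_KEY} \
     OPENAI_API_KEY=${OPENAI_API_KEY} \
@@ -61,7 +64,8 @@ ENV GROQ_API_KEY=${GROQ_API_KEY} \
     OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} \
     GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY} \
     OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL} \
-    VITE_LOG_LEVEL=${VITE_LOG_LEVEL}
+    VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
+    DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}
 
 RUN mkdir -p ${WORKDIR}/run
 CMD pnpm run dev --host
diff --git a/docker-compose.yaml b/docker-compose.yaml
index c391dd7..6fbd704 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -20,6 +20,7 @@ services:
       - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
       - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
      - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
+      - DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
       - RUNNING_IN_DOCKER=true
     extra_hosts:
       - "host.docker.internal:host-gateway"
@@ -46,6 +47,7 @@ services:
       - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
       - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
       - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
+      - DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
       - RUNNING_IN_DOCKER=true
     extra_hosts:
       - "host.docker.internal:host-gateway"

Taken together, patches 01 and 02 make the Ollama context window configurable end to end: `.env.example` documents the knob, `model.ts` reads it with a 32768 fallback, and the Docker files pass it through. Below is a standalone TypeScript sketch of that resolution logic, not part of the patch series; the `NaN`/positive-value guard is an extra safety check that the patch itself does not perform.

```ts
// Standalone sketch: how DEFAULT_NUM_CTX is resolved, mirroring the logic
// added to app/lib/.server/llm/model.ts above. Values are illustrative.

// Parse the env var, falling back to 32768 when unset or not a positive number.
// (The patch only checks that the variable is set; the guard here is an addition.)
function resolveNumCtx(raw: string | undefined, fallback = 32768): number {
  const parsed = raw ? parseInt(raw, 10) : NaN;
  return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback;
}

// e.g. DEFAULT_NUM_CTX=24576 -> numCtx 24576 (~32GB of VRAM for
// qwen2.5-coder:32b, per the table in .env.example above)
const numCtx = resolveNumCtx(process.env.DEFAULT_NUM_CTX);
console.log(`ollama numCtx: ${numCtx}`);
```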
From 53594234819346a3d8940b9101ebd9ec69b25641 Mon Sep 17 00:00:00 2001
From: lassecapel
Date: Sun, 17 Nov 2024 20:11:51 +0100
Subject: [PATCH 03/98] feat: add custom unique filename when download as zip

---
 app/lib/stores/workbench.ts | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/app/lib/stores/workbench.ts b/app/lib/stores/workbench.ts
index 4db14e7..9f0401d 100644
--- a/app/lib/stores/workbench.ts
+++ b/app/lib/stores/workbench.ts
@@ -15,6 +15,7 @@ import { Octokit, type RestEndpointMethodTypes } from "@octokit/rest";
 import * as nodePath from 'node:path';
 import type { WebContainerProcess } from '@webcontainer/api';
 import { extractRelativePath } from '~/utils/diff';
+import { description } from '../persistence';
 
 export interface ArtifactState {
   id: string;
@@ -171,6 +172,7 @@ export class WorkbenchStore {
     this.#editorStore.setSelectedFile(filePath);
   }
 
+
   async saveFile(filePath: string) {
     const documents = this.#editorStore.documents.get();
     const document = documents[filePath];
@@ -325,6 +327,15 @@ export class WorkbenchStore {
   async downloadZip() {
     const zip = new JSZip();
     const files = this.files.get();
+    // Get the project name (assuming it's stored in this.projectName)
+    const projectName = (description.value ?? 'project').toLocaleLowerCase().split(' ').join('_');
+
+    // Generate a simple 6-character hash based on the current timestamp
+    const timestampHash = Date.now().toString(36).slice(-6);
+    const uniqueProjectName = `${projectName}_${timestampHash}`;
+
+    // Prompt the user for a file name, prefilled with the project name
+    const fileName = prompt('Enter the file name', `${uniqueProjectName}.zip`);
 
     for (const [filePath, dirent] of Object.entries(files)) {
       if (dirent?.type === 'file' && !dirent.isBinary) {
@@ -348,8 +359,14 @@ export class WorkbenchStore {
       }
     }
 
+
+
+
+    if (fileName) {
     // Generate the zip file and save it
     const content = await zip.generateAsync({ type: 'blob' });
-    saveAs(content, 'project.zip');
+    saveAs(content, fileName);
+    }
   }
 
   async syncFiles(targetHandle: FileSystemDirectoryHandle) {
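Patch 03 above derives the archive name from the chat description plus a base-36 timestamp suffix before prompting the user. That naming scheme can be exercised on its own; in this sketch, `description` is a plain string standing in for the store value read in `workbench.ts`, and the function name is illustrative.

```ts
// Sketch of the archive-naming scheme introduced in the patch above.
function uniqueZipName(description: string | undefined): string {
  // Lowercase the description and replace spaces with underscores.
  const projectName = (description ?? 'project').toLocaleLowerCase().split(' ').join('_');

  // Base-36 timestamp, last 6 chars: cheap uniqueness, not collision-proof.
  const timestampHash = Date.now().toString(36).slice(-6);

  return `${projectName}_${timestampHash}.zip`;
}

console.log(uniqueZipName('My Web App')); // e.g. "my_web_app_1a2b3c.zip"
```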
From 399affd1e9e0cd113c45cabcdebb8db11a5434b3 Mon Sep 17 00:00:00 2001
From: lassecapel
Date: Sun, 17 Nov 2024 20:24:33 +0100
Subject: [PATCH 04/98] update comment to reflect the code line

---
 app/lib/stores/workbench.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/app/lib/stores/workbench.ts b/app/lib/stores/workbench.ts
index 9f0401d..dbdd689 100644
--- a/app/lib/stores/workbench.ts
+++ b/app/lib/stores/workbench.ts
@@ -327,7 +327,7 @@ export class WorkbenchStore {
   async downloadZip() {
     const zip = new JSZip();
     const files = this.files.get();
-    // Get the project name (assuming it's stored in this.projectName)
+    // Get the project name from the description input, or use a default name
     const projectName = (description.value ?? 'project').toLocaleLowerCase().split(' ').join('_');
 
     // Generate a simple 6-character hash based on the current timestamp

From 8978ed0ff34c4d164ab67ad25f03951c8d5eaf5f Mon Sep 17 00:00:00 2001
From: Lasse Capel
Date: Wed, 20 Nov 2024 09:54:31 +0100
Subject: [PATCH 05/98] use a descriptive unique filename when downloading the files to zip

---
 app/lib/stores/workbench.ts | 12 ++----------
 1 file changed, 2 insertions(+), 10 deletions(-)

diff --git a/app/lib/stores/workbench.ts b/app/lib/stores/workbench.ts
index dbdd689..7eed952 100644
--- a/app/lib/stores/workbench.ts
+++ b/app/lib/stores/workbench.ts
@@ -334,9 +334,6 @@ export class WorkbenchStore {
     const timestampHash = Date.now().toString(36).slice(-6);
     const uniqueProjectName = `${projectName}_${timestampHash}`;
 
-    // Prompt the user for a file name, prefilled with the project name
-    const fileName = prompt('Enter the file name', `${uniqueProjectName}.zip`);
-
     for (const [filePath, dirent] of Object.entries(files)) {
       if (dirent?.type === 'file' && !dirent.isBinary) {
         const relativePath = extractRelativePath(filePath);
@@ -358,15 +355,10 @@ export class WorkbenchStore {
       }
     }
-
-
-
-
-    if (fileName) {
+
     // Generate the zip file and save it
     const content = await zip.generateAsync({ type: 'blob' });
-    saveAs(content, fileName);
-    }
+    saveAs(content, `${uniqueProjectName}.zip`);
+  }
 
   async syncFiles(targetHandle: FileSystemDirectoryHandle) {

From 424ad1ea18be4e11983ab2e3ce2ea066c3ee96ce Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Oliver=20J=C3=A4gle?=
Date: Thu, 21 Nov 2024 22:04:42 +0100
Subject: [PATCH 06/98] Limit linting to app

Don't know why, but previously the linter for .
didn't terminate, although node_modules is ignored as per linter config --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index cc1c256..052c9cd 100644 --- a/package.json +++ b/package.json @@ -11,8 +11,8 @@ "dev": "remix vite:dev", "test": "vitest --run", "test:watch": "vitest", - "lint": "eslint --cache --cache-location ./node_modules/.cache/eslint .", - "lint:fix": "npm run lint -- --fix", + "lint": "eslint --cache --cache-location ./node_modules/.cache/eslint app", + "lint:fix": "pnpm run lint -- --fix", "start": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings", "dockerstart": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings --ip 0.0.0.0 --port 5173 --no-show-interactive-dev-session", "dockerrun": "docker run -it -d --name bolt-ai-live -p 5173:5173 --env-file .env.local bolt-ai", From 2327de381018af6627799db11b9ce56df4d5c838 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Oliver=20J=C3=A4gle?= Date: Thu, 21 Nov 2024 22:05:35 +0100 Subject: [PATCH 07/98] Lint-fix all files in app --- app/components/chat/BaseChat.tsx | 11 +- app/components/chat/Chat.client.tsx | 17 +- app/components/chat/UserMessage.tsx | 12 +- app/components/workbench/EditorPanel.tsx | 2 + app/components/workbench/FileTree.tsx | 2 +- app/components/workbench/Workbench.client.tsx | 5 + app/lib/.server/llm/api-key.ts | 36 +-- app/lib/.server/llm/model.ts | 30 ++- app/lib/.server/llm/stream-text.ts | 28 +- app/lib/persistence/db.ts | 23 +- app/lib/persistence/useChatHistory.ts | 4 +- app/lib/runtime/action-runner.ts | 59 ++-- app/lib/runtime/message-parser.ts | 10 +- app/lib/stores/terminal.ts | 6 +- app/lib/stores/workbench.ts | 37 +-- app/routes/api.chat.ts | 23 +- app/types/model.ts | 12 +- app/utils/constants.ts | 252 ++++++++++++------ app/utils/logger.ts | 2 +- app/utils/shell.ts | 92 ++++--- app/utils/types.ts | 15 +- 21 files changed, 413 insertions(+), 265 deletions(-) diff --git a/app/components/chat/BaseChat.tsx b/app/components/chat/BaseChat.tsx index 396fb01..c384138 100644 --- a/app/components/chat/BaseChat.tsx +++ b/app/components/chat/BaseChat.tsx @@ -1,5 +1,7 @@ -// @ts-nocheck -// Preventing TS checks with files presented in the video for a better presentation. +/* + * @ts-nocheck + * Preventing TS checks with files presented in the video for a better presentation. + */ import type { Message } from 'ai'; import React, { type RefCallback, useEffect } from 'react'; import { ClientOnly } from 'remix-utils/client-only'; @@ -34,6 +36,7 @@ const ModelSelector = ({ model, setModel, provider, setProvider, modelList, prov value={provider?.name} onChange={(e) => { setProvider(providerList.find((p) => p.name === e.target.value)); + const firstModel = [...modelList].find((m) => m.provider == e.target.value); setModel(firstModel ? 
firstModel.name : ''); }} @@ -118,14 +121,17 @@ export const BaseChat = React.forwardRef( // Load API keys from cookies on component mount try { const storedApiKeys = Cookies.get('apiKeys'); + if (storedApiKeys) { const parsedKeys = JSON.parse(storedApiKeys); + if (typeof parsedKeys === 'object' && parsedKeys !== null) { setApiKeys(parsedKeys); } } } catch (error) { console.error('Error loading API keys from cookies:', error); + // Clear invalid cookie data Cookies.remove('apiKeys'); } @@ -139,6 +145,7 @@ export const BaseChat = React.forwardRef( try { const updatedApiKeys = { ...apiKeys, [provider]: key }; setApiKeys(updatedApiKeys); + // Save updated API keys to cookies with 30 day expiry and secure settings Cookies.set('apiKeys', JSON.stringify(updatedApiKeys), { expires: 30, // 30 days diff --git a/app/components/chat/Chat.client.tsx b/app/components/chat/Chat.client.tsx index 31748b3..47515dd 100644 --- a/app/components/chat/Chat.client.tsx +++ b/app/components/chat/Chat.client.tsx @@ -1,5 +1,7 @@ -// @ts-nocheck -// Preventing TS checks with files presented in the video for a better presentation. +/* + * @ts-nocheck + * Preventing TS checks with files presented in the video for a better presentation. + */ import { useStore } from '@nanostores/react'; import type { Message } from 'ai'; import { useChat } from 'ai/react'; @@ -81,7 +83,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp }); const [provider, setProvider] = useState(() => { const savedProvider = Cookies.get('selectedProvider'); - return PROVIDER_LIST.find(p => p.name === savedProvider) || DEFAULT_PROVIDER; + return PROVIDER_LIST.find((p) => p.name === savedProvider) || DEFAULT_PROVIDER; }); const { showChat } = useStore(chatStore); @@ -93,11 +95,13 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp const { messages, isLoading, input, handleInputChange, setInput, stop, append } = useChat({ api: '/api/chat', body: { - apiKeys + apiKeys, }, onError: (error) => { logger.error('Request failed\n\n', error); - toast.error('There was an error processing your request: ' + (error.message ? error.message : "No details were returned")); + toast.error( + 'There was an error processing your request: ' + (error.message ? error.message : 'No details were returned'), + ); }, onFinish: () => { logger.debug('Finished streaming'); @@ -218,6 +222,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp useEffect(() => { const storedApiKeys = Cookies.get('apiKeys'); + if (storedApiKeys) { setApiKeys(JSON.parse(storedApiKeys)); } @@ -271,7 +276,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp }, model, provider, - apiKeys + apiKeys, ); }} /> diff --git a/app/components/chat/UserMessage.tsx b/app/components/chat/UserMessage.tsx index 803d2cd..520b390 100644 --- a/app/components/chat/UserMessage.tsx +++ b/app/components/chat/UserMessage.tsx @@ -1,5 +1,7 @@ -// @ts-nocheck -// Preventing TS checks with files presented in the video for a better presentation. +/* + * @ts-nocheck + * Preventing TS checks with files presented in the video for a better presentation. 
+ */ import { modificationsRegex } from '~/utils/diff'; import { MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants'; import { Markdown } from './Markdown'; @@ -17,5 +19,9 @@ export function UserMessage({ content }: UserMessageProps) { } function sanitizeUserMessage(content: string) { - return content.replace(modificationsRegex, '').replace(MODEL_REGEX, 'Using: $1').replace(PROVIDER_REGEX, ' ($1)\n\n').trim(); + return content + .replace(modificationsRegex, '') + .replace(MODEL_REGEX, 'Using: $1') + .replace(PROVIDER_REGEX, ' ($1)\n\n') + .trim(); } diff --git a/app/components/workbench/EditorPanel.tsx b/app/components/workbench/EditorPanel.tsx index 046eaf8..0a18658 100644 --- a/app/components/workbench/EditorPanel.tsx +++ b/app/components/workbench/EditorPanel.tsx @@ -255,6 +255,7 @@ export const EditorPanel = memo( {Array.from({ length: terminalCount + 1 }, (_, index) => { const isActive = activeTerminal === index; + if (index == 0) { logger.info('Starting bolt terminal'); @@ -273,6 +274,7 @@ export const EditorPanel = memo( /> ); } + return ( +
{filteredFileList.map((fileOrFolder) => { switch (fileOrFolder.kind) { case 'file': { diff --git a/app/components/workbench/Workbench.client.tsx b/app/components/workbench/Workbench.client.tsx index 7e21dd0..fb2f49e 100644 --- a/app/components/workbench/Workbench.client.tsx +++ b/app/components/workbench/Workbench.client.tsx @@ -174,16 +174,21 @@ export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) => 'Please enter a name for your new GitHub repository:', 'bolt-generated-project', ); + if (!repoName) { alert('Repository name is required. Push to GitHub cancelled.'); return; } + const githubUsername = prompt('Please enter your GitHub username:'); + if (!githubUsername) { alert('GitHub username is required. Push to GitHub cancelled.'); return; } + const githubToken = prompt('Please enter your GitHub personal access token:'); + if (!githubToken) { alert('GitHub token is required. Push to GitHub cancelled.'); return; diff --git a/app/lib/.server/llm/api-key.ts b/app/lib/.server/llm/api-key.ts index 7d8d2f9..82beacf 100644 --- a/app/lib/.server/llm/api-key.ts +++ b/app/lib/.server/llm/api-key.ts @@ -1,5 +1,7 @@ -// @ts-nocheck -// Preventing TS checks with files presented in the video for a better presentation. +/* + * @ts-nocheck + * Preventing TS checks with files presented in the video for a better presentation. + */ import { env } from 'node:process'; export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Record) { @@ -28,17 +30,19 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re case 'OpenRouter': return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY; case 'Deepseek': - return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY + return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY; case 'Mistral': - return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY; - case "OpenAILike": + return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY; + case 'OpenAILike': return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY; - case "xAI": + case 'xAI': return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY; - case "Cohere": + case 'Cohere': return env.COHERE_API_KEY; + case 'AzureOpenAI': + return env.AZURE_OPENAI_API_KEY; default: - return ""; + return ''; } } @@ -47,14 +51,16 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) { case 'OpenAILike': return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL; case 'LMStudio': - return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || "http://localhost:1234"; + return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || 'http://localhost:1234'; case 'Ollama': - let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434"; - if (env.RUNNING_IN_DOCKER === 'true') { - baseUrl = baseUrl.replace("localhost", "host.docker.internal"); - } - return baseUrl; + let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || 'http://localhost:11434'; + + if (env.RUNNING_IN_DOCKER === 'true') { + baseUrl = baseUrl.replace('localhost', 'host.docker.internal'); + } + + return baseUrl; default: - return ""; + return ''; } } diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts index 2e7c568..4c4f7bd 100644 --- a/app/lib/.server/llm/model.ts +++ b/app/lib/.server/llm/model.ts @@ -1,13 +1,15 @@ -// @ts-nocheck -// Preventing TS checks with files presented in the video for a better presentation. 
+/* + * @ts-nocheck + * Preventing TS checks with files presented in the video for a better presentation. + */ import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key'; import { createAnthropic } from '@ai-sdk/anthropic'; import { createOpenAI } from '@ai-sdk/openai'; import { createGoogleGenerativeAI } from '@ai-sdk/google'; import { ollama } from 'ollama-ai-provider'; -import { createOpenRouter } from "@openrouter/ai-sdk-provider"; +import { createOpenRouter } from '@openrouter/ai-sdk-provider'; import { createMistral } from '@ai-sdk/mistral'; -import { createCohere } from '@ai-sdk/cohere' +import { createCohere } from '@ai-sdk/cohere'; export function getAnthropicModel(apiKey: string, model: string) { const anthropic = createAnthropic({ @@ -16,7 +18,7 @@ export function getAnthropicModel(apiKey: string, model: string) { return anthropic(model); } -export function getOpenAILikeModel(baseURL:string,apiKey: string, model: string) { +export function getOpenAILikeModel(baseURL: string, apiKey: string, model: string) { const openai = createOpenAI({ baseURL, apiKey, @@ -25,7 +27,7 @@ export function getOpenAILikeModel(baseURL:string,apiKey: string, model: string) return openai(model); } -export function getCohereAIModel(apiKey:string, model: string){ +export function getCohereAIModel(apiKey: string, model: string) { const cohere = createCohere({ apiKey, }); @@ -43,7 +45,7 @@ export function getOpenAIModel(apiKey: string, model: string) { export function getMistralModel(apiKey: string, model: string) { const mistral = createMistral({ - apiKey + apiKey, }); return mistral(model); @@ -76,15 +78,16 @@ export function getHuggingFaceModel(apiKey: string, model: string) { } export function getOllamaModel(baseURL: string, model: string) { - let Ollama = ollama(model, { + const Ollama = ollama(model, { numCtx: 32768, }); Ollama.config.baseURL = `${baseURL}/api`; + return Ollama; } -export function getDeepseekModel(apiKey: string, model: string){ +export function getDeepseekModel(apiKey: string, model: string) { const openai = createOpenAI({ baseURL: 'https://api.deepseek.com/beta', apiKey, @@ -95,7 +98,7 @@ export function getDeepseekModel(apiKey: string, model: string){ export function getOpenRouterModel(apiKey: string, model: string) { const openRouter = createOpenRouter({ - apiKey + apiKey, }); return openRouter.chat(model); @@ -104,7 +107,7 @@ export function getOpenRouterModel(apiKey: string, model: string) { export function getLMStudioModel(baseURL: string, model: string) { const lmstudio = createOpenAI({ baseUrl: `${baseURL}/v1`, - apiKey: "", + apiKey: '', }); return lmstudio(model); @@ -119,7 +122,6 @@ export function getXAIModel(apiKey: string, model: string) { return openai(model); } - export function getModel(provider: string, model: string, env: Env, apiKeys?: Record) { const apiKey = getAPIKey(env, provider, apiKeys); const baseURL = getBaseURL(env, provider); @@ -138,11 +140,11 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re case 'Google': return getGoogleModel(apiKey, model); case 'OpenAILike': - return getOpenAILikeModel(baseURL,apiKey, model); + return getOpenAILikeModel(baseURL, apiKey, model); case 'Deepseek': return getDeepseekModel(apiKey, model); case 'Mistral': - return getMistralModel(apiKey, model); + return getMistralModel(apiKey, model); case 'LMStudio': return getLMStudioModel(baseURL, model); case 'xAI': diff --git a/app/lib/.server/llm/stream-text.ts b/app/lib/.server/llm/stream-text.ts index 4fedccb..b441f1c 100644 --- 
a/app/lib/.server/llm/stream-text.ts +++ b/app/lib/.server/llm/stream-text.ts @@ -1,5 +1,7 @@ -// @ts-nocheck -// Preventing TS checks with files presented in the video for a better presentation. +/* + * @ts-nocheck + * Preventing TS checks with files presented in the video for a better presentation. + */ import { streamText as _streamText, convertToCoreMessages } from 'ai'; import { getModel } from '~/lib/.server/llm/model'; import { MAX_TOKENS } from './constants'; @@ -34,19 +36,12 @@ function extractPropertiesFromMessage(message: Message): { model: string; provid const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER; // Remove model and provider lines from content - const cleanedContent = message.content - .replace(MODEL_REGEX, '') - .replace(PROVIDER_REGEX, '') - .trim(); + const cleanedContent = message.content.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '').trim(); return { model, provider, content: cleanedContent }; } -export function streamText( - messages: Messages, - env: Env, - options?: StreamingOptions, - apiKeys?: Record -) { + +export function streamText(messages: Messages, env: Env, options?: StreamingOptions, apiKeys?: Record) { let currentModel = DEFAULT_MODEL; let currentProvider = DEFAULT_PROVIDER; @@ -63,17 +58,12 @@ export function streamText( return { ...message, content }; } - return message; + return message; }); const modelDetails = MODEL_LIST.find((m) => m.name === currentModel); - - - const dynamicMaxTokens = -modelDetails && modelDetails.maxTokenAllowed - ? modelDetails.maxTokenAllowed - : MAX_TOKENS; + const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS; return _streamText({ model: getModel(currentProvider, currentModel, env, apiKeys), diff --git a/app/lib/persistence/db.ts b/app/lib/persistence/db.ts index 3aa2004..b21ace0 100644 --- a/app/lib/persistence/db.ts +++ b/app/lib/persistence/db.ts @@ -161,11 +161,17 @@ async function getUrlIds(db: IDBDatabase): Promise { export async function forkChat(db: IDBDatabase, chatId: string, messageId: string): Promise { const chat = await getMessages(db, chatId); - if (!chat) throw new Error('Chat not found'); + + if (!chat) { + throw new Error('Chat not found'); + } // Find the index of the message to fork at - const messageIndex = chat.messages.findIndex(msg => msg.id === messageId); - if (messageIndex === -1) throw new Error('Message not found'); + const messageIndex = chat.messages.findIndex((msg) => msg.id === messageId); + + if (messageIndex === -1) { + throw new Error('Message not found'); + } // Get messages up to and including the selected message const messages = chat.messages.slice(0, messageIndex + 1); @@ -175,19 +181,14 @@ export async function forkChat(db: IDBDatabase, chatId: string, messageId: strin const urlId = await getUrlId(db, newId); // Create the forked chat - await setMessages( - db, - newId, - messages, - urlId, - chat.description ? `${chat.description} (fork)` : 'Forked chat' - ); + await setMessages(db, newId, messages, urlId, chat.description ? 
`${chat.description} (fork)` : 'Forked chat'); return urlId; } export async function duplicateChat(db: IDBDatabase, id: string): Promise { const chat = await getMessages(db, id); + if (!chat) { throw new Error('Chat not found'); } @@ -200,7 +201,7 @@ export async function duplicateChat(db: IDBDatabase, id: string): Promise { + duplicateCurrentChat: async (listItemId: string) => { if (!db || (!mixedId && !listItemId)) { return; } @@ -111,7 +111,7 @@ export function useChatHistory() { } catch (error) { toast.error('Failed to duplicate chat'); } - } + }, }; } diff --git a/app/lib/runtime/action-runner.ts b/app/lib/runtime/action-runner.ts index e38a8ce..90fc829 100644 --- a/app/lib/runtime/action-runner.ts +++ b/app/lib/runtime/action-runner.ts @@ -45,7 +45,6 @@ export class ActionRunner { constructor(webcontainerPromise: Promise, getShellTerminal: () => BoltShell) { this.#webcontainer = webcontainerPromise; this.#shellTerminal = getShellTerminal; - } addAction(data: ActionCallbackData) { @@ -88,19 +87,20 @@ export class ActionRunner { if (action.executed) { return; } + if (isStreaming && action.type !== 'file') { return; } this.#updateAction(actionId, { ...action, ...data.action, executed: !isStreaming }); - return this.#currentExecutionPromise = this.#currentExecutionPromise + return (this.#currentExecutionPromise = this.#currentExecutionPromise .then(() => { return this.#executeAction(actionId, isStreaming); }) .catch((error) => { console.error('Action failed:', error); - }); + })); } async #executeAction(actionId: string, isStreaming: boolean = false) { @@ -121,17 +121,24 @@ export class ActionRunner { case 'start': { // making the start app non blocking - this.#runStartAction(action).then(()=>this.#updateAction(actionId, { status: 'complete' })) - .catch(()=>this.#updateAction(actionId, { status: 'failed', error: 'Action failed' })) - // adding a delay to avoid any race condition between 2 start actions - // i am up for a better approch - await new Promise(resolve=>setTimeout(resolve,2000)) - return + this.#runStartAction(action) + .then(() => this.#updateAction(actionId, { status: 'complete' })) + .catch(() => this.#updateAction(actionId, { status: 'failed', error: 'Action failed' })); + + /* + * adding a delay to avoid any race condition between 2 start actions + * i am up for a better approch + */ + await new Promise((resolve) => setTimeout(resolve, 2000)); + + return; break; } } - this.#updateAction(actionId, { status: isStreaming ? 'running' : action.abortSignal.aborted ? 'aborted' : 'complete' }); + this.#updateAction(actionId, { + status: isStreaming ? 'running' : action.abortSignal.aborted ? 
'aborted' : 'complete', + }); } catch (error) { this.#updateAction(actionId, { status: 'failed', error: 'Action failed' }); logger.error(`[${action.type}]:Action failed\n\n`, error); @@ -145,16 +152,19 @@ export class ActionRunner { if (action.type !== 'shell') { unreachable('Expected shell action'); } - const shell = this.#shellTerminal() - await shell.ready() + + const shell = this.#shellTerminal(); + await shell.ready(); + if (!shell || !shell.terminal || !shell.process) { unreachable('Shell terminal not found'); } - const resp = await shell.executeCommand(this.runnerId.get(), action.content) - logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`) - if (resp?.exitCode != 0) { - throw new Error("Failed To Execute Shell Command"); + const resp = await shell.executeCommand(this.runnerId.get(), action.content); + logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`); + + if (resp?.exitCode != 0) { + throw new Error('Failed To Execute Shell Command'); } } @@ -162,21 +172,26 @@ export class ActionRunner { if (action.type !== 'start') { unreachable('Expected shell action'); } + if (!this.#shellTerminal) { unreachable('Shell terminal not found'); } - const shell = this.#shellTerminal() - await shell.ready() + + const shell = this.#shellTerminal(); + await shell.ready(); + if (!shell || !shell.terminal || !shell.process) { unreachable('Shell terminal not found'); } - const resp = await shell.executeCommand(this.runnerId.get(), action.content) - logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`) + + const resp = await shell.executeCommand(this.runnerId.get(), action.content); + logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`); if (resp?.exitCode != 0) { - throw new Error("Failed To Start Application"); + throw new Error('Failed To Start Application'); } - return resp + + return resp; } async #runFileAction(action: ActionState) { diff --git a/app/lib/runtime/message-parser.ts b/app/lib/runtime/message-parser.ts index 4b564da..48f3f52 100644 --- a/app/lib/runtime/message-parser.ts +++ b/app/lib/runtime/message-parser.ts @@ -55,7 +55,7 @@ interface MessageState { export class StreamingMessageParser { #messages = new Map(); - constructor(private _options: StreamingMessageParserOptions = {}) { } + constructor(private _options: StreamingMessageParserOptions = {}) {} parse(messageId: string, input: string) { let state = this.#messages.get(messageId); @@ -120,20 +120,20 @@ export class StreamingMessageParser { i = closeIndex + ARTIFACT_ACTION_TAG_CLOSE.length; } else { if ('type' in currentAction && currentAction.type === 'file') { - let content = input.slice(i); + const content = input.slice(i); this._options.callbacks?.onActionStream?.({ artifactId: currentArtifact.id, messageId, actionId: String(state.actionId - 1), action: { - ...currentAction as FileAction, + ...(currentAction as FileAction), content, filePath: currentAction.filePath, }, - }); } + break; } } else { @@ -272,7 +272,7 @@ export class StreamingMessageParser { } (actionAttributes as FileAction).filePath = filePath; - } else if (!(['shell', 'start'].includes(actionType))) { + } else if (!['shell', 'start'].includes(actionType)) { logger.warn(`Unknown action type '${actionType}'`); } diff --git a/app/lib/stores/terminal.ts b/app/lib/stores/terminal.ts index b2537cc..9de9f4e 100644 --- a/app/lib/stores/terminal.ts +++ b/app/lib/stores/terminal.ts @@ -7,7 +7,7 @@ import { coloredText } from '~/utils/terminal'; export class TerminalStore { 
#webcontainer: Promise; #terminals: Array<{ terminal: ITerminal; process: WebContainerProcess }> = []; - #boltTerminal = newBoltShellProcess() + #boltTerminal = newBoltShellProcess(); showTerminal: WritableAtom = import.meta.hot?.data.showTerminal ?? atom(true); @@ -27,8 +27,8 @@ export class TerminalStore { } async attachBoltTerminal(terminal: ITerminal) { try { - let wc = await this.#webcontainer - await this.#boltTerminal.init(wc, terminal) + const wc = await this.#webcontainer; + await this.#boltTerminal.init(wc, terminal); } catch (error: any) { terminal.write(coloredText.red('Failed to spawn bolt shell\n\n') + error.message); return; diff --git a/app/lib/stores/workbench.ts b/app/lib/stores/workbench.ts index 4db14e7..89b0667 100644 --- a/app/lib/stores/workbench.ts +++ b/app/lib/stores/workbench.ts @@ -11,7 +11,7 @@ import { PreviewsStore } from './previews'; import { TerminalStore } from './terminal'; import JSZip from 'jszip'; import { saveAs } from 'file-saver'; -import { Octokit, type RestEndpointMethodTypes } from "@octokit/rest"; +import { Octokit, type RestEndpointMethodTypes } from '@octokit/rest'; import * as nodePath from 'node:path'; import type { WebContainerProcess } from '@webcontainer/api'; import { extractRelativePath } from '~/utils/diff'; @@ -43,7 +43,7 @@ export class WorkbenchStore { modifiedFiles = new Set(); artifactIdList: string[] = []; #boltTerminal: { terminal: ITerminal; process: WebContainerProcess } | undefined; - #globalExecutionQueue=Promise.resolve(); + #globalExecutionQueue = Promise.resolve(); constructor() { if (import.meta.hot) { import.meta.hot.data.artifacts = this.artifacts; @@ -54,7 +54,7 @@ export class WorkbenchStore { } addToExecutionQueue(callback: () => Promise) { - this.#globalExecutionQueue=this.#globalExecutionQueue.then(()=>callback()) + this.#globalExecutionQueue = this.#globalExecutionQueue.then(() => callback()); } get previews() { @@ -96,7 +96,6 @@ export class WorkbenchStore { this.#terminalStore.attachTerminal(terminal); } attachBoltTerminal(terminal: ITerminal) { - this.#terminalStore.attachBoltTerminal(terminal); } @@ -261,7 +260,8 @@ export class WorkbenchStore { this.artifacts.setKey(messageId, { ...artifact, ...state }); } addAction(data: ActionCallbackData) { - this._addAction(data) + this._addAction(data); + // this.addToExecutionQueue(()=>this._addAction(data)) } async _addAction(data: ActionCallbackData) { @@ -277,11 +277,10 @@ export class WorkbenchStore { } runAction(data: ActionCallbackData, isStreaming: boolean = false) { - if(isStreaming) { - this._runAction(data, isStreaming) - } - else{ - this.addToExecutionQueue(()=>this._runAction(data, isStreaming)) + if (isStreaming) { + this._runAction(data, isStreaming); + } else { + this.addToExecutionQueue(() => this._runAction(data, isStreaming)); } } async _runAction(data: ActionCallbackData, isStreaming: boolean = false) { @@ -292,16 +291,21 @@ export class WorkbenchStore { if (!artifact) { unreachable('Artifact not found'); } + if (data.action.type === 'file') { - let wc = await webcontainer + const wc = await webcontainer; const fullPath = nodePath.join(wc.workdir, data.action.filePath); + if (this.selectedFile.value !== fullPath) { this.setSelectedFile(fullPath); } + if (this.currentView.value !== 'code') { this.currentView.set('code'); } + const doc = this.#editorStore.documents.get()[fullPath]; + if (!doc) { await artifact.runner.runAction(data, isStreaming); } @@ -382,7 +386,6 @@ export class WorkbenchStore { } async pushToGitHub(repoName: string, githubUsername: 
string, ghToken: string) { - try { // Get the GitHub auth token from environment variables const githubToken = ghToken; @@ -397,10 +400,11 @@ export class WorkbenchStore { const octokit = new Octokit({ auth: githubToken }); // Check if the repository already exists before creating it - let repo: RestEndpointMethodTypes["repos"]["get"]["response"]['data'] + let repo: RestEndpointMethodTypes['repos']['get']['response']['data']; + try { - let resp = await octokit.repos.get({ owner: owner, repo: repoName }); - repo = resp.data + const resp = await octokit.repos.get({ owner, repo: repoName }); + repo = resp.data; } catch (error) { if (error instanceof Error && 'status' in error && error.status === 404) { // Repository doesn't exist, so create a new one @@ -418,6 +422,7 @@ export class WorkbenchStore { // Get all files const files = this.files.get(); + if (!files || Object.keys(files).length === 0) { throw new Error('No files found to push'); } @@ -434,7 +439,7 @@ export class WorkbenchStore { }); return { path: extractRelativePath(filePath), sha: blob.sha }; } - }) + }), ); const validBlobs = blobs.filter(Boolean); // Filter out any undefined blobs diff --git a/app/routes/api.chat.ts b/app/routes/api.chat.ts index 47666c7..0a6826f 100644 --- a/app/routes/api.chat.ts +++ b/app/routes/api.chat.ts @@ -1,5 +1,7 @@ -// @ts-nocheck -// Preventing TS checks with files presented in the video for a better presentation. +/* + * @ts-nocheck + * Preventing TS checks with files presented in the video for a better presentation. + */ import { type ActionFunctionArgs } from '@remix-run/cloudflare'; import { MAX_RESPONSE_SEGMENTS, MAX_TOKENS } from '~/lib/.server/llm/constants'; import { CONTINUE_PROMPT } from '~/lib/.server/llm/prompts'; @@ -14,14 +16,15 @@ function parseCookies(cookieHeader) { const cookies = {}; // Split the cookie string by semicolons and spaces - const items = cookieHeader.split(";").map(cookie => cookie.trim()); + const items = cookieHeader.split(';').map((cookie) => cookie.trim()); + + items.forEach((item) => { + const [name, ...rest] = item.split('='); - items.forEach(item => { - const [name, ...rest] = item.split("="); if (name && rest) { // Decode the name and value, and join value parts in case it contains '=' const decodedName = decodeURIComponent(name.trim()); - const decodedValue = decodeURIComponent(rest.join("=").trim()); + const decodedValue = decodeURIComponent(rest.join('=').trim()); cookies[decodedName] = decodedValue; } }); @@ -31,13 +34,13 @@ function parseCookies(cookieHeader) { async function chatAction({ context, request }: ActionFunctionArgs) { const { messages } = await request.json<{ - messages: Messages + messages: Messages; }>(); - const cookieHeader = request.headers.get("Cookie"); + const cookieHeader = request.headers.get('Cookie'); // Parse the cookie's value (returns an object or null if no cookie exists) - const apiKeys = JSON.parse(parseCookies(cookieHeader).apiKeys || "{}"); + const apiKeys = JSON.parse(parseCookies(cookieHeader).apiKeys || '{}'); const stream = new SwitchableStream(); @@ -83,7 +86,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) { if (error.message?.includes('API key')) { throw new Response('Invalid or missing API key', { status: 401, - statusText: 'Unauthorized' + statusText: 'Unauthorized', }); } diff --git a/app/types/model.ts b/app/types/model.ts index 12b6929..32522c6 100644 --- a/app/types/model.ts +++ b/app/types/model.ts @@ -1,10 +1,10 @@ import type { ModelInfo } from '~/utils/types'; export type 
ProviderInfo = { - staticModels: ModelInfo[], - name: string, - getDynamicModels?: () => Promise, - getApiKeyLink?: string, - labelForGetApiKey?: string, - icon?:string, + staticModels: ModelInfo[]; + name: string; + getDynamicModels?: () => Promise; + getApiKeyLink?: string; + labelForGetApiKey?: string; + icon?: string; }; diff --git a/app/utils/constants.ts b/app/utils/constants.ts index 942afc4..1e39232 100644 --- a/app/utils/constants.ts +++ b/app/utils/constants.ts @@ -12,26 +12,42 @@ const PROVIDER_LIST: ProviderInfo[] = [ { name: 'Anthropic', staticModels: [ - { name: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet (new)', provider: 'Anthropic', maxTokenAllowed: 8000 }, - { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet (old)', provider: 'Anthropic', maxTokenAllowed: 8000 }, - { name: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku (new)', provider: 'Anthropic', maxTokenAllowed: 8000 }, + { + name: 'claude-3-5-sonnet-latest', + label: 'Claude 3.5 Sonnet (new)', + provider: 'Anthropic', + maxTokenAllowed: 8000, + }, + { + name: 'claude-3-5-sonnet-20240620', + label: 'Claude 3.5 Sonnet (old)', + provider: 'Anthropic', + maxTokenAllowed: 8000, + }, + { + name: 'claude-3-5-haiku-latest', + label: 'Claude 3.5 Haiku (new)', + provider: 'Anthropic', + maxTokenAllowed: 8000, + }, { name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic', maxTokenAllowed: 8000 }, { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic', maxTokenAllowed: 8000 }, - { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic', maxTokenAllowed: 8000 } + { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic', maxTokenAllowed: 8000 }, ], - getApiKeyLink: "https://console.anthropic.com/settings/keys", + getApiKeyLink: 'https://console.anthropic.com/settings/keys', }, { name: 'Ollama', staticModels: [], getDynamicModels: getOllamaModels, - getApiKeyLink: "https://ollama.com/download", - labelForGetApiKey: "Download Ollama", - icon: "i-ph:cloud-arrow-down", - }, { + getApiKeyLink: 'https://ollama.com/download', + labelForGetApiKey: 'Download Ollama', + icon: 'i-ph:cloud-arrow-down', + }, + { name: 'OpenAILike', staticModels: [], - getDynamicModels: getOpenAILikeModels + getDynamicModels: getOpenAILikeModels, }, { name: 'Cohere', @@ -47,7 +63,7 @@ const PROVIDER_LIST: ProviderInfo[] = [ { name: 'c4ai-aya-expanse-8b', label: 'c4AI Aya Expanse 8b', provider: 'Cohere', maxTokenAllowed: 4096 }, { name: 'c4ai-aya-expanse-32b', label: 'c4AI Aya Expanse 32b', provider: 'Cohere', maxTokenAllowed: 4096 }, ], - getApiKeyLink: 'https://dashboard.cohere.com/api-keys' + getApiKeyLink: 'https://dashboard.cohere.com/api-keys', }, { name: 'OpenRouter', @@ -56,22 +72,52 @@ const PROVIDER_LIST: ProviderInfo[] = [ { name: 'anthropic/claude-3.5-sonnet', label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)', - provider: 'OpenRouter' - , maxTokenAllowed: 8000 + provider: 'OpenRouter', + maxTokenAllowed: 8000, + }, + { + name: 'anthropic/claude-3-haiku', + label: 'Anthropic: Claude 3 Haiku (OpenRouter)', + provider: 'OpenRouter', + maxTokenAllowed: 8000, + }, + { + name: 'deepseek/deepseek-coder', + label: 'Deepseek-Coder V2 236B (OpenRouter)', + provider: 'OpenRouter', + maxTokenAllowed: 8000, + }, + { + name: 'google/gemini-flash-1.5', + label: 'Google Gemini Flash 1.5 (OpenRouter)', + provider: 'OpenRouter', + maxTokenAllowed: 8000, + }, + { + name: 'google/gemini-pro-1.5', + label: 'Google Gemini Pro 1.5 (OpenRouter)', + 
provider: 'OpenRouter', + maxTokenAllowed: 8000, }, - { name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 }, - { name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 }, - { name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 }, - { name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 }, { name: 'x-ai/grok-beta', label: 'xAI Grok Beta (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 }, - { name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 }, - { name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 }, - { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 4096 } + { + name: 'mistralai/mistral-nemo', + label: 'OpenRouter Mistral Nemo (OpenRouter)', + provider: 'OpenRouter', + maxTokenAllowed: 8000, + }, + { + name: 'qwen/qwen-110b-chat', + label: 'OpenRouter Qwen 110b Chat (OpenRouter)', + provider: 'OpenRouter', + maxTokenAllowed: 8000, + }, + { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 4096 }, ], getDynamicModels: getOpenRouterModels, getApiKeyLink: 'https://openrouter.ai/settings/keys', - - }, { + }, + { name: 'Google', staticModels: [ { name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google', maxTokenAllowed: 8192 }, @@ -79,29 +125,50 @@ const PROVIDER_LIST: ProviderInfo[] = [ { name: 'gemini-1.5-flash-8b', label: 'Gemini 1.5 Flash-8b', provider: 'Google', maxTokenAllowed: 8192 }, { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google', maxTokenAllowed: 8192 }, { name: 'gemini-1.5-pro-002', label: 'Gemini 1.5 Pro-002', provider: 'Google', maxTokenAllowed: 8192 }, - { name: 'gemini-exp-1114', label: 'Gemini exp-1114', provider: 'Google', maxTokenAllowed: 8192 } + { name: 'gemini-exp-1114', label: 'Gemini exp-1114', provider: 'Google', maxTokenAllowed: 8192 }, ], - getApiKeyLink: 'https://aistudio.google.com/app/apikey' - }, { + getApiKeyLink: 'https://aistudio.google.com/app/apikey', + }, + { name: 'Groq', staticModels: [ { name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 }, { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 }, { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 }, { name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 }, - { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 } + { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 }, ], - getApiKeyLink: 'https://console.groq.com/keys' + getApiKeyLink: 'https://console.groq.com/keys', }, { name: 'HuggingFace', staticModels: [ - { name: 'Qwen/Qwen2.5-Coder-32B-Instruct', label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 }, - { name: '01-ai/Yi-1.5-34B-Chat', label: 'Yi-1.5-34B-Chat (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 }, - { name: 
'codellama/CodeLlama-34b-Instruct-hf', label: 'CodeLlama-34b-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 }, - { name: 'NousResearch/Hermes-3-Llama-3.1-8B', label: 'Hermes-3-Llama-3.1-8B (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 } + { + name: 'Qwen/Qwen2.5-Coder-32B-Instruct', + label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)', + provider: 'HuggingFace', + maxTokenAllowed: 8000, + }, + { + name: '01-ai/Yi-1.5-34B-Chat', + label: 'Yi-1.5-34B-Chat (HuggingFace)', + provider: 'HuggingFace', + maxTokenAllowed: 8000, + }, + { + name: 'codellama/CodeLlama-34b-Instruct-hf', + label: 'CodeLlama-34b-Instruct (HuggingFace)', + provider: 'HuggingFace', + maxTokenAllowed: 8000, + }, + { + name: 'NousResearch/Hermes-3-Llama-3.1-8B', + label: 'Hermes-3-Llama-3.1-8B (HuggingFace)', + provider: 'HuggingFace', + maxTokenAllowed: 8000, + }, ], - getApiKeyLink: 'https://huggingface.co/settings/tokens' + getApiKeyLink: 'https://huggingface.co/settings/tokens', }, { @@ -110,23 +177,24 @@ const PROVIDER_LIST: ProviderInfo[] = [ { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI', maxTokenAllowed: 8000 }, { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 }, { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI', maxTokenAllowed: 8000 }, - { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 } + { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 }, ], - getApiKeyLink: "https://platform.openai.com/api-keys", - }, { + getApiKeyLink: 'https://platform.openai.com/api-keys', + }, + { name: 'xAI', - staticModels: [ - { name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI', maxTokenAllowed: 8000 } - ], - getApiKeyLink: 'https://docs.x.ai/docs/quickstart#creating-an-api-key' - }, { + staticModels: [{ name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI', maxTokenAllowed: 8000 }], + getApiKeyLink: 'https://docs.x.ai/docs/quickstart#creating-an-api-key', + }, + { name: 'Deepseek', staticModels: [ { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek', maxTokenAllowed: 8000 }, - { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek', maxTokenAllowed: 8000 } + { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek', maxTokenAllowed: 8000 }, ], - getApiKeyLink: 'https://platform.deepseek.com/api_keys' - }, { + getApiKeyLink: 'https://platform.deepseek.com/api_keys', + }, + { name: 'Mistral', staticModels: [ { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral', maxTokenAllowed: 8000 }, @@ -137,27 +205,29 @@ const PROVIDER_LIST: ProviderInfo[] = [ { name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral', maxTokenAllowed: 8000 }, { name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral', maxTokenAllowed: 8000 }, { name: 'codestral-latest', label: 'Codestral', provider: 'Mistral', maxTokenAllowed: 8000 }, - { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral', maxTokenAllowed: 8000 } + { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral', maxTokenAllowed: 8000 }, ], - getApiKeyLink: 'https://console.mistral.ai/api-keys/' - }, { + getApiKeyLink: 'https://console.mistral.ai/api-keys/', + }, + { name: 'LMStudio', staticModels: [], getDynamicModels: getLMStudioModels, getApiKeyLink: 'https://lmstudio.ai/', labelForGetApiKey: 'Get LMStudio', - icon: "i-ph:cloud-arrow-down", - } + icon: 
'i-ph:cloud-arrow-down', + }, ]; export const DEFAULT_PROVIDER = PROVIDER_LIST[0]; -const staticModels: ModelInfo[] = PROVIDER_LIST.map(p => p.staticModels).flat(); +const staticModels: ModelInfo[] = PROVIDER_LIST.map((p) => p.staticModels).flat(); export let MODEL_LIST: ModelInfo[] = [...staticModels]; const getOllamaBaseUrl = () => { const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434'; + // Check if we're in the browser if (typeof window !== 'undefined') { // Frontend always uses localhost @@ -167,22 +237,20 @@ const getOllamaBaseUrl = () => { // Backend: Check if we're running in Docker const isDocker = process.env.RUNNING_IN_DOCKER === 'true'; - return isDocker - ? defaultBaseUrl.replace('localhost', 'host.docker.internal') - : defaultBaseUrl; + return isDocker ? defaultBaseUrl.replace('localhost', 'host.docker.internal') : defaultBaseUrl; }; async function getOllamaModels(): Promise { try { const base_url = getOllamaBaseUrl(); const response = await fetch(`${base_url}/api/tags`); - const data = await response.json() as OllamaApiResponse; + const data = (await response.json()) as OllamaApiResponse; return data.models.map((model: OllamaModel) => ({ name: model.name, label: `${model.name} (${model.details.parameter_size})`, provider: 'Ollama', - maxTokenAllowed:8000, + maxTokenAllowed: 8000, })); } catch (e) { return []; @@ -192,20 +260,23 @@ async function getOllamaModels(): Promise { async function getOpenAILikeModels(): Promise { try { const base_url = import.meta.env.OPENAI_LIKE_API_BASE_URL || ''; + if (!base_url) { return []; } + const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? ''; const response = await fetch(`${base_url}/models`, { headers: { - Authorization: `Bearer ${api_key}` - } + Authorization: `Bearer ${api_key}`, + }, }); - const res = await response.json() as any; + const res = (await response.json()) as any; + return res.data.map((model: any) => ({ name: model.id, label: model.id, - provider: 'OpenAILike' + provider: 'OpenAILike', })); } catch (e) { return []; @@ -220,51 +291,66 @@ type OpenRouterModelsResponse = { pricing: { prompt: number; completion: number; - } - }[] + }; + }[]; }; async function getOpenRouterModels(): Promise { - const data: OpenRouterModelsResponse = await (await fetch('https://openrouter.ai/api/v1/models', { - headers: { - 'Content-Type': 'application/json' - } - })).json(); + const data: OpenRouterModelsResponse = await ( + await fetch('https://openrouter.ai/api/v1/models', { + headers: { + 'Content-Type': 'application/json', + }, + }) + ).json(); - return data.data.sort((a, b) => a.name.localeCompare(b.name)).map(m => ({ - name: m.id, - label: `${m.name} - in:$${(m.pricing.prompt * 1_000_000).toFixed( - 2)} out:$${(m.pricing.completion * 1_000_000).toFixed(2)} - context ${Math.floor( - m.context_length / 1000)}k`, - provider: 'OpenRouter', - maxTokenAllowed:8000, - })); + return data.data + .sort((a, b) => a.name.localeCompare(b.name)) + .map((m) => ({ + name: m.id, + label: `${m.name} - in:$${(m.pricing.prompt * 1_000_000).toFixed( + 2, + )} out:$${(m.pricing.completion * 1_000_000).toFixed(2)} - context ${Math.floor(m.context_length / 1000)}k`, + provider: 'OpenRouter', + maxTokenAllowed: 8000, + })); } async function getLMStudioModels(): Promise { try { const base_url = import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234'; const response = await fetch(`${base_url}/v1/models`); - const data = await response.json() as any; + const data = (await response.json()) as any; + return 
data.data.map((model: any) => ({ name: model.id, label: model.id, - provider: 'LMStudio' + provider: 'LMStudio', })); } catch (e) { return []; } } - - async function initializeModelList(): Promise { - MODEL_LIST = [...(await Promise.all( - PROVIDER_LIST - .filter((p): p is ProviderInfo & { getDynamicModels: () => Promise } => !!p.getDynamicModels) - .map(p => p.getDynamicModels()))) - .flat(), ...staticModels]; + MODEL_LIST = [ + ...( + await Promise.all( + PROVIDER_LIST.filter( + (p): p is ProviderInfo & { getDynamicModels: () => Promise } => !!p.getDynamicModels, + ).map((p) => p.getDynamicModels()), + ) + ).flat(), + ...staticModels, + ]; return MODEL_LIST; } -export { getOllamaModels, getOpenAILikeModels, getLMStudioModels, initializeModelList, getOpenRouterModels, PROVIDER_LIST }; +export { + getOllamaModels, + getOpenAILikeModels, + getLMStudioModels, + initializeModelList, + getOpenRouterModels, + PROVIDER_LIST, +}; diff --git a/app/utils/logger.ts b/app/utils/logger.ts index 1a5c932..9b2c31c 100644 --- a/app/utils/logger.ts +++ b/app/utils/logger.ts @@ -11,7 +11,7 @@ interface Logger { setLevel: (level: DebugLevel) => void; } -let currentLevel: DebugLevel = import.meta.env.VITE_LOG_LEVEL ?? import.meta.env.DEV ? 'debug' : 'info'; +let currentLevel: DebugLevel = (import.meta.env.VITE_LOG_LEVEL ?? import.meta.env.DEV) ? 'debug' : 'info'; const isWorker = 'HTMLRewriter' in globalThis; const supportsColor = !isWorker; diff --git a/app/utils/shell.ts b/app/utils/shell.ts index d45e8a6..2054936 100644 --- a/app/utils/shell.ts +++ b/app/utils/shell.ts @@ -52,66 +52,70 @@ export async function newShellProcess(webcontainer: WebContainer, terminal: ITer return process; } - - export class BoltShell { - #initialized: (() => void) | undefined - #readyPromise: Promise - #webcontainer: WebContainer | undefined - #terminal: ITerminal | undefined - #process: WebContainerProcess | undefined - executionState = atom<{ sessionId: string, active: boolean, executionPrms?: Promise } | undefined>() - #outputStream: ReadableStreamDefaultReader | undefined - #shellInputStream: WritableStreamDefaultWriter | undefined + #initialized: (() => void) | undefined; + #readyPromise: Promise; + #webcontainer: WebContainer | undefined; + #terminal: ITerminal | undefined; + #process: WebContainerProcess | undefined; + executionState = atom<{ sessionId: string; active: boolean; executionPrms?: Promise } | undefined>(); + #outputStream: ReadableStreamDefaultReader | undefined; + #shellInputStream: WritableStreamDefaultWriter | undefined; constructor() { this.#readyPromise = new Promise((resolve) => { - this.#initialized = resolve - }) + this.#initialized = resolve; + }); } ready() { return this.#readyPromise; } async init(webcontainer: WebContainer, terminal: ITerminal) { - this.#webcontainer = webcontainer - this.#terminal = terminal - let callback = (data: string) => { - console.log(data) - } - let { process, output } = await this.newBoltShellProcess(webcontainer, terminal) - this.#process = process - this.#outputStream = output.getReader() - await this.waitTillOscCode('interactive') - this.#initialized?.() + this.#webcontainer = webcontainer; + this.#terminal = terminal; + + const callback = (data: string) => { + console.log(data); + }; + const { process, output } = await this.newBoltShellProcess(webcontainer, terminal); + this.#process = process; + this.#outputStream = output.getReader(); + await this.waitTillOscCode('interactive'); + this.#initialized?.(); } get terminal() { - return this.#terminal + return 
  }
 
   get process() {
-    return this.#process
+    return this.#process;
   }
 
   async executeCommand(sessionId: string, command: string) {
     if (!this.process || !this.terminal) {
-      return
+      return;
     }
-    let state = this.executionState.get()
 
-    //interrupt the current execution
-    // this.#shellInputStream?.write('\x03');
+    const state = this.executionState.get();
+
+    /*
+     * interrupt the current execution
+     * this.#shellInputStream?.write('\x03');
+     */
     this.terminal.input('\x03');
+
     if (state && state.executionPrms) {
-      await state.executionPrms
+      await state.executionPrms;
     }
+
     //start a new execution
     this.terminal.input(command.trim() + '\n');
 
     //wait for the execution to finish
-    let executionPrms = this.getCurrentExecutionResult()
-    this.executionState.set({ sessionId, active: true, executionPrms })
+    const executionPrms = this.getCurrentExecutionResult();
+    this.executionState.set({ sessionId, active: true, executionPrms });
 
-    let resp = await executionPrms
-    this.executionState.set({ sessionId, active: false })
-    return resp
+    const resp = await executionPrms;
+    this.executionState.set({ sessionId, active: false });
+
+    return resp;
   }
 
   async newBoltShellProcess(webcontainer: WebContainer, terminal: ITerminal) {
     const args: string[] = [];
@@ -126,6 +130,7 @@ export class BoltShell {
 
     const input = process.input.getWriter();
     this.#shellInputStream = input;
+
     const [internalOutput, terminalOutput] = process.output.tee();
 
     const jshReady = withResolvers<void>();
@@ -163,30 +168,41 @@ export class BoltShell {
     return { process, output: internalOutput };
   }
 
   async getCurrentExecutionResult() {
-    let { output, exitCode } = await this.waitTillOscCode('exit')
+    const { output, exitCode } = await this.waitTillOscCode('exit');
     return { output, exitCode };
   }
 
   async waitTillOscCode(waitCode: string) {
     let fullOutput = '';
     let exitCode: number = 0;
-    if (!this.#outputStream) return { output: fullOutput, exitCode };
-    let tappedStream = this.#outputStream
+
+    if (!this.#outputStream) {
+      return { output: fullOutput, exitCode };
+    }
+
+    const tappedStream = this.#outputStream;
 
     while (true) {
       const { value, done } = await tappedStream.read();
-      if (done) break;
+
+      if (done) {
+        break;
+      }
+
       const text = value || '';
       fullOutput += text;
 
       // Check if command completion signal with exit code
       const [, osc, , pid, code] = text.match(/\x1b\]654;([^\x07=]+)=?((-?\d+):(\d+))?\x07/) || [];
+
       if (osc === 'exit') {
         exitCode = parseInt(code, 10);
       }
+
       if (osc === waitCode) {
         break;
       }
     }
+
     return { output: fullOutput, exitCode };
   }
 }
diff --git a/app/utils/types.ts b/app/utils/types.ts
index 171edc3..8742891 100644
--- a/app/utils/types.ts
+++ b/app/utils/types.ts
@@ -1,4 +1,3 @@
-
 interface OllamaModelDetails {
   parent_model: string;
   format: string;
@@ -29,10 +28,10 @@ export interface ModelInfo {
 }
 
 export interface ProviderInfo {
-  staticModels: ModelInfo[],
-  name: string,
-  getDynamicModels?: () => Promise<ModelInfo[]>,
-  getApiKeyLink?: string,
-  labelForGetApiKey?: string,
-  icon?:string,
-};
+  staticModels: ModelInfo[];
+  name: string;
+  getDynamicModels?: () => Promise<ModelInfo[]>;
+  getApiKeyLink?: string;
+  labelForGetApiKey?: string;
+  icon?: string;
+}

From fe3e2ebddf034fed578fd7f40cd173545ebea860 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Oliver=20J=C3=A4gle?=
Date: Thu, 21 Nov 2024 22:18:39 +0100
Subject: [PATCH 08/98] Ignore some stackblitz specific linting rules

---
 eslint.config.mjs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/eslint.config.mjs b/eslint.config.mjs
index 123aaf1..160e5f3 100644
--- a/eslint.config.mjs
+++ 
b/eslint.config.mjs @@ -12,6 +12,8 @@ export default [ '@blitz/catch-error-name': 'off', '@typescript-eslint/no-this-alias': 'off', '@typescript-eslint/no-empty-object-type': 'off', + '@blitz/comment-syntax': 'off', + '@blitz/block-scope-case': 'off', }, }, { From 5997d31aa382830bf404e38f77d14c09daa6c44f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stig-=C3=98rjan=20Smelror?= Date: Fri, 22 Nov 2024 07:15:04 +0100 Subject: [PATCH 09/98] Update to Gemini exp-1121 --- app/utils/constants.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/utils/constants.ts b/app/utils/constants.ts index 942afc4..cfb704d 100644 --- a/app/utils/constants.ts +++ b/app/utils/constants.ts @@ -79,7 +79,7 @@ const PROVIDER_LIST: ProviderInfo[] = [ { name: 'gemini-1.5-flash-8b', label: 'Gemini 1.5 Flash-8b', provider: 'Google', maxTokenAllowed: 8192 }, { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google', maxTokenAllowed: 8192 }, { name: 'gemini-1.5-pro-002', label: 'Gemini 1.5 Pro-002', provider: 'Google', maxTokenAllowed: 8192 }, - { name: 'gemini-exp-1114', label: 'Gemini exp-1114', provider: 'Google', maxTokenAllowed: 8192 } + { name: 'gemini-exp-1121', label: 'Gemini exp-1121', provider: 'Google', maxTokenAllowed: 8192 } ], getApiKeyLink: 'https://aistudio.google.com/app/apikey' }, { From a6060b81a53780772c7d36e069b3f957d718b282 Mon Sep 17 00:00:00 2001 From: eduardruzga Date: Fri, 22 Nov 2024 10:17:28 +0200 Subject: [PATCH 10/98] Export chat from sidebar --- app/components/sidebar/HistoryItem.tsx | 11 +++++- app/utils/chatExport.ts | 46 ++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 1 deletion(-) create mode 100644 app/utils/chatExport.ts diff --git a/app/components/sidebar/HistoryItem.tsx b/app/components/sidebar/HistoryItem.tsx index df270c8..6a6f61b 100644 --- a/app/components/sidebar/HistoryItem.tsx +++ b/app/components/sidebar/HistoryItem.tsx @@ -1,6 +1,7 @@ import * as Dialog from '@radix-ui/react-dialog'; import { useEffect, useRef, useState } from 'react'; import { type ChatHistoryItem } from '~/lib/persistence'; +import { exportChat } from '~/utils/chatExport'; interface HistoryItemProps { item: ChatHistoryItem; @@ -43,9 +44,17 @@ export function HistoryItem({ item, onDelete, onDuplicate }: HistoryItemProps) { > {item.description} -
+
{hovering && (
+