Merge remote-tracking branch 'coleam00/main' into import-export-individual-chats

# Conflicts:
#	app/components/chat/BaseChat.tsx
#	app/components/chat/Messages.client.tsx
#	app/lib/persistence/db.ts
#	app/lib/persistence/useChatHistory.ts
eduardruzga
2024-11-23 00:26:12 +02:00
33 changed files with 655 additions and 367 deletions

File: app/lib/.server/llm/api-key.ts

@@ -1,5 +1,7 @@
-// @ts-nocheck
-// Preventing TS checks with files presented in the video for a better presentation.
+/*
+ * @ts-nocheck
+ * Preventing TS checks with files presented in the video for a better presentation.
+ */
 import { env } from 'node:process';
 
 export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
@@ -28,17 +30,19 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
     case 'OpenRouter':
       return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
     case 'Deepseek':
-      return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY
+      return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY;
     case 'Mistral':
-      return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
-    case "OpenAILike":
+      return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
+    case 'OpenAILike':
       return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
-    case "xAI":
+    case 'xAI':
       return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY;
-    case "Cohere":
+    case 'Cohere':
       return env.COHERE_API_KEY;
+    case 'AzureOpenAI':
+      return env.AZURE_OPENAI_API_KEY;
     default:
-      return "";
+      return '';
   }
 }
@@ -47,14 +51,17 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
     case 'OpenAILike':
       return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
     case 'LMStudio':
-      return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || "http://localhost:1234";
-    case 'Ollama':
-      let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
-      if (env.RUNNING_IN_DOCKER === 'true') {
-        baseUrl = baseUrl.replace("localhost", "host.docker.internal");
-      }
-      return baseUrl;
+      return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
+    case 'Ollama': {
+      let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || 'http://localhost:11434';
+
+      if (env.RUNNING_IN_DOCKER === 'true') {
+        baseUrl = baseUrl.replace('localhost', 'host.docker.internal');
+      }
+
+      return baseUrl;
+    }
     default:
-      return "";
+      return '';
   }
 }
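Reviewer note: the RUNNING_IN_DOCKER rewrite exists because, inside a container, localhost resolves to the container itself rather than the host where Ollama actually listens. A minimal standalone sketch of the same resolution order (hypothetical helper, not part of the diff):

// Hypothetical helper mirroring the Ollama branch above.
function resolveOllamaUrl(envUrl: string | undefined, inDocker: boolean): string {
  const base = envUrl || 'http://localhost:11434';

  // host.docker.internal routes from a container back to the host machine.
  return inDocker ? base.replace('localhost', 'host.docker.internal') : base;
}

resolveOllamaUrl(undefined, true); // 'http://host.docker.internal:11434'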

File: app/lib/.server/llm/model.ts

@@ -1,22 +1,29 @@
-// @ts-nocheck
-// Preventing TS checks with files presented in the video for a better presentation.
+/*
+ * @ts-nocheck
+ * Preventing TS checks with files presented in the video for a better presentation.
+ */
 import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
 import { createAnthropic } from '@ai-sdk/anthropic';
 import { createOpenAI } from '@ai-sdk/openai';
 import { createGoogleGenerativeAI } from '@ai-sdk/google';
 import { ollama } from 'ollama-ai-provider';
-import { createOpenRouter } from "@openrouter/ai-sdk-provider";
+import { createOpenRouter } from '@openrouter/ai-sdk-provider';
 import { createMistral } from '@ai-sdk/mistral';
-import { createCohere } from '@ai-sdk/cohere'
+import { createCohere } from '@ai-sdk/cohere';
+import type { LanguageModelV1 } from 'ai';
 
-export function getAnthropicModel(apiKey: string, model: string) {
+export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ? parseInt(process.env.DEFAULT_NUM_CTX, 10) : 32768;
+
+type OptionalApiKey = string | undefined;
+
+export function getAnthropicModel(apiKey: OptionalApiKey, model: string) {
   const anthropic = createAnthropic({
     apiKey,
   });
 
   return anthropic(model);
 }
-export function getOpenAILikeModel(baseURL:string,apiKey: string, model: string) {
+
+export function getOpenAILikeModel(baseURL: string, apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL,
     apiKey,
@@ -25,7 +32,7 @@ export function getOpenAILikeModel(baseURL:string,apiKey: string, model: string)
   return openai(model);
 }
 
-export function getCohereAIModel(apiKey:string, model: string){
+export function getCohereAIModel(apiKey: OptionalApiKey, model: string) {
   const cohere = createCohere({
     apiKey,
   });
@@ -33,7 +40,7 @@ export function getCohereAIModel(apiKey:string, model: string){
   return cohere(model);
 }
 
-export function getOpenAIModel(apiKey: string, model: string) {
+export function getOpenAIModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     apiKey,
   });
@@ -41,15 +48,15 @@ export function getOpenAIModel(apiKey: string, model: string) {
   return openai(model);
 }
 
-export function getMistralModel(apiKey: string, model: string) {
+export function getMistralModel(apiKey: OptionalApiKey, model: string) {
   const mistral = createMistral({
-    apiKey
+    apiKey,
   });
 
   return mistral(model);
 }
 
-export function getGoogleModel(apiKey: string, model: string) {
+export function getGoogleModel(apiKey: OptionalApiKey, model: string) {
   const google = createGoogleGenerativeAI({
     apiKey,
   });
@@ -57,7 +64,7 @@ export function getGoogleModel(apiKey: string, model: string) {
   return google(model);
 }
 
-export function getGroqModel(apiKey: string, model: string) {
+export function getGroqModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.groq.com/openai/v1',
     apiKey,
@@ -66,7 +73,7 @@ export function getGroqModel(apiKey: string, model: string) {
   return openai(model);
 }
 
-export function getHuggingFaceModel(apiKey: string, model: string) {
+export function getHuggingFaceModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api-inference.huggingface.co/v1/',
     apiKey,
@@ -76,15 +83,16 @@ export function getHuggingFaceModel(apiKey: string, model: string) {
 }
 
 export function getOllamaModel(baseURL: string, model: string) {
-  let Ollama = ollama(model, {
-    numCtx: 32768,
-  });
+  const ollamaInstance = ollama(model, {
+    numCtx: DEFAULT_NUM_CTX,
+  }) as LanguageModelV1 & { config: any };
 
-  Ollama.config.baseURL = `${baseURL}/api`;
-  return Ollama;
+  ollamaInstance.config.baseURL = `${baseURL}/api`;
+
+  return ollamaInstance;
 }
 
-export function getDeepseekModel(apiKey: string, model: string){
+export function getDeepseekModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.deepseek.com/beta',
     apiKey,
@@ -93,9 +101,9 @@ export function getDeepseekModel(apiKey: string, model: string){
   return openai(model);
 }
 
-export function getOpenRouterModel(apiKey: string, model: string) {
+export function getOpenRouterModel(apiKey: OptionalApiKey, model: string) {
   const openRouter = createOpenRouter({
-    apiKey
+    apiKey,
   });
 
   return openRouter.chat(model);
@@ -104,13 +112,13 @@ export function getOpenRouterModel(apiKey: string, model: string) {
 export function getLMStudioModel(baseURL: string, model: string) {
   const lmstudio = createOpenAI({
     baseUrl: `${baseURL}/v1`,
-    apiKey: "",
+    apiKey: '',
   });
 
   return lmstudio(model);
 }
 
-export function getXAIModel(apiKey: string, model: string) {
+export function getXAIModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.x.ai/v1',
     apiKey,
@@ -119,7 +127,6 @@ export function getXAIModel(apiKey: string, model: string) {
   return openai(model);
 }
-
 
 export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
   const apiKey = getAPIKey(env, provider, apiKeys);
   const baseURL = getBaseURL(env, provider);
@@ -138,11 +145,11 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
     case 'Google':
       return getGoogleModel(apiKey, model);
     case 'OpenAILike':
-      return getOpenAILikeModel(baseURL,apiKey, model);
+      return getOpenAILikeModel(baseURL, apiKey, model);
     case 'Deepseek':
       return getDeepseekModel(apiKey, model);
     case 'Mistral':
-      return getMistralModel(apiKey, model);
+      return getMistralModel(apiKey, model);
     case 'LMStudio':
       return getLMStudioModel(baseURL, model);
     case 'xAI':
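Reviewer note: two behavioral changes ride along with the formatting here. The Ollama context window is now read from DEFAULT_NUM_CTX instead of the hard-coded 32768, and every key parameter widens to string | undefined so a missing key no longer needs a dummy value. A sketch of the env parsing under the same assumptions (base-10 parse, 32768 fallback):

// Hypothetical mirror of the DEFAULT_NUM_CTX logic above.
const numCtx: number = process.env.DEFAULT_NUM_CTX
  ? parseInt(process.env.DEFAULT_NUM_CTX, 10) // e.g. DEFAULT_NUM_CTX=24576 to reduce VRAM use
  : 32768;

One caveat: parseInt returns NaN for a malformed value, and the diff does not guard against that, so an invalid DEFAULT_NUM_CTX would propagate NaN into the Ollama options.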

File: app/lib/.server/llm/stream-text.ts

@@ -1,5 +1,6 @@
-// @ts-nocheck
-// Preventing TS checks with files presented in the video for a better presentation.
+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-nocheck TODO: Provider proper types
+
 import { streamText as _streamText, convertToCoreMessages } from 'ai';
 import { getModel } from '~/lib/.server/llm/model';
 import { MAX_TOKENS } from './constants';
@@ -34,19 +35,12 @@ function extractPropertiesFromMessage(message: Message): { model: string; provid
   const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER;
 
   // Remove model and provider lines from content
-  const cleanedContent = message.content
-    .replace(MODEL_REGEX, '')
-    .replace(PROVIDER_REGEX, '')
-    .trim();
+  const cleanedContent = message.content.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '').trim();
 
   return { model, provider, content: cleanedContent };
 }
 
-export function streamText(
-  messages: Messages,
-  env: Env,
-  options?: StreamingOptions,
-  apiKeys?: Record<string, string>
-) {
+export function streamText(messages: Messages, env: Env, options?: StreamingOptions, apiKeys?: Record<string, string>) {
   let currentModel = DEFAULT_MODEL;
   let currentProvider = DEFAULT_PROVIDER;
@@ -63,17 +57,12 @@ export function streamText(
       return { ...message, content };
     }
 
-    return message;
+    return message;
   });
 
   const modelDetails = MODEL_LIST.find((m) => m.name === currentModel);
 
-  const dynamicMaxTokens =
-    modelDetails && modelDetails.maxTokenAllowed
-      ? modelDetails.maxTokenAllowed
-      : MAX_TOKENS;
+  const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;
 
   return _streamText({
     model: getModel(currentProvider, currentModel, env, apiKeys),
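Reviewer note: extractPropertiesFromMessage pulls the model/provider choice out of the message text itself, then strips those markers before the content reaches the LLM. MODEL_REGEX and PROVIDER_REGEX are defined outside this hunk; assuming they match bracketed headers of the form [Model: ...] and [Provider: ...], the cleanup behaves roughly like:

// Assumed marker format; the real regexes live elsewhere in this file.
const MODEL_REGEX = /\[Model: (.*?)\]\n\n/;
const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;

const raw = '[Model: gpt-4o]\n\n[Provider: OpenAI]\n\nHello there';
const cleaned = raw.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '').trim();
// cleaned === 'Hello there'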

File: app/lib/hooks/index.ts

@@ -2,3 +2,4 @@ export * from './useMessageParser';
 export * from './usePromptEnhancer';
 export * from './useShortcuts';
 export * from './useSnapScroll';
+export { default } from './useViewport';

File: app/lib/hooks/useViewport.ts

@@ -0,0 +1,18 @@
+import { useState, useEffect } from 'react';
+
+const useViewport = (threshold = 1024) => {
+  const [isSmallViewport, setIsSmallViewport] = useState(window.innerWidth < threshold);
+
+  useEffect(() => {
+    const handleResize = () => setIsSmallViewport(window.innerWidth < threshold);
+    window.addEventListener('resize', handleResize);
+
+    return () => {
+      window.removeEventListener('resize', handleResize);
+    };
+  }, [threshold]);
+
+  return isSmallViewport;
+};
+
+export default useViewport;
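Reviewer note: a hypothetical consumer, to show the intended shape; the hook re-renders on resize and answers a single boolean question.

// Sketch only; the component and its markup are illustrative.
import useViewport from '~/lib/hooks';

function Sidebar() {
  const isSmallViewport = useViewport(1024); // true below 1024px wide

  return isSmallViewport ? null : <nav className="sidebar" />;
}

Worth noting: the hook reads window.innerWidth during the initial useState call, so it assumes a browser environment; rendering it on the server would throw.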

File: app/lib/persistence/db.ts

@@ -161,11 +161,17 @@ async function getUrlIds(db: IDBDatabase): Promise<string[]> {
 export async function forkChat(db: IDBDatabase, chatId: string, messageId: string): Promise<string> {
   const chat = await getMessages(db, chatId);
 
-  if (!chat) throw new Error('Chat not found');
+  if (!chat) {
+    throw new Error('Chat not found');
+  }
 
   // Find the index of the message to fork at
-  const messageIndex = chat.messages.findIndex(msg => msg.id === messageId);
-  if (messageIndex === -1) throw new Error('Message not found');
+  const messageIndex = chat.messages.findIndex((msg) => msg.id === messageId);
+
+  if (messageIndex === -1) {
+    throw new Error('Message not found');
+  }
 
   // Get messages up to and including the selected message
   const messages = chat.messages.slice(0, messageIndex + 1);
@@ -177,6 +183,7 @@ export async function forkChat(db: IDBDatabase, chatId: string, messageId: strin
 export async function duplicateChat(db: IDBDatabase, id: string): Promise<string> {
   const chat = await getMessages(db, id);
+
   if (!chat) {
     throw new Error('Chat not found');
   }
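Reviewer note: the brace insertion above is cosmetic, but the shape of forkChat is worth spelling out: it copies history only up to the chosen message, so the fork shares a prefix with the original chat. Roughly, with hypothetical data:

// Hypothetical data, mirroring the findIndex/slice pair above.
const history = [{ id: 'a' }, { id: 'b' }, { id: 'c' }];
const messageIndex = history.findIndex((msg) => msg.id === 'b'); // 1
const forked = history.slice(0, messageIndex + 1); // [{ id: 'a' }, { id: 'b' }]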

File: app/lib/persistence/useChatHistory.ts

@@ -107,7 +107,7 @@ export function useChatHistory() {
       await setMessages(db, chatId.get() as string, messages, urlId, description.get());
     },
-    duplicateCurrentChat: async (listItemId:string) => {
+    duplicateCurrentChat: async (listItemId: string) => {
       if (!db || (!mixedId && !listItemId)) {
         return;
       }
@@ -118,6 +118,7 @@ export function useChatHistory() {
         toast.success('Chat duplicated successfully');
       } catch (error) {
         toast.error('Failed to duplicate chat');
+        console.log(error);
       }
     },
     importChat: async (description: string, messages:Message[]) => {
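Reviewer note: importChat is the entry point this branch adds for restoring an exported chat; the export side is not shown in this excerpt, so the serialized shape is inferred from the signature alone. A hedged usage sketch:

// Hypothetical: parse an exported file and hand it to importChat.
// The { description, messages } shape is assumed from the signature above;
// fileContents, Message, and importChat are taken to be in scope.
const data = JSON.parse(fileContents) as { description: string; messages: Message[] };
await importChat(data.description, data.messages);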

File: app/lib/runtime/action-runner.ts

@@ -1,11 +1,10 @@
-import { WebContainer, type WebContainerProcess } from '@webcontainer/api';
+import { WebContainer } from '@webcontainer/api';
 import { atom, map, type MapStore } from 'nanostores';
 import * as nodePath from 'node:path';
 import type { BoltAction } from '~/types/actions';
 import { createScopedLogger } from '~/utils/logger';
 import { unreachable } from '~/utils/unreachable';
 import type { ActionCallbackData } from './message-parser';
-import type { ITerminal } from '~/types/terminal';
 import type { BoltShell } from '~/utils/shell';
 
 const logger = createScopedLogger('ActionRunner');
@@ -45,7 +44,6 @@ export class ActionRunner {
   constructor(webcontainerPromise: Promise<WebContainer>, getShellTerminal: () => BoltShell) {
     this.#webcontainer = webcontainerPromise;
     this.#shellTerminal = getShellTerminal;
-
   }
 
   addAction(data: ActionCallbackData) {
@@ -88,19 +86,21 @@ export class ActionRunner {
     if (action.executed) {
       return;
     }
 
     if (isStreaming && action.type !== 'file') {
       return;
     }
 
     this.#updateAction(actionId, { ...action, ...data.action, executed: !isStreaming });
 
-    return this.#currentExecutionPromise = this.#currentExecutionPromise
+    // eslint-disable-next-line consistent-return
+    return (this.#currentExecutionPromise = this.#currentExecutionPromise
       .then(() => {
-        return this.#executeAction(actionId, isStreaming);
+        this.#executeAction(actionId, isStreaming);
       })
       .catch((error) => {
         console.error('Action failed:', error);
-      });
+      }));
   }
 
   async #executeAction(actionId: string, isStreaming: boolean = false) {
@@ -121,17 +121,23 @@ export class ActionRunner {
       case 'start': {
         // making the start app non blocking
-        this.#runStartAction(action).then(()=>this.#updateAction(actionId, { status: 'complete' }))
-          .catch(()=>this.#updateAction(actionId, { status: 'failed', error: 'Action failed' }))
-        // adding a delay to avoid any race condition between 2 start actions
-        // i am up for a better approch
-        await new Promise(resolve=>setTimeout(resolve,2000))
-        return
-        break;
+        this.#runStartAction(action)
+          .then(() => this.#updateAction(actionId, { status: 'complete' }))
+          .catch(() => this.#updateAction(actionId, { status: 'failed', error: 'Action failed' }));
+
+        /*
+         * adding a delay to avoid any race condition between 2 start actions
+         * i am up for a better approach
+         */
+        await new Promise((resolve) => setTimeout(resolve, 2000));
+
+        return;
       }
     }
 
-      this.#updateAction(actionId, { status: isStreaming ? 'running' : action.abortSignal.aborted ? 'aborted' : 'complete' });
+      this.#updateAction(actionId, {
+        status: isStreaming ? 'running' : action.abortSignal.aborted ? 'aborted' : 'complete',
+      });
     } catch (error) {
       this.#updateAction(actionId, { status: 'failed', error: 'Action failed' });
       logger.error(`[${action.type}]:Action failed\n\n`, error);
@@ -145,16 +151,19 @@ export class ActionRunner {
     if (action.type !== 'shell') {
       unreachable('Expected shell action');
     }
 
-    const shell = this.#shellTerminal()
-    await shell.ready()
+    const shell = this.#shellTerminal();
+    await shell.ready();
+
     if (!shell || !shell.terminal || !shell.process) {
       unreachable('Shell terminal not found');
     }
 
-    const resp = await shell.executeCommand(this.runnerId.get(), action.content)
-    logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`)
-    if (resp?.exitCode != 0) {
-      throw new Error("Failed To Execute Shell Command");
+    const resp = await shell.executeCommand(this.runnerId.get(), action.content);
+    logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`);
+
+    if (resp?.exitCode != 0) {
+      throw new Error('Failed To Execute Shell Command');
     }
   }
@@ -162,21 +171,26 @@ export class ActionRunner {
     if (action.type !== 'start') {
       unreachable('Expected shell action');
     }
 
+    if (!this.#shellTerminal) {
+      unreachable('Shell terminal not found');
+    }
+
-    const shell = this.#shellTerminal()
-    await shell.ready()
+    const shell = this.#shellTerminal();
+    await shell.ready();
+
     if (!shell || !shell.terminal || !shell.process) {
       unreachable('Shell terminal not found');
     }
 
-    const resp = await shell.executeCommand(this.runnerId.get(), action.content)
-    logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`)
+    const resp = await shell.executeCommand(this.runnerId.get(), action.content);
+    logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`);
+
     if (resp?.exitCode != 0) {
-      throw new Error("Failed To Start Application");
+      throw new Error('Failed To Start Application');
     }
 
-    return resp
+    return resp;
   }
 
   async #runFileAction(action: ActionState) {
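Reviewer note: the #currentExecutionPromise reassignment above is the whole serialization mechanism. Each new action chains onto the previous promise, so actions run one at a time in arrival order even though the caller returns immediately. The idiom in isolation, as a generic sketch:

// Generic promise-queue sketch, not the class above.
let queue: Promise<void> = Promise.resolve();

function enqueue(task: () => Promise<void>): Promise<void> {
  // Chain onto the tail; the catch keeps one failed task from wedging the queue.
  queue = queue.then(() => task()).catch((error: unknown) => console.error('Task failed:', error));
  return queue;
}

// enqueue(a); enqueue(b);  // b starts only after a settles, even when both are queued synchronously.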

File: app/lib/runtime/message-parser.ts

@@ -55,7 +55,7 @@ interface MessageState {
 export class StreamingMessageParser {
   #messages = new Map<string, MessageState>();
 
-  constructor(private _options: StreamingMessageParserOptions = {}) { }
+  constructor(private _options: StreamingMessageParserOptions = {}) {}
 
   parse(messageId: string, input: string) {
     let state = this.#messages.get(messageId);
@@ -120,20 +120,20 @@ export class StreamingMessageParser {
             i = closeIndex + ARTIFACT_ACTION_TAG_CLOSE.length;
           } else {
             if ('type' in currentAction && currentAction.type === 'file') {
-              let content = input.slice(i);
+              const content = input.slice(i);
 
               this._options.callbacks?.onActionStream?.({
                 artifactId: currentArtifact.id,
                 messageId,
                 actionId: String(state.actionId - 1),
                 action: {
-                  ...currentAction as FileAction,
+                  ...(currentAction as FileAction),
                   content,
                   filePath: currentAction.filePath,
                 },
               });
             }
 
             break;
           }
         } else {
@@ -272,7 +272,7 @@ export class StreamingMessageParser {
           }
 
           (actionAttributes as FileAction).filePath = filePath;
-        } else if (!(['shell', 'start'].includes(actionType))) {
+        } else if (!['shell', 'start'].includes(actionType)) {
           logger.warn(`Unknown action type '${actionType}'`);
         }

File: app/lib/stores/terminal.ts

@@ -7,7 +7,7 @@ import { coloredText } from '~/utils/terminal';
 export class TerminalStore {
   #webcontainer: Promise<WebContainer>;
   #terminals: Array<{ terminal: ITerminal; process: WebContainerProcess }> = [];
-  #boltTerminal = newBoltShellProcess()
+  #boltTerminal = newBoltShellProcess();
 
   showTerminal: WritableAtom<boolean> = import.meta.hot?.data.showTerminal ?? atom(true);
@@ -27,8 +27,8 @@ export class TerminalStore {
   }
 
   async attachBoltTerminal(terminal: ITerminal) {
     try {
-      let wc = await this.#webcontainer
-      await this.#boltTerminal.init(wc, terminal)
+      const wc = await this.#webcontainer;
+      await this.#boltTerminal.init(wc, terminal);
     } catch (error: any) {
       terminal.write(coloredText.red('Failed to spawn bolt shell\n\n') + error.message);
       return;

File: app/lib/stores/workbench.ts

@@ -11,9 +11,8 @@ import { PreviewsStore } from './previews';
 import { TerminalStore } from './terminal';
 import JSZip from 'jszip';
 import { saveAs } from 'file-saver';
-import { Octokit, type RestEndpointMethodTypes } from "@octokit/rest";
+import { Octokit, type RestEndpointMethodTypes } from '@octokit/rest';
 import * as nodePath from 'node:path';
-import type { WebContainerProcess } from '@webcontainer/api';
 import { extractRelativePath } from '~/utils/diff';
 
 export interface ArtifactState {
@@ -42,8 +41,7 @@ export class WorkbenchStore {
   unsavedFiles: WritableAtom<Set<string>> = import.meta.hot?.data.unsavedFiles ?? atom(new Set<string>());
   modifiedFiles = new Set<string>();
   artifactIdList: string[] = [];
-  #boltTerminal: { terminal: ITerminal; process: WebContainerProcess } | undefined;
-  #globalExecutionQueue=Promise.resolve();
+  #globalExecutionQueue = Promise.resolve();
 
   constructor() {
     if (import.meta.hot) {
       import.meta.hot.data.artifacts = this.artifacts;
@@ -54,7 +52,7 @@ export class WorkbenchStore {
   }
 
   addToExecutionQueue(callback: () => Promise<void>) {
-    this.#globalExecutionQueue=this.#globalExecutionQueue.then(()=>callback())
+    this.#globalExecutionQueue = this.#globalExecutionQueue.then(() => callback());
   }
 
   get previews() {
@@ -96,7 +94,6 @@ export class WorkbenchStore {
     this.#terminalStore.attachTerminal(terminal);
   }
 
   attachBoltTerminal(terminal: ITerminal) {
-
     this.#terminalStore.attachBoltTerminal(terminal);
   }
@@ -261,7 +258,8 @@ export class WorkbenchStore {
     this.artifacts.setKey(messageId, { ...artifact, ...state });
   }
 
   addAction(data: ActionCallbackData) {
-    this._addAction(data)
+    this._addAction(data);
+    // this.addToExecutionQueue(()=>this._addAction(data))
   }
 
   async _addAction(data: ActionCallbackData) {
@@ -277,11 +275,10 @@ export class WorkbenchStore {
   }
 
   runAction(data: ActionCallbackData, isStreaming: boolean = false) {
-    if(isStreaming) {
-      this._runAction(data, isStreaming)
-    }
-    else{
-      this.addToExecutionQueue(()=>this._runAction(data, isStreaming))
+    if (isStreaming) {
+      this._runAction(data, isStreaming);
+    } else {
+      this.addToExecutionQueue(() => this._runAction(data, isStreaming));
     }
   }
 
   async _runAction(data: ActionCallbackData, isStreaming: boolean = false) {
@@ -292,16 +289,21 @@ export class WorkbenchStore {
     if (!artifact) {
       unreachable('Artifact not found');
     }
+
     if (data.action.type === 'file') {
-      let wc = await webcontainer
+      const wc = await webcontainer;
       const fullPath = nodePath.join(wc.workdir, data.action.filePath);
+
       if (this.selectedFile.value !== fullPath) {
        this.setSelectedFile(fullPath);
       }
+
       if (this.currentView.value !== 'code') {
        this.currentView.set('code');
       }
+
      const doc = this.#editorStore.documents.get()[fullPath];
+
      if (!doc) {
        await artifact.runner.runAction(data, isStreaming);
      }
@@ -382,7 +384,6 @@ export class WorkbenchStore {
   }
 
   async pushToGitHub(repoName: string, githubUsername: string, ghToken: string) {
     try {
-
       // Get the GitHub auth token from environment variables
       const githubToken = ghToken;
@@ -397,10 +398,11 @@ export class WorkbenchStore {
       const octokit = new Octokit({ auth: githubToken });
 
       // Check if the repository already exists before creating it
-      let repo: RestEndpointMethodTypes["repos"]["get"]["response"]['data']
+      let repo: RestEndpointMethodTypes['repos']['get']['response']['data'];
 
       try {
-        let resp = await octokit.repos.get({ owner: owner, repo: repoName });
-        repo = resp.data
+        const resp = await octokit.repos.get({ owner, repo: repoName });
+        repo = resp.data;
       } catch (error) {
         if (error instanceof Error && 'status' in error && error.status === 404) {
           // Repository doesn't exist, so create a new one
@@ -418,6 +420,7 @@ export class WorkbenchStore {
       // Get all files
       const files = this.files.get();
+
       if (!files || Object.keys(files).length === 0) {
         throw new Error('No files found to push');
       }
@@ -434,7 +437,9 @@ export class WorkbenchStore {
             });
 
             return { path: extractRelativePath(filePath), sha: blob.sha };
           }
-        })
+
+          return null;
+        }),
       );
 
       const validBlobs = blobs.filter(Boolean); // Filter out any undefined blobs
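Reviewer note on the tail of this hunk: the map callback can now return null for entries that produce no blob, and the later filter(Boolean) drops them, which is why the explicit return was added. A trimmed sketch of the surrounding loop, assuming octokit, repo, files, and extractRelativePath are in scope and that file entries carry a { type, content } shape:

// Sketch only; the dirent shape and in-scope names are assumptions.
const blobs = await Promise.all(
  Object.entries(files).map(async ([filePath, dirent]: [string, any]) => {
    if (dirent?.type === 'file' && dirent.content) {
      // octokit.git.createBlob is a real @octokit/rest endpoint.
      const { data: blob } = await octokit.git.createBlob({
        owner: repo.owner.login,
        repo: repo.name,
        content: Buffer.from(dirent.content).toString('base64'),
        encoding: 'base64',
      });

      return { path: extractRelativePath(filePath), sha: blob.sha };
    }

    return null; // dropped by filter(Boolean) below
  }),
);

const validBlobs = blobs.filter(Boolean);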