merge with upstream/main
@@ -1,5 +1,7 @@
-// @ts-nocheck
-// Preventing TS checks with files presented in the video for a better presentation.
+/*
+ * @ts-nocheck
+ * Preventing TS checks with files presented in the video for a better presentation.
+ */
 import { env } from 'node:process';

 export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Record<string, string>) {
@@ -28,17 +30,19 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
     case 'OpenRouter':
       return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
     case 'Deepseek':
-      return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY
+      return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY;
     case 'Mistral':
-      return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
-    case "OpenAILike":
+      return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
+    case 'OpenAILike':
       return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
-    case "xAI":
+    case 'xAI':
       return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY;
-    case "Cohere":
+    case 'Cohere':
       return env.COHERE_API_KEY;
     case 'AzureOpenAI':
       return env.AZURE_OPENAI_API_KEY;
     default:
-      return "";
+      return '';
   }
 }
@@ -47,14 +51,17 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
     case 'OpenAILike':
       return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
     case 'LMStudio':
-      return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || "http://localhost:1234";
-    case 'Ollama':
-      let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
-      if (env.RUNNING_IN_DOCKER === 'true') {
-        baseUrl = baseUrl.replace("localhost", "host.docker.internal");
-      }
-      return baseUrl;
+      return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
+    case 'Ollama': {
+      let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || 'http://localhost:11434';
+
+      if (env.RUNNING_IN_DOCKER === 'true') {
+        baseUrl = baseUrl.replace('localhost', 'host.docker.internal');
+      }
+
+      return baseUrl;
+    }
     default:
-      return "";
+      return '';
   }
 }

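For reference, a standalone sketch of the Ollama base-URL fallback this hunk tidies up (not part of the diff; the env-var names mirror the ones above):

// Sketch: resolve the Ollama base URL the way getBaseURL does above.
// Inside Docker, "localhost" is the container itself, so it is rewritten to reach the host.
function resolveOllamaBaseUrl(env: Record<string, string | undefined>): string {
  let baseUrl = env.OLLAMA_API_BASE_URL || 'http://localhost:11434';

  if (env.RUNNING_IN_DOCKER === 'true') {
    baseUrl = baseUrl.replace('localhost', 'host.docker.internal');
  }

  return baseUrl;
}

// resolveOllamaBaseUrl({ RUNNING_IN_DOCKER: 'true' }) returns 'http://host.docker.internal:11434'
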
@@ -1,27 +1,29 @@
-// @ts-nocheck
-// Preventing TS checks with files presented in the video for a better presentation.
+/*
+ * @ts-nocheck
+ * Preventing TS checks with files presented in the video for a better presentation.
+ */
 import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
 import { createAnthropic } from '@ai-sdk/anthropic';
 import { createOpenAI } from '@ai-sdk/openai';
 import { createGoogleGenerativeAI } from '@ai-sdk/google';
 import { ollama } from 'ollama-ai-provider';
-import { createOpenRouter } from "@openrouter/ai-sdk-provider";
+import { createOpenRouter } from '@openrouter/ai-sdk-provider';
 import { createMistral } from '@ai-sdk/mistral';
-import { createCohere } from '@ai-sdk/cohere'
+import { createCohere } from '@ai-sdk/cohere';
 import type { LanguageModelV1 } from 'ai';

-export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ?
-  parseInt(process.env.DEFAULT_NUM_CTX, 10) :
-  32768;
+export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ? parseInt(process.env.DEFAULT_NUM_CTX, 10) : 32768;

-export function getAnthropicModel(apiKey: string, model: string) {
+type OptionalApiKey = string | undefined;
+
+export function getAnthropicModel(apiKey: OptionalApiKey, model: string) {
   const anthropic = createAnthropic({
     apiKey,
   });

   return anthropic(model);
 }

-export function getOpenAILikeModel(baseURL: string, apiKey: string, model: string) {
+export function getOpenAILikeModel(baseURL: string, apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL,
     apiKey,
@@ -30,7 +32,7 @@ export function getOpenAILikeModel(baseURL: string, apiKey: string, model: strin
   return openai(model);
 }

-export function getCohereAIModel(apiKey:string, model: string){
+export function getCohereAIModel(apiKey: OptionalApiKey, model: string) {
   const cohere = createCohere({
     apiKey,
   });
@@ -38,7 +40,7 @@ export function getCohereAIModel(apiKey:string, model: string){
   return cohere(model);
 }

-export function getOpenAIModel(apiKey: string, model: string) {
+export function getOpenAIModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     apiKey,
   });
@@ -46,15 +48,15 @@ export function getOpenAIModel(apiKey: string, model: string) {
   return openai(model);
 }

-export function getMistralModel(apiKey: string, model: string) {
+export function getMistralModel(apiKey: OptionalApiKey, model: string) {
   const mistral = createMistral({
-    apiKey
+    apiKey,
   });

   return mistral(model);
 }

-export function getGoogleModel(apiKey: string, model: string) {
+export function getGoogleModel(apiKey: OptionalApiKey, model: string) {
   const google = createGoogleGenerativeAI({
     apiKey,
   });
@@ -62,7 +64,7 @@ export function getGoogleModel(apiKey: string, model: string) {
   return google(model);
 }

-export function getGroqModel(apiKey: string, model: string) {
+export function getGroqModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.groq.com/openai/v1',
     apiKey,
@@ -71,7 +73,7 @@ export function getGroqModel(apiKey: string, model: string) {
   return openai(model);
 }

-export function getHuggingFaceModel(apiKey: string, model: string) {
+export function getHuggingFaceModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api-inference.huggingface.co/v1/',
     apiKey,
@@ -81,15 +83,16 @@ export function getHuggingFaceModel(apiKey: string, model: string) {
 }

 export function getOllamaModel(baseURL: string, model: string) {
-  let Ollama = ollama(model, {
+  const ollamaInstance = ollama(model, {
     numCtx: DEFAULT_NUM_CTX,
-  });
+  }) as LanguageModelV1 & { config: any };

-  Ollama.config.baseURL = `${baseURL}/api`;
-  return Ollama;
+  ollamaInstance.config.baseURL = `${baseURL}/api`;
+
+  return ollamaInstance;
 }

-export function getDeepseekModel(apiKey: string, model: string) {
+export function getDeepseekModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.deepseek.com/beta',
     apiKey,
@@ -98,9 +101,9 @@ export function getDeepseekModel(apiKey: string, model: string) {
   return openai(model);
 }

-export function getOpenRouterModel(apiKey: string, model: string) {
+export function getOpenRouterModel(apiKey: OptionalApiKey, model: string) {
   const openRouter = createOpenRouter({
-    apiKey
+    apiKey,
   });

   return openRouter.chat(model);
@@ -109,13 +112,13 @@ export function getOpenRouterModel(apiKey: string, model: string) {
 export function getLMStudioModel(baseURL: string, model: string) {
   const lmstudio = createOpenAI({
     baseUrl: `${baseURL}/v1`,
-    apiKey: "",
+    apiKey: '',
   });

   return lmstudio(model);
 }

-export function getXAIModel(apiKey: string, model: string) {
+export function getXAIModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.x.ai/v1',
     apiKey,
@@ -125,11 +128,13 @@ export function getXAIModel(apiKey: string, model: string) {
 }

 export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
-  let apiKey; // Declare first
-  let baseURL;
+  /*
+   * let apiKey; // Declare first
+   * let baseURL;
+   */

-  apiKey = getAPIKey(env, provider, apiKeys); // Then assign
-  baseURL = getBaseURL(env, provider);
+  const apiKey = getAPIKey(env, provider, apiKeys); // Then assign
+  const baseURL = getBaseURL(env, provider);

   switch (provider) {
     case 'Anthropic':
@@ -159,4 +164,4 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
     default:
       return getOllamaModel(baseURL, model);
   }
-}
+}

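Two small behavioural points fall out of this hunk: every provider factory now accepts an OptionalApiKey (string | undefined), and DEFAULT_NUM_CTX is collapsed to a one-liner without changing its fallback. A tiny standalone sketch of that context-size fallback (illustrative only, not the project's code):

// Sketch: DEFAULT_NUM_CTX falls back to 32768 when the env var is unset.
const parseNumCtx = (raw: string | undefined): number => (raw ? parseInt(raw, 10) : 32768);

console.log(parseNumCtx(undefined)); // 32768
console.log(parseNumCtx('24576')); // 24576
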
@@ -1,10 +1,11 @@
-// @ts-nocheck
-// Preventing TS checks with files presented in the video for a better presentation.
-import { streamText as _streamText, convertToCoreMessages } from 'ai';
+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-nocheck – TODO: Provider proper types
+
+import { convertToCoreMessages, streamText as _streamText } from 'ai';
 import { getModel } from '~/lib/.server/llm/model';
 import { MAX_TOKENS } from './constants';
 import { getSystemPrompt } from './prompts';
-import { MODEL_LIST, DEFAULT_MODEL, DEFAULT_PROVIDER, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
+import { DEFAULT_MODEL, DEFAULT_PROVIDER, MODEL_LIST, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';

 interface ToolResult<Name extends string, Args, Result> {
   toolCallId: string;
@@ -26,41 +27,41 @@ export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;

 function extractPropertiesFromMessage(message: Message): { model: string; provider: string; content: string } {
   const textContent = Array.isArray(message.content)
-    ? message.content.find(item => item.type === 'text')?.text || ''
+    ? message.content.find((item) => item.type === 'text')?.text || ''
     : message.content;

   const modelMatch = textContent.match(MODEL_REGEX);
   const providerMatch = textContent.match(PROVIDER_REGEX);

-  // Extract model
-  // const modelMatch = message.content.match(MODEL_REGEX);
+  /*
+   * Extract model
+   * const modelMatch = message.content.match(MODEL_REGEX);
+   */
   const model = modelMatch ? modelMatch[1] : DEFAULT_MODEL;

-  // Extract provider
-  // const providerMatch = message.content.match(PROVIDER_REGEX);
+  /*
+   * Extract provider
+   * const providerMatch = message.content.match(PROVIDER_REGEX);
+   */
   const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER;

   const cleanedContent = Array.isArray(message.content)
-    ? message.content.map(item => {
-      if (item.type === 'text') {
-        return {
-          type: 'text',
-          text: item.text?.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '')
-        };
-      }
-      return item; // Preserve image_url and other types as is
-    })
+    ? message.content.map((item) => {
+        if (item.type === 'text') {
+          return {
+            type: 'text',
+            text: item.text?.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, ''),
+          };
+        }
+
+        return item; // Preserve image_url and other types as is
+      })
     : textContent.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '');

   return { model, provider, content: cleanedContent };
 }

-export function streamText(
-  messages: Messages,
-  env: Env,
-  options?: StreamingOptions,
-  apiKeys?: Record<string, string>
-) {
+export function streamText(messages: Messages, env: Env, options?: StreamingOptions, apiKeys?: Record<string, string>) {
   let currentModel = DEFAULT_MODEL;
   let currentProvider = DEFAULT_PROVIDER;

@@ -76,15 +77,13 @@ export function streamText(

       return { ...message, content };
     }

     return message;
   });

   const modelDetails = MODEL_LIST.find((m) => m.name === currentModel);

-  const dynamicMaxTokens =
-    modelDetails && modelDetails.maxTokenAllowed
-      ? modelDetails.maxTokenAllowed
-      : MAX_TOKENS;
+  const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;

   return _streamText({
     ...options,

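The multi-line dynamicMaxTokens ternary is collapsed here without changing its fallback logic; a standalone sketch of that selection (the MAX_TOKENS value below is an assumed placeholder, the real one comes from './constants'):

interface ModelInfo {
  name: string;
  maxTokenAllowed?: number;
}

const MAX_TOKENS = 8192; // assumed placeholder for the value imported from './constants'

// Sketch: use the per-model limit when the model list provides one, else the global cap.
function resolveMaxTokens(modelDetails: ModelInfo | undefined): number {
  return modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;
}

console.log(resolveMaxTokens({ name: 'some-model', maxTokenAllowed: 200000 })); // 200000
console.log(resolveMaxTokens(undefined)); // 8192
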
@@ -161,46 +161,48 @@ async function getUrlIds(db: IDBDatabase): Promise<string[]> {

 export async function forkChat(db: IDBDatabase, chatId: string, messageId: string): Promise<string> {
   const chat = await getMessages(db, chatId);
-  if (!chat) throw new Error('Chat not found');

-  // Find the index of the message to fork at
-  const messageIndex = chat.messages.findIndex(msg => msg.id === messageId);
-  if (messageIndex === -1) throw new Error('Message not found');
-
-  // Get messages up to and including the selected message
-  const messages = chat.messages.slice(0, messageIndex + 1);
-
-  // Generate new IDs
-  const newId = await getNextId(db);
-  const urlId = await getUrlId(db, newId);
-
-  // Create the forked chat
-  await setMessages(
-    db,
-    newId,
-    messages,
-    urlId,
-    chat.description ? `${chat.description} (fork)` : 'Forked chat'
-  );
-
-  return urlId;
-}
-
-export async function duplicateChat(db: IDBDatabase, id: string): Promise<string> {
-  const chat = await getMessages(db, id);
   if (!chat) {
     throw new Error('Chat not found');
   }

+  // Find the index of the message to fork at
+  const messageIndex = chat.messages.findIndex((msg) => msg.id === messageId);
+
+  if (messageIndex === -1) {
+    throw new Error('Message not found');
+  }
+
+  // Get messages up to and including the selected message
+  const messages = chat.messages.slice(0, messageIndex + 1);
+
+  return createChatFromMessages(db, chat.description ? `${chat.description} (fork)` : 'Forked chat', messages);
+}
+
+export async function duplicateChat(db: IDBDatabase, id: string): Promise<string> {
+  const chat = await getMessages(db, id);
+
+  if (!chat) {
+    throw new Error('Chat not found');
+  }
+
+  return createChatFromMessages(db, `${chat.description || 'Chat'} (copy)`, chat.messages);
+}
+
+export async function createChatFromMessages(
+  db: IDBDatabase,
+  description: string,
+  messages: Message[],
+): Promise<string> {
   const newId = await getNextId(db);
   const newUrlId = await getUrlId(db, newId); // Get a new urlId for the duplicated chat
+
   await setMessages(
     db,
     newId,
-    chat.messages,
+    messages,
     newUrlId, // Use the new urlId
-    `${chat.description || 'Chat'} (copy)`
+    description,
   );
+
   return newUrlId; // Return the urlId instead of id for navigation

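forkChat and duplicateChat now both delegate to the new createChatFromMessages helper, so the only fork-specific logic left is slicing the history at the selected message. A standalone sketch of that slice (types are illustrative, not the project's):

interface Msg {
  id: string;
}

// Keep messages up to and including the selected one, as forkChat does above.
function messagesForFork<T extends Msg>(messages: T[], messageId: string): T[] {
  const messageIndex = messages.findIndex((msg) => msg.id === messageId);

  if (messageIndex === -1) {
    throw new Error('Message not found');
  }

  return messages.slice(0, messageIndex + 1);
}

// messagesForFork([{ id: 'a' }, { id: 'b' }, { id: 'c' }], 'b') returns [{ id: 'a' }, { id: 'b' }]
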
@@ -4,7 +4,15 @@ import { atom } from 'nanostores';
 import type { Message } from 'ai';
 import { toast } from 'react-toastify';
 import { workbenchStore } from '~/lib/stores/workbench';
-import { getMessages, getNextId, getUrlId, openDatabase, setMessages, duplicateChat } from './db';
+import {
+  getMessages,
+  getNextId,
+  getUrlId,
+  openDatabase,
+  setMessages,
+  duplicateChat,
+  createChatFromMessages,
+} from './db';

 export interface ChatHistoryItem {
   id: string;
@@ -99,7 +107,7 @@ export function useChatHistory() {

       await setMessages(db, chatId.get() as string, messages, urlId, description.get());
     },
-    duplicateCurrentChat: async (listItemId:string) => {
+    duplicateCurrentChat: async (listItemId: string) => {
       if (!db || (!mixedId && !listItemId)) {
         return;
       }
@@ -110,8 +118,48 @@ export function useChatHistory() {
         toast.success('Chat duplicated successfully');
       } catch (error) {
         toast.error('Failed to duplicate chat');
         console.log(error);
       }
-    }
+    },
+    importChat: async (description: string, messages: Message[]) => {
+      if (!db) {
+        return;
+      }
+
+      try {
+        const newId = await createChatFromMessages(db, description, messages);
+        window.location.href = `/chat/${newId}`;
+        toast.success('Chat imported successfully');
+      } catch (error) {
+        if (error instanceof Error) {
+          toast.error('Failed to import chat: ' + error.message);
+        } else {
+          toast.error('Failed to import chat');
+        }
+      }
+    },
+    exportChat: async (id = urlId) => {
+      if (!db || !id) {
+        return;
+      }
+
+      const chat = await getMessages(db, id);
+      const chatData = {
+        messages: chat.messages,
+        description: chat.description,
+        exportDate: new Date().toISOString(),
+      };
+
+      const blob = new Blob([JSON.stringify(chatData, null, 2)], { type: 'application/json' });
+      const url = URL.createObjectURL(blob);
+      const a = document.createElement('a');
+      a.href = url;
+      a.download = `chat-${new Date().toISOString()}.json`;
+      document.body.appendChild(a);
+      a.click();
+      document.body.removeChild(a);
+      URL.revokeObjectURL(url);
+    },
   };
 }

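The new exportChat handler writes the current chat as a JSON download, and importChat feeds the same shape back through createChatFromMessages; a sketch of just the payload, derived from the code above:

import type { Message } from 'ai';

interface ExportedChat {
  messages: Message[];
  description?: string;
  exportDate: string; // ISO-8601 timestamp from new Date().toISOString()
}

// Sketch: serialize a chat the same way exportChat does before creating the Blob.
function serializeChat(messages: Message[], description?: string): string {
  const chatData: ExportedChat = { messages, description, exportDate: new Date().toISOString() };

  return JSON.stringify(chatData, null, 2);
}
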
@@ -1,11 +1,10 @@
-import { WebContainer, type WebContainerProcess } from '@webcontainer/api';
+import { WebContainer } from '@webcontainer/api';
 import { atom, map, type MapStore } from 'nanostores';
 import * as nodePath from 'node:path';
 import type { BoltAction } from '~/types/actions';
 import { createScopedLogger } from '~/utils/logger';
 import { unreachable } from '~/utils/unreachable';
 import type { ActionCallbackData } from './message-parser';
 import type { ITerminal } from '~/types/terminal';
 import type { BoltShell } from '~/utils/shell';

 const logger = createScopedLogger('ActionRunner');
@@ -45,7 +44,6 @@ export class ActionRunner {
   constructor(webcontainerPromise: Promise<WebContainer>, getShellTerminal: () => BoltShell) {
     this.#webcontainer = webcontainerPromise;
     this.#shellTerminal = getShellTerminal;
-
   }

   addAction(data: ActionCallbackData) {
@@ -88,15 +86,16 @@
     if (action.executed) {
       return;
     }

     if (isStreaming && action.type !== 'file') {
       return;
     }

     this.#updateAction(actionId, { ...action, ...data.action, executed: !isStreaming });

-    return this.#currentExecutionPromise = this.#currentExecutionPromise
+    this.#currentExecutionPromise = this.#currentExecutionPromise
       .then(() => {
-        return this.#executeAction(actionId, isStreaming);
+        this.#executeAction(actionId, isStreaming);
       })
       .catch((error) => {
         console.error('Action failed:', error);
@@ -121,17 +120,23 @@
       case 'start': {
         // making the start app non blocking

-        this.#runStartAction(action).then(()=>this.#updateAction(actionId, { status: 'complete' }))
-          .catch(()=>this.#updateAction(actionId, { status: 'failed', error: 'Action failed' }))
-        // adding a delay to avoid any race condition between 2 start actions
-        // i am up for a better approch
-        await new Promise(resolve=>setTimeout(resolve,2000))
-        return
-        break;
+        this.#runStartAction(action)
+          .then(() => this.#updateAction(actionId, { status: 'complete' }))
+          .catch(() => this.#updateAction(actionId, { status: 'failed', error: 'Action failed' }));
+
+        /*
+         * adding a delay to avoid any race condition between 2 start actions
+         * i am up for a better approach
+         */
+        await new Promise((resolve) => setTimeout(resolve, 2000));
+
+        return;
       }
     }

-    this.#updateAction(actionId, { status: isStreaming ? 'running' : action.abortSignal.aborted ? 'aborted' : 'complete' });
+    this.#updateAction(actionId, {
+      status: isStreaming ? 'running' : action.abortSignal.aborted ? 'aborted' : 'complete',
+    });
   } catch (error) {
     this.#updateAction(actionId, { status: 'failed', error: 'Action failed' });
     logger.error(`[${action.type}]:Action failed\n\n`, error);
@@ -145,16 +150,19 @@
     if (action.type !== 'shell') {
       unreachable('Expected shell action');
     }
-    const shell = this.#shellTerminal()
-    await shell.ready()
+
+    const shell = this.#shellTerminal();
+    await shell.ready();
+
     if (!shell || !shell.terminal || !shell.process) {
       unreachable('Shell terminal not found');
     }
-    const resp = await shell.executeCommand(this.runnerId.get(), action.content)
-    logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`)
-    if (resp?.exitCode != 0) {
-      throw new Error("Failed To Execute Shell Command");
+
+    const resp = await shell.executeCommand(this.runnerId.get(), action.content);
+    logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`);
+
+    if (resp?.exitCode != 0) {
+      throw new Error('Failed To Execute Shell Command');
     }
   }

@@ -162,21 +170,26 @@
     if (action.type !== 'start') {
       unreachable('Expected shell action');
     }

     if (!this.#shellTerminal) {
       unreachable('Shell terminal not found');
     }
-    const shell = this.#shellTerminal()
-    await shell.ready()
+
+    const shell = this.#shellTerminal();
+    await shell.ready();
+
     if (!shell || !shell.terminal || !shell.process) {
       unreachable('Shell terminal not found');
     }
-    const resp = await shell.executeCommand(this.runnerId.get(), action.content)
-    logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`)
+
+    const resp = await shell.executeCommand(this.runnerId.get(), action.content);
+    logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`);
+
     if (resp?.exitCode != 0) {
-      throw new Error("Failed To Start Application");
+      throw new Error('Failed To Start Application');
     }
-    return resp
+
+    return resp;
   }

   async #runFileAction(action: ActionState) {

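The start action is deliberately not awaited: its promise only flips the action status later, while the runner itself just sleeps two seconds to avoid racing a second start action. A stripped-down sketch of that fire-and-record pattern (names here are illustrative, not the class API):

// Sketch: kick off a long-running task, record its eventual status, but only block for a short delay.
async function runStartNonBlocking(
  start: () => Promise<void>,
  setStatus: (status: 'complete' | 'failed') => void,
): Promise<void> {
  start()
    .then(() => setStatus('complete'))
    .catch(() => setStatus('failed'));

  // Fixed delay to avoid a race between two consecutive start actions, as the diff's comment notes.
  await new Promise((resolve) => setTimeout(resolve, 2000));
}
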
@@ -55,7 +55,7 @@ interface MessageState {
 export class StreamingMessageParser {
   #messages = new Map<string, MessageState>();

-  constructor(private _options: StreamingMessageParserOptions = {}) { }
+  constructor(private _options: StreamingMessageParserOptions = {}) {}

   parse(messageId: string, input: string) {
     let state = this.#messages.get(messageId);
@@ -120,20 +120,20 @@
             i = closeIndex + ARTIFACT_ACTION_TAG_CLOSE.length;
           } else {
             if ('type' in currentAction && currentAction.type === 'file') {
-              let content = input.slice(i);
+              const content = input.slice(i);

               this._options.callbacks?.onActionStream?.({
                 artifactId: currentArtifact.id,
                 messageId,
                 actionId: String(state.actionId - 1),
                 action: {
-                  ...currentAction as FileAction,
+                  ...(currentAction as FileAction),
                   content,
                   filePath: currentAction.filePath,
                 },
               });
             }

             break;
           }
         } else {
@@ -272,7 +272,7 @@
       }

       (actionAttributes as FileAction).filePath = filePath;
-    } else if (!(['shell', 'start'].includes(actionType))) {
+    } else if (!['shell', 'start'].includes(actionType)) {
       logger.warn(`Unknown action type '${actionType}'`);
     }

@@ -80,10 +80,6 @@ export class FilesStore {
     this.#modifiedFiles.clear();
   }

-  markFileAsNew(filePath: string) {
-    this.#modifiedFiles.set(filePath, '');
-  }
-
   async saveFile(filePath: string, content: string) {
     const webcontainer = await this.#webcontainer;

@@ -216,9 +212,5 @@ function isBinaryFile(buffer: Uint8Array | undefined) {
  * array buffer.
  */
 function convertToBuffer(view: Uint8Array): Buffer {
-  const buffer = new Uint8Array(view.buffer, view.byteOffset, view.byteLength);
-
-  Object.setPrototypeOf(buffer, Buffer.prototype);
-
-  return buffer as Buffer;
+  return Buffer.from(view.buffer, view.byteOffset, view.byteLength);
 }

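convertToBuffer now uses Buffer.from(arrayBuffer, byteOffset, length), which creates a Buffer over the same memory as the incoming Uint8Array instead of patching its prototype; a quick sketch of the shared-memory behaviour:

const view = new Uint8Array([1, 2, 3, 4]);
const buf = Buffer.from(view.buffer, view.byteOffset, view.byteLength);

view[0] = 42;
console.log(buf[0]); // 42: the Buffer shares the underlying ArrayBuffer, so no bytes are copied
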
@@ -7,7 +7,7 @@ import { coloredText } from '~/utils/terminal';
 export class TerminalStore {
   #webcontainer: Promise<WebContainer>;
   #terminals: Array<{ terminal: ITerminal; process: WebContainerProcess }> = [];
-  #boltTerminal = newBoltShellProcess()
+  #boltTerminal = newBoltShellProcess();

   showTerminal: WritableAtom<boolean> = import.meta.hot?.data.showTerminal ?? atom(true);

@@ -27,8 +27,8 @@
   }
   async attachBoltTerminal(terminal: ITerminal) {
     try {
-      let wc = await this.#webcontainer
-      await this.#boltTerminal.init(wc, terminal)
+      const wc = await this.#webcontainer;
+      await this.#boltTerminal.init(wc, terminal);
     } catch (error: any) {
       terminal.write(coloredText.red('Failed to spawn bolt shell\n\n') + error.message);
       return;

@@ -11,9 +11,8 @@ import { PreviewsStore } from './previews';
 import { TerminalStore } from './terminal';
 import JSZip from 'jszip';
 import { saveAs } from 'file-saver';
-import { Octokit, type RestEndpointMethodTypes } from "@octokit/rest";
+import { Octokit, type RestEndpointMethodTypes } from '@octokit/rest';
 import * as nodePath from 'node:path';
-import type { WebContainerProcess } from '@webcontainer/api';
 import { extractRelativePath } from '~/utils/diff';

 export interface ArtifactState {
@@ -32,7 +31,6 @@ export type WorkbenchViewType = 'code' | 'preview';
 export class WorkbenchStore {
   #previewsStore = new PreviewsStore(webcontainer);
   #filesStore = new FilesStore(webcontainer);
-
   #editorStore = new EditorStore(this.#filesStore);
   #terminalStore = new TerminalStore(webcontainer);

@@ -43,7 +41,6 @@ export class WorkbenchStore {
   unsavedFiles: WritableAtom<Set<string>> = import.meta.hot?.data.unsavedFiles ?? atom(new Set<string>());
   modifiedFiles = new Set<string>();
   artifactIdList: string[] = [];
-  #boltTerminal: { terminal: ITerminal; process: WebContainerProcess } | undefined;
   #globalExecutionQueue = Promise.resolve();
   constructor() {
     if (import.meta.hot) {
@@ -55,7 +52,7 @@ export class WorkbenchStore {
   }

   addToExecutionQueue(callback: () => Promise<void>) {
-    this.#globalExecutionQueue = this.#globalExecutionQueue.then(() => callback())
+    this.#globalExecutionQueue = this.#globalExecutionQueue.then(() => callback());
   }

   get previews() {
@@ -97,7 +94,6 @@ export class WorkbenchStore {
     this.#terminalStore.attachTerminal(terminal);
   }
   attachBoltTerminal(terminal: ITerminal) {
-
     this.#terminalStore.attachBoltTerminal(terminal);
   }

@@ -262,7 +258,8 @@ export class WorkbenchStore {
     this.artifacts.setKey(messageId, { ...artifact, ...state });
   }
   addAction(data: ActionCallbackData) {
-    this._addAction(data)
+    this._addAction(data);
+
     // this.addToExecutionQueue(()=>this._addAction(data))
   }
   async _addAction(data: ActionCallbackData) {
@@ -279,10 +276,9 @@ export class WorkbenchStore {

   runAction(data: ActionCallbackData, isStreaming: boolean = false) {
     if (isStreaming) {
-      this._runAction(data, isStreaming)
-    }
-    else {
-      this.addToExecutionQueue(() => this._runAction(data, isStreaming))
+      this._runAction(data, isStreaming);
+    } else {
+      this.addToExecutionQueue(() => this._runAction(data, isStreaming));
     }
   }
   async _runAction(data: ActionCallbackData, isStreaming: boolean = false) {
@@ -293,16 +289,21 @@ export class WorkbenchStore {
     if (!artifact) {
       unreachable('Artifact not found');
     }

     if (data.action.type === 'file') {
-      let wc = await webcontainer
+      const wc = await webcontainer;
       const fullPath = nodePath.join(wc.workdir, data.action.filePath);
+
       if (this.selectedFile.value !== fullPath) {
         this.setSelectedFile(fullPath);
       }
+
       if (this.currentView.value !== 'code') {
         this.currentView.set('code');
       }
+
       const doc = this.#editorStore.documents.get()[fullPath];
+
       if (!doc) {
         await artifact.runner.runAction(data, isStreaming);
       }
@@ -382,63 +383,7 @@ export class WorkbenchStore {
     return syncedFiles;
   }

-  async uploadFilesFromDisk(sourceHandle: FileSystemDirectoryHandle) {
-    const loadedFiles = [];
-    const wc = await webcontainer;
-    const newFiles = {};
-
-    const processDirectory = async (handle: FileSystemDirectoryHandle, currentPath: string = '') => {
-      const entries = await Array.fromAsync(handle.values());
-
-      for (const entry of entries) {
-        const entryPath = currentPath ? `${currentPath}/${entry.name}` : entry.name;
-        const fullPath = `/${entryPath}`;
-
-        if (entry.kind === 'directory') {
-          await wc.fs.mkdir(fullPath, { recursive: true });
-          const subDirHandle = await handle.getDirectoryHandle(entry.name);
-          await processDirectory(subDirHandle, entryPath);
-        } else {
-          const file = await entry.getFile();
-          const content = await file.text();
-
-          // Write to WebContainer
-          await wc.fs.writeFile(fullPath, content);
-
-          // Mark file as new
-          this.#filesStore.markFileAsNew(fullPath);
-
-          // Update the files store with the current content
-          this.files.setKey(fullPath, { type: 'file', content, isBinary: false });
-
-          // Collect for editor store with actual content
-          newFiles[fullPath] = { type: 'file', content, isBinary: false };
-          loadedFiles.push(entryPath);
-        }
-      }
-    }
-
-    await processDirectory(sourceHandle);
-
-    return loadedFiles;
-  }
-
-  async refreshFiles() {
-    // Clear old state
-    this.modifiedFiles = new Set<string>();
-    this.artifactIdList = [];
-
-    // Reset stores
-    this.#filesStore = new FilesStore(webcontainer);
-    this.#editorStore = new EditorStore(this.#filesStore);
-
-    // Update UI state
-    this.currentView.set('code');
-    this.unsavedFiles.set(new Set<string>());
-  }
-
   async pushToGitHub(repoName: string, githubUsername: string, ghToken: string) {
-
     try {
       // Get the GitHub auth token from environment variables
       const githubToken = ghToken;
@@ -453,10 +398,11 @@ export class WorkbenchStore {
       const octokit = new Octokit({ auth: githubToken });

       // Check if the repository already exists before creating it
-      let repo: RestEndpointMethodTypes["repos"]["get"]["response"]['data']
+      let repo: RestEndpointMethodTypes['repos']['get']['response']['data'];
+
       try {
-        let resp = await octokit.repos.get({ owner: owner, repo: repoName });
-        repo = resp.data
+        const resp = await octokit.repos.get({ owner, repo: repoName });
+        repo = resp.data;
       } catch (error) {
         if (error instanceof Error && 'status' in error && error.status === 404) {
           // Repository doesn't exist, so create a new one
@@ -474,6 +420,7 @@ export class WorkbenchStore {

       // Get all files
       const files = this.files.get();
+
       if (!files || Object.keys(files).length === 0) {
         throw new Error('No files found to push');
       }
@@ -490,7 +437,9 @@ export class WorkbenchStore {
           });
           return { path: extractRelativePath(filePath), sha: blob.sha };
         }
-        })
+
+          return null;
+        }),
       );

       const validBlobs = blobs.filter(Boolean); // Filter out any undefined blobs
@@ -542,21 +491,6 @@ export class WorkbenchStore {
       console.error('Error pushing to GitHub:', error instanceof Error ? error.message : String(error));
     }
   }
-
-  async markFileAsModified(filePath: string) {
-    const file = this.#filesStore.getFile(filePath);
-    if (file?.type === 'file') {
-      // First collect all original content
-      const originalContent = file.content;
-      console.log(`Processing ${filePath}:`, originalContent);
-
-      // Then save modifications
-      await this.saveFile(filePath, originalContent);
-    }
-  }
-
-
-
 }

 export const workbenchStore = new WorkbenchStore();

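The push flow first tries octokit.repos.get and only creates the repository when that call fails with a 404; the creation call itself is truncated in this hunk, so the sketch below assumes octokit.repos.createForAuthenticatedUser, which is a guess rather than something the diff shows:

import { Octokit } from '@octokit/rest';

async function getOrCreateRepo(octokit: Octokit, owner: string, repoName: string) {
  try {
    const resp = await octokit.repos.get({ owner, repo: repoName });

    return resp.data;
  } catch (error) {
    if (error instanceof Error && 'status' in error && (error as { status?: number }).status === 404) {
      // Repository doesn't exist, so create a new one (assumed call; not shown in the hunk).
      const created = await octokit.repos.createForAuthenticatedUser({ name: repoName });

      return created.data;
    }

    throw error;
  }
}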