fix: remove monorepo

Author: Sam Denty
Date: 2024-09-25 19:54:09 +01:00
parent d364a6f774
commit 6fb59d2bc5
137 changed files with 194 additions and 1229 deletions

app/lib/.server/auth.ts (Normal file, 41 lines)

@@ -0,0 +1,41 @@
import { json, redirect, type LoaderFunctionArgs, type TypedResponse } from '@remix-run/cloudflare';
import { isAuthenticated, type Session } from './sessions';
type RequestArgs = Pick<LoaderFunctionArgs, 'request' | 'context'>;
export async function loadWithAuth<T extends RequestArgs>(
args: T,
handler: (args: T, session: Session) => Promise<Response>,
) {
return handleWithAuth(args, handler, (response) => redirect('/login', response));
}
export async function actionWithAuth<T extends RequestArgs>(
args: T,
handler: (args: T, session: Session) => Promise<TypedResponse>,
) {
return await handleWithAuth(args, handler, (response) => json({}, { status: 401, ...response }));
}
async function handleWithAuth<T extends RequestArgs, R extends TypedResponse>(
args: T,
handler: (args: T, session: Session) => Promise<R>,
fallback: (partial: ResponseInit) => R,
) {
const { request, context } = args;
const { session, response } = await isAuthenticated(request, context.cloudflare.env);
if (session == null && !import.meta.env.VITE_DISABLE_AUTH) {
return fallback(response);
}
const handlerResponse = await handler(args, session || {});
if (response) {
for (const [key, value] of Object.entries(response.headers)) {
handlerResponse.headers.append(key, value);
}
}
return handlerResponse;
}

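For context, a route module would consume these wrappers along the following lines. This is an illustrative sketch only, not part of the commit: the route, its response shape, and the `~/lib/.server/auth` import path (derived from the `~` → `app/` alias used elsewhere in this diff) are assumptions.

import { json, type LoaderFunctionArgs } from '@remix-run/cloudflare';
import { loadWithAuth } from '~/lib/.server/auth';

// hypothetical loader: unauthenticated requests are redirected to /login,
// authenticated ones (or any request when VITE_DISABLE_AUTH is set) reach the handler
export async function loader(args: LoaderFunctionArgs) {
  return loadWithAuth(args, async (_args, session) => {
    return json({ userId: session.userId ?? null });
  });
}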

@@ -0,0 +1,9 @@
import { env } from 'node:process';
export function getAPIKey(cloudflareEnv: Env) {
/**
* The `cloudflareEnv` is only used when deployed or when previewing locally.
* In development the environment variables are available through `env`.
*/
return env.ANTHROPIC_API_KEY || cloudflareEnv.ANTHROPIC_API_KEY;
}


@@ -0,0 +1,5 @@
// see https://docs.anthropic.com/en/docs/about-claude/models
export const MAX_TOKENS = 8192;
// limits the number of model responses that can be returned in a single request
export const MAX_RESPONSE_SEGMENTS = 2;


@@ -0,0 +1,9 @@
import { createAnthropic } from '@ai-sdk/anthropic';
export function getAnthropicModel(apiKey: string) {
const anthropic = createAnthropic({
apiKey,
});
return anthropic('claude-3-5-sonnet-20240620');
}


@@ -0,0 +1,284 @@
import { MODIFICATIONS_TAG_NAME, WORK_DIR } from '~/utils/constants';
import { allowedHTMLElements } from '~/utils/markdown';
import { stripIndents } from '~/utils/stripIndent';
export const getSystemPrompt = (cwd: string = WORK_DIR) => `
You are Bolt, an expert AI assistant and exceptional senior software developer with vast knowledge across multiple programming languages, frameworks, and best practices.
<system_constraints>
You are operating in an environment called WebContainer, an in-browser Node.js runtime that emulates a Linux system to some degree. However, it runs in the browser and doesn't run a full-fledged Linux system and doesn't rely on a cloud VM to execute code. All code is executed in the browser. It does come with a shell that emulates zsh. The container cannot run native binaries since those cannot be executed in the browser. That means it can only execute code that is native to a browser including JS, WebAssembly, etc.
The shell comes with \`python\` and \`python3\` binaries, but they are LIMITED TO THE PYTHON STANDARD LIBRARY ONLY. This means:
- There is NO \`pip\` support! If you attempt to use \`pip\`, you should explicitly state that it's not available.
- CRITICAL: Third-party libraries cannot be installed or imported.
- Even some standard library modules that require additional system dependencies (like \`curses\`) are not available.
- Only modules from the core Python standard library can be used.
Additionally, there is no \`g++\` or any C/C++ compiler available. WebContainer CANNOT run native binaries or compile C/C++ code!
Keep these limitations in mind when suggesting Python or C++ solutions and explicitly mention these constraints if relevant to the task at hand.
WebContainer has the ability to run a web server, but it requires using an npm package (e.g., Vite, servor, serve, http-server) or the Node.js APIs to implement a web server.
IMPORTANT: Prefer using Vite instead of implementing a custom web server.
IMPORTANT: Git is NOT available.
IMPORTANT: Prefer writing Node.js scripts instead of shell scripts. The environment doesn't fully support shell scripts, so use Node.js for scripting tasks whenever possible!
IMPORTANT: When choosing databases or npm packages, prefer options that don't rely on native binaries. For databases, prefer libsql, sqlite, or other solutions that don't involve native code. WebContainer CANNOT execute arbitrary native binaries.
Available shell commands: cat, chmod, cp, echo, hostname, kill, ln, ls, mkdir, mv, ps, pwd, rm, rmdir, xxd, alias, cd, clear, curl, env, false, getconf, head, sort, tail, touch, true, uptime, which, code, jq, loadenv, node, python3, wasm, xdg-open, command, exit, export, source
</system_constraints>
<code_formatting_info>
Use 2 spaces for code indentation
</code_formatting_info>
<message_formatting_info>
You can make the output pretty by using only the following available HTML elements: ${allowedHTMLElements.map((tagName) => `<${tagName}>`).join(', ')}
</message_formatting_info>
<diff_spec>
For user-made file modifications, a \`<${MODIFICATIONS_TAG_NAME}>\` section will appear at the start of the user message. It will contain either \`<diff>\` or \`<file>\` elements for each modified file:
- \`<diff path="/some/file/path.ext">\`: Contains GNU unified diff format changes
- \`<file path="/some/file/path.ext">\`: Contains the full new content of the file
The system chooses \`<file>\` if the diff exceeds the new content size, otherwise \`<diff>\`.
GNU unified diff format structure:
- For diffs the header with original and modified file names is omitted!
- Changed sections start with @@ -X,Y +A,B @@ where:
- X: Original file starting line
- Y: Original file line count
- A: Modified file starting line
- B: Modified file line count
- (-) lines: Removed from original
- (+) lines: Added in modified version
- Unmarked lines: Unchanged context
Example:
<${MODIFICATIONS_TAG_NAME}>
<diff path="/home/project/src/main.js">
@@ -2,7 +2,10 @@
return a + b;
}
-console.log('Hello, World!');
+console.log('Hello, Bolt!');
+
function greet() {
- return 'Greetings!';
+ return 'Greetings!!';
}
+
+console.log('The End');
</diff>
<file path="/home/project/package.json">
// full file content here
</file>
</${MODIFICATIONS_TAG_NAME}>
</diff_spec>
<artifact_info>
Bolt creates a SINGLE, comprehensive artifact for each project. The artifact contains all necessary steps and components, including:
- Shell commands to run including dependencies to install using a package manager (NPM)
- Files to create and their contents
- Folders to create if necessary
<artifact_instructions>
1. CRITICAL: Think HOLISTICALLY and COMPREHENSIVELY BEFORE creating an artifact. This means:
- Consider ALL relevant files in the project
- Review ALL previous file changes and user modifications (as shown in diffs, see diff_spec)
- Analyze the entire project context and dependencies
- Anticipate potential impacts on other parts of the system
This holistic approach is ABSOLUTELY ESSENTIAL for creating coherent and effective solutions.
2. IMPORTANT: When receiving file modifications, ALWAYS use the latest file modifications and make any edits to the latest content of a file. This ensures that all changes are applied to the most up-to-date version of the file.
3. The current working directory is \`${cwd}\`.
4. Wrap the content in opening and closing \`<boltArtifact>\` tags. These tags contain more specific \`<boltAction>\` elements.
5. Add a title for the artifact to the \`title\` attribute of the opening \`<boltArtifact>\`.
6. Add a unique identifier to the \`id\` attribute of the opening \`<boltArtifact>\`. For updates, reuse the prior identifier. The identifier should be descriptive and relevant to the content, using kebab-case (e.g., "example-code-snippet"). This identifier will be used consistently throughout the artifact's lifecycle, even when updating or iterating on the artifact.
7. Use \`<boltAction>\` tags to define specific actions to perform.
8. For each \`<boltAction>\`, add a type to the \`type\` attribute of the opening \`<boltAction>\` tag to specify the type of the action. Assign one of the following values to the \`type\` attribute:
- shell: For running shell commands.
- When using \`npx\`, ALWAYS provide the \`--yes\` flag.
- When running multiple shell commands, use \`&&\` to run them sequentially.
- ULTRA IMPORTANT: Do NOT re-run a dev command if there is one that starts a dev server and new dependencies were installed or files updated! If a dev server has started already, assume that installing dependencies will be executed in a different process and will be picked up by the dev server.
- file: For writing new files or updating existing files. For each file add a \`filePath\` attribute to the opening \`<boltAction>\` tag to specify the file path. The content of the file artifact is the file contents. All file paths MUST BE relative to the current working directory.
9. The order of the actions is VERY IMPORTANT. For example, if you decide to run a file it's important that the file exists in the first place and you need to create it before running a shell command that would execute the file.
10. ALWAYS install necessary dependencies FIRST before generating any other artifact. If that requires a \`package.json\` then you should create that first!
IMPORTANT: Add all required dependencies to the \`package.json\` already and try to avoid \`npm i <pkg>\` if possible!
11. CRITICAL: Always provide the FULL, updated content of the artifact. This means:
- Include ALL code, even if parts are unchanged
- NEVER use placeholders like "// rest of the code remains the same..." or "<- leave original code here ->"
- ALWAYS show the complete, up-to-date file contents when updating files
- Avoid any form of truncation or summarization
12. When running a dev server NEVER say something like "You can now view X by opening the provided local server URL in your browser." The preview will be opened automatically or by the user manually!
13. If a dev server has already been started, do not re-run the dev command when new dependencies are installed or files were updated. Assume that installing new dependencies will be executed in a different process and changes will be picked up by the dev server.
14. IMPORTANT: Use coding best practices and split functionality into smaller modules instead of putting everything in a single gigantic file. Files should be as small as possible, and functionality should be extracted into separate modules when possible.
- Ensure code is clean, readable, and maintainable.
- Adhere to proper naming conventions and consistent formatting.
- Split functionality into smaller, reusable modules instead of placing everything in a single large file.
- Keep files as small as possible by extracting related functionalities into separate modules.
- Use imports to connect these modules together effectively.
</artifact_instructions>
</artifact_info>
NEVER use the word "artifact". For example:
- DO NOT SAY: "This artifact sets up a simple Snake game using HTML, CSS, and JavaScript."
- INSTEAD SAY: "We set up a simple Snake game using HTML, CSS, and JavaScript."
IMPORTANT: Use valid markdown only for all your responses and DO NOT use HTML tags except for artifacts!
ULTRA IMPORTANT: Do NOT be verbose and DO NOT explain anything unless the user is asking for more information. That is VERY important.
ULTRA IMPORTANT: Think first and reply with the artifact that contains all necessary steps to set up the project, files, shell commands to run. It is SUPER IMPORTANT to respond with this first.
Here are some examples of correct usage of artifacts:
<examples>
<example>
<user_query>Can you help me create a JavaScript function to calculate the factorial of a number?</user_query>
<assistant_response>
Certainly, I can help you create a JavaScript function to calculate the factorial of a number.
<boltArtifact id="factorial-function" title="JavaScript Factorial Function">
<boltAction type="file" filePath="index.js">
function factorial(n) {
...
}
...
</boltAction>
<boltAction type="shell">
node index.js
</boltAction>
</boltArtifact>
</assistant_response>
</example>
<example>
<user_query>Build a snake game</user_query>
<assistant_response>
Certainly! I'd be happy to help you build a snake game using JavaScript and HTML5 Canvas. This will be a basic implementation that you can later expand upon. Let's create the game step by step.
<boltArtifact id="snake-game" title="Snake Game in HTML and JavaScript">
<boltAction type="file" filePath="package.json">
{
"name": "snake",
"scripts": {
"dev": "vite"
}
...
}
</boltAction>
<boltAction type="shell">
npm install --save-dev vite
</boltAction>
<boltAction type="file" filePath="index.html">
...
</boltAction>
<boltAction type="shell">
npm run dev
</boltAction>
</boltArtifact>
Now you can play the Snake game by opening the provided local server URL in your browser. Use the arrow keys to control the snake. Eat the red food to grow and increase your score. The game ends if you hit the wall or your own tail.
</assistant_response>
</example>
<example>
<user_query>Make a bouncing ball with real gravity using React</user_query>
<assistant_response>
Certainly! I'll create a bouncing ball with real gravity using React. We'll use the react-spring library for physics-based animations.
<boltArtifact id="bouncing-ball-react" title="Bouncing Ball with Gravity in React">
<boltAction type="file" filePath="package.json">
{
"name": "bouncing-ball",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "vite build",
"preview": "vite preview"
},
"dependencies": {
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-spring": "^9.7.1"
},
"devDependencies": {
"@types/react": "^18.0.28",
"@types/react-dom": "^18.0.11",
"@vitejs/plugin-react": "^3.1.0",
"vite": "^4.2.0"
}
}
</boltAction>
<boltAction type="file" filePath="index.html">
...
</boltAction>
<boltAction type="file" filePath="src/main.jsx">
...
</boltAction>
<boltAction type="file" filePath="src/index.css">
...
</boltAction>
<boltAction type="file" filePath="src/App.jsx">
...
</boltAction>
<boltAction type="shell">
npm run dev
</boltAction>
</boltArtifact>
You can now view the bouncing ball animation in the preview. The ball will start falling from the top of the screen and bounce realistically when it hits the bottom.
</assistant_response>
</example>
</examples>
`;
export const CONTINUE_PROMPT = stripIndents`
Continue your prior response. IMPORTANT: Immediately begin from where you left off without any interruptions.
Do not repeat any content, including artifact and action tags.
`;


@@ -0,0 +1,35 @@
import { streamText as _streamText, convertToCoreMessages } from 'ai';
import { getAPIKey } from '~/lib/.server/llm/api-key';
import { getAnthropicModel } from '~/lib/.server/llm/model';
import { MAX_TOKENS } from './constants';
import { getSystemPrompt } from './prompts';
interface ToolResult<Name extends string, Args, Result> {
toolCallId: string;
toolName: Name;
args: Args;
result: Result;
}
interface Message {
role: 'user' | 'assistant';
content: string;
toolInvocations?: ToolResult<string, unknown, unknown>[];
}
export type Messages = Message[];
export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
export function streamText(messages: Messages, env: Env, options?: StreamingOptions) {
return _streamText({
model: getAnthropicModel(getAPIKey(env)),
system: getSystemPrompt(),
maxTokens: MAX_TOKENS,
headers: {
'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
},
messages: convertToCoreMessages(messages),
...options,
});
}


@@ -0,0 +1,66 @@
export default class SwitchableStream extends TransformStream {
private _controller: TransformStreamDefaultController | null = null;
private _currentReader: ReadableStreamDefaultReader | null = null;
private _switches = 0;
constructor() {
let controllerRef: TransformStreamDefaultController | undefined;
super({
start(controller) {
controllerRef = controller;
},
});
if (controllerRef === undefined) {
throw new Error('Controller not properly initialized');
}
this._controller = controllerRef;
}
async switchSource(newStream: ReadableStream) {
if (this._currentReader) {
await this._currentReader.cancel();
}
this._currentReader = newStream.getReader();
this._pumpStream();
this._switches++;
}
private async _pumpStream() {
if (!this._currentReader || !this._controller) {
throw new Error('Stream is not properly initialized');
}
try {
while (true) {
const { done, value } = await this._currentReader.read();
if (done) {
break;
}
this._controller.enqueue(value);
}
} catch (error) {
console.log(error);
this._controller.error(error);
}
}
close() {
if (this._currentReader) {
this._currentReader.cancel();
}
this._controller?.terminate();
}
get switches() {
return this._switches;
}
}

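For context, the streamText wrapper and SwitchableStream above would typically be wired together in a chat API route roughly as follows. This sketch is illustrative only: the route shape, the import paths for the two unnamed files, and the `textStream` property on the AI SDK v3 result are assumptions rather than code from this commit.

import { type ActionFunctionArgs } from '@remix-run/cloudflare';
import { streamText, type Messages } from '~/lib/.server/llm/stream-text';
import SwitchableStream from '~/lib/.server/llm/switchable-stream';

// hypothetical action: streams the model output to the client while keeping the
// option to switch to a continuation stream (see MAX_RESPONSE_SEGMENTS) mid-response
export async function action({ request, context }: ActionFunctionArgs) {
  const { messages } = (await request.json()) as { messages: Messages };
  const stream = new SwitchableStream();
  const result = await streamText(messages, context.cloudflare.env);
  stream.switchSource(result.textStream.pipeThrough(new TextEncoderStream()));
  return new Response(stream.readable, {
    status: 200,
    headers: { 'Content-Type': 'text/plain; charset=utf-8' },
  });
}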
app/lib/.server/sessions.ts (Normal file, 240 lines)

@@ -0,0 +1,240 @@
import { createCookieSessionStorage, redirect, type Session as RemixSession } from '@remix-run/cloudflare';
import { decodeJwt } from 'jose';
import { CLIENT_ID, CLIENT_ORIGIN } from '~/lib/constants';
import { request as doRequest } from '~/lib/fetch';
import { logger } from '~/utils/logger';
import type { Identity } from '~/lib/analytics';
import { decrypt, encrypt } from '~/lib/crypto';
const DEV_SESSION_SECRET = import.meta.env.DEV ? 'LZQMrERo3Ewn/AbpSYJ9aw==' : undefined;
const DEV_PAYLOAD_SECRET = import.meta.env.DEV ? '2zAyrhjcdFeXk0YEDzilMXbdrGAiR+8ACIUgFNfjLaI=' : undefined;
const TOKEN_KEY = 't';
const EXPIRES_KEY = 'e';
const USER_ID_KEY = 'u';
const SEGMENT_KEY = 's';
const AVATAR_KEY = 'a';
const ENCRYPTED_KEY = 'd';
interface PrivateSession {
[TOKEN_KEY]: string;
[EXPIRES_KEY]: number;
[USER_ID_KEY]?: string;
[SEGMENT_KEY]?: string;
}
interface PublicSession {
[ENCRYPTED_KEY]: string;
[AVATAR_KEY]?: string;
}
export interface Session {
userId?: string;
segmentWriteKey?: string;
avatar?: string;
}
export async function isAuthenticated(request: Request, env: Env) {
const { session, sessionStorage } = await getSession(request, env);
const sessionData: PrivateSession | null = await decryptSessionData(env, session.get(ENCRYPTED_KEY));
const header = async (cookie: Promise<string>) => ({ headers: { 'Set-Cookie': await cookie } });
const destroy = () => header(sessionStorage.destroySession(session));
if (sessionData?.[TOKEN_KEY] == null) {
return { session: null, response: await destroy() };
}
const expiresAt = sessionData[EXPIRES_KEY] ?? 0;
if (Date.now() < expiresAt) {
return { session: getSessionData(session, sessionData) };
}
logger.debug('Renewing token');
let data: Awaited<ReturnType<typeof refreshToken>> | null = null;
try {
data = await refreshToken(sessionData[TOKEN_KEY]);
} catch (error) {
// we can ignore the error here because it's handled below
logger.error(error);
}
if (data != null) {
const expiresAt = cookieExpiration(data.expires_in, data.created_at);
const newSessionData = { ...sessionData, [EXPIRES_KEY]: expiresAt };
const encryptedData = await encryptSessionData(env, newSessionData);
session.set(ENCRYPTED_KEY, encryptedData);
return {
session: getSessionData(session, newSessionData),
response: await header(sessionStorage.commitSession(session)),
};
} else {
return { session: null, response: await destroy() };
}
}
export async function createUserSession(
request: Request,
env: Env,
tokens: { refresh: string; expires_in: number; created_at: number },
identity?: Identity,
): Promise<ResponseInit> {
const { session, sessionStorage } = await getSession(request, env);
const expiresAt = cookieExpiration(tokens.expires_in, tokens.created_at);
const sessionData: PrivateSession = {
[TOKEN_KEY]: tokens.refresh,
[EXPIRES_KEY]: expiresAt,
[USER_ID_KEY]: identity?.userId ?? undefined,
[SEGMENT_KEY]: identity?.segmentWriteKey ?? undefined,
};
const encryptedData = await encryptSessionData(env, sessionData);
session.set(ENCRYPTED_KEY, encryptedData);
session.set(AVATAR_KEY, identity?.avatar);
return {
headers: {
'Set-Cookie': await sessionStorage.commitSession(session, {
maxAge: 3600 * 24 * 30, // 1 month
}),
},
};
}
function getSessionStorage(cloudflareEnv: Env) {
return createCookieSessionStorage<PublicSession>({
cookie: {
name: '__session',
httpOnly: true,
path: '/',
secrets: [DEV_SESSION_SECRET || cloudflareEnv.SESSION_SECRET],
secure: import.meta.env.PROD,
},
});
}
export async function logout(request: Request, env: Env) {
const { session, sessionStorage } = await getSession(request, env);
const sessionData = await decryptSessionData(env, session.get(ENCRYPTED_KEY));
if (sessionData) {
revokeToken(sessionData[TOKEN_KEY]);
}
return redirect('/login', {
headers: {
'Set-Cookie': await sessionStorage.destroySession(session),
},
});
}
export function validateAccessToken(access: string) {
const jwtPayload = decodeJwt(access);
return jwtPayload.bolt === true;
}
function getSessionData(session: RemixSession<PublicSession>, data: PrivateSession): Session {
return {
userId: data?.[USER_ID_KEY],
segmentWriteKey: data?.[SEGMENT_KEY],
avatar: session.get(AVATAR_KEY),
};
}
async function getSession(request: Request, env: Env) {
const sessionStorage = getSessionStorage(env);
const cookie = request.headers.get('Cookie');
return { session: await sessionStorage.getSession(cookie), sessionStorage };
}
async function refreshToken(refresh: string): Promise<{ expires_in: number; created_at: number }> {
const response = await doRequest(`${CLIENT_ORIGIN}/oauth/token`, {
method: 'POST',
body: urlParams({ grant_type: 'refresh_token', client_id: CLIENT_ID, refresh_token: refresh }),
headers: {
'content-type': 'application/x-www-form-urlencoded',
},
});
const body = await response.json();
if (!response.ok) {
throw new Error(`Unable to refresh token\n${response.status} ${JSON.stringify(body)}`);
}
const { access_token: access } = body;
if (!validateAccessToken(access)) {
throw new Error('User is no longer authorized for Bolt');
}
return body;
}
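// convert the token lifetime into an absolute epoch timestamp in milliseconds, expiring 10 minutes early to leave room for a refresh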
function cookieExpiration(expireIn: number, createdAt: number) {
return (expireIn + createdAt - 10 * 60) * 1000;
}
async function revokeToken(refresh?: string) {
if (refresh == null) {
return;
}
try {
const response = await doRequest(`${CLIENT_ORIGIN}/oauth/revoke`, {
method: 'POST',
body: urlParams({
token: refresh,
token_type_hint: 'refresh_token',
client_id: CLIENT_ID,
}),
headers: {
'content-type': 'application/x-www-form-urlencoded',
},
});
if (!response.ok) {
throw new Error(`Unable to revoke token: ${response.status}`);
}
} catch (error) {
logger.debug(error);
return;
}
}
function urlParams(data: Record<string, string>) {
const encoded = new URLSearchParams();
for (const [key, value] of Object.entries(data)) {
encoded.append(key, value);
}
return encoded;
}
async function decryptSessionData(env: Env, encryptedData?: string) {
const decryptedData = encryptedData ? await decrypt(payloadSecret(env), encryptedData) : undefined;
const sessionData: PrivateSession | null = JSON.parse(decryptedData ?? 'null');
return sessionData;
}
async function encryptSessionData(env: Env, sessionData: PrivateSession) {
return await encrypt(payloadSecret(env), JSON.stringify(sessionData));
}
function payloadSecret(env: Env) {
return DEV_PAYLOAD_SECRET || env.PAYLOAD_SECRET;
}

app/lib/analytics.ts (Normal file, 38 lines)

@@ -0,0 +1,38 @@
import { CLIENT_ORIGIN } from '~/lib/constants';
import { request as doRequest } from '~/lib/fetch';
export interface Identity {
userId?: string | null;
guestId?: string | null;
segmentWriteKey?: string | null;
avatar?: string;
}
const MESSAGE_PREFIX = 'Bolt';
export enum AnalyticsTrackEvent {
MessageSent = `${MESSAGE_PREFIX} Message Sent`,
MessageComplete = `${MESSAGE_PREFIX} Message Complete`,
ChatCreated = `${MESSAGE_PREFIX} Chat Created`,
}
export async function identifyUser(access: string): Promise<Identity | undefined> {
const response = await doRequest(`${CLIENT_ORIGIN}/api/identify`, {
method: 'GET',
headers: { authorization: `Bearer ${access}` },
});
const body = await response.json();
if (!response.ok) {
return undefined;
}
// convert numerical identity values to strings
const stringified = Object.entries(body).map(([key, value]) => [
key,
typeof value === 'number' ? value.toString() : value,
]);
return Object.fromEntries(stringified) as Identity;
}

app/lib/auth.ts (Normal file, 4 lines)

@@ -0,0 +1,4 @@
export function forgetAuth() {
// FIXME: use dedicated method
localStorage.removeItem('__wc_api_tokens__');
}

app/lib/constants.ts (Normal file, 2 lines)

@@ -0,0 +1,2 @@
export const CLIENT_ID = 'bolt';
export const CLIENT_ORIGIN = import.meta.env.VITE_CLIENT_ORIGIN ?? 'https://stackblitz.com';

app/lib/crypto.ts (Normal file, 58 lines)

@@ -0,0 +1,58 @@
const encoder = new TextEncoder();
const decoder = new TextDecoder();
const IV_LENGTH = 16;
export async function encrypt(key: string, data: string) {
const iv = crypto.getRandomValues(new Uint8Array(IV_LENGTH));
const cryptoKey = await getKey(key);
const ciphertext = await crypto.subtle.encrypt(
{
name: 'AES-CBC',
iv,
},
cryptoKey,
encoder.encode(data),
);
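// bundle layout: ciphertext first, then the 16-byte IV appended at the end; decrypt() reads the IV back from the tail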
const bundle = new Uint8Array(IV_LENGTH + ciphertext.byteLength);
bundle.set(new Uint8Array(ciphertext));
bundle.set(iv, ciphertext.byteLength);
return decodeBase64(bundle);
}
export async function decrypt(key: string, payload: string) {
const bundle = encodeBase64(payload);
const iv = new Uint8Array(bundle.buffer, bundle.byteLength - IV_LENGTH);
const ciphertext = new Uint8Array(bundle.buffer, 0, bundle.byteLength - IV_LENGTH);
const cryptoKey = await getKey(key);
const plaintext = await crypto.subtle.decrypt(
{
name: 'AES-CBC',
iv,
},
cryptoKey,
ciphertext,
);
return decoder.decode(plaintext);
}
async function getKey(key: string) {
return await crypto.subtle.importKey('raw', encodeBase64(key), { name: 'AES-CBC' }, false, ['encrypt', 'decrypt']);
}
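// note: despite its name, this converts raw bytes into a base64 string (the inverse of encodeBase64 below)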
function decodeBase64(encoded: Uint8Array) {
const byteChars = Array.from(encoded, (byte) => String.fromCodePoint(byte));
return btoa(byteChars.join(''));
}
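// note: despite its name, this decodes a base64 string back into raw bytes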
function encodeBase64(data: string) {
return Uint8Array.from(atob(data), (ch) => ch.codePointAt(0)!);
}

app/lib/fetch.ts (Normal file, 14 lines)

@@ -0,0 +1,14 @@
type CommonRequest = Omit<RequestInit, 'body'> & { body?: URLSearchParams };
export async function request(url: string, init?: CommonRequest) {
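// dev only: go through node-fetch with TLS certificate verification disabled (e.g. for self-signed local endpoints); production uses the platform fetch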
if (import.meta.env.DEV) {
const nodeFetch = await import('node-fetch');
const https = await import('node:https');
const agent = url.startsWith('https') ? new https.Agent({ rejectUnauthorized: false }) : undefined;
return nodeFetch.default(url, { ...init, agent });
}
return fetch(url, init);
}

app/lib/hooks/index.ts (Normal file, 4 lines)

@@ -0,0 +1,4 @@
export * from './useMessageParser';
export * from './usePromptEnhancer';
export * from './useShortcuts';
export * from './useSnapScroll';


@@ -0,0 +1,66 @@
import type { Message } from 'ai';
import { useCallback, useState } from 'react';
import { StreamingMessageParser } from '~/lib/runtime/message-parser';
import { workbenchStore } from '~/lib/stores/workbench';
import { createScopedLogger } from '~/utils/logger';
const logger = createScopedLogger('useMessageParser');
const messageParser = new StreamingMessageParser({
callbacks: {
onArtifactOpen: (data) => {
logger.trace('onArtifactOpen', data);
workbenchStore.showWorkbench.set(true);
workbenchStore.addArtifact(data);
},
onArtifactClose: (data) => {
logger.trace('onArtifactClose');
workbenchStore.updateArtifact(data, { closed: true });
},
onActionOpen: (data) => {
logger.trace('onActionOpen', data.action);
// we only add shell actions when the close tag got parsed because only then do we have the content
if (data.action.type !== 'shell') {
workbenchStore.addAction(data);
}
},
onActionClose: (data) => {
logger.trace('onActionClose', data.action);
if (data.action.type === 'shell') {
workbenchStore.addAction(data);
}
workbenchStore.runAction(data);
},
},
});
export function useMessageParser() {
const [parsedMessages, setParsedMessages] = useState<{ [key: number]: string }>({});
const parseMessages = useCallback((messages: Message[], isLoading: boolean) => {
let reset = false;
if (import.meta.env.DEV && !isLoading) {
reset = true;
messageParser.reset();
}
for (const [index, message] of messages.entries()) {
if (message.role === 'assistant') {
const newParsedContent = messageParser.parse(message.id, message.content);
setParsedMessages((prevParsed) => ({
...prevParsed,
[index]: !reset ? (prevParsed[index] || '') + newParsedContent : newParsedContent,
}));
}
}
}, []);
return { parsedMessages, parseMessages };
}


@@ -0,0 +1,71 @@
import { useState } from 'react';
import { createScopedLogger } from '~/utils/logger';
const logger = createScopedLogger('usePromptEnhancement');
export function usePromptEnhancer() {
const [enhancingPrompt, setEnhancingPrompt] = useState(false);
const [promptEnhanced, setPromptEnhanced] = useState(false);
const resetEnhancer = () => {
setEnhancingPrompt(false);
setPromptEnhanced(false);
};
const enhancePrompt = async (input: string, setInput: (value: string) => void) => {
setEnhancingPrompt(true);
setPromptEnhanced(false);
const response = await fetch('/api/enhancer', {
method: 'POST',
body: JSON.stringify({
message: input,
}),
});
const reader = response.body?.getReader();
const originalInput = input;
if (reader) {
const decoder = new TextDecoder();
let _input = '';
let _error;
try {
setInput('');
while (true) {
const { value, done } = await reader.read();
if (done) {
break;
}
_input += decoder.decode(value);
logger.trace('Set input', _input);
setInput(_input);
}
} catch (error) {
_error = error;
setInput(originalInput);
} finally {
if (_error) {
logger.error(_error);
}
setEnhancingPrompt(false);
setPromptEnhanced(true);
setTimeout(() => {
setInput(_input);
});
}
}
};
return { enhancingPrompt, promptEnhanced, enhancePrompt, resetEnhancer };
}


@@ -0,0 +1,59 @@
import { useStore } from '@nanostores/react';
import { useEffect } from 'react';
import { shortcutsStore, type Shortcuts } from '~/lib/stores/settings';
class ShortcutEventEmitter {
#emitter = new EventTarget();
dispatch(type: keyof Shortcuts) {
this.#emitter.dispatchEvent(new Event(type));
}
on(type: keyof Shortcuts, cb: VoidFunction) {
this.#emitter.addEventListener(type, cb);
return () => {
this.#emitter.removeEventListener(type, cb);
};
}
}
export const shortcutEventEmitter = new ShortcutEventEmitter();
export function useShortcuts(): void {
const shortcuts = useStore(shortcutsStore);
useEffect(() => {
const handleKeyDown = (event: KeyboardEvent): void => {
const { key, ctrlKey, shiftKey, altKey, metaKey } = event;
for (const name in shortcuts) {
const shortcut = shortcuts[name as keyof Shortcuts];
if (
shortcut.key.toLowerCase() === key.toLowerCase() &&
(shortcut.ctrlOrMetaKey
? ctrlKey || metaKey
: (shortcut.ctrlKey === undefined || shortcut.ctrlKey === ctrlKey) &&
(shortcut.metaKey === undefined || shortcut.metaKey === metaKey)) &&
(shortcut.shiftKey === undefined || shortcut.shiftKey === shiftKey) &&
(shortcut.altKey === undefined || shortcut.altKey === altKey)
) {
shortcutEventEmitter.dispatch(name as keyof Shortcuts);
event.preventDefault();
event.stopPropagation();
shortcut.action();
break;
}
}
};
window.addEventListener('keydown', handleKeyDown);
return () => {
window.removeEventListener('keydown', handleKeyDown);
};
}, [shortcuts]);
}


@@ -0,0 +1,52 @@
import { useRef, useCallback } from 'react';
export function useSnapScroll() {
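// messageRef re-scrolls to the bottom whenever the observed content grows; scrollRef tracks user scrolling and disables auto-scroll once the user moves more than ~10px away from the bottom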
const autoScrollRef = useRef(true);
const scrollNodeRef = useRef<HTMLDivElement>();
const onScrollRef = useRef<() => void>();
const observerRef = useRef<ResizeObserver>();
const messageRef = useCallback((node: HTMLDivElement | null) => {
if (node) {
const observer = new ResizeObserver(() => {
if (autoScrollRef.current && scrollNodeRef.current) {
const { scrollHeight, clientHeight } = scrollNodeRef.current;
const scrollTarget = scrollHeight - clientHeight;
scrollNodeRef.current.scrollTo({
top: scrollTarget,
});
}
});
observerRef.current = observer;
observer.observe(node);
} else {
observerRef.current?.disconnect();
observerRef.current = undefined;
}
}, []);
const scrollRef = useCallback((node: HTMLDivElement | null) => {
if (node) {
onScrollRef.current = () => {
const { scrollTop, scrollHeight, clientHeight } = node;
const scrollTarget = scrollHeight - clientHeight;
autoScrollRef.current = Math.abs(scrollTop - scrollTarget) <= 10;
};
node.addEventListener('scroll', onScrollRef.current);
scrollNodeRef.current = node;
} else {
if (onScrollRef.current) {
scrollNodeRef.current?.removeEventListener('scroll', onScrollRef.current);
}
scrollNodeRef.current = undefined;
onScrollRef.current = undefined;
}
}, []);
return [messageRef, scrollRef];
}


@@ -0,0 +1,6 @@
import { useStore } from '@nanostores/react';
import { description } from './useChatHistory';
export function ChatDescription() {
return useStore(description);
}

app/lib/persistence/db.ts (Normal file, 160 lines)

@@ -0,0 +1,160 @@
import type { Message } from 'ai';
import { createScopedLogger } from '~/utils/logger';
import type { ChatHistoryItem } from './useChatHistory';
const logger = createScopedLogger('ChatHistory');
// this is used at the top level and never rejects
export async function openDatabase(): Promise<IDBDatabase | undefined> {
return new Promise((resolve) => {
const request = indexedDB.open('boltHistory', 1);
request.onupgradeneeded = (event: IDBVersionChangeEvent) => {
const db = (event.target as IDBOpenDBRequest).result;
if (!db.objectStoreNames.contains('chats')) {
const store = db.createObjectStore('chats', { keyPath: 'id' });
store.createIndex('id', 'id', { unique: true });
store.createIndex('urlId', 'urlId', { unique: true });
}
};
request.onsuccess = (event: Event) => {
resolve((event.target as IDBOpenDBRequest).result);
};
request.onerror = (event: Event) => {
resolve(undefined);
logger.error((event.target as IDBOpenDBRequest).error);
};
});
}
export async function getAll(db: IDBDatabase): Promise<ChatHistoryItem[]> {
return new Promise((resolve, reject) => {
const transaction = db.transaction('chats', 'readonly');
const store = transaction.objectStore('chats');
const request = store.getAll();
request.onsuccess = () => resolve(request.result as ChatHistoryItem[]);
request.onerror = () => reject(request.error);
});
}
export async function setMessages(
db: IDBDatabase,
id: string,
messages: Message[],
urlId?: string,
description?: string,
): Promise<void> {
return new Promise((resolve, reject) => {
const transaction = db.transaction('chats', 'readwrite');
const store = transaction.objectStore('chats');
const request = store.put({
id,
messages,
urlId,
description,
timestamp: new Date().toISOString(),
});
request.onsuccess = () => resolve();
request.onerror = () => reject(request.error);
});
}
export async function getMessages(db: IDBDatabase, id: string): Promise<ChatHistoryItem> {
return (await getMessagesById(db, id)) || (await getMessagesByUrlId(db, id));
}
export async function getMessagesByUrlId(db: IDBDatabase, id: string): Promise<ChatHistoryItem> {
return new Promise((resolve, reject) => {
const transaction = db.transaction('chats', 'readonly');
const store = transaction.objectStore('chats');
const index = store.index('urlId');
const request = index.get(id);
request.onsuccess = () => resolve(request.result as ChatHistoryItem);
request.onerror = () => reject(request.error);
});
}
export async function getMessagesById(db: IDBDatabase, id: string): Promise<ChatHistoryItem> {
return new Promise((resolve, reject) => {
const transaction = db.transaction('chats', 'readonly');
const store = transaction.objectStore('chats');
const request = store.get(id);
request.onsuccess = () => resolve(request.result as ChatHistoryItem);
request.onerror = () => reject(request.error);
});
}
export async function deleteById(db: IDBDatabase, id: string): Promise<void> {
return new Promise((resolve, reject) => {
const transaction = db.transaction('chats', 'readwrite');
const store = transaction.objectStore('chats');
const request = store.delete(id);
request.onsuccess = () => resolve(undefined);
request.onerror = () => reject(request.error);
});
}
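// chat ids are numeric strings; the next id is one greater than the highest existing key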
export async function getNextId(db: IDBDatabase): Promise<string> {
return new Promise((resolve, reject) => {
const transaction = db.transaction('chats', 'readonly');
const store = transaction.objectStore('chats');
const request = store.getAllKeys();
request.onsuccess = () => {
const highestId = request.result.reduce((max, id) => Math.max(+max, +id), 0);
resolve(String(+highestId + 1));
};
request.onerror = () => reject(request.error);
});
}
export async function getUrlId(db: IDBDatabase, id: string): Promise<string> {
const idList = await getUrlIds(db);
if (!idList.includes(id)) {
return id;
} else {
let i = 2;
while (idList.includes(`${id}-${i}`)) {
i++;
}
return `${id}-${i}`;
}
}
async function getUrlIds(db: IDBDatabase): Promise<string[]> {
return new Promise((resolve, reject) => {
const transaction = db.transaction('chats', 'readonly');
const store = transaction.objectStore('chats');
const idList: string[] = [];
const request = store.openCursor();
request.onsuccess = (event: Event) => {
const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result;
if (cursor) {
idList.push(cursor.value.urlId);
cursor.continue();
} else {
resolve(idList);
}
};
request.onerror = () => {
reject(request.error);
};
});
}


@@ -0,0 +1,2 @@
export * from './db';
export * from './useChatHistory';


@@ -0,0 +1,109 @@
import { useLoaderData, useNavigate } from '@remix-run/react';
import { useState, useEffect } from 'react';
import { atom } from 'nanostores';
import type { Message } from 'ai';
import { toast } from 'react-toastify';
import { workbenchStore } from '~/lib/stores/workbench';
import { getMessages, getNextId, getUrlId, openDatabase, setMessages } from './db';
export interface ChatHistoryItem {
id: string;
urlId?: string;
description?: string;
messages: Message[];
timestamp: string;
}
const persistenceEnabled = !import.meta.env.VITE_DISABLE_PERSISTENCE;
export const db = persistenceEnabled ? await openDatabase() : undefined;
export const chatId = atom<string | undefined>(undefined);
export const description = atom<string | undefined>(undefined);
export function useChatHistory() {
const navigate = useNavigate();
const { id: mixedId } = useLoaderData<{ id?: string }>();
const [initialMessages, setInitialMessages] = useState<Message[]>([]);
const [ready, setReady] = useState<boolean>(false);
const [urlId, setUrlId] = useState<string | undefined>();
useEffect(() => {
if (!db) {
setReady(true);
if (persistenceEnabled) {
toast.error(`Chat persistence is unavailable`);
}
return;
}
if (mixedId) {
getMessages(db, mixedId)
.then((storedMessages) => {
if (storedMessages && storedMessages.messages.length > 0) {
setInitialMessages(storedMessages.messages);
setUrlId(storedMessages.urlId);
description.set(storedMessages.description);
chatId.set(storedMessages.id);
} else {
navigate(`/`, { replace: true });
}
setReady(true);
})
.catch((error) => {
toast.error(error.message);
});
}
}, []);
return {
ready: !mixedId || ready,
initialMessages,
storeMessageHistory: async (messages: Message[]) => {
if (!db || messages.length === 0) {
return;
}
const { firstArtifact } = workbenchStore;
if (!urlId && firstArtifact?.id) {
const urlId = await getUrlId(db, firstArtifact.id);
navigateChat(urlId);
setUrlId(urlId);
}
if (!description.get() && firstArtifact?.title) {
description.set(firstArtifact?.title);
}
if (initialMessages.length === 0 && !chatId.get()) {
const nextId = await getNextId(db);
chatId.set(nextId);
if (!urlId) {
navigateChat(nextId);
}
}
await setMessages(db, chatId.get() as string, messages, urlId, description.get());
},
};
}
function navigateChat(nextId: string) {
/**
* FIXME: Using the intended navigate function causes a rerender for <Chat /> that breaks the app.
*
* `navigate(`/chat/${nextId}`, { replace: true });`
*/
const url = new URL(window.location.href);
url.pathname = `/chat/${nextId}`;
window.history.replaceState({}, '', url);
}


@@ -0,0 +1,220 @@
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
exports[`StreamingMessageParser > valid artifacts with actions > should correctly parse chunks and strip out bolt artifacts (0) > onActionClose 1`] = `
{
"action": {
"content": "npm install",
"type": "shell",
},
"actionId": "0",
"artifactId": "artifact_1",
"messageId": "message_1",
}
`;
exports[`StreamingMessageParser > valid artifacts with actions > should correctly parse chunks and strip out bolt artifacts (0) > onActionOpen 1`] = `
{
"action": {
"content": "",
"type": "shell",
},
"actionId": "0",
"artifactId": "artifact_1",
"messageId": "message_1",
}
`;
exports[`StreamingMessageParser > valid artifacts with actions > should correctly parse chunks and strip out bolt artifacts (0) > onArtifactClose 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts with actions > should correctly parse chunks and strip out bolt artifacts (0) > onArtifactOpen 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts with actions > should correctly parse chunks and strip out bolt artifacts (1) > onActionClose 1`] = `
{
"action": {
"content": "npm install",
"type": "shell",
},
"actionId": "0",
"artifactId": "artifact_1",
"messageId": "message_1",
}
`;
exports[`StreamingMessageParser > valid artifacts with actions > should correctly parse chunks and strip out bolt artifacts (1) > onActionClose 2`] = `
{
"action": {
"content": "some content
",
"filePath": "index.js",
"type": "file",
},
"actionId": "1",
"artifactId": "artifact_1",
"messageId": "message_1",
}
`;
exports[`StreamingMessageParser > valid artifacts with actions > should correctly parse chunks and strip out bolt artifacts (1) > onActionOpen 1`] = `
{
"action": {
"content": "",
"type": "shell",
},
"actionId": "0",
"artifactId": "artifact_1",
"messageId": "message_1",
}
`;
exports[`StreamingMessageParser > valid artifacts with actions > should correctly parse chunks and strip out bolt artifacts (1) > onActionOpen 2`] = `
{
"action": {
"content": "",
"filePath": "index.js",
"type": "file",
},
"actionId": "1",
"artifactId": "artifact_1",
"messageId": "message_1",
}
`;
exports[`StreamingMessageParser > valid artifacts with actions > should correctly parse chunks and strip out bolt artifacts (1) > onArtifactClose 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts with actions > should correctly parse chunks and strip out bolt artifacts (1) > onArtifactOpen 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts without actions > should correctly parse chunks and strip out bolt artifacts (0) > onArtifactClose 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts without actions > should correctly parse chunks and strip out bolt artifacts (0) > onArtifactOpen 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts without actions > should correctly parse chunks and strip out bolt artifacts (1) > onArtifactClose 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts without actions > should correctly parse chunks and strip out bolt artifacts (1) > onArtifactOpen 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts without actions > should correctly parse chunks and strip out bolt artifacts (2) > onArtifactClose 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts without actions > should correctly parse chunks and strip out bolt artifacts (2) > onArtifactOpen 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts without actions > should correctly parse chunks and strip out bolt artifacts (3) > onArtifactClose 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts without actions > should correctly parse chunks and strip out bolt artifacts (3) > onArtifactOpen 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts without actions > should correctly parse chunks and strip out bolt artifacts (4) > onArtifactClose 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts without actions > should correctly parse chunks and strip out bolt artifacts (4) > onArtifactOpen 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts without actions > should correctly parse chunks and strip out bolt artifacts (5) > onArtifactClose 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts without actions > should correctly parse chunks and strip out bolt artifacts (5) > onArtifactOpen 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts without actions > should correctly parse chunks and strip out bolt artifacts (6) > onArtifactClose 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;
exports[`StreamingMessageParser > valid artifacts without actions > should correctly parse chunks and strip out bolt artifacts (6) > onArtifactOpen 1`] = `
{
"id": "artifact_1",
"messageId": "message_1",
"title": "Some title",
}
`;


@@ -0,0 +1,184 @@
import { WebContainer } from '@webcontainer/api';
import { map, type MapStore } from 'nanostores';
import * as nodePath from 'node:path';
import type { BoltAction } from '~/types/actions';
import { createScopedLogger } from '~/utils/logger';
import { unreachable } from '~/utils/unreachable';
import type { ActionCallbackData } from './message-parser';
const logger = createScopedLogger('ActionRunner');
export type ActionStatus = 'pending' | 'running' | 'complete' | 'aborted' | 'failed';
export type BaseActionState = BoltAction & {
status: Exclude<ActionStatus, 'failed'>;
abort: () => void;
executed: boolean;
abortSignal: AbortSignal;
};
export type FailedActionState = BoltAction &
Omit<BaseActionState, 'status'> & {
status: Extract<ActionStatus, 'failed'>;
error: string;
};
export type ActionState = BaseActionState | FailedActionState;
type BaseActionUpdate = Partial<Pick<BaseActionState, 'status' | 'abort' | 'executed'>>;
export type ActionStateUpdate =
| BaseActionUpdate
| (Omit<BaseActionUpdate, 'status'> & { status: 'failed'; error: string });
type ActionsMap = MapStore<Record<string, ActionState>>;
export class ActionRunner {
#webcontainer: Promise<WebContainer>;
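// actions are chained onto this promise so they always run sequentially, one at a time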
#currentExecutionPromise: Promise<void> = Promise.resolve();
actions: ActionsMap = map({});
constructor(webcontainerPromise: Promise<WebContainer>) {
this.#webcontainer = webcontainerPromise;
}
addAction(data: ActionCallbackData) {
const { actionId } = data;
const actions = this.actions.get();
const action = actions[actionId];
if (action) {
// action already added
return;
}
const abortController = new AbortController();
this.actions.setKey(actionId, {
...data.action,
status: 'pending',
executed: false,
abort: () => {
abortController.abort();
this.#updateAction(actionId, { status: 'aborted' });
},
abortSignal: abortController.signal,
});
this.#currentExecutionPromise.then(() => {
this.#updateAction(actionId, { status: 'running' });
});
}
async runAction(data: ActionCallbackData) {
const { actionId } = data;
const action = this.actions.get()[actionId];
if (!action) {
unreachable(`Action ${actionId} not found`);
}
if (action.executed) {
return;
}
this.#updateAction(actionId, { ...action, ...data.action, executed: true });
this.#currentExecutionPromise = this.#currentExecutionPromise
.then(() => {
return this.#executeAction(actionId);
})
.catch((error) => {
console.error('Action failed:', error);
});
}
async #executeAction(actionId: string) {
const action = this.actions.get()[actionId];
this.#updateAction(actionId, { status: 'running' });
try {
switch (action.type) {
case 'shell': {
await this.#runShellAction(action);
break;
}
case 'file': {
await this.#runFileAction(action);
break;
}
}
this.#updateAction(actionId, { status: action.abortSignal.aborted ? 'aborted' : 'complete' });
} catch (error) {
this.#updateAction(actionId, { status: 'failed', error: 'Action failed' });
// re-throw the error to be caught in the promise chain
throw error;
}
}
async #runShellAction(action: ActionState) {
if (action.type !== 'shell') {
unreachable('Expected shell action');
}
const webcontainer = await this.#webcontainer;
const process = await webcontainer.spawn('jsh', ['-c', action.content]);
action.abortSignal.addEventListener('abort', () => {
process.kill();
});
process.output.pipeTo(
new WritableStream({
write(data) {
console.log(data);
},
}),
);
const exitCode = await process.exit;
logger.debug(`Process terminated with code ${exitCode}`);
}
async #runFileAction(action: ActionState) {
if (action.type !== 'file') {
unreachable('Expected file action');
}
const webcontainer = await this.#webcontainer;
let folder = nodePath.dirname(action.filePath);
// remove trailing slashes
folder = folder.replace(/\/+$/g, '');
if (folder !== '.') {
try {
await webcontainer.fs.mkdir(folder, { recursive: true });
logger.debug('Created folder', folder);
} catch (error) {
logger.error('Failed to create folder\n\n', error);
}
}
try {
await webcontainer.fs.writeFile(action.filePath, action.content);
logger.debug(`File written ${action.filePath}`);
} catch (error) {
logger.error('Failed to write file\n\n', error);
}
}
#updateAction(id: string, newState: ActionStateUpdate) {
const actions = this.actions.get();
this.actions.setKey(id, { ...actions[id], ...newState });
}
}


@@ -0,0 +1,207 @@
import { describe, expect, it, vi } from 'vitest';
import { StreamingMessageParser, type ActionCallback, type ArtifactCallback } from './message-parser';
interface ExpectedResult {
output: string;
callbacks?: {
onArtifactOpen?: number;
onArtifactClose?: number;
onActionOpen?: number;
onActionClose?: number;
};
}
describe('StreamingMessageParser', () => {
it('should pass through normal text', () => {
const parser = new StreamingMessageParser();
expect(parser.parse('test_id', 'Hello, world!')).toBe('Hello, world!');
});
it('should allow normal HTML tags', () => {
const parser = new StreamingMessageParser();
expect(parser.parse('test_id', 'Hello <strong>world</strong>!')).toBe('Hello <strong>world</strong>!');
});
describe('no artifacts', () => {
it.each<[string | string[], ExpectedResult | string]>([
['Foo bar', 'Foo bar'],
['Foo bar <', 'Foo bar '],
['Foo bar <p', 'Foo bar <p'],
[['Foo bar <', 's', 'p', 'an>some text</span>'], 'Foo bar <span>some text</span>'],
])('should correctly parse chunks and strip out bolt artifacts (%#)', (input, expected) => {
runTest(input, expected);
});
});
describe('invalid or incomplete artifacts', () => {
it.each<[string | string[], ExpectedResult | string]>([
['Foo bar <b', 'Foo bar '],
['Foo bar <ba', 'Foo bar <ba'],
['Foo bar <bol', 'Foo bar '],
['Foo bar <bolt', 'Foo bar '],
['Foo bar <bolta', 'Foo bar <bolta'],
['Foo bar <boltA', 'Foo bar '],
['Foo bar <boltArtifacs></boltArtifact>', 'Foo bar <boltArtifacs></boltArtifact>'],
['Before <oltArtfiact>foo</boltArtifact> After', 'Before <oltArtfiact>foo</boltArtifact> After'],
['Before <boltArtifactt>foo</boltArtifact> After', 'Before <boltArtifactt>foo</boltArtifact> After'],
])('should correctly parse chunks and strip out bolt artifacts (%#)', (input, expected) => {
runTest(input, expected);
});
});
describe('valid artifacts without actions', () => {
it.each<[string | string[], ExpectedResult | string]>([
[
'Some text before <boltArtifact title="Some title" id="artifact_1">foo bar</boltArtifact> Some more text',
{
output: 'Some text before Some more text',
callbacks: { onArtifactOpen: 1, onArtifactClose: 1, onActionOpen: 0, onActionClose: 0 },
},
],
[
['Some text before <boltArti', 'fact', ' title="Some title" id="artifact_1">foo</boltArtifact> Some more text'],
{
output: 'Some text before Some more text',
callbacks: { onArtifactOpen: 1, onArtifactClose: 1, onActionOpen: 0, onActionClose: 0 },
},
],
[
[
'Some text before <boltArti',
'fac',
't title="Some title" id="artifact_1"',
' ',
'>',
'foo</boltArtifact> Some more text',
],
{
output: 'Some text before Some more text',
callbacks: { onArtifactOpen: 1, onArtifactClose: 1, onActionOpen: 0, onActionClose: 0 },
},
],
[
[
'Some text before <boltArti',
'fact',
' title="Some title" id="artifact_1"',
' >fo',
'o</boltArtifact> Some more text',
],
{
output: 'Some text before Some more text',
callbacks: { onArtifactOpen: 1, onArtifactClose: 1, onActionOpen: 0, onActionClose: 0 },
},
],
[
[
'Some text before <boltArti',
'fact tit',
'le="Some ',
'title" id="artifact_1">fo',
'o',
'<',
'/boltArtifact> Some more text',
],
{
output: 'Some text before Some more text',
callbacks: { onArtifactOpen: 1, onArtifactClose: 1, onActionOpen: 0, onActionClose: 0 },
},
],
[
[
'Some text before <boltArti',
'fact title="Some title" id="artif',
'act_1">fo',
'o<',
'/boltArtifact> Some more text',
],
{
output: 'Some text before Some more text',
callbacks: { onArtifactOpen: 1, onArtifactClose: 1, onActionOpen: 0, onActionClose: 0 },
},
],
[
'Before <boltArtifact title="Some title" id="artifact_1">foo</boltArtifact> After',
{
output: 'Before After',
callbacks: { onArtifactOpen: 1, onArtifactClose: 1, onActionOpen: 0, onActionClose: 0 },
},
],
])('should correctly parse chunks and strip out bolt artifacts (%#)', (input, expected) => {
runTest(input, expected);
});
});
describe('valid artifacts with actions', () => {
it.each<[string | string[], ExpectedResult | string]>([
[
'Before <boltArtifact title="Some title" id="artifact_1"><boltAction type="shell">npm install</boltAction></boltArtifact> After',
{
output: 'Before After',
callbacks: { onArtifactOpen: 1, onArtifactClose: 1, onActionOpen: 1, onActionClose: 1 },
},
],
[
'Before <boltArtifact title="Some title" id="artifact_1"><boltAction type="shell">npm install</boltAction><boltAction type="file" filePath="index.js">some content</boltAction></boltArtifact> After',
{
output: 'Before After',
callbacks: { onArtifactOpen: 1, onArtifactClose: 1, onActionOpen: 2, onActionClose: 2 },
},
],
])('should correctly parse chunks and strip out bolt artifacts (%#)', (input, expected) => {
runTest(input, expected);
});
});
});
function runTest(input: string | string[], outputOrExpectedResult: string | ExpectedResult) {
let expected: ExpectedResult;
if (typeof outputOrExpectedResult === 'string') {
expected = { output: outputOrExpectedResult };
} else {
expected = outputOrExpectedResult;
}
const callbacks = {
onArtifactOpen: vi.fn<ArtifactCallback>((data) => {
expect(data).toMatchSnapshot('onArtifactOpen');
}),
onArtifactClose: vi.fn<ArtifactCallback>((data) => {
expect(data).toMatchSnapshot('onArtifactClose');
}),
onActionOpen: vi.fn<ActionCallback>((data) => {
expect(data).toMatchSnapshot('onActionOpen');
}),
onActionClose: vi.fn<ActionCallback>((data) => {
expect(data).toMatchSnapshot('onActionClose');
}),
};
const parser = new StreamingMessageParser({
artifactElement: () => '',
callbacks,
});
let message = '';
let result = '';
const chunks = Array.isArray(input) ? input : input.split('');
for (const chunk of chunks) {
message += chunk;
result += parser.parse('message_1', message);
}
for (const name in expected.callbacks) {
const callbackName = name;
expect(callbacks[callbackName as keyof typeof callbacks]).toHaveBeenCalledTimes(
expected.callbacks[callbackName as keyof typeof expected.callbacks] ?? 0,
);
}
expect(result).toEqual(expected.output);
}


@@ -0,0 +1,285 @@
import type { ActionType, BoltAction, BoltActionData, FileAction, ShellAction } from '~/types/actions';
import type { BoltArtifactData } from '~/types/artifact';
import { createScopedLogger } from '~/utils/logger';
import { unreachable } from '~/utils/unreachable';
const ARTIFACT_TAG_OPEN = '<boltArtifact';
const ARTIFACT_TAG_CLOSE = '</boltArtifact>';
const ARTIFACT_ACTION_TAG_OPEN = '<boltAction';
const ARTIFACT_ACTION_TAG_CLOSE = '</boltAction>';
const logger = createScopedLogger('MessageParser');
export interface ArtifactCallbackData extends BoltArtifactData {
messageId: string;
}
export interface ActionCallbackData {
artifactId: string;
messageId: string;
actionId: string;
action: BoltAction;
}
export type ArtifactCallback = (data: ArtifactCallbackData) => void;
export type ActionCallback = (data: ActionCallbackData) => void;
export interface ParserCallbacks {
onArtifactOpen?: ArtifactCallback;
onArtifactClose?: ArtifactCallback;
onActionOpen?: ActionCallback;
onActionClose?: ActionCallback;
}
interface ElementFactoryProps {
messageId: string;
}
type ElementFactory = (props: ElementFactoryProps) => string;
export interface StreamingMessageParserOptions {
callbacks?: ParserCallbacks;
artifactElement?: ElementFactory;
}
interface MessageState {
position: number;
insideArtifact: boolean;
insideAction: boolean;
currentArtifact?: BoltArtifactData;
currentAction: BoltActionData;
actionId: number;
}
export class StreamingMessageParser {
#messages = new Map<string, MessageState>();
constructor(private _options: StreamingMessageParserOptions = {}) {}
parse(messageId: string, input: string) {
let state = this.#messages.get(messageId);
if (!state) {
state = {
position: 0,
insideAction: false,
insideArtifact: false,
currentAction: { content: '' },
actionId: 0,
};
this.#messages.set(messageId, state);
}
let output = '';
let i = state.position;
let earlyBreak = false;
while (i < input.length) {
if (state.insideArtifact) {
const currentArtifact = state.currentArtifact;
if (currentArtifact === undefined) {
unreachable('Artifact not initialized');
}
if (state.insideAction) {
const closeIndex = input.indexOf(ARTIFACT_ACTION_TAG_CLOSE, i);
const currentAction = state.currentAction;
if (closeIndex !== -1) {
currentAction.content += input.slice(i, closeIndex);
let content = currentAction.content.trim();
if ('type' in currentAction && currentAction.type === 'file') {
content += '\n';
}
currentAction.content = content;
this._options.callbacks?.onActionClose?.({
artifactId: currentArtifact.id,
messageId,
/**
             * We decrement the id because it was already incremented when
             * `onActionOpen` was emitted, so both callbacks report the same id.
*/
actionId: String(state.actionId - 1),
action: currentAction as BoltAction,
});
state.insideAction = false;
state.currentAction = { content: '' };
i = closeIndex + ARTIFACT_ACTION_TAG_CLOSE.length;
} else {
break;
}
} else {
const actionOpenIndex = input.indexOf(ARTIFACT_ACTION_TAG_OPEN, i);
const artifactCloseIndex = input.indexOf(ARTIFACT_TAG_CLOSE, i);
if (actionOpenIndex !== -1 && (artifactCloseIndex === -1 || actionOpenIndex < artifactCloseIndex)) {
const actionEndIndex = input.indexOf('>', actionOpenIndex);
if (actionEndIndex !== -1) {
state.insideAction = true;
state.currentAction = this.#parseActionTag(input, actionOpenIndex, actionEndIndex);
this._options.callbacks?.onActionOpen?.({
artifactId: currentArtifact.id,
messageId,
actionId: String(state.actionId++),
action: state.currentAction as BoltAction,
});
i = actionEndIndex + 1;
} else {
break;
}
} else if (artifactCloseIndex !== -1) {
this._options.callbacks?.onArtifactClose?.({ messageId, ...currentArtifact });
state.insideArtifact = false;
state.currentArtifact = undefined;
i = artifactCloseIndex + ARTIFACT_TAG_CLOSE.length;
} else {
break;
}
}
} else if (input[i] === '<' && input[i + 1] !== '/') {
let j = i;
let potentialTag = '';
while (j < input.length && potentialTag.length < ARTIFACT_TAG_OPEN.length) {
potentialTag += input[j];
if (potentialTag === ARTIFACT_TAG_OPEN) {
const nextChar = input[j + 1];
if (nextChar && nextChar !== '>' && nextChar !== ' ') {
output += input.slice(i, j + 1);
i = j + 1;
break;
}
const openTagEnd = input.indexOf('>', j);
if (openTagEnd !== -1) {
const artifactTag = input.slice(i, openTagEnd + 1);
const artifactTitle = this.#extractAttribute(artifactTag, 'title') as string;
const artifactId = this.#extractAttribute(artifactTag, 'id') as string;
if (!artifactTitle) {
logger.warn('Artifact title missing');
}
if (!artifactId) {
logger.warn('Artifact id missing');
}
state.insideArtifact = true;
const currentArtifact = {
id: artifactId,
title: artifactTitle,
} satisfies BoltArtifactData;
state.currentArtifact = currentArtifact;
this._options.callbacks?.onArtifactOpen?.({ messageId, ...currentArtifact });
const artifactFactory = this._options.artifactElement ?? createArtifactElement;
output += artifactFactory({ messageId });
i = openTagEnd + 1;
} else {
earlyBreak = true;
}
break;
} else if (!ARTIFACT_TAG_OPEN.startsWith(potentialTag)) {
output += input.slice(i, j + 1);
i = j + 1;
break;
}
j++;
}
if (j === input.length && ARTIFACT_TAG_OPEN.startsWith(potentialTag)) {
break;
}
} else {
output += input[i];
i++;
}
if (earlyBreak) {
break;
}
}
state.position = i;
return output;
}
reset() {
this.#messages.clear();
}
#parseActionTag(input: string, actionOpenIndex: number, actionEndIndex: number) {
const actionTag = input.slice(actionOpenIndex, actionEndIndex + 1);
const actionType = this.#extractAttribute(actionTag, 'type') as ActionType;
const actionAttributes = {
type: actionType,
content: '',
};
if (actionType === 'file') {
const filePath = this.#extractAttribute(actionTag, 'filePath') as string;
if (!filePath) {
logger.debug('File path not specified');
}
(actionAttributes as FileAction).filePath = filePath;
} else if (actionType !== 'shell') {
logger.warn(`Unknown action type '${actionType}'`);
}
return actionAttributes as FileAction | ShellAction;
}
#extractAttribute(tag: string, attributeName: string): string | undefined {
const match = tag.match(new RegExp(`${attributeName}="([^"]*)"`, 'i'));
return match ? match[1] : undefined;
}
}
const createArtifactElement: ElementFactory = (props) => {
const elementProps = [
'class="__boltArtifact__"',
...Object.entries(props).map(([key, value]) => {
return `data-${camelToDashCase(key)}=${JSON.stringify(value)}`;
}),
];
return `<div ${elementProps.join(' ')}></div>`;
};
function camelToDashCase(input: string) {
return input.replace(/([a-z])([A-Z])/g, '$1-$2').toLowerCase();
}
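For orientation, here is a minimal sketch of how the parser above is driven chunk by chunk, mirroring the runTest helper in the spec; the chunk source and callback bodies are illustrative and not taken from the app code:

import { StreamingMessageParser } from '~/lib/runtime/message-parser';

// chunks as they arrive from the streamed model response (assumed to exist)
declare const streamedChunks: string[];

const parser = new StreamingMessageParser({
  callbacks: {
    onArtifactOpen: (data) => console.log('artifact open', data.id, data.title),
    onArtifactClose: (data) => console.log('artifact close', data.id),
    onActionOpen: (data) => console.log('action open', data.action.type),
    onActionClose: (data) => console.log('action close', data.actionId),
  },
});

let message = '';
let rendered = '';

for (const chunk of streamedChunks) {
  // the parser is always fed the full accumulated message; it remembers the
  // position it stopped at per message id and only processes the new tail
  message += chunk;
  rendered += parser.parse('message_1', message);
}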

7
app/lib/stores/chat.ts Normal file

@@ -0,0 +1,7 @@
import { map } from 'nanostores';
export const chatStore = map({
started: false,
aborted: false,
showChat: true,
});

95
app/lib/stores/editor.ts Normal file

@@ -0,0 +1,95 @@
import { atom, computed, map, type MapStore, type WritableAtom } from 'nanostores';
import type { EditorDocument, ScrollPosition } from '~/components/editor/codemirror/CodeMirrorEditor';
import type { FileMap, FilesStore } from './files';
export type EditorDocuments = Record<string, EditorDocument>;
type SelectedFile = WritableAtom<string | undefined>;
export class EditorStore {
#filesStore: FilesStore;
selectedFile: SelectedFile = import.meta.hot?.data.selectedFile ?? atom<string | undefined>();
documents: MapStore<EditorDocuments> = import.meta.hot?.data.documents ?? map({});
currentDocument = computed([this.documents, this.selectedFile], (documents, selectedFile) => {
if (!selectedFile) {
return undefined;
}
return documents[selectedFile];
});
constructor(filesStore: FilesStore) {
this.#filesStore = filesStore;
if (import.meta.hot) {
import.meta.hot.data.documents = this.documents;
import.meta.hot.data.selectedFile = this.selectedFile;
}
}
setDocuments(files: FileMap) {
const previousDocuments = this.documents.value;
this.documents.set(
Object.fromEntries<EditorDocument>(
Object.entries(files)
.map(([filePath, dirent]) => {
if (dirent === undefined || dirent.type === 'folder') {
return undefined;
}
const previousDocument = previousDocuments?.[filePath];
return [
filePath,
{
value: dirent.content,
filePath,
scroll: previousDocument?.scroll,
},
] as [string, EditorDocument];
})
.filter(Boolean) as Array<[string, EditorDocument]>,
),
);
}
setSelectedFile(filePath: string | undefined) {
this.selectedFile.set(filePath);
}
updateScrollPosition(filePath: string, position: ScrollPosition) {
const documents = this.documents.get();
const documentState = documents[filePath];
if (!documentState) {
return;
}
this.documents.setKey(filePath, {
...documentState,
scroll: position,
});
}
updateFile(filePath: string, newContent: string) {
const documents = this.documents.get();
const documentState = documents[filePath];
if (!documentState) {
return;
}
const currentContent = documentState.value;
const contentChanged = currentContent !== newContent;
if (contentChanged) {
this.documents.setKey(filePath, {
...documentState,
value: newContent,
});
}
}
}
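A rough usage sketch for the editor store above; the selected path is made up, and in the app this wiring happens inside the workbench store further down (import paths assume the `~` alias used throughout this commit):

import { EditorStore } from '~/lib/stores/editor';
import { FilesStore } from '~/lib/stores/files';
import { webcontainer } from '~/lib/webcontainer';

const filesStore = new FilesStore(webcontainer);
const editorStore = new EditorStore(filesStore);

// mirror the current file map into editor documents and pick a file
editorStore.setDocuments(filesStore.files.get());
editorStore.setSelectedFile('/home/project/index.js'); // example path

// react to whatever document is currently selected
editorStore.currentDocument.subscribe((doc) => {
  if (doc) {
    console.log(`editing ${doc.filePath} (${doc.value.length} characters)`);
  }
});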

220
app/lib/stores/files.ts Normal file

@@ -0,0 +1,220 @@
import type { PathWatcherEvent, WebContainer } from '@webcontainer/api';
import { getEncoding } from 'istextorbinary';
import { map, type MapStore } from 'nanostores';
import { Buffer } from 'node:buffer';
import * as nodePath from 'node:path';
import { bufferWatchEvents } from '~/utils/buffer';
import { WORK_DIR } from '~/utils/constants';
import { computeFileModifications } from '~/utils/diff';
import { createScopedLogger } from '~/utils/logger';
import { unreachable } from '~/utils/unreachable';
const logger = createScopedLogger('FilesStore');
const utf8TextDecoder = new TextDecoder('utf8', { fatal: true });
export interface File {
type: 'file';
content: string;
isBinary: boolean;
}
export interface Folder {
type: 'folder';
}
type Dirent = File | Folder;
export type FileMap = Record<string, Dirent | undefined>;
export class FilesStore {
#webcontainer: Promise<WebContainer>;
/**
* Tracks the number of files without folders.
*/
#size = 0;
/**
   * @note Keeps track of all modified files with their original content since the last user message.
* Needs to be reset when the user sends another message and all changes have to be submitted
* for the model to be aware of the changes.
*/
#modifiedFiles: Map<string, string> = import.meta.hot?.data.modifiedFiles ?? new Map();
/**
* Map of files that matches the state of WebContainer.
*/
files: MapStore<FileMap> = import.meta.hot?.data.files ?? map({});
get filesCount() {
return this.#size;
}
constructor(webcontainerPromise: Promise<WebContainer>) {
this.#webcontainer = webcontainerPromise;
if (import.meta.hot) {
import.meta.hot.data.files = this.files;
import.meta.hot.data.modifiedFiles = this.#modifiedFiles;
}
this.#init();
}
getFile(filePath: string) {
const dirent = this.files.get()[filePath];
if (dirent?.type !== 'file') {
return undefined;
}
return dirent;
}
getFileModifications() {
return computeFileModifications(this.files.get(), this.#modifiedFiles);
}
resetFileModifications() {
this.#modifiedFiles.clear();
}
async saveFile(filePath: string, content: string) {
const webcontainer = await this.#webcontainer;
try {
const relativePath = nodePath.relative(webcontainer.workdir, filePath);
if (!relativePath) {
        throw new Error(`EINVAL: invalid file path, write '${filePath}'`);
}
const oldContent = this.getFile(filePath)?.content;
      // an empty string is valid content, so only bail out when the file was never tracked
      if (oldContent === undefined) {
unreachable('Expected content to be defined');
}
await webcontainer.fs.writeFile(relativePath, content);
if (!this.#modifiedFiles.has(filePath)) {
this.#modifiedFiles.set(filePath, oldContent);
}
// we immediately update the file and don't rely on the `change` event coming from the watcher
this.files.setKey(filePath, { type: 'file', content, isBinary: false });
logger.info('File updated');
} catch (error) {
logger.error('Failed to update file content\n\n', error);
throw error;
}
}
async #init() {
const webcontainer = await this.#webcontainer;
webcontainer.watchPaths(
{ include: [`${WORK_DIR}/**`], exclude: ['**/node_modules', '.git'], includeContent: true },
bufferWatchEvents(100, this.#processEventBuffer.bind(this)),
);
}
#processEventBuffer(events: Array<[events: PathWatcherEvent[]]>) {
const watchEvents = events.flat(2);
for (const { type, path, buffer } of watchEvents) {
// remove any trailing slashes
const sanitizedPath = path.replace(/\/+$/g, '');
switch (type) {
case 'add_dir': {
          // folders are stored with `type: 'folder'` so they can be distinguished from files in the file tree
this.files.setKey(sanitizedPath, { type: 'folder' });
break;
}
case 'remove_dir': {
this.files.setKey(sanitizedPath, undefined);
          // also drop any entries nested under the removed directory
          for (const [direntPath] of Object.entries(this.files.get())) {
if (direntPath.startsWith(sanitizedPath)) {
this.files.setKey(direntPath, undefined);
}
}
break;
}
case 'add_file':
case 'change': {
if (type === 'add_file') {
this.#size++;
}
let content = '';
/**
           * @note This check is purely for the editor. The detection is not
           * bullet-proof and is only a best guess, so there may be false positives.
           * We do it because we don't want to display binary files in the editor
           * or allow them to be edited.
*/
const isBinary = isBinaryFile(buffer);
if (!isBinary) {
content = this.#decodeFileContent(buffer);
}
this.files.setKey(sanitizedPath, { type: 'file', content, isBinary });
break;
}
case 'remove_file': {
this.#size--;
this.files.setKey(sanitizedPath, undefined);
break;
}
case 'update_directory': {
// we don't care about these events
break;
}
}
}
}
#decodeFileContent(buffer?: Uint8Array) {
if (!buffer || buffer.byteLength === 0) {
return '';
}
try {
return utf8TextDecoder.decode(buffer);
} catch (error) {
console.log(error);
return '';
}
}
}
function isBinaryFile(buffer: Uint8Array | undefined) {
if (buffer === undefined) {
return false;
}
return getEncoding(convertToBuffer(buffer), { chunkLength: 100 }) === 'binary';
}
/**
 * Converts a `Uint8Array` into a Node.js `Buffer` by swapping its prototype
 * to `Buffer.prototype` instead of copying the data. A new typed array view is
 * created, but that's cheap because it shares the same underlying `ArrayBuffer`.
*/
function convertToBuffer(view: Uint8Array): Buffer {
const buffer = new Uint8Array(view.buffer, view.byteOffset, view.byteLength);
Object.setPrototypeOf(buffer, Buffer.prototype);
return buffer as Buffer;
}
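A small usage sketch for the files store; the path and content are made up, and note that saveFile expects the file to already be present in the map (see the oldContent check above):

import { FilesStore } from '~/lib/stores/files';
import { webcontainer } from '~/lib/webcontainer';

const filesStore = new FilesStore(webcontainer);

// the map is populated asynchronously by the WebContainer path watcher
filesStore.files.subscribe((files) => {
  console.log(`${Object.keys(files).length} dirents tracked, ${filesStore.filesCount} files`);
});

async function example() {
  // saving records the original content so a diff can later be produced
  // for the model via getFileModifications()
  await filesStore.saveFile('/home/project/index.js', 'console.log("hi");');
  console.log(filesStore.getFileModifications());
}

example();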


@@ -0,0 +1,49 @@
import type { WebContainer } from '@webcontainer/api';
import { atom } from 'nanostores';
export interface PreviewInfo {
port: number;
ready: boolean;
baseUrl: string;
}
export class PreviewsStore {
#availablePreviews = new Map<number, PreviewInfo>();
#webcontainer: Promise<WebContainer>;
previews = atom<PreviewInfo[]>([]);
constructor(webcontainerPromise: Promise<WebContainer>) {
this.#webcontainer = webcontainerPromise;
this.#init();
}
async #init() {
const webcontainer = await this.#webcontainer;
webcontainer.on('port', (port, type, url) => {
let previewInfo = this.#availablePreviews.get(port);
if (type === 'close' && previewInfo) {
this.#availablePreviews.delete(port);
this.previews.set(this.previews.get().filter((preview) => preview.port !== port));
return;
}
const previews = this.previews.get();
if (!previewInfo) {
previewInfo = { port, ready: type === 'open', baseUrl: url };
this.#availablePreviews.set(port, previewInfo);
previews.push(previewInfo);
}
previewInfo.ready = type === 'open';
previewInfo.baseUrl = url;
this.previews.set([...previews]);
});
}
}
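For illustration, a consumer of the previews atom could look like this; the import path is assumed since the file header above omits it, and the logging stands in for rendering preview panes:

import { PreviewsStore } from '~/lib/stores/previews'; // path assumed
import { webcontainer } from '~/lib/webcontainer';

const previewsStore = new PreviewsStore(webcontainer);

previewsStore.previews.subscribe((previews) => {
  for (const { port, ready, baseUrl } of previews) {
    console.log(`port ${port} is ${ready ? 'ready' : 'not ready'} at ${baseUrl}`);
  }
});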


@@ -0,0 +1,39 @@
import { map } from 'nanostores';
import { workbenchStore } from './workbench';
export interface Shortcut {
key: string;
ctrlKey?: boolean;
shiftKey?: boolean;
altKey?: boolean;
metaKey?: boolean;
ctrlOrMetaKey?: boolean;
action: () => void;
}
export interface Shortcuts {
toggleTerminal: Shortcut;
}
export interface Settings {
shortcuts: Shortcuts;
}
export const shortcutsStore = map<Shortcuts>({
toggleTerminal: {
key: 'j',
ctrlOrMetaKey: true,
action: () => workbenchStore.toggleTerminal(),
},
});
export const settingsStore = map<Settings>({
shortcuts: shortcutsStore.get(),
});
shortcutsStore.subscribe((shortcuts) => {
settingsStore.set({
...settingsStore.get(),
shortcuts,
});
});
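As a sketch of how these definitions could be consumed, a keydown listener might match them as follows; the matching logic is illustrative and the import path is assumed since the file header above omits it:

import { shortcutsStore, type Shortcut } from '~/lib/stores/settings'; // path assumed

function matches(shortcut: Shortcut, event: KeyboardEvent) {
  const ctrlOrMetaOk = !shortcut.ctrlOrMetaKey || event.ctrlKey || event.metaKey;
  const modifiersOk =
    (shortcut.ctrlKey === undefined || shortcut.ctrlKey === event.ctrlKey) &&
    (shortcut.shiftKey === undefined || shortcut.shiftKey === event.shiftKey) &&
    (shortcut.altKey === undefined || shortcut.altKey === event.altKey) &&
    (shortcut.metaKey === undefined || shortcut.metaKey === event.metaKey);

  return ctrlOrMetaOk && modifiersOk && event.key.toLowerCase() === shortcut.key;
}

window.addEventListener('keydown', (event) => {
  for (const shortcut of Object.values(shortcutsStore.get())) {
    if (matches(shortcut, event)) {
      event.preventDefault();
      shortcut.action(); // e.g. toggles the terminal for ctrl/cmd + j
    }
  }
});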


@@ -0,0 +1,40 @@
import type { WebContainer, WebContainerProcess } from '@webcontainer/api';
import { atom, type WritableAtom } from 'nanostores';
import type { ITerminal } from '~/types/terminal';
import { newShellProcess } from '~/utils/shell';
import { coloredText } from '~/utils/terminal';
export class TerminalStore {
#webcontainer: Promise<WebContainer>;
#terminals: Array<{ terminal: ITerminal; process: WebContainerProcess }> = [];
showTerminal: WritableAtom<boolean> = import.meta.hot?.data.showTerminal ?? atom(false);
constructor(webcontainerPromise: Promise<WebContainer>) {
this.#webcontainer = webcontainerPromise;
if (import.meta.hot) {
import.meta.hot.data.showTerminal = this.showTerminal;
}
}
toggleTerminal(value?: boolean) {
this.showTerminal.set(value !== undefined ? value : !this.showTerminal.get());
}
async attachTerminal(terminal: ITerminal) {
try {
const shellProcess = await newShellProcess(await this.#webcontainer, terminal);
this.#terminals.push({ terminal, process: shellProcess });
} catch (error: any) {
terminal.write(coloredText.red('Failed to spawn shell\n\n') + error.message);
return;
}
}
onTerminalResize(cols: number, rows: number) {
for (const { process } of this.#terminals) {
process.resize({ cols, rows });
}
}
}

35
app/lib/stores/theme.ts Normal file

@@ -0,0 +1,35 @@
import { atom } from 'nanostores';
export type Theme = 'dark' | 'light';
export const kTheme = 'bolt_theme';
export function themeIsDark() {
return themeStore.get() === 'dark';
}
export const DEFAULT_THEME = 'light';
export const themeStore = atom<Theme>(initStore());
function initStore() {
if (!import.meta.env.SSR) {
const persistedTheme = localStorage.getItem(kTheme) as Theme | undefined;
const themeAttribute = document.querySelector('html')?.getAttribute('data-theme');
return persistedTheme ?? (themeAttribute as Theme) ?? DEFAULT_THEME;
}
return DEFAULT_THEME;
}
export function toggleTheme() {
const currentTheme = themeStore.get();
const newTheme = currentTheme === 'dark' ? 'light' : 'dark';
themeStore.set(newTheme);
localStorage.setItem(kTheme, newTheme);
document.querySelector('html')?.setAttribute('data-theme', newTheme);
}

276
app/lib/stores/workbench.ts Normal file

@@ -0,0 +1,276 @@
import { atom, map, type MapStore, type ReadableAtom, type WritableAtom } from 'nanostores';
import type { EditorDocument, ScrollPosition } from '~/components/editor/codemirror/CodeMirrorEditor';
import { ActionRunner } from '~/lib/runtime/action-runner';
import type { ActionCallbackData, ArtifactCallbackData } from '~/lib/runtime/message-parser';
import { webcontainer } from '~/lib/webcontainer';
import type { ITerminal } from '~/types/terminal';
import { unreachable } from '~/utils/unreachable';
import { EditorStore } from './editor';
import { FilesStore, type FileMap } from './files';
import { PreviewsStore } from './previews';
import { TerminalStore } from './terminal';
export interface ArtifactState {
id: string;
title: string;
closed: boolean;
runner: ActionRunner;
}
export type ArtifactUpdateState = Pick<ArtifactState, 'title' | 'closed'>;
type Artifacts = MapStore<Record<string, ArtifactState>>;
export type WorkbenchViewType = 'code' | 'preview';
export class WorkbenchStore {
#previewsStore = new PreviewsStore(webcontainer);
#filesStore = new FilesStore(webcontainer);
#editorStore = new EditorStore(this.#filesStore);
#terminalStore = new TerminalStore(webcontainer);
artifacts: Artifacts = import.meta.hot?.data.artifacts ?? map({});
showWorkbench: WritableAtom<boolean> = import.meta.hot?.data.showWorkbench ?? atom(false);
currentView: WritableAtom<WorkbenchViewType> = import.meta.hot?.data.currentView ?? atom('code');
unsavedFiles: WritableAtom<Set<string>> = import.meta.hot?.data.unsavedFiles ?? atom(new Set<string>());
modifiedFiles = new Set<string>();
artifactIdList: string[] = [];
constructor() {
if (import.meta.hot) {
import.meta.hot.data.artifacts = this.artifacts;
import.meta.hot.data.unsavedFiles = this.unsavedFiles;
import.meta.hot.data.showWorkbench = this.showWorkbench;
import.meta.hot.data.currentView = this.currentView;
}
}
get previews() {
return this.#previewsStore.previews;
}
get files() {
return this.#filesStore.files;
}
get currentDocument(): ReadableAtom<EditorDocument | undefined> {
return this.#editorStore.currentDocument;
}
get selectedFile(): ReadableAtom<string | undefined> {
return this.#editorStore.selectedFile;
}
get firstArtifact(): ArtifactState | undefined {
return this.#getArtifact(this.artifactIdList[0]);
}
get filesCount(): number {
return this.#filesStore.filesCount;
}
get showTerminal() {
return this.#terminalStore.showTerminal;
}
toggleTerminal(value?: boolean) {
this.#terminalStore.toggleTerminal(value);
}
attachTerminal(terminal: ITerminal) {
this.#terminalStore.attachTerminal(terminal);
}
onTerminalResize(cols: number, rows: number) {
this.#terminalStore.onTerminalResize(cols, rows);
}
setDocuments(files: FileMap) {
this.#editorStore.setDocuments(files);
if (this.#filesStore.filesCount > 0 && this.currentDocument.get() === undefined) {
// we find the first file and select it
for (const [filePath, dirent] of Object.entries(files)) {
if (dirent?.type === 'file') {
this.setSelectedFile(filePath);
break;
}
}
}
}
setShowWorkbench(show: boolean) {
this.showWorkbench.set(show);
}
setCurrentDocumentContent(newContent: string) {
const filePath = this.currentDocument.get()?.filePath;
if (!filePath) {
return;
}
const originalContent = this.#filesStore.getFile(filePath)?.content;
const unsavedChanges = originalContent !== undefined && originalContent !== newContent;
this.#editorStore.updateFile(filePath, newContent);
const currentDocument = this.currentDocument.get();
if (currentDocument) {
const previousUnsavedFiles = this.unsavedFiles.get();
if (unsavedChanges && previousUnsavedFiles.has(currentDocument.filePath)) {
return;
}
const newUnsavedFiles = new Set(previousUnsavedFiles);
if (unsavedChanges) {
newUnsavedFiles.add(currentDocument.filePath);
} else {
newUnsavedFiles.delete(currentDocument.filePath);
}
this.unsavedFiles.set(newUnsavedFiles);
}
}
setCurrentDocumentScrollPosition(position: ScrollPosition) {
const editorDocument = this.currentDocument.get();
if (!editorDocument) {
return;
}
const { filePath } = editorDocument;
this.#editorStore.updateScrollPosition(filePath, position);
}
setSelectedFile(filePath: string | undefined) {
this.#editorStore.setSelectedFile(filePath);
}
async saveFile(filePath: string) {
const documents = this.#editorStore.documents.get();
const document = documents[filePath];
if (document === undefined) {
return;
}
await this.#filesStore.saveFile(filePath, document.value);
const newUnsavedFiles = new Set(this.unsavedFiles.get());
newUnsavedFiles.delete(filePath);
this.unsavedFiles.set(newUnsavedFiles);
}
async saveCurrentDocument() {
const currentDocument = this.currentDocument.get();
if (currentDocument === undefined) {
return;
}
await this.saveFile(currentDocument.filePath);
}
resetCurrentDocument() {
const currentDocument = this.currentDocument.get();
if (currentDocument === undefined) {
return;
}
const { filePath } = currentDocument;
const file = this.#filesStore.getFile(filePath);
if (!file) {
return;
}
this.setCurrentDocumentContent(file.content);
}
async saveAllFiles() {
for (const filePath of this.unsavedFiles.get()) {
await this.saveFile(filePath);
}
}
getFileModifcations() {
return this.#filesStore.getFileModifications();
}
resetAllFileModifications() {
this.#filesStore.resetFileModifications();
}
abortAllActions() {
// TODO: what do we wanna do and how do we wanna recover from this?
}
addArtifact({ messageId, title, id }: ArtifactCallbackData) {
const artifact = this.#getArtifact(messageId);
if (artifact) {
return;
}
if (!this.artifactIdList.includes(messageId)) {
this.artifactIdList.push(messageId);
}
this.artifacts.setKey(messageId, {
id,
title,
closed: false,
runner: new ActionRunner(webcontainer),
});
}
updateArtifact({ messageId }: ArtifactCallbackData, state: Partial<ArtifactUpdateState>) {
const artifact = this.#getArtifact(messageId);
if (!artifact) {
return;
}
this.artifacts.setKey(messageId, { ...artifact, ...state });
}
async addAction(data: ActionCallbackData) {
const { messageId } = data;
const artifact = this.#getArtifact(messageId);
if (!artifact) {
unreachable('Artifact not found');
}
artifact.runner.addAction(data);
}
async runAction(data: ActionCallbackData) {
const { messageId } = data;
const artifact = this.#getArtifact(messageId);
if (!artifact) {
unreachable('Artifact not found');
}
artifact.runner.runAction(data);
}
#getArtifact(id: string) {
const artifacts = this.artifacts.get();
return artifacts[id];
}
}
export const workbenchStore = new WorkbenchStore();
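To show how the stores and the message parser fit together, here is a rough sketch of wiring the parser callbacks into the workbench store; the actual wiring in the app, including the shell/file special-casing, may differ from this illustration:

import { StreamingMessageParser } from '~/lib/runtime/message-parser';
import { workbenchStore } from '~/lib/stores/workbench';

const parser = new StreamingMessageParser({
  callbacks: {
    onArtifactOpen: (data) => {
      workbenchStore.showWorkbench.set(true);
      workbenchStore.addArtifact(data);
    },
    onArtifactClose: (data) => {
      workbenchStore.updateArtifact(data, { closed: true });
    },
    onActionOpen: (data) => {
      // a shell action only has its full command once the closing tag is
      // parsed, so in this sketch it is registered on close instead
      if (data.action.type !== 'shell') {
        workbenchStore.addAction(data);
      }
    },
    onActionClose: (data) => {
      if (data.action.type === 'shell') {
        workbenchStore.addAction(data);
      }
      workbenchStore.runAction(data);
    },
  },
});

// parser.parse(messageId, accumulatedText) is then called as chunks arrive,
// exactly like in the sketch after the parser implementation above.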


@@ -0,0 +1,6 @@
/**
 * This client-only module contains everything related to auth and is used
* to avoid importing `@webcontainer/api` in the server bundle.
*/
export { auth, type AuthAPI } from '@webcontainer/api';


@@ -0,0 +1,37 @@
import { WebContainer } from '@webcontainer/api';
import { WORK_DIR_NAME } from '~/utils/constants';
import { forgetAuth } from '~/lib/auth';
interface WebContainerContext {
loaded: boolean;
}
export const webcontainerContext: WebContainerContext = import.meta.hot?.data.webcontainerContext ?? {
loaded: false,
};
if (import.meta.hot) {
import.meta.hot.data.webcontainerContext = webcontainerContext;
}
export let webcontainer: Promise<WebContainer> = new Promise(() => {
// noop for ssr
});
if (!import.meta.env.SSR) {
webcontainer =
import.meta.hot?.data.webcontainer ??
Promise.resolve()
.then(() => {
forgetAuth();
return WebContainer.boot({ workdirName: WORK_DIR_NAME });
})
.then((webcontainer) => {
webcontainerContext.loaded = true;
return webcontainer;
});
if (import.meta.hot) {
import.meta.hot.data.webcontainer = webcontainer;
}
}
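Finally, a small sketch of consuming the exported promise in client code; the spawned command and the use of top-level await are illustrative:

import { webcontainer, webcontainerContext } from '~/lib/webcontainer';

// resolves only in the browser; on the server the promise intentionally never settles
const container = await webcontainer;
console.log('booted in', container.workdir, '- loaded:', webcontainerContext.loaded);

// run a one-off process inside the container and stream its output
const proc = await container.spawn('ls', ['-la']);
proc.output.pipeTo(
  new WritableStream({
    write(chunk) {
      console.log(chunk);
    },
  }),
);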