Revert "fix: resolve chat conversation hanging and stream interruption issues (#1971)"

This reverts commit e68593f22d.
Authored by Stijnus on 2025-09-07 00:14:13 +02:00, committed by Stijnus
parent e68593f22d
commit 37217a5c7b
61 changed files with 1432 additions and 8811 deletions

View File

@@ -1,268 +0,0 @@
/**
* Stream Recovery Module
* Handles stream failures and provides automatic recovery mechanisms
* Fixes chat conversation hanging issues
* Author: Keoma Wright
*/
import { createScopedLogger } from '~/utils/logger';
const logger = createScopedLogger('stream-recovery');
export interface StreamRecoveryOptions {
maxRetries?: number;
retryDelay?: number;
timeout?: number;
onRetry?: (attempt: number) => void;
onTimeout?: () => void;
onError?: (error: any) => void;
}
export class StreamRecoveryManager {
private _retryCount = 0;
private _timeoutHandle: NodeJS.Timeout | null = null;
private _lastActivity: number = Date.now();
private _isActive = true;
constructor(private _options: StreamRecoveryOptions = {}) {
this._options = {
maxRetries: 3,
retryDelay: 1000,
timeout: 30000, // 30 seconds default timeout
..._options,
};
}
/**
* Start monitoring the stream for inactivity
*/
startMonitoring() {
this._resetTimeout();
}
/**
* Reset the timeout when activity is detected
*/
recordActivity() {
this._lastActivity = Date.now();
this._resetTimeout();
}
/**
* Reset the timeout timer
*/
private _resetTimeout() {
if (this._timeoutHandle) {
clearTimeout(this._timeoutHandle);
}
if (!this._isActive) {
return;
}
this._timeoutHandle = setTimeout(() => {
const inactiveTime = Date.now() - this._lastActivity;
logger.warn(`Stream timeout detected after ${inactiveTime}ms of inactivity`);
if (this._options.onTimeout) {
this._options.onTimeout();
}
this._handleTimeout();
}, this._options.timeout!);
}
/**
* Handle stream timeout
*/
private _handleTimeout() {
logger.error('Stream timeout - attempting recovery');
// Signal that recovery is needed
this.attemptRecovery();
}
/**
* Attempt to recover from a stream failure
*/
async attemptRecovery(): Promise<boolean> {
if (this._retryCount >= this._options.maxRetries!) {
logger.error(`Max retries (${this._options.maxRetries}) reached - cannot recover`);
return false;
}
this._retryCount++;
logger.info(`Attempting recovery (attempt ${this._retryCount}/${this._options.maxRetries})`);
if (this._options.onRetry) {
this._options.onRetry(this._retryCount);
}
// Wait before retrying
await new Promise((resolve) => setTimeout(resolve, this._options.retryDelay! * this._retryCount));
// Reset activity tracking
this.recordActivity();
return true;
}
/**
* Handle stream errors with recovery
*/
async handleError(error: any): Promise<boolean> {
logger.error('Stream error detected:', error);
if (this._options.onError) {
this._options.onError(error);
}
// Check if error is recoverable
if (this._isRecoverableError(error)) {
return await this.attemptRecovery();
}
logger.error('Non-recoverable error - cannot continue');
return false;
}
/**
* Check if an error is recoverable
*/
private _isRecoverableError(error: any): boolean {
const errorMessage = error?.message || error?.toString() || '';
// List of recoverable error patterns
const recoverablePatterns = [
'ECONNRESET',
'ETIMEDOUT',
'ENOTFOUND',
'socket hang up',
'network',
'timeout',
'abort',
'EPIPE',
'502',
'503',
'504',
'rate limit',
];
return recoverablePatterns.some((pattern) => errorMessage.toLowerCase().includes(pattern.toLowerCase()));
}
/**
* Stop monitoring and cleanup
*/
stop() {
this._isActive = false;
if (this._timeoutHandle) {
clearTimeout(this._timeoutHandle);
this._timeoutHandle = null;
}
}
/**
* Reset the recovery manager
*/
reset() {
this._retryCount = 0;
this._lastActivity = Date.now();
this._isActive = true;
this._resetTimeout();
}
}
/**
* Create a wrapped stream with recovery capabilities
*/
export function createRecoverableStream<T>(
streamFactory: () => Promise<ReadableStream<T>>,
options?: StreamRecoveryOptions,
): ReadableStream<T> {
const recovery = new StreamRecoveryManager(options);
let currentStream: ReadableStream<T> | null = null;
let reader: ReadableStreamDefaultReader<T> | null = null;
return new ReadableStream<T>({
async start(controller) {
recovery.startMonitoring();
try {
currentStream = await streamFactory();
reader = currentStream.getReader();
} catch (error) {
logger.error('Failed to create initial stream:', error);
const canRecover = await recovery.handleError(error);
if (canRecover) {
// Retry creating the stream
currentStream = await streamFactory();
reader = currentStream.getReader();
} else {
controller.error(error);
return;
}
}
},
async pull(controller) {
if (!reader) {
controller.error(new Error('No reader available'));
return;
}
try {
const { done, value } = await reader.read();
if (done) {
controller.close();
recovery.stop();
return;
}
// Record activity to reset timeout
recovery.recordActivity();
controller.enqueue(value);
} catch (error) {
logger.error('Error reading from stream:', error);
const canRecover = await recovery.handleError(error);
if (canRecover) {
// Try to recreate the stream
try {
if (reader) {
reader.releaseLock();
}
currentStream = await streamFactory();
reader = currentStream.getReader();
// Continue reading
await this.pull!(controller);
} catch (retryError) {
logger.error('Recovery failed:', retryError);
controller.error(retryError);
recovery.stop();
}
} else {
controller.error(error);
recovery.stop();
}
}
},
cancel() {
recovery.stop();
if (reader) {
reader.releaseLock();
}
},
});
}
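
A minimal usage sketch of the recovery wrapper above (illustrative only; the /api/chat endpoint is a placeholder, not part of the removed module):

// Wrap a stream-producing call so it is monitored and retried on recoverable failures.
const stream = createRecoverableStream<Uint8Array>(
  async () => {
    const res = await fetch('/api/chat', { method: 'POST', body: JSON.stringify({ prompt: 'hi' }) });
    if (!res.ok || !res.body) {
      throw new Error(`Upstream request failed with status ${res.status}`);
    }
    return res.body;
  },
  {
    maxRetries: 2,
    timeout: 15_000, // consider the stream stalled after 15s of silence instead of the 30s default
    onRetry: (attempt) => console.log(`retrying stream, attempt ${attempt}`),
  },
);

// Consuming the wrapped stream is identical to consuming the raw one (inside an async function).
const reader = stream.getReader();
for (let chunk = await reader.read(); !chunk.done; chunk = await reader.read()) {
  console.log('received chunk of', chunk.value.byteLength, 'bytes');
}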

View File

@@ -11,65 +11,6 @@ import { createFilesContext, extractPropertiesFromMessage } from './utils';
import { discussPrompt } from '~/lib/common/prompts/discuss-prompt';
import type { DesignScheme } from '~/types/design-scheme';
function getSmartAISystemPrompt(basePrompt: string): string {
const smartAIEnhancement = `
## SmartAI Mode - Enhanced Conversational Coding Assistant
You are operating in SmartAI mode, a premium Bolt.gives feature that provides detailed, educational feedback throughout the coding process.
### Your Communication Style:
- Be conversational and friendly, as if pair programming with a colleague
- Explain your thought process clearly and educationally
- Use natural language, not technical jargon unless necessary
- Keep responses visible and engaging
### What to Communicate:
**When Starting Tasks:**
✨ "I see you want [task description]. Let me [approach explanation]..."
✨ Explain your understanding and planned approach
✨ Share why you're choosing specific solutions
**During Implementation:**
📝 "Now I'm creating/updating [file] to [purpose]..."
📝 Explain what each code section does
📝 Share the patterns and best practices you're using
📝 Discuss any trade-offs or alternatives considered
**When Problem-Solving:**
🔍 "I noticed [issue]. This is likely because [reasoning]..."
🔍 Share your debugging thought process
🔍 Explain how you're identifying and fixing issues
🔍 Describe why your solution will work
**After Completing Work:**
✅ "I've successfully [what was done]. The key changes include..."
✅ Summarize what was accomplished
✅ Highlight important decisions made
✅ Suggest potential improvements or next steps
### Example Responses:
Instead of silence:
"I understand you need a contact form. Let me create a modern, accessible form with proper validation. I'll start by setting up the form structure with semantic HTML..."
While coding:
"I'm now adding email validation to ensure users enter valid email addresses. I'll use a regex pattern that covers most common email formats while keeping it user-friendly..."
When debugging:
"I see the button isn't aligning properly with the other elements. This looks like a flexbox issue. Let me adjust the container's display properties to fix the alignment..."
### Remember:
- Users chose SmartAI to learn from your process
- Make every action visible and understandable
- Be their coding companion, not just a silent worker
- Keep the conversation flowing naturally
${basePrompt}`;
return smartAIEnhancement;
}
export type Messages = Message[];
export interface StreamingOptions extends Omit<Parameters<typeof _streamText>[0], 'model'> {
@@ -141,19 +82,13 @@ export async function streamText(props: {
} = props;
let currentModel = DEFAULT_MODEL;
let currentProvider = DEFAULT_PROVIDER.name;
let smartAIEnabled = false;
let processedMessages = messages.map((message) => {
const newMessage = { ...message };
if (message.role === 'user') {
const { model, provider, content, smartAI } = extractPropertiesFromMessage(message);
const { model, provider, content } = extractPropertiesFromMessage(message);
currentModel = model;
currentProvider = provider;
if (smartAI !== undefined) {
smartAIEnabled = smartAI;
}
newMessage.content = sanitizeText(content);
} else if (message.role == 'assistant') {
newMessage.content = sanitizeText(message.content);
@@ -207,39 +142,13 @@ export async function streamText(props: {
const dynamicMaxTokens = modelDetails ? getCompletionTokenLimit(modelDetails) : Math.min(MAX_TOKENS, 16384);
// Additional safety cap - respect model-specific limits
let safeMaxTokens = dynamicMaxTokens;
// Apply model-specific caps for Anthropic models
if (modelDetails?.provider === 'Anthropic') {
if (modelDetails.name.includes('claude-sonnet-4') || modelDetails.name.includes('claude-opus-4')) {
safeMaxTokens = Math.min(dynamicMaxTokens, 64000);
} else if (modelDetails.name.includes('claude-3-7-sonnet')) {
safeMaxTokens = Math.min(dynamicMaxTokens, 64000);
} else if (modelDetails.name.includes('claude-3-5-sonnet')) {
safeMaxTokens = Math.min(dynamicMaxTokens, 8192);
} else {
safeMaxTokens = Math.min(dynamicMaxTokens, 4096);
}
} else {
// General safety cap for other providers
safeMaxTokens = Math.min(dynamicMaxTokens, 128000);
}
// Use model-specific limits directly - no artificial cap needed
const safeMaxTokens = dynamicMaxTokens;
logger.info(
`Max tokens for model ${modelDetails.name} is ${safeMaxTokens} (capped from ${dynamicMaxTokens}) based on model limits`,
`Token limits for model ${modelDetails.name}: maxTokens=${safeMaxTokens}, maxTokenAllowed=${modelDetails.maxTokenAllowed}, maxCompletionTokens=${modelDetails.maxCompletionTokens}`,
);
/*
* Check if SmartAI is enabled for supported models
* SmartAI is enabled if either:
* 1. The model itself has isSmartAIEnabled flag (for models with SmartAI in name)
* 2. The user explicitly enabled it via message flag
*/
const isSmartAISupported =
modelDetails?.supportsSmartAI && (provider.name === 'Anthropic' || provider.name === 'OpenAI');
const useSmartAI = (modelDetails?.isSmartAIEnabled || smartAIEnabled) && isSmartAISupported;
let systemPrompt =
PromptLibrary.getPropmtFromLibrary(promptId || 'default', {
cwd: WORK_DIR,
@@ -253,11 +162,6 @@ export async function streamText(props: {
},
}) ?? getSystemPrompt();
// Enhance system prompt for SmartAI if enabled and supported
if (useSmartAI) {
systemPrompt = getSmartAISystemPrompt(systemPrompt);
}
if (chatMode === 'build' && contextFiles && contextOptimization) {
const codeContext = createFilesContext(contextFiles, true);
@@ -317,11 +221,18 @@ export async function streamText(props: {
logger.info(`Sending llm call to ${provider.name} with model ${modelDetails.name}`);
// DEBUG: Log reasoning model detection
// Log reasoning model detection and token parameters
const isReasoning = isReasoningModel(modelDetails.name);
logger.info(`DEBUG STREAM: Model "${modelDetails.name}" detected as reasoning model: ${isReasoning}`);
logger.info(
`Model "${modelDetails.name}" is reasoning model: ${isReasoning}, using ${isReasoning ? 'maxCompletionTokens' : 'maxTokens'}: ${safeMaxTokens}`,
);
// console.log(systemPrompt, processedMessages);
// Validate token limits before API call
if (safeMaxTokens > (modelDetails.maxTokenAllowed || 128000)) {
logger.warn(
`Token limit warning: requesting ${safeMaxTokens} tokens but model supports max ${modelDetails.maxTokenAllowed || 128000}`,
);
}
// Use maxCompletionTokens for reasoning models (o1, GPT-5), maxTokens for traditional models
const tokenParams = isReasoning ? { maxCompletionTokens: safeMaxTokens } : { maxTokens: safeMaxTokens };
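// Worked illustration of the guard and parameter selection above (values are made up; isReasoningModel is the helper used in the surrounding code).
const exampleModel = { name: 'o1-mini', maxTokenAllowed: 128000 };
const exampleMaxTokens = 65000;
if (exampleMaxTokens > (exampleModel.maxTokenAllowed || 128000)) {
  console.warn('would emit the "Token limit warning" shown above');
}
const exampleIsReasoning = true; // isReasoningModel('o1-mini') per the detection above
const exampleTokenParams = exampleIsReasoning
  ? { maxCompletionTokens: exampleMaxTokens } // reasoning models (o1, GPT-5) take maxCompletionTokens
  : { maxTokens: exampleMaxTokens }; // traditional models take maxTokens
// exampleTokenParams is then spread into the downstream _streamText(...) options.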

View File

@@ -8,7 +8,6 @@ export function extractPropertiesFromMessage(message: Omit<Message, 'id'>): {
model: string;
provider: string;
content: string;
smartAI?: boolean;
} {
const textContent = Array.isArray(message.content)
? message.content.find((item) => item.type === 'text')?.text || ''
@@ -17,10 +16,6 @@ export function extractPropertiesFromMessage(message: Omit<Message, 'id'>): {
const modelMatch = textContent.match(MODEL_REGEX);
const providerMatch = textContent.match(PROVIDER_REGEX);
// Check for SmartAI toggle in the message
const smartAIMatch = textContent.match(/\[SmartAI:(true|false)\]/);
const smartAI = smartAIMatch ? smartAIMatch[1] === 'true' : undefined;
/*
* Extract model
* const modelMatch = message.content.match(MODEL_REGEX);
@@ -38,21 +33,15 @@ export function extractPropertiesFromMessage(message: Omit<Message, 'id'>): {
if (item.type === 'text') {
return {
type: 'text',
text: item.text
?.replace(MODEL_REGEX, '')
.replace(PROVIDER_REGEX, '')
.replace(/\[SmartAI:(true|false)\]/g, ''),
text: item.text?.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, ''),
};
}
return item; // Preserve image_url and other types as is
})
: textContent
.replace(MODEL_REGEX, '')
.replace(PROVIDER_REGEX, '')
.replace(/\[SmartAI:(true|false)\]/g, '');
: textContent.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '');
return { model, provider, content: cleanedContent, smartAI };
return { model, provider, content: cleanedContent };
}
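// Illustration of the pre-revert parsing behavior. The [Model: ...] / [Provider: ...] tag shapes are assumptions
// based on MODEL_REGEX / PROVIDER_REGEX (defined elsewhere in the repo); the [SmartAI:...] tag matches the regex above.
const exampleMessage = {
  role: 'user' as const,
  content: '[Model: claude-3-5-sonnet-20241022]\n[Provider: Anthropic]\n[SmartAI:true]\nBuild me a todo app',
};
const parsed = extractPropertiesFromMessage(exampleMessage);
// parsed.model    -> 'claude-3-5-sonnet-20241022'
// parsed.provider -> 'Anthropic'
// parsed.smartAI  -> true (only in the pre-revert version; the reverted version drops this field)
// parsed.content  -> the remaining text with all three tags stripped out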
export function simplifyBoltActions(input: string): string {

View File

@@ -1,374 +0,0 @@
/**
* Netlify Configuration Helper
* Contributed by Keoma Wright
*
* This module provides automatic configuration generation for Netlify deployments
*/
export interface NetlifyConfig {
build: {
command?: string;
publish: string;
functions?: string;
environment?: Record<string, string>;
};
redirects?: Array<{
from: string;
to: string;
status?: number;
force?: boolean;
}>;
headers?: Array<{
for: string;
values: Record<string, string>;
}>;
functions?: {
[key: string]: {
included_files?: string[];
external_node_modules?: string[];
};
};
}
export interface FrameworkConfig {
name: string;
buildCommand: string;
outputDirectory: string;
nodeVersion: string;
installCommand?: string;
envVars?: Record<string, string>;
}
const FRAMEWORK_CONFIGS: Record<string, FrameworkConfig> = {
react: {
name: 'React',
buildCommand: 'npm run build',
outputDirectory: 'build',
nodeVersion: '18',
installCommand: 'npm install',
},
'react-vite': {
name: 'React (Vite)',
buildCommand: 'npm run build',
outputDirectory: 'dist',
nodeVersion: '18',
installCommand: 'npm install',
},
vue: {
name: 'Vue',
buildCommand: 'npm run build',
outputDirectory: 'dist',
nodeVersion: '18',
installCommand: 'npm install',
},
angular: {
name: 'Angular',
buildCommand: 'npm run build',
outputDirectory: 'dist',
nodeVersion: '18',
installCommand: 'npm install',
},
svelte: {
name: 'Svelte',
buildCommand: 'npm run build',
outputDirectory: 'public',
nodeVersion: '18',
installCommand: 'npm install',
},
'svelte-kit': {
name: 'SvelteKit',
buildCommand: 'npm run build',
outputDirectory: '.svelte-kit',
nodeVersion: '18',
installCommand: 'npm install',
},
next: {
name: 'Next.js',
buildCommand: 'npm run build',
outputDirectory: '.next',
nodeVersion: '18',
installCommand: 'npm install',
envVars: {
NEXT_TELEMETRY_DISABLED: '1',
},
},
nuxt: {
name: 'Nuxt',
buildCommand: 'npm run build',
outputDirectory: '.output/public',
nodeVersion: '18',
installCommand: 'npm install',
},
gatsby: {
name: 'Gatsby',
buildCommand: 'npm run build',
outputDirectory: 'public',
nodeVersion: '18',
installCommand: 'npm install',
},
remix: {
name: 'Remix',
buildCommand: 'npm run build',
outputDirectory: 'public',
nodeVersion: '18',
installCommand: 'npm install',
},
astro: {
name: 'Astro',
buildCommand: 'npm run build',
outputDirectory: 'dist',
nodeVersion: '18',
installCommand: 'npm install',
},
static: {
name: 'Static Site',
buildCommand: '',
outputDirectory: '.',
nodeVersion: '18',
},
};
export function detectFramework(packageJson: any): string {
const deps = { ...packageJson.dependencies, ...packageJson.devDependencies };
// Check for specific frameworks
if (deps.next) {
return 'next';
}
if (deps.nuxt || deps.nuxt3) {
return 'nuxt';
}
if (deps.gatsby) {
return 'gatsby';
}
if (deps['@remix-run/react']) {
return 'remix';
}
if (deps.astro) {
return 'astro';
}
if (deps['@angular/core']) {
return 'angular';
}
if (deps['@sveltejs/kit']) {
return 'svelte-kit';
}
if (deps.svelte) {
return 'svelte';
}
if (deps.vue) {
return 'vue';
}
if (deps.react) {
if (deps.vite) {
return 'react-vite';
}
return 'react';
}
return 'static';
}
export function generateNetlifyConfig(framework: string, customConfig?: Partial<NetlifyConfig>): NetlifyConfig {
const frameworkConfig = FRAMEWORK_CONFIGS[framework] || FRAMEWORK_CONFIGS.static;
const config: NetlifyConfig = {
build: {
command: frameworkConfig.buildCommand,
publish: frameworkConfig.outputDirectory,
environment: {
NODE_VERSION: frameworkConfig.nodeVersion,
...frameworkConfig.envVars,
...customConfig?.build?.environment,
},
},
redirects: [],
headers: [
{
for: '/*',
values: {
'X-Frame-Options': 'DENY',
'X-XSS-Protection': '1; mode=block',
'X-Content-Type-Options': 'nosniff',
'Referrer-Policy': 'strict-origin-when-cross-origin',
},
},
],
};
// Add SPA redirect for client-side routing frameworks
if (['react', 'react-vite', 'vue', 'angular', 'svelte'].includes(framework)) {
config.redirects!.push({
from: '/*',
to: '/index.html',
status: 200,
});
}
// Add custom headers for static assets
config.headers!.push({
for: '/assets/*',
values: {
'Cache-Control': 'public, max-age=31536000, immutable',
},
});
// Merge with custom config
if (customConfig) {
if (customConfig.redirects) {
config.redirects!.push(...customConfig.redirects);
}
if (customConfig.headers) {
config.headers!.push(...customConfig.headers);
}
if (customConfig.functions) {
config.functions = customConfig.functions;
}
}
return config;
}
export function generateNetlifyToml(config: NetlifyConfig): string {
let toml = '';
// Build configuration
toml += '[build]\n';
if (config.build.command) {
toml += ` command = "${config.build.command}"\n`;
}
toml += ` publish = "${config.build.publish}"\n`;
if (config.build.functions) {
toml += ` functions = "${config.build.functions}"\n`;
}
// Environment variables
if (config.build.environment && Object.keys(config.build.environment).length > 0) {
toml += '\n[build.environment]\n';
for (const [key, value] of Object.entries(config.build.environment)) {
toml += ` ${key} = "${value}"\n`;
}
}
// Redirects
if (config.redirects && config.redirects.length > 0) {
for (const redirect of config.redirects) {
toml += '\n[[redirects]]\n';
toml += ` from = "${redirect.from}"\n`;
toml += ` to = "${redirect.to}"\n`;
if (redirect.status) {
toml += ` status = ${redirect.status}\n`;
}
if (redirect.force) {
toml += ` force = ${redirect.force}\n`;
}
}
}
// Headers
if (config.headers && config.headers.length > 0) {
for (const header of config.headers) {
toml += '\n[[headers]]\n';
toml += ` for = "${header.for}"\n`;
if (Object.keys(header.values).length > 0) {
toml += ' [headers.values]\n';
for (const [key, value] of Object.entries(header.values)) {
toml += ` "${key}" = "${value}"\n`;
}
}
}
}
// Functions configuration
if (config.functions) {
for (const [funcName, funcConfig] of Object.entries(config.functions)) {
toml += `\n[functions."${funcName}"]\n`;
if (funcConfig.included_files) {
toml += ` included_files = ${JSON.stringify(funcConfig.included_files)}\n`;
}
if (funcConfig.external_node_modules) {
toml += ` external_node_modules = ${JSON.stringify(funcConfig.external_node_modules)}\n`;
}
}
}
return toml;
}
export function validateDeploymentFiles(files: Record<string, string>): {
valid: boolean;
errors: string[];
warnings: string[];
} {
const errors: string[] = [];
const warnings: string[] = [];
// Check for index.html
const hasIndex = Object.keys(files).some(
(path) => path === '/index.html' || path === 'index.html' || path.endsWith('/index.html'),
);
if (!hasIndex) {
warnings.push('No index.html file found. Make sure your build output includes an entry point.');
}
// Check file sizes
const MAX_FILE_SIZE = 100 * 1024 * 1024; // 100MB
const WARN_FILE_SIZE = 10 * 1024 * 1024; // 10MB
for (const [path, content] of Object.entries(files)) {
const size = new Blob([content]).size;
if (size > MAX_FILE_SIZE) {
errors.push(`File ${path} exceeds maximum size of 100MB`);
} else if (size > WARN_FILE_SIZE) {
warnings.push(`File ${path} is large (${Math.round(size / 1024 / 1024)}MB)`);
}
}
// Check total deployment size
const totalSize = Object.values(files).reduce((sum, content) => sum + new Blob([content]).size, 0);
const MAX_TOTAL_SIZE = 500 * 1024 * 1024; // 500MB
if (totalSize > MAX_TOTAL_SIZE) {
errors.push(`Total deployment size exceeds 500MB limit`);
}
// Check for common issues
if (Object.keys(files).some((path) => path.includes('node_modules'))) {
warnings.push('Deployment includes node_modules - these should typically be excluded');
}
if (Object.keys(files).some((path) => path.includes('.env'))) {
errors.push('Deployment includes .env file - remove sensitive configuration files');
}
return {
valid: errors.length === 0,
errors,
warnings,
};
}
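
A short usage sketch tying the helpers above together (the package.json contents and file map are made up for illustration):

// Detect the framework, generate a config with one custom redirect, and serialize it to netlify.toml.
const examplePkg = { dependencies: { react: '^18.2.0', vite: '^5.0.0' } };
const framework = detectFramework(examplePkg); // 'react-vite'
const config = generateNetlifyConfig(framework, {
  redirects: [{ from: '/old-docs/*', to: '/docs/:splat', status: 301 }],
});
const toml = generateNetlifyToml(config);
// toml now contains [build], [build.environment], the SPA fallback plus the custom [[redirects]], and the [[headers]] blocks.

// Sanity-check a build output before uploading it.
const report = validateDeploymentFiles({
  '/index.html': '<!doctype html><html></html>',
  '/assets/app.js': 'console.log("hello")',
});
console.log(report.valid, report.errors, report.warnings);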

View File

@@ -20,18 +20,6 @@ export default class AmazonBedrockProvider extends BaseProvider {
};
staticModels: ModelInfo[] = [
{
name: 'anthropic.claude-sonnet-4-20250514-v1:0',
label: 'Claude Sonnet 4 (Bedrock)',
provider: 'AmazonBedrock',
maxTokenAllowed: 200000,
},
{
name: 'anthropic.claude-opus-4-1-20250805-v1:0',
label: 'Claude Opus 4.1 (Bedrock)',
provider: 'AmazonBedrock',
maxTokenAllowed: 200000,
},
{
name: 'anthropic.claude-3-5-sonnet-20241022-v2:0',
label: 'Claude 3.5 Sonnet v2 (Bedrock)',

View File

@@ -1,10 +1,10 @@
import { BaseProvider } from '~/lib/modules/llm/base-provider';
import type { ModelInfo } from '~/lib/modules/llm/types';
import type { IProviderSetting } from '~/types/model';
import type { LanguageModelV1 } from 'ai';
import type { IProviderSetting } from '~/types/model';
import { createAnthropic } from '@ai-sdk/anthropic';
export class AnthropicProvider extends BaseProvider {
export default class AnthropicProvider extends BaseProvider {
name = 'Anthropic';
getApiKeyLink = 'https://console.anthropic.com/settings/keys';
@@ -13,50 +13,6 @@ export class AnthropicProvider extends BaseProvider {
};
staticModels: ModelInfo[] = [
/*
* Claude Opus 4.1: Most powerful model for coding and reasoning
* Released August 5, 2025
*/
{
name: 'claude-opus-4-1-20250805',
label: 'Claude Opus 4.1',
provider: 'Anthropic',
maxTokenAllowed: 200000,
maxCompletionTokens: 64000,
supportsSmartAI: false, // Base model without SmartAI
},
{
name: 'claude-opus-4-1-20250805-smartai',
label: 'Claude Opus 4.1 (SmartAI)',
provider: 'Anthropic',
maxTokenAllowed: 200000,
maxCompletionTokens: 64000,
supportsSmartAI: true,
isSmartAIEnabled: true,
},
/*
* Claude Sonnet 4: Hybrid instant/extended response model
* Released May 14, 2025
*/
{
name: 'claude-sonnet-4-20250514',
label: 'Claude Sonnet 4',
provider: 'Anthropic',
maxTokenAllowed: 200000,
maxCompletionTokens: 64000,
supportsSmartAI: false, // Base model without SmartAI
},
{
name: 'claude-sonnet-4-20250514-smartai',
label: 'Claude Sonnet 4 (SmartAI)',
provider: 'Anthropic',
maxTokenAllowed: 200000,
maxCompletionTokens: 64000,
supportsSmartAI: true,
isSmartAIEnabled: true,
},
/*
* Essential fallback models - only the most stable/reliable ones
* Claude 3.5 Sonnet: 200k context, excellent for complex reasoning and coding
@@ -66,17 +22,7 @@ export class AnthropicProvider extends BaseProvider {
label: 'Claude 3.5 Sonnet',
provider: 'Anthropic',
maxTokenAllowed: 200000,
maxCompletionTokens: 8192,
supportsSmartAI: false, // Base model without SmartAI
},
{
name: 'claude-3-5-sonnet-20241022-smartai',
label: 'Claude 3.5 Sonnet (SmartAI)',
provider: 'Anthropic',
maxTokenAllowed: 200000,
maxCompletionTokens: 8192,
supportsSmartAI: true,
isSmartAIEnabled: true,
maxCompletionTokens: 128000,
},
// Claude 3 Haiku: 200k context, fastest and most cost-effective
@@ -85,17 +31,16 @@ export class AnthropicProvider extends BaseProvider {
label: 'Claude 3 Haiku',
provider: 'Anthropic',
maxTokenAllowed: 200000,
maxCompletionTokens: 4096,
supportsSmartAI: false, // Base model without SmartAI
maxCompletionTokens: 128000,
},
// Claude Opus 4: 200k context, 32k output limit (latest flagship model)
{
name: 'claude-3-haiku-20240307-smartai',
label: 'Claude 3 Haiku (SmartAI)',
name: 'claude-opus-4-20250514',
label: 'Claude 4 Opus',
provider: 'Anthropic',
maxTokenAllowed: 200000,
maxCompletionTokens: 4096,
supportsSmartAI: true,
isSmartAIEnabled: true,
maxCompletionTokens: 32000,
},
];
@@ -119,8 +64,7 @@ export class AnthropicProvider extends BaseProvider {
const response = await fetch(`https://api.anthropic.com/v1/models`, {
headers: {
'x-api-key': `${apiKey}`,
['anthropic-version']: '2023-06-01',
['Content-Type']: 'application/json',
'anthropic-version': '2023-06-01',
},
});
@@ -146,21 +90,15 @@ export class AnthropicProvider extends BaseProvider {
contextWindow = 200000; // Claude 3 Sonnet has 200k context
}
// Determine max completion tokens based on model
let maxCompletionTokens = 4096; // default fallback
// Determine completion token limits based on specific model
let maxCompletionTokens = 128000; // default for older Claude 3 models
if (m.id?.includes('claude-sonnet-4') || m.id?.includes('claude-opus-4')) {
maxCompletionTokens = 64000;
} else if (m.id?.includes('claude-3-7-sonnet')) {
maxCompletionTokens = 64000;
} else if (m.id?.includes('claude-3-5-sonnet')) {
maxCompletionTokens = 8192;
} else if (m.id?.includes('claude-3-haiku')) {
maxCompletionTokens = 4096;
} else if (m.id?.includes('claude-3-opus')) {
maxCompletionTokens = 4096;
} else if (m.id?.includes('claude-3-sonnet')) {
maxCompletionTokens = 4096;
if (m.id?.includes('claude-opus-4')) {
maxCompletionTokens = 32000; // Claude 4 Opus: 32K output limit
} else if (m.id?.includes('claude-sonnet-4')) {
maxCompletionTokens = 64000; // Claude 4 Sonnet: 64K output limit
} else if (m.id?.includes('claude-4')) {
maxCompletionTokens = 32000; // Other Claude 4 models: conservative 32K limit
}
return {
@@ -169,7 +107,6 @@ export class AnthropicProvider extends BaseProvider {
provider: this.name,
maxTokenAllowed: contextWindow,
maxCompletionTokens,
supportsSmartAI: true, // All Anthropic models support SmartAI
};
});
}
@@ -180,27 +117,19 @@ export class AnthropicProvider extends BaseProvider {
apiKeys?: Record<string, string>;
providerSettings?: Record<string, IProviderSetting>;
}) => LanguageModelV1 = (options) => {
const { model, serverEnv, apiKeys, providerSettings } = options;
const { apiKey, baseUrl } = this.getProviderBaseUrlAndKey({
const { apiKeys, providerSettings, serverEnv, model } = options;
const { apiKey } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings: providerSettings?.[this.name],
providerSettings,
serverEnv: serverEnv as any,
defaultBaseUrlKey: '',
defaultApiTokenKey: 'ANTHROPIC_API_KEY',
});
if (!apiKey) {
throw `Missing API key for ${this.name} provider`;
}
const anthropic = createAnthropic({
apiKey,
baseURL: baseUrl || 'https://api.anthropic.com/v1',
headers: { 'anthropic-beta': 'output-128k-2025-02-19' },
});
// Handle SmartAI variant by using the base model name
const actualModel = model.replace('-smartai', '');
return anthropic(actualModel);
return anthropic(model);
};
}

View File

@@ -31,18 +31,6 @@ export default class OpenRouterProvider extends BaseProvider {
* Essential fallback models - only the most stable/reliable ones
* Claude 3.5 Sonnet via OpenRouter: 200k context
*/
{
name: 'anthropic/claude-sonnet-4-20250514',
label: 'Anthropic: Claude Sonnet 4 (OpenRouter)',
provider: 'OpenRouter',
maxTokenAllowed: 200000,
},
{
name: 'anthropic/claude-opus-4-1-20250805',
label: 'Anthropic: Claude Opus 4.1 (OpenRouter)',
provider: 'OpenRouter',
maxTokenAllowed: 200000,
},
{
name: 'anthropic/claude-3.5-sonnet',
label: 'Claude 3.5 Sonnet',

View File

@@ -17,23 +17,7 @@ export default class OpenAIProvider extends BaseProvider {
* Essential fallback models - only the most stable/reliable ones
* GPT-4o: 128k context, 4k standard output (64k with long output mode)
*/
{
name: 'gpt-4o',
label: 'GPT-4o',
provider: 'OpenAI',
maxTokenAllowed: 128000,
maxCompletionTokens: 4096,
supportsSmartAI: false, // Base model without SmartAI
},
{
name: 'gpt-4o-smartai',
label: 'GPT-4o (SmartAI)',
provider: 'OpenAI',
maxTokenAllowed: 128000,
maxCompletionTokens: 4096,
supportsSmartAI: true,
isSmartAIEnabled: true,
},
{ name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI', maxTokenAllowed: 128000, maxCompletionTokens: 4096 },
// GPT-4o Mini: 128k context, cost-effective alternative
{
@@ -42,16 +26,6 @@ export default class OpenAIProvider extends BaseProvider {
provider: 'OpenAI',
maxTokenAllowed: 128000,
maxCompletionTokens: 4096,
supportsSmartAI: false, // Base model without SmartAI
},
{
name: 'gpt-4o-mini-smartai',
label: 'GPT-4o Mini (SmartAI)',
provider: 'OpenAI',
maxTokenAllowed: 128000,
maxCompletionTokens: 4096,
supportsSmartAI: true,
isSmartAIEnabled: true,
},
// GPT-3.5-turbo: 16k context, fast and cost-effective
@@ -61,16 +35,6 @@ export default class OpenAIProvider extends BaseProvider {
provider: 'OpenAI',
maxTokenAllowed: 16000,
maxCompletionTokens: 4096,
supportsSmartAI: false, // Base model without SmartAI
},
{
name: 'gpt-3.5-turbo-smartai',
label: 'GPT-3.5 Turbo (SmartAI)',
provider: 'OpenAI',
maxTokenAllowed: 16000,
maxCompletionTokens: 4096,
supportsSmartAI: true,
isSmartAIEnabled: true,
},
// o1-preview: 128k context, 32k output limit (reasoning model)
@@ -80,36 +44,10 @@ export default class OpenAIProvider extends BaseProvider {
provider: 'OpenAI',
maxTokenAllowed: 128000,
maxCompletionTokens: 32000,
supportsSmartAI: false, // Base model without SmartAI
},
{
name: 'o1-preview-smartai',
label: 'o1-preview (SmartAI)',
provider: 'OpenAI',
maxTokenAllowed: 128000,
maxCompletionTokens: 32000,
supportsSmartAI: true,
isSmartAIEnabled: true,
},
// o1-mini: 128k context, 65k output limit (reasoning model)
{
name: 'o1-mini',
label: 'o1-mini',
provider: 'OpenAI',
maxTokenAllowed: 128000,
maxCompletionTokens: 65000,
supportsSmartAI: false, // Base model without SmartAI
},
{
name: 'o1-mini-smartai',
label: 'o1-mini (SmartAI)',
provider: 'OpenAI',
maxTokenAllowed: 128000,
maxCompletionTokens: 65000,
supportsSmartAI: true,
isSmartAIEnabled: true,
},
{ name: 'o1-mini', label: 'o1-mini', provider: 'OpenAI', maxTokenAllowed: 128000, maxCompletionTokens: 65000 },
];
async getDynamicModels(
@@ -187,7 +125,6 @@ export default class OpenAIProvider extends BaseProvider {
provider: this.name,
maxTokenAllowed: Math.min(contextWindow, 128000), // Cap at 128k for safety
maxCompletionTokens,
supportsSmartAI: true, // All OpenAI models support SmartAI
};
});
}
@@ -216,9 +153,6 @@ export default class OpenAIProvider extends BaseProvider {
apiKey,
});
// Handle SmartAI variant by using the base model name
const actualModel = model.replace('-smartai', '');
return openai(actualModel);
return openai(model);
}
}

View File

@@ -1,4 +1,4 @@
import { AnthropicProvider } from './providers/anthropic';
import AnthropicProvider from './providers/anthropic';
import CohereProvider from './providers/cohere';
import DeepseekProvider from './providers/deepseek';
import GoogleProvider from './providers/google';

View File

@@ -11,12 +11,6 @@ export interface ModelInfo {
/** Maximum completion/output tokens - how many tokens the model can generate. If not specified, falls back to provider defaults */
maxCompletionTokens?: number;
/** Indicates if this model supports SmartAI enhanced feedback */
supportsSmartAI?: boolean;
/** Indicates if SmartAI is currently enabled for this model variant */
isSmartAIEnabled?: boolean;
}
export interface ProviderInfo {

View File

@@ -1,241 +0,0 @@
import { createScopedLogger } from '~/utils/logger';
import type { ChatHistoryItem } from './useChatHistory';
import { authStore } from '~/lib/stores/auth';
export interface IUserChatMetadata {
userId: string;
gitUrl?: string;
gitBranch?: string;
netlifySiteId?: string;
}
const logger = createScopedLogger('UserChatHistory');
/**
* Open user-specific database
*/
export async function openUserDatabase(): Promise<IDBDatabase | undefined> {
if (typeof indexedDB === 'undefined') {
console.error('indexedDB is not available in this environment.');
return undefined;
}
const authState = authStore.get();
if (!authState.user?.id) {
console.error('No authenticated user found.');
return undefined;
}
// Use user-specific database name
const dbName = `boltHistory_${authState.user.id}`;
return new Promise((resolve) => {
const request = indexedDB.open(dbName, 1);
request.onupgradeneeded = (event: IDBVersionChangeEvent) => {
const db = (event.target as IDBOpenDBRequest).result;
if (!db.objectStoreNames.contains('chats')) {
const store = db.createObjectStore('chats', { keyPath: 'id' });
store.createIndex('id', 'id', { unique: true });
store.createIndex('urlId', 'urlId', { unique: true });
store.createIndex('userId', 'userId', { unique: false });
store.createIndex('timestamp', 'timestamp', { unique: false });
}
if (!db.objectStoreNames.contains('snapshots')) {
db.createObjectStore('snapshots', { keyPath: 'chatId' });
}
if (!db.objectStoreNames.contains('settings')) {
db.createObjectStore('settings', { keyPath: 'key' });
}
if (!db.objectStoreNames.contains('workspaces')) {
const workspaceStore = db.createObjectStore('workspaces', { keyPath: 'id' });
workspaceStore.createIndex('name', 'name', { unique: false });
workspaceStore.createIndex('createdAt', 'createdAt', { unique: false });
}
};
request.onsuccess = (event: Event) => {
resolve((event.target as IDBOpenDBRequest).result);
};
request.onerror = (event: Event) => {
resolve(undefined);
logger.error((event.target as IDBOpenDBRequest).error);
};
});
}
/**
* Get all chats for current user
*/
export async function getUserChats(db: IDBDatabase): Promise<ChatHistoryItem[]> {
const authState = authStore.get();
if (!authState.user?.id) {
return [];
}
return new Promise((resolve, reject) => {
const transaction = db.transaction('chats', 'readonly');
const store = transaction.objectStore('chats');
const request = store.getAll();
request.onsuccess = () => {
// Sort by timestamp (newest first); the database is already scoped to the current user
const chats = (request.result as ChatHistoryItem[]).sort(
(a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime(),
);
resolve(chats);
};
request.onerror = () => reject(request.error);
});
}
/**
* Save user-specific settings
*/
export async function saveUserSetting(db: IDBDatabase, key: string, value: any): Promise<void> {
return new Promise((resolve, reject) => {
const transaction = db.transaction('settings', 'readwrite');
const store = transaction.objectStore('settings');
const request = store.put({ key, value, updatedAt: new Date().toISOString() });
request.onsuccess = () => resolve();
request.onerror = () => reject(request.error);
});
}
/**
* Load user-specific settings
*/
export async function loadUserSetting(db: IDBDatabase, key: string): Promise<any | null> {
return new Promise((resolve, reject) => {
const transaction = db.transaction('settings', 'readonly');
const store = transaction.objectStore('settings');
const request = store.get(key);
request.onsuccess = () => {
const result = request.result;
resolve(result ? result.value : null);
};
request.onerror = () => reject(request.error);
});
}
/**
* Create a workspace for the user
*/
export interface Workspace {
id: string;
name: string;
description?: string;
createdAt: string;
lastAccessed?: string;
files?: Record<string, any>;
}
export async function createWorkspace(db: IDBDatabase, workspace: Omit<Workspace, 'id'>): Promise<string> {
const authState = authStore.get();
if (!authState.user?.id) {
throw new Error('No authenticated user');
}
const workspaceId = `workspace_${Date.now()}_${Math.random().toString(36).substring(2, 15)}`;
return new Promise((resolve, reject) => {
const transaction = db.transaction('workspaces', 'readwrite');
const store = transaction.objectStore('workspaces');
const fullWorkspace: Workspace = {
id: workspaceId,
...workspace,
};
const request = store.add(fullWorkspace);
request.onsuccess = () => resolve(workspaceId);
request.onerror = () => reject(request.error);
});
}
/**
* Get user workspaces
*/
export async function getUserWorkspaces(db: IDBDatabase): Promise<Workspace[]> {
return new Promise((resolve, reject) => {
const transaction = db.transaction('workspaces', 'readonly');
const store = transaction.objectStore('workspaces');
const request = store.getAll();
request.onsuccess = () => {
const workspaces = (request.result as Workspace[]).sort(
(a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime(),
);
resolve(workspaces);
};
request.onerror = () => reject(request.error);
});
}
/**
* Delete a workspace
*/
export async function deleteWorkspace(db: IDBDatabase, workspaceId: string): Promise<void> {
return new Promise((resolve, reject) => {
const transaction = db.transaction('workspaces', 'readwrite');
const store = transaction.objectStore('workspaces');
const request = store.delete(workspaceId);
request.onsuccess = () => resolve();
request.onerror = () => reject(request.error);
});
}
/**
* Get user statistics
*/
export async function getUserStats(db: IDBDatabase): Promise<{
totalChats: number;
totalWorkspaces: number;
lastActivity?: string;
storageUsed?: number;
}> {
try {
const [chats, workspaces] = await Promise.all([getUserChats(db), getUserWorkspaces(db)]);
// Calculate last activity
let lastActivity: string | undefined;
const allTimestamps = [
...chats.map((c) => c.timestamp),
...workspaces.map((w) => w.lastAccessed || w.createdAt),
].filter(Boolean);
if (allTimestamps.length > 0) {
lastActivity = allTimestamps.sort().reverse()[0];
}
return {
totalChats: chats.length,
totalWorkspaces: workspaces.length,
lastActivity,
};
} catch (error) {
logger.error('Failed to get user stats:', error);
return {
totalChats: 0,
totalWorkspaces: 0,
};
}
}
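
A minimal sketch of how the removed IndexedDB helpers fit together (browser context; assumes a user is already signed in via authStore):

async function showUserDashboard() {
  const db = await openUserDatabase();
  if (!db) {
    return; // indexedDB unavailable or no authenticated user
  }
  // Persist a per-user setting, then read it back.
  await saveUserSetting(db, 'editorFontSize', 14);
  const fontSize = await loadUserSetting(db, 'editorFontSize');
  // Create a workspace and pull aggregate stats for a dashboard view.
  const workspaceId = await createWorkspace(db, {
    name: 'Demo workspace',
    createdAt: new Date().toISOString(),
  });
  const stats = await getUserStats(db);
  console.log({ fontSize, workspaceId, totalChats: stats.totalChats, totalWorkspaces: stats.totalWorkspaces });
}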

View File

@@ -1,300 +0,0 @@
import { atom, map } from 'nanostores';
import type { UserProfile } from '~/lib/utils/fileUserStorage';
import Cookies from 'js-cookie';
export interface AuthState {
isAuthenticated: boolean;
user: Omit<UserProfile, 'passwordHash'> | null;
token: string | null;
loading: boolean;
}
// Authentication state store
export const authStore = map<AuthState>({
isAuthenticated: false,
user: null,
token: null,
loading: true,
});
// Remember me preference
export const rememberMeStore = atom<boolean>(false);
// Session timeout tracking
let sessionTimeout: NodeJS.Timeout | null = null;
const SESSION_TIMEOUT = 7 * 24 * 60 * 60 * 1000; // 7 days
/**
* Initialize auth from stored token
*/
export async function initializeAuth(): Promise<void> {
if (typeof window === 'undefined') {
return;
}
authStore.setKey('loading', true);
try {
const token = Cookies.get('auth_token');
if (token) {
// Verify token with backend
const response = await fetch('/api/auth/verify', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`,
},
});
if (response.ok) {
const data = (await response.json()) as { user: Omit<UserProfile, 'passwordHash'> };
setAuthState({
isAuthenticated: true,
user: data.user,
token,
loading: false,
});
startSessionTimer();
} else {
// Token is invalid, clear it
clearAuth();
}
} else {
authStore.setKey('loading', false);
}
} catch (error) {
console.error('Failed to initialize auth:', error);
authStore.setKey('loading', false);
}
}
/**
* Set authentication state
*/
export function setAuthState(state: AuthState): void {
authStore.set(state);
if (state.token) {
// Store token in cookie
const cookieOptions = rememberMeStore.get()
? { expires: 7 } // 7 days
: undefined; // Session cookie
Cookies.set('auth_token', state.token, cookieOptions);
// Store user preferences in localStorage
if (state.user) {
localStorage.setItem(`bolt_user_${state.user.id}`, JSON.stringify(state.user.preferences || {}));
}
}
}
/**
* Login user
*/
export async function login(
username: string,
password: string,
rememberMe: boolean = false,
): Promise<{ success: boolean; error?: string }> {
try {
const response = await fetch('/api/auth/login', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ username, password }),
});
const data = (await response.json()) as {
success?: boolean;
error?: string;
user?: Omit<UserProfile, 'passwordHash'>;
token?: string;
};
if (response.ok) {
rememberMeStore.set(rememberMe);
setAuthState({
isAuthenticated: true,
user: data.user || null,
token: data.token || null,
loading: false,
});
startSessionTimer();
return { success: true };
} else {
return { success: false, error: data.error || 'Login failed' };
}
} catch (error) {
console.error('Login error:', error);
return { success: false, error: 'Network error' };
}
}
/**
* Signup new user
*/
export async function signup(
username: string,
password: string,
firstName: string,
avatar?: string,
): Promise<{ success: boolean; error?: string }> {
try {
const response = await fetch('/api/auth/signup', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ username, password, firstName, avatar }),
});
const data = (await response.json()) as {
success?: boolean;
error?: string;
user?: Omit<UserProfile, 'passwordHash'>;
token?: string;
};
if (response.ok) {
setAuthState({
isAuthenticated: true,
user: data.user || null,
token: data.token || null,
loading: false,
});
startSessionTimer();
return { success: true };
} else {
return { success: false, error: data.error || 'Signup failed' };
}
} catch (error) {
console.error('Signup error:', error);
return { success: false, error: 'Network error' };
}
}
/**
* Logout user
*/
export async function logout(): Promise<void> {
const state = authStore.get();
if (state.token) {
try {
await fetch('/api/auth/logout', {
method: 'POST',
headers: {
Authorization: `Bearer ${state.token}`,
},
});
} catch (error) {
console.error('Logout error:', error);
}
}
clearAuth();
}
/**
* Clear authentication state
*/
function clearAuth(): void {
authStore.set({
isAuthenticated: false,
user: null,
token: null,
loading: false,
});
Cookies.remove('auth_token');
stopSessionTimer();
// Clear user-specific localStorage
const currentUser = authStore.get().user;
if (currentUser?.id) {
// Keep preferences but clear sensitive data
const prefs = localStorage.getItem(`bolt_user_${currentUser.id}`);
if (prefs) {
try {
const parsed = JSON.parse(prefs);
delete parsed.deploySettings;
delete parsed.githubSettings;
localStorage.setItem(`bolt_user_${currentUser.id}`, JSON.stringify(parsed));
} catch {}
}
}
}
/**
* Start session timer
*/
function startSessionTimer(): void {
stopSessionTimer();
if (!rememberMeStore.get()) {
sessionTimeout = setTimeout(() => {
logout();
if (typeof window !== 'undefined') {
window.location.href = '/auth';
}
}, SESSION_TIMEOUT);
}
}
/**
* Stop session timer
*/
function stopSessionTimer(): void {
if (sessionTimeout) {
clearTimeout(sessionTimeout);
sessionTimeout = null;
}
}
/**
* Update user profile
*/
export async function updateProfile(
updates: Partial<Omit<UserProfile, 'passwordHash' | 'id' | 'username'>>,
): Promise<boolean> {
const state = authStore.get();
if (!state.token || !state.user) {
return false;
}
try {
const response = await fetch('/api/users/profile', {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${state.token}`,
},
body: JSON.stringify(updates),
});
if (response.ok) {
const updatedUser = (await response.json()) as Omit<UserProfile, 'passwordHash'>;
authStore.setKey('user', updatedUser);
return true;
}
} catch (error) {
console.error('Failed to update profile:', error);
}
return false;
}
// Initialize auth on load
if (typeof window !== 'undefined') {
initializeAuth();
}
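
A usage sketch for the removed auth store (illustrative only; the API routes behave as defined above):

// A client-side login handler built on the store's helpers.
async function handleLogin(username: string, password: string, rememberMe: boolean) {
  const result = await login(username, password, rememberMe);
  if (!result.success) {
    console.error(result.error);
    return;
  }
  const { user, isAuthenticated } = authStore.get();
  console.log('logged in as', user?.firstName, isAuthenticated);
  // Later: tweak a preference, then end the session.
  if (user) {
    await updateProfile({ preferences: { ...user.preferences, theme: 'light' } });
  }
  await logout();
}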

View File

@@ -223,13 +223,10 @@ export class WorkbenchStore {
}
async saveFile(filePath: string) {
console.log(`[WorkbenchStore] saveFile called for: ${filePath}`);
const documents = this.#editorStore.documents.get();
const document = documents[filePath];
if (document === undefined) {
console.warn(`[WorkbenchStore] No document found for: ${filePath}`);
return;
}
@@ -239,39 +236,21 @@ export class WorkbenchStore {
* This is a more complex feature that would be implemented in a future update
*/
try {
console.log(`[WorkbenchStore] Saving to file system: ${filePath}`);
await this.#filesStore.saveFile(filePath, document.value);
console.log(`[WorkbenchStore] File saved successfully: ${filePath}`);
await this.#filesStore.saveFile(filePath, document.value);
const newUnsavedFiles = new Set(this.unsavedFiles.get());
const wasUnsaved = newUnsavedFiles.has(filePath);
newUnsavedFiles.delete(filePath);
const newUnsavedFiles = new Set(this.unsavedFiles.get());
newUnsavedFiles.delete(filePath);
console.log(`[WorkbenchStore] Updating unsaved files:`, {
filePath,
wasUnsaved,
previousCount: this.unsavedFiles.get().size,
newCount: newUnsavedFiles.size,
remainingFiles: Array.from(newUnsavedFiles),
});
this.unsavedFiles.set(newUnsavedFiles);
} catch (error) {
console.error(`[WorkbenchStore] Failed to save file ${filePath}:`, error);
throw error;
}
this.unsavedFiles.set(newUnsavedFiles);
}
async saveCurrentDocument() {
const currentDocument = this.currentDocument.get();
if (currentDocument === undefined) {
console.warn('[WorkbenchStore] No current document to save');
return;
}
console.log(`[WorkbenchStore] Saving current document: ${currentDocument.filePath}`);
await this.saveFile(currentDocument.filePath);
}
@@ -293,14 +272,9 @@ export class WorkbenchStore {
}
async saveAllFiles() {
const filesToSave = Array.from(this.unsavedFiles.get());
console.log(`[WorkbenchStore] saveAllFiles called for ${filesToSave.length} files:`, filesToSave);
for (const filePath of filesToSave) {
for (const filePath of this.unsavedFiles.get()) {
await this.saveFile(filePath);
}
console.log('[WorkbenchStore] saveAllFiles complete. Remaining unsaved:', Array.from(this.unsavedFiles.get()));
}
getFileModifcations() {

View File

@@ -1,86 +0,0 @@
import bcrypt from 'bcryptjs';
import jwt from 'jsonwebtoken';
// Use a secure secret key (in production, this should be an environment variable)
const JWT_SECRET = process.env.JWT_SECRET || 'bolt-multi-user-secret-key-2024-secure';
const SALT_ROUNDS = 10;
export interface JWTPayload {
userId: string;
username: string;
firstName: string;
exp?: number;
}
/**
* Hash a password using bcrypt
*/
export async function hashPassword(password: string): Promise<string> {
return bcrypt.hash(password, SALT_ROUNDS);
}
/**
* Verify a password against a hash
*/
export async function verifyPassword(password: string, hash: string): Promise<boolean> {
return bcrypt.compare(password, hash);
}
/**
* Generate a JWT token
*/
export function generateToken(payload: Omit<JWTPayload, 'exp'>): string {
return jwt.sign(
{
...payload,
exp: Math.floor(Date.now() / 1000) + 7 * 24 * 60 * 60, // 7 days
},
JWT_SECRET,
);
}
/**
* Verify and decode a JWT token
*/
export function verifyToken(token: string): JWTPayload | null {
try {
return jwt.verify(token, JWT_SECRET) as JWTPayload;
} catch {
return null;
}
}
/**
* Generate a secure user ID
*/
export function generateUserId(): string {
return `user_${Date.now()}_${Math.random().toString(36).substring(2, 15)}`;
}
/**
* Validate password strength
*/
export function validatePassword(password: string): { valid: boolean; errors: string[] } {
const errors: string[] = [];
if (password.length < 8) {
errors.push('Password must be at least 8 characters long');
}
if (!/[A-Z]/.test(password)) {
errors.push('Password must contain at least one uppercase letter');
}
if (!/[a-z]/.test(password)) {
errors.push('Password must contain at least one lowercase letter');
}
if (!/[0-9]/.test(password)) {
errors.push('Password must contain at least one number');
}
return {
valid: errors.length === 0,
errors,
};
}
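
A short sketch of the removed auth primitives in sequence (the username and names are placeholders):

async function registerAndIssueToken(password: string) {
  // Reject weak passwords before doing any hashing work.
  const { valid, errors } = validatePassword(password);
  if (!valid) {
    throw new Error(errors.join('; '));
  }
  const passwordHash = await hashPassword(password);
  const matches = await verifyPassword(password, passwordHash); // true for the original password
  const token = generateToken({ userId: generateUserId(), username: 'demo', firstName: 'Demo' });
  const payload = verifyToken(token); // JWTPayload with a 7-day exp, or null if the token was tampered with
  return { matches, token, payload };
}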

View File

@@ -1,338 +0,0 @@
import fs from 'fs/promises';
import path from 'path';
import { generateUserId, hashPassword } from './crypto';
const USERS_DIR = path.join(process.cwd(), '.users');
const USERS_INDEX_FILE = path.join(USERS_DIR, 'users.json');
const USER_DATA_DIR = path.join(USERS_DIR, 'data');
export interface UserProfile {
id: string;
username: string;
firstName: string;
passwordHash: string;
avatar?: string;
createdAt: string;
lastLogin?: string;
preferences: UserPreferences;
}
export interface UserPreferences {
theme: 'light' | 'dark';
deploySettings: {
netlify?: any;
vercel?: any;
};
githubSettings?: any;
workspaceConfig: any;
}
export interface SecurityLog {
timestamp: string;
userId?: string;
username?: string;
action: 'login' | 'logout' | 'signup' | 'delete' | 'error' | 'failed_login';
details: string;
ip?: string;
}
/**
* Initialize the user storage system
*/
export async function initializeUserStorage(): Promise<void> {
try {
// Create directories if they don't exist
await fs.mkdir(USERS_DIR, { recursive: true });
await fs.mkdir(USER_DATA_DIR, { recursive: true });
// Create users index if it doesn't exist
try {
await fs.access(USERS_INDEX_FILE);
} catch {
await fs.writeFile(USERS_INDEX_FILE, JSON.stringify({ users: [] }, null, 2));
}
} catch (error) {
console.error('Failed to initialize user storage:', error);
throw error;
}
}
/**
* Get all users (without passwords)
*/
export async function getAllUsers(): Promise<Omit<UserProfile, 'passwordHash'>[]> {
try {
await initializeUserStorage();
const data = await fs.readFile(USERS_INDEX_FILE, 'utf-8');
const { users } = JSON.parse(data) as { users: UserProfile[] };
return users.map(({ passwordHash, ...user }) => user);
} catch (error) {
console.error('Failed to get users:', error);
return [];
}
}
/**
* Get a user by username
*/
export async function getUserByUsername(username: string): Promise<UserProfile | null> {
try {
await initializeUserStorage();
const data = await fs.readFile(USERS_INDEX_FILE, 'utf-8');
const { users } = JSON.parse(data) as { users: UserProfile[] };
return users.find((u) => u.username === username) || null;
} catch (error) {
console.error('Failed to get user:', error);
return null;
}
}
/**
* Get a user by ID
*/
export async function getUserById(id: string): Promise<UserProfile | null> {
try {
await initializeUserStorage();
const data = await fs.readFile(USERS_INDEX_FILE, 'utf-8');
const { users } = JSON.parse(data) as { users: UserProfile[] };
return users.find((u) => u.id === id) || null;
} catch (error) {
console.error('Failed to get user:', error);
return null;
}
}
/**
* Create a new user
*/
export async function createUser(
username: string,
password: string,
firstName: string,
avatar?: string,
): Promise<UserProfile | null> {
try {
await initializeUserStorage();
// Check if username already exists
const existingUser = await getUserByUsername(username);
if (existingUser) {
throw new Error('Username already exists');
}
// Create new user
const newUser: UserProfile = {
id: generateUserId(),
username,
firstName,
passwordHash: await hashPassword(password),
avatar,
createdAt: new Date().toISOString(),
preferences: {
theme: 'dark',
deploySettings: {},
workspaceConfig: {},
},
};
// Load existing users
const data = await fs.readFile(USERS_INDEX_FILE, 'utf-8');
const { users } = JSON.parse(data) as { users: UserProfile[] };
// Add new user
users.push(newUser);
// Save updated users
await fs.writeFile(USERS_INDEX_FILE, JSON.stringify({ users }, null, 2));
// Create user data directory
const userDataDir = path.join(USER_DATA_DIR, newUser.id);
await fs.mkdir(userDataDir, { recursive: true });
// Log the signup
await logSecurityEvent({
timestamp: new Date().toISOString(),
userId: newUser.id,
username: newUser.username,
action: 'signup',
details: `User ${newUser.username} created successfully`,
});
return newUser;
} catch (error) {
console.error('Failed to create user:', error);
await logSecurityEvent({
timestamp: new Date().toISOString(),
action: 'error',
details: `Failed to create user ${username}: ${error}`,
});
throw error;
}
}
/**
* Update user profile
*/
export async function updateUser(userId: string, updates: Partial<UserProfile>): Promise<boolean> {
try {
await initializeUserStorage();
const data = await fs.readFile(USERS_INDEX_FILE, 'utf-8');
const { users } = JSON.parse(data) as { users: UserProfile[] };
const userIndex = users.findIndex((u) => u.id === userId);
if (userIndex === -1) {
return false;
}
// Update user (excluding certain fields)
const { id, username, passwordHash, ...safeUpdates } = updates;
users[userIndex] = {
...users[userIndex],
...safeUpdates,
};
// Save updated users
await fs.writeFile(USERS_INDEX_FILE, JSON.stringify({ users }, null, 2));
return true;
} catch (error) {
console.error('Failed to update user:', error);
return false;
}
}
/**
* Update user's last login time
*/
export async function updateLastLogin(userId: string): Promise<void> {
await updateUser(userId, { lastLogin: new Date().toISOString() });
}
/**
* Delete a user
*/
export async function deleteUser(userId: string): Promise<boolean> {
try {
await initializeUserStorage();
const data = await fs.readFile(USERS_INDEX_FILE, 'utf-8');
const { users } = JSON.parse(data) as { users: UserProfile[] };
const userIndex = users.findIndex((u) => u.id === userId);
if (userIndex === -1) {
return false;
}
const deletedUser = users[userIndex];
// Remove user from list
users.splice(userIndex, 1);
// Save updated users
await fs.writeFile(USERS_INDEX_FILE, JSON.stringify({ users }, null, 2));
// Delete user data directory
const userDataDir = path.join(USER_DATA_DIR, userId);
try {
await fs.rm(userDataDir, { recursive: true, force: true });
} catch (error) {
console.warn(`Failed to delete user data directory: ${error}`);
}
// Log the deletion
await logSecurityEvent({
timestamp: new Date().toISOString(),
userId,
username: deletedUser.username,
action: 'delete',
details: `User ${deletedUser.username} deleted`,
});
return true;
} catch (error) {
console.error('Failed to delete user:', error);
return false;
}
}
/**
* Save user-specific data
*/
export async function saveUserData(userId: string, key: string, data: any): Promise<void> {
try {
const userDataDir = path.join(USER_DATA_DIR, userId);
await fs.mkdir(userDataDir, { recursive: true });
const filePath = path.join(userDataDir, `${key}.json`);
await fs.writeFile(filePath, JSON.stringify(data, null, 2));
} catch (error) {
console.error(`Failed to save user data for ${userId}:`, error);
throw error;
}
}
/**
* Load user-specific data
*/
export async function loadUserData(userId: string, key: string): Promise<any | null> {
try {
const filePath = path.join(USER_DATA_DIR, userId, `${key}.json`);
const data = await fs.readFile(filePath, 'utf-8');
return JSON.parse(data);
} catch {
return null;
}
}
/**
* Log security events
*/
export async function logSecurityEvent(event: SecurityLog): Promise<void> {
try {
const logFile = path.join(USERS_DIR, 'security.log');
const logEntry = `${JSON.stringify(event)}\n`;
await fs.appendFile(logFile, logEntry);
} catch (error) {
console.error('Failed to log security event:', error);
}
}
/**
* Get security logs
*/
export async function getSecurityLogs(limit: number = 100): Promise<SecurityLog[]> {
try {
const logFile = path.join(USERS_DIR, 'security.log');
const data = await fs.readFile(logFile, 'utf-8');
const logs = data
.trim()
.split('\n')
.filter((line) => line)
.map((line) => {
try {
return JSON.parse(line) as SecurityLog;
} catch {
return null;
}
})
.filter(Boolean) as SecurityLog[];
return logs.slice(-limit).reverse();
} catch {
return [];
}
}
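
Finally, a sketch of the file-backed user store in a server-side (Node) context; the credentials below are placeholders:

async function demoUserLifecycle() {
  await initializeUserStorage();
  const user = await createUser('ada', 'S3curePass!', 'Ada');
  if (!user) {
    return;
  }
  await updateLastLogin(user.id);
  // Per-user JSON blobs live under .users/data/<userId>/<key>.json.
  await saveUserData(user.id, 'deploySettings', { netlify: { siteId: 'placeholder-site-id' } });
  const settings = await loadUserData(user.id, 'deploySettings');
  const recentEvents = await getSecurityLogs(10); // newest entries from .users/security.log
  console.log(settings, recentEvents.length);
  await deleteUser(user.id);
}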