* fix: update Docker workflow to use correct target stage name

  - Change target from bolt-ai-production to runtime
  - Matches the actual stage name in the new Dockerfile structure
  - Fixes CI failure: target stage 'bolt-ai-production' could not be found

* fix: resolve critical Docker configuration issues

  This commit fixes multiple critical Docker configuration issues that prevented successful builds:

  **Dockerfile Issues Fixed:**

  - Replace the incomplete runtime stage with a proper production stage using Wrangler
  - Add missing environment variables for all API providers (DeepSeek, LMStudio, Mistral, Perplexity, OpenAI-like)
  - Use the correct port (5173) instead of 3000 to match the Wrangler configuration
  - Add proper bindings.sh script copying and execution permissions
  - Configure Wrangler metrics and the proper startup command

  **Docker Compose Issues Fixed:**

  - Add missing `context` and `dockerfile` fields to the app-dev service
  - Fix target name from `bolt-ai-development` to `development`

  **Package.json Issues Fixed:**

  - Update the dockerbuild script to use the correct target name `development`

  **Testing:**

  - ✅ Both `pnpm run dockerbuild` and `pnpm run dockerbuild:prod` now work
  - ✅ All environment variables properly configured
  - ✅ Docker images build successfully with proper Wrangler integration

  Resolves Docker build failures and enables proper containerized deployment.

  🤖 Generated with [Claude Code](https://claude.ai/code)

  Co-Authored-By: Claude <noreply@anthropic.com>

* Update Dockerfile

* fix: update GitHub workflow Docker targets to match Dockerfile stage names

  Update the ci.yaml and docker.yaml workflows to use the correct Docker target stage name 'bolt-ai-production' instead of 'runtime'.

  🤖 Generated with [Claude Code](https://claude.com/claude-code)

  Co-Authored-By: Claude <noreply@anthropic.com>

* Refactor Dockerfile for optimized production build

  Adds git installation for build/runtime scripts and introduces a separate prod-deps stage to prune dependencies before the final production image. Updates file copy sources to use the prod-deps stage, improving build efficiency and reducing image size.

---------

Co-authored-by: Claude <noreply@anthropic.com>
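For context, a minimal, hypothetical sketch of the multi-stage layout those commits describe. The stage names (`prod-deps`, `bolt-ai-production`, `development`), the git installation, the `bindings.sh` permissions, and port 5173 come from the messages above; the base image, build script, and the `WRANGLER_SEND_METRICS` variable are assumptions, not the repository's actual Dockerfile:

```dockerfile
# Sketch only: stage names follow the commit messages; contents are assumptions.
FROM node:20-slim AS base
RUN apt-get update && apt-get install -y --no-install-recommends git \
    && rm -rf /var/lib/apt/lists/*           # git is used by build/runtime scripts
RUN corepack enable                          # pnpm via corepack (assumed setup)
WORKDIR /app
COPY package.json pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile
COPY . .
RUN pnpm run build                           # assumed build script name

FROM base AS prod-deps                       # prunes dev dependencies before the final image
RUN pnpm prune --prod

FROM base AS bolt-ai-production              # target referenced by CI and docker-compose
COPY --from=prod-deps /app/node_modules ./node_modules
RUN chmod +x bindings.sh                     # bindings.sh must be executable
ENV PORT=5173 WRANGLER_SEND_METRICS=true     # metrics variable name is an assumption
EXPOSE 5173                                  # 5173 matches the Wrangler configuration
CMD ["pnpm", "run", "dockerstart"]

FROM base AS development                     # target referenced by app-dev and `dockerbuild`
ENV PORT=5173
EXPOSE 5173
CMD ["pnpm", "run", "dev", "--host", "0.0.0.0"]
```

With stage names like these, `docker build --target bolt-ai-production .` and the compose `target:` fields below resolve without the "target stage could not be found" error.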
services:
  app-prod:
    image: bolt-ai:production
    build:
      context: .
      dockerfile: Dockerfile
      target: bolt-ai-production
    ports:
      - '5173:5173'
    env_file:
      - '.env'
      - '.env.local'
    environment:
      - NODE_ENV=production
      - COMPOSE_PROFILES=production
      # Not strictly needed, but serves as a hint for Coolify
      - PORT=5173
      - GROQ_API_KEY=${GROQ_API_KEY}
      - HuggingFace_API_KEY=${HuggingFace_API_KEY}
      - OPENAI_API_KEY=${OPENAI_API_KEY}
      - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
      - OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY}
      - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
      - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
      - XAI_API_KEY=${XAI_API_KEY}
      - TOGETHER_API_KEY=${TOGETHER_API_KEY}
      - TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL}
      - AWS_BEDROCK_CONFIG=${AWS_BEDROCK_CONFIG}
      - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
      - DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
      - RUNNING_IN_DOCKER=true
    extra_hosts:
      - 'host.docker.internal:host-gateway'
    command: pnpm run dockerstart
    profiles:
      - production

  app-dev:
    image: bolt-ai:development
    build:
      context: .
      dockerfile: Dockerfile
      target: development
    env_file:
      - '.env'
      - '.env.local'
    environment:
      - NODE_ENV=development
      - VITE_HMR_PROTOCOL=ws
      - VITE_HMR_HOST=localhost
      - VITE_HMR_PORT=5173
      - CHOKIDAR_USEPOLLING=true
      - WATCHPACK_POLLING=true
      - PORT=5173
      - GROQ_API_KEY=${GROQ_API_KEY}
      - HuggingFace_API_KEY=${HuggingFace_API_KEY}
      - OPENAI_API_KEY=${OPENAI_API_KEY}
      - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
      - OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY}
      - XAI_API_KEY=${XAI_API_KEY}
      - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
      - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
      - TOGETHER_API_KEY=${TOGETHER_API_KEY}
      - TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL}
      - AWS_BEDROCK_CONFIG=${AWS_BEDROCK_CONFIG}
      - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
      - DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
      - RUNNING_IN_DOCKER=true
    extra_hosts:
      - 'host.docker.internal:host-gateway'
    volumes:
      - type: bind
        source: .
        target: /app
        consistency: cached
      - /app/node_modules
    ports:
      - '5173:5173'
    command: pnpm run dev --host 0.0.0.0
    profiles: ['development', 'default']

  app-prebuild:
    image: ghcr.io/stackblitz-labs/bolt.diy:latest
    ports:
      - '5173:5173'
    environment:
      - NODE_ENV=production
      - COMPOSE_PROFILES=production
      # Not strictly needed, but serves as a hint for Coolify
      - PORT=5173
      - OLLAMA_API_BASE_URL=http://127.0.0.1:11434
      - DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
      - RUNNING_IN_DOCKER=true
    extra_hosts:
      - 'host.docker.internal:host-gateway'
    command: pnpm run dockerstart
    profiles:
      - prebuilt