feat: add first version of workbench, increase token limit, improve system prompt
@@ -1,9 +1,9 @@
 import { json, redirect, type LoaderFunctionArgs, type MetaFunction } from '@remix-run/cloudflare';
 import { ClientOnly } from 'remix-utils/client-only';
-import { BaseChat } from '~/components/chat/BaseChat';
-import { Chat } from '~/components/chat/Chat.client';
-import { Header } from '~/components/Header';
-import { isAuthenticated } from '~/lib/.server/sessions';
+import { BaseChat } from '../components/chat/BaseChat';
+import { Chat } from '../components/chat/Chat.client';
+import { Header } from '../components/Header';
+import { isAuthenticated } from '../lib/.server/sessions';

 export const meta: MetaFunction = () => {
   return [{ title: 'Bolt' }, { name: 'description', content: 'Talk with Bolt, an AI assistant from StackBlitz' }];

@@ -1,36 +1,11 @@
 import { type ActionFunctionArgs } from '@remix-run/cloudflare';
-import { convertToCoreMessages, streamText } from 'ai';
-import { getAPIKey } from '~/lib/.server/llm/api-key';
-import { getAnthropicModel } from '~/lib/.server/llm/model';
-import { systemPrompt } from '~/lib/.server/llm/prompts';
-
-interface ToolResult<Name extends string, Args, Result> {
-  toolCallId: string;
-  toolName: Name;
-  args: Args;
-  result: Result;
-}
-
-interface Message {
-  role: 'user' | 'assistant';
-  content: string;
-  toolInvocations?: ToolResult<string, unknown, unknown>[];
-}
+import { streamText, type Messages } from '../lib/.server/llm/stream-text';

 export async function action({ context, request }: ActionFunctionArgs) {
-  const { messages } = await request.json<{ messages: Message[] }>();
+  const { messages } = await request.json<{ messages: Messages }>();

   try {
-    const result = await streamText({
-      model: getAnthropicModel(getAPIKey(context.cloudflare.env)),
-      messages: convertToCoreMessages(messages),
-      toolChoice: 'none',
-      onFinish: ({ finishReason, usage, warnings }) => {
-        console.log({ finishReason, usage, warnings });
-      },
-      system: systemPrompt,
-    });
-
+    const result = await streamText(messages, context.cloudflare.env, { toolChoice: 'none' });
     return result.toAIStreamResponse();
   } catch (error) {
     console.log(error);

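Note: the new '../lib/.server/llm/stream-text' module referenced above is not part of this excerpt. Judging from the code removed from the routes, it presumably bundles the Anthropic model, API-key lookup, system prompt, and the raised token limit into one helper. The sketch below is an assumption along those lines, not the file from this commit; in particular MAX_TOKENS, StreamingOptions, and the Env binding type are hypothetical names.

// Hedged sketch of lib/.server/llm/stream-text.ts (the module itself is not shown in this diff).
import { convertToCoreMessages, streamText as _streamText, type Message } from 'ai';

import { getAPIKey } from './api-key';
import { getAnthropicModel } from './model';
import { systemPrompt } from './prompts';

export type Messages = Message[];

// Everything the underlying `ai` streamText accepts except what the wrapper fills in itself.
export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model' | 'system' | 'messages'>;

// Hypothetical value; the commit message only says the token limit was increased.
const MAX_TOKENS = 8192;

// `env` is the Cloudflare bindings object the routes pass in as `context.cloudflare.env`.
export function streamText(messages: Messages, env: Env, options?: StreamingOptions) {
  return _streamText({
    model: getAnthropicModel(getAPIKey(env)),
    system: systemPrompt,
    maxTokens: MAX_TOKENS,
    messages: convertToCoreMessages(messages),
    ...options,
  });
}
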
@@ -1,9 +1,7 @@
 import { type ActionFunctionArgs } from '@remix-run/cloudflare';
-import { StreamingTextResponse, convertToCoreMessages, parseStreamPart, streamText } from 'ai';
-import { getAPIKey } from '~/lib/.server/llm/api-key';
-import { getAnthropicModel } from '~/lib/.server/llm/model';
-import { systemPrompt } from '~/lib/.server/llm/prompts';
-import { stripIndents } from '~/utils/stripIndent';
+import { StreamingTextResponse, parseStreamPart } from 'ai';
+import { streamText } from '../lib/.server/llm/stream-text';
+import { stripIndents } from '../utils/stripIndent';

 const encoder = new TextEncoder();
 const decoder = new TextDecoder();
@@ -12,30 +10,23 @@ export async function action({ context, request }: ActionFunctionArgs) {
   const { message } = await request.json<{ message: string }>();

   try {
-    const result = await streamText({
-      model: getAnthropicModel(getAPIKey(context.cloudflare.env)),
-      system: systemPrompt,
-      messages: convertToCoreMessages([
+    const result = await streamText(
+      [
         {
           role: 'user',
           content: stripIndents`
             I want you to improve the user prompt that is wrapped in \`<original_prompt>\` tags.

             IMPORTANT: Only respond with the improved prompt and nothing else!

             <original_prompt>
               ${message}
             </original_prompt>
           `,
         },
-      ]),
-    });
-
-    if (import.meta.env.DEV) {
-      result.usage.then((usage) => {
-        console.log('Usage', usage);
-      });
-    }
+      ],
+      context.cloudflare.env,
+    );

     const transformStream = new TransformStream({
       transform(chunk, controller) {

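Note: this hunk is cut off inside the TransformStream. The imports kept by the refactor (StreamingTextResponse, parseStreamPart, and the encoder/decoder pair) suggest the rest of the handler unwraps the AI data-stream frames into plain text before responding. The continuation below is a sketch under that assumption, including the assumption that the wrapper's result still exposes the AI SDK's toAIStream(); it is not code shown in the diff.

// Hedged continuation of the enhancer action from the point where the hunk ends.
const transformStream = new TransformStream({
  transform(chunk, controller) {
    // Decode the raw chunk, drop empty lines, parse each AI stream part, and keep only the text values.
    const processedChunk = decoder
      .decode(chunk)
      .split('\n')
      .filter((line) => line !== '')
      .map(parseStreamPart)
      .map((part) => part.value)
      .join('');

    controller.enqueue(encoder.encode(processedChunk));
  },
});

// Re-stream the model output as plain text instead of the framed data stream.
const transformedStream = result.toAIStream().pipeThrough(transformStream);

return new StreamingTextResponse(transformedStream);
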
@@ -6,8 +6,8 @@ import {
   type TypedResponse,
 } from '@remix-run/cloudflare';
 import { Form, useActionData } from '@remix-run/react';
-import { verifyPassword } from '~/lib/.server/login';
-import { createUserSession, isAuthenticated } from '~/lib/.server/sessions';
+import { verifyPassword } from '../lib/.server/login';
+import { createUserSession, isAuthenticated } from '../lib/.server/sessions';

 interface Errors {
   password?: string;