feat: initial commit

Dominic Elm
2024-07-10 18:44:39 +02:00
commit 6927c07451
64 changed files with 12330 additions and 0 deletions


@@ -0,0 +1,18 @@
import type { MetaFunction } from '@remix-run/cloudflare';
import { ClientOnly } from 'remix-utils/client-only';
import { BaseChat } from '~/components/chat/BaseChat';
import { Chat } from '~/components/chat/Chat.client';
import { Header } from '~/components/Header';

export const meta: MetaFunction = () => {
  return [{ title: 'Bolt' }, { name: 'description', content: 'Talk with Bolt, an AI assistant from StackBlitz' }];
};

export default function Index() {
  return (
    <div className="flex flex-col h-full w-full">
      <Header />
      <ClientOnly fallback={<BaseChat />}>{() => <Chat />}</ClientOnly>
    </div>
  );
}
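
The index route defers the interactive Chat component to the browser: during server rendering and hydration, ClientOnly renders the static BaseChat fallback, then swaps in the real chat once the page has mounted. A minimal sketch of how such a guard works, assuming the standard mounted-state pattern (remix-utils ships the real implementation):

import { useEffect, useState, type ReactNode } from 'react';

// Sketch of a client-only guard (remix-utils provides the real ClientOnly):
// render the fallback on the server and during hydration, then switch to the
// children render prop once the component has mounted in the browser.
function ClientOnlySketch({ children, fallback }: { children: () => ReactNode; fallback: ReactNode }) {
  const [mounted, setMounted] = useState(false);

  // Effects never run during SSR, so `mounted` stays false on the server.
  useEffect(() => setMounted(true), []);

  return <>{mounted ? children() : fallback}</>;
}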


@@ -0,0 +1,43 @@
import { type ActionFunctionArgs } from '@remix-run/cloudflare';
import { convertToCoreMessages, streamText } from 'ai';
import { getAPIKey } from '~/lib/.server/llm/api-key';
import { getAnthropicModel } from '~/lib/.server/llm/model';
import { systemPrompt } from '~/lib/.server/llm/prompts';

interface ToolResult<Name extends string, Args, Result> {
  toolCallId: string;
  toolName: Name;
  args: Args;
  result: Result;
}

interface Message {
  role: 'user' | 'assistant';
  content: string;
  toolInvocations?: ToolResult<string, unknown, unknown>[];
}

export async function action({ context, request }: ActionFunctionArgs) {
  const { messages } = await request.json<{ messages: Message[] }>();

  try {
    // Stream a completion from Anthropic; tool calls are disabled for this route.
    const result = await streamText({
      model: getAnthropicModel(getAPIKey(context.cloudflare.env)),
      messages: convertToCoreMessages(messages),
      toolChoice: 'none',
      onFinish: ({ finishReason, usage, warnings }) => {
        console.log({ finishReason, usage, warnings });
      },
      system: systemPrompt,
    });

    // Forward the result as a streaming response in the AI SDK's wire format.
    return result.toAIStreamResponse();
  } catch (error) {
    console.log(error);

    throw new Response(null, {
      status: 500,
      statusText: 'Internal Server Error',
    });
  }
}
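
This route imports two server-only helpers that are not part of this hunk. Their shape can be inferred from the call site; the following is a hedged sketch, where the ANTHROPIC_API_KEY binding name and the model id are assumptions rather than code from this commit:

import { createAnthropic } from '@ai-sdk/anthropic';

// Assumed shape of ~/lib/.server/llm/api-key: read the key from the
// Cloudflare environment bindings (the ANTHROPIC_API_KEY name is an assumption).
export function getAPIKey(env: { ANTHROPIC_API_KEY: string }) {
  return env.ANTHROPIC_API_KEY;
}

// Assumed shape of ~/lib/.server/llm/model: wrap the key in an AI SDK
// Anthropic provider (the specific model id is an assumption).
export function getAnthropicModel(apiKey: string) {
  const anthropic = createAnthropic({ apiKey });
  return anthropic('claude-3-5-sonnet-20240620');
}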


@@ -0,0 +1,65 @@
import { type ActionFunctionArgs } from '@remix-run/cloudflare';
import { StreamingTextResponse, convertToCoreMessages, parseStreamPart, streamText } from 'ai';
import { getAPIKey } from '~/lib/.server/llm/api-key';
import { getAnthropicModel } from '~/lib/.server/llm/model';
import { systemPrompt } from '~/lib/.server/llm/prompts';
import { stripIndents } from '~/utils/stripIndent';

const encoder = new TextEncoder();
const decoder = new TextDecoder();

export async function action({ context, request }: ActionFunctionArgs) {
  const { message } = await request.json<{ message: string }>();

  try {
    const result = await streamText({
      model: getAnthropicModel(getAPIKey(context.cloudflare.env)),
      system: systemPrompt,
      messages: convertToCoreMessages([
        {
          role: 'user',
          content: stripIndents`
            I want you to improve the following prompt.

            IMPORTANT: Only respond with the improved prompt and nothing else!

            <original_prompt>
              ${message}
            </original_prompt>
          `,
        },
      ]),
    });

    // Log token usage in development once the stream has finished.
    if (import.meta.env.DEV) {
      result.usage.then((usage) => {
        console.log('Usage', usage);
      });
    }

    // The AI stream is framed in the SDK's wire format (one part per line, e.g. 0:"token").
    // Decode each chunk, parse out the part values, and re-encode them so the
    // client receives a plain text stream instead of the framed protocol.
    const transformStream = new TransformStream({
      transform(chunk, controller) {
        const processedChunk = decoder
          .decode(chunk)
          .split('\n')
          .filter((line) => line !== '')
          .map(parseStreamPart)
          .map((part) => part.value)
          .join('');

        controller.enqueue(encoder.encode(processedChunk));
      },
    });

    const transformedStream = result.toAIStream().pipeThrough(transformStream);

    return new StreamingTextResponse(transformedStream);
  } catch (error) {
    console.log(error);

    throw new Response(null, {
      status: 500,
      statusText: 'Internal Server Error',
    });
  }
}
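
Because the transform strips the AI SDK's stream framing, a client can read the response as plain text with a bare fetch. A usage sketch, noting that the '/api/enhancer' path is an assumption since file names are not shown in this view:

// Read the enhancer's plain-text stream chunk by chunk.
// The '/api/enhancer' route path is an assumption.
async function enhancePrompt(message: string, onText: (text: string) => void) {
  const response = await fetch('/api/enhancer', {
    method: 'POST',
    body: JSON.stringify({ message }),
  });

  if (!response.ok || !response.body) {
    throw new Error('Failed to enhance prompt');
  }

  const reader = response.body.getReader();
  const decoder = new TextDecoder();

  while (true) {
    const { done, value } = await reader.read();

    if (done) {
      break;
    }

    onText(decoder.decode(value, { stream: true }));
  }
}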