Merge branch 'main' into context-optimization

Author: Anirban Kar
Date:   2024-12-12 02:44:36 +05:30
25 changed files with 657 additions and 571 deletions

@@ -3,6 +3,7 @@ import { MAX_RESPONSE_SEGMENTS, MAX_TOKENS } from '~/lib/.server/llm/constants';
 import { CONTINUE_PROMPT } from '~/lib/.server/llm/prompts';
 import { streamText, type Messages, type StreamingOptions } from '~/lib/.server/llm/stream-text';
 import SwitchableStream from '~/lib/.server/llm/switchable-stream';
+import type { IProviderSetting } from '~/types/model';

 export async function action(args: ActionFunctionArgs) {
   return chatAction(args);
@@ -38,6 +39,9 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
   // Parse the cookie's value (returns an object or null if no cookie exists)
   const apiKeys = JSON.parse(parseCookies(cookieHeader || '').apiKeys || '{}');
+  const providerSettings: Record<string, IProviderSetting> = JSON.parse(
+    parseCookies(cookieHeader || '').providers || '{}',
+  );

   const stream = new SwitchableStream();
@@ -60,13 +64,27 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
       messages.push({ role: 'assistant', content });
       messages.push({ role: 'user', content: CONTINUE_PROMPT });

-      const result = await streamText(messages, context.cloudflare.env, options, apiKeys, files);
+      const result = await streamText({
+        messages,
+        env: context.cloudflare.env,
+        options,
+        apiKeys,
+        files,
+        providerSettings,
+      });

       return stream.switchSource(result.toAIStream());
     },
   };

-  const result = await streamText(messages, context.cloudflare.env, options, apiKeys, files);
+  const result = await streamText({
+    messages,
+    env: context.cloudflare.env,
+    options,
+    apiKeys,
+    files,
+    providerSettings,
+  });

   stream.switchSource(result.toAIStream());
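
Note on the change above: both call sites replace streamText's positional parameter list with a single options object, which is how the newly parsed providerSettings (read from the `providers` cookie in the earlier hunk) reaches the LLM layer alongside messages, env, options, apiKeys, and files. The sketch below illustrates the assumed shape of that object; the interface name StreamTextProps, the CloudflareEnv stand-in, and the concrete types for apiKeys and files are illustrative assumptions rather than the repository's actual definitions.

// Minimal sketch of the object-style streamText signature implied by the diff.
// Only the field names come from the change itself; anything marked "assumed" does not.
import type { Messages, StreamingOptions } from '~/lib/.server/llm/stream-text';
import type { IProviderSetting } from '~/types/model';

type CloudflareEnv = Record<string, unknown>; // assumed stand-in for the context.cloudflare.env bindings

interface StreamTextProps {
  messages: Messages;                                  // chat history forwarded to the provider
  env: CloudflareEnv;                                  // worker bindings (context.cloudflare.env)
  options?: StreamingOptions;                          // streaming hooks such as onFinish
  apiKeys?: Record<string, string>;                    // assumed: parsed from the `apiKeys` cookie
  files?: Record<string, unknown>;                     // assumed: workspace files sent along for context
  providerSettings?: Record<string, IProviderSetting>; // parsed from the `providers` cookie
}

// The diff calls result.toAIStream(), so the return value is assumed to expose that method.
declare function streamText(props: StreamTextProps): Promise<{ toAIStream(): ReadableStream }>;

Grouping the inputs into one object keeps both call sites identical and lets further optional inputs be threaded through without reordering positional arguments.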