feat: add experimental prompt library
This commit is contained in:
@@ -1,6 +1,6 @@
|
||||
import { type ActionFunctionArgs } from '@remix-run/cloudflare';
|
||||
import { MAX_RESPONSE_SEGMENTS, MAX_TOKENS } from '~/lib/.server/llm/constants';
|
||||
import { CONTINUE_PROMPT } from '~/lib/.server/llm/prompts';
|
||||
import { CONTINUE_PROMPT } from '~/lib/common/prompts/prompts';
|
||||
import { streamText, type Messages, type StreamingOptions } from '~/lib/.server/llm/stream-text';
|
||||
import SwitchableStream from '~/lib/.server/llm/switchable-stream';
|
||||
import type { IProviderSetting } from '~/types/model';
|
||||
@@ -30,9 +30,10 @@ function parseCookies(cookieHeader: string) {
|
||||
}
|
||||
|
||||
async function chatAction({ context, request }: ActionFunctionArgs) {
|
||||
const { messages, files } = await request.json<{
|
||||
const { messages, files, promptId } = await request.json<{
|
||||
messages: Messages;
|
||||
files: any;
|
||||
promptId?: string;
|
||||
}>();
|
||||
|
||||
const cookieHeader = request.headers.get('Cookie');
|
||||
@@ -71,6 +72,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
|
||||
apiKeys,
|
||||
files,
|
||||
providerSettings,
|
||||
promptId,
|
||||
});
|
||||
|
||||
return stream.switchSource(result.toAIStream());
|
||||
@@ -84,6 +86,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
|
||||
apiKeys,
|
||||
files,
|
||||
providerSettings,
|
||||
promptId,
|
||||
});
|
||||
|
||||
stream.switchSource(result.toAIStream());
|
||||
|
||||
Reference in New Issue
Block a user