merge with upstream/main
~/lib/.server/llm/api-key.ts
@@ -1,5 +1,7 @@
-// @ts-nocheck
-// Preventing TS checks with files presented in the video for a better presentation.
+/*
+ * @ts-nocheck
+ * Preventing TS checks with files presented in the video for a better presentation.
+ */
 import { env } from 'node:process';
 
 export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Record<string, string>) {
@@ -28,17 +30,19 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Record<string, string>) {
     case 'OpenRouter':
       return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
     case 'Deepseek':
-      return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY
+      return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY;
     case 'Mistral':
       return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
-    case "OpenAILike":
+    case 'OpenAILike':
       return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
-    case "xAI":
+    case 'xAI':
       return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY;
-    case "Cohere":
+    case 'Cohere':
       return env.COHERE_API_KEY;
     case 'AzureOpenAI':
       return env.AZURE_OPENAI_API_KEY;
     default:
-      return "";
+      return '';
   }
 }
@@ -47,14 +51,17 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
     case 'OpenAILike':
       return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
     case 'LMStudio':
-      return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || "http://localhost:1234";
-    case 'Ollama':
-      let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
-      if (env.RUNNING_IN_DOCKER === 'true') {
-        baseUrl = baseUrl.replace("localhost", "host.docker.internal");
-      }
-      return baseUrl;
+      return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
+    case 'Ollama': {
+      let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || 'http://localhost:11434';
+
+      if (env.RUNNING_IN_DOCKER === 'true') {
+        baseUrl = baseUrl.replace('localhost', 'host.docker.internal');
+      }
+
+      return baseUrl;
+    }
     default:
-      return "";
+      return '';
   }
 }
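Reviewer note on the file above: every provider branch resolves its key as `env.X || cloudflareEnv.X` — local process environment first, then the Cloudflare Workers binding. The `userApiKeys` parameter in the signature suggests user-supplied keys are checked before this switch (that part of the function sits outside the hunk). A minimal sketch of the full chain under that assumption — `resolveKey` and its parameters are illustrative, not code from this diff:

```ts
// Sketch: the fallback chain this file repeats once per provider.
// Assumes userApiKeys is keyed the same way as the env var; the real
// function hard-codes one case per provider and may key by provider name.
function resolveKey(
  name: string,
  cloudflareEnv: Record<string, string | undefined>,
  userApiKeys?: Record<string, string>,
): string | undefined {
  // `||` (not `??`) mirrors the diff: an empty-string key falls through.
  return userApiKeys?.[name] || process.env[name] || cloudflareEnv[name];
}
```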
~/lib/.server/llm/model.ts
@@ -1,27 +1,29 @@
-// @ts-nocheck
-// Preventing TS checks with files presented in the video for a better presentation.
+/*
+ * @ts-nocheck
+ * Preventing TS checks with files presented in the video for a better presentation.
+ */
 import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
 import { createAnthropic } from '@ai-sdk/anthropic';
 import { createOpenAI } from '@ai-sdk/openai';
 import { createGoogleGenerativeAI } from '@ai-sdk/google';
 import { ollama } from 'ollama-ai-provider';
-import { createOpenRouter } from "@openrouter/ai-sdk-provider";
+import { createOpenRouter } from '@openrouter/ai-sdk-provider';
 import { createMistral } from '@ai-sdk/mistral';
-import { createCohere } from '@ai-sdk/cohere'
+import { createCohere } from '@ai-sdk/cohere';
+import type { LanguageModelV1 } from 'ai';
 
-export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ?
-  parseInt(process.env.DEFAULT_NUM_CTX, 10) :
-  32768;
+export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ? parseInt(process.env.DEFAULT_NUM_CTX, 10) : 32768;
 
-export function getAnthropicModel(apiKey: string, model: string) {
+type OptionalApiKey = string | undefined;
+
+export function getAnthropicModel(apiKey: OptionalApiKey, model: string) {
   const anthropic = createAnthropic({
     apiKey,
   });
 
   return anthropic(model);
 }
 
-export function getOpenAILikeModel(baseURL: string, apiKey: string, model: string) {
+export function getOpenAILikeModel(baseURL: string, apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL,
     apiKey,
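Two details worth checking in the hunk above. The collapsed DEFAULT_NUM_CTX keeps the explicit radix-10 parseInt, but a non-numeric value (e.g. `DEFAULT_NUM_CTX=abc`) yields NaN rather than falling back to 32768 — the ternary only guards the unset/empty case. The new OptionalApiKey alias is sound because the AI SDK's create* factories accept an optional apiKey and fall back to their own environment lookups when it is undefined. A standalone illustration of the parse behaviour (not code from this diff):

```ts
// How the DEFAULT_NUM_CTX expression behaves for different env values.
const samples: Record<string, string | undefined> = {
  unset: undefined,
  valid: '24576',
  junk: 'abc',
};

for (const [label, value] of Object.entries(samples)) {
  const numCtx = value ? parseInt(value, 10) : 32768;
  console.log(label, numCtx); // unset → 32768, valid → 24576, junk → NaN
}
```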
@@ -30,7 +32,7 @@ export function getOpenAILikeModel(baseURL: string, apiKey: string, model: string) {
   return openai(model);
 }
 
-export function getCohereAIModel(apiKey:string, model: string){
+export function getCohereAIModel(apiKey: OptionalApiKey, model: string) {
   const cohere = createCohere({
     apiKey,
   });
@@ -38,7 +40,7 @@ export function getCohereAIModel(apiKey:string, model: string){
   return cohere(model);
 }
 
-export function getOpenAIModel(apiKey: string, model: string) {
+export function getOpenAIModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     apiKey,
   });
@@ -46,15 +48,15 @@ export function getOpenAIModel(apiKey: string, model: string) {
   return openai(model);
 }
 
-export function getMistralModel(apiKey: string, model: string) {
+export function getMistralModel(apiKey: OptionalApiKey, model: string) {
   const mistral = createMistral({
-    apiKey
+    apiKey,
   });
 
   return mistral(model);
 }
 
-export function getGoogleModel(apiKey: string, model: string) {
+export function getGoogleModel(apiKey: OptionalApiKey, model: string) {
   const google = createGoogleGenerativeAI({
     apiKey,
   });
@@ -62,7 +64,7 @@ export function getGoogleModel(apiKey: string, model: string) {
   return google(model);
 }
 
-export function getGroqModel(apiKey: string, model: string) {
+export function getGroqModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.groq.com/openai/v1',
     apiKey,
@@ -71,7 +73,7 @@ export function getGroqModel(apiKey: string, model: string) {
   return openai(model);
 }
 
-export function getHuggingFaceModel(apiKey: string, model: string) {
+export function getHuggingFaceModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api-inference.huggingface.co/v1/',
     apiKey,
@@ -81,15 +83,16 @@ export function getHuggingFaceModel(apiKey: string, model: string) {
 }
 
 export function getOllamaModel(baseURL: string, model: string) {
-  let Ollama = ollama(model, {
+  const ollamaInstance = ollama(model, {
     numCtx: DEFAULT_NUM_CTX,
-  });
-
-  Ollama.config.baseURL = `${baseURL}/api`;
-  return Ollama;
+  }) as LanguageModelV1 & { config: any };
+
+  ollamaInstance.config.baseURL = `${baseURL}/api`;
+
+  return ollamaInstance;
 }
 
-export function getDeepseekModel(apiKey: string, model: string) {
+export function getDeepseekModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.deepseek.com/beta',
     apiKey,
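The getOllamaModel change is the only one in this file that is more than cosmetic: the instance is widened to `LanguageModelV1 & { config: any }` so the code can overwrite the provider's private `config.baseURL` after construction. It works, but it depends on an internal field name that could change. A possible alternative, sketched under the assumption that the installed ollama-ai-provider version exports createOllama (it does in current releases) — a suggestion, not part of the diff:

```ts
import { createOllama } from 'ollama-ai-provider';

// Bind the base URL up front instead of patching the instance afterwards.
function getOllamaModelAlt(baseURL: string, model: string) {
  const provider = createOllama({ baseURL: `${baseURL}/api` });
  return provider(model, { numCtx: DEFAULT_NUM_CTX });
}
```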
@@ -98,9 +101,9 @@ export function getDeepseekModel(apiKey: string, model: string) {
   return openai(model);
 }
 
-export function getOpenRouterModel(apiKey: string, model: string) {
+export function getOpenRouterModel(apiKey: OptionalApiKey, model: string) {
   const openRouter = createOpenRouter({
-    apiKey
+    apiKey,
   });
 
   return openRouter.chat(model);
@@ -109,13 +112,13 @@ export function getOpenRouterModel(apiKey: string, model: string) {
 export function getLMStudioModel(baseURL: string, model: string) {
   const lmstudio = createOpenAI({
     baseUrl: `${baseURL}/v1`,
-    apiKey: "",
+    apiKey: '',
   });
 
   return lmstudio(model);
 }
 
-export function getXAIModel(apiKey: string, model: string) {
+export function getXAIModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.x.ai/v1',
     apiKey,
@@ -125,11 +128,13 @@ export function getXAIModel(apiKey: string, model: string) {
 }
 
 export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
-  let apiKey; // Declare first
-  let baseURL;
+  /*
+   * let apiKey; // Declare first
+   * let baseURL;
+   */
 
-  apiKey = getAPIKey(env, provider, apiKeys); // Then assign
-  baseURL = getBaseURL(env, provider);
+  const apiKey = getAPIKey(env, provider, apiKeys); // Then assign
+  const baseURL = getBaseURL(env, provider);
 
   switch (provider) {
     case 'Anthropic':
@@ -159,4 +164,4 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
     default:
       return getOllamaModel(baseURL, model);
   }
-}
+}
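One pre-existing issue this formatting pass leaves alone: getLMStudioModel passes `baseUrl` (lowercase "u") to createOpenAI, whose documented option is `baseURL`; depending on the @ai-sdk/openai version that is either a deprecated alias or silently ignored — worth a follow-up. For orientation, a hedged usage sketch of the refactored entry point (the model name and key are illustrative, and keying apiKeys by provider name is an assumption drawn from getAPIKey's signature):

```ts
import { getModel } from '~/lib/.server/llm/model';

declare const env: Env; // the Cloudflare binding type used throughout

// Resolve a provider-specific LanguageModelV1; keys fall back from
// user-supplied values to process/Cloudflare env (see api-key.ts).
const model = getModel('Anthropic', 'claude-3-5-sonnet-latest', env, {
  Anthropic: 'sk-ant-example', // hypothetical user-supplied key
});
```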
~/lib/.server/llm/stream-text.ts
@@ -1,10 +1,11 @@
-// @ts-nocheck
-// Preventing TS checks with files presented in the video for a better presentation.
-import { streamText as _streamText, convertToCoreMessages } from 'ai';
+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-nocheck – TODO: Provider proper types
+
+import { convertToCoreMessages, streamText as _streamText } from 'ai';
 import { getModel } from '~/lib/.server/llm/model';
 import { MAX_TOKENS } from './constants';
 import { getSystemPrompt } from './prompts';
-import { MODEL_LIST, DEFAULT_MODEL, DEFAULT_PROVIDER, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
+import { DEFAULT_MODEL, DEFAULT_PROVIDER, MODEL_LIST, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
 
 interface ToolResult<Name extends string, Args, Result> {
   toolCallId: string;
@@ -26,41 +27,41 @@ export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
 
 function extractPropertiesFromMessage(message: Message): { model: string; provider: string; content: string } {
   const textContent = Array.isArray(message.content)
-    ? message.content.find(item => item.type === 'text')?.text || ''
+    ? message.content.find((item) => item.type === 'text')?.text || ''
     : message.content;
 
   const modelMatch = textContent.match(MODEL_REGEX);
   const providerMatch = textContent.match(PROVIDER_REGEX);
 
-  // Extract model
-  // const modelMatch = message.content.match(MODEL_REGEX);
+  /*
+   * Extract model
+   * const modelMatch = message.content.match(MODEL_REGEX);
+   */
   const model = modelMatch ? modelMatch[1] : DEFAULT_MODEL;
 
-  // Extract provider
-  // const providerMatch = message.content.match(PROVIDER_REGEX);
+  /*
+   * Extract provider
+   * const providerMatch = message.content.match(PROVIDER_REGEX);
+   */
   const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER;
 
   const cleanedContent = Array.isArray(message.content)
-    ? message.content.map(item => {
-        if (item.type === 'text') {
-          return {
-            type: 'text',
-            text: item.text?.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '')
-          };
-        }
-        return item; // Preserve image_url and other types as is
-      })
+    ? message.content.map((item) => {
+        if (item.type === 'text') {
+          return {
+            type: 'text',
+            text: item.text?.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, ''),
+          };
+        }
+
+        return item; // Preserve image_url and other types as is
+      })
     : textContent.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '');
 
   return { model, provider, content: cleanedContent };
 }
 
-export function streamText(
-  messages: Messages,
-  env: Env,
-  options?: StreamingOptions,
-  apiKeys?: Record<string, string>
-) {
+export function streamText(messages: Messages, env: Env, options?: StreamingOptions, apiKeys?: Record<string, string>) {
   let currentModel = DEFAULT_MODEL;
   let currentProvider = DEFAULT_PROVIDER;
 
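Context for extractPropertiesFromMessage: the client prefixes the first user message with model/provider markers, which MODEL_REGEX and PROVIDER_REGEX capture and then strip. The exact patterns live in ~/utils/constants; the shapes below are an assumption that matches the `[Model: …]` / `[Provider: …]` convention the function implies:

```ts
// Assumed regex shapes — verify against ~/utils/constants.
const MODEL_REGEX = /\[Model: (.*?)\]\n\n/;
const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;

const raw = '[Model: qwen2.5-coder]\n\n[Provider: Ollama]\n\nBuild a todo app';

console.log(raw.match(MODEL_REGEX)?.[1]); // "qwen2.5-coder"
console.log(raw.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, ''));
// "Build a todo app"
```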
@@ -76,15 +77,13 @@ export function streamText(
 
       return { ...message, content };
     }
 
     return message;
   });
 
   const modelDetails = MODEL_LIST.find((m) => m.name === currentModel);
 
-  const dynamicMaxTokens =
-    modelDetails && modelDetails.maxTokenAllowed
-      ? modelDetails.maxTokenAllowed
-      : MAX_TOKENS;
+  const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;
 
   return _streamText({
     ...options,
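The collapsed dynamicMaxTokens line is semantically unchanged: use the model's maxTokenAllowed from MODEL_LIST when truthy, otherwise MAX_TOKENS. An equivalent, slightly tighter form if a further cleanup is wanted (a suggestion, not part of this diff):

```ts
// Optional chaining + `||` preserves the current truthiness behaviour
// (a maxTokenAllowed of 0 would still fall back to MAX_TOKENS).
const dynamicMaxTokens = modelDetails?.maxTokenAllowed || MAX_TOKENS;
```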