merge with upstream/main
@@ -1,27 +1,29 @@
-// @ts-nocheck
-// Preventing TS checks with files presented in the video for a better presentation.
+/*
+ * @ts-nocheck
+ * Preventing TS checks with files presented in the video for a better presentation.
+ */
 import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
 import { createAnthropic } from '@ai-sdk/anthropic';
 import { createOpenAI } from '@ai-sdk/openai';
 import { createGoogleGenerativeAI } from '@ai-sdk/google';
 import { ollama } from 'ollama-ai-provider';
-import { createOpenRouter } from "@openrouter/ai-sdk-provider";
+import { createOpenRouter } from '@openrouter/ai-sdk-provider';
 import { createMistral } from '@ai-sdk/mistral';
-import { createCohere } from '@ai-sdk/cohere'
+import { createCohere } from '@ai-sdk/cohere';
+import type { LanguageModelV1 } from 'ai';
 
-export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ?
-  parseInt(process.env.DEFAULT_NUM_CTX, 10) :
-  32768;
+export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ? parseInt(process.env.DEFAULT_NUM_CTX, 10) : 32768;
 
-export function getAnthropicModel(apiKey: string, model: string) {
+type OptionalApiKey = string | undefined;
+
+export function getAnthropicModel(apiKey: OptionalApiKey, model: string) {
   const anthropic = createAnthropic({
     apiKey,
   });
 
   return anthropic(model);
 }
-
-export function getOpenAILikeModel(baseURL: string, apiKey: string, model: string) {
+export function getOpenAILikeModel(baseURL: string, apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL,
     apiKey,
@@ -30,7 +32,7 @@ export function getOpenAILikeModel(baseURL: string, apiKey: string, model: strin
   return openai(model);
 }
 
-export function getCohereAIModel(apiKey:string, model: string){
+export function getCohereAIModel(apiKey: OptionalApiKey, model: string) {
   const cohere = createCohere({
     apiKey,
   });
@@ -38,7 +40,7 @@ export function getCohereAIModel(apiKey:string, model: string){
   return cohere(model);
 }
 
-export function getOpenAIModel(apiKey: string, model: string) {
+export function getOpenAIModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     apiKey,
   });
@@ -46,15 +48,15 @@ export function getOpenAIModel(apiKey: string, model: string) {
   return openai(model);
 }
 
-export function getMistralModel(apiKey: string, model: string) {
+export function getMistralModel(apiKey: OptionalApiKey, model: string) {
   const mistral = createMistral({
-    apiKey
+    apiKey,
   });
 
   return mistral(model);
 }
 
-export function getGoogleModel(apiKey: string, model: string) {
+export function getGoogleModel(apiKey: OptionalApiKey, model: string) {
   const google = createGoogleGenerativeAI({
     apiKey,
   });
@@ -62,7 +64,7 @@ export function getGoogleModel(apiKey: string, model: string) {
   return google(model);
 }
 
-export function getGroqModel(apiKey: string, model: string) {
+export function getGroqModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.groq.com/openai/v1',
     apiKey,
@@ -71,7 +73,7 @@ export function getGroqModel(apiKey: string, model: string) {
   return openai(model);
 }
 
-export function getHuggingFaceModel(apiKey: string, model: string) {
+export function getHuggingFaceModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api-inference.huggingface.co/v1/',
     apiKey,
@@ -81,15 +83,16 @@ export function getHuggingFaceModel(apiKey: string, model: string) {
 }
 
 export function getOllamaModel(baseURL: string, model: string) {
-  let Ollama = ollama(model, {
+  const ollamaInstance = ollama(model, {
     numCtx: DEFAULT_NUM_CTX,
-  });
+  }) as LanguageModelV1 & { config: any };
 
-  Ollama.config.baseURL = `${baseURL}/api`;
-  return Ollama;
+  ollamaInstance.config.baseURL = `${baseURL}/api`;
+
+  return ollamaInstance;
 }
 
-export function getDeepseekModel(apiKey: string, model: string) {
+export function getDeepseekModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.deepseek.com/beta',
     apiKey,
@@ -98,9 +101,9 @@ export function getDeepseekModel(apiKey: string, model: string) {
   return openai(model);
 }
 
-export function getOpenRouterModel(apiKey: string, model: string) {
+export function getOpenRouterModel(apiKey: OptionalApiKey, model: string) {
   const openRouter = createOpenRouter({
-    apiKey
+    apiKey,
   });
 
   return openRouter.chat(model);
@@ -109,13 +112,13 @@ export function getOpenRouterModel(apiKey: string, model: string) {
 export function getLMStudioModel(baseURL: string, model: string) {
   const lmstudio = createOpenAI({
     baseUrl: `${baseURL}/v1`,
-    apiKey: "",
+    apiKey: '',
   });
 
   return lmstudio(model);
 }
 
-export function getXAIModel(apiKey: string, model: string) {
+export function getXAIModel(apiKey: OptionalApiKey, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.x.ai/v1',
     apiKey,
@@ -125,11 +128,13 @@ export function getXAIModel(apiKey: string, model: string) {
 }
 
 export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
-  let apiKey; // Declare first
-  let baseURL;
+  /*
+   * let apiKey; // Declare first
+   * let baseURL;
+   */
 
-  apiKey = getAPIKey(env, provider, apiKeys); // Then assign
-  baseURL = getBaseURL(env, provider);
+  const apiKey = getAPIKey(env, provider, apiKeys); // Then assign
+  const baseURL = getBaseURL(env, provider);
 
   switch (provider) {
     case 'Anthropic':
@@ -159,4 +164,4 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
     default:
      return getOllamaModel(baseURL, model);
   }
-}
+}
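The sketch below is not part of the commit; it only illustrates how the refactored helpers might be exercised after this change. The import path, the Env binding, and the provider/model/base-URL values are assumptions for illustration, not values taken from the diff.

// Usage sketch only; import path, Env shape, and provider/model/base-URL values are assumptions.
import { DEFAULT_NUM_CTX, getModel, getOllamaModel } from '~/lib/.server/llm/model';

declare const env: Env; // same Env bindings type that getModel receives

// API keys may be absent; every provider helper now accepts OptionalApiKey (string | undefined).
const apiKeys: Record<string, string> = {};

// getModel resolves the key and base URL via getAPIKey/getBaseURL, then dispatches on the
// provider name; unrecognized providers fall through to getOllamaModel via the switch default.
const claude = getModel('Anthropic', 'claude-3-5-sonnet-latest', env, apiKeys);

// Ollama models are created with numCtx set to DEFAULT_NUM_CTX
// (32768 unless the DEFAULT_NUM_CTX environment variable overrides it).
const local = getOllamaModel('http://localhost:11434', 'qwen2.5-coder');

console.log(DEFAULT_NUM_CTX, claude.modelId, local.modelId);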