* Update LLM providers and constants
  - Updated constants in app/lib/.server/llm/constants.ts
  - Modified stream-text functionality in app/lib/.server/llm/stream-text.ts
  - Updated Anthropic provider in app/lib/modules/llm/providers/anthropic.ts
  - Modified GitHub provider in app/lib/modules/llm/providers/github.ts
  - Updated Google provider in app/lib/modules/llm/providers/google.ts
  - Modified OpenAI provider in app/lib/modules/llm/providers/openai.ts
  - Updated LLM types in app/lib/modules/llm/types.ts
  - Modified API route in app/routes/api.llmcall.ts
* Fix maxCompletionTokens implementation for all providers
  - Cohere: Added maxCompletionTokens: 4000 to all 10 static models
  - DeepSeek: Added maxCompletionTokens: 8192 to all 3 static models
  - Groq: Added maxCompletionTokens: 8192 to both static models
  - Mistral: Added maxCompletionTokens: 8192 to all 9 static models
  - Together: Added maxCompletionTokens: 8192 to both static models
  - Groq: Fixed getDynamicModels to include maxCompletionTokens: 8192
  - Together: Fixed getDynamicModels to include maxCompletionTokens: 8192
  - OpenAI: Fixed getDynamicModels with proper logic for reasoning models (o1: 16384, o1-mini: 8192) and standard models
99 lines
2.9 KiB
TypeScript
import { BaseProvider, getOpenAILikeModel } from '~/lib/modules/llm/base-provider';
|
|
import type { ModelInfo } from '~/lib/modules/llm/types';
|
|
import type { IProviderSetting } from '~/types/model';
|
|
import type { LanguageModelV1 } from 'ai';
|
|
|
|
export default class TogetherProvider extends BaseProvider {
|
|
name = 'Together';
|
|
getApiKeyLink = 'https://api.together.xyz/settings/api-keys';
|
|
|
|
config = {
|
|
baseUrlKey: 'TOGETHER_API_BASE_URL',
|
|
apiTokenKey: 'TOGETHER_API_KEY',
|
|
};
|
|
|
|
staticModels: ModelInfo[] = [
|
|
/*
|
|
* Essential fallback models - only the most stable/reliable ones
|
|
* Llama 3.2 90B Vision: 128k context, multimodal capabilities
|
|
*/
|
|
{
|
|
name: 'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
|
|
label: 'Llama 3.2 90B Vision',
|
|
provider: 'Together',
|
|
maxTokenAllowed: 128000,
|
|
maxCompletionTokens: 8192,
|
|
},
|
|
|
|
// Mixtral 8x7B: 32k context, strong performance
|
|
{
|
|
name: 'mistralai/Mixtral-8x7B-Instruct-v0.1',
|
|
label: 'Mixtral 8x7B Instruct',
|
|
provider: 'Together',
|
|
maxTokenAllowed: 32000,
|
|
maxCompletionTokens: 8192,
|
|
},
|
|
];
|
|
|
|
async getDynamicModels(
|
|
apiKeys?: Record<string, string>,
|
|
settings?: IProviderSetting,
|
|
serverEnv: Record<string, string> = {},
|
|
): Promise<ModelInfo[]> {
|
|
const { baseUrl: fetchBaseUrl, apiKey } = this.getProviderBaseUrlAndKey({
|
|
apiKeys,
|
|
providerSettings: settings,
|
|
serverEnv,
|
|
defaultBaseUrlKey: 'TOGETHER_API_BASE_URL',
|
|
defaultApiTokenKey: 'TOGETHER_API_KEY',
|
|
});
|
|
const baseUrl = fetchBaseUrl || 'https://api.together.xyz/v1';
|
|
|
|
if (!baseUrl || !apiKey) {
|
|
return [];
|
|
}
|
|
|
|
// console.log({ baseUrl, apiKey });
|
|
|
|
const response = await fetch(`${baseUrl}/models`, {
|
|
headers: {
|
|
Authorization: `Bearer ${apiKey}`,
|
|
},
|
|
});
|
|
|
|
const res = (await response.json()) as any;
|
|
const data = (res || []).filter((model: any) => model.type === 'chat');
|
|
|
|
return data.map((m: any) => ({
|
|
name: m.id,
|
|
label: `${m.display_name} - in:$${m.pricing.input.toFixed(2)} out:$${m.pricing.output.toFixed(2)} - context ${Math.floor(m.context_length / 1000)}k`,
|
|
provider: this.name,
|
|
maxTokenAllowed: 8000,
|
|
maxCompletionTokens: 8192,
|
|
}));
|
|
}
|
|
|
|
getModelInstance(options: {
|
|
model: string;
|
|
serverEnv: Env;
|
|
apiKeys?: Record<string, string>;
|
|
providerSettings?: Record<string, IProviderSetting>;
|
|
}): LanguageModelV1 {
|
|
const { model, serverEnv, apiKeys, providerSettings } = options;
|
|
|
|
const { baseUrl, apiKey } = this.getProviderBaseUrlAndKey({
|
|
apiKeys,
|
|
providerSettings: providerSettings?.[this.name],
|
|
serverEnv: serverEnv as any,
|
|
defaultBaseUrlKey: 'TOGETHER_API_BASE_URL',
|
|
defaultApiTokenKey: 'TOGETHER_API_KEY',
|
|
});
|
|
|
|
if (!baseUrl || !apiKey) {
|
|
throw new Error(`Missing configuration for ${this.name} provider`);
|
|
}
|
|
|
|
return getOpenAILikeModel(baseUrl, apiKey, model);
|
|
}
|
|
}
|