fix: maxCompletionTokens Implementation for All Providers (#1938)
* Update LLM providers and constants

- Updated constants in app/lib/.server/llm/constants.ts
- Modified stream-text functionality in app/lib/.server/llm/stream-text.ts
- Updated Anthropic provider in app/lib/modules/llm/providers/anthropic.ts
- Modified GitHub provider in app/lib/modules/llm/providers/github.ts
- Updated Google provider in app/lib/modules/llm/providers/google.ts
- Modified OpenAI provider in app/lib/modules/llm/providers/openai.ts
- Updated LLM types in app/lib/modules/llm/types.ts
- Modified API route in app/routes/api.llmcall.ts

* Fix maxCompletionTokens Implementation for All Providers

- Cohere: Added maxCompletionTokens: 4000 to all 10 static models
- DeepSeek: Added maxCompletionTokens: 8192 to all 3 static models
- Groq: Added maxCompletionTokens: 8192 to both static models
- Mistral: Added maxCompletionTokens: 8192 to all 9 static models
- Together: Added maxCompletionTokens: 8192 to both static models
- Groq: Fixed getDynamicModels to include maxCompletionTokens: 8192
- Together: Fixed getDynamicModels to include maxCompletionTokens: 8192
- OpenAI: Fixed getDynamicModels with proper logic for reasoning models (o1: 16384, o1-mini: 8192) and standard models
This commit is contained in:
@@ -13,15 +13,69 @@ export default class MistralProvider extends BaseProvider {
   };

   staticModels: ModelInfo[] = [
-    { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral', maxTokenAllowed: 8000 },
-    { name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral', maxTokenAllowed: 8000 },
-    { name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral', maxTokenAllowed: 8000 },
-    { name: 'open-codestral-mamba', label: 'Codestral Mamba', provider: 'Mistral', maxTokenAllowed: 8000 },
-    { name: 'open-mistral-nemo', label: 'Mistral Nemo', provider: 'Mistral', maxTokenAllowed: 8000 },
-    { name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral', maxTokenAllowed: 8000 },
-    { name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral', maxTokenAllowed: 8000 },
-    { name: 'codestral-latest', label: 'Codestral', provider: 'Mistral', maxTokenAllowed: 8000 },
-    { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral', maxTokenAllowed: 8000 },
+    {
+      name: 'open-mistral-7b',
+      label: 'Mistral 7B',
+      provider: 'Mistral',
+      maxTokenAllowed: 8000,
+      maxCompletionTokens: 8192,
+    },
+    {
+      name: 'open-mixtral-8x7b',
+      label: 'Mistral 8x7B',
+      provider: 'Mistral',
+      maxTokenAllowed: 8000,
+      maxCompletionTokens: 8192,
+    },
+    {
+      name: 'open-mixtral-8x22b',
+      label: 'Mistral 8x22B',
+      provider: 'Mistral',
+      maxTokenAllowed: 8000,
+      maxCompletionTokens: 8192,
+    },
+    {
+      name: 'open-codestral-mamba',
+      label: 'Codestral Mamba',
+      provider: 'Mistral',
+      maxTokenAllowed: 8000,
+      maxCompletionTokens: 8192,
+    },
+    {
+      name: 'open-mistral-nemo',
+      label: 'Mistral Nemo',
+      provider: 'Mistral',
+      maxTokenAllowed: 8000,
+      maxCompletionTokens: 8192,
+    },
+    {
+      name: 'ministral-8b-latest',
+      label: 'Mistral 8B',
+      provider: 'Mistral',
+      maxTokenAllowed: 8000,
+      maxCompletionTokens: 8192,
+    },
+    {
+      name: 'mistral-small-latest',
+      label: 'Mistral Small',
+      provider: 'Mistral',
+      maxTokenAllowed: 8000,
+      maxCompletionTokens: 8192,
+    },
+    {
+      name: 'codestral-latest',
+      label: 'Codestral',
+      provider: 'Mistral',
+      maxTokenAllowed: 8000,
+      maxCompletionTokens: 8192,
+    },
+    {
+      name: 'mistral-large-latest',
+      label: 'Mistral Large Latest',
+      provider: 'Mistral',
+      maxTokenAllowed: 8000,
+      maxCompletionTokens: 8192,
+    },
   ];

   getModelInstance(options: {
Reference in New Issue
Block a user