fix: updated logger and model caching, minor bugfixes #release (#895)

* fix: updated logger and model caching

* usage token stream issue fix

* minor changes

* updated starter template to fix the app title

* starter template bugfix

* fixed hydration errors and raw logs

* removed raw log

* made auto select template false by default

* cleaner logs and updated logic to call dynamicModels only if the model is not found in static models (see the sketch after this list)

* updated starter template instructions

* browser console logging improved for Firefox

* provider icons fix
Anirban Kar
2024-12-31 22:47:32 +05:30
committed by GitHub
parent 389eedcac4
commit 6494f5ac2e
23 changed files with 478 additions and 567 deletions

@@ -25,6 +25,30 @@ export default class HuggingFaceProvider extends BaseProvider {
       provider: 'HuggingFace',
       maxTokenAllowed: 8000,
     },
+    {
+      name: 'codellama/CodeLlama-34b-Instruct-hf',
+      label: 'CodeLlama-34b-Instruct (HuggingFace)',
+      provider: 'HuggingFace',
+      maxTokenAllowed: 8000,
+    },
+    {
+      name: 'NousResearch/Hermes-3-Llama-3.1-8B',
+      label: 'Hermes-3-Llama-3.1-8B (HuggingFace)',
+      provider: 'HuggingFace',
+      maxTokenAllowed: 8000,
+    },
+    {
+      name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
+      label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)',
+      provider: 'HuggingFace',
+      maxTokenAllowed: 8000,
+    },
+    {
+      name: 'Qwen/Qwen2.5-72B-Instruct',
+      label: 'Qwen2.5-72B-Instruct (HuggingFace)',
+      provider: 'HuggingFace',
+      maxTokenAllowed: 8000,
+    },
     {
       name: 'meta-llama/Llama-3.1-70B-Instruct',
       label: 'Llama-3.1-70B-Instruct (HuggingFace)',
@@ -37,6 +61,24 @@ export default class HuggingFaceProvider extends BaseProvider {
       provider: 'HuggingFace',
       maxTokenAllowed: 8000,
     },
+    {
+      name: '01-ai/Yi-1.5-34B-Chat',
+      label: 'Yi-1.5-34B-Chat (HuggingFace)',
+      provider: 'HuggingFace',
+      maxTokenAllowed: 8000,
+    },
+    {
+      name: 'codellama/CodeLlama-34b-Instruct-hf',
+      label: 'CodeLlama-34b-Instruct (HuggingFace)',
+      provider: 'HuggingFace',
+      maxTokenAllowed: 8000,
+    },
+    {
+      name: 'NousResearch/Hermes-3-Llama-3.1-8B',
+      label: 'Hermes-3-Llama-3.1-8B (HuggingFace)',
+      provider: 'HuggingFace',
+      maxTokenAllowed: 8000,
+    },
   ];

   getModelInstance(options: {