Making Ollama work within the Docker container, very important fix
@@ -47,9 +47,25 @@ const staticModels: ModelInfo[] = [
 export let MODEL_LIST: ModelInfo[] = [...staticModels];
 
+const getOllamaBaseUrl = () => {
+  const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
+  // Check if we're in the browser
+  if (typeof window !== 'undefined') {
+    // Frontend always uses localhost
+    return defaultBaseUrl;
+  }
+
+  // Backend: Check if we're running in Docker
+  const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+
+  return isDocker
+    ? defaultBaseUrl.replace("localhost", "host.docker.internal")
+    : defaultBaseUrl;
+};
+
 async function getOllamaModels(): Promise<ModelInfo[]> {
   try {
-    const base_url = import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434";
+    const base_url = getOllamaBaseUrl();
     const response = await fetch(`${base_url}/api/tags`);
     const data = await response.json() as OllamaApiResponse;
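
The rewrite only fires when the backend sees RUNNING_IN_DOCKER=true, so that variable has to be set in the container's environment (for example with -e RUNNING_IN_DOCKER=true on docker run, or under environment: in a compose file). Below is a minimal TypeScript sketch, not part of this commit, of how the new logic resolves in each environment; the branching is reimplemented with explicit inputs so it runs standalone under Node, and resolveOllamaBaseUrl, env, and inBrowser are illustrative names, not the app's API.

    type Env = { OLLAMA_API_BASE_URL?: string; RUNNING_IN_DOCKER?: string };

    function resolveOllamaBaseUrl(env: Env, inBrowser: boolean): string {
      const defaultBaseUrl = env.OLLAMA_API_BASE_URL || 'http://localhost:11434';

      // The browser reaches Ollama on the host directly.
      if (inBrowser) {
        return defaultBaseUrl;
      }

      // Inside a container, localhost is the container itself, so the
      // backend swaps in Docker's alias for the host machine.
      return env.RUNNING_IN_DOCKER === 'true'
        ? defaultBaseUrl.replace('localhost', 'host.docker.internal')
        : defaultBaseUrl;
    }

    console.log(resolveOllamaBaseUrl({}, true));  // http://localhost:11434
    console.log(resolveOllamaBaseUrl({}, false)); // http://localhost:11434
    console.log(resolveOllamaBaseUrl({ RUNNING_IN_DOCKER: 'true' }, false));
    // http://host.docker.internal:11434

Note that host.docker.internal resolves to the host machine out of the box on Docker Desktop (macOS/Windows); on plain Linux Docker it typically needs --add-host=host.docker.internal:host-gateway (or an equivalent extra_hosts entry) before the rewritten URL can reach Ollama on the host.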
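
The hunk cuts off after the response.json() cast, so for context here is a hedged sketch of the shape that call consumes: Ollama's /api/tags endpoint returns a JSON object with a models array. The OllamaModel fields and the mapping into the app's model list below are assumptions for illustration, not the repository's actual OllamaApiResponse/ModelInfo definitions.

    // Assumed response shape for GET /api/tags; real fields may differ.
    interface OllamaModel {
      name: string;   // e.g. "llama3:latest"
      size: number;   // bytes on disk
      digest: string; // content hash of the model
    }

    interface OllamaApiResponse {
      models: OllamaModel[];
    }

    // Hypothetical continuation of getOllamaModels(): turn each tag into
    // an entry the UI can list. The returned shape is a guess at ModelInfo.
    async function listOllamaModels(baseUrl: string) {
      const response = await fetch(`${baseUrl}/api/tags`);
      const data = (await response.json()) as OllamaApiResponse;

      return data.models.map((model) => ({
        name: model.name,
        label: model.name,
        provider: 'Ollama',
      }));
    }

Combined with the base-URL resolution above, this is enough to smoke-test the fix from inside the container: calling the sketch's listOllamaModels with the resolved URL should return the host's pulled models instead of failing with a connection refused on localhost.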