Fix: Ollama chat cannot access remote deployment (#9816)

### What problem does this PR solve?

Fixes Ollama chat being able to reach only a localhost instance, so remote Ollama deployments now work. Resolves #9806.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
Commit: fcd18d7d87 (parent fe9adbf0a5)
Author: Yongteng Lei, 2025-08-29 13:35:41 +08:00 (committed by GitHub)
2 changed files with 2 additions and 1 deletion


@@ -43,6 +43,7 @@ FACTORY_DEFAULT_BASE_URL = {
     SupportedLiteLLMProvider.Tongyi_Qianwen: "https://dashscope.aliyuncs.com/compatible-mode/v1",
     SupportedLiteLLMProvider.Dashscope: "https://dashscope.aliyuncs.com/compatible-mode/v1",
     SupportedLiteLLMProvider.Moonshot: "https://api.moonshot.cn/v1",
+    SupportedLiteLLMProvider.Ollama: "",
 }
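
Why the empty string matters: `LiteLLMBase` (second hunk below) resolves the endpoint as `base_url or FACTORY_DEFAULT_BASE_URL.get(self.provider, "")`, so an empty factory default guarantees that the user-configured URL, including a remote Ollama deployment, is never overridden by a hard-coded host. A minimal sketch of that fallback; `pick_base_url` is a hypothetical helper for illustration, not code from the repo:

```python
# Sketch of the fallback behavior in LiteLLMBase.__init__ (pick_base_url is hypothetical).
FACTORY_DEFAULT_BASE_URL = {
    "Moonshot": "https://api.moonshot.cn/v1",
    "Ollama": "",  # empty on purpose: Ollama has no hosted endpoint to fall back to
}

def pick_base_url(provider: str, user_url: str | None) -> str:
    # Mirrors `base_url or FACTORY_DEFAULT_BASE_URL.get(provider, "")`:
    # a user-supplied URL always wins; the factory default is only a fallback.
    return user_url or FACTORY_DEFAULT_BASE_URL.get(provider, "")

assert pick_base_url("Ollama", "http://192.168.1.20:11434") == "http://192.168.1.20:11434"
assert pick_base_url("Ollama", None) == ""  # empty string: let the client decide
```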


@@ -1362,7 +1362,7 @@ class LiteLLMBase(ABC):
         self.prefix = LITELLM_PROVIDER_PREFIX.get(self.provider, "")
         self.model_name = f"{self.prefix}{model_name}"
         self.api_key = key
-        self.base_url = base_url or FACTORY_DEFAULT_BASE_URL.get(self.provider, "")
+        self.base_url = (base_url or FACTORY_DEFAULT_BASE_URL.get(self.provider, "")).rstrip('/')
         # Configure retry parameters
         self.max_retries = kwargs.get("max_retries", int(os.environ.get("LLM_MAX_RETRIES", 5)))
         self.base_delay = kwargs.get("retry_interval", float(os.environ.get("LLM_BASE_DELAY", 2.0)))
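
The added `.rstrip('/')` is what makes remote URLs reliable: users often paste a base URL with a trailing slash, and concatenating a path onto it then produces a double slash that some servers treat as a different route. A small self-contained illustration of the failure mode and the fix, assuming the base URL is later combined with request paths via f-strings (the hostname is a made-up example):

```python
# Illustration only: a user-entered base URL with a trailing slash.
user_input = "http://ollama.internal:11434/"

# Without normalization, path concatenation produces a malformed URL.
assert f"{user_input}/api/chat" == "http://ollama.internal:11434//api/chat"

# With the normalization added in this commit, the join is clean.
base_url = user_input.rstrip("/")
assert f"{base_url}/api/chat" == "http://ollama.internal:11434/api/chat"
```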