[bug] Fix Ollama provider request filtering: allow requests restricted to the local network (rename filterPrivate to isInJwtToken)

This commit is contained in:
Sergey Konovalov
2025-07-29 19:45:00 +03:00
parent d09f60912d
commit 3eccabccc1
2 changed files with 5 additions and 5 deletions

View File

@ -106,7 +106,7 @@ const sandbox = {
options.body || null,
timeoutOptions,
null,
false
true // true because the request is limited to the local network
)
.then(async (result) => {
const responseBuffer = await buffer(result.stream);

View File

@ -241,7 +241,7 @@ async function proxyRequest(req, res) {
body: body.data,
timeout: timeoutOptions,
limit: null,
filterPrivate: false
isInJwtToken: providerMatched // true because it passed the provider's filter
};
// Log the sanitized request parameters
@ -256,7 +256,7 @@ async function proxyRequest(req, res) {
requestParams.body, // Request body
requestParams.timeout, // Timeout configuration
requestParams.limit, // Size limit
requestParams.filterPrivate // Filter private requests
requestParams.isInJwtToken // Request passed the provider's filter — presumably marks it as JWT-authorized; confirm against callee
);
// Set the response headers to match the target response
@ -451,9 +451,9 @@ async function requestModels(req, res) {
if (AI.Providers[body.name]) {
AI.Providers[body.name].key = body.key;
AI.Providers[body.name].url = body.url;
}
}
let getRes = await AI.getModels(body);
getRes.modelsApi = AI.TmpProviderForModels?.models;
getRes.modelsApi = AI.TmpProviderForModels?.models;
res.json(getRes);
} catch (error) {
ctx.logger.error('getModels error: %s', error.stack);