From 2e683f112be33ccd0769950d80552755261008a5 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Sat, 9 May 2026 16:12:25 -0400 Subject: [PATCH] =?UTF-8?q?=F0=9F=A6=98=20fix:=20Skip=20OpenAI=20Model=20F?= =?UTF-8?q?etch=20For=20User-Provided=20Keys=20(#13038)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: skip OpenAI model fetch if using user-provided key There was a check present (via `opts.userProvidedOpenAI`), but it wasn't working because `loadDefaultModels()` doesn't provide that parameter. As a result, the server would repeatedly try to request models from OpenAI and get 401 errors in return. We now check the env var directly, which matches how `getAnthropicModels()` works. * chore: remove unused OpenAI model option * fix: honor explicit OpenAI key for model fetch * fix: fall back from empty OpenAI option key --------- Co-authored-by: Dan Lew --- packages/api/src/endpoints/models.spec.ts | 47 ++++++++++++++++++++++- packages/api/src/endpoints/models.ts | 10 +++-- 2 files changed, 52 insertions(+), 5 deletions(-) diff --git a/packages/api/src/endpoints/models.spec.ts b/packages/api/src/endpoints/models.spec.ts index 2838bee293..bb359968ab 100644 --- a/packages/api/src/endpoints/models.spec.ts +++ b/packages/api/src/endpoints/models.spec.ts @@ -1,5 +1,5 @@ import axios from 'axios'; -import { Time, EModelEndpoint, defaultModels } from 'librechat-data-provider'; +import { Time, EModelEndpoint, defaultModels, AuthType } from 'librechat-data-provider'; import { fetchModels, splitAndTrim, @@ -212,6 +212,51 @@ describe('getOpenAIModels', () => { expect(models).toContain('gpt-4'); }); + it('returns default models when OpenAI API key is user provided', async () => { + mockedAxios.get.mockResolvedValue({ data: { data: [{ id: 'should-not-appear' }] } }); + process.env.OPENAI_API_KEY = AuthType.USER_PROVIDED; + + const models = await getOpenAIModels({ user: 'user456' }); + + expect(mockedAxios.get).not.toHaveBeenCalled(); 
+ expect(models).not.toContain('should-not-appear'); + expect(models).toContain('gpt-4'); + }); + + it('fetches models when OpenAI API key is provided through options', async () => { + mockedAxios.get.mockResolvedValue({ data: { data: [{ id: 'gpt-runtime-key' }] } }); + process.env.OPENAI_API_KEY = AuthType.USER_PROVIDED; + + const models = await getOpenAIModels({ user: 'user456', openAIApiKey: 'sk-runtime' }); + + expect(mockedAxios.get).toHaveBeenCalledWith( + expect.stringContaining('https://api.openai.com/v1/models'), + expect.objectContaining({ + headers: expect.objectContaining({ + Authorization: 'Bearer sk-runtime', + }), + }), + ); + expect(models).toEqual(['gpt-runtime-key']); + }); + + it('falls back to environment OpenAI API key when options key is empty', async () => { + mockedAxios.get.mockResolvedValue({ data: { data: [{ id: 'gpt-env-key' }] } }); + process.env.OPENAI_API_KEY = 'sk-env'; + + const models = await getOpenAIModels({ user: 'user456', openAIApiKey: '' }); + + expect(mockedAxios.get).toHaveBeenCalledWith( + expect.stringContaining('https://api.openai.com/v1/models'), + expect.objectContaining({ + headers: expect.objectContaining({ + Authorization: 'Bearer sk-env', + }), + }), + ); + expect(models).toEqual(['gpt-env-key']); + }); + it('returns `AZURE_OPENAI_MODELS` with `azure` flag (and fetch fails)', async () => { process.env.AZURE_OPENAI_MODELS = 'azure-model,azure-model-2'; const models = await getOpenAIModels({ azure: true }); diff --git a/packages/api/src/endpoints/models.ts b/packages/api/src/endpoints/models.ts index fd296e59ec..aac168ad7a 100644 --- a/packages/api/src/endpoints/models.ts +++ b/packages/api/src/endpoints/models.ts @@ -224,12 +224,14 @@ export interface GetOpenAIModelsOptions { assistants?: boolean; /** OpenAI API key (if not using environment variable) */ openAIApiKey?: string; - /** Whether user provides their own API key */ - userProvidedOpenAI?: boolean; /** Skip MODEL_QUERIES cache (e.g., for user-provided keys) 
*/ skipCache?: boolean; } +function resolveOpenAIApiKey(opts: GetOpenAIModelsOptions): string | undefined { + return opts.openAIApiKey || process.env.OPENAI_API_KEY; +} + /** * Fetches models from OpenAI or Azure based on the provided options. * @param opts - Options for fetching models @@ -241,7 +243,7 @@ export async function fetchOpenAIModels( opts: GetOpenAIModelsOptions, _models: string[] = [], ): Promise<string[]> { let models = _models.slice() ?? []; - const apiKey = opts.openAIApiKey ?? process.env.OPENAI_API_KEY; + const apiKey = resolveOpenAIApiKey(opts); const openaiBaseURL = 'https://api.openai.com/v1'; let baseURL = openaiBaseURL; let reverseProxyUrl = process.env.OPENAI_REVERSE_PROXY; @@ -310,7 +312,7 @@ export async function getOpenAIModels(opts: GetOpenAIModelsOptions = {}): Promise<string[]> { return splitAndTrim(process.env[key]); } - if (opts.userProvidedOpenAI) { + if (isUserProvided(resolveOpenAIApiKey(opts))) { return models; }