diff --git a/packages/api/src/endpoints/models.spec.ts b/packages/api/src/endpoints/models.spec.ts
index 2838bee293..bb359968ab 100644
--- a/packages/api/src/endpoints/models.spec.ts
+++ b/packages/api/src/endpoints/models.spec.ts
@@ -1,5 +1,5 @@
 import axios from 'axios';
-import { Time, EModelEndpoint, defaultModels } from 'librechat-data-provider';
+import { Time, EModelEndpoint, defaultModels, AuthType } from 'librechat-data-provider';
 import {
   fetchModels,
   splitAndTrim,
@@ -212,6 +212,51 @@ describe('getOpenAIModels', () => {
     expect(models).toContain('gpt-4');
   });
+
+  it('returns default models when OpenAI API key is user provided', async () => {
+    mockedAxios.get.mockResolvedValue({ data: { data: [{ id: 'should-not-appear' }] } });
+    process.env.OPENAI_API_KEY = AuthType.USER_PROVIDED;
+
+    const models = await getOpenAIModels({ user: 'user456' });
+
+    expect(mockedAxios.get).not.toHaveBeenCalled();
+    expect(models).not.toContain('should-not-appear');
+    expect(models).toContain('gpt-4');
+  });
+
+  it('fetches models when OpenAI API key is provided through options', async () => {
+    mockedAxios.get.mockResolvedValue({ data: { data: [{ id: 'gpt-runtime-key' }] } });
+    process.env.OPENAI_API_KEY = AuthType.USER_PROVIDED;
+
+    const models = await getOpenAIModels({ user: 'user456', openAIApiKey: 'sk-runtime' });
+
+    expect(mockedAxios.get).toHaveBeenCalledWith(
+      expect.stringContaining('https://api.openai.com/v1/models'),
+      expect.objectContaining({
+        headers: expect.objectContaining({
+          Authorization: 'Bearer sk-runtime',
+        }),
+      }),
+    );
+    expect(models).toEqual(['gpt-runtime-key']);
+  });
+
+  it('falls back to environment OpenAI API key when options key is empty', async () => {
+    mockedAxios.get.mockResolvedValue({ data: { data: [{ id: 'gpt-env-key' }] } });
+    process.env.OPENAI_API_KEY = 'sk-env';
+
+    const models = await getOpenAIModels({ user: 'user456', openAIApiKey: '' });
+
+    expect(mockedAxios.get).toHaveBeenCalledWith(
+      expect.stringContaining('https://api.openai.com/v1/models'),
+      expect.objectContaining({
+        headers: expect.objectContaining({
+          Authorization: 'Bearer sk-env',
+        }),
+      }),
+    );
+    expect(models).toEqual(['gpt-env-key']);
+  });
 
   it('returns `AZURE_OPENAI_MODELS` with `azure` flag (and fetch fails)', async () => {
     process.env.AZURE_OPENAI_MODELS = 'azure-model,azure-model-2';
     const models = await getOpenAIModels({ azure: true });
diff --git a/packages/api/src/endpoints/models.ts b/packages/api/src/endpoints/models.ts
index fd296e59ec..aac168ad7a 100644
--- a/packages/api/src/endpoints/models.ts
+++ b/packages/api/src/endpoints/models.ts
@@ -224,12 +224,14 @@ export interface GetOpenAIModelsOptions {
   assistants?: boolean;
   /** OpenAI API key (if not using environment variable) */
   openAIApiKey?: string;
-  /** Whether user provides their own API key */
-  userProvidedOpenAI?: boolean;
   /** Skip MODEL_QUERIES cache (e.g., for user-provided keys) */
   skipCache?: boolean;
 }
 
+function resolveOpenAIApiKey(opts: GetOpenAIModelsOptions): string | undefined {
+  return opts.openAIApiKey || process.env.OPENAI_API_KEY;
+}
+
 /**
  * Fetches models from OpenAI or Azure based on the provided options.
  * @param opts - Options for fetching models
@@ -241,7 +243,7 @@ export async function fetchOpenAIModels(
   _models: string[] = [],
 ): Promise<string[]> {
   let models = _models.slice() ?? [];
-  const apiKey = opts.openAIApiKey ?? process.env.OPENAI_API_KEY;
+  const apiKey = resolveOpenAIApiKey(opts);
   const openaiBaseURL = 'https://api.openai.com/v1';
   let baseURL = openaiBaseURL;
   let reverseProxyUrl = process.env.OPENAI_REVERSE_PROXY;
@@ -310,7 +312,7 @@ export async function getOpenAIModels(opts: GetOpenAIModelsOptions = {}): Promis
     return splitAndTrim(process.env[key]);
   }
 
-  if (opts.userProvidedOpenAI) {
+  if (isUserProvided(resolveOpenAIApiKey(opts))) {
     return models;
   }