LibreChat/api/server/utils/import/defaults.spec.js
Danny Avila 65990a33e9
📥 fix: Resolve Imported-Conversation Default Model From Runtime modelsConfig (#12885)
* 📥 fix: Use Endpoint-Aware Default Model on Imported Conversations

Claude conversations imported from claude.ai's data export display
"gpt-4o-mini" in the chat UI until the page is refreshed, and any
attempt to send a message before refreshing fails with "The model
'gpt-4o-mini' is not available for Anthropic."

Root cause: ImportBatchBuilder.finishConversation() unconditionally
defaulted the saved conversation's `model` field to
openAISettings.model.default, regardless of `this.endpoint`. Claude
exports don't carry a model name, so every imported Claude conversation
landed with endpoint=anthropic but model=gpt-4o-mini.

Fix: pick the default based on `this.endpoint` via a small lookup
(openAI -> gpt-4o-mini, anthropic -> claude-3-5-sonnet-latest), keeping
the existing OpenAI default as the fallback for unknown endpoints.

Fixes #12844
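
A minimal sketch of that interim lookup, assuming `openAISettings` and
`anthropicSettings` from `librechat-data-provider` carry those defaults
(the helper name and surrounding wiring are illustrative, not the exact diff):

    const { EModelEndpoint, openAISettings, anthropicSettings } = require('librechat-data-provider');

    // Interim fix: endpoint-aware default instead of unconditionally using
    // the OpenAI default for every imported conversation.
    const DEFAULT_MODEL_BY_ENDPOINT = {
      [EModelEndpoint.openAI]: openAISettings.model.default,
      [EModelEndpoint.anthropic]: anthropicSettings.model.default,
    };

    // before: model ?? openAISettings.model.default  (ignored this.endpoint)
    // after:  model ?? pickDefaultModel(this.endpoint)
    const pickDefaultModel = (endpoint) =>
      DEFAULT_MODEL_BY_ENDPOINT[endpoint] ?? openAISettings.model.default;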

* 🪄 refactor: Resolve Import Default Model From `modelsConfig`

Replace the hardcoded per-endpoint default lookup added in the previous
commit with a runtime resolver that consults the same models config the
chat UI uses (`getModelsConfig` in ModelController -> `loadDefaultModels`
+ `loadConfigModels`). This way, an imported conversation defaults to a
model the LibreChat instance has actually configured or discovered for
the endpoint, instead of a hardcoded constant that may not exist on this
deployment.

Resolution order:
1. First non-empty model in `modelsConfig[endpoint]`.
2. Per-endpoint hardcoded fallback (anthropic/openAI settings) if the
   runtime config is empty for the endpoint or `getModelsConfig` throws.
3. `openAISettings.model.default` if even the per-endpoint fallback is
   missing (unknown endpoint).
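
A sketch of a resolver that would follow this order and satisfy the spec
below; the exported names (`pickFirstConfiguredModel`,
`resolveImportDefaultModel`, `FALLBACK_MODEL_BY_ENDPOINT`) come from the
spec file, while the internals and the tenant lookup are approximations
inferred from its mocks:

    const { EModelEndpoint, openAISettings, anthropicSettings } = require('librechat-data-provider');
    const { getTenantId, logger } = require('@librechat/data-schemas');
    const { getModelsConfig } = require('~/server/controllers/ModelController');

    const FALLBACK_MODEL_BY_ENDPOINT = {
      [EModelEndpoint.openAI]: openAISettings.model.default,
      [EModelEndpoint.anthropic]: anthropicSettings.model.default,
    };

    /** Step 1: first non-empty model string configured for the endpoint. */
    function pickFirstConfiguredModel(endpoint, modelsConfig) {
      const models = modelsConfig?.[endpoint];
      if (!Array.isArray(models)) {
        return undefined;
      }
      return models.find((model) => typeof model === 'string' && model.length > 0);
    }

    /** Steps 1-3: runtime models config, then per-endpoint fallback, then the openAI default. */
    async function resolveImportDefaultModel({ endpoint, requestUserId, userRole }) {
      let configuredModel;
      try {
        const modelsConfig = await getModelsConfig({
          user: { id: requestUserId, role: userRole, tenantId: getTenantId() },
        });
        configuredModel = pickFirstConfiguredModel(endpoint, modelsConfig);
      } catch (error) {
        logger.warn('Failed to resolve import default model from modelsConfig', error);
      }
      return configuredModel ?? FALLBACK_MODEL_BY_ENDPOINT[endpoint] ?? openAISettings.model.default;
    }

    module.exports = { pickFirstConfiguredModel, resolveImportDefaultModel, FALLBACK_MODEL_BY_ENDPOINT };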

`importBatchBuilder.finishConversation` now accepts an optional
`defaultModel` argument; each importer resolves it once at the top via
`resolveImportDefaultModel({ endpoint, requestUserId, userRole })` and
threads it through. ChatGPT message-level model selection also falls
back to the resolved default before the hardcoded gpt-4o-mini.
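
For illustration, a hypothetical Claude importer call site under those
assumptions (builder method names and the position of the `defaultModel`
argument are simplified, not the exact importer code):

    async function importClaudeConversation({ title, messages, requestUserId, userRole, builder }) {
      // Resolve once per import, then thread through to finishConversation.
      const defaultModel = await resolveImportDefaultModel({
        endpoint: EModelEndpoint.anthropic,
        requestUserId,
        userRole,
      });

      // ... builder.saveMessage(...) for each entry in `messages` ...

      // Claude exports carry no model name, so the resolved default fills it in.
      return builder.finishConversation(title, new Date(), defaultModel);
    }
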
2026-04-30 00:43:04 -04:00


const { EModelEndpoint, openAISettings, anthropicSettings } = require('librechat-data-provider');

const mockGetModelsConfig = jest.fn();
jest.mock('~/server/controllers/ModelController', () => ({
  getModelsConfig: (...args) => mockGetModelsConfig(...args),
}));

jest.mock('@librechat/data-schemas', () => {
  const actual = jest.requireActual('@librechat/data-schemas');
  return {
    ...actual,
    getTenantId: () => 'test-tenant',
    logger: { warn: jest.fn(), error: jest.fn(), info: jest.fn(), debug: jest.fn() },
  };
});

const {
  pickFirstConfiguredModel,
  resolveImportDefaultModel,
  FALLBACK_MODEL_BY_ENDPOINT,
} = require('./defaults');

afterEach(() => {
  jest.clearAllMocks();
});

describe('pickFirstConfiguredModel', () => {
  it('returns the first non-empty string for the endpoint', () => {
    const modelsConfig = {
      [EModelEndpoint.anthropic]: ['claude-opus-4-7', 'claude-3-5-sonnet-latest'],
    };
    expect(pickFirstConfiguredModel(EModelEndpoint.anthropic, modelsConfig)).toBe(
      'claude-opus-4-7',
    );
  });

  it('skips empty strings', () => {
    const modelsConfig = {
      [EModelEndpoint.openAI]: ['', 'gpt-4o'],
    };
    expect(pickFirstConfiguredModel(EModelEndpoint.openAI, modelsConfig)).toBe('gpt-4o');
  });

  it('returns undefined when modelsConfig is missing', () => {
    expect(pickFirstConfiguredModel(EModelEndpoint.anthropic, undefined)).toBeUndefined();
  });

  it('returns undefined when the endpoint has no models', () => {
    expect(pickFirstConfiguredModel(EModelEndpoint.anthropic, {})).toBeUndefined();
    expect(
      pickFirstConfiguredModel(EModelEndpoint.anthropic, { [EModelEndpoint.anthropic]: [] }),
    ).toBeUndefined();
  });

  it('returns undefined when the endpoint value is not an array', () => {
    expect(
      pickFirstConfiguredModel(EModelEndpoint.anthropic, {
        [EModelEndpoint.anthropic]: 'claude-opus-4-7',
      }),
    ).toBeUndefined();
  });
});

describe('resolveImportDefaultModel', () => {
  it('returns the first model from modelsConfig when present', async () => {
    mockGetModelsConfig.mockResolvedValueOnce({
      [EModelEndpoint.anthropic]: ['claude-opus-4-7'],
    });
    const result = await resolveImportDefaultModel({
      endpoint: EModelEndpoint.anthropic,
      requestUserId: 'user-1',
      userRole: 'USER',
    });
    expect(result).toBe('claude-opus-4-7');
    expect(mockGetModelsConfig).toHaveBeenCalledWith({
      user: { id: 'user-1', role: 'USER', tenantId: 'test-tenant' },
    });
  });

  it('falls back to the per-endpoint default when modelsConfig has no models for the endpoint', async () => {
    mockGetModelsConfig.mockResolvedValueOnce({});
    const result = await resolveImportDefaultModel({
      endpoint: EModelEndpoint.anthropic,
      requestUserId: 'user-1',
    });
    expect(result).toBe(anthropicSettings.model.default);
  });

  it('falls back to the openAI default for unknown endpoints with no modelsConfig entry', async () => {
    mockGetModelsConfig.mockResolvedValueOnce({});
    const result = await resolveImportDefaultModel({
      endpoint: 'some-custom-endpoint',
      requestUserId: 'user-1',
    });
    expect(result).toBe(openAISettings.model.default);
  });

  it('falls back to the per-endpoint default when getModelsConfig rejects', async () => {
    mockGetModelsConfig.mockRejectedValueOnce(new Error('boom'));
    const result = await resolveImportDefaultModel({
      endpoint: EModelEndpoint.anthropic,
      requestUserId: 'user-1',
    });
    expect(result).toBe(anthropicSettings.model.default);
  });

  it('exposes hardcoded fallbacks for openAI and anthropic', () => {
    expect(FALLBACK_MODEL_BY_ENDPOINT[EModelEndpoint.openAI]).toBe(openAISettings.model.default);
    expect(FALLBACK_MODEL_BY_ENDPOINT[EModelEndpoint.anthropic]).toBe(
      anthropicSettings.model.default,
    );
  });
});