mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-05-13 16:07:30 +00:00
🦘 fix: Skip OpenAI Model Fetch For User-Provided Keys (#13038)
* fix: skip OpenAI model fetch if using user-provided key. There was a check present (via `opts.userProvidedOpenAI`), but it wasn't working because `loadDefaultModels()` doesn't provide that parameter. As a result, the server would repeatedly try to request models from OpenAI and get 401 errors in return. We now check the env var directly, which matches how `getAnthropicModels()` works. * chore: remove unused OpenAI model option * fix: honor explicit OpenAI key for model fetch * fix: fall back from empty OpenAI option key --------- Co-authored-by: Dan Lew <daniel@mightyacorn.com>
This commit is contained in:
parent
80ce956c94
commit
2e683f112b
2 changed files with 52 additions and 5 deletions
|
|
@ -1,5 +1,5 @@
|
|||
import axios from 'axios';
|
||||
import { Time, EModelEndpoint, defaultModels } from 'librechat-data-provider';
|
||||
import { Time, EModelEndpoint, defaultModels, AuthType } from 'librechat-data-provider';
|
||||
import {
|
||||
fetchModels,
|
||||
splitAndTrim,
|
||||
|
|
@ -212,6 +212,51 @@ describe('getOpenAIModels', () => {
|
|||
expect(models).toContain('gpt-4');
|
||||
});
|
||||
|
||||
it('returns default models when OpenAI API key is user provided', async () => {
|
||||
mockedAxios.get.mockResolvedValue({ data: { data: [{ id: 'should-not-appear' }] } });
|
||||
process.env.OPENAI_API_KEY = AuthType.USER_PROVIDED;
|
||||
|
||||
const models = await getOpenAIModels({ user: 'user456' });
|
||||
|
||||
expect(mockedAxios.get).not.toHaveBeenCalled();
|
||||
expect(models).not.toContain('should-not-appear');
|
||||
expect(models).toContain('gpt-4');
|
||||
});
|
||||
|
||||
it('fetches models when OpenAI API key is provided through options', async () => {
|
||||
mockedAxios.get.mockResolvedValue({ data: { data: [{ id: 'gpt-runtime-key' }] } });
|
||||
process.env.OPENAI_API_KEY = AuthType.USER_PROVIDED;
|
||||
|
||||
const models = await getOpenAIModels({ user: 'user456', openAIApiKey: 'sk-runtime' });
|
||||
|
||||
expect(mockedAxios.get).toHaveBeenCalledWith(
|
||||
expect.stringContaining('https://api.openai.com/v1/models'),
|
||||
expect.objectContaining({
|
||||
headers: expect.objectContaining({
|
||||
Authorization: 'Bearer sk-runtime',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
expect(models).toEqual(['gpt-runtime-key']);
|
||||
});
|
||||
|
||||
it('falls back to environment OpenAI API key when options key is empty', async () => {
|
||||
mockedAxios.get.mockResolvedValue({ data: { data: [{ id: 'gpt-env-key' }] } });
|
||||
process.env.OPENAI_API_KEY = 'sk-env';
|
||||
|
||||
const models = await getOpenAIModels({ user: 'user456', openAIApiKey: '' });
|
||||
|
||||
expect(mockedAxios.get).toHaveBeenCalledWith(
|
||||
expect.stringContaining('https://api.openai.com/v1/models'),
|
||||
expect.objectContaining({
|
||||
headers: expect.objectContaining({
|
||||
Authorization: 'Bearer sk-env',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
expect(models).toEqual(['gpt-env-key']);
|
||||
});
|
||||
|
||||
it('returns `AZURE_OPENAI_MODELS` with `azure` flag (and fetch fails)', async () => {
|
||||
process.env.AZURE_OPENAI_MODELS = 'azure-model,azure-model-2';
|
||||
const models = await getOpenAIModels({ azure: true });
|
||||
|
|
|
|||
|
|
@ -224,12 +224,14 @@ export interface GetOpenAIModelsOptions {
|
|||
assistants?: boolean;
|
||||
/** OpenAI API key (if not using environment variable) */
|
||||
openAIApiKey?: string;
|
||||
/** Whether user provides their own API key */
|
||||
userProvidedOpenAI?: boolean;
|
||||
/** Skip MODEL_QUERIES cache (e.g., for user-provided keys) */
|
||||
skipCache?: boolean;
|
||||
}
|
||||
|
||||
function resolveOpenAIApiKey(opts: GetOpenAIModelsOptions): string | undefined {
|
||||
return opts.openAIApiKey || process.env.OPENAI_API_KEY;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches models from OpenAI or Azure based on the provided options.
|
||||
* @param opts - Options for fetching models
|
||||
|
|
@ -241,7 +243,7 @@ export async function fetchOpenAIModels(
|
|||
_models: string[] = [],
|
||||
): Promise<string[]> {
|
||||
let models = _models.slice() ?? [];
|
||||
const apiKey = opts.openAIApiKey ?? process.env.OPENAI_API_KEY;
|
||||
const apiKey = resolveOpenAIApiKey(opts);
|
||||
const openaiBaseURL = 'https://api.openai.com/v1';
|
||||
let baseURL = openaiBaseURL;
|
||||
let reverseProxyUrl = process.env.OPENAI_REVERSE_PROXY;
|
||||
|
|
@ -310,7 +312,7 @@ export async function getOpenAIModels(opts: GetOpenAIModelsOptions = {}): Promis
|
|||
return splitAndTrim(process.env[key]);
|
||||
}
|
||||
|
||||
if (opts.userProvidedOpenAI) {
|
||||
if (isUserProvided(resolveOpenAIApiKey(opts))) {
|
||||
return models;
|
||||
}
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue