mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-05-13 07:46:47 +00:00
☁️ fix: Enable Azure Agent Provider Uploads (#13045)
This commit is contained in:
parent
1e9d0cbd0d
commit
030dc98a1d
10 changed files with 176 additions and 12 deletions
|
|
@ -55,6 +55,10 @@ const systemTools = {
|
|||
|
||||
const MAX_SEARCH_LEN = 100;
|
||||
/**
 * Escapes every regex metacharacter in `str` so the result can be embedded
 * literally inside a `new RegExp(...)` pattern without altering its meaning.
 * Defaults to the empty string when called with no argument.
 */
const escapeRegex = (str = '') => {
  // `$&` in the replacement refers to the matched character itself,
  // so each metacharacter is simply prefixed with a backslash.
  return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
};
|
||||
/**
 * Returns the subset of an agent's `model_parameters` that is safe to expose
 * in a public/basic view response.
 *
 * Whitelist approach: only the boolean `useResponsesApi` flag is ever passed
 * through; every other field (API keys, temperature, etc.) is dropped. A
 * missing or nullish `modelParameters` yields an empty object.
 */
const getSafeModelParameters = (modelParameters) => {
  const params = modelParameters ?? {};
  if (typeof params.useResponsesApi === 'boolean') {
    return { useResponsesApi: params.useResponsesApi };
  }
  // Non-boolean (including undefined) flag: expose nothing.
  return {};
};
|
||||
|
||||
/**
|
||||
* Looks up each referenced agent id in Mongo, splits them into three
|
||||
|
|
@ -458,6 +462,7 @@ const getAgentHandler = async (req, res, expandProperties = false) => {
|
|||
author: agent.author,
|
||||
provider: agent.provider,
|
||||
model: agent.model,
|
||||
model_parameters: getSafeModelParameters(agent.model_parameters),
|
||||
isPublic: agent.isPublic,
|
||||
version: agent.version,
|
||||
// Safe metadata
|
||||
|
|
|
|||
|
|
@ -72,6 +72,7 @@ jest.mock('~/cache', () => ({
|
|||
|
||||
const {
|
||||
createAgent: createAgentHandler,
|
||||
getAgent: getAgentHandler,
|
||||
duplicateAgent: duplicateAgentHandler,
|
||||
revertAgentVersion: revertAgentVersionHandler,
|
||||
updateAgent: updateAgentHandler,
|
||||
|
|
@ -483,6 +484,34 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('getAgentHandler', () => {
|
||||
test('should return the safe Responses API flag in the basic VIEW response', async () => {
|
||||
const agent = await Agent.create({
|
||||
id: `agent_${uuidv4()}`,
|
||||
name: 'Azure Agent',
|
||||
description: 'Uses Responses API',
|
||||
provider: 'azureOpenAI',
|
||||
model: 'gpt-5.5',
|
||||
author: mockReq.user.id,
|
||||
model_parameters: {
|
||||
useResponsesApi: true,
|
||||
temperature: 0.7,
|
||||
apiKey: 'secret-value',
|
||||
},
|
||||
});
|
||||
|
||||
mockReq.params = { id: agent.id };
|
||||
|
||||
await getAgentHandler(mockReq, mockRes);
|
||||
|
||||
expect(mockRes.status).toHaveBeenCalledWith(200);
|
||||
const response = mockRes.json.mock.calls[0][0];
|
||||
expect(response.model_parameters).toEqual({ useResponsesApi: true });
|
||||
expect(response.model_parameters.temperature).toBeUndefined();
|
||||
expect(response.model_parameters.apiKey).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateAgentHandler', () => {
|
||||
let existingAgentId;
|
||||
let existingAgentAuthorId;
|
||||
|
|
|
|||
|
|
@ -38,8 +38,7 @@ export function DragDropProvider({ children }: { children: React.ReactNode }) {
|
|||
if (!isAgents || !conversation?.agent_id) {
|
||||
return undefined;
|
||||
}
|
||||
const agent = agentData || agentsMap?.[conversation.agent_id];
|
||||
return agent?.provider;
|
||||
return agentData?.provider ?? agentsMap?.[conversation.agent_id]?.provider;
|
||||
}, [conversation?.endpoint, conversation?.agent_id, agentData, agentsMap]);
|
||||
|
||||
const endpointType = useMemo(
|
||||
|
|
@ -49,11 +48,13 @@ export function DragDropProvider({ children }: { children: React.ReactNode }) {
|
|||
|
||||
const useResponsesApi = useMemo(() => {
|
||||
const isAgents = isAgentsEndpoint(conversation?.endpoint);
|
||||
if (!isAgents || !conversation?.agent_id || conversation?.useResponsesApi) {
|
||||
if (!isAgents || !conversation?.agent_id || conversation?.useResponsesApi !== undefined) {
|
||||
return conversation?.useResponsesApi;
|
||||
}
|
||||
const agent = agentData || agentsMap?.[conversation.agent_id];
|
||||
return agent?.model_parameters?.useResponsesApi;
|
||||
return (
|
||||
agentData?.model_parameters?.useResponsesApi ??
|
||||
agentsMap?.[conversation.agent_id]?.model_parameters?.useResponsesApi
|
||||
);
|
||||
}, [
|
||||
conversation?.endpoint,
|
||||
conversation?.agent_id,
|
||||
|
|
|
|||
|
|
@ -100,6 +100,55 @@ describe('DragDropContext endpointType resolution', () => {
|
|||
const { result } = renderHook(() => useDragDropContext(), { wrapper });
|
||||
expect(result.current.endpointType).toBe(EModelEndpoint.custom);
|
||||
});
|
||||
|
||||
it('falls back to agentsMap provider when agentData omits provider', () => {
|
||||
mockConversation = { endpoint: EModelEndpoint.agents, agent_id: 'agent-1' };
|
||||
mockAgentsMap = {
|
||||
'agent-1': { provider: EModelEndpoint.openAI, model_parameters: {} } as Partial<Agent>,
|
||||
};
|
||||
mockAgentQueryData = {} as Partial<Agent>;
|
||||
const { result } = renderHook(() => useDragDropContext(), { wrapper });
|
||||
expect(result.current.endpointType).toBe(EModelEndpoint.openAI);
|
||||
});
|
||||
});
|
||||
|
||||
describe('useResponsesApi resolution for agents', () => {
|
||||
it('uses fetched agent model parameters when conversation does not override them', () => {
|
||||
mockConversation = { endpoint: EModelEndpoint.agents, agent_id: 'agent-1' };
|
||||
mockAgentQueryData = {
|
||||
provider: EModelEndpoint.azureOpenAI,
|
||||
model_parameters: { useResponsesApi: true },
|
||||
} as Partial<Agent>;
|
||||
const { result } = renderHook(() => useDragDropContext(), { wrapper });
|
||||
expect(result.current.useResponsesApi).toBe(true);
|
||||
});
|
||||
|
||||
it('falls back to agentsMap model parameters when fetched agent omits them', () => {
|
||||
mockConversation = { endpoint: EModelEndpoint.agents, agent_id: 'agent-1' };
|
||||
mockAgentsMap = {
|
||||
'agent-1': {
|
||||
provider: EModelEndpoint.azureOpenAI,
|
||||
model_parameters: { useResponsesApi: true },
|
||||
} as Partial<Agent>,
|
||||
};
|
||||
mockAgentQueryData = { provider: EModelEndpoint.azureOpenAI } as Partial<Agent>;
|
||||
const { result } = renderHook(() => useDragDropContext(), { wrapper });
|
||||
expect(result.current.useResponsesApi).toBe(true);
|
||||
});
|
||||
|
||||
it('preserves an explicit conversation useResponsesApi false override', () => {
|
||||
mockConversation = {
|
||||
endpoint: EModelEndpoint.agents,
|
||||
agent_id: 'agent-1',
|
||||
useResponsesApi: false,
|
||||
};
|
||||
mockAgentQueryData = {
|
||||
provider: EModelEndpoint.azureOpenAI,
|
||||
model_parameters: { useResponsesApi: true },
|
||||
} as Partial<Agent>;
|
||||
const { result } = renderHook(() => useDragDropContext(), { wrapper });
|
||||
expect(result.current.useResponsesApi).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('agents endpoint without provider', () => {
|
||||
|
|
|
|||
|
|
@ -48,11 +48,13 @@ function AttachFileChat({
|
|||
});
|
||||
|
||||
const useResponsesApi = useMemo(() => {
|
||||
if (!isAgents || !conversation?.agent_id || conversation?.useResponsesApi) {
|
||||
if (!isAgents || !conversation?.agent_id || conversation?.useResponsesApi !== undefined) {
|
||||
return conversation?.useResponsesApi;
|
||||
}
|
||||
const agent = agentData || agentsMap?.[conversation.agent_id];
|
||||
return agent?.model_parameters?.useResponsesApi;
|
||||
return (
|
||||
agentData?.model_parameters?.useResponsesApi ??
|
||||
agentsMap?.[conversation.agent_id]?.model_parameters?.useResponsesApi
|
||||
);
|
||||
}, [isAgents, conversation?.agent_id, conversation?.useResponsesApi, agentData, agentsMap]);
|
||||
|
||||
const { data: fileConfig = null } = useGetFileConfig({
|
||||
|
|
@ -65,8 +67,7 @@ function AttachFileChat({
|
|||
if (!isAgents || !conversation?.agent_id) {
|
||||
return undefined;
|
||||
}
|
||||
const agent = agentData || agentsMap?.[conversation.agent_id];
|
||||
return agent?.provider;
|
||||
return agentData?.provider ?? agentsMap?.[conversation.agent_id]?.provider;
|
||||
}, [isAgents, conversation?.agent_id, agentData, agentsMap]);
|
||||
|
||||
const endpointType = useMemo(
|
||||
|
|
|
|||
|
|
@ -157,7 +157,9 @@ const AttachFileMenu = ({
|
|||
}
|
||||
|
||||
const isAzureWithResponsesApi =
|
||||
currentProvider === EModelEndpoint.azureOpenAI && useResponsesApi;
|
||||
(currentProvider === EModelEndpoint.azureOpenAI ||
|
||||
endpointType === EModelEndpoint.azureOpenAI) &&
|
||||
useResponsesApi === true;
|
||||
|
||||
if (
|
||||
isDocumentSupportedProvider(endpointType) ||
|
||||
|
|
|
|||
|
|
@ -68,7 +68,9 @@ const DragDropModal = ({ onOptionSelect, setShowModal, files, isVisible }: DragD
|
|||
const getFileType = (file: File) => inferMimeType(file.name, file.type);
|
||||
|
||||
const isAzureWithResponsesApi =
|
||||
currentProvider === EModelEndpoint.azureOpenAI && useResponsesApi;
|
||||
(currentProvider === EModelEndpoint.azureOpenAI ||
|
||||
endpointType === EModelEndpoint.azureOpenAI) &&
|
||||
useResponsesApi === true;
|
||||
|
||||
// Check if provider supports document upload
|
||||
if (
|
||||
|
|
|
|||
|
|
@ -128,6 +128,51 @@ describe('AttachFileChat', () => {
|
|||
renderComponent({ endpoint: EModelEndpoint.agents, agent_id: 'agent-2' });
|
||||
expect(mockAttachFileMenuProps.endpointType).toBe(EModelEndpoint.custom);
|
||||
});
|
||||
|
||||
it('falls back to agentsMap provider when fetched agent omits provider', () => {
|
||||
mockAgentsMap = {
|
||||
'agent-1': { provider: EModelEndpoint.openAI, model_parameters: {} } as Partial<Agent>,
|
||||
};
|
||||
mockAgentQueryData = {} as Partial<Agent>;
|
||||
renderComponent({ endpoint: EModelEndpoint.agents, agent_id: 'agent-1' });
|
||||
expect(mockAttachFileMenuProps.endpointType).toBe(EModelEndpoint.openAI);
|
||||
});
|
||||
});
|
||||
|
||||
describe('useResponsesApi resolution for agents', () => {
|
||||
it('passes useResponsesApi from fetched agent model parameters', () => {
|
||||
mockAgentQueryData = {
|
||||
provider: EModelEndpoint.azureOpenAI,
|
||||
model_parameters: { useResponsesApi: true },
|
||||
} as Partial<Agent>;
|
||||
renderComponent({ endpoint: EModelEndpoint.agents, agent_id: 'agent-1' });
|
||||
expect(mockAttachFileMenuProps.useResponsesApi).toBe(true);
|
||||
});
|
||||
|
||||
it('falls back to agentsMap model parameters when fetched agent omits them', () => {
|
||||
mockAgentsMap = {
|
||||
'agent-1': {
|
||||
provider: EModelEndpoint.azureOpenAI,
|
||||
model_parameters: { useResponsesApi: true },
|
||||
} as Partial<Agent>,
|
||||
};
|
||||
mockAgentQueryData = { provider: EModelEndpoint.azureOpenAI } as Partial<Agent>;
|
||||
renderComponent({ endpoint: EModelEndpoint.agents, agent_id: 'agent-1' });
|
||||
expect(mockAttachFileMenuProps.useResponsesApi).toBe(true);
|
||||
});
|
||||
|
||||
it('preserves an explicit conversation useResponsesApi false override', () => {
|
||||
mockAgentQueryData = {
|
||||
provider: EModelEndpoint.azureOpenAI,
|
||||
model_parameters: { useResponsesApi: true },
|
||||
} as Partial<Agent>;
|
||||
renderComponent({
|
||||
endpoint: EModelEndpoint.agents,
|
||||
agent_id: 'agent-1',
|
||||
useResponsesApi: false,
|
||||
});
|
||||
expect(mockAttachFileMenuProps.useResponsesApi).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('endpointType resolution for non-agents', () => {
|
||||
|
|
|
|||
|
|
@ -203,6 +203,17 @@ describe('AttachFileMenu', () => {
|
|||
expect(screen.getByText('Upload to Provider')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows "Upload to Provider" for azureOpenAI endpointType with useResponsesApi', () => {
|
||||
setupMocks();
|
||||
renderMenu({
|
||||
endpoint: EModelEndpoint.agents,
|
||||
endpointType: EModelEndpoint.azureOpenAI,
|
||||
useResponsesApi: true,
|
||||
});
|
||||
openMenu();
|
||||
expect(screen.getByText('Upload to Provider')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows "Upload Image" for azureOpenAI without useResponsesApi', () => {
|
||||
setupMocks({ provider: EModelEndpoint.azureOpenAI });
|
||||
renderMenu({ endpointType: EModelEndpoint.azureOpenAI, useResponsesApi: false });
|
||||
|
|
|
|||
|
|
@ -124,6 +124,25 @@ describe('DragDropModal - Provider Detection', () => {
|
|||
isDocumentSupportedProvider(scenario.currentProvider),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle Azure OpenAI endpointType when Responses API is enabled', () => {
|
||||
const scenario = {
|
||||
currentProvider: EModelEndpoint.agents,
|
||||
endpointType: EModelEndpoint.azureOpenAI,
|
||||
useResponsesApi: true,
|
||||
};
|
||||
|
||||
const isAzureWithResponsesApi =
|
||||
(scenario.currentProvider === EModelEndpoint.azureOpenAI ||
|
||||
scenario.endpointType === EModelEndpoint.azureOpenAI) &&
|
||||
scenario.useResponsesApi === true;
|
||||
|
||||
expect(
|
||||
isDocumentSupportedProvider(scenario.endpointType) ||
|
||||
isDocumentSupportedProvider(scenario.currentProvider) ||
|
||||
isAzureWithResponsesApi,
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('HEIC/HEIF file type inference', () => {
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue