Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions api/server/controllers/agents/client.js
Original file line number Diff line number Diff line change
Expand Up @@ -836,6 +836,7 @@ class AgentClient extends BaseClient {
requestBody: config.configurable.requestBody,
user: createSafeUser(this.options.req?.user),
summarizationConfig: appConfig?.summarization,
appConfig,
tokenCounter,
});

Expand Down
1 change: 1 addition & 0 deletions api/server/controllers/agents/openai.js
Original file line number Diff line number Diff line change
Expand Up @@ -477,6 +477,7 @@ const OpenAIChatCompletionController = async (req, res) => {
initialSummary,
runId: responseId,
summarizationConfig,
appConfig,
signal: abortController.signal,
customHandlers: handlers,
requestBody: {
Expand Down
2 changes: 2 additions & 0 deletions api/server/controllers/agents/responses.js
Original file line number Diff line number Diff line change
Expand Up @@ -492,6 +492,7 @@ const createResponse = async (req, res) => {
initialSummary,
runId: responseId,
summarizationConfig,
appConfig: req.config,
signal: abortController.signal,
customHandlers: handlers,
requestBody: {
Expand Down Expand Up @@ -655,6 +656,7 @@ const createResponse = async (req, res) => {
initialSummary,
runId: responseId,
summarizationConfig,
appConfig: req.config,
signal: abortController.signal,
customHandlers: handlers,
requestBody: {
Expand Down
251 changes: 251 additions & 0 deletions packages/api/src/agents/__tests__/run-summarization.test.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import type { AppConfig } from '@librechat/data-schemas';
import type { SummarizationConfig } from 'librechat-data-provider';
import { EModelEndpoint } from 'librechat-data-provider';
import { createRun } from '~/agents/run';

// Mock winston logger
Expand Down Expand Up @@ -57,6 +59,7 @@ async function callAndCapture(
agents?: ReturnType<typeof makeAgent>[];
summarizationConfig?: SummarizationConfig;
initialSummary?: { text: string; tokenCount: number };
appConfig?: AppConfig;
} = {},
) {
const agents = opts.agents ?? [makeAgent()];
Expand All @@ -67,6 +70,7 @@ async function callAndCapture(
signal,
summarizationConfig: opts.summarizationConfig,
initialSummary: opts.initialSummary,
appConfig: opts.appConfig,
streaming: true,
streamUsage: true,
});
Expand All @@ -77,6 +81,17 @@ async function callAndCapture(
return callArgs.graphConfig.agents as Array<Record<string, unknown>>;
}

/**
 * Builds a minimal AppConfig stub whose only content is a list of custom
 * endpoints, for exercising custom-endpoint provider resolution in these tests.
 * The cast is deliberate: only the `endpoints[custom]` slice is consulted here.
 */
function makeAppConfig(
  customEndpoints: Array<{ name: string; baseURL: string; apiKey: string }>,
): AppConfig {
  const endpoints = { [EModelEndpoint.custom]: customEndpoints };
  return { endpoints } as unknown as AppConfig;
}

beforeEach(() => {
jest.clearAllMocks();
});
Expand Down Expand Up @@ -297,3 +312,239 @@ describe('initialSummary passthrough', () => {
expect(agents[0].initialSummary).toBeUndefined();
});
});

// ---------------------------------------------------------------------------
// Suite 7: custom-endpoint provider resolution
// ---------------------------------------------------------------------------
// Exercises how the run factory resolves summarizationConfig.provider against
// appConfig's custom-endpoint list: a matching custom endpoint is remapped to
// the SDK-recognized 'openAI' provider, with its baseURL/apiKey injected into
// summarizationConfig.parameters. Assertions inspect the agents captured from
// graphConfig via callAndCapture.
// NOTE(review): the resolution logic under test lives in ~/agents/run and is
// not visible in this file; comments below describe the asserted contract.
describe('custom-endpoint provider resolution', () => {
  it('remaps a custom endpoint name to openAI and injects baseURL/apiKey', async () => {
    const appConfig = makeAppConfig([
      { name: 'Ollama', baseURL: 'http://localhost:11434/v1', apiKey: 'ollama-key' },
    ]);
    const agents = await callAndCapture({
      summarizationConfig: { provider: 'Ollama', model: 'llama3' },
      appConfig,
    });

    const config = agents[0].summarizationConfig as Record<string, unknown>;
    // Custom endpoints are OpenAI-compatible, so the provider name is remapped.
    expect(config.provider).toBe('openAI');
    // The user-selected summarization model is preserved verbatim.
    expect(config.model).toBe('llama3');

    const parameters = config.parameters as Record<string, unknown>;
    expect(parameters).toMatchObject({
      configuration: { baseURL: 'http://localhost:11434/v1' },
      apiKey: 'ollama-key',
    });
  });

  it('matches Ollama case-insensitively (via normalizeEndpointName)', async () => {
    const appConfig = makeAppConfig([
      { name: 'Ollama', baseURL: 'http://localhost:11434/v1', apiKey: 'ollama-key' },
    ]);
    // Lowercase 'ollama' should still resolve to the 'Ollama' endpoint entry.
    const agents = await callAndCapture({
      summarizationConfig: { provider: 'ollama', model: 'llama3' },
      appConfig,
    });

    const config = agents[0].summarizationConfig as Record<string, unknown>;
    expect(config.provider).toBe('openAI');
    expect((config.parameters as Record<string, unknown>).apiKey).toBe('ollama-key');
  });

  it('resolves non-Ollama endpoints on exact-case match', async () => {
    const appConfig = makeAppConfig([
      { name: 'Together', baseURL: 'https://api.together.ai/v1', apiKey: 'together-key' },
    ]);
    const agents = await callAndCapture({
      summarizationConfig: { provider: 'Together', model: 'mixtral' },
      appConfig,
    });
    expect((agents[0].summarizationConfig as Record<string, unknown>).provider).toBe('openAI');
  });

  it('does not match non-Ollama endpoints with different casing', async () => {
    const appConfig = makeAppConfig([
      { name: 'Together', baseURL: 'https://api.together.ai/v1', apiKey: 'together-key' },
    ]);
    // Only Ollama gets case-insensitive treatment; 'together' !== 'Together'.
    const agents = await callAndCapture({
      summarizationConfig: { provider: 'together', model: 'mixtral' },
      appConfig,
    });
    const config = agents[0].summarizationConfig as Record<string, unknown>;
    // No match → provider and parameters pass through untouched.
    expect(config.provider).toBe('together');
    expect(config.parameters).toBeUndefined();
  });

  it('leaves known SDK providers untouched', async () => {
    const appConfig = makeAppConfig([]);
    const agents = await callAndCapture({
      summarizationConfig: { provider: 'anthropic', model: 'claude' },
      appConfig,
    });

    const config = agents[0].summarizationConfig as Record<string, unknown>;
    // 'anthropic' is already SDK-recognized; no remap, no injected parameters.
    expect(config.provider).toBe('anthropic');
    expect(config.parameters).toBeUndefined();
  });

  it('preserves unknown provider names when appConfig is missing', async () => {
    // No appConfig at all → resolution is skipped entirely.
    const agents = await callAndCapture({
      summarizationConfig: { provider: 'Ollama', model: 'llama3' },
    });

    const config = agents[0].summarizationConfig as Record<string, unknown>;
    expect(config.provider).toBe('Ollama');
    expect(config.parameters).toBeUndefined();
  });

  it('leaves unrecognized names untouched when no matching custom endpoint exists', async () => {
    const appConfig = makeAppConfig([
      { name: 'Ollama', baseURL: 'http://localhost:11434/v1', apiKey: 'ollama-key' },
    ]);
    const agents = await callAndCapture({
      summarizationConfig: { provider: 'nonexistent', model: 'foo' },
      appConfig,
    });

    const config = agents[0].summarizationConfig as Record<string, unknown>;
    expect(config.provider).toBe('nonexistent');
    expect(config.parameters).toBeUndefined();
  });

  it('extracts ${ENV_VAR} references in custom endpoint credentials', async () => {
    // Seed the env var that the ${TEST_OLLAMA_KEY} placeholder should resolve to.
    process.env.TEST_OLLAMA_KEY = 'resolved-key-value';
    const appConfig = makeAppConfig([
      {
        name: 'Ollama',
        baseURL: 'http://localhost:11434/v1',
        apiKey: '${TEST_OLLAMA_KEY}',
      },
    ]);
    const agents = await callAndCapture({
      summarizationConfig: { provider: 'Ollama', model: 'llama3' },
      appConfig,
    });

    const config = agents[0].summarizationConfig as Record<string, unknown>;
    const parameters = config.parameters as Record<string, unknown>;
    expect(parameters.apiKey).toBe('resolved-key-value');
    // Clean up so the env var cannot leak into sibling tests.
    delete process.env.TEST_OLLAMA_KEY;
  });

  it('skips override when apiKey is marked user_provided', async () => {
    const appConfig = makeAppConfig([
      { name: 'Ollama', baseURL: 'http://localhost:11434/v1', apiKey: 'user_provided' },
    ]);
    const agents = await callAndCapture({
      summarizationConfig: { provider: 'Ollama', model: 'llama3' },
      appConfig,
    });

    const config = agents[0].summarizationConfig as Record<string, unknown>;
    // Provider still remapped to the SDK-recognized name...
    expect(config.provider).toBe('openAI');
    // ...but credentials are not forwarded (async user lookup not supported here;
    // SDK's self-summarize path will reuse the agent's clientOptions).
    expect(config.parameters).toBeUndefined();
  });

  it('skips override when env var reference cannot be resolved', async () => {
    // Ensure the placeholder has nothing to resolve against.
    delete process.env.UNSET_TEST_KEY;
    const appConfig = makeAppConfig([
      {
        name: 'Ollama',
        baseURL: 'http://localhost:11434/v1',
        apiKey: '${UNSET_TEST_KEY}',
      },
    ]);
    const agents = await callAndCapture({
      summarizationConfig: { provider: 'Ollama', model: 'llama3' },
      appConfig,
    });

    const config = agents[0].summarizationConfig as Record<string, unknown>;
    expect(config.provider).toBe('openAI');
    // Unresolvable credential → no parameters are forwarded at all.
    expect(config.parameters).toBeUndefined();
  });

  it('skips override when partial env var reference (prefix/suffix) stays unresolved', async () => {
    delete process.env.UNSET_TEST_SEGMENT;
    const appConfig = makeAppConfig([
      {
        name: 'Ollama',
        // Placeholder embedded inside a larger string, not the whole value.
        baseURL: 'https://${UNSET_TEST_SEGMENT}.example.com/v1',
        apiKey: 'ollama-key',
      },
    ]);
    const agents = await callAndCapture({
      summarizationConfig: { provider: 'Ollama', model: 'llama3' },
      appConfig,
    });

    const config = agents[0].summarizationConfig as Record<string, unknown>;
    expect(config.provider).toBe('openAI');
    /** Even though the baseURL is a partial-match pattern, it must not be forwarded. */
    expect(config.parameters).toBeUndefined();
  });

  it('merges overrides alongside user-supplied parameters', async () => {
    const appConfig = makeAppConfig([
      { name: 'Ollama', baseURL: 'http://localhost:11434/v1', apiKey: 'ollama-key' },
    ]);
    const agents = await callAndCapture({
      summarizationConfig: {
        provider: 'Ollama',
        model: 'llama3',
        parameters: { temperature: 0.2 },
      },
      appConfig,
    });

    const config = agents[0].summarizationConfig as Record<string, unknown>;
    const parameters = config.parameters as Record<string, unknown>;
    // Injected credentials coexist with the caller's own parameters.
    expect(parameters).toMatchObject({
      temperature: 0.2,
      apiKey: 'ollama-key',
    });
    const configuration = parameters.configuration as Record<string, unknown>;
    expect(configuration.baseURL).toBe('http://localhost:11434/v1');
  });

  it('forwards custom-endpoint headers as configuration.defaultHeaders', async () => {
    const appConfig = makeAppConfig([
      // Cast needed because makeAppConfig's element type has no `headers` field.
      {
        name: 'Ollama',
        baseURL: 'http://localhost:11434/v1',
        apiKey: 'ollama-key',
        headers: { 'X-Custom-Header': 'value-123' },
      } as unknown as { name: string; baseURL: string; apiKey: string },
    ]);
    const agents = await callAndCapture({
      summarizationConfig: { provider: 'Ollama', model: 'llama3' },
      appConfig,
    });

    const config = agents[0].summarizationConfig as Record<string, unknown>;
    const parameters = config.parameters as Record<string, unknown>;
    const configuration = parameters.configuration as Record<string, unknown>;
    const defaultHeaders = configuration.defaultHeaders as Record<string, string>;
    expect(defaultHeaders['X-Custom-Header']).toBe('value-123');
  });

  it('does not leak model/modelName from getOpenAIConfig defaults', async () => {
    const appConfig = makeAppConfig([
      { name: 'Ollama', baseURL: 'http://localhost:11434/v1', apiKey: 'ollama-key' },
    ]);
    const agents = await callAndCapture({
      summarizationConfig: { provider: 'Ollama', model: 'llama3' },
      appConfig,
    });

    const config = agents[0].summarizationConfig as Record<string, unknown>;
    expect(config.model).toBe('llama3');
    const parameters = config.parameters as Record<string, unknown>;
    /** Summarization.model must win — parameters must not carry a stale model/modelName. */
    expect(parameters.model).toBeUndefined();
    expect(parameters.modelName).toBeUndefined();
  });
});
Loading
Loading