diff --git a/packages/api/src/endpoints/openai/llm.ts b/packages/api/src/endpoints/openai/llm.ts
index a89f6fce44b0..819ae381f011 100644
--- a/packages/api/src/endpoints/openai/llm.ts
+++ b/packages/api/src/endpoints/openai/llm.ts
@@ -159,6 +159,10 @@ export function getOpenAILLMConfig({
     {
       streaming,
       model: modelOptions.model ?? '',
+      /** Default to 0 retries to avoid long delays from LangChain's
+       * exponential backoff (up to ~2 min with default maxRetries=6).
+       * Can be overridden via modelOptions or customParams.defaultParams. */
+      maxRetries: 0,
     },
     modelOptions,
   ) as Partial & Partial & Partial;