From d445a0631921f7c5e7f04182bc7f0fd0cf6abcc8 Mon Sep 17 00:00:00 2001
From: xm0onh
Date: Sun, 5 Jan 2025 16:26:29 -0800
Subject: [PATCH] Diversify LLM models for each node

---
 auto-agents-framework/.env.sample          |  5 +-
 .../src/agents/workflows/kol/workflow.ts   | 17 +++++--
 auto-agents-framework/src/config/index.ts  |  9 ++--
 auto-agents-framework/src/config/schema.ts | 48 +++++++++++++++----
 4 files changed, 62 insertions(+), 17 deletions(-)

diff --git a/auto-agents-framework/.env.sample b/auto-agents-framework/.env.sample
index 0ed36e16..5f72f908 100644
--- a/auto-agents-framework/.env.sample
+++ b/auto-agents-framework/.env.sample
@@ -15,7 +15,10 @@ RESPONSE_INTERVAL_MINUTES=26
 POST_INTERVAL_MINUTES=30
 
 # LLM Configuration
-LLM_PROVIDER=openai # or anthropic or llama
+DECISION_LLM_PROVIDER=openai # or anthropic or llama
+ANALYZE_LLM_PROVIDER=openai # or anthropic or llama
+GENERATION_LLM_PROVIDER=anthropic # or openai or llama
+RESPONSE_LLM_PROVIDER=anthropic # or openai or llama
 OPENAI_API_KEY=
 ANTHROPIC_API_KEY=
 LLAMA_API_URL=
diff --git a/auto-agents-framework/src/agents/workflows/kol/workflow.ts b/auto-agents-framework/src/agents/workflows/kol/workflow.ts
index 81fc8df5..fd3f8923 100644
--- a/auto-agents-framework/src/agents/workflows/kol/workflow.ts
+++ b/auto-agents-framework/src/agents/workflows/kol/workflow.ts
@@ -63,7 +63,14 @@ export const State = Annotation.Root({
 const createWorkflowConfig = async (characterFile: string): Promise<WorkflowConfig> => {
   const { USERNAME, PASSWORD, COOKIES_PATH } = config.twitterConfig;
-  const { LLM_PROVIDER, LARGE_LLM_MODEL, SMALL_LLM_MODEL } = config.llmConfig;
+  const {
+    DECISION_LLM_PROVIDER,
+    ANALYZE_LLM_PROVIDER,
+    GENERATION_LLM_PROVIDER,
+    RESPONSE_LLM_PROVIDER,
+    LARGE_LLM_MODEL,
+    SMALL_LLM_MODEL,
+  } = config.llmConfig;
 
   const twitterApi = await createTwitterApi(USERNAME, PASSWORD, COOKIES_PATH);
   const { tools } = createTools(twitterApi);
@@ -75,10 +82,10 @@ const createWorkflowConfig = async (characterFile: string): Promise<WorkflowConfig> => {
diff --git a/auto-agents-framework/src/config/index.ts b/auto-agents-framework/src/config/index.ts
--- a/auto-agents-framework/src/config/index.ts
+++ b/auto-agents-framework/src/config/index.ts
     POST_INTERVAL_MS: (Number(process.env.POST_INTERVAL_MINUTES) || 90) * 60 * 1000,
   },
   llmConfig: {
-    LLM_PROVIDER: process.env.LLM_PROVIDER || 'openai',
-    LARGE_LLM_MODEL: process.env.LARGE_LLM_MODEL || 'gpt-4o',
-    SMALL_LLM_MODEL: process.env.SMALL_LLM_MODEL || 'gpt-4o-mini',
+    DECISION_LLM_PROVIDER: process.env.DECISION_LLM_PROVIDER || 'openai',
+    ANALYZE_LLM_PROVIDER: process.env.ANALYZE_LLM_PROVIDER || 'openai',
+    GENERATION_LLM_PROVIDER: process.env.GENERATION_LLM_PROVIDER || 'anthropic',
+    RESPONSE_LLM_PROVIDER: process.env.RESPONSE_LLM_PROVIDER || 'anthropic',
+    LARGE_LLM_MODEL: process.env.LARGE_LLM_MODEL || 'gpt-4',
+    SMALL_LLM_MODEL: process.env.SMALL_LLM_MODEL || 'gpt-4-turbo',
     OPENAI_API_KEY: process.env.OPENAI_API_KEY || '',
     ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY || '',
     LLAMA_API_URL: process.env.LLAMA_API_URL || '',
diff --git a/auto-agents-framework/src/config/schema.ts b/auto-agents-framework/src/config/schema.ts
index 35307c45..7a60f6e3 100644
--- a/auto-agents-framework/src/config/schema.ts
+++ b/auto-agents-framework/src/config/schema.ts
@@ -16,14 +16,46 @@ const twitterConfigSchema = z.object({
   POST_INTERVAL_MS: z.number().int().positive(),
 });
 
-const llmConfigSchema = z.object({
-  LLM_PROVIDER: z.enum(['openai', 'anthropic', 'llama']).default('openai'),
-  LARGE_LLM_MODEL: z.string().min(1),
-  SMALL_LLM_MODEL: z.string().min(1),
-  OPENAI_API_KEY: z.string().optional(),
-  ANTHROPIC_API_KEY: z.string().optional(),
-  LLAMA_API_URL: z.string().optional(),
-});
+const llmConfigSchema = z
+  .object({
+    DECISION_LLM_PROVIDER: z.enum(['openai', 'anthropic', 'llama']).default('openai'),
+    ANALYZE_LLM_PROVIDER: z.enum(['openai', 'anthropic', 'llama']).default('openai'),
+    GENERATION_LLM_PROVIDER: z.enum(['openai', 'anthropic', 'llama']).default('openai'),
+    RESPONSE_LLM_PROVIDER: z.enum(['openai', 'anthropic', 'llama']).default('openai'),
+    SMALL_LLM_MODEL: z.string().min(1),
+    LARGE_LLM_MODEL: z.string().min(1),
+    OPENAI_API_KEY: z.string().optional(),
+    ANTHROPIC_API_KEY: z.string().optional(),
+    LLAMA_API_URL: z.string().optional(),
+  })
+  .superRefine((data, ctx) => {
+    const providers = [
+      data.DECISION_LLM_PROVIDER,
+      data.ANALYZE_LLM_PROVIDER,
+      data.GENERATION_LLM_PROVIDER,
+      data.RESPONSE_LLM_PROVIDER,
+    ];
+
+    const missingConfigs = [];
+
+    if (providers.includes('openai') && !data.OPENAI_API_KEY) {
+      missingConfigs.push('OpenAI API key');
+    }
+    if (providers.includes('anthropic') && !data.ANTHROPIC_API_KEY) {
+      missingConfigs.push('Anthropic API key');
+    }
+    if (providers.includes('llama') && !data.LLAMA_API_URL) {
+      missingConfigs.push('Llama API URL');
+    }
+
+    if (missingConfigs.length > 0) {
+      ctx.addIssue({
+        code: z.ZodIssueCode.custom,
+        message: `Missing required configs: ${missingConfigs.join(', ')}`,
+        path: ['llmConfig'],
+      });
+    }
+  });
 
 const autoDriveConfigSchema = z.object({
   AUTO_DRIVE_API_KEY: z.string().optional(),
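
The superRefine block in the new llmConfigSchema makes the provider credentials conditionally required: a key or URL is only demanded when its provider is actually selected for at least one node. A minimal sketch of that behaviour follows, assuming only zod as a dependency; the schema below is a trimmed illustrative copy (the model-name fields are omitted), not the module from src/config/schema.ts, which does not export llmConfigSchema.

// Trimmed, illustrative copy of the new llmConfigSchema.
import { z } from 'zod';

const provider = z.enum(['openai', 'anthropic', 'llama']).default('openai');

const llmConfigSchema = z
  .object({
    DECISION_LLM_PROVIDER: provider,
    ANALYZE_LLM_PROVIDER: provider,
    GENERATION_LLM_PROVIDER: provider,
    RESPONSE_LLM_PROVIDER: provider,
    OPENAI_API_KEY: z.string().optional(),
    ANTHROPIC_API_KEY: z.string().optional(),
    LLAMA_API_URL: z.string().optional(),
  })
  .superRefine((data, ctx) => {
    // Collect every provider that is actually in use across the four nodes.
    const providers = [
      data.DECISION_LLM_PROVIDER,
      data.ANALYZE_LLM_PROVIDER,
      data.GENERATION_LLM_PROVIDER,
      data.RESPONSE_LLM_PROVIDER,
    ];
    const missing: string[] = [];
    if (providers.includes('openai') && !data.OPENAI_API_KEY) missing.push('OpenAI API key');
    if (providers.includes('anthropic') && !data.ANTHROPIC_API_KEY) missing.push('Anthropic API key');
    if (providers.includes('llama') && !data.LLAMA_API_URL) missing.push('Llama API URL');
    if (missing.length > 0) {
      ctx.addIssue({
        code: z.ZodIssueCode.custom,
        message: `Missing required configs: ${missing.join(', ')}`,
        path: ['llmConfig'],
      });
    }
  });

// Anthropic is selected for two nodes but has no credential, while the unused
// llama provider requires nothing, so exactly one custom issue is reported.
const result = llmConfigSchema.safeParse({
  GENERATION_LLM_PROVIDER: 'anthropic',
  RESPONSE_LLM_PROVIDER: 'anthropic',
  OPENAI_API_KEY: 'sk-example',
});
console.log(result.success); // false
if (!result.success) {
  console.log(result.error.issues[0].message); // "Missing required configs: Anthropic API key"
}

With GENERATION_LLM_PROVIDER and RESPONSE_LLM_PROVIDER defaulting to 'anthropic' in .env.sample, a missing ANTHROPIC_API_KEY should now surface when the config is parsed instead of at the first Anthropic call.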
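
This excerpt does not include the body of the second workflow.ts hunk, so the mapping from each *_LLM_PROVIDER to a concrete chat model is not shown. The sketch below is hypothetical wiring only: it assumes LangChain chat model classes (@langchain/openai, @langchain/anthropic), which is plausible since the workflow is built on LangGraph, but createChatModel, createNodeModels, the LlmConfig shape, and the per-node temperatures are illustrative assumptions rather than the repository's actual factory.

import { ChatOpenAI } from '@langchain/openai';
import { ChatAnthropic } from '@langchain/anthropic';

type Provider = 'openai' | 'anthropic' | 'llama';

// Shape mirrors the llmConfig object in src/config/index.ts; defined inline
// here so the sketch stays self-contained.
interface LlmConfig {
  DECISION_LLM_PROVIDER: Provider;
  ANALYZE_LLM_PROVIDER: Provider;
  GENERATION_LLM_PROVIDER: Provider;
  RESPONSE_LLM_PROVIDER: Provider;
  SMALL_LLM_MODEL: string;
  LARGE_LLM_MODEL: string;
  OPENAI_API_KEY?: string;
  ANTHROPIC_API_KEY?: string;
  LLAMA_API_URL?: string;
}

// Hypothetical helper: turn a provider name plus model name into a chat model.
const createChatModel = (cfg: LlmConfig, provider: Provider, model: string, temperature: number) => {
  switch (provider) {
    case 'openai':
      return new ChatOpenAI({ model, temperature, apiKey: cfg.OPENAI_API_KEY });
    case 'anthropic':
      return new ChatAnthropic({ model, temperature, apiKey: cfg.ANTHROPIC_API_KEY });
    default:
      // A Llama backend would be reached via LLAMA_API_URL; omitted in this sketch.
      throw new Error(`Unsupported provider: ${provider}`);
  }
};

// One model per workflow node, mirroring the *_LLM_PROVIDER values destructured
// in createWorkflowConfig. Temperatures are illustrative, not taken from the repo.
const createNodeModels = (cfg: LlmConfig) => ({
  decision: createChatModel(cfg, cfg.DECISION_LLM_PROVIDER, cfg.SMALL_LLM_MODEL, 0.2),
  analyze: createChatModel(cfg, cfg.ANALYZE_LLM_PROVIDER, cfg.LARGE_LLM_MODEL, 0.5),
  generation: createChatModel(cfg, cfg.GENERATION_LLM_PROVIDER, cfg.LARGE_LLM_MODEL, 0.7),
  response: createChatModel(cfg, cfg.RESPONSE_LLM_PROVIDER, cfg.SMALL_LLM_MODEL, 0.7),
});

Whatever the exact factory looks like, the intent of the change is the same: each node selects its provider independently, so generation and response can run on Anthropic while decision and analysis stay on OpenAI.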