// Override the LLM config here for the agents.
// Supported providers: openai, google, openrouter, xai, vertexai
// You can use any model as long as it is supported by the provider.
// Do not modify this file directly, instead copy it to llm.override.jsonc and modify it there.
{
  "planner": {
    "provider": "",
    "model": ""
  },
  "orchestrator": {
    "provider": "",
    "model": ""
  },
  "cortex": {
    "provider": "",
    "model": "",
    "fallback": {
      "provider": "",
      "model": ""
    }
  },
  "executor": {
    "provider": "",
    "model": ""
  },
  "contextor": {
    "provider": "",
    "model": ""
  },
  "utils": {
    "hopper": {
      // Needs at least a 256k context window.
      "provider": "",
      "model": ""
    },
    "outputter": {
      "provider": "",
      "model": ""
    },
    "video_analyzer": {
      "provider": "",
      "model": "",
      "fallback": {
        "provider": "",
        "model": ""
      }
    }
  }
}