# --- Claude Code via LiteLLM proxy ---
# Point the Anthropic SDK at the LiteLLM endpoint; "sk-1234" is a
# placeholder LiteLLM virtual key — replace with your real key.
export ANTHROPIC_BASE_URL="https://litellm.example.com"
export ANTHROPIC_AUTH_TOKEN="sk-1234"
npm install -g @anthropic-ai/claude-code@latest

# Write $HOME/.claude/settings.json (quoted heredoc: contents are literal,
# no shell expansion inside the JSON).
mkdir -p "$HOME/.claude"
cat > "$HOME/.claude/settings.json" <<'EOF'
{
  "env": {
    "ANTHROPIC_BASE_URL": "https://litellm.example.com",
    "ANTHROPIC_AUTH_TOKEN": "sk-1234",
    "ANTHROPIC_MODEL": "claude-sonnet-4-5-20250929"
  },
  "permissions": {
    "allow": ["*"],
    "deny": []
  }
}
EOF
# --- OpenAI Codex CLI via LiteLLM proxy ---
# Codex reads the key from the env var named by `env_key` in config.toml.
export LITELLM_API_KEY="sk-1234"
npm install -g @openai/codex@native

# Write $HOME/.codex/config.toml (quoted heredoc keeps contents literal).
mkdir -p "$HOME/.codex"
cat > "$HOME/.codex/config.toml" <<'EOF'
model = "gemini-2.5-pro"
model_provider = "litellm"

[model_providers.litellm]
name = "LiteLLM"
base_url = "https://litellm.example.com"
env_key = "LITELLM_API_KEY"
wire_api = "chat"
EOF
# --- Gemini CLI via LiteLLM proxy ---
# The Gemini CLI honors GOOGLE_GEMINI_BASE_URL for a custom endpoint and
# GEMINI_API_KEY for auth (here a LiteLLM virtual key placeholder).
export GOOGLE_GEMINI_BASE_URL="https://litellm.example.com"
export GEMINI_API_KEY="sk-1234"
npm install -g @ai-codespark/gemini-cli@latest

# Write $HOME/.gemini/settings.json (quoted heredoc: no expansion inside).
mkdir -p "$HOME/.gemini"
cat > "$HOME/.gemini/settings.json" <<'EOF'
{
  "selectedAuthType": "gemini-api-key",
  "theme": "ANSI"
}
EOF
# --- gen-cli via LiteLLM proxy ---
# gen-cli uses SiliconFlow-style env vars; point them at LiteLLM and pick
# the model via GEMINI_MODEL.
export SILICONFLOW_BASE_URL="https://litellm.example.com"
export SILICONFLOW_API_KEY="sk-1234"
export GEMINI_MODEL="gemini-2.5-pro"
npm install -g @gen-cli/gen-cli@latest

# Write $HOME/.gen-cli/settings.json (quoted heredoc: contents literal).
mkdir -p "$HOME/.gen-cli"
cat > "$HOME/.gen-cli/settings.json" <<'EOF'
{
  "selectedAuthType": "siliconflow-api-key"
}
EOF
# --- Qwen Code via LiteLLM proxy ---
# Qwen Code speaks the OpenAI-compatible API; point OPENAI_* at LiteLLM.
export OPENAI_BASE_URL="https://litellm.example.com"
export OPENAI_API_KEY="sk-1234"
export OPENAI_MODEL="gemini-2.5-pro"
npm install -g @qwen-code/qwen-code@latest

# Write $HOME/.qwen/settings.json (quoted heredoc: contents literal).
mkdir -p "$HOME/.qwen"
cat > "$HOME/.qwen/settings.json" <<'EOF'
{
  "selectedAuthType": "openai"
}
EOF
# --- trae-agent via LiteLLM proxy ---
# Create and activate the project virtualenv with uv.
uv venv
uv sync --all-extras
source .venv/bin/activate

# Write trae_config.json in the project directory. The "openai" provider
# entry is pointed at the LiteLLM endpoint; "sk-1234" is a placeholder key.
cat > trae_config.json <<'EOF'
{
  "default_provider": "openai",
  "max_steps": 20,
  "enable_lakeview": false,
  "model_providers": {
    "openai": {
      "api_key": "sk-1234",
      "base_url": "https://litellm.example.com",
      "model": "gemini-2.5-pro",
      "max_tokens": 120000,
      "temperature": 0.5,
      "top_p": 1,
      "max_retries": 10
    }
  }
}
EOF