fix: correct provider setup descriptions for browser-based auth

All major providers (Claude, Codex, Gemini, OpenCode) use browser-based
OAuth authentication, not manual API keys:
- Run the CLI command
- Browser opens for sign-in
- Auth tokens saved automatically

Also added post_install_note for Ollama to show how to add the provider
after installing.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
rob 2025-12-29 13:09:19 -04:00
parent b4c7491784
commit 9d3b65118d
1 changed file with 18 additions and 13 deletions

View File

@ -342,24 +342,24 @@ PROVIDER_INSTALL_INFO = {
"claude": {
"group": "Anthropic Claude",
"install_cmd": "npm install -g @anthropic-ai/claude-code",
"requires": "Node.js and npm",
"setup": "Run 'claude' and follow login prompts",
"cost": "Pay-per-use (API key required)",
"requires": "Node.js 18+ and npm",
"setup": "Run 'claude' - opens browser for sign-in (auto-saves auth tokens)",
"cost": "Pay-per-use (billed to your Anthropic account)",
"variants": ["claude", "claude-haiku", "claude-opus", "claude-sonnet"],
},
"codex": {
"group": "OpenAI Codex",
"install_cmd": "pip install openai-codex",
"requires": "Python 3.8+",
"setup": "Set OPENAI_API_KEY environment variable",
"cost": "Pay-per-use (API key required)",
"setup": "Run 'codex' - opens browser for sign-in (auto-saves auth tokens)",
"cost": "Pay-per-use (billed to your OpenAI account)",
"variants": ["codex"],
},
"gemini": {
"group": "Google Gemini",
"install_cmd": "pip install google-generativeai",
"requires": "Python 3.8+",
"setup": "Set GOOGLE_API_KEY or run 'gemini auth'",
"setup": "Run 'gemini auth' - opens browser for Google sign-in",
"cost": "Free tier available, pay-per-use for more",
"variants": ["gemini", "gemini-flash"],
},
@ -367,18 +367,19 @@ PROVIDER_INSTALL_INFO = {
"group": "OpenCode",
"install_cmd": "curl -fsSL https://opencode.ai/install.sh | bash",
"requires": "curl, bash",
"setup": "Run 'opencode auth' to authenticate",
"cost": "Free tier (pickle), paid for other models",
"setup": "Run 'opencode auth' - opens browser for sign-in",
"cost": "Free tier (pickle model), paid for premium models",
"variants": ["opencode-deepseek", "opencode-pickle", "opencode-nano", "opencode-reasoner", "opencode-grok"],
},
"ollama": {
"group": "Ollama (Local)",
"group": "Ollama (Local LLMs)",
"install_cmd": "curl -fsSL https://ollama.ai/install.sh | bash",
"requires": "curl, bash, decent GPU recommended",
"setup": "Run 'ollama pull llama3' to download a model",
"cost": "FREE (runs locally)",
"requires": "curl, bash, 8GB+ RAM (GPU recommended)",
"setup": "Run 'ollama pull llama3' to download a model, then add provider",
"cost": "FREE (runs entirely on your machine)",
"variants": [],
"custom": True,
"post_install_note": "After installing, add the provider:\n smarttools providers add ollama 'ollama run llama3' -d 'Local Llama 3'",
},
}
@ -464,6 +465,10 @@ def cmd_providers(args):
print()
print(f"Next steps:")
print(f" 1. {info['setup']}")
if info.get('post_install_note'):
print(f" 2. {info['post_install_note']}")
print(f" 3. Test with: smarttools providers test {selected}")
else:
print(f" 2. Test with: smarttools providers test {info['variants'][0] if info['variants'] else selected}")
else:
print()