fix: correct provider setup descriptions for browser-based auth
All major providers (Claude, Codex, Gemini, OpenCode) use browser-based OAuth authentication, not manual API keys:

- Run the CLI command
- Browser opens for sign-in
- Auth tokens saved automatically

Also added post_install_note for Ollama to show how to add the provider after installing.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
b4c7491784
commit
9d3b65118d
|
|
@ -342,24 +342,24 @@ PROVIDER_INSTALL_INFO = {
|
||||||
"claude": {
|
"claude": {
|
||||||
"group": "Anthropic Claude",
|
"group": "Anthropic Claude",
|
||||||
"install_cmd": "npm install -g @anthropic-ai/claude-code",
|
"install_cmd": "npm install -g @anthropic-ai/claude-code",
|
||||||
"requires": "Node.js and npm",
|
"requires": "Node.js 18+ and npm",
|
||||||
"setup": "Run 'claude' and follow login prompts",
|
"setup": "Run 'claude' - opens browser for sign-in (auto-saves auth tokens)",
|
||||||
"cost": "Pay-per-use (API key required)",
|
"cost": "Pay-per-use (billed to your Anthropic account)",
|
||||||
"variants": ["claude", "claude-haiku", "claude-opus", "claude-sonnet"],
|
"variants": ["claude", "claude-haiku", "claude-opus", "claude-sonnet"],
|
||||||
},
|
},
|
||||||
"codex": {
|
"codex": {
|
||||||
"group": "OpenAI Codex",
|
"group": "OpenAI Codex",
|
||||||
"install_cmd": "pip install openai-codex",
|
"install_cmd": "pip install openai-codex",
|
||||||
"requires": "Python 3.8+",
|
"requires": "Python 3.8+",
|
||||||
"setup": "Set OPENAI_API_KEY environment variable",
|
"setup": "Run 'codex' - opens browser for sign-in (auto-saves auth tokens)",
|
||||||
"cost": "Pay-per-use (API key required)",
|
"cost": "Pay-per-use (billed to your OpenAI account)",
|
||||||
"variants": ["codex"],
|
"variants": ["codex"],
|
||||||
},
|
},
|
||||||
"gemini": {
|
"gemini": {
|
||||||
"group": "Google Gemini",
|
"group": "Google Gemini",
|
||||||
"install_cmd": "pip install google-generativeai",
|
"install_cmd": "pip install google-generativeai",
|
||||||
"requires": "Python 3.8+",
|
"requires": "Python 3.8+",
|
||||||
"setup": "Set GOOGLE_API_KEY or run 'gemini auth'",
|
"setup": "Run 'gemini auth' - opens browser for Google sign-in",
|
||||||
"cost": "Free tier available, pay-per-use for more",
|
"cost": "Free tier available, pay-per-use for more",
|
||||||
"variants": ["gemini", "gemini-flash"],
|
"variants": ["gemini", "gemini-flash"],
|
||||||
},
|
},
|
||||||
|
|
@ -367,18 +367,19 @@ PROVIDER_INSTALL_INFO = {
|
||||||
"group": "OpenCode",
|
"group": "OpenCode",
|
||||||
"install_cmd": "curl -fsSL https://opencode.ai/install.sh | bash",
|
"install_cmd": "curl -fsSL https://opencode.ai/install.sh | bash",
|
||||||
"requires": "curl, bash",
|
"requires": "curl, bash",
|
||||||
"setup": "Run 'opencode auth' to authenticate",
|
"setup": "Run 'opencode auth' - opens browser for sign-in",
|
||||||
"cost": "Free tier (pickle), paid for other models",
|
"cost": "Free tier (pickle model), paid for premium models",
|
||||||
"variants": ["opencode-deepseek", "opencode-pickle", "opencode-nano", "opencode-reasoner", "opencode-grok"],
|
"variants": ["opencode-deepseek", "opencode-pickle", "opencode-nano", "opencode-reasoner", "opencode-grok"],
|
||||||
},
|
},
|
||||||
"ollama": {
|
"ollama": {
|
||||||
"group": "Ollama (Local)",
|
"group": "Ollama (Local LLMs)",
|
||||||
"install_cmd": "curl -fsSL https://ollama.ai/install.sh | bash",
|
"install_cmd": "curl -fsSL https://ollama.ai/install.sh | bash",
|
||||||
"requires": "curl, bash, decent GPU recommended",
|
"requires": "curl, bash, 8GB+ RAM (GPU recommended)",
|
||||||
"setup": "Run 'ollama pull llama3' to download a model",
|
"setup": "Run 'ollama pull llama3' to download a model, then add provider",
|
||||||
"cost": "FREE (runs locally)",
|
"cost": "FREE (runs entirely on your machine)",
|
||||||
"variants": [],
|
"variants": [],
|
||||||
"custom": True,
|
"custom": True,
|
||||||
|
"post_install_note": "After installing, add the provider:\n smarttools providers add ollama 'ollama run llama3' -d 'Local Llama 3'",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -464,6 +465,10 @@ def cmd_providers(args):
|
||||||
print()
|
print()
|
||||||
print(f"Next steps:")
|
print(f"Next steps:")
|
||||||
print(f" 1. {info['setup']}")
|
print(f" 1. {info['setup']}")
|
||||||
|
if info.get('post_install_note'):
|
||||||
|
print(f" 2. {info['post_install_note']}")
|
||||||
|
print(f" 3. Test with: smarttools providers test {selected}")
|
||||||
|
else:
|
||||||
print(f" 2. Test with: smarttools providers test {info['variants'][0] if info['variants'] else selected}")
|
print(f" 2. Test with: smarttools providers test {info['variants'][0] if info['variants'] else selected}")
|
||||||
else:
|
else:
|
||||||
print()
|
print()
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue