Update OpenCode provider info with free models and 75+ providers

- Mention 4 free models: Big Pickle, GLM-4.7, Grok Code Fast 1, MiniMax M2.1
- Note that 75+ providers are available through OpenCode
- Change setup to "opens browser to connect more providers"
- Put opencode-pickle first in variants so test uses a free provider

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
rob 2025-12-30 00:09:14 -04:00
parent e2816bf6f6
commit c8a0433ecc
1 changed file with 4 additions and 4 deletions

View File

@@ -364,12 +364,12 @@ PROVIDER_INSTALL_INFO = {
"variants": ["gemini", "gemini-flash"], "variants": ["gemini", "gemini-flash"],
}, },
"opencode": { "opencode": {
"group": "OpenCode", "group": "OpenCode (75+ providers)",
"install_cmd": "curl -fsSL https://opencode.ai/install | bash", "install_cmd": "curl -fsSL https://opencode.ai/install | bash",
"requires": "curl, bash", "requires": "curl, bash",
"setup": "Run 'opencode' - opens browser for sign-in", "setup": "Run 'opencode' - opens browser to connect more providers",
"cost": "Free tier available, paid for premium models", "cost": "4 FREE models included (Big Pickle, GLM-4.7, Grok Code Fast 1, MiniMax M2.1), 75+ more available",
"variants": ["opencode-deepseek", "opencode-pickle", "opencode-nano", "opencode-reasoner", "opencode-grok"], "variants": ["opencode-pickle", "opencode-deepseek", "opencode-nano", "opencode-reasoner", "opencode-grok"],
}, },
"ollama": { "ollama": {
"group": "Ollama (Local LLMs)", "group": "Ollama (Local LLMs)",