Skip to content

Commit a9fa918

Browse files
authored
feat: support VertexAI provider (#153)
* support: vertexai — fixes, set default for vertexai, added comment * create schema * fix README.md * fix order * added popularity * set tools only if tools exist, restored commented-out code * fix comment * set summarizer model
1 parent 49c710b commit a9fa918

File tree

9 files changed

+248
-97
lines changed

9 files changed

+248
-97
lines changed

README.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -89,6 +89,8 @@ You can configure OpenCode using environment variables:
8989
| `ANTHROPIC_API_KEY` | For Claude models |
9090
| `OPENAI_API_KEY` | For OpenAI models |
9191
| `GEMINI_API_KEY` | For Google Gemini models |
92+
| `VERTEXAI_PROJECT` | For Google Cloud VertexAI (Gemini) |
93+
| `VERTEXAI_LOCATION` | For Google Cloud VertexAI (Gemini) |
9294
| `GROQ_API_KEY` | For Groq models |
9395
| `AWS_ACCESS_KEY_ID` | For AWS Bedrock (Claude) |
9496
| `AWS_SECRET_ACCESS_KEY` | For AWS Bedrock (Claude) |
@@ -227,6 +229,11 @@ OpenCode supports a variety of AI models from different providers:
227229
- O3 family (o3, o3-mini)
228230
- O4 Mini
229231

232+
### Google Cloud VertexAI
233+
234+
- Gemini 2.5
235+
- Gemini 2.5 Flash
236+
230237
## Usage
231238

232239
```bash

cmd/schema/main.go

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -199,6 +199,7 @@ func generateSchema() map[string]any {
199199
string(models.ProviderOpenRouter),
200200
string(models.ProviderBedrock),
201201
string(models.ProviderAzure),
202+
string(models.ProviderVertexAI),
202203
}
203204

204205
providerSchema["additionalProperties"].(map[string]any)["properties"].(map[string]any)["provider"] = map[string]any{

internal/config/config.go

Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -276,6 +276,7 @@ func setProviderDefaults() {
276276
// 5. OpenRouter
277277
// 6. AWS Bedrock
278278
// 7. Azure
279+
// 8. Google Cloud VertexAI
279280

280281
// Anthropic configuration
281282
if key := viper.GetString("providers.anthropic.apiKey"); strings.TrimSpace(key) != "" {
@@ -348,6 +349,15 @@ func setProviderDefaults() {
348349
viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
349350
return
350351
}
352+
353+
// Google Cloud VertexAI configuration
354+
if hasVertexAICredentials() {
355+
viper.SetDefault("agents.coder.model", models.VertexAIGemini25)
356+
viper.SetDefault("agents.summarizer.model", models.VertexAIGemini25)
357+
viper.SetDefault("agents.task.model", models.VertexAIGemini25Flash)
358+
viper.SetDefault("agents.title.model", models.VertexAIGemini25Flash)
359+
return
360+
}
351361
}
352362

353363
// hasAWSCredentials checks if AWS credentials are available in the environment.
@@ -376,6 +386,19 @@ func hasAWSCredentials() bool {
376386
return false
377387
}
378388

389+
// hasVertexAICredentials reports whether Google Cloud VertexAI credentials
// are available in the environment, either as the explicit VERTEXAI_*
// variables or as the standard Google Cloud project/location variables.
func hasVertexAICredentials() bool {
	// Explicit VertexAI parameters take precedence.
	project := os.Getenv("VERTEXAI_PROJECT")
	location := os.Getenv("VERTEXAI_LOCATION")
	if project != "" && location != "" {
		return true
	}

	// Fall back to the generic Google Cloud variables; either region or
	// location naming is accepted.
	gcpProject := os.Getenv("GOOGLE_CLOUD_PROJECT")
	gcpLocation := os.Getenv("GOOGLE_CLOUD_REGION")
	if gcpLocation == "" {
		gcpLocation = os.Getenv("GOOGLE_CLOUD_LOCATION")
	}
	return gcpProject != "" && gcpLocation != ""
}
401+
379402
// readConfig handles the result of reading a configuration file.
380403
func readConfig(err error) error {
381404
if err == nil {
@@ -598,6 +621,10 @@ func getProviderAPIKey(provider models.ModelProvider) string {
598621
if hasAWSCredentials() {
599622
return "aws-credentials-available"
600623
}
624+
case models.ProviderVertexAI:
625+
if hasVertexAICredentials() {
626+
return "vertex-ai-credentials-available"
627+
}
601628
}
602629
return ""
603630
}
@@ -718,6 +745,24 @@ func setDefaultModelForAgent(agent AgentName) bool {
718745
return true
719746
}
720747

748+
if hasVertexAICredentials() {
749+
var model models.ModelID
750+
maxTokens := int64(5000)
751+
752+
if agent == AgentTitle {
753+
model = models.VertexAIGemini25Flash
754+
maxTokens = 80
755+
} else {
756+
model = models.VertexAIGemini25
757+
}
758+
759+
cfg.Agents[agent] = Agent{
760+
Model: model,
761+
MaxTokens: maxTokens,
762+
}
763+
return true
764+
}
765+
721766
return false
722767
}
723768

internal/llm/models/models.go

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,7 @@ var ProviderPopularity = map[ModelProvider]int{
4343
ProviderOpenRouter: 5,
4444
ProviderBedrock: 6,
4545
ProviderAzure: 7,
46+
ProviderVertexAI: 8,
4647
}
4748

4849
var SupportedModels = map[ModelID]Model{
@@ -91,4 +92,5 @@ func init() {
9192
maps.Copy(SupportedModels, AzureModels)
9293
maps.Copy(SupportedModels, OpenRouterModels)
9394
maps.Copy(SupportedModels, XAIModels)
95+
maps.Copy(SupportedModels, VertexAIGeminiModels)
9496
}

internal/llm/models/vertexai.go

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
package models
2+
3+
const (
4+
ProviderVertexAI ModelProvider = "vertexai"
5+
6+
// Models
7+
VertexAIGemini25Flash ModelID = "vertexai.gemini-2.5-flash"
8+
VertexAIGemini25 ModelID = "vertexai.gemini-2.5"
9+
)
10+
11+
var VertexAIGeminiModels = map[ModelID]Model{
12+
VertexAIGemini25Flash: {
13+
ID: VertexAIGemini25Flash,
14+
Name: "VertexAI: Gemini 2.5 Flash",
15+
Provider: ProviderVertexAI,
16+
APIModel: "gemini-2.5-flash-preview-04-17",
17+
CostPer1MIn: GeminiModels[Gemini25Flash].CostPer1MIn,
18+
CostPer1MInCached: GeminiModels[Gemini25Flash].CostPer1MInCached,
19+
CostPer1MOut: GeminiModels[Gemini25Flash].CostPer1MOut,
20+
CostPer1MOutCached: GeminiModels[Gemini25Flash].CostPer1MOutCached,
21+
ContextWindow: GeminiModels[Gemini25Flash].ContextWindow,
22+
DefaultMaxTokens: GeminiModels[Gemini25Flash].DefaultMaxTokens,
23+
SupportsAttachments: true,
24+
},
25+
VertexAIGemini25: {
26+
ID: VertexAIGemini25,
27+
Name: "VertexAI: Gemini 2.5 Pro",
28+
Provider: ProviderVertexAI,
29+
APIModel: "gemini-2.5-pro-preview-03-25",
30+
CostPer1MIn: GeminiModels[Gemini25].CostPer1MIn,
31+
CostPer1MInCached: GeminiModels[Gemini25].CostPer1MInCached,
32+
CostPer1MOut: GeminiModels[Gemini25].CostPer1MOut,
33+
CostPer1MOutCached: GeminiModels[Gemini25].CostPer1MOutCached,
34+
ContextWindow: GeminiModels[Gemini25].ContextWindow,
35+
DefaultMaxTokens: GeminiModels[Gemini25].DefaultMaxTokens,
36+
SupportsAttachments: true,
37+
},
38+
}

internal/llm/provider/gemini.go

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -175,13 +175,16 @@ func (g *geminiClient) send(ctx context.Context, messages []message.Message, too
175175

176176
history := geminiMessages[:len(geminiMessages)-1] // All but last message
177177
lastMsg := geminiMessages[len(geminiMessages)-1]
178-
chat, _ := g.client.Chats.Create(ctx, g.providerOptions.model.APIModel, &genai.GenerateContentConfig{
178+
config := &genai.GenerateContentConfig{
179179
MaxOutputTokens: int32(g.providerOptions.maxTokens),
180180
SystemInstruction: &genai.Content{
181181
Parts: []*genai.Part{{Text: g.providerOptions.systemMessage}},
182182
},
183-
Tools: g.convertTools(tools),
184-
}, history)
183+
}
184+
if len(tools) > 0 {
185+
config.Tools = g.convertTools(tools)
186+
}
187+
chat, _ := g.client.Chats.Create(ctx, g.providerOptions.model.APIModel, config, history)
185188

186189
attempts := 0
187190
for {
@@ -260,13 +263,16 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
260263

261264
history := geminiMessages[:len(geminiMessages)-1] // All but last message
262265
lastMsg := geminiMessages[len(geminiMessages)-1]
263-
chat, _ := g.client.Chats.Create(ctx, g.providerOptions.model.APIModel, &genai.GenerateContentConfig{
266+
config := &genai.GenerateContentConfig{
264267
MaxOutputTokens: int32(g.providerOptions.maxTokens),
265268
SystemInstruction: &genai.Content{
266269
Parts: []*genai.Part{{Text: g.providerOptions.systemMessage}},
267270
},
268-
Tools: g.convertTools(tools),
269-
}, history)
271+
}
272+
if len(tools) > 0 {
273+
config.Tools = g.convertTools(tools)
274+
}
275+
chat, _ := g.client.Chats.Create(ctx, g.providerOptions.model.APIModel, config, history)
270276

271277
attempts := 0
272278
eventChan := make(chan ProviderEvent)

internal/llm/provider/provider.go

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -120,6 +120,11 @@ func NewProvider(providerName models.ModelProvider, opts ...ProviderClientOption
120120
options: clientOptions,
121121
client: newAzureClient(clientOptions),
122122
}, nil
123+
case models.ProviderVertexAI:
124+
return &baseProvider[VertexAIClient]{
125+
options: clientOptions,
126+
client: newVertexAIClient(clientOptions),
127+
}, nil
123128
case models.ProviderOpenRouter:
124129
clientOptions.openaiOptions = append(clientOptions.openaiOptions,
125130
WithOpenAIBaseURL("https://openrouter.ai/api/v1"),

internal/llm/provider/vertexai.go

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
package provider

import (
	"context"
	"os"

	"github.com/opencode-ai/opencode/internal/logging"
	"google.golang.org/genai"
)

// VertexAIClient is a ProviderClient backed by Google Cloud VertexAI.
type VertexAIClient ProviderClient

// newVertexAIClient builds a provider client for Google Cloud VertexAI.
// VertexAI serves Gemini models, so the returned client reuses the Gemini
// implementation pointed at the Vertex backend; project and location come
// from the VERTEXAI_PROJECT / VERTEXAI_LOCATION environment variables.
//
// NOTE(review): on construction failure this logs and returns a nil
// interface; callers do not appear to nil-check the result — confirm.
func newVertexAIClient(opts providerClientOptions) VertexAIClient {
	var geminiOpts geminiOptions
	for _, apply := range opts.geminiOptions {
		apply(&geminiOpts)
	}

	cfg := &genai.ClientConfig{
		Backend:  genai.BackendVertexAI,
		Project:  os.Getenv("VERTEXAI_PROJECT"),
		Location: os.Getenv("VERTEXAI_LOCATION"),
	}
	client, err := genai.NewClient(context.Background(), cfg)
	if err != nil {
		logging.Error("Failed to create VertexAI client", "error", err)
		return nil
	}

	return &geminiClient{
		providerOptions: opts,
		options:         geminiOpts,
		client:          client,
	}
}

0 commit comments

Comments
 (0)