
Commit dbe4d09

Merge pull request #26 from m7medVision/feature/custom-endpoint
support Custom Endpoints
2 parents 3ceb307 + eaf3096 commit dbe4d09

File tree

9 files changed: +328 -375 lines changed

README.md

Lines changed: 35 additions & 27 deletions
@@ -5,7 +5,7 @@ AI-powered Git commit message generator that analyzes your staged changes and ou
 ## Features
 
 - Generates 10 commit message suggestions from your staged diff
-- Providers: GitHub Copilot (default), OpenAI, OpenRouter
+- Providers: GitHub Copilot (default), OpenAI
 - Interactive config to pick provider/model and set keys
 - Simple output suitable for piping into TUI menus (one message per line)

@@ -68,25 +68,49 @@ providers:
   copilot:
     api_key: "$GITHUB_TOKEN" # Uses GitHub token; token is exchanged internally
     model: "gpt-4o" # or "openai/gpt-4o"; both accepted
+    # endpoint_url: "https://api.githubcopilot.com" # Optional - uses default if not specified
   openai:
     api_key: "$OPENAI_API_KEY"
     model: "gpt-4o"
-  openrouter:
-    api_key: "$OPENROUTER_API_KEY" # or a literal key
-    model: "openai/gpt-4o" # OpenRouter model IDs, e.g. anthropic/claude-3.5-sonnet
+    # endpoint_url: "https://api.openai.com/v1" # Optional - uses default if not specified
+  # Custom provider example (e.g., local Ollama):
+  # local:
+  #   api_key: "not-needed"
+  #   model: "llama3.1:8b"
+  #   endpoint_url: "http://localhost:11434/v1"
 ```
 
-Notes:
-- Copilot: requires a GitHub token with models scope. The tool can also discover IDE Copilot tokens, but models scope is recommended.
-- Environment variable references are supported by prefixing with `$` (e.g., `$OPENAI_API_KEY`).
+### Custom Endpoints
 
-### Configure via CLI
+You can configure custom API endpoints for any provider, which is useful for:
+- **Local AI models**: Ollama, LM Studio, or other local inference servers
+- **Enterprise proxies**: Internal API gateways or proxy servers
+- **Alternative providers**: Any OpenAI-compatible API endpoint
 
-```bash
-lazycommit config set # interactive provider/model/key picker
-lazycommit config get # show current provider/model
+The `endpoint_url` field is optional. If not specified, the official endpoint for that provider will be used.
+
+#### Examples
+
+**Ollama (local):**
+```yaml
+active_provider: openai # Use openai provider for Ollama compatibility
+providers:
+  openai:
+    api_key: "ollama" # Ollama doesn't require real API keys
+    model: "llama3.1:8b"
+    endpoint_url: "http://localhost:11434/v1"
 ```
 
+<!-- **Z.AI (GLM models):** -->
+<!-- ```yaml -->
+<!-- active_provider: openai -->
+<!-- providers: -->
+<!--   openai: -->
+<!--     api_key: "$ZAI_API_KEY" -->
+<!--     model: "glm-4.6" -->
+<!--     endpoint_url: "https://api.z.ai/api/paas/v4/" -->
+<!-- ``` -->
+
 ## Integration with TUI Git clients
 
 Because `lazycommit commit` prints plain lines, it plugs nicely into menu UIs.

@@ -111,22 +135,6 @@ customCommands:
 labelFormat: "{{ .raw | green }}"
 ```
 
-Tips:
-- For `lazycommit commit`, you can omit `filter` and just use `valueFormat: "{{ .raw }}"` and `labelFormat: "{{ .raw | green }}"`.
-- If you pipe a numbered list tool (e.g., `bunx bunnai`), keep the regex groups `number` and `message` as shown.
-
-## Providers and models
-
-- Copilot (default when a GitHub token is available): uses `gpt-4o` unless overridden. Accepts `openai/gpt-4o` and normalizes it to `gpt-4o`.
-- OpenAI: choose from models defined in the interactive picker (e.g., gpt‑4o, gpt‑4.1, o3, o1, etc.).
-- OpenRouter: pick from OpenRouter-prefixed IDs (e.g., `openai/gpt-4o`, `anthropic/claude-3.5-sonnet`). Extra headers are set automatically.
-
-## How it works
-
-- Reads `git diff --cached`.
-- Sends a single prompt to the selected provider to generate 10 lines.
-- Prints the lines exactly, suitable for piping/selecting.
-
 ## Troubleshooting
 
 - "No staged changes to commit." — run `git add` first.

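The README section above says any OpenAI-compatible endpoint will work; concretely, that means the server must accept a POST to `<endpoint_url>/chat/completions`. Below is a minimal, self-contained sketch of such a request. It is not part of this commit; the base URL, model name, and prompt are illustrative assumptions for a local Ollama server.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

// chatRequest mirrors the minimal OpenAI-compatible request body.
type chatRequest struct {
	Model    string        `json:"model"`
	Messages []chatMessage `json:"messages"`
}

type chatMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

func main() {
	// Assumption: a local Ollama server exposing the OpenAI-compatible API.
	baseURL := "http://localhost:11434/v1"

	body, _ := json.Marshal(chatRequest{
		Model: "llama3.1:8b",
		Messages: []chatMessage{
			{Role: "user", Content: "Write a one-line commit message for: fix typo in README"},
		},
	})

	req, err := http.NewRequest(http.MethodPost, baseURL+"/chat/completions", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	// Ollama ignores the key, but OpenAI-compatible servers generally expect the header.
	req.Header.Set("Authorization", "Bearer not-needed")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	raw, _ := io.ReadAll(resp.Body)
	fmt.Println(string(raw))
}
```
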
cmd/commit.go

Lines changed: 9 additions & 3 deletions
@@ -56,14 +56,20 @@ var commitCmd = &cobra.Command{
 			}
 		}
 
+		endpoint, err := config.GetEndpoint()
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "Error getting endpoint: %v\n", err)
+			os.Exit(1)
+		}
+
 		switch providerName {
 		case "copilot":
-			aiProvider = provider.NewCopilotProviderWithModel(apiKey, model)
+			aiProvider = provider.NewCopilotProviderWithModel(apiKey, model, endpoint)
 		case "openai":
-			aiProvider = provider.NewOpenAIProvider(apiKey, model)
+			aiProvider = provider.NewOpenAIProvider(apiKey, model, endpoint)
 		default:
 			// Default to copilot if provider is not set or unknown
-			aiProvider = provider.NewCopilotProvider(apiKey)
+			aiProvider = provider.NewCopilotProvider(apiKey, endpoint)
 		}
 
 		commitMessages, err := aiProvider.GenerateCommitMessages(context.Background(), diff)

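The provider package is among the changed files but its diff is not shown in this view. Below is a minimal sketch of what the widened constructor signature implies, assuming it simply carries the resolved endpoint alongside the key and model; the struct layout and method body are illustrative, not the repository's actual code.

```go
package provider

import "context"

// OpenAIProvider is a sketch of how the third constructor argument might be
// stored; the real struct in this repository may differ.
type OpenAIProvider struct {
	apiKey   string
	model    string
	endpoint string // e.g. "https://api.openai.com/v1" or a custom base URL
}

// NewOpenAIProvider matches the call site in cmd/commit.go:
// provider.NewOpenAIProvider(apiKey, model, endpoint).
func NewOpenAIProvider(apiKey, model, endpoint string) *OpenAIProvider {
	return &OpenAIProvider{apiKey: apiKey, model: model, endpoint: endpoint}
}

// GenerateCommitMessages would build its requests against p.endpoint instead
// of a hard-coded host; the actual request logic is omitted in this sketch.
func (p *OpenAIProvider) GenerateCommitMessages(ctx context.Context, diff string) ([]string, error) {
	_ = ctx
	_ = diff
	return nil, nil // placeholder only
}
```
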
cmd/config.go

Lines changed: 66 additions & 12 deletions
@@ -2,6 +2,7 @@ package cmd
 
 import (
 	"fmt"
+	"net/url"
 	"os"
 
 	"github.com/AlecAivazis/survey/v2"

@@ -26,8 +27,14 @@ var getCmd = &cobra.Command{
 			fmt.Println("Error getting model:", err)
 			os.Exit(1)
 		}
+		endpoint, err := config.GetEndpoint()
+		if err != nil {
+			fmt.Println("Error getting endpoint:", err)
+			os.Exit(1)
+		}
 		fmt.Printf("Active Provider: %s\n", provider)
 		fmt.Printf("Model: %s\n", model)
+		fmt.Printf("Endpoint: %s\n", endpoint)
 	},
 }
 

@@ -39,13 +46,40 @@ var setCmd = &cobra.Command{
 	},
 }
 
+func validateEndpointURL(val interface{}) error {
+	endpoint, ok := val.(string)
+	if !ok {
+		return fmt.Errorf("endpoint must be a string")
+	}
+
+	// Empty string is valid (uses default)
+	if endpoint == "" {
+		return nil
+	}
+
+	parsedURL, err := url.Parse(endpoint)
+	if err != nil {
+		return fmt.Errorf("invalid URL format: %w", err)
+	}
+
+	if parsedURL.Scheme != "http" && parsedURL.Scheme != "https" {
+		return fmt.Errorf("endpoint must use http or https protocol")
+	}
+
+	if parsedURL.Host == "" {
+		return fmt.Errorf("endpoint must have a valid host")
+	}
+
+	return nil
+}
+
 func runInteractiveConfig() {
 	currentProvider := config.GetProvider()
 	currentModel, _ := config.GetModel()
 
 	providerPrompt := &survey.Select{
 		Message: "Choose a provider:",
-		Options: []string{"openai", "openrouter", "copilot"},
+		Options: []string{"openai", "copilot"},
 		Default: currentProvider,
 	}
 	var selectedProvider string

@@ -87,9 +121,8 @@ func runInteractiveConfig() {
 
 	// Dynamically generate available models for OpenAI
 	availableModels := map[string][]string{
-		"openai":     {},
-		"openrouter": {},
-		"copilot":    {"gpt-4o"}, // TODO: update if copilot models are dynamic
+		"openai":  {},
+		"copilot": {"gpt-4o"}, // TODO: update if copilot models are dynamic
 	}
 
 	modelDisplayToID := map[string]string{}

@@ -99,12 +132,6 @@ func runInteractiveConfig() {
 			availableModels["openai"] = append(availableModels["openai"], display)
 			modelDisplayToID[display] = string(id)
 		}
-	} else if selectedProvider == "openrouter" {
-		for id, m := range models.OpenRouterModels {
-			display := fmt.Sprintf("%s (%s)", m.Name, string(id))
-			availableModels["openrouter"] = append(availableModels["openrouter"], display)
-			modelDisplayToID[display] = string(id)
-		}
 	}
 
 	modelPrompt := &survey.Select{

@@ -115,7 +142,7 @@ func runInteractiveConfig() {
 	// Try to set the default to the current model if possible
 	isValidDefault := false
 	currentDisplay := ""
-	if selectedProvider == "openai" || selectedProvider == "openrouter" {
+	if selectedProvider == "openai" {
 		for display, id := range modelDisplayToID {
 			if id == currentModel || display == currentModel {
 				isValidDefault = true

@@ -144,7 +171,7 @@ func runInteractiveConfig() {
 	}
 
 	selectedModel := selectedDisplay
-	if selectedProvider == "openai" || selectedProvider == "openrouter" {
+	if selectedProvider == "openai" {
 		selectedModel = modelDisplayToID[selectedDisplay]
 	}
 

@@ -156,6 +183,33 @@ func runInteractiveConfig() {
 		}
 		fmt.Printf("Model set to: %s\n", selectedModel)
 	}
+
+	// Get current endpoint
+	currentEndpoint, _ := config.GetEndpoint()
+
+	// Endpoint configuration prompt
+	endpointPrompt := &survey.Input{
+		Message: "Enter custom endpoint URL (leave empty for default):",
+		Default: currentEndpoint,
+	}
+	var endpoint string
+	err = survey.AskOne(endpointPrompt, &endpoint, survey.WithValidator(validateEndpointURL))
+	if err != nil {
+		fmt.Println(err.Error())
+		return
+	}
+
+	// Only set endpoint if it's different from current
+	if endpoint != currentEndpoint && endpoint != "" {
+		err := config.SetEndpoint(selectedProvider, endpoint)
+		if err != nil {
+			fmt.Printf("Error setting endpoint: %v\n", err)
+			return
+		}
+		fmt.Printf("Endpoint set to: %s\n", endpoint)
+	} else if endpoint == "" {
+		fmt.Println("Using default endpoint for provider")
+	}
 }
 
 func init() {

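The survey validator above and the plain-string `validateEndpointURL` added in internal/config/config.go apply the same three checks: the value must parse as a URL, use http or https, and carry a host, with the empty string allowed as "use the default". Here is a small standalone example showing which inputs pass; the candidate URLs are illustrative.

```go
package main

import (
	"fmt"
	"net/url"
)

// validateEndpointURL repeats the checks from this commit: empty is allowed,
// otherwise the value must parse, use http/https, and carry a host.
func validateEndpointURL(endpoint string) error {
	if endpoint == "" {
		return nil
	}
	parsedURL, err := url.Parse(endpoint)
	if err != nil {
		return fmt.Errorf("invalid URL format: %w", err)
	}
	if parsedURL.Scheme != "http" && parsedURL.Scheme != "https" {
		return fmt.Errorf("endpoint must use http or https protocol")
	}
	if parsedURL.Host == "" {
		return fmt.Errorf("endpoint must have a valid host")
	}
	return nil
}

func main() {
	for _, candidate := range []string{
		"",                          // ok: falls back to the provider default
		"http://localhost:11434/v1", // ok: local Ollama
		"https://api.openai.com/v1", // ok
		"ftp://example.com",         // rejected: wrong scheme
		"https://",                  // rejected: no host
	} {
		fmt.Printf("%-30q -> %v\n", candidate, validateEndpointURL(candidate))
	}
}
```
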
internal/config/config.go

Lines changed: 61 additions & 2 deletions
@@ -3,6 +3,7 @@ package config
 import (
 	"encoding/json"
 	"fmt"
+	"net/url"
 	"os"
 	"path/filepath"
 	"runtime"

@@ -12,8 +13,9 @@ import (
 )
 
 type ProviderConfig struct {
-	APIKey string `mapstructure:"api_key"`
-	Model  string `mapstructure:"model"`
+	APIKey      string `mapstructure:"api_key"`
+	Model       string `mapstructure:"model"`
+	EndpointURL string `mapstructure:"endpoint_url"`
 }
 
 type Config struct {

@@ -124,6 +126,28 @@ func GetModel() (string, error) {
 	return providerConfig.Model, nil
 }
 
+func GetEndpoint() (string, error) {
+	providerConfig, err := GetActiveProviderConfig()
+	if err != nil {
+		return "", err
+	}
+
+	// If custom endpoint is configured, use it
+	if providerConfig.EndpointURL != "" {
+		return providerConfig.EndpointURL, nil
+	}
+
+	// Return default endpoints based on provider
+	switch cfg.ActiveProvider {
+	case "openai":
+		return "https://api.openai.com/v1", nil
+	case "copilot":
+		return "https://api.githubcopilot.com", nil
+	default:
+		return "", fmt.Errorf("no default endpoint available for provider '%s'", cfg.ActiveProvider)
+	}
+}
+
 func SetProvider(provider string) error {
 	if cfg == nil {
 		InitConfig()

@@ -150,6 +174,41 @@ func SetAPIKey(provider, apiKey string) error {
 	return viper.WriteConfig()
 }
 
+func validateEndpointURL(endpoint string) error {
+	if endpoint == "" {
+		return nil // Empty endpoint is valid (will use default)
+	}
+
+	parsedURL, err := url.Parse(endpoint)
+	if err != nil {
+		return fmt.Errorf("invalid URL format: %w", err)
+	}
+
+	if parsedURL.Scheme != "http" && parsedURL.Scheme != "https" {
+		return fmt.Errorf("endpoint must use http or https protocol")
+	}
+
+	if parsedURL.Host == "" {
+		return fmt.Errorf("endpoint must have a valid host")
+	}
+
+	return nil
+}
+
+func SetEndpoint(provider, endpoint string) error {
+	if cfg == nil {
+		InitConfig()
+	}
+
+	// Validate endpoint URL
+	if err := validateEndpointURL(endpoint); err != nil {
+		return err
+	}
+
+	viper.Set(fmt.Sprintf("providers.%s.endpoint_url", provider), endpoint)
+	return viper.WriteConfig()
+}
+
 func LoadGitHubToken() (string, error) {
 	if token := os.Getenv("GITHUB_TOKEN"); token != "" {
 		return token, nil

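Taken together, endpoint resolution in `GetEndpoint` is: an explicit `endpoint_url` wins, otherwise the provider's built-in default, otherwise an error. Here is a standalone restatement of that order; `resolveEndpoint` and `defaultEndpoints` are illustrative names, while the URLs and error message are copied from the switch above.

```go
package main

import "fmt"

// defaultEndpoints mirrors the switch in GetEndpoint.
var defaultEndpoints = map[string]string{
	"openai":  "https://api.openai.com/v1",
	"copilot": "https://api.githubcopilot.com",
}

// resolveEndpoint restates the fallback order from internal/config/config.go:
// a configured endpoint_url wins, then the provider default, then an error.
func resolveEndpoint(provider, configured string) (string, error) {
	if configured != "" {
		return configured, nil
	}
	if def, ok := defaultEndpoints[provider]; ok {
		return def, nil
	}
	return "", fmt.Errorf("no default endpoint available for provider '%s'", provider)
}

func main() {
	fmt.Println(resolveEndpoint("openai", ""))                           // default OpenAI URL
	fmt.Println(resolveEndpoint("openai", "http://localhost:11434/v1")) // custom endpoint wins
	fmt.Println(resolveEndpoint("local", ""))                           // error: no default for unknown provider
}
```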