Skip to content
This repository was archived by the owner on Jul 29, 2025. It is now read-only.

Commit 12a98d6

Browse files
Pietjan authored and kujtimiihoxha committed
Update readme & use provided logging package
1 parent a2524a1 commit 12a98d6

File tree

2 files changed

+54
-9
lines changed

2 files changed

+54
-9
lines changed

README.md

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -100,6 +100,7 @@ You can configure OpenCode using environment variables:
100100
| `AZURE_OPENAI_ENDPOINT` | For Azure OpenAI models |
101101
| `AZURE_OPENAI_API_KEY` | For Azure OpenAI models (optional when using Entra ID) |
102102
| `AZURE_OPENAI_API_VERSION` | For Azure OpenAI models |
103+
| `LOCAL_ENDPOINT` | For self-hosted models |
103104
| `SHELL` | Default shell to use (if not specified in config) |
104105

105106
### Shell Configuration
@@ -566,6 +567,35 @@ The AI assistant can access LSP features through the `diagnostics` tool, allowin
566567

567568
While the LSP client implementation supports the full LSP protocol (including completions, hover, definition, etc.), currently only diagnostics are exposed to the AI assistant.
568569

570+
## Using a self-hosted model provider
571+
572+
OpenCode can also load and use models from a self-hosted (OpenAI-like) provider.
573+
This is useful for developers who want to experiment with custom models.
574+
575+
### Configuring a self-hosted provider
576+
577+
You can use a self-hosted model by setting the `LOCAL_ENDPOINT` environment variable.
578+
This will cause OpenCode to load and use the models from the specified endpoint.
579+
580+
```bash
581+
LOCAL_ENDPOINT=http://localhost:1235/v1
582+
```
583+
584+
### Configuring a self-hosted model
585+
586+
You can also configure a self-hosted model in the configuration file under the `agents` section:
587+
588+
```json
589+
{
590+
"agents": {
591+
"coder": {
592+
"model": "local.granite-3.3-2b-instruct@q8_0",
593+
"reasoningEffort": "high"
594+
}
595+
}
596+
}
597+
```
598+
569599
## Development
570600

571601
### Prerequisites

internal/llm/models/local.go

Lines changed: 24 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -3,14 +3,14 @@ package models
33
import (
44
"cmp"
55
"encoding/json"
6-
"log/slog"
76
"net/http"
87
"net/url"
98
"os"
109
"regexp"
1110
"strings"
1211
"unicode"
1312

13+
"github.com/opencode-ai/opencode/internal/logging"
1414
"github.com/spf13/viper"
1515
)
1616

@@ -25,7 +25,7 @@ func init() {
2525
if endpoint := os.Getenv("LOCAL_ENDPOINT"); endpoint != "" {
2626
localEndpoint, err := url.Parse(endpoint)
2727
if err != nil {
28-
slog.Debug("Failed to parse local endpoint",
28+
logging.Debug("Failed to parse local endpoint",
2929
"error", err,
3030
"endpoint", endpoint,
3131
)
@@ -44,7 +44,7 @@ func init() {
4444
}
4545

4646
if len(models) == 0 {
47-
slog.Debug("No local models found",
47+
logging.Debug("No local models found",
4848
"endpoint", endpoint,
4949
)
5050
return
@@ -77,23 +77,23 @@ type localModel struct {
7777
func listLocalModels(modelsEndpoint string) []localModel {
7878
res, err := http.Get(modelsEndpoint)
7979
if err != nil {
80-
slog.Debug("Failed to list local models",
80+
logging.Debug("Failed to list local models",
8181
"error", err,
8282
"endpoint", modelsEndpoint,
8383
)
8484
}
8585
defer res.Body.Close()
8686

8787
if res.StatusCode != http.StatusOK {
88-
slog.Debug("Failed to list local models",
88+
logging.Debug("Failed to list local models",
8989
"status", res.StatusCode,
9090
"endpoint", modelsEndpoint,
9191
)
9292
}
9393

9494
var modelList localModelList
9595
if err = json.NewDecoder(res.Body).Decode(&modelList); err != nil {
96-
slog.Debug("Failed to list local models",
96+
logging.Debug("Failed to list local models",
9797
"error", err,
9898
"endpoint", modelsEndpoint,
9999
)
@@ -103,7 +103,7 @@ func listLocalModels(modelsEndpoint string) []localModel {
103103
for _, model := range modelList.Data {
104104
if strings.HasSuffix(modelsEndpoint, lmStudioBetaModelsPath) {
105105
if model.Object != "model" || model.Type != "llm" {
106-
slog.Debug("Skipping unsupported LMStudio model",
106+
logging.Debug("Skipping unsupported LMStudio model",
107107
"endpoint", modelsEndpoint,
108108
"id", model.ID,
109109
"object", model.Object,
@@ -125,7 +125,7 @@ func loadLocalModels(models []localModel) {
125125
model := convertLocalModel(m)
126126
SupportedModels[model.ID] = model
127127

128-
if i == 1 || m.State == "loaded" {
128+
if i == 0 || m.State == "loaded" {
129129
viper.SetDefault("agents.coder.model", model.ID)
130130
viper.SetDefault("agents.summarizer.model", model.ID)
131131
viper.SetDefault("agents.task.model", model.ID)
@@ -150,7 +150,19 @@ func convertLocalModel(model localModel) Model {
150150
var modelInfoRegex = regexp.MustCompile(`(?i)^([a-z0-9]+)(?:[-_]?([rv]?\d[\.\d]*))?(?:[-_]?([a-z]+))?.*`)
151151

152152
func friendlyModelName(modelID string) string {
153-
match := modelInfoRegex.FindStringSubmatch(modelID)
153+
mainID := modelID
154+
tag := ""
155+
156+
if slash := strings.LastIndex(mainID, "/"); slash != -1 {
157+
mainID = mainID[slash+1:]
158+
}
159+
160+
if at := strings.Index(modelID, "@"); at != -1 {
161+
mainID = modelID[:at]
162+
tag = modelID[at+1:]
163+
}
164+
165+
match := modelInfoRegex.FindStringSubmatch(mainID)
154166
if match == nil {
155167
return modelID
156168
}
@@ -186,6 +198,9 @@ func friendlyModelName(modelID string) string {
186198
if label != "" {
187199
parts = append(parts, label)
188200
}
201+
if tag != "" {
202+
parts = append(parts, tag)
203+
}
189204

190205
return strings.Join(parts, " ")
191206
}

0 commit comments

Comments
 (0)